"""
=========
SMOTE SVM
=========
An illustration of the SMOTE SVM method.
"""
print(__doc__)
import matplotlib.pyplot as plt
import seaborn as sns
sns.set()
# Define some color for the plotting
almost_black = '#262626'
palette = sns.color_palette()
from sklearn.datasets import make_classification
from sklearn.decomposition import PCA
from imblearn.over_sampling import SMOTE
# Generate the dataset
X, y = make_classification(n_classes=2, class_sep=2, weights=[0.1, 0.9],
n_informative=3, n_redundant=1, flip_y=0,
n_features=20, n_clusters_per_class=1,
n_samples=5000, random_state=10)
# Instantiate a PCA object for the sake of easy visualisation
pca = PCA(n_components=2)
# Fit and transform x to visualise inside a 2D feature space
X_vis = pca.fit_transform(X)
# Apply SMOTE SVM
sm = SMOTE(kind='svm')
X_resampled, y_resampled = sm.fit_sample(X, y)
X_res_vis = pca.transform(X_resampled)
# Two subplots, unpack the axes array immediately
f, (ax1, ax2) = plt.subplots(1, 2)
ax1.scatter(X_vis[y == 0, 0], X_vis[y == 0, 1], label="Class #0", alpha=0.5,
edgecolor=almost_black, facecolor=palette[0], linewidth=0.15)
ax1.scatter(X_vis[y == 1, 0], X_vis[y == 1, 1], label="Class #1", alpha=0.5,
edgecolor=almost_black, facecolor=palette[2], linewidth=0.15)
ax1.set_title('Original set')
ax2.scatter(X_res_vis[y_resampled == 0, 0], X_res_vis[y_resampled == 0, 1],
label="Class #0", alpha=.5, edgecolor=almost_black,
facecolor=palette[0], linewidth=0.15)
ax2.scatter(X_res_vis[y_resampled == 1, 0], X_res_vis[y_resampled == 1, 1],
label="Class #1", alpha=.5, edgecolor=almost_black,
facecolor=palette[2], linewidth=0.15)
ax2.set_title('SMOTE svm')
plt.show()
#!/usr/bin/env python
# coding: utf-8
def ingresar_numero(numero_minimo, numero_maximo):
    ''' Shows an input prompt so the user can enter a number such that
    numero_minimo <= input <= numero_maximo. On invalid input it prints a
    descriptive error message and asks again.
    Returns the entered number as an integer.
    '''
    while True:
        ingreso = input()
        if not ingreso.isdigit():
            print("The input must be numeric.")
        elif not numero_minimo <= int(ingreso) <= numero_maximo:
            print("The input must be between {} and {}.".format(
                numero_minimo, numero_maximo))
        else:
            return int(ingreso)
def ingresar_cadena_no_vacia():
    ''' Shows an input prompt so the user can enter a non-empty string.
    On invalid input it prints a descriptive error message and asks again.
    Returns the entered string, in uppercase.
    '''
    while True:
        ingreso = input()
        if len(ingreso) == 0:
            print("The input must not be empty.")
        else:
            return ingreso.upper()
def mostrar_menu_generico(opciones, opcion_por_defecto):
    ''' Shows a selection screen given a list of options and a default
    option. The user can pick an option according to the numbering shown,
    which is generated by this function. The input is validated, asking
    again as many times as necessary.
    opciones is a list of strings with the options to display.
    opcion_por_defecto is an additional, mandatory option not included in
    the list of options. It is shown last and is intended for a
    "cancel"-style option.
    Returns an integer according to the user's choice. If an element of
    the list of options is selected, its index is returned. If the default
    option is selected, -1 is returned.
    '''
    print("Select an option:")
    for numero_opcion in range(len(opciones)):
        print("{}. {}.".format(numero_opcion + 1, opciones[numero_opcion]))
    print("{}. {}.".format(len(opciones) + 1, opcion_por_defecto))
    print()
    seleccion = ingresar_numero(1, len(opciones) + 1)
    # Case where the default option is chosen.
    if seleccion == len(opciones) + 1:
        return -1
    # Case where an option from the list is chosen.
    return seleccion - 1
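# A minimal usage sketch (not part of the original module; the option
# strings below are illustrative):
if __name__ == "__main__":
    opciones = ["Add item", "Remove item", "List items"]
    eleccion = mostrar_menu_generico(opciones, "Exit")
    if eleccion == -1:
        print("Goodbye.")
    else:
        print("You chose: {}".format(opciones[eleccion]))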
from a10sdk.common.A10BaseClass import A10BaseClass
class DeviceGroup(A10BaseClass):
"""Class Description::
configure scaleout device groups.
Class device-group supports CRUD Operations and inherits from `common/A10BaseClass`.
    This class is the `"PARENT"` class for this module.
    :param device_group: {"description": "scaleout device group", "format": "number", "type": "number", "maximum": 16, "minimum": 1, "optional": false}
:param device_id_start: {"optional": true, "type": "number", "format": "number"}
    :param device_id_end: {"optional": true, "type": "number", "format": "number"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/acos-scaleout/cluster-config/device-groups/device-group/{device_group}`.
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.required = [ "device_group"]
self.b_key = "device-group"
self.a10_url="/axapi/v3/acos-scaleout/cluster-config/device-groups/device-group/{device_group}"
self.DeviceProxy = ""
self.device_group = ""
self.device_id_start = ""
self.device_id_end = ""
        for key, value in kwargs.items():
            setattr(self, key, value)
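# A minimal standalone sketch (parameter values are illustrative, not from
# the original file): the constructor simply copies keyword arguments onto
# the instance, and `a10_url` carries the REST endpoint template.
if __name__ == "__main__":
    dg = DeviceGroup(device_group=1, device_id_start=1, device_id_end=4)
    print(dg.a10_url.format(device_group=dg.device_group))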
##############################################################################
#
# Copyright (C) 2021 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
import random
import itertools
from odoo import api, models
class PartnerCommunication(models.Model):
_inherit = "partner.communication.config"
@api.multi
def generate_test_cases_by_language_family_case(self, lang="de_DE", family_case="single", send_mode="digital"):
"""
Generates example communications for our multiple cases in CH
depending on the language and the family case
Outputs the texts in a file
:param lang:
:return: True
"""
self.ensure_one()
comm_obj = self.env["partner.communication.job"].with_context(
must_skip_send_to_printer=True)
res = []
for number_sponsorship in [1, 3, 4]:
partner = self._find_partner(number_sponsorship, lang, family_case)
if partner is None:
continue
object_ids = self._get_test_objects(partner)
object_ids = ",".join([str(id)
for id in object_ids[0:number_sponsorship]])
temp_comm = comm_obj.create({
"partner_id": partner.id,
"config_id": self.id,
"object_ids": | object_ids,
"auto_send": False,
"send_mode": send_mode,
})
            res.append({
"case": f"{family_case}_{number_sponsorship}_child",
"subject": temp_comm.subject,
"body_html": temp_comm.body_html
})
temp_comm.unlink()
return res
@api.multi
def generate_test_case_by_partner(self, partner=None, send_mode="digital"):
"""
Generates example communications for our multiple cases in CH
depending on partner
Outputs the texts in a file
:param partner:
:return: True
"""
self.ensure_one()
comm_obj = self.env["partner.communication.job"].with_context(
must_skip_send_to_printer=True)
res = []
object_ids = self._get_test_objects(partner)
object_ids = ",".join([str(id) for id in object_ids])
temp_comm = comm_obj.create({
"partner_id": partner.id,
"config_id": self.id,
"object_ids": object_ids,
"auto_send": False,
"send_mode": send_mode,
})
res = {
"case": "partner",
"subject": temp_comm.subject,
"body_html": temp_comm.body_html
}
temp_comm.unlink()
return res
def open_test_case_wizard(self):
return {
"name": "Test communication cases",
"type": "ir.actions.act_window",
"view_type": "form",
"view_mode": "form",
"context": self.env.context,
"res_model": "partner.communication.test.cases.wizard",
'target': 'current',
}
def _get_test_objects(self, partner):
if self.model == "res.partner":
object_ids = partner.ids
elif self.model == "recurring.contract":
object_ids = partner.sponsorship_ids.ids
elif self.model == "correspondence":
object_ids = partner.mapped("sponsorship_ids.child_letter_ids").ids
elif self.model == "compassion.child":
object_ids = partner.sponsored_child_ids.ids
elif self.model == "account.invoice.line":
object_ids = self.env["account.invoice.line"].search([
("partner_id", "=", partner.id),
("invoice_id.invoice_category", "=", "fund")
], limit=4).ids
elif self.model == "account.invoice":
object_ids = self.env["account.invoice"].search([
("partner_id", "=", partner.id)
], limit=4).ids
return object_ids
def _find_partner(self, number_sponsorships, lang, family_case):
family = self.env.ref("partner_compassion.res_partner_title_family")
query = [
("number_sponsorships", "=", number_sponsorships),
("lang", "=", lang),
]
if family_case == "single":
query += [("title", "!=", family.id), ("title.plural", "=", False)]
else:
query += [("title", "=", family.id)]
answers = self.env["res.partner"].search(query, limit=50)
# check that the query returned a result
if len(answers) <= 0:
return None
# randomly select one
answer = random.choice(answers)
return answer
from django.apps import AppConfig
class GeoTweetsConfig(AppConfig):
    name = 'geo_tweets'
string='Destination Address',
store=True,
readonly=True)
sale_id = fields.Many2one(related='move_id.procurement_id.sale_line_id.order_id',
string='Sale',
store=True,
                              readonly=True)
delivery_date = fields.Datetime('Date of Transfer')
return_date = fields.Date('Return date')
    company_id = fields.Many2one(related='move_id.company_id',
                                 string='Company',
                                 store=True,
                                 readonly=True)
state = fields.Selection([('draft', 'Draft'), ('sale', 'Sale'),
('returned', 'Returned'),
('invoiced', 'Invoiced'),
('loss', 'Loss')], 'State',
readonly=True, required=True)
sale_move_id = fields.Many2one('stock.move', 'Sale Move', required=False,
readonly=True, ondelete='cascade', select=1)
sale_picking_id = fields.Many2one(related='sale_move_id.picking_id',
string='Sale picking',
readonly=True)
return_picking_id = fields.Many2one('stock.picking', 'Return Picking',
required=False, readonly=True,
ondelete='cascade', select=1)
loss_move_id = fields.Many2one('stock.move', 'Loss Move', required=False,
readonly=True, ondelete='cascade', select=1)
loss_picking_id = fields.Many2one(related='loss_move_id.picking_id',
string='Loss picking',
readonly=True)
user_id = fields.Many2one('res.users', 'Comercial', required=False,
readonly=False, ondelete='cascade', select=1)
cost_subtotal = fields.Float('Cost', related='move_id.cost_subtotal',
store=True, readonly=True)
@api.multi
def sale(self):
move_obj = self.env['stock.move']
picking_type_id = self.env.ref('stock.picking_type_out')
for deposit in self:
procurement_id = deposit.sale_id.procurement_group_id
picking = self.env['stock.picking'].create(
{'picking_type_id': picking_type_id.id,
'partner_id': deposit.partner_id.id,
'origin': deposit.sale_id.name,
'date_done': datetime.now(),
'invoice_state': '2binvoiced',
'commercial': deposit.user_id.id,
'group_id': procurement_id.id})
values = {
'product_id': deposit.product_id.id,
'product_uom_qty': deposit.product_uom_qty,
'product_uom': deposit.product_uom.id,
'partner_id': deposit.partner_id.id,
'name': 'Sale Deposit: ' + deposit.move_id.name,
'location_id': deposit.move_id.location_dest_id.id,
'location_dest_id': deposit.partner_id.property_stock_customer.id,
'invoice_state': '2binvoiced',
'picking_id': picking.id,
'procurement_id': deposit.move_id.procurement_id.id,
'commercial': deposit.user_id.id,
'group_id': procurement_id.id
}
move = move_obj.create(values)
move.action_confirm()
move.force_assign()
move.action_done()
deposit.write({'state': 'sale', 'sale_move_id': move.id})
@api.one
def _prepare_deposit_move(self, picking, group):
deposit_id = self.env.ref('stock_deposit.stock_location_deposit')
move_template = {
'name': 'RET' or '',
'product_id': self.product_id.id,
'product_uom': self.product_uom.id,
'product_uom_qty': self.product_uom_qty,
'product_uos': self.product_uom.id,
'location_id': deposit_id.id,
'location_dest_id':
picking.picking_type_id.default_location_dest_id.id,
'picking_id': picking.id,
'partner_id': self.partner_id.id,
'move_dest_id': False,
'state': 'draft',
'company_id': self.company_id.id,
'group_id': group.id,
'procurement_id': False,
'origin': False,
'route_ids':
picking.picking_type_id.warehouse_id and
[(6, 0,
[x.id for x in
picking.picking_type_id.warehouse_id.route_ids])] or [],
'warehouse_id': picking.picking_type_id.warehouse_id.id,
'invoice_state': 'none'
}
return move_template
@api.one
def _create_stock_moves(self, picking=False):
stock_move = self.env['stock.move']
todo_moves = self.env['stock.move']
new_group = self.env['procurement.group'].create(
{'name': 'deposit RET', 'partner_id': self.partner_id.id})
for vals in self._prepare_deposit_move(picking, new_group):
todo_moves += stock_move.create(vals)
todo_moves.action_confirm()
todo_moves.force_assign()
@api.multi
def return_deposit(self):
picking_type_id = self.env.ref('stock.picking_type_in')
for deposit in self:
picking = self.env['stock.picking'].create(
{'picking_type_id': picking_type_id.id,
'partner_id': deposit.partner_id.id})
deposit._create_stock_moves(picking)
deposit.write({'state': 'returned',
'return_picking_id': picking.id})
@api.model
def send_advise_email(self):
deposits = self.search([('return_date', '=', fields.Date.today())])
#~ mail_pool = self.env['mail.mail']
#~ mail_ids = self.env['mail.mail']
template = self.env.ref('stock_deposit.stock_deposit_advise_partner', False)
for deposit in deposits:
ctx = dict(self._context)
ctx.update({
'default_model': 'stock.deposit',
'default_res_id': deposit.id,
'default_use_template': bool(template.id),
'default_template_id': template.id,
'default_composition_mode': 'comment',
'mark_so_as_sent': True
})
composer_id = self.env['mail.compose.message'].with_context(
ctx).create({})
composer_id.with_context(ctx).send_mail()
#~ mail_id = template.send_mail(deposit.id)
#~ mail_ids += mail_pool.browse(mail_id)
#~ if mail_ids:
#~ mail_ids.send()
return True
@api.multi
def deposit_loss(self):
move_obj = self.env['stock.move']
picking_type_id = self.env.ref('stock.picking_type_out')
deposit_loss_loc = self.env.ref('stock_deposit.stock_location_deposit_loss')
for deposit in self:
procurement_id = deposit.sale_id.procurement_group_id
picking = self.env['stock.picking'].create(
{'picking_type_id': picking_type_id.id,
'partner_id': deposit.partner_id.id,
'origin': deposit.sale_id.name,
'date_done': fields.Datetime.now(),
'invoice_state': 'none',
'commercial': deposit.user_id.id,
'group_id': procurement_id.id})
values = {
'product_id': deposit.product_id.id,
'product_uom_qty': deposit.product_uom_qty,
'product_uom': deposit.product_uom.id,
'partner_id': deposit.partner_id.id,
'name': u'Loss Deposit: ' + deposit.move_id.name,
'location_id': deposit.move_id.location_dest_id.id,
'location_dest_id': deposit_loss_loc.id,
'invoice_state': 'none',
'pick |
"""
2.3.4: If `x` is not an object or function, fulfill `promise` with `x`
https://github.com/promises-aplus/promises-tests/blob/2.1.1/lib/tests/2.3.4.js
"""
from test.promises.helpers import generate_fulfilled_test_case, generate_rejected_test_case
dummy = {'dummy': 'dummy'}
sentinel = {'sentinel': 'sentinel'}
def primitive_fulfilled_wrapper(primitive_value):
def test_method(test_case, promise, done):
def return_primitive(value):
return primitive_value
def retrieve_primitive(value):
test_case.assertEqual(value, primitive_value)
            done()
promise.then(return_primitive).then(retrieve_primitive)
return test_method
def primitive_rejected_wrapper(primitive_value):
def test_method(test_case, promise, done):
def return_primitive(value):
return primitive_value
def retrieve_primitive(value):
test_case.assertEqual(value, primitive_value)
done()
        promise.then(None, return_primitive).then(retrieve_primitive)
return test_method
None_FulfilledTestCase = generate_fulfilled_test_case(primitive_fulfilled_wrapper(None), dummy,
module=__name__,
name='None_FulfilledTestCase')
Zero_FulfilledTestCase = generate_fulfilled_test_case(primitive_fulfilled_wrapper(0), dummy,
module=__name__,
name='Zero_FulfilledTestCase')
One_FulfilledTestCase = generate_fulfilled_test_case(primitive_fulfilled_wrapper(1), dummy,
module=__name__,
name='One_FulfilledTestCase')
String_FulfilledTestCase = generate_fulfilled_test_case(primitive_fulfilled_wrapper('asdf'), dummy,
module=__name__,
name='String_FulfilledTestCase')
EmptyString_FulfilledTestCase = generate_fulfilled_test_case(primitive_fulfilled_wrapper(''), dummy,
module=__name__,
name='EmptyString_FulfilledTestCase')
List_FulfilledTestCase = generate_fulfilled_test_case(primitive_fulfilled_wrapper(['asdf', 'value1']), dummy,
module=__name__,
name='List_FulfilledTestCase')
EmptyList_FulfilledTestCase = generate_fulfilled_test_case(primitive_fulfilled_wrapper([]), dummy,
module=__name__,
name='EmptyList_FulfilledTestCase')
Dict_FulfilledTestCase = generate_fulfilled_test_case(primitive_fulfilled_wrapper(dict(key='value')), dummy,
module=__name__,
name='Dict_FulfilledTestCase')
EmptyDict_FulfilledTestCase = generate_fulfilled_test_case(primitive_fulfilled_wrapper(dict()), dummy,
module=__name__,
name='EmptyDict_FulfilledTestCase')
Tuple_FulfilledTestCase = generate_fulfilled_test_case(primitive_fulfilled_wrapper(('asdf', 'value1', )), dummy,
module=__name__,
name='Tuple_FulfilledTestCase')
EmptyTuple_FulfilledTestCase = generate_fulfilled_test_case(primitive_fulfilled_wrapper(()), dummy,
module=__name__,
name='EmptyTuple_FulfilledTestCase')
Object_FulfilledTestCase = generate_fulfilled_test_case(primitive_fulfilled_wrapper(object()), dummy,
module=__name__,
name='Object_FulfilledTestCase')
None_RejectedTestCase = generate_rejected_test_case(primitive_rejected_wrapper(None), dummy,
module=__name__,
name='None_RejectedTestCase')
Zero_RejectedTestCase = generate_rejected_test_case(primitive_rejected_wrapper(0), dummy,
module=__name__,
name='Zero_RejectedTestCase')
One_RejectedTestCase = generate_rejected_test_case(primitive_rejected_wrapper(1), dummy,
module=__name__,
name='One_RejectedTestCase')
String_RejectedTestCase = generate_rejected_test_case(primitive_rejected_wrapper('asdf'), dummy,
module=__name__,
name='String_RejectedTestCase')
EmptyString_RejectedTestCase = generate_rejected_test_case(primitive_rejected_wrapper(''), dummy,
module=__name__,
name='EmptyString_RejectedTestCase')
List_RejectedTestCase = generate_rejected_test_case(primitive_rejected_wrapper(['asdf', 'value1']), dummy,
module=__name__,
name='List_RejectedTestCase')
EmptyList_RejectedTestCase = generate_rejected_test_case(primitive_rejected_wrapper([]), dummy,
module=__name__,
name='EmptyList_RejectedTestCase')
Dict_RejectedTestCase = generate_rejected_test_case(primitive_rejected_wrapper(dict(key='value')), dummy,
module=__name__,
name='Dict_RejectedTestCase')
EmptyDict_RejectedTestCase = generate_rejected_test_case(primitive_rejected_wrapper(dict()), dummy,
module=__name__,
name='EmptyDict_RejectedTestCase')
Tuple_RejectedTestCase = generate_rejected_test_case(primitive_rejected_wrapper(('asdf', 'value1', )), dummy,
module=__name__,
name='Tuple_RejectedTestCase')
EmptyTuple_RejectedTestCase = generate_rejected_test_case(primitive_rejected_wrapper(()), dummy,
module=__name__,
name='EmptyTuple_RejectedTestCase')
Object_RejectedTestCase = generate_rejected_test_case(primitive_rejected_wrapper(object()), dummy,
module=__name__,
name='Object_RejectedTestCase')
ntegrated in `LIMO MEEG`_, a MATLAB toolbox originally designed
to interface with EEGLAB_.
In summary, the example:
- Fetches epoched data files for a single subject of the LIMO dataset [1]_.
If the LIMO files are not found on disk, the
fetcher :func:`mne.datasets.limo.load_data()` will automatically download
the files from a remote repository.
- During import, information about the data (i.e., sampling rate, number of
epochs per condition, number and name of EEG channels per subject, etc.) is
extracted from the LIMO :file:`.mat` files stored on disk and added to the
epochs structure as metadata.
- Fits linear models on the single subject's data and visualizes inferential
measures to evaluate the significance of the estimated effects.
References
----------
.. [1] Guillaume, Rousselet. (2016). LIMO EEG Dataset, [dataset].
University of Edinburgh, Centre for Clinical Brain Sciences.
https://doi.org/10.7488/ds/1556.
.. [2] Rousselet, G. A., Gaspar, C. M., Pernet, C. R., Husk, J. S.,
Bennett, P. J., & Sekuler, A. B. (2010). Healthy aging delays scalp EEG
sensitivity to noise in a face discrimination task.
Frontiers in psychology, 1, 19. https://doi.org/10.3389/fpsyg.2010.00019
.. [3] Rousselet, G. A., Pernet, C. R., Bennett, P. J., & Sekuler, A. B.
(2008). Parametric study of EEG sensitivity to phase noise during face
processing. BMC neuroscience, 9(1), 98.
https://doi.org/10.1186/1471-2202-9-98
.. _LIMO dataset: https://datashare.is.ed.ac.uk/handle/10283/2189?show=full
.. _LIMO MEEG: https://github.com/LIMO-EEG-Toolbox
.. _EEGLAB: https://sccn.ucsd.edu/eeglab/index.php
.. _Fig 1: https://bmcneurosci.biomedcentral.com/articles/10.1186/1471-2202-9-98/figures/1
.. _least squares: https://docs.scipy.org/doc/scipy/reference/generated/scipy.linalg.lstsq.html
""" # noqa: E501
# Authors: Jose C. Garcia Alanis <alanis.jcg@gmail.com>
#
# License: BSD (3-clause)
import numpy as np
import matplotlib.pyplot as plt
from mne.datasets.limo import load_data
from mne.stats import linear_regression
from mne.viz import plot_events, plot_compare_evokeds
from mne import combine_evoked
print(__doc__)
# subject to use
subj = 1
###############################################################################
# About the data
# --------------
#
# In the original LIMO experiment (see [2]_), participants performed a
# two-alternative forced choice task, discriminating between two face stimuli.
# The same two faces were used during the whole experiment,
# with varying levels of noise added, making the faces more or less
# discernible to the observer (see `Fig 1`_ in [3]_ for a similar approach).
#
# The presented faces varied across a noise-signal (or phase-coherence)
# continuum spanning from 0 to 85% in increasing steps of 5%.
# In other words, faces with high phase-coherence (e.g., 85%) were easy to
# identify, while faces with low phase-coherence (e.g., 5%) were hard to
# identify and by extension very hard to discriminate.
#
#
# Load the data
# -------------
#
# We'll begin by loading the data from subject 1 of the LIMO dataset.
# This step can take a little while if you're loading the data for the
# first time.
limo_epochs = load_data(subject=subj)
###############################################################################
# Note that the result of the loading process is an
# :class:`mne.EpochsArray` containing the data ready to interface
# with MNE-Python.
print(limo_epochs)
###############################################################################
# Visualize events
# ----------------
#
# We can visualise the distribution of the face events contained in the
# ``limo_epochs`` structure. Events should appear clearly grouped, as the
# epochs are ordered by condition.
fig = plot_events(limo_epochs.events, event_id=limo_epochs.event_id)
fig.suptitle("Distribution of events in LIMO epochs")
###############################################################################
# As can be seen above, conditions are coded as ``Face/A`` and ``Face/B``.
# Information about the phase-coherence of the presented faces is stored in
# the epochs metadata. This information can be easily accessed by calling
# ``limo_epochs.metadata``. As shown below, the epochs metadata also contains
# information about the presented faces for convenience.
print(limo_epochs.metadata.head())
###############################################################################
# Now let's take a closer look at the information in the epochs
# metadata.
# We want to include all columns in the summary table.
epochs_summary = limo_epochs.metadata.describe(include='all').round(3)
print(epochs_summary)
###############################################################################
# The first column of the summary table above provides more or less the same
# information as the ``print(limo_epochs)`` command we ran before. There are
# 1055 faces (i.e., epochs), subdivided in 2 conditions (i.e., Face A and
# Face B) and, for this particular subject, there are more epochs for the
# condition Face B.
#
# In addition, we can see in the second column that the values for the
# phase-coherence variable range from -1.619 to 1.642. This is because the
# phase-coherence values are provided as a z-scored variable in the LIMO
# dataset. Note that they have a mean of zero and a standard deviation of 1.
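# A quick sanity check of the z-scoring (a small addition to the original
# text): the mean should be ~0 and the standard deviation ~1.
print(limo_epochs.metadata['phase-coherence'].agg(['mean', 'std']).round(3))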
#
#
# Visualize condition ERPs
# ------------------------
#
# Let's plot the ERPs evoked by Face A and Face B, to see how similar they are.
# only show -250 to 500 ms
ts_args = dict(xlim=(-0.25, 0.5))
# plot evoked response for face A
limo_epochs['Face/A'].average().plot_joint(times=[0.15],
title='Evoked response: Face A',
ts_args=ts_args)
# and face B
limo_epochs['Face/B'].average().plot_joint(times=[0.15],
title='Evoked response: Face B',
ts_args=ts_args)
###############################################################################
# We can also compute the difference wave contrasting Face A and Face B,
# although, looking at the evoked responses above, we shouldn't expect
# large differences between these face stimuli.
# Face A minus Face B
difference_wave = combine_evoked([limo_epochs['Face/A'].average(),
limo_epochs['Face/B'].average()],
weights=[1, -1])
# plot difference wave
difference_wave.plot_joint(times=[0.15], title='Difference Face A - Face B')
###############################################################################
# As expected, no clear pattern appears when contrasting
# Face A and Face B. However, we could narrow our search a little bit more.
# Since this is a "visual paradigm" it might be best to look at electrodes
# located over the occipital lobe, as differences between stimuli (if any)
# might be easier to spot over visual areas.
# Create a dictionary containing the evoked responses
conditions = ["Face/A", "Face/B"]
evokeds = {condition: limo_epochs[condition].average()
for condition in conditions}
# concentrate the analysis on occipital electrodes (e.g., B11)
pick = evokeds["Face/A"].ch_names.index('B11')
# compare evoked responses
plot_compare_evokeds(evokeds, picks=pick, ylim=dict(eeg=(-15, 7.5)))
###############################################################################
# We do see a difference between Face A and B, but it is pretty small.
#
#
# Visualize effect of stimulus phase-coherence
# --------------------------------------------
#
# Since phase-coherence
# determined whether a face stimulus could be easily identified,
# one could expect that faces with high phase-coherence should evoke stronger
# activation patterns along occipital electrodes.
phase_coh = limo_epochs.metadata['phase-coherence']
# get levels of phase coherence
levels = sorted(phase_coh.unique())
# create labels for levels of phase coherence (i.e., 0 - 85%)
labels = ["{0:.2f}".format(i) for i in np.arange(0., 0.90, 0.05)]
# create dict of evokeds for each level of |
# Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the Apache 2.0 License.
# See the LICENSE file in the project root for more information.
##
## Test surrogatepass encoding error handler
##
import unittest
import codecs
from iptest import run_test
class SurrogatePassTest(unittest.TestCase):
def test_ascii(self):
self.assertEqual("abc".encode("ascii", errors="surrogatepass"), b"abc")
self.assertEqual(b"abc".decode("ascii", errors="surrogatepass"), "abc")
def test_utf_7(self):
self.assertEqual("abc\ud810xyz".encode("utf_7", errors="surrogatepass"), b"abc+2BA-xyz")
self.assertEqual(b"abc+2BA-xyz".decode("utf_7", errors="surrogatepass"), "abc\ud810xyz")
def test_utf_8(self):
self.assertEqual("abc\ud810xyz".encode("utf_8", errors="surrogatepass"), b"abc\xed\xa0\x90xyz")
self.assertEqual(b"abc\xed\xa0\x90xyz".decode("utf_8", errors="surrogatepass"), "abc\ud810xyz")
def test_utf_16_le(self):
# lone high surrogate
self.assertEqual("\ud810".encode("utf_16_le", errors="surrogatepass"), b"\x10\xd8")
self.assertEqual(b"\x10\xd8".decode("utf_16_le", errors="surrogatepass"), "\ud810")
#lone low surrogate
self.assertEqual("\udc0a".encode("utf_16_le", errors="surrogatepass"), b"\n\xdc")
self.assertEqual(b"\n\xdc".decode("utf_16_le", errors="surrogatepass"), "\udc0a")
# invalid surrogate pair (low, high)
self.assertEqual("\ude51\uda2f".encode("utf_16_le", errors="surrogatepass"), b"Q\xde/\xda")
self.assertEqual(b"Q\xde/\xda".decode("utf_16_le", errors="surrogatepass"), "\ude51\uda2f")
def test_utf_16_be(self):
# lone high surrogate
self.assertEqual("\ud810".encode("utf_16_be", errors="surrogatepass"), b"\xd8\x10")
self.assertEqual(b"\xd8\x10".decode("utf_16_be", errors="surrogatepass"), "\ud810")
#lone low surrogate
self.assertEqual("\udc0a".encode("utf_16_be", errors="surrogatepass"), b"\xdc\n")
self.assertEqual(b"\xdc\n".decode("utf_16_be", errors="surrogatepass"), "\udc0a")
# invalid surrogate pair (low, high)
self.assertEqual("\ude51\uda2f".encode("utf_16_be", errors="surrogatepass"), b"\xdeQ\xda/")
self.assertEqual(b"\xdeQ\xda/".decode("utf_16_be", errors="surrogatepass"), "\ude51\uda2f")
def test_utf_32_le(self):
# lone high surrogate
self.assertEqual("\ud810".encode("utf_32_le", errors="surrogatepass"), b"\x10\xd8\x00\x00")
self.assertEqual(b"\x10\xd8\x00\x00".decode("utf_32_le", errors="surrogatepass"), "\ud810")
#lone low surrogate
self.assertEqual("\udc0a".encode("utf_32_le", errors="surrogatepass"), b"\n\xdc\x00\x00")
self.assertEqual(b"\n\xdc\x00\x00".decode("utf_32_le", errors="surrogatepass"), "\udc0a")
# invalid surrogate pair (low, high)
self.assertEqual("\ude51\uda2f".encode("utf_32_le", errors="surrogatepass"), b"Q\xde\x00\x00/\xda\x00\x00")
self.assertEqual(b"Q\xde\x00\x00/\xda\x00\x00".decode("utf_32_le", errors="surrogatepass"), "\ude51\uda2f")
def test_utf_32_be(self):
# lone high surrogate
self.assertEqual("\ud810".encode("utf_32_be", errors="surrogatepass"), b"\x00\x00\xd8\x10")
self.assertEqual(b"\x00\x00\xd8\x10".decode("utf_32_be", errors="surrogatepass"), "\ud810")
#lone low surrogate
self.assertEqual("\udc0a".encode("utf_32_be", errors="surrogatepass"), b"\x00\x00\xdc\n")
self.assertEqual(b"\x00\x00\xdc | \n".decode("utf_32_be", errors="surrogatepass"), "\udc0a")
# invalid surrogate pair (low, high)
self.assertEqual("\ude51\uda2f".encode("utf_32_be", errors="surrogatepass"), b"\x00\x00\xdeQ\x00\x00\xda/")
self.assertEqual(b"\x00\x00\xdeQ\x00\x00\xda/".decode("utf_32_be", errors="surrogatepass"), "\ude51\uda2f")
run_test(__name__)
    @patch('IM.connectors.OpenNebula.OpenNebulaCloudConnector.getONEVersion')
@patch('IM.InfrastructureList.InfrastructureList.save_data')
def test_20_launch(self, save_data, getONEVersion, server_proxy):
radl_data = """
network net1 (provider_id = 'publica' and outbound = 'yes' and
outports = '8080,9000:9100' and sg_name= 'test')
network net2 ()
system test (
cpu.arch='x86_64' and
cpu.count=1 and
memory.size=512m and
availability_zone='0' and
net_interface.0.connection = 'net1' and
net_interface.0.dns_name = 'test' and
net_interface.1.connection = 'net2' and
instance_tags = 'key=value,key1=value2' and
disk.0.os.name = 'linux' and
disk.0.image.url = 'one://server.com/1' and
disk.0.os.credentials.username = 'user' and
disk.1.size=1GB and
disk.1.device='hdb' and
disk.1.mount_path='/mnt/path'
)"""
radl = radl_parse.parse_radl(radl_data)
radl.check()
auth = Authentication([{'id': 'one', 'type': 'OpenNebula', 'username': 'user',
'password': 'pass', 'host': 'server.com:2633'},
{'type': 'InfrastructureManager', 'username': 'user',
'password': 'pass'}])
one_cloud = self.get_one_cloud()
getONEVersion.return_value = "4.14.0"
one_server = MagicMock()
one_server.one.vm.allocate.return_value = (True, "1", 0)
        one_server.one.vnpool.info.return_value = (True, self.read_file_as_string("files/nets.xml"), 0)
one_server.one.secgrouppool.info.return_value = (True, self.read_file_as_string("files/sgs.xml"), 0)
one_server.one.secgroup.allocate.return_value = (True, 1, 0)
        server_proxy.return_value = one_server
inf = InfrastructureInfo()
inf.auth = auth
res = one_cloud.launch(inf, radl, radl, 1, auth)
success, _ = res[0]
self.assertTrue(success, msg="ERROR: launching a VM.")
sg_template = ('NAME = test\nRULE = [ PROTOCOL = TCP, RULE_TYPE = inbound, RANGE = 22:22 ]\n'
'RULE = [ PROTOCOL = TCP, RULE_TYPE = inbound, RANGE = 8080:8080 ]\n'
'RULE = [ PROTOCOL = TCP, RULE_TYPE = inbound, RANGE = 9000:9100 ]\n')
self.assertEqual(one_server.one.secgroup.allocate.call_args_list, [call('user:pass', sg_template)])
vm_template = """
NAME = userimage
CPU = 1
VCPU = 1
MEMORY = 512
OS = [ ARCH = "x86_64" ]
DISK = [ IMAGE_ID = "1" ]
DISK = [ SAVE = no, TYPE = fs , FORMAT = ext3, SIZE = 1024, TARGET = hdb ]
SCHED_REQUIREMENTS = "CLUSTER_ID=\\"0\\""\n"""
self.assertIn(vm_template, one_server.one.vm.allocate.call_args_list[0][0][1])
self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())
# Now test an error in allocate
one_server.one.vm.allocate.return_value = (False, "Error msg", 0)
res = one_cloud.launch(inf, radl, radl, 1, auth)
success, msg = res[0]
self.assertFalse(success)
self.assertEqual(msg, "ERROR: Error msg")
@patch('IM.connectors.OpenNebula.ServerProxy')
def test_30_updateVMInfo(self, server_proxy):
radl_data = """
network net (outbound = 'yes' and provider_id = 'publica')
network net1 (provider_id = 'privada')
system test (
cpu.arch='x86_64' and
cpu.count=1 and
memory.size=512m and
net_interface.0.connection = 'net' and
net_interface.0.dns_name = 'test' and
net_interface.1.connection = 'net1' and
disk.0.os.name = 'linux' and
disk.0.image.url = 'one://server.com/1' and
disk.0.os.credentials.username = 'user' and
disk.0.os.credentials.password = 'pass'
)"""
radl = radl_parse.parse_radl(radl_data)
radl.check()
auth = Authentication([{'id': 'one', 'type': 'OpenNebula', 'username': 'user',
'password': 'pass', 'host': 'server.com:2633'}])
one_cloud = self.get_one_cloud()
inf = MagicMock()
vm = VirtualMachine(inf, "1", one_cloud.cloud, radl, radl, one_cloud, 1)
one_server = MagicMock()
one_server.one.vm.info.return_value = (True, self.read_file_as_string("files/vm_info.xml"), 0)
server_proxy.return_value = one_server
success, vm = one_cloud.updateVMInfo(vm, auth)
        self.assertEqual(vm.info.systems[0].getValue("net_interface.1.ip"), "10.0.0.01")
        self.assertEqual(vm.info.systems[0].getValue("net_interface.0.ip"), "158.42.1.1")
self.assertTrue(success, msg="ERROR: updating VM info.")
self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())
@patch('IM.connectors.OpenNebula.ServerProxy')
def test_40_stop(self, server_proxy):
auth = Authentication([{'id': 'one', 'type': 'OpenNebula', 'username': 'user',
'password': 'pass', 'host': 'server.com:2633'}])
one_cloud = self.get_one_cloud()
inf = MagicMock()
vm = VirtualMachine(inf, "1", one_cloud.cloud, "", "", one_cloud, 1)
one_server = MagicMock()
one_server.one.vm.action.return_value = (True, "", 0)
server_proxy.return_value = one_server
success, _ = one_cloud.stop(vm, auth)
self.assertTrue(success, msg="ERROR: stopping VM info.")
self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())
@patch('IM.connectors.OpenNebula.ServerProxy')
def test_50_start(self, server_proxy):
auth = Authentication([{'id': 'one', 'type': 'OpenNebula', 'username': 'user',
'password': 'pass', 'host': 'server.com:2633'}])
one_cloud = self.get_one_cloud()
inf = MagicMock()
vm = VirtualMachine(inf, "1", one_cloud.cloud, "", "", one_cloud, 1)
one_server = MagicMock()
one_server.one.vm.action.return_value = (True, "", 0)
server_proxy.return_value = one_server
success, _ = one_cloud.start(vm, auth)
self.assertTrue(success, msg="ERROR: stopping VM info.")
self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())
@patch('IM.connectors.OpenNebula.ServerProxy')
def test_52_reboot(self, server_proxy):
auth = Authentication([{'id': 'one', 'type': 'OpenNebula', 'username': 'user',
'password': 'pass', 'host': 'server.com:2633'}])
one_cloud = self.get_one_cloud()
inf = MagicMock()
vm = VirtualMachine(inf, "1", one_cloud.cloud, "", "", one_cloud, 1)
one_server = MagicMock()
one_server.one.vm.action.return_value = (True, "", 0)
server_proxy.return_value = one_server
success, _ = one_cloud.reboot(vm, auth)
self.assertTrue(success, msg="ERROR: stopping VM info.")
self.assertNotIn("ERROR", self.log.getvalue(), msg="ERROR found in log: %s" % self.log.getvalue())
@patch('IM.connectors.OpenNebula.ServerProxy')
def test_55_alter(self, server_proxy):
radl_data = """
network net ()
system test (
cpu.arch='x86_64' and
cpu.count=1 and
memory.size=512m and
net_interface.0.connection = 'net' and
net_interface.0.dns_name = 'test' and
disk.0.os.name = 'linux' and
disk.0.image.url = 'one://server.com/1' and
disk.0.os.credentials.username = 'user' and
disk.0.os.credentials.password = 'pass'
)"""
radl = radl_parse.parse_radl(radl_data)
new_radl_data = """
system test (
cpu.count>=2 and
memory.size>=2048m and
disk |
""" Copyright 2012, 2013 UW Information Technology, University of Washington
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseRedirect, Http404
from spacescout_web.forms.share import ShareForm
from django.conf import settings
from django.utils.http import urlquote
from spacescout_web.spot import Spot, SpotException
from spacescout_web.views.contact import validate_back_link
import oauth2
import socket
import simplejson as json
import logging
logger = logging.getLogger(__name__)
@login_required(login_url='/login')
def share(request, spot_id=None):
if request.method == 'POST':
form = ShareForm(request.POST)
try:
back = request.POST['back']
validate_back_link(back)
except:
back = '/'
if form.is_valid():
spot_id = form.cleaned_data['spot_id']
back = form.cleaned_data['back']
sender = form.cleaned_data['sender']
recipient = form.cleaned_data['recipient']
subject = form.cleaned_data['subject']
message = form.cleaned_data['message']
bot_test = form.cleaned_data['email_confirmation']
url = "{0}/api/v1/spot/{1}/share".format(
settings.SS_WEB_SERVER_HOST,
spot_id
)
body = json.dumps({
'to': recipient,
'from': sender,
'comment': message,
'subject': subject
})
headers = {
"X-OAuth-User": "%s" % request.user.username,
'Content-Type': 'application/json',
'Accept': 'application/json'
}
consumer = oauth2.Consumer(
key=settings.SS_WEB_OAUTH_KEY,
secret=settings.SS_WEB_OAUTH_SECRET
)
client = oauth2.Client(consumer)
resp, content = client.request(url,
method='PUT',
body=body,
headers=h | eaders)
if not (resp.status == 200 or resp.status == 201):
logger.error('Share service failure %s: %s' % (
resp.status,
url
))
return HttpResponseRedirect('/share/sorry/')
return HttpResponseRedirect(
                '/share/thankyou/?back=' +
urlquote(back)
)
else:
# mask user from silliness
try:
back = request.GET['back']
validate_back_link(back)
except:
back = '/'
if request.user and request.user.is_authenticated():
consumer = oauth2.Consumer(
key=settings.SS_WEB_OAUTH_KEY,
secret=settings.SS_WEB_OAUTH_SECRET
)
client = oauth2.Client(consumer)
url = "{0}/api/v1/user/me".format(settings.SS_WEB_SERVER_HOST)
headers = {
"X-OAuth-User": "%s" % request.user.username,
'Content-Type': 'application/json',
'Accept': 'application/json'
}
resp, content = client.request(url,
method='GET',
headers=headers)
sender = "%s@%s" % (
request.user.username,
getattr(settings, 'SS_MAIL_DOMAIN', 'uw.edu')
)
if resp.status == 200:
me = content = json.loads(content)
if 'email' in me and len(me['email']):
sender = me['email']
else:
sender = ''
form = ShareForm(initial={
'spot_id': spot_id,
'back': back,
'sender': sender,
'subject': 'Check out this space I found on SpaceScout',
})
try:
spot = Spot(spot_id).get()
share_text = [spot["name"], spot["type"]]
if ('extended_info' in spot and
'location_description' in spot['extended_info']):
share_text.append(spot['extended_info']['location_description'])
except SpotException as e:
logger.error('Share failure for spot %s: %s' % (spot_id, e))
return render_to_response('spacescout_web/share-sorry.html', {
'problem': 'Sorry, but the space you wish '
'to share does not exist.',
'back': back,
}, context_instance=RequestContext(request))
share_url = 'http://%s/space/%s/%s' % (getattr(
settings, 'SS_APP_SERVER',
socket.gethostname()),
spot_id, urlquote(spot["name"])
)
return render_to_response('spacescout_web/share-form.html', {
'form': form,
'back': back,
'spot_id': spot_id,
'share_text': share_text,
'share_url': share_url,
'hidden': ["spot_id", "back"],
'is_mobile': (request.MOBILE == 1),
}, context_instance=RequestContext(request))
def thank_you(request, spot_id=None):
share_variables = _share_variables(request, spot_id)
try:
back = request.GET['back']
validate_back_link(back)
except:
back = share_variables['back']
return render_to_response('spacescout_web/share-thankyou.html', {
'spot_id': spot_id,
'back': back,
}, context_instance=RequestContext(request))
def sorry(request, spot_id=None):
share_variables = _share_variables(request, spot_id)
try:
back = request.GET['back']
validate_back_link(back)
except:
back = share_variables['back']
return render_to_response('spacescout_web/share-sorry.html', {
'problem': None,
'back': back
}, context_instance=RequestContext(request))
def _share_variables(request, spot_id):
spot_name = 'Unknown'
spot_description = ''
if spot_id is not None:
try:
spot = Spot(spot_id).get()
except SpotException as ex:
raise Http404
spot_name = spot["name"]
if ('extended_info' in spot and
'location_description' in spot['extended_info']):
spot_description = spot['extended_info']['location_description']
if request.MOBILE == 1:
is_mobile = True
else:
is_mobile = False
    if request.GET.get('back'):
        back = request.GET['back']
    else:
        back = '/'
return {
'spot_name': spot_name,
'spot_description': spot_description,
'is_mobile': is_mobile,
'back': back
}
"""
This module manages the interface between webpack and Django.
It loads webpack bundle tracker stats files, and catalogues the different files
that need to be served in order to inject that frontend code into a Django template.
Originally, it was a monkeypatch of django-webpack-loader - but as our needs are somewhat
different, much of the code has simply been rewritten, and will continue to be so to better match our use case.
"""
from __future__ import absolute_import, print_function, unicode_literals
from django.conf import settings
from django.utils.safestring import mark_safe
def render_as_url(chunk):
"""
This function returns the URL for a particular chunk (JS or CSS file), by
appending the url or public path for the file to the current STATIC_URL set
in settings.
    :param chunk: A dictionary with a url or publicPath attribute -
this is generated by Webpack.
:returns: The URL to the file for the client.
"""
static = getattr(settings, 'STATIC_URL')
url = chunk.get('publicPath') or chunk['url']
return "{static}{url}".format(static=static, url=url)
def webpack_asset_render(HookClass, is_async=False):
    """
    This produces the content of a script tag for a WebpackInclusionHook
    subclass whose hooks implement both sync and async render-to-html
    methods.
    :param HookClass: a subclass of WebpackInclusionHook
    :param is_async: Render async instead of sync.
    :return: HTML of script tags to insert
    """
    tags = []
    for hook in HookClass().registered_hooks:
        tags.append(
            hook.render_to_page_load_sync_html() if not is_async
            else hook.render_to_page_load_async_html()
        )
    return mark_safe('\n'.join(tags))
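# A minimal, hedged demo of render_as_url (hypothetical chunk dicts; only
# configures Django settings when this module is run standalone):
if __name__ == "__main__":
    if not settings.configured:
        settings.configure(STATIC_URL='/static/')
    print(render_as_url({'publicPath': 'main.js'}))  # -> /static/main.js
    print(render_as_url({'url': 'legacy.js'}))       # -> /static/legacy.js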
# Field name made lowercase.
enclosedvolume = models.FloatField(db_column='enclosedVolume', blank=True, null=True) # Field name made lowercase.
wall1area = models.FloatField(db_column='wall1Area', blank=True, null=True) # Field name made lowercase.
wall1azimuth = models.FloatField(db_column='wall1Azimuth', blank=True, null=True) # Field name made lowercase.
wall2area = models.FloatField(db_column='wall2Area', blank=True, null=True) # Field name made lowercase.
wall2azimuth = models.FloatField(db_column='wall2Azimuth', blank=True, null=True) # Field name made lowercase.
wall3area = models.FloatField(db_column='wall3Area', blank=True, null=True) # Field name made lowercase.
wall3azimuth = models.FloatField(db_column='wall3Azimuth', blank=True, null=True) # Field name made lowercase.
wall4area = models.FloatField(db_column='wall4Area', blank=True, null=True) # Field name made lowercase.
wall4azimuth = models.FloatField(db_column='wall4Azimuth', blank=True, null=True) # Field name made lowercase.
envelopingsurface = models.FloatField(db_column='envelopingSurface', blank=True, null=True) # Field name made lowercase.
windowarea = models.FloatField(db_column='windowArea', blank=True, null=True) # Field name made lowercase.
windowwallratio = models.FloatField(db_column='windowWallRatio', blank=True, null=True) # Field name made lowercase.
avgthicknessouterwall = models.FloatField(db_column='avgThicknessOuterWall', blank=True, null=True) # Field name made lowercase.
avgthicknessinnerwall = models.FloatField(db_column='avgThicknessInnerWall', blank=True, null=True) # Field name made lowercase.
avgthicknessinsulation = models.FloatField(db_column='avgThicknessInsulation', blank=True, null=True) # Field name made lowercase.
materialouterwall = models.CharField(db_column='materialOuterWall', max_length=200, blank=True, null=True) # Field name made lowercase.
materialinnerwall = models.CharField(db_column='materialInnerWall', max_length=200, blank=True, null=True) # Field name made lowercase.
materialinsulation = models.CharField(db_column='materialInsulation', max_length=200, blank=True, null=True) # Field name made lowercase.
materialwindowframe = models.CharField(db_column='materialWindowFrame', max_length=200, blank=True, null=True) # Field name made lowercase.
materialwindowglazing = models.CharField(db_column='materialWindowGlazing', max_length=200, blank=True, null=True) # Field name made lowercase.
basematerial = models.CharField(db_column='baseMaterial', max_length=200, blank=True, null=True) # Field name made lowercase.
avguvalue = models.FloatField(db_column='avgUvalue', blank=True, null=True) # Field name made lowercase.
cellar = models.CharField(max_length=2, blank=True, null=True)
externalshading = models.CharField(db_column='externalShading', max_length=200, blank=True, null=True) # Field name made lowercase.
heating = models.CharField(max_length=2, blank=True, null=True)
cooling = models.CharField(max_length=2, blank=True, null=True)
centralizedcoolingsystem = models.CharField(db_column='centralizedCoolingSystem', max_length=2, blank=True, null=True) # Field name made lowercase.
ahu = models.CharField(db_column='AHU', max_length=2, blank=True, null=True) # Field name made lowercase.
absorptionchiller = models.CharField(db_column='absorptionChiller', max_length=2, blank=True, null=True) # Field name made lowercase.
computercenter = models.CharField(db_column='computerCenter', max_length=2, blank=True, null=True) # Field name made lowercase.
laboratory = models.CharField(max_length=2, blank=True, null=True)
serverroomwiringcenter = models.CharField(db_column='serverRoomWiringCenter', max_length=2, blank=True, null=True) # Field name made lowercase.
heatenergysource = models.CharField(db_column='heatEnergySource', max_length=200, blank=True, null=True) # Field name made lowercase.
heatingsystem = models.CharField(db_column='heatingSystem', max_length=2000, blank=True, null=True) # Field name made lowercase.
coolingsystem = models.CharField(db_column='coolingSystem', max_length=2000, blank=True, null=True) # Field name made lowercase.
ventilationsystem = models.CharField(db_column='ventilationSystem', max_length=2000, blank=True, null=True) # Field name made lowercase.
lightingsystem = models.CharField(db_column='lightingSystem', max_length=2000, blank=True, null=True) # Field name made lowercase.
    reasonsforhighconsumption = models.CharField(db_column='reasonsForHighConsumption', max_length=2000, blank=True, null=True)  # Field name made lowercase.
renovationmeasures = models.CharField(db_column='renovationMeasures', max_length=2000, blank=True, null=True) # Field name made lowercase.
districtcoolingpower = models.FloatField(db_column='districtCoolingPower', blank=True, null=True) # Field name made lowercase.
    districtcoolingtransferstationname = models.CharField(db_column='districtCoolingTransferStationName', max_length=200, blank=True, null=True)  # Field name made lowercase.
districtheatingvariabletemppower = models.FloatField(db_column='districtHeatingVariableTempPower', blank=True, null=True) # Field name made lowercase.
districtheatingconstanttemppower = models.FloatField(db_column='districtHeatingConstantTempPower', blank=True, null=True) # Field name made lowercase.
districtheatingtransferstationname = models.CharField(db_column='districtHeatingTransferStationName', max_length=200, blank=True, null=True) # Field name made lowercase.
transferstationcoordinatex = models.FloatField(db_column='transferStationCoordinateX', blank=True, null=True) # Field name made lowercase.
transferstationcoordinatey = models.FloatField(db_column='transferStationCoordinateY', blank=True, null=True) # Field name made lowercase.
ratiousetransferstation = models.FloatField(db_column='ratioUseTransferStation', blank=True, null=True) # Field name made lowercase.
solarthermalmodularea = models.FloatField(db_column='solarThermalModulArea', blank=True, null=True) # Field name made lowercase.
pvproduction = models.FloatField(db_column='PVProduction', blank=True, null=True) # Field name made lowercase.
pvmoduleareabestsuited = models.FloatField(db_column='PVModuleAreaBestSuited', blank=True, null=True) # Field name made lowercase.
pvmoduleareawellsuited = models.FloatField(db_column='PVModuleAreaWellSuited', blank=True, null=True) # Field name made lowercase.
pvmoduleareasuited = models.FloatField(db_column='PVModuleAreaSuited', blank=True, null=True) # Field name made lowercase.
solarmoduleareashadowed = models.FloatField(db_column='solarModuleAreaShadowed', blank=True, null=True) # Field name made lowercase.
solarmodulearea = models.FloatField(db_column='solarModuleArea', blank=True, null=True) # Field name made lowercase.
roofarea = models.FloatField(db_column='roofArea', blank=True, null=True) # Field name made lowercase.
heatconsumption2011 = models.FloatField(db_column='heatConsumption2011', blank=True, null=True) # Field name made lowercase.
heatconsumption2011_1 = models.FloatField(db_column='heatConsumption2011_1', blank=True, null=True) # Field name made lowercase.
heatconsumption2011_2 = models.FloatField(db_column='heatConsumption2011_2', blank=True, null=True) # Field name made lowercase.
heatconsumption2011_3 = models.FloatField(db_column='heatConsumption2011_3', blank=True, null=True) # Field name made lowercase.
heatconsumption2011_4 = models.FloatField(db_column='heatConsumption2011_4', blank=True, null=True) # Field name made lowercase.
heatconsumption2011_5 = models.FloatField(db_column='heatConsumption2011_5', blank=True, null=True) # Field name made lowercase.
heatconsumption2011_6 = models.FloatField(db_column='heatConsumption2011_6', blank=True, null=True) # Field name made lowercase.
heatconsumption2011_7 = models.FloatField(db_column='heatConsumption2011_7', blank=True, null=True) # Field name made lowercase.
heatconsu |
from zipfile import ZipFile
class ZipFileArchiver(object):
"""
An archiver used to generate .zip files.
This wraps Python's built in :class:`zipfile.ZipFile`
    methods to operate exactly like :class:`tarfile.TarFile` does.
"""
    def __init__(self, *args, **kwargs):
"""
Create a :class:`.ZipFileArchiver` instance. We create a new
:class:`zipfile.ZipFile` and store it to the ``zipfile`` member.
"""
self.zipfile = ZipFile(*args, **kwargs)
    @classmethod
    def open(cls, *args, **kwargs):
        """
        Open the archive. This must be a classmethod.
        """
        return ZipFileArchiver(*args, **kwargs)
def add(self, *args, **kwargs):
"""
Add file to the archive.
"""
self.zipfile.write(*args, **kwargs)
def extractall(self, *args, **kwargs):
"""
Extract all files from the archive.
"""
self.zipfile.extractall(*args, **kwargs)
def close(self):
"""
Close the archive.
"""
self.zipfile.close()
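# A minimal usage sketch (hypothetical file names), mirroring the
# tarfile-style open/add/close flow this class emulates:
if __name__ == "__main__":
    with open('notes.txt', 'w') as fh:  # create a small file to archive
        fh.write('hello')
    archiver = ZipFileArchiver.open('backup.zip', mode='w')
    archiver.add('notes.txt')
    archiver.close()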
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class authenticationradiuspolicy_authenticationvserver_binding(base_resource) :
""" Binding class showing the authenticationvserver that can be bound to authenticationradiuspolicy.
"""
def __init__(self) :
self._boundto = ""
self._priority = 0
self._activepolicy = 0
self._name = ""
self.___count = 0
@property
def name(self) :
ur"""Name of the RADIUS authentication policy.<br/>Minimum length = 1.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
ur"""Name of the RADIUS authentication policy.<br/>Minimum length = 1
"""
try :
self._name = name
except Exception as e:
raise e
@property
def boundto(self) :
ur"""The entity name to which policy is bound.
"""
try :
return self._boundto
except Exception as e:
raise e
@boundto.setter
def boundto(self, boundto) :
ur"""The entity name to which policy is bound.
"""
try :
self._boundto = boundto
except Exception as e:
raise e
@property
def priority(self) :
try :
return self._priority
except Exception as e:
raise e
@property
def activepolicy(self) :
try :
return self._activepolicy
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
ur""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(authenticationradiuspolicy_authenticationvserver_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.authenticationradiuspolicy_authenticationvserver_binding
except Exception as e :
raise e
def _get_object_name(self) :
ur""" Returns the value of object identifier argument
"""
try :
if self.name is not None :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def get(cls, service, name) :
ur""" Use this API to fetch authenticationradiuspolicy_authenticationvserver_binding resources.
"""
try :
obj = authenticationradiuspolicy_authenticationvserver_binding()
obj.name = name
response = obj.get_resources(service)
return response
except Exception as e:
raise e
@classmethod
def get_filtered(cls, service, name, filter_) :
ur""" Use this API to fetch filtered set of authenticationradiuspolicy_authenticationvserver_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = authenticationradiuspolicy_authenticationvserver_binding()
obj.name = name
option_ = options()
option_.filter = filter_
response = obj.getfiltered(service, option_)
return response
except Exception as e:
raise e
@classmethod
def count(cls, service, name) :
ur""" Use this API to count authenticationradiuspolicy_authenticationvserver_binding resources configued on NetScaler.
"""
try :
obj = authenticationradiuspolicy_authenticationvserver_binding()
obj.name = name
option_ = options()
option_.count = True
response = obj.get_resources(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
@classmethod
def count_filtered(cls, service, name, filter_) :
ur""" Use this API to count the filtered set of authenticationradiuspolicy_authenticationvserver_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = authenticationradiuspolicy_authenticationvserver_binding()
obj.name = name
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
class authenticationradiuspolicy_authenticationvserver_binding_response(base_response) :
def __init__(self, length=1) :
self.authenticationradiuspolicy_authenticationvserver_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.authenticationradiuspolicy_authenticationvserver_binding = [authenticationradiuspolicy_authenticationvserver_binding() for _ in range(length)]
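# Hedged usage sketch (not part of the generated SDK file): fetching the
# vserver bindings of a RADIUS policy typically goes through a logged-in
# nitro_service session. The address, credentials and policy name below
# are placeholders.
#
# from nssrc.com.citrix.netscaler.nitro.service.nitro_service import nitro_service
# client = nitro_service("10.0.0.1", "http")
# client.set_credential("nsroot", "nsroot")
# client.login()
# bindings = authenticationradiuspolicy_authenticationvserver_binding.get(client, "radius_pol")
# for binding in bindings:
#     print(binding.boundto, binding.priority)
# client.logout()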
#!/usr/bin/env python
"""
Implement common functions for tests
"""
from __future__ import print_function
from __future__ import unicode_literals
import io
import sys
def parse_yaml(yaml_file):
"""
Parses a yaml file, returning its contents as a dict.
"""
try:
import yaml
except ImportError:
sys.exit("Unable to import yaml module.")
    try:
        with io.open(yaml_file, encoding='utf-8') as fname:
            return yaml.safe_load(fname)  # safe_load avoids constructing arbitrary Python objects
except IOError:
sys.exit("Unable to open YAML file: {0}".format(yaml_file))
        self.list.append(getConfigListEntry(_("Protect services"), config.ParentalControl.servicepinactive))
if config.ParentalControl.servicepinactive.getValue():
self.list.append(getConfigListEntry(_("Parental control type"), config.ParentalControl.type))
if config.ParentalControl.mode.getValue() == "complex":
self.changePin = getConfigListEntry(_("Change service PINs"), NoSave(ConfigNothing()))
self.list.append(self.changePin)
elif config.ParentalControl.mode.getValue() == "simple":
self.changePin = getConfigListEntry(_("Change service PIN"), NoSave(ConfigNothing()))
self.list.append(self.changePin)
#Added Option to remember the service pin
self.list.append(getConfigListEntry(_("Remember service PIN"), config.ParentalControl.storeservicepin))
self.editListEntry = getConfigListEntry(_("Edit services list"), NoSave(ConfigNothing()))
self.list.append(self.editListEntry)
        #New function: Possibility to add Bouquets to whitelist / blacklist
self.editBouquetListEntry = getConfigListEntry(_("Edit bouquets list"), NoSave(ConfigNothing()))
self.list.append(self.editBouquetListEntry)
#New option to reload service lists (for example if bouquets have changed)
self.reloadLists = getConfigListEntry(_("Reload black-/white lists"), NoSave(ConfigNothing()))
self.list.append(self.reloadLists)
self["config"].list = self.list
self["config"].setList(self.list)
def keyOK(self):
print "self[\"config\"].l.getCurrentSelection()", self["config"].l.getCurrentSelection()
if self["config"].l.getCurrentSelection() == self.editListEntry:
self.session.open(ParentalControlEditor)
elif self["config"].l.getCurrentSelection() == self.editBouquetListEntry:
self.session.open(ParentalControlBouquetEditor)
elif self["config"].l.getCurrentSelection() == self.changePin:
if config.ParentalControl.mode.getValue() == "complex":
pass
else:
self.session.open(ParentalControlChangePin, config.ParentalControl.servicepin[0], _("service PIN"))
elif self["config"].l.getCurrentSelection() == self.changeSetupPin:
self.session.open(ParentalControlChangePin, config.ParentalControl.setuppin, _("setup PIN"))
elif self["config"].l.getCurrentSelection() == self.reloadLists:
from Components.ParentalControl import parentalControl
parentalControl.open()
else:
ConfigListScreen.keyRight(self)
print "current selection:", self["config"].l.getCurrentSelection()
self.createSetup()
def keyLeft(self):
ConfigListScreen.keyLeft(self)
print "current selection:", self["config"].l.getCurrentSelection()
self.createSetup()
def keyRight(self):
ConfigListScreen.keyRight(self)
print "current selection:", self["config"].l.getCurrentSelection()
self.createSetup()
def SetupPinMessageCallback(self, value):
        if value:
self.session.openWithCallback(self.cancelCB, ParentalControlChangePin, config.ParentalControl.setuppin, _("setup PIN"))
else:
            config.ParentalControl.setuppinactive.setValue(False)
self.keyCancel()
def ServicePinMessageCallback(self, value):
if value:
self.session.openWithCallback(self.cancelCB, ParentalControlChangePin, config.ParentalControl.servicepin[0], _("service PIN"))
else:
config.ParentalControl.servicepinactive.setValue(False)
self.keyCancel()
def cancelCB(self,value):
self.keyCancel()
def keyCancel(self):
if config.ParentalControl.setuppinactive.getValue() and config.ParentalControl.setuppin.getValue() == 'aaaa':
            self.session.openWithCallback(self.SetupPinMessageCallback, MessageBox, _("No valid setup PIN found!\nWould you like to change the setup PIN now?\nIf you select 'No' here, the setup protection will stay disabled!"), MessageBox.TYPE_YESNO)
elif config.ParentalControl.servicepinactive.getValue() and config.ParentalControl.servicepin[0].getValue() == 'aaaa':
self.session.openWithCallback(self.ServicePinMessageCallback, MessageBox, _("No valid service PIN found!\nDo you like to change the service PIN now?\nWhen you say 'No' here the service protection stay disabled!"), MessageBox.TYPE_YESNO)
else:
for x in self["config"].list:
x[1].save()
self.close()
def keyNumberGlobal(self, number):
pass
# for summary:
def changedEntry(self):
for x in self.onChangedEntry:
x()
def getCurrentEntry(self):
return self["config"].getCurrent()[0]
def getCurrentValue(self):
return str(self["config"].getCurrent()[1].getText())
def createSummary(self):
from Screens.Setup import SetupSummary
return SetupSummary
SPECIAL_CHAR = 96
class ParentalControlEditor(Screen):
def __init__(self, session):
Screen.__init__(self, session)
Screen.setTitle(self, _("Parental control editor"))
self.list = []
self.servicelist = ParentalControlList(self.list)
self["servicelist"] = self.servicelist
#self.onShown.append(self.chooseLetter)
self.currentLetter = chr(SPECIAL_CHAR)
self.readServiceList()
self.chooseLetterTimer = eTimer()
self.chooseLetterTimer.callback.append(self.chooseLetter)
self.onLayoutFinish.append(self.LayoutFinished)
self["actions"] = NumberActionMap(["DirectionActions", "ColorActions", "OkCancelActions", "NumberActions"],
{
"ok": self.select,
"cancel": self.cancel,
#"left": self.keyLeft,
#"right": self.keyRight,
"1": self.keyNumberGlobal,
"2": self.keyNumberGlobal,
"3": self.keyNumberGlobal,
"4": self.keyNumberGlobal,
"5": self.keyNumberGlobal,
"6": self.keyNumberGlobal,
"7": self.keyNumberGlobal,
"8": self.keyNumberGlobal,
"9": self.keyNumberGlobal,
"0": self.keyNumberGlobal
}, -1)
def LayoutFinished(self):
self.chooseLetterTimer.start(0, True)
def cancel(self):
self.chooseLetter()
def select(self):
self.servicelist.toggleSelectedLock()
def keyNumberGlobal(self, number):
pass
def readServiceList(self):
serviceHandler = eServiceCenter.getInstance()
refstr = '%s ORDER BY name' % (service_types_tv)
self.root = eServiceReference(refstr)
self.servicesList = {}
list = serviceHandler.list(self.root)
if list is not None:
services = list.getContent("CN", True) #(servicecomparestring, name)
for s in services:
key = s[1].lower()[0]
if key < 'a' or key > 'z':
key = chr(SPECIAL_CHAR)
#key = str(key)
if not self.servicesList.has_key(key):
self.servicesList[key] = []
self.servicesList[key].append(s)
def chooseLetter(self):
print "choose letter"
mylist = []
for x in self.servicesList.keys():
if x == chr(SPECIAL_CHAR):
x = (_("special characters"), x)
else:
x = (x, x)
mylist.append(x)
mylist.sort(key=itemgetter(1))
sel = ord(self.currentLetter) - SPECIAL_CHAR
self.session.openWithCallback(self.letterChosen, ChoiceBox, title=_("Show services beginning with"), list=mylist, keys = [], selection = sel)
def letterChosen(self, result):
from Components.ParentalControl import parentalControl
if result is not None:
print "result:", result
self.currentLetter = result[1]
#Replace getProtectionLevel by new getProtectionType
self.list = [ParentalControlEntryComponent(x[0], x[1], parentalControl.getProtectionType(x[0])) for x in self.servicesList[result[1]]]
self.servicelist.setList(self.list)
else:
parentalControl.save()
self.close()
class ParentalControlBouquetEditor(Screen):
#This new class allows adding complete bouquets to black- and whitelists
#The servicereference that is stored for bouquets is their refstr as listed in bouquets.tv
def __init__(self, session):
Screen.__init__(self, session)
self.skinName = "ParentalControlEditor"
self.list = []
self.bouquetslist = ParentalControlList(self.list)
self["servicelist"] = self.bouquetslist
self.readBouquetList()
self.onLayoutFinish.append(self.selectBouquet)
self["actions"] = NumberActionMap(["DirectionActions", "ColorActions", "OkCancelActions"],
{
"ok": self.select,
"cancel": self.cancel
}, -1)
def cancel(self):
from Components.ParentalControl import parentalControl
parentalControl.save()
self.close()
def select(self):
self.bouquetslist.toggleSelectedLock()
def readBouquetList(self):
serviceHandler = eServiceCenter.getInstance()
        refstr = '1:134:1:0:0:0:0:0:0:0:FROM BOUQUET \"bouquets.tv\" ORDER BY bouquet'
"project/billing_overview/index.html"
table_class = project_tables.BillingOverviewTable
page_title = _("Billing Overview")
def get_context_data(self, **kwargs):
context = super(IndexView, self).get_context_data(**kwargs)
context["tenant_id"] = get_tenant_id(self.request)
context["selected_month"] = get_month(self.request)
context["organizations"] = get_tenant_list(self.request)
year = time.strftime("%Y",time.localtime())
month = time.strftime("%m",time.localtime())
if int(month) == 1:
last_month = 12
last_year = int(year) - 1
else:
last_month = int(month) - 1
last_year = year
try:
context["year_begin"] = str((int(year)-1)) + "/" + str((int(month)))
context["year_end"] = str(last_year) + "/" + str(last_month)
# get last 12 months total cost
total_year = api.cloudkittyclient(self.request).billings.get_consumer_trends("month",
12,
get_tenant_id(self.request))
year_sum = 0
for billing_month in total_year["consumerTrends"]:
year_sum += billing_month["cost"]
context["billing_year"] = year_sum
#get current month cost
context["time_current_month"] = year+"/"+month
services_rate_list = api.cloudkittyclient(self.request).billings.list_services_cost(year+"-"+month,
get_tenant_id(self.request))
current_sum = 0
for rate in services_rate_list["servicesRate"]:
current_sum += rate["rate"]
context["billing_current_month"] = current_sum
#get last month cost
context["time_last_month"] = str(last_year)+"/"+str(last_month)
context["billing_last_month"] = api.cloudkittyclient(self.request).billings.get_consumer_trends("month",
1,
get_tenant_id(self.request))["consumerTrends"][0]["cost"]
except Exception:
exceptions.handle(self.request,_("Unable to retrieve month cost"))
today = date.today()
context["last_12_months"] = last_12_months()
        return context
def get_data(self):
try:
billings = api.cloudkittyclient(self.request).billings.get_total_cost(get_month(self.request), get_tenant_id(self.request))["totals"]
except Exception:
billings = []
exceptions.handle(self.request, _('Unable to retrieve billing list.'))
return billings
class ReportView(django.views.generic.TemplateView):
def get(self,request,*args,**kwargs):
tenant_id = get_tenant_id(self.request)
billing_month = get_month(self.request)
tenants = get_tenant_list(self.request)
for tenant in tenants:
if tenant.id == tenant_id:
tenant_name = tenant.name
break
reports = api.cloudkittyclient(self.request).billings.list_month_report(tenant_id,billing_month)
output = StringIO.StringIO()
workbook = xlsxwriter.Workbook(output)
month_sheet = workbook.add_worksheet(tenant_name)
        # Set column widths
        month_sheet.set_column('A:Z',9)
        # Header labels: department, resource, the 12 months, and the
        # quarterly, half-year and annual totals
        head = (u'部门',u'资源',
u'1月',u'2月',u'3月', u'1Q合计',
u'4月',u'5月',u'6月', u'2Q合计', u'上半年计',
u'7月',u'8月',u'9月', u'3Q合计',
u'10月',u'11月',u'12月',u'4Q合计',u'下半年计',u'全年合计'
)
        # Set the header title strings and formats
head_format = workbook.add_format({
'bold':True,
'font_size':20,
'font_name':'Microsoft YaHei'
})
row = 1
col = 0
        head_str = billing_month.split('-')[0] + u'年度月别计费一览表'  # "<year> monthly billing summary"
        head_str1 = u'资源及使用费用情况'  # "Resources and usage costs"
month_sheet.write(row,col,head_str,head_format)
row += 1
        month_sheet.write(row,col,u'如需查看季、年度合计,请在月份对应位置取消隐藏')  # "To see quarterly/annual totals, unhide the corresponding month columns"
row += 2
month_sheet.write(row,col,head_str1,head_format)
explain_format = workbook.add_format({'align':'right'})
year_month = billing_month.split('-')
if billing_month == template_filters.date(date.today(), "Y-m"):
            tab_date = u'制表日期:%d月%d日' %(int(year_month[1]),date.today().day-1)  # "Date prepared: <month>/<day>"
        else:
            tab_date = u'制表日期:%d月%d日' %(int(year_month[1]),calendar.monthrange(int(year_month[0]),int(year_month[1]))[1])
        month_sheet.write(row,len(head)-1,u'单位:元 ' + tab_date, explain_format)  # u'单位:元' = "Unit: yuan"
row += 1
col = 0
head2_format = workbook.add_format({
'bold':True,
'align':'center',
'valign':'vcenter',
'bg_color':'#D8E4BC',
'left':1,
'font_name':'Microsoft YaHei'
})
        # Set the row height
month_sheet.set_row(row,30)
for index_str in head:
month_sheet.write(row,col,index_str,head2_format)
col += 1
row += 1
month_sheet.set_column('A:A',15)
        # Rows occupied by the resource entries plus the total row
        names = ['Compute','Volume',u'合计']  # u'合计' = "Total"
even_format = workbook.add_format({
'border':1,
'font_name':'Microsoft YaHei',
'num_format': '#,##0.00'
})
odd_format=workbook.add_format({
'border':1,
'font_name':'Microsoft YaHei',
'bg_color':'#D9D9D9',
'num_format': '#,##0.00'
})
resource_total_rows = 3
        # Process each department
merge_format = workbook.add_format({
'bold':True,
            'font_name':'Microsoft YaHei',
'font_size':14,
'align':'center',
'valign':'vcenter',
'border':1
})
for depart in reports['departs']:
col = 1
for index,name in enumerate(names):
if index % 2 != 0:
month_sheet.set_row(row+index,None,odd_format)
else:
month_sheet.set_row(row+index,None,even_format)
                month_sheet.write(row+index,col,name)
month_sheet.merge_range(row,0,row+resource_total_rows-1,0,depart['tenant_name'],merge_format)
tmp_row = row
write_col = col + 1
for month_report in depart['month_reports']:
                for res_type in month_report['res_types']:
                    if res_type['res_type'] == "compute":
                        write_row = tmp_row
                    elif res_type['res_type'] == "volume":
                        write_row = tmp_row + 1
                    month_sheet.write(write_row,write_col,res_type['rate'])
write_col += 1
month = int(month_report["month"].split('-')[1])
if month == 3:
for index in range(resource_total_rows-1):
index_row = tmp_row + index
month_sheet.write(index_row,write_col,'=SUM(C' + str(index_row+1) + ':E' + str(index_row+1) + ')')
write_col += 1
elif month == 6:
for index in range(resource_total_rows-1):
index_row = tmp_row + index
month_sheet.write(index_row,write_col,'=SUM(G' + str(index_row+1) + ':I' + str(index_row+1) + ')')
month_sheet.write(index_row,write_col+1,'=SUM(F' + str(index_row+1) + '+J' + str(index_row+1) + ')')
write_col += 2
elif month == 9:
for index in range(resource_total_rows-1):
index_row = tmp_row + index
month_sheet.write(index_row,write_col,'=SUM(L' + str(index_row+1) + ':N' + str(index_row+1) + ')')
write_col += 1
elif month == 12:
for index in range(resource_total_rows-1):
index_row = tmp_row + index
month_sheet.write(index_row,write_col,'=SUM(P' + str(index_row+1) + ':R' + str(index_row+1) + ')')
month_ |
from setuptools import setup, find_packages
setup(
name='yandextank',
version='1.12.7',
description='a performance measurement tool',
    long_description='''
Yandex.Tank is a performance measurement and load testing automatization tool.
It uses other load generators such as JMeter, ab or phantom inside of it for
load generation and provides a common configuration system for them and
analytic tools for the results they produce.
''',
maintainer='Yandex Load Team',
maintainer_email='load@yandex-team.ru',
url='http://yandex.github.io/yandex-tank/',
namespace_packages=["yandextank", "yandextank.plugins"],
packages=find_packages(exclude=["tests", "tmp", "docs", "data"]),
install_requires=[
'cryptography>=2.2.1', 'pyopenssl==18.0.0',
'psutil>=1.2.1', 'requests>=2.5.1', 'paramiko>=1.16.0',
'pandas==0.24.2', 'numpy==1.15.4', 'future>=0.16.0',
'pip>=8.1.2',
'pyyaml>=4.2b1', 'cerberus==1.3.1', 'influxdb>=5.0.0', 'netort>=0.7.6',
'retrying>=1.3.3', 'pytest-runner', 'typing'
],
setup_requires=[
],
tests_require=[
'pytest==4.6.3', 'flake8', 'pytest-benchmark'
],
license='LGPLv2',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)',
'Operating System :: POSIX',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Testing :: Traffic Generation',
'Programming Language :: Python :: 2',
],
entry_points={
'console_scripts': [
'yandex-tank = yandextank.core.cli:main',
'yandex-tank-check-ssh = yandextank.common.util:check_ssh_connection',
'tank-postloader = yandextank.plugins.DataUploader.cli:post_loader',
'tank-docs-gen = yandextank.validator.docs_gen:main'
],
},
package_data={
'yandextank.api': ['config/*'],
'yandextank.core': ['config/*'],
'yandextank.aggregator': ['config/*'],
'yandextank.plugins.Android': ['binary/*', 'config/*'],
'yandextank.plugins.Autostop': ['config/*'],
'yandextank.plugins.Bfg': ['config/*'],
'yandextank.plugins.Console': ['config/*'],
'yandextank.plugins.DataUploader': ['config/*'],
'yandextank.plugins.InfluxUploader': ['config/*'],
'yandextank.plugins.OpenTSDBUploader': ['config/*'],
'yandextank.plugins.JMeter': ['config/*'],
'yandextank.plugins.JsonReport': ['config/*'],
'yandextank.plugins.Pandora': ['config/*'],
'yandextank.plugins.Phantom': ['config/*'],
'yandextank.plugins.RCAssert': ['config/*'],
'yandextank.plugins.ResourceCheck': ['config/*'],
'yandextank.plugins.ShellExec': ['config/*'],
'yandextank.plugins.ShootExec': ['config/*'],
'yandextank.plugins.Telegraf': ['config/*'],
'yandextank.plugins.NeUploader': ['config/*']
},
use_2to3=False, )
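# Usage sketch: after installing this package (e.g. `pip install .`), the
# console_scripts above become available on PATH, for example:
#   $ yandex-tank -c load.yaml
# (the config file name is a placeholder)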
import unittest
import types

from sqlalchemy import Column, Integer

# API, preprocess and postprocess are the objects under test; the exact
# import path is assumed here since the original file header is truncated.
from lever import API, preprocess, postprocess
from lever.tests.model_helpers import FlaskTestBase, TestUserACL
class ProcessTests(unittest.TestCase):
""" Ensures our metaclasses and decorators operate as we want for assigning
preprocessors and postprocessors """
def test_basic_preprocess(self):
class APIAwesome(API):
@preprocess(method='post')
def preprocess_those(self):
pass
@preprocess(action='something')
def preprocess_that(self):
pass
assert isinstance(APIAwesome._pre_method['post'][0],
types.FunctionType)
assert isinstance(APIAwesome._pre_action['something'][0],
types.FunctionType)
    def test_inheritance_mixins(self):
class APIParent(object):
@preprocess(method='post')
def preprocess_those(self):
pass
class APIAwesome(API, APIParent):
pass
assert isinstance(APIAwesome._pre_method['post'][0],
types.FunctionType)
    def test_inheritance(self):
class APIParent(API):
@preprocess(method='post')
def preprocess_those(self):
pass
class APIAwesome(APIParent):
pass
assert isinstance(APIAwesome._pre_method['post'][0],
types.FunctionType)
    def test_inheritance_reversal(self):
class APIParent(API):
pass
class APIAwesome(APIParent):
@preprocess(method='post')
def preprocess_those(self):
pass
assert isinstance(APIAwesome._pre_method['post'][0],
types.FunctionType)
def test_multi_preprocess(self):
class APIAwesome(API):
@preprocess(method=['post', 'get'])
def preprocess_those(self):
pass
@preprocess(action=['create', 'other'])
def preprocess_that(self):
pass
assert isinstance(APIAwesome._pre_method['post'][0],
types.FunctionType)
assert isinstance(APIAwesome._pre_method['get'][0],
types.FunctionType)
assert isinstance(APIAwesome._pre_action['other'][0],
types.FunctionType)
assert isinstance(APIAwesome._pre_action['create'][0],
types.FunctionType)
def test_basic_postprocess(self):
class APIAwesome(API):
@postprocess(method='post')
def preprocess_those(self):
pass
@postprocess(action='something')
def preprocess_that(self):
pass
assert isinstance(APIAwesome._post_method['post'][0],
types.FunctionType)
assert isinstance(APIAwesome._post_action['something'][0],
types.FunctionType)
def test_multi_postprocess(self):
class APIAwesome(API):
@postprocess(method=['post', 'get'])
def preprocess_those(self):
                pass
@postprocess(action=['create', 'other'])
def preprocess_that(self):
pass
assert isinstance(APIAwesome._post_method['post'][0],
types.FunctionType)
assert isinstance(APIAwesome._post_method['get'][0],
types.FunctionType)
assert isinstance(APIAwesome._post_action['other'][0],
                          types.FunctionType)
assert isinstance(APIAwesome._post_action['create'][0],
types.FunctionType)
def test_preprocess_priority(self):
class APIAwesome(API):
@postprocess(method='post', pri=0)
def preprocess_those(self):
pass
@postprocess(method='post')
def preprocess_that(self):
pass
self.assertEqual(
APIAwesome._post_method['post'][0].__name__, 'preprocess_those')
def test_none(self):
class APIAwesome(API):
pass
assert APIAwesome._pre_method == {}
assert APIAwesome._pre_action == {}
class TestProcessorUsage(FlaskTestBase):
""" These tests ensure that preprocessors and postprocessors are getting
called when they should be """
def test_methods_preprocess(self):
for meth in ['post', 'get', 'delete', 'put']:
class APIAwesome(API):
@preprocess(method=meth)
def preprocessor_one(self):
raise SyntaxError # pick an obscure one to catch..
inst = APIAwesome()
self.assertRaises(SyntaxError, getattr(inst, meth))
def test_methods_postprocess(self):
obj = self.provision_single_asset()
data = [('post', {'name': 'test'}),
('get', {}),
('put', {'id': obj.id, 'name': 'test2'}),
('delete', {'id': obj.id})]
for meth, vals in data:
class APIAwesome(self.widget_api):
@postprocess(method=meth)
def postprocess_one(self, retval):
raise SyntaxError # pick an obscure one to catch..
self.app.add_url_rule('/' + meth, view_func=APIAwesome.as_view(meth))
for meth, vals in data:
self.assertRaises(SyntaxError, getattr(self, meth), meth, 500, params=vals)
class TestAPICreation(FlaskTestBase):
def test_create_bad_pkey(self):
""" ensure that exception is thrown for invalid primary_key """
class Testing(self.base):
__tablename__ = "testing_table"
bad_id = Column(Integer, primary_key=True)
class UserAPI(API):
model = Testing
session = self.session
t = UserAPI()
self.assertRaises(AttributeError, lambda: t.pkey)
class TestGet(FlaskTestBase):
""" Test facets of our get method """
def test_get_pkey(self):
obj = self.provision_single_asset()
d = self.get('widget', 200, {'id': obj.id})
assert len(d['objects']) > 0
assert d['objects'][0]['id'] == obj.id
def test_many_query(self):
self.provision_many_asset()
d = self.get('widget', 200)
assert len(d['objects']) >= 4
class TestPut(FlaskTestBase):
""" Test facets of our get method """
def test_update(self):
""" can we change an object """
obj = self.provision_single_asset()
test_string = "testing this thing"
p = {'id': obj.id, 'description': test_string}
self.put('widget', 200, params=p)
self.session.refresh(obj)
assert obj.description == test_string
def test_cant_find(self):
self.basic_api()
self.base.metadata.create_all(self.engine)
ret = self.put('widget', 404, params={'id': 123})
assert 'not be found' in ret['message']
def test_cant_find_invalid_key(self):
self.basic_api()
self.base.metadata.create_all(self.engine)
ret = self.put('widget', 404, params={'tid': 123})
assert 'any object to update' in ret['message']
class TestDelete(FlaskTestBase):
def test_delete(self):
""" can we delete an object """
obj = self.provision_single_asset()
obj_id = obj.id
self.delete('widget', 200, params={'id': obj_id})
obj = self.session.query(self.widget_model).filter_by(id=obj_id).first()
assert obj is None
def test_cant_find_put_delete(self):
self.basic_api()
self.base.metadata.create_all(self.engine)
ret = self.delete('widget', 404, params={'id': 123})
assert 'Object could not be found' in ret['message']
def test_cant_find(self):
self.basic_api()
self.base.metadata.create_all(self.engine)
ret = self.delete('widget', 404, params={'tid': 123})
assert 'object to delete' in ret['message']
class TestPost(FlaskTestBase):
def test_create_dup(self):
""" make a duplicate entry and fail """
obj = self.provision_single_asset()
p = self.post('widget', 40 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import tornado.testing
import tornado.ioloop
import tornado.gen
from tornadis.pubsub import PubSubClient
from tornadis.client import Client
from tornadis.exceptions import ClientError
from support import test_redis_or_raise_skiptest, mock
class PubSubClientTestCase(tornado.testing.AsyncTestCase):
def setUp(self):
test_redis_or_raise_skiptest()
super(PubSubClientTestCase, self).setUp()
def get_new_ioloop(self):
return tornado.ioloop.IOLoop.instance()
@tornado.gen.coroutine
def publish(self, c2):
yield tornado.gen.sleep(1)
yield c2.call("PUBLISH", "null", "value0")
yield c2.call("PUBLISH", "foo1", "value1")
yield c2.call("PUBLISH", "foo2", "value2")
yield c2.call("PUBLISH", "bar111", "value3")
yield c2.call("PUBLISH", "bar222", "value4")
    @tornado.testing.gen_test
def test_pubsub(self):
c = PubSubClient()
c2 = Client()
yield c.connect()
yield c2.connect()
        try:
            # not subscribed yet => tornadis raises a ClientError
            yield c.pubsub_pop_message()
            raise Exception("exception not raised")
        except ClientError:
            pass
res = yield c.pubsub_subscribe("foo1", "foo2")
self.assertTrue(res)
self.assertTrue(c.subscribed)
self.assertFalse(c2.subscribed)
        try:
            # regular calls are rejected while the client is in subscribed mode
            yield c.call("PING")
            raise Exception("exception not raised")
        except ClientError:
            pass
res = yield c.pubsub_psubscribe("bar1*", "bar2*")
self.assertTrue(res)
tornado.ioloop.IOLoop.instance().add_future(self.publish(c2), None)
msg = yield c.pubsub_pop_message()
self.assertEqual(msg[2], b"value1")
msg = yield c.pubsub_pop_message()
self.assertEqual(msg[2], b"value2")
msg = yield c.pubsub_pop_message()
self.assertEqual(msg[3], b"value3")
msg = yield c.pubsub_pop_message()
self.assertEqual(msg[3], b"value4")
msg = yield c.pubsub_pop_message(deadline=1)
self.assertEqual(msg, None)
yield c.pubsub_unsubscribe("foo1")
yield c2.call("PUBLISH", "foo1", "value1")
c2.disconnect()
msg = yield c.pubsub_pop_message(deadline=1)
self.assertEqual(msg, None)
yield c.pubsub_unsubscribe("foo2")
yield c.pubsub_unsubscribe("foobar")
yield c.pubsub_punsubscribe("foobar*")
yield c.pubsub_punsubscribe("bar1*")
yield c.pubsub_punsubscribe("bar2*")
self.assertFalse(c.subscribed)
c.disconnect()
@tornado.testing.gen_test
def test_issue17(self):
c = PubSubClient()
yield c.connect()
res = yield c.pubsub_subscribe("foo")
self.assertTrue(res)
self.assertTrue(c.subscribed)
res = yield c.pubsub_unsubscribe()
self.assertTrue(res)
self.assertFalse(c.subscribed)
c.disconnect()
@tornado.testing.gen_test
def test_empty_subscribe(self):
c = PubSubClient()
yield c.connect()
res = yield c.pubsub_subscribe()
self.assertFalse(res)
c.disconnect()
@tornado.testing.gen_test
def test_subscribe_no_redis(self):
c = PubSubClient()
with mock.patch.object(c, "is_connected", return_value=False):
res = yield c.pubsub_subscribe("foo")
self.assertFalse(res)
self.assertFalse(c.subscribed)
@tornado.testing.gen_test
def test_unsubscribe_no_redis(self):
c = PubSubClient()
yield c.pubsub_subscribe("foo")
with mock.patch.object(c, "is_connected", return_value=False):
res = yield c.pubsub_unsubscribe("foo")
self.assertFalse(res)
def getitem(list, index):
return list[index]
def entry_point(i):
return getitem([i, 2, 3, 4], 2) + getitem(None, i)
def target(*args):
return entry_point, [int]
def get_llinterp_args():
return [1]
# _____ Run translated _____
def run(c_entry_point):
c_entry_point(0)
#import DBusInterface
#!/usr/bin/env python
# Test whether a retained PUBLISH to a topic with QoS 1 is retained.
# Subscription is made with QoS 0 so the retained message should also have QoS
# 0.
import subprocess
import socket
import time
import inspect, os, sys
# From http://stackoverflow.com/questions/279237/python-import-a-module-from-a-folder
cmd_subfolder = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile( inspect.currentframe() ))[0],"..")))
if cmd_subfolder not in sys.path:
sys.path.insert(0, cmd_subfolder)
import mosq_test
rc = 1
keepalive = 60
connect_packet = mosq_test.gen_connect("retain-qos1-test", keepalive=keepalive)
connack_packet = mosq_test.gen_connack(rc=0)
mid = 6
publish_packet = mosq_test.gen_publish("retain/qos1/test", qos=1, mid=mid, payload="retained message", retain=True)
puback_packet = mosq_test.gen_puback(mid)
mid = 18
subscribe_packet = mosq_test.gen_subscribe(mid, "retain/qos1/test", 0)
suback_packet = mosq_test.gen_suback(mid, 0)
publish0_packet = mosq_test.gen_publish("retain/qos1/test", qos=0, payload="retained message", retain=True)
cmd = ['../../src/mosquitto', '-p', '1888']
broker = mosq_test.start_broker(filename=os.path.basename(__file__), cmd=cmd)
try:
sock = mosq_test.do_client_connect(connect_packet, connack_packet)
sock.send(publish_packet)
if mosq_test.expect_packet(sock, "puback", puback_packet):
sock.send(subscribe_packet)
if mosq_test.expect_packet(sock, "suback", suback_packet):
if mosq_test.expect_packet(sock, "publish0", publish0_packet):
rc = 0
sock.close()
finally:
broker.terminate()
broker.wait()
if rc:
(stdo, stde) = broker.communicate()
print(stde)
exit(rc)
"""
Export to RAW/RPL file format
Based on:
http://www.nist.gov/lispix/doc/image-file-formats/raw-file-format.htm
"""
# Standard library modules.
import os
# Third party modules.
# Local modules.
from pyhmsa.fileformat.exporter.exporter import _Exporter, _ExporterThread
from pyhmsa.spec.datum.analysislist import AnalysisList2D
from pyhmsa.spec.datum.imageraster import ImageRaster2D, ImageRaster2DSpectral
# Globals and constants variables.
class _ExporterRAWThread(_ExporterThread):
def _run(self, datafile, dirpath, *args, **kwargs):
basefilename = datafile.header.title or 'Untitled'
keys = set(datafile.data.findkeys(AnalysisList2D)) | \
set(datafile.data.findkeys(ImageRaster2D)) | \
set(datafile.data.findkeys(ImageRaster2DSpectral))
length = len(keys)
filepaths = []
for i, identifier in enumerate(keys):
datum = datafile.data[identifier]
self._update_status(i / length, 'Exporting %s' % identifier)
filename = basefilename + '_' + identifier
lines = self._create_rpl_lines(identifier, datum)
rpl_filepath = os.path.join(dirpath, filename + '.rpl')
with open(rpl_filepath, 'w') as fp:
fp.write('\n'.join(lines))
raw_filepath = os.path.join(dirpath, filename + '.raw')
with open(raw_filepath, 'wb') as fp:
                # newbyteorder() returns a new dtype without touching the data;
                # astype() actually byte-swaps into a little-endian copy
                datum = datum.astype(datum.dtype.newbyteorder('<'))
fp.write(datum.tobytes())
filepaths.append(raw_filepath)
return filepaths
def _create_rpl_lines(self, identifier, datum):
lines = []
lines.append('key\t%s' % identifier)
lines.append('offset\t0')
if isinstance(datum, ImageRaster2D):
width, height = datum.shape
depth = 1
record_by = 'dont-care'
elif isinstance(datum, ImageRaster2DSpectral):
width, height, depth = datum.shape
record_by = 'vector'
elif isinstance(datum, AnalysisList2D):
depth, width, height = datum.shape
record_by = 'image'
else:
            raise IOError('Unknown datum type')
        lines.append('width\t%i' % width)
lines.append('height\t%i' % height)
lines.append('depth\t%i' % depth)
lines.append('record-by\t%s' % record_by)
        dtype = datum.dtype
lines.append('data-length\t%i' % dtype.itemsize)
byteorder = 'little-endian' if dtype.itemsize > 1 else 'dont-care'
lines.append('byte-order\t%s' % byteorder)
if dtype.kind == 'f':
data_type = 'float'
elif dtype.kind == 'u':
data_type = 'unsigned'
else:
data_type = 'signed'
lines.append('data-type\t%s' % data_type)
return lines
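# Example of the .rpl lines produced for a hypothetical 64x64 uint16
# ImageRaster2D registered under the identifier 'mymap' (a sketch traced
# from the logic above, not output captured from a real run; fields are
# tab-separated):
#
#   key         mymap
#   offset      0
#   width       64
#   height      64
#   depth       1
#   record-by   dont-care
#   data-length 2
#   byte-order  little-endian
#   data-type   unsigned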
class ExporterRAW(_Exporter):
def _create_thread(self, datafile, dirpath, *args, **kwargs):
return _ExporterRAWThread(datafile, dirpath)
def validate(self, datafile):
super().validate(datafile)
identifiers = set(datafile.data.findkeys(AnalysisList2D)) | \
set(datafile.data.findkeys(ImageRaster2D)) | \
set(datafile.data.findkeys(ImageRaster2DSpectral))
if not identifiers:
raise ValueError('Datafile must contain at least one ' + \
'AnalysisList2D, ImageRaster2D or ' + \
'ImageRaster2DSpectral datum')
            revision_id = documents[0]['revision_id']
revision_ids.append(revision_id)
# Between revision 1 and 0, 1 bucket is created.
self._verify_buckets_status(
0, revision_ids[0], {b: 'created' for b in bucket_names[:1]})
# Between revision 2 and 0, 2 buckets are created.
self._verify_buckets_status(
0, revision_ids[1], {b: 'created' for b in bucket_names[:2]})
# Between revision 3 and 0, 3 buckets are created.
self._verify_buckets_status(
0, revision_ids[2], {b: 'created' for b in bucket_names})
def test_revision_diff_self(self):
payload = base.DocumentFixture.get_minimal_multi_fixture(count=3)
bucket_name = test_utils.rand_name('bucket')
documents = self.create_documents(bucket_name, payload)
revision_id = documents[0]['revision_id']
self._verify_buckets_status(
revision_id, revision_id, {bucket_name: 'unmodified'})
def test_revision_diff_multi_bucket_self(self):
bucket_names = []
revision_ids = []
for _ in range(3):
payload = base.DocumentFixture.get_minimal_multi_fixture(count=3)
bucket_name = test_utils.rand_name('bucket')
# Store each bucket that was created.
bucket_names.append(bucket_name)
documents = self.create_documents(bucket_name, payload)
# Store each revision that was created.
revision_id = documents[0]['revision_id']
revision_ids.append(revision_id)
# The last revision should contain history for the previous 2 revisions
# such that its diff history will show history for 3 buckets. Similarly
# the 2nd revision will have history for 2 buckets and the 1st revision
# for 1 bucket.
# 1st revision has revision history for 1 bucket.
self._verify_buckets_status(
revision_ids[0], revision_ids[0], {bucket_names[0]: 'unmodified'})
# 2nd revision has revision history for 2 buckets.
self._verify_buckets_status(
revision_ids[1], revision_ids[1],
{b: 'unmodified' for b in bucket_names[:2]})
# 3rd revision has revision history for 3 buckets.
self._verify_buckets_status(
revision_ids[2], revision_ids[2],
{b: 'unmodified' for b in bucket_names})
def test_revision_diff_modified(self):
payload = base.DocumentFixture.get_minimal_multi_fixture(count=3)
bucket_name = test_utils.rand_name('bucket')
documents = self.create_documents(bucket_name, payload)
revision_id = documents[0]['revision_id']
payload[0]['data'] = {'modified': 'modified'}
comparison_documents = self.create_documents(bucket_name, payload)
comparison_revision_id = comparison_documents[0]['revision_id']
self._verify_buckets_status(
revision_id, comparison_revision_id, {bucket_name: 'modified'})
def test_revision_diff_multi_revision_modified(self):
payload = base.DocumentFixture.get_minimal_multi_fixture(count=3)
bucket_name = test_utils.rand_name('bucket')
revision_ids = []
for _ in range(3):
payload[0]['data'] = {'modified': test_utils.rand_name('modified')}
documents = self.create_documents(bucket_name, payload)
revision_id = documents[0]['revision_id']
revision_ids.append(revision_id)
for pair in [(0, 1), (0, 2), (1, 2)]:
self._verify_buckets_status(
revision_ids[pair[0]], revision_ids[pair[1]],
{bucket_name: 'modified'})
def test_revision_diff_multi_revision_multi_bucket_modified(self):
revision_ids = []
bucket_name = test_utils.rand_name('bucket')
alt_bucket_name = test_utils.rand_name('bucket')
bucket_names = [bucket_name, alt_bucket_name] * 2
# Create revisions by modifying documents in `bucket_name` and
# `alt_bucket_name`.
for bucket_idx in range(4):
payload = base.DocumentFixture.get_minimal_multi_fixture(count=3)
documents = self.create_documents(
bucket_names[bucket_idx], payload)
revision_id = documents[0]['revision_id']
revision_ids.append(revision_id)
# Between revision_ids[0] and [1], bucket_name is unmodified and
# alt_bucket_name is created.
self._verify_buckets_status(
revision_ids[0], revision_ids[1],
{bucket_name: 'unmodified', alt_bucket_name: 'created'})
# Between revision_ids[0] and [2], bucket_name is modified (by 2) and
# alt_bucket_name is created (by 1).
self._verify_buckets_status(
revision_ids[0], revision_ids[2],
{bucket_name: 'modified', alt_bucket_name: 'created'})
# Between revision_ids[0] and [3], bucket_name is modified (by [2]) and
# alt_bucket_name is created (by [1]) (as well as modified by [3]).
self._verify_buckets_status(
revision_ids[0], revision_ids[3],
{bucket_name: 'modified', alt_bucket_name: 'created'})
        # Between revision_ids[1] and [2], bucket_name is modified but
# alt_bucket_name remains unmodified.
self._verify_buckets_status(
revision_ids[1], revision_ids[2],
            {bucket_name: 'modified', alt_bucket_name: 'unmodified'})
# Between revision_ids[1] and [3], bucket_name is modified (by [2]) and
# alt_bucket_name is modified by [3].
self._verify_buckets_status(
revision_ids[1], revision_ids[3],
{bucket_name: 'modified', alt_bucket_name: 'modified'})
# Between revision_ids[2] and [3], alt_bucket_name is modified but
# bucket_name remains unmodified.
self._verify_buckets_status(
revision_ids[2], revision_ids[3],
{bucket_name: 'unmodified', alt_bucket_name: 'modified'})
def test_revision_diff_ignore_bucket_with_unrelated_documents(self):
payload = base.DocumentFixture.get_minimal_fixture()
alt_payload = base.DocumentFixture.get_minimal_fixture()
bucket_name = test_utils.rand_name('bucket')
alt_bucket_name = test_utils.rand_name('bucket')
# Create a bucket with a single document.
documents = self.create_documents(bucket_name, payload)
revision_id = documents[0]['revision_id']
# Create another bucket with an entirely different document (different
# schema and metadata.name).
self.create_documents(alt_bucket_name, alt_payload)
# Modify the document from the 1st bucket.
payload['data'] = {'modified': 'modified'}
documents = self.create_documents(bucket_name, payload)
comparison_revision_id = documents[0]['revision_id']
# The `alt_bucket_name` should be created.
self._verify_buckets_status(
revision_id, comparison_revision_id,
{bucket_name: 'modified', alt_bucket_name: 'created'})
def test_revision_diff_ignore_bucket_with_all_unrelated_documents(self):
payload = base.DocumentFixture.get_minimal_multi_fixture(count=3)
alt_payload = copy.deepcopy(payload)
bucket_name = test_utils.rand_name('bucket')
alt_bucket_name = test_utils.rand_name('bucket')
# Create a bucket with 3 documents.
documents = self.create_documents(bucket_name, payload)
revision_id = documents[0]['revision_id']
# Modify all 3 documents from first bucket.
for idx in range(3):
alt_payload[idx]['name'] = test_utils.rand_name('name')
alt_payload[idx]['schema'] = test_utils.rand_name('schema')
self.create_documents(
alt_bucket_name, alt_payload)
# Modify the document from the 1st bucket.
payload[0]['data'] = {'modified': 'modified'}
documents = self.create_documents(bucket_name, payload)
comparison_revision_id = documents[0]['revision_id']
# The alt_bucket_name should be created.
self._verify_buckets_status(
            revision_id, comparison_revision_id,
            {bucket_name: 'modified', alt_bucket_name: 'created'})
""" Write the pseudocode and code for a function that reverses the words in a sentence. Input: "This is awesome" Output: "awesome is This". Give the Big O notation. """
def reverse(sentence):
""" split original sentence into a list, then append elements of the old list to the new list starting from last to first. then join the list back toghe | ther. """
    original = sentence.split()
    reversed_words = []  # renamed so the list does not shadow the function name
    count = len(original) - 1
    while count >= 0:
        reversed_words.append(original[count])
        count = count - 1
    result = " ".join(reversed_words)
    return result
""" sentence <- input sentence
result <- empty list
split_sentence <- sentence split into array
index <- length of split_sentence - 1
while index >= 0
result append split_sentence[index]
| index <- index - 1
end while
return result
O(N)
"""
# -*- coding: utf-8 -*-
"""
***************************************************************************
FirstLastReturn.py
---------------------
Date : May 2014
Copyright : (C) 2014 by Niccolo' Marchi
Email : sciurusurbanus at hotmail dot it
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = "Niccolo' Marchi"
__date__ = 'May 2014'
__copyright__ = "(C) 2014 by Niccolo' Marchi"
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from processing.core.parameters import ParameterFile
from processing.core.parameters import ParameterBoolean
from processing.core.outputs import OutputFile
from FusionAlgorithm import FusionAlgorithm
from FusionUtils import FusionUtils
class FirstLastReturn(FusionAlgorithm):
INPUT = 'INPUT'
OUTPUT = 'OUTPUT'
SWITCH = 'SWITCH'
def defineCharacteristics(self):
self.name, self.i18n_name = self.trAlgorithm('First&Last Return')
self.group, self.i18n_group = self.trAlgorithm('Points')
self.addParameter(ParameterFile(self.INPUT, self.tr('Input LAS layer')))
self.addParameter(ParameterBoolean(
self.SWITCH, self.tr('Use LAS info'), True))
self.addOutput(OutputFile(self.OUTPUT, self.tr('Output layers')))
self.addAdvancedModifiers()
def processAlgorithm(self, progress):
commands = [os.path.join(FusionUtils.FusionPath(), 'FirstLastReturn.exe')]
commands.append('/verbose')
if self.getParameterValue(self.SWITCH):
commands.append('/uselas')
self.addAdvancedModifiersToCommand(commands)
outFile = self.getOutputValue(self.OUTPUT)
commands.append(outFile)
files = self.getParameterValue(self.INPUT).split(';')
if len(files) == 1:
commands.append(self.getParameterValue(self.INPUT))
else:
FusionUtils.createFileList(files)
commands.append(FusionUtils.tempFileListFilepath())
FusionUtils.runFusion(commands, progress)
        missing_string = color('part_big_problem', 'MISSING')
output['brief_description'] = ok_string if self.has_brief_description else missing_string
output['description'] = ok_string if self.has_description else missing_string
description_progress = ClassStatusProgress(
(self.has_brief_description + self.has_description) * overall_progress_description_weigth,
2 * overall_progress_description_weigth
)
items_progress = ClassStatusProgress()
for k in ['methods', 'constants', 'members', 'signals']:
items_progress += self.progresses[k]
output[k] = self.progresses[k].to_configured_colored_string()
output['items'] = items_progress.to_configured_colored_string()
output['overall'] = (description_progress + items_progress).to_colored_string('{percent}%', '{pad_percent}{s}')
if self.name.startswith('Total'):
output['url'] = color('url', 'http://docs.godotengine.org/en/latest/classes/')
if flags['s']:
output['comment'] = color('part_good', 'ALL OK')
else:
output['url'] = color('url', 'http://docs.godotengine.org/en/latest/classes/class_{name}.html'.format(name=self.name.lower()))
if flags['s'] and not flags['g'] and self.is_ok():
output['comment'] = color('part_good', 'ALL OK')
return output
def generate_for_class(c):
status = ClassStatus()
status.name = c.attrib['name']
# setgets do not count
methods = []
for tag in list(c):
if tag.tag in ['methods']:
for sub_tag in list(tag):
methods.append(sub_tag.find('name'))
if tag.tag in ['members']:
for sub_tag in list(tag):
try:
methods.remove(sub_tag.find('setter'))
methods.remove(sub_tag.find('getter'))
except:
pass
for tag in list(c):
if tag.tag == 'brief_description':
status.has_brief_description = len(tag.text.strip()) > 0
elif tag.tag == 'description':
status.has_description = len(tag.text.strip()) > 0
elif tag.tag in ['methods', 'signals']:
for sub_tag in list(tag):
if sub_tag.find('name') in methods or tag.tag == 'signals':
descr = sub_tag.find('description')
status.progresses[tag.tag].increment(len(descr.text.strip()) > 0)
elif tag.tag in ['constants', 'members']:
for sub_tag in list(tag):
status.progresses[tag.tag].increment(len(sub_tag.text.strip()) > 0)
elif tag.tag in ['tutorials', 'demos']:
pass # Ignore those tags for now
elif tag.tag in ['theme_items']:
pass # Ignore those tags, since they seem to lack description at all
else:
print(tag.tag, tag.attrib)
return status
################################################################################
# Arguments #
################################################################################
input_file_list = []
input_class_list = []
merged_file = ""
for arg in sys.argv[1:]:
if arg.startswith('--'):
flags[long_flags[arg[2:]]] = not flags[long_flags[arg[2:]]]
elif arg.startswith('-'):
for f in arg[1:]:
flags[f] = not flags[f]
elif os.path.isdir(arg):
for f in os.listdir(arg):
if f.endswith('.xml'):
                input_file_list.append(os.path.join(arg, f))
else:
input_class_list.append(arg)
if flags['i']:
for r in ['methods', 'constants', 'members', 'signals']:
index = table_columns.index(r)
del table_column_names[index]
del table_columns[index]
table_column_names.append('Items')
table_columns.append('items')
if flags['o'] == (not flags['i']):
table_column_names.append('Overall')
table_columns.append('overall')
if flags['u']:
table_column_names.append('Docs URL')
table_columns.append('url')
################################################################################
# Help #
################################################################################
if len(input_file_list) < 1 or flags['h']:
if not flags['h']:
print(color('section', 'Invalid usage') + ': Please specify a classes directory')
print(color('section', 'Usage') + ': doc_status.py [flags] <classes_dir> [class names]')
print('\t< and > signify required parameters, while [ and ] signify optional parameters.')
print(color('section', 'Available flags') + ':')
possible_synonym_list = list(long_flags)
possible_synonym_list.sort()
flag_list = list(flags)
flag_list.sort()
for flag in flag_list:
synonyms = [color('name', '-' + flag)]
for synonym in possible_synonym_list:
if long_flags[synonym] == flag:
synonyms.append(color('name', '--' + synonym))
print(('{synonyms} (Currently ' + color('state_' + ('on' if flags[flag] else 'off'), '{value}') + ')\n\t{description}').format(
synonyms=', '.join(synonyms),
value=('on' if flags[flag] else 'off'),
description=flag_descriptions[flag]
))
sys.exit(0)
################################################################################
# Parse class list #
################################################################################
class_names = []
classes = {}
for file in input_file_list:
tree = ET.parse(file)
doc = tree.getroot()
if 'version' not in doc.attrib:
print('Version missing from "doc"')
sys.exit(255)
version = doc.attrib['version']
if doc.attrib['name'] in class_names:
continue
class_names.append(doc.attrib['name'])
classes[doc.attrib['name']] = doc
class_names.sort()
if len(input_class_list) < 1:
input_class_list = class_names
################################################################################
# Make output table #
################################################################################
table = [table_column_names]
table_row_chars = '| - '
table_column_chars = '|'
total_status = ClassStatus('Total')
for cn in input_class_list:
if not cn in classes:
print('Cannot find class ' + cn + '!')
sys.exit(255)
c = classes[cn]
validate_tag(c, 'class')
status = ClassStatus.generate_for_class(c)
    total_status = total_status + status
if (flags['b'] and status.is_ok()) or (flags['g'] and not status.is_ok()) or (not flags['a']):
continue
out = status.make_output()
row = []
for column in table_columns:
if column in out:
row.append(out[column])
else:
row.append('')
if 'comment' in out and out['comment'] != '':
row.append(out['comment'])
    table.append(row)
################################################################################
# Print output table #
################################################################################
if len(table) == 1 and flags['a']:
print(color('part_big_problem', 'No classes suitable for printing!'))
sys.exit(0)
if len(table) > 2 or not flags['a']:
total_status.name = 'Total = {0}'.format(len(table) - 1)
out = total_status.make_output()
row = []
for column in table_columns:
if column in out:
row.append(out[column])
else:
row.append('')
table.append(row)
table_column_sizes = []
for row in table:
for cell_i, cell in enumerate(row):
if cell_i >= len(table_column_sizes):
            table_column_sizes.append(0)
# vDial-up client
# Copyright (C) 2015 - 2017 Nathaniel Olsen
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from time import sleep
import socket
import libs.vDialupcore as core
from multiprocessing import Process
import sys
import struct
def MD5SUM_mismatch(vNumber_to_connect, sock, received):
    print("*Warning: The server's MD5SUM does not match the one listed on file. Do you wish to continue? (Y/N)")
    if vNumber_to_connect == core.RegServ_vNumber:
        MD5SUM_on_file = core.RegServ_MD5SUM
    else:
        # Right now there is no way to retrieve another server's MD5SUM
        # until MD5SUM retrieval is implemented in RegServ.
        MD5SUM_on_file = "unknown"
    print("MD5SUM on file: %s" % (MD5SUM_on_file))
    print("MD5SUM according to server: %s" % (received.split()[1]))
    print("")
    choice = input("Enter choice (Y/N): ")
    if choice == 'Y' or choice == 'y':
        main.init(sock, vNumber_to_connect)
    if choice == 'N' or choice == 'n':
        sys.exit()  # Exit for now.
class main():
def send_msg(sock, msg):
        # Prefix each message with a 4-byte length (network byte order)
msg = struct.pack('>I', len(msg)) + str.encode(msg)
        sock.sendall(msg)
    def recv_msg(sock):
        # Read the 4-byte length prefix and unpack it into an integer
        raw_msglen = main.recvall(sock, 4)
        if not raw_msglen:
            return None
        msglen = struct.unpack('>I', raw_msglen)[0]
        payload = main.recvall(sock, msglen)
        return payload.decode('utf-8') if payload is not None else None
    def recvall(sock, n):
        # Helper function to recv n raw bytes or return None if EOF is hit;
        # the data stays as bytes here because the length prefix is binary
        # and must not be pushed through a UTF-8 decode
        data = b''
        while len(data) < n:
            packet = sock.recv(n - len(data))
            if not packet:
                return None
            data += packet
        return data
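    # Example of the framing (a sketch): send_msg(sock, "INIT") writes the
    # 4-byte big-endian length b'\x00\x00\x00\x04' followed by b'INIT';
    # recv_msg reads those 4 bytes back first, then exactly 4 payload bytes.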
    def servping(sock):
        # Keepalive loop: ping every 20 seconds until the server stops answering
        while 1:
            sleep(20)
            sock.sendall(bytes("SERVPING" + "\n", "utf-8"))
            if main.recv_msg(sock) != "PONG":
                print("Disconnected: Connection timeout.")
                break
def vdialing(vNumber_to_connect, vNumber_IP):
if core.config['use_ipv6_when_possible']:
sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
else:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print("vDialing %s..." % (vNumber_to_connect))
if core.config['vDial-up Settings']['vNumber'] == "000000000":
core.dialupnoise()
try:
sock.connect((vNumber_IP, 5000))
except ConnectionRefusedError:
print("Error: Connection Refused.")
sys.exit()
main.send_msg(sock, "INITPING")
if main.recv_msg(sock) == "PONG":
print("Connected.")
#Process(target=main.servping, args=[sock]).start() # The ability to check if a server connection is still alive is coming soon.
main.send_msg(sock, "MD5SUMCHECK")
if main.recv_msg(sock).split()[0] == "MD5SUM:":
if main.recv_msg(sock).split()[1] == core.RegServ_MD5SUM:
print("MD5SUM verification was succeeded.")
else:
MD5SUM_mismatch(vNumber_to_connect, sock)
else:
print("Error: Unable to retrieve MD5SUM.")
main.init(sock, vNumber_to_connect)
else:
print("Error: Server did not properly respond to INITPING, disconnecting.")
else:
Process(target=core.dialupnoise()).start()
sock.connect((vNumber_IP, 5000))
main.send_msg(sock, "INITPING")
if main.recv_msg(sock) == "PONG":
print("Connected to Registation Server!")
main.send_msg(sock, "MD5SUMCHECK")
if main.recv_msg(sock).split()[0] == "MD5SUM:":
if main.recv_msg(sock).split()[1] == core.RegServ_MD5SUM:
print("MD5SUM verification was succeeded.")
else:
MD5SUM_mismatch(vNumber_to_connect, sock)
else:
print("Error: Unable to retrieve MD5SUM.")
else:
print("Error: Server did not properly respond to INITPING, disconnecting.")
def init(sock, vNumber_to_connect):
main.send_msg(sock, "VNUMBER: {}".format(core.config['vDial-up Settings']['vNumber']))
if core.config['vDial-up Settings']['vNumber'] == "000000000":
main.send_msg(sock, "CLIENTREGISTER")
if main.recv_msg(sock).split()[0] == "CONFIG:":
if main.recv_msg(sock).split()[1] == "vNumber":
core.config['vDial-up Settings']['vNumber'] = main.recv_msg(sock).split()[2]
core.saveconfig()
if main.recv_msg(sock).split()[1] == "Key":
core.config['vDial-up Settings']['Key'] = main.recv_msg(sock).split()[2]
core.saveconfig()
if main.recv_msg(sock).split()[0] == "TOCLIENT:":
print(" ".join(main.recv_msg(sock).split()[2:]))
else:
main.send_msg(sock, "KEY: {}".format(core.config['vDial-up Settings']['Key']))
main.send_msg(sock, "INIT")
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
"""
TODO:
IDEAS:
LATER:
ISSUES:
Bugs:
Seg-faults when unregistering addon...
Mites:
* History back button does not light up on first cursor move.
It does light up on the second, or when mouse enters the tool-area
* Switching between local and global view triggers new cursor position in history trace.
* Each consecutive click on the linex operator triggers new cursor position in history trace.
(2011-01-16) Was not able to fix this because of some strange script behaviour
while trying to clear linexChoice from addHistoryLocation
QUESTIONS:
"""
import bpy
import bgl
import math
from mathutils import Vector, Matrix
from mathutils import geometry
from misc_utils import *
from constants_utils import *
from cursor_utils import *
from ui_utils import *
class CursorHistoryData(bpy.types.PropertyGroup):
# History tracker
historyDraw = bpy.props.BoolProperty(description="Draw history trace in 3D view",default=1)
historyDepth = 144
historyWindow = 12
historyPosition = [-1] # Integer must be in a list or else it can not be written to
historyLocation = []
#historySuppression = [False] # Boolean must be in a list or else it can not be written to
def addHistoryLocation(self, l):
if(self.historyPosition[0]==-1):
self.historyLocation.append(l.copy())
self.historyPosition[0]=0
return
if(l==self.historyLocation[self.historyPosition[0]]):
return
#if self.historySuppression[0]:
#self.historyPosition[0] = self.historyPosition[0] - 1
#else:
#self.hideLinexChoice()
while(len(self.historyLocation)>self.historyPosition[0]+1):
self.historyLocation.pop(self.historyPosition[0]+1)
#self.historySuppression[0] = False
self.historyLocation.append(l.copy())
if(len(self.historyLocation)>self.historyDepth):
self.historyLocation.pop(0)
self.historyPosition[0] = len(self.historyLocation)-1
#print (self.historyLocation)
#def enableHistorySuppression(self):
#self.historySuppression[0] = True
def previousLocation(self):
if(self.historyPosition[0]<=0):
return
self.historyPosition[0] = self.historyPosition[0] - 1
CursorAccess.setCursor(self.historyLocation[self.historyPosition[0]].copy())
def nextLocation(self):
if(self.historyPosition[0]<0):
return
if(self.historyPosition[0]+1==len(self.historyLocation)):
return
self.historyPosition[0] = self.historyPosition[0] + 1
CursorAccess.setCursor(self.historyLocation[self.historyPosition[0]].copy())
class VIEW3D_OT_cursor_previous(bpy.types.Operator):
"""Previous cursor location"""
bl_idname = "view3d.cursor_previous"
bl_label = "Previous cursor location"
bl_options = {'REGISTER'}
def modal(self, context, event):
return {'FINISHED'}
def execute(self, context):
cc = context.scene.cursor_history
cc.previousLocation()
return {'FINISHED'}
class VIEW3D_OT_cursor_next(bpy.types.Operator):
"""Next cursor location"""
bl_idname = "view3d.cursor_next"
bl_label = "Next cursor location"
bl_options = {'REGISTER'}
def modal(self, context, event):
return {'FINISHED'}
def execute(self, context):
cc = context.scene.cursor_history
cc.nextLocation()
return {'FINISHED'}
class VIEW3D_OT_cursor_history_show(bpy.types.Operator):
"""Show cursor trace"""
bl_idname = "view3d.cursor_history_show"
bl_label = "Show cursor trace"
bl_options = {'REGISTER'}
def modal(self, context, event):
return {'FINISHED'}
def execute(self, context):
cc = context.scene.cursor_history
cc.historyDraw = True
BlenderFake.forceRedraw()
return {'FINISHED'}
class VIEW3D_OT_cursor_history_hide(bpy.types.Operator):
"""Hide cursor trace"""
bl_idname = "view3d.cursor_history_hide"
bl_label = "Hide cursor trace"
bl_options = {'REGISTER'}
def modal(self, context, event):
return {'FINISHED'}
def execute(self, context):
cc = context.scene.cursor_history
cc.historyDraw = False
BlenderFake.forceRedraw()
return {'FINISHED'}
class VIEW3D_PT_cursor_history(bpy.types.Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_label = "Cursor History"
bl_options = {'DEFAULT_CLOSED'}
@classmethod
    def poll(cls, context):
# Display in object or edit mode.
cc = context.scene.cursor_history
cc.addHistoryLocation(CursorAccess.getCursor())
if (context.area.type == 'VIEW_3D' and
(context.mode == 'EDIT_MESH'
or context.mode == 'OBJECT')):
return 1
return 0
def draw_header(self, context):
layout = self.layout
cc = context.scene.cursor_history
if cc.historyDraw:
GUI.drawIconButton(True, layout, 'RESTRICT_VIEW_OFF', "view3d.cursor_history_hide", False)
else:
GUI.drawIconButton(True, layout, 'RESTRICT_VIEW_ON' , "view3d.cursor_history_show", False)
def draw(self, context):
layout = self.layout
sce = context.scene
cc = context.scene.cursor_history
row = layout.row()
row.label("Navigation: | ")
GUI.drawIconButton(cc.historyPosition[0]>0, row, 'PLAY_REVERSE', "view3d.cursor_previous")
#if(cc.historyPosition[0]<0):
#row.label(" -- ")
        #else:
#row.label(" "+str(cc.historyPosition[0])+" ")
GUI.drawIconButton(cc.historyPosition[0]<len(cc.historyLocation)-1, row, 'PLAY', "view3d.cursor_next")
row = layout.row()
col = row.column()
col.prop(CursorAccess.findSpace(), "cursor_location")
class VIEW3D_PT_cursor_history_init(bpy.types.Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_label = "Register callback"
bl_options = {'DEFAULT_CLOSED'}
initDone = False
_handle = None
@staticmethod
def handle_add(self, context):
VIEW3D_PT_cursor_history_init._handle = bpy.types.SpaceView3D.draw_handler_add(
cursor_history_draw, (self, context), 'WINDOW', 'POST_PIXEL')
@staticmethod
def handle_remove():
if VIEW3D_PT_cursor_history_init._handle is not None:
bpy.types.SpaceView3D.draw_handler_remove(VIEW3D_PT_cursor_history_init._handle, 'WINDOW')
VIEW3D_PT_cursor_history_init._handle = None
@classmethod
def poll(cls, context):
if VIEW3D_PT_cursor_history_init.initDone:
return False
print ("Cursor History draw-callback registration...")
sce = context.scene
if context.area.type == 'VIEW_3D':
VIEW3D_PT_cursor_history_init.handle_add(cls, context)
VIEW3D_PT_cursor_history_init.initDone = True
print ("Cursor History draw-callback registered")
# Unregister to prevent double registration...
# Started to fail after v2.57
# bpy.types.unregister(VIEW3D_PT_cursor_history_init)
else:
print("View3D not found, cannot run operator")
return False
            # Add bv to the new blossom.
blossomparent[bv] = b
path.append(bv)
endps.append(labelend[bv])
assert (label[bv] == 2 or
(label[bv] == 1 and labelend[bv] == mate[blossombase[bv]]))
# Trace one step back.
assert labelend[bv] >= 0
v = endpoint[labelend[bv]]
bv = inblossom[v]
# Reverse lists, add endpoint that connects the pair of S vertices.
path.append(bb)
path.reverse()
endps.reverse()
endps.append(2*k)
# Trace back from w to base.
while bw != bb:
# Add bw to the new blossom.
blossomparent[bw] = b
path.append(bw)
endps.append(labelend[bw] ^ 1)
assert (label[bw] == 2 or
(label[bw] == 1 and labelend[bw] == mate[blossombase[bw]]))
# Trace one step back.
assert labelend[bw] >= 0
w = endpoint[labelend[bw]]
bw = inblossom[w]
# Set label to S.
assert label[bb] == 1
label[b] = 1
labelend[b] = labelend[bb]
# Set dual variable to zero.
dualvar[b] = 0
# Relabel vertices.
for v in blossomLeaves(b):
if label[inblossom[v]] == 2:
# This T-vertex now turns into an S-vertex because it becomes
# part of an S-blossom; add it to the queue.
queue.append(v)
inblossom[v] = b
# Compute blossombestedges[b].
bestedgeto = (2 * nvertex) * [ -1 ]
for bv in path:
if blossombestedges[bv] is None:
# This subblossom does not have a list of least-slack edges;
# get the information from the vertices.
nblists = [ [ p // 2 for p in neighbend[v] ]
for v in blossomLeaves(bv) ]
else:
# Walk this subblossom's least-slack edges.
nblists = [ blossombestedges[bv] ]
for nblist in nblists:
for k in nblist:
(i, j, wt) = edges[k]
if inblossom[j] == b:
i, j = j, i
bj = inblossom[j]
if (bj != b and label[bj] == 1 and
(bestedgeto[bj] == -1 or
slack(k) < slack(bestedgeto[bj]))):
bestedgeto[bj] = k
# Forget about least-slack edges of the subblossom.
blossombestedges[bv] = None
bestedge[bv] = -1
blossombestedges[b] = [ k for k in bestedgeto if k != -1 ]
# Select bestedge[b].
bestedge[b] = -1
for k in blossombestedges[b]:
if bestedge[b] == -1 or slack(k) < slack(bestedge[b]):
bestedge[b] = k
if DEBUG: DEBUG('blossomchilds[%d]=' % b + repr(blossomchilds[b]))
# Expand the given top-level blossom.
def expandBlossom(b, endstage):
if DEBUG: DEBUG('expandBlossom(%d,%d) %s' % (b, endstage, repr(blossomchilds[b])))
# Convert sub-blossoms into top-level blossoms.
for s in blossomchilds[b]:
blossomparent[s] = -1
if s < nvertex:
inblossom[s] = s
elif endstage and dualvar[s] == 0:
# Recursively expand this sub-blossom.
expandBlossom(s, endstage)
else:
for v in blossomLeaves(s):
inblossom[v] = s
# If we expand a T-blossom during a stage, its sub-blossoms must be
# relabeled.
if (not endstage) and label[b] == 2:
# Start at the sub-blossom through which the expanding
            # blossom obtained its label, and relabel sub-blossoms until
# we reach the base.
# Figure out through which sub-blossom the expanding blossom
# obtained its label initially.
assert labelend[b] >= 0
entrychild = inblossom[endpoint[labelend[b] ^ 1]]
# Decide in which direction we will go round the blossom.
j = blossomchilds[b].index(entrychild)
            if j & 1:
# Start index is odd; go forward and wrap.
j -= len(blossomchilds[b])
jstep = 1
endptrick = 0
else:
# Start index is even; go backward.
jstep = -1
endptrick = 1
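            # Worked example (hypothetical): with 5 sub-blossoms and
            # entrychild at index 3 (odd), j becomes 3 - 5 = -2, and stepping
            # by +1 through negative indices walks forward around the cycle,
            # stopping at the base (index 0). An even start index walks
            # backward (jstep = -1) and also terminates at index 0.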
# Move along the blossom until we get to the base.
p = labelend[b]
while j != 0:
# Relabel the T-sub-blossom.
label[endpoint[p ^ 1]] = 0
label[endpoint[blossomendps[b][j-endptrick]^endptrick^1]] = 0
assignLabel(endpoint[p ^ 1], 2, p)
# Step to the next S-sub-blossom and note its forward endpoint.
allowedge[blossomendps[b][j-endptrick]//2] = True
j += jstep
p = blossomendps[b][j-endptrick] ^ endptrick
# Step to the next T-sub-blossom.
allowedge[p//2] = True
j += jstep
# Relabel the base T-sub-blossom WITHOUT stepping through to
# its mate (so don't call assignLabel).
bv = blossomchilds[b][j]
label[endpoint[p ^ 1]] = label[bv] = 2
labelend[endpoint[p ^ 1]] = labelend[bv] = p
bestedge[bv] = -1
# Continue along the blossom until we get back to entrychild.
j += jstep
while blossomchilds[b][j] != entrychild:
# Examine the vertices of the sub-blossom to see whether
# it is reachable from a neighbouring S-vertex outside the
# expanding blossom.
bv = blossomchilds[b][j]
if label[bv] == 1:
# This sub-blossom just got label S through one of its
# neighbours; leave it.
j += jstep
continue
for v in blossomLeaves(bv):
if label[v] != 0:
break
# If the sub-blossom contains a reachable vertex, assign
# label T to the sub-blossom.
if label[v] != 0:
assert label[v] == 2
assert inblossom[v] == bv
label[v] = 0
label[endpoint[mate[blossombase[bv]]]] = 0
assignLabel(v, 2, labelend[v])
j += jstep
# Recycle the blossom number.
label[b] = labelend[b] = -1
blossomchilds[b] = blossomendps[b] = None
blossombase[b] = -1
blossombestedges[b] = None
bestedge[b] = -1
unusedblossoms.append(b)
# Swap matched/unmatched edges over an alternating path through blossom b
# between vertex v and the base vertex. Keep blossom bookkeeping consistent.
def augmentBlossom(b, v):
if DEBUG: DEBUG('augmentBlossom(%d,%d)' % (b, v))
# Bubble up through the blossom tree from vertex v to an immediate
# sub-blossom of b.
t = v
while blossomparent[t] != b:
t = blossomparent[t]
# Recursively deal with the first sub-blossom.
if t >= nvertex:
augmentBlossom(t, v)
# Decide in which direction we will go round the blossom.
i = j = blossomchilds[b].index(t)
if i & 1:
# Start index is odd; go forward and wrap.
j -= len(blossomchilds[b])
jstep = 1
endptrick = 0
else:
# Start index is even; go backward.
jstep = -1
endptrick = 1
# Move along the blossom until we get to the base.
while j != 0:
# Step to the next sub-blossom and augment it recursively.
j += jstep
t = blossomchilds[b][j]
p = blossomendps[b][j-endptrick] ^ endptrick
if t >= nvertex:
augmentBlossom(t, endpoint[p])
# Step to the next sub-blossom and augment it recursively.
            j += jstep
# Copyright (c) 2016 Dustin Doloff
# Licensed under Apache License v2.0
import jinja2
import os
MESSAGE_FILL = '`'
AUTO_GEN_MESSAGE = """
``````````````````````````````````````````````````````
``````````````````````````````````````````````````````
````````______________________________________ ``````
```````/ /\ `````
``````/ /..\ ````
`````/ AUTO-GENERATED FILE. DO NOT EDIT /....\ ```
````/ /______\ ``
```/_____________________________________/````````````
``````````````````````````````````````````````````````
``````````````````````````````````````````````````````
"""
def reverse(v):
"""
    Reverses any sequence that supports slicing
"""
return v[::-1]
def auto_gen_message(open, fill, close):
"""
    Produces the auto-generated warning header with language-specific syntax
open - str - The language-specific opening of the comment
fill - str - The values to fill the background with
    close - str - The language-specific closing of the comment
"""
assert open or fill or close
message = AUTO_GEN_MESSAGE.strip()
if open:
message = message.replace(MESSAGE_FILL * len(open), open, 1)
if close:
        message = reverse(reverse(message).replace(MESSAGE_FILL * len(close), close[::-1], 1))
if fill:
message = message.replace(MESSAGE_FILL * len(fill), fill)
return message
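# Usage sketch: auto_gen_message('#', '#', '') turns the backtick banner into
# an all-'#' Python comment block, while auto_gen_message('<!--', '-', '-->')
# opens and closes it as an HTML comment; generate() below picks a style from
# the output file extension in exactly this way.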
def generate(template, config, out_file, pretty=False):
path, ext = os.path.splitext(out_file.name)
ext = ext[1:]
if pretty:
if ext == 'py':
out_file.write(auto_gen_message('#', '#', ''))
elif ext == 'html':
out_file.write(auto_gen_message('<!--', '-', '-->'))
template_path, template_filename = os.path.split(template)
env = jinja2.Environment(loader = jinja2.FileSystemLoader([template_path]))
template = env.get_template(template_filename)
template.stream(config).dump(out_file)
# There needs to be an extra line at the end to make it a valid text file. Jinja strips trailing
# whitespace
if pretty:
out_file.write(os.linesep)
#!/usr/bin/env python
from gnuradio import gr
from gnuradio import blocks
from gnuradio import digital
import numpy
# Applies fftshift to a vector.
#
class FFTshift(gr.basic_block):
#constructor
def __init__(self,size,drop_when_overloaded):
gr.basic_block.__init__(self, name="FFT_Shift",
            in_sig=[(numpy.float32,size)],
out_sig=[(numpy.float32,size)])
self.drop_true = drop_when_overloaded
#run
def general_work(self, input_items, output_items):
in0 = input_items[0]
out = output_items[0]
if len(out) >= len(in0):
ps_len = len(in0)
consume_len = ps_len
elif self.drop_true:
ps_len = len(out)
consume_len = len(in0)
        else:
ps_len = len(out)
consume_len = ps_len
for cnt in range(0,ps_len):
out[cnt] = numpy.fft.fftshift(in0[cnt])
self.consume_each(consume_len)
return ps_len
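# Minimal standalone sketch (not used by GNU Radio) of the per-item operation:
# numpy.fft.fftshift swaps the two halves of a vector, moving the
# zero-frequency bin to the centre.
if __name__ == '__main__':
    v = numpy.arange(6, dtype=numpy.float32)
    print(numpy.fft.fftshift(v))  # -> [3. 4. 5. 0. 1. 2.]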
    def detect_cxx(self):
"""Detect compiler name"""
        # 'cxx' configure option expects compiler name like 'gcc', 'icc', 'clang'; i.e. actually the C compiler command
# see also main/source/tools/build/basic.settings in Rosetta sources
self.cxx = os.getenv('CC_SEQ')
if self.cxx is None:
self.cxx = os.getenv('CC')
def configure_step(self):
"""
Configure build by creating tools/build/user.settings from configure options.
"""
# construct build options
defines = ['NDEBUG']
self.cfg.update('buildopts', "mode=release")
self.detect_cxx()
cxx_ver = None
if self.toolchain.comp_family() in [toolchain.GCC]: #@UndefinedVariable
cxx_ver = '.'.join(get_software_version('GCC').split('.')[:2])
elif self.toolchain.comp_family() in [toolchain.INTELCOMP]: #@UndefinedVariable
cxx_ver = '.'.join(get_icc_version().split('.')[:2])
else:
raise EasyBuildError("Don't know how to determine C++ compiler version.")
self.cfg.update('buildopts', "cxx=%s cxx_ver=%s" % (self.cxx, cxx_ver))
if self.toolchain.options.get('usempi', None):
self.cfg.update('buildopts', 'extras=mpi')
defines.extend(['USEMPI', 'MPICH_IGNORE_CXX_SEEK'])
# make sure important environment variables are passed down
# e.g., compiler env vars for MPI wrappers
env_vars = {}
for (key, val) in os.environ.items():
if key in ['I_MPI_CC', 'I_MPI_CXX', 'MPICH_CC', 'MPICH_CXX', 'OMPI_CC', 'OMPI_CXX']:
env_vars.update({key: val})
self.log.debug("List of extra environment variables to pass down: %s" % str(env_vars))
# create user.settings file
paths = os.getenv('PATH').split(':')
ld_library_paths = os.getenv('LD_LIBRARY_PATH').split(':')
cpaths = os.getenv('CPATH').split(':')
flags = [str(f).strip('-') for f in self.toolchain.variables['CXXFLAGS'].copy()]
txt = '\n'.join([
"settings = {",
" 'user': {",
" 'prepends': {",
" 'library_path': %s," % str(ld_library_paths),
" 'include_path': %s," % str(cpaths),
" },",
" 'appends': {",
" 'program_path': %s," % str(paths),
" 'flags': {",
" 'compile': %s," % str(flags),
#" 'mode': %s," % str(o_flags),
" },",
" 'defines': %s," % str(defines),
" },",
" 'overrides': {",
" 'cc': '%s'," % os.getenv('CC'),
" 'cxx': '%s'," % os.getenv('CXX'),
" 'ENV': {",
" 'INTEL_LICENSE_FILE': '%s'," % os.getenv('INTEL_LICENSE_FILE'), # Intel license file
" 'PATH': %s," % str(paths),
" 'LD_LIBRARY_PATH': %s," % str(ld_library_paths),
])
txt += '\n'
for (key, val) in env_vars.items():
txt += " '%s': '%s',\n" % (key, val)
txt += '\n'.join([
" },",
" },",
" 'removes': {",
" },",
" },",
"}",
])
us_fp = os.path.join(self.srcdir, "tools/build/user.settings")
try:
self.log.debug("Creating '%s' with: %s" % (us_fp, txt))
f = file(us_fp, 'w')
f.write(txt)
f.close()
except IOError, err:
raise EasyBuildError("Failed to write settings file %s: %s", us_fp, err)
# make sure specified compiler version is accepted by patching it in
os_fp = os.path.join(self.srcdir, "tools/build/options.settings")
cxxver_re = re.compile('(.*"%s".*)(,\s*"\*"\s*],.*)' % self.cxx, re.M)
for line in fileinput.input(os_fp, inplace=1, backup='.orig.eb'):
line = cxxver_re.sub(r'\1, "%s"\2' % cxx_ver, line)
sys.stdout.write(line)
def build_step(self):
"""
Build Rosetta using 'python ./scons.py bin <opts> -j <N>'
"""
try:
os.chdir(self.srcdir)
except OSError, err:
raise EasyBuildError("Failed to change to %s: %s", self.srcdir, err)
par = ''
if self.cfg['parallel']:
par = "-j %s" % self.cfg['parallel']
cmd = "python ./scons.py %s %s bin" % (self.cfg['buildopts'], par)
run_cmd(cmd, log_all=True, simple=True)
def install_step(self):
"""
Copy built files (from e.g. build/src/release/linux/2.6/64/x86/icc/10.0/mpi) to <installpath>/bin,
and copy (or untar) database and bioTools to install directory
"""
shlib_ext = get_shared_lib_ext()
bindir = os.path.join(self.installdir, 'bin')
libdir = os.path.join(self.installdir, 'lib')
try:
os.makedirs(bindir)
os.makedirs(libdir)
except OSError, err:
raise EasyBuildError("Failed to created bin/lib dirs: %s, %s", bindir, libdir)
for build_subdir in ['src', 'external']:
builddir = os.path.join('build', build_subdir)
if not os.path.exists(builddir):
continue
# walk the build/src dir to leaf
try:
while len(os.listdir(builddir)) == 1:
builddir = os.path.join(builddir, os.listdir(builddir)[0])
except OSError, err:
raise EasyBuildError("Failed to walk build/src dir: %s", err)
# copy binaries/libraries to install dir
lib_re = re.compile("^lib.*\.%s$" % shlib_ext)
try:
for fil in os.listdir(builddir):
srcfile = os.path.join(builddir, fil)
if os.path.isfile(srcfile):
if lib_re.match(fil):
self.log.debug("Copying %s to %s" % (srcfile, libdir))
shutil.copy2(srcfile, os.path.join(libdir, fil))
else:
self.log.debug("Copying %s to %s" % (srcfile, bindir))
shutil.copy2(srcfile, os.path.join(bindir, fil))
except OSError, err:
raise EasyBuildError("Copying executables from %s to bin/lib install dirs failed: %s", builddir, err)
os.chdir(self.cfg['start_dir'])
def extract_and_copy(dirname_tmpl, optional=False):
"""Copy specified directory, after extracting it (if required)."""
try:
srcdir = os.path.join(self.cfg['start_dir'], dirname_tmpl % '')
if not os.path.exists(srcdir):
# try to extract if directory is not there yet
src_tarball = os.path.join(self.cfg['start_dir'], (dirname_tmpl % self.version) + '.tgz')
if os.path.isfile(src_tarball):
srcdir = extract_file(src_tarball, self.cfg['start_dir'])
if os.path.exists(srcdir):
shutil.copytree(srcdir, os.path.join(self.installdir, os.path.basename(srcdir)))
elif not optional:
raise EasyBuildError("Neither source directory '%s', nor source tar | ball '%s' found.",
srcdir, src_tarball)
            except OSError, err:
raise EasyBuildError("Getting Rosetta %s dir ready failed: %s", dirname_tmpl, err)
# (extract and) copy database and biotools (if it's there)
if os.path.exists(os.path.join(self.cfg['start_dir'], 'main', 'database')):
extract_and_copy(os.path.join('main', 'database') + '%s')
else:
extract_and_copy('rosetta_database%s')
extract_and_copy('BioTools%s', optional=True)
if os.path.exists(os.path.join(self.cfg['start_dir'], 'tools')):
extract_and_copy('tools%s', optional=True)
else:
            extract_and_copy('rosetta_tools%s', optional=True)
# Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from django import test
from common import api
from common import util
from common import validate
from common.test import base
class CommonViewTest(base.ViewTestCase):
def test_redirect_slash(self):
r = self.login_and_get('popular', '/user/popular/overview/')
redirected = self.assertRedirectsPrefix(r, '/user/popular/overview')
self.assertTemplateUsed(redirected, 'actor/templates/overview.html')
def test_confirm(self):
nonce = util.create_nonce('popular', 'entry_remove')
entry = 'stream/popular%40example.com/presence/12345'
path = '/user/popular/overview'
r = self.login_and_get('popular', path, {'entry_remove': entry,
'_nonce': nonce})
r = self.assertRedirectsPrefix(r, '/confirm')
self.assertContains(r, nonce)
self.assertContains(r, entry)
self.assertContains(r, path)
class UtilTestCase(test.TestCase):
def test_get_user_from_topic(self):
topics = [('root@example.com', 'inbox/root@example.com/presence'),
('root@example.com', 'inbox/root@example.com/overview'),
('root@example.com', 'stream/root@example.com/presence/12345'),
(None, 'stream//presence'),
(None, 'stream/something/else'),
('duuom+aasdd@gmail.com', 'crazy/duuom+aasdd@gmail.com/dddfff$$%%///'),
('asdad@asdasd@asdasd', 'multi/asdad@asdasd@asdasd/cllad/asdff')]
for t in topics:
self.assertEqual(util.get_user_from_topic(t[1]), t[0], t[1])
# We're going to import the rest of the test cases into the local
# namespace so that we can run them as
# python manage.py test common.WhateverTest
from common.test.api import *
from common.test.clean import *
from common.test.db import *
from common.test.domain import *
from common.test.monitor import *
from common.test.notification import *
from common.test.patterns import *
from common.test.queue import *
from common.test.sms import *
from common.test.throttle import *
from common.test.validate import *
from common.templatetags.test.avatar import *
from common.templatetags.test.format import *
from common.templatetags.test.presence import *
# This is for legacy compat with older tests
# TODO(termie): remove me when no longer needed
from common.test.base import *
from common.test.util import *
size_hint=(button_width, button_height),
pos_hint = {"x" : 1.0 - joystick_width - 0.01, "y" : 0.0 + 0.01},
)
else:
button_1 = Button(
background_normal=background_file_big,
background_down=background_down_file_big,
size_hint=(button_width_big, button_height_big),
pos_hint = {"x" : 1.0 - joystick_width - 0.01, "y" : 0.0 + 0.01},
)
elif button_defs[1][controls.IDX_CONTROL_BUTTON_POS] == "left":
if button_defs[1][controls.IDX_CONTROL_BUTTON_SIZE] == "small":
button_1 = Button(
background_normal=background_file,
background_down=background_down_file,
size_hint=(button_width, button_height),
pos_hint = {"x" : joystick_x, "y" : joystick_y},
)
else:
button_1 = Button(
background_normal=background_file_big,
background_down=background_down_file_big,
size_hint=(button_width_big, button_height_big),
pos_hint = {"x" : joystick_x, "y" : joystick_y},
)
self.add_widget(button_1)
button_1.bind(state=self.on_button_1)
if button_defs[1][controls.IDX_CONTROL_BUTTON_POS] == "right":
if button_defs[1][controls.IDX_CONTROL_BUTTON_SIZE] == "big":
self.add_widget(Image(source="assets/img/ui/button_b.png",
allow_stretch = True,
pos_hint = {"x" : 1.0 - button_width_big - 0.01, "y" : 0.0 + 0.01},
size_hint=(button_width_big, button_height_big),
))
if len(button_defs) > 2:
button_2 = Button(
background_normal=background_file,
background_down=background_down_file,
                size_hint=(button_width, button_height),
pos_hint = {"x" : 1.0 - button_width - 0.01, "y" : button_height + 0.01},
)
self.add_widget(button_2)
button_2.bind(state=self.on_button_2)
if len(button_defs) > 3:
            button_3 = Button(
background_normal=background_file,
background_down=background_down_file,
size_hint=(button_width, button_height),
pos_hint = {"x" : 1.0 - joystick_width - 0.01, "y" : button_height + 0.01},
)
self.add_widget(button_3)
button_3.bind(state=self.on_button_3)
if self.on_exit_function:
exit_button_size = (0.15, 0.05)
exit_button_x = (1.0 - exit_button_size[0]) / 2.0
exit_button = Button(text="",
background_normal="assets/img/ui/joy_option_button.png",
background_down="assets/img/ui/joy_option_button_down.png",
size_hint=exit_button_size,
pos_hint = {"x":exit_button_x, "y":0.01},
allow_stretch = True,
)
exit_button.bind(state=self.on_exit)
self.add_widget(exit_button)
if debugging:
NUM_DEBUG_LINES = debug.NUM_DEBUG_LINES + 1
DEBUG_LINE_SIZE = 0.05
self.debug_label_array = []
for i in xrange(NUM_DEBUG_LINES):
self.debug_label_array.append(Label(
color=(0, 1, 0, 1),
size_hint=(1.0, DEBUG_LINE_SIZE),
pos_hint = {"x": 0.05, "y": 1.0 - DEBUG_LINE_SIZE - DEBUG_LINE_SIZE * i},
markup=True,
text_size=(Window.width, Window.height / NUM_DEBUG_LINES),
))
self.debug_label_array[i].bind(texture_size=self.setter('size'))
self.add_widget(self.debug_label_array[i])
Clock.schedule_interval(self.on_debug_timer, 0.5)
if platform == 'win' or platform == 'linux' or platform == 'macosx':
Window.bind(on_key_down=self._on_keyboard_down)
Window.bind(on_key_up=self._on_keyboard_up)
def set_debug_text(self, line_no, txt):
self.debug_label_array[line_no].text = txt
def on_debug_timer(self, dt):
frame_time = self.display_widget.get_frame_time()
status_output = "fps:%.1f frame_time:%.1fms" % (float(Clock.get_fps()), frame_time)
self.set_debug_text(0, status_output)
for i in xrange(debug.NUM_DEBUG_LINES):
debug_line = debug.get_line(self.state, i)
self.set_debug_text(1 + i, debug_line)
def on_timer(self, dt):
if self.state:
controls.set_joystick(self.state, self.joystick.get_direction())
def on_exit(self, instance, value):
if self.parent:
Clock.unschedule(self.on_debug_timer)
Clock.unschedule(self.on_timer)
self.display_widget.destroy()
parent = self.parent
parent.remove_widget(self)
self.state = None
if self.on_exit_function:
(self.on_exit_function)(self.state, parent)
def on_button_0(self, instance, value):
if self.state:
controls.set_button_state(self.state, 0, True if value == "down" else False)
def on_button_1(self, instance, value):
if self.state:
            controls.set_button_state(self.state, 1, True if value == "down" else False)
import os
from pypers.core.step import CmdLineStep
class ReorderSam(CmdLineStep):
spec = {
"version": "0.0.1",
"descr": [
"Runs ReorderSam to reorder chromosomes into GATK order"
],
"args":
{
"inputs": [
{
"name" : "input_bam",
"type" : "file",
"iterable" : True,
"descr" : "the input bam file",
},
{
"name" : "reference",
"type" : "ref_genome",
"tool" : "reordersam",
"descr" : "Reference whole genome fasta"
}
],
"outputs": [
{
"name" : "output_bam",
"type" : "file",
"value" : "dummy",
"descr" : "the reo | rdered output bam",
}
],
"params": [
{
"name" : "jvm_args",
"value" : "-Xmx{{jvm_memory}}g -Djava.io.tmpdir={{output_dir}}",
"descr" : "java virtual machine arguments",
"readonly" : True
}
]
},
"cmd": [
"/usr/bin/java {{jvm_args}} -jar /software/pypers/picard-tools/picard-tools-1.119/picard-tools-1.119/Reo | rderSam.jar",
" I={{input_bam}} O={{output_bam}} CREATE_INDEX=True R={{reference}}"
],
"requirements": {
"memory": '8'
}
}
def preprocess(self):
"""
Set output bam name
"""
file_name = os.path.basename(self.input_bam)
self.output_bam = file_name.replace('.bam','.reord.bam')
super(ReorderSam, self).preprocess()
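# Example of the renaming done in preprocess() (hypothetical file name):
# an input_bam of 'sample42.bam' yields output_bam == 'sample42.reord.bam'.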
    def test_run_all_servers(self, options):
"""
Test the "run_all_servers" task.
"""
self.verify_run_all_servers_task(options)
@ddt.data(
[{}],
[{"settings": "aws"}],
)
@ddt.unpack
def test_celery(self, options):
"""
Test the "celery" task.
"""
settings = options.get("settings", "devstack_with_worker")
call_task("pavelib.servers.celery", options=options)
self.assertEqual(self.task_messages, [EXPECTED_CELERY_COMMAND.format(settings=settings)])
@ddt.data(
[{}],
[{"settings": "aws"}],
)
@ddt.unpack
def test_update_db(self, options):
"""
Test the "update_db" task.
"""
settings = options.get("settings", Env.DEVSTACK_SETTINGS)
call_task("pavelib.servers.update_db", options=options)
# pylint: disable=line-too-long
        db_command = u"NO_EDXAPP_SUDO=1 EDX_PLATFORM_SETTINGS_OVERRIDE={settings} /edx/bin/edxapp-migrate-{server} --traceback --pythonpath=. "
self.assertEqual(
self.task_messages,
[
db_command.format(server="lms", settings=settings),
db_command.format(server="cms", settings=settings),
]
)
@ddt.data(
["lms", {}],
["lms", {"settings": "aws"}],
["cms", {}],
["cms", | {"settings": "aws"}],
)
@ddt.unpack
def test_check_settings(self, system, options):
"""
Test the "check_settings" task.
"""
settings = options.get("settings", Env.DEVSTACK_SETTINGS)
call_task("pavelib.servers.check_settings", args=[system, settings])
self.assertEqual(
self.task_messages,
[
u"echo 'import {system}.envs.{settings}' "
u"| python manage.py {system} --settings={settings} shell --plain --pythonpath=.".format(
system=system, settings=settings
),
]
)
def verify_server_task(self, task_name, options, contracts_default=False):
"""
Verify the output of a server task.
"""
log_string = options.get("log_string", "> /dev/null")
settings = options.get("settings", None)
asset_settings = options.get("asset-settings", None)
is_optimized = options.get("optimized", False)
is_fast = options.get("fast", False)
no_contracts = options.get("no-contracts", not contracts_default)
if task_name == "devstack":
system = options.get("system")
elif task_name == "studio":
system = "cms"
else:
system = "lms"
port = options.get("port", "8000" if system == "lms" else "8001")
self.reset_task_messages()
if task_name == "devstack":
args = ["studio" if system == "cms" else system]
if settings:
args.append("--settings={settings}".format(settings=settings))
if asset_settings:
args.append("--asset-settings={asset_settings}".format(asset_settings=asset_settings))
if is_optimized:
args.append("--optimized")
if is_fast:
args.append("--fast")
if no_contracts:
args.append("--no-contracts")
call_task("pavelib.servers.devstack", args=args)
else:
call_task("pavelib.servers.{task_name}".format(task_name=task_name), options=options)
expected_messages = options.get("expected_messages", [])
expected_settings = settings if settings else Env.DEVSTACK_SETTINGS
expected_asset_settings = asset_settings if asset_settings else expected_settings
if is_optimized:
expected_settings = "devstack_optimized"
expected_asset_settings = "test_static_optimized"
expected_collect_static = not is_fast and expected_settings != Env.DEVSTACK_SETTINGS
if not is_fast:
expected_messages.append(u"xmodule_assets common/static/xmodule")
expected_messages.append(u"install npm_assets")
expected_messages.extend(
[c.format(settings=expected_asset_settings,
log_file=Env.PRINT_SETTINGS_LOG_FILE) for c in EXPECTED_PRINT_SETTINGS_COMMAND]
)
expected_messages.append(EXPECTED_WEBPACK_COMMAND.format(
node_env="production",
static_root_lms=None,
static_root_cms=None,
lms_root_url=None,
jwt_auth_cookie_header_payload_name=None,
user_info_cookie_name=None,
webpack_config_path=None
))
expected_messages.extend(self.expected_sass_commands(system=system, asset_settings=expected_asset_settings))
if expected_collect_static:
expected_messages.append(EXPECTED_COLLECT_STATIC_COMMAND.format(
system=system, asset_settings=expected_asset_settings, log_string=log_string
))
expected_run_server_command = EXPECTED_RUN_SERVER_COMMAND.format(
system=system,
settings=expected_settings,
port=port,
)
if not no_contracts:
expected_run_server_command += " --contracts"
expected_messages.append(expected_run_server_command)
self.assertEqual(self.task_messages, expected_messages)
def verify_run_all_servers_task(self, options):
"""
        Verify the output of the run_all_servers task.
"""
log_string = options.get("log_string", "> /dev/null")
settings = options.get("settings", None)
asset_settings = options.get("asset_settings", None)
is_optimized = options.get("optimized", False)
is_fast = options.get("fast", False)
self.reset_task_messages()
call_task("pavelib.servers.run_all_servers", options=options)
expected_settings = settings if settings else Env.DEVSTACK_SETTINGS
expected_asset_settings = asset_settings if asset_settings else expected_settings
if is_optimized:
expected_settings = "devstack_optimized"
expected_asset_settings = "test_static_optimized"
expected_collect_static = not is_fast and expected_settings != Env.DEVSTACK_SETTINGS
expected_messages = []
if not is_fast:
expected_messages.append(u"xmodule_assets common/static/xmodule")
expected_messages.append(u"install npm_assets")
expected_messages.extend(
[c.format(settings=expected_asset_settings,
log_file=Env.PRINT_SETTINGS_LOG_FILE) for c in EXPECTED_PRINT_SETTINGS_COMMAND]
)
expected_messages.append(EXPECTED_WEBPACK_COMMAND.format(
node_env="production",
static_root_lms=None,
static_root_cms=None,
lms_root_url=None,
jwt_auth_cookie_header_payload_name=None,
user_info_cookie_name=None,
webpack_config_path=None
))
expected_messages.extend(self.expected_sass_commands(asset_settings=expected_asset_settings))
if expected_collect_static:
expected_messages.append(EXPECTED_COLLECT_STATIC_COMMAND.format(
system="lms", asset_settings=expected_asset_settings, log_string=log_string
))
expected_messages.append(EXPECTED_COLLECT_STATIC_COMMAND.format(
system="cms", asset_settings=expected_asset_settings, log_string=log_string
))
expected_messages.append(
EXPECTED_RUN_SERVER_COMMAND.format(
system="lms",
settings=expected_settings,
port=8000,
)
)
expected_messages.append(
EXPECTED_RUN_SERVER_COMMAND.format(
system="cms",
settings=expected_settings,
port=8001,
)
)
expected_messages.append(EXPECTED_CELERY_COMMAND.format(settings="devstack_with_worker"))
        self.assertEqual(self.task_messages, expected_messages)
""" Git Parent model """
from django.db import models
class GitParentEntry(models.Model):
""" Git Parent """
    project = models.ForeignKey('gitrepo.GitProjectEntry', related_name='git_parent_project')
parent = models.ForeignKey('gitrepo.GitCommitEntry', related_name='git_parent_commit')
son = models.ForeignKey('gitrepo.GitCommitEntry', related_name='git_son_commit')
order = models.IntegerField(default=0)
created_at = models.DateTimeField(auto_now_add=True, editable=False)
updated_at = models.DateTimeField(auto_now=True, editable=False)
def __unicode__(self):
        return u'Parent:{0}, Son:{1}, order:{2}'.format(self.parent.commit_hash, self.son.commit_hash, self.order)
#!/usr/bin/env python
from __future__ import division, print_function, absolute_import
import matplotlib.pyplot as plt
from matplotlib import cm
import numpy as np
#import calendar
#import datetime as dt
#import pprint
#import pickle
import sys
def plotFontSize(ax,fontsize=8):
for item in ([ax.title, ax.xaxis.label, ax.yaxis.label] +
ax.get_xticklabels() + ax.get_yticklabels()):
        item.set_fontsize(fontsize)
return ax
#=====================================
if __name__ == "__main__":
# import warnings
# warnings.filterwarnings("ignore")
import argparse
    parser = argparse.ArgumentParser(prog='nadirSiteSolution',description='Plot and analyse the pickle data object obtained from a nadir processing run',
formatter_class=argparse.RawTextHelpFormatter,
epilog='''\
Example:
To create a consolidated phase residual file:
> python nadirSolution.py --model -f ./t/YAR2.2012.CL3
''')
#===================================================================
parser.add_argument('--about','-a',dest='about',default=False,action='store_true',help="Print meta data from solution file then exit")
#===================================================================
parser.add_argument('-f','--f1', dest='solutionFile', default='',help="Pickled solution file")
parser.add_argument('-n', dest='nfile', default='',help="Numpy solution file")
parser.add_argument('--pf',dest='post_fit',default=False,action='store_true',help="Plot post fit residuals")
#===================================================================
# Plot options
#===================================================================
parser.add_argument('--plot',dest='plot', default=False, action='store_true', help="Produce an elevation dependent plot of ESM phase residuals")
parser.add_argument('--SITEPCV',dest='sitePCV', default=False, action='store_true', help="Plot the site PCV estimates")
parser.add_argument('--ps','--plot_save',dest='plot_save',default=False,action='store_true', help="Save the plots in png format")
#===================================================================
# Compare Solutions
#===================================================================
parser.add_argument('--compare',dest='compare',default=False,action='store_true',help="Compare two solutions")
parser.add_argument('--f2', dest='comp2', default='',help="Pickled solution file")
# Debug function, not needed
args = parser.parse_args()
#if len(args.nfile) < 1 :
# args.nfile = args.solutionFile + ".sol.npz"
#args.compare_nfile = args.comp2 + ".sol.npz"
#=======================================================================================================
#
# Parse pickle data structure
#
#=======================================================================================================
# with open(args.solutionFile,'rb') as pklID:
# meta = pickle.load(pklID)
# # Just print the meta data and exit
# if args.about:
# pprint.pprint(meta)
# sys.exit(0)
# if args.post_fit:
# npzfile = np.load(args.nfile)
# prefit = npzfile['prefit']
# prefit_sums = npzfile['prefit_sums']
# prefit_res = npzfile['prefit_res']
    # postfit = npzfile['postfit']
# postfit_sums = npzfile['postfit_sums']
# postfit_res = npzfile['postfit_res']
# numObs = npzfile['numObs']
# numObs_sums = npzfile['numObs_sums']
# fig = plt.figure()
# #fig.canvas.set_window_title("All SVNs")
# ax = fig.add_subplot(111)
# ax.plot(nad,np.sqrt(postfit_sums[siz:eiz]/numObs_sums[siz:eiz])/np.sqrt(prefit_sums[siz:eiz]/numObs_sums[siz:eiz]),'r-')
# plt.show()
# sys.exit(0)
npzfile = np.load(args.nfile)
model = npzfile['model']
stdev = npzfile['stdev']
site_freq = npzfile['site_freq']
ele_model = npzfile['ele_model']
ele_stdev = npzfile['ele_model_stdev']
ele_site_freq = npzfile['ele_site_freq']
#if args.compare:
# compare_npzfile = np.load(args.compare_nfile)
# compare_Sol = compare_npzfile['sol']
# compare_Cov = compare_npzfile['cov']
# compare_nadir_freq = compare_npzfile['nadirfreq']
# compare_variances = np.diag(compare_Cov)
#zen = np.linspace(0,90, int(90./meta['zen_grid'])+1 )
#az = np.linspace(0,360. - meta['zen_grid'], int(360./meta['zen_grid']) )
print("Shape of model:",np.shape(model))
zen = np.linspace(0,90, np.shape(model)[1] )
print("zen:",zen,np.shape(model)[1])
az = np.linspace(0,360. - 360./np.shape(model)[0], np.shape(model)[0] )
print("az:",az,np.shape(model)[0])
#============================================
    # Plot the elevation-dependent phase residual corrections
#============================================
fig = plt.figure()
#fig.canvas.set_window_title("All SVNs")
ax = fig.add_subplot(111)
ax.errorbar(zen,ele_model[0,:],yerr=ele_stdev[0,:]/2.,linewidth=2)
ax1 = ax.twinx()
ax1.bar(zen,ele_site_freq[0,:],0.1,color='gray',alpha=0.75)
ax1.set_ylabel('Number of observations',fontsize=8)
ax.set_xlabel('Zenith angle (degrees)',fontsize=8)
ax.set_ylabel('Correction to PCV (mm)',fontsize=8)
ax = plotFontSize(ax,8)
ax1 = plotFontSize(ax1,8)
plt.tight_layout()
#============================================
fig = plt.figure()
#fig.canvas.set_window_title("All SVNs")
ax = fig.add_subplot(111)
for i in range(0,np.size(az)):
for j in range(0,np.size(zen)):
ax.errorbar(zen[j],model[i,j],yerr=np.sqrt(stdev[i,j])/2.,linewidth=2)
#ax.plot(zen[j],model[i,j],'b.')
#ax1 = ax.twinx()
#ax1.bar(nad,nadir_freq[ctr,:],0.1,color='gray',alpha=0.75)
#ax1.set_ylabel('Number of observations',fontsize=8)
ax.set_xlabel('Zenith angle (degrees)',fontsize=8)
ax.set_ylabel('Correction to PCV (mm)',fontsize=8)
ax = plotFontSize(ax,8)
plt.tight_layout()
#============================================
# Do a polar plot
#============================================
fig = plt.figure()
#fig.canvas.set_window_title("All SVNs")
ax = fig.add_subplot(111,polar='true')
ax.set_theta_direction(-1)
ax.set_theta_offset(np.radians(90.))
ax.set_ylim([0,1])
ax.set_rgrids((0.00001, np.radians(20.)/np.pi*2, np.radians(40.)/np.pi*2,np.radians(60.)/np.pi*2,np.radians(80.)/np.pi*2),labels=('0', '20', '40', '60', '80'),angle=180)
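    # The r-grid above maps a zenith angle z (degrees) to radius z/90, since
    # radians(z)/pi*2 == z/90; the unit circle is therefore the horizon.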
ma,mz = np.meshgrid(az,zen,indexing='ij')
ma = ma.reshape(ma.size,)
mz = mz.reshape(mz.size,)
polar = ax.scatter(np.radians(ma), np.radians(mz)/np.pi*2., c=model[:,:], s=50, alpha=1., cmap=cm.RdBu,vmin=-15,vmax=15, lw=0)
cbar = plt.colorbar(polar,shrink=0.75,pad=.10)
cbar.ax.tick_params(labelsize=8)
cbar.set_label('ESM (mm)',size=8)
ax = plotFontSize(ax,8)
plt.tight_layout()
fig = plt.figure()
#fig.canvas.set_window_title("All SVNs")
ax = fig.add_subplot(111,polar='true')
ax.set_theta_direction(-1)
ax.set_theta_offset(np.radians(90.))
ax.set_ylim([0,1])
ax.set_rgrids((0.00001, np.radians(20.)/np.pi*2, np.radians(40.)/np.pi*2,np.radians(60.)/np.pi*2,np.radians(80.)/np.pi*2),labels=('0', '20', '40', '60', '80'),angle=180)
polar = ax.scatter(np.radians(ma), np.radians(mz)/np.pi*2., c=stdev[:,:], s=50, alpha=1., cmap=cm.RdBu,vmin=-15,vmax=15, lw=0)
cbar = plt.colorbar(polar,shrink=0.75,pad=.10)
cbar.ax.tick_params(labelsize=8)
cbar.set_label('Standard Deviation (mm)',size=8)
ax = plotFontSize(ax,8)
plt.tight_layout()
plt.show()
print("FINISHED")
lines = {
"initial_balance": account["initial_balance"],
"debit": account["debit"],
"credit": account["credit"],
"final_balance": account["ending_balance"],
}
return lines
def _get_group_lines(self, group_id, trial_balance):
lines = False
for group in trial_balance:
if group["id"] == group_id and group["type"] == "group_type":
lines = {
"initial_balance": group["initial_balance"],
"debit": group["debit"],
"credit": group["credit"],
"final_balance": group["ending_balance"],
}
return lines
def check_partner_in_report(self, account_id, partner_id, total_amount):
partner_in_report = False
if account_id in total_amount.keys():
if partner_id in total_amount[account_id]:
partner_in_report = True
return partner_in_report
def _get_partner_lines(self, account_id, partner_id, total_amount):
acc_id = account_id
prt_id = partner_id
lines = {
"initial_balance": total_amount[acc_id][prt_id]["initial_balance"],
"debit": total_amount[acc_id][prt_id]["debit"],
"credit": total_amount[acc_id][prt_id]["credit"],
"final_balance": total_amount[acc_id][prt_id]["ending_balance"],
}
return lines
def _sum_all_accounts(self, trial_balance, feature):
total = 0.0
for account in trial_balance:
if account["type"] == "account_type":
for key in account.keys():
if key == feature:
total += account[key]
return total
def test_00_account_group(self):
self.assertTrue(self.account100 in self.group1.compute_account_ids)
self.assertTrue(self.account200 in self.group2.compute_account_ids)
def test_01_account_balance_computed(self):
# Change code of the P&L for not being automatically included
# in group 1 balances
earning_accs = self.env["account.account"].search(
[("user_type_id", "=", self.env.ref("account.data_unaffected_earnings").id)]
)
for acc in earning_accs:
acc.code = "999" + acc.code
# Generate the general ledger line
res_data = self._get_report_lines()
trial_balance = res_data["trial_balance"]
check_receivable_account = self.check_account_in_report(
self.account100.id, trial_balance
)
self.assertFalse(check_receivable_account)
check_income_account = self.check_account_in_report(
self.account200.id, trial_balance
)
self.assertFalse(check_income_account)
self.assertTrue(
self.check_account_in_report(self.unaffected_account.id, trial_balance)
)
# Add a move at the previous day of the first day of fiscal year
# to check the initial balance
self._add_move(
date=self.previous_fy_date_end,
receivable_debit=1000,
receivable_credit=0,
income_debit=0,
income_credit=1000,
)
# Re Generate the trial balance line
res_data = self._get_report_lines()
trial_balance = res_data["trial_balance"]
check_receivable_account = self.check_account_in_report(
self.account100.id, trial_balance
)
self.assertTrue(check_receivable_account)
check_income_account = self.check_account_in_report(
self.account200.id, trial_balance
)
        self.assertFalse(check_income_account)
# Check the initial and final balance
account_receivable_lines = self._get_account_lines(
self.account100.id, trial_balance
)
group1_lines = self._get_group_lines(self.group1.id, trial_balance)
        self.assertEqual(account_receivable_lines["initial_balance"], 1000)
self.assertEqual(account_receivable_lines["debit"], 0)
self.assertEqual(account_receivable_lines["credit"], 0)
self.assertEqual(account_receivable_lines["final_balance"], 1000)
self.assertEqual(group1_lines["initial_balance"], 1000)
self.assertEqual(group1_lines["debit"], 0)
self.assertEqual(group1_lines["credit"], 0)
self.assertEqual(group1_lines["final_balance"], 1000)
# Add reversed move of the initial move the first day of fiscal year
# to check the first day of fiscal year is not used
# to compute the initial balance
self._add_move(
date=self.fy_date_start,
receivable_debit=0,
receivable_credit=1000,
income_debit=1000,
income_credit=0,
)
# Re Generate the trial balance line
res_data = self._get_report_lines()
trial_balance = res_data["trial_balance"]
check_receivable_account = self.check_account_in_report(
self.account100.id, trial_balance
)
self.assertTrue(check_receivable_account)
check_income_account = self.check_account_in_report(
self.account200.id, trial_balance
)
self.assertTrue(check_income_account)
# Re Generate the trial balance line with an account filter
res_data = self._get_report_lines(
account_ids=(self.account100 + self.account200).ids
)
trial_balance = res_data["trial_balance"]
self.assertTrue(self.check_account_in_report(self.account100.id, trial_balance))
self.assertTrue(self.check_account_in_report(self.account200.id, trial_balance))
# Unaffected account should not be present
self.assertFalse(
self.check_account_in_report(self.unaffected_account.id, trial_balance)
)
# Check the initial and final balance
account_receivable_lines = self._get_account_lines(
self.account100.id, trial_balance
)
account_income_lines = self._get_account_lines(
self.account200.id, trial_balance
)
group1_lines = self._get_group_lines(self.group1.id, trial_balance)
group2_lines = self._get_group_lines(self.group2.id, trial_balance)
self.assertEqual(account_receivable_lines["initial_balance"], 1000)
self.assertEqual(account_receivable_lines["debit"], 0)
self.assertEqual(account_receivable_lines["credit"], 1000)
self.assertEqual(account_receivable_lines["final_balance"], 0)
self.assertEqual(account_income_lines["initial_balance"], 0)
self.assertEqual(account_income_lines["debit"], 1000)
self.assertEqual(account_income_lines["credit"], 0)
self.assertEqual(account_income_lines["final_balance"], 1000)
self.assertEqual(group1_lines["initial_balance"], 1000)
self.assertEqual(group1_lines["debit"], 0)
self.assertEqual(group1_lines["credit"], 1000)
self.assertEqual(group1_lines["final_balance"], 0)
self.assertEqual(group2_lines["initial_balance"], 0)
self.assertEqual(group2_lines["debit"], 1000)
self.assertEqual(group2_lines["credit"], 0)
self.assertEqual(group2_lines["final_balance"], 1000)
# Add another move at the end day of fiscal year
        # to check that it is correctly used in the report
self._add_move(
date=self.fy_date_end,
receivable_debit=0,
receivable_credit=1000,
income_debit=1000,
income_credit=0,
)
# Re Generate the trial balance line
res_data = self._get_report_lines()
trial_balance = res_data["trial_balance"]
check_receivable_account = self.check_account_in_report(
self.account100.id, trial_balance
)
self.assertTrue(check_receivable_account)
check_income_account = self.check_account_in_report(
self.account200.id, trial_balance
)
self.assertTrue(check_income_account)
        # Check the initial and final balance
import unittest
import numpy as np
from astropy import constants as const
from astropy import units as u
from .context import superbol
from superbol.mag2flux import *
from yaml import load
class TestMag2Flux(unittest.TestCase):
def setUp(self):
self.filter_band = "V"
self.magnitude = 8.8
self.uncertainty = 0.02
        self.effective_wl = 5450.0 * u.AA
self.flux_at_zero_mag = 3.631E-9 * (u.erg / (u.s * u.cm**2 * u.AA))
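    # The relation under test is the standard zero-point conversion
    #     F = F0 * 10**(-0.4 * m)
    # and its first-order error propagation,
    #     sigma_F = |dF/dm| * sigma_m = F * 0.4 * ln(10) * sigma_m.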
def test_mag2flux_converts_mag_to_correct_flux(self):
expected = self.flux_at_zero_mag * 10**(-0.4 * self.magnitude)
result_flux, result_uncertainty = mag2flux(self.magnitude,
self.uncertainty,
self.effective_wl,
self.flux_at_zero_mag)
self.assertEqual(expected.value, result_flux)
def test_mag2flux_converts_mag_to_correct_flux_uncertainty(self):
expected = np.sqrt((self.flux_at_zero_mag * -0.4 * np.log(10) * 10**(-0.4 * self.magnitude) * self.uncertainty)**2)
result_flux, result_uncertainty = mag2flux(self.magnitude,
self.uncertainty,
self.effective_wl,
self.flux_at_zero_mag)
self.assertAlmostEqual(expected.value, result_uncertainty)
def test_flux_at_mag_zero(self):
mag = 0.0
expected = self.flux_at_zero_mag
result_flux, result_uncertainty = mag2flux(0.0,
self.uncertainty,
self.effective_wl,
self.flux_at_zero_mag)
self.assertEqual(expected.value, result_flux)
#Embedded file name: /usr/lib/enigma2/python/upgrade.py
import os
from subprocess import Popen, PIPE
opkgDestinations = ['/']
opkgStatusPath = ''
overwriteSettingsFiles = False
overwriteDriversFiles = True
overwriteEmusFiles = True
overwritePiconsFiles = True
overwriteBootlogoFiles = True
overwriteSpinnerFiles = True
def findMountPoint(path):
path = os.path.abspath(path)
while not os.path.ismount(path):
path = os.path.dirname(path)
return path
def opkgExtraDestinations():
global opkgDestinations
return ''.join([ ' --add-dest %s:%s' % (i, i) for i in opkgDestinations ])
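# e.g. with opkgDestinations == ['/', '/media/hdd'] the call above yields
# ' --add-dest /:/ --add-dest /media/hdd:/media/hdd'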
def opkgAddDestination(mountpoint):
if mountpoint not in opkgDestinations:
opkgDestinations.append(mountpoint)
print '[Ipkg] Added to OPKG destinations:', mountpoint
mounts = os.listdir('/media')
for mount in mounts:
mount = os.path.join('/media', mount)
if mount and not mount.startswith('/media/net'):
if opkgStatusPath == '':
opkgStatusPath = 'var/lib/opkg/status'
if not os.path.exists(os.path.join('/', opkgStatusPath)):
opkgStatusPath = 'usr/lib/opkg/status'
if os.path.exists(os.path.join(mount, opkgStatusPath)):
opkgAddDestination(mount)
def getValue(line):
    dummy = line.strip().split('=')
if len(dummy) != 2:
        print 'Error: wrongly formatted settings file'
        return False
if dummy[1] == 'false':
return False
elif dummy[1] == 'true':
return True
else:
return False
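# e.g. getValue('config.plugins.softwaremanager.overwriteEmusFiles=true')
# returns True; any other value, or a malformed line, yields False.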
p = Popen('opkg list-upgradable', stdout=PIPE, stderr=PIPE, shell=True)
stdout, stderr = p.communicate()
if stderr != '':
    print 'Error occurred:', stderr
    exit(1)
try:
f = open('/etc/enigma2/settings', 'r')
lines = f.readlines()
f.close()
except:
print 'Error opening /etc/enigma2/settings file'
for line in lines:
if line.startswith('config.plugins.softwaremanager.overwriteSettingsFiles'):
overwriteSettingsFiles = getValue(line)
elif line.startswith('config.plugins.softwaremanager.overwriteDriversFiles'):
overwriteDriversFiles = getValue(line)
elif line.startswith('config.plugins.softwaremanager.overwriteEmusFiles'):
overwriteEmusFiles = getValue(line)
elif line.startswith('config.plugins.softwaremanager.overwritePiconsFiles'):
overwritePiconsFiles = getValue(line)
elif line.startswith('config.plugins.softwaremanager.overwriteBootlogoFiles'):
overwriteBootlogoFiles = getValue(line)
elif line.startswith('config.plugins.softwaremanager.overwriteSpinnerFiles'):
overwriteSpinnerFiles = getValue(line)
packages = stdout.split('\n')
try:
packages.remove('')
except:
pass
upgradePackages = []
for package in packages:
item = package.split(' - ', 2)
if item[0].find('-settings-') > -1 and not overwriteSettingsFiles:
continue
elif item[0].find('kernel-module-') > -1 and not overwriteDriversFiles:
continue
elif item[0].find('-softcams-') > -1 and not overwriteEmusFiles:
continue
elif item[0].find('-picons-') > -1 and not overwritePiconsFiles:
        continue
    elif item[0].find('-bootlogo') > -1 and not overwriteBootlogoFiles:
continue
elif item[0].find('italysat-spinner') > -1 and not overwriteSpinnerFiles:
continue
else:
upgradePackages.append(item[0])
for p in upgradePackages:
os.system('opkg ' + opkgExtraDestinations() + ' upgrade ' + p + ' 2>&1 | tee /home/root/ipkgupgrade.log')
os.system('reboot')
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
import logging
from datetime import datetime
from babel.dates import format_date
from odoo import api, models, fields, _
from odoo.exceptions import UserError
logger = logging.getLogger(__name__)
COMPASSION_QRR = "CH2430808007681434347"
class ContractGroup(models.Model):
_inherit = ["recurring.contract.group", "translatable.model"]
_name = "recurring.contract.group"
@api.multi
def get_months(self, months, sponsorships):
"""
Given the list of months to print,
returns the list of months grouped by the frequency payment
of the contract group and only containing unpaid sponsorships.
:param months: list of dates (date, datetime or string)
:param sponsorships: recordset of included sponsorships
:return: list of dates grouped in string format
"""
self.ensure_one()
freq = self.advance_billing_months
payment_mode = self.with_context(lang="en_US").payment_mode_id
# Take first open invoice or next_invoice_date
open_invoice = min([i for i in sponsorships.mapped("first_open_invoice") if i])
if open_invoice:
first_invoice_date = open_invoice.replace(day=1)
else:
raise UserError(_("No open invoice found !"))
for i, month in enumerate(months):
if isinstance(month, str):
months[i] = fields.Date.from_string(month)
if isinstance(month, datetime):
months[i] = month.date()
# check if first invoice is after last month
if first_invoice_date > months[-1]:
raise UserError(_(f"First invoice is after Date Stop"))
# Only keep unpaid months
valid_months = [
fields.Date.to_string(month) for month in months
if month >= first_invoice_date
]
if "Permanent" in payment_mode.name:
return valid_months[:1]
if freq == 1:
return valid_months
else:
# Group months
result = list()
count = 1
month_start = ""
for month in valid_months:
if not month_start:
month_start = month
if count < freq:
count += 1
else:
result.append(month_start + " - " + month)
month_start = ""
count = 1
if not result:
result.append(month_start + " - " + month)
return result
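    # Worked example (hypothetical dates): with advance_billing_months == 3
    # and unpaid months January..June 2020, get_months returns
    #     ['2020-01-01 - 2020-03-01', '2020-04-01 - 2020-06-01']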
@api.multi
def get_communication(self, start, stop, sponsorships):
"""
Get the communication to print on the payment slip for sponsorship
:param start: the month start for which we print the payment slip (string)
:param stop: the month stop for which we print the payment slip (string)
:param sponsorships: recordset of sponsorships for which to print the
payment slips
:return: string of the communication
"""
self.ensure_one()
payment_mode = self.with_context(lang="en_US").payment_mode_id
amount = self.get_amount(start, stop, sponsorships)
valid = sponsorships
number_sponsorship = len(sponsorships)
date_start = fields.Date.to_date(start)
date_stop = fields.Date.to_date(stop)
vals = {
"amount": f"CHF {amount:.0f}",
"subject": _("for") + " ",
"date": "",
}
locale = self.partner_id.lang
context = {"lang": locale}
if start and stop:
start_date = format_date(date_start, format="MMMM yyyy", locale=locale)
stop_date = format_date(date_stop, format="MMMM yyyy", locale=locale)
if start == stop:
vals["date"] = start_date
else:
vals["date"] = f"{start_date} - {stop_date}"
if "Permanent" in payment_mode.name:
vals["payment_type"] = _("ISR for standing order")
vals["date"] = ""
else:
vals["payment_type"] = (
_("ISR") + " " + self.contract_ids[0].with_context(
context).group_freq
)
if number_sponsorship > 1:
vals["subject"] += str(number_sponsorship) + " " + _("sponsorships")
elif number_sponsorship and valid.child_id:
vals["subject"] = valid.child_id.preferred_name + " ({})".format(
valid.child_id.local_id
)
elif number_sponsorship and not valid.child_id and valid.display_name:
product_name = self.env["product.product"].search(
[("id", "in", valid.mapped("contract_line_ids.product_id").ids)]
)
vals["subject"] = ", ".join(product_name.mapped("thanks_name"))
return (
f"{vals['payment_type']} {vals['amount']}"
f"<br/>{ | vals['subject']}<br/>{vals['date']}"
)
@api.model
def get_company_qrr_account(self):
""" Utility to find the bvr account of the company. """
return self.env["res.partner.bank"].search([
('acc_number', '=', COMPASSION_QRR)])
def get_amount(self, start, stop, sponsorships):
self.ensure_one()
amount = sum(sponsorships.mapped("total_amount"))
        months = int(stop.split("-")[1]) - int(start.split("-")[1]) + 1
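        # NB: the month count above uses month numbers only, so start and
        # stop are assumed to fall within the same calendar year.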
payment_mode = self.with_context(lang="en_US").payment_mode_id
if "Permanent" in payment_mode.name:
months = self.advance_billing_months
return amount * months
# Copyright (c) 2014-2017, NVIDIA CORPORATION. All rights reserved.
from __future__ import absolute_import
from wtforms import validators
from ..forms import ModelForm
from digits import utils
class ImageModelForm(ModelForm):
"""
Defines the form used to create a new ImageModelJob
"""
crop_size = utils.forms.IntegerField(
'Crop Size',
validators=[
validators.NumberRange(min=1),
validators.Optional()
],
tooltip=("If specified, during training a random square crop will be "
"taken from the input image before using as input for the network.")
)
use_mean = utils.forms.SelectField(
'Subtract Mean',
choices=[
('none', 'None'),
('image', 'Image'),
('pixel', 'Pixel'),
],
default='image',
tooltip="Subtract the mean file or mean pixel for this dataset from each image."
)
aug_flip = utils.forms.SelectField(
'Flipping',
choices=[
('none', 'None'),
('fliplr', 'Horizontal'),
('flipud', 'Vertical'),
('fliplrud', 'Horizontal and/or Vertical'),
],
default='none',
tooltip="Randomly flips each image during batch preprocessing."
)
aug_quad_rot = utils.forms.SelectField(
'Quadrilateral Rotation',
choices=[
('none', 'None'),
('rot90', '0, 90 or 270 degrees'),
('rot180', '0 or 180 degrees'),
            ('rotall', '0, 90, 180 or 270 degrees'),
],
default='none',
tooltip="Randomly rotates (90 degree steps) each image during batch preprocessing."
)
aug_rot = utils.forms.IntegerField(
'Rotation (+- deg)',
default=0,
validators=[
validators.NumberRange(min=0, max=180)
],
tooltip="The uniform-random rotation angle that will be performed during batch preprocessing."
)
aug_scale = utils.forms.FloatField(
'Rescale (stddev)',
default=0,
validators=[
validators.NumberRange(min=0, max=1)
],
tooltip=("Retaining image size, the image is rescaled with a "
"+-stddev of this parameter. Suggested value is 0.07.")
)
aug_noise = utils.forms.FloatField(
'Noise (stddev)',
default=0,
validators=[
validators.NumberRange(min=0, max=1)
        ],
tooltip=("Adds AWGN (Additive White Gaussian Noise) during batch "
"preprocessing, assuming [0 1] pixel-value range. Suggested value is 0.03.")
)
aug_hsv_use = utils.forms.BooleanField(
'HSV Shifting',
default=False,
tooltip=("Augmentation by normal-distribu | ted random shifts in HSV "
"color space, assuming [0 1] pixel-value range."),
)
aug_hsv_h = utils.forms.FloatField(
'Hue',
default=0.02,
validators=[
validators.NumberRange(min=0, max=0.5)
],
tooltip=("Standard deviation of a shift that will be performed during "
"preprocessing, assuming [0 1] pixel-value range.")
)
aug_hsv_s = utils.forms.FloatField(
'Saturation',
default=0.04,
validators=[
validators.NumberRange(min=0, max=0.5)
],
tooltip=("Standard deviation of a shift that will be performed during "
"preprocessing, assuming [0 1] pixel-value range.")
)
aug_hsv_v = utils.forms.FloatField(
'Value',
default=0.06,
validators=[
validators.NumberRange(min=0, max=0.5)
],
tooltip=("Standard deviation of a shift that will be performed during "
"preprocessing, assuming [0 1] pixel-value range.")
)
e '']))
def _success(self, message, result=True, advance=False):
if (advance or
self.TRUTHY.get(self.data.get('advance', ['no'])[0].lower())):
self._advance()
return SetupMagic._success(self, message, result=result)
def _testing(self):
self._testing_yes(lambda: True)
return (self.testing is not None)
def _testing_yes(self, method, *args, **kwargs):
testination = self.data.get('testing')
if testination:
self.testing = random.randint(0, 1)
if testination[0].lower() in self.TRUTHY:
self.testing = self.TRUTHY[testination[0].lower()]
return self.testing
self.testing = None
return method(*args, **kwargs)
def _testing_data(self, method, tdata, *args, **kwargs):
result = self._testing_yes(method, *args, **kwargs) or []
return (result
if (self.testing is None) else
(self.testing and tdata or []))
def setup_command(self, session):
raise Exception('FIXME')
class SetupGetEmailSettings(TestableWebbable):
"""Guess server details for an e-mail address"""
SYNOPSIS = (None, 'setup/email_servers', 'setup/email_servers', None)
HTTP_CALLABLE = ('GET', )
HTTP_QUERY_VARS = dict_merge(TestableWebbable.HTTP_QUERY_VARS, {
'email': 'E-mail address'
})
TEST_DATA = {
'imap_host': 'imap.wigglebonk.com',
'imap_port': 993,
'imap_tls': True,
'pop3_host': 'pop3.wigglebonk.com',
'pop3_port': 110,
'pop3_tls': False,
'smtp_host': 'smtp.wigglebonk.com',
'smtp_port': 465,
'smtp_tls': False
}
def _get_domain_settings(self, domain):
raise Exception('FIXME')
def setup_command(self, session):
results = {}
        for email in list(self.args) + self.data.get('email', []):
settings = self._testing_data(self._get_domain_settings,
self.TEST_DATA, email)
if settings:
results[email] = settings
if results:
            self._success(_('Found settings for %d addresses') % len(results),
                          results)
else:
self._error(_('No settings found'))
class SetupWelcome(TestableWebbable):
SYNOPSIS = (None, None, 'setup/welcome', None)
HTTP_CALLABLE = ('GET', 'POST')
HTTP_POST_VARS = dict_merge(TestableWebbable.HTTP_POST_VARS, {
'language': 'Language selection'
})
def bg_setup_stage_1(self):
        # Wait a bit, so the user has something to look at before we
# block the web server and do real work.
time.sleep(2)
        # Initial configuration of app goes here...
if not self.session.config.tags:
with BLOCK_HTTPD_LOCK, Idle_HTTPD(allowed=0):
self.basic_app_config(self.session)
# Next, if we have any secret GPG keys, extract all the e-mail
# addresses and create a profile for each one.
with BLOCK_HTTPD_LOCK, Idle_HTTPD(allowed=0):
SetupProfiles(self.session).auto_create_profiles()
def setup_command(self, session):
config = session.config
if self.data.get('_method') == 'POST' or self._testing():
language = self.data.get('language', [''])[0]
if language:
try:
i18n = lambda: ActivateTranslation(session, config,
language)
if not self._testing_yes(i18n):
raise ValueError('Failed to configure i18n')
config.prefs.language = language
if not self._testing():
self._background_save(config=True)
except ValueError:
return self._error(_('Invalid language: %s') % language)
config.slow_worker.add_unique_task(
session, 'Setup, Stage 1', lambda: self.bg_setup_stage_1())
results = {
'languages': ListTranslations(config),
'language': config.prefs.language
}
return self._success(_('Welcome to Mailpile!'), results)
class SetupCrypto(TestableWebbable):
SYNOPSIS = (None, None, 'setup/crypto', None)
HTTP_CALLABLE = ('GET', 'POST')
HTTP_POST_VARS = dict_merge(TestableWebbable.HTTP_POST_VARS, {
'choose_key': 'Select an existing key to use',
'passphrase': 'Specify a passphrase',
'passphrase_confirm': 'Confirm the passphrase',
'index_encrypted': 'y/n: index encrypted mail?',
        # 'obfuscate_index': 'y/n: obfuscate keywords?',  # Omitted due to DANGER
        'encrypt_mail': 'y/n: encrypt locally stored mail?',
'encrypt_index': 'y/n: encrypt search index?',
'encrypt_vcards': 'y/n: encrypt vcards?',
'encrypt_events': 'y/n: encrypt event log?',
        'encrypt_misc': 'y/n: encrypt plugin and misc data?'
})
TEST_DATA = {}
def list_secret_keys(self):
today = date.today().strftime("%Y-%m-%d")
keylist = {}
for key, details in self._gnupg().list_secret_keys().iteritems():
# Ignore revoked keys
if ("revocation-date" in details and
details["revocation-date"] <= today):
# FIXME: Does this check expiry as well?
continue
# Ignore keys that cannot both encrypt and sign
caps = details["capabilities_map"]
if not caps["encrypt"] or not caps["sign"]:
continue
keylist[key] = details
return keylist
def gpg_key_ready(self, gpg_keygen):
if not gpg_keygen.failed:
self.session.config.prefs.gpg_recipient = gpg_keygen.generated_key
self.make_master_key()
self._background_save(config=True)
self.save_profiles_to_key()
def save_profiles_to_key(self, key_id=None, add_all=False, now=False,
profiles=None):
if key_id is None:
if (Setup.KEY_CREATING_THREAD and
not Setup.KEY_CREATING_THREAD.failed):
key_id = Setup.KEY_CREATING_THREAD.generated_key
add_all = True
if not add_all:
self.session.ui.warning('FIXME: Not updating GPG key!')
return
if key_id is not None:
uids = []
data = ListProfiles(self.session).run().result
for profile in data['profiles']:
uids.append({
'name': profile["fn"],
'email': profile["email"][0]["email"],
'comment': profile.get('note', '')
})
if not uids:
return
editor = GnuPGKeyEditor(key_id, set_uids=uids,
sps=self.session.config.gnupg_passphrase,
deletes=max(10, 2*len(uids)))
def start_editor(*unused_args):
with Setup.KEY_WORKER_LOCK:
Setup.KEY_EDITING_THREAD = editor
editor.start()
with Setup.KEY_WORKER_LOCK:
if now:
start_editor()
elif Setup.KEY_EDITING_THREAD is not None:
Setup.KEY_EDITING_THREAD.on_complete('edit keys',
start_editor)
elif Setup.KEY_CREATING_THREAD is not None:
Setup.KEY_CREATING_THREAD.on_complete('edit keys',
start_editor)
else:
start_editor()
def setup_command(self, session):
changed = authed = False
results = {
'secret_keys': self.list_secret_keys(),
}
error_info = None
if self.data.get('_method') == 'POST' or self._testing():
# 1st, are we choosing or creating a new key?
choose_key = self.data.get('choose_key', [''])[0]
if choose_key and not error_info:
def subset_examples(example_dict, indices_to_keep, create_new_dict=False):
"""Subsets examples in dictionary.
:param example_dict: See doc for `write_example_file`.
:param indices_to_keep: 1-D numpy array with indices of examples to keep.
:param create_new_dict: Boolean flag. If True, this method will create a
new dictionary, leaving the input dictionary untouched.
:return: example_dict: Same as input, but possibly with fewer examples.
"""
error_checking.assert_is_integer_numpy_array(indices_to_keep)
error_checking.assert_is_numpy_array(indices_to_keep, num_dimensions=1)
error_checking.assert_is_boolean(create_new_dict)
if not create_new_dict:
for this_key in MAIN_KEYS:
optional_key_missing = (
this_key not in REQUIRED_MAIN_KEYS
and this_key not in example_dict
)
if optional_key_missing:
continue
if this_key == TARGET_MATRIX_KEY:
if this_key in example_dict:
example_dict[this_key] = (
example_dict[this_key][indices_to_keep, ...]
)
else:
example_dict[TARGET_VALUES_KEY] = (
example_dict[TARGET_VALUES_KEY][indices_to_keep]
)
continue
if this_key == FULL_IDS_KEY:
example_dict[this_key] = [
example_dict[this_key][k] for k in indices_to_keep
]
else:
example_dict[this_key] = example_dict[this_key][
indices_to_keep, ...]
return example_dict
new_example_dict = {}
for this_key in METADATA_KEYS:
sounding_key_missing = (
this_key in [SOUNDING_FIELDS_KEY, SOUNDING_HEIGHTS_KEY]
and this_key not in example_dict
)
if sounding_key_missing:
continue
if this_key == TARGET_NAMES_KEY:
if this_key in example_dict:
new_example_dict[this_key] = example_dict[this_key]
else:
new_example_dict[TARGET_NAME_KEY] = example_dict[
TARGET_NAME_KEY]
continue
new_example_dict[this_key] = example_dict[this_key]
for this_key in MAIN_KEYS:
optional_key_missing = (
this_key not in REQUIRED_MAIN_KEYS
and this_key not in example_dict
)
if optional_key_missing:
continue
if this_key == TARGET_MATRIX_KEY:
if this_key in example_dict:
new_example_dict[this_key] = (
example_dict[this_key][indices_to_keep, ...]
)
else:
new_example_dict[TARGET_VALUES_KEY] = (
example_dict[TARGET_VALUES_KEY][indices_to_keep]
)
continue
if this_key == FULL_IDS_KEY:
new_example_dict[this_key] = [
example_dict[this_key][k] for k in indices_to_keep
]
else:
new_example_dict[this_key] = example_dict[this_key][
indices_to_keep, ...]
return new_example_dict
def find_example_file(
top_directory_name, shuffled=True, spc_date_string=None,
batch_number=None, raise_error_if_missing=True):
"""Looks for file with input examples.
If `shuffled = True`, this method looks for a file with shuffled examples
(from many different times). If `shuffled = False`, this method looks for a
file with examples from one SPC date.
:param top_directory_name: Name of top-level directory with input examples.
:param shuffled: Boolean flag. The role of this flag is explained in the
general discussion above.
:param spc_date_string: [used only if `shuffled = False`]
SPC date (format "yyyymmdd").
:param batch_number: [used only if `shuffled = True`]
Batch number (integer).
:param raise_error_if_missing: Boolean flag. If file is missing and
`raise_error_if_missing = True`, this method will error out.
:return: example_file_name: Path to file with input examples. If file is
missing and `raise_error_if_missing = False`, this is the *expected*
path.
:raises: ValueError: if file is missing and `raise_error_if_missing = True`.
"""
error_checking.assert_is_string(top_directory_name)
error_checking.assert_is_boolean(shuffled)
error_checking.assert_is_boolean(raise_error_if_missing)
if shuffled:
error_checking.assert_is_integer(batch_number)
error_checking.assert_is_geq(batch_number, 0)
first_batch_number = int(number_rounding.floor_to_nearest(
batch_number, NUM_BATCHES_PER_DIRECTORY))
last_batch_number = first_batch_number + NUM_BATCHES_PER_DIRECTORY - 1
example_file_name = (
'{0:s}/batches{1:07d}-{2:07d}/input_examples_batch{3:07d}.nc'
).format(top_directory_name, first_batch_number, last_batch_number,
batch_number)
else:
time_conversion.spc_date_string_to_unix_sec(spc_date_string)
example_file_name = (
'{0:s}/{1:s}/input_examples_{2:s}.nc'
).format(top_directory_name, spc_date_string[:4], spc_date_string)
if raise_error_if_missing and not os.path.isfile(example_file_name):
        error_string = 'Cannot find file. Expected at: "{0:s}"'.format(
example_file_name)
raise ValueError(error_string)
    return example_file_name
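# Usage sketch (hypothetical directory and batch number; assumes
# NUM_BATCHES_PER_DIRECTORY == 1000, matching the zero-padded ranges above):
#   find_example_file('/data', shuffled=True, batch_number=1234,
#                     raise_error_if_missing=False)
#   returns '/data/batches0001000-0001999/input_examples_batch0001234.nc'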
def find_many_example_files(
top_directory_name, shuffled=True, first_spc_date_string=None,
last_spc_date_string=None, first_batch_number=None,
last_batch_number=None, raise_error_if_any_missing=True):
"""Looks for many files with input examples.
:param top_directory_name: See doc for `find_example_file`.
:param shuffled: Same.
:param first_spc_date_string: [used only if `shuffled = False`]
First SPC date (format "yyyymmdd"). This method will look for all SPC
dates from `first_spc_date_string`...`last_spc_date_string`.
:param last_spc_date_string: See above.
:param first_batch_number: [used only if `shuffled = True`]
First batch number (integer). This method will look for all batches
from `first_batch_number`...`last_batch_number`.
:param last_batch_number: See above.
:param raise_error_if_any_missing: Boolean flag. If *any* desired file is
not found and `raise_error_if_any_missing = True`, this method will
error out.
:return: example_file_names: 1-D list of paths to example files.
:raises: ValueError: if no files are found.
"""
error_checking.assert_is_boolean(shuffled)
if shuffled:
error_checking.assert_is_integer(first_batch_number)
error_checking.assert_is_integer(last_batch_number)
error_checking.assert_is_geq(first_batch_number, 0)
error_checking.assert_is_geq(last_batch_number, first_batch_number)
example_file_pattern = (
'{0:s}/batches{1:s}-{1:s}/input_examples_batch{1:s}.nc'
).format(top_directory_name, BATCH_NUMBER_REGEX)
example_file_names = glob.glob(example_file_pattern)
if len(example_file_names) > 0:
batch_numbers = numpy.array(
[_file_name_to_batch_number(f) for f in example_file_names],
dtype=int)
good_indices = numpy.where(numpy.logical_and(
batch_numbers >= first_batch_number,
batch_numbers <= last_batch_number
))[0]
example_file_names = [example_file_names[k] for k in good_indices]
if len(example_file_names) == 0:
error_string = (
'Cannot find any files with batch number from {0:d}...{1:d}.'
).format(first_batch_number, last_batch_number)
raise ValueError(error_string)
return example_file_names
spc_date_strings = time_conversion.get_spc_dates_in_range(
first_spc_date_string=first_spc_date_string,
last_spc_date_string=last_spc_date_string)
(a single argvalue) and return
a string or return None. If None, the automatically generated id for that
argument will be used.
If no ids are provided they will be generated automatically from
the argvalues.
:arg scope: if specified it denotes the scope of the parameters.
The scope is used for grouping tests by parameter instances.
It will also override any fixture-function defined scope, allowing
to set a dynamic scope using test context or configuration.
"""
# individual parametrized argument sets can be wrapped in a series
# of markers in which case we unwrap the values and apply the mark
# at Function init
newkeywords = {}
unwrapped_argvalues = []
for i, argval in enumerate(argvalues):
while isinstance(argval, MarkDecorator):
newmark = MarkDecorator(argval.markname,
argval.args[:-1], argval.kwargs)
newmarks = newkeywords.setdefault(i, {})
newmarks[newmark.markname] = newmark
argval = argval.args[-1]
unwrapped_argvalues.append(argval)
argvalues = unwrapped_argvalues
if not isinstance(argnames, (tuple, list)):
argnames = [x.strip() for x in argnames.split(",") if x.strip()]
if len(argnames) == 1:
argvalues = [(val,) for val in argvalues]
if not argvalues:
argvalues = [(_notexists,) * len(argnames)]
if scope is None:
scope = "function"
scopenum = scopes.index(scope)
valtypes = {}
for arg in argnames:
if arg not in self.fixturenames:
raise ValueError("%r uses no fixture %r" %(self.function, arg))
if indirect is True:
valtypes = dict.fromkeys(argnames, "params")
elif indirect is False:
valtypes = dict.fromkeys(argnames, "funcargs")
elif isinstance(indirect, (tuple, list)):
valtypes = dict.fromkeys(argnames, "funcargs")
for arg in indirect:
if arg not in argnames:
raise ValueError("indirect given to %r: fixture %r doesn't exist" %(
self.function, arg))
valtypes[arg] = "params"
idfn = None
if callable(ids):
idfn = ids
ids = None
if ids and len(ids) != len(argvalues):
raise ValueError('%d tests specified with %d ids' %(
len(argvalues), len(ids)))
if not ids:
ids = idmaker(argnames, argvalues, idfn)
newcalls = []
for callspec in self._calls or [CallSpec2(self)]:
for param_index, valset in enumerate(argvalues):
assert len(valset) == len(argnames)
newcallspec = callspec.copy(self)
newcallspec.setmulti(valtypes, argnames, valset, ids[param_index],
newkeywords.get(param_index, {}), scopenum,
param_index)
newcalls.append(newcallspec)
self._calls = newcalls
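    # Usage sketch (standard pytest API, shown for orientation; the test
    # function and values are hypothetical):
    #   @pytest.mark.parametrize("n,expected", [(1, 2), (3, 4)],
    #                            ids=["one", "three"])
    #   def test_increment(n, expected):
    #       assert n + 1 == expected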
def addcall(self, funcargs=None, id=_notexists, param=_notexists):
""" (deprecated, use parametrize) Add a new call to the underlying
test function during the collection phase of a test run. Note that
        request.addcall() is called during the test collection phase, prior to
        and independently of actual test execution. You should only use addcall()
if you need to specify multiple arguments of a test function.
:arg funcargs: argument keyword dictionary used when invoking
the test function.
:arg id: used for reporting and identification purposes. If you
don't supply an `id` an automatic unique id will be generated.
:arg param: a parameter which will be exposed to a later fixture function
invocation through the ``request.param`` attribute.
"""
assert funcargs is None or isinstance(funcargs, dict)
if funcargs is not None:
for name in funcargs:
if name not in self.fixturenames:
pytest.fail("funcarg %r not used in this function." % name)
else:
funcargs = {}
if id is None:
raise ValueError("id=None not allowed")
if id is _notexists:
id = len(self._calls)
id = str(id)
if id in self._ids:
raise ValueError("duplicate id %r" % id)
self._ids.add(id)
cs = CallSpec2(self)
        cs.setall(funcargs, id, param)
self._calls.append(cs)
def _idval(val, argname, idx, idfn):
if idfn:
try:
s = idfn(val)
if s:
return s
except Exception:
pass
if isinstance(val, (float, int, str, bool, NoneType)):
return str(val)
elif isinstance(val, REGEX_TYPE):
return val.pattern
elif enum is not None and isinstance(val, enum.Enum):
return str(val)
elif isclass(val) and hasattr(val, '__name__'):
return val.__name__
return str(argname)+str(idx)
def _idvalset(idx, valset, argnames, idfn):
this_id = [_idval(val, argname, idx, idfn)
for val, argname in zip(valset, argnames)]
return "-".join(this_id)
def idmaker(argnames, argvalues, idfn=None):
ids = [_idvalset(valindex, valset, argnames, idfn)
for valindex, valset in enumerate(argvalues)]
if len(set(ids)) < len(ids):
# user may have provided a bad idfn which means the ids are not unique
ids = [str(i) + testid for i, testid in enumerate(ids)]
return ids
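# Example of the generated ids (a sketch following _idval's rules above):
#   idmaker(('x', 'y'), [(1, 'a'), (2, 'b')]) returns ['1-a', '2-b']
# Non-unique ids are disambiguated with an index prefix: ['0same', '1same'].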
def showfixtures(config):
from _pytest.main import wrap_session
return wrap_session(config, _showfixtures_main)
def _showfixtures_main(config, session):
import _pytest.config
session.perform_collect()
curdir = py.path.local()
tw = _pytest.config.create_terminal_writer(config)
verbose = config.getvalue("verbose")
fm = session._fixturemanager
available = []
for argname, fixturedefs in fm._arg2fixturedefs.items():
assert fixturedefs is not None
if not fixturedefs:
continue
fixturedef = fixturedefs[-1]
loc = getlocation(fixturedef.func, curdir)
available.append((len(fixturedef.baseid),
fixturedef.func.__module__,
curdir.bestrelpath(loc),
fixturedef.argname, fixturedef))
available.sort()
currentmodule = None
for baseid, module, bestrel, argname, fixturedef in available:
if currentmodule != module:
if not module.startswith("_pytest."):
tw.line()
tw.sep("-", "fixtures defined from %s" %(module,))
currentmodule = module
if verbose <= 0 and argname[0] == "_":
continue
if verbose > 0:
funcargspec = "%s -- %s" %(argname, bestrel,)
else:
funcargspec = argname
tw.line(funcargspec, green=True)
loc = getlocation(fixturedef.func, curdir)
doc = fixturedef.func.__doc__ or ""
if doc:
for line in doc.strip().split("\n"):
tw.line(" " + line.strip())
else:
tw.line(" %s: no docstring available" %(loc,),
red=True)
def getlocation(function, curdir):
import inspect
fn = py.path.local(inspect.getfile(function))
lineno = py.builtin._getcode(function).co_firstlineno
if fn.relto(curdir):
fn = fn.relto(curdir)
return "%s:%d" %(fn, lineno+1)
# builtin pytest.raises helper
def raises(expected_exception, *args, **kwargs):
""" assert that a code block/function call raises @expected_exception
and raise a failure exception otherwise.
This helper produces a ``py.code.ExceptionInfo()`` object.
If using Python 2.5 or above, you may use this function as a
context manager::
from __future__ import unicode_literals
from future.builtins import int
from collections import defaultdict
from django.core.urlresolvers import reverse
from django.template.defaultfilters import linebreaksbr, urlize
from mezzanine import template
from mezzanine.conf import settings
from mezzanine.generic.forms import ThreadedCommentForm
from mezzanine.generic.models import ThreadedComment
from mezzanine.utils.importing import import_dotted_path
from mezzanine.pages.models import Page, RichTextPage
register = template.Library()
@register.assignment_tag
def allpages():
    page_fields = ['content', 'created', 'description', 'expiry_date', 'gen_description', u'id', 'keywords', u'keywords_string', 'publish_date', 'short_url', 'slug', 'status', 'title', 'titles', 'updated']
output = []
# import pdb;pdb.set_trace()
AllPages = RichTextPage.objects.all()
for item in AllPages:
temp = {}
for fld in page_fields:
temp[fld] = getattr(item, fld)
output.append(temp)
return {
'pages': output
}
@register.filter()
def remove_slash(value):
return '#' + value[1:-1]
@register.filter()
def lower(value):
# import pdb;pdb.set_trace()
    return value.lower()
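# Template usage sketch (hypothetical template; tags/filters as registered
# above):
#   {% allpages as pages_ctx %}
#   {{ "/about/"|remove_slash }}   renders "#about"
#   {{ page.title|lower }}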
#!/usr/bin/env python
# coding: utf8
import sys
import struct
def do_convert(filename):
fid_in = open(filename, 'rb')
fid_out = open('sound_conv.out','w')
data = fid_in.read(4) # read 4 bytes = 32 Bit Sample
while data:
ser = str(struct.unpack('<i', data)[0]) + '\n'
fid_out.write(ser)
data = fid_in.read(4)
fid_in.close()
fid_out.close()
if __name__ == "__main__":
print "Converting..."
    do_convert(sys.argv[1])
print "done"
# -*- coding: utf-8 -*-
"""
UserBot module
Copyright 2015, Ismael R. Lugo G.
"""
import translate
reload(translate)
from sysb import commands
from translate import lang
from translate import _
commands.addHandler('translate', '(tr|translate)2 (?P<in>[^ ]+) (?P<out>[^ ]+) '
'(?P<text>.*)', {'sintax': 'tr2 <input> <output> <text>',
                    'example': 'tr2 en es Hello!',
'alias': ('traslate2',),
'desc': _('Traduce un texto de un idioma a otro', lang)},
anyuser=True)(translate.translate2_1)
commands.addHandler('translate', '(tr|translate) (?P<in>[^ ]+) (?P<out>[^ ]+) ('
'?P<text>.*)', {'sintax': 'tr <input> <output> <text>',
'example': 'tr en es Hello!',
'alias': ('traslate',),
'desc': _('Traduce un texto de un idioma a otro', lang)},
anyuser=True)(translate.translate2_2)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# The path to enlightenment starts with the following:
import unittest
from koans.about_asserts import AboutAsserts
from koans.about_strings import AboutStrings
from koans.about_none import AboutNone
from koans.about_lists import AboutLists
from koans.about_list_assignments import AboutListAssignments
from koans.about_dictionaries import AboutDictionaries
from koans.about_string_manipulation import AboutStringManipulation
from koans.about_tuples import AboutTuples
from koans.about_methods import AboutMethods
from koans.about_control_statements import AboutControlStatements
from koans.about_true_and_false import AboutTrueAndFalse
from koans.about_sets import AboutSets
from koans.about_triangle_project import AboutTriangleProject
from koans.about_exceptions import AboutExceptions
from koans.about_triangle_project2 import AboutTriangleProject2
from koans.about_iteration import AboutIteration
from koans.about_comprehension import AboutComprehension
from koans.about_generators import AboutGenerators
from koans.about_lambdas import AboutLambdas
from koans.about_scoring_project import AboutScoringProject
from koans.about_classes import AboutClasses
from koans.about_with_statements import AboutWithStatements
from koans.about_monkey_patching import AboutMonkeyPatching
from koans.about_dice_project import AboutDiceProject
from koans.about_method_bindings import AboutMethodBindings
from koans.about_decorating_with_functions import AboutDecoratingWithFunctions
from koans.about_decorating_with_classes import AboutDecoratingWithClasses
from koans.about_inheritance import AboutInheritance
from koans.about_multiple_inheritance import AboutMultipleInheritance
from koans.about_regex import AboutRegex
from koans.about_scope import AboutScope
from koans.about_modules import AboutModules
from koans.about_packages import AboutPackages
from koans.about_class_attributes import AboutClassAttributes
from koans.about_attribute_access import AboutAttributeAccess
from koans.about_deleting_objects import AboutDeletingObjects
from koans.about_proxy_object_project import *
from koans.about_extra_credit import AboutExtraCredit
def koans():
loader = unittest.TestLoader()
suite = unittest.TestSuite()
loader.sortTestMethodsUsing = None
suite.addTests(loader.loadTestsFromTestCase(AboutAsserts))
suite.addTests(loader.loadTestsFromTestCase(AboutStrings))
suite.addTests(loader.loadTestsFromTestCase(AboutNone))
suite.addTests(loader.loadTestsFromTestCase(AboutLists))
suite.addTests(loader.loadTestsFromTestCase(AboutListAssignments))
suite.addTests(loader.loadTestsFromTestCase(AboutDictionaries))
suite.addTests(loader.loadTestsFromTestCase(AboutStringManipulation))
suite.addTests(loader.loadTestsFromTestCase(AboutTuples))
suite.addTests(loader.loadTestsFromTestCase(AboutMethods))
suite.addTests(loader.loadTestsFromTestCase(AboutControlStatements))
suite.addTests(loader.loadTestsFromTestCase(AboutTrueAndFalse))
suite.addTests(loader.loadTestsFromTestCase(AboutSets))
suite.addTests(loader.loadTestsFromTestCase(AboutTriangleProject))
suite.addTests(loader.loadTestsFromTestCase(AboutExceptions))
    suite.addTests(loader.loadTestsFromTestCase(AboutTriangleProject2))
suite.addTests(loader.loadTestsFromTestCase(AboutIteration))
suite.addTests(loader.loadTestsFromTestCase(AboutComprehension))
suite.addTests(loader.loadTestsFromTestCase(AboutGenerators))
suite.addTests(loader.loadTestsFromTestCase(AboutLambdas))
suite.addTests(loader.loadTestsFromTestCase(AboutScoringProject))
    suite.addTests(loader.loadTestsFromTestCase(AboutClasses))
suite.addTests(loader.loadTestsFromTestCase(AboutWithStatements))
suite.addTests(loader.loadTestsFromTestCase(AboutMonkeyPatching))
suite.addTests(loader.loadTestsFromTestCase(AboutDiceProject))
suite.addTests(loader.loadTestsFromTestCase(AboutMethodBindings))
suite.addTests(loader.loadTestsFromTestCase(AboutDecoratingWithFunctions))
suite.addTests(loader.loadTestsFromTestCase(AboutDecoratingWithClasses))
suite.addTests(loader.loadTestsFromTestCase(AboutInheritance))
suite.addTests(loader.loadTestsFromTestCase(AboutMultipleInheritance))
suite.addTests(loader.loadTestsFromTestCase(AboutScope))
suite.addTests(loader.loadTestsFromTestCase(AboutModules))
suite.addTests(loader.loadTestsFromTestCase(AboutPackages))
suite.addTests(loader.loadTestsFromTestCase(AboutClassAttributes))
suite.addTests(loader.loadTestsFromTestCase(AboutAttributeAccess))
suite.addTests(loader.loadTestsFromTestCase(AboutDeletingObjects))
suite.addTests(loader.loadTestsFromTestCase(AboutProxyObjectProject))
suite.addTests(loader.loadTestsFromTestCase(TelevisionTest))
suite.addTests(loader.loadTestsFromTestCase(AboutExtraCredit))
return suite
from datetime import datetime
path = str(datetime.now().date())
per = datetime.now()
per_h = str(per.hour)
per_m = str(per.minute)
timeit = str("%s:%s" % (per_h, per_m))
def Final(file_name):
    NPfile = str("%s-%s" % (file_name, timeit))
    A_Dump = "airodump-ng wlan0 -w "
    ADFN = A_Dump + NPfile
return ADFN
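# Illustration (hypothetical time 13:07): Final('capture') returns
# 'airodump-ng wlan0 -w capture-13:7'. Minutes are not zero-padded because
# per_m is built with str(per.minute).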
# We need the following imports for list_stdlib_modules()
# to work when run from a virtualenv. The modules were chosen empirically
# so that the return value matches the return value without virtualenv.
import BaseHTTPServer
import zlib
def dotted_name_of_path(path, trimpure=False):
"""Given a relative path to a source file, return its dotted module name.
>>> dotted_name_of_path('mercurial/error.py')
'mercurial.error'
>>> dotted_name_of_path('mercurial/pure/parsers.py', trimpure=True)
'mercurial.parsers'
>>> dotted_name_of_path('zlibmodule.so')
'zlib'
"""
parts = path.split('/')
parts[-1] = parts[-1].split('.', 1)[0] # remove .py and .so and .ARCH.so
if parts[-1].endswith('module'):
parts[-1] = parts[-1][:-6]
if trimpure:
return '.'.join(p for p in parts if p != 'pure')
return '.'.join(parts)
def list_stdlib_modules():
"""List the modules present in the stdlib.
>>> mods = set(list_stdlib_modules())
>>> 'BaseHTTPServer' in mods
True
os.path isn't really a module, so it's missing:
>>> 'os.path' in mods
False
sys requires special treatment, because it's baked into the
interpreter, but it should still appear:
>>> 'sys' in mods
True
>>> 'collections' in mods
True
>>> 'cStringIO' in mods
True
"""
for m in sys.builtin_module_names:
yield m
# These modules only exist on windows, but we should always
# consider them stdlib.
for m in ['msvcrt', '_winreg']:
yield m
# These get missed too
for m in 'ctypes', 'email':
yield m
yield 'builtins' # python3 only
for m in 'fcntl', 'grp', 'pwd', 'termios': # Unix only
yield m
stdlib_prefixes = set([sys.prefix, sys.exec_prefix])
# We need to supplement the list of prefixes for the search to work
# when run from within a virtualenv.
for mod in (BaseHTTPServer, zlib):
try:
# Not all module objects have a __file__ attribute.
filename = mod.__file__
except AttributeError:
continue
dirname = os.path.dirname(filename)
for prefix in stdlib_prefixes:
if dirname.startswith(prefix):
# Then this directory is redundant.
break
else:
stdlib_prefixes.add(dirname)
for libpath in sys.path:
# We want to walk everything in sys.path that starts with
# something in stdlib_prefixes. check-code suppressed because
# the ast module used by this script implies the availability
# of any().
if not any(libpath.startswith(p) for p in stdlib_prefixes): # no-py24
continue
if 'site-packages' in libpath:
continue
for top, dirs, files in os.walk(libpath):
for name in files:
if name == '__init__.py':
continue
if not (name.endswith('.py') or name.endswith('.so')
or name.endswith('.pyd')):
continue
full_path = os.path.join(top, name)
if 'site-packages' in full_path:
continue
rel_path = full_path[len(libpath) + 1:]
mod = dotted_name_of_path(rel_path)
yield mod
stdlib_modules = set(list_stdlib_modules())
def imported_modules(source, ignore_nested=False):
"""Given the source of a file as a string, yield the names
imported by that file.
Args:
source: The python source to examine as a string.
ignore_nested: If true, import statements that do not start in
column zero will be ignored.
Returns:
A list of module names imported by the given source.
>>> sorted(imported_modules(
... 'import foo ; from baz import bar; import foo.qux'))
['baz.bar', 'foo', 'foo.qux']
>>> sorted(imported_modules(
... '''import foo
... def wat():
... import bar
... ''', ignore_nested=True))
['foo']
"""
for node in ast.walk(ast.parse(source)):
if ignore_nested and getattr(node, 'col_offset', 0) > 0:
continue
if isinstance(node, ast.Import):
for n in node.names:
yield n.name
elif isinstance(node, ast.ImportFrom):
prefix = node.module + '.'
for n in node.names:
yield prefix + n.name
def verify_stdlib_on_own_line(source):
"""Given some python source, verify that stdlib imports are done
in separate statements from relative local module imports.
Observing this limitation is important as it works around an
annoying lib2to3 bug in relative import rewrites:
http://bugs.python.org/issue19510.
>>> list(verify_stdlib_on_own_line('import sys, foo'))
['mixed imports\\n stdlib: sys\\n relative: foo']
>>> list(verify_stdlib_on_own_line('import sys, os'))
[]
>>> list(verify_stdlib_on_own_line('import foo, bar'))
[]
"""
for node in ast.walk(ast.parse(source)):
if isinstance(node, ast.Import):
from_stdlib = {False: [], True: []}
for n in node.names:
from_stdlib[n.name in stdlib_modules].append(n.name)
if from_stdlib[True] and from_stdlib[False]:
yield ('mixed imports\n stdlib: %s\n relative: %s' %
(', '.join(sorted(from_stdlib[True])),
', '.join(sorted(from_stdlib[False]))))
class CircularImport(Exception):
pass
def checkmod(mod, imports):
shortest = {}
visit = [[mod]]
while visit:
path = visit.pop(0)
for i in sorted(imports.get(path[-1], [])):
if i not in stdlib_modules and not i.startswith('mercurial.'):
i = mod.rsplit('.', 1)[0] + '.' + i
if len(path) < shortest.get(i, 1000):
shortest[i] = len(path)
if i in path:
if i == path[0]:
raise CircularImport(path)
continue
visit.append(path + [i])
def rotatecycle(cycle):
"""arrange a cycle so that the lexicographically first module listed first
>>> rotatecycle(['foo', 'bar'])
['bar', 'foo', 'bar']
"""
lowest = min(cycle)
idx = cycle.index(lowest)
return cycle[idx:] + cycle[:idx] + [lowest]
def find_cycles(imports):
"""Find cycles in an already-loaded import graph.
>>> imports = {'top.foo': ['bar', 'os.path', 'qux'],
... 'top.bar': ['baz', 'sys'],
... 'top.baz': ['foo'],
... 'top.qux': ['foo']}
>>> print '\\n'.join(sorted(find_cycles(imports)))
top.bar -> top.baz -> top.foo -> top.bar
top.foo -> top.qux -> top.foo
"""
cycles = set()
for mod in sorted(imports.iterkeys()):
try:
checkmod(mod, imports)
except CircularImport, e:
cycle = e.args[0]
cycles.add(" -> ".join(rotatecycle(cycle)))
return cycles
def _cycle_sortkey(c):
return len(c), c
def main(argv):
if len(argv) < 2:
        print 'Usage: %s file [file] [file] ...' % argv[0]
return 1
used_imports = {}
any_errors = False
for source_path in argv[1:]:
f = open(source_path)
modname = dotted_name_of_path(source_path, trimpure=True)
src = f.read()
used_imports[modname] = sorted(
imported_modules(src, ignore_nested=True))
for error in verify_stdlib_on_own_line(src):
any_errors = True
print source_path, error
        f.close()
cycles = find_cycles(used_imports)
if cycles:
firstmods = set()
        for c in sorted(cycles, key=_cycle_sortkey):
first = c.split()[0]
# As a rough cut, ignore any cycle that starts with the
# same module as some other cycle. Otherwise we see lots
# of cycles that are effectively duplicates.
if first in firstmods:
continue
print 'Import cycle:', c
firstmods.add(first)
any_errors = True
return not any_errors
if __name__ == '__main__':
    # main() returns True on success, so invert for the process exit code.
    sys.exit(int(not main(sys.argv)))
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 1024, FREQ = 'D', seed = 0, trendtype = "ConstantTrend", cycle_length = 0, transform = "RelativeDifference", sigma = 0.0, exog_count = 20, ar_order = 12)
#!/usr/bin/env python
import argparse
import sys
from util import add_common_args, init_protocol
from local_thrift import thrift
from thrift.Thrift import TMessageType, TType
# TODO: generate from ThriftTest.thrift
def test_string(proto, value):
method_name = 'testString'
ttype = TType.STRING
proto.writeMessageBegin(method_name, TMessageType.CALL, 3)
proto.writeStructBegin(method_name + '_args')
proto.writeFieldBegin('thing', ttype, 1)
proto.writeString(value)
proto.writeFieldEnd()
proto.writeFieldStop()
proto.writeStructEnd()
proto.writeMessageEnd()
proto.trans.flush()
_, mtype, _ = proto.readMessageBegin()
assert mtype == TMessageType.REPLY
proto.readStructBegin()
_, ftype, fid = proto.readFieldBegin()
assert fid == 0
assert ftype == ttype
result = proto.readString()
proto.readFieldEnd()
_, ftype, _ = proto.readFieldBegin()
assert ftype == TType.STOP
proto.readStructEnd()
proto.readMessageEnd()
assert value == result
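# The helper above hand-rolls a single Thrift RPC: it writes a CALL message
# wrapping a 'testString_args' struct with one STRING field (id 1), then reads
# the REPLY's field 0 back. The sequence id 3 is arbitrary and is not checked
# against the reply here.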
def main(argv):
p = argparse.ArgumentParser()
add_common_args(p)
p.add_argument('--limit', type=int)
args = p.parse_args()
proto = init_protocol(args)
test_string(proto, 'a' * (args.limit - 1))
test_string(proto, 'a' * (args.limit - 1))
print('[OK]: limit - 1')
test_string(proto, 'a' * args.limit)
test_string(proto, 'a' * args.limit)
print('[OK]: just limit')
try:
test_string(proto, 'a' * (args.limit + 1))
except:
print('[OK]: limit + 1')
else:
print('[ERROR]: limit + 1')
assert False
if __name__ == '__main__':
main(sys.argv[1:])
# pyOCD debugger
# Copyright (c) 2006-2016 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ...core.memory_map import (FlashRegion, RamRegion, MemoryMap)
from .target_LPC4088FBD144 import (LARGE_ERASE_SECTOR_WEIGHT, LARGE_PROGRAM_PAGE_WEIGHT, LPC4088)
from .target_LPC4088FBD144 import FLASH_ALGO as INTERNAL_FLASH_ALGO
FLASH_ALGO = {
'load_address' : 0x10000000,
'instructions' : [
0xE00ABE00, 0x062D780D, 0x24084068, 0xD3000040, 0x1E644058, 0x1C49D1FA, 0x2A001E52, 0x4770D1F2,
0x28100b00, 0x210ebf24, 0x00d0eb01, 0xe92d4770, 0xf8df4df0, 0x4606831c, 0x460c44c8, 0x0000f8d8,
0x1c402500, 0x0000f8c8, 0x0f01f010, 0x461749c1, 0x2080f44f, 0x63c8bf14, 0x05306388, 0xa2f8f8df,
0xf04f0d00, 0x44ca0b00, 0xf8cad111, 0xf44fb010, 0xf8ca5080, 0xe9ca6000, 0xf8ca0b01, 0xf8d8b00c,
0x4651000c, 0xf1a16882, 0x47900080, 0x2018b9c0, 0xb008f8ca, 0xb003e9ca, 0xf5b4b1cc, 0xbf8c7f80,
0x7b80f44f, 0x197046a3, 0x0b00e9ca, 0x000cf8d8, 0x19794aa9, 0x6843444a, 0x0080f1a2, 0xb1104798,
0xe8bd2001, 0x445d8df0, 0x040bebb4, 0x2000d1e5, 0x8df0e8bd, 0x41f0e92d, 0x8274f8df, 0x60e0f642,
0x4d9e44c8, 0x0008f8c8, 0x70282000, 0x732820aa, 0x73282055, 0xf8052001, 0x22000c40, 0xf0002112,
0x2200f91a, 0x4610210d, 0xf915f000, 0x210d2200, 0xf0002001, 0x2200f910, 0x20022113, 0xf90bf000,
0x68204c8c, 0x5000f440, 0x6a206020, 0x2084f440, 0x6c206220, 0x2000f440, 0xf44f6420, 0x63e72780,
0x61a6117e, 0xf4406c68, 0x64683080, 0xf8c52002, 0x22050134, 0xf0002107, 0x2205f8ee, 0x20002116,
0xf8e9f000, 0x210f2205, 0xf0002000, 0x2205f8e4, 0x20002110, 0xf8dff000, 0x21112205, 0xf0002000,
0x2205f8da, 0x20002112, 0xf8d5f000, 0xf44f4874, 0x6800727a, 0xf8c86940, 0xf8d8000c, 0xfbb11008,
0xf8d5f1f2, 0xf8d02134, 0xf002c000, 0xfbb1021f, 0x496cf3f2, 0xfba1486c, 0x08892103, 0x444822c0,
0x280047e0, 0x61e6bf04, 0x81f0e8bd, 0x61e663a7, 0xe8bd2001, 0x200081f0, 0xe92d4770, 0x4c6341f0,
0x444c2032, 0x251d2700, 0xe9c460a5, 0x4e600700, 0x0114f104, 0x47b04620, 0xb9806960, 0x60a52034,
0x0700e9c4, 0xf1044852, 0x44480114, 0x60e06880, 0x47b04620, 0x28006960, 0xe8bdbf08, 0x200181f0,
0x81f0e8bd, 0x5f20f1b0, 0xf5b0bf32, 0x20002f00, 0xb5704770, 0x2c100b04, 0x200ebf24, 0x04d4eb00,
0x4d4a2032, 0x444d4e4a, 0x0114f105, 0x0400e9c5, 0x60ac4628, 0x696847b0, 0x2034b978, 0x0400e9c5,
0x60ac483b, 0xf1054448, 0x68800114, 0x462860e8, 0x696847b0, 0xbf082800, 0x2001bd70, 0xe92dbd70,
0x4f3341f0, 0x444f4605, 0x68784614, 0x1c404a31, 0xf0106078, 0xf44f0f01, 0xbf145000, 0x619061d0,
0x5f20f1b5, 0x4622d305, 0x5020f1a5, 0x41f0e8bd, 0xf5b5e6bd, 0xd3052f00, 0xf5a54622, 0xe8bd2000,
0xe6b441f0, 0xe9d4b975, 0x44080100, 0x1202e9d4, 0x44084411, 0x44086921, 0x44086961, 0x440869a1,
0x61e04240, 0x28100b28, 0x210ebf24, 0x00d0eb01, 0x4e1e2132, 0x8078f8df, 0xe9c6444e, 0x60b01000,
0x0114f106, 0x47c04630, 0xb9886970, 0xe9c62033, 0xf44f0500, 0xe9c67000, 0x68b84002, 0xf1066130,
    0x46300114, 0x697047c0, 0xbf082800, 0x81f0e8bd, 0xe8bd2001, 0xeb0181f0, 0x490e1040, 0x0080eb01,
0xf0216801, 0x60010107, 0x43116801, 0x47706001, 0x00000004, 0x20098000, 0x000000b4, 0x400fc080,
0x1fff1ff8, 0xcccccccd, 0x00000034, 0x00000014, 0x1fff1ff1, 0x4002c000, 0x00000000, 0x00000001,
0x00000000, 0x00000000, 0x00000000,
],
'pc_init' : 0x100000D5,
'pc_unInit': 0x100001D7,
'pc_program_page': 0x1000027F,
'pc_erase_sector': 0x10000225,
'pc_eraseAll' : 0x100001DB,
    'static_base' : 0x10000000 + 0x00000020 + 0x00000400,
'begin_stack' : 0x10000000 + 0x00000800,
# Double buffering is not supported since there is not enough ram
'begin_data' : 0x10000000 + 0x00000A00, # Analyzer uses a max of 120 B data (30 pages * 4 bytes / page)
'page_size' : 0x00000200,
'min_program_length' : 512,
'analyzer_supported' : True,
'analyzer_address' : 0x10002000 # Analyzer 0x10002000..0x10002600
}
class LPC4088qsb(LPC4088):
MEMORY_MAP = MemoryMap(
FlashRegion( start=0, length=0x10000, is_boot_memory=True,
blocksize=0x1000,
page_size=0x200,
algo=INTERNAL_FLASH_ALGO),
FlashRegion( start=0x10000, length=0x70000, blocksize=0x8000,
page_size=0x400,
erase_sector_weight=LARGE_ERASE_SECTOR_WEIGHT,
program_page_weight=LARGE_PROGRAM_PAGE_WEIGHT,
algo=INTERNAL_FLASH_ALGO),
FlashRegion( start=0x28000000, length=0x1000000, blocksize=0x1000,
page_size=0x200,
algo=FLASH_ALGO),
RamRegion( start=0x10000000, length=0x10000),
)
def __init__(self, session):
super(LPC4088qsb, self).__init__(session, self.MEMORY_MAP)
=="\n" or LINEEND=="\r":
reflineend = " "+LINEEND # as per spec
elif LINEEND=="\r\n":
reflineend = LINEEND
else:
raise ValueError, "bad end of line! %s" % repr(LINEEND)
return string.join(entries, LINEEND)
class PDFCrossReferenceTable:
__PDFObject__ = True
def __init__(self):
self.sections = []
def addsection(self, firstentry, ids):
section = PDFCrossReferenceSubsection(firstentry, ids)
self.sections.append(section)
def format(self, document):
sections = self.sections
if not sections:
raise ValueError, "no crossref sections"
L = ["xref"+LINEEND]
for s in self.sections:
fs = format(s, document)
L.append(fs)
return string.join(L, "")
TRAILERFMT = ("trailer%(LINEEND)s"
"%(dict)s%(LINEEND)s"
"startxref%(LINEEND)s"
"%(startxref)s%(LINEEND)s"
"%(PERCENT)s%(PERCENT)sEOF%(LINEEND)s")
class PDFTrailer:
__PDFObject__ = True
def __init__(self, startxref, Size=None, Prev=None, Root=None, Info=None, ID=None, Encrypt=None):
self.startxref = startxref
if Size is None or Root is None:
raise ValueError, "Size and Root keys required"
dict = self.dict = PDFDictionary()
for (n,v) in [("Size", Size), ("Prev", Prev), ("Root", Root),
("Info", Info), ("ID", ID), ("Encrypt", Encrypt)]:
if v is not None:
dict[n] = v
def format(self, document):
fdict = format(self.dict, document)
D = LINEENDDICT.copy()
D["dict"] = fdict
D["startxref"] = self.startxref
return TRAILERFMT % D
#### XXXX skipping incremental update,
#### encryption
#### chapter 6, doc structure
class PDFCatalog:
__PDFObject__ = True
__Comment__ = "Document Root"
__RefOnly__ = 1
# to override, set as attributes
__Defaults__ = {"Type": PDFName("Catalog"),
"PageMode": PDFName("UseNone"),
}
__NoDefault__ = string.split("""
Dests Outlines Pages Threads AcroForm Names OpenActions PageMode URI
ViewerPreferences PageLabels PageLayout JavaScript StructTreeRoot SpiderInfo"""
)
__Refs__ = __NoDefault__ # make these all into references, if present
def format(self, document):
self.check_format(document)
defaults = self.__Defaults__
Refs = self.__Refs__
D = {}
for k in defaults.keys():
default = defaults[k]
v = None
if hasattr(self, k) and getattr(self,k) is not None:
v = getattr(self, k)
elif default is not None:
v = default
if v is not None:
D[k] = v
for k in self.__NoDefault__:
if hasattr(self, k):
v = getattr(self,k)
if v is not None:
D[k] = v
# force objects to be references where required
for k in Refs:
if k in D:
#print"k is", k, "value", D[k]
D[k] = document.Reference(D[k])
dict = PDFDictionary(D)
return format(dict, document)
def showOutline(self):
self.setPageMode("UseOutlines")
def showFullScreen(self):
self.setPageMode("FullScreen")
def setPageLayout(self,layout):
if layout:
self.PageLayout = PDFName(layout)
def setPageMode(self,mode):
if mode:
self.PageMode = PDFName(mode)
def check_format(self, document):
"""for use in subclasses"""
pass
class PDFPages(PDFCatalog):
"""PAGES TREE WITH ONE INTERNAL NODE, FOR "BALANCING" CHANGE IMPLEMENTATION"""
__Comment__ = "page tree"
__RefOnly__ = 1
# note: could implement page attribute inheritance...
__Defaults__ = {"Type": PDFName("Pages"),
}
__NoDefault__ = string.split("Kids Count Parent")
__Refs__ = ["Parent"]
def __init__(self):
self.pages = []
def __getitem__(self, item):
return self.pages[item]
def addPage(self, page):
self.pages.append(page)
def check_format(self, document):
# convert all pages to page references
pages = self.pages
kids = PDFArray(pages)
# make sure all pages are references
kids.References(document)
self.Kids = kids
self.Count = len(pages)
class PDFPage(PDFCatalog):
__Comment__ = "Page dictionary"
# all PDF attributes can be set explicitly
# if this flag is set, the "usual" behavior will be suppressed
Override_default_compilation = 0
__RefOnly__ = 1
__Defaults__ = {"Type": PDFName("Page"),
# "Parent": PDFObjectReference(Pages), # no! use document.Pages
}
__NoDefault__ = string.split(""" Parent
MediaBox Resources Contents CropBox Rotate Thumb Annots B Dur Hid Trans AA
PieceInfo LastModified SeparationInfo ArtBox TrimBox BleedBox ID PZ
Trans
""")
__Refs__ = string.split("""
Contents Parent ID
""")
pagewidth = 595
pageheight = 842
stream = None
hasImages = 0
compression = 0
XObjects = None
_colorsUsed = {}
Trans = None
# transitionstring?
# xobjects?
# annotations
def __init__(self):
# set all nodefaults to None
for name in self.__NoDefault__:
setattr(self, name, None)
def setCompression(self, onoff):
self.compression = onoff
def setStream(self, code):
if self.Override_default_compilation:
raise ValueError, "overridden! must set stream explicitly"
from types import ListType
if type(code) is ListType:
code = string.join(code, LINEEND)+LINEEND
self.stream = code
def setPageTransition(self, tranDict):
self.Trans = PDFDictionary(tranDict)
def check_format(self, document):
# set up parameters unless usual behaviour is suppressed
if self.Override_default_compilation:
return
self.MediaBox = self.MediaBox or PDFArray(self.Rotate in (90,270) and [0,0,self.pageheight,self.pagewidth] or [0, 0, self.pagewidth, self.pageheight])
if not self.Annots:
self.Annots = None
else:
#print self.Annots
#raise ValueError, "annotations not reimplemented yet"
if not hasattr(self.Annots,'__PDFObject__'):
self.Annots = PDFArray(self.Annots)
if not self.Contents:
stream = self.stream
if not stream:
self.Contents = teststream()
else:
S = PDFStream()
if self.compression:
                S.filters = rl_config.useA85 and [PDFBase85Encode, PDFZCompress] or [PDFZCompress]
S.content = stream
S.__Comment__ = "page stream"
self.Contents = S
if not self.Resources:
resources = PDFResourceDictionary()
# fonts!
resources.basicFonts()
if self.hasImages:
resources.allProcs()
else:
resources.basicProcs()
if self.XObjects:
#print "XObjects", self.XObjects.dict
resources.XObject = self.XObjects
if self.ExtGState:
resources.ExtGState = self.ExtGState
resources.setColorSpace(self._colorsUsed)
self.Resources = resources
if not self.Parent:
pages = document.Pages
self.Parent = document.Reference(pages)
#this code contributed by Christian Jacobs <cljacobsen@gmail.com>
class PDFPageLabels(PDFCatalog):
__comment__ = None
__RefOnly__ = 0
__Defaults__ = {}
__NoDefault__ = ["Nums"]
__Refs__ = []
def __init__(self):
self.labels = []
def addPageLabel(self, page, label):
""" Adds a new PDFPageLabel to this catalog.
        The 'page' argument, an integer, is the page number
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from airflow.hooks.base_hook import BaseHook
from azure.storage.blob import BlockBlobService
class WasbHook(BaseHook):
"""
Interacts with Azure Blob Storage through the wasb:// protocol.
Additional options passed in the 'extra' field of the connection will be
    passed to the `BlockBlobService()` constructor. For example, authenticate
using a SAS token by adding {"sas_token": "YOUR_TOKEN"}.
:param wasb_conn_id: Reference to the wasb connection.
:type wasb_conn_id: str
"""
def __init__(self, wasb_conn_id='wasb_default'):
self.conn_id = wasb_conn_id
self.connection = self.get_conn()
def get_conn(self):
"""Return the BlockBlobService object."""
conn = self.get_connection(self.conn_id)
service_options = conn.extra_dejson
return BlockBlobService(account_name=conn.login,
account_key=conn.password, **service_options)
def check_for_blob(self, container_name, blob_name, **kwargs):
| """
Check if a blob exists on Azure Blob Storage.
:param container_name: Name of the container.
:type container_name: str
:param blob_name: Name of the blob.
:type blob_name: str
:param kwargs: Optional keyword arguments that
`BlockBlobService.exists()` takes.
:type kwargs: object
:return: True if the blob exists, False otherwise.
        :rtype: bool
"""
return self.connection.exists(container_name, blob_name, **kwargs)
def check_for_prefix(self, container_name, prefix, **kwargs):
"""
Check if a prefix exists on Azure Blob storage.
:param container_name: Name of the container.
:type container_name: str
:param prefix: Prefix of the blob.
:type prefix: str
:param kwargs: Optional keyword arguments that
`BlockBlobService.list_blobs()` takes.
:type kwargs: object
:return: True if blobs matching the prefix exist, False otherwise.
        :rtype: bool
"""
matches = self.connection.list_blobs(container_name, prefix,
num_results=1, **kwargs)
return len(list(matches)) > 0
def load_file(self, file_path, container_name, blob_name, **kwargs):
"""
Upload a file to Azure Blob Storage.
:param file_path: Path to the file to load.
:type file_path: str
:param container_name: Name of the container.
:type container_name: str
:param blob_name: Name of the blob.
:type blob_name: str
:param kwargs: Optional keyword arguments that
`BlockBlobService.create_blob_from_path()` takes.
:type kwargs: object
"""
# Reorder the argument order from airflow.hooks.S3_hook.load_file.
self.connection.create_blob_from_path(container_name, blob_name,
file_path, **kwargs)
def load_string(self, string_data, container_name, blob_name, **kwargs):
"""
Upload a string to Azure Blob Storage.
:param string_data: String to load.
:type string_data: str
:param container_name: Name of the container.
:type container_name: str
:param blob_name: Name of the blob.
:type blob_name: str
:param kwargs: Optional keyword arguments that
`BlockBlobService.create_blob_from_text()` takes.
:type kwargs: object
"""
# Reorder the argument order from airflow.hooks.S3_hook.load_string.
self.connection.create_blob_from_text(container_name, blob_name,
string_data, **kwargs)
def get_file(self, file_path, container_name, blob_name, **kwargs):
"""
Download a file from Azure Blob Storage.
:param file_path: Path to the file to download.
:type file_path: str
:param container_name: Name of the container.
:type container_name: str
:param blob_name: Name of the blob.
:type blob_name: str
:param kwargs: Optional keyword arguments that
            `BlockBlobService.get_blob_to_path()` takes.
:type kwargs: object
"""
return self.connection.get_blob_to_path(container_name, blob_name,
file_path, **kwargs)
def read_file(self, container_name, blob_name, **kwargs):
"""
Read a file from Azure Blob Storage and return as a string.
:param container_name: Name of the container.
:type container_name: str
:param blob_name: Name of the blob.
:type blob_name: str
:param kwargs: Optional keyword arguments that
            `BlockBlobService.get_blob_to_text()` takes.
:type kwargs: object
"""
return self.connection.get_blob_to_text(container_name,
blob_name,
**kwargs).content
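# Minimal usage sketch (assumes a configured 'wasb_default' Airflow connection;
# the container and blob names are hypothetical):
#   hook = WasbHook(wasb_conn_id='wasb_default')
#   hook.load_string('hello', 'mycontainer', 'greeting.txt')
#   assert hook.check_for_blob('mycontainer', 'greeting.txt')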
# Copyright 2015 Dimitri Racordon
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask import Blueprint, current_app, redirect, render_template, url_for
from mushi.core.auth import parse_auth_token, require_auth_token, validate_auth_token
from mushi.core.exc import AuthenticationError
bp = Blueprint('views', __name__)
@bp.route('/')
@require_auth_token
def index(auth_token):
return render_template('spa.html', api_root=current_app.config['API_ROOT'])
@bp.route('/login')
def login():
try:
auth_token = parse_auth_token()
validate_auth_token(auth_token)
return redirect(url_for('views.index'))
except AuthenticationError:
return render_template('login.html', api_root=current_app.config['API_ROOT'])
from sqlalchemy import *
from migrate import *
from migrate.changeset import schema
pre_meta = MetaData()
post_meta = MetaData()
user = Table('user', post_meta,
Column('id', Integer, primary_key=True, nullable=False),
Column('username', String(length=64)),
Column('email', String(length=120)),
Column('role', SmallInteger, default=ColumnDefault(0)),
Column('name', String(length=120)),
)
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind
# migrate_engine to your metadata
pre_meta.bind = migrate_engine
post_meta.bind = migrate_engine
post_meta.tables['user'].columns['name'].create()
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
pre_meta.bind = migrate_engine
post_meta.bind = migrate_engine
post_meta.tables['user'].columns['name'].drop()
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Ivan Vanderbyl <ivan@app.io>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: logentries
author: "Ivan Vanderbyl (@ivanvanderbyl)"
short_description: Module for tracking logs via logentries.com
description:
- Sends logs to LogEntries in realtime
version_added: "1.6"
options:
path:
description:
- path to a log file
required: true
state:
description:
- following state of the log
choices: [ 'present', 'absent' ]
required: false
default: present
name:
description:
- name of the log
required: false
logtype:
description:
- type of the log
required: false
notes:
- Requires the LogEntries agent which can be installed following the instructions at logentries.com
'''
EXAMPLES = '''
# Track nginx logs
- logentries:
path: /var/log/nginx/access.log
state: present
name: nginx-access-log
# Stop tracking nginx logs
- logentries:
path: /var/log/nginx/error.log
state: absent
'''
def query_log_status(module, le_path, path, state="present"):
""" Returns whether a log is followed or not. """
if state == "present":
rc, out, err = module.run_command("%s followed %s" % (le_path, path))
if rc == 0:
return True
return False
def follow_log(module, le_path, logs, name=None, logtype=None):
""" Follows one or more logs if not already followed. """
followed_count = 0
for log in logs:
if query_log_status(module, le_path, log):
continue
if module.check_mode:
module.exit_json(changed=True)
cmd = [le_path, 'follow', log]
if name:
cmd.extend(['--name',name])
if logtype:
cmd.extend(['--type',logtype])
rc, out, err = module.run_command(' '.join(cmd))
if not query_log_status(module, le_path, log):
module.fail_json(msg="failed to follow '%s': %s" % (log, err.strip()))
followed_count += 1
if followed_count > 0:
module.exit_json(changed=True, msg="followed %d log(s)" % (followed_count,))
module.exit_json(changed=False, msg="logs(s) already followed")
def unfollow_log(module, le_path, logs):
""" Unfollows one or more logs if followed. """
removed_count = 0
    # Using a for loop so that, in case of an error, we can report the log that failed
for log in logs:
# Query the log first, to see if we even need to remove.
if not query_log_status(module, le_path, log):
continue
if module.check_mode:
module.exit_json(changed=True)
rc, out, err = module.run_command([le_path, 'rm', log])
if query_log_status(module, le_path, log):
module.fail_json(msg="failed to remove '%s': %s" % (log, err.strip()))
removed_count += 1
if removed_count > 0:
module.exit_json(changed=True, msg="removed %d package(s)" % removed_count)
module.exit_json(changed=False, msg="logs(s) already unfollowed")
def main():
module = AnsibleModule(
argument_spec = dict(
path = dict(required=True),
state = dict(default="present", choices=["present", "followed", "absent", "unfollowed"]),
name = dict(required=False, default=None, type='str'),
logtype = dict(required=False, default=None, type='str', aliases=['type'])
),
        supports_check_mode=True
)
    le_path = module.get_bin_path('le', True, ['/usr/local/bin'])
p = module.params
# Handle multiple log files
logs = p["path"].split(",")
logs = filter(None, logs)
if p["state"] in ["present", "followed"]:
follow_log(module, le_path, logs, name=p['name'], logtype=p['logtype'])
elif p["state"] in ["absent", "unfollowed"]:
unfollow_log(module, le_path, logs)
# import module snippets
from ansible.module_utils.basic import *
main()
import dj_database_url
import os
from .base import *
# Parse database configuration from $DATABASE_URL
DATABASES = { 'default': {} }
DATABASES['default'] = dj_database_url.config()
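# dj_database_url reads the $DATABASE_URL environment variable; an
# illustrative (made-up) value looks like:
#   DATABASE_URL=postgres://user:secret@localhost:5432/mydb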
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Allow all host headers
ALLOWED_HOSTS = ['*']
# Static asset configuration
BASE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),'..')
STATIC_ROOT = 'staticfiles'
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Usage: db_ext_split.py <src> <dst> <prob>
Options:
-h --help
"""
import os
import cv2
from glob import glob
from docopt import docopt
from mscr.split import Split, RandomSplitPredicate
from mscr.util import Crop
from mscr.data import MyProgressBar
PAD = 8
if __name__ == '__main__':
args = docopt(__doc__)
src = args['<src>']
dst = args['<dst>']
prob = float(args['<prob>'])
    split = Split(RandomSplitPredicate(p=prob))
    crop = Crop()
count = 0
if os.path.exists(src) and os.path.exists(dst):
filz = glob(os.path.join(src, '*.jpg'))
pbar = MyProgressBar(len(filz), 'extending db:')
for im in filz:
img = cv2.imread(im)
img = crop.run(img)
for bl in split.run(img):
out = os.path.join(dst, str(count).zfill(PAD) + '.jpg')
cv2.imwrite(out, bl.img)
count += 1
pbar.update()
pbar.finish()
else:
        print 'err: db_ext_split.py: path doesn\'t exist'
else:
logger.log(u'DB error: ' + ex(e), logger.ERROR)
raise
except sqlite3.DatabaseError as e:
logger.log(u'Fatal error executing query: ' + ex(e), logger.ERROR)
raise
return sqlResult
def select(self, query, args=None):
sqlResults = self.action(query, args).fetchall()
if sqlResults is None:
return []
return sqlResults
def upsert(self, tableName, valueDict, keyDict):
changesBefore = self.connection.total_changes
genParams = lambda myDict: [x + ' = ?' for x in myDict.keys()]
query = 'UPDATE [%s] SET %s WHERE %s' % (
tableName, ', '.join(genParams(valueDict)), ' AND '.join(genParams(keyDict)))
self.action(query, valueDict.values() + keyDict.values())
if self.connection.total_changes == changesBefore:
query = 'INSERT INTO [' + tableName + '] (' + ', '.join(valueDict.keys() + keyDict.keys()) + ')' + \
' VALUES (' + ', '.join(['?'] * len(valueDict.keys() + keyDict.keys())) + ')'
self.action(query, valueDict.values() + keyDict.values())
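    # Hypothetical usage sketch (table and key names invented): update the
    # matching row if one exists, otherwise insert a new one:
    #   db.upsert('shows', {'show_name': 'Foo'}, {'indexer_id': 123})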
def tableInfo(self, tableName):
# FIXME ? binding is not supported here, but I cannot find a way to escape a string manually
sqlResult = self.select('PRAGMA table_info([%s])' % tableName)
columns = {}
for column in sqlResult:
columns[column['name']] = {'type': column['type']}
return columns
# http://stackoverflow.com/questions/3300464/how-can-i-get-dict-from-sqlite-query
@staticmethod
def _dict_factory(cursor, row):
d = {}
for idx, col in enumerate(cursor.description):
            d[col[0]] = row[idx]
return d
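    # Minimal wiring sketch (the connection setup itself is above this
    # excerpt): with the factory installed, SELECTs yield dicts:
    #   self.connection.row_factory = self._dict_factory
    #   self.connection.execute('SELECT 1 AS one').fetchone()  # -> {'one': 1}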
def hasTable(self, tableName):
return len(self.select('SELECT 1 FROM sqlite_master WHERE name = ?;', (tableName, ))) > 0
def hasColumn(self, tableName, column):
return column in self.tableInfo(tableName)
def hasIndex(self, tableName, index):
sqlResults = self.select('PRAGMA index_list([%s])' % tableName)
for result in sqlResults:
if result['name'] == index:
return True
return False
def addColumn(self, table, column, type='NUMERIC', default=0):
self.action('ALTER TABLE [%s] ADD %s %s' % (table, column, type))
self.action('UPDATE [%s] SET %s = ?' % (table, column), (default,))
def close(self):
"""Close database connection"""
if getattr(self, 'connection', None) is not None:
self.connection.close()
self.connection = None
def sanityCheckDatabase(connection, sanity_check):
sanity_check(connection).check()
class DBSanityCheck(object):
def __init__(self, connection):
self.connection = connection
def check(self):
pass
def upgradeDatabase(connection, schema):
logger.log(u'Checking database structure...', logger.MESSAGE)
_processUpgrade(connection, schema)
def prettyName(class_name):
return ' '.join([x.group() for x in re.finditer('([A-Z])([a-z0-9]+)', class_name)])
def restoreDatabase(filename, version):
logger.log(u'Restoring database before trying upgrade again')
if not sickbeard.helpers.restoreVersionedFile(dbFilename(filename=filename, suffix='v%s' % version), version):
logger.log_error_and_exit(u'Database restore failed, abort upgrading database')
return False
else:
return True
def _processUpgrade(connection, upgradeClass):
instance = upgradeClass(connection)
logger.log(u'Checking %s database upgrade' % prettyName(upgradeClass.__name__), logger.DEBUG)
if not instance.test():
logger.log(u'Database upgrade required: %s' % prettyName(upgradeClass.__name__), logger.MESSAGE)
try:
instance.execute()
except sqlite3.DatabaseError as e:
            # attempting to restore previous DB backup and perform upgrade again
try:
instance.execute()
except:
result = connection.select('SELECT db_version FROM db_version')
if result:
version = int(result[0]['db_version'])
# close db before attempting restore
connection.close()
if restoreDatabase(connection.filename, version):
logger.log_error_and_exit(u'Successfully restored database version: %s' % version)
else:
logger.log_error_and_exit(u'Failed to restore database version: %s' % version)
logger.log('%s upgrade completed' % upgradeClass.__name__, logger.DEBUG)
else:
logger.log('%s upgrade not required' % upgradeClass.__name__, logger.DEBUG)
for upgradeSubClass in upgradeClass.__subclasses__():
_processUpgrade(connection, upgradeSubClass)
# Base migration class. All future DB changes should be subclassed from this class
class SchemaUpgrade(object):
def __init__(self, connection):
self.connection = connection
def hasTable(self, tableName):
return len(self.connection.select('SELECT 1 FROM sqlite_master WHERE name = ?;', (tableName, ))) > 0
def hasColumn(self, tableName, column):
return column in self.connection.tableInfo(tableName)
def addColumn(self, table, column, type='NUMERIC', default=0):
self.connection.action('ALTER TABLE [%s] ADD %s %s' % (table, column, type))
self.connection.action('UPDATE [%s] SET %s = ?' % (table, column), (default,))
def dropColumn(self, table, column):
# get old table columns and store the ones we want to keep
result = self.connection.select('pragma table_info([%s])' % table)
keptColumns = [c for c in result if c['name'] != column]
keptColumnsNames = []
final = []
pk = []
# copy the old table schema, column by column
for column in keptColumns:
keptColumnsNames.append(column['name'])
cl = [column['name'], column['type']]
'''
To be implemented if ever required
if column['dflt_value']:
cl.append(str(column['dflt_value']))
if column['notnull']:
cl.append(column['notnull'])
'''
if int(column['pk']) != 0:
pk.append(column['name'])
b = ' '.join(cl)
final.append(b)
# join all the table column creation fields
final = ', '.join(final)
keptColumnsNames = ', '.join(keptColumnsNames)
# generate sql for the new table creation
if len(pk) == 0:
sql = 'CREATE TABLE [%s_new] (%s)' % (table, final)
else:
pk = ', '.join(pk)
sql = 'CREATE TABLE [%s_new] (%s, PRIMARY KEY(%s))' % (table, final, pk)
# create new temporary table and copy the old table data across, barring the removed column
self.connection.action(sql)
self.connection.action('INSERT INTO [%s_new] SELECT %s FROM [%s]' % (table, keptColumnsNames, table))
# copy the old indexes from the old table
result = self.connection.select("SELECT sql FROM sqlite_master WHERE tbl_name=? and type='index'", [table])
        # remove the old table and rename the new table to take its place
self.connection.action('DROP TABLE [%s]' % table)
self.connection.action('ALTER TABLE [%s_new] RENAME TO [%s]' % (table, table))
# write any indexes to the new table
if len(result) > 0:
for index in result:
self.connection.action(index['sql'])
# vacuum the db as we will have a lot of space to reclaim after dropping tables
self.connection.action('VACUUM')
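        # Design note: SQLite (before 3.35) has no ALTER TABLE ... DROP COLUMN,
        # hence the rebuild-and-rename above. Hypothetical call:
        #   self.dropColumn('tv_shows', 'tvr_name')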
def checkDBVersion(self):
return self.connection.checkDBVersion()
def incDBVersion(self):
new_version = self.checkDBVersion() + 1
self.connection.action('UPDATE db_version SET db_version = ?', [new_version])
return new_version
    def setDBVersion(self, new_version):
        self.connection.action('UPDATE db_version SET db_version = ?', [new_version])
        return new_version
from marshmallow import Schema, fields, post_load, EXCLUDE
from ..resource import Resource
from collections import namedtuple
class Plan(Resource):
"""
https://dev.chartmogul.com/v1.0/reference#plans
"""
_path = "/plans{/uuid}"
_root_key = 'plans'
    _many = namedtuple('Plans', [_root_key, "current_page", "total_pages"])
class _Schema(Schema):
uuid = fields.String()
data_source_uuid = fields.String()
name = fields.String()
interval_count = fields.Int()
interval_unit = fields.String()
external_id = fields.String()
@post_load
        def make(self, data, **kwargs):
return Plan(**data)
_schema = _Schema(unknown=EXCLUDE)
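    # A small illustration (field values invented): loading a dict through the
    # schema produces a Plan instance via the @post_load hook above:
    #   Plan._schema.load({'uuid': 'pl_1', 'name': 'Gold', 'interval_count': 1})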
def import_mod_attr(path):
    """
    Import a module attribute given a dotted-path string or an object;
    return module object and object
    """
if isinstance(path, string_types):
module, func = path.rsplit('.', 1)
mod = __import__(module, fromlist=['*'])
f = getattr(mod, func)
else:
f = path
mod = inspect.getmodule(path)
return mod, f
def import_attr(func):
mod, f = import_mod_attr(func)
return f
def myimport(module):
mod = __import__(module, fromlist=['*'])
return mod
class MyPkg(object):
@staticmethod
def resource_filename(module, path):
mod = myimport(module)
p = os.path.dirname(mod.__file__)
if path:
return os.path.join(p, path)
else:
return p
@staticmethod
def resource_listdir(module, path):
d = MyPkg.resource_filename(module, path)
return os.listdir(d)
@staticmethod
def resource_isdir(module, path):
d = MyPkg.resource_filename(module, path)
return os.path.isdir(d)
try:
import pkg_resources as pkg
except ImportError:
pkg = MyPkg
def extract_file(module, path, dist, verbose=False, replace=True):
outf = os.path.join(dist, os.path.basename(path))
# d = pkg.get_distribution(module)
# if d.has_metadata('zip-safe'):
# f = open(outf, 'wb')
# f.write(pkg.resource_string(module, path))
# f.close()
# if verbose:
# print 'Info : Extract %s/%s to %s' % (module, path, outf)
# else:
import shutil
inf = pkg.resource_filename(module, path)
sfile = os.path.basename(inf)
if os.path.isdir(dist):
dfile = os.path.join(dist, sfile)
else:
dfile = dist
f = os.path.exists(dfile)
if replace or not f:
shutil.copy2(inf, dfile)
if verbose:
print('Copy %s to %s' % (inf, dfile))
def extract_dirs(mod, path, dst, verbose=False, exclude=None, exclude_ext=None, recursion=True, replace=True):
"""
mod name
path mod path
dst output directory
    recursion True will extract all sub modules of mod
"""
default_exclude = ['.svn', '_svn', '.git']
default_exclude_ext = ['.pyc', '.pyo', '.bak', '.tmp']
exclude = exclude or []
exclude_ext = exclude_ext or []
# log = logging.getLogger('uliweb')
if not os.path.exists(dst):
os.makedirs(dst)
if verbose:
print('Make directory %s' % dst)
for r in pkg.resource_listdir(mod, path):
if r in exclude or r in default_exclude:
continue
fpath = os.path.join(path, r)
if pkg.resource_isdir(mod, fpath):
if recursion:
extract_dirs(mod, fpath, os.path.join(dst, r), verbose, exclude, exclude_ext, recursion, replace)
else:
ext = os.path.splitext(fpath)[1]
if ext in exclude_ext or ext in default_exclude_ext:
continue
extract_file(mod, fpath, dst, verbose, replace)
def match(f, patterns):
    from fnmatch import fnmatch
    for x in patterns:
        if fnmatch(f, x):
            return True
    return False
def walk_dirs(path, include=None, include_ext=None, exclude=None,
exclude_ext=None, recursion=True, file_only=False):
"""
path directory path
    recursion True will extract all sub modules of mod
"""
default_exclude = ['.svn', '_svn', '.git']
default_exclude_ext = ['.pyc', '.pyo', '.bak', '.tmp']
exclude = exclude or []
exclude_ext = exclude_ext or []
include_ext = include_ext or []
include = include or []
if not os.path.exists(path):
        return  # PEP 479: end a generator with return, not StopIteration
for r in os.listdir(path):
if match(r, exclude) or r in default_exclude:
continue
if include and r not in include:
continue
fpath = os.path.join(path, r)
if os.path.isdir(fpath):
if not file_only:
yield os.path.normpath(fpath).replace('\\', '/')
if recursion:
for f in walk_dirs(fpath, include, include_ext, exclude,
exclude_ext, recursion, file_only):
yield os.path.normpath(f).replace('\\', '/')
else:
ext = os.path.splitext(fpath)[1]
if ext in exclude_ext or ext in default_exclude_ext:
continue
if include_ext and ext not in include_ext:
continue
yield os.path.normpath(fpath).replace('\\', '/')
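# A short usage sketch (hypothetical directory): list every .py file under
# 'src', skipping VCS folders and compiled files by default:
#   for f in walk_dirs('src', include_ext=['.py'], file_only=True):
#       print(f)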
def copy_dir(src, dst, verbose=False, check=False, processor=None):
import shutil
# log = logging.getLogger('uliweb')
def _md5(filename):
try:
import hashlib
a = hashlib.md5()
except ImportError:
import md5
a = md5.new()
a.update(open(filename, 'rb').read())
return a.digest()
if not os.path.exists(dst):
os.makedirs(dst)
if verbose:
print("Processing %s" % src)
for r in os.listdir(src):
if r in ['.svn', '_svn', '.git']:
continue
fpath = os.path.join(src, r)
if os.path.isdir(fpath):
if os.path.abspath(fpath) != os.path.abspath(dst):
copy_dir(fpath, os.path.join(dst, r), verbose, check, processor)
else:
continue
else:
ext = os.path.splitext(fpath)[1]
if ext in ['.pyc', '.pyo', '.bak', '.tmp']:
continue
df = os.path.join(dst, r)
if check:
if os.path.exists(df):
a = _md5(fpath)
b = _md5(df)
                    if a != b:
                        print("Error: target file %s already exists and is not "
                              "the same as source file %s, so copy failed" % (df, fpath))
else:
if processor:
if processor(fpath, dst, df):
continue
shutil.copy2(fpath, dst)
if verbose:
print("Copy %s to %s" % (fpath, dst))
else:
if processor:
if processor(fpath, dst, df):
continue
shutil.copy2(fpath, dst)
if verbose:
print("Copy %s to %s" % (fpath, dst))
def copy_dir_with_check(dirs, dst, verbose=False, check=True, processor=None):
# log = logging.getLogger('uliweb')
for d in dirs:
        if not os.path.exists(d):
            continue
copy_dir(d, dst, verbose, check, processor)
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
ESCAPE_DCT = {
'\\': '\\\\',
'"': '\\"',
'\b': '\\b',
'\f': '\\f',
'\n': '\\n',
'\r': '\\r',
'\t': '\\t',
}
for i in range(0x20):
ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
def encode_basestring(s):
"""Return a JSON representation of a Python string
"""
def replace(match):
return ESCAPE_DCT[match.group(0)]
return '"' + ESCAPE.sub(replace, s) + '"'
def encode_unicode(s):
"""Return a JSON representation of a Python unicode
"""
return '"' + s.encode('unicode_escape') + '"'
def simple_value(v):
from uliweb.i18n.lazystr import LazyString
if callable(v):
v = v()
if isinstance(v, LazyString) or isinstance(v, decimal.Decimal) or isinstance(v, datetime.datetime):
return str(v)
else:
return v
class JSONEncoder(object):
def __init__(self, encoding='utf-8', unicode=False, default=None):
self.encoding = encoding
self.unicode = unicode
self.default = default
def iterencode(self, obj, key=False):
if self.default:
x = self.default(obj)
obj = x
if isinstance(obj, str):
if self.unicode:
yield encode_unicode(unicode(obj, self.encoding))
else:
yield encode_basestring(obj)
elif isinstance(obj, unicode):
if self.unicode:
yield encode_unicode(obj)
else:
                yield encode_basestring(obj.encode(self.encoding))
import unittest
from ietfparse.datastructures import ContentType
class ContentTypeCreationTests(unittest.TestCase):
def test_that_primary_type_is_normalized(self):
self.assertEqual('contenttype',
ContentType('COntentType', 'b').content_type)
def test_that_subtype_is_normalized(self):
self.assertEqual('subtype',
ContentType('a', ' SubType ').content_subtype)
def test_that_content_suffix_is_normalized(self):
self.assertEqual(
'json',
ContentType('a', 'b', content_suffix=' JSON').content_suffix)
def test_that_parameter_names_are_casefolded(self):
self.assertDictEqual({'key': 'Value'},
ContentType('a', 'b', parameters={
'KEY': 'Value'
}).parameters)
class ContentTypeStringificationTests(unittest.TestCase):
def test_that_simple_case_works(self):
self.assertEqual('primary/subtype',
str(ContentType('primary', 'subtype')))
def test_that_parameters_are_sorted_by_name(self):
ct = ContentType('a', 'b', {'one': '1', 'two': '2', 'three': 3})
self.assertEqual('a/b; one=1; three=3; two=2', str(ct))
def test_that_content_suffix_is_appended(self):
ct = ContentType('a', 'b', {'foo': 'bar'}, content_suffix='xml')
self.assertEqual('a/b+xml; foo=bar', str(ct))
class ContentTypeComparisonTests(unittest.TestCase):
def test_type_equals_itself(self):
self.assertEqual(ContentType('a', 'b'), ContentType('a', 'b'))
def test_that_differing_types_are_not_equal(self):
self.assertNotEqual(ContentType('a', 'b'), ContentType('b', 'a'))
def test_that_differing_suffixes_are_not_equal(self):
self.assertNotEqual(ContentType('a', 'b', content_suffix='1'),
ContentType('a', 'b', content_suffix='2'))
def test_that_differing_params_are_not_equal(self):
self.assertNotEqual(ContentType('a', 'b', parameters={'one': '1'}),
ContentType('a', 'b'))
def test_that_case_is_ignored_when_comparing_types(self):
self.assertEqual(ContentType('text', 'html', {'level': '3.2'}, 'json'),
ContentType('Text', 'Html', {'Level': '3.2'}, 'JSON'))
    def test_primary_wildcard_is_less_than_anything_else(self):
self.assertLess(ContentType('*', '*'), ContentType('text', 'plain'))
self.assertLess(ContentType('*', '*'), ContentType('text', '*'))
def test_subtype_wildcard_is_less_than_concrete_types(self):
        self.assertLess(ContentType('application', '*'),
ContentType('application', 'json'))
self.assertLess(ContentType('text', '*'),
ContentType('application', 'json'))
def test_type_with_fewer_parameters_is_lesser(self):
self.assertLess(
ContentType('application', 'text', parameters={'1': 1}),
ContentType('application', 'text', parameters={
'1': 1,
'2': 2
}))
def test_otherwise_equal_types_ordered_by_primary(self):
self.assertLess(ContentType('first', 'one', parameters={'1': 1}),
ContentType('second', 'one', parameters={'1': 1}))
def test_otherwise_equal_types_ordered_by_subtype(self):
self.assertLess(
ContentType('application', 'first', parameters={'1': 1}),
ContentType('application', 'second', parameters={'1': 1}))
import unittest
import mock
from mopidy_playbackdefaults import PlaybackDefaultsFrontend
class PlaybackDefaultsFrontendTest(unittest.TestCase):
def test_no_settings(self):
config = {'playbackdefaults': {'default_random': '', 'default_repeat': '', 'default_consume': '', 'default_single': ''}}
core = mock.Mock()
self.assertEqual(core.tracklist.set_random.call_count, 0)
self.assertEqual(core.tracklist.set_repeat.call_count, 0)
self.assertEqual(core.tracklist.set_consume.call_count, 0)
self.assertEqual(core.tracklist.set_single.call_count, 0)
PlaybackDefaultsFrontend(config, core)
self.assertEqual(core.tracklist.set_random.call_count, 0)
self.assertEqual(core.tracklist.set_repeat.call_count, 0)
self.assertEqual(core.tracklist.set_consume.call_count, 0)
self.assertEqual(core.tracklist.set_single.call_count, 0)
def test_random(self):
config = {'playbackdefaults': {'default_random': '', 'default_repeat': '', 'default_consume': '', 'default_single': ''}}
core = mock.Mock()
self.assertEqual(core.tracklist.set_random.call_count, 0)
self.assertEqual(core.tracklist.set_repeat.call_count, 0)
self.assertEqual(core.tracklist.set_consume.call_count, 0)
self.assertEqual(core.tracklist.set_single.call_count, 0)
config['playbackdefaults']['default_random'] = True
PlaybackDefaultsFrontend(config, core)
core.tracklist.set_random.assert_called_once_with(True)
config['playbackdefaults']['default_random'] = False
PlaybackDefaultsFrontend(config, core)
self.assertEqual(core.tracklist.set_random.call_count, 2)
core.tracklist.set_random.assert_called_with(False)
self.assertEqual(core.tracklist.set_repeat.call_count, 0)
self.assertEqual(core.tracklist.set_consume.call_count, 0)
self.assertEqual(core.tracklist.set_single.call_count, 0)
def test_repeat(self):
config = {'playbackdefaults': {'default_random': '', 'default_repeat': '', 'default_consume': '', 'default_single': ''}}
core = mock.Mock()
self.assertEqual(core.tracklist.set_random.call_count, 0)
self.assertEqual(core.tracklist.set_repeat.call_count, 0)
self.assertEqual(core.tracklist.set_consume.call_count, 0)
self.assertEqual(core.tracklist.set_single.call_count, 0)
config['playbackdefaults']['default_repeat'] = True
PlaybackDefaultsFrontend(config, core)
core.tracklist.set_repeat.assert_called_once_with(True)
config['playbackdefaults']['default_repeat'] = False
PlaybackDefaultsFrontend(config, core)
self.assertEqual(core.tracklist.set_repeat.call_count, 2)
core.tracklist.set_repeat.assert_called_with(False)
self.assertEqual(core.tracklist.set_random.call_count, 0)
self.assertEqual(core.tracklist.set_consume.call_count, 0)
self.assertEqual(core.tracklist.set_single.call_count, 0)
def test_consume(self):
config = {'playbackdefaults': {'default_random': '', 'default_repeat': '', 'default_consume': '', 'default_single': ''}}
core = mock.Mock()
self.assertEqual(core.tracklist.set_random.call_count, 0)
self.assertEqual(core.tracklist.set_repeat.call_count, 0)
self.assertEqual(core.tracklist.set_consume.call_count, 0)
self.assertEqual(core.tracklist.set_single.call_count, 0)
config['playbackdefaults']['default_consume'] = True
PlaybackDefaultsFrontend(config, core)
core.tracklist.set_consume.assert_called_once_with(True)
        config['playbackdefaults']['default_consume'] = False
PlaybackDefaultsFrontend(config, core)
self.assertEqual(core.tracklist.set_consume.call_count, 2)
        core.tracklist.set_consume.assert_called_with(False)
self.assertEqual(core.tracklist.set_random.call_count, 0)
self.assertEqual(core.tracklist.set_repeat.call_count, 0)
self.assertEqual(core.tracklist.set_single.call_count, 0)
def test_single(self):
config = {'playbackdefaults': {'default_random': '', 'default_repeat': '', 'default_consume': '', 'default_single': ''}}
core = mock.Mock()
self.assertEqual(core.tracklist.set_random.call_count, 0)
self.assertEqual(core.tracklist.set_repeat.call_count, 0)
self.assertEqual(core.tracklist.set_consume.call_count, 0)
self.assertEqual(core.tracklist.set_single.call_count, 0)
config['playbackdefaults']['default_single'] = True
PlaybackDefaultsFrontend(config, core)
core.tracklist.set_single.assert_called_once_with(True)
config['playbackdefaults']['default_single'] = False
PlaybackDefaultsFrontend(config, core)
self.assertEqual(core.tracklist.set_single.call_count, 2)
core.tracklist.set_single.assert_called_with(False)
self.assertEqual(core.tracklist.set_random.call_count, 0)
self.assertEqual(core.tracklist.set_repeat.call_count, 0)
self.assertEqual(core.tracklist.set_consume.call_count, 0)
# Copyright 2016 Rudrajit Tapadar
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from base import TestPscan
import errno
import mock
from StringIO import StringIO
import sys
class TestScan(TestPscan):
@mock.patch('socket.socket.connect')
def test_tcp_port_open(self, mock_connect):
hosts = "127.0.0.1"
ports = "22"
mock_connect.return_value = None
scanner = self.get_scanner_obj(hosts, ports)
scanner.tcp()
h = self.get_host_obj(hosts, [22])
h[0].ports[0].status = "Open"
self.assertPortsEqual(scanner.hosts[0].ports,
h[0].ports)
@mock.patch('socket.socket.connect')
def test_tcp_port_closed(self, mock_connect):
hosts = "127.0.0.1"
ports = "22"
mock_connect.side_effect = IOError()
scanner = self.get_scanner_obj(hosts, ports)
scanner.tcp()
h = self.get_host_obj(hosts, [22])
h[0].ports[0].status = "Closed"
self.assertPortsEqual(scanner.hosts[0].ports,
h[0].ports)
@mock.patch('socket.socket.connect')
def test_tcp_port_range(self, mock_connect):
hosts = "127.0.0.1"
ports = "21-22"
mock_connect.return_value = None
mock_connect.side_effect = [IOError(), None]
scanner = self.get_scanner_obj(hosts, ports)
scanner.tcp()
h = self.get_host_obj(hosts, [21, 22])
h[0].ports[0].status = "Closed"
h[0].ports[1].status = "Open"
self.assertPortsEqual(scanner.hosts[0].ports,
h[0].ports)
@mock.patch('socket.socket.connect')
def test_show_open_port(self, mock_connect):
hosts = "127.0.0.1"
ports = "5672"
mock_connect.return_value = None
scanner = self.get_scanner_obj(hosts, ports)
scanner.tcp()
s = sys.stdout
o = StringIO()
sys.stdout = o
output = (
"Showing results for target: 127.0.0.1\n"
"+------+----------+-------+---------+\n"
"| Port | Protocol | State | Service |\n"
"+------+----------+-------+---------+\n"
"| 5672 | TCP | Open | amqp |\n"
"+------+----------+-------+---------+"
)
scanner.show()
        self.assertEqual(o.getvalue().strip(), output)
sys.stdout = s
@mock.patch('socket.socket.connect')
def test_show_closed_port(self, mock_connect):
hosts = "127.0.0.1"
ports = "5673"
mock_connect.side_effect = IOError()
        scanner = self.get_scanner_obj(hosts, ports)
scanner.tcp()
s = sys.stdout
o = StringIO()
sys.stdout = o
output = (
"Showing results for target: 127.0.0.1\n"
"+------+----------+--------+---------+\n"
"| Port | Protocol | State | Service |\n"
"+------+----------+--------+---------+\n"
"| 5673 | TCP | Closed | unknown |\n"
"+------+----------+--------+---------+"
)
scanner.show()
self.assertEqual(o.getvalue().strip(), output)
sys.stdout = s
@mock.patch('socket.socket.connect')
def test_show_closed_port_range(self, mock_connect):
hosts = "127.0.0.1"
ports = "5673-5674"
mock_connect.side_effect = IOError(errno.ECONNREFUSED)
scanner = self.get_scanner_obj(hosts, ports)
scanner.tcp()
s = sys.stdout
o = StringIO()
sys.stdout = o
output = (
"Showing results for target: 127.0.0.1\n"
"All 2 scanned ports are closed on the target."
)
scanner.show()
self.assertEqual(o.getvalue().strip(), output)
sys.stdout = s
@mock.patch('socket.socket.connect')
def test_show_partially_open_port_range(self, mock_connect):
hosts = "127.0.0.1"
ports = "5671-5672"
mock_connect.return_value = None
mock_connect.side_effect = [IOError(), None]
scanner = self.get_scanner_obj(hosts, ports)
scanner.tcp()
s = sys.stdout
o = StringIO()
sys.stdout = o
output = (
"Showing results for target: 127.0.0.1\n"
"+------+----------+-------+---------+\n"
"| Port | Protocol | State | Service |\n"
"+------+----------+-------+---------+\n"
"| 5672 | TCP | Open | amqp |\n"
"+------+----------+-------+---------+"
)
scanner.show()
        self.assertEqual(o.getvalue().strip(), output)
        sys.stdout = s
@mock.patch('socket.socket.connect')
def test_udp_port_open(self, mock_connect):
hosts = "127.0.0.1"
ports = "53"
mock_connect.return_value = None
scanner = self.get_scanner_obj(hosts, ports)
scanner.udp()
#h = self.get_host_obj(hosts, [22])
#h[0].ports[0].status = "Open"
#self.assertPortsEqual(scanner.hosts[0].ports,
# h[0].ports)
"""
this example shows how to freeze degrees of freedom using the Lennard Jones potential as
an example
"""
import numpy as np
from pele.potentials import LJ, FrozenPotentialWrapper
from pele.optimize import mylbfgs
def main():
natoms = 4
pot = LJ()
reference_coords = np.random.uniform(-1, 1, [3 * natoms])
print reference_coords
# freeze the first two atoms (6 degrees of freedom)
frozen_dof = range(6)
fpot = FrozenPotentialWrapper(pot, reference_coords, frozen_dof)
reduced_coords = fpot.get_reduced_coords(reference_coords)
print "the energy in the full representation:"
print pot.getEnergy(reference_coords)
print "is the same as the energy in the reduced representation:"
print fpot.getEnergy(reduced_coords)
    ret = mylbfgs(reduced_coords, fpot)
print "after a minimization the energy is ", ret.energy, "and the rms gradient is", ret.rms
print "the coordinates of the frozen degrees of freedom are unchanged"
print "starting coords:", reference_coords
print "minimized coords:", fpot.get_full_coords(ret.coords)
if __name__ == "__main__":
    main()
#!/usr/bin/env python3
from optparse import OptionParser
from datetime import datetime
from datetime import timedelta
import pyopencl as cl
import numpy as np
import time
MIN_ELAPSED = 0.25
KEY_LENGTH = 64
BUF_MAX_SIZE = 1024 * 1024
class BurnInTarget():
def __init__(self, platform, kernel):
self.name = platform.get_info(cl.platform_info.NAME)
self.devices = platform.get_devices()
self.context = cl.Context(self.devices)
self.queue = cl.CommandQueue(self.context)
self.program = cl.Program(self.context, kernel).build()
self.minXSize = 16
self.minYSize = 16
# Host bufs
self.hostInfoBuf = np.array(range(2), dtype=np.uint32)
self.hostInfoBuf[0] = 8 # Rounds for each kernel
self.hostInfoBuf[1] = 8
        # Use randint: rand() yields floats in [0, 1) that astype(np.uint32)
        # would truncate to all zeros, defeating the random input data.
        self.hostInBuf = np.random.randint(0, 2**32, size=BUF_MAX_SIZE, dtype=np.uint32)
self.hostOutBuf = np.array(range(BUF_MAX_SIZE), dtype=np.uint32)
# Device bufs
self.devInfoBuf = cl.Buffer(self.context, cl.mem_flags.READ_ONLY | cl.mem_flags.COPY_HOST_PTR, hostbuf=self.hostInfoBuf)
self.devInBuf = cl.Buffer(self.context, cl.mem_flags.READ_ONLY | cl.mem_flags.COPY_HOST_PTR, hostbuf=self.hostInBuf)
self.devOutBuf = cl.Buffer(self.context, cl.mem_flags.WRITE_ONLY, self.hostOutBuf.nbytes)
def burn(self, shape):
event = self.program.burn(self.queue, shape, None, self.devInfoBuf, self.devInBuf, self.devOutBuf)
return event
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-k", "--kernel",
dest="kernel", default='default.cl',
help="Kernel to burn in")
(opts, args) = parser.parse_args()
kernel = open(opts.kernel).read()
# Get all available device and create context for each
platforms = cl.get_platforms()
targets = []
for p in platforms:
vendor = p.get_info(cl.platform_info.VENDOR)
name = p.get_info(cl.platform_info.NAME)
if('Intel' in vendor):
print("Found platform: %s" % name)
targets.append(BurnInTarget(p, kernel))
# Tune runtime for each target
for t in targets:
xsize = 8
ysize = 32
print("Adjusting runtime for platform: %s" % t.name)
elapsed = timedelta()
while(elapsed.total_seconds() < MIN_ELAPSED):
if(elapsed.total_seconds() < (MIN_ELAPSED/2)):
xsize = xsize << 1
else:
xsize = xsize + 8
# Get some power credit
time.sleep(10)
startTime = datetime.utcnow()
event = t.burn((xsize, ysize))
event.wait()
endTime = datetime.utcnow()
elapsed = endTime - startTime
print("Kernel Elapsed Time: %s" % elapsed.total_seconds())
t.minXSize = xsize
t.minYSize = ysize
print("Final min size: %d, %d" % (t.minXSize, t.minYSize))
# Burn in one by one
time.sleep(20)
for t in targets:
print("Burning platform: %s" % t.name)
startTime = datetime.utcnow()
events =[]
# Make sure this is longer than Tu of PL2
for i in range(16):
events.append(t.burn((8*t.minXSize, 2*t.minYSize)))
for e in events:
e.wait()
endTime = datetime.utcnow()
elapsed = endTime - startTime
print("Kernel Elapsed Time: %s" % elapsed.total_seconds())
time.sleep(20)
#
# # All together
# events =[]
# print("Burning platforms all together, at the same time")
# startTime = datetime.utcnow()
# for i in range(8):
# for t in targets:
# events.append(t.burn((8*t.minXSize, 2*t.minYSize)))
#
# for e in events:
# e.wait()
#
# endTime = datetime.utcnow()
# elapsed = endTime - startTime
# print("Kernel Elapsed Time: %s" % elapsed.total_seconds())
# time.sleep(30)
#
# time.sleep(30)
# print("Burning platforms with sequence")
# events =[]
# startTime = datetime.utcnow()
# for i in range(8):
# for t in sorted(targets, key=lambda x:x.name):
#             events.append(t.burn((8*t.minXSize, 2*t.minYSize)))
# time.sleep(2)
#
#     for e in events:
# e.wait()
#
# endTime = datetime.utcnow()
# elapsed = endTime - startTime
# print("Kernel Elapsed Time: %s" % elapsed.total_seconds())
#
# time.sleep(30)
# print("Burning platforms with reverse sequence")
# events =[]
# startTime = datetime.utcnow()
# for i in range(8):
# for t in sorted(targets, key=lambda x:x.name, reverse=True):
# events.append(t.burn((8*t.minXSize, 2*t.minYSize)))
# time.sleep(2)
#
# for e in events:
# e.wait()
#
# endTime = datetime.utcnow()
# elapsed = endTime - startTime
# print("Kernel Elapsed Time: %s" % elapsed.total_seconds())
print("Burn in test done", flush=True)
time.sleep(2)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from database import Measure
import json
import requests
import time
import yaml
with open("param.yml", 'r') as stream:
try:
        param = yaml.safe_load(stream)
except yaml.YAMLError as e:
print(e)
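# For reference, param.yml is expected to look roughly like this (keys are
# inferred from the lookups below; values are placeholders):
#   delay: 60
#   listItems: [item1]
#   item1:
#     name: Solar roof
#     type: production
#     lat: 48.85
#     lon: 2.35
#     url: http://example.org
#     sensorId: 1
#     login: user
#     password: secret
#     source: CW
#     flux: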
def getkwatthours(url, data, headers, sensorId, t0, t1):
sumEnergy=0
try:
result=requests.post(
url + '/api/' + str(sensorId) + '/get/kwatthours/by_time/' + str(t0) + '/' + str(t1),
headers=headers,
data=data)
    except requests.exceptions.RequestException as e:
print("getkwatthours() - ERROR : requests.post \n-> %s" % e)
else:
parsed_json=json.loads(result.text)
try:
            sumEnergy=(parsed_json['data']['value']) * 10000 # / 100 for test and debug
except Exception as e:
sumEnergy=0
print("getkwatthours() - ERROR : json.loads(result.text) \n-> %s" % e)
print("getkwatthours() : " + str(sumEnergy))
return sumEnergy
def getkwatthoursOem(url, data, headers, sensorId):
sumEnergy=0
try:
result=requests.post(
url + '/emoncms/feed/value.json?id=' + str(sensorId) + data,
headers=headers,
            data='')
    except requests.exceptions.RequestException as e:
print("getkwatthoursOem() - ERROR : requests.post \n-> %s" % e)
else:
sumEnergy=json.loads(result.text)
print("getkwatthours() : " + str(sumEnergy))
return sumEnergy
def get_all_data():
# this function collects data from all sensors (connected to each piece of work (=item))
# definition of the time interval, in order to collect data
time0 = time.time()
delay = int(param['delay'])
time.sleep(delay)
time1 = time.time()
# getting energy produced or consumed for each item
headers = {'Content-Type': 'application/json', }
items = param['listItems'] # items must be defined in param.yml
allData = []
# loop on items to retrieve consumption or production data over the defined interval
for item in items:
itemData = {}
itemData["id"] = item
itemData["name"] = param[item]['name']
itemData["type"] = param[item]['type']
itemData["lat"] = param[item]['lat']
itemData["lon"] = param[item]['lon']
itemUrl = param[item]['url']
itemSensorId = param[item]['sensorId']
itemLogin = param[item]['login']
itemPswd = param[item]['password']
itemSource = param[item]['source']
try:
if itemSource == 'CW':
data='login=' + itemLogin + '&password=' + itemPswd
value = getkwatthours(itemUrl, data, headers, itemSensorId, time0, time1)
else:
data='&apikey=' + itemLogin
value = getkwatthoursOem(itemUrl, data, headers, itemSensorId)
except Exception as e:
value=0
print("get_all_data() - ERROR : api call (%s) \n-> %s" % (itemSource, e))
itemData["value"] = value
allData.append(itemData.copy())
print('get_all_data(): time : ' + time.strftime("%D %H:%M:%S", time.localtime(int(time1))) + ', allData = '
+ str(allData))
return allData
def get_last_data():
items = param['listItems'] # items must be defined in param.yml
lastData = []
for item in items:
query = Measure.query.filter_by(item=item).order_by(Measure.timestamp.desc()).first()
print(query)
itemData = {}
itemData["id"] = item
itemData["name"] = param[item]['name']
itemData["type"] = param[item]['type']
itemData["lat"] = param[item]['lat']
itemData["lon"] = param[item]['lon']
if query is None:
itemData["value"] = 0
else:
itemData["value"] = query.value
lastData.append(itemData.copy())
    print('get_last_data(): lastData = ' + str(lastData))
return lastData
def get_flux_data():
items=param['listItems'] # items must be defined in param.yml
fluxData=[]
for item in items:
if param[item]['flux'] is not None:
for item2 in param[item]['flux']:
itemData={}
fr = str(param[item]['lon']) + ',' + str(param[item]['lat'])
to = str(param[item2]['lon']) + ',' + str(param[item2]['lat'])
nm = param[item]['name'] + ',' + param[item2]['name']
itemData["from"] = fr.split(',')
itemData["to"] = to.split(',')
itemData["labels"] = nm.split(',')
itemData["color"] = "#ff3a31"
fluxData.append(itemData.copy())
    print('get_flux_data(): fluxData = ' + str(fluxData))
    return fluxData
"""
Module contains helper functions
"""
def get_user(username, db):
"""
    Usage: queries through the database and returns the user
    object with the passed username argument
:return: User object or None if no such user
"""
for user in db:
if user.username.lower() == username.lower():
return user
return None
def get_bucket(title, db, current_user):
"""
    Usage: queries through the database and returns the bucket
    object with the passed title argument
:return: Bucket object or None if no such Bucket
| """
for bucket in db:
if bucket.title.lower() == title.lower():
return bucket
return None
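# Illustrative use (hypothetical in-memory lists as `db`): both lookups are
# case-insensitive, so get_user('ALICE', users) matches a username 'alice'.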
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from corehq.doctypemigrations.djangomigrations import assert_initial_complete
from corehq.doctypemigrations.migrator_instances import users_migration
from corehq.sql_db.operations import HqRunPython
class Migration(migrations.Migration):
dependencies = [
('doctypemigrations', '0003_doctypemigration_cleanup_complete'),
]
operations = [
        HqRunPython(assert_initial_complete(users_migration))
]
GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ShinySDR is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ShinySDR. If not, see <http://www.gnu.org/licenses/>.
"""API for plugins, and related things.
This module contains objects and interfaces used by plugins to declare
the functionality they provide.
"""
# pylint: disable=signature-differs
# (pylint is confused by interfaces)
from __future__ import absolute_import, division, print_function, unicode_literals
from collections import namedtuple
import six
from twisted.plugin import IPlugin
from zope.interface import Attribute, Interface, implementer
from shinysdr.i.modes import IModeDef
from shinysdr.types import EnumRow
__all__ = [] # appended later
class IDemodulatorFactory(Interface):
def __call__(mode, input_rate, context):
"""
Returns a new IDemodulator.
mode: unicode, the mode to be demodulated (should be one the factory/class was declared to support)
input_rate: float, sample rate the demodulator must accept
context: an IDemodulatorContext
May support additional keyword arguments as supplied by unserialize_exported_state.
"""
__all__.append('IDemodulatorFactory')
class IDemodulator(Interface):
"""
Demodulators may also wish to implement:
IDemodulatorModeChange
ITunableDemodulator
Additional constraints:
    The object must also be a GNU Radio block with one gr_complex input, and output as described by get_output_type().
"""
def get_band_shape():
"""
Returns a BandShape object describing the portion of its input signal which the demodulator uses (typically, the shape of its filter).
Should be exported, typically like:
@exported_value(type=BandShape, changes='never')
This is used to display the filter on-screen and to determine when the demodulator's input requirements are satisfied by the device's tuning.
"""
def get_output_type():
"""
Return the SignalType of the demodulator's output.
        The output must be stereo audio, mono audio, or nothing. Note that stereo audio is represented as a vector of two floats, not as two output ports.
"""
__all__.append('IDemodulator')
class IDemodulatorContext(Interface):
def rebuild_me():
"""Request that | this demodulator be discarded and an identically configured copy be created.
This is needed when something such as the output type of the demodulator changes; it may also be used any time constructing a new demodulator is more convenient than changing the internal structure of an existing one.
"""
def lock():
"""
Use this method instead of gr.hier_block2.lock().
This differs in that it will avoid acquiring the lock if it is already held (implementing a "recursive lock"). It is therefore suitable for use when the demodulator is being invoked in a situation where the lock may already be held.
"""
def unlock():
"""Use in pairs with IDemodulatorContext.lock()."""
def output_message(message):
"""Report a message output from the demodulator, such as in demodulators which handle packets rather than audio.
The message object should provide shinysdr.telemetry.ITelemetryMessage.
"""
def get_absolute_frequency_cell():
"""Returns a cell containing the original RF carrier frequency of the signal to be demodulated — the frequency the signal entering the demodulator has been shifted down from."""
class ITunableDemodulator(IDemodulator):
"""If a demodulator implements this interface, then there may be a arbitrary frequency offset in its input signal, which it will be informed of via the set_rec_freq method."""
def set_rec_freq(freq):
"""
Set the nominal (carrier) frequency offset of the signal to be demodulated within the input signal.
"""
__all__.append('ITunableDemodulator')
class IDemodulatorModeChange(IDemodulator):
"""If a demodulator implements this interface, then it may be asked to reconfigure itself to demodulate a different mode."""
def can_set_mode(mode):
"""
Return whether this demodulator can reconfigure itself to demodulate the specified mode.
If it returns False, it will typically be replaced with a newly created demodulator.
"""
def set_mode(mode):
"""
Per can_set_mode.
"""
__all__.append('IDemodulatorModeChange')
# TODO: BandShape doesn't really belong here but it is related to IDemodulator. Find better location.
# All frequencies are relative to the demodulator's input signal (i.e. baseband)
_BandShape = namedtuple('BandShape', [
'stop_low', # float; lower edge of stopband
'pass_low', # float; lower edge of passband
'pass_high', # float; upper edge of passband
'stop_high', # float; upper edge of stopband
'markers', # dict of float to string; labels of significant frequencies (e.g. FSK mark and space)
])
class BandShape(_BandShape):
@classmethod
def lowpass_transition(cls, cutoff, transition, markers=None):
if markers is None:
markers = {}
h = transition / 2.0
return cls(
stop_low=-cutoff - h,
pass_low=-cutoff + h,
pass_high=cutoff - h,
stop_high=cutoff + h,
markers=markers)
@classmethod
def bandpass_transition(cls, transition, low, high, markers=None):
if markers is None:
markers = {}
h = transition / 2.0
return cls(
stop_low=low - h,
pass_low=low + h,
pass_high=high - h,
stop_high=high + h,
markers=markers)
__all__.append('BandShape')
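# A worked example of the helper above (numbers purely illustrative): a
# lowpass shape with a 1 kHz cutoff and a 200 Hz transition band:
#   BandShape.lowpass_transition(cutoff=1000, transition=200)
#   # -> BandShape(stop_low=-1100.0, pass_low=-900.0,
#   #              pass_high=900.0, stop_high=1100.0, markers={})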
class IModulatorFactory(Interface):
def __call__(mode, context):
"""
Returns a new IModulator.
mode: unicode, the mode to be modulated (should be one the factory/class was declared to support)
context: always None, will later become IModulatorContext when that exists.
May support additional keyword arguments as supplied by unserialize_exported_state.
"""
class IModulator(Interface):
"""
Additional constraints:
The object must also be a GNU Radio block with one gr_complex output, and input as described by get_input_type().
"""
def can_set_mode(mode):
"""
Return whether this modulator can reconfigure itself to modulate the specified mode.
If it returns False, it will typically be replaced with a newly created modulator.
"""
def set_mode(mode):
"""
Per can_set_mode.
"""
def get_input_type():
"""
Return the SignalType of the modulator's required input, which must currently be mono audio at any sample rate.
"""
def get_output_type():
"""
Return the SignalType of the modulator's output, which must currently be IQ at any sample rate.
"""
__all__.append('IModulator')
class IHasFrequency(Interface):
# TODO: document this
def get_freq():
pass
__all__.append('IHasFrequency')
@implementer(IPlugin, IModeDef)
class ModeDef(object):
# Twisted plugin system caches whether-a-plugin-class-was-found permanently, so we need to avoid _not_ having a ModeDef if the plugin has some sort of dependency it checks -- thus the 'available' flag can be used to hide a mode while still having an _IModeDef
def __init__(self,
mode,
info,
demod_class,
mod_class=None,
            unavailability=None):
class HueLight(CoordinatorEntity, LightEntity):
"""Representation of a Hue light."""
def __init__(self, coordinator, bridge, is_group, light, supported_features):
"""Initialize the light."""
super().__init__(coordinator)
self.light = light
self.bridge = bridge
self.is_group = is_group
self._supported_features = supported_features
if is_group:
self.is_osram = False
self.is_philips = False
self.is_innr = False
self.gamut_typ = GAMUT_TYPE_UNAVAILABLE
self.gamut = None
else:
self.is_osram = light.manufacturername == "OSRAM"
self.is_philips = light.manufacturername == "Philips"
self.is_innr = light.manufacturername == "innr"
self.gamut_typ = self.light.colorgamuttype
self.gamut = self.light.colorgamut
_LOGGER.debug("Color gamut of %s: %s", self.name, str(self.gamut))
if self.light.swupdatestate == "readytoinstall":
err = (
"Please check for software updates of the %s "
"bulb in the Philips Hue App."
)
_LOGGER.warning(err, self.name)
if self.gamut:
if not color.check_valid_gamut(self.gamut):
err = "Color gamut of %s: %s, not valid, setting gamut to None."
_LOGGER.warning(err, self.name, str(self.gamut))
self.gamut_typ = GAMUT_TYPE_UNAVAILABLE
self.gamut = None
@property
def unique_id(self):
"""Return the unique ID of this Hue light."""
return self.light.uniqueid
@property
def device_id(self):
"""Return the ID of this Hue light."""
return self.unique_id
@property
def name(self):
"""Return the name of the Hue light."""
return self.light.name
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
if self.is_group:
bri = self.light.action.get("bri")
else:
bri = self.light.state.get("bri")
if bri is None:
return bri
return hue_brightness_to_hass(bri)
@property
def _color_mode(self):
"""Return the hue color mode."""
if self.is_group:
return self.light.action.get("colormode")
return self.light.state.get("colormode")
@property
def hs_color(self):
"""Return the hs color value."""
mode = self._color_mode
source = self.light.action if self.is_group else self.light.state
if mode in ("xy", "hs") and "xy" in source:
return color.color_xy_to_hs(*source["xy"], self.gamut)
return None
@property
def color_temp(self):
"""Return the CT color value."""
# Don't return color temperature unless in color temperature mode
if self._color_mode != "ct":
return None
if self.is_group:
return self.light.action.get("ct")
return self.light.state.get("ct")
@property
def min_mireds(self):
"""Return the coldest color_temp that this light supports."""
if self.is_group:
return super().min_mireds
        min_mireds = self.light.controlcapabilities.get("ct", {}).get("min")
        # We filter out '0' too, which can be incorrectly reported by 3rd party bulbs
if not min_mireds:
return super().min_mireds
return min_mireds
@property
def max_mireds(self):
"""Return the warmest color_temp that this light supports."""
if self.is_group:
            return super().max_mireds
max_mireds = self.light.controlcapabilities.get("ct", {}).get("max")
if not max_mireds:
return super().max_mireds
return max_mireds
@property
def is_on(self):
"""Return true if device is on."""
if self.is_group:
return self.light.state["any_on"]
return self.light.state["on"]
@property
def available(self):
"""Return if light is available."""
return self.coordinator.last_update_success and (
self.is_group
or self.bridge.allow_unreachable
or self.light.state["reachable"]
)
@property
def supported_features(self):
"""Flag supported features."""
return self._supported_features
@property
def effect(self):
"""Return the current effect."""
return self.light.state.get("effect", None)
@property
def effect_list(self):
"""Return the list of supported effects."""
if self.is_osram:
return [EFFECT_RANDOM]
return [EFFECT_COLORLOOP, EFFECT_RANDOM]
@property
def device_info(self):
"""Return the device info."""
if self.light.type in ("LightGroup", "Room", "Luminaire", "LightSource"):
return None
return {
"identifiers": {(HUE_DOMAIN, self.device_id)},
"name": self.name,
"manufacturer": self.light.manufacturername,
# productname added in Hue Bridge API 1.24
# (published 03/05/2018)
"model": self.light.productname or self.light.modelid,
# Not yet exposed as properties in aiohue
"sw_version": self.light.raw["swversion"],
"via_device": (HUE_DOMAIN, self.bridge.api.config.bridgeid),
}
async def async_turn_on(self, **kwargs):
"""Turn the specified or all lights on."""
command = {"on": True}
if ATTR_TRANSITION in kwargs:
command["transitiontime"] = int(kwargs[ATTR_TRANSITION] * 10)
if ATTR_HS_COLOR in kwargs:
if self.is_osram:
command["hue"] = int(kwargs[ATTR_HS_COLOR][0] / 360 * 65535)
command["sat"] = int(kwargs[ATTR_HS_COLOR][1] / 100 * 255)
else:
# Philips hue bulb models respond differently to hue/sat
# requests, so we convert to XY first to ensure a consistent
# color.
xy_color = color.color_hs_to_xy(*kwargs[ATTR_HS_COLOR], self.gamut)
command["xy"] = xy_color
elif ATTR_COLOR_TEMP in kwargs:
temp = kwargs[ATTR_COLOR_TEMP]
command["ct"] = max(self.min_mireds, min(temp, self.max_mireds))
if ATTR_BRIGHTNESS in kwargs:
command["bri"] = hass_to_hue_brightness(kwargs[ATTR_BRIGHTNESS])
flash = kwargs.get(ATTR_FLASH)
if flash == FLASH_LONG:
command["alert"] = "lselect"
del command["on"]
elif flash == FLASH_SHORT:
command["alert"] = "select"
del command["on"]
elif not self.is_innr:
command["alert"] = "none"
if ATTR_EFFECT in kwargs:
effect = kwargs[ATTR_EFFECT]
if effect == EFFECT_COLORLOOP:
command["effect"] = "colorloop"
elif effect == EFFECT_RANDOM:
command["hue"] = random.randrange(0, 65535)
command["sat"] = random.randrange(150, 254)
else:
command["effect"] = "none"
if self.is_group:
await self.bridge.async_request_call(
partial(self.light.set_action, **command)
)
else:
await self.bridge.async_request_call(
partial(self.light.set_state, **command)
)
await self.coordinator.async_request_refresh()
async def async_turn_off(self, **kwargs):
"""Turn the specified or all lights off."""
command = {"on": False}
if ATTR_TRANSITION in kwargs:
command["transitiontime"] = int(kwargs[ATTR_TRANSITION] * 10)
flash = kwargs.get(ATTR_FLASH)
if flash == FLASH_LONG:
command["alert"] = "lselect"
del command["on"]
elif flash == FLASH_SHORT:
command["alert"] = "select"
del command["on"]
elif not self.is_innr:
command["alert"] |
# vertex.py
# This module contains all the things for creating
# and using vertices...starting with vector, and
# going on to edge and face.
# Observe two things, though:
# First, I tried to keep small numbers as "zeros"
# by rounding divisions (see __div__ and norm) to
# five significant digits. So if a number is one
# like 5.2e-6, it will be rounded to 0.
# Second, to make sure that division works
# appropriately, I initialized the original vector
# with float(etc).
from math import sqrt
import pygame
ROUNDOFF = 5
class vector(object):
def __init__(self, x, y, z):
# Note that despite the w, these are
# still 3D vectors.
# Also note that I'd like to remove the w, but I cannot for now.
self.x = float(x)
self.y = float(y)
self.z = float(z)
# self.w = 1.0
def __add__(self, v):
x = self.x+v.x
y = self.y+v.y
z = self.z+v.z
return vector(x, y, z)
def __sub__(self, v):
x = self.x-v.x
y = self.y-v.y
z = self.z-v.z
return vector(x, y, z)
def __mul__(self, s):
x = round(self.x*s, ROUNDOFF)
y = round(self.y*s, ROUNDOFF)
z = round(self.z*s, ROUNDOFF)
return vector(x, y, z)
def __div__(self, s):
x = round(self.x/s, ROUNDOFF)
y = round(self.y/s, ROUNDOFF)
z = round(self.z/s, ROUNDOFF)
return vector(x, y, z)
def __neg__(self):
return vector(-self.x, -self.y, -self.z)
def dot(self, v):
return round(self.x*v.x + self.y*v.y + self.z*v.z, ROUNDOFF)
def cross(self, v):
x = round(self.y*v.z - self.z*v.y, ROUNDOFF)
y = round(self.z*v.x - self.x*v.z, ROUNDOFF)
z = round(self.x*v.y - self.y*v.x, ROUNDOFF)
return vector(x, y, z)
def dist(self):
return round(sqrt(self.x*self.x + self.y*self.y + self.z*self.z), ROUNDOFF)
# return sqrt(self.x*self.x + self.y*self.y + self.z*self.z)
# For some reason, I can't get full rotations to work out
# if I don't allow for the possibility that self.dist() might
# be zero...
#def norm(self):
# return self/self.dist()
def norm(self):
d = self.dist()
if d == 0:
return self
else:
return self/d
def __str__(self):
return "<%s, %s, %s>" % (self.x, self.y, self.z)
# Here are a few vector constants that are nice to
# define: in particular, note that [Left, Up, Fwd]
# is a left-hand coord system, while [Right, Up, Fwd]
# represents a right-hand one.
Zero = vector(0, 0, 0)
Up = vector(0, 1, 0)
Left = vector(1, 0, 0)
Right = vector(-1, 0, 0)
Fwd = vector(0, 0, 1)
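# A quick sanity check of the algebra above (results follow directly from the
# definitions; ROUNDOFF trims float noise):
#   Up.cross(Fwd)      # -> <1.0, 0.0, 0.0>, i.e. Left
#   Left.dot(Up)       # -> 0.0
#   (Up + Fwd).dist()  # -> 1.41421 (sqrt(2) rounded to ROUNDOFF digits)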
# I defined these functions separately from the
# classes because it seems more natural to say
# "x = dist(v)" rather than "x = v.dist()", etc.
def dist(v):
return round(sqrt(v.x*v.x + v.y*v.y + v.z*v.z), ROUNDOFF)
def norm(v):
    return v/dist(v)
def orthonorm(x, y, z):
"""Returns a tuple of orthonormal vectors via the
Gramm-Schmidt process. See Apostal's Linear
Algebra, pg. 111, or another LinAlg book for
the theoretical background of this process."""
q1 = x
q2 = y - q1*(y.dot(q1)/q1.dot(q1))
q3 = z - q1*(z.dot(q1)/q1.dot(q1)) - \
q2*(z.dot(q2)/q2.dot(q2))
return (q1.norm(), q2.norm(), q3.norm())
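# A tiny worked example (vectors chosen so the arithmetic is easy to verify):
#   q1, q2, q3 = orthonorm(vector(2, 0, 0), vector(1, 3, 0), vector(1, 1, 4))
#   # q1 = <1.0, 0.0, 0.0>, q2 = <0.0, 1.0, 0.0>, q3 = <0.0, 0.0, 1.0>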
# Now that we have our vector defined, we could
# define the things that will make our vector a
# vertex.
class edge(object):
def __init__(self, v1, v2, color='none'):
"""I | nitializes an edge for a wireframe.
v1, v2 are vertex indices, and color is the
default color with which to draw the edge.
For purposes of comparison, each edge is stored
with the first vertex index less than or equal
to the second vertex index."""
if v1 < v2:
self.v1 = v1
self.v2 = v2
else:
self.v1 = v2
self.v2 = v1
self.color = color
def __eq__(self, e):
"""Ret | urns true if both vertex indices are equal."""
return (self.v1 == e.v1) and (self.v2 == e.v2)
def __ne__(self, e):
"""Returns true if one one of the vertex indices
is unequal."""
return (self.v1 != e.v1) or (self.v2 != e.v2)
def __str__(self):
return "[ %s, %s ] %s" % (self.v1, self.v2, self.color)
class face(object):
def __init__(self, vertices, edges, color='none'):
"""Initializes a face for a wireframe.
In addition to vertices and color, this class also
keeps track of edges, center, normal and norm*Vertex
of the face.
Note that the normal is calculated assuming that
the vertices are in a clockwise order around the
        face when viewed from the outside of the wireframe."""
# This is a list of indices for vertices.
self.vertices = vertices
self.color = color
# This is a list of the indices of the edges.
self.edges = edges
# Note that, ideally, this class should have a
# function that calculates its center and normal;
# since only a wireframe class has this information,
        # however, only a wireframe class can calculate it!
def __str__(self):
return "%s <%s>" % (self.vertices, self.color)
# These colors are included with vertices so that
# faces and edges can have colors.
#egacolors = { 'none':-1, 'black':0, 'blue':1,
#'green':2, 'cyan':3, 'red':4, 'purple':5,
#'brown':6, 'gray':7, 'brightblack':8,
#'darkgray':8, 'brightblue':9, 'brightgreen':10,
#'brightcyan':11, 'brightred':12, 'pink':12,
#'brightpurple':13, 'brightbrown':14, 'yellow':14,
#'brightgray': 15, 'white':15 }
# These colors are included with vertices so that
# faces and edges can have colors.
# Now that I'm using pygame, these need to be tweaked!
egacolor = { 'none': -1, 'black': pygame.color.Color('black'),
'blue': pygame.color.Color('blue'), 'green': pygame.color.Color('green'),
'cyan': pygame.color.Color('cyan'), 'red': pygame.color.Color('red'),
'purple': pygame.color.Color('purple'), 'brown': pygame.color.Color('brown'), 'gray': pygame.color.Color('gray'),
'darkgray': pygame.color.Color('darkgray'), 'lightblue': pygame.color.Color('lightblue'),
'lightgreen': pygame.color.Color('lightgreen'), 'lightcyan': pygame.color.Color('lightcyan'),
'pink': pygame.color.Color('pink'),
'lightpurple': pygame.color.Color('red'), 'yellow': pygame.color.Color('yellow'),
'white': pygame.color.Color('white') }
bwcolor = {}
for i in range(0, 16):
bwcolor['black%s' % (i)] = (i*16, i*16, i*16, 255)
|
"""
WSGI config for bball_intel project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "bball_intel.settings.base")
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
application = Cling(get_wsgi_application())
|
env.virtualenv_path = "%(user_home)s/.virtualenvs/%(virtualenv_name)s/lib/python%(python_version)s/site-packages/" % env
env.work_on = "workon %(virtualenv_name)s; " % env
env.backup_root = "%(user_home)s/backups" % env
env.offsite_backup_dir = "aglzen@quantumimagery.com:/home/aglzen/%(project_name)s/data/" % env
env.update(overrides)
def setup_env_centos(project_name, system_user="root", initial_settings={}, overrides={}):
global env
env.dry_run = False
env.project_name = project_name
env.system_user = system_user
env.is_webfaction = False
env.is_centos = True
# Custom Config Start
env.python_version = "2.6"
env.parent = "origin"
env.working_branch = "master"
env.live_branch = "live"
env.staging_branch = "staging"
env.python = "python"
env.is_local = False
env.local_working_path = "~/workingCopy"
env.media_dir = "media"
env.admin_symlink = "admin"
env.production_hosts = []
env.staging_hosts = []
env.production_db_hosts = []
env.staging_db_hosts = []
env.update(initial_settings)
env.production_hosts = ["%(system_user)s@%(h)s" % {'system_user':env.system_user,'h':h} for h in env.production_hosts]
env.staging_hosts = ["%(system_user)s@%(h)s" % {'system_user':env.system_user,'h':h} for h in env.staging_hosts]
env.production_db_hosts = ["%(system_user)s@%(h)s" % {'system_user':env.system_user,'h':h} for h in env.production_db_hosts]
env.staging_db_hosts = ["%(system_user)s@%(h)s" % {'system_user':env.system_user,'h':h} for h in env.staging_db_hosts]
if env.system_user == "root":
env.user_home = "/root"
else:
env.user_home = "/home/%(system_user)s" % env
env.virtualenv_name = env.project_name
env.staging_virtualenv_name = "staging_%(virtualenv_name)s" % env
env.live_app_dir = "/var/www"
env.git_path = "%(live_app_dir)s/%(project_name)s.git" % env
env.live_static_dir = "%(git_path)s/media" % env
env.staging_app_dir = env.live_app_dir
env.staging_static_dir = env.live_static_dir
env.virtualenv_path = "%(user_home)s/.virtualenvs/%(virtualenv_name)s/lib/python%(python_version)s/site-packages/" % env
env.work_on = "workon %(virtualenv_name)s; " % env
env.backup_root = "%(user_home)s/backups" % env
env.offsite_backup_dir = "aglzen@quantumimagery.com:/home/aglzen/%(project_name)s/data/" % env
env.update(overrides)
def setup_backup_env_webfaction():
env.current_backup_file = "%(backup_dir)s/currentBackup.json" % env
env.daily_backup_script = daily_backup_script()
env.weekly_backup_script = weekly_backup_script()
env.monthly_backup_script = monthly_backup_script()
def live(dry_run="False"):
if not confirm("You do mean live, right?"):
abort("Bailing out!")
else:
env.dry_run = dry_run.lower() == "true"
env.python = "python%(python_version)s" % env
env.role = "live"
env.settings_file = "envs.%(role)s" % env
env.hosts = env.production_hosts
env.base_path = env.live_app_dir
env.git_path = "%(live_app_dir)s/%(project_name)s.git" % env
env.backup_dir = "%(user_home)s/backups/%(project_name)s" % env
env.media_path = env.live_static_dir
env.pull_branch = env.live_branch
env.release_tag = "%(role)s_release" % env
setup_backup_env_webfaction()
def staging(dry_run="False"):
env.dry_run = dry_run.lower() == "true"
env.python = "python%(python_version)s" % env
env.role = "staging"
env.settings_file = "envs.%(role)s" % env
env.hosts = env.staging_hosts
env.base_path = env.staging_app_dir
env.git_path = "%(staging_app_dir)s/%(project_name)s.git" % env
env.media_path = env.staging_static_dir
env.backup_dir = "%(user_home)s/backups/staging_%(project_name)s" % env
env.pull_branch = env.live_branch
env.release_tag = "%(role)s_release" % env
env.virtualenv_name = env.staging_virtualenv_name
env.virtualenv_path = "%(user_home)s/.virtualenvs/%(virtualenv_name)s/lib/python%(python_ver | sion)s/site-packages/" % env
env.work_on = "workon %(virtualenv_name)s; " % env
setup_backup_env_webfaction()
def localhost(dry_run="False"):
env.dry_run = dry_run.lower() == "true"
env.hosts = ['localhost']
env.role = "localhost"
env.settings_file = "envs.dev" % env
env.is_webfaction = False
env.is_centos = False
env.base_path = "%(local_working_path)s/%(project_name)s" % env
env.git_path = env.base_path
env.backup_dir = "%(local_working_path)s/db" % env
env.pull_branch = env.working_branch
env.release_tag = "%(role)s_release" % env
env.virtualenv_path = "~/.virtualenvs/%(virtualenv_name)s/lib/python%(python_version)s/site-packages/" % env
env.is_local = True
env.media_path = "%(base_path)s/%(media_dir)s" % env
setup_backup_env_webfaction()
def live_db():
env.hosts = env.production_db_hosts
def staging_db():
env.hosts = env.staging_db_hosts
def live_celery():
env.hosts = env.production_celery_hosts
def staging_celery():
env.hosts = env.staging_celery_hosts
def has_separate_celery_server():
return hasattr(env,"%s_celery_hosts" % env.role)
env.roledefs = {
'live': [live],
'staging': [staging],
'local':[localhost],
'live_db':[live_db],
'staging_db':[staging_db],
'live_celery':[live_celery],
'staging_celery':[staging_celery],
}
def safe(function_call, *args, **kwargs):
try:
ret = function_call(*args, **kwargs)
return ret
except:
pass
def safe_magic_run(function_call, *args, **kwargs):
with settings(warn_only=True):
return magic_run(function_call, *args, **kwargs)
# Custom Config End
def magic_run(function_call, custom_env=None):
global env
prev_env = env
if custom_env:
env = custom_env
if env.dry_run:
print function_call % env
else:
if env.is_local:
ret = local(function_call % env)
else:
ret = run(function_call % env)
env = prev_env
return ret
def setup_server():
if env.is_webfaction:
try:
safe_magic_run("mkdir %(user_home)s/src")
magic_run("echo \"alias l='ls -agl'\nalias python=python%(python_version)s\nexport WORKON_HOME=$HOME/.virtualenvs\nsource ~/bin/virtualenvwrapper.sh\" >> %(user_home)s/.bashrc")
except:
pass
try:
magic_run("git --version")
except:
env.git_file_version = "1.7.3.3"
magic_run("cd src;wget http://kernel.org/pub/software/scm/git/git-%(git_file_version)s.tar.bz2")
magic_run("cd %(user_home)s/src/; tar fxj git-%(git_file_version)s.tar.bz2;")
magic_run("cd %(user_home)s/src/git-%(git_file_version)s; ./configure --prefix=%(user_home)s/git/; make; make install;")
magic_run("echo \"export PATH=$PATH:/%(user_home)s/git/bin/\" >> %(user_home)s/.bashrc")
try:
magic_run("pip --version")
except:
try:
safe_magic_run("mkdir %(user_home)s/lib:")
except:
pass
try:
safe_magic_run("mkdir %(user_home)s/lib/python%(python_version)s")
except:
pass
magic_run("easy_install-%(python_version)s pip")
magic_run("pip install --upgrade pip virtualenv virtualenvwrapper")
safe_magic_run("mkdir %(user_home)s/.virtualenvs")
magic_run("mkvirtualenv --no-site-packages %(virtualenv_name)s;")
magic_run("echo 'cd %(git_path)s/' > %(user_home)s/.virtualenvs/%(virtualenv_name)s/bin/postactivate")
magic_run("echo 'export DJANGO_SETTINGS_MODULE=\"envs.%(role)s\"' >> %(user_home)s/.virtualenvs/%(virtualenv_name)s/bin/postactivate")
magic_run("echo 'export PYTHONPATH=\"%(user_home)s/.virtualenvs/%(virtualenv_name)s/lib/site-packages/:%(base_path)s/%(project_name)s\"' >> %(user_home)s/.virtualenvs/%(virtualenv_name)s/bin/postactivate")
safe_magic_run("mkdir %(base_path)s")
magic_run("git clone %(git_origin)s %(git_path)s")
magic_run("%(work_on)s git checkout %(pull_branch)s; git |
"""
clipocr1.py
Demonstrates a technique that often improves ocr quality on screen captures.
Reads an image from the system clipboard,
and writes to stdout various versions of the text recognized in the image.
Uses tesseract OCR.
The technique is based on judicious rescaling of image dimensions.
SIDE EFFECT:
Creates image files and text file in current working directory.
REQUIREMENTS:
Written and tested 2014 March, 2014 April
on an Ubuntu 12.04 system (64-bit Intel)
Relies on system having these python packages installed
(it's ok to install them as Ubuntu/Debian packages):
- wx
for portable clipboard access.
- PIL [can we make do with Pillow?]
for rescaling the image
NOTE: We might be able to get away with rewriting to use
the right version(s) of wx for this instead?
Relies on system having this software installed,
e.g. as an Ubuntu/Debian package:
- tesseract
the OCR software.
Conveniently, these packages are all open source.
COPYRIGHT:
Copyright (c) 2014 Chris Niswander.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import os
from PIL import Image
import wx # just to access the system clipboard.
def get_file_text(fname):
"""Reads the text out of the text file having pathname /fname/."""
with open(fname) as fin:
return fin.read()
def read_test1(fname):
"""Demonstrates OCRing the text from an image file named /fname/,
and printing it to stdout.
Makes multiple OCR attempts,
based on resizing the image to different size image files,
and prints multiple OCR attempts' text.
"""
def params_textname(params):
"""Given /params/, a resize method specification from resize_methods,
constructs a text string that can be used in a filename
for a resized/rescaled image.
"""
params = params[0][0], params[0][1], params[1]
return '_'.join([str(x).strip() for x in params])
# do ocr on original, non-rescaled image.
print 'ORIGINAL IMAGE:'
print do_ocr_to_imagefile(fname)
im1 = Image.open(fname)
# List of image resizing methods to try.
# Each method consists of:
# [Rescale factor tuple, image rescaling method].
# A rescale factor tuple is (width-rescale-factor, height-rescale-factor)
# Image rescaling method is given as eval()-able text because:
# - convenient for naming image files produced using that method.
resize_methods = [
[(2, 2), 'Image.BICUBIC'],
[(2, 2), 'Image.BILINEAR'],
[(3, 2), 'Image.BICUBIC'],
[(3, 2), 'Image.BILINEAR'],
[(3, 3), 'Image.BICUBIC'],
[(3, 3), 'Image.BILINEAR'],
]
for resize_method in resize_methods:
rescale = resize_method[0]
im_resized = im1.resize(
(im1.size[0] * rescale[0], im1.size[1] * rescale[1]),
eval(resize_method[1]))
resized_path = fname + '__' + params_textname(resize_method) + '.png'
print resized_path
im_resized.save(resized_path)
print do_ocr_to_imagefile(resized_path)
def do_ocr_to_imagefile(fname):
"""Runs tesseract command line utility on image file /fname/
and returns the perceived text.
SIDE EFFECTS:
Creates file 3.txt in current working directory.
"""
os.system('tesseract ' + fname + ' 3' )
# ^ OCR text from the file named /fname/, save the text to 3.txt.
return get_file_text('3.txt')
def save_clipboard(fname):
"""Saves an image from the system clipboard to the filename /fname/."""
app = wx.App()
if not wx.TheClipboard:
del app
raise Exception("can't get clipboard")
wx.TheClipboard.Open()
data = wx.BitmapDataObject()
clipboard_getdata_status = wx.TheClipboard.GetData(data)
wx.TheClipboard.Close()
if not clipboard_getdata_status:
del app
raise Exception("couldn't find image data in clipboard")
image = data.GetBitmap().ConvertToImage()
image.SaveFile(fname, 1) # 1 --> save as Windows bitmap.
del app
def clippy():
"""Demonstrates OCRing the text from an image in the system clipboard,
and printing it to stdout.
Makes multiple OCR attempts,
based on resizing the image to different sizes,
and prints multiple OCR attempts' text.
"""
clippy_fname = 'image_from_clipboard'
save_clipboard(clippy_fname)
read_test1(clippy_fname)
clippy()
#---------------------------------------------------------------------------
# Test code not normally called, but tester might run it from e.g. IDE.
def clear_clipboard():
"""Clear the clipboard, which can be useful for error testing."""
app = wx.App()
if not wx.TheClipboard:
del app
raise Exception("can't get clipboard")
wx.TheClipboard.Open()
wx.TheClipboard.Clear()
wx.TheClipboard.Close()
del app
|
#!/usr/bin/env python
import sys, os
import numpy as np
import logging
import IPython as ip
from IPython import parallel
from IPython.parallel.error import RemoteError
from msmbuilder import arglib
from msmbuilder import metrics
from msmbuilder import Project
from parallel_assign import remote, local
def setup_logger(console_stream=sys.stdout):
"""
Setup the logger
"""
formatter = logging.Formatter('%(name)s: %(asctime)s: %(message)s',
'%I:%M:%S %p')
console_handler = logging.StreamHandler(console_stream)
console_handler.setFormatter(formatter)
logger = logging.getLogger(os.path.split(sys.argv[0])[1])
logger.root.handlers = [console_handler]
return logger
def main(args, metric, logger):
project = Project.load_from(args.project)
if not os.path.exists(args.generators):
raise IOError('Could not open generators')
generators = os.path.abspath(args.generators)
output_dir = os.path.abspath(args.output_dir)
# connect to the workers
try:
json_file = client_json_file(args.profile, args.cluster_id)
client = parallel.Client(json_file, timeout=2)
except parallel.error.TimeoutError as exception:
msg = '\nparallel.error.TimeoutError: ' + str(exception)
msg += "\n\nPerhaps you didn't start a controller?\n"
msg += "(hint, use ipcluster start)"
print >> sys.stderr, msg
sys.exit(1)
lview = client.load_balanced_view()
# partition the frames into a bunch of vtrajs
all_vtrajs = local.partition(project, args.chunk_size)
# initialize the containers to save to disk
f_assignments, f_distances = local.setup_containers(output_dir,
project, all_vtrajs)
# get the chunks that have not been computed yet
valid_indices = np.where(f_assignments.root.completed_vtrajs[:] == False)[0]
remaining_vtrajs = np.array(all_vtrajs)[valid_indices].tolist()
logger.info('%d/%d jobs remaining', len(remaining_vtrajs), len(all_vtrajs))
# send the workers the files they need to get started
# dview.apply_sync(remote.load_gens, generators, project['ConfFilename'],
# metric)
# get the workers going
n_jobs = len(remaining_vtrajs)
amr = lview.map(remote.assign, remaining_vtrajs,
[generators]*n_jobs, [metric]*n_jobs, chunksize=1)
pending = set(amr.msg_ids)
while pending:
client.wait(pending, 1e-3)
# finished is the set of msg_ids that are complete
finished = pending.difference(client.outstanding)
# update pending to exclude those that just finished
pending = pending.difference(finished)
for msg_id in finished:
# we know these are done, so don't worry about blocking
async = client.get_result(msg_id)
try:
assignments, distances, chunk = async.result[0]
except RemoteError as e:
print 'Remote Error:'
e.print_traceback()
raise
vtraj_id = local.save(f_assignments, f_distances, assignments, distances, chunk)
log_status(logger, len(pending), n_jobs, vtraj_id, async)
f_assignments.close()
f_distances.close()
logger.info('All done, exiting.')
def log_status(logger, n_pending, n_jobs, job_id, async_result):
"""After a job has completed, log the status of the map to the console
Parameters
----------
logger : logging.Logger
logger to print to
n_pending : int
number of jobs still remaining
n_jobs : int
total number of jobs in map
job_id : int
the id of the job that just completed (between 0 and n_jobs)
async_result : IPython.parallel.client.asyncresult.AsyncMapResult
the container with the job results. includes not only the output,
but also metadata describing execution time, etc.
"""
if ip.release.version >= '0.13':
t_since_submit = async_result.completed - async_result.submitted
time_remaining = n_pending * (t_since_submit) / (n_jobs - n_pending)
td = (async_result.completed - async_result.started)
#this is equivalent to the td.total_seconds() method, which was
#introduced in python 2.7
execution_time = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / float(10**6)
eta = (async_result.completed + time_remaining).strftime('%I:%M %p')
else:
execution_time, eta = '?', '?'
logger.info('engine: %s; chunk %s; %ss; status: %s; %s/%s remaining; eta %s',
async_result.metadata.engine_id, job_id, execution_time,
async_result.status, n_pending, n_jobs, eta)
def setup_parser():
parser = arglib.ArgumentParser("""
Assign data that were not originally used in the clustering (because of
striding) to the microstates. This is applicable to all medoid-based clustering
algorithms, which includes all those implemented by Cluster.py except the
hierarchical methods. (For assigning to a hierarchical clustering, use
AssignHierarchical.py)
This code uses IPython.parallel to get parallelism across many nodes. Consult
the documentation for details on how to run it""", get_metric=True)
parser.add_argument('project')
parser.add_argument( dest='generators', help='''Trajectory file containing
the structures of each of the cluster centers.''')
parser.add_argument('output_dir')
parser.add_argument('chunk_size', help='''Number of frames to process per worker.
Each chunk requires some communication overhead, so you should use relatively large chunks''',
default=1000, type=int)
parser.add_argument('profile', help='IPython.parallel profile to use.', default='default')
parser.add_argument('cluster_id', help='IPython.parallel cluster_id to use', default='')
args, metric = parser.parse_args()
return args, metric
def client_json_file(profile='default', cluster_id=None):
"""
Get the path to the ipcontroller-client.json file. This really shouldn't be necessary, except that
IPython doesn't automatically insert the cluster_id in the way that it should. I submitted a pull
request to fix it, but here is a monkey patch in the mean time
"""
from IPython.core.profiledir import ProfileDir
from IPython.utils.path import get_ipython_dir
profile_dir = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), profile)
if not cluster_id:
client_json = 'ipcontroller-client.json'
else:
client_json = 'ipcontroller-%s-client.json' % cluster_id
filename = os.path.join(profile_dir.security_dir, client_json)
if not os.path.exists(filename):
raise ValueError('controller information not found at: %s' % filename)
return filename
if __name__ == '__main__':
args, metric = setup_parser()
logger = setup_logger()
main(args, metric, logger)
|
# Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from paasta_tools.api.client import get_paasta_oapi_client
from paasta_tools.api.client import renew_issue_cert
def test_get_paasta_oapi_client(system_paasta_config):
with mock.patch(
"paasta_tools.api.client.load_system_paasta_config", autospec=True
) as mock_load_system_paasta_config:
mock_load_system_paasta_config.return_value = system_paasta_config
client = get_paasta_oapi_client()
assert client
def test_renew_issue_cert():
with mock.patch(
"paasta_tools.api.client.get_secret_provider", autospec=True
) as mock_get_secret_provider:
mock_config = mock.Mock()
renew_issue_cert(mock_config, "westeros-prod")
mock_get_secret_provider.return_value.renew_issue_cert.assert_called_with(
pki_backend=mock_config.get_pki_backend(),
ttl=mock_config.get_auth_certificate_ttl(),
)
|
self.testbed = testbed.Testbed()
self.testbed.activate()
# If you have a NeedIndexError, here is the switch you need to flip to make
# the new required indexes to be automatically added. Change
# train_index_yaml to True to have index.yaml automatically updated, then
# run your test case. Do not forget to put it back to False.
train_index_yaml = False
if self.SKIP_INDEX_YAML_CHECK:
# See comment for skip_index_yaml_check above.
self.assertIsNone(self.APP_DIR)
self.testbed.init_app_identity_stub()
self.testbed.init_datastore_v3_stub(
require_indexes=not train_index_yaml and not self.SKIP_INDEX_YAML_CHECK,
root_path=self.APP_DIR,
consistency_policy=datastore_stub_util.PseudoRandomHRConsistencyPolicy(
probability=1))
self.testbed.init_logservice_stub()
self.testbed.init_memcache_stub()
self.testbed.init_modules_stub()
# Use mocked time in memcache.
memcache = self.testbed.get_stub(testbed.MEMCACHE_SERVICE_NAME)
memcache._gettime = lambda: int(utils.time_time())
# Email support.
self.testbed.init_mail_stub()
self.mail_stub = self.testbed.get_stub(testbed.MAIL_SERVICE_NAME)
self.old_send_to_admins = self.mock(
self.mail_stub, '_Dynamic_SendToAdmins', self._SendToAdmins)
self.testbed.init_taskqueue_stub()
self._taskqueue_stub = self.testbed.get_stub(testbed.TASKQUEUE_SERVICE_NAME)
self._taskqueue_stub._root_path = self.APP_DIR
self.testbed.init_user_stub()
def tearDown(self):
try:
if not self.has_failed():
remaining = self.execute_tasks()
self.assertEqual(0, remaining,
'Passing tests must leave behind no pending tasks, found %d.'
% remaining)
self.testbed.deactivate()
finally:
super(TestCase, self).tearDown()
def mock_now(self, now, seconds=0):
return mock_now(self, now, seconds)
def mock_milliseconds_since_epoch(self, milliseconds):
self.mock(utils, "milliseconds_since_epoch", lambda: milliseconds)
def _SendToAdmins(self, request, *args, **kwargs):
"""Make sure the request is logged.
See google_appengine/google/appengine/api/mail_stub.py around line 299,
MailServiceStub._SendToAdmins().
"""
self.mail_stub._CacheMessage(request)
return self.old_send_to_admins(request, *args, **kwargs)
def execute_tasks(self, **kwargs):
"""Executes enqueued tasks that are ready to run and return the number run.
A task may trigger another task.
Sadly, taskqueue_stub implementation does not provide a nice way to run
them so run the pending tasks manually.
"""
self.assertEqual([None], self._taskqueue_stub._queues.keys())
ran_total = 0
while True:
# Do multiple loops until no task was run.
ran = 0
for queue in self._taskqueue_stub.GetQueues():
if queue['mode'] == 'pull':
continue
for task in self._taskqueue_stub.GetTasks(queue['name']):
# Remove 2 seconds for jitter.
eta = task['eta_usec'] / 1e6 - 2
if eta >= time.time():
continue
self.assertEqual('POST', task['method'])
logging.info('Task: %s', task['url'])
self._post_task(task, **kwargs)
self._taskqueue_stub.DeleteTask(queue['name'], task['name'])
ran += 1
if not ran:
return ran_total
ran_total += ran
def execute_task(self, url, queue_name, payload):
"""Executes a specified task.
Raise error if the task isn't in the queue.
"""
task = self._find_task(url, queue_name, payload)
expected = {'url': url, 'queue_name': queue_name, 'payload': payload}
if not task:
raise AssertionError("Task is not enqueued. expected: %r" % expected)
self._post_task(task)
def _post_task(self, task, **kwargs):
# Not 100% sure why the Content-Length hack is needed, nor why the
# stub returns unicode values that break webtest's assertions.
body = base64.b64decode(task['body'])
headers = {k: str(v) for k, v in task['headers']}
headers['Content-Length'] = str(len(body))
try:
self.app.post(task['url'], body, headers=headers, **kwargs)
except:
logging.error(task)
raise
def _find_task(self, url, queue_name, payload):
for t in self._taskqueue_stub.GetTasks(queue_name):
if t['url'] != url:
continue
if t['queue_name'] != queue_name:
continue
if base64.b64decode(t['body']) != payload:
continue
return t
return None
class Endpoints(object):
"""Handles endpoints API calls."""
def __init__(self, api_service_cls, regex=None, source_ip='127.0.0.1'):
super(Endpoints, self).__init__()
self._api_service_cls = api_service_cls
kwargs = {}
if regex:
kwargs['regex'] = regex
self._api_app = webtest.TestApp(
endpoints_webapp2.api_server([self._api_service_cls], **kwargs),
extra_environ={'REMOTE_ADDR': source_ip})
def call_api(self, method, body=None, status=(200, 204)):
"""Calls endpoints API method identified by its name."""
# Because body is a dict and not a ResourceContainer, there's no way to tell
# which parameters belong in the URL and which belong in the body when the
# HTTP method supports both. However there's no harm in supplying parameters
# in both the URL and the body since ResourceContainers don't allow the same
# parameter name to be used in both places. Supplying parameters in both
# places produces no ambiguity and extraneous parameters are safely ignored.
assert hasattr(self._api_service_cls, method), method
info = getattr(self._api_service_cls, method).method_info
path = info.get_path(self._api_service_cls.api_info)
# Identify which arguments are path parameters and which are query strings.
body = body or {}
query_strings = []
for key, value in sorted(body.items()):
if '{%s}' % key in path:
path = path.replace('{%s}' % key, value)
else:
# We cannot tell if the parameter is a repeated field from a dict.
# Allow all query strings to be multi-valued.
if not isinstance(value, list):
value = [value]
for val in value:
query_strings.append('%s=%s' % (key, val))
if query_strings:
path = '%s?%s' % (path, '&'.join(query_strings))
path = '/_ah/api/%s/%s/%s' % (self._api_service_cls.api_info.name,
self._api_service_cls.api_info.version,
path)
try:
if info.http_method in ('GET', 'DELETE'):
return self._api_app.get(path, status=status)
return self._api_app.post_json(path, body, status=status)
except Exception as e:
# Useful for diagnosing issues in test cases.
logging.info('%s failed: %s', path, e)
raise
class EndpointsTestCase(TestCase):
"""Base class for a test case that tests Cloud Endpoint Service.
Usage:
class MyTestCase(test_case.EndpointsTestCase):
api_service_cls = MyEndpointsService
def test_stuff(self):
response = self.call_api('my_method')
self.assertEqual(...)
def test_expected_fail(self):
with self.call_should_fail(403):
self.call_api('protected_method')
"""
# Should be set in subclasses to a subclass of remote.Service.
api_service_cls = None
# Should be set in subclasses to a regular expression to match against path
# parameters. See components.endpoints_webapp2.adapter.api_server.
api_service_regex = None
# See call_should_fail.
expected_fail_status = None
_endpoints = None
def setUp(self):
super(EndpointsTestCase, self).setUp()
self._endpoints = Endpoints(
self.api_service_cls, regex=self.api_service_regex)
def call_api(self, method, body=None, status=(200, 204)):
if self.expected_fail_status:
status = self.expected_fail_status
return self._endpoints.call_api(method, body, status)
@contextlib.contextmanager
def call_should_fail(self, status):
"""Asserts that Endpoints call inside the guarded region of code fails. |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2012:
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
# Gregory Starck, g.starck@gmail.com
# Hartmut Goebel, h.goebel@goebel-consult.de
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
# This Class is a plugin for the Shinken Broker. It is in charge
# to brok log into the syslog
import syslog
from shinken.basemodule import BaseModule
from shinken.log import logger
properties = {
'daemons': ['broker'],
'type': 'syslog',
'external': False,
'phases': ['running'],
}
# called by the plugin manager to get a broker
def get_instance(plugin):
logger.info("Get a Syslog broker for plugin %s" % plugin.get_name())
#Catch errors
#path = plugin.path
instance = Syslog_broker(plugin)
return instance
# Class for the Syslog Broker
# Get broks and send their log messages to syslog
class Syslog_broker(BaseModule):
def __init__(self, modconf):
BaseModule.__init__(self, modconf)
# A log brok has just arrived, we send its message to the syslog
def manage_log_brok(self, b):
data = b.data
syslog.syslog(data['log'].encode('UTF-8'))
|
import sys
from captcha import get_version as get_captcha_version
from setuptools import find_packages, setup
from setuptools.command.test import test as test_command
class Tox(test_command):
user_options = [("tox-args=", "a", "Arguments to pass to tox")]
def initialize_options(self):
test_command.initialize_options(self)
self.tox_args = None
def finalize_options(self):
test_command.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
# import here, cause outside the eggs aren't loaded
import tox
import shlex
args = self.tox_args
if args:
args = shlex.split(self.tox_args)
errno = tox.cmdline(args=args)
sys.exit(errno)
install_requires = ["Django >= 2.2", "Pillow >=6.2.0", "django-ranged-response == 0.2.0"]
EXTRAS_REQUIRE = {"test": ("testfixtures",)}
with open("README.rst") as readme:
long_description = readme.read()
setup(
name="django-simple-captcha",
version=get_captcha_version(),
description="A very simple, yet powerful, Django captcha applica | tion",
long_description=long_description,
author="Marco Bonetti",
author_email="mbonetti@gmail.com",
url="https://github.com/mbi/django-simple-captcha",
license="MIT",
packages=find_packages(exclude=["testproject", "testproject.*"]),
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Topic :: Security",
"Topic :: Internet :: WWW/HTTP",
"Framework :: Django",
],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
extras_require=EXTRAS_REQUIRE,
tests_require=["tox"],
cmdclass={"test": Tox},
)
|
return exp.FieldEqual(self, other)
def __ne__(self, other):
return exp.FieldNotEqual(self, other)
def __invert__(self):
other = copy.copy(self)
other.inv = not other.inv
return other
# Expression
def __call__(self, request):
value = self.__get__(request)
result = False if value is None else value
if self.inv:
result = not result
return result
def __str__(self):
return '{inv}{op}'.format(
inv='not ' if self.inv else '',
op=self.name,
)
def compile(self, symbols):
field_key = symbols.field(self)
return '{inv}request[{field}]'.format(
inv='not ' if self.inv else '',
field=field_key,
)
class Boolean(BooleanMixin, pilo.fields.Boolean, exp.UnaryOp, PathMixin):
type = bool
class BooleanSubField(BooleanMixin, exp.SubField):
type = bool
class StringMixin(object):
"""
Mix-in for adding string expression capabilities to a field with type
``rump.type.str``.
"""
def __eq__(self, other):
return exp.FieldEqual(self, other)
def __ne__(self, other):
return exp.FieldNotEqual(self, other)
def contains(self, item):
return exp.FieldContains(self, item)
def in_(self, others):
return exp.FieldIn(self, others)
def match(self, pattern, ignore_case=False):
flags = 0
if ignore_case:
flags |= re.I
pattern_re = re.compile(pattern, flags)
return exp.FieldMatch(self, pattern_re)
def startswith(self, prefix):
return exp.FieldStartswith(self, prefix)
def endswith(self, suffix):
return exp.FieldEndswith(self, suffix)
class String(pilo.fields.String, PathMixin, StringMixin):
type = str
class StringSubField(exp.SubField, StringMixin):
type = str
class IntegerMixin(object):
"""
Mix-in for adding integer expression capabilities to a field with type
``rump.type.int``.
"""
def __eq__(self, other):
return exp.FieldEqual(self, other)
def __ne__(self, other):
return exp.FieldNotEqual(self, other)
def __lt__(self, other):
return exp.FieldLessThan(self, other)
def __le__(self, other):
return exp.FieldLessThanEqual(self, other)
def __gt__(self, other):
return exp.FieldGreaterThan(self, other)
def __ge__(self, other):
return exp.FieldGreaterThanEqual(self, other)
def in_(self, others):
return exp.FieldIn(self, others)
class Integer(pilo.fields.Integer, PathMixin, IntegerMixin):
type = int
class NamedTuple(pilo.Field, PathMixin):
type = pilo.NOT_SET
def __init__(self, *args, **kwargs):
self.type = kwargs.pop('type', pilo.NOT_SET)
if self.type is pilo.NOT_SET:
args, self.type = pilo.fields.pluck(args, lambda arg: (
inspect.isclass(arg) and issubclass(arg, types.NamedTuple)
))
if self.type is pilo.NOT_SET:
raise TypeError('Missing type=NamedTuple')
super(NamedTuple, self).__init__(*args, **kwargs)
def __getattr__(self, name):
field = getattr(self.type, name, None)
if field is None:
raise AttributeError(
'{0}.{1} does not exist'.format(self.type, name)
)
if not hasattr(field, 'type'):
raise AttributeError(
'{0}.{1}.type does not exist'.format(self.type, name)
)
if issubclass(field.type, str):
sub_field_type = StringSubField
elif issubclass(field.type, bool):
sub_field_type = BooleanSubField
else:
raise TypeError(
'{0}.{1}.type={2} is not supported, must be one of {3}'
.format(self.type, name, field.type, [str, bool])
)
sub_field = sub_field_type(self, name)
setattr(self, name, sub_field)
return sub_field
class StringHash(pilo.Field, PathMixin):
type = types.StringHash
def __getattr__(self, name):
return StringSubField(self, name)
def contains(self, item):
return exp.FieldContains(self, item)
class ArgumentHash(pilo.Field, PathMixin):
type = types.ArgumentHash
def __getattr__(self, name):
return StringSubField(self, name)
def contains(self, item):
return exp.FieldContains(self, item)
class IPAddress(pilo.Field, PathMixin):
type = types.IPAddress
def __eq__(self, other):
return exp.FieldEqual(self, other)
def __ne__(self, other):
return exp.FieldNotEqual(self, other)
def in_(self, others):
return exp.FieldIn(self, others)
class Object(pilo.Field, PathMixin):
type = object
class HeaderHash(pilo.fields.Group, PathMixin):
type = types.HeaderHash
def __init__(self, *args, **kwargs):
super(HeaderHash, self).__init__(
(re.compile(r'HTTP_(.+)'), String()), *args, **kwargs
)
def _munge(self, value):
return dict(
(match.group(0).lower(), value)
for _, match, value in super(HeaderHash, self)._munge(value)
)
def __getattr__(self, name):
return StringSubField(self, name)
def contains(self, item):
return exp.FieldContains(self, item)
class BasicAuthorization(types.NamedTuple):
username = String()
password = String()
class Request(pilo.Form):
"""
Defines a request schema as collections of fields:
- ``rump.request.String``
- ``rump.request.Integer``
- ``rump.request.NamedTuple``
- ...
all of which parse or compute values that are one of these ``rump.types``. If
you need to add custom fields just:
.. code:: python
import rump
class MyRequest(rump.Request):
x_sauce = rump.request.String('HTTP_X_SAUCE', default='blue')
env = rump.request.String()
@env.compute
def env(self):
if not self.authorized or not self.password:
return 'public'
return self.password.split('-')[0]
which can then be used in matching expressions:
.. code:: python
print rump._and(MyRequest.x_sauce.in_(['mayo', 'ketchup']), MyRequest.env != 'open')
"""
def __init__(self, environ, router=None):
"""
:param environ: The WSGI environment for the request. This will be
wrapped and stored as `src`.
:param router: Optional `Router` examining this request. This can be
useful when fields uses `Router` information when
computing a value.
"""
super(Request, self).__init__()
self.src = pilo.source.DefaultSource(environ)
self.router = router
def context(self, symbols):
"""
Creates a context for this request to be used when evaluating a
**compiled** rule.
:param symbols: An instance of `exp.Symbols`.
:return: The `exp.Context` for this request.
"""
return exp.Context(self, symbols)
method = String('REQUEST_METHOD')
path = String('PATH_INFO')
query_string = String('QUERY_STRING')
query = ArgumentHash()
@query.compute
def query(self):
if self.query_string:
query = dict(
(k, v[-1])
for k, v in urlparse.parse_qs(self.query_string).iteritems()
)
else:
query = {}
query_hash = types.StringHash(**query)
return query_hash
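# Note: parse_qs maps each key to a list of values; keeping v[-1] means
# repeated parameters keep only their last value, e.g. 'a=1&a=2' -> {'a': '2'}.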
content_type = String('CONTENT_TYPE', default=None)
content_length = Integer('CONTENT_LENGTH', default=None)
headers = HeaderHash()
host = String('HTTP_HOST')
authenticated = Boolean('HTTP_AUTHORIZATION', default=False)
@authenticated.parse
def authenticated(self, path):
return path.exists and not path.is_null
basic_authorization = NamedTuple(
BasicAuthorization, 'HTTP_AUTHORIZATION', default=None
)
@basic_authorization.parse
|
import bpy
def main(operator, context):
space = context.space_data
node_tree = space.node_tree
node_active = context.active_node
node_selected = context.selected_nodes
# now we have the context, perform a simple operation
if node_active in node_selected:
node_selected.remove(node_active)
if len(node_selected) != 1:
operator.report({'ERROR'}, "2 nodes must be selected")
return
node_other, = node_selected
# now we have 2 nodes to operate on
if not node_active.inputs:
operator.report({'ERROR'}, "Active node has no inputs")
return
if not node_other.outputs:
operator.report({'ERROR'}, "Selected node has no outputs")
return
socket_in = node_active.inputs[0]
socket_out = node_other.outputs[0]
# add a link between the two nodes
node_link = node_tree.links.new(socket_in, socket_out)
class NodeOperator(bpy.types.Operator):
"""Tooltip"""
bl_idname = "node.simple_operator"
bl_label = "Simple Node Operator"
@classmethod
def poll(cls, context):
space = context.space_data
return space.type == 'NODE_EDITOR'
def execute(self, context):
main(self, context)
return {'FINISHED'}
def register():
bpy.utils.register_class(NodeOperator)
def unregister():
bpy.utils.unregister_class(NodeOperator)
if __name__ == "__main__":
register()
|
# -*- coding: utf-8 -*-
"""
This package contains all the forms and models
for the rheumatism questionnaires.
""" | |
import cv2
import numpy
import Tool
class HueEqualiser(Tool.Tool):
def on_init(self):
self.id = "hueequaliser"
self.name = "Hue Equaliser"
self.icon_path = "ui/PF2_Icons/HueEqualiser.png"
self.properties = [
Tool.Property("header", "Hue Equaliser", "Header", None, has_toggle=False, has_button=False),
Tool.Property("bleed", "Hue Bleed", "Slider", 0.5, max=2.0, min=0.01),
Tool.Property("neighbour_bleed", "Neighbour Bleed", "Slider", 0.25, max=2.0, min=0.0),
# Red
Tool.Property("header_red", "Red", "Header", None, has_toggle=False, has_button=False),
Tool.Property("red_value", "Value", "Slider", 0, max=50, min=-50),
Tool.Property("red_saturation", "Saturation", "Slider", 0, max=50, min=-50),
# Yellow
Tool.Property("header_yellow", "Yellow", "Header", None, has_toggle=False, has_button=False),
Tool.Property("yellow_value", "Value", "Slider", 0, max=50, min=-50),
Tool.Property("yellow_saturation", "Saturation", "Slider", 0, max=50, min=-50),
# Green
Tool.Property("header_green", "Green", "Header", None, has_toggle=False, has_button=False),
Tool.Property("green_value", "Value", "Slider", 0, max=50, min=-50),
Tool.Property("green_saturation", "Saturation", "Slider", 0, max=50, min=-50),
# Cyan
Tool.Property("header_cyan", "Cyan", "Header", None, has_toggle=False, has_button=False),
Tool.Property("cyan_value", "Value", "Slider", 0, max=50, min=-50),
Tool.Property("cyan_saturation", "Saturation", "Slider", 0, max=50, min=-50),
# Blue
Tool.Property("header_blue", "Blue", "Header", None, has_toggle=False, has_button=False),
Tool.Property("blue_value", "Value", "Slider", 0, max=50, min=-50),
Tool.Property("blue_saturation", "Saturation", "Slider", 0, max=50, min=-50),
# Violet
Tool.Property("header_violet", "Violet", "Header", None, has_toggle=False, has_button=False),
Tool.Property("violet_value", "Value", "Slider", 0, max=50, min=-50),
Tool.Property("violet_saturation", "Saturation", "Slider", 0, max=50, min=-50),
]
def on_update(self, image):
hues = {
"red": 0,
"yellow": 60,
"green": 120,
"cyan": 180,
"blue": 240,
"violet": 300,
"_red": 360,
}
out = image
if(not self.is_default()):
bleed = self.props["bleed"].get_value()
neighbour_bleed = self.props["neighbour_bleed"].get_value()
out = out.astype(numpy.float32)
# Convert to HSV colorspace
out = cv2.cvtColor(out, cv2.COLOR_BGR2HSV)
# Bits per pixel
bpp = float(str(image.dtype).replace("uint", "").replace("float", ""))
# Pixel value range
np = float(2 ** bpp - 1)
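# e.g. for a uint8 image: bpp = 8.0 and np = 2**8 - 1 = 255.0, the
# maximum representable pixel value.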
imhue = out[0:, 0:, 0]
imsat = out[0:, 0:, 1]
imval = out[0:, 0:, 2]
for hue in hues:
hsat = self.props["%s_saturation" % hue.replace('_', '')].get_value()
hval = self.props["%s_value" % hue.replace('_', '')].get_value()
isHue = self._is_hue(imhue, hues[hue], (3.5/bleed))
isHue = self._neighbour_bleed(isHue, neighbour_bleed)
imsat = imsat + ((hsat / 10000) * 255) * isHue
imval = imval + ((hval / 1000) * np) * isHue
# Clip any values out of bounds
imval[imval < 0.0] = 0.0
imval[imval > np] = np
imsat[imsat < 0.0] = 0.0
imsat[imsat > 1.0] = 1.0
out[0:, 0:, 1] = imsat
out[0:, 0:, 2] = imval
# Convert back to BGR colorspace
out = cv2.cvtColor(out, cv2.COLOR_HSV2BGR)
out = out.astype(image.dtype)
return out
def _is_hue(self, image, hue_value, bleed_value = 3.5):
mif = hue_value - 30
mir = hue_value + 30
if (mir > 360):
mir = 360
if (mif < 0):
mif = 0
bleed = float(360 / bleed_value)
icopy = image.copy()
print(bleed, mif, mir)
if(mif != 0):
icopy[icopy < mif - bleed] = 0.0
icopy[icopy > mir + bleed] = 0.0
icopy[(icopy < mif) * (icopy != 0.0)] = (((mif - (icopy[(icopy < mif) * (icopy != 0.0)]))/360.0) / (bleed/360.0)) * -1 + 1
icopy[(icopy > mir) * (icopy != 0.0)] = ((((icopy[(icopy > mir) * (icopy != 0.0)]) - mir)/360.0) / (bleed/360.0)) * -1 + 1
icopy[(icopy >= mif) * (icopy <= mir)] = 1.0
if(mif == 0):
icopy[icopy > mir + bleed] = 0.0
icopy[(icopy > mir) * (icopy != 0.0)] = ((((icopy[(icopy > mir) * (icopy != 0.0)]) - mir) / 360.0) / (bleed/360.0)) * -1 + 1
return icopy
def _neighbour_bleed(self, map, bleed):
strength = bleed*30
if (strength > 0):
height, width = map.shape[:2]
size = (height * width)
mul = numpy.math.sqrt(size) / 1064.416 # numpy.math.sqrt(1132982.0)
map = map*255
blur_size = abs(2 * round((round(strength * mul) + 1) / 2) - 1)
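# abs(2*round((x + 1)/2) - 1) forces blur_size to an odd integer, so the
# box blur kernel has a well-defined centre pixel.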
im = cv2.blur(map, (int(blur_size), int(blur_size)))
return im/255.0
return map
|
#! /usr/bin/env python
# -*- mode: python; coding: utf-8 -*-
# Copyright 2018 the HERA Collaboration
# Licensed under the 2-clause BSD license.
"""Gather correlator log outputs and log them into M&C."""
import sys
import json
import redis
import socket
import logging
from astropy.time import Time
from hera_mc import mc
from hera_mc.correlator import DEFAULT_REDIS_ADDRESS
allowed_levels = ["DEBUG", "INFO", "NOTIFY", "WARNING", "ERROR", "CRITICAL"]
logging.addLevelName(logging.INFO + 1, "NOTIFY")
parser = mc.get_mc_argument_parser()
parser.add_argument(
"--redishost",
"-r",
dest="redishost",
default=DEFAULT_REDIS_ADDRESS,
help="The redis db hostname",
)
parser.add_argument(
"--channel",
"-c",
dest="channel",
default="mc-log-channel",
help="The redis channel to listen on.",
)
parser.add_argument(
"-l",
dest="level",
type=str,
default="NOTIFY",
help=(
"Don't log messages below this level. "
"Allowed values are {vals:}".format(vals=allowed_levels)
),
choices=allowed_levels,
)
args = parser.parse_args()
db = mc.connect_to_mc_db(args)
hostname = socket.gethostname()
redis_pool = redis.ConnectionPool(host=args.redishost)
level = logging.getLevelName(args.level)
while True:
try:
with db.sessionmaker() as session, redis.Redis(
connection_pool=redis_pool
) as redis_db:
pubsub = redis_db.pubsub()
pubsub.ignore_subscribe_messages = True
pubsub.subscribe(args.channel)
# pubsub.listen() will create an infinite generator
# that yields messages in our channel
for message in pubsub.listen():
if (
message["data"].decode()
!= "UnicodeDecodeError on emit!"
# messages come as byte strings, make sure an error didn't occur
):
message_dict = json.loads(message["data"])
msg_level = message_dict["levelno"]
if msg_level >= level:
session.add_subsystem_error(
Time(message_dict["logtime"], format="unix"),
message_dict["subsystem"],
message_dict["severity"],
message_dict["message"],
)
session.add_daemon_status(
"mc_listen_to_corr_logger", hostname, Time.now(), "good"
)
session.commit()
except KeyboardInterrupt:
sys.exit()
except Exception as e:
# some common exceptions are this Nonetype being yielded by the iterator
# and a forcible connection closure by the server.
# Ignore for now and re-attach to the pubsub channel
if not any(
str(e).startswith(err)
for err in [
"'NoneType' object has no attribute 'readline'",
"Connection closed by server.",
]
):
print(e)
session.add_daemon_status(
"mc_listen_to_corr_logger", hostname, Time.now(), "errored"
)
session.commit()
continue
|
"""
Rate Service Module
===================
This package contains classes to request pre-ship rating information and to
determine estimated or courtesy billing quotes. Time in Transit can be
returned with the rates if it is specified in the request.
"""
from datetime import datetime
from .. base_service import FedexBaseService
class FedexRateServiceRequest(FedexBaseService):
"""
This class allows you to get the shipping charges for a particular address.
You will need to populate the data structures in self.RequestedShipment,
then send the request.
"""
def __init__(self, config_obj, *args, **kwargs):
"""
The optional keyword args detailed on L{FedexBaseService}
apply here as well.
@type config_obj: L{FedexConfig}
@param config_obj: A valid FedexConfig object.
"""
self._config_obj = config_obj
# Holds version info for the VersionId SOAP object.
self._version_info = {'service_id': 'crs', 'major': '16',
'intermediate': '0', 'minor': '0'}
self.RequestedShipment = None
"""@ivar: Holds the RequestedShipment WSDL object."""
# Call the parent FedexBaseService class for basic setup work.
super(FedexRateServiceRequest, self).__init__(self._config_obj,
'RateService_v16.wsdl',
*args, **kwargs)
self.ClientDetail.Region = config_obj.express_region_code
def _prepare_wsdl_objects(self):
"""
This is the data that will be used to create your shipment. Create
the data structure and get it ready for the WSDL request.
"""
# Default behavior is to not request transit information
self.ReturnTransitAndCommit = False
# This is the primary data structure for processShipment requests.
self.RequestedShipment = self.client.factory.create('RequestedShipment')
self.RequestedShipment.ShipTimestamp = datetime.now()
TotalWeight = self.client.factory.create('Weight')
# Start at nothing.
TotalWeight.Value = 0.0
# Default to pounds.
TotalWeight.Units = 'LB'
# This is the total weight of the entire shipment. Shipments may
# contain more than one package.
self.RequestedShipment.TotalWeight = TotalWeight
# This is the top level data structure for Shipper information.
ShipperParty = self.client.factory.create('Party')
ShipperParty.Address = self.client.factory.create('Address')
ShipperParty.Contact = self.client.factory.create('Contact')
# Link the ShipperParty to our master data structure.
self.RequestedShipment.Shipper = ShipperParty
# This is the top level data structure for Recipient information.
RecipientParty = self.client.factory.create('Party')
RecipientParty.Contact = self.client.factory.create('Contact')
RecipientParty.Address = self.client.factory.create('Address')
# Link the RecipientParty object to our master data structure.
self.RequestedShipment.Recipient = RecipientParty
Payor = self.client.factory.create('Payor')
# Grab the account number from the FedexConfig object by default.
Payor.AccountNumber = self._config_obj.account_number
# Assume US.
Payor.CountryCode = 'US'
# Start with no packages, user must add them.
self.RequestedShipment.PackageCount = 0
self.RequestedShipment.RequestedPackageLineItems = []
# This is good to review if you'd like to see what the data structure
# looks like.
self.logger.debug(self.RequestedShipment)
def _assemble_and_send_request(self):
"""
Fires off the Fedex request.
@warning: NEVER CALL THIS METHOD DIRECTLY. CALL send_request(),
WHICH RESIDES ON FedexBaseService AND IS INHERITED.
"""
# Fire off the query.
return self.client.service.getRates(
WebAuthenticationDetail=self.WebAuthenticationDetail,
ClientDetail=self.ClientDetail,
TransactionDetail=self.TransactionDetail,
Version=self.VersionId,
RequestedShipment=self.RequestedShipment,
ReturnTransitAndCommit=self.ReturnTransitAndCommit)
def add_package(self, package_item):
"""
Adds a package to the ship request.
@type package_item: WSDL object, type of RequestedPackageLineItem
WSDL object.
@keyword package_item: A RequestedPackageLineItem, created by
calling create_wsdl_object_of_type('RequestedPackageLineItem') on
this ShipmentRequest object. See examples/create_shipment.py for
more details.
"""
self.RequestedShipment.RequestedPackageLineItems.append(package_item)
package_weight = package_item.Weight.Value
self.RequestedShipment.TotalWeight.Value += package_weight
self.RequestedShipment.PackageCount += 1
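# A minimal usage sketch (hedged; the Address/Weight field names are
# assumptions based on the WSDL objects created in _prepare_wsdl_objects()
# above, and config_obj is a valid FedexConfig):
#
#   rate_request = FedexRateServiceRequest(config_obj)
#   rate_request.RequestedShipment.Shipper.Address.PostalCode = '94107'
#   rate_request.RequestedShipment.Shipper.Address.CountryCode = 'US'
#   rate_request.RequestedShipment.Recipient.Address.PostalCode = '10001'
#   rate_request.RequestedShipment.Recipient.Address.CountryCode = 'US'
#   package = rate_request.create_wsdl_object_of_type('RequestedPackageLineItem')
#   package.Weight = rate_request.create_wsdl_object_of_type('Weight')
#   package.Weight.Value = 1.0
#   package.Weight.Units = 'LB'
#   rate_request.add_package(package)
#   rate_request.send_request()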
|
import unittest
from p5.pmath.utils import (
constrain,
lerp,
remap,
normalize,
magnitude,
distance,
sq)
class TestUtils(unittest.TestCase):
def test_constrain(self):
self.assertEqual(constrain(5, 0, 10), 5)
self.assertEqual(constrain(-10, 0, 10), 0)
self.assertEqual(constrain(20, 0, 10), 10)
def test_lerp(self):
self.assertEqual(lerp(0, 100, 0.5), 50)
def test_remap(self):
self.assertEqual(remap(50, (0, 100), (0, 10)), 5.0)
def test_normalize(self):
self.assertEqual(normalize(50, 0, 100), 0.5)
def test_magnitude(self):
self.assertEqual(magnitude(3, 4), 5)
def test_distance(self):
self.assertEqual(distance((0, 0, 0), (2, 3, 6)), 7)
def test_sq(self):
self.assertEqual(sq(4), 16)
if __name__ == "__main__":
unittest.main()
|
__author__ = 'erik'
import numpy as np
from PetersScheme.Edge import Edge
from PetersScheme.Quad import Quad
from PetersScheme.Vertex import Vertex
def getABsC_ind(quadIndex, indVertex, indOtherVertex, regularPoints):
'''
:param quadIndex: index of the quad into regularPoints
:param indVertex: in-quad index (0-3) of the corner vertex
:param indOtherVertex: in-quad index of the other vertex on the shared edge
:param regularPoints: numpy.ndarray with 16 point ids per quad
:return: the point ids in order A, B1, B2, C
'''
assert isinstance(quadIndex,int)
assert isinstance(indVertex,int)
assert isinstance(indOtherVertex,int)
assert isinstance(regularPoints,np.ndarray)
# assuming 16 vertices per row of regularPoints
# listed like
'''
4-------------------2
| 12 13 14 15 |
| 8 9 10 11 |
| 4 5 6 7 |
| 0 1 2 3 |
0-------------------1
'''
# points in order A, B1, B2, C
# B1 is the one closest to the edge
clockwiseQuadrantIndices = np.array([[5,4,1,0],\
[6,2,7,3],\
[10,11,14,15],\
[9,13,8,12]], dtype=int)
counterclockwiseQuadrantIndices = np.array([[5,1,4,0],\
[6,7,2,3],\
[10,14,11,15],\
[9,8,13,12]], dtype=int)
if indOtherVertex == (indVertex - 1)%4: #clockwise
return regularPoints[quadIndex,clockwiseQuadrantIndices[indVertex, :]]
else:
return regularPoints[quadIndex,counterclockwiseQuadrantIndices[indVertex, :]]
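# Worked example: for quadIndex=0, indVertex=0 and indOtherVertex=3, the
# clockwise branch is taken (3 == (0 - 1) % 4), so the corner quadrant of
# vertex 0 yields A=5, B1=4, B2=1, C=0 in the 4x4 grid sketched above.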
def getABsC(_quad, _edge, _vertex, regularPoints):
"""
:param:_quad: Quad
:param:_edge: Edge
:param:_vertex: Vertex
:param:regularPoints: numpy.ndarray
:return: A, B1, B2, C
:rtype: numpy.ndarray([int, int, int, int])
"""
assert isinstance(regularPoints, np.ndarray)
assert isinstance(_quad, Quad)
assert isinstance(_edge, Edge)
assert isinstance(_vertex, Vertex)
vertex_inquad_index = _quad.get_vertices().index(_vertex)
neighbour_vertex = _edge.get_other_vertex(_vertex)
other_inquad_index = _quad.get_vertices().index(neighbour_vertex)
return getABsC_ind(_quad.get_id(), vertex_inquad_index, other_inquad_index, regularPoints)
def dualCont_to_ABC_simpl(quad_objs, vert_objs):
num_quads = len(quad_objs)
num_verts = len(vert_objs)
points_per_quad = 16
As = np.full((num_verts,7,2),-1,dtype=int)
B1s = np.full((num_verts,7,4),-1,dtype=int)
B2s = np.full((num_verts,7,4),-1,dtype=int)
Cs = np.full((num_verts,7,2),-1,dtype=int)
regularPoints = np.arange(num_quads*points_per_quad, dtype=int).reshape((num_quads, points_per_quad))
for vertex in vert_objs:
vert_id = int(vertex.get_id())
# print "Vert. id: %d, Vert. number: %d" % (vertex.get_id(), vert_id)
number_of_quads = vertex.number_quads()
assert number_of_quads > 2, "Found course mesh vertex %d with 2 or less quads, probably manifold" % vert_id
one_edge = next(iter(vertex.get_edges()))
one_quad = next(iter(one_edge.get_quads()))
vertex_inquad_index = one_quad.get_vertices().index(vertex)
for quadIndex in range(number_of_quads):
# save the vertex ids on the edges closest to B1 and B2
B1s[vert_id, quadIndex, 3] = B2s[vert_id, quadIndex, 3] = vert_id
#first save B2 edge, then switch, then save B1 edge
B2s[vert_id, quadIndex, 2] = one_edge.get_other_vertex(vertex).get_id()
one_edge = one_quad.get_other_edge_sharing_vertex(one_edge, vertex)
B1s[vert_id, quadIndex, 2] = one_edge.get_other_vertex(vertex).get_id()
#get the ABC IDs of the vertex points
As[vert_id,quadIndex,0],\
B1s[vert_id,quadIndex,0],\
B2s[vert_id,quadIndex,0],\
Cs[vert_id,quadIndex,0] = getABsC(one_quad,one_edge, vertex, regularPoints)
#save the quad ID
As[vert_id,quadIndex,1] =\
B1s[vert_id,quadIndex,1] =\
B2s[vert_id,quadIndex,1] =\
Cs[vert_id,quadIndex,1] = int(one_quad.get_id())
#shift to next quad
one_quad = one_quad.get_neighbour_sharing_edge(one_edge)
return As, B1s, B2s, Cs, regularPoints
|
# -*- coding: utf-8 -*-
###############################################################################
#
# ListZones
# Retrieves the list of Zone resources for the specified project.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class ListZones(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the ListZones Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(ListZones, self).__init__(temboo_session, '/Library/Google/ComputeEngine/Zones/ListZones')
def new_input_set(self):
return ListZonesInputSet()
def _make_result_set(self, result, path):
return ListZonesResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return ListZonesChoreographyExecution(session, exec_id, path)
class ListZonesInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the ListZones
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_AccessToken(self, value):
"""
Set the value of the AccessToken input for this Choreo. ((optional, string) A valid access token retrieved during the OAuth process. This is required unless you provide the ClientID, ClientSecret, and RefreshToken to generate a new access token.)
"""
super(ListZonesInputSet, self)._set_input('AccessToken', value)
def set_ClientID(self, value):
"""
Set the value of the ClientID input for this Choreo. ((conditional, string) The Client ID provided by Google. Required unless providing a valid AccessToken.)
"""
super(ListZonesInputSet, self)._set_input('ClientID', value)
def set_ClientSecret(self, value):
"""
Set the value of the ClientSecret input for this Choreo. ((conditional, string) The Client Secret provided by Google. Required unless providing a valid AccessToken.)
"""
super(ListZonesInputSet, self)._set_input('ClientSecret', value)
def set_Fields(self, value):
"""
Set the value of the Fields input for this Choreo. ((optional, string) Comma-separated list of fields you want to include in the response.)
"""
super(ListZonesInputSet, self)._set_input('Fields', value)
def set_Filter(self, value):
"""
Set the value of the Filter input for this Choreo. ((optional, string) A filter expression for narrowing results in the form: {field_name} {comparison_string} {literal_string} (e.g. name eq europe-west1-a). Comparison strings can be eq (equals) or ne (not equals).)
"""
super(ListZonesInputSet, self)._set_input('Filter', value)
def set_MaxResults(self, value):
"""
Set the value of the MaxResults input for this Choreo. ((optional, integer) The maximum number of results to return.)
"""
super(ListZonesInputSet, self)._set_input('MaxResults', value)
def set_PageToken(self, value):
"""
Set the value of the PageToken input for this Choreo. ((optional, string) The "nextPageToken" found in the response which is used to page through results.)
"""
super(ListZonesInputSet, self)._set_input('PageToken', value)
def set_Project(self, value):
"""
Set the value of the Project input for this Choreo. ((required, string) The ID of a Google Compute project.)
"""
super(ListZonesInputSet, self)._set_input('Project', value)
def set_RefreshToken(self, value):
"""
Set the value of the RefreshToken input for this Choreo. ((conditional, string) An OAuth refresh token used to generate a new access token when the original token is expired. Required unless providing a valid AccessToken.)
"""
super(ListZonesInputSet, self)._set_input('RefreshToken', value)
class ListZonesResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the ListZones Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from Google.)
"""
return self._output.get('Response', None)
def get_NewAccessToken(self):
"""
Retrieve the value for the "NewAccessToken" output from this Choreo execution. ((string) Contains a new AccessToken when the RefreshToken is provided.)
"""
return self._output.get('NewAccessToken', None)
class ListZonesChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return ListZonesResultSet(response, path)
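
# A minimal usage sketch (not part of the generated SDK file): the account
# name, app key, project ID, and access token below are placeholders to
# replace with your own values.
if __name__ == "__main__":
    from temboo.core.session import TembooSession

    session = TembooSession("ACCOUNT_NAME", "APP_KEY_NAME", "APP_KEY_VALUE")
    choreo = ListZones(session)
    inputs = choreo.new_input_set()
    inputs.set_Project("my-project-id")      # required
    inputs.set_AccessToken("ACCESS_TOKEN")   # or ClientID/ClientSecret/RefreshToken
    results = choreo.execute_with_results(inputs)
    print(results.get_Response())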
import math
import datetime
#from subgen.subtitle import Subtitle
#def time_parse(s):
#hour, minute, second_decimal = [t for t in s.split(':')]
#second, microsec = second_decimal.split('.')
#microsec = microsec.ljust(6, '0')
#return [int(hour), int(minute), int(second), int(microsec)]
def subtitle_parser(sub_file):
res_start = []
res_end = []
res_offset = 0
with open(sub_file, 'r') as f:
inEvents = False
ss = f.readlines()
for s in ss:
if inEvents:
sl = s.strip().split(',')
if sl[1] != ' Start':
#rs = time_parse(sl[1])
#re = time_parse(sl[2])
rs = sl[1]
re = sl[2]
res_start.append(rs)
res_end.append(re)
else:
res_offset += len(s)
if s.strip() == "[Events]":
inEvents = True
res_offset += len(s)
return res_start, res_end, res_offset
#if __name__ == "__main__":
#import sys
#import os
#if os.path.exists(sys.argv[1]):
#subtitle_parser(sys.argv[1])
#else:
#raise FileNotFoundError(sys.argv[0] + ": " + sys.argv[1] + " not found.")
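
# A minimal self-contained check of subtitle_parser (a sketch, not part of
# the original module). It assumes the SSA/ASS layout the parser expects:
# an "[Events]" header, a "Format: ..., Start, End, ..." line, then
# comma-separated dialogue rows with start/end times in fields 1 and 2.
def _subtitle_parser_demo():
    import tempfile
    sample = (
        "[Script Info]\n"
        "Title: demo\n"
        "[Events]\n"
        "Format: Layer, Start, End, Style, Text\n"
        "Dialogue: 0,0:00:01.00,0:00:03.50,Default,Hello\n"
    )
    with tempfile.NamedTemporaryFile("w", suffix=".ass", delete=False) as f:
        f.write(sample)
        path = f.name
    starts, ends, offset = subtitle_parser(path)
    print(starts, ends, offset)  # ['0:00:01.00'] ['0:00:03.50'] <header byte count>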
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import cPickle as pickle
import os
import sys
import unittest
from gzip import GzipFile
from shutil import rmtree
from tempfile import mkdtemp
from eventlet import spawn, TimeoutError, listen
from eventlet.timeout import Timeout
from swift.common import utils
from swift.container import updater as container_updater
from swift.container import server as container_server
from swift.common.db import ContainerBroker
from swift.common.ring import RingData
from swift.common.utils import normalize_timestamp
class TestContainerUpdater(unittest.TestCase):
def setUp(self):
utils.HASH_PATH_SUFFIX = 'endcap'
self.testdir = os.path.join(mkdtemp(), 'tmp_test_container_updater')
rmtree(self.testdir, ignore_errors=1)
os.mkdir(self.testdir)
pickle.dump(RingData([[0, 1, 0, 1], [1, 0, 1, 0]],
[{'id': 0, 'ip': '127.0.0.1', 'port': 12345, 'device': 'sda1',
'zone': 0},
{'id': 1, 'ip': '127.0.0.1', 'port': 12345, 'device': 'sda1',
'zone': 2}], 30),
GzipFile(os.path.join(self.testdir, 'account.ring.gz'), 'wb'))
self.devices_dir = os.path.join(self.testdir, 'devices')
os.mkdir(self.devices_dir)
self.sda1 = os.path.join(self.devices_dir, 'sda1')
os.mkdir(self.sda1)
def tearDown(self):
rmtree(os.path.dirname(self.testdir), ignore_errors=1)
def test_creation(self):
cu = container_updater.ContainerUpdater({
'devices': self.devices_dir,
'mount_check': 'false',
'swift_dir': self.testdir,
'interval': '1',
'concurrency': '2',
'node_timeout': '5',
})
self.assert_(hasattr(cu, 'logger'))
self.assert_(cu.logger is not None)
self.assertEquals(cu.devices, self.devices_dir)
self.assertEquals(cu.interval, 1)
self.assertEquals(cu.concurrency, 2)
self.assertEquals(cu.node_timeout, 5)
self.assert_(cu.get_account_ring() is not None)
def test_run_once(self):
cu = container_updater.ContainerUpdater({
'devices': self.devices_dir,
'mount_check': 'false',
'swift_dir': self.testdir,
'interval': '1',
'concurrency': '1',
'node_timeout': '15',
'account_suppression_time': 0
})
cu.run_once()
containers_dir = os.path.join(self.sda1, container_server.DATADIR)
os.mkdir(containers_dir)
cu.run_once()
self.assert_(os.path.exists(containers_dir))
subdir = os.path.join(containers_dir, 'subdir')
os.mkdir(subdir)
cb = ContainerBroker(os.path.join(subdir, 'hash.db'), account='a',
container='c')
cb.initialize(normalize_timestamp(1))
cu.run_once()
info = cb.get_info()
self.assertEquals(info['object_count'], 0)
self.assertEquals(info['bytes_used'], 0)
self.assertEquals(info['reported_object_count'], 0)
self.assertEquals(info['reported_bytes_used'], 0)
cb.put_object('o', normalize_timestamp(2), 3, 'text/plain',
'68b329da9893e34099c7d8ad5cb9c940')
cu.run_once()
info = cb.get_info()
self.assertEquals(info['object_count'], 1)
self.assertEquals(info['bytes_used'], 3)
self.assertEquals(info['reported_object_count'], 0)
self.assertEquals(info['reported_bytes_used'], 0)
def accept(sock, addr, return_code):
try:
with Timeout(3):
inc = sock.makefile('rb')
out = sock.makefile('wb')
out.write('HTTP/1.1 %d OK\r\nContent-Length: 0\r\n\r\n' %
return_code)
out.flush()
self.assertEquals(inc.readline(),
'PUT /sda1/0/a/c HTTP/1.1\r\n')
headers = {}
line = inc.readline()
while line and line != '\r\n':
headers[line.split(':')[0].lower()] = \
line.split(':')[1].strip()
line = inc.readline()
self.assert_('x-put-timestamp' in headers)
self.assert_('x-delete-timestamp' in headers)
self.assert_('x-object-count' in headers)
self.assert_('x-bytes-used' in headers)
except BaseException, err:
import traceback
traceback.print_exc()
return err
return None
bindsock = listen(('127.0.0.1', 0))
def spawn_accepts():
events = []
for _junk in xrange(2):
sock, addr = bindsock.accept()
events.append(spawn(accept, sock, addr, 201))
return events
spawned = spawn(spawn_accepts)
for dev in cu.get_account_ring().devs:
if dev is not None:
dev['port'] = bindsock.getsockname()[1]
cu.run_once()
for event in spawned.wait():
err = event.wait()
if err:
raise err
info = cb.get_info()
self.assertEquals(info['object_count'], 1)
self.assertEquals(info['bytes_used'], 3)
self.assertEquals(info['reported_object_count'], 1)
self.assertEquals(info['reported_bytes_used'], 3)
def test_unicode(self):
cu = container_updater.ContainerUpdater({
'devices': self.devices_dir,
'mount_check': 'false',
'swift_dir': self.testdir,
'interval': '1',
'concurrency': '1',
'node_timeout': '15',
})
containers_dir = os.path.join(self.sda1, container_server.DATADIR)
os.mkdir(containers_dir)
subdir = os.path.join(containers_dir, 'subdir')
os.mkdir(subdir)
cb = ContainerBroker(os.path.join(subdir, 'hash.db'), account='a',
container='\xce\xa9')
cb.initialize(normalize_timestamp(1))
cb.put_object('\xce\xa9', normalize_timestamp(2), 3, 'text/plain',
'68b329da9893e34099c7d8ad5cb9c940')
def accept(sock, addr):
try:
with Timeout(3):
inc = sock.makefile('rb')
out = sock.makefile('wb')
out.write('HTTP/1.1 201 OK\r\nContent-Length: 0\r\n\r\n')
out.flush()
inc.read()
except BaseException, err:
import traceback
traceback.print_exc()
return err
return None
bindsock = listen(('127.0.0.1', 0))
def spawn_accepts():
events = []
for _junk in xrange(2):
with Timeout(3):
sock, addr = bindsock.accept()
events.append(spawn(accept, sock, addr))
return events
spawned = spawn(spawn_accepts)
for dev in cu.get_account_ring().devs:
if dev is not None:
dev['port'] = bindsock.getsockname()[1]
cu.run_once()
for event in spawned.wait():
err = event.wait()
if err:
raise err
info = cb.get_info()
self.assertEquals(info['object_count'], 1)
self.assertEquals(info['bytes_used'], 3)
self.assertEquals(info['reported_object_count'], 1)
self.assertEquals(info['reported_bytes_used'], 3)
if __name__ == '__main__':
    unittest.main()
AUTHENTICATION_REQUIRED_401 = 401
NOT_AUTHORIZED_403 = 403
INVALID_REQUEST_400 = 400
REQUEST_ENTITY_TOO_LARGE_413 = 413
NOT_FOUND_404 = 404
INTERNAL_SERVER_ERROR_500 = 500
RESOURCE_CONFLICT_409 = 409
PAYMENT_REQUIRED_402 = 402
HEADER_PRECONDITIONS_FAILED = 412
LOCKED_423 = 423
TOO_MANY_REQUESTS_429 = 429
class JSONAPIException(Exception):
status_code = INTERNAL_SERVER_ERROR_500
error_title = "Internal Error."
error_type_ = None
error_description = None
@property
def error_type(self):
return self.error_type_ or type(self).__name__
def __init__(self, status_code=None, error_type=None, error_title=None,
error_description=None):
self.status_code = status_code or self.status_code
self.error_title = error_title or self.error_title
self.error_description = error_description or self.error_description or self.error_title
self.error_type_ = error_type
class ParameterMissing(JSONAPIException):
status_code = INVALID_REQUEST_400
parameter_name = None
def __init__(self, parameter_name=None):
self.error_title = \
"Parameter '{}' is missing.".format(parameter_name or self.parameter_name)
class ParameterInvalid(JSONAPIException):
status_code = INVALID_REQUEST_400
def __init__(self, parameter_name, parameter_value):
self.error_title = \
"Invalid value for parameter '{}'.".format(parameter_name)
self.error_description = \
"Invalid parameter for '{0}': {1}.".format(parameter_name, parameter_value)
class BadPageCountParameter(ParameterInvalid):
def __init__(self, parameter_value):
super().__init__(parameter_name='page[count]',
parameter_value=parameter_value)
self.error_description = "Page sizes must be integers."
class BadPageCursorParameter(ParameterInvalid):
def __init__(self, parameter_value):
super().__init__(parameter_name='page[cursor]',
parameter_value=parameter_value)
self.error_description = "Provided cursor was not parsable."
class BadPageOffsetParameter(ParameterInvalid):
def __init__(self, parameter_value):
super().__init__(parameter_name='page[offset]',
parameter_value=parameter_value)
self.error_description = "Page offsets must be integers."
class DataMissing(ParameterMissing):
parameter_name = 'data'
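
# A hedged sketch of turning these exceptions into a JSON:API-style error
# payload. The to_error_dict helper is illustrative and not part of the
# module above.
def to_error_dict(exc):
    """Render a JSONAPIException as one entry of a JSON:API 'errors' list."""
    return {
        "status": str(exc.status_code),
        "code": exc.error_type,
        "title": exc.error_title,
        "detail": exc.error_description,
    }

# For example, BadPageCountParameter("abc") yields status "400", code
# "BadPageCountParameter", title "Invalid value for parameter 'page[count]'."
# and detail "Page sizes must be integers."
def _error_payload_demo():
    try:
        raise BadPageCountParameter("abc")
    except JSONAPIException as exc:
        return {"errors": [to_error_dict(exc)]}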
import base64
from io import BytesIO
import json
import os
import urllib.parse
import six
import sys
from ftptool import FTPHost
import xlsxwriter
# from https://docs.djangoproject.com/en/1.10/_modules/django/utils/encoding/
def smart_text(s, encoding="utf-8", strings_only=False, errors="strict"):
"""
Returns a text object representing 's' -- unicode on Python 2 and str on
Python 3. Treats bytestrings using the 'encoding' codec.
If strings_only is True, don't convert (some) non-string-like objects.
"""
return force_text(s, encoding, strings_only, errors)
# from https://docs.djangoproject.com/en/1.10/_modules/django/utils/encoding/
def force_text(s, encoding="utf-8", strings_only=False, errors="strict"):
"""
Similar to smart_text, except that lazy instances are resolved to
strings, rather than kept as lazy objects.
If strings_only is True, don't convert (some) non-string-like objects.
"""
# Handle the common case first for performance reasons.
if issubclass(type(s), six.text_type):
return s
try:
if not issubclass(type(s), six.string_types):
if six.PY3:
if isinstance(s, bytes):
s = six.text_type(s, encoding, errors)
else:
s = six.text_type(s)
elif hasattr(s, "__unicode__"):
s = six.text_type(s)
else:
s = six.text_type(bytes(s), encoding, errors)
else:
# Note: We use .decode() here, instead of six.text_type(s, encoding,
# errors), so that if s is a SafeBytes, it ends up being a
# SafeText at the end.
s = s.decode(encoding, errors)
except UnicodeDecodeError as e:
# If we get to here, the caller has passed in an Exception
# subclass populated with non-ASCII bytestring data without a
# working unicode method. Try to handle this without raising a
# further exception by individually forcing the exception args
# to unicode.
s = " ".join(force_text(arg, encoding, strings_only, errors) for arg in s)
return s
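
# A quick sanity check of force_text under Python 3 semantics (a sketch,
# not part of the original helpers): bytes are decoded with the given
# codec, text passes through unchanged, and other objects are stringified.
def _force_text_demo():
    assert force_text(b"caf\xc3\xa9") == "caf\u00e9"     # bytes -> decoded str
    assert force_text("already text") == "already text"  # str passes through
    assert force_text(42) == "42"                        # non-strings stringified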
def _get_ftp_data(data):
outputs = []
conn = FTPHost.connect(
data["address"], user=data["user"], password=data["password"]
)
for (dirname, subdirs, files) in conn.walk(data.get("path", "/")):
outputs.append((dirname, files))
conn.try_quit()
return outputs
def _get_root_url(data):
return f'ftp://{data["user"]}:{data["password"]}@{data["address"]}'
def _populate_workbook(wb, root_url, data):
ws = wb.add_worksheet()
# write header rows
ws.write(0, 0, "Folder")
ws.write(0, 1, "Filename")
ws.write(0, 2, "URL")
parser = urllib.parse.quote
# write data rows
row = 0
for path, files in data:
for fn in files:
row += 1
path_url = parser(
os.path.join(path.decode("utf8"), fn.decode("utf8")).encode("utf8")
)
url = root_url + path_url
ws.write(row, 0, smart_text(path))
ws.write(row, 1, smart_text(fn))
ws.write(row, 2, smart_text(url))
# setup header and autofilter
bold = wb.add_format({"bold": True})
ws.set_row(0, None, bold)
ws.autofilter(f"A1:C{row + 1}")
# set widths
ws.set_column("A:A", 30)
ws.set_column("B:B", 65)
ws.set_column("C:C", 100)
def _generate_xlsx(data):
# create workbook
output = BytesIO()
wb = xlsxwriter.Workbook(output, {"constant_memory": True})
# add stuff to workbook
ftp_data = _get_ftp_data(data)
root_url = _get_root_url(data)
_populate_workbook(wb, root_url, ftp_data)
# return base64 encoded workbook
wb.close()
output.seek(0)
# str.encode("base64") only existed on Python 2; use the base64 module
return base64.b64encode(output.read()).decode("ascii")
if __name__ == "__main__":
for data in sys.stdin:
b64 = _generate_xlsx(json.loads(data))
print(json.dumps({"xlsx": b64}))
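
# Example invocation (a sketch; the host and credentials are placeholders):
# feed one JSON object per line on stdin, e.g.
#   {"address": "ftp.example.com", "user": "anonymous", "password": "guest", "path": "/pub"}
# and the script prints one JSON object per line: {"xlsx": "<base64 workbook>"}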