code
stringlengths 1
199k
|
|---|
import mock
from buildbot.changes import base
from buildbot.test.util import changesource
from buildbot.test.util import compat
from twisted.internet import defer
from twisted.internet import reactor
from twisted.internet import task
from twisted.trial import unittest
class TestChangeSource(changesource.ChangeSourceMixin, unittest.TestCase):
    """Tests for the master-claiming behaviour of the base ChangeSource."""

    class Subclass(base.ChangeSource):
        pass

    @defer.inlineCallbacks
    def setUp(self):
        yield self.setUpChangeSource()
        self.attachChangeSource(self.Subclass(name="DummyCS"))

    def tearDown(self):
        return self.tearDownChangeSource()

    @defer.inlineCallbacks
    def test_activation(self):
        source = self.Subclass(name="DummyCS")
        source.activate = mock.Mock(return_value=defer.succeed(None))
        source.deactivate = mock.Mock(return_value=defer.succeed(None))
        # assign the changesource id, then claim it for a different master
        self.attachChangeSource(source)
        self.setChangeSourceToMaster(self.OTHER_MASTER_ID)
        source.startService()
        # advance just short of one full poll interval, in several steps
        # (1/2 + 1/5 + 1/5 of the interval)
        for divisor in (2, 5, 5):
            source.clock.advance(source.POLL_INTERVAL_SEC / divisor)
        # still claimed elsewhere: nothing should have happened yet
        self.assertFalse(source.activate.called)
        self.assertFalse(source.deactivate.called)
        self.assertFalse(source.active)
        self.assertEqual(source.serviceid, self.DUMMY_CHANGESOURCE_ID)
        # release the claim; the next poll should activate this source
        self.setChangeSourceToMaster(None)
        source.clock.advance(source.POLL_INTERVAL_SEC)
        self.assertTrue(source.activate.called)
        self.assertFalse(source.deactivate.called)
        self.assertTrue(source.active)
        # stopping the service must deactivate the source
        yield source.stopService()
        self.assertTrue(source.activate.called)
        self.assertTrue(source.deactivate.called)
        self.assertFalse(source.active)
class TestPollingChangeSource(changesource.ChangeSourceMixin, unittest.TestCase):
    """Tests for the polling loop of PollingChangeSource."""

    class Subclass(base.PollingChangeSource):
        pass

    def setUp(self):
        # substitute a Clock for the reactor so the tests control time
        self.clock = task.Clock()
        self.patch(reactor, 'callLater', self.clock.callLater)
        self.patch(reactor, 'seconds', self.clock.seconds)
        d = self.setUpChangeSource()

        def create_changesource(_):
            self.attachChangeSource(self.Subclass(name="DummyCS"))
        d.addCallback(create_changesource)
        return d

    def tearDown(self):
        return self.tearDownChangeSource()

    def runClockFor(self, _, secs):
        # step the fake clock forward one second at a time
        self.clock.pump([1.0] * secs)

    def test_loop_loops(self):
        # record the fake-clock time of every poll() call
        polls = []
        self.changesource.poll = lambda: polls.append(self.clock.seconds())
        self.changesource.pollInterval = 5
        self.startChangeSource()
        d = defer.Deferred()
        d.addCallback(self.runClockFor, 12)

        @d.addCallback
        def check(_):
            # by default there is *no* poll at time 0
            self.assertEqual(polls, [5.0, 10.0])
        reactor.callWhenRunning(d.callback, None)
        return d

    @compat.usesFlushLoggedErrors
    def test_loop_exception(self):
        # record the fake-clock time of every poll() call
        polls = []

        def poll():
            polls.append(self.clock.seconds())
            raise RuntimeError("oh noes")
        self.changesource.poll = poll
        self.changesource.pollInterval = 5
        self.startChangeSource()
        d = defer.Deferred()
        d.addCallback(self.runClockFor, 12)

        @d.addCallback
        def check(_):
            # the loop keeps going after an error, and each error is logged
            self.assertEqual(polls, [5.0, 10.0])
            self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 2)
        reactor.callWhenRunning(d.callback, None)
        return d

    def test_poll_only_if_activated(self):
        """The polling logic only applies if the source actually starts!"""
        self.setChangeSourceToMaster(self.OTHER_MASTER_ID)
        polls = []
        self.changesource.poll = lambda: polls.append(self.clock.seconds())
        self.changesource.pollInterval = 5
        self.startChangeSource()
        d = defer.Deferred()
        d.addCallback(self.runClockFor, 12)

        @d.addCallback
        def check(_):
            # nothing happened: another master already claimed the source
            self.assertEqual(polls, [])
        reactor.callWhenRunning(d.callback, None)
        return d

    def test_pollAtLaunch(self):
        # record the fake-clock time of every poll() call
        polls = []
        self.changesource.poll = lambda: polls.append(self.clock.seconds())
        self.changesource.pollInterval = 5
        self.changesource.pollAtLaunch = True
        self.startChangeSource()
        d = defer.Deferred()
        d.addCallback(self.runClockFor, 12)

        @d.addCallback
        def check(_):
            # pollAtLaunch=True triggers an immediate poll at time 0
            self.assertEqual(polls, [0.0, 5.0, 10.0])
        reactor.callWhenRunning(d.callback, None)
        return d
|
import unittest
"""684. Redundant Connection
https://leetcode.com/problems/redundant-connection/description/
In this problem, a tree is an **undirected** graph that is connected and has
no cycles.
The given input is a graph that started as a tree with N nodes (with distinct
values 1, 2, ..., N), with one additional edge added. The added edge has two
different vertices chosen from 1 to N, and was not an edge that already
existed.
The resulting graph is given as a 2D-array of `edges`. Each element of `edges`
is a pair `[u, v]` with `u < v`, that represents an **undirected** edge
connecting nodes `u` and `v`.
Return an edge that can be removed so that the resulting graph is a tree of N
nodes. If there are multiple answers, return the answer that occurs last in
the given 2D-array. The answer edge `[u, v]` should be in the same format,
with `u < v`.
**Example 1:**
**Input:** [[1,2], [1,3], [2,3]]
**Output:** [2,3]
**Explanation:** The given undirected graph will be like this:
1
/ \
2 - 3
**Example 2:**
**Input:** [[1,2], [2,3], [3,4], [1,4], [1,5]]
**Output:** [1,4]
**Explanation:** The given undirected graph will be like this:
5 - 1 - 2
| |
4 - 3
**Note:**
* The size of the input 2D-array will be between 3 and 1000.
* Every integer represented in the 2D-array will be between 1 and N, where N is the size of the input array.
**Update (2017-09-26):**
We have overhauled the problem description + test cases and specified clearly
the graph is an **_undirected_** graph. For the **_directed_** graph follow up
please see **[Redundant Connection II](https://leetcode.com/problems
/redundant-connection-ii/description/)** ). We apologize for any inconvenience
caused.
Similar Questions:
Redundant Connection II (redundant-connection-ii)
Accounts Merge (accounts-merge)
"""
class Solution(object):
    def findRedundantConnection(self, edges):
        """Return the last edge whose removal leaves the graph a tree.

        Union-find: scan the edges in input order; the first edge that
        joins two vertices which are already connected closes the cycle,
        and because we scan in input order it is also the answer that
        occurs last among all valid answers.

        :type edges: List[List[int]]
        :rtype: List[int]
        """
        # vertices are labelled 1..N where N == len(edges)
        parent = list(range(len(edges) + 1))

        def find(x):
            # find the root of x with path halving
            while parent[x] != x:
                parent[x] = parent[parent[x]]
                x = parent[x]
            return x

        for u, v in edges:
            root_u, root_v = find(u), find(v)
            if root_u == root_v:
                # u and v were already connected: this edge is redundant
                return [u, v]
            parent[root_u] = root_v
        # per the problem statement exactly one redundant edge exists,
        # so this is unreachable for valid input
        return []

    def test(self):
        # placeholder kept for the unittest entry point below
        pass


if __name__ == "__main__":
    unittest.main()
|
import json
from flask import Blueprint, jsonify, request, g
from flask_autodoc import Autodoc
item_api = Blueprint('itemApi', __name__)
auto = Autodoc()
def get_item_as_object(item):
    """Serialize an Item model into a plain, JSON-friendly dict."""
    fields = ("name", "description", "imageURL", "price",
              "calories", "category", "tags")
    result = {"_id": str(item.mongo_id)}
    for field in fields:
        result[field] = getattr(item, field)
    return result
@item_api.route('/item/spec', strict_slashes=False)
def login_doc():
    """Serve the auto-generated HTML documentation for the /item routes.

    :return: rendered HTML produced by Flask-Autodoc
    """
    return auto.html()
@item_api.route('/item', strict_slashes=False, methods=['GET'])
@auto.doc()
def get_all_items() -> dict:
    """Return every item in the collection as a JSON array.

    :return: Flask response with {'data': {'items': [...]}}
    """
    from keanu.models.items import Item
    # serialize every stored item
    serialized = [get_item_as_object(item) for item in Item.query.all()]
    return jsonify({'data': {'items': serialized}})
@item_api.route('/item/id/<item_id>', strict_slashes=False, methods=['GET'])
@auto.doc()
def get_item_by_id(item_id) -> tuple:
    """Return the single item whose mongo id matches *item_id*.

    :param item_id: string form of the item's mongo id (from the URL)
    :return: {'data': {'item': ...}} or a 404 error when no item matches
    """
    from keanu.models.items import Item
    # find the specific item
    item = Item.query.filter(Item.mongo_id == item_id).first()
    if item is None:
        # previously this crashed with AttributeError when the id was unknown
        return jsonify({'error': 'No item found with id ' + item_id}), 404
    return jsonify({'data': {'item': get_item_as_object(item)}})
@item_api.route('/item/category/<category>', strict_slashes=False, methods=['GET'])
@auto.doc()
def get_item_by_category(category) -> tuple:
    """Return all items belonging to *category* as a JSON array.

    :param category: category name from the URL
    :return: Flask response with {'data': {'items': [...]}}
    """
    from keanu.models.items import Item
    matching = Item.query.filter(Item.category == category)
    serialized = [get_item_as_object(item) for item in matching]
    return jsonify({'data': {'items': serialized}})
@item_api.route('/item/category/<category>/count', strict_slashes=False, methods=['GET'])
def get_category_count(category) -> tuple:
    """Return the number of items in *category*.

    :param category: category name from the URL
    :return: Flask response with {'data': {'count': N}}
    """
    # reuse the category endpoint and count the items in its JSON payload
    response = get_item_by_category(category)
    count = len(json.loads(response.data)['data']['items'])
    return jsonify({'data': {'count': count}})
@item_api.route('/item/search', strict_slashes=False, methods=['GET'])
@auto.doc()
def search_item() -> tuple:
    """Search items by name; queries longer than 3 characters also match tags.

    :return: Flask response {'data': {'items': [...]}} with duplicates removed,
             or an error payload when no query was supplied
    """
    from keanu.models.items import Item
    # request.args['q'] raised KeyError (-> HTTP 400) when the parameter was
    # absent; use .get() so the documented error payload is returned instead
    query: str = request.args.get('q', '')
    if not query:
        return jsonify({'error': 'no search results provided'})
    # the original .title() call was dead: the value was always lowered
    # again before use, so only the lowercase form matters
    needle = query.lower()
    items = Item.query.filter(Item.name.startswith(needle)).all()
    if len(query) > 3:
        items = items + Item.query.filter(Item.tags.startswith(needle)).all()
    # deduplicate by mongo id while preserving order; reuse the shared
    # serializer instead of duplicating the field list here
    items_list = []
    seen_ids = set()
    for item in items:
        item_id = str(item.mongo_id)
        if item_id not in seen_ids:
            items_list.append(get_item_as_object(item))
            seen_ids.add(item_id)
    return jsonify({'data': {'items': items_list}})
@item_api.route('/admin/item/add', strict_slashes=False, methods=['POST'])
@auto.doc()
def add_new_item() -> tuple:
    """Create a new item from the posted JSON body (admin only).

    :return: the stored payload and the new item id, or a 403 error
    """
    from keanu.models.items import Item
    if request.json is not None and g.is_admin:
        new_item = Item(
            name=request.json['name'],
            description=request.json['description'],
            imageURL=request.json['imageURL'],
            price=request.json['price'],
            calories=request.json['calories'],
            category=request.json['category'],
            tags=request.json['tags']
        )
        new_item.save()
        return jsonify({'data': {'item': request.json, 'itemId': str(new_item.mongo_id)}})
    else:
        # 'invalid item' + request.json raised TypeError here (request.json
        # is a dict or None, not a str); format the payload into the message
        return jsonify({'error': 'invalid item {!r}'.format(request.json)}), 403
@item_api.route('/admin/item/delete/<item_id>', strict_slashes=False, methods=['POST'])
@auto.doc()
def delete_item(item_id):
    """Delete the item with the given id (admin only).

    :param item_id: string form of the item's mongo id (from the URL)
    :return: success payload, a 403 when not admin, or a not-found error
    """
    from keanu.models.items import Item
    # search for the item by id
    item = Item.query.get(str(item_id))
    if item is None:
        return jsonify({'error': 'No item found with id ' + item_id})
    if not g.is_admin:
        # previously a non-admin caller got the misleading
        # "No item found" message; report the real reason instead
        return jsonify({'error': 'admin privileges required'}), 403
    item.remove()
    return jsonify({'data': {'success': True}})
@item_api.route('/admin/item/update', strict_slashes=False, methods=['POST'])
@auto.doc()
def update_item():
    """Update an existing item from the posted JSON body (admin only).

    :return: confirmation with the item's id, or an error payload
    """
    from keanu.models.items import Item
    # match the other /admin routes: require a JSON body *and* admin rights
    # (the admin check was missing here, unlike the add/delete endpoints)
    if request.json is None or not g.is_admin:
        return jsonify({'error': 'item not updated'}), 403
    item_update = Item.query.get(request.json['_id'])
    if item_update is None:
        # previously an unknown _id crashed with AttributeError
        return jsonify({'error': 'No item found with id ' + str(request.json['_id'])})
    for field in ('calories', 'category', 'description', 'imageURL',
                  'name', 'price', 'tags'):
        setattr(item_update, field, request.json[field])
    item_update.save()
    return jsonify({'data': {'message': 'Updated with item id: ' + str(item_update.mongo_id),
                             'mongo_id': str(item_update.mongo_id)}
                    })
|
from tracks.core import MultiTracksReader, dump_track, MultiTracksWriter
import numpy
__all__ = [
"pca_levels", "CovarianceMatrix", "CovarianceBlocks",
"cov_overlap", "cov_overlap_multi",
"pca_common_usage", "pca_common_script",
]
def pca_levels(mtr, num_levels, weights=None, correlation=False, reference=None):
    """Perform a full principal component analysis at different block levels

    Arguments:
      mtr -- A MultiTracksReader that iterates over the input at each time
             step.
      num_levels -- The number of levels of block sizes to perform the pca
                    on. The first level is all data in one block, the second
                    level has the data divided in two equal blocks, then
                    four equal blocks, then eight equal blocks and so on.

    Optional arguments:
      weights -- When given, the principal components are computed in
                 weighted coordinates.
      correlation -- When True, the analysis is performed on the matrix
                     with correlation coefficients. This might be useful
                     to compare the eigenvalue spectrum with the Wishart
                     distribution.
      reference -- When given, the reference is assumed to be the vector
                   with the averages of the inputs. Otherwise, the averages
                   are derived from the input.

    Returns:
      cm -- A covariance matrix object for the entire trajectory. (if the
            number of frames is uneven, the last frame is dropped.)
      overlap -- The overlap between the covariance matrix of the first
                 and the last half of the trajectory. (see cov_overlap)
      cbs -- The list of CovarianceBlocks objects, one per level, ordered
             from the coarsest level (one block) to the finest.
    """
    if weights is not None and correlation:
        raise ValueError("Weighted coordinates have no effect when computing the correlation matrix.")
    # the smallest block size: the trajectory split into 2**(num_levels-1)
    # equal parts (Python 2 integer division)
    block_size = mtr.shortest/(2**(num_levels-1))
    cb_max = CovarianceBlocks(block_size, weights, correlation, reference)
    for data in mtr.iter_buffers():
        data = data["data"]
        cb_max.add_data(data)
    # derive the coarser levels by repeatedly merging pairs of blocks;
    # cbs[0] ends up with a single block covering the whole trajectory
    cbs = [cb_max]
    for i in xrange(num_levels-1):
        cbs.insert(0, cbs[0].reduce_blocks())
    return cbs[0].blocks[0], cov_overlap(cbs[1].blocks[0].cov, cbs[1].blocks[1].cov), cbs
class CovarianceMatrix(object):
    """A container for all information related to a covariance matrix."""

    def __init__(self, length, matrix, sum, weights=None, correlation=False, reference=None):
        """Initialize a covariance matrix object

        The arguments to initialize a CovarianceMatrix instance are built up
        by processing a trajectory. Look at CovarianceBlocks for an example.
        They are all stored as attributes. Also some derived properties are
        computed during the initialization. In a second stage, one can
        reprocess that data with init_proj, data_proj and finish_proj to
        compute the principal components and the cosine content.

        Arguments:
          length -- the length of (a part of) the trajectory used to
                    construct the matrix
          matrix -- the matrix built up by adding
                    numpy.dot(data.transpose(), data) for multiple data
                    arrays belonging to one block
          sum -- the sum of the data over time

        Optional arguments:
          weights -- When given, the principal components are computed in
                     weighted coordinates.
          correlation -- When True, the analysis is performed on the matrix
                         with correlation coefficients. This might be useful
                         to compare the eigenvalue spectrum with the Wishart
                         distribution.
          reference -- When given, the reference is assumed to be the vector
                       with the averages of the inputs. Otherwise, the
                       averages are derived from the input.

        Derived attributes:
          cov -- The actual covariance/correlation matrix
          mean -- The average of the inputs over time
          evals -- The eigenvalues of the covariance matrix
          evecs -- The corresponding eigenvectors
          sigmas -- The square roots of the eigenvalues.

        Attributes available after projection:
          sqnorms_data -- squared norms of the principal components
          sqnorms_cos -- squared norms of the cosines
          dot_data_cos -- inner product between principal component and
                          cosine
          ccs -- the cosine contents of each principal component
        """
        # the raw data
        self.length = length
        self.matrix = matrix
        self.sum = sum
        self.weights = weights
        self.correlation = correlation
        self.reference = reference
        # the derived properties
        self.cov = self.matrix / self.length # second moment matrix (so far)
        if self.reference is None:
            self.mean = self.sum / self.length
        else:
            self.mean = self.reference
        # subtract the outer product of the mean: second moment -> covariance
        self.cov -= numpy.outer(self.mean, self.mean)
        if self.correlation:
            # normalize by the standard deviations -> correlation matrix
            diag_sqrt = numpy.sqrt(numpy.diag(self.cov))
            self.cov /= numpy.outer(diag_sqrt, diag_sqrt)
        elif self.weights is not None:
            # apply mass/weight scaling to the covariance matrix
            scale = numpy.sqrt(self.weights)
            self.cov *= numpy.outer(scale, scale)
        # the eigen decomposition
        self.evals, self.evecs = numpy.linalg.eigh(self.cov)
        # largest eigenvalues first
        self.evals = self.evals[::-1]
        self.evecs = self.evecs[:,::-1]
        # abs() guards against tiny negative eigenvalues due to round-off
        self.sigmas = numpy.sqrt(abs(self.evals))

    def init_proj(self, output_prefix=None):
        """Setup the projection of the trajectory on the pca eigenmodes.

        When output_prefix is given, the principal components are written
        to tracks with the following filenames: ${output_prefix}.${index}.

        After init_proj, call data_proj one or more times with the relevant
        data segments from the trajectory. Finally, call finish_proj.
        """
        N = len(self.evals)
        if output_prefix is not None:
            paths_out = [
                "%s.pc.%07i" % (output_prefix, index)
                for index in xrange(N)
            ]
            dtype = numpy.dtype([("data", float, N)])
            self.proj_mtw = MultiTracksWriter(paths_out, dtype)
        else:
            self.proj_mtw = None
        # accumulators for the cosine-content computation
        self.sqnorms_data = numpy.zeros(N, float)
        self.sqnorms_cos = numpy.zeros(N, float)
        self.dot_data_cos = numpy.zeros(N, float)
        self.proj_counter = 0

    def data_proj(self, data):
        """Process data to compute the principal components and the cosine content

        First call init_proj, then call this routine multiple times. Finally
        call finish_proj.
        """
        data = data - self.mean
        # apply the same transformation that was used to build self.cov
        if self.correlation:
            data /= numpy.sqrt(numpy.diag(self.cov))
        elif self.weights is not None:
            data *= self.weights
        # project onto the eigenmodes -> principal components
        pcs = numpy.dot(data, self.evecs)
        if self.proj_mtw is not None:
            self.proj_mtw.dump_buffer({"data": pcs})
        # time grid in [0, pi) for the reference cosines (Hess's method)
        t = numpy.arange(self.proj_counter, self.proj_counter+len(data))*(numpy.pi/self.length)
        for i in xrange(data.shape[1]): # iterate over the columns
            c = numpy.cos((1+i)*t)
            self.sqnorms_data[i] += numpy.dot(pcs[:,i], pcs[:,i])
            self.sqnorms_cos[i] += numpy.dot(c, c)
            self.dot_data_cos[i] += numpy.dot(pcs[:,i], c)
        self.proj_counter += len(data)

    def finish_proj(self):
        """Compute the actual cosine contents.

        Call finish_proj after the last call to data_proj.
        """
        # the small constant avoids division by zero for degenerate modes
        self.ccs = self.dot_data_cos**2/(self.sqnorms_data*self.sqnorms_cos+1e-10)
        if self.proj_mtw is not None:
            self.proj_mtw.finish()
        del self.proj_mtw
        del self.proj_counter
class CovarianceBlocks(object):
    """A tool to compute covariance matrices efficiently at different block sizes.

    Start with an instance with small block sizes, preferentially using the
    following formula:

      block_size = total_size / (2**(levels-1))

    where levels is the number of different block sizes one wants to
    consider. Then call the add_data method with data arrays. (Rows
    correspond to different time steps, columns are different
    coordinates/fields/...) A list of covariance matrix objects,
    self.blocks, is generated gradually when more data is provided with
    the add_data method.

    The reduce_blocks method can be used to obtain similar information for
    larger block sizes. It takes one argument, num, which is the number of
    blocks that will be put together to form a new block. A new
    CovarianceBlocks instance is returned as if it was constructed with a
    larger block_size, equal to the original block_size times num. When num
    is larger than the number of available blocks, an error is raised.

    Finally one can reprocess all the data with the method project_data to
    project the trajectories on the eigen modes as to compute the principal
    components and the cosine content associated with each mode.
    """

    def __init__(self, block_size, weights=None, correlation=False, reference=None):
        """Initialize a CovarianceBlocks object.

        Arguments:
          block_size -- The size of the data blocks for which covariance
                        matrices are constructed.
          weights -- The weights associated with the coordinates. When not
                     given, all weights are equal to one.
          correlation -- Produce matrices with correlation coefficients
                         instead of covariance matrices. (default=False)
          reference -- A predefined mean for the input signals. If not
                       given, the mean is derived from the input data.
        """
        if weights is not None and correlation:
            raise ValueError("Weighted coordinates have no effect when computing the correlation matrix.")
        if block_size <= 0:
            raise ValueError("The second argument, block_size, must be strictly positive.")
        self.block_size = block_size
        self.weights = weights
        self.correlation = correlation
        self.reference = reference
        self.blocks = []
        self._init_matrix()
        self._init_proj()

    def _init_matrix(self):
        """Private method used by add_data: reset the running accumulators."""
        self._current_length = 0
        self._current_matrix = 0.0
        if self.reference is None:
            self._current_sum = 0.0
        else:
            # with an external reference the mean is not accumulated
            self._current_sum = None

    def _add_matrix(self, data):
        """Private method used by add_data: accumulate one data segment."""
        self._current_length += len(data)
        self._current_matrix += numpy.dot(data.transpose(), data)
        if self.reference is None:
            self._current_sum += data.sum(axis=0)

    def _finish_matrix(self):
        """Private method used by add_data: close the current block."""
        self.blocks.append(CovarianceMatrix(
            self._current_length, self._current_matrix, self._current_sum,
            self.weights, self.correlation, self.reference
        ))
        self._init_matrix()

    def add_data(self, data):
        """Provide new data to compute covariance matrices.

        Argument:
          data -- a new array with input data: rows correspond to time
                  frames and columns to coordinates/fields/...

        Make sure the number of columns is always the same for one
        CovarianceBlocks instance.
        """
        pos = 0
        while pos < len(data):
            remaining = len(data) - pos
            todo = self.block_size - self._current_length
            if remaining >= todo:
                # enough data available to complete the current block
                self._add_matrix(data[pos:pos+todo])
                pos += todo
                self._finish_matrix()
            else: # todo > remaining
                # not enough data; keep accumulating into the open block
                self._add_matrix(data[pos:])
                break

    def reduce_blocks(self, num=2):
        """Create a new CovarianceBlocks object based on fewer, but larger blocks.

        Argument:
          num -- The reduction factor for the number of blocks.

        The returned CovarianceBlocks object acts as if it was constructed
        with a block size equal to the original block size times the num
        argument. However it is computationally much cheaper to call this
        method.
        """
        if num > len(self.blocks):
            raise ValueError("Not enough blocks: num=%i > len(self.covs)=%i" % (num, len(self.blocks)))
        result = CovarianceBlocks(self.block_size*num, self.weights, self.correlation, self.reference)
        # merge groups of num consecutive blocks by summing their raw data
        # (Python 2 integer division: a trailing partial group is dropped)
        for i in xrange(len(self.blocks)/num):
            for j in xrange(num):
                result._current_matrix += self.blocks[i*num+j].matrix
                if self.reference is None:
                    result._current_sum += self.blocks[i*num+j].sum
                result._current_length += self.blocks[i*num+j].length
            result._finish_matrix()
        return result

    def _init_proj(self):
        # proj_counter == -1 means project_data has not been called yet
        self.proj_counter = -1
        self.proj_done = 0

    def project_data(self, data, output_prefix=None):
        """Process the data in a second run to compute the principal components
        and the cosine content.

        The cosine contents are stored as a ccs attribute of each covariance
        matrix object. The cosine content is a measure for random walk motion
        in the trajectory data. More background can be found in the work of
        Berk Hess:

          http://dx.doi.org/10.1103/PhysRevE.65.031910
          http://dx.doi.org/10.1103/PhysRevE.62.8438

        Argument:
          data -- a new array with input data: rows correspond to time
                  frames and columns to coordinates/fields/...

        Optional argument:
          output_prefix -- A filename prefix for the tracks that will
                           contain the principal components. If not given,
                           the principal components are not written to
                           disk.

        Make sure the number of columns is always the same for one
        CovarianceBlocks instance.
        """
        def get_output_prefix():
            # one set of tracks per block, unless there is only one block
            if output_prefix is None:
                return None
            else:
                if len(self.blocks) == 1:
                    return output_prefix
                else:
                    return "%s.%07i" % (output_prefix, self.proj_counter)
        if self.proj_counter == -1:
            # first call: start projecting on the first block
            self.proj_counter = 0
            self.blocks[0].init_proj(get_output_prefix())
        pos = 0
        while pos < len(data):
            if self.proj_counter >= len(self.blocks):
                # NOTE: frames beyond the blocked region (e.g. an odd tail
                # dropped by add_data) are deliberately ignored instead of
                # raising an error
                return
            remaining = len(data) - pos
            todo = self.block_size - self.proj_done
            if remaining >= todo:
                # enough data to finish the projection on the current block
                self.blocks[self.proj_counter].data_proj(data[pos:pos+todo])
                self.blocks[self.proj_counter].finish_proj()
                pos += todo
                self.proj_done += todo
                self.proj_counter += 1
                if self.proj_counter < len(self.blocks):
                    self.proj_done = 0
                    self.blocks[self.proj_counter].init_proj(get_output_prefix())
            else: # todo > remaining
                self.blocks[self.proj_counter].data_proj(data[pos:])
                self.proj_done += remaining
                break

    def get_averages(self):
        """Compute the most relevant global properties over the blocks.

        Returns sigmas, sigmas_err, ccs, ccs_err and overlap_multi; the
        error estimates and the multi-matrix overlap are None when there
        is only one block.
        """
        all_sigmas = numpy.array([block.sigmas for block in self.blocks])
        all_ccs = numpy.array([block.ccs for block in self.blocks])
        sigmas = all_sigmas.mean(axis=0)
        ccs = all_ccs.mean(axis=0)
        if len(self.blocks) > 1:
            sigmas_err = all_sigmas.std(axis=0)/len(self.blocks)
            ccs_err = all_ccs.std(axis=0)/len(self.blocks)
            overlap_multi = cov_overlap_multi([block.cov for block in self.blocks])
        else:
            sigmas_err = None
            ccs_err = None
            overlap_multi = None
        return sigmas, sigmas_err, ccs, ccs_err, overlap_multi
def cov_overlap(A, B):
    """Compute the overlap between two covariance matrices.

    A and B are square numpy arrays of equal size. The result is a scalar
    in the range [0, 1]: 1 means both matrices are identical, 0 means they
    are each other's opposites (which never happens for covariance
    matrices, since those are positive definite).

    The exact formula is derived in the following paper:

      Hess, B. Physical Review E 2002, 65, 031910

    Cite this paper when you use this routine.
    """
    def _sym_sqrt(M):
        # symmetric square root via the eigen decomposition; abs() guards
        # against tiny negative eigenvalues from round-off
        evals, evecs = numpy.linalg.eigh(M)
        return numpy.dot(evecs.transpose(), evecs * numpy.sqrt(abs(evals)))

    largest_distance = numpy.sqrt(numpy.trace(A) + numpy.trace(B))
    if largest_distance == 0:
        return 1.0
    difference = _sym_sqrt(A) - _sym_sqrt(B)
    distance = numpy.linalg.norm(difference.ravel())
    return 1 - distance/largest_distance
def cov_overlap_multi(covs):
    """Compute the similarity for a list of covariance matrices.

    covs is a list of at least two square numpy arrays. The similarity is
    an RMSD over RMSA ratio: RMSD is the root mean square deviation of the
    matrix square roots with respect to their average, and RMSA is the
    root mean square with respect to zero ("A" stands for absolute).
    """
    if len(covs) < 2:
        raise ValueError("At least two covariance matrices are expected")
    roots = numpy.zeros((len(covs),) + covs[0].shape, float)
    for index, cov in enumerate(covs):
        # symmetric square root of each covariance matrix
        evals, evecs = numpy.linalg.eigh(cov)
        roots[index] = numpy.dot(evecs.transpose(), evecs * numpy.sqrt(abs(evals)))
    deviation = roots - roots.mean(axis=0)
    msd = (deviation**2).mean()
    msa = (roots**2).mean()
    if msa == 0:
        return 0
    return numpy.sqrt(msd/msa)
pca_common_usage = """The following files are always written to the tracks database
${output_prefix}.cov : a flattened covariance matrix (NxN elements)
${output_prefix}.evals : the covariance eigen values
${output_prefix}.sigmas : the square roots of the covariance eigen values
${output_prefix}.mode.${i} : the covariance modes
${output_prefix}.ccs : the cosine content of each mode
${output_prefix}.cosamp : the amplitudes of the cosines of each mode
Optionally, the principal components (the time dependent amplitudes) can be
written to disk.
${output_prefix}.pc.${i} : the principal components
"""
def pca_common_script(
    paths_in, dtype, sub, weights, correlation, reference, output_prefix,
    num_levels, dump_pcs, unit_name, unit
):
    """Shared code by tr-pca and tr-pca-geom.

    This function has only one purpose: bugs common to tr-pca and tr-pca-geom
    have to be fixed only once.

    Arguments:
      paths_in -- the input track filenames
      dtype -- the numpy dtype used to read the tracks
      sub -- a slice applied to the input (passed to MultiTracksReader)
      weights, correlation, reference -- see pca_levels
      output_prefix -- prefix for all output tracks (see pca_common_usage)
      num_levels -- the number of block levels, at least 2
      dump_pcs -- when True, also write the principal components to disk
      unit_name -- unit label used in the printed tables
      unit -- conversion factor applied to the sigmas in the printed tables

    Returns the mean vector of the full-trajectory covariance matrix.
    """
    if num_levels < 2:
        raise ValueError("num_levels must be at least 2.")
    # call pca routine in tracks.api
    mtr = MultiTracksReader(paths_in, dtype, sub=sub)
    cm, overlap, cbs = pca_levels(mtr, num_levels, weights, correlation, reference)
    # dump the full-trajectory results to disk
    dump_track("%s.cov" % output_prefix, cm.cov.ravel())
    dump_track("%s.evals" % output_prefix, cm.evals)
    dump_track("%s.sigmas" % output_prefix, cm.sigmas)
    for i in xrange(len(cm.evals)):
        dump_track("%s.mode.%07i" % (output_prefix, i), cm.evecs[:,i])
    # second pass over the input: compute the cosine contents and optionally
    # write the principal components to disk
    mtr = MultiTracksReader(paths_in, dtype, sub=sub)
    for data in mtr.iter_buffers():
        data = data["data"]
        for level in xrange(num_levels):
            # principal components are only dumped for the coarsest level
            if level==0 and dump_pcs:
                cbs[level].project_data(data, output_prefix)
            else:
                cbs[level].project_data(data)
    dump_track("%s.ccs" % output_prefix, cm.ccs)
    dump_track("%s.cosamp" % output_prefix, cm.dot_data_cos/cm.sqnorms_cos)
    # Print some nice screen output with the most relevant results
    # (Python 2 print statements)
    print "Overlap between the covariance of the first and the second half of the trajectory:"
    print "  %.2f %%" % (overlap*100)
    print
    # padding that keeps the table columns aligned with the unit label
    white = (" "*len(unit_name))
    for level in xrange(num_levels):
        sigmas, sigmas_err, ccs, ccs_err, overlap_multi = cbs[level].get_averages()
        print "Level %i: averages over blocks with size total/%i=%i" % (
            level, 2**level, mtr.shortest/(2**level)
        )
        if level > 0:
            print "RMSD/RMSA of the covariance matrices of each block: %.2f %%" % (overlap_multi*100)
        print
        if level == 0:
            # single block: no error estimates available
            print " Sigma [%s] Cosine content " % unit_name
            print " ------------%s ---------------- " % white
            for i in xrange(len(cm.sigmas)):
                print " %7i %9.3e%s %8.2f %%" % (
                    i, sigmas[i]/unit, white, ccs[i]*100
                )
        else:
            # multiple blocks: include the standard errors
            print " Sigma [%s] Cosine content " % unit_name
            print " ----------------------%s ---------------------- " % white
            for i in xrange(len(cm.sigmas)):
                print " %7i %9.3e +- %9.3e%s %8.2f %% +- %6.2f %%" % (
                    i, sigmas[i]/unit, sigmas_err[i]/unit, white,
                    ccs[i]*100, ccs_err[i]*100
                )
        print
        print
        if level > 0:
            # per-level averages and error bars, only defined for level > 0
            dump_track("%s.ccs.%07i" % (output_prefix, level), ccs)
            dump_track("%s.ccs_err.%07i" % (output_prefix, level), ccs_err)
            dump_track("%s.sigmas.%07i" % (output_prefix, level), sigmas)
            dump_track("%s.sigmas_err.%07i" % (output_prefix, level), sigmas_err)
    return cm.mean
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated migration: change PositionInOrder.qty to an
    # IntegerField with default 0.

    dependencies = [
        ('shop', '0008_auto_20150417_0958'),
    ]

    operations = [
        migrations.AlterField(
            model_name='positioninorder',
            name='qty',
            field=models.IntegerField(default=0),
        ),
    ]
|
"""
plainbox.impl.commands.test_run
===============================
Test definitions for plainbox.impl.run module
"""
import os
import shutil
import tempfile
from collections import OrderedDict
from inspect import cleandoc
from mock import patch
from unittest import TestCase
from plainbox.impl.box import main
from plainbox.impl.exporter.json import JSONSessionStateExporter
from plainbox.impl.exporter.rfc822 import RFC822SessionStateExporter
from plainbox.impl.exporter.text import TextSessionStateExporter
from plainbox.impl.exporter.xml import XMLSessionStateExporter
from plainbox.testing_utils.io import TestIO
class TestRun(TestCase):
def setUp(self):
# session data are kept in XDG_CACHE_HOME/plainbox/.session
# To avoid resuming a real session, we have to select a temporary
# location instead
self._sandbox = tempfile.mkdtemp()
self._env = os.environ
os.environ['XDG_CACHE_HOME'] = self._sandbox
self._exporters = OrderedDict([
('json', JSONSessionStateExporter),
('rfc822', RFC822SessionStateExporter),
('text', TextSessionStateExporter),
('xml', XMLSessionStateExporter),
])
    def test_help(self):
        """`plainbox run --help` exits with status 0 and prints the usage text."""
        with TestIO(combined=True) as io:
            with self.assertRaises(SystemExit) as call:
                main(['run', '--help'])
            self.assertEqual(call.exception.args, (0,))
        self.maxDiff = None
        expected = """
usage: plainbox run [-h] [--not-interactive] [-n] [-f FORMAT] [-p OPTIONS]
[-o FILE] [-t TRANSPORT] [--transport-where WHERE]
[--transport-options OPTIONS] [-i PATTERN] [-x PATTERN]
[-w WHITELIST]
optional arguments:
-h, --help show this help message and exit
user interface options:
--not-interactive Skip tests that require interactivity
-n, --dry-run Don't actually run any jobs
output options:
-f FORMAT, --output-format FORMAT
Save test results in the specified FORMAT (pass ? for
a list of choices)
-p OPTIONS, --output-options OPTIONS
Comma-separated list of options for the export
mechanism (pass ? for a list of choices)
-o FILE, --output-file FILE
Save test results to the specified FILE (or to stdout
if FILE is -)
-t TRANSPORT, --transport TRANSPORT
use TRANSPORT to send results somewhere (pass ? for a
list of choices)
--transport-where WHERE
Where to send data using the selected transport. This
is passed as-is and is transport-dependent.
--transport-options OPTIONS
Comma-separated list of key-value options (k=v) to be
passed to the transport.
job definition options:
-i PATTERN, --include-pattern PATTERN
Run jobs matching the given regular expression.
Matches from the start to the end of the line.
-x PATTERN, --exclude-pattern PATTERN
Do not run jobs matching the given regular expression.
Matches from the start to the end of the line.
-w WHITELIST, --whitelist WHITELIST
Load whitelist containing run patterns
"""
        self.assertEqual(io.combined, cleandoc(expected) + "\n")
    def test_run_without_args(self):
        """``plainbox run`` with no arguments runs all jobs and exits 0."""
        with TestIO(combined=True) as io:
            with self.assertRaises(SystemExit) as call:
                # stub out polkit warm-up so the test needs no authentication
                with patch('plainbox.impl.commands.run.authenticate_warmup') as mock_warmup:
                    mock_warmup.return_value = 0
                    main(['run'])
            self.assertEqual(call.exception.args, (0,))
        expected1 = """
===============================[ Analyzing Jobs ]===============================
Estimated duration cannot be determined for automated jobs.
Estimated duration cannot be determined for manual jobs.
==============================[ Running All Jobs ]==============================
==================================[ Results ]===================================
        """
        expected2 = """
===============================[ Authentication ]===============================
===============================[ Analyzing Jobs ]===============================
Estimated duration cannot be determined for automated jobs.
Estimated duration cannot be determined for manual jobs.
==============================[ Running All Jobs ]==============================
==================================[ Results ]===================================
        """
        # output differs depending on whether the authentication banner shows
        self.assertIn(io.combined, [
            cleandoc(expected1) + "\n",
            cleandoc(expected2) + "\n"])
    def test_output_format_list(self):
        """``--output-format=?`` lists the available exporters and exits 0."""
        with TestIO(combined=True) as io:
            with self.assertRaises(SystemExit) as call:
                # pin the exporter set to the fixture built in setUp
                with patch('plainbox.impl.commands.run.get_all_exporters') as mock_get_all_exporters:
                    mock_get_all_exporters.return_value = self._exporters
                    main(['run', '--output-format=?'])
            self.assertEqual(call.exception.args, (0,))
        expected = """
Available output formats: json, rfc822, text, xml
        """
        self.assertEqual(io.combined, cleandoc(expected) + "\n")
    def test_output_option_list(self):
        """``--output-option=?`` lists the options of every exporter and exits 0."""
        with TestIO(combined=True) as io:
            with self.assertRaises(SystemExit) as call:
                # pin the exporter set to the fixture built in setUp
                with patch('plainbox.impl.commands.run.get_all_exporters') as mock_get_all_exporters:
                    mock_get_all_exporters.return_value = self._exporters
                    main(['run', '--output-option=?'])
            self.assertEqual(call.exception.args, (0,))
        expected = """
Each format may support a different set of options
json: with-io-log, squash-io-log, flatten-io-log, with-run-list, with-job-list, with-resource-map, with-job-defs, with-attachments, with-comments, with-job-via, with-job-hash, machine-json
rfc822: with-io-log, squash-io-log, flatten-io-log, with-run-list, with-job-list, with-resource-map, with-job-defs, with-attachments, with-comments, with-job-via, with-job-hash
text: with-io-log, squash-io-log, flatten-io-log, with-run-list, with-job-list, with-resource-map, with-job-defs, with-attachments, with-comments, with-job-via, with-job-hash
xml:
        """
        self.assertEqual(io.combined, cleandoc(expected) + "\n")
    def tearDown(self):
        """Remove the sandbox directory and restore the environment."""
        shutil.rmtree(self._sandbox)
        # NOTE(review): setUp stored `self._env = os.environ`, which is a
        # reference to the live mapping, not a copy — so this assignment
        # rebinds the same object and the XDG_CACHE_HOME entry set in setUp
        # is NOT undone. Confirm and use os.environ.copy() in setUp.
        os.environ = self._env
|
"""
"""
from ulakbus.models.auth import Unit
from ulakbus.models.personel import Personel
from ulakbus.models.ogrenci import Okutman
from .general import ints, gender, marital_status, blood_type, driver_license_class, id_card_serial, birth_date
from .general import fake
from random import random, randint
__author__ = 'Halil İbrahim Yılmaz'
def yeni_okutman():
    """Create and persist one randomly generated Okutman (lecturer) record.

    Picks a random Personel whose title (unvan) is 1 and a random Unit of
    type 'Program', copies the personnel's identity fields onto a fresh
    Okutman instance and saves it.
    """
    personeller = Personel.objects.filter(unvan=1)
    secilen_personel = personeller[randint(0, len(personeller) - 1)]
    programlar = Unit.objects.filter(unit_type='Program')
    secilen_program = programlar[randint(0, len(programlar) - 1)]
    okutman = Okutman()
    okutman.ad = secilen_personel.ad
    okutman.soyad = secilen_personel.soyad
    okutman.unvan = secilen_personel.unvan
    okutman.birim_no = secilen_program.yoksis_no
    okutman.personel = secilen_personel
    okutman.save()
|
import wx
from .helpers import AutoListCtrl
class ItemProperties(wx.Panel):
    """Panel that lists an item's attributes and their current values."""

    def __init__(self, parent, stuff, item, context=None):
        """Build the attribute list control and populate it.

        parent: containing window; stuff: fitted wrapper (may be None);
        item: the inspected item; context: accepted but unused here —
        presumably for API symmetry with sibling panels; TODO confirm.
        """
        wx.Panel.__init__(self, parent)
        mainSizer = wx.BoxSizer(wx.VERTICAL)
        self.paramList = AutoListCtrl(self, wx.ID_ANY,
                                      style=wx.LC_REPORT | wx.LC_SINGLE_SEL | wx.LC_VRULES | wx.NO_BORDER)
        mainSizer.Add(self.paramList, 1, wx.ALL | wx.EXPAND, 0)
        self.SetSizer(mainSizer)
        self.toggleView = 1
        self.stuff = stuff
        self.item = item
        # attribute metadata and current values, filled by _fetchValues
        self.attrInfo = {}
        self.attrValues = {}
        self._fetchValues()
        self.m_staticline = wx.StaticLine(self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.LI_HORIZONTAL)
        mainSizer.Add(self.m_staticline, 0, wx.EXPAND)
        bSizer = wx.BoxSizer(wx.HORIZONTAL)
        # footer label showing the attribute count (set in PopulateList)
        self.totalAttrsLabel = wx.StaticText(self, wx.ID_ANY, " ", wx.DefaultPosition, wx.DefaultSize, 0)
        bSizer.Add(self.totalAttrsLabel, 0, wx.ALIGN_CENTER_VERTICAL | wx.RIGHT)
        mainSizer.Add(bSizer, 0, wx.ALIGN_RIGHT)
        self.PopulateList()

    def _fetchValues(self):
        """Refresh attrInfo/attrValues from the bare item or the fitted wrapper."""
        if self.stuff is None:
            # no fit context: base attributes are both info and values
            self.attrInfo.clear()
            self.attrValues.clear()
            self.attrInfo.update(self.item.attributes)
            self.attrValues.update(self.item.attributes)
        elif self.stuff.item == self.item:
            # inspecting the fitted module: show modified values
            self.attrInfo.clear()
            self.attrValues.clear()
            self.attrInfo.update(self.stuff.item.attributes)
            self.attrValues.update(self.stuff.itemModifiedAttributes)
        elif self.stuff.charge == self.item:
            # inspecting the module's charge: show its modified values
            self.attrInfo.clear()
            self.attrValues.clear()
            self.attrInfo.update(self.stuff.charge.attributes)
            self.attrValues.update(self.stuff.chargeModifiedAttributes)
        # When item for stats window no longer exists, don't change anything
        else:
            return

    def PopulateList(self):
        """Fill the list control with one name/value row per attribute."""
        self.paramList.InsertColumn(0, "Attribute")
        self.paramList.InsertColumn(1, "Current Value")
        self.paramList.SetColumnWidth(0, 110)
        self.paramList.SetColumnWidth(1, 1500)
        self.paramList.setResizeColumn(0)
        if self.stuff:
            names = dir(self.stuff)
        else:
            names = dir(self.item)
        # drop dunder names; everything else is treated as an attribute
        names = [a for a in names if not (a.startswith('__') and a.endswith('__'))]
        # map list-item data ids back to attribute names for sorting
        idNameMap = {}
        idCount = 0
        for name in names:
            try:
                if self.stuff:
                    attrName = name.title()
                    value = getattr(self.stuff, name)
                else:
                    attrName = name.title()
                    value = getattr(self.item, name)
                index = self.paramList.InsertItem(self.paramList.GetItemCount(), attrName)
                # index = self.paramList.InsertImageStringItem(sys.maxint, attrName)
                idNameMap[idCount] = attrName
                self.paramList.SetItemData(index, idCount)
                idCount += 1
                valueUnit = str(value)
                self.paramList.SetItem(index, 1, valueUnit)
            except (KeyboardInterrupt, SystemExit):
                raise
            except:
                # TODO: Add logging to this.
                # We couldn't get a property for some reason. Skip it for now.
                continue
        # sort rows alphabetically by attribute name (cmp-style comparator)
        self.paramList.SortItems(lambda id1, id2: (idNameMap[id1] > idNameMap[id2]) - (idNameMap[id1] < idNameMap[id2]))
        self.paramList.RefreshRows()
        self.totalAttrsLabel.SetLabel("%d attributes. " % idCount)
        self.Layout()
|
import os
import sys
import numpy as np
from PIL import Image
from m64py.core.defs import Buttons
import ag.logging as log
class Processing():
    """The image processing.

    This will be used by both the Process and Playback
    Modules for conversion of images for Tensorflow.
    """

    def __init__(self, folders=""):
        # folders: kept for callers that configure the instance up front;
        # doIt() receives the folder list explicitly.
        self.folders = folders

    def print(self, msg):
        """Print a status message (indirection point so GUIs can override)."""
        print(msg)

    def doIt(self, load_dir, folders, saveDir, currentGame):
        """Take care of all processing.

        For every save folder under load_dir, match controller samples to
        screenshots, convert the images to arrays, and write one compressed
        .npz dataset (images + labels) into saveDir/currentGame.
        """
        log.debug()
        if not os.path.isdir(saveDir):
            self.print("Creating folder: {}".format(saveDir))
            os.mkdir(saveDir)
        saveDir = os.path.join(saveDir, currentGame)
        if not os.path.isdir(saveDir):
            self.print("Creating folder: {}".format(saveDir))
            os.mkdir(saveDir)
        # number new dataset files after whatever already exists
        datasetIndex = len(os.listdir(saveDir))
        dataset_x = []
        dataset_y = []
        datasetFilename = "{}_dataset_{}".format(
            currentGame, datasetIndex)
        self.print("#############################################")
        self.print("# Processing Game folders to dataset")
        self.print("# Game Name: {}".format(currentGame))
        self.print("# Dataset Path: {}".format(saveDir))
        self.print("# Number of saves to process: {}".format(
            len(folders)))
        self.print("#############################################")
        # for each folder given...
        for i in folders:
            current_path = os.path.join(load_dir, i)
            self.print(
                "# Processing folder: {}".format(
                    current_path))
            self.print(
                "# Step 1: Assert #imgs == #labels")
            labels, imgs = self.gamepadImageMatcher(current_path)
            # robustness: the matcher returns (None, None) for an empty
            # folder; skip it instead of crashing on len(None) below
            if labels is None or imgs is None:
                self.print("# Skipping folder with no matched data")
                continue
            log.info("Input and Image matching completed",
                     inputs=len(labels),
                     images=len(imgs))
            dataset_y.append(labels)  # BOOM!
            self.print(
                "# Step 2: Convert img to BW np array of (x,y)")
            for image in imgs:
                img = self.prepare_image(
                    os.path.join(current_path, image))
                dataset_x.append(img)
        self.print(
            "# Step 3: Save files...\n\t{}.npz".format(
                datasetFilename))
        dataset_x = np.asarray(dataset_x)
        dataset_y = np.concatenate(dataset_y)
        self.print(
            "# To Dir:\t{}".format(saveDir))
        np.savez(os.path.join(saveDir, datasetFilename),
                 images=dataset_x,
                 labels=dataset_y)
        self.print("# Finished preparing dataset")

    @staticmethod
    def make_BW(rgb):
        """The "rec601 luma" algorithm to compute 8-bit greyscale."""
        return np.dot(rgb[..., :3], [0.299, 0.587, 0.114])

    def prepare_image(self, img, makeBW=False):
        """Resize the image to the standard (200, 66) network input size.

        makeBW: when True, also collapse RGB to rec601 greyscale.
        """
        pil_image = Image.open(img)
        # Image.ANTIALIAS was an alias of LANCZOS and was removed in
        # Pillow 10; LANCZOS is the identical filter.
        x = pil_image.resize((200, 66), Image.LANCZOS)
        numpy_img = np.array(x)
        if makeBW:
            numpy_img = self.make_BW(numpy_img)
        return numpy_img

    def gamepadImageMatcher(self, path):
        """Match gamepad data rows to images based on timestamps.

        Params: A path with timestamped pictures and a
        timestamped .csv of varying lengths.
        Returns: two arrays of matched length (labels, images), or
        (None, None) when either input is empty.
        """
        # Open input data for reading
        # FIXME: support more than player 0
        csv_path = os.path.join(path, "controller0.dat")
        # Convert to a true array. `with` guarantees the handle is closed
        # (the previous version leaked the open file).
        csv = []
        with open(csv_path, 'r') as csv_io:
            for line in csv_io:
                # Convert the compact controller data to an array of the
                # inputs we are interested in
                rawdata = [item.strip() for item in line.split(',')]
                if len(rawdata) == 2:
                    data = []
                    data.append(rawdata[0])  # timestamp
                    buttons = Buttons()
                    buttons.value = int(rawdata[1], 16)  # hex data
                    data.append(buttons.bits.X_AXIS)
                    data.append(buttons.bits.Y_AXIS)
                    data.append(buttons.bits.A_BUTTON)
                    data.append(buttons.bits.B_BUTTON)
                    data.append(buttons.bits.R_TRIG)
                    csv.append(data)
                else:
                    # bug fix: log the offending line, not the file object
                    log.error("Bad data in controller input log", line=line)
        if not csv:
            return None, None
        # Get list of images in directory and sort it
        all_files = os.listdir(path)
        images = []
        for filename in all_files:
            if filename.endswith('.png'):
                images.append(filename)
        images = sorted(images)
        if not images:
            return None, None
        # We're going to build up 2 arrays of matching size:
        keep_csv = []
        keep_images = []
        # Prime the pump (queue)...
        prev_line = csv.pop(0)
        prev_csvtime = int(prev_line[0])
        while images:
            imgfile = images[0]
            # Get image time:
            # Cut off the "gamename-" from the front and the ".png"
            hyphen = imgfile.rfind('-')  # Get last index of '-'
            if hyphen < 0:
                break
            imgtime = int(imgfile[hyphen+1:-4])  # cut it out!
            lastKeptWasImage = False  # Did we last keep an image, or a line?
            if imgtime > prev_csvtime:
                keep_images.append(imgfile)
                del images[0]
                lastKeptWasImage = True
                # We just kept an image, so we need to keep a
                # corresponding input row too
                while csv:
                    line = csv.pop(0)
                    csvtime = int(line[0])
                    if csvtime >= imgtime:
                        # We overshot the input queue... ready to
                        # keep the previous data line
                        # truncate the timestamp
                        keep_csv.append(prev_line[1:])
                        lastKeptWasImage = False
                    prev_line = line
                    prev_csvtime = csvtime
                    if csvtime >= imgtime:
                        break
                if not csv:
                    if lastKeptWasImage:
                        # truncate off the timestamp
                        keep_csv.append(prev_line[1:])
                    break
            else:
                del images[0]
        return keep_csv, keep_images
|
"""
find the sum of two binary numbers represented by a string, and print out the result in a string format.
https://leetcode.com/problems/add-binary/
date: 10/09/21
"""
def add(str1, str2):
    """Add two binary numbers given as strings; return the binary sum string.

    Pads the shorter operand with leading zeros, then does schoolbook
    binary addition from the least significant digit with a carry.
    Empty inputs yield an empty string (matching the original behavior).

    >>> add("11", "1")
    '100'
    """
    width = max(len(str1), len(str2))
    # zfill pads in one O(n) step; the previous insert-in-a-loop padding
    # was O(n^2) and also left unused min_len/max_len locals around
    a = str1.zfill(width)
    b = str2.zfill(width)
    digits = []
    carry = 0
    for i in range(width - 1, -1, -1):
        total = int(a[i]) + int(b[i]) + carry
        digits.append(str(total % 2))
        carry = total // 2
    if carry:
        digits.append(str(carry))
    # digits were collected least-significant first
    return ''.join(reversed(digits))
if __name__ == '__main__':
    # Exercise the adder on the two sample pairs from the problem page.
    for lhs, rhs in (("11", "1"), ("1010", "1011")):
        print(add(lhs, rhs))
|
"""Run dfoil test dataset"""
import sys
import subprocess
args = ("../fasta2dfoil.py", sys.argv[1],
"-o", sys.argv[1] + ".counts",
"--names P1,P2,P3,P4,PO")
print("Running ", " ".join(args))
proc = subprocess.Popen(' '.join(args), stdout=sys.stdout, stderr=sys.stderr,
shell=True)
proc.communicate()
del proc
args = ("../dfoil.py",
"--infile", sys.argv[1] + ".counts",
"--out", sys.argv[1] + ".dfoil",
"--mode dfoil",
"--plot", sys.argv[1] + ".dfoil.pdf")
print("Running ", " ".join(args))
proc = subprocess.Popen(' '.join(args), stdout=sys.stdout, stderr=sys.stderr,
shell=True)
proc.communicate()
del proc
|
from karaage.conf.defaults import * # NOQA
from karaage.tests.defaults import * # NOQA
# Karaage plugins enabled for this settings module.
PLUGINS = [
    'kgapplications.plugin',
]

# Post-process the settings in this very module (resolves derived
# settings such as the ones contributed by PLUGINS).
import sys
from karaage.conf.process import post_process
post_process(sys.modules[__name__])
|
import curses
class Item:
    """Common base for every in-game object: creatures, scenery, loot."""

    def __init__(self):
        # Defaults describe a plain, walkable, non-pickup object.
        self.blocking = False
        self.carryable = False
        self.glyph = "@"
        self.colour = curses.COLOR_GREEN

    def isBlocking(self):
        """Return True when this item prevents movement on a map."""
        return self.blocking

    def isCarryable(self):
        """Return True when this item fits in a backpack."""
        return self.carryable

    def getGlyph(self):
        """Return the single character curses draws for this item."""
        return self.glyph

    def getColour(self):
        """Return the curses colour used to render this item's tile."""
        return self.colour
|
import cairo
import copy
import gtk
import pangocairo
import webbrowser
from collections import namedtuple
from itertools import chain
import action
from appearance import CellPropertiesDialog
import constants
from gui_common import launch_dialog
from grid import Grid, decompose_word
import preferences
from preferences import read_pref_color
import transform
from view import GridPreview, DEFAULTS_CELL
from word import (CWordList,
search_wordlists,
analyze_words,
)
import cPalabra
from gui_debug import FillDebugDialog
# Options used by fill() when the caller does not override them.
DEFAULT_FILL_OPTIONS = {
    constants.FILL_OPTION_START: constants.FILL_START_AT_AUTO
    , constants.FILL_OPTION_NICE: constants.FILL_NICE_FALSE
    , constants.FILL_OPTION_DUPLICATE: constants.FILL_DUPLICATE_FALSE
    , constants.FILL_NICE_COUNT: 0
}
# Current selection: a cell (x, y) plus typing direction ("across"/"down").
Selection = namedtuple('Selection', ['x', 'y', 'direction'])
# One editor action: `type` is a tag string, `args` a dict of parameters.
EditorAction = namedtuple('EditorAction', ['type', 'args'])
# Pressed state of mouse buttons 1-3 (left, middle, right).
mouse_buttons_down = [False, False, False]
class EditorSettings:
    """Mutable state of the grid editor: render surface, user settings,
    warning flags and the current selection/cursor."""

    def __init__(self):
        self.surface = None  # cairo surface the grid is rendered onto
        self.pattern = None  # cairo pattern used when blitting to screen
        self.settings = {
            "symmetries": constants.SYM_180
            , "locked_grid": False
        }
        # all warning categories start disabled
        self.warnings = {}
        for w in constants.WARNINGS:
            self.warnings[w] = False
        self.force_redraw = True
        self.reset_controls()

    def reset_controls(self):
        """Reset selection and hover cell to 'nothing selected'."""
        self.selection = Selection(-1, -1, "across")
        self.current = (-1, -1)
# Module-level singletons shared by the editor callbacks below.
e_settings = EditorSettings()
e_tools = {}
def get_char_slots(grid, c):
    """Return length-1 across slots for every cell whose character equals c."""
    matches = []
    for x, y in grid.cells():
        if grid.data[y][x]["char"] == c:
            matches.append((x, y, "across", 1))
    return matches
def get_length_slots(grid, length):
    """Return all (x, y, direction, length) slots with exactly this length."""
    cells = []
    for d in ["across", "down"]:
        for n, x, y in grid.words_by_direction(d):
            if grid.word_length(x, y, d) == length:
                cells.append((x, y, d, length))
    return cells

def get_open_slots(grid):
    """Return length-1 across slots for every open square of the grid."""
    return [(x, y, "across", 1) for x, y in grid.compute_open_squares()]
def expand_slots(slots):
    """Expand (x, y, direction, length) slots into a flat list of cells."""
    cells = []
    for x, y, d, l in slots:
        if d == "across":
            # Python 2 idiom (xrange); the comprehension deliberately
            # reuses/shadows the loop variable for the moving coordinate.
            cells += [(x, y) for x in xrange(x, x + l)]
        elif d == "down":
            cells += [(x, y) for y in xrange(y, y + l)]
    return cells
def apply_symmetry(grid, symms, x, y):
    """Apply one or more symmetrical transforms to (x, y).

    Returns the list of counterpart cells (possibly with duplicates);
    (x, y) itself is not included.
    """
    if not grid.is_valid(x, y):
        return []
    cells = []
    width = grid.width
    height = grid.height
    if constants.SYM_HORIZONTAL in symms:
        # mirror across the horizontal axis
        cells.append((x, height - 1 - y))
    if constants.SYM_VERTICAL in symms:
        # mirror across the vertical axis
        cells.append((width - 1 - x, y))
    if ((constants.SYM_HORIZONTAL in symms and constants.SYM_VERTICAL in symms)
            or constants.SYM_180 in symms
            or constants.SYM_90 in symms
            or constants.SYM_DIAGONALS in symms):
        # 180-degree rotation (implied by all of the combinations above)
        p = width - 1 - x
        q = height - 1 - y
        cells.append((p, q))
    if constants.SYM_DIAGONALS in symms:
        # reflect across both diagonals, scaled for non-square grids
        p = int((y / float(height - 1)) * (width - 1))
        q = int((x / float(width - 1)) * (height - 1))
        cells.append((p, q))
        r = width - 1 - p
        s = height - 1 - q
        cells.append((r, s))
    if constants.SYM_90 in symms:
        # quarter-turn rotations in both directions
        cells.append((width - 1 - y, x))
        cells.append((y, height - 1 - x))
    return cells
def transform_blocks(grid, symms, x, y, status):
    """Return the (x, y, status) changes needed to set/clear a block at
    (x, y) and at its symmetrical counterparts; unchanged cells are
    filtered out."""
    if not grid.is_valid(x, y):
        return []
    cells = [(x, y)] + apply_symmetry(grid, symms, x, y)
    return [(p, q, status) for p, q in cells if status != grid.data[q][p]["block"]]

def compute_word_cells(grid, word, x, y, d):
    """Compute the cells and the characters that are part of the overlay."""
    if word is None:
        return []
    p, q = grid.get_start_word(x, y, d)
    result = decompose_word(word, p, q, d)
    # only overlay characters for cells that are still empty
    return [(x, y, c.upper()) for x, y, c in result if grid.data[y][x]["char"] == ""]
def compute_search_args(grid, slot, force=False):
    """
    Compute the arguments for searching words,
    based on the given grid and slot.

    Returns (length, constraints, more) or None when no search is useful.
    """
    x, y, d = slot
    if not grid.is_available(x, y):
        return None
    p, q = grid.get_start_word(x, y, d)
    length = grid.word_length(p, q, d)
    if length <= 1:
        return None
    constraints = grid.gather_constraints(p, q, d)
    # a fully constrained slot needs no search unless explicitly forced
    if len(constraints) == length and not force:
        return None
    more = grid.gather_all_constraints(x, y, d)
    return length, constraints, more
def fill(grid, words, fill_options):
    """Fill the grid with the given words via the cPalabra C extension.

    Builds the per-slot metadata (position, direction as 0/1, length,
    constraints and word analysis) in the shape cPalabra.fill expects.
    """
    meta = []
    g_words = [i for i in grid.words(allow_duplicates=True, include_dir=True)]
    g_lengths = {}
    g_cs = {}
    for n, x, y, d in g_words:
        g_lengths[x, y, d] = grid.word_length(x, y, d)
        g_cs[x, y, d] = grid.gather_constraints(x, y, d)
    result = analyze_words(grid, g_words, g_cs, g_lengths, words)
    for n, x, y, d in g_words:
        # the C extension expects a numeric direction, not a string
        d_i = 0 if d == "across" else 1
        l = g_lengths[x, y, d]
        cs = g_cs[x, y, d]
        meta.append((x, y, d_i, l, cs, result[x, y, d]))
    return cPalabra.fill(grid, words, meta, fill_options)
def attempt_fill(grid, words):
    """
    Return a grid with possibly the given words filled in.
    This is not intended as full-blown search so keep len(words) small.
    """
    clist = CWordList(words, index=constants.MAX_WORD_LISTS)
    options = {}
    options.update(DEFAULT_FILL_OPTIONS)
    # override the defaults: allow duplicates and require a "nice" fill
    # that uses all of the supplied words
    options.update({
        constants.FILL_OPTION_NICE: constants.FILL_NICE_TRUE
        , constants.FILL_OPTION_DUPLICATE: constants.FILL_DUPLICATE_TRUE
        , constants.FILL_NICE_COUNT: len(words)
    })
    results = fill(grid, clist.words, options)
    if results:
        # apply the first result to a copy so the input grid stays intact
        g = copy.deepcopy(grid)
        transform.modify_chars(g, chars=results[0])
        return g
    return grid
def compute_highlights(grid, f=None, arg=None, clear=False):
    """Compute the cells to highlight according to the specified function.

    f selects the criterion ("length", "char", "open", "cells", "slot",
    "slots"); arg is its parameter. clear=True yields no highlights.
    """
    cells = []
    if not clear:
        if f == "length":
            cells = get_length_slots(grid, arg)
        elif f == "char":
            cells = get_char_slots(grid, arg)
        elif f == "open":
            cells = get_open_slots(grid)
        elif f == "cells":
            cells = [(x, y, "across", 1) for x, y in arg]
        elif f == "slot":
            x, y, d = arg
            cells = [(x, y, d, grid.word_length(x, y, d))]
        elif f == "slots":
            cells = [(x, y, d, grid.word_length(x, y, d)) for x, y, d in arg]
    return cells
def compute_warnings_of_cells(grid, cells, settings):
    """Determine undesired cells based on warning settings.

    Generator yielding each (p, q) from `cells` that triggers at least one
    of the enabled warnings (unchecked, consecutive unchecked, two-letter).
    """
    # caches to avoid recomputing word starts/lengths per cell
    lengths = {}
    starts = {}
    warn_unchecked = settings[constants.WARN_UNCHECKED]
    warn_consecutive = settings[constants.WARN_CONSECUTIVE]
    warn_two_letter = settings[constants.WARN_TWO_LETTER]
    check_count = grid.get_check_count
    width, height = grid.size
    if warn_unchecked or warn_consecutive:
        counts = grid.get_check_count_all()
    if warn_two_letter:
        get_start_word = grid.get_start_word
        in_direction = grid.in_direction
        word_length = grid.word_length
    for p, q in cells:
        if warn_unchecked:
            # Color cells that are unchecked. Isolated cells are also colored.
            if 0 <= counts[p, q] <= 1:
                yield p, q
                continue
        if warn_consecutive:
            # Color consecutive (two or more) unchecked cells.
            warn = False
            if 0 <= counts[p, q] <= 1:
                for dx, dy in [(-1, 0), (1, 0), (0, -1), (0, 1)]:
                    if not (0 <= p + dx < width and 0 <= q + dy < height):
                        continue
                    if 0 <= counts[p + dx, q + dy] <= 1:
                        warn = True
                        break
            if warn:
                yield p, q
                continue
        if warn_two_letter:
            # Color words with length two.
            warn = False
            for d in ["across", "down"]:
                if (p, q, d) in starts:
                    sx, sy = starts[p, q, d]
                else:
                    sx, sy = get_start_word(p, q, d)
                    starts[p, q, d] = sx, sy
                    # prime the caches for every cell of this word
                    for zx, zy in in_direction(sx, sy, d):
                        starts[zx, zy, d] = sx, sy
                    lengths[sx, sy, d] = word_length(sx, sy, d)
                if lengths[sx, sy, d] == 2:
                    warn = True
                    break
            if warn:
                yield p, q
                continue
def compute_editor_of_cell(cells, puzzle, e_settings):
    """Compute cells that have editor related colors.

    Returns a list of (x, y, color_constant) triples for warnings, the
    selected word/cell and the hovered cell (plus its symmetry partners).
    """
    grid = puzzle.grid
    selection = e_settings.selection
    current = e_settings.current
    symmetries = e_settings.settings["symmetries"]
    warnings = e_settings.warnings
    # warnings for undesired cells
    render = []
    for wx, wy in compute_warnings_of_cells(grid, cells, warnings):
        render.append((wx, wy, constants.COLOR_WARNING))
    # blacklist
    for p, q in cells:
        if False:  # TODO until ready
            # NOTE(review): dead code — `self` is undefined in this
            # module-level function; fix the blacklist source before
            # enabling this branch.
            for bx, by, direction, length in self.blacklist:
                if direction == "across" and bx <= p < bx + length and by == q:
                    render.append((p, q, constants.COLOR_WARNING))
                elif direction == "down" and by <= q < by + length and bx == p:
                    render.append((p, q, constants.COLOR_WARNING))
    # selection line
    render.extend([(i, j, constants.COLOR_CURRENT_WORD) for i, j in grid.slot(*selection) if (i, j) in cells])
    cx, cy = current
    for p, q in cells:
        # selection cell
        if (p, q) == (selection.x, selection.y):
            render.append((p, q, constants.COLOR_PRIMARY_SELECTION))
        # current cell and symmetrical cells
        if 0 <= cx < grid.width and 0 <= cy < grid.height:
            if (p, q) in apply_symmetry(grid, symmetries, cx, cy):
                render.append((p, q, constants.COLOR_SECONDARY_ACTIVE))
            # draw current cell last to prevent
            # symmetrical cells from overlapping it
            if (p, q) == current:
                render.append((p, q, constants.COLOR_PRIMARY_ACTIVE))
    return render
def _render_cells(puzzle, cells, e_settings, drawing_area, editor=True):
    """Render the given cells to the off-screen surface and blit to screen.

    editor=False skips the editor-specific coloring (warnings/selection).
    """
    if not cells or not e_settings.surface:
        return
    view = puzzle.view
    view.select_mode(constants.VIEW_MODE_EDITOR)
    context = cairo.Context(e_settings.surface)
    width, height = puzzle.grid.size
    # clip to cells that are actually inside the grid
    cs = [(x, y) for x, y in cells if 0 <= x < width and 0 <= y < height]
    view.render_bottom(context, cs)
    if editor:
        e_cells = compute_editor_of_cell(cs, puzzle, e_settings)
        render = []
        for x, y, code in e_cells:
            r, g, b = read_pref_color(code)
            render.append((x, y, r, g, b))
        view.render_locations(context, render)
    view.render_top(context, cs)
    # blit the off-screen pattern into the on-screen drawing area
    context = drawing_area.window.cairo_create()
    context.set_source(e_settings.pattern)
    context.paint()
def on_button_release_event(drawing_area, event, window, puzzle, e_settings):
    """Record release of mouse buttons 1-3 in the module-level state."""
    if 1 <= event.button <= 3:
        mouse_buttons_down[event.button - 1] = False
    return True

def on_key_press_event(drawing_area, event, window, puzzle, e_settings):
    """Swallow key presses; all handling happens on key release."""
    return True

def on_key_release_event(drawing_area, event, window, puzzle, e_settings):
    """Translate a key release into editor actions and process them."""
    # prevent conflicts with menu shortcut keys
    if ((event.state & gtk.gdk.SHIFT_MASK) or
            (event.state & gtk.gdk.CONTROL_MASK)):
        return True
    arrows_change_dir = preferences.prefs[constants.PREF_ARROWS_CHANGE_DIR]
    actions = determine_editor_actions(puzzle.grid, e_settings.selection, event.keyval, arrows_change_dir)
    process_editor_actions(window, puzzle, e_settings, actions)
    return True
def determine_editor_actions(grid, selection, key, arrows_change_dir=False):
    """
    Determine all actions that need to take place given the current grid,
    the current selection and the key that the user has pressed.
    """
    actions = []
    if key == gtk.keysyms.BackSpace:
        actions = on_backspace(grid, selection)
    elif key == gtk.keysyms.Tab:
        # Tab toggles the typing direction
        x, y, d = selection
        if grid.is_available(x, y):
            actions = [EditorAction("swapdir", None)]
    elif key == gtk.keysyms.Home:
        # jump to the first cell of the current slot
        x, y, d = selection
        if grid.is_available(x, y):
            x, y = grid.get_cell_of_slot(selection, "start")
            actions = [EditorAction("selection", {'x': x, 'y': y})]
    elif key == gtk.keysyms.End:
        # jump to the last cell of the current slot
        x, y, d = selection
        if grid.is_available(x, y):
            x, y = grid.get_cell_of_slot(selection, "end")
            actions = [EditorAction("selection", {'x': x, 'y': y})]
    elif key == gtk.keysyms.Left:
        actions = apply_selection_delta(grid, selection, -1, 0)
    elif key == gtk.keysyms.Up:
        actions = apply_selection_delta(grid, selection, 0, -1)
    elif key == gtk.keysyms.Right:
        actions = apply_selection_delta(grid, selection, 1, 0)
        # optionally align typing direction with the arrow direction
        if arrows_change_dir and selection[2] == "down":
            actions.append(EditorAction("swapdir", None))
    elif key == gtk.keysyms.Down:
        actions = apply_selection_delta(grid, selection, 0, 1)
        if arrows_change_dir and selection[2] == "across":
            actions.append(EditorAction("swapdir", None))
    elif key == gtk.keysyms.Delete:
        actions = on_delete(grid, selection)
    else:
        # anything else is treated as typing a character
        actions = on_typing(grid, key, selection)
    return actions
def process_editor_actions(window, puzzle, e_settings, actions):
    """
    Process all the editor actions and apply them to the grid and editor controls.
    """
    is_locked = e_settings.settings["locked_grid"]
    for a in actions:
        # grid-mutating actions are suppressed while the grid is locked
        if a.type in ["blocks", "chars"] and is_locked:
            continue
        if a.type == "blocks":
            x = a.args['x']
            y = a.args['y']
            status = a.args['status']
            blocks = transform_blocks(puzzle.grid, e_settings.settings["symmetries"], x, y, status)
            if not blocks:
                continue
            window.transform_grid(transform.modify_blocks, blocks=blocks)
        elif a.type in ["chars"]:
            window.transform_grid(transform.modify_chars, chars=a.args['cells'])
        elif a.type == "selection":
            x = a.args['x']
            y = a.args['y']
            set_selection(window, puzzle, e_settings, x, y)
        elif a.type == "swapdir":
            set_selection(window, puzzle, e_settings, other_dir=True)
        elif a.type == "popup":
            x = a.args['x']
            y = a.args['y']
            button = a.args['button']
            time = a.args['time']
            _create_popup_menu(window, puzzle, button, time, x, y)
        elif a.type == "render":
            cells = a.args['cells']
            _render_cells(puzzle, cells, e_settings, window.drawing_area)
def on_typing(grid, keyval, selection):
    """Place an alphabetical character in the grid and move the selection."""
    # only a-z and the period (which places a block) are accepted
    valid = gtk.keysyms.a <= keyval <= gtk.keysyms.z or keyval == gtk.keysyms.period
    if not valid:
        return []
    x, y, direction = selection
    if not grid.is_available(x, y):
        return []
    actions = []
    if keyval == gtk.keysyms.period:
        actions.append(EditorAction("blocks", {'x': x, 'y': y, 'status': True}))
    else:
        c = chr(keyval).capitalize()
        # skip a no-op write when the cell already holds this character
        if c != grid.get_char(x, y):
            actions.append(EditorAction("chars", {'cells': [(x, y, c)]}))
    # advance the selection one cell in the typing direction
    dx = 1 if direction == "across" else 0
    dy = 1 if direction == "down" else 0
    nx, ny = x + dx, y + dy
    if grid.is_available(nx, ny):
        actions.append(EditorAction("selection", {'x': nx, 'y': ny}))
    return actions
def on_delete(grid, selection):
    """Remove the character in the selected cell."""
    x, y, d = selection
    if grid.get_char(x, y) == "":
        # nothing to clear
        return []
    return [EditorAction("chars", {'cells': [(x, y, '')]})]
def compute_selection(prev, x=None, y=None, direction=None, other_dir=False):
    """
    Compute a new selection based on the previous selection
    and the desired modifications.
    """
    if other_dir:
        # flip the typing direction relative to the previous selection
        direction = {"across": "down", "down": "across"}[prev.direction]
    nx = prev[0] if x is None else x
    ny = prev[1] if y is None else y
    ndir = prev[2] if direction is None else direction
    return nx, ny, ndir
def set_selection(window, puzzle, e_settings
        , x=None
        , y=None
        , direction=None
        , full_update=True
        , other_dir=False
        , selection_changed=True):
    """
    Select (x, y), the direction or both.
    Use other_dir to switch the typing direction to the other direction.

    full_update=False redraws only the affected cells instead of the
    whole window.
    """
    prev = e_settings.selection
    nx, ny, ndir = compute_selection(prev, x, y, direction, other_dir)
    # update the selection of the clue tool when the grid selection changes
    grid = puzzle.grid
    clue_tool = e_tools["clue"]
    if grid.is_part_of_word(nx, ny, ndir):
        p, q = grid.get_start_word(nx, ny, ndir)
        clue_tool.select(p, q, ndir)
    else:
        clue_tool.deselect()
    # if selection really changed compared to previous one, clear overlay
    if selection_changed:
        set_overlay(window, puzzle, e_settings, None)
    e_settings.selection = e_settings.selection._replace(x=nx, y=ny, direction=ndir)
    if full_update:
        window.update_window()
    else:
        # redraw only the cells of the old and the new selection
        cells = chain(grid.slot(*prev), grid.slot(nx, ny, ndir))
        _render_cells(puzzle, cells, e_settings, window.drawing_area)
def set_overlay(window, puzzle, e_settings, word=None):
    """
    Display the word in the selected slot without storing it in the grid.
    If the word is None, the overlay will be cleared.
    """
    if not puzzle.view.overlay and word is None:
        # nothing shown and nothing to show
        return
    x, y, d = e_settings.selection
    cells = compute_word_cells(puzzle.grid, word, x, y, d)
    old = puzzle.view.overlay
    puzzle.view.overlay = cells
    # redraw both the previously overlaid cells and the new ones
    render = [(x, y) for x, y, c in (old + cells)]
    _render_cells(puzzle, render, e_settings, window.drawing_area)
def apply_selection_delta(grid, selection, dx, dy):
    """Move the selection to an available nearby cell."""
    x, y, d = selection
    nx, ny = x + dx, y + dy
    if grid.is_available(nx, ny):
        return [EditorAction("selection", {'x': nx, 'y': ny})]
    # target is blocked or outside the grid: no action
    return []
def on_backspace(grid, selection):
    """Remove a character in the current or previous cell."""
    x, y, direction = selection
    # current cell has a character: clear it, selection stays put
    if grid.data[y][x]["char"] != "":
        return [EditorAction("chars", {'cells': [(x, y, '')]})]
    # otherwise step one cell back and clear that one if possible
    actions = []
    x -= (1 if direction == "across" else 0)
    y -= (1 if direction == "down" else 0)
    if grid.is_available(x, y):
        if grid.data[y][x]["char"] != "":
            actions.append(EditorAction("chars", {'cells': [(x, y, '')]}))
        actions.append(EditorAction("selection", {'x': x, 'y': y}))
    return actions
def insert(grid, slot, word):
    """Insert a word in the selected slot.

    Returns a "chars" action, or [] if the slot is unusable or the word
    adds no new characters."""
    x, y, d = slot
    if not grid.is_available(x, y):
        return []
    cells = compute_word_cells(grid, word, x, y, d)
    if not cells:
        return []
    return [EditorAction("chars", {'cells': cells})]
def highlight_cells(window, puzzle, f=None, arg=None, clear=False):
    """
    Highlight cells according to a specified function.
    Use clear=True to clear the highlights.
    """
    cells = compute_highlights(puzzle.grid, f, arg, clear)
    old = puzzle.view.highlights
    puzzle.view.highlights = cells
    # redraw the union of the previously and the newly highlighted cells
    render = list(set(expand_slots(old + cells)))
    _render_cells(puzzle, render, e_settings, window.drawing_area)
    return cells
def on_button_press(grid, event, prev, next):
    """Translate a mouse press at cell `next` into editor actions.

    prev is the previously selected cell (used to detect double-clicks);
    note `next` shadows the builtin of the same name.
    """
    prev_x, prev_y = prev
    x, y = next
    if not grid.is_valid(x, y):
        # click outside the grid clears the selection
        return [EditorAction("selection", {'x': -1, 'y': -1})]
    actions = []
    if (event.state & gtk.gdk.SHIFT_MASK):
        # shift-click places (button 1) or removes (button 3) a block
        if event.button in [1, 3]:
            args = {'x': x, 'y': y, 'status': event.button == 1}
            actions.append(EditorAction("blocks", args))
    else:
        if event.button == 1:
            # type is needed to assure rapid clicking
            # doesn't trigger it multiple times
            if (prev_x, prev_y) == (x, y) and event.type == gtk.gdk._2BUTTON_PRESS:
                actions.append(EditorAction("swapdir", None))
            if grid.is_available(x, y):
                actions.append(EditorAction("selection", {'x': x, 'y': y}))
        elif event.button == 3:
            if grid.is_valid(x, y):
                actions.append(EditorAction("popup", {'button': event.button
                    , 'time': event.time, 'x': x, 'y': y}))
                # popup menu right-click should not interfere with
                # normal editing controls
                mouse_buttons_down[2] = False
    return actions
def has_chars(grid, x, y, direction):
    """Return True when any cell of the slot through (x, y) holds a letter."""
    for p, q in grid.slot(x, y, direction):
        if grid.data[q][p]["char"] != '':
            return True
    return False
def view_cell_properties(window, puzzle, x, y):
    """Open the cell-properties dialog for (x, y) and apply the result."""
    props = {"cell": (x, y), "grid": puzzle.grid, "defaults": {}}
    # current style of this cell plus the grid-wide defaults for comparison
    for k in DEFAULTS_CELL:
        props[k] = puzzle.view.properties.style(x, y)[k]
        props["defaults"][k] = puzzle.view.properties.style()[k]
    f_done = lambda w: w.gather_appearance()
    response, appearance = launch_dialog(CellPropertiesDialog, window, props, f_done=f_done)
    if response == gtk.RESPONSE_OK:
        puzzle.view.properties.update(x, y, appearance.items())
        _render_cells(puzzle, [(x, y)], e_settings, window.drawing_area)
def on_clear_slot_select(grid, direction, x, y):
    """Return the status-bar message for the clear-slot popup menu item."""
    start_x, start_y = grid.get_start_word(x, y, direction)
    number = grid.data[start_y][start_x]["number"]
    # The lookup doubles as validation: only "across"/"down" are accepted.
    label = {"across": "across", "down": "down"}[direction]
    return "Clear all letters in the slot: %s %s" % (number, label)
def clearable(grid, slot):
    """True when *slot* lies in a word that has at least one letter to clear."""
    x, y, direction = slot
    return grid.is_part_of_word(x, y, direction) and has_chars(grid, x, y, direction)
def _create_popup_menu(window, puzzle, button, time, x, y):
    """Build and show the right-click context menu for cell (x, y)."""
    def create_item(title, activate, tooltip):
        # Helper: a menu item that shows `tooltip` in the status bar while
        # hovered and runs `activate` when clicked.
        item = gtk.MenuItem(title)
        item.connect("activate", activate)
        select = lambda item: window.update_status(constants.STATUS_MENU, tooltip)
        item.connect("select", select)
        item.connect("deselect", lambda i: window.pop_status(constants.STATUS_MENU))
        return item
    menu = gtk.Menu()
    # NOTE(review): this handler appears unused below — confirm before removing.
    on_clear_slot_deselect = lambda item: window.pop_status(constants.STATUS_MENU)
    # "Clear across slot" entry; only sensitive when there is something to clear.
    activate = lambda i: clear_slot_of(window, puzzle.grid, x, y, "across")
    tooltip = on_clear_slot_select(puzzle.grid, "across", x, y)
    item = create_item(u"Clear across slot", activate, tooltip)
    item.set_sensitive(clearable(puzzle.grid, (x, y, "across")))
    menu.append(item)
    # "Clear down slot" entry.
    activate = lambda i: clear_slot_of(window, puzzle.grid, x, y, "down")
    tooltip = on_clear_slot_select(puzzle.grid, "down", x, y)
    item = create_item(u"Clear down slot", activate, tooltip)
    item.set_sensitive(clearable(puzzle.grid, (x, y, "down")))
    menu.append(item)
    menu.append(gtk.SeparatorMenuItem())
    # Cell properties dialog entry.
    activate = lambda i: view_cell_properties(window, puzzle, x, y)
    item = create_item(u"Properties", activate, u"View properties of this cell")
    menu.append(item)
    menu.show_all()
    menu.popup(None, None, None, button, time)
def clear_slot_of(window, grid, x, y, d):
    """Clear all letters of the slot in the specified direction
    that contains (x, y)."""
    chars = []
    for p, q in grid.slot(x, y, d):
        if grid.data[q][p]["char"] != '':
            chars.append((p, q, ""))
    # Only touch the grid when there is actually something to clear.
    if chars:
        window.transform_grid(transform.modify_chars, chars=chars)
def on_button_press_event(drawing_area, event, window, puzzle, e_settings):
    """GTK handler: record the pressed button and dispatch editor actions."""
    if event.button in (1, 2, 3):
        mouse_buttons_down[event.button - 1] = True
    drawing_area.grab_focus()
    previous = (e_settings.selection.x, e_settings.selection.y)
    clicked = puzzle.view.properties.screen_to_grid(event.x, event.y)
    actions = on_button_press(puzzle.grid, event, previous, clicked)
    process_editor_actions(window, puzzle, e_settings, actions)
    return True
def on_motion_notify_event(drawing_area, event, window, puzzle, e_settings):
    """GTK handler: track the cursor cell and dispatch motion actions."""
    if event.is_hint:
        # Hint events require polling the pointer for fresh coordinates.
        pointer_x, pointer_y, state = event.window.get_pointer()
    else:
        pointer_x, pointer_y, state = event.x, event.y, event.state
    cell = puzzle.view.properties.screen_to_grid(pointer_x, pointer_y)
    previous = e_settings.current
    e_settings.current = cell
    actions = compute_motion_actions(puzzle, e_settings.settings["symmetries"],
                                     previous, cell,
                                     state & gtk.gdk.SHIFT_MASK, mouse_buttons_down)
    process_editor_actions(window, puzzle, e_settings, actions)
    return True
def compute_motion_actions(puzzle, symmetries, previous, current, shift_down, mouse_buttons_down):
    """Compute all editor actions that take place when mouse cursor is moved."""
    actions = []
    if previous != current:
        # Re-render the symmetry images of both the old and the new position.
        dirty = (apply_symmetry(puzzle.grid, symmetries, *previous)
                 + apply_symmetry(puzzle.grid, symmetries, *current)
                 + [previous, current])
        actions.append(EditorAction("render", {'cells': dirty}))
        if shift_down:
            cx, cy = current
            # Dragging with shift paints blocks (left button) or erases
            # them (right button); both buttons together do nothing.
            left_only = mouse_buttons_down[0] and not mouse_buttons_down[2]
            right_only = mouse_buttons_down[2] and not mouse_buttons_down[0]
            if left_only:
                actions.append(EditorAction("blocks", {'x': cx, 'y': cy, 'status': True}))
            elif right_only:
                actions.append(EditorAction("blocks", {'x': cx, 'y': cy, 'status': False}))
    return actions
def on_expose_event(drawing_area, event, window, puzzle, e_settings):
    """Render the main editing component."""
    # The grid is rendered to an off-screen cairo surface that is cached in
    # e_settings; it is only rebuilt when missing or when a full redraw has
    # been requested via force_redraw.
    if not e_settings.surface or e_settings.force_redraw:
        grid, view = puzzle.grid, puzzle.view
        width, height = view.properties.visual_size(True)
        e_settings.surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, width, height)
        e_settings.pattern = cairo.SurfacePattern(e_settings.surface)
        # TODO should not be needed
        view.grid = grid
        e_settings.force_redraw = False
        _render_cells(puzzle, list(grid.cells()), e_settings, drawing_area, True)
    # Paint the cached surface onto the widget on every expose.
    context = drawing_area.window.cairo_create()
    context.set_source(e_settings.pattern)
    context.paint()
    return True
# GTK signal name -> editor handler function; the main window connects these
# when the editing widget is created.
EDITOR_EVENTS = {
    "expose_event": on_expose_event,
    "button_press_event": on_button_press_event,
    "button_release_event": on_button_release_event,
    "motion_notify_event": on_motion_notify_event,
    "key_press_event": on_key_press_event,
    "key_release_event": on_key_release_event
}
def compute_words(grid, wordlists, selection, force_refresh=False, options=None):
    """Compute the words that should be displayed in the main words control."""
    args = compute_search_args(grid, selection, force_refresh)
    if not args:
        # No usable selection: nothing to search for.
        return []
    return search_wordlists(wordlists, *args, options=options)
class Editor:
    """Editor-facing facade bound to the main application window."""

    def __init__(self, window):
        self.window = window
        # Words the user has rejected; kept across fill attempts.
        self.blacklist = []
        # Per-editor fill configuration, seeded from the defaults.
        self.fill_options = {}
        self.fill_options.update(DEFAULT_FILL_OPTIONS)

    def fill(self):
        """Fill the grid using the first available word list.

        Only the first word list is consulted, and the first result it
        produces is applied to the grid.
        """
        # NOTE: removed a dead `if False:` debugging block that referenced a
        # commented-out `backup` variable (it would raise NameError if ever
        # re-enabled).
        for wlist in self.window.wordlists:
            # Calls the module-level fill() function, not this method.
            results = fill(self.window.puzzle.grid, wlist.words, self.fill_options)
            self.window.transform_grid(transform.modify_chars, chars=results[0])
            break

    def insert(self, word):
        """Insert a word in the selected slot."""
        actions = insert(self.window.puzzle.grid, e_settings.selection, word)
        process_editor_actions(self.window, self.window.puzzle, e_settings, actions)

    def set_overlay(self, word=None):
        """
        Display the word in the selected slot without storing it in the grid.
        If the word is None, the overlay will be cleared.
        """
        set_overlay(self.window, self.window.puzzle, e_settings, word)
|
import coefficient_example_1
import random
# Two 10000-sample random datasets shared by both benchmark functions so the
# slow and fast implementations process identical input.
ARRAY1 = [random.random() for _ in range(10000)]
ARRAY2 = [random.random() for _ in range(10000)]
def slow_test():
    """Benchmark entry point: run the slow implementation once."""
    coefficient_example_1.slow_processing(ARRAY1, ARRAY2)
    return True
def fast_test():
    """Benchmark entry point: run the faster implementation once."""
    coefficient_example_1.faster_processing(ARRAY1, ARRAY2)
    return True
|
import threading
class AbstractWriter(object):
    """Base class for writers that send action/query payloads to a device.

    Subclasses must implement open/is_open/close and the payload senders.
    """

    def __init__(self, file):
        # Set to True by set_external_stop() (under the condition lock) to
        # ask the writer to stop from another thread.
        self.external_stop = False
        self._condition = threading.Condition()
        self.file = file

    def open(self):
        """Open the currently set port."""
        raise NotImplementedError()

    def is_open(self):
        """Return True when the underlying port/file is open."""
        raise NotImplementedError()

    def close(self):
        """Close the underlying port/file."""
        raise NotImplementedError()

    def send_action_payload(self, payload):
        """Send the given payload as an action command.

        @param bytearray payload Payload to send as an action payload
        """
        raise NotImplementedError()

    def send_query_payload(self, payload):
        """Send the given payload as a query command.

        @param bytearray payload Payload to send as a query packet
        @return The packet returned by send_command
        """
        raise NotImplementedError()

    def set_external_stop(self):
        """Request the writer to stop; safe to call from another thread."""
        with self._condition:
            self.external_stop = True
|
from __future__ import division
import numpy as np
import pylab as pl
from pybrain.structure.modules import LSTMLayer
import pybrain.tools.shortcuts as pybrain_tools
import pybrain.datasets
import pybrain.supervised.trainers.rprop as pybrain_rprop
import multiprocessing
import timeit
from mpi4py import MPI
import sys
pl.close('all')
def init_sin_dataset():
    # Build a supervised dataset with one input (time) and one output
    # (signal value), sampling one period of a cosine.
    # Target signal parameters (true division is active via
    # `from __future__ import division`):
    T = 1  # signal period
    Nyq = 40  # samples per period; at least 2 by the Nyquist theorem
    Ts = T/Nyq  # sampling period
    f = 1/T  # signal frequency
    fs = 1/Ts  # sampling frequency (not used below; kept for reference)
    A = 1  # amplitude
    Tiempo = 1  # total sampling time
    # NN input signal:
    t0 = np.arange(0,Tiempo+Ts,Ts)  # vector from 0 to Tiempo (inclusive) in steps of Ts
    L = len(t0)  # number of samples
    print 'numero de datos %i' % L
    # Value at instant t0; note the signal is a cosine even though the
    # function name says "sin": np.sin(Wn*t0) with Wn = 2*pi*f.
    x0 = A*np.cos(2*np.pi*f*t0)  # network input signal
    dataset = pybrain.datasets.SupervisedDataSet(1, 1)  # 1 input, 1 output
    for i in range(L):
        dataset.addSample(t0[i], x0[i])  # sample i
    return dataset
def chart_original_output(entrada,salida,out):
    """Plot the target signal and the network prediction on a new figure."""
    #print net.params
    fig = pl.figure()
    fsize=8  # font size shared by all labels
    pl.plot(entrada,salida,'r.-',label='input')
    pl.plot(entrada,out,'bx-',label='predicted')
    pl.xlabel('Time',fontsize=fsize)
    pl.ylabel('Amplitude',fontsize=fsize)
    # Leave a 20% margin around the data.
    pl.xlim(np.min(entrada),1.2*np.max(entrada))
    pl.ylim(1.2*np.min(salida),1.2*np.max(salida))
    pl.grid()
    pl.legend(loc='lower right',ncol=2,fontsize=fsize)
    pl.title('Target range = [0,1]',fontsize=fsize)
# Number of training iterations for the network.
epochs = 2500
# MPI bookkeeping: world size, this process' rank and its host name.
size = MPI.COMM_WORLD.Get_size()
rank = MPI.COMM_WORLD.Get_rank()
name = MPI.Get_processor_name()
sys.stdout.write("Hello, World! I am process %d of %d on %s.\n"% (rank, size, name))
comm = MPI.COMM_WORLD
np.random.seed(0)
net = pybrain_tools.buildNetwork(1, 40, 1)  # 1 input, 40 hidden units, 1 output
net.randomize()  # randomly initialize the network parameters
print 'entrenando red standard'
data=init_sin_dataset()
trainer = pybrain_rprop.RPropMinusTrainer(net, dataset=data)  # network, training data
trainer.trainEpochs(epochs)  # number of iterations
entrada = data['input']
salida = data['target']
L = len(entrada)
out = np.zeros(L)
aux3 = np.zeros(L)
aux4 = np.zeros(L)
# Evaluate the trained network on every training input.
for c in range(L):
    out[c] = net.activate([entrada[c]])
print out
# Worker nodes ('Pi03', 'Pi04') send their predictions to rank 0; the node
# named 'Pi01' is assumed to be rank 0 and plots everything it receives.
# NOTE(review): hostname-based role selection — confirm 'Pi01' is rank 0.
if name == 'Pi03':
    aux3 = out
    comm.Send(aux3, dest = 0)
if name == 'Pi04':
    aux4 = out
    comm.Send(aux4, dest = 0)
if name == 'Pi01':
    chart_original_output(entrada,salida,out)  # plot this node's prediction
    if size >=2:
        comm.Recv(aux3,source = 1)
        chart_original_output(entrada,salida,aux3)  # plot worker 1's prediction
    if size >= 3:
        comm.Recv(aux4,source = 2)
        chart_original_output(entrada,salida,aux4)  # plot worker 2's prediction
    pl.show()
|
import unittest
from ua.core.utils import htmlutils
class Test_HtmlUtils(unittest.TestCase):
    """Tests for htmlutils.strip_tags.

    Uses assertEqual: assertEquals is a deprecated alias removed in
    Python 3.12.
    """

    def test_strip_tags_none(self):
        # None passes through unchanged.
        result = htmlutils.strip_tags(None)
        self.assertIsNone(result)

    def test_strip_tags_match(self):
        # Tags are removed but their inner text is kept.
        result = htmlutils.strip_tags('text <tag>tag content</tag> more text <another tag>tag content</another tag> the end.<br/>')
        self.assertEqual('text tag content more text tag content the end.', result)

    def test_strip_tags_no_tags(self):
        # Plain text is returned untouched.
        result = htmlutils.strip_tags('This is text.')
        self.assertEqual('This is text.', result)
|
import struct
import subprocess
import re
import binascii
from shutil import copyfile
ORIG_EXE = "QCRACK.EXE"
TARGET_EXE = "QCRACK01.EXE"
copyfile(ORIG_EXE, TARGET_EXE)
def get_24_bytes(addr, game, challenge, breakpoint):
    """Patch the target EXE so that at `breakpoint` the six general-purpose
    registers are loaded with the 24 bytes at memory address `addr`, then run
    it and scrape those registers from the debug output on stderr.

    Returns the 24 bytes as a hex string.
    NOTE(review): this writes str literals to a binary file and hexlifies a
    joined str — Python 2 code; it would need bytes literals on Python 3.
    """
    with open(TARGET_EXE, "r+b") as f:
        addr = struct.pack("L", addr)
        # 0x800 appears to be the delta between file offset and the
        # in-memory address of the breakpoint — TODO confirm.
        f.seek(breakpoint - 0x800)
        f.write("\x68" + addr)  # PUSH ADDR
        f.write("\x5F")  # POP EDI
        f.write("\x8B\x07")  # MOV EAX,DWORD PTR DS:[EDI]
        f.write("\x8B\x5F\x04")  # MOV EBX,DWORD PTR DS:[EDI+0x04]
        f.write("\x8B\x4F\x08")  # MOV ECX,DWORD PTR DS:[EDI+0x08]
        f.write("\x8B\x57\x0C")  # MOV EDX,DWORD PTR DS:[EDI+0x0C]
        f.write("\x8B\x77\x10")  # MOV ESI,DWORD PTR DS:[EDI+0x10]
        f.write("\x8B\x7F\x14")  # MOV EDI,DWORD PTR DS:[EDI+0x14]
        f.write("\xCC")  # INT3
    p = subprocess.Popen([TARGET_EXE, "-g", game, challenge],
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    output, errors = p.communicate()
    # The INT3 makes the runtime dump the register state on stderr; take the
    # last match in case several dumps occur.
    pattern = "eax=(.*) ebx=(.*) ecx=(.*) edx=(.*) esi=(.*) edi=(.*)\r"
    match = re.findall(pattern, errors).pop()
    result = [struct.pack("L", int(reg, 16)) for reg in match]
    return binascii.hexlify("".join(result))
def get_many_bytes(addr, count, codename, challenge, breakpoint):
    """Dump `count` bytes starting at `addr` as a hex string, fetched in
    24-byte register-sized chunks."""
    chunks = []
    fetched = 0
    while fetched < count:
        chunks.append(get_24_bytes(addr + fetched, codename, challenge, breakpoint))
        fetched += 24
    # Two hex digits per byte; trim the overshoot from the last chunk.
    return "".join(chunks)[:count*2]
def pmem(game, chall, breakpoint, addr, size):
    """Dump and print `size` bytes of the game's memory at `addr`."""
    hexdump = get_many_bytes(addr, size, game, chall, breakpoint)
    print("[%s] mem points to bytes:\n%s" % (game, hexdump))
    return hexdump
# INT3 breakpoint location and the memory region to dump.
bpointused = 0x2c61
addr = 0xc49c
length = 508
data = pmem("doom2", "Q58880852966", bpointused, addr, length)
|
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ec2_group_facts
short_description: Gather facts about ec2 security groups in AWS.
description:
- Gather facts about ec2 security groups in AWS.
version_added: "2.3"
author: "Henrique Rodrigues (github.com/Sodki)"
options:
filters:
description:
- A dict of filters to apply. Each dict item consists of a filter key and a filter value. See \
U(https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeSecurityGroups.html) for \
possible filters. Filter names and values are case sensitive. You can also use underscores (_) \
instead of dashes (-) in the filter keys, which will take precedence in case of conflict.
required: false
default: {}
notes:
- By default, the module will return all security groups. To limit results use the appropriate filters.
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
- ec2_group_facts:
- ec2_group_facts:
filters:
vpc-id: vpc-12345678
- ec2_group_facts:
filters:
vpc-id: vpc-12345678
- ec2_group_facts:
filters:
group-name: example-1
- ec2_group_facts:
filters:
group-id: sg-12345678
- ec2_group_facts:
filters:
group_id: sg-12345678
vpc-id: vpc-12345678
- ec2_group_facts:
filters:
group-name:
- example-1
- example-2
- example-3
- ec2_group_facts:
filters:
"tag:Name": Example
'''
RETURN = '''
security_groups:
description: Security groups that match the provided filters. Each element consists of a dict with all the information related to that security group.
type: list
returned: always
sample:
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import ec2_argument_spec, boto3_conn, HAS_BOTO3
from ansible.module_utils.ec2 import get_aws_connection_info, boto3_tag_list_to_ansible_dict
from ansible.module_utils.ec2 import ansible_dict_to_boto3_filter_list, camel_dict_to_snake_dict
try:
from botocore.exceptions import ClientError
except ImportError:
pass # caught by imported HAS_BOTO3
import traceback
def main():
    """Gather facts about EC2 security groups and exit with the results."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(
        dict(
            filters=dict(default={}, type='dict')
        )
    )

    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True)

    if not HAS_BOTO3:
        module.fail_json(msg='boto3 required for this module')

    region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True)

    if region:
        connection = boto3_conn(
            module,
            conn_type='client',
            resource='ec2',
            region=region,
            endpoint=ec2_url,
            **aws_connect_params
        )
    else:
        module.fail_json(msg="region must be specified")

    # Replace filter key underscores with dashes, for compatibility, except if
    # we're dealing with tags. Iterate over a snapshot of the keys: popping
    # and re-inserting while iterating the dict itself raises
    # "RuntimeError: dictionary changed size during iteration" on Python 3.
    sanitized_filters = module.params.get("filters")
    for key in list(sanitized_filters):
        if not key.startswith("tag:") and "_" in key:
            sanitized_filters[key.replace("_", "-")] = sanitized_filters.pop(key)

    try:
        security_groups = connection.describe_security_groups(
            Filters=ansible_dict_to_boto3_filter_list(sanitized_filters)
        )
    except ClientError as e:
        module.fail_json(msg=e.message, exception=traceback.format_exc())

    # Modify boto3 tags list to be ansible friendly dict and then camel_case
    snaked_security_groups = []
    for security_group in security_groups['SecurityGroups']:
        security_group['Tags'] = boto3_tag_list_to_ansible_dict(security_group['Tags'])
        snaked_security_groups.append(camel_dict_to_snake_dict(security_group))

    module.exit_json(security_groups=snaked_security_groups)


if __name__ == '__main__':
    main()
|
from conan.packager import ConanMultiPackager
if __name__ == "__main__":
builder = ConanMultiPackager(username="pix4d", channel="testing",
upload="https://api.bintray.com/conan/pix4d/conan")
builder.add_common_builds(shared_option_name="Expat:shared")
builder.run()
|
'''
This example demonstrates supervised backpropagation training in a simple network.
'''
import os, sys
sys.path.append(os.path.abspath(os.path.join(os.getcwd(), '../..', )))
import logging
import numpy as np
import random
import theano
# Verbose logging and Theano test-value checking for this example.
logging.root.setLevel(logging.DEBUG)
theano.config.compute_test_value = 'warn'
from nnkit.dendrite import CompleteDendrite
from nnkit.feed import DataFeed
from nnkit.layer import InputLayer, NeuronLayer, OutputLayer
from nnkit.monitor import NetworkMonitor
from nnkit.network import Network
from nnkit.objective import ClassifyInput
from nnkit.reporter import ClassificationErrorReporter
from nnkit.sink import LoggingDataSink
from nnkit.synapse import LogisticSynapse, Synapse
from nnkit.trainer import NetworkTrainer
from nnkit.update_rule import SimpleBackprop
class BarsAndStripesTrainingDataSet(object):
    '''
    Generates random n x n (default 4x4) "bars and stripes" images: exactly
    one row or one column is set to 1 and every other pixel is 0. The label
    is the index of the lit row (0..n-1) or n plus the index of the lit
    column (n..2n-1).
    '''

    def __init__(self, image_size=4):
        '''
        `image_size` defaults to 4. It should be an integer >= 2.
        '''
        self.image_size = image_size

    def generate(self, N=1):
        '''
        Generate `N` random labeled bar and stripe images; return them as a
        list of (image, label) tuples.
        '''
        n = self.image_size
        samples = []
        for _ in range(N):
            picture = np.zeros((n, n))
            label = random.randint(0, 2 * n - 1)
            if label < n:
                picture[label, :] = 1          # bar: light up row `label`
            else:
                picture[:, label - n] = 1      # stripe: light up column `label - n`
            flat = picture.reshape(1, -1).astype(np.single)
            samples.append((flat, np.array([label], dtype=np.int64)))
        return samples
def main():
    '''
    Build, train, and return a trainer for the bars-and-stripes task.
    '''
    # Network topology: 16 inputs (one per pixel of a 4x4 image), one hidden
    # layer of 16 logistic units, and 8 output classes (4 bars + 4 stripes).
    #
    # CompleteDendrite creates one connection per input-output pair, so the
    # hidden weight matrix has 256 entries plus a 16-element bias vector.
    # LogisticSynapse applies f(x) = 1/(1 + exp(-x)) to each output, and
    # SimpleBackprop updates W <- W - k * grad c/W, where c is the cost
    # defined below by the ClassifyInput objective and k the learning rate.
    # ClassifyInput applies a softmax itself, so the class layer needs only a
    # trivial Synapse; its size is the number of output classes. The output
    # objective uses the mean negative log likelihood of the correct label,
    # which makes the network implement logistic regression — the argmax of
    # the output is the network's class estimate.
    input_layer = InputLayer(16)
    hidden_layer = NeuronLayer(CompleteDendrite(), LogisticSynapse(), SimpleBackprop(), size=16)
    class_layer = NeuronLayer(CompleteDendrite(), Synapse(), SimpleBackprop(), size=8)
    output_layer = OutputLayer(objective=ClassifyInput())
    layers = [input_layer, hidden_layer, class_layer, output_layer]
    # Constructing a Network from a layer list wires predecessor/successor
    # links in list order. More complex geometries can be created by
    # constructing each layer with an explicit predecessor and passing only
    # the input layer, e.g. Network(input_layer=L_input).
    network = Network(layers=layers)
    # Monitor captures values from the network during training.
    monitor = NetworkMonitor()
    # Report the classification error over the last 100 items (100 is the
    # default moving-average window size).
    reporter = ClassificationErrorReporter(
        expected_value_feed=DataFeed(monitor, layers[-1], 'expected_value'),
        output_feed=DataFeed(monitor, layers[-1], 'output'),
        sink=LoggingDataSink())
    # Create the data generator and the training set.
    generator = BarsAndStripesTrainingDataSet()
    training_set = generator.generate(1500)
    validation_set = None
    trainer = NetworkTrainer(network, training_set, validation_set, batch_size=10,
                             training_monitor=monitor, training_reporter=reporter)
    # prepare() constructs and compiles the computation graph for training.
    trainer.prepare()
    # One full pass over the training data is more than adequate to reach
    # 0% error on this simple problem.
    trainer.train()
    # Every object constructed above is reachable through the returned trainer.
    return trainer


if __name__ == '__main__':
    main()
|
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible import constants as C
from ansible.errors import AnsibleParserError, AnsibleUndefinedVariable, AnsibleFileNotFound
from ansible.module_utils.six import string_types
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
def load_list_of_blocks(ds, play, parent_block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
    '''
    Given a list of mixed task/block data (parsed from YAML),
    return a list of Block() objects, where implicit blocks
    are created for each bare Task.
    '''

    # we import here to prevent a circular dependency with imports
    from ansible.playbook.block import Block
    from ansible.playbook.task_include import TaskInclude
    from ansible.playbook.role_include import IncludeRole

    assert isinstance(ds, (list, type(None)))

    block_list = []
    if ds:
        for block_ds in ds:
            b = Block.load(
                block_ds,
                play=play,
                parent_block=parent_block,
                role=role,
                task_include=task_include,
                use_handlers=use_handlers,
                variable_manager=variable_manager,
                loader=loader,
            )

            # Implicit blocks are created by bare tasks listed in a play without
            # an explicit block statement. If we have two implicit blocks in a row,
            # squash them down to a single block to save processing time later.
            if b._implicit and len(block_list) > 0 and block_list[-1]._implicit:
                for t in b.block:
                    # Re-parent the squashed tasks: include-style tasks keep
                    # their include parent, which itself is re-parented onto
                    # the surviving block.
                    if isinstance(t._parent, (TaskInclude, IncludeRole)):
                        t._parent._parent = block_list[-1]
                    else:
                        t._parent = block_list[-1]
                block_list[-1].block.extend(b.block)
            else:
                block_list.append(b)

    return block_list
def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
    '''
    Given a list of task datastructures (parsed from YAML),
    return a list of Task() or TaskInclude() objects.
    '''

    # we import here to prevent a circular dependency with imports
    from ansible.playbook.block import Block
    from ansible.playbook.handler import Handler
    from ansible.playbook.task import Task
    from ansible.playbook.task_include import TaskInclude
    from ansible.playbook.role_include import IncludeRole
    from ansible.playbook.handler_task_include import HandlerTaskInclude
    from ansible.template import Templar

    assert isinstance(ds, list)

    task_list = []
    for task_ds in ds:
        assert isinstance(task_ds, dict)

        if 'block' in task_ds:
            # Nested block: recurse via Block.load.
            t = Block.load(
                task_ds,
                play=play,
                parent_block=block,
                role=role,
                task_include=task_include,
                use_handlers=use_handlers,
                variable_manager=variable_manager,
                loader=loader,
            )
            task_list.append(t)
        else:
            if 'include' in task_ds or 'import_tasks' in task_ds or 'include_tasks' in task_ds:
                if 'include' in task_ds:
                    display.deprecated("The use of 'include' for tasks has been deprecated. "
                                       "Use 'import_tasks' for static inclusions or 'include_tasks' for dynamic inclusions")

                # Handlers get their own include class so included tasks are
                # treated as handlers too.
                if use_handlers:
                    include_class = HandlerTaskInclude
                else:
                    include_class = TaskInclude

                t = include_class.load(
                    task_ds,
                    block=block,
                    role=role,
                    task_include=None,
                    variable_manager=variable_manager,
                    loader=loader
                )

                all_vars = variable_manager.get_vars(play=play, task=t)
                templar = Templar(loader=loader, variables=all_vars)

                # check to see if this include is dynamic or static:
                # 1. the user has set the 'static' option to false or true
                # 2. one of the appropriate config options was set
                if 'include_tasks' in task_ds:
                    is_static = False
                elif 'import_tasks' in task_ds:
                    is_static = True
                elif t.static is not None:
                    display.deprecated("The use of 'static' has been deprecated. "
                                       "Use 'import_role' for static inclusion, or 'include_role' for dynamic inclusion")
                    is_static = t.static
                else:
                    # Fall back to config defaults; a bare 'include' is static
                    # only when its target contains no variables, all of its
                    # parents are static, and it has no loop.
                    is_static = C.DEFAULT_TASK_INCLUDES_STATIC or \
                        (use_handlers and C.DEFAULT_HANDLER_INCLUDES_STATIC) or \
                        (not templar._contains_vars(t.args['_raw_params']) and t.all_parents_static() and not t.loop)

                if is_static:
                    if t.loop is not None:
                        if 'import_tasks' in task_ds:
                            raise AnsibleParserError("You cannot use loops on 'import_tasks' statements. You should use 'include_tasks' instead.", obj=task_ds)
                        else:
                            raise AnsibleParserError("You cannot use 'static' on an include with a loop", obj=task_ds)

                    # we set a flag to indicate this include was static
                    t.statically_loaded = True

                    # handle relative includes by walking up the list of parent include
                    # tasks and checking the relative result to see if it exists
                    parent_include = block
                    cumulative_path = None

                    found = False
                    subdir = 'tasks'
                    if use_handlers:
                        subdir = 'handlers'
                    while parent_include is not None:
                        if not isinstance(parent_include, TaskInclude):
                            parent_include = parent_include._parent
                            continue
                        parent_include_dir = templar.template(os.path.dirname(parent_include.args.get('_raw_params')))
                        if cumulative_path is None:
                            cumulative_path = parent_include_dir
                        elif not os.path.isabs(cumulative_path):
                            cumulative_path = os.path.join(parent_include_dir, cumulative_path)
                        include_target = templar.template(t.args['_raw_params'])
                        if t._role:
                            new_basedir = os.path.join(t._role._role_path, subdir, cumulative_path)
                            include_file = loader.path_dwim_relative(new_basedir, subdir, include_target)
                        else:
                            include_file = loader.path_dwim_relative(loader.get_basedir(), cumulative_path, include_target)

                        if os.path.exists(include_file):
                            found = True
                            break
                        else:
                            parent_include = parent_include._parent

                    if not found:
                        # No parent include resolved the path; template the
                        # target and resolve it against the role or basedir.
                        try:
                            include_target = templar.template(t.args['_raw_params'])
                        except AnsibleUndefinedVariable:
                            raise AnsibleParserError(
                                "Error when evaluating variable in include name: %s.\n\n"
                                "When using static includes, ensure that any variables used in their names are defined in vars/vars_files\n"
                                "or extra-vars passed in from the command line. Static includes cannot use variables from inventory\n"
                                "sources like group or host vars." % t.args['_raw_params'],
                                obj=task_ds,
                                suppress_extended_error=True,
                            )
                        if t._role:
                            include_file = loader.path_dwim_relative(t._role._role_path, subdir, include_target)
                        else:
                            include_file = loader.path_dwim(include_target)

                    try:
                        data = loader.load_from_file(include_file)
                        if data is None:
                            return []
                        elif not isinstance(data, list):
                            raise AnsibleParserError("included task files must contain a list of tasks", obj=data)

                        # since we can't send callbacks here, we display a message directly in
                        # the same fashion used by the on_include callback. We also do it here,
                        # because the recursive nature of helper methods means we may be loading
                        # nested includes, and we want the include order printed correctly
                        display.vv("statically imported: %s" % include_file)
                    except AnsibleFileNotFound:
                        if t.static or \
                                C.DEFAULT_TASK_INCLUDES_STATIC or \
                                C.DEFAULT_HANDLER_INCLUDES_STATIC and use_handlers:
                            raise
                        display.deprecated(
                            "Included file '%s' not found, however since this include is not "
                            "explicitly marked as 'static: yes', we will try and include it dynamically "
                            "later. In the future, this will be an error unless 'static: no' is used "
                            "on the include task. If you do not want missing includes to be considered "
                            "dynamic, use 'static: yes' on the include or set the global ansible.cfg "
                            "options to make all inclues static for tasks and/or handlers" % include_file, version="2.7"
                        )
                        # Keep the include as a dynamic task and move on.
                        task_list.append(t)
                        continue

                    ti_copy = t.copy(exclude_parent=True)
                    ti_copy._parent = block
                    included_blocks = load_list_of_blocks(
                        data,
                        play=play,
                        parent_block=None,
                        task_include=ti_copy,
                        role=role,
                        use_handlers=use_handlers,
                        loader=loader,
                        variable_manager=variable_manager,
                    )

                    # pop tags out of the include args, if they were specified there, and assign
                    # them to the include. If the include already had tags specified, we raise an
                    # error so that users know not to specify them both ways
                    tags = ti_copy.vars.pop('tags', [])
                    if isinstance(tags, string_types):
                        tags = tags.split(',')

                    if len(tags) > 0:
                        if len(ti_copy.tags) > 0:
                            raise AnsibleParserError(
                                "Include tasks should not specify tags in more than one way (both via args and directly on the task). "
                                "Mixing styles in which tags are specified is prohibited for whole import hierarchy, not only for single import statement",
                                obj=task_ds,
                                suppress_extended_error=True,
                            )
                        display.deprecated("You should not specify tags in the include parameters. All tags should be specified using the task-level option",
                                           version="2.7")
                    else:
                        tags = ti_copy.tags[:]

                    # now we extend the tags on each of the included blocks
                    for b in included_blocks:
                        b.tags = list(set(b.tags).union(tags))
                    # END FIXME

                    # FIXME: handlers shouldn't need this special handling, but do
                    # right now because they don't iterate blocks correctly
                    if use_handlers:
                        for b in included_blocks:
                            task_list.extend(b.block)
                    else:
                        task_list.extend(included_blocks)
                else:
                    # Dynamic include: keep the TaskInclude itself.
                    task_list.append(t)
            elif 'include_role' in task_ds or 'import_role' in task_ds:
                ir = IncludeRole.load(
                    task_ds,
                    block=block,
                    role=role,
                    task_include=None,
                    variable_manager=variable_manager,
                    loader=loader,
                )

                # 1. the user has set the 'static' option to false or true
                # 2. one of the appropriate config options was set
                if 'import_role' in task_ds:
                    is_static = True
                    if ir.static is not None:
                        display.deprecated("The use of 'static' for 'include_role' has been deprecated. "
                                           "Use 'import_role' for static inclusion, or 'include_role' for dynamic inclusion")
                        is_static = ir.static
                else:
                    display.debug('Determine if include_role is static')
                    # Check to see if this include is dynamic or static:
                    all_vars = variable_manager.get_vars(play=play, task=ir)
                    templar = Templar(loader=loader, variables=all_vars)
                    needs_templating = False
                    for param in ir.args:
                        if templar._contains_vars(ir.args[param]):
                            if not templar.is_template(ir.args[param]):
                                needs_templating = True
                                break
                    is_static = (
                        C.DEFAULT_TASK_INCLUDES_STATIC or
                        (use_handlers and C.DEFAULT_HANDLER_INCLUDES_STATIC) or
                        (not needs_templating and ir.all_parents_static() and not ir.loop)
                    )
                    display.debug('Determined that if include_role static is %s' % str(is_static))
                if is_static:
                    # uses compiled list from object
                    t = task_list.extend(ir.get_block_list(variable_manager=variable_manager, loader=loader))
                else:
                    # passes task object itself for latter generation of list
                    t = task_list.append(ir)
            else:
                # Plain task (or handler).
                if use_handlers:
                    t = Handler.load(task_ds, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)
                else:
                    t = Task.load(task_ds, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)
                task_list.append(t)

    return task_list
def load_list_of_roles(ds, play, current_role_path=None, variable_manager=None, loader=None):
    '''
    Loads and returns a list of RoleInclude objects from the datastructure
    list of role definitions
    '''
    # we import here to prevent a circular dependency with imports
    from ansible.playbook.role.include import RoleInclude

    assert isinstance(ds, list)

    return [
        RoleInclude.load(role_def, play=play, current_role_path=current_role_path,
                         variable_manager=variable_manager, loader=loader)
        for role_def in ds
    ]
|
import numpy
def matrixToNumpyArray(m):
    """Convert a matrix indexed as m(i, j) (with noRows/noCols attributes)
    into a 2-D float numpy array."""
    rows, cols = m.noRows, m.noCols
    result = numpy.empty((rows, cols))
    for i in range(rows):
        for j in range(cols):
            result[i][j] = m(i, j)
    return result
def vectorToNumpyArray(v):
    """Convert a vector with size()/[] access into a (size x 1) numpy
    column vector."""
    size = v.size()
    column = numpy.empty((size, 1))
    for index in range(size):
        column[index][0] = v[index]
    return column
|
import os
import json
import re
import sys
from io import open as uopen
from collections import OrderedDict
if len(sys.argv) == 1 or not sys.argv[1]:
    raise SystemExit('Build dir missing.')
# Project root is one level above this script's directory.
proj_dir = os.path.join(os.path.split(os.path.abspath(__file__))[0], '..')
build_dir = os.path.abspath(sys.argv[1])
chromium_manifest = {}
webext_manifest = {}
# Copy the version number from the Chromium manifest into the
# webextension manifest, then rewrite the webextension manifest.
chromium_manifest_file = os.path.join(proj_dir, 'platform', 'chromium', 'manifest.json')
with open(chromium_manifest_file) as f1:
    chromium_manifest = json.load(f1)
webext_manifest_file = os.path.join(build_dir, 'webextension', 'manifest.json')
with open(webext_manifest_file) as f2:
    webext_manifest = json.load(f2)
webext_manifest['version'] = chromium_manifest['version']
with open(webext_manifest_file, 'w') as f2:
    json.dump(webext_manifest, f2, indent=2, separators=(',', ': '), sort_keys=True)
    f2.write('\n')
# Gather the short description from every locale's messages.json,
# keyed by BCP-47-style language tag (underscores become hyphens).
descriptions = OrderedDict({})
source_locale_dir = os.path.join(build_dir, 'webextension', '_locales')
for alpha2 in sorted(os.listdir(source_locale_dir)):
    locale_path = os.path.join(source_locale_dir, alpha2, 'messages.json')
    with uopen(locale_path, encoding='utf-8') as f:
        strings = json.load(f, object_pairs_hook=OrderedDict)
    alpha2 = alpha2.replace('_', '-')
    descriptions[alpha2] = strings['extShortDesc']['message']
webext_manifest['author'] = chromium_manifest['author'];
webext_manifest['name'] = chromium_manifest['name'] + '/embed-webext';
webext_manifest['homepage'] = 'https://github.com/uBlockAdmin/uBlock'
# English is the default description; the rest go into <em:localized> blocks.
webext_manifest['description'] = descriptions['en']
del descriptions['en']
webext_manifest['localized'] = []
t = '    '
t3 = 3 * t
for alpha2 in descriptions:
    if alpha2 == 'en':
        continue
    webext_manifest['localized'].append(
        '\n' + t*2 + '<em:localized><Description>\n' +
        t3 + '<em:locale>' + alpha2 + '</em:locale>\n' +
        t3 + '<em:name>' + webext_manifest['name'] + '</em:name>\n' +
        t3 + '<em:description>' + descriptions[alpha2] + '</em:description>\n' +
        t3 + '<em:creator>' + webext_manifest['author'] + '</em:creator>\n' +
        t3 + '<em:homepageURL>' + webext_manifest['homepage'] + '</em:homepageURL>\n' +
        t*2 + '</Description></em:localized>'
    )
webext_manifest['localized'] = '\n'.join(webext_manifest['localized'])
# Expand the {placeholders} in install.rdf in place from the manifest dict.
install_rdf = os.path.join(build_dir, 'install.rdf')
with uopen(install_rdf, 'r+t', encoding='utf-8', newline='\n') as f:
    install_rdf = f.read()
    f.seek(0)
    f.write(install_rdf.format(**webext_manifest))
    f.truncate()
|
from tkinter import *
from tkinter.filedialog import *
import tkinter.messagebox
import os, time, sys, random, math, string, socket, _thread, webbrowser, Pmw
from WsprMod import g
from WsprMod import palettes
from math import log10
import numpy.core.multiarray
import array
from PIL import Image, ImageTk, ImageDraw
from WsprMod.palettes import colormapblue, colormapgray0, colormapHot, \
colormapAFMHot, colormapgray1, colormapLinrad, Colormap2Palette
from types import *
from WsprMod import w
from WsprMod import smeter
import urllib.request, urllib.parse, urllib.error
import tkinter.font
# --- Main window, version banner -------------------------------------------
root = Tk()
Version="4.0 r" + "$Rev: 6470 $"[6:-2]
print("******************************************************************")
print("WSPR Version " + Version + ", by K1JT")
print("Revision date ...: " + "$Date: 2016-01-12 01:54:47 -0700 (Tue, 12 Jan 2016) $"[7:-1])
print("Run date ........: " + time.asctime(time.gmtime()) + " UTC")
# Load platform-specific Tk option database (best effort).
g.Win32=0
if sys.platform=="win32":
    g.Win32=1
    try:
        root.option_readfile('wsprrc.win')
    except:
        pass
else:
    try:
        root.option_readfile('wsprrc')
    except:
        pass
root_geom=""
# Hand the application directory to the compiled back end (fixed-width field).
appdir=os.getcwd()
w.acom1.nappdir=len(appdir)
w.acom1.appdir=(appdir+(' '*80))[:80]
i1,i2=w.audiodev(0,2)
# Dialog modules are imported after root/back-end setup (they use both).
from WsprMod import options
from WsprMod import advanced
from WsprMod import iq
from WsprMod import hopping
# --- GUI state: Tk variables and change-detection "previous value" caches ---
# Variables ending in 0 generally cache the last-seen value so the 200 ms
# update() loop only reconfigures widgets when something changed.
adv0=999
adv1=999
band=[-1,600,160,80,60,40,30,20,17,15,12,10,6,4,2,0]
bandmap=[]
bm={}
btune0=999
encal0=999
f0=DoubleVar()
ftx=DoubleVar()
ftx0=0.
ft=[]
fileopened=""
fmid=0.0
fmid0=0.0
font1='Helvetica'
hopping0=99
iband=IntVar()
iband0=0
idle=IntVar()
idle0=999
ierr=0
inbad0=999
ipctx=IntVar()
ndgain=IntVar()
isec0=0
isync=1
itx0=0
loopall=0
modpixmap0=0
mrudir=os.getcwd()
ndbm0=-999
ncal0=999
ncall=0
ndebug=IntVar()
ndecoding0=999
nin0=0
nout0=0
nred=0
ntune0=999
newdat=1
newspec=1
no_beep=IntVar()
npal=IntVar()
npal.set(2)
nparam=0
nsave=IntVar()
nscroll=0
nsec0=0
nspeed0=IntVar()
ntr0=0
ntxfirst=IntVar()
# Waterfall pixmap dimensions (must match the back end's pixmap.dat layout).
NX=500
NY=160
outbad0=999
param20=""
sf0=StringVar()
sftx=StringVar()
start_idle=IntVar()
startup=1
t0=""
timer1=0
txmsg=StringVar()
txmute=IntVar()
txmute0=999
upload0=999
nreject=0
gain=1.0
phdeg=0.0
# --- Waterfall image buffers -----------------------------------------------
a=array.array('h')
im=Image.new('P',(NX,NY))
draw=ImageDraw.Draw(im)
im.putpalette(Colormap2Palette(colormapLinrad),"RGB")
pim=ImageTk.PhotoImage(im)
receiving=0
scale0=1.0
offset0=0.0
s0=0.0
c0=0.0
slabel="MinSync "
transmitting=0
tw=[]  # time labels for spectrum display
fw=[] # band labels for spectrum display
upload=IntVar()
balloon=Pmw.Balloon(root)
g.appdir=appdir
g.cmap="Linrad"
g.cmap0="Linrad"
g.ndevin=IntVar()
g.ndevout=IntVar()
g.DevinName=StringVar()
g.DevoutName=StringVar()
# Permitted power levels in dBm (1, 2, 5 mW steps per decade).
pwrlist=(-30,-27,-23,-20,-17,-13,-10,-7,-3, \
    0,3,7,10,13,17,20,23,27,30,33,37,40,43,47,50,53,57,60)
# Standard WSPR dial (freq0) and Tx (freqtx) frequencies in MHz, per band index.
freq0=[0,0.4742,1.8366,3.5926,5.2872,7.0386,10.1387,14.0956,18.1046,\
    21.0946,24.9246,28.1246,50.2930,70.0286,144.4890,0.1360]
freqtx=[0,0.4742,1.8366,3.5926,5.2872,7.0386,10.1387,14.0956,18.1046,\
    21.0946,24.9246,28.1246,50.2930,70.0301,144.4890,0.1375]
# Tx frequency = dial + 1500 Hz for the first 15 bands.
for i in range(15):
    freqtx[i]=freq0[i]+0.001500
# --- External UDP control interface (commands in, events out) --------------
socktimeout = 20
socket.setdefaulttimeout(socktimeout)
extsock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
EXT_PORT = 10000
extsock.bind(('127.0.0.1', EXT_PORT))
extsock.settimeout(0.01)  # short timeout: polled from the GUI update loop
evtsock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
EVT_PORT = 10001
lastBndCmd = -1  # Last received band switch command
extAddr = None  # Address of external caller
# Simple transceive state machine used to report cycle events to the client.
S_IDLE = 0
S_RX = 1
S_TX = 2
lastState = S_IDLE  # Remember last state
allowSwitch = True  # Allow band switch
def pal_gray0():
    """Switch the waterfall image to the gray0 colormap."""
    im.putpalette(Colormap2Palette(colormapgray0), "RGB")
    g.cmap = "gray0"
def pal_gray1():
    """Switch the waterfall image to the gray1 colormap."""
    im.putpalette(Colormap2Palette(colormapgray1), "RGB")
    g.cmap = "gray1"
def pal_linrad():
    """Switch the waterfall image to the Linrad colormap."""
    im.putpalette(Colormap2Palette(colormapLinrad), "RGB")
    g.cmap = "Linrad"
def pal_blue():
    """Switch the waterfall image to the blue colormap."""
    im.putpalette(Colormap2Palette(colormapblue), "RGB")
    g.cmap = "blue"
def pal_Hot():
    """Switch the waterfall image to the Hot colormap."""
    im.putpalette(Colormap2Palette(colormapHot), "RGB")
    g.cmap = "Hot"
def pal_AFMHot():
    """Switch the waterfall image to the AFMHot colormap."""
    im.putpalette(Colormap2Palette(colormapAFMHot), "RGB")
    g.cmap = "AFMHot"
def quit(event=NONE):
    """Destroy the main Tk window, terminating the application.

    NOTE(review): shadows the builtin quit(); kept for existing bindings.
    """
    root.destroy()
def openfile(event=NONE):
    """Prompt for a .wav file and hand it to the back end for decoding.

    Side effects: disables uploading, remembers the file's directory as the
    most-recently-used one, prepends the file's HH:MM stamp (parsed from the
    filename) to the waterfall time-label list, and forces Idle mode.
    """
    global mrudir,fileopened,nopen,tw
    nopen=1                       #Work-around for "click feedthrough" bug
    upload.set(0)                 # never upload spots decoded from disk files
    try:
        os.chdir(mrudir)
    except:
        pass
    fname=askopenfilename(filetypes=[("Wave files","*.wav *.WAV")])
    if fname:
        w.getfile(fname,len(fname))   # back end reads and processes the file
        mrudir=os.path.dirname(fname)
        fileopened=os.path.basename(fname)
        # Filename is assumed to end in ...HHMM.wav; extract HH:MM for labels.
        i1=fileopened.find('.')
        t=fileopened[i1-4:i1]
        t=t[0:2] + ':' + t[2:4]
        n=len(tw)
        if n>12: tw=tw[:n-1]      # keep at most 13 labels
        tw=[t,] + tw
    os.chdir(appdir)
    idle.set(1)
def stop_loopall(event=NONE):
    """Cancel a decode-all run started by decodeall()."""
    global loopall
    loopall=0
def opennext(event=NONE):
    """Open the next .wav file (alphabetically) from the current directory.

    On first call with no file open, falls back to the interactive
    openfile() dialog.  When the last file has been processed, shows a
    warning and stops any decode-all loop.
    """
    global ncall,fileopened,loopall,mrudir,tw,ndecoding0
    if int(w.acom1.ndecoding) != 0:
        return                           # decoder still busy; try again later
    upload.set(0)                        # never upload spots from disk files
    if fileopened=="" and ncall==0:
        openfile()
        ncall=1
    else:
        # Collect all .wav/.WAV files, sorted, then find the one after the
        # file currently open.
        la=os.listdir(mrudir)
        la.sort()
        lb=[]
        for i in range(len(la)):
            j=la[i].find(".wav") + la[i].find(".WAV")
            if j>0: lb.append(la[i])
        # NOTE(review): if lb is empty, `i` is unbound below (NameError);
        # also `i` here is the loop variable reused after the loop ends.
        for i in range(len(lb)):
            if lb[i]==fileopened:
                break
        if i<len(lb)-1:
            fname=mrudir+"/"+lb[i+1]
            w.getfile(fname,len(fname))
            mrudir=os.path.dirname(fname)
            fileopened=os.path.basename(fname)
            # Extract HH:MM from the ...HHMM.wav filename for the label list.
            i1=fileopened.find('.')
            t=fileopened[i1-4:i1]
            t=t[0:2] + ':' + t[2:4]
            n=len(tw)
            if n>12: tw=tw[:n-1]
            tw=[t,] + tw
        else:
            t="No more *.wav files in this directory."
            result=tkinter.messagebox.showwarning(message=t)
            ncall=0
            loopall=0
def decodeall(event=NONE):
    """Start decoding every remaining .wav file in the current directory.

    Sets the loopall flag (checked in get_decoded()) and kicks off the
    first file; get_decoded() then chains to opennext() after each decode.
    """
    global loopall
    loopall=1
    opennext()
def hopping1(event=NONE):
    """Open the band-hopping dialog, offset to the main window position."""
    pos = ''
    plus = root_geom.find('+')
    if plus >= 0:
        pos = root_geom[plus:]
    hopping.hopping2(pos)
def options1(event=NONE):
    """Open the station-parameters dialog, offset to the main window position."""
    pos = ''
    plus = root_geom.find('+')
    if plus >= 0:
        pos = root_geom[plus:]
    options.options2(pos)
def advanced1(event=NONE):
    """Open the advanced-settings dialog, offset to the main window position."""
    pos = ""
    plus = root_geom.find("+")
    if plus >= 0:
        pos = root_geom[plus:]
    advanced.advanced2(pos)
def iq1(event=NONE):
    """Open the I/Q-mode dialog, offset to the main window position."""
    pos = ""
    plus = root_geom.find("+")
    if plus >= 0:
        pos = root_geom[plus:]
    iq.iq2(pos)
def stub(event=NONE):
    """Placeholder menu handler for not-yet-implemented features."""
    MsgBox("Sorry, this function is not yet implemented.")
def MsgBox(t):
    """Show a modal warning dialog with message text t."""
    result=tkinter.messagebox.showwarning(message=t)
def msgpos():
    """Return a Tk geometry offset string "+x+y" for the main window.

    Parses the "+x+y" tail of the cached root window geometry so popup
    dialogs can be placed over the main window.  The local variable was
    renamed from ``g``, which shadowed the imported ``WsprMod.g`` module.
    """
    offs = root_geom[root_geom.index("+"):]
    t = offs[1:]
    x = int(t[:t.index("+")])  # + 70
    y = int(t[t.index("+")+1:])  # + 70
    return "+%d+%d" % (x, y)
def about(event=NONE):
    """Show the About window with version, credits, and license notice."""
    global Version
    about=Toplevel(root)
    about.geometry(msgpos())       # place over the main window
    if g.Win32: about.iconbitmap("wsjt.ico")
    t="WSPR Version " + Version + ", by K1JT"
    Label(about,text=t,font=(font1,16)).pack(padx=20,pady=5)
    t="""
WSPR (pronounced "whisper") stands for "Weak Signal
Propagation Reporter". The program generates and decodes
a digital soundcard mode optimized for beacon-like
transmissions on the LF, MF, and HF bands.
Copyright (c) 2008-2014 by Joseph H. Taylor, Jr., K1JT, with
contributions from VA3DB, G4KLA, W1BW, 4X6IZ, KE6HDU and KI7MT.
WSPR is Open Source software, licensed under the GNU General Public
License (GPL-3). Source code and programming information may
be found at http://sourceforge.net/projects/wsjt/.
"""
    Label(about,text=t,justify=LEFT).pack(padx=20)
    t="Revision date: " + \
        "$Date: 2014-12-02 10:40:00 -0600 (Tue, 02 Dec 2014) $"[7:-1]
    Label(about,text=t,justify=LEFT).pack(padx=20)
    about.focus_set()
def help(event=NONE):
    """Show a window with basic operating instructions.

    NOTE(review): shadows the builtin help(); kept for existing bindings.
    """
    about=Toplevel(root)
    about.geometry(msgpos())       # place over the main window
    if g.Win32: about.iconbitmap("wsjt.ico")
    t="Basic Operating Instructions"
    Label(about,text=t,font=(font1,14)).pack(padx=20,pady=5)
    t="""
1. Open the Setup | Station Parameters screen and enter
your callsign and grid locator 6 characters). Select
desired devices for Audio In and Audio Out, and your
power level in dBm.
2. Select your PTT method (CAT control, DTR, or RTS). If
you choose DTR or RTS, select a PTT port. If T/R
switching or frequency setting will be done by CAT
control, select a CAT port and be sure that "Enable CAT"
is checked. You will need to enter a Rig number and
correct parameters for the serial connection.
3. Select the desired band from the Band menu and if
necessary correct your USB dial frequency on the main
screen. Select a Tx frequency by double-clicking
somewhere on the waterfall display.
4. Select a desired 'Tx fraction' using the large slider. Zero
percent means Rx only; 100% means Tx only.
5. Be sure that your computer clock is correct to +/- 1 s.
Many people like to use an automatic internet-based
clock-setting utility.
6. WSPR will begin a Tx or Rx sequence at the start of each
even-numbered minute. The waterfall will update and
decoding will take place at the end of each Rx sequence.
During reception, you can adjust the Rx noise level to get
something close to 0 dB. Use the operating system's audio
mixer control or change your receiver's output level.
"""
    Label(about,text=t,justify=LEFT).pack(padx=20)
    about.focus_set()
def usersguide(event=NONE):
    """Open the WSPR User's Guide in the default browser (non-blocking)."""
    _thread.start_new_thread(
        browser,
        ('http://physics.princeton.edu/pulsar/K1JT/doc/wspr/wspr-main.html',))
def fmtguide(event=NONE):
    """Open the FMT user guide PDF in the default browser (non-blocking)."""
    _thread.start_new_thread(
        browser,
        ('http://physics.princeton.edu/pulsar/K1JT/FMT_User.pdf',))
def wsprnet(event=NONE):
    """Open the WSPRnet web site in the default browser (non-blocking)."""
    _thread.start_new_thread(browser, ('http://wsprnet.org/',))
def homepage(event=NONE):
    """Open the K1JT home page in the default browser (non-blocking)."""
    _thread.start_new_thread(
        browser,
        ('http://physics.princeton.edu/pulsar/K1JT/',))
def browser(url):
    """Open url in the system default web browser (run in a worker thread)."""
    webbrowser.open(url)
def erase(event=NONE):
    """Clear both decoder text panes and reset the band-map state."""
    global bandmap, bm
    # The panes are normally read-only; unlock, wipe, and re-lock each.
    for pane in (text, text1):
        pane.configure(state=NORMAL)
        pane.delete('1.0', END)
        pane.configure(state=DISABLED)
    bandmap = []
    bm = {}
def tune(event=NONE):
    """Enter tune-up mode: force Idle, ask the back end to transmit a
    steady carrier, and highlight the Tune button."""
    idle.set(1)
    w.acom1.ntune=1
    btune.configure(bg='yellow')
def txnext(event=NONE):
    """Request transmission in the next time slot (only meaningful when the
    Tx fraction is non-zero); highlight the TxNext button."""
    if ipctx.get()>0:
        w.acom1.ntxnext=1
        btxnext.configure(bg="green")
def df_readout(event):
    """Display the audio frequency (Hz, mod 1000) under the mouse pointer
    on the waterfall, and highlight the readout label briefly."""
    global fmid, nred
    nred = 10                       # readout stays highlighted ~10 ticks
    hz_per_pixel = 12000 / 8192.0   # FFT bin width
    hz = 1000000 * fmid + (80.0 - event.y) * hz_per_pixel + 2
    lab02.configure(text="%3d Hz" % int(hz % 1000), bg='red')
def set_tx_freq(event):
    """Set the Tx frequency from a click on the waterfall axis, after a
    confirmation dialog.  Updates both the DoubleVar and its display string."""
    global fmid
    df=12000/8192.0                 # FFT bin width in Hz
    # Convert pixel y-position to an absolute frequency in Hz.
    nftx=int(1000000.0*fmid + (80.0-event.y)*df) + 2
    fmhz=0.000001*nftx
    t="Please confirm setting Tx frequency to " + "%.06f MHz" % fmhz
    result=tkinter.messagebox.askyesno(message=t)
    if result:
        ftx.set(0.000001*nftx)
        sftx.set('%.06f' % ftx.get())
def draw_axis():
    """Redraw the frequency axis canvas next to the waterfall.

    Draws tick marks every 10 Hz (longer at 50/100 Hz multiples) labelled
    with the frequency modulo 1000 Hz, plus a red marker at the current
    Tx offset when it lies within +/-100 Hz of the passband center.
    """
    global fmid
    c.delete(ALL)
    nfmid=int(1.0e6*fmid + 0.5)%1000
    df=12000.0/8192.0               # Hz per pixel
    for iy in range(-120,120,10):
        j=80 - iy/df                # pixel row for offset iy (center row 80)
        i1=7                        # tick length; longer for round values
        if (iy%50)==0:
            i1=12
        if (iy%100)==0: i1=15
        n=nfmid+iy
        if n<0: n=n+1000
        c.create_text(27,j,text=str(n))
        c.create_line(0,j,i1,j,fill='black')
    # Red marker at the Tx offset relative to the 1500 Hz passband center.
    iy=1000000.0*(ftx.get()-f0.get()) - 1500
    if abs(iy)<=100:
        j=80 - iy/df
        c.create_line(0,j,13,j,fill='red',width=3)
def del_all():
    """Delete the cumulative ALL_WSPR.TXT log, ignoring a missing file.

    The exception clause was narrowed from a bare ``except:`` to
    ``OSError`` so that unrelated errors (e.g. KeyboardInterrupt)
    are no longer silently swallowed.
    """
    fname=appdir+'/ALL_WSPR.TXT'
    try:
        os.remove(fname)
    except OSError:
        pass
def delwav():
    """After confirmation, delete every .wav/.WAV file in the save directory."""
    t="Are you sure you want to delete\nall *.WAV files in the Save directory?"
    result=tkinter.messagebox.askyesno(message=t)
    if result:
        la=os.listdir(appdir+'/save')
        lb=[]
        for i in range(len(la)):
            # One find() returns -1, the other the match position, so j>0
            # selects names containing either extension.
            # NOTE(review): a match at index 1 gives j==0 and is missed.
            j=la[i].find(".wav") + la[i].find(".WAV")
            if j>0: lb.append(la[i])
        for i in range(len(lb)):
            fname=appdir+'/save/'+lb[i]
            os.remove(fname)
def get_decoded():
    """Read decoded.txt from the back end, display new spots, and refresh
    the band map.

    Each decode line is parsed into a dict with date/time/sync/snr/dt/freq,
    the message tokens, drift, cycles, and a message-type classification
    (type1 = call+grid+dBm, type2 = two-token, type3 = everything else).
    New spots are appended to the main text pane; the band map pane is
    rebuilt keeping the most recent entry per callsign from the last hour.
    Finally dispatches the upload thread and, in decode-all mode, chains
    to the next file.
    """
    global bandmap,bm,newdat,loopall
    try:
        f=open(appdir+'/decoded.txt',mode='r')
        decodes = []
        for line in f:
            fields = line.split()
            if len(fields) < 10: continue
            # Message tokens sit between the 6 fixed leading fields and the
            # 3 fixed trailing fields.
            msg = fields[6:-3]
            d = {}
            d['date'] = fields[0]
            d['time'] = fields[1]
            d['sync'] = fields[2]
            d['snr'] = fields[3]
            d['dt'] = fields[4]
            d['freq'] = fields[5]
            d['msg'] = msg
            d['drift'] = fields[-3]
            d['cycles'] = fields[-2]
            d['ii'] = fields[-1]
            d['type1'] = True
            d['type2'] = False
            d['type3'] = False
            # Type 1: <call> <4-char grid> <dBm>, call 3-6 chars, dBm a
            # valid power level (0-60, last digit 0/3/7).
            if len(msg) != 3 or len(msg[1]) != 4 or len(msg[0]) < 3 or \
               len(msg[0]) > 6 or not msg[2].isdigit():
                d['type1'] = False
            else:
                dbm = int(msg[2])
                if dbm < 0 or dbm > 60:
                    d['type1'] = False
                n=dbm%10
                if n!=0 and n!=3 and n!=7:
                    d['type1'] = False
            if not d['type1']:
                if len(msg)==2:
                    d['type2']=True
                else:
                    d['type3']=True
            # Strip the <...> wrapper from hashed callsigns.
            callsign = d['msg'][0]
            if callsign[0]=='<':
                n=callsign.find('>')
                callsign=callsign[1:n]
            d['call'] = callsign
            decodes.append(d)
        f.close()
    except:
        # Best effort: a missing or malformed file means no new decodes.
        decodes = []
    if len(decodes) > 0:
        text.configure(state=NORMAL)
        nseq=0
        nfmid=int(1.0e6*fmid)%1000
        for d in decodes:
            text.insert(END, "%4s %3s %4s %10s %2s  %s\n" % \
                (d['time'],d['snr'],d['dt'],d['freq'],d['drift'],\
                ' '.join(d['msg'])))
            try:
                callsign=d['call']
                tmin=60*int(d['time'][0:2]) + int(d['time'][2:4])
                ndf=int(d['freq'][-3:])
                bandmap.append((ndf,callsign,tmin))
            except:
                pass
        text.configure(state=DISABLED)
        text.see(END)
        # Deduplicate by callsign (latest entry wins), drop stale entries.
        bm={}
        iz=len(bandmap)
        for i in range(iz):
            bm[bandmap[i][1]]=(bandmap[i][0],bandmap[i][2])
        bandmap=[]
        for callsign,ft in bm.items():
            if callsign!='...':
                ndf,tdecoded=ft
                tmin=int((time.time()%86400)/60)
                tdiff=tmin-tdecoded
                if tdiff<0: tdiff=tdiff+1440   # wrapped past midnight
                if w.acom1.ndiskdat==1: tdiff=2  # disk playback: keep fresh
                if tdiff < 60: #60 minutes
                    bandmap.append((ndf,callsign,tdecoded))
        bm={}
        iz=len(bandmap)
        for i in range(iz):
            bm[bandmap[i][1]]=(bandmap[i][0],bandmap[i][2])
        # Show the band map sorted by frequency offset, color-coded by age.
        bandmap.sort()
        bandmap.reverse()
        text1.configure(state=NORMAL)
        text1.delete('1.0',END)
        for i in range(iz):
            t="%4d" % (bandmap[i][0],) + " " + bandmap[i][1]
            nage=int((tmin - bandmap[i][2])/15)   # age in 15-min buckets
            if nage<0: nage=nage+96
            attr='age0'
            if nage==1: attr='age1'
            if nage==2: attr='age2'
            if nage>=3: attr='age3'
            if w.acom1.ndiskdat==1: attr='age0'
            text1.insert(END,t+"\n",attr)
        text1.configure(state=DISABLED)
        text1.see(END)
    if upload.get():
        #Dispatch autologger thread.
        _thread.start_new_thread(autolog, (decodes,))
    if loopall:
        time.sleep(0.2)
        opennext()
def autolog(decodes):
    """Upload decoded spots (or a status report if none) to WSPRnet.

    Runs in a worker thread.  Each spot is sent as an HTTP GET to
    wsprnet.org/post; with no decodes, a single 'wsprstat' status report
    is sent instead.  Failures are logged to stdout (with a beep unless
    muted) and otherwise ignored.
    """
    # Random delay of up to 20 seconds to spread load out on server --W1BW
    time.sleep(random.random() * 20.0)
    try:
        # This code originally by W6CQZ ... modified by W1BW
        # TODO: Cache entries for later uploading if net is down.
        # TODO: (Maybe??) Allow for stations wishing to collect spot data but
        #       only upload in batch form vs real-time.
        # Any spots to upload?
        if len(decodes) > 0:
            for d in decodes:
                # now to format as a string to use for autologger upload using urlencode
                # so we get a string formatted for http get/put operations:
                m=d['msg']
                tcall=m[0]
                if d['type2']:
                    # Two-token message: no grid, second token is power.
                    tgrid=''
                    dbm=m[1]
                else:
                    tgrid=m[1]
                    dbm=m[2]
                # Strip <...> from hashed callsigns; skip unresolved hashes.
                if tcall[0]=='<':
                    n=tcall.find('>')
                    tcall=tcall[1:n]
                if tcall=='...': continue
                # Skip spots recorded on a different dial frequency.
                dfreq=float(d['freq'])-w.acom1.f0b-0.001500
                if abs(dfreq)>0.0001:
                    print('Frequency changed, no upload of spots')
                    continue
                reportparams = urllib.parse.urlencode({'function': 'wspr',
                    'rcall': options.MyCall.get(),
                    'rgrid': options.MyGrid.get(),
                    'rqrg': str(f0.get()),
                    'date': d['date'],
                    'time': d['time'],
                    'sig': d['snr'],
                    'dt': d['dt'],
                    'tqrg': d['freq'],
                    'drift': d['drift'],
                    'tcall': tcall,
                    'tgrid': tgrid,
                    'dbm': dbm,
                    'version': Version})
                urlf = urllib.request.urlopen("http://wsprnet.org/post?%s" \
                    % reportparams)
                reply = urlf.readlines()
                urlf.close()
        else:
            # No spots to report, so upload status message instead. --W1BW
            reportparams = urllib.parse.urlencode({'function': 'wsprstat',
                'rcall': options.MyCall.get(),
                'rgrid': options.MyGrid.get(),
                'rqrg': str(fmid),
                'tpct': str(ipctx.get()),
                'tqrg': sftx.get(),
                'dbm': str(options.dBm.get()),
                'version': Version})
            urlf = urllib.request.urlopen("http://wsprnet.org/post?%s" \
                % reportparams)
            reply = urlf.readlines()
            urlf.close()
    except:
        # Best effort: report the failure and carry on.
        t=" UTC:  attempted access to WSPRnet failed."
        if not no_beep.get(): t=t + "\a"
        print(time.asctime(time.gmtime()) + t)
def put_params(param3=NONE):
    """Copy the current GUI settings into the compiled back end (w.acom1).

    Called once per second from update().  String fields written to the
    back end are space-padded to fixed widths.  The requested power level
    is snapped to the nearest entry in pwrlist.
    """
    global param20
    w.acom1.callsign=(options.MyCall.get().strip().upper()+' ')[:12]
    w.acom1.grid=(options.MyGrid.get().strip().upper()+' ')[:4]
    w.acom1.grid6=(options.MyGrid.get().strip().upper()+' ')[:6]
    w.acom1.ctxmsg=(txmsg.get().strip().upper()+' ')[:22]
    # numeric port ==> COM%d, else string of device.  --W1BW
    port = options.PttPort.get()
    if port=='None': port='0'
    if port[:3]=='COM': port=port[3:]
    if port.isdigit():
        w.acom1.nport = int(port)
        port = "COM%d" % (int(port))
    else:
        w.acom1.nport = 0
    w.acom1.pttport = (port + 80*' ')[:80]
    try:
        dbm=int(options.dBm.get())
    except:
        dbm=37
    # Negative power levels are only valid in Type 1 messages (no
    # compound call, no 6-character grid).
    i1=options.MyCall.get().find('/')
    if dbm<0 and (i1>0 or advanced.igrid6.get()):
        MsgBox("Negative dBm values are permitted\n only for Type 1 messages.")
        dbm=0
        options.dBm.set(0)
    # Snap to the nearest permitted power level.
    mindiff=9999
    for i in range(len(pwrlist)):
        if abs(dbm-pwrlist[i])<mindiff:
            mindiff=abs(dbm-pwrlist[i])
            ibest=i
    dbm=pwrlist[ibest]
    options.dBm.set(dbm)
    w.acom1.ndbm=dbm
    w.acom1.ntxfirst=ntxfirst.get()
    w.acom1.nsave=nsave.get()
    try:
        w.acom1.nbfo=advanced.bfofreq.get()
    except:
        w.acom1.nbfo=1500
    try:
        w.acom1.idint=advanced.idint.get()
    except:
        w.acom1.idint=0
    w.acom1.igrid6=advanced.igrid6.get()
    # I/Q mode settings, including Tx balance/phase fine adjustments.
    w.acom1.iqmode=iq.iqmode.get()
    w.acom1.iqrx=iq.iqrx.get()
    w.acom1.iqrxapp=iq.iqrxapp.get()
    w.acom1.iqrxadj=iq.iqrxadj.get()
    w.acom1.iqtx=iq.iqtx.get()
    w.acom1.ntxdb=advanced.isc1.get()
    bal=iq.isc2.get() + 0.02*iq.isc2a.get()
    w.acom1.txbal=bal
    pha=iq.isc3.get() + 0.02*iq.isc3a.get()
    w.acom1.txpha=pha
    try:
        w.acom1.nfiq=iq.fiq.get()
    except:
        w.acom1.nfiq=0
    # Audio devices and serial/CAT parameters.
    w.acom1.ndevin=g.ndevin.get()
    w.acom1.ndevout=g.ndevout.get()
    w.acom1.nbaud=options.serial_rate.get()
    w.acom1.ndatabits=options.databits.get()
    w.acom1.nstopbits=options.stopbits.get()
    w.acom1.chs=(options.serial_handshake.get() + \
        ' ')[:40]
    w.acom1.catport=(options.CatPort.get()+' ')[:12]
    try:
        w.acom1.nrig=options.rignum.get()
    except:
        pass
def update():
    """Main 200 ms GUI/housekeeping tick.

    Polls the compiled back end (w.acom1), services the external UDP
    control socket, handles band hopping and CAT frequency changes,
    refreshes the waterfall and all status widgets, and reschedules
    itself via Tk's after().

    Fixes applied in this revision:
      * 'audioout' command branch used the undefined name ``value``
        (NameError) instead of ``descriptor`` (cf. the 'audioin' branch).
      * Band-range error print used the undefined name ``idata``.
      * ``asciidata`` is now initialized before the try block so the
        generic exception report cannot itself raise NameError.
      * Coordinated-band computation used float division (``ns1/120``),
        so the ``iband.get()==b`` comparison almost never matched under
        Python 3; now uses ``int(ns1/120)`` like the hopping branch.
    """
    global root_geom,isec0,im,pim,ndbm0,nsec0,a,ftx0,nin0,nout0, \
        receiving,transmitting,newdat,nscroll,newspec,scale0,offset0, \
        modpixmap0,tw,s0,c0,fmid,fmid0,loopall,ntr0,txmsg,iband0, \
        bandmap,bm,t0,nreject,gain,phdeg,ierr,itx0,timer1,ndecoding0, \
        hopping0,ntune0,startup,nred
    # Added - Bob Cowdery (G3UKB)
    global lastBndCmd, extAddr, lastState, allowSwitch
    # End - Bob Cowdery (G3UKB)
    tsec=time.time()
    utc=time.gmtime(tsec)
    nsec=int(tsec)
    nsec0=nsec
    # Enable/disable the hopping button as its configuration state changes.
    if hopping.hoppingconfigured.get()==1:
        if hopping0!=1:
            hopping0=1
            bhopping.configure(state=NORMAL)
    else:
        if hopping0!=2:
            hopping0=2
            bhopping.configure(state=DISABLED)
    hopped=0
    if not idle.get():
        if hopping.hopping.get()==1:
            # Band hopping: when the back end signals it is safe (nfhopok),
            # pick the coordinated band for this 2-minute slot, or a random
            # enabled band.
            w.acom1.nfhopping=1
            if w.acom1.nfhopok or startup:
                w.acom1.nfhopok=0
                startup=0
                b=-1
                if hopping.coord_bands.get()==1:
                    ns=nsec % 86400
                    ns1=ns % (10*120)
                    b=int(ns1/120) + 3
                    if b==12: b=2
                    if hopping.hoppingflag[int(b)].get()==0: b=-1
                if b<0:
                    found=False
                    while not found:
                        b = random.randint(1,len(hopping.bandlabels)-1)
                        if hopping.hoppingflag[b].get()!=0:
                            found=True
                ipctx.set(hopping.hoppingpctx[b].get())
                if b!=iband.get(): hopped=1
                iband.set(b)
        else:
            w.acom1.nfhopping=0
            ns=nsec % 86400
            ns1=ns % (10*120)
            # FIX: integer band index (was float division under Python 3,
            # making the equality test below almost never true).
            b=int(ns1/120) + 3
            if b==12: b=2
            if iband.get()==b and random.randint(1,2)==1 and ipctx.get()>0:
                w.acom1.ntxnext=1
    try:
        f0.set(float(sf0.get()))
        ftx.set(float(sftx.get()))
    except:
        pass
    isec=utc[5]
    trmin=2
    twait=trmin - (tsec % trmin)
    # Band change (or explicit frequency-set request): load the standard
    # frequencies and, with CAT enabled, retune the rig via rigctl.
    if iband.get()!=iband0 or advanced.fset.get():
        advanced.fset.set(0)
        f0.set(freq0[iband.get()])
        t="%.6f" % (f0.get(),)
        sf0.set(t)
        ftx.set(freqtx[iband.get()])
        t="%.6f" % (ftx.get(),)
        sftx.set(t)
        if options.cat_enable.get():
            if advanced.encal.get():
                # Apply the linear frequency calibration A + f*(1e6 + B).
                nHz=int(advanced.Acal.get() + \
                    f0.get()*(1000000.0 + advanced.Bcal.get()) + 0.5)
            else:
                nHz=int(1000000.0*f0.get() + 0.5)
            if options.rignum.get()==2509 or options.rignum.get()==2511:
                nHzLO=nHz - iq.fiq.get()
                cmd="rigctl -m %d -r %s F %d" % \
                    (options.rignum.get(),options.CatPort.get(),nHzLO)
            elif options.rignum.get()==1901:
                cmd="rigctl -m 1901 -r localhost F %d" % (nHz,)
            else:
                cmd="rigctl -m %d -r %s -s %d -C data_bits=%s -C stop_bits=%s -C serial_handshake=%s F %d" % \
                    (options.rignum.get(),options.CatPort.get(), \
                    options.serial_rate.get(),options.databits.get(), \
                    options.stopbits.get(),options.serial_handshake.get(),nHz)
            ierr=os.system(cmd)
            if ierr==0:
                ierr2=0
                # Retune succeeded: clear the band map, record the setup in
                # fmt.ini, and run the optional user_hardware hook.
                bandmap=[]
                bm={}
                text1.configure(state=NORMAL)
                text1.delete('1.0',END)
                text1.configure(state=DISABLED)
                iband0=iband.get()
                f=open(appdir+'/fmt.ini',mode='w')
                f.write(cmd+'\n')
                f.write(str(g.ndevin.get())+'\n')
                f.write(options.MyCall.get()+'\n')
                f.write(options.MyGrid.get()+'\n')
                f.close()
                cmd2=''
                if os.path.exists('.\\user_hardware.bat') or \
                   os.path.exists('.\\user_hardware.cmd') or \
                   os.path.exists('.\\user_hardware.exe'):
                    cmd2='.\\user_hardware ' + str(band[iband0])
                elif os.path.exists('./user_hardware'):
                    cmd2='./user_hardware ' + str(band[iband0])
                if cmd2!='':
                    try:
                        ierr2=os.system(cmd2)
                    except:
                        ierr2=-1
                    if ierr2!=0:
                        print('Execution of "'+cmd2+'" failed.')
                        MsgBox('Execution of "'+cmd2+ \
                            '" failed.\nEntering Idle mode.')
            else:
                # Retune failed: report and revert to the previous band.
                print('Error attempting to set rig frequency.\a')
                print(cmd + '\a')
                iband.set(iband0)
                f0.set(freq0[iband.get()])
                t="%.6f" % (f0.get(),)
                sf0.set(t)
                ftx.set(freqtx[iband.get()])
                t="%.6f" % (ftx.get(),)
                sftx.set(t)
            # After a successful hop, optionally fire a short tune-up burst.
            if ierr==0 and ierr2==0 and w.acom1.nfhopping==1 and hopped==1 \
               and hopping.tuneupflag[iband.get()].get(): w.acom1.ntune=-3
        else:
            iband0=iband.get()
        iq.ib.set(iband.get())
        iq.newband()
    # Remember any user edits of the dial/Tx frequencies for this band.
    freq0[iband.get()]=f0.get()
    freqtx[iband.get()]=ftx.get()
    w.acom1.iband=iband.get()
    try:
        w.acom1.f0=f0.get()
        w.acom1.ftx=ftx.get()
    except:
        pass
    newsecond=0 # =1 if a new second
    if isec != isec0: #Do once per second
        # this code block is executed once per second
        # ===============================================================================
        # Added - Bob Cowdery (G3UKB)
        LOW_DB = -5
        HIGH_DB = 5
        # This code allows some external control to be exercised over a UDP socklet
        asciidata = ''  # FIX: defined before try so the error report cannot NameError
        try:
            data, extAddr = extsock.recvfrom(100)
            asciidata = data.decode(encoding='UTF-8')
            # Commands are as follows:
            #   'iqmode:n' where n=0 (IQ off), n=1 (IQ on)
            #   'audioin:<descriptor>'
            #   'audioout:<descriptor>'
            #   'band:n' where n is 2 (160m) - 14 (2m)
            #   'tx:n' where n=0 (no TX), n=1 (20% TX)
            #   'power:n where n is the dBm reduction
            #   'idle:n where n=0 (set IDLE), n=1 (set RUN)
            #   'upload:n' where n=0 (don't upload spots), n=1 (upload spots)
            #   'reset' Something went wrong so reset to a start state
            if 'iqmode' in asciidata:
                _, iqmode = asciidata.split(':')
                iqmode = int(iqmode)
                if iqmode == 0:
                    iq.iqmode.set(0)
                else:
                    iq.iqmode.set(1)
            elif 'audioin' in asciidata:
                _, descriptor = asciidata.split(':', 1)
                g.DevinName.set(descriptor)
                try:
                    g.ndevin.set(int(descriptor[:2]))
                except:
                    g.ndevin.set(0)
                options.DevinName.set(descriptor)
            elif 'audioout' in asciidata:
                _, descriptor = asciidata.split(':', 1)
                # FIX: this branch referenced the undefined name `value`;
                # use `descriptor`, mirroring the 'audioin' branch above.
                g.DevoutName.set(descriptor)
                try:
                    g.ndevout.set(int(descriptor[:2]))
                except:
                    g.ndevout.set(0)
                options.DevoutName.set(descriptor)
            elif 'band' in asciidata:
                _, asciiband = asciidata.split(':')
                ibandno = int(asciiband)
                if ibandno >= 2 and ibandno <= 14:
                    lastBndCmd = ibandno
                else:
                    # FIX: was `idata`, an undefined name.
                    print('Band out of range ', asciiband)
            elif 'tx' in asciidata:
                _, asciitx = asciidata.split(':')
                itx = int(asciitx)
                if itx == 0:
                    ipctx.set(0)
                else:
                    ipctx.set(20)
            elif 'power' in asciidata:
                _, asciipwr = asciidata.split(':')
                idBm = int(asciipwr)
                # Adjust power by idBm
                advanced.isc1.set(-idBm)
            elif 'idle' in asciidata:
                _, asciiidle = asciidata.split(':')
                iidle = int(asciiidle)
                if iidle == 0:
                    idle.set(0)
                else:
                    idle.set(1)
            elif 'upload' in asciidata:
                _, asciiupload = asciidata.split(':')
                iupload = int(asciiupload)
                if iupload == 0:
                    upload.set(0)
                else:
                    upload.set(1)
            elif 'reset' in asciidata:
                lastBndCmd = -1
                allowSwitch = True
        except socket.timeout:
            pass     # no external command waiting; normal case
        except Exception as e:
            print('Exception processing external command [%s][%s]' % (asciidata, str(e)))
        # Process commands that must be executed while IDLE
        if not receiving and not transmitting and allowSwitch:
            # Idle
            if lastBndCmd != -1:
                # Switch band
                allowSwitch = False
                iband.set(lastBndCmd)
                # Let client know we have now switched
                evtsock.sendto(('band:%d' % lastBndCmd).encode('UTF-8'), (extAddr[0], EVT_PORT))
                lastBndCmd = -1
        # Tail end processing and check for events due
        if receiving:
            currentState = S_RX
            # Once we see an RX cycle we can allow a band switch on next IDLE
            allowSwitch = True
            # Try to adjust the level to around 0dB as different bands will have different noise levels
            iterations = 5
            while iterations > 0:
                ndb=int(w.acom1.xdb1-41.0+ndgain.get())
                if ndb < LOW_DB or ndb > HIGH_DB:
                    # Not within a good range
                    if ndb < 0:
                        # Increase gain
                        ndgain.set(ndgain.get() + abs(ndb))
                    else:
                        # Decrease gain
                        ndgain.set(ndgain.get() - ndb)
                    iterations -= 1
                else:
                    break
        elif transmitting: currentState = S_TX
        else: currentState = S_IDLE
        # Report RX/TX cycle start/end transitions to the external client.
        if lastState != currentState:
            if lastState == S_TX or lastState == S_IDLE and currentState == S_RX:
                # Start of RX cycle
                if extAddr != None:
                    evtsock.sendto('rx-cycle-start'.encode('UTF-8'), (extAddr[0], EVT_PORT))
            elif lastState == S_RX and currentState == S_IDLE:
                # End of a receive cycle
                if extAddr != None:
                    evtsock.sendto('rx-cycle-end'.encode('UTF-8'), (extAddr[0], EVT_PORT))
            elif lastState == S_RX or lastState == S_IDLE and currentState == S_TX:
                # Start of TX cycle
                if extAddr != None:
                    evtsock.sendto('tx-cycle-start'.encode('UTF-8'), (extAddr[0], EVT_PORT))
            elif lastState == S_TX and currentState == S_IDLE or currentState == S_RX:
                # End of TX cycle
                if extAddr != None:
                    evtsock.sendto('tx-cycle-end'.encode('UTF-8'), (extAddr[0], EVT_PORT))
        lastState = currentState
        # End - Bob Cowdery (G3UKB)
        # ===============================================================================
        newsecond=1
        t=time.strftime('%Y %b %d\n%H:%M:%S',utc)
        ldate.configure(text=t)
        root_geom=root.geometry()
        utchours=utc[3]+utc[4]/60.0 + utc[5]/3600.0
        try:
            if options.dBm.get()!=ndbm0:
                ndbm0=options.dBm.get()
                options.dbm_balloon()
        except:
            pass
        put_params()
    # --- once-per-tick widget refresh -------------------------------------
    nndf=int(1000000.0*(ftx.get()-f0.get()) + 0.5) - 1500
    gain=w.acom1.gain
    phdeg=57.2957795*w.acom1.phase
    nreject=int(w.acom1.reject)
    ndb=int(w.acom1.xdb1-41.0+ndgain.get())
    if ndb<-30: ndb=-30
    dbave=w.acom1.xdb1
    if iq.iqmode.get():
        t='Bal: %6.4f Pha: %6.1f >%3d dB' % (gain,phdeg,nreject)
        iq.lab1.configure(text=t)
        ndb2=int(w.acom1.xdb2-41.0)
        if ndb2<-30: ndb2=-30
        dbave=0.5*(w.acom1.xdb1 + w.acom1.xdb2)
        t='Rx Noise: %3d %3d dB' % (ndb,ndb2)
    else:
        t='Rx Noise: %3d dB' % (ndb,)
    bg='gray85'
    r=SUNKEN
    smcolor="green"
    if w.acom1.receiving==0:
        t=''
        r=FLAT
    if isec!=isec0:
        msg1.configure(text=t,relief=r)
        isec0=isec
    dbave=dbave + ndgain.get()
    if not receiving: dbave=0
    sm.updateProgress(newValue=dbave,newColor=smcolor)
    if nred>0:
        nred=nred-1
        if nred==0: lab02.configure(text="",bg='gray85')
    # Track receive/transmit state transitions from the back end.
    ntr=int(w.acom1.ntr)
    itx=w.acom1.transmitting
    if ntr!=ntr0 or itx!=itx0:
        ntr0=ntr
        itx0=int(itx)
        if ntr==-1 or itx==1:
            transmitting=1
            receiving=0
        elif ntr==0:
            transmitting=0
            receiving=0
        else:
            transmitting=0
            receiving=1
            # New receive period: push its start time and band labels.
            n=len(tw)
            if n>12: tw=tw[:n-1]
            rxtime=w.acom1.rxtime.tostring().decode('utf-8')
            rxtime=rxtime[:2] + ':' + rxtime[2:]
            tw=[rxtime,] + tw
            global fw
            if n>12: fw=fw[:n-1]
            fw=[hopping.bandlabels[ iband.get()][:-2],] + fw
    # File-open entries are only usable when not receiving.
    if receiving:
        filemenu.entryconfig(0,state=DISABLED)
        filemenu.entryconfig(1,state=DISABLED)
        filemenu.entryconfig(2,state=DISABLED)
    else:
        filemenu.entryconfig(0,state=NORMAL)
        filemenu.entryconfig(1,state=NORMAL)
        filemenu.entryconfig(2,state=NORMAL)
    if transmitting:
        btxnext.configure(bg="gray85")
        for i in range(15):
            bandmenu.entryconfig(i,state=DISABLED)
    else:
        for i in range(15):
            bandmenu.entryconfig(i,state=NORMAL)
    # Status banner: waiting / transmitting / receiving.
    bgcolor='gray85'
    t='Waiting to start'
    bgcolor='pink'
    if transmitting:
        t='Txing: ' + w.acom1.sending.tostring().decode('utf-8')
        bgcolor='yellow'
    if receiving:
        t='Receiving'
        bgcolor='green'
    if t!=t0: # dont draw unless changed
        msg6.configure(text=t,bg=bgcolor)
        t0=t
    ntune=int(w.acom1.ntune)
    if ntune!=ntune0:
        ntune0=ntune
        if ntune==0:
            btune.configure(bg='gray85')
            pctscale.configure(state=NORMAL)
        else:
            pctscale.configure(state=DISABLED)
    global ncal0
    ncal=w.acom1.ncal
    if ncal!=ncal0:
        ncal0=ncal
        if ncal==0:
            advanced.bmeas.configure(bg='gray85')
        else:
            idle.set(1)
    # Effective Tx fraction: zero while a rig error is pending or Tx muted.
    if ierr==0 and txmute.get()==0:
        w.acom1.pctx=ipctx.get()
    else:
        w.acom1.pctx=0
    global txmute0
    if txmute.get()!=txmute0:
        txmute0=txmute.get()
        if txmute0:
            w.acom1.pctx=0
            w.acom1.ntxnext=0
            bmute.configure(bg='red')
            btxnext.configure(state=DISABLED)
            btxnext.configure(bg='gray85')
        else:
            bmute.configure(bg='gray85')
            btxnext.configure(state=NORMAL)
    w.acom1.idle=idle.get()
    global idle0
    if idle0!=idle.get():
        idle0=idle.get()
        if idle0==0:
            bidle.configure(bg='gray85')
        else:
            bidle.configure(bg='yellow')
    global btune0
    if w.acom1.transmitting or w.acom1.receiving or options.outbad.get():
        if btune0!=1:
            btune0=1
            btune.configure(state=DISABLED)
    else:
        if btune0!=2:
            btune0=2
            btune.configure(state=NORMAL)
    global adv0
    if w.acom1.transmitting or w.acom1.receiving or twait < 6.0:
        if adv0!=1:
            adv0=1
            advanced.bmeas.configure(state=DISABLED)
    else:
        if adv0!=2:
            adv0=2
            advanced.bmeas.configure(state=NORMAL)
    global upload0
    if upload.get()==1:
        if upload0!=1:
            upload0=1
            bupload.configure(bg='gray85')
    else:
        if upload0!=2:
            upload0=2
            bupload.configure(bg='yellow')
    if w.acom1.ndecdone:
        get_decoded()
        w.acom1.ndecdone=0
    # Reload the waterfall pixmap when the back end rewrites pixmap.dat
    # (detected by its mtime, os.stat()[8]).
    try:
        modpixmap=os.stat('pixmap.dat')[8]
        if modpixmap!=modpixmap0:
            f=open('pixmap.dat','rb')
            a=array.array('h')
            a.fromfile(f,NX*NY)
            f.close()
            newdat=1
            modpixmap0=modpixmap
    except:
        newdat=0
    scale=math.pow(10.0,0.003*sc1.get())
    offset=0.3*sc2.get()
    if newdat or scale!= scale0 or offset!=offset0 or g.cmap!=g.cmap0:
        im.putdata(a,scale,offset) #Compute whole new image
        if newdat:
            n=len(tw)
            for i in range(n-1,-1,-1):
                x=465-39*i
                draw.text((x,148),tw[i],fill=253) #Insert time label
                if i<len(fw):
                    draw.text((x+10,1),fw[i],fill=253) #Insert band label
        pim=ImageTk.PhotoImage(im) #Convert Image to PhotoImage
        graph1.delete(ALL)
        graph1.create_image(0,0+2,anchor='nw',image=pim)
        g.ndecphase=2
        newMinute=0
        scale0=scale
        offset0=offset
        g.cmap0=g.cmap
    newdat=0
    s0=sc1.get()
    c0=sc2.get()
    try:
        fmid=f0.get() + 0.001500
    except:
        pass
    if fmid!=fmid0 or ftx.get()!=ftx0:
        fmid0=fmid
        ftx0=ftx.get()
        draw_axis()
        # Constrain the Tx entry to +/-100 Hz around the passband center.
        lftx.configure(validate={'validator':'real',
            'min':f0.get()+0.001500-0.000100,'minstrict':0,
            'max':f0.get()+0.001500+0.000100,'maxstrict':0})
    w.acom1.ndebug=ndebug.get()
    # Enable/disable the CAT-related option widgets to match the settings.
    if options.rignum.get()==2509 or options.rignum.get()==2511:
        options.pttmode.set('CAT')
        options.CatPort.set('USB')
    if options.pttmode.get()=='CAT':
        options.cat_enable.set(1)
    if options.pttmode.get()=='CAT' or options.pttmode.get()=='VOX':
        options.PttPort.set('None')
        options.ptt_port._entryWidget['state']=DISABLED
    else:
        options.ptt_port._entryWidget['state']=NORMAL
    global adv1
    if options.cat_enable.get():
        options.lrignum._entryWidget['state']=NORMAL
        if options.cat_port.get() != 'USB':
            options.cat_port._entryWidget['state']=NORMAL
            options.cbbaud._entryWidget['state']=NORMAL
            options.cbdata._entryWidget['state']=NORMAL
            options.cbstop._entryWidget['state']=NORMAL
            options.cbhs._entryWidget['state']=NORMAL
        else:
            options.cat_port._entryWidget['state']=DISABLED
            options.cbbaud._entryWidget['state']=DISABLED
            options.cbdata._entryWidget['state']=DISABLED
            options.cbstop._entryWidget['state']=DISABLED
            options.cbhs._entryWidget['state']=DISABLED
        if adv1!=1:
            adv1=1
            advanced.bsetfreq.configure(state=NORMAL)
            advanced.breadab.configure(state=NORMAL)
            advanced.enable_cal.configure(state=NORMAL)
    else:
        options.cat_port._entryWidget['state']=DISABLED
        options.lrignum._entryWidget['state']=DISABLED
        options.cbbaud._entryWidget['state']=DISABLED
        options.cbdata._entryWidget['state']=DISABLED
        options.cbstop._entryWidget['state']=DISABLED
        options.cbhs._entryWidget['state']=DISABLED
        if adv1!=2:
            adv1=2
            advanced.bsetfreq.configure(state=DISABLED)
            advanced.breadab.configure(state=DISABLED)
            advanced.enable_cal.configure(state=DISABLED)
            advanced.encal.set(0)
    w.acom1.pttmode=(options.pttmode.get().strip()+' ')[:3]
    w.acom1.ncat=options.cat_enable.get()
    w.acom1.ncoord=hopping.coord_bands.get()
    w.acom1.ntrminutes=2
    if g.ndevin.get()!= nin0 or g.ndevout.get()!=nout0:
        audio_config()
        nin0=g.ndevin.get()
        nout0=g.ndevout.get()
    global inbad0
    if inbad0!=options.inbad.get():
        inbad0=options.inbad.get()
        if inbad0==0:
            msg2.configure(text='',bg='gray85')
        else:
            msg2.configure(text='Invalid audio input device.',bg='red')
    global outbad0
    if outbad0!=options.outbad.get():
        outbad0=options.outbad.get()
        if outbad0==0:
            msg3.configure(text='',bg='gray85')
        else:
            msg3.configure(text='Invalid audio output device.',bg='red')
    if ndecoding0!=int(w.acom1.ndecoding):
        ndecoding0=int(w.acom1.ndecoding)
        if ndecoding0:
            msg5.configure(text='Decoding',bg='#66FFFF',relief=SUNKEN)
        else:
            msg5.configure(text='',bg='gray85',relief=FLAT)
    global encal0
    if encal0!=advanced.encal.get():
        encal0=advanced.encal.get()
        if encal0:
            advanced.A_entry.configure(entry_state=NORMAL,label_state=NORMAL)
            advanced.B_entry.configure(entry_state=NORMAL,label_state=NORMAL)
        else:
            advanced.A_entry.configure(entry_state=DISABLED, \
                label_state=DISABLED)
            advanced.B_entry.configure(entry_state=DISABLED, \
                label_state=DISABLED)
    # Reschedule ourselves; this is the application's main loop.
    timer1=ldate.after(200,update)
def audio_config():
    """Validate the selected audio devices and flag bad choices.

    Asks the back end (w.audiodev) about the currently selected input and
    output device numbers, mirrors the result into the options dialog's
    inbad/outbad variables, and opens the setup dialog when either device
    is invalid.
    """
    bad_in, bad_out = w.audiodev(g.ndevin.get(), g.ndevout.get())
    options.inbad.set(bad_in)
    options.outbad.set(bad_out)
    devices_ok = not (bad_in or bad_out)
    w.acom1.ndevsok = 1 if devices_ok else 0
    if not devices_ok:
        # Pop the setup dialog so the user can pick working devices.
        options1()
def save_params():
    """Persist all user settings to WSPR.INI in "<Key> <value>" format.

    Spaces inside free-text values (device names, rig name, handshake,
    MRU directory) are encoded as '#' so each line splits cleanly on
    whitespace when read back by readinit().  Empty call/grid/device
    fields are replaced with placeholder values first so every line
    keeps exactly two fields.  Band-hopping settings are written
    separately by the hopping module.
    """
    # Band names in the same order as the freq0/freqtx indices (1..15).
    bands = ('600', '160', '80', '60', '40', '30', '20', '17',
             '15', '12', '10', '6', '4', '2', 'other')
    # "with" guarantees the file is closed even if a .get() call raises.
    with open(appdir + '/WSPR.INI', mode='w') as f:
        f.write("WSPRGeometry " + root_geom + "\n")
        if options.MyCall.get() == '':
            options.MyCall.set('##')    # placeholder keeps the line parseable
        f.write("MyCall " + options.MyCall.get() + "\n")
        if options.MyGrid.get() == '':
            options.MyGrid.set('##')
        f.write("MyGrid " + options.MyGrid.get() + "\n")
        f.write("CWID " + str(advanced.idint.get()) + "\n")
        f.write("dBm " + str(options.dBm.get()) + "\n")
        f.write("PttPort " + str(options.PttPort.get()) + "\n")
        f.write("CatPort " + str(options.CatPort.get()) + "\n")
        if options.DevinName.get() == '':
            options.DevinName.set('0')
        f.write("AudioIn " + options.DevinName.get().replace(" ", "#") + "\n")
        if options.DevoutName.get() == '':
            options.DevoutName.set('2')
        f.write("AudioOut " + options.DevoutName.get().replace(" ", "#") + "\n")
        f.write("BFOfreq " + str(advanced.bfofreq.get()) + "\n")
        f.write("PTTmode " + options.pttmode.get() + "\n")
        f.write("CATenable " + str(options.cat_enable.get()) + "\n")
        f.write("Acal " + str(advanced.Acal.get()) + "\n")
        f.write("Bcal " + str(advanced.Bcal.get()) + "\n")
        f.write("CalEnable " + str(advanced.encal.get()) + "\n")
        f.write("IQmode " + str(iq.iqmode.get()) + "\n")
        f.write("IQrx " + str(iq.iqrx.get()) + "\n")
        f.write("IQtx " + str(iq.iqtx.get()) + "\n")
        f.write("FIQ " + str(iq.fiq.get()) + "\n")
        f.write("Ntxdb " + str(advanced.isc1.get()) + "\n")
        f.write("SerialRate " + str(options.serial_rate.get()) + "\n")
        f.write("DataBits " + str(options.databits.get()) + "\n")
        f.write("StopBits " + str(options.stopbits.get()) + "\n")
        f.write("Handshake " +
                options.serial_handshake.get().replace(" ", "#") + "\n")
        # Rig description: encode spaces and tabs, cap the length at 46.
        t = str(options.rig.get().replace(" ", "#"))
        f.write("Rig " + str(t.replace("\t", "#"))[:46] + "\n")
        f.write("Nsave " + str(nsave.get()) + "\n")
        f.write("PctTx " + str(ipctx.get()) + "\n")
        f.write("DGain " + str(ndgain.get()) + "\n")
        f.write("Upload " + str(upload.get()) + "\n")
        f.write("Idle " + str(idle.get()) + "\n")
        f.write("Debug " + str(ndebug.get()) + "\n")
        f.write("WatScale " + str(s0) + "\n")
        f.write("WatOffset " + str(c0) + "\n")
        f.write("Palette " + g.cmap + "\n")
        f.write("MRUdir " + mrudir.replace(" ", "#") + "\n")
        # Dial and Tx frequencies for every band (same output as the old
        # 30 hand-written lines, generated from the band table instead).
        for i, band in enumerate(bands, start=1):
            f.write("freq0_" + band + " " + str(freq0[i]) + "\n")
            f.write("freqtx_" + band + " " + str(freqtx[i]) + "\n")
        f.write("iband " + str(iband.get()) + "\n")
        f.write("StartIdle " + str(start_idle.get()) + "\n")
        f.write("NoBeep " + str(no_beep.get()) + "\n")
        f.write("Reject " + str(nreject) + "\n")
        f.write("RxApply " + str(iq.iqrxapp.get()) + "\n")
    hopping.save_params(appdir)
# ---------------------------------------------------------------------------
# Main menu bar: File, Setup, View, Save, Band and Help menus, followed by
# the global keyboard accelerators that mirror the menu entries.
# ---------------------------------------------------------------------------
frame = Frame(root)
mbar = Frame(frame)
mbar.pack(fill = X)

# File menu: open/decode wave files, housekeeping, save settings, exit.
filebutton = Menubutton(mbar, text = 'File')
filebutton.pack(side = LEFT)
filemenu = Menu(filebutton, tearoff=0)
filebutton['menu'] = filemenu
filemenu.add('command', label = 'Open', command = openfile, \
             accelerator='Ctrl+O')
filemenu.add('command', label = 'Open next in directory', command = opennext, \
             accelerator='F6')
filemenu.add('command', label = 'Decode remaining files in directory', \
             command = decodeall, accelerator='Shift+F6')
filemenu.add_separator()
filemenu.add('command', label = 'Delete all *.WAV files in Save', \
             command = delwav)
filemenu.add_separator()
filemenu.add('command', label = 'Erase ALL_WSPR.TXT', command = del_all)
filemenu.add_separator()
filemenu.add('command', label = 'Save user parameters', command = save_params)
filemenu.add_separator()
filemenu.add('command', label = 'Exit', command = quit, accelerator='Alt+F4')

# Setup menu: configuration dialogs plus two persistent checkbox options.
setupbutton = Menubutton(mbar, text = 'Setup')
setupbutton.pack(side = LEFT)
setupmenu = Menu(setupbutton, tearoff=0)
setupbutton['menu'] = setupmenu
setupmenu.add('command', label = 'Station parameters', command = options1,
              accelerator='F2')
setupmenu.add('command', label = 'Advanced', command = advanced1,
              accelerator='F7')
setupmenu.add('command', label = 'IQ Mode', command = iq1,
              accelerator='F8')
setupmenu.add('command', label = 'Band Hopping', command = hopping1,
              accelerator='F9')
setupmenu.add_separator()
setupmenu.add_checkbutton(label = 'Always start in Idle mode',
                          variable=start_idle)
setupmenu.add_checkbutton(label = 'No beep when access to WSPRnet fails',
                          variable=no_beep)

# View menu: waterfall palette selection (radio buttons in a cascade,
# bound to npal; each command repaints via its pal_* function).
viewbutton = Menubutton(mbar, text = 'View', )
viewbutton.pack(side = LEFT)
viewmenu = Menu(viewbutton, tearoff=0)
viewbutton['menu'] = viewmenu
viewmenu.palettes=Menu(setupmenu,tearoff=0)
viewmenu.palettes.add_radiobutton(label='Gray0',command=pal_gray0,
                                  value=0,variable=npal)
viewmenu.palettes.add_radiobutton(label='Gray1',command=pal_gray1,
                                  value=1,variable=npal)
viewmenu.palettes.add_radiobutton(label='Linrad',command=pal_linrad,
                                  value=2,variable=npal)
viewmenu.palettes.add_radiobutton(label='Blue',command=pal_blue,
                                  value=3,variable=npal)
viewmenu.palettes.add_radiobutton(label='Hot',command=pal_Hot,
                                  value=4,variable=npal)
viewmenu.palettes.add_radiobutton(label='AFMHot',command=pal_AFMHot,
                                  value=5,variable=npal)
viewmenu.add_cascade(label = 'Palette',menu=viewmenu.palettes)

# Save menu: whether received .WAV files are kept on disk (nsave).
savebutton = Menubutton(mbar, text = 'Save')
savebutton.pack(side = LEFT)
savemenu = Menu(savebutton, tearoff=0)
savebutton['menu'] = savemenu
savemenu.add_radiobutton(label = 'None', variable=nsave,value=0)
savemenu.add_radiobutton(label = 'Save all', variable=nsave,value=2)
nsave.set(0)

# Band menu: values 1..15 match the freq0/freqtx table indices.
bandbutton = Menubutton(mbar, text = 'Band')
bandbutton.pack(side = LEFT)
bandmenu = Menu(bandbutton, tearoff=0)
bandbutton['menu'] = bandmenu
iband.set(6)
bandmenu.add_radiobutton(label = '600 m',variable=iband,value=1)
bandmenu.add_radiobutton(label = '160 m',variable=iband,value=2)
bandmenu.add_radiobutton(label = '80 m', variable=iband,value=3)
bandmenu.add_radiobutton(label = '60 m', variable=iband,value=4)
bandmenu.add_radiobutton(label = '40 m', variable=iband,value=5)
bandmenu.add_radiobutton(label = '30 m', variable=iband,value=6)
bandmenu.add_radiobutton(label = '20 m', variable=iband,value=7)
bandmenu.add_radiobutton(label = '17 m', variable=iband,value=8)
bandmenu.add_radiobutton(label = '15 m', variable=iband,value=9)
bandmenu.add_radiobutton(label = '12 m', variable=iband,value=10)
bandmenu.add_radiobutton(label = '10 m', variable=iband,value=11)
bandmenu.add_radiobutton(label = '6 m', variable=iband,value=12)
bandmenu.add_radiobutton(label = '4 m', variable=iband,value=13)
bandmenu.add_radiobutton(label = '2 m', variable=iband,value=14)
bandmenu.add_radiobutton(label = 'Other',variable=iband,value=15)

# Help menu: local help plus links to the online guides and web sites.
helpbutton = Menubutton(mbar, text = 'Help')
helpbutton.pack(side = LEFT)
helpmenu = Menu(helpbutton, tearoff=0)
helpbutton['menu'] = helpmenu
helpmenu.add('command',label='Help',command=help,accelerator='F1')
helpmenu.add('command',label="Online WSPR User's Guide",command=usersguide, \
             accelerator='F3')
helpmenu.add('command',label="Online FMT User's Guide",command=fmtguide)
helpmenu.add('command',label="WSPRnet.org",command=wsprnet, \
             accelerator='F4')
helpmenu.add('command',label="WSJT Home Page",command=homepage)
helpmenu.add('command', label='About WSPR',command=about,accelerator='F5')

# Global key bindings matching the menu accelerators above.
root.bind_all('<Escape>', stop_loopall)
root.bind_all('<F1>', help)
root.bind_all('<F2>', options1)
root.bind_all('<F3>', usersguide)
root.bind_all('<F4>', wsprnet)
root.bind_all('<Alt-F4>', quit)
root.bind_all('<F5>', about)
root.bind_all('<F6>', opennext)
root.bind_all('<F7>', advanced1)
root.bind_all('<F8>', iq1)
root.bind_all('<F9>', hopping1)
root.bind_all('<Shift-F6>', decodeall)
root.bind_all('<Control-o>',openfile)
root.bind_all('<Control-O>',openfile)
# ---------------------------------------------------------------------------
# iframe1: waterfall canvas with crosshair readout, a 40-pixel side canvas,
# and the scrolling band-map text panel.
# ---------------------------------------------------------------------------
iframe1 = Frame(frame, bd=1, relief=SUNKEN)
graph1=Canvas(iframe1, bg='black', width=NX, height=NY,cursor='crosshair')
Widget.bind(graph1,"<Motion>",df_readout)            # live frequency readout
Widget.bind(graph1,"<Double-Button-1>",set_tx_freq)  # double-click sets Tx freq
graph1.pack(side=LEFT)
c=Canvas(iframe1, bg='white', width=40, height=NY,bd=0)
c.pack(side=LEFT)
text1=Text(iframe1, height=10, width=15, bg='Navy', fg="yellow")
text1.pack(side=LEFT, padx=1)
# Band-map entries fade with age via these four foreground tags.
text1.tag_configure('age0',foreground='red')
text1.tag_configure('age1',foreground='yellow')
text1.tag_configure('age2',foreground='gray75')
text1.tag_configure('age3',foreground='gray50')
text1.insert(END,'132 ZL1BPU')
sb = Scrollbar(iframe1, orient=VERTICAL, command=text1.yview)
sb.pack(side=RIGHT, fill=Y)
text1.configure(yscrollcommand=sb.set)
iframe1.pack(expand=1, fill=X, padx=4)

# iframe2: waterfall brightness/contrast sliders plus the "Upload spots"
# and "Band Hop" checkboxes and two placed labels.
iframe2 = Frame(frame, bd=1, relief=FLAT)
sc1=Scale(iframe2,from_=-100.0,to_=100.0,orient='horizontal',
          showvalue=0,sliderlength=5)
sc1.pack(side=LEFT)
sc2=Scale(iframe2,from_=-100.0,to_=100.0,orient='horizontal',
          showvalue=0,sliderlength=5)
sc2.pack(side=LEFT)
balloon.bind(sc1,"Brightness")
balloon.bind(sc2,"Contrast")
bupload=Checkbutton(iframe2,text='Upload spots',justify=RIGHT,variable=upload)
balloon.bind(bupload,"Check to send spots to WSPRnet.org")
bupload.place(x=330,y=12, anchor='e')
bhopping=Checkbutton(iframe2,text='Band Hop',justify=RIGHT, \
                     variable=hopping.hopping)
bhopping.place(x=445,y=12, anchor='e')
bhopping.configure(state=DISABLED)   # enabled elsewhere once hopping is set up
balloon.bind(bhopping,"Check to band hop; configure in Setup->Band Hopping")
lab00=Label(iframe2, text='Band Map').place(x=623,y=10, anchor='e')
lab02=Label(iframe2,text='',pady=5)
lab02.place(x=500,y=10, anchor='e')
iframe2.pack(expand=1, fill=X, padx=4)

# iframe2a: dial/Tx frequency entry group, Tx-fraction slider and the
# "Special" group (Idle, Tx Mute, Tune, Tx Next).
iframe2a = Frame(frame, bd=1, relief=FLAT)
g1=Pmw.Group(iframe2a,tag_text="Frequencies (MHz)")
lf0=Pmw.EntryField(g1.interior(),labelpos=W,label_text='Dial:',
                   value=10.1387,entry_textvariable=sf0,entry_width=12,
                   validate='real')
lftx=Pmw.EntryField(g1.interior(),labelpos=W,label_text='Tx: ',
                    value=10.140000,entry_textvariable=sftx,entry_width=12,validate='real')
widgets = (lf0,lftx)
for widget in widgets:
    widget.pack(side=TOP,padx=5,pady=4)
balloon.bind(lf0,"Set radio's dial frequency to this value and select USB mode")
balloon.bind(lftx,"Will transmit on this frequency")
Pmw.alignlabels(widgets)
g1.pack(side=LEFT,fill=BOTH,expand=0,padx=10,pady=6)
lab01=Label(iframe2a, text='').pack(side=LEFT,padx=1)   # spacer
g2=Pmw.Group(iframe2a,tag_text="Tx fraction (%)")
pctscale=Scale(g2.interior(),orient=HORIZONTAL,length=350,from_=0, \
               to=100,tickinterval=10,variable=ipctx)
pctscale.pack(side=LEFT,padx=4)
balloon.bind(pctscale,"Select desired fraction of sequences to transmit")
ipctx.set(0)
g2.pack(side=LEFT,fill=BOTH,expand=0,padx=10,pady=6)
g3=Pmw.Group(iframe2a,tag_text='Special')
bidle=Checkbutton(g3.interior(),text='Idle ',justify=RIGHT, \
                  variable=idle,width=5)
bidle.grid(row=0,column=1,padx=4,pady=3)
balloon.bind(bidle,"Check for no automatic T/R sequences")
bmute=Checkbutton(g3.interior(),text='Tx Mute',justify=RIGHT, \
                  variable=txmute,width=7)
bmute.grid(row=1,column=1,padx=4,pady=3)
balloon.bind(bmute,"Check for no Tx")
btune=Button(g3.interior(), text='Tune',underline=0,command=tune,width=9)
btune.grid(row=1,column=0,padx=2,pady=3)
balloon.bind(btune,"Transmit for number of seconds set by Tx fraction slider")
btxnext=Button(g3.interior(), text='Tx Next',underline=3,command=txnext,width=9)
btxnext.grid(row=0,column=0,padx=2,pady=3)
balloon.bind(btxnext,"Make the next 2-minute period a transmission")
g3.pack(side=LEFT,fill=X,expand=0,padx=10,pady=1)
iframe2a.pack(expand=1, fill=X, padx=1)
# ---------------------------------------------------------------------------
# Column headings, the S-meter/clock/decoded-text row, the status-message
# row, initial defaults, and finally the raw read of WSPR.INI into
# ``params`` (parsed later by readinit()).
# ---------------------------------------------------------------------------
iframe2 = Frame(frame, bd=1, relief=FLAT,height=15)
lab2=Label(iframe2,text='UTC dB DT Freq Drift')
if g.Win32:
    lab2.place(x=208,y=6, anchor='w')   # font metrics differ on Windows
else:
    lab2.place(x=198,y=6, anchor='w')
iframe2.pack(expand=1, fill=X, padx=4)

# iframe4: S-meter with digital-gain slider, Erase button with UTC clock,
# and the scrolling decoded-spots text widget.
iframe4 = Frame(frame, bd=1, relief=FLAT)
f4aa=Frame(iframe4,height=170,bd=2,relief=RIDGE)
sm=smeter.Smeter(f4aa,fillColor='green',orientation='vertical', \
                 width=10,height=170,doLabel=0,min=0,max=80)
sm.frame.pack(side=LEFT)
dgainscale=Scale(f4aa,orient=VERTICAL,length=170,from_=50, \
                 to=-50,variable=ndgain,sliderlength=20,showvalue=0,width=9)
dgainscale.pack(side=LEFT,padx=4)
balloon.bind(dgainscale,"Digital gain control")
ndgain.set(0)
g2.pack(side=LEFT,fill=BOTH,expand=0,padx=10,pady=6)
f4aa.pack(side=LEFT,expand=0,fill=Y)
f4a=Frame(iframe4,height=170,bd=2,relief=RIDGE)
berase=Button(f4a, text='Erase',underline=0,command=erase,\
              width=9,padx=1,pady=1)
berase.pack(side=TOP,padx=0,pady=40)
balloon.bind(berase,"Erase decoded text and band map")
ldate=Label(f4a, bg='black', fg='yellow', width=11, bd=4,
            text='2005 Apr 22\n01:23:45', relief=RIDGE,
            justify=CENTER, font=(font1,14))
ldate.pack(side=TOP,padx=10,pady=0)
f4a.pack(side=LEFT,expand=0,fill=Y)
f4b=Frame(iframe4,height=170,bd=2,relief=RIDGE)
text=Text(f4b, height=11, width=63, bg='white')
sb = Scrollbar(f4b, orient=VERTICAL, command=text.yview)
sb.pack(side=RIGHT, fill=Y)
text.pack(side=RIGHT, fill=X, padx=1)
text.insert(END,'1054 4 -25 1.12 10.140140 K1JT FN20 25')
text.configure(yscrollcommand=sb.set)
f4b.pack(side=LEFT,expand=0,fill=Y)
iframe4.pack(expand=1, fill=X, padx=4)

# iframe6: status messages.  msg2/msg3 show audio-device errors and msg5
# shows "Decoding" (see the update loop above); msg1/msg6 are set
# elsewhere -- presumably rig/upload status, not visible from here.
iframe6 = Frame(frame, bd=1, relief=SUNKEN)
msg1=Message(iframe6, text=' ', width=300,relief=SUNKEN)
msg1.pack(side=LEFT, fill=X, padx=1)
msg2=Message(iframe6, text=' ', width=300,relief=FLAT)
msg2.pack(side=LEFT, fill=X, padx=1)
msg3=Message(iframe6, text=' ',width=300,relief=FLAT)
msg3.pack(side=LEFT, fill=X, padx=1)
msg5=Message(iframe6, text=' ', width=300,relief=FLAT)
msg6=Message(iframe6, text=' ', width=400,relief=SUNKEN)
msg6.pack(side=RIGHT, fill=X, padx=1)
msg5.pack(side=RIGHT, fill=X, padx=1)
iframe6.pack(expand=1, fill=X, padx=4)
frame.pack()

# Startup defaults, applied before WSPR.INI (if any) overrides them.
isync=1
iband.set(6)
idle.set(1)
ipctx.set(20)
try:
    f=open(appdir+'/WSPR.INI',mode='r')
    params=f.readlines()
except:
    # No saved settings yet (first run): keep the defaults above.
    params=""
badlist=[]   # indices of WSPR.INI lines that failed to parse (see readinit)
def readinit():
    """Restore saved settings from the WSPR.INI lines in ``params``.

    Each line is "<Key> <value>" (spaces in values are encoded as '#'
    by save_params).  If any line raises, its index is appended to
    ``badlist`` and ``nparam`` records how far parsing got; the caller's
    while-loop then calls readinit() again, which re-scans from the top
    but skips every index already in ``badlist``.
    """
    global nparam,mrudir
    try:
        for i in range(len(params)):
            if badlist.count(i)>0:
                # This line failed on a previous pass; report and skip it.
                print('Skipping bad entry in WSPR.INI:\a',params[i])
                continue
            key,value=params[i].split()
            if key == 'WSPRGeometry': root.geometry(value)
            elif key == 'MyCall': options.MyCall.set(value)
            elif key == 'MyGrid': options.MyGrid.set(value)
            elif key == 'CWID': advanced.idint.set(value)
            elif key == 'dBm': options.dBm.set(value)
            elif key == 'PctTx': ipctx.set(value)
            elif key == 'DGain': ndgain.set(value)
            elif key == 'PttPort': options.PttPort.set(value)
            elif key == 'CatPort': options.CatPort.set(value)
            elif key == 'AudioIn':
                value=value.replace("#"," ")
                g.DevinName.set(value)
                try:
                    # Device entries start with the numeric device index.
                    g.ndevin.set(int(value[:2]))
                except:
                    g.ndevin.set(0)
                options.DevinName.set(value)
            elif key == 'AudioOut':
                value=value.replace("#"," ")
                g.DevoutName.set(value)
                try:
                    g.ndevout.set(int(value[:2]))
                except:
                    g.ndevout.set(0)
                options.DevoutName.set(value)
            elif key == 'BFOfreq': advanced.bfofreq.set(value)
            elif key == 'Acal': advanced.Acal.set(value)
            elif key == 'Bcal': advanced.Bcal.set(value)
            elif key == 'CalEnable': advanced.encal.set(value)
            elif key == 'IQmode': iq.iqmode.set(value)
            elif key == 'IQrx': iq.iqrx.set(value)
            elif key == 'IQtx': iq.iqtx.set(value)
            elif key == 'FIQ': iq.fiq.set(value)
            elif key == 'Ntxphaf': iq.isc3a.set(value)
            elif key == 'PTTmode': options.pttmode.set(value)
            elif key == 'CATenable': options.cat_enable.set(value)
            elif key == 'SerialRate': options.serial_rate.set(int(value))
            elif key == 'DataBits': options.databits.set(int(value))
            elif key == 'StopBits': options.stopbits.set(int(value))
            elif key == 'Handshake': options.serial_handshake.set( \
                value.replace("#"," ") )
            elif key == 'Rig':
                # Rig string starts with the 4-digit hamlib rig number.
                t=value.replace("#"," ")
                options.rig.set(t)
                options.rignum.set(int(t[:4]))
            elif key == 'Nsave': nsave.set(value)
            elif key == 'Upload': upload.set(value)
            elif key == 'Idle': idle.set(value)
            elif key == 'Debug': ndebug.set(value)
            elif key == 'WatScale': sc1.set(value)
            elif key == 'WatOffset': sc2.set(value)
            elif key == 'Palette': g.cmap=value
            elif key == 'freq0_600': freq0[1]=float(value)
            elif key == 'freq0_160': freq0[2]=float(value)
            elif key == 'freq0_80': freq0[3]=float(value)
            elif key == 'freq0_60': freq0[4]=float(value)
            elif key == 'freq0_40': freq0[5]=float(value)
            elif key == 'freq0_30': freq0[6]=float(value)
            elif key == 'freq0_20': freq0[7]=float(value)
            elif key == 'freq0_17': freq0[8]=float(value)
            elif key == 'freq0_15': freq0[9]=float(value)
            elif key == 'freq0_12': freq0[10]=float(value)
            elif key == 'freq0_10': freq0[11]=float(value)
            elif key == 'freq0_6': freq0[12]=float(value)
            elif key == 'freq0_4': freq0[13]=float(value)
            elif key == 'freq0_2': freq0[14]=float(value)
            elif key == 'freq0_other': freq0[15]=float(value)
            elif key == 'freqtx_600': freqtx[1]=float(value)
            elif key == 'freqtx_160': freqtx[2]=float(value)
            elif key == 'freqtx_80': freqtx[3]=float(value)
            elif key == 'freqtx_60': freqtx[4]=float(value)
            elif key == 'freqtx_40': freqtx[5]=float(value)
            elif key == 'freqtx_30': freqtx[6]=float(value)
            elif key == 'freqtx_20': freqtx[7]=float(value)
            elif key == 'freqtx_17': freqtx[8]=float(value)
            elif key == 'freqtx_15': freqtx[9]=float(value)
            elif key == 'freqtx_12': freqtx[10]=float(value)
            elif key == 'freqtx_10': freqtx[11]=float(value)
            elif key == 'freqtx_6': freqtx[12]=float(value)
            elif key == 'freqtx_4': freqtx[13]=float(value)
            elif key == 'freqtx_2': freqtx[14]=float(value)
            elif key == 'freqtx_other': freqtx[15]=float(value)
            elif key == 'iband': iband.set(value)
            elif key == 'StartIdle': start_idle.set(value)
            elif key == 'NoBeep': no_beep.set(value)
            elif key == 'Reject': w.acom1.reject=float(value)
            elif key == 'RxApply': iq.iqrxapp.set(value)
            elif key == 'MRUdir':
                mrudir=value.replace("#"," ")
            nparam=i
    except:
        # Remember the offending line index; the caller retries and the
        # pass above will skip it.
        badlist.append(i)
        nparam=i
# ---------------------------------------------------------------------------
# Final startup sequence: restore saved state, validate call/grid, apply
# the palette, start the back end and the GUI timers, then run the Tk main
# loop.  Everything after root.mainloop() is shutdown code.
# ---------------------------------------------------------------------------
w.acom1.gain=1.0
w.acom1.phase=0.0
w.acom1.reject=0.
# readinit() bails out on a bad WSPR.INI line; keep calling it until every
# line has been either processed or blacklisted.
while nparam < len(params)-1:
    readinit()
hopping.restore_params(appdir)
iq.ib.set(iband.get())
iq.restore()

# Validate callsign and grid: highlight bad fields in pink and open the
# station-parameters dialog so the user can correct them.
r=options.chkcall(options.MyCall.get())
if r<0:
    options.lcall._entryFieldEntry['background']='pink'
    options1()
else:
    options.lcall._entryFieldEntry['background']='white'
r=options.chkgrid(options.MyGrid.get())
if r<0:
    options.lgrid._entryFieldEntry['background']='pink'
    options1()
else:
    options.lgrid._entryFieldEntry['background']='white'
if g.DevinName.get()=="":
    g.ndevin.set(-1)    # no input device chosen yet
f0.set(freq0[iband.get()])
ftx.set(freqtx[iband.get()])
if start_idle.get():
    idle.set(1)

# Re-apply the waterfall palette saved in g.cmap.
if g.cmap == "gray0":
    pal_gray0()
    npal.set(0)
if g.cmap == "gray1":
    pal_gray1()
    npal.set(1)
if g.cmap == "Linrad":
    pal_linrad()
    npal.set(2)
if g.cmap == "blue":
    pal_blue()
    npal.set(3)
if g.cmap == "Hot":
    pal_Hot()
    npal.set(4)
if g.cmap == "AFMHot":
    pal_AFMHot()
    npal.set(5)
options.dbm_balloon()
fmid=f0.get() + 0.001500   # 1500 Hz above dial -- presumably sub-band center; confirm
sftx.set('%.06f' % ftx.get())
draw_axis()
erase()
if g.Win32: root.iconbitmap("wsjt.ico")
Title='WSPR ' + Version + ' by K1JT'
root.title(Title)
put_params()

# Remove stale work files from a previous run (best effort).
try:
    os.remove('decoded.txt')
except:
    pass
try:
    os.remove('pixmap.dat')
except:
    pass
iband0=iband.get()
graph1.focus_set()
w.acom1.ndevsok=0
w.acom1.ntxnext=0
w.acom1.nstoptx=0
w.wspr1()                      # start the back end (presumably audio/DSP loop)
t="%.6f" % (f0.get(),)
sf0.set(t)
t="%.6f" % (ftx.get(),)
sftx.set(t)
font2=tkinter.font.Font(font=text['font'])
lab2.config(font=font2)        # align headings with the decoded-text font
ldate.after(100,update)        # start the periodic GUI update loop
ldate.after(100,audio_config)  # initial audio-device check
root.mainloop()

# --- Shutdown: the main loop has exited ---
ldate.after_cancel(timer1)
# Release PTT via a rigctl CAT command before exiting, if CAT PTT is in use.
if options.pttmode.get()=='CAT':
    if options.rignum.get()==2509 or options.rignum.get()==2511:
        # These rig numbers take no serial parameters -- TODO confirm why.
        cmd="rigctl -m %d -r %s T 0" % \
            (options.rignum.get(),options.CatPort.get())
    else:
        cmd="rigctl -m %d -r %s -s %d -C data_bits=%s -C stop_bits=%s -C serial_handshake=%s T 0" % \
            (options.rignum.get(),options.CatPort.get(), \
             options.serial_rate.get(),options.databits.get(), \
             options.stopbits.get(),options.serial_handshake.get())
    ierr=os.system(cmd)
save_params()
w.paterminate()                # shut down the audio back end
time.sleep(0.5)
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.contrib.auth.forms import PasswordChangeForm
from django.shortcuts import render_to_response
from django.http import HttpResponseRedirect, HttpResponse, Http404
from django.template.context import RequestContext
from django.utils.translation import ugettext_lazy as _
from django.utils import simplejson
from django.db.models import Q
from django.conf import settings
from notification import models as notification
from dialer_campaign.models import common_contact_authorization
from dialer_campaign.views import current_view, notice_count, grid_common_function
from dialer_campaign.function_def import user_dialer_setting_msg, variable_value
from dialer_settings.models import DialerSetting
from user_profile.models import UserProfile
from user_profile.forms import UserChangeDetailForm, \
UserChangeDetailExtendForm, \
CheckPhoneNumberForm,\
UserProfileForm
@login_required
def customer_detail_change(request):
    """User detail change on the customer UI.

    **Attributes**:

        * ``form`` - UserChangeDetailForm, UserChangeDetailExtendForm,
          PasswordChangeForm, CheckPhoneNumberForm
        * ``template`` - 'frontend/registration/user_detail_change.html'

    **Logic Description**:

        * User is able to change his/her detail, change the account
          password, check a phone number against the dialer settings and
          review/mark-read notifications.
    """
    user_detail = User.objects.get(username=request.user)
    try:
        user_detail_extened = UserProfile.objects.get(user=user_detail)
    except UserProfile.DoesNotExist:
        # Create the UserProfile lazily on first visit.
        user_detail_extened = UserProfile(user=user_detail)
        user_detail_extened.save()

    user_detail_form = UserChangeDetailForm(request.user,
                                            instance=user_detail)
    user_detail_extened_form = UserChangeDetailExtendForm(request.user,
                                                          instance=user_detail_extened)
    user_password_form = PasswordChangeForm(user=request.user)
    check_phone_no_form = CheckPhoneNumberForm()

    try:
        user_ds = UserProfile.objects.get(user=request.user)
        dialer_set = DialerSetting.objects.get(id=user_ds.dialersetting.id)
    except Exception:
        # Best effort: missing profile or no dialer setting attached
        # (dialersetting may be None) -- template then shows nothing.
        dialer_set = ''

    # Notifications of the logged-in user, restricted to self-sent ones.
    # (The Q(sender=...) was previously wrapped in a dead "if q:" check;
    # a Q object is always truthy, so the filter always applied.)
    user_notification = notification.Notice.objects.filter(
        recipient=request.user, sender=request.user)

    msg_detail = ''
    msg_pass = ''
    msg_number = ''
    msg_note = ''
    error_detail = ''
    error_pass = ''
    error_number = ''

    action = ''
    if 'action' in request.GET:
        action = request.GET['action']
    if request.GET.get('msg_note') == 'true':
        msg_note = request.session['msg_note']

    # Mark all notifications as read.
    if request.GET.get('notification') == 'mark_read_all':
        notification_list = notification.Notice.objects.filter(
            unseen=1, recipient=request.user)
        notification_list.update(unseen=0)
        msg_note = _('All notifications are marked as read.')

    if request.method == 'POST':
        if request.POST['form-type'] == "change-detail":
            user_detail_form = UserChangeDetailForm(request.user, request.POST,
                                                    instance=user_detail)
            user_detail_extened_form = UserChangeDetailExtendForm(
                request.user, request.POST, instance=user_detail_extened)
            action = 'tabs-1'
            if user_detail_form.is_valid() and user_detail_extened_form.is_valid():
                user_detail_form.save()
                user_detail_extened_form.save()
                msg_detail = _('Detail has been changed.')
            else:
                error_detail = _('Please correct the errors below.')
        elif request.POST['form-type'] == "check-number":  # check phone no
            action = 'tabs-5'
            check_phone_no_form = CheckPhoneNumberForm(data=request.POST)
            if check_phone_no_form.is_valid():
                if not common_contact_authorization(request.user,
                                                    request.POST['phone_number']):
                    error_number = _('This phone number is not authorized.')
                else:
                    msg_number = _('This phone number is authorized.')
            else:
                error_number = _('Please correct the errors below.')
        else:  # "change-password"
            user_password_form = PasswordChangeForm(user=request.user,
                                                    data=request.POST)
            action = 'tabs-2'
            if user_password_form.is_valid():
                user_password_form.save()
                msg_pass = _('Your password has been changed.')
            else:
                error_pass = _('Please correct the errors below.')

    template = 'frontend/registration/user_detail_change.html'
    data = {
        'module': current_view(request),
        'user_detail_form': user_detail_form,
        'user_detail_extened_form': user_detail_extened_form,
        'user_password_form': user_password_form,
        'check_phone_no_form': check_phone_no_form,
        'user_notification': user_notification,
        'msg_detail': msg_detail,
        'msg_pass': msg_pass,
        'msg_number': msg_number,
        'msg_note': msg_note,
        'error_detail': error_detail,
        'error_pass': error_pass,
        'error_number': error_number,
        'notice_count': notice_count(request),
        'dialer_set': dialer_set,
        'dialer_setting_msg': user_dialer_setting_msg(request.user),
        'action': action,
    }
    return render_to_response(template, data,
                              context_instance=RequestContext(request))
def call_style(val):
    """Return the inline CSS style for a notification icon.

    A truthy ``val`` (unseen notification) selects the 'new' icon,
    a falsy one the 'tick' icon.
    """
    icon = 'new.png' if val else 'tick.png'
    return ('style="text-decoration:none;background-image:url('
            '%snewfies/icons/%s);"' % (settings.STATIC_URL, icon))
@login_required
def notification_grid(request):
    """Return the notification list as JSON for the flexigrid widget.

    **Model**: notification.Notice
    """
    grid_data = grid_common_function(request)
    page = int(grid_data['page'])
    start_page = int(grid_data['start_page'])
    end_page = int(grid_data['end_page'])
    sortorder_sign = grid_data['sortorder_sign']
    sortname = grid_data['sortname']

    # Notifications of the logged-in user, restricted to self-sent ones.
    # (The Q(sender=...) was previously wrapped in a dead "if q:" check;
    # a Q object is always truthy, so the filter always applied.)
    user_notification = notification.Notice.objects.filter(
        recipient=request.user, sender=request.user)
    count = user_notification.count()
    user_notification_list = \
        user_notification.order_by(sortorder_sign + sortname)[start_page:end_page]

    # One flexigrid row per notice: checkbox, fields, and a seen/unseen
    # toggle link styled by call_style().
    rows = [{'id': row.id,
             'cell': ['<input type="checkbox" name="select" class="checkbox" '
                      'value="' + str(row.id) + '" />',
                      str(row.message),
                      str(row.notice_type),
                      str(row.sender),
                      str(row.added),
                      str('<a href="../update_notice_status_cust/' + str(row.id)
                          + '/" class="icon" ' + call_style(row.unseen)
                          + '> </a>'),
                      ]} for row in user_notification_list]

    data = {'rows': rows,
            'page': page,
            'total': count}
    # content_type alone is sufficient; the redundant mimetype= kwarg was
    # dropped because HttpResponse no longer accepts it on newer Django.
    return HttpResponse(simplejson.dumps(data),
                        content_type="application/json")
@login_required
def notification_del_read(request, object_id):
    """Delete or mark-as-read notifications for the logged-in user.

    **Attributes**:

        * ``object_id`` - Selected notification object
        * ``object_list`` - Selected notification objects (POST 'select')

    **Logic Description**:

        * When ``object_id`` refers to an existing notice, that single
          notice is deleted or marked read.  When it is 0 (no such pk),
          the ids posted in the ``select`` list are processed in bulk.
    """
    try:
        # When object_id is not 0
        notification_obj = notification.Notice.objects.get(pk=object_id)
        # Delete/Read notification
        if object_id:
            if request.POST.get('mark_read') == 'false':
                request.session["msg_note"] = _('"%(name)s" is deleted.') \
                    % {'name': notification_obj.notice_type}
                notification_obj.delete()
            else:
                request.session["msg_note"] = _('"%(name)s" is marked as read.') \
                    % {'name': notification_obj.notice_type}
                # BUGFIX: model instances have no .update() (that is
                # QuerySet API); set the field and save instead.
                notification_obj.unseen = 0
                notification_obj.save()
        return HttpResponseRedirect('/user_detail_change/?action=tabs-3&msg_note=true')
    except notification.Notice.DoesNotExist:
        # When object_id is 0 (multiple records delete/mark as read).
        values = request.POST.getlist('select')
        # BUGFIX: parameterized id__in lookup replaces the raw
        # extra(where=['id IN (%s)']) built from POST data, which was an
        # SQL-injection vector.
        notification_list = notification.Notice.objects.filter(id__in=values)
        if request.POST.get('mark_read') == 'false':
            request.session["msg_note"] = _('%(count)s notification(s) are deleted.')\
                % {'count': notification_list.count()}
            notification_list.delete()
        else:
            request.session["msg_note"] = _('%(count)s notification(s) are marked as read.')\
                % {'count': notification_list.count()}
            notification_list.update(unseen=0)
        return HttpResponseRedirect('/user_detail_change/?action=tabs-3&msg_note=true')
def common_notification_status(request, id):
    """Toggle a notification's seen/unseen status.

    Common to the admin and customer UIs.

    **Attributes**:

        * ``id`` - primary key of the notice record

    **Logic Description**:

        * A notice with ``unseen == 1`` becomes seen (0); any other
          value becomes unseen (1).
    """
    notice = notification.Notice.objects.get(pk=id)
    notice.unseen = 0 if notice.unseen == 1 else 1
    notice.save()
    return True
@login_required
def update_notice_status_cust(request, id):
    """Toggle notification ``id``'s seen/unseen status from the customer
    interface, then redirect back to the notifications tab."""
    common_notification_status(request, id)
    return HttpResponseRedirect('/user_detail_change/?action=tabs-3')
|
import datetime
import gpxpy
import gpxpy.gpx
import sqlite3
import sys
import time
def main(dbsource, filename):
real_filename = filename + '.gpx'
gpx = gpxpy.gpx.GPX()
# Create first track in our GPX:
gpx_track = gpxpy.gpx.GPXTrack()
gpx.tracks.append(gpx_track)
# Create first segment in our GPX track:
gpx_segment = gpxpy.gpx.GPXTrackSegment()
gpx_track.segments.append(gpx_segment)
# Read the DB and create points:
try:
con = sqlite3.connect(dbsource, detect_types=sqlite3.PARSE_DECLTYPES)
cur = con.cursor()
lat = 0
lon = 0
alt = 0
cur.execute('SELECT * FROM coordinates')
for row in cur.fetchall():
key_id, time_stmp, lat, lon, alt = row
print 'Time: %s Lat: %s Lon: %s Alt: %s' % (time_stmp, lat, lon, alt)
if key_id != 1:
newtimestamp = datetime.datetime.strptime(time_stmp, '%Y-%m-%d %H:%M:%S.%f')
#print newtimestamp
newertimestamp = time.mktime(newtimestamp.timetuple())
#print newertimestamp
gpx_segment.points.append(gpxpy.gpx.GPXTrackPoint(lat, lon, elevation=alt, time=newtimestamp))
except sqlite3.Error, e:
if con:
con.rollback()
print "Error %s:" % e.args[0]
sys.exit(1)
finally:
if con:
con.close()
foo = gpx.to_xml()
f = open(real_filename, 'w')
f.write(foo)
print 'Done.'
if __name__ == "__main__":
    # call with 2 args, first is your DB, second is output filename (no type)
    # e.g.:  python thisscript.py track.db mytrack   ->  writes mytrack.gpx
    main(sys.argv[1], sys.argv[2])
|
"""Setup forDjango Imager app."""
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
REQUIRES = [
'django',
'psycopg2',
'django-registration',
'sorl-thumbnail',
]
TEST = [
'tox',
'coverage',
'pytest-cov',
'factory-boy',
]
DEV = [
'ipython',
]
setup(name='Django Imager',
version='0.0',
description='Web application to upload and save images.',
# long_description=README + '\n\n' + CHANGES,
author=('Will Weatherford'),
author_email='weatherford.william@gmail.com',
url='',
license='MIT',
keywords='django',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
test_suite='django-imager',
install_requires=REQUIRES,
extras_require={
'test': TEST,
'dev': DEV
},
)
|
import os
from datetime import date
import json
from django.core.cache import cache
from django.db.utils import DatabaseError
from django.http.response import Http404
from django.test import override_settings
from django.test.client import RequestFactory
from bedrock.base.urlresolvers import reverse
from mock import ANY, patch
from bedrock.mozorg.tests import TestCase
from bedrock.mozorg import views
from scripts import update_tableau_data
class TestViews(TestCase):
    """Funnelcake handling of the home-page download button."""

    @patch.dict(os.environ, FUNNELCAKE_5_LOCALES='en-US', FUNNELCAKE_5_PLATFORMS='win')
    def test_download_button_funnelcake(self):
        """A configured funnelcake ID is appended to the product name."""
        with self.activate('en-US'):
            response = self.client.get(reverse('mozorg.home'), {'f': '5'})
            assert 'product=firefox-stub-f5&' in response.content

    def test_download_button_bad_funnelcake(self):
        """Unknown or malformed funnelcake IDs fall back to the plain stub."""
        with self.activate('en-US'):
            for bad_id in ('5dude', '999999999'):
                response = self.client.get(reverse('mozorg.home'), {'f': bad_id})
                assert 'product=firefox-stub&' in response.content
                assert 'product=firefox-stub-f%s&' % bad_id not in response.content
class TestContributeStudentAmbassadorsLanding(TestCase):
    """Tests for the student-ambassadors landing view's Twitter cache handling."""
    def setUp(self):
        self.rf = RequestFactory()
        self.get_req = self.rf.get('/')
        # Pre-built DoesNotExist instance (available to tests that need it).
        self.no_exist = views.TwitterCache.DoesNotExist()
        cache.clear()
    @patch.object(views.l10n_utils, 'render')
    @patch.object(views.TwitterCache.objects, 'get')
    def test_db_exception_works(self, mock_manager, mock_render):
        """View should function properly without the DB."""
        # Simulate the database being completely unavailable.
        mock_manager.side_effect = DatabaseError
        views.contribute_studentambassadors_landing(self.get_req)
        # The view must fall back to rendering with an empty tweet list.
        mock_render.assert_called_with(ANY, ANY, {'tweets': []})
    @patch.object(views.l10n_utils, 'render')
    @patch.object(views.TwitterCache.objects, 'get')
    def test_no_db_row_works(self, mock_manager, mock_render):
        """View should function properly without data in the DB."""
        # Simulate the cache row simply not existing.
        mock_manager.side_effect = views.TwitterCache.DoesNotExist
        views.contribute_studentambassadors_landing(self.get_req)
        mock_render.assert_called_with(ANY, ANY, {'tweets': []})
    @patch.object(views.l10n_utils, 'render')
    @patch.object(views.TwitterCache.objects, 'get')
    def test_db_cache_works(self, mock_manager, mock_render):
        """View should use info returned by DB."""
        good_val = 'The Dude tweets, man.'
        mock_manager.return_value.tweets = good_val
        views.contribute_studentambassadors_landing(self.get_req)
        mock_render.assert_called_with(ANY, ANY, {'tweets': good_val})
class TestRobots(TestCase):
    """Tests for the robots.txt view's disallow_all behavior."""

    def setUp(self):
        self.rf = RequestFactory()
        self.view = views.Robots()

    def _disallow_all_for_host(self, host):
        # Attach a request for the given host and return the computed
        # ``disallow_all`` context value.
        self.view.request = self.rf.get('/', HTTP_HOST=host)
        return self.view.get_context_data()['disallow_all']

    def test_production_disallow_all_is_false(self):
        """The production host must allow crawling."""
        self.assertFalse(self._disallow_all_for_host('www.mozilla.org'))

    def test_non_production_disallow_all_is_true(self):
        """A non-production host must disallow crawling."""
        self.assertTrue(self._disallow_all_for_host('www.allizom.org'))

    def test_robots_no_redirect(self):
        """robots.txt is served directly as plain text with a 200."""
        response = self.client.get('/robots.txt', HTTP_HOST='www.mozilla.org')
        self.assertEqual(response.status_code, 200)
        self.assertFalse(response.context_data['disallow_all'])
        self.assertEqual(response.get('Content-Type'), 'text/plain')
class TestMozIDDataView(TestCase):
    """Tests for the MozID contributor-data JSON endpoint."""
    def setUp(self):
        # Seed the tableau data with a small fixed dataset so the view's
        # per-week aggregations can be asserted exactly.
        # Row shape mirrors get_external_data's return:
        # (week date, team, source, total active, new) — confirm against
        # update_tableau_data if the schema changes.
        with patch.object(update_tableau_data, 'get_external_data') as ged:
            ged.return_value = (
                (date(2015, 2, 2), 'Firefox', 'bugzilla', 100, 10),
                (date(2015, 2, 2), 'Firefox OS', 'bugzilla', 100, 10),
                (date(2015, 2, 9), 'Sumo', 'sumo', 100, 10),
                (date(2015, 2, 9), 'Firefox OS', 'sumo', 100, 10),
                (date(2015, 2, 9), 'QA', 'reps', 100, 10),
            )
            update_tableau_data.run()
    def _get_json(self, source):
        # Helper: call the view for `source`, assert the response headers,
        # and return the decoded JSON body.
        cache.clear()
        req = RequestFactory().get('/')
        resp = views.mozid_data_view(req, source)
        assert resp['content-type'] == 'application/json'
        assert resp['access-control-allow-origin'] == '*'
        return json.loads(resp.content)
    def test_all(self):
        """'all' should return weekly sums across every team and source."""
        assert self._get_json('all') == [
            {'wkcommencing': '2015-02-09', 'totalactive': 300, 'new': 30},
            {'wkcommencing': '2015-02-02', 'totalactive': 200, 'new': 20},
        ]
    def test_team(self):
        """When acting on a team, should just return sums for that team."""
        assert self._get_json('firefoxos') == [
            {'wkcommencing': '2015-02-09', 'totalactive': 100, 'new': 10},
            {'wkcommencing': '2015-02-02', 'totalactive': 100, 'new': 10},
        ]
    def test_source(self):
        """When acting on a source, should just return sums for that source."""
        assert self._get_json('sumo') == [
            {'wkcommencing': '2015-02-09', 'totalactive': 100, 'new': 10},
        ]
    @patch('bedrock.mozorg.models.CONTRIBUTOR_SOURCE_NAMES', {})
    def test_unknown(self):
        """An unknown source should raise a 404."""
        with self.assertRaises(Http404):
            self._get_json('does-not-exist')
@patch('bedrock.mozorg.views.l10n_utils.render')
class TestTechnology(TestCase):
    """TechnologyView should use the English template only for en-US."""

    def setUp(self):
        self.rf = RequestFactory()

    def _template_names_for_locale(self, locale):
        # Build the view with a request in the given locale and return the
        # template list it selects.
        view = views.TechnologyView()
        view.request = self.rf.get('/technology/')
        view.request.locale = locale
        return view.get_template_names()

    def test_technology_template(self, render_mock):
        """en-US must get the dedicated English template."""
        assert self._template_names_for_locale('en-US') == ['mozorg/technology-en.html']

    def test_technology_locale_template(self, render_mock):
        """Any other locale must get the generic template."""
        assert self._template_names_for_locale('es-ES') == ['mozorg/technology.html']
@patch('bedrock.mozorg.views.l10n_utils.render')
class TestHomePage(TestCase):
    """home_view should render a locale-specific template when one exists."""

    def setUp(self):
        self.rf = RequestFactory()

    def _check_template(self, locale, template, render_mock):
        # Request the home page in the given locale and assert that the
        # (mocked) render call received the expected template name.
        request = self.rf.get('/')
        request.locale = locale
        views.home_view(request)
        render_mock.assert_called_once_with(request, template, ANY)

    def test_home_en_template(self, render_mock):
        """en-US must get the dedicated English home template."""
        self._check_template('en-US', 'mozorg/home/home-en.html', render_mock)

    def test_home_de_template(self, render_mock):
        """German must get the dedicated German home template."""
        self._check_template('de', 'mozorg/home/home-de.html', render_mock)

    def test_home_locale_template(self, render_mock):
        """Other locales must fall back to the generic home template."""
        self._check_template('fr', 'mozorg/home/home.html', render_mock)
@patch('bedrock.mozorg.views.l10n_utils.render')
class TestAboutPage(TestCase):
    """about_view should pick its template based on lang-file activation."""

    def setUp(self):
        self.rf = RequestFactory()

    @patch.object(views, 'lang_file_is_active', lambda *x: True)
    def test_about_2019_template(self, render_mock):
        """Active lang file: serve the redesigned 2019 template."""
        request = self.rf.get('/')
        request.locale = 'en-US'
        views.about_view(request)
        render_mock.assert_called_once_with(request, 'mozorg/about-2019.html')

    @patch.object(views, 'lang_file_is_active', lambda *x: False)
    def test_about_old_template(self, render_mock):
        """Inactive lang file: serve the legacy template."""
        request = self.rf.get('/')
        request.locale = 'de'
        views.about_view(request)
        render_mock.assert_called_once_with(request, 'mozorg/about.html')
@override_settings(DEV=True)
class TestOAuthFxa(TestCase):
    """Tests for the Firefox Accounts OAuth callback view (concert RSVP flow).

    Every failure path should 302-redirect to the error page; the success
    path redirects to the concerts page and sets a verified cookie.
    """
    def setUp(self):
        self.rf = RequestFactory()
    @override_settings(DEV=False)
    @override_settings(SWITCH_FIREFOX_CONCERT_SERIES=False)
    def test_switch_off(self):
        """Should redirect to the home page if the whole system is turned off"""
        req = self.rf.get('/mozorg/oauth/fxa?state=thedude&code=abides')
        response = views.oauth_fxa(req)
        assert response.status_code == 302
        assert response['Location'] == '/'
    def test_missing_expected_state(self):
        # No fxaOauthState cookie on the request -> error redirect.
        req = self.rf.get('/mozorg/oauth/fxa?state=thedude&code=abides')
        response = views.oauth_fxa(req)
        assert response.status_code == 302
        assert response['Location'] == '/oauth/fxa/error/'
    def test_missing_provided_state(self):
        # Cookie present but no `state` query parameter -> error redirect.
        req = self.rf.get('/mozorg/oauth/fxa?code=abides')
        req.COOKIES['fxaOauthState'] = 'thedude'
        response = views.oauth_fxa(req)
        assert response.status_code == 302
        assert response['Location'] == '/oauth/fxa/error/'
    def test_state_mismatch(self):
        # Cookie state and query-string state disagree -> error redirect.
        req = self.rf.get('/mozorg/oauth/fxa?state=thedude&code=abides')
        req.COOKIES['fxaOauthState'] = 'walter'
        response = views.oauth_fxa(req)
        assert response.status_code == 302
        assert response['Location'] == '/oauth/fxa/error/'
    def test_missing_code(self):
        # Missing OAuth `code` query parameter -> error redirect.
        req = self.rf.get('/mozorg/oauth/fxa?state=thedude')
        req.COOKIES['fxaOauthState'] = 'thedude'
        response = views.oauth_fxa(req)
        assert response.status_code == 302
        assert response['Location'] == '/oauth/fxa/error/'
    @patch('bedrock.mozorg.views.get_fxa_oauth_token')
    def test_token_failure(self, gfot_mock):
        # Token exchange returns None -> error redirect.
        req = self.rf.get('/mozorg/oauth/fxa?state=thedude&code=abides')
        req.COOKIES['fxaOauthState'] = 'thedude'
        gfot_mock.return_value = None
        response = views.oauth_fxa(req)
        assert response.status_code == 302
        assert response['Location'] == '/oauth/fxa/error/'
    @patch('bedrock.mozorg.views.get_fxa_oauth_token')
    @patch('bedrock.mozorg.views.get_fxa_profile_email')
    def test_email_failure(self, gfpe_mock, gfot_mock):
        # Profile email lookup returns None -> error redirect.
        req = self.rf.get('/mozorg/oauth/fxa?state=thedude&code=abides')
        req.COOKIES['fxaOauthState'] = 'thedude'
        gfot_mock.return_value = 'atoken'
        gfpe_mock.return_value = None
        response = views.oauth_fxa(req)
        assert response.status_code == 302
        assert response['Location'] == '/oauth/fxa/error/'
    @patch('bedrock.mozorg.views.get_fxa_oauth_token')
    @patch('bedrock.mozorg.views.get_fxa_profile_email')
    @patch('bedrock.mozorg.views.fxa_concert_rsvp')
    def test_rsvp_failure(self, rsvp_mock, gfpe_mock, gfot_mock):
        # RSVP call returns None -> error redirect.
        req = self.rf.get('/mozorg/oauth/fxa?state=thedude&code=abides')
        req.COOKIES['fxaOauthState'] = 'thedude'
        gfot_mock.return_value = 'atoken'
        gfpe_mock.return_value = 'maude@example.com'
        rsvp_mock.return_value = None
        response = views.oauth_fxa(req)
        assert response.status_code == 302
        assert response['Location'] == '/oauth/fxa/error/'
    @patch('bedrock.mozorg.views.get_fxa_oauth_token')
    @patch('bedrock.mozorg.views.get_fxa_profile_email')
    @patch('bedrock.mozorg.views.fxa_concert_rsvp')
    def test_success(self, rsvp_mock, gfpe_mock, gfot_mock):
        # Happy path: verified cookie is set and we land on the concerts page.
        req = self.rf.get('/mozorg/oauth/fxa?state=thedude&code=abides')
        req.COOKIES['fxaOauthState'] = 'thedude'
        gfot_mock.return_value = 'atoken'
        gfpe_mock.return_value = 'maude@example.com'
        response = views.oauth_fxa(req)
        assert response.cookies['fxaOauthVerified'].value == 'True'
        assert response.status_code == 302
        assert response['Location'] == '/firefox/concerts/'
    @patch('bedrock.mozorg.views.get_fxa_oauth_token')
    @patch('bedrock.mozorg.views.get_fxa_profile_email')
    @patch('bedrock.mozorg.views.fxa_concert_rsvp')
    def test_rsvp_is_firefox(self, rsvp_mock, gfpe_mock, gfot_mock):
        # A Firefox user agent -> RSVP called with second argument True.
        req = self.rf.get('/mozorg/oauth/fxa?state=thedude&code=abides', HTTP_USER_AGENT='Firefox')
        req.COOKIES['fxaOauthState'] = 'thedude'
        gfot_mock.return_value = 'atoken'
        gfpe_mock.return_value = 'maude@example.com'
        views.oauth_fxa(req)
        rsvp_mock.assert_called_with('maude@example.com', True)
    @patch('bedrock.mozorg.views.get_fxa_oauth_token')
    @patch('bedrock.mozorg.views.get_fxa_profile_email')
    @patch('bedrock.mozorg.views.fxa_concert_rsvp')
    def test_rsvp_not_firefox(self, rsvp_mock, gfpe_mock, gfot_mock):
        # A non-Firefox user agent -> RSVP called with second argument False.
        req = self.rf.get('/mozorg/oauth/fxa?state=thedude&code=abides', HTTP_USER_AGENT='Safari')
        req.COOKIES['fxaOauthState'] = 'thedude'
        gfot_mock.return_value = 'atoken'
        gfpe_mock.return_value = 'maude@example.com'
        views.oauth_fxa(req)
        rsvp_mock.assert_called_with('maude@example.com', False)
|
from positive_alert_test_case import PositiveAlertTestCase
from negative_alert_test_case import NegativeAlertTestCase
from alert_test_suite import AlertTestSuite
class TestAlertProxyDropExecutable(AlertTestSuite):
    """Test suite for the proxy_drop_executable alert.

    Note: all of the statements below run at class-definition time to build
    up the ``test_cases`` list consumed by the AlertTestSuite machinery.
    """
    alert_filename = "proxy_drop_executable"
    # This event is the default positive event that will cause the
    # alert to trigger
    default_event = {
        "_type": "event",
        "_source": {
            "category": "proxy",
            "details": {
                "sourceipaddress": "1.2.3.4",
                "destination": "http://evil.com/evil.exe",
                "proxyaction": "TCP_DENIED",
            },
        },
    }
    # This event is an alternate destination that we'd want to aggregate
    default_event2 = AlertTestSuite.copy(default_event)
    default_event2["_source"]["details"]["destination"] = "http://evil.com/evil.sh"
    # This event is the default negative event that will not cause the
    # alert to trigger
    default_negative_event = AlertTestSuite.copy(default_event)
    default_negative_event["_source"]["details"][
        "destination"
    ] = "http://foo.mozilla.com/index.html"
    # This alert is the expected result from running this task
    default_alert = {
        "category": "squid",
        "tags": ["squid", "proxy"],
        "severity": "WARNING",
        "summary": "Suspicious Proxy DROP event(s) detected from 1.2.3.4 to the following executable file destination(s): http://evil.com/evil.exe",
    }
    # This alert is the expected result from this task against multiple matching events
    default_alert_aggregated = AlertTestSuite.copy(default_alert)
    default_alert_aggregated[
        "summary"
    ] = "Suspicious Proxy DROP event(s) detected from 1.2.3.4 to the following executable file destination(s): http://evil.com/evil.exe,http://evil.com/evil.sh"
    test_cases = []
    test_cases.append(
        PositiveAlertTestCase(
            description="Positive test with default events and default alert expected",
            events=AlertTestSuite.create_events(default_event, 1),
            expected_alert=default_alert,
        )
    )
    # Two identical events should be de-duplicated into a single alert.
    test_cases.append(
        PositiveAlertTestCase(
            description="Positive test with default events and default alert expected - dedup",
            events=AlertTestSuite.create_events(default_event, 2),
            expected_alert=default_alert,
        )
    )
    # Two different destinations should aggregate into one combined summary.
    events1 = AlertTestSuite.create_events(default_event, 1)
    events2 = AlertTestSuite.create_events(default_event2, 1)
    test_cases.append(
        PositiveAlertTestCase(
            description="Positive test with default events and default alert expected - different dests",
            events=events1 + events2,
            expected_alert=default_alert_aggregated,
        )
    )
    test_cases.append(
        NegativeAlertTestCase(
            description="Negative test with default negative event",
            events=AlertTestSuite.create_events(default_negative_event, 1),
        )
    )
    # Events outside the 'proxy' category must not trigger the alert.
    events = AlertTestSuite.create_events(default_event, 10)
    for event in events:
        event["_source"]["category"] = "bad"
    test_cases.append(
        NegativeAlertTestCase(
            description="Negative test case with events with incorrect category",
            events=events,
        )
    )
    # Events older than the alert's search window (241 minutes here) must
    # not trigger the alert.
    events = AlertTestSuite.create_events(default_event, 10)
    for event in events:
        event["_source"][
            "utctimestamp"
        ] = AlertTestSuite.subtract_from_timestamp_lambda({"minutes": 241})
        event["_source"][
            "receivedtimestamp"
        ] = AlertTestSuite.subtract_from_timestamp_lambda({"minutes": 241})
    test_cases.append(
        NegativeAlertTestCase(
            description="Negative test case with old timestamp", events=events
        )
    )
    # Destinations whose extension is not on the executable blacklist
    # (e.g. .pdf) must not trigger the alert.
    events = AlertTestSuite.create_events(default_event, 10)
    for event in events:
        event["_source"]["details"]["destination"] = "http://evil.com/evil.pdf"
    test_cases.append(
        NegativeAlertTestCase(
            description="Negative test case with events with non blacklisted extension",
            events=events,
        )
    )
|
import collections
from django.conf import settings
from elasticsearch_dsl import F, Q, query
from rest_framework.filters import BaseFilterBackend
from kuma.wiki.search import WikiDocumentType
from .models import Filter, FilterGroup
def get_filters(getter_func):
    """Collect the unique filter slugs produced by *getter_func*.

    For every FilterGroup slug, ``getter_func(slug, [])`` is asked for the
    matching filter slugs; duplicates are dropped while first-seen order is
    preserved. Returns the resulting keys view.
    """
    seen = collections.OrderedDict()
    for group_slug in FilterGroup.objects.values_list('slug', flat=True):
        for filter_slug in getter_func(group_slug, []):
            seen.setdefault(filter_slug, None)
    return seen.keys()
class LanguageFilterBackend(BaseFilterBackend):
    """
    A django-rest-framework filter backend that filters the given queryset
    based on the current request's locale, or a different locale (or none at
    all) specified by query parameter

    First, we bail if the locale query parameter is set to *. It's a short cut
    for the macros search.

    Then, if the current language is the standard language (English) we only
    show those documents.

    But if the current language is any non-standard language (non-English)
    we're limiting the documents to either English or the requested
    language, effectively filtering out all other languages. We also boost
    the non-English documents to show up before the English ones.
    """
    def filter_queryset(self, request, queryset, view):
        locale = request.GET.get('locale', None)
        if '*' == locale:
            # '*' means "all locales" -- skip locale filtering entirely.
            return queryset
        # Extract the existing query (defaulting to match_all) so it can be
        # re-wrapped in the boosting query built below.
        sq = queryset.to_dict().pop('query', query.MatchAll().to_dict())
        if request.LANGUAGE_CODE == settings.LANGUAGE_CODE:
            locales = [request.LANGUAGE_CODE]
        else:
            # Non-default language: also include default-language documents.
            locales = [request.LANGUAGE_CODE, settings.LANGUAGE_CODE]
        # Positive clause: original query restricted to the allowed locales.
        positive_sq = {
            'filtered': {
                'query': sq,
                'filter': {'terms': {'locale': locales}}
            }
        }
        # Negative clause: down-rank documents NOT in the requested locale
        # (via negative_boost below), pushing requested-locale docs up.
        negative_sq = {
            'bool': {
                'must_not': [
                    {'term': {'locale': request.LANGUAGE_CODE}}
                ]
            }
        }
        # Note: Here we are replacing the query rather than calling
        # `queryset.query` which would result in a boolean must query.
        queryset.query = query.Boosting(positive=positive_sq,
                                        negative=negative_sq,
                                        negative_boost=0.5)
        return queryset
class SearchQueryBackend(BaseFilterBackend):
    """
    A django-rest-framework filter backend that filters the given
    queryset based on the search query found in the current request's
    query parameters.
    """
    # (<query type>, <field>, <boost factor>)
    search_operations = [
        ('match', 'title', 6.0),
        ('match', 'summary', 2.0),
        ('match', 'content', 1.0),
        ('match_phrase', 'title', 10.0),
        ('match_phrase', 'content', 8.0),
    ]

    def filter_queryset(self, request, queryset, view):
        term = view.query_params.get('q')
        if term:
            # Build one boosted sub-query per configured operation.
            should = [
                Q(op_type, **{field: {'query': term,
                                      'boost': boost}})
                for op_type, field, boost in self.search_operations
            ]
            queryset = queryset.query(
                'function_score',
                query=query.Bool(should=should),
                functions=[query.SF('field_value_factor', field='boost')],
            )
        # Superusers get the scoring explanation attached to each hit.
        if request.user.is_superuser:
            queryset = queryset.extra(explain=True)
        return queryset
class AdvancedSearchQueryBackend(BaseFilterBackend):
    """
    A django-rest-framework filter backend that filters the given queryset
    based on additional query parameters that correspond to advanced search
    indexes.
    """
    fields = (
        'kumascript_macros',
        'css_classnames',
        'html_attributes',
    )

    def filter_queryset(self, request, queryset, view):
        should = []
        for field in self.fields:
            term = view.query_params.get(field)
            if term:
                # Exact matches are boosted above prefix matches.
                should.extend([
                    Q('match', **{field: {'query': term,
                                          'boost': 10.0}}),
                    Q('prefix', **{field: {'value': term,
                                           'boost': 5.0}}),
                ])
        if should:
            queryset = queryset.query(query.Bool(should=should))
        return queryset
class DatabaseFilterBackend(BaseFilterBackend):
    """
    A django-rest-framework filter backend that filters the given
    queryset based on the filters stored in the database.

    If there are more than one tag attached to the filter it will
    use the filter's operator to determine which logical operation to
    use with those tags. The default is OR.

    It then applies custom aggregations based on those database filters
    but will ignore non-raw aggregations.
    """
    def filter_queryset(self, request, queryset, view):
        active_filters = []
        active_facets = []
        for serialized_filter in view.serialized_filters:
            filter_tags = serialized_filter['tags']
            filter_operator = Filter.OPERATORS[serialized_filter['operator']]
            # Only filters the user actually selected become post-filters.
            if serialized_filter['slug'] in view.selected_filters:
                if len(filter_tags) > 1:
                    # Combine multiple tags with the filter's own operator.
                    tag_filters = []
                    for filter_tag in filter_tags:
                        tag_filters.append(F('term', tags=filter_tag))
                    active_filters.append(F(filter_operator, tag_filters))
                else:
                    active_filters.append(F('term', tags=filter_tags[0]))
            # Every serialized filter (selected or not) gets a facet so
            # counts can be shown for all of them.
            if len(filter_tags) > 1:
                facet_params = F('terms', tags=filter_tags)
            else:
                facet_params = F('term', tags=filter_tags[0])
            active_facets.append((serialized_filter['slug'], facet_params))
        if active_filters:
            # post_filter (rather than query) so facet counts are computed
            # before the selected filters are applied.
            if len(active_filters) == 1:
                queryset = queryset.post_filter(active_filters[0])
            else:
                # Multiple selected filters are always OR-ed together.
                queryset = queryset.post_filter(F('or', active_filters))
        for facet_slug, facet_params in active_facets:
            queryset.aggs.bucket(facet_slug, 'filter',
                                 **facet_params.to_dict())
        return queryset
class HighlightFilterBackend(BaseFilterBackend):
    """
    A django-rest-framework filter backend that adds search term highlighting.
    """
    def filter_queryset(self, request, queryset, view):
        # Leave the queryset untouched unless highlighting was requested.
        if not view.query_params.get('highlight'):
            return queryset
        highlighted = queryset.highlight(*WikiDocumentType.excerpt_fields)
        # Order fragments by score and wrap matches in <mark> tags.
        return highlighted.highlight_options(order='score',
                                             pre_tags=['<mark>'],
                                             post_tags=['</mark>'])
|
import datetime
import django_filters
from dateutil import parser
from django.core.exceptions import ObjectDoesNotExist
from django.db import models as django_models
from rest_framework import viewsets
from rest_framework.decorators import action
from rest_framework.exceptions import ParseError
from rest_framework.response import Response
from rest_framework.reverse import reverse
from rest_framework.status import (HTTP_400_BAD_REQUEST,
HTTP_404_NOT_FOUND)
from treeherder.model.error_summary import get_error_summary
from treeherder.model.models import (Job,
JobDetail,
JobLog,
OptionCollection,
Repository,
TextLogError,
TextLogStep)
from treeherder.webapp.api import (pagination,
serializers)
from treeherder.webapp.api.utils import (CharInFilter,
NumberInFilter,
to_timestamp)
class JobFilter(django_filters.FilterSet):
    """
    We use this gigantic class to provide the same filtering interface
    as the previous jobs API
    """
    # id / guid filters (including `__in` list variants)
    id = django_filters.NumberFilter(field_name='id')
    id__in = NumberInFilter(field_name='id', lookup_expr='in')
    tier__in = NumberInFilter(field_name='tier', lookup_expr='in')
    push_id__in = NumberInFilter(field_name='push_id', lookup_expr='in')
    job_guid = django_filters.CharFilter(field_name='guid')
    job_guid__in = CharInFilter(field_name='guid', lookup_expr='in')
    # filters that map flat legacy parameter names onto related-model fields
    build_architecture = django_filters.CharFilter(
        field_name='build_platform__architecture')
    build_os = django_filters.CharFilter(
        field_name='build_platform__os_name')
    build_platform = django_filters.CharFilter(
        field_name='build_platform__platform')
    build_system_type = django_filters.CharFilter(
        field_name='signature__build_system_type')
    job_group_id = django_filters.NumberFilter(
        field_name='job_group_id')
    job_group_name = django_filters.CharFilter(
        field_name='job_group__name')
    job_group_symbol = django_filters.CharFilter(
        field_name='job_group__symbol')
    job_type_name = django_filters.CharFilter(
        field_name='job_type__name')
    job_type_symbol = django_filters.CharFilter(
        field_name='job_type__symbol')
    machine_name = django_filters.CharFilter(
        field_name='machine__name')
    machine_platform_architecture = django_filters.CharFilter(
        field_name='machine_platform__architecture')
    machine_platform_os = django_filters.CharFilter(
        field_name='machine_platform__os_name')
    platform = django_filters.CharFilter(
        field_name='machine_platform__platform')
    ref_data_name = django_filters.CharFilter(
        field_name='signature__name')
    signature = django_filters.CharFilter(
        field_name='signature__signature')

    class Meta:
        model = Job
        # direct model fields and the lookup expressions allowed on each
        fields = {
            'option_collection_hash': ['exact'],
            'build_platform_id': ['exact'],
            'failure_classification_id': ['exact'],
            'job_type_id': ['exact'],
            'job_group_id': ['exact'],
            'reason': ['exact'],
            'state': ['exact'],
            'result': ['exact'],
            'who': ['exact'],
            'tier': ['lt', 'lte', 'exact', 'gt', 'gte'],
            'id': ['lt', 'lte', 'exact', 'gt', 'gte'],
            'push_id': ['lt', 'lte', 'exact', 'gt', 'gte'],
            'last_modified': ['lt', 'lte', 'exact', 'gt', 'gte'],
            'submit_time': ['lt', 'lte', 'exact', 'gt', 'gte'],
            'start_time': ['lt', 'lte', 'exact', 'gt', 'gte'],
            'end_time': ['lt', 'lte', 'exact', 'gt', 'gte']
        }
        # accept ISO-8601 datetime strings for all datetime fields
        filter_overrides = {
            django_models.DateTimeField: {
                'filter_class': django_filters.IsoDateTimeFilter
            }
        }
class JobsViewSet(viewsets.ViewSet):
    """
    This viewset is responsible for the jobs endpoint.

    Besides the standard detail/list endpoints it exposes extra actions for
    text log steps/errors, bug suggestions and similar jobs.
    """
    # data that we want to do select_related on when returning job objects
    # (so we don't have a zillion db queries)
    _default_select_related = [
        'build_platform',
        'job_type',
        'job_group',
        'machine_platform',
        'machine',
        'signature',
        'repository'
    ]
    # (<serialized property name>, <ORM lookup path>, <optional transform>)
    # tuples describing the flat job representation built by
    # _get_job_list_response()
    _property_query_mapping = [
        ('build_architecture', 'build_platform__architecture', None),
        ('build_os', 'build_platform__os_name', None),
        ('build_platform', 'build_platform__platform', None),
        ('build_platform_id', 'build_platform_id', None),
        ('build_system_type', 'signature__build_system_type', None),
        ('end_timestamp', 'end_time', to_timestamp),
        ('failure_classification_id', 'failure_classification_id', None),
        ('id', 'id', None),
        ('job_group_description', 'job_group__description', None),
        ('job_group_id', 'job_group_id', None),
        ('job_group_name', 'job_group__name', None),
        ('job_group_symbol', 'job_group__symbol', None),
        ('job_guid', 'guid', None),
        ('job_type_description', 'job_type__description', None),
        ('job_type_id', 'job_type_id', None),
        ('job_type_name', 'job_type__name', None),
        ('job_type_symbol', 'job_type__symbol', None),
        ('last_modified', 'last_modified', None),
        ('machine_name', 'machine__name', None),
        ('machine_platform_architecture', 'machine_platform__architecture', None),
        ('machine_platform_os', 'machine_platform__os_name', None),
        ('option_collection_hash', 'option_collection_hash', None),
        ('platform', 'machine_platform__platform', None),
        ('push_id', 'push_id', None),
        ('reason', 'reason', None),
        ('ref_data_name', 'signature__name', None),
        ('result', 'result', None),
        ('result_set_id', 'push_id', None),
        ('signature', 'signature__signature', None),
        ('start_timestamp', 'start_time', to_timestamp),
        ('state', 'state', None),
        ('submit_timestamp', 'submit_time', to_timestamp),
        ('tier', 'tier', None),
        ('who', 'who', None),
    ]
    # position of option_collection_hash in the mapping above; used to look
    # up the human-readable platform option for each returned row
    _option_collection_hash_idx = [pq[0] for pq in _property_query_mapping].index(
        'option_collection_hash')

    def _get_job_list_response(self, job_qs, offset, count, return_type):
        '''
        custom method to serialize + format jobs information

        It's worth doing this big ugly thing (as opposed to using
        the django rest framework serializer or whatever) as
        this function is often in the critical path
        '''
        option_collection_map = OptionCollection.objects.get_option_collection_map()
        results = []
        for values in job_qs[offset:(offset + count)].values_list(
                *[pq[1] for pq in self._property_query_mapping]):
            platform_option = option_collection_map.get(
                values[self._option_collection_hash_idx],
                "")
            # some values need to be transformed
            values = list(values)
            for (i, _) in enumerate(values):
                func = self._property_query_mapping[i][2]
                if func:
                    values[i] = func(values[i])
            # append results differently depending on if we are returning
            # a dictionary or a list
            if return_type == 'dict':
                results.append(dict(zip(
                    [pq[0] for pq in self._property_query_mapping] +
                    ['platform_option'],
                    values + [platform_option])))
            else:
                results.append(values + [platform_option])
        response_dict = {
            'results': results
        }
        if return_type == 'list':
            # list mode sends the column names once instead of per row
            response_dict.update({
                'job_property_names': [pq[0] for pq in self._property_query_mapping] + ['platform_option']
            })
        return response_dict

    def retrieve(self, request, project, pk=None):
        """
        GET method implementation for detail view

        Return a single job with log_references and
        artifact names and links to the artifact blobs.
        """
        try:
            job = Job.objects.select_related(
                *self._default_select_related + ['taskcluster_metadata']).get(
                repository__name=project, id=pk)
        except Job.DoesNotExist:
            return Response("No job with id: {0}".format(pk), status=HTTP_404_NOT_FOUND)
        resp = serializers.JobSerializer(job, read_only=True).data
        resp["resource_uri"] = reverse("jobs-detail",
                                       kwargs={"project": project, "pk": pk})
        resp["logs"] = []
        for (name, url) in JobLog.objects.filter(job=job).values_list(
                'name', 'url'):
            resp["logs"].append({'name': name, 'url': url})
        platform_option = job.get_platform_option()
        if platform_option:
            resp["platform_option"] = platform_option
        # taskcluster metadata is optional; omit it when absent
        try:
            resp['taskcluster_metadata'] = {
                'task_id': job.taskcluster_metadata.task_id,
                'retry_id': job.taskcluster_metadata.retry_id
            }
        except ObjectDoesNotExist:
            pass
        status_map = {k: v for k, v in Job.AUTOCLASSIFY_STATUSES}
        resp["autoclassify_status"] = status_map[job.autoclassify_status]
        return Response(resp)

    def list(self, request, project):
        """
        GET method implementation for list view

        Optional parameters (default):
        - offset (0)
        - count (10)
        - return_type (dict)
        """
        MAX_JOBS_COUNT = 2000
        # make a mutable copy of these params
        filter_params = request.query_params.copy()
        # various hacks to ensure API backwards compatibility
        # NOTE: snapshot the keys with list() -- we delete keys inside the
        # loop, and mutating a dict while iterating its keys() view raises
        # RuntimeError on Python 3
        for param_key in list(filter_params.keys()):
            # replace `result_set_id` with `push_id`
            if param_key.startswith('result_set_id'):
                new_param_key = param_key.replace('result_set_id', 'push_id')
                filter_params[new_param_key] = filter_params[param_key]
                del filter_params[param_key]
            # convert legacy timestamp parameters to time ones
            elif param_key in ['submit_timestamp', 'start_timestamp',
                               'end_timestamp']:
                new_param_key = param_key.replace('timestamp', 'time')
                filter_params[new_param_key] = datetime.datetime.fromtimestamp(
                    float(filter_params[param_key]))
                del filter_params[param_key]
            # sanity check 'last modified'
            elif param_key.startswith('last_modified'):
                datestr = filter_params[param_key]
                try:
                    parser.parse(datestr)
                except ValueError:
                    return Response(
                        "Invalid date value for `last_modified`: {}".format(datestr),
                        status=HTTP_400_BAD_REQUEST)
        try:
            offset = int(filter_params.get("offset", 0))
            count = int(filter_params.get("count", 10))
        except ValueError:
            return Response(
                "Invalid value for offset or count",
                status=HTTP_400_BAD_REQUEST)
        return_type = filter_params.get("return_type", "dict").lower()
        if count > MAX_JOBS_COUNT:
            msg = "Specified count exceeds API MAX_JOBS_COUNT value: {}".format(MAX_JOBS_COUNT)
            return Response({"detail": msg}, status=HTTP_400_BAD_REQUEST)
        try:
            repository = Repository.objects.get(name=project)
        except Repository.DoesNotExist:
            return Response({
                "detail": "No project with name {}".format(project)
            }, status=HTTP_404_NOT_FOUND)
        jobs = JobFilter({k: v for (k, v) in filter_params.items()},
                         queryset=Job.objects.filter(
                             repository=repository).select_related(
                                 *self._default_select_related)).qs
        response_body = self._get_job_list_response(jobs, offset, count,
                                                    return_type)
        response_body["meta"] = dict(repository=project, offset=offset,
                                     count=count)
        return Response(response_body)

    @action(detail=True, methods=['get'])
    def text_log_steps(self, request, project, pk=None):
        """
        Gets a list of steps associated with this job
        """
        try:
            job = Job.objects.get(repository__name=project,
                                  id=pk)
        except ObjectDoesNotExist:
            return Response("No job with id: {0}".format(pk), status=HTTP_404_NOT_FOUND)
        textlog_steps = TextLogStep.objects.filter(job=job).order_by(
            'started_line_number').prefetch_related('errors')
        return Response(serializers.TextLogStepSerializer(textlog_steps,
                                                          many=True,
                                                          read_only=True).data)

    @action(detail=True, methods=['get'])
    def text_log_errors(self, request, project, pk=None):
        """
        Gets a list of error lines associated with this job
        """
        try:
            job = Job.objects.get(repository__name=project,
                                  id=pk)
        except Job.DoesNotExist:
            return Response("No job with id: {0}".format(pk),
                            status=HTTP_404_NOT_FOUND)
        textlog_errors = (TextLogError.objects
                          .filter(step__job=job)
                          .select_related("_metadata",
                                          "_metadata__failure_line")
                          .prefetch_related("classified_failures", "matches")
                          .order_by('id'))
        return Response(serializers.TextLogErrorSerializer(textlog_errors,
                                                           many=True,
                                                           read_only=True).data)

    @action(detail=True, methods=['get'])
    def bug_suggestions(self, request, project, pk=None):
        """
        Gets a set of bug suggestions for this job
        """
        try:
            job = Job.objects.get(repository__name=project, id=pk)
        except ObjectDoesNotExist:
            return Response("No job with id: {0}".format(pk), status=HTTP_404_NOT_FOUND)
        return Response(get_error_summary(job))

    @action(detail=True, methods=['get'])
    def similar_jobs(self, request, project, pk=None):
        """
        Get a list of jobs similar to the one selected.
        """
        try:
            repository = Repository.objects.get(name=project)
        except Repository.DoesNotExist:
            return Response({
                "detail": "No project with name {}".format(project)
            }, status=HTTP_404_NOT_FOUND)
        try:
            job = Job.objects.get(repository=repository, id=pk)
        except ObjectDoesNotExist:
            return Response("No job with id: {0}".format(pk),
                            status=HTTP_404_NOT_FOUND)
        filter_params = request.query_params.copy()
        try:
            offset = int(filter_params.get("offset", 0))
            # we don't need a big page size on this endpoint,
            # let's cap it to 50 elements
            count = int(filter_params.get("count", 50))
        except ValueError:
            return Response("Invalid value for offset or count",
                            status=HTTP_400_BAD_REQUEST)
        return_type = filter_params.get("return_type", "dict").lower()
        # "similar" = same job type on the same repository, excluding the
        # selected job itself
        jobs = JobFilter({k: v for (k, v) in filter_params.items()},
                         queryset=Job.objects.filter(
                             job_type_id=job.job_type_id,
                             repository=repository).exclude(
                                 id=job.id).select_related(
                                     *self._default_select_related)).qs
        # similar jobs we want in descending order from most recent
        jobs = jobs.order_by('-start_time')
        response_body = self._get_job_list_response(jobs, offset, count,
                                                    return_type)
        response_body["meta"] = dict(offset=offset, count=count,
                                     repository=project)
        return Response(response_body)
class JobDetailViewSet(viewsets.ReadOnlyModelViewSet):
    '''
    Endpoint for retrieving metadata (e.g. links to artifacts, file sizes)
    associated with a particular job
    '''
    queryset = JobDetail.objects.all().select_related('job', 'job__repository')
    serializer_class = serializers.JobDetailSerializer

    class JobDetailFilter(django_filters.rest_framework.FilterSet):
        # filters mapping flat query-parameter names onto related-model fields
        job_id = django_filters.NumberFilter(field_name='job')
        job_id__in = NumberInFilter(field_name='job', lookup_expr='in')
        job_guid = django_filters.CharFilter(field_name='job__guid')
        job__guid = django_filters.CharFilter(field_name='job__guid')  # for backwards compat
        title = django_filters.CharFilter(field_name='title')
        value = django_filters.CharFilter(field_name='value')
        push_id = django_filters.NumberFilter(field_name='job__push')
        repository = django_filters.CharFilter(field_name='job__repository__name')

        class Meta:
            model = JobDetail
            fields = ['job_id', 'job_guid', 'job__guid', 'job_id__in', 'title',
                      'value', 'push_id', 'repository']
    filter_backends = [django_filters.rest_framework.DjangoFilterBackend]
    filterset_class = JobDetailFilter

    # using a custom pagination size of 2000 to avoid breaking mozscreenshots
    # which doesn't paginate through results yet
    # https://github.com/mnoorenberghe/mozscreenshots/issues/28
    class JobDetailPagination(pagination.IdPagination):
        page_size = 2000
    pagination_class = JobDetailPagination

    # one of these is required
    required_filters = ['job_guid', 'job__guid', 'job_id', 'job_id__in', 'push_id']

    def list(self, request):
        # reject requests that don't narrow the result set by job or push
        query_param_keys = request.query_params.keys()
        # unfiltered requests can potentially create huge sql queries, so
        # make sure the user passes a job id or guid
        if set(self.required_filters).isdisjoint(set(query_param_keys)):
            raise ParseError("Must filter on one of: {}".format(
                ", ".join(self.required_filters)))
        return viewsets.ReadOnlyModelViewSet.list(self, request)
|
from __future__ import absolute_import, print_function, unicode_literals
import datetime
import jsone
import pipes
import yaml
import os
import slugid
import taskcluster
from git import Repo
from lib.tasks import schedule_task
ROOT = os.path.join(os.path.dirname(__file__), '../..')
def calculate_branch_and_head_rev(root):
    """Return (branch name, HEAD commit sha) for the git checkout at *root*."""
    head_ref = Repo(root).head.reference
    return str(head_ref), str(head_ref.commit)
def make_decision_task(params):
    """Generate a basic decision task, based on the root .taskcluster.yml"""
    with open(os.path.join(ROOT, '.taskcluster.yml'), 'rb') as f:
        taskcluster_yml = yaml.safe_load(f)

    # One stable slugid per name, mirroring taskcluster-github's as_slugid.
    slugids = {}

    def as_slugid(name):
        if name not in slugids:
            slugids[name] = slugid.nice()
        return slugids[name]

    # provide a similar JSON-e context to what taskcluster-github provides
    context = {
        'tasks_for': 'cron',
        'cron': {
            'task_id': params['cron_task_id'],
        },
        'now': datetime.datetime.utcnow().isoformat()[:23] + 'Z',
        'as_slugid': as_slugid,
        'event': {
            'repository': {
                'clone_url': params['repository_url'],
            },
            'release': {
                'tag_name': params['head_rev'],
                'target_commitish': params['branch'],
            },
            'sender': {
                'login': 'TaskclusterHook'
            },
        },
    }

    rendered = jsone.render(taskcluster_yml, context)
    tasks = rendered['tasks']
    if len(tasks) != 1:
        raise Exception("Expected .taskcluster.yml to only produce one cron task")
    # The taskId is popped out of the task payload and returned separately,
    # as the queue API wants (task_id, task_definition).
    task = tasks[0]
    task_id = task.pop('taskId')
    return (task_id, task)
if __name__ == "__main__":
    # Runs inside a taskcluster worker: the proxy supplies credentials, so
    # only the proxy root URL is needed here.
    queue = taskcluster.Queue({'rootUrl': os.environ['TASKCLUSTER_PROXY_URL']})
    branch, head_rev = calculate_branch_and_head_rev(ROOT)
    params = {
        'repository_url': 'https://github.com/mozilla-mobile/focus-android',
        'head_rev': head_rev,
        'branch': branch,
        # Placeholder used when not triggered from a cron hook.
        'cron_task_id': os.environ.get('CRON_TASK_ID', '<cron_task_id>')
    }
    decisionTaskId, decisionTask = make_decision_task(params)
    schedule_task(queue, decisionTaskId, decisionTask)
    print('All scheduled!')
|
"""This script use the nearest marker to the transcript as control, increasing permutation rounds according to the p-value"""
import string
import sys
import MySQLdb
import getpass
import time
def translateAlias(name):
    """Translate a short strain alias ("B6", "D2") to its canonical name.

    Any other value is returned unchanged.  The parameter was renamed from
    ``str`` so the builtin type is no longer shadowed; callers invoke this
    positionally (e.g. via map()), so the rename is backward-compatible.
    """
    if name == "B6":
        return "C57BL/6J"
    elif name == "D2":
        return "DBA/2J"
    else:
        return name
# --- Interactive setup: ids, input file and MySQL connection (Python 2) ---
# Column 0 of each data line is the probeset name; data values start here.
dataStart = 1
GeneChipId = int( raw_input("Enter GeneChipId:") )
ProbeSetFreezeId = int( raw_input("Enter ProbeSetFreezeId:") )
input_file_name = raw_input("Enter file name with suffix:")
fp = open("%s" % input_file_name, 'rb')
try:
    passwd = getpass.getpass('Please enter mysql password here : ')
    con = MySQLdb.Connect(db='db_webqtl',host='localhost', user='username',passwd=passwd)
    db = con.cursor()
    print "You have successfully connected to mysql.\n"
except:
    print "You entered incorrect password.\n"
    sys.exit(0)
time0 = time.time()
# --- Pass 1: every data line must have the same column count as the header;
# collect the (lower-cased, sorted) probeset names while scanning. ---
print 'Checking if each line have same number of members'
GeneList = []
isCont = 1
header = fp.readline()
header = string.split(string.strip(header),'\t')
header = map(string.strip, header)
nfield = len(header)
line = fp.readline()
kj=0
while line:
    line2 = string.split(string.strip(line),'\t')
    line2 = map(string.strip, line2)
    if len(line2) != nfield:
        print "Error : " + line
        isCont = 0
    GeneList.append(line2[0])
    line = fp.readline()
    kj+=1
    if kj%100000 == 0:
        print 'checked ',kj,' lines'
GeneList = map(string.lower, GeneList)
GeneList.sort()
if isCont==0:
    sys.exit(0)
print 'used ',time.time()-time0,' seconds'
# --- Pass 2: every strain named in the header must exist in Strain;
# collect its DB id (Ids is positionally aligned with the data columns). ---
print 'Checking if each strain exist in database'
isCont = 1
fp.seek(0)
header = fp.readline()
header = string.split(string.strip(header),'\t')
header = map(string.strip, header)
header = map(translateAlias, header)
header = header[dataStart:]
Ids = []
for item in header:
    try:
        db.execute('select Id from Strain where Name = "%s"' % item)
        Ids.append(db.fetchall()[0][0])
    except:
        print item,'does not exist, check the if the strain name is correct'
        isCont=0
if isCont==0:
    sys.exit(0)
print 'used ',time.time()-time0,' seconds'
# --- Pass 3: every probeset named in the file must exist in ProbeSet. ---
print 'Check if each ProbeSet exist in database'
line = fp.readline()
line = fp.readline()
line2 = string.split(string.strip(line),'\t')
line2 = map(string.strip, line2)
PId = line2[0]
# Probe the first identifier to decide whether the file uses
# ProbeSet.Name or ProbeSet.TargetId as its identifier column.
db.execute('select Id from ProbeSet where Name="%s" and ChipId=%d' % (PId, GeneChipId))
results = db.fetchall()
IdStr = 'TargetId'
if len(results)>0:
    IdStr = 'Name'
db.execute('select distinct(%s) from ProbeSet where ChipId=%d order by %s' % (IdStr, GeneChipId, IdStr))
results = db.fetchall()
Names = []
for item in results:
    Names.append(item[0])
Names = map(string.lower, Names)
Names.sort() # -- Fixed the lower case problem of ProbeSets affx-mur_b2_at doesn't exist --#
# Merge-walk the two sorted lists; names in GeneList but absent from
# Names are collected into GeneList2 (x1 de-duplicates repeated names).
x=y=0
x1=-1
GeneList2=[]
while x<len(GeneList) and y<len(Names):
    if GeneList[x]==Names[y]:
        x += 1
        y += 1
    elif GeneList[x]<Names[y]:
        if x!=x1:
            GeneList2.append(GeneList[x])
            x1 = x
            x += 1
    elif GeneList[x]>Names[y]:
        y += 1
    if x%100000==0:
        print 'check Name, checked %d lines'%x
while x<len(GeneList):
    GeneList2.append(GeneList[x])
    x += 1
isCont=1
ferror = open("ProbeSetError.txt", "wb")
for item in GeneList2:
    ferror.write(item + " doesn't exist \n")
    print item, " doesn't exist"
    isCont = 0
if isCont==0:
    sys.exit(0)
print 'used ',time.time()-time0,' seconds'
# --- Load the identifier -> DataId map for this freeze, then insert the
# standard-error values in batches of 100 input lines. ---
db.execute("""
    select ProbeSet.%s, ProbeSetXRef.DataId from ProbeSet, ProbeSetXRef
    where ProbeSet.Id=ProbeSetXRef.ProbeSetId and ProbeSetXRef.ProbeSetFreezeId=%d"""
    % (IdStr, ProbeSetFreezeId))
results = db.fetchall()
ProbeNameId = {}
for Name, Id in results:
    ProbeNameId[Name] = Id
ferror = open("ProbeError.txt", "wb")
DataValues = []
fp.seek(0) #XZ add this line
line = fp.readline() #XZ add this line
line = fp.readline()
kj = 0
while line:
    line2 = string.split(string.strip(line),'\t')
    line2 = map(string.strip, line2)
    CellId = line2[0]
    if not ProbeNameId.has_key(CellId):
        ferror.write(CellId + " doesn't exist\n")
        print CellId, " doesn't exist"
    else:
        DataId = ProbeNameId[CellId]
        datasorig = line2[dataStart:]
        # Build "(DataId, StrainId, value)" tuples; empty cells are skipped
        # but still advance the strain index i.
        i = 0
        for item in datasorig:
            if item != '':
                value = '('+str(DataId)+','+str(Ids[i])+','+str(item)+')'
                DataValues.append(value)
            i += 1
    kj += 1
    if kj % 100 == 0:
        # Flush the accumulated tuples as one multi-row INSERT.
        Dataitems = ','.join(DataValues)
        cmd = 'insert ProbeSetSE values %s' % Dataitems
        db.execute(cmd)
        DataValues = []
        print 'inserted ',kj,' lines'
        print 'used ',time.time()-time0,' seconds'
    line = fp.readline()
# Flush any remainder smaller than a full batch.
if len(DataValues)>0:
    DataValues = ','.join(DataValues)
    cmd = 'insert ProbeSetSE values %s' % DataValues
    db.execute(cmd)
con.close()
|
"""
Structured Tagging based on XBlockAsides
"""
import json
from xblock.core import XBlockAside, XBlock
from web_fragments.fragment import Fragment
from xblock.fields import Scope, Dict
from xmodule.x_module import AUTHOR_VIEW
from xmodule.capa_module import CapaModule
from edxmako.shortcuts import render_to_string
from django.conf import settings
from django.db import transaction
from django.core.exceptions import ObjectDoesNotExist
from webob import Response
_ = lambda text: text
class StructuredTagsAside(XBlockAside):
    """
    Aside that allows tagging blocks
    """
    # Mapping of tag category name -> chosen value(s) for the tagged block.
    saved_tags = Dict(help=_("Dictionary with the available tags"),
                      scope=Scope.content,
                      default={},)
    def get_available_tags(self):
        """
        Return available tags
        """
        from .models import TagCategories
        return TagCategories.objects.all()
    def _get_studio_resource_url(self, relative_url):
        """
        Returns the Studio URL to a static resource.
        """
        return settings.STATIC_URL + relative_url
    def _check_user_access(self, role, user=None):
        """
        Return True when *user* may edit tags guarded by *role*.

        Superusers always pass; otherwise the user must hold the matching
        course staff/instructor role for the current course.
        """
        from student.auth import user_has_role
        from student.roles import CourseStaffRole, CourseInstructorRole
        roles = {
            CourseStaffRole.ROLE: CourseStaffRole,
            CourseInstructorRole.ROLE: CourseInstructorRole
        }
        if not user:
            return False
        elif self.runtime.user_is_superuser:
            return True
        elif role in roles:
            return user_has_role(user, roles[role](self.runtime.course_id))
        return False
    @XBlockAside.aside_for(AUTHOR_VIEW)
    def student_view_aside(self, block, context): # pylint: disable=unused-argument
        """
        Display the tag selector with specific categories and allowed values,
        depending on the context.
        """
        from student.models import User
        # Tagging is offered for capa, html, video and drag-and-drop blocks,
        # plus open-assessment blocks that have no rubric criteria.
        if isinstance(block, CapaModule) or block.category in ['html', 'video', 'drag-and-drop-v2'] or \
                (block.category == 'openassessment' and len(block.rubric_criteria) == 0):
            tags = []
            user = None
            has_access_any_tag = False
            for tag in self.get_available_tags():
                course_id = None
                org = None
                if tag.scoped_by:
                    if tag.scoped_by == 'course':
                        course_id = self.scope_ids.usage_id.course_key
                    elif tag.scoped_by == 'org':
                        org = self.scope_ids.usage_id.course_key.org
                values = tag.get_values(course_id=course_id, org=org)
                current_values = self.saved_tags.get(tag.name, [])
                # basestring: this module targets Python 2.
                if isinstance(current_values, basestring):
                    current_values = [current_values]
                # Keep previously-saved values even if they were since
                # removed from the category's allowed values.
                values_not_exists = [cur_val for cur_val in current_values if cur_val not in values]
                has_access_this_tag = True
                if tag.role:
                    if not user:
                        try:
                            user = User.objects.get(pk=self.runtime.user_id)
                        except ObjectDoesNotExist:
                            pass
                    has_access_this_tag = self._check_user_access(tag.role, user)
                    if has_access_this_tag:
                        has_access_any_tag = True
                else:
                    has_access_any_tag = True
                tags.append({
                    'key': tag.name,
                    'title': tag.title,
                    'values': values,
                    'values_json': json.dumps(values),
                    'current_values': values_not_exists + current_values,
                    'current_values_json': json.dumps(values_not_exists + current_values),
                    'editable': tag.editable_in_studio,
                    'has_access': has_access_this_tag,
                })
            fragment = Fragment(render_to_string('structured_tags_block.html', {'tags': tags,
                                                                                'tags_count': len(tags),
                                                                                'block_location': block.location,
                                                                                'show_save_btn': has_access_any_tag,
                                                                                }))
            fragment.add_javascript_url(self._get_studio_resource_url('/cms/js/magicsuggest-1.3.1.js'))
            fragment.add_javascript_url(self._get_studio_resource_url('/js/xblock_asides/structured_tags.js'))
            fragment.initialize_js('StructuredTagsInit')
            return fragment
        else:
            return Fragment(u'')
    @XBlock.handler
    def edit_tags_view(self, request=None, suffix=None): # pylint: disable=unused-argument
        """
        Render the edit form for the category named by the ``tag_category``
        GET parameter; 400 when the parameter is missing or unknown.
        """
        from .models import TagCategories
        tag_category_param = request.GET.get('tag_category', None)
        if tag_category_param:
            try:
                tag = TagCategories.objects.get(name=tag_category_param)
                course_id = None
                org = None
                if tag.scoped_by:
                    if tag.scoped_by == 'course':
                        course_id = self.scope_ids.usage_id.course_key
                    elif tag.scoped_by == 'org':
                        org = self.scope_ids.usage_id.course_key.org
                tpl_params = {
                    'key': tag.name,
                    'title': tag.title,
                    'values': '\n'.join(tag.get_values(course_id=course_id, org=org))
                }
                data = {
                    'html': render_to_string('structured_tags_block_editor.html', tpl_params)
                }
                return Response(json=data)
            except TagCategories.DoesNotExist:
                pass
        return Response("Invalid 'tag_category' parameter", status=400)
    @XBlock.handler
    def update_values(self, request=None, suffix=None): # pylint: disable=unused-argument
        """
        Replace each POSTed category's allowed values with the
        newline-separated list submitted for it (adds new, removes missing).
        """
        with transaction.atomic():
            for tag_key in request.POST:
                for tag in self.get_available_tags():
                    if tag.name == tag_key:
                        course_id = None
                        org = None
                        if tag.scoped_by:
                            if tag.scoped_by == 'course':
                                course_id = self.scope_ids.usage_id.course_key
                            elif tag.scoped_by == 'org':
                                org = self.scope_ids.usage_id.course_key.org
                        tag_values = tag.get_values(course_id=course_id, org=org)
                        tmp_list = [v for v in request.POST[tag_key].splitlines() if v.strip()]
                        values_to_add = list(set(tmp_list) - set(tag_values))
                        values_to_remove = list(set(tag_values) - set(tmp_list))
                        self._add_tag_values(tag, values_to_add, course_id, org)
                        self._remove_tag_values(tag, values_to_remove, course_id, org)
        return Response()
    def _add_tag_values(self, tag_category, values, course_id=None, org=None):
        """Create a TagAvailableValues row for each value in *values*."""
        from .models import TagAvailableValues
        for val in values:
            kwargs = {
                'category': tag_category,
                'value': val
            }
            if course_id:
                kwargs['course_id'] = course_id
            if org:
                kwargs['org'] = org
            TagAvailableValues(**kwargs).save()
    def _remove_tag_values(self, tag_category, values, course_id=None, org=None):
        """Delete the TagAvailableValues rows matching each value in *values*."""
        from .models import TagAvailableValues
        for val in values:
            kwargs = {
                'category': tag_category,
                'value': val
            }
            if course_id:
                kwargs['course_id'] = course_id
            if org:
                kwargs['org'] = org
            TagAvailableValues.objects.filter(**kwargs).delete()
    @XBlock.handler
    def save_tags(self, request=None, suffix=None): # pylint: disable=unused-argument
        """
        Handler to save chosen tags with connected XBlock
        """
        posted_data = request.params.dict_of_lists()
        saved_tags = {}
        for av_tag in self.get_available_tags():
            # Values arrive as multi-valued form fields named "<tag>[]".
            tag_key = '%s[]' % av_tag.name
            if tag_key in posted_data and len(posted_data[tag_key]) > 0:
                saved_tags[av_tag.name] = posted_data[tag_key]
        self.saved_tags = saved_tags
        return Response()
    def get_event_context(self, event_type, event): # pylint: disable=unused-argument
        """
        This method return data that should be associated with the "check_problem" event
        """
        if self.saved_tags and event_type in ("problem_check", "edx.drag_and_drop_v2.item.dropped"):
            return {'saved_tags': self.saved_tags}
        else:
            return None
|
from __future__ import absolute_import, print_function, unicode_literals, division
from jormungandr import InstanceManager
from pytest import fixture
from pytest_mock import mocker
from jormungandr import app
from jormungandr.instance_manager import choose_best_instance
class FakeInstance:
    """Minimal stand-in for a jormungandr instance used by these tests."""

    def __init__(self, name, is_free=False, priority=0):
        self.name = name
        self.priority = priority
        self.is_free = is_free
@fixture
def manager():
    """InstanceManager pre-populated with two fake coverages."""
    mgr = InstanceManager()
    for coverage in ('paris', 'pdl'):
        mgr.instances[coverage] = FakeInstance(coverage)
    return mgr
def get_instances_test(manager):
    """Instances can be listed in full, fetched by name, or miss entirely."""
    with app.test_request_context('/'):
        all_instances = manager.get_instances()
        assert len(all_instances) == 2
        assert {inst.name for inst in all_instances} == {'paris', 'pdl'}
        paris_only = manager.get_instances('paris')
        assert len(paris_only) == 1
        assert paris_only[0].name == 'paris'
        assert manager.get_instances('foo') is None
def get_instances_by_coord_test(manager, mocker):
    """A lon/lat lookup delegates to _all_keys_of_coord."""
    patched = mocker.patch.object(manager, '_all_keys_of_coord', return_value=['paris'])
    with app.test_request_context('/'):
        found = manager.get_instances(lon=4, lat=3)
        assert len(found) == 1
        assert found[0].name == 'paris'
        assert patched.called
def get_instances_by_object_id_test(manager, mocker):
    """An object-id lookup delegates to _all_keys_of_id."""
    patched = mocker.patch.object(manager, '_all_keys_of_id', return_value=['pdl'])
    with app.test_request_context('/'):
        found = manager.get_instances(object_id='sa:pdl')
        assert len(found) == 1
        assert found[0].name == 'pdl'
        assert patched.called
def choose_best_instance_test():
    """
    Test to choose the best instance according to comparator : priority > is_free=False > is_free=True
    """
    candidates = [FakeInstance(name, is_free=True, priority=0)
                  for name in ('fr-nw', 'fr-nw-c', 'fr-auv')]
    # All else equal, the comparator settles on 'fr-auv'.
    assert choose_best_instance(candidates).name == 'fr-auv'
    # A non-free instance beats the free ones.
    candidates[1].is_free = False
    assert choose_best_instance(candidates).name == 'fr-nw-c'
    # Highest priority beats everything else.
    candidates.append(FakeInstance('fr-bre', is_free=True, priority=1000))
    assert choose_best_instance(candidates).name == 'fr-bre'
|
import unittest
try:
from unittest import mock
except ImportError:
import mock
from pyramid import testing as pyramid_testing
from .. import testing
from ... import config
from .views_test_data import COLLECTION_METADATA
class OaiViewsTestCase(unittest.TestCase):
    """Integration tests for the OAI-PMH endpoint (``views.oai.oai``)."""
    fixture = testing.data_fixture
    @classmethod
    def setUpClass(cls):
        cls.settings = testing.integration_test_settings()
    @testing.db_connect
    def setUp(self, cursor):
        self.fixture.setUp()
        self.request = pyramid_testing.DummyRequest()
        self.request.headers['HOST'] = 'cnx.org'
        self.request.application_url = 'http://cnx.org'
        # NOTE(review): this local 'config' shadows the module-level
        # 'config' import within setUp only; the tests below still see
        # the imported module.
        config = pyramid_testing.setUp(settings=self.settings,
                                       request=self.request)
        # Set up routes
        from ... import declare_api_routes
        declare_api_routes(config)
        # Set up type info
        from ... import declare_type_info
        declare_type_info(config)
        # Clear all cached searches
        import memcache
        mc_servers = self.settings['memcache-servers'].split()
        mc = memcache.Client(mc_servers, debug=0)
        mc.flush_all()
        mc.disconnect_all()
        # Patch database search so that it's possible to assert call counts
        # later
        from ... import cache
        original_search = cache.database_search
        self.db_search_call_count = 0
        def patched_search(*args, **kwargs):
            self.db_search_call_count += 1
            return original_search(*args, **kwargs)
        cache.database_search = patched_search
        self.addCleanup(setattr, cache, 'database_search', original_search)
    def tearDown(self):
        pyramid_testing.tearDown()
        self.fixture.tearDown()
    def test_oai_general(self):
        # Any valid verb returns the envelope fields used by every response.
        self.request.matched_route = mock.Mock()
        self.request.matched_route.name = 'oai'
        self.request.GET = {'verb': 'Identify'}
        from ...views.oai import oai
        oai = oai(self.request)
        self.assertTrue('dateTime' in oai.keys())
        self.assertEqual(oai['baseURL'], self.request.path_url)
        self.assertEqual(oai['query_request'], [{'name': 'verb', 'val': 'Identify'}])
    def test_oai_identify(self):
        self.request.matched_route = mock.Mock()
        self.request.matched_route.name = 'oai'
        self.request.GET = {'verb': 'Identify'}
        from ...views.oai import oai
        oai = oai(self.request)
        self.assertEqual(oai['host'], self.request.host)
        self.assertEqual(oai['adminEmail'], "support@openstax.org")
        self.assertEqual(oai['repository'], config.REPOSITORY_NAME)
    def test_oai_listIdentifiers(self):
        # NOTE(review): from_date is defined but never sent in the request.
        from_date = '2016-01-01'
        until_date = '2017-01-01'
        self.request.matched_route = mock.Mock()
        self.request.matched_route.name = 'oai'
        self.request.GET = {'verb': 'ListIdentifiers',
                            'metadataPrefix': 'cnx_dc',
                            'until': until_date}
        from ...views.oai import oai
        oai = oai(self.request)
        for result in oai['results']:
            self.assertTrue(str(result['revised']) <= until_date)
            self.assertTrue(set(result.keys()) == set(["revised", "uuid"]))
    def test_oai_listMetadataFormats(self):
        self.request.matched_route = mock.Mock()
        self.request.matched_route.name = 'oai'
        self.request.GET = {'verb': 'ListMetadataFormats'}
        from ...views.oai import oai
        oai = oai(self.request)
        prefixes = [result['prefix'] for result in oai['results']]
        self.assertEqual(prefixes, ['oai_dc', 'ims1_2_1', 'cnx_dc'])
    def test_oai_listRecords(self):
        from_date = '2016-01-01'
        until_date = '2017-01-01'
        self.request.matched_route = mock.Mock()
        self.request.matched_route.name = 'oai'
        self.request.GET = {'verb': 'ListRecords',
                            'metadataPrefix': 'cnx_dc',
                            'from': from_date,
                            'until': until_date}
        from ...views.oai import oai
        oai = oai(self.request)
        # Full record payload expected for the cnx_dc prefix.
        columns = set(['name', 'created', 'revised', 'uuid', 'link', 'portal_type',
                       'language', 'version', 'keywords', 'subjects',
                       'author_emails', 'authors', 'maintainers', 'translators',
                       'abstract', 'licenses_url'])
        for result in oai['results']:
            self.assertTrue(str(result['revised']) >= from_date and
                            str(result['revised']) <= until_date)
            self.assertEqual(set(result.keys()), columns)
    def test_oai_getRecord(self):
        uuid = COLLECTION_METADATA[u'id']
        self.request.matched_route = mock.Mock()
        self.request.matched_route.name = 'oai'
        self.request.GET = {'verb': 'GetRecord',
                            'metadataPrefix': 'cnx_dc',
                            'identifier': "oai:{}:{}".format(self.request.host, uuid)}
        from ...views.oai import oai
        oai = oai(self.request)
        self.assertEqual(len(oai['results']), 1)
        self.assertEqual(oai['results'][0]['uuid'], uuid)
    def test_oai_errors(self):
        self.request.matched_route = mock.Mock()
        self.request.matched_route.name = 'oai'
        from ...views.oai import oai
        # Invalid Verb
        self.request.GET = {'verb': 'FakeVerb'}
        oai_0 = oai(self.request)
        self.assertEqual(oai_0['error']['code'], 'badVerb')
        # Missing a required argument
        self.request.GET = {'verb': 'ListRecords'}
        oai_1 = oai(self.request)
        self.assertEqual(oai_1['error'], {'code': 'badArgument',
                                          'message': 'Required argument {} missing'.
                                          format(['metadataPrefix'])})
        # Invalid argument
        self.request.GET = {'verb': 'Identify', 'fake': 'test'}
        oai_2 = oai(self.request)
        self.assertEqual(oai_2['error'], {'code': 'badArgument',
                                          'message': 'Illegal arguments: {}'.
                                          format(['fake'])})
        # Invalid MetadataPrefix
        self.request.GET = {'verb': 'ListIdentifiers', 'metadataPrefix': 'fake'}
        oai_3 = oai(self.request)
        self.assertEqual(oai_3['error'], {'code': 'cannotDisseminateFormat',
                                          'message': 'metadataPrefix {} not supported'.
                                          format('fake')})
        # Invalid Identifier
        identifier = 'fake_identifier'
        self.request.GET = {'verb': 'GetRecord', 'metadataPrefix': 'cnx_dc',
                            'identifier': identifier}
        oai_4 = oai(self.request)
        self.assertEqual(oai_4['error'], {'code': 'idDoesNotExist',
                                          'message': "id does not exist {}".
                                          format(identifier)})
        # ListRecords no Records match
        self.request.GET = {'verb': 'ListRecords', 'metadataPrefix': 'oai_dc',
                            'from': '2017-01-02', 'until': '2017-01-01'}
        oai_5 = oai(self.request)
        self.assertEqual(oai_5['error'], {'code': 'noRecordsMatch',
                                          'message': 'No matches for the given request'})
        # ListIdentifiers no Records match
        # NOTE(review): the oai_6 name is reused for the next check too;
        # each assignment is asserted before being overwritten, so the
        # test is correct, just oddly numbered.
        self.request.GET = {'verb': 'ListIdentifiers', 'metadataPrefix': 'ims1_2_1',
                            'from': '2017-01-02', 'until': '2017-01-01'}
        oai_6 = oai(self.request)
        self.assertEqual(oai_6['error'], {'code': 'noRecordsMatch',
                                          'message': 'No matches for the given request'})
        # IdDoesNotExist error
        identifier = 'oai:{}:aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'.format(self.request.host)
        self.request.GET = {'verb': 'GetRecord', 'metadataPrefix': 'cnx_dc',
                            'identifier': identifier}
        oai_6 = oai(self.request)
        self.assertEqual(oai_6['error'], {'code': 'idDoesNotExist',
                                          'message': 'id does not exist {}'.
                                          format(identifier)})
|
from django.contrib import admin
from sorl.thumbnail import get_thumbnail
from sorl.thumbnail.admin import AdminImageMixin
from . import models
@admin.register(models.Slide)
class SlideAdmin(admin.ModelAdmin):
    # Changelist shows the linked object (generic relation) plus ordering
    # and visibility flags; filterable by active state.
    list_display = ('id', 'content_object', 'content_type', 'sort_order', 'is_active')
    list_filter = ('is_active', )
@admin.register(models.ImageContent)
class ImageContentAdmin(AdminImageMixin, admin.ModelAdmin):
    """Admin for image slides; shows a small preview in the changelist."""
    list_display = ('thumbnail', 'caption')
    search_fields = ('caption', )

    def thumbnail(self, obj):
        """Return an <img> tag with a 100x100 thumbnail, or a placeholder."""
        if not obj.image:
            return "NO IMAGE FOUND"
        im = get_thumbnail(obj.image, '100x100')
        return '<img src="%s" />' % ( im.url )
    thumbnail.allow_tags = True
@admin.register(models.QuoteContent)
class QuoteContentAdmin(admin.ModelAdmin):
    # Quote slides are searchable by both the quote body and attribution.
    search_fields = ('quote', 'attribution')
    list_display = ('id', 'quote', 'attribution')
|
import StringIO
import datetime
import urllib
from django.utils import simplejson
import re
import collections
import logging
from django.template.loader import render_to_string
from django.template import RequestContext
from django.core import serializers
from django.http import \
HttpResponse, HttpResponseRedirect, HttpResponseServerError, HttpResponsePermanentRedirect, HttpResponseBadRequest
from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.views.decorators.csrf import csrf_exempt
import django.views.generic
import mysite.base.controllers
import mysite.base.unicode_sanity
import mysite.profile.controllers
import mysite.base.helpers
from mysite.profile.models import \
Person, Tag, TagType, \
Link_Project_Tag, Link_Person_Tag, \
DataImportAttempt, PortfolioEntry, Citation
from mysite.search.models import Project
from mysite.base.decorators import view
import mysite.profile.forms
import mysite.profile.tasks
from mysite.base.helpers import render_response
@login_required
def add_citation_manually_do(request):
    # {{{
    """AJAX handler for the 'manually add a citation' form.

    Returns JSON with the originating form container id; on validation
    failure, returns HTTP 500 JSON carrying the error messages.
    """
    form = mysite.profile.forms.ManuallyAddACitationForm(request.POST)
    form.set_user(request.user)
    output = {
        'form_container_element_id': request.POST['form_container_element_id']
    }
    if form.is_valid():
        citation = form.save()
        # Manually added citations are published automatically.
        citation.is_published = True
        citation.save()
        json = simplejson.dumps(output)
        return HttpResponse(json, mimetype='application/json')
    else:
        # Each value in form.errors is a list-like ErrorList; iterate it
        # directly instead of the old eval(repr(...)) round-trip, which
        # could break on messages containing quotes.
        error_msgs = []
        for error in form.errors.values():
            error_msgs.extend([unicode(msg) for msg in error])
        output['error_msgs'] = error_msgs
        json = simplejson.dumps(output)
        return HttpResponseServerError(json, mimetype='application/json')
    #}}}
@view
def display_person_web(request, user_to_display__username=None):
    # {{{
    """Render the public profile page for the given username."""
    user = get_object_or_404(User, username=user_to_display__username)
    person, _created = Person.objects.get_or_create(user=user)
    data = get_personal_data(person)
    data.update({
        'edit_mode': False,
        'editable': request.user == user,
        'notifications': mysite.base.controllers.get_notification_from_request(request),
        'explain_to_anonymous_users': True,
        'how_many_archived_pf_entries':
            person.get_published_portfolio_entries().filter(is_archived=True).count(),
    })
    return (request, 'profile/main.html', data)
    # }}}
def get_personal_data(person):
    # {{{
    """Collect the template context common to a person's profile views."""
    tags = tags_dict_for_person(person)
    # Flattened form: tag-type name -> comma-separated tag texts.
    tags_flat = dict(
        (key, ', '.join(t.text for t in tags[key])) for key in tags)
    data_dict = {
        'person': person,
        'photo_url': person.get_photo_url_or_default(),
        'tags': tags,
        'tags_flat': tags_flat,
        'has_set_info': any(tags_flat.values()),
        'contact_blurb': mysite.base.controllers.put_forwarder_in_contact_blurb_if_they_want(person.contact_blurb, person.user),
        'projects_i_wanna_help': person.projects_i_wanna_help.all(),
    }
    return data_dict
    # }}}
def tags_dict_for_person(person):
    # {{{
    """Group a person's tags by their tag-type name (stable id order)."""
    grouped = collections.defaultdict(list)
    for link in Link_Person_Tag.objects.filter(person=person).order_by('id'):
        grouped[link.tag.tag_type.name].append(link.tag)
    return grouped
    # }}}
def widget_display_undecorated(request, user_to_display__username):
    """We leave this function unwrapped by @view """
    """so it can referenced by widget_display_string."""
    # {{{
    user = get_object_or_404(User, username=user_to_display__username)
    person = get_object_or_404(Person, user=user)
    data = get_personal_data(person)
    # Widgets are embedded on other sites, so links must be absolute.
    data.update(mysite.base.controllers.get_uri_metadata_for_generating_absolute_links(
        request))
    return (request, 'profile/widget.html', data)
    # }}}
# The @view-decorated variant used directly as a URL handler.
widget_display = view(widget_display_undecorated)
def widget_display_string(request, user_to_display__username):
    """Render the profile widget template to a plain HTML string."""
    request, template, data = widget_display_undecorated(request, user_to_display__username)
    return render_to_string(template, data)
def widget_display_js(request, user_to_display__username):
    """Serve the profile widget as a self-appending JavaScript document."""
    # FIXME: In the future, use:
    html_doc = widget_display_string(request, user_to_display__username)
    # to generate html_doc
    encoded_for_js = simplejson.dumps(html_doc)
    # Note: using application/javascript as suggested by
    # http://www.ietf.org/rfc/rfc4329.txt
    return render_response(request, 'base/append_ourselves.js',
                           {'in_string': encoded_for_js},
                           mimetype='application/javascript')
def add_one_debtag_to_project(project_name, tag_text):
    # {{{
    """Attach the Debtags tag *tag_text* to the project named *project_name*,
    creating the tag type, project and tag rows as needed.  Returns the new
    Link_Project_Tag row."""
    tag_type, _ = TagType.objects.get_or_create(name='Debtags')
    project, _ = Project.objects.get_or_create(name=project_name)
    tag, _ = Tag.objects.get_or_create(
        text=tag_text, tag_type=tag_type)
    # objects.create() already persists the row, so the explicit
    # new_link.save() that used to follow was a redundant second write.
    new_link = Link_Project_Tag.objects.create(
        tag=tag, project=project,
        source='Debtags')
    return new_link
def list_debtags_of_project(project_name):
    # {{{
    """Return the Debtags tag texts attached to *project_name*, or []
    when either the Debtags tag type or the project does not exist."""
    debtags_list = list(TagType.objects.filter(name='Debtags'))
    if not debtags_list:
        return []
    debtags = debtags_list[0]
    project_list = list(Project.objects.filter(name=project_name))
    if not project_list:
        return []
    project = project_list[0]
    # Local variable renamed: was misspelled 'resluts'.
    results = list(Link_Project_Tag.objects.filter(project=project,
                                                   tag__tag_type=debtags))
    return [link.tag.text for link in results]
    # }}}
def import_debtags(cooked_string = None):
    # {{{
    """Import Debtags data, either from *cooked_string* or by downloading
    the current tag dump from Alioth."""
    if cooked_string is None:
        # Warning: this re-downloads the list from Alioth every time this
        # is called
        import urllib2
        import gzip
        fd = urllib2.urlopen('http://debtags.alioth.debian.org/tags/tags-current.gz')
        gzipped_sio = StringIO.StringIO(fd.read()) # this sucks, but I
                                                   # can't stream to
                                                   # gzip.GzipFile because
                                                   # urlopen()'s result
                                                   # lacks tell()
        gunzipped = gzip.GzipFile(fileobj=gzipped_sio)
    else:
        gunzipped = StringIO.StringIO(cooked_string)
    # Each dump line looks like "package: tag1, tag2, ...".
    for line in gunzipped:
        if ':' in line:
            package, tagstring = line.split(':', 1)
            tags = map(lambda s: s.strip(), tagstring.split(','))
            for tag in tags:
                add_one_debtag_to_project(package, tag)
    # }}}
def _project_hash(project_name):
    # {{{
    """Return a salted sha256 hex digest of *project_name*, used as an
    opaque, stable identifier."""
    # This prefix is a sha256 of 1MiB of /dev/urandom
    PREFIX = '_project_hash_2136870e40a759b56b9ba97a0'
    PREFIX += 'd7f60b84dbc90097a32da284306e871105d96cd'
    import hashlib
    hashed = hashlib.sha256(PREFIX + project_name)
    return hashed.hexdigest()
    # }}}
@login_required
def edit_person_info_do(request):
    # {{{
    """Process the profile 'edit info' POST: homepage URL, bio, IRC nick,
    the five skill-tag fields, and the contact blurb.  Re-renders the edit
    form on validation errors, otherwise redirects to the profile page."""
    person = request.user.get_profile()
    edit_info_form = mysite.profile.forms.EditInfoForm(request.POST, prefix='edit-tags')
    contact_blurb_form = mysite.profile.forms.ContactBlurbForm(request.POST, prefix='edit-tags')
    contact_blurb_error = False
    errors_occurred = False
    # Grab the submitted homepage URL.
    if edit_info_form.is_valid():
        person.homepage_url = edit_info_form.cleaned_data['homepage_url']
    else:
        errors_occurred = True
    # grab their submitted bio
    person.bio = edit_info_form['bio'].data
    # grab the irc nick
    person.irc_nick = edit_info_form['irc_nick'].data
    # We can map from some strings to some TagTypes
    for known_tag_type_name in ('understands', 'understands_not',
                                'studying', 'can_pitch_in', 'can_mentor'):
        tag_type, _ = TagType.objects.get_or_create(name=known_tag_type_name)
        text = edit_info_form[known_tag_type_name].data or ''
        # Set the tags to this thing
        new_tag_texts_for_this_type_raw = text.split(',')
        new_tag_texts_for_this_type = [tag.strip()
                                       for tag in new_tag_texts_for_this_type_raw]
        # Now figure out what tags are in the DB
        old_tag_links = Link_Person_Tag.objects.filter(
            tag__tag_type=tag_type, person=person)
        # FIXME: Churn, baby churn
        for link in old_tag_links:
            link.delete()
        for tag_text in new_tag_texts_for_this_type:
            if not tag_text.strip(): # Don't save blank tags.
                continue
            # HACK: coerce byte strings to unicode (Python 2).
            if type(tag_text) == str:
                tag_text = unicode(tag_text, 'utf-8')
            # The following code gets the first matching tag or creates one. We
            # previously used a straight-up get_or_create, but the parameters
            # (name, tagtype) no longer uniquely select a tag. We get errors
            # like this: "MultipleObjectsReturned: get() returned more than one
            # Tag -- it returned 25! Lookup parameters were {'text__regex':
            # u'^fran\\\xe7ais$', 'tag_type': <TagType: understands>}" Our
            # data, as you can see, is not very healthy. But I don't think it
            # will make a difference.
            matching_tags = Tag.objects.filter(
                text__regex=r"^%s$" % re.escape(tag_text),
                tag_type=tag_type)
            if matching_tags:
                tag = matching_tags[0]
            else:
                tag = Tag.objects.create(tag_type=tag_type, text=tag_text)
            new_link, _ = Link_Person_Tag.objects.get_or_create(
                tag=tag, person=person)
    posted_contact_blurb = contact_blurb_form['contact_blurb'].data or ''
    # If their new contact blurb contains $fwd, but they don't have an email
    # address in our database, give them an error.
    if '$fwd' in posted_contact_blurb and not person.user.email:
        contact_blurb_error = True
        errors_occurred = True
    else:
        # if their new contact blurb contains $fwd and their old one didn't,
        # then make them a new forwarder
        if '$fwd' in posted_contact_blurb and not '$fwd' in person.contact_blurb:
            mysite.base.controllers.generate_forwarder(person.user)
        person.contact_blurb = posted_contact_blurb
    person.save()
    if errors_occurred:
        return edit_info(request,
                         edit_info_form=edit_info_form,
                         contact_blurb_form=contact_blurb_form,
                         contact_blurb_error=contact_blurb_error,
                         has_errors=errors_occurred)
    else:
        return HttpResponseRedirect(person.profile_url)
    # FIXME: This is racey. Only one of these functions should run at once.
    # }}}
@login_required
def ask_for_tag_input(request, username):
    """Show the person's profile page with the tag section in edit mode."""
    # {{{
    response = display_person_web(request, username, 'tags', edit='1')
    return response
    # }}}
def cut_list_of_people_in_three_columns(people):
    """Split *people* into three consecutive, roughly equal columns.

    Returns a list of three lists.  The first two columns each hold
    len(people) // 3 entries; the third absorbs any remainder.  Handles
    empty and short input (some columns may then be empty).
    """
    # Floor division keeps the index an int under both Python 2 (where
    # int / int already floors) and Python 3 (where it would be a float
    # and break slicing).
    third = len(people) // 3
    return [people[0:third], people[third:(third * 2)], people[(third * 2):]]
def cut_list_of_people_in_two_columns(people):
    """Split *people* into two consecutive, roughly equal columns.

    Returns a list of two lists; the second column absorbs the remainder
    for odd-length input.  Handles empty input (both columns empty).
    """
    # Floor division: identical to `/` on ints under Python 2, and safe
    # (still an int) under Python 3.
    half = len(people) // 2
    return [people[0:half], people[half:]]
def permanent_redirect_to_people_search(request, property, value):
    '''Property is the "tag name", and "value" is the text in it.'''
    # "seeking" is a legacy tag name; map it to the current one.
    if property == 'seeking':
        property = 'can_pitch_in'
    # Quote multi-word values so the search parser treats them as one term.
    escaped_value = '"' + value + '"' if ' ' in value else value
    get_args = {u'q': '%s:%s' % (property, escaped_value)}
    destination_url = '%s?%s' % (
        reverse('mysite.profile.views.people'),
        mysite.base.unicode_sanity.urlencode(get_args))
    return HttpResponsePermanentRedirect(destination_url)
@view
def people(request):
    """Display a list of people."""
    data = {}
    # The raw search string the visitor typed (may be empty).
    query = request.GET.get('q', '')
    data['raw_query'] = query
    # Parse the query and expose all parsed pieces to the template.
    parsed_query = mysite.profile.controllers.parse_string_query(query)
    data.update(parsed_query)
    # Pick the people to show: search results for a non-blank query,
    # otherwise everybody ordered by username.
    if parsed_query['q'].strip():
        search_results = parsed_query['callable_searcher']()
        everybody = search_results.people
        data.update(search_results.template_data)
    else:
        everybody = Person.objects.all().order_by('user__username')
    data['people'] = everybody
    # Compress the visible person ids into "lo-hi" ranges and JSON-encode
    # the compact string for the page's JavaScript.
    person_id_ranges = mysite.base.helpers.int_list2ranges(
        [person.id for person in data['people']])
    pieces = []
    for first, last in person_id_ranges:
        if first == last:
            pieces.append('%d,' % (first,))
        else:
            pieces.append('%d-%d,' % (first, last))
    data['person_ids'] = simplejson.dumps(''.join(pieces))
    return (request, 'profile/search_people.html', data)
def gimme_json_for_portfolio(request):
    """Return the JSON blob that live-updates the portfolio editor.

    The payload bundles the person's recent DataImportAttempts (plus an
    import-progress summary), serialized portfolio entries, projects,
    citations, per-citation rendered HTML summaries, and any queued
    user messages.
    """
    # This view is fetched asynchronously, so @login_required (which
    # redirects to the login page) would be useless; answer anonymous
    # requests with an explicit error instead.  The message is only
    # visible via Firebug or a synchronous request.
    if not request.user.is_authenticated():
        return HttpResponseServerError("Oops, you're not logged in.")
    person = request.user.get_profile()
    # Citations don't naturally serialize their summaries, so render one
    # HTML fragment per citation and ship those alongside the models.
    citation_objects = list(
        Citation.untrashed.filter(portfolio_entry__person=person))
    entry_queryset = PortfolioEntry.objects.filter(
        person=person, is_deleted=False)
    project_objects = [entry.project for entry in entry_queryset]
    summaries = {}
    for citation in citation_objects:
        summaries[citation.pk] = render_to_string(
            "profile/portfolio/citation_summary.html",
            {'citation': citation})
    # Import attempts from the last five minutes drive the progress bar.
    five_minutes_ago = datetime.datetime.utcnow() - datetime.timedelta(
        minutes=5)
    recent_dias = DataImportAttempt.objects.filter(
        person=person, date_created__gt=five_minutes_ago)
    # Round-trip through the Django serializer to obtain plain
    # dicts/lists that simplejson can embed in the final payload.
    recent_dias_json = simplejson.loads(
        serializers.serialize('json', recent_dias))
    portfolio_entries = simplejson.loads(
        serializers.serialize('json', entry_queryset))
    projects = simplejson.loads(
        serializers.serialize('json', project_objects))
    # FIXME: Don't send like all the flippin projects down the tubes.
    citations = simplejson.loads(
        serializers.serialize('json', citation_objects))
    # The import is "running" while some recent attempt is unfinished.
    completed_dias = recent_dias.filter(completed=True)
    import_running = recent_dias.count() > 0 and (
        completed_dias.count() != recent_dias.count())
    progress_percentage = 100
    if import_running:
        progress_percentage = int(
            completed_dias.count() * 100.0 / recent_dias.count())
    import_data = {
        'running': import_running,
        'progress_percentage': progress_percentage,
    }
    json = simplejson.dumps({
        'dias': recent_dias_json,
        'import': import_data,
        'citations': citations,
        'portfolio_entries': portfolio_entries,
        'projects': projects,
        'summaries': summaries,
        'messages': request.user.get_and_delete_messages(),
    })
    return HttpResponse(json, mimetype='application/json')
def replace_icon_with_default(request):
    "Expected postcondition: project's icon_dict says it is generic."
    """
    Expected output will look something like this:
    {
        'success': true,
        'portfolio_entry__pk': 0
    }"""
    # Scoping the lookup by person__user ensures the entry belongs to the
    # requesting user.
    portfolio_entry = PortfolioEntry.objects.get(
            pk=int(request.POST['portfolio_entry__pk']),
            person__user=request.user)
    # FIXME: test for naughty people trying to replace others' icons with the default!
    project = portfolio_entry.project
    # Keep a pristine copy of the project row so we can report the old
    # icon URL after the icons have been invalidated below.
    project_before_changes = mysite.search.models.Project.objects.get(pk=project.pk)
    # make a record of the old, wrong project icon in the database
    mysite.search.models.WrongIcon.spawn_from_project(project)
    try:
        wrong_icon_url = project_before_changes.icon_for_profile.url
    except ValueError:
        # The ImageField had no file attached; fall back to a placeholder
        # string for the notification email.
        wrong_icon_url = "icon_url"
    # set project icon as default
    project.invalidate_all_icons()
    project.save()
    # email all@ letting them know that we did so
    from mysite.project.tasks import send_email_to_all_because_project_icon_was_marked_as_wrong
    # Fired as an async (celery) task so the request doesn't block on email.
    send_email_to_all_because_project_icon_was_marked_as_wrong.delay(
            project__pk=project_before_changes.pk,
            project__name=project_before_changes.name,
            project_icon_url=wrong_icon_url)
    # prepare output
    data = {}
    data['success'] = True
    data['portfolio_entry__pk'] = portfolio_entry.pk
    return mysite.base.helpers.json_response(data)
@login_required
@csrf_exempt
def prepare_data_import_attempts_do(request):
    """Create DataImportAttempts for the identifiers POSTed by the user.

    The POST values are usernames or email addresses under which the
    user says she has committed code.  Always responds '1'.
    Not yet implemented: pre-checking with the remote networks that the
    accounts actually exist before bothering the user with DIAs.
    """
    # {{{
    # For each commit identifier, prepare some DataImportAttempts.
    identifiers = request.POST.values()
    prepare_data_import_attempts(identifiers=identifiers, user=request.user)
    return HttpResponse('1')
    # }}}
def prepare_data_import_attempts(identifiers, user):
    """Enqueue and track importation tasks.

    For every non-blank identifier (e.g. 'paulproteus',
    'asheesh@asheesh.org'), create one DataImportAttempt per data
    source, save it, and start its background task.
    """
    for identifier in identifiers:
        if not identifier.strip():
            continue  # skip blank / whitespace-only identifiers
        for source_key, _unused in DataImportAttempt.SOURCE_CHOICES:
            dia = DataImportAttempt(query=identifier,
                                    source=source_key,
                                    person=user.get_profile())
            dia.save()
            dia.do_what_it_says_on_the_tin()
@login_required
@view
def importer(request, test_js = False):
    """Render the importer page with the logged-in user's DIAs."""
    # {{{
    person = request.user.get_profile()
    data = get_personal_data(person)
    # A blank ManuallyAddACitationForm is printed at the bottom of the
    # importer page; its HTML also serves as the template from which the
    # page's JavaScript builds further forms dynamically.
    data['citation_form'] = mysite.profile.forms.ManuallyAddACitationForm(auto_id=False)
    # Checked in base/templates/base/base.html to enable the JS tests.
    data['test_js'] = test_js or request.GET.get('test', None)
    return (request, 'profile/importer.html', data)
    # }}}
# The importer view doubles as the portfolio editor.
portfolio_editor = importer
def portfolio_editor_test(request):
    """Portfolio editor with the JavaScript test harness switched on."""
    return portfolio_editor(request, test_js=True)
def filter_by_key_prefix(dict, prefix):
    """Return a new dict with only the items whose key starts with *prefix*."""
    # NB: the parameter shadows the builtin `dict`, so the result is
    # built by hand rather than via the dict() constructor.
    filtered = {}
    for key in dict:
        if key.startswith(prefix):
            filtered[key] = dict[key]
    return filtered
@login_required
def user_selected_these_dia_checkboxes(request):
    """ Input: Request POST contains a list of checkbox IDs corresponding to DIAs.
    Side-effect: Make a note on the DIA that its affiliated person wants it.
    Output: Success?
    """
    # {{{
    prepare_data_import_attempts(request.POST, request.user)
    checkboxes = filter_by_key_prefix(request.POST, "person_wants_")
    identifiers = filter_by_key_prefix(request.POST, "identifier_")
    for checkbox_id, value in checkboxes.items():
        if value == 'on':
            # Checkbox ids look like "person_wants_<index>_<source>".
            x, y, identifier_index, source_key = checkbox_id.split('_')
            identifier = identifiers["identifier_%s" % identifier_index]
            if identifier:
                # FIXME: For security, ought this filter include only dias
                # associated with the logged-in user's profile?
                # Use keyword arguments here: positional arguments to a
                # Django model constructor are matched against the model's
                # field order (starting with the auto pk), so the previous
                # positional call mis-assigned every value.  This matches
                # the keyword style used in prepare_data_import_attempts.
                dia = DataImportAttempt(
                    query=identifier,
                    source=source_key,
                    person=request.user.get_profile())
                dia.person_wants_data = True
                dia.save()
                dia.do_what_it_says_on_the_tin()
                # There may be data waiting or not,
                # but no matter; this function may
                # run unconditionally.
                dia.give_data_to_person()
    return HttpResponse('1')
    # }}}
@login_required
@view
def display_person_edit_name(request, name_edit_mode):
    '''Show a little edit form for first name and last name.
    Why separately handle first and last names? The Django user
    model already stores them separately.
    '''
    # {{{
    template_data = get_personal_data(request.user.get_profile())
    template_data.update({
        'name_edit_mode': name_edit_mode,
        'editable': True,
    })
    return (request, 'profile/main.html', template_data)
    # }}}
@login_required
def display_person_edit_name_do(request):
    '''Persist the POSTed first and last name onto the Django user,
    then redirect to the person's profile page.'''
    # {{{
    user = request.user
    user.first_name = request.POST['first_name']
    user.last_name = request.POST['last_name']
    user.save()
    return HttpResponseRedirect('/people/%s' % urllib.quote(user.username))
    # }}}
@login_required
def publish_citation_do(request):
    """Mark one of the requesting user's citations as published.

    Responds "1" on success, "0" when the pk is missing or matches no
    citation owned by this user.
    """
    pk = request.POST.get('pk')
    if pk is None:
        return HttpResponse("0")
    try:
        citation = Citation.objects.get(
            pk=pk, portfolio_entry__person__user=request.user)
    except Citation.DoesNotExist:
        return HttpResponse("0")
    citation.is_published = True
    citation.save()
    return HttpResponse("1")
@login_required
def delete_citation_do(request):
    """Soft-delete one of the requesting user's citations.

    Responds "1" on success, "0" when the pk is missing or matches no
    citation owned by this user.
    """
    pk = request.POST.get('citation__pk')
    if pk is None:
        return HttpResponse("0")
    try:
        citation = Citation.objects.get(
            pk=pk, portfolio_entry__person__user=request.user)
    except Citation.DoesNotExist:
        return HttpResponse("0")
    citation.is_deleted = True
    citation.save()
    return HttpResponse("1")
@login_required
def delete_portfolio_entry_do(request):
    """Soft-delete one of the requesting user's portfolio entries.

    Returns JSON: {'success': True, 'portfolio_entry__pk': pk} on
    success; {'success': False} when the pk is missing, malformed, or
    matches no entry owned by this user.
    """
    try:
        pk = int(request.POST['portfolio_entry__pk'])
    except (KeyError, ValueError):
        # KeyError: pk absent from the POST.
        # ValueError: pk present but not an integer; previously this
        # escaped as an unhandled exception (HTTP 500).
        return mysite.base.helpers.json_response({'success': False})
    try:
        p = PortfolioEntry.objects.get(pk=pk, person__user=request.user)
    except PortfolioEntry.DoesNotExist:
        return mysite.base.helpers.json_response({'success': False})
    p.is_deleted = True
    p.save()
    return mysite.base.helpers.json_response({
        'success': True,
        'portfolio_entry__pk': pk})
@login_required
def save_portfolio_entry_do(request):
    """Create or update a PortfolioEntry from the editor's POST data.

    POST fields: portfolio_entry__pk ('undefined' to create a new entry),
    project_name (used only on create), project_description,
    experience_description, receive_maintainer_updates, and
    pf_entry_element_id (echoed back for the JavaScript caller).
    Publishing the entry also publishes all of its citations.
    """
    pk = request.POST['portfolio_entry__pk']
    if pk == 'undefined':
        # New entry: find or create the named project first.
        project, _ = Project.objects.get_or_create(name=request.POST['project_name'])
        p = PortfolioEntry(project=project, person=request.user.get_profile())
    else:
        # Existing entry: scoping by person__user means only the owner
        # can edit it.
        p = PortfolioEntry.objects.get(pk=pk, person__user=request.user)
    p.project_description = request.POST['project_description']
    p.experience_description = request.POST['experience_description']
    # Any posted value other than 'false'/'0' (case-insensitive) is True.
    p.receive_maintainer_updates = \
        request.POST['receive_maintainer_updates'].lower() not in ('false', '0')
    p.is_published = True
    p.save()
    # Publish all attached Citations
    citations = Citation.objects.filter(portfolio_entry=p)
    for c in citations:
        c.is_published = True
        c.save()
    return mysite.base.helpers.json_response({
        'success': True,
        'pf_entry_element_id': request.POST['pf_entry_element_id'],
        'project__pk': p.project_id,
        'portfolio_entry__pk': p.pk
    })
@login_required
def dollar_username(request):
    """Redirect the logged-in user to her own profile page."""
    url = reverse(display_person_web,
                  kwargs={'user_to_display__username': request.user.username})
    return HttpResponseRedirect(url)
@login_required
def set_expand_next_steps_do(request):
    """Persist the "expand next steps" boolean posted as 'value'."""
    value_map = {'True': True,
                 'False': False}
    posted = request.POST.get('value', None)
    # Anything other than the literal strings 'True'/'False' is rejected.
    if posted not in value_map:
        return HttpResponseBadRequest("Bad POST.")
    person = request.user.get_profile()
    person.expand_next_steps = value_map[posted]
    person.save()
    return HttpResponseRedirect(person.profile_url)
@login_required
@view
def edit_info(request, contact_blurb_error=False, edit_info_form=None, contact_blurb_form=None, has_errors=False):
    """Render the profile "edit info" page.

    After a failed submission the caller passes in the bound forms plus
    error flags; otherwise fresh forms are pre-filled from the person's
    saved bio, links, tags and contact blurb.
    """
    person = request.user.get_profile()
    data = get_personal_data(person)
    data['info_edit_mode'] = True
    if edit_info_form is None:
        # The tag fields come from the flattened tag dictionary that
        # get_personal_data put into data['tags_flat'].
        edit_info_form = mysite.profile.forms.EditInfoForm(initial={
            'bio': person.bio,
            'homepage_url': person.homepage_url,
            'irc_nick': person.irc_nick,
            'understands': data['tags_flat'].get('understands', ''),
            'understands_not': data['tags_flat'].get('understands_not', ''),
            'studying': data['tags_flat'].get('studying', ''),
            'can_pitch_in': data['tags_flat'].get('can_pitch_in', ''),
            'can_mentor': data['tags_flat'].get('can_mentor', ''),
        }, prefix='edit-tags')
    if contact_blurb_form is None:
        contact_blurb_form = mysite.profile.forms.ContactBlurbForm(initial={
            'contact_blurb': person.contact_blurb,
        }, prefix='edit-tags')
    data['form'] = edit_info_form
    data['contact_blurb_form'] = contact_blurb_form
    data['contact_blurb_error'] = contact_blurb_error
    # Show the user what their "$fwd" token would expand to.
    data['forwarder_sample'] = mysite.base.controllers.put_forwarder_in_contact_blurb_if_they_want("$fwd", person.user)
    data['has_errors'] = has_errors
    return request, 'profile/info_wrapper.html', data
@login_required
def set_pfentries_dot_use_my_description_do(request):
    """Save the "use my description" choice for a project's portfolio entries.

    The POST carries one prefixed sub-form per PortfolioEntry of the
    given project; each valid sub-form is saved and the change logged.
    Redirects back to the project page.
    """
    project = Project.objects.get(pk=request.POST['project_pk'])
    pfe_pks = project.portfolioentry_set.values_list('pk', flat=True)
    Form = mysite.profile.forms.UseDescriptionFromThisPortfolioEntryForm
    for pfe_pk in pfe_pks:
        # Fetch the row first so the log line below can show its pre-save
        # state next to the post-save state.
        pfe_before_save = PortfolioEntry.objects.get(pk=pfe_pk)
        # Each sub-form in the POST is namespaced by the entry's pk.
        form = Form(request.POST,
                    instance=pfe_before_save,
                    prefix=str(pfe_pk))
        if form.is_valid():
            pfe_after_save = form.save()
            logging.info("Project description settings edit: %s just edited a project. The portfolioentry's data originally read as follows: %s. Its data now read as follows: %s" % (
                request.user.get_profile(), pfe_before_save.__dict__, pfe_after_save.__dict__))
    return HttpResponseRedirect(project.get_url())
@view
def unsubscribe(request, token_string):
    """Render the unsubscribe confirmation page for a token."""
    person = mysite.profile.models.UnsubscribeToken.whose_token_string_is_this(
        token_string)
    context = {
        'unsubscribe_this_user': person,
        'token_string': token_string,
    }
    return (request, 'unsubscribe.html', context)
def unsubscribe_do(request):
    """Handle the unsubscribe form: stop the weekly project emails.

    Looks up the person via the posted token string, clears the weekly
    email flag, and redirects to the unsubscribe confirmation page.
    NOTE(review): an invalid or expired token presumably makes the
    lookup return None, which would crash the attribute assignment
    below -- confirm the token is always validated upstream.
    """
    token_string = request.POST.get('token_string', None)
    person = mysite.profile.models.UnsubscribeToken.whose_token_string_is_this(token_string)
    person.email_me_weekly_re_projects = False
    person.save()
    return HttpResponseRedirect(reverse(unsubscribe, kwargs={'token_string': token_string}))
@login_required
def bug_recommendation_list_as_template_fragment(request):
    """Return recommended bugs for the user as a JSON-wrapped HTML fragment.

    Uses the profile's recommended search terms to pick up to 5 bugs.
    The response is {'result': 'OK', 'html': ...} when bugs were found,
    or {'result': 'NO_BUGS'} otherwise.
    """
    suggested_searches = request.user.get_profile().get_recommended_search_terms()
    recommender = mysite.profile.controllers.RecommendBugs(
        suggested_searches, n=5)
    # Materialize the recommendations so the emptiness test below and the
    # template rendering both see the same data.
    recommended_bugs = list(recommender.recommend())
    response_data = {}
    if recommended_bugs:
        response_data['result'] = 'OK'
        template_path = 'base/recommended_bugs_content.html'
        context = RequestContext(request, { 'recommended_bugs': recommended_bugs })
        response_data['html'] = render_to_string(template_path, context)
    else:
        response_data['result'] = 'NO_BUGS'
    return HttpResponse(simplejson.dumps(response_data), mimetype='application/json')
class LocationDataApiView(django.views.generic.View):
    """JSON API mapping person ids to name/location/photo data."""

    ### Entry point for requests from the web
    def get(self, request):
        """Serve location data for the ids named in ?person_ids=..."""
        ids = self.extract_person_ids(request.GET)
        payload = simplejson.dumps(self.raw_data_for_person_ids(ids))
        return HttpResponse(payload, mimetype='application/javascript')

    ### Helper functions
    @staticmethod
    def raw_data_for_person_ids(person_ids):
        """Fetch the matching Person rows and serialize each of them."""
        people = mysite.profile.models.Person.objects.filter(
            id__in=person_ids).select_related()
        return LocationDataApiView.raw_data_for_person_collection(people)

    @staticmethod
    def raw_data_for_person_collection(people):
        """Map each person's pk to that person's serialized data."""
        return dict(
            (person.pk, LocationDataApiView.raw_data_for_one_person(person))
            for person in people)

    @staticmethod
    def raw_data_for_one_person(person):
        """Serialize one Person into the dict the front-end expects."""
        location = person.get_public_location_or_default()
        return {
            'name': person.get_full_name_or_username(),
            'location': location,
            'lat_long_data': {
                # Getting the default location back means the person hid
                # (or never set) a real position.
                'is_inaccessible': (
                    location == mysite.profile.models.DEFAULT_LOCATION),
                'latitude': person.get_public_latitude_or_default(),
                'longitude': person.get_public_longitude_or_default(),
            },
            'extra_person_info': {
                'username': person.user.username,
                'photo_thumbnail_url': person.get_photo_url_or_default(),
            },
        }

    @staticmethod
    def range_from_string(s):
        """Parse "17-21" into the ids 17..21; return None if malformed."""
        parts = s.split('-')
        if len(parts) != 2:
            return None
        try:
            low, high = [int(part) for part in parts]
        except ValueError:
            return None
        return range(low, high + 1)

    @staticmethod
    def extract_person_ids(get_data):
        """Turn GET's person_ids value (e.g. "1,3-5,9,") into a set of ints."""
        raw = get_data.get('person_ids', '')
        ids = set()
        if not raw:
            return ids
        for token in raw.split(','):
            if '-' in token:
                expanded = LocationDataApiView.range_from_string(token)
                if expanded is not None:
                    ids.update(expanded)
                    continue
                # Malformed range: fall through and let int() reject it.
            try:
                ids.add(int(token))
            except ValueError:
                continue
        return ids
|
"""View for viewing all coding jobs (for a user)"""
from django.shortcuts import render
from api.rest.datatable import Datatable
from navigator.utils.auth import check
from amcat.models.user import User
CODINGJOB_MENU=None
from api.rest.resources import CodingJobResource
@check(User, args='coder_id', args_map={'coder_id' : 'id'})
def index(request, coder=None):
    """
    Show unfinished jobs
    """
    is_firefox = "Firefox" in request.META["HTTP_USER_AGENT"]
    coder = coder if coder is not None else request.user

    jobs = Datatable(CodingJobResource, rowlink='/annotator/codingjob/{id}')
    #jobs = jobs.filter(coder=coder).hide('coder',)#.filter(status='unfinished')

    ctx = locals()
    ctx.update({
        'menu' : CODINGJOB_MENU,
        'context' : coder,
        'selected' : 'unfinished jobs'
    })
    # Pass ctx, not a fresh locals() snapshot: calling locals() again
    # rebuilds the mapping from the actual local variables, silently
    # dropping the 'menu'/'context'/'selected' keys added above.
    return render(request, 'codingjobs.html', ctx)
|
"""
This module contains helper functions used in HTML application templates.
An ``helper`` object linked to the application is created by this module to be used in all
the application.
"""
import re
import time
import traceback
from logging import getLogger, INFO
from alignak_webui import get_app_config
logger = getLogger(__name__)
logger.setLevel(INFO)
class Helper(object):
"""
Helper functions
"""
    def __init__(self):
        """ Empty ... """
        # No per-instance state: the methods defined on this helper are
        # static/class methods, so instantiation exists only for template
        # convenience.
        # self.config = get_app_config()
@staticmethod
def print_date(timestamp, fmt='%Y-%m-%d %H:%M:%S'):
"""
For a unix timestamp return something like
2015-09-18 00:00:00
Returns n/a if provided timestamp is not valid
:param timestamp: unix timestamp
:type timestamp: long int
:param fmt: python date/time format string
:type fmt: sting
:return: formatted date
:rtype: string
"""
if not timestamp:
return 'n/a'
if fmt:
return time.strftime(fmt, time.localtime(timestamp))
else:
return time.asctime(time.localtime(timestamp))
    @staticmethod
    def print_duration(timestamp, duration_only=False, x_elts=0, ts_is_duration=False):
        """
        For a unix timestamp return something like
        1h 15m 12s
        Returns n/a if provided timestamp is not valid
        Returns:
        in 1h 15m 12s
        Now
        1h 15m 12s ago
        Returns 1h 15m 12s if only_duration is True
        :param ts_is_duration: when True, *timestamp* is already a duration
        in seconds rather than an absolute point in time
        :param x_elts: keep at most this many leading units (0 = all)
        :param duration_only: omit the "in" / "ago" wording
        :param timestamp: unix timestamp
        :type timestamp: long int
        :return: formatted date
        :rtype: string
        """
        # NOTE(review): `_` is presumably a gettext-style translation
        # function installed globally by the application -- confirm.
        if not timestamp:
            return 'n/a'
        # Get the seconds elapsed since the timestamp
        seconds = timestamp
        if not ts_is_duration:
            seconds = int(time.time()) - int(timestamp)
        # If it's now, say it :)
        if seconds < 3:
            if 0 > seconds > -4:
                return _('Very soon')
            if seconds >= 0:
                return _('Just now')
        in_future = False
        # Remember if it's in the future or not
        if seconds < 0:
            in_future = True
        # Now manage all case like in the past
        seconds = abs(seconds)
        seconds = int(round(seconds))
        # Decompose into units.  NB: months are approximated as 4 weeks
        # and years as 12 such months, so long durations are only rough.
        minutes, seconds = divmod(seconds, 60)
        hours, minutes = divmod(minutes, 60)
        days, hours = divmod(hours, 24)
        weeks, days = divmod(days, 7)
        months, weeks = divmod(weeks, 4)
        years, months = divmod(months, 12)
        minutes = int(minutes)
        hours = int(hours)
        days = int(days)
        weeks = int(weeks)
        months = int(months)
        years = int(years)
        # Collect the non-zero units, largest first; a non-zero year count
        # suppresses all of the finer-grained units.
        duration = []
        if years > 0:
            duration.append(_('%dy') % years)
        else:
            if months > 0:
                duration.append(_('%dM') % months)
            if weeks > 0:
                duration.append(_('%dw') % weeks)
            if days > 0:
                duration.append(_('%dd') % days)
            if hours > 0:
                duration.append(_('%dh') % hours)
            if minutes > 0:
                duration.append(_('%dm') % minutes)
            if seconds > 0:
                duration.append(_('%ds') % seconds)
        # Now filter the number of printed elements if ask
        if x_elts >= 1:
            duration = duration[:x_elts]
        # Maybe the user just wants the duration
        if duration_only:
            return ' '.join(duration)
        # Now manage the future or not print
        if in_future:
            return _('in ') + ' '.join(duration)
        else:
            return _(' ') + ' '.join(duration) + _(' ago')
@staticmethod
def get_on_off(status=False, title=None, message=''):
"""
Give an enabled/disabled state based on glyphicons with optional title and message
"""
if not title:
title = _('Enabled') if status else _('Disabled')
if isinstance(title, list):
if status:
title = title[0]
else:
title = title[1]
if status:
return '''<i title="%s" class="fa fa-fw fa-check text-success">%s</i>''' % (
title, message
)
else:
return '''<i title="%s" class="fa fa-fw fa-close text-danger">%s</i>''' % (
title, message
)
@staticmethod
def get_html_business_impact(business_impact, icon=True, text=False):
"""
Give a business impact as text and stars if needed.
If text=True, returns text+stars, else returns stars only ...
"""
if not 0 <= business_impact <= 5:
return 'n/a - value'
if not icon and not text:
return 'n/a - parameters'
bi_texts = {
0: _('None'),
1: _('Low'),
2: _('Normal'),
3: _('Important'),
4: _('Very important'),
5: _('Business critical')
}
# nb_stars = max(0, business_impact - 2)
stars = '<i class="fa fa-star"></i>' * business_impact
if not text:
return stars
if not icon:
return bi_texts.get(business_impact, _('Unknown'))
text = "%s %s" % (bi_texts.get(business_impact, _('Unknown')), stars)
return text.strip()
    @staticmethod
    def get_urls(obj, url, default_title="Url", default_icon="globe", popover=False):
        """
        Returns formatted HTML for an element URL
        url string may contain a list of urls separated by | (pipe symbol)
        Each url may be formatted as:
        - url,,description
        - title::description,,url
        - title,,icon::description,,url
        description is optional
        If title is not specified, default_title is used as title
        If icon is not specified, default_icon is used as icon
        If popover is true, a bootstrap popover is built, else a standard link ...
        """
        logger.debug(
            "get_urls: %s / %s / %s / %d", url, default_title, default_icon, popover
        )
        result = []
        for item in url.split('|'):
            # Split "title::rest"; entries without '::' fall back to the
            # default title and icon.
            try:
                (title, url) = item.split('::')
            except Exception:
                title = "%s,,%s" % (default_title, default_icon)
                url = item
            # The title part may carry its own icon as "title,,icon".
            try:
                (title, icon) = title.split(',,')
            except Exception:
                icon = default_icon
            # The url part may carry a description as "description,,url".
            try:
                (description, real_url) = url.split(',,')
            except Exception:
                description = 'No description provided'
                real_url = url
            # Replace MACROS in url and description
            # NOTE(review): the MacroResolver calls are disabled; only the
            # raw real_url is kept when the object supports check data.
            if hasattr(obj, 'get_data_for_checks'):
                # url = MacroResolver().resolve_simple_macros_in_string(
                #     real_url, obj.get_data_for_checks()
                # )
                url = real_url
                # description = MacroResolver().resolve_simple_macros_in_string(
                #     description, obj.get_data_for_checks()
                # )
            logger.debug("get_urls, found: %s / %s / %s / %s", title, icon, url, description)
            # Build either a bootstrap popover or a plain link; an empty
            # url yields a non-clickable span in both modes.
            if popover:
                if url != '':
                    result.append(
                        '<a href="%s" target="_blank" role="button" data-toggle="popover urls" '
                        'data-container="body" '
                        'data-html="true" data-content="%s" data-trigger="hover focus" '
                        'data-placement="bottom"><i class="fa fa-%s"></i> %s</a>' % (
                            url, description, icon, title
                        )
                    )
                else:
                    result.append(
                        '<span data-toggle="popover urls" data-html="true" data-content="%s" '
                        'data-container="body" '
                        'data-trigger="hover focus" data-placement="bottom">'
                        '<i class="fa fa-%s"></i> %s</span>''' % (
                            description, icon, title
                        )
                    )
            else:
                if url != '':
                    result.append(
                        '<a href="%s" target="_blank" title="%s">'
                        '<i class="fa fa-%s"></i> %s</a>' % (
                            url, description, icon, title
                        )
                    )
                else:
                    result.append(
                        '<span title="%s"><i class="fa fa-%s"></i> %s</span>' % (
                            description, icon, title
                        )
                    )
        return result
@staticmethod
def get_element_actions_url(obj, default_title="Url", default_icon="globe", popover=False):
"""
Return list of element action urls
"""
if obj is not None:
return Helper.get_urls(
obj, obj.action_url,
default_title=default_title, default_icon=default_icon, popover=popover
)
return None
@staticmethod
def get_element_notes_url(obj, default_title="Url", default_icon="globe", popover=False):
"""
Return list of element notes urls
"""
if obj is not None and obj.notes:
notes = []
i = 0
for item in obj.notes.split('|'):
if not obj.notes_url:
notes.append("%s,," % item)
else:
notes_url = obj.notes_url.split('|')
if len(notes_url) > i:
notes.append("%s,,%s" % (item, notes_url[i]))
else:
notes.append("%s,," % item)
i += 1
logger.debug("get_element_notes_url, note: %s", notes)
return Helper.get_urls(
obj, '|'.join(notes),
default_title=default_title, default_icon=default_icon, popover=popover
)
return []
    @staticmethod
    def decode_search(search):
        """
        Convert string from:
        isnot:0 isnot:ack isnot:"downtime fred" name "vm fred"
        to:
        {
        'isnot': 0,
        'isnot':'ack',
        'name': name,
        'name': 'vm fred'
        }
        :search: Search string
        :returns: list of matching items
        """
        logger.debug("decode_search, search string:%s", search)
        # Search patterns like: isnot:0 isnot:ack isnot:"downtime test" name "vm test"
        regex = re.compile(
            r"""
                                # 1/ Search a key:value pattern.
                (?P<key>\w+):   # Key consists of only a word followed by a colon
                (?P<quote2>["']?)   # Optional quote character.
                (?P<value>.*?)      # Value is a non greedy match
                (?P=quote2)         # Closing quote equals the first.
                ($|\s)              # Entry ends with whitespace or end of string
                |                   # OR
                                # 2/ Search a single string quoted or not
                (?P<quote>["']?)    # Optional quote character.
                (?P<name>.*?)       # Name is a non greedy match
                (?P=quote)          # Closing quote equals the opening one.
                ($|\s)              # Entry ends with whitespace or end of string
            """,
            re.VERBOSE
        )
        # First pass: collect (key, value) pairs; bare words/quoted
        # strings are recorded under the pseudo-key 'name'.
        patterns = []
        for match in regex.finditer(search):
            if match.group('name'):
                patterns.append(('name', match.group('name')))
            elif match.group('key'):
                patterns.append((match.group('key'), match.group('value')))
        logger.debug("decode_search, search patterns: %s", patterns)
        # Second pass: turn the patterns into a MongoDB-style filter.
        # A '|' in a value becomes an "$in" list; a leading '!' becomes
        # "$ne".  NB: because everything is merged into one flat dict,
        # a later pattern with the same resulting key overwrites an
        # earlier one.
        parameters = {}
        for t, s in patterns:
            t = t.lower()
            logger.debug("decode_search, searching for %s %s", t, s)
            if '|' in s:
                s = {t: s.split('|')}
                t = "$in"
            elif s.startswith('!'):
                s = {t: s[1:]}
                t = "$ne"
            parameters.update({t: s})
        logger.debug("decode_search, parameters: %s", parameters)
        return parameters
    @staticmethod
    def get_pagination_control(page_url, total, start=0, count=25, nb_max_items=5):
        """
        Build page navigation buttons as a list of elements containing:
        - button label
        - start element (None to create a disabled element)
        - count of elements
        - total number of elements
        - active element (True / False)
        The first element in the list contains:
        - page_url, the current page main URL
        - start
        - count
        - total
        Note that nb_max_items should be an odd number ... it will have a better look ;)
        The list contains:
        - fast forward and forward buttons if more than nb_max_items are left-hidden
        - fast backward and backward buttons if more than nb_max_items are right-hidden
        - nb_max_items page buttons to build a direct link to the corresponding pages
        """
        # NOTE(review): the `nb_max_items / 2` expressions below rely on
        # Python 2 integer division; under Python 3 they would produce
        # floats -- confirm before porting.
        # Degenerate request: no items or no page size -> only the header.
        if count <= 0 or total <= 0:
            return [(page_url, start, count, total, False)]
        max_page = (total // count) + 1
        current_page = (start // count) + 1
        logger.debug(
            "Get navigation controls, total: %d, start: %d, count: %d, max items: %d",
            total, start, count, nb_max_items
        )
        logger.debug(
            "Get navigation controls, page: %d, count: %d, max page: %d",
            current_page, count, max_page
        )
        # First element contains pagination global data
        res = [(page_url, start, count, total, False)]
        # Backward controls appear once enough pages are hidden on the left.
        if current_page > (nb_max_items / 2) + 1:
            # First page
            start = 0
            res.append(
                (_('<i class="fa fa-fast-backward"></i>'), start, count, total, False)
            )
            # Previous pages sequence
            start = int((current_page - nb_max_items - 1) * count)
            res.append(
                (_('<i class="fa fa-backward"></i>'), start, count, total, False)
            )
        # Window of direct page links centered (roughly) on the current page.
        start_page = max(1, current_page - (nb_max_items / 2) + 1)
        end_page = min(start_page + nb_max_items - 1, max_page)
        if end_page == max_page and (end_page - start_page) < nb_max_items:
            start_page = max(1, end_page - nb_max_items)
        logger.debug(
            "Get navigation controls, page sequence, from: %d to %d",
            start_page, end_page
        )
        page = start_page
        while page < end_page + 1:
            active = (page == current_page)
            res.append(
                (_('%d') % page, (page - 1) * count, count, total, active)
            )
            if (page * count) + 1 > total:
                break
            page += 1
        # Forward controls appear once enough pages are hidden on the right.
        if current_page < max_page - ((nb_max_items / 2) + 1):
            # Next pages sequence
            start = int((current_page + nb_max_items - 1) * count)
            res.append(
                (_('<i class="fa fa-forward"></i>'), start, count, total, False)
            )
            # Last page
            start = int((max_page - 1) * count)
            res.append(
                (_('<i class="fa fa-fast-forward"></i>'), start, count, total, False)
            )
        return res
@staticmethod
def get_html_timeperiod(tp, title=None):
"""
Build an html definition list for the timeperiod date ranges and exclusions.
"""
if tp is None or len(tp.dateranges) == 0:
return ''
content = '<button class="btn btn-default btn-xs btn-block" type="button"' \
'data-toggle="collapse" data-target="#html_tp_%s" aria-expanded="false" ' \
'aria-controls="html_tp_%s">%s</button>' \
'<div class="collapse" id="html_tp_%s"><div class="well">' % (
tp.id, tp.id, tp.name if not title else title, tp.id
)
# Build the included list ...
if tp.dateranges:
content += '''<ul class="list-group">'''
for daterange in tp.dateranges:
for key in daterange.keys():
content += \
'<li class="list-group-item">'\
'<span class="fa fa-check"> %s - %s</li>' % (
key, daterange[key]
)
content += '''</ul>'''
# Build the excluded list ...
if tp.exclude:
content += '<ul class="list-group">'
for daterange in tp.exclude:
for key in daterange.keys():
content += \
'<li class="list-group-item">'\
'<span class="fa fa-close"> %s - %s</li>''' % (
key, daterange[key]
)
content += '</ul>'
content += '</div></div>'
return content
@staticmethod
def get_html_item_list(object_id, object_type, objects_list, title=None, max_items=10):
"""
Build an html definition list for the items list
"""
if not objects_list or not isinstance(objects_list, list):
return ''
# Get global configuration
app_config = get_app_config()
content = ''
if len(objects_list) == 1:
item = objects_list[0]
list_item = app_config.get('tables.lists.unique')
if isinstance(item, basestring):
content = list_item.replace("##content##", item)
elif isinstance(item, dict):
content = list_item.replace("##content##", str(item))
elif hasattr(item, '_type'):
content = list_item.replace("##content##", item.get_html_state_link())
else:
content = list_item.replace("##content##", item)
return content
button = app_config.get('tables.lists.button')
button = button.replace("##id##", object_id)
button = button.replace("##type##", object_type)
button = button.replace("##title##", object_type if not title else title)
items_list = app_config.get('tables.lists.list')
if len(objects_list) > max_items:
objects_list = objects_list[:max_items]
list_item = app_config.get('tables.lists.item')
content += list_item.replace("##content##", _('Only %d items...') % max_items)
content += list_item.replace("##content##", _('-- / --'))
for item in objects_list:
list_item = app_config.get('tables.lists.item')
if isinstance(item, basestring):
content += list_item.replace("##content##", item)
elif isinstance(item, dict):
content += list_item.replace("##content##", str(item))
elif hasattr(item, '_type'):
content = list_item.replace("##content##", item.get_html_state_link())
else:
content = list_item.replace("##content##", item)
content = items_list.replace("##content##", content)
content = button.replace("##content##", content)
return content
@classmethod
def get_html_commands_buttons(cls, bo_object, title=''):
"""
Build an html button bar for a livestate element
"""
if not bo_object:
return ''
content = ''
try:
# Get global configuration
app_config = get_app_config()
buttons = []
if bo_object.event_handler_enabled:
button = app_config.get('buttons.livestate_command')
button = button.replace("##id##", bo_object.id)
button = button.replace("##type##", bo_object.get_type())
button = button.replace("##name##", bo_object.name)
button = button.replace("##action##", 'event_handler')
button = button.replace("##title##", _('Try to fix this problem'))
button = button.replace("##icon##", 'magic')
if getattr(bo_object, 'state_id', 0) > 0:
button = button.replace("##disabled##", 'disabled="disabled"')
else:
button = button.replace("##disabled##", '')
buttons.append(button)
button = app_config.get('buttons.livestate_command')
button = button.replace("##id##", bo_object.id)
button = button.replace("##type##", bo_object.get_type())
button = button.replace("##name##", bo_object.name)
button = button.replace("##action##", 'acknowledge')
button = button.replace("##title##", _('Acknowledge this problem'))
button = button.replace("##icon##", 'check')
if getattr(bo_object, 'state_id', 0) > 0:
if bo_object.acknowledged:
button = button.replace("##disabled##", 'disabled="disabled"')
else:
button = button.replace("##disabled##", '')
else:
button = button.replace("##disabled##", 'disabled="disabled"')
buttons.append(button)
button = app_config.get('buttons.livestate_command')
button = button.replace("##id##", bo_object.id)
button = button.replace("##type##", bo_object.get_type())
button = button.replace("##name##", bo_object.name)
button = button.replace("##action##", 'recheck')
button = button.replace("##title##", _('Re-check this element'))
button = button.replace("##icon##", 'refresh')
if getattr(bo_object, 'active_checks_enabled', None) is not None:
if not getattr(bo_object, 'active_checks_enabled'):
button = button.replace("##disabled##", 'disabled="disabled"')
else:
button = button.replace("##disabled##", 'disabled="disabled"')
buttons.append(button)
button = app_config.get('buttons.livestate_command')
button = button.replace("##id##", bo_object.id)
button = button.replace("##type##", bo_object.get_type())
button = button.replace("##name##", bo_object.name)
button = button.replace("##action##", 'downtime')
button = button.replace("##title##", _('Schedule a downtime'))
button = button.replace("##icon##", 'ambulance')
if getattr(bo_object, 'state_id', 0) > 0:
if bo_object.downtimed:
button = button.replace("##disabled##", 'disabled="disabled"')
else:
button = button.replace("##disabled##", '')
else:
button = button.replace("##disabled##", 'disabled="disabled"')
buttons.append(button)
content = app_config.get('buttons.livestate_commands')
content = content.replace("##title##", title)
content = content.replace("##commands##", ''.join(buttons))
logger.debug("Content: %s", content)
logger.debug("get_html_commands_buttons, content: %s", content)
except Exception as e:
logger.error("get_html_commands_buttons, exception: %s", str(e))
logger.error("traceback: %s", traceback.format_exc())
return content
    @staticmethod
    def get_html_hosts_count_panel(hs, url, collapsed=False, percentage=False):
        """
        Build the hosts live synthesis panel (counters or percentages).

        :param hs: hosts livesynthesis as provided by the get_livesynthesis or
        get_livesynthesis_history functions
        :param url: url to use for the links to an host table
        :param collapsed: True if the panel is collapsed
        :param percentage: True to build a percentage panel, else build a count panel
        :return: HTML string of the panel
        """
        content = ''
        # SLA font class thresholds: >= 95% ok, >= 90% warning, else critical
        sla = hs['pct_up']
        font = 'ok' if sla >= 95.0 else 'warning' if sla >= 90.0 else 'critical'
        # unmanaged_problems = hs['nb_problems'] - (hs['nb_acknowledged'] + hs['nb_in_downtime'])
        # pct_unmanaged_problems = round(100.0 * unmanaged_problems / hs['nb_elts'], 2) \
        # if hs['nb_elts'] else -1
        # _('Unmanaged problems')
        if percentage:
            # Percentage panel: global SLA figure plus per-state percentages
            pp_h = """
            <div id="panel_percentage_hosts">
              <div class="panel panel-default">
                <div class="panel-heading clearfix">
                  <i class="fa fa-server"></i>
                  <span class="hosts-all"
                        data-count="#hs_nb_elts#"
                        data-problems="#hs_nb_problems#">
                    #hs_nb_elts# hosts (#hs_nb_problems# problems).
                  </span>
                  <div class="pull-right">
                    <a href="#p_pp_h" class="btn btn-xs btn-raised"
                       data-toggle="collapse">
                      <i class="fa fa-fw %s"></i>
                    </a>
                  </div>
                </div>
                <div id="p_pp_h" class="panel-collapse collapse %s">
                  <div class="panel-body">
                    <div class="row">
                      <div class="col-xs-3 col-sm-3 text-center">
                        <div class="col-xs-12 text-center">
                          <a href="#hosts_table_url#" class="sla_hosts_%s">
                            <div>#hs_pct_sla#%%</div>
                            <i class="fa fa-4x fa-server"></i>
                            <p>%s</p>
                          </a>
                        </div>
                      </div>
                      <div class="col-xs-9 col-sm-9 text-center">
                        <div class="row">
                          <div class="col-xs-4 text-center">
                            <a href="#hosts_table_url#?search=ls_state:UP"
                               class="item_host_up" title="Up">
                              <span class="hosts-count">#hs_pct_up#%%</span>
                            </a>
                          </div>
                          <div class="col-xs-4 text-center">
                            <a href="#hosts_table_url#?search=ls_state:DOWN"
                               class="item_host_down" title="Down">
                              <span class="hosts-count">#hs_pct_down#%%</span>
                            </a>
                          </div>
                          <div class="col-xs-4 text-center">
                            <a href="#hosts_table_url#?search=ls_state:UNREACHABLE"
                               class="item_host_unreachable" title="Unreachable">
                              <span class="hosts-count">#hs_pct_unreachable#%%</span>
                            </a>
                          </div>
                        </div>
                        <div class="row">
                          <br/>
                        </div>
                        <div class="row">
                          <div class="col-xs-12 text-center">
                            <a href="#hosts_table_url#?search=ls_state:acknowledged"
                               class="item_host_acknowledged" title="Acknowledged">
                              <span class="hosts-count">#hs_pct_acknowledged#%%</span>
                            </a>
                            <span>/</span>
                            <a href="#hosts_table_url#?search=ls_state:IN_DOWNTIME"
                               class="item_host_in_downtime" title="In downtime">
                              <span class="hosts-count">#hs_pct_in_downtime#%%</span>
                            </a>
                          </div>
                        </div>
                      </div>
                    </div>
                  </div>
                </div>
              </div>
            </div>
            """ % ('fa-caret-down' if not collapsed else 'fa-caret-up',
                   'in' if not collapsed else '',
                   font, _('Hosts SLA'))
            # Substitute the templating fields with the live synthesis counters
            pp_h = pp_h.replace("#hs_nb_elts#", "%d" % hs['nb_elts'])
            pp_h = pp_h.replace("#hs_nb_problems#", "%d" % hs['nb_problems'])
            pp_h = pp_h.replace("#hs_pct_sla#", "%d" % hs['pct_up'])
            pp_h = pp_h.replace("#hs_pct_up#", "%d" % hs['pct_up'])
            pp_h = pp_h.replace("#hs_pct_down#", "%d" % hs['pct_down'])
            pp_h = pp_h.replace("#hs_pct_unreachable#", "%d" % hs['pct_unreachable'])
            pp_h = pp_h.replace("#hs_pct_acknowledged#", "%d" % hs['pct_acknowledged'])
            pp_h = pp_h.replace("#hs_pct_in_downtime#", "%d" % hs['pct_in_downtime'])
            pp_h = pp_h.replace("#hosts_table_url#", url)
            content = pp_h
        else:
            # Counters panel: absolute per-state counts
            pc_h = """
            <div id="panel_counters_hosts">
              <div class="panel panel-default">
                <div class="panel-heading clearfix">
                  <i class="fa fa-server"></i>
                  <span class="hosts-all"
                        data-count="#hs_nb_elts#"
                        data-problems="#hs_nb_problems#">
                    #hs_nb_elts# hosts (#hs_nb_problems# problems).
                  </span>
                  <div class="pull-right">
                    <a href="#p_pc_h" class="btn btn-xs btn-raised"
                       data-toggle="collapse">
                      <i class="fa fa-fw %s"></i>
                    </a>
                  </div>
                </div>
                <div id="p_pc_h" class="panel-collapse collapse %s">
                  <div class="panel-body">
                    <div class="col-xs-12 col-sm-9 text-center">
                      <div class="col-xs-4 text-center">
                        <a href="#hosts_table_url#?search=ls_state:UP"
                           class="item_host_up" title="Up">
                          <span class="hosts-count">#hs_nb_up#</span>
                        </a>
                      </div>
                      <div class="col-xs-4 text-center">
                        <a href="#hosts_table_url#?search=ls_state:DOWN"
                           class="item_host_down" title="Down">
                          <span class="hosts-count">#hs_nb_down#</span>
                        </a>
                      </div>
                      <div class="col-xs-4 text-center">
                        <a href="#hosts_table_url#?search=ls_state:UNREACHABLE"
                           class="item_host_unreachable" title="Unreachable">
                          <span class="hosts-count">#hs_nb_unreachable#</span>
                        </a>
                      </div>
                    </div>
                    <div class="col-xs-12 col-sm-3 text-center">
                      <a href="#hosts_table_url#?search=ls_state:acknowledged"
                         class="item_host_acknowledged" title="Acknowledged">
                        <span class="hosts-count">#hs_nb_acknowledged#</span>
                      </a>
                      <span>/</span>
                      <a href="#hosts_table_url#?search=ls_state:IN_DOWNTIME"
                         class="item_host_in_downtime" title="In downtime">
                        <span class="hosts-count">#hs_nb_in_downtime#</span>
                      </a>
                    </div>
                  </div>
                </div>
              </div>
            """ % ('fa-caret-down' if not collapsed else 'fa-caret-up',
                   'in' if not collapsed else '')
            # Substitute the templating fields with the live synthesis counters
            pc_h = pc_h.replace("#hs_nb_elts#", "%d" % hs['nb_elts'])
            pc_h = pc_h.replace("#hs_nb_problems#", "%d" % hs['nb_problems'])
            pc_h = pc_h.replace("#hs_nb_up#", "%d" % hs['nb_up'])
            pc_h = pc_h.replace("#hs_nb_down#", "%d" % hs['nb_down'])
            pc_h = pc_h.replace("#hs_nb_unreachable#", "%d" % hs['nb_unreachable'])
            pc_h = pc_h.replace("#hs_nb_acknowledged#", "%d" % hs['nb_acknowledged'])
            pc_h = pc_h.replace("#hs_nb_in_downtime#", "%d" % hs['nb_in_downtime'])
            pc_h = pc_h.replace("#hosts_table_url#", url)
            content = pc_h
        return content
@staticmethod
def get_html_services_count_panel(ss, url, collapsed=False, percentage=False):
"""
:param hs: services livesynthesis as provided by the get_livesynthesis or
get_livesynthesis_history functions
:param url: url to use for the links to an host table
:param collapsed: True if the panel is collapsed
:param percentage: True to build a percentage panel, else build a count panel
:return:
"""
content = ''
sla = ss['pct_ok']
font = 'ok' if sla >= 95.0 else 'warning' if sla >= 90.0 else 'critical'
# unmanaged_problems = ss['nb_problems'] - (ss['nb_acknowledged'] + ss['nb_in_downtime'])
# pct_unmanaged_problems = round(100.0 * unmanaged_problems / ss['nb_elts'], 2) \
# if ss['nb_elts'] else -1
# _('Unmanaged problems')
if percentage:
pp_s = """
<div id="panel_percentage_services">
<div class="panel panel-default">
<div class="panel-heading clearfix">
<i class="fa fa-server"></i>
<span class="services-all"
data-count="#ss_nb_elts#"
data-problems="#ss_nb_problems#">
#ss_nb_elts# services (#ss_nb_problems# problems).
</span>
<div class="pull-right">
<a href="#p_pp_s" class="btn btn-xs btn-raised"
data-toggle="collapse">
<i class="fa fa-fw %s"></i>
</a>
</div>
</div>
<div id="p_pp_s" class="panel-collapse collapse %s">
<div class="panel-body">
<div class="row">
<div class="col-xs-3 col-sm-3 text-center">
<div class="col-xs-12 text-center">
<a href="#services_table_url#" class="sla_services_%s">
<div>#ss_pct_ok#%%</div>
<i class="fa fa-4x fa-server"></i>
<p>%s</p>
</a>
</div>
</div>
<div class="col-xs-9 col-sm-9 text-center">
<div class="row">
<div class="col-xs-4 text-center">
<a href="#services_table_url#?search=ls_state:OK"
class="item_service_ok" title="ok">
<span class="services-count">#ss_pct_ok#%%</span>
</a>
</div>
<div class="col-xs-4 text-center">
<a href="#services_table_url#?search=ls_state:WARNING"
class="item_service_warning" title="warning">
<span class="services-count">#ss_pct_warning#%%</span>
</a>
</div>
<div class="col-xs-4 text-center">
<a href="#services_table_url#?search=ls_state:CRITICAL"
class="item_service_critical" title="critical">
<span class="services-count">#ss_pct_critical#%%</span>
</a>
</div>
<div class="col-xs-4 text-center">
<a href="#services_table_url#?search=ls_state:UNKNONW"
class="item_service_unknown" title="unknown">
<span class="services-count">#ss_pct_unknown#%%</span>
</a>
</div>
<div class="col-xs-4 text-center">
<a href="#services_table_url#?search=ls_state:UNREACHABLE"
class="item_service_unreachable" title="unreachable">
<span class="services-count">#ss_pct_unreachable#%%</span>
</a>
</div>
</div>
<div class="row">
<br/>
</div>
<div class="row">
<div class="col-xs-12 text-center">
<a href="#services_table_url#?search=ls_state:ACKNOWLEDGED"
class="item_service_acknowledged" title="acknowledged">
<span class="services-count">#ss_pct_acknowledged#%%</span>
</a>
<span>/</span>
<a href="#services_table_url#?search=ls_state:IN_DOWNTIME"
class="item_service_in_downtime" title="in_downtime">
<span class="services-count">#ss_pct_in_downtime#%%</span>
</a>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
""" % ('fa-caret-down' if not collapsed else 'fa-caret-up',
'in' if not collapsed else '',
font, _('Services SLA'))
pp_s = pp_s.replace("#ss_nb_elts#", "%d" % ss['nb_elts'])
pp_s = pp_s.replace("#ss_nb_problems#", "%d" % ss['nb_problems'])
pp_s = pp_s.replace("#ss_pct_ok#", "%d" % ss['pct_ok'])
pp_s = pp_s.replace("#ss_pct_warning#", "%d" % ss['pct_warning'])
pp_s = pp_s.replace("#ss_pct_critical#", "%d" % ss['pct_critical'])
pp_s = pp_s.replace("#ss_pct_unknown#", "%d" % ss['pct_unknown'])
pp_s = pp_s.replace("#ss_pct_unreachable#", "%d" % ss['pct_unreachable'])
pp_s = pp_s.replace("#ss_pct_acknowledged#", "%d" % ss['pct_acknowledged'])
pp_s = pp_s.replace("#ss_pct_in_downtime#", "%d" % ss['pct_in_downtime'])
pp_s = pp_s.replace("#services_table_url#", url)
content = pp_s
else:
pc_s = """
<div id="panel_counters_services">
<div class="panel panel-default">
<div class="panel-heading clearfix">
<i class="fa fa-server"></i>
<span class="services-all"
data-count="#ss_nb_elts#"
data-problems="#ss_nb_problems#">
#ss_nb_elts# services (#ss_nb_problems# problems).
</span>
<div class="pull-right">
<a href="#p_pc_s" class="btn btn-xs btn-raised"
data-toggle="collapse">
<i class="fa fa-fw %s"></i>
</a>
</div>
</div>
<div id="p_pc_s" class="panel-collapse collapse %s">
<div class="panel-body">
<div class="col-xs-12 col-sm-9 text-center">
<div class="col-xs-2 text-center">
<a href="#services_table_url#?search=ls_state:OK"
class="item_service_ok" title="Ok">
<span class="services-count">#ss_nb_ok#</span>
</a>
</div>
<div class="col-xs-2 text-center">
<a href="#services_table_url#?search=ls_state:WARNING"
class="item_service_critical" title="Warning">
<span class="services-count">#ss_nb_warning#</span>
</a>
</div>
<div class="col-xs-2 text-center">
<a href="#services_table_url#?search=ls_state:CRITICAL"
class="item_service_critical" title="Critical">
<span class="services-count">#ss_nb_critical#</span>
</a>
</div>
<div class="col-xs-2 text-center">
<a href="#services_table_url#?search=ls_state:UNKNOWN"
class="item_service_unknown" title="Unknown">
<span class="services-count">#ss_nb_unknown#</span>
</a>
</div>
<div class="col-xs-2 text-center">
<a href="#services_table_url#?search=ls_state:UNREACHABLE"
class="item_service_unreachable" title="Unreachable">
<span class="services-count">#ss_nb_unreachable#</span>
</a>
</div>
</div>
<div class="col-xs-12 col-sm-3 text-center">
<a href="#services_table_url#?search=ls_state:acknowledged"
class="item_service_acknowledged" title="Acknowledged">
<span class="services-count">#ss_nb_acknowledged#</span>
</a>
<span>/</span>
<a href="#services_table_url#?search=ls_state:IN_DOWNTIME"
class="item_service_in_downtime" title="In downtime">
<span class="services-count">#ss_nb_in_downtime#</span>
</a>
</div>
</div>
</div>
</div>
</div>
""" % ('fa-caret-down' if not collapsed else 'fa-caret-up',
'in' if not collapsed else '')
pc_s = pc_s.replace("#ss_nb_elts#", "%d" % ss['nb_elts'])
pc_s = pc_s.replace("#ss_nb_problems#", "%d" % ss['nb_problems'])
pc_s = pc_s.replace("#ss_nb_ok#", "%d" % ss['nb_ok'])
pc_s = pc_s.replace("#ss_nb_warning#", "%d" % ss['nb_warning'])
pc_s = pc_s.replace("#ss_nb_critical#", "%d" % ss['nb_critical'])
pc_s = pc_s.replace("#ss_nb_unreachable#", "%d" % ss['nb_unreachable'])
pc_s = pc_s.replace("#ss_nb_unknown#", "%d" % ss['nb_unknown'])
pc_s = pc_s.replace("#ss_nb_acknowledged#", "%d" % ss['nb_acknowledged'])
pc_s = pc_s.replace("#ss_nb_in_downtime#", "%d" % ss['nb_in_downtime'])
pc_s = pc_s.replace("#services_table_url#", url)
content = pc_s
return content
@staticmethod
def get_html_id(obj_type, name):
"""
Returns an host/service/contact ... HTML identifier
If parameters are not valid, returns 'n/a'
obj_type specifies object type
name specifes the object name
:param obj_type: host, service, contact
:type obj_type: string
:param name: object name
:type name: string
:return: valid HTML identifier
:rtype: string
"""
if not obj_type or not name:
return 'n/a'
return re.sub('[^A-Za-z0-9-_]', '', "%s-%s" % (obj_type, name))
    @staticmethod
    # pylint: disable=too-many-locals
    def get_html_livestate(datamgr, panels, bi=-1, search=None, actions=False):
        """
        Get HTML formatted live state

        Update system live synthesis and build header elements

        :param datamgr: data manager used to fetch hosts and services
        :param panels: panels preferences (used for the panel collapsed state)
        :param bi: business impact
        :type bi: int
        :param search: backend search filter; defaults to the elements in a
        problem state, neither acknowledged nor downtimed
        :param actions: True to include the commands buttons in each row
        :return: hosts_states and services_states HTML strings in a dictionary
        :rtype: dict
        """
        logger.info("get_html_livestate, BI: %d, search: '%s'", bi, search)
        if search is None or not isinstance(search, dict):
            search = {}
        # Default filter: problem elements (state_id != 0), not acknowledged,
        # not downtimed
        if 'where' not in search:
            search.update({'where': {"ls_state_id": {"$ne": 0}}})
            search['where'].update({'ls_acknowledged': False})
            search['where'].update({'ls_downtimed': False})
        if 'sort' not in search:
            search.update({'sort': '-_overall_state_id'})
        if bi != -1:
            # Filter on the requested business impact only
            search['where'].update({'business_impact': bi})
        items = []
        # Copy because the search filter is updated by the function ...
        search_hosts = search.copy()
        hosts = datamgr.get_hosts(search=search_hosts, embedded=False)
        items.extend(hosts)
        logger.debug("get_html_livestate, livestate %d (%s), %d hosts", bi, search, len(items))
        # Copy because the search filter is updated by the function ...
        if 'embedded' not in search:
            search.update({'embedded': {'host': 1}})
        search_services = search.copy()
        services = datamgr.get_services(search=search_services, embedded=True)
        items.extend(services)
        logger.debug("get_html_livestate, livestate %d (%s), %d services", bi, search, len(items))
        rows = []
        count = 0
        current_host = ''
        # Build one table row per host/service in a problem state
        for item in items:
            logger.debug("get_html_livestate, item: %s", item)
            count += 1
            elt_id = Helper.get_html_id("host", item.name)
            # Only link the host on its first row to avoid repeating the link
            host_url = ''
            if current_host != item.name:
                current_host = item.name
                host_url = item.html_link
            service_url = ''
            if item.object_type == "service":
                service_url = item.html_link
                host_url = item.host.html_link
            # Long check output is shown in a popover button, when available
            long_output = ''
            if item.long_output:
                long_output = """
                <button
                  type="button"
                  class="btn btn-xs btn-info"
                  data-toggle="popover"
                  title="Long output"
                  data-content="Long check output ...">
                    %s
                </button>
                """ % item.long_output
            extra = ''
            if item.acknowledged:
                extra += _(' and acknowledged')
            if item.downtimed:
                extra += _(' and in scheduled downtime')
            title = "%s - %s (%s)" \
                    % (item.status,
                       Helper.print_duration(item.last_check, duration_only=True, x_elts=0),
                       item.output)
            tr = """
            <tr data-toggle="collapse" data-target="#details-%s" class="accordion-toggle">
                <td>%s</td>
                <td>%s</td>
                <td>%s</td>
                <td>%s</td>
                <td class="hidden-xs">%s</td>
                <td class="hidden-sm hidden-xs">%s%s</td>
            </tr>""" % (
                elt_id,
                item.get_html_state(text=None, title=title, extra=extra),
                Helper.get_html_commands_buttons(item, title=_("Actions")) if actions else '',
                host_url, service_url,
                Helper.print_duration(item.last_check, duration_only=True, x_elts=0),
                item.output, long_output
            )
            rows.append(tr)
            # tr2 = """
            # <tr id="details-%s" class="collapse">
            #     <td colspan="20">
            # """ % (elt_id)
            # tr2 += """
            #     <div class="pull-left">
            # """
            # if item.passive_checks_enabled:
            #     tr2 += """
            #     <span>
            #     <span class="fa fa-arrow-left" title="Passive checks are enabled."></span>"""
            #     if item.check_freshness:
            #         tr2 += """
            #         <span title="Freshness check is enabled">(Freshness: %s seconds)</span>
            #         </span>""" % (item.freshness_threshold)
            #     else:
            #         tr2 += """</span>"""
            #
            # if item.active_checks_enabled:
            #     tr2 += """
            #     <span>
            #     <i class="fa fa-arrow-right" title="Active checks are enabled."></i>
            #     <i>
            #     Last check <strong>%s</strong>,
            #     next check in <strong>%s</strong>,
            #     attempt <strong>%d / %d</strong>
            #     </i>
            #     </span>""" % (
            #         Helper.print_duration(item.last_check, duration_only=True, x_elts=2),
            #         Helper.print_duration(item.next_check, duration_only=True, x_elts=2),
            #         int(item.current_attempt),
            #         int(item.max_attempts)
            #     )
            #     tr2 += """
            #     </div>
            #     """
            #
            #     tr2 += """
            #     </td>
            # </tr>"""
            #
            # rows.append(tr2)
        # Restore the panel collapsed state from the user preferences;
        # an empty panel is always collapsed
        collapsed = False
        if 'livestate-bi-%d' % bi in panels:
            collapsed = panels['livestate-bi-%d' % bi]['collapsed']
        if not rows:
            collapsed = True
        panel_bi = """
        <div id="livestate-bi-#bi-id#" class="livestate-panel panel panel-default">
            <div class="panel-heading clearfix">
                <strong>
                    <i class="fa fa-heartbeat"></i>
                    <span class="livestate-all text-%s" data-count="#nb_problems#">
                        #bi-text# - #nb_problems# problems.
                    </span>
                </strong>
                <div class="pull-right">
                    <a href="#p_livestate-#bi-id#" class="btn btn-xs btn-raised"
                       data-toggle="collapse">
                        <i class="fa fa-fw %s"></i>
                    </a>
                </div>
            </div>
            <div id="p_livestate-#bi-id#" class="panel-collapse collapse %s">
                <div class="panel-body">
        """ % ('success' if count == 0 else 'danger',
               'fa-caret-up' if collapsed else 'fa-caret-down',
               'in' if not collapsed else '')
        if count > 0:
            panel_bi += """
            <table class="table table-invisible table-condensed" data-business-impact="#bi-id#" >
                <thead><tr>
                    <th width="10px"></th>
                    <th width="30px"></th>
                    <th width="60px">%s</th>
                    <th width="90px">%s</th>
                    <th width="30px">%s</th>
                    <th class="hidden-sm hidden-xs" width="100%%">%s</th>
                </tr></thead>
                <tbody>
                </tbody>
            </table>
            """ % (
                _("Host"), _("Service"), _("Duration"), _("Output")
            )
        else:
            panel_bi += """
            <div class="alert alert-success"><p>%s</p></div>
            """ % (_("No problems."))
        panel_bi += """
                </div>
            </div>
        </div>
        """
        # Update panel templating fields
        panel_bi = panel_bi.replace("#bi-id#", "%d" % (bi))
        panel_bi = panel_bi.replace("#bi-text#",
                                    Helper.get_html_business_impact(bi, icon=True, text=True))
        panel_bi = panel_bi.replace("#nb_problems#", "%s" % count if count > 0 else 'no')
        return {'bi': bi, 'rows': rows, 'panel_bi': panel_bi}
|
import subprocess
import sys
import os
if len(sys.argv) < 2:
print "Usage: %s SCRIPTNAME [args]" % sys.argv[0]
sys.exit(1)
else:
script = sys.argv[1]
args = sys.argv[2:]
project = os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir))
wd = os.path.join(project, 'localconfig')
if not os.path.isdir(wd):
os.makedirs(wd)
env = os.environ.copy()
env['PYTHONPATH'] = project
env['WEBOOB_WORKDIR'] = wd
env['WEBOOB_BACKENDS'] = os.getenv('WEBOOB_LOCAL_BACKENDS',
os.getenv('WEBOOB_BACKENDS',
os.path.expanduser('~/.config/weboob/backends')))
with open(os.path.join(wd, 'sources.list'), 'w') as f:
f.write("file://%s\n" % os.path.join(project, 'modules'))
p = subprocess.Popen(
[sys.executable, os.path.join(project, 'scripts', 'weboob-config'), 'update'],
env=env,
stdout=subprocess.PIPE)
s = p.communicate()
if p.returncode != 0:
print s[0]
sys.exit(p.returncode)
if os.path.exists(script):
spath = script
else:
spath = os.path.join(project, 'scripts', script)
os.execvpe(
sys.executable,
['-Wall', spath] + args,
env)
|
import synthicity.urbansim.interaction as interaction
import pandas as pd, numpy as np, copy
from synthicity.utils import misc
from drcog.models import transition
def simulate(dset,year,depvar = 'building_id',alternatives=None,simulation_table = 'households',
output_names=None,agents_groupby = ['income_3_tenure',],transition_config=None,relocation_config=None):
output_csv, output_title, coeff_name, output_varname = output_names
if transition_config['Enabled']:
ct = dset.fetch(transition_config['control_totals_table'])
if 'persons' in ct.columns:
del ct['persons']
ct["total_number_of_households"] = (ct["total_number_of_households"]*transition_config['scaling_factor']).astype('int32')
hh = dset.fetch('households')
persons = dset.fetch('persons')
tran = transition.TabularTotalsTransition(ct, 'total_number_of_households')
model = transition.TransitionModel(tran)
#import pdb; pdb.set_trace()
new, added, new_linked = model.transition(
hh, year, linked_tables={'linked': (persons, 'household_id')})
new.loc[added,'building_id'] = -1
dset.d['households'] = new
dset.d['persons'] = new_linked['linked']
# new_hhlds = {"table": "dset.households","writetotmp": "households","model": "transitionmodel","first_year": 2010,"control_totals": "dset.%s"%transition_config['control_totals_table'],
# "geography_field": "building_id","amount_field": "total_number_of_households"}
# import synthicity.urbansim.transitionmodel as transitionmodel
# transitionmodel.simulate(dset,new_hhlds,year=year,show=True,subtract=True)
dset.households.index.name = 'household_id'
choosers = dset.fetch(simulation_table)
if relocation_config['Enabled']:
rate_table = dset.store[relocation_config['relocation_rates_table']].copy()
rate_field = "probability_of_relocating"
rate_table[rate_field] = rate_table[rate_field]*.01*relocation_config['scaling_factor']
movers = dset.relocation_rates(choosers,rate_table,rate_field)
choosers[depvar].ix[movers] = -1
movers = choosers[choosers[depvar]==-1]
print "Total new agents and movers = %d" % len(movers.index)
empty_units = dset.buildings[(dset.buildings.residential_units>0)].residential_units.sub(choosers.groupby('building_id').size(),fill_value=0)
empty_units = empty_units[empty_units>0].order(ascending=False)
alternatives = alternatives.ix[np.repeat(empty_units.index.values,empty_units.values.astype('int'))]
alts = alternatives
pdf = pd.DataFrame(index=alts.index)
segments = movers.groupby(agents_groupby)
for name, segment in segments:
segment = segment.head(1)
name = str(name)
tmp_outcsv, tmp_outtitle, tmp_coeffname = output_csv%name, output_title%name, coeff_name%name
ind_vars = dset.coeffs[(tmp_coeffname, 'fnames')][np.invert(dset.coeffs[(tmp_coeffname, 'fnames')].isnull().values)].values.tolist()
SAMPLE_SIZE = alts.index.size
numchoosers = segment.shape[0]
numalts = alts.shape[0]
sample = np.tile(alts.index.values,numchoosers)
alts_sample = alts
alts_sample.loc[:, 'join_index'] = np.repeat(segment.index.values,SAMPLE_SIZE) # corrected chained index error
alts_sample = pd.merge(alts_sample,segment,left_on='join_index',right_index=True,suffixes=('','_r'))
chosen = np.zeros((numchoosers,SAMPLE_SIZE))
chosen[:,0] = 1
sample, alternative_sample, est_params = sample, alts_sample, ('mnl',chosen)
##Interaction variables
interaction_vars = [(var, var.split('_x_')) for var in ind_vars if '_x_' in var]
for ivar in interaction_vars:
if ivar[1][0].endswith('gt'):
alternative_sample[ivar[0]] = ((alternative_sample[ivar[1][0]])>alternative_sample[ivar[1][1]]).astype('int32')
if ivar[1][0].endswith('lt'):
alternative_sample[ivar[0]] = ((alternative_sample[ivar[1][0]])<alternative_sample[ivar[1][1]]).astype('int32')
else:
alternative_sample[ivar[0]] = ((alternative_sample[ivar[1][0]])*alternative_sample[ivar[1][1]])
est_data = pd.DataFrame(index=alternative_sample.index)
for varname in ind_vars:
est_data[varname] = alternative_sample[varname]
est_data = est_data.fillna(0)
data = est_data
data = data.as_matrix()
coeff = dset.load_coeff(tmp_coeffname)
probs = interaction.mnl_simulate(data,coeff,numalts=SAMPLE_SIZE,returnprobs=1)
pdf['segment%s'%name] = pd.Series(probs.flatten(),index=alts.index)
new_homes = pd.Series(np.ones(len(movers.index))*-1,index=movers.index)
for name, segment in segments:
name_coeff = str(name)
name = str(name)
p=pdf['segment%s'%name].values
mask = np.zeros(len(alts.index),dtype='bool')
print "Assigning units to %d agents of segment %s" % (len(segment.index),name)
def choose(p,mask,alternatives,segment,new_homes,minsize=None):
p = copy.copy(p)
p[mask] = 0 # already chosen
try:
indexes = np.random.choice(len(alternatives.index),len(segment.index),replace=False,p=p/p.sum())
except:
print "WARNING: not enough options to fit agents, will result in unplaced agents"
return mask,new_homes
new_homes.ix[segment.index] = alternatives.index.values[indexes]
mask[indexes] = 1
return mask,new_homes
mask,new_homes = choose(p,mask,alts,segment,new_homes)
build_cnts = new_homes.value_counts() #num households place in each building
print "Assigned %d agents to %d locations with %d unplaced" % (new_homes.size,build_cnts.size,build_cnts.get(-1,0))
table = dset.households # need to go back to the whole dataset
table[depvar].ix[new_homes.index] = new_homes.values.astype('int32')
dset.store_attr(output_varname,year,copy.deepcopy(table[depvar]))
if __name__ == '__main__':
    from drcog.models import dataset
    from drcog.variables import variable_library
    import os
    import cProfile
    # Load the DRCOG dataset and the estimated coefficient stores
    dset = dataset.DRCOGDataset(os.path.join(misc.data_dir(),'drcog.h5'))
    #Load estimated coefficients
    coeff_store = pd.HDFStore(os.path.join(misc.data_dir(),'coeffs.h5'))
    dset.coeffs = coeff_store.coeffs.copy()
    coeff_store.close()
    coeff_store = pd.HDFStore(os.path.join(misc.data_dir(),'coeffs_res.h5'))
    dset.coeffs_res = coeff_store.coeffs_res.copy()
    coeff_store.close()
    variable_library.calculate_variables(dset)
    # Alternatives: buildings with at least one residential unit
    alternatives = dset.buildings[(dset.buildings.residential_units>0)]
    sim_year = 2011
    # The call is built as a string so cProfile.run can evaluate it;
    # sim_year/dset/alternatives are resolved from this module's globals
    fnc = "simulate(dset, year=sim_year,depvar = 'building_id',alternatives=alternatives,simulation_table = 'households',output_names = ('drcog-coeff-hlcm-%s.csv','DRCOG HOUSEHOLD LOCATION CHOICE MODELS (%s)','hh_location_%s','household_building_ids')," +\
          "agents_groupby= ['income_3_tenure',],transition_config = {'Enabled':True,'control_totals_table':'annual_household_control_totals','scaling_factor':1.0}," +\
          "relocation_config = {'Enabled':True,'relocation_rates_table':'annual_household_relocation_rates','scaling_factor':1.0},)"
    # NOTE(review): hardcoded per-user Windows path for the profile output -
    # consider making this configurable
    cProfile.run(fnc, 'c:/users/jmartinez/documents/projects/urbansim/cprofile/hlcm')
|
"""
Patient Sale
Created: 25 Sep 2019
Last mod: 25 Sep 2019
"""
from openerp import models, fields, api
from openerp.addons.openhealth.models.order import ord_vars
class patient_sale(models.Model):
    """
    Patient Sale Class

    Aggregated sale report line for a patient: extends the generic patient
    report model with the number of order lines and the total amount.
    """
    _name = 'openhealth.patient.sale'
    _description = 'Openhealth Patient Sale'
    _inherit = 'openhealth.patient.report'
    # Number of order lines aggregated in this sale
    nr_lines = fields.Integer(
        string='Nr lineas',
    )
    # Total amount of the sale
    amount = fields.Float(
        string="Total",
    )
|
"""
This module holds the standard implementation of the :class:`PrinterInterface` and it helpers.
"""
from __future__ import absolute_import
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
import copy
import logging
import os
import threading
import time
from octoprint import util as util
from octoprint.events import eventManager, Events
from octoprint.filemanager import FileDestinations, NoSuchStorage
from octoprint.plugin import plugin_manager, ProgressPlugin
from octoprint.printer import PrinterInterface, PrinterCallback, UnknownScript
from octoprint.printer.estimation import TimeEstimationHelper
from octoprint.settings import settings
from octoprint.util import comm as comm
from octoprint.util import InvariantContainer
from octoprint.util import to_unicode
class Printer(PrinterInterface, comm.MachineComPrintCallback):
"""
Default implementation of the :class:`PrinterInterface`. Manages the communication layer object and registers
itself with it as a callback to react to changes on the communication layer.
"""
	def __init__(self, fileManager, analysisQueue, printerProfileManager):
		"""
		Wire the printer to its collaborators and initialize all state.

		:param fileManager: file manager used for local/SD file handling
		:param analysisQueue: queue used for GCODE analysis jobs
		:param printerProfileManager: manager providing the active printer profile
		"""
		from collections import deque
		self._logger = logging.getLogger(__name__)
		self._analysisQueue = analysisQueue
		self._fileManager = fileManager
		self._printerProfileManager = printerProfileManager
		# state
		# TODO do we really need to hold the temperature here?
		self._temp = None
		self._bedTemp = None
		self._targetTemp = None
		self._targetBedTemp = None
		# temperature history, bounded by the configured cutoff (minutes -> seconds)
		self._temps = TemperatureHistory(cutoff=settings().getInt(["temperature", "cutoff"])*60)
		self._tempBacklog = []
		# bounded message/log buffers (most recent 300 entries)
		self._messages = deque([], 300)
		self._messageBacklog = []
		self._log = deque([], 300)
		self._logBacklog = []
		self._state = None
		self._currentZ = None
		self._printAfterSelect = False
		self._posAfterSelect = None
		# sd handling
		self._sdPrinting = False
		self._sdStreaming = False
		self._sdFilelistAvailable = threading.Event()
		self._streamingFinishedCallback = None
		self._selectedFile = None
		# print time estimation parameters, from settings
		self._timeEstimationData = None
		self._timeEstimationStatsWeighingUntil = settings().getFloat(["estimation", "printTime", "statsWeighingUntil"])
		self._timeEstimationValidityRange = settings().getFloat(["estimation", "printTime", "validityRange"])
		self._timeEstimationForceDumbFromPercent = settings().getFloat(["estimation", "printTime", "forceDumbFromPercent"])
		self._timeEstimationForceDumbAfterMin = settings().getFloat(["estimation", "printTime", "forceDumbAfterMin"])
		# comm
		self._comm = None
		# callbacks
		self._callbacks = []
		# progress plugins
		self._lastProgressReport = None
		self._progressPlugins = plugin_manager().get_implementations(ProgressPlugin)
		# state monitor aggregates state changes and pushes them to the callbacks
		self._stateMonitor = StateMonitor(
			interval=0.5,
			on_update=self._sendCurrentDataCallbacks,
			on_add_temperature=self._sendAddTemperatureCallbacks,
			on_add_log=self._sendAddLogCallbacks,
			on_add_message=self._sendAddMessageCallbacks,
			on_get_progress=self._updateProgressDataCallback
		)
		self._stateMonitor.reset(
			state={"text": self.get_state_string(), "flags": self._getStateFlags()},
			job_data={
				"file": {
					"name": None,
					"size": None,
					"origin": None,
					"date": None
				},
				"estimatedPrintTime": None,
				"lastPrintTime": None,
				"filament": {
					"length": None,
					"volume": None
				}
			},
			progress={"completion": None, "filepos": None, "printTime": None, "printTimeLeft": None},
			current_z=None
		)
		# refresh job data whenever file metadata changes
		eventManager().subscribe(Events.METADATA_ANALYSIS_FINISHED, self._on_event_MetadataAnalysisFinished)
		eventManager().subscribe(Events.METADATA_STATISTICS_UPDATED, self._on_event_MetadataStatisticsUpdated)
#~~ handling of PrinterCallbacks
def register_callback(self, callback):
	"""Registers *callback* for printer updates and immediately pushes the current state to it."""
	if not isinstance(callback, PrinterCallback):
		# accept it anyway, but let the log show the interface mismatch
		self._logger.warn("Registering an object as printer callback which doesn't implement the PrinterCallback interface")
	self._callbacks.append(callback)
	self._sendInitialStateUpdate(callback)
def unregister_callback(self, callback):
	"""Removes *callback* from the registered printer callbacks, ignoring unknown callbacks."""
	try:
		self._callbacks.remove(callback)
	except ValueError:
		# wasn't registered in the first place, nothing to do
		pass
def _sendAddTemperatureCallbacks(self, data):
for callback in self._callbacks:
try: callback.on_printer_add_temperature(data)
except: self._logger.exception("Exception while adding temperature data point")
def _sendAddLogCallbacks(self, data):
for callback in self._callbacks:
try: callback.on_printer_add_log(data)
except: self._logger.exception("Exception while adding communication log entry")
def _sendAddMessageCallbacks(self, data):
for callback in self._callbacks:
try: callback.on_printer_add_message(data)
except: self._logger.exception("Exception while adding printer message")
def _sendCurrentDataCallbacks(self, data):
for callback in self._callbacks:
try: callback.on_printer_send_current_data(copy.deepcopy(data))
except: self._logger.exception("Exception while pushing current data")
#~~ callback from metadata analysis event
def _on_event_MetadataAnalysisFinished(self, event, data):
if self._selectedFile:
self._setJobData(self._selectedFile["filename"],
self._selectedFile["filesize"],
self._selectedFile["sd"])
def _on_event_MetadataStatisticsUpdated(self, event, data):
self._setJobData(self._selectedFile["filename"],
self._selectedFile["filesize"],
self._selectedFile["sd"])
#~~ progress plugin reporting
def _reportPrintProgressToPlugins(self, progress):
	"""Forwards *progress* (int percentage) to all registered ProgressPlugins on a separate thread."""
	# nothing to report without a progress value and a fully populated selected file
	if not progress or not self._selectedFile or not "sd" in self._selectedFile or not "filename" in self._selectedFile:
		return

	storage = "sdcard" if self._selectedFile["sd"] else "local"
	filename = self._selectedFile["filename"]

	def call_plugins(storage, filename, progress):
		# isolate plugin exceptions so one broken plugin can't break the others
		for plugin in self._progressPlugins:
			try:
				plugin.on_print_progress(storage, filename, progress)
			except:
				self._logger.exception("Exception while sending print progress to plugin %s" % plugin._identifier)

	# NOTE(review): deliberately a non-daemon thread? it will keep the process
	# alive until the plugins return - confirm this is intended
	thread = threading.Thread(target=call_plugins, args=(storage, filename, progress))
	thread.daemon = False
	thread.start()
#~~ PrinterInterface implementation
def connect(self, port=None, baudrate=None, profile=None):
	"""
	Connects to the printer. If port and/or baudrate is provided, uses these settings, otherwise autodetection
	will be attempted.
	"""
	# drop any existing connection first so we never hold two comm instances
	if self._comm is not None:
		self._comm.close()
	self._printerProfileManager.select(profile)
	from octoprint.logging.handlers import SerialLogHandler
	# start a fresh serial log for this connection attempt
	SerialLogHandler.on_open_connection()
	self._comm = comm.MachineCom(port, baudrate, callbackObject=self, printerProfileManager=self._printerProfileManager)
def disconnect(self):
	"""
	Closes the connection to the printer.
	"""
	comm_instance = self._comm
	if comm_instance is not None:
		comm_instance.close()
	self._comm = None
	self._printerProfileManager.deselect()
	eventManager().fire(Events.DISCONNECTED)
def get_transport(self):
	"""Returns the comm layer's transport object, or None while disconnected."""
	comm_instance = self._comm
	return comm_instance.getTransport() if comm_instance is not None else None
getTransport = util.deprecated("getTransport has been renamed to get_transport", since="1.2.0-dev-590", includedoc="Replaced by :func:`get_transport`")
def fake_ack(self):
	"""Injects a fake acknowledgment into the comm layer; no-op while disconnected."""
	if self._comm is not None:
		self._comm.fakeOk()
def commands(self, commands):
	"""
	Sends one or more gcode commands to the printer.
	"""
	if self._comm is None:
		return

	to_send = commands if isinstance(commands, (list, tuple)) else [commands]
	for line in to_send:
		self._comm.sendCommand(line)
def script(self, name, context=None):
	"""Sends the named gcode script to the printer, rendered with the optional template *context*."""
	if self._comm is None:
		return
	if not name:
		raise ValueError("name must be set")
	if not self._comm.sendGcodeScript(name, replacements=context):
		raise UnknownScript(name)
def jog(self, axis, amount):
	"""
	Jogs *axis* by *amount* mm using the axis speed from the active printer profile.

	Raises:
	    ValueError: if axis is not one of PrinterInterface.valid_axes or amount is not a number
	"""
	if not isinstance(axis, (str, unicode)):  # Python 2 string types
		raise ValueError("axis must be a string: {axis}".format(axis=axis))

	axis = axis.lower()
	if not axis in PrinterInterface.valid_axes:
		raise ValueError("axis must be any of {axes}: {axis}".format(axes=", ".join(PrinterInterface.valid_axes), axis=axis))
	if not isinstance(amount, (int, long, float)):
		raise ValueError("amount must be a valid number: {amount}".format(amount=amount))

	printer_profile = self._printerProfileManager.get_current_or_default()
	movement_speed = printer_profile["axes"][axis]["speed"]
	# switch to relative positioning for the move, then back to absolute
	self.commands(["G91", "G1 %s%.4f F%d" % (axis.upper(), amount, movement_speed), "G90"])
def home(self, axes):
	"""
	Homes the given axes (string or list of strings out of PrinterInterface.valid_axes).

	Raises:
	    ValueError: if axes is neither a string nor a list, or contains invalid axes
	"""
	if not isinstance(axes, (list, tuple)):
		if isinstance(axes, (str, unicode)):  # Python 2 string types
			axes = [axes]
		else:
			raise ValueError("axes is neither a list nor a string: {axes}".format(axes=axes))

	validated_axes = filter(lambda x: x in PrinterInterface.valid_axes, map(lambda x: x.lower(), axes))
	if len(axes) != len(validated_axes):
		raise ValueError("axes contains invalid axes: {axes}".format(axes=axes))

	# relative positioning around the home command, e.g. "G28 X0 Y0"
	self.commands(["G91", "G28 %s" % " ".join(map(lambda x: "%s0" % x.upper(), validated_axes)), "G90"])
def extrude(self, amount):
	"""Extrudes *amount* mm of filament using the E axis speed from the active printer profile."""
	if not isinstance(amount, (int, long, float)):
		raise ValueError("amount must be a valid number: {amount}".format(amount=amount))

	extrusion_speed = self._printerProfileManager.get_current_or_default()["axes"]["e"]["speed"]
	self.commands(["G91", "G1 E%s F%d" % (amount, extrusion_speed), "G90"])
def change_tool(self, tool):
	"""Switches the active extruder to *tool* (format "tool<n>")."""
	if not PrinterInterface.valid_tool_regex.match(tool):
		raise ValueError("tool must match \"tool[0-9]+\": {tool}".format(tool=tool))

	self.commands("T%d" % int(tool[len("tool"):]))
def set_temperature(self, heater, value):
	"""
	Sets the target temperature of *heater* ("tool<n>" or "bed") to *value* degrees.

	Raises:
	    ValueError: if heater doesn't denote a valid heater or value is not a number >= 0
	"""
	if not PrinterInterface.valid_heater_regex.match(heater):
		# bugfix: format() was called with the keyword "type" although the
		# placeholder is named "heater", so raising this error itself failed
		# with a KeyError instead of the intended ValueError
		raise ValueError("heater must match \"tool[0-9]+\" or \"bed\": {heater}".format(heater=heater))

	if not isinstance(value, (int, long, float)) or value < 0:
		raise ValueError("value must be a valid number >= 0: {value}".format(value=value))

	if heater.startswith("tool"):
		printer_profile = self._printerProfileManager.get_current_or_default()
		extruder_count = printer_profile["extruder"]["count"]
		if extruder_count > 1:
			# only address a specific tool on multi extruder setups
			toolNum = int(heater[len("tool"):])
			self.commands("M104 T%d S%f" % (toolNum, value))
		else:
			self.commands("M104 S%f" % value)
	elif heater == "bed":
		self.commands("M140 S%f" % value)
def set_temperature_offset(self, offsets=None):
	"""
	Applies temperature *offsets* (dict of heater name to numeric offset) to the comm layer and state monitor.

	Raises:
	    ValueError: if offsets is not a dict or contains invalid keys or values
	"""
	if offsets is None:
		offsets = dict()

	if not isinstance(offsets, dict):
		raise ValueError("offsets must be a dict")

	# validate keys against the known heater name pattern and values for being numbers
	validated_keys = filter(lambda x: PrinterInterface.valid_heater_regex.match(x), offsets.keys())
	validated_values = filter(lambda x: isinstance(x, (int, long, float)), offsets.values())

	if len(validated_keys) != len(offsets):
		raise ValueError("offsets contains invalid keys: {offsets}".format(offsets=offsets))
	if len(validated_values) != len(offsets):
		raise ValueError("offsets contains invalid values: {offsets}".format(offsets=offsets))

	if self._comm is None:
		return

	self._comm.setTemperatureOffset(offsets)
	self._stateMonitor.set_temp_offsets(offsets)
def _convert_rate_value(self, factor, min=0, max=200):
	"""
	Normalizes a rate *factor* to an int percentage and validates it against [min, max].

	Floats are interpreted as fractions (e.g. 1.5 -> 150), ints as percentages.
	"""
	if not isinstance(factor, (int, float, long)):
		raise ValueError("factor is not a number")

	if isinstance(factor, float):
		factor = int(factor * 100.0)

	if not (min <= factor <= max):
		raise ValueError("factor must be a value between %f and %f" % (min, max))

	return factor
def feed_rate(self, factor):
	"""Sets the printer's feed rate factor (valid range 50-200%)."""
	normalized = self._convert_rate_value(factor, min=50, max=200)
	self.commands("M220 S%d" % normalized)
def flow_rate(self, factor):
	"""Sets the printer's flow rate factor (valid range 75-125%)."""
	normalized = self._convert_rate_value(factor, min=75, max=125)
	self.commands("M221 S%d" % normalized)
def select_file(self, path, sd, printAfterSelect=False, pos=None):
	"""
	Selects the file at *path* (on the SD card if *sd* is True) for printing.

	If *printAfterSelect* is True the print is started once the comm layer reports
	the selection, optionally from position *pos*. No-op if disconnected or busy.
	"""
	if self._comm is None or (self._comm.isBusy() or self._comm.isStreaming()):
		self._logger.info("Cannot load file: printer not connected or currently busy")
		return

	recovery_data = self._fileManager.get_recovery_data()
	if recovery_data:
		# clean up recovery data if we just selected a different file than is logged in that
		expected_origin = FileDestinations.SDCARD if sd else FileDestinations.LOCAL
		actual_origin = recovery_data.get("origin", None)
		actual_path = recovery_data.get("path", None)

		if actual_origin is None or actual_path is None or actual_origin != expected_origin or actual_path != path:
			self._fileManager.delete_recovery_data()

	# remember whether (and where) to start printing once the selection went through
	self._printAfterSelect = printAfterSelect
	self._posAfterSelect = pos
	# SD file names are usually addressed with a leading slash unless configured otherwise
	self._comm.selectFile("/" + path if sd and not settings().getBoolean(["feature", "sdRelativePath"]) else path, sd)
	self._setProgressData(completion=0)
	self._setCurrentZ(None)
def unselect_file(self):
	"""
	Unselects the currently selected file and resets progress and z data.

	No-op while disconnected or while the printer is busy printing/streaming.
	"""
	# bugfix: the original guard only returned while busy and then dereferenced
	# self._comm unconditionally, raising an AttributeError when called while
	# disconnected (self._comm is None)
	if self._comm is None or self._comm.isBusy() or self._comm.isStreaming():
		return

	self._comm.unselectFile()
	self._setProgressData(completion=0)
	self._setCurrentZ(None)
def start_print(self, pos=None):
	"""
	Starts the currently loaded print job.
	Only starts if the printer is connected and operational, not currently printing and a printjob is loaded
	"""
	if self._comm is None or not self._comm.isOperational() or self._comm.isPrinting():
		return
	if self._selectedFile is None:
		return

	# we are happy if the average of the estimates stays within 60s of the prior one
	threshold = settings().getFloat(["estimation", "printTime", "stableThreshold"])
	rolling_window = None
	countdown = None

	if self._selectedFile["sd"]:
		# we are interesting in a rolling window of roughly the last 15s, so the number of entries has to be derived
		# by that divided by the sd status polling interval
		rolling_window = 15 / settings().get(["serial", "timeout", "sdStatus"])

		# we are happy when one rolling window has been stable
		countdown = rolling_window
	self._timeEstimationData = TimeEstimationHelper(rolling_window=rolling_window,
	                                                threshold=threshold,
	                                                countdown=countdown)

	# a fresh print invalidates any previously recorded recovery data
	self._fileManager.delete_recovery_data()

	self._lastProgressReport = None
	self._setProgressData(completion=0)
	self._setCurrentZ(None)
	self._comm.startPrint(pos=pos)
def pause_print(self):
	"""
	Pause the current printjob.
	"""
	active_comm = self._comm
	if active_comm is None or active_comm.isPaused():
		# not connected or already paused, nothing to do
		return
	active_comm.setPause(True)
def resume_print(self):
	"""
	Resume the current printjob.
	"""
	active_comm = self._comm
	if active_comm is None or not active_comm.isPaused():
		# not connected or not paused, nothing to resume
		return
	active_comm.setPause(False)
def cancel_print(self):
	"""
	Cancel the current printjob.
	"""
	if self._comm is None:
		return

	self._comm.cancelPrint()

	# reset progress, height, print time
	self._setCurrentZ(None)
	self._setProgressData()

	# mark print as failure
	if self._selectedFile is not None:
		self._fileManager.log_print(FileDestinations.SDCARD if self._selectedFile["sd"] else FileDestinations.LOCAL, self._selectedFile["filename"], time.time(), self._comm.getPrintTime(), False, self._printerProfileManager.get_current_or_default()["id"])
		payload = {
			"file": self._selectedFile["filename"],
			"origin": FileDestinations.LOCAL
		}
		if self._selectedFile["sd"]:
			payload["origin"] = FileDestinations.SDCARD
		# let interested parties (e.g. event hooks) know the print failed
		eventManager().fire(Events.PRINT_FAILED, payload)
def get_state_string(self):
	"""
	Returns a human readable string corresponding to the current communication state.
	"""
	comm_instance = self._comm
	return "Offline" if comm_instance is None else comm_instance.getStateString()
def get_current_data(self):
	"""Returns the state monitor's current data snapshot (state, job, progress, currentZ, offsets)."""
	return self._stateMonitor.get_current_data()
def get_current_job(self):
	"""Returns the "job" portion of the state monitor's current data."""
	currentData = self._stateMonitor.get_current_data()
	return currentData["job"]
def get_current_temperatures(self):
	"""
	Returns the latest known temperatures as a dict mapping "tool<n>"/"bed" to
	{"actual": ..., "target": ..., "offset": ...} entries.
	"""
	if self._comm is not None:
		offsets = self._comm.getOffsets()
	else:
		offsets = dict()

	result = {}
	# NOTE(review): assumes self._temp and self._bedTemp are initialized before
	# the first temperature update arrives - confirm against __init__
	if self._temp is not None:
		for tool in self._temp.keys():
			result["tool%d" % tool] = {
				"actual": self._temp[tool][0],
				"target": self._temp[tool][1],
				# missing or None offsets default to 0
				"offset": offsets[tool] if tool in offsets and offsets[tool] is not None else 0
			}
	if self._bedTemp is not None:
		result["bed"] = {
			"actual": self._bedTemp[0],
			"target": self._bedTemp[1],
			"offset": offsets["bed"] if "bed" in offsets and offsets["bed"] is not None else 0
		}

	return result
def get_temperature_history(self):
	"""Returns the recorded temperature history (cutoff-limited container of data points)."""
	return self._temps
def get_current_connection(self):
	"""Returns a 4-tuple (state string, port, baudrate, printer profile) describing the connection."""
	comm_instance = self._comm
	if comm_instance is None:
		return "Closed", None, None, None

	port, baudrate = comm_instance.getConnection()
	profile = self._printerProfileManager.get_current_or_default()
	return comm_instance.getStateString(), port, baudrate, profile
def is_closed_or_error(self):
	"""Returns True while disconnected or when the comm layer reports closed/error."""
	comm_instance = self._comm
	return True if comm_instance is None else comm_instance.isClosedOrError()
def is_operational(self):
	"""Returns True when connected and the comm layer reports an operational printer."""
	comm_instance = self._comm
	return comm_instance is not None and comm_instance.isOperational()
def is_printing(self):
	"""Returns True when connected and a print job is running."""
	comm_instance = self._comm
	return comm_instance is not None and comm_instance.isPrinting()
def is_paused(self):
	"""Returns True when connected and the current job is paused."""
	comm_instance = self._comm
	return comm_instance is not None and comm_instance.isPaused()
def is_error(self):
	"""Returns True when connected and the comm layer reports an error state."""
	comm_instance = self._comm
	return comm_instance is not None and comm_instance.isError()
def is_ready(self):
	"""Returns True when operational and not currently streaming a file to SD."""
	if not self.is_operational():
		return False
	return not self._comm.isStreaming()
def is_sd_ready(self):
	"""Returns True if SD support is enabled and the printer reports an initialized SD card."""
	if not settings().getBoolean(["feature", "sdSupport"]) or self._comm is None:
		return False
	return self._comm.isSdReady()
#~~ sd file handling
def get_sd_files(self):
	"""Returns the cached SD file list as (name without leading slash, size) tuples, [] if SD not ready."""
	if self._comm is None or not self._comm.isSdReady():
		return []
	return [(entry[0][1:], entry[1]) for entry in self._comm.getSdFiles()]
def add_sd_file(self, filename, absolutePath, streamingFinishedCallback):
	"""
	Streams the local file at *absolutePath* to the printer's SD card under a DOS-8.3 version of *filename*.

	*streamingFinishedCallback* is invoked once the transfer finished.
	Returns the remote (8.3) name used, or None if no transfer could be started.
	"""
	if not self._comm or self._comm.isBusy() or not self._comm.isSdReady():
		self._logger.error("No connection to printer or printer is busy")
		return

	self._streamingFinishedCallback = streamingFinishedCallback

	# fetch the current SD file list first so the generated remote name doesn't collide
	self.refresh_sd_files(blocking=True)
	existingSdFiles = map(lambda x: x[0], self._comm.getSdFiles())

	remoteName = util.get_dos_filename(filename,
	                                   existing_filenames=existingSdFiles,
	                                   extension="gco",
	                                   whitelisted_extensions=["gco", "g"])
	self._timeEstimationData = TimeEstimationHelper()
	self._comm.startFileTransfer(absolutePath, filename, "/" + remoteName)

	return remoteName
def delete_sd_file(self, filename):
	"""Deletes *filename* from the printer's SD card if connected and the card is ready."""
	if self._comm and self._comm.isSdReady():
		self._comm.deleteSdFile("/" + filename)
def init_sd_card(self):
	"""Asks the printer to initialize its SD card, unless disconnected or already initialized."""
	if self._comm and not self._comm.isSdReady():
		self._comm.initSdCard()
def release_sd_card(self):
	"""Releases the printer's SD card so it becomes accessible externally."""
	if self._comm and self._comm.isSdReady():
		self._comm.releaseSdCard()
def refresh_sd_files(self, blocking=False):
	"""
	Refreshs the list of file stored on the SD card attached to printer (if available and printer communication
	available). Optional blocking parameter allows making the method block (max 10s) until the file list has been
	received (and can be accessed via self._comm.getSdFiles()). Defaults to an asynchronous operation.
	"""
	if not self._comm or not self._comm.isSdReady():
		return
	self._sdFilelistAvailable.clear()
	self._comm.refreshSdFiles()
	if blocking:
		# bugfix: threading.Event.wait takes a timeout in *seconds* - the old
		# value of 10000 would have blocked for close to three hours instead
		# of the documented maximum of 10s
		self._sdFilelistAvailable.wait(10)
#~~ state monitoring
def _setCurrentZ(self, currentZ):
self._currentZ = currentZ
self._stateMonitor.set_current_z(self._currentZ)
def _setState(self, state, state_string=None):
if state_string is None:
state_string = self.get_state_string()
self._state = state
self._stateMonitor.set_state({"text": state_string, "flags": self._getStateFlags()})
def _addLog(self, log):
	"""Appends a serial log line to the bounded log buffer and forwards it to the state monitor."""
	self._log.append(log)
	self._stateMonitor.add_log(log)
def _addMessage(self, message):
	"""Appends a printer message to the bounded message buffer and forwards it to the state monitor."""
	self._messages.append(message)
	self._stateMonitor.add_message(message)
def _estimateTotalPrintTime(self, progress, printTime):
if not progress or not printTime or not self._timeEstimationData:
return None
else:
newEstimate = printTime / progress
self._timeEstimationData.update(newEstimate)
result = None
if self._timeEstimationData.is_stable():
result = self._timeEstimationData.average_total_rolling
return result
def _setProgressData(self, completion=None, filepos=None, printTime=None, printTimeLeft=None):
	"""Pushes a progress snapshot to the state monitor; *completion* is a 0.0-1.0 fraction, reported as int percent."""
	self._stateMonitor.set_progress(dict(completion=int(completion * 100) if completion is not None else None,
	                                     filepos=filepos,
	                                     printTime=int(printTime) if printTime is not None else None,
	                                     printTimeLeft=int(printTimeLeft) if printTimeLeft is not None else None))
def _updateProgressDataCallback(self):
	"""
	Called by the state monitor whenever progress data was marked dirty; assembles a fresh
	progress dict (completion percent, file position, print time, time left and its origin).
	"""
	if self._comm is None:
		progress = None
		filepos = None
		printTime = None
		cleanedPrintTime = None
	else:
		progress = self._comm.getPrintProgress()
		filepos = self._comm.getPrintFilepos()
		printTime = self._comm.getPrintTime()
		cleanedPrintTime = self._comm.getCleanedPrintTime()

	# pull any statistical total print time (past prints or analysis) from the selected file
	statisticalTotalPrintTime = None
	statisticalTotalPrintTimeType = None
	if self._selectedFile and "estimatedPrintTime" in self._selectedFile \
			and self._selectedFile["estimatedPrintTime"]:
		statisticalTotalPrintTime = self._selectedFile["estimatedPrintTime"]
		statisticalTotalPrintTimeType = self._selectedFile.get("estimatedPrintTimeType", None)

	printTimeLeft, printTimeLeftOrigin = self._estimatePrintTimeLeft(progress, printTime, cleanedPrintTime, statisticalTotalPrintTime, statisticalTotalPrintTimeType)

	if progress is not None:
		# only report to progress plugins when the int percentage actually changed
		progress_int = int(progress * 100)
		if self._lastProgressReport != progress_int:
			self._lastProgressReport = progress_int
			self._reportPrintProgressToPlugins(progress_int)

	return dict(completion=progress * 100 if progress is not None else None,
	            filepos=filepos,
	            printTime=int(printTime) if printTime is not None else None,
	            printTimeLeft=int(printTimeLeft) if printTimeLeft is not None else None,
	            printTimeLeftOrigin=printTimeLeftOrigin)
def _estimatePrintTimeLeft(self, progress, printTime, cleanedPrintTime, statisticalTotalPrintTime, statisticalTotalPrintTimeType):
"""
Tries to estimate the print time left for the print job
This is somewhat horrible since accurate print time estimation is pretty much impossible to
achieve, considering that we basically have only two data points (current progress in file and
time needed for that so far - former prints or a file analysis might not have happened or simply
be completely impossible e.g. if the file is stored on the printer's SD card) and
hence can only do a linear estimation of a completely non-linear process. That's a recipe
for inaccurate predictions right there. Yay.
Anyhow, here's how this implementation works. This method gets the current progress in the
printed file (percentage based on bytes read vs total bytes), the print time that elapsed,
the same print time with the heat up times subtracted (if possible) and if available also
some statistical total print time (former prints or a result from the GCODE analysis).
1. First get an "intelligent" estimate based on the :class:`~octoprint.printer.estimation.TimeEstimationHelper`.
That thing tries to detect if the estimation based on our progress and time needed for that becomes
stable over time through a rolling window and only returns a result once that appears to be the
case.
2. If we have any statistical data (former prints or a result from the GCODE analysis)
but no intelligent estimate yet, we'll use that for the next step. Otherwise, up to a certain percentage
in the print we do a percentage based weighing of the statistical data and the intelligent
estimate - the closer to the beginning of the print, the more precedence for the statistical
data, the closer to the cut off point, the more precendence for the intelligent estimate. This
is our preliminary total print time.
3. If the total print time is set, we do a sanity check for it. Based on the total print time
estimate and the time we already spent printing, we calculate at what percentage we SHOULD be
and compare that to the percentage at which we actually ARE. If it's too far off, our total
can't be trusted and we fall back on the dumb estimate. Same if the time we spent printing is
already higher than our total estimate.
4. If we do NOT have a total print time estimate yet but we've been printing for longer than
a configured amount of minutes or are further in the file than a configured percentage, we
also use the dumb estimate for now.
Yes, all this still produces horribly inaccurate results. But we have to do this live during the print and
hence can't produce to much computational overhead, we do not have any insight into the firmware implementation
with regards to planner setup and acceleration settings, we might not even have access to the printed file's
contents and such we need to find something that works "mostly" all of the time without costing too many
resources. Feel free to propose a better solution within the above limitations (and I mean that, this solution
here makes me unhappy).
Args:
progress (float or None): Current percentage in the printed file
printTime (float or None): Print time elapsed so far
cleanedPrintTime (float or None): Print time elapsed minus the time needed for getting up to temperature
(if detectable).
statisticalTotalPrintTime (float or None): Total print time of past prints against same printer profile,
or estimated total print time from GCODE analysis.
statisticalTotalPrintTimeType (str or None): Type of statistical print time, either "average" (total time
of former prints) or "analysis"
Returns:
(2-tuple) estimated print time left or None if not proper estimate could be made at all, origin of estimation
"""
if progress is None or printTime is None or cleanedPrintTime is None:
return None
dumbTotalPrintTime = printTime / progress
estimatedTotalPrintTime = self._estimateTotalPrintTime(progress, cleanedPrintTime)
totalPrintTime = estimatedTotalPrintTime
printTimeLeftOrigin = "estimate"
if statisticalTotalPrintTime is not None:
if estimatedTotalPrintTime is None:
# no estimate yet, we'll use the statistical total
totalPrintTime = statisticalTotalPrintTime
printTimeLeftOrigin = statisticalTotalPrintTimeType
else:
if progress < self._timeEstimationStatsWeighingUntil:
# still inside weighing range, use part stats, part current estimate
sub_progress = progress * (1 / self._timeEstimationStatsWeighingUntil)
if sub_progress > 1.0:
sub_progress = 1.0
printTimeLeftOrigin = "mixed-" + statisticalTotalPrintTimeType
else:
# use only the current estimate
sub_progress = 1.0
printTimeLeftOrigin = "estimate"
# combine
totalPrintTime = (1.0 - sub_progress) * statisticalTotalPrintTime \
+ sub_progress * estimatedTotalPrintTime
printTimeLeft = None
if totalPrintTime is not None:
# sanity check current total print time estimate
assumed_progress = cleanedPrintTime / totalPrintTime
min_progress = progress - self._timeEstimationValidityRange
max_progress = progress + self._timeEstimationValidityRange
if min_progress <= assumed_progress <= max_progress and totalPrintTime > cleanedPrintTime:
# appears sane, we'll use it
printTimeLeft = totalPrintTime - cleanedPrintTime
else:
# too far from the actual progress or negative,
# we use the dumb print time instead
printTimeLeft = dumbTotalPrintTime - cleanedPrintTime
printTimeLeftOrigin = "linear"
else:
printTimeLeftOrigin = "linear"
if progress > self._timeEstimationForceDumbFromPercent or \
cleanedPrintTime >= self._timeEstimationForceDumbAfterMin * 60:
# more than x% or y min printed and still no real estimate, ok, we'll use the dumb variant :/
printTimeLeft = dumbTotalPrintTime - cleanedPrintTime
if printTimeLeft is not None and printTimeLeft < 0:
# shouldn't actually happen, but let's make sure
printTimeLeft = None
return printTimeLeft, printTimeLeftOrigin
def _addTemperatureData(self, temp, bedTemp):
	"""
	Records a new temperature sample: *temp* maps tool numbers to (actual, target) tuples,
	*bedTemp* is an (actual, target) tuple or None. Updates history, caches and state monitor.
	"""
	currentTimeUtc = int(time.time())

	data = {
		"time": currentTimeUtc
	}
	for tool in temp.keys():
		data["tool%d" % tool] = {
			"actual": temp[tool][0],
			"target": temp[tool][1]
		}
	if bedTemp is not None and isinstance(bedTemp, tuple):
		data["bed"] = {
			"actual": bedTemp[0],
			"target": bedTemp[1]
		}

	self._temps.append(data)

	# cache the latest values for get_current_temperatures()
	self._temp = temp
	self._bedTemp = bedTemp

	self._stateMonitor.add_temperature(data)
def _setJobData(self, filename, filesize, sd):
	"""
	Updates the selected-file bookkeeping and the state monitor's job data for *filename*
	(clearing both if filename is None). Pulls estimated/average/last print times and
	filament usage from the file manager's metadata where available.
	"""
	if filename is not None:
		if sd:
			# SD filenames are reported with a leading slash - strip it for storage paths
			path_in_storage = filename
			if path_in_storage.startswith("/"):
				path_in_storage = path_in_storage[1:]
			path_on_disk = None
		else:
			path_in_storage = self._fileManager.path_in_storage(FileDestinations.LOCAL, filename)
			path_on_disk = self._fileManager.path_on_disk(FileDestinations.LOCAL, filename)
		self._selectedFile = {
			"filename": path_in_storage,
			"filesize": filesize,
			"sd": sd,
			"estimatedPrintTime": None
		}
	else:
		# no file selected (anymore): clear selection and job data and bail out
		self._selectedFile = None
		self._stateMonitor.set_job_data({
			"file": {
				"name": None,
				"origin": None,
				"size": None,
				"date": None
			},
			"estimatedPrintTime": None,
			"averagePrintTime": None,
			"lastPrintTime": None,
			"filament": None,
		})
		return

	estimatedPrintTime = None
	lastPrintTime = None
	averagePrintTime = None
	date = None
	filament = None
	if path_on_disk:
		# Use a string for mtime because it could be float and the
		# javascript needs to exact match
		if not sd:
			date = int(os.stat(path_on_disk).st_mtime)

		try:
			fileData = self._fileManager.get_metadata(FileDestinations.SDCARD if sd else FileDestinations.LOCAL, path_on_disk)
		except:
			# metadata lookup is best effort only
			fileData = None
		if fileData is not None:
			if "analysis" in fileData:
				if estimatedPrintTime is None and "estimatedPrintTime" in fileData["analysis"]:
					estimatedPrintTime = fileData["analysis"]["estimatedPrintTime"]
				if "filament" in fileData["analysis"].keys():
					filament = fileData["analysis"]["filament"]
			if "statistics" in fileData:
				# statistics are keyed by printer profile id
				printer_profile = self._printerProfileManager.get_current_or_default()["id"]
				if "averagePrintTime" in fileData["statistics"] and printer_profile in fileData["statistics"]["averagePrintTime"]:
					averagePrintTime = fileData["statistics"]["averagePrintTime"][printer_profile]
				if "lastPrintTime" in fileData["statistics"] and printer_profile in fileData["statistics"]["lastPrintTime"]:
					lastPrintTime = fileData["statistics"]["lastPrintTime"][printer_profile]

			# prefer the average of former prints over the analysis result
			if averagePrintTime is not None:
				self._selectedFile["estimatedPrintTime"] = averagePrintTime
				self._selectedFile["estimatedPrintTimeType"] = "average"
			elif estimatedPrintTime is not None:
				# TODO apply factor which first needs to be tracked!
				self._selectedFile["estimatedPrintTime"] = estimatedPrintTime
				self._selectedFile["estimatedPrintTimeType"] = "analysis"

	self._stateMonitor.set_job_data({
		"file": {
			"name": path_in_storage,
			"origin": FileDestinations.SDCARD if sd else FileDestinations.LOCAL,
			"size": filesize,
			"date": date
		},
		"estimatedPrintTime": estimatedPrintTime,
		"averagePrintTime": averagePrintTime,
		"lastPrintTime": lastPrintTime,
		"filament": filament,
	})
def _sendInitialStateUpdate(self, callback):
try:
data = self._stateMonitor.get_current_data()
data.update({
"temps": list(self._temps),
"logs": list(self._log),
"messages": list(self._messages)
})
callback.on_printer_send_initial_data(data)
except:
self._logger.exception("Error while trying to send inital state update")
def _getStateFlags(self):
return {
"operational": self.is_operational(),
"printing": self.is_printing(),
"closedOrError": self.is_closed_or_error(),
"error": self.is_error(),
"paused": self.is_paused(),
"ready": self.is_ready(),
"sdReady": self.is_sd_ready()
}
#~~ comm.MachineComPrintCallback implementation
def on_comm_log(self, message):
	"""
	Callback method for the comm object, called upon log output.
	"""
	# normalize to unicode before buffering, replacing undecodable bytes
	self._addLog(to_unicode(message, "utf-8", errors="replace"))
def on_comm_temperature_update(self, temp, bedTemp):
	"""Comm callback: a new temperature reading arrived; record it."""
	self._addTemperatureData(temp, bedTemp)
def on_comm_state_change(self, state):
	"""
	Callback method for the comm object, called if the connection state changes.
	"""
	oldState = self._state

	# capture the state string before we potentially drop the comm instance below
	state_string = None
	if self._comm is not None:
		state_string = self._comm.getStateString()

	# forward relevant state changes to gcode manager
	if oldState == comm.MachineCom.STATE_PRINTING:
		if self._selectedFile is not None:
			if state == comm.MachineCom.STATE_CLOSED or state == comm.MachineCom.STATE_ERROR or state == comm.MachineCom.STATE_CLOSED_WITH_ERROR:
				# connection died mid-print: log this print as failed
				self._fileManager.log_print(FileDestinations.SDCARD if self._selectedFile["sd"] else FileDestinations.LOCAL, self._selectedFile["filename"], time.time(), self._comm.getPrintTime(), False, self._printerProfileManager.get_current_or_default()["id"])
		self._analysisQueue.resume()  # printing done, put those cpu cycles to good use
	elif state == comm.MachineCom.STATE_PRINTING:
		self._analysisQueue.pause()  # do not analyse files while printing

	if state == comm.MachineCom.STATE_CLOSED or state == comm.MachineCom.STATE_CLOSED_WITH_ERROR:
		# connection is gone: drop the comm instance and reset job/progress data
		if self._comm is not None:
			self._comm = None

		self._setProgressData(completion=0)
		self._setCurrentZ(None)
		self._setJobData(None, None, None)

	self._setState(state, state_string=state_string)
def on_comm_message(self, message):
	"""
	Callback method for the comm object, called upon message exchanges via serial.
	Stores the message in the message buffer, truncates buffer to the last 300 lines.
	"""
	# normalize to unicode before buffering, replacing undecodable bytes
	self._addMessage(to_unicode(message, "utf-8", errors="replace"))
def on_comm_progress(self):
	"""
	Callback method for the comm object, called upon any change in progress of the printjob.
	Triggers storage of new values for printTime, printTimeLeft and the current progress.
	"""
	# only marks progress dirty - the state monitor pulls fresh values lazily
	self._stateMonitor.trigger_progress_update()
def on_comm_z_change(self, newZ):
	"""
	Callback method for the comm object, called upon change of the z-layer.
	"""
	previous = self._currentZ
	if newZ != previous:
		# we have to react to all z-changes, even those that might "go backward" due to a slicer's retraction or
		# anti-backlash-routines. Event subscribes should individually take care to filter out "wrong" z-changes
		eventManager().fire(Events.Z_CHANGE, {"new": newZ, "old": previous})

	self._setCurrentZ(newZ)
def on_comm_sd_state_change(self, sdReady):
	"""Comm callback: the SD ready state changed; push refreshed state text and flags."""
	self._stateMonitor.set_state({"text": self.get_state_string(), "flags": self._getStateFlags()})
def on_comm_sd_files(self, files):
	"""Comm callback: a fresh SD file list arrived; notify listeners and unblock waiting refreshes."""
	eventManager().fire(Events.UPDATED_FILES, {"type": "gcode"})
	# wakes up refresh_sd_files(blocking=True) waiters
	self._sdFilelistAvailable.set()
def on_comm_file_selected(self, filename, filesize, sd):
	"""Comm callback: a file selection went through; refresh job data and optionally start printing."""
	self._setJobData(filename, filesize, sd)
	self._stateMonitor.set_state({"text": self.get_state_string(), "flags": self._getStateFlags()})

	if self._printAfterSelect:
		# select_file() requested an immediate print start
		self._printAfterSelect = False
		self.start_print(pos=self._posAfterSelect)
def on_comm_print_job_done(self):
	"""Comm callback: the running print finished successfully."""
	# NOTE(review): assumes _selectedFile and _comm are still set when this fires - confirm
	self._fileManager.log_print(FileDestinations.SDCARD if self._selectedFile["sd"] else FileDestinations.LOCAL, self._selectedFile["filename"], time.time(), self._comm.getPrintTime(), True, self._printerProfileManager.get_current_or_default()["id"])
	# pin the progress display to 100% / end of file
	self._setProgressData(completion=1.0, filepos=self._selectedFile["filesize"], printTime=self._comm.getPrintTime(), printTimeLeft=0)
	self._stateMonitor.set_state({"text": self.get_state_string(), "flags": self._getStateFlags()})
	# a successfully finished print makes the recovery data obsolete
	self._fileManager.delete_recovery_data()
def on_comm_file_transfer_started(self, filename, filesize):
	"""Comm callback: streaming of a file to the printer's SD card has begun."""
	self._sdStreaming = True

	self._setJobData(filename, filesize, True)
	self._setProgressData(completion=0.0, filepos=0, printTime=0)
	self._stateMonitor.set_state({"text": self.get_state_string(), "flags": self._getStateFlags()})
def on_comm_file_transfer_done(self, filename):
	"""Comm callback: the SD streaming finished; notify the requester and reset job data."""
	self._sdStreaming = False

	if self._streamingFinishedCallback is not None:
		# in case of SD files, both filename and absolutePath are the same, so we set the (remote) filename for
		# both parameters
		self._streamingFinishedCallback(filename, filename, FileDestinations.SDCARD)

	self._setCurrentZ(None)
	self._setJobData(None, None, None)
	self._setProgressData()
	self._stateMonitor.set_state({"text": self.get_state_string(), "flags": self._getStateFlags()})
def on_comm_force_disconnect(self):
	"""Comm callback: the comm layer demands an immediate disconnect."""
	self.disconnect()
def on_comm_record_fileposition(self, origin, name, pos):
	"""Comm callback: persists the current file position as print recovery data (best effort)."""
	try:
		self._fileManager.save_recovery_data(origin, name, pos)
	except NoSuchStorage:
		# recovery data can only be stored for known storages, silently skip others
		pass
	except:
		self._logger.exception("Error while trying to persist print recovery data")
class StateMonitor(object):
	"""
	Aggregates printer state updates and rate-limits pushing them to the registered
	on_update callback via a dedicated worker thread (at most one push per *interval* seconds).
	"""

	def __init__(self, interval=0.5, on_update=None, on_add_temperature=None, on_add_log=None, on_add_message=None, on_get_progress=None):
		self._interval = interval
		self._update_callback = on_update
		self._on_add_temperature = on_add_temperature
		self._on_add_log = on_add_log
		self._on_add_message = on_add_message
		self._on_get_progress = on_get_progress

		self._state = None
		self._job_data = None
		self._gcode_data = None
		self._sd_upload_data = None
		self._current_z = None
		self._progress = None

		# when True the cached progress is stale and will be re-fetched lazily
		self._progress_dirty = False

		self._offsets = {}

		self._change_event = threading.Event()
		self._state_lock = threading.Lock()
		self._progress_lock = threading.Lock()

		self._last_update = time.time()
		# daemon thread so it never blocks interpreter shutdown
		self._worker = threading.Thread(target=self._work)
		self._worker.daemon = True
		self._worker.start()

	def _get_current_progress(self):
		# pulls fresh progress data from the configured callback if one was provided
		if callable(self._on_get_progress):
			return self._on_get_progress()
		return self._progress

	def reset(self, state=None, job_data=None, progress=None, current_z=None):
		"""Re-initializes all monitored data at once."""
		self.set_state(state)
		self.set_job_data(job_data)
		self.set_progress(progress)
		self.set_current_z(current_z)

	def add_temperature(self, temperature):
		self._on_add_temperature(temperature)
		self._change_event.set()

	def add_log(self, log):
		self._on_add_log(log)
		self._change_event.set()

	def add_message(self, message):
		self._on_add_message(message)
		self._change_event.set()

	def set_current_z(self, current_z):
		self._current_z = current_z
		self._change_event.set()

	def set_state(self, state):
		with self._state_lock:
			self._state = state
			self._change_event.set()

	def set_job_data(self, job_data):
		self._job_data = job_data
		self._change_event.set()

	def trigger_progress_update(self):
		# marks the cached progress as dirty; it is refreshed on the next get_current_data
		with self._progress_lock:
			self._progress_dirty = True
			self._change_event.set()

	def set_progress(self, progress):
		with self._progress_lock:
			self._progress_dirty = False
			self._progress = progress
			self._change_event.set()

	def set_temp_offsets(self, offsets):
		self._offsets = offsets
		self._change_event.set()

	def _work(self):
		# worker loop: wait for a change, enforce the minimum update interval,
		# then push a consistent snapshot to the update callback
		while True:
			self._change_event.wait()

			now = time.time()
			delta = now - self._last_update
			additional_wait_time = self._interval - delta
			if additional_wait_time > 0:
				time.sleep(additional_wait_time)

			with self._state_lock:
				data = self.get_current_data()
				self._update_callback(data)
				self._last_update = time.time()
				# NOTE(review): changes arriving between the callback and this clear()
				# appear to get coalesced into the snapshot just sent - confirm acceptable
				self._change_event.clear()

	def get_current_data(self):
		"""Returns the current state snapshot, refreshing progress data if it was marked dirty."""
		with self._progress_lock:
			if self._progress_dirty:
				self._progress = self._get_current_progress()
				self._progress_dirty = False

		return {
			"state": self._state,
			"job": self._job_data,
			"currentZ": self._current_z,
			"progress": self._progress,
			"offsets": self._offsets
		}
class TemperatureHistory(InvariantContainer):
	"""Container of temperature samples that only retains recent entries.

	The guaranteed invariant keeps the samples sorted chronologically and
	discards anything older than ``cutoff`` seconds (default: 30 minutes).
	"""

	def __init__(self, cutoff=30 * 60):
		def _prune_old_samples(data):
			# keep the list time-ordered, then drop samples outside the window
			data.sort(key=lambda sample: sample["time"])
			threshold = int(time.time()) - cutoff
			return [sample for sample in data if sample["time"] >= threshold]

		InvariantContainer.__init__(self, guarantee_invariant=_prune_old_samples)
|
from erukar.system.engine import EnvironmentProfile, ErukarObject
from .Location import Location
import operator, re
class Sector(ErukarObject):
    """A subdivision of a region: holds locations, adjacency and an economy."""

    def __init__(self, region, economic_seed_fn=None):
        self.coordinates = ""
        self.environment_profile = EnvironmentProfile()
        self.region = region
        self.adjacent_sectors = set()
        self.locations = set()
        self.name = 'Random Sector'
        self.use_day_night_cycle = False
        # Use the region's economy unless a seed function supplies a custom one.
        self.economic_profile = region.economic_profile\
            if not economic_seed_fn\
            else economic_seed_fn(self)

    def alias(self):
        return self.name

    def set_coordinates(self, new_coords):
        # Normalize overland-style "(x,y,z)" strings into a canonical tuple.
        self.coordinates = Sector.autocorrect(new_coords)

    def get_coordinates(self):
        return self.coordinates

    def adjacent(self):
        """Yield each sector adjacent to this one."""
        for sector in self.adjacent_sectors:
            yield sector

    def neighbors(self):
        return list(self.adjacent())

    def distance_to(self, sector):
        '''The sum of all coordinates adds up to zero. By taking the absolute
        value and summing them, you get twice the total distance between two coords.'''
        # TODO(review): not implemented -- always returns -1 despite the
        # docstring describing a cube-coordinate distance. Left as-is since
        # the coordinate representation of `sector` cannot be confirmed here.
        return -1

    def location(self):
        """Return an existing location, lazily creating a default one if needed."""
        if len(self.locations) > 0:
            return list(self.locations)[0]
        new_loc = Location(self)
        new_loc.name = self.name
        new_loc.environment_profile = self.environment_profile
        self.locations.add(new_loc)
        return new_loc

    @staticmethod
    def is_overland(coords):
        """True if *coords* looks like an "(x,y,z)" integer coordinate triple."""
        # The previous guard ``if coords is not str`` identity-compared the
        # value against the *type object* and was therefore always true, so
        # the conversion always ran; do it unconditionally and explicitly.
        coords = str(coords).replace(' ', '')
        return re.match(r'\(([-+]*\d+),([-+]*\d+),([-+]*\d+)\)', coords) is not None

    @staticmethod
    def autocorrect(coord_string):
        # Only overland-looking strings are converted; everything else passes through.
        if Sector.is_overland(coord_string):
            return Sector.to_overland(coord_string)
        return coord_string

    @staticmethod
    def to_overland(coords):
        """Parse "(x,y,z)" (or a 3-element sequence) into a tuple of three ints.

        Raises ValueError for unparseable input or a wrong element count.
        """
        out = coords
        if isinstance(coords, str):
            out = coords\
                .strip()\
                .replace(' ', '')\
                .replace('(', '')\
                .replace(')', '')\
                .split(',')
        elif not isinstance(coords, tuple) and not isinstance(coords, list):
            raise ValueError('Malformed Overland Coordinates: Unable to parse a non-str non-list non-tuple input (received {})'.format(type(coords)))
        if len(out) != 3:
            raise ValueError('Malformed Overland Coordinates String: Received "{}", which returned "{}"'.format(coords, out))
        return tuple(int(x) for x in out)

    def supply_and_demand_scalar(self, good):
        return self.economic_profile.supply_and_demand_scalar(good)

    def register_transaction(self, good, at_price, supply_shift):
        self.economic_profile.register_transaction(good, at_price, supply_shift)
|
import pytest
from collections import OrderedDict
from zone_normalize import split_comments, zone_normalize, zone_dict_to_str
# Expected output of zone_normalize() for test_data/com.zone (see the
# sample_com_tld fixture): one OrderedDict per record with string-valued
# 'origin', 'ttl', 'class', 'type' and the remaining rdata tokens in 'data'.
REFERENCE_COM_ZONE = [
    OrderedDict([('origin', 'com.'), ('ttl', '900'), ('class', 'in'), ('type', 'soa'),
                 ('data', ['a.gtld-servers.net.', 'nstld.verisign-grs.com.', '(',
                           '1463069743', '1800', '900', '604800', '86400', ')'])]),
    OrderedDict([('origin', 'com.'), ('ttl', '172800'), ('class', 'in'), ('type', 'ns'),
                 ('data', ['a.gtld-servers.net.'])]),
    OrderedDict([('origin', 'ns2.muscleextremexxl.com.'), ('ttl', '172800'), ('class', 'in'),
                 ('type', 'a'), ('data', ['149.255.57.124'])]),
    OrderedDict([('origin', 'ns3.muscleextremexxl.com.'), ('ttl', '172800'), ('class', 'in'),
                 ('type', 'a'), ('data', ['193.105.134.6'])]),
    OrderedDict([('origin', 'ns1.muscleextremexxl.com.'), ('ttl', '172800'), ('class', 'in'),
                 ('type', 'a'), ('data', ['31.220.42.157'])]),
    OrderedDict([('origin', 'ns1.newpointnet.com.'), ('ttl', '172800'), ('class', 'in'),
                 ('type', 'a'), ('data', ['208.67.44.155'])]),
    OrderedDict([('origin', 'ns2.newpointnet.com.'), ('ttl', '172800'), ('class', 'in'),
                 ('type', 'a'), ('data', ['198.175.28.6'])]),
    OrderedDict([('origin', 'ns0.world-of-digital.com.'), ('ttl', '172800'), ('class', 'in'),
                 ('type', 'a'), ('data', ['212.227.82.151'])]),
    OrderedDict([('origin', 'ns.hemendemo.com.'), ('ttl', '172800'), ('class', 'in'),
                 ('type', 'a'), ('data', ['185.87.121.219'])]),
    OrderedDict([('origin', 'www.poldega.com.'), ('ttl', '172800'), ('class', 'in'),
                 ('type', 'a'), ('data', ['89.16.164.137'])]),
    OrderedDict([('origin', 'vrsn-end-of-zone-marker-dummy-record.com.'), ('ttl', '172800'),
                 ('class', 'in'), ('type', 'txt'), ('data', ['plenus'])])]
# Expected normalization of the example zone from RFC 1035 (test_data/rfc1035.zone).
RFC1035_EXAMPLE = [
    OrderedDict([('origin', 'isi.edu.'), ('ttl', '900'), ('class', 'in'), ('type', 'soa'),
                 ('data', ['venera', 'action\\.domains', '(', '20', '7200', '600',
                           '3600000', '60)'])]),
    OrderedDict([('origin', 'isi.edu.'), ('ttl', '900'), ('class', 'in'), ('type', 'ns'),
                 ('data', ['a.isi.edu.'])]),
    OrderedDict([('origin', 'isi.edu.'), ('ttl', '900'), ('class', 'in'), ('type', 'ns'),
                 ('data', ['venera'])]),
    OrderedDict([('origin', 'isi.edu.'), ('ttl', '900'), ('class', 'in'), ('type', 'ns'),
                 ('data', ['vaxa'])]),
    OrderedDict([('origin', 'isi.edu.'), ('ttl', '900'), ('class', 'in'), ('type', 'mx'),
                 ('data', ['10', 'venera'])]),
    OrderedDict([('origin', 'isi.edu.'), ('ttl', '900'), ('class', 'in'), ('type', 'mx'),
                 ('data', ['20', 'vaxa'])]),
    OrderedDict([('origin', 'isi.edu.'), ('ttl', '900'), ('class', 'in'), ('type', 'a'),
                 ('data', ['a', '26.3.0.103'])]),
    OrderedDict([('origin', 'venera.isi.edu.'), ('ttl', '900'), ('class', 'in'), ('type', 'a'),
                 ('data', ['10.1.0.52'])]),
    OrderedDict([('origin', 'isi.edu.'), ('ttl', '900'), ('class', 'in'), ('type', 'a'),
                 ('data', ['128.9.0.32'])]),
    OrderedDict([('origin', 'vaxa.isi.edu.'), ('ttl', '900'), ('class', 'in'), ('type', 'a'),
                 ('data', ['10.2.0.27'])]),
    OrderedDict([('origin', 'isi.edu.'), ('ttl', '900'), ('class', 'in'), ('type', 'a'),
                 ('data', ['128.9.0.33'])])]
# Expected normalization of the signed example zone from RFC 4035
# (test_data/rfc4035.zone); only the first nine records are compared.
RFC4035_EXAMPLE = [
    OrderedDict([('origin', 'example.'), ('ttl', '3600'), ('class', 'in'), ('type', 'soa'),
                 ('data', ['ns1.example.', 'bugs.x.w.example.', '(', '1081539377',
                           '3600', '300', '3600000', '3600', ')'])]),
    OrderedDict([('origin', 'example.'), ('ttl', '3600'), ('class', 'in'), ('type', 'rrsig'),
                 ('data', ['soa', '5', '1', '3600', '20040509183619', '(', '20040409183619',
                           '38519', 'example.',
                           'onx0k36rcjaxytcngq6iqnpnv5+drqyasc9h',
                           '7tsjahcqbhe67sr6ah2xdugcqqwu/n0uvzrf',
                           'vkgo9ebarz0gwdkcuwlm6enb5six2k74l5lw',
                           'da7s/un/ibtdq4ay8nmnlqi7dw7n4p8/rjkb',
                           'jv7j86hyqgm5e7+miraz8v01b0i=', ')'])]),
    OrderedDict([('origin', 'example.'), ('ttl', '3600'), ('class', 'in'), ('type', 'ns'),
                 ('data', ['ns1.example.'])]),
    OrderedDict([('origin', 'example.'), ('ttl', '3600'), ('class', 'in'), ('type', 'ns'),
                 ('data', ['ns2.example.'])]),
    OrderedDict([('origin', 'example.'), ('ttl', '3600'), ('class', 'in'), ('type', 'rrsig'),
                 ('data', ['ns', '5', '1', '3600', '20040509183619', '(', '20040409183619',
                           '38519', 'example.',
                           'gl13f00f2u0r+swixxlhwsmy+qstyy5k6zfd',
                           'euivwc+wd1fmbncyql0tk7lhtx6uoxc8agnf',
                           '4isfve8xqf4q+o9qlnqizmppu3linekt4fz8',
                           'ro5urfovomrtbqxw3u0hxwugge4g3zpshv48',
                           '0hjmerazb/frpgfjpajngcq6kwg=', ')'])]),
    OrderedDict([('origin', 'example.'), ('ttl', '3600'), ('class', 'in'), ('type', 'mx'),
                 ('data', ['1', 'xx.example.'])]),
    OrderedDict([('origin', 'example.'), ('ttl', '3600'), ('class', 'in'), ('type', 'rrsig'),
                 ('data', ['mx', '5', '1', '3600', '20040509183619', '(', '20040409183619',
                           '38519', 'example.',
                           'hydhyvt5khsz7hto/vypumpmszqrcop3tzwb',
                           '2qakkhvpfau/dglgs/ikenkyogl95g4n+nze',
                           'vynu8dctockt+chpcgevjguq7a3ao9z/zkuo',
                           '6gmmuw4b89rz1puxw4jzuxj66ptwovtuu/im',
                           'w6oisukd1eqt7a0kygkg+pedxdi=', ')'])]),
    OrderedDict([('origin', 'example.'), ('ttl', '3600'), ('class', 'in'), ('type', 'nsec'),
                 ('data', ['a.example.', 'ns', 'soa', 'mx', 'rrsig', 'nsec', 'dnskey'])]),
    OrderedDict([('origin', 'example.'), ('ttl', '3600'), ('class', 'in'), ('type', 'rrsig'),
                 ('data', ['nsec', '5', '1', '3600', '20040509183619', '(', '20040409183619',
                           '38519', 'example.',
                           'o0k558jhhyrc97ishnislm4klmw48c7u7cbm',
                           'ftfhke5ivqnrvtb1stlmpgpbdic9hcryoo0v',
                           'z9me5xpzuehbvgnhd5sfzgfvegxr5nyyq4tw',
                           'sdbgibilquv1ivy29vhxy7wgr62dprz0pwvm',
                           'jffj5arxf4npxp/keowggbrzy/u=', ')'])])]
# Expected normalization of the NSEC3 example zone from RFC 5155
# (test_data/rfc5155.zone); only the first nine records are compared.
RFC5155_EXAMPLE = [
    OrderedDict([('origin', 'example.'), ('ttl', '3600'), ('class', 'in'), ('type', 'soa'),
                 ('data', ['ns1.example.', 'bugs.x.w.example.', '1', '3600', '300', '(',
                           '3600000', '3600', ')'])]),
    OrderedDict([('origin', 'example.'), ('ttl', '3600'), ('class', 'in'), ('type', 'rrsig'),
                 ('data', ['soa', '7', '1', '3600', '20150420235959', '20051021000000',
                           '(', '40430', 'example.',
                           'hu25uiynpmvpivbrldn+9mlp9zql39qaud8i',
                           'q4zllywfuubbas41pg+68z81q1xhkyaceyhd',
                           'vi2lmkusbzst0q==', ')'])]),
    OrderedDict([('origin', 'example.'), ('ttl', '3600'), ('class', 'in'), ('type', 'ns'),
                 ('data', ['ns1.example.'])]),
    OrderedDict([('origin', 'example.'), ('ttl', '3600'), ('class', 'in'), ('type', 'ns'),
                 ('data', ['ns2.example.'])]),
    OrderedDict([('origin', 'example.'), ('ttl', '3600'), ('class', 'in'), ('type', 'rrsig'),
                 ('data', ['ns', '7', '1', '3600', '20150420235959', '20051021000000',
                           '(', '40430', 'example.',
                           'pvogtmk1hhestau+hwdwc8ts+6c8qtqd4pqj',
                           'qotdevgg+ma+ai4fwdehu3qhjylcq9tbd2vv',
                           'cnmxjtz6syobxa==', ')'])]),
    OrderedDict([('origin', 'example.'), ('ttl', '3600'), ('class', 'in'), ('type', 'mx'),
                 ('data', ['1', 'xx.example.'])]),
    OrderedDict([('origin', 'example.'), ('ttl', '3600'), ('class', 'in'), ('type', 'rrsig'),
                 ('data', ['mx', '7', '1', '3600', '20150420235959', '20051021000000',
                           '(', '40430', 'example.',
                           'ggq1a9xs47k42vpvpl/a1bwuz/6xsnhkjotw',
                           '9so8mqtztl2wjbsnoqsaohrrcrrbyriel/gz',
                           'n9mto/kx+wbo+w==', ')'])]),
    OrderedDict([('origin', 'example.'), ('ttl', '3600'), ('class', 'in'), ('type', 'dnskey'),
                 ('data', ['256', '3', '7',
                           'aweaaaetidlzskwut4swwr8yu0wphpiui8lu', '(',
                           'sad0qpwu+wzt89epo6thzkmbvdkc7qphqo2h',
                           'ty4hhn9npwfrw5byube=', ')'])]),
    OrderedDict([('origin', 'example.'), ('ttl', '3600'), ('class', 'in'), ('type', 'dnskey'),
                 ('data', ['257', '3', '7',
                           'aweaaculfv1vhmqx6nsouoq2r/dsr7xm3upj', '(',
                           'j7iommwspjabvfw8q0rovxdm6kzt+tau92l9',
                           'absudblmfin8cvf3n4s=', ')'])])]
@pytest.fixture()
def sample_com_tld():
    """Yield an open handle to the sample .com TLD zone fragment.

    The original defined a local ``fin()`` closer but never registered it
    (e.g. via ``request.addfinalizer``), so the file handle leaked; a yield
    fixture guarantees the file is closed on teardown.
    """
    with open('test_data/com.zone', mode='rt') as com_zone:
        yield com_zone
@pytest.fixture()
def rfc_examples():
    """Yield a dict mapping RFC number -> open handle to its example zone.

    The original defined a local ``fin()`` closer but never registered it,
    leaking all three file handles; the try/finally around the yield now
    closes every handle that was successfully opened.
    """
    rfcs = [1035, 4035, 5155]
    zones = {}
    try:
        for rfc in rfcs:
            zones[rfc] = open('test_data/rfc{}.zone'.format(rfc), mode='rt')
        yield zones
    finally:
        for handle in zones.values():
            handle.close()
class TestParse:
    """Tests for split_comments() and zone_normalize().

    Pass-through comprehensions ``[l for l in x]`` were replaced by
    ``list(x)``, and the redundant ``@pytest.mark.usefixtures`` decorators
    were dropped (the fixtures are already requested as arguments).
    """

    def test_comment_split(self):
        simple_test_comment = "; foobar"
        _, comment = split_comments(simple_test_comment)
        assert "foobar" == comment

    def test_rdata_split(self):
        test_comment = "example.com. NS ns1.example.com ; an example NS record"
        rdata, _ = split_comments(test_comment)
        assert "example.com. NS ns1.example.com" == rdata

    def test_com_tld_parse(self, sample_com_tld):
        zone = list(zone_normalize(sample_com_tld))
        assert REFERENCE_COM_ZONE == zone

    def test_implicit_origin(self):
        # Records rendered back to strings should round-trip without an
        # explicit $origin directive.
        implicit_origin_zone = [zone_dict_to_str(REFERENCE_COM_ZONE[0]),
                                zone_dict_to_str(REFERENCE_COM_ZONE[1])]
        zone = list(zone_normalize(implicit_origin_zone))
        assert REFERENCE_COM_ZONE[0:2] == zone

    def test_implicit_class(self):
        # A record omitting the class should default to 'in'.
        implicit_class_record = ['example. 3600 hinfo "klh-10" "tops-20"']
        expected = [OrderedDict([('origin', 'example.'),
                                 ('ttl', '3600'),
                                 ('class', 'in'),
                                 ('type', 'hinfo'),
                                 ('data', ['"klh-10"', '"tops-20"'])])]
        assert expected == list(zone_normalize(implicit_class_record))

    def test_implicit_ttl_following_integer_name(self):
        # A purely numeric owner name must not be mistaken for a TTL.
        integer_name_record = ['$origin example.',
                               '$ttl 2600',
                               '1234567 NS example.com']
        expected = [OrderedDict([('origin', '1234567.example.'),
                                 ('ttl', '2600'),
                                 ('class', 'in'),
                                 ('type', 'ns'),
                                 ('data', ['example.com'])])]
        assert expected == list(zone_normalize(integer_name_record))

    def test_rfc_examples(self, rfc_examples):
        zone1035 = list(zone_normalize(rfc_examples[1035]))
        zone4035 = list(zone_normalize(rfc_examples[4035]))
        zone5155 = list(zone_normalize(rfc_examples[5155]))
        assert RFC1035_EXAMPLE == zone1035
        assert RFC4035_EXAMPLE == zone4035[0:9]
        assert RFC5155_EXAMPLE == zone5155[0:9]
|
from django.conf.urls.defaults import *
from django.contrib import admin
from django.conf import settings
admin.autodiscover()  # register every installed app's admin module

# URL routing. NOTE(review): uses the legacy `django.conf.urls.defaults`
# `patterns('', ...)` syntax with string view paths (pre-Django-1.6 style).
urlpatterns = patterns('',
    (r'^admin/', include(admin.site.urls)),
    (r'^api/', include('api.urls', 'api')),
    # Serve uploaded media directly -- development-only configuration.
    (r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT}),
)
|
from django.shortcuts import render
from django.utils.translation import ugettext_lazy as _
from django.db.models import Sum
from books.models import BookType, OrderedBook
from common.bookchooserwizard import BookChooserWizard
from orders.models import Order
from utils.books import get_available_books, get_available_amount
class PurchaseWizard(BookChooserWizard):
    """Book-purchase flow built on BookChooserWizard.

    Validates the user's chosen books against current stock, records an
    Order with its OrderedBook rows, and renders the confirmation page.
    """

    @property
    def page_title(self):
        return _("Purchase books")

    @property
    def url_namespace(self):
        return "purchase"

    @property
    def session_var_name(self):
        return "purchase_chosen_books"

    @property
    def feature_books_in_stock(self):
        return True

    def process_books_summary(self, session, user, book_list):
        """Validate *book_list* against available stock and create an Order.

        Returns ``(success, corrected_book_list)``. On any shortage the
        freshly created Order is rolled back and the entries are clamped to
        the amounts actually available.
        """
        book_type_list = [book['pk'] for book in book_list]  # List of Book primary keys

        # Select the Books which are available for purchasing and match the BookTypes we're looking for
        books, amounts = get_available_books(with_amounts=True)
        books = books.filter(book_type__in=book_type_list).order_by('-pk')

        # Remove duplicated Books. Thanks to order_by('-pk'), we'll have firstly added book as a value here
        books_by_id = dict()
        for book in books:
            books_by_id.setdefault(book.book_type.pk, []).append(book)

        order = Order(user=user)
        order.save()

        error_occurred = False
        correct_book_list = []
        parts_of_order = []
        for book in book_list:
            if book['pk'] in books_by_id and book['amount'] > 0:
                if amounts[book['pk']] >= book['amount']:
                    parts_of_order.append(OrderedBook(book_type=BookType.objects.get(pk=book['pk']),
                                                      count=book['amount'], order=order))
                else:
                    # Not enough stock: clamp to what is available and flag the error.
                    book['amount'] = amounts[book['pk']]
                    error_occurred = True
                correct_book_list.append(book)
            else:
                error_occurred = True

        if error_occurred:
            # Roll back the Order created above; nothing was reserved yet.
            order.delete()
        else:
            OrderedBook.objects.bulk_create(parts_of_order)
            session['order_id'] = order.pk

        return not error_occurred, correct_book_list

    def success(self, request):
        """Render the confirmation page for the order stored in the session."""
        order = Order.objects.prefetch_related('user', 'orderedbook_set', 'orderedbook_set__book_type').annotate(
            books_count=Sum('orderedbook__count')).get(pk=request.session['order_id'])

        # Order id shown to the user
        order_id = order.date.strftime("%Y%m%d") + "-" + str(order.pk) + "-" + str(order.user.pk) + "-" + str(
            order.books_count)

        # Map each BookType to the purchased count (dict comprehension instead
        # of dict() over a generator), then annotate the types for the template.
        amounts = {orderedbook.book_type: orderedbook.count for orderedbook in order.orderedbook_set.all()}
        for book_type in amounts:  # iterate the dict directly instead of .keys()
            book_type.amount = amounts[book_type]

        del request.session['order_id']
        return render(request, 'purchase/success.html',
                      {'order': order, 'order_ID': order_id, 'chosen_book_list': amounts.keys()})
|
import time
import logging
from PyQt4.QtGui import QWidget, QListWidgetItem, QImage, QIcon, QPixmap, \
QFrame, QMessageBox, QTabWidget, QVBoxLayout, \
QFormLayout, QLabel, QPushButton
from PyQt4.QtCore import SIGNAL, Qt
from weboob.tools.application.qt import QtDo, HTMLDelegate
from weboob.tools.misc import to_unicode
from weboob.capabilities.contact import ICapContact, Contact
from weboob.capabilities.chat import ICapChat
from weboob.capabilities.messages import ICapMessages, ICapMessagesPost, Message
from weboob.capabilities.base import NotLoaded
from .ui.contacts_ui import Ui_Contacts
from .ui.contact_thread_ui import Ui_ContactThread
from .ui.thread_message_ui import Ui_ThreadMessage
from .ui.profile_ui import Ui_Profile
from .ui.notes_ui import Ui_Notes
class ThreadMessage(QFrame):
    """
    This class represents a message in the thread tab.
    """
    def __init__(self, message, parent=None):
        QFrame.__init__(self, parent)
        self.ui = Ui_ThreadMessage()
        self.ui.setupUi(self)
        self.set_message(message)

    def set_message(self, message):
        """Render *message* into the widget: sender, dated header with
        read/unread flags, and the (escaped) body."""
        self.message = message
        self.ui.nameLabel.setText(message.sender)
        header = time.strftime('%Y-%m-%d %H:%M:%S', message.date.timetuple())
        if message.flags & message.IS_NOT_RECEIVED:
            header += u' — <font color=#ff0000>Unread</font>'
        elif message.flags & message.IS_RECEIVED:
            header += u' — <font color=#00ff00>Read</font>'
        self.ui.headerLabel.setText(header)
        if message.flags & message.IS_HTML:
            content = message.content
        else:
            # Escape the plain text for the rich-text label; the previous
            # chain replaced each character with itself (a no-op), leaving
            # the content unescaped.
            content = message.content.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;').replace('\n', '<br />')
        self.ui.contentLabel.setText(content)

    def __eq__(self, m):
        # Bug fix: equality is used by ContactThread._insert_message via
        # ``widget in self.messages``, which compares ThreadMessage widgets.
        # The previous ``isinstance(m, Message)`` check made this always
        # False for widgets (and would raise AttributeError for an actual
        # Message), so duplicate messages were re-inserted on every refresh.
        if not isinstance(m, ThreadMessage):
            return False
        return self.message == m.message
class ContactThread(QWidget):
    """
    The thread of the selected contact.
    """
    def __init__(self, weboob, contact, support_reply, parent=None):
        QWidget.__init__(self, parent)
        self.ui = Ui_ContactThread()
        self.ui.setupUi(self)

        self.weboob = weboob
        self.contact = contact
        self.thread = None
        # ThreadMessage widgets currently displayed, ordered newest-first.
        self.messages = []
        # Pending QtDo fetch; doubles as a "refresh in progress" flag.
        self.process_msg = None

        self.connect(self.ui.refreshButton, SIGNAL('clicked()'), self.refreshMessages)
        if support_reply:
            self.connect(self.ui.sendButton, SIGNAL('clicked()'), self.postReply)
        else:
            # Backend cannot post messages: hide the reply area entirely.
            self.ui.frame.hide()

        self.refreshMessages()

    def refreshMessages(self, fillobj=False):
        # Ignore the request if a fetch is already running.
        if self.process_msg:
            return
        self.ui.refreshButton.setEnabled(False)
        self.process_msg = QtDo(self.weboob, self.gotThread, self.gotError)
        if fillobj and self.thread:
            # Load the missing 'root' message tree of the already-known thread.
            self.process_msg.do('fillobj', self.thread, ['root'], backends=self.contact.backend)
        else:
            self.process_msg.do('get_thread', self.contact.id, backends=self.contact.backend)

    def gotError(self, backend, error, backtrace):
        # Fetching failed: lock the editor but allow retrying via refresh.
        self.ui.textEdit.setEnabled(False)
        self.ui.sendButton.setEnabled(False)
        self.ui.refreshButton.setEnabled(True)

    def gotThread(self, backend, thread):
        # A falsy thread marks the end of iteration.
        if not thread:
            #v = self.ui.scrollArea.verticalScrollBar()
            #print v.minimum(), v.value(), v.maximum(), v.sliderPosition()
            #self.ui.scrollArea.verticalScrollBar().setValue(self.ui.scrollArea.verticalScrollBar().maximum())
            self.process_msg = None
            return

        self.ui.textEdit.setEnabled(True)
        self.ui.sendButton.setEnabled(True)
        self.ui.refreshButton.setEnabled(True)

        self.thread = thread
        if thread.root is NotLoaded:
            # Messages not fetched yet: offer a button to load them.
            self._insert_load_button(0)
        else:
            for message in thread.iter_all_messages():
                self._insert_message(message)

    def _insert_message(self, message):
        widget = ThreadMessage(message)
        if widget in self.messages:
            # Already displayed: refresh it if the read/unread flags changed.
            old_widget = self.messages[self.messages.index(widget)]
            if old_widget.message.flags != widget.message.flags:
                old_widget.set_message(widget.message)
            return

        # Insert keeping the list sorted by date, newest first.
        for i, m in enumerate(self.messages):
            if widget.message.date > m.message.date:
                self.ui.scrollAreaContent.layout().insertWidget(i, widget)
                self.messages.insert(i, widget)
                if message.parent is NotLoaded:
                    self._insert_load_button(i)
                return

        self.ui.scrollAreaContent.layout().addWidget(widget)
        self.messages.append(widget)
        if message.parent is NotLoaded:
            self._insert_load_button(-1)

    def _insert_load_button(self, pos):
        # Add a "More messages..." button at position pos (or at the end for -1).
        button = QPushButton(self.tr('More messages...'))
        self.connect(button, SIGNAL('clicked()'), lambda: self._load_button_pressed(button))
        if pos >= 0:
            self.ui.scrollAreaContent.layout().insertWidget(pos, button)
        else:
            self.ui.scrollAreaContent.layout().addWidget(button)

    def _load_button_pressed(self, button):
        # Remove the button and fetch the rest of the thread.
        self.ui.scrollAreaContent.layout().removeWidget(button)
        button.hide()
        button.deleteLater()
        self.refreshMessages(fillobj=True)

    def postReply(self):
        text = unicode(self.ui.textEdit.toPlainText())
        self.ui.textEdit.setEnabled(False)
        self.ui.sendButton.setEnabled(False)
        # Reply to the newest displayed message, if any.
        m = Message(thread=self.thread,
                    id=0,
                    title=u'',
                    sender=None,
                    receivers=None,
                    content=text,
                    parent=self.messages[0].message if len(self.messages) > 0 else None)
        self.process_reply = QtDo(self.weboob, self._postReply_cb, self._postReply_eb)
        self.process_reply.do('post_message', m, backends=self.contact.backend)

    def _postReply_cb(self, backend, ignored):
        # Called once per backend plus a final None; act on the final call.
        if not backend:
            return
        self.ui.textEdit.clear()
        self.ui.textEdit.setEnabled(True)
        self.ui.sendButton.setEnabled(True)
        self.refreshMessages()
        self.process_reply = None

    def _postReply_eb(self, backend, error, backtrace):
        content = unicode(self.tr('Unable to send message:\n%s\n')) % to_unicode(error)
        if logging.root.level == logging.DEBUG:
            content += '\n%s\n' % to_unicode(backtrace)
        QMessageBox.critical(self, self.tr('Error while posting reply'),
                             content, QMessageBox.Ok)
        self.process_reply = None
class ContactProfile(QWidget):
    """Profile tab of a contact: photo carousel plus structured profile fields."""

    def __init__(self, weboob, contact, parent=None):
        QWidget.__init__(self, parent)
        self.ui = Ui_Profile()
        self.ui.setupUi(self)

        self.connect(self.ui.previousButton, SIGNAL('clicked()'), self.previousClicked)
        self.connect(self.ui.nextButton, SIGNAL('clicked()'), self.nextClicked)

        self.weboob = weboob
        self.contact = contact
        self.loaded_profile = False
        self.displayed_photo_idx = 0
        # photo.id -> pending QtDo fill for that photo's data.
        self.process_photo = {}

        # Render what we have now; fetch whatever fields are still missing.
        missing_fields = self.gotProfile(self.weboob.get_backend(contact.backend), contact)
        if len(missing_fields) > 0:
            self.process_contact = QtDo(self.weboob, self.gotProfile, self.gotError)
            self.process_contact.do('fillobj', self.contact, missing_fields, backends=self.contact.backend)

    def gotError(self, backend, error, backtrace):
        self.ui.frame_photo.hide()
        self.ui.descriptionEdit.setText('<h1>Unable to show profile</h1><p>%s</p>' % to_unicode(error))

    def gotProfile(self, backend, contact):
        """Render the contact's profile; return the set of fields still NotLoaded."""
        if not backend:
            return []

        missing_fields = set()

        self.display_photo()

        self.ui.nicknameLabel.setText('<h1>%s</h1>' % contact.name)
        if contact.status == Contact.STATUS_ONLINE:
            status_color = 0x00aa00
        elif contact.status == Contact.STATUS_OFFLINE:
            status_color = 0xff0000
        elif contact.status == Contact.STATUS_AWAY:
            status_color = 0xffad16
        else:
            status_color = 0xaaaaaa
        self.ui.statusLabel.setText('<font color="#%06X">%s</font>' % (status_color, contact.status_msg))
        self.ui.contactUrlLabel.setText('<b>URL:</b> <a href="%s">%s</a>' % (contact.url, contact.url))

        if contact.summary is NotLoaded:
            self.ui.descriptionEdit.setText('<h1>Description</h1><p><i>Receiving...</i></p>')
            missing_fields.add('summary')
        else:
            self.ui.descriptionEdit.setText('<h1>Description</h1><p>%s</p>' % contact.summary.replace('\n', '<br />'))

        if not contact.profile:
            missing_fields.add('profile')
        elif not self.loaded_profile:
            # Build the profile widgets only once.
            self.loaded_profile = True
            for head in contact.profile.itervalues():
                if head.flags & head.HEAD:
                    widget = self.ui.headWidget
                else:
                    widget = self.ui.profileTab
                self.process_node(head, widget)

        return missing_fields

    def process_node(self, node, widget):
        """Recursively turn a profile node into a widget and attach it to *widget*."""
        # Set the value widget
        value = None
        if node.flags & node.SECTION:
            value = QWidget()
            value.setLayout(QFormLayout())
            for sub in node.value.itervalues():
                self.process_node(sub, value)
        elif isinstance(node.value, list):
            value = QLabel('<br />'.join(unicode(s) for s in node.value))
            value.setWordWrap(True)
        elif isinstance(node.value, tuple):
            value = QLabel(', '.join(unicode(s) for s in node.value))
            value.setWordWrap(True)
        elif isinstance(node.value, (basestring,int,long,float)):
            value = QLabel(unicode(node.value))
        else:
            logging.warning('Not supported value: %r' % node.value)
            return

        if isinstance(value, QLabel):
            value.setTextInteractionFlags(Qt.TextSelectableByMouse|Qt.TextSelectableByKeyboard|Qt.LinksAccessibleByMouse)

        # Insert the value widget into the parent widget, depending
        # of its type.
        if isinstance(widget, QTabWidget):
            widget.addTab(value, node.label)
        elif isinstance(widget.layout(), QFormLayout):
            label = QLabel(u'<b>%s:</b> ' % node.label)
            widget.layout().addRow(label, value)
        elif isinstance(widget.layout(), QVBoxLayout):
            widget.layout().addWidget(QLabel(u'<h3>%s</h3>' % node.label))
            widget.layout().addWidget(value)
        else:
            logging.warning('Not supported widget: %r' % widget)

    def previousClicked(self):
        if len(self.contact.photos) == 0:
            return
        self.displayed_photo_idx = (self.displayed_photo_idx - 1) % len(self.contact.photos)
        self.display_photo()

    def nextClicked(self):
        if len(self.contact.photos) == 0:
            return
        self.displayed_photo_idx = (self.displayed_photo_idx + 1) % len(self.contact.photos)
        self.display_photo()

    def display_photo(self):
        """Show the currently selected photo, fetching its data on demand."""
        if self.displayed_photo_idx >= len(self.contact.photos):
            self.displayed_photo_idx = len(self.contact.photos) - 1
        if self.displayed_photo_idx < 0:
            self.ui.photoUrlLabel.setText('')
            return

        photo = self.contact.photos.values()[self.displayed_photo_idx]
        if photo.data:
            data = photo.data
            if photo.id in self.process_photo:
                # The pending fill for this photo has completed.
                self.process_photo.pop(photo.id)
        else:
            # Full data not available yet: request it and show the thumbnail
            # in the meantime (if we have one).
            self.process_photo[photo.id] = QtDo(self.weboob, lambda b,p: self.display_photo())
            self.process_photo[photo.id].do('fillobj', photo, ['data'], backends=self.contact.backend)

            if photo.thumbnail_data:
                data = photo.thumbnail_data
            else:
                return

        img = QImage.fromData(data)
        img = img.scaledToWidth(self.width()/3)

        self.ui.photoLabel.setPixmap(QPixmap.fromImage(img))
        if photo.url is not NotLoaded:
            text = '<a href="%s">%s</a>' % (photo.url, photo.url)
            if photo.hidden:
                text += '<br /><font color=#ff0000><i>(Hidden photo)</i></font>'
            self.ui.photoUrlLabel.setText(text)
class ContactNotes(QWidget):
    """ Widget for storing notes about a contact """

    def __init__(self, weboob, contact, parent=None):
        QWidget.__init__(self, parent)
        self.ui = Ui_Notes()
        self.ui.setupUi(self)

        self.weboob = weboob
        self.contact = contact

        # Lock the editor until the stored notes have been fetched.
        self.ui.textEdit.setEnabled(False)
        self.ui.saveButton.setEnabled(False)
        self.process = QtDo(self.weboob, self._getNotes_cb, self._getNotes_eb)
        self.process.do('get_notes', self.contact.id, backends=(self.contact.backend,))

        self.connect(self.ui.saveButton, SIGNAL('clicked()'), self.saveNotes)

    def _getNotes_cb(self, backend, data):
        # End of iteration (no backend) or no stored notes: just unlock the UI.
        if not backend or not data:
            self.process = None
            self.ui.textEdit.setEnabled(True)
            self.ui.saveButton.setEnabled(True)
            return
        self.ui.textEdit.setText(data)

    def _getNotes_eb(self, backend, error, backtrace):
        # A backend without note support is not an error worth reporting.
        if isinstance(error, NotImplementedError):
            return
        self.ui.textEdit.setEnabled(True)
        self.ui.saveButton.setEnabled(True)
        content = unicode(self.tr('Unable to load notes:\n%s\n')) % to_unicode(error)
        if logging.root.level == logging.DEBUG:
            content += '\n%s\n' % to_unicode(backtrace)
        QMessageBox.critical(self, self.tr('Error while loading notes'),
                             content, QMessageBox.Ok)

    def saveNotes(self):
        text = unicode(self.ui.textEdit.toPlainText())
        self.ui.saveButton.setEnabled(False)
        self.ui.textEdit.setEnabled(False)
        self.process = QtDo(self.weboob, self._saveNotes_cb, self._saveNotes_eb)
        self.process.do('save_notes', self.contact.id, text, backends=(self.contact.backend,))

    def _saveNotes_cb(self, backend, data):
        # Saving finished: unlock the editor again (stray trailing `pass` removed).
        self.ui.saveButton.setEnabled(True)
        self.ui.textEdit.setEnabled(True)

    def _saveNotes_eb(self, backend, error, backtrace):
        self.ui.saveButton.setEnabled(True)
        self.ui.textEdit.setEnabled(True)
        content = unicode(self.tr('Unable to save notes:\n%s\n')) % to_unicode(error)
        if logging.root.level == logging.DEBUG:
            content += '\n%s\n' % to_unicode(backtrace)
        QMessageBox.critical(self, self.tr('Error while saving notes'),
                             content, QMessageBox.Ok)
class IGroup(object):
    """Minimal interface for a group of contacts.

    Concrete groups must override :meth:`iter_contacts` to feed contacts to
    the supplied callback.
    """

    def __init__(self, weboob, id, name):
        self.weboob = weboob
        self.id = id
        self.name = name

    def iter_contacts(self, cb):
        """Iterate the group's contacts, invoking *cb* for each one."""
        raise NotImplementedError()
class MetaGroup(IGroup):
    """Built-in pseudo-group that selects contacts by connection status.

    The group id picks the filter: 'online' (online or away), 'offline',
    anything else meaning all contacts.
    """

    def iter_contacts(self, cb):
        """Start an asynchronous listing; results are relayed to *cb* via self.cb."""
        if self.id == 'online':
            wanted = Contact.STATUS_ONLINE|Contact.STATUS_AWAY
        elif self.id == 'offline':
            wanted = Contact.STATUS_OFFLINE
        else:
            wanted = Contact.STATUS_ALL

        self.process = QtDo(self.weboob, lambda backend, data: self.cb(cb, backend, data))
        self.process.do('iter_contacts', wanted, caps=ICapContact)

    def cb(self, cb, backend, contact):
        """Forward each contact to *cb*; a final None backend signals completion."""
        if contact:
            cb(contact)
            return
        if not backend:
            self.process = None
            cb(None)
class ContactsWidget(QWidget):
def __init__(self, weboob, parent=None):
QWidget.__init__(self, parent)
self.ui = Ui_Contacts()
self.ui.setupUi(self)
self.weboob = weboob
self.contact = None
self.ui.contactList.setItemDelegate(HTMLDelegate())
self.url_process = None
self.photo_processes = {}
self.ui.groupBox.addItem('All', MetaGroup(self.weboob, 'all', self.tr('All')))
self.ui.groupBox.addItem('Online', MetaGroup(self.weboob, 'online', self.tr('Online')))
self.ui.groupBox.addItem('Offline', MetaGroup(self.weboob, 'offline', self.tr('Offline')))
self.ui.groupBox.setCurrentIndex(1)
self.connect(self.ui.groupBox, SIGNAL('currentIndexChanged(int)'), self.groupChanged)
self.connect(self.ui.contactList, SIGNAL('itemClicked(QListWidgetItem*)'), self.contactChanged)
self.connect(self.ui.refreshButton, SIGNAL('clicked()'), self.refreshContactList)
self.connect(self.ui.urlButton, SIGNAL('clicked()'), self.urlClicked)
def load(self):
self.refreshContactList()
self.ui.backendsList.clear()
for backend in self.weboob.iter_backends():
self.ui.backendsList.addItem(backend.name)
def groupChanged(self, i):
self.refreshContactList()
def refreshContactList(self):
self.ui.contactList.clear()
self.ui.refreshButton.setEnabled(False)
i = self.ui.groupBox.currentIndex()
group = self.ui.groupBox.itemData(i).toPyObject()
group.iter_contacts(self.addContact)
def setPhoto(self, contact, item):
if not contact:
return False
try:
self.photo_processes.pop(contact.id, None)
except KeyError:
pass
img = None
for photo in contact.photos.itervalues():
if photo.thumbnail_data:
img = QImage.fromData(photo.thumbnail_data)
break
if img:
item.setIcon(QIcon(QPixmap.fromImage(img)))
return True
return False
def addContact(self, contact):
if not contact:
self.ui.refreshButton.setEnabled(True)
return
status = ''
if contact.status == Contact.STATUS_ONLINE:
status = u'Online'
status_color = 0x00aa00
elif contact.status == Contact.STATUS_OFFLINE:
status = u'Offline'
status_color = 0xff0000
elif contact.status == Contact.STATUS_AWAY:
status = u'Away'
status_color = 0xffad16
else:
status = u'Unknown'
status_color = 0xaaaaaa
if contact.status_msg:
status += u' — %s' % contact.status_msg
item = QListWidgetItem()
item.setText('<h2>%s</h2><font color="#%06X">%s</font><br /><i>%s</i>' % (contact.name, status_color, status, contact.backend))
item.setData(Qt.UserRole, contact)
if contact.photos is NotLoaded:
process = QtDo(self.weboob, lambda b, c: self.setPhoto(c, item))
process.do('fillobj', contact, ['photos'], backends=contact.backend)
self.photo_processes[contact.id] = process
elif len(contact.photos) > 0:
if not self.setPhoto(contact, item):
photo = contact.photos.values()[0]
process = QtDo(self.weboob, lambda b, p: self.setPhoto(contact, item))
process.do('fillobj', photo, ['thumbnail_data'], backends=contact.backend)
self.photo_processes[contact.id] = process
for i in xrange(self.ui.contactList.count()):
if self.ui.contactList.item(i).data(Qt.UserRole).toPyObject().status > contact.status:
self.ui.contactList.insertItem(i, item)
return
self.ui.contactList.addItem(item)
def contactChanged(self, current):
    """Slot called when the selected list item changes; show that contact."""
    if not current:
        return
    contact = current.data(Qt.UserRole).toPyObject()
    self.setContact(contact)
def setContact(self, contact):
    """Display *contact* in the tab widget.

    If *contact* is not a Contact instance it is treated as an URL/id and
    fetched via retrieveContact(). Tabs are added depending on the
    capabilities of the contact's backend; Chat and Calendar tabs are
    created but disabled (placeholders).
    """
    if not contact or contact == self.contact:
        return
    if not isinstance(contact, Contact):
        return self.retrieveContact(contact)
    self.ui.tabWidget.clear()
    self.contact = contact
    backend = self.weboob.get_backend(self.contact.backend)
    self.ui.tabWidget.addTab(ContactProfile(self.weboob, self.contact), self.tr('Profile'))
    if backend.has_caps(ICapMessages):
        self.ui.tabWidget.addTab(ContactThread(self.weboob, self.contact, backend.has_caps(ICapMessagesPost)), self.tr('Messages'))
    if backend.has_caps(ICapChat):
        self.ui.tabWidget.setTabEnabled(self.ui.tabWidget.addTab(QWidget(), self.tr('Chat')),
                                        False)
    self.ui.tabWidget.setTabEnabled(self.ui.tabWidget.addTab(QWidget(), self.tr('Calendar')),
                                    False)
    self.ui.tabWidget.addTab(ContactNotes(self.weboob, self.contact), self.tr('Notes'))
def urlClicked(self):
    """Slot for the URL button: fetch the contact typed in the URL field."""
    url = unicode(self.ui.urlEdit.text())
    if not url:
        return
    self.retrieveContact(url)
def retrieveContact(self, url):
    """Asynchronously fetch a contact by *url* on the selected backend.

    The URL button is disabled until retrieveContact_cb/_eb completes.
    """
    backend_name = unicode(self.ui.backendsList.currentText())
    self.ui.urlButton.setEnabled(False)
    self.url_process = QtDo(self.weboob, self.retrieveContact_cb, self.retrieveContact_eb)
    self.url_process.do('get_contact', url, backends=backend_name)
def retrieveContact_cb(self, backend, contact):
    """Success callback of retrieveContact().

    A falsy *backend* signals the end of the request: release the process
    and re-enable the URL button. Otherwise display the fetched contact.
    """
    if not backend:
        self.url_process = None
        self.ui.urlButton.setEnabled(True)
        return
    self.ui.urlEdit.clear()
    self.setContact(contact)
def retrieveContact_eb(self, backend, error, backtrace):
    """Error callback of retrieveContact(): show the error in a dialog.

    The backtrace is appended only when the root logger is in DEBUG mode.
    """
    content = unicode(self.tr('Unable to get contact:\n%s\n')) % to_unicode(error)
    if logging.root.level == logging.DEBUG:
        content += u'\n%s\n' % to_unicode(backtrace)
    QMessageBox.critical(self, self.tr('Error while getting contact'),
                         content, QMessageBox.Ok)
|
# OpenERP/Odoo addon manifest: declares metadata and dependencies for the
# "Product Replacement Cost" module.
{
    'name': 'Product Replacement Cost',
    'version': '0.3.1',
    'depends': ['base', 'product', 'purchase', 'stock', 'purchase_expense_distribution'],
    'author': '[OpenDrive Ltda]',
    'website': '[http://www.opendrive.cl]',
    'description': """
This module add the replacement cost to the product and the cost price in the selected currency.
Created by David Acevedo Toledo (Fedoro).
""",
    # View definitions loaded at install/update time.
    'data': [
        'product_view.xml',
    ],
    'installable': True,
}
|
from __future__ import absolute_import, print_function, division
import unittest
from pony.orm.sqlsymbols import *
from pony.orm.sqlbuilding import SQLBuilder
from pony.orm.dbapiprovider import DBAPIProvider
from pony.orm.tests.testutils import TestPool
class TestFormatStyles(unittest.TestCase):
    """Verify that SQLBuilder renders PARAM placeholders correctly for each
    DB-API paramstyle, and that the resulting parameter layout reuses keys
    where the style supports named/numbered parameters."""

    def setUp(self):
        self.key1 = object()
        self.key2 = object()
        self.provider = DBAPIProvider(pony_pool_mockup=TestPool(None))
        # SELECT "A" FROM "T1" WHERE B=key1 AND C=key2 AND D=key2 AND E=key1
        self.ast = [ SELECT, [ ALL, [COLUMN, None, 'A']], [ FROM, [None, TABLE, 'T1']],
                     [ WHERE, [ EQ, [COLUMN, None, 'B'], [ PARAM, self.key1 ] ],
                              [ EQ, [COLUMN, None, 'C'], [ PARAM, self.key2 ] ],
                              [ EQ, [COLUMN, None, 'D'], [ PARAM, self.key2 ] ],
                              [ EQ, [COLUMN, None, 'E'], [ PARAM, self.key1 ] ]
                     ]
                   ]

    def _build(self, paramstyle):
        # Build SQL for the fixture AST under the given paramstyle.
        self.provider.paramstyle = paramstyle
        return SQLBuilder(self.provider, self.ast)

    def test_qmark(self):
        builder = self._build('qmark')
        self.assertEqual(builder.sql, 'SELECT "A"\n'
                                      'FROM "T1"\n'
                                      'WHERE "B" = ?\n AND "C" = ?\n AND "D" = ?\n AND "E" = ?')
        self.assertEqual(builder.layout, (self.key1, self.key2, self.key2, self.key1))

    def test_numeric(self):
        builder = self._build('numeric')
        self.assertEqual(builder.sql, 'SELECT "A"\n'
                                      'FROM "T1"\n'
                                      'WHERE "B" = :1\n AND "C" = :2\n AND "D" = :2\n AND "E" = :1')
        self.assertEqual(builder.layout, (self.key1, self.key2))

    def test_named(self):
        builder = self._build('named')
        self.assertEqual(builder.sql, 'SELECT "A"\n'
                                      'FROM "T1"\n'
                                      'WHERE "B" = :p1\n AND "C" = :p2\n AND "D" = :p2\n AND "E" = :p1')
        self.assertEqual(builder.layout, (self.key1, self.key2))

    def test_format(self):
        builder = self._build('format')
        self.assertEqual(builder.sql, 'SELECT "A"\n'
                                      'FROM "T1"\n'
                                      'WHERE "B" = %s\n AND "C" = %s\n AND "D" = %s\n AND "E" = %s')
        self.assertEqual(builder.layout, (self.key1, self.key2, self.key2, self.key1))

    def test_pyformat(self):
        builder = self._build('pyformat')
        self.assertEqual(builder.sql, 'SELECT "A"\n'
                                      'FROM "T1"\n'
                                      'WHERE "B" = %(p1)s\n AND "C" = %(p2)s\n AND "D" = %(p2)s\n AND "E" = %(p1)s')
        self.assertEqual(builder.layout, (self.key1, self.key2))
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
|
from __future__ import absolute_import
from celery import task
from django.conf import settings
from helfertool.utils import cache_lock
from .receive import MailHandler
@task(bind=True)
def receive_mails(self):
    """Celery task: fetch and process incoming mails.

    Does nothing unless RECEIVE_EMAIL_HOST is configured. A cache-based lock
    keyed on this worker's app.oid ensures only one worker polls at a time;
    if the lock is already held the task exits silently.
    """
    if settings.RECEIVE_EMAIL_HOST:
        with cache_lock("receive_mails", self.app.oid) as acquired:
            if acquired:
                handler = MailHandler()
                # Forward unhandled mails only when a forward address is set.
                handler.run(do_forward=settings.FORWARD_UNHANDLED_ADDRESS is not None)
|
from odoo import api, fields, models
class StockMove(models.Model):
    """Extend stock.move so exploding a kit propagates lots to components."""
    _inherit = 'stock.move'

    @api.multi
    def _get_lot_vals(self, old_lot, index):
        """Return copy() values for a component lot derived from *old_lot*.

        The new lot is named "<old name>-<index>" and bound to this move's
        product.
        """
        self.ensure_one()
        lot_number = "%s-%d" % (
            old_lot.name, index)
        return {
            'name': lot_number,
            'product_id': self.product_id.id
        }

    def action_explode(self):
        """Explode a phantom BoM move, copying the restricted lot.

        The super() call runs with subcall=True in context so that the
        recursive invocations triggered by the explosion skip the lot-copy
        logic; only the outermost call assigns a derived lot to each new
        component move.
        """
        original_lot = self.restrict_lot_id
        processed_moves = super(StockMove, self.with_context(subcall=True)).\
            action_explode()
        if not self.env.context.get('subcall', False) and processed_moves and \
                original_lot:
            index = 1
            for new_move in processed_moves:
                if new_move != self:
                    vals = new_move._get_lot_vals(original_lot, index)
                    lot = original_lot.copy(vals)
                    new_move.restrict_lot_id = lot
                    index += 1
        return processed_moves
class StockProductionLot(models.Model):
    """Extend lots with a link to their production orders; deleting a lot
    cascades to the linked manufacturing orders."""
    _inherit = 'stock.production.lot'

    mrp_production_ids = fields.One2many(
        'mrp.production',
        'lot_id',
        string='Production Order')

    @api.multi
    def unlink(self):
        """Delete linked manufacturing orders before deleting the lot(s)."""
        for lot in self:
            for mo in lot.mrp_production_ids:
                mo.unlink()
        return super(StockProductionLot, self).unlink()
|
import six
from shuup.xtheme.layout import Layout, LayoutCell
from shuup.xtheme.plugins.text import TextPlugin
from shuup.xtheme.rendering import get_view_config, render_placeholder
from shuup.xtheme.testing import override_current_theme_class
from shuup_tests.utils import printable_gibberish
from shuup_tests.xtheme.utils import (
close_enough, get_jinja2_engine, get_request,
get_test_template_bits, plugin_override,
FauxTheme)
def test_layout_serialization():
    """A layout round-trips through serialize()/unserialize() unchanged."""
    theme = FauxTheme
    with plugin_override():
        l = Layout(theme, "test")
        l.begin_column({"md": 8})
        l.add_plugin("text", {"text": "yes"})
        serialized = l.serialize()
        expected = {
            'name': "test",
            'rows': [
                {
                    'cells': [
                        {'config': {'text': 'yes'}, 'plugin': 'text', 'sizes': {"md": 8}}
                    ]
                }
            ]
        }
        assert serialized == expected
        assert Layout.unserialize(theme, serialized).serialize() == expected
def test_layout_rendering(rf):
    """Rendering a placeholder outside edit mode emits the expected markup."""
    # NOTE(review): the `rf` fixture is unused here.
    request = get_request(edit=False)
    with override_current_theme_class(None):
        with plugin_override():
            (template, layout, gibberish, ctx) = get_test_template_bits(request)
            result = six.text_type(render_placeholder(ctx, "test", layout, "test"))
            expect = """
<div class="xt-ph" id="xt-ph-test">
<div class="row xt-ph-row">
<div class="col-md-12 hidden-xs xt-ph-cell"><p>%s</p></div>
</div>
</div>
""" % gibberish
            assert close_enough(result, expect)
def test_layout_rendering_with_global_type(rf):
    """global_type=True adds the global placeholder class; False does not."""
    request = get_request(edit=False)
    with override_current_theme_class(None):
        with plugin_override():
            # Fix: removed dead code — a template built from an empty string
            # via get_jinja2_engine() was immediately overwritten by the one
            # returned from get_test_template_bits().
            (template, layout, gibberish, ctx) = get_test_template_bits(request)
            global_class = "xt-global-ph"
            result = six.text_type(render_placeholder(ctx, "test", layout, template.template.name, global_type=True))
            assert global_class in result
            result = six.text_type(render_placeholder(ctx, "test", layout, template.template.name, global_type=False))
            assert global_class not in result
def test_layout_edit_render():
    """Rendering in edit mode includes the editor's data attributes."""
    request = get_request(edit=True)
    with override_current_theme_class(None):
        with plugin_override():
            (template, layout, gibberish, ctx) = get_test_template_bits(request)
            result = six.text_type(render_placeholder(ctx, "test", layout, "test"))
            # Look for evidence of editing:
            assert "xt-ph-edit" in result
            assert "data-xt-placeholder-name" in result
            assert "data-xt-row" in result
            assert "data-xt-cell" in result
def test_view_config_caches_into_context(rf):
    """get_view_config() returns the same cached object for a given context."""
    # This is a silly test...
    request = get_request(edit=False)
    with override_current_theme_class(None):
        (template, layout, gibberish, ctx) = get_test_template_bits(request)
        cfg1 = get_view_config(ctx)
        cfg2 = get_view_config(ctx)
        assert cfg1 is cfg2
        (template, layout, gibberish, ctx) = get_test_template_bits(request, False)
        cfg1 = get_view_config(ctx)
        cfg2 = get_view_config(ctx)
        assert cfg1 is cfg2
def test_missing_plugin_render():
    """A cell whose plugin id is unknown renders a placeholder comment."""
    plugin_id = printable_gibberish()
    cell = LayoutCell(FauxTheme, plugin_identifier=plugin_id)
    assert not cell.plugin_class
    assert not cell.instantiate_plugin()
    assert ("%s?" % plugin_id) in cell.render(None)  # Should render a "whut?" comment
def test_null_cell_render():
    """A cell with no plugin renders empty output."""
    cell = LayoutCell(FauxTheme, None)
    assert not cell.plugin_class
    assert not cell.instantiate_plugin()
    assert not cell.render(None)  # Should render nothing whatsoever!
def test_plugin_naming():
    """LayoutCell.plugin_name resolves to the plugin class's name."""
    with plugin_override():
        cell = LayoutCell(FauxTheme, TextPlugin.identifier)
        assert cell.plugin_name == TextPlugin.name
def test_layout_api():
    """Exercise the Layout row/column/cell manipulation API."""
    l = Layout(FauxTheme, "test")
    l.begin_column({"md": 8})
    px0y0 = l.add_plugin("text", {"text": "yes"})
    l.begin_column({"md": 4})
    px1y0 = l.add_plugin("text", {"text": "no"})
    assert len(l) == 1
    assert len(l.rows[0]) == 2
    assert not l.delete_cell(x=0, y=1)  # nonexistent row
    assert l.get_cell(0, 0) == px0y0
    assert l.get_cell(1, 0) == px1y0
    assert not l.get_cell(2, 0)
    assert not l.get_cell(0, 1)
    l.begin_row()
    assert len(l) == 2
    assert len(l.rows[1]) == 0
    l.begin_column()
    assert len(l.rows[1]) == 1
    assert l.delete_cell(x=0, y=1)  # existent cell
    assert not l.delete_cell(x=0, y=1)  # cell existent no more
    assert l.delete_row(1)  # existent row
    assert len(l) == 1
    assert not l.delete_row(1)  # nonexistent row
    l.insert_row(0).add_cell()  # insert a cellful row in first place
    assert len(l) == 2 and list(map(len, l.rows)) == [1, 2]
    l.insert_row(1)  # insert an empty row in second place
    assert len(l) == 3 and list(map(len, l.rows)) == [1, 0, 2]
    assert not l.insert_row(-1)  # that's silly!
|
import mysite.profile.controllers
import mysite.project.controllers
def get_user_ip(request):
    """Return the client's IP address from the request.

    Local development requests (127.0.0.1) are mapped to a fixed public IP
    so IP-based features behave as in production.
    """
    addr = request.META['REMOTE_ADDR']
    return "98.140.110.121" if addr == '127.0.0.1' else addr
class HandleWannaHelpQueue(object):
    """Middleware: once per session after login, flush the anonymous
    wanna-help queue into the database for the authenticated user."""

    def process_request(self, request):
        # Skip when auth/session middleware hasn't populated the request.
        if not hasattr(request, 'user') or not hasattr(request, 'session'):
            return None
        if request.user.is_authenticated() and 'wanna_help_queue_handled' not in request.session:
            mysite.project.controllers.flush_session_wanna_help_queue_into_database(
                request.user, request.session)
            # Session flag ensures this runs only once per session.
            request.session['wanna_help_queue_handled'] = True
        return None
class DetectLogin(object):
    """Middleware: run one-time post-login work on the first response after
    a user authenticates."""
    # called every time a page is gotten
    # Checks for work that should be done at login time

    def process_response(self, request, response):
        # Skip when auth/session middleware hasn't populated the request.
        if not hasattr(request, 'user') or not hasattr(request, 'session'):
            return response
        if request.user.is_authenticated() and 'post_login_stuff_run' not in request.session:
            mysite.project.controllers.take_control_of_our_answers(request.user, request.session)
            # Session flag ensures this runs only once per session.
            request.session['post_login_stuff_run'] = True
        return response
|
from . import account_fiscalyear_close
|
"backend for http://www.lesinrocks.com"
from weboob.capabilities.messages import ICapMessages
from weboob.tools.capabilities.messages.GenericBackend import GenericNewspaperBackend
from .browser import NewspaperInrocksBrowser
from .tools import rssid
class NewspaperInrocksBackend(GenericNewspaperBackend, ICapMessages):
    """Weboob backend for the 'Les Inrocks' news site, built on the generic
    newspaper backend: articles are read from the site's RSS feed."""
    MAINTAINER = u'Julien Hebert'
    EMAIL = 'juke@free.fr'
    VERSION = '0.h'
    LICENSE = 'AGPLv3+'
    # Persistent storage: ids of articles already seen.
    STORAGE = {'seen': {}}
    NAME = 'inrocks'
    DESCRIPTION = u'Les Inrocks French news website'
    BROWSER = NewspaperInrocksBrowser
    RSS_FEED = 'http://www.lesinrocks.com/fileadmin/rss/actus.xml'
    # Function extracting a stable article id from an RSS entry.
    RSSID = rssid
|
import couchdb
# Module-level connection to the CouchDB server (library default URL).
couch = couchdb.Server()
def get_db(name):
    """Return the CouchDB database *name*, creating it on first use."""
    if name not in couch:
        print("Automatically created database", name)
        return couch.create(name)
    return couch[name]
|
from flask.ext.wtf import Form
from wtforms.fields.html5 import (IntegerField,
DateField)
from wtforms import SelectField, FloatField
from ..models import Offer, CURRENCIES
# Rebind CURRENCIES as (value, label) pairs, the shape SelectField expects.
CURRENCIES = [(c, c) for c in CURRENCIES]
class OfferForm(Form):
    """Form for creating a currency-exchange offer.

    Default latitude/longitude appear to point to a fixed default location
    — confirm against the map widget using this form.
    """
    currency_from = SelectField('From currency', choices=CURRENCIES)
    currency_to = SelectField('To currency', choices=CURRENCIES)
    amount = IntegerField('Amount')
    expires = DateField('Expiration date')
    latitude = FloatField('Latitude', default=49.22573)
    longitude = FloatField('Longitude', default=16.58205)
|
import colander
from zope.interface import implementer
from substanced.content import content
from substanced.schema import NameSchemaNode
from substanced.util import renamer
from dace.objectofcollaboration.entity import Entity
from pontus.widget import RichTextWidget
from pontus.core import VisualisableElement, VisualisableElementSchema
from .interface import ICandidacy
from novaideo import _
def context_is_a_candidacy(context, request):
    """Return True if *context* is a 'candidacy' content object (used as the
    edit-mode predicate for the name schema node)."""
    return request.registry.content.istype(context, 'candidacy')
class CandidacySchema(VisualisableElementSchema):
    """Schema for Candidacy"""
    # Name is only editable when the edited context is already a candidacy.
    name = NameSchemaNode(
        editing=context_is_a_candidacy,
        )
    # Free-form application text, edited with a rich-text widget.
    body = colander.SchemaNode(
        colander.String(),
        widget=RichTextWidget(),
        title=_('Application'),
        )
@content(
    'candidacy',
    icon='glyphicon glyphicon-align-left',
    )
@implementer(ICandidacy)
class Candidacy(VisualisableElement, Entity):
    """Candidacy content type; its 'name' attribute is managed by renamer()."""
    name = renamer()
|
__author__ = "Felix Brezo, Yaiza Rubio <contacto@i3visio.com>"
__version__ = "2.0"
from osrframework.utils.platforms import Platform
class Ebay(Platform):
    """A <Platform> object for Ebay."""

    def __init__(self):
        self.platformName = "Ebay"
        self.tags = ["e-commerce"]

        # Modes supported by this platform.
        self.isValidMode = {
            "phonefy": False,
            "usufy": True,
            "searchfy": False,
        }

        # Search URL for each supported mode; the '<usufy>' token is replaced
        # by the framework with the actual query.
        self.url = {
            "usufy": "http://www.ebay.com/usr/<usufy>",
        }

        # Whether each supported mode needs credentials.
        self.needsCredentials = {
            "usufy": False,
        }

        # Regular expression a query must match to be considered valid
        # ('.+' accepts any non-empty query).
        self.validQuery = {
            "usufy": ".+",
        }

        # Strings whose presence in the response means the profile does not exist.
        self.notFoundText = {
            "usufy": ["The User ID you entered was not found. Please check the User ID and try again"],
        }

        # Regular expressions for the fields to extract, per mode
        # (none defined for Ebay yet).
        self.fieldsRegExp = {
            "usufy": {},
        }

        # Populated at runtime with the extracted fields.
        self.foundFields = {}
|
""" Errors used by the Discussion API. """
from __future__ import absolute_import
from django.core.exceptions import ObjectDoesNotExist
class DiscussionDisabledError(ObjectDoesNotExist):
    """ Raised when discussion is disabled for the requested course/context. """
    pass
class ThreadNotFoundError(ObjectDoesNotExist):
    """ Raised when the requested thread was not found. """
    pass
class CommentNotFoundError(ObjectDoesNotExist):
    """ Raised when the requested comment was not found. """
    pass
|
from django.conf import settings
from django.conf.urls.defaults import patterns, include, handler404
from django.views.generic.simple import direct_to_template, redirect_to
from django.contrib import admin
admin.autodiscover()

# Core guide URL routes.
urlpatterns = patterns('channelguide.guide.views',
    (r'^$', 'frontpage.index'),
    (r'^admin/(.*)$', admin.site.root),
    (r'^favicon.ico$', redirect_to, {'url': '/images/favicon.ico'}),
    (r'^frontpage$', 'frontpage.index'),
    (r'^frontpage/edit-header$', 'frontpage.edit_header'),
    (r'^audio/', include('channelguide.guide.audio_urlconf')),
    (r'^firsttime$', 'firsttime.index'),
    (r'^accounts/', include('channelguide.user_profile.urls')),
    (r'^accounts/', include('django.contrib.auth.urls')),
    (r'^languages/', include('channelguide.labels.languages.urls')),
    (r'^notes/', include('channelguide.notes.urls')),
    (r'^watch/', include('channelguide.cobranding.urls')),
    (r'^i18n/setlang/?', 'i18n.set_language'),
    (r'^api/', include('channelguide.api.urls')),
    (r'^recommend/', include('channelguide.recommendations.urls')),
    (r'^ping/', include('channelguide.watched.urls')),
    (r'^submit/?', include('channelguide.submit.urls')),
    (r'^share/', include('channelguide.sharing.urls')),
    (r'^genres/', include('channelguide.labels.categories.urls')),
    (r'^tags/', include('channelguide.labels.tags.urls')),
    (r'^dmca$', direct_to_template,
     {'template': 'guide/dmca.html'}))

# Moderation views.
urlpatterns += patterns('channelguide.moderate.views',
    (r'^moderate$', 'index'),
    (r'^how-to-moderate$', 'how_to_moderate'),
    (r'^moderate/', include('channelguide.moderate.urls')))

# Search views; the legacy "-more-" URLs redirect to the unified /search.
urlpatterns += patterns('channelguide.search.views',
    (r'^search$', 'search'),
    (r'^search-more-channels$', redirect_to, {'url': '/search'}),
    (r'^search-more-items$', redirect_to, {'url': '/search'}))

urlpatterns += patterns('channelguide.user_profile.views',
    (r'^user/(.*)$', 'for_user'))

# Filtered channel listings, all served by the same view with different
# filter/sort/title parameters.
urlpatterns += patterns('channelguide.channels.views',
    (r'^popular/?$', 'filtered_listing', {
        'filter': 'name',
        'default_sort': '-popular',
        'title': 'Popular Shows'}),
    (r'^toprated/?$', 'filtered_listing', {
        'filter': 'name',
        'default_sort': '-rating',
        'title': 'Top-Rated Shows'}),
    (r'^feeds/?$', 'filtered_listing', {
        'filter': 'feed',
        'value': True,
        'title': 'Feeds'}),
    (r'^sites/?$', 'filtered_listing', {
        'filter': 'feed',
        'value': False,
        'title': 'Sites'}),
    (r'^new/?$', 'filtered_listing', {
        'filter': 'name',
        'default_sort': '-age',
        'title': 'New Shows'}),
    (r'^featured/?$', 'filtered_listing', {
        'filter': 'featured',
        'value': True,
        'title': 'Featured Shows'}),
    (r'^hd/?$', 'filtered_listing', {
        'filter': 'hd',
        'value': True,
        'title': 'High-Definition Shows'}),
    (r'^(feeds|sites)/', include('channelguide.channels.urls')),
)

urlpatterns += patterns('channelguide.channels.playback',
    (r'^items/(\d+)/?$', 'item'))
def donate_render(request, template):
    """Render a donation page *template* with the shared donation context
    (analytics UA, base URLs, PayPal and credit-card donation links)."""
    context = {'request': request,
               'google_analytics_ua': settings.GOOGLE_ANALYTICS_UA,
               'BASE_URL': settings.BASE_URL,
               'BASE_URL_FULL': settings.BASE_URL_FULL,
               # Fix: the URL previously contained the mojibake '¤cy_code'
               # (an over-decoded '&curren' HTML entity); restored the
               # intended '&currency_code' PayPal parameter.
               'PAYPAL_URL': 'https://www.paypal.com/cgi-bin/webscr?cmd=_donations&business=donate%40pculture%2eorg&item_name=Tax%20Deductible%20Donation%20to%20Miro&page_style=MiroStore&no_shipping=1&return=https%3a%2f%2fwww%2emiroguide%2ecom%2fdonate%2fthanks&no_note=1&tax=0&currency_code=USD&lc=US&bn=PP%2dDonationsBF&charset=UTF%2d8',
               'CC_URL': 'https://www.getmiro.com/about/donate/cc-guide.html',
               }
    return direct_to_template(request,
                              template=template,
                              extra_context=context)
def donate_thanks(request):
    """Render the donation thank-you page and set a 30-day cookie marking
    that this browser donated (only if not already set)."""
    response = donate_render(request, template='donate/thanks.html')
    if 'donate_donated' not in request.COOKIES:
        response.set_cookie('donate_donated', 'yes', max_age=60*60*24*30,
                            secure=settings.USE_SECURE_COOKIES or None)
    return response
# Donation pages.
urlpatterns += patterns('',
    (r'^donate$', donate_render, {
        'template': 'donate/donate.html'}),
    (r'^donate/special$', donate_render, {
        'template': 'donate/special.html'}),
    (r'^donate/biz$', donate_render, {
        'template': 'donate/biz.html'}),
    (r'^donate/thanks$', donate_thanks),
)

from channelguide.guide import feeds

# Main RSS feeds are redirected through FeedBurner.
urlpatterns += patterns('',
    (r'^rss/(?P<name>new|featured|popular|toprated)/?', redirect_to,
     {'url': 'http://feeds.feedburner.com/miroguide/%(name)s'}))

# Actual feed generation, keyed by feed name.
urlpatterns = urlpatterns + patterns('',
    (r'^(?:rss|feeds)(?:_real)?/(?P<url>.*)$', feeds.cached_feed,
     {'feed_dict':
      { 'new': feeds.NewChannelsFeed,
        'features': feeds.FeaturedChannelsFeed,
        'featured': feeds.FeaturedChannelsFeed,
        'popular': feeds.PopularChannelsFeed,
        'toprated': feeds.TopRatedChannelsFeed,
        'categories': feeds.CategoriesFeed,
        'tags': feeds.TagsFeed,
        'languages': feeds.LanguagesFeed,
        'search': feeds.SearchFeed,
        'recommend': feeds.RecommendationsFeed}
      }),
    # be backwards compatible even though we're using /feeds/* for something
    # else now
    (r'^feeds/(?P<name>(new|featured|popular|toprated|categories|tags|languages|search|recommend).*)$',
     redirect_to, {'url': '/rss/%(name)s'}),
    (r'^feeds/features/?$', redirect_to, {'url': '/rss/featured'}),
)

# Legacy browse URLs (410-style: redirect with no target) plus app includes.
urlpatterns += patterns('',
    (r'^browse/$', redirect_to, {'url': None}),
    (r'^category-peek-fragment$', redirect_to,
     {'url': None}),
    (r'^channels/', include('channelguide.channels.urls')),
    (r'^categories/', include('channelguide.labels.categories.urls')),
    (r'^cobranding/', include('channelguide.cobranding.urls')))

# JavaScript i18n catalog for the guide package.
js_info_dict = {
    'packages': ('channelguide.guide',),
}

urlpatterns += patterns('',
    (r'^jsi18n/$', 'django.views.i18n.javascript_catalog', js_info_dict),
)

handler500 = 'channelguide.guide.views.errors.error_500'

# In development, serve static and media files directly from Django.
if settings.DEBUG:
    import os
    static_patterns = []
    for dir in ('css', 'images', 'js', 'movies', 'swf'):
        static_patterns.append((r'^%s/(?P<path>.*)$' % dir,
                                'django.views.static.serve',
                                {'document_root': os.path.join(settings.STATIC_DIR, dir)}))
    urlpatterns.extend(patterns('', *static_patterns))
    urlpatterns += patterns('',
        (r'^media/(?P<path>.*)$',
         'django.views.static.serve',
         {'document_root': settings.MEDIA_ROOT}))
|
import setuptools
# Read the distribution version from the adjacent VERSION.txt file.
with open('VERSION.txt', 'r') as f:
    version = f.read().strip()

# Meta package: installing it pulls in all OCA donation addons for Odoo 14.
setuptools.setup(
    name="odoo14-addons-oca-donation",
    description="Meta package for oca-donation Odoo addons",
    version=version,
    install_requires=[
        'odoo14-addon-donation',
        'odoo14-addon-donation_base',
        'odoo14-addon-donation_direct_debit',
        'odoo14-addon-donation_recurring',
        'odoo14-addon-donation_sale',
        'odoo14-addon-product_analytic_donation',
    ],
    classifiers=[
        'Programming Language :: Python',
        'Framework :: Odoo',
        'Framework :: Odoo :: 14.0',
    ]
)
|
from django.conf import settings
from django.conf.urls import patterns, url
from ecommerce.credit.views import Checkout
urlpatterns = patterns(
    '',
    # Credit checkout page for a specific course run (course id pattern
    # comes from settings).
    url(r'^checkout/{course}/$'.format(course=settings.COURSE_ID_PATTERN),
        Checkout.as_view(), name='checkout'),
)
|
from odoo import api, models
class HRHolidays(models.Model):
    """Extend hr.holidays with hour-based duration accessors."""
    _inherit = "hr.holidays"

    @api.multi
    def _get_duration(self):
        """Return this leave's duration, expressed in hours."""
        self.ensure_one()
        return self.number_of_hours_temp

    @api.multi
    def _set_duration(self, duration):
        """Set this leave's duration, expressed in hours."""
        self.ensure_one()
        self.number_of_hours_temp = duration
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Replace the PoliticalTopic model with a simpler Topic model and
    repoint the foreign keys on Argument, Voting and TestGroup."""

    dependencies = [
        ('compatibility_test', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Topic',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=200)),
                ('description', models.TextField()),
                ('mp_positions', models.TextField(null=True, blank=True)),
            ],
        ),
        # Detach PoliticalTopic's relations before deleting the model.
        migrations.RemoveField(
            model_name='politicaltopic',
            name='group',
        ),
        migrations.RemoveField(
            model_name='politicaltopic',
            name='test',
        ),
        migrations.AddField(
            model_name='testgroup',
            name='test',
            field=models.ForeignKey(to='compatibility_test.CompatTest', null=True),
        ),
        migrations.AlterField(
            model_name='argument',
            name='topic',
            field=models.ForeignKey(to='compatibility_test.Topic'),
        ),
        migrations.AlterField(
            model_name='voting',
            name='topic',
            field=models.ForeignKey(to='compatibility_test.Topic', null=True),
        ),
        migrations.DeleteModel(
            name='PoliticalTopic',
        ),
        migrations.AddField(
            model_name='testgroup',
            name='topics',
            field=models.ManyToManyField(to='compatibility_test.Topic'),
        ),
    ]
|
__doc__="""This code implements the motp one time password algorithm
described in motp.sourceforge.net.
The code is tested in tests/test_lib_tokens_motp
"""
from .mOTP import mTimeOtp
from privacyidea.lib.apps import create_motp_url
from privacyidea.lib.tokenclass import TokenClass
from privacyidea.lib.log import log_with
from privacyidea.lib.utils import create_img
from privacyidea.api.lib.utils import getParam
from privacyidea.lib.utils import generate_otpkey
from privacyidea.lib.decorators import check_token_locked
import traceback
import logging
import gettext
# Flags passed to getParam() to mark a parameter as optional/required.
optional = True
required = False

log = logging.getLogger(__name__)
_ = gettext.gettext
class MotpTokenClass(TokenClass):
    """Token class implementing the mOTP (mobile OTP) algorithm."""

    @staticmethod
    def get_class_type():
        """Return the token type identifier ('motp')."""
        return "motp"

    @staticmethod
    def get_class_prefix():
        """Return the serial-number prefix for this token type."""
        return "PIMO"

    @staticmethod
    def get_class_info(key=None, ret='all'):
        """
        returns a subtree of the token definition

        Is used by lib.token.get_token_info

        :param key: subsection identifier
        :type key: string
        :param ret: default return value, if nothing is found
        :type ret: user defined
        :return: subsection if key exists or user defined
        :rtype : dict or string
        """
        res = {'type': 'motp',
               'title': 'mOTP Token',
               'description': 'mOTP: Classical mobile One Time Passwords.',
               'init': {'page': {'html': 'motptoken.mako',
                                 'scope': 'enroll', },
                        'title': {'html': 'motptoken.mako',
                                  'scope': 'enroll.title'},
                        },
               'config': {'page': {'html': 'motptoken.mako',
                                   'scope': 'config'},
                          'title': {'html': 'motptoken.mako',
                                    'scope': 'config.title', },
                          },
               'user': ['enroll'],
               # This tokentype is enrollable in the UI for...
               'ui_enroll': ["admin", "user"],
               'policy': {'user': {'motp_webprovision': {'type': 'bool',
                                                         'desc': 'Enroll mOTP token via QR-Code.'}
                                   }}
               }
        if key is not None and key in res:
            ret = res.get(key)
        else:
            if ret == 'all':
                ret = res
        return ret

    @log_with(log)
    def __init__(self, db_token):
        """
        constructor - create a token object

        :param a_token: instance of the orm db object
        :type a_token: orm object
        """
        TokenClass.__init__(self, db_token)
        self.set_type(u"motp")
        # An OTP key must be supplied or generated on enrollment.
        self.hKeyRequired = True
        return

    @log_with(log)
    def get_init_detail(self, params=None, user=None):
        """
        to complete the token normalisation, the response of the initialization
        should be build by the token specific method, the getInitDetails
        """
        response_detail = TokenClass.get_init_detail(self, params, user)
        otpkey = self.init_details.get('otpkey')
        if otpkey:
            # NOTE(review): tok_type is computed but never used — candidate
            # for removal.
            tok_type = self.type.lower()
            if user is not None:
                try:
                    # Build the provisioning URL (and QR image) for the app.
                    motp_url = create_motp_url(otpkey,
                                               user.login, user.realm,
                                               serial=self.get_serial())
                    response_detail["motpurl"] = {"description": _("URL for MOTP "
                                                                   "token"),
                                                  "value": motp_url,
                                                  "img": create_img(motp_url,
                                                                    width=250)
                                                  }
                except Exception as ex:  # pragma: no cover
                    log.debug("{0!s}".format(traceback.format_exc()))
                    log.error('failed to set motp url: {0!r}'.format(ex))
        return response_detail

    @log_with(log)
    def update(self, param, reset_failcount=True):
        """
        update - process initialization parameters

        :param param: dict of initialization parameters
        :type param: dict
        :return: nothing
        """
        if self.hKeyRequired is True:
            genkey = int(getParam(param, "genkey", optional) or 0)
            if not param.get('keysize'):
                param['keysize'] = 16
            if 1 == genkey:
                otpKey = generate_otpkey(param['keysize'])
                del param['genkey']
            else:
                # genkey not set: check otpkey is given
                # this will raise an exception if otpkey is not present
                otpKey = getParam(param, "otpkey", required)
            param['otpkey'] = otpKey
        # motp token specific: the mOTP PIN is stored as the token user pin
        mOTPPin = getParam(param, "motppin", required)
        self.token.set_user_pin(mOTPPin)
        TokenClass.update(self, param, reset_failcount)
        return

    @log_with(log)
    @check_token_locked
    def check_otp(self, anOtpVal, counter=None, window=None, options=None):
        """
        validate the token otp against a given otpvalue

        :param anOtpVal: the to be verified otpvalue
        :type anOtpVal: string
        :param counter: the counter state, that should be verified
        :type counter: int
        :param window: the counter +window, which should be checked
        :type window: int
        :param options: the dict, which could contain token specific info
        :type options: dict
        :return: the counter state or -1
        :rtype: int
        """
        otplen = self.token.otplen
        # otime contains the previous verification time
        # the new one must be newer than this!
        oCount = self.get_otp_count()
        secretHOtp = self.token.get_otpkey()
        window = self.token.count_window
        secretPin = self.token.get_user_pin()
        log.debug("original counter %s", oCount)
        mtimeOtp = mTimeOtp(secretHOtp, secretPin, oCount, otplen)
        res = mtimeOtp.checkOtp(anOtpVal, window, options=options)
        # Replay protection: reject any counter not strictly newer than the
        # last successful one.
        if res != -1 and oCount != 0 and res <= oCount:
            log.warning("a previous OTP value was used again! former "
                        "tokencounter: %i, presented counter %i" %
                        (oCount, res))
            res = -1
            return res
        if res != -1:
            # on success, we have to save the last attempt
            self.set_otp_count(res)
        return res
|
from flask import flash, request, url_for
from flask import Markup
from flask_admin.contrib.sqla import ModelView
from wtforms import validators
from labonneboite.common.models import OfficeAdminAdd, OfficeAdminRemove
from labonneboite.web.admin.forms import nospace_filter, phone_validator, strip_filter, siret_validator
from labonneboite.web.admin.utils import datetime_format, AdminModelViewMixin, SelectForChoiceTypeField
class OfficeAdminRemoveModelView(AdminModelViewMixin, ModelView):
    """
    Admin interface for the `OfficeAdminRemove` model.
    http://flask-admin.readthedocs.io/en/latest/api/mod_model/
    """

    can_view_details = True
    column_searchable_list = ['siret', 'name']
    column_default_sort = ('date_created', True)
    page_size = 100

    column_list = [
        'siret',
        'name',
        'reason',
        'date_created',
        'date_updated',
        'date_follow_up_phone_call',
        'initiative',
    ]

    column_details_list = [
        'siret',
        'name',
        'initiative',
        'date_follow_up_phone_call',
        'requested_by_email',
        'requested_by_first_name',
        'requested_by_last_name',
        'requested_by_phone',
        'reason',
        'created_by',
        'date_created',
        'updated_by',
        'date_updated',
    ]

    column_formatters = {
        'date_created': datetime_format,
        'date_follow_up_phone_call': datetime_format,
        'date_updated': datetime_format,
    }

    column_labels = {
        'siret': "Siret",
        'name': "Nom de l'entreprise",
        'reason': "Raison",
        'initiative': "À l'initiative de",
        'date_follow_up_phone_call': "Date de rappel",
        'requested_by_email': "Email",
        'requested_by_first_name': "Prénom",
        'requested_by_last_name': "Nom",
        'requested_by_phone': "Téléphone",
        'date_created': "Date de création",
        'date_updated': "Date de modification",
        'created_by': "Créé par",
        'updated_by': "Modifié par",
    }

    column_descriptions = {
        'initiative': "Permet de préciser qui est à l'initiative de la suppression.",
        'requested_by_email': "Email de la personne qui demande la suppression.",
        'requested_by_first_name': "Prénom de la personne qui demande la suppression.",
        'requested_by_last_name': "Nom de la personne qui demande la suppression.",
        'requested_by_phone': "Téléphone de la personne qui demande la suppression.",
        'reason': "Raison de la suppression.",
        'date_follow_up_phone_call': "Date de rappel de l'employeur",
    }

    form_columns = [
        'siret',
        'name',
        'reason',
        'initiative',
        'date_follow_up_phone_call',
        'requested_by_email',
        'requested_by_first_name',
        'requested_by_last_name',
        'requested_by_phone',
    ]

    form_overrides = {
        'initiative': SelectForChoiceTypeField,
    }

    form_args = {
        'siret': {
            'filters': [strip_filter, nospace_filter],
            'validators': [siret_validator],
        },
        'name': {
            'filters': [strip_filter],
        },
        'reason': {
            'filters': [strip_filter],
        },
        'initiative': {
            'choices': OfficeAdminRemove.INITIATIVE_CHOICES,
        },
        'requested_by_email': {
            'validators': [validators.optional(), validators.Email()],
        },
        'requested_by_first_name': {
            'filters': [strip_filter],
        },
        'requested_by_last_name': {
            'filters': [strip_filter],
        },
        'requested_by_phone': {
            'filters': [strip_filter, nospace_filter],
            'validators': [validators.optional(), phone_validator],
        },
    }

    # Form fields that may be pre-populated via the URL query string.
    _PREFILL_FIELDS = (
        'siret',
        'name',
        'requested_by_email',
        'requested_by_first_name',
        'requested_by_last_name',
        'requested_by_phone',
        'reason',
    )

    def create_form(self):
        """
        Return the creation form, pre-populating any field passed in the
        query string (used by links from other admin pages).
        """
        # Refactor: the previous implementation repeated the same
        # if-in-request.args/assign pattern once per field.
        form = super(OfficeAdminRemoveModelView, self).create_form()
        for field_name in self._PREFILL_FIELDS:
            if field_name in request.args:
                getattr(form, field_name).data = request.args[field_name]
        return form

    def validate_form(self, form):
        """
        Ensure that the office to remove does not already exist in `OfficeAdminAdd`.
        """
        is_valid = super(OfficeAdminRemoveModelView, self).validate_form(form)
        if is_valid and 'siret' in list(form.data.keys()):
            office_to_add = OfficeAdminAdd.query.filter_by(siret=form.data['siret']).first()
            if office_to_add:
                # Use the link of the list view with a filter on the `siret`, because
                # the delete button is missing on the edit and/or detail view.
                # https://github.com/flask-admin/flask-admin/issues/1327
                office_to_add_url = url_for('officeadminadd.index_view', search=office_to_add.siret)
                msg = (
                    "Vous ne pouvez pas supprimer cette entreprise car elle existe déjà dans la liste "
                    "<b>Ajouter une entreprise</b>.<br>Vous devez d'abord "
                    '<a target="_blank" href="{url}">la supprimer de cette liste</a>.'.format(url=office_to_add_url)
                )
                flash(Markup(msg), 'error')
                return False
        return is_valid
|
# OpenERP/Odoo module manifest: Guatemalan chart of accounts localization.
{
    'name': 'Guatemala - Accounting',
    'version': '3.0',
    'category': 'Localization/Account Charts',
    # Bilingual (Spanish / English) description, shown in the apps list.
    'description': """
This is the base module to manage the accounting chart for Guatemala.
=====================================================================
Agrega una nomenclatura contable para Guatemala. También icluye impuestos y
la moneda del Quetzal. -- Adds accounting chart for Guatemala. It also includes
taxes and the Quetzal currency.""",
    'author': 'José Rodrigo Fernández Menegazzo',
    'website': 'http://solucionesprisma.com/',
    # Chart modules build on the generic accounting apps.
    'depends': ['base', 'account', 'account_chart'],
    # XML records loaded at install time, in the listed order.
    'data': [
        'account_types.xml',
        'account_chart.xml',
        'account_tax.xml',
        'l10n_gt_base.xml',
    ],
    'demo': [],
    'installable': True,
    'images': ['images/config_chart_l10n_gt.jpeg','images/l10n_gt_chart.jpeg'],
}
|
from __future__ import unicode_literals
from django.db import migrations, models
import datetime
from django.utils.timezone import utc
import apps.txtrender.fields
class Migration(migrations.Migration):
    """Forum schema update: adds rendered HTML/text companion fields and
    ``last_modification_date`` timestamps, and switches description/content
    fields to the project's render-aware text field.

    The hard-coded ``datetime(2016, 1, 1, ...)`` values below are the one-off
    defaults captured when the migration was generated
    (``preserve_default=False``): they only back-fill existing rows and are
    not retained as field defaults afterwards.
    """

    dependencies = [
        ('forum', '0002_forumthreadpost_content_text'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='forumthreadpost',
            options={'verbose_name': 'Forum post', 'permissions': (('can_see_ip_address', 'Can see IP address'), ('allow_titles_in_post', 'Allow titles in forum post'), ('allow_alerts_box_in_post', 'Allow alerts box in forum post'), ('allow_text_colors_in_post', 'Allow coloured text in forum post'), ('allow_cdm_extra_in_post', 'Allow CDM extra in forum post'), ('allow_raw_link_in_post', 'Allow raw link (without forcing nofollow) in forum post')), 'verbose_name_plural': 'Forum posts', 'get_latest_by': 'pub_date', 'ordering': ('-pub_date',)},
        ),
        migrations.AddField(
            model_name='forum',
            name='description_html',
            field=models.TextField(verbose_name='Description (raw HTML)', default=''),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='forum',
            name='description_text',
            field=models.TextField(verbose_name='Description (raw text)', default=''),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='forum',
            name='last_modification_date',
            field=models.DateTimeField(verbose_name='Last modification date', auto_now=True, default=datetime.datetime(2016, 1, 1, 17, 39, 44, 851386, tzinfo=utc)),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='forumthread',
            name='last_modification_date',
            field=models.DateTimeField(verbose_name='Last modification date', auto_now=True, default=datetime.datetime(2016, 1, 1, 17, 39, 48, 403848, tzinfo=utc)),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='forumthreadpost',
            name='footnotes_html',
            field=models.TextField(verbose_name='Content footnotes (raw HTML)', default=''),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='forumthreadpost',
            name='last_content_modification_date',
            field=models.DateTimeField(verbose_name='Last content modification date', db_index=True, default=datetime.datetime(2016, 1, 1, 17, 39, 57, 300151, tzinfo=utc)),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='forumthreadpost',
            name='summary_html',
            field=models.TextField(verbose_name='Content summary (raw HTML)', default=''),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='forum',
            name='description',
            field=apps.txtrender.fields.RenderTextField(verbose_name='Description'),
        ),
        migrations.AlterField(
            model_name='forumthreadpost',
            name='last_modification_date',
            field=models.DateTimeField(verbose_name='Last modification date', auto_now=True),
        ),
    ]
|
from openerp.addons.account_report_webkit.report.general_ledger import (
GeneralLedgerWebkit
)
from openerp.addons.account_report_webkit.report.webkit_parser_header_fix import (
HeaderFooterTextWebKitParser
)
class GeneralLedgerCsv(GeneralLedgerWebkit):
    """Variant of the webkit general-ledger parser used only to collect the
    report's browse objects so they can be exported as CSV."""

    def __init__(self, cursor, uid, context):
        # The report name argument is irrelevant for CSV extraction, hence ''.
        super(GeneralLedgerCsv, self).__init__(
            cursor, uid, '', context=context)

    def get_objects(self, data, ids):
        """Simulate a Webkit report to gather objects to be extracted as CSV,
        then return them."""
        # set_context() (inherited) is expected to fill self.localcontext as a
        # side effect; the empty list stands in for browse records.
        self.set_context([], data, ids)
        return self.localcontext['objects']
# Register the CSV flavour of the general ledger under its report service
# name. The empty string is the template path — presumably unused for the
# CSV output path; TODO confirm against HeaderFooterTextWebKitParser.
HeaderFooterTextWebKitParser(
    'report.account.account_report_general_ledger_csv',
    'account.account',
    '',
    parser=GeneralLedgerCsv
)
|
""" Config is the class to read, load and manipulate the user
configuration. It read a main cfg (nagios.cfg) and get all informations
from it. It create objects, make link between them, clean them, and cut
them into independent parts. The main user of this is Arbiter, but schedulers
use it too (but far less)"""
import re
import sys
import string
import os
import socket
import itertools
import time
import random
import cPickle
import tempfile
from StringIO import StringIO
from multiprocessing import Process, Manager
import json
from item import Item
from timeperiod import Timeperiod, Timeperiods
from service import Service, Services
from command import Command, Commands
from resultmodulation import Resultmodulation, Resultmodulations
from businessimpactmodulation import Businessimpactmodulation, Businessimpactmodulations
from escalation import Escalation, Escalations
from serviceescalation import Serviceescalation, Serviceescalations
from hostescalation import Hostescalation, Hostescalations
from host import Host, Hosts
from hostgroup import Hostgroup, Hostgroups
from realm import Realm, Realms
from contact import Contact, Contacts
from contactgroup import Contactgroup, Contactgroups
from notificationway import NotificationWay, NotificationWays
from checkmodulation import CheckModulation, CheckModulations
from macromodulation import MacroModulation, MacroModulations
from servicegroup import Servicegroup, Servicegroups
from servicedependency import Servicedependency, Servicedependencies
from hostdependency import Hostdependency, Hostdependencies
from module import Module, Modules
from discoveryrule import Discoveryrule, Discoveryrules
from discoveryrun import Discoveryrun, Discoveryruns
from hostextinfo import HostExtInfo, HostsExtInfo
from serviceextinfo import ServiceExtInfo, ServicesExtInfo
from trigger import Triggers
from pack import Packs
from shinken.util import split_semicolon
from shinken.objects.arbiterlink import ArbiterLink, ArbiterLinks
from shinken.objects.schedulerlink import SchedulerLink, SchedulerLinks
from shinken.objects.reactionnerlink import ReactionnerLink, ReactionnerLinks
from shinken.objects.brokerlink import BrokerLink, BrokerLinks
from shinken.objects.receiverlink import ReceiverLink, ReceiverLinks
from shinken.objects.pollerlink import PollerLink, PollerLinks
from shinken.graph import Graph
from shinken.log import logger
from shinken.property import (UnusedProp, BoolProp, IntegerProp, CharProp,
StringProp, LogLevelProp, ListProp, ToGuessProp)
from shinken.daemon import get_cur_user, get_cur_group
from shinken.util import jsonify_r
# Shared deprecation messages reused by the UnusedProp entries in
# Config.properties below (shown to users when they set a dropped option).
no_longer_used_txt = ('This parameter is not longer take from the main file, but must be defined '
                      'in the status_dat broker module instead. But Shinken will create you one '
                      'if there are no present and use this parameter in it, so no worry.')
not_interresting_txt = 'We do not think such an option is interesting to manage.'
class Config(Item):
cache_path = "objects.cache"
my_type = "config"
    # Properties:
    # *required: if True, there is no default and the config must provide a value
    # *default: if not set, take this value
    # *pythonize: function to call to convert the raw string value
    # *class_inherit: (Service, 'blabla'): must set this property on the
    #   Service class under the name blabla;
    #   if (Service, None): must set this property on the Service class
    #   under the same name
    # *unused: just to warn the user that the option he uses is no longer used
    #   in Shinken
    # *usage_text: if present, will be printed to explain why it's no longer useful
properties = {
'prefix':
StringProp(default='/usr/local/shinken/'),
'workdir':
StringProp(default='/var/run/shinken/'),
'config_base_dir':
StringProp(default=''), # will be set when we will load a file
'modules_dir':
StringProp(default='/var/lib/shinken/modules'),
'use_local_log':
BoolProp(default=True),
'log_level':
LogLevelProp(default='WARNING'),
'local_log':
StringProp(default='/var/log/shinken/arbiterd.log'),
'log_file':
UnusedProp(text=no_longer_used_txt),
'object_cache_file':
UnusedProp(text=no_longer_used_txt),
'precached_object_file':
UnusedProp(text='Shinken does not use precached_object_files. Skipping.'),
'resource_file':
StringProp(default='/tmp/resources.txt'),
'temp_file':
UnusedProp(text='Temporary files are not used in the shinken architecture. Skipping'),
'status_file':
UnusedProp(text=no_longer_used_txt),
'status_update_interval':
UnusedProp(text=no_longer_used_txt),
'shinken_user':
StringProp(default=get_cur_user()),
'shinken_group':
StringProp(default=get_cur_group()),
'enable_notifications':
BoolProp(default=True, class_inherit=[(Host, None), (Service, None), (Contact, None)]),
'execute_service_checks':
BoolProp(default=True, class_inherit=[(Service, 'execute_checks')]),
'accept_passive_service_checks':
BoolProp(default=True, class_inherit=[(Service, 'accept_passive_checks')]),
'execute_host_checks':
BoolProp(default=True, class_inherit=[(Host, 'execute_checks')]),
'accept_passive_host_checks':
BoolProp(default=True, class_inherit=[(Host, 'accept_passive_checks')]),
'enable_event_handlers':
BoolProp(default=True, class_inherit=[(Host, None), (Service, None)]),
'log_rotation_method':
CharProp(default='d'),
'log_archive_path':
StringProp(default='/usr/local/shinken/var/archives'),
'check_external_commands':
BoolProp(default=True),
'command_check_interval':
UnusedProp(text='another value than look always the file is useless, so we fix it.'),
'command_file':
StringProp(default=''),
'external_command_buffer_slots':
UnusedProp(text='We do not limit the external command slot.'),
'check_for_updates':
UnusedProp(text='network administrators will never allow such communication between '
'server and the external world. Use your distribution packet manager '
'to know if updates are available or go to the '
'http://www.shinken-monitoring.org website instead.'),
'bare_update_checks':
UnusedProp(text=None),
'lock_file':
StringProp(default='/var/run/shinken/arbiterd.pid'),
'retain_state_information':
UnusedProp(text='sorry, retain state information will not be implemented '
'because it is useless.'),
'state_retention_file':
StringProp(default=''),
'retention_update_interval':
IntegerProp(default=60),
'use_retained_program_state':
UnusedProp(text=not_interresting_txt),
'use_retained_scheduling_info':
UnusedProp(text=not_interresting_txt),
'retained_host_attribute_mask':
UnusedProp(text=not_interresting_txt),
'retained_service_attribute_mask':
UnusedProp(text=not_interresting_txt),
'retained_process_host_attribute_mask':
UnusedProp(text=not_interresting_txt),
'retained_process_service_attribute_mask':
UnusedProp(text=not_interresting_txt),
'retained_contact_host_attribute_mask':
UnusedProp(text=not_interresting_txt),
'retained_contact_service_attribute_mask':
UnusedProp(text=not_interresting_txt),
'use_syslog':
BoolProp(default=False),
'log_notifications':
BoolProp(default=True, class_inherit=[(Host, None), (Service, None)]),
'log_service_retries':
BoolProp(default=True, class_inherit=[(Service, 'log_retries')]),
'log_host_retries':
BoolProp(default=True, class_inherit=[(Host, 'log_retries')]),
'log_event_handlers':
BoolProp(default=True, class_inherit=[(Host, None), (Service, None)]),
'log_initial_states':
BoolProp(default=True, class_inherit=[(Host, None), (Service, None)]),
'log_external_commands':
BoolProp(default=True),
'log_passive_checks':
BoolProp(default=True),
'global_host_event_handler':
StringProp(default='', class_inherit=[(Host, 'global_event_handler')]),
'global_service_event_handler':
StringProp(default='', class_inherit=[(Service, 'global_event_handler')]),
'sleep_time':
UnusedProp(text='this deprecated option is useless in the shinken way of doing.'),
'service_inter_check_delay_method':
UnusedProp(text='This option is useless in the Shinken scheduling. '
'The only way is the smart way.'),
'max_service_check_spread':
IntegerProp(default=30, class_inherit=[(Service, 'max_check_spread')]),
'service_interleave_factor':
UnusedProp(text='This option is useless in the Shinken scheduling '
'because it use a random distribution for initial checks.'),
'max_concurrent_checks':
UnusedProp(text='Limiting the max concurrent checks is not helpful '
'to got a good running monitoring server.'),
'check_result_reaper_frequency':
UnusedProp(text='Shinken do not use reaper process.'),
'max_check_result_reaper_time':
UnusedProp(text='Shinken do not use reaper process.'),
'check_result_path':
UnusedProp(text='Shinken use in memory returns, not check results on flat file.'),
'max_check_result_file_age':
UnusedProp(text='Shinken do not use flat file check resultfiles.'),
'host_inter_check_delay_method':
UnusedProp(text='This option is unused in the Shinken scheduling because distribution '
'of the initial check is a random one.'),
'max_host_check_spread':
IntegerProp(default=30, class_inherit=[(Host, 'max_check_spread')]),
'interval_length':
IntegerProp(default=60, class_inherit=[(Host, None), (Service, None)]),
'auto_reschedule_checks':
BoolProp(managed=False, default=True),
'auto_rescheduling_interval':
IntegerProp(managed=False, default=1),
'auto_rescheduling_window':
IntegerProp(managed=False, default=180),
'use_aggressive_host_checking':
BoolProp(default=False, class_inherit=[(Host, None)]),
'translate_passive_host_checks':
BoolProp(managed=False, default=True),
'passive_host_checks_are_soft':
BoolProp(managed=False, default=True),
'enable_predictive_host_dependency_checks':
BoolProp(managed=False,
default=True,
class_inherit=[(Host, 'enable_predictive_dependency_checks')]),
'enable_predictive_service_dependency_checks':
BoolProp(managed=False, default=True),
'cached_host_check_horizon':
IntegerProp(default=0, class_inherit=[(Host, 'cached_check_horizon')]),
'cached_service_check_horizon':
IntegerProp(default=0, class_inherit=[(Service, 'cached_check_horizon')]),
'use_large_installation_tweaks':
UnusedProp(text='this option is deprecated because in shinken it is just an alias '
'for enable_environment_macros=0'),
'free_child_process_memory':
UnusedProp(text='this option is automatic in Python processes'),
'child_processes_fork_twice':
UnusedProp(text='fork twice is not use.'),
'enable_environment_macros':
BoolProp(default=True, class_inherit=[(Host, None), (Service, None)]),
'enable_flap_detection':
BoolProp(default=True, class_inherit=[(Host, None), (Service, None)]),
'low_service_flap_threshold':
IntegerProp(default=20, class_inherit=[(Service, 'global_low_flap_threshold')]),
'high_service_flap_threshold':
IntegerProp(default=30, class_inherit=[(Service, 'global_high_flap_threshold')]),
'low_host_flap_threshold':
IntegerProp(default=20, class_inherit=[(Host, 'global_low_flap_threshold')]),
'high_host_flap_threshold':
IntegerProp(default=30, class_inherit=[(Host, 'global_high_flap_threshold')]),
'soft_state_dependencies':
BoolProp(managed=False, default=False),
'service_check_timeout':
IntegerProp(default=60, class_inherit=[(Service, 'check_timeout')]),
'host_check_timeout':
IntegerProp(default=30, class_inherit=[(Host, 'check_timeout')]),
'timeout_exit_status':
IntegerProp(default=2),
'event_handler_timeout':
IntegerProp(default=30, class_inherit=[(Host, None), (Service, None)]),
'notification_timeout':
IntegerProp(default=30, class_inherit=[(Host, None), (Service, None)]),
'ocsp_timeout':
IntegerProp(default=15, class_inherit=[(Service, None)]),
'ochp_timeout':
IntegerProp(default=15, class_inherit=[(Host, None)]),
'perfdata_timeout':
IntegerProp(default=5, class_inherit=[(Host, None), (Service, None)]),
'obsess_over_services':
BoolProp(default=False, class_inherit=[(Service, 'obsess_over')]),
'ocsp_command':
StringProp(default='', class_inherit=[(Service, None)]),
'obsess_over_hosts':
BoolProp(default=False, class_inherit=[(Host, 'obsess_over')]),
'ochp_command':
StringProp(default='', class_inherit=[(Host, None)]),
'process_performance_data':
BoolProp(default=True, class_inherit=[(Host, None), (Service, None)]),
'host_perfdata_command':
StringProp(default='', class_inherit=[(Host, 'perfdata_command')]),
'service_perfdata_command':
StringProp(default='', class_inherit=[(Service, 'perfdata_command')]),
'host_perfdata_file':
StringProp(default='', class_inherit=[(Host, 'perfdata_file')]),
'service_perfdata_file':
StringProp(default='', class_inherit=[(Service, 'perfdata_file')]),
'host_perfdata_file_template':
StringProp(default='/tmp/host.perf', class_inherit=[(Host, 'perfdata_file_template')]),
'service_perfdata_file_template':
StringProp(default='/tmp/host.perf',
class_inherit=[(Service, 'perfdata_file_template')]),
'host_perfdata_file_mode':
CharProp(default='a', class_inherit=[(Host, 'perfdata_file_mode')]),
'service_perfdata_file_mode':
CharProp(default='a', class_inherit=[(Service, 'perfdata_file_mode')]),
'host_perfdata_file_processing_interval':
IntegerProp(managed=False, default=15),
'service_perfdata_file_processing_interval':
IntegerProp(managed=False, default=15),
'host_perfdata_file_processing_command':
StringProp(managed=False,
default='',
class_inherit=[(Host, 'perfdata_file_processing_command')]),
'service_perfdata_file_processing_command':
StringProp(managed=False, default=None),
'check_for_orphaned_services':
BoolProp(default=True, class_inherit=[(Service, 'check_for_orphaned')]),
'check_for_orphaned_hosts':
BoolProp(default=True, class_inherit=[(Host, 'check_for_orphaned')]),
'check_service_freshness':
BoolProp(default=True, class_inherit=[(Service, 'global_check_freshness')]),
'service_freshness_check_interval':
IntegerProp(default=60),
'check_host_freshness':
BoolProp(default=True, class_inherit=[(Host, 'global_check_freshness')]),
'host_freshness_check_interval':
IntegerProp(default=60),
'additional_freshness_latency':
IntegerProp(default=15, class_inherit=[(Host, None), (Service, None)]),
'enable_embedded_perl':
BoolProp(managed=False,
default=True,
help='It will surely never be managed, '
'but it should not be useful with poller performances.'),
'use_embedded_perl_implicitly':
BoolProp(managed=False, default=False),
'date_format':
StringProp(managed=False, default=None),
'use_timezone':
StringProp(default='', class_inherit=[(Host, None), (Service, None), (Contact, None)]),
'illegal_object_name_chars':
StringProp(default="""`~!$%^&*"|'<>?,()=""",
class_inherit=[(Host, None), (Service, None),
(Contact, None), (HostExtInfo, None)]),
'illegal_macro_output_chars':
StringProp(default='',
class_inherit=[(Host, None), (Service, None), (Contact, None)]),
'use_regexp_matching':
BoolProp(managed=False,
default=False,
help='If you go some host or service definition like prod*, '
'it will surely failed from now, sorry.'),
'use_true_regexp_matching':
BoolProp(managed=False, default=None),
'admin_email':
UnusedProp(text='sorry, not yet implemented.'),
'admin_pager':
UnusedProp(text='sorry, not yet implemented.'),
'event_broker_options':
UnusedProp(text='event broker are replaced by modules '
'with a real configuration template.'),
'broker_module':
StringProp(default=''),
'debug_file':
UnusedProp(text=None),
'debug_level':
UnusedProp(text=None),
'debug_verbosity':
UnusedProp(text=None),
'max_debug_file_size':
UnusedProp(text=None),
'modified_attributes':
IntegerProp(default=0L),
# '$USERn$: {'required':False, 'default':''} # Add at run in __init__
# SHINKEN SPECIFIC
'idontcareaboutsecurity':
BoolProp(default=False),
'daemon_enabled':
BoolProp(default=True), # Put to 0 to disable the arbiter to run
'graceful_enabled':
BoolProp(default=False),
'aggressive_memory_management':
BoolProp(default=False),
'daemon_thread_pool_size':
IntegerProp(default=16),
'flap_history':
IntegerProp(default=20, class_inherit=[(Host, None), (Service, None)]),
'max_plugins_output_length':
IntegerProp(default=8192, class_inherit=[(Host, None), (Service, None)]),
'no_event_handlers_during_downtimes':
BoolProp(default=False, class_inherit=[(Host, None), (Service, None)]),
# Interval between cleaning queues pass
'cleaning_queues_interval':
IntegerProp(default=900),
# Enable or not the notice about old Nagios parameters
'disable_old_nagios_parameters_whining':
BoolProp(default=False),
# Now for problem/impact states changes
'enable_problem_impacts_states_change':
BoolProp(default=False, class_inherit=[(Host, None), (Service, None)]),
# More a running value in fact
'resource_macros_names':
ListProp(default=[]),
'http_backend':
StringProp(default='auto'),
# SSL PART
# global boolean for know if we use ssl or not
'use_ssl':
BoolProp(default=False,
class_inherit=[(SchedulerLink, None), (ReactionnerLink, None),
(BrokerLink, None), (PollerLink, None),
(ReceiverLink, None), (ArbiterLink, None)]),
'ca_cert':
StringProp(default='etc/certs/ca.pem'),
'server_cert':
StringProp(default='etc/certs/server.cert'),
'server_key':
StringProp(default='etc/certs/server.key'),
'hard_ssl_name_check':
BoolProp(default=False),
# Log format
'human_timestamp_log':
BoolProp(default=False),
# Discovery part
'strip_idname_fqdn':
BoolProp(default=True),
'runners_timeout':
IntegerProp(default=3600),
# pack_distribution_file is for keeping a distribution history
# of the host distribution in the several "packs" so a same
# scheduler will have more change of getting the same host
'pack_distribution_file':
StringProp(default='pack_distribution.dat'),
# WEBUI part
'webui_lock_file':
StringProp(default='webui.pid'),
'webui_port':
IntegerProp(default=8080),
'webui_host':
StringProp(default='0.0.0.0'),
# Large env tweacks
'use_multiprocesses_serializer':
BoolProp(default=False),
# About shinken.io part
'api_key':
StringProp(default='',
class_inherit=[(SchedulerLink, None), (ReactionnerLink, None),
(BrokerLink, None), (PollerLink, None),
(ReceiverLink, None), (ArbiterLink, None)]),
'secret':
StringProp(default='',
class_inherit=[(SchedulerLink, None), (ReactionnerLink, None),
(BrokerLink, None), (PollerLink, None),
(ReceiverLink, None), (ArbiterLink, None)]),
'http_proxy':
StringProp(default='',
class_inherit=[(SchedulerLink, None), (ReactionnerLink, None),
(BrokerLink, None), (PollerLink, None),
(ReceiverLink, None), (ArbiterLink, None)]),
# and local statsd one
'statsd_host':
StringProp(default='localhost',
class_inherit=[(SchedulerLink, None), (ReactionnerLink, None),
(BrokerLink, None), (PollerLink, None),
(ReceiverLink, None), (ArbiterLink, None)]),
'statsd_port':
IntegerProp(default=8125,
class_inherit=[(SchedulerLink, None), (ReactionnerLink, None),
(BrokerLink, None), (PollerLink, None),
(ReceiverLink, None), (ArbiterLink, None)]),
'statsd_prefix': StringProp(default='shinken',
class_inherit=[(SchedulerLink, None), (ReactionnerLink, None),
(BrokerLink, None), (PollerLink, None),
(ReceiverLink, None), (ArbiterLink, None)]),
'statsd_enabled': BoolProp(default=False,
class_inherit=[(SchedulerLink, None), (ReactionnerLink, None),
(BrokerLink, None), (PollerLink, None),
(ReceiverLink, None), (ArbiterLink, None)]),
'statsd_interval':
IntegerProp(default=5,
class_inherit=[(SchedulerLink, None), (ReactionnerLink, None),
(BrokerLink, None), (PollerLink, None),
(ReceiverLink, None), (ArbiterLink, None)]),
'statsd_types':
StringProp(default=None,
class_inherit=[(SchedulerLink, None), (ReactionnerLink, None),
(BrokerLink, None), (PollerLink, None),
(ReceiverLink, None), (ArbiterLink, None)]),
'statsd_pattern':
StringProp(default=None,
class_inherit=[(SchedulerLink, None), (ReactionnerLink, None),
(BrokerLink, None), (PollerLink, None),
(ReceiverLink, None), (ArbiterLink, None)]),
}
macros = {
'PREFIX': 'prefix',
'MAINCONFIGFILE': '',
'STATUSDATAFILE': '',
'COMMENTDATAFILE': '',
'DOWNTIMEDATAFILE': '',
'RETENTIONDATAFILE': '',
'OBJECTCACHEFILE': '',
'TEMPFILE': '',
'TEMPPATH': '',
'LOGFILE': '',
'RESOURCEFILE': '',
'COMMANDFILE': 'command_file',
'HOSTPERFDATAFILE': '',
'SERVICEPERFDATAFILE': '',
'ADMINEMAIL': '',
'ADMINPAGER': ''
# 'USERn': '$USERn$' # Add at run time
}
# We create dict of objects
# Type: 'name in objects': {Class of object, Class of objects,
# 'property for self for the objects(config)'
types_creations = {
'timeperiod':
(Timeperiod, Timeperiods, 'timeperiods', True),
'service':
(Service, Services, 'services', False),
'servicegroup':
(Servicegroup, Servicegroups, 'servicegroups', True),
'command':
(Command, Commands, 'commands', True),
'host':
(Host, Hosts, 'hosts', True),
'hostgroup':
(Hostgroup, Hostgroups, 'hostgroups', True),
'contact':
(Contact, Contacts, 'contacts', True),
'contactgroup':
(Contactgroup, Contactgroups, 'contactgroups', True),
'notificationway':
(NotificationWay, NotificationWays, 'notificationways', True),
'checkmodulation':
(CheckModulation, CheckModulations, 'checkmodulations', True),
'macromodulation':
(MacroModulation, MacroModulations, 'macromodulations', True),
'servicedependency':
(Servicedependency, Servicedependencies, 'servicedependencies', True),
'hostdependency':
(Hostdependency, Hostdependencies, 'hostdependencies', True),
'arbiter':
(ArbiterLink, ArbiterLinks, 'arbiters', True),
'scheduler':
(SchedulerLink, SchedulerLinks, 'schedulers', True),
'reactionner':
(ReactionnerLink, ReactionnerLinks, 'reactionners', True),
'broker':
(BrokerLink, BrokerLinks, 'brokers', True),
'receiver':
(ReceiverLink, ReceiverLinks, 'receivers', True),
'poller':
(PollerLink, PollerLinks, 'pollers', True),
'realm':
(Realm, Realms, 'realms', True),
'module':
(Module, Modules, 'modules', True),
'resultmodulation':
(Resultmodulation, Resultmodulations, 'resultmodulations', True),
'businessimpactmodulation':
(Businessimpactmodulation, Businessimpactmodulations,
'businessimpactmodulations', True),
'escalation':
(Escalation, Escalations, 'escalations', True),
'serviceescalation':
(Serviceescalation, Serviceescalations, 'serviceescalations', False),
'hostescalation':
(Hostescalation, Hostescalations, 'hostescalations', False),
'discoveryrule':
(Discoveryrule, Discoveryrules, 'discoveryrules', True),
'discoveryrun':
(Discoveryrun, Discoveryruns, 'discoveryruns', True),
'hostextinfo':
(HostExtInfo, HostsExtInfo, 'hostsextinfo', True),
'serviceextinfo':
(ServiceExtInfo, ServicesExtInfo, 'servicesextinfo', True),
}
# This tab is used to transform old parameters name into new ones
# so from Nagios2 format, to Nagios3 ones
old_properties = {
'nagios_user': 'shinken_user',
'nagios_group': 'shinken_group',
'modulesdir': 'modules_dir',
}
read_config_silent = 0
early_created_types = ['arbiter', 'module']
configuration_types = ['void', 'timeperiod', 'command', 'contactgroup', 'hostgroup',
'contact', 'notificationway', 'checkmodulation',
'macromodulation', 'host', 'service', 'servicegroup',
'servicedependency', 'hostdependency', 'arbiter', 'scheduler',
'reactionner', 'broker', 'receiver', 'poller', 'realm', 'module',
'resultmodulation', 'escalation', 'serviceescalation', 'hostescalation',
'discoveryrun', 'discoveryrule', 'businessimpactmodulation',
'hostextinfo', 'serviceextinfo']
    def __init__(self):
        """Initialize an empty, assumed-correct configuration."""
        # Raw key=value parameters read from the main configuration file.
        self.params = {}
        # Names of macros declared in resource files ($USERn$ style).
        self.resource_macros_names = []
        # By default the conf is correct
        self.conf_is_correct = True
        # We tag the conf with a magic_hash, a random value to
        # idify this conf
        # NOTE(review): this reseeds the *module-level* RNG on every
        # Config() — confirm nothing else relies on the global random state.
        random.seed(time.time())
        self.magic_hash = random.randint(1, 100000)
        # Accumulated error messages, reported after parsing.
        self.configuration_errors = []
        # Directories to scan for trigger / pack definitions.
        self.triggers_dirs = []
        self.triggers = Triggers({})
        self.packs_dirs = []
        self.packs = Packs({})
def get_name(self):
return 'global configuration file'
# We've got macro in the resource file and we want
# to update our MACRO dict with it
def fill_resource_macros_names_macros(self):
""" fill the macro dict will all value
from self.resource_macros_names"""
properties = self.__class__.properties
macros = self.__class__.macros
for macro_name in self.resource_macros_names:
properties['$' + macro_name + '$'] = StringProp(default='')
macros[macro_name] = '$' + macro_name + '$'
def clean_params(self, params):
clean_p = {}
for elt in params:
elts = elt.split('=', 1)
if len(elts) == 1: # error, there is no = !
self.conf_is_correct = False
logger.error("[config] the parameter %s is malformed! (no = sign)", elts[0])
elif elts[1] == '':
self.conf_is_correct = False
logger.error("[config] the parameter %s is malformed! (no value after =)", elts[0])
else:
clean_p[elts[0]] = elts[1]
return clean_p
    def load_params(self, params):
        """Apply raw ``key=value`` main-file parameters onto this object.

        Each value is pythonized through the matching entry in ``properties``
        (or ``running_properties``); macro and cfg_file/cfg_dir entries are
        kept verbatim; unknown keys get their type guessed. Macro names are
        also collected into ``self.resource_macros_names``.
        """
        clean_params = self.clean_params(params)
        for key, value in clean_params.items():
            if key in self.properties:
                val = self.properties[key].pythonize(clean_params[key])
            elif key in self.running_properties:
                logger.warning("using a the running property %s in a config file", key)
                val = self.running_properties[key].pythonize(clean_params[key])
            elif key.startswith('$') or key in ['cfg_file', 'cfg_dir']:
                # it's a macro or a useless now param, we don't touch this
                val = value
            else:
                logger.warning("Guessing the property %s type because it is not in "
                               "%s object properties", key, self.__class__.__name__)
                val = ToGuessProp.pythonize(clean_params[key])
            setattr(self, key, val)
            # Maybe it's a variable as $USER$ or $ANOTHERVATRIABLE$
            # so look at the first character. If it's a $, it's a variable
            # and if it's end like it too
            if key[0] == '$' and key[-1] == '$':
                macro_name = key[1:-1]
                self.resource_macros_names.append(macro_name)
        # Change Nagios2 names to Nagios3 ones (before using them)
        self.old_properties_names_to_new()
def _cut_line(self, line):
# punct = '"#$%&\'()*+/<=>?@[\\]^`{|}~'
tmp = re.split("[" + string.whitespace + "]+", line, 1)
r = [elt for elt in tmp if elt != '']
return r
    def read_config(self, files):
        """Concatenate all configuration files into one big string buffer.

        Follows cfg_file/resource_file includes, cfg_dir directories (all
        *.cfg files, recursively) and records triggers_dir/cfg_dir paths on
        self. Every included file is preceded by a '# IMPORTEDFROM=<path>'
        marker so read_config_buf() can report each definition's origin.
        Unreadable files flag self.conf_is_correct = False. Returns the
        whole configuration as one string.
        """
        # just a first pass to get the cfg_file and all files in a buf
        res = StringIO()
        for file in files:
            # We add a \n (or \r\n) to be sure config files are separated
            # if the previous does not finish with a line return
            res.write(os.linesep)
            res.write('# IMPORTEDFROM=%s' % (file) + os.linesep)
            if self.read_config_silent == 0:
                logger.info("[config] opening '%s' configuration file", file)
            try:
                # Open in Universal way for Windows, Mac, Linux
                fd = open(file, 'rU')
                buf = fd.readlines()
                fd.close()
                # Relative includes below are resolved against this dir
                self.config_base_dir = os.path.dirname(file)
            except IOError, exp:
                logger.error("[config] cannot open config file '%s' for reading: %s", file, exp)
                # The configuration is invalid because we have a bad file!
                self.conf_is_correct = False
                continue
            for line in buf:
                line = line.decode('utf8', 'replace')
                res.write(line)
                # Strip line ending and surrounding blanks before matching
                if line.endswith('\n'):
                    line = line[:-1]
                line = line.strip()
                if re.search("^cfg_file", line) or re.search("^resource_file", line):
                    elts = line.split('=', 1)
                    if os.path.isabs(elts[1]):
                        cfg_file_name = elts[1]
                    else:
                        cfg_file_name = os.path.join(self.config_base_dir, elts[1])
                    cfg_file_name = cfg_file_name.strip()
                    try:
                        fd = open(cfg_file_name, 'rU')
                        if self.read_config_silent == 0:
                            logger.info("Processing object config file '%s'", cfg_file_name)
                        res.write(os.linesep + '# IMPORTEDFROM=%s' % (cfg_file_name) + os.linesep)
                        res.write(fd.read().decode('utf8', 'replace'))
                        # Be sure to add a line return so we won't mix files
                        res.write(os.linesep)
                        fd.close()
                    except IOError, exp:
                        logger.error("Cannot open config file '%s' for reading: %s",
                                     cfg_file_name, exp)
                        # The configuration is invalid because we have a bad file!
                        self.conf_is_correct = False
                elif re.search("^cfg_dir", line):
                    elts = line.split('=', 1)
                    if os.path.isabs(elts[1]):
                        cfg_dir_name = elts[1]
                    else:
                        cfg_dir_name = os.path.join(self.config_base_dir, elts[1])
                    # Ok, look if it's really a directory
                    if not os.path.isdir(cfg_dir_name):
                        logger.error("Cannot open config dir '%s' for reading", cfg_dir_name)
                        self.conf_is_correct = False
                        # NOTE(review): no 'continue' here, so the invalid dir is
                        # still appended to packs_dirs and walked below (os.walk
                        # on a missing dir yields nothing) -- confirm intended
                    # Look for .pack file into it :)
                    self.packs_dirs.append(cfg_dir_name)
                    # Now walk for it.
                    for root, dirs, files in os.walk(cfg_dir_name, followlinks=True):
                        for file in files:
                            if re.search("\.cfg$", file):
                                if self.read_config_silent == 0:
                                    logger.info("Processing object config file '%s'",
                                                os.path.join(root, file))
                                try:
                                    res.write(os.linesep + '# IMPORTEDFROM=%s' %
                                              (os.path.join(root, file)) + os.linesep)
                                    fd = open(os.path.join(root, file), 'rU')
                                    res.write(fd.read().decode('utf8', 'replace'))
                                    # Be sure to separate files data
                                    res.write(os.linesep)
                                    fd.close()
                                except IOError, exp:
                                    logger.error("Cannot open config file '%s' for reading: %s",
                                                 os.path.join(root, file), exp)
                                    # The configuration is invalid
                                    # because we have a bad file!
                                    self.conf_is_correct = False
                elif re.search("^triggers_dir", line):
                    elts = line.split('=', 1)
                    if os.path.isabs(elts[1]):
                        trig_dir_name = elts[1]
                    else:
                        trig_dir_name = os.path.join(self.config_base_dir, elts[1])
                    # Ok, look if it's really a directory
                    if not os.path.isdir(trig_dir_name):
                        logger.error("Cannot open triggers dir '%s' for reading", trig_dir_name)
                        self.conf_is_correct = False
                        continue
                    # Ok it's a valid one, I keep it
                    self.triggers_dirs.append(trig_dir_name)
        config = res.getvalue()
        res.close()
        return config
    def read_config_buf(self, buf):
        """Parse the flattened configuration buffer into raw object dicts.

        Splits the buffer (built by read_config) into 'define <type> {...}'
        blocks and global parameters. Global parameters are applied via
        load_params(); '# IMPORTEDFROM=' markers are used to tag every
        object with an 'imported_from file:line' property. Returns a dict
        mapping each object type to a list of {property: [values]} dicts.
        """
        params = []
        objectscfg = {}
        types = self.__class__.configuration_types
        for t in types:
            objectscfg[t] = []
        tmp = []
        tmp_type = 'void'
        in_define = False
        almost_in_define = False  # saw 'define <type>' but not yet the '{'
        continuation_line = False  # previous line ended with a backslash
        tmp_line = ''
        lines = buf.split('\n')
        line_nb = 0  # Keep the line number for the file path
        # NOTE(review): filefrom is only bound by an IMPORTEDFROM marker; the
        # buffer produced by read_config always begins with one -- confirm
        # when feeding this method from elsewhere
        for line in lines:
            if line.startswith("# IMPORTEDFROM="):
                filefrom = line.split('=')[1]
                line_nb = 0  # reset the line number too
                continue
            line_nb += 1
            # Remove comments
            line = split_semicolon(line)[0].strip()
            # A backslash means, there is more to come
            if re.search("\\\s*$", line) is not None:
                continuation_line = True
                line = re.sub("\\\s*$", "", line)
                line = re.sub("^\s+", " ", line)
                tmp_line += line
                continue
            elif continuation_line:
                # Now the continuation line is complete
                line = re.sub("^\s+", "", line)
                line = tmp_line + line
                tmp_line = ''
                continuation_line = False
            # } alone in a line means stop the object reading
            if re.search("^\s*}\s*$", line) is not None:
                in_define = False
            # { alone in a line can mean start object reading
            if re.search("^\s*\{\s*$", line) is not None and almost_in_define:
                almost_in_define = False
                in_define = True
                continue
            # Skip comments, blank lines and closing braces
            if re.search("^\s*#|^\s*$|^\s*}", line) is not None:
                pass
            # A define must be catch and the type save
            # The old entry must be save before
            elif re.search("^define", line) is not None:
                if re.search(".*\{.*$", line) is not None:
                    in_define = True
                else:
                    almost_in_define = True
                # Flush the previous object block before starting a new one
                if tmp_type not in objectscfg:
                    objectscfg[tmp_type] = []
                objectscfg[tmp_type].append(tmp)
                tmp = []
                tmp.append("imported_from " + filefrom + ':%d' % line_nb)
                # Get new type
                elts = re.split('\s', line)
                # Maybe there was space before and after the type
                # so we must get all and strip it
                tmp_type = ' '.join(elts[1:]).strip()
                tmp_type = tmp_type.split('{')[0].strip()
            else:
                if in_define:
                    tmp.append(line)
                else:
                    params.append(line)
        # Maybe the type of the last element is unknown, declare it
        if tmp_type not in objectscfg:
            objectscfg[tmp_type] = []
        objectscfg[tmp_type].append(tmp)
        objects = {}
        self.load_params(params)
        # And then update our MACRO dict
        self.fill_resource_macros_names_macros()
        # Turn each raw text block into a {property: [values]} dict
        for type in objectscfg:
            objects[type] = []
            for items in objectscfg[type]:
                tmp = {}
                for line in items:
                    elts = self._cut_line(line)
                    if elts == []:
                        continue
                    prop = elts[0]
                    if prop not in tmp:
                        tmp[prop] = []
                    value = ' '.join(elts[1:])
                    tmp[prop].append(value)
                if tmp != {}:
                    objects[type].append(tmp)
        return objects
# We need to have some ghost objects like
# the check_command bp_rule for business
# correlator rules
def add_ghost_objects(self, raw_objects):
bp_rule = {'command_name': 'bp_rule', 'command_line': 'bp_rule'}
raw_objects['command'].append(bp_rule)
host_up = {'command_name': '_internal_host_up', 'command_line': '_internal_host_up'}
raw_objects['command'].append(host_up)
echo_obj = {'command_name': '_echo', 'command_line': '_echo'}
raw_objects['command'].append(echo_obj)
# We've got raw objects in string, now create real Instances
def create_objects(self, raw_objects):
""" Create real 'object' from dicts of prop/value """
types_creations = self.__class__.types_creations
# some types are already created in this time
early_created_types = self.__class__.early_created_types
# Before really create the objects, we add
# ghost ones like the bp_rule for correlation
self.add_ghost_objects(raw_objects)
for t in types_creations:
if t not in early_created_types:
self.create_objects_for_type(raw_objects, t)
def create_objects_for_type(self, raw_objects, type):
types_creations = self.__class__.types_creations
t = type
# Ex: the above code do for timeperiods:
# timeperiods = []
# for timeperiodcfg in objects['timeperiod']:
# t = Timeperiod(timeperiodcfg)
# t.clean()
# timeperiods.append(t)
# self.timeperiods = Timeperiods(timeperiods)
(cls, clss, prop, initial_index) = types_creations[t]
# List where we put objects
lst = []
for obj_cfg in raw_objects[t]:
# We create the object
o = cls(obj_cfg)
# Change Nagios2 names to Nagios3 ones (before using them)
o.old_properties_names_to_new()
lst.append(o)
# we create the objects Class and we set it in prop
setattr(self, prop, clss(lst, initial_index))
# Here arbiter and modules objects should be prepare and link
# before all others types
def early_arbiter_linking(self):
""" Prepare the arbiter for early operations """
if len(self.arbiters) == 0:
logger.warning("There is no arbiter, I add one in localhost:7770")
a = ArbiterLink({'arbiter_name': 'Default-Arbiter',
'host_name': socket.gethostname(),
'address': 'localhost', 'port': '7770',
'spare': '0'})
self.arbiters = ArbiterLinks([a])
# Should look at hacking command_file module first
self.hack_old_nagios_parameters_for_arbiter()
# First fill default
self.arbiters.fill_default()
self.modules.fill_default()
# print "****************** Linkify ******************"
self.arbiters.linkify(self.modules)
self.modules.linkify()
# We will load all triggers .trig files from all triggers_dir
def load_triggers(self):
for p in self.triggers_dirs:
self.triggers.load_file(p)
# We will load all packs .pack files from all packs_dirs
def load_packs(self):
for p in self.packs_dirs:
self.packs.load_file(p)
    # We use linkify to make the config more efficient: elements will be
    # linked, like pointers. For example, a host will have it's service,
    # and contacts directly in it's properties
    # REMEMBER: linkify AFTER explode...
    def linkify(self):
        """ Make 'links' between elements, like a host got a services list
        with all it's services in it.

        Replaces name references with direct object references in every
        collection, then relinks satellites with realms. Order matters:
        groups must already be exploded.
        """
        self.services.optimize_service_search(self.hosts)
        # First linkify myself like for some global commands
        self.linkify_one_command_with_commands(self.commands, 'ocsp_command')
        self.linkify_one_command_with_commands(self.commands, 'ochp_command')
        self.linkify_one_command_with_commands(self.commands, 'host_perfdata_command')
        self.linkify_one_command_with_commands(self.commands, 'service_perfdata_command')
        self.linkify_one_command_with_commands(self.commands, 'global_host_event_handler')
        self.linkify_one_command_with_commands(self.commands, 'global_service_event_handler')
        # link hosts with timeperiods, commands, contacts, realms, ...
        self.hosts.linkify(self.timeperiods, self.commands,
                           self.contacts, self.realms,
                           self.resultmodulations, self.businessimpactmodulations,
                           self.escalations, self.hostgroups,
                           self.triggers, self.checkmodulations,
                           self.macromodulations
                           )
        self.hostsextinfo.merge(self.hosts)
        # Do the simplify AFTER explode groups
        # link hostgroups with hosts
        self.hostgroups.linkify(self.hosts, self.realms)
        # link services with other objects
        self.services.linkify(self.hosts, self.commands,
                              self.timeperiods, self.contacts,
                              self.resultmodulations, self.businessimpactmodulations,
                              self.escalations, self.servicegroups,
                              self.triggers, self.checkmodulations,
                              self.macromodulations
                              )
        self.servicesextinfo.merge(self.services)
        # link servicegroups members with services
        self.servicegroups.linkify(self.hosts, self.services)
        # link notificationways with timeperiods and commands
        self.notificationways.linkify(self.timeperiods, self.commands)
        # link checkmodulations with timeperiods and commands
        self.checkmodulations.linkify(self.timeperiods, self.commands)
        # Link with timeperiods
        self.macromodulations.linkify(self.timeperiods)
        # link contactgroups with contacts
        self.contactgroups.linkify(self.contacts)
        # link contacts with timeperiods and commands
        self.contacts.linkify(self.timeperiods, self.commands,
                              self.notificationways)
        # link timeperiods with timeperiods (exclude part)
        self.timeperiods.linkify()
        # link service/host dependencies with their targets and periods
        self.servicedependencies.linkify(self.hosts, self.services,
                                         self.timeperiods)
        self.hostdependencies.linkify(self.hosts, self.timeperiods)
        # modulations only need their timeperiods
        self.resultmodulations.linkify(self.timeperiods)
        self.businessimpactmodulations.linkify(self.timeperiods)
        self.escalations.linkify(self.timeperiods, self.contacts,
                                 self.services, self.hosts)
        # Link discovery commands
        self.discoveryruns.linkify(self.commands)
        self.realms.linkify()
        # Link all satellite links with realms and modules
        # (arbiters were already linkified in early_arbiter_linking)
        # self.arbiters.linkify(self.modules)
        self.schedulers.linkify(self.realms, self.modules)
        self.brokers.linkify(self.realms, self.modules)
        self.receivers.linkify(self.realms, self.modules)
        self.reactionners.linkify(self.realms, self.modules)
        self.pollers.linkify(self.realms, self.modules)
        # Ok, now update all realms with backlinks of
        # satellites
        self.realms.prepare_for_satellites_conf()
# Removes service exceptions based on host configuration
def remove_exclusions(self):
return self.services.remove_exclusions(self.hosts)
def set_initial_state(self):
"""
Sets services and hosts initial states.
"""
self.hosts.set_initial_state()
self.services.set_initial_state()
    # Some elements are maybe set as wrong after a is_correct, so clean them
    # if possible
    def clean(self):
        # Only the services collection currently needs a cleanup pass
        self.services.clean()
# In the scheduler we need to relink the commandCall with
# the real commands
def late_linkify(self):
props = ['ocsp_command', 'ochp_command',
'service_perfdata_command', 'host_perfdata_command',
'global_host_event_handler', 'global_service_event_handler']
for prop in props:
cc = getattr(self, prop, None)
if cc:
cc.late_linkify_with_command(self.commands)
# But also other objects like hosts and services
self.hosts.late_linkify_h_by_commands(self.commands)
self.services.late_linkify_s_by_commands(self.commands)
self.contacts.late_linkify_c_by_commands(self.commands)
    # Some properties are dangerous to be send like that
    # like realms linked in hosts. Realms are too big to send (too linked)
    # We are also pre-serializing the confs so the sending phase will
    # be quicker.
    def prepare_for_sending(self):
        """Flatten link-heavy properties and pre-pickle the per-realm confs.

        Each realm conf is pickled into r.serialized_confs[i], and the whole
        Config is pickled into self.whole_conf_pack (for arbiter spares).
        Serialization runs either sequentially or, when
        use_multiprocesses_serializer is set (and not on Windows), in
        sub-processes coordinated through a multiprocessing Manager.
        """
        # Preparing hosts and hostgroups for sending. Some properties
        # should be "flatten" before sent, like .realm object that should
        # be changed into names
        self.hosts.prepare_for_sending()
        self.hostgroups.prepare_for_sending()
        t1 = time.time()
        logger.info('[Arbiter] Serializing the configurations...')
        # There are two ways of configuration serializing:
        # the serial way, or with use_multiprocesses_serializer
        # calling sub-workers to do the job.
        # TODO : enable on windows? I'm not sure it will work, must give a test
        if os.name == 'nt' or not self.use_multiprocesses_serializer:
            logger.info('Using the default serialization pass')
            for r in self.realms:
                for (i, conf) in r.confs.iteritems():
                    # Remember to protect the local conf hostgroups too!
                    conf.hostgroups.prepare_for_sending()
                    logger.debug('[%s] Serializing the configuration %d', r.get_name(), i)
                    t0 = time.time()
                    r.serialized_confs[i] = cPickle.dumps(conf, 0)  # cPickle.HIGHEST_PROTOCOL)
                    logger.debug("[config] time to serialize the conf %s:%s is %s (size:%s)",
                                 r.get_name(), i, time.time() - t0, len(r.serialized_confs[i]))
                    logger.debug("PICKLE LEN : %d", len(r.serialized_confs[i]))
            # Now pickle the whole conf, for easy and quick spare send
            t0 = time.time()
            whole_conf_pack = cPickle.dumps(self, cPickle.HIGHEST_PROTOCOL)
            logger.debug("[config] time to serialize the global conf : %s (size:%s)",
                         time.time() - t0, len(whole_conf_pack))
            self.whole_conf_pack = whole_conf_pack
            logger.debug("[config]serializing total: %s" % (time.time() - t1))
        else:
            logger.info('Using the multiprocessing serialization pass')
            t1 = time.time()
            # We ask a manager to manage the communication with our children
            m = Manager()
            # The shared list will collect the pickle strings from the children
            q = m.list()
            for r in self.realms:
                processes = []
                for (i, conf) in r.confs.iteritems():
                    # This function will be called by the children, and will give
                    # us the pickle result
                    def Serialize_config(q, rname, i, conf):
                        # Remember to protect the local conf hostgroups too!
                        conf.hostgroups.prepare_for_sending()
                        logger.debug('[%s] Serializing the configuration %d', rname, i)
                        t0 = time.time()
                        res = cPickle.dumps(conf, cPickle.HIGHEST_PROTOCOL)
                        logger.debug("[config] time to serialize the conf %s:%s is %s (size:%s)",
                                     rname, i, time.time() - t0, len(res))
                        q.append((i, res))
                    # Prepare a sub-process that will manage the pickle computation
                    p = Process(target=Serialize_config,
                                name="serializer-%s-%d" % (r.get_name(), i),
                                args=(q, r.get_name(), i, conf))
                    p.start()
                    processes.append((i, p))
                # Here all sub-processes are launched for this realm, now wait for them to finish
                while len(processes) != 0:
                    to_del = []
                    for (i, p) in processes:
                        if p.exitcode is not None:
                            to_del.append((i, p))
                            # remember to join() so the children can die
                            p.join()
                    for (i, p) in to_del:
                        logger.debug("The sub process %s is done with the return code %d",
                                     p.name, p.exitcode)
                        processes.remove((i, p))
                    # Don't be too quick to poll!
                    time.sleep(0.1)
                # Check if we got the good number of configurations;
                # maybe one of the children had problems?
                if len(q) != len(r.confs):
                    logger.error("Something goes wrong in the configuration serializations, "
                                 "please restart Shinken Arbiter")
                    sys.exit(2)
                # Now get the serialized configurations and save them into self
                for (i, cfg) in q:
                    r.serialized_confs[i] = cfg
            # Now pickle the whole configuration into one big pickle object, for the arbiter spares
            whole_queue = m.list()
            t0 = time.time()
            # The function that computes the whole conf pickle string, in a child process
            def create_whole_conf_pack(whole_queue, self):
                logger.debug("[config] sub processing the whole configuration pack creation")
                whole_queue.append(cPickle.dumps(self, cPickle.HIGHEST_PROTOCOL))
                logger.debug("[config] sub processing the whole configuration pack creation "
                             "finished")
            # Go for it
            p = Process(target=create_whole_conf_pack,
                        args=(whole_queue, self),
                        name='serializer-whole-configuration')
            p.start()
            # Wait for it to die
            while p.exitcode is None:
                time.sleep(0.1)
            p.join()
            # Maybe we don't have our result?
            if len(whole_queue) != 1:
                logger.error("Something goes wrong in the whole configuration pack creation, "
                             "please restart Shinken Arbiter")
                sys.exit(2)
            # Get it and save it
            self.whole_conf_pack = whole_queue.pop()
            logger.debug("[config] time to serialize the global conf : %s (size:%s)",
                         time.time() - t0, len(self.whole_conf_pack))
            # Shutdown the manager, the sub-process should be gone now
            m.shutdown()
# It's used to warn about useless parameter and print why it's not use.
def notice_about_useless_parameters(self):
if not self.disable_old_nagios_parameters_whining:
properties = self.__class__.properties
for prop, entry in properties.items():
if isinstance(entry, UnusedProp):
logger.warning("The parameter %s is useless and can be removed "
"from the configuration (Reason: %s)", prop, entry.text)
# It's used to raise warning if the user got parameter
# that we do not manage from now
def warn_about_unmanaged_parameters(self):
properties = self.__class__.properties
unmanaged = []
for prop, entry in properties.items():
if not entry.managed and hasattr(self, prop):
if entry.help:
s = "%s: %s" % (prop, entry.help)
else:
s = prop
unmanaged.append(s)
if len(unmanaged) != 0:
mailing_list_uri = "https://lists.sourceforge.net/lists/listinfo/shinken-devel"
logger.warning("The following parameter(s) are not currently managed.")
for s in unmanaged:
logger.info(s)
logger.warning("Unmanaged configuration statement, do you really need it?"
"Ask for it on the developer mailinglist %s or submit a pull "
"request on the Shinken github ", mailing_list_uri)
# Overrides specific instances properties
def override_properties(self):
self.services.override_properties(self.hosts)
    # Use to fill groups values on hosts and create new services
    # (for host group ones)
    def explode(self):
        """Expand group memberships and duplicate definitions.

        Order matters: contacts and contactgroups first, then hosts and
        hostgroups, then services (which can be duplicated per host or
        hostgroup), then timeperiods, dependencies, escalations and realms.
        """
        # first elements, after groups
        self.contacts.explode(self.contactgroups, self.notificationways)
        self.contactgroups.explode()
        self.hosts.explode(self.hostgroups, self.contactgroups, self.triggers)
        self.hostgroups.explode()
        # Services may be duplicated onto every host of their hostgroups
        self.services.explode(self.hosts, self.hostgroups, self.contactgroups,
                              self.servicegroups, self.servicedependencies,
                              self.triggers)
        self.servicegroups.explode()
        self.timeperiods.explode()
        self.hostdependencies.explode(self.hostgroups)
        self.servicedependencies.explode(self.hostgroups)
        # Serviceescalations hostescalations will create new escalations
        self.serviceescalations.explode(self.escalations)
        self.hostescalations.explode(self.escalations)
        self.escalations.explode(self.hosts, self.hostgroups,
                                 self.contactgroups)
        # Now the architecture part
        self.realms.explode()
# Dependencies are important for scheduling
# This function create dependencies linked between elements.
def apply_dependencies(self):
self.hosts.apply_dependencies()
self.services.apply_dependencies()
# Use to apply inheritance (template and implicit ones)
# So elements will have their configured properties
def apply_inheritance(self):
# inheritance properties by template
# print "Hosts"
self.hosts.apply_inheritance()
# print "Contacts"
self.contacts.apply_inheritance()
# print "Services"
self.services.apply_inheritance()
# print "Servicedependencies"
self.servicedependencies.apply_inheritance()
# print "Hostdependencies"
self.hostdependencies.apply_inheritance()
# Also timeperiods
self.timeperiods.apply_inheritance()
# Also "Hostextinfo"
self.hostsextinfo.apply_inheritance()
# Also "Serviceextinfo"
self.servicesextinfo.apply_inheritance()
# Now escalations too
self.serviceescalations.apply_inheritance()
self.hostescalations.apply_inheritance()
self.escalations.apply_inheritance()
# Use to apply implicit inheritance
def apply_implicit_inheritance(self):
# print "Services"
self.services.apply_implicit_inheritance(self.hosts)
# will fill properties for elements so they will have all theirs properties
def fill_default(self):
# Fill default for config (self)
super(Config, self).fill_default()
self.hosts.fill_default()
self.hostgroups.fill_default()
self.contacts.fill_default()
self.contactgroups.fill_default()
self.notificationways.fill_default()
self.checkmodulations.fill_default()
self.macromodulations.fill_default()
self.services.fill_default()
self.servicegroups.fill_default()
self.resultmodulations.fill_default()
self.businessimpactmodulations.fill_default()
self.hostsextinfo.fill_default()
self.servicesextinfo.fill_default()
# Now escalations
self.escalations.fill_default()
# Also fill default of host/servicedep objects
self.servicedependencies.fill_default()
self.hostdependencies.fill_default()
# Discovery part
self.discoveryrules.fill_default()
self.discoveryruns.fill_default()
# first we create missing sat, so no other sat will
# be created after this point
self.fill_default_satellites()
# now we have all elements, we can create a default
# realm if need and it will be tagged to sat that do
# not have an realm
self.fill_default_realm()
self.realms.fill_default() # also put default inside the realms themselves
self.reactionners.fill_default()
self.pollers.fill_default()
self.brokers.fill_default()
self.receivers.fill_default()
self.schedulers.fill_default()
# The arbiters are already done.
# self.arbiters.fill_default()
# Now fill some fields we can predict (like address for hosts)
self.fill_predictive_missing_parameters()
# Here is a special functions to fill some special
# properties that are not filled and should be like
# address for host (if not set, put host_name)
def fill_predictive_missing_parameters(self):
self.hosts.fill_predictive_missing_parameters()
# Will check if a realm is defined, if not
# Create a new one (default) and tag everyone that do not have
# a realm prop to be put in this realm
def fill_default_realm(self):
if len(self.realms) == 0:
# Create a default realm with default value =1
# so all hosts without realm will be link with it
default = Realm({'realm_name': 'Default', 'default': '1'})
self.realms = Realms([default])
logger.warning("No realms defined, I add one at %s", default.get_name())
lists = [self.pollers, self.brokers, self.reactionners, self.receivers, self.schedulers]
for l in lists:
for elt in l:
if not hasattr(elt, 'realm'):
elt.realm = 'Default'
logger.info("Tagging %s with realm %s", elt.get_name(), default.get_name())
# If a satellite is missing, we add them in the localhost
# with defaults values
def fill_default_satellites(self):
if len(self.schedulers) == 0:
logger.warning("No scheduler defined, I add one at localhost:7768")
s = SchedulerLink({'scheduler_name': 'Default-Scheduler',
'address': 'localhost', 'port': '7768'})
self.schedulers = SchedulerLinks([s])
if len(self.pollers) == 0:
logger.warning("No poller defined, I add one at localhost:7771")
p = PollerLink({'poller_name': 'Default-Poller',
'address': 'localhost', 'port': '7771'})
self.pollers = PollerLinks([p])
if len(self.reactionners) == 0:
logger.warning("No reactionner defined, I add one at localhost:7769")
r = ReactionnerLink({'reactionner_name': 'Default-Reactionner',
'address': 'localhost', 'port': '7769'})
self.reactionners = ReactionnerLinks([r])
if len(self.brokers) == 0:
logger.warning("No broker defined, I add one at localhost:7772")
b = BrokerLink({'broker_name': 'Default-Broker',
'address': 'localhost', 'port': '7772',
'manage_arbiters': '1'})
self.brokers = BrokerLinks([b])
# Return if one broker got a module of type: mod_type
def got_broker_module_type_defined(self, mod_type):
for b in self.brokers:
for m in b.modules:
if hasattr(m, 'module_type') and m.module_type == mod_type:
return True
return False
# return if one scheduler got a module of type: mod_type
def got_scheduler_module_type_defined(self, mod_type):
for b in self.schedulers:
for m in b.modules:
if hasattr(m, 'module_type') and m.module_type == mod_type:
return True
return False
# return if one arbiter got a module of type: mod_type
# but this time it's tricky: the python pass is not done!
# so look with strings!
def got_arbiter_module_type_defined(self, mod_type):
for a in self.arbiters:
# Do like the linkify will do after....
for m in getattr(a, 'modules', []):
# So look at what the arbiter try to call as module
m = m.strip()
# Ok, now look in modules...
for mod in self.modules:
# try to see if this module is the good type
if getattr(mod, 'module_type', '').strip() == mod_type.strip():
# if so, the good name?
if getattr(mod, 'module_name', '').strip() == m:
return True
return False
# Will ask for each host/service if the
# check_command is a bp rule. If so, it will create
# a tree structures with the rules
def create_business_rules(self):
self.hosts.create_business_rules(self.hosts, self.services)
self.services.create_business_rules(self.hosts, self.services)
# Will fill dep list for business rules
def create_business_rules_dependencies(self):
self.hosts.create_business_rules_dependencies()
self.services.create_business_rules_dependencies()
    # It's used to hack some old Nagios parameters like
    # log_file or status_file: if they are present in
    # the global configuration and there is no such modules
    # in a Broker, we create it on the fly for all Brokers
    def hack_old_nagios_parameters(self):
        """ Create some 'modules' from all nagios parameters if they are set and
        the modules are not created.

        Checks status_file/object_cache_file, log_file, use_syslog,
        service_perfdata_file, state_retention_file and host_perfdata_file;
        for each value in use without a matching module, an autogenerated
        Module is appended to every broker (or scheduler, for retention).
        """
        # We list all modules we will add to brokers
        mod_to_add = []
        mod_to_add_to_schedulers = []
        # For status_dat
        if (hasattr(self, 'status_file') and
                self.status_file != '' and
                hasattr(self, 'object_cache_file')):
            # Ok, the user put such a value, we must look
            # if he forget to put a module for Brokers
            got_status_dat_module = self.got_broker_module_type_defined('status_dat')
            # We need to create the module on the fly?
            if not got_status_dat_module:
                data = {'object_cache_file': self.object_cache_file,
                        'status_file': self.status_file,
                        'module_name': 'Status-Dat-Autogenerated',
                        'module_type': 'status_dat'}
                mod = Module(data)
                # default refresh period: 15s
                mod.status_update_interval = getattr(self, 'status_update_interval', 15)
                mod_to_add.append(mod)
        # Now the log_file
        if hasattr(self, 'log_file') and self.log_file != '':
            # Ok, the user put such a value, we must look
            # if he forget to put a module for Brokers
            got_simple_log_module = self.got_broker_module_type_defined('simple_log')
            # We need to create the module on the fly?
            if not got_simple_log_module:
                data = {'module_type': 'simple_log', 'path': self.log_file,
                        'archive_path': self.log_archive_path,
                        'module_name': 'Simple-log-Autogenerated'}
                mod = Module(data)
                mod_to_add.append(mod)
        # Now the syslog facility
        if self.use_syslog:
            # Ok, the user want a syslog logging, why not after all
            got_syslog_module = self.got_broker_module_type_defined('syslog')
            # We need to create the module on the fly?
            if not got_syslog_module:
                data = {'module_type': 'syslog',
                        'module_name': 'Syslog-Autogenerated'}
                mod = Module(data)
                mod_to_add.append(mod)
        # Now the service_perfdata module
        if self.service_perfdata_file != '':
            # Ok, we've got a path for a service perfdata file
            got_service_perfdata_module = self.got_broker_module_type_defined('service_perfdata')
            # We need to create the module on the fly?
            if not got_service_perfdata_module:
                data = {'module_type': 'service_perfdata',
                        'module_name': 'Service-Perfdata-Autogenerated',
                        'path': self.service_perfdata_file,
                        'mode': self.service_perfdata_file_mode,
                        'template': self.service_perfdata_file_template}
                mod = Module(data)
                mod_to_add.append(mod)
        # Now the old retention file module (scheduler side, not broker)
        if self.state_retention_file != '' and self.retention_update_interval != 0:
            # Ok, we've got a old retention file
            got_retention_file_module = \
                self.got_scheduler_module_type_defined('nagios_retention_file')
            # We need to create the module on the fly?
            if not got_retention_file_module:
                data = {'module_type': 'nagios_retention_file',
                        'module_name': 'Nagios-Retention-File-Autogenerated',
                        'path': self.state_retention_file}
                mod = Module(data)
                mod_to_add_to_schedulers.append(mod)
        # Now the host_perfdata module
        if self.host_perfdata_file != '':
            # Ok, we've got a path for a host perfdata file
            got_host_perfdata_module = self.got_broker_module_type_defined('host_perfdata')
            # We need to create the module on the fly?
            if not got_host_perfdata_module:
                data = {'module_type': 'host_perfdata',
                        'module_name': 'Host-Perfdata-Autogenerated',
                        'path': self.host_perfdata_file, 'mode': self.host_perfdata_file_mode,
                        'template': self.host_perfdata_file_template}
                mod = Module(data)
                mod_to_add.append(mod)
        # We add them to the brokers if we need it
        if mod_to_add != []:
            logger.warning("I autogenerated some Broker modules, please look at your configuration")
            for m in mod_to_add:
                logger.warning("The module %s is autogenerated", m.module_name)
                for b in self.brokers:
                    b.modules.append(m)
        # Then for schedulers
        if mod_to_add_to_schedulers != []:
            logger.warning("I autogenerated some Scheduler modules, "
                           "please look at your configuration")
            for m in mod_to_add_to_schedulers:
                logger.warning("The module %s is autogenerated", m.module_name)
                for b in self.schedulers:
                    b.modules.append(m)
# It's used to hack some old Nagios parameters like
# but for the arbiter, so very early in the run
def hack_old_nagios_parameters_for_arbiter(self):
""" Create some 'modules' from all nagios parameters if they are set and
the modules are not created """
# We list all modules we will add to arbiters
mod_to_add = []
# For command_file
if getattr(self, 'command_file', '') != '':
# Ok, the user put such a value, we must look
# if he forget to put a module for arbiters
got_named_pipe_module = self.got_arbiter_module_type_defined('named_pipe')
# We need to create the module on the fly?
if not got_named_pipe_module:
data = {'command_file': self.command_file,
'module_name': 'NamedPipe-Autogenerated',
'module_type': 'named_pipe'}
mod = Module(data)
mod_to_add.append((mod, data))
# We add them to the brokers if we need it
if mod_to_add != []:
logger.warning("I autogenerated some Arbiter modules, "
"please look at your configuration")
for (mod, data) in mod_to_add:
logger.warning("Module %s was autogenerated", data['module_name'])
for a in self.arbiters:
a.modules = getattr(a, 'modules', []) + [data['module_name']]
self.modules.add_item(mod)
# Set our timezone value and give it too to unset satellites
def propagate_timezone_option(self):
if self.use_timezone != '':
# first apply myself
os.environ['TZ'] = self.use_timezone
time.tzset()
tab = [self.schedulers, self.pollers, self.brokers, self.receivers, self.reactionners]
for t in tab:
for s in t:
if s.use_timezone == 'NOTSET':
setattr(s, 'use_timezone', self.use_timezone)
# Link templates with elements
def linkify_templates(self):
""" Like for normal object, we link templates with each others """
self.hosts.linkify_templates()
self.contacts.linkify_templates()
self.services.linkify_templates()
self.servicedependencies.linkify_templates()
self.hostdependencies.linkify_templates()
self.timeperiods.linkify_templates()
self.hostsextinfo.linkify_templates()
self.servicesextinfo.linkify_templates()
self.escalations.linkify_templates()
# But also old srv and host escalations
self.serviceescalations.linkify_templates()
self.hostescalations.linkify_templates()
# Some parameters are just not managed like O*HP commands
# and regexp capabilities
# True: OK
# False: error in conf
def check_error_on_hard_unmanaged_parameters(self):
r = True
if self.use_regexp_matching:
logger.error("use_regexp_matching parameter is not managed.")
r &= False
# if self.ochp_command != '':
# logger.error("ochp_command parameter is not managed.")
# r &= False
# if self.ocsp_command != '':
# logger.error("ocsp_command parameter is not managed.")
# r &= False
return r
# check if elements are correct or not (fill with defaults, etc)
# Warning: this function call be called from a Arbiter AND
# from and scheduler. The first one got everything, the second
# does not have the satellites.
    def is_correct(self):
        """Run the pre-flight check on every configuration element.

        Called both from the Arbiter (which holds everything) and from a
        Scheduler (which lacks the satellite lists, hence the AttributeError
        guard below).  The final verdict is stored in self.conf_is_correct.
        """
        logger.info('Running pre-flight check on configuration data...')
        # Start from the current flag: errors already recorded through
        # add_error() must not be forgotten here.
        r = self.conf_is_correct
        # Globally unmanaged parameters
        if self.read_config_silent == 0:
            logger.info('Checking global parameters...')
        if not self.check_error_on_hard_unmanaged_parameters():
            r = False
            logger.error("Check global parameters failed")
        # Object lists that exist in every context (arbiter and scheduler).
        for x in ('hosts', 'hostgroups', 'contacts', 'contactgroups', 'notificationways',
                  'escalations', 'services', 'servicegroups', 'timeperiods', 'commands',
                  'hostsextinfo', 'servicesextinfo', 'checkmodulations', 'macromodulations'):
            if self.read_config_silent == 0:
                logger.info('Checking %s...', x)
            cur = getattr(self, x)
            if not cur.is_correct():
                r = False
                logger.error("\t%s conf incorrect!!", x)
            if self.read_config_silent == 0:
                logger.info('\tChecked %d %s', len(cur), x)
        # Hosts got a special check for loops
        if not self.hosts.no_loop_in_parents("self", "parents"):
            r = False
            logger.error("Hosts: detected loop in parents ; conf incorrect")
        # Lists that may be absent (e.g. on a scheduler): skip silently.
        for x in ('servicedependencies', 'hostdependencies', 'arbiters', 'schedulers',
                  'reactionners', 'pollers', 'brokers', 'receivers', 'resultmodulations',
                  'discoveryrules', 'discoveryruns', 'businessimpactmodulations'):
            try:
                cur = getattr(self, x)
            except AttributeError:
                continue
            if self.read_config_silent == 0:
                logger.info('Checking %s...', x)
            if not cur.is_correct():
                r = False
                logger.error("\t%s conf incorrect!!", x)
            if self.read_config_silent == 0:
                logger.info('\tChecked %d %s', len(cur), x)
        # Look that all scheduler got a broker that will take brok.
        # If there are no, raise an Error
        for s in self.schedulers:
            rea = s.realm
            if rea:
                if len(rea.potential_brokers) == 0:
                    logger.error("The scheduler %s got no broker in its realm or upper",
                                 s.get_name())
                    self.add_error("Error: the scheduler %s got no broker in its realm "
                                   "or upper" % s.get_name())
                    r = False
        # Check that for each poller_tag of a host, a poller exists with this tag
        # TODO: need to check that poller are in the good realm too
        hosts_tag = set()
        services_tag = set()
        pollers_tag = set()
        for h in self.hosts:
            hosts_tag.add(h.poller_tag)
        for s in self.services:
            services_tag.add(s.poller_tag)
        for p in self.pollers:
            for t in p.poller_tags:
                pollers_tag.add(t)
        if not hosts_tag.issubset(pollers_tag):
            for tag in hosts_tag.difference(pollers_tag):
                logger.error("Hosts exist with poller_tag %s but no poller got this tag", tag)
                self.add_error("Error: hosts exist with poller_tag %s but no poller "
                               "got this tag" % tag)
                r = False
        if not services_tag.issubset(pollers_tag):
            for tag in services_tag.difference(pollers_tag):
                logger.error("Services exist with poller_tag %s but no poller got this tag", tag)
                self.add_error("Error: services exist with poller_tag %s but no poller "
                               "got this tag" % tag)
                r = False
        # Check that all hosts involved in business_rules are from the same realm
        for l in [self.services, self.hosts]:
            for e in l:
                if e.got_business_rule:
                    e_ro = e.get_realm()
                    # Something was wrong in the conf, will be raised elsewhere
                    if not e_ro:
                        continue
                    e_r = e_ro.realm_name
                    for elt in e.business_rule.list_all_elements():
                        r_o = elt.get_realm()
                        # Something was wrong in the conf, will be raised elsewhere
                        if not r_o:
                            continue
                        elt_r = elt.get_realm().realm_name
                        if not elt_r == e_r:
                            logger.error("Business_rule '%s' got hosts from another realm: %s",
                                         e.get_full_name(), elt_r)
                            self.add_error("Error: Business_rule '%s' got hosts from another "
                                           "realm: %s" % (e.get_full_name(), elt_r))
                            r = False
        # At most one realm may be flagged as the default one.
        if len([realm for realm in self.realms if hasattr(realm, 'default') and realm.default]) > 1:
            err = "Error : More than one realm are set to the default realm"
            logger.error(err)
            self.add_error(err)
            r = False
        self.conf_is_correct = r
# Explode parameters like cached_service_check_horizon in the
# Service class in a cached_check_horizon manner, o*hp commands
# , etc
def explode_global_conf(self):
clss = [Service, Host, Contact, SchedulerLink,
PollerLink, ReactionnerLink, BrokerLink,
ReceiverLink, ArbiterLink, HostExtInfo]
for cls in clss:
cls.load_global_conf(self)
# Clean useless elements like templates because they are not needed anymore
def remove_templates(self):
self.hosts.remove_templates()
self.contacts.remove_templates()
self.services.remove_templates()
self.servicedependencies.remove_templates()
self.hostdependencies.remove_templates()
self.timeperiods.remove_templates()
self.discoveryrules.remove_templates()
self.discoveryruns.remove_templates()
# Add an error in the configuration error list so we can print them
# all in one place
def add_error(self, txt):
err = txt
self.configuration_errors.append(err)
self.conf_is_correct = False
# Now it's time to show all configuration errors
def show_errors(self):
for err in self.configuration_errors:
logger.error(err)
# Create packs of hosts and services so in a pack,
# all dependencies are resolved
# It create a graph. All hosts are connected to their
# parents, and hosts without parent are connected to host 'root'.
# services are link to the host. Dependencies are managed
# REF: doc/pack-creation.png
    def create_packs(self, nb_packs):
        """Group hosts (and, implicitly, their services) into packs whose
        dependencies are fully self-contained, then load-balance the packs
        over each realm's non-spare schedulers (weighted round-robin).

        NOTE(review): the nb_packs argument is never used — the number of
        packs per realm is derived from the schedulers instead; confirm.
        """
        # We create a graph with host in nodes
        g = Graph()
        g.add_nodes(self.hosts)
        # links will be used for relations between hosts
        links = set()
        # Now the relations
        for h in self.hosts:
            # Add parent relations
            for p in h.parents:
                if p is not None:
                    links.add((p, h))
            # Add the others dependencies
            for (dep, tmp, tmp2, tmp3, tmp4) in h.act_depend_of:
                links.add((dep, h))
            for (dep, tmp, tmp2, tmp3, tmp4) in h.chk_depend_of:
                links.add((dep, h))
        # For services: they are link with their own host but we need
        # To have the hosts of service dep in the same pack too
        for s in self.services:
            for (dep, tmp, tmp2, tmp3, tmp4) in s.act_depend_of:
                # I don't care about dep host: they are just the host
                # of the service...
                if hasattr(dep, 'host'):
                    links.add((dep.host, s.host))
            # The other type of dep
            for (dep, tmp, tmp2, tmp3, tmp4) in s.chk_depend_of:
                links.add((dep.host, s.host))
        # For host/service that are business based, we need to
        # link them too
        for s in [s for s in self.services if s.got_business_rule]:
            for e in s.business_rule.list_all_elements():
                if hasattr(e, 'host'):  # if it's a service
                    if e.host != s.host:  # do not a host with itself
                        links.add((e.host, s.host))
                else:  # it's already a host
                    if e != s.host:
                        links.add((e, s.host))
        # Same for hosts of course
        for h in [h for h in self.hosts if h.got_business_rule]:
            for e in h.business_rule.list_all_elements():
                if hasattr(e, 'host'):  # if it's a service
                    if e.host != h:
                        links.add((e.host, h))
                else:  # e is a host
                    if e != h:
                        links.add((e, h))
        # Now we create links in the graph. With links (set)
        # We are sure to call the less add_edge
        for (dep, h) in links:
            g.add_edge(dep, h)
            g.add_edge(h, dep)
        # Access_list from a node il all nodes that are connected
        # with it: it's a list of ours mini_packs
        tmp_packs = g.get_accessibility_packs()
        # Now We find the default realm
        default_realm = None
        for r in self.realms:
            if hasattr(r, 'default') and r.default:
                default_realm = r
        # Now we look if all elements of all packs have the
        # same realm. If not, not good!
        for pack in tmp_packs:
            tmp_realms = set()
            for elt in pack:
                if elt.realm is not None:
                    tmp_realms.add(elt.realm)
            if len(tmp_realms) > 1:
                self.add_error("Error: the realm configuration of yours hosts is not good "
                               "because there a more than one realm in one pack (host relations):")
                for h in pack:
                    if h.realm is None:
                        err = ' the host %s do not have a realm' % h.get_name()
                        self.add_error(err)
                    else:
                        err = ' the host %s is in the realm %s' % (h.get_name(),
                                                                   h.realm.get_name())
                        self.add_error(err)
            if len(tmp_realms) == 1:  # Ok, good
                r = tmp_realms.pop()  # There is just one element
                r.packs.append(pack)
            elif len(tmp_realms) == 0:  # Hum.. no realm value? So default Realm
                if default_realm is not None:
                    default_realm.packs.append(pack)
                else:
                    err = ("Error: some hosts do not have a realm and you do not "
                           "defined a default realm!")
                    self.add_error(err)
                    for h in pack:
                        err = ' Impacted host: %s ' % h.get_name()
                        self.add_error(err)
        # The load balancing is for a loop, so all
        # hosts of a realm (in a pack) will be dispatch
        # in the schedulers of this realm
        # REF: doc/pack-agregation.png
        # Count the numbers of elements in all the realms, to compare it the total number of hosts
        nb_elements_all_realms = 0
        for r in self.realms:
            # print "Load balancing realm", r.get_name()
            packs = {}
            # create roundrobin iterator for id of cfg
            # So dispatching is loadbalanced in a realm
            # but add a entry in the roundrobin tourniquet for
            # every weight point schedulers (so Weight round robin)
            weight_list = []
            no_spare_schedulers = [s for s in r.schedulers if not s.spare]
            nb_schedulers = len(no_spare_schedulers)
            # Maybe there is no scheduler in the realm, it's can be a
            # big problem if there are elements in packs
            nb_elements = 0
            for pack in r.packs:
                nb_elements += len(pack)
                nb_elements_all_realms += len(pack)
            logger.info("Number of hosts in the realm %s: %d "
                        "(distributed in %d linked packs)",
                        r.get_name(), nb_elements, len(r.packs))
            if nb_schedulers == 0 and nb_elements != 0:
                err = "The realm %s has hosts but no scheduler!" % r.get_name()
                self.add_error(err)
                r.packs = []  # Dumb pack
                continue
            packindex = 0
            packindices = {}
            for s in no_spare_schedulers:
                packindices[s.id] = packindex
                packindex += 1
                # One round-robin slot per weight point (weighted RR).
                for i in xrange(0, s.weight):
                    weight_list.append(s.id)
            rr = itertools.cycle(weight_list)
            # We must have nb_schedulers packs
            for i in xrange(0, nb_schedulers):
                packs[i] = []
            # Try to load the history association dict so we will try to
            # send the hosts in the same "pack"
            assoc = {}
            # Now we explode the numerous packs into nb_packs reals packs:
            # we 'load balance' them in a roundrobin way
            for pack in r.packs:
                valid_value = False
                old_pack = -1
                # A mini-pack keeps its previous scheduler id only when ALL
                # its known hosts agree on the same previous id.
                for elt in pack:
                    # print 'Look for host', elt.get_name(), 'in assoc'
                    old_i = assoc.get(elt.get_name(), -1)
                    # print 'Founded in ASSOC: ', elt.get_name(),old_i
                    # Maybe it's a new, if so, don't count it
                    if old_i == -1:
                        continue
                    # Maybe it is the first we look at, if so, take it's value
                    if old_pack == -1 and old_i != -1:
                        # print 'First value set', elt.get_name(), old_i
                        old_pack = old_i
                        valid_value = True
                        continue
                    if old_i == old_pack:
                        # print 'I found a match between elements', old_i
                        valid_value = True
                    if old_i != old_pack:
                        # print 'Outch found a change sorry', old_i, old_pack
                        valid_value = False
                # print 'Is valid?', elt.get_name(), valid_value, old_pack
                i = None
                # If it's a valid sub pack and the pack id really exist, use it!
                if valid_value and old_pack in packindices:
                    # print 'Use a old id for pack', old_pack, [h.get_name() for h in pack]
                    i = old_pack
                else:  # take a new one
                    # print 'take a new id for pack', [h.get_name() for h in pack]
                    i = rr.next()
                for elt in pack:
                    # print 'We got the element', elt.get_full_name(), ' in pack', i, packindices
                    packs[packindices[i]].append(elt)
                    assoc[elt.get_name()] = i
            # Now in packs we have the number of packs [h1, h2, etc]
            # equal to the number of schedulers.
            r.packs = packs
        logger.info("Total number of hosts : %d",
                    nb_elements_all_realms)
        if len(self.hosts) != nb_elements_all_realms:
            logger.warning("There are %d hosts defined, and %d hosts dispatched in the realms. "
                           "Some hosts have been ignored", len(self.hosts), nb_elements_all_realms)
            self.add_error("There are %d hosts defined, and %d hosts dispatched in the realms. "
                           "Some hosts have been "
                           "ignored" % (len(self.hosts), nb_elements_all_realms))
# Use the self.conf and make nb_parts new confs.
# nbparts is equal to the number of schedulerlink
# New confs are independent with checks. The only communication
# That can be need is macro in commands
def cut_into_parts(self):
# print "Scheduler configured:", self.schedulers
# I do not care about alive or not. User must have set a spare if need it
nb_parts = len([s for s in self.schedulers if not s.spare])
if nb_parts == 0:
nb_parts = 1
# We create dummy configurations for schedulers:
# they are clone of the master
# conf but without hosts and services (because they are dispatched between
# theses configurations)
self.confs = {}
for i in xrange(0, nb_parts):
# print "Create Conf:", i, '/', nb_parts -1
cur_conf = self.confs[i] = Config()
# Now we copy all properties of conf into the new ones
for prop, entry in Config.properties.items():
if entry.managed and not isinstance(entry, UnusedProp):
val = getattr(self, prop)
setattr(cur_conf, prop, val)
# print "Copy", prop, val
# we need a deepcopy because each conf
# will have new hostgroups
cur_conf.id = i
cur_conf.commands = self.commands
cur_conf.timeperiods = self.timeperiods
# Create hostgroups with just the name and same id, but no members
new_hostgroups = []
for hg in self.hostgroups:
new_hostgroups.append(hg.copy_shell())
cur_conf.hostgroups = Hostgroups(new_hostgroups)
cur_conf.notificationways = self.notificationways
cur_conf.checkmodulations = self.checkmodulations
cur_conf.macromodulations = self.macromodulations
cur_conf.contactgroups = self.contactgroups
cur_conf.contacts = self.contacts
cur_conf.triggers = self.triggers
# Create hostgroups with just the name and same id, but no members
new_servicegroups = []
for sg in self.servicegroups:
new_servicegroups.append(sg.copy_shell())
cur_conf.servicegroups = Servicegroups(new_servicegroups)
cur_conf.hosts = [] # will be fill after
cur_conf.services = [] # will be fill after
# The elements of the others conf will be tag here
cur_conf.other_elements = {}
# if a scheduler have accepted the conf
cur_conf.is_assigned = False
logger.info("Creating packs for realms")
# Just create packs. There can be numerous ones
# In pack we've got hosts and service
# packs are in the realms
# REF: doc/pack-creation.png
self.create_packs(nb_parts)
# We've got all big packs and get elements into configurations
# REF: doc/pack-agregation.png
offset = 0
for r in self.realms:
for i in r.packs:
pack = r.packs[i]
for h in pack:
h.pack_id = i
self.confs[i + offset].hosts.append(h)
for s in h.services:
self.confs[i + offset].services.append(s)
# Now the conf can be link in the realm
r.confs[i + offset] = self.confs[i + offset]
offset += len(r.packs)
del r.packs
# We've nearly have hosts and services. Now we want REALS hosts (Class)
# And we want groups too
# print "Finishing packs"
for i in self.confs:
# print "Finishing pack Nb:", i
cfg = self.confs[i]
# Create ours classes
cfg.hosts = Hosts(cfg.hosts)
cfg.services = Services(cfg.services)
# Fill host groups
for ori_hg in self.hostgroups:
hg = cfg.hostgroups.find_by_name(ori_hg.get_name())
mbrs = ori_hg.members
mbrs_id = []
for h in mbrs:
if h is not None:
mbrs_id.append(h.id)
for h in cfg.hosts:
if h.id in mbrs_id:
hg.members.append(h)
# And also relink the hosts with the valid hostgroups
for h in cfg.hosts:
orig_hgs = h.hostgroups
nhgs = []
for ohg in orig_hgs:
nhg = cfg.hostgroups.find_by_name(ohg.get_name())
nhgs.append(nhg)
h.hostgroups = nhgs
# Fill servicegroup
for ori_sg in self.servicegroups:
sg = cfg.servicegroups.find_by_name(ori_sg.get_name())
mbrs = ori_sg.members
mbrs_id = []
for s in mbrs:
if s is not None:
mbrs_id.append(s.id)
for s in cfg.services:
if s.id in mbrs_id:
sg.members.append(s)
# And also relink the services with the valid servicegroups
for h in cfg.services:
orig_hgs = h.servicegroups
nhgs = []
for ohg in orig_hgs:
nhg = cfg.servicegroups.find_by_name(ohg.get_name())
nhgs.append(nhg)
h.servicegroups = nhgs
# Now we fill other_elements by host (service are with their host
# so they are not tagged)
for i in self.confs:
for h in self.confs[i].hosts:
for j in [j for j in self.confs if j != i]: # So other than i
self.confs[i].other_elements[h.get_name()] = i
# We tag conf with instance_id
for i in self.confs:
self.confs[i].instance_id = i
random.seed(time.time())
    def dump(self, f=None):
        """Dump the whole configuration as pretty-printed, sorted JSON.

        :param f: an open file-like object to write to; when None a file
                  named shinken-config-dump-<timestamp> is created in the
                  system temp directory and closed afterwards.
        """
        dmp = {}
        for category in ("hosts",
                         "hostgroups",
                         "hostdependencies",
                         "contactgroups",
                         "contacts",
                         "notificationways",
                         "checkmodulations",
                         "macromodulations",
                         "servicegroups",
                         "services",
                         "servicedependencies",
                         "resultmodulations",
                         "businessimpactmodulations",
                         "escalations",
                         "discoveryrules",
                         "discoveryruns",
                         "schedulers",
                         "realms",
                         ):
            objs = [jsonify_r(i) for i in getattr(self, category)]
            container = getattr(self, category)
            if category == "services":
                # Deterministic ordering: host/description pairs.
                objs = sorted(objs, key=lambda o: "%s/%s" %
                              (o["host_name"], o["service_description"]))
            elif hasattr(container, "name_property"):
                np = container.name_property
                # NOTE(review): objs holds plain dicts (the services branch
                # above indexes them), so getattr() on them always yields ''
                # and this sort is a no-op; o.get(np, '') was probably
                # intended -- confirm before changing.
                objs = sorted(objs, key=lambda o: getattr(o, np, ''))
            dmp[category] = objs
        if f is None:
            d = tempfile.gettempdir()
            p = os.path.join(d, 'shinken-config-dump-%d' % time.time())
            f = open(p, "wb")
            close = True
        else:
            close = False
        f.write(
            json.dumps(
                dmp,
                indent=4,
                separators=(',', ': '),
                sort_keys=True
            )
        )
        # Only close files we opened ourselves.
        if close is True:
            f.close()
def lazy():
    # Register the $USERn$ (n = 1..255) resource macros and their
    # matching (empty-by-default) string properties on the Config class.
    for idx in xrange(1, 256):
        suffix = str(idx)
        macro = '$USER' + suffix + '$'
        Config.properties[macro] = StringProp(default='')
        Config.macros['USER' + suffix] = macro
lazy()
del lazy
|
from __future__ import absolute_import, unicode_literals
import time
from future import standard_library
from pyload.core.datatype.check import OnlineCheck
from pyload.core.manager.base import BaseManager
from pyload.core.thread import InfoThread
from pyload.utils.layer.safethreading import RLock
from pyload.utils.struct.lock import lock
standard_library.install_aliases()
class InfoManager(BaseManager):
    """Manages all non download related threads and jobs."""

    def setup(self):
        self.thread = []  # running info threads
        self.lock = RLock()
        # some operations require to fetch url info from hoster,
        # so we cache them so it won't be done twice
        # contains a timestamp and will be purged after timeout
        self.info_cache = {}
        # pool of ids for online check
        self.result_ids = 0
        # saved online checks (rid -> OnlineCheck)
        self.info_results = {}
        # timeout for cache purge
        self.timestamp = 0

    @lock
    def add_thread(self, thread):
        """Register a running info thread."""
        self.thread.append(thread)

    @lock
    def remove_thread(self, thread):
        """Remove a thread from the local list."""
        if thread in self.thread:
            self.thread.remove(thread)

    @lock
    def create_info_thread(self, data, pid):
        """Start a thread which fetches online status and other info's."""
        self.timestamp = time.time() + 5 * 60
        thread = InfoThread(self, None, data, pid)
        thread.start()

    @lock
    def create_result_thread(self, user, data):
        """Creates a thread to fetch online status, returns result id."""
        self.timestamp = time.time() + 5 * 60
        rid = self.result_ids
        self.result_ids += 1
        oc = OnlineCheck(rid, user)
        self.info_results[rid] = oc
        thread = InfoThread(self, user, data, oc=oc)
        thread.start()
        return rid

    @lock
    def get_info_result(self, rid):
        """Return the OnlineCheck stored for *rid*, or None."""
        return self.info_results.get(rid)

    def set_info_results(self, oc, result):
        """Merge new results into *oc* and notify event listeners."""
        self.pyload.evm.fire('linkcheck:updated', oc.rid,
                             result, owner=oc.owner)
        oc.update(result)

    def get_progress_list(self, user=None):
        """Collect progress info from all threads (optionally one user's)."""
        info = []
        for thread in self.thread:
            # skip if not belong to current user
            if user is not None and thread.owner != user:
                continue
            progress_info = thread.get_progress_info()
            if progress_info:
                info.append(progress_info)
        return info

    def work(self):
        """Run all task which have to be done (this is for repetitive call by
        core)."""
        if self.info_cache and self.timestamp < time.time():
            self.info_cache.clear()
            self.pyload.log.debug('Cleared Result cache')
        # BUGFIX: iterate over a snapshot of the keys -- deleting from a
        # dict while iterating it raises RuntimeError on Python 3.
        for rid in list(self.info_results):
            if self.info_results[rid].is_stale():
                del self.info_results[rid]
|
from openerp import models, fields, api
class clv_seedling(models.Model):
    """Extend clv_seedling so history tracking is active by default."""
    _inherit = 'clv_seedling'

    _defaults = {'active_history': True}
|
import datetime
from lxml import etree
import Primitives
import Match
import Tactics
class player_position_type:
    """Enumeration of on-pitch roles, encoded as plain integers 0..3."""
    Goalkeeper, Defender, Midfielder, Forward = range(4)
def num_to_pos(num):
    """Map an integer code to a player_position_type; any value other than
    0, 1 or 2 is treated as Forward."""
    mapping = {0: player_position_type.Goalkeeper,
               1: player_position_type.Defender,
               2: player_position_type.Midfielder}
    return mapping.get(num, player_position_type.Forward)
class player_position:
    """A pitch position: a role plus left-side and winger flags."""

    def __init__(self, pos, left = False, winger = False):
        self.pos = pos
        self.left = left
        self.winger = winger

    def to_xml(self):
        """Serialize to a <position> element with pos/left/wing attributes."""
        node = etree.Element("position")
        for attr, value in (("pos", self.pos),
                            ("left", int(self.left)),
                            ("wing", int(self.winger))):
            node.set(attr, str(value))
        return node

    def equals(self, other):
        """Field-by-field equality against another player_position."""
        return ((self.pos, self.left, self.winger) ==
                (other.pos, other.left, other.winger))
# Attribute names looked up (via getattr) on player_personality objects
# when serializing them to XML.
player_personalities_list = ["active", "risktaking", "offensive", "aggressive", "consistent", "creative", "experienced"]
# Attribute names looked up on player_skills objects when serializing.
player_skills_list = ["stamina", "dexterity", "speed", "tackling", "passing", "shooting", "control", "accuracy", "goalkeeping", "heading"]
class player_skills:
    """Bag of numeric skill attributes named in player_skills_list."""

    def to_xml(self):
        """Serialize every known skill as an attribute of a <skills> node."""
        node = etree.Element("skills")
        for name in player_skills_list:
            node.set(name, str(getattr(self, name)))
        return node
class player_personality:
    """Bag of trait attributes named in player_personalities_list."""

    def to_xml(self):
        """Serialize every trait as an attribute of a <personality> node."""
        node = etree.Element("personality")
        for name in player_personalities_list:
            node.set(name, str(getattr(self, name)))
        return node
def to_stars(skill):
    """Render a numeric skill as a star string.

    Each star covers 200 points, rounded to the nearest star; a trailing
    '+' marks the lower half of a band, except at the 5-star cap.
    """
    # Floor division keeps integer semantics on both Python 2 and 3;
    # the old "/" produced a float on Python 3 and crashed "*" * num_stars.
    num_stars = (skill + 100) // 200
    plus = skill % 200 < 100 and num_stars < 5
    s = "*" * num_stars
    if plus:
        s += "+"
    return s
class Player(Primitives.Human):
    """A player: a Human with an id, club name, skills, position and a
    lazily computed rating."""

    # Skill names in the canonical display order shared by __str__ and str2.
    _DISPLAY_ORDER = ("stamina", "speed", "dexterity", "goalkeeping",
                      "tackling", "passing", "control", "heading",
                      "shooting", "accuracy")

    def __init__(self, pid, name = ""):
        Primitives.Human.__init__(self, name)
        self.id = pid
        self.club_name = ""
        self.rating = -1  # -1 means "not computed yet"

    def _skill_values(self):
        # Skill values in display order.
        return [getattr(self.skills, n) for n in self._DISPLAY_ORDER]

    def __str__(self):
        cells = tuple(to_stars(v) for v in self._skill_values())
        return "%-30s %-5s %-5s %-5s %-5s %-5s %-5s %-5s %-5s %-5s %-5s" % (
            (self.name,) + cells)

    def str2(self):
        """Like __str__ but with raw numeric skill values."""
        cells = tuple(self._skill_values())
        return "%-30s %-5d %-5d %-5d %-5d %-5d %-5d %-5d %-5d %-5d %-5d" % (
            (self.name,) + cells)

    def calculate_rating(self):
        """Compute self.rating as a weighted average of the skills relevant
        to the player's position (0 when the position is unknown)."""
        self.rating = 0
        s = self.skills
        pos = self.position.pos
        if pos == player_position_type.Goalkeeper:
            self.rating = s.goalkeeping
        elif pos == player_position_type.Defender:
            self.rating = (s.tackling * 2 + s.passing + s.control) / 4
        elif pos == player_position_type.Midfielder:
            self.rating = (s.passing * 2 + s.control + s.stamina + s.tackling) / 5
        elif pos == player_position_type.Forward:
            self.rating = (s.shooting * 2 + s.accuracy * 2 + s.speed) / 5

    def get_rating(self):
        """Return the cached rating, computing it on first use."""
        if self.rating < 0:
            self.calculate_rating()
        return self.rating

    def to_xml(self):
        """Serialize to a <player> element holding personal data plus
        personality, skills and position subnodes."""
        root = etree.Element("player", id = str(self.id))
        personal = etree.SubElement(root, "personal", name = self.name)
        etree.SubElement(personal, "appearance", value = str(self.appearance))
        etree.SubElement(personal, "nationality", value = self.nationality)
        for subnode in (self.personality.to_xml(),
                        self.skills.to_xml(),
                        self.position.to_xml()):
            root.append(subnode)
        return root
class Coach(Primitives.Human):
    """A club coach; currently just a named Human."""

    def __init__(self, name):
        Primitives.Human.__init__(self, name)
class player_factory:
    """Creates Player objects carrying unique, increasing ids (from 1)."""

    def __init__(self):
        self.pidcounter = 1

    def new_player(self, name):
        """Return a new Player with the next free id."""
        player = Player(self.pidcounter, name)
        self.pidcounter += 1
        return player
class Kit:
    """A club kit: jersey type/image/colors plus shorts and socks colors."""

    def __init__(self):
        self.jersey_type = 0
        self.jersey_image = ""
        self.jersey_colors = []
        self.shorts_color = Primitives.Color()
        self.socks_color = Primitives.Color()

    def __str__(self):
        return '%s; %s; %s' % (self.jersey_colors[0], self.shorts_color,
                               self.socks_color)

    def to_xml(self):
        """Serialize to a <kit> element with jersey, shorts and socks
        children."""
        root = etree.Element("kit")
        jersey = etree.SubElement(root, "jersey", type = str(self.jersey_type))
        for jersey_color in self.jersey_colors:
            jersey.append(jersey_color.to_xml())
        for tag, color in (("shorts", self.shorts_color),
                           ("socks", self.socks_color)):
            child = etree.SubElement(root, tag)
            child.append(color.to_xml())
        return root
class Club:
    """A football club: contracted players, kits and a playing formation."""

    def __init__(self, name):
        self.name = name
        self.kits = []
        self.contracts = []       # player ids under contract
        self.players = {}         # player id -> Player, filled by get_players()
        self.org_name = ""
        self.stadium = ""
        self.rating = -1          # -1 means "not computed yet"
        self.player_ratings = []  # (rating, Player) pairs, best first

    def __str__(self):
        retval = ""
        retval += "%s - %s - %s\n" % (self.name, self.org_name, self.stadium)
        retval += "%-30s %-5s %-5s %-5s %-5s %-5s %-5s %-5s %-5s %-5s %-5s\n" % ("Name",
                "Stamn", "Speed", "Dextr", "Goalk", "Tackl", "Passn", "Contr",
                "Headn", "Shoot", "Accur")
        for p in self.players.values():
            retval += "%s\n" % str(p)
        return retval

    def get_players(self, dbase):
        """Resolve contract ids into Player objects and build the formation."""
        plsdb = dbase.players
        for contract in self.contracts:
            self.players[contract] = plsdb[contract]
        self.setup_formation(dbase.pitch_tactics)

    def setup_formation(self, pitch_tactics):
        """Setups the formation based on given players.

        This simply takes the best players the club has and assigns them
        appropriately. A better way (TODO) would be to also consider the
        areas on the pitch where players are needed, even if the best players
        aren't available on those areas.
        """
        if not self.player_ratings:
            self.setup_player_ratings()
        gk = []
        dfc = []
        dfw = []
        mdc = []
        mdw = []
        fw = []
        subs = []
        # First pass: fill lines with the best players until we have a
        # keeper plus ten outfield players.
        for v, p in self.player_ratings:
            tot_pls = len(dfc) + len(mdc) + len(fw)
            if len(gk) < 1 and p.position.pos == player_position_type.Goalkeeper:
                gk.append(p)
            elif tot_pls >= 10:
                subs.append(p)
            elif len(dfc) < 6 and p.position.pos == player_position_type.Defender:
                dfc.append(p)
            elif len(mdc) < 5 and p.position.pos == player_position_type.Midfielder:
                mdc.append(p)
            # NOTE(review): this branch caps on len(mdw) (always empty here)
            # but appends to fw -- looks like a copy/paste slip; confirm the
            # intended forward cap before changing.
            elif len(mdw) < 4 and p.position.pos == player_position_type.Forward:
                fw.append(p)
            else:
                subs.append(p)
        # Make sure every line reaches its minimum size.
        self.subs_to_pos(subs, dfc, player_position_type.Defender, 3)
        self.subs_to_pos(subs, mdc, player_position_type.Midfielder, 2)
        self.subs_to_pos(subs, fw, player_position_type.Forward, 1)
        # Trim back to ten outfield players if the minima overfilled us.
        while len(dfc) + len(mdc) + len(fw) > 10:
            if len(dfc) > 3:  # just take the first position that fits
                subs.append(dfc.pop())
            elif len(mdc) > 2:
                subs.append(mdc.pop())
            elif len(fw) > 1:
                subs.append(fw.pop())
        form_name = "%d-%d-%d" % (len(dfc), len(mdc), len(fw))
        gentac = Tactics.GeneralTactic()
        pitch_tactic = pitch_tactics[form_name]
        self.formation = Tactics.Formation(gentac, pitch_tactic)
        for player in gk:
            self.formation.lineup.add_player(player.id, "Goalkeeper")
        # TODO: remove random lineup creation
        for (tacname, tactic), player in zip(pitch_tactic.player_tactics, dfc + mdc + fw):
            self.formation.lineup.add_player(player.id, tacname)

    def pos_to_wing(self, pos, wings):
        """Move up to two wingers from *pos* into *wings* (currently unused)."""
        if len(pos) < 3:
            return
        # BUGFIX: iterate over a snapshot -- removing from the list being
        # iterated used to skip every other candidate.
        for p in list(pos):
            if p.position.winger:
                wings.append(p)
                pos.remove(p)
                if len(wings) >= 2:
                    return
        while len(wings) < 2 and len(pos) > 0:
            wings.append(pos.pop(0))

    def subs_to_pos(self, subs, pos, postype, min):
        """Promote substitutes of *postype* into *pos* until it holds at
        least *min* players; raises ValueError when impossible."""
        if len(pos) < min:
            # BUGFIX: iterate over a snapshot -- removing from the list
            # being iterated used to skip every other candidate.
            for p in list(subs):
                if p.position.pos == postype:
                    pos.append(p)
                    subs.remove(p)
                    if len(pos) == min:
                        break
        if len(pos) < min:
            raise ValueError("%s: Not enough players for position %d!" %
                             (self.name, postype))

    def setup_player_ratings(self):
        """(Re)build player_ratings, best players first."""
        self.player_ratings = []
        for p in self.players.values():
            val = p.get_rating()
            self.player_ratings.append((val, p))
        # NOTE(review): ties compare Player objects, which only works on
        # Python 2's arbitrary-object ordering.
        self.player_ratings.sort()
        self.player_ratings.reverse()

    def calculate_rating(self):
        """Club rating = sum of the twelve best player ratings."""
        if not self.player_ratings:
            self.setup_player_ratings()
        self.rating = 0
        chosen = self.player_ratings[:12]
        for v, p in chosen:
            self.rating += v

    def get_rating(self):
        """Return the cached club rating, computing it on first use."""
        if self.rating < 0:
            self.calculate_rating()
        return self.rating

    def to_xml(self):
        """Serialize to a <club> element (coach, kits, country, stadium and
        player contracts)."""
        root = etree.Element("club", name = self.name)
        coachnode = etree.SubElement(root, "coach", name = self.coach.name)
        kitsnode = etree.SubElement(root, "kits")
        for kit in self.kits:
            kitsnode.append(kit.to_xml())
        countrynode = etree.SubElement(root, "country", name = self.org_name)
        stadiumnode = etree.SubElement(root, "stadium", name = self.stadium)
        contractsnode = etree.SubElement(root, "contracts")
        for contract in self.contracts:
            etree.SubElement(contractsnode, "contract", player = str(contract))
        return root
class Pitch:
    """A pitch: playing area, total size, surface patterns and states.

    NOTE(review): the 'friction' argument is accepted but never stored --
    confirm whether callers expect a self.friction attribute.
    """

    def __init__(self, name, friction, length, width, size_length, size_width, pattern = None, state = None):
        self.name = name
        self.size_length = size_length
        self.size_width = size_width
        self.length = length
        self.width = width
        # BUGFIX: the old mutable defaults (pattern=[], state=[]) were
        # shared between every instance created without explicit lists.
        self.pattern = [] if pattern is None else pattern
        self.state = [] if state is None else state

    def to_xml(self):
        """Serialize to a <pitch> element with area/size dimensions plus
        pattern and state keyword lists."""
        root = etree.Element("pitch", name = self.name)
        etree.SubElement(root, "area", length = str(self.length), width = str(self.width))
        etree.SubElement(root, "size", length = str(self.size_length), width = str(self.size_width))
        patnode = etree.SubElement(root, "patterns")
        for pattern in self.pattern:
            etree.SubElement(patnode, "pattern", keyword = pattern)
        statenode = etree.SubElement(root, "states")
        for state in self.state:
            etree.SubElement(statenode, "state", keyword = state)
        return root
def default_pitch():
    """Return the standard grass pitch used when none is specified."""
    return Pitch("grass01", 0.95, 100, 70, 110, 80)
class Stadium:
    """A stadium with a capacity and a default pitch."""

    def __init__(self, name, capacity = 0):
        self.name = name
        self.capacity = capacity
        self.pitch = default_pitch()

    def to_xml(self):
        """Serialize to a <stadium> element referencing the pitch by name."""
        return etree.Element("stadium", name = self.name,
                             capacity = str(self.capacity),
                             pitch = self.pitch.name)
class Region:
    """A named geographic region holding subregions and stadiums by name."""

    def __init__(self, name):
        self.name = name
        self.subregions = {}
        self.stadiums = {}
class Country:
    """A country with tournaments and (attached later) a league system."""

    def __init__(self, name):
        self.name = name
        self.tournaments = {}

    def get_stages(self):
        """Flatten the league system into a single list of stages,
        level by level, branch by branch."""
        return [stage
                for level in self.leaguesystem.levels
                for branch in level.branches
                for stage in branch.stages]
class Branch:
    """A branch of a league level: its stages plus promotion and
    relegation exchange rules."""

    def __init__(self, stages, prs, rls):
        self.stages = stages
        self.promotions = prs
        self.relegations = rls
def stage_number_to_stage_name(s, max_stages):
    """Return a human readable name for knockout stage number *s*.

    Stage 1 is the final; larger numbers count back toward the first round
    of a competition with *max_stages* stages. Unknown combinations yield
    an empty string.
    """
    if s < 1 or max_stages < 1:
        return ""
    # Names fixed by closeness to the final take precedence...
    final_names = {1: "Final", 2: "Semifinals", 3: "Quarterfinals"}
    if s in final_names:
        return final_names[s]
    # ...otherwise name by distance from the opening round.
    early_names = ("First round", "Second round", "Third round",
                   "Fourth round", "Fifth round", "Sixth round",
                   "Seventh round", "Eighth round")
    offset = max_stages - s
    if 0 <= offset < len(early_names):
        return early_names[offset]
    return ""
class Exchange:
    """Describes how teams move between stages (e.g. promotion slots)."""

    def __init__(self, highest = 0, num = 0, tournament = "", stage = ""):
        self.highest = highest
        self.num = num
        self.tournament = tournament
        self.stage = stage

    def __str__(self):
        return "%s %s %d" % (self.tournament, self.stage, self.num)
class Trophy:
    """A named trophy awarded to a tournament winner."""

    def __init__(self, name):
        self.name = name
class Level:
    """One level of a league pyramid: its parallel branches plus the
    promotion and relegation slots into/out of the level."""

    def __init__(self):
        self.branches, self.promotions, self.relegations = [], [], []
class Leaguesystem:
    """A named league pyramid (``levels`` is assigned externally as a
    list ordered from the top level downwards).

    Bug fix: both lookup methods previously indexed ``b.stages[i]`` with
    the *level* index ``i`` instead of the stage index, raising
    IndexError (or matching the wrong stage) whenever a level index
    exceeded a branch's stage count.
    """

    def __init__(self, name):
        self.name = name

    def get_higher_level(self, stagename):
        """Return the level above the one containing *stagename*.

        Raises ValueError when the stage is unknown or already at the top.
        """
        for i, level in enumerate(self.levels):
            for branch in level.branches:
                for stage in branch.stages:
                    if stagename == stage.name:
                        if i == 0:
                            raise ValueError("No higher level")
                        return self.levels[i - 1]
        raise ValueError("Stage not found")

    def get_lower_level(self, stagename):
        """Return the level below the one containing *stagename*.

        Raises ValueError when the stage is unknown or already at the bottom.
        """
        for i, level in enumerate(self.levels):
            for branch in level.branches:
                for stage in branch.stages:
                    if stagename == stage.name:
                        if i == len(self.levels) - 1:
                            raise ValueError("No lower levels")
                        return self.levels[i + 1]
        raise ValueError("Stage not found")
class Preset:
    """A named, pre-configured set of game data."""

    def __init__(self, name):
        self.name = name
class DB:
    """In-memory database of all game entities, one name-indexed dict
    per entity type."""

    def __init__(self):
        for attr in ("clubs", "players", "countries", "tournaments",
                     "stadiums", "formations", "pitches", "pitch_tactics"):
            setattr(self, attr, {})
        # Sentinel club for players whose club is not known.
        self.clubs["unknown"] = Club("unknown")
# Quick manual smoke test: build two players and print them.
if __name__ == '__main__':
    pf = player_factory()
    p1 = pf.new_player("John", "Smith")
    p2 = pf.new_player("Eric", "Doe")
    # print() with a single argument behaves identically on Python 2
    # and 3, unlike the old statement form used before.
    print(p1)
    print(p2)
|
from osv import fields,osv
from lxml import etree
from tools import graph
from tools.safe_eval import safe_eval as eval
import tools
from tools.view_validation import valid_view
import os
import logging
_logger = logging.getLogger(__name__)
class view_custom(osv.osv):
    """Per-user customized copies of views (e.g. personalized dashboards)."""
    _name = 'ir.ui.view.custom'
    _order = 'create_date desc' # search(limit=1) should return the last customization
    _columns = {
        'ref_id': fields.many2one('ir.ui.view', 'Original View', select=True, required=True, ondelete='cascade'),
        'user_id': fields.many2one('res.users', 'User', select=True, required=True, ondelete='cascade'),
        'arch': fields.text('View Architecture', required=True),
    }

    def _auto_init(self, cr, context=None):
        """Ensure the (user_id, ref_id) lookup index exists."""
        super(view_custom, self)._auto_init(cr, context)
        cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'ir_ui_view_custom_user_id_ref_id\'')
        if not cr.fetchone():
            cr.execute('CREATE INDEX ir_ui_view_custom_user_id_ref_id ON ir_ui_view_custom (user_id, ref_id)')
class view(osv.osv):
    """Registry of UI view definitions (``ir.ui.view``).

    Stores each view's XML architecture (``arch``), validates it on
    create/write, resolves view inheritance, and provides the layout
    data used by diagram views.
    """
    _name = 'ir.ui.view'

    def _type_field(self, cr, uid, ids, name, args, context=None):
        """Function field: the view type is the root tag of ``arch``, or
        the parent view's type when this view inherits another one."""
        result = {}
        for record in self.browse(cr, uid, ids, context):
            # Get the type from the inherited view if any.
            if record.inherit_id:
                result[record.id] = record.inherit_id.type
            else:
                result[record.id] = etree.fromstring(record.arch.encode('utf8')).tag
        return result

    _columns = {
        'name': fields.char('View Name', required=True),
        'model': fields.char('Object', size=64, required=True, select=True),
        'priority': fields.integer('Sequence', required=True),
        # Computed from the arch root tag (see _type_field); stored so it
        # can be searched on.
        'type': fields.function(_type_field, type='selection', selection=[
            ('tree','Tree'),
            ('form','Form'),
            ('mdx','mdx'),
            ('graph', 'Graph'),
            ('calendar', 'Calendar'),
            ('diagram','Diagram'),
            ('gantt', 'Gantt'),
            ('kanban', 'Kanban'),
            ('search','Search')], string='View Type', required=True, select=True, store=True),
        'arch': fields.text('View Architecture', required=True),
        'inherit_id': fields.many2one('ir.ui.view', 'Inherited View', ondelete='cascade', select=True),
        'field_parent': fields.char('Child Field',size=64),
        'xml_id': fields.function(osv.osv.get_xml_id, type='char', size=128, string="External ID",
                                  help="ID of the view defined in xml file"),
        'groups_id': fields.many2many('res.groups', 'ir_ui_view_group_rel', 'view_id', 'group_id',
            string='Groups', help="If this field is empty, the view applies to all users. Otherwise, the view applies to the users of those groups only."),
    }
    _defaults = {
        'arch': '<?xml version="1.0"?>\n<tree string="My view">\n\t<field name="name"/>\n</tree>',
        'priority': 16
    }
    _order = "priority,name"

    # Holds the RNG schema
    _relaxng_validator = None

    def create(self, cr, uid, values, context=None):
        """Create a view, inferring a default ``name`` from the model and
        the view type when the caller did not supply one."""
        if 'type' in values:
            _logger.warning("Setting the `type` field is deprecated in the `ir.ui.view` model.")
        if not values.get('name'):
            if values.get('inherit_id'):
                inferred_type = self.browse(cr, uid, values['inherit_id'], context).type
            else:
                inferred_type = etree.fromstring(values['arch'].encode('utf8')).tag
            values['name'] = "%s %s" % (values['model'], inferred_type)
        # NOTE(review): super(osv.osv, self) skips every class between this
        # one and osv.osv in the MRO — presumably intentional to bypass
        # intermediate create() overrides; confirm before changing.
        return super(osv.osv, self).create(cr, uid, values, context)

    def _relaxng(self):
        """Lazily load, cache and return the RelaxNG validator for view
        archs; returns None when the schema could not be loaded."""
        if not self._relaxng_validator:
            frng = tools.file_open(os.path.join('base','rng','view.rng'))
            try:
                relaxng_doc = etree.parse(frng)
                self._relaxng_validator = etree.RelaxNG(relaxng_doc)
            except Exception:
                _logger.exception('Failed to load RelaxNG XML schema for views validation')
            finally:
                frng.close()
        return self._relaxng_validator

    def _check_render_view(self, cr, uid, view, context=None):
        """Verify that the given view's hierarchy is valid for rendering, along with all the changes applied by
        its inherited views, by rendering it using ``fields_view_get()``.

        @param browse_record view: view to validate
        @return: the rendered definition (arch) of the view, always utf-8 bytestring (legacy convention)
            if no error occurred, else False.
        """
        try:
            fvg = self.pool.get(view.model).fields_view_get(cr, uid, view_id=view.id, view_type=view.type, context=context)
            return fvg['arch']
        except:
            _logger.exception("Can't render view %s for model: %s", view.xml_id, view.model)
            return False

    def _check_xml(self, cr, uid, ids, context=None):
        """Constraint: every view must render via fields_view_get and, for
        pre-7.0 archs, pass RelaxNG and assertion-based validation."""
        for view in self.browse(cr, uid, ids, context):
            # Sanity check: the view should not break anything upon rendering!
            view_arch_utf8 = self._check_render_view(cr, uid, view, context=context)
            # always utf-8 bytestring - legacy convention
            if not view_arch_utf8: return False
            # RNG-based validation is not possible anymore with 7.0 forms
            # TODO 7.0: provide alternative assertion-based validation of view_arch_utf8
            view_docs = [etree.fromstring(view_arch_utf8)]
            if view_docs[0].tag == 'data':
                # A <data> element is a wrapper for multiple root nodes
                view_docs = view_docs[0]
            validator = self._relaxng()
            for view_arch in view_docs:
                # NOTE(review): plain string comparison of version numbers;
                # a missing 'version' attribute (None) also selects the
                # legacy validation path here — confirm that is intended.
                if (view_arch.get('version') < '7.0') and validator and not validator.validate(view_arch):
                    for error in validator.error_log:
                        _logger.error(tools.ustr(error))
                    return False
                if not valid_view(view_arch):
                    return False
        return True

    _constraints = [
        (_check_xml, 'Invalid XML for View Architecture!', ['arch'])
    ]

    def _auto_init(self, cr, context=None):
        """Ensure the (model, inherit_id) search index exists."""
        super(view, self)._auto_init(cr, context)
        cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'ir_ui_view_model_type_inherit_id\'')
        if not cr.fetchone():
            cr.execute('CREATE INDEX ir_ui_view_model_type_inherit_id ON ir_ui_view (model, inherit_id)')

    def get_inheriting_views_arch(self, cr, uid, view_id, model, context=None):
        """Retrieves the architecture of views that inherit from the given view, from the sets of
        views that should currently be used in the system. During the module upgrade phase it
        may happen that a view is present in the database but the fields it relies on are not
        fully loaded yet. This method only considers views that belong to modules whose code
        is already loaded. Custom views defined directly in the database are loaded only
        after the module initialization phase is completely finished.

        :param int view_id: id of the view whose inheriting views should be retrieved
        :param str model: model identifier of the view's related model (for double-checking)
        :rtype: list of tuples
        :return: [(view_arch,view_id), ...]
        """
        # Groups are read as superuser (uid 1) so the filter below sees the
        # target user's real group membership.
        user_groups = frozenset(self.pool.get('res.users').browse(cr, 1, uid, context).groups_id)
        if self.pool._init:
            # Module init currently in progress, only consider views from modules whose code was already loaded
            query = """SELECT v.id FROM ir_ui_view v LEFT JOIN ir_model_data md ON (md.model = 'ir.ui.view' AND md.res_id = v.id)
                       WHERE v.inherit_id=%s AND v.model=%s AND md.module in %s
                       ORDER BY priority"""
            query_params = (view_id, model, tuple(self.pool._init_modules))
        else:
            # Modules fully loaded, consider all views
            query = """SELECT v.id FROM ir_ui_view v
                       WHERE v.inherit_id=%s AND v.model=%s
                       ORDER BY priority"""
            query_params = (view_id, model)
        cr.execute(query, query_params)
        view_ids = [v[0] for v in cr.fetchall()]
        # filter views based on user groups: a view restricted to groups the
        # user does not belong to is skipped entirely.
        return [(view.arch, view.id)
                for view in self.browse(cr, 1, view_ids, context)
                if not (view.groups_id and user_groups.isdisjoint(view.groups_id))]

    def write(self, cr, uid, ids, vals, context=None):
        """Write to views, also dropping any per-user customization of the
        modified views so every user sees the update."""
        if not isinstance(ids, (list, tuple)):
            ids = [ids]
        # drop the corresponding view customizations (used for dashboards for example), otherwise
        # not all users would see the updated views
        custom_view_ids = self.pool.get('ir.ui.view.custom').search(cr, uid, [('ref_id','in',ids)])
        if custom_view_ids:
            self.pool.get('ir.ui.view.custom').unlink(cr, uid, custom_view_ids)
        return super(view, self).write(cr, uid, ids, vals, context)

    def graph_get(self, cr, uid, id, model, node_obj, conn_obj, src_node, des_node, label, scale, context=None):
        """Compute the node/transition layout data for a diagram view.

        Introspects *model* for its one2many link to *node_obj* and the
        node model for its one2many links to *conn_obj*, reads the record
        identified by *id*, lays the nodes out with the ``graph`` helper
        and returns a dict of scaled nodes, transitions, labels and
        unconnected ("blank") nodes.
        """
        nodes=[]
        nodes_name=[]
        transitions=[]
        start=[]
        tres={}
        labels={}
        no_ancester=[]
        blank_nodes = []
        _Model_Obj=self.pool.get(model)
        _Node_Obj=self.pool.get(node_obj)
        _Arrow_Obj=self.pool.get(conn_obj)
        # Find the o2m field on the model that holds the diagram nodes,
        # and the inverse field pointing back to the model.
        for model_key,model_value in _Model_Obj._columns.items():
            if model_value._type=='one2many':
                if model_value._obj==node_obj:
                    _Node_Field=model_key
                    _Model_Field=model_value._fields_id
        # Find the node's o2m fields holding its connectors.  The order is
        # deliberate: the destination field is located first (flag False),
        # then a later matching column becomes the source field.
        flag=False
        for node_key,node_value in _Node_Obj._columns.items():
            if node_value._type=='one2many':
                if node_value._obj==conn_obj:
                    if src_node in _Arrow_Obj._columns and flag:
                        _Source_Field=node_key
                    if des_node in _Arrow_Obj._columns and not flag:
                        _Destination_Field=node_key
                        flag = True
        datas = _Model_Obj.read(cr, uid, id, [],context)
        for a in _Node_Obj.read(cr,uid,datas[_Node_Field],[]):
            # Nodes with at least one connector take part in the layout;
            # fully unconnected ones are reported separately.
            if a[_Source_Field] or a[_Destination_Field]:
                nodes_name.append((a['id'],a['name']))
                nodes.append(a['id'])
            else:
                blank_nodes.append({'id': a['id'],'name':a['name']})
            if a.has_key('flow_start') and a['flow_start']:
                start.append(a['id'])
            else:
                if not a[_Source_Field]:
                    no_ancester.append(a['id'])
            for t in _Arrow_Obj.read(cr,uid, a[_Destination_Field],[]):
                transitions.append((a['id'], t[des_node][0]))
                tres[str(t['id'])] = (a['id'],t[des_node][0])
                # Build a human-readable label from the comma-separated
                # field list in *label* (evaluated as a Python literal).
                label_string = ""
                if label:
                    for lbl in eval(label):
                        if t.has_key(tools.ustr(lbl)) and tools.ustr(t[lbl])=='False':
                            label_string = label_string + ' '
                        else:
                            label_string = label_string + " " + tools.ustr(t[lbl])
                labels[str(t['id'])] = (a['id'],label_string)
        g = graph(nodes, transitions, no_ancester)
        g.process(start)
        g.scale(*scale)
        result = g.result_get()
        results = {}
        for node in nodes_name:
            results[str(node[0])] = result[node[0]]
            results[str(node[0])]['name'] = node[1]
        return {'nodes': results,
                'transitions': tres,
                'label' : labels,
                'blank_nodes': blank_nodes,
                'node_parent_field': _Model_Field,}
class view_sc(osv.osv):
    """User shortcuts to menu items (``ir.ui.view_sc``)."""
    _name = 'ir.ui.view_sc'
    _columns = {
        'name': fields.char('Shortcut Name', size=64), # Kept for backwards compatibility only - resource name used instead (translatable)
        'res_id': fields.integer('Resource Ref.', help="Reference of the target resource, whose model/table depends on the 'Resource Name' field."),
        'sequence': fields.integer('Sequence'),
        'user_id': fields.many2one('res.users', 'User Ref.', required=True, ondelete='cascade', select=True),
        'resource': fields.char('Resource Name', size=64, required=True, select=True)
    }

    def _auto_init(self, cr, context=None):
        """Ensure the (user_id, resource) lookup index exists."""
        super(view_sc, self)._auto_init(cr, context)
        cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'ir_ui_view_sc_user_id_resource\'')
        if not cr.fetchone():
            cr.execute('CREATE INDEX ir_ui_view_sc_user_id_resource ON ir_ui_view_sc (user_id, resource)')

    def get_sc(self, cr, uid, user_id, model='ir.ui.menu', context=None):
        """Return *user_id*'s shortcuts pointing at *model* records,
        filtered to targets that still exist, each augmented with the
        target's display name."""
        ids = self.search(cr, uid, [('user_id','=',user_id),('resource','=',model)], context=context)
        results = self.read(cr, uid, ids, ['res_id'], context=context)
        name_map = dict(self.pool.get(model).name_get(cr, uid, [x['res_id'] for x in results], context=context))
        # Make sure to return only shortcuts pointing to existing menu items.
        filtered_results = filter(lambda result: result['res_id'] in name_map, results)
        for result in filtered_results:
            result.update(name=name_map[result['res_id']])
        return filtered_results

    _order = 'sequence,name'
    _defaults = {
        'resource': 'ir.ui.menu',
        'user_id': lambda obj, cr, uid, context: uid,
    }
    _sql_constraints = [
        ('shortcut_unique', 'unique(res_id, resource, user_id)', 'Shortcut for this menu already exists!'),
    ]
|
import logging
import psycopg2
from odoo.addons.component.core import Component
from odoo.addons.connector.exception import RetryableJobError
_logger = logging.getLogger(__name__)
class RecordLocker(Component):
    """Component that locks record(s) for the duration of the transaction.

    Example of usage::

        self.component('record.locker').lock(self.records)

    See :meth:`lock` for the detailed contract.
    """

    _name = "base.record.locker"
    _inherit = ["base.connector"]
    _usage = "record.locker"

    def lock(self, records, seconds=None, ignore_retry=True):
        """Acquire a row lock on ``records`` or raise a retryable job error.

        A ``SELECT ... FOR UPDATE NOWAIT`` is issued so that the first job
        touching the record(s) proceeds, while any concurrent transaction
        attempting FOR UPDATE/UPDATE fails immediately instead of waiting
        until the current transaction is committed or rolled back.  The
        losing job raises
        :exc:`~odoo.addons.queue_job.exception.RetryableJobError` and is
        retried later.  A classical use case is preventing concurrent
        exports for the same record(s).

        The parameters below are forwarded to the exception:

        :param seconds: delay, in seconds, before the retry when the lock
                        cannot be acquired; when not set, the queue_job
                        configuration applies.
        :param ignore_retry: when True, the job's retry counter is not
                             incremented.
        """
        record_ids = tuple(records.ids)
        sql = "SELECT id FROM %s WHERE ID IN %%s FOR UPDATE NOWAIT" % self.model._table
        try:
            self.env.cr.execute(sql, (record_ids,), log_exceptions=False)
        except psycopg2.OperationalError:
            _logger.info(
                "A concurrent job is already working on the same "
                "record (%s with one id in %s). Job delayed later.",
                self.model._name,
                record_ids,
            )
            raise RetryableJobError(
                "A concurrent job is already working on the same record "
                "(%s with one id in %s). The job will be retried later."
                % (self.model._name, record_ids),
                seconds=seconds,
                ignore_retry=ignore_retry,
            )
|
"""
WSGI config for notifications project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os

# Point Django at the project's settings module unless the environment
# already names one (setdefault preserves any value set by the deployment).
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "notifications.settings")

# This import must come after DJANGO_SETTINGS_MODULE is configured.
from django.core.wsgi import get_wsgi_application

# Module-level WSGI callable discovered by servers via WSGI_APPLICATION.
application = get_wsgi_application()
|
""" BioDrop uLite
"""
from bika.lims import bikaMessageFactory as _
from bika.lims.utils import t
from . import BioDropCSVParser, BioDropImporter
import json
import traceback
title = "BioDrop uLite"
def Import(context, request):
    """Read BioDrop uLite analysis results from the uploaded file and
    import them into the LIMS.

    Reads the form fields ``filename``, ``format``, ``artoapply``,
    ``override``, ``sample`` (default ``'requestid'``), ``instrument``
    and ``analysis`` from *request*, parses the file and feeds it to
    :class:`BioDropImporter`.

    :return: a JSON string with ``errors``, ``log`` and ``warns`` lists.
    """
    infile = request.form['filename']
    fileformat = request.form['format']
    artoapply = request.form['artoapply']
    override = request.form['override']
    sample = request.form.get('sample', 'requestid')
    instrument = request.form.get('instrument', None)
    errors = []
    logs = []
    warns = []

    # Load the suitable parser
    parser = None
    if not hasattr(infile, 'filename'):
        errors.append(_("No file selected"))
    elif fileformat == 'csv':
        # A target analysis service is mandatory for the CSV parser.
        analysis = request.form.get('analysis', None)
        if analysis:
            parser = BioDropCSVParser(infile, analysis)
        else:
            errors.append(t(_("No analysis selected")))
    else:
        errors.append(t(_("Unrecognized file format ${fileformat}",
                          mapping={"fileformat": fileformat})))

    if parser:
        # Load the importer
        # Map the form choices onto the AR workflow states to consider.
        status = ['sample_received', 'attachment_due', 'to_be_verified']
        if artoapply == 'received':
            status = ['sample_received']
        elif artoapply == 'received_tobeverified':
            status = ['sample_received', 'attachment_due', 'to_be_verified']

        # over = [override non-empty results?, override with empty results?]
        over = [False, False]
        if override == 'nooverride':
            over = [False, False]
        elif override == 'override':
            over = [True, False]
        elif override == 'overrideempty':
            over = [True, True]

        # Which identifiers to search samples by.
        sam = ['getRequestID', 'getSampleID', 'getClientSampleID']
        if sample == 'requestid':
            sam = ['getRequestID']
        # Consistency fix: this chain previously restarted with a bare
        # `if`, which behaved identically but read as a separate check.
        elif sample == 'sampleid':
            sam = ['getSampleID']
        elif sample == 'clientsid':
            sam = ['getClientSampleID']
        elif sample == 'sample_clientsid':
            sam = ['getSampleID', 'getClientSampleID']

        importer = BioDropImporter(parser=parser,
                                   context=context,
                                   idsearchcriteria=sam,
                                   allowed_ar_states=status,
                                   allowed_analysis_states=None,
                                   override=over,
                                   instrument_uid=instrument)
        tbex = ''
        try:
            importer.process()
        except Exception:
            # Narrowed from a bare ``except:`` so SystemExit and
            # KeyboardInterrupt are no longer swallowed; any import
            # failure is still reported back to the caller.
            tbex = traceback.format_exc()
        errors = importer.errors
        logs = importer.logs
        warns = importer.warns
        if tbex:
            errors.append(tbex)

    results = {'errors': errors, 'log': logs, 'warns': warns}
    return json.dumps(results)
|
from yandextank.core.consoleworker import ConsoleTank
from TankTests import TankTestCase, FakeOptions
from yandextank.core import ConfigManager
import tempfile
import unittest
class ConfigManagerTestCase(TankTestCase):
    """Exercises ConfigManager loading and flushing of tank config files."""
    def setUp(self):
        # ConsoleTank is created only for its logging side effect.
        tank = ConsoleTank(FakeOptions(), None)
        tank.init_logging()
        self.foo = ConfigManager()
    def tearDown(self):
        # NOTE(review): the `del` is redundant — self.foo is immediately
        # rebound to None on the next line.
        del self.foo
        self.foo = None
    def test_load_files(self):
        # Merging two config files and flushing must not raise.
        confs = ['config/load_1.conf', 'config/load_2.conf']
        self.foo.load_files(confs)
        self.foo.flush()
# Allow running this test module directly with ``python``.
if __name__ == '__main__':
    unittest.main()
|
from __future__ import absolute_import, print_function, unicode_literals
from . import gpgme
from . import util
del absolute_import, print_function, unicode_literals
# Sentinel status values: both "no error" and GPGME's EOF marker are
# represented as None at this level.
NO_ERROR = None
EOF = None

# Copy every GPG_ERR_* constant from the underlying bindings into this
# module's namespace, then drop the helper from the public namespace.
util.process_constants('GPG_ERR_', globals())
del util
class GpgError(Exception):
    """Base class of all errors thrown by this library.

    When the error originated from GPGME, 'code' holds the error code
    and 'source' the error's origin; 'code_str' and 'source_str' are
    their human-readable forms, and suitable constants for comparison
    are defined in this module.

    'context', when not None, is a human-readable hint about where the
    error came from.

    'results', when not None, is a tuple with the results of the failed
    operation: some operations produce results even while signalling an
    error, and since the exception disrupts the normal control flow the
    callee can no longer return them — so they are attached here.  Treat
    them with a grain of salt; they are mainly useful for diagnostics
    and user feedback.
    """

    def __init__(self, error=None, context=None, results=None):
        self.error = error
        self.context = context
        self.results = results

    @property
    def code(self):
        # All gpgme lookups are guarded: without an underlying error
        # there is nothing to decode.
        return None if self.error is None else gpgme.gpgme_err_code(self.error)

    @property
    def code_str(self):
        return None if self.error is None else gpgme.gpgme_strerror(self.error)

    @property
    def source(self):
        return None if self.error is None else gpgme.gpgme_err_source(self.error)

    @property
    def source_str(self):
        return None if self.error is None else gpgme.gpgme_strsource(self.error)

    def __str__(self):
        msgs = []
        if self.context is not None:
            msgs.append(self.context)
        if self.error is not None:
            msgs.extend([self.source_str, self.code_str])
        return ': '.join(msgs)
class GPGMEError(GpgError):
    '''Generic error wrapping the underlying library's native error type.

    Thrown when a low-level API call returns an error.  This is the
    error type that was used in PyME.
    '''

    @classmethod
    def fromSyserror(cls):
        # Build an error from the current OS-level errno.
        return cls(gpgme.gpgme_err_code_from_syserror())

    @property
    def message(self):
        # Legacy alias for the human-readable context.
        return self.context

    # Legacy accessors kept for PyME compatibility.
    def getstring(self):
        return str(self)

    def getcode(self):
        return self.code

    def getsource(self):
        return self.source
def errorcheck(retval, extradata=None):
    """Raise :exc:`GPGMEError` when *retval* is a non-zero GPGME status.

    *extradata* becomes the error's human-readable context.
    """
    if retval:
        raise GPGMEError(retval, extradata)
class KeyNotFound(GPGMEError, KeyError):
    """Raised if a key was not found

    GPGME indicates this condition with EOF, which is not very
    idiomatic. We raise this error that is both a GPGMEError
    indicating EOF, and a KeyError.
    """
    def __init__(self, keystr):
        # The pattern the key lookup was performed with
        # (fingerprint, key id, e-mail, ...).
        self.keystr = keystr
        GPGMEError.__init__(self, EOF)
    def __str__(self):
        return self.keystr
class EncryptionError(GpgError):
    """Base class for errors raised while encrypting data."""
    pass
class InvalidRecipients(EncryptionError):
    """Raised when one or more encryption recipients were rejected."""

    def __init__(self, recipients, **kwargs):
        EncryptionError.__init__(self, **kwargs)
        self.recipients = recipients

    def __str__(self):
        # One "<fingerprint>: <reason>" entry per rejected recipient.
        descriptions = ("{}: {}".format(r.fpr, gpgme.gpgme_strerror(r.reason))
                        for r in self.recipients)
        return ", ".join(descriptions)
class DecryptionError(GpgError):
    """Base class for errors raised while decrypting data."""
    pass
class UnsupportedAlgorithm(DecryptionError):
    """Raised when data uses an algorithm this library cannot handle."""

    def __init__(self, algorithm, **kwargs):
        DecryptionError.__init__(self, **kwargs)
        # Name of the offending algorithm, also used as the message.
        self.algorithm = algorithm

    def __str__(self):
        return self.algorithm
class SigningError(GpgError):
    """Base class for errors raised while signing data."""
    pass
class InvalidSigners(SigningError):
    """Raised when one or more signing keys were rejected."""

    def __init__(self, signers, **kwargs):
        SigningError.__init__(self, **kwargs)
        self.signers = signers

    def __str__(self):
        # One "<fingerprint>: <reason>" entry per rejected signer.
        descriptions = ("{}: {}".format(s.fpr, gpgme.gpgme_strerror(s.reason))
                        for s in self.signers)
        return ", ".join(descriptions)
class VerificationError(GpgError):
    """Base class for signature-verification failures.

    Carries the raw verification ``result`` for inspection.
    """

    def __init__(self, result, **kwargs):
        GpgError.__init__(self, **kwargs)
        self.result = result
class BadSignatures(VerificationError):
    """Raised when at least one signature did not verify cleanly."""

    def __str__(self):
        # List only the signatures whose status indicates a problem.
        bad = ("{}: {}".format(s.fpr, gpgme.gpgme_strerror(s.status))
               for s in self.result.signatures
               if s.status != NO_ERROR)
        return ", ".join(bad)
class MissingSignatures(VerificationError):
    """Raised when signatures from expected keys are absent.

    ``missing`` holds the keys whose signatures were not found.
    """

    def __init__(self, result, missing, **kwargs):
        VerificationError.__init__(self, result, **kwargs)
        self.missing = missing

    def __str__(self):
        return ", ".join(k.subkeys[0].fpr for k in self.missing)
|
import pytest
from unittest.mock import MagicMock
from yandextank.plugins.ShellExec import Plugin
def test_plugin_execute():
    # A well-formed shell command is expected to exit with status 0.
    plugin = Plugin(MagicMock(), {}, 'shellexec')
    assert plugin.execute('echo foo') == 0
def test_plugin_execute_raises():
    # An unterminated quote makes the shell exit non-zero, which the
    # plugin surfaces as a RuntimeError.
    plugin = Plugin(MagicMock(), {}, 'shellexec')
    with pytest.raises(RuntimeError) as error:
        plugin.execute('echo "foo')
    # Bug fix: pytest's ExceptionInfo has no ``message`` attribute;
    # inspect the raised exception via ``error.value`` instead.
    assert 'Subprocess returned 2' in str(error.value)
|
""" myhdl's distribution and installation script. """
from __future__ import print_function
import ast
import fnmatch
import re
import os
import sys
from collections import defaultdict
# Refuse to install on interpreter versions the package cannot support.
if sys.version_info < (2, 6) or (3, 0) <= sys.version_info < (3, 4):
    raise RuntimeError("Python version 2.6, 2.7 or >= 3.4 required.")

# Prefer setuptools; fall back to plain distutils when it is unavailable.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

# Read the version straight out of myhdl/_version.py without importing
# the package (which may not be importable at install time).
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open('myhdl/_version.py', 'rb') as f:
    version = str(ast.literal_eval(_version_re.search(
        f.read().decode('utf-8')).group(1)))

# Collect co-simulation support files, keyed by their directory so the
# on-disk layout is preserved when installed under share/myhdl.
# NOTE(review): the loop variable `dir` shadows the builtin of the same
# name (harmless here, but worth renaming on the next edit).
data_root = 'share/myhdl'
cosim_data = defaultdict(list)
for base, dir, files in os.walk('cosimulation'):
    for pat in ('*.c', 'Makefile*', '*.py', '*.v', '*.txt'):
        good = fnmatch.filter(files, pat)
        if good:
            cosim_data[base].extend(os.path.join(base, f) for f in good)

setup(
    name="myhdl-numeric",
    version=version,
    description="MyHDL including fixed point functionality",
    long_description="See home page.",
    author="Jose M. Gomez",
    author_email="jm.gomez@ub.edu",
    url="https://github.com/jmgc/myhdl-numeric",
    packages=['myhdl', 'myhdl.conversion', 'myhdl.numeric'],
    data_files=[(os.path.join(data_root, k), v) for k, v in cosim_data.items()],
    license="LGPL",
    platforms='any',
    keywords="HDL ASIC FPGA hardware design",
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Topic :: Scientific/Engineering :: Electronic Design Automation (EDA)',
        'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
    ]
)
|
import os
import sys
import subprocess
if __name__ == "__main__":
    # Each entry maps a firmware builder XML description to the binary
    # artifact fwupdtool should generate from it (written into firmware/).
    for fn_src, fn_dst in [
        ("acpi-phat.builder.xml", "acpi-phat.bin"),
        ("bcm57xx.builder.xml", "bcm57xx.bin"),
        ("ccgx.builder.xml", "ccgx.cyacd"),
        ("ccgx-dmc.builder.xml", "ccgx-dmc.bin"),
        ("cfu-offer.builder.xml", "cfu.offer"),
        ("cfu-payload.builder.xml", "cfu.payload"),
        ("cros-ec.builder.xml", "cros-ec.bin"),
        ("dfuse.builder.xml", "dfuse.dfu"),
        ("ebitdo.builder.xml", "ebitdo.dat"),
        ("elanfp.builder.xml", "elanfp.bin"),
        ("efi-firmware-file.builder.xml", "efi-file.bin"),
        ("efi-firmware-filesystem.builder.xml", "efi-filesystem.bin"),
        ("efi-firmware-volume.builder.xml", "efi-volume.bin"),
        ("elantp.builder.xml", "elantp.bin"),
        ("fmap.builder.xml", "fmap.bin"),
        ("fmap-offset.builder.xml", "fmap-offset.bin"),
        ("ifd-bios.builder.xml", "ifd-bios.bin"),
        ("ifd.builder.xml", "ifd.bin"),
        ("ifd-no-bios.builder.xml", "ifd-no-bios.bin"),
        ("ihex.builder.xml", "ihex.hex"),
        ("pixart.builder.xml", "pixart.bin"),
        ("redfish-smbios.builder.xml", "redfish-smbios.bin"),
        ("rmi-0x.builder.xml", "synaptics-rmi-0x.img"),
        ("rmi-10.builder.xml", "synaptics-rmi-10.img"),
        ("solokey.builder.xml", "solokey.json"),
        ("srec-addr32.builder.xml", "srec-addr32.srec"),
        ("srec.builder.xml", "srec.srec"),
        ("synaprom.builder.xml", "synaprom.bin"),
        ("synaptics-cape.builder.xml", "synaptics-cape.fw"),
        ("synaptics-mst.builder.xml", "synaptics-mst.dat"),
        ("wacom.builder.xml", "wacom.wac"),
    ]:
        # Missing sources are skipped with a warning rather than aborting,
        # so a partial checkout can still build what it has.
        if not os.path.exists(fn_src):
            print("WARNING: cannot find {}".format(fn_src))
            continue
        print("INFO: converting {} into {}".format(fn_src, fn_dst))
        try:
            argv = [
                "sudo",
                "../../build/src/fwupdtool",
                "firmware-build",
                fn_src,
                os.path.join("firmware", fn_dst),
            ]
            # check=True turns a non-zero fwupdtool exit status into
            # CalledProcessError, handled below with a hard exit.
            subprocess.run(argv, check=True)
        except subprocess.CalledProcessError as e:
            print("tried to run: `{}` and got {}".format(" ".join(argv), str(e)))
            sys.exit(1)
|
from spack import *
class PyPsycopg2(PythonPackage):
    """Python interface to PostgreSQL databases"""

    homepage = "http://initd.org/psycopg/"
    url = "http://initd.org/psycopg/tarballs/PSYCOPG-2-7/psycopg2-2.7.5.tar.gz"

    version('2.7.5', sha256='eccf962d41ca46e6326b97c8fe0a6687b58dfc1a5f6540ed071ff1474cea749e')

    # setuptools is only needed to build; postgresql provides libpq,
    # which psycopg2 links against at build time and loads at run time.
    depends_on('py-setuptools', type='build')
    depends_on('postgresql', type=('build', 'run'))
|
import sys
from sets import Set
from threading import RLock
from traceback import print_exc
from Tribler.Core.simpledefs import *
DEBUG = False
class RateManager:
    """Base class for bandwidth management.

    Collects the DownloadStates of running downloads and periodically
    recomputes per-download speed limits; subclasses implement the
    actual policy in calc_and_set_speed_limits().  All public methods
    take self.lock, so an instance may be shared between threads.
    """
    def __init__(self):
        self.lock = RLock()
        self.statusmap = {}      # download status -> list of DownloadStates
        self.currenttotal = {}   # direction -> summed current speed
        self.dset = Set()        # unique Download objects seen this round
        self.clear_downloadstates()

    def add_downloadstate(self,ds):
        """ Returns the number of unique states currently stored """
        if DEBUG:
            print >>sys.stderr,"RateManager: add_downloadstate",`ds.get_download().get_def().get_infohash()`
        self.lock.acquire()
        try:
            d = ds.get_download()
            # Each download is only counted once per adjustment round.
            if d not in self.dset:
                self.statusmap[ds.get_status()].append(ds)
                for dir in [UPLOAD,DOWNLOAD]:
                    self.currenttotal[dir] += ds.get_current_speed(dir)
                self.dset.add(d)
            return len(self.dset)
        finally:
            self.lock.release()

    def add_downloadstatelist(self, dslist):
        """Add a list of DownloadStates (see add_downloadstate)."""
        for ds in dslist:
            self.add_downloadstate(ds)

    def adjust_speeds(self):
        """ Adjust speeds for the specified set of downloads and clears the set """
        self.lock.acquire()
        try:
            self.calc_and_set_speed_limits(DOWNLOAD)
            self.calc_and_set_speed_limits(UPLOAD)
            # Start the next round from a clean slate.
            self.clear_downloadstates()
        finally:
            self.lock.release()

    def clear_downloadstates(self):
        """Reset all collected state for the next adjustment round."""
        self.statusmap[DLSTATUS_ALLOCATING_DISKSPACE] = []
        self.statusmap[DLSTATUS_WAITING4HASHCHECK] = []
        self.statusmap[DLSTATUS_HASHCHECKING] = []
        self.statusmap[DLSTATUS_DOWNLOADING] = []
        self.statusmap[DLSTATUS_SEEDING] = []
        self.statusmap[DLSTATUS_STOPPED] = []
        self.statusmap[DLSTATUS_STOPPED_ON_ERROR] = []
        self.statusmap[DLSTATUS_REPEXING] = [] # RePEX: needed to prevent KeyError
        for dir in [UPLOAD,DOWNLOAD]:
            self.currenttotal[dir] = 0
        self.dset.clear()

    #
    # Internal methods
    #
    #
    # The following methods are all called with the lock held
    #
    def calc_and_set_speed_limits(self,direct):
        """ Override this method to write your own speed management policy. """
        pass
class UserDefinedMaxAlwaysOtherwiseEquallyDividedRateManager(RateManager):
    """ This class implements a simple rate management policy that:
    1. If the API user set a desired speed for a particular download,
       the speed limit for this download is set to the desired value.
    2. For all torrents for which no desired speeds have been set,
       the global limit is equally divided amongst all downloads.
       (however small the piece of the pie may be).
    3. There are separate global limits for download speed, upload speed
       and upload speed when all torrents are seeding.
    """
    def __init__(self):
        RateManager.__init__(self)
        # Caps per direction; 0.0 means unlimited.
        self.global_max_speed = {}
        self.global_max_speed[UPLOAD] = 0.0
        self.global_max_speed[DOWNLOAD] = 0.0
        # Separate upload cap applied while all torrents are seeding.
        self.global_max_seedupload_speed = 0.0

    def set_global_max_speed(self,direct,speed):
        """Set the global speed cap for one direction (0 = unlimited)."""
        self.lock.acquire()
        self.global_max_speed[direct] = speed
        self.lock.release()

    def set_global_max_seedupload_speed(self,speed):
        """Set the upload cap applied when all torrents are seeding."""
        self.lock.acquire()
        self.global_max_seedupload_speed = speed
        self.lock.release()

    def calc_and_set_speed_limits(self, dir = UPLOAD):
        """Apply the policy: user-set desired speeds are always granted;
        the remaining downloads share the global cap equally."""
        if DEBUG:
            print >>sys.stderr,"RateManager: calc_and_set_speed_limits",dir
        if dir == UPLOAD:
            workingset = self.statusmap[DLSTATUS_DOWNLOADING]+self.statusmap[DLSTATUS_SEEDING]
        else:
            workingset = self.statusmap[DLSTATUS_DOWNLOADING]
        if DEBUG:
            print >>sys.stderr,"RateManager: calc_and_set_speed_limits: len workingset",len(workingset)
        # Limit working set to active torrents with connections:
        newws = []
        for ds in workingset:
            if ds.get_num_peers() > 0:
                newws.append(ds)
        workingset = newws
        if DEBUG:
            print >>sys.stderr,"RateManager: calc_and_set_speed_limits: len active workingset",len(workingset)
        # No active file, not need to calculate
        if not workingset:
            return
        globalmaxspeed = self.get_global_max_speed(dir)
        # See if global speed settings are set to unlimited
        if globalmaxspeed == 0:
            # Unlimited speed
            for ds in workingset:
                d = ds.get_download()
                d.set_max_speed(dir,d.get_max_desired_speed(dir))
            return
        if DEBUG:
            print >>sys.stderr,"RateManager: calc_and_set_speed_limits: globalmaxspeed is",globalmaxspeed,dir
        # User set priority is always granted, ignoring global limit
        todoset = []
        for ds in workingset:
            d = ds.get_download()
            maxdesiredspeed = d.get_max_desired_speed(dir)
            if maxdesiredspeed > 0.0:
                d.set_max_speed(dir,maxdesiredspeed)
            else:
                todoset.append(ds)
        if len(todoset) > 0:
            # Rest divides globalmaxspeed equally
            localmaxspeed = globalmaxspeed / float(len(todoset))
            # if too small than user's problem
            if DEBUG:
                print >>sys.stderr,"RateManager: calc_and_set_speed_limits: localmaxspeed is",localmaxspeed,dir
            for ds in todoset:
                d = ds.get_download()
                d.set_max_speed(dir,localmaxspeed)

    def get_global_max_speed(self, dir = UPLOAD):
        """Return the applicable global cap for *dir*, switching to the
        seeding-only upload cap when nothing is downloading."""
        if dir == UPLOAD and len(self.statusmap[DLSTATUS_DOWNLOADING]) == 0 and len(self.statusmap[DLSTATUS_SEEDING]) > 0:
            # Static overall maximum up speed when seeding
            return self.global_max_seedupload_speed
        else:
            return self.global_max_speed[dir]
class UserDefinedMaxAlwaysOtherwiseDividedOnDemandRateManager(UserDefinedMaxAlwaysOtherwiseEquallyDividedRateManager):
    """ This class implements a simple rate management policy that:
    1. If the API user set a desired speed for a particular download,
    the speed limit for this download is set to the desired value.
    2. For all torrents for which no desired speeds have been set,
    the global limit is divided on demand amongst all downloads.
    3. There are separate global limits for download speed, upload speed
    and upload speed when all torrents are seeding.
    TODO: if vod: give all of global limit? Do this at higher level: stop
    all dls when going to VOD
    """
    def __init__(self):
        UserDefinedMaxAlwaysOtherwiseEquallyDividedRateManager.__init__(self)
        self.ROOM = 5.0 # the amount of room in speed underutilizing downloads get
    def calc_and_set_speed_limits(self, dir = UPLOAD):
        """ Recompute and apply per-download speed limits for direction dir.
        Downloads with a user-set desired speed always get it; the global
        limit is divided over the rest, shifting quota from downloads that
        underutilize their share to those running at their limit. """
        if DEBUG:
            print >>sys.stderr,"RateManager: calc_and_set_speed_limits",dir
        if dir == UPLOAD:
            workingset = self.statusmap[DLSTATUS_DOWNLOADING]+self.statusmap[DLSTATUS_SEEDING]
        else:
            workingset = self.statusmap[DLSTATUS_DOWNLOADING]
        if DEBUG:
            print >>sys.stderr,"RateManager: calc_and_set_speed_limits: len workingset",len(workingset)
        # Limit working set to active torrents with connections:
        newws = []
        for ds in workingset:
            if ds.get_num_peers() > 0:
                newws.append(ds)
        workingset = newws
        if DEBUG:
            # NOTE(review): this dump of the working set was previously
            # printed unconditionally; guard it like every other debug print.
            print >>sys.stderr,"RateManager: calc_and_set_speed_limits: len new workingset",len(workingset)
            for ds in workingset:
                d = ds.get_download()
                print >>sys.stderr,"RateManager: calc_and_set_speed_limits: working is",d.get_def().get_name()
        # No active file, no need to calculate
        if not workingset:
            return
        globalmaxspeed = self.get_global_max_speed(dir)
        # See if global speed settings are set to unlimited
        if globalmaxspeed == 0:
            # Unlimited speed: grant each download its own desired maximum
            for ds in workingset:
                d = ds.get_download()
                d.set_max_speed(dir,d.get_max_desired_speed(dir))
            return
        if DEBUG:
            print >>sys.stderr,"RateManager: calc_and_set_speed_limits: globalmaxspeed is",globalmaxspeed,dir
        # User set priority is always granted, ignoring global limit
        todoset = []
        for ds in workingset:
            d = ds.get_download()
            maxdesiredspeed = d.get_max_desired_speed(dir)
            if maxdesiredspeed > 0.0:
                d.set_max_speed(dir,maxdesiredspeed)
            else:
                todoset.append(ds)
        if len(todoset) > 0:
            # Rest divides globalmaxspeed based on their demand
            localmaxspeed = globalmaxspeed / float(len(todoset))
            # if too small then that is the user's problem
            if DEBUG:
                print >>sys.stderr,"RateManager: calc_and_set_speed_limits: localmaxspeed is",localmaxspeed,dir
            # See if underutilizers and overutilizers. If not, just divide equally
            downloadsatmax = False
            downloadsunderutil = False
            for ds in todoset:
                d = ds.get_download()
                currspeed = ds.get_current_speed(dir)
                currmaxspeed = d.get_max_speed(dir)
                newmaxspeed = currspeed+self.ROOM
                if currspeed >= (currmaxspeed-3.0): # dl needs more
                    downloadsatmax = True
                elif newmaxspeed < localmaxspeed: # dl got quota to spare
                    downloadsunderutil = True
            if downloadsatmax and downloadsunderutil:
                totalunused = 0.0
                todoset2 = []
                for ds in todoset:
                    d = ds.get_download()
                    currspeed = ds.get_current_speed(dir)
                    newmaxspeed = currspeed+self.ROOM
                    if newmaxspeed < localmaxspeed:
                        # Underutilizing: collect its unused quota and give it
                        # current speed + ROOM extra so it can still grow
                        totalunused += (localmaxspeed-newmaxspeed)
                        if DEBUG:
                            print >>sys.stderr,"RateManager: calc_and_set_speed_limits: Underutil set to",newmaxspeed
                        d.set_max_speed(dir,newmaxspeed)
                    else:
                        todoset2.append(ds)
                # Divide the unused bandwidth equally amongst others
                if len(todoset2) > 0:
                    pie = float(len(todoset2)) * localmaxspeed + totalunused
                    piece = pie / float(len(todoset2))
                    # BUGFIX: iterate over todoset2 (the overutilizers), not
                    # todoset; the old loop also overwrote the limits that were
                    # just assigned to the underutilizers above.
                    for ds in todoset2:
                        d = ds.get_download()
                        if DEBUG:
                            print >>sys.stderr,"RateManager: calc_and_set_speed_limits: Overutil set to",piece
                        d.set_max_speed(dir,piece)
                else:
                    # what the f? No overutilizers now?
                    print >>sys.stderr,"UserDefinedMaxAlwaysOtherwiseDividedOnDemandRateManager: Internal error: No overutilizers anymore?"
            else:
                # No over and under utilizers, just divide equally
                for ds in todoset:
                    d = ds.get_download()
                    if DEBUG:
                        # BUGFIX: this print referenced the name 'piece', which
                        # is undefined in this branch (NameError); log the
                        # limit that is actually applied.
                        print >>sys.stderr,"RateManager: calc_and_set_speed_limits: Normal set to",localmaxspeed
                    d.set_max_speed(dir,localmaxspeed)
class UserDefinedMaxAlwaysOtherwiseDividedOverActiveSwarmsRateManager(UserDefinedMaxAlwaysOtherwiseEquallyDividedRateManager):
    """ This class implements a simple rate management policy that:
    1. If the API user set a desired speed for a particular download,
    the speed limit for this download is set to the desired value.
    2. For all torrents for which no desired speeds have been set,
    the global limit is divided amongst all downloads that have peers.
    Torrents without user-prefs or peers get a max equal to the global max.
    They'll get throttled again to an equal share in the next iteration
    after peers connect.
    3. There are separate global limits for download speed, upload speed
    and upload speed when all torrents are seeding.
    """
    def __init__(self):
        UserDefinedMaxAlwaysOtherwiseEquallyDividedRateManager.__init__(self)
        self.ROOM = 5.0 # the amount of room in speed underutilizing downloads get
    def calc_and_set_speed_limits(self, dir = UPLOAD):
        """ Recompute and apply per-download speed limits for direction dir.
        The global limit is divided equally over downloads that currently
        have leechers; inactive downloads get a provisional cap so newly
        arriving peers are limited until the next recalculation. """
        if DEBUG:
            print >>sys.stderr,"RateManager: calc_and_set_speed_limits",dir
        if dir == UPLOAD:
            workingset = self.statusmap[DLSTATUS_DOWNLOADING]+self.statusmap[DLSTATUS_SEEDING]
        else:
            workingset = self.statusmap[DLSTATUS_DOWNLOADING]
        if DEBUG:
            print >>sys.stderr,"RateManager: set_lim: len workingset",len(workingset)
        # Limit working set to active torrents with connections:
        newws = []
        inactiveset = []
        for ds in workingset:
            # Arno, 2010-09-16: Don't count any HTTP seeders as leechers.
            if ds.get_num_nonseeds() > 0:
                newws.append(ds)
            else:
                inactiveset.append(ds)
        workingset = newws
        if DEBUG:
            # NOTE(review): this dump of the working set was previously
            # printed unconditionally; guard it like every other debug print.
            print >>sys.stderr,"RateManager: set_lim: len new workingset",len(workingset)
            for ds in workingset:
                d = ds.get_download()
                print >>sys.stderr,"RateManager: set_lim: working is",d.get_def().get_name()
        globalmaxspeed = self.get_global_max_speed(dir)
        if DEBUG:
            # (the old code printed this twice; once is enough)
            print >>sys.stderr,"RateManager: set_lim: globalmaxspeed is",globalmaxspeed,dir
        # See if global speed settings are set to unlimited
        if globalmaxspeed == 0:
            # Unlimited speed: grant each download its own desired maximum
            for ds in workingset:
                d = ds.get_download()
                d.set_max_speed(dir,d.get_max_desired_speed(dir))
            for ds in inactiveset:
                d = ds.get_download()
                d.set_max_speed(dir,d.get_max_desired_speed(dir)) # 0 is default
            return
        # User set priority is always granted, ignoring global limit
        todoset = []
        for ds in workingset:
            d = ds.get_download()
            maxdesiredspeed = d.get_max_desired_speed(dir)
            if maxdesiredspeed > 0.0:
                d.set_max_speed(dir,maxdesiredspeed)
            else:
                todoset.append(ds)
        if len(todoset) > 0:
            # Rest divides globalmaxspeed equally
            localmaxspeed = globalmaxspeed / float(len(todoset))
            # if too small then that is the user's problem
            if DEBUG:
                print >>sys.stderr,"RateManager: set_lim: localmaxspeed is",localmaxspeed,dir
            for ds in todoset:
                d = ds.get_download()
                if DEBUG:
                    print >>sys.stderr,"RateManager: set_lim:",d.get_def().get_name(),"WorkQ",localmaxspeed
                d.set_max_speed(dir,localmaxspeed)
        # For inactives set limit to user desired, with max of globalmaxspeed
        # or to globalmaxspeed. This way the peers have a limit already set
        # when the first peers arrive. The height of the limit will be corrected
        # here a few seconds later (see BaseApp ratelimiter).
        #
        for ds in inactiveset:
            d = ds.get_download()
            desspeed = d.get_max_desired_speed(dir)
            if desspeed == 0:
                setspeed = globalmaxspeed
            else:
                setspeed = min(desspeed,globalmaxspeed)
            if DEBUG:
                print >>sys.stderr,"RateManager: set_lim:",d.get_def().get_name(),"InactQ",setspeed
            d.set_max_speed(dir,setspeed)
|
r"""Command-line tool to validate and pretty-print JSON
Usage::
$ echo '{"json":"obj"}' | python -m json.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -m json.tool
Expecting property name enclosed in double quotes: line 1 column 3 (char 2)
"""
import argparse
import json
import sys
def main():
    """Read JSON from *infile* (or stdin), validate it, and pretty-print
    it to *outfile* (or stdout) with 4-space indentation.

    Exits via SystemExit with the parse error message when the input is
    not valid JSON.
    """
    parser = argparse.ArgumentParser(
        prog='python -m json.tool',
        description=('A simple command line interface for json module '
                     'to validate and pretty-print JSON objects.'))
    parser.add_argument('infile', nargs='?',
                        type=argparse.FileType(encoding="utf-8"),
                        help='a JSON file to be validated or pretty-printed')
    parser.add_argument('outfile', nargs='?',
                        type=argparse.FileType('w', encoding="utf-8"),
                        help='write the output of infile to outfile')
    parser.add_argument('--sort-keys', action='store_true', default=False,
                        help='sort the output of dictionaries alphabetically by key')
    args = parser.parse_args()
    # Parse first, inside its own `with`, so the input handle is closed
    # before any output is produced.
    with (args.infile or sys.stdin) as source:
        try:
            obj = json.load(source)
        except ValueError as err:
            # Invalid JSON: report the decoder's message and exit non-zero.
            raise SystemExit(err)
    with (args.outfile or sys.stdout) as sink:
        json.dump(obj, sink, sort_keys=args.sort_keys, indent=4)
        sink.write('\n')
# When stdout is a pipe whose reader exits early (e.g. `... | head`),
# the final writes raise BrokenPipeError; exit with that errno instead
# of printing a traceback.
if __name__ == '__main__':
    try:
        main()
    except BrokenPipeError as exc:
        sys.exit(exc.errno)
|
from abc import abstractmethod
import string
import traceback
from types import GeneratorType
from contracts import describe_type, describe_value, contract
from contracts.utils import indent
from procgraph import Generator
from procgraph import ModelExecutionError
__all__ = ['IteratorGenerator']
class IteratorGenerator(Generator):
    '''
        Base class for Generator blocks whose output is driven by a
        user-supplied iterator yielding ``(signal, timestamp, value)``
        tuples. Subclasses implement only :py:func:`init_iterator`.
    '''

    @abstractmethod
    @contract(returns=GeneratorType)
    def init_iterator(self):
        """ Must return an iterator yielding signal, timestamp, value """
        pass

    def init(self):
        # Obtain the user's iterator and pre-load the first item so that
        # next_data_status() can be answered immediately.
        self.iterator = self.init_iterator()
        if self.iterator is None:
            msg = 'must return an iterator, got %s' % describe_value(self.iterator)
            raise ValueError(msg)
        self._load_next()

    def _load_next(self):
        """ Advance the iterator, validating and caching the next item in
            next_signal/next_timestamp/next_value; sets has_next to False
            when the iterator is exhausted. """
        try:
            try:
                # BUGFIX: use the next() builtin instead of the Python-2-only
                # iterator.next() method so the block also works on Python 3.
                res = next(self.iterator)
                if not isinstance(res, tuple):
                    msg = 'Expected tuple (signal, timestamp, value), obtained %s' % describe_type(res)
                    raise ValueError(msg)
                if not len(res) == 3:
                    raise ValueError('Required len 3 tuple; obtained %d.' % len(res))
                signal, timestamp, value = res
            except StopIteration:
                # Normal exhaustion; let the outer handler record it.
                raise
            except Exception as e:
                msg = 'Could not call next() on user-given iterator.\n'
                msg += ' iterator: %s\n' % str(self.iterator)
                msg += ' of type: %s\n' % describe_type(self.iterator)
                msg += 'because of this error:\n'
                # BUGFIX: format_exc() takes a `limit` int, not an exception;
                # the old format_exc(e) call abused that parameter. Also use
                # the str.strip() method instead of the removed Py2
                # string.strip() module function.
                msg += indent(('%s\n%s' % (e, traceback.format_exc())).strip(), '| ')
                raise ModelExecutionError(msg, self)
            if not isinstance(signal, (str, int)):
                msg = ('Expected a string or number for the signal, got %s' %
                       describe_value(signal))
                raise ValueError(msg)
            if not isinstance(timestamp, float):
                msg = ('Expected a number for the timestamp, got %s' %
                       describe_value(timestamp))
                raise ValueError(msg)
            self.next_signal = signal
            self.next_timestamp = timestamp
            self.next_value = value
            self.has_next = True
        except StopIteration:
            self.has_next = False

    def next_data_status(self):
        # Tell the scheduler whether more data is pending and when.
        if self.has_next:
            return (True, self.next_timestamp)
        else:
            return (False, None)

    def update(self):
        if not self.has_next:
            return  # XXX: error here?
        self.set_output(self.next_signal,
                        value=self.next_value, timestamp=self.next_timestamp)
        self._load_next()
|
from __future__ import division # support for python2
from threading import Thread, Condition
import concurrent.futures
import logging
try:
from urllib.parse import urlparse
except ImportError: # support for python2
from urlparse import urlparse
from opcua import ua
from opcua.client.ua_client import UaClient
from opcua.common.xmlimporter import XmlImporter
from opcua.common.xmlexporter import XmlExporter
from opcua.common.node import Node
from opcua.common.manage_nodes import delete_nodes
from opcua.common.subscription import Subscription
from opcua.common import utils
from opcua.common import ua_utils
from opcua.crypto import security_policies
from opcua.common.shortcuts import Shortcuts
from opcua.common.structures import load_type_definitions, load_enums
use_crypto = True
try:
from opcua.crypto import uacrypto
except ImportError:
use_crypto = False
_logger = logging.getLogger(__name__)
class KeepAlive(Thread):

    """
    Used by Client to keep the session open.
    OPCUA defines timeout both for sessions and secure channel
    """

    def __init__(self, client, timeout):
        """
        :param client: the Client whose secure channel is renewed
        :param timeout: renewal period in milliseconds; 0 means the server
            advertises no timeout, in which case we renew hourly anyway
        """
        Thread.__init__(self)
        # NOTE: a dead local `_logger = logging.getLogger(__name__)` that
        # shadowed (and never used) the module-level logger was removed here.
        self.client = client
        self._dostop = False
        self._cond = Condition()
        self.timeout = timeout

        # some server support no timeout, but we do not trust them
        if self.timeout == 0:
            self.timeout = 3600000  # 1 hour

    def run(self):
        _logger.debug("starting keepalive thread with period of %s milliseconds", self.timeout)
        server_state = self.client.get_node(ua.FourByteNodeId(ua.ObjectIds.Server_ServerStatus_State))
        while not self._dostop:
            with self._cond:
                self._cond.wait(self.timeout / 1000)
            if self._dostop:
                break
            _logger.debug("renewing channel")
            try:
                self.client.open_secure_channel(renew=True)
            except concurrent.futures.TimeoutError:
                _logger.debug("keepalive failed: timeout on open_secure_channel()")
                break
            # Reading a server variable keeps the session itself alive too.
            val = server_state.get_value()
            _logger.debug("server state is: %s ", val)
        _logger.debug("keepalive thread has stopped")

    def stop(self):
        _logger.debug("stoping keepalive thread")
        self._dostop = True
        with self._cond:
            self._cond.notify_all()
class Client(object):

    """
    High level client to connect to an OPC-UA server.
    This class makes it easy to connect and browse address space.
    It attempts to expose as much functionality as possible
    but if you want more flexibility it is possible and advised to
    use the UaClient object, available as self.uaclient, which offers
    the raw OPC-UA services interface.
    """
    if use_crypto is False:
        logging.getLogger(__name__).warning("cryptography is not installed, use of crypto disabled")

    def __init__(self, url, timeout=4):
        """
        :param url: url of the server.
            if you are unsure of url, write at least hostname
            and port and call get_endpoints
        :param timeout:
            Each request sent to the server expects an answer within this
            time. The timeout is specified in seconds.
        Some other client parameters can be changed by setting
        attributes on the constructed object:
        See the source code for the exhaustive list.
        """
        # NOTE: a dead local `_logger = logging.getLogger(__name__)` that
        # shadowed (and never used) the module-level logger was removed here.
        self.server_url = urlparse(url)
        # take initial username and password from the url
        self._username = self.server_url.username
        self._password = self.server_url.password
        self.name = "Pure Python Client"
        self.description = self.name
        self.application_uri = "urn:freeopcua:client"
        self.product_uri = "urn:freeopcua.github.io:client"
        self.security_policy = ua.SecurityPolicy()
        self.secure_channel_id = None
        # Timeout for the secure channel, in milliseconds.
        self.secure_channel_timeout = 3600000  # 1 hour
        # Timeout for the session, in milliseconds.
        self.session_timeout = 3600000  # 1 hour
        self._policy_ids = []
        self.uaclient = UaClient(timeout)
        self.user_certificate = None
        self.user_private_key = None
        self._server_nonce = None
        self._session_counter = 1
        self.keepalive = None
        self.nodes = Shortcuts(self.uaclient)
        self.max_messagesize = 0  # No limits
        self.max_chunkcount = 0  # No limits

    def __enter__(self):
        self.connect()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.disconnect()

    @staticmethod
    def find_endpoint(endpoints, security_mode, policy_uri):
        """
        Find endpoint with required security mode and policy URI
        """
        for ep in endpoints:
            if (ep.EndpointUrl.startswith(ua.OPC_TCP_SCHEME) and
                    ep.SecurityMode == security_mode and
                    ep.SecurityPolicyUri == policy_uri):
                return ep
        raise ua.UaError("No matching endpoints: {0}, {1}".format(security_mode, policy_uri))

    def set_user(self, username):
        """
        Set user name for the connection.
        initial user from the URL will be overwritten
        """
        self._username = username

    def set_password(self, pwd):
        """
        Set user password for the connection.
        initial password from the URL will be overwritten
        """
        self._password = pwd

    def set_security_string(self, string):
        """
        Set SecureConnection mode. String format:
        Policy,Mode,certificate,private_key[,server_private_key]
        where Policy is Basic128Rsa15, Basic256 or Basic256Sha256,
        Mode is Sign or SignAndEncrypt
        certificate, private_key and server_private_key are
        paths to .pem or .der files
        Call this before connect()
        """
        if not string:
            return
        parts = string.split(',')
        if len(parts) < 4:
            raise ua.UaError('Wrong format: `{0}`, expected at least 4 comma-separated values'.format(string))
        policy_class = getattr(security_policies, 'SecurityPolicy' + parts[0])
        mode = getattr(ua.MessageSecurityMode, parts[1])
        return self.set_security(policy_class, parts[2], parts[3],
                                 parts[4] if len(parts) >= 5 else None, mode)

    def set_security(self, policy, certificate_path, private_key_path,
                     server_certificate_path=None,
                     mode=ua.MessageSecurityMode.SignAndEncrypt):
        """
        Set SecureConnection mode.
        Call this before connect()
        """
        if server_certificate_path is None:
            # load certificate from server's list of endpoints
            endpoints = self.connect_and_get_server_endpoints()
            endpoint = Client.find_endpoint(endpoints, mode, policy.URI)
            server_cert = uacrypto.x509_from_der(endpoint.ServerCertificate)
        else:
            server_cert = uacrypto.load_certificate(server_certificate_path)
        cert = uacrypto.load_certificate(certificate_path)
        pk = uacrypto.load_private_key(private_key_path)
        self.security_policy = policy(server_cert, cert, pk, mode)
        self.uaclient.set_security(self.security_policy)

    def load_client_certificate(self, path):
        """
        load our certificate from file, either pem or der
        """
        self.user_certificate = uacrypto.load_certificate(path)

    def load_private_key(self, path):
        """
        Load user private key. This is used for authenticating using certificate
        """
        self.user_private_key = uacrypto.load_private_key(path)

    def connect_and_get_server_endpoints(self):
        """
        Connect, ask server for endpoints, and disconnect
        """
        self.connect_socket()
        try:
            self.send_hello()
            self.open_secure_channel()
            endpoints = self.get_endpoints()
            self.close_secure_channel()
        finally:
            self.disconnect_socket()
        return endpoints

    def connect_and_find_servers(self):
        """
        Connect, ask server for a list of known servers, and disconnect
        """
        self.connect_socket()
        try:
            self.send_hello()
            self.open_secure_channel()  # spec says it should not be necessary to open channel
            servers = self.find_servers()
            self.close_secure_channel()
        finally:
            self.disconnect_socket()
        return servers

    def connect_and_find_servers_on_network(self):
        """
        Connect, ask server for a list of known servers on network, and disconnect
        """
        self.connect_socket()
        try:
            self.send_hello()
            self.open_secure_channel()
            servers = self.find_servers_on_network()
            self.close_secure_channel()
        finally:
            self.disconnect_socket()
        return servers

    def connect(self):
        """
        High level method
        Connect, create and activate session
        """
        self.connect_socket()
        try:
            self.send_hello()
            self.open_secure_channel()
            try:
                self.create_session()
                try:
                    self.activate_session(username=self._username, password=self._password, certificate=self.user_certificate)
                except Exception:
                    # clean up the session
                    self.close_session()
                    raise
            except Exception:
                # clean up the secure channel
                self.close_secure_channel()
                raise
        except Exception:
            self.disconnect_socket()  # clean up open socket
            raise

    def disconnect(self):
        """
        High level method
        Close session, secure channel and socket
        """
        try:
            self.close_session()
            self.close_secure_channel()
        finally:
            self.disconnect_socket()

    def connect_socket(self):
        """
        connect to socket defined in url
        """
        self.uaclient.connect_socket(self.server_url.hostname, self.server_url.port)

    def disconnect_socket(self):
        self.uaclient.disconnect_socket()

    def send_hello(self):
        """
        Send OPC-UA hello to server
        """
        ack = self.uaclient.send_hello(self.server_url.geturl(), self.max_messagesize, self.max_chunkcount)
        # TODO: Handle ua.UaError
        if isinstance(ack, ua.UaStatusCodeError):
            raise ack

    def open_secure_channel(self, renew=False):
        """
        Open secure channel, if renew is True, renew channel
        """
        params = ua.OpenSecureChannelParameters()
        params.ClientProtocolVersion = 0
        params.RequestType = ua.SecurityTokenRequestType.Issue
        if renew:
            params.RequestType = ua.SecurityTokenRequestType.Renew
        params.SecurityMode = self.security_policy.Mode
        params.RequestedLifetime = self.secure_channel_timeout
        # length should be equal to the length of key of symmetric encryption
        params.ClientNonce = utils.create_nonce(self.security_policy.symmetric_key_size)  # this nonce is used to create a symmetric key
        result = self.uaclient.open_secure_channel(params)
        if self.secure_channel_timeout != result.SecurityToken.RevisedLifetime:
            _logger.warning("Requested secure channel timeout to be %dms, got %dms instead",
                            self.secure_channel_timeout,
                            result.SecurityToken.RevisedLifetime)
            self.secure_channel_timeout = result.SecurityToken.RevisedLifetime

    def close_secure_channel(self):
        return self.uaclient.close_secure_channel()

    def get_endpoints(self):
        params = ua.GetEndpointsParameters()
        params.EndpointUrl = self.server_url.geturl()
        return self.uaclient.get_endpoints(params)

    def find_servers(self, uris=None):
        """
        send a FindServer request to the server. The answer should be a list of
        servers the server knows about
        A list of uris can be provided, only server having matching uris will be returned
        """
        if uris is None:
            uris = []
        params = ua.FindServersParameters()
        params.EndpointUrl = self.server_url.geturl()
        params.ServerUris = uris
        return self.uaclient.find_servers(params)

    def find_servers_on_network(self):
        params = ua.FindServersOnNetworkParameters()
        return self.uaclient.find_servers_on_network(params)

    def create_session(self):
        """
        send a CreateSessionRequest to server with reasonable parameters.
        If you want to modify settings look at code of this method
        and make your own
        """
        desc = ua.ApplicationDescription()
        desc.ApplicationUri = self.application_uri
        desc.ProductUri = self.product_uri
        desc.ApplicationName = ua.LocalizedText(self.name)
        desc.ApplicationType = ua.ApplicationType.Client
        params = ua.CreateSessionParameters()
        # at least 32 random bytes for server to prove possession of private key (specs part 4, 5.6.2.2)
        nonce = utils.create_nonce(32)
        params.ClientNonce = nonce
        params.ClientCertificate = self.security_policy.client_certificate
        params.ClientDescription = desc
        params.EndpointUrl = self.server_url.geturl()
        params.SessionName = self.description + " Session" + str(self._session_counter)
        params.RequestedSessionTimeout = self.session_timeout
        params.MaxResponseMessageSize = 0  # means no max size
        response = self.uaclient.create_session(params)
        if self.security_policy.client_certificate is None:
            data = nonce
        else:
            data = self.security_policy.client_certificate + nonce
        self.security_policy.asymmetric_cryptography.verify(data, response.ServerSignature.Signature)
        self._server_nonce = response.ServerNonce
        if not self.security_policy.server_certificate:
            self.security_policy.server_certificate = response.ServerCertificate
        elif self.security_policy.server_certificate != response.ServerCertificate:
            raise ua.UaError("Server certificate mismatch")
        # remember PolicyId's: we will use them in activate_session()
        ep = Client.find_endpoint(response.ServerEndpoints, self.security_policy.Mode, self.security_policy.URI)
        self._policy_ids = ep.UserIdentityTokens
        if self.session_timeout != response.RevisedSessionTimeout:
            # BUGFIX: this warning logged self.secure_channel_timeout as the
            # requested value; the request actually sent self.session_timeout.
            _logger.warning("Requested session timeout to be %dms, got %dms instead",
                            self.session_timeout,
                            response.RevisedSessionTimeout)
        self.session_timeout = response.RevisedSessionTimeout
        self.keepalive = KeepAlive(
            self, min(self.session_timeout, self.secure_channel_timeout) * 0.7)  # 0.7 is from spec
        self.keepalive.start()
        return response

    def server_policy_id(self, token_type, default):
        """
        Find PolicyId of server's UserTokenPolicy by token_type.
        Return default if there's no matching UserTokenPolicy.
        """
        for policy in self._policy_ids:
            if policy.TokenType == token_type:
                return policy.PolicyId
        return default

    def server_policy_uri(self, token_type):
        """
        Find SecurityPolicyUri of server's UserTokenPolicy by token_type.
        If SecurityPolicyUri is empty, use default SecurityPolicyUri
        of the endpoint
        """
        for policy in self._policy_ids:
            if policy.TokenType == token_type:
                if policy.SecurityPolicyUri:
                    return policy.SecurityPolicyUri
                else:   # empty URI means "use this endpoint's policy URI"
                    return self.security_policy.URI
        return self.security_policy.URI

    def activate_session(self, username=None, password=None, certificate=None):
        """
        Activate session using either username and password or private_key
        """
        params = ua.ActivateSessionParameters()
        challenge = b""
        if self.security_policy.server_certificate is not None:
            challenge += self.security_policy.server_certificate
        if self._server_nonce is not None:
            challenge += self._server_nonce
        if self.security_policy.AsymmetricSignatureURI:
            params.ClientSignature.Algorithm = (
                self.security_policy.AsymmetricSignatureURI
            )
        else:
            params.ClientSignature.Algorithm = (
                security_policies.SecurityPolicyBasic256.AsymmetricSignatureURI
            )
        params.ClientSignature.Signature = self.security_policy.asymmetric_cryptography.signature(challenge)
        params.LocaleIds.append("en")
        if not username and not certificate:
            self._add_anonymous_auth(params)
        elif certificate:
            self._add_certificate_auth(params, certificate, challenge)
        else:
            self._add_user_auth(params, username, password)
        return self.uaclient.activate_session(params)

    def _add_anonymous_auth(self, params):
        params.UserIdentityToken = ua.AnonymousIdentityToken()
        params.UserIdentityToken.PolicyId = self.server_policy_id(ua.UserTokenType.Anonymous, "anonymous")

    def _add_certificate_auth(self, params, certificate, challenge):
        params.UserIdentityToken = ua.X509IdentityToken()
        params.UserIdentityToken.CertificateData = uacrypto.der_from_x509(certificate)
        # specs part 4, 5.6.3.1: the data to sign is created by appending
        # the last serverNonce to the serverCertificate
        params.UserTokenSignature = ua.SignatureData()
        if certificate.signature_hash_algorithm.name == "sha256":
            params.UserIdentityToken.PolicyId = self.server_policy_id(ua.UserTokenType.Certificate, "certificate_basic256sha256")
            sig = uacrypto.sign_sha256(self.user_private_key, challenge)
            params.UserTokenSignature.Algorithm = "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256"
            params.UserTokenSignature.Signature = sig
        else:
            params.UserIdentityToken.PolicyId = self.server_policy_id(ua.UserTokenType.Certificate, "certificate_basic256")
            sig = uacrypto.sign_sha1(self.user_private_key, challenge)
            params.UserTokenSignature.Algorithm = "http://www.w3.org/2000/09/xmldsig#rsa-sha1"
            params.UserTokenSignature.Signature = sig

    def _add_user_auth(self, params, username, password):
        params.UserIdentityToken = ua.UserNameIdentityToken()
        params.UserIdentityToken.UserName = username
        policy_uri = self.server_policy_uri(ua.UserTokenType.UserName)
        # NOTE(review): the branches below test self._password while encoding
        # the `password` parameter; these are the same value when called via
        # connect(), but differ if activate_session() is called directly with
        # an explicit password — confirm intent before changing.
        if not policy_uri or policy_uri == security_policies.POLICY_NONE_URI:
            # see specs part 4, 7.36.3: if the token is NOT encrypted,
            # then the password only contains UTF-8 encoded password
            # and EncryptionAlgorithm is null
            if self._password:
                _logger.warning("Sending plain-text password")
                params.UserIdentityToken.Password = password.encode('utf8')
            params.UserIdentityToken.EncryptionAlgorithm = None
        elif self._password:
            data, uri = self._encrypt_password(password, policy_uri)
            params.UserIdentityToken.Password = data
            params.UserIdentityToken.EncryptionAlgorithm = uri
        params.UserIdentityToken.PolicyId = self.server_policy_id(ua.UserTokenType.UserName, "username_basic256")

    def _encrypt_password(self, password, policy_uri):
        pubkey = uacrypto.x509_from_der(self.security_policy.server_certificate).public_key()
        # see specs part 4, 7.36.3: if the token is encrypted, password
        # shall be converted to UTF-8 and serialized with server nonce
        passwd = password.encode("utf8")
        if self._server_nonce is not None:
            passwd += self._server_nonce
        etoken = ua.ua_binary.Primitives.Bytes.pack(passwd)
        data, uri = security_policies.encrypt_asymmetric(pubkey, etoken, policy_uri)
        return data, uri

    def close_session(self):
        """
        Close session
        """
        if self.keepalive and self.keepalive.is_alive():
            self.keepalive.stop()
            self.keepalive.join()
        return self.uaclient.close_session(True)

    def get_root_node(self):
        return self.get_node(ua.TwoByteNodeId(ua.ObjectIds.RootFolder))

    def get_objects_node(self):
        return self.get_node(ua.TwoByteNodeId(ua.ObjectIds.ObjectsFolder))

    def get_server_node(self):
        return self.get_node(ua.FourByteNodeId(ua.ObjectIds.Server))

    def get_node(self, nodeid):
        """
        Get node using NodeId object or a string representing a NodeId
        """
        return Node(self.uaclient, nodeid)

    def create_subscription(self, period, handler):
        """
        Create a subscription.
        returns a Subscription object which allow
        to subscribe to events or data on server
        handler argument is a class with data_change and/or event methods.
        period argument is either a publishing interval in milliseconds or a
        CreateSubscriptionParameters instance. The second option should be used,
        if the opcua-server has problems with the default options.
        These methods will be called when notfication from server are received.
        See example-client.py.
        Do not do expensive/slow or network operation from these methods
        since they are called directly from receiving thread. This is a design choice,
        start another thread if you need to do such a thing.
        """
        if isinstance(period, ua.CreateSubscriptionParameters):
            return Subscription(self.uaclient, period, handler)
        params = ua.CreateSubscriptionParameters()
        params.RequestedPublishingInterval = period
        params.RequestedLifetimeCount = 10000
        params.RequestedMaxKeepAliveCount = 3000
        params.MaxNotificationsPerPublish = 10000
        params.PublishingEnabled = True
        params.Priority = 0
        return Subscription(self.uaclient, params, handler)

    def reconciliate_subscription(self, subscription):
        """
        Reconciliate the server state with the client
        """
        node = self.get_node(
            ua.FourByteNodeId(ua.ObjectIds.Server)
        )
        # returns server and client handles
        monitored_items = node.call_method(
            ua.uatypes.QualifiedName("GetMonitoredItems"),
            ua.Variant(subscription.subscription_id, ua.VariantType.UInt32)
        )
        return subscription.reconciliate(monitored_items)

    def get_namespace_array(self):
        ns_node = self.get_node(ua.NodeId(ua.ObjectIds.Server_NamespaceArray))
        return ns_node.get_value()

    def get_namespace_index(self, uri):
        uries = self.get_namespace_array()
        return uries.index(uri)

    def delete_nodes(self, nodes, recursive=False):
        return delete_nodes(self.uaclient, nodes, recursive)

    def import_xml(self, path=None, xmlstring=None):
        """
        Import nodes defined in xml
        """
        importer = XmlImporter(self)
        return importer.import_xml(path, xmlstring)

    def export_xml(self, nodes, path):
        """
        Export defined nodes to xml
        """
        exp = XmlExporter(self)
        exp.build_etree(nodes)
        return exp.write_xml(path)

    def register_namespace(self, uri):
        """
        Register a new namespace. Nodes should in custom namespace, not 0.
        This method is mainly implemented for symetry with server
        """
        ns_node = self.get_node(ua.NodeId(ua.ObjectIds.Server_NamespaceArray))
        uries = ns_node.get_value()
        if uri in uries:
            return uries.index(uri)
        uries.append(uri)
        ns_node.set_value(uries)
        return len(uries) - 1

    def load_type_definitions(self, nodes=None):
        """
        Load custom types (custom structures/extension objects) definition from server
        Generate Python classes for custom structures/extension objects defined in server
        These classes will available in ua module
        """
        return load_type_definitions(self, nodes)

    def load_enums(self):
        """
        generate Python enums for custom enums on server.
        This enums will be available in ua module
        """
        return load_enums(self)

    def register_nodes(self, nodes):
        """
        Register nodes for faster read and write access (if supported by server)
        Rmw: This call modifies the nodeid of the nodes, the original nodeid is
        available as node.basenodeid
        """
        nodeids = [node.nodeid for node in nodes]
        nodeids = self.uaclient.register_nodes(nodeids)
        for node, nodeid in zip(nodes, nodeids):
            node.basenodeid = node.nodeid
            node.nodeid = nodeid
        return nodes

    def unregister_nodes(self, nodes):
        """
        Unregister nodes
        """
        nodeids = [node.nodeid for node in nodes]
        self.uaclient.unregister_nodes(nodeids)
        for node in nodes:
            node.nodeid = node.basenodeid
            node.basenodeid = None

    def get_values(self, nodes):
        """
        Read the value of multiple nodes in one roundtrip.
        """
        nodes = [node.nodeid for node in nodes]
        results = self.uaclient.get_attributes(nodes, ua.AttributeIds.Value)
        return [result.Value.Value for result in results]

    def set_values(self, nodes, values):
        """
        Write values to multiple nodes in one ua call
        """
        nodeids = [node.nodeid for node in nodes]
        dvs = [ua_utils.value_to_datavalue(val) for val in values]
        results = self.uaclient.set_attributes(nodeids, dvs, ua.AttributeIds.Value)
        for result in results:
            result.check()
|
"""
This script uses a matrix to store STAGES number of short samples
always renewed by recording a source sound with MatrixRecLoop.
A metronomic random playback choose a sample to play between the
first and RND_LEVEL.
"""
from pyo import *
s = Server(duplex=0).boot()
SIZE = 8192
STAGES = 32
RND_LEVEL = 4 # 1 -> STAGES
GATE = 100
period = SIZE / s.getSamplingRate()
env = CosTable([(0, 0), (300, 1), (1000, 0.4), (8191, 0)])
matrix = NewMatrix(SIZE, STAGES)
src = SfPlayer("../snds/baseballmajeur_m.aif", speed=1, loop=True, mul=0.3)
m_rec = MatrixRecLoop(src, matrix)
metro = Metro(time=period / 2, poly=2).play()
trig = Percent(metro, GATE)
x = TrigLinseg(trig, [(0, 0), (period, 1)])
y = TrigRandInt(trig, max=RND_LEVEL, mul=1.0 / STAGES)
amp = TrigEnv(trig, table=env, dur=period)
out = MatrixPointer(matrix, x, y, amp).out()
s.gui(locals())
|
import argparse
from argparse import RawDescriptionHelpFormatter
import logging
import sys
import re
import mclib
def getOptions():
    """Parse the command line arguments and return the populated namespace."""
    description = """ This script takes a VCF 4.2 file pulls out snps/indels and depth into a csv """
    parser = argparse.ArgumentParser(description=description, formatter_class=RawDescriptionHelpFormatter)
    input_group = parser.add_argument_group(description="Input Files")
    input_group.add_argument("--vcf", dest="vname", action='store', required=False, help="Name of the uncompressed VCF file v4.2. [stdin]")
    output_group = parser.add_argument_group(description="Output Files")
    output_group.add_argument("-o", dest="oname", action='store', required=False, help="Name of output csv. [stdout]")
    output_group.add_argument("--log", dest="log", action='store', required=False, help="Name of the LOG file [stderr]")
    parser.add_argument("--debug", dest="debug", action='store_true', required=False, help="Enable debug output.")
    return parser.parse_args()
def inputOutput(args):
    """Open the input and output streams.

    Uses the filenames from the command line when given, otherwise falls
    back to stdin/stdout.

    Arguments:
    ----------
    args (obj): command line arguments.

    Returns:
    --------
    FH : input file handle
    OUT : output file handle
    """
    if args.vname:
        logger.info('Reading file %s' % args.vname)
        FH = open(args.vname, 'r')
    else:
        logger.info('Reading STDIN')
        FH = sys.stdin
    if args.oname:
        # Bug fix: this previously logged args.vname (the input file name).
        logger.info('Writing to %s' % args.oname)
        OUT = open(args.oname, 'w')
    else:
        logger.info('Writing to STDOUT')
        OUT = sys.stdout
    return FH, OUT
def parseLine(line):
    """Split a VCF v4.2 data row into its parts.

    Returns (chrom, pos, refClean, altClean, infoDict) where refClean and
    altClean join multiple alleles with '|' (so they don't clash with the
    csv separator) and infoDict maps INFO keys to ints, strings or lists.
    """
    # The 10 tab-separated VCF columns; renamed so no builtins (id, filter,
    # format) are shadowed.
    chrom, pos, var_id, ref, alt, qual, filt, info, fmt, sample = line.rstrip().split('\t')
    # split info into dictionary
    infoDict = dict()
    for item in info.split(';'):
        if item == 'INDEL':
            # Flag-style key: INDEL has no '=value' part, so fake one.
            key = 'INDEL'
            value = '1'
        else:
            # Split up based on '='
            key, value = item.split('=')
        # If value is a list then split it up. Figure out if
        # these are numbers or strings (narrowed from a bare except).
        if ',' in value:
            try:
                val = [int(x) for x in value.split(',')]
            except ValueError:
                val = value.split(',')
        else:
            try:
                val = int(value)
            except ValueError:
                val = value
        infoDict[key] = val
    # Sometimes ref and alt have multiple alleles, split them into a list
    refList = ref.split(',')
    altList = alt.split(',')
    # Samtools includes the value <X> for all SNPs and it includes a count in
    # the DPR field. I want to remove these.
    if '<X>' in altList:
        altList.remove('<X>')
        infoDict['DPR'].pop()
    # Merge ref and alt back together, this time using a | as separator so I
    # don't interfere with the csv format
    refClean = '|'.join(refList)
    altClean = '|'.join(altList)
    return chrom, pos, refClean, altClean, infoDict
def main(args):
    """Convert the VCF stream into a csv of per-variant allele counts."""
    # Get Input/Output file handlers
    FH, OUT = inputOutput(args)
    header = ','.join(['chrom', 'pos', 'ref', 'alt', 'totalCount', 'refCount', 'altCount', 'flagIndel']) + '\n'
    OUT.write(header)
    # Make sure the input vcf file is v4.2
    logger.info('Checking VCF version')
    # next() works on both Python 2 and 3 iterators; FH.next() was py2-only.
    line1 = next(FH)
    vcfVersion = line1.split('=')[1].rstrip()
    if vcfVersion != 'VCFv4.2':
        logger.error('This script was written only for vcf version 4.2')
        sys.exit(1)
    else:
        logger.info('Your VCF is version 4.2')
    # Iterate over lines and grab useful info
    logger.info('Parsing VCF file')
    for line in FH:
        # Skip header lines
        if line.startswith('#'):
            continue
        # split the line into its parts
        chrom, pos, ref, alt, infoDict = parseLine(line)
        # DPR lists read depth for the ref allele first, then each alt allele.
        refCount = infoDict['DPR'][0]
        altCount = '|'.join([str(x) for x in infoDict['DPR'][1:]])
        totalCount = sum(infoDict['DPR'])
        flagIndel = 1 if 'INDEL' in infoDict else 0
        # Write output as csv; skip rows where no alt allele remains.
        if alt:
            myOut = ','.join([str(x) for x in [chrom, pos, ref, alt, totalCount, refCount, altCount, flagIndel]]) + '\n'
            OUT.write(myOut)
    FH.close()
    OUT.close()
if __name__ == '__main__':
    # Parse command line options.
    args = getOptions()
    # Turn on logging.  NOTE: `logger` is intentionally a module-level global;
    # inputOutput() and main() use it directly.
    logger = logging.getLogger()
    if args.debug:
        mclib.logger.setLogger(logger, args.log, 'debug')
    else:
        mclib.logger.setLogger(logger, args.log)
    # Run Main part of the script
    main(args)
    logger.info("Script complete.")
|
from flask import Flask, request, jsonify, Response, send_from_directory
from flask_restful import Resource, Api
import petl, json, logging, io, sys, os, csv
# Map of supported file extensions to the petl loader used to parse them.
type_supported = {'csv':petl.io.csv.fromcsv,
                  'tsv':petl.io.csv.fromtsv,
                  'txt':petl.io.text.fromtext,
                  'xml':petl.io.xml.fromxml,
                  'json':petl.io.json.fromjson,
                  'xls':petl.io.xls.fromxls,
                  'xlsx':petl.io.xlsx.fromxlsx,
                  }
# Processing "blades" registered at app creation, grouped by stage:
# 'pre' blades transform the whole petl table, 'post' blades each row dict.
blades = {'pre':list(),
          'post':list()}
def create_app():
    """Build and configure the Flask app, loading blades from the config file."""
    app = Flask(__name__)
    api = Api(app, catch_all_404s=True)
    logging.basicConfig(filename='exginsu.log', level=logging.DEBUG)
    @app.route('/')
    def index():
        # Simple liveness endpoint.
        return jsonify({'status': 200, 'success':True, 'message':'Hello. My name is Ginsu!'})
    @app.route('/process/', methods = ['POST'])
    def process_file():
        # Expected request body: {'url':"", 'type':"", 'meta': ...}
        data = json.loads(request.data.decode('utf-8'))
        url = data.get('url', None)
        ftype = data.get("type", None)
        meta = data.get("meta", None)
        #Is url valid?
        if ftype not in type_supported.keys():
            return jsonify(dict(status=400, message='file type not supported'))
        #Grab file from url (petl loaders accept URLs as sources)
        table = type_supported[ftype](url)
        #Process PETL specific blades here
        for b in blades['pre']:
            table = b.run(table, meta)
        def generate():
            # Stream one JSON object per row (newline-delimited JSON).
            #columns = petl.util.base.header(table)
            data = petl.convertnumbers(table)
            for row in petl.util.base.dicts(data):
                #Process blades here
                for b in blades['post']:
                    row = b.run(row, meta)
                yield json.dumps(row)+'\n'
        return Response(generate(), mimetype='application/json')
    #api.add_resource(treeView.dataTree, '/<tree_name>/')
    # NOTE(review): this resolves relative to the CWD — presumably intended to
    # find ../config/config.py next to the package; confirm against deployment.
    app.config.from_pyfile(os.path.dirname('../config/config.py')+'/../config/config.py')
    #Load Blades: import each configured module, instantiate the named class,
    #and register it under the stage ('pre'/'post') it reports via getStage().
    for blade in app.config['BLADES']:
        b = __import__('blades.'+blade['path'], fromlist=[blade['name']])
        active_blade = getattr(b, blade['name'])()
        blades[active_blade.getStage()].append(active_blade)
    #@app.before_request
    # def write_access_log():
    #     return 'Path: '+request.path
    return app
|
"""netfilter stats for TSDB. This collector exposes metrics from /proc/sys/net/ipv4/netfilter/*.
Note that the plugin also collects the setting values from this directory, as it makes
it possible to monitor for incorrect settings, and also gives access to the value of
these for non-root users."""
import sys
import time
import re
import os
from collectors.lib import utils
interval = 15  # seconds between collection passes
# Files under /proc/sys/net/ipv4/netfilter read on every pass.  The list mixes
# counters (e.g. ip_conntrack_count) with settings (e.g. the *_timeout_* knobs);
# settings are collected on purpose — see the module docstring.
STATS = ("ip_conntrack_buckets", "ip_conntrack_checksum", "ip_conntrack_count",
         "ip_conntrack_generic_timeout", "ip_conntrack_icmp_timeout",
         "ip_conntrack_log_invalid", "ip_conntrack_max", "ip_conntrack_tcp_be_liberal",
         "ip_conntrack_tcp_loose", "ip_conntrack_tcp_max_retrans",
         "ip_conntrack_tcp_timeout_close", "ip_conntrack_tcp_timeout_close_wait",
         "ip_conntrack_tcp_timeout_established", "ip_conntrack_tcp_timeout_fin_wait",
         "ip_conntrack_tcp_timeout_last_ack", "ip_conntrack_tcp_timeout_max_retrans",
         "ip_conntrack_tcp_timeout_syn_recv", "ip_conntrack_tcp_timeout_syn_sent",
         "ip_conntrack_tcp_timeout_time_wait", "ip_conntrack_udp_timeout",
         "ip_conntrack_udp_timeout_stream")
basedir = "/proc/sys/net/ipv4/netfilter"
def main():
    """netfilter main loop: print one metric line per stat every `interval` s."""
    utils.drop_privileges()
    if os.path.isdir(basedir):
        while True:
            ts = int(time.time())
            for s in STATS:
                try:
                    # `with` guarantees the fd is closed even if readline fails.
                    with open(basedir + "/" + s, 'r') as f:
                        value = f.readline().rstrip()
                    print("proc.sys.net.ipv4.netfilter.%s %d %s" % (s, ts, value))
                except (IOError, OSError):
                    # brute'ish, but should keep the collector reasonably future
                    # proof if some of the stats disappear between kernel module
                    # versions
                    continue
            sys.stdout.flush()
            time.sleep(interval)
    else:
        # Bug fix: the message had a %s placeholder but no argument supplied.
        print("%s does not exist - ip_conntrack probably missing" % basedir)
        sys.exit(13)  # we signal tcollector to not run us
if __name__ == "__main__":
sys.exit(main())
|
r"""This module implements IO and manipulation function for discrete trajectories
Discrete trajectories are generally ndarrays of type integer
We store them either as single column ascii files or as ndarrays of shape (n,) in binary .npy format.
.. moduleauthor:: B. Trendelkamp-Schroer <benjamin DOT trendelkamp-schroer AT fu-berlin DOT de>
.. moduleauthor:: F. Noe <frank DOT noe AT fu-berlin DOT de>
"""
import numpy as np
from pyemma.util.types import ensure_dtraj_list as _ensure_dtraj_list
from pyemma.util.annotators import shortcut
__author__ = 'noe'
@shortcut('read_dtraj')
def read_discrete_trajectory(filename):
    """Read a discrete trajectory from an ascii file.

    Parameters
    ----------
    filename : str
        Path (absolute or relative) to a single-column ascii file of
        integer entries.

    Returns
    -------
    dtraj : (M, ) ndarray
        Discrete state trajectory.
    """
    with open(filename, "r") as fh:
        content = fh.read()
    # NOTE(review): np.fromstring is deprecated for text parsing; kept here
    # for byte-identical behavior with the original implementation.
    return np.fromstring(content, dtype=int, sep="\n")
@shortcut('write_dtraj')
def write_discrete_trajectory(filename, dtraj):
    r"""Write a discrete trajectory to a single-column ascii file.

    Parameters
    ----------
    filename : str
        Path (absolute or relative) of the output file.
    dtraj : array-like
        Discrete state trajectory; written one integer per line.
    """
    arr = np.asarray(dtraj)
    with open(filename, 'w') as fh:
        arr.tofile(fh, sep='\n', format='%d')
@shortcut('load_dtraj')
def load_discrete_trajectory(filename):
    r"""Read a discrete trajectory from a binary .npy file.

    Parameters
    ----------
    filename : str
        Path (absolute or relative) of a one dimensional integer array
        stored in numpy .npy format.

    Returns
    -------
    dtraj : (M,) ndarray
        Discrete state trajectory.
    """
    return np.load(filename)
@shortcut('save_dtraj')
def save_discrete_trajectory(filename, dtraj):
    r"""Write a discrete trajectory to a binary .npy file.

    Parameters
    ----------
    filename : str
        Path (absolute or relative) of the output file.
    dtraj : array-like
        Discrete state trajectory, stored as an ndarray of integers.
    """
    np.save(filename, np.asarray(dtraj))
@shortcut('histogram')
def count_states(dtrajs, ignore_negative=False):
    r"""Histogram of state occurrences over one or more trajectories.

    Parameters
    ----------
    dtrajs : array_like or list of array_like
        Discretized trajectory or list of discretized trajectories
    ignore_negative : bool, default=False
        Ignore negative elements. By default, a negative element will
        cause an exception.

    Returns
    -------
    count : ndarray((n), dtype=int)
        Occurrences of each state; n = max + 1 with max the largest
        state index found.
    """
    dtrajs = _ensure_dtraj_list(dtrajs)
    # One bincount per trajectory; they may have different lengths.
    per_traj = []
    for traj in dtrajs:
        if ignore_negative:
            traj = traj[np.where(traj >= 0)]
        per_traj.append(np.bincount(traj))
    longest = max((bc.shape[0] for bc in per_traj), default=0)
    # Sum the individual bincounts into one array of the maximal length.
    total = np.zeros(longest, dtype=int)
    for bc in per_traj:
        total[:bc.shape[0]] += bc
    return total
def visited_set(dtrajs):
    r"""Return the set of states that have at least one count.

    Parameters
    ----------
    dtrajs : array_like or list of array_like
        Discretized trajectory or list of discretized trajectories

    Returns
    -------
    vis : ndarray((n), dtype=int)
        The states with at least one occurrence.
    """
    counts = count_states(dtrajs)
    # flatnonzero on a 1-D mask is equivalent to argwhere(...)[:, 0].
    return np.flatnonzero(counts > 0)
@shortcut('nstates')
def number_of_states(dtrajs, only_used = False):
    r"""Return the number of states in the given trajectories.

    Parameters
    ----------
    dtrajs : array_like or list of array_like
        Discretized trajectory or list of discretized trajectories
    only_used : boolean, default=False
        If False, returns max + 1 where max is the largest index used.
        If True, returns the number of states occurring at least once.
    """
    dtrajs = _ensure_dtraj_list(dtrajs)
    if only_used:
        # Count the nonzero entries of the state histogram.
        return np.count_nonzero(count_states(dtrajs))
    # All states wanted, including unpopulated ones: largest index + 1.
    # The leading 0 reproduces the original floor of imax = 0.
    largest = max([0] + [np.max(traj) for traj in dtrajs])
    return largest + 1
def index_states(dtrajs, subset=None):
    """Generates a trajectory/time indexes for the given list of states
    Parameters
    ----------
    dtraj : array_like or list of array_like
        Discretized trajectory or list of discretized trajectories. Negative elements will be ignored
    subset : ndarray((n)), optional, default = None
        array of states to be indexed. By default all states in dtrajs will be used
    Returns
    -------
    indexes : list of ndarray( (N_i, 2) )
        For each state, all trajectory and time indexes where this state occurs.
        Each matrix has a number of rows equal to the number of occurrences of the corresponding state,
        with rows consisting of a tuple (i, t), where i is the index of the trajectory and t is the time index
        within the trajectory.
    """
    # check input
    dtrajs = _ensure_dtraj_list(dtrajs)
    # select subset unless given
    n = number_of_states(dtrajs)
    if subset is None:
        subset = np.arange(n)
    else:
        if np.max(subset) >= n:
            raise ValueError('Selected subset is not a subset of the states in dtrajs.')
    # histogram states — gives the exact row count to pre-allocate per state
    hist = count_states(dtrajs, ignore_negative=True)
    # boolean mask: is_requested[s] says whether state s is in the subset
    is_requested = np.ndarray((n), dtype=bool)
    is_requested[:] = False
    is_requested[subset] = True
    # maps a full state index to its position within `subset`
    full2states = np.zeros((n), dtype=int)
    full2states[subset] = range(len(subset))
    # initialize results: one pre-sized (hist[s], 2) array per requested state,
    # plus a per-state fill cursor in `counts`
    res = np.ndarray(len(subset), dtype=object)
    counts = np.zeros((len(subset)), dtype=int)
    for i,s in enumerate(subset):
        res[i] = np.zeros((hist[s],2), dtype=int)
    # walk through trajectories and remember requested state indexes
    for i,dtraj in enumerate(dtrajs):
        for t,s in enumerate(dtraj):
            # only index nonnegative state indexes
            if s >= 0 and is_requested[s]:
                k = full2states[s]
                res[k][counts[k],0] = i
                res[k][counts[k],1] = t
                counts[k] += 1
    return res
def sample_indexes_by_sequence(indexes, sequence):
    """Draw one (trajectory, time) pair for every state in `sequence`.

    Parameters
    ----------
    indexes : list of ndarray( (N_i, 2) )
        Per-state arrays of (trajectory index, time index) rows, one array
        per state, as produced by index_states().
    sequence : array of integers
        Discrete states to sample for, in order.

    Returns
    -------
    indexes : ndarray( (N, 2) )
        One sampled (trajectory, time) row per element of `sequence`,
        N = len(sequence).
    """
    n_draws = len(sequence)
    out = np.zeros((n_draws, 2), dtype=int)
    for pos, state in enumerate(sequence):
        # uniform pick among all recorded occurrences of this state
        row = np.random.randint(indexes[state].shape[0])
        out[pos, :] = indexes[state][row, :]
    return out
def sample_indexes_by_state(indexes, nsample, subset=None, replace=True):
    """Draw `nsample` (trajectory, time) pairs for each requested state.

    Parameters
    ----------
    indexes : list of ndarray( (N_i, 2) )
        Per-state arrays of (trajectory index, time index) rows, as
        produced by index_states().
    nsample : int
        Samples per state. With replace=False fewer samples may be
        returned when a state has fewer than nsample occurrences.
    subset : ndarray((n)), optional, default = None
        States to sample for; all states by default.
    replace : boolean, optional
        Whether sampling is with or without replacement.

    Returns
    -------
    indexes : list of ndarray( (N, 2) )
        Sampled (trajectory, time) rows, one array per requested state.
    """
    total_states = len(indexes)
    if subset is None:
        subset = np.arange(total_states)
    out = np.ndarray(len(subset), dtype=object)
    for pos, state in enumerate(subset):
        available = indexes[state].shape[0]
        if available == 0:
            # state never occurs: return an empty index array for it
            out[pos] = np.zeros((0, 2), dtype=int)
            continue
        if replace:
            chosen = np.random.choice(available, nsample, replace=True)
        else:
            chosen = np.random.choice(available, min(available, nsample), replace=False)
        out[pos] = indexes[state][chosen, :]
    return out
def sample_indexes_by_distribution(indexes, distributions, nsample):
    """Sample (trajectory, time) pairs according to state distributions.

    Parameters
    ----------
    indexes : list of ndarray( (N_i, 2) )
        Per-state arrays of (trajectory index, time index) rows, as
        produced by index_states().
    distributions : list or array of ndarray ( (n) )
        m distributions over states; each of length n, summing to 1.0.
    nsample : int
        Number of samples per distribution.

    Returns
    -------
    indexes : length m list of ndarray( (nsample, 2) )
        Sampled (trajectory, time) rows, one array per distribution.
    """
    nstates = len(indexes)
    # Validate all distributions before doing any sampling.
    for dist in distributions:
        if len(dist) != nstates:
            raise ValueError('Size error: Distributions must all be of length n (number of states).')
    out = np.ndarray(len(distributions), dtype=object)
    for pos, dist in enumerate(distributions):
        # First draw a state sequence from the distribution, then sample
        # concrete occurrences for that sequence.
        drawn_states = np.random.choice(nstates, size=nsample, p=dist)
        out[pos] = sample_indexes_by_sequence(indexes, drawn_states)
    return out
|
import sys
from optparse import OptionParser
from xml.dom import minidom
import codecs
from androguard.core import androconf
from androguard.core.bytecodes import apk
# optparse option specs; 'name' holds the flag strings and is split off
# before the remaining keys are passed to parser.add_option as kwargs.
option_0 = { 'name' : ('-i', '--input'), 'help' : 'filename input (APK or android\'s binary xml)', 'nargs' : 1 }
option_1 = { 'name' : ('-o', '--output'), 'help' : 'filename output of the xml', 'nargs' : 1 }
option_2 = { 'name' : ('-v', '--version'), 'help' : 'version of the API', 'action' : 'count' }
options = [option_0, option_1, option_2]
def main(options, arguments) :
    """Dump an APK's AndroidManifest (or a stand-alone binary AXML file) as
    pretty-printed XML, to a file or stdout.  (Python 2 code.)"""
    if options.input != None :
        buff = ""
        ret_type = androconf.is_android(options.input)
        if ret_type == "APK":
            # Full APK: extract and pretty-print its AndroidManifest.xml.
            a = apk.APK(options.input)
            print a.get_android_manifest_xml()
            buff = a.get_android_manifest_xml().toprettyxml(encoding="utf-8")
            a.get_activities()
        elif ".xml" in options.input:
            # Stand-alone binary XML: decode it with AXMLPrinter first.
            ap = apk.AXMLPrinter(open(options.input, "rb").read())
            buff = minidom.parseString(ap.get_buff()).toprettyxml(encoding="utf-8")
        else:
            print "Unknown file type"
            return
        if options.output != None :
            # Write the decoded XML to the requested output file as utf-8.
            fd = codecs.open(options.output, "w", "utf-8")
            fd.write( buff )
            fd.close()
        else :
            print buff
    elif options.version != None :
        print "Androaxml version %s" % androconf.ANDROGUARD_VERSION
if __name__ == "__main__" :
parser = OptionParser()
for option in options :
param = option['name']
del option['name']
parser.add_option(*param, **option)
options, arguments = parser.parse_args()
sys.argv[:] = arguments
main(options, arguments)
|
import sys
try:
    # Prefer the standalone r_core binding when it is on the path...
    from r_core import RCore
except:
    # ...otherwise fall back to the packaged r2 module layout.
    from r2.r_core import RCore
core = RCore()
path="/tmp/fatmach0-3true"
# Inspect the fat Mach-O test binary through the bin loader.
core.bin.load (path, 0, 0, 0, 0, 0)
print ("Supported archs: %d"%core.bin.narch)
if core.bin.narch>1:
    # Enumerate every architecture slice contained in the fat binary.
    for i in range (0,core.bin.narch):
        core.bin.select_idx (i)
        info = core.bin.get_info ()
        if info:
            print ("%d: %s %s"%(i,info.arch,info.bits))
core.config.set ("asm.arch", "x86");
core.config.set ("asm.bits", "32");
# Re-open the file through the core so r2 commands can operate on it.
f = core.file_open(path, False, 0)
core.bin_load ("", 0)
print ("33+3 = ",core.num.math("33+3"));
print ("Entrypoint : 0x%x"%(core.num.get ("entry0")))
# Disassemble 12 instructions at the entrypoint.
print (core.cmd_str ("pd 12 @ entry0"))
|
from dataclasses import dataclass
from enum import Enum
from typing import List
from dcs.drawing.drawing import Drawing, LineStyle
from dcs.mapping import Point
class LineMode(Enum):
    # String values are what gets serialized into the mission file
    # (see LineDrawing.dict, which stores .value under "lineMode").
    Segment = "segment"
    Segments = "segments"
    Free = "free"
@dataclass
class LineDrawing(Drawing):
    """A polyline drawing; serializes with primitiveType "Line"."""

    closed: bool
    line_thickness: float
    line_style: LineStyle
    line_mode: LineMode
    points: List[Point]

    def dict(self):
        """Extend the base Drawing dict with the line-specific fields."""
        data = super().dict()
        data.update(
            primitiveType="Line",
            closed=self.closed,
            thickness=self.line_thickness,
            style=self.line_style.value,
            lineMode=self.line_mode.value,
            points=super().points_to_dict(self.points),
        )
        return data
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.