code
stringlengths 1
199k
|
|---|
from __future__ import division, print_function, absolute_import
import numpy as np
from numpy import (abs, asarray, cos, exp, floor, pi, sign, sin, sqrt, sum,
size, tril, isnan, atleast_2d, repeat)
from numpy.testing import assert_almost_equal
from .go_benchmark import Benchmark
class CarromTable(Benchmark):

    r"""
    CarromTable objective function.

    The CarromTable [1]_ global optimization problem is a multimodal
    minimization problem defined as follows:

    .. math::

        f_{\text{CarromTable}}(x) = - \frac{1}{30}\left(\cos(x_1)
        \cos(x_2) e^{\left|1 - \frac{\sqrt{x_1^2 + x_2^2}}{\pi}\right|}\right)^2

    with :math:`x_i \in [-10, 10]` for :math:`i = 1, 2`.

    *Global optimum*: :math:`f(x) = -24.15681551650653` for :math:`x_i = \pm
    9.646157266348881` for :math:`i = 1, 2`

    .. [1] Jamil, M. & Yang, X.-S. A Literature Survey of Benchmark Functions
    For Global Optimization Problems Int. Journal of Mathematical Modelling
    and Numerical Optimisation, 2013, 4, 150-194.
    """

    def __init__(self, dimensions=2):
        Benchmark.__init__(self, dimensions)

        # Search domain is the square [-10, 10]^2.
        self._bounds = list(zip([-10.0] * self.N, [10.0] * self.N))

        # Four symmetric minimizers: every sign combination of (a, b).
        opt_a, opt_b = 9.646157266348881, 9.646134286497169
        self.global_optimum = [(sa * opt_a, sb * opt_b)
                               for sb in (1, -1) for sa in (1, -1)]
        self.fglob = -24.15681551650653

    def fun(self, x, *args):
        self.nfev += 1

        prod = cos(x[0]) * cos(x[1])
        radius = sqrt(x[0] ** 2 + x[1] ** 2)
        scaled = prod * exp(abs(1 - radius / pi))
        return -(scaled ** 2) / 30.
class Chichinadze(Benchmark):

    r"""
    Chichinadze objective function.

    This class defines the Chichinadze [1]_ global optimization problem, a
    multimodal minimization problem defined as follows:

    .. math::

        f_{\text{Chichinadze}}(x) = x_{1}^{2} - 12 x_{1}
        + 8 \sin\left(\frac{5}{2} \pi x_{1}\right)
        + 10 \cos\left(\frac{1}{2} \pi x_{1}\right) + 11
        - 0.2 \frac{\sqrt{5}}{e^{\frac{1}{2} \left(x_{2} -0.5 \right)^{2}}}

    with :math:`x_i \in [-30, 30]` for :math:`i = 1, 2`.

    *Global optimum*: :math:`f(x) = -42.94438701899098` for :math:`x =
    [6.189866586965680, 0.5]`

    .. [1] Gavana, A. Global Optimization Benchmarks and AMPGO retrieved 2015

    TODO: Jamil#33 has a dividing factor of 2 in the sin term. However, f(x)
    for the given solution does not give the global minimum. i.e. the equation
    is at odds with the solution.
    Only by removing the dividing factor of 2, i.e. `8 * sin(5 * pi * x[0])`
    does the given solution result in the given global minimum.
    Do we keep the result or equation?
    """

    def __init__(self, dimensions=2):
        Benchmark.__init__(self, dimensions)

        self._bounds = list(zip([-30.0] * self.N, [30.0] * self.N))
        self.custom_bounds = [(-10, 10), (-10, 10)]

        self.global_optimum = [[6.189866586965680, 0.5]]
        self.fglob = -42.94438701899098

    def fun(self, x, *args):
        self.nfev += 1

        # Polynomial + trigonometric part in x0, minus a Gaussian bump in x1.
        x0, x1 = x[0], x[1]
        return (x0 ** 2 - 12 * x0 + 11 + 10 * cos(pi * x0 / 2)
                + 8 * sin(5 * pi * x0 / 2)
                - 1.0 / sqrt(5) * exp(-((x1 - 0.5) ** 2) / 2))
class Cigar(Benchmark):

    r"""
    Cigar objective function.

    This class defines the Cigar [1]_ global optimization problem, a
    multimodal minimization problem defined as follows:

    .. math::

        f_{\text{Cigar}}(x) = x_1^2 + 10^6\sum_{i=2}^{n} x_i^2

    Here, :math:`n` represents the number of dimensions and :math:`x_i \in
    [-100, 100]` for :math:`i = 1, ..., n`.

    *Global optimum*: :math:`f(x) = 0` for :math:`x_i = 0` for
    :math:`i = 1, ..., n`

    .. [1] Gavana, A. Global Optimization Benchmarks and AMPGO retrieved 2015
    """

    def __init__(self, dimensions=2):
        Benchmark.__init__(self, dimensions)

        self._bounds = list(zip([-100.0] * self.N, [100.0] * self.N))
        self.custom_bounds = [(-5, 5), (-5, 5)]

        self.global_optimum = [[0.0] * self.N]
        self.fglob = 0.0
        self.change_dimensionality = True

    def fun(self, x, *args):
        self.nfev += 1

        # First coordinate is cheap; all remaining ones are penalized 1e6-fold.
        tail = x[1:]
        return x[0] ** 2 + 1e6 * sum(tail ** 2)
class Cola(Benchmark):

    r"""
    Cola objective function.

    This class defines the Cola global optimization problem. The 17-dimensional
    function computes indirectly the formula :math:`f(n, u)` by setting
    :math:`x_0 = y_0, x_1 = u_0, x_i = u_{2(i2)}, y_i = u_{2(i2)+1}` :

    .. math::

        f_{\text{Cola}}(x) = \sum_{i<j}^{n} \left (r_{i,j} - d_{i,j} \right )^2

    Where :math:`r_{i, j}` is given by:

    .. math::

        r_{i, j} = \sqrt{(x_i - x_j)^2 + (y_i - y_j)^2}

    And :math:`d` is a symmetric matrix given by:

    .. math::

        \{d} = \left [ d_{ij} \right ] = \begin{pmatrix}
        1.27 & & & & & & & & \\
        1.69 & 1.43 & & & & & & & \\
        2.04 & 2.35 & 2.43 & & & & & & \\
        3.09 & 3.18 & 3.26 & 2.85 & & & & & \\
        3.20 & 3.22 & 3.27 & 2.88 & 1.55 & & & & \\
        2.86 & 2.56 & 2.58 & 2.59 & 3.12 & 3.06 & & & \\
        3.17 & 3.18 & 3.18 & 3.12 & 1.31 & 1.64 & 3.00 & \\
        3.21 & 3.18 & 3.18 & 3.17 & 1.70 & 1.36 & 2.95 & 1.32 & \\
        2.38 & 2.31 & 2.42 & 1.94 & 2.85 & 2.81 & 2.56 & 2.91 & 2.97
        \end{pmatrix}

    This function has bounds :math:`x_0 \in [0, 4]` and :math:`x_i \in [-4, 4]`
    for :math:`i = 1, ..., n-1`.

    *Global optimum* 11.7464.

    .. [1] Jamil, M. & Yang, X.-S. A Literature Survey of Benchmark Functions
    For Global Optimization Problems Int. Journal of Mathematical Modelling
    and Numerical Optimisation, 2013, 4, 150-194.
    """

    def __init__(self, dimensions=17):
        Benchmark.__init__(self, dimensions)

        # First variable is constrained to [0, 4]; all others to [-4, 4].
        self._bounds = [[0.0, 4.0]] + list(zip([-4.0] * (self.N - 1),
                                               [4.0] * (self.N - 1)))

        self.global_optimum = [[0.651906, 1.30194, 0.099242, -0.883791,
                                -0.8796, 0.204651, -3.28414, 0.851188,
                                -3.46245, 2.53245, -0.895246, 1.40992,
                                -3.07367, 1.96257, -2.97872, -0.807849,
                                -1.68978]]
        self.fglob = 11.7464

        # Target pairwise distances, stored as a strict lower-triangular
        # 10x10 matrix (upper triangle and diagonal are zero).
        self.d = asarray([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                          [1.27, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                          [1.69, 1.43, 0, 0, 0, 0, 0, 0, 0, 0],
                          [2.04, 2.35, 2.43, 0, 0, 0, 0, 0, 0, 0],
                          [3.09, 3.18, 3.26, 2.85, 0, 0, 0, 0, 0, 0],
                          [3.20, 3.22, 3.27, 2.88, 1.55, 0, 0, 0, 0, 0],
                          [2.86, 2.56, 2.58, 2.59, 3.12, 3.06, 0, 0, 0, 0],
                          [3.17, 3.18, 3.18, 3.12, 1.31, 1.64, 3.00, 0, 0, 0],
                          [3.21, 3.18, 3.18, 3.17, 1.70, 1.36, 2.95, 1.32, 0, 0],
                          [2.38, 2.31, 2.42, 1.94, 2.85, 2.81, 2.56, 2.91, 2.97, 0.]])

    def fun(self, x, *args):
        self.nfev += 1

        # Decode the 17 variables into 10 planar points: point 0 is pinned at
        # (0, 0) and point 1 at (x[0], 0); the rest alternate x/y coordinates.
        xi = atleast_2d(asarray([0.0, x[0]] + list(x[1::2])))
        # Broadcast the row of x-coordinates into a 10x10 grid of x_j values.
        xj = repeat(xi, size(xi, 1), axis=0)
        xi = xi.T
        yi = atleast_2d(asarray([0.0, 0.0] + list(x[2::2])))
        yj = repeat(yi, size(yi, 1), axis=0)
        yi = yi.T

        # Squared error between every pairwise Euclidean distance and the
        # corresponding target in self.d.
        inner = (sqrt(((xi - xj) ** 2 + (yi - yj) ** 2)) - self.d) ** 2
        # Count each unordered pair exactly once (strict lower triangle).
        inner = tril(inner, -1)
        return sum(sum(inner, axis=1))
class Colville(Benchmark):

    r"""
    Colville objective function.

    This class defines the Colville global optimization problem, a
    multimodal minimization problem defined as follows:

    .. math::

        f_{\text{Colville}}(x) = \left(x_{1} -1\right)^{2}
        + 100 \left(x_{1}^{2} - x_{2}\right)^{2}
        + 10.1 \left(x_{2} -1\right)^{2} + \left(x_{3} -1\right)^{2}
        + 90 \left(x_{3}^{2} - x_{4}\right)^{2}
        + 10.1 \left(x_{4} -1\right)^{2} + 19.8 \frac{x_{4} -1}{x_{2}}

    with :math:`x_i \in [-10, 10]` for :math:`i = 1, ..., 4`.

    *Global optimum*: :math:`f(x) = 0` for :math:`x_i = 1` for
    :math:`i = 1, ..., 4`

    .. [1] Jamil, M. & Yang, X.-S. A Literature Survey of Benchmark Functions
    For Global Optimization Problems Int. Journal of Mathematical Modelling
    and Numerical Optimisation, 2013, 4, 150-194.

    TODO docstring equation is wrong use Jamil#36
    """

    def __init__(self, dimensions=4):
        Benchmark.__init__(self, dimensions)

        self._bounds = list(zip([-10.0] * self.N, [10.0] * self.N))
        self.global_optimum = [[1.0] * self.N]
        self.fglob = 0.0

    def fun(self, x, *args):
        self.nfev += 1

        # Two Rosenbrock-style "banana" terms coupled by quadratic penalties.
        banana_a = 100 * (x[0] - x[1] ** 2) ** 2
        shift_a = (1 - x[0]) ** 2
        shift_b = (1 - x[2]) ** 2
        banana_b = 90 * (x[3] - x[2] ** 2) ** 2
        quad = 10.1 * ((x[1] - 1) ** 2 + (x[3] - 1) ** 2)
        cross = 19.8 * (x[1] - 1) * (x[3] - 1)
        return banana_a + shift_a + shift_b + banana_b + quad + cross
class Corana(Benchmark):

    r"""
    Corana objective function.

    This class defines the Corana [1]_ global optimization problem, a
    multimodal minimization problem defined as follows:

    .. math::

        f_{\text{Corana}}(x) = \begin{cases} \sum_{i=1}^n 0.15 d_i
        [z_i - 0.05\textrm{sgn}(z_i)]^2 & \textrm{if }|x_i-z_i| < 0.05 \\
        d_ix_i^2 & \textrm{otherwise}\end{cases}

    Where, in this exercise:

    .. math::

        z_i = 0.2 \lfloor |x_i/s_i|+0.49999\rfloor\textrm{sgn}(x_i),
        d_i=(1,1000,10,100, ...)

    with :math:`x_i \in [-5, 5]` for :math:`i = 1, ..., 4`.

    *Global optimum*: :math:`f(x) = 0` for :math:`x_i = 0` for
    :math:`i = 1, ..., 4`

    ..[1] Gavana, A. Global Optimization Benchmarks and AMPGO retrieved 2015
    """

    def __init__(self, dimensions=4):
        Benchmark.__init__(self, dimensions)

        self._bounds = list(zip([-5.0] * self.N, [5.0] * self.N))
        self.global_optimum = [[0.0] * self.N]
        self.fglob = 0.0

    def fun(self, x, *args):
        self.nfev += 1

        weights = [1., 1000., 10., 100.]
        total = 0
        for j, dj in enumerate(weights):
            # Snap x[j] onto a 0.2-spaced grid (s_i = 0.2 for every i).
            zj = floor(abs(x[j] / 0.2) + 0.49999) * sign(x[j]) * 0.2
            if abs(x[j] - zj) < 0.05:
                # Near a grid point: flattened quadratic plateau.
                total += 0.15 * ((zj - 0.05 * sign(zj)) ** 2) * dj
            else:
                # Away from the grid: plain weighted parabola.
                total += dj * x[j] * x[j]
        return total
class CosineMixture(Benchmark):

    r"""
    Cosine Mixture objective function.

    This class defines the Cosine Mixture global optimization problem, a
    multimodal minimization problem defined as follows:

    .. math::

        f_{\text{CosineMixture}}(x) = -0.1 \sum_{i=1}^n \cos(5 \pi x_i)
        - \sum_{i=1}^n x_i^2

    Here, :math:`n` represents the number of dimensions and :math:`x_i \in
    [-1, 1]` for :math:`i = 1, ..., N`.

    *Global optimum*: :math:`f(x) = -0.1N` for :math:`x_i = 0` for
    :math:`i = 1, ..., N`

    .. [1] Jamil, M. & Yang, X.-S. A Literature Survey of Benchmark Functions
    For Global Optimization Problems Int. Journal of Mathematical Modelling
    and Numerical Optimisation, 2013, 4, 150-194.

    TODO, Jamil #38 has wrong minimum and wrong fglob. I plotted it.
    -(x**2) term is always negative if x is negative.
    cos(5 * pi * x) is equal to -1 for x=-1.
    """

    def __init__(self, dimensions=2):
        Benchmark.__init__(self, dimensions)
        self.change_dimensionality = True

        self._bounds = list(zip([-1.0] * self.N, [1.0] * self.N))

        # Minimum sits at the corner x_i = -1 (see TODO note above),
        # giving -0.9 per dimension.
        self.global_optimum = [[-1.0] * self.N]
        self.fglob = -0.9 * self.N

    def fun(self, x, *args):
        self.nfev += 1

        periodic = sum(cos(5.0 * pi * x))
        quadratic = sum(x ** 2.0)
        return -0.1 * periodic - quadratic
class CrossInTray(Benchmark):

    r"""
    Cross-in-Tray objective function.

    This class defines the Cross-in-Tray [1]_ global optimization problem, a
    multimodal minimization problem defined as follows:

    .. math::

        f_{\text{CrossInTray}}(x) = - 0.0001 \left(\left|{e^{\left|{100
        - \frac{\sqrt{x_{1}^{2} + x_{2}^{2}}}{\pi}}\right|}
        \sin\left(x_{1}\right) \sin\left(x_{2}\right)}\right| + 1\right)^{0.1}

    with :math:`x_i \in [-15, 15]` for :math:`i = 1, 2`.

    NOTE(review): the docstring says [-15, 15] but the implemented bounds are
    [-10, 10] -- confirm which is intended.

    *Global optimum*: :math:`f(x) = -2.062611870822739` for :math:`x_i =
    \pm 1.349406608602084` for :math:`i = 1, 2`

    .. [1] Jamil, M. & Yang, X.-S. A Literature Survey of Benchmark Functions
    For Global Optimization Problems Int. Journal of Mathematical Modelling
    and Numerical Optimisation, 2013, 4, 150-194.
    """

    def __init__(self, dimensions=2):
        Benchmark.__init__(self, dimensions)

        self._bounds = list(zip([-10.0] * self.N, [10.0] * self.N))

        # Four symmetric minimizers: every sign combination of (a, b).
        opt_a, opt_b = 1.349406685353340, 1.349406608602084
        self.global_optimum = [(sa * opt_a, sb * opt_b)
                               for sb in (1, -1) for sa in (1, -1)]
        self.fglob = -2.062611870822739

    def fun(self, x, *args):
        self.nfev += 1

        amplitude = sin(x[0]) * sin(x[1])
        radius = sqrt(x[0] ** 2 + x[1] ** 2)
        inner = abs(amplitude * exp(abs(100 - radius / pi))) + 1
        return -0.0001 * inner ** (0.1)
class CrossLegTable(Benchmark):

    r"""
    Cross-Leg-Table objective function.

    This class defines the Cross-Leg-Table [1]_ global optimization problem, a
    multimodal minimization problem defined as follows:

    .. math::

        f_{\text{CrossLegTable}}(x) = - \frac{1}{\left(\left|{e^{\left|{100
        - \frac{\sqrt{x_{1}^{2} + x_{2}^{2}}}{\pi}}\right|}
        \sin\left(x_{1}\right) \sin\left(x_{2}\right)}\right| + 1\right)^{0.1}}

    with :math:`x_i \in [-10, 10]` for :math:`i = 1, 2`.

    *Global optimum*: :math:`f(x) = -1`. The global minimum is found on the
    planes :math:`x_1 = 0` and :math:`x_2 = 0`

    ..[1] Mishra, S. Global Optimization by Differential Evolution and Particle
    Swarm Methods: Evaluation on Some Benchmark Functions Munich University,
    2006
    """

    def __init__(self, dimensions=2):
        Benchmark.__init__(self, dimensions)

        self._bounds = list(zip([-10.0] * self.N, [10.0] * self.N))
        self.global_optimum = [[0., 0.]]
        self.fglob = -1.0

    def fun(self, x, *args):
        self.nfev += 1

        envelope = abs(100 - sqrt(x[0] ** 2 + x[1] ** 2) / pi)
        waves = sin(x[0]) * sin(x[1])
        # Negative reciprocal root: minimized where the product vanishes.
        return -(abs(waves * exp(envelope)) + 1) ** (-0.1)
class CrownedCross(Benchmark):

    r"""
    Crowned Cross objective function.

    This class defines the Crowned Cross [1]_ global optimization problem, a
    multimodal minimization problem defined as follows:

    .. math::

        f_{\text{CrownedCross}}(x) = 0.0001 \left(\left|{e^{\left|{100
        - \frac{\sqrt{x_{1}^{2} + x_{2}^{2}}}{\pi}}\right|}
        \sin\left(x_{1}\right) \sin\left(x_{2}\right)}\right| + 1\right)^{0.1}

    with :math:`x_i \in [-10, 10]` for :math:`i = 1, 2`.

    *Global optimum*: :math:`f(x_i) = 0.0001`. The global minimum is found on
    the planes :math:`x_1 = 0` and :math:`x_2 = 0`

    ..[1] Mishra, S. Global Optimization by Differential Evolution and Particle
    Swarm Methods: Evaluation on Some Benchmark Functions Munich University,
    2006
    """

    def __init__(self, dimensions=2):
        Benchmark.__init__(self, dimensions)

        self._bounds = list(zip([-10.0] * self.N, [10.0] * self.N))
        self.global_optimum = [[0, 0]]
        self.fglob = 0.0001

    def fun(self, x, *args):
        self.nfev += 1

        envelope = abs(100 - sqrt(x[0] ** 2 + x[1] ** 2) / pi)
        waves = sin(x[0]) * sin(x[1])
        # Same structure as CrossLegTable but inverted sign/power,
        # so the floor of the function is +0.0001.
        return 0.0001 * (abs(waves * exp(envelope)) + 1) ** (0.1)
class Csendes(Benchmark):

    r"""
    Csendes objective function.

    This class defines the Csendes [1]_ global optimization problem, a
    multimodal minimization problem defined as follows:

    .. math::

        f_{\text{Csendes}}(x) = \sum_{i=1}^n x_i^6 \left[ 2 + \sin
        \left( \frac{1}{x_i} \right ) \right]

    Here, :math:`n` represents the number of dimensions and :math:`x_i \in
    [-1, 1]` for :math:`i = 1, ..., N`.

    *Global optimum*: :math:`f(x) = 0.0` for :math:`x_i = 0` for
    :math:`i = 1, ..., N`

    .. [1] Jamil, M. & Yang, X.-S. A Literature Survey of Benchmark Functions
    For Global Optimization Problems Int. Journal of Mathematical Modelling
    and Numerical Optimisation, 2013, 4, 150-194.
    """

    def __init__(self, dimensions=2):
        Benchmark.__init__(self, dimensions)
        self.change_dimensionality = True

        self._bounds = list(zip([-1.0] * self.N, [1.0] * self.N))

        # The limit value 0 at x = 0 is not attainable numerically
        # (1/x diverges), hence fglob is recorded as nan.
        self.global_optimum = [[0.0] * self.N]
        self.fglob = np.nan

    def fun(self, x, *args):
        self.nfev += 1

        try:
            return sum((x ** 6.0) * (2.0 + sin(1.0 / x)))
        except (ZeroDivisionError, FloatingPointError):
            # 1/x blows up exactly at the optimum; report nan there.
            return np.nan

    def success(self, x):
        """Is a candidate solution at the global minimum"""
        val = self.fun(asarray(x))
        if isnan(val):
            return True
        try:
            assert_almost_equal(val, 0., 4)
        except AssertionError:
            return False
        return True
class Cube(Benchmark):

    r"""
    Cube objective function.

    This class defines the Cube global optimization problem, a
    multimodal minimization problem defined as follows:

    .. math::

        f_{\text{Cube}}(x) = 100(x_2 - x_1^3)^2 + (1 - x_1)^2

    Here, :math:`n` represents the number of dimensions and
    :math:`x_i \in [-10, 10]` for :math:`i = 1, ..., N`.

    *Global optimum*: :math:`f(x_i) = 0.0` for :math:`x = [1, 1]`

    .. [1] Jamil, M. & Yang, X.-S. A Literature Survey of Benchmark Functions
    For Global Optimization Problems Int. Journal of Mathematical Modelling
    and Numerical Optimisation, 2013, 4, 150-194.

    TODO: jamil#41 has the wrong solution.
    """

    def __init__(self, dimensions=2):
        Benchmark.__init__(self, dimensions)

        self._bounds = list(zip([-10.0] * self.N, [10.0] * self.N))
        self.custom_bounds = ([0, 2], [0, 2])

        self.global_optimum = [[1.0, 1.0]]
        self.fglob = 0.0

    def fun(self, x, *args):
        self.nfev += 1

        # Rosenbrock variant with a cubic instead of a quadratic valley.
        first, second = x[0], x[1]
        return 100.0 * (second - first ** 3.0) ** 2.0 + (1.0 - first) ** 2.0
|
import jwt
from flask import url_for
from app import db, bcrypt, app
from sqlalchemy.dialects.postgresql import JSON
from datetime import datetime, timedelta
class Incident(db.Model):
    """Model for citizen-reported incidents and their file attachments."""

    __tablename__ = "incidents"

    id = db.Column('id', db.Integer, primary_key=True)
    title = db.Column('title', db.Unicode)  # NOTE(review): consider a bounded db.String
    description = db.Column('description', db.Unicode)
    timestamp = db.Column('timestamp', db.DateTime)
    # Free-form JSON blob (e.g. coordinates); schema is not enforced here.
    location = db.Column('location', JSON)
    status = db.Column('status', db.Unicode)
    reporter = db.Column(db.Integer, db.ForeignKey('users.id'), default=None)
    files = db.relationship('File', backref='incident')

    def __init__(self, title="", description="", location=None, reporter=None):
        """Create a pending incident stamped with the current time (UTC+3).

        :param title: short human-readable summary
        :param description: longer free text
        :param location: JSON-serializable location data; defaults to an
            empty dict. (Fixed: the original used a mutable ``{}`` default
            argument, which is shared between calls.)
        :param reporter: id of the reporting user, or None
        """
        self.title = title
        self.description = description
        self.location = {} if location is None else location
        self.reporter = reporter
        # Seconds precision; shifted +3h from UTC.
        # NOTE(review): storing a shifted *naive* datetime loses timezone
        # information -- confirm this offset is intentional.
        self.timestamp = datetime.utcnow().replace(microsecond=0) + \
            timedelta(hours=3)
        self.status = 'pending'

    def __repr__(self):
        return '<Incident object. Title: {}>'.format(self.title)

    def serialize(self):
        """Return a JSON-serializable dict representation for the API.

        NOTE(review): ``self.incident_reporter`` (backref from User) is None
        when ``reporter`` is unset, which makes ``.username`` raise -- confirm
        that every serialized incident always has a reporter.
        """
        files = [f.path for f in self.files] if self.files else []
        return {"url": url_for('api_blueprint.incidents_api',
                               incident_id=self.id,
                               _external=True),
                "title": self.title,
                "description": self.description,
                "location": self.location,
                "timestamp": self.timestamp,
                "reporter": self.incident_reporter.username,
                "files": files}
class File(db.Model):
    """Model for files storage paths"""

    __tablename__ = 'files'

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    # Filesystem (or URL) path where the uploaded file is stored.
    path = db.Column(db.String(200))
    uploaded_on = db.Column(db.DateTime)
    # Owning incident; exposed on Incident via the 'files' relationship.
    incident_id = db.Column(db.Integer, db.ForeignKey('incidents.id'))

    def __init__(self, path="", incident_id=None):
        # Upload time is recorded as server-local naive time.
        self.path = path
        self.uploaded_on = datetime.now()
        self.incident_id = incident_id

    def __repr__(self):
        return '<File object. Path: {}>'.format(self.path)
class User(db.Model):
    """ User Model for storing user related details """

    __tablename__ = "users"

    id = db.Column('id', db.Integer, primary_key=True, autoincrement=True)
    # NOTE(review): the underlying DB column is named 'title', not
    # 'username' -- almost certainly a copy-paste slip from Incident;
    # renaming it requires a migration.
    username = db.Column('title', db.Unicode, unique=True)
    password = db.Column('password', db.String(255), nullable=False)
    registered_on = db.Column('registered_on', db.DateTime, nullable=False)
    admin = db.Column('admin' ,db.Boolean, nullable=False, default=False)
    # Incidents reported by this user (backref 'incident_reporter' on Incident).
    user_reporter = db.relationship('Incident',
                                    backref='incident_reporter',
                                    foreign_keys='Incident.reporter')

    def __init__(self, username, password, admin=False):
        # Password is stored as a bcrypt hash, never in plain text.
        self.username = username
        self.password = bcrypt.generate_password_hash(password).decode()
        # Seconds precision, shifted +3h from local/UTC time.
        # NOTE(review): naive shifted datetime -- confirm the offset is intended.
        self.registered_on = datetime.now().replace(microsecond=0) + \
            timedelta(hours=3)
        self.admin = admin

    def __repr__(self):
        return '<User object. Username: {}>'.format(self.username)

    def encode_auth_token(self, user_id, user_role):
        """
        Generates the Auth Token

        Token carries the user id ('sub'), role, issue time and a
        24-hour expiry.

        :return: string

        NOTE(review): on failure this returns the *exception object*
        instead of raising -- callers must type-check the result.
        """
        try:
            payload = {'exp': datetime.utcnow() + timedelta(days=1),
                       'iat': datetime.utcnow(),
                       'sub': user_id,
                       'role': user_role}
            return jwt.encode(payload, app.config.get('SECRET_KEY'),
                              algorithm='HS256')
        except Exception as e:
            return e

    @staticmethod
    def decode_auth_token(auth_token):
        """
        Validates the auth token

        Returns a dict with 'user' and 'role' on success, or a
        human-readable error string on expiry/invalidity/blacklisting.

        :param auth_token:
        :return: dictionary | string

        NOTE(review): jwt.decode is called without an `algorithms` list;
        PyJWT >= 2.0 requires it and older versions are safer with it
        (algorithm-confusion attacks) -- confirm the installed PyJWT version.
        """
        try:
            payload = jwt.decode(auth_token, app.config.get('SECRET_KEY'))
            is_blacklisted_token = BlacklistToken.check_blacklist(auth_token)
            if is_blacklisted_token:
                return 'Token blacklisted. Please log in again.'
            else:
                user_data = {'user': payload['sub'], 'role': payload['role']}
                return user_data
        except jwt.ExpiredSignatureError:
            return 'Signature expired. Please log in again.'
        except jwt.InvalidTokenError:
            return 'Invalid token. Please log in again.'
class BlacklistToken(db.Model):
    """ Token Model for storing blacklisted JWT tokens """

    __tablename__ = 'blacklist_tokens'

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    token = db.Column(db.String(500), unique=True, nullable=False)
    blacklisted_on = db.Column(db.DateTime, nullable=False)

    def __init__(self, token=''):
        self.token = token
        self.blacklisted_on = datetime.now()

    def __repr__(self):
        return '<Token: {}>'.format(self.token)

    @staticmethod
    def check_blacklist(auth_token):
        """Return True if the given token has already been blacklisted."""
        match = BlacklistToken.query.filter_by(token=str(auth_token)).first()
        return match is not None
|
"""
kaoru.commands.hibernate
~~~~~~~~
/hibernate command implementation
:copyright: (c) 2015 by Alejandro Ricoveri
:license: MIT, see LICENSE for more details.
"""
import re
from .. import config
from .. import utils
from ..procutils import proc_exec_async, proc_select
from . import bot_command
@bot_command
def _cmd_handler(bot, update):
    """hibernate your machine(s)"""
    # Locate the pm-hibernate binary on the host (empty/odd value if absent).
    hibernate_exec = proc_select(['pm-hibernate'])
    # Tell the chat before the machine goes down.
    utils.echo_msg(
        bot, update,
        'Your host(s) are going to be put into HIBERNATION'
    )
    if re.match('.*pm-hibernate$', hibernate_exec):
        # pm-hibernate needs to be run as root
        hibernate_exec = 'sudo {}'.format(hibernate_exec)
        proc_exec_async(hibernate_exec)
    else:
        # NOTE(review): `log` is never imported in this module, so this
        # branch raises NameError as written -- presumably it should be
        # `from .. import log`; confirm against sibling command modules.
        log.msg_err(
            "pm-hibernate has not been found, "
            "make sure you have installed pm-utils on your system."
        )


desc = 'Suspend your host(s) to disk'  # This command's description
cmd_handler = _cmd_handler  # command handler
cmd_str = 'hibernate'  # command /string
|
"""
Django settings for tv_show_fetcher project.
Generated by 'django-admin startproject' using Django 1.8.7.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
import os
# Project root: two directories above this settings module.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# SECURITY WARNING: this key is committed to source control -- rotate it and
# load it from the environment before any production deployment.
SECRET_KEY = 'h2&xl&&f%_50x24@b($$jnk0*02z_7=jkt2bg_@t$=&igk(f39'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'backoffice'
)

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'django.middleware.security.SecurityMiddleware',
)

ROOT_URLCONF = 'tv_show_fetcher.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # NOTE(review): '' and 'test' look like placeholder template
        # directories -- confirm they are intentional.
        'DIRS': ['', 'test'],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'tv_show_fetcher.wsgi.application'

# SECURITY WARNING: connects as MySQL root with an empty password;
# use a dedicated user and a real password outside local development.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'tv_show_fetcher',
        'USER': 'root',
        'PASSWORD': '',
        'HOST': '127.0.0.1',
        'PORT': ''
    }
}

LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True

STATIC_URL = '/static/'
STATICFILES_DIRS = (
    BASE_DIR + '/static/',
)
MEDIA_URL = '/media/'
MEDIA_ROOT = BASE_DIR + '/media/'
# Tozelabs API account id used by the fetcher.
# FIX: the original line read `USER_ID =` with no value, which is a
# SyntaxError and made the whole settings module unimportable. Load it
# from the environment (None until configured).
USER_ID = os.environ.get('TOZELABS_USER_ID')

USER_URL = "https://api.tozelabs.com/v2/user"
# Only the user's show list is needed; all optional payload is disabled.
USER_PARAMS = {
    'include_shows': 1,
    'include_lists': 0,
    'include_airing_data': 0,
    'include_last_seen': 0,
    'include_for_later': 0,
    'include_to_watch': 0
}

SHOW_URL = "https://api.tozelabs.com/v2/show"
# Per-show query: fetch episode data only, skip social/recommendation extras.
SHOW_PARAMS = {
    'user_id': USER_ID,
    'include_followers': 0,
    'nb_followers': 0,
    'seasons_data': 0,
    'include_episodes': 1,
    'nb_recommendations': 0,
    'include_suggestions': 0,
    'include_products': 0
}

# Selenium/Chrome download target for fetched torrents.
CHROME_PREF = {
    'download.default_directory' : '/tmp/toAdd'
}
PROXY_SERVER = ""
CAPTCHA_PATH = '/tmp/img.png'
PREFERD_RES = "720p"
|
from itertools import groupby

# Look-and-say sequence (Advent of Code 2015, day 10): apply 40 rounds of
# "read off runs of identical digits" to the puzzle input and report the
# final length.
#
# FIX: the original used Python-2-only `xrange` and the `print` statement,
# so it failed on Python 3; the hand-rolled run-length loop (with its
# duplicated end-of-string handling) is replaced by itertools.groupby.
current_n = '1113222113'
for _ in range(40):
    current_n = ''.join(str(len(list(run))) + digit
                        for digit, run in groupby(current_n))
print(len(current_n))
|
from django import template
from ..models import Content, Entry
register = template.Library()


# NOTE(review): `assignment_tag` was deprecated in Django 1.9 (it became an
# alias of `simple_tag`) and removed in Django 2.0 -- these decorators should
# become `@register.simple_tag` when the project upgrades.
@register.assignment_tag()
def content(slug):
    # Look up a Content object by translated slug.
    # Raises Content.DoesNotExist if no translation matches.
    return Content.objects.get(translations__slug=slug)


@register.assignment_tag()
def entries():
    # Full, unfiltered queryset of entries.
    return Entry.objects.all()


@register.assignment_tag()
def random_item(items):
    # '?' ordering delegates randomization to the database
    # (can be slow on large tables).
    return items.order_by('?').first()
|
from setuptools import setup
# Packaging metadata for the witchcraft library.
setup(
    name="witchcraft",
    version="0.2",
    description='',
    author='Peter Facka',
    author_email='pfacka@trackingwire.com',
    url='https://github.com/trackingwire/witchcraft',
    packages=[
        'witchcraft',
    ],
    # Not zip-safe: package data must be available on the filesystem.
    zip_safe=False,
    install_requires=[
        'psycopg2>=2.6.1',
        'SQLAlchemy>=1.0.6',
        'hy>=0.11.1',
        'pyparsing>=2.1.1'
    ],
    provides=['witchcraft (0.2)'],
    include_package_data=True,
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: MacOS :: MacOS X',
        'Operating System :: POSIX',
        'Programming Language :: Python :: 2.7',
        'License :: OSI Approved :: MIT License',
    ],
)
|
from game.ai.defence.yaku_analyzer.yaku_analyzer import YakuAnalyzer


class YakuhaiAnalyzer(YakuAnalyzer):
    """Detects yakuhai (valued honor triplet) hands in an enemy's melds."""

    id = "yakuhai"

    def __init__(self, enemy):
        self.enemy = enemy

    def serialize(self):
        return {"id": self.id}

    def is_yaku_active(self):
        """The yaku is live as soon as one valued-honor meld is visible."""
        return bool(self._get_suitable_melds())

    def melds_han(self):
        """Total han contributed by the enemy's valued-honor melds."""
        valued = self.enemy.valued_honors
        total = 0
        for meld in self._get_suitable_melds():
            meld_tile = meld.tiles[0] // 4
            # Count multiplicity so double winds (seat + round) score 2 han.
            total += sum(1 for honor in valued if honor == meld_tile)
        return total

    def _get_suitable_melds(self):
        """Melds whose tile (in 34-tile notation) is a valued honor."""
        return [meld for meld in self.enemy.melds
                if meld.tiles[0] // 4 in self.enemy.valued_honors]
|
"""
Access UCSC data stored locally.
"""
from Bio import SeqIO
from Bio.Alphabet import generic_dna
import gzip, os, logging
logger = logging.getLogger(__name__)
ucsc_data_dir = "/home/john/Data/UCSC"
def full_path(filename):
    """Return the absolute path of *filename* inside the local UCSC data dir."""
    return os.path.join(ucsc_data_dir, filename)
def upstream_5000(genome='mm9'):
    """Convenience wrapper: yield the 5000bp upstream sequences for *genome*."""
    return upstream(genome=genome, length=5000)
def upstream(genome='mm9', length=5000):
    "@return: Yield 5000bp sequences upstream of the TSS."
    # UCSC ships these as goldenPath/<genome>/bigZips/upstream<length>.fa.gz.
    filename = full_path(os.path.join('goldenPath', genome, 'bigZips', 'upstream%d.fa.gz' % length))
    logger.info('Loading upstream sequences from %s', filename)
    # Stream records straight out of the gzipped FASTA.
    # NOTE(review): the gzip handle is never closed; also Bio.Alphabet
    # (generic_dna) was removed in Biopython 1.78 -- confirm pinned version.
    for record in SeqIO.parse(
        gzip.open(filename, 'rb'),
        "fasta",
        generic_dna
    ):
        yield record
def chromosome_filename(genome, chromsome):
    """Return the gzipped-FASTA filename of the chromosome.

    (The parameter name ``chromsome`` is a historical typo, kept so
    keyword callers keep working.)
    """
    relative = os.path.join('goldenPath', genome, 'chromosomes',
                            '%s.fa.gz' % chromsome)
    return full_path(relative)
class Genome(dict):
    """
    @return: A dictionary mapping chromosome names to sequences.
    """

    def __init__(self, genome_name):
        # e.g. 'mm8'; used to locate the per-chromosome FASTA files.
        self.genome_name = genome_name
        "The name of the genome, e.g. mm8."

    def __missing__(self, chromosome):
        # Lazily load a chromosome from disk the first time it is requested,
        # then cache it in the dict so later lookups are free.
        filename = chromosome_filename(self.genome_name, chromosome)
        logger.info('Loading %s %s from %s', self.genome_name, chromosome, filename)
        seqs = list(SeqIO.parse(
            gzip.open(filename),
            "fasta",
            generic_dna)
        )
        # A chromosome file must contain exactly one FASTA record.
        if 1 != len(seqs):
            raise RuntimeError('Expecting exactly one sequence in %s' % filename)
        self[chromosome] = seqs[0]
        return seqs[0]
if '__main__' == __name__:
    # Smoke test: dump every 5000bp upstream record for the default genome.
    # FIX: `print record` was Python-2-only syntax; `print(record)` behaves
    # identically on Python 2 (single parenthesized expression) and 3.
    for record in upstream_5000():
        print(record)
|
"""
gae-init
~~~~~~~~
Google App Engine with Bootstrap, Flask and tons of other cool features.
https://github.com/gae-init
http://gae-init.appspot.com
Copyright (c) 2012-2015 by Panayiotis Lipiridis.
License MIT, see LICENSE for more details.
"""
__version__ = '2.1.5'
|
from .. import Provider as PhoneNumberProvider
class Provider(PhoneNumberProvider):
    """Phone-number formats for the +62 (Indonesia) country code."""

    # Currently this is my own work
    # '#' placeholders are replaced with random digits by the base provider.
    # NOTE(review): the '+62-0##...' variants combine the country code with a
    # leading trunk zero -- confirm that is the intended local convention.
    formats = (
        '+62-##-###-####',
        '+62-0##-###-####',
        '+62 (0##) ### ####',
        '+62 (0##) ###-####',
        '+62 (##) ### ####',
        '+62 (##) ###-####',
        '+62-###-###-####',
        '+62-0###-###-####',
        '+62 (0###) ### ####',
        '+62 (0###) ###-####',
        '+62 (###) ### ####',
        '+62 (###) ###-####',
        '(0##) ### ####',
        '(0##) ###-####',
        '(0###) ### ####',
        '(0###) ###-####',
        '08# ### ####',
        '08########',
    )
|
"""
Django settings for volttron project.
Generated by 'django-admin startproject' using Django 1.10.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Project root: two directories above this settings module.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# SECURITY WARNING: secret key committed to source control -- rotate it and
# load it from the environment before deploying.
SECRET_KEY = '!6^rxe*rxcr+llcpudyqs#uspavesjg+zy!o@9b=1_0^8=g9&0'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = ['squanch.us']

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django_tables2',
    'chartit',
    'smartcity',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'smartcity.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': ['html/smartcity/templates/'],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                # NOTE(review): 'request' is listed twice (also below).
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
                'django.template.context_processors.request',
            ],
        },
    },
]

WSGI_APPLICATION = 'smartcity.wsgi.application'

# SECURITY WARNING: database credentials committed to source control and the
# host is reached over the public internet -- move these to the environment.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'volttron_guide',
        'USER' : 'volttron_user',
        'PASSWORD' : 'Volttron1!',
        'HOST' : 'squanch.us',
        'PORT' : '3306',
    }
}
# Logging: console + per-environment files under /var/www/html/smartcity/logs.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': True,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse',
        },
        'require_debug_true': {
            '()': 'django.utils.log.RequireDebugTrue',
        },
    },
    'formatters': {
        'simple': {
            'format': '[%(asctime)s] %(levelname)s %(message)s',
            'datefmt': '%Y-%m-%d %H:%M:%S'
        },
        'verbose': {
            'format': '[%(asctime)s] %(levelname)s [%(name)s.%(funcName)s:%(lineno)d] %(message)s',
            'datefmt': '%Y-%m-%d %H:%M:%S'
        },
    },
    'handlers': {
        'console': {
            'level': 'DEBUG',
            'filters': ['require_debug_true'],
            'class': 'logging.StreamHandler',
            'formatter': 'simple'
        },
        'development_logfile': {
            'level': 'DEBUG',
            'filters': ['require_debug_true'],
            'class': 'logging.FileHandler',
            'filename': '/var/www/html/smartcity/logs/django_dev.log',
            'formatter': 'verbose'
        },
        'production_logfile': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'logging.FileHandler',
            'filename': '/var/www/html/smartcity/logs/django_production.log',
            'formatter': 'simple'
        },
        'dba_logfile': {
            'level': 'DEBUG',
            # NOTE(review): filters are AND-ed, and DEBUG can't be both on
            # and off -- as written this handler drops every record; one of
            # the two filters should probably be removed.
            'filters': ['require_debug_false','require_debug_true'],
            'class': 'logging.FileHandler',
            'filename': '/var/www/html/smartcity/logs/django_dba.log',
            'formatter': 'simple'
        },
    },
    'loggers': {
        'coffeehouse': {
            'handlers': ['console','development_logfile','production_logfile'],
        },
        'dba': {
            'handlers': ['console','dba_logfile'],
        },
        'django': {
            'handlers': ['console','development_logfile','production_logfile'],
        },
        'py.warnings': {
            'handlers': ['console','development_logfile'],
        },
    }
}
# Standard Django password strength checks.
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization / time handling.
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static assets served from /static/, collected into BASE_DIR/static.
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, "static/")
|
import constants
import time
import requests
# Light-scene state machine used by the polling loop below:
# 0 = movie (lights off), 1 = half lights, 2 = regular lights, 3 = paused.
state = 2
old_state = 2
def what_lights():
    """Poll the Plex status endpoint and decide which lighting state applies.

    Returns one of:
      1 -- a 'half lights' title is playing on the watched player,
      3 -- a 'no lights' title is paused,
      0 -- a 'no lights' title is playing,
      2 -- anything else (nothing playing / unknown), the default.
    """
    response = requests.get(constants.url, headers=constants.headers)
    sessions = response.json()
    children = sessions["_children"]
    for child in children:
        if child['_elementType'] == 'Video':
            subchildren = child['_children']
            for subchild in subchildren:
                # Only react to the configured player watching a movie-library title.
                if child['librarySectionID'] in constants.movie_lib_ids and subchild["_elementType"] =="Player" and subchild["machineIdentifier"] == constants.player_id:
                    if subchild['state'] == 'playing' and child['type'] in constants.half_lights:
                        return 1
                    if subchild['state'] == 'paused' and child['type'] in constants.no_lights:
                        return 3
                    elif subchild['state'] == 'playing' and child['type'] in constants.no_lights:
                        return 0
                    else:
                        # NOTE(review): any other combination (e.g. paused while a
                        # 'half lights' title is up, or a buffering state) falls
                        # through to the idle state -- confirm this is intended.
                        return 2
    # No matching session found: idle state.
    return 2
def set_lights(state):
    """Activate the openHAB lighting scene numbered ``state`` via HTTP GET."""
    requests.get('{0}Scene={1}'.format(constants.openhab_url, state))
# Main polling loop: every 0.5 s ask Plex what is playing and drive the
# openHAB scenes accordingly.  ``state`` is the state most recently acted on,
# ``old_state`` the one before that.
while True:
    # Default to "no change" so a failed status check below cannot leave
    # new_state undefined for the bookkeeping at the bottom of the loop
    # (the original code raised NameError if the very first poll failed).
    new_state = state
    try:
        new_state = what_lights()
        if new_state == state != old_state:
            if new_state == 2:
                print('regular lights')
                set_lights(1)
        elif new_state != state:
            if new_state == 0:
                if state == 3:
                    # Resuming from pause: restore the movie scene step by step.
                    print('back to movie')
                    # BUGFIX: these action lists live in the constants module;
                    # the bare names were NameErrors under ``import constants``.
                    for action in constants.back_to_movie_actions:
                        requests.get(constants.openhab_url + action)
                else:
                    print('movie lights')
                    set_lights(2)
            elif new_state == 1:
                print('half lights')
                set_lights(5)
            elif new_state == 3:
                print('pause lights')
                for action in constants.pause_lights_actions:
                    requests.get(constants.openhab_url + action)
    except Exception as exc:
        # A transient Plex/openHAB/network error should not kill the loop;
        # log it and try again on the next poll.
        print('Error: %s' % exc)
    old_state = state
    state = new_state
    time.sleep(0.5)
|
"""Family module for OpenStreetMap wiki."""
from __future__ import absolute_import, division, unicode_literals
from pywikibot import family
class Family(family.SingleSiteFamily):
    """Family class for OpenStreetMap wiki."""
    # Single-site family: one wiki, identified by this short name.
    name = 'osm'
    domain = 'wiki.openstreetmap.org'
    # Default (and only) language code for this family.
    code = 'en'
    def protocol(self, code):
        """Return https as the protocol for this family."""
        return 'https'
|
import os
from flask import Flask
# BUGFIX: the ``flask.ext.*`` namespace was a compatibility shim that was
# deprecated and then removed in Flask 1.0; extensions must be imported
# under their real package names.
from flask_sqlalchemy import SQLAlchemy, Model
from flask_cache import Cache
from flask_sqlalchemy_cache import CachingQuery

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///example.db'
app.config['DEBUG'] = True
app.config['CACHE_TYPE'] = 'memcached'
# Route all model queries through the caching query class so results can be
# stored in the configured cache backend.
Model.query_class = CachingQuery
db = SQLAlchemy(app, session_options={'query_cls': CachingQuery})
class Country(db.Model):
    # Top-level geographic entity; a country owns many State rows (see the
    # 'states' backref declared on State).
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(100), nullable=False)
    def __init__(self, name):
        # NOTE: defining __init__ replaces the model's default keyword-only
        # constructor; callers may pass the name positionally or as name=...
        self.name = name
    def __repr__(self):
        return self.name
class State(db.Model):
    # A state/province; belongs to exactly one Country.
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(100), nullable=False)
    country_id = db.Column(db.ForeignKey(Country.id))
    # Exposes Country.states as the reverse collection.
    country = db.relationship(Country, backref='states')
class City(db.Model):
    # A city; belongs to exactly one State.
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(100), nullable=False)
    state_id = db.Column(db.ForeignKey(State.id))
    # Exposes State.cities as the reverse collection.
    state = db.relationship(State, backref='cities')
def make_db():
    """Create and seed example.db on first run; no-op if the file exists."""
    if os.path.isfile('example.db'):
        return
    db.create_all()
    # Seed a tiny Brazil/Germany fixture: one state and one city for Brazil.
    brazil = Country(name='Brazil')
    germany = Country(name='Germany')
    sp = State(name='SP', country=brazil)
    cotia = City(name='Cotia', state=sp)
    db.session.add_all([brazil, germany, sp, cotia])
    db.session.commit()
# Build and seed the database once at import time (needs an app context
# because the SQLAlchemy session is bound to the Flask app).
with app.app_context():
    make_db()
cache = Cache(app)
|
'''
Created on Apr 14, 2017
@author: zheyuan
'''
# Driver-station input slots (presumably USB port indices -- confirm).
conf_left_joy = 0
conf_right_joy = 1
conf_xbox = 2
# Drive-base Talon motor-controller IDs, one per wheel corner.
conf_rightFrontBaseTalon = 1
conf_rightRearBaseTalon = 3
conf_leftFrontBaseTalon = 2
conf_leftRearBaseTalon = 4
# Pneumatic solenoid channels, paired per mechanism (gear shifter,
# lift, and esophagus intake).
conf_shifterSolenoid1 = 0
conf_shifterSolenoid2 = 1
conf_liftSolenoid1 = 2
conf_liftSolenoid2 = 3
conf_esophagusSolenoid1 = 4
conf_esophagusSolenoid2 = 5
|
import sys, os
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
# Runtime dependencies: this is a TurboGears2 pluggable application.
install_requires=[
    "TurboGears2 >= 2.1.4",
    "tgext.pluggable"
]
here = os.path.abspath(os.path.dirname(__file__))
# Use the README as the long description when present; tolerate a missing
# file (e.g. when building from a stripped-down checkout).
try:
    README = open(os.path.join(here, 'README.rst')).read()
except IOError:
    README = ''
setup(
    name='cashup',
    version='0.2',
    description='',
    long_description=README,
    author='Daniele Favara',
    author_email='daniele@zeroisp.com',
    #url='',
    keywords='turbogears2.application',
    packages=find_packages(exclude=['ez_setup']),
    install_requires=install_requires,
    include_package_data=True,
    # Ship translations, templates and static assets with the package.
    package_data={'tgapp.cashup': ['i18n/*/LC_MESSAGES/*.mo',
                                   'templates/*/*',
                                   'public/*/*']},
    entry_points="""
    """,
    zip_safe=False
)
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class PaginationConfig(AppConfig):
    """Django application configuration for the refarm pagination app."""
    name = 'refarm_pagination'
    # NOTE(review): ugettext_lazy is deprecated since Django 3.0 and removed
    # in 4.0; switch the import to gettext_lazy when the project's Django
    # version allows.
    verbose_name = _('refarm_pagination')
import os
import traceback
def getTracebackStr():
    """Return a compact, inverted summary of the active exception traceback.

    The output of ``traceback.format_exc()`` is reformatted so the exception
    type/message comes first, followed by one line per stack frame from
    innermost to outermost.  Each frame line is prefixed with '^' and a tab,
    carries the frame's source text, and shortens the file path to its last
    directory plus the file name (``/a/b/c/mod.py`` -> ``c/mod.py``).

    Must be called from inside an ``except`` block (otherwise there is no
    traceback to format).
    """
    lines = traceback.format_exc().strip().split('\n')
    result = [lines[-1]]      # exception type/message goes first
    frames = lines[1:-1]      # drop the 'Traceback ...' header and the message
    frames.reverse()          # innermost frame first
    pending = ''              # source text accumulated for the next frame line
    for raw in frames:        # iterate directly; no index/re-strip needed
        line = raw.strip()
        if line.startswith('File "'):
            parts = line.split('"')
            # parts[1] is the file path quoted in the frame header.
            parts[1] = '%s/%s' % (os.path.basename(os.path.dirname(parts[1])),
                                  os.path.basename(parts[1]))
            result.append('^\t%s %s' % (pending, '"'.join(parts)))
            pending = ''
        else:
            pending += line
    return '\n'.join(result)
|
"""
This module contains the main execution functionality for Reaction Mechanism
Generator (RMG).
"""
import os.path
import sys
import logging
import time
import shutil
import numpy
import gc
import copy
from copy import deepcopy
from rmgpy.constraints import failsSpeciesConstraints
from rmgpy.molecule import Molecule
from rmgpy.solver.base import TerminationTime, TerminationConversion
from rmgpy.solver.simple import SimpleReactor
from rmgpy.data.rmg import RMGDatabase
from rmgpy.exceptions import ForbiddenStructureException, DatabaseError
from rmgpy.data.kinetics.library import KineticsLibrary, LibraryReaction
from rmgpy.data.kinetics.family import KineticsFamily, TemplateReaction
from rmgpy.data.thermo import ThermoLibrary
from rmgpy.data.base import Entry
from rmgpy import settings
from rmgpy.kinetics.diffusionLimited import diffusionLimiter
from model import Species, CoreEdgeReactionModel
from rmgpy.reaction import Reaction
from pdep import PDepNetwork
import rmgpy.util as util
from rmgpy.chemkin import ChemkinWriter
from rmgpy.rmg.output import OutputHTMLWriter
from rmgpy.rmg.listener import SimulationProfileWriter, SimulationProfilePlotter
from rmgpy.restart import RestartWriter
from rmgpy.qm.main import QMDatabaseWriter
from rmgpy.stats import ExecutionStatsWriter
from rmgpy.thermo.thermoengine import submit
from rmgpy.tools.sensitivity import plotSensitivity
from cantera import ck2cti
solvent = None
class RMG(util.Subject):
"""
A representation of a Reaction Mechanism Generator (RMG) job. The
attributes are:
=================================== ================================================
Attribute Description
=================================== ================================================
`inputFile` The path to the input file
----------------------------------- ------------------------------------------------
`databaseDirectory` The directory containing the RMG database
`thermoLibraries` The thermodynamics libraries to load
`reactionLibraries` The kinetics libraries to load
`statmechLibraries` The statistical mechanics libraries to load
`seedMechanisms` The seed mechanisms included in the model
`kineticsFamilies` The kinetics families to use for reaction generation
`kineticsDepositories` The kinetics depositories to use for looking up kinetics in each family
`kineticsEstimator` The method to use to estimate kinetics: 'group additivity' or 'rate rules'
`solvent` If solvation estimates are required, the name of the solvent.
----------------------------------- ------------------------------------------------
`reactionModel` The core-edge reaction model generated by this job
`reactionSystems` A list of the reaction systems used in this job
`database` The RMG database used in this job
----------------------------------- ------------------------------------------------
`modelSettingsList` List of ModelSettings objects containing information related to how to manage species/reaction movement
`simulatorSettingsList` List of SimulatorSettings objects containing information on how to run simulations
`unimolecularThreshold` Array of flags indicating whether a species is above the unimolecular reaction threshold
`bimolecularaThreshold` Array of flags indicating whether two species are above the bimolecular reaction threshold
`unimolecularReact` Array of flags indicating whether a species should react unimolecularly in the enlarge step
`bimolecularReact` Array of flags indicating whether two species should react in the enlarge step
`termination` A list of termination targets (i.e :class:`TerminationTime` and :class:`TerminationConversion` objects)
`speciesConstraints` Dictates the maximum number of atoms, carbons, electrons, etc. generated by RMG
----------------------------------- ------------------------------------------------
`outputDirectory` The directory used to save output files
`verbosity` The level of logging verbosity for console output
`loadRestart` ``True`` if restarting a previous job, ``False`` otherwise
`saveRestartPeriod` The time period to periodically save a restart file (:class:`Quantity`), or ``None`` for never.
`units` The unit system to use to save output files (currently must be 'si')
`generateOutputHTML` ``True`` to draw pictures of the species and reactions, saving a visualized model in an output HTML file. ``False`` otherwise
`generatePlots` ``True`` to generate plots of the job execution statistics after each iteration, ``False`` otherwise
`verboseComments` ``True`` to keep the verbose comments for database estimates, ``False`` otherwise
`saveEdgeSpecies` ``True`` to save chemkin and HTML files of the edge species, ``False`` otherwise
`keepIrreversible` ``True`` to keep ireversibility of library reactions as is ('<=>' or '=>'). ``False`` (default) to force all library reactions to be reversible ('<=>')
`pressureDependence` Whether to process unimolecular (pressure-dependent) reaction networks
`quantumMechanics` Whether to apply quantum mechanical calculations instead of group additivity to certain molecular types.
`wallTime` The maximum amount of CPU time in the form DD:HH:MM:SS to expend on this job; used to stop gracefully so we can still get profiling information
`kineticsdatastore` ``True`` if storing details of each kinetic database entry in text file, ``False`` otherwise
----------------------------------- ------------------------------------------------
`initializationTime` The time at which the job was initiated, in seconds since the epoch (i.e. from time.time())
`done` Whether the job has completed (there is nothing new to add)
=================================== ================================================
"""
    def __init__(self, inputFile=None, outputDirectory=None):
        """Create an RMG job reading from `inputFile` and writing results to
        `outputDirectory`; all other state is reset via :meth:`clear`."""
        super(RMG, self).__init__()
        self.inputFile = inputFile
        self.outputDirectory = outputDirectory
        self.clear()
        # NOTE: clear() already resets these two lists; kept for safety.
        self.modelSettingsList = []
        self.simulatorSettingsList = []
    def clear(self):
        """
        Clear all loaded information about the job (except the file paths).
        """
        # --- Database configuration ---
        self.databaseDirectory = None
        self.thermoLibraries = None
        self.transportLibraries = None
        self.reactionLibraries = None
        self.statmechLibraries = None
        self.seedMechanisms = None
        self.kineticsFamilies = None
        self.kineticsDepositories = None
        self.kineticsEstimator = 'group additivity'
        self.solvent = None
        self.diffusionLimiter = None
        # --- Reaction model and reactors (populated by loadInput/loadDatabase) ---
        self.reactionModel = None
        self.reactionSystems = None
        self.database = None
        self.reactionSystem = None
        # --- Per-stage model/simulator settings ---
        self.modelSettingsList = []
        self.simulatorSettingsList = []
        # --- Reaction-filtering flags and thresholds ---
        self.filterReactions=False
        self.unimolecularReact = None
        self.bimolecularReact = None
        self.unimolecularThreshold = None
        self.bimolecularThreshold = None
        self.termination = []
        self.done = False
        # --- Output and bookkeeping options ---
        self.verbosity = logging.INFO
        self.loadRestart = None
        self.saveRestartPeriod = None
        self.units = 'si'
        self.generateOutputHTML = None
        self.generatePlots = None
        self.saveSimulationProfiles = None
        self.verboseComments = None
        self.saveEdgeSpecies = None
        self.keepIrreversible = None
        self.pressureDependence = None
        self.quantumMechanics = None
        self.speciesConstraints = {}
        self.wallTime = '00:00:00:00'  # DD:HH:MM:SS; converted to seconds in initialize()
        self.initializationTime = 0
        self.kineticsdatastore = None
        # --- Seed mechanism generation ---
        self.name = 'Seed'
        self.generateSeedEachIteration = True
        self.saveSeedToDatabase = False
        self.thermoCentralDatabase = None
        self.execTime = []
    def loadInput(self, path=None):
        """
        Load an RMG job from the input file located at `inputFile`, or
        from the `inputFile` attribute if not given as a parameter.
        """
        from input import readInputFile
        if path is None: path = self.inputFile
        # readInputFile populates this object's attributes in place.
        readInputFile(path, self)
        self.reactionModel.kineticsEstimator = self.kineticsEstimator
        # If the output directory is not yet set, then set it to the same
        # directory as the input file by default
        if not self.outputDirectory:
            self.outputDirectory = os.path.dirname(path)
        # Propagate job-level options onto the freshly created reaction model.
        if self.pressureDependence:
            self.pressureDependence.outputFile = self.outputDirectory
            self.reactionModel.pressureDependence = self.pressureDependence
        if self.solvent:
            self.reactionModel.solventName = self.solvent
        self.reactionModel.verboseComments = self.verboseComments
        self.reactionModel.saveEdgeSpecies = self.saveEdgeSpecies
        if self.quantumMechanics:
            self.reactionModel.quantumMechanics = self.quantumMechanics
    def loadThermoInput(self, path=None):
        """
        Load an Thermo Estimation job from a thermo input file located at `inputFile`, or
        from the `inputFile` attribute if not given as a parameter.
        """
        from input import readThermoInputFile
        if path is None: path = self.inputFile
        # Default the output directory to the input file's directory, as in loadInput().
        if not self.outputDirectory:
            self.outputDirectory = os.path.dirname(path)
        # readThermoInputFile populates this object's attributes in place.
        readThermoInputFile(path, self)
        if self.quantumMechanics:
            self.reactionModel.quantumMechanics = self.quantumMechanics
def checkInput(self):
"""
Check for a few common mistakes in the input file.
"""
if self.pressureDependence:
for index, reactionSystem in enumerate(self.reactionSystems):
assert (reactionSystem.T.value_si < self.pressureDependence.Tmax.value_si), "Reaction system T is above pressureDependence range."
assert (reactionSystem.T.value_si > self.pressureDependence.Tmin.value_si), "Reaction system T is below pressureDependence range."
assert (reactionSystem.P.value_si < self.pressureDependence.Pmax.value_si), "Reaction system P is above pressureDependence range."
assert (reactionSystem.P.value_si > self.pressureDependence.Pmin.value_si), "Reaction system P is below pressureDependence range."
assert any([not s.reactive for s in reactionSystem.initialMoleFractions.keys()]), \
"Pressure Dependence calculations require at least one inert (nonreacting) species for the bath gas."
    def checkLibraries(self):
        """
        Check unwanted use of libraries:
        Liquid phase libraries in Gas phase simulation.
        Loading a Liquid phase library obtained in another solvent than the one defined in the input file.
        Other checks can be added here.

        Raises :class:`DatabaseError` when a loaded thermo or kinetics
        library's solvent attribute conflicts with the job's phase/solvent.
        """
        #Liquid phase simulation checks
        if self.solvent:
            #check thermo librairies
            # (Python 2 dict iteration; libraries with solvent=None pass silently)
            for libIter in self.database.thermo.libraries.iterkeys():
                if self.database.thermo.libraries[libIter].solvent:
                    if not self.solvent == self.database.thermo.libraries[libIter].solvent:
                        raise DatabaseError('''Thermo library "{2}" was obtained in "{1}" and cannot be used with this liquid phase simulation in "{0}"
                        '''.format(self.solvent, self.database.thermo.libraries[libIter].solvent, self.database.thermo.libraries[libIter].name))
            #Check kinetic librairies
            for libIter in self.database.kinetics.libraries.iterkeys():
                if self.database.kinetics.libraries[libIter].solvent:
                    if not self.solvent == self.database.kinetics.libraries[libIter].solvent:
                        raise DatabaseError('''Kinetics library "{2}" was obtained in "{1}" and cannot be used with this liquid phase simulation in "{0}"
                        '''.format(self.solvent, self.database.kinetics.libraries[libIter].solvent, self.database.kinetics.libraries[libIter].name))
        #Gas phase simulation checks
        else:
            #check thermo librairies
            # Any solvent-tagged library is forbidden in a gas-phase run.
            for libIter in self.database.thermo.libraries.iterkeys():
                if self.database.thermo.libraries[libIter].solvent:
                    raise DatabaseError('''Thermo library "{1}" was obtained in "{0}" solvent and cannot be used in gas phase simulation
                    '''.format(self.database.thermo.libraries[libIter].solvent, self.database.thermo.libraries[libIter].name))
            #Check kinetic librairies
            for libIter in self.database.kinetics.libraries.iterkeys():
                if self.database.kinetics.libraries[libIter].solvent:
                    raise DatabaseError('''Kinetics library "{1}" was obtained in "{0}" solvent and cannot be used in gas phase simulation
                    '''.format(self.database.kinetics.libraries[libIter].solvent, self.database.kinetics.libraries[libIter].name))
    def saveInput(self, path=None):
        """
        Save an RMG job to the input file located at `path`, or
        from the `outputFile` attribute if not given as a parameter.
        """
        from input import saveInputFile
        # NOTE(review): ``self.outputFile`` is not initialized by __init__()
        # or clear() anywhere visible in this class, so calling saveInput()
        # without a path would raise AttributeError -- confirm whether this
        # was meant to be ``self.inputFile`` or a path under
        # ``self.outputDirectory``.
        if path is None: path = self.outputFile
        saveInputFile(path, self)
    def loadDatabase(self):
        """Load the RMG database from `databaseDirectory` and prepare kinetics rules.

        After loading, the libraries are sanity-checked against the chosen
        phase (see :meth:`checkLibraries`), the module-level ``solvent``
        global is set for liquid-phase jobs, and -- when the 'rate rules'
        estimator is selected -- the kinetics families' rate rules are
        populated from the training set and filled in by averaging.
        """
        self.database = RMGDatabase()
        self.database.load(
            path = self.databaseDirectory,
            thermoLibraries = self.thermoLibraries,
            transportLibraries = self.transportLibraries,
            reactionLibraries = [library for library, option in self.reactionLibraries],
            seedMechanisms = self.seedMechanisms,
            kineticsFamilies = self.kineticsFamilies,
            kineticsDepositories = self.kineticsDepositories,
            #frequenciesLibraries = self.statmechLibraries,
            depository = False, # Don't bother loading the depository information, as we don't use it
        )
        #check libraries
        self.checkLibraries()
        #set global variable solvent
        if self.solvent:
            global solvent
            solvent=self.solvent
        if self.kineticsEstimator == 'rate rules':
            if '!training' not in self.kineticsDepositories:
                logging.info('Adding rate rules from training set in kinetics families...')
                # Temporarily remove species constraints for the training reactions
                copySpeciesConstraints=copy.copy(self.speciesConstraints)
                self.speciesConstraints={}
                for family in self.database.kinetics.families.values():
                    family.addKineticsRulesFromTrainingSet(thermoDatabase=self.database.thermo)
                    #If requested by the user, write a text file for each kinetics family detailing the source of each entry
                    if self.kineticsdatastore:
                        logging.info('Writing sources of kinetic entries in family {0} to text file'.format(family.label))
                        path = os.path.join(self.outputDirectory, 'kinetics_database', family.label + '.txt')
                        with open(path, 'w') as f:
                            # One section per rate-rule template (Python 2 dict iteration).
                            for template_label, entries in family.rules.entries.iteritems():
                                f.write("Template [{0}] uses the {1} following source(s):\n".format(template_label,str(len(entries))))
                                for entry_index, entry in enumerate(entries):
                                    f.write(str(entry_index+1) + ". " + entry.shortDesc + "\n" + entry.longDesc + "\n")
                                f.write('\n')
                            f.write('\n')
                # Restore the species constraints suspended above.
                self.speciesConstraints=copySpeciesConstraints
            else:
                logging.info('Training set explicitly not added to rate rules in kinetics families...')
            logging.info('Filling in rate rules in kinetics families by averaging...')
            for family in self.database.kinetics.families.values():
                family.fillKineticsRulesByAveragingUp(verbose=self.verboseComments)
    def initialize(self, **kwargs):
        """
        Initialize an RMG job using the command-line arguments `args` as returned
        by the :mod:`argparse` package.

        Recognized keyword arguments: ``restart`` (resume from restart.pkl),
        ``kineticsdatastore`` (dump kinetics sources to text files) and
        ``walltime`` (DD:HH:MM:SS limit).  Loads the input file and database,
        seeds the reaction model, validates the input species, and prepares
        the reaction threshold/react flags.
        """
        # Save initialization time
        self.initializationTime = time.time()
        # Log start timestamp
        logging.info('RMG execution initiated at ' + time.asctime() + '\n')
        # Print out RMG header
        self.logHeader()
        try:
            restart = kwargs['restart']
        except KeyError:
            restart = False
        if restart:
            if not os.path.exists(os.path.join(self.outputDirectory,'restart.pkl')):
                logging.error("Could not find restart file (restart.pkl). Please run without --restart option.")
                raise Exception("No restart file")
        # Read input file
        self.loadInput(self.inputFile)
        # Check input file
        self.checkInput()
        # See if memory profiling package is available
        # (the import result is discarded; this only probes availability)
        try:
            import psutil
        except ImportError:
            logging.info('Optional package dependency "psutil" not found; memory profiling information will not be saved.')
        # Make output subdirectories
        util.makeOutputSubdirectory(self.outputDirectory, 'pdep')
        util.makeOutputSubdirectory(self.outputDirectory, 'solver')
        util.makeOutputSubdirectory(self.outputDirectory, 'kinetics_database')
        # Specifies if details of kinetic database entries should be stored according to user
        try:
            self.kineticsdatastore = kwargs['kineticsdatastore']
        except KeyError:
            self.kineticsdatastore = False
        # Load databases
        self.loadDatabase()
        # Do all liquid-phase startup things:
        if self.solvent:
            solventData = self.database.solvation.getSolventData(self.solvent)
            diffusionLimiter.enable(solventData, self.database.solvation)
            logging.info("Setting solvent data for {0}".format(self.solvent))
        # Allow a command-line wall-time override, then convert the
        # DD:HH:MM:SS string to a total number of seconds.
        try:
            self.wallTime = kwargs['walltime']
        except KeyError:
            pass
        data = self.wallTime.split(':')
        if not len(data) == 4:
            raise ValueError('Invalid format for wall time {0}; should be DD:HH:MM:SS.'.format(self.wallTime))
        self.wallTime = int(data[-1]) + 60 * int(data[-2]) + 3600 * int(data[-3]) + 86400 * int(data[-4])
        # Initialize reaction model
        if restart:
            self.initializeRestartRun(os.path.join(self.outputDirectory,'restart.pkl'))
        else:
            # Seed mechanisms: add species and reactions from seed mechanism
            # DON'T generate any more reactions for the seed species at this time
            for seedMechanism in self.seedMechanisms:
                self.reactionModel.addSeedMechanismToCore(seedMechanism, react=False)
            # Reaction libraries: add species and reactions from reaction library to the edge so
            # that RMG can find them if their rates are large enough
            for library, option in self.reactionLibraries:
                self.reactionModel.addReactionLibraryToEdge(library)
            # Also always add in a few bath gases (since RMG-Java does)
            for label, smiles in [('Ar','[Ar]'), ('He','[He]'), ('Ne','[Ne]'), ('N2','N#N')]:
                molecule = Molecule().fromSMILES(smiles)
                spec, isNew = self.reactionModel.makeNewSpecies(molecule, label=label, reactive=False)
                if isNew:
                    self.initialSpecies.append(spec)
            # Perform species constraints and forbidden species checks on input species
            for spec in self.initialSpecies:
                if self.database.forbiddenStructures.isMoleculeForbidden(spec.molecule[0]):
                    if 'allowed' in self.speciesConstraints and 'input species' in self.speciesConstraints['allowed']:
                        logging.warning('Input species {0} is globally forbidden. It will behave as an inert unless found in a seed mechanism or reaction library.'.format(spec.label))
                    else:
                        raise ForbiddenStructureException("Input species {0} is globally forbidden. You may explicitly allow it, but it will remain inert unless found in a seed mechanism or reaction library.".format(spec.label))
                if failsSpeciesConstraints(spec):
                    if 'allowed' in self.speciesConstraints and 'input species' in self.speciesConstraints['allowed']:
                        self.speciesConstraints['explicitlyAllowedMolecules'].append(spec.molecule[0])
                        pass
                    else:
                        raise ForbiddenStructureException("Species constraints forbids input species {0}. Please reformulate constraints, remove the species, or explicitly allow it.".format(spec.label))
            # For liquidReactor, checks whether the solvent is listed as one of the initial species.
            if self.solvent:
                solventStructure = self.database.solvation.getSolventStructure(self.solvent)
                self.database.solvation.checkSolventinInitialSpecies(self,solventStructure)
            #Check to see if user has input Singlet O2 into their input file or libraries
            #This constraint is special in that we only want to check it once in the input instead of every time a species is made
            if 'allowSingletO2' in self.speciesConstraints and self.speciesConstraints['allowSingletO2']:
                pass
            else:
                #Here we get a list of all species that from the user input
                allInputtedSpecies=[spec for spec in self.initialSpecies]
                #Because no iterations have taken place, the only things in the core are from seed mechanisms
                allInputtedSpecies.extend(self.reactionModel.core.species)
                #Because no iterations have taken place, the only things in the edge are from reaction libraries
                allInputtedSpecies.extend(self.reactionModel.edge.species)
                O2Singlet=Molecule().fromSMILES('O=O')
                for spec in allInputtedSpecies:
                    if spec.isIsomorphic(O2Singlet):
                        raise ForbiddenStructureException("""Species constraints forbids input species {0}
                        RMG expects the triplet form of oxygen for correct usage in reaction families. Please change your input to SMILES='[O][O]'
                        If you actually want to use the singlet state, set the allowSingletO2=True inside of the Species Constraints block in your input file.
                        """.format(spec.label))
        # Submit thermo jobs for all input species (seed/restart or not).
        for spec in self.initialSpecies:
            submit(spec,self.solvent)
        # Add nonreactive species (e.g. bath gases) to core first
        # This is necessary so that the PDep algorithm can identify the bath gas
        for spec in self.initialSpecies:
            if not spec.reactive:
                self.reactionModel.enlarge(spec)
        for spec in self.initialSpecies:
            if spec.reactive:
                self.reactionModel.enlarge(spec)
        #chatelak: store constant SPC indices in the reactor attributes if any constant SPC provided in the input file
        #advantages to write it here: this is run only once (as species indexes does not change over the generation)
        if self.solvent is not None:
            for index, reactionSystem in enumerate(self.reactionSystems):
                if reactionSystem.constSPCNames is not None: #if no constant species provided do nothing
                    reactionSystem.get_constSPCIndices(self.reactionModel.core.species)  ##call the function to identify indices in the solver
        self.initializeReactionThresholdAndReactFlags()
        self.reactionModel.initializeIndexSpeciesDict()
def register_listeners(self):
"""
Attaches listener classes depending on the options
found in the RMG input file.
"""
self.attach(ChemkinWriter(self.outputDirectory))
if self.generateOutputHTML:
self.attach(OutputHTMLWriter(self.outputDirectory))
if self.saveRestartPeriod:
self.attach(RestartWriter())
if self.quantumMechanics:
self.attach(QMDatabaseWriter())
self.attach(ExecutionStatsWriter(self.outputDirectory))
if self.saveSimulationProfiles:
for index, reactionSystem in enumerate(self.reactionSystems):
reactionSystem.attach(SimulationProfileWriter(
self.outputDirectory, index, self.reactionModel.core.species))
reactionSystem.attach(SimulationProfilePlotter(
self.outputDirectory, index, self.reactionModel.core.species))
def execute(self, **kwargs):
"""
Execute an RMG job using the command-line arguments `args` as returned
by the :mod:`argparse` package.
"""
self.initialize(**kwargs)
# register listeners
self.register_listeners()
self.done = False
self.Tmax = max([x.T for x in self.reactionSystems]).value_si
# Initiate first reaction discovery step after adding all core species
if self.modelSettingsList[0].filterReactions:
# Run the reaction system to update threshold and react flags
for index, reactionSystem in enumerate(self.reactionSystems):
reactionSystem.initializeModel(
coreSpecies = self.reactionModel.core.species,
coreReactions = self.reactionModel.core.reactions,
edgeSpecies = [],
edgeReactions = [],
pdepNetworks = self.reactionModel.networkList,
atol = self.simulatorSettingsList[0].atol,
rtol = self.simulatorSettingsList[0].rtol,
filterReactions=True,
)
self.updateReactionThresholdAndReactFlags(
rxnSysUnimolecularThreshold=reactionSystem.unimolecularThreshold,
rxnSysBimolecularThreshold=reactionSystem.bimolecularThreshold)
if not numpy.isinf(self.modelSettingsList[0].toleranceThermoKeepSpeciesInEdge):
self.reactionModel.setThermodynamicFilteringParameters(self.Tmax,toleranceThermoKeepSpeciesInEdge=self.modelSettingsList[0].toleranceThermoKeepSpeciesInEdge,
minCoreSizeForPrune=self.modelSettingsList[0].minCoreSizeForPrune,
maximumEdgeSpecies =self.modelSettingsList[0].maximumEdgeSpecies,
reactionSystems=self.reactionSystems)
self.reactionModel.enlarge(reactEdge=True,
unimolecularReact=self.unimolecularReact,
bimolecularReact=self.bimolecularReact)
if not numpy.isinf(self.modelSettingsList[0].toleranceThermoKeepSpeciesInEdge):
self.reactionModel.thermoFilterDown(maximumEdgeSpecies=self.modelSettingsList[0].maximumEdgeSpecies)
logging.info('Completed initial enlarge edge step...')
self.saveEverything()
self.makeSeedMech(firstTime=True)
maxNumSpcsHit = False #default
for q,modelSettings in enumerate(self.modelSettingsList):
if len(self.simulatorSettingsList) > 1:
simulatorSettings = self.simulatorSettingsList[q]
else: #if they only provide one input for simulator use that everytime
simulatorSettings = self.simulatorSettingsList[0]
self.filterReactions = modelSettings.filterReactions
logging.info('Beginning model generation stage {0}\n\n'.format(q+1))
self.done = False
# Main RMG loop
while not self.done:
self.reactionModel.iterationNum += 1
self.done = True
allTerminated = True
numCoreSpecies = len(self.reactionModel.core.species)
prunableSpecies = self.reactionModel.edge.species[:]
prunableNetworks = self.reactionModel.networkList[:]
for index, reactionSystem in enumerate(self.reactionSystems):
reactionSystem.prunableSpecies = prunableSpecies
reactionSystem.prunableNetworks = prunableNetworks
reactorDone = True
objectsToEnlarge = []
self.reactionSystem = reactionSystem
# Conduct simulation
logging.info('Conducting simulation of reaction system %s...' % (index+1))
prune = True
self.reactionModel.adjustSurface()
if numCoreSpecies < modelSettings.minCoreSizeForPrune:
# Turn pruning off if we haven't reached minimum core size.
prune = False
try: terminated,resurrected,obj,newSurfaceSpecies,newSurfaceReactions = reactionSystem.simulate(
coreSpecies = self.reactionModel.core.species,
coreReactions = self.reactionModel.core.reactions,
edgeSpecies = self.reactionModel.edge.species,
edgeReactions = self.reactionModel.edge.reactions,
surfaceSpecies = self.reactionModel.surface.species,
surfaceReactions = self.reactionModel.surface.reactions,
pdepNetworks = self.reactionModel.networkList,
prune = prune,
modelSettings=modelSettings,
simulatorSettings = simulatorSettings,
)
except:
logging.error("Model core reactions:")
if len(self.reactionModel.core.reactions) > 5:
logging.error("Too many to print in detail")
else:
from rmgpy.cantherm.output import prettify
logging.error(prettify(repr(self.reactionModel.core.reactions)))
self.makeSeedMech()
raise
if self.generateSeedEachIteration:
self.makeSeedMech()
reactorDone = self.reactionModel.addNewSurfaceObjects(obj,newSurfaceSpecies,newSurfaceReactions,reactionSystem)
allTerminated = allTerminated and terminated
logging.info('')
# If simulation is invalid, note which species should be added to
# the core
if obj != [] and not (obj is None):
objectsToEnlarge = self.processToSpeciesNetworks(obj)
reactorDone = False
# Enlarge objects identified by the simulation for enlarging
# These should be Species or Network objects
logging.info('')
objectsToEnlarge = list(set(objectsToEnlarge))
# Add objects to enlarge to the core first
for objectToEnlarge in objectsToEnlarge:
self.reactionModel.enlarge(objectToEnlarge)
if len(self.reactionModel.core.species) > numCoreSpecies:
tempModelSettings = deepcopy(modelSettings)
tempModelSettings.fluxToleranceKeepInEdge = 0
# If there were core species added, then react the edge
# If there were no new core species, it means the pdep network needs be updated through another enlarge core step
if modelSettings.filterReactions:
# Run a raw simulation to get updated reaction system threshold values
# Run with the same conditions as with pruning off
if not resurrected:
reactionSystem.simulate(
coreSpecies = self.reactionModel.core.species,
coreReactions = self.reactionModel.core.reactions,
edgeSpecies = [],
edgeReactions = [],
surfaceSpecies = self.reactionModel.surface.species,
surfaceReactions = self.reactionModel.surface.reactions,
pdepNetworks = self.reactionModel.networkList,
modelSettings = tempModelSettings,
simulatorSettings = simulatorSettings,
)
self.updateReactionThresholdAndReactFlags(
rxnSysUnimolecularThreshold = reactionSystem.unimolecularThreshold,
rxnSysBimolecularThreshold = reactionSystem.bimolecularThreshold)
else:
self.updateReactionThresholdAndReactFlags(
rxnSysUnimolecularThreshold = reactionSystem.unimolecularThreshold,
rxnSysBimolecularThreshold = reactionSystem.bimolecularThreshold, skipUpdate=True)
logging.warn('Reaction thresholds/flags for Reaction System {0} was not updated due to resurrection'.format(index+1))
logging.info('')
else:
self.updateReactionThresholdAndReactFlags()
if not numpy.isinf(modelSettings.toleranceThermoKeepSpeciesInEdge):
self.reactionModel.setThermodynamicFilteringParameters(self.Tmax, toleranceThermoKeepSpeciesInEdge=modelSettings.toleranceThermoKeepSpeciesInEdge,
minCoreSizeForPrune=modelSettings.minCoreSizeForPrune,
maximumEdgeSpecies=modelSettings.maximumEdgeSpecies,
reactionSystems=self.reactionSystems)
self.reactionModel.enlarge(reactEdge=True,
unimolecularReact=self.unimolecularReact,
bimolecularReact=self.bimolecularReact)
if not numpy.isinf(self.modelSettingsList[0].toleranceThermoKeepSpeciesInEdge):
self.reactionModel.thermoFilterDown(maximumEdgeSpecies=modelSettings.maximumEdgeSpecies)
maxNumSpcsHit = len(self.reactionModel.core.species) >= modelSettings.maxNumSpecies
if maxNumSpcsHit: #breaks the while loop
break
if not reactorDone:
self.done = False
self.saveEverything()
if not self.done: # There is something that needs exploring/enlarging
# If we reached our termination conditions, then try to prune
# species from the edge
if allTerminated and modelSettings.fluxToleranceKeepInEdge>0.0:
self.reactionModel.prune(self.reactionSystems, modelSettings.fluxToleranceKeepInEdge, modelSettings.maximumEdgeSpecies, modelSettings.minSpeciesExistIterationsForPrune)
# Perform garbage collection after pruning
collected = gc.collect()
logging.info('Garbage collector: collected %d objects.' % (collected))
# Consider stopping gracefully if the next iteration might take us
# past the wall time
if self.wallTime > 0 and len(self.execTime) > 1:
t = self.execTime[-1]
dt = self.execTime[-1] - self.execTime[-2]
if t + 3 * dt > self.wallTime:
logging.info('MODEL GENERATION TERMINATED')
logging.info('')
logging.info('There is not enough time to complete the next iteration before the wall time is reached.')
logging.info('The output model may be incomplete.')
logging.info('')
coreSpec, coreReac, edgeSpec, edgeReac = self.reactionModel.getModelSize()
logging.info('The current model core has %s species and %s reactions' % (coreSpec, coreReac))
logging.info('The current model edge has %s species and %s reactions' % (edgeSpec, edgeReac))
return
if maxNumSpcsHit: #resets maxNumSpcsHit and continues the settings for loop
logging.info('The maximum number of species ({0}) has been hit, Exiting stage {1} ...'.format(modelSettings.maxNumSpecies,q+1))
maxNumSpcsHit = False
continue
# Run sensitivity analysis post-model generation if sensitivity analysis is on
for index, reactionSystem in enumerate(self.reactionSystems):
if reactionSystem.sensitiveSpecies:
logging.info('Conducting sensitivity analysis of reaction system %s...' % (index+1))
sensWorksheet = []
for spec in reactionSystem.sensitiveSpecies:
csvfilePath = os.path.join(self.outputDirectory, 'solver', 'sensitivity_{0}_SPC_{1}.csv'.format(index+1, spec.index))
sensWorksheet.append(csvfilePath)
terminated, resurrected,obj, surfaceSpecies, surfaceReactions = reactionSystem.simulate(
coreSpecies = self.reactionModel.core.species,
coreReactions = self.reactionModel.core.reactions,
edgeSpecies = self.reactionModel.edge.species,
edgeReactions = self.reactionModel.edge.reactions,
surfaceSpecies = [],
surfaceReactions = [],
pdepNetworks = self.reactionModel.networkList,
sensitivity = True,
sensWorksheet = sensWorksheet,
modelSettings = self.modelSettingsList[-1],
simulatorSettings = self.simulatorSettingsList[-1],
)
plotSensitivity(self.outputDirectory, index, reactionSystem.sensitiveSpecies)
# generate Cantera files chem.cti & chem_annotated.cti in a designated `cantera` output folder
try:
self.generateCanteraFiles(os.path.join(self.outputDirectory, 'chemkin', 'chem.inp'))
self.generateCanteraFiles(os.path.join(self.outputDirectory, 'chemkin', 'chem_annotated.inp'))
except EnvironmentError:
logging.error('Could not generate Cantera files due to EnvironmentError. Check read\write privileges in output directory.')
# Write output file
logging.info('')
logging.info('MODEL GENERATION COMPLETED')
logging.info('')
coreSpec, coreReac, edgeSpec, edgeReac = self.reactionModel.getModelSize()
logging.info('The final model core has %s species and %s reactions' % (coreSpec, coreReac))
logging.info('The final model edge has %s species and %s reactions' % (edgeSpec, edgeReac))
self.finish()
    def makeSeedMech(self,firstTime=False):
        """
        Causes RMG to make a seed mechanism out of the current chem_annotated.inp
        and species_dictionary.txt. The seed mechanism is written to a 'seed'
        folder within the run directory and, when self.saveSeedToDatabase is set,
        is also saved as (or replaces) the 'Seed' thermo and kinetics libraries
        in the database.

        If run with firstTime=True it will change self.name to be unique within
        the thermo/kinetics libraries by adding integers to the end of the name
        to prevent overwriting.
        """
        logging.info('Making seed mechanism...')

        name = self.name

        if self.saveSeedToDatabase and firstTime: #make sure don't overwrite current libraries
            thermoNames = self.database.thermo.libraries.keys()
            kineticsNames = self.database.kinetics.libraries.keys()
            if name in thermoNames or name in kineticsNames:
                # Append the first integer suffix that makes the name unique in
                # both the thermo and kinetics library collections.
                q = 1
                while name+str(q) in thermoNames or name+str(q) in kineticsNames:
                    q += 1
                self.name = name + str(q)
                # NOTE(review): only self.name is updated here; the local `name`
                # used below for the library names and save paths still holds the
                # original (conflicting) value -- confirm this is intentional.

        seedDir = os.path.join(self.outputDirectory,'seed')
        if firstTime and not os.path.exists(seedDir): #if seed directory does not exist make it
            os.mkdir(seedDir)
        else:
            # NOTE(review): this branch assumes seedDir already exists (rmtree
            # raises if it does not), e.g. on every call after the first.
            shutil.rmtree(seedDir) #otherwise delete the old seed and make a new directory
            os.mkdir(seedDir)

        # Core becomes the main seed libraries; edge becomes the *_edge libraries.
        speciesList = self.reactionModel.core.species
        reactionList = self.reactionModel.core.reactions
        edgeSpeciesList = self.reactionModel.edge.species
        edgeReactionList = self.reactionModel.edge.reactions

        # Make species labels independent (duplicates gain '-N' suffixes); the
        # original labels are restored at the end of this method.
        oldLabels = self.makeSpeciesLabelsIndependent(speciesList)
        edgeOldLabels = self.makeSpeciesLabelsIndependent(edgeSpeciesList)

        # load thermo library entries
        thermoLibrary = ThermoLibrary(name=name)
        for i,species in enumerate(speciesList):
            if species.thermo:
                thermoLibrary.loadEntry(index = i + 1,
                                        label = species.label,
                                        molecule = species.molecule[0].toAdjacencyList(),
                                        thermo = species.thermo,
                                        shortDesc = species.thermo.comment
                                        )
            else:
                logging.warning('Species {0} did not contain any thermo data and was omitted from the thermo library.'.format(str(species)))

        edgeThermoLibrary = ThermoLibrary(name=name+'_edge')
        for i,species in enumerate(edgeSpeciesList):
            if species.thermo:
                edgeThermoLibrary.loadEntry(index = i + 1,
                                            label = species.label,
                                            molecule = species.molecule[0].toAdjacencyList(),
                                            thermo = species.thermo,
                                            shortDesc = species.thermo.comment
                                            )
            else:
                logging.warning('Species {0} did not contain any thermo data and was omitted from the edge thermo library.'.format(str(species)))

        # load kinetics library entries
        kineticsLibrary = KineticsLibrary(name=name,duplicatesChecked=False)
        kineticsLibrary.entries = {}
        for i in range(len(reactionList)):
            reaction = reactionList[i]
            entry = Entry(
                    index = i+1,
                    label = reaction.toLabeledStr(),
                    item = reaction,
                    data = reaction.kinetics,
                )
            try:
                # Library reactions carry a `library` attribute; other reaction
                # types do not, hence the AttributeError fallback below.
                entry.longDesc = 'Originally from reaction library: ' + reaction.library + "\n" + reaction.kinetics.comment
            except AttributeError:
                entry.longDesc = reaction.kinetics.comment
            kineticsLibrary.entries[i+1] = entry

        # Mark as duplicates where there are mixed pressure dependent and non-pressure dependent duplicate kinetics
        # Even though CHEMKIN does not require a duplicate flag, RMG needs it.
        # Using flag markDuplicates = True

        # load kinetics library entries
        edgeKineticsLibrary = KineticsLibrary(name=name+'_edge',duplicatesChecked=False)
        edgeKineticsLibrary.entries = {}
        for i,reaction in enumerate(edgeReactionList):
            entry = Entry(
                    index = i+1,
                    label = reaction.toLabeledStr(),
                    item = reaction,
                    data = reaction.kinetics,
                )
            try:
                entry.longDesc = 'Originally from reaction library: ' + reaction.library + "\n" + reaction.kinetics.comment
            except AttributeError:
                entry.longDesc = reaction.kinetics.comment
            edgeKineticsLibrary.entries[i+1] = entry

        # Mark as duplicates where there are mixed pressure dependent and non-pressure dependent duplicate kinetics
        # Even though CHEMKIN does not require a duplicate flag, RMG needs it.
        # Using flag markDuplicates = True

        #save in database
        if self.saveSeedToDatabase:
            databaseDirectory = settings['database.directory']
            try:
                os.makedirs(os.path.join(databaseDirectory, 'kinetics', 'libraries',name))
            except:
                pass
            thermoLibrary.save(os.path.join(databaseDirectory, 'thermo' ,'libraries', name + '.py'))
            kineticsLibrary.save(os.path.join(databaseDirectory, 'kinetics', 'libraries', name, 'reactions.py'))
            kineticsLibrary.saveDictionary(os.path.join(databaseDirectory, 'kinetics', 'libraries', name, 'dictionary.txt'))
            try:
                os.makedirs(os.path.join(databaseDirectory, 'kinetics', 'libraries',name+'_edge'))
            except:
                pass
            edgeThermoLibrary.save(os.path.join(databaseDirectory, 'thermo' ,'libraries', name +'_edge'+'.py'))
            edgeKineticsLibrary.save(os.path.join(databaseDirectory, 'kinetics', 'libraries', name+'_edge', 'reactions.py'))
            edgeKineticsLibrary.saveDictionary(os.path.join(databaseDirectory, 'kinetics', 'libraries', name+'_edge', 'dictionary.txt'))

        #save in output directory
        # NOTE(review): the kinetics saves below write into seedDir/name and
        # seedDir/name_edge subdirectories that are not created here; confirm
        # KineticsLibrary.save creates missing directories.
        thermoLibrary.save(os.path.join(seedDir, name + '.py'))
        kineticsLibrary.save(os.path.join(seedDir, name, 'reactions.py'))
        kineticsLibrary.saveDictionary(os.path.join(seedDir, name, 'dictionary.txt'))
        edgeThermoLibrary.save(os.path.join(seedDir, name + '_edge'+ '.py'))
        edgeKineticsLibrary.save(os.path.join(seedDir, name+'_edge', 'reactions.py'))
        edgeKineticsLibrary.saveDictionary(os.path.join(seedDir, name+'_edge', 'dictionary.txt'))

        #change labels back so species aren't renamed
        for i,label in enumerate(oldLabels):
            speciesList[i].label = label
        for i,label in enumerate(edgeOldLabels):
            edgeSpeciesList[i].label = label
def makeSpeciesLabelsIndependent(self, species):
"""
This method looks at the core species labels and makes sure none of them conflict
If a conflict occurs, the second occurance will have '-2' added
returns a list of the old labels
"""
oldLabels = []
labels = set()
for spec in species:
oldLabels.append(spec.label)
duplicate_index = 1
potential_label = spec.label
while potential_label in labels:
duplicate_index += 1
potential_label = spec.label + '-{}'.format(duplicate_index)
spec.label = potential_label
labels.add(potential_label)
return oldLabels
################################################################################
def processToSpeciesNetworks(self,obj):
"""
breaks down the objects returned by simulate into Species and PDepNetwork
components
"""
if isinstance(obj, PDepNetwork):
out = [self.processPdepNetworks(obj)]
return out
elif isinstance(obj, Species):
return [obj]
elif isinstance(obj,Reaction):
return list(self.processReactionsToSpecies(obj))
elif isinstance(obj,list): #list of species
rspcs = self.processReactionsToSpecies([k for k in obj if isinstance(k,Reaction)])
spcs = {k for k in obj if isinstance(k,Species)} | rspcs
nworks,pspcs = self.processPdepNetworks([k for k in obj if isinstance(k,PDepNetwork)])
spcs = list(spcs-pspcs) #avoid duplicate species
return spcs+nworks
else:
raise TypeError("improper call, obj input was incorrect")
def processPdepNetworks(self,obj):
"""
properly processes PDepNetwork objects and lists of PDepNetwork objects returned from simulate
"""
reactionSystem = self.reactionSystem
if isinstance(obj, PDepNetwork):
# Determine which species in that network has the highest leak rate
# We do this here because we need a temperature and pressure
# Store the maximum leak species along with the associated network
ob = (obj, obj.getMaximumLeakSpecies(reactionSystem.T.value_si, reactionSystem.P.value_si))
return ob
elif isinstance(obj,list):
spcs = [ob.getMaximumLeakSpecies(reactionSystem.T.value_si, reactionSystem.P.value_si) for ob in obj]
nworks = [(obj[i],spcs[i]) for i in xrange(len(obj))]
return nworks,set(spcs)
else:
raise TypeError("improper call, obj input was incorrect")
def processReactionsToSpecies(self,obj):
"""
properly processes Reaction objects and lists of Reaction objects returned from simulate
"""
coreSpecies = self.reactionModel.core.species
filterFcn = lambda x: not ((x in coreSpecies)) #remove species already in core
if isinstance(obj,Reaction):
potentialSpcs = obj.reactants+obj.products
potentialSpcs = filter(filterFcn,potentialSpcs)
elif isinstance(obj,list) or isinstance(obj,set):
potentialSpcs = set()
for ob in obj:
potentialSpcs = potentialSpcs | set(ob.reactants+ob.products)
potentialSpcs = {sp for sp in potentialSpcs if filterFcn(sp)}
else:
raise TypeError("improper call, obj input was incorrect")
return potentialSpcs
def generateCanteraFiles(self, chemkinFile, **kwargs):
"""
Convert a chemkin mechanism chem.inp file to a cantera mechanism file chem.cti
and save it in the cantera directory
"""
transportFile = os.path.join(os.path.dirname(chemkinFile), 'tran.dat')
fileName = os.path.splitext(os.path.basename(chemkinFile))[0] + '.cti'
outName = os.path.join(self.outputDirectory, 'cantera', fileName)
canteraDir = os.path.dirname(outName)
try:
os.makedirs(canteraDir)
except OSError:
if not os.path.isdir(canteraDir):
raise
if os.path.exists(outName):
os.remove(outName)
parser = ck2cti.Parser()
parser.convertMech(chemkinFile, transportFile=transportFile, outName=outName, quiet=True, permissive=True, **kwargs)
def initializeReactionThresholdAndReactFlags(self):
numCoreSpecies = len(self.reactionModel.core.species)
if self.filterReactions:
self.unimolecularReact = numpy.zeros((numCoreSpecies),bool)
self.bimolecularReact = numpy.zeros((numCoreSpecies, numCoreSpecies),bool)
self.unimolecularThreshold = numpy.zeros((numCoreSpecies),bool)
self.bimolecularThreshold = numpy.zeros((numCoreSpecies, numCoreSpecies),bool)
else:
# By default, react everything
self.unimolecularReact = numpy.ones((numCoreSpecies),bool)
self.bimolecularReact = numpy.ones((numCoreSpecies, numCoreSpecies),bool)
# No need to initialize reaction threshold arrays in this case
def updateReactionThresholdAndReactFlags(self, rxnSysUnimolecularThreshold=None, rxnSysBimolecularThreshold=None,skipUpdate=False):
"""
updates the length and boolean value of the unimolecular and bimolecular react and threshold flags
"""
numCoreSpecies = len(self.reactionModel.core.species)
prevNumCoreSpecies = len(self.unimolecularReact)
stale = True if numCoreSpecies > prevNumCoreSpecies else False
if self.filterReactions:
if stale:
# Reset and expand the react arrays if there were new core species added
self.unimolecularReact = numpy.zeros((numCoreSpecies), bool)
self.bimolecularReact = numpy.zeros((numCoreSpecies, numCoreSpecies), bool)
# Expand the threshold arrays if there were new core species added
unimolecularThreshold = numpy.zeros((numCoreSpecies), bool)
bimolecularThreshold = numpy.zeros((numCoreSpecies, numCoreSpecies), bool)
# Broadcast original thresholds
unimolecularThreshold[:prevNumCoreSpecies] = self.unimolecularThreshold
bimolecularThreshold[:prevNumCoreSpecies,:prevNumCoreSpecies] = self.bimolecularThreshold
self.unimolecularThreshold = unimolecularThreshold
self.bimolecularThreshold = bimolecularThreshold
if skipUpdate:
return
# Always update the react and threshold arrays
for i in xrange(numCoreSpecies):
if not self.unimolecularThreshold[i] and rxnSysUnimolecularThreshold[i]:
# We've shifted from not reacting to reacting
self.unimolecularReact[i] = True
self.unimolecularThreshold[i] = True
for i in xrange(numCoreSpecies):
for j in xrange(i, numCoreSpecies):
if not self.bimolecularThreshold[i,j] and rxnSysBimolecularThreshold[i,j]:
# We've shifted from not reacting to reacting
self.bimolecularReact[i,j] = True
self.bimolecularThreshold[i,j] = True
else:
# We are not filtering reactions
if stale:
# Reset and expand the react arrays if there were new core species added
self.unimolecularReact = numpy.zeros((numCoreSpecies), bool)
self.bimolecularReact = numpy.zeros((numCoreSpecies, numCoreSpecies), bool)
# React all the new core species unimolecularly
for i in xrange(prevNumCoreSpecies, numCoreSpecies):
self.unimolecularReact[i] = True
# React all the new core species with all the core species bimolecularly
for i in xrange(numCoreSpecies):
for j in xrange(prevNumCoreSpecies,numCoreSpecies):
self.bimolecularReact[i,j] = True
def saveEverything(self):
"""
Saves the output HTML, the Chemkin file, and the Restart file (if appropriate).
The restart file is only saved if self.saveRestartPeriod or self.done.
"""
# If the user specifies it, add unused reaction library reactions to
# an additional output species and reaction list which is written to the ouput HTML
# file as well as the chemkin file
if self.reactionLibraries:
# First initialize the outputReactionList and outputSpeciesList to empty
self.reactionModel.outputSpeciesList = []
self.reactionModel.outputReactionList = []
for library, option in self.reactionLibraries:
if option:
self.reactionModel.addReactionLibraryToOutput(library)
self.execTime.append(time.time() - self.initializationTime)
# Notify registered listeners:
self.notify()
def finish(self):
"""
Complete the model generation.
"""
# Log end timestamp
logging.info('')
logging.info('RMG execution terminated at ' + time.asctime())
def getGitCommit(self, modulePath):
import subprocess
if os.path.exists(os.path.join(modulePath,'..','.git')):
try:
return subprocess.check_output(['git', 'log',
'--format=%H%n%cd', '-1'],
cwd=modulePath).splitlines()
except:
return '', ''
else:
return '', ''
def logHeader(self, level=logging.INFO):
"""
Output a header containing identifying information about RMG to the log.
"""
from rmgpy import __version__, getPath
logging.log(level, '#########################################################')
logging.log(level, '# RMG-Py - Reaction Mechanism Generator in Python #')
logging.log(level, '# Version: {0:44s} #'.format(__version__))
logging.log(level, '# Authors: RMG Developers (rmg_dev@mit.edu) #')
logging.log(level, '# P.I.s: William H. Green (whgreen@mit.edu) #')
logging.log(level, '# Richard H. West (r.west@neu.edu) #')
logging.log(level, '# Website: http://reactionmechanismgenerator.github.io/ #')
logging.log(level, '#########################################################\n')
# Extract git commit from RMG-Py
head, date = self.getGitCommit(getPath())
if head != '' and date != '':
logging.log(level, 'The current git HEAD for RMG-Py is:')
logging.log(level, '\t%s' % head)
logging.log(level, '\t%s' % date)
logging.log(level, '')
else:
# If we cannot get git info, try checking if it is a conda package instead:
condaPackage = getCondaPackage('rmg')
if condaPackage != '':
logging.log(level, 'The current anaconda package for RMG-Py is:')
logging.log(level, condaPackage)
logging.log(level,'')
databaseHead, databaseDate = self.getGitCommit(settings['database.directory'])
if databaseHead !='' and databaseDate !='':
logging.log(level, 'The current git HEAD for RMG-database is:')
logging.log(level, '\t%s' % databaseHead)
logging.log(level, '\t%s' % databaseDate)
logging.log(level, '')
else:
databaseCondaPackage=getCondaPackage('rmgdatabase')
if databaseCondaPackage != '':
logging.log(level, 'The current anaconda package for RMG-database is:')
logging.log(level, databaseCondaPackage)
logging.log(level,'')
def initializeRestartRun(self, path):
from rmgpy.rmg.model import getFamilyLibraryObject
# read restart file
self.loadRestartFile(path)
# A few things still point to the species in the input file, so update
# those to point to the equivalent species loaded from the restart file
# The termination conversions still point to the old species
from rmgpy.solver.base import TerminationConversion
for reactionSystem in self.reactionSystems:
for term in reactionSystem.termination:
if isinstance(term, TerminationConversion):
term.species, isNew = self.reactionModel.makeNewSpecies(term.species.molecule[0], term.species.label, term.species.reactive)
# The initial mole fractions in the reaction systems still point to the old species
for reactionSystem in self.reactionSystems:
initialMoleFractions = {}
for spec0, moleFrac in reactionSystem.initialMoleFractions.iteritems():
spec, isNew = self.reactionModel.makeNewSpecies(spec0.molecule[0], spec0.label, spec0.reactive)
initialMoleFractions[spec] = moleFrac
reactionSystem.initialMoleFractions = initialMoleFractions
# The reactions and reactionDict still point to the old reaction families
reactionDict = {}
for family0_label in self.reactionModel.reactionDict:
# Find the equivalent library or family in the newly-loaded kinetics database
family_label = None
family0_obj = getFamilyLibraryObject(family0_label)
if isinstance(family0_obj, KineticsLibrary):
for label, database in self.database.kinetics.libraries.iteritems():
if database.label == family0_label:
family_label = database.label
break
elif isinstance(family0_obj, KineticsFamily):
for label, database in self.database.kinetics.families.iteritems():
if database.label == family0_label:
family_label = database.label
break
else:
import pdb; pdb.set_trace()
if family_label is None:
raise Exception("Unable to find matching reaction family for %s" % family0_label)
# Update each affected reaction to point to that new family
# Also use that new family in a duplicate reactionDict
reactionDict[family_label] = {}
for reactant1 in self.reactionModel.reactionDict[family0_label]:
reactionDict[family_label][reactant1] = {}
for reactant2 in self.reactionModel.reactionDict[family0_label][reactant1]:
reactionDict[family_label][reactant1][reactant2] = []
if isinstance(family0_obj, KineticsLibrary):
for rxn in self.reactionModel.reactionDict[family0_label][reactant1][reactant2]:
assert isinstance(rxn, LibraryReaction)
rxn.library = family_label
reactionDict[family_label][reactant1][reactant2].append(rxn)
elif isinstance(family0_obj, KineticsFamily):
for rxn in self.reactionModel.reactionDict[family0_label][reactant1][reactant2]:
assert isinstance(rxn, TemplateReaction)
rxn.family_label = family_label
reactionDict[family_label][reactant1][reactant2].append(rxn)
self.reactionModel.reactionDict = reactionDict
def loadRestartFile(self, path):
"""
Load a restart file at `path` on disk.
"""
import cPickle
# Unpickle the reaction model from the specified restart file
logging.info('Loading previous restart file...')
f = open(path, 'rb')
rmg_restart = cPickle.load(f)
f.close()
self.reactionModel = rmg_restart.reactionModel
self.unimolecularReact = rmg_restart.unimolecularReact
self.bimolecularReact = rmg_restart.bimolecularReact
if self.filterReactions:
self.unimolecularThreshold = rmg_restart.unimolecularThreshold
self.bimolecularThreshold = rmg_restart.bimolecularThreshold
    def loadRMGJavaInput(self, path):
        """
        Load an RMG-Java job from the input file located at `inputFile`, or
        from the `inputFile` attribute if not given as a parameter.

        Parses temperatures, pressures, initial species (with concentrations
        and adjacency lists), inert gases, and termination criteria, then
        builds one SimpleReactor per (T, P, concentration-column) combination.
        """
        # NOTE: This function is currently incomplete!
        # It only loads a subset of the available information.

        self.reactionModel = CoreEdgeReactionModel()
        self.initialSpecies = []
        self.reactionSystems = []

        Tlist = []; Plist = []; concentrationList = []; speciesDict = {}
        termination = []

        with open(path, 'r') as f:
            line = self.readMeaningfulLineJava(f)
            while line != '':

                if line.startswith('TemperatureModel:'):
                    # e.g. "TemperatureModel: Constant (K) 1000 1100"; convert
                    # all temperatures to kelvin.
                    tokens = line.split()
                    units = tokens[2][1:-1]
                    assert units in ['C', 'F', 'K']
                    if units == 'C':
                        Tlist = [float(T)+273.15 for T in tokens[3:]]
                    elif units == 'F':
                        Tlist = [(float(T)+459.67)*5./9. for T in tokens[3:]]
                    else:
                        Tlist = [float(T) for T in tokens[3:]]

                elif line.startswith('PressureModel:'):
                    # Convert all pressures to Pa.
                    tokens = line.split()
                    units = tokens[2][1:-1]
                    assert units in ['atm', 'bar', 'Pa', 'torr']
                    if units == 'atm':
                        Plist = [float(P)*101325. for P in tokens[3:]]
                    elif units == 'bar':
                        Plist = [float(P)*100000. for P in tokens[3:]]
                    elif units == 'torr':
                        Plist = [float(P)/760.*101325. for P in tokens[3:]]
                    else:
                        Plist = [float(P) for P in tokens[3:]]

                elif line.startswith('InitialStatus:'):
                    # Each species is a header line (label, units,
                    # concentrations) followed by an adjacency list, separated
                    # by blank lines; the section ends at 'END'.
                    label = ''; concentrations = []; adjlist = ''

                    line = self.readMeaningfulLineJava(f)
                    while line != 'END':
                        if line == '' and label != '':
                            # Blank line terminates the current species block
                            species = Species(label=label, molecule=[Molecule().fromAdjacencyList(adjlist)])
                            self.initialSpecies.append(species)
                            speciesDict[label] = species
                            concentrationList.append(concentrations)
                            label = ''; concentrations = []; adjlist = ''

                        elif line != '' and label == '':
                            # Header line: label, units, then one concentration
                            # per reactor condition; convert to mol/m^3.
                            tokens = line.split()
                            label = tokens[0]
                            units = tokens[1][1:-1]
                            if tokens[-1] in ['Unreactive', 'ConstantConcentration']:
                                tokens.pop(-1)
                            assert units in ['mol/cm3', 'mol/m3', 'mol/l']
                            if units == 'mol/cm3':
                                concentrations = [float(C)*1.0e6 for C in tokens[2:]]
                            elif units == 'mol/l':
                                concentrations = [float(C)*1.0e3 for C in tokens[2:]]
                            else:
                                concentrations = [float(C) for C in tokens[2:]]

                        elif line != '':
                            # Continuation of the adjacency list
                            adjlist += line + '\n'

                        # Read raw here (not readMeaningfulLineJava) so blank
                        # lines are preserved as block separators.
                        line = f.readline().strip()
                        if '//' in line: line = line[0:line.index('//')]

                elif line.startswith('InertGas:'):
                    line = self.readMeaningfulLineJava(f)
                    while line != 'END':
                        tokens = line.split()
                        label = tokens[0]
                        assert label in ['N2', 'Ar', 'He', 'Ne']
                        # Map the inert label to a SMILES for Molecule creation
                        if label == 'Ne':
                            smiles = '[Ne]'
                        elif label == 'Ar':
                            smiles = '[Ar]'
                        elif label == 'He':
                            smiles = '[He]'
                        else:
                            smiles = 'N#N'
                        units = tokens[1][1:-1]
                        assert units in ['mol/cm3', 'mol/m3', 'mol/l']
                        if units == 'mol/cm3':
                            concentrations = [float(C)*1.0e6 for C in tokens[2:]]
                        elif units == 'mol/l':
                            concentrations = [float(C)*1.0e3 for C in tokens[2:]]
                        else:
                            concentrations = [float(C) for C in tokens[2:]]

                        species = Species(label=label, reactive=False, molecule=[Molecule().fromSMILES(smiles)])
                        self.initialSpecies.append(species)
                        speciesDict[label] = species
                        concentrationList.append(concentrations)
                        line = self.readMeaningfulLineJava(f)

                elif line.startswith('FinishController:'):

                    # First meaningful line is a termination time or conversion
                    line = self.readMeaningfulLineJava(f)
                    tokens = line.split()
                    if tokens[2].lower() == 'conversion:':
                        label = tokens[3]
                        conversion = float(tokens[4])
                        termination.append(TerminationConversion(spec=speciesDict[label], conv=conversion))
                    elif tokens[2].lower() == 'reactiontime:':
                        # Convert the termination time to seconds
                        time = float(tokens[3])
                        units = tokens[4][1:-1]
                        assert units in ['sec', 'min', 'hr', 'day']
                        if units == 'min':
                            time *= 60.
                        elif units == 'hr':
                            time *= 60. * 60.
                        elif units == 'day':
                            time *= 60. * 60. * 24.
                        termination.append(TerminationTime(time=time))

                    # Second meaningful line is the error tolerance
                    # We're not doing anything with this information yet!
                    line = self.readMeaningfulLineJava(f)

                line = self.readMeaningfulLineJava(f)

        assert len(Tlist) > 0
        assert len(Plist) > 0
        concentrationList = numpy.array(concentrationList)
        # An arbitrary number of concentrations is acceptable, and should be run for each reactor system
        assert concentrationList.shape[1] > 0

        # Make a reaction system for each (T,P) combination
        for T in Tlist:
            for P in Plist:
                for i in range(concentrationList.shape[1]):
                    concentrations = concentrationList[:,i]
                    totalConc = numpy.sum(concentrations)
                    initialMoleFractions = dict([(self.initialSpecies[i], concentrations[i] / totalConc) for i in range(len(self.initialSpecies))])
                    reactionSystem = SimpleReactor(T, P, initialMoleFractions=initialMoleFractions, termination=termination)
                    self.reactionSystems.append(reactionSystem)
def readMeaningfulLineJava(self, f):
"""
Read a meaningful line from an RMG-Java condition file object `f`,
returning the line with any comments removed.
"""
line = f.readline()
if line != '':
line = line.strip()
if '//' in line: line = line[0:line.index('//')]
while line == '':
line = f.readline()
if line == '': break
line = line.strip()
if '//' in line: line = line[0:line.index('//')]
return line
def initializeLog(verbose, log_file_name):
    """
    Set up a logger for RMG to use to print output to stdout. The
    `verbose` parameter is an integer specifying the amount of log text seen
    at the console; the levels correspond to those of the :data:`logging` module.

    A file handler writing to `log_file_name` is also installed; any existing
    log file is first moved aside to RMG_backup.log in the same directory.
    """
    # Create logger
    logger = logging.getLogger()
    logger.setLevel(verbose)

    # Create console handler and set level to debug; send everything to stdout
    # rather than stderr
    ch = logging.StreamHandler(sys.stdout)
    ch.setLevel(verbose)

    logging.addLevelName(logging.CRITICAL, 'Critical: ')
    logging.addLevelName(logging.ERROR, 'Error: ')
    logging.addLevelName(logging.WARNING, 'Warning: ')
    logging.addLevelName(logging.INFO, '')
    logging.addLevelName(logging.DEBUG, '')
    logging.addLevelName(1, '')

    # Create formatter and add to console handler
    formatter = logging.Formatter('%(levelname)s%(message)s')
    ch.setFormatter(formatter)

    # create file handler
    if os.path.exists(log_file_name):
        # Move any previous log aside as RMG_backup.log in the same directory.
        # (Was `log_file_name[:-7]`, which hard-coded the 7-character
        # 'RMG.log' filename and broke for any other log file name.)
        backup = os.path.join(os.path.dirname(log_file_name), 'RMG_backup.log')
        if os.path.exists(backup):
            logging.info("Removing old "+backup)
            os.remove(backup)
        logging.info('Moving {0} to {1}\n'.format(log_file_name, backup))
        shutil.move(log_file_name, backup)
    fh = logging.FileHandler(filename=log_file_name) #, backupCount=3)
    fh.setLevel(min(logging.DEBUG,verbose)) # always at least VERBOSE in the file
    fh.setFormatter(formatter)
    # notice that STDERR does not get saved to the log file
    # so errors from underlying libraries (eg. openbabel) etc. that report
    # on stderr will not be logged to disk.

    # remove old handlers!
    while logger.handlers:
        logger.removeHandler(logger.handlers[0])

    # Add console and file handlers to logger
    logger.addHandler(ch)
    logger.addHandler(fh)
class Tee:
    """A simple tee that duplicates every write across several streams.

    Used to report the profiling statistics to both the log file and the
    standard output.
    """
    def __init__(self, *fileobjects):
        # Streams are kept exactly as supplied; write() fans out to each.
        self.fileobjects = fileobjects

    def write(self, string):
        for stream in self.fileobjects:
            stream.write(string)
def getCondaPackage(module):
    """
    Check the version of any conda package.

    Returns the `conda list` output for `module` with comment lines stripped,
    or '' if conda is unavailable or the lookup fails.
    """
    import subprocess
    try:
        output = subprocess.check_output(['conda', 'list', '-f', module])
        if not isinstance(output, str):
            # check_output returns bytes under Python 3
            output = output.decode('utf-8')
        # Strip comment lines (conda prefixes its header with '#')
        packages = [line for line in output.splitlines() if not line.startswith('#')]
        return '\n'.join(packages)
    except (subprocess.CalledProcessError, OSError):
        # conda is not installed or the command failed. (Was a bare `except:`,
        # which also swallowed KeyboardInterrupt/SystemExit.)
        return ''
def processProfileStats(stats_file, log_file):
    """Print a summary of the cProfile data in *stats_file* to both the
    console and *log_file* (appended), via the Tee stream.

    NOTE: uses Python 2 chevron print syntax (`print >>stream`); this
    module is Python 2 only.
    """
    import pstats
    out_stream = Tee(sys.stdout,open(log_file,'a')) # print to screen AND append to RMG.log
    print >>out_stream, "="*80
    print >>out_stream, "Profiling Data".center(80)
    print >>out_stream, "="*80
    stats = pstats.Stats(stats_file,stream=out_stream)
    # Shorten file paths to bare module names in the report
    stats.strip_dirs()
    # First report: top 25 functions by time spent in the function itself
    print >>out_stream, "Sorted by internal time"
    stats.sort_stats('time')
    stats.print_stats(25)
    stats.print_callers(25)
    # Second report: top 25 functions by cumulative (incl. callees) time
    print >>out_stream, "Sorted by cumulative time"
    stats.sort_stats('cumulative')
    stats.print_stats(25)
    stats.print_callers(25)
    stats.print_callees(25)
def makeProfileGraph(stats_file):
    """
    Uses gprof2dot to create a graphviz dot file of the profiling information.

    This requires the gprof2dot package available via `pip install gprof2dot`.
    Render the result using the program 'dot' via a command like
    `dot -Tps2 input.dot -o output.ps2`.

    Rendering the ps2 file to pdf requires an external pdf converter
    `ps2pdf output.ps2` which produces a `output.ps2.pdf` file.
    """
    try:
        from gprof2dot import PstatsParser, DotWriter, SAMPLES, themes
    except ImportError:
        logging.warning('Trouble importing from package gprof2dot. Unable to create a graph of the profile statistics.')
        logging.warning('Try getting the latest version with something like `pip install --upgrade gprof2dot`.')
        return
    import subprocess
    # Create an Options class to mimic optparser output as much as possible:
    class Options:
        pass
    options = Options()
    options.node_thres = 0.8     # hide nodes below 0.8% of total time
    options.edge_thres = 0.1     # hide edges below 0.1% of total time
    options.strip = False
    options.show_samples = False
    options.root = ""
    options.leaf = ""
    options.wrap = True
    theme = themes['color']  # bw color gray pink
    theme.fontname = "ArialMT"  # default "Arial" leads to PostScript warnings in dot (on Mac OS)
    parser = PstatsParser(stats_file)
    profile = parser.parse()
    dot_file = stats_file + '.dot'
    output = open(dot_file, 'wt')
    dot = DotWriter(output)
    dot.strip = options.strip
    dot.wrap = options.wrap
    if options.show_samples:
        dot.show_function_events.append(SAMPLES)
    # (removed a no-op `profile = profile` assignment that was here)
    profile.prune(options.node_thres / 100.0, options.edge_thres / 100.0)
    if options.root:
        rootId = profile.getFunctionId(options.root)
        if not rootId:
            sys.stderr.write('root node ' + options.root + ' not found (might already be pruned : try -e0 -n0 flags)\n')
            sys.exit(1)
        profile.prune_root(rootId)
    if options.leaf:
        leafId = profile.getFunctionId(options.leaf)
        if not leafId:
            sys.stderr.write('leaf node ' + options.leaf + ' not found (maybe already pruned : try -e0 -n0 flags)\n')
            sys.exit(1)
        profile.prune_leaf(leafId)
    dot.graph(profile, theme)
    output.close()
    # Render the dot file to PostScript level 2
    try:
        subprocess.check_call(['dot', '-Tps2', dot_file, '-o', '{0}.ps2'.format(dot_file)])
    except subprocess.CalledProcessError:
        logging.error("Error returned by 'dot' when generating graph of the profile statistics.")
        logging.info("To try it yourself:\n dot -Tps2 {0} -o {0}.ps2".format(dot_file))
    except OSError:
        logging.error("Couldn't run 'dot' to create graph of profile statistics. Check graphviz is installed properly and on your path.")
        logging.info("Once you've got it, try:\n dot -Tps2 {0} -o {0}.ps2".format(dot_file))
    # Convert the PostScript output to PDF
    try:
        subprocess.check_call(['ps2pdf', '{0}.ps2'.format(dot_file), '{0}.pdf'.format(dot_file)])
    except subprocess.CalledProcessError:
        # Previously uncaught: a failed ps2pdf run crashed this function.
        logging.error("Error returned by 'ps2pdf' when converting the profile graph to pdf.")
        logging.info("To try it yourself:\n ps2pdf {0}.ps2 {0}.pdf".format(dot_file))
    except OSError:
        logging.error("Couldn't run 'ps2pdf' to create pdf graph of profile statistics. Check that ps2pdf converter is installed.")
        # Bug fix: this hint previously read 'pd2pdf'.
        logging.info("Once you've got it, try:\n ps2pdf {0}.ps2 {0}.pdf".format(dot_file))
    else:
        logging.info("Graph of profile statistics saved to: \n {0}.pdf".format(dot_file))
|
from django.contrib import admin
# NOTE(review): implicit relative import (Python 2 style); under Python 3
# this would need to be `from .models import *` -- confirm target version.
from models import *
# Register the student-organization models so they can be managed through
# Django's built-in admin interface (default ModelAdmin for each).
admin.site.register(OrganizacionEstudiantil)
admin.site.register(Miembro)
admin.site.register(DatosBancarios)
admin.site.register(Comite)
|
'''
Created by:
Juan Sarria
March 15, 2016
'''
import pandas as pd, numpy as np, fiona, timeit
from geopy.distance import vincenty
from shapely import geometry
from utilities import utm_to_latlong, latlong_to_utm
from __builtin__ import False
from pandas.core.frame import DataFrame
PROJECT_ROOT = '../'
def main():
    """Spot-check: compare each crime record's recorded NEIGHBOURHOOD
    against the one computed from its coordinates and report mismatches."""
    #test values
    lat = 49.2668355595
    lon = -123.070244095
    year = 2010
    month = 5
    # The triple-quoted block below is disabled demo code kept for manual
    # testing (Python 2 print statements); it is a no-op string literal.
    '''
    prop_df = pd.read_csv(PROJECT_ROOT + 'data/property_tax_06_15/latlong_property_tax_' + str(2006) + '.csv')
    print avg_closest_properties(lat,lon,prop_df=prop_df)
    sky_df = pd.read_csv(PROJECT_ROOT + 'data/skytrain_stations/rapid_transit_stations.csv')
    print closest_skytrain(lat,lon)
    crime_df = pd.read_csv(PROJECT_ROOT+'/data/crime_03_15/crime_latlong.csv')
    neighbourhoods = crime_df['NEIGHBOURHOOD'].unique().tolist()
    print len(neighbourhoods)
    print one_hot_encoding(neighbourhoods[2],neighbourhoods)
    a = number_graffiti(lat,lon)
    print type(a[0])
    '''
    # Sample 1000 post-2006 crime records with their recorded neighbourhood
    data = pd.read_csv(PROJECT_ROOT+'/data/crime_03_15/crime_latlong.csv')
    data = data[data['YEAR'] >= 2006].sample(1000)
    data = data[['LATITUDE','LONGITUDE', 'NEIGHBOURHOOD']]
    # Recompute the neighbourhood from coordinates for each sampled row
    data2 = data.apply(lambda row: pd.Series(locate_neighbourhood(row['LATITUDE'], row['LONGITUDE']),
                                             index=['NEIGHBOURHOOD_2']),axis=1)
    data = pd.concat([data,data2],axis=1)[['NEIGHBOURHOOD','NEIGHBOURHOOD_2']]
    # Keep only rows where the two labels disagree (and the original is set)
    data = data[data['NEIGHBOURHOOD'] != data['NEIGHBOURHOOD_2']][pd.notnull(data['NEIGHBOURHOOD'])]
    print data
    print data.count()
def avg_closest_properties(lat, lon, year = None, prop_df = None, range_val = 0.0001):
    """
    Return [mean VALUE of the 5 closest properties,
            mean VALUE of the 10 closest properties] to (lat, lon).

    Either `year` (loads .../latlong_property_tax_<year>.csv) or a
    pre-loaded `prop_df` must be supplied.  `range_val` is the half-width
    (degrees) of a bounding box used to pre-filter rows; it is grown 10x
    recursively until at least 10 candidates remain.

    On failure prints a diagnostic and returns None (preserves the
    original best-effort behaviour, but no longer hides the cause).
    """
    try:
        if year is not None:
            property_file = PROJECT_ROOT + 'data/property_tax_06_15/latlong_property_tax_' + str(year) + '.csv'
            if prop_df is None: prop_df = pd.read_csv(property_file)
        if prop_df is None:
            # Previously this fell through to a NameError caught by a
            # bare except; fail with an explicit message instead.
            raise ValueError("either year or prop_df must be provided")
        # Keep the full frame in case we must retry with a bigger box
        temp_df = prop_df
        # Cheap bounding-box filter before computing exact distances
        prop_df = prop_df[prop_df['LATITUDE'] < lat + range_val]
        prop_df = prop_df[prop_df['LATITUDE'] > lat - range_val]
        prop_df = prop_df[prop_df['LONGITUDE'] < lon + range_val]
        prop_df = prop_df[prop_df['LONGITUDE'] > lon - range_val]
        # If not enough values, start again with a 10x bigger range
        if prop_df.count()['VALUE'] < 10:
            return avg_closest_properties(lat, lon, prop_df=temp_df, range_val=range_val * 10)
        # Exact geodesic distance for the surviving rows only
        prop_df['DIST_DIF'] = prop_df.apply(lambda row: vincenty((lat,lon),(row['LATITUDE'],row['LONGITUDE'])).m,axis=1)
        # Top 10 and top 5 closest properties
        ten_min_df = prop_df[['VALUE','DIST_DIF']].nsmallest(10,'DIST_DIF')
        five_min_df = ten_min_df.nsmallest(5,'DIST_DIF')
        return [five_min_df['VALUE'].mean(), ten_min_df['VALUE'].mean()]
    except Exception as err:
        # Best-effort like the original, but say what actually went wrong
        print("Error in avg_closest_properties: {0}".format(err))
def closest_skytrain(lat, lon, sky_df = None):
    """Return a one-hot list identifying the nearest SkyTrain station to
    (lat, lon), with the distance to it (metres) appended as the final
    element.  Loads the station CSV when *sky_df* is not supplied."""
    if sky_df is None:
        sky_df = pd.read_csv(PROJECT_ROOT + 'data/skytrain_stations/rapid_transit_stations.csv')
    # One slot per station plus one trailing slot for the distance
    encoded = [0] * (sky_df.count()['STATION'] + 1)
    # Geodesic distance from the query point to every station
    sky_df['DIST_DIF'] = sky_df.apply(
        lambda row: vincenty((lat, lon), (row['LAT'], row['LONG'])).m, axis=1)
    nearest = sky_df.nsmallest(1, 'DIST_DIF')
    encoded[nearest.index[0]] = 1
    encoded[-1] = nearest.iloc[0]['DIST_DIF']
    return encoded
'''
def get_weather(year, month, weatherdf = None):
weather_file = PROJECT_ROOT + 'data/weather/VANCOUVER SEA ISLAND CCG/summarydata.csv'
if weatherdf is None:
weatherdf = pd.read_csv(weather_file)
# basic checking to see if we have reasonable data passed in.
if month > 12:
return False
if year >= 2006 and year <= 2015:
filter_year = weatherdf[(weatherdf.YEAR == year)]
line = filter_year[(filter_year.MONTH == month)].drop('YEAR',axis=1).drop('MONTH',axis=1)
return line
else:
filter_month = weatherdf[(weatherdf.MONTH == month)].drop('YEAR',axis=1).drop('MONTH',axis=1).mean(axis=0).to_frame().transpose()
return filter_month
'''
def one_hot_encoding(label, list_of_labels):
    """Return a one-hot list with 1 at the position of the first
    occurrence of *label* in *list_of_labels*, 0 elsewhere.

    Raises ValueError when *label* is not present.
    """
    position = list_of_labels.index(label)
    return [1 if i == position else 0 for i in range(len(list_of_labels))]
def number_graffiti(lat, lon, graf_df = None, radius1 = 50, radius2 = 100):
    """
    Return [graffiti COUNT total within radius1 metres,
            graffiti COUNT total within radius2 metres] of (lat, lon).
    Assumes radius1 <= radius2.  Loads the CSV when graf_df is None.
    """
    graffiti_file = PROJECT_ROOT + 'data/graffiti/graffiti.csv'
    if graf_df is None: graf_df = pd.read_csv(graffiti_file)
    # Cheap ~0.001-degree bounding box before computing exact distances
    graf_df = graf_df[graf_df['LAT'] < lat+.001]
    graf_df = graf_df[graf_df['LAT'] > lat-.001]
    graf_df = graf_df[graf_df['LONG'] < lon+.001]
    # Bug fix: this line previously repeated the upper LONG bound, so the
    # lower (western) bound was never applied.
    graf_df = graf_df[graf_df['LONG'] > lon-.001]
    if graf_df['LAT'].count() == 0: return [0,0]
    # Exact geodesic distance for the remaining rows
    graf_df['DIST_DIF'] = graf_df.apply(lambda row: vincenty((lat,lon),(row['LAT'],row['LONG'])).m,axis=1)
    count_2 = graf_df[graf_df['DIST_DIF'] <= radius2]
    count_1 = count_2[count_2['DIST_DIF'] <= radius1]
    return [count_1['COUNT'].sum(), count_2['COUNT'].sum()]
def number_street_lights(lat, lon, light_df = None, radius = 50):
    """
    Return the number of street-lighting poles within *radius* metres of
    (lat, lon).  Loads the CSV when light_df is None.
    """
    light_file = PROJECT_ROOT + 'data/street_lightings/street_lighting_poles.csv'
    if light_df is None: light_df = pd.read_csv(light_file)
    # Cheap ~0.001-degree bounding box before computing exact distances
    light_df = light_df[light_df['LAT'] < lat+.001]
    light_df = light_df[light_df['LAT'] > lat-.001]
    light_df = light_df[light_df['LONG'] < lon+.001]
    # Bug fix: this line previously repeated the upper LONG bound, so the
    # lower (western) bound was never applied.
    light_df = light_df[light_df['LONG'] > lon-.001]
    if light_df['LAT'].count() == 0 : return 0
    # Exact geodesic distance, then count poles inside the radius
    light_df['DIST_DIF'] = light_df.apply(lambda row: vincenty((lat,lon),(row['LAT'],row['LONG'])).m,axis=1)
    min_lights = light_df[light_df['DIST_DIF'] < radius]
    return min_lights['DIST_DIF'].count()
def locate_neighbourhood(lat, lon):
    """Return the name of the Vancouver neighbourhood polygon containing
    (lat, lon), or -1 when no polygon contains the point."""
    with fiona.open(PROJECT_ROOT+'data/neighbourhood_borders/local_area_boundary.shp') as neighbourhoods:
        # NOTE(review): shapely Points are conventionally (x=lon, y=lat);
        # here the point is (lat, lon), which only works if utm_to_latlong
        # below also returns (lat, lon) pairs -- confirm in utilities.py.
        point = geometry.Point(lat,lon)
        for n in neighbourhoods:
            # Normalize names to match the labels used by the crime dataset
            if n['properties']['NAME'] == 'Arbutus-Ridge': n['properties']['NAME'] = 'Arbutus Ridge'
            if n['properties']['NAME'] == 'Downtown': n['properties']['NAME'] = 'Central Business District'
            # Shapefile coordinates are UTM; convert the outer ring before testing
            n['geometry']['coordinates'][0] = [utm_to_latlong(x[0],x[1]) for x in n['geometry']['coordinates'][0]]
            shape = geometry.asShape(n['geometry'])
            if shape.contains(point): return n['properties']['NAME']
    return -1
# Run the neighbourhood spot-check only when executed as a script.
if __name__ == "__main__":
    main()
|
import sys
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtGui import QPainter
from PyQt5.QtGui import QPixmap
from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
windowSizeX = 440
windowSizeY = 250
userName = 'Aperture'
fontMajor = "Arial"
fontMinor = "Dotum"
class Form(QWidget):
    """Password-entry window shown after a picture upload.

    Displays the user name, a prompt, a password field and a Submit
    button in a single-column grid layout.
    """
    # __init__ : constructor
    # parent : parent widget
    def __init__(self, parent=None):
        super(Form, self).__init__(parent)
        # "User : <name>" label, centered, minor font at 12pt
        nameLabel = QLabel()
        nameLabel.setText("User : %s" % userName)
        newfont = QtGui.QFont(fontMinor, 12 )
        nameLabel.setFont(newfont)
        nameLabel.setAlignment(Qt.AlignCenter)
        # Headline label
        nameLabel2 = QLabel()
        nameLabel2.setText("Picture uploaded.")
        newfont = QtGui.QFont(fontMinor, 16 )
        nameLabel2.setFont(newfont)
        nameLabel2.setAlignment(Qt.AlignCenter)
        # Password prompt label
        nameLabel3 = QLabel()
        nameLabel3.setText("Please Input your password below : ")
        newfont = QtGui.QFont(fontMinor, 12 )
        nameLabel3.setFont(newfont)
        nameLabel3.setAlignment(Qt.AlignCenter)
        blankLabel = QLabel()
        blankLabel.setText(" ")
        # Password input field
        self.nameLine = QLineEdit()
        # NOTE(review): an empty QRect() here is a no-op once the widget is
        # managed by the grid layout below -- confirm this line is needed.
        self.nameLine.setGeometry(QRect())
        submitButton = QPushButton("Submit")
        submitButton.clicked.connect(self.submitContact)
        # Single-column grid: headline, user, prompt, input, button
        mainLayout = QGridLayout()
        mainLayout.addWidget(nameLabel2,0,0)
        mainLayout.addWidget(nameLabel,1,0)
        mainLayout.addWidget(nameLabel3,2,0)
        mainLayout.addWidget(self.nameLine,3,0)
        mainLayout.addWidget(submitButton,4,0)
        self.setLayout(mainLayout)
        self.setWindowTitle("Privasee")
    def submitContact(self):
        """Validate the password field; warn when empty, otherwise exit.

        NOTE(review): the else branch references the module-level `app`
        and calls app.exec_() again inside a running event loop -- this
        looks like placeholder code (see the string comment) -- confirm.
        """
        name = self.nameLine.text()
        if name == "":
            QMessageBox.information(self, "Empty Field",
                                    "Please input your password properly.")
            return
        else:
            '''call GUI_Privasee_RegisterDone'''
            sys.exit(app.exec_())
# Create the Qt application, show the form, and hand control to the
# event loop; exit with its return code.
if __name__ == '__main__':
    app = QApplication(sys.argv)
    screen = Form()
    screen.resize(windowSizeX,windowSizeY)
    screen.show()
    sys.exit(app.exec_())
|
import bcrypt
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.exc import OperationalError, ProgrammingError
from sqlalchemy.pool import NullPool
from scoring_engine.config import config
from scoring_engine.models.base import Base
def delete_db(session):
    """Drop every table registered on the shared declarative Base,
    using the engine bound to *session*."""
    engine = session.bind
    Base.metadata.drop_all(engine)
def init_db(session):
    """Create every table registered on the shared declarative Base,
    using the engine bound to *session*."""
    engine = session.bind
    Base.metadata.create_all(engine)
def verify_db_ready(session):
    """Return True when the schema exists and the users table can be
    queried; False when the database is missing or not yet migrated."""
    from scoring_engine.models.user import User
    try:
        # Any successful query proves the table is reachable
        session.query(User).get(1)
    except (OperationalError, ProgrammingError):
        # Missing database / missing table -> not ready yet
        return False
    return True
# Default isolation level for backends that support it
isolation_level = "READ COMMITTED"
if 'sqlite' in config.db_uri:
    # sqlite db does not support transaction based statements
    # so we have to manually set it to something else
    isolation_level = "READ UNCOMMITTED"
# Thread-local session factory; NullPool opens and closes a fresh
# connection per checkout instead of pooling.
session = scoped_session(sessionmaker(bind=create_engine(config.db_uri,
                                      isolation_level=isolation_level,
                                      poolclass=NullPool)))
# Process-wide bcrypt salt used when hashing passwords
db_salt = bcrypt.gensalt()
def query_monkeypatch(classname):
    """Replacement for session.query that commits any pending
    transaction first, presumably so reads see fresh data under the
    configured isolation level (see setup above) -- confirm intent."""
    session.commit()
    return session.orig_query(classname)
# Install the monkeypatch: keep the original query factory reachable,
# then route all session.query(...) calls through the wrapper.
session.orig_query = session.query
session.query = query_monkeypatch
|
import requests
class PublicClient(object):
    """Minimal client for the public (unauthenticated) Kraken REST API,
    version 0."""

    def __init__(self):
        super(PublicClient, self).__init__()
        # All public endpoints hang off this root URL.
        self.base_url = "https://api.kraken.com/0/public"

    def _url_for(self, path):
        """Return the full endpoint URL for *path*."""
        return "%s/%s" % (self.base_url, path)

    @classmethod
    def _get(cls, url, params=None):
        """GET *url* and return the decoded JSON body.

        Best-effort: returns None (explicitly, not by falling off the
        end) on any network error or non-200 response.
        """
        try:
            response = requests.get(url, timeout=5, params=params)
        except requests.exceptions.RequestException as e:
            # Bug fix: the original message lacked a space and printed
            # e.g. "kraken gethttps://...".
            print('kraken get ' + url + ' failed: ' + str(e))
            return None
        if response.status_code == requests.codes.ok:
            return response.json()
        return None

    def depth(self, symbol, count=5):
        """Return the order book for *symbol*, *count* levels per side,
        or None when the request fails."""
        params = {
            'count': count,
            'pair': symbol
        }
        url = self._url_for("Depth")
        return self._get(url, params)
|
from gcsa.reminders import Reminder, EmailReminder, PopupReminder
from .base_serializer import BaseSerializer
class ReminderSerializer(BaseSerializer):
    """(De)serializes gcsa reminder objects to and from the Google
    Calendar API's JSON reminder-override format."""

    type_ = Reminder

    # NOTE: the original defined an __init__ that only delegated to
    # super().__init__(reminder); that pure pass-through is inherited
    # from BaseSerializer, so it has been removed.

    @staticmethod
    def _to_json(reminder: Reminder):
        """Return the API JSON dict for *reminder*."""
        return {
            'method': reminder.method,
            'minutes': reminder.minutes_before_start
        }

    @staticmethod
    def _to_object(json_reminder):
        """Build an EmailReminder or PopupReminder from API JSON.

        Raises ValueError for an unknown delivery method.
        """
        method = json_reminder['method']
        if method == 'email':
            return EmailReminder(int(json_reminder['minutes']))
        elif method == 'popup':
            return PopupReminder(int(json_reminder['minutes']))
        else:
            raise ValueError('Unexpected method "{}" for a reminder.'.format(method))
|
from inspect import getmro
from io import BytesIO
from xml.etree.ElementTree import Element, ElementTree, fromstring, SubElement
from iris_sdk.models.maps.base_map import BaseMap
from iris_sdk.utils.rest import HTTP_OK
from iris_sdk.utils.strings import Converter
BASE_MAP_SUFFIX = "Map"
BASE_PROP_CLIENT = "client"
BASE_PROP_ITEMS = "items"
BASE_PROP_NODE = "_node_name"
BASE_PROP_NODE_SAVE = "_node_name_save"
BASE_PROP_XPATH = "xpath"
BASE_PROP_XPATH_SEPARATOR = "{"
HEADER_LOCATION = "location"
class BaseData(object):
    """Base class for everything.

    Provides reflection-based reset (clear) and population from plain
    dicts (set_from_dict) for all resource/data classes in this module.
    """
    def clear(self):
        """Flushes the data: recursively clears nested resource/list
        attributes and sets plain attributes to None.  Skips private
        names, the client/xpath/items properties, and callables."""
        for prop in dir(self):
            property = getattr(self, prop)
            # Might be needed
            if (prop.startswith("_")) or (prop == BASE_PROP_CLIENT) or \
                (prop == BASE_PROP_XPATH) or (prop == BASE_PROP_ITEMS) or\
                (callable(property)):
                continue
            cleared = False
            _class = property.__class__
            # Everything is either a BaseData, a BaseResourceList or a
            # BaseResource descendant (which itself inherits from BaseData).
            if (_class == BaseData) or (_class == BaseResourceList) or \
                (_class == BaseResourceSimpleList):
                property.clear()
                cleared = True
            else:
                # Walk the MRO to catch subclasses of the known bases
                for classtype in getmro(property.__class__):
                    if (classtype==BaseData) or (classtype==BaseResourceList)\
                        or (classtype==BaseResource) or \
                        (classtype==BaseResourceSimpleList):
                        property.clear()
                        cleared = True
                        break
            # Built-in types
            if not cleared:
                setattr(self, prop, None)
    def set_from_dict(self, initial_data=None):
        """Clear this object, then populate attributes from
        *initial_data*.  Strings are assigned directly; dicts populate
        nested resources; lists fill (or create) resource lists.  Keys
        with no matching attribute are ignored.  Returns self."""
        if (initial_data is None) or (not isinstance(initial_data, dict)):
            return self
        self.clear()
        for key in initial_data:
            if not hasattr(self, key):
                continue
            if isinstance(initial_data[key], str):
                setattr(self, key, initial_data[key])
            else:
                attr = getattr(self, key)
                if isinstance(initial_data[key], dict):
                    if attr is None:
                        # attr should be already not None by the
                        # moment of calling set_from_dict,
                        # but just in case
                        attr = BaseResource()
                    attr.set_from_dict(initial_data[key])
                    setattr(self, key, attr)
                elif isinstance(attr, BaseData):
                    attr.set_from_dict(initial_data[key])
                elif isinstance(initial_data[key], list):
                    if attr is None:
                        # Default to a list of generic resources
                        setattr(self, key, BaseResourceList(BaseResource))
                        attr = getattr(self, key)
                    if isinstance(attr, BaseResourceSimpleList):
                        attr.clear()
                        for list_item in initial_data[key]:
                            attr.add(list_item)
                    setattr(self, key, attr)
        return self
class BaseResourceSimpleList(object):
    """
    Thin wrapper around a plain list, used to store simple (scalar)
    values such as text parsed from repeated XML elements.
    """
    @property
    def items(self):
        """The underlying list of stored values."""
        return self._items
    def __init__(self):
        self._items = []
    def add(self, value):
        """Append *value* and return it."""
        self._items.append(value)
        return self._items[-1]
    def clear(self):
        """Empty the list in place, preserving its identity."""
        del self._items[:]
class BaseResourceList(BaseResourceSimpleList):
    """
    List of instances of "class_type" passed to constructor.
    "parent" used to link BaseResource instances and pass their "client"
    properties.
    """
    @property
    def class_type(self):
        return self._class_type
    @property
    def parent(self):
        return self._parent
    def __init__(self, class_type, parent=None):
        BaseResourceSimpleList.__init__(self)
        self._class_type = class_type
        self._parent = parent
    def add(self, initial_data=None):
        """Instantiate class_type (passing parent when one is set),
        populate it from *initial_data*, append it, and return it."""
        ctor_args = () if self._parent is None else (self._parent,)
        item = self._class_type(*ctor_args)
        item.set_from_dict(initial_data)
        self.items.append(item)
        return item
class BaseResource(BaseData):
    """
    REST resource.
    "_node_name" - corresponding XML element name,
    "_save_post" - uses POST if True, PUT - otherwise,
    "_xpath_save" - if set, uses this for saving,
    "client" does http requests,
    "xpath" returns the REST resource's relative path.
    """
    # Class-level defaults; subclasses/instances override as needed.
    _id = None
    _parent = None
    _node_name = None
    _node_name_save = None
    _save_post = False
    _xpath = ""
    _xpath_save = None
    @property
    def client(self):
        # HTTP client used for all requests (may be inherited from parent)
        return self._client
    @client.setter
    def client(self, client):
        self._client = client
    @property
    def id(self):
        # Resource identifier substituted into the xpath template
        return self._id
    @id.setter
    def id(self, id):
        self._id = id
    @property
    def xpath(self):
        return self._xpath
    def __init__(self, parent=None, client=None):
        """Link to *parent* (for path/client chaining) and store the
        HTTP *client*; falls back to the parent's client when None."""
        self._converter = Converter()
        self._parent = parent
        self._client = client
        if (client is None) and (parent is not None):
            self._client = parent.client
    def _delete_file(self, xpath, id):
        """DELETE the sub-path *xpath* (formatted with *id*) under this
        resource.  Returns True on HTTP 200.  Raises ValueError when
        *id* is None."""
        if id is None:
            raise ValueError("No id specified")
        path = ""
        if xpath is not None:
            path = xpath.format(id)
        response = self._client.delete(section=self.get_xpath() + path)
        return response.status_code == HTTP_OK
    def _element_from_string(self, str):
        """Parse an XML string into an Element."""
        return fromstring(str)
    def _from_xml(self, element, instance=None):
        """
        Parses XML elements into existing objects, e.g.:
        garply = some_class(),
        garply._node_name = "Foo"
        garply.bar_baz = some_other_class()
        garply.bar_baz.qux = None
        <Foo>
            <BarBaz>
                <Qux>Corge</Qux>
            </BarBaz>
        </Foo>
        garply.bar_baz.qux equals "Corge".
        Converts CamelCase names to lowercase underscore ones.
        """
        # If instance is None, the tag name to search for in XML data equals
        # class name.
        inst = (instance or self)
        class_name = inst.__class__.__name__
        node_name = None
        if hasattr(inst, BASE_PROP_NODE):
            node_name = inst._node_name
        # Recursive call: instance's class represents the element's structure
        if instance is not None:
            search_name = element.tag
        else:
            search_name = (node_name or class_name)
        # The provided element is actually the one we're searching for
        if element.tag == search_name:
            element_children = element.getchildren()
        else:
            element_children = element.findall(search_name)
        for el in element_children:
            # CamelCase XML tag -> snake_case attribute name
            tag = self._converter.to_underscore(el.tag)
            property = None
            if not hasattr(inst, tag):
                # Not the base class
                if instance is not None:
                    continue
            else:
                property = getattr(inst, tag)
            # Leaf element: plain text value
            if len(el.getchildren()) == 0:
                if el.text is not None:
                    # Simple list - multiple "<tag></tag>" lines
                    if isinstance(property, BaseResourceSimpleList):
                        property.items.append(el.text)
                    else:
                        setattr(inst, tag, el.text)
                continue
            _inst = property
            # List of instances - add an item and parse recursively
            if isinstance(property, BaseResourceList):
                # Set parents for REST resources
                has_parent = False
                for class_type in property.class_type.__bases__:
                    if class_type == BaseResource:
                        has_parent = True
                        break
                self._class = property.class_type
                if has_parent:
                    item = property.class_type(property.parent)
                else:
                    item = property.class_type()
                property.items.append(item)
                _inst = property.items[-1]
            # Instance's class mirrors the element's structure
            self._from_xml(el, _inst)
    def _get(self, id=None, params=None):
        """Reset this resource, set its id, and GET its xpath.  Raises
        ValueError when the path needs an id but none is set."""
        new_id = (id or self.id)
        self.clear()
        self.id = new_id
        xpath = self.get_xpath()
        # "{" in the path means an unfilled id placeholder remains
        if (self.id is None) and (BASE_PROP_XPATH_SEPARATOR in xpath):
            raise ValueError("No id specified")
        return self._client.get(self.get_xpath(), params)
    def _get_data(self, id=None, params=None):
        """GET this resource and populate self from the XML body.
        Returns self."""
        content = self._get(id, params).content.decode(encoding="UTF-8")
        if content:
            root = self._element_from_string(content)
            self._from_xml(root)
        return self
    def _get_file(self, xpath, id):
        """GET the sub-path *xpath* (formatted with *id*) under this
        resource; returns the raw client response."""
        if id is None:
            raise ValueError("No id specified")
        path = ""
        if xpath is not None:
            path = xpath.format(id)
        return self._client.get(section=self.get_xpath() + path)
    def _get_status(self, id=None, params=None):
        # NOTE(review): returns `.status` while other paths use
        # `.status_code` -- confirm which attribute the client exposes.
        return self._get(id, params).status
    def _post(self, xpath, data, params):
        """POST *data* to *xpath*; returns the raw response."""
        return self._client.post(section=xpath, params=params, data=data)
    def _post_data(self, response_instance=None, params=None):
        """Save via POST and parse the response body into
        *response_instance* (or self).  Returns the populated object."""
        content = self._save(return_content=True, params=params)
        if content:
            root = self._element_from_string(content)
            inst = (response_instance or self)
            inst.clear()
            inst._from_xml(root)
            return inst
        return self
    def _put(self, xpath, data):
        """PUT *data* to *xpath*; returns the raw response."""
        return self._client.put(section=xpath, data=data)
    def _save(self, return_content=False, params=None):
        """Serialize self and save it: PUT when an id exists (and
        _save_post is False), POST otherwise.  On POST, extracts the new
        id from the Location response header.  Returns response content
        when *return_content* is set, otherwise a success flag."""
        data = self._serialize()
        if (self.id is not None) and (not self._save_post):
            response = self._put(self.get_xpath(True), data)
            if return_content:
                return response.content.decode(encoding="UTF-8")
            else:
                return response.status_code == HTTP_OK
        # POST: either to self's save path or to the parent collection
        resource = (self if self._save_post else self._parent)
        path = resource.get_xpath(True)
        response = self._post(path, data, params)
        location = None
        if HEADER_LOCATION in response.headers:
            location = response.headers[HEADER_LOCATION]
        res = ""
        if location is not None:
            # New resource id is the last segment of the Location URL
            pos = location.rfind("/")
            res = location[pos+1:]
        self.id = (res if res else self.id)
        if return_content:
            return response.content.decode(encoding="UTF-8")
        else:
            return True
    def _send_file(self, xpath, filename, headers, id=None):
        """Upload *filename*: POST to create, or PUT to *xpath* (formatted
        with *id*) to replace.  Returns the new id from the Location
        header, or a success flag when no Location is present."""
        path = ""
        request = self._client.post
        if id is not None:
            if xpath is not None:
                path = xpath.format(id)
            request = self._client.put
        with open(filename, 'rb') as file_data:
            response = request(section=self.get_xpath() + path,
                data=file_data, headers=headers)
        location = None
        if HEADER_LOCATION in response.headers:
            location = response.headers[HEADER_LOCATION]
        if location is not None:
            return location[location.rfind("/")+1:]
        else:
            return response.status_code == HTTP_OK
    def _serialize(self):
        """Return this resource as UTF-8 XML bytes (with declaration)."""
        root = ElementTree(self._to_xml())
        data_io = BytesIO()
        root.write(data_io, encoding="UTF-8", xml_declaration=True)
        return data_io.getvalue()
    def _to_xml(self, element=None, instance=None):
        """
        The opposite of "_from_xml".
        Lowercase underscore names are converted to CamelCase.
        """
        inst = (instance or self)
        # Renaming the root
        node_name = inst.__class__.__name__
        if hasattr(inst, BASE_PROP_NODE):
            if inst._node_name is not None:
                node_name = inst._node_name
        if hasattr(inst, BASE_PROP_NODE_SAVE):
            if inst._node_name_save is not None:
                node_name = inst._node_name_save
        elem = (Element(node_name) if element is None else element)
        map = None
        # "Map" is a base class that sets the correspondence between XML
        # elements and class properties, i.e. what's not in this class doesn't
        # get written to the file.
        for classtype in getmro(inst.__class__):
            if (classtype.__name__.endswith(BASE_MAP_SUFFIX) and
                    classtype.__name__ != BaseMap.__name__):
                map = classtype
                break
        if map is None:
            return elem
        for prop in dir(map):
            property = getattr(inst, prop)
            # Skip private names, methods, and unset values
            if (prop.startswith("_") or
                    callable(property) or
                    property is None):
                continue
            # Lists
            if isinstance(property, BaseResourceList):
                for item in property.items:
                    el = SubElement(elem, self._converter.to_camelcase(prop))
                    self._to_xml(el, item)
                continue
            if isinstance(property, BaseResourceSimpleList):
                for item in property.items:
                    el = SubElement(elem, self._converter.to_camelcase(prop))
                    el.text = str(item)
                continue
            # Everything else
            el = SubElement(elem, self._converter.to_camelcase(prop))
            if isinstance(property, BaseMap):
                self._to_xml(el, property)
                # Drop elements that ended up completely empty
                if (len(el.getchildren()) == 0) and (el.text is None):
                    elem.remove(el)
            else:
                el.text = str(property)
        return elem
    def delete(self):
        """DELETE this resource; True on HTTP 200."""
        response = self._client.delete(self.get_xpath())
        return response.status_code == HTTP_OK
    def get(self, id=None, params=None):
        """Fetch and populate this resource; returns self."""
        return self._get_data(id, params)
    def get_status(self, id=None, params=None):
        # NOTE(review): passes a path where _get expects an id -- looks
        # suspicious; confirm intended call signature.
        return self._get_status(self.get_xpath(id), params)
    def get_xpath(self, save_path=False):
        """Return the full REST path: parent chain's path plus this
        resource's (save) xpath, with {0} filled by self.id."""
        parent_path = ""
        if self._parent is not None:
            parent_path = self._parent.get_xpath(save_path)
        own_path = self._xpath
        if save_path and (self._xpath_save is not None):
            own_path = self._xpath_save
        xpath = parent_path + own_path
        return xpath.format(self.id)
    def save(self):
        """Persist this resource (PUT or POST; see _save)."""
        self._save()
|
''' Dummy NFC Provider to be used on desktops in case no other provider is found
'''
from electrum_dgb_gui.kivy.nfc_scanner import NFCBase
from kivy.clock import Clock
from kivy.logger import Logger
class ScannerDummy(NFCBase):
    '''This is the dummy interface that gets selected in case any other
    hardware interface to NFC is not available.  It fakes a tag
    discovery on a timer so the rest of the app can be exercised.
    '''
    _initialised = False
    name = 'NFCDummy'

    def nfc_init(self):
        '''Pretend to initialise NFC hardware and start scanning.
        Always succeeds.'''
        Logger.debug('NFC: configure nfc')
        self._initialised = True
        self.nfc_enable()
        return True

    def on_new_intent(self, dt):
        '''Clock callback simulating the discovery of a new tag.'''
        # Bug fix: 'type' previously read 'dymmy' (typo for 'dummy').
        tag_info = {'type': 'dummy',
                    'message': 'dummy',
                    'extra details': None}
        # Bug fix: App was never imported at module level, so this method
        # raised NameError; import it locally here.
        from kivy.app import App
        # let Main app know that a tag has been detected
        app = App.get_running_app()
        app.tag_discovered(tag_info)
        app.show_info('New tag detected.', duration=2)
        Logger.debug('NFC: got new dummy tag')

    def nfc_enable(self):
        '''Begin polling: simulate a tag discovery every 22 seconds.'''
        Logger.debug('NFC: enable')
        if self._initialised:
            Clock.schedule_interval(self.on_new_intent, 22)

    def nfc_disable(self):
        '''Stop the simulated tag discoveries.'''
        Clock.unschedule(self.on_new_intent)

    def nfc_enable_exchange(self, data):
        ''' Start sending data
        '''
        Logger.debug('NFC: sending data {}'.format(data))

    def nfc_disable_exchange(self):
        ''' Disable/Stop ndef exchange
        '''
        Logger.debug('NFC: disable nfc exchange')
|
import datetime
from beancount.loader import load_string
from fava.core.misc import sidebar_links
from fava.core.misc import upcoming_events
def test_sidebar_links(load_doc):
    # NOTE(review): the docstring below appears to be functional test
    # data -- presumably the load_doc fixture parses it as a beancount
    # document (confirm in fava's conftest).  Do not edit it.
    """
    2016-01-01 custom "fava-sidebar-link" "title" "link"
    2016-01-02 custom "fava-sidebar-link" "titl1" "lin1"
    """
    entries, _, _ = load_doc
    # Links should come back as (title, url) pairs in document order
    assert sidebar_links(entries) == [("title", "link"), ("titl1", "lin1")]
def test_upcoming_events():
    """An event dated today is upcoming within a 1-day window; an old
    event is not."""
    today_line = f'{datetime.date.today()} event "some_event" "test"\n'
    past_line = '2012-12-12 event "test" "test"'
    entries, _, _ = load_string(today_line + past_line)
    assert len(upcoming_events(entries, 1)) == 1
|
import unittest
import time
from should_dsl import should
from fluidity import StateMachine, transition, state
class FluidityState(unittest.TestCase):
    """Tests for fluidity StateMachine state declaration and the
    initial-state mechanism (uses the should_dsl assertion DSL)."""
    def test_it_defines_states(self):
        # Declaring three states registers all of them on the machine
        class MyMachine(StateMachine):
            state('unread')
            state('read')
            state('closed')
            initial_state = 'read'
        machine = MyMachine()
        machine |should| have(3).states
        machine.states() |should| include_all_of(['unread', 'read', 'closed'])
    def test_it_has_an_initial_state(self):
        # current_state starts out equal to the declared initial_state
        class MyMachine(StateMachine):
            initial_state = 'closed'
            state('open')
            state('closed')
        machine = MyMachine()
        machine.initial_state |should| equal_to('closed')
        machine.current_state |should| equal_to('closed')
    def test_it_defines_states_using_method_calls(self):
        # States/transitions declared via calls in the class body;
        # two machine classes must not leak state into each other.
        class MyMachine(StateMachine):
            state('unread')
            state('read')
            state('closed')
            initial_state = 'unread'
            transition(from_='unread', event='read', to='read')
            transition(from_='read', event='close', to='closed')
        machine = MyMachine()
        machine |should| have(3).states
        machine.states() |should| include_all_of(['unread', 'read', 'closed'])
        class OtherMachine(StateMachine):
            state('idle')
            state('working')
            initial_state = 'idle'
            transition(from_='idle', event='work', to='working')
        machine = OtherMachine()
        machine |should| have(2).states
        machine.states() |should| include_all_of(['idle', 'working'])
    def test_its_initial_state_may_be_a_callable(self):
        # initial_state may be a callable evaluated per instance
        def is_business_hours():
            return True
        class Person(StateMachine):
            initial_state = lambda person: (person.worker and is_business_hours()) and 'awake' or 'sleeping'
            state('awake')
            state('sleeping')
            def __init__(self, worker):
                self.worker = worker
                StateMachine.__init__(self)
        person = Person(worker=True)
        person.current_state |should| equal_to('awake')
        person = Person(worker=False)
        person.current_state |should| equal_to('sleeping')
# Allow running this test module directly with the unittest runner.
if __name__ == '__main__':
    unittest.main()
|
import os
import subprocess
import time
import sys
from lib.core.revision import getRevisionNumber
VERSION = "1.0"
REVISION = getRevisionNumber()
VERSION_STRING = "zerosacn/%s%s" % (VERSION, "-%s" % REVISION if REVISION else "-nongit-%s" % time.strftime("%Y%m%d", time.gmtime(os.path.getctime(__file__))))
IS_WIN = subprocess.mswindows
PLATFORM = os.name
PYVERSION = sys.version.split()[0]
ISSUES_PAGE = "https://github.com/zer0yu/ZEROScan/issues"
GIT_REPOSITORY = "git@github.com:zer0yu/ZEROScan.git"
GIT_PAGE = "https://github.com/zer0yu/ZEROScan"
LEGAL_DISCLAIMER = "Usage of zeroscan for attacking targets without prior mutual consent is illegal."
BANNER = """
____________ _____ ____ _____
|___ / ____| __ \ / __ \ / ____|
/ /| |__ | |__) | | | | (___ ___ __ _ _ __
/ / | __| | _ /| | | |\___ \ / __/ _` | '_ \
/ /__| |____| | \ \| |__| |____) | (_| (_| | | | |
/_____|______|_| \_\\\____/|_____/ \___\__,_|_| |_|
+ -- --=[ ZEROScan - %s ]=-- -- +
""" % (VERSION)
UNICODE_ENCODING = "utf-8"
INVALID_UNICODE_CHAR_FORMAT = r"\?%02x"
INDENT = " " * 2
POC_ATTRS = ( "appName", "appVersion", "author", "description", "references")
HTTP_DEFAULT_HEADER = {
"Accept": "*/*",
"Accept-Charset": "GBK,utf-8;q=0.7,*;q=0.3",
"Accept-Language": "zh-CN,zh;q=0.8",
"Cache-Control": "max-age=0",
"Connection": "keep-alive",
"Referer": "http://www.baidu.com",
"User-Agent": "Mozilla/5.0 (Windows NT 5.1; rv:5.0) Gecko/20100101 Firefox/5.0"
}
REPORT_TABLEBASE = """\
<tbody>
%s
</tbody>
"""
REPORT_HTMLBASE = """\
<!DOCTYPE html>
<html lang="zh-cn">
<head>
<meta charset="utf-8">
<title></title>
<style type="text/css">
caption{padding-top:8px;padding-bottom:8px;color:#777;text-align:left}th{text-align:left}.table{width:100%%;max-width:100%%;margin-bottom:20px}.table>thead>tr>th,.table>tbody>tr>th,.table>tfoot>tr>th,.table>thead>tr>td,.table>tbody>tr>td,.table>tfoot>tr>td{padding:8px;line-height:1.42857143;vertical-align:top;border-top:1px solid #ddd}.table>thead>tr>th{vertical-align:bottom;border-bottom:2px solid #ddd}.result0{display:none}.result1{}.status{cursor: pointer;}
</style>
<script>
function showDetail(dom){
parent = dom.parentElement;
detail = parent.children[1];
if (detail == undefined){
return;
};
if (detail.className == 'result0'){
detail.className = 'result1';
}else{
detail.className = 'result0';
};
}
</script>
</head>
<body>
<div class="container">
<table class="table">
<thead>
%s
</thead>
%s
</table>
</div>
</body>
</html>
"""
|
from OpenGL import GL
from cocos.director import director
from .node import GUINode
def _anchor_to_position_a(anchor, window_size, self_size):
abs_anchor = abs(anchor)
if isinstance(abs_anchor, int):
result = abs_anchor
elif isinstance(abs_anchor, float):
result = int(window_size * abs_anchor)
# TODO px, em, etc.?
if anchor < 0:
result = window_size - result - self_size
return result
def _anchor_to_position_c(anchor, window_size, self_size):
abs_anchor = abs(anchor)
if isinstance(abs_anchor, int):
result = abs_anchor
elif isinstance(abs_anchor, float):
result = int(window_size * abs_anchor * 0.01) - self_size // 2
# TODO px, em, etc.?
if anchor < 0:
result = window_size - result - self_size
return result
class AttachedWindow(GUINode):
    """GUI window whose position is derived each frame from an
    (anchor_x, anchor_y) pair relative to the director's window
    (see _anchor_to_position_a for anchor semantics)."""
    def __init__(self, attach=None, **kwargs):
        super(AttachedWindow, self).__init__(**kwargs)
        # (anchor_x, anchor_y), or None to disable automatic positioning
        self.attach = attach
    def get_content_size(self):
        """Return the first child's (width, height), or (0, 0) when
        there are no children."""
        nodes = self.get_nodes()
        if not nodes:
            return (0, 0)
        else:
            return nodes[0].width, nodes[0].height
    def evaluate_position(self):
        """Recompute self.position from the current window size and the
        margin box, using edge-anchor semantics."""
        if self.attach is not None:
            ww, wh = director.window.width, director.window.height
            sw, sh = self.margin_box[2:4]
            anchor_x, anchor_y = self.attach
            x = _anchor_to_position_a(anchor_x, ww, sw)
            y = _anchor_to_position_a(anchor_y, wh, sh)
            self.position = (x, y)
    def visit(self):
        # Reposition every frame so window resizes are tracked.
        self.evaluate_position()
        super(AttachedWindow, self).visit()
    #def add(self, child, *args, **kwargs):
        #if len(self.get_children()) >= 1:
            #raise RuntimeError(
                #'Only one child is supported for %s. '
                #'Use layouts to add more objects.' % type(self).__name__)
        #super(AttachedWindow, self).add(child, *args, **kwargs)
class CenteredWindow(AttachedWindow):
    """An AttachedWindow whose anchors are window percentages and which is
    positioned by its center rather than a corner."""

    def evaluate_position(self):
        """Recompute self.position using the center-anchored conversion."""
        if self.attach is None:
            return
        win_w, win_h = director.window.width, director.window.height
        self_w, self_h = self.margin_box[2:4]
        anchor_x, anchor_y = self.attach
        self.position = (
            _anchor_to_position_c(anchor_x, win_w, self_w),
            _anchor_to_position_c(anchor_y, win_h, self_h),
        )
class ModalWindow(CenteredWindow):
    """A centered window that dims everything behind it with a translucent
    full-screen quad and swallows mouse motion events."""

    def __init__(self, attach=None, fade_color=(0, 0, 0, 128), **kwargs):
        if attach is None:
            attach = (50., 50.)  # exact center
        super(ModalWindow, self).__init__(attach=attach, **kwargs)
        # RGBA bytes (0-255) for the backdrop quad.
        self.fade_color = fade_color

    def apply_style(self, **options):
        super(ModalWindow, self).apply_style(**options)
        # Draw above everything else.
        self.z = 777

    def draw(self):
        # Oversized quad so the backdrop covers the whole screen regardless
        # of the window's transform.
        ww, wh = 2000, 2000
        GL.glPushMatrix()
        self.transform()
        GL.glPushAttrib(GL.GL_CURRENT_BIT)
        GL.glBegin(GL.GL_QUADS)
        GL.glColor4ubv(self.fade_color)
        # BUGFIX: the original used map(), which is a lazy iterator on
        # Python 3 and therefore emitted no vertices at all.
        for vertex in [(-ww, -wh), (ww, -wh), (ww, wh), (-ww, wh)]:
            GL.glVertex2fv(vertex)
        GL.glEnd()
        GL.glPopAttrib()
        GL.glPopMatrix()
        super(ModalWindow, self).draw()

    def on_mouse_press(self, x, y, button, modifiers):
        pass  # TODO close itself

    def on_mouse_motion(self, *args):
        # Swallow motion events so nodes behind the modal never see them.
        return False
|
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: adds session_start/session_end timestamps to
    # ClassSession.  The hard-coded datetime defaults were captured when the
    # migration was generated; with preserve_default=False they are only used
    # once to back-fill existing rows.

    dependencies = [
        ('classes', '0005_auto_20160915_1227'),
    ]

    operations = [
        migrations.AddField(
            model_name='classsession',
            name='session_end',
            field=models.DateTimeField(default=datetime.datetime(2016, 9, 15, 12, 32, 42, 324526)),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='classsession',
            name='session_start',
            field=models.DateTimeField(default=datetime.datetime(2016, 9, 15, 12, 32, 50, 387521)),
            preserve_default=False,
        ),
    ]
|
from ...maths import Vector2D
from ...core import globalSystem
from ..utils import getDirectionOrAngle
from ..core import *
from .core import PathElement
from .common import *
_INF = float('inf')
class LinearPathElement(AcceleratableElement):
    """
    A PathElement that represents linear motion.

    Two configuration modes exist:

    * With ``duration``: motion has a fixed speed, lasts ``duration`` per
      iteration and repeats ``repeatCount`` times (direction flips each
      iteration).  Either ``finalPoint`` or ``direction`` + ``distance``
      determine the trajectory.
    * Without ``duration``: motion is an endless ray along ``direction``
      at ``speed`` (or ``initialSpeed``), until forceEnd/forceNext.
    """

    def initialize(self, **config):
        """Parse ``config`` and set speed/direction/duration/repeats.

        Raises ConfigurationError on inconsistent combinations of the
        ``duration``/``direction``/``finalPoint``/``distance`` keys.
        """
        direction = getDirectionOrAngle(config, return_none=True)
        # If ``duration`` is defined, then the path is determined by
        # a finite duration of time and a number determine how many
        # times to repeat. The path also has a fixed speed.
        #
        # If ``duration`` is not defined, then the path is determined
        # by a direction, has a varying speed, and lasts an infinite
        # length of time until forceEnd or forceNext is called.
        duration = config.get("duration")
        if duration is not None:
            repeats = config.get("repeatCount", 1)
            finalPoint = config.get("finalPoint")
            # Identity tests (``is None``) instead of the original chained
            # ``==`` which could invoke Vector2D equality against None.
            if direction is None and finalPoint is None:
                raise ConfigurationError(
                    "Either ``direction`` or ``finalPoint`` must be " \
                    "defined when ``duration`` is given.")
            distance = config.get("distance")
            if direction is None and distance is not None:
                # direction is None implies finalPoint is set (see above),
                # so distance would conflict with finalPoint here.
                raise ConfigurationError(
                    "``finalPoint`` and ``distance`` cannot both be " \
                    "defined simultaneously.")
            elif finalPoint is None and distance is None:
                raise ConfigurationError(
                    "``distance`` must be defined if ``direction`` is defined.")
            if finalPoint:
                # finalPoint wins: derive both speed and direction from it.
                speed = finalPoint.magnitude() / duration * globalSystem._timestep
                direction = finalPoint.normalize()
            else:
                speed = distance / duration * globalSystem._timestep
        else:
            repeats = 1
            if direction is None:
                raise ConfigurationError(
                    "If ``duration`` is not given, ``direction`` must be defined.")
            # ``speed`` takes precedence; fall back to ``initialSpeed``
            # (a missing ``initialSpeed`` then raises KeyError, as before).
            if "speed" in config:
                speed = config["speed"]
            else:
                speed = config["initialSpeed"]
        self.speed = speed
        self.direction = direction
        self.duration = duration
        self.repeats = repeats
        # The current iteration number increases up to the given number
        # of repeats.
        self._current_iteration = 0
        self._transition_time = 0
        self._transition_amount = 0

    def rotate(self, amount, radians=True):
        """Rotate the direction."""
        self.direction = self.direction.rotate(amount, radians=radians)

    def changeDirection(self, direction):
        """Change the direction."""
        self.direction = Vector2D(*direction).normalize()

    def takeAim(self):
        """Aim directly at the player (the mouse)."""
        # NOTE(review): ``pygame`` is presumably brought in via one of the
        # star imports above -- confirm.
        self.direction = (Vector2D(*pygame.mouse.get_pos()) - self.bullet.position).normalize()

    def updateDisplacement(self):
        """
        Update this PathElement's displacement.
        """
        self.displacement += self.speed * self.direction
        if self._transition_time > 1e-9:  # Accounts for floating point errors
            self.speed += self._transition_amount
            self._transition_time -= globalSystem._timestep
        if self.duration is not None:
            # If we have completed one iteration (from (0, 0) to finalPoint)
            if self.local_time > (self._current_iteration + 1) * self.duration:
                self._current_iteration += 1
                self.direction *= -1  # bounce back for the next iteration
                # If we have repeated as many times as desired, then we are finished.
                if self._current_iteration == self.repeats:
                    self.done = True
|
import logging
import os
import sys
import pkg_resources
import shutil
from threading import Timer
from device import Device
from app import App
from env_manager import AppEnvManager
from input_manager import InputManager
from droidbox_scripts.droidbox import DroidBox
class DroidBot(object):
    """
    The main class of droidbot.

    Wires together the device, the app under test, the environment manager
    and the input manager, then drives a full test session via start()/stop().
    """
    # this is a single instance class
    instance = None

    def __init__(self,
                 app_path=None,
                 device_serial=None,
                 is_emulator=False,
                 output_dir=None,
                 env_policy=None,
                 policy_name=None,
                 random_input=False,
                 script_path=None,
                 event_count=None,
                 event_interval=None,
                 timeout=None,
                 keep_app=None,
                 keep_env=False,
                 cv_mode=False,
                 debug_mode=False,
                 profiling_method=None,
                 grant_perm=False,
                 enable_accessibility_hard=False,
                 master=None,
                 with_droidbox=False):
        """
        initiate droidbot with configurations
        :return:
        """
        logging.basicConfig(level=logging.DEBUG if debug_mode else logging.INFO)
        self.logger = logging.getLogger('DroidBot')
        # Register this instance as the process-wide singleton.
        DroidBot.instance = self
        self.output_dir = output_dir
        if output_dir is not None:
            if not os.path.isdir(output_dir):
                os.makedirs(output_dir)
            # Copy the bundled HTML report page and its stylesheets into the
            # output directory, replacing stylesheets from any prior run.
            html_index_path = pkg_resources.resource_filename("droidbot", "resources/index.html")
            stylesheets_path = pkg_resources.resource_filename("droidbot", "resources/stylesheets")
            target_stylesheets_dir = os.path.join(output_dir, "stylesheets")
            if os.path.exists(target_stylesheets_dir):
                shutil.rmtree(target_stylesheets_dir)
            shutil.copy(html_index_path, output_dir)
            shutil.copytree(stylesheets_path, target_stylesheets_dir)
        self.timeout = timeout
        self.timer = None
        self.keep_env = keep_env
        self.keep_app = keep_app
        # Members are pre-set to None so stop() can run safely even if
        # construction fails part-way through the try block below.
        self.device = None
        self.app = None
        self.droidbox = None
        self.env_manager = None
        self.input_manager = None
        self.enable_accessibility_hard = enable_accessibility_hard
        self.enabled = True
        try:
            self.device = Device(device_serial=device_serial,
                                 is_emulator=is_emulator,
                                 output_dir=self.output_dir,
                                 cv_mode=cv_mode,
                                 grant_perm=grant_perm,
                                 enable_accessibility_hard=self.enable_accessibility_hard)
            self.app = App(app_path, output_dir=self.output_dir)
            if with_droidbox:
                self.droidbox = DroidBox(droidbot=self, output_dir=self.output_dir)
            self.env_manager = AppEnvManager(device=self.device,
                                             app=self.app,
                                             env_policy=env_policy)
            self.input_manager = InputManager(device=self.device,
                                              app=self.app,
                                              policy_name=policy_name,
                                              random_input=random_input,
                                              event_count=event_count,
                                              event_interval=event_interval,
                                              script_path=script_path,
                                              profiling_method=profiling_method,
                                              master=master)
        except Exception as e:
            # Any setup failure aborts the whole run after cleanup.
            self.logger.warning("Something went wrong: " + e.message)
            import traceback
            traceback.print_exc()
            self.stop()
            sys.exit(-1)

    @staticmethod
    def get_instance():
        # Accessor for the singleton registered in __init__.
        if DroidBot.instance is None:
            print "Error: DroidBot is not initiated!"
            sys.exit(-1)
        return DroidBot.instance

    def start(self):
        """
        start interacting
        :return:
        """
        if not self.enabled:
            return
        self.logger.info("Starting DroidBot")
        try:
            # Optional watchdog: stop() fires after `timeout` seconds.
            # NOTE(review): under Python 2 a None timeout compares False here;
            # on Python 3 `None > 0` would raise -- confirm the target version.
            if self.timeout > 0:
                self.timer = Timer(self.timeout, self.stop)
                self.timer.start()
            # Each step re-checks self.enabled so a concurrent stop() (e.g.
            # from the watchdog timer) aborts the sequence promptly.
            self.device.set_up()
            if not self.enabled:
                return
            self.device.connect()
            if not self.enabled:
                return
            self.device.install_app(self.app)
            if not self.enabled:
                return
            self.env_manager.deploy()
            if not self.enabled:
                return
            if self.droidbox is not None:
                # Run DroidBox taint analysis alongside input generation.
                self.droidbox.set_apk(self.app.app_path)
                self.droidbox.start_unblocked()
                self.input_manager.start()
                self.droidbox.stop()
                self.droidbox.get_output()
            else:
                self.input_manager.start()
        except KeyboardInterrupt:
            self.logger.info("Keyboard interrupt.")
            pass
        except Exception as e:
            self.logger.warning("Something went wrong: " + e.message)
            import traceback
            traceback.print_exc()
            self.stop()
            sys.exit(-1)
        self.stop()
        self.logger.info("DroidBot Stopped")
        # Hard exit: skips interpreter cleanup so lingering threads cannot
        # keep the process alive.
        os._exit(0)

    def stop(self):
        # Tear components down; each member is guarded because __init__ may
        # have failed before creating it.
        self.enabled = False
        if self.timer and self.timer.isAlive():
            self.timer.cancel()
        if self.env_manager:
            self.env_manager.stop()
        if self.input_manager:
            self.input_manager.stop()
        if self.droidbox:
            self.droidbox.stop()
        if self.device:
            self.device.disconnect()
            if not self.keep_env:
                self.device.tear_down()
            if not self.keep_app:
                self.device.uninstall_app(self.app)
class DroidBotException(Exception):
    """Base exception type for errors raised by DroidBot."""
    pass
|
"""
This module contains data-types and helpers which are proper to the SNMP
protocol and independent of X.690
"""
from typing import Any, Iterator, Union
from x690.types import ObjectIdentifier, Type # type: ignore
from puresnmp.typevars import PyType
# SNMP error-status codes mapped to human-readable labels (the names match
# the error-status enumeration used by the SNMP PDU definitions).
ERROR_MESSAGES = {
    0: "(noError)",
    1: "(tooBig)",
    2: "(noSuchName)",
    3: "(badValue)",
    4: "(readOnly)",
    5: "(genErr)",
    6: "(noAccess)",
    7: "(wrongType)",
    8: "(wrongLength)",
    9: "(wrongEncoding)",
    10: "(wrongValue)",
    11: "(noCreation)",
    12: "(inconsistentValue)",
    13: "(resourceUnavailable)",
    14: "(commitFailed)",
    15: "(undoFailed)",
    16: "(authorizationError)",
    17: "(notWritable)",
    18: "(inconsistentName)",
}
class VarBind:
    """
    A "VarBind" is a 2-tuple containing an object-identifier and the
    corresponding value.

    Instances are iterable/indexable like a 2-tuple and are ordered,
    hashable and comparable by their (oid, value) pair.
    """
    # TODO: This class should be split in two for both the raw and pythonic
    #   API, that would simplify the typing of both "oid" and "value" a lot
    #   and keep things explicit

    oid: ObjectIdentifier = ObjectIdentifier(0)
    value: Union[PyType, Type, None] = None

    def __init__(self, oid, value):
        # type: (Union[ObjectIdentifier, str], PyType) -> None
        if not isinstance(oid, (ObjectIdentifier, str)):
            raise TypeError(
                "OIDs for VarBinds must be ObjectIdentifier or str"
                " instances! Your value: %r" % oid
            )
        if isinstance(oid, str):
            oid = ObjectIdentifier.from_string(oid)
        self.oid = oid
        self.value = value

    def __iter__(self) -> Iterator[Union[ObjectIdentifier, PyType]]:
        return iter([self.oid, self.value])

    def __getitem__(self, idx: int) -> Union[PyType, Type, None]:
        return list(self)[idx]

    def __lt__(self, other):
        # type: (Any) -> bool
        # BUGFIX: comparing against a non-VarBind used to raise
        # AttributeError; NotImplemented lets Python raise a proper
        # TypeError (or try the reflected operation) instead.
        if not isinstance(other, VarBind):
            return NotImplemented
        return (self.oid, self.value) < (other.oid, other.value)

    def __eq__(self, other):
        # type: (Any) -> bool
        # BUGFIX: equality with foreign types now returns False instead of
        # raising AttributeError, per the Python data model.
        if not isinstance(other, VarBind):
            return NotImplemented
        return (self.oid, self.value) == (other.oid, other.value)

    def __hash__(self):
        # type: () -> int
        return hash((self.oid, self.value))

    def __repr__(self):
        # type: () -> str
        return "VarBind(%r, %r)" % (self.oid, self.value)
|
import cv2
from matplotlib import pyplot as plt
# Demo: load Lenna, blur it several ways, and display a difference image.
img_file_path = "./img/Lenna.png"
img = cv2.imread(img_file_path)
img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)  # convert to grayscale
# Box blur and Gaussian blur variants of the grayscale image.
img_blur = cv2.blur(img, (3,3))
img_gauss = cv2.GaussianBlur(img,(11,11), 1)
# Edge detectors (computed but not displayed below).
sobelx = cv2.Sobel(img,cv2.CV_64F,1,0,ksize=5)
sobely = cv2.Sobel(img,cv2.CV_64F,0,1,ksize=5)
img_laplacian = cv2.Laplacian(img,cv2.CV_64F)
# Difference of box-blur ("DoB").
# NOTE(review): uint8 subtraction wraps around on negative values;
# cv2.subtract or a signed dtype may be the intent -- confirm.
dob = img - img_blur
# Difference of Gaussians ("DoG") -- same wrap-around caveat applies.
img_gauss1 = cv2.GaussianBlur(img,(5,5), 1)
img_gauss2 = cv2.GaussianBlur(img_gauss1,(5,5), 1)
dog = img - img_gauss1
# Show the original next to the DoG image.
fig, subs = plt.subplots(1,2)
plt.gray()
subs[0].imshow(img)
subs[1].imshow(dog)
plt.show()
|
import re
# Collect year-name lines ("mu ...") from two CDLI corpus files and write
# the sorted union to yearsList.txt.  Files are opened with context
# managers: the original never closed any of its three handles, so the
# output file in particular was at risk of not being flushed.
year = re.compile(r'^[0-9]+\. (mu[#| ].*)')


def _collect_years(path):
    """Return the set of year names matched by ``year`` in the file at *path*."""
    found = set()
    with open(path, 'r') as handle:
        for line in handle:
            s = year.search(line)
            if s:
                found.add(s.group(1))
    return found


shyears = _collect_years('corpus/pre-identification/Shulgi_42/Shulgi_42_cdli.txt')
asyears = _collect_years('corpus/pre-identification/Amar-Suen_6/Amar-Suen-6_cdli.txt')

with open('yearsList.txt', 'w') as years:
    for entry in sorted(asyears | shyears):
        years.write(entry + '\n')
|
import argparse
import concurrent.futures
import datetime
import json
import boto3
SFN = boto3.client('stepfunctions')
def format_date_fields(obj):
    """Convert every datetime value in *obj* (in place) to an ISO-8601
    string, then return the same mapping for convenience."""
    for key, value in list(obj.items()):
        if isinstance(value, datetime.datetime):
            obj[key] = value.isoformat()
    return obj
def get_execution_details(execution_arn):
    """Describe one execution, with datetimes ISO-formatted and the boto3
    response metadata removed."""
    details = format_date_fields(SFN.describe_execution(executionArn=execution_arn))
    del details['ResponseMetadata']
    return details
def main(args):
    """Print (as JSON) the details of every RUNNING execution of the
    state machine named by args.state_machine_arn."""
    # TODO(dw): pagination for > 100 executions
    executions = SFN.list_executions(
        stateMachineArn=args.state_machine_arn,
        statusFilter='RUNNING'
    )
    # Describe executions concurrently; order follows completion, not input.
    with concurrent.futures.ThreadPoolExecutor(max_workers=8) as executor:
        futures = [
            executor.submit(get_execution_details, execution['executionArn'])
            for execution in executions['executions']
        ]
        results = [
            future.result()
            for future in concurrent.futures.as_completed(futures)
        ]
    print(json.dumps(results))
if __name__ == '__main__':
    # CLI entry point: requires the target state machine's ARN.
    parser = argparse.ArgumentParser()
    parser.add_argument('--state-machine-arn', required=True)
    args = parser.parse_args()
    main(args)
|
from setuptools import setup, find_packages
import visitingTimes
# Package metadata; the version is sourced from the package itself so it is
# defined in exactly one place.
setup(
    name='visitingTimes',
    version=visitingTimes.__version__,
    author='',
    author_email='',
    packages=find_packages(exclude=['test']),
    description='Museum Visiting Times',
    long_description=open('README.md').read(),
    url='',
)
|
import argparse
import pdb
import traceback
from typing import List, Tuple
def test_ip(ip: int, rules: List[Tuple[int, int]], max_addr: int) -> bool:
for (start, end) in rules:
if start <= ip <= end:
break
else:
if ip < max_addr:
return True
return False
def solve(rules: List[Tuple[int, int]], max_addr: int) -> Tuple[int, int]:
    """Return (first allowed address, count of allowed addresses).

    Candidate addresses are the address just past each rule's end; part one
    is the first valid candidate, part two walks forward from each valid
    candidate counting consecutive allowed addresses.
    """
    candidates = [high + 1 for (_, high) in rules]
    allowed = [addr for addr in candidates if test_ip(addr, rules, max_addr)]
    lowest: int = allowed[0]
    total: int = 0
    for addr in allowed:
        while test_ip(addr, rules, max_addr):
            total += 1
            addr += 1
    return (lowest, total)
if __name__ == "__main__":
    # CLI: optional puzzle-input path and optional exclusive upper bound of
    # the address space.
    parser = argparse.ArgumentParser(description="Advent of Code - 2016 - Day 20 - Firewall Rules.")
    parser.add_argument(
        "input",
        type=str,
        default="input.txt",
        nargs="?",
        help="The puzzle input. (Default %(default)s)",
    )
    parser.add_argument(
        "max_addr",
        type=int,
        default=4294967296,
        nargs="?",
        help="The largest address. (Default %(default)s)",
    )
    args = parser.parse_args()
    # Each input line is "low-high"; parse into inclusive integer ranges.
    rules: List[Tuple[int, int]] = []
    with open(args.input, "rt") as inf:
        for line in inf:
            parts = line.strip().split("-")
            rules.append((int(parts[0]), int(parts[1])))
    rules.sort()
    try:
        print(solve(rules, args.max_addr))
    except Exception:
        # Drop into the post-mortem debugger on any solver failure.
        traceback.print_exc()
        pdb.post_mortem()
|
from Classes import *
def test_ia():
    """Exercise the AI once: a CPU player plays through a fresh score sheet."""
    score_sheet = FichedeScore()
    ai_player = CPU()
    ai_player.jouer(score_sheet)
def repeat(times):
    """Run test_ia() *times* times."""
    for _ in range(times):
        test_ia()


repeat(100)
|
import cgt, numpy as np
class ParamCollection(object):
    """
    A utility class containing a collection of parameters
    which makes it convenient to write optimization code that uses flat vectors
    """
    def __init__(self,params): #pylint: disable=W0622
        """
        params should be a list of cgt nodes that were created by the cgt.shared or nn.parameter functions
        """
        # Every entry must be a data node of the configured float dtype.
        assert all(param.is_data() and param.dtype == cgt.floatX for param in params)
        self._params = params

    @property
    def params(self):
        # Read-only access to the underlying list of parameter nodes.
        return self._params

    def get_values(self):
        """
        Returns list of values of parameter arrays
        """
        return [param.op.get_value() for param in self._params]

    def get_shapes(self):
        """
        Shapes of parameter arrays
        """
        return [param.op.get_shape() for param in self._params]

    def get_total_size(self):
        """
        Total number of parameters (sum of element counts over all arrays)
        """
        return sum(np.prod(shape) for shape in self.get_shapes())

    def num_vars(self):
        """
        Number of parameter arrays
        """
        return len(self._params)

    def set_values(self, parvals):
        """
        Set values of parameter arrays given list of values `parvals`
        """
        assert len(parvals) == len(self._params)
        for (param, newval) in zip(self._params, parvals):
            param.op.set_value(newval)
            # NOTE(review): the shape check runs after the assignment;
            # presumably set_value itself rejects mismatches -- confirm.
            assert param.op.get_shape() == newval.shape

    def set_values_flat(self, theta):
        """
        Set parameters using a vector which represents all of the parameters flattened and concatenated
        """
        theta = theta.astype(cgt.floatX)
        arrs = []
        n = 0  # running offset into the flat vector
        for shape in self.get_shapes():
            size = np.prod(shape)
            arrs.append(theta[n:n+size].reshape(shape))
            n += size
        # theta must account for every parameter element exactly.
        assert theta.size == n
        self.set_values(arrs)

    def get_values_flat(self):
        """
        Flatten all parameter arrays into one vector and return it as a numpy array
        """
        theta = np.empty(self.get_total_size(),dtype=cgt.floatX)
        n = 0  # running offset into the flat vector
        for param in self._params:
            s = param.op.get_size()
            theta[n:n+s] = param.op.get_value().flat
            n += s
        assert theta.size == n
        return theta

    def _params_names(self):
        # Pair each parameter with a unique label: its name (or a generic
        # tensor description) suffixed with "@<index>".
        out = []
        for (i,param) in enumerate(self._params):
            name = param.name or _tensordesc(param.typ)
            name = "%s@%i"%(name,i)
            out.append((param,name))
        return out

    def to_h5(self,grp):
        """
        Save parameter arrays to hdf5 group `grp`
        """
        for (param,name) in self._params_names():
            arr = param.op.get_value()
            grp[name] = arr

    def from_h5(self,grp):
        """
        Load parameter arrays from hdf5 group `grp`
        """
        # NOTE(review): ``.value`` is the legacy h5py Dataset accessor
        # (removed in h5py 3.x); ``grp[name][()]`` is the modern spelling --
        # confirm the supported h5py version before changing.
        parvals = [grp[name].value for (_,name) in self._params_names()]
        self.set_values(parvals)
def _tensordesc(typ):
if typ.ndim == 0:
part0 = "scalar"
elif typ.ndim == 1:
part0 = "vector"
elif typ.ndim == 2:
part0 = "matrix"
else:
part0 = "tensor"+str(typ.ndim)
return "%s_%s"%(part0, typ.dtype)
|
'''
There are some scripts and utilities out there which can't handle genes with
multiple mRNA children. This script splits each of these (and children) and
instead creates new gene features.
The ID of the newly-generated gene feature duplicates the source one but with
the suffix "_N" added, where N increases from 2..X for each isoform present for
a given gene.
Follow the GFF3 specification!
Author: Joshua Orvis
'''
import argparse
import sys
from biocode import gff, things
def main():
    """Split genes with multiple mRNA isoforms into one gene per mRNA.

    Extra mRNAs are moved onto new genes named "<gene_id>_N"; the original
    gene keeps its first mRNA and is shrunk to that mRNA's coordinates.
    """
    parser = argparse.ArgumentParser( description='Checks for genes with multiple mRNA children and creates new genes for each.')

    ## output file to be written
    parser.add_argument('-i', '--input_file', type=str, required=True, help='Path to the input GFF3' )
    parser.add_argument('-o', '--output_file', type=str, required=False, help='Path to an output file to be created' )
    args = parser.parse_args()

    (assemblies, features) = gff.get_gff3_features(args.input_file)

    ## output will either be a file or STDOUT
    ofh = sys.stdout
    if args.output_file is not None:
        ofh = open(args.output_file, 'wt')

    ofh.write("##gff-version 3\n")

    for assembly_id in assemblies:
        current_assembly = assemblies[assembly_id]
        for gene in assemblies[assembly_id].genes():
            rnas_found = 0
            mRNAs = gene.mRNAs()
            for mRNA in mRNAs:
                mRNA_loc = mRNA.location_on(current_assembly)
                rnas_found += 1
                # Every mRNA after the first is split off onto a new gene
                # whose ID carries an "_N" suffix (N = isoform index).
                if rnas_found > 1:
                    # NOTE(review): remove_mRNA() runs while iterating the list
                    # returned by gene.mRNAs(); if that is the live internal
                    # list this loop would skip elements -- confirm it returns
                    # a copy.
                    gene.remove_mRNA(mRNA)
                    print("INFO: splitting mRNA off gene {0}".format(gene.id))
                    new_gene = things.Gene(id="{0}_{1}".format(gene.id, rnas_found))
                    new_gene.locate_on(target=current_assembly, fmin=mRNA_loc.fmin, fmax=mRNA_loc.fmax, strand=mRNA_loc.strand)
                    new_gene.add_RNA(mRNA)
                    new_gene.print_as(fh=ofh, format='gff3')
            if len(mRNAs) > 1:
                # Shrink the original gene's coordinates to its remaining
                # (first) mRNA before printing it.
                gene_loc = gene.location_on(current_assembly)
                mRNA_loc = mRNAs[0].location_on(current_assembly)
                gene_loc.fmin = mRNA_loc.fmin
                gene_loc.fmax = mRNA_loc.fmax
                gene_loc.strand = mRNA_loc.strand
            gene.print_as(fh=ofh, format='gff3')
if __name__ == '__main__':
    main()
|
"""Test cltk.stem."""
__author__ = 'Kyle P. Johnson <kyle@kyle-p-johnson.com>'
__license__ = 'MIT License. See LICENSE.'
from cltk.corpus.utils.importer import CorpusImporter
from cltk.stem.latin.j_v import JVReplacer
from cltk.stem.latin.stem import Stemmer
from cltk.stem.lemma import LemmaReplacer
from cltk.stem.latin.syllabifier import Syllabifier
import os
import unittest
class TestSequenceFunctions(unittest.TestCase):  # pylint: disable=R0904
    """Class for unittest.

    Exercises the Latin j/v normalizer, the Latin stemmer and syllabifier,
    and the Latin/Greek lemmatizers across every combination of the
    return_raw / return_string flags and list vs. string input.
    """
    def test_latin_i_u_transform(self):
        """Test converting ``j`` to ``i`` and ``v`` to ``u``."""
        jv_replacer = JVReplacer()
        trans = jv_replacer.replace('vem jam VEL JAM')
        self.assertEqual(trans, 'uem iam UEL IAM')

    def test_latin_stemmer(self):
        """Test Latin stemmer."""
        sentence = 'Est interdum praestare mercaturis rem quaerere, nisi tam periculosum sit, et item foenerari, si tam honestum.'  # pylint: disable=line-too-long
        stemmer = Stemmer()
        stemmed_text = stemmer.stem(sentence.lower())
        target = 'est interd praestar mercatur r quaerere, nisi tam periculos sit, et it foenerari, si tam honestum. '  # pylint: disable=line-too-long
        self.assertEqual(stemmed_text, target)

    def test_lemmatizer_inlist_latin(self):
        """Test the Latin lemmatizer.
        """
        replacer = LemmaReplacer('latin')
        unlemmatized = ['hominum', 'divomque', 'voluptas']
        lemmatized = replacer.lemmatize(unlemmatized, return_raw=False, return_string=False)
        target = ['homo', 'divus', 'voluptas']
        self.assertEqual(lemmatized, target)

    def test_lemmatizer_inlist_outlemma_latin(self):
        """Test the Latin lemmatizer.
        """
        replacer = LemmaReplacer('latin')
        unlemmatized = ['hominum', 'divomque', 'voluptas']
        lemmatized = replacer.lemmatize(unlemmatized, return_raw=True, return_string=False)
        target = ['hominum/homo', 'divomque/divus', 'voluptas/voluptas']
        self.assertEqual(lemmatized, target)

    def test_lemmatizer_inlist_outstring_latin(self):
        """Test the Latin lemmatizer.
        """
        replacer = LemmaReplacer('latin')
        unlemmatized = ['hominum', 'divomque', 'voluptas']
        lemmatized = replacer.lemmatize(unlemmatized, return_raw=False, return_string=True)
        target = 'homo divus voluptas'
        self.assertEqual(lemmatized, target)

    def test_lemmatizer_inlist_outlemma_outstring_latin(self):
        """Test the Latin lemmatizer.
        """
        replacer = LemmaReplacer('latin')
        unlemmatized = ['hominum', 'divomque', 'voluptas']
        lemmatized = replacer.lemmatize(unlemmatized, return_raw=True, return_string=True)
        target = 'hominum/homo divomque/divus voluptas/voluptas'
        self.assertEqual(lemmatized, target)

    def test_lemmatizer_instr_latin(self):
        """Test the Latin lemmatizer.
        """
        replacer = LemmaReplacer('latin')
        unlemmatized = 'hominum divomque voluptas'
        lemmatized = replacer.lemmatize(unlemmatized, return_raw=False, return_string=False)
        target = ['homo', 'divus', 'voluptas']
        self.assertEqual(lemmatized, target)

    def test_lemmatizer_instr_outlemma_latin(self):
        """Test the Latin lemmatizer.
        """
        replacer = LemmaReplacer('latin')
        unlemmatized = 'hominum divomque voluptas'
        lemmatized = replacer.lemmatize(unlemmatized, return_raw=True, return_string=False)
        target = ['hominum/homo', 'divomque/divus', 'voluptas/voluptas']
        self.assertEqual(lemmatized, target)

    def test_lemmatizer_instr_outstring_latin(self):
        """Test the Latin lemmatizer.
        """
        replacer = LemmaReplacer('latin')
        unlemmatized = 'hominum divomque voluptas'
        lemmatized = replacer.lemmatize(unlemmatized, return_raw=False, return_string=True)
        target = 'homo divus voluptas'
        self.assertEqual(lemmatized, target)

    def test_lemmatizer_instr_outlemma_outstring_latin(self):
        """Test the Latin lemmatizer.
        """
        replacer = LemmaReplacer('latin')
        unlemmatized = 'hominum divomque voluptas'
        lemmatized = replacer.lemmatize(unlemmatized, return_raw=True, return_string=True)
        target = 'hominum/homo divomque/divus voluptas/voluptas'
        self.assertEqual(lemmatized, target)

    def test_lemmatizer_inlist_greek(self):
        """Test the Greek lemmatizer.
        """
        replacer = LemmaReplacer('greek')
        unlemmatized = ['τὴν', 'διάγνωσιν', 'ἔρχεσθαι']
        lemmatized = replacer.lemmatize(unlemmatized, return_raw=False, return_string=False)
        target = ['τὴν', 'διάγνωσις', 'ἔρχομαι']
        self.assertEqual(lemmatized, target)

    def test_lemmatizer_inlist_outlemma_greek(self):
        """Test the Greek lemmatizer.
        """
        replacer = LemmaReplacer('greek')
        unlemmatized = ['τὴν', 'διάγνωσιν', 'ἔρχεσθαι']
        lemmatized = replacer.lemmatize(unlemmatized, return_raw=True, return_string=False)
        target = ['τὴν/τὴν', 'διάγνωσιν/διάγνωσις', 'ἔρχεσθαι/ἔρχομαι']
        self.assertEqual(lemmatized, target)

    def test_lemmatizer_inlist_outstring_greek(self):
        """Test the Greek lemmatizer.
        """
        replacer = LemmaReplacer('greek')
        unlemmatized = ['τὴν', 'διάγνωσιν', 'ἔρχεσθαι']
        lemmatized = replacer.lemmatize(unlemmatized, return_raw=False, return_string=True)
        target = 'τὴν διάγνωσις ἔρχομαι'
        self.assertEqual(lemmatized, target)

    def test_lemmatizer_inlist_outlemma_outstring_greek(self):
        """Test the Greek lemmatizer.
        """
        replacer = LemmaReplacer('greek')
        unlemmatized = ['τὴν', 'διάγνωσιν', 'ἔρχεσθαι']
        lemmatized = replacer.lemmatize(unlemmatized, return_raw=True, return_string=True)
        target = 'τὴν/τὴν διάγνωσιν/διάγνωσις ἔρχεσθαι/ἔρχομαι'
        self.assertEqual(lemmatized, target)

    def test_lemmatizer_instr_greek(self):
        """Test the Greek lemmatizer.
        """
        replacer = LemmaReplacer('greek')
        unlemmatized = 'τὴν διάγνωσιν ἔρχεσθαι'
        lemmatized = replacer.lemmatize(unlemmatized, return_raw=False, return_string=False)
        target = ['τὴν', 'διάγνωσις', 'ἔρχομαι']
        self.assertEqual(lemmatized, target)

    def test_lemmatizer_instr_outlemma_greek(self):
        """Test the Greek lemmatizer.
        """
        replacer = LemmaReplacer('greek')
        unlemmatized = 'τὴν διάγνωσιν ἔρχεσθαι'
        lemmatized = replacer.lemmatize(unlemmatized, return_raw=True, return_string=False)
        target = ['τὴν/τὴν', 'διάγνωσιν/διάγνωσις', 'ἔρχεσθαι/ἔρχομαι']
        self.assertEqual(lemmatized, target)

    def test_lemmatizer_instr_outstring_greek(self):
        """Test the Greek lemmatizer.
        """
        replacer = LemmaReplacer('greek')
        unlemmatized = 'τὴν διάγνωσιν ἔρχεσθαι'
        lemmatized = replacer.lemmatize(unlemmatized, return_raw=False, return_string=True)
        target = 'τὴν διάγνωσις ἔρχομαι'
        self.assertEqual(lemmatized, target)

    def test_lemmatizer_instr_outlemma_outstring_greek(self):
        """Test the Greek lemmatizer.
        """
        replacer = LemmaReplacer('greek')
        unlemmatized = 'τὴν διάγνωσιν ἔρχεσθαι'
        lemmatized = replacer.lemmatize(unlemmatized, return_raw=True, return_string=True)
        target = 'τὴν/τὴν διάγνωσιν/διάγνωσις ἔρχεσθαι/ἔρχομαι'
        self.assertEqual(lemmatized, target)

    def test_latin_syllabifier(self):
        """Test Latin syllabifier."""
        word = 'sidere'
        syllabifier = Syllabifier()
        syllables = syllabifier.syllabify(word)
        target = ['si', 'de', 're']
        self.assertEqual(syllables, target)
if __name__ == '__main__':
    unittest.main()
|
"""
Program : checkData.py
Author : Jigar R. Gosalia
Verion : 1.0
Course : CSC-620 - Programming Language Theory
Prof. : Richard Riele
Validates given data against pre-defined validators using regular expressions.
"""
import re
import os
from datetime import datetime
""" Very crude level of regular expressions to validate data, regex can be more sophisticated then this. """
CURRENT_TIME = datetime.now().strftime("%Y%m%d%H%M%S")
BASE_DIRECTORY = os.getcwd() + os.sep;
CUSTOMER_DATA = BASE_DIRECTORY + "data" + os.sep + "all.csv"
LOGS_DIRECTORY = BASE_DIRECTORY + "logs" + os.sep + CURRENT_TIME;
NAME_VALIDATOR = re.compile('[A-Z][a-z]*')
DOB_VALIDATOR = re.compile('(0[1-9]|1[0-2])/(0[1-9]|1[0-9]|2[0-9]|3[0-1])/(19[0-9][0-9]|(200[0-9]|201[0-6]))')
GENDER_VALIDATOR = re.compile('([mM][aA][lL][eL]|[fF][eE][mM][aA][lL][eE])')
EMAIL_VALIDATOR = re.compile('[a-zA-Z0-9]*@[a-zA-Z0-9-]+\.[a-z]+')
STATE_VALIDATOR = re.compile('[A-Z][A-Z]')
ZIPCODE_VALIDATOR = re.compile('[0-9][0-9][0-9][0-9][0-9]')
PHONE_VALIDATOR = re.compile('1-\([0-9][0-9][0-9]\)[0-9][0-9][0-9]-[0-9][0-9][0-9][0-9]')
IP_VALIDATOR = re.compile('\d+\.\d+\.\d+\.\d+')
def main():
    """Validate CUSTOMER_DATA record-by-record and print a summary report.

    Each checked field of every record is matched against its regex
    validator; offending values are appended to per-field log files under
    LOGS_DIRECTORY via writeData().
    """
    os.system('clear')
    print ("\n" * 10)
    print ("#" * 75)
    # Floor division so the padding also works on Python 3, where 75 / 2
    # would be a float and could not multiply a string.
    middle = (75 // 2) - (len("VALIDATORS") // 2)
    print ((" " * middle) + "VALIDATORS")
    print ("#" * 75)
    print ("")
    print ("Acceptable Name Format : [A-Z][a-z]*")
    print ("Acceptable DOB Format : mm/dd/yyyy (1900-2016)")
    print ("Acceptable Gender Format : Male|Female (case-insensitive)")
    print ("Acceptable Email Format : *@*.*")
    print ("Acceptable State Format : **")
    print ("Acceptable Zip Format : #####")
    print ("Acceptable Phone Format : +1-(###)###-####")
    print ("Acceptable IP Format : ###.###.###.###")
    print ("")
    # (log-file base name, CSV field index, validator) for each checked field.
    field_checks = [
        ("invalidFirstName", 0, NAME_VALIDATOR),
        ("invalidLastName", 1, NAME_VALIDATOR),
        ("invalidDob", 2, DOB_VALIDATOR),
        ("invalidGender", 3, GENDER_VALIDATOR),
        ("invalidEmail", 4, EMAIL_VALIDATOR),
        ("invalidState", 7, STATE_VALIDATOR),
        ("invalidZipcode", 8, ZIPCODE_VALIDATOR),
        ("invalidHomePhone", 9, PHONE_VALIDATOR),
        ("invalidCellPhone", 10, PHONE_VALIDATOR),
        ("invalidWorkPhone", 11, PHONE_VALIDATOR),
        ("invalidIP", 12, IP_VALIDATOR),
    ]
    counts = dict((name, 0) for (name, _, _) in field_checks)
    processed = 0
    insufficient = 0
    invalid = 0
    valid = 0
    # BUGFIX: defined up front so the RESULTS section below cannot hit a
    # NameError when the input file is missing.
    lines = []
    if os.path.exists(CUSTOMER_DATA):
        with open(CUSTOMER_DATA) as file:
            lines = file.read().splitlines()
        if (len(lines) > 0):
            os.mkdir(LOGS_DIRECTORY)
            for line in lines:
                # Fields 0..12 are inspected, so a record needs at least 13
                # fields, i.e. 12 commas.  (The original tested >= 11, which
                # let 12-field records through to an IndexError on record[12].)
                if (line.count(",") >= 12):
                    processed += 1
                    record = line.split(",")
                    invalidRecord = False
                    for (name, index, validator) in field_checks:
                        if (validator.match(record[index]) is None):
                            # writeData sees the pre-increment count so it can
                            # write the pattern header on the first failure.
                            writeData(name + ".txt", record[index] + " ==> " + line,
                                      counts[name], validator)
                            invalidRecord = True
                            counts[name] += 1
                    if (invalidRecord):
                        invalid += 1
                    else:
                        valid += 1
                else:
                    insufficient += 1
        else:
            print ("")
            print ("Input File <" + CUSTOMER_DATA + "> is Empty!")
            print ("")
    else:
        print ("")
        print ("Input File <" + CUSTOMER_DATA + "> Not Available!")
        print ("")
    print ("#" * 75)
    middle = (75 // 2) - (len("RESULTS") // 2)
    print ((" " * middle) + "RESULTS")
    print ("#" * 75)
    print ("")
    print ("Processing file at location : " + CUSTOMER_DATA)
    print ("Records available : " + str(len(lines)))
    print ("Records processed : " + str(processed))
    print ("Records with insufficient data : " + str(insufficient))
    print ("Records with sufficient data : " + str(valid))
    print ("Records with unacceptable data : " + str(invalid))
    print (" Records with invalid First Name : " + str(counts["invalidFirstName"]))
    print (" Records with invalid Last Name : " + str(counts["invalidLastName"]))
    print (" Records with invalid Dob : " + str(counts["invalidDob"]))
    print (" Records with invalid Gender : " + str(counts["invalidGender"]))
    print (" Records with invalid Email : " + str(counts["invalidEmail"]))
    print (" Records with invalid State : " + str(counts["invalidState"]))
    print (" Records with invalid Zipcode : " + str(counts["invalidZipcode"]))
    print (" Records with invalid HomePhone : " + str(counts["invalidHomePhone"]))
    print (" Records with invalid CellPhone : " + str(counts["invalidCellPhone"]))
    print (" Records with invalid WorkPhone : " + str(counts["invalidWorkPhone"]))
    print (" Records with invalid IP : " + str(counts["invalidIP"]))
    print ("")
    print ("#" * 75)
    print ("\n" * 10)
def writeData(fileName, data, count, pattern):
    """Append one invalid record to the named log file.

    On the first record written to a given file (count == 0) the
    validator's regex pattern is emitted as a header before the record.

    fileName -- log file name, created/appended under LOGS_DIRECTORY
    data     -- pre-formatted record line to append
    count    -- number of records already logged to this file
    pattern  -- compiled regex whose .pattern text is the header
    """
    # Single context-managed open replaces the original's two separate
    # open/close cycles; 'a' mode preserves append semantics.  Also avoids
    # shadowing the builtin name `file`.
    with open(LOGS_DIRECTORY + os.sep + fileName, 'a') as logFile:
        if count == 0:
            logFile.write(pattern.pattern + "\n\n")
        logFile.write(data + "\n")
# Top-level entry: run the validation pipeline defined above.
# NOTE(review): executes on import as well — consider an
# `if __name__ == "__main__":` guard (left unchanged here).
main()
|
"""Color cycles for maximum contrast/viewability.
Author: Seth Axen
E-mail: seth.axen@gmail.com"""
from collections import OrderedDict
# Twenty high-contrast colours: (r, g, b) float triples in [0, 1] mapped
# to descriptive names.  Insertion order ranks them — the first seven are
# the safest picks; the rest are flagged by the original author as harder
# to distinguish for people with defective colour vision.
_MAX_CONTRAST_PAIRS = (
    # best colors to use
    ((1.000, 0.702, 0.000), 'vivid_yellow'),
    ((0.502, 0.243, 0.459), 'strong_purple'),
    ((1.000, 0.408, 0.000), 'vivid_orange'),
    ((0.651, 0.741, 0.843), 'very_light_blue'),
    ((0.757, 0.000, 0.125), 'vivid_red'),
    ((0.808, 0.635, 0.384), 'grayish_yellow'),
    ((0.506, 0.439, 0.400), 'medium_gray'),
    # not good for people with defective color vision,
    ((0.000, 0.490, 0.204), 'vivid_green'),
    ((0.965, 0.463, 0.557), 'strong_purplish_pink'),
    ((0.000, 0.325, 0.541), 'strong_blue'),
    ((1.000, 0.478, 0.361), 'strong_yellowish_pink'),
    ((0.325, 0.216, 0.478), 'strong_violet'),
    ((1.000, 0.557, 0.000), 'vivid_orange_yellow'),
    ((0.702, 0.157, 0.318), 'strong_purplish_red'),
    ((0.957, 0.784, 0.000), 'vivid_greenish_yellow'),
    ((0.498, 0.094, 0.051), 'strong_reddish_brown'),
    ((0.576, 0.667, 0.000), 'vivid_yellowish_green'),
    ((0.349, 0.200, 0.082), 'deep_yellowish_brown'),
    ((0.945, 0.227, 0.075), 'vivid_reddish_orange'),
    ((0.137, 0.173, 0.086), 'dark_olive_green'),
)
MAX_CONTRAST_COLORS = OrderedDict(_MAX_CONTRAST_PAIRS)
# A 26-colour "colour alphabet" — one mnemonic name per letter a-z —
# keyed by (r, g, b) float triples in [0, 1].
_COLOR_ALPHABET_PAIRS = (
    ((0.941, 0.639, 1.000), 'amethyst'),
    ((0.000, 0.459, 0.863), 'blue'),
    ((0.600, 0.247, 0.000), 'caramel'),
    ((0.298, 0.000, 0.361), 'damson'),
    ((0.098, 0.098, 0.098), 'ebony'),
    ((0.000, 0.361, 0.192), 'forest'),
    ((0.169, 0.808, 0.282), 'green'),
    ((1.000, 0.800, 0.600), 'honeydew'),
    ((0.502, 0.502, 0.502), 'iron'),
    ((0.580, 1.000, 0.710), 'jade'),
    ((0.561, 0.486, 0.000), 'khaki'),
    ((0.616, 0.800, 0.000), 'lime'),
    ((0.761, 0.000, 0.533), 'mallow'),
    ((0.000, 0.200, 0.502), 'navy'),
    ((1.000, 0.643, 0.020), 'orpiment'),
    ((1.000, 0.659, 0.733), 'pink'),
    ((0.259, 0.400, 0.000), 'quagmire'),
    ((1.000, 0.000, 0.063), 'red'),
    ((0.369, 0.945, 0.949), 'sky'),
    ((0.000, 0.600, 0.561), 'turquoise'),
    ((0.878, 1.000, 0.400), 'uranium'),
    ((0.455, 0.039, 1.000), 'violet'),
    ((0.600, 0.000, 0.000), 'wine'),
    ((1.000, 1.000, 0.502), 'xanthin'),
    ((1.000, 1.000, 0.000), 'yellow'),
    ((1.000, 0.314, 0.020), 'zinnia'),
)
COLOR_ALPHABET = OrderedDict(_COLOR_ALPHABET_PAIRS)
|
"""
Create a new users file.
To use this, run the code and then use the `newusers` command to add these to an AWS instance.
"""
import argparse
from random import shuffle
__author__ = 'Rob Edwards'
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description=' ')
    parser.add_argument('-b', '--base', help='base of the username (default="user")', default="user")
    parser.add_argument('-n', '--number', help='number of accounts to create. (default=100)', type=int, default=100)
    parser.add_argument('-s', '--servers', help='number of servers to share users among', type=int, default=6)
    parser.add_argument('-u', '--users', help='users file for bash to write (default="users.tsv")', default="users.tsv")
    parser.add_argument('-a', '--accounts', help='accounts file to give to the users (default="accounts.tsv")', default="accounts.tsv")
    args = parser.parse_args()

    # Password alphabet: letters/digits minus easily-confused glyphs
    # (no I/l/1, O/0, etc.) plus a few symbols.
    # NOTE(review): random.shuffle is not cryptographically secure; for
    # real credentials prefer the `secrets` module.
    abt = [x for x in 'ABCDEFGHKMNPQRSTWXYZabcdefghkmnpqrstwxyz23456789@#$%^*']

    with open(args.users, 'w') as users, open(args.accounts, 'w') as accounts:
        # range(1, number + 1): the original iterated range(1, number)
        # and therefore created one account fewer than requested.
        for i in range(1, args.number + 1):
            # Round-robin the accounts across the available servers.
            server = (i % args.servers) + 1
            shuffle(abt)
            pwd = "".join(abt[0:8])
            # accounts.tsv: server <tab> username <tab> password (for the users)
            print(f"{server}\t{args.base}{i:0>4}\t{pwd}", file=accounts)
            # users.tsv: newusers(8)-format line for account creation
            print(f"{args.base}{i:0>4}:{pwd}:::Rob Added User:/home/{args.base}{i:0>4}:/bin/bash", file=users)
|
import chainer
import chainer.functions as F
import chainer.links as L
from chainer import cuda
def init_conv(array):
    """Overwrite *array* in place with zero-mean Gaussian noise (std 0.02)."""
    backend = cuda.get_array_module(array)
    array[...] = backend.random.normal(loc=0.0, scale=0.02, size=array.shape)
def init_bn(array):
    """Overwrite *array* in place with Gaussian noise centred at 1.0 (std 0.02)."""
    backend = cuda.get_array_module(array)
    array[...] = backend.random.normal(loc=1.0, scale=0.02, size=array.shape)
class ReLU(chainer.Chain):
    """Parameter-free link applying the rectified linear unit."""
    def __init__(self):
        super(ReLU, self).__init__()
    def __call__(self, x):
        activated = F.relu(x)
        return activated
class Tanh(chainer.Chain):
    """Parameter-free link applying the hyperbolic tangent."""
    def __init__(self):
        super(Tanh, self).__init__()
    def __call__(self, x):
        activated = F.tanh(x)
        return activated
class LeakyReLU(chainer.Chain):
    """Parameter-free link applying the leaky rectified linear unit."""
    def __init__(self):
        super(LeakyReLU, self).__init__()
    def __call__(self, x):
        activated = F.leaky_relu(x)
        return activated
class DCGAN_G(chainer.ChainList):
    """DCGAN generator: a stack of deconvolutions growing a Z vector into
    an isize x isize, nc-channel image with a final tanh.

    isize must be a power of two >= 4; otherwise the doubling loop in
    __init__ never terminates.
    """
    def __init__(self, isize, nc, ngf, conv_init=None, bn_init=None):
        # Grow the initial channel count so that repeated spatial doubling
        # from a 4x4 map reaches exactly isize at the output layer.
        cngf, tisize = ngf // 2, 4
        while tisize != isize:
            cngf = cngf * 2
            tisize = tisize * 2
        layers = []
        # input is Z, going into a convolution (produces the first 4x4 map)
        layers.append(L.Deconvolution2D(None, cngf, ksize=4, stride=1, pad=0, initialW=conv_init, nobias=True))
        layers.append(L.BatchNormalization(cngf, initial_gamma=bn_init))
        layers.append(ReLU())
        # csize tracks the current spatial size.  (The original also bound
        # an unused `cndf` here; removed.)
        csize = 4
        while csize < isize // 2:
            # Each stage doubles the spatial size and halves the channels.
            layers.append(L.Deconvolution2D(None, cngf // 2, ksize=4, stride=2, pad=1, initialW=conv_init, nobias=True))
            layers.append(L.BatchNormalization(cngf // 2, initial_gamma=bn_init))
            layers.append(ReLU())
            cngf = cngf // 2
            csize = csize * 2
        # Final stage maps to nc channels, doubles to isize, tanh output.
        layers.append(L.Deconvolution2D(None, nc, ksize=4, stride=2, pad=1, initialW=conv_init, nobias=True))
        layers.append(Tanh())
        super(DCGAN_G, self).__init__(*layers)
    def __call__(self, x):
        """Apply every child link in registration order."""
        for link in self:
            x = link(x)
        return x
class DCGAN_D(chainer.ChainList):
    """DCGAN discriminator / encoder.

    Halves the spatial size with stride-2 convolutions down to a 4x4 map,
    then projects it to nz channels.  isize is assumed to be a power of
    two >= 8 (the halving loop relies on it; matches DCGAN_G above).
    """
    def __init__(self, isize, ndf, nz=1, conv_init=None, bn_init=None):
        layers = []
        layers.append(L.Convolution2D(None, ndf, ksize=4, stride=2, pad=1, initialW=conv_init, nobias=True))
        layers.append(LeakyReLU())
        # Integer floor division for the spatial size: the original used
        # true division, yielding floats; for the supported power-of-two
        # sizes the loop counts are identical.  This also matches the
        # `//` convention used in DCGAN_G.
        csize, cndf = isize // 2, ndf
        while csize > 4:
            # Each stage halves the spatial size and doubles the channels.
            # (The original bound an unused `in_feat` here; removed.)
            out_feat = cndf * 2
            layers.append(L.Convolution2D(None, out_feat, ksize=4, stride=2, pad=1, initialW=conv_init, nobias=True))
            layers.append(L.BatchNormalization(out_feat, initial_gamma=bn_init))
            layers.append(LeakyReLU())
            cndf = cndf * 2
            csize = csize // 2
        # state size. K x 4 x 4
        layers.append(L.Convolution2D(None, nz, ksize=4, stride=1, pad=0, initialW=conv_init, nobias=True))
        super(DCGAN_D, self).__init__(*layers)
    def encode(self, x):
        """Run all layers and return the raw nz-channel feature map."""
        for i in range(len(self)):
            x = self[i](x)
        return x
    def __call__(self, x):
        """Encode, then average over axis 0 (the batch axis) and squeeze."""
        x = self.encode(x)
        x = F.sum(x, axis=0) / x.shape[0]
        return F.squeeze(x)
class EncoderDecoder(chainer.Chain):
    """Autoencoder built from the DCGAN pair: a DCGAN_D encoder feeding a
    batch-normalized bottleneck of nBottleneck channels, decoded back to
    image space by a DCGAN_G."""
    def __init__(self, nef, ngf, nc, nBottleneck, image_size=64, conv_init=None, bn_init=None):
        super(EncoderDecoder, self).__init__(
            encoder=DCGAN_D(image_size, nef, nBottleneck, conv_init, bn_init),
            bn=L.BatchNormalization(nBottleneck, initial_gamma=bn_init),
            decoder=DCGAN_G(image_size, nc, ngf, conv_init, bn_init)
        )
    def encode(self, x):
        """Map x to the bottleneck: encoder features -> BN -> leaky ReLU."""
        return F.leaky_relu(self.bn(self.encoder.encode(x)))
    def decode(self, x):
        """Map a bottleneck code back to image space via the generator."""
        return self.decoder(x)
    def __call__(self, x):
        return self.decode(self.encode(x))
|
from nose.tools import raises
import unittest
from victor.exceptions import FieldRequiredError
from victor.transform import Transformer
from victor.vector import StringField, Vector
class TransformerTestCase(unittest.TestCase):
    """Unit tests for victor.transform.Transformer.

    Each test defines a throwaway Transformer subclass inline; nose's
    @raises decorator asserts the expected exception type where one is
    expected.  Data flows in via push_data() and out via the .output
    attribute, with per-field transform_<field>() hooks applied between.
    """
    def test_transformer_name(self):
        """
        Test that a transformer can return its class name so
        that it can be named in workflows.
        """
        class NamedTransformer(Transformer):
            input_vector = Vector()
            output_vector = Vector()
        tf = NamedTransformer()
        assert tf.get_name() == 'NamedTransformer',\
            'Transformer does not know its own name'
    @raises(AssertionError)
    def test_transformer_input_cls(self):
        """
        Test missing input vector throws exception.
        """
        # No input_vector defined: instantiation must fail.
        class InputTransformer(Transformer):
            pass
        InputTransformer()
    @raises(AssertionError)
    def test_transformer_output_cls(self):
        """
        Test missing output vector throws exception.
        """
        # input_vector present but output_vector missing.
        class OutputTransformer(Transformer):
            input_vector = Vector()
        OutputTransformer()
    def test_transform_hook(self):
        """
        Test transformer transform_"field" hook methods work and return values.
        """
        class HookTransformer(Transformer):
            input_vector = Vector()
            output_vector = Vector()
            # Hook naming convention: transform_<field name>.
            def transform_count(self, value, data):
                return value + 1
        data = {
            'count': 1
        }
        tf = HookTransformer()
        tf.push_data(data)
        output = tf.output
        assert output['count'] == 2, 'Transformer did not increment count'
    def test_transform_input(self):
        """Test that a hook may enrich the record dict in place and that
        the added keys appear in the output."""
        class TrackVector(Vector):
            remote_addr = StringField()
        class InputTransformer(Transformer):
            input_vector = TrackVector()
            output_vector = Vector()
            def transform_remote_addr(self, value, data):
                # Mutates the shared record dict as a side effect.
                geo = {
                    'city': 'Someplace',
                    'country': 'Somewhere'
                }
                data.update(geo)
                return value
        data = {
            'remote_addr': 'some ip'
        }
        tf = InputTransformer()
        tf.push_data(data)
        output = tf.output
        assert 'city' in output, 'City field not in output'
        assert 'country' in output, 'Country field not in output'
        assert 'remote_addr' in output, 'Remote addr not in output'
    # NOTE(review): "ouput" is a typo for "output"; left as-is since test
    # names may be referenced by CI selectors.
    @raises(FieldRequiredError)
    def test_ouput_missing_field(self):
        """
        Test output validation fires with data that has already passed
        through input.
        """
        class OutputVector(Vector):
            name = StringField(required=True)
        class NameTransformer(Transformer):
            input_vector = Vector()
            output_vector = OutputVector()
        tf = NameTransformer()
        tf.push_data({})
    @raises(FieldRequiredError)
    def test_input_missing_field(self):
        """
        Test that input does notice a missing field.
        """
        class InputVector(Vector):
            name = StringField(required=True)
        class NameTransformer(Transformer):
            input_vector = InputVector()
            output_vector = Vector()
        tf = NameTransformer()
        tf.push_data({})
|
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
def get_unspent(listunspent, amount):
    """Return the first unspent output whose 'amount' equals *amount*.

    Raises AssertionError when no matching UTXO is present.
    """
    matches = (utxo for utxo in listunspent if utxo['amount'] == amount)
    try:
        return next(matches)
    except StopIteration:
        raise AssertionError('Could not find unspent with amount={}'.format(amount))
class RawTransactionsTest(BitcoinTestFramework):
    """Functional tests for the fundrawtransaction RPC.

    Exercises coin selection, change handling, option validation,
    watch-only funding, fee parity with wallet-built transactions, and
    locked-wallet behaviour across four connected nodes.
    """
    def __init__(self):
        """Four nodes, starting from an empty (clean) chain."""
        super().__init__()
        self.setup_clean_chain = True
        self.num_nodes = 4
    def setup_network(self, split=False):
        """Start the nodes and connect them into one mesh-ish network."""
        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
        connect_nodes_bi(self.nodes,0,1)
        connect_nodes_bi(self.nodes,1,2)
        connect_nodes_bi(self.nodes,0,2)
        connect_nodes_bi(self.nodes,0,3)
        self.is_network_split=False
        self.sync_all()
    def run_test(self):
        """Run every fundrawtransaction scenario in sequence.

        The scenarios share chain state, so order matters throughout.
        """
        print("Mining blocks...")
        min_relay_tx_fee = self.nodes[0].getnetworkinfo()['relayfee']
        # This test is not meant to test fee estimation and we'd like
        # to be sure all txs are sent at a consistent desired feerate
        for node in self.nodes:
            node.settxfee(min_relay_tx_fee)
        # if the fee's positive delta is higher than this value tests will fail,
        # neg. delta always fail the tests.
        # The size of the signature of every input may be at most 2 bytes larger
        # than a minimum sized signature.
        # = 2 bytes * minRelayTxFeePerByte
        feeTolerance = 2 * min_relay_tx_fee/1000
        # NOTE(review): 121 blocks presumably to mature node0's coinbase
        # rewards before the sends below — confirm against the framework.
        self.nodes[2].generate(1)
        self.sync_all()
        self.nodes[0].generate(121)
        self.sync_all()
        # Set up a watch-only key on node3 plus assorted spendable coins
        # on nodes 2 and 3 used by later scenarios.
        watchonly_address = self.nodes[0].getnewaddress()
        watchonly_pubkey = self.nodes[0].validateaddress(watchonly_address)["pubkey"]
        watchonly_amount = Decimal(200)
        self.nodes[3].importpubkey(watchonly_pubkey, "", True)
        watchonly_txid = self.nodes[0].sendtoaddress(watchonly_address, watchonly_amount)
        self.nodes[0].sendtoaddress(self.nodes[3].getnewaddress(), watchonly_amount / 10)
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.5)
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0)
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 5.0)
        self.nodes[0].generate(1)
        self.sync_all()
        ###############
        # simple test #
        ###############
        inputs = [ ]
        outputs = { self.nodes[0].getnewaddress() : 1.0 }
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        fee = rawtxfund['fee']
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        assert(len(dec_tx['vin']) > 0) #test that we have enough inputs
        ##############################
        # simple test with two coins #
        ##############################
        inputs = [ ]
        outputs = { self.nodes[0].getnewaddress() : 2.2 }
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        fee = rawtxfund['fee']
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        assert(len(dec_tx['vin']) > 0) #test if we have enough inputs
        ##############################
        # simple test with two coins #
        ##############################
        inputs = [ ]
        outputs = { self.nodes[0].getnewaddress() : 2.6 }
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        fee = rawtxfund['fee']
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        assert(len(dec_tx['vin']) > 0)
        assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
        ################################
        # simple test with two outputs #
        ################################
        inputs = [ ]
        outputs = { self.nodes[0].getnewaddress() : 2.6, self.nodes[1].getnewaddress() : 2.5 }
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        fee = rawtxfund['fee']
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        totalOut = 0
        for out in dec_tx['vout']:
            totalOut += out['value']
        assert(len(dec_tx['vin']) > 0)
        assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
        #########################################################################
        # test a fundrawtransaction with a VIN greater than the required amount #
        #########################################################################
        utx = get_unspent(self.nodes[2].listunspent(), 5)
        inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
        outputs = { self.nodes[0].getnewaddress() : 1.0 }
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        fee = rawtxfund['fee']
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        totalOut = 0
        for out in dec_tx['vout']:
            totalOut += out['value']
        assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee
        #####################################################################
        # test a fundrawtransaction with which will not get a change output #
        #####################################################################
        utx = get_unspent(self.nodes[2].listunspent(), 5)
        inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
        outputs = { self.nodes[0].getnewaddress() : Decimal(5.0) - fee - feeTolerance }
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        fee = rawtxfund['fee']
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        totalOut = 0
        for out in dec_tx['vout']:
            totalOut += out['value']
        assert_equal(rawtxfund['changepos'], -1)
        assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee
        ####################################################
        # test a fundrawtransaction with an invalid option #
        ####################################################
        utx = get_unspent(self.nodes[2].listunspent(), 5)
        inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ]
        outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) }
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
        try:
            self.nodes[2].fundrawtransaction(rawtx, {'foo': 'bar'})
            raise AssertionError("Accepted invalid option foo")
        except JSONRPCException as e:
            assert("Unexpected key foo" in e.error['message'])
        ############################################################
        # test a fundrawtransaction with an invalid change address #
        ############################################################
        utx = get_unspent(self.nodes[2].listunspent(), 5)
        inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ]
        outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) }
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
        try:
            self.nodes[2].fundrawtransaction(rawtx, {'changeAddress': 'foobar'})
            raise AssertionError("Accepted invalid zurcoin address")
        except JSONRPCException as e:
            assert("changeAddress must be a valid zurcoin address" in e.error['message'])
        ############################################################
        # test a fundrawtransaction with a provided change address #
        ############################################################
        utx = get_unspent(self.nodes[2].listunspent(), 5)
        inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ]
        outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) }
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
        change = self.nodes[2].getnewaddress()
        # changePosition beyond the output count must be rejected.
        try:
            rawtxfund = self.nodes[2].fundrawtransaction(rawtx, {'changeAddress': change, 'changePosition': 2})
        except JSONRPCException as e:
            assert('changePosition out of bounds' == e.error['message'])
        else:
            assert(False)
        rawtxfund = self.nodes[2].fundrawtransaction(rawtx, {'changeAddress': change, 'changePosition': 0})
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        out = dec_tx['vout'][0];
        assert_equal(change, out['scriptPubKey']['addresses'][0])
        #########################################################################
        # test a fundrawtransaction with a VIN smaller than the required amount #
        #########################################################################
        utx = get_unspent(self.nodes[2].listunspent(), 1)
        inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
        outputs = { self.nodes[0].getnewaddress() : 1.0 }
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        # 4-byte version + 1-byte vin count + 36-byte prevout then script_len
        rawtx = rawtx[:82] + "0100" + rawtx[84:]
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
        assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        fee = rawtxfund['fee']
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        totalOut = 0
        matchingOuts = 0
        for i, out in enumerate(dec_tx['vout']):
            totalOut += out['value']
            if out['scriptPubKey']['addresses'][0] in outputs:
                matchingOuts+=1
            else:
                assert_equal(i, rawtxfund['changepos'])
        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
        assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
        assert_equal(matchingOuts, 1)
        assert_equal(len(dec_tx['vout']), 2)
        ###########################################
        # test a fundrawtransaction with two VINs #
        ###########################################
        utx = get_unspent(self.nodes[2].listunspent(), 1)
        utx2 = get_unspent(self.nodes[2].listunspent(), 5)
        inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
        outputs = { self.nodes[0].getnewaddress() : 6.0 }
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        fee = rawtxfund['fee']
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        totalOut = 0
        matchingOuts = 0
        for out in dec_tx['vout']:
            totalOut += out['value']
            if out['scriptPubKey']['addresses'][0] in outputs:
                matchingOuts+=1
        assert_equal(matchingOuts, 1)
        assert_equal(len(dec_tx['vout']), 2)
        matchingIns = 0
        for vinOut in dec_tx['vin']:
            for vinIn in inputs:
                if vinIn['txid'] == vinOut['txid']:
                    matchingIns+=1
        assert_equal(matchingIns, 2) #we now must see two vins identical to vins given as params
        #########################################################
        # test a fundrawtransaction with two VINs and two vOUTs #
        #########################################################
        utx = get_unspent(self.nodes[2].listunspent(), 1)
        utx2 = get_unspent(self.nodes[2].listunspent(), 5)
        inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
        outputs = { self.nodes[0].getnewaddress() : 6.0, self.nodes[0].getnewaddress() : 1.0 }
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        fee = rawtxfund['fee']
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        totalOut = 0
        matchingOuts = 0
        for out in dec_tx['vout']:
            totalOut += out['value']
            if out['scriptPubKey']['addresses'][0] in outputs:
                matchingOuts+=1
        assert_equal(matchingOuts, 2)
        assert_equal(len(dec_tx['vout']), 3)
        ##############################################
        # test a fundrawtransaction with invalid vin #
        ##############################################
        listunspent = self.nodes[2].listunspent()
        inputs = [ {'txid' : "1c7f966dab21119bac53213a2bc7532bff1fa844c124fd750a7d0b1332440bd1", 'vout' : 0} ] #invalid vin!
        outputs = { self.nodes[0].getnewaddress() : 1.0}
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        try:
            rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
            raise AssertionError("Spent more than available")
        except JSONRPCException as e:
            assert("Insufficient" in e.error['message'])
        ############################################################
        #compare fee of a standard pubkeyhash transaction
        inputs = []
        outputs = {self.nodes[1].getnewaddress():1.1}
        rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
        fundedTx = self.nodes[0].fundrawtransaction(rawTx)
        #create same transaction over sendtoaddress
        txId = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1.1)
        signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
        #compare fee
        feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
        assert(feeDelta >= 0 and feeDelta <= feeTolerance)
        ############################################################
        ############################################################
        #compare fee of a standard pubkeyhash transaction with multiple outputs
        inputs = []
        outputs = {self.nodes[1].getnewaddress():1.1,self.nodes[1].getnewaddress():1.2,self.nodes[1].getnewaddress():0.1,self.nodes[1].getnewaddress():1.3,self.nodes[1].getnewaddress():0.2,self.nodes[1].getnewaddress():0.3}
        rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
        fundedTx = self.nodes[0].fundrawtransaction(rawTx)
        #create same transaction over sendtoaddress
        txId = self.nodes[0].sendmany("", outputs)
        signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
        #compare fee
        feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
        assert(feeDelta >= 0 and feeDelta <= feeTolerance)
        ############################################################
        ############################################################
        #compare fee of a 2of2 multisig p2sh transaction
        # create 2of2 addr
        addr1 = self.nodes[1].getnewaddress()
        addr2 = self.nodes[1].getnewaddress()
        addr1Obj = self.nodes[1].validateaddress(addr1)
        addr2Obj = self.nodes[1].validateaddress(addr2)
        mSigObj = self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
        inputs = []
        outputs = {mSigObj:1.1}
        rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
        fundedTx = self.nodes[0].fundrawtransaction(rawTx)
        #create same transaction over sendtoaddress
        txId = self.nodes[0].sendtoaddress(mSigObj, 1.1)
        signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
        #compare fee
        feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
        assert(feeDelta >= 0 and feeDelta <= feeTolerance)
        ############################################################
        ############################################################
        #compare fee of a standard pubkeyhash transaction
        # create 4of5 addr
        addr1 = self.nodes[1].getnewaddress()
        addr2 = self.nodes[1].getnewaddress()
        addr3 = self.nodes[1].getnewaddress()
        addr4 = self.nodes[1].getnewaddress()
        addr5 = self.nodes[1].getnewaddress()
        addr1Obj = self.nodes[1].validateaddress(addr1)
        addr2Obj = self.nodes[1].validateaddress(addr2)
        addr3Obj = self.nodes[1].validateaddress(addr3)
        addr4Obj = self.nodes[1].validateaddress(addr4)
        addr5Obj = self.nodes[1].validateaddress(addr5)
        mSigObj = self.nodes[1].addmultisigaddress(4, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey'], addr4Obj['pubkey'], addr5Obj['pubkey']])
        inputs = []
        outputs = {mSigObj:1.1}
        rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
        fundedTx = self.nodes[0].fundrawtransaction(rawTx)
        #create same transaction over sendtoaddress
        txId = self.nodes[0].sendtoaddress(mSigObj, 1.1)
        signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
        #compare fee
        feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
        assert(feeDelta >= 0 and feeDelta <= feeTolerance)
        ############################################################
        ############################################################
        # spend a 2of2 multisig transaction over fundraw
        # create 2of2 addr
        addr1 = self.nodes[2].getnewaddress()
        addr2 = self.nodes[2].getnewaddress()
        addr1Obj = self.nodes[2].validateaddress(addr1)
        addr2Obj = self.nodes[2].validateaddress(addr2)
        mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
        # send 1.2 ZUR to msig addr
        txId = self.nodes[0].sendtoaddress(mSigObj, 1.2)
        self.sync_all()
        self.nodes[1].generate(1)
        self.sync_all()
        oldBalance = self.nodes[1].getbalance()
        inputs = []
        outputs = {self.nodes[1].getnewaddress():1.1}
        rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
        fundedTx = self.nodes[2].fundrawtransaction(rawTx)
        signedTx = self.nodes[2].signrawtransaction(fundedTx['hex'])
        txId = self.nodes[2].sendrawtransaction(signedTx['hex'])
        self.sync_all()
        self.nodes[1].generate(1)
        self.sync_all()
        # make sure funds are received at node1
        assert_equal(oldBalance+Decimal('1.10000000'), self.nodes[1].getbalance())
        ############################################################
        # locked wallet test
        self.nodes[1].encryptwallet("test")
        # NOTE(review): encryptwallet shuts the node down, hence the
        # pop/stop/restart sequence — confirm against framework docs.
        self.nodes.pop(1)
        stop_nodes(self.nodes)
        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
        # This test is not meant to test fee estimation and we'd like
        # to be sure all txs are sent at a consistent desired feerate
        for node in self.nodes:
            node.settxfee(min_relay_tx_fee)
        connect_nodes_bi(self.nodes,0,1)
        connect_nodes_bi(self.nodes,1,2)
        connect_nodes_bi(self.nodes,0,2)
        connect_nodes_bi(self.nodes,0,3)
        self.is_network_split=False
        self.sync_all()
        # drain the keypool
        self.nodes[1].getnewaddress()
        inputs = []
        outputs = {self.nodes[0].getnewaddress():1.1}
        rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
        # fund a transaction that requires a new key for the change output
        # creating the key must be impossible because the wallet is locked
        try:
            fundedTx = self.nodes[1].fundrawtransaction(rawTx)
            raise AssertionError("Wallet unlocked without passphrase")
        except JSONRPCException as e:
            assert('Keypool ran out' in e.error['message'])
        #refill the keypool
        self.nodes[1].walletpassphrase("test", 100)
        self.nodes[1].walletlock()
        try:
            self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1.2)
            raise AssertionError("Wallet unlocked without passphrase")
        except JSONRPCException as e:
            assert('walletpassphrase' in e.error['message'])
        oldBalance = self.nodes[0].getbalance()
        inputs = []
        outputs = {self.nodes[0].getnewaddress():1.1}
        rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
        fundedTx = self.nodes[1].fundrawtransaction(rawTx)
        #now we need to unlock
        self.nodes[1].walletpassphrase("test", 100)
        signedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
        txId = self.nodes[1].sendrawtransaction(signedTx['hex'])
        self.nodes[1].generate(1)
        self.sync_all()
        # make sure funds are received at node1
        assert_equal(oldBalance+Decimal('51.10000000'), self.nodes[0].getbalance())
        ###############################################
        # multiple (~19) inputs tx test | Compare fee #
        ###############################################
        #empty node1, send some small coins from node0 to node1
        self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_all()
        for i in range(0,20):
            self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
        self.nodes[0].generate(1)
        self.sync_all()
        #fund a tx with ~20 small inputs
        inputs = []
        outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
        rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
        fundedTx = self.nodes[1].fundrawtransaction(rawTx)
        #create same transaction over sendtoaddress
        txId = self.nodes[1].sendmany("", outputs)
        signedFee = self.nodes[1].getrawmempool(True)[txId]['fee']
        #compare fee
        feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
        assert(feeDelta >= 0 and feeDelta <= feeTolerance*19) #~19 inputs
        #############################################
        # multiple (~19) inputs tx test | sign/send #
        #############################################
        #again, empty node1, send some small coins from node0 to node1
        self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_all()
        for i in range(0,20):
            self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
        self.nodes[0].generate(1)
        self.sync_all()
        #fund a tx with ~20 small inputs
        oldBalance = self.nodes[0].getbalance()
        inputs = []
        outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
        rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
        fundedTx = self.nodes[1].fundrawtransaction(rawTx)
        fundedAndSignedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
        txId = self.nodes[1].sendrawtransaction(fundedAndSignedTx['hex'])
        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_all()
        assert_equal(oldBalance+Decimal('50.19000000'), self.nodes[0].getbalance()) #0.19+block reward
        #####################################################
        # test fundrawtransaction with OP_RETURN and no vin #
        #####################################################
        rawtx = "0100000000010000000000000000066a047465737400000000"
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        assert_equal(len(dec_tx['vin']), 0)
        assert_equal(len(dec_tx['vout']), 1)
        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        assert_greater_than(len(dec_tx['vin']), 0) # at least one vin
        assert_equal(len(dec_tx['vout']), 2) # one change output added
        ##################################################
        # test a fundrawtransaction using only watchonly #
        ##################################################
        inputs = []
        outputs = {self.nodes[2].getnewaddress() : watchonly_amount / 2}
        rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
        result = self.nodes[3].fundrawtransaction(rawtx, {'includeWatching': True })
        res_dec = self.nodes[0].decoderawtransaction(result["hex"])
        assert_equal(len(res_dec["vin"]), 1)
        assert_equal(res_dec["vin"][0]["txid"], watchonly_txid)
        assert("fee" in result.keys())
        assert_greater_than(result["changepos"], -1)
        ###############################################################
        # test fundrawtransaction using the entirety of watched funds #
        ###############################################################
        inputs = []
        outputs = {self.nodes[2].getnewaddress() : watchonly_amount}
        rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
        # Backward compatibility test (2nd param is includeWatching)
        result = self.nodes[3].fundrawtransaction(rawtx, True)
        res_dec = self.nodes[0].decoderawtransaction(result["hex"])
        assert_equal(len(res_dec["vin"]), 2)
        assert(res_dec["vin"][0]["txid"] == watchonly_txid or res_dec["vin"][1]["txid"] == watchonly_txid)
        assert_greater_than(result["fee"], 0)
        assert_greater_than(result["changepos"], -1)
        assert_equal(result["fee"] + res_dec["vout"][result["changepos"]]["value"], watchonly_amount / 10)
        # node3 only watches the key, so it can fund but not fully sign;
        # node0 (key owner) completes the signature.
        signedtx = self.nodes[3].signrawtransaction(result["hex"])
        assert(not signedtx["complete"])
        signedtx = self.nodes[0].signrawtransaction(signedtx["hex"])
        assert(signedtx["complete"])
        self.nodes[0].sendrawtransaction(signedtx["hex"])
        self.nodes[0].generate(1)
        self.sync_all()
        #######################
        # Test feeRate option #
        #######################
        # Make sure there is exactly one input so coin selection can't skew the result
        assert_equal(len(self.nodes[3].listunspent(1)), 1)
        inputs = []
        outputs = {self.nodes[2].getnewaddress() : 1}
        rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
        result = self.nodes[3].fundrawtransaction(rawtx) # uses min_relay_tx_fee (set by settxfee)
        result2 = self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2*min_relay_tx_fee})
        result3 = self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 10*min_relay_tx_fee})
        result_fee_rate = result['fee'] * 1000 / count_bytes(result['hex'])
        assert_fee_amount(result2['fee'], count_bytes(result2['hex']), 2 * result_fee_rate)
        assert_fee_amount(result3['fee'], count_bytes(result3['hex']), 10 * result_fee_rate)
if __name__ == '__main__':
    # Standard entry point when the test file is executed as a script.
    RawTransactionsTest().main()
|
'''
Author: Robert Post
This class encompasses the functionality to run a Deep Q Network agent as outlined in:
Human-level control through deep reinforcement learning.
Nature, 518(7540):529-533, February 2015
'''
import sys
import copy
import os
import cPickle
import time
import logging
import random
import numpy as np
import cv2
import argparse
import theano
import sys
import copy
import os
import cPickle
import time
import logging
import random
import numpy as np
import cv2
import argparse
import theano
sys.path.append("../utilities")
sys.path.append("../network")
import Parameters
import Environment
import DeepNetworks
import DeepQTransferNetwork
import DQNAgentMemory
floatX = theano.config.floatX
class DQTNAgent(object):
    """Deep Q Transfer Network agent.

    Implements the epsilon-greedy agent loop (startEpisode / stepEpisode /
    endEpisode) of the DQN described in "Human-level control through deep
    reinforcement learning", Nature 518(7540):529-533, 2015, extended with
    multi-task transfer support: every experience is tagged with a
    transfer-task index, and training batches are sampled per task
    according to transferTaskModule.taskBatchFlag.
    """

    def __init__(self, actionList, inputHeight, inputWidth, batchSize, phiLength,
                 nnFile, loadWeightsFlipped, updateFrequency, replayMemorySize, replayStartSize,
                 networkType, updateRule, batchAccumulator, networkUpdateDelay,
                 transferTaskModule,
                 transferExperimentType, numTransferTasks,
                 discountRate, learningRate, rmsRho, rmsEpsilon, momentum,
                 epsilonStart, epsilonEnd, epsilonDecaySteps, evalEpsilon,
                 useSARSAUpdate, kReturnLength, deathEndsEpisode):
        self.actionList = actionList
        self.numActions = len(self.actionList)
        self.inputHeight = inputHeight
        self.inputWidth = inputWidth
        self.batchSize = batchSize
        self.phiLength = phiLength
        self.nnFile = nnFile
        self.loadWeightsFlipped = loadWeightsFlipped
        self.updateFrequency = updateFrequency
        self.replayMemorySize = replayMemorySize
        self.replayStartSize = replayStartSize
        self.networkType = networkType
        self.updateRule = updateRule
        self.batchAccumulator = batchAccumulator
        self.networkUpdateDelay = networkUpdateDelay
        self.discountRate = discountRate
        self.learningRate = learningRate
        self.rmsRho = rmsRho
        self.rmsEpsilon = rmsEpsilon
        self.momentum = momentum
        self.epsilonStart = epsilonStart
        self.epsilonEnd = epsilonEnd
        self.epsilonDecaySteps = epsilonDecaySteps
        self.evalEpsilon = evalEpsilon
        self.transferTaskModule = transferTaskModule
        self.numTransferTasks = numTransferTasks
        self.transferExperimentType = transferExperimentType
        # self.useSharedTransferLayer = useSharedTransferLayer
        self.kReturnLength = kReturnLength
        self.useSARSAUpdate = useSARSAUpdate
        self.deathEndsEpisode = deathEndsEpisode

        # Large replay memory for training; a tiny one for evaluation so
        # phi (the stacked frame history) can still be assembled there.
        self.trainingMemory = DQNAgentMemory.DQNAgentMemory(
            (self.inputHeight, self.inputWidth), self.phiLength,
            self.replayMemorySize, self.discountRate, numTasks=self.numTransferTasks)
        self.evaluationMemory = DQNAgentMemory.DQNAgentMemory(
            (self.inputHeight, self.inputWidth), self.phiLength,
            self.phiLength * 2, self.discountRate, numTasks=self.numTransferTasks)

        self.episodeCounter = 0
        self.stepCounter = 0
        self.batchCounter = 0
        self.actionToTake = 0
        self.transferTaskIndex = 0
        self.nextTaskSampled = -1
        self.lossAverages = []

        # Linear epsilon annealing from epsilonStart down to epsilonEnd.
        self.epsilon = self.epsilonStart
        if self.epsilonDecaySteps != 0:
            self.epsilonRate = ((self.epsilonStart - self.epsilonEnd) / self.epsilonDecaySteps)
        else:
            self.epsilonRate = 0

        self.training = False

        self.network = DeepQTransferNetwork.DeepQTransferNetwork(
            self.batchSize, self.phiLength, self.inputHeight, self.inputWidth, self.numActions,
            self.discountRate, self.learningRate, self.rmsRho, self.rmsEpsilon, self.momentum,
            self.networkUpdateDelay, self.useSARSAUpdate, self.kReturnLength,
            self.transferExperimentType, self.numTransferTasks, self.transferTaskModule.taskBatchFlag,
            self.networkType, self.updateRule, self.batchAccumulator)

        if self.nnFile is not None:
            # Load pretrained weights and sync the target (next) network.
            DeepNetworks.loadNetworkParams(self.network.qValueNetwork, self.nnFile, self.loadWeightsFlipped)
            self.network.resetNextQValueNetwork()

    def agentCleanup(self):
        """Hook for releasing resources at shutdown (nothing to do here)."""
        pass

    def startEpisode(self, observation, transferTaskIndex):
        """Begin an episode on the given task and return a random first action."""
        self.batchCounter = 0
        self.lossAverages = []
        self.currentTransferTaskIndex = transferTaskIndex

        if self.training:
            self.epsilon = max(self.epsilonEnd, self.epsilonStart - self.stepCounter * self.epsilonRate)
        else:
            self.epsilon = self.evalEpsilon

        # BUGFIX: np.random.randint excludes the high endpoint, so the
        # original randint(0, numActions - 1) could never select the
        # last action in the list.
        actionIndex = np.random.randint(0, self.numActions)
        returnAction = self.actionList[actionIndex]
        self.actionToTake = actionIndex

        if self.training:
            self.trainingMemory.addFrame(observation)
        else:
            self.evaluationMemory.addFrame(observation)
        return returnAction

    def endEpisode(self, reward):
        """Record the terminal experience; return the mean training-batch loss."""
        self.episodeCounter += 1
        self.stepCounter += 1
        if self.training:
            # Rewards are clipped to [-1, 1] as in the DQN paper.
            self.trainingMemory.addExperience(np.clip(reward, -1, 1), self.actionToTake, True, self.currentTransferTaskIndex)
        avgLoss = np.mean(self.lossAverages)
        return avgLoss

    def stepEpisode(self, reward, observation):
        """Receive the reward and next state; return the action to take."""
        self.stepCounter += 1

        currentMemory = self.trainingMemory if self.training else self.evaluationMemory
        currentMemory.addExperience(np.clip(reward, -1, 1), self.actionToTake, False, self.currentTransferTaskIndex)
        currentMemory.addFrame(observation)

        if self.stepCounter >= self.phiLength:
            phi = currentMemory.getPhi()
            actionIndex = self.network.chooseAction(phi, self.currentTransferTaskIndex, self.epsilon,
                                                    self.transferTaskModule.getActionsForCurrentTask())
        else:
            # Not enough frames yet for a full phi: act randomly.
            # BUGFIX: high endpoint is exclusive (see startEpisode).
            actionIndex = np.random.randint(0, self.numActions)

        if self.training and len(self.trainingMemory) >= self.replayStartSize and self.stepCounter % self.updateFrequency == 0:
            loss = self.runTrainingBatch()
            self.batchCounter += 1
            self.lossAverages.append(loss)

        self.actionToTake = actionIndex
        return self.actionList[self.actionToTake]

    def runTrainingBatch(self):
        """Sample a replay batch (task chosen per taskBatchFlag) and train on it."""
        if self.transferTaskModule.taskBatchFlag == 0:
            self.nextTaskSampled = None          # mixed batch across all tasks
        elif self.transferTaskModule.taskBatchFlag == 1:
            self.nextTaskSampled = (self.nextTaskSampled + 1) % self.transferTaskModule.getNumTasks()  # round robin
        elif self.transferTaskModule.taskBatchFlag == 2:
            self.nextTaskSampled = np.random.randint(0, self.transferTaskModule.getNumTasks())         # random task

        batchStates, batchActions, batchRewards, batchNextStates, batchNextActions, batchTerminals, batchTasks = \
            self.trainingMemory.getRandomExperienceBatch(self.batchSize, kReturnLength=self.kReturnLength, taskIndex=self.nextTaskSampled)
        return self.network.trainNetwork(batchStates, batchActions, batchRewards, batchNextStates,
                                         batchNextActions, batchTerminals, batchTasks)

    def startTrainingEpoch(self, epochNumber):
        self.training = True

    def endTrainingEpoch(self, epochNumber):
        pass

    def startEvaluationEpoch(self, epochNumber):
        self.training = False
        self.episodeCounter = 0

    def endEvaluationEpoch(self, epochNumber):
        pass

    def computeHoldoutQValues(self, holdoutSize):
        """Return, per task, the mean Q-value over a random holdout batch."""
        taskHoldoutAverageQValues = []
        for taskIndex in xrange(self.numTransferTasks):
            holdoutTaskBatchData = self.trainingMemory.getRandomExperienceBatch(holdoutSize, kReturnLength=self.kReturnLength, taskIndex=taskIndex)
            holdoutStates = holdoutTaskBatchData[0]
            holdoutSum = 0
            for i in xrange(holdoutSize):
                holdoutSum += np.mean(self.network.computeQValues(holdoutStates[i, ...], taskIndex))
            taskHoldoutAverageQValues.append(holdoutSum / holdoutSize)
        return taskHoldoutAverageQValues
|
import requests
class PushPipeline(object):
    """
    Notify the push service that a new strip is available so pushd will
    notify all the interested users.
    """

    def process_item(self, item, spider):
        """Post a notification for *item*'s comic and pass the item on.

        BUGFIX: Scrapy requires process_item to return the item (or raise
        DropItem); the original fell through and returned None, which would
        hand None to every later pipeline stage.
        """
        key = item['comic']
        msg = "There is a new strip " + key
        data = {
            "msg": msg,
            "data.comic": key,
        }
        # pushd notification (response intentionally ignored: best effort)
        requests.post("http://push:8081/event/%s" % key, data=data)
        ## Hack to notify also using webpush custom implementation
        requests.post("http://localhost:5000/webpush/notify", json={"topic": key})
        return item
|
import os
import pprint
import shutil
import sys
import tempfile
import webbrowser
import ioJSON
from networkx.readwrite.json_graph import node_link_data
# Node metadata keys stripped from the graph copy before serialization
# (pa_object presumably holds a live Python object reference that cannot
# be serialized -- TODO confirm).
_excluded_node_data = set([
    'pa_object',
])

# Edge (link) metadata keys stripped before serialization.
_excluded_link_data = set([
    'sexpr',
    'dexpr',
])

# Metadata keys omitted from the hover-tooltip text built in _clean_graph.
_excluded_tooltip_data = set([
    'short',
    'comp',
    'var',
    'invalidation',
    'boundary',
    'iotype',
    'color_idx',
    'title',
    'pseudo',
])
def _to_id(name):
"""Convert a given name to a valid html id, replacing
dots with hyphens."""
return name.replace('.', '-')
def _clean_graph(graph, excludes=(), scope=None, parent=None, minimal=False):
    """Return a cleaned version of the graph. Note that this
    should not be used for really large graphs because it
    copies the entire graph.

    excludes: names of nodes/variables to drop from the copy.
    scope:    object used to resolve pseudo-node expressions
              (objective/constraint) for the tooltips.
    parent:   owner of the graph; used only to build the title string.
    minimal:  if True, also drop non-component nodes with degree < 2.
    """
    # make a subgraph, creating new edge/node meta dicts later if
    # we change anything
    graph = graph.subgraph(graph.nodes_iter())

    # Title: the parent's pathname plus which kind of graph this is.
    if parent is None:
        graph.graph['title'] = 'unknown'
    else:
        name = parent.get_pathname()
        if hasattr(parent, 'workflow'):
            name += '._derivative_graph'
        else:
            name += '._depgraph'
        graph.graph['title'] = name

    if excludes:
        excluded_vars = set(excludes)
    else:
        excluded_vars = set()

    conns = graph.list_connections()
    # Base names of connected nodes (strip any '[index]' subscript).
    conn_nodes = set([u.split('[', 1)[0] for u, v in conns])
    conn_nodes.update([v.split('[', 1)[0] for u, v in conns])

    nodes_to_remove = []
    for node, data in graph.nodes_iter(data=True):
        cmpname, _, nodvar = node.partition('.')
        if node in excluded_vars or nodvar in excluded_vars:
            nodes_to_remove.append(node)
        elif 'framework_var' in data:
            nodes_to_remove.append(node)
        else:
            if minimal and '@' not in node and '~' not in node and not 'comp' in data:
                degree = graph.in_degree(node) + graph.out_degree(node)
                if degree < 2:
                    nodes_to_remove.append(node)
                    continue
            # update node metadata
            newdata = data
            for meta in _excluded_node_data:
                if meta in newdata:
                    if newdata is data:
                        newdata = dict(data)  # make a copy of metadata since we're changing it
                        graph.node[node] = newdata
                    del newdata[meta]
            # Build the tooltip from whatever metadata isn't excluded.
            tt_dct = {}
            for key, val in newdata.items():
                if key not in _excluded_tooltip_data:
                    tt_dct[key] = val
                elif scope is not None and key == 'pseudo':
                    # Show the original expression for objective/constraint
                    # pseudo-components instead of the raw metadata.
                    if val == 'objective':
                        newdata['objective'] = getattr(scope, node)._orig_expr
                    elif val == 'constraint':
                        newdata['constraint'] = getattr(scope, node)._orig_expr
            newdata['title'] = pprint.pformat(tt_dct)

    graph.remove_nodes_from(nodes_to_remove)

    # Strip excluded edge metadata, copying each meta dict before mutating it.
    for u, v, data in graph.edges_iter(data=True):
        newdata = data
        for meta in _excluded_link_data:
            if meta in newdata:
                if newdata is data:
                    newdata = dict(data)
                    graph.edge[u][v] = newdata
                del newdata[meta]

    # Color components by their index in the component graph, if available.
    try:
        for i, comp in enumerate(graph.component_graph()):
            graph.node[comp]['color_idx'] = i
    except AttributeError:
        pass

    # add some extra metadata to make things easier on the
    # javascript side
    for node, data in graph.nodes_iter(data=True):
        parts = node.split('.', 1)
        data['full'] = node
        if len(parts) == 1 or node.startswith('parent.'):
            data['short'] = node
        else:
            data['short'] = parts[1]
        try:
            # variables inherit the color of their owning component
            data['color_idx'] = graph.node[parts[0]]['color_idx']
        except KeyError:
            pass

    return graph
def plot_graph(graph, scope=None, parent=None,
               excludes=(), d3page='fixedforce.html', minimal=False):
    """Open up a display of the graph in a browser window.

    Copies d3.js and the requested d3 page into a temp directory, writes
    the cleaned graph there as a javascript variable, and opens the page
    in the default web browser.  The temp directory is deliberately left
    behind (see the trailing message) so the browser has time to load it.
    """
    tmpdir = tempfile.mkdtemp()
    fdir = os.path.dirname(os.path.abspath(__file__))
    shutil.copy(os.path.join(fdir, 'd3.js'), tmpdir)
    shutil.copy(os.path.join(fdir, d3page), tmpdir)

    graph = _clean_graph(graph, excludes=excludes,
                         scope=scope, parent=parent, minimal=minimal)
    data = node_link_data(graph)
    tmp = data.get('graph', [])
    # wrap the graph-level attribute pairs in a single dict for the js side
    data['graph'] = [dict(tmp)]

    startdir = os.getcwd()
    os.chdir(tmpdir)
    try:
        # write out the json as a javascript var
        # so we're not forced to start our own webserver
        # to avoid cross-site issues
        with open('__graph.js', 'w') as f:
            f.write("__mygraph__json = ")
            ioJSON.dump(data, f)
            f.write(";\n")

        # open URL in web browser
        wb = webbrowser.get()
        wb.open('file://' + os.path.join(tmpdir, d3page))
    except Exception as err:
        print str(err)
    finally:
        os.chdir(startdir)

    print "remember to remove temp directory '%s'" % tmpdir
    # time.sleep(5) # sleep to give browser time
    # to read files before we remove them
    # shutil.rmtree(tmpdir)
    # print "temp directory removed"
def plot_graphs(obj, recurse=False, d3page='fixedforce.html', minimal=False):
    """Plot the dependency graph of an Assembly, or the derivative graph
    of a Driver, optionally recursing into sub-drivers and assemblies.
    Plot failures are reported but do not stop the recursion.
    """
    from openmdao.main.assembly import Assembly
    from openmdao.main.driver import Driver

    if isinstance(obj, Assembly):
        try:
            plot_graph(obj._depgraph, scope=obj, parent=obj,
                       d3page=d3page, minimal=minimal)
        except Exception as err:
            print "Can't plot depgraph of '%s': %s" % (obj.name, str(err))
        if recurse:
            plot_graphs(obj.driver, recurse)
    elif isinstance(obj, Driver):
        try:
            plot_graph(obj.workflow.derivative_graph(),
                       scope=obj.parent, parent=obj,
                       d3page=d3page, minimal=minimal)
        except Exception as err:
            print "Can't plot deriv graph of '%s': %s" % (obj.name, str(err))
        if recurse:
            for comp in obj.iteration_set():
                if isinstance(comp, Assembly) or isinstance(comp, Driver):
                    plot_graphs(comp, recurse)
def main():
    """Instantiate an Assembly from a user-supplied module and plot its graphs.

    Command line: -m/--module (required) names the module; -c/--class names
    the Assembly class to instantiate; -r/--recurse also plots graphs of
    nested drivers/assemblies.
    """
    from argparse import ArgumentParser
    import inspect
    from openmdao.main.assembly import Assembly, set_as_top

    parser = ArgumentParser()
    parser.add_argument('-m', '--module', action='store', dest='module',
                        metavar='MODULE',
                        help='name of module that contains the class to be instantiated and graphed')
    parser.add_argument('-c', '--class', action='store', dest='klass',
                        help='name of the Assembly class to instantiate')
    parser.add_argument('-r', '--recurse', action='store_true', dest='recurse',
                        help='if set, recurse down and plot all dependency and derivative graphs')

    options = parser.parse_args()

    if options.module is None:
        parser.print_help()
        sys.exit(-1)

    __import__(options.module)
    mod = sys.modules[options.module]

    if options.klass:
        obj = getattr(mod, options.klass)()
    else:
        def isasm(obj):
            # BUGFIX: issubclass raises TypeError for non-class members,
            # so guard with isclass.
            return inspect.isclass(obj) and issubclass(obj, Assembly)

        # getmembers returns (name, class) pairs.
        klasses = inspect.getmembers(mod, isasm)
        if not klasses:
            print("no Assembly classes found in module '%s'" % options.module)
            sys.exit(-1)
        elif len(klasses) == 1:
            obj = klasses[0][1]()
        else:
            print("found %d Assembly classes. pick one" % len(klasses))
            for i, (cname, _klass) in enumerate(klasses):
                print("%d) %s" % (i, cname))
            var = raw_input("\nEnter a number: ")
            # BUGFIX: the original assigned the (name, class) tuple itself
            # and never instantiated the chosen class.
            obj = klasses[int(var)][1]()

    set_as_top(obj)
    if not obj.get_pathname():
        obj.name = 'top'

    plot_graphs(obj, recurse=options.recurse)


if __name__ == '__main__':
    main()
|
import ConfigParser
import os
import skysurvey
from skysurvey.new_config import SYS_CFG_FNAME
# Public names exported by this config module.
# BUGFIX: the magic attribute is __all__ (lowercase); the original's
# __ALL__ spelling has no meaning to Python and did nothing.
__all__ = ['plot_dir', 'grid_dir', 'table_dir']

# The per-install system config lives next to the installed skysurvey
# package and records where the user's real config file is.
_sysConfig_fh = os.path.join(os.path.dirname(
    os.path.realpath(skysurvey.__file__)), SYS_CFG_FNAME)
_SysConfig = ConfigParser.ConfigParser()
_SysConfig.read(_sysConfig_fh)

config_fh = _SysConfig.get('skysurvey_global_settings', 'config_fh')
Config = ConfigParser.ConfigParser()
Config.read(config_fh)

# Commonly used directories, exposed as module attributes.
plot_dir = Config.get('PATH', 'plot_dir')
grid_dir = Config.get('PATH', 'grid_dir')
table_dir = Config.get('PATH', 'table_dir')
|
""" invdisttree.py: inverse-distance-weighted interpolation using KDTree
fast, solid, local
"""
from __future__ import division
import numpy as np
from scipy.spatial import cKDTree as KDTree
# http://docs.scipy.org/doc/scipy/reference/spatial.html
__date__ = "2010-11-09 Nov" # weights, doc
class Invdisttree:
    """Inverse-distance-weighted interpolation backed by a scipy cKDTree.

    Build with data points ``X`` and values ``z``; calling the instance with
    query points ``q`` returns, for each query, the IDW average of the values
    at the ``nnear`` nearest data points:

        (z1/d1**p + ... + zn/dn**p) / (1/d1**p + ... + 1/dn**p)

    A query that coincides with a data point (distance < 1e-10) returns that
    point's value exactly.  Optional ``weights`` multiply the 1/distance**p
    terms; with ``stat=1`` the normalized weights are accumulated in
    ``wn``/``wsum`` so average weights can be inspected afterwards.
    Distances are Euclidean, so rescale coordinates of very different
    magnitudes before building the tree.  ``q`` may be a single point or a
    batch of points; a single point gets a single (unwrapped) result.
    """

    def __init__(self, X, z, leafsize=10, stat=0):
        assert len(X) == len(z), "len(X) %d != len(z) %d" % (len(X), len(z))
        # spatial index over the data points
        self.tree = KDTree(X, leafsize=leafsize)
        self.z = z
        self.stat = stat
        self.wn = 0
        self.wsum = None

    def __call__(self, q, nnear=6, eps=0, p=1, weights=None):
        """Interpolate z at q from the nnear nearest data points."""
        q = np.asarray(q)
        single_query = (q.ndim == 1)
        if single_query:
            q = np.array([q])
        if self.wsum is None:
            self.wsum = np.zeros(nnear)

        # nearest-neighbour lookup for every query point
        self.distances, self.ix = self.tree.query(q, k=nnear, eps=eps)
        interpol = np.zeros((len(self.distances),) + np.shape(self.z[0]))
        for out_idx, (dist, ix) in enumerate(zip(self.distances, self.ix)):
            if nnear == 1:
                wz = self.z[ix]
            elif dist[0] < 1e-10:
                # exact hit: return the data value itself
                wz = self.z[ix[0]]
            else:
                # weight the z values by 1/dist**p
                w = 1 / dist ** p
                if weights is not None:
                    w *= weights[ix]
                w /= np.sum(w)
                wz = np.dot(w, self.z[ix])
                if self.stat:
                    self.wn += 1
                    self.wsum += w
            interpol[out_idx] = wz

        return interpol[0] if single_query else interpol
if __name__ == "__main__":
    # Small self-test / demo: interpolate a synthetic "rolling hills"
    # terrain from random sample points (Python 2 script).
    import sys

    N = 10000
    Ndim = 2
    Nask = N  # N Nask 1e5: 24 sec 2d, 27 sec 3d on mac g4 ppc
    Nnear = 8  # 8 2d, 11 3d => 5 % chance one-sided -- Wendel, mathoverflow.com
    leafsize = 10
    eps = .1  # approximate nearest, dist <= (1 + eps) * true nearest
    p = 1  # weights ~ 1 / distance**p
    cycle = .25
    seed = 1

    # Allow overriding any of the above from the command line,
    # e.g.  python this.py N=5000 p=2  (Python 2 exec statement).
    exec "\n".join( sys.argv[1:] )  # python this.py N= ...
    np.random.seed(seed )
    np.set_printoptions( 3, threshold=100, suppress=True )  # .3f

    print "\nInvdisttree: N %d Ndim %d Nask %d Nnear %d leafsize %d eps %.2g p %.2g" % (
        N, Ndim, Nask, Nnear, leafsize, eps, p)

    def terrain(x):
        """ ~ rolling hills """
        return np.sin( (2*np.pi / cycle) * np.mean( x, axis=-1 ))

    known = np.random.uniform( size=(N,Ndim) ) ** .5  # 1/(p+1): density x^p
    z = terrain( known )
    ask = np.random.uniform( size=(Nask,Ndim) )

    invdisttree = Invdisttree( known, z, leafsize=leafsize, stat=1 )
    interpol = invdisttree( ask, nnear=Nnear, eps=eps, p=p )

    print "average distances to nearest points: %s" % \
        np.mean( invdisttree.distances, axis=0 )
    print "average weights: %s" % (invdisttree.wsum / invdisttree.wn)
    # see Wikipedia Zipf's law
    err = np.abs( terrain(ask) - interpol )
    print "average |terrain() - interpolated|: %.2g" % np.mean(err)

    # print "interpolate a single point: %.2g" % \
    #     invdisttree( known[0], nnear=Nnear, eps=eps )
|
import ts3 #teamspeak library
import time #time for sleep function
import re #regular expressions
import TS3Auth #includes datetime import
import sqlite3 #Database
import os #operating system commands -check if files exist
import datetime #for date strings
import configparser #parse in configuration
import ast #eval a string to a list/boolean (for cmd_list from 'bot settings' or DEBUG from config)
import schedule # Allows auditing of users every X days
from bot_messages import * #Import all Static messages the BOT may need
current_version='1.1'  # version stamp written into the bot_info table

# All runtime settings are read from bot.conf (ini format) at import time.
configs=configparser.ConfigParser()
configs.read('bot.conf')

# Teamspeak ServerQuery connection settings
host = configs.get('teamspeak connection settings','host')
port = configs.get('teamspeak connection settings','port')
user = configs.get('teamspeak connection settings','user')
passwd = configs.get('teamspeak connection settings','passwd')

# Server / channel / group the bot operates on
server_id = configs.get('teamspeak other settings','server_id')
channel_name = configs.get('teamspeak other settings','channel_name')
verified_group = configs.get('teamspeak other settings','verified_group')

# Bot behaviour settings
bot_nickname = configs.get('bot settings','bot_nickname')
bot_sleep_conn_lost = int(configs.get('bot settings','bot_sleep_conn_lost'))  # seconds between reconnect attempts
bot_sleep_idle = int(configs.get('bot settings','bot_sleep_idle'))  # idle-loop poll interval (seconds)
cmd_list = ast.literal_eval(configs.get('bot settings','cmd_list'))  # whitelist of allowed chat commands
db_file_name = configs.get('bot settings','db_file_name')
audit_period = int(configs.get('bot settings','audit_period'))  # How long a single user can go without being audited
audit_interval = int(configs.get('bot settings','audit_interval'))  # how often the BOT audits all users
client_restriction_limit= int(configs.get('bot settings','client_restriction_limit'))  # max TS clients per account
timer_msg_broadcast = int(configs.get('bot settings','broadcast_message_timer'))  # seconds between channel broadcasts

DEBUG = ast.literal_eval(configs.get('DEBUGGING','DEBUG'))  # verbose console output when True

# Greeting broadcast when the bot (re)connects.
bot_msg='''
%s is alive once again! Type 'verifyme' in the "%s" channel to begin verification!
''' %(bot_nickname,channel_name)
class Bot:
    """Verification bot state.

    Wraps a TS3 ServerQuery connection and the sqlite user database, and
    grants/removes the configured 'verified' server group based on
    TS3Auth API-key checks.
    """

    def __init__(self, db, ts_connection):
        # whoami identifies the client this query connection is logged in as
        admin_data = ts_connection.whoami()
        self.db_name = db
        self.ts_connection = ts_connection
        self.name = admin_data[0].get('client_login_name')
        self.client_id = admin_data[0].get('client_id')
        self.nickname = bot_nickname
        self.vgrp_id = None
        self.groupFind(verified_group)
        self.getUserDatabase()
        self.c_audit_date = datetime.date.today()  # Todays Date

    # Helps find the group ID for 'verified users group'
    def groupFind(self, group_to_find):
        """Resolve the server-group id of *group_to_find* into self.vgrp_id."""
        # BUGFIX: use the bot's own connection instead of the module-level
        # ts3conn global, so the method stays correct after a reconnect
        # creates a fresh connection object.
        self.groups_list = self.ts_connection.servergrouplist()
        for group in self.groups_list:
            if group.get('name') == group_to_find:
                self.vgrp_id = group.get('sgid')

    def clientNeedsVerify(self, unique_client_id):
        """Return True when the client still needs to verify an API key."""
        client_db_id = self.getTsDatabaseID(unique_client_id)

        # Check if user is in verified group (BUGFIX: self.ts_connection,
        # not the module-level ts3conn global).
        if any(perm_grp.get('name') == verified_group for perm_grp in self.ts_connection.servergroupsbyclientid(cldbid=client_db_id)):
            return False  # User already verified

        # Check if user is authenticated in database and if so, re-add them to the group
        current_entries = self.db_cursor.execute("SELECT * FROM users WHERE ts_db_id=?", (unique_client_id,)).fetchall()
        if len(current_entries) > 0:
            self.setPermissions(unique_client_id)
            return False

        return True  # User not verified

    def setPermissions(self, unique_client_id):
        """Add the client to the verified server group."""
        try:
            client_db_id = self.getTsDatabaseID(unique_client_id)
            if DEBUG:
                TS3Auth.log("Adding Permissions: CLUID [%s] SGID: %s   CLDBID: %s" %(unique_client_id, self.vgrp_id, client_db_id))
            try:
                # Add user to group (best effort; failure is only logged)
                self.ts_connection.servergroupaddclient(sgid=self.vgrp_id, cldbid=client_db_id)
            except:
                TS3Auth.log("Unable to add client to '%s' group. Does the group exist?" %verified_group)
        except ts3.query.TS3QueryError as err:
            TS3Auth.log("BOT [setPermissions]: Failed; %s" %err)  # likely due to bad client id

    def removePermissions(self, unique_client_id):
        """Remove the client from the verified server group."""
        try:
            client_db_id = self.getTsDatabaseID(unique_client_id)
            if DEBUG:
                TS3Auth.log("Removing Permissions: CLUID [%s] SGID: %s   CLDBID: %s" %(unique_client_id, self.vgrp_id, client_db_id))
            # Remove user from group (best effort; failure is only logged)
            try:
                self.ts_connection.servergroupdelclient(sgid=self.vgrp_id, cldbid=client_db_id)
            except:
                TS3Auth.log("Unable to remove client from '%s' group. Does the group exist?" %verified_group)
        except ts3.query.TS3QueryError as err:
            TS3Auth.log("BOT [removePermissions]: Failed; %s" %err)  # likely due to bad client id

    def getUserDatabase(self):
        """Open the sqlite user database, creating the schema if it is new."""
        if os.path.isfile(self.db_name):
            self.db_conn = sqlite3.connect(self.db_name, check_same_thread=False, detect_types=sqlite3.PARSE_DECLTYPES)
            self.db_cursor = self.db_conn.cursor()
            TS3Auth.log ("Loaded User Database...")
        else:
            self.db_conn = sqlite3.connect(self.db_name, check_same_thread=False, detect_types=sqlite3.PARSE_DECLTYPES)
            self.db_cursor = self.db_conn.cursor()
            TS3Auth.log("No User Database found...created new database!")
            self.db_cursor.execute('''CREATE TABLE users
                (ts_db_id text primary key, account_name text, api_key text, created_date date, last_audit_date date)''')
            self.db_cursor.execute('''CREATE TABLE bot_info
                (version text, last_succesful_audit date)''')
            self.db_conn.commit()
            self.db_cursor.execute('INSERT INTO bot_info (version, last_succesful_audit) VALUES (?,?)', (current_version, datetime.date.today(), ))
            self.db_conn.commit()

    def TsClientLimitReached(self, gw_acct_name):
        """Return True if the account already has the max number of TS ids."""
        current_entries = self.db_cursor.execute("SELECT * FROM users WHERE account_name=?", (gw_acct_name, )).fetchall()
        return len(current_entries) >= client_restriction_limit

    def addUserToDB(self, client_unique_id, account_name, api_key, created_date, last_audit_date):
        """Insert or update a verified user's record."""
        client_id = self.getActiveTsUserID(client_unique_id)
        client_exists = self.db_cursor.execute("SELECT * FROM users WHERE ts_db_id=?", (client_unique_id,)).fetchall()
        if len(client_exists) > 1:
            TS3Auth.log('Function [addUserToDB] WARN: Found multipe database entries for single unique teamspeakid %s.' %client_unique_id, silent=True)
        if len(client_exists) != 0:  # If client TS database id is in BOT's database.
            self.db_cursor.execute("""UPDATE users SET ts_db_id=?, account_name=?, api_key=?, created_date=?, last_audit_date=? WHERE ts_db_id=?""", (client_unique_id, account_name, api_key, created_date, last_audit_date, client_unique_id))
            TS3Auth.log("Teamspeak ID %s already in Database updating with new Account Name '%s'. (likely permissions changed by a Teamspeak Admin)" %(client_unique_id,account_name))
        else:
            self.db_cursor.execute("INSERT INTO users ( ts_db_id, account_name, api_key, created_date, last_audit_date) VALUES(?,?,?,?,?)", (client_unique_id, account_name, api_key, created_date, last_audit_date))
        self.db_conn.commit()

    def removeUserFromDB(self, client_db_id):
        """Delete a user's record by teamspeak unique id."""
        self.db_cursor.execute("DELETE FROM users WHERE ts_db_id=?", (client_db_id,))
        self.db_conn.commit()

    def auditUsers(self):
        """Re-check every stored user whose audit is due; revoke on failure."""
        self.c_audit_date = datetime.date.today()  # Update current date everytime run
        self.db_audit_list = self.db_cursor.execute('SELECT * FROM users').fetchall()
        for audit_user in self.db_audit_list:
            # Convert row to single variables
            audit_ts_id = audit_user[0]
            audit_account_name = audit_user[1]
            audit_api_key = audit_user[2]
            audit_created_date = audit_user[3]
            audit_last_audit_date = audit_user[4]

            if DEBUG:
                print("Audit: User ",audit_account_name)
                print("TODAY |%s|  NEXT AUDIT |%s|" %(self.c_audit_date,audit_last_audit_date + datetime.timedelta(days=audit_period)))

            # compare audit date
            if self.c_audit_date >= audit_last_audit_date + datetime.timedelta(days=audit_period):
                TS3Auth.log ("User %s is due for audting!" %audit_account_name)
                auth = TS3Auth.auth_request(audit_api_key, audit_account_name)
                if auth.success:
                    TS3Auth.log("User %s is still on %s. Succesful audit!" %(audit_account_name,auth.world.get('name')))
                    self.db_cursor.execute("UPDATE users SET last_audit_date = ? WHERE ts_db_id= ?", (self.c_audit_date, audit_ts_id,))
                    self.db_conn.commit()
                else:
                    TS3Auth.log("User %s is no longer on our server. Removing access...." %(audit_account_name))
                    self.removePermissions(audit_ts_id)
                    self.removeUserFromDB(audit_ts_id)

        self.db_cursor.execute('INSERT INTO bot_info (last_succesful_audit) VALUES (?)', (self.c_audit_date,))
        self.db_conn.commit()

    def broadcastMessage(self):
        """Send the periodic broadcast message to the verify channel."""
        self.ts_connection.sendtextmessage(targetmode=2, target=server_id, msg=bot_msg_broadcast)

    def getActiveTsUserID(self, client_unique_id):
        """Return the (session) client id for a connected unique id."""
        return self.ts_connection.clientgetids(cluid=client_unique_id)[0].get('clid')

    def getTsDatabaseID(self, client_unique_id):
        """Return the TS server-database id for a unique client id."""
        return self.ts_connection.clientgetdbidfromuid(cluid=client_unique_id)[0].get('cldbid')

    def getTsUniqueID(self, client_id):
        """Return the unique id for a (session) client id.

        BUGFIX: clientgetuidfromclid returns the unique id under the key
        'cluid'; the original read 'cldbid', which is absent from that
        response and therefore always yielded None.
        """
        return self.ts_connection.clientgetuidfromclid(clid=client_id)[0].get('cluid')
def commandCheck(command_string):
    """Return the allowed command that prefixes *command_string*, or 0.

    Scans the whitelist in order; if several entries match, the last
    matching entry wins (same behaviour as before).
    """
    matched = 0
    for candidate in cmd_list:
        if re.match('(^%s)\s*' % candidate, command_string):
            matched = candidate
    return matched
def my_event_handler(sender, event):
    """
    *sender* is the TS3Connection instance, that received the event.
    *event* is a ts3.response.TS3Event instance, that contains the name
    of the event and the data.

    Dispatches on targetmode: '2' (channel text) handles the 'verifyme'
    command; '1' (private text) expects "<account name> <api key>" and
    runs the verification flow against TS3Auth.
    """
    if DEBUG:
        print("\nEvent:")
        #print("  sender:", sender)
        print("  event.event:", event.event)
        print("  event.parsed:", event.parsed)
        print("\n\n")

    # Unpack the message and its sender.
    raw_cmd = event.parsed[0].get('msg')
    rec_from_name = event.parsed[0].get('invokername').encode('utf-8')  #fix any encoding issues introdcued by Teamspeak
    rec_from_uid = event.parsed[0].get('invokeruid')
    rec_from_id = event.parsed[0].get('invokerid')
    rec_type = event.parsed[0].get('targetmode')

    if rec_from_uid == 'serveradmin':
        return  #ignore any serveradmin messages, aka seeing our own messages.

    try:
        # Type 2 means it was channel text
        if rec_type == '2':
            cmd = commandCheck(raw_cmd)  #sanitize the commands but also restricts commands to a list of known allowed commands
            if cmd == 'verifyme':
                if BOT.clientNeedsVerify(rec_from_uid):
                    TS3Auth.log("Verify Request Recieved from user '%s'. Sending PM now...\n ...waiting for user response." %rec_from_name)
                    sender.sendtextmessage( targetmode=1, target=rec_from_id, msg=bot_msg_verify)
                else:
                    TS3Auth.log("Verify Request Recieved from user '%s'. Already verified, notified user." %rec_from_name)
                    sender.sendtextmessage( targetmode=1, target=rec_from_id, msg=bot_msg_alrdy_verified)

        # Type 1 means it was a private message
        elif rec_type == '1':
            # Expected format: "<account name>.<digits> <api-key (dash separated)>"
            #reg_api_auth='\s*(\S+\s*\S+\.\d+)\s+(.*?-.*?-.*?-.*?-.*)\s*$'
            reg_api_auth='\s*(.+?\.\d+)\s+(.*?-.*?-.*?-.*?-.*)\s*$'

            # Command for verifying authentication
            if re.match(reg_api_auth, raw_cmd):
                pair = re.search(reg_api_auth, raw_cmd)
                uname = pair.group(1)
                uapi = pair.group(2)

                # Enforce the per-account client limit before verifying.
                limit_hit = BOT.TsClientLimitReached(uname)
                if DEBUG:
                    print("Limit hit check: %s" %limit_hit)
                if not limit_hit:
                    if BOT.clientNeedsVerify(rec_from_uid):
                        TS3Auth.log("Received verify response from %s" %rec_from_name)
                        auth = TS3Auth.auth_request(uapi, uname)
                        if DEBUG:
                            TS3Auth.log('Name: |%s| API: |%s|' %(uname,uapi))
                        if auth.success:
                            TS3Auth.log("Setting permissions for %s as verified." %rec_from_name)
                            #set permissions
                            BOT.setPermissions(rec_from_uid)
                            #get todays date
                            today_date = datetime.date.today()
                            #Add user to database so we can query their API key over time to ensure they are still on our server
                            BOT.addUserToDB(rec_from_uid, uname, uapi, today_date, today_date)
                            print ("Added user to DB with ID %s" %rec_from_uid)
                            #notify user they are verified
                            sender.sendtextmessage( targetmode=1, target=rec_from_id, msg=bot_msg_success)
                        else:
                            #Auth Failed
                            sender.sendtextmessage( targetmode=1, target=rec_from_id, msg=bot_msg_fail)
                    else:
                        TS3Auth.log("Received API Auth from %s, but %s is already verified. Notified user as such." %(rec_from_name,rec_from_name))
                        sender.sendtextmessage( targetmode=1, target=rec_from_id, msg=bot_msg_alrdy_verified)
                else:
                    # client limit is set and hit
                    sender.sendtextmessage( targetmode=1, target=rec_from_id, msg=bot_msg_limit_Hit)
                    TS3Auth.log("Received API Auth from %s, but %s has reached the client limit." %(rec_from_name,rec_from_name))
            else:
                # Message did not match the expected "<name> <key>" shape.
                sender.sendtextmessage( targetmode=1, target=rec_from_id, msg=bot_msg_rcv_default)
                TS3Auth.log("Received bad response from %s [msg= %s]" %(rec_from_name,raw_cmd.encode('utf-8')))
    except:
        # NOTE(review): bare except deliberately keeps the event thread
        # alive on malformed input, but it also hides real errors;
        # consider narrowing to the expected exception types.
        TS3Auth.log('BOT Event: Something went wrong during message received from teamspeak server. Likely bad user command/message.')
    return None
# Main loop: connect, run the bot until the connection drops, then retry
# forever.  Each reconnect builds a fresh TS3Connection and Bot.
bot_loop_forever=True
TS3Auth.log("Initializing script....")
while bot_loop_forever:
    try:
        TS3Auth.log("Connecting to Teamspeak server...")
        with ts3.query.TS3Connection(host,port) as ts3conn:
            try:
                ts3conn.login(client_login_name=user, client_login_password=passwd)
            except ts3.query.TS3QueryError as err:
                # Bad credentials are fatal: no point retrying.
                TS3Auth.log("Login Failed Reason: %s" %err.resp.error["msg"])
                exit(1)

            #Force connection to stay up by sending an alive message every 250 seconds
            ts3conn.keepalive(interval=250)

            #Choose which server instance we want to join (unless multiple exist the default of 1 should be fine)
            ts3conn.use(sid=server_id)

            #Define our bots info
            BOT=Bot(db_file_name, ts3conn)
            TS3Auth.log ("BOT loaded into server (%s) as %s (%s). Nickname '%s'" %(server_id,BOT.name,BOT.client_id,BOT.nickname))
            ts3conn.clientupdate(client_nickname=BOT.nickname)

            #Start our event handler (received the messages from server)
            BOT.ts_connection.on_event.connect(my_event_handler)

            # Find the verify channel; keep retrying until it exists.
            verify_channel_id=0
            while verify_channel_id == 0:
                try:
                    channel = ts3conn.channelfind(pattern=channel_name)
                    verify_channel_id=channel[0].get('cid')
                    channel_name=channel[0].get('channel_name')
                except:
                    TS3Auth.log ("Unable to locate channel with name '%s'. Sleeping for 10 seconds..." %(channel_name))
                    time.sleep(10)

            # Move ourselves to the Verify chanel and register for text events
            try:
                BOT.ts_connection.clientmove(clid=BOT.client_id, cid=verify_channel_id)
                TS3Auth.log ("BOT has joined channel '%s' (%s)." %(channel_name,verify_channel_id))
            except ts3.query.TS3QueryError as chnl_err:  #BOT move failed because
                TS3Auth.log("BOT Attempted to join channel '%s' (%s) WARN: %s" %(channel_name,verify_channel_id,chnl_err.resp.error["msg"]))

            BOT.ts_connection.servernotifyregister(event="textchannel")  #alert channel chat
            BOT.ts_connection.servernotifyregister(event="textprivate")  #alert Private chat

            #Start looking for any received events from the server
            BOT.ts_connection.recv_in_thread()

            #Send message to the server that the BOT is up
            BOT.ts_connection.sendtextmessage( targetmode=3, target=server_id, msg=bot_msg)
            TS3Auth.log("BOT is now registered to receive messages!")

            TS3Auth.log("BOT Database Audit policies initiating.")
            # Always audit users on initialize if user audit date is up (in case the script is reloaded several times before audit interval hits, so we can ensure we maintain user database accurately)
            BOT.auditUsers()

            #Set audit schedule job to run in X days
            schedule.every(audit_interval).days.do(BOT.auditUsers)
            #Set schedule to advertise broadcast message in channel
            if timer_msg_broadcast > 0:
                schedule.every(timer_msg_broadcast).seconds.do(BOT.broadcastMessage)
                BOT.broadcastMessage()  # Send initial message into channel

            #Forces script to loop forever while we wait for events to come in, unless connection timed out. Then it should loop a new bot into creation.
            TS3Auth.log("BOT now idle, waiting for requests.")
            while BOT.ts_connection.is_connected():
                #auditjob check, and any other scheduled jobs
                schedule.run_pending()
                time.sleep(bot_sleep_idle)

            TS3Auth.log("It appears the BOT has lost connection to teamspeak. Trying to restart connection in %s seconds...." %bot_sleep_conn_lost)
            time.sleep(bot_sleep_conn_lost)
    except ConnectionRefusedError:
        TS3Auth.log("Unable to reach teamspeak server..trying again in %s seconds..." %bot_sleep_conn_lost)
        time.sleep(bot_sleep_conn_lost)
|
"""
Implements a feature set based off of dictionary lookup.
.. autoclass:: revscoring.languages.features.Dictionary
:members:
:member-order: bysource
Supporting classes
------------------
.. autoclass:: revscoring.languages.features.dictionary.Revision
:members:
:member-order: bysource
.. autoclass:: revscoring.languages.features.dictionary.Diff
:members:
:member-order: bysource
"""
from .dictionary import Dictionary
from .features import Diff, Revision
from .util import utf16_cleanup
__all__ = [Dictionary, utf16_cleanup, Revision, Diff]
|
from insulaudit.log import io, logger as log
from insulaudit import lib, core
import time
STX = 0x02      # start-of-frame marker byte
ETX = 0x03      # end-of-frame marker byte
TIMEOUT = 0.5   # read timeout -- presumably seconds (unused in this chunk)
RETRIES = 3     # attempts for write/ack exchanges
"""
Bit 7
Unused
Bit 6
Unused
Bit 5
Unused
Bit 4
More
Bit 3
Disconnect
Bit 2
Acknowledge
Bit 1
E
Bit 0
S
"""
def ls_long( B ):
    """Unpack byte list *B* (little-endian on the wire) as a long.

    NOTE: reverses B *in place* (mutates the caller's list) before handing
    it to lib.BangLong, which presumably unpacks big-endian -- confirm.
    """
    B.reverse( )
    return lib.BangLong( B )
def ls_int( B ):
    """Unpack byte list *B* (little-endian on the wire) as an int.

    NOTE: reverses B *in place*, same caveat as ls_long.
    """
    B.reverse( )
    return lib.BangInt( B )
class Link:
    # Link-control bit flags for the frame's link byte (bit layout documented
    # in the module-level string: bit 2 = Acknowledge, bit 3 = Disconnect).
    SEND = 0x01
    RECE = 0x01 << 1
    ACK = 0x01 << 2
    DISC = 0x01 << 3
class Response( object ):
    """Accumulates raw frame bytes from the meter and validates framing."""
    __raw__ = None
    def __init__( self, raw=None ):
        self.__raw__ = None
        if raw is not None:
            self.__raw__ = raw
    def validate( self ):
        """Check the frame starts with STX and record its declared length.

        Raises InvalidResponse when the first byte is not STX.
        """
        self.bytez = bytearray( self.__raw__ )
        if self.bytez[ 0 ] != STX:
            # BUG FIX: was `raise InvalidResponse(raw)` -- `raw` is an
            # __init__ parameter and not in scope here, so every failure
            # path died with a NameError instead of the intended exception.
            raise InvalidResponse( self.__raw__ )
        # BUG FIX: was `self.raw[ 1 ]` -- no `raw` attribute exists; the
        # byte buffer is stored in `self.bytez`. Byte 1 is the frame length.
        self.length = self.bytez[ 1 ]
    def incr( self, raw ):
        """Append newly received data and sanity-check the first byte."""
        if self.__raw__ is None:
            self.__raw__ = ''
        self.__raw__ = self.__raw__ + raw
        msg = self.__raw__[0]
        # NOTE(review): under Python 3 indexing a str yields a str, which can
        # never equal the int STX -- this check assumes Python 2 semantics or
        # byte-sequence input; confirm the type of `raw`.
        if msg != STX:
            raise InvalidResponse(msg)
# Protocol exception hierarchy, rooted at the project's CarelinkException.
class LSException(core.CarelinkException): pass
class InvalidResponse(LSException): pass  # frame did not start with STX
class MissingAck(LSException): pass       # device never acknowledged a write
class CRCMismatch(LSException): pass      # reserved for CRC validation (not raised in this chunk)
class AckCommand( core.Command ):
    """Base meter command whose reply payload sits between the 3-byte frame
    header and the 3-byte trailer."""
    code = [ ]
    def decode( self, msg ):
        # Drop header (STX, length, link) and trailer (ETX + CRC16).
        return bytearray( msg[ 3 : -3 ] )
class DiscoverFirmware( AckCommand ):
    """Query the meter's firmware identification; reply decoded as text."""
    code = [ 5, 13, 2 ]
    def decode( self, msg ):
        # One extra leading byte is skipped compared to AckCommand.decode.
        return str( msg[ 4 : -3 ] )
class ReadSerialNumber( DiscoverFirmware ):
    """Request the meter's serial number; decoded like DiscoverFirmware."""
    # NOTE(review): the trailing magic bytes (0x84 0x6A 0xE8 0x73) are
    # undocumented here -- presumably a fixed protocol token; confirm.
    code = [ 0x05, 0x0B, 0x02,
             0x00, 0x00, 0x00, 0x00,
             0x84, 0x6A, 0xE8, 0x73, 0x00 ]
class ReadAvailableRecords( AckCommand ):
    """Ask how many glucose records the meter currently holds."""
    code = [ 0x05, 0x1F, 0xF5, 0x01 ]
    def decode( self, msg ):
        # Payload bytes 2-3 carry the record count (unpacked via ls_int).
        data = bytearray( msg[ 3:len(msg) - 3 ] )
        return ls_int( data[ 2:4 ] )
class ReadGlucoseRecord( AckCommand ):
    """Fetch one glucose record by index."""
    code = [ 0x05, 0x1F ]
    def __init__( self, idx=0 ):
        # Append the record index -- low byte first, presumably little-endian.
        self.code = self.code + [ lib.LowByte( idx ), lib.HighByte( idx ) ]
    def decode( self, msg ):
        """Return (date_string, glucose_value) from the record payload."""
        data = bytearray( msg[ 3: len(msg) - 3 ] )
        # Bytes 2-5 are fed to time.ctime, i.e. treated as epoch seconds.
        date = time.ctime( ls_long( data[ 2:6 ] ) )
        io.info( 'relevant message: %s' % lib.hexdump( data ) )
        io.info( 'glucose: %s' % lib.hexdump( data[ 6:10 ] ) )
        glucose = ls_long( data[ 6:10 ] )
        return ( date, glucose )
class LSUltraMini( core.CommBuffer ):
    """Serial comm driver for a LifeScan UltraMini-style meter.

    Python 2 code: note the octal literal below and the old-style
    ``except Exc, e`` syntax in the retry helpers.
    """
    __timeout__ = 0.5
    # NOTE(review): `02` is a Python 2 octal literal (== 2); pause between
    # empty-ack retries, presumably seconds.
    __pause__ = 02
    def disconnect( self ):
        """Send a frame with the Disconnect link bit (0x08) set."""
        msg = list( self.wrap( 0x08, [ ] ) )
        io.info( 'disconnect' )
        self.__retry_write_with_ack__( msg, RETRIES )
    def __retry_write_with_ack__( self, msg, retries ):
        """Write *msg* and require an ACK, retrying on MissingAck.

        Returns the ACK bytearray; re-raises MissingAck if the final
        attempt also fails.
        NOTE(review): the *retries* parameter is ignored -- the loop uses
        the module-level RETRIES constant instead; confirm intent.
        """
        try:
            for i in xrange( RETRIES - 1 ):
                try:
                    self.write( str( bytearray( msg ) ) )
                    io.info( '__retry_write_with_ack__::%i' % i )
                    self.__ack__ = self.__requireAck__( )
                    return self.__ack__
                except MissingAck, e:
                    # Last-chance write; if this also misses, the outer
                    # handler logs and re-raises.
                    io.info( 'retry:%s:missing ack:%r' % ( i, e ) )
                    self.write( str( bytearray( msg ) ) )
                    self.__ack__ = self.__requireAck__( )
            # catch
        except MissingAck, e:
        #except Exception, e:
            io.fatal( 'noticed and uncaught: %r' % e )
            raise
        return self.__ack__
    def __requireAck__( self ):
        """Try to read an ack, raising MissingAck if we don't read it. Returns
        bytearray ack."""
        ack = None
        for i in xrange( RETRIES ):
            ack = bytearray( self.read( 6 ) )
            # NOTE(review): under Python 2 an empty bytearray compares equal
            # to ''; under Python 3 this is always False -- confirm runtime.
            if ack == '':
                io.debug( "empty ack:%s:%s:sleeping:%s" % ( i, ack, self.__pause__ ) )
                time.sleep( self.__pause__ )
            else:
                break
        io.info( 'ACK: %s' % lib.hexdump( ack ) )
        if ack == '':
            raise MissingAck(i)
        return ack
    def __acknowledge__( self ):
        """Send a bare ACK frame (link byte 0x04|0x08) with a CRC16 trailer."""
        msg = [ STX, 6, 0x04 | 0x08, ETX ]
        crc = lib.CRC16CCITT.compute( msg )
        msg.extend( [ lib.LowByte( crc ), lib.HighByte( crc ) ] )
        io.info( 'sending ACK' )
        self.write( str( bytearray( msg ) ) )
    def __send__require_ack__( self, command ):
        """sending a command requires an ack from the device every time."""
        io.debug( 'command:\n%s' % command )
        # PC sends command
        # meter sends ACK
        msg = str( self.wrap( 0, command.code ) )
        return self.__retry_write_with_ack__( msg, RETRIES )
        # TODO: process ack here?
        #self.write( msg )
        # self.__requireAck__( )
    def wrap( self, link, data ):
        """Frame *data* as: STX, total length, link byte, payload, ETX, CRC16."""
        frame = [ STX, len( data ) + 6, link ] + data + [ ETX ]
        crc = lib.CRC16CCITT.compute( frame )
        frame.extend( [ lib.LowByte( crc ), lib.HighByte( crc ) ] )
        return bytearray( frame )
    def execute( self, command ):
        """
        XXX: Handles retries, link control, and message validation?
        """
        link = 0
        # TODO: validate against CRC/ACK
        r = self.__send__require_ack__( command )
        # meter sends DATA
        response = bytearray( self.read( 40 ) )
        io.info( 'get response:%s' % response );
        # PC sends ACK
        self.__acknowledge__( )
        return command.decode( response )
    def __call__( self, command ):
        # NOTE(review): only records the command and never runs it -- looks
        # unfinished; confirm whether this should delegate to execute().
        self.prevCommand = command
if __name__ == '__main__':
    import doctest
    # BUG FIX: the doctest module has no `testmost` attribute -- the call
    # always raised AttributeError. The intended entry point is testmod(),
    # which runs this module's doctests.
    doctest.testmod( )
|
import random
from unittest.mock import patch
from nose.tools import assert_equal
from pyecharts import options as opts
from pyecharts.charts import Scatter3D
from pyecharts.faker import Faker
@patch("pyecharts.render.engine.write_utf8_html_file")
def test_scatter3d_base(fake_writer):
    """Render a basic Scatter3D chart and check its default theme/renderer."""
    points = [
        [random.randint(0, 100), random.randint(0, 100), random.randint(0, 100)]
        for _ in range(80)
    ]
    c = Scatter3D()
    c.add("", points)
    c.set_global_opts(
        visualmap_opts=opts.VisualMapOpts(range_color=Faker.visual_color)
    )
    c.render()
    _, content = fake_writer.call_args[0]
    assert_equal(c.theme, "white")
    assert_equal(c.renderer, "canvas")
|
import sys
import os
import errno
from datetime import datetime
from enum import Enum
from threading import current_thread
import re
from occacc.config import ERROR_DIR
class ErrorMessage(object):
    """Bundles an error's origin thread, a short label, and the full text."""
    def __init__(self, src, short, long):
        self.src, self.short, self.long = src, short, long
class ErrorFilter(object):
    """File-like sink that routes exception text into an error queue and
    everything else to the regular logger."""
    def __init__(self, queue):
        self.q = queue
    def write(self, string):
        is_exception = 'Exception' in string
        if is_exception or 'Traceback' in string:
            label = 'Exception' if is_exception else 'Error Traceback'
            err = ErrorMessage(current_thread().name, label, string)
            logger(err, log_level=LOG.FATAL)
            self.q.put(err)
        else:
            logger(string, LOG.ERROR)
class LOG(Enum):
    """Log severity levels; the value doubles as the printed level label."""
    INFO = 'INFO'
    WARNING = 'WARNING'
    ERROR = 'ERROR'
    FATAL = 'FATAL'
def write_to_file(message):
    """Persist *message* to a fresh exception_<n>.log under ERROR_DIR.

    Returns the path of the file written.
    NOTE(review): the probe-then-open filename search is racy if several
    processes write concurrently -- confirm single-writer usage.
    """
    # Create path if not exists
    if not os.path.exists(ERROR_DIR):
        try:
            os.makedirs(ERROR_DIR)
        except OSError as exc:
            # Tolerate a concurrent mkdir; re-raise anything else.
            if exc.errno != errno.EEXIST:
                raise
    # Find a free filename
    fpath = '{}/exception_{}.log'
    i = 1
    while os.path.exists(fpath.format(ERROR_DIR, i)):
        i+=1
    fpath = fpath.format(ERROR_DIR, i)
    # Write file
    with open(fpath, 'w') as f:
        f.write(message)
    return fpath
def logger(message, log_level=LOG.INFO):
    """Print *message* with a timestamped severity prefix.

    ErrorMessage instances are persisted via write_to_file() and replaced by
    a one-line summary; other non-str messages are stringified. Blank
    messages are silently dropped.
    """
    def put(level, message):
        # Indent continuation lines so multi-line messages align under the prefix.
        prefix = "{time} [{level}]: ".format(time=datetime.now(), level=level)
        message = message.replace('\n', '\n{}'.format(' '*len(prefix)))
        print(prefix + message)
    if isinstance(message, ErrorMessage):
        fpath = write_to_file(message.long)
        # Use the last indented traceback line as a compact reason string.
        reason = message.long[message.long.rfind('\n ')+1:-1:]\
            .strip()\
            .replace('\n', ' / ')
        message = "{} triggered {} : {}\nFull exception saved in file: {}".format(
            message.src, message.short, reason, fpath)
    elif not isinstance(message, str):
        message = "Unknown message type:\n{}".format(str(message))
    if re.search(r'^[\n\s]*$', message):
        return
    if not isinstance(log_level, LOG):
        put(LOG.ERROR.value, 'Logging with unknown log-level:')
        put('?', message)
        # BUG FIX: previously execution fell through to the normal put()
        # below and crashed with AttributeError on `log_level.value`.
    else:
        put(log_level.value, message)
    sys.stdout.flush()  # Flush to systemd journal to prevent long delays..
|
class SearchOption(object):
    """Encapsulates one command-line search option and its handler."""
    __slots__ = ['shortarg', 'longarg', 'desc', 'func']

    def __init__(self, shortarg: str, longarg: str, desc: str, func):
        self.shortarg = shortarg
        self.longarg = longarg
        self.desc = desc
        self.func = func

    @property
    def sortarg(self):
        """Case-insensitive sort key; options with a short form get an 'a'
        infix so they order ahead of long-only options."""
        if not self.shortarg:
            return self.longarg.lower()
        return self.shortarg.lower() + 'a' + self.longarg.lower()
|
import logging
import ovh
from adapter import Adapter
log = logging.getLogger(__name__)
class OvhAdapter(Adapter):
    """ACME DNS-01 challenge adapter backed by the OVH REST API."""

    def __init__(self):
        # Credentials and client are supplied later through setup().
        self.endpoint = None
        self.application_key = None
        self.application_secret = None
        self.consumer_key = None
        self.client = None

    def setup(self, params):
        """Read API credentials from *params* and build the OVH client."""
        self.endpoint = params['endpoint']
        self.application_key = params['application_key']
        self.application_secret = params['application_secret']
        self.consumer_key = params['consumer_key']
        self.client = ovh.Client(self.endpoint, self.application_key,
                                 self.application_secret, self.consumer_key)

    def deploy_challenge(self, basedomain, subdomain, tokenin):
        """Create a TXT record holding the quoted challenge token and
        refresh the zone. Returns the created record (used for cleanup)."""
        token = '"%s"' % tokenin
        log.info("Deploy challenge in TXT domain: {0} subdomain: {1}".format(basedomain, subdomain))
        record = self.client.post('/domain/zone/%s/record' % basedomain,
                                  fieldType="TXT", subDomain=subdomain,
                                  ttl=60, target=token)
        log.debug("Deploy record id: {0}".format(record))
        self.client.post('/domain/zone/%s/refresh' % basedomain)
        return record

    def delete_challenge(self, record):
        """Delete a previously deployed TXT record and refresh its zone."""
        zone = record['zone']
        self.client.delete('/domain/zone/%s/record/%s' % (zone, record['id']))
        self.client.post('/domain/zone/%s/refresh' % zone)
|
import random
class distance():
    """Fake distance-sensor backend returning random readings for 4 sensors."""
    def __init__(self):
        # Sensor id (1-4) -> last reading; zeroed until polled.
        self.distances = {sensor: 0 for sensor in range(1, 5)}
    def setup(self):
        """No hardware to initialize for the fake backend."""
        pass
    def get_distance_all(self):
        """Refresh every sensor with a random reading (1-50) and return the dict."""
        for sensor in range(1, 5):
            self.distances[sensor] = random.randint(1, 50)
        return self.distances
|
from django.contrib import admin
from .models import Autotag
class AutotagAdmin(admin.ModelAdmin):
    """Admin changelist configuration for Autotag objects."""
    # Show primary key and owner columns in the changelist.
    list_display = ("pk", "owner",)
    # Allow admin search by the tag's pattern field.
    search_fields = ["pattern"]
# Register the model with the customized admin options above.
admin.site.register(Autotag, AutotagAdmin)
|
"""
WSGI config for holidays project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# Must be set before any Django machinery is imported below.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "holidays.settings")
from django.core.wsgi import get_wsgi_application
# Module-level WSGI callable discovered by servers (see module docstring).
application = get_wsgi_application()
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import pytest
import simplesqlite as sqlite
from tabledata import TableData
@pytest.fixture
def database_path(tmpdir):
    """Create a throwaway SQLite database populated with sample tables,
    a constraint-heavy table, and a view; return its path."""
    db_path = str(tmpdir.join("tmp.db"))
    con = sqlite.SimpleSQLite(db_path, "w")
    # Two small data tables, each with an index.
    con.create_table_from_tabledata(
        TableData("testdb0", ["attr_a", "attr b"], [[1, 2], [3, 4]]), index_attrs=["attr_a"]
    )
    con.create_table_from_tabledata(
        TableData("testdb1", ["foo", "bar", "hoge"], [[1, 2.2, "aa"], [3, 4.4, "bb"]]),
        index_attrs=("foo", "hoge"),
    )
    # A table exercising the various column constraints.
    con.create_table(
        "constraints",
        [
            "primarykey_id INTEGER PRIMARY KEY AUTOINCREMENT",
            "notnull_value REAL NOT NULL",
            "unique_value INTEGER UNIQUE",
            "def_text_value TEXT DEFAULT 'null'",
            "def_num_value INTEGER DEFAULT 0",
        ],
    )
    con.execute_query("CREATE VIEW view1 AS SELECT primarykey_id, unique_value FROM constraints")
    return db_path
@pytest.fixture
def mb_database_path(tmpdir):
    """Create a throwaway SQLite database using multi-byte (Japanese)
    table and column names; return its path."""
    db_path = str(tmpdir.join("mb_database_path.db"))
    con = sqlite.SimpleSQLite(db_path, "w")
    con.create_table_from_tabledata(
        TableData("テーブル", ["いち", "に"], [[1, 2], [3, 4]]), index_attrs=["いち"]
    )
    return db_path
|
"""Module to monitor installed napps."""
import logging
import re
from pathlib import Path
from watchdog.events import RegexMatchingEventHandler
from watchdog.observers import Observer
log = logging.getLogger(__name__)
class NAppDirListener(RegexMatchingEventHandler):
    """Watchdog handler that loads/unloads NApps as their directories
    appear in or vanish from the NApps tree."""

    # Only react to paths shaped like .../kytos/napps/<username>/<napp>...
    regexes = [re.compile(r".*\/kytos\/napps\/[a-zA-Z][^/]+\/[a-zA-Z].*")]
    # Installation marker files are not NApps themselves.
    ignore_regexes = [re.compile(r".*\.installed")]
    _controller = None

    def __init__(self, controller):
        """Require controller to get NApps dir, load and unload NApps.
        The NApps dir is created if missing (kytos-utils may not have run
        yet); the same 0o755 permissions as kytos-utils are used.
        Args:
            controller(kytos.core.controller): A controller instance.
        """
        super().__init__()
        self._controller = controller
        self.napps_path = controller.options.napps
        Path(self.napps_path).mkdir(mode=0o755, parents=True, exist_ok=True)
        self.observer = Observer()

    def start(self):
        """Begin watching the NApps directory (recursively)."""
        self.observer.schedule(self, self.napps_path, True)
        self.observer.start()
        log.info('NAppDirListener Started...')

    def stop(self):
        """Stop watching the NApps directory."""
        self.observer.stop()
        log.info('NAppDirListener Stopped...')

    def _get_napp(self, absolute_path):
        """Extract (username, napp_name) from an absolute NApp path."""
        parts = absolute_path.replace(self.napps_path, '').split('/')
        return tuple(parts[1:3])

    def on_created(self, event):
        """Load the NApp whose directory was just created.
        Args:
            event(watchdog.events.DirCreatedEvent): Event received from an
                observer.
        """
        napp = self._get_napp(event.src_path)
        log.debug('The NApp "%s/%s" was enabled.', *napp)
        self._controller.load_napp(*napp)

    def on_deleted(self, event):
        """Unload the NApp whose directory was just deleted.
        Args:
            event(watchdog.events.DirDeletedEvent): Event received from an
                observer.
        """
        napp = self._get_napp(event.src_path)
        log.debug('The NApp "%s/%s" was disabled.', *napp)
        self._controller.unload_napp(*napp)
|
import generate
def encrypt(path, key, outfile):
    """Encode the text at *path* as musical-scale offsets and durations.

    Per-letter scale offsets go to ``c_text`` and the matching note
    durations to ``c_dur``; generate.generate() then renders *outfile*.
    """
    print("\n-------Encrypting-------")
    plain_text = open(str(path), "r")
    encoding = open("./encode/" + "C_en", "r")
    duration = open("./encode/duration", "r")
    cipher_text = open("c_text", "w")
    cipher_dur = open("c_dur", "w")
    # NOTE(review): only the LAST line of the plain text survives -- pt is
    # reassigned each iteration; confirm inputs are single-line files.
    for lines in plain_text:
        pt = lines.upper()
    plain_text.close()
    # One encoding row per note; each row lists the letters mapped to it.
    en = [row.rstrip() for row in encoding]
    # Matching note duration for each encoding row.
    dur = [d.rstrip('\n') for d in duration]
    # Column position within a row -> offset written to the cipher
    # (major-scale semitone steps: 0 2 4 5 7 9 11).
    offsets = {0: 0, 1: 2, 2: 4, 3: 5, 4: 7, 5: 9, 6: 11}
    for letter in pt:
        for row in en:
            for ch in row:
                if ch == letter:
                    idx = row.index(ch)
                    if idx in offsets:
                        cipher_text.write(str(offsets[idx]) + "\n")
                    # Duration is emitted even when idx has no offset mapping,
                    # mirroring the original control flow.
                    cipher_dur.write(str(dur[en.index(row)]) + "\n")
    cipher_text.close()
    cipher_dur.close()
    duration.close()
    encoding.close()
    generate.generate(key, outfile)
|
import os
import sys
import cv2
import numpy as np
sys.path.insert(0, "..")
from models import Image
FRONT = "front/"    # folder holding front-side card images
BACK = "back/"      # folder holding back-side card images (unused in this chunk)
SEGMAP = "segmap/"  # output folder for segmentation map files
# Preset cursor spacings (pixels between columns) used by the lazy-save keys;
# presumably tuned to the card fonts -- confirm against the scanned images.
NUM_INTERVALS = [40, 45, 40, 50, 40, 50, 40]
DOB_INTERVALS = [25, 13, 27, 22, 13, 25, 25, 25]
class Program():
    """Interactive OpenCV tool for marking character column boundaries
    ("segmaps") on scanned images.

    Keys: a/d move the cursor, s saves a column, r undoes the last one,
    q finishes the current span, ESC quits, 1/2 apply the number and
    date-of-birth interval presets, 3/4 apply the recorded cplace presets,
    5/6 record the current cursor spacing as a cplace preset.
    """
    def __init__(self, img_folder):
        # Load every file in the folder, wrapped in the project's Image model.
        self.images = []
        names = os.listdir(img_folder)
        for n in names:
            img = cv2.imread(img_folder + n)
            self.images.append(Image(img, n))
    def start(self, at=0):
        """Annotate images from index *at* onward, writing one segmap file per
        image: one line of sorted column positions per span, blank line
        between fields."""
        for i in range(at, len(self.images)):
            with open(SEGMAP + self.images[i].base, 'w') as m: # open segmap file
                cplace1_intervals = [] # reuse intervals for cplace
                cplace2_intervals = []
                for f in self.images[i].fields:
                    for s in f.spans:
                        # White overlay the size of the span; saved columns are
                        # drawn on it as black vertical lines.
                        overlay = np.ones_like(s.image) * 255
                        cursors = [] # columns in a span's coordinates
                        current = 0
                        while True:
                            show_img = self._apply_overlay(s.image, overlay)
                            cv2.imshow('{}'.format(self.images[i].base), show_img)
                            key = 0xFF & cv2.waitKey(1)
                            if key == 27:
                                exit(0)
                            elif key == ord("a"):
                                current = self._left(overlay, current, cursors)
                            elif key == ord("d"):
                                current = self._right(overlay, current, cursors)
                            elif key == ord("s"):
                                self._save(cursors, current)
                                overlay = self._update_overlay(overlay, cursors)
                                print(cursors)
                            elif key == ord("r"):
                                self._undo(cursors, overlay)
                                overlay = self._update_overlay(overlay, cursors)
                                print(cursors)
                            elif key == ord("q"):
                                print(cursors)
                                break
                            elif key == ord("1"): # lazy save number
                                self._lazy_save(cursors, NUM_INTERVALS)
                                overlay = self._update_overlay(overlay, cursors)
                                print(cursors)
                            elif key == ord("2"): # lazy save dob
                                self._lazy_save(cursors, DOB_INTERVALS)
                                overlay = self._update_overlay(overlay, cursors)
                                print(cursors)
                            elif key == ord("3"): # lazy save cplace1
                                self._lazy_save(cursors, cplace1_intervals)
                                overlay = self._update_overlay(overlay, cursors)
                                print(cursors)
                            elif key == ord("4"): # lazy save cplace2
                                self._lazy_save(cursors, cplace2_intervals)
                                overlay = self._update_overlay(overlay, cursors)
                                print(cursors)
                            elif key == ord("5"): # save intervals for cplace1
                                cplace1_intervals = self._get_intervals(cursors)
                                print(cplace1_intervals)
                            elif key == ord("6"): # save intervals for cplace2
                                cplace2_intervals = self._get_intervals(cursors)
                                print(cplace2_intervals)
                        cursors.sort()
                        m.write(' '.join([str(c) for c in cursors]))
                        m.write('\n')
                    m.write('\n')
            cv2.destroyWindow(self.images[i].base)
    def _apply_overlay(self, img, overlay):
        """Darken *img* with the overlay's black cursor lines (per-pixel min)."""
        new_img = np.minimum(img, overlay)
        return new_img
    def _update_overlay(self, overlay, cursors):
        """Redraw the overlay from scratch: white, with saved columns black."""
        overlay = np.ones_like(overlay) * 255
        for c in cursors:
            overlay[:, c] = 0
        return overlay
    def _right(self, overlay, current, cursors):
        """Move the cursor one column right, erasing its old line unless that
        column was already saved. Returns the new column."""
        if current == overlay.shape[1] - 1:
            return current
        if current not in cursors:
            overlay[:, current] = 255
        overlay[:, current + 1] = 0
        current += 1
        return current
    def _left(self, overlay, current, cursors):
        """Mirror of _right: move the cursor one column left."""
        if current == 0:
            return current
        if current not in cursors:
            overlay[:, current] = 255
        overlay[:, current - 1] = 0
        current -= 1
        return current
    def _save(self, cursors, current):
        """Record the current column once."""
        if current not in cursors:
            cursors.append(current)
    def _undo(self, cursors, overlay):
        """Remove the most recently saved column and erase its line."""
        if len(cursors) == 0:
            return
        last = cursors[-1]
        overlay[:, last] = 255
        cursors.remove(last)
    def _lazy_save(self, cursors, intervals):
        """Extend cursors from the last saved column using preset spacings.
        NOTE(review): starts at intervals[len(cursors)-1], so presets assume a
        specific number of already-saved columns -- confirm."""
        if len(cursors) == 0:
            return None
        start = len(cursors) - 1
        for i in range(start, len(intervals)):
            self._save(cursors, cursors[-1] + intervals[i])
    def _get_intervals(self, cursors):
        """Return the gaps between consecutive saved columns."""
        intervals = []
        for i in range(1, len(cursors)):
            intervals.append(cursors[i] - cursors[i-1])
        return intervals
if __name__ == "__main__":
    # Requires exactly one argument: the image index to resume annotating at.
    if len(sys.argv) == 2:
        at = int(sys.argv[1])
    else:
        exit(1)
    program = Program(FRONT)
    program.start(at)
|
import random
from PyQt5.QtWidgets import QDialog
import common
import world
import qt_fight_dialog
# Tunable gameplay constants for police fights.
config = {
    "run_chance": 20
}
class FightDialog(QDialog):
    """Modal dialog running a turn-based shootout against 1-10 cops.

    Exit codes passed to done(): 111 surrendered, 222 cops defeated,
    333 escaped, 999 player died.
    """
    def __init__(self, parent):
        # NOTE(review): super(QDialog, self) skips QDialog.__init__ in the
        # MRO; the conventional call is super(FightDialog, self) -- confirm.
        super(QDialog, self).__init__(parent.window)
        self.parent = parent
        self.ui = qt_fight_dialog.Ui_Dialog()
        self.ui.setupUi(self)
        self.ui.give_up_button.clicked.connect(self.give_up)
        self.ui.shoot_button.clicked.connect(self.shoot)
        self.ui.run_button.clicked.connect(self.try_run)
        self.ui.dump_button.clicked.connect(self.dump_everything)
        # cop_health is [current, max] for the cop currently being shot.
        self.cop_count = random.randint(1, 10)
        self.cop_health = [100, 100]
        self.update()
    def closeEvent(self, event):
        # Closing the window counts as surrendering.
        self.give_up()
    def set_status(self, msg):
        self.ui.status_label.setText(msg)
    def update(self):
        """Refresh all widgets from game state; ends the dialog if dead."""
        self.ui.cop_count_label.setText(str(self.cop_count))
        self.ui.cop_health_slider.setMaximum(self.cop_health[1])
        self.ui.cop_health_slider.setValue(self.cop_health[0])
        player = self.parent.world.player
        self.ui.health_slider.setMaximum(player.health[1])
        self.ui.health_slider.setValue(player.health[0])
        weapon, ammo = player.weapon
        if weapon is not None:
            pretty_name = common.weapons[weapon]["name"]
            self.ui.weapon_equipped.setText("%s [%i]" % (pretty_name, ammo))
        else:
            self.ui.weapon_equipped.setText("Unarmed")
        # Dumping is only meaningful while carrying contraband.
        if player.has_drugs() or player.has_weapons():
            self.ui.dump_button.setEnabled(True)
        else:
            self.ui.dump_button.setEnabled(False)
        # Shooting requires an equipped weapon with ammo.
        if weapon is not None and ammo > 0:
            self.ui.shoot_button.setEnabled(True)
        else:
            self.ui.shoot_button.setEnabled(False)
        if not self.parent.world.player.is_alive():
            self.done(999)
    def dump_everything(self):
        """Ditch weapon and drugs, then let the cops take their turn."""
        self.parent.world.player.dump_weapon()
        self.parent.world.player.dump_all_drugs()
        self.cop_turn("You dump your weapon and drugs.")
        self.update()
    def give_up(self):
        """Surrender: lose contraband (if any) and close with code 111."""
        self.parent.world.add_log("You gave yourself up like a <b style='font-size: x-large; color: pink; text-decoration: underline'>BITCH</b><br>")
        if self.parent.world.player.has_drugs() or self.parent.world.player.has_weapons():
            self.parent.world.add_log("Officer Hardass says, \"I'll be taking that\"")
            self.parent.world.player.dump_weapon()
            self.parent.world.player.dump_all_drugs()
        else:
            self.parent.world.add_log("Officer Hardass pats you down and mumbles something about getting you next time")
        self.done(111)
    def try_run(self):
        """Attempt escape with config['run_chance'] percent odds."""
        if world.rand_percent(config["run_chance"]):
            self.parent.world.add_log("You got away!")
            self.done(333)
        else:
            self.cop_turn("You can't run from the law...")
            self.update()
    def cop_turn(self, prepend):
        """Cops get a 50% chance to hit the player for 10 damage."""
        if world.rand_percent(50):
            self.parent.world.player.damage(10)
            self.set_status(prepend + " Cops hit you for 10 damage!!!")
        else:
            self.set_status(prepend)
    def shoot(self):
        """Fire at the current cop; on kill, advance to the next cop."""
        weapon = self.parent.world.player.weapon
        if weapon[0] is None or weapon[1] is None:
            return False
        self.cop_health[0] -= common.weapons[weapon[0]]["damage"]
        # NOTE(review): mutates player.weapon[1] in place -- assumes weapon is
        # a mutable sequence (list), not a tuple; confirm.
        self.parent.world.player.weapon[1] -= 1
        if self.cop_health[0] <= 0:
            if self.cop_count <= 1:
                self.parent.world.add_log("<b style='font-size: x-large'>FUCK THE POLICE!</b>")
                self.done(222)
            else:
                self.cop_count -= 1
                self.cop_health[0] = self.cop_health[1]
                self.cop_turn("One down!")
        else:
            self.cop_turn("You hit the cop for %i damage!" % common.weapons[weapon[0]]["damage"])
        self.update()
|
class Team:
    """Defines a Keeper Team and its permission flags."""

    def __init__(self, team_uid='', restrict_edit=False, restrict_view=False, restrict_share=False, name=''):
        self.team_uid = team_uid
        self.restrict_edit = restrict_edit
        self.restrict_view = restrict_view
        self.restrict_share = restrict_share
        self.name = name

    def load(self, team):
        """Populate flags and name from a team dict; falsy flag values
        are normalized to False (truthy values are kept as-is)."""
        for attr in ('restrict_edit', 'restrict_view', 'restrict_share'):
            setattr(self, attr, team[attr] or False)
        self.name = team['name']

    def display(self):
        """Pretty-print the team to stdout."""
        print('')
        print('{0:>20s}: {1:<20s}'.format('Team UID', self.team_uid))
        for label, value in (('Name', self.name),
                             ('Restrict Edit', self.restrict_edit),
                             ('Restrict View', self.restrict_view),
                             ('Restrict Share', self.restrict_share)):
            print('{0:>20s}: {1}'.format(label, value))
        print('')

    def to_string(self):
        """Concatenate uid with the edit/view flags (share flag excluded)."""
        return '{}{}{}'.format(self.team_uid, self.restrict_edit, self.restrict_view)

    def to_lowerstring(self):
        """Lower-cased searchable keywords, newline-separated."""
        return '\n'.join(part.lower() for part in (self.team_uid, self.name))
|
import io
import os
import sys
import unittest
class Test_TestProgram(unittest.TestCase):
    """White-box tests for unittest.main / unittest.TestProgram behavior."""
    def test_discovery_from_dotted_path(self):
        # Discovery with a dotted name should resolve to the package's path.
        loader = unittest.TestLoader()
        tests = [self]
        expectedPath = os.path.abspath(os.path.dirname(unittest.test.__file__))
        self.wasRun = False
        def _find_tests(start_dir, pattern):
            self.wasRun = True
            self.assertEqual(start_dir, expectedPath)
            return tests
        loader._find_tests = _find_tests
        suite = loader.discover('unittest.test')
        self.assertTrue(self.wasRun)
        self.assertEqual(suite._tests, tests)
    # Horrible white box test
    def testNoExit(self):
        # With exit=False, TestProgram should run and record the result
        # instead of raising SystemExit.
        result = object()
        test = object()
        class FakeRunner(object):
            def run(self, test):
                self.test = test
                return result
        runner = FakeRunner()
        oldParseArgs = unittest.TestProgram.parseArgs
        def restoreParseArgs():
            unittest.TestProgram.parseArgs = oldParseArgs
        unittest.TestProgram.parseArgs = lambda *args: None
        self.addCleanup(restoreParseArgs)
        def removeTest():
            del unittest.TestProgram.test
        unittest.TestProgram.test = test
        self.addCleanup(removeTest)
        program = unittest.TestProgram(testRunner=runner, exit=False, verbosity=2)
        self.assertEqual(program.result, result)
        self.assertEqual(runner.test, test)
        self.assertEqual(program.verbosity, 2)
    # Nested fixture: a suite with one passing and one failing test.
    class FooBar(unittest.TestCase):
        def testPass(self):
            assert True
        def testFail(self):
            assert False
    class FooBarLoader(unittest.TestLoader):
        """Test loader that returns a suite containing FooBar."""
        def loadTestsFromModule(self, module):
            return self.suiteClass(
                [self.loadTestsFromTestCase(Test_TestProgram.FooBar)])
    def test_NonExit(self):
        program = unittest.main(exit=False,
                                argv=["foobar"],
                                testRunner=unittest.TextTestRunner(stream=io.StringIO()),
                                testLoader=self.FooBarLoader())
        self.assertTrue(hasattr(program, 'result'))
    def test_Exit(self):
        # exit=True must raise SystemExit after running the suite.
        self.assertRaises(
            SystemExit,
            unittest.main,
            argv=["foobar"],
            testRunner=unittest.TextTestRunner(stream=io.StringIO()),
            exit=True,
            testLoader=self.FooBarLoader())
    def test_ExitAsDefault(self):
        # Exiting is the default behavior when `exit` is not given.
        self.assertRaises(
            SystemExit,
            unittest.main,
            argv=["foobar"],
            testRunner=unittest.TextTestRunner(stream=io.StringIO()),
            testLoader=self.FooBarLoader())
class InitialisableProgram(unittest.TestProgram):
    """TestProgram with a no-op __init__ so tests can poke attributes and
    call parseArgs/runTests directly."""
    exit = False
    result = None
    verbosity = 1
    defaultTest = None
    testRunner = None
    testLoader = unittest.defaultTestLoader
    module = '__main__'
    progName = 'test'
    test = 'test'
    def __init__(self, *args):
        pass
# Sentinel returned by FakeRunner.run so tests can assert identity.
RESULT = object()
class FakeRunner(object):
    """Stand-in test runner recording construction args and the run() test
    in class attributes (reset by TestCommandLineArgs.setUp)."""
    initArgs = None
    test = None
    raiseError = False
    def __init__(self, **kwargs):
        FakeRunner.initArgs = kwargs
        if FakeRunner.raiseError:
            # One-shot: simulate an old-style runner that rejects the new
            # keyword args, then let the retry without kwargs succeed.
            FakeRunner.raiseError = False
            raise TypeError
    def run(self, test):
        FakeRunner.test = test
        return RESULT
class TestCommandLineArgs(unittest.TestCase):
def setUp(self):
self.program = InitialisableProgram()
self.program.createTests = lambda: None
FakeRunner.initArgs = None
FakeRunner.test = None
FakeRunner.raiseError = False
def testHelpAndUnknown(self):
program = self.program
def usageExit(msg=None):
program.msg = msg
program.exit = True
program.usageExit = usageExit
for opt in '-h', '-H', '--help':
program.exit = False
program.parseArgs([None, opt])
self.assertTrue(program.exit)
self.assertIsNone(program.msg)
program.parseArgs([None, '-$'])
self.assertTrue(program.exit)
self.assertIsNotNone(program.msg)
def testVerbosity(self):
program = self.program
for opt in '-q', '--quiet':
program.verbosity = 1
program.parseArgs([None, opt])
self.assertEqual(program.verbosity, 0)
for opt in '-v', '--verbose':
program.verbosity = 1
program.parseArgs([None, opt])
self.assertEqual(program.verbosity, 2)
def testBufferCatchFailfast(self):
program = self.program
for arg, attr in (('buffer', 'buffer'), ('failfast', 'failfast'),
('catch', 'catchbreak')):
if attr == 'catch' and not hasInstallHandler:
continue
short_opt = '-%s' % arg[0]
long_opt = '--%s' % arg
for opt in short_opt, long_opt:
setattr(program, attr, None)
program.parseArgs([None, opt])
self.assertTrue(getattr(program, attr))
for opt in short_opt, long_opt:
not_none = object()
setattr(program, attr, not_none)
program.parseArgs([None, opt])
self.assertEqual(getattr(program, attr), not_none)
def testWarning(self):
"""Test the warnings argument"""
# see #10535
class FakeTP(unittest.TestProgram):
def parseArgs(self, *args, **kw): pass
def runTests(self, *args, **kw): pass
warnoptions = sys.warnoptions
try:
sys.warnoptions[:] = []
# no warn options, no arg -> default
self.assertEqual(FakeTP().warnings, 'default')
# no warn options, w/ arg -> arg value
self.assertEqual(FakeTP(warnings='ignore').warnings, 'ignore')
sys.warnoptions[:] = ['somevalue']
# warn options, no arg -> None
# warn options, w/ arg -> arg value
self.assertEqual(FakeTP().warnings, None)
self.assertEqual(FakeTP(warnings='ignore').warnings, 'ignore')
finally:
sys.warnoptions[:] = warnoptions
def testRunTestsRunnerClass(self):
program = self.program
program.testRunner = FakeRunner
program.verbosity = 'verbosity'
program.failfast = 'failfast'
program.buffer = 'buffer'
program.warnings = 'warnings'
program.runTests()
self.assertEqual(FakeRunner.initArgs, {'verbosity': 'verbosity',
'failfast': 'failfast',
'buffer': 'buffer',
'warnings': 'warnings'})
self.assertEqual(FakeRunner.test, 'test')
self.assertIs(program.result, RESULT)
def testRunTestsRunnerInstance(self):
program = self.program
program.testRunner = FakeRunner()
FakeRunner.initArgs = None
program.runTests()
# A new FakeRunner should not have been instantiated
self.assertIsNone(FakeRunner.initArgs)
self.assertEqual(FakeRunner.test, 'test')
self.assertIs(program.result, RESULT)
def testRunTestsOldRunnerClass(self):
program = self.program
FakeRunner.raiseError = True
program.testRunner = FakeRunner
program.verbosity = 'verbosity'
program.failfast = 'failfast'
program.buffer = 'buffer'
program.test = 'test'
program.runTests()
# If initialising raises a type error it should be retried
# without the new keyword arguments
self.assertEqual(FakeRunner.initArgs, {})
self.assertEqual(FakeRunner.test, 'test')
self.assertIs(program.result, RESULT)
def testCatchBreakInstallsHandler(self):
module = sys.modules['unittest.main']
original = module.installHandler
def restore():
module.installHandler = original
self.addCleanup(restore)
self.installed = False
def fakeInstallHandler():
self.installed = True
module.installHandler = fakeInstallHandler
program = self.program
program.catchbreak = True
program.testRunner = FakeRunner
program.runTests()
self.assertTrue(self.installed)
def _patch_isfile(self, names, exists=True):
def isfile(path):
return path in names
original = os.path.isfile
os.path.isfile = isfile
def restore():
os.path.isfile = original
self.addCleanup(restore)
def testParseArgsFileNames(self):
# running tests with filenames instead of module names
program = self.program
argv = ['progname', 'foo.py', 'bar.Py', 'baz.PY', 'wing.txt']
self._patch_isfile(argv)
program.createTests = lambda: None
program.parseArgs(argv)
# note that 'wing.txt' is not a Python file so the name should
# *not* be converted to a module name
expected = ['foo', 'bar', 'baz', 'wing.txt']
self.assertEqual(program.testNames, expected)
def testParseArgsFilePaths(self):
program = self.program
argv = ['progname', 'foo/bar/baz.py', 'green\\red.py']
self._patch_isfile(argv)
program.createTests = lambda: None
program.parseArgs(argv)
expected = ['foo.bar.baz', 'green.red']
self.assertEqual(program.testNames, expected)
def testParseArgsNonExistentFiles(self):
program = self.program
argv = ['progname', 'foo/bar/baz.py', 'green\\red.py']
self._patch_isfile([])
program.createTests = lambda: None
program.parseArgs(argv)
self.assertEqual(program.testNames, argv[1:])
def testParseArgsAbsolutePathsThatCanBeConverted(self):
cur_dir = os.getcwd()
program = self.program
def _join(name):
return os.path.join(cur_dir, name)
argv = ['progname', _join('foo/bar/baz.py'), _join('green\\red.py')]
self._patch_isfile(argv)
program.createTests = lambda: None
program.parseArgs(argv)
expected = ['foo.bar.baz', 'green.red']
self.assertEqual(program.testNames, expected)
def testParseArgsAbsolutePathsThatCannotBeConverted(self):
program = self.program
# even on Windows '/...' is considered absolute by os.path.abspath
argv = ['progname', '/foo/bar/baz.py', '/green/red.py']
self._patch_isfile(argv)
program.createTests = lambda: None
program.parseArgs(argv)
self.assertEqual(program.testNames, argv[1:])
    # it may be better to use platform-specific functions to normalise paths
    # rather than accepting '.PY' and '\' as file separators on Linux / Mac
    # it would also be better to check that a filename is a valid module
    # identifier (we have a regex for this in loader.py)
    # for invalid filenames, should we raise a useful error rather than
    # leaving the current error message (import of filename fails) in place?
# Allow running this test module directly (e.g. ``python <module>.py``).
if __name__ == '__main__':
    unittest.main()
|
import _plotly_utils.basevalidators
class FamilyValidator(_plotly_utils.basevalidators.StringValidator):
    """String validator for the ``heatmapgl.colorbar.tickfont.family`` property."""

    def __init__(
        self, plotly_name="family", parent_name="heatmapgl.colorbar.tickfont", **kwargs
    ):
        # Fill in this property's defaults only where the caller has not
        # supplied an explicit override.
        for key, default in (
            ("edit_type", "calc"),
            ("no_blank", True),
            ("strict", True),
        ):
            kwargs.setdefault(key, default)
        super(FamilyValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            **kwargs
        )
|
from django.http import Http404
from django.shortcuts import render, get_object_or_404
from django.views.decorators.http import require_http_methods
from projects.models import Project
from .models import Entry
@require_http_methods(["GET"])
def blog_entries(request):
    """
    Blog main page.

    Renders the five most recent blog entries (newest first) together with
    the active projects (ordered by title) for the sidebar.
    """
    # .filter()/.order_by() can be called on the manager directly; the
    # intermediate .all() was redundant.
    projectList = Project.objects.filter(active=True).order_by("title")
    entriesList = Entry.objects.order_by('-pub_time')[:5]
    context = {
        'projectList': projectList,
        'entriesList': entriesList,
    }
    return render(request, 'blog/blog_entries.html', context)
@require_http_methods(["GET"])
def blog_detail(request, entry_uuid, entry_slug):
    """
    Individual blog entry.

    Looks the entry up by uuid (404 if absent) and raises Http404 when the
    slug in the URL does not match the entry's slug, so stale or forged
    links fail instead of rendering the wrong title.
    """
    entry = get_object_or_404(Entry, uuid=entry_uuid)
    # make sure uuid and slug match (the dead `else: pass` branch removed)
    if entry.slug != entry_slug:
        raise Http404
    projectList = Project.objects.filter(active=True).order_by("title")
    context = {
        'projectList': projectList,
        'entry': entry,
    }
    return render(request, 'blog/blog_detail.html', context)
|
import uuid
from unittest.mock import MagicMock
from app.data_model.answer_store import AnswerStore, Answer
from app.questionnaire.completeness import Completeness
from app.questionnaire.location import Location
from app.questionnaire.navigation import Navigation
from app.utilities.schema import load_schema_from_params
from tests.app.app_context_test_case import AppContextTestCase
# Canonical full routing path through the 'navigation' test schema: one
# Location per block, from the first section through to the summary.  Shared
# by tests that need a complete journey.
standard_routing_path = [
    Location('property-details', 0, 'insurance-type'),
    Location('property-details', 0, 'insurance-address'),
    Location('multiple-questions-group', 0, 'household-composition'),
    Location('extra-cover', 0, 'extra-cover-block'),
    Location('skip-payment-group', 0, 'skip-payment'),
    Location('final-section-routed-group', 0, 'final-interstitial'),
    Location('summary-group', 0, 'summary')
]
class TestNavigation(AppContextTestCase):
def test_navigation_no_blocks_completed(self):
schema = load_schema_from_params('test', 'navigation')
schema.answer_is_in_repeating_group = MagicMock(return_value=False)
metadata = {
'eq_id': '1',
'collection_exercise_sid': '999',
'form_type': 'some_form'
}
navigation = _create_navigation(schema, AnswerStore(), metadata, [], standard_routing_path)
user_navigation = [
{
'link_name': 'Property Details',
'highlight': True,
'repeating': False,
'completed': False,
'link_url': Location('property-details', 0, 'insurance-type').url(metadata)
},
{
'link_name': 'Household Composition',
'highlight': False,
'repeating': False,
'completed': False,
'link_url': Location('multiple-questions-group', 0, 'household-composition').url(metadata)
},
{
'link_name': 'Extra Cover',
'highlight': False,
'repeating': False,
'completed': False,
'link_url': Location('extra-cover', 0, 'extra-cover-block').url(metadata)
},
{
'link_name': 'Payment Details',
'highlight': False,
'repeating': False,
'completed': False,
'link_url': Location('skip-payment-group', 0, 'skip-payment').url(metadata)
},
{
'link_name': 'Final section',
'highlight': False,
'repeating': False,
'completed': False,
'link_url': Location('final-section-routed-group', 0, 'final-interstitial').url(metadata)
}
]
self.assertEqual(navigation.build_navigation('property-details', 0), user_navigation)
def test_non_repeating_block_completed(self):
schema = load_schema_from_params('test', 'navigation')
schema.answer_is_in_repeating_group = MagicMock(return_value=True)
metadata = {
'eq_id': '1',
'collection_exercise_sid': '999',
'form_type': 'some_form'
}
answer_store = AnswerStore()
answer_1 = Answer(
value='Contents',
group_instance=0,
answer_instance=0,
answer_id='insurance-type-answer'
)
answer_store.add_or_update(answer_1)
completed_blocks = [
Location('property-details', 0, 'insurance-type'),
Location('property-details', 0, 'insurance-address')
]
routing_path = [
Location('property-details', 0, 'insurance-type'),
Location('skip-payment-group', 0, 'skip-payment'),
]
navigation = _create_navigation(schema, answer_store, metadata, completed_blocks, routing_path)
user_navigation = [
{
'link_name': 'Property Details',
'highlight': True,
'repeating': False,
'completed': True,
'link_url': Location('property-details', 0, 'insurance-type').url(metadata)
},
{
'link_name': 'Property Interstitial',
'highlight': False,
'repeating': False,
'completed': False,
'link_url': Location('property-interstitial-group', 0, 'property-interstitial').url(metadata)
},
{
'link_name': 'Household Composition',
'highlight': False,
'repeating': False,
'completed': False,
'link_url': Location('multiple-questions-group', 0, 'household-composition').url(metadata)
},
{
'link_name': 'Extra Cover',
'highlight': False,
'repeating': False,
'completed': False,
'link_url': Location('extra-cover', 0, 'extra-cover-block').url(metadata)
},
{
'link_name': 'Payment Details',
'highlight': False,
'repeating': False,
'completed': False,
'link_url': Location('skip-payment-group', 0, 'skip-payment').url(metadata)
},
{
'link_name': 'Final section',
'highlight': False,
'repeating': False,
'completed': False,
'link_url': Location('final-section-routed-group', 0, 'final-interstitial').url(metadata)
}
]
self.assertEqual(navigation.build_navigation('property-details', 0), user_navigation)
def test_navigation_repeating_household_and_hidden_household_groups_completed(self):
schema = load_schema_from_params('test', 'navigation')
schema.answer_is_in_repeating_group = MagicMock(return_value=True)
metadata = {
'eq_id': '1',
'collection_exercise_sid': '999',
'form_type': 'some_form'
}
completed_blocks = [
Location('multiple-questions-group', 0, 'household-composition'),
Location('repeating-group', 0, 'repeating-block-1'),
Location('repeating-group', 0, 'repeating-block-2'),
Location('repeating-group', 1, 'repeating-block-1'),
Location('repeating-group', 1, 'repeating-block-2')
]
routing_path = [
Location('property-details', 0, 'insurance-type'),
Location('multiple-questions-group', 0, 'household-composition'),
Location('repeating-group', 0, 'repeating-block-1'),
Location('repeating-group', 0, 'repeating-block-2'),
Location('repeating-group', 1, 'repeating-block-1'),
Location('repeating-group', 1, 'repeating-block-2'),
Location('skip-payment-group', 0, 'skip-payment')
]
person1_uuid = uuid.uuid4()
person2_uuid = uuid.uuid4()
answer_store = AnswerStore(existing_answers=[
{
'group_instance': 0,
'answer_instance': 0,
'answer_id': 'first-name',
'value': 'Jim',
'group_instance_id': person1_uuid,
},
{
'group_instance': 0,
'answer_instance': 1,
'answer_id': 'first-name',
'value': 'Ben',
'group_instance_id': person2_uuid,
},
{
'group_instance': 0,
'answer_instance': 0,
'answer_id': 'what-is-your-age',
'value': None,
'group_instance_id': person1_uuid,
},
{
'group_instance': 0,
'answer_instance': 0,
'answer_id': 'what-is-your-shoe-size',
'value': None,
'group_instance_id': person1_uuid,
},
{
'group_instance': 1,
'answer_instance': 0,
'answer_id': 'what-is-your-age',
'value': None,
'group_instance_id': person2_uuid,
},
{
'group_instance': 1,
'answer_instance': 0,
'answer_id': 'what-is-your-shoe-size',
'value': None,
'group_instance_id': person2_uuid,
}
])
navigation = _create_navigation(schema, answer_store, metadata, completed_blocks, routing_path)
user_navigation = [
{
'link_name': 'Property Details',
'repeating': False,
'completed': False,
'highlight': True,
'link_url': Location('property-details', 0, 'insurance-type').url(metadata)
},
{
'link_name': 'Household Composition',
'highlight': False,
'repeating': False,
'completed': True,
'link_url': Location('multiple-questions-group', 0, 'household-composition').url(metadata)
},
{
'link_name': 'Jim',
'repeating': True,
'completed': True,
'highlight': False,
'link_url': Location('repeating-group', 0, 'repeating-block-1').url(metadata)
},
{
'link_name': 'Ben',
'repeating': True,
'completed': True,
'highlight': False,
'link_url': Location('repeating-group', 1, 'repeating-block-1').url(metadata)
},
{
'link_name': 'Extra Cover',
'repeating': False,
'completed': False,
'highlight': False,
'link_url': Location('extra-cover', 0, 'extra-cover-block').url(metadata)
},
{
'link_name': 'Payment Details',
'repeating': False,
'completed': False,
'highlight': False,
'link_url': Location('skip-payment-group', 0, 'skip-payment').url(metadata)
},
{
'link_name': 'Final section',
'highlight': False,
'repeating': False,
'completed': False,
'link_url': Location('final-section-routed-group', 0, 'final-interstitial').url(metadata)
}
]
built_navigation = navigation.build_navigation('property-details', 0)
self.assertEqual(built_navigation, user_navigation)
def test_navigation_repeating_group_extra_answered_not_completed(self):
schema = load_schema_from_params('test', 'navigation')
schema.answer_is_in_repeating_group = MagicMock(return_value=True)
metadata = {
'eq_id': '1',
'collection_exercise_sid': '999',
'form_type': 'some_form'
}
completed_blocks = [
Location('multiple-questions-group', 0, 'household-composition'),
Location('extra-cover', 0, 'extra-cover-block')
]
answer_store = AnswerStore()
person1_uuid = uuid.uuid4()
person2_uuid = uuid.uuid4()
answer_1 = Answer(
answer_instance=0,
answer_id='first-name',
group_instance=0,
value='Person1',
group_instance_id=person1_uuid
)
answer_2 = Answer(
answer_instance=1,
answer_id='first-name',
group_instance=0,
value='Person2',
group_instance_id=person2_uuid
)
answer_3 = Answer(
answer_instance=1,
answer_id='extra-cover-answer',
group_instance=0,
value=2,
group_instance_id=person2_uuid
)
answer_store.add_or_update(answer_1)
answer_store.add_or_update(answer_2)
answer_store.add_or_update(answer_3)
routing_path = [
Location('property-details', 0, 'insurance-type'),
Location('multiple-questions-group', 0, 'household-composition'),
Location('extra-cover', 0, 'extra-cover-block'),
Location('extra-cover-items-group', 0, 'extra-cover-items'),
Location('skip-payment-group', 0, 'skip-payment'),
]
navigation = _create_navigation(schema, answer_store, metadata, completed_blocks, routing_path)
user_navigation = [
{
'completed': False,
'highlight': True,
'repeating': False,
'link_name': 'Property Details',
'link_url': Location('property-details', 0, 'insurance-type').url(metadata)
},
{
'link_name': 'Household Composition',
'highlight': False,
'repeating': False,
'completed': True,
'link_url': Location('multiple-questions-group', 0, 'household-composition').url(metadata)
},
{
'completed': False,
'highlight': False,
'repeating': True,
'link_name': 'Person1',
'link_url': Location('repeating-group', 0, 'repeating-block-1').url(metadata),
},
{
'completed': False,
'highlight': False,
'repeating': True,
'link_name': 'Person2',
'link_url': Location('repeating-group', 1, 'repeating-block-1').url(metadata),
},
{
'completed': True,
'highlight': False,
'repeating': False,
'link_name': 'Extra Cover',
'link_url': Location('extra-cover', 0, 'extra-cover-block').url(metadata),
},
{
'completed': False,
'highlight': False,
'repeating': False,
'link_name': 'Extra Cover Items',
'link_url': Location('extra-cover-items-group', 0, 'extra-cover-items').url(metadata)
},
{
'completed': False,
'highlight': False,
'repeating': False,
'link_name': 'Payment Details',
'link_url': Location('skip-payment-group', 0, 'skip-payment').url(metadata),
},
{
'link_name': 'Final section',
'highlight': False,
'repeating': False,
'completed': False,
'link_url': Location('final-section-routed-group', 0, 'final-interstitial').url(metadata)
}
]
self.assertEqual(navigation.build_navigation('property-details', 0), user_navigation)
def test_navigation_repeating_group_extra_answered_completed(self):
schema = load_schema_from_params('test', 'navigation')
schema.answer_is_in_repeating_group = MagicMock(return_value=True)
metadata = {
'eq_id': '1',
'collection_exercise_sid': '999',
'form_type': 'some_form'
}
completed_blocks = [
Location('extra-cover', 0, 'extra-cover-block'),
Location('extra-cover-items-group', 0, 'extra-cover-items'),
Location('extra-cover-items-group', 0, 'extra-cover-items-radio'),
Location('extra-cover-items-group', 1, 'extra-cover-items'),
Location('extra-cover-items-group', 1, 'extra-cover-items-radio'),
]
answer_store = AnswerStore()
answer_1 = Answer(
value=2,
group_instance=0,
group_instance_id='group-1-0',
answer_id='extra-cover-answer',
answer_instance=0
)
answer_2 = Answer(
value='1',
group_instance=0,
group_instance_id='group-1-0',
answer_id='extra-cover-items-answer',
answer_instance=0
)
answer_3 = Answer(
value='Yes',
group_instance=0,
group_instance_id='group-1-0',
answer_id='extra-cover-items-radio-answer',
answer_instance=0
)
answer_4 = Answer(
value='2',
group_instance=1,
group_instance_id='group-1-1',
answer_id='extra-cover-items-answer',
answer_instance=0
)
answer_5 = Answer(
value='Yes',
group_instance=1,
group_instance_id='group-1-1',
answer_id='extra-cover-items-radio-answer',
answer_instance=0
)
answer_store.add_or_update(answer_1)
answer_store.add_or_update(answer_2)
answer_store.add_or_update(answer_3)
answer_store.add_or_update(answer_4)
answer_store.add_or_update(answer_5)
routing_path = [
Location('property-details', 0, 'insurance-type'),
Location('extra-cover', 0, 'extra-cover-block'),
Location('extra-cover-items-group', 0, 'extra-cover-items'),
Location('extra-cover-items-group', 0, 'extra-cover-items-radio'),
Location('extra-cover-items-group', 1, 'extra-cover-items'),
Location('extra-cover-items-group', 1, 'extra-cover-items-radio'),
Location('skip-payment-group', 0, 'skip-payment'),
]
navigation = _create_navigation(schema, answer_store, metadata, completed_blocks, routing_path)
user_navigation = [
{
'repeating': False,
'highlight': True,
'completed': False,
'link_name': 'Property Details',
'link_url': Location('property-details', 0, 'insurance-type').url(metadata)
},
{
'link_name': 'Household Composition',
'highlight': False,
'repeating': False,
'completed': False,
'link_url': Location('multiple-questions-group', 0, 'household-composition').url(metadata)
},
{
'repeating': False,
'highlight': False,
'completed': True,
'link_name': 'Extra Cover',
'link_url': Location('extra-cover', 0, 'extra-cover-block').url(metadata)
},
{
'repeating': False,
'highlight': False,
'completed': True,
'link_name': 'Extra Cover Items',
'link_url': Location('extra-cover-items-group', 0, 'extra-cover-items').url(metadata)
},
{
'repeating': False,
'highlight': False,
'completed': False,
'link_name': 'Payment Details',
'link_url': Location('skip-payment-group', 0, 'skip-payment').url(metadata)
},
{
'link_name': 'Final section',
'highlight': False,
'repeating': False,
'completed': False,
'link_url': Location('final-section-routed-group', 0, 'final-interstitial').url(metadata)
}
]
self.assertEqual(navigation.build_navigation('property-details', 0), user_navigation)
def test_navigation_repeating_group_link_name_format(self):
schema = load_schema_from_params('test', 'navigation')
schema.answer_is_in_repeating_group = MagicMock(return_value=True)
metadata = {
'eq_id': '1',
'collection_exercise_sid': '999',
'form_type': 'some_form'
}
completed_blocks = [
Location('multiple-questions-group', 0, 'household-composition'),
]
answer_store = AnswerStore()
person1_uuid = uuid.uuid4()
person2_uuid = uuid.uuid4()
answer_1 = Answer(
answer_instance=0,
answer_id='first-name',
group_instance=0,
value='Joe',
group_instance_id=person1_uuid
)
answer_2 = Answer(
answer_instance=0,
answer_id='last-name',
group_instance=0,
value='Bloggs',
group_instance_id=person1_uuid
)
answer_3 = Answer(
answer_instance=1,
answer_id='first-name',
group_instance=0,
value='Jane',
group_instance_id=person2_uuid
)
answer_4 = Answer(
answer_instance=1,
answer_id='last-name',
group_instance=0,
value='Doe',
group_instance_id=person2_uuid
)
answer_store.add_or_update(answer_1)
answer_store.add_or_update(answer_2)
answer_store.add_or_update(answer_3)
answer_store.add_or_update(answer_4)
routing_path = [
Location('property-details', 0, 'insurance-type'),
Location('multiple-questions-group', 0, 'household-composition'),
Location('skip-payment-group', 0, 'skip-payment'),
]
navigation = _create_navigation(schema, answer_store, metadata, completed_blocks, routing_path)
user_navigation = [
{
'link_name': 'Property Details',
'highlight': True,
'repeating': False,
'completed': False,
'link_url': Location('property-details', 0, 'insurance-type').url(metadata)
},
{
'repeating': False,
'completed': True,
'highlight': False,
'link_name': 'Household Composition',
'link_url': Location('multiple-questions-group', 0, 'household-composition').url(metadata)
},
{
'repeating': True,
'link_name': 'Joe Bloggs',
'completed': False,
'highlight': False,
'link_url': Location('repeating-group', 0, 'repeating-block-1').url(metadata)
},
{
'repeating': True,
'link_name': 'Jane Doe',
'completed': False,
'highlight': False,
'link_url': Location('repeating-group', 1, 'repeating-block-1').url(metadata)
},
{
'link_name': 'Extra Cover',
'highlight': False,
'repeating': False,
'completed': False,
'link_url': Location('extra-cover', 0, 'extra-cover-block').url(metadata)
},
{
'link_name': 'Payment Details',
'highlight': False,
'repeating': False,
'completed': False,
'link_url': Location('skip-payment-group', 0, 'skip-payment').url(metadata)
},
{
'link_name': 'Final section',
'highlight': False,
'repeating': False,
'completed': False,
'link_url': Location('final-section-routed-group', 0, 'final-interstitial').url(metadata)
}
]
self.assertEqual(navigation.build_navigation('property-details', 0), user_navigation)
def test_navigation_skip_condition_hide_group(self):
schema = load_schema_from_params('test', 'navigation')
schema.answer_is_in_repeating_group = MagicMock(return_value=True)
metadata = {
'eq_id': '1',
'collection_exercise_sid': '999',
'form_type': 'some_form'
}
completed_blocks = []
answer_store = AnswerStore()
answer_1 = Answer(
value='Buildings',
group_instance=0,
answer_instance=0,
answer_id='insurance-type-answer'
)
answer_store.add_or_update(answer_1)
navigation = _create_navigation(schema, answer_store, metadata, completed_blocks, [])
user_navigation = navigation.build_navigation('property-details', 0)
link_names = [d['link_name'] for d in user_navigation]
self.assertNotIn('Property Interstitial', link_names)
def test_navigation_skip_condition_show_group(self):
schema = load_schema_from_params('test', 'navigation')
schema.answer_is_in_repeating_group = MagicMock(return_value=True)
metadata = {
'eq_id': '1',
'collection_exercise_sid': '999',
'form_type': 'some_form'
}
completed_blocks = []
answer_store = AnswerStore()
answer_1 = Answer(
value='Contents',
group_instance=0,
answer_instance=0,
answer_id='insurance-type-answer'
)
answer_store.add_or_update(answer_1)
navigation = _create_navigation(schema, answer_store, metadata, completed_blocks, [])
user_navigation = navigation.build_navigation('property-details', 0)
link_names = [d['link_name'] for d in user_navigation]
self.assertIn('Property Interstitial', link_names)
def test_navigation_skip_condition_change_answer(self):
schema = load_schema_from_params('test', 'navigation')
schema.answer_is_in_repeating_group = MagicMock(return_value=True)
metadata = {
'eq_id': '1',
'collection_exercise_sid': '999',
'form_type': 'some_form'
}
completed_blocks = []
answer_store = AnswerStore()
answer_1 = Answer(
value='Contents',
group_instance=0,
group_instance_id='group-0',
answer_instance=0,
answer_id='insurance-type-answer'
)
answer_store.add_or_update(answer_1)
navigation = _create_navigation(schema, answer_store, metadata, completed_blocks, [])
user_navigation = navigation.build_navigation('property-details', 0)
link_names = [d['link_name'] for d in user_navigation]
self.assertIn('Property Interstitial', link_names)
change_answer = Answer(
value='Buildings',
group_instance=0,
group_instance_id='group-0',
answer_instance=0,
answer_id='insurance-type-answer'
)
answer_store.add_or_update(change_answer)
navigation = _create_navigation(schema, answer_store, metadata, completed_blocks, [])
user_navigation = navigation.build_navigation('property-details', 0)
link_names = [d['link_name'] for d in user_navigation]
self.assertNotIn('Property Interstitial', link_names)
def test_build_navigation_returns_none_when_schema_navigation_is_false(self):
# Given
schema = load_schema_from_params('test', 'navigation')
schema.json['navigation'] = {'visible': False}
completed_blocks = []
metadata = {}
navigation = _create_navigation(schema, AnswerStore(), metadata, completed_blocks, [])
# When
nav_menu = navigation.build_navigation('group-1', 'group-instance-1')
# Then
self.assertIsNone(nav_menu)
def test_build_navigation_returns_none_when_no_schema_navigation_property(self):
# Given
schema = load_schema_from_params('test', 'navigation')
del schema.json['navigation']
completed_blocks = []
metadata = {}
navigation = _create_navigation(schema, AnswerStore(), metadata, completed_blocks, [])
# When
nav_menu = navigation.build_navigation('group-1', 'group-instance-1')
# Then
self.assertIsNone(nav_menu)
def test_build_navigation_returns_navigation_when_schema_navigation_is_true(self):
# Given
schema = load_schema_from_params('test', 'navigation')
schema.answer_is_in_repeating_group = MagicMock(return_value=True)
schema.json['navigation'] = {'visible': True}
completed_blocks = []
metadata = {
'eq_id': '1',
'collection_exercise_sid': '999',
'form_type': 'some_form'
}
navigation = _create_navigation(schema, AnswerStore(), metadata, completed_blocks, [])
# When
nav_menu = navigation.build_navigation('group-1', 'group-instance-1')
# Then
self.assertIsNotNone(nav_menu)
def test_build_navigation_summary_link_hidden_when_no_sections_completed(self):
schema = load_schema_from_params('test', 'navigation')
schema.answer_is_in_repeating_group = MagicMock(return_value=True)
metadata = {
'eq_id': '1',
'collection_exercise_sid': '999',
'form_type': 'some_form'
}
navigation = _create_navigation(schema, AnswerStore(), metadata, [], [])
confirmation_link = {
'link_name': 'Summary',
'highlight': False,
'repeating': False,
'completed': False,
'link_url': Location('summary-group', 0, 'summary').url(metadata)
}
self.assertNotIn(confirmation_link, navigation.build_navigation('property-details', 0))
def test_build_navigation_summary_link_hidden_when_not_all_sections_completed(self):
schema = load_schema_from_params('test', 'navigation')
schema.answer_is_in_repeating_group = MagicMock(return_value=True)
metadata = {
'eq_id': '1',
'collection_exercise_sid': '999',
'form_type': 'some_form'
}
completed_blocks = [
Location('property-details', 0, 'insurance-type'),
Location('property-details', 0, 'insurance-address'),
Location('property-details', 0, 'property-interstitial'),
Location('house-details', 0, 'house-type'),
Location('multiple-questions-group', 0, 'household-composition'),
]
navigation = _create_navigation(schema, AnswerStore(), metadata, completed_blocks, standard_routing_path)
confirmation_link = {
'link_name': 'Summary',
'highlight': False,
'repeating': False,
'completed': False,
'link_url': Location('summary-group', 0, 'summary').url(metadata)
}
navigation_links = navigation.build_navigation('property-details', 0)
self.assertNotIn(confirmation_link, navigation_links)
self.assertEqual(len(navigation_links), 5)
def test_build_navigation_summary_link_visible_when_all_sections_complete(self):
schema = load_schema_from_params('test', 'navigation')
schema.answer_is_in_repeating_group = MagicMock(return_value=True)
metadata = {
'eq_id': '1',
'collection_exercise_sid': '999',
'form_type': 'some_form'
}
completed_blocks = [
Location('property-details', 0, 'insurance-type'),
Location('property-details', 0, 'insurance-address'),
Location('property-details', 0, 'property-interstitial'),
Location('house-details', 0, 'house-type'),
Location('multiple-questions-group', 0, 'household-composition'),
Location('repeating-group', 0, 'repeating-block-1'),
Location('repeating-group', 0, 'repeating-block-2'),
Location('extra-cover', 0, 'extra-cover-block'),
Location('extra-cover', 0, 'extra-cover-interstitial'),
Location('payment-details', 0, 'credit-card'),
Location('payment-details', 0, 'expiry-date'),
Location('payment-details', 0, 'security-code'),
Location('payment-details', 0, 'security-code-interstitial'),
Location('extra-cover-items-group', 0, 'extra-cover-items'),
Location('extra-cover-items-group', 0, 'extra-cover-items-radio'),
Location('skip-payment-group', 0, 'skip-payment'),
]
routing_path = [
Location('property-details', 0, 'insurance-type'),
Location('property-details', 0, 'insurance-address'),
Location('property-details', 0, 'property-interstitial'),
Location('house-details', 0, 'house-type'),
Location('multiple-questions-group', 0, 'household-composition'),
Location('repeating-group', 0, 'repeating-block-1'),
Location('repeating-group', 0, 'repeating-block-2'),
Location('payment-details', 0, 'credit-card'),
Location('payment-details', 0, 'expiry-date'),
Location('payment-details', 0, 'security-code'),
Location('payment-details', 0, 'security-code-interstitial'),
Location('extra-cover', 0, 'extra-cover-block'),
Location('extra-cover', 0, 'extra-cover-interstitial'),
Location('extra-cover-items-group', 0, 'extra-cover-items'),
Location('extra-cover-items-group', 0, 'extra-cover-items-radio'),
Location('skip-payment-group', 0, 'skip-payment'),
Location('summary-group', 0, 'summary'),
]
navigation = _create_navigation(schema, AnswerStore(), metadata, completed_blocks, routing_path)
confirmation_link = {
'link_name': 'Summary',
'highlight': False,
'repeating': False,
'completed': False,
'link_url': Location('summary-group', 0, 'summary').url(metadata)
}
navigation_links = navigation.build_navigation('property-details', 0)
self.assertIn(confirmation_link, navigation_links)
self.assertEqual(len(navigation_links), 6)
def test_build_navigation_submit_answers_link_not_visible_for_survey_with_summary(self):
schema = load_schema_from_params('test', 'navigation')
schema.answer_is_in_repeating_group = MagicMock(return_value=True)
metadata = {
'eq_id': '1',
'collection_exercise_sid': '999',
'form_type': 'some_form'
}
completed_blocks = [
Location('property-details', 0, 'insurance-type'),
Location('property-details', 0, 'insurance-address'),
Location('property-details', 0, 'property-interstitial'),
Location('house-details', 0, 'house-type'),
Location('multiple-questions-group', 0, 'household-composition'),
Location('repeating-group', 0, 'repeating-block-1'),
Location('repeating-group', 0, 'repeating-block-2'),
Location('extra-cover', 0, 'extra-cover-block'),
Location('extra-cover', 0, 'extra-cover-interstitial'),
Location('payment-details', 0, 'credit-card'),
Location('payment-details', 0, 'expiry-date'),
Location('payment-details', 0, 'security-code'),
Location('payment-details', 0, 'security-code-interstitial'),
Location('extra-cover-items-group', 0, 'extra-cover-items'),
]
navigation = _create_navigation(schema, AnswerStore(), metadata, completed_blocks, standard_routing_path)
confirmation_link = {
'link_name': 'Submit answers',
'highlight': False,
'repeating': False,
'completed': False,
'link_url': Location('confirmation-group', 0, 'confirmation').url(metadata)
}
navigation_links = navigation.build_navigation('property-details', 0)
self.assertNotIn(confirmation_link, navigation_links)
self.assertEqual(len(navigation_links), 5)
def test_build_navigation_submit_answers_link_hidden_when_no_sections_completed(self):
schema = load_schema_from_params('test', 'navigation_confirmation')
schema.answer_is_in_repeating_group = MagicMock(return_value=True)
metadata = {
'eq_id': '1',
'collection_exercise_sid': '999',
'form_type': 'some_form'
}
navigation = _create_navigation(schema, AnswerStore(), metadata, [], [])
confirmation_link = {
'link_name': 'Submit answers',
'highlight': False,
'repeating': False,
'completed': False,
'link_url': Location('confirmation-group', 0, 'confirmation').url(metadata)
}
navigation_links = navigation.build_navigation('property-details', 0)
self.assertNotIn(confirmation_link, navigation_links)
self.assertEqual(len(navigation_links), 4)
def test_build_navigation_submit_answers_link_hidden_when_not_all_sections_completed(self):
schema = load_schema_from_params('test', 'navigation_confirmation')
schema.answer_is_in_repeating_group = MagicMock(return_value=True)
metadata = {
'eq_id': '1',
'collection_exercise_sid': '999',
'form_type': 'some_form'
}
completed_blocks = [
Location('property-details', 0, 'insurance-type'),
Location('property-details', 0, 'insurance-address'),
Location('property-details', 0, 'property-interstitial'),
Location('house-details', 0, 'house-type'),
Location('multiple-questions-group', 0, 'household-composition'),
]
navigation = _create_navigation(schema, AnswerStore(), metadata, completed_blocks, [])
confirmation_link = {
'link_name': 'Submit answers',
'highlight': False,
'repeating': False,
'completed': False,
'link_url': Location('confirmation-group', 0, 'confirmation').url(metadata)
}
navigation_links = navigation.build_navigation('property-details', 0)
self.assertNotIn(confirmation_link, navigation_links)
self.assertEqual(len(navigation_links), 4)
def test_build_navigation_submit_answers_link_visible_when_all_sections_complete(self):
schema = load_schema_from_params('test', 'navigation_confirmation')
schema.answer_is_in_repeating_group = MagicMock(return_value=True)
metadata = {
'eq_id': '1',
'collection_exercise_sid': '999',
'form_type': 'some_form'
}
completed_blocks = [
Location('property-details', 0, 'insurance-type'),
Location('property-details', 0, 'insurance-address'),
Location('property-details', 0, 'property-interstitial'),
Location('house-details', 0, 'house-type'),
Location('multiple-questions-group', 0, 'household-composition'),
Location('repeating-group', 0, 'repeating-block-1'),
Location('repeating-group', 0, 'repeating-block-2'),
Location('extra-cover', 0, 'extra-cover-block'),
Location('extra-cover', 0, 'extra-cover-interstitial'),
Location('payment-details', 0, 'credit-card'),
Location('payment-details', 0, 'expiry-date'),
Location('payment-details', 0, 'security-code'),
Location('payment-details', 0, 'security-code-interstitial'),
Location('extra-cover-items-group', 0, 'extra-cover-items'),
]
routing_path = [
Location('property-details', 0, 'insurance-type'),
Location('property-details', 0, 'insurance-address'),
Location('property-details', 0, 'property-interstitial'),
Location('house-details', 0, 'house-type'),
Location('multiple-questions-group', 0, 'household-composition'),
Location('repeating-group', 0, 'repeating-block-1'),
Location('repeating-group', 0, 'repeating-block-2'),
Location('extra-cover', 0, 'extra-cover-block'),
Location('extra-cover', 0, 'extra-cover-interstitial'),
Location('payment-details', 0, 'credit-card'),
Location('payment-details', 0, 'expiry-date'),
Location('payment-details', 0, 'security-code'),
Location('payment-details', 0, 'security-code-interstitial'),
Location('extra-cover-items-group', 0, 'extra-cover-items'),
Location('confirmation-group', 0, 'confirmation'),
]
navigation = _create_navigation(schema, AnswerStore(), metadata, completed_blocks, routing_path)
confirmation_link = {
'link_name': 'Submit answers',
'highlight': False,
'repeating': False,
'completed': False,
'link_url': Location('confirmation-group', 0, 'confirmation').url(metadata)
}
navigation_links = navigation.build_navigation('property-details', 0)
self.assertIn(confirmation_link, navigation_links)
self.assertEqual(len(navigation_links), 5)
def test_build_navigation_summary_link_not_visible_for_survey_with_confirmation(self):
    """A confirmation-style survey must never offer a 'Summary' navigation link."""
    schema = load_schema_from_params('test', 'navigation_confirmation')
    schema.answer_is_in_repeating_group = MagicMock(return_value=True)
    metadata = {'eq_id': '1', 'collection_exercise_sid': '999', 'form_type': 'some_form'}
    completed = [
        Location(group_id, 0, block_id)
        for group_id, block_id in [
            ('property-details', 'insurance-type'),
            ('property-details', 'insurance-address'),
            ('property-details', 'property-interstitial'),
            ('house-details', 'house-type'),
            ('multiple-questions-group', 'household-composition'),
            ('repeating-group', 'repeating-block-1'),
            ('repeating-group', 'repeating-block-2'),
            ('extra-cover', 'extra-cover-block'),
            ('extra-cover', 'extra-cover-interstitial'),
            ('payment-details', 'credit-card'),
            ('payment-details', 'expiry-date'),
            ('payment-details', 'security-code'),
            ('payment-details', 'security-code-interstitial'),
            ('extra-cover-items-group', 'extra-cover-items'),
        ]
    ]
    navigation = _create_navigation(schema, AnswerStore(), metadata, completed, [])
    summary_link = {
        'link_name': 'Summary',
        'link_url': Location('summary-group', 0, 'summary').url(metadata),
        'highlight': False,
        'repeating': False,
        'completed': False,
    }
    self.assertNotIn(summary_link, navigation.build_navigation('property-details', 0))
def test_build_navigation_submit_answers_link_not_visible_when_no_completed_blocks(self):
    """With nothing completed, the 'Summary' link must not appear at all."""
    schema = load_schema_from_params('test', 'navigation')
    schema.answer_is_in_repeating_group = MagicMock(return_value=True)
    metadata = {'eq_id': '1', 'collection_exercise_sid': '999', 'form_type': 'some_form'}
    navigation = _create_navigation(schema, AnswerStore(), metadata, [], standard_routing_path)
    summary_link = {
        'link_name': 'Summary',
        'link_url': Location('summary-group', 0, 'summary').url(metadata),
        'highlight': False,
        'repeating': False,
        'completed': False,
    }
    links = navigation.build_navigation('property-details', 0)
    self.assertNotIn(summary_link, links)
    self.assertEqual(len(links), 5)
def test_build_navigation_summary_link_hidden_when_not_on_routing_path(self):
    """The 'Summary' link stays hidden while routed sections remain unfinished."""
    schema = load_schema_from_params('test', 'navigation')
    schema.answer_is_in_repeating_group = MagicMock(return_value=True)
    metadata = {'eq_id': '1', 'collection_exercise_sid': '999', 'form_type': 'some_form'}
    completed = [
        Location(group_id, 0, block_id)
        for group_id, block_id in [
            ('property-details', 'insurance-type'),
            ('property-details', 'insurance-address'),
            ('property-details', 'property-interstitial'),
            ('house-details', 'house-type'),
            ('multiple-questions-group', 'household-composition'),
            ('repeating-group', 'repeating-block-1'),
            ('repeating-group', 'repeating-block-2'),
            ('extra-cover', 'extra-cover-block'),
            ('extra-cover', 'extra-cover-interstitial'),
            ('payment-details', 'credit-card'),
            ('payment-details', 'expiry-date'),
            ('payment-details', 'security-code'),
            ('payment-details', 'security-code-interstitial'),
            ('extra-cover-items-group', 'extra-cover-items'),
        ]
    ]
    # Routing path includes skip-payment, which has not been completed.
    routing_path = [
        Location(group_id, 0, block_id)
        for group_id, block_id in [
            ('property-details', 'insurance-type'),
            ('property-details', 'insurance-address'),
            ('property-details', 'property-interstitial'),
            ('house-details', 'house-type'),
            ('multiple-questions-group', 'household-composition'),
            ('repeating-group', 'repeating-block-1'),
            ('repeating-group', 'repeating-block-2'),
            ('payment-details', 'credit-card'),
            ('payment-details', 'expiry-date'),
            ('payment-details', 'security-code'),
            ('payment-details', 'security-code-interstitial'),
            ('extra-cover', 'extra-cover-block'),
            ('extra-cover', 'extra-cover-interstitial'),
            ('extra-cover-items-group', 'extra-cover-items'),
            ('skip-payment-group', 'skip-payment'),
        ]
    ]
    navigation = _create_navigation(schema, AnswerStore(), metadata, completed, routing_path)
    summary_link = {
        'link_name': 'Summary',
        'link_url': Location('summary-group', 0, 'summary').url(metadata),
        'highlight': False,
        'repeating': False,
        'completed': False,
    }
    links = navigation.build_navigation('property-details', 0)
    self.assertNotIn(summary_link, links)
    self.assertEqual(len(links), 5)
def test_build_navigation_summary_link_shown_when_invalid_section_present(self):
    """'Summary' still shows even when an extra interstitial-only section exists."""
    schema = load_schema_from_params('test', 'navigation')
    metadata = {'eq_id': '1', 'collection_exercise_sid': '999', 'form_type': 'some_form'}
    # Prepend an interstitial-only section, then force the schema to re-parse.
    schema.json['sections'].insert(0, {
        'id': 'intro-section',
        'title': 'Intro',
        'groups': [{
            'id': 'intro-group',
            'blocks': [{
                'id': 'intro-block',
                'type': 'Interstitial'
            }]
        }]
    })
    # pylint: disable=protected-access
    schema._parse_schema()

    def make_path():
        return [
            Location(group_id, 0, block_id)
            for group_id, block_id in [
                ('property-details', 'insurance-type'),
                ('multiple-questions-group', 'household-composition'),
                ('repeating-group', 'repeating-block-1'),
                ('repeating-group', 'repeating-block-2'),
                ('skip-payment-group', 'skip-payment'),
            ]
        ]

    navigation = _create_navigation(schema, AnswerStore(), metadata, make_path(), make_path())
    summary_link = {
        'link_name': 'Summary',
        'link_url': Location('summary-group', 0, 'summary').url(metadata),
        'highlight': False,
        'repeating': False,
        'completed': False,
    }
    links = navigation.build_navigation('skip-payment', 0)
    self.assertIn(summary_link, links)
    self.assertEqual(len(links), 7)
def test_build_navigation_repeated_blocks_independent_completeness(self):
    """Each repeating-group instance reports its own completed state."""
    schema = load_schema_from_params('test', 'navigation')
    schema.answer_is_in_repeating_group = MagicMock(return_value=True)
    metadata = {'eq_id': '1', 'collection_exercise_sid': '999', 'form_type': 'some_form'}
    completed = [
        Location(group_id, 0, block_id)
        for group_id, block_id in [
            ('property-details', 'insurance-type'),
            ('property-details', 'insurance-address'),
            ('repeating-group', 'repeating-block-1'),
            ('repeating-group', 'repeating-block-2'),
            ('multiple-questions-group', 'household-composition'),
            ('extra-cover', 'extra-cover-block'),
        ]
    ]
    person1_uuid = uuid.uuid4()
    person2_uuid = uuid.uuid4()
    answer_store = AnswerStore()
    # Person1 has all repeating answers; Person2 only has a name.
    for answer_instance, answer_id, value, group_instance_id in [
        (0, 'first-name', 'Person1', person1_uuid),
        (1, 'first-name', 'Person2', person2_uuid),
        (0, 'what-is-your-age', 42, person1_uuid),
        (0, 'what-is-your-shoe-size', 'Employed', person1_uuid),
    ]:
        answer_store.add_or_update(Answer(
            answer_instance=answer_instance,
            answer_id=answer_id,
            group_instance=0,
            value=value,
            group_instance_id=group_instance_id,
        ))
    routing_path = [
        Location(group_id, group_instance, block_id)
        for group_id, group_instance, block_id in [
            ('property-details', 0, 'insurance-type'),
            ('property-details', 0, 'insurance-address'),
            ('repeating-group', 0, 'repeating-block-1'),
            ('repeating-group', 0, 'repeating-block-2'),
            ('repeating-group', 1, 'repeating-block-1'),
            ('repeating-group', 1, 'repeating-block-2'),
            ('multiple-questions-group', 0, 'household-composition'),
            ('extra-cover', 0, 'extra-cover-block'),
            ('extra-cover-items-group', 0, 'extra-cover-items'),
            ('skip-payment-group', 0, 'skip-payment'),
        ]
    ]
    navigation = _create_navigation(schema, answer_store, metadata, completed, routing_path)

    def link(name, location, done, highlight=False, repeating=False):
        return {
            'link_name': name,
            'link_url': location.url(metadata),
            'completed': done,
            'highlight': highlight,
            'repeating': repeating,
        }

    expected = [
        link('Property Details', Location('property-details', 0, 'insurance-type'), True, highlight=True),
        link('Household Composition', Location('multiple-questions-group', 0, 'household-composition'), True),
        link('Person1', Location('repeating-group', 0, 'repeating-block-1'), True, repeating=True),
        link('Person2', Location('repeating-group', 1, 'repeating-block-1'), False, repeating=True),
        link('Extra Cover', Location('extra-cover', 0, 'extra-cover-block'), True),
        link('Payment Details', Location('skip-payment-group', 0, 'skip-payment'), False),
        link('Final section', Location('final-section-routed-group', 0, 'final-interstitial'), False),
    ]
    self.assertEqual(navigation.build_navigation('property-details', 0), expected)
def test_build_navigation_first_group_with_skip_condition_containing_repeating_group(self):
    """A skipped first group in a section must not break navigation rendering."""
    schema = load_schema_from_params('test', 'navigation')
    schema.answer_is_in_repeating_group = MagicMock(return_value=True)
    metadata = {'eq_id': '1', 'collection_exercise_sid': '999', 'form_type': 'some_form'}
    # Prepend a skippable intro group to the extra-cover-items section.
    schema.json['sections'][6]['groups'].insert(0, {
        'id': 'extra-cover-items-intro',
        'skip_conditions': [{
            'when': [{
                'id': 'extra-cover-answer',
                'condition': 'not set'
            }]
        }],
        'blocks': [{
            'id': 'household-full-names-intro-block',
            'type': 'Interstitial'
        }]
    })

    def make_locations(include_skip_payment):
        pairs = [
            ('property-details', 'insurance-type'),
            ('property-details', 'insurance-address'),
            ('property-interstitial-section', 'property-interstitial'),
            ('multiple-questions-group', 'household-composition'),
            ('extra-cover', 'extra-cover-block'),
            ('extra-cover-items-group', 'extra-cover-items'),
        ]
        if include_skip_payment:
            pairs.append(('skip-payment-group', 'skip-payment'))
        return [Location(group_id, 0, block_id) for group_id, block_id in pairs]

    navigation = _create_navigation(
        schema, AnswerStore(), metadata, make_locations(False), make_locations(True))

    def link(name, location, done, highlight=False):
        return {
            'link_name': name,
            'link_url': location.url(metadata),
            'completed': done,
            'highlight': highlight,
            'repeating': False,
        }

    # The skippable intro group's section deliberately contributes no link.
    expected = [
        link('Property Details', Location('property-details', 0, 'insurance-type'), True, highlight=True),
        link('Household Composition', Location('multiple-questions-group', 0, 'household-composition'), True),
        link('Extra Cover', Location('extra-cover', 0, 'extra-cover-block'), True),
        link('Payment Details', Location('skip-payment-group', 0, 'skip-payment'), False),
        link('Final section', Location('final-section-routed-group', 0, 'final-interstitial'), False),
    ]
    self.assertEqual(navigation.build_navigation('property-details', 0), expected)
def test_build_navigation_with_single_skipped_block_in_group(self):
    """A section containing a group which doesn't have all of its blocks skipped should
    have its navigation rendered
    """
    schema = load_schema_from_params('test', 'navigation')
    schema.answer_is_in_repeating_group = MagicMock(return_value=True)
    metadata = {'eq_id': '1', 'collection_exercise_sid': '999', 'form_type': 'some_form'}
    # Skip the insurance-address block whenever insurance-type-answer is 'Both'.
    schema.json['sections'][0]['groups'][0]['blocks'][1]['skip_conditions'] = [{
        'when': [{
            'id': 'insurance-type-answer',
            'condition': 'equals',
            'value': 'Both'
        }]
    }]
    answer_store = AnswerStore()
    answer_store.add_or_update(Answer(
        answer_instance=0,
        answer_id='insurance-type-answer',
        group_instance=0,
        value='Both',
    ))
    completed = [Location('property-details', 0, 'insurance-type')]
    routing_path = [
        Location(group_id, 0, block_id)
        for group_id, block_id in [
            ('property-details', 'insurance-type'),
            ('property-details', 'insurance-address'),
            ('skip-payment-group', 'skip-payment'),
        ]
    ]
    navigation = _create_navigation(schema, answer_store, metadata, completed, routing_path)

    def link(name, location, done, highlight=False):
        return {
            'link_name': name,
            'link_url': location.url(metadata),
            'completed': done,
            'highlight': highlight,
            'repeating': False,
        }

    expected = [
        link('Property Details', Location('property-details', 0, 'insurance-type'), True, highlight=True),
        link('House Details', Location('house-details', 0, 'house-type'), False),
        link('Household Composition', Location('multiple-questions-group', 0, 'household-composition'), False),
        link('Extra Cover', Location('extra-cover', 0, 'extra-cover-block'), False),
        link('Payment Details', Location('skip-payment-group', 0, 'skip-payment'), False),
        link('Final section', Location('final-section-routed-group', 0, 'final-interstitial'), False),
    ]
    self.assertEqual(navigation.build_navigation('property-details', 0), expected)
def test_build_navigation_completed_section_with_summary_links_to_last_block(self):
    """A completed section ending in a SectionSummary links to that summary block."""
    schema = load_schema_from_params('test', 'navigation_confirmation')
    schema.answer_is_in_repeating_group = MagicMock(return_value=True)
    # Append a SectionSummary block to the first section's first group.
    schema.json['sections'][0]['groups'][0]['blocks'].append({
        'id': 'property-summary',
        'type': 'SectionSummary'
    })
    metadata = {'eq_id': '1', 'collection_exercise_sid': '999', 'form_type': 'some_form'}
    block_ids = ['insurance-type', 'insurance-address', 'property-interstitial']
    completed = [Location('property-details', 0, block_id) for block_id in block_ids]
    routing_path = [Location('property-details', 0, block_id)
                    for block_id in block_ids + ['property-summary']]
    navigation = _create_navigation(schema, AnswerStore(), metadata, completed, routing_path)
    expected_link = {
        'link_name': 'Property Details',
        'link_url': Location('property-details', 0, 'property-summary').url(metadata),
        'highlight': True,
        'repeating': False,
        'completed': True,
    }
    self.assertIn(expected_link, navigation.build_navigation('property-details', 0))
def _create_navigation(schema, answer_store, metadata, completed_blocks, routing_path):
    """Build a Navigation wired to a Completeness computed from the same state."""
    return Navigation(
        schema, answer_store, metadata, completed_blocks, routing_path,
        Completeness(schema, answer_store, completed_blocks, routing_path, metadata))
|
"""Module for storing coinchoose data in the database."""
import coinchoose
from datetime import datetime
from datetime import timedelta
from decimal import Decimal
import os
import psycopg2 as pg2
import psycopg2.extras as pg2ext
import random
import unittest
# Rows handled per batch operation (the test suite shrinks this).
batchLimit = 1000

# Logical name -> physical table name; tests swap these for *_test clones.
tables = {
    "currency": "currency",
    "currency_historical": "currency_historical",
    "network_status": "network_status",
    "network_status_latest": "network_status_latest"
}

# Connection parameters come from a .pgpass-style file next to this module
# (format: host:port:database:user:password). A context manager guarantees
# the handle is closed even if readline() raises; the previous open/close
# pair leaked the handle on error.
with open(
        "{0}/.pgpass".format(os.path.dirname(os.path.abspath(__file__))),
        'r') as dbcFile:
    dbcRaw = dbcFile.readline().strip().split(':')
dbcParams = {
    'database': dbcRaw[2],
    'user': dbcRaw[3],
    'password': dbcRaw[4],
    'host': dbcRaw[0],
    'port': dbcRaw[1]
}

# Shared, lazily-created connection; see connect().
conn = None
def connect():
    """Return the module-wide database connection, creating it on first use."""
    global conn
    if conn is None:
        conn = pg2.connect(**dbcParams)
    return conn
def cursor():
    """Pull a cursor from the connection."""
    return connect().cursor()
def dictCursor():
    """Pull a dictionary cursor (rows returned as dicts) from the connection."""
    return connect().cursor(cursor_factory=pg2ext.RealDictCursor)
def _createStaging(tableName, cursor):
"""Create staging table."""
stagingTable = "{0}_{1}".format(
tableName, str(int(pow(10, random.random()*10))).zfill(10))
cursor.execute("""CREATE TABLE {0} (LIKE {1}
INCLUDING DEFAULTS)""".format(stagingTable, tableName))
return stagingTable
def _dropStaging(tableName, cursor):
    """Drop a staging table previously created by ``_createStaging()``."""
    cursor.execute("""
        DROP TABLE {0}""".format(tableName))
def insertLatestCurrencies(data, withHistory=True):
    """Insert latest currency data.

    Loads the rows into a temporary staging table, updates changed
    name/algo values in the ``currency`` table, inserts previously unseen
    symbols, and (optionally) appends changed or new rows to
    ``currency_historical``. Commits at the end.

    :param data: iterable of dicts with 'symbol', 'name' and 'algo' keys.
    :param withHistory: when True, also record rows in the historical table.
    """
    # NOTE(review): this local deliberately shadows the module-level
    # cursor() helper.
    cursor = dictCursor()
    targetTable = tables['currency']
    # Create staging table
    stagingTable = _createStaging(targetTable, cursor)
    # Move data into staging table
    cursor.executemany("""
        INSERT INTO {0} (
        symbol, name, algo)
        VALUES (
        %(symbol)s,
        %(name)s,
        %(algo)s
        )""".format(stagingTable), data)
    # Update any altered currencies
    cursor.execute("""
        UPDATE {0} tgt
        SET name = stg.name, algo = stg.algo,
        db_update_time = stg.db_update_time
        FROM {1} stg
        WHERE tgt.symbol = stg.symbol
        AND (tgt.name <> stg.name OR
        tgt.algo <> stg.algo)""".format(
        targetTable, stagingTable))
    # Merge any new currencies into target table
    # (SELECT stg.* relies on the staging table having the same column
    # order as the target — guaranteed by CREATE TABLE ... LIKE.)
    cursor.execute("""
        INSERT INTO {0} (
        symbol, name, algo, db_update_time)
        (SELECT stg.*
        FROM {1} stg
        LEFT JOIN {0} tgt ON tgt.symbol = stg.symbol
        WHERE tgt.symbol IS NULL)""".format(
        targetTable, stagingTable))
    # If requested, merge data into the historical table
    if withHistory:
        historicalTable = tables['currency_historical']
        # Only append rows whose (symbol, name, algo) combination is not
        # already present in the history.
        cursor.execute("""
            INSERT INTO {0} (
            symbol, name, algo, db_update_time)
            (SELECT stg.*
            FROM {1} stg
            LEFT JOIN {0} tgt ON
            tgt.symbol = stg.symbol AND
            tgt.name = stg.name AND
            tgt.algo = stg.algo
            WHERE tgt.symbol IS NULL)""".format(
            historicalTable, stagingTable))
    # Drop staging table
    _dropStaging(stagingTable, cursor)
    # Commit
    cursor.execute("""COMMIT""")
def insertLatestNetworkStatus(data):
    """Insert latest network status data.

    Stages the rows, appends to ``network_status`` only those rows that
    differ from the current snapshot in ``network_status_latest``, then
    replaces the snapshot table's contents wholesale. Commits at the end.

    :param data: iterable of dicts with scrape_time, symbol, current_blocks,
        difficulty, reward, hash_rate and avg_hash_rate keys.
    """
    # NOTE(review): this local deliberately shadows the module-level
    # cursor() helper.
    cursor = dictCursor()
    targetTable = tables['network_status']
    latestTable = tables['network_status_latest']
    # Create staging table
    stagingTable = _createStaging(targetTable, cursor)
    # Move data into staging table
    cursor.executemany("""
        INSERT INTO {0}
        (scrape_time, symbol, current_blocks, difficulty,
        reward, hash_rate, avg_hash_rate)
        VALUES (
        %(scrape_time)s,
        %(symbol)s,
        %(current_blocks)s,
        %(difficulty)s,
        %(reward)s,
        %(hash_rate)s,
        %(avg_hash_rate)s
        )""".format(stagingTable), data)
    # Update target table where we have new data (rows identical to the
    # latest snapshot are skipped via the anti-join on lt.scrape_time).
    cursor.execute("""
        INSERT INTO {0}
        (scrape_time, symbol, current_blocks, difficulty,
        reward, hash_rate, avg_hash_rate, db_update_time)
        (SELECT stg.*
        FROM {1} stg
        LEFT JOIN {2} lt
        ON lt.symbol = stg.symbol
        AND lt.current_blocks = stg.current_blocks
        AND lt.difficulty = stg.difficulty
        AND lt.reward = stg.reward
        AND lt.hash_rate = stg.hash_rate
        AND lt.avg_hash_rate = stg.avg_hash_rate
        WHERE lt.scrape_time IS NULL)""".format(
        targetTable, stagingTable, latestTable))
    # Replace data in latest table with new data in staging
    cursor.execute("""DELETE FROM {0}""".format(latestTable))
    cursor.execute("""INSERT INTO {0}
        SELECT *
        FROM {1}""".format(latestTable, stagingTable))
    # Drop staging table
    _dropStaging(stagingTable, cursor)
    # Commit
    cursor.execute("""COMMIT""")
class PgTest(unittest.TestCase):
    """Testing suite for pg module.

    Requires a reachable PostgreSQL instance via the module-level
    ``dbcParams``; each test runs against throwaway ``*_test`` clones of
    the real tables, created in setUp and dropped in tearDown.
    NOTE(review): written for Python 2 (``dict.iteritems``, ``long``).
    """

    def setUp(self):
        """Setup tables for test."""
        # Swap and sub configuration variables
        global tables
        self.tablesOriginal = tables
        tables = {}
        for key, table in self.tablesOriginal.iteritems():
            tables[key] = "{0}_test".format(table)
        global batchLimit
        self.batchLimitOriginal = batchLimit
        batchLimit = 20
        # Create test tables
        cur = cursor()
        for key, table in tables.iteritems():
            cur.execute("""CREATE TABLE IF NOT EXISTS
                {0} (LIKE {1} INCLUDING ALL)""".format(
                table, self.tablesOriginal[key]))
        cur.execute("""COMMIT""")

    def tearDown(self):
        """Teardown test tables."""
        # Drop test tables
        global tables
        cur = cursor()
        for table in tables.values():
            cur.execute("""DROP TABLE IF EXISTS
                {0}""".format(table))
        # Undo swap / sub
        tables = self.tablesOriginal
        global batchLimit
        batchLimit = self.batchLimitOriginal

    def testInsertLatestCurrencies(self):
        """Test insertLatestCurrencies function.

        The expected counts (59 rows) are tied to the bundled
        example/api.json fixture's contents.
        """
        fileString = "{0}/example/api.json"
        f = open(fileString.format(
            os.path.dirname(os.path.abspath(__file__))), 'r')
        jsonDump = f.read()
        f.close()
        data = coinchoose.parseLatestCurrencies(jsonDump)
        insertLatestCurrencies(data)
        # Test out some basic count statistics
        cur = dictCursor()
        cur.execute("""SELECT COUNT(*) cnt FROM {0}""".format(
            tables['currency']))
        row = cur.fetchone()
        self.assertEqual(row['cnt'], 59)
        cur.execute("""SELECT COUNT(*) cnt FROM {0}""".format(
            tables['currency_historical']))
        row = cur.fetchone()
        self.assertEqual(row['cnt'], 59)
        # Test out contents of first and last row
        expectedFirst = {
            'symbol': 'ALF',
            'name': 'Alphacoin',
            'algo': 'scrypt'
        }
        cur.execute("""SELECT symbol, name, algo
            FROM {0}
            WHERE symbol = '{1}'""".format(
            tables['currency'], 'ALF'))
        datumFirst = cur.fetchone()
        self.assertEqual(datumFirst, expectedFirst)
        expectedLast = {
            'symbol': 'GLC',
            'name': 'GlobalCoin',
            'algo': 'scrypt'
        }
        cur.execute("""SELECT symbol, name, algo
            FROM {0}
            WHERE symbol = '{1}'""".format(
            tables['currency'], 'GLC'))
        datumLast = cur.fetchone()
        self.assertEqual(datumLast, expectedLast)
        # Update the data in a way that modifies what's in the DB:
        # currency rows are updated in place, while the historical table
        # gains one new row per changed (symbol, name, algo) combination.
        updatedData = [
            {
                'symbol': 'ALF',
                'name': 'XXAlphacoinXX',
                'algo': 'scrypt'
            },
            {
                'symbol': 'GLC',
                'name': 'GlobalCoin',
                'algo': 'SHA-256'
            }
        ]
        insertLatestCurrencies(updatedData)
        cur.execute("""SELECT COUNT(*) cnt FROM {0}""".format(
            tables['currency']))
        row = cur.fetchone()
        self.assertEqual(row['cnt'], 59)
        cur.execute("""SELECT COUNT(*) cnt FROM {0}""".format(
            tables['currency_historical']))
        row = cur.fetchone()
        self.assertEqual(row['cnt'], 61)
        cur.execute("""SELECT symbol, name, algo
            FROM {0}
            WHERE symbol = '{1}'""".format(
            tables['currency'], 'ALF'))
        newDatumFirst = cur.fetchone()
        self.assertEqual(newDatumFirst, updatedData[0])
        cur.execute("""SELECT symbol, name, algo
            FROM {0}
            WHERE symbol = '{1}'""".format(
            tables['currency'], 'GLC'))
        newDatumFirst = cur.fetchone()
        self.assertEqual(newDatumFirst, updatedData[1])

    def testInsertLatestNetworkStatus(self):
        """Test insertLatestNetworkStatus function.

        As above, the expected row counts depend on example/api.json.
        """
        fileString = "{0}/example/api.json"
        f = open(fileString.format(
            os.path.dirname(os.path.abspath(__file__))), 'r')
        jsonDump = f.read()
        f.close()
        now = datetime.utcnow()
        data = coinchoose.parseLatestNetworkStatus(jsonDump, scrapeTime=now)
        insertLatestNetworkStatus(data)
        # Test out some basic count statistics
        cur = dictCursor()
        cur.execute("""SELECT COUNT(*) cnt FROM {0}""".format(
            tables['network_status']))
        row = cur.fetchone()
        self.assertEqual(row['cnt'], 59)
        cur.execute("""SELECT COUNT(*) cnt FROM {0}""".format(
            tables['network_status_latest']))
        row = cur.fetchone()
        self.assertEqual(row['cnt'], 59)
        # Test out contents of first and last row
        # (long(...) is Python 2 only.)
        expectedFirst = {
            'symbol': 'ALF',
            'scrape_time': now,
            'current_blocks': long(655258),
            'difficulty': Decimal("1.52109832"),
            'reward': Decimal(50),
            'hash_rate': long(10308452),
            'avg_hash_rate': Decimal("10308452.0000")
        }
        cur.execute("""SELECT
            symbol, scrape_time, current_blocks, difficulty,
            reward, hash_rate, avg_hash_rate
            FROM {0}
            WHERE symbol = '{1}'""".format(
            tables['network_status'], 'ALF'))
        datumFirst = cur.fetchone()
        self.assertEqual(datumFirst, expectedFirst)
        expectedLast = {
            'symbol': 'GLC',
            'scrape_time': now,
            'current_blocks': long(300011),
            'difficulty': Decimal("0.768"),
            'reward': Decimal(100),
            'hash_rate': long(0),
            'avg_hash_rate': Decimal("0")
        }
        cur.execute("""SELECT
            symbol, scrape_time, current_blocks, difficulty,
            reward, hash_rate, avg_hash_rate
            FROM {0}
            WHERE symbol = '{1}'""".format(
            tables['network_status'], 'GLC'))
        datumLast = cur.fetchone()
        self.assertEqual(datumLast, expectedLast)
        # Update the data in a way that modifies some of what's in the DB:
        # the unchanged ALF row must NOT gain a history entry, while the
        # changed GLC row must.
        updatedData = [
            {
                'symbol': 'ALF',
                'scrape_time': now + timedelta(days=1),
                'current_blocks': long(655258),
                'difficulty': Decimal("1.52109832"),
                'reward': Decimal(50),
                'hash_rate': long(10308452),
                'avg_hash_rate': Decimal("10308452.0000")
            },
            {
                'symbol': 'GLC',
                'scrape_time': now + timedelta(days=1),
                'current_blocks': long(300155),
                'difficulty': Decimal("1.234"),
                'reward': Decimal(100),
                'hash_rate': long(20),
                'avg_hash_rate': Decimal("20.34")
            }
        ]
        insertLatestNetworkStatus(updatedData)
        cur.execute("""SELECT COUNT(*) cnt FROM {0}""".format(
            tables['network_status']))
        row = cur.fetchone()
        self.assertEqual(row['cnt'], 60)
        cur.execute("""SELECT COUNT(*) cnt FROM {0}""".format(
            tables['network_status_latest']))
        row = cur.fetchone()
        self.assertEqual(row['cnt'], 2)
        cur.execute("""SELECT COUNT(*) cnt
            FROM {0}
            WHERE symbol = '{1}'""".format(
            tables['network_status'], 'ALF'))
        row = cur.fetchone()
        self.assertEqual(row['cnt'], 1)
        cur.execute("""SELECT COUNT(*) cnt
            FROM {0}
            WHERE symbol ='{1}'""".format(
            tables['network_status'], 'GLC'))
        row = cur.fetchone()
        self.assertEqual(row['cnt'], 2)
        cur.execute("""SELECT
            symbol, scrape_time, current_blocks, difficulty,
            reward, hash_rate, avg_hash_rate
            FROM {0}
            WHERE symbol = '{1}'""".format(
            tables['network_status'], 'ALF'))
        newDatumFirst = cur.fetchone()
        self.assertEqual(newDatumFirst, expectedFirst)
        cur.execute("""SELECT
            symbol, scrape_time, current_blocks, difficulty,
            reward, hash_rate, avg_hash_rate
            FROM {0}
            WHERE symbol = '{1}'
            ORDER BY scrape_time
            DESC LIMIT 1""".format(
            tables['network_status'], 'GLC'))
        newDatumLast = cur.fetchone()
        self.assertEqual(newDatumLast, updatedData[-1])
        cur.execute("""SELECT
            symbol, scrape_time, current_blocks, difficulty,
            reward, hash_rate, avg_hash_rate
            FROM {0}
            WHERE symbol = '{1}'""".format(
            tables['network_status_latest'], 'ALF'))
        newDatumFirst = cur.fetchone()
        self.assertEqual(newDatumFirst, updatedData[0])
        cur.execute("""SELECT
            symbol, scrape_time, current_blocks, difficulty,
            reward, hash_rate, avg_hash_rate
            FROM {0}
            WHERE symbol = '{1}'""".format(
            tables['network_status_latest'], 'GLC'))
        newDatumLast = cur.fetchone()
        self.assertEqual(newDatumLast, updatedData[-1])
# Run this module's test suite when executed directly.
if __name__ == "__main__":
    unittest.main()
|
import _plotly_utils.basevalidators
class AlignsrcValidator(_plotly_utils.basevalidators.SrcValidator):
    """Src validator for the ``alignsrc`` property of ``sunburst.hoverlabel``."""

    def __init__(
        self, plotly_name="alignsrc", parent_name="sunburst.hoverlabel", **kwargs
    ):
        # Pull validator options out of kwargs before delegating, so callers
        # may still override them explicitly.
        edit_type = kwargs.pop("edit_type", "none")
        role = kwargs.pop("role", "info")
        super(AlignsrcValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            role=role,
            **kwargs
        )
|
class Board(object):
    """A tic-tac-toe board: nine squares stored row-major as marker strings."""

    def __init__(self):
        # All nine squares start blank (" ").
        self.squares = [" "] * 9

    def showBoard(self):
        """Converts the board to a string for displaying purposes."""
        brd = "\n | | \n" + \
        " " + self.squares[0] + " | " + self.squares[1] + " | " + self.squares[2] + " \n" + \
        "___|___|___\n" + \
        " | | \n" + \
        " " + self.squares[3] + " | " + self.squares[4] + " | " + self.squares[5] + " \n" + \
        "___|___|___\n" + \
        " | | \n" + \
        " " + self.squares[6] + " | " + self.squares[7] + " | " + self.squares[8] + " \n" + \
        " | | \n"
        return brd

    def isBlank(self, n):
        """Checks if square n is blank."""
        return self.squares[n] == " "

    def markSquare(self, n, mrk):
        """Places marker mrk in square n."""
        self.squares[n] = mrk

    def movesLeft(self):
        """Checks if any squares are still empty."""
        return " " in self.squares

    def gameWon(self):
        """Checks to see if the game has been won (any full line of one marker)."""
        lines = ((0, 1, 2), (3, 4, 5), (6, 7, 8),
                 (0, 3, 6), (1, 4, 7), (2, 5, 8),
                 (0, 4, 8), (2, 4, 6))
        return any(threeInARow(self.squares[a], self.squares[b], self.squares[c])
                   for a, b, c in lines)

    def convertBoard(self):
        """Converts a Board to a list of integers (Blank -> 0, X/O -> \\pm 1 )."""
        return "".join(str(convertMarker(m)) + " " for m in self.squares)

    def intList(self):
        """returns the board as a list of integers (Blank -> 0, X/O -> \\pm 1 )."""
        return [convertMarker(m) for m in self.squares]
def threeInARow(m1, m2, m3):
    """Checks if the marks form a triple (all equal and none blank)."""
    return m1 != " " and m1 == m2 == m3
def convertMarker(m):
    """Converts the marker to an integer: Blank -> 0, X/O -> \\pm 1."""
    try:
        return {" ": 0, "X": 1, "O": -1}[m]
    except KeyError:
        raise ValueError("Bad marker descriptor.")
|
from mass_api_client.schemas import DroppedBySampleRelationSchema, ResolvedBySampleRelationSchema, \
RetrievedBySampleRelationSchema, ContactedBySampleRelationSchema, SsdeepSampleRelationSchema
from .base_with_subclasses import BaseWithSubclasses
from .sample import Sample
class SampleRelation(BaseWithSubclasses):
    """Base type for relations linking two :class:`Sample` objects on the server."""

    _endpoint = 'sample_relation'
    _class_identifier = 'SampleRelation'

    @classmethod
    def create(cls, sample, other, **kwargs):
        """Create a relation of this type between *sample* and *other* on the server."""
        if not (isinstance(sample, Sample) and isinstance(other, Sample)):
            raise ValueError('"sample" and "other" must be an instance of Sample')
        return cls._create(sample=sample.url, other=other.url, **kwargs)

    def __repr__(self):
        return '[{}] {}'.format(str(self.__class__.__name__), str(self.id))

    def __str__(self):
        return repr(self)

    def get_sample(self):
        """
        Retrieves the first :class:`Sample` object of the sample relation from the server.

        :return: The :class:`Sample` object.
        """
        return Sample._get_detail_from_url(self.sample, append_base_url=False)

    def get_other(self):
        """
        Retrieves the other :class:`Sample` object of the sample relation from the server.

        :return: The :class:`Sample` object.
        """
        return Sample._get_detail_from_url(self.other, append_base_url=False)
class DroppedBySampleRelation(SampleRelation):
    """Sample relation created via the ``submit_dropped_by`` endpoint."""
    schema = DroppedBySampleRelationSchema()
    _class_identifier = 'SampleRelation.DroppedBySampleRelation'
    _creation_point = 'sample_relation/submit_dropped_by'
class ResolvedBySampleRelation(SampleRelation):
    """Sample relation created via the ``submit_resolved_by`` endpoint."""
    schema = ResolvedBySampleRelationSchema()
    _class_identifier = 'SampleRelation.ResolvedBySampleRelation'
    _creation_point = 'sample_relation/submit_resolved_by'
class ContactedBySampleRelation(SampleRelation):
    """Sample relation created via the ``submit_contacted_by`` endpoint."""
    schema = ContactedBySampleRelationSchema()
    _class_identifier = 'SampleRelation.ContactedBySampleRelation'
    _creation_point = 'sample_relation/submit_contacted_by'
class RetrievedBySampleRelation(SampleRelation):
    """Sample relation created via the ``submit_retrieved_by`` endpoint."""
    schema = RetrievedBySampleRelationSchema()
    _class_identifier = 'SampleRelation.RetrievedBySampleRelation'
    _creation_point = 'sample_relation/submit_retrieved_by'
class SsdeepSampleRelation(SampleRelation):
    """Sample relation created via the ``submit_ssdeep`` endpoint."""
    schema = SsdeepSampleRelationSchema()
    _class_identifier = 'SampleRelation.SsdeepSampleRelation'
    _creation_point = 'sample_relation/submit_ssdeep'
|
from django.db import models
from datetime import datetime
class Entry(models.Model):
    """Contact details captured together with the submitting device's id."""
    name = models.CharField(max_length=200, blank=True)
    phone1 = models.CharField(max_length=50, blank=True)
    phone2 = models.CharField(max_length=50, blank=True)
    email = models.CharField(max_length=100, blank=True)
    # Callable default: datetime.now is evaluated when each row is saved,
    # not once at import time.
    curr_date = models.DateTimeField(default=datetime.now)
    device_id = models.CharField(max_length=200, blank=True)

    class Meta:
        verbose_name_plural = 'Entries'

    def __unicode__(self):
        # NOTE(review): __unicode__ is Python 2 only; a __str__ method would
        # be needed for readable instances on Python 3.
        return self.name
|
import json
import urllib
from dateutil import parser
from kaha import models
import os
class KahaImport:
    """Importer that downloads resource records from the kaha.co API and
    maps each raw row onto a local ``models.KahaResource``.

    A sample raw row::

        {u'active': u'true',
         u'description': {u'contactname': u'--',
                          u'contactnumber': u'--',
                          u'detail': u'Binayak Basti Balaju Alongside Bishnumati River',
                          u'title': u'Binayak Basti Balaju'},
         u'location': {u'district': u'kathmandu', u'tole': u'thakali samaj ghar'},
         u'stat': {u'helpedctr': u'0', u'unavlblctr': u'0', u'wrngdtactr': u'0'},
         u'type': u'shelter',
         u'uuid': u'5b7cfcdd00e420f892e3e252a1e41d24dc57c58a'}
    """
    def __init__(self, options=None):
        # Bug fix: `options={}` was a shared mutable default; every instance
        # would have aliased the same dict.
        self.data = {}
        self.options = {} if options is None else options
    def grab_data(self, use_cache=False):
        """Fetch the kaha.co JSON feed, optionally reusing a cached copy.

        :param use_cache: when True and ``kaha-data.json`` already exists,
            skip the network download and load the cached file.
        :return: the decoded JSON payload (also stored on ``self.data``).
        """
        has_cache = False
        file_name = 'kaha-data.json'
        if use_cache:
            if os.path.isfile(file_name):
                has_cache = True
        if ((not use_cache) or (not has_cache)):
            kaha_url = 'http://kaha.co/api'
            # NOTE(review): urllib.urlretrieve is Python 2 only; this module
            # appears to target Python 2 (see also `parser`/`models` usage).
            file_handler, header = urllib.urlretrieve(kaha_url, file_name)
        with open(file_name) as data_file:
            self.data = json.load(data_file)
        return self.data
    def fixDitrict(self, district):
        """Normalize known district-name spelling variants to local usage."""
        corrected = district
        if district == u'Sindhupalchok':
            corrected = 'Sindhupalchowk'
        elif district == u'Kavrepalanchok':
            corrected = 'Kavre'
        return corrected
    def transform_row(self, row):
        """Map one raw API row onto a new ``models.KahaResource``.

        :param row: decoded JSON dict for a single kaha.co record.
        :return: an unsaved ``models.KahaResource`` instance.
        """
        resource = models.KahaResource()
        resource.data_source.append(models.KahaResourceSource(source='kaha', source_id=row[u'uuid'], source_json=json.dumps(row)))
        resource.district = self.fixDitrict(row[u'location'][u'district'].title())
        resource.tole = row[u'location'][u'tole'].title()
        resource.title = row[u'description'][u'title']
        resource.description = row[u'description'][u'detail']
        if 'contactname' in row[u'description']:
            resource.contactname = row[u'description'][u'contactname']
        # Bug fix: this condition previously re-tested 'contactname', so a row
        # with a number but no name lost its contact number, and a row with a
        # name but no number raised KeyError.
        if 'contactnumber' in row[u'description']:
            resource.contactnumber = row[u'description'][u'contactnumber']
        resource_for = u'supply'
        if 'channel' in row:
            resource_for = row[u'channel']
        resource.resource_for = resource_for
        if 'date' in row:
            resource.updated = parser.parse(row[u'date'][u'modified'])
            if row[u'date'][u'created']:
                # Typo fix: was `resource.craeted`, which silently set a dead
                # attribute instead of the model's created timestamp.
                resource.created = parser.parse(row[u'date'][u'created'])
        resource.types.append(models.KahaResourceType(resource_type=row[u'type']))
        if 'stat' in row:
            # .items() works on both Python 2 and 3 (was .iteritems()).
            for _key, _value in row[u'stat'].items():
                s = models.KahaResourceProperty(key='stat_%s' % _key, value=_value)
                resource.props.append(s)
        return resource
    def find_record(self, row, db):
        """Return the existing 'kaha'-sourced row for this uuid, or None."""
        return db.session.query(models.KahaResourceSource).filter_by(source_id=row[u'uuid'], source='kaha').first()
|
from django.contrib.auth.models import User
from django.db import models
class Invitation(models.Model):
    # One-time invitation token tied to a user, valid inside a time window.
    # NOTE(review): ForeignKey without on_delete implies Django < 2.0.
    user = models.ForeignKey(User)
    token = models.CharField(max_length=40)
    valid_from = models.DateTimeField()
    valid_until = models.DateTimeField()
|
# Codewars-style example tests. `Test` and `sum_mul` are supplied by the
# kata runner / solution file, not defined here. From the cases below,
# sum_mul(n, m) appears to sum the multiples of n below m and return
# 'INVALID' for non-positive inputs — confirm against the kata statement.
Test.describe('Example Tests')
Test.assert_equals(sum_mul(0, 0), 'INVALID')
Test.assert_equals(sum_mul(2, 9), 20)
Test.assert_equals(sum_mul(3, 13), 30)
Test.assert_equals(sum_mul(4, 123), 1860)
Test.assert_equals(sum_mul(4, -7), 'INVALID')
|
__author__ = 'Tirth Patel <complaints@tirthpatel.com>'
import requests
import re
import shutil
import os
def get_img_links(url):
    """Fetch an imgur album page and return its cleaned image URLs.

    Returns an empty list when the page cannot be retrieved.
    """
    response = requests.get(url)
    if response.status_code != 200:
        return []
    raw_links = re.findall(r'data-src="//(.*?)"', response.text)
    return clean_up(raw_links)
def clean_up(coarse_urls):
    """Turn protocol-relative imgur URLs into absolute http:// URLs.

    The character at index 17 of each scraped URL is dropped (a quirk of
    the scraped markup); everything else is kept as-is.
    """
    return ['http://' + raw[:17] + raw[18:] for raw in coarse_urls]
def download_imgs(album_url, folders=False):
    """Download every image of an imgur album to disk.

    :param album_url: URL of the imgur album page.
    :param folders: when True, sort files into per-extension directories
        under ``out/`` (e.g. ``out/jpgs/``); otherwise everything goes
        into ``downloaded/``.
    """
    imgs = get_img_links(album_url)
    if folders:
        os.makedirs('out/jpgs', exist_ok=True)
        os.makedirs('out/pngs', exist_ok=True)
        os.makedirs('out/webms', exist_ok=True)
        os.makedirs('out/others', exist_ok=True)
    else:
        os.makedirs('downloaded', exist_ok=True)
    for img in imgs:
        response = requests.get(img, stream=True)
        filename = re.findall(r'm/.*', img)[0][2:]  # strip leading 'm/'
        extension = re.findall(r'\..*', filename)[0][1:]
        if folders:
            # Bug fix: the '/' between directory and file name was missing,
            # producing paths like 'out/jpgsname.jpg'.
            path = 'out/' + extension + 's/' + filename
        else:
            path = 'downloaded/' + filename
        # Stream the body straight to disk without buffering it in memory.
        with open(path, 'wb') as out_file:
            shutil.copyfileobj(response.raw, out_file)
        del response
if __name__ == '__main__':
    # One album URL per line; lines starting with '#' are skipped.
    with open('imgur_links.txt', 'r') as links:
        albums = links.read().splitlines()
        for album in albums:
            # Robustness fix: also skip blank lines, which previously
            # raised IndexError on album[0].
            if album and album[0] != "#":
                download_imgs(album)
|
import re
from collections import defaultdict, namedtuple
from collections.abc import Iterable
from functools import lru_cache
from sanic.exceptions import NotFound, InvalidUsage
from sanic.views import CompositionView
# Immutable records describing a registered route and one captured
# path parameter.
Route = namedtuple(
    'Route',
    ['handler', 'methods', 'pattern', 'parameters', 'name', 'uri'])
Parameter = namedtuple('Parameter', ['name', 'cast'])
# Maps a '<param:type>' tag to (cast callable, regex fragment).
REGEX_TYPES = {
    'string': (str, r'[^/]+'),
    'int': (int, r'\d+'),
    # Bug fix: this was r'[0-9\\.]+'; in a raw string that character class
    # also matched a literal backslash, which float() then failed to cast
    # at dispatch time.
    'number': (float, r'[0-9\.]+'),
    'alpha': (str, r'[A-Za-z]+'),
    'path': (str, r'[^/].*?'),
}
# Bound for the router's lru_cache-backed lookup caches.
ROUTER_CACHE_SIZE = 1024
def url_hash(url):
    # Bucket key for dynamic-route lookup: the number of path segments,
    # i.e. how many '/' separators the URL contains.
    return len(url.split('/')) - 1
class RouteExists(Exception):
    """Raised when registering a URI that is already routed."""
    pass
class RouteDoesNotExist(Exception):
    """Raised when removing a URI that was never registered."""
    pass
class Router:
    """Router supports basic routing with parameters and method checks
    Usage:
    .. code-block:: python
        @sanic.route('/my/url/<my_param>', methods=['GET', 'POST', ...])
        def my_route(request, my_param):
            do stuff...
    or
    .. code-block:: python
        @sanic.route('/my/url/<my_param:my_type>', methods=['GET', 'POST', ...])
        def my_route_with_type(request, my_param: my_type):
            do stuff...
    Parameters will be passed as keyword arguments to the request handling
    function. Provided parameters can also have a type by appending :type to
    the <parameter>. Given parameter must be able to be type-casted to this.
    If no type is provided, a string is expected. A regular expression can
    also be passed in as the type. The argument given to the function will
    always be a string, independent of the type.
    """
    # Class-level placeholders; the real containers are created per
    # instance in __init__.
    routes_static = None
    routes_dynamic = None
    routes_always_check = None
    # Matches one '<param>' / '<param:type>' tag inside a URI template.
    parameter_pattern = re.compile(r'<(.+?)>')
    def __init__(self):
        # All routes by (host-prefixed) URI, plus three lookup structures:
        # exact-match URIs, parameterized routes bucketed by segment count,
        # and routes whose pattern defeats the bucketing (checked every time).
        self.routes_all = {}
        self.routes_static = {}
        self.routes_dynamic = defaultdict(list)
        self.routes_always_check = []
        self.hosts = set()
    @classmethod
    def parse_parameter_string(cls, parameter_string):
        """Parse a parameter string into its constituent name, type, and
        pattern
        For example::
            parse_parameter_string('<param_one:[A-z]>')` ->
                ('param_one', str, '[A-z]')
        :param parameter_string: String to parse
        :return: tuple containing
            (parameter_name, parameter_type, parameter_pattern)
        """
        # We could receive NAME or NAME:PATTERN
        name = parameter_string
        pattern = 'string'
        if ':' in parameter_string:
            name, pattern = parameter_string.split(':', 1)
        default = (str, pattern)
        # Pull from pre-configured types; unknown names are treated as a
        # raw regex with a str cast.
        _type, pattern = REGEX_TYPES.get(pattern, default)
        return name, _type, pattern
    def add(self, uri, methods, handler, host=None, strict_slashes=False,
            version=None):
        """Add a handler to the route list
        :param uri: path to match
        :param methods: sequence of accepted method names. If none are
            provided, any method is allowed
        :param handler: request handler function.
            When executed, it should provide a response object.
        :param strict_slashes: strict to trailing slash
        :param version: current version of the route or blueprint. See
            docs for further details.
        :return: Nothing
        """
        if version is not None:
            # Prefix the URI with '/v<version>'.
            if uri.startswith('/'):
                uri = "/".join(["/v{}".format(str(version)), uri[1:]])
            else:
                uri = "/".join(["/v{}".format(str(version)), uri])
        # add regular version
        self._add(uri, methods, handler, host)
        if strict_slashes:
            return
        # Add versions with and without trailing /
        slash_is_missing = (
            not uri[-1] == '/'
            and not self.routes_all.get(uri + '/', False)
        )
        without_slash_is_missing = (
            uri[-1] == '/'
            and not self.routes_all.get(uri[:-1], False)
            and not uri == '/'
        )
        # add version with trailing slash
        if slash_is_missing:
            self._add(uri + '/', methods, handler, host)
        # add version without trailing slash
        elif without_slash_is_missing:
            self._add(uri[:-1], methods, handler, host)
    def _add(self, uri, methods, handler, host=None):
        """Add a handler to the route list
        :param uri: path to match
        :param methods: sequence of accepted method names. If none are
            provided, any method is allowed
        :param handler: request handler function.
            When executed, it should provide a response object.
        :return: Nothing
        """
        if host is not None:
            if isinstance(host, str):
                # Routes are keyed by host + uri so virtual hosts coexist.
                uri = host + uri
                self.hosts.add(host)
            else:
                if not isinstance(host, Iterable):
                    raise ValueError("Expected either string or Iterable of "
                                     "host strings, not {!r}".format(host))
                # Register the same route once per host, then stop.
                for host_ in host:
                    self.add(uri, methods, handler, host_)
                return
        # Dict for faster lookups of if method allowed
        if methods:
            methods = frozenset(methods)
        parameters = []
        properties = {"unhashable": None}
        def add_parameter(match):
            # re.sub callback: record the parameter and emit its capture
            # group into the compiled route pattern.
            name = match.group(1)
            name, _type, pattern = self.parse_parameter_string(name)
            parameter = Parameter(
                name=name, cast=_type)
            parameters.append(parameter)
            # Mark the whole route as unhashable if it has the hash key in it
            if re.search(r'(^|[^^]){1}/', pattern):
                properties['unhashable'] = True
            # Mark the route as unhashable if it matches the hash key
            elif re.search(r'/', pattern):
                properties['unhashable'] = True
            return '({})'.format(pattern)
        pattern_string = re.sub(self.parameter_pattern, add_parameter, uri)
        pattern = re.compile(r'^{}$'.format(pattern_string))
        def merge_route(route, methods, handler):
            # merge to the existing route when possible.
            if not route.methods or not methods:
                # method-unspecified routes are not mergeable.
                raise RouteExists(
                    "Route already registered: {}".format(uri))
            elif route.methods.intersection(methods):
                # already existing method is not overloadable.
                duplicated = methods.intersection(route.methods)
                raise RouteExists(
                    "Route already registered: {} [{}]".format(
                        uri, ','.join(list(duplicated))))
            if isinstance(route.handler, CompositionView):
                view = route.handler
            else:
                view = CompositionView()
                view.add(route.methods, route.handler)
            view.add(methods, handler)
            route = route._replace(
                handler=view, methods=methods.union(route.methods))
            return route
        if parameters:
            # TODO: This is too complex, we need to reduce the complexity
            if properties['unhashable']:
                routes_to_check = self.routes_always_check
                ndx, route = self.check_dynamic_route_exists(
                    pattern, routes_to_check)
            else:
                routes_to_check = self.routes_dynamic[url_hash(uri)]
                ndx, route = self.check_dynamic_route_exists(
                    pattern, routes_to_check)
            if ndx != -1:
                # Pop the ndx of the route, no dups of the same route
                routes_to_check.pop(ndx)
        else:
            route = self.routes_all.get(uri)
        if route:
            route = merge_route(route, methods, handler)
        else:
            # prefix the handler name with the blueprint name
            # if available
            if hasattr(handler, '__blueprintname__'):
                handler_name = '{}.{}'.format(
                    handler.__blueprintname__, handler.__name__)
            else:
                handler_name = getattr(handler, '__name__', None)
            route = Route(
                handler=handler, methods=methods, pattern=pattern,
                parameters=parameters, name=handler_name, uri=uri)
        # File the route into whichever lookup structure fits it.
        self.routes_all[uri] = route
        if properties['unhashable']:
            self.routes_always_check.append(route)
        elif parameters:
            self.routes_dynamic[url_hash(uri)].append(route)
        else:
            self.routes_static[uri] = route
    @staticmethod
    def check_dynamic_route_exists(pattern, routes_to_check):
        # Linear scan for an already-registered route with this exact
        # compiled pattern; (-1, None) when absent.
        for ndx, route in enumerate(routes_to_check):
            if route.pattern == pattern:
                return ndx, route
        else:
            return -1, None
    def remove(self, uri, clean_cache=True, host=None):
        """Unregister a route; raises RouteDoesNotExist if absent."""
        if host is not None:
            uri = host + uri
        try:
            route = self.routes_all.pop(uri)
        except KeyError:
            raise RouteDoesNotExist("Route was not registered: {}".format(uri))
        if route in self.routes_always_check:
            self.routes_always_check.remove(route)
        elif url_hash(uri) in self.routes_dynamic \
                and route in self.routes_dynamic[url_hash(uri)]:
            self.routes_dynamic[url_hash(uri)].remove(route)
        else:
            self.routes_static.pop(uri)
        if clean_cache:
            # Cached lookups may still reference the removed route.
            self._get.cache_clear()
    @lru_cache(maxsize=ROUTER_CACHE_SIZE)
    def find_route_by_view_name(self, view_name):
        """Find a route in the router based on the specified view name.
        :param view_name: string of view name to search by
        :return: tuple containing (uri, Route)
        """
        if not view_name:
            return (None, None)
        for uri, route in self.routes_all.items():
            if route.name == view_name:
                return uri, route
        return (None, None)
    def get(self, request):
        """Get a request handler based on the URL of the request, or raises an
        error
        :param request: Request object
        :return: handler, arguments, keyword arguments
        """
        # No virtual hosts specified; default behavior
        if not self.hosts:
            return self._get(request.path, request.method, '')
        # virtual hosts specified; try to match route to the host header
        try:
            return self._get(request.path, request.method,
                             request.headers.get("Host", ''))
        # try default hosts
        except NotFound:
            return self._get(request.path, request.method, '')
    @lru_cache(maxsize=ROUTER_CACHE_SIZE)
    def _get(self, url, method, host):
        """Get a request handler based on the URL of the request, or raises an
        error. Internal method for caching.
        :param url: request URL
        :param method: request method
        :return: handler, arguments, keyword arguments
        """
        url = host + url
        # Check against known static routes
        route = self.routes_static.get(url)
        method_not_supported = InvalidUsage(
            'Method {} not allowed for URL {}'.format(
                method, url), status_code=405)
        if route:
            if route.methods and method not in route.methods:
                raise method_not_supported
            match = route.pattern.match(url)
        else:
            route_found = False
            # Move on to testing all regex routes
            for route in self.routes_dynamic[url_hash(url)]:
                match = route.pattern.match(url)
                route_found |= match is not None
                # Do early method checking
                if match and method in route.methods:
                    break
            else:
                # Lastly, check against all regex routes that cannot be hashed
                for route in self.routes_always_check:
                    match = route.pattern.match(url)
                    route_found |= match is not None
                    # Do early method checking
                    if match and method in route.methods:
                        break
                else:
                    # Route was found but the methods didn't match
                    if route_found:
                        raise method_not_supported
                    raise NotFound('Requested URL {} not found'.format(url))
        # Cast each captured path value with its parameter's declared type.
        kwargs = {p.name: p.cast(value)
                  for value, p
                  in zip(match.groups(1), route.parameters)}
        route_handler = route.handler
        if hasattr(route_handler, 'handlers'):
            # CompositionView: pick the handler bound to this method.
            route_handler = route_handler.handlers[method]
        return route_handler, [], kwargs, route.uri
    def is_stream_handler(self, request):
        """ Handler for request is stream or not.
        :param request: Request object
        :return: bool
        """
        try:
            handler = self.get(request)[0]
        except (NotFound, InvalidUsage):
            return False
        if (hasattr(handler, 'view_class') and
                hasattr(handler.view_class, request.method.lower())):
            handler = getattr(handler.view_class, request.method.lower())
        return hasattr(handler, 'is_stream')
|
import unittest
from azure.communication.sms._shared.policy import HMACCredentialsPolicy
from devtools_testutils import AzureTestCase
class HMACTest(AzureTestCase):
    """Regression tests pinning HMACCredentialsPolicy._compute_hmac output."""
    def setUp(self):
        super(HMACTest, self).setUp()
    def test_correct_hmac(self):
        # ASCII payload against a fixed (fake) base64 key.
        auth_policy = HMACCredentialsPolicy("contoso.communicationservices.azure.com", "pw==")
        sha_val = auth_policy._compute_hmac("banana")
        assert sha_val == "88EC05aAS9iXnaimtNO78JLjiPtfWryQB/5QYEzEsu8="
    def test_correct_utf16_hmac(self):
        # Non-BMP character exercises the policy's unicode handling.
        auth_policy = HMACCredentialsPolicy("contoso.communicationservices.azure.com", "pw==")
        sha_val = auth_policy._compute_hmac(u"😀")
        assert sha_val == "1rudJKjn2Zi+3hRrBG29wIF6pD6YyAeQR1ZcFtXoKAU="
|
from sqlalchemy.testing import expect_deprecated_20
from sqlalchemy.testing import fixtures
from sqlalchemy.util.compat import import_
class DeprecationWarningsTest(fixtures.TestBase):
    """Checks that importing deprecated SQLAlchemy packages warns."""
    __backend__ = False
    def test_deprecate_databases(self):
        # Importing sqlalchemy.databases must emit the 2.0 removal warning.
        with expect_deprecated_20(
            "The `database` package is deprecated and will be removed in v2.0 "
        ):
            import_("sqlalchemy.databases")
|
from setuptools import setup
# Test-only requirements, also exposed through the 'tests' extra below.
tests_require = [
    'coveralls',
    'mock',
    'testtools'
]
setup(
    name='SpreadFlowDelta',
    version='0.0.1',
    description='Common SpreadFlow processors for delta-type messages',
    author='Lorenz Schori',
    author_email='lo@znerol.ch',
    url='https://github.com/znerol/spreadflow-delta',
    packages=[
        'spreadflow_delta',
        'spreadflow_delta.test'
    ],
    install_requires=[
        'SpreadFlowCore'
    ],
    tests_require=tests_require,
    extras_require={
        'tests': tests_require
    },
    # Twisted plugin discovery requires an unzipped install.
    zip_safe=False,
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Framework :: Twisted',
        'Intended Audience :: Developers',
        'Intended Audience :: Information Technology',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2.7',
        'Topic :: Multimedia'
    ]
)
|
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class AvailableDelegationsOperations:
    """AvailableDelegationsOperations async operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2018_12_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    # NOTE: auto-generated (AutoRest-style) SDK code; changes normally
    # belong in the generator, not here.
    models = _models
    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def list(
        self,
        location: str,
        **kwargs: Any
    ) -> AsyncIterable["_models.AvailableDelegationsResult"]:
        """Gets all of the available subnet delegations for this subscription in this region.
        :param location: The location of the subnet.
        :type location: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either AvailableDelegationsResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_12_01.models.AvailableDelegationsResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.AvailableDelegationsResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-12-01"
        accept = "application/json"
        def prepare_request(next_link=None):
            # Build the GET request: templated URL for the first page,
            # the service-provided next_link for subsequent pages.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'location': self._serialize.url("location", location, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        async def extract_data(pipeline_response):
            # Deserialize one page and hand back (continuation token, items).
            deserialized = self._deserialize('AvailableDelegationsResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            # Fetch one page, mapping known HTTP errors to typed exceptions.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/locations/{location}/availableDelegations'}  # type: ignore
|
OPERATION_NAMES = ("conjunction", "disjunction", "implication", "exclusive", "equivalence")
def boolean(x, y, operation):
    """Apply the named logical operation to the truth values x and y.

    Returns None when the operation name is not recognized.
    """
    if operation == "conjunction":
        return x & y
    if operation == "disjunction":
        return x | y
    if operation == "implication":
        return (not x) | y
    if operation == "exclusive":
        return x ^ y
    if operation == "equivalence":
        return x == y
    return None
if __name__ == '__main__':
    # These "asserts" using only for self-checking and not necessary for auto-testing
    assert boolean(1, 0, "conjunction") == 0, "and"
    assert boolean(1, 0, "disjunction") == 1, "or"
    assert boolean(1, 1, "implication") == 1, "material"
    assert boolean(0, 1, "exclusive") == 1, "xor"
    assert boolean(0, 1, "equivalence") == 0, "same?"
    print("All done? Earn rewards by using the 'Check' button!")
|
""" A simple way of interacting to a ethereum node through JSON RPC commands. """
import logging
import time
import warnings
import json
import gevent
from ethereum.abi import ContractTranslator
from ethereum.tools.keys import privtoaddr
from ethereum.transactions import Transaction
from ethereum.utils import denoms, int_to_big_endian, big_endian_to_int, normalize_address
from ethereum.tools._solidity import solidity_unresolved_symbols, solidity_library_symbol, solidity_resolve_symbols
from tinyrpc.protocols.jsonrpc import JSONRPCErrorResponse, JSONRPCSuccessResponse
from tinyrpc.protocols.jsonrpc import JSONRPCProtocol
from tinyrpc.transports.http import HttpPostClientTransport
from pyethapp.jsonrpc import address_encoder as _address_encoder
from pyethapp.jsonrpc import (
data_encoder, data_decoder, address_decoder, default_gasprice,
default_startgas, quantity_encoder, quantity_decoder,
)
# The 20-byte zero address ("empty" Ethereum address).
z_address = '\x00' * 20
log = logging.getLogger(__name__)
def address_encoder(address):
    """ Normalize address and hex encode it with the additional of the '0x'
    prefix.
    """
    normalized_address = normalize_address(address, allow_blank=True)
    return _address_encoder(normalized_address)
def block_tag_encoder(val):
    """Encode a block reference for JSON-RPC: a block number, one of the
    literal tags 'latest'/'pending', or None/'' (returns None).
    """
    if isinstance(val, int):
        return quantity_encoder(val)
    elif val and isinstance(val, bytes):
        # NOTE(review): this is Python 2 code (bytes is str there); on
        # Python 3 a str tag would fall through to the assert below.
        assert val in ('latest', 'pending')
        return data_encoder(val)
    else:
        # Empty/falsy tags are encoded as JSON null.
        assert not val
def topic_encoder(topic):
    """Hex-encode an integer log topic for eth_newFilter.

    NOTE(review): `long` exists only on Python 2 — this module targets py2.
    """
    assert isinstance(topic, (int, long))
    return data_encoder(int_to_big_endian(topic))
def topic_decoder(topic):
    """Decode a hex-encoded log topic back to its integer value."""
    return big_endian_to_int(data_decoder(topic))
def deploy_dependencies_symbols(all_contract):
    """Map every contract name to the list of contract names whose library
    symbols are still unresolved in its hex bytecode.

    Raises ValueError when two contract names collide on the same
    (truncated) library symbol.
    """
    symbols_to_contract = {}
    for name in all_contract:
        library_symbol = solidity_library_symbol(name)
        if library_symbol in symbols_to_contract:
            raise ValueError('Conflicting library names.')
        symbols_to_contract[library_symbol] = name
    dependencies = {}
    for name, contract in all_contract.items():
        unresolved = solidity_unresolved_symbols(contract['bin_hex'])
        dependencies[name] = [symbols_to_contract[symbol] for symbol in unresolved]
    return dependencies
def dependencies_order_of_build(target_contract, dependencies_map):
    """Return an ordered list of contracts sufficient to successfully deploy
    `target_contract` (dependencies first, target last).

    Note:
        Assumes `dependencies_map` describes an acyclic graph.
    """
    if not dependencies_map:
        return [target_contract]
    if target_contract not in dependencies_map:
        raise ValueError('no dependencies defined for {}'.format(target_contract))
    build_sequence = [target_contract]
    queue = list(dependencies_map[target_contract])
    while queue:
        current = queue.pop(0)
        insert_at = len(build_sequence)
        for dep in dependencies_map[current]:
            # The current contract must be placed before every dependency
            # of it that is already in the sequence.
            if dep in build_sequence:
                insert_at = build_sequence.index(dep)
            else:
                queue.append(dep)
        build_sequence.insert(insert_at, current)
    # Built front-to-back as "dependents first"; deployment needs the reverse.
    build_sequence.reverse()
    return build_sequence
class JSONRPCClientReplyError(Exception):
    """Raised when the node returns a JSON-RPC error (or malformed) reply."""
    pass
class JSONRPCClient(object):
protocol = JSONRPCProtocol()
    def __init__(self, host='127.0.0.1', port=4000, print_communication=True,
                 privkey=None, sender=None, use_ssl=False, transport=None):
        """
        Args:
            host (str): host address to connect to.
            port (int): port number to connect to.
            print_communication (bool): True to print the rpc communication.
            privkey: specify privkey for local signing
            sender (address): the sender address, computed from privkey if provided.
            use_ssl (bool): Use https instead of http.
            transport: Tiny rpc transport instance.
        """
        if transport is None:
            # Default transport: JSON over a plain HTTP(S) POST to the node.
            self.transport = HttpPostClientTransport('{}://{}:{}'.format(
                'https' if use_ssl else 'http', host, port), headers={'content-type': 'application/json'})
        else:
            self.transport = transport
        self.print_communication = print_communication
        self.privkey = privkey
        # Lazily resolved via the `sender` property when not given.
        self._sender = sender
        self.port = port
    def __repr__(self):
        # Identify the client by the port it talks to.
        return '<JSONRPCClient @%d>' % self.port
    @property
    def sender(self):
        """Default from-address: derived from privkey when available,
        otherwise the explicitly supplied sender, otherwise the node's
        coinbase (fetched once and cached)."""
        if self.privkey:
            return privtoaddr(self.privkey)
        if self._sender is None:
            self._sender = self.coinbase
        return self._sender
    @property
    def coinbase(self):
        """ Return the client coinbase address. """
        return address_decoder(self.call('eth_coinbase'))
    def blocknumber(self):
        """ Return the most recent block. """
        return quantity_decoder(self.call('eth_blockNumber'))
    def nonce(self, address):
        """Return the pending-state nonce for `address` via the non-standard
        'eth_nonce' endpoint (pyethapp extension).

        Raises JSONRPCClientReplyError with a hint when the endpoint does
        not implement 'eth_nonce'.
        """
        if len(address) == 40:
            # Accept hex-encoded addresses too (Python 2 str.decode('hex')).
            address = address.decode('hex')
        try:
            res = self.call('eth_nonce', address_encoder(address), 'pending')
            return quantity_decoder(res)
        except JSONRPCClientReplyError as e:
            if e.message == 'Method not found':
                raise JSONRPCClientReplyError(
                    "'eth_nonce' is not supported by your endpoint (pyethapp only). "
                    "For transactions use server-side nonces: "
                    "('eth_sendTransaction' with 'nonce=None')")
            raise e
    def balance(self, account):
        """ Return the balance of the account of given address. """
        # 'pending' includes not-yet-mined transactions in the balance.
        res = self.call('eth_getBalance', address_encoder(account), 'pending')
        return quantity_decoder(res)
    def gaslimit(self):
        """Return the current block gas limit (non-standard 'eth_gasLimit')."""
        return quantity_decoder(self.call('eth_gasLimit'))
    def lastgasprice(self):
        """Return the last gas price (non-standard 'eth_lastGasPrice')."""
        return quantity_decoder(self.call('eth_lastGasPrice'))
    def new_abi_contract(self, contract_interface, address):
        """Deprecated alias kept for backwards compatibility."""
        warnings.warn('deprecated, use new_contract_proxy', DeprecationWarning)
        return self.new_contract_proxy(contract_interface, address)
    def new_contract_proxy(self, contract_interface, address):
        """ Return a proxy for interacting with a smart contract.
        Args:
            contract_interface: The contract interface as defined by the json.
            address: The contract's address.
        """
        sender = self.sender or privtoaddr(self.privkey)
        # The proxy is wired to this client's call/transact/estimate methods.
        return ContractProxy(
            sender,
            contract_interface,
            address,
            self.eth_call,
            self.send_transaction,
            self.eth_estimateGas,
        )
    def deploy_solidity_contract(self, sender, contract_name, all_contracts,  # pylint: disable=too-many-locals
                                 libraries, constructor_parameters, timeout=None, gasprice=default_gasprice):
        """Deploy `contract_name`, first deploying (in dependency order) every
        library its bytecode links against, then return a proxy to it.

        Args:
            sender: address the deployment transactions are sent from.
            contract_name: key into `all_contracts` for the target contract.
            all_contracts: compiler output mapping name -> {'abi', 'bin_hex', ...}.
            libraries: mapping of already-deployed library name -> address;
                extended in place as libraries get deployed here.
            constructor_parameters: values ABI-encoded and appended to the
                creation bytecode (may be falsy for none).
            timeout: seconds to wait for each deployment transaction.
            gasprice: gas price used for all deployment transactions.
        """
        if contract_name not in all_contracts:
            raise ValueError('Unkonwn contract {}'.format(contract_name))
        libraries = dict(libraries)
        contract = all_contracts[contract_name]
        contract_interface = contract['abi']
        symbols = solidity_unresolved_symbols(contract['bin_hex'])
        if symbols:
            # Every unresolved link symbol must correspond to a known contract.
            available_symbols = map(solidity_library_symbol, all_contracts.keys())  # pylint: disable=bad-builtin
            unknown_symbols = set(symbols) - set(available_symbols)
            if unknown_symbols:
                msg = 'Cannot deploy contract, known symbols {}, unresolved symbols {}.'.format(
                    available_symbols,
                    unknown_symbols,
                )
                raise Exception(msg)
            dependencies = deploy_dependencies_symbols(all_contracts)
            deployment_order = dependencies_order_of_build(contract_name, dependencies)
            deployment_order.pop()  # remove `contract_name` from the list
            log.debug('Deploing dependencies: {}'.format(str(deployment_order)))
            # Deploy each library, then record its address so later bytecode
            # can be linked against it.
            for deploy_contract in deployment_order:
                dependency_contract = all_contracts[deploy_contract]
                hex_bytecode = solidity_resolve_symbols(dependency_contract['bin_hex'], libraries)
                bytecode = hex_bytecode.decode('hex')
                dependency_contract['bin_hex'] = hex_bytecode
                dependency_contract['bin'] = bytecode
                transaction_hash_hex = self.send_transaction(
                    sender,
                    to='',
                    data=bytecode,
                    gasprice=gasprice,
                )
                transaction_hash = transaction_hash_hex.decode('hex')
                self.poll(transaction_hash, timeout=timeout)
                receipt = self.eth_getTransactionReceipt(transaction_hash)
                contract_address = receipt['contractAddress']
                contract_address = contract_address[2:]  # remove the hexadecimal prefix 0x from the address
                libraries[deploy_contract] = contract_address
                deployed_code = self.eth_getCode(contract_address.decode('hex'))
                if deployed_code == '0x':
                    raise RuntimeError("Contract address has no code, check gas usage.")
            # Link the target contract against the now-known library addresses.
            hex_bytecode = solidity_resolve_symbols(contract['bin_hex'], libraries)
            bytecode = hex_bytecode.decode('hex')
            contract['bin_hex'] = hex_bytecode
            contract['bin'] = bytecode
        if constructor_parameters:
            # Constructor arguments are ABI-encoded and appended to the code.
            translator = ContractTranslator(contract_interface)
            parameters = translator.encode_constructor_arguments(constructor_parameters)
            bytecode = contract['bin'] + parameters
        else:
            bytecode = contract['bin']
        transaction_hash_hex = self.send_transaction(
            sender,
            to='',
            data=bytecode,
            gasprice=gasprice,
        )
        transaction_hash = transaction_hash_hex.decode('hex')
        self.poll(transaction_hash, timeout=timeout)
        receipt = self.eth_getTransactionReceipt(transaction_hash)
        contract_address = receipt['contractAddress']
        deployed_code = self.eth_getCode(contract_address[2:].decode('hex'))
        if deployed_code == '0x':
            raise RuntimeError("Deployment of {} failed. Contract address has no code, check gas usage.".format(
                contract_name
            ))
        return self.new_contract_proxy(
            contract_interface,
            contract_address,
        )
    def find_block(self, condition):
        """Query all blocks one by one and return the first one for which
        `condition(block)` evaluates to `True`.

        Also returns (a falsy block) once the chain runs out of blocks.
        """
        i = 0
        while True:
            block = self.call('eth_getBlockByNumber', quantity_encoder(i), True)
            if condition(block) or not block:
                return block
            i += 1
    def new_filter(self, fromBlock=None, toBlock=None, address=None, topics=None):
        """ Creates a filter object, based on filter options, to notify when
        the state changes (logs). To check if the state has changed, call
        eth_getFilterChanges.

        :return: the new filter id as an int.
        """
        json_data = {
            'fromBlock': block_tag_encoder(fromBlock or ''),
            'toBlock': block_tag_encoder(toBlock or ''),
        }
        if address is not None:
            json_data['address'] = address_encoder(address)
        if topics is not None:
            if not isinstance(topics, list):
                raise ValueError('topics must be a list')
            json_data['topics'] = [topic_encoder(topic) for topic in topics]
        filter_id = self.call('eth_newFilter', json_data)
        return quantity_decoder(filter_id)
    def filter_changes(self, fid):
        """Poll a filter and return its new events, decoded.

        Returns None when there are no changes, raw decoded data for a
        plain data reply, or a list of decoded log dicts otherwise.
        """
        changes = self.call('eth_getFilterChanges', quantity_encoder(fid))
        if not changes:
            return None
        elif isinstance(changes, bytes):
            return data_decoder(changes)
        else:
            # Per-field decoders for log entries; unknown/None fields dropped.
            decoders = dict(blockHash=data_decoder,
                            transactionHash=data_decoder,
                            data=data_decoder,
                            address=address_decoder,
                            topics=lambda x: [topic_decoder(t) for t in x],
                            blockNumber=quantity_decoder,
                            logIndex=quantity_decoder,
                            transactionIndex=quantity_decoder)
            return [{k: decoders[k](v) for k, v in c.items() if v is not None} for c in changes]
    def call(self, method, *args):
        """ Do the request and returns the result.
        Args:
            method (str): The RPC method.
            args: The encoded arguments expected by the method.
                - Object arguments must be supplied as an dictionary.
                - Quantity arguments must be hex encoded starting with '0x' and
                without left zeros.
                - Data arguments must be hex encoded starting with '0x'
        """
        request = self.protocol.create_request(method, args)
        reply = self.transport.send_message(request.serialize())
        if self.print_communication:
            # NOTE: Python 2 print statements — this module targets py2.
            print json.dumps(json.loads(request.serialize()), indent=2)
            print reply
        jsonrpc_reply = self.protocol.parse_reply(reply)
        if isinstance(jsonrpc_reply, JSONRPCSuccessResponse):
            return jsonrpc_reply.result
        elif isinstance(jsonrpc_reply, JSONRPCErrorResponse):
            raise JSONRPCClientReplyError(jsonrpc_reply.error)
        else:
            raise JSONRPCClientReplyError('Unknown type of JSONRPC reply')
    # Calling the client directly is equivalent to .call(...).
    __call__ = call
def send_transaction(self, sender, to, value=0, data='', startgas=0,
                     gasprice=10 * denoms.szabo, nonce=None):
    """ Helper to send signed messages.
    This method will use the `privkey` provided in the constructor to
    locally sign the transaction. This requires an extended server
    implementation that accepts the variables v, r, and s.

    Args:
        sender (address): 20-byte sender address; derived from `privkey`
            when a privkey is set and no sender is given.
        to (address): Destination address.
        value (int): Amount transferred with the transaction.
        data (bin): Raw (not hex encoded) transaction payload.
        startgas (int): Gas limit; when falsy, defaults to gaslimit() - 1.
        gasprice (int): Price per unit of gas.
        nonce (int): Explicit nonce; fetched from the node when None and a
            sender is known.

    Returns:
        str: Hex representation of the server reply (transaction hash or
        contract address).
    """
    if not self.privkey and not sender:
        raise ValueError('Either privkey or sender needs to be supplied.')
    if self.privkey and not sender:
        # Derive the sender from the local private key.
        sender = privtoaddr(self.privkey)
        if nonce is None:
            nonce = self.nonce(sender)
    elif self.privkey:
        # Both privkey and sender given: they must agree.
        if sender != privtoaddr(self.privkey):
            raise ValueError('sender for a different privkey.')
        if nonce is None:
            nonce = self.nonce(sender)
    else:
        # No local key: the server signs; an unknown nonce defaults to 0.
        if nonce is None:
            nonce = 0
    if not startgas:
        startgas = self.gaslimit() - 1
    tx = Transaction(nonce, gasprice, startgas, to=to, value=value, data=data)
    if self.privkey:
        # add the fields v, r and s
        tx.sign(self.privkey)
    tx_dict = tx.to_dict()
    # Transaction.to_dict() encodes 'data', so we need to decode it here.
    tx_dict['data'] = data_decoder(tx_dict['data'])
    # rename the fields to match the eth_sendTransaction signature
    tx_dict.pop('hash')
    tx_dict['sender'] = sender
    tx_dict['gasPrice'] = tx_dict.pop('gasprice')
    tx_dict['gas'] = tx_dict.pop('startgas')
    res = self.eth_sendTransaction(**tx_dict)
    # 20 bytes == contract address (creation), 32 bytes == tx hash.
    assert len(res) in (20, 32)
    return res.encode('hex')  # Python 2 str-to-hex encoding
def eth_sendTransaction(self, nonce=None, sender='', to='', value=0, data='',
                        gasPrice=default_gasprice, gas=default_startgas,
                        v=None, r=None, s=None):
    """ Creates new message call transaction or a contract creation, if the
    data field contains code.
    Note:
        The support for local signing through the variables v,r,s is not
        part of the standard spec, a extended server is required.
    Args:
        from (address): The 20 bytes address the transaction is send from.
        to (address): DATA, 20 Bytes - (optional when creating new
            contract) The address the transaction is directed to.
        gas (int): Gas provided for the transaction execution. It will
            return unused gas.
        gasPrice (int): gasPrice used for each paid gas.
        value (int): Value send with this transaction.
        data (bin): The compiled code of a contract OR the hash of the
            invoked method signature and encoded parameters.
        nonce (int): This allows to overwrite your own pending transactions
            that use the same nonce.

    Returns:
        The decoded server reply (transaction hash or contract address).
    """
    # Heuristic: raw binary data is unlikely to be purely alphanumeric.
    if to == '' and data.isalnum():
        warnings.warn(
            'Verify that the data parameter is _not_ hex encoded, if this is the case '
            'the data will be double encoded and result in unexpected '
            'behavior.'
        )
    if to == '0' * 40:
        warnings.warn('For contract creating the empty string must be used.')
    json_data = {
        'to': data_encoder(normalize_address(to, allow_blank=True)),
        'value': quantity_encoder(value),
        'gasPrice': quantity_encoder(gasPrice),
        'gas': quantity_encoder(gas),
        'data': data_encoder(data),
    }
    if not sender and not (v and r and s):
        raise ValueError('Either sender or v, r, s needs to be informed.')
    # NOTE(review): `sender` defaults to '' (never None), so this branch
    # also runs with an empty sender when v/r/s are supplied -- confirm
    # address_encoder('') is intended in that case.
    if sender is not None:
        json_data['from'] = address_encoder(sender)
    if v and r and s:
        json_data['v'] = quantity_encoder(v)
        json_data['r'] = quantity_encoder(r)
        json_data['s'] = quantity_encoder(s)
    if nonce is not None:
        json_data['nonce'] = quantity_encoder(nonce)
    res = self.call('eth_sendTransaction', json_data)
    return data_decoder(res)
def _format_call(self, sender='', to='', value=0, data='',
                 startgas=default_startgas, gasprice=default_gasprice):
    """ Helper to format the transaction data.

    Builds the JSON-RPC parameter dictionary, encoding each field with
    its matching encoder and skipping fields explicitly set to None.
    """
    field_specs = (
        ('from', sender, address_encoder),
        ('to', to, data_encoder),
        ('value', value, quantity_encoder),
        ('gasPrice', gasprice, quantity_encoder),
        ('gas', startgas, quantity_encoder),
        ('data', data, data_encoder),
    )
    return {
        key: encode(raw_value)
        for key, raw_value, encode in field_specs
        if raw_value is not None
    }
def eth_call(self, sender='', to='', value=0, data='',
             startgas=default_startgas, gasprice=default_gasprice,
             block_number='latest'):
    """ Execute a message call on the node without creating a transaction
    on the block chain.

    Args:
        sender: The address the call is made from.
        to: The address the call is directed to.
        value (int): Value attached to the call.
        data (bin): Hash of the method signature and encoded parameters.
            For details see Ethereum Contract ABI.
        startgas (int): Gas budget for the execution. eth_call consumes
            zero gas, but this parameter may be needed by some executions.
        gasprice (int): gasPrice used for each paid gas.
        block_number: Block number or tag selecting the chain state used
            for the call.
    """
    call_parameters = self._format_call(
        sender,
        to,
        value,
        data,
        startgas,
        gasprice,
    )
    raw_result = self.call('eth_call', call_parameters, block_number)
    return data_decoder(raw_result)
def eth_estimateGas(self, sender='', to='', value=0, data='',
                    startgas=default_startgas, gasprice=default_gasprice):
    """ Simulate a call or transaction without touching the blockchain and
    return the amount of gas it consumed, useful as a gas estimate.

    Args:
        sender: The address the call is made from.
        to: The address the call is directed to.
        value (int): Value attached to the call.
        data (bin): Hash of the method signature and encoded parameters.
            For details see Ethereum Contract ABI.
        startgas (int): Gas budget for the execution.
        gasprice (int): gasPrice used for each paid gas.
    """
    call_parameters = self._format_call(
        sender,
        to,
        value,
        data,
        startgas,
        gasprice,
    )
    raw_result = self.call('eth_estimateGas', call_parameters)
    return quantity_decoder(raw_result)
def eth_getTransactionReceipt(self, transaction_hash):
    """ Returns the receipt of a transaction by transaction hash.
    Args:
        transaction_hash: Raw (binary, not hex encoded) 32-byte hash of
            a transaction.
    Returns:
        A dict representing the transaction receipt object, or null when no
        receipt was found.
    """
    looks_encoded = transaction_hash.startswith('0x')
    if looks_encoded:
        warnings.warn(
            'transaction_hash seems to be already encoded, this will'
            ' result in unexpected behavior'
        )
    if len(transaction_hash) != 32:
        raise ValueError(
            'transaction_hash length must be 32 (it might be hex encode)'
        )
    encoded_hash = data_encoder(transaction_hash)
    return self.call('eth_getTransactionReceipt', encoded_hash)
def eth_getCode(self, address, block='latest'):
    """ Returns code at a given address.
    Args:
        address: A raw (binary, not hex encoded) 20-byte address.
        block: Integer block number, or the string "latest",
            "earliest" or "pending".
    """
    looks_encoded = address.startswith('0x')
    if looks_encoded:
        warnings.warn(
            'address seems to be already encoded, this will result '
            'in unexpected behavior'
        )
    if len(address) != 20:
        raise ValueError(
            'address length must be 20 (it might be hex encode)'
        )
    encoded_address = address_encoder(address)
    return self.call('eth_getCode', encoded_address, block)
def eth_getTransactionByHash(self, transaction_hash):
    """ Returns the information about a transaction requested by
    transaction hash.

    Args:
        transaction_hash: Raw (binary, not hex encoded) 32-byte hash.
    """
    looks_encoded = transaction_hash.startswith('0x')
    if looks_encoded:
        warnings.warn(
            'transaction_hash seems to be already encoded, this will'
            ' result in unexpected behavior'
        )
    if len(transaction_hash) != 32:
        raise ValueError(
            'transaction_hash length must be 32 (it might be hex encode)'
        )
    encoded_hash = data_encoder(transaction_hash)
    return self.call('eth_getTransactionByHash', encoded_hash)
def poll(self, transaction_hash, confirmations=None, timeout=None):
    """ Wait until the `transaction_hash` is applied or rejected.
    If timeout is None, this could wait indefinitely!
    Args:
        transaction_hash (hash): Transaction hash that we are waiting for.
        confirmations (int): Number of block confirmations that we will
            wait for.
        timeout (float): Timeout in seconds, raise an Exception on
            timeout.
    """
    if transaction_hash.startswith('0x'):
        warnings.warn(
            'transaction_hash seems to be already encoded, this will'
            ' result in unexpected behavior'
        )
    if len(transaction_hash) != 32:
        raise ValueError(
            'transaction_hash length must be 32 (it might be hex encode)'
        )
    transaction_hash = data_encoder(transaction_hash)
    # Arm a gevent timeout that fires inside the polling loops below; it
    # is cancelled in the finally block regardless of outcome.
    deadline = None
    if timeout:
        deadline = gevent.Timeout(timeout)
        deadline.start()
    try:
        # used to check if the transaction was removed, this could happen
        # if gas price is to low:
        #
        # > Transaction (acbca3d6) below gas price (tx=1 Wei ask=18
        # > Shannon). All sequential txs from this address(7d0eae79)
        # > will be ignored
        #
        last_result = None
        while True:
            # Could return None for a short period of time, until the
            # transaction is added to the pool
            transaction = self.call('eth_getTransactionByHash', transaction_hash)
            # if the transaction was added to the pool and then removed
            if transaction is None and last_result is not None:
                raise Exception('invalid transaction, check gas price')
            # the transaction was added to the pool and mined
            if transaction and transaction['blockNumber'] is not None:
                break
            last_result = transaction
            gevent.sleep(.5)
        if confirmations:
            # this will wait for both APPLIED and REVERTED transactions
            transaction_block = quantity_decoder(transaction['blockNumber'])
            confirmation_block = transaction_block + confirmations
            block_number = self.blocknumber()
            while block_number < confirmation_block:
                gevent.sleep(.5)
                block_number = self.blocknumber()
    except gevent.Timeout:
        raise Exception('timeout when polling for transaction')
    finally:
        if deadline:
            deadline.cancel()
class MethodProxy(object):
    """ A callable interface that exposes a contract function.

    One proxy is created per ABI function (see ContractProxy). Calling the
    proxy dispatches to `call` for constant functions and `transact`
    otherwise.
    """

    # Keyword arguments the underlying RPC helpers accept.
    valid_kargs = set(('gasprice', 'startgas', 'value'))

    def __init__(self, sender, contract_address, function_name, translator,
                 call_function, transaction_function, estimate_function=None):
        self.sender = sender
        self.contract_address = contract_address
        self.function_name = function_name
        self.translator = translator
        self.call_function = call_function
        self.transaction_function = transaction_function
        self.estimate_function = estimate_function

    def _invoke(self, func, args, kargs):
        """ ABI-encode `args` for this function and invoke `func` with the
        standard sender/to/value/data keyword arguments. """
        assert set(kargs.keys()).issubset(self.valid_kargs)
        data = self.translator.encode(self.function_name, args)
        return func(
            sender=self.sender,
            to=self.contract_address,
            value=kargs.pop('value', 0),
            data=data,
            **kargs
        )

    def transact(self, *args, **kargs):
        """ Send a state-changing transaction; returns the tx hash. """
        return self._invoke(self.transaction_function, args, kargs)

    def call(self, *args, **kargs):
        """ Execute the function read-only; returns the decoded result. """
        res = self._invoke(self.call_function, args, kargs)
        if res:
            res = self.translator.decode(self.function_name, res)
            # Unwrap single-value returns for convenience.
            res = res[0] if len(res) == 1 else res
        return res

    def estimate_gas(self, *args, **kargs):
        """ Estimate the gas used by executing the function.

        Raises:
            RuntimeError: If no estimate_function was supplied.
        """
        if not self.estimate_function:
            raise RuntimeError('estimate_function wasnt supplied.')
        return self._invoke(self.estimate_function, args, kargs)

    def __call__(self, *args, **kargs):
        # Constant (read-only) functions are called, everything else is a
        # transaction.
        if self.translator.function_data[self.function_name]['is_constant']:
            return self.call(*args, **kargs)
        else:
            return self.transact(*args, **kargs)
class ContractProxy(object):
    """ Exposes a smart contract as a python object.
    Contract calls can be made directly in this object, all the functions will
    be exposed with the equivalent api and will perform the argument
    translation.
    """
    def __init__(self, sender, abi, address, call_func, transact_func, estimate_function=None):
        sender = normalize_address(sender)
        self.abi = abi
        self.address = address = normalize_address(address)
        self.translator = ContractTranslator(abi)
        # Attach one MethodProxy per ABI function so each contract
        # function can be used like a bound method of this object.
        for function_name in self.translator.function_data:
            function_proxy = MethodProxy(
                sender,
                address,
                function_name,
                self.translator,
                call_func,
                transact_func,
                estimate_function,
            )
            # Build a human-readable signature for the proxy docstring,
            # e.g. 'transfer(address to, uint256 value)'.
            type_argument = self.translator.function_data[function_name]['signature']
            arguments = [
                '{type} {argument}'.format(type=type_, argument=argument)
                for type_, argument in type_argument
            ]
            function_signature = ', '.join(arguments)
            function_proxy.__doc__ = '{function_name}({function_signature})'.format(
                function_name=function_name,
                function_signature=function_signature,
            )
            setattr(self, function_name, function_proxy)
# Backwards-compatible alias.
ABIContract = ContractProxy
|
import logging
import unittest
"""Dominator (https://codility.com/demo/take-sample-test/dominator/)
Analysis:
- Find leader in O(N) and count_of_leader (https://codility.com/media/train/6-Leader.pdf)
- Validate if it's more than half, return index
"""
__author__ = 'au9ustine'
# Module-level logging: message-only format at DEBUG level.
logging.basicConfig(format='%(message)s', level=logging.DEBUG)
def solution(A):
    """Return an index of the dominator of A, or -1 if none exists.

    A dominator is a value occupying more than half of the positions in A.
    Uses the Boyer-Moore majority-vote scan (O(N) time, O(1) extra space)
    to find the only possible candidate, then verifies its count.

    Args:
        A (list[int]): Input sequence; may be empty.

    Returns:
        int: An index at which the dominator occurs, or -1.
    """
    candidate_val = -1
    count_of_candidate = 0
    candidate_idx = -1
    for i, val in enumerate(A):
        if count_of_candidate == 0:
            # Previous candidate fully cancelled out -- adopt a new one.
            candidate_idx = i
            candidate_val = val
            count_of_candidate += 1
        elif val != candidate_val:
            count_of_candidate -= 1
        else:
            count_of_candidate += 1
    # Verification pass: the candidate dominates only if it occupies more
    # than half the list. list.count avoids building a throwaway list.
    if A.count(candidate_val) <= len(A) // 2:
        return -1
    return candidate_idx
class SolutionTest(unittest.TestCase):
    """Regression test for solution(): expected value is the index of the
    last adopted majority-vote candidate for the given input."""

    def setUp(self):
        # Pairs of (input sequence, expected dominator index).
        self.data = [
            ([3, 4, 3, 2, 3, -1, 3, 3], 6)
        ]

    def test_solution(self):
        for input_data, expected in self.data:
            actual = solution(input_data)
            # assertEquals is a deprecated alias; use assertEqual.
            self.assertEqual(expected, actual)
if __name__ == "__main__":
    # failfast stops the run at the first failing test.
    unittest.main(failfast=True)
|
from django.conf import settings
from django.contrib.auth.models import User
from django.test import TestCase
from django.core.exceptions import SuspiciousOperation
from django.core.urlresolvers import reverse
from pyspreedly import api
from spreedly.models import Plan, Subscription
from django.utils.unittest import skip
from mock import patch
from mocks import get_client_mock
# Shared mock client class, bound to the configured Spreedly site name.
ClientMock = get_client_mock(settings.SPREEDLY_SITE_NAME)
@patch('pyspreedly.api.Client', new_callable=ClientMock)
class ViewsSetup(TestCase):
    """Shared fixture for the view tests: a user, a trial and a paid plan
    (from fixtures), and an active subscription on the paid plan, with the
    Spreedly API client patched out."""
    fixtures = ['plans.json', 'fees.json']

    def setUp(self):
        # NOTE(review): the class decorator already patches the client;
        # this inner patch only captures an instance for the tests.
        with patch('pyspreedly.api.Client',
                   new=ClientMock) as client_mock:
            self.spreedly_client = client_mock()
        self.user = User.objects.create_user(username='test user',
                                             email='test@mediapopinc.com',
                                             password='testpassword')
        self.trial_plan = Plan.objects.get(id=12345)
        self.paid_plan = Plan.objects.get(id=67890)
        self.subscription = Subscription.objects.create(
            user=self.user,
            plan=self.paid_plan)
@patch('pyspreedly.api.Client', new=ClientMock)
class TestViewsExist(ViewsSetup):
    """Smoke tests asserting that each spreedly view resolves, renders the
    expected template, and enforces login where required."""

    def test_plan_list_view(self):
        """(the poorly named) List view should show the plans, and a form."""
        url = reverse('plan_list')
        response = self.client.get(url)
        self.assertTemplateUsed(response, 'spreedly/plan_list.html')

    def test_list_view(self):
        """there should be a view which shows a list of plans - enabled and
        not"""
        self.skipTest("Add real tests for this")
        url = reverse('plan_list')  # TODO: should point at a real list view
        response = self.client.get(url)
        self.assertTemplateUsed(response, 'spreedly/plan_list.html')

    def test_buy_view(self):
        """there should be a view which sends you to spreedly for purchase"""
        self.skipTest("Add real tests for this")
        url = reverse('plan_list')  # TODO: should point at the buy view
        response = self.client.get(url)
        self.assertTemplateUsed(response, 'spreedly/plan_list.html')

    def test_email_set(self):
        """Email sent view should also exist"""
        url = reverse('spreedly_email_sent', kwargs={'user_id': 1})
        response = self.client.get(url)
        self.assertTemplateUsed(response, 'spreedly/email_sent.html')

    def test_spreedly_return(self):
        """The welcome back and thank you for your plastic page should also
        exist"""
        # A fresh user without an existing subscription.
        user = User.objects.create_user(username='test user2',
                                        email='test@mediapopinc.com',
                                        password='testpassword')
        url = reverse('spreedly_return',
                      kwargs={'user_id': user.id,
                              'plan_pk': Plan.objects.all()[0].id})
        response = self.client.get(url)
        self.assertTemplateUsed(response, 'spreedly/return.html')

    def test_spreedly_return_already_subscribed(self):
        """Returning with an already-subscribed user must be rejected."""
        url = reverse('spreedly_return',
                      kwargs={'user_id': self.user.id,
                              'plan_pk': Plan.objects.all()[0].id})
        self.assertRaises(SuspiciousOperation, self.client.get, url)

    def test_my_subscription(self):
        """my subscription page should exisit, wrapper view."""
        url = reverse('my_subscription')
        response = self.client.get(url)
        # Anonymous users are bounced to login first.
        self.assertRedirects(response, reverse('login') + '?next=' + url)
        self.assertTrue(
            self.client.login(username=self.user, password='testpassword')
        )
        url = reverse('my_subscription')
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    def test_plan_view(self):
        """There should be a view to show you a plan's details"""
        url = reverse('plan_details',
                      kwargs={'plan_pk': Plan.objects.all()[0].id})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        #self.assertTemplateUsed(response, 'spreedly/plan_details.html')

    def test_subscriber_view(self):
        """there should be a view to show a subscriber's info"""
        url = reverse('subscription_details', kwargs={
            'user_id': self.subscription.user.id
        })
        response = self.client.get(url)
        self.assertRedirects(response, reverse('login') + '?next=' + url)
        self.assertTrue(self.client.login(username=self.subscription.user,
                                          password='testpassword'))
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'spreedly/subscription_details.html')

    @skip("Not ready")
    def test_edit_subscriber(self):
        """Subscribers are mutable, change them"""
        # Fixed: the fixture attribute is `subscription`, not `subscriber`
        # (the old name would raise AttributeError once unskipped).
        url = reverse('edit_subscription',
                      kwargs={'user_id': self.subscription.user.id})
        response = self.client.get(url)
        self.assertRedirects(response, reverse('login'))
        self.client.login(username='root', password='secret')
        response = self.client.get(url)
        self.assertTemplateUsed(response, 'spreedly/return.html')
|
import config
import urllib
import json
import access_token
import user
def object_decoder(obj):
    """json object_hook: build an AccessToken from a token response dict.

    The refresh token is only present on the initial code exchange, so it
    is passed through only when the response contains it.
    """
    token = access_token.AccessToken()
    base_fields = (obj["access_token"], obj["expires_in"], obj["token_type"])
    if "refresh_token" in obj:
        token.set(*(base_fields + (obj["refresh_token"],)))
    else:
        token.set(*base_fields)
    return token
def user_object_decoder(obj):
    """json object_hook: build a GoogleUser from a userinfo response dict.

    Only the OpenID Connect subject identifier ('sub') is used.
    """
    google_user = user.GoogleUser()
    google_user.set(obj["sub"])
    return google_user
class GoogleOAuth:
    """Minimal Google OAuth 2.0 client (Python 2, urllib based).

    Endpoints, client credentials and defaults are read from the `config`
    module.
    """

    def auth(self, callback="/callback", state="auth"):
        """Return the consent-screen URL to redirect the user to.

        NOTE(review): query values (scopes, state) are not URL-encoded
        here; kept as-is to preserve behavior, but consider
        urllib.urlencode.
        """
        url = config.google_auth_endpoint + "?"
        url += "scope=" + ' '.join(config.scopes) + "&"
        url += "access_type=" + config.access_type + "&"
        url += "client_id=" + config.client_id + "&"
        # Fixed: Google's parameter is spelled 'approval_prompt'; the
        # previous 'approval_promt' was silently ignored by the server.
        # (The config attribute keeps its historical name.)
        url += "approval_prompt=" + config.approval_promt + "&"
        url += "response_type=" + config.response_type + "&"
        url += "redirect_uri=" + config.redirect_uri + callback + "&"
        url += "state=" + state
        return url

    def callback(self, code, callback="/callback"):
        """Exchange an authorization `code` for tokens.

        Returns the decoded AccessToken, or False on an error response.
        """
        params = urllib.urlencode({
            "code": code,
            "client_id": config.client_id,
            "client_secret": config.client_secret,
            "redirect_uri": config.redirect_uri + callback,
            "grant_type": "authorization_code"
        })
        return self._token_request(params)

    def refresh(self, refresh_token):
        """Obtain a fresh access token from a `refresh_token`.

        Returns the decoded AccessToken, or False on an error response.
        """
        params = urllib.urlencode({
            "refresh_token": refresh_token,
            "client_id": config.client_id,
            "client_secret": config.client_secret,
            "grant_type": "refresh_token"
        })
        return self._token_request(params)

    def _token_request(self, params):
        """POST `params` to the token endpoint and decode the reply.

        Shared by callback() and refresh(); returns False for an empty or
        error reply.
        """
        handle = urllib.urlopen(config.google_token_endpoint, params)
        response = handle.read()
        token = json.loads(response, object_hook=object_decoder)
        if token != "" and hasattr(token, "error") == False:
            return token
        return False

    def revoke(self, refresh_token):
        """Revoke a refresh token; the response body is discarded."""
        # Config attribute name keeps its historical spelling.
        handle = urllib.urlopen(
            config.google_revoke_endpont + "?token=" + refresh_token)
        handle.read()

    def validate(self, token):
        """Hit the id-token validation endpoint; response is discarded."""
        handle = urllib.urlopen(
            config.google_validate_url + "?id_token=" + token)
        handle.read()

    def userinfo(self, access_token):
        """Fetch the userinfo document for `access_token`.

        Returns the decoded GoogleUser.
        """
        handle = urllib.urlopen(
            config.google_user_info_url + "?access_token=" + access_token)
        response = handle.read()
        return json.loads(response, object_hook=user_object_decoder)
|
import megazord

# Build script driving the megazord build tool (API semantics assumed
# from usage here -- confirm against the megazord documentation).

# Shared library built from the C++ hello sources, with 'root' support.
hello = megazord \
    .Target(["test/cpp/hello.cpp"],
            output="test/cpp/lib/libhello.so") \
    .add_support("root")

# Main binary that links against the hello library.
main = megazord.Target('test/cpp/main.cpp',
                       output='test/cpp/bin/main.a')\
    .depends_on(hello)
main.assembly()
# Deploy the binary to the current directory, excluding the library.
main.deploy_to('./', exclude=hello)

# Java build: compile Solver/Board with algs4.jar, entry point Board,
# then package the result into target.jar.
java_target = megazord \
    .Target(['test/java/Solver.java', 'test/java/Board.java'],
            output='test/java/bin/',
            entry_point='Board') \
    .add_library('test/java/algs4.jar')
java_target.assembly()
jt = megazord.JarTool()
jt.run(java_target, 'target.jar')
|
import os
import argparse
import struct
from collections import deque
from statistics import mean
from cereal import log
import cereal.messaging as messaging
if __name__ == "__main__":
    # Sniff CAN traffic from a cereal 'can' socket and report torque
    # ramp-ups (start value -> peak value over elapsed time).
    parser = argparse.ArgumentParser(description='Sniff a communication socket')
    parser.add_argument('--addr', default='127.0.0.1')
    args = parser.parse_args()

    # A non-local address implies the ZMQ transport.
    if args.addr != "127.0.0.1":
        os.environ["ZMQ"] = "1"
        messaging.context = messaging.Context()

    poller = messaging.Poller()
    messaging.sub_sock('can', poller, addr=args.addr)

    active = 0       # whether a nonzero torque request is in flight
    start_t = 0      # measurement window start time (s)
    start_v = 0      # torque average at window start
    max_v = 0        # peak smoothed torque seen in the window
    max_t = 0        # time of that peak (s)
    window = deque(maxlen=10)  # sliding window of recent torque samples
    avg = 0
    while 1:
        polld = poller.poll(1000)
        for sock in polld:
            msg = sock.receive()
            evt = log.Event.from_bytes(msg)
            for item in evt.can:
                # 0xe4 from src 128: torque request, big-endian int16 in
                # the first two bytes -- message layout assumed from
                # usage; confirm against the DBC.
                if item.address == 0xe4 and item.src == 128:
                    torque_req = struct.unpack('!h', item.dat[0:2])[0]
                    # print(torque_req)
                    active = abs(torque_req) > 0
                    if abs(torque_req) < 100:
                        # Request dropped: report the ramp observed since
                        # the window started, then reset the window.
                        if max_v > 5:
                            print(f'{start_v} -> {max_v} = {round(max_v - start_v, 2)} over {round(max_t - start_t, 2)}s')
                        start_t = evt.logMonoTime / 1e9
                        start_v = avg
                        max_t = 0
                        max_v = 0
                # 0x1ab from src 0: 10-bit torque value split across the
                # low 2 bits of byte 0 and all of byte 1 (assumed layout).
                if item.address == 0x1ab and item.src == 0:
                    motor_torque = ((item.dat[0] & 0x3) << 8) + item.dat[1]
                    window.append(motor_torque)
                    avg = mean(window)
                    #print(f'{evt.logMonoTime}: {avg}')
                    # Track the running peak while a request is active.
                    if active and avg > max_v + 0.5:
                        max_v = avg
                        max_t = evt.logMonoTime / 1e9
|
from server import Server
|
from setuptools import setup

# Long description shown on PyPI is the README plus the changelog.
with open('README.rst') as readme_file:
    readme = readme_file.read()
with open('HISTORY.rst') as history_file:
    history = history_file.read()

# Runtime dependencies (pinned to exact versions).
requirements = [
    'attrs==16.3.0',
    'six==1.10.0',
    'contextlib2==0.5.4',
]
test_requirements = [
    # TODO: put package test requirements here
]

setup(
    name='htmlvis',
    version='0.1.0',
    description="HTML visualization for Python",
    long_description=readme + '\n\n' + history,
    author="Damian Quiroga",
    author_email='qdamian@gmail.com',
    url='https://github.com/qdamian/htmlvis',
    packages=[
        'htmlvis',
    ],
    package_dir={'htmlvis': 'htmlvis'},
    include_package_data=True,
    install_requires=requirements,
    license="MIT license",
    zip_safe=False,
    keywords='htmlvis',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        "Programming Language :: Python :: 2",
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
    ],
    test_suite='tests',
    tests_require=test_requirements)
|
from msrest.serialization import Model
class DatasetReference(Model):
    """Dataset reference type.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    :ivar type: Dataset reference type. Default value: "DatasetReference" .
    :vartype type: str
    :param reference_name: Reference dataset name.
    :type reference_name: str
    :param parameters: Arguments for dataset.
    :type parameters: dict[str, object]
    """

    # msrest validation rules: 'type' is a required constant,
    # 'reference_name' is required.
    _validation = {
        'type': {'required': True, 'constant': True},
        'reference_name': {'required': True},
    }

    # Maps python attribute names to wire (JSON) keys and types.
    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'reference_name': {'key': 'referenceName', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{object}'},
    }

    # Constant discriminator serialized for every instance.
    type = "DatasetReference"

    def __init__(self, reference_name, parameters=None):
        super(DatasetReference, self).__init__()
        self.reference_name = reference_name
        self.parameters = parameters
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.