index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
65,823 | luoheng/TCssrbm | refs/heads/master | /tests/speedTest_unshared_conv_diagonally.py |
import sys
import time
import unittest
import pdb
import numpy
import theano
from unshared_conv_diagonally import FilterActs
from unshared_conv_diagonally import WeightActs
from unshared_conv_diagonally import ImgActs
def rand(shp, dtype):
    """Return a uniform-[0, 1) random array with shape `shp`, cast to `dtype`."""
    values = numpy.random.rand(*shp)
    return values.astype(dtype)
class TestWeightActsSpeed(unittest.TestCase):
# Global test variables (may be extended to include more tests)
#Each item in ishape_list : (icount, icolors, irows, icols)
ishape_list = [(64, 1, 98, 98)]
#Each item in fshapes_list = (fmodules, filters_per_module,
# fcolors, frows, fcols)
fshape_list = [(11, 32, 1, 11, 11)]
# Each item in hshapes_list = (hcount, fmodules, filter_per_module,
# hrows, hcols)
hshape_list = [(64, 11, 32, 8, 8)]
module_stride = 1
dtype = 'float64'
nbTests = len(ishape_list)
n_calls = 50
# Utility functions
def ishape(self, i):
return self.ishape_list[i]
def irows(self, i):
return self.ishape_list[i][2]
def icols(self, i):
return self.ishape_list[i][3]
def fshape(self, i):
return self.fshape_list[i]
def frows(self, i):
return self.fshape_list[i][3]
def fcols(self, i):
return self.fshape_list[i][4]
def hshape(self, i):
return self.hshape_list[i]
def setUp(self):
self.op = WeightActs(module_stride=self.module_stride)
self.s_images_list = [theano.shared(rand(ishape, self.dtype))
for ishape in self.ishape_list]
self.s_hidacts_list = [theano.shared(rand(hshape, self.dtype))
for hshape in self.hshape_list]
# Test Cases
def testMainOpSpeed(self):
# mode = theano.Mode(linker=theano.gof.vm.VM_Linker(
# allow_gc=False,
# use_cloop=True))
for i in range(self.nbTests):
# Generate theano functions to run the op in python and in C
output = self.op(self.s_images_list[i], self.s_hidacts_list[i],
self.frows(i), self.fcols(i))
pyFunction = theano.function([], output,
mode=theano.Mode(linker='py'))
cFunction = theano.function([], output,
mode=theano.Mode(linker='c'))
# Run the OP in python
t0 = time.time()
[pyFunction() for i in range(self.n_calls)]
t1 = time.time()
print "py", t1 - t0,
# Run the OP in C and time it
t0 = time.time()
[cFunction() for i in range(self.n_calls)]
t1 = time.time()
print "c", t1 - t0
class TestWeightActsSpeedF32(TestWeightActsSpeed):
    # Same WeightActs benchmark, but in single precision.
    dtype = 'float32'
class TestImgActsSpeed(unittest.TestCase):
# Global test variables (may be extended to include more tests)
#Each item in ishape_list : (icount, icolors, irows, icols)
ishape_list = [(64, 1, 98, 98)]
#Each item in fshapes_list = (fmodules, filters_per_module,
# fcolors, frows, fcols)
fshape_list = [(11, 32, 1, 11, 11)]
# Each item in hshapes_list = (hcount, fmodules, filter_per_module,
# hrows, hcols)
hshape_list = [(64, 11, 32, 8, 8)]
module_stride = 1
dtype = 'float64'
nbTests = len(ishape_list)
n_calls = 50
# Utility functions
def ishape(self, i):
return self.ishape_list[i]
def irows(self, i):
return self.ishape_list[i][2]
def icols(self, i):
return self.ishape_list[i][3]
def fshape(self, i):
return self.fshape_list[i]
def hshape(self, i):
return self.hshape_list[i]
def setUp(self):
self.op = ImgActs(module_stride=self.module_stride, openmp=False)
self.op_omp = ImgActs(module_stride=self.module_stride, openmp=True)
self.s_filters_list = [theano.shared(rand(fshape, self.dtype))
for fshape in self.fshape_list]
self.s_hidacts_list = [theano.shared(rand(hshape, self.dtype))
for hshape in self.hshape_list]
# Test Cases
def testMainOpSpeed(self):
# mode = theano.Mode(linker=theano.gof.vm.VM_Linker(
# allow_gc=False,
# use_cloop=True))
for i in range(self.nbTests):
# Generate theano functions to run the op in python and in C
output = self.op(self.s_filters_list[i], self.s_hidacts_list[i],
self.irows(i), self.icols(i))
output_omp = self.op_omp(self.s_filters_list[i],
self.s_hidacts_list[i],
self.irows(i), self.icols(i))
pyFunction = theano.function([], output,
mode=theano.Mode(linker='py'))
cFunction = theano.function([], output,
mode=theano.Mode(linker='c'))
cFunction2 = theano.function([], output_omp,
mode=theano.Mode(linker='c'))
# Run the OP in python
t0 = time.time()
[pyFunction() for i in range(self.n_calls)]
t1 = time.time()
py_t = t1 - t0
print "py", py_t
# Run the OP in C and time it
t0 = time.time()
[cFunction() for i in range(self.n_calls)]
t1 = time.time()
c_t = t1 - t0
print "c", c_t, "speed up python", py_t / c_t
# Run the Op in C with openmp
if theano.config.openmp:
t0 = time.time()
[cFunction2() for i in range(self.n_calls)]
t1 = time.time()
c_t2 = t1 - t0
print "omp c", c_t2, "speed up python", py_t / c_t2, "speed up c", c_t / c_t2
class TestImgActsSpeedF32(TestImgActsSpeed):
    # Same ImgActs benchmark, but in single precision.
    dtype = 'float32'
class TestFiltersActsSpeedF64(unittest.TestCase):
# Global test variables (may be extended to include more tests)
#Each item in ishape_list : (icount, icolors, irows, icols)
ishape_list = [(2, 1, 49, 49), (10, 1, 49, 49),
(10, 1, 98, 98), (10, 1, 98, 98),
(10, 1, 98, 98),
]
#Each item in fshapes_list = (fmodules, filters_per_module,
# fcolors, frows, fcols)
fshape_list = [(5, 32, 1, 11, 11), (5, 32, 1, 11, 11),
(9, 32, 1, 9, 9),
(9, 32, 1, 10, 10), (9, 32, 1, 11, 11),
]
ishape_list = [(64, 1, 98, 98)]
fshape_list = [(11, 32, 1, 11, 11)]
module_stride = 1
dtype = 'float64'
nbTests = len(ishape_list)
n_calls = 50
def setUp(self):
self.op = FilterActs(module_stride=self.module_stride,
openmp=False)
self.op_omp = FilterActs(module_stride=self.module_stride,
openmp=True)
self.s_filters_list = [theano.shared(rand(fshape, self.dtype))
for fshape in self.fshape_list]
self.s_images_list = [theano.shared(rand(ishape, self.dtype))
for ishape in self.ishape_list]
# Test Cases
def testMainOpSpeed(self):
def do_time(output, mode=theano.Mode(linker='c')):
f = theano.function([], output, mode=mode)
t0 = time.time()
[f() for i in range(self.n_calls)]
t1 = time.time()
return t1 - t0
for i in range(self.nbTests):
print "image shape", self.ishape_list[i]
print "filter shape", self.fshape_list[i]
# Generate theano functions to run the op in python and in C
output = self.op(self.s_images_list[i], self.s_filters_list[i])
output_omp = self.op_omp(self.s_images_list[i],
self.s_filters_list[i])
output_fcols = FilterActs(module_stride=self.module_stride,
openmp=False,
fcols=self.fshape_list[i][-1])(
self.s_images_list[i],
self.s_filters_list[i])
output_fcols_omp = FilterActs(module_stride=self.module_stride,
openmp=True,
fcols=self.fshape_list[i][-1])(
self.s_images_list[i],
self.s_filters_list[i])
output_frows_fcols = FilterActs(module_stride=self.module_stride,
openmp=False,
fcols=self.fshape_list[i][-1],
frows=self.fshape_list[i][-2])(
self.s_images_list[i],
self.s_filters_list[i])
output_frows_fcols_omp = FilterActs(module_stride=self.module_stride,
openmp=True,
fcols=self.fshape_list[i][-1],
frows=self.fshape_list[i][-2])(
self.s_images_list[i],
self.s_filters_list[i])
# Run the OP in python
py_t = do_time(output, mode=theano.Mode(linker='py'))
print "py", py_t
# Run the OP in C
c_t = do_time(output, mode=theano.Mode(linker='c|py'))
print "c|py", c_t, "speed up", py_t / c_t
# Run the OP in C with fcols
c_t_fcols = do_time(output_fcols)
print "c fcols", c_t_fcols, "speed up", py_t / c_t_fcols
# Run the OP in C with fcols, frows
c_t_frows_fcols = do_time(output_frows_fcols)
print "c frows_fcols", c_t_frows_fcols, "speed up", py_t / c_t_frows_fcols
# Run the Op in C with openmp
if theano.config.openmp:
c_omp_t = do_time(output_omp)
print "omp c", c_omp_t, "speed up python", py_t / c_omp_t, "speed up c", c_t / c_omp_t
c_omp_fcols_t = do_time(output_fcols_omp)
print "omp c fcols", c_omp_fcols_t, "speed up python", py_t / c_omp_fcols_t, "speed up c fcols", c_t_fcols / c_omp_fcols_t
c_omp_frows_fcols_t = do_time(output_frows_fcols_omp)
print "omp c fcols", c_omp_frows_fcols_t, "speed up python", py_t / c_omp_frows_fcols_t, "speed up c frows_fcols", c_t_frows_fcols / c_omp_frows_fcols_t
class TestFiltersActsSpeedF32(TestFiltersActsSpeedF64):
    # Same FilterActs benchmark, but in single precision.
    dtype = 'float32'
| {"/test_NCC.py": ["/CrossCorrelation.py"]} |
65,824 | luoheng/TCssrbm | refs/heads/master | /MSSIM.py | import numpy
class MSSIM(object):
    """Mean Structural Similarity (SSIM) between generated samples and
    reference test samples.

    ``samples`` has shape (n_samples, channels, rows, cols).
    ``test_samples`` is either a single (channels, rows, cols) reference
    shared by every sample, or an (n_samples, channels, rows, cols) batch
    paired one-to-one with ``samples``.
    """

    def __init__(self, test_samples, samples, window_size, seed=98987):
        self.test_samples = test_samples
        self.samples = samples
        self.window_size = window_size
        self.rng = numpy.random.RandomState(seed)
        # A 3-d test array means one reference image for all samples.
        self.single_test_sample = len(test_samples.shape) == 3
        n_samples, chan_s, rows_s, cols_s = self.samples.shape
        if self.single_test_sample:
            # Only one test sample: its shape must match every sample.
            chan_t, rows_t, cols_t = self.test_samples.shape
            assert chan_t == chan_s
            assert rows_t == rows_s
            assert cols_t == cols_s
        else:
            # One test sample per sample: shapes and counts must agree.
            n_tests, chan_t, rows_t, cols_t = self.test_samples.shape
            assert chan_t == chan_s
            assert rows_t == rows_s
            assert cols_t == cols_s
            assert n_samples == n_tests

    def MSSIM_old(self):
        """Mean whole-image SSIM over all samples (no sliding window)."""
        if self.single_test_sample:
            scores = [self.SSIM(s, self.test_samples)
                      for s in self.samples]
        else:
            scores = [self.SSIM(s, t)
                      for s, t in zip(self.samples, self.test_samples)]
        return (1.0 / len(self.samples)) * sum(scores)

    def MSSIM(self):
        """Return (mean, std) of the windowed SSIM score per sample."""
        if self.single_test_sample:
            pairs = [(s, self.test_samples) for s in self.samples]
        else:
            pairs = zip(self.samples, self.test_samples)
        SSIMs = [self.MSSIM_one_sample(s, t) for s, t in pairs]
        return (numpy.mean(SSIMs), numpy.std(SSIMs))

    def MSSIM_one_sample(self, sample, test_sample):
        """Mean SSIM over every window_size x window_size patch of one
        sample/reference pair."""
        w = self.window_size
        n_chan = sample.shape[-3]
        n_rows = sample.shape[-2] + 1 - w
        n_cols = sample.shape[-1] + 1 - w
        results = numpy.zeros((n_chan, n_rows, n_cols))
        for color in range(n_chan):
            for row in range(n_rows):
                for col in range(n_cols):
                    # Note: the patch spans all channels (leading ':'),
                    # so each channel index stores the same score.
                    patch = sample[:, row:row + w, col:col + w]
                    ref = test_sample[:, row:row + w, col:col + w]
                    results[color, row, col] = self.SSIM(patch, ref)
        return numpy.mean(results)

    def SSIM(self, sample, test_sample):
        """SSIM index of two equally-shaped arrays, assuming a 255-valued
        dynamic range for the stabilizing constants."""
        mu_x = numpy.mean(sample)
        mu_y = numpy.mean(test_sample)
        sigma_x = numpy.std(sample)
        sigma_y = numpy.std(test_sample)
        # Cross-covariance; numpy.cov normalizes by N - 1 by default.
        covariance = numpy.cov(sample.flatten(), test_sample.flatten())[0, 1]
        covariance = numpy.nan_to_num(covariance)
        C1 = (255 * 0.01) ** 2  # stabilizer when mu_x**2 + mu_y**2 ~ 0
        C2 = (255 * 0.03) ** 2  # stabilizer when sigma_x**2 + sigma_y**2 ~ 0
        numerator = (2 * mu_x * mu_y + C1) * (2 * covariance + C2)
        denominator = ((mu_x ** 2 + mu_y ** 2 + C1) *
                       (sigma_x ** 2 + sigma_y ** 2 + C2))
        return numerator / denominator
| {"/test_NCC.py": ["/CrossCorrelation.py"]} |
65,825 | luoheng/TCssrbm | refs/heads/master | /TCssrbm.py | """
This file extends the mu-ssRBM for tiled-convolutional training
"""
import cPickle, pickle
import numpy
numpy.seterr('warn') #SHOULD NOT BE IN LIBIMPORT
from PIL import Image
import theano
from theano import tensor
from theano.tensor import nnet,grad
from pylearn.io import image_tiling
from pylearn.algorithms.mcRBM import (
contrastive_cost, contrastive_grad)
import pylearn.gd.sgd
import sys
from unshared_conv_diagonally import FilterActs
from unshared_conv_diagonally import WeightActs
from unshared_conv_diagonally import ImgActs
from Brodatz import Brodatz_op
#import scipy.io
import os
# Scratch path for temporary data ('/Tmp/carriepl' was used previously).
_temp_data_path_ = '.'#'/Tmp/carriepl'
# Choose the random-number stream implementation.  The `if 0` branch keeps
# the slow, pure-python shared_randomstreams fallback around for debugging;
# by default the fast MRG generator is used.
if 0:
    print 'WARNING: using SLOW rng'
    RandomStreams = tensor.shared_randomstreams.RandomStreams
else:
    import theano.sandbox.rng_mrg
    RandomStreams = theano.sandbox.rng_mrg.MRG_RandomStreams
floatX = theano.config.floatX
# Helper: wrap an array in a theano shared variable cast to floatX.
sharedX = lambda X, name: theano.shared(numpy.asarray(X, dtype=floatX),
                                        name=name)
def Toncv(image, filters, module_stride=1):
    """Tiled convolution: apply FilterActs to `image` with `filters`."""
    return FilterActs(module_stride)(image, filters)
def Tdeconv(filters, hidacts, irows, icols, module_stride=1):
    """Tiled deconvolution: apply ImgActs (the transpose of FilterActs)
    to map hidden activations back to an (irows x icols) image."""
    return ImgActs(module_stride)(filters, hidacts, irows, icols)
def unnatural_sgd_updates(params, grads, stepsizes, tracking_coef=0.1, epsilon=1):
    """Build an SGD-like update dict that rescales each step by a running
    estimate of the gradient's standard deviation.

    For each parameter we keep an exponential moving average of the
    gradient and of its square; the step is mean / sqrt(variance + epsilon).
    """
    running_mean = [theano.shared(numpy.zeros_like(p.get_value(borrow=True)))
                    for p in params]
    running_sqr = [theano.shared(numpy.ones_like(p.get_value(borrow=True)))
                   for p in params]
    ups = dict()
    for g, mean, sqr, param, step in zip(
            grads, running_mean, running_sqr, params, stepsizes):
        ups[mean] = tracking_coef * g + (1 - tracking_coef) * mean
        ups[sqr] = tracking_coef * g * g + (1 - tracking_coef) * sqr
        variance = sqr - mean ** 2
        # A true natural gradient would divide by the variance itself,
        # but the original author found the sqrt form worked better.
        ups[param] = param - step * mean / tensor.sqrt(variance + epsilon)
    return ups
"""
def grad_updates(params, grads, stepsizes):
grad_means = [theano.shared(numpy.zeros_like(p.get_value(borrow=True)))
for p in params]
grad_means_sqr = [theano.shared(numpy.ones_like(p.get_value(borrow=True)))
for p in params]
updates = dict()
for g, p, s in zip(
grads, params, stepsizes):
updates[p] = p - s*g
return updates
"""
def safe_update(a, b):
    """Merge mapping `b` into dict `a` in place, refusing to overwrite.

    Raises KeyError on the first key of `b` already present in `a`.
    Returns `a` (mutated).  `b` may be any object accepted by dict().
    """
    # BUGFIX/portability: the original used dict.iteritems(), which only
    # exists on Python 2; .items() behaves identically here and also
    # works on Python 3.
    for k, v in dict(b).items():
        if k in a:
            raise KeyError(k)
        a[k] = v
    return a
def most_square_shape(N):
    """Return the (height, width) pair with height * width == N that is
    closest to square, with height <= width.

    N must be a positive integer; (1, N) is the worst case (N prime).
    """
    # BUGFIX/portability: use floor division (N // i) so the result stays
    # integral under Python 3's true division as well (i divides N exactly,
    # so the value is unchanged on Python 2).  range() replaces the
    # Python-2-only xrange(); the iteration is identical.
    for i in range(int(numpy.sqrt(N)), 0, -1):
        if 0 == N % i:
            return (i, N // i)
def tile_conv_weights(w, flip=False, scale_each=False):
    """
    Return something that can be rendered as an image to visualize the filters.

    `w` is a (n_filters, channels, rows, cols) filter bank with square
    kernels; channels must be 1 (greyscale output) or 3 (RGB output).
    Filters are tiled into a near-square uint8 mosaic with a 1-pixel gap
    between tiles.  With scale_each=True every filter is normalized to
    0..255 on its own; otherwise the whole bank is normalized jointly.
    """
    #if w.shape[1] != 3:
    #    raise NotImplementedError('not rgb', w.shape)
    if w.shape[2] != w.shape[3]:
        raise NotImplementedError('not square', w.shape)
    if w.shape[1] == 1:
        # Greyscale case: 2-d output mosaic.
        wmin, wmax = w.min(), w.max()
        if not scale_each:
            # Joint normalization of the whole bank to 0..255.
            w = numpy.asarray(255 * (w - wmin) / (wmax - wmin + 1e-6), dtype='uint8')
        trows, tcols = most_square_shape(w.shape[0])
        outrows = trows * w.shape[2] + trows - 1
        outcols = tcols * w.shape[3] + tcols - 1
        out = numpy.zeros((outrows, outcols), dtype='uint8')
        #tr_stride= 1+w.shape[1]
        for tr in range(trows):
            for tc in range(tcols):
                # this is supposed to flip the filters back into the image
                # coordinates as well as put the channels in the right place,
                # but the original author was not sure it really does that
                tmp = w[tr * tcols + tc,
                        0,
                        ::-1 if flip else 1,
                        ::-1 if flip else 1]
                if scale_each:
                    tmp = numpy.asarray(255 * (tmp - tmp.min()) / (tmp.max() - tmp.min() + 1e-6),
                                        dtype='uint8')
                out[tr * (1 + w.shape[2]):tr * (1 + w.shape[2]) + w.shape[2],
                    tc * (1 + w.shape[3]):tc * (1 + w.shape[3]) + w.shape[3]] = tmp
        return out
    # RGB case: same tiling, but the output keeps a trailing channel axis.
    wmin, wmax = w.min(), w.max()
    if not scale_each:
        w = numpy.asarray(255 * (w - wmin) / (wmax - wmin + 1e-6), dtype='uint8')
    trows, tcols = most_square_shape(w.shape[0])
    outrows = trows * w.shape[2] + trows - 1
    outcols = tcols * w.shape[3] + tcols - 1
    out = numpy.zeros((outrows, outcols, 3), dtype='uint8')
    tr_stride = 1 + w.shape[1]  # NOTE(review): computed but never used
    for tr in range(trows):
        for tc in range(tcols):
            # this is supposed to flip the filters back into the image
            # coordinates as well as put the channels in the right place,
            # but the original author was not sure it really does that
            tmp = w[tr * tcols + tc].transpose(1, 2, 0)[
                ::-1 if flip else 1,
                ::-1 if flip else 1]
            if scale_each:
                tmp = numpy.asarray(255 * (tmp - tmp.min()) / (tmp.max() - tmp.min() + 1e-6),
                                    dtype='uint8')
            out[tr * (1 + w.shape[2]):tr * (1 + w.shape[2]) + w.shape[2],
                tc * (1 + w.shape[3]):tc * (1 + w.shape[3]) + w.shape[3]] = tmp
    return out
class RBM(object):
    """
    Light-weight class that provides math related to inference in a
    Spike & Slab RBM.

    Attributes:
     - v_prec - the base conditional precisions of data units [shape (n_img_rows, n_img_cols,)]
     - v_shape - the input image shape (ie. n_imgs, n_chnls, n_img_rows, n_img_cols)
     - n_conv_hs - the number of spike and slab hidden units
     - filters_hs_shape - the kernel filterbank shape for hs units
     - filters_h_shape - the kernel filterbank shape for h units
     - filters_hs - a tensor with shape (n_conv_hs,n_chnls,n_ker_rows, n_ker_cols)
     - conv_bias_hs - a vector with shape (n_conv_hs, n_out_rows, n_out_cols)
     - subsample_hs - how to space the receptive fields (dx,dy)
     - n_global_hs - how many globally-connected spike and slab units
     - weights_hs - global weights
     - global_bias_hs -
     - _params a list of the attributes that are shared vars

    NOTE(review): several attributes listed above (filters_h_shape,
    subsample_hs, weights_hs, global_bias_hs, ...) are not created by
    alloc() below; the docstring appears to describe an older revision.

    The technique of combining convolutional and global filters to account
    for border effects is borrowed from (Alex Krizhevsky, TR?, October 2010).
    """
    def __init__(self, **kwargs):
        # All attributes (shared variables, shapes, conf) are supplied by
        # alloc(); this just copies them onto the instance.
        print 'init rbm'
        self.__dict__.update(kwargs)

    @classmethod
    def alloc(cls,
              conf,
              image_shape,  # input dimensionality
              filters_hs_shape,
              filters_irange,
              v_prec,
              v_prec_lower_limit,  # should be parameter of the training algo
              seed=8923402
              ):
        # Construct an RBM with freshly initialized shared parameters.
        # `conf` is a dict of hyper-parameters (alpha/lambda domains,
        # initial values, ...); the two shape tuples fix the geometry.
        print 'alloc rbm'
        rng = numpy.random.RandomState(seed)
        self = cls()
        n_images, n_channels, n_img_rows, n_img_cols = image_shape
        n_filters_hs_modules, n_filters_hs_per_modules, fcolors, n_filters_hs_rows, n_filters_hs_cols = filters_hs_shape
        assert fcolors == n_channels
        self.v_shape = image_shape
        print 'v_shape'
        print self.v_shape
        self.filters_hs_shape = filters_hs_shape
        print 'self.filters_hs_shape'
        print self.filters_hs_shape
        # Output shape of the tiled convolution, computed without
        # instantiating the op.
        self.out_conv_hs_shape = FilterActs.infer_shape_without_instance(self.v_shape, self.filters_hs_shape)
        print 'self.out_conv_hs_shape'
        print self.out_conv_hs_shape
        #conv_bias_hs_shape = self.out_conv_hs_shape[1:]
        conv_bias_hs_shape = (n_filters_hs_modules, n_filters_hs_per_modules)
        self.conv_bias_hs_shape = conv_bias_hs_shape
        print 'self.conv_bias_hs_shape'
        print self.conv_bias_hs_shape
        # Per-pixel visible precision; clipped to v_prec_lower_limit
        # during training (see Trainer.updates).
        self.v_prec = sharedX(numpy.zeros((n_channels, n_img_rows, n_img_cols)) + v_prec, 'var_v_prec')
        self.v_prec_lower_limit = sharedX(v_prec_lower_limit, 'v_prec_lower_limit')
        #a = self.v_prec.broadcastable
        #b = self.v_prec_lower_limit.broadcastable
        #print a,b
        self.filters_hs = sharedX(rng.randn(*filters_hs_shape) * filters_irange, 'filters_hs')
        #a = self.filters_hs.broadcastable
        #print a
        #conv_bias_ival = rng.rand(*conv_bias_hs_shape)*2-1
        #conv_bias_ival *= conf['conv_bias_irange']
        #conv_bias_ival += conf['conv_bias0']
        #self.conv_bias_hs = sharedX(conv_bias_ival, name='conv_bias_hs')
        self.conv_bias_hs = sharedX(numpy.zeros(self.conv_bias_hs_shape), name='conv_bias_hs')
        conv_mu_ival = numpy.zeros(conv_bias_hs_shape, dtype=floatX) + conf['conv_mu0']
        self.conv_mu = sharedX(conv_mu_ival, 'conv_mu')
        # alpha (slab precision) may be parameterized in the log domain
        # to keep it positive.
        if conf['alpha_logdomain']:
            conv_alpha_ival = numpy.zeros(conv_bias_hs_shape, dtype=floatX) + numpy.log(conf['conv_alpha0'])
            self.conv_alpha = sharedX(conv_alpha_ival, 'conv_alpha')
        else:
            self.conv_alpha = sharedX(
                numpy.zeros(conv_bias_hs_shape) + conf['conv_alpha0'],
                'conv_alpha')
        # lambda (the quadratic visible penalty filters) likewise.
        if conf['lambda_logdomain']:
            self.conv_lambda = sharedX(
                numpy.zeros(self.filters_hs_shape)
                + numpy.log(conf['lambda0']),
                name='conv_lambda')
        else:
            self.conv_lambda = sharedX(
                numpy.zeros(self.filters_hs_shape)
                + (conf['lambda0']),
                name='conv_lambda')
        # Mask that zeroes out the border of negative-phase samples; the
        # interior (one kernel extent in from each edge) is kept.
        negsample_mask = numpy.zeros((n_channels, n_img_rows, n_img_cols), dtype=floatX)
        negsample_mask[:, n_filters_hs_rows:n_img_rows - n_filters_hs_rows + 1, n_filters_hs_cols:n_img_cols - n_filters_hs_cols + 1] = 1
        self.negsample_mask = sharedX(negsample_mask, 'negsample_mask')
        self.conf = conf
        self._params = [self.v_prec,
                        self.filters_hs,
                        self.conv_bias_hs,
                        self.conv_mu,
                        self.conv_alpha,
                        self.conv_lambda
                        ]
        return self

    def get_conv_alpha(self):
        # Return alpha in the linear domain regardless of parameterization.
        if self.conf['alpha_logdomain']:
            rval = tensor.exp(self.conv_alpha)
            return rval
        else:
            return self.conv_alpha

    def get_conv_lambda(self):
        # Return lambda in the linear domain regardless of parameterization.
        if self.conf["lambda_logdomain"]:
            L = tensor.exp(self.conv_lambda)
        else:
            L = self.conv_lambda
        return L

    def conv_problem_term(self, v):
        # Quadratic penalty term: tiled convolution of v**2 with lambda.
        L = self.get_conv_lambda()
        W = self.filters_hs  # NOTE(review): unused in this method
        vLv = self.convdot(v * v, L)
        return vLv

    def conv_problem_term_T(self, h):
        # Transpose of conv_problem_term: project h back to image space
        # through lambda.
        L = self.get_conv_lambda()
        #W = self.filters_hs
        #alpha = self.get_conv_alpha()
        hL = self.convdot_T(L, h)
        return hL

    def convdot(self, image, filters):
        # Forward tiled convolution (FilterActs).
        return Toncv(image, filters)

    def convdot_T(self, filters, hidacts):
        # Transpose tiled convolution (ImgActs) back to the image shape.
        n_images, n_channels, n_img_rows, n_img_cols = self.v_shape
        return Tdeconv(filters, hidacts, n_img_rows, n_img_cols)

    #####################
    # spike-and-slab convolutional hidden units
    def mean_convhs_h_given_v(self, v):
        """Return the mean of binary-valued hidden units h, given v
        """
        alpha = self.get_conv_alpha()
        W = self.filters_hs
        vW = self.convdot(v, W)
        vW_broadcastable = vW.dimshuffle(0, 3, 4, 1, 2)
        #change 64 x 11 x 32 x 8 x 8 to 64 x 8 x 8 x 11 x 32 for broadcasting
        pre_convhs_h_parts = self.conv_mu * vW_broadcastable + self.conv_bias_hs + 0.5 * (vW_broadcastable ** 2) / alpha
        rval = nnet.sigmoid(
            tensor.add(
                pre_convhs_h_parts.dimshuffle(0, 3, 4, 1, 2),
                -0.5 * self.conv_problem_term(v)))
        return rval

    def mean_var_convhs_s_given_v(self, v):
        """
        Return mu (N,K,B) and sigma (N,K,K) for latent s variable.

        For efficiency, this method assumes all h variables are 1.
        """
        alpha = self.get_conv_alpha()
        vW = self.convdot(v, self.filters_hs)
        rval = self.conv_mu + (vW.dimshuffle(0, 3, 4, 1, 2)) / alpha
        return rval.dimshuffle(0, 3, 4, 1, 2), 1.0 / alpha

    #####################
    # visible units
    def mean_var_v_given_h_s(self, convhs_h, convhs_s):
        # Gaussian conditional over v given spike (h) and slab (s) samples:
        # mean = sigma^2 * (W^T (h*s)), variance = 1 / (v_prec + lambda-term).
        shF = self.convdot_T(self.filters_hs, convhs_h * convhs_s)
        conv_hL = self.conv_problem_term_T(convhs_h)
        contrib = shF
        sigma_sq = 1.0 / (self.v_prec + conv_hL)
        mu = contrib * sigma_sq
        return mu, sigma_sq

    def all_hidden_h_means_given_v(self, v):
        # Only the convolutional hs units exist in this revision.
        mean_convhs_h = self.mean_convhs_h_given_v(v)
        return mean_convhs_h

    #####################
    def gibbs_step_for_v(self, v, s_rng, return_locals=False, border_mask=True, sampling_for_v=True):
        # One Gibbs sweep v -> (h, s) -> v': sample spike and slab units
        # given v, then sample (or take the mean of) the visibles.
        #positive phase
        # spike variable means
        mean_convhs_h = self.all_hidden_h_means_given_v(v)
        #broadcastable_value = mean_convhs_h.broadcastable
        #print broadcastable_value
        # slab variable means
        meanvar_convhs_s = self.mean_var_convhs_s_given_v(v)
        #smean, svar = meanvar_convhs_s
        #broadcastable_value = smean.broadcastable
        #print broadcastable_value
        #broadcastable_value = svar.broadcastable
        #print broadcastable_value
        # spike variable samples
        def sample_h(hmean, shp):
            # Bernoulli sample via a uniform threshold.
            return tensor.cast(s_rng.uniform(size=shp) < hmean, floatX)
        #def sample_s(smeanvar, shp):
        #    smean, svar = smeanvar
        #    return s_rng.normal(size=shp)*tensor.sqrt(svar) + smean
        sample_convhs_h = sample_h(mean_convhs_h, self.out_conv_hs_shape)
        # slab variable samples
        smean, svar = meanvar_convhs_s
        # the shape of svar: n_filters_hs_modules, n_filters_hs_per_modules
        random_normal = s_rng.normal(size=self.out_conv_hs_shape)
        # dimshuffle so the per-module svar broadcasts over the noise.
        random_normal_bc = random_normal.dimshuffle(0, 3, 4, 1, 2) * tensor.sqrt(svar)
        sample_convhs_s = random_normal_bc.dimshuffle(0, 3, 4, 1, 2) + smean
        #negative phase
        vv_mean, vv_var = self.mean_var_v_given_h_s(
            sample_convhs_h, sample_convhs_s,
        )
        if sampling_for_v:
            vv_sample = s_rng.normal(size=self.v_shape) * tensor.sqrt(vv_var) + vv_mean
        else:
            vv_sample = vv_mean
        if border_mask:
            # Zero out the image border (see negsample_mask in alloc()).
            vv_sample = theano.tensor.mul(vv_sample, self.negsample_mask)
        #broadcastable_value = vv_mean.broadcastable
        #print broadcastable_value
        if return_locals:
            return vv_sample, locals()
        else:
            return vv_sample

    def free_energy_given_v(self, v):
        # This is accurate up to a multiplicative constant
        # because some terms involving 2pi were dropped.
        def pre_sigmoid(x):
            # Recover the pre-activation input of a sigmoid graph node.
            assert x.owner and x.owner.op == nnet.sigmoid
            return x.owner.inputs[0]
        pre_convhs_h = pre_sigmoid(self.mean_convhs_h_given_v(v))
        rval = tensor.add(
            -tensor.sum(nnet.softplus(pre_convhs_h), axis=[1, 2, 3, 4]),  # the shape of pre_convhs_h: 64 x 11 x 32 x 8 x 8
            0.5 * tensor.sum(self.v_prec * (v ** 2), axis=[1, 2, 3]),  # shape: 64 x 1 x 98 x 98
        )
        assert rval.ndim == 1
        return rval

    def cd_updates(self, pos_v, neg_v, stepsizes, other_cost=None):
        # Contrastive-divergence parameter updates built from positive-
        # and negative-phase visible batches.
        grads = contrastive_grad(self.free_energy_given_v,
                                 pos_v, neg_v,
                                 wrt=self.params(),
                                 other_cost=other_cost
                                 )
        assert len(stepsizes) == len(grads)
        if self.conf['unnatural_grad']:
            sgd_updates = unnatural_sgd_updates
        else:
            sgd_updates = pylearn.gd.sgd.sgd_updates
        rval = dict(
            sgd_updates(
                self.params(),
                grads,
                stepsizes=stepsizes))
        if 0:
            #DEBUG STORE GRADS
            grad_shared_vars = [sharedX(0 * p.value.copy(), '') for p in self.params()]
            self.grad_shared_vars = grad_shared_vars
            rval.update(dict(zip(grad_shared_vars, grads)))
        return rval

    def params(self):
        # return the list of *shared* learnable parameters
        # that are, in your judgement, typically learned in this model
        return list(self._params)

    def save_weights_to_files(self, identifier):
        # save 4 sets of weights:
        pass

    def save_weights_to_grey_files(self, identifier):
        # Render filters_hs (and conv_lambda) as greyscale PNG mosaics.
        #filters_hs
        def arrange_for_show(filters_hs, filters_hs_shape):
            # Collapse (modules, per_module) into one filter axis and
            # evaluate the shared variable to a numpy array.
            n_filters_hs_modules, n_filters_hs_per_modules, fcolors, n_filters_hs_rows, n_filters_hs_cols = filters_hs_shape
            filters_fs_for_show = filters_hs.reshape(
                (n_filters_hs_modules * n_filters_hs_per_modules,
                 fcolors,
                 n_filters_hs_rows,
                 n_filters_hs_cols))
            fn = theano.function([], filters_fs_for_show)
            rval = fn()
            return rval
        filters_fs_for_show = arrange_for_show(self.filters_hs, self.filters_hs_shape)
        Image.fromarray(
            tile_conv_weights(
                filters_fs_for_show, flip=False), 'L').save(
            'filters_hs_%s.png' % identifier)
        if self.conf['lambda_logdomain']:
            raise NotImplementedError()
        else:
            conv_lambda_for_show = arrange_for_show(self.conv_lambda, self.filters_hs_shape)
            Image.fromarray(
                tile_conv_weights(
                    conv_lambda_for_show, flip=False), 'L').save(
                'conv_lambda_%s.png' % identifier)

    def dump_to_file(self, filename):
        # Pickle the whole model; fall back to the pure-python pickler
        # when cPickle cannot handle the object graph.
        try:
            cPickle.dump(self, open(filename, 'wb'))
        except cPickle.PicklingError:
            pickle.dump(self, open(filename, 'wb'))
class Gibbs(object):
    """State of a persistent Gibbs chain for an RBM.

    If a common Sampler interface is ever defined, this class should
    implement it.
    """
    @classmethod
    def alloc(cls, rbm, rng):
        """Create a sampler whose particles are initialized from `rng`.

        `rng` may be a numpy RandomState or anything usable as a seed.
        """
        if not hasattr(rng, 'randn'):
            rng = numpy.random.RandomState(rng)
        inst = cls()
        # Draw the stream seed before the particle noise so the RNG call
        # order (and thus the initialization) matches previous behavior.
        seed = int(rng.randint(2 ** 30))
        inst.rbm = rbm
        inst.particles = sharedX(rng.randn(*rbm.v_shape), name='particles')
        inst.s_rng = RandomStreams(seed)
        return inst
def HMC(rbm, batchsize, rng):  # if there's a Sampler interface - this should support it
    # Build an HMC sampler over the RBM's free energy.
    # NOTE(review): `batchsize` is unused; the particle shape comes from
    # rbm.v_shape.
    # NOTE(review): this relies on pylearn.sampling.hmc being importable,
    # but only pylearn.gd.sgd is imported at the top of this file — verify
    # that pylearn.sampling is actually loaded before using this function.
    if not hasattr(rng, 'randn'):
        rng = numpy.random.RandomState(rng)
    seed = int(rng.randint(2 ** 30))
    particles = sharedX(
        rng.randn(*rbm.v_shape),
        name='particles')
    return pylearn.sampling.hmc.HMC_sampler(
        particles,
        rbm.free_energy_given_v,
        seed=seed)
class Trainer(object):  # updates of this object implement training
    @classmethod
    def alloc(cls, rbm, visible_batch,
              lrdict,
              conf,
              rng=234,
              iteration_value=0,
              ):
        # Build a Trainer around `rbm`: a persistent Gibbs sampler plus
        # bookkeeping shared variables (iteration counter, annealing
        # coefficient, running hidden-unit means, ...).  `lrdict` maps
        # each rbm parameter to its learning rate.
        batchsize = rbm.v_shape[0]
        sampler = Gibbs.alloc(rbm, rng=rng)
        print 'alloc trainer'
        error = 0.0
        return cls(
            rbm=rbm,
            batchsize=batchsize,
            visible_batch=visible_batch,
            sampler=sampler,
            iteration=sharedX(iteration_value, 'iter'),  # stored as floatX
            learn_rates=[lrdict[p] for p in rbm.params()],
            conf=conf,
            annealing_coef=sharedX(1.0, 'annealing_coef'),
            conv_h_means=sharedX(numpy.zeros(rbm.out_conv_hs_shape[1:]) + 0.5, 'conv_h_means'),
            # NOTE(review): 'cpnv_h' looks like a typo for 'conv_h', but the
            # attribute name is used consistently below, so it is kept.
            cpnv_h=sharedX(numpy.zeros(rbm.out_conv_hs_shape), 'conv_h'),
            recons_error=sharedX(error, 'reconstruction_error'),
        )

    def __init__(self, **kwargs):
        # All attributes are supplied by alloc() via keyword arguments.
        print 'init trainer'
        self.__dict__.update(kwargs)

    def updates(self):
        # Assemble the dict of shared-variable updates for one training
        # step: CD parameter updates, negative-chain advancement, learning
        # rate annealing, running statistics, and parameter clipping.
        print 'start trainer.updates'
        conf = self.conf
        ups = {}
        add_updates = lambda b: safe_update(ups, b)
        # Linear learning-rate annealing, reaching 0 at train_iters.
        annealing_coef = 1.0 - self.iteration / float(conf['train_iters'])
        ups[self.iteration] = self.iteration + 1
        ups[self.annealing_coef] = annealing_coef
        conv_h = self.rbm.all_hidden_h_means_given_v(
            self.visible_batch)
        # Exponential moving average of the hidden-unit activations.
        new_conv_h_means = 0.1 * conv_h.mean(axis=0) + .9 * self.conv_h_means
        #new_conv_h_means = conv_h.mean(axis=0)
        ups[self.conv_h_means] = new_conv_h_means
        ups[self.cpnv_h] = conv_h
        #ups[self.global_h_means] = new_global_h_means
        #sparsity_cost = 0
        #self.sparsity_cost = sparsity_cost
        # SML/PCD updates: positive phase from data, negative phase from
        # the persistent particles.
        add_updates(
            self.rbm.cd_updates(
                pos_v=self.visible_batch,
                neg_v=self.sampler.particles,
                stepsizes=[annealing_coef * lr for lr in self.learn_rates]))
        if conf['chain_reset_prob']:
            # advance the 'negative-phase' chain, randomly resetting some
            # particles to fresh noise with probability chain_reset_prob
            nois_batch = self.sampler.s_rng.normal(size=self.rbm.v_shape)
            resets = self.sampler.s_rng.uniform(size=(conf['batchsize'],)) < conf['chain_reset_prob']
            old_particles = tensor.switch(resets.dimshuffle(0, 'x', 'x', 'x'),
                                          nois_batch,  # reset the chain
                                          self.sampler.particles,  # continue chain
                                          )
            #old_particles = tensor.switch(resets.dimshuffle(0,'x','x','x'),
            #        self.visible_batch, # reset the chain
            #        self.sampler.particles, #continue chain
            #        )
        else:
            old_particles = self.sampler.particles
        # Number of Gibbs steps per update: either grows with the
        # iteration count or stays constant.
        if conf['increase_steps_sampling']:
            steps_sampling = self.iteration.get_value() / 1000 + 1
        else:
            steps_sampling = self.conf['constant_steps_sampling']
        #print steps_sampling
        tmp_particles = old_particles
        for step in xrange(int(steps_sampling)):
            tmp_particles = self.rbm.gibbs_step_for_v(
                tmp_particles,
                self.sampler.s_rng, border_mask=conf['border_mask'],
                sampling_for_v=conf['sampling_for_v'])
        new_particles = tmp_particles
        #broadcastable_value = new_particles.broadcastable
        #print broadcastable_value
        #reconstructions= self.rbm.gibbs_step_for_v(self.visible_batch, self.sampler.s_rng)
        #recons_error = tensor.sum((self.visible_batch-reconstructions)**2)
        #recons_error = 0.0
        #ups[self.recons_error] = recons_error
        #return {self.particles: new_particles}
        ups[self.sampler.particles] = tensor.clip(new_particles,
                                                  conf['particles_min'],
                                                  conf['particles_max'])
        # make sure that the new v_prec doesn't drop below its floor
        # (cd_updates above put an entry for v_prec into ups)
        new_v_prec = ups[self.rbm.v_prec]
        ups[self.rbm.v_prec] = tensor.switch(
            new_v_prec < self.rbm.v_prec_lower_limit,
            self.rbm.v_prec_lower_limit,
            new_v_prec)
        """
        # make sure that the interior region of global weights matrix is properly masked
        if self.conf['zero_out_interior_weights']:
            ups[self.rbm.weights_hs] = self.rbm.weights_mask * ups[self.rbm.weights_hs]
        """
        # Clip alpha to its configured range (log- or linear-domain).
        if self.conf['alpha_min'] < self.conf['alpha_max']:
            if self.conf['alpha_logdomain']:
                ups[self.rbm.conv_alpha] = tensor.clip(
                    ups[self.rbm.conv_alpha],
                    numpy.log(self.conf['alpha_min']).astype(floatX),
                    numpy.log(self.conf['alpha_max']).astype(floatX))
                #ups[self.rbm.global_alpha] = tensor.clip(
                #    ups[self.rbm.global_alpha],
                #    numpy.log(self.conf['alpha_min']).astype(floatX),
                #    numpy.log(self.conf['alpha_max']).astype(floatX))
            else:
                ups[self.rbm.conv_alpha] = tensor.clip(
                    ups[self.rbm.conv_alpha],
                    self.conf['alpha_min'],
                    self.conf['alpha_max'])
                #ups[self.rbm.global_alpha] = tensor.clip(
                #    ups[self.rbm.global_alpha],
                #    self.conf['alpha_min'],
                #    self.conf['alpha_max'])
        # Clip lambda to its configured range (log- or linear-domain).
        if self.conf['lambda_min'] < self.conf['lambda_max']:
            if self.conf['lambda_logdomain']:
                ups[self.rbm.conv_lambda] = tensor.clip(ups[self.rbm.conv_lambda],
                                                        numpy.log(self.conf['lambda_min']).astype(floatX),
                                                        numpy.log(self.conf['lambda_max']).astype(floatX))
                #ups[self.rbm.global_lambda] = tensor.clip(ups[self.rbm.global_lambda],
                #        numpy.log(self.conf['lambda_min']).astype(floatX),
                #        numpy.log(self.conf['lambda_max']).astype(floatX))
            else:
                ups[self.rbm.conv_lambda] = tensor.clip(ups[self.rbm.conv_lambda],
                                                        self.conf['lambda_min'],
                                                        self.conf['lambda_max'])
                #ups[self.rbm.global_lambda] = tensor.clip(ups[self.rbm.global_lambda],
                #        self.conf['lambda_min'],
                #        self.conf['lambda_max'])
        #ups[self.rbm.conv_bias_hs] = self.rbm.conv_bias_hs.get_value(borrow=True)+self.rbm.h_tiled_conv_mask
        return ups

    def save_weights_to_files(self, pattern='iter_%05i'):
        # Currently disabled; the RGB rendering path is kept for reference.
        #pattern = pattern%self.iteration.get_value()
        # save particles
        #Image.fromarray(tile_conv_weights(self.sampler.particles.get_value(borrow=True),
        #    flip=False),
        #    'RGB').save('particles_%s.png'%pattern)
        #self.rbm.save_weights_to_files(pattern)
        pass

    def save_weights_to_grey_files(self, pattern='iter_%05i'):
        # Save the current negative-phase particles and the RBM weights as
        # greyscale PNGs, tagged with the current iteration number.
        pattern = pattern % self.iteration.get_value()
        # save particles
        """
        particles_for_show = self.sampler.particles.dimshuffle(3,0,1,2)
        fn = theano.function([],particles_for_show)
        particles_for_show_value = fn()
        Image.fromarray(tile_conv_weights(particles_for_show_value,
            flip=False),'L').save('particles_%s.png'%pattern)
        self.rbm.save_weights_to_grey_files(pattern)
        """
        Image.fromarray(tile_conv_weights(self.sampler.particles.get_value(borrow=True),
                                          flip=False), 'L').save('particles_%s.png' % pattern)
        self.rbm.save_weights_to_grey_files(pattern)

    def print_status(self):
        # Print min/max diagnostics for every learned parameter and the
        # sampler state; asserts that all values are finite.
        def print_minmax(msg, x):
            assert numpy.all(numpy.isfinite(x))
            print msg, x.min(), x.max()
        print 'iter:', self.iteration.get_value()
        print_minmax('filters_hs ', self.rbm.filters_hs.get_value(borrow=True))
        print_minmax('conv_bias_hs', self.rbm.conv_bias_hs.get_value(borrow=True))
        #print_minmax('weights_hs ', self.rbm.weights_hs.get_value(borrow=True))
        #print_minmax('global_bias_hs', self.rbm.global_bias_hs.get_value(borrow=True))
        print_minmax('conv_mu', self.rbm.conv_mu.get_value(borrow=True))
        #print_minmax('global_mu', self.rbm.global_mu.get_value(borrow=True))
        if self.conf['alpha_logdomain']:
            print_minmax('conv_alpha',
                         numpy.exp(self.rbm.conv_alpha.get_value(borrow=True)))
            #print_minmax('global_alpha',
            #        numpy.exp(self.rbm.global_alpha.get_value(borrow=True)))
        else:
            print_minmax('conv_alpha', self.rbm.conv_alpha.get_value(borrow=True))
            #print_minmax('global_alpha', self.rbm.global_alpha.get_value(borrow=True))
        if self.conf['lambda_logdomain']:
            print_minmax('conv_lambda',
                         numpy.exp(self.rbm.conv_lambda.get_value(borrow=True)))
            #print_minmax('global_lambda',
            #        numpy.exp(self.rbm.global_lambda.get_value(borrow=True)))
        else:
            print_minmax('conv_lambda', self.rbm.conv_lambda.get_value(borrow=True))
            #print_minmax('global_lambda', self.rbm.global_lambda.get_value(borrow=True))
        print_minmax('v_prec', self.rbm.v_prec.get_value(borrow=True))
        print_minmax('particles', self.sampler.particles.get_value())
        print_minmax('conv_h_means', self.conv_h_means.get_value())
        print_minmax('conv_h', self.cpnv_h.get_value())
        print (self.cpnv_h.get_value()).std()
        #print self.conv_h_means.get_value()[0,0:11,0:11]
        #print self.rbm.conv_bias_hs.get_value(borrow=True)[0,0,0:3,0:3]
        #print self.rbm.h_tiled_conv_mask.get_value(borrow=True)[0,32,0:3,0:3]
        #print_minmax('global_h_means', self.global_h_means.get_value())
        print 'lr annealing coef:', self.annealing_coef.get_value()
        #print 'reconstruction error:', self.recons_error.get_value()
def main_inpaint(filename, algo='Gibbs', rng=777888, scale_separately=False, sampling_for_v=False):
    """Inpaint the interior of texture patches with a trained RBM.

    Loads a pickled RBM from `filename`, crops one batch of 98x98 Brodatz
    patches, blanks the interior region (rows/cols 11..87) of every patch,
    and runs 5000 Gibbs steps in which only the interior is resampled while
    the 11-pixel border stays clamped to the data.  Every 100 steps the
    current particles are written out as a tiled greyscale PNG; with
    `scale_separately` the inpainted interior and the clamped border are
    rescaled independently before saving.  `algo` is accepted for signature
    symmetry with main_sample but only Gibbs sampling is implemented here.
    """
    rbm = cPickle.load(open(filename))
    sampler = Gibbs.alloc(rbm, rng)
    batch_idx = tensor.iscalar()
    batch_range = batch_idx * rbm.conf['batchsize'] + numpy.arange(rbm.conf['batchsize'])
    n_examples = rbm.conf['batchsize'] #64
    n_img_rows = 98
    n_img_cols = 98
    n_img_channels=1
    batch_x = Brodatz_op(batch_range,
  	           '../Brodatz/D6.gif', # download from http://www.ux.uis.no/~tranden/brodatz.html
  	           patch_shape=(n_img_channels,
  	                        n_img_rows,
  	                        n_img_cols),
  	           noise_concelling=0.,
  	           seed=3322,
  	           batchdata_size=n_examples
  	           )
    fn_getdata = theano.function([batch_idx],batch_x)
    batchdata = fn_getdata(0)
    # Keep a [0,1]-scaled copy (used only for the scale_separately output),
    # then blank the interior of both copies.
    scaled_batchdata = (batchdata - batchdata.min())/(batchdata.max() - batchdata.min() + 1e-6)
    scaled_batchdata[:,:,11:88,11:88] = 0
    batchdata[:,:,11:88,11:88] = 0
    print 'the min of border: %f, the max of border: %f'%(batchdata.min(),batchdata.max())
    shared_batchdata = sharedX(batchdata,'batchdata')
    # Despite the name, this mask is 1 over the *interior* to be inpainted
    # and 0 over the clamped border.
    border_mask = numpy.zeros((n_examples,n_img_channels,n_img_rows,n_img_cols),dtype=floatX)
    border_mask[:,:,11:88,11:88]=1
    sampler.particles = shared_batchdata
    new_particles = rbm.gibbs_step_for_v(sampler.particles, sampler.s_rng, sampling_for_v=sampling_for_v)
    # Keep the sampled interior, then re-impose the data border
    # (batchdata is zero in the interior, so the sum does not overlap).
    new_particles = tensor.mul(new_particles,border_mask)
    new_particles = tensor.add(new_particles,batchdata)
    fn = theano.function([], [],
            updates={sampler.particles: new_particles})
    particles = sampler.particles
    for i in xrange(5000):
        print i
        if i % 100 == 0:
            savename = '%s_inpaint_%04i.png'%(filename,i)
            print 'saving'
            temp = particles.get_value(borrow=True)
            print 'the min of center: %f, the max of center: %f' \
                    %(temp[:,:,11:88,11:88].min(),temp[:,:,11:88,11:88].max())
            if scale_separately:
                # Rescale the inpainted interior on its own, then paste the
                # already-scaled border back in.
                scale_separately_savename = '%s_inpaint_scale_separately_%04i.png'%(filename,i)
                blank_img = numpy.zeros((n_examples,n_img_channels,n_img_rows,n_img_cols),dtype=floatX)
                tmp = temp[:,:,11:88,11:88]
                tmp = (tmp - tmp.min()) / (tmp.max() - tmp.min() + 1e-6)
                blank_img[:,:,11:88,11:88] = tmp
                blank_img = blank_img + scaled_batchdata
                Image.fromarray(
                        tile_conv_weights(
                        blank_img,
                        flip=False,scale_each=True),
                        'L').save(scale_separately_savename)
            else:
                Image.fromarray(
                        tile_conv_weights(
                        particles.get_value(borrow=True),
                        flip=False,scale_each=True),
                        'L').save(savename)
        fn()
def main_sample(filename, algo='Gibbs', rng=777888, burn_in=5000, save_interval=5000, n_files=10, sampling_for_v=False):
    """Draw samples from a trained, pickled RBM.

    Runs `burn_in` steps of the chosen sampler (saving a progress image
    every 100 steps), then writes `n_files` sample images, each separated
    by `save_interval` further sampling steps.  Particle values are clipped
    to the [particles_min, particles_max] range stored in the RBM's conf.
    NOTE(review): any `algo` other than 'Gibbs'/'HMC' leaves `fn` and
    `particles` undefined and would raise NameError below; the HMC branch
    also relies on an `HMC` name not visible in this file — verify imports.
    """
    rbm = cPickle.load(open(filename))
    #rbm.v_shape = (2,1,2045,2045)
    #rbm.out_conv_hs_shape = FilterActs.infer_shape_without_instance(rbm.v_shape,rbm.filters_hs_shape)
    #rbm.v_prec = sharedX(numpy.zeros(rbm.v_shape[1:])+rbm.v_prec.get_value(borrow=True).mean(), 'var_v_prec')
    if algo == 'Gibbs':
        sampler = Gibbs.alloc(rbm, rng)
        new_particles = rbm.gibbs_step_for_v(sampler.particles, sampler.s_rng,border_mask=True, sampling_for_v=sampling_for_v)
        new_particles = tensor.clip(new_particles,
                rbm.conf['particles_min'],
                rbm.conf['particles_max'])
        fn = theano.function([], [],
                updates={sampler.particles: new_particles})
        particles = sampler.particles
    elif algo == 'HMC':
        print "WARNING THIS PROBABLY DOESNT WORK"
        # still need to figure out how to get the clipping into
        # the iterations of mcmc
        sampler = HMC(rbm, rbm.conf['batchsize'], rng)
        ups = sampler.updates()
        ups[sampler.positions] = tensor.clip(ups[sampler.positions],
                rbm.conf['particles_min'],
                rbm.conf['particles_max'])
        fn = theano.function([], [], updates=ups)
        particles = sampler.positions
    # Burn-in phase: advance the chain, dumping a snapshot every 100 steps.
    for i in xrange(burn_in):
        print i
        #savename = '%s_Large_sample_burn_%04i.png'%(filename,i)
        #tmp = particles.get_value(borrow=True)[0,0,11:363,11:363]
        #w = numpy.asarray(255 * (tmp - tmp.min()) / (tmp.max() - tmp.min() + 1e-6), dtype='uint8')
        #Image.fromarray(w,'L').save(savename)
        savename = '%s_sample_burn_%04i.png'%(filename,i)
        if i % 100 == 0:
            print 'saving'
            Image.fromarray(
                    tile_conv_weights(
                        particles.get_value(borrow=True),
                        flip=False,scale_each=True),
                    'L').save(savename)
        fn()
    # Sampling phase: one image per `save_interval` additional steps.
    for n in xrange(n_files):
        for i in xrange(save_interval):
            fn()
        savename = '%s_sample_%04i.png'%(filename,n)
        print 'saving', savename
        Image.fromarray(
                tile_conv_weights(
                    particles.get_value(borrow=True),
                    flip=False,scale_each=True),
                'L').save(savename)
def main_print_status(filename, algo='Gibbs', rng=777888, burn_in=500, save_interval=500, n_files=1):
def print_minmax(msg, x):
assert numpy.all(numpy.isfinite(x))
print msg, x.min(), x.max()
rbm = cPickle.load(open(filename))
if algo == 'Gibbs':
sampler = Gibbs.alloc(rbm, rng)
new_particles = rbm.gibbs_step_for_v(sampler.particles, sampler.s_rng)
#new_particles = tensor.clip(new_particles,
# rbm.conf['particles_min'],
# rbm.conf['particles_max'])
fn = theano.function([], [],
updates={sampler.particles: new_particles})
particles = sampler.particles
elif algo == 'HMC':
print "WARNING THIS PROBABLY DOESNT WORK"
for i in xrange(burn_in):
fn()
print_minmax('particles', particles.get_value(borrow=True))
def main0(rval_doc):
    """Build the dataset op, the RBM and the trainer from `rval_doc['conf']`
    and run the training loop until the learning-rate annealing coefficient
    reaches zero.  Checkpoints the model every 1000 iterations and prints /
    saves status images every 100 iterations early on (first 1000 iters),
    then every 1000 iterations.
    """
    if 'conf' not in rval_doc:
        raise NotImplementedError()
    conf = rval_doc['conf']
    batchsize = conf['batchsize']
    batch_idx = tensor.lscalar()
    # Indices of the examples making up minibatch number `batch_idx`.
    batch_range = batch_idx * conf['batchsize'] + numpy.arange(conf['batchsize'])
    if conf['dataset']=='Brodatz':
        n_examples = conf['batchsize'] #64
        n_img_rows = 98
        n_img_cols = 98
        n_img_channels=1
        batch_x = Brodatz_op(batch_range,
  	           '../Brodatz/D6.gif', # download from http://www.ux.uis.no/~tranden/brodatz.html
  	           patch_shape=(n_img_channels,
  	                        n_img_rows,
  	                        n_img_cols),
  	           noise_concelling=0.,
  	           seed=3322,
  	           batchdata_size=n_examples,
  	           rescale=1.0
  	           )
    else:
        raise ValueError('dataset', conf['dataset'])
    rbm = RBM.alloc(
            conf,
            image_shape=(
                n_examples,
                n_img_channels,
                n_img_rows,
                n_img_cols
                ),
            filters_hs_shape=(
                conf['filters_hs_size'],
                conf['n_filters_hs'],
                n_img_channels,
                conf['filters_hs_size'],
                conf['filters_hs_size']
                ), #fmodules(stride) x filters_per_modules x fcolors(channels) x frows x fcols
            filters_irange=conf['filters_irange'],
            v_prec=conf['v_prec_init'],
            v_prec_lower_limit=conf['v_prec_lower_limit'],
            )
    rbm.save_weights_to_grey_files('iter_0000')
    # Learning rates: per-example rate scaled down by the batch size; the
    # convolutional filters/lambda get an extra multiplier.
    base_lr = conf['base_lr_per_example']/batchsize
    conv_lr_coef = conf['conv_lr_coef']
    trainer = Trainer.alloc(
            rbm,
            visible_batch=batch_x,
            lrdict={
                # higher learning rate ok with CD1
                rbm.v_prec: sharedX(base_lr, 'prec_lr'),
                rbm.filters_hs: sharedX(conv_lr_coef*base_lr, 'filters_hs_lr'),
                rbm.conv_bias_hs: sharedX(base_lr, 'conv_bias_hs_lr'),
                rbm.conv_mu: sharedX(base_lr, 'conv_mu_lr'),
                rbm.conv_alpha: sharedX(base_lr, 'conv_alpha_lr'),
                rbm.conv_lambda: sharedX(conv_lr_coef*base_lr, 'conv_lambda_lr'),
                },
            conf = conf,
            )
    print 'start building function'
    training_updates = trainer.updates() #
    train_fn = theano.function(inputs=[batch_idx],
            outputs=[],
            #mode='FAST_COMPILE',
            #mode='DEBUG_MODE',
            updates=training_updates
            ) #
    print 'training...'
    iter = 0
    # annealing_coef decays linearly to 0 over conf['train_iters'] updates;
    # the loop stops once it goes negative.
    while trainer.annealing_coef.get_value()>=0: #
        dummy = train_fn(iter) #
        #trainer.print_status()
        if iter % 1000 == 0:
            rbm.dump_to_file(os.path.join(_temp_data_path_,'rbm_%06i.pkl'%iter))
        if iter <= 1000 and not (iter % 100): #
            trainer.print_status()
            trainer.save_weights_to_grey_files()
        elif not (iter % 1000):
            trainer.print_status()
            trainer.save_weights_to_grey_files()
        iter += 1
def main_train():
print 'start main_train'
main0(dict(
conf=dict(
dataset='Brodatz',
chain_reset_prob=.02,#approx CD-50
unnatural_grad=False,
alpha_logdomain=True,
conv_alpha0=20.,
global_alpha0=10.,
alpha_min=1.,
alpha_max=100.,
lambda_min=0,
lambda_max=10,
lambda0=0.001,
lambda_logdomain=False,
conv_bias0=0.0,
conv_bias_irange=0.0,#conv_bias0 +- this
conv_mu0 = 1.0,
train_iters=300000,
base_lr_per_example=0.00001,
conv_lr_coef=1.0,
batchsize=64,
n_filters_hs=32,
v_prec_init=20., # this should increase with n_filters_hs?
v_prec_lower_limit = 1.,
filters_hs_size=11,
filters_irange=.01,
zero_out_interior_weights=False,
#sparsity_weight_conv=0,#numpy.float32(500),
#sparsity_weight_global=0.,
particles_min=-1000.,
particles_max=1000.,
#problem_term_vWWv_weight = 0.,
#problem_term_vIv_weight = 0.,
n_tiled_conv_offset_diagonally = 1,
constant_steps_sampling = 1,
increase_steps_sampling = True,
border_mask=True,
sampling_for_v=False,
)))
if __name__ == '__main__':
    # Dispatch on the first command-line argument; each handler's return
    # value becomes the process exit status.
    command = sys.argv[1]
    if command == 'train':
        sys.exit(main_train())
    elif command == 'sampling':
        sys.exit(main_sample(sys.argv[2]))
    elif command == 'inpaint':
        sys.exit(main_inpaint(sys.argv[2]))
    elif command == 'print_status':
        sys.exit(main_print_status(sys.argv[2]))
| {"/test_NCC.py": ["/CrossCorrelation.py"]} |
65,826 | luoheng/TCssrbm | refs/heads/master | /Brodatz.py | import theano
import numpy
from PIL import Image
from protocol_ import TensorFnDataset
floatX=theano.config.floatX
def Brodatz_op(s_idx,
        filename, # a list of texture images
        patch_shape=(1,98,98),
        noise_concelling=0.0, # see it in the paper of mu-ssRBM
        seed=3322,
        batchdata_size=64, # the size of batchdata for each texture image
        rescale=1.0, # see it in the paper, 'Multiple texture Boltzmann machines'
        new_shapes=[[320,320],], # a list of new shapes
        validation=False, # use part of training image as a test image
        test_data=False # crop the patches from the test image
        ):
    """Return the symbolic patch batch Brodatz_images[s_idx].

    A scalar ``s_idx`` yields a tensor3 shaped like ``patch_shape``
    (1, 98, 98 by default); a vector of length N yields the matching
    tensor4 (N, 1, 98, 98).  Indices wrap around modulo the total number
    of cached patches (``batchdata_size`` per texture image).
    """
    assert len(filename) == len(new_shapes)
    dataset = Brodatz(filename,
                      patch_shape,
                      noise_concelling,
                      seed,
                      batchdata_size,
                      rescale,
                      new_shapes,
                      validation,
                      test_data)
    op = TensorFnDataset(floatX,
                         bcast=(False, False, False),
                         fn=dataset.extract_random_patches,
                         single_shape=patch_shape)
    n_cached_patches = batchdata_size * len(filename)
    return op(s_idx % n_cached_patches)
class Brodatz(object):
    """Loader for Brodatz texture images.

    Each image is resized to its entry in ``new_shapes``, split into a
    training region and a test region, and normalised to zero mean with
    std damped by ``rescale`` and ``noise_concelling``.
    ``extract_random_patches`` then crops random patches for minibatches.

    Changes vs. the original: removed dead stores (numpy.zeros arrays that
    were immediately overwritten) and unused locals; replaced the Py2-only
    ``print`` statements / ``xrange`` with forms that behave identically
    under Python 2 but also run under Python 3.
    """
    def __init__(self, filename,
            patch_shape, noise_concelling,
            seed, batchdata_size, rescale,
            new_shapes,
            validation,
            test_data):
        """See Brodatz_op for the meaning of the parameters."""
        self.filename = filename
        self.patch_shape = patch_shape
        self.ncc = noise_concelling
        self.rng = numpy.random.RandomState(seed)
        self.batchdata_size = batchdata_size
        self.training_img = []
        self.validation = validation
        self.test_data = test_data
        self.test_img = []
        patch_channels, patch_rows, patch_cols = patch_shape
        for f_index, f_name in enumerate(self.filename):
            assert len(new_shapes[f_index]) == 2
            image = Image.open(f_name)
            image = image.resize((int(new_shapes[f_index][0]), int(new_shapes[f_index][1])), Image.BICUBIC)
            img_array = numpy.asarray(image, dtype=floatX)
            if validation:
                # Validation mode: train on the left 2/3 of the top half of
                # the image, test on the remaining right third.
                train_rows = int(0.5 * new_shapes[f_index][0])
                train_cols = int(new_shapes[f_index][1] * 2 / 3)
                training_img = img_array[0:train_rows, 0:train_cols]
                test_img = img_array[0:train_rows, train_cols:]
            else:
                # Test mode: train on the whole top half of the image and
                # test on the bottom half.
                train_rows = int(0.5 * new_shapes[f_index][0])
                train_cols = int(new_shapes[f_index][1])
                training_img = img_array[0:train_rows, :]
                test_img = img_array[train_rows:, :]
            assert patch_rows < train_rows
            assert patch_cols < train_cols
            print("BrodatzOp : using a validation set : " + str(validation))
            print("BrodatzOp : the training image size is : " + str(training_img.shape))
            print("BrodatzOp : the test image size is : " + str(test_img.shape))
            assert patch_channels == 1
            # Normalise both regions up front; ncc (noise cancelling) damps
            # the division exactly as in the mu-ssRBM paper.
            self.training_img += [(training_img - training_img.mean()) / (rescale * training_img.std() + self.ncc)]
            self.test_img += [(test_img - test_img.mean()) / (rescale * test_img.std() + self.ncc)]
            print('the std of the training data %s is:%f' % (f_name, self.training_img[f_index].std()))
            print('the std of the test data %s is:%f' % (f_name, self.test_img[f_index].std()))
    def extract_random_patches(self):
        """Crop ``batchdata_size`` random patches from every image.

        Returns an array of shape
        (batchdata_size * n_images, 1, patch_rows, patch_cols).
        Patches come from the test images when ``self.test_data`` is set,
        otherwise from the training images.
        """
        N = self.batchdata_size
        _, patch_rows, patch_cols = self.patch_shape
        rval = numpy.zeros((N * len(self.training_img), 1, patch_rows, patch_cols),
                dtype=self.training_img[0].dtype)
        assert len(self.training_img) == len(self.test_img)
        for img_index in range(len(self.training_img)):
            if self.test_data:
                img = self.test_img[img_index]
            else:
                img = self.training_img[img_index]
            img_rows, img_cols = img.shape
            # Draw all row offsets first, then all column offsets, so the
            # RandomState draw order stays reproducible.
            offsets_row = self.rng.randint(img_rows - patch_rows + 1, size=N)
            offsets_col = self.rng.randint(img_cols - patch_cols + 1, size=N)
            for n, (r, c) in enumerate(zip(offsets_row, offsets_col)):
                rval[img_index * N + n, 0, :, :] = img[r:r + patch_rows, c:c + patch_cols]
        return rval
| {"/test_NCC.py": ["/CrossCorrelation.py"]} |
65,827 | luoheng/TCssrbm | refs/heads/master | /TCssDBN.py | """
This file implements the binary convolutional ssRBM as a second layer in DBN
"""
import cPickle, pickle
import numpy
numpy.seterr('warn') #SHOULD NOT BE IN LIBIMPORT
from PIL import Image
import theano
from theano import tensor
from theano.tensor import nnet,grad
from theano.tensor.nnet.conv import conv2d
from pylearn.io import image_tiling
from pylearn.algorithms.mcRBM import (
contrastive_cost, contrastive_grad)
import pylearn.gd.sgd
from TCssrbm_FPCD import RBM,Gibbs
import sys
#from unshared_conv_diagonally import FilterActs
#from unshared_conv_diagonally import WeightActs
#from unshared_conv_diagonally import ImgActs
from Brodatz import Brodatz_op
from Brodatz import Brodatz
from CrossCorrelation import CrossCorrelation
#import scipy.io
import os
# Directory used for model checkpoints (was '/Tmp/luoheng' on the cluster).
_temp_data_path_ = '.'#'/Tmp/luoheng'
# Random-stream backend selection: the live branch keeps the slower but
# better-tested shared_randomstreams generator; flip the condition to use
# the MRG generator instead.
if 1:
    print 'WARNING: using SLOW rng'
    RandomStreams = tensor.shared_randomstreams.RandomStreams
else:
    import theano.sandbox.rng_mrg
    RandomStreams = theano.sandbox.rng_mrg.MRG_RandomStreams
# Work in Theano's configured float width throughout this module.
floatX=theano.config.floatX
# Shorthand: allocate a named Theano shared variable with dtype floatX.
sharedX = lambda X, name : theano.shared(numpy.asarray(X, dtype=floatX),
        name=name)
def conv2d_transpose(x, filters, in_img_shape, filters_shape, subsample):
    """Apply the transpose of the convolution operator to ``x``.

    If the forward convolution is the linear map ``M`` (images map to
    ``dot(img, M)``), this returns the equivalent of ``dot(x, M.T)``.
    The transpose is obtained by building a dummy forward convolution and
    asking its op for the gradient with respect to the input image.
    """
    placeholder_img = tensor.tensor4()
    forward = conv2d(placeholder_img, filters,
            image_shape=in_img_shape,
            filter_shape=filters_shape,
            subsample=subsample)
    # The gradient of conv2d w.r.t. its image input is exactly the
    # transposed convolution applied to x.
    transposed, _ = forward.owner.op.grad((placeholder_img, filters), (x,))
    return transposed
def unnatural_sgd_updates(params, grads, stepsizes, tracking_coef=0.1, epsilon=1):
    """Build SGD updates scaled by a running estimate of gradient variance.

    For each parameter an exponential moving average of its gradient and of
    the squared gradient is tracked (coefficient ``tracking_coef``); the
    parameter step is the mean gradient divided by the square root of the
    estimated variance plus ``epsilon`` for numerical stability.
    """
    running_means = []
    running_sqr_means = []
    for p in params:
        init = p.get_value(borrow=True)
        running_means.append(theano.shared(numpy.zeros_like(init)))
        running_sqr_means.append(theano.shared(numpy.ones_like(init)))
    updates = {}
    for g, mean, sqr_mean, param, step in zip(
            grads, running_means, running_sqr_means, params, stepsizes):
        updates[mean] = tracking_coef * g + (1 - tracking_coef) * mean
        updates[sqr_mean] = tracking_coef * g * g + (1 - tracking_coef) * sqr_mean
        variance = sqr_mean - mean ** 2
        # natural grad doesn't want sqrt, but i found it worked worse
        updates[param] = param - step * mean / tensor.sqrt(variance + epsilon)
    return updates
def safe_update(a, b):
    """Merge mapping (or pair-iterable) ``b`` into dict ``a`` in place.

    Raises KeyError on the first key that already exists in ``a`` (a merge
    is only "safe" when it cannot silently overwrite).  Returns ``a``.

    Uses ``dict(b).items()`` instead of the Py2-only ``iteritems`` so the
    helper behaves identically under Python 2 and also runs on Python 3.
    """
    for k, v in dict(b).items():
        if k in a:
            raise KeyError(k)
        a[k] = v
    return a
def most_square_shape(N):
    """Return the rectangle (height, width) with area N closest to square,
    with height <= width.

    Scans candidate heights downward from floor(sqrt(N)); the first divisor
    found gives the most-square factorisation.  Uses floor division (``//``)
    so the result stays a pair of ints under Python 3 as well (``/`` on ints
    was already floor division under Python 2); ``range`` likewise replaces
    the Py2-only ``xrange`` with identical iteration behavior.
    """
    for i in range(int(numpy.sqrt(N)), 0, -1):
        if 0 == N % i:
            return (i, N // i)
def tile_conv_weights(w,flip=False, scale_each=True):
    """
    Render a 4-D filter/image bank as one tiled 2-D uint8 image array.

    Parameters:
        w - array of shape (n_tiles, channels, rows, cols); rows must equal
            cols.  One channel produces a greyscale output, anything else is
            laid out through the 3-channel (RGB) path.
        flip - if True, reverse both spatial axes of every tile.
        scale_each - if True, rescale every tile independently to [0, 255];
            otherwise rescale the whole bank with a single global min/max.

    Returns a uint8 array with one pixel of black border between tiles,
    suitable for Image.fromarray.  (Change vs. original: removed the unused
    local ``tr_stride``.)
    """
    if w.shape[2] != w.shape[3]:
        raise NotImplementedError('not square', w.shape)
    if w.shape[1] == 1:
        # Greyscale path.
        wmin, wmax = w.min(), w.max()
        if not scale_each:
            w = numpy.asarray(255 * (w - wmin) / (wmax - wmin + 1e-6), dtype='uint8')
        trows, tcols= most_square_shape(w.shape[0])
        outrows = trows * w.shape[2] + trows-1
        outcols = tcols * w.shape[3] + tcols-1
        out = numpy.zeros((outrows, outcols), dtype='uint8')
        for tr in range(trows):
            for tc in range(tcols):
                # this is supposed to flip the filters back into the image
                # coordinates as well as put the channels in the right place, but I
                # don't know if it really does that
                tmp = w[tr*tcols+tc,
                        0,
                        ::-1 if flip else 1,
                        ::-1 if flip else 1]
                if scale_each:
                    tmp = numpy.asarray(255*(tmp - tmp.min()) / (tmp.max() - tmp.min() + 1e-6),
                            dtype='uint8')
                out[tr*(1+w.shape[2]):tr*(1+w.shape[2])+w.shape[2],
                        tc*(1+w.shape[3]):tc*(1+w.shape[3])+w.shape[3]] = tmp
        return out
    # Three-channel (RGB) path.
    wmin, wmax = w.min(), w.max()
    if not scale_each:
        w = numpy.asarray(255 * (w - wmin) / (wmax - wmin + 1e-6), dtype='uint8')
    trows, tcols= most_square_shape(w.shape[0])
    outrows = trows * w.shape[2] + trows-1
    outcols = tcols * w.shape[3] + tcols-1
    out = numpy.zeros((outrows, outcols,3), dtype='uint8')
    for tr in range(trows):
        for tc in range(tcols):
            # this is supposed to flip the filters back into the image
            # coordinates as well as put the channels in the right place, but I
            # don't know if it really does that
            tmp = w[tr*tcols+tc].transpose(1,2,0)[
                    ::-1 if flip else 1,
                    ::-1 if flip else 1]
            if scale_each:
                tmp = numpy.asarray(255*(tmp - tmp.min()) / (tmp.max() - tmp.min() + 1e-6),
                        dtype='uint8')
            out[tr*(1+w.shape[2]):tr*(1+w.shape[2])+w.shape[2],
                    tc*(1+w.shape[3]):tc*(1+w.shape[3])+w.shape[3]] = tmp
    return out
class bRBM(object):
    """
    Light-weight class that provides math related to inference in binary Spike & Slab RBM
    Attributes:
     - _params a list of the attributes that are shared vars

    Every learnable parameter has a companion *_fast shared variable used
    for fast-weight (FPCD-style) updates; the get_* accessors optionally
    fold the fast part in.
    """
    def __init__(self, **kwargs):
        # All attributes are injected as keyword arguments (see alloc).
        print 'init binary rbm'
        self.__dict__.update(kwargs)
    @classmethod
    def alloc(cls,
            l2_conf,
            hs_shape, # input dimensionality
            filters_shape,
            filters_irange,
            rbm,
            seed = 8923402,
            ):
        """Allocate a second-layer binary ssRBM.

        l2_conf: config dict for this layer.
        hs_shape: (batch, maps, rows, cols) shape of the first layer's h*s
            output, which this layer treats as its (s, h) input.
        filters_shape: (n_filters, maps, frows, fcols).
        filters_irange: scale of the random filter initialisation.
        rbm: trained first-layer RBM; its conv_bias_hs / conv_mu /
            conv_alpha initialise this layer's h_bias / mu / alpha.
        seed: numpy RNG seed for the filter init.
        """
        print 'alloc rbm'
        rng = numpy.random.RandomState(seed)
        self = cls()
        #print hs_shape
        #print filters_shape
        n_batchsize, n_maps_, n_hs_rows, n_hs_cols = hs_shape
        n_filters, n_maps, n_filters_rows, n_filters_cols = filters_shape
        assert n_maps_ == n_maps
        self.hs_shape = hs_shape
        print 'hs_shape'
        print self.hs_shape
        self.filters_shape = filters_shape
        print 'self.filters_shape'
        print self.filters_shape
        # Valid-mode convolution output shape for the visible units.
        self.out_conv_v_shape = (n_batchsize, n_filters, n_hs_rows-n_filters_rows+1, n_hs_cols-n_filters_cols+1)
        print 'self.out_conv_v_shape'
        print self.out_conv_v_shape
        #start to define the parameters
        #biases for v and h
        conv_v_bias_shape = self.out_conv_v_shape[1:]
        self.conv_v_bias_shape = conv_v_bias_shape
        self.conv_v_bias = sharedX(numpy.zeros(self.conv_v_bias_shape), name='conv_v_bias')
        self.conv_v_bias_fast = sharedX(numpy.zeros(self.conv_v_bias_shape), name='conv_v_bias_fast')
        print 'self.conv_v_bias_shape'
        print self.conv_v_bias_shape
        h_bias_shape = self.hs_shape[1:]
        self.h_bias_shape = h_bias_shape
        # Tile a per-filter first-layer parameter (old_shp) across every
        # spatial position of this layer's hidden map (new_shp).
        # NOTE(review): assumes old_shp unpacks as (f_modules, n_filters) —
        # confirm against the first-layer RBM's conv_bias_hs_shape.
        def conver_hs_bias(a,old_shp=rbm.conv_bias_hs_shape,new_shp=self.h_bias_shape):
            f_modules,n_filters = old_shp
            n_maps, n_hs_rows, n_hs_cols = new_shp
            assert f_modules*n_filters == n_maps
            b = a.reshape(f_modules*n_filters)
            rval = numpy.zeros(new_shp)
            for filters_index in xrange(f_modules*n_filters):
                rval[filters_index,:,:]= b[filters_index]
            return rval
        h_bias_ival = conver_hs_bias(rbm.conv_bias_hs.get_value())
        self.h_bias = sharedX(h_bias_ival, 'h_bias')
        #self.h_bias = sharedX(numpy.zeros(self.h_bias_shape), 'h_bias')
        self.h_bias_fast = sharedX(numpy.zeros(self.h_bias_shape), 'h_bias_fast')
        print 'self.h_bias_shape'
        print self.h_bias_shape
        #filters
        self.filters = sharedX(rng.randn(*self.filters_shape) * filters_irange , 'filters_hs')
        self.filters_fast = sharedX(numpy.zeros(filters_shape), 'filters_fast')
        #mu
        mu_shape = self.hs_shape[1:]
        self.mu_shape = mu_shape
        #mu_ival = numpy.zeros(mu_shape,dtype=floatX) + l2_conf['mu0']
        mu_ival = conver_hs_bias(rbm.conv_mu.get_value())
        self.mu = sharedX(mu_ival, name='mu')
        self.mu_fast = sharedX(numpy.zeros(mu_shape,dtype=floatX), name='mu_fast')
        print 'mu_shape'
        print self.mu_shape
        # alpha may live in log-domain (see l2_conf['alpha_logdomain']);
        # either way it is seeded from the first layer's conv_alpha.
        if l2_conf['alpha_logdomain']:
            #alpha_ival = numpy.zeros(self.mu_shape,dtype=floatX) + numpy.log(l2_conf['alpha0'])
            alpha_ival = conver_hs_bias(rbm.conv_alpha.get_value())
            self.alpha = sharedX(alpha_ival,'alpha')
            alpha_ival_fast = numpy.zeros(self.mu_shape,dtype=floatX)
            self.alpha_fast = sharedX(alpha_ival_fast, name='alpha_fast')
        else:
            alpha_ival = conver_hs_bias(rbm.conv_alpha.get_value())
            self.alpha = sharedX(
                alpha_ival,
                'alpha')
            self.alpha_fast = sharedX(
                numpy.zeros(self.mu_shape), name='alpha_fast')
        self.l2_conf = l2_conf
        self._params = [self.filters,
                self.conv_v_bias,
                self.h_bias,
                self.mu,
                self.alpha
                ]
        self._params_fast = [self.filters_fast,
                self.conv_v_bias_fast,
                self.h_bias_fast,
                self.mu_fast,
                self.alpha_fast
                ]
        return self
    def get_filters(self,With_fast):
        # Effective filters: slow weights, plus the fast weights if requested.
        if With_fast:
            return self.filters+self.filters_fast
        else:
            return self.filters
    def get_alpha(self,With_fast):
        # Effective alpha in the natural domain (exponentiated when stored
        # in log-domain), optionally including the fast part.
        if With_fast:
            if self.l2_conf['alpha_logdomain']:
                rval = tensor.exp(self.alpha+self.alpha_fast)
                return rval
            else:
                return self.alpha+self.alpha_fast
        else:
            if self.l2_conf['alpha_logdomain']:
                rval = tensor.exp(self.alpha)
                return rval
            else:
                return self.alpha
    def get_conv_v_bias(self,With_fast):
        # Effective visible bias, optionally including the fast part.
        if With_fast:
            return self.conv_v_bias+self.conv_v_bias_fast
        else:
            return self.conv_v_bias
    def get_h_bias(self,With_fast):
        # Effective spike bias, optionally including the fast part.
        if With_fast:
            return self.h_bias+self.h_bias_fast
        else:
            return self.h_bias
    def get_mu(self,With_fast):
        # Effective slab mean, optionally including the fast part.
        if With_fast:
            return self.mu+self.mu_fast
        else:
            return self.mu
    def convdot(self,hs,filters):
        # Valid-mode convolution of the (s*h) input maps with the filters.
        return conv2d(hs,filters,
                image_shape=self.hs_shape,
                filter_shape=self.filters_shape,
                subsample=(1,1))
    def convdot_T(self, v, filters):
        # Transpose of convdot (see conv2d_transpose above).
        return conv2d_transpose(v, filters,
                self.hs_shape,
                self.filters_shape,
                (1,1))
    #####################
    # binary spike-and-slab convolutional visible units
    def mean_conv_v_given_s_h(self, s, h, With_fast):
        """Return the mean of binary-valued visible units v, given h and s
        (sigmoid of the filter response plus the visible bias)."""
        W = self.get_filters(With_fast)
        conv_v_bias = self.get_conv_v_bias(With_fast)
        shW = self.convdot(s*h, W)
        rval = nnet.sigmoid(
                tensor.add(shW, conv_v_bias))
        return rval
    #####################
    # binary spike-and-slab convolutional spike units (h given v)
    def mean_h_given_v(self, v, With_fast):
        """Mean of the binary spike units h given visibles v: sigmoid of
        0.5*alpha*(vW/alpha + mu)^2 + h_bias - 0.5*alpha*mu^2."""
        alpha = self.get_alpha(With_fast)
        mu = self.get_mu(With_fast)
        W = self.get_filters(With_fast)
        h_bias = self.get_h_bias(With_fast)
        vW = self.convdot_T(v, W)
        alpha_vW_mu = vW/alpha + mu
        rval = nnet.sigmoid(tensor.add(0.5*alpha*(alpha_vW_mu**2),h_bias,-0.5*alpha*(mu**2)))
        return rval
    #####################
    # binary spike-and-slab convolutional slab units (s given v and h)
    def mean_var_s_given_v_h(self, v, h, With_fast):
        """For efficiency, this method assumes all h variables are 1.

        Returns (mean, variance) of the Gaussian slab units s:
        mean = (vW/alpha + mu) * h, variance = 1/alpha."""
        alpha = self.get_alpha(With_fast)
        mu = self.get_mu(With_fast)
        W = self.get_filters(With_fast)
        vW = self.convdot_T(v, W)
        rval = ((vW/alpha)+mu)*h
        return rval, 1.0 / alpha
    #####################
    def gibbs_step_for_s_h(self, s, h, s_rng, return_locals=False, sampling_for_s=True, With_fast=True):
        """One Gibbs sweep starting from slab/spike states (s, h): sample v
        given (s, h), then h given v, then s given (v, h).  When
        sampling_for_s is False the slab mean is used instead of a sample.
        Returns (sample_s, sample_h), plus locals() when return_locals."""
        #positive phase
        # visible variable means
        mean_conv_v = self.mean_conv_v_given_s_h(s, h, With_fast)
        #visible samples
        sample_conv_v = tensor.cast(s_rng.uniform(size=self.out_conv_v_shape) < mean_conv_v, floatX)
        #negative phase
        # spike variable means
        mean_h = self.mean_h_given_v(sample_conv_v, With_fast)
        # spike variable samples
        sample_h = tensor.cast(s_rng.uniform(size=self.hs_shape) < mean_h, floatX)
        # slab variable means
        meanvar_s = self.mean_var_s_given_v_h(sample_conv_v,sample_h,With_fast)
        # slab variable samples
        mean_s, var_s = meanvar_s
        if sampling_for_s:
            random_normal = s_rng.normal(size=self.hs_shape)*tensor.sqrt(var_s)
            sample_s = random_normal + mean_s
        else:
            sample_s = mean_s
        if return_locals:
            return sample_s, sample_h, locals()
        else:
            return sample_s, sample_h
    def free_energy_given_s_h(self, s, h, With_fast=False):
        """Free energy of an (s, h) configuration, summed over units;
        returns a vector with one entry per example in the batch."""
        alpha = self.get_alpha(With_fast)
        mu = self.get_mu(With_fast)
        W = self.get_filters(With_fast)
        h_bias = self.get_h_bias(With_fast)
        conv_v_bias = self.get_conv_v_bias(With_fast)
        out_softplus = 0.5*alpha*(s**2) - alpha*mu*s*h + 0.5*alpha*(mu**2)*h - h_bias*h
        rval = tensor.sum(out_softplus,axis=[1,2,3]) - tensor.sum(nnet.softplus(self.convdot(s*h, W)+conv_v_bias),axis=[1,2,3])
        assert rval.ndim==1
        return rval
    def cd_updates(self, pos_s, pos_h, neg_s, neg_h, stepsizes, other_cost=None):
        """SGD updates from the contrastive free-energy gradient
        (positive phase minus negative phase).  The same gradients drive
        both the slow parameters and their *_fast companions, so
        `stepsizes` must list a rate for each (slow first, fast second)."""
        cost=(self.free_energy_given_s_h(pos_s, pos_h, With_fast=False) \
                - self.free_energy_given_s_h(neg_s, neg_h,With_fast=False)).sum()
        if other_cost:
            cost = cost + other_cost
        grads = theano.tensor.grad(cost,
                wrt=self.params(),
                consider_constant=[pos_s]+[pos_h]+[neg_s]+[neg_h])
        #print len(stepsizes),len(grads+grads)
        assert len(stepsizes)==len(grads+grads)
        if self.l2_conf['unnatural_grad']:
            sgd_updates = unnatural_sgd_updates
        else:
            sgd_updates = pylearn.gd.sgd.sgd_updates
        rval = dict(
                sgd_updates(
                    self.params()+self.params_fast(),
                    grads+grads,
                    stepsizes=stepsizes))
        return rval
    def params(self):
        # return the list of *shared* learnable parameters
        # that are, in your judgement, typically learned in this model
        return list(self._params)
    def params_fast(self):
        # return the list of *shared* learnable parameters
        # that are, in your judgement, typically learned in this model
        return list(self._params_fast)
    def save_weights_to_files(self, identifier):
        # save 4 sets of weights: (currently a no-op)
        pass
    def save_weights_to_grey_files(self, identifier):
        # save 4 sets of weights: (currently a no-op; the dead string below
        # is the disabled implementation)
        #filters_hs
        pass
    """
    def arrange_for_show(filters_hs,filters_hs_shape):
        n_filters_hs_modules, n_filters_hs_per_modules, fcolors, n_filters_hs_rows, n_filters_hs_cols = filters_hs_shape
        filters_fs_for_show = filters_hs.reshape(
                (n_filters_hs_modules*n_filters_hs_per_modules,
                fcolors,
                n_filters_hs_rows,
                n_filters_hs_cols))
        fn = theano.function([],filters_fs_for_show)
        rval = fn()
        return rval
    filters_fs_for_show = arrange_for_show(self.filters_hs, self.filters_hs_shape)
    Image.fromarray(
            tile_conv_weights(
            filters_fs_for_show,flip=False), 'L').save(
            'filters_hs_%s.png'%identifier)
    if self.conf['lambda_logdomain']:
        raise NotImplementedError()
    else:
        conv_lambda_for_show = arrange_for_show(self.conv_lambda, self.filters_hs_shape)
        Image.fromarray(
                tile_conv_weights(
                conv_lambda_for_show,flip=False), 'L').save(
                'conv_lambda_%s.png'%identifier)
    """
    def dump_to_file(self, filename):
        """Pickle the whole model to `filename` (cPickle, falling back to
        pure-Python pickle if cPickle refuses)."""
        try:
            cPickle.dump(self, open(filename, 'wb'))
        except cPickle.PicklingError:
            pickle.dump(self, open(filename, 'wb'))
class l2_Gibbs(object): # if there's a Sampler interface - this should support it
    """Persistent Gibbs-chain state (slab and spike particles) for a bRBM."""

    @classmethod
    def alloc(cls, brbm, rng):
        """Create a sampler with freshly initialised particles.

        ``rng`` may be a numpy RandomState or an integer seed; slab
        particles start from standard normals, spike particles from
        uniform {0, 1} draws.
        """
        if not hasattr(rng, 'randn'):
            rng = numpy.random.RandomState(rng)
        # Draw the Theano stream seed first so the numpy draw order is the
        # same as in the original allocation sequence.
        stream_seed = int(rng.randint(2**30))
        sampler = cls()
        sampler.brbm = brbm
        sampler.s_particles = sharedX(rng.randn(*brbm.hs_shape),
                name='s_particles')
        sampler.h_particles = sharedX(rng.randint(2, size=brbm.hs_shape),
                name='h_particles')
        sampler.s_rng = RandomStreams(stream_seed)
        return sampler
class l2_Gibbs_for_genrating(object): # if there's a Sampler interface - this should support it
    """Gibbs-chain state over the visible units, used when generating
    samples from the second layer."""

    @classmethod
    def alloc(cls, brbm, rng):
        """Create a sampler whose visible particles are uniform {0, 1} draws
        of shape ``brbm.out_conv_v_shape``.  ``rng`` may be a numpy
        RandomState or an integer seed."""
        if not hasattr(rng, 'randn'):
            rng = numpy.random.RandomState(rng)
        self = cls()
        seed=int(rng.randint(2**30))
        self.brbm = brbm
        # BUG FIX: the shape must be passed via the ``size`` keyword.  The
        # old call rng.randint(2, brbm.out_conv_v_shape) handed the shape
        # tuple to randint's ``high`` argument and raised at alloc time
        # (compare l2_Gibbs.alloc above, which already used size=).
        self.v_particles = sharedX(
                rng.randint(2, size=brbm.out_conv_v_shape),
                name='v_particles')
        self.s_rng = RandomStreams(seed)
        return self
class Trainer(object): # updates of this object implement training
    @classmethod
    def alloc(cls,
            brbm,
            s_batch,
            h_batch,
            lrdict,
            conf,
            rng=234,
            iteration_value=0,
            ):
        """Build a Trainer for the second-layer bRBM.

        brbm: the model to train; s_batch / h_batch: symbolic positive-phase
        slab and spike inputs; lrdict: maps every parameter in
        brbm.params() and brbm.params_fast() to its learning-rate shared
        variable; conf: training configuration; rng: sampler seed;
        iteration_value: starting iteration counter.
        """
        batchsize = brbm.hs_shape[0]
        sampler = l2_Gibbs.alloc(brbm, rng=rng)
        print 'alloc trainer'
        error = 0.0
        return cls(
            brbm=brbm,
            batchsize=batchsize,
            s_batch=s_batch,
            h_batch=h_batch,
            sampler=sampler,
            iteration=sharedX(iteration_value, 'iter'), #float32.....
            learn_rates = [lrdict[p] for p in brbm.params()],
            learn_rates_fast = [lrdict[p_fast] for p_fast in brbm.params_fast()],
            conf=conf,
            annealing_coef=sharedX(1.0, 'annealing_coef'),
            conv_v_means = sharedX(numpy.zeros(brbm.out_conv_v_shape[1:])+0.5,'conv_v_means'),
            conv_v = sharedX(numpy.zeros(brbm.out_conv_v_shape), 'conv_v'),
            recons_error = sharedX(error,'reconstruction_error'),
        )
def __init__(self, **kwargs):
print 'init trainer'
self.__dict__.update(kwargs)
    def updates(self):
        """Build the Theano update dictionary for one training step:
        iteration/annealing bookkeeping, running visible-mean monitor,
        PCD (SML) contrastive-divergence parameter updates, advancing the
        negative-phase Gibbs chain, weight decay on the fast parameters,
        and clipping of alpha (slow and fast) to its configured range."""
        print 'start trainer.updates'
        conf = self.conf
        ups = {}
        add_updates = lambda b: safe_update(ups,b)
        # Learning rate decays linearly to 0 over conf['train_iters'] steps.
        annealing_coef = 1.0 - self.iteration / float(conf['train_iters'])
        ups[self.iteration] = self.iteration + 1 #
        ups[self.annealing_coef] = annealing_coef
        conv_v = self.brbm.mean_conv_v_given_s_h(
                self.s_batch, self.h_batch, With_fast=False)
        # Exponential moving average of the visible means (monitoring only).
        new_conv_v_means = 0.1 * conv_v.mean(axis=0) + .9*self.conv_v_means
        ups[self.conv_v_means] = new_conv_v_means
        ups[self.conv_v] = conv_v
        #sparsity_cost = 0
        #self.sparsity_cost = sparsity_cost
        # SML updates PCD
        add_updates(
                self.brbm.cd_updates(
                    pos_s=self.s_batch,
                    pos_h=self.h_batch,
                    neg_s=self.sampler.s_particles,
                    neg_h=self.sampler.h_particles,
                    stepsizes=[annealing_coef*lr for lr in self.learn_rates]+[lr_fast for lr_fast in self.learn_rates_fast]))
        # Number of Gibbs steps per update; optionally grows by one every
        # 1000 iterations.
        if conf['increase_steps_sampling']:
            steps_sampling = self.iteration.get_value() / 1000 + conf['constant_steps_sampling']
        else:
            steps_sampling = conf['constant_steps_sampling']
        """
        if conf['chain_reset_prob']:
            # advance the 'negative-phase' chain
            nois_batch = self.sampler.s_rng.normal(size=self.rbm.v_shape)
            #steps_sampling = steps_sampling + conf['chain_reset_burn_in']
            resets = self.sampler.s_rng.uniform()<conf['chain_reset_prob']
            old_particles = tensor.switch(resets.dimshuffle('x','x','x','x'),
                    nois_batch, # reset the chain
                    self.sampler.particles, #continue chain
                    )
            #old_particles = tensor.switch(resets.dimshuffle(0,'x','x','x'),
            #        self.visible_batch, # reset the chain
            #        self.sampler.particles, #continue chain
            #        )
        else:
            old_particles = self.sampler.particles
        """
        #print steps_sampling
        # Advance the persistent negative-phase chain.
        s_tmp_particles = self.sampler.s_particles
        h_tmp_particles = self.sampler.h_particles
        for step in xrange(int(steps_sampling)):
            tmp_particles = self.brbm.gibbs_step_for_s_h(s_tmp_particles,
                    h_tmp_particles, self.sampler.s_rng,
                    sampling_for_s=conf['sampling_for_s'])
            #print tmp_particles
            s_tmp_particles, h_tmp_particles = tmp_particles
        new_s_particles = s_tmp_particles
        new_h_particles = h_tmp_particles
        recons_error = 0.0
        ups[self.recons_error] = recons_error
        ups[self.sampler.s_particles] = new_s_particles
        ups[self.sampler.h_particles] = new_h_particles
        # Clip the slow alpha update into its configured range (respecting
        # log-domain storage).
        if conf['alpha_min'] < conf['alpha_max']:
            if conf['alpha_logdomain']:
                ups[self.brbm.alpha] = tensor.clip(
                        ups[self.brbm.alpha],
                        numpy.log(conf['alpha_min']).astype(floatX),
                        numpy.log(conf['alpha_max']).astype(floatX))
            else:
                ups[self.brbm.alpha] = tensor.clip(
                        ups[self.brbm.alpha],
                        conf['alpha_min'],
                        conf['alpha_max'])
        # Decay the fast parameters toward zero (FPCD-style).
        weight_decay = numpy.asarray(conf['penalty_for_fast_parameters'], dtype=floatX)
        for p_fast in self.brbm.params_fast():
            new_p_fast = ups[p_fast]
            new_p_fast = new_p_fast - weight_decay*p_fast
            ups[p_fast] = new_p_fast
        # Clip the fast alpha update the same way as the slow one.
        if conf['alpha_min'] < conf['alpha_max']:
            if conf['alpha_logdomain']:
                ups[self.brbm.alpha_fast] = tensor.clip(
                        ups[self.brbm.alpha_fast],
                        numpy.log(conf['alpha_min']).astype(floatX),
                        numpy.log(conf['alpha_max']).astype(floatX))
            else:
                ups[self.brbm.alpha_fast] = tensor.clip(
                        ups[self.brbm.alpha_fast],
                        conf['alpha_min'],
                        conf['alpha_max'])
        return ups
    def save_weights_to_files(self, pattern='iter_%05i'):
        """No-op placeholder; the RGB-saving implementation is commented out."""
        #pattern = pattern%self.iteration.get_value()
        # save particles
        #Image.fromarray(tile_conv_weights(self.sampler.particles.get_value(borrow=True),
        #    flip=False),
        #    'RGB').save('particles_%s.png'%pattern)
        #self.rbm.save_weights_to_files(pattern)
        pass
    def save_weights_to_grey_files(self, pattern='iter_%05i'):
        """Effectively a no-op: only formats the pattern, saving is disabled.

        The greyscale particle/weight dump below is disabled inside a dead
        string literal.
        """
        pattern = pattern%self.iteration.get_value()
        # save particles
        """
        particles_for_show = self.sampler.particles.dimshuffle(3,0,1,2)
        fn = theano.function([],particles_for_show)
        particles_for_show_value = fn()
        Image.fromarray(tile_conv_weights(particles_for_show_value,
            flip=False),'L').save('particles_%s.png'%pattern)
        self.rbm.save_weights_to_grey_files(pattern)
        """
        pass
    # Dead class-level string below: leftover of an earlier implementation.
    """
    Image.fromarray(tile_conv_weights(self.sampler.particles.get_value(borrow=True),
        flip=False),'L').save('particles_%s.png'%pattern)
    self.rbm.save_weights_to_grey_files(pattern)
    """
    def print_status(self):
        """Print min/max of every parameter and particle buffer.

        print_minmax also asserts finiteness, so this doubles as a NaN/Inf
        check that aborts training on divergence.
        """
        def print_minmax(msg, x):
            # Abort loudly if any value went NaN/Inf.
            assert numpy.all(numpy.isfinite(x))
            print msg, x.min(), x.max()
        print 'iter:', self.iteration.get_value()
        print_minmax('filters', self.brbm.filters.get_value(borrow=True))
        print_minmax('filters_fast', self.brbm.filters_fast.get_value(borrow=True))
        print_minmax('h_bias', self.brbm.h_bias.get_value(borrow=True))
        print_minmax('h_bias_fast', self.brbm.h_bias_fast.get_value(borrow=True))
        print_minmax('conv_v_bias', self.brbm.conv_v_bias.get_value(borrow=True))
        print_minmax('conv_v_bias_fast', self.brbm.conv_v_bias_fast.get_value(borrow=True))
        print_minmax('mu', self.brbm.mu.get_value(borrow=True))
        print_minmax('mu_fast', self.brbm.mu_fast.get_value(borrow=True))
        if self.conf['alpha_logdomain']:
            # alpha is stored in log-domain; exponentiate for display.
            print_minmax('alpha',
                    numpy.exp(self.brbm.alpha.get_value(borrow=True)))
            print_minmax('alpha_fast',
                    numpy.exp(self.brbm.alpha_fast.get_value(borrow=True)))
        else:
            print_minmax('alpha', self.brbm.alpha.get_value(borrow=True))
            print_minmax('alpha_fast', self.brbm.alpha_fast.get_value(borrow=True))
        print_minmax('s_particles', self.sampler.s_particles.get_value())
        print_minmax('h_particles', self.sampler.h_particles.get_value())
        print_minmax('conv_v_means', self.conv_v_means.get_value())
        print_minmax('conv_v', self.conv_v.get_value())
        print (self.conv_v.get_value()).std()
        #print self.conv_h_means.get_value()[0,0:11,0:11]
        #print self.rbm.conv_bias_hs.get_value(borrow=True)[0,0,0:3,0:3]
        #print self.rbm.h_tiled_conv_mask.get_value(borrow=True)[0,32,0:3,0:3]
        #print_minmax('global_h_means', self.global_h_means.get_value())
        print 'lr annealing coef:', self.annealing_coef.get_value()
        #print 'reconstruction error:', self.recons_error.get_value()
def main_sample(layer1_filename, layer2_filename, algo='Gibbs', rng=777888, burn_in=10001, save_interval=5000, n_files=10, sampling_for_v=True):
    """Sample from the two-layer DBN and periodically save images.

    Loads both pickled RBMs, runs a Gibbs chain on the layer-2 (s, h)
    particles, projects them through the layer-1 model, and every 100
    iterations saves a tiled greyscale image of the mean samples.

    NOTE(review): algo, save_interval, n_files and sampling_for_v are
    accepted but unused in the live code path below.
    """
    rbm = cPickle.load(open(layer1_filename))
    brbm = cPickle.load(open(layer2_filename))
    sampler = l2_Gibbs.alloc(brbm, rng)
    # One symbolic Gibbs step on the layer-2 particles.
    tmp_particles = brbm.gibbs_step_for_s_h(sampler.s_particles,
            sampler.h_particles, sampler.s_rng,
            sampling_for_s=brbm.l2_conf['sampling_for_s'])
    s_tmp_particles, h_tmp_particles = tmp_particles
    n_batchsize, n_maps, n_hs_rows, n_hs_cols = brbm.hs_shape
    icount, fmodules, filters_per_module, hrows, hcols = rbm.out_conv_hs_shape
    # Layer-2 maps must match layer-1's modules x filters_per_module.
    assert n_maps==fmodules*filters_per_module
    # Reshape flat layer-2 particles to layer-1's 5-D hidden layout.
    s_particles_5d = s_tmp_particles.reshape((icount, fmodules, filters_per_module, hrows, hcols))
    h_particles_5d = h_tmp_particles.reshape((icount, fmodules, filters_per_module, hrows, hcols))
    mean_var_samples = rbm.mean_var_v_given_h_s(s_particles_5d, h_particles_5d, True)
    # Each call advances the chain (updates=) and returns (mean, var) images.
    fn = theano.function([], mean_var_samples,
            updates={sampler.s_particles: s_tmp_particles,
                sampler.h_particles: h_tmp_particles})
    for i in xrange(burn_in):
        print i
        mean_var = fn()
        mean_samples, var_samples = mean_var
        if i % 100 == 0 and i!=0:
            print 'saving'
            savename = '%s_DBNsample_burn_%04i.png'%(layer1_filename+layer2_filename,i)
            # Crop the 11-pixel border before tiling -- presumably the filter
            # margin; TODO confirm against the 11x11 filter size in HEAD.
            Image.fromarray(
                    tile_conv_weights(
                        mean_samples[:,:,11:88,11:88],
                        flip=False,scale_each=True),
                    'L').save(savename)
    # Dead string below: an earlier cross-correlation evaluation loop.
    """
    B_texture = Brodatz('../../../Brodatz/D6.gif', patch_shape=(1,98,98),
            noise_concelling=0.0, seed=3322 ,batchdata_size=1, rescale=1.0, rescale_size=2)
    shp = B_texture.test_img.shape
    img = numpy.zeros((1,)+shp)
    temp_img = numpy.asarray(B_texture.test_img, dtype='uint8')
    img[0,] = temp_img
    Image.fromarray(temp_img,'L').save('test_img.png')
    for i in xrange(burn_in):
        if i% 100 ==0:
            print i
        #savename = '%s_Large_sample_burn_%04i.png'%(filename,i)
        #tmp = particles.get_value(borrow=True)[0,0,11:363,11:363]
        #w = numpy.asarray(255 * (tmp - tmp.min()) / (tmp.max() - tmp.min() + 1e-6), dtype='uint8')
        #Image.fromarray(w,'L').save(savename)
        savename = '%s_sample_burn_%04i.png'%(filename,i)
        if i % 1000 == 0 and i!=0:
            print 'saving'
            Image.fromarray(
                    tile_conv_weights(
                        particles.get_value(borrow=True)[:,:,11:110,11:110],
                        flip=False,scale_each=True),
                    'L').save(savename)
            samples = particles.get_value(borrow=True)[:,:,11:110,11:110]
            for samples_index in xrange(n_samples):
                temp_samples = samples[samples_index,]
                #temp_samples = numpy.asarray(255 * (temp_samples - temp_samples.min()) / \
                #        (temp_samples.max() - temp_samples.min() + 1e-6), dtype='uint8')
                samples[samples_index,]= temp_samples
            CC = CrossCorrelation(img,samples,
                    window_size=19, n_patches_of_samples=1)
            aaa = CC.TSS()
            print aaa.mean(),aaa.std()
        fn()
    """
def main0(rval_doc):
    """Build the layer-2 binary RBM on top of a pickled layer-1 RBM and train it.

    rval_doc must contain key 'l2_conf' (see main_train for the expected
    sub-keys). Compiles one Theano training function and loops until the
    annealing coefficient reaches zero, periodically printing status,
    dumping pickles, and saving weight images.
    """
    l2_conf = rval_doc['l2_conf']
    rbm = cPickle.load(open(l2_conf['rbm_pkl']))
    sampler = Gibbs.alloc(rbm, rng=33345)
    batchsize, n_img_channels, \
            n_img_rows, n_img_cols = rbm.v_shape
    batch_idx = tensor.iscalar()
    batch_range = batch_idx*batchsize + numpy.arange(batchsize)
    # Brodatz texture patches as the visible data for layer 1.
    batch_x = Brodatz_op(batch_range,
            l2_conf['dataset'], # download from http://www.ux.uis.no/~tranden/brodatz.html
            patch_shape=rbm.v_shape[1:],
            noise_concelling=0.,
            seed=3322,
            batchdata_size=rbm.v_shape[0],
            rescale=1.0,
            rescale_size=[rbm.conf['data_rescale'],]
            )
    # Layer-2 RBM sees layer-1's (modules*filters) maps as its channels.
    brbm = bRBM.alloc(
            l2_conf,
            hs_shape=(
                rbm.out_conv_hs_shape[0],
                rbm.out_conv_hs_shape[1]*rbm.out_conv_hs_shape[2],
                rbm.out_conv_hs_shape[3],
                rbm.out_conv_hs_shape[4]
                ),
            filters_shape=(
                l2_conf['n_filters'],
                rbm.out_conv_hs_shape[1]*rbm.out_conv_hs_shape[2],
                l2_conf['filters_size'],
                l2_conf['filters_size']
                ), #fmodules(stride) x filters_per_modules x fcolors(channels) x frows x fcols
            filters_irange=l2_conf['filters_irange'],
            rbm=rbm,
            )
    brbm.save_weights_to_grey_files('layer2_iter_0000')
    base_lr = l2_conf['base_lr_per_example']/batchsize
    conv_lr_coef = l2_conf['conv_lr_coef']
    # Layer-2 training data: layer-1 posterior means given the image batch.
    h_mean = rbm.mean_convhs_h_given_v(batch_x, With_fast=False)
    s_mean_var = rbm.mean_var_convhs_s_given_v(batch_x, With_fast=False)
    s_mean, s_var = s_mean_var
    batchsize, fmodules, filters_per_module, hrows, hcols = rbm.out_conv_hs_shape
    # Fast weights enabled: fast parameters learn at the same base rates;
    # disabled: their learning rates are pinned to 0.0.
    if l2_conf['fast_weights']:
        trainer = Trainer.alloc(
            brbm,
            s_batch=s_mean.reshape((batchsize, fmodules*filters_per_module, hrows, hcols)),
            h_batch=h_mean.reshape((batchsize, fmodules*filters_per_module, hrows, hcols)),
            lrdict={
                brbm.filters: sharedX(conv_lr_coef*base_lr, 'filters_lr'),
                brbm.conv_v_bias: sharedX(base_lr, 'conv_v_bias_lr'),
                brbm.h_bias: sharedX(base_lr, 'h_bias_lr'),
                brbm.mu: sharedX(base_lr, 'mu_lr'),
                brbm.alpha: sharedX(base_lr, 'alpha_lr'),
                brbm.filters_fast: sharedX(conv_lr_coef*base_lr, 'filters_fast_lr'),
                brbm.conv_v_bias_fast: sharedX(base_lr, 'conv_v_bias_fast_lr'),
                brbm.h_bias_fast: sharedX(base_lr, 'h_bias_fast_lr'),
                brbm.mu_fast: sharedX(base_lr, 'conv_mu_fast_lr'),
                brbm.alpha_fast: sharedX(base_lr, 'conv_alpha_fast_lr')
                },
            conf = l2_conf,
            )
    else:
        trainer = Trainer.alloc(
            brbm,
            s_batch=s_mean.reshape((batchsize, fmodules*filters_per_module, hrows, hcols)),
            h_batch=h_mean.reshape((batchsize, fmodules*filters_per_module, hrows, hcols)),
            lrdict={
                brbm.filters: sharedX(conv_lr_coef*base_lr, 'filters_lr'),
                brbm.conv_v_bias: sharedX(base_lr, 'conv_v_bias_lr'),
                brbm.h_bias: sharedX(base_lr, 'h_bias_lr'),
                brbm.mu: sharedX(base_lr, 'mu_lr'),
                brbm.alpha: sharedX(base_lr, 'alpha_lr'),
                brbm.filters_fast: sharedX(0.0, 'filters_fast_lr'),
                brbm.conv_v_bias_fast: sharedX(0.0, 'conv_v_bias_fast_lr'),
                brbm.h_bias_fast: sharedX(0.0, 'h_bias_fast_lr'),
                brbm.mu_fast: sharedX(0.0, 'conv_mu_fast_lr'),
                brbm.alpha_fast: sharedX(0.0, 'conv_alpha_fast_lr')
                },
            conf = l2_conf,
            )
    print 'start building function'
    training_updates = trainer.updates() #
    train_fn = theano.function(inputs=[batch_idx],
            outputs=[],
            #mode='FAST_COMPILE',
            #mode='DEBUG_MODE',
            updates=training_updates
            ) #
    print 'training the second layer...'
    iter = 0
    # annealing_coef hits 0 after conf['train_iters'] iterations.
    while trainer.annealing_coef.get_value()>=0: #
        dummy = train_fn(iter) #
        if iter % 10 == 0:
            trainer.print_status()
        if iter % 1000 == 0:
            brbm.dump_to_file(os.path.join(_temp_data_path_,'brbm_%06i.pkl'%iter))
        # Save often early on, then every 1000 iterations.
        if iter <= 1000 and not (iter % 100): #
            trainer.print_status()
            trainer.save_weights_to_grey_files()
        elif not (iter % 1000):
            trainer.print_status()
            trainer.save_weights_to_grey_files()
        iter += 1
def main_train():
    """Entry point for 'train': run main0 with the hard-coded layer-2 config."""
    print 'start main_train'
    main0(dict(
        l2_conf=dict(
            dataset='../../Brodatz/D6.gif',
            rbm_pkl='./rbm_040000.pkl',
            #chain_reset_prob=0.0,#reset for approximately every 1000 iterations #we need scan for the burn in loop
            #chain_reset_iterations=100
            #chain_reset_burn_in=0,
            unnatural_grad=False,
            alpha_logdomain=False,
            alpha0=10.,
            alpha_min=1.,
            alpha_max=1000.,
            mu0 = 1.0,
            train_iters=40000,
            base_lr_per_example=0.00001,
            conv_lr_coef=1.0,
            n_filters=64,
            filters_size=2,
            filters_irange=.001,
            #sparsity_weight_conv=0,#numpy.float32(500),
            #sparsity_weight_global=0.,
            particles_min=-1000.,
            particles_max=1000.,
            constant_steps_sampling = 1,
            increase_steps_sampling = False,
            sampling_for_s=True,
            penalty_for_fast_parameters = 0.1,
            fast_weights = False
            )))
if __name__ == '__main__':
    # Dispatch on the first CLI argument. Guard against missing arguments:
    # the original raised a bare IndexError when run with no mode argument.
    if len(sys.argv) < 2:
        sys.exit('usage: %s train | sampling <layer1_pkl> <layer2_pkl>' % sys.argv[0])
    if sys.argv[1] == 'train':
        sys.exit(main_train())
    if sys.argv[1] == 'sampling':
        if len(sys.argv) < 4:
            sys.exit('usage: %s sampling <layer1_pkl> <layer2_pkl>' % sys.argv[0])
        sys.exit(main_sample(sys.argv[2],sys.argv[3]))
| {"/test_NCC.py": ["/CrossCorrelation.py"]} |
65,828 | luoheng/TCssrbm | refs/heads/master | /protocol_.py | """Convenience base classes to help with writing Dataset ops
To randomly generate data, we use
try:
x = self.x_
except:
x = self.fn(*self.fn_args)
"""
__docformat__ = "restructuredtext_en"
import numpy
import theano
class Dataset(theano.Op):
    """Map an integer index to one dataset element.

    A scalar index yields a single element of ``single_type``; a vector of
    indices yields a minibatch of ``batch_type``.
    """
    def __init__(self, single_type, batch_type):
        self.single_type = single_type
        self.batch_type = batch_type
    def make_node(self, idx):
        sym_idx = theano.tensor.as_tensor_variable(idx)
        if not sym_idx.dtype.startswith('int'):
            raise TypeError()
        if sym_idx.ndim == 0:
            # scalar index -> one example at a time
            out_type = self.single_type
        elif sym_idx.ndim == 1:
            # vector of indices -> many examples at a time
            out_type = self.batch_type
        else:
            raise TypeError(idx)
        return theano.Apply(self, [sym_idx], [out_type()])
    def __eq__(self, other):
        if type(self) != type(other):
            return False
        return (self.single_type == other.single_type
                and self.batch_type == other.batch_type)
    def __hash__(self):
        return hash(type(self)) ^ hash(self.single_type) ^ hash(self.batch_type)
    def __str__(self):
        return "%s{%s,%s}" % (self.__class__.__name__, self.single_type, self.batch_type)
    def grad(self, inputs, g_outputs):
        # Dataset lookup is not differentiable with respect to the index.
        return [None for i in inputs]
class TensorDataset(Dataset):
    """Base class for Datasets whose elements all share one TensorType.

    The minibatch type is derived from the single-element type by prepending
    one non-broadcastable batch axis.
    """
    def __init__(self, dtype, single_broadcastable, single_shape=None, batch_size=None):
        self.single_shape = single_shape
        self.batch_size = batch_size
        one_bcast = tuple(single_broadcastable)
        one_type = theano.tensor.Tensor(
                broadcastable=one_bcast,
                dtype=dtype)
        many_type = theano.tensor.Tensor(
                broadcastable=(False,) + one_type.broadcastable,
                dtype=dtype)
        super(TensorDataset, self).__init__(one_type, many_type)
    def __eq__(self, other):
        if not super(TensorDataset, self).__eq__(other):
            return False
        return (self.single_shape == other.single_shape
                and self.batch_size == other.batch_size)
    def __hash__(self):
        return (super(TensorDataset, self).__hash__()
                ^ hash(self.single_shape)
                ^ hash(self.batch_size))
class TensorFnDataset(TensorDataset):
"""A good base class for TensorDatasets that are backed by indexed objects.
E.g. numpy ndarrays and memmaps.
This Op looks up the dataset by a function call, rather than by storing it
as a member variable. This is done to make the graph serializable without
having to save the dataset itself, which is typically large.
This Op is picklable if (and only if) the function that accesses the dataset
can be serialized.
"""
def __init__(self, dtype, bcast, fn, single_shape=None, batch_size=None):
"""
:type fn: callable or (callable, args) tuple [MUST BE PICKLABLE!]
:param fn: function that returns the dataset as a ndarray-like object.
:type bcast: tuple of bool
:param bcast: the broadcastable flag for the return value if this op is
indexed by a scalar (the one example case) A (False,) will be
pre-pended to this pattern when the Op is indexed by a vector.
"""
super(TensorFnDataset, self).__init__(dtype, bcast, single_shape, batch_size)
try:
self.fn, self.fn_args = fn
except:
self.fn, self.fn_args = fn, ()
def __getstate__(self):
rval = dict(self.__dict__)
if 'x_' in rval:
del rval['x_']
return rval
def __eq__(self, other):
return super(TensorFnDataset, self).__eq__(other) and self.fn == other.fn \
and self.fn_args == other.fn_args
def __hash__(self):
return (super(TensorFnDataset, self).__hash__()
^ hash(self.fn)
^ hash(self.fn_args))
def __str__(self):
try:
return "%s{%s,%s}" % (self.__class__.__name__, self.fn.__name__, self.fn_args)
except:
return "%s{%s}" % (self.__class__.__name__, self.fn, self.fn_args)
def perform(self, node, (idx,), (z,)):
try:
x = self.x_
except:
x = self.fn(*self.fn_args)
if idx.ndim == 0:
z[0] = numpy.asarray(x[int(idx)]) # asarray is important for memmaps
else:
z[0] = numpy.asarray(x[idx]) # asarray is important for memmaps
| {"/test_NCC.py": ["/CrossCorrelation.py"]} |
65,834 | guzhijun369/test3 | refs/heads/master | /utomarket/release_ad.py | from utomarket.skip_util import *
from utomarket.util import *
from selenium.webdriver.common.keys import Keys
from .personal_center_util import select_mode
# browser.find_element_by_xpath("//span[contains(text(),'获取验证码')]/..")
def release_ad_o(browser, country='中国', currency='人民币', transaction_type='固定价格', floating_ratio='99',
                 trading_price='20000',
                 payment_method='支付宝', transaction_volume='2', min_volume='100', max_volume='10,000',
                 payment_limit='10', trading_term=''):
    """Fill in and submit the "publish advertisement" form, then verify.

    `transaction_type` is either fixed price ('固定价格', uses trading_price)
    or floating price ('浮动价格', uses floating_ratio). After submission the
    function asserts the resulting URL and the last table row's price range
    and ad type.
    """
    explicit_wait(browser, 'VOEL', ["//span[contains(text(),'广告规则')]", 'xpath'])
    time.sleep(2)
    # Locate every form control up front, before any interaction.
    country_btn = browser.find_element_by_xpath("//div[contains(text(),'请选择所在地')]/..")
    currency_btn = browser.find_element_by_xpath("//div[contains(text(),'请选择交易币种')]/..")
    transaction_type_button = browser.find_element_by_xpath("//div[contains(text(),'固定价格')]/..")
    trading_price_input = browser.find_element_by_xpath("//input[@placeholder='CNY/BTC']")
    payment_method_input = browser.find_element_by_xpath("//div[contains(text(),'请选择交易方式')]/..")
    transaction_volume_input = browser.find_element_by_id('max_count')
    min_volume_input = browser.find_element_by_id('min_volume')
    max_volume_input = browser.find_element_by_id('max_volume')
    payment_limit_input = browser.find_element_by_id('payment_limit')
    trading_term_input = browser.find_element_by_id('trading_term')
    release_btn = browser.find_element_by_xpath("//span[contains(text(),'确认发布')]/..")
    country_btn.click()
    select_mode(browser, country)
    currency_btn.click()
    select_mode(browser, currency)
    transaction_type_button.click()
    select_mode(browser, transaction_type)
    if transaction_type == "固定价格":
        trading_price_input.send_keys(trading_price)
    else:
        # Floating price: clear the 3 pre-filled ratio characters first.
        floating_ratio_btn = browser.find_element_by_id('trading_price_ratio')
        floating_ratio_btn.click()
        for _ in range(1, 4):
            floating_ratio_btn.send_keys(Keys.BACK_SPACE)
        floating_ratio_btn.send_keys(floating_ratio)
    payment_method_input.click()
    select_mode(browser, payment_method)
    transaction_volume_input.send_keys(transaction_volume)
    min_volume_input.send_keys(min_volume)
    # NOTE(review): '1' is prepended here, so '10,000' is typed as '110,000',
    # yet the assertion below compares against the un-prefixed max_volume.
    # Presumably the field strips/normalizes input -- TODO confirm.
    max_volume_input.send_keys('1' + max_volume)
    payment_limit_input.send_keys(payment_limit)
    trading_term_input.send_keys(trading_term)
    release_btn.click()
    popup_o(browser, "操作成功")
    # Verify: redirected to "my ads", and the newest (last) row shows the
    # expected price range and transaction type.
    url = get_current_url(browser)
    url_true = 'https://dev.utomarket.com:9094/#/ad/my'
    list_transaction_type = browser.find_elements_by_class_name('ant-table-row-level-0')[-1]
    ad_btn = list_transaction_type.find_elements_by_tag_name("td")
    price_range_td = ad_btn[5]
    ad_type_td = ad_btn[2]
    # span_text = ad_type_td.find_elements_by_tag_name("span")[0].text
    transaction_type_text = ad_type_td.find_elements_by_tag_name("span")[1].text
    price_range_text = price_range_td.find_element_by_tag_name("span").text
    aa = '%s - %s CNY' % (min_volume, max_volume)
    assert url == url_true
    assert price_range_text == aa
    assert transaction_type_text == '(%s)' % transaction_type
def remove_ad_o(browser):
    """Take every advertisement offline on the "my ads" page.

    Clicks each green take-down link in turn, waiting for the success popup
    after each click.
    """
    explicit_wait(browser, 'VOEL', ["//span[contains(text(),'广告类型')]", 'xpath'])
    remove_btns = browser.find_elements_by_class_name('text-green')
    for item in remove_btns:
        item.click()
        # Bug fix: popup_o takes the browser handle as its first argument
        # (release_ad_o in this module calls popup_o(browser, ...)); the
        # original call omitted it and raised a TypeError at runtime.
        popup_o(browser, "操作成功")
        time.sleep(2)
def delete_ad_o(browser):
    """Delete every advertisement, confirming the dialog for each one."""
    delete_btns = browser.find_elements_by_xpath("//span[contains(text(),'删除')]/..")
    for item in delete_btns:
        item.click()
        # Bug fix: the original used find_elements_by_xpath (plural), which
        # returns a list; list.click() raised AttributeError. Use the
        # singular locator to get a clickable element.
        confirm_btn = browser.find_element_by_xpath("//span[text()='确认']/..")
        confirm_btn.click()
        # Bug fix: popup_o takes the browser handle as its first argument
        # (matches release_ad_o's usage in this module).
        popup_o(browser, "操作成功")
        time.sleep(2)
| {"/utomarket/release_ad.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/tests/test_release_ad.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_user_info.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/login_util.py": ["/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/register_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py", "/utomarket/get_code_util.py"], "/tests/test_logout.py": ["/utomarket/utomarket.py"], "/tests/test_util.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/logout_util.py": ["/utomarket/util.py", "/utomarket/skip_util.py"], "/utomarket/personal_center_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_register.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_top_notice.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/skip_util.py": ["/utomarket/util.py"], "/utomarket/transaction.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/utomarket/wallet_util.py": ["/utomarket/util.py"], "/tests/test_wallet.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/tests/test_transaction.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_login.py": ["/utomarket/utomarket.py", "/utomarket/util.py"], "/utomarket/utomarket.py": ["/utomarket/login_util.py", "/utomarket/wallet_util.py", "/utomarket/logout_util.py", "/utomarket/release_ad.py", "/utomarket/register_util.py", "/utomarket/personal_center_util.py", "/utomarket/transaction.py"]} |
65,835 | guzhijun369/test3 | refs/heads/master | /tests/test_release_ad.py | from utomarket.utomarket import Utomarket
from utomarket.util import explicit_wait, get_current_url
from utomarket.settings import Settings
import time
def test_release_buy_ad():
    """Publish a BUY advertisement with a floating price."""
    market = Utomarket('test129', '3201')
    session = market.login()
    session.ad_btn('购买').release_ad(transaction_type='浮动价格')
    market.browser.close()
def test_release_shell_ad():
    """Publish a SELL advertisement with a fixed price."""
    ad_params = dict(
        transaction_type='固定价格',
        payment_method='微信支付',
        transaction_volume='1.8887',
        min_volume='100',
        max_volume='20,000',
        payment_limit='11',
        trading_term='这是自动化的出售广告的交易条款',
    )
    market = Utomarket('test129', '3201')
    market.login().ad_btn('出售').release_ad(**ad_params)
    market.browser.close()
# def test_delete_ad(): # 清空账号广告
# ins = Utomarket('test129', '3201')
# ins.login().enter_menu('广告管理').remove_ad().delete_ad() | {"/utomarket/release_ad.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/tests/test_release_ad.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_user_info.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/login_util.py": ["/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/register_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py", "/utomarket/get_code_util.py"], "/tests/test_logout.py": ["/utomarket/utomarket.py"], "/tests/test_util.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/logout_util.py": ["/utomarket/util.py", "/utomarket/skip_util.py"], "/utomarket/personal_center_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_register.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_top_notice.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/skip_util.py": ["/utomarket/util.py"], "/utomarket/transaction.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/utomarket/wallet_util.py": ["/utomarket/util.py"], "/tests/test_wallet.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/tests/test_transaction.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_login.py": ["/utomarket/utomarket.py", "/utomarket/util.py"], "/utomarket/utomarket.py": ["/utomarket/login_util.py", "/utomarket/wallet_util.py", "/utomarket/logout_util.py", "/utomarket/release_ad.py", "/utomarket/register_util.py", "/utomarket/personal_center_util.py", "/utomarket/transaction.py"]} |
65,836 | guzhijun369/test3 | refs/heads/master | /tests/test_user_info.py | from utomarket.util import explicit_wait
from utomarket.utomarket import Utomarket
import time
from utomarket.util import is_exist_element, navigator_to, get_current_url
def test_change_mail():
    """Change the bound email address via the personal center."""
    market = Utomarket('uitest7', '3201')
    center = market.login().my_center('个人中心')
    center.change_mail('284468321@qq.com', '3201')
    market.browser.close()
def test_band_telephone():
    """Bind a phone number via the personal center."""
    market = Utomarket('uitest7', '3201')
    center = market.login().my_center('个人中心')
    center.band_telephone('13028857899', '3201', '中国')
    market.browser.close()
def test_upload_avatar():
    """Upload a profile avatar via the personal center."""
    market = Utomarket('uitest7', '3201')
    market.login().my_center('个人中心').upload_avatar()
    market.browser.close()
def test_change_telephone():
    """Change the bound phone number via the personal center."""
    market = Utomarket('uitest7', '3201')
    center = market.login().my_center('个人中心')
    center.change_telephone('13028718489', '3201', '中国')
    market.browser.close()
def test_band_google():
    """Bind the Google authenticator, then unbind it again."""
    market = Utomarket('uitest7', '3201')
    center = market.login().my_center('个人中心')
    center.band_google('3201').stop_google('3201')
    market.browser.close()
def test_change_pw():
    """Change the password twice: verified by email, then by phone."""
    market = Utomarket('uitest7', '3201')
    center = market.login().my_center('个人中心')
    center.change_pw('3201', '3201', 'q5310543', '邮箱')
    center.change_pw('3201', '3201', 'q5310543', '手机')
    market.browser.close()
def test_auth():
    """Complete C1 then C2 identity verification and check the shown level."""
    market = Utomarket('uitest7', '3201')
    center = market.login().my_center('个人中心')
    center.auth_c1('谷志军', '431081199103136091').auth_c2().my_center("个人中心")
    level_span = market.browser.find_element_by_xpath("//span[contains(text(),'认证等级')]/..")
    assert level_span.text == '认证等级: C2'
    market.browser.close()
def test_add_alipay():
    """Add all five payment methods, verify the add button is gone, then
    delete each payment method again."""
    market = Utomarket('uitest7', '3201')
    center = market.login().my_center('个人中心')
    center.payment_alipay('支付宝', '谷志军', '13028812388', 'fesdffds.jpg')
    center.payment_alipay('PayTm', '这是paytm', '13027788555812388', 'fesdffds.jpg')
    center.payment_alipay('微信支付', '这是微信支付', '13027788812388', 'fesdffds.jpg')
    center.payment_western_union(
        "西联汇款", "谷志军",
        "这是西联付款详细信息大理石科技发达"
        "时间里凯迪拉克撒娇大厦就说的是肯定撒爱撒"
        "娇的萨克拉萨大家快来啊圣诞快乐就打算离开")
    center.payment_bank("银行卡", "谷志军", "兴业银行高新园支行", "55467843454546545")
    # The "add new payment method" button disappears once all are added.
    assert not is_exist_element(market.browser, "xpath",
                                "//span[contains(text(),'添加新的支付方式')]/..")
    for _ in range(5):
        time.sleep(2)
        market.delete_payment()
    market.browser.close()
def test_others_information_nologin():
    """View another user's profile while not logged in."""
    market = Utomarket('uitest7', '3201')
    navigator_to(market.browser, 'https://dev.utomarket.com:9094')
    market.others_information()
    market.browser.close()
def test_others_information():
    """View another user's profile while logged in and check the URL."""
    market = Utomarket('uitest7', '3201')
    market.login().others_information()
    current = get_current_url(market.browser)
    assert 'https://dev.utomarket.com:9094/#/personage' in current
    market.browser.close()
def test_band_google_login():
    """Bind Google 2FA, log out, log back in with the code, then unbind."""
    market = Utomarket('uitest7', '3201')
    bound = market.login().my_center('个人中心').band_google('3201')
    bound.logout('退出登录').login_two().my_center('个人中心').stop_google('3201')
    market.browser.close()
65,837 | guzhijun369/test3 | refs/heads/master | /utomarket/login_util.py | from selenium.webdriver.common.action_chains import ActionChains
# from utomarket.personal_center_util import band_google_o
from utomarket.util import *
from utomarket.settings import Settings
def login_user(browser, username, password, logger):
    """Open the login page, fill in the credential form and submit it.

    `browser` is a selenium WebDriver; `logger` is a logging-like object
    also handed to explicit_wait.
    """
    logger.debug('开始登陆,用户名:{},密码:{}'.format(username, password))
    navigator_to(browser, Settings.login_url)
    submit_xpath = '//*[@id="root"]/div/div/div[2]/div[1]/div[2]/div/form/div[3]/div/div/span/button'
    account_field = browser.find_element_by_id('account')
    password_field = browser.find_element_by_id('password')
    submit_button = browser.find_element_by_xpath(submit_xpath)
    # Wait for the submit button to be visible before typing.
    explicit_wait(browser, "VOEL", [submit_xpath, "xpath"], logger)
    account_field.send_keys(username)
    password_field.send_keys(password)
    time.sleep(2)
    submit_button.click()
def get_track(distance):
    """Generate a list of per-step pixel moves covering `distance`.

    Simulates human-like slider motion: accelerate (a=2) for the first
    three quarters of the distance, then decelerate (a=-3), sampling every
    0.2 "seconds". Each entry is the rounded displacement of one step.
    """
    moves = []
    travelled = 0
    threshold = distance * 3 / 4
    interval = 0.2
    speed = 0
    while travelled < distance:
        accel = 2 if travelled < threshold else -3
        start_speed = speed
        speed = start_speed + accel * interval
        step = start_speed * interval + 1 / 2 * accel * interval * interval
        travelled += step
        moves.append(round(step))
    return moves
def sliding_verification_o(browser):
    """Solve the Tencent slider captcha inside its iframe by trial and error.

    Drags the thumb along a generated track; if the captcha reports a
    mismatch, the target distance is increased by `offset` pixels and the
    drag is retried until it succeeds. Loops forever on persistent failure.
    """
    explicit_wait(browser, 'VOEL', ["tcaptcha_iframe", 'id'])
    iframe = browser.find_element_by_id("tcaptcha_iframe")
    browser.switch_to.frame(iframe)
    time.sleep(3)
    explicit_wait(browser, 'VOEL', ["tcaptcha_drag_thumb", 'id'])
    # Initial guess for the gap distance; adjusted by `offset` per retry.
    distance = 185
    offset = 6
    times = 0  # retry counter (incremented but otherwise unused)
    slide_btn = browser.find_element_by_id('tcaptcha_drag_thumb')
    while True:
        action = ActionChains(browser)  # instantiate an action object
        action.click_and_hold(slide_btn).perform()  # perform() executes the stored actions
        action.reset_actions()
        # Build the movement track; the first 6 micro-steps are skipped.
        track = get_track(distance)[6:]
        print(sum(track))
        for i in track:
            action.move_by_offset(xoffset=i, yoffset=0).perform()
            action.reset_actions()
        time.sleep(0.5)
        ActionChains(browser).release().perform()
        time.sleep(3)
        try:
            # The note element only exists when the captcha rejected the drag.
            alert = browser.find_element_by_id('tcaptcha_note').text
        except Exception as e:
            print('get alert error: %s' % e)
            alert=''
        if alert:
            # Mismatch: nudge the target distance and try again.
            print('滑块位移需要调整: %s' % alert)
            distance += offset
            times += 1
            time.sleep(5)
        else:
            print('滑块验证通过')
            # NOTE(review): switch_to_default_content() is the deprecated
            # spelling of switch_to.default_content().
            browser.switch_to_default_content()  # jump back to the top-level page on success
            break
def login_user_two(browser, username, password, logger):
    """Log in on the variant form that also asks for an image captcha and a
    Google 2FA code (both hard-coded to '3201' on the test environment).
    """
    logger.debug('开始登陆,用户名:{},密码:{}'.format(username, password))
    submit_xpath = '//*[@id="root"]/div/div[1]/div[2]/div/form/div[4]/div/div/span/button'
    account_field = browser.find_element_by_id('account')
    password_field = browser.find_element_by_id('password')
    captcha_field = browser.find_element_by_id('captcha')
    submit_button = browser.find_element_by_xpath(submit_xpath)
    explicit_wait(browser, "VOEL", [submit_xpath, "xpath"], logger)
    account_field.send_keys(username)
    password_field.send_keys(password)
    captcha_field.send_keys('3201')
    submit_button.click()
    # Second stage: the Google-code dialog.
    explicit_wait(browser, "VOEL", ["//span[contains(text(),'确定')]", "xpath"])
    time.sleep(2)
    code_field = browser.find_element_by_id("code")
    print(code_field, '---------------------')
    confirm_button = browser.find_element_by_xpath("//span[contains(text(),'确定')]//..")
    code_field.send_keys('3201')
    confirm_button.click()
    time.sleep(3)
# def google_login(browser):
# explicit_wait(browser, "VOEL", ['ant-modal-header', "xpath"])
# google_input = browser.find_element_by_xpath('//input[@placeholder="谷歌验证码"]')
# submit_btn = browser.find_element_by_xpath("//span[contains(text(),'确定')]//..")
# google_input.send_keys('3201')
# submit_btn.click()
# explicit_wait(browser, 'TC', 'P2P交易 - 乌托市场(TEST)')
# assert get_page_title(browser) == 'P2P交易 - 乌托市场(TEST)'
# time.sleep(5)
#
#
#
# def register_user(email, username, password, country, invite_code=None):
# pass
| {"/utomarket/release_ad.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/tests/test_release_ad.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_user_info.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/login_util.py": ["/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/register_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py", "/utomarket/get_code_util.py"], "/tests/test_logout.py": ["/utomarket/utomarket.py"], "/tests/test_util.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/logout_util.py": ["/utomarket/util.py", "/utomarket/skip_util.py"], "/utomarket/personal_center_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_register.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_top_notice.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/skip_util.py": ["/utomarket/util.py"], "/utomarket/transaction.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/utomarket/wallet_util.py": ["/utomarket/util.py"], "/tests/test_wallet.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/tests/test_transaction.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_login.py": ["/utomarket/utomarket.py", "/utomarket/util.py"], "/utomarket/utomarket.py": ["/utomarket/login_util.py", "/utomarket/wallet_util.py", "/utomarket/logout_util.py", "/utomarket/release_ad.py", "/utomarket/register_util.py", "/utomarket/personal_center_util.py", "/utomarket/transaction.py"]} |
65,838 | guzhijun369/test3 | refs/heads/master | /utomarket/register_util.py | from selenium.webdriver.common.action_chains import ActionChains
from utomarket.skip_util import select_country
from utomarket.util import *
from utomarket.settings import Settings
import time
from utomarket.get_code_util import *
def register_test(browser, email, code, username, pass_wd, country, invitation_code=None):
    """Fill in and submit the sign-up form.

    browser         : selenium WebDriver (any page; we navigate first).
    email / code / username / pass_wd / country : form field values.
    invitation_code : optional invite code; the field is left empty when None.
    """
    navigator_to(browser, Settings.login_url)  # open the site
    sign_btn = browser.find_element_by_xpath("//a[contains(text(),'注册账户')]")  # "register account" link
    sign_btn.click()
    time.sleep(3)  # wait for the registration form to render
    input_email = browser.find_element_by_id('email')  # email input
    input_code = browser.find_element_by_id('verify_code')  # verification-code input
    get_code_btn = browser.find_element_by_xpath(
        '//*[@id="root"]/div/div[1]/div[2]/form/div[2]/div[2]/div/span/div/div[2]/button')  # "send code" button
    input_username = browser.find_element_by_id('nickname')  # nickname input
    input_pass_wd = browser.find_element_by_id('password')  # password input
    input_pass_wd_two = browser.find_element_by_id('confirm')  # confirm-password input
    input_country = browser.find_element_by_class_name('ant-select-selection--single')  # country select
    input_invitation_code = browser.find_element_by_id('invite_code')  # invite-code input
    tick = browser.find_element_by_class_name('ant-checkbox-input')  # terms checkbox
    register_btn = browser.find_element_by_xpath("//span[contains(text(),'注 册')]/..")  # register button
    input_email.send_keys(email)
    get_code_btn.click()
    time.sleep(1)
    get_code(browser, code)  # confirm the image-captcha popup
    time.sleep(2)
    input_code.send_keys(code)
    input_username.send_keys(username)
    input_pass_wd.send_keys(pass_wd)
    input_pass_wd_two.send_keys(pass_wd)
    input_country.click()
    select_country(browser, country)
    if invitation_code is not None:
        # fixed: send_keys(None) raises in selenium — only type when a code was given
        input_invitation_code.send_keys(invitation_code)
    tick.click()
    register_btn.click()
def forget_pw(browser, verification_mode, account, code, new_pw, country='中国'):
    """Run the "forgot password" flow and set *new_pw*.

    verification_mode : 'email' or 'phone' — which channel to verify through.
    account           : the email address or phone number for that channel.
    code              : verification code (also reused for the image captcha).
    country           : dial-code country, used only in phone mode.
    """
    navigator_to(browser, Settings.login_url)  # open the site
    forget_btn = browser.find_element_by_link_text("忘记密码")  # "forgot password" link
    forget_btn.click()
    get_code_btn = browser.find_element_by_xpath("//span[contains(text(),'获取验证码')]/..")  # "send code" button
    input_country = browser.find_element_by_class_name('ant-select-selection--single')  # verification-mode select
    input_country.click()
    options = browser.find_elements_by_class_name('ant-select-dropdown-menu-item')
    if verification_mode == 'email':  # verify via email
        options[1].click()  # second dropdown entry is the email mode
        time.sleep(1)
        input_email = browser.find_element_by_id('mail')  # email input
        input_email.send_keys(account)
    elif verification_mode == 'phone':  # verify via phone
        options[0].click()  # first dropdown entry is the phone mode
        time.sleep(1)
        country_btn = browser.find_element_by_xpath('//*[@id="nation_code"]/div/div')
        country_btn.click()
        time.sleep(1)
        country_text = browser.find_elements_by_class_name('ant-select-dropdown-menu-item')  # country list
        for item in country_text:
            if item.text == country:
                item.click()
                # fixed: break — the dropdown closes after the click and the
                # remaining list entries go stale
                break
        input_phone = browser.find_element_by_id('phone')  # phone input
        input_phone.send_keys(account)
    get_code_btn.click()
    captcha_xpaths = browser.find_elements_by_css_selector('input[placeholder="验证码"]')  # captcha inputs
    captcha_xpaths[1].send_keys(code)  # second "验证码" box belongs to the captcha popup
    confirm_btn = browser.find_elements_by_css_selector('button[type="submit"]')[1]  # popup confirm button
    confirm_btn.click()
    time.sleep(2)
    input_code = browser.find_element_by_id('code')  # verification-code input
    input_code.send_keys(code)
    next_btn = browser.find_element_by_xpath("//span[text()='下一步']/..")  # "next" button
    next_btn.click()
    time.sleep(1)
    input_new_pw = browser.find_element_by_id('new_password')  # new-password input
    input_confirm_pw = browser.find_element_by_id('confirm')  # confirm-password input
    input_new_pw.send_keys(new_pw)
    input_confirm_pw.send_keys(new_pw)
    submit_btn = browser.find_element_by_xpath(
        '//*[@id="root"]/div/div[1]/div[2]/form/div[3]/div/div/span/button')  # submit button
    submit_btn.click()
| {"/utomarket/release_ad.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/tests/test_release_ad.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_user_info.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/login_util.py": ["/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/register_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py", "/utomarket/get_code_util.py"], "/tests/test_logout.py": ["/utomarket/utomarket.py"], "/tests/test_util.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/logout_util.py": ["/utomarket/util.py", "/utomarket/skip_util.py"], "/utomarket/personal_center_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_register.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_top_notice.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/skip_util.py": ["/utomarket/util.py"], "/utomarket/transaction.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/utomarket/wallet_util.py": ["/utomarket/util.py"], "/tests/test_wallet.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/tests/test_transaction.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_login.py": ["/utomarket/utomarket.py", "/utomarket/util.py"], "/utomarket/utomarket.py": ["/utomarket/login_util.py", "/utomarket/wallet_util.py", "/utomarket/logout_util.py", "/utomarket/release_ad.py", "/utomarket/register_util.py", "/utomarket/personal_center_util.py", "/utomarket/transaction.py"]} |
65,839 | guzhijun369/test3 | refs/heads/master | /utomarket/get_code_util.py |
def get_code(browser, code):
    """Type *code* into the image-captcha box and confirm the popup dialog."""
    captcha_input = browser.find_element_by_id('code')
    captcha_input.send_keys(code)
    submit_buttons = browser.find_elements_by_css_selector('button[type="submit"]')
    submit_buttons[1].click()  # the second submit button belongs to the popup
65,840 | guzhijun369/test3 | refs/heads/master | /tests/test_logout.py | from utomarket.utomarket import Utomarket
import time
def test_logout():
    """Log in, log out, and verify we land back on the login page."""
    login_page = 'https://dev.utomarket.com:9094/#/user/login'
    market = Utomarket('test129', '3201')
    market.login()
    time.sleep(3)
    market.logout('退出登录')
    assert market.get_url() == login_page
    market.browser.close()
| {"/utomarket/release_ad.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/tests/test_release_ad.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_user_info.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/login_util.py": ["/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/register_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py", "/utomarket/get_code_util.py"], "/tests/test_logout.py": ["/utomarket/utomarket.py"], "/tests/test_util.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/logout_util.py": ["/utomarket/util.py", "/utomarket/skip_util.py"], "/utomarket/personal_center_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_register.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_top_notice.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/skip_util.py": ["/utomarket/util.py"], "/utomarket/transaction.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/utomarket/wallet_util.py": ["/utomarket/util.py"], "/tests/test_wallet.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/tests/test_transaction.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_login.py": ["/utomarket/utomarket.py", "/utomarket/util.py"], "/utomarket/utomarket.py": ["/utomarket/login_util.py", "/utomarket/wallet_util.py", "/utomarket/logout_util.py", "/utomarket/release_ad.py", "/utomarket/register_util.py", "/utomarket/personal_center_util.py", "/utomarket/transaction.py"]} |
65,841 | guzhijun369/test3 | refs/heads/master | /tests/test_util.py | from utomarket.utomarket import Utomarket
from utomarket.util import *
from utomarket.settings import Settings
import time
def test_get_current_url():
    """After login the browser should still report the login URL."""
    market = Utomarket('test129', '3201')
    market.login()
    assert get_current_url(market.browser) == Settings.login_url
    time.sleep(2)
    market.browser.close()
def test_reload_webpage():
    """Reload the page several times after logging in (smoke test)."""
    market = Utomarket('test129', '3201')
    market.login()
    for _attempt in range(3):
        reload_webpage(market.browser)
    time.sleep(2)
    market.browser.close()
def test_get_page_title():
    """The page title after login should be the site's Chinese title."""
    expected_title = '乌托市场(TEST)'
    market = Utomarket('test129', '3201')
    market.login()
    assert market.browser.title == expected_title
    time.sleep(2)
    market.browser.close()
def test_center():
    """Open the personal-center page and verify the resulting URL."""
    expected = 'https://dev.utomarket.com:9094/#/user-center/index'
    market = Utomarket('test129', '3201')
    market.login()
    market.my_center('个人中心')
    assert get_current_url(market.browser) == expected
    time.sleep(2)
    market.browser.close()
def test_order():
    """Open "my orders" from the user menu and verify the URL."""
    expected = 'https://dev.utomarket.com:9094/#/order/my'
    market = Utomarket('test129', '3201')
    market.login()
    market.my_center('我的订单')
    assert get_current_url(market.browser) == expected
    time.sleep(2)
    market.browser.close()
def test_buy_btn():
    """Navigate to the "购买" (buy) tab from the top menu."""
    market = Utomarket('test129', '3201')
    market.login()
    market.enter_menu('购买')
    time.sleep(2)
    market.browser.close()
def test_sell_btn():
    """Navigate to the "出售" (sell) tab from the top menu."""
    market = Utomarket('test129', '3201')
    market.login()
    market.enter_menu('出售')
    time.sleep(2)
    market.browser.close()
def test_ad_home_btn():
    """Navigate to the "广告管理" (ad management) tab from the top menu."""
    market = Utomarket('test129', '3201')
    market.login()
    market.enter_menu('广告管理')
    time.sleep(2)
    market.browser.close()
def test_switch_language():
    """Switch the UI language to English."""
    market = Utomarket('test129', '3201')
    market.login().switch_language("English")
    market.browser.close()
def test_ad_screen():
    """Apply the "all" filters (country / currency / payment) on the home page."""
    market = Utomarket('test129', '3201')
    market.login().ad_screen("全部国家", "全部币种", "全部支付方式")
    market.browser.close()
def test_cut_type():
    """Switch the home-page ad list to the "出售" (sell) tab."""
    market = Utomarket('test129', '3201')
    market.login().cut_type("出售")
    market.browser.close()
65,842 | guzhijun369/test3 | refs/heads/master | /utomarket/logout_util.py |
from utomarket.util import *
from .skip_util import *
def logout_o(browser, logger, menu):
    """Log out through the user menu, then wait for the login button to reappear."""
    login_button_xpath = '//*[@id="root"]/div/div[1]/div[2]/div/form/div[4]/div/div/span/button'
    logger.debug("退出登录")
    user_info(browser, menu)  # click the *menu* entry under the user dropdown
    explicit_wait(browser, "VOEL", [login_button_xpath, "xpath"], logger)
| {"/utomarket/release_ad.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/tests/test_release_ad.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_user_info.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/login_util.py": ["/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/register_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py", "/utomarket/get_code_util.py"], "/tests/test_logout.py": ["/utomarket/utomarket.py"], "/tests/test_util.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/logout_util.py": ["/utomarket/util.py", "/utomarket/skip_util.py"], "/utomarket/personal_center_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_register.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_top_notice.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/skip_util.py": ["/utomarket/util.py"], "/utomarket/transaction.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/utomarket/wallet_util.py": ["/utomarket/util.py"], "/tests/test_wallet.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/tests/test_transaction.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_login.py": ["/utomarket/utomarket.py", "/utomarket/util.py"], "/utomarket/utomarket.py": ["/utomarket/login_util.py", "/utomarket/wallet_util.py", "/utomarket/logout_util.py", "/utomarket/release_ad.py", "/utomarket/register_util.py", "/utomarket/personal_center_util.py", "/utomarket/transaction.py"]} |
65,843 | guzhijun369/test3 | refs/heads/master | /utomarket/personal_center_util.py | from selenium.webdriver.common.action_chains import ActionChains
from pymouse import PyMouse
from pykeyboard import PyKeyboard
from random import choice
import time
from utomarket.skip_util import *
from utomarket.util import *
from utomarket.settings import Settings
def upload_avatar_o(browser):
    """Upload a new avatar via the native file dialog and expect the success toast."""
    upload_btns = browser.find_elements_by_class_name('ant-btn')
    upload_btn = upload_btns[0]  # the first 'ant-btn' on the page opens the file chooser
    upload_btn.click()
    upload('20180917154817.JPEG')  # util helper that drives the OS file dialog
    # NOTE(review): a duplicate PyKeyboard-based upload sequence was commented out
    # here; it replicated what util.upload() already does.
    popup_o(browser, '修改头像成功')  # wait for the "avatar changed" toast
def get_code(browser, code, btn):
    """Request a verification code, type *code*, then press the *btn* step button.

    browser : selenium WebDriver on a page showing the code widget.
    code    : verification code to type.
    btn     : visible text of the button to press afterwards (e.g. '下一步').
    """
    get_code_btn = browser.find_element_by_xpath("//span[contains(text(),'获取验证码')]/..")
    code_input = browser.find_element_by_xpath("//input[@placeholder='验证码']")
    next_btn = browser.find_element_by_xpath("//span[contains(text(),'%s')]//.." % btn)
    get_code_btn.click()
    popup_o(browser, '发送成功')  # wait for the "sent" toast
    code_input.send_keys(code)
    time.sleep(4)
    next_btn.click()
def select_mode(browser, method):
    """Pick the entry whose text equals *method* from the open ant-design dropdown."""
    time.sleep(1)
    entries = browser.find_elements_by_class_name('ant-select-dropdown-menu-item')
    chosen = next((entry for entry in entries if entry.text == method), None)
    if chosen is not None:
        chosen.click()
        time.sleep(1)
def change_mail_o(browser, email, code):
    """Change the bound email: verify the old address with *code*, then bind *email*.

    Assumes the security-settings page is already open — TODO confirm with callers.
    """
    change_btn = browser.find_element_by_xpath(
        '//*[@id="root"]/div/div/div/div[2]/div/div[2]/div/div[1]/div[2]/div[2]/ul')
    change_btn.click()
    explicit_wait(browser, 'VOEL', ['getCaptcha___3e6Ch', 'class'])  # wait for the code widget
    get_code(browser, code, '下一步')  # verify ownership of the current address
    explicit_wait(browser, 'VOEL', ['ant-input', 'class'])
    new_email_input = browser.find_element_by_id('email')
    new_email_input.send_keys(email)
    get_code(browser, code, '下一步')  # confirm the new address
    popup_o(browser, '操作成功')  # wait for the success toast
    time.sleep(5)
def band_telephone_o(browser, phone, code, country):
    """Bind a phone number: pick the *country* dial code, type *phone*, confirm
    with the SMS *code*.

    Assumes the security-settings page is already open — TODO confirm with callers.
    """
    bind_btn = browser.find_element_by_xpath(
        '//*[@id="root"]/div/div/div/div[2]/div/div[2]/div/div[1]/div[2]/div[3]/ul')
    bind_btn.click()
    explicit_wait(browser, 'VOEL', ['ant-modal-title', 'class'])  # wait for the modal
    country_btn = browser.find_element_by_class_name('ant-select-selection__rendered')
    country_btn.click()
    time.sleep(1)
    select_country(browser, country)  # skip_util helper: pick *country* in the dropdown
    input_phone = browser.find_element_by_id('telephone')
    input_phone.send_keys(phone)
    get_code(browser, code, '下一步')  # request + enter the SMS code, press "next"
    popup_o(browser, '操作成功')  # wait for the success toast
def change_telephone_o(browser, phone, code, country):
    """Replace the bound phone number: verify the old one with *code*, then bind
    the new *phone* under the chosen *country* dial code."""
    change_btn = browser.find_element_by_xpath(
        '//*[@id="root"]/div/div/div/div[2]/div/div[2]/div/div[1]/div[2]/div[3]/ul')
    change_btn.click()
    explicit_wait(browser, 'VOEL', ['ant-modal-title', 'class'])  # wait for the modal
    get_code(browser, code, '下一步')  # verify ownership of the current number
    explicit_wait(browser, 'VOEL', ['ant-select-selection__rendered', 'class'])
    change_country_btn = browser.find_element_by_class_name('ant-select-selection__rendered')
    change_country_btn.click()
    time.sleep(1)
    select_country(browser, country)  # skip_util helper: pick *country* in the dropdown
    input_phone = browser.find_element_by_id('telephone')
    input_phone.send_keys(phone)
    get_code(browser, code, '下一步')  # confirm the new number
    popup_o(browser, '操作成功')  # wait for the success toast
def band_google_o(browser, code):
    """Enable Google 2FA: copy the secret, enter *code*, accept the terms and
    submit; finally assert the browser is back on the user-center page."""
    change_btn = browser.find_element_by_xpath(
        '//*[@id="root"]/div/div/div/div[2]/div/div[2]/div/div[1]/div[2]/div[4]/ul')
    change_btn.click()
    explicit_wait(browser, 'VOEL', ['ant-card-head-title', 'class'])  # wait for the card
    copy_btn = browser.find_element_by_class_name('anticon-copy')  # "copy secret" icon
    input_code = browser.find_element_by_id('captcha')  # Google-code input
    tick = browser.find_element_by_xpath("//input[@type='checkbox']//..")  # terms checkbox
    start_using_btn = browser.find_element_by_xpath("//button[@type='submit']")
    copy_btn.click()
    popup_o(browser, '复制成功')  # wait for the "copied" toast
    input_code.send_keys(code)
    tick.click()
    time.sleep(3)
    start_using_btn.click()
    popup_o(browser, '操作成功')  # wait for the success toast
    url = get_current_url(browser)
    assert url == 'https://dev.utomarket.com:9094/#/user-center/index'
def stop_google_o(browser, code):
    """Disable Google 2FA, confirming the action with the Google *code*."""
    trigger = browser.find_element_by_xpath(
        '//*[@id="root"]/div/div/div/div[2]/div/div[2]/div/div[1]/div[2]/div[4]/ul')
    trigger.click()
    explicit_wait(browser, 'VOEL', ['ant-modal-title', 'class'])
    code_field = browser.find_element_by_xpath("//input[@placeholder='谷歌验证码']")
    code_field.send_keys(code)
    browser.find_element_by_xpath("//button[@type='submit']").click()
    popup_o(browser, '操作成功')
def change_pw_o(browser, code, old_pw, new_pw, method):
    """Change the login password.

    method : verification channel label shown in the dropdown (substring match).
    code   : verification code for that channel.
    """
    change_btn = browser.find_element_by_xpath(
        '//*[@id="root"]/div/div/div/div[2]/div/div[2]/div/div[1]/div[2]/div[5]/ul')
    change_btn.click()
    explicit_wait(browser, 'VOEL', ['ant-modal-title', 'class'])  # wait for the modal
    method_btn = browser.find_element_by_xpath("//div[@role='combobox']")
    method_btn.click()
    time.sleep(1)
    options = browser.find_elements_by_class_name('ant-select-dropdown-menu-item')
    # pick the first dropdown entry whose text contains *method*
    for item in options:
        if method in item.text:
            item.click()
            break
    time.sleep(2)
    get_code(browser, code, '确定')  # request + enter the code, press "confirm"
    explicit_wait(browser, 'VOEL', ["//input[@id='old_password']", 'xpath'])
    old_input = browser.find_element_by_xpath("//input[@id='old_password']")
    new_input = browser.find_element_by_xpath("//input[@id='password']")
    confirm_input = browser.find_element_by_xpath("//input[@id='confirm']")
    next_btn = browser.find_element_by_xpath("//span[contains(text(),'下一步')]/..")
    old_input.send_keys(old_pw)
    new_input.send_keys(new_pw)
    confirm_input.send_keys(new_pw)
    next_btn.click()
    popup_o(browser, '操作成功')  # wait for the success toast
    time.sleep(2)
def auth_c1_o(browser, name, id_number):
    """Start level-1 identity verification: submit real name and ID number."""
    browser.find_element_by_xpath("//span[contains(text(),'立即认证')]/..").click()
    explicit_wait(browser, 'VOEL', ["//input[@id='name']", 'xpath'])
    assert get_current_url(browser) == 'https://dev.utomarket.com:9094/#/authentication'
    fields = {'name': name, 'number': id_number}
    for field_id, value in fields.items():
        browser.find_element_by_id(field_id).send_keys(value)
    browser.find_element_by_xpath("//span[contains(text(),'提交审核')]/..").click()
def auth_c2_o(browser):
    """Level-2 identity verification: upload both ID photos, submit, then check
    that the QR code for the next verification step renders inside its iframe."""
    explicit_wait(browser, 'VOEL', ["anticon-check", 'class'])
    upload_btns = browser.find_elements_by_class_name('ant-form-item-control-wrapper')
    upload_btns[0].click()
    upload('0642.png')  # first photo upload
    time.sleep(4)
    upload_btns[1].click()
    upload('160704.PNG')  # second photo upload
    time.sleep(4)
    submit_btn = browser.find_element_by_class_name('ant-btn-primary')
    submit_btn.click()
    time.sleep(15)  # the QR iframe takes a long time to appear
    browser.implicitly_wait(20)
    iframe = browser.find_element_by_tag_name("iframe")
    # fixed: switch_to_frame/switch_to_default_content are deprecated selenium APIs
    browser.switch_to.frame(iframe)
    time.sleep(5)
    explicit_wait(browser, 'VOEL', ["jr-qrcode", 'id'])
    qr_code = browser.find_element_by_id('jr-qrcode')
    # find_element raises when the element is missing, so this assert is a
    # belt-and-braces check; use identity comparison per PEP 8
    assert qr_code is not None
    browser.switch_to.default_content()
def payment_alipay_o(browser, method, name, account, receipt_code):  # shared by Alipay / WeChat / paytm
    """Add a QR-code based payment method (Alipay, WeChat or paytm share this form).

    method       : payment-method label to pick in the dropdown.
    receipt_code : filename of the receipt QR-code image handed to upload().
    """
    add_btn = browser.find_element_by_xpath("//span[contains(text(),'添加新的支付方式')]/..")
    add_btn.click()
    time.sleep(1)
    choose_input = browser.find_element_by_xpath("//div[contains(text(),'请选择支付方式')]/..")
    choose_input.click()
    time.sleep(2)
    select_mode(browser, method)  # pick the payment method in the dropdown
    explicit_wait(browser, 'VOEL', ['payment_detail.name', 'id'])
    name_input = browser.find_element_by_id('payment_detail.name')
    account_input = browser.find_element_by_id('payment_detail.account')
    code_btn = browser.find_element_by_class_name('ant-upload-drag')  # receipt-code drop zone
    confirm_btn = browser.find_element_by_xpath("//span[contains(text(),'确定')]/..")
    name_input.send_keys(name)
    account_input.send_keys(account)
    code_btn.click()
    upload(receipt_code)  # util helper that drives the OS file dialog
    time.sleep(4)
    confirm_btn.click()
    popup_o(browser, '操作成功')  # wait for the success toast
    time.sleep(2)
def payment_bank_o(browser, method, name, account, card_number):
    """Add a bank-card payment method (account holder, bank name, card number)."""
    add_btn = browser.find_element_by_xpath("//span[contains(text(),'添加新的支付方式')]/..")
    add_btn.click()
    time.sleep(1)
    choose_input = browser.find_element_by_xpath("//div[contains(text(),'请选择支付方式')]/..")
    choose_input.click()
    time.sleep(2)
    select_mode(browser, method)  # pick the payment method in the dropdown
    explicit_wait(browser, 'VOEL', ['payment_detail.name', 'id'])
    name_input = browser.find_element_by_id('payment_detail.name')
    account_input = browser.find_element_by_id('payment_detail.bank_name')
    card_number_input = browser.find_element_by_id('payment_detail.bank_account')
    confirm_btn = browser.find_element_by_xpath("//span[contains(text(),'确定')]/..")
    name_input.send_keys(name)
    account_input.send_keys(account)
    card_number_input.send_keys(card_number)
    confirm_btn.click()
    popup_o(browser, '操作成功')  # wait for the success toast
    time.sleep(2)
def payment_western_union_o(browser, method, name, transfer_information):
    """Add a Western-Union style payment method (name + free-form transfer info)."""
    add_btn = browser.find_element_by_xpath("//span[contains(text(),'添加新的支付方式')]/..")
    add_btn.click()
    time.sleep(1)
    choose_input = browser.find_element_by_xpath("//div[contains(text(),'请选择支付方式')]/..")
    choose_input.click()
    time.sleep(2)
    select_mode(browser, method)  # pick the payment method in the dropdown
    explicit_wait(browser, 'VOEL', ['payment_detail.name', 'id'])
    name_input = browser.find_element_by_id('payment_detail.name')
    confirm_btn = browser.find_element_by_xpath("//span[contains(text(),'确定')]/..")
    transfer_information_input = browser.find_element_by_id('payment_detail.account')
    name_input.send_keys(name)
    transfer_information_input.send_keys(transfer_information)
    confirm_btn.click()
    popup_o(browser, '操作成功')  # wait for the success toast
    time.sleep(2)
def delete_payment_o(browser):
    """Delete one randomly chosen saved payment method and confirm the dialog."""
    delete_btns = browser.find_elements_by_class_name('text-red')
    choice(delete_btns).click()  # any of the red "delete" links will do
    confirm_btn = browser.find_element_by_xpath("//span[contains(text(),'确 定')]/..")
    time.sleep(1)
    confirm_btn.click()
    popup_o(browser, '操作成功')  # wait for the success toast
def others_information_o(browser):
    """Open a random other user's profile page (any "#/personage" link)."""
    profile_links = browser.find_elements_by_xpath('//a[contains(@href,"#/personage")]')
    choice(profile_links).click()
| {"/utomarket/release_ad.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/tests/test_release_ad.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_user_info.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/login_util.py": ["/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/register_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py", "/utomarket/get_code_util.py"], "/tests/test_logout.py": ["/utomarket/utomarket.py"], "/tests/test_util.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/logout_util.py": ["/utomarket/util.py", "/utomarket/skip_util.py"], "/utomarket/personal_center_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_register.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_top_notice.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/skip_util.py": ["/utomarket/util.py"], "/utomarket/transaction.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/utomarket/wallet_util.py": ["/utomarket/util.py"], "/tests/test_wallet.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/tests/test_transaction.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_login.py": ["/utomarket/utomarket.py", "/utomarket/util.py"], "/utomarket/utomarket.py": ["/utomarket/login_util.py", "/utomarket/wallet_util.py", "/utomarket/logout_util.py", "/utomarket/release_ad.py", "/utomarket/register_util.py", "/utomarket/personal_center_util.py", "/utomarket/transaction.py"]} |
65,844 | guzhijun369/test3 | refs/heads/master | /tests/test_register.py | from utomarket.utomarket import Utomarket
from utomarket.util import explicit_wait, get_current_url
from utomarket.settings import Settings
import time
def test_register():
    """Register a new account and check the success banner text."""
    username = 'uitest7'
    pw = 'q5310543'
    ins = Utomarket(username, pw)
    email = 'testui7@qq.com'
    ins.register(email, '3201', '中国', '3201')
    # fixed: the xpath had a stray leading '[' which made it invalid
    explicit_wait(ins.browser, 'VOEL', ["//button[@type='button']", 'xpath'])
    btn = ins.browser.find_element_by_xpath("//div[contains(text(),'注册成功')]").text
    assert btn == '你的账户:%s 注册成功' % email
    ins.browser.close()
def test_forget():  # forgotten-password flow
    """Reset the password via both channels (email, phone) and verify the
    success page's "log in" and "home" buttons navigate correctly."""
    ins = Utomarket()
    accounts = [('email', '281878321@qq.com'),
                ('phone', '13028812388')
                ]
    code = '3201'
    pass_wd = 'q5310543'
    for way in accounts:
        ins.forget(way[0], way[1], code, pass_wd)
        explicit_wait(ins.browser, 'VOEL', ['title___17w4b', 'class'])  # success title
        txt = ins.browser.find_element_by_class_name('title___17w4b').text
        assert txt == '密码修改成功'
        # "log in" button should take us to the login page
        login_btn = ins.browser.find_element_by_xpath('//*[@id="root"]/div/div[1]/div[2]/div[3]/div/a[1]/button')
        login_btn.click()
        assert get_current_url(ins.browser) == Settings.login_url
        ins.browser.back()
        # "home" button should take us to the site root
        home_btn = ins.browser.find_element_by_xpath('//*[@id="root"]/div/div[1]/div[2]/div[3]/div/a[2]/button')
        home_btn.click()
        url = 'https://dev.utomarket.com:9094/#/'
        url_true = get_current_url(ins.browser)
        assert url in url_true
    ins.browser.close()
| {"/utomarket/release_ad.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/tests/test_release_ad.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_user_info.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/login_util.py": ["/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/register_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py", "/utomarket/get_code_util.py"], "/tests/test_logout.py": ["/utomarket/utomarket.py"], "/tests/test_util.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/logout_util.py": ["/utomarket/util.py", "/utomarket/skip_util.py"], "/utomarket/personal_center_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_register.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_top_notice.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/skip_util.py": ["/utomarket/util.py"], "/utomarket/transaction.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/utomarket/wallet_util.py": ["/utomarket/util.py"], "/tests/test_wallet.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/tests/test_transaction.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_login.py": ["/utomarket/utomarket.py", "/utomarket/util.py"], "/utomarket/utomarket.py": ["/utomarket/login_util.py", "/utomarket/wallet_util.py", "/utomarket/logout_util.py", "/utomarket/release_ad.py", "/utomarket/register_util.py", "/utomarket/personal_center_util.py", "/utomarket/transaction.py"]} |
65,845 | guzhijun369/test3 | refs/heads/master | /tests/test_top_notice.py | from utomarket.util import explicit_wait
from utomarket.utomarket import Utomarket
def test_top_notice():
    """Open the pinned (top) notice from the home page."""
    market = Utomarket('test129', '3201')
    market.login().top_notice_internal()
    market.browser.close()
65,846 | guzhijun369/test3 | refs/heads/master | /utomarket/settings.py | import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
class Settings:
log_location = os.path.join(BASE_DIR, 'logs')
browser_language = 'zh-CN'
loggers = {}
login_url = 'https://dev.utomarket.com:9094/#/user/login' | {"/utomarket/release_ad.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/tests/test_release_ad.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_user_info.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/login_util.py": ["/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/register_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py", "/utomarket/get_code_util.py"], "/tests/test_logout.py": ["/utomarket/utomarket.py"], "/tests/test_util.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/logout_util.py": ["/utomarket/util.py", "/utomarket/skip_util.py"], "/utomarket/personal_center_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_register.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_top_notice.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/skip_util.py": ["/utomarket/util.py"], "/utomarket/transaction.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/utomarket/wallet_util.py": ["/utomarket/util.py"], "/tests/test_wallet.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/tests/test_transaction.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_login.py": ["/utomarket/utomarket.py", "/utomarket/util.py"], "/utomarket/utomarket.py": ["/utomarket/login_util.py", "/utomarket/wallet_util.py", "/utomarket/logout_util.py", "/utomarket/release_ad.py", "/utomarket/register_util.py", "/utomarket/personal_center_util.py", "/utomarket/transaction.py"]} |
65,847 | guzhijun369/test3 | refs/heads/master | /utomarket/util.py | import time
import random
from contextlib import contextmanager
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as ec
from selenium.webdriver.common.by import By
from selenium.common.exceptions import WebDriverException, TimeoutException
from pymouse import PyMouse
from pykeyboard import PyKeyboard
def get_current_url(browser):
    """Return the browser's current URL, or None if it cannot be read.

    Tries JavaScript first, then the driver attribute, swallowing
    WebDriverException at each step.
    """
    try:
        return browser.execute_script("return window.location.href")
    except WebDriverException:
        pass
    try:
        return browser.current_url
    except WebDriverException:
        return None
def explicit_wait(browser, track, ec_params, logger=None, timeout=30):
    """Explicitly wait until a condition holds; return False on timeout.

    track selects the condition:
      "VOEL" -- visibility of element located; ec_params = [locator, method]
                where method is 'xpath', 'css', 'id' or 'class'.
      "TC"   -- page title contains ec_params[0].
      "PFL"  -- page fully loaded (document.readyState).

    Returns the value produced by WebDriverWait.until on success, False on
    timeout, unsupported find method, or unknown track.
    """
    if not isinstance(ec_params, list):
        ec_params = [ec_params]
    if track == "VOEL":
        elem_address, find_method = ec_params
        ec_name = "visibility of element located"
        m = {
            'xpath': By.XPATH,
            'css': By.CSS_SELECTOR,
            'id': By.ID,
            'class': By.CLASS_NAME,
        }
        find_by = m.get(find_method)
        if not find_by:
            return False
        locator = (find_by, elem_address)
        condition = ec.visibility_of_element_located(locator)
    elif track == "TC":
        expect_in_title = ec_params[0]
        ec_name = "title contains '{}'".format(expect_in_title)
        condition = ec.title_contains(expect_in_title)
    elif track == "PFL":
        ec_name = "page fully loaded"
        # BUG FIX: the original tested membership in ["complete" or "loaded"],
        # which evaluates to ["complete"], so "loaded" never matched.
        condition = (lambda browser:
                     browser.execute_script("return document.readyState")
                     in ("complete", "loaded"))
    else:
        # Unknown track: previously this fell through and raised NameError
        # on 'condition'; fail explicitly instead.
        return False
    try:
        wait = WebDriverWait(browser, timeout)
        result = wait.until(condition)
    except TimeoutException:
        if logger:
            logger.info("timeout with failure while explicitly waiting until {}!".format(ec_name))
        return False
    return result
# def explicit_wait_not(browser, track, ec_params, logger=None, timeout=30):#等待
# if not isinstance(ec_params, list):
# ec_params = [ec_params]
#
# if track == "VOEL":
# elem_address, find_method = ec_params
# ec_name = "visibility of element located"
#
# find_by = (By.XPATH if find_method == "xpath" else
# By.CSS_SELECTOR if find_method == "css" else
# By.CLASS_NAME if find_method == "id" else
# By.ID)
# locator = (find_by, elem_address)
# condition = ec.visibility_of_element_located(locator)
# try:
# wait = WebDriverWait(browser, timeout)
# result = wait.until_not(condition)
# except TimeoutException:
# if logger:
# logger.info("timeout with failure while explicitly waiting until {}!".format(ec_name))
#
# return False
#
# return result
# def find(browser, track, ec_params):
# if track == 'class':
# pass
# elif track == 'xpath':
# pass
# elif track == 'id':
# pass
# elif track == 'tag_name':
def reload_webpage(browser):
    """Reload the current page via JavaScript."""
    script = "location.reload()"
    browser.execute_script(script)
def get_page_title(browser, logger=None):
    """Best-effort retrieval of the page title; returns None on failure.

    Waits for the page to finish loading, then tries (in order) the driver
    attribute, document.title, and the <title> tag text.
    """
    explicit_wait(browser, "PFL", [], logger, 10)
    getters = (
        lambda: browser.title,
        lambda: browser.execute_script("return document.title"),
        lambda: browser.execute_script(
            "return document.getElementsByTagName('title')[0].text"),
    )
    for getter in getters:
        try:
            return getter()
        except WebDriverException:
            continue
    if logger:
        logger.info("Unable to find the title of the page")
    return None
def smart_input(element, text):
    """Type *text* into *element* one key at a time, pausing 0.2s between
    keystrokes to mimic human input."""
    delay = 0.2
    for character in text:
        element.send_keys(character)
        time.sleep(delay)
def click_element(browser, element, try_times=0):
    """Click *element*, retrying with different scroll strategies on failure.

    Each failed attempt scrolls differently (element into view, page top,
    page bottom); after three failures the click is forced through
    JavaScript and the function returns.
    """
    while True:
        try:
            element.click()
            return
        except Exception:
            if try_times == 0:
                browser.execute_script("document.getElementsByClassName('" + element.get_attribute(
                    "class") + "')[0].scrollIntoView({ inline: 'center' });")
            elif try_times == 1:
                browser.execute_script("window.scrollTo(0,0);")
            elif try_times == 2:
                browser.execute_script("window.scrollTo(0,document.body.scrollHeight);")
            else:
                # last resort: dispatch the click from JavaScript
                browser.execute_script(
                    "document.getElementsByClassName('" + element.get_attribute("class") + "')[0].click()")
                return
            time.sleep(1)
            try_times += 1
@contextmanager
def new_tab(browser):
    # Context manager: open a new browser tab, switch to it for the
    # duration of the with-block, then close it and switch back to the
    # original tab. The finally block runs even if the body raises.
    try:
        browser.execute_script("window.open()")
        time.sleep(2)
        # window_handles[1] is the freshly opened tab
        browser.switch_to.window(browser.window_handles[1])
        time.sleep(1)
        yield
    finally:
        browser.execute_script("window.close()")
        time.sleep(1)
        # return focus to the original tab
        browser.switch_to.window(browser.window_handles[0])
        time.sleep(1)
time.sleep(1)
def navigator_to(browser, url):
    """Navigate the (maximized) browser to *url*, retrying up to 3 times
    on page-load timeouts.

    Trailing slashes are stripped from both URLs so an equivalent current
    location skips the navigation entirely.
    """
    browser.maximize_window()
    current_url = get_current_url(browser)
    if current_url and current_url.endswith('/'):
        current_url = current_url[:-1]
    if url and url.endswith('/'):
        url = url[:-1]
    attempts = 0
    if current_url is None or current_url != url:
        while True:
            try:
                browser.get(url)
                return
            except TimeoutException as exc:
                if attempts >= 3:
                    raise TimeoutException("Retried {} times to GET '{}' webpage "
                                           "but failed out of a timeout!\n\t{}".format(attempts, str(url), str(exc)))
                attempts += 1
                time.sleep(1)
def is_exist_element(browser, track, keyword):
    """Return True iff exactly one element matches *keyword* located by
    *track* ('class', 'css', 'xpath' or 'id'); False otherwise (including
    unsupported track values and multiple matches)."""
    if track == 'class':
        matches = browser.find_elements_by_class_name(keyword)
    elif track == 'css':
        matches = browser.find_elements_by_css_selector(keyword)
    elif track == 'xpath':
        matches = browser.find_elements_by_xpath(keyword)
    elif track == 'id':
        matches = browser.find_elements_by_id(keyword)
    else:
        return False
    return len(matches) == 1
def upload(file_name, path=r'C:\Users\Administrator\Desktop\ui\photos'):
    # Drive the native file-open dialog with pykeyboard: type the folder
    # path, press Enter to open it, then type the file name(s) and press
    # Enter again to confirm. The sleeps give the OS dialog time to react;
    # the exact order and timing of key events matters.
    kk = PyKeyboard()  # instantiate the keyboard controller
    time.sleep(1)
    kk.tap_key(kk.shift_key)  # switch IME to English; adjust if not needed
    # kk.tap_key(kk.shift_key)
    time.sleep(4)
    kk.type_string(path)  # open the directory holding the file(s)
    time.sleep(1)
    kk.tap_key(kk.enter_key)
    time.sleep(1)
    kk.type_string(file_name)  # file name(s) to upload
    time.sleep(1)
    kk.tap_key(kk.enter_key)
| {"/utomarket/release_ad.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/tests/test_release_ad.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_user_info.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/login_util.py": ["/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/register_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py", "/utomarket/get_code_util.py"], "/tests/test_logout.py": ["/utomarket/utomarket.py"], "/tests/test_util.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/logout_util.py": ["/utomarket/util.py", "/utomarket/skip_util.py"], "/utomarket/personal_center_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_register.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_top_notice.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/skip_util.py": ["/utomarket/util.py"], "/utomarket/transaction.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/utomarket/wallet_util.py": ["/utomarket/util.py"], "/tests/test_wallet.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/tests/test_transaction.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_login.py": ["/utomarket/utomarket.py", "/utomarket/util.py"], "/utomarket/utomarket.py": ["/utomarket/login_util.py", "/utomarket/wallet_util.py", "/utomarket/logout_util.py", "/utomarket/release_ad.py", "/utomarket/register_util.py", "/utomarket/personal_center_util.py", "/utomarket/transaction.py"]} |
65,848 | guzhijun369/test3 | refs/heads/master | /utomarket/skip_util.py | from utomarket.util import *
# from utomarket.logout_util import *
from selenium.webdriver.common.action_chains import ActionChains
import re
def user_info(browser, menu):
    """Hover over the avatar to reveal the user dropdown, then click the
    entry whose text contains *menu*."""
    avatar = browser.find_element_by_xpath('//*[@id="root"]/div/div/div/div[1]/div/div/span[1]/span[4]/span')
    ActionChains(browser).move_to_element(avatar).perform()
    time.sleep(2)
    entry = browser.find_element_by_xpath("//span[contains(text(),'%s')]/.." % menu)
    entry.click()
def ad_management(browser, menu):
    """Hover over the ad-center menu ('广告中心') and click the sub-menu
    entry whose text equals *menu*."""
    time.sleep(2)
    hover_target = browser.find_element_by_xpath('//div[@title="广告中心"]/..')
    ActionChains(browser).move_to_element(hover_target).perform()
    time.sleep(2)
    entry = browser.find_element_by_xpath("//span[text()='%s']/../.." % menu)
    entry.click()
def enter_menu_o(browser, menu):
    """Open the ad-management sub-menu *menu* and assert the landing URL
    matches ('购买' -> /ad/buy, '出售' -> /ad/sell, otherwise /ad/my)."""
    ad_management(browser, menu)
    expected = {
        '购买': 'https://dev.utomarket.com:9094/#/ad/buy',
        '出售': 'https://dev.utomarket.com:9094/#/ad/sell',
    }.get(menu, 'https://dev.utomarket.com:9094/#/ad/my')
    assert get_current_url(browser) == expected
def top_notice_o(browser):
    """Open the pinned system notice, verify the detail title matches the
    link text, go back, and verify the notice list is shown again."""
    notice_link = browser.find_element_by_class_name('system_notice___3cCZX')
    link_text = notice_link.find_element_by_tag_name('a').text
    notice_link.click()
    detail_title = browser.find_element_by_xpath(
        '//*[@id="root"]/div/div/div/div[2]/div/div/div/div[2]/div/div[1]/div/div[2]').text
    time.sleep(1)
    browser.find_element_by_xpath("//a[contains(text(),'返回')]").click()
    time.sleep(1)
    back_on_list = is_exist_element(browser, 'xpath', '//*[@id="root"]/div/div/div/div[2]/div/div/div[2]')
    assert link_text == detail_title
    assert back_on_list
def popup_o(browser, text):
    """Wait for the toast popup and assert its message equals *text*."""
    explicit_wait(browser, 'VOEL', ['ant-message-custom-content', 'class'])
    message = (browser.find_element_by_class_name('ant-message-custom-content')
               .find_element_by_tag_name('span').text)
    assert message == text
def select_country(browser, country):
    """Click the dropdown option whose visible text equals *country*.

    Despite the name, this works for any ant-select dropdown (country,
    currency, payment method, ...).
    """
    time.sleep(2)
    options = browser.find_elements_by_class_name('ant-select-dropdown-menu-item')
    for option in options:
        if option.text == country:
            option.click()
            time.sleep(1)
            break
def switch_language_o(browser, language):
    """Switch the UI language via the hover dropdown and assert the ad
    menu title is rendered in English afterwards."""
    trigger = browser.find_elements_by_class_name('ant-dropdown-trigger')[0]
    ActionChains(browser).move_to_element(trigger).perform()
    time.sleep(2)
    browser.find_element_by_xpath("//li[contains(text(),'%s')]" % language).click()
    browser.implicitly_wait(20)
    menu_title = browser.find_element_by_class_name('ant-menu-submenu-title')
    assert menu_title.text == 'Advertisement Center'
# def choice_button(browser, target):#筛选下拉框选择元素
# time.sleep(2)
# elements = browser.find_elements_by_class_name('ant-select-dropdown-menu-item')
# for item in elements:
# if item.text == target:
# item.click()
# break
def ad_screen_o(browser, country, currency, payment_method):
    """Filter the ad list by country, currency and payment method, submit,
    and assert the filter panel (its 查询 button) has disappeared."""
    explicit_wait(browser, 'VOEL', ["search_box___2kIFM", 'class'])
    screen_btn = browser.find_element_by_class_name("search_box___2kIFM")
    screen_btn.click()
    time.sleep(2)
    # btns = browser.find_elements_by_class_name('ant-select-selection__rendered')
    # country_btn = btns[0]
    # currency_btn = btns[1]
    # payment_method_btn = btns[2]
    country_btn = browser.find_element_by_xpath("//div[@title='全部国家']/..")
    currency_btn = browser.find_element_by_xpath("//div[@title='全部币种']/..")
    payment_method_btn = browser.find_element_by_xpath("//div[@title='全部支付方式']/..")
    submit_btn = browser.find_element_by_xpath("//span[contains(text(),'查询')]/..")
    country_btn.click()
    # select_country is a generic dropdown picker, reused for all three filters
    select_country(browser, country)
    time.sleep(1)
    currency_btn.click()
    select_country(browser, currency)
    time.sleep(1)
    payment_method_btn.click()
    select_country(browser, payment_method)
    time.sleep(1)
    submit_btn.click()
    time.sleep(2)
    # after querying, the search panel should close, so the button is gone
    status = is_exist_element(browser, "xpath", "//span[contains(text(),'查询')]/..")
    assert not status
def cut_type_o(browser, btn):
    """Click the buy/sell trade tab labelled *btn* and assert it becomes
    the active tab (its class contains 'bt_trade_tabs_active').

    Cleanup: removed a leftover debug print of the class attribute and the
    dead commented-out URL/color checks.
    """
    explicit_wait(browser, 'VOEL', ["//button[contains(text(),'%s')]" % btn, 'xpath'])
    cut_btn = browser.find_element_by_xpath("//button[contains(text(),'%s')]" % btn)
    cut_btn.click()
    active = cut_btn.get_attribute('class')
    class_name = 'bt_trade_tabs_active'
    assert class_name in active
def progress_order_o(browser):
    """Open the in-progress order list from the clock icon, enter the
    first order and assert we landed on the trade-step page."""
    browser.find_element_by_xpath(
        '//*[@id="root"]/div/div/div/div[1]/div/div/span[1]/span[2]/span').click()
    explicit_wait(browser, 'VOEL', ["ant-list-item-meta-content", 'class'])
    browser.find_element_by_class_name('ant-list-item-meta-content').click()
    explicit_wait(browser, 'VOEL', ["ant-list-item-content", 'class'])
    expected_prefix = 'https://dev.utomarket.com:9094/#/trade/step'
    assert expected_prefix in get_current_url(browser)
| {"/utomarket/release_ad.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/tests/test_release_ad.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_user_info.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/login_util.py": ["/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/register_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py", "/utomarket/get_code_util.py"], "/tests/test_logout.py": ["/utomarket/utomarket.py"], "/tests/test_util.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/logout_util.py": ["/utomarket/util.py", "/utomarket/skip_util.py"], "/utomarket/personal_center_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_register.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_top_notice.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/skip_util.py": ["/utomarket/util.py"], "/utomarket/transaction.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/utomarket/wallet_util.py": ["/utomarket/util.py"], "/tests/test_wallet.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/tests/test_transaction.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_login.py": ["/utomarket/utomarket.py", "/utomarket/util.py"], "/utomarket/utomarket.py": ["/utomarket/login_util.py", "/utomarket/wallet_util.py", "/utomarket/logout_util.py", "/utomarket/release_ad.py", "/utomarket/register_util.py", "/utomarket/personal_center_util.py", "/utomarket/transaction.py"]} |
65,849 | guzhijun369/test3 | refs/heads/master | /utomarket/transaction.py | from utomarket.skip_util import *
from utomarket.util import *
from selenium.webdriver.common.keys import Keys
from .personal_center_util import select_mode
def ad_detail_o(browser, name):
    """Open the ad-detail page of advertiser *name* from the ad list and
    verify both the URL and the displayed advertiser name."""
    row = browser.find_element_by_xpath("//a[contains(text(),'%s')]/../../.." % name)
    row.find_elements_by_tag_name('td')[-1].click()
    explicit_wait(browser, 'VOEL', ["ant-card-meta-avatar", 'class'])
    current = get_current_url(browser)
    shown_name = browser.find_element_by_class_name('ant-card-meta-title').text
    assert 'https://dev.utomarket.com:9094/#/trade/detail' in current
    assert shown_name == name
def create_buy_o(browser, type='法币计价', trading_volume='1,234.24',
                 trading_count='0.2345871', trading_note='这是交易备注'):
    """Create a buy order on the ad-detail page.

    type selects the pricing mode: '法币计价' (fiat amount) fills
    trading_volume, anything else fills trading_count. After submitting,
    verifies the success toast, the trade-step URL, and that the entered
    amount is displayed.
    NOTE(review): the parameter name `type` shadows the builtin, but it is
    part of the public keyword interface (callers pass type=...), so it
    must stay unchanged.
    """
    type_button = browser.find_element_by_xpath("//div[@title='法币计价']/..")
    trading_note_input = browser.find_element_by_id('trading_notes')  # create buy order
    submit_button = browser.find_elements_by_tag_name('button')[1]
    type_button.click()
    select_mode(browser, type)
    if type == '法币计价':
        trading_input = browser.find_element_by_id('trading_volume')
        trading_input.send_keys(trading_volume)
    else:
        trading_input = browser.find_element_by_id('trading_count')
        trading_input.send_keys(trading_count)
    trading_note_input.send_keys(trading_note)
    submit_button.click()
    popup_o(browser, '操作成功')
    explicit_wait(browser, 'VOEL', ["ant-list-item-content", 'class'])
    url = 'https://dev.utomarket.com:9094/#/trade/step'
    url_true = get_current_url(browser)
    if type == '法币计价':
        # fiat mode displays '<amount> CNY '
        verify_div = is_exist_element(browser, 'xpath', "//span[text()='%s CNY ']" % trading_volume)
    else:
        # quantity mode displays ' <amount> BTC'
        verify_div = is_exist_element(browser, 'xpath', "//span[text()=' %s BTC']" % trading_count)
    assert url in url_true
    assert verify_div
def send_massege_o(browser, massege_content):
    """Type *massege_content* into the order chat, send it, and verify it
    appears in the conversation."""
    time.sleep(2)
    explicit_wait(browser, 'VOEL', ["ant-list-item-meta-content", 'class'])
    message_box = browser.find_element_by_id('message')
    send_button = browser.find_element_by_xpath("//span[text()='发送']/..")
    smart_input(message_box, massege_content)
    send_button.click()
    explicit_wait(browser, 'VOEL',
                  ["//div[contains(text(),'%s')]" % massege_content, 'xpath'])
    judge_massege_result_o(browser, massege_content)
def judge_massege_result_o(browser, massege_content):
    """Assert the chat pane contains a bubble with *massege_content*."""
    bubble_xpath = "//div[contains(text(),'%s')]" % massege_content
    assert is_exist_element(browser, 'xpath', bubble_xpath)
def cancel_order_buyer_o(browser):
    """Cancel the order from the buyer side: click cancel, type a reason,
    confirm, then assert the cancelled ('已取消') badge is visible."""
    browser.find_element_by_xpath("//span[text()='取消订单']/..").click()
    explicit_wait(browser, 'VOEL', ["//textarea[@placeholder='请填写原因']", 'xpath'])
    reason_box = browser.find_element_by_xpath("//textarea[@placeholder='请填写原因']")
    confirm = browser.find_element_by_xpath("//span[text()='确定']/..")
    smart_input(reason_box, '这是取消订单原因')
    confirm.click()
    explicit_wait(browser, 'VOEL', ["//div[text()='买家已取消交易,订单已关闭。']", 'xpath'])
    cancelled_badge = is_exist_element(browser, 'xpath', "//span[text()='已取消']")
    assert cancelled_badge
def cancel_order_seller_o(browser):
    """Seller-side check after the buyer cancelled: wait for the closing
    notice and assert the cancelled ('已取消') badge is shown."""
    explicit_wait(browser, 'VOEL', ["//div[text()='买家已取消交易,订单已关闭。']", 'xpath'])
    cancelled_badge = is_exist_element(browser, 'xpath', "//span[text()='已取消']")
    assert cancelled_badge
def confirm_payment_buyer_o(browser):
    """Buyer confirms payment: click 确认支付, confirm the dialog, then
    assert the order entered the 'paid, waiting for seller' state (the
    appeal 申述 button becomes visible).

    Cleanup: removed a leftover debug print of the confirm button.
    """
    confirm_payment_btn = browser.find_element_by_xpath("//span[text()='确认支付']/..")
    confirm_payment_btn.click()
    time.sleep(2)
    confirm_btn = browser.find_element_by_xpath("//span[text()='确 定']/..")
    confirm_btn.click()
    explicit_wait(browser, 'VOEL', ["//div[text()='买家已付款,等待卖家确认']", 'xpath'])
    m = is_exist_element(browser, 'xpath', "//span[text()='申述']")
    assert m
def confirm_payment_seller_o(browser):
    """Seller-side check after the buyer paid: both the appeal (申述) and
    the release (确认释放) buttons must be visible."""
    explicit_wait(browser, 'VOEL', ["//div[text()='买家已付款,等待卖家确认']", 'xpath'])
    time.sleep(2)
    appeal_visible = is_exist_element(browser, 'xpath', "//span[text()='申述']")
    release_visible = is_exist_element(browser, 'xpath', "//span[text()='确认释放']")
    assert appeal_visible
    assert release_visible
def confirm_release_buyer_o(browser):
    """Release the escrowed BTC: click 确认释放, confirm the dialog, then
    assert the released badge and the rating button appear."""
    browser.find_element_by_xpath("//span[text()='确认释放']/..").click()
    time.sleep(2)
    browser.find_element_by_xpath("//span[text()='确 定']/..").click()
    explicit_wait(browser, 'VOEL', ["//div[text()='卖家已释放托管BTC,请留意资产到账。']", 'xpath'])
    released_visible = is_exist_element(browser, 'xpath', "//span[text()='已释放']")
    rating_visible = is_exist_element(browser, 'xpath', "//span[text()='好评']")
    assert released_visible
    assert rating_visible
def confirm_release_seller_o(browser):
    """Counterparty check after release: the released ('已释放') badge and
    the rating ('好评') button must both be visible."""
    explicit_wait(browser, 'VOEL', ["//div[text()='卖家已释放托管BTC,请留意资产到账。']", 'xpath'])
    badge_shown = is_exist_element(browser, 'xpath', "//span[text()='已释放']")
    rating_shown = is_exist_element(browser, 'xpath', "//span[text()='好评']")
    assert badge_shown
    assert rating_shown
def order_rating_o(browser, content='和你合作真是太愉快了'):
    """Leave a positive rating with *content* and verify the success toast.

    Cleanup: removed two leftover debug prints of located elements.
    """
    good_rating_btn = browser.find_element_by_xpath("//img[@alt='good']/..")
    rating_input = browser.find_element_by_id('content')
    submit_btn = browser.find_elements_by_class_name('ant-btn-primary')[0]
    good_rating_btn.click()
    rating_input.clear()
    rating_input.send_keys(content)
    submit_btn.click()
    popup_o(browser, '操作成功')
| {"/utomarket/release_ad.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/tests/test_release_ad.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_user_info.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/login_util.py": ["/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/register_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py", "/utomarket/get_code_util.py"], "/tests/test_logout.py": ["/utomarket/utomarket.py"], "/tests/test_util.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/logout_util.py": ["/utomarket/util.py", "/utomarket/skip_util.py"], "/utomarket/personal_center_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_register.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_top_notice.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/skip_util.py": ["/utomarket/util.py"], "/utomarket/transaction.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/utomarket/wallet_util.py": ["/utomarket/util.py"], "/tests/test_wallet.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/tests/test_transaction.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_login.py": ["/utomarket/utomarket.py", "/utomarket/util.py"], "/utomarket/utomarket.py": ["/utomarket/login_util.py", "/utomarket/wallet_util.py", "/utomarket/logout_util.py", "/utomarket/release_ad.py", "/utomarket/register_util.py", "/utomarket/personal_center_util.py", "/utomarket/transaction.py"]} |
65,850 | guzhijun369/test3 | refs/heads/master | /utomarket/wallet_util.py | from selenium.webdriver.common.action_chains import ActionChains
from utomarket.util import *
def enter_wallet_home(browser):
    """Click the wallet entry in the top bar to open the wallet home."""
    wallet_xpath = '//*[@id="root"]/div/div/div/div[1]/div/div/span[1]/span[1]/a/span/span'
    explicit_wait(browser, 'VOEL', [wallet_xpath, 'xpath'])
    wallet_entry = browser.find_element_by_xpath(wallet_xpath)
    click_element(browser, wallet_entry)
def do_withdraw_internal(browser, address, amount):
    """Fill in and submit the withdraw form (address + amount) as a single
    ActionChains sequence: open the withdraw tab, type the destination
    address and the amount, then click commit."""
    tab_xpath = '//*[@id="root"]/div/div/div/div[2]/div/div[2]/div/div[1]/div/div/div/div/div[1]/div[1]'
    explicit_wait(browser, 'VOEL', [tab_xpath, 'xpath'])
    withdraw_tab = browser.find_element_by_xpath(tab_xpath)
    address_field = browser.find_element_by_id('address')
    amount_field = browser.find_element_by_id('amount')
    commit_btn = browser.find_element_by_xpath('//*[@id="root"]/div/div/div/div[2]/div/div[2]/div/div[2]/div[1]/div/div[1]/form/div[4]/div/div/span/button')
    chain = ActionChains(browser)
    chain.move_to_element(withdraw_tab).click()
    chain.move_to_element(address_field).click().send_keys(address)
    chain.move_to_element(amount_field).click().send_keys(amount)
    chain.move_to_element(commit_btn).click()
    chain.perform()
def after_withdraw(browser, address, amount, method):
    """Verify the first row of the withdraw history after a withdrawal.

    method '内部' (internal): completed immediately, no fee, nothing frozen.
    method '外部' (external): pending review, fixed fee, amount frozen.
    """
    record_xpath = '//*[@id="root"]/div/div/div/div[2]/div/div[2]/div/div[1]/div/div/div/div/div[1]/div[2]'
    explicit_wait(browser, 'VOEL', [record_xpath, 'xpath'])
    frozen_money = browser.find_elements_by_class_name('text-blue')
    # first history row; %d selects the column (2=amount 3=fee 4=status 6=address)
    row_cell = '//*[@id="root"]/div/div/div/div[2]/div/div[2]/div/div[2]/div[2]/div/div/div[3]/div/div/div/div/div/table/tbody/tr[1]/td[%d]'
    stutas_btn = browser.find_element_by_xpath(row_cell % 4)
    fee_btn = browser.find_element_by_xpath(row_cell % 3)
    amount_list = browser.find_element_by_xpath(row_cell % 2)
    address_list = browser.find_element_by_xpath(row_cell % 6)
    if method == '内部':
        assert frozen_money[1].text == '0 BTC'
        assert stutas_btn.text == '已完成'
        assert fee_btn.text == '-'
    elif method == '外部':
        assert frozen_money[1].text == '%s BTC' % amount
        assert stutas_btn.text == '待审核'
        assert fee_btn.text == '0.00005'
    else:
        return
    assert amount_list.text == '- %s' % amount
    assert address_list.text == address
| {"/utomarket/release_ad.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/tests/test_release_ad.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_user_info.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/login_util.py": ["/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/register_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py", "/utomarket/get_code_util.py"], "/tests/test_logout.py": ["/utomarket/utomarket.py"], "/tests/test_util.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/logout_util.py": ["/utomarket/util.py", "/utomarket/skip_util.py"], "/utomarket/personal_center_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_register.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_top_notice.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/skip_util.py": ["/utomarket/util.py"], "/utomarket/transaction.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/utomarket/wallet_util.py": ["/utomarket/util.py"], "/tests/test_wallet.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/tests/test_transaction.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_login.py": ["/utomarket/utomarket.py", "/utomarket/util.py"], "/utomarket/utomarket.py": ["/utomarket/login_util.py", "/utomarket/wallet_util.py", "/utomarket/logout_util.py", "/utomarket/release_ad.py", "/utomarket/register_util.py", "/utomarket/personal_center_util.py", "/utomarket/transaction.py"]} |
65,851 | guzhijun369/test3 | refs/heads/master | /tests/test_wallet.py | import time
from utomarket.util import explicit_wait
from utomarket.utomarket import Utomarket
def test_withdraw_internal_inner():  # internal transfer
    """Withdraw to an internal address and verify the history row."""
    ins = Utomarket('test129', '3201')
    address = '37jzbkEiZW4KfY7ueFLiXtnCJbu98Ay7wS'
    amount = '0.0005'
    ins.login().withdraw_internal(address, amount)
    ins.after_withdraw_internal(address, amount, '内部')
    time.sleep(2)
    ins.browser.close()
def test_withdraw_internal_out():  # external transfer
    """Withdraw to an external address and verify the history row."""
    ins = Utomarket('test129', '3201')
    address = '3Cni6jyLVEYL73dVJYN6zasVryLWbJtAAb'
    amount = '0.0005'
    # fluent API: each step returns the Utomarket instance
    ins.login().withdraw_internal(address, amount).after_withdraw_internal(address, amount, '外部')
    time.sleep(2)
    ins.browser.close()
def test_withdraw_myself():  # transfer to own address
    """Withdrawing to one's own address must show the rejection toast."""
    ins = Utomarket('test129', '3201')
    address = '34eSJjag5rRWsjPsiwp45rosnqYmteYwWq'
    amount = '0.0005'
    ins.login().withdraw_internal(address, amount)
    explicit_wait(ins.browser, 'VOEL', ['ant-message-custom-content', 'class'])
    toast = (ins.browser.find_element_by_class_name('ant-message-custom-content')
             .find_element_by_tag_name('span').text)
    assert toast == '不能转账给自己'
    ins.browser.close()
| {"/utomarket/release_ad.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/tests/test_release_ad.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_user_info.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/login_util.py": ["/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/register_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py", "/utomarket/get_code_util.py"], "/tests/test_logout.py": ["/utomarket/utomarket.py"], "/tests/test_util.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/logout_util.py": ["/utomarket/util.py", "/utomarket/skip_util.py"], "/utomarket/personal_center_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_register.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_top_notice.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/skip_util.py": ["/utomarket/util.py"], "/utomarket/transaction.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/utomarket/wallet_util.py": ["/utomarket/util.py"], "/tests/test_wallet.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/tests/test_transaction.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_login.py": ["/utomarket/utomarket.py", "/utomarket/util.py"], "/utomarket/utomarket.py": ["/utomarket/login_util.py", "/utomarket/wallet_util.py", "/utomarket/logout_util.py", "/utomarket/release_ad.py", "/utomarket/register_util.py", "/utomarket/personal_center_util.py", "/utomarket/transaction.py"]} |
65,852 | guzhijun369/test3 | refs/heads/master | /tests/test_transaction.py | from utomarket.utomarket import Utomarket
from utomarket.util import explicit_wait, get_current_url
from utomarket.settings import Settings
import time
def test_ad_detail():
    """Open the sell-ad detail page of advertiser test129."""
    trader = Utomarket('test128', '3201')  # trading counterparty
    trader.login().ad_detail('test129')
    trader.browser.close()
def test_cancel_order():
    """Buyer opens an order and chats, then cancels; the seller must see
    both the chat message and the cancellation."""
    trader = Utomarket('test128', '3201')  # counterparty (buyer)
    massege_content = '你是傻逼斯达克警方扩大解放开绿灯顺'
    trader.login().ad_detail('test129').create_buy(type='交易数量').send_massege(massege_content)
    owner = Utomarket('test129', '3201')  # advertiser (seller)
    owner.login().progress_order()
    owner.judge_massege_result(massege_content)
    trader.cancel_order_buyer()
    owner.cancel_order_seller()
    trader.browser.close()
    owner.browser.close()
def test_buy_process():
    """Full active-buy flow (主动购买全流程).

    Buyer creates the order and chats; seller confirms the message;
    both sides confirm payment and release, then rate each other.
    The call sequence (and which session performs each step) is kept
    exactly as in the original scenario.
    """
    chat_text = '你是傻逼斯达克警方扩大解放开绿灯顺丰快递副书记撒大陆军撒'
    buyer = Utomarket('test128', '3201')  # 买家
    buyer.login().ad_detail('test129').create_buy(type='交易数量').send_massege(chat_text)
    seller = Utomarket('test129', '3201')  # 卖家
    seller.login().progress_order()
    seller.judge_massege_result(chat_text)
    buyer.confirm_payment_buyer()
    seller.confirm_payment_seller().send_massege("你好,我已经支付了")
    buyer.judge_massege_result("你好,我已经支付了")
    seller.confirm_release_buyer()
    buyer.confirm_release_seller()
    seller.order_rating(content='这是卖家的交易评价')
    buyer.order_rating(content='这是买家的交易评价')
    buyer.browser.close()
    seller.browser.close()
| {"/utomarket/release_ad.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/tests/test_release_ad.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_user_info.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/login_util.py": ["/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/register_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py", "/utomarket/get_code_util.py"], "/tests/test_logout.py": ["/utomarket/utomarket.py"], "/tests/test_util.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/logout_util.py": ["/utomarket/util.py", "/utomarket/skip_util.py"], "/utomarket/personal_center_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_register.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_top_notice.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/skip_util.py": ["/utomarket/util.py"], "/utomarket/transaction.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/utomarket/wallet_util.py": ["/utomarket/util.py"], "/tests/test_wallet.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/tests/test_transaction.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_login.py": ["/utomarket/utomarket.py", "/utomarket/util.py"], "/utomarket/utomarket.py": ["/utomarket/login_util.py", "/utomarket/wallet_util.py", "/utomarket/logout_util.py", "/utomarket/release_ad.py", "/utomarket/register_util.py", "/utomarket/personal_center_util.py", "/utomarket/transaction.py"]} |
65,853 | guzhijun369/test3 | refs/heads/master | /tests/test_login.py | from utomarket.utomarket import Utomarket
from utomarket.util import get_page_title, explicit_wait
def test_login():
    """Data-driven login test (登录).

    Checks three scenarios: a valid account, a wrong password, and a
    non-existent user.  Each iteration opens its own Chrome instance,
    so the browser is closed in a ``finally`` block — the original code
    never closed it for the '用户不存在' case (no matching branch) and
    also leaked it whenever an assertion failed.
    """
    cases = [
        ('test130', '3201', 'success'),
        ('test119', '3202', '用户名密码错误'),
        ('test1120', '3202', '用户不存在')
    ]
    for username, password, expected in cases:
        ins = Utomarket(username, password)
        try:
            ins.login().sliding_verification()
            if expected == 'success':
                explicit_wait(ins.browser, 'TC', 'P2P交易 - 乌托市场(TEST)')
                assert get_page_title(ins.browser) == 'P2P交易 - 乌托市场(TEST)'
            elif expected == '用户名密码错误':
                span_errmsg = ins.browser.find_element_by_class_name('ant-alert-message').text
                assert span_errmsg == '用户名或密码错误'
            # NOTE(review): the '用户不存在' case has no assertion yet —
            # TODO confirm the expected error message and add a branch.
        finally:
            ins.browser.close()
# def test_band_google_login():
# ins = Utomarket('niriliya', '3201')
# ins.login().my_center('个人中心').band_google('3201').logout('退出登录').login()
# explicit_wait(ins.browser, 'TC', 'P2P交易 - 乌托市场(TEST)')
# explicit_wait(ins.browser, 'VOEL', ['//*[@id="root"]/div/div/div[2]/div[1]/div/div/span[1]/span[1]/a/span/span', 'xpath'], timeout=5)
| {"/utomarket/release_ad.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/tests/test_release_ad.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_user_info.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/login_util.py": ["/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/register_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py", "/utomarket/get_code_util.py"], "/tests/test_logout.py": ["/utomarket/utomarket.py"], "/tests/test_util.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/utomarket/logout_util.py": ["/utomarket/util.py", "/utomarket/skip_util.py"], "/utomarket/personal_center_util.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_register.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_top_notice.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/utomarket/skip_util.py": ["/utomarket/util.py"], "/utomarket/transaction.py": ["/utomarket/skip_util.py", "/utomarket/util.py", "/utomarket/personal_center_util.py"], "/utomarket/wallet_util.py": ["/utomarket/util.py"], "/tests/test_wallet.py": ["/utomarket/util.py", "/utomarket/utomarket.py"], "/tests/test_transaction.py": ["/utomarket/utomarket.py", "/utomarket/util.py", "/utomarket/settings.py"], "/tests/test_login.py": ["/utomarket/utomarket.py", "/utomarket/util.py"], "/utomarket/utomarket.py": ["/utomarket/login_util.py", "/utomarket/wallet_util.py", "/utomarket/logout_util.py", "/utomarket/release_ad.py", "/utomarket/register_util.py", "/utomarket/personal_center_util.py", "/utomarket/transaction.py"]} |
65,854 | guzhijun369/test3 | refs/heads/master | /utomarket/utomarket.py | import os
import logging
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver import DesiredCapabilities
from selenium.webdriver.common.proxy import Proxy, ProxyType
from selenium.common.exceptions import WebDriverException
# from .settings import Settings
from .login_util import login_user, login_user_two, sliding_verification_o
from .wallet_util import enter_wallet_home, do_withdraw_internal, after_withdraw
from .logout_util import *
from .release_ad import *
from .register_util import *
from .personal_center_util import *
from .transaction import *
class UtomarketError(Exception):
    """Base error raised by the Utomarket automation wrapper."""
class Utomarket:
    """Page-object style driver for the Utomarket web site.

    Wraps a Selenium Chrome session plus a per-user logger.  Almost
    every action method is a thin delegator to a function in one of
    the ``*_util`` modules (brought in by the wildcard imports above)
    and returns ``self`` so calls can be chained, e.g.::

        Utomarket(user, pw).login().ad_detail(name)

    Constructing an instance has side effects: it creates the log
    folder, registers a logger, and launches a Chrome browser.
    """
    def __init__(self,
                 username=None,
                 password=None,
                 headless_browser=False,
                 disable_image_load=False,
                 proxy_address=None,
                 proxy_port=None,
                 browser_profile_path=None,
                 page_delay=25,
                 multi_logs=True,
                 show_log_in_console=True,
                 ):
        """Store configuration, then set up logging and the browser.

        :param username:            account name used by login()/register()
        :param password:            account password
        :param headless_browser:    run Chrome with --headless
        :param disable_image_load:  skip image loading to speed up pages
        :param proxy_address:       optional HTTP/SOCKS/SSL proxy host
        :param proxy_port:          optional proxy port
        :param browser_profile_path: optional Chrome user-data dir
        :param page_delay:          implicit-wait timeout in seconds
        :param multi_logs:          one log sub-folder per username
        :param show_log_in_console: also echo log records to stdout
        :raises UtomarketError:     if Chrome fails to start
        """
        self.username = username
        self.password = password
        self.browser = None
        self.headless_browser = headless_browser
        self.disable_image_load = disable_image_load
        self.proxy_address = proxy_address
        self.proxy_port = proxy_port
        self.browser_profile_path = browser_profile_path
        self.page_delay = page_delay
        self.multi_logs = multi_logs
        self.show_log_in_console = show_log_in_console
        # Order matters: the logger must exist before _init_browser,
        # which logs Chrome startup failures.
        self._init_logger()
        self._init_browser()
    def _init_logger(self):
        """Create the log folder (per-user when multi_logs) and the logger.

        ``Settings`` comes from the wildcard-imported util modules —
        presumably utomarket.settings; confirm against that module.
        """
        self.log_folder = Settings.log_location + os.path.sep
        if self.multi_logs:
            # e.g. "<log_location>/<username>/"
            self.log_folder = '{0}{1}{2}{1}'.format(Settings.log_location, os.path.sep, self.username)
        if not os.path.exists(self.log_folder):
            os.makedirs(self.log_folder)
        self.logger = self._get_logger(self.show_log_in_console)
    def _init_browser(self):
        """Launch Chrome with the configured options.

        :raises UtomarketError: when webdriver startup fails; the
            original exception is logged first.
        """
        chrome_options = Options()
        chrome_options.add_argument("--disable-infobars")
        chrome_options.add_argument("--mute-audio")
        chrome_options.add_argument('--dns-prefetch-disable')
        chrome_options.add_argument('--disable-setuid-sandbox')
        if self.headless_browser:
            chrome_options.add_argument('--headless')
            chrome_options.add_argument('--no-sandbox')
            if self.disable_image_load:
                chrome_options.add_argument('--blink-settings=imagesEnabled=false')
            # Replaces browser User Agent from "HeadlessChrome".
            user_agent = "Chrome"
            chrome_options.add_argument('user-agent={user_agent}'.format(user_agent=user_agent))
        capabilities = DesiredCapabilities.CHROME
        if self.proxy_address and self.proxy_port:
            # Route HTTP, SOCKS and SSL traffic through the same proxy.
            prox = Proxy()
            proxy = ":".join([self.proxy_address, str(self.proxy_port)])
            prox.proxy_type = ProxyType.MANUAL
            prox.http_proxy = proxy
            prox.socks_proxy = proxy
            prox.ssl_proxy = proxy
            prox.add_to_capabilities(capabilities)
        if self.browser_profile_path is not None:
            chrome_options.add_argument('user-data-dir={}'.format(self.browser_profile_path))
        chrome_prefs = {
            'intl.accept_languages': Settings.browser_language,
        }
        if self.disable_image_load:
            # Content-settings value 2 blocks images.
            chrome_prefs['profile.managed_default_content_settings.images'] = 2
        chrome_options.add_experimental_option('prefs', chrome_prefs)
        try:
            # NOTE(review): desired_capabilities/chrome_options kwargs are
            # Selenium 3 style; Selenium 4 renamed them — confirm the
            # pinned selenium version before upgrading.
            self.browser = webdriver.Chrome(desired_capabilities=capabilities, chrome_options=chrome_options)
        except WebDriverException as exc:
            self.logger.exception(exc)
            raise UtomarketError('初始化chrome失败')
        # Implicit wait applied to every element lookup.
        self.browser.implicitly_wait(self.page_delay)
    def _get_logger(self, show_log_in_console):
        """Return the cached per-user logger, creating it on first use.

        Loggers are memoized in ``Settings.loggers`` keyed by username so
        multiple Utomarket instances for the same user share one logger
        (and avoid duplicate handlers).
        """
        existing_logger = Settings.loggers.get(self.username)
        if existing_logger is not None:
            return existing_logger
        else:
            # initialize and setup logging system for the InstaPy object
            logger = logging.getLogger(self.username)
            logger.setLevel(logging.DEBUG)
            file_handler = logging.FileHandler('{}general.log'.format(self.log_folder))
            file_handler.setLevel(logging.DEBUG)
            # %(username)s is injected via the LoggerAdapter's extra dict.
            extra = {"username": self.username}
            logger_formatter = logging.Formatter('%(levelname)s [%(asctime)s] [%(username)s] %(message)s',
                                                 datefmt='%Y-%m-%d %H:%M:%S')
            file_handler.setFormatter(logger_formatter)
            logger.addHandler(file_handler)
            if show_log_in_console:
                console_handler = logging.StreamHandler()
                console_handler.setLevel(logging.DEBUG)
                console_handler.setFormatter(logger_formatter)
                logger.addHandler(console_handler)
            logger = logging.LoggerAdapter(logger, extra)
            Settings.loggers[self.username] = logger
            Settings.logger = logger
            return logger
    def get_url(self):
        """Return the browser's current URL (does not return self)."""
        now_url = get_current_url(self.browser)
        return now_url
    # --- authentication -------------------------------------------------
    def login(self):
        """Log in with the stored credentials; chainable."""
        login_user(self.browser, self.username, self.password, self.logger)
        return self
    def login_two(self):
        """Alternate login flow (see login_util.login_user_two)."""
        login_user_two(self.browser, self.username, self.password, self.logger)
        # google_login(self.browser)
        return self
    # --- wallet ---------------------------------------------------------
    def withdraw_internal(self, address, amount):
        """Open the wallet page and submit an internal withdrawal."""
        enter_wallet_home(self.browser)
        do_withdraw_internal(self.browser, address, amount)
        return self
    def after_withdraw_internal(self, address, amount, method):
        after_withdraw(self.browser, address, amount, method)
        return self
    # --- misc UI --------------------------------------------------------
    def top_notice_internal(self):
        top_notice_o(self.browser)
        return self
    def popup(self, text):
        # NOTE: delegates without the browser handle — popup_o operates on
        # text only (see its definition in the util module).
        popup_o(text)
        return self
    def logout(self, menu):
        logout_o(self.browser, self.logger, menu)
        return self
    # --- account lifecycle ----------------------------------------------
    def register(self, email, code, country, invitation_code):
        """Register a new account using the stored username/password."""
        register_test(self.browser, email, code, self.username, self.password, country, invitation_code)
        return self
    def forget(self, verification_mode, account, code, new_pw):
        """Run the forgot-password flow."""
        forget_pw(self.browser, verification_mode, account, code, new_pw)
        return self
    # --- personal center ------------------------------------------------
    def my_center(self, menu):
        user_info(self.browser, menu)
        return self
    def ad_btn(self, menu):
        ad_management(self.browser, menu)
        return self
    def enter_menu(self, menu):
        enter_menu_o(self.browser, menu)
        return self
    def upload_avatar(self):
        upload_avatar_o(self.browser)
        return self
    def change_mail(self, email, code):
        change_mail_o(self.browser, email, code)
        return self
    def band_telephone(self, phone, code, country):
        band_telephone_o(self.browser, phone, code, country)
        return self
    def change_telephone(self, phone, code, country):
        change_telephone_o(self.browser, phone, code, country)
        return self
    def band_google(self, code):
        band_google_o(self.browser, code)
        return self
    def stop_google(self, code):
        stop_google_o(self.browser, code)
        return self
    def change_pw(self, code, old_pw, new_pw, method):
        change_pw_o(self.browser, code, old_pw, new_pw, method)
        return self
    def auth_c1(self, name, id_number):
        auth_c1_o(self.browser, name, id_number)
        return self
    def auth_c2(self):
        auth_c2_o(self.browser)
        return self
    # --- payment methods ------------------------------------------------
    def payment_alipay(self, method, name, account, receipt_code):
        payment_alipay_o(self.browser, method, name, account, receipt_code)
        return self
    def payment_bank(self, method, name, account, card_number):
        payment_bank_o(self.browser, method, name, account, card_number)
        return self
    def payment_western_union(self, method, name, transfer_information):
        payment_western_union_o(self.browser, method, name, transfer_information)
        return self
    def delete_payment(self):
        delete_payment_o(self.browser)
        return self
    def switch_language(self, language):
        switch_language_o(self.browser, language)
        return self
    # --- ads ------------------------------------------------------------
    def ad_screen(self, country, currency, payment_method):
        """Filter the ad list by country/currency/payment method."""
        ad_screen_o(self.browser, country, currency, payment_method)
        return self
    def cut_type(self, btn):
        cut_type_o(self.browser, btn)
        return self
    def others_information(self):
        others_information_o(self.browser)
        return self
    def release_ad(self, **kwags):
        """Publish an ad; keyword args are passed straight through."""
        release_ad_o(self.browser, **kwags)
        return self
    def ad_detail(self, name):
        """Open the detail page of the ad published by ``name``."""
        ad_detail_o(self.browser, name)
        return self
    # --- orders & chat --------------------------------------------------
    def create_buy(self, **kwargs):
        create_buy_o(self.browser, **kwargs)
        return self
    def progress_order(self):
        progress_order_o(self.browser)
        return self
    def send_massege(self, massege_content):
        # (sic) "massege" spelling kept — it is part of the public API.
        send_massege_o(self.browser, massege_content)
        return self
    def judge_massege_result(self, massege_content):
        judge_massege_result_o(self.browser, massege_content)
        return self
    def cancel_order_buyer(self):
        cancel_order_buyer_o(self.browser)
        return self
    def cancel_order_seller(self):
        cancel_order_seller_o(self.browser)
        return self
    def confirm_payment_buyer(self):
        confirm_payment_buyer_o(self.browser)
        return self
    def confirm_payment_seller(self):
        confirm_payment_seller_o(self.browser)
        return self
    def confirm_release_buyer(self):
        confirm_release_buyer_o(self.browser)
        return self
    def confirm_release_seller(self):
        confirm_release_seller_o(self.browser)
        return self
    def order_rating(self, **kwargs):
        order_rating_o(self.browser, **kwargs)
        return self
    def remove_ad(self):
        remove_ad_o(self.browser)
        return self
    def delete_ad(self):
        delete_ad_o(self.browser)
        return self
    def sliding_verification(self):
        """Solve the slider captcha shown after login."""
        sliding_verification_o(self.browser)
        return self
65,860 | Rama-007/Aspect_Based_Sentiment_Analysis | refs/heads/master | /parser.py | import xml.sax
import sys
from collections import defaultdict
import nltk,random
from sklearn.multiclass import OneVsRestClassifier
from sklearn.svm import LinearSVC
from sklearn.model_selection import KFold, cross_val_score
# from autocorrect import spell
from sklearn.preprocessing import MultiLabelBinarizer
from sklearn.metrics import accuracy_score
import keras
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten, GRU
from keras.layers import Conv2D, MaxPooling2D, BatchNormalization
from keras.preprocessing.sequence import pad_sequences
# from category_classifier import classification
from category_classifier_svm import classification1
# from generate_ontology import ontology
# from aspect_extractor import extractor
#from neural_aspect_extractor import neural_extractor
from sentiment_analysis import aspect_sentiment
from final import neural_extractor1
# Module-level accumulators filled by the SAX ContentHandler below; the
# lists are parallel (index i refers to the same sentence in each).
categories=defaultdict(list)  # category name -> sentences seen under it
data=[]          # lower-cased sentence texts
labels=[]        # per-sentence list of aspect-category names
labels_polar=[]  # per-sentence list of category polarities
aspects=[]       # per-sentence list of aspect terms (pos/neg only)
sentiments=[]    # per-sentence list of aspect-term polarities
class ContentHandler(xml.sax.ContentHandler):
    """SAX handler for SemEval-style ABSA XML.

    Collects, per <sentence>, the text plus its aspect categories,
    category polarities, aspect terms and term polarities into the
    module-level lists (data, labels, labels_polar, aspects,
    sentiments) and the `categories` mapping.
    """
    def __init__(self):
        xml.sax.ContentHandler.__init__(self)
        # flag == 1 means "the next character data belongs to <text>".
        self.flag=0
        self.data=""
        self.labels=[]
        self.labels_polar=[]
        self.aspect=[]
        self.sentiment=[]
    def startElement(self,name,attrs):
        if name == "aspectCategory":
            self.labels.append(attrs.getValue("category"))
            self.labels_polar.append(attrs.getValue("polarity"))
            # Assumes <text> precedes <aspectCategory> within a sentence,
            # so self.data already holds the sentence — TODO confirm
            # against the input XML schema.
            categories[attrs.getValue("category")].append(self.data)
        if name == "aspectTerm":
            polar=attrs.getValue("polarity")
            # Only positive/negative terms are kept; neutral and
            # conflict terms are dropped.
            if(polar=='negative' or polar=='positive'):
                self.aspect.append(attrs.getValue("term"))
                self.sentiment.append(attrs.getValue("polarity"))
        # if name == "Opinion":
        # 	self.data+=attrs.getValue("aspect")+"aspolar"+attrs.getValue("polarity")+"27071998"
        # 	# print("aspect="+ attrs.getValue("aspect"))
        # 	# print("polarity="+ attrs.getValue("polarity"))
        if name=="text":
            self.flag=1
    def endElement(self,name):
        # A closing </sentence> flushes the accumulated state into the
        # module-level parallel lists and resets for the next sentence.
        if name=="sentence":
            # print (self.data)
            data.append(self.data.lower())
            labels.append(self.labels)
            labels_polar.append(self.labels_polar)
            aspects.append(self.aspect)
            sentiments.append(self.sentiment)
            self.data=""
            self.labels=[]
            self.labels_polar=[]
            self.aspect=[]
            self.sentiment=[]
        pass
    def characters(self,content):
        # pass
        # Only the first character-data chunk after <text> is captured —
        # NOTE(review): SAX may split long text into several chunks, in
        # which case later chunks are dropped; confirm on real input.
        if(self.flag==1):
            self.data+=content#+"27071997"
            # print(content)
        self.flag=0
# --- Parse the XML file given on the command line into the module lists. ---
source=open(sys.argv[1])
xml.sax.parse(source,ContentHandler())
print("sent for classification")
# classification(data,labels)
# classification1(data,labels)
# ontology_dict=ontology(data,labels)
# extractor(ontology_dict,data,labels,aspects)
#neural_extractor(data,labels,aspects)
#aspect_sentiment(data,aspects,sentiments)
# Hold out the last 20% of sentences as the test split (no shuffling).
test_size=int(0.2*len(data))
train_data=data[:-test_size]
train_labels=labels[:-test_size]
test_data=data[-test_size:]
test_labels=labels[-test_size:]
train_labels_polar=labels_polar[:-test_size]
test_labels_polar=labels_polar[-test_size:]
train_aspects=aspects[:-test_size]
test_aspects=aspects[-test_size:]
train_sentiments=sentiments[:-test_size]
test_sentiments=sentiments[-test_size:]
#text_to_predict=raw_input("ENTER:")
# Predict (category, polarity) pairs for the test sentences.
pred_categories=classification1(train_data,train_labels,train_labels_polar,test_data,test_labels,test_labels_polar)
print(pred_categories)
#pred_aspects=[u'food ',u'kitchen ']
# Extract aspect term strings for the test sentences.  NOTE: the
# extractor receives the FULL data/labels/aspects, not just the train
# split — presumably intentional for vocabulary coverage; verify.
pred_aspects=neural_extractor1(data,labels,aspects,test_data)
# Score extraction: a gold aspect counts as found when it is a substring
# of the concatenated predicted aspects for that sentence; sentences
# with no gold aspects count as correct.
asp_acc_checker=[]
for i in range(0,len(test_aspects)):
    if(len(test_aspects[i])==0):
        asp_acc_checker.append(1)
    else:
        for j in test_aspects[i]:
            strng=''.join(pred_aspects[i])
            if(j in strng):
                asp_acc_checker.append(1)
            else:
                asp_acc_checker.append(0)
true=[1 for i in asp_acc_checker]
# accuracy_score against an all-ones vector == fraction of hits.
print(accuracy_score(asp_acc_checker,true))
print(pred_aspects)
if(len(pred_aspects)!=0):
    asp_senti=aspect_sentiment(data,aspects,sentiments,test_data,pred_aspects)
#asp_senti=aspect_sentiment(data,aspects,sentiments,[text_to_predict],[pred_aspects])
#print("Categories, Sentiment:")
#print(pred_categories)
#print("Extracted Aspects:")
#print(test_data[:20])
#print(pred_aspects[:20])
# asp_senti is unbound when no aspects were predicted; the bare
# try/except silently skips printing in that case.
try:
    for i in range(0,len(asp_senti)):
        print(asp_senti[i])
except:
    pass
65,861 | Rama-007/Aspect_Based_Sentiment_Analysis | refs/heads/master | /aspect_extractor.py |
import nltk
from collections import defaultdict
from scipy import spatial
import numpy as np
text_aspects=[]  # unused here; kept for compatibility
# Load pretrained 300-d GloVe vectors: word -> np.ndarray of floats.
fp=open("glove.6B/glove.6B.300d.txt","r")
glove_emb={}
for line in fp:
    temp=line.split(" ")
    glove_emb[temp[0]]=np.asarray([float(i) for i in temp[1:]])
# category name -> list of GloVe vectors of its ontology terms.
ontology_embeded=defaultdict(list)
def extractor(ontology_dict,data,labels,aspects):
    """Match noun tokens of each sentence against per-category ontology
    embeddings and print found aspects next to the gold aspects.

    :param ontology_dict: category name -> list of seed terms (strings)
    :param data:    sentences
    :param labels:  per-sentence category lists (parallel to data)
    :param aspects: per-sentence gold aspect terms (parallel to data)

    Side effects: fills the module-level ``ontology_embeded`` mapping
    (category -> list of GloVe vectors) and prints the first ~11 rows of
    (gold aspects, found aspects).  Returns nothing.
    """
    # Embed every ontology term that has a GloVe vector, de-duplicated.
    # BUG FIX: the original used ``glove_emb[j] not in ontology_embeded[i]``.
    # List membership on numpy arrays compares element-wise and raises
    # ValueError ("truth value ... is ambiguous") as soon as the list is
    # non-empty; the bare ``except: pass`` swallowed it, so at most ONE
    # embedding per category was ever stored.
    for category, terms in ontology_dict.items():
        for term in terms:
            try:
                vec = glove_emb[term]
            except KeyError:
                continue  # term has no pretrained vector
            if not any(np.array_equal(vec, known) for known in ontology_embeded[category]):
                ontology_embeded[category].append(vec)
    found_aspect=[]
    for i in range(0,len(data)):
        words=nltk.word_tokenize(data[i])
        pos=nltk.pos_tag(words)
        # Candidate aspect words are noun tokens (NN, NNS, NNP, ...).
        nouns=[tok for tok, tag in pos if 'NN' in tag]
        categories=labels[i]
        temp_aspect=[]
        for word in nouns:
            try:
                vec=glove_emb[word]
            except KeyError:
                continue  # no embedding for this word
            for category in categories:
                for embedding in ontology_embeded[category]:
                    similarity=1-spatial.distance.cosine(vec,embedding)
                    if(similarity>0.6):
                        # Record (category, word) once the similarity clears
                        # the threshold; a word may match several categories.
                        temp_aspect.append((category,word))
                        break
        found_aspect.append(temp_aspect)
    # Preview: gold aspects vs. extracted ones for the first sentences.
    for i in range(0,len(data)):
        print(aspects[i],found_aspect[i])
        if(i>10):
            break
| {"/parser.py": ["/category_classifier_svm.py", "/sentiment_analysis.py", "/final.py"]} |
65,862 | Rama-007/Aspect_Based_Sentiment_Analysis | refs/heads/master | /sentiment_analysis.py | import nltk
from collections import defaultdict
import numpy as np
import keras
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten, GRU, Embedding, LSTM, Input, Bidirectional, TimeDistributed, merge, Reshape, Flatten
from keras.layers import Conv2D, MaxPooling2D, BatchNormalization
from keras.preprocessing.sequence import pad_sequences
from keras.models import Model, Input
from keras import backend as K
from keras.preprocessing.text import Tokenizer
from keras.utils import to_categorical
from sklearn.model_selection import train_test_split
# Load pretrained 300-d GloVe vectors: word -> np.ndarray of floats.
fp=open("glove.6B/glove.6B.300d.txt","r")
glove_emb={}
for line in fp:
    temp=line.split(" ")
    glove_emb[temp[0]]=np.asarray([float(i) for i in temp[1:]])
# Accumulators filled by aspect_sentiment():
sentences=[]     # training samples: (token triples, sentiment, aspect, text)
kk_sentences=[]  # prediction samples: (token triples, aspect, text)
pos_tags=[]      # every POS tag seen in training data (for pos2idx)
def aspect_sentiment(data,aspects,sentiments,kk,kk_aspects):
    """Train a BiGRU sentiment classifier per (sentence, aspect) pair and
    predict a polarity for each predicted aspect of the ``kk`` sentences.

    :param data:       training sentences
    :param aspects:    per-sentence gold aspect terms (parallel to data)
    :param sentiments: per-sentence term polarities (parallel to aspects)
    :param kk:         sentences to predict on
    :param kk_aspects: predicted aspect strings for each kk sentence
    :returns: list of (aspect_lowercased, polarity_label) tuples

    Each sample is a token sequence annotated with (word, distance to the
    aspect's B-token, POS tag); distance 0 marks tokens inside the aspect
    span.  NOTE(review): mutates the caller's ``data`` list (re-joins
    tokens and appends "ENDPAD") and the module-level sentences /
    kk_sentences / pos_tags accumulators.
    """
    # --- Build prediction samples from kk / kk_aspects -------------------
    for i in range(0,len(kk)):
        temp_sent=[]
        text=kk[i].lower()
        words=nltk.word_tokenize(text)
        pos_kk=[]
        for word in nltk.pos_tag(words):
            pos_kk.append(word[1])
        # BIO-style tags over the sentence tokens; 'B' marks the first
        # token of an aspect, 'I' the following ones.
        tags=['O' for ff in range(0,len(words))]
        dist=[]  # token index of each aspect's B-token
        for aspect in kk_aspects[i]:
            asp_words=nltk.word_tokenize(aspect)
            j=0;k=0;
            flag=0
            while(k<len(asp_words)):
                while(j<len(words)):
                    if(asp_words[k]==words[j] and tags[j]=='O'):
                        if(flag==0):
                            tags[j]='B'
                            dist.append(j)
                            flag=1
                        else:
                            tags[j]='I'
                        k+=1
                        if(k>=len(asp_words)):
                            break
                    j+=1
                k+=1
        # One sample per found aspect: every token paired with its signed
        # distance to that aspect (0 inside the span) and its POS tag.
        for d in range(0,len(dist)):
            flag=0
            for ii in range(0,len(words)):
                if(d==ii):
                    flag=1
                if(tags[ii]=='O'):
                    flag=0
                if(tags[ii]=='I' and flag==1):
                    distance=0
                else:
                    distance=ii-dist[d]
                temp_sent.append((words[ii],distance,pos_kk[ii]))
            kk_sentences.append((temp_sent,kk_aspects[i][d],kk[i]))
    i=0
    print(len(data),len(aspects))
    # --- Build training samples from data / aspects / sentiments ---------
    # (same tagging scheme as above, plus the gold polarity)
    for i in range(0,len(aspects)):
        temp_sent=[]
        text=data[i]
        words=nltk.word_tokenize(text)
        pos=[]
        for word in nltk.pos_tag(words):
            pos.append(word[1])
            pos_tags.append(word[1])
        tags=['O' for ff in range(0,len(words))]
        dist=[]
        for aspect in aspects[i]:
            asp_words=nltk.word_tokenize(aspect)
            j=0;k=0;
            flag=0
            while(k<len(asp_words)):
                while(j<len(words)):
                    if(asp_words[k]==words[j] and tags[j]=='O'):
                        if(flag==0):
                            tags[j]='B'
                            dist.append(j)
                            flag=1
                        else:
                            tags[j]='I'
                        k+=1
                        if(k>=len(asp_words)):
                            break
                    j+=1
                k+=1
        for d in range(0,len(dist)):
            flag=0
            for ii in range(0,len(words)):
                if(d==ii):
                    flag=1
                if(tags[ii]=='O'):
                    flag=0
                if(tags[ii]=='I' and flag==1):
                    distance=0
                else:
                    distance=ii-dist[d]
                temp_sent.append((words[ii],distance,pos[ii]))
            sentences.append((temp_sent,sentiments[i][d],aspects[i][d],data[i]))
    # --- Vocabulary / index maps -----------------------------------------
    list_of_pos=list(set(pos_tags))
    pos2idx={t:i for i,t in enumerate(list_of_pos)}
    word_list=[]
    for i in range(0,len(data)):
        tokens = nltk.word_tokenize(data[i])
        word_list.extend(tokens)
        string=' '.join(tokens)
        data[i]=string
    data.append("ENDPAD")
    word_list.append("endpad")
    wordss=list(set(word_list))
    word_index={w:i for i,w in enumerate(wordss)}
    # tokenizer=Tokenizer()
    # tokenizer.fit_on_texts(data)
    # sequences=tokenizer.texts_to_sequences(data)
    # word_index=tokenizer.word_index
    # --- Vectorize: word ids, distances and POS ids, padded to 50 --------
    X=[[word_index[w[0]] for w in s[0]] for s in sentences]
    X_to_predict=[[word_index[w[0]] for w in s[0]] for s in kk_sentences]
    dist_to_predict=[[w[1] for w in s[0]] for s in kk_sentences]
    distances=[[w[1] for w in s[0]] for s in sentences]
    pos_l=[[pos2idx[w[2]] for w in s[0]] for s in sentences]
    pos_to_predict=[[pos2idx[w[2]] for w in s[0]] for s in kk_sentences]
    X=pad_sequences(X,maxlen=50,padding="post", value=len(word_index.keys())-1)
    D1=pad_sequences(distances,maxlen=50,padding="post", value=50)
    P1=pad_sequences(pos_l,maxlen=50,padding="post", value=len(list_of_pos))
    X_to_predict=pad_sequences(X_to_predict,maxlen=50,padding="post", value=len(word_index.keys())-1)
    D_to_predict1=pad_sequences(dist_to_predict,maxlen=50,padding="post", value=50)
    P_to_predict1=pad_sequences(pos_to_predict,maxlen=50,padding="post", value=len(list_of_pos))
    # Reshape the auxiliary features to (samples, 50, 1) for concatenation.
    D=np.reshape(D1,(D1.shape[0],50,1))
    P=np.reshape(P1,(P1.shape[0],50,1))
    n_words=len(word_index)
    D_to_predict=np.reshape(D_to_predict1,(D_to_predict1.shape[0],50,1))
    P_to_predict=np.reshape(P_to_predict1,(P_to_predict1.shape[0],50,1))
    # Frozen GloVe embedding matrix aligned with word_index.
    embedding_matrix = np.zeros((n_words, 300))
    for word,i in word_index.items():
        if(i>=len(word_index)):
            continue
        if word in glove_emb:
            embedding_matrix[i]=glove_emb[word]
    tag_list=['negative','positive','neutral']#,'neutral','conflict']
    n_tags=len(tag_list)
    max_len=50
    tag2idx={t:i for i,t in enumerate(tag_list)}
    y = [tag2idx[s[1]] for s in sentences]
    asp=[s[2] for s in sentences]
    kk_asp=[s[1] for s in kk_sentences]
    kk_sents=[s[2] for s in kk_sentences]
    sents=[s[3] for s in sentences]
    y = [to_categorical(i, num_classes=n_tags) for i in y]
    # These manual splits are computed but the model below trains on the
    # full X with validation_split instead.
    validation_size=int(0.2*X.shape[0])
    X_tr=X[:-validation_size]
    y_tr=y[:-validation_size]
    asp_tr=asp[:-validation_size]
    sents_tr=sents[:-validation_size]
    D_tr=D[:-validation_size]
    P_tr=P[:-validation_size]
    X_te=X[-validation_size:]
    y_te=y[-validation_size:]
    D_te=D[-validation_size:]
    P_te=P[-validation_size:]
    asp_te=asp[-validation_size:]
    sents_te=sents[-validation_size:]
    print(X_tr.shape,np.asarray(y_tr).shape)
    # --- Model: frozen embeddings + distance + POS -> BiGRU -> softmax ---
    # NOTE(review): ``merge(..., mode='concat')`` is the legacy Keras 1 API;
    # this pins the code to an old keras version.
    vocab_size=len(word_index)
    inputt= Input(shape=(50,))
    emb=Embedding(vocab_size,300,weights=[embedding_matrix],
                  input_length=50,
                  #mask_zero=True,
                  trainable=False)(inputt)
    other_features=Input(shape=(50,1))
    other_features1=Input(shape=(50,1))
    emb=merge([emb,other_features,other_features1],mode='concat')
    gru_f=Bidirectional(GRU(50,return_sequences=True))(emb)
    dense=Dense(25,activation='relu')(gru_f)
    drop=Dropout(0.1)(dense)
    drop=Flatten()(drop)
    out=Dense(3,activation='softmax')(drop)
    model=Model([inputt,other_features,other_features1],out)
    model.compile(loss='categorical_crossentropy',
                  optimizer="rmsprop",
                  metrics=['accuracy'])
    model.fit([X,D,P], np.array(y), batch_size=25, epochs=5, validation_split=0.1, verbose=0)
    #print(model.evaluate([X_te,D_te,P_te], np.array(y_te)))
    # --- Predict a polarity per (sentence, aspect) sample ----------------
    k=model.predict([X_to_predict,D_to_predict,P_to_predict])
    print(kk_sents[0])
    ret_val=[]
    for i in range(0,len(k)):
        p=np.argmax(k[i],axis=-1)
        ret_val.append((kk_asp[i].lower(),tag_list[p]))
        #print(kk_asp[i].lower(),tag_list[p])
    return ret_val
65,863 | Rama-007/Aspect_Based_Sentiment_Analysis | refs/heads/master | /neural_aspect_extractor.py | import nltk
from collections import defaultdict
import numpy as np
import keras
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten, GRU, Embedding, Concatenate, LSTM, Input, Bidirectional, TimeDistributed
from keras.layers import Conv2D, MaxPooling2D, BatchNormalization
from keras.preprocessing.sequence import pad_sequences
from keras.models import Model
from keras import backend as K
from keras.preprocessing.text import Tokenizer
from keras.utils import to_categorical
from sklearn.model_selection import train_test_split
# Load pretrained 300-d GloVe vectors: word -> np.ndarray of floats.
fp=open("glove.6B/glove.6B.300d.txt","r")
glove_emb={}
for line in fp:
    temp=line.split(" ")
    glove_emb[temp[0]]=np.asarray([float(i) for i in temp[1:]])
# Accumulators filled by neural_extractor():
sentences=[]                      # per-sentence (word, POS, BIO-tag) triples
parts_of_speech=defaultdict(int)  # set of POS tags seen (values unused)
def neural_extractor(data,categories,aspects):
    """Train a BiGRU sequence tagger (BIO scheme) for aspect-term
    extraction and print predicted vs. gold aspects for a sample.

    :param data:       sentences (mutated: re-tokenized/joined, "ENDPAD"
                       appended at the end)
    :param categories: per-sentence category labels (unused in the body)
    :param aspects:    per-sentence gold aspect terms (parallel to data)

    Side effects only — fills the module-level ``sentences`` /
    ``parts_of_speech`` accumulators, trains a model, and prints the
    first 20 (gold, predicted) aspect pairs.  Nothing is returned.
    """
    # --- Build (word, POS, BIO-tag) training sequences -------------------
    for i in range(0,len(data)):
        flag=0
        temp_sent=[]
        text=data[i]
        # Debug hook for one specific sentence.
        if("i recommend their pad see" in data[i]):
            print(aspects[i])
            flag=1
        words=nltk.word_tokenize(text)
        pos=[]
        for word in nltk.pos_tag(words):
            parts_of_speech[word[1]]=1
            pos.append(word[1])
        tags=['O' for ff in range(0,len(words))]
        for aspect in aspects[i]:
            asp_words=nltk.word_tokenize(aspect.lower())
            j=0;k=0;
            # flag=0
            while(k<len(asp_words)):
                while(j<len(words)):
                    if(asp_words[k]==words[j] and tags[j]=='O'):
                        if(flag==1):
                            print(k,asp_words[k],j,words[j])
                        # First aspect token tagged 'B', the rest 'I'.
                        if(k==0):
                            tags[j]='B'
                        else:
                            tags[j]='I'
                        # if(flag==0):
                        # 	tags[j]='B'
                        # 	flag=1
                        # else:
                        # 	tags[j]='I'
                        k+=1
                        if(k>=len(asp_words)):
                            break
                    j+=1
                k+=1
        for ii in range(0,len(words)):
            temp_sent.append((words[ii],pos[ii],tags[ii]))
        sentences.append(temp_sent)
    print(len(sentences))
    # --- Vocabulary and vectorization (sequences padded to 30) -----------
    for i in range(0,len(data)):
        tokens = nltk.word_tokenize(data[i])
        string=' '.join(tokens)
        data[i]=string
    data.append("ENDPAD")
    tokenizer=Tokenizer()
    tokenizer.fit_on_texts(data)
    sequences=tokenizer.texts_to_sequences(data)
    word_index=tokenizer.word_index
    # sequences[:-1] drops the appended "ENDPAD" pseudo-sentence.
    X=pad_sequences(sequences[:-1],maxlen=30,padding="post", value=word_index["endpad"])
    n_words=len(word_index)
    tag_list=['B','I','O','P']  # 'P' = padding tag
    n_tags=len(tag_list)
    # Frozen GloVe embedding matrix aligned with word_index.
    embedding_matrix = np.zeros((n_words, 300))
    for word,i in word_index.items():
        if(i>=len(word_index)):
            continue
        if word in glove_emb:
            embedding_matrix[i]=glove_emb[word]
    max_len=30
    tag2idx={t:i for i,t in enumerate(tag_list)}
    idx2word={t:i for i,t in word_index.items()}  # id -> word (reversed map)
    pos2idx={t:i for i,t in enumerate(parts_of_speech.keys())}
    y = [[tag2idx[w[2]] for w in s] for s in sentences]
    y = pad_sequences(maxlen=max_len, sequences=y, padding="post", value=tag2idx["P"])
    y = [to_categorical(i, num_classes=n_tags) for i in y]
    pos=[[pos2idx[w[1]] for w in s] for s in sentences]
    pos1=pad_sequences(maxlen=max_len, sequences=pos, padding="post", value=len(parts_of_speech.keys())+1)
    pos=np.asarray([np.reshape(i,(max_len,1)) for i in pos1])
    # indices=np.arange(X.shape[0])
    # np.random.shuffle(indices)
    # X=X[indices]
    # y=y[indices]
    # Last 20% (unshuffled) held out for validation.
    validation_size=int(0.2*X.shape[0])
    X_tr=X[:-validation_size]
    tr_pos=pos[:-validation_size]
    y_tr=y[:-validation_size]
    X_te=X[-validation_size:]
    te_pos=pos[-validation_size:]
    y_te=y[-validation_size:]
    # X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.1)
    # --- Model: embeddings + POS id -> BiGRU -> per-token softmax --------
    vocab_size=len(word_index)
    e=Input(shape=(max_len,))
    emb=Embedding(vocab_size,300,weights=[embedding_matrix],input_length=30,mask_zero=True,trainable=False)(e)
    ad_pos=Input(shape=(max_len,1))
    co_tm=Concatenate()([emb]+[ad_pos])
    bi_gru=Bidirectional(GRU(50,return_sequences=True))(co_tm)
    out=Dense(25,activation='relu')(bi_gru)
    # out=Dropout(0.1)(out)
    out=TimeDistributed(Dense(n_tags,activation='softmax'))(out)
    model = Model(inputs=[e,ad_pos], outputs=[out])
    model.compile(loss='categorical_crossentropy',optimizer="rmsprop",metrics=['accuracy'])
    model.fit([X_tr,tr_pos], np.array(y_tr), batch_size=25, epochs=10, validation_data=([X_te,te_pos],np.array(y_te)), verbose=1)
    # model=Sequential()
    # model.add(Embedding(vocab_size,300,weights=[embedding_matrix],
    # 	input_length=30,
    # 	mask_zero=True,
    # 	trainable=False))
    # model.add(Bidirectional(GRU(50,return_sequences=True)))
    # model.add(Dense(25,activation='relu'))
    # model.add(Dropout(0.1))
    # model.add(TimeDistributed(Dense(n_tags,activation='softmax')))
    # model.compile(loss='categorical_crossentropy',
    # 	optimizer="rmsprop",
    # 	metrics=['accuracy'])
    # model.fit(X_tr, np.array(y_tr), batch_size=25, epochs=15, validation_split=0.1, verbose=1)
    # --- Decode BIO predictions back into aspect strings -----------------
    # NOTE(review): predictions are decoded for the TRAINING split (p1);
    # p2 (test split) is computed but never decoded.
    p1=model.predict([X_tr,tr_pos])
    p2=model.predict([X_te,te_pos])
    pred_aspects=[]
    for i in range(0,len(p1)):
        p=np.argmax(p1[i],axis=-1)
        temp1=[]
        flag=0
        string1=""
        for j in range(0,len(p)):
            if(idx2word[X_tr[i][j]]=="endpad"):
                break
            if(tag_list[p[j]]=='B'):
                string1+=idx2word[X_tr[i][j]]+" "
                if(flag==0):
                    flag=1
            elif(tag_list[p[j]]=='I'):
                string1+=idx2word[X_tr[i][j]]+" "
            elif(tag_list[p[j]]=='O'):
                # An 'O' closes the current span, if any.
                if(string1!=""):
                    temp1.append(string1)
                    string1=""
                    flag=0
        pred_aspects.append(temp1)
    # print(aspects[:-validation_size][69])
    for i in range(0,20):
        print(aspects[i],pred_aspects[i])
    # p=np.argmax(p,axis=-1)
    # true_p=np.argmax(y_tr[69],axis=-1)
    # for i in range(0,len(p)):
    # 	print(true_p[i],p[i])
    # for w, pred in zip(X_tr[69], p):
    # 	print(idx2word[w], tag_list[pred])
65,864 | Rama-007/Aspect_Based_Sentiment_Analysis | refs/heads/master | /category_classifier_svm.py | import numpy as np
import nltk,random
from sklearn.multiclass import OneVsRestClassifier
from sklearn.svm import LinearSVC
from sklearn.model_selection import KFold, cross_val_score
# from autocorrect import spell
from sklearn.preprocessing import MultiLabelBinarizer
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.pipeline import Pipeline
from sklearn.metrics import f1_score
from collections import defaultdict
# Module-level state populated by classification1():
D=defaultdict(list)   # category -> training texts belonging to that category
L=defaultdict(list)   # category -> polarity labels aligned with D[category]
cls={}                # category -> fitted per-category polarity Pipeline
def classification1(data,labels,labels_polar,kk,test_labels,test_labels_polar):
    """Multi-label aspect-category classification plus per-category polarity.

    Trains a TF-IDF + OneVsRest(LinearSVC) pipeline over `data`/`labels`,
    prints the sample-averaged F1 on `kk`/`test_labels`, and returns, for
    each text in `kk`, a list of (category, polarity) pairs predicted by
    the per-category polarity classifiers stored in the module-level `cls`.
    """
    # Group training texts and their polarity labels by aspect category
    # (accumulated in the module-level D and L).
    for text, cats, polars in zip(data, labels, labels_polar):
        for cat_name, polar in zip(cats, polars):
            D[cat_name].append(text)
            L[cat_name].append(polar)

    mlb = MultiLabelBinarizer()
    y = mlb.fit_transform(labels)
    y_test = mlb.transform(test_labels)
    print(mlb.classes_)

    classifier = Pipeline([('tfidf', TfidfVectorizer(min_df=1,ngram_range=(1, 1))),
                           ('clf', OneVsRestClassifier(LinearSVC()))])

    # One polarity classifier per category, trained on that category's texts.
    for cat_name in D.keys():
        cls[cat_name] = Pipeline([('tfidf', TfidfVectorizer(min_df=1,ngram_range=(1, 1))),
                                  ('clf', OneVsRestClassifier(LinearSVC()))])
        cls[cat_name].fit(D[cat_name], L[cat_name])

    classifier.fit(data, y)
    te_pred = classifier.predict(kk)
    print("FSCORE", f1_score(te_pred, y_test, average='samples'))

    # Predicted category tuples per test text; empty predictions fall back
    # to the catch-all category.
    cat = []
    for text in kk:
        predicted = mlb.inverse_transform(classifier.predict([text]))[0]
        if not predicted:
            predicted = (u'anecdotes/miscellaneous',)
        cat.append(predicted)

    # Pair each predicted category with its polarity prediction.
    return [[(c, cls[c].predict([text])[0]) for c in cats]
            for text, cats in zip(kk, cat)]
| {"/parser.py": ["/category_classifier_svm.py", "/sentiment_analysis.py", "/final.py"]} |
65,865 | Rama-007/Aspect_Based_Sentiment_Analysis | refs/heads/master | /category_classifier.py | import numpy as np
import nltk,random
from sklearn.multiclass import OneVsRestClassifier
from sklearn.svm import LinearSVC
from sklearn.model_selection import KFold, cross_val_score
# from autocorrect import spell
from sklearn.preprocessing import MultiLabelBinarizer
import keras
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten, GRU, Embedding, LSTM, Input
from keras.layers import Conv2D, MaxPooling2D, BatchNormalization
from keras.preprocessing.sequence import pad_sequences
from keras.models import Model
from keras import backend as K
from keras.preprocessing.text import Tokenizer
# Load 300-d GloVe vectors into an in-memory {word: ndarray} lookup.
# Fix: the original left the file handle open for the life of the process;
# a `with` block closes it as soon as the vectors are read.
glove_emb={}
with open("glove.6B/glove.6B.300d.txt","r") as fp:
    for line in fp:
        temp=line.split(" ")
        glove_emb[temp[0]]=np.asarray([float(i) for i in temp[1:]])
def classification(data,labels):
    """Train an LSTM multi-label category classifier over frozen GloVe embeddings.

    data   : list[str] raw sentences (re-tokenised in place with NLTK).
    labels : per-sentence label collections, binarised with MultiLabelBinarizer.
    Trains on a shuffled 80/20 split, prints test loss/accuracy, returns None.
    """
    # Normalise tokenisation so the Keras Tokenizer sees NLTK's tokens.
    for i in range(0,len(data)):
        tokens = nltk.word_tokenize(data[i])
        data[i] = ' '.join(tokens)
    y = MultiLabelBinarizer().fit_transform(labels)
    tokenizer = Tokenizer()
    tokenizer.fit_on_texts(data)
    sequences = tokenizer.texts_to_sequences(data)
    word_index = tokenizer.word_index
    print('Found %s unique tokens.' % len(word_index))
    data = pad_sequences(sequences, maxlen=30)
    print('Shape of data tensor:', data.shape)
    # Shuffle, then hold out the last 20% for testing.
    indices = np.arange(data.shape[0])
    np.random.shuffle(indices)
    data = data[indices]
    y = y[indices]
    validation_size = int(0.2*data.shape[0])
    X_train = data[:-validation_size]
    y_train = y[:-validation_size]
    X_test = data[-validation_size:]
    y_test = y[-validation_size:]
    print(X_train.shape, X_test.shape)
    # Fix: Keras Tokenizer indices are 1-based, so the embedding table needs
    # len(word_index) + 1 rows (row 0 is the padding index). The original
    # len(word_index) sizing made index len(word_index) out of range and
    # silently dropped the last vocabulary word's vector.
    vocab_size = len(word_index) + 1
    embedding_matrix = np.zeros((vocab_size, 300))
    for word, i in word_index.items():
        if word in glove_emb:
            embedding_matrix[i] = glove_emb[word]
    model = Sequential()
    model.add(Embedding(vocab_size, 300, weights=[embedding_matrix],
                        input_length=30,
                        mask_zero=True,
                        trainable=False))
    model.add(LSTM(50))
    model.add(Dense(25, activation='relu'))
    model.add(Dropout(0.5))
    # Sigmoid + binary cross-entropy: independent per-label probabilities.
    model.add(Dense(5, activation='sigmoid'))
    model.compile(loss='binary_crossentropy',
                  optimizer=keras.optimizers.Adam(),
                  metrics=['accuracy'])
    model.fit(X_train, y_train, batch_size=25, epochs=25, verbose=1, validation_split=0.1)
    loss, acc = model.evaluate(X_test, y_test, verbose=0)
    print(loss, acc)
    return
65,866 | Rama-007/Aspect_Based_Sentiment_Analysis | refs/heads/master | /generate_ontology.py |
from collections import defaultdict
# Module-level accumulators filled by ontology():
ontology_dict=defaultdict(list)   # category -> list of frequent nouns
noun_frequency=defaultdict(int)   # noun -> corpus-wide frequency
category_list=[]                  # flat list of every category label seen
import nltk
def ontology(data,labels):
    """Build {category: [frequent nouns]} from the labelled sentences.

    First pass counts every noun (POS tag containing 'NN') in the corpus;
    second pass attaches nouns occurring at least 5 times to each category
    of the sentences they appear in. Results accumulate in the module-level
    ontology_dict, which is also returned.
    """
    for label_group in labels:
        category_list.extend(label_group)
    category_list1 = list(set(category_list))  # NOTE(review): computed but never used

    # Pass 1: corpus-wide noun frequencies.
    for text in data:
        tagged = nltk.pos_tag(nltk.word_tokenize(text))
        for token, tag in tagged:
            if 'NN' in tag:
                noun_frequency[token] += 1

    # Pass 2: map frequent nouns onto each sentence's categories.
    for text, categories in zip(data, labels):
        tagged = nltk.pos_tag(nltk.word_tokenize(text))
        for token, tag in tagged:
            if 'NN' in tag and noun_frequency[token] >= 5:
                for category in categories:
                    if token not in ontology_dict[category]:
                        ontology_dict[category].append(token)
    return ontology_dict
| {"/parser.py": ["/category_classifier_svm.py", "/sentiment_analysis.py", "/final.py"]} |
65,867 | Rama-007/Aspect_Based_Sentiment_Analysis | refs/heads/master | /final.py | import nltk
from collections import defaultdict
import numpy as np
import keras
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten, GRU, Embedding, Concatenate, LSTM, Input, Bidirectional, TimeDistributed
from keras.layers import Conv2D, MaxPooling2D, BatchNormalization
from keras.preprocessing.sequence import pad_sequences
from keras.models import Model
from keras import backend as K
from keras.preprocessing.text import Tokenizer
from keras.utils import to_categorical
from sklearn.model_selection import train_test_split
from keras.models import load_model
import keras.models
import random as rn
import tensorflow as tf
import os
# Pin every RNG (hash seed, NumPy, Python, TensorFlow) for reproducible runs.
os.environ['PYTHONHASHSEED'] = '0'
np.random.seed(42)
rn.seed(12345)
tf.set_random_seed(1234)
# Load 300-d GloVe vectors into {word: ndarray}.
# NOTE(review): fp is never closed; harmless in a script, but a `with`
# block would be cleaner.
fp=open("glove.6B/glove.6B.300d.txt","r")
glove_emb={}
for line in fp:
    temp=line.split(" ")
    glove_emb[temp[0]]=np.asarray([float(i) for i in temp[1:]])
# Accumulators filled by neural_extractor1():
sentences=[]                      # list of [(word, POS, BIO-tag), ...] per sentence
parts_of_speech=defaultdict(int)  # set-like registry of every POS tag seen
def neural_extractor1(data,categories,aspects,text_to_predict):
    """Train a BiGRU BIO tagger over frozen GloVe embeddings and return the
    aspect phrases predicted for the last 20% of `data`.

    NOTE(review): indentation reconstructed for this block. `categories` and
    `text_to_predict` are accepted but never read — confirm with callers.
    """
    # --- Build (word, POS, BIO-tag) triples for every training sentence ---
    for i in range(0,len(data)):
        flag=0
        temp_sent=[]
        text=data[i]
        words=nltk.word_tokenize(text)
        pos=[]
        for word in nltk.pos_tag(words):
            parts_of_speech[word[1]]=1  # register this POS tag (module-level)
            pos.append(word[1])
        tags=['O' for ff in range(0,len(words))]
        # Align each annotated aspect phrase with the token sequence and
        # mark it B (first word) / I (continuation).
        for aspect in aspects[i]:
            asp_words=nltk.word_tokenize(aspect.lower())
            j=0;k=0;
            while(k<len(asp_words)):
                while(j<len(words)):
                    if(asp_words[k]==words[j] and tags[j]=='O'):
                        if(k==0):
                            tags[j]='B'
                        else:
                            tags[j]='I'
                        k+=1
                        if(k>=len(asp_words)):
                            break
                    j+=1
                k+=1
        for ii in range(0,len(words)):
            temp_sent.append((words[ii],pos[ii],tags[ii]))
        sentences.append(temp_sent)
    print(len(sentences))
    # Re-join NLTK tokens so the Keras Tokenizer splits identically.
    for i in range(0,len(data)):
        tokens = nltk.word_tokenize(data[i])
        string=' '.join(tokens)
        data[i]=string
    data.append("ENDPAD")  # ensures the padding token enters the vocabulary
    tokenizer=Tokenizer()
    tokenizer.fit_on_texts(data)
    sequences=tokenizer.texts_to_sequences(data)
    word_index=tokenizer.word_index
    # Drop the trailing ENDPAD pseudo-sentence; pad real ones to 50 tokens.
    X=pad_sequences(sequences[:-1],maxlen=50,padding="post", value=word_index["endpad"])
    validation_size=int(0.2*X.shape[0])
    n_words=len(word_index)
    tag_list=['B','I','O','P']  # P = padding tag
    n_tags=len(tag_list)
    # GloVe embedding matrix. NOTE(review): Keras word indices are 1-based,
    # so n_words rows look one short (index n_words is out of range and the
    # last word's vector is skipped by the guard below) — confirm/fix upstream.
    embedding_matrix = np.zeros((n_words, 300))
    for word,i in word_index.items():
        if(i>=len(word_index)):
            continue
        if word in glove_emb:
            embedding_matrix[i]=glove_emb[word]
    max_len=50
    tag2idx={t:i for i,t in enumerate(tag_list)}
    idx2word={t:i for i,t in word_index.items()}  # maps index -> word
    pos2idx={t:i for i,t in enumerate(parts_of_speech.keys())}
    # Targets: one-hot BIO tags per token, padded with 'P'.
    y = [[tag2idx[w[2]] for w in s] for s in sentences]
    y = pad_sequences(maxlen=max_len, sequences=y, padding="post", value=tag2idx["P"])
    y = [to_categorical(i, num_classes=n_tags) for i in y]
    # Auxiliary POS-id input, shaped (max_len, 1) per sentence.
    pos=[[pos2idx[w[1]] for w in s] for s in sentences]
    pos1=pad_sequences(maxlen=max_len, sequences=pos, padding="post", value=len(parts_of_speech.keys())+1)
    pos=np.asarray([np.reshape(i,(max_len,1)) for i in pos1])
    # Last 20% serves as validation data AND as the prediction set below.
    X_tr=X[:-validation_size]
    tr_pos=pos[:-validation_size]
    y_tr=y[:-validation_size]
    X_te=X[-validation_size:]
    te_pos=pos[-validation_size:]
    y_te=y[-validation_size:]
    X_to_predict=X[-validation_size:]
    pos_to_predict=pos[-validation_size:]
    vocab_size=len(word_index)
    # --- Model: frozen GloVe embedding -> BiGRU -> per-token softmax ---
    e=Input(shape=(max_len,))
    emb=Embedding(vocab_size,300,weights=[embedding_matrix],input_length=50,mask_zero=True,trainable=False)(e)
    ad_pos=Input(shape=(max_len,1))
    # NOTE(review): co_tm is computed but unused — bi_gru reads `emb`, so the
    # ad_pos input never feeds the network despite being a model input.
    co_tm=Concatenate()([emb]+[ad_pos])
    bi_gru=Bidirectional(GRU(50,return_sequences=True))(emb)
    out=Dense(25,activation='relu')(bi_gru)
    out=TimeDistributed(Dense(n_tags,activation='softmax'))(out)
    model = Model(inputs=[e,ad_pos], outputs=[out])
    model.compile(loss='categorical_crossentropy',optimizer="rmsprop",metrics=['accuracy'])
    # NOTE(review): trains on the FULL X/pos, which includes the held-out
    # rows used as validation and prediction data below.
    model.fit([X,pos], np.array(y), batch_size=25, epochs=20, validation_data=([X_te,te_pos],np.array(y_te)), verbose=1)
    pos_tp=np.asarray([np.asarray([1 for i in range(0,50)]).reshape(50,1)])  # NOTE(review): unused
    p1=model.predict([X_to_predict,pos_to_predict])
    # Decode BIO predictions back into aspect phrase strings.
    pred_aspects=[]
    for i in range(0,len(p1)):
        p=np.argmax(p1[i],axis=-1)
        temp1=[]
        flag=0
        string1=""
        for j in range(0,len(p)):
            if(idx2word[X_to_predict[i][j]]=="endpad"):
                break  # padding reached: rest of the row is filler
            if(tag_list[p[j]]=='B'):
                string1+=idx2word[X_to_predict[i][j]]+" "
                if(flag==0):
                    flag=1
            elif(tag_list[p[j]]=='I'):
                string1+=idx2word[X_to_predict[i][j]]+" "
            elif(tag_list[p[j]]=='O'):
                # An 'O' closes any phrase currently being accumulated.
                if(string1!=""):
                    temp1.append(string1)
                    string1=""
                    flag=0
        pred_aspects.append(temp1)
    return pred_aspects
# print(aspects[:-validation_size][69])
# for i in range(0,20):
# print(aspects[i],pred_aspects[i])
# p=np.argmax(p,axis=-1)
# true_p=np.argmax(y_tr[69],axis=-1)
# for i in range(0,len(p)):
# print(true_p[i],p[i])
#for w, pred in zip(X_to_predict[0], p1[0]):
# print(idx2word[w], tag_list[pred]) | {"/parser.py": ["/category_classifier_svm.py", "/sentiment_analysis.py", "/final.py"]} |
65,872 | PedroQuintella/esw_cinema | refs/heads/main | /application/urls.py | from django.urls import path
from rest_framework.routers import SimpleRouter
from .views import IndexView, SobreView, FilmesView, ContatoView, FilmeDetalheView, DadosGraficoFilmesView, \
RelatorioFilmesView, FilmeViewSet, GeneroViewSet, SessaoViewSet, UsuarioViewSet, SalaViewSet, AssentoViewSet
# REST API router: CRUD endpoints for every resource.
# NOTE(review): router.urls is not appended to urlpatterns here — presumably
# the project-level URLconf includes the router; confirm.
router = SimpleRouter()
router.register('filmes', FilmeViewSet)
router.register('generos', GeneroViewSet)
router.register('sessoes', SessaoViewSet)
router.register('usuarios', UsuarioViewSet)
router.register('salas', SalaViewSet)
router.register('assentos', AssentoViewSet)

# Server-rendered pages plus the PDF report and chart-data endpoints.
urlpatterns = [
    path('', IndexView.as_view(), name='index'),
    path('sobre/', SobreView.as_view(), name='sobre'),
    path('filmes/', FilmesView.as_view(), name='filmes'),
    path('contato/', ContatoView.as_view(), name='contato'),
    path('filme-detalhe/<int:id>/', FilmeDetalheView.as_view(), name='filme-detalhe'),
    path('relatorio-filmes/', RelatorioFilmesView.as_view(), name='relatorio-filmes'),
    path('dados-grafico-filmes/', DadosGraficoFilmesView.as_view(), name='dados-grafico-filmes'),
]
| {"/application/urls.py": ["/application/views.py"], "/application/views.py": ["/application/models.py", "/application/serializers.py"], "/application/migrations/0001_initial.py": ["/application/models.py"], "/application/admin.py": ["/application/models.py"], "/application/serializers.py": ["/application/models.py"]} |
65,873 | PedroQuintella/esw_cinema | refs/heads/main | /application/views.py | # Create your views here.
from django.views.generic import TemplateView, ListView, FormView
from .models import Filme, Sessao, Genero, Usuario, Assento, Sala
from django_weasyprint import WeasyTemplateView
from django.core.files.storage import FileSystemStorage
from django.template.loader import render_to_string
from django.http import HttpResponse
from weasyprint import HTML
from django.utils.translation import gettext as _
from django.utils import translation
from .forms import ContatoForm
from django.urls import reverse_lazy
from django.contrib import messages
from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework import permissions
from rest_framework import viewsets
from django.db.models import Count
from chartjs.views.lines import BaseLineChartView
from . import serializers
from application.serializers import GeneroSerializer, FilmeSerializer, UsuarioSerializer, SalaSerializer, SessaoSerializer, AssentoSerializer
class IndexView(TemplateView):
    """Landing page: newest movies, latest releases and the full catalogue."""

    template_name = 'index.html'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        lang = translation.get_language()
        recent = Filme.objects.order_by('-dataEstreia')
        context['lang'] = lang
        context['filmes'] = recent[0:4]
        context['lancamentos'] = recent[0:6]
        context['filmesDisponiveis'] = recent.all()
        translation.activate(lang)
        return context
class SobreView(TemplateView):
    """Static 'about' page with a latest-releases strip."""

    template_name = 'about.html'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        latest = Filme.objects.order_by('-dataEstreia')[0:6]
        context['lancamentos'] = latest
        return context
class RelatorioFilmesView(WeasyTemplateView):
    # Renders the movie report template to PDF with WeasyPrint and streams it
    # back inline. The PDF is materialised under /tmp first.
    def get(self, request, *args, **kwargs):
        """Return relatorio-filmes.pdf listing all movies, newest premiere first."""
        filmes = Filme.objects.order_by('-dataEstreia').all()
        html_string = render_to_string('relatorio-filmes.html', {'filmes': filmes})
        # base_url lets WeasyPrint resolve relative static/media URLs.
        html = HTML(string=html_string, base_url=request.build_absolute_uri())
        html.write_pdf(target='/tmp/relatorio-filmes.pdf')
        fs = FileSystemStorage('/tmp')
        with fs.open('relatorio-filmes.pdf') as pdf:
            response = HttpResponse(pdf, content_type='application/pdf')
            response['Content-Disposition'] = 'inline; filename="relatorio-filmes.pdf"'
        return response
class FilmesView(ListView):
    """Paginated movie catalogue, newest premiere first."""

    template_name = 'movies.html'
    model = Filme
    paginate_by = 12
    ordering = ['dataEstreia']  # superseded by get_queryset below

    def get_queryset(self):
        return Filme.objects.all().order_by('-dataEstreia')
class ContatoView(FormView):
    # Contact page: a valid submission sends an e-mail and flashes a success
    # message; an invalid one flashes an error. Redirects back to itself.
    template_name = 'contact.html'
    form_class = ContatoForm
    success_url = reverse_lazy('contato')

    def get_context_data(self, **kwargs):
        context = super(ContatoView, self).get_context_data(**kwargs)
        return context

    def form_valid(self, form, *args, **kwargs):
        form.send_mail()
        messages.success(self.request, _('E-mail enviado com sucesso'), extra_tags='success')
        return super(ContatoView, self).form_valid(form, *args, **kwargs)

    def form_invalid(self, form, *args, **kwargs):
        messages.error(self.request, _('Falha ao enviar e-mail'), extra_tags='danger')
        return super(ContatoView, self).form_invalid(form, *args, **kwargs)
class FilmeDetalheView(ListView):
    """Movie detail page: the movie itself plus its paginated sessions."""

    template_name = 'movie-detail.html'
    model = Sessao
    paginate_by = 9
    ordering = ['data']

    def get_context_data(self, **kwargs):
        context = super(FilmeDetalheView, self).get_context_data(**kwargs)
        id = self.kwargs['id']
        # Fix: the original assigned the bound method `.first` (missing call
        # parentheses), leaving the template to invoke it; resolve the movie
        # here so the context holds the object itself.
        context['filme'] = Filme.objects.filter(id=id).first()
        context['lancamentos'] = Filme.objects.order_by('-dataEstreia')[0:6]
        return context

    def get_queryset(self):
        """Sessions of the movie given by the `id` URL kwarg, soonest first."""
        id = self.kwargs['id']
        return Sessao.objects.filter(filme_id=id).order_by('data')
class DadosGraficoFilmesView(BaseLineChartView):
    """Chart.js data feed: movie counts per genre, in genre-id order."""

    def get_labels(self):
        """One label per genre."""
        return [genero.nome for genero in Genero.objects.order_by('id')]

    def get_data(self):
        """Single dataset: the number of movies in each genre."""
        annotated = Genero.objects.order_by('id').annotate(total=Count('filme'))
        counts = [int(linha.total) for linha in annotated]
        return [counts]
class FilmeViewSet(viewsets.ModelViewSet):
    """CRUD for movies plus a nested /filmes/{pk}/sessoes/ listing."""

    permission_classes = (permissions.DjangoModelPermissions, )
    queryset = Filme.objects.all()
    serializer_class = FilmeSerializer

    @action(detail=True, methods=['get'])
    def sessoes(self, request, pk=None):
        """List (paginated when applicable) the sessions of movie `pk`."""
        queryset = Sessao.objects.filter(filme_id=pk)
        page = self.paginate_queryset(queryset)
        if page is None:
            return Response(SessaoSerializer(queryset, many=True).data)
        return self.get_paginated_response(SessaoSerializer(page, many=True).data)
class GeneroViewSet(viewsets.ModelViewSet):
    """CRUD for genres plus a nested /generos/{pk}/filmes/ listing."""

    permission_classes = (permissions.DjangoModelPermissions,)
    queryset = Genero.objects.all()
    serializer_class = GeneroSerializer

    @action(detail=True, methods=['get'])
    def filmes(self, request, pk=None):
        """List (paginated when applicable) the movies of genre `pk`."""
        queryset = Filme.objects.filter(genero_id=pk)
        page = self.paginate_queryset(queryset)
        if page is None:
            return Response(FilmeSerializer(queryset, many=True).data)
        return self.get_paginated_response(FilmeSerializer(page, many=True).data)
# Plain CRUD endpoints with DRF's default permissions.
class SessaoViewSet(viewsets.ModelViewSet):
    queryset = Sessao.objects.all()
    serializer_class = SessaoSerializer

class UsuarioViewSet(viewsets.ModelViewSet):
    queryset = Usuario.objects.all()
    serializer_class = UsuarioSerializer

class SalaViewSet(viewsets.ModelViewSet):
    queryset = Sala.objects.all()
    serializer_class = SalaSerializer

class AssentoViewSet(viewsets.ModelViewSet):
    queryset = Assento.objects.all()
    serializer_class = AssentoSerializer
| {"/application/urls.py": ["/application/views.py"], "/application/views.py": ["/application/models.py", "/application/serializers.py"], "/application/migrations/0001_initial.py": ["/application/models.py"], "/application/admin.py": ["/application/models.py"], "/application/serializers.py": ["/application/models.py"]} |
65,874 | PedroQuintella/esw_cinema | refs/heads/main | /application/migrations/0003_auto_20210530_2329.py | # Generated by Django 2.2.19 on 2021-05-31 02:29
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add the Genero model and a nullable Filme.genero foreign key."""

    dependencies = [
        ('application', '0002_auto_20210525_1918'),
    ]

    operations = [
        migrations.CreateModel(
            name='Genero',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nome', models.CharField(max_length=200, null=True, unique=True, verbose_name='Nome')),
            ],
            options={
                'verbose_name': 'Gênero',
                'verbose_name_plural': 'Gêneros',
            },
        ),
        migrations.AddField(
            model_name='filme',
            name='genero',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='application.Genero'),
        ),
    ]
| {"/application/urls.py": ["/application/views.py"], "/application/views.py": ["/application/models.py", "/application/serializers.py"], "/application/migrations/0001_initial.py": ["/application/models.py"], "/application/admin.py": ["/application/models.py"], "/application/serializers.py": ["/application/models.py"]} |
65,875 | PedroQuintella/esw_cinema | refs/heads/main | /venv/Scripts/django-admin.py | #!d:\pedro\documents\arquivos pedro\arquivos da faculdade\laboratório de programação com frameworks\projeto cinema\esw_cinema\venv\scripts\python.exe
from django.core import management
# Thin wrapper: delegate to Django's management CLI when run as a script.
if __name__ == "__main__":
    management.execute_from_command_line()
| {"/application/urls.py": ["/application/views.py"], "/application/views.py": ["/application/models.py", "/application/serializers.py"], "/application/migrations/0001_initial.py": ["/application/models.py"], "/application/admin.py": ["/application/models.py"], "/application/serializers.py": ["/application/models.py"]} |
65,876 | PedroQuintella/esw_cinema | refs/heads/main | /application/migrations/0001_initial.py | # Generated by Django 2.2.19 on 2021-04-27 23:47
import application.models
from django.db import migrations, models
import django.db.models.deletion
import stdimage.models
class Migration(migrations.Migration):
    """Initial schema: Assento, Compra, Filme, Sala, Usuario, Sessao and
    Ingresso, plus the foreign keys tying them together."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Assento',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('numero', models.IntegerField(verbose_name='Assento')),
                ('disponibilidade', models.CharField(choices=[('Disponível', 'Disponível'), ('Indisponível', 'Indisponível')], max_length=100, verbose_name='Disponibilidade')),
            ],
            options={
                'verbose_name': 'Assento',
                'verbose_name_plural': 'Assentos',
            },
        ),
        migrations.CreateModel(
            name='Compra',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('codigo', models.IntegerField(unique=True, verbose_name='Código')),
                ('estado', models.CharField(choices=[('AC', 'AC'), ('AL', 'AL'), ('AP', 'AP'), ('AM', 'AM'), ('BA', 'BA'), ('CE', 'CE'), ('DF', 'DF'), ('ES', 'ES'), ('GO', 'GO'), ('MA', 'MA'), ('MT', 'MT'), ('MS', 'MS'), ('MG', 'MG'), ('PA', 'PA'), ('PB', 'PB'), ('PR', 'PR'), ('PE', 'PE'), ('PI', 'PI'), ('RJ', 'RJ'), ('RN', 'RN'), ('RS', 'RS'), ('RO', 'RO'), ('RR', 'RR'), ('SC', 'SC'), ('SP', 'SP'), ('SE', 'SE'), ('TO', 'TO')], max_length=100, verbose_name='UF')),
                ('cidade', models.CharField(max_length=100, verbose_name='Cidade')),
                ('cep', models.CharField(help_text='Digite sem pontos e traços.', max_length=8, verbose_name='CEP')),
                ('endereco', models.CharField(max_length=200, verbose_name='Endereço')),
                ('valor', models.DecimalField(decimal_places=2, max_digits=6, verbose_name='Valor')),
            ],
            options={
                'verbose_name': 'Compra',
                'verbose_name_plural': 'Compras',
            },
        ),
        migrations.CreateModel(
            name='Filme',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('titulo', models.CharField(max_length=200, verbose_name='Título')),
                ('cartaz', stdimage.models.StdImageField(blank=True, null=True, upload_to=application.models.get_file_path, verbose_name='Cartaz')),
                ('duracao', models.CharField(help_text='Use este formato: 02h30min', max_length=8, verbose_name='Duração')),
                ('sinopse', models.TextField(max_length=1000, verbose_name='Sinopse')),
                ('trailer', models.URLField(blank=True, null=True, verbose_name='Trailer')),
                ('dataEstreia', models.DateField(blank=True, help_text='Use este formato: DD/MM/AAAA', null=True, verbose_name='Data de Estreia')),
            ],
            options={
                'verbose_name': 'Filme',
                'verbose_name_plural': 'Filmes',
            },
        ),
        migrations.CreateModel(
            name='Sala',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('numero', models.IntegerField(unique=True, verbose_name='Número')),
            ],
            options={
                'verbose_name': 'Sala',
                'verbose_name_plural': 'Salas',
            },
        ),
        migrations.CreateModel(
            name='Usuario',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nome', models.CharField(max_length=100, verbose_name='Nome')),
                ('cpf', models.CharField(help_text='Digite sem pontos e traços.', max_length=11, unique=True, verbose_name='CPF')),
                ('email', models.EmailField(max_length=254, unique=True, verbose_name='Email')),
                ('senha', models.CharField(max_length=30, verbose_name='Senha')),
            ],
            options={
                'verbose_name': 'Usuário',
                'verbose_name_plural': 'Usuários',
            },
        ),
        migrations.CreateModel(
            name='Sessao',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('data', models.DateField(help_text='Use este formato: DD/MM/AAAA', verbose_name='Data')),
                ('horario', models.TimeField(help_text='Use este formato: HH:MM', verbose_name='Horário')),
                ('filme', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='application.Filme')),
                ('sala', models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, to='application.Sala')),
            ],
            options={
                'verbose_name': 'Sessão',
                'verbose_name_plural': 'Sessões',
            },
        ),
        migrations.CreateModel(
            name='Ingresso',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('codigo', models.IntegerField(unique=True, verbose_name='Código')),
                ('assento', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='application.Assento')),
                ('compra', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='application.Compra')),
                ('sala', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='application.Sala')),
                ('sessao', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='application.Sessao')),
            ],
            options={
                'verbose_name': 'Ingresso',
                'verbose_name_plural': 'Ingressos',
            },
        ),
        migrations.AddField(
            model_name='compra',
            name='usuario',
            field=models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='application.Usuario'),
        ),
        migrations.AddField(
            model_name='assento',
            name='sala',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='application.Sala'),
        ),
    ]
| {"/application/urls.py": ["/application/views.py"], "/application/views.py": ["/application/models.py", "/application/serializers.py"], "/application/migrations/0001_initial.py": ["/application/models.py"], "/application/admin.py": ["/application/models.py"], "/application/serializers.py": ["/application/models.py"]} |
65,877 | PedroQuintella/esw_cinema | refs/heads/main | /application/migrations/0004_auto_20210607_2059.py | # Generated by Django 2.2.19 on 2021-06-07 23:59
from django.db import migrations
class Migration(migrations.Migration):
    """Set default ordering ['id'] on the Filme model."""

    dependencies = [
        ('application', '0003_auto_20210530_2329'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='filme',
            options={'ordering': ['id'], 'verbose_name': 'Filme', 'verbose_name_plural': 'Filmes'},
        ),
    ]
| {"/application/urls.py": ["/application/views.py"], "/application/views.py": ["/application/models.py", "/application/serializers.py"], "/application/migrations/0001_initial.py": ["/application/models.py"], "/application/admin.py": ["/application/models.py"], "/application/serializers.py": ["/application/models.py"]} |
65,878 | PedroQuintella/esw_cinema | refs/heads/main | /application/admin.py | from django.contrib import admin
# Register your models here.
from .models import Usuario, Genero, Filme, Compra, Sala, Sessao, Assento, Ingresso
# Django admin registrations; list_display controls the changelist columns.
@admin.register(Usuario)
class UsuarioAdmin(admin.ModelAdmin):
    list_display = ('nome', 'email')

@admin.register(Genero)
class GeneroAdmin(admin.ModelAdmin):
    list_display = ('nome',)

@admin.register(Filme)
class FilmeAdmin(admin.ModelAdmin):
    list_display = ('titulo', 'cartaz', 'duracao', 'genero', 'trailer', 'dataEstreia')

@admin.register(Compra)
class CompraAdmin(admin.ModelAdmin):
    list_display = ('codigo', 'usuario', 'estado', 'cidade', 'cep', 'endereco', 'valor')

@admin.register(Sala)
class SalaAdmin(admin.ModelAdmin):
    list_display = ('numero',)

@admin.register(Sessao)
class SessaoAdmin(admin.ModelAdmin):
    list_display = ('data', 'horario', 'filme', 'sala')

@admin.register(Assento)
class AssentoAdmin(admin.ModelAdmin):
    list_display = ('numero', 'sala')

@admin.register(Ingresso)
class IngressoAdmin(admin.ModelAdmin):
    list_display = ('codigo', 'sessao', 'sala', 'assento')
| {"/application/urls.py": ["/application/views.py"], "/application/views.py": ["/application/models.py", "/application/serializers.py"], "/application/migrations/0001_initial.py": ["/application/models.py"], "/application/admin.py": ["/application/models.py"], "/application/serializers.py": ["/application/models.py"]} |
65,879 | PedroQuintella/esw_cinema | refs/heads/main | /application/models.py | # Create your models here.
from django.db import models
from stdimage.models import StdImageField
import uuid
from django.utils.translation import gettext_lazy as _
def get_file_path(_instance, filename):
    """Build a collision-free upload name, keeping the original extension.

    Used as `upload_to` for image fields; `_instance` is ignored.
    """
    extension = filename.rsplit('.', 1)[-1]
    return f'{uuid.uuid4()}.{extension}'
class Usuario(models.Model):
    """Site user account.

    NOTE(review): `senha` stores the password in plain text — consider
    Django's auth framework / password hashing before production use.
    """
    nome = models.CharField(_('Nome'), max_length=100)
    cpf = models.CharField(_('CPF'), max_length=11, unique=True, help_text=_('Digite sem pontos e traços.'))
    email = models.EmailField(_('Email'), unique=True)
    senha = models.CharField(_('Senha'), max_length=30)

    class Meta:
        verbose_name = _('Usuário')
        verbose_name_plural = _('Usuários')

    def __str__(self):
        return self.nome

class Genero(models.Model):
    """Movie genre."""
    nome = models.CharField(_('Nome'), null=True, unique=True, max_length=200)

    class Meta:
        verbose_name = _('Gênero')
        verbose_name_plural = _('Gêneros')

    def __str__(self):
        return self.nome

class Filme(models.Model):
    """A movie; poster uploads get a uuid name via get_file_path."""
    titulo = models.CharField(_('Título'), max_length=200)
    cartaz = StdImageField(_('Cartaz'), null=True, blank=True, upload_to=get_file_path, variations={'thumb': {'width': 240, 'height': 356, 'crop': True}})
    duracao = models.CharField(_('Duração'), max_length=8, help_text=_('Use este formato: 02h30min'))
    sinopse = models.TextField(_('Sinopse'), max_length=1000)
    trailer = models.URLField(_('Trailer'), null=True, blank=True)
    dataEstreia = models.DateField(_('Data de Estreia'), null=True, blank=True, help_text=_('Use este formato: DD/MM/AAAA'))
    genero = models.ForeignKey(Genero, null=True, on_delete=models.DO_NOTHING)

    class Meta:
        verbose_name = _('Filme')
        verbose_name_plural = _('Filmes')
        ordering = ['id']

    def __str__(self):
        return self.titulo

class Compra(models.Model):
    """A purchase made by a user, with Brazilian billing address data."""
    codigo = models.IntegerField(_('Código'), unique=True)
    # Brazilian state (UF) abbreviations.
    OPCOES = (
        ('AC', 'AC'),
        ('AL', 'AL'),
        ('AP', 'AP'),
        ('AM', 'AM'),
        ('BA', 'BA'),
        ('CE', 'CE'),
        ('DF', 'DF'),
        ('ES', 'ES'),
        ('GO', 'GO'),
        ('MA', 'MA'),
        ('MT', 'MT'),
        ('MS', 'MS'),
        ('MG', 'MG'),
        ('PA', 'PA'),
        ('PB', 'PB'),
        ('PR', 'PR'),
        ('PE', 'PE'),
        ('PI', 'PI'),
        ('RJ', 'RJ'),
        ('RN', 'RN'),
        ('RS', 'RS'),
        ('RO', 'RO'),
        ('RR', 'RR'),
        ('SC', 'SC'),
        ('SP', 'SP'),
        ('SE', 'SE'),
        ('TO', 'TO'),
    )
    estado = models.CharField('UF', max_length=100, choices=OPCOES)
    cidade = models.CharField(_('Cidade'), max_length=100)
    cep = models.CharField(_('CEP'), max_length=8, help_text=_('Digite sem pontos e traços.'))
    endereco = models.CharField(_('Endereço'), max_length=200)
    valor = models.DecimalField(_('Valor'), max_digits=6, decimal_places=2)
    usuario = models.ForeignKey(Usuario, on_delete=models.DO_NOTHING)

    class Meta:
        verbose_name = _('Compra')
        verbose_name_plural = _('Compras')

    def __str__(self):
        return str(self.codigo)

class Sala(models.Model):
    """A screening room, identified by its number."""
    numero = models.IntegerField(_('Número'), unique=True)

    class Meta:
        verbose_name = _('Sala')
        verbose_name_plural = _('Salas')

    def __str__(self):
        return str(self.numero)

class Assento(models.Model):
    """A seat belonging to a room; deleted along with its room."""
    numero = models.IntegerField(_('Assento'))
    sala = models.ForeignKey(Sala, on_delete=models.CASCADE)

    class Meta:
        verbose_name = _('Assento')
        verbose_name_plural = _('Assentos')

    def __str__(self):
        return f'{self.numero} (Sala {self.sala})'

class Sessao(models.Model):
    """A screening of a movie in a room at a given date/time."""
    data = models.DateField(_('Data'), help_text=_('Use este formato: DD/MM/AAAA'))
    horario = models.TimeField(_('Horário'), help_text=_('Use este formato: HH:MM'))
    filme = models.ForeignKey(Filme, related_name='sessoes', on_delete=models.CASCADE)
    sala = models.ForeignKey(Sala, null=True, on_delete=models.SET_NULL)
    assentos = models.ManyToManyField(Assento)

    class Meta:
        verbose_name = _('Sessão')
        verbose_name_plural = _('Sessões')
        ordering = ['id']

    def __str__(self):
        return f'{self.data}, {self.horario}, {self.sala}, {self.filme}'

class Ingresso(models.Model):
    """A ticket linking a purchase to a session/room/seat."""
    codigo = models.IntegerField(_('Código'), unique=True)
    compra = models.ForeignKey(Compra, on_delete=models.DO_NOTHING)
    sessao = models.ForeignKey(Sessao, null=True, on_delete=models.SET_NULL)
    sala = models.ForeignKey(Sala, null=True, on_delete=models.SET_NULL)
    assento = models.ForeignKey(Assento, null=True, on_delete=models.SET_NULL)

    class Meta:
        verbose_name = _('Ingresso')
        verbose_name_plural = _('Ingressos')

    def __str__(self):
        return str(self.codigo)
| {"/application/urls.py": ["/application/views.py"], "/application/views.py": ["/application/models.py", "/application/serializers.py"], "/application/migrations/0001_initial.py": ["/application/models.py"], "/application/admin.py": ["/application/models.py"], "/application/serializers.py": ["/application/models.py"]} |
65,880 | PedroQuintella/esw_cinema | refs/heads/main | /application/migrations/0006_auto_20210607_2246.py | # Generated by Django 2.2.19 on 2021-06-08 01:46
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: add related_name='sessoes' to Sessao.filme."""
    dependencies = [
        ('application', '0005_auto_20210607_2227'),
    ]
    operations = [
        migrations.AlterField(
            model_name='sessao',
            name='filme',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sessoes', to='application.Filme'),
        ),
    ]
| {"/application/urls.py": ["/application/views.py"], "/application/views.py": ["/application/models.py", "/application/serializers.py"], "/application/migrations/0001_initial.py": ["/application/models.py"], "/application/admin.py": ["/application/models.py"], "/application/serializers.py": ["/application/models.py"]} |
65,881 | PedroQuintella/esw_cinema | refs/heads/main | /application/migrations/0008_remove_assento_disponibilidade.py | # Generated by Django 2.2.19 on 2021-06-11 20:20
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: drop the Assento.disponibilidade field."""
    dependencies = [
        ('application', '0007_sessao_assentos'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='assento',
            name='disponibilidade',
        ),
    ]
| {"/application/urls.py": ["/application/views.py"], "/application/views.py": ["/application/models.py", "/application/serializers.py"], "/application/migrations/0001_initial.py": ["/application/models.py"], "/application/admin.py": ["/application/models.py"], "/application/serializers.py": ["/application/models.py"]} |
65,882 | PedroQuintella/esw_cinema | refs/heads/main | /application/migrations/0005_auto_20210607_2227.py | # Generated by Django 2.2.19 on 2021-06-08 01:27
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: set default ordering and verbose names on Sessao."""
    dependencies = [
        ('application', '0004_auto_20210607_2059'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='sessao',
            options={'ordering': ['id'], 'verbose_name': 'Sessão', 'verbose_name_plural': 'Sessões'},
        ),
    ]
| {"/application/urls.py": ["/application/views.py"], "/application/views.py": ["/application/models.py", "/application/serializers.py"], "/application/migrations/0001_initial.py": ["/application/models.py"], "/application/admin.py": ["/application/models.py"], "/application/serializers.py": ["/application/models.py"]} |
65,883 | PedroQuintella/esw_cinema | refs/heads/main | /application/serializers.py | from rest_framework import serializers
from application.models import Usuario, Genero, Filme, Sala, Sessao, Assento
class GeneroSerializer(serializers.ModelSerializer):
    """Serializes Genero (film genre) records: id and name only."""
    class Meta:
        model = Genero
        fields = (
            'id',
            'nome'
        )
class FilmeSerializer(serializers.ModelSerializer):
    """Serializes Filme records, exposing related sessions as hyperlinks."""
    # sessoes = SessaoSerializer(many=True, read_only=True)
    # sessoes = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
    # Hyperlinked variant chosen over the two alternatives above: each
    # related session renders as a URL resolved via the 'sessao-detail' route.
    sessoes = serializers.HyperlinkedRelatedField(many=True, read_only=True, view_name='sessao-detail')
    class Meta:
        model = Filme
        fields = (
            'id',
            'titulo',
            'cartaz',
            'duracao',
            'sinopse',
            'trailer',
            'dataEstreia',
            'genero',
            'sessoes'
        )
class UsuarioSerializer(serializers.ModelSerializer):
    """Serializes Usuario records for the REST API.

    Adds a computed ``primeiro_nome`` field holding the user's first
    name, derived from ``nome``.
    """
    primeiro_nome = serializers.SerializerMethodField()
    class Meta:
        model = Usuario
        fields = (
            'id',
            'nome',
            'cpf',
            'email',
            # SECURITY: this exposes the raw 'senha' (password) field in API
            # responses; consider extra_kwargs = {'senha': {'write_only': True}}.
            'senha',
            'primeiro_nome'
        )
    def get_primeiro_nome(self, obj):
        """Return the first whitespace-separated word of ``nome``.

        Returns '' for an empty or whitespace-only name instead of
        raising IndexError (the previous code crashed on blank names).
        """
        partes = str(obj.nome).split()
        return partes[0] if partes else ''
class SalaSerializer(serializers.ModelSerializer):
    """Serializes Sala (room) records: id and room number."""
    class Meta:
        model = Sala
        fields = (
            'id',
            'numero'
        )
class SessaoSerializer(serializers.ModelSerializer):
    """Serializes Sessao records; 'assentos' is intentionally omitted."""
    class Meta:
        model = Sessao
        fields = (
            'id',
            'data',
            'horario',
            'filme',
            'sala'
        )
class AssentoSerializer(serializers.ModelSerializer):
    """Serializes Assento (seat) records: number plus owning room."""
    class Meta:
        model = Assento
        fields = (
            'id',
            'numero',
            'sala'
        )
| {"/application/urls.py": ["/application/views.py"], "/application/views.py": ["/application/models.py", "/application/serializers.py"], "/application/migrations/0001_initial.py": ["/application/models.py"], "/application/admin.py": ["/application/models.py"], "/application/serializers.py": ["/application/models.py"]} |
65,891 | orike122/pydeepspace | refs/heads/master | /components/vision.py | import time
from collections import deque
from typing import Deque, NamedTuple, Tuple
import hal
from networktables import NetworkTablesInstance
from pyswervedrive.chassis import SwerveChassis
from utilities.functions import rotate_vector
class Odometry(NamedTuple):
    """A timestamped chassis pose sample stored in Vision's history."""
    # Field-frame position and heading of the chassis.
    x: float
    y: float
    heading: float
    # time.monotonic() timestamp at which the sample was taken.
    t: float
class Vision:
    """NetworkTables bridge to the Raspberry Pi vision coprocessor.

    Keeps a short history of chassis odometry so a fiducial seen in the
    past can be projected into the robot's current pose, and maintains a
    ping/pong exchange with the Pi to estimate the clock offset between
    the two machines ("latency" here is that offset, stored in the
    '/vision/clock_offset' entry).
    """
    # Injected by magicbot at robot setup.
    chassis: SwerveChassis
    # NOTE: x and y are relative to the robot co-ordinate system, not the camera
    @property
    def fiducial_x(self) -> float:
        # Latest target x offset reported by the Pi (robot frame).
        return self.fiducial_x_entry.getDouble(0.0)
    @property
    def fiducial_y(self) -> float:
        # Latest target y offset reported by the Pi (robot frame).
        return self.fiducial_y_entry.getDouble(0.0)
    @property
    def fiducial_time(self) -> float:
        # Pi-side timestamp of the last detection; -1.0 means "never seen".
        return self.fiducial_time_entry.getDouble(-1.0)
    @property
    def ping_time(self) -> float:
        return self.ping_time_entry.getDouble(0.0)
    @ping_time.setter
    def ping_time(self, value: float) -> None:
        self.ping_time_entry.setDouble(value)
    @property
    def raspi_pong_time(self) -> float:
        # Pi-side clock value echoed back in response to our ping.
        return self.raspi_pong_time_entry.getDouble(0.0)
    @property
    def rio_pong_time(self) -> float:
        # Our ping timestamp echoed back by the Pi.
        return self.rio_pong_time_entry.getDouble(0.0)
    @property
    def latency(self) -> float:
        # Estimated rio-minus-raspi clock offset (see pong()).
        return self.latency_entry.getDouble(0.0)
    @latency.setter
    def latency(self, value: float) -> None:
        self.latency_entry.setDouble(value)
    @property
    def processing_time(self) -> float:
        # Age of the last detection, computed each loop in execute().
        return self.processing_time_entry.getDouble(0.0)
    @processing_time.setter
    def processing_time(self, value: float) -> None:
        self.processing_time_entry.setDouble(value)
    def __init__(self) -> None:
        self.last_pong = time.monotonic()
        # 50Hz control loop for 2 seconds
        self.odometry: Deque[Odometry] = deque(maxlen=50 * 2)
        self.ntinst = NetworkTablesInstance()
        if hal.isSimulation():
            self.ntinst.startTestMode(server=False)
        else:
            self.ntinst.startClient("10.47.74.6")  # Raspberry pi's IP
        self.ntinst.setUpdateRate(1)  # ensure our flush calls flush immediately
        self.fiducial_x_entry = self.ntinst.getEntry("/vision/fiducial_x")
        self.fiducial_y_entry = self.ntinst.getEntry("/vision/fiducial_y")
        self.fiducial_time_entry = self.ntinst.getEntry("/vision/fiducial_time")
        self.ping_time_entry = self.ntinst.getEntry("/vision/ping")
        self.raspi_pong_time_entry = self.ntinst.getEntry("/vision/raspi_pong")
        self.rio_pong_time_entry = self.ntinst.getEntry("/vision/rio_pong")
        self.latency_entry = self.ntinst.getEntry("/vision/clock_offset")
        self.processing_time_entry = self.ntinst.getEntry("/vision/processing_time")
        self.camera_entry = self.ntinst.getEntry("/vision/game_piece")
    def execute(self) -> None:
        """Store the current odometry in the queue. Allows projection of target into current position."""
        # Newest samples at index 0; old ones fall off the right (maxlen).
        self.odometry.appendleft(
            Odometry(
                self.chassis.odometry_x,
                self.chassis.odometry_y,
                self.chassis.imu.getAngle(),
                time.monotonic(),
            )
        )
        self.ping()
        self.pong()
        # Convert the Pi's detection timestamp into our clock domain.
        vision_time = self.fiducial_time + self.latency
        self.processing_time = time.monotonic() - vision_time
        self.ntinst.flush()
    @property
    def fiducial_in_sight(self) -> bool:
        # True while the last detection is less than 100 ms old.
        return time.monotonic() - (self.fiducial_time + self.latency) < 0.1
    def get_fiducial_position(self) -> Tuple[float, float, float]:
        """Return the position of the retroreflective fiducials relative to the current robot pose."""
        vision_time = self.fiducial_time + self.latency
        # Subtract how far we have moved since the picture was taken.
        vision_delta_x, vision_delta_y, vision_delta_heading = self._get_pose_delta(
            vision_time
        )
        x = self.fiducial_x - vision_delta_x
        y = self.fiducial_y - vision_delta_y
        return x, y, vision_delta_heading
    def _get_pose_delta(self, t: float) -> Tuple[float, float, float]:
        """Search the stored odometry and return the position difference between now and the specified time."""
        current = previous = self.odometry[0]
        # Deque is newest-first: walk back until we pass time t.
        for odom in self.odometry:
            if odom.t >= t:
                previous = odom
            else:
                break
        x = current.x - previous.x
        y = current.y - previous.y
        # Rotate to the robot frame of reference
        # Use the previous heading - that's where we were when the picture was taken
        heading = previous.heading
        robot_x, robot_y = rotate_vector(x, y, -heading)
        return robot_x, robot_y, current.heading - heading
    def ping(self) -> None:
        """Send a ping to the RasPi to determine the connection latency."""
        self.ping_time = time.monotonic()
    def pong(self) -> None:
        """Receive a pong from the RasPi to determine the connection latency."""
        if abs(self.rio_pong_time - self.last_pong) > 1e-4:  # Floating point comparison
            alpha = 0.9  # Exponential averaging
            # New measurement of the clock offset is weighted by alpha.
            self.latency = (1 - alpha) * self.latency + alpha * (
                self.rio_pong_time - self.raspi_pong_time
            )
            self.last_pong = self.rio_pong_time
    def use_hatch(self) -> None:
        """Switch to the hatch camera."""
        self.camera_entry.setDouble(0)
    def use_cargo(self) -> None:
        """Switch to the cargo camera."""
        self.camera_entry.setDouble(1)
| {"/autonomous/autonomous.py": ["/automations/alignment.py", "/automations/cargo.py", "/components/cargo.py", "/components/hatch.py", "/components/vision.py", "/utilities/pure_pursuit.py"], "/tests/test_vision.py": ["/components/vision.py"], "/automations/alignment.py": ["/automations/cargo.py", "/automations/hatch.py", "/components/vision.py"], "/robot.py": ["/automations/alignment.py", "/automations/cargo.py", "/automations/climb.py", "/automations/hatch.py", "/components/cargo.py", "/components/hatch.py", "/components/vision.py", "/components/climb.py"], "/automations/climb.py": ["/components/climb.py", "/components/cargo.py"], "/automations/hatch.py": ["/components/hatch.py"], "/autonomous/test.py": ["/utilities/pure_pursuit.py"], "/components/cargo.py": ["/components/vision.py"], "/automations/cargo.py": ["/components/cargo.py", "/components/vision.py"]} |
65,892 | orike122/pydeepspace | refs/heads/master | /autonomous/autonomous.py | from dataclasses import dataclass
import math
from magicbot.state_machine import AutonomousStateMachine, state
import wpilib
from automations.alignment import (
HatchDepositAligner,
HatchIntakeAligner,
CargoDepositAligner,
)
from automations.cargo import CargoManager
from components.cargo import CargoManipulator
from components.hatch import Hatch
from components.vision import Vision
from pyswervedrive.chassis import SwerveChassis
from utilities.navx import NavX
from utilities.pure_pursuit import PurePursuit, Waypoint, insert_trapezoidal_waypoints
@dataclass
class Coordinates:
    """Named field waypoints for one side of the field.

    Two instances exist below: ``left_coordinates`` (authored by hand)
    and ``right_coordinates`` (each waypoint mirrored via reflect()).
    """
    start_pos: Waypoint
    start_pos_cargo: Waypoint
    front_cargo_ship: Waypoint
    setup_loading_bay: Waypoint
    loading_bay: Waypoint
    cargo_depot_setup: Waypoint
    cargo_depot: Waypoint
    side_cargo_ship_alignment_point: Waypoint
    side_cargo_ship_middle: Waypoint
    side_cargo_ship: Waypoint
    cargo_endpoint: Waypoint
# Hand-authored waypoints for the left side of the field. Positions are
# offset by half the chassis footprint so the waypoint is the robot centre.
left_coordinates = Coordinates(
    start_pos=Waypoint(
        1.3 + SwerveChassis.LENGTH / 2, 0 + SwerveChassis.WIDTH / 2, 0, 2
    ),
    start_pos_cargo=Waypoint(
        1.3 + SwerveChassis.LENGTH / 2, 1.63 - SwerveChassis.WIDTH / 2, 0, 2
    ),
    front_cargo_ship=Waypoint(5.5 - SwerveChassis.LENGTH / 2, 0.3, 0, 1.5),
    setup_loading_bay=Waypoint(3.3, 3, math.pi, 2),
    loading_bay=Waypoint(0.2 + SwerveChassis.LENGTH / 2, 3.4, math.pi, 1.5),
    cargo_depot_setup=Waypoint(2.4, 3, -math.atan2(2.4 - 1.2, 3 - 2.3), 1),
    cargo_depot=Waypoint(1.2, 2.3, -math.atan2(2.4 - 1.2, 3 - 2.3), 0.5),
    side_cargo_ship_alignment_point=Waypoint(
        6.6, 1.8 + SwerveChassis.WIDTH / 2, -math.pi / 2, 1.5
    ),
    side_cargo_ship_middle=Waypoint(
        (6.6 + 7.4) / 2, 0.8 + SwerveChassis.WIDTH / 2, math.pi / 2, 0.75
    ),
    side_cargo_ship=Waypoint(6.6, 0.8 + SwerveChassis.WIDTH / 2, -math.pi / 2, 1),
    cargo_endpoint=Waypoint(7.4, 1.3, 0, 3),
)
# Right side is the mirror image: Waypoint.reflect() negates y and theta.
right_coordinates = Coordinates(
    start_pos=left_coordinates.start_pos.reflect(),
    start_pos_cargo=left_coordinates.start_pos_cargo.reflect(),
    front_cargo_ship=left_coordinates.front_cargo_ship.reflect(),
    setup_loading_bay=left_coordinates.setup_loading_bay.reflect(),
    loading_bay=left_coordinates.loading_bay.reflect(),
    cargo_depot_setup=left_coordinates.cargo_depot_setup.reflect(),
    cargo_depot=left_coordinates.cargo_depot.reflect(),
    side_cargo_ship_alignment_point=left_coordinates.side_cargo_ship_alignment_point.reflect(),
    side_cargo_ship_middle=left_coordinates.side_cargo_ship_middle.reflect(),
    side_cargo_ship=left_coordinates.side_cargo_ship.reflect(),
    cargo_endpoint=left_coordinates.cargo_endpoint.reflect(),
)
class AutoBase(AutonomousStateMachine):
    """Base autonomous routine: front hatch, reload, side hatch.

    State flow: drive_to_cargo_ship -> deposit_hatch ->
    drive_to_loading_bay -> intake_hatch -> (repeat) -> stop.
    ``completed_runs`` selects which leg of the route to drive next.
    """
    # Here magicbot injects components
    hatch_deposit: HatchDepositAligner
    hatch_intake: HatchIntakeAligner
    cargo: CargoManager
    chassis: SwerveChassis
    hatch: Hatch
    cargo_component: CargoManipulator
    imu: NavX
    vision: Vision
    # This one is just a typehint
    pursuit: PurePursuit
    def __init__(self):
        super().__init__()
        self.coordinates: Coordinates = left_coordinates
        self.completed_runs = 0
        self.desired_angle = 0
        self.desired_angle_navx = 0
        self.minimum_path_completion = 0.85
        self.pursuit = PurePursuit(look_ahead=0.2, look_ahead_speed_modifier=0.25)
    def setup(self):
        # We start the match pre-loaded with a hatch panel.
        self.hatch.has_hatch = True
        self.vision.use_hatch()
    def on_enable(self):
        super().on_enable()
        # Seed odometry with the known starting position.
        self.chassis.odometry_x = self.coordinates.start_pos.x
        self.chassis.odometry_y = self.coordinates.start_pos.y
        self.completed_runs = 0
    # @state(first=True)
    # def intake_starting_hatch(self, initial_call):
    #     if initial_call:
    #         self.counter = 0
    #     if self.counter > 5:
    #         self.next_state("drive_to_cargo_ship")
    #     else:
    #         self.counter += 1
    @state(first=True)
    def drive_to_cargo_ship(self, initial_call):
        """Drive to the next cargo-ship hatch target, chosen by run count."""
        if initial_call:
            if self.completed_runs == 0:
                waypoints = insert_trapezoidal_waypoints(
                    (self.current_pos, self.coordinates.front_cargo_ship),
                    self.chassis.acceleration,
                    self.chassis.deceleration,
                )
            elif self.completed_runs == 1:
                waypoints = insert_trapezoidal_waypoints(
                    (
                        self.current_pos,
                        self.coordinates.side_cargo_ship_alignment_point,
                        self.coordinates.side_cargo_ship,
                    ),
                    self.chassis.acceleration,
                    self.chassis.deceleration,
                )
            else:
                self.next_state("drive_to_loading_bay")
                self.completed_runs += 1
                return
            self.pursuit.build_path(waypoints)
        self.follow_path()
        # Hand off to vision alignment once the target is visible near the
        # end of the path, or when the path is exhausted.
        if (
            self.vision.fiducial_in_sight and self.ready_for_vision()
        ) or self.pursuit.completed_path:
            self.next_state("deposit_hatch")
            self.completed_runs += 1
    @state
    def deposit_hatch(self, initial_call):
        """Run the hatch-deposit aligner; leave once the hatch is placed."""
        if initial_call:
            self.hatch_deposit.engage(initial_state="target_tape_align")
        if not self.hatch.has_hatch:
            self.next_state("drive_to_loading_bay")
    @state
    def drive_to_loading_bay(self, initial_call):
        """Drive back to the loading bay; route depends on run count."""
        if initial_call:
            if self.completed_runs == 1:
                # Back away one metre first so we clear the cargo ship.
                waypoints = insert_trapezoidal_waypoints(
                    (
                        self.current_pos,
                        Waypoint(
                            self.current_pos.x - 1,
                            self.current_pos.y,
                            self.imu.getAngle(),
                            1.5,
                        ),
                        self.coordinates.setup_loading_bay,
                        self.coordinates.loading_bay,
                    ),
                    self.chassis.acceleration,
                    self.chassis.deceleration,
                )
            elif self.completed_runs == 2:
                waypoints = insert_trapezoidal_waypoints(
                    (
                        self.current_pos,
                        self.coordinates.setup_loading_bay,
                        self.coordinates.loading_bay,
                    ),
                    self.chassis.acceleration,
                    self.chassis.deceleration,
                )
            else:
                self.next_state("stop")
                return
            self.pursuit.build_path(waypoints)
        self.follow_path()
        if (
            self.vision.fiducial_in_sight and self.ready_for_vision()
        ) or self.pursuit.completed_path:
            self.next_state("intake_hatch")
    @state
    def intake_hatch(self, initial_call):
        """Run the hatch-intake aligner; continue once it finishes."""
        if initial_call:
            self.hatch_intake.engage(initial_state="target_tape_align")
        elif not self.hatch_intake.is_executing:
            self.next_state("drive_to_cargo_ship")
    @state
    def stop(self):
        # Halt the drivetrain and end the autonomous routine.
        self.chassis.set_inputs(0, 0, 0)
        self.done()
    @property
    def current_pos(self):
        # Current pose as a Waypoint (speed field fixed at 3 m/s).
        return Waypoint(
            self.chassis.odometry_x, self.chassis.odometry_y, self.imu.getAngle(), 3
        )
    def follow_path(self):
        """Command the chassis along the current pure-pursuit path."""
        vx, vy, heading = self.pursuit.find_velocity(self.chassis.position)
        if self.pursuit.completed_path:
            self.chassis.set_inputs(0, 0, 0, field_oriented=True)
            return
        self.chassis.set_velocity_heading(vx, vy, heading)
    def ready_for_vision(self):
        # Only trust vision within 2 m of the end of the planned path
        # (waypoints[-1][4] is the cumulative path length).
        if self.pursuit.waypoints[-1][4] - self.pursuit.distance_traveled < 2:
            return True
        else:
            return False
class RightFullAuto(AutoBase):
    """Full two-hatch routine starting on the right side (default mode)."""
    MODE_NAME = "Right Full Autonomous"
    DEFAULT = True
    def __init__(self):
        super().__init__()
        self.coordinates = right_coordinates
class LeftFullAuto(AutoBase):
    """Full two-hatch routine on the left side (inherits left_coordinates)."""
    MODE_NAME = "Left Full Autonomous"
class FrontOnlyBase(AutoBase):
    """Variant that ends after depositing the single front hatch."""
    @state
    def deposit_hatch(self, initial_call):
        if initial_call:
            self.hatch_deposit.engage(initial_state="target_tape_align")
        if not self.hatch.has_hatch:
            # Unlike AutoBase, finish here instead of reloading.
            self.done()
class LeftFrontOnly(FrontOnlyBase):
    """Front-hatch-only routine, left side."""
    MODE_NAME = "Left Front Hatch Only"
class RightFrontOnly(FrontOnlyBase):
    """Front-hatch-only routine, right side."""
    MODE_NAME = "Right Front Hatch Only"
    def __init__(self):
        super().__init__()
        self.coordinates = right_coordinates
class SideOnlyBase(AutoBase):
    """Variant that goes straight to the side cargo ship hatch and stops."""
    @state(first=True)
    def drive_to_cargo_ship(self, initial_call):
        if initial_call:
            waypoints = insert_trapezoidal_waypoints(
                (
                    self.current_pos,
                    self.coordinates.side_cargo_ship_alignment_point,
                    self.coordinates.side_cargo_ship,
                ),
                self.chassis.acceleration,
                self.chassis.deceleration,
            )
            self.pursuit.build_path(waypoints)
        self.follow_path()
        if (
            self.vision.fiducial_in_sight and self.ready_for_vision()
        ) or self.pursuit.completed_path:
            self.next_state("deposit_hatch")
    @state
    def deposit_hatch(self, initial_call):
        if initial_call:
            self.hatch_deposit.engage(initial_state="target_tape_align")
        if not self.hatch.has_hatch:
            # Single-hatch routine: finish after the side deposit.
            self.done()
class LeftSideOnly(SideOnlyBase):
    """Side-hatch-only routine, left side."""
    MODE_NAME = "Left Side Hatch Only"
class RightSideOnly(SideOnlyBase):
    """Side-hatch-only routine, right side."""
    MODE_NAME = "Right Side Hatch Only"
    def __init__(self):
        super().__init__()
        self.coordinates = right_coordinates
class DriveForwards(AutonomousStateMachine):
    """Fallback auto: wait for a driver joystick push, then drive 1.5 m."""
    MODE_NAME = "Drive Forwards - Default"
    chassis: SwerveChassis
    imu: NavX
    hatch: Hatch
    cargo_component: CargoManipulator
    joystick: wpilib.Joystick
    def __init__(self):
        super().__init__()
        self.pursuit = PurePursuit(look_ahead=0.2, look_ahead_speed_modifier=0.25)
    def on_enable(self):
        super().on_enable()
        # This mode always starts at the odometry origin.
        self.chassis.odometry_x = 0
        self.chassis.odometry_y = 0
    @state(first=True)
    def wait_for_input(self):
        """Idle with a cargo pre-load flagged until the driver pushes forward."""
        self.hatch.has_hatch = False
        self.cargo_component.has_cargo = True
        if self.joystick.getY() < -0.5:  # joystick -y is forwards
            self.next_state("drive_forwards")
    @state
    def drive_forwards(self, initial_call):
        """Drive a straight 1.5 m trapezoidal path, then finish."""
        if initial_call:
            waypoints = insert_trapezoidal_waypoints(
                (self.current_pos, Waypoint(1.5, 0, 0, 0)),
                acceleration=self.chassis.acceleration,
                deceleration=self.chassis.deceleration,
            )
            self.pursuit.build_path(waypoints)
        self.follow_path()
        if self.pursuit.completed_path:
            self.chassis.set_inputs(0, 0, 0)
            self.done()
    @property
    def current_pos(self):
        # Current pose as a Waypoint (speed field fixed at 2 m/s).
        return Waypoint(
            self.chassis.odometry_x, self.chassis.odometry_y, self.imu.getAngle(), 2
        )
    def follow_path(self):
        """Command the chassis along the current pure-pursuit path."""
        vx, vy, heading = self.pursuit.find_velocity(self.chassis.position)
        if self.pursuit.completed_path:
            self.chassis.set_inputs(0, 0, 0, field_oriented=True)
            return
        self.chassis.set_velocity_heading(vx, vy, heading)
class DoubleFrontBase(AutoBase):
    """Variant placing hatches on BOTH front cargo-ship bays.

    The second run drives to the mirrored (reflected) front bay instead
    of the side bay.
    """
    @state(first=True)
    def drive_to_cargo_ship(self, initial_call):
        if initial_call:
            if self.completed_runs == 0:
                waypoints = insert_trapezoidal_waypoints(
                    (self.current_pos, self.coordinates.front_cargo_ship),
                    self.chassis.acceleration,
                    self.chassis.deceleration,
                )
            elif self.completed_runs == 1:
                waypoints = insert_trapezoidal_waypoints(
                    (
                        self.current_pos,
                        self.coordinates.setup_loading_bay,
                        self.coordinates.front_cargo_ship.reflect(),
                    ),
                    self.chassis.acceleration,
                    self.chassis.deceleration,
                )
            else:
                self.next_state("drive_to_loading_bay")
                self.completed_runs += 1
                return
            self.pursuit.build_path(waypoints)
        self.follow_path()
        if (
            self.vision.fiducial_in_sight and self.ready_for_vision()
        ) or self.pursuit.completed_path:
            self.next_state("deposit_hatch")
            self.completed_runs += 1
class LeftDoubleFront(DoubleFrontBase):
    """Double front-hatch routine, left side."""
    MODE_NAME = "Left Double Front Hatch"
class RightDoubleFront(DoubleFrontBase):
    """Double front-hatch routine, right side."""
    MODE_NAME = "Right Double Front Hatch"
    def __init__(self):
        super().__init__()
        self.coordinates = right_coordinates
class CargoAutoBase(AutoBase):
    """Cargo routine: deposit the pre-loaded cargo, then set up for pickup."""
    vision: Vision
    cargo_deposit: CargoDepositAligner
    cargo_component: CargoManipulator
    def __init__(self):
        super().__init__()
        self.cargo_intake_speed = 0.5
    def setup(self):
        # NOTE(review): sibling code sets has_cargo on cargo_component (the
        # manipulator); setting it on the CargoManager state machine here
        # may be a typo — confirm. drive_to_cargo_ship below sets
        # cargo_component.has_cargo anyway.
        self.cargo.has_cargo = True
        self.vision.use_cargo()
    def on_enable(self):
        super().on_enable()
        # Cargo routines start from the cargo-side starting position.
        self.chassis.odometry_x = self.coordinates.start_pos_cargo.x
        self.chassis.odometry_y = self.coordinates.start_pos_cargo.y
    @state(first=True)
    def drive_to_cargo_ship(self, initial_call):
        """Drive to the middle side bay of the cargo ship."""
        if initial_call:
            self.hatch.has_hatch = False
            self.cargo_component.has_cargo = True
            waypoints = insert_trapezoidal_waypoints(
                (
                    self.current_pos,
                    self.coordinates.side_cargo_ship_alignment_point,
                    self.coordinates.side_cargo_ship_middle,
                ),
                self.chassis.acceleration,
                self.chassis.deceleration,
            )
            self.pursuit.build_path(waypoints)
        self.follow_path()
        if (
            self.vision.fiducial_in_sight and self.ready_for_vision()
        ) or self.pursuit.completed_path:
            self.next_state("deposit_cargo")
    @state
    def deposit_cargo(self, initial_call):
        """Run the cargo-deposit aligner, then move on when it is idle."""
        if initial_call:
            self.cargo_deposit.engage(initial_state="target_tape_align")
        if not (self.cargo_deposit.is_executing or self.cargo.is_executing):
            self.next_state("drive_to_cargo_depot_setup")
    @state
    def drive_to_cargo_depot_setup(self, initial_call):
        """Drive to the depot setup point; currently ends the routine there."""
        if initial_call:
            waypoints = insert_trapezoidal_waypoints(
                (self.current_pos, self.coordinates.cargo_depot_setup),
                self.chassis.acceleration,
                self.chassis.deceleration,
            )
            self.pursuit.build_path(waypoints)
        self.follow_path()
        if self.pursuit.completed_path:
            self.done()
            # self.next_state("intake_cargo")
    # @state
    # def intake_cargo(self, initial_call):
    #     """
    #     Start cargo intake and move forwards slowly
    #     """
    #     if initial_call:
    #         self.cargo.intake_floor()
    #         self.chassis.set_inputs(
    #             -self.cargo_intake_speed, 0, 0, field_orineted=False
    #         )
    #     # Move towards the cargo side of the robot
    #     if self.cargo_component.has_cargo:
    #         self.next_state("drive_to_endpoint")
    @state
    def drive_to_endpoint(self, initial_call):
        """
        Move to the point where we hand over to drivers
        """
        if initial_call:
            waypoints = insert_trapezoidal_waypoints(
                (
                    self.current_pos,
                    self.coordinates.side_cargo_ship_alignment_point,
                    self.coordinates.cargo_endpoint,
                ),
                self.chassis.acceleration,
                self.chassis.deceleration,
            )
            self.pursuit.build_path(waypoints)
        self.follow_path()
        if self.pursuit.completed_path:
            self.done()
class RightCargoAuto(CargoAutoBase):
    """Cargo routine, right side."""
    MODE_NAME = "Right Cargo Pickup"
    def __init__(self):
        super().__init__()
        self.coordinates = right_coordinates
class LeftCargoAuto(CargoAutoBase):
    """Cargo routine, left side."""
    MODE_NAME = "Left Cargo Pickup"
    def __init__(self):
        super().__init__()
        self.coordinates = left_coordinates
| {"/autonomous/autonomous.py": ["/automations/alignment.py", "/automations/cargo.py", "/components/cargo.py", "/components/hatch.py", "/components/vision.py", "/utilities/pure_pursuit.py"], "/tests/test_vision.py": ["/components/vision.py"], "/automations/alignment.py": ["/automations/cargo.py", "/automations/hatch.py", "/components/vision.py"], "/robot.py": ["/automations/alignment.py", "/automations/cargo.py", "/automations/climb.py", "/automations/hatch.py", "/components/cargo.py", "/components/hatch.py", "/components/vision.py", "/components/climb.py"], "/automations/climb.py": ["/components/climb.py", "/components/cargo.py"], "/automations/hatch.py": ["/components/hatch.py"], "/autonomous/test.py": ["/utilities/pure_pursuit.py"], "/components/cargo.py": ["/components/vision.py"], "/automations/cargo.py": ["/components/cargo.py", "/components/vision.py"]} |
65,893 | orike122/pydeepspace | refs/heads/master | /utilities/pure_pursuit.py | import math
from typing import List, NamedTuple, Optional, Sequence, Tuple
import numpy as np
#: A point in 2D cartesian space.
Cartesian2D = Tuple[float, float]
class Waypoint(NamedTuple):
    """A waypoint to feed into PurePursuit.build_path."""
    x: float
    y: float
    #: Desired robot heading
    theta: float
    #: Desired velocity
    v: float
    def reflect(self) -> "Waypoint":
        """Mirror the waypoint across the x axis (negate y and theta)."""
        return self._replace(y=-self.y, theta=-self.theta)
class Segment(NamedTuple):
    """A Waypoint with an additional cumulative displacement."""
    x: float
    y: float
    theta: float
    v: float
    #: Cumulative displacement (path length from the first waypoint)
    s: float
class PurePursuit:
    """
    Pure Pursuit controller for navigation with absolute waypoints.
    Uses the method outlined here with some changes to be suitable for a swervedrive
    https://www.ri.cmu.edu/pub_files/pub3/coulter_r_craig_1992_1/coulter_r_craig_1992_1.pdf
    """
    waypoints: List[Segment]
    def __init__(self, look_ahead: float, look_ahead_speed_modifier: float):
        # look_ahead: base lookahead radius (m); the effective radius is
        # look_ahead + look_ahead_speed_modifier * current_speed.
        self.waypoints = []
        self.current_waypoint_number = 0
        self.look_ahead = look_ahead
        self.look_ahead_speed_modifier = look_ahead_speed_modifier
        self.speed_look_ahead = look_ahead
        self.completed_path = False
        self.distance_traveled = 0.0
    def find_intersections(
        self,
        waypoint_start: Segment,
        waypoint_end: Segment,
        robot_position: Cartesian2D,
    ) -> Optional[np.ndarray]:
        """
        Find the intersection/s between our lookahead circle and the path segment.
        http://mathworld.wolfram.com/Circle-LineIntersection.html
        NOTE: co-ordinates are translated so the robot sits at the origin,
        so the returned intersection is relative to the robot's position.
        Returns the intersection closest to the segment end, or None if the
        lookahead circle does not reach the (infinite) segment line.
        """
        x1, y1 = waypoint_start.x, waypoint_start.y
        x2, y2 = waypoint_end.x, waypoint_end.y
        robot_x, robot_y = robot_position
        # Shift everything so the robot (circle centre) is at the origin.
        x2 -= robot_x
        x1 -= robot_x
        y2 -= robot_y
        y1 -= robot_y
        segment_end = np.array((x2, y2))
        dx = x2 - x1
        dy = y2 - y1
        dr = math.hypot(dx, dy)
        D = x1 * y2 - x2 * y1
        r = self.speed_look_ahead
        # Discriminant of the circle-line system; >= 0 means they meet.
        delta = r ** 2 * dr ** 2 - D ** 2
        if delta >= 0:  # if an intersection exists
            sqrt_delta = math.sqrt(delta)
            right_x = self.sgn(dy) * dx * sqrt_delta
            left_x = D * dy
            right_y = abs(dy) * sqrt_delta
            left_y = -D * dx
            denominator = dr ** 2
            if denominator == 0:
                # Degenerate zero-length segment.
                # print("Pursuit: caught division by zero")
                return None
            intersection_1 = np.array((left_x + right_x, left_y + right_y))
            intersection_1 /= denominator
            if delta == 0:  # if we are tangent to our path
                return intersection_1
            intersection_2 = np.array((left_x - right_x, left_y - right_y))
            intersection_2 /= denominator
            # Pick the intersection nearer the segment end (further along).
            if np.linalg.norm(intersection_1 - segment_end) < np.linalg.norm(
                intersection_2 - segment_end
            ):
                return intersection_1
            else:
                return intersection_2
        else:
            return None
    def build_path(self, waypoints: Sequence[Waypoint]) -> None:
        """
        Take in a list of waypoints used to build a path.
        The waypoints must be a tuple (x, y, theta, speed), this method will
        create waypoints with these co-ordinates and distance
        along the path from the start of the trajectory.
        """
        # Assume the robot starts at the first waypoint.
        self.last_robot_x = waypoints[0].x
        self.last_robot_y = waypoints[0].y
        self.completed_path = False
        self.distance_traveled = 0
        self.waypoints = []
        waypoint_distance = 0.0
        previous_waypoint = waypoints[0]
        for waypoint in waypoints:
            x, y, theta, speed = waypoint
            # print(waypoint)
            # Accumulate straight-line distance between consecutive waypoints.
            waypoint_distance += math.hypot(
                x - previous_waypoint.x, y - previous_waypoint.y
            )
            previous_waypoint = waypoint
            self.waypoints.append(Segment(x, y, theta, speed, waypoint_distance))
        self.current_waypoint_number = 0
    def compute_direction(
        self,
        robot_position: Cartesian2D,
        segment_start: Segment,
        segment_end: Segment,
        distance_along_path: float,
    ) -> np.ndarray:
        """Find the goal_point and convert it to a unit direction vector.

        NOTE(review): the ``distance_along_path`` parameter is currently
        unused by this method.
        """
        goal_point = self.find_intersections(segment_start, segment_end, robot_position)
        if goal_point is None:
            # if we cant find an intersection between the look_ahead and path
            # use the next waypoint as our goal point
            goal_point = segment_end[:2]
        # print(goal_point)
        # Normalize to a unit vector; speed is applied by the caller.
        goal_point /= np.linalg.norm(goal_point)
        return goal_point
    @staticmethod
    def sgn(number: float) -> int:
        """Returns the sign of a number, 0 is positive"""
        if number < 0:
            return -1
        else:
            return 1
    def distance_along_path(self, robot_position: Cartesian2D) -> float:
        """
        Find the robots position on the path using odometry.
        Every timestep, add the distance the robot has travelled to a
        running total used to check for waypoints.
        """
        robot_x, robot_y = robot_position
        self.distance_traveled += math.hypot(
            robot_x - self.last_robot_x, robot_y - self.last_robot_y
        )
        self.last_robot_x = robot_x
        self.last_robot_y = robot_y
        return self.distance_traveled
    def find_speed(
        self,
        start_path_distance: float,
        end_path_distance: float,
        start_speed: float,
        end_speed: float,
        distance_along_path: float,
    ) -> float:
        """
        Find the how fast the robot should be moving at its current point.
        Linearly interpolates between the segment's start and end speeds,
        clamping the robot's progress to [0, segment length].
        """
        local_end_distance = end_path_distance - start_path_distance
        local_robot_distance = min(
            max(distance_along_path - start_path_distance, 0), local_end_distance
        )
        speed_difference = end_speed - start_speed
        portion_path_completed = local_robot_distance / local_end_distance
        target_speed = speed_difference * portion_path_completed + start_speed
        return target_speed
    def find_velocity(self, robot_position: Cartesian2D) -> Tuple[float, float, float]:
        """Return the (vx, vy, heading) command for the current position.

        Returns zeros and sets ``completed_path`` once the last segment
        has been consumed.
        """
        if self.current_waypoint_number >= len(self.waypoints) - 1:
            self.completed_path = True
            # print("WARNING: path completed")
            return 0, 0, 0
        distance_along_path = self.distance_along_path(robot_position)
        segment_start = self.waypoints[self.current_waypoint_number]
        segment_end = self.waypoints[self.current_waypoint_number + 1]
        # Segment fields [3:] are (v, s): speed and cumulative distance.
        start_speed, start_distance = segment_start[3:]
        end_speed, end_distance = segment_end[3:]
        direction = self.compute_direction(
            robot_position, segment_start, segment_end, distance_along_path
        )
        speed = self.find_speed(
            start_distance, end_distance, start_speed, end_speed, distance_along_path
        )
        vx, vy = direction * speed
        # Heading target is always the end-of-segment heading.
        heading = segment_end.theta
        # Scale the lookahead radius with current speed.
        self.speed_look_ahead = self.look_ahead + self.look_ahead_speed_modifier * speed
        if self.distance_traveled + self.speed_look_ahead >= end_distance:
            # if we have reached the end of our current segment
            self.current_waypoint_number += 1
            # print("changed segment")
        return vx, vy, heading
def insert_trapezoidal_waypoints(
    waypoints: Sequence[Waypoint], acceleration: float, deceleration: float
) -> List[Waypoint]:
    """Generate how far you have to travel to accelerate and decelerate for speed control.

    Inserts an intermediate waypoint into each segment whose end speed
    differs from its start speed, so the speed ramp happens over the
    correct distance. Assumes the robot should accelerate then cruise
    when v_init < v_final, otherwise cruise then decelerate.

    Args:
        waypoints: the original path (at least two entries).
        acceleration: acceleration when increasing speed
        deceleration: acceleration when decreasing speed

    Returns:
        A new waypoint list with the intermediate ramp points inserted.
    """
    trap_waypoints: List[Waypoint] = []
    for segment_start, segment_end in zip(waypoints, waypoints[1:]):
        dx = segment_end.x - segment_start.x
        dy = segment_end.y - segment_start.y
        segment_distance = math.hypot(dx, dy)
        u = segment_start.v
        v = segment_end.v
        trap_waypoints.append(segment_start)
        if segment_distance == 0:
            # Coincident waypoints: nothing to insert, and guards against
            # division by zero when scaling dx/dy below.
            continue
        if v > u:
            # Faster at the end - accelerating
            a = acceleration
            # Rearrange v^2 = u^2 + 2as
            s = (v ** 2 - u ** 2) / (2 * a)
            if s > segment_distance:
                # Cannot actually get to speed in time
                # Leave the segments as they are
                continue
            # Ramp point carries the END heading/speed: accelerate, then cruise.
            intermediate = Waypoint(
                dx * s / segment_distance + segment_start.x,
                dy * s / segment_distance + segment_start.y,
                *segment_end[2:],
            )
            trap_waypoints.append(intermediate)
        elif u > v:
            a = deceleration
            # Rearrange v^2 = u^2 + 2as, then subtract from the segment length
            s = segment_distance - (v ** 2 - u ** 2) / (2 * a)
            if s < 0:
                # Not enough time to decelerate
                # Leave the segments as they are
                continue
            # Ramp point carries the START heading/speed: cruise, then decelerate.
            intermediate = Waypoint(
                dx * s / segment_distance + segment_start[0],
                dy * s / segment_distance + segment_start[1],
                *segment_start[2:],
            )
            trap_waypoints.append(intermediate)
    trap_waypoints.append(waypoints[-1])
    # print(f"waypoints = {trap_waypoints}")
    return trap_waypoints
| {"/autonomous/autonomous.py": ["/automations/alignment.py", "/automations/cargo.py", "/components/cargo.py", "/components/hatch.py", "/components/vision.py", "/utilities/pure_pursuit.py"], "/tests/test_vision.py": ["/components/vision.py"], "/automations/alignment.py": ["/automations/cargo.py", "/automations/hatch.py", "/components/vision.py"], "/robot.py": ["/automations/alignment.py", "/automations/cargo.py", "/automations/climb.py", "/automations/hatch.py", "/components/cargo.py", "/components/hatch.py", "/components/vision.py", "/components/climb.py"], "/automations/climb.py": ["/components/climb.py", "/components/cargo.py"], "/automations/hatch.py": ["/components/hatch.py"], "/autonomous/test.py": ["/utilities/pure_pursuit.py"], "/components/cargo.py": ["/components/vision.py"], "/automations/cargo.py": ["/components/cargo.py", "/components/vision.py"]} |
65,894 | orike122/pydeepspace | refs/heads/master | /tests/test_vision.py | from components.vision import Odometry, Vision
from utilities.functions import rotate_vector
import math
import time
class FakeImu:
    """Minimal IMU stand-in: a single stored angle value."""

    def __init__(self):
        # Tests may mutate this attribute directly to simulate rotation.
        self.angle = 0.0

    def getAngle(self):
        """Return the currently stored angle."""
        return self.angle
class FakeChassis:
    """Chassis stand-in: zeroed odometry plus a fake IMU."""

    def __init__(self):
        self.odometry_x, self.odometry_y = 0.0, 0.0
        self.imu = FakeImu()
def init_vision(heading=0.0):
    """Build a Vision with a fake chassis and four fake odometry entries.

    Returns (vision, now) where *now* is the timestamp of the newest entry.
    """
    now = time.monotonic()
    vision = Vision()
    vision.chassis = FakeChassis()
    # Inject fake odometry; newest entry ends up at the left of the deque.
    for i, age in enumerate((0.3, 0.2, 0.1, 0.0)):
        vision.odometry.appendleft(Odometry(i, i, heading, now - age))
    return vision, now
def test_odom_deque_order():
    """Entries produced by execute() must be newest-first in the deque."""
    vision = Vision()
    vision.chassis = FakeChassis()
    for _ in range(5):
        time.sleep(0.05)
        vision.execute()
    assert len(vision.odometry) > 0
    entries = list(vision.odometry)
    # Each entry must be strictly newer than the one after it.
    for newer, older in zip(entries, entries[1:]):
        assert newer.t > older.t
def test_get_pose_delta():
    """Pose delta grows with the age of the reference time."""
    vision, t = init_vision()
    for age, expected in ((0.0, 0.0), (0.1, 1.0), (0.2, 2.0)):
        x, y, heading = vision._get_pose_delta(t - age)
        assert x == expected
        assert y == expected
def test_get_fiducial_position():
    """Fiducial coordinates are corrected by odometry movement since capture."""
    vision, t = init_vision()
    vision.fiducial_x_entry.setDouble(5.0)
    vision.fiducial_y_entry.setDouble(0.5)
    cases = ((t, 5.0, 0.5), (t - 0.15, 4.0, -0.5), (t - 0.25, 3.0, -1.5))
    for capture_time, expected_x, expected_y in cases:
        vision.fiducial_time_entry.setDouble(capture_time)
        x, y, heading = vision.get_fiducial_position()
        assert (x, y, heading) == (expected_x, expected_y, 0.0)
def test_get_fiducial_position_rotated():
    """Same correction as the unrotated case, with the robot at 90 degrees."""
    vision, t = init_vision(math.pi / 2)
    # NOTE(review): FakeImu stores its value in `.angle`; this sets a new
    # `.heading` attribute instead — confirm which one Vision reads.
    vision.chassis.imu.heading = math.pi / 2
    vision.fiducial_x_entry.setDouble(5.0)
    vision.fiducial_y_entry.setDouble(0.5)
    cases = ((t, 5.0, 0.5), (t - 0.15, 4.0, 1.5), (t - 0.25, 3.0, 2.5))
    for capture_time, expected_x, expected_y in cases:
        vision.fiducial_time_entry.setDouble(capture_time)
        x, y, heading = vision.get_fiducial_position()
        assert (x, y, heading) == (expected_x, expected_y, 0.0)
def test_odometry_rotation():
    """A rotation between capture and now shows up in the returned heading."""
    vision, t = init_vision()
    vision.chassis.imu.heading = math.pi / 2
    # Replace the newest odometry entry with one rotated by 90 degrees.
    newest = vision.odometry.popleft()
    vision.odometry.appendleft(Odometry(newest.x, newest.y, math.pi / 2, newest.t))
    vision.fiducial_x_entry.setDouble(5.0)
    vision.fiducial_y_entry.setDouble(0.5)
    vision.fiducial_time_entry.setDouble(t)
    x, y, heading = vision.get_fiducial_position()
    assert (x, y, heading) == (5.0, 0.5, 0.0)
    vision.fiducial_time_entry.setDouble(t - 0.15)
    x, y, heading = vision.get_fiducial_position()
    assert (x, y, heading) == (4.0, -0.5, math.pi / 2)
    # Rotating back into the capture frame recovers the target offset.
    rot_x, rot_y = rotate_vector(x, y, -heading)
    assert abs(rot_x + 0.5) < 1e-4
    assert abs(rot_y + 4) < 1e-4
| {"/autonomous/autonomous.py": ["/automations/alignment.py", "/automations/cargo.py", "/components/cargo.py", "/components/hatch.py", "/components/vision.py", "/utilities/pure_pursuit.py"], "/tests/test_vision.py": ["/components/vision.py"], "/automations/alignment.py": ["/automations/cargo.py", "/automations/hatch.py", "/components/vision.py"], "/robot.py": ["/automations/alignment.py", "/automations/cargo.py", "/automations/climb.py", "/automations/hatch.py", "/components/cargo.py", "/components/hatch.py", "/components/vision.py", "/components/climb.py"], "/automations/climb.py": ["/components/climb.py", "/components/cargo.py"], "/automations/hatch.py": ["/components/hatch.py"], "/autonomous/test.py": ["/utilities/pure_pursuit.py"], "/components/cargo.py": ["/components/vision.py"], "/automations/cargo.py": ["/components/cargo.py", "/components/vision.py"]} |
65,895 | orike122/pydeepspace | refs/heads/master | /components/climb.py | import math
import ctre
import magicbot
import rev
import wpilib
import wpilib_controller
from utilities.navx import NavX
class Lift:
    """Wraps one climber lift motor with its encoder and retract switch."""

    # Lift travel per motor revolution (conversion factor for the encoder).
    HEIGHT_PER_REV = 0.002
    # Encoder position above which the lift foot counts as off the ground.
    GROUND_CLEARANCE = -0.05

    __slots__ = ("motor", "encoder", "forward_limit_switch")

    def __init__(self, motor: rev.CANSparkMax) -> None:
        self.motor = motor
        self.motor.setIdleMode(rev.IdleMode.kBrake)
        self.encoder = motor.getEncoder()
        # Report positions/velocities in lift-travel units rather than revs.
        self.encoder.setPositionConversionFactor(self.HEIGHT_PER_REV)
        self.encoder.setVelocityConversionFactor(self.HEIGHT_PER_REV / 60)
        self.forward_limit_switch = motor.getForwardLimitSwitch(
            rev.LimitSwitchPolarity.kNormallyOpen
        )
        self.forward_limit_switch.enableLimitSwitch(True)

    def is_retracted(self) -> bool:
        """True when the retract limit switch is pressed."""
        return self.forward_limit_switch.get()

    def is_above_ground(self) -> bool:
        """True when the encoder says the foot has cleared the ground."""
        return self.encoder.getPosition() > self.GROUND_CLEARANCE
class Climber:
    """Controls the two climbing lifts, the climber drive wheel and pistons.

    High-level sequencing lives in ClimbAutomation; this component turns
    per-loop direction commands into motor outputs, using a pitch PID to
    keep the robot level while lifting.
    """

    # Injected by magicbot.
    front_motor: rev.CANSparkMax
    back_motor: rev.CANSparkMax
    drive_motor: ctre.TalonSRX
    front_podium_switch: wpilib.DigitalInput
    back_podium_switch: wpilib.DigitalInput
    pistons: wpilib.DoubleSolenoid
    imu: NavX
    LIFT_SPEED = 1  # 1500 # 0.5 # 4700/5840 rpm
    SLOW_DOWN_SPEED = 0.15
    DRIVE_SPEED = 0.6
    # Per-loop commands; magicbot resets these to 0 unless re-asserted.
    front_direction = magicbot.will_reset_to(0)
    back_direction = magicbot.will_reset_to(0)
    drive_output = magicbot.will_reset_to(0)
    def setup(self):
        """Configure hardware and the levelling PID (runs once at init)."""
        self.drive_motor.setNeutralMode(ctre.NeutralMode.Brake)
        self.drive_motor.setInverted(True)
        self.front = Lift(self.front_motor)
        self.back = Lift(self.back_motor)
        self.lifts = (self.front, self.back)
        # Reverse switch trips when the front lift is fully extended.
        self.front_reverse_limit_switch = self.front_motor.getReverseLimitSwitch(
            rev.LimitSwitchPolarity.kNormallyOpen
        )
        self.front_reverse_limit_switch.enableLimitSwitch(True)
        # Drives IMU pitch towards 0 so the robot stays level while lifting.
        self.level_pid = wpilib_controller.PIDController(
            Kp=3, Ki=0, Kd=0, period=1 / 50, measurement_source=self.imu.getPitch
        )
        self.level_pid.setInputRange(-math.pi, math.pi)
        self.level_pid.setOutputRange(-1, 1)
        self.level_pid.setReference(0)
        self.running = True
        # wpilib.SmartDashboard.putData("lift_level_pid", self.level_pid)
    def extend_all(self):
        """Command both lifts to extend (negative direction = extend)."""
        self.front_direction = -1
        self.back_direction = -1
    def retract_all(self):
        self.retract_front()
        self.retract_back()
    def retract_front(self):
        self.front_direction = 1
    def retract_back(self):
        self.back_direction = 1
    def level_back(self):
        # Direction 0 with `running` set holds the back lift under PID.
        self.back_direction = 0
    def is_both_extended(self):
        # NOTE(review): only checks the *front* lift's extension switch —
        # confirm the back lift cannot lag behind the front.
        return self.front_reverse_limit_switch.get()
    def is_front_touching_podium(self):
        return not self.front_podium_switch.get()
    def is_back_touching_podium(self):
        return not self.back_podium_switch.get()
    def execute(self):
        """Runs every loop: translate direction commands into motor output."""
        # Re-zero a lift's encoder whenever its retracted switch is pressed.
        for lift in self.lifts:
            if lift.forward_limit_switch.get():
                lift.encoder.setPosition(0)
        pid_output = self.level_pid.update()
        # Extend both
        if self.front_direction < 0 and self.back_direction < 0:
            if self.is_both_extended():
                self.back.motor.disable()
                self.front.motor.disable()
            else:
                # Bias each lift with the levelling PID while extending.
                self.front.motor.set(-self.LIFT_SPEED + pid_output)
                self.back.motor.set(-self.LIFT_SPEED - pid_output)
        # Retract both
        elif self.front_direction > 0 and self.back_direction > 0:
            output = self.LIFT_SPEED * 0.4
            if self.front.is_above_ground():
                self.front.motor.set(self.SLOW_DOWN_SPEED)
            else:
                self.front.motor.set(output)
            if self.back.is_above_ground():
                self.back.motor.set(self.SLOW_DOWN_SPEED)
            else:
                self.back.motor.set(output)
        else:
            output = self.LIFT_SPEED
            # Retract front
            if self.front_direction > 0:
                if self.front.is_above_ground():
                    self.front.motor.set(self.SLOW_DOWN_SPEED)
                    # NOTE(review): also levels via the back motor here —
                    # confirm this should only happen once the front is up.
                    self.back_motor.set(-pid_output)
                else:
                    self.front.motor.set(output)
            else:
                self.front.motor.disable()
            # Retract back
            if self.back_direction > 0:
                if self.back.is_above_ground():
                    self.back.motor.set(self.SLOW_DOWN_SPEED)
                else:
                    self.back.motor.set(output)
            else:
                self.back.motor.disable()
            # level_back(): hold the back lift level under PID control.
            if self.back_direction == 0 and self.running:
                self.back_motor.set(-pid_output)
        self.drive_motor.set(ctre.ControlMode.PercentOutput, self.drive_output)
    def on_disable(self):
        self.front.motor.disable()
        self.back.motor.disable()
    def on_enable(self):
        self.retract_pistons()
    def drive_forward(self, drive_speed=DRIVE_SPEED):
        """Run the climber drive wheel at *drive_speed* for this loop."""
        self.drive_output = drive_speed
    def fire_pistons(self):
        self.pistons.set(wpilib.DoubleSolenoid.Value.kForward)
    def retract_pistons(self):
        self.pistons.set(wpilib.DoubleSolenoid.Value.kReverse)
| {"/autonomous/autonomous.py": ["/automations/alignment.py", "/automations/cargo.py", "/components/cargo.py", "/components/hatch.py", "/components/vision.py", "/utilities/pure_pursuit.py"], "/tests/test_vision.py": ["/components/vision.py"], "/automations/alignment.py": ["/automations/cargo.py", "/automations/hatch.py", "/components/vision.py"], "/robot.py": ["/automations/alignment.py", "/automations/cargo.py", "/automations/climb.py", "/automations/hatch.py", "/components/cargo.py", "/components/hatch.py", "/components/vision.py", "/components/climb.py"], "/automations/climb.py": ["/components/climb.py", "/components/cargo.py"], "/automations/hatch.py": ["/components/hatch.py"], "/autonomous/test.py": ["/utilities/pure_pursuit.py"], "/components/cargo.py": ["/components/vision.py"], "/automations/cargo.py": ["/components/cargo.py", "/components/vision.py"]} |
65,896 | orike122/pydeepspace | refs/heads/master | /automations/alignment.py | from magicbot import tunable
from magicbot.state_machine import StateMachine, state
from automations.cargo import CargoManager
from automations.hatch import HatchAutomation
from components.vision import Vision
from pyswervedrive.chassis import SwerveChassis
from utilities.functions import rotate_vector
class Aligner(StateMachine):
    """
    A state machine for alignment using vision systems.
    The robot will use two methods of alignment, targets above
    objectives from longer range and fine adjustment using the ground
    tape once we are able to see it.
    """
    VERBOSE_LOGGING = True
    chassis: SwerveChassis
    vision: Vision
    def setup(self):
        """Initialise per-instance state (runs once after injection)."""
        self.successful = False
        self.last_vision = 0
        # +1 drives forwards while blind; subclasses may flip this.
        self.direction = 1
        self.tolerance = 0.1
    alignment_speed = tunable(0.5)  # m/s changed in teleop and autonomous
    alignment_kp_y = tunable(1.5)
    # lookahead_factor = tunable(4)
    def on_disable(self):
        self.done()
    # def get_fiducial_y(self):
    #     return self.vision.get_fiducial_position()[2]
    @state(first=True)
    def wait_for_vision(self):
        if self.vision.fiducial_in_sight:
            self.next_state("target_tape_align")
    @state(must_finish=True)
    def target_tape_align(self, initial_call, state_tm):
        """
        Align with the objective using the vision tape above the objective.
        The robot will try to correct errors until they are within tolerance
        by strafing and moving in a hyberbolic curve towards the target.
        """
        if initial_call:
            self.successful = False
            self.last_vision = state_tm
            self.chassis.automation_running = True
            self.counter = 0
            # Assume the target starts within 2.5 m until we see it.
            self.last_range = 2.5
            # self.v = 0
            # self.u = self.chassis.speed
        # if abs(self.v - self.alignment_speed) > self.tolerance:
        #     if self.v > self.u:
        #         a = self.chassis.decceleration
        #     if self.v < self.u:
        #         a = self.chassis.acceleration
        #     self.v = self.u + a * state_tm
        fiducial_x, fiducial_y, delta_heading = self.vision.get_fiducial_position()
        if not self.vision.fiducial_in_sight or abs(fiducial_x) > abs(self.last_range):
            # Target lost (or implausibly far): keep driving straight at the
            # last known range, then declare success once we should have
            # covered that distance.
            # self.chassis.set_inputs(0, 0, 0)
            # self.next_state("success")
            self.chassis.set_inputs(
                self.alignment_speed * self.direction, 0, 0, field_oriented=False
            )
            if state_tm - self.last_vision > self.last_range / self.alignment_speed:
                self.chassis.set_inputs(0, 0, 0)
                self.next_state("success")
        else:
            # Log only on the first sighting.
            if self.counter < 1:
                self.logger.info("Seen vision")
                self.counter += 1
            self.last_vision = state_tm
            self.last_range = fiducial_x
            # fiducial_x /= self.lookahead_factor
            # norm = math.hypot(fiducial_x, fiducial_y)
            # vx = fiducial_x / norm * self.alignment_speed
            # vy = fiducial_y / norm * self.alignment_speed
            if fiducial_x > 0:
                # Target in front of us means we are using the hatch camera - move forwards
                vx = self.alignment_speed * (1 - min(abs(fiducial_y), 1.5) / 1.5)
            else:
                # Target behind us means we are using the cargo camera - move backwards
                vx = -self.alignment_speed * (1 - min(abs(fiducial_y), 1.5) / 1.5)
            # Strafe proportionally to lateral error, clamped to full speed.
            vy = self.alignment_speed * max(
                min(fiducial_y * self.alignment_kp_y, 1), -1
            )
            # Rotate the command back into the robot frame at capture time.
            vx, vy = rotate_vector(vx, vy, -delta_heading)
            self.chassis.set_inputs(vx, vy, 0, field_oriented=False)
    @state(must_finish=True)
    def success(self):
        self.done()
    def done(self):
        super().done()
        self.chassis.automation_running = False
class HatchDepositAligner(Aligner):
    """Aligner that deposits the hatch once alignment succeeds."""
    VERBOSE_LOGGING = True
    hatch_automation: HatchAutomation
    @state(must_finish=True)
    def success(self, state_tm, initial_call):
        if initial_call:
            self.hatch_automation.outake()
        self.done()
class CargoDepositAligner(Aligner):
    """Aligner for depositing cargo; blind-drives in reverse."""
    VERBOSE_LOGGING = True
    cargo: CargoManager
    def setup(self):
        super().setup()
        # The cargo side faces backwards, so drive backwards when blind.
        self.direction = -1
    @state(must_finish=True)
    def success(self):
        self.done()
class HatchIntakeAligner(Aligner):
    """Aligner that grabs a hatch from the loading station on success."""
    VERBOSE_LOGGING = True
    hatch_automation: HatchAutomation
    @state(must_finish=True)
    def success(self):
        self.hatch_automation.grab()
        self.done()
| {"/autonomous/autonomous.py": ["/automations/alignment.py", "/automations/cargo.py", "/components/cargo.py", "/components/hatch.py", "/components/vision.py", "/utilities/pure_pursuit.py"], "/tests/test_vision.py": ["/components/vision.py"], "/automations/alignment.py": ["/automations/cargo.py", "/automations/hatch.py", "/components/vision.py"], "/robot.py": ["/automations/alignment.py", "/automations/cargo.py", "/automations/climb.py", "/automations/hatch.py", "/components/cargo.py", "/components/hatch.py", "/components/vision.py", "/components/climb.py"], "/automations/climb.py": ["/components/climb.py", "/components/cargo.py"], "/automations/hatch.py": ["/components/hatch.py"], "/autonomous/test.py": ["/utilities/pure_pursuit.py"], "/components/cargo.py": ["/components/vision.py"], "/automations/cargo.py": ["/components/cargo.py", "/components/vision.py"]} |
65,897 | orike122/pydeepspace | refs/heads/master | /robot.py | #!/usr/bin/env python3
import enum
import math
import rev
import ctre
import magicbot
import wpilib
from automations.alignment import (
HatchIntakeAligner,
HatchDepositAligner,
CargoDepositAligner,
)
from automations.cargo import CargoManager
from automations.climb import ClimbAutomation
from automations.hatch import HatchAutomation
from components.cargo import CargoManipulator
from components.hatch import Hatch
from components.vision import Vision
from components.climb import Climber
from pyswervedrive.chassis import SwerveChassis
from pyswervedrive.module import SwerveModule
from utilities.functions import constrain_angle, rescale_js
from utilities.navx import NavX
class FieldAngle(enum.Enum):
    """Field-relative headings the driver commonly wants to face."""

    CARGO_FRONT = 0
    CARGO_RIGHT = math.pi / 2
    CARGO_LEFT = -math.pi / 2
    LOADING_STATION = math.pi

    @classmethod
    def closest(cls, robot_heading: float) -> "FieldAngle":
        """Return the member whose heading is angularly nearest."""

        def angular_error(candidate: "FieldAngle") -> float:
            return abs(constrain_angle(robot_heading - candidate.value))

        return min(cls, key=angular_error)
class Robot(magicbot.MagicRobot):
    """Top-level magicbot robot: wires hardware, automations and driver input."""

    # Declare magicbot components here using variable annotations.
    # NOTE: ORDER IS IMPORTANT.
    # Any components that actuate objects should be declared after
    # any higher-level components (automations) that depend on them.
    # Automations
    cargo: CargoManager
    cargo_deposit: CargoDepositAligner
    climb_automation: ClimbAutomation
    hatch_deposit: HatchDepositAligner
    hatch_intake: HatchIntakeAligner
    hatch_automation: HatchAutomation
    # Actuators
    cargo_component: CargoManipulator
    chassis: SwerveChassis
    hatch: Hatch
    climber: Climber
    vision: Vision
    # Encoder counts per trigger press when nudging steer offsets in test mode.
    offset_rotation_rate = 20
    def createObjects(self):
        """Create motors and stuff here."""
        # a + + b - + c - - d + -
        x_dist = 0.2625
        y_dist = 0.2665
        self.module_a = SwerveModule(  # front left module
            "a",
            steer_talon=ctre.TalonSRX(3),
            drive_talon=ctre.TalonSRX(4),
            x_pos=x_dist,
            y_pos=y_dist,
        )
        self.module_b = SwerveModule(  # front right module
            "b",
            steer_talon=ctre.TalonSRX(7),
            drive_talon=ctre.TalonSRX(8),
            x_pos=-x_dist,
            y_pos=y_dist,
        )
        self.module_c = SwerveModule(  # bottom left module
            "c",
            steer_talon=ctre.TalonSRX(1),
            drive_talon=ctre.TalonSRX(6),
            x_pos=-x_dist,
            y_pos=-y_dist,
        )
        self.module_d = SwerveModule(  # bottom right module
            "d",
            steer_talon=ctre.TalonSRX(23),
            drive_talon=ctre.TalonSRX(24),
            x_pos=x_dist,
            y_pos=-y_dist,
        )
        self.imu = NavX()
        wpilib.SmartDashboard.putData("Gyro", self.imu.ahrs)
        # hatch objects
        self.hatch_fingers = wpilib.DoubleSolenoid(7, 6)
        self.hatch_punchers = wpilib.Solenoid(0)
        self.hatch_enable_piston = wpilib.DoubleSolenoid(3, 2)
        self.hatch_left_limit_switch = wpilib.DigitalInput(8)
        self.hatch_right_limit_switch = wpilib.DigitalInput(9)
        # climber objects
        self.climber_front_motor = rev.CANSparkMax(10, rev.MotorType.kBrushless)
        self.climber_back_motor = rev.CANSparkMax(11, rev.MotorType.kBrushless)
        self.climber_front_podium_switch = wpilib.DigitalInput(4)
        self.climber_back_podium_switch = wpilib.DigitalInput(5)
        self.climber_drive_motor = ctre.TalonSRX(20)
        self.climber_pistons = wpilib.DoubleSolenoid(forwardChannel=4, reverseChannel=5)
        # cargo related objects
        self.intake_motor = ctre.VictorSPX(9)
        self.intake_switch = wpilib.DigitalInput(0)
        self.arm_motor = rev.CANSparkMax(2, rev.MotorType.kBrushless)
        # boilerplate setup for the joystick
        self.joystick = wpilib.Joystick(0)
        self.gamepad = wpilib.XboxController(1)
        self.spin_rate = 1.5
    def autonomous(self):
        """Prepare for autonomous mode: zero heading and derate drive."""
        self.imu.resetHeading()
        self.chassis.set_heading_sp(0)
        self.hatch.enable_hatch = True
        self.hatch_intake.alignment_speed = 0.5
        self.hatch_deposit.alignment_speed = 0.5
        self.chassis.derate_drive_modules(6)
        super().autonomous()
    def disabledPeriodic(self):
        self.chassis.set_inputs(0, 0, 0)
        self.vision.execute()  # Keep the time offset calcs running
    def teleopInit(self):
        """Initialise driver control."""
        self.chassis.set_inputs(0, 0, 0)
        self.hatch_intake.alignment_speed = 0.5
        self.hatch_deposit.alignment_speed = 0.5
        self.chassis.derate_drive_modules(9)
    def teleopPeriodic(self):
        """Allow the drivers to control the robot."""
        throttle = max(0.1, (1 - self.joystick.getThrottle()) / 2)  # min 10%
        # this is where the joystick inputs get converted to numbers that are sent
        # to the chassis component. we rescale them using the rescale_js function,
        # in order to make their response exponential, and to set a dead zone -
        # which just means if it is under a certain value a 0 will be sent
        # TODO: Tune these constants for whatever robot they are on
        joystick_vx = -rescale_js(
            self.joystick.getY(), deadzone=0.1, exponential=1.5, rate=4 * throttle
        )
        joystick_vy = -rescale_js(
            self.joystick.getX(), deadzone=0.1, exponential=1.5, rate=4 * throttle
        )
        joystick_vz = -rescale_js(
            self.joystick.getZ(), deadzone=0.2, exponential=20.0, rate=self.spin_rate
        )
        joystick_hat = self.joystick.getPOV()
        # Allow big stick movements from the driver to break out of an automation
        if abs(joystick_vx) > 0.5 or abs(joystick_vy) > 0.5:
            self.hatch_intake.done()
            self.hatch_deposit.done()
            self.cargo_deposit.done()
        if not self.chassis.automation_running:
            if joystick_vx or joystick_vy or joystick_vz:
                self.chassis.set_inputs(
                    joystick_vx,
                    joystick_vy,
                    joystick_vz,
                    field_oriented=not self.joystick.getRawButton(6),
                )
            else:
                self.chassis.set_inputs(0, 0, 0)
            if joystick_hat != -1:
                # Flip the snap heading when the cargo (rear) side is active.
                if self.cargo_component.has_cargo or self.cargo.is_executing:
                    constrained_angle = -constrain_angle(
                        math.radians(joystick_hat) + math.pi
                    )
                else:
                    constrained_angle = -constrain_angle(math.radians(joystick_hat))
                self.chassis.set_heading_sp(constrained_angle)
        # Starts Hatch Alignment and Cargo State Machines
        if (
            self.joystick.getTrigger()
            or self.gamepad.getTriggerAxis(self.gamepad.Hand.kLeft) > 0.5
            or self.gamepad.getTriggerAxis(self.gamepad.Hand.kRight) > 0.5
        ):
            angle = FieldAngle.closest(self.imu.getAngle())
            self.logger.info("closest field angle: %s", angle)
            if self.cargo_component.has_cargo:
                self.cargo_deposit.engage()
            else:
                if angle is FieldAngle.LOADING_STATION:
                    self.hatch_intake.engage()
                else:
                    self.hatch_deposit.engage()
            self.chassis.set_heading_sp(angle.value)
        # Hatch Manual Outake/Intake
        # NOTE(review): getBumperPressed normally takes a Hand enum, not a raw
        # button number — confirm that `6` behaves as intended here.
        if self.joystick.getRawButtonPressed(5) or self.gamepad.getBumperPressed(6):
            angle = FieldAngle.closest(self.imu.getAngle())
            self.logger.info("closest field angle: %s", angle)
            if angle is not FieldAngle.LOADING_STATION:
                self.hatch_automation.outake()
            else:
                self.hatch_automation.grab()
        if self.gamepad.getXButtonPressed():
            self.hatch.retract_fingers()
            self.hatch.retract()
        # Stops Cargo Intake Motor
        if self.gamepad.getBButtonPressed():
            self.cargo.outake_cargo_ship(force=True)
        # Toggles the Heading Hold
        if self.joystick.getRawButtonPressed(8):
            if self.chassis.hold_heading:
                self.chassis.heading_hold_off()
            else:
                self.chassis.heading_hold_on()
        # Resets the IMU's Heading
        if self.joystick.getRawButtonPressed(7):
            self.imu.resetHeading()
            self.chassis.set_heading_sp(0)
        # Start Button starts Climb State Machine
        # NOTE(review): both calls are edge-triggered and `and` short-circuits,
        # so both presses must land in the same loop iteration — confirm.
        if self.gamepad.getStartButtonPressed() and self.gamepad.getRawButtonPressed(5):
            self.climb_automation.start_climb_lv3()
        # Back Button Ends Climb State Machine
        if self.gamepad.getBackButtonPressed():
            if self.gamepad.getRawButtonPressed(5):
                self.climb_automation.abort()
            else:
                self.climb_automation.done()
        # Cargo Floor Intake
        if self.gamepad.getAButtonPressed():
            self.cargo.intake_floor(force=True)
        # Cargo Loading Station Intake
        if self.gamepad.getYButtonPressed():
            self.cargo.intake_loading(force=True)
            self.chassis.set_heading_sp(
                FieldAngle.CARGO_FRONT.value
            )  # Reversed side of robot
        # Gamepad POV: nudge the robot in the pressed direction.
        if self.gamepad.getPOV() != -1:
            speed = 0.65
            azimuth = math.radians(-self.gamepad.getPOV())
            if self.cargo_component.has_cargo:
                azimuth += math.pi
            self.chassis.set_inputs(
                speed * math.cos(azimuth),
                speed * math.sin(azimuth),
                0,
                field_oriented=False,
            )
    def robotPeriodic(self):
        # super().robotPeriodic()
        wpilib.SmartDashboard.updateValues()
    def testPeriodic(self):
        """Test mode: steer-offset calibration and manual climber control."""
        self.vision.execute()  # Keep the time offset calcs running
        joystick_vx = -rescale_js(
            self.joystick.getY(), deadzone=0.1, exponential=1.5, rate=0.5
        )
        # Hold a button to select a module, then drive/trim its steer motor.
        for button, module in zip((5, 3, 4, 6), self.chassis.modules):
            if self.joystick.getRawButton(button):
                module.store_steer_offsets()
                module.steer_motor.set(ctre.ControlMode.PercentOutput, joystick_vx)
                if self.joystick.getTriggerPressed():
                    module.steer_motor.set(
                        ctre.ControlMode.Position,
                        module.steer_motor.getSelectedSensorPosition(0)
                        + self.offset_rotation_rate,
                    )
                if self.joystick.getRawButtonPressed(2):
                    module.steer_motor.set(
                        ctre.ControlMode.Position,
                        module.steer_motor.getSelectedSensorPosition(0)
                        - self.offset_rotation_rate,
                    )
        if self.joystick.getRawButtonPressed(8):
            for module in self.chassis.modules:
                module.drive_motor.set(ctre.ControlMode.PercentOutput, 0.3)
        if self.joystick.getRawButtonPressed(12):
            for module in self.chassis.modules:
                module.steer_motor.set(
                    ctre.ControlMode.Position, module.steer_enc_offset
                )
        if self.gamepad.getStartButton():
            self.climber.retract_all()
        # Climber.execute is not scheduled in test mode, so run it by hand.
        self.climber.execute()
        if self.gamepad.getPOV() != -1:
            speed = 0.1
            azimuth = math.radians(-self.gamepad.getPOV())
            for module in self.chassis.modules:
                module.set_velocity(
                    speed * math.cos(azimuth),
                    speed * math.sin(azimuth),
                    absolute_rotation=True,
                )
        if self.gamepad.getTriggerAxis(self.gamepad.Hand.kLeft) > 0.5:
            self.hatch_enable_piston.set(wpilib.DoubleSolenoid.Value.kReverse)
# Hand control of the robot program over to the robotpy framework.
if __name__ == "__main__":
    wpilib.run(Robot)
| {"/autonomous/autonomous.py": ["/automations/alignment.py", "/automations/cargo.py", "/components/cargo.py", "/components/hatch.py", "/components/vision.py", "/utilities/pure_pursuit.py"], "/tests/test_vision.py": ["/components/vision.py"], "/automations/alignment.py": ["/automations/cargo.py", "/automations/hatch.py", "/components/vision.py"], "/robot.py": ["/automations/alignment.py", "/automations/cargo.py", "/automations/climb.py", "/automations/hatch.py", "/components/cargo.py", "/components/hatch.py", "/components/vision.py", "/components/climb.py"], "/automations/climb.py": ["/components/climb.py", "/components/cargo.py"], "/automations/hatch.py": ["/components/hatch.py"], "/autonomous/test.py": ["/utilities/pure_pursuit.py"], "/components/cargo.py": ["/components/vision.py"], "/automations/cargo.py": ["/components/cargo.py", "/components/vision.py"]} |
65,898 | orike122/pydeepspace | refs/heads/master | /automations/climb.py | from magicbot import StateMachine, state, timed_state
from components.climb import Climber
from components.cargo import CargoManipulator, Height
from pyswervedrive.chassis import SwerveChassis
class ClimbAutomation(StateMachine):
    """Sequences the level-3 climb: extend lifts, walk forward, retract."""
    chassis: SwerveChassis
    climber: Climber
    cargo_component: CargoManipulator
    VERBOSE_LOGGING = True
    def on_disable(self):
        self.done()
    def start_climb_lv3(self):
        self.engage()
        self.climber.running = True
    def done(self):
        super().done()
        self.chassis.set_modules_drive_brake()
        self.chassis.automation_running = False
        self.climber.running = False
    @state(first=True, must_finish=True)
    def extend_both_lifts_lv3(self, initial_call):
        if initial_call:
            # Coast the drive so the climber wheel can drag the chassis.
            self.chassis.set_modules_drive_coast()
            self.chassis.heading_hold_off()
            self.chassis.automation_running = True
            # Move the cargo arm clear of the climb.
            self.cargo_component.move_to(Height.LOADING_STATION)
        self.move_swerves()
        self.climber.extend_all()
        if self.climber.is_both_extended():
            self.next_state_now("align_front_lift")
    @timed_state(must_finish=True, next_state="retract_front_lift", duration=1)
    def align_front_lift(self):
        # Drive until the front podium switch triggers, or time out after 1 s.
        self.climber.drive_forward()
        if self.climber.is_front_touching_podium():
            self.next_state("retract_front_lift")
    @state(must_finish=True)
    def retract_front_lift(self):
        self.climber.retract_front()
        self.move_swerves(0.5)
        if self.climber.front.is_above_ground():
            self.next_state_now("align_back_lift")
    @timed_state(must_finish=True, next_state="retract_back_lift", duration=2)
    def align_back_lift(self):
        self.climber.retract_front()  # make sure it is all the way up
        self.move_swerves(0.5)
        self.climber.drive_forward()
        self.climber.level_back()
    def abort(self):
        self.engage("aborting")
    @state(must_finish=True)
    def aborting(self):
        # Retract everything, then restore normal chassis behaviour.
        self.climber.retract_all()
        if self.climber.front.is_retracted() and self.climber.back.is_retracted():
            self.chassis.heading_hold_on()
            self.done()
    # @timed_state(must_finish=True, next_state="retract_back_lift", duration=2)
    # def roll_back(self, initial_call):
    #     if initial_call:
    #         self.A_counts_start = self.chassis.module_a.drive_motor.getSelectedSensorPosition(
    #             0
    #         )
    #         self.B_counts_start = self.chassis.module_b.drive_motor.getSelectedSensorPosition(
    #             0
    #         )
    #     self.move_swerves(-0.3)
    #     self.climber.drive_forward(-0.2)
    #     if (
    #         abs(
    #             self.chassis.module_a.drive_motor.getSelectedSensorPosition(0)
    #             - self.A_counts_start
    #         )
    #         + abs(
    #             self.chassis.module_b.drive_motor.getSelectedSensorPosition(0)
    #             - self.B_counts_start
    #         )
    #         / 2
    #         > 0.05*self.chassis.module_a.COUNTS_PER_METRE
    #     ):
    #         self.next_state("retract_back_lift")
    #         self.move_swerves(0)
    @state(must_finish=True)
    def retract_back_lift(self, initial_call):
        if initial_call:
            # Pistons prop the robot up while the last lift comes in.
            self.climber.fire_pistons()
            self.climber.drive_forward(1)
            self.move_swerves(0)
        self.climber.retract_back()
        if self.climber.back.is_retracted():
            self.done()
    def move_swerves(self, velocity=0.05):
        """Creep the swerve drive at *velocity* in the robot frame."""
        self.chassis.set_inputs(0, velocity, 0, field_oriented=False)
| {"/autonomous/autonomous.py": ["/automations/alignment.py", "/automations/cargo.py", "/components/cargo.py", "/components/hatch.py", "/components/vision.py", "/utilities/pure_pursuit.py"], "/tests/test_vision.py": ["/components/vision.py"], "/automations/alignment.py": ["/automations/cargo.py", "/automations/hatch.py", "/components/vision.py"], "/robot.py": ["/automations/alignment.py", "/automations/cargo.py", "/automations/climb.py", "/automations/hatch.py", "/components/cargo.py", "/components/hatch.py", "/components/vision.py", "/components/climb.py"], "/automations/climb.py": ["/components/climb.py", "/components/cargo.py"], "/automations/hatch.py": ["/components/hatch.py"], "/autonomous/test.py": ["/utilities/pure_pursuit.py"], "/components/cargo.py": ["/components/vision.py"], "/automations/cargo.py": ["/components/cargo.py", "/components/vision.py"]} |
65,899 | orike122/pydeepspace | refs/heads/master | /automations/hatch.py | import math
from magicbot import StateMachine, state
from components.hatch import Hatch
from pyswervedrive.chassis import SwerveChassis
class HatchAutomation(StateMachine):
    """State machine for depositing a hatch and retracting afterwards."""

    chassis: SwerveChassis
    hatch: Hatch

    def __init__(self):
        super().__init__()
        self.fired_position = 0, 0

    def grab(self):
        """Close the fingers on a hatch and remember that we hold one."""
        self.hatch.extend_fingers()
        self.hatch.has_hatch = True

    def outake(self, force=False):
        """Begin the deposit sequence."""
        self.engage("outaking", force=force)

    @state(first=True, must_finish=True)
    def outaking(self, state_tm, initial_call):
        if initial_call:
            self.hatch.retract_fingers()
        # Give the fingers half a second to release before punching.
        if state_tm > 0.5:
            self.hatch.punch()
            self.next_state("retract_after_move")

    @state(must_finish=True)
    def retract_after_move(self, initial_call, state_tm):
        """
        Ensure we have moved away before we retract punchers.
        """
        if initial_call:
            self.fired_position = self.chassis.position
        dx = self.fired_position[0] - self.chassis.position[0]
        dy = self.fired_position[1] - self.chassis.position[1]
        # Retract once we have driven 0.5 m away, or after a 5 s timeout.
        if math.hypot(dx, dy) > 0.5 or state_tm > 5:
            self.hatch.retract()
            self.done()
| {"/autonomous/autonomous.py": ["/automations/alignment.py", "/automations/cargo.py", "/components/cargo.py", "/components/hatch.py", "/components/vision.py", "/utilities/pure_pursuit.py"], "/tests/test_vision.py": ["/components/vision.py"], "/automations/alignment.py": ["/automations/cargo.py", "/automations/hatch.py", "/components/vision.py"], "/robot.py": ["/automations/alignment.py", "/automations/cargo.py", "/automations/climb.py", "/automations/hatch.py", "/components/cargo.py", "/components/hatch.py", "/components/vision.py", "/components/climb.py"], "/automations/climb.py": ["/components/climb.py", "/components/cargo.py"], "/automations/hatch.py": ["/components/hatch.py"], "/autonomous/test.py": ["/utilities/pure_pursuit.py"], "/components/cargo.py": ["/components/vision.py"], "/automations/cargo.py": ["/components/cargo.py", "/components/vision.py"]} |
65,900 | orike122/pydeepspace | refs/heads/master | /autonomous/test.py | from magicbot.state_machine import AutonomousStateMachine, state
from pyswervedrive.chassis import SwerveChassis
from utilities.navx import NavX
from utilities.pure_pursuit import PurePursuit, Waypoint
class TestPursuitAuto(AutonomousStateMachine):
    """Autonomous test mode that follows a fixed path with pure pursuit."""
    MODE_NAME = "Test Pursuit Auto"
    DEFAULT = False
    chassis: SwerveChassis
    imu: NavX
    def on_enable(self):
        super().on_enable()
        # Treat wherever we start as the odometry origin.
        self.chassis.odometry_x = 0
        self.chassis.odometry_y = 0
        # Waypoint fields appear to be (x, y, heading, speed) — see
        # utilities.pure_pursuit for the authoritative definition.
        self.points = (
            self.current_pos,
            Waypoint(2, 0, 0, 1),
            Waypoint(2, 2, 0, 1),
            Waypoint(0, 2, 0, 1),
        )
        self.pursuit = PurePursuit(look_ahead=0.2, look_ahead_speed_modifier=0.0)
    @state(first=True)
    def move_forwards(self, initial_call):
        if initial_call:
            self.pursuit.build_path(self.points)
        vx, vy, vz = self.pursuit.find_velocity(self.chassis.position)
        # Rotation output is ignored; only translation is commanded.
        self.chassis.set_inputs(vx, vy, 0)
        if self.pursuit.completed_path:
            self.chassis.set_inputs(0, 0, 0)
            self.done()
    @property
    def current_pos(self):
        # Current pose as a Waypoint with unit final element.
        return Waypoint(
            self.chassis.odometry_x, self.chassis.odometry_y, self.imu.getAngle(), 1
        )
| {"/autonomous/autonomous.py": ["/automations/alignment.py", "/automations/cargo.py", "/components/cargo.py", "/components/hatch.py", "/components/vision.py", "/utilities/pure_pursuit.py"], "/tests/test_vision.py": ["/components/vision.py"], "/automations/alignment.py": ["/automations/cargo.py", "/automations/hatch.py", "/components/vision.py"], "/robot.py": ["/automations/alignment.py", "/automations/cargo.py", "/automations/climb.py", "/automations/hatch.py", "/components/cargo.py", "/components/hatch.py", "/components/vision.py", "/components/climb.py"], "/automations/climb.py": ["/components/climb.py", "/components/cargo.py"], "/automations/hatch.py": ["/components/hatch.py"], "/autonomous/test.py": ["/utilities/pure_pursuit.py"], "/components/cargo.py": ["/components/vision.py"], "/automations/cargo.py": ["/components/cargo.py", "/components/vision.py"]} |
65,901 | orike122/pydeepspace | refs/heads/master | /components/cargo.py | import enum
import math
import ctre
import rev
import wpilib
from components.vision import Vision
class Height(enum.Enum):
    # Arm setpoints in encoder units (see CargoManipulator.UNITS_PER_RADIAN).
    FLOOR = 18.6
    CARGO_SHIP = 0
    # NOTE(review): same value as CARGO_SHIP, so this member is an enum
    # *alias* — Height.LOADING_STATION is Height.CARGO_SHIP, and only one of
    # them appears when iterating Height. Confirm this is intended.
    LOADING_STATION = 0
class CargoManipulator:
vision: Vision
arm_motor: rev.CANSparkMax
intake_motor: ctre.VictorSPX
intake_switch: wpilib.DigitalInput
GEAR_RATIO = 7 * 5 * 84 / 50
UNITS_PER_RADIAN = 18.6 / math.radians(105) # measured
INTAKE_SPEED = -0.75
SLOW_INTAKE_SPEED = -0.4
OUTTAKE_SPEED = 1.0
    def __init__(self):
        # Open-loop intake command applied each loop by execute().
        self.intake_motor_output = 0.0
def setup(self) -> None:
self.arm_motor.setIdleMode(rev.IdleMode.kBrake)
self.arm_motor.setInverted(False)
self.intake_motor.setNeutralMode(ctre.NeutralMode.Coast)
self.encoder = self.arm_motor.getEncoder()
self.pid_controller = self.arm_motor.getPIDController()
self.pid_controller.setP(5e-4)
self.pid_controller.setI(1e-6)
self.pid_controller.setD(0)
self.pid_controller.setIZone(0)
self.pid_controller.setFF(1 / 5675)
self.pid_controller.setOutputRange(-1, 1)
self.pid_controller.setSmartMotionMaxVelocity(1200) # rpm
self.pid_controller.setSmartMotionMaxAccel(1000) # rpm/s
self.pid_controller.setSmartMotionAllowedClosedLoopError(0)
self.pid_controller.setOutputRange(-1, 1)
self.top_limit_switch = self.arm_motor.getReverseLimitSwitch(
rev.LimitSwitchPolarity.kNormallyOpen
)
self.bottom_limit_switch = self.arm_motor.getForwardLimitSwitch(
rev.LimitSwitchPolarity.kNormallyOpen
)
self.top_limit_switch.enableLimitSwitch(True)
self.bottom_limit_switch.enableLimitSwitch(True)
self.setpoint = Height.LOADING_STATION.value
self.tolerance = 0.1
self.has_cargo = False
def execute(self) -> None:
self.intake_motor.set(ctre.ControlMode.PercentOutput, self.intake_motor_output)
self.pid_controller.setReference(self.setpoint, rev.ControlType.kSmartMotion)
if self.is_contained():
self.has_cargo = True
self.vision.use_cargo()
if self.top_limit_switch.get():
self.encoder.setPosition(Height.LOADING_STATION.value)
if self.bottom_limit_switch.get():
self.encoder.setPosition(Height.FLOOR.value)
def at_height(self, desired_height) -> bool:
return abs(desired_height.value - self.encoder.getPosition()) <= self.tolerance
def move_to(self, height: Height) -> None:
"""Move arm to specified height.
Args:
height: Height to move arm to
"""
self.setpoint = height.value
def on_disable(self) -> None:
self.intake_motor.set(ctre.ControlMode.PercentOutput, 0)
self.arm_motor.set(0)
def on_enable(self) -> None:
self.setpoint = self.encoder.getPosition()
def intake(self) -> None:
self.intake_motor_output = self.INTAKE_SPEED
def outtake(self) -> None:
self.has_cargo = False
self.intake_motor_output = self.OUTTAKE_SPEED
def slow_intake(self) -> None:
self.intake_motor_output = self.SLOW_INTAKE_SPEED
def stop(self) -> None:
self.intake_motor_output = 0
def is_contained(self) -> bool:
return not self.intake_switch.get()
| {"/autonomous/autonomous.py": ["/automations/alignment.py", "/automations/cargo.py", "/components/cargo.py", "/components/hatch.py", "/components/vision.py", "/utilities/pure_pursuit.py"], "/tests/test_vision.py": ["/components/vision.py"], "/automations/alignment.py": ["/automations/cargo.py", "/automations/hatch.py", "/components/vision.py"], "/robot.py": ["/automations/alignment.py", "/automations/cargo.py", "/automations/climb.py", "/automations/hatch.py", "/components/cargo.py", "/components/hatch.py", "/components/vision.py", "/components/climb.py"], "/automations/climb.py": ["/components/climb.py", "/components/cargo.py"], "/automations/hatch.py": ["/components/hatch.py"], "/autonomous/test.py": ["/utilities/pure_pursuit.py"], "/components/cargo.py": ["/components/vision.py"], "/automations/cargo.py": ["/components/cargo.py", "/components/vision.py"]} |
65,902 | orike122/pydeepspace | refs/heads/master | /automations/cargo.py | from magicbot import StateMachine, state, timed_state
from components.cargo import CargoManipulator, Height
from components.vision import Vision
class CargoManager(StateMachine):
    """State machine sequencing the cargo arm: move, intake, outtake."""

    cargo_component: CargoManipulator
    vision: Vision

    def on_disable(self):
        self.done()

    def intake_floor(self, force=False):
        """Start (or force-restart) the floor intake sequence."""
        self.engage(initial_state="move_to_floor", force=force)

    @state(first=True, must_finish=True)
    def move_to_floor(self, initial_call, state_tm):
        self.cargo_component.move_to(Height.FLOOR)
        self.cargo_component.intake()
        self.next_state("intaking_cargo")

    def outake_cargo_ship(self, force=False):
        """Start (or force-restart) the cargo-ship outtake sequence."""
        self.engage(initial_state="move_to_cargo_ship", force=force)

    @state(must_finish=True)
    def move_to_cargo_ship(self, initial_call, state_tm):
        self.cargo_component.move_to(Height.CARGO_SHIP)
        # Unlike the intake states, wait for the arm to arrive before ejecting.
        if self.cargo_component.at_height(Height.CARGO_SHIP):
            self.next_state("outtaking_cargo")

    def intake_loading(self, force=False):
        """Start (or force-restart) the loading-station intake sequence."""
        self.engage(initial_state="move_to_loading_station", force=force)

    @state(must_finish=True)
    def move_to_loading_station(self, initial_call, state_tm):
        self.cargo_component.move_to(Height.LOADING_STATION)
        self.cargo_component.intake()
        self.next_state("intaking_cargo")

    @state(must_finish=True)
    def intaking_cargo(self):
        self.vision.use_cargo()
        if self.cargo_component.is_contained():
            self.next_state("finishing_intake")
        else:
            self.cargo_component.intake()

    @state(must_finish=True)
    def outtaking_cargo(self, initial_call, state_tm):
        self.cargo_component.outtake()
        # Keep ejecting for a second, then hand vision back to hatch mode.
        if state_tm > 1:
            self.vision.use_hatch()
            self.done()

    @timed_state(duration=1)
    def finishing_intake(self):
        # Keep pulling gently for the timed second to seat the cargo.
        self.cargo_component.slow_intake()

    def done(self):
        """Stop the rollers and stow the arm before ending the state machine."""
        self.cargo_component.stop()
        self.cargo_component.move_to(Height.LOADING_STATION)
        super().done()
| {"/autonomous/autonomous.py": ["/automations/alignment.py", "/automations/cargo.py", "/components/cargo.py", "/components/hatch.py", "/components/vision.py", "/utilities/pure_pursuit.py"], "/tests/test_vision.py": ["/components/vision.py"], "/automations/alignment.py": ["/automations/cargo.py", "/automations/hatch.py", "/components/vision.py"], "/robot.py": ["/automations/alignment.py", "/automations/cargo.py", "/automations/climb.py", "/automations/hatch.py", "/components/cargo.py", "/components/hatch.py", "/components/vision.py", "/components/climb.py"], "/automations/climb.py": ["/components/climb.py", "/components/cargo.py"], "/automations/hatch.py": ["/components/hatch.py"], "/autonomous/test.py": ["/utilities/pure_pursuit.py"], "/components/cargo.py": ["/components/vision.py"], "/automations/cargo.py": ["/components/cargo.py", "/components/vision.py"]} |
65,903 | orike122/pydeepspace | refs/heads/master | /components/hatch.py | import wpilib
class Hatch:
    """Pneumatic hatch mechanism: holding fingers plus ejection punchers."""

    # Injected by the robot framework.
    fingers: wpilib.DoubleSolenoid
    punchers: wpilib.Solenoid
    enable_piston: wpilib.DoubleSolenoid
    left_limit_switch: wpilib.DigitalInput
    right_limit_switch: wpilib.DigitalInput

    def setup(self):
        self.has_hatch = False
        self._fingers_state = wpilib.DoubleSolenoid.Value.kReverse
        self.enable_hatch = False

    def on_enable(self):
        self._punch_on = False
        self.enable_piston.set(wpilib.DoubleSolenoid.Value.kForward)
        self.loop_counter = 0
        self.enable_counter = 0

    def execute(self):
        """Run at the end of every control loop iteration."""
        # NOTE(review): with delay = -1, `loop_counter > delay` is always
        # true, so the puncher fires immediately when requested — confirm
        # the delay mechanism is intentionally disabled.
        delay = -1
        self.fingers.set(self._fingers_state)
        self.punchers.set(self._punch_on and self.loop_counter > delay)
        if self._punch_on and self.loop_counter > delay:
            self.has_hatch = False
        self.loop_counter += 1
        self.enable_counter += 1
        if self.enable_hatch:
            # Grab a few cycles after being asked — presumably to give the
            # enable piston time to move; confirm.
            if self.enable_counter > 5:
                self.extend_fingers()
                self.has_hatch = True
                self.enable_hatch = False
        # if self.is_contained():
        #     self.has_hatch = True

    def punch(self):
        # Reset the counter so the (currently disabled) delay restarts.
        self.loop_counter = 0
        self._punch_on = True

    def retract(self):
        self._punch_on = False

    def extend_fingers(self):
        self._fingers_state = wpilib.DoubleSolenoid.Value.kForward

    def retract_fingers(self):
        self._fingers_state = wpilib.DoubleSolenoid.Value.kReverse

    def is_contained(self):
        # Either switch reading False (inverted inputs) counts as a hatch present.
        return any(
            [not self.left_limit_switch.get(), not self.right_limit_switch.get()]
        )
| {"/autonomous/autonomous.py": ["/automations/alignment.py", "/automations/cargo.py", "/components/cargo.py", "/components/hatch.py", "/components/vision.py", "/utilities/pure_pursuit.py"], "/tests/test_vision.py": ["/components/vision.py"], "/automations/alignment.py": ["/automations/cargo.py", "/automations/hatch.py", "/components/vision.py"], "/robot.py": ["/automations/alignment.py", "/automations/cargo.py", "/automations/climb.py", "/automations/hatch.py", "/components/cargo.py", "/components/hatch.py", "/components/vision.py", "/components/climb.py"], "/automations/climb.py": ["/components/climb.py", "/components/cargo.py"], "/automations/hatch.py": ["/components/hatch.py"], "/autonomous/test.py": ["/utilities/pure_pursuit.py"], "/components/cargo.py": ["/components/vision.py"], "/automations/cargo.py": ["/components/cargo.py", "/components/vision.py"]} |
65,918 | wjt/fewerror | refs/heads/master | /fewerror/twitter/fmk.py | import enum
import logging
from .util import user_url
log = logging.getLogger(__name__)
def lang_base(lang):
    """Return the primary subtag of a hyphenated language code ('en-GB' -> 'en')."""
    return lang.split('-', 1)[0]
class FMK(enum.Enum):
    """Verdict for a new follower: follow back, ignore, or block."""
    FOLLOW_BACK = 1
    NEUTRAL = 2
    BLOCK = 3
def classify_user(api, whom, fetch_statuses=True):
    '''Crude attempt to identify spammy followers. It appears that this bot
    is used to boost follower counts since it almost always follows back.

    api: a tweepy-style API used to fetch the user's recent timeline.
    whom: the user object to classify.
    fetch_statuses: if False, never hit the network; return NEUTRAL when
        the user object alone is not enough to decide.

    Returns an entry from FMK.'''
    label = '{} (#{})'.format(user_url(whom), whom.id)
    # Sorry if you speak these languages, but after getting several
    # thousand spam followers I needed a crude signal.
    forbidden_langs = {'ar', 'ja', 'tr', 'zh'}
    if lang_base(whom.lang) in forbidden_langs:
        log.info('%s has forbidden lang %s',
                 label, whom.lang)
        return FMK.BLOCK
    # Many spam users had user.lang == 'en' but tweet only in those languages.
    try:
        # "fully-hydrated" users have a status on them
        statuses = [whom.status]
    except AttributeError:
        # (if they're not protected...)
        if whom.protected:
            log.info('%s is protected; assume they are okay', label)
            return FMK.FOLLOW_BACK
        if whom.statuses_count == 0 and whom.followers_count > 1000:
            log.info('%s has never tweeted but has %d followers',
                     label, whom.followers_count)
            return FMK.BLOCK
        # but users in follow notifications do not; and nor do users who
        # haven't tweeted for a while (or ever)
        if fetch_statuses:
            # TODO: this fails for protected accounts who haven't accepted our request
            statuses = api.user_timeline(user_id=whom.id, count=20)
        else:
            log.info('%s: not enough information', label)
            return FMK.NEUTRAL
    # Judge by the languages of the sampled statuses.
    langs = {lang_base(status.lang) for status in statuses}
    if langs & forbidden_langs:
        log.info('%s tweets in forbidden lang %s',
                 label, ', '.join(langs & forbidden_langs))
        return FMK.BLOCK
    if 'en' not in langs:
        log.info('%s tweets in %s, not en -- why are they following us?',
                 label, ', '.join(langs))
        return FMK.NEUTRAL
    return FMK.FOLLOW_BACK
| {"/fewerror/twitter/fmk.py": ["/fewerror/twitter/util.py"], "/tests/test_state.py": ["/fewerror/state.py"], "/fewerror/telegram.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__init__.py": ["/fewerror/__init__.py", "/fewerror/state.py", "/fewerror/util.py", "/fewerror/twitter/util.py", "/fewerror/twitter/fmk.py"], "/tests/test_crashes.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__main__.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/fewerror/twitter/batch.py": ["/fewerror/twitter/__init__.py"], "/fewerror/thatsnotmybot.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/tests/test_twitter.py": ["/fewerror/twitter/__init__.py"], "/fewerror/get_oauth_token.py": ["/fewerror/__init__.py"], "/fewerror/__init__.py": ["/fewerror/util.py"], "/tests/test_grammar.py": ["/fewerror/__init__.py"], "/tests/test_thatsnotmybot.py": ["/fewerror/thatsnotmybot.py"]} |
65,919 | wjt/fewerror | refs/heads/master | /tests/test_state.py | #!/usr/bin/env python
from contextlib import contextmanager
from datetime import datetime, timedelta
from fewerror.state import State
class Now:
    """A callable fake clock whose time can be advanced manually in tests."""

    def __init__(self):
        self.now = datetime.now()

    def advance(self, td):
        """Shift the fake clock forward by timedelta *td*."""
        self.now = self.now + td

    def __call__(self):
        """Return the current fake time."""
        return self.now
@contextmanager
def roundtripped_state(tmpdir, per_word_timeout_seconds=-1):
    """Yield (state, fake_clock); on exit, reload and assert it round-trips.

    per_word_timeout_seconds=-1 disables per-word rate limiting (see
    test_reply_once below).
    """
    d = str(tmpdir)
    now = Now()

    def load():
        return State.load(
            "test", d,
            per_word_timeout_seconds=per_word_timeout_seconds,
            now=now)

    s = load()
    yield s, now
    # Reloading from disk must reproduce the mutated in-memory state.
    s2 = load()
    assert s == s2
def test_str(tmpdir):
    """str() of a fresh State includes a zero count."""
    with roundtripped_state(tmpdir) as (s, now):
        assert ' 0 ' in str(s)
def test_reply_once(tmpdir):
    """Never reply twice to one tweet; with rate-limiting off, any other
    tweet is immediately fair game regardless of word."""
    with roundtripped_state(tmpdir) as (s, now):
        assert s.can_reply(123, ['blood'])
        # can_reply must not itself mutate state: asking twice is fine.
        assert s.can_reply(123, ['blood'])
        s.record_reply(123, ['blood'], 124)
        assert not s.can_reply(123, ['blood'])
        # It shouldn't matter what the word is, we don't reply to the same tweet twice.
        assert not s.can_reply(123, ['annoying'])
        # But rate-limiting is disabled, so reply immediately to the same word in any other toot
        assert s.can_reply(456, ['blood'])
        assert s.can_reply(456, ['annoying'])
def test_word_rate_limit(tmpdir):
    """With a 30s per-word timeout, a replied-to word is embargoed (even in
    a list with other words) until the timeout lapses."""
    with roundtripped_state(tmpdir, per_word_timeout_seconds=30) as (s, now):
        assert s.can_reply(123, ['blood'])
        s.record_reply(123, ['blood'], 124)
        assert not s.can_reply(123, ['blood'])
        # Reply to new tweets, but not about blood
        assert not s.can_reply(456, ['blood'])
        assert not s.can_reply(456, ['blood', 'annoying'])
        assert not s.can_reply(456, ['annoying', 'blood'])
        assert s.can_reply(789, ['annoying'])
        # Once the timeout passes, 'blood' becomes usable again.
        now.advance(timedelta(seconds=31))
        assert s.can_reply(456, ['blood'])
        assert s.can_reply(789, ['annoying'])
| {"/fewerror/twitter/fmk.py": ["/fewerror/twitter/util.py"], "/tests/test_state.py": ["/fewerror/state.py"], "/fewerror/telegram.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__init__.py": ["/fewerror/__init__.py", "/fewerror/state.py", "/fewerror/util.py", "/fewerror/twitter/util.py", "/fewerror/twitter/fmk.py"], "/tests/test_crashes.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__main__.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/fewerror/twitter/batch.py": ["/fewerror/twitter/__init__.py"], "/fewerror/thatsnotmybot.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/tests/test_twitter.py": ["/fewerror/twitter/__init__.py"], "/fewerror/get_oauth_token.py": ["/fewerror/__init__.py"], "/fewerror/__init__.py": ["/fewerror/util.py"], "/tests/test_grammar.py": ["/fewerror/__init__.py"], "/tests/test_thatsnotmybot.py": ["/fewerror/thatsnotmybot.py"]} |
65,920 | wjt/fewerror | refs/heads/master | /fewerror/telegram.py | #!/usr/bin/env python3
# vim: fileencoding=utf-8
import argparse
import logging
import os
import telegram
from telegram.ext import (
Updater, CommandHandler, MessageHandler, Filters,
)
from . import checkedshirt, find_corrections, format_reply
log = logging.getLogger(__name__)
def _context(message):
    """Describe where *message* came from, for log lines."""
    sender = message.from_user.username
    if message.chat.type != telegram.Chat.GROUP:
        return sender
    return '{} @ {}'.format(sender, message.chat.title)
def on_start(bot, update):
    """Handle /start: greet the chat with a one-line description of the bot."""
    log.info('<%s> %s', _context(update.message), update.message.text)
    bot.sendMessage(chat_id=update.message.chat_id,
                    text="Hi. I'll let you know when you say ‘less’ but "
                    "should say ‘fewer’.")
def on_message(bot, update):
    """Reply to a text message with a correction when it misuses ‘less’."""
    message = update.message
    context = _context(message)
    qs = find_corrections(message.text)
    if qs:
        log.info('<%s> %s', context, update.message.text)
        reply = format_reply(qs)
        log.info('--> %s', reply)
        # Reply threaded onto the offending message.
        bot.sendMessage(
            chat_id=message.chat_id,
            reply_to_message_id=message.message_id,
            text=reply)
def main():
    """Entry point: set up logging, wire handlers, and poll until stopped.

    Requires $TELEGRAM_BOT_TOKEN to be set (KeyError otherwise).
    """
    parser = argparse.ArgumentParser(
        description='Annoy some Telegram users. '
        'Set $TELEGRAM_BOT_TOKEN for success.')
    checkedshirt.add_arguments(parser)
    args = parser.parse_args()
    checkedshirt.init(args)
    token = os.environ['TELEGRAM_BOT_TOKEN']
    updater = Updater(token=token)
    dispatcher = updater.dispatcher
    dispatcher.add_handler(CommandHandler('start', on_start))
    dispatcher.add_handler(MessageHandler(Filters.text, on_message))
    updater.start_polling()
    updater.idle()


if __name__ == '__main__':
    main()
| {"/fewerror/twitter/fmk.py": ["/fewerror/twitter/util.py"], "/tests/test_state.py": ["/fewerror/state.py"], "/fewerror/telegram.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__init__.py": ["/fewerror/__init__.py", "/fewerror/state.py", "/fewerror/util.py", "/fewerror/twitter/util.py", "/fewerror/twitter/fmk.py"], "/tests/test_crashes.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__main__.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/fewerror/twitter/batch.py": ["/fewerror/twitter/__init__.py"], "/fewerror/thatsnotmybot.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/tests/test_twitter.py": ["/fewerror/twitter/__init__.py"], "/fewerror/get_oauth_token.py": ["/fewerror/__init__.py"], "/fewerror/__init__.py": ["/fewerror/util.py"], "/tests/test_grammar.py": ["/fewerror/__init__.py"], "/tests/test_thatsnotmybot.py": ["/fewerror/thatsnotmybot.py"]} |
65,921 | wjt/fewerror | refs/heads/master | /fewerror/twitter/__init__.py | # coding=utf-8
import json
import logging
import logging.config
import os
import random
import re
import time
import tweepy
from tweepy.streaming import StreamListener
from .. import find_corrections, format_reply
from ..state import State
from ..util import reverse_inits, OrderedSet
from .util import user_url, status_url
from .fmk import FMK, classify_user
log = logging.getLogger(__name__)
def get_sanitized_text(status):
    """Return a tweet's text with media/url/mention entities removed.

    Prefers the extended ("full") text when present. Entity spans are cut
    out back-to-front so earlier indices stay valid; HTML-escaped
    ampersands are then decoded and surrounding whitespace stripped.

    Raises ValueError if an extended_tweet carries no recognisable
    entities payload.
    """
    if hasattr(status, 'extended_tweet'):
        # https://dev.twitter.com/overview/api/upcoming-changes-to-tweets#compatibility-mode-json-rendering
        # Note that the field containing “The full set of entities” is helpfully
        # documented as “entities/extended_entities, etc.” We could use
        # display_text_range to strip leading usernames and trailing URLs but we
        # also want to remove internal entities.
        extended = status.extended_tweet
        text = extended['full_text']
        entities = None
        for key in ('entities', 'extended_entities'):
            if key in extended:
                entities = extended[key]
                break
        if entities is None:
            raise ValueError("Can't find entities in extended_tweet", status._json)
    else:
        text = status.text
        entities = status.entities

    spans = []
    for kind in ('media', 'urls', 'user_mentions'):  # TODO: what about hashtags?
        if kind in entities:
            spans.extend(entities[kind])

    # Remove spans right-to-left so indices of earlier spans stay valid.
    for entity in sorted(spans, key=lambda e: e['indices'], reverse=True):
        start, end = entity['indices']
        text = text[:start] + text[end:]

    return text.replace("&amp;", "&").strip()
# Any standalone "less", case-insensitive: the word that triggers the bot.
lessish_rx = re.compile(r'\bLESS\b', re.IGNORECASE)
# "RT"/"MT" markers used in manual retweets/modified tweets.
manual_rt_rx = re.compile(r'''\b[RM]T\b''')
# A leading straight or curly quotation mark (currently unused by
# looks_like_retweet — see the commented-out check there).
quote_rx = re.compile(r'''^['"‘“]''')
def looks_like_retweet(text):
    """Truthy when *text* carries a manual-retweet marker ("RT"/"MT")."""
    return re.search(r'\b[RM]T\b', text)
class LessListener(StreamListener):
    """Stream listener that replies to followers who say ‘less’ for ‘fewer’.

    Replies are rate-limited through the persisted State; matched tweets
    can be archived to disk (--gather) and new followers are followed
    back or blocked according to classify_user().
    """

    def __init__(self, *args, **kwargs):
        # Our own kwargs are popped before handing the rest to tweepy.
        state_dir = kwargs.pop('state_dir')
        self.post_replies = kwargs.pop('post_replies', False)
        self.gather = kwargs.pop('gather', None)
        StreamListener.__init__(self, *args, **kwargs)
        self.me = self.api.me()
        self._state = State.load(self.me.screen_name, state_dir)
        if self.gather:
            os.makedirs(self.gather, exist_ok=True)

    def on_connect(self):
        me = self.me
        log.info("streaming as @%s (#%d)", me.screen_name, me.id)

    def on_error(self, status_code):
        log.info("HTTP status %d", status_code)
        return True  # permit tweepy.Stream to retry

    december_greetings = (
        'Ho ho ho!',
        'Merry Christmas!',
        '🎅🎅🎅',
        '🎄🎄🎄',
    )
    # Baseline greeting probability on 1 December.
    festive_probability = 0.25

    def get_festive_probability(self, d):
        """Festivities increase linearly as crim cram approaches"""
        if d.month != 12 or d.day > 25:
            return 0
        # Linear ramp from festive_probability on 1 Dec to 1.0 on 25 Dec.
        x = (d.day - 1) / 24
        c = self.festive_probability
        m = 1 - c
        p = m * x + c
        log.info("%s -> %.2f", d, p)
        return p

    def get_festive_greeting(self, d):
        """Return a seasonal greeting with date-dependent probability, else ''."""
        p = self.get_festive_probability(d)
        if random.random() < p:
            return random.choice(self.december_greetings)
        else:
            return ''

    def save_tweet(self, received_status):
        """Archive the raw status JSON under the --gather directory, if set."""
        if not self.gather:
            return
        id_ = received_status.id_str
        # Shard output directories by id prefix.
        id_bits = [
            id_[0:-16],
        ]
        dir_ = os.path.join(self.gather, *id_bits)
        os.makedirs(dir_, exist_ok=True)
        filename = os.path.join(dir_, '{}.json'.format(id_))
        with open(filename, 'w') as f:
            json.dump(obj=received_status._json, fp=f)

    def on_status(self, status):
        """Examine one streamed tweet and maybe reply with a correction."""
        to_mention = OrderedSet()
        # Reply to the original when a tweet is RTed properly
        if hasattr(status, 'retweeted_status'):
            # Ignore real RTs
            return
        text = get_sanitized_text(status)
        if not lessish_rx.search(text):
            return
        log.info("%s %s", status_url(status), text)
        if looks_like_retweet(text):
            log.info('…looks like a manual RT, skipping')
            return
        self.save_tweet(status)
        try:
            quantities = find_corrections(text)
        except Exception:
            log.exception(u'exception while wrangling ‘%s’:', text)
            return
        if not quantities:
            return
        if not self._state.can_reply(status.id, quantities):
            return
        # Candidate recipients: the author plus everyone they mentioned.
        to_mention.add(status.author.screen_name)
        for x in status.entities['user_mentions']:
            to_mention.add(x['screen_name'])
        mentioned_me = self.me.screen_name in to_mention
        to_mention.discard(self.me.screen_name)
        log.info('would like to mention %s', to_mention)
        # Only mention people who follow us (or the author, if they tagged us).
        for rel in self.api.lookup_friendships(screen_names=tuple(to_mention)):
            if not rel.is_followed_by:
                # If someone explicitly tags us, they're fair game
                is_author = rel.screen_name == status.author.screen_name
                if not (is_author and mentioned_me):
                    to_mention.discard(rel.screen_name)
                if rel.is_following:
                    log.info(u"%s no longer follows us; unfollowing", rel.screen_name)
                    self.api.destroy_friendship(screen_name=rel.screen_name)
        if status.author.screen_name not in to_mention:
            log.info('sender %s does not follow us (any more), not replying',
                     status.author.screen_name)
            return
        # Keep dropping mentions until the reply is short enough
        # TODO: hashtags?
        correction = format_reply(quantities)
        greeting = self.get_festive_greeting(status.created_at)
        reply = None
        for mentions in reverse_inits([u'@' + sn for sn in to_mention]):
            reply = u'{mentions} {correction}. {greeting}'.format(
                mentions=u' '.join(mentions),
                correction=correction,
                greeting=greeting).strip()
            if len(reply) <= 140:
                break
        if reply is not None and len(reply) <= 140:
            log.info('--> %s', reply)
            if self.post_replies:
                # TODO: I think tweepy commit f99b1da broke calling this without naming the status
                # parameter by adding media_ids before *args -- why do the tweepy tests pass?
                r = self.api.update_status(status=reply, in_reply_to_status_id=status.id)
                log.info(" %s", status_url(r))
                self._state.record_reply(status.id, quantities, r.id)
        else:
            log.info('too long, not replying')

    def on_event(self, event):
        """React to follow/favorite notifications from the user stream."""
        if event.source.id == self.me.id:
            # Ignore events we caused ourselves.
            return
        if event.event == 'follow' and event.target.id == self.me.id:
            self.on_follow(event.source)
        if event.event == 'favorite' and event.target.id == self.me.id:
            log.info("tweet favorited by %s: %s",
                     user_url(event.source),
                     status_url(event.target_object))

    def on_follow(self, whom):
        """Follow back or block a new follower, per classify_user()."""
        log.info("followed by %s", user_url(whom))
        if whom.following:
            return
        classification = classify_user(self.api, whom)
        if classification == FMK.BLOCK:
            log.info('blocking %s', user_url(whom))
            self.block(whom.id)
        elif classification == FMK.FOLLOW_BACK:
            # TODO: delay this
            log.info("following %s back", user_url(whom))
            whom.follow()

    def block(self, user_id):
        """Block *user_id* without fetching extra entities or their status."""
        self.api.create_block(user_id=user_id,
                              include_entities=False,
                              skip_status=True)
def auth_from_env():
    """Build a tweepy OAuth handler from the four standard env vars.

    Raises KeyError if CONSUMER_KEY, CONSUMER_SECRET, ACCESS_TOKEN or
    ACCESS_TOKEN_SECRET is unset.
    """
    env = os.environ
    auth = tweepy.OAuthHandler(env["CONSUMER_KEY"], env["CONSUMER_SECRET"])
    auth.set_access_token(env["ACCESS_TOKEN"], env["ACCESS_TOKEN_SECRET"])
    return auth
def stream(api, args):
    """Run the listener forever, napping and reconnecting when rate-limited.

    Streams either the public 'less' firehose or the account's own
    user stream depending on args.use_public_stream.
    """
    while True:
        try:
            listener = LessListener(api,
                                    post_replies=args.post_replies,
                                    gather=args.gather,
                                    state_dir=args.state)
            stream = tweepy.Stream(api.auth, listener)
            if args.use_public_stream:
                stream.filter(track=['less'])
            else:
                stream.userstream(replies='all')
        except tweepy.RateLimitError:
            log.warning("Rate-limited, and Tweepy didn't save us; time for a nap",
                        exc_info=True)
            time.sleep(15 * 60)
| {"/fewerror/twitter/fmk.py": ["/fewerror/twitter/util.py"], "/tests/test_state.py": ["/fewerror/state.py"], "/fewerror/telegram.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__init__.py": ["/fewerror/__init__.py", "/fewerror/state.py", "/fewerror/util.py", "/fewerror/twitter/util.py", "/fewerror/twitter/fmk.py"], "/tests/test_crashes.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__main__.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/fewerror/twitter/batch.py": ["/fewerror/twitter/__init__.py"], "/fewerror/thatsnotmybot.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/tests/test_twitter.py": ["/fewerror/twitter/__init__.py"], "/fewerror/get_oauth_token.py": ["/fewerror/__init__.py"], "/fewerror/__init__.py": ["/fewerror/util.py"], "/tests/test_grammar.py": ["/fewerror/__init__.py"], "/tests/test_thatsnotmybot.py": ["/fewerror/thatsnotmybot.py"]} |
65,922 | wjt/fewerror | refs/heads/master | /fewerror/checkedshirt.py | import argparse
import json
import logging
import os
import raven
from raven.handlers.logging import SentryHandler
log = logging.getLogger(__name__)
def add_arguments(parser):
    """Attach the mutually-exclusive logging options to *parser*."""
    group = parser.add_argument_group('logging').add_mutually_exclusive_group()
    group.add_argument(
        '--log-config',
        type=argparse.FileType('r'),
        metavar='FILE.json',
        help='Read logging config from FILE.json (default: LEVEL to stderr)')
    group.add_argument(
        '--log-level',
        default='DEBUG',
        help='Log at this level to stderr (default: DEBUG)')
def init(args):
    """Initialise logging and Sentry error reporting from parsed *args*.

    Expects the namespace produced by a parser that went through
    add_arguments(): either --log-config (a JSON dictConfig file) or
    --log-level for basicConfig to stderr.
    """
    # BUG FIX: `import logging` alone does not load the logging.config
    # submodule, so `logging.config.dictConfig` raised AttributeError unless
    # some other module had already imported it. Import it explicitly (and
    # unconditionally, so the local `logging` binding is defined on both
    # branches below).
    import logging.config
    if args.log_config:
        log_config = json.load(args.log_config)
        logging.config.dictConfig(log_config)
    else:
        logging.basicConfig(level=args.log_level,
                            format='%(asctime)s %(levelname)8s [%(name)s] %(message)s')
    log.info('--- Starting ---')
    # Release = the git commit of the checkout two directories up.
    git_sha = raven.fetch_git_sha(os.path.dirname(os.path.dirname(__file__)))
    log.info('Git commit: %s', git_sha)
    # Log errors to Sentry
    client = raven.Client(
        # dsn=os.environ.get('SENTRY_DSN'),
        include_paths=['fewerror'],
        release=git_sha,
        ignore_exceptions=[
            KeyboardInterrupt,
        ],
    )
    handler = SentryHandler(client)
    handler.setLevel(logging.WARNING)
    raven.conf.setup_logging(handler)
| {"/fewerror/twitter/fmk.py": ["/fewerror/twitter/util.py"], "/tests/test_state.py": ["/fewerror/state.py"], "/fewerror/telegram.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__init__.py": ["/fewerror/__init__.py", "/fewerror/state.py", "/fewerror/util.py", "/fewerror/twitter/util.py", "/fewerror/twitter/fmk.py"], "/tests/test_crashes.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__main__.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/fewerror/twitter/batch.py": ["/fewerror/twitter/__init__.py"], "/fewerror/thatsnotmybot.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/tests/test_twitter.py": ["/fewerror/twitter/__init__.py"], "/fewerror/get_oauth_token.py": ["/fewerror/__init__.py"], "/fewerror/__init__.py": ["/fewerror/util.py"], "/tests/test_grammar.py": ["/fewerror/__init__.py"], "/tests/test_thatsnotmybot.py": ["/fewerror/thatsnotmybot.py"]} |
65,923 | wjt/fewerror | refs/heads/master | /fewerror/twitter/util.py | def user_url(user):
return "https://twitter.com/{}".format(user.screen_name)
def status_url(status):
    """Return the public URL of *status* on its author's profile."""
    return f"{user_url(status.author)}/status/{status.id}"
| {"/fewerror/twitter/fmk.py": ["/fewerror/twitter/util.py"], "/tests/test_state.py": ["/fewerror/state.py"], "/fewerror/telegram.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__init__.py": ["/fewerror/__init__.py", "/fewerror/state.py", "/fewerror/util.py", "/fewerror/twitter/util.py", "/fewerror/twitter/fmk.py"], "/tests/test_crashes.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__main__.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/fewerror/twitter/batch.py": ["/fewerror/twitter/__init__.py"], "/fewerror/thatsnotmybot.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/tests/test_twitter.py": ["/fewerror/twitter/__init__.py"], "/fewerror/get_oauth_token.py": ["/fewerror/__init__.py"], "/fewerror/__init__.py": ["/fewerror/util.py"], "/tests/test_grammar.py": ["/fewerror/__init__.py"], "/tests/test_thatsnotmybot.py": ["/fewerror/thatsnotmybot.py"]} |
65,924 | wjt/fewerror | refs/heads/master | /tests/test_crashes.py | #!/usr/bin/env python
import fewerror
# Regression input: this tweet historically crashed the correction finder.
crashy_tweet = '''2x more likely to hire men than women, even when he's less capable http://t.co/XcovCXpsqC
"We shouldn't sacrifice quality to hire women!"'''


def test_not_crashing():
    """find_corrections must not raise on the regression tweet."""
    reply = fewerror.find_corrections(crashy_tweet)
    if reply:
        # if we get anything out at all, it should be 'fewer capable'
        assert reply == ['fewer capable']
| {"/fewerror/twitter/fmk.py": ["/fewerror/twitter/util.py"], "/tests/test_state.py": ["/fewerror/state.py"], "/fewerror/telegram.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__init__.py": ["/fewerror/__init__.py", "/fewerror/state.py", "/fewerror/util.py", "/fewerror/twitter/util.py", "/fewerror/twitter/fmk.py"], "/tests/test_crashes.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__main__.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/fewerror/twitter/batch.py": ["/fewerror/twitter/__init__.py"], "/fewerror/thatsnotmybot.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/tests/test_twitter.py": ["/fewerror/twitter/__init__.py"], "/fewerror/get_oauth_token.py": ["/fewerror/__init__.py"], "/fewerror/__init__.py": ["/fewerror/util.py"], "/tests/test_grammar.py": ["/fewerror/__init__.py"], "/tests/test_thatsnotmybot.py": ["/fewerror/thatsnotmybot.py"]} |
65,925 | wjt/fewerror | refs/heads/master | /cbdq/gnewline.py | #!/usr/bin/env python3
import argparse
import json
import re
import sys
import yaml
# Grammar keys that are metadata/helpers, not symbols to space-encode.
skip_keys = frozenset(('QUOTE', 'SPACE', '00-SOURCE'))
# Matches tracery expansions like "#symbol#" or "#symbol.modifier#",
# capturing the bare symbol name.
traceryish_rx = re.compile(r'#(\w+)(?:\.\w+)*#')
def fmap(f, val):
    """Apply *f* to *val*, recursing elementwise through (nested) lists."""
    if not isinstance(val, list):
        return f(val)
    return [fmap(f, item) for item in val]
def space(val):
    """Encode every literal space in *val* as the #SPACE# tracery symbol."""
    return '#SPACE#'.join(val.split(' '))
def validate(j):
    """Check that every #symbol# referenced in grammar *j* is defined,
    and that every defined symbol is referenced.

    Raises ValueError (with the offending key/value/symbol) for a
    reference to a missing symbol, or (with the set of unused keys) if
    any symbol other than 'origin' and the skip_keys is never used.
    """
    used = {'origin'} | skip_keys
    for k, v in j.items():
        def _validate(val, k=k):
            # k=k binds the current key eagerly, avoiding the
            # late-binding-closure trap.
            for var in traceryish_rx.findall(val):
                if var not in j:
                    raise ValueError(k, val, var)
                used.add(var)
        fmap(_validate, v)
    unused = j.keys() - used
    if unused:
        raise ValueError(unused)
def transform(j):
    """Return a copy of grammar *j* with spaces encoded, skipping meta keys."""
    out = {}
    for key, value in j.items():
        if key in skip_keys:
            out[key] = value
        else:
            out[key] = fmap(space, value)
    return out
def main():
    """Validate a YAML tracery grammar and emit it as JSON.

    With --verify, only check that the transformed grammar serialises;
    otherwise write pretty-printed JSON to stdout.
    """
    p = argparse.ArgumentParser()
    p.add_argument('source', type=argparse.FileType(mode='r'))
    p.add_argument('--verify', action='store_true')
    a = p.parse_args()
    j = yaml.load(a.source, Loader=yaml.CLoader)
    validate(j)
    j_ = transform(j)
    if a.verify:
        # Serialisation check only; output discarded.
        json.dumps(j_)
    else:
        json.dump(j_, sys.stdout, indent=2, sort_keys=True)


if __name__ == '__main__':
    main()
| {"/fewerror/twitter/fmk.py": ["/fewerror/twitter/util.py"], "/tests/test_state.py": ["/fewerror/state.py"], "/fewerror/telegram.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__init__.py": ["/fewerror/__init__.py", "/fewerror/state.py", "/fewerror/util.py", "/fewerror/twitter/util.py", "/fewerror/twitter/fmk.py"], "/tests/test_crashes.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__main__.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/fewerror/twitter/batch.py": ["/fewerror/twitter/__init__.py"], "/fewerror/thatsnotmybot.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/tests/test_twitter.py": ["/fewerror/twitter/__init__.py"], "/fewerror/get_oauth_token.py": ["/fewerror/__init__.py"], "/fewerror/__init__.py": ["/fewerror/util.py"], "/tests/test_grammar.py": ["/fewerror/__init__.py"], "/tests/test_thatsnotmybot.py": ["/fewerror/thatsnotmybot.py"]} |
65,926 | wjt/fewerror | refs/heads/master | /fewerror/twitter/__main__.py | #!/usr/bin/env python3
import argparse
import logging
import os
import tweepy
from . import auth_from_env, batch, stream
from .. import checkedshirt
log = logging.getLogger(__name__)
def main():
    """Parse arguments, build the Tweepy API, and dispatch to a subcommand."""
    var = os.path.abspath('var')
    parser = argparse.ArgumentParser()
    # Annoyingly you really do have to write
    #   python -m fewerror.twitter --log-level DEBUG stream
    # rather than
    #   python -m fewerror.twitter stream --log-level DEBUG
    # but life is too short.
    checkedshirt.add_arguments(parser)
    subparsers = parser.add_subparsers(help='subcommand', dest='mode')
    subparsers.required = True
    # stream
    stream_p = subparsers.add_parser('stream', help=u'annoy some tweeps')
    stream_p.set_defaults(func=stream)
    gather_dir = os.path.join(var, 'tweets')
    stream_p.add_argument('--gather', metavar='DIR', nargs='?',
                          const=gather_dir, default=None,
                          help='save matched tweets in DIR for later '
                          'degustation (default: {})'.format(gather_dir))
    stream_p.add_argument('--state', metavar='DIR', default=var,
                          help='store state in DIR (default: {})'.format(var))
    modes = stream_p.add_argument_group('stream mode').add_mutually_exclusive_group()
    modes.add_argument('--post-replies', action='store_true',
                       help='post (rate-limited) replies, rather than just printing them locally')
    modes.add_argument('--use-public-stream', action='store_true',
                       help='search public tweets for "less", rather than your own stream')
    batch.add_subcommands(subparsers, var)
    args = parser.parse_args()
    checkedshirt.init(args)
    log.info('Initializing API')
    auth = auth_from_env()
    api = tweepy.API(
        auth,
        wait_on_rate_limit=True,
        wait_on_rate_limit_notify=True,
        # It looks like if retry_count is 0 (the default), wait_on_rate_limit=True will not
        # actually retry after a rate limit.
        retry_count=1)
    args.func(api, args)


if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        exit(1)
    except SystemExit:
        raise
    except Exception:
        # Log with traceback before re-raising so the crash reaches the logs.
        log.info('oh no', exc_info=True)
        raise
| {"/fewerror/twitter/fmk.py": ["/fewerror/twitter/util.py"], "/tests/test_state.py": ["/fewerror/state.py"], "/fewerror/telegram.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__init__.py": ["/fewerror/__init__.py", "/fewerror/state.py", "/fewerror/util.py", "/fewerror/twitter/util.py", "/fewerror/twitter/fmk.py"], "/tests/test_crashes.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__main__.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/fewerror/twitter/batch.py": ["/fewerror/twitter/__init__.py"], "/fewerror/thatsnotmybot.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/tests/test_twitter.py": ["/fewerror/twitter/__init__.py"], "/fewerror/get_oauth_token.py": ["/fewerror/__init__.py"], "/fewerror/__init__.py": ["/fewerror/util.py"], "/tests/test_grammar.py": ["/fewerror/__init__.py"], "/tests/test_thatsnotmybot.py": ["/fewerror/thatsnotmybot.py"]} |
65,927 | wjt/fewerror | refs/heads/master | /fewerror/twitter/batch.py | import argparse
import glob
import json
import logging
import os
import re
import time
import tweepy
from . import FMK, classify_user, user_url
log = logging.getLogger(__name__)
DEFAULT_BLOCK_TIMEOUT = 120
def add_user_args(parser):
    """Attach a required, mutually exclusive --user-id/--screen-name pair."""
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument('--user-id', type=int)
    group.add_argument('--screen-name', type=str)
def get_user_kwargs(args):
    """Translate parsed --user-id/--screen-name options into the keyword
    arguments tweepy endpoints expect. Raises ValueError if neither is set."""
    if args.user_id is not None:
        return {'user_id': args.user_id}
    if args.screen_name is not None:
        return {'screen_name': args.screen_name}
    raise ValueError(args)
def save_user(user, directory):
    """Persist one user's raw API JSON to <directory>/user.<id>.json."""
    target = os.path.join(directory, 'user.{}.json'.format(user.id_str))
    with open(target, 'w') as out:
        json.dump(obj=user._json, fp=out)
def fetch_followers(api, args):
    '''Fetches all followers' JSON and saves them to the given directory.
    Files will have names of the form 'user.<numeric id>.json'.'''
    os.makedirs(args.directory, exist_ok=True)
    # Total follower count is only used for [i/n] progress logging.
    n = api.me().followers_count
    # count=200 is the maximum page size for the followers endpoint.
    g = tweepy.Cursor(api.followers, count=200).items()
    for i, follower in enumerate(g, 1):
        log.info('[%d/%d] %s', i, n, follower.screen_name)
        save_user(follower, args.directory)
def fetch_mutuals(api, args):
    '''Intersects a directory of user.*.json (as populated with fetch-followers) with users
    following USER_ID/SCREEN_NAME.'''
    # Recover our own followers' IDs from the filenames saved on disk.
    mine = {
        int(re.match(r'user.(\d+).json', os.path.basename(f)).group(1))
        for f in glob.glob(os.path.join(args.directory, 'user.*.json'))
    }
    mutuals = set()
    kwargs = get_user_kwargs(args)
    # 5000 is the maximum page size for followers/ids.
    kwargs['count'] = 5000
    g = tweepy.Cursor(api.followers_ids, **kwargs).pages()
    for i, page in enumerate(g, 1):
        m = (mine & set(page))
        log.info('Page %d: %d mutuals', i, len(m))
        # Stream each page's results immediately so partial output survives
        # a crash or rate-limit stall.
        print('\n'.join(map(str, m)), flush=True)
        mutuals |= m
        # Crude fixed back-off between pages to stay under the rate limit.
        time.sleep(60)
    log.info('Done; %d mutuals total', len(mutuals))
def classify(api, args):
    """Bucket saved user.*.json files into FMK classes, write the BLOCK ids
    to args.block_file (for the 'block' subcommand), and print a summary."""
    classes = {e: set() for e in FMK}
    for dirpath, _, filenames in os.walk(args.directory):
        for filename in filenames:
            if not re.match(r'user.\d+.json', filename):
                continue
            with open(os.path.join(dirpath, filename), 'rb') as f:
                j = json.load(f)
            user = tweepy.models.User.parse(api, j)
            # fetch_statuses=False: classify purely from the saved profile,
            # without extra API calls.
            c = classify_user(api, user, fetch_statuses=False)
            classes[c].add(user)
    # One numeric user id per line, the format the 'block' command reads.
    for user in classes[FMK.BLOCK]:
        args.block_file.write('{}\n'.format(user.id))
    # Don't count users we already follow as "to follow back".
    already_following = {u for u in classes[FMK.FOLLOW_BACK] if u.following}
    classes[FMK.FOLLOW_BACK] -= already_following
    results = {'already following': len(already_following)}
    for e, us in classes.items():
        results[e.name.lower().replace('_', ' ')] = len(us)
    # Right-align labels and counts on the widest entries.
    w = max(map(len, results))
    v = max(len(str(n)) for n in results.values())
    for label, n in results.items():
        print('{:>{w}}: {:{v}} users'.format(label, n, w=w, v=v))
def report_spam(api, *args, **kwargs):
    """Call api.report_spam, retrying up to 5 times when the spam-report
    cap (API code 205) is hit, with an exponentially growing sleep.

    Any other TweepError — and the final 205 — propagates to the caller."""
    sleep_time = 15 * 60
    # reversed(range(5)): i counts down 4..0 so the last attempt re-raises.
    for i in reversed(range(5)):
        try:
            return api.report_spam(*args, **kwargs)
        except tweepy.TweepError as e:
            if e.api_code == 205 and i > 0:
                # “You are over the limit for spam reports. The account limit
                # for reporting spam has been reached. Try again later.”
                #
                # Annoyingly, this is a different error code to the normal
                # “rate-limited“ error code so tweepy's built-in rate limiting
                # doesn't apply.
                log.info("Over the spam-report limit; sleeping for %ds",
                         sleep_time, exc_info=True)
                time.sleep(sleep_time)
                sleep_time *= 1.5
            else:
                raise
def _block_many(api, to_block_ids, timeout, report):
    """Block (and unfollow) each id in to_block_ids, optionally reporting
    for spam first, sleeping `timeout` seconds between users.

    Users who no longer exist (API codes 34/50) are skipped silently."""
    n = len(to_block_ids)
    for i, to_block_id in enumerate(to_block_ids, 1):
        try:
            if report:
                log.info('[%d/%d] reporting #%d', i, n, to_block_id)
                # perform_block=True: reporting also blocks in one call.
                u = report_spam(api, user_id=to_block_id, perform_block=True)
                log.info('reported and blocked %s (#%d)', user_url(u), to_block_id)
            else:
                log.info('[%d/%d] blocking #%d', i, n, to_block_id)
                u = api.create_block(user_id=to_block_id,
                                     include_entities=False,
                                     skip_status=True)
                log.info('blocked %s (#%d)', user_url(u), to_block_id)
            api.destroy_friendship(user_id=to_block_id)
            log.info('Unfollowed #%d', to_block_id)
        except tweepy.TweepError as e:
            if e.api_code in (
                34,  # reported by report_spam
                50,  # reported by create_block
            ):
                log.info('#%d no longer exists', to_block_id)
            else:
                raise
        # No need to sleep after the final user.
        if i < n:
            time.sleep(timeout)
def block(api, args):
    '''Unfollow, block, and optionally report as spam many user IDs.'''
    # One numeric user id per line; blank lines are ignored.
    to_block_ids = {int(line) for line in args.block_file if line.strip()}
    log.info('would like to unfollow block %d ids', len(to_block_ids))
    existing_block_ids = set(tweepy.Cursor(api.blocks_ids).items())
    log.info('%d existing blocks', len(existing_block_ids))
    # Skip users who are already blocked.
    to_block_ids.difference_update(existing_block_ids)
    _block_many(api, to_block_ids, timeout=args.timeout, report=args.report)
def block_one(api, args):
    '''Block and unfollow a user, and (optionally) our friends who follow them.'''
    kwargs = get_user_kwargs(args)
    if args.mutuals:
        log.info('Fetching our friends')
        my_friends = set(tweepy.Cursor(api.friends_ids).items())
        log.info('Fetched %d friends', len(my_friends))
        time.sleep(args.timeout)
        mutuals = set()
        log.info('Intersecting friends with users following %s', kwargs)
        g = tweepy.Cursor(api.followers_ids, **kwargs).pages()
        for i, page in enumerate(g, 1):
            m = my_friends & set(page)
            log.info('Page %d: %d mutuals', i, len(m))
            mutuals |= m
            # Pause between pages to stay within rate limits.
            time.sleep(args.timeout)
        # Friends who follow the target are blocked but not reported.
        _block_many(api, mutuals, timeout=args.timeout, report=False)
    # Finally block and unfollow the target user themselves.
    u = api.create_block(include_entities=False,
                         skip_status=True,
                         **kwargs)
    log.info('Blocked %s', user_url(u))
    api.destroy_friendship(**kwargs)
    log.info('Unfollowed %s', user_url(u))
def ℕ(value):
    '''argparse type: parse *value* as a non-negative integer.

    (Vim really deals badly with this function name.)'''
    try:
        n = int(value)
    except ValueError:
        n = -1
    if n < 0:
        raise argparse.ArgumentTypeError('{!r} ∉ ℕ'.format(value))
    return n
def add_subcommands(subparsers, var):
    """Register the batch subcommands (fetch-followers, fetch-mutuals,
    classify, block-one, block) on an argparse subparsers object.

    `var` is the state directory used for default paths."""
    # fetch-followers
    fetch_p = subparsers.add_parser('fetch-followers', help='fetch some tweeps',
                                    description=fetch_followers.__doc__)
    fetch_p.set_defaults(func=fetch_followers)
    default_fetch_directory = os.path.join(var, 'followers')
    fetch_p.add_argument('directory', default=default_fetch_directory,
                         help='(default: {})'.format(default_fetch_directory))
    # fetch-mutuals
    fetch_m = subparsers.add_parser('fetch-mutuals', help='intersect some tweeps',
                                    description=fetch_mutuals.__doc__)
    fetch_m.set_defaults(func=fetch_mutuals)
    fetch_m.add_argument('directory')
    add_user_args(fetch_m)
    # classify
    classify_p = subparsers.add_parser('classify', help='group some tweeps')
    classify_p.set_defaults(func=classify)
    classify_p.add_argument('directory', default=default_fetch_directory,
                            help='(default: {})'.format(default_fetch_directory))
    classify_p.add_argument('block_file', type=argparse.FileType('w'),
                            help='file to store one numeric user id per line, '
                            'as used by "block" command')
    # block-one
    block_one_p = subparsers.add_parser('block-one', help='block one tweep',
                                        description=block_one.__doc__)
    block_one_p.set_defaults(func=block_one)
    add_user_args(block_one_p)
    block_one_p.add_argument('--mutuals', action='store_true',
                             help='Also block friends who follow them')
    block_one_p.add_argument('--timeout', type=ℕ, default=DEFAULT_BLOCK_TIMEOUT,
                             help='delay in seconds between each API call')
    # block
    block_p = subparsers.add_parser('block', help='block some tweeps',
                                    description=block.__doc__)
    block_p.set_defaults(func=block)
    block_p.add_argument('block_file', type=argparse.FileType('r'),
                         help='file with one numeric user id per line')
    block_p.add_argument('--report', action='store_true',
                         help='with --block, also report for spam')
    block_p.add_argument('--timeout', type=ℕ, default=DEFAULT_BLOCK_TIMEOUT,
                         help='delay in seconds between each API call')
# Only the registration entry point is public; the command funcs are
# reached through parser defaults.
__all__ = ['add_subcommands']
| {"/fewerror/twitter/fmk.py": ["/fewerror/twitter/util.py"], "/tests/test_state.py": ["/fewerror/state.py"], "/fewerror/telegram.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__init__.py": ["/fewerror/__init__.py", "/fewerror/state.py", "/fewerror/util.py", "/fewerror/twitter/util.py", "/fewerror/twitter/fmk.py"], "/tests/test_crashes.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__main__.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/fewerror/twitter/batch.py": ["/fewerror/twitter/__init__.py"], "/fewerror/thatsnotmybot.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/tests/test_twitter.py": ["/fewerror/twitter/__init__.py"], "/fewerror/get_oauth_token.py": ["/fewerror/__init__.py"], "/fewerror/__init__.py": ["/fewerror/util.py"], "/tests/test_grammar.py": ["/fewerror/__init__.py"], "/tests/test_thatsnotmybot.py": ["/fewerror/thatsnotmybot.py"]} |
65,928 | wjt/fewerror | refs/heads/master | /fewerror/thatsnotmybot.py | #!/usr/bin/env python3
import argparse
import logging
import os
import re
import textblob
import tracery
import tweepy
import yaml
from tracery.modifiers import base_english
from fewerror.twitter import auth_from_env, status_url
from fewerror import checkedshirt
SOURCE = os.path.join(os.path.dirname(__file__), 'thatsnotmybot.yaml')
traceryish_rx = re.compile(r'#(\w+)(?:\.\w+)*#')
log = logging.getLogger(__name__)
def fmap(f, val):
    """Apply *f* to a value, recursing through (possibly nested) lists."""
    if not isinstance(val, list):
        return f(val)
    return [fmap(f, item) for item in val]
def validate(j):
    """Ensure every #name# in the grammar resolves to a rule in *j*, and
    that no rule other than 'origin' goes unreferenced. Raises ValueError."""
    referenced = {'origin'}
    for key, rule in j.items():
        # Bind key as a default so each closure reports its own rule name.
        def check(text, key=key):
            for name in traceryish_rx.findall(text):
                if name not in j:
                    raise ValueError(key, text, name)
                referenced.add(name)
        fmap(check, rule)
    orphans = j.keys() - referenced
    if orphans:
        raise ValueError(orphans)
def modifier_is(noun_phrase):
    '''Extremely crude noun_phrase + {is,are} agreement.

    Appends " are" when the final word's POS tag is a plural noun
    (NNS/NNPS), otherwise " is".'''
    s = textblob.Sentence(noun_phrase)
    # Agreement is decided by the last word's part-of-speech tag alone.
    _, pos_tag = s.pos_tags[-1]
    verb = ' are' if pos_tag in ('NNS', 'NNPS') else ' is'
    return noun_phrase + verb
class ThatsNotMyBot(object):
    """Tracery-driven bot that posts "That's not my…" tweet threads.

    The grammar is loaded from the YAML file next to this module; thread
    state between invocations of tweet() is kept in a small YAML file so
    each run posts the next instalment of the current thread.
    """

    def __init__(self):
        with open(SOURCE, 'r', encoding='utf-8') as f:
            # Trusted local file; CLoader is the fast libyaml loader.
            self.source = yaml.load(f, Loader=yaml.CLoader)
        validate(self.source)
        self.grammar = tracery.Grammar(self.source)
        self.grammar.add_modifiers(base_english)
        self.grammar.add_modifiers({'is': modifier_is})

    def generate(self):
        """Flatten one text from the grammar's origin rule."""
        return self.grammar.flatten('#origin#')

    def sample(self, n):
        '''Print out n sample texts'''
        for _ in range(n):
            print(self.generate())

    @staticmethod
    def get_twitter_api():
        """Build an authenticated tweepy API handle from the environment."""
        auth = auth_from_env()
        api = tweepy.API(auth,
                         wait_on_rate_limit=True,
                         wait_on_rate_limit_notify=True,
                         retry_count=1)
        return api

    def tweet(self, state_filename):
        '''🐦🐦🐦'''
        log.info('Loading state from %s', state_filename)
        try:
            with open(state_filename, 'r', encoding='utf-8') as f:
                # safe_load: the state file holds only plain scalars we
                # wrote ourselves, and bare yaml.load() without a Loader
                # is deprecated (a TypeError on PyYAML >= 6).
                state = yaml.safe_load(f)
            log.info('Loaded state %s', state)
        except FileNotFoundError:
            state = {}
        try:
            object_ = state['object']
            last_id = state['last_id']
        except KeyError:
            # No thread in progress: start one about a fresh object.
            object_ = self.grammar.flatten('#object#')
            last_id = None
        n = state.get('n', 0) + 1
        # The eighth tweet in a thread is the "that's my X" payoff.
        yes = n >= 8
        status = self.grammar.flatten(
            '#[object:{}]{}#'.format(
                object_,
                'is' if yes else 'not'))
        log.info("Posting “%s”", status)
        if last_id is not None:
            log.info(" in reply to %s", last_id)
        api = self.get_twitter_api()
        r = api.update_status(status, in_reply_to_status_id=last_id)
        log.info("Posted %s", status_url(r))
        if yes:
            # Thread finished: reset so the next run starts a new object.
            state = {}
        else:
            state['object'] = object_
            state['last_id'] = r.id
            state['n'] = n
        log.info('Saving state %s to %s', state, state_filename)
        with open(state_filename, 'w', encoding='utf-8') as f:
            yaml.dump(state, f)

    def normalize(self):
        '''Write back the source. This would be more useful if I knew how to
        dump YAML in JSON style, and preserve comments.'''
        with open(SOURCE, 'w', encoding='utf-8') as f:
            yaml.dump(self.source, f, Dumper=yaml.CDumper,
                      indent=4, default_flow_style=False)

    def main(self, argv=None):
        """Command-line entry point: validate / sample / tweet / normalize."""
        p = argparse.ArgumentParser()
        checkedshirt.add_arguments(p)
        s = p.add_subparsers(title='commands')
        s.add_parser('validate',
                     help='Just validate the tracery source (default)')

        # Helper that appends the default value to an option's help text.
        def add_argument(x, *args, **kwargs):
            kwargs['help'] += ' (default: {})'.format(kwargs['default'])
            return x.add_argument(*args, **kwargs)

        sample_parser = s.add_parser('sample', help=self.sample.__doc__)
        add_argument(sample_parser, 'n', type=int, nargs='?', default=5,
                     help='Number of sample texts')
        sample_parser.set_defaults(cmd=lambda args: self.sample(args.n))
        tweet_parser = s.add_parser('tweet', help=self.tweet.__doc__)
        add_argument(tweet_parser, '--state',
                     default=os.path.abspath('thatsnotmybot.state.yaml'),
                     help='Load and save state to STATE')
        tweet_parser.set_defaults(cmd=lambda args: self.tweet(args.state))
        normalize = s.add_parser('normalize', help=self.normalize.__doc__)
        normalize.set_defaults(cmd=lambda args: self.normalize())
        args = p.parse_args(argv)
        checkedshirt.init(args)
        # With no subcommand, __init__'s validate() call is the whole job.
        if hasattr(args, 'cmd'):
            args.cmd(args)
if __name__ == '__main__':
    try:
        ThatsNotMyBot().main()
    finally:
        # Flush and close all logging handlers, even on failure.
        logging.shutdown()
| {"/fewerror/twitter/fmk.py": ["/fewerror/twitter/util.py"], "/tests/test_state.py": ["/fewerror/state.py"], "/fewerror/telegram.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__init__.py": ["/fewerror/__init__.py", "/fewerror/state.py", "/fewerror/util.py", "/fewerror/twitter/util.py", "/fewerror/twitter/fmk.py"], "/tests/test_crashes.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__main__.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/fewerror/twitter/batch.py": ["/fewerror/twitter/__init__.py"], "/fewerror/thatsnotmybot.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/tests/test_twitter.py": ["/fewerror/twitter/__init__.py"], "/fewerror/get_oauth_token.py": ["/fewerror/__init__.py"], "/fewerror/__init__.py": ["/fewerror/util.py"], "/tests/test_grammar.py": ["/fewerror/__init__.py"], "/tests/test_thatsnotmybot.py": ["/fewerror/thatsnotmybot.py"]} |
65,929 | wjt/fewerror | refs/heads/master | /tests/test_twitter.py | # vim: fileencoding=utf-8
import json
import os
import datetime as dt
from unittest.mock import NonCallableMock
import pytest
from tweepy.models import User, Status, Relationship
from tweepy.parsers import ModelParser
from fewerror.twitter import get_sanitized_text, LessListener
@pytest.mark.parametrize('filename,expected', [
    ('647349406191218688.json',
     "Four Penguin novels of 128pp or less* that turn me on. Unlike that one published yesterday. \n\n* yes, I do mean less",
     ),
    ('582960339714744320.json',
     "If media had shown more news curiosity in recent years, this would seem less surprising.",
     ),
    # No media
    ('649911069322948608.json',
     """OH:\n“It's all vaguely Sisyphean.”\n“Oh! THAT's the word I was looking for yesterday!”""",
     ),
    # extended_tweet with trailing quoted tweet link
    ('793203535626309632.json',
     """Imagine thinking giving the nuclear codes to someone who treats classified material carelessly wouldn't be a problem.""",
     ),
    # extended_tweet with leading @replies
    ('793561534861574144.json',
     """Evolution is on the curriculum, so this is irrelevant. Unless you're proposing we apply "pressure" by continuing exactly as we are?""",
     ),
])
def test_sanitize(filename, expected):
    """get_sanitized_text strips URLs/entities and unescapes HTML."""
    api = NonCallableMock()
    with open(os.path.join('tests', filename), 'r') as f:
        status = Status.parse(api, json.load(f))
    text = get_sanitized_text(status)
    # Sanitized text must contain no HTML entities or links.
    assert '&' not in text
    assert 'http' not in text
    assert text == expected
'''
@pytest.mark.parametrize("fmt", [
(u"RT @test: {}"),
(u"THIS. MT @test: {}"),
(u'"{}" @myfriend'),
(u'“{}” ýéş'),
])
def test_ignores_manual_rts(fmt):
tweet = fmt.format(true_positives[0])
assert fewerror.make_reply(tweet) is None
'''
# Minimal profile JSON for the bot's own account, as returned by me().
fewerror_user = {
    "screen_name": "fewerror",
    "id": 1932168457,
    "id_str": "1932168457",
    "name": "Fewer Errors",
}


class MockAPI:
    """In-memory stand-in for tweepy.API: records status updates and
    serves friendship data from a dict of screen_name -> connections."""
    parser = ModelParser()

    def __init__(self, connections):
        # Every update_status() call's kwargs, in order.
        self._updates = []
        self._connections = {k: set(v) for k, v in connections.items()}

    def me(self):
        return User.parse(self, fewerror_user)

    def lookup_friendships(self, screen_names):
        return [
            Relationship.parse(self, {
                "name": "{x}y Mc{x}face".format(x=screen_name),
                "screen_name": screen_name,
                "id": i,
                "id_str": str(i),
                "connections": self._connections.get(screen_name, [
                    # "following",
                    # "followed_by",
                ]),
            })
            # Start ids at 2**32 so they can't collide with fixture ids.
            for i, screen_name in enumerate(screen_names, 2 ** 32)
        ]

    def destroy_friendship(self, screen_name):
        self._connections[screen_name] -= {"following"}

    def update_status(self, **kwargs):
        self._updates.append(kwargs)
        r = Status(api=self)
        setattr(r, 'id', len(self._updates))
        setattr(r, 'author', self.me())
        # Status.user is "DEPRECIATED" so we omit it
        return r
@pytest.mark.parametrize('filename,connections,expected', [
    ('tests/640748887330942977.json',
     {
         "krinndnz": ["following", "followed_by"],
         "eevee": ["following", "followed_by"],
         "mistydemeo": ["following"],
     },
     "@krinndnz @eevee I think you mean “fewer bad”."
     ),
    ('tests/671809680902127616.json',
     {
         "benjammingh": ["following", "followed_by"],
     },
     "@benjammingh I think you mean “fewer grip”. It is cold outside."),
    ('tests/671809680902127616.json',
     {},
     None),
    ('tests/738052925646340096.json',
     {
         'ArosOrcidae': ['following', 'followed_by'],
     },
     "@Renferos @ArosOrcidae I think you mean “fewer skilled”.",
     ),
    # Tweet from @davidgerard, who we were following but had stopped following
    # us. We correctly unfollowed him, and
    ('tests/801120047829753856.json',
     {
         'mjg59': ['following', 'followed_by'],
         'davidgerard': ['following'],
     },
     None,
     ),
    # Modified version of 793561534861574144.json where the string 'less' only appears in the
    # full_text, not in the truncated text. Regression test for a bug where we dropped any such
    # tweet.
    ('tests/less-only-in-extended-text.json',
     {
         'RobTH26': ['following', 'followed_by'],
     },
     '@RobTH26 I think you mean “fewer cake”.',
     ),
])
def test_end_to_end(filename, connections, expected, tmpdir):
    """Feed a fixture status through LessListener and check the reply
    (expected=None means no reply should be posted) plus follow-backs."""
    api = MockAPI(connections=connections)
    with open(filename, 'r') as f:
        status = Status.parse(api, json.load(fp=f))
    l = LessListener(api=api, post_replies=True, gather='tweets', state_dir=str(tmpdir))
    # 100% festivity for all of December
    l.december_greetings = ('It is cold outside.',)
    l.festive_probability = 1.
    assert l.get_festive_probability(dt.date(2016, 12, 5)) == 1.
    l.on_status(status)
    # Never reply to the same toot twice
    l.on_status(status)
    # Rate-limit replies for same word
    setattr(status, 'id', status.id + 1)
    l.on_status(status)
    if expected is None:
        assert api._updates == []
    else:
        # Despite three on_status calls, exactly one reply was posted.
        assert len(api._updates) == 1
        u = api._updates[0]
        assert u['status'] == expected
    # We should end up following exactly those who still follow us back.
    for k, before in connections.items():
        after = api._connections[k]
        assert ('following' in after) == ('followed_by' in before), \
            (k, before, after)
# Festive probability ramps linearly through December, peaks on the 25th,
# and is zero outside December.
@pytest.mark.parametrize('date,p', [
    (dt.date(2016, 11, 30), 0),
    (dt.date(2016, 12, 1), 0.25),
    (dt.date(2016, 12, 9), 0.5),
    (dt.date(2016, 12, 17), 0.75),
    (dt.date(2016, 12, 25), 1),
    (dt.date(2016, 12, 26), 0),
])
def test_festivity(date, p, tmpdir):
    """get_festive_probability follows the December ramp exactly."""
    api = MockAPI(connections={})
    l = LessListener(api=api, post_replies=True, gather=None, state_dir=str(tmpdir))
    assert l.get_festive_probability(date) == p
# Tweets are sharded into subdirectories by their id prefix.
@pytest.mark.parametrize('id_,expected_filename', [
    ('649911069322948608', '64/649911069322948608.json'),
    ('1649911069322948608', '164/1649911069322948608.json'),
])
def test_save_tweet(tmpdir, id_, expected_filename):
    """save_tweet writes the status JSON into the expected shard path."""
    api = MockAPI(connections={})
    foo = tmpdir.join('foo')
    l = LessListener(api=api, gather=str(foo), state_dir=str(tmpdir))
    s = Status.parse(api=api, json={
        'id': int(id_),
        'id_str': id_,
    })
    l.save_tweet(s)
    j = tmpdir.join('foo', expected_filename)
    assert j.check()
| {"/fewerror/twitter/fmk.py": ["/fewerror/twitter/util.py"], "/tests/test_state.py": ["/fewerror/state.py"], "/fewerror/telegram.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__init__.py": ["/fewerror/__init__.py", "/fewerror/state.py", "/fewerror/util.py", "/fewerror/twitter/util.py", "/fewerror/twitter/fmk.py"], "/tests/test_crashes.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__main__.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/fewerror/twitter/batch.py": ["/fewerror/twitter/__init__.py"], "/fewerror/thatsnotmybot.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/tests/test_twitter.py": ["/fewerror/twitter/__init__.py"], "/fewerror/get_oauth_token.py": ["/fewerror/__init__.py"], "/fewerror/__init__.py": ["/fewerror/util.py"], "/tests/test_grammar.py": ["/fewerror/__init__.py"], "/tests/test_thatsnotmybot.py": ["/fewerror/thatsnotmybot.py"]} |
65,930 | wjt/fewerror | refs/heads/master | /fewerror/state.py | import datetime
import dateutil.parser
import json
import logging
import os
from tempfile import NamedTemporaryFile
log = logging.getLogger(__name__)
class State(object):
    """Reply bookkeeping persisted as JSON.

    Tracks which status IDs we have already replied to, and when each
    quantity word was last corrected, so per-word replies can be
    rate-limited.
    """

    def __init__(self,
                 filename,
                 olde=None,
                 now=datetime.datetime.utcnow,
                 per_word_timeout_seconds=60*60):
        """filename: where save() writes. olde: a previously-saved state
        dict, or None for a fresh state. now: clock hook (for tests).
        per_word_timeout_seconds: minimum gap between replies for the
        same word."""
        self._state_filename = filename
        # Guard the None default: olde.get() would raise AttributeError.
        olde = olde or {}
        # JSON object keys are strings; status ids are ints in memory.
        self._replied_to = {
            int(k): v for k, v in olde.get('replied_to', {}).items()
        }
        # Timestamps round-trip through ISO-8601 strings.
        self._last_time_for_word = {
            k: dateutil.parser.parse(v)
            for k, v in olde.get('last_time_for_word', {}).items()
        }
        self._per_word_timeout = datetime.timedelta(
            seconds=per_word_timeout_seconds)
        self._now = now

    def __str__(self):
        return '<State: {} replied_to, {} last_time_for_word>'.format(
            len(self._replied_to), len(self._last_time_for_word))

    def __eq__(self, value):
        return (
            self._state_filename == value._state_filename and
            self._replied_to == value._replied_to and
            self._last_time_for_word == value._last_time_for_word
        )

    @classmethod
    def load(cls, screen_name, directory, **kwargs):
        """Load state for screen_name from directory, or start fresh if no
        state file exists yet."""
        filename = os.path.join(directory, 'state.{}.json'.format(screen_name))
        try:
            with open(filename, 'r') as f:
                olde = json.load(f)
        except FileNotFoundError:
            olde = {}
        state = cls(filename, olde, **kwargs)
        log.info('loaded %s: %s', filename, state)
        return state

    def save(self):
        """Atomically write the state file (write a temp file, then rename)."""
        state_dir = os.path.dirname(self._state_filename)
        # The prefix must be a bare name: tempfile joins dir and prefix, so
        # a path-containing prefix would point at a non-existent nested path.
        prefix = os.path.basename(self._state_filename)
        with NamedTemporaryFile(prefix=prefix, suffix='.tmp',
                                dir=state_dir, mode='w', delete=False) as f:
            json.dump(fp=f, obj={
                'replied_to': self._replied_to,
                'last_time_for_word': {
                    k: v.isoformat()
                    for k, v in self._last_time_for_word.items()
                },
            })
        os.rename(f.name, self._state_filename)

    def can_reply(self, status_id, quantities):
        """True unless we already replied to status_id, or any word in
        quantities was corrected within the per-word timeout."""
        for quantity in quantities:
            quantity = quantity.lower()
            now = self._now()
            r_id = self._replied_to.get(status_id, None)
            if r_id is not None:
                log.info(u"…already replied: %d", r_id)
                return False
            last_for_this = self._last_time_for_word.get(quantity, None)
            if last_for_this and now - last_for_this < self._per_word_timeout:
                log.info(u"…corrected '%s' at %s, waiting till %s", quantity, last_for_this,
                         last_for_this + self._per_word_timeout)
                return False
        return True

    def record_reply(self, status_id, quantities, r_id):
        """Record that we replied to status_id (with reply id r_id) about
        the given quantity words, and persist immediately."""
        now = self._now()
        self._replied_to[status_id] = r_id
        for quantity in quantities:
            self._last_time_for_word[quantity.lower()] = now
        self.save()
| {"/fewerror/twitter/fmk.py": ["/fewerror/twitter/util.py"], "/tests/test_state.py": ["/fewerror/state.py"], "/fewerror/telegram.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__init__.py": ["/fewerror/__init__.py", "/fewerror/state.py", "/fewerror/util.py", "/fewerror/twitter/util.py", "/fewerror/twitter/fmk.py"], "/tests/test_crashes.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__main__.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/fewerror/twitter/batch.py": ["/fewerror/twitter/__init__.py"], "/fewerror/thatsnotmybot.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/tests/test_twitter.py": ["/fewerror/twitter/__init__.py"], "/fewerror/get_oauth_token.py": ["/fewerror/__init__.py"], "/fewerror/__init__.py": ["/fewerror/util.py"], "/tests/test_grammar.py": ["/fewerror/__init__.py"], "/tests/test_thatsnotmybot.py": ["/fewerror/thatsnotmybot.py"]} |
65,931 | wjt/fewerror | refs/heads/master | /fewerror/get_oauth_token.py | #!/usr/bin/env python3
# vim: tw=79
import argparse
import dotenv
import logging
import os
import subprocess
import tweepy
from . import checkedshirt
log = logging.getLogger(__name__)
def main():
    """Obtain Twitter OAuth access tokens and persist them in the env file.

    Reads CONSUMER_KEY/CONSUMER_SECRET from the env file (decrypting
    env.asc with gpg2 if the env file does not exist yet), walks the user
    through the OAuth verifier flow, and writes ACCESS_TOKEN and
    ACCESS_TOKEN_SECRET back. Existing tokens are left alone."""
    default_path = os.path.join(os.path.dirname(__file__),
                                '..',
                                '.env')
    default_path = os.path.abspath(default_path)
    parser = argparse.ArgumentParser(
        description='''
        Get OAuth tokens, using CONSUMER_KEY and CONSUMER_SECRET
        from env and writing the new ACCESS_TOKEN and
        ACCESS_TOKEN_SECRET back to it.''',
        epilog='''
        If env does not exist, but env.asc does, it will be decrypted with
        gpg2 to provide CONSUMER_KEY and CONSUMER_SECRET. If env already
        exists and contains ACCESS_TOKEN/ACCESS_TOKEN_SECRET, they will be
        preserved.''')
    # Fixed: the help string was missing its closing parenthesis.
    parser.add_argument('env', nargs='?', default=default_path,
                        help='environment file to read and update '
                        '(default: {})'.format(default_path))
    checkedshirt.add_arguments(parser)
    args = parser.parse_args()
    checkedshirt.init(args)
    try:
        # 'x' mode: only populate env from env.asc when it doesn't exist.
        with open(args.env, 'x') as env_f:
            asc = args.env + '.asc'
            log.info("Populating %s from %s", args.env, asc)
            subprocess.call(('gpg2', '--decrypt', asc), stdout=env_f)
    except FileExistsError:
        pass
    dotenv.load_dotenv(args.env)
    consumer_key = os.environ["CONSUMER_KEY"]
    consumer_secret = os.environ["CONSUMER_SECRET"]
    ks = ('ACCESS_TOKEN', 'ACCESS_TOKEN_SECRET')
    if all(k in os.environ for k in ks):
        # Nothing to do: tokens are already present.
        log.info('%s already contains %s', args.env, ' & '.join(ks))
        return
    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
    try:
        redirect_url = auth.get_authorization_url()
    except tweepy.TweepError:
        log.exception('Failed to get authorization URL')
        exit(1)
    print()
    print("Go to %s" % redirect_url)
    verifier = input('Verifier: ')
    print()
    try:
        access_token, access_token_secret = auth.get_access_token(verifier)
        dotenv.set_key(args.env, 'ACCESS_TOKEN', access_token)
        dotenv.set_key(args.env, 'ACCESS_TOKEN_SECRET', access_token_secret)
    except tweepy.TweepError:
        log.exception('Failed to get access token')
        exit(1)


if __name__ == '__main__':
    main()
| {"/fewerror/twitter/fmk.py": ["/fewerror/twitter/util.py"], "/tests/test_state.py": ["/fewerror/state.py"], "/fewerror/telegram.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__init__.py": ["/fewerror/__init__.py", "/fewerror/state.py", "/fewerror/util.py", "/fewerror/twitter/util.py", "/fewerror/twitter/fmk.py"], "/tests/test_crashes.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__main__.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/fewerror/twitter/batch.py": ["/fewerror/twitter/__init__.py"], "/fewerror/thatsnotmybot.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/tests/test_twitter.py": ["/fewerror/twitter/__init__.py"], "/fewerror/get_oauth_token.py": ["/fewerror/__init__.py"], "/fewerror/__init__.py": ["/fewerror/util.py"], "/tests/test_grammar.py": ["/fewerror/__init__.py"], "/tests/test_thatsnotmybot.py": ["/fewerror/thatsnotmybot.py"]} |
65,932 | wjt/fewerror | refs/heads/master | /fewerror/__init__.py | # coding=utf-8
import logging
from textblob import TextBlob, Word
from nltk.corpus.reader import WordListCorpusReader
from .util import OrderedSet
log = logging.getLogger(__name__)
def furthermore(qs):
    """Join phrases into one sentence, linking the final phrase with
    ', and furthermore'. A single phrase is returned unchanged."""
    if len(qs) <= 1:
        return qs[0]
    head = ", ".join(qs[:-1])
    return "{}, and furthermore {}".format(head, qs[-1])


def format_reply(corrections):
    """Render the canonical reply, each correction in curly quotes."""
    quoted = ["“{}”".format(c) for c in corrections]
    return "I think you mean " + furthermore(quoted)
class POS:
    """
    Penn Treebank part-of-speech tag constants.

    https://www.ling.upenn.edu/courses/Fall_2003/ling001/penn_treebank_pos.html
    http://www.surdeanu.info/mihai/teaching/ista555-fall13/readings/PennTreebankTagset.html
    """
    # 1. Coordinating conjunction
    CC = 'CC'
    # 2. Cardinal number
    CD = 'CD'
    # 3. Determiner
    DT = 'DT'
    # 4. Existential there
    EX = 'EX'
    # 5. Foreign word
    FW = 'FW'
    # 6. Preposition or subordinating conjunction
    IN = 'IN'
    # 7. Adjective or numeral, ordinal
    JJ = 'JJ'
    # 8. Adjective, comparative
    JJR = 'JJR'
    # 9. Adjective, superlative
    JJS = 'JJS'
    # 10. List item marker
    LS = 'LS'
    # 11. Modal
    MD = 'MD'
    # Unfortunately there is no POS tag for mass nouns specifically:
    # 12. Noun, singular or mass
    NN = 'NN'
    # 13. Noun, plural
    NNS = 'NNS'
    # 14. Proper noun, singular
    NNP = 'NNP'
    # 15. Proper noun, plural
    NNPS = 'NNPS'
    # 16. Predeterminer
    PDT = 'PDT'
    # 17. Possessive ending
    POS = 'POS'
    # 18. Personal pronoun
    PRP = 'PRP'
    # 19. Possessive pronoun
    PRP_ = 'PRP$'
    # 20. Adverb
    RB = 'RB'
    # 21. Adverb, comparative
    RBR = 'RBR'
    # 22. Adverb, superlative
    RBS = 'RBS'
    # 23. Particle
    RP = 'RP'
    # 24. Symbol
    SYM = 'SYM'
    # 25. to
    TO = 'TO'
    # 26. Interjection
    UH = 'UH'
    # 27. Verb, base form
    VB = 'VB'
    # 28. Verb, past tense
    VBD = 'VBD'
    # 29. Verb, gerund or present participle
    VBG = 'VBG'
    # 30. Verb, past participle
    VBN = 'VBN'
    # 31. Verb, non-3rd person singular present
    VBP = 'VBP'
    # 32. Verb, 3rd person singular present
    VBZ = 'VBZ'
    # 33. Wh-determiner
    WDT = 'WDT'
    # 34. Wh-pronoun
    WP = 'WP'
    # 35. Possessive wh-pronoun
    WP_ = 'WP$'
    # 36. Wh-adverb
    WRB = 'WRB'

    @staticmethod
    def nounish(word, pos):
        """True when (word, pos) looks like a real noun of any kind."""
        # nltk apparently defaults to 'NN' for smileys :) so special-case those
        return pos in (POS.NN, POS.NNS, POS.NNP, POS.NNPS) and \
            any(c.isalpha() for c in word)
# Words that are mass nouns ("less water" is fine) come from a local corpus.
mass_noun_corpora = WordListCorpusReader('wordlist/massnoun', r'[a-z]+')
mass_nouns = mass_noun_corpora.words()
# POS tags that may follow "less" and still yield a sensible correction.
QUANTITY_POS_TAGS = frozenset((
    POS.JJ,
    POS.VBN,
    POS.VBP,
    POS.NN,
    POS.NNP,
    POS.RB,
    POS.RBR,
    POS.RBS,
))
# Short profanities to avoid echoing back in replies.
bad_words_corpora = WordListCorpusReader('wordlist/shutterstock-bad-words', r'[a-z]{2,3}')
bad_words_en = bad_words_corpora.words('en')
def match(blob_tags, i):
    """Given POS-tagged words and the index *i* of a word 'less', return the
    correction string (e.g. "fewer errors"), or None if no correction applies.

    blob_tags is a sequence of (word, pos_tag) pairs for one sentence.
    """
    # Special-cased jokes take precedence over the general rules.
    if ["could", "care", "less"] == [w.lower() for w, tag in blob_tags[i-2:i+1]]:
        return "could care fewer"
    if ["less", "than", "jake"] == [w.lower() for w, tag in blob_tags[i:i+3]]:
        return "Fewer Than Jake"
    # Words that will precede "fewer" in the reply (e.g. "much", "100%").
    reply_words = []
    if i > 0:
        v, v_pos = blob_tags[i - 1]
        if v_pos == POS.CD and not v.endswith('%'):
            # ignore "one less xxx" but allow "100% less xxx"
            return
        if i > 1:
            u, u_pos = blob_tags[i - 2]
            if u.lower() == 'more' and v.lower() == 'or':
                reply_words.extend([u, v])
            elif u.isdigit() and v == '%':
                reply_words.append(u + v)
        if not reply_words:
            # Carry a single qualifying adverb/determiner ("much", "no", ...)
            # into the reply.
            if v_pos in (POS.RB, POS.DT):
                reply_words.append(v)
    # Mirror the capitalisation of the original "less".
    less, _less_pos = blob_tags[i]
    if less.isupper():
        fewer = 'FEWER'
    elif less.istitle() and i != 0:
        fewer = 'Fewer'
    else:
        fewer = 'fewer'
    try:
        w, w_pos = blob_tags[i + 1]
    except IndexError:
        return
    # The word after "less" must be correctable: right POS tag or a mass noun.
    if w_pos not in QUANTITY_POS_TAGS and w not in mass_nouns:
        return
    # Reject tokens with non-letter junk (slashes are allowed, e.g. "qualified/experienced").
    if not w.replace('/', '').isalpha():
        return
    # Look ahead: bail out if "less <adjectives>" ultimately modifies a noun.
    for v, v_pos in blob_tags[i + 2:]:
        # Avoid replying "fewer lonely" to "less lonely girl"
        # why? this is "right"! but it would be better to say "fewer lonely girl"
        # but: "less happy sheep" -> "fewer happy sheep" is bad
        if POS.nounish(v, v_pos):
            return
        # less than 5 Seconds
        if v_pos in (POS.CD,):
            return
        # if we reject "less happy sheep" we should also reject "less happy fluffy sheep".
        if v_pos not in (POS.JJ, POS.VBG):
            break
    reply_words.extend([fewer, w])
    return ' '.join(reply_words)
def find_corrections(text):
    """Return the list of 'fewer' corrections for *text*, in order of first
    appearance, de-duplicated.

    Returns [] if any correction would contain a word from the bad-word list.
    """
    corrections = OrderedSet()
    for sentence in TextBlob(text).sentences:
        # blob.tags excludes punctuation, but we need that to avoid correcting
        # across a comma, ellipsis, etc. In fact, it's not clear there is
        # all that much point splitting into sentences…
        tagged = [(Word(token, pos_tag=t), str(t))
                  for token, t in sentence.pos_tagger.tag(sentence.raw)]
        for i, (token, _tag) in enumerate(tagged):
            if token.lower() != 'less':
                continue
            correction = match(tagged, i)
            if correction is not None:
                corrections.add(correction)
    result = list(corrections)
    if any(bad in correction for correction in result for bad in bad_words_en):
        return []
    return result
| {"/fewerror/twitter/fmk.py": ["/fewerror/twitter/util.py"], "/tests/test_state.py": ["/fewerror/state.py"], "/fewerror/telegram.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__init__.py": ["/fewerror/__init__.py", "/fewerror/state.py", "/fewerror/util.py", "/fewerror/twitter/util.py", "/fewerror/twitter/fmk.py"], "/tests/test_crashes.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__main__.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/fewerror/twitter/batch.py": ["/fewerror/twitter/__init__.py"], "/fewerror/thatsnotmybot.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/tests/test_twitter.py": ["/fewerror/twitter/__init__.py"], "/fewerror/get_oauth_token.py": ["/fewerror/__init__.py"], "/fewerror/__init__.py": ["/fewerror/util.py"], "/tests/test_grammar.py": ["/fewerror/__init__.py"], "/tests/test_thatsnotmybot.py": ["/fewerror/thatsnotmybot.py"]} |
65,933 | wjt/fewerror | refs/heads/master | /tests/test_grammar.py | # vim: fileencoding=utf-8
import fewerror
import codecs
import string
import pytest
from textblob import TextBlob
true_positives = [
(u"I don't know whether I find the Believe.in thing more or less offensive than Tesco Clubcard sending HTML with `text/plain`",
'more or fewer offensive',
),
pytest.mark.xfail(reason='regressed at some point; maybe splitting subclauses would help')(
(u"Good: Sweet puppies sleeping. Less Good: Vet tells us they will be 50-60lbs instead of the 25-30 the Rescue group... pic.twitter.com/CBUpjZyxLu",
'fewer good',
),
),
(u"Sitting next to Dan Winship here at the @WebKitGTK hackfest, turned out it was a missing TCP_NODELAY. Fixed! HTTPS now 33% less slow :)",
'33% fewer slow',
),
(u"My phone is more or less screwed.",
'more or fewer screwed',
),
(u"One broken string, one shock from a microphone, and one admonition from the sound guy to rock less hard. Success! Thanks for coming.",
'fewer hard',
),
(u"Hispanic-American Adults Are Less Catholic and More ‘Unaffiliated’ Than Ever Before",
'Fewer Catholic',
),
(u"Okay, it was an ad for an emergency-alarm watch. I feel less annoyed now.",
'fewer annoyed',
),
(u"We're not from a faraway country. We were just less lucky than you.",
'just fewer lucky',
),
(u"@tellingfibulas Awww cheers mate. That's much appreciated :D I'm getting less janky hopefully.",
'fewer janky',
),
(u"(And I know it's heresy to say it, but while Hissing Fauna is excellent I'm less keen on the direction it heralded)",
'fewer keen',
),
# mass noun vvvvvvvvvv
(u"Reckon you'd lose less blood having a major heart op!!",
'fewer blood',
),
# Would be nice to get this right. "a less theatrical version" -> "I think you mean 'a fewer
# theatrical version'" would be funny, whereas "I think you mean 'fewer theatrical'" is less
# good.
pytest.mark.xfail(reason='a less adj noun')(
(u"hey, remember that google bus thing? sf delivers a less theatrical version http://t.co/YxVq1JYZP9",
'fewer theatrical',
),
),
# https://github.com/wjt/fewerror/issues/2
(u"In the context of https://medium.com/@b_k/https-the-end-of-an-era-c106acded474 … it’s striking that the problems setting up ssh are much much less onerous",
'much fewer onerous',
),
("which is to say I found it no less surprising than 'with' itself.",
'no fewer surprising',
),
(u"So if I say fewer less often all is well?",
u"fewer often",
),
(u"Less monitoring if you ask me",
u"fewer monitoring",
),
# [100%] fewer exercise is be ungrammatical, though "100% fewer exercises" would be grammatical...
(u"I've eaten 50% more food and done 100% less exercise since I got to NY.",
"100% fewer exercise",
),
(u"The One True Syntax Pedant Bot is @fewerror. Much less bad than all others.",
"fewer bad",
),
(u"it’s WAY less spiritually exhausting than constantly having to educate about race/gender/etc.",
"fewer spiritually", # TODO: would be nice to say "fewer spiritually exhausting"
),
(u"""Telegram is certainly less popular, but WhatsApp is much less open.""",
[
"certainly fewer popular",
"much fewer open",
],
),
(u"""I could care less""",
"could care fewer",
),
(u"""I love less than jake""",
u"Fewer Than Jake",
),
(u"""Goals\n\nLess hate.\nLess stress.\nLess pain.\nMore love.\nMore rest.\nMore joy.""",
[
# TODO: "Less hate." here is really a new sentence so the titlecase logic shouldn't kick
# in.
"Fewer hate", # VBP
"fewer stress", # NN
"fewer pain", # NN
],
),
(u"""the intersection becomes LESS SAFE than before""",
"FEWER SAFE",
),
# https://twitter.com/resiak/status/805478063270137857
("I've never seen him less happy to be there.",
"fewer happy",
),
("less politics please",
"fewer politics",
),
("""“‘Sometimes’ is less controversial than ‘often’”""",
"fewer controversial",
),
("""I guess in the same way as the mom gets less effective as she gets
better at testing, he gets less effective as he succumbs to
cirrhosis…""",
"fewer effective",
),
# referenced by https://twitter.com/ruskin147/status/903178201941377024
# I think this makes the joke better.
("""More or Less back on cracking form - do 65% of Irish exports go to UK as claimed by DUP?""",
"More or Fewer back",
),
("""Obviously less qualified/experienced, but someone with column and radio show is a journo, however much she lets the rest down I'm afraid! :)""",
"Obviously fewer qualified/experienced",
),
]
@pytest.mark.parametrize("tweet,reply", true_positives)
def test_true_positives(tweet, reply):
    """Each tweet must yield exactly the expected correction(s)."""
    if isinstance(reply, str):
        expected = [reply]
    else:
        expected = reply
    actual_replies = fewerror.find_corrections(tweet)
    assert actual_replies == expected, str(TextBlob(tweet).tags)
false_positives = [
u"The fact that @merrittwhitley can Instagram me but not text me back.... haha I expect nothing less. #Cool #IllJustWait #MyBestFriendIsSlow",
u"one less lonely girl is my song",
u"There's going to be one less lonely girl",
# Less JJ JJ+ nounish. "Fewer successful political unions" is not what the speaker meant, but it
# is grammatical.
u"@AdamRamsay @dhothersall For sake of balance; Less successful political unions include USSR and Yugoslavia.",
# Similar. https://twitter.com/kiehlmanniac/status/578486683353661441
u"@resiak @fewerror @travisci Are there any less over-engineered satirical grammar bots?",
u"oh yh due to there being less gender-neutral people, right? :D",
u"Yes, Fred Phelps did horrible things, said horrible things. That doesn't mean you can do slightly less horrible things and be a good person.",
u"There are people with life sentences for way less: Tim Allen arrested for over 650 grams (1.43 lb) of cocaine. 1978. http://twitter.com/History_Pics/status/442776869742854145/photo/1pic.twitter.com/EtUND0xYxm ",
u"I wish there were less pretentious motherfucking ass holes on this planet...i feel so worthless right now",
u"Firefox Tweaks – An attempt to make Firefox suck less",
# https://twitter.com/fewerror/status/659747048099618825
u'''I was about to do this but then realised they have reveals that say “show less” rather than “show fewer” so now _I’m_ angry :-\\''',
u"""it's like four swords but with one less person and internet rando multiplayer""",
# wordfilter errs on the side of caution, that's a good idea
u"""I want less Scunthorpe in my life""",
u"""That bath was less therapeutic than I had hoped""",
# Would be nice to be "bore me fewer" here
u"""I would have thought a comic book movie might bore me less, given my history!""",
u"""get back to being ~electable~ by doing all the horrible oppressive things the Tories do but just ~slightly less~? hope voters buy that?""",
# https://twitter.com/fewerror/status/803742976291663872
"""★★ 97% Of People Can't Find The Horse In This Picture In LESS Than 5 Seconds. Can You?""",
# https://twitter.com/fewerror/status/805181450182070273
"""Can I have a ham sandwich please? So what does that mean...? Just what it says... no more no less... Focus is obvious LOL""",
# Wow, I can't believe this one wasn't in the test suite. Sentence case is fine:
"""Less errors!""",
# ...but shouty-case is not:
pytest.mark.xfail(reason='all-caps words are assumed to be singular proper nouns')(
"""LESS ERRORS!""",
),
]
@pytest.mark.parametrize("tweet", false_positives)
def test_false_positives(tweet):
    """None of these tweets should trigger any correction at all."""
    corrections = fewerror.find_corrections(tweet)
    assert corrections == [], str([s.tags for s in TextBlob(tweet).sentences])
def test_mass_nouns():
    # "mathematics" is in the mass-noun wordlist, so "less mathematics" is corrected...
    assert fewerror.find_corrections("I wish I had studied less mathematics") == ['fewer mathematics']
    # ...but when it is followed by a plural noun ("students") no reply is made.
    assert fewerror.find_corrections("I wish I had studied less mathematics students") == []
@pytest.mark.parametrize("corrections,reply", [
    # NB: ("a",) — the trailing comma makes this a 1-tuple. The previous
    # ("a") was just the string "a", which only passed by accident because
    # iterating a one-character string yields that same character.
    (("a",), "I think you mean “a”"),
    (("a", "b"), "I think you mean “a”, and furthermore “b”"),
    (("a", "b", "c"), "I think you mean “a”, “b”, and furthermore “c”"),
    (("a", "b", "c", "d"), "I think you mean “a”, “b”, “c”, and furthermore “d”"),
])
def test_format_reply(corrections, reply):
    """format_reply lists corrections, prefixing the last with 'and furthermore'."""
    assert fewerror.format_reply(corrections) == reply
| {"/fewerror/twitter/fmk.py": ["/fewerror/twitter/util.py"], "/tests/test_state.py": ["/fewerror/state.py"], "/fewerror/telegram.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__init__.py": ["/fewerror/__init__.py", "/fewerror/state.py", "/fewerror/util.py", "/fewerror/twitter/util.py", "/fewerror/twitter/fmk.py"], "/tests/test_crashes.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__main__.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/fewerror/twitter/batch.py": ["/fewerror/twitter/__init__.py"], "/fewerror/thatsnotmybot.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/tests/test_twitter.py": ["/fewerror/twitter/__init__.py"], "/fewerror/get_oauth_token.py": ["/fewerror/__init__.py"], "/fewerror/__init__.py": ["/fewerror/util.py"], "/tests/test_grammar.py": ["/fewerror/__init__.py"], "/tests/test_thatsnotmybot.py": ["/fewerror/thatsnotmybot.py"]} |
65,934 | wjt/fewerror | refs/heads/master | /fewerror/util.py | import collections
import collections.abc
def reverse_inits(xs):
    """Yield the non-empty prefixes of *xs*, longest first.

    For [1, 2, 3] this yields [1, 2, 3], then [1, 2], then [1].
    """
    n = len(xs)
    while n > 0:
        yield xs[:n]
        n -= 1
class OrderedSet(collections.abc.MutableSet):
    """A MutableSet that iterates its elements in insertion order."""

    def __init__(self, it=()):
        super(OrderedSet, self).__init__()
        # An OrderedDict keyed by the elements preserves insertion order;
        # the values are never used.
        self._map = collections.OrderedDict.fromkeys(it)

    def __contains__(self, elem):
        return elem in self._map

    def __iter__(self):
        return iter(self._map)

    def __len__(self):
        return len(self._map)

    def add(self, elem):
        # Re-adding an existing element keeps its original position.
        self._map.setdefault(elem, None)

    def discard(self, elem):
        # Unlike remove(), discarding a missing element is a no-op.
        if elem in self._map:
            del self._map[elem]

    def __repr__(self):
        inner = ', '.join(repr(x) for x in self)
        return '{}(({}))'.format(type(self).__name__, inner)

    def __str__(self):
        return '{' + ', '.join(str(x) for x in self) + '}'
| {"/fewerror/twitter/fmk.py": ["/fewerror/twitter/util.py"], "/tests/test_state.py": ["/fewerror/state.py"], "/fewerror/telegram.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__init__.py": ["/fewerror/__init__.py", "/fewerror/state.py", "/fewerror/util.py", "/fewerror/twitter/util.py", "/fewerror/twitter/fmk.py"], "/tests/test_crashes.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__main__.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/fewerror/twitter/batch.py": ["/fewerror/twitter/__init__.py"], "/fewerror/thatsnotmybot.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/tests/test_twitter.py": ["/fewerror/twitter/__init__.py"], "/fewerror/get_oauth_token.py": ["/fewerror/__init__.py"], "/fewerror/__init__.py": ["/fewerror/util.py"], "/tests/test_grammar.py": ["/fewerror/__init__.py"], "/tests/test_thatsnotmybot.py": ["/fewerror/thatsnotmybot.py"]} |
65,935 | wjt/fewerror | refs/heads/master | /tests/test_thatsnotmybot.py | import os
import yaml
import pytest
from unittest.mock import MagicMock, call
from fewerror.thatsnotmybot import ThatsNotMyBot
def test_validate():
    # Smoke test: constructing the bot must not raise.
    ThatsNotMyBot()
def test_generate():
    # Smoke test: generate() must not raise.
    ThatsNotMyBot().generate()
def test_argparse():
    # Smoke test: main() with an empty argv must parse cleanly.
    ThatsNotMyBot().main([])
@pytest.mark.parametrize('initial_state,is_my', [
    (None, False),
    ({}, False),
    ({'object': 'teapot', 'n': 1, 'last_id': 42}, False),
    ({'object': 'cake', 'n': 7, 'last_id': 42}, True),
])
def test_first_tweet(tmpdir, initial_state, is_my):
    """Tweeting advances the persisted state and replies to the previous
    tweet; when is_my is True the state is reset and the tweet flips to
    "THAT'S my".
    """
    state_yaml = str(tmpdir.join('x.yaml'))
    if initial_state is not None:
        with open(state_yaml, 'w') as f:
            yaml.dump(initial_state, f)
    # Stub out Twitter: update_status() returns a canned status object.
    status = MagicMock()
    status.author.screen_name = 'abcde'
    status.id = 12345
    api = MagicMock()
    api.update_status.return_value = status
    tnmb = ThatsNotMyBot()
    tnmb.get_twitter_api = lambda: api
    tnmb.main(['tweet', '--state', state_yaml])
    with open(state_yaml, 'r') as f:
        # safe_load: never execute arbitrary YAML tags; plain yaml.load()
        # without an explicit Loader is deprecated since PyYAML 5.1.
        state = yaml.safe_load(f)
    if is_my:
        assert state == {}
    else:
        assert state['n'] == (initial_state or {}).get('n', 0) + 1
        assert 'object' in state
        if initial_state and 'object' in initial_state:
            assert initial_state['object'] == state['object']
        assert state['last_id'] == 12345
    assert len(api.update_status.mock_calls) == 1
    args, kwargs = api.update_status.call_args
    tweet = args[0]
    if initial_state:
        assert initial_state['object'] in tweet
        assert kwargs['in_reply_to_status_id'] == initial_state['last_id']
    else:
        assert kwargs['in_reply_to_status_id'] is None
    if is_my:
        assert tweet.startswith("THAT'S my")
    else:
        assert tweet.startswith("That's not")
| {"/fewerror/twitter/fmk.py": ["/fewerror/twitter/util.py"], "/tests/test_state.py": ["/fewerror/state.py"], "/fewerror/telegram.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__init__.py": ["/fewerror/__init__.py", "/fewerror/state.py", "/fewerror/util.py", "/fewerror/twitter/util.py", "/fewerror/twitter/fmk.py"], "/tests/test_crashes.py": ["/fewerror/__init__.py"], "/fewerror/twitter/__main__.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/fewerror/twitter/batch.py": ["/fewerror/twitter/__init__.py"], "/fewerror/thatsnotmybot.py": ["/fewerror/twitter/__init__.py", "/fewerror/__init__.py"], "/tests/test_twitter.py": ["/fewerror/twitter/__init__.py"], "/fewerror/get_oauth_token.py": ["/fewerror/__init__.py"], "/fewerror/__init__.py": ["/fewerror/util.py"], "/tests/test_grammar.py": ["/fewerror/__init__.py"], "/tests/test_thatsnotmybot.py": ["/fewerror/thatsnotmybot.py"]} |
65,956 | timhebert1963/distance-between-two-cities | refs/heads/master | /distance_between_two_cities.py | from functions_distance_between_two_cities import *
import os
def main():
    """Interactive loop: show the state list, ask for two cities, and print
    the distance between them until the user chooses to stop.

    A googlemaps geocode api_key is needed to run this script.
    2 youtube links which explain how to get the api_key and set environment variables
    https://www.youtube.com/watch?v=1MVDIFShE5Q
    https://www.youtube.com/watch?v=sI8py6soTWs
    """
    # Tell the user what input will be requested.
    intro_message()
    exploring = True
    while exploring:
        # Show the abbreviation table, wait for the user, then redraw it on
        # a fresh screen above the input prompts.
        display_state_abbr()
        print('\n')
        input(" Press Enter to continue")
        clear_screen()
        display_state_abbr()
        print('\n')
        # Collect the two endpoints and compute the distance between them.
        city1, state1, city2, state2 = ask_for_two_cities()
        distance = calculate_dist(city1, state1, city2, state2)
        # Present the result on a cleared screen.
        clear_screen()
        distance_banner(city1, state1, city2, state2, distance)
        for _ in range(4):
            print('\n')
        exploring = continue_exploring_distance()
    clear_screen()
    thank_you_for_exploring_banner()
# **** End of function main() **** #
if __name__ == "__main__":
    # Run only when executed directly, not when this module is imported.
    clear_screen()
    print('\n')
    print('\n')
    main()
65,957 | timhebert1963/distance-between-two-cities | refs/heads/master | /functions_distance_between_two_cities.py | from math import sin, cos, sqrt, atan2, radians
#from modules_distance_between_two_cities import *
from Tim_common import *
import os
import googlemaps
def display_state_abbr():
    """Print the 50 US state abbreviations and names in five alphabetical,
    column-major columns (AL..GA down the first column, and so on).
    """
    # Column-major layout: each inner list is one on-screen column of 10 states.
    columns = [
        [('AL', 'Alabama'), ('AK', 'Alaska'), ('AZ', 'Arizona'),
         ('AR', 'Arkansas'), ('CA', 'California'), ('CO', 'Colorado'),
         ('CT', 'Connecticut'), ('DE', 'Delaware'), ('FL', 'Florida'),
         ('GA', 'Georgia')],
        [('HI', 'Hawaii'), ('ID', 'Idaho'), ('IL', 'Illinois'),
         ('IN', 'Indiana'), ('IA', 'Iowa'), ('KS', 'Kansas'),
         ('KY', 'Kentucky'), ('LA', 'Louisiana'), ('ME', 'Maine'),
         ('MD', 'Maryland')],
        [('MA', 'Massachusetts'), ('MI', 'Michigan'), ('MN', 'Minnesota'),
         ('MS', 'Mississippi'), ('MO', 'Missouri'), ('MT', 'Montana'),
         ('NE', 'Nebraska'), ('NV', 'Nevada'), ('NH', 'New Hampshire'),
         ('NJ', 'New Jersey')],
        [('NM', 'New Mexico'), ('NY', 'New York'), ('NC', 'North Carolina'),
         ('ND', 'North Dakota'), ('OH', 'Ohio'), ('OK', 'Oklahoma'),
         ('OR', 'Oregon'), ('PA', 'Pennsylvania'), ('RI', 'Rhode Island'),
         ('SC', 'South Carolina')],
        [('SD', 'South Dakota'), ('TN', 'Tennessee'), ('TX', 'Texas'),
         ('UT', 'Utah'), ('VT', 'Vermont'), ('VA', 'Virginia'),
         ('WA', 'Washington'), ('WV', 'West Virginia'), ('WI', 'Wisconsin'),
         ('WY', 'Wyoming')]
    ]
    # One cell is "AB : Name<padded to 15>"; columns are separated by four
    # spaces (NOTE(review): assumed to match the previous hand-written
    # format strings — confirm against a saved screenshot if exact column
    # spacing matters).
    cell = "{:2s} : {:15s}"
    row_format = " " + "    ".join([cell] * 5)
    print(" State Abbreviations:")
    print('\n')
    for row in zip(*columns):
        fields = [part for abbr_name in row for part in abbr_name]
        print(row_format.format(*fields))
# **** End of function display_state_abbr() **** #
# **** End of function display_state_abbr() **** #
def intro_message():
    """Explain what input will be requested before prompting the user."""
    lines = (
        " You will be asked to enter the city and the state for the start city and state,",
        " and for the destination city and state. You can enter the abbreviation for the state.",
    )
    for line in lines:
        print(line)
    print('\n')
# **** End of function intro_message() **** #
def ask_for_two_cities():
    """Prompt for the start and destination (city, state) pairs.

    City names are title-cased and state abbreviations upper-cased before
    being returned as (city1, state1, city2, state2).
    """
    first_city = input(" Enter 1st city to calculate distance from: ")
    first_state = input(" Enter the state {} is located ".format(first_city.title()))
    # Normalise: multi-word city names capitalised, state abbreviations upper case.
    first_city, first_state = first_city.title(), first_state.upper()
    print('\n')
    second_city = input(" Enter 2nd city to calculate distance to : ")
    second_state = input(" Enter the state {} is located ".format(second_city.title()))
    second_city, second_state = second_city.title(), second_state.upper()
    print('\n')
    print('\n')
    return first_city, first_state, second_city, second_state
# **** End of function ask_for_two_cities() **** #
def calculate_dist(city1, state1, city2, state2):
    """Return the great-circle distance in whole miles between two cities.

    Coordinates come from the Google geocoder (get_degrees); the distance
    is computed with the haversine formula on a spherical Earth.
    """
    # Earth radius, converted once to miles.
    earth_radius_miles = convert_km_to_miles(get_radius_of_earth_km())

    # Look up each city's coordinates in degrees and convert to radians.
    lat1, lon1 = get_radians(*get_degrees(city1, state1))
    lat2, lon2 = get_radians(*get_degrees(city2, state2))

    # Coordinate deltas.
    dlat = lat2 - lat1
    dlon = lon2 - lon1

    # Haversine formula.
    a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2
    c = 2 * atan2(sqrt(a), sqrt(1 - a))

    # Truncate to a whole number of miles, as before.
    return int(earth_radius_miles * c)
# **** End of function calculate_dist() **** #
def get_radius_of_earth_km():
    """Return the (approximate) radius of the Earth in kilometres."""
    return 6373.0
# **** End of function radius_of_earth_km() **** #
def convert_km_to_miles(distance):
    """Convert *distance* from kilometres to miles."""
    km_to_miles = 0.621371
    return distance * km_to_miles
# **** End of function convert_km_to_miles() **** #
def get_degrees(city, state):
    """Return the (latitude, longitude) of "city, state" in degrees.

    Reads the Google Maps API key from the APIKEY environment variable and
    raises KeyError if it is not set.
    """
    city_state = city + ', ' + state
    # api_key for googlemaps and geocode app
    api_key = os.environ['APIKEY']
    # start googlemaps Client
    gm = googlemaps.Client(key=api_key)
    # One geocode request instead of two: the previous version queried the
    # API separately for 'lat' and 'lng', doubling quota usage and risking
    # inconsistent results between the two calls.
    location = gm.geocode(city_state)[0]['geometry']['location']
    return location['lat'], location['lng']
# **** End of function get_degrees() **** #
def get_radians(lat, lon):
    """Convert a (latitude, longitude) pair from degrees to radians."""
    return radians(lat), radians(lon)
# **** End of function get_radians() **** #
def distance_banner(city1, state1, city2, state2, distance):
    """Display a banner summarising the computed distance."""
    lines = (
        "Distance Between Two Cities",
        "The starting city is {}, {}".format(city1, state1),
        "Destination city is {}, {}".format(city2, state2),
        "The distance between {}, {} and {}, {} is {} miles".format(
            city1, state1, city2, state2, distance),
    )
    banner = assign_banner_attributes(*lines)
    display_banner(banner)
    del banner
# **** End of function get_radians() **** #
def thank_you_for_exploring_banner():
    """Display the goodbye banner."""
    banner = assign_banner_attributes("Thank You for Exploring the",
                                      "Distance Between Two Cities!!")
    display_banner(banner)
    del banner
# **** End of function get_radians() **** #
def continue_exploring_distance():
    """Ask until the user answers 'y' or 'n'; return True for 'y'."""
    while True:
        answer = input(" Would you like to continue exploring the distance between two cities? 'y' or 'n' ").lower()
        if answer == 'y':
            return True
        if answer == 'n':
            return False
        # Anything else: explain and re-prompt.
        print(" You did not enter 'y' or 'n'. Please try again!")
        print('\n')
# **** End of function continue_exploring_distance() **** # | {"/distance_between_two_cities.py": ["/functions_distance_between_two_cities.py"]} |
65,962 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py | import unittest
from unittest import mock
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from datetime import datetime
from meerkat_abacus import model
from meerkat_abacus.pipeline_worker.process_steps import add_multiple_alerts
from meerkat_abacus.pipeline_worker.process_steps import write_to_db
from meerkat_abacus.consumer.database_setup import create_db
from meerkat_abacus.config import config
class TestAddMultipleAlerts(unittest.TestCase):
    """End-to-end test of AddMultipleAlerts against a freshly created DB."""
    def setUp(self):
        # Rebuild the whole schema so every test starts from a clean database.
        create_db(config.DATABASE_URL, drop=True)
        engine = create_engine(config.DATABASE_URL)
        model.form_tables(config)
        model.Base.metadata.create_all(engine)
        self.engine = create_engine(config.DATABASE_URL)
        Session = sessionmaker(bind=self.engine)
        self.session = Session()
    def test_app_multiple_alerts(self):
        """Three matching cmd_1 case records at one clinic trigger the
        alert_type="threshold:3,5" alert: record "a" carries the alert and
        the other records link to it as sub-alerts.
        """
        # Raw form submissions keyed by uuid (rows for the demo_case table).
        existing_raw_data = [
            {
                "uuid": "a",
                "data": {
                    "SubmissionDate": "2017-06-10",
                    "end": "2017-06-10",
                    "pt1./gender": "male",
                    "pt1./age": 32
                }
            },
            {
                "uuid": "b",
                "data": {
                    "SubmissionDate": "2017-06-10",
                    "end": "2017-06-10",
                    "pt1./gender": "male",
                    "pt1./age": 32
                }
            },
            {
                "uuid": "c",
                "data": {
                    "SubmissionDate": "2017-06-10",
                    "end": "2017-06-10",
                    "pt1./gender": "male",
                    "pt1./age": 32
                }
            }
        ]
        # Processed Data rows: three cmd_1 cases, same clinic, same date.
        existing_data = [
            {
                "clinic": 1,
                "uuid": "a",
                "type": "case",
                "date": datetime(2017, 6, 10),
                "variables": {
                    "cmd_1": 1
                }
            },
            {
                "clinic": 1,
                "uuid": "b",
                "type": "case",
                "date": datetime(2017, 6, 10),
                "variables": {
                    "cmd_1": 1
                }
            },
            {
                "clinic": 1,
                "uuid": "c",
                "type": "case",
                "date": datetime(2017, 6, 10),
                "variables": {
                    "cmd_1": 1
                }
            }
        ]
        table = model.form_tables(config)["demo_case"]
        con = self.engine.connect()
        con.execute(table.__table__.insert(), existing_raw_data)
        con.execute(model.Data.__table__.insert(), existing_data)
        # Aggregation variable whose alert_type drives the threshold logic.
        variable = model.AggregationVariables(
            id="cmd_1",
            method="match", db_column="icd_code",
            type="case",
            condition="A00",
            category=[],
            alert=1,
            alert_type="threshold:3,5",
            form="demo_case")
        self.session.add(variable)
        self.session.commit()
        add_alerts = add_multiple_alerts.AddMultipleAlerts(config,
                                                           self.session)
        new_data = {
            "clinic": 1,
            "uuid": "c",
            "date": datetime(2017, 6, 10),
            "variables": {
                "cmd_1": 1
            }
        }
        # First pass: run all three existing rows through the step.
        add_alerts.start_step()
        results = []
        for d in existing_data:
            results += add_alerts.run("data", d)
        add_alerts.end_step(3)
        self.assertEqual(len(results), 3)
        self.assertEqual(len(add_alerts.found_uuids), 3)
        # "a" is the master alert; "b" and "c" are sub-alerts pointing at it.
        for result in results:
            if result["data"]["uuid"] == "a":
                self.assertIn("alert", result["data"]["variables"])
                self.assertIn("alert_id", result["data"]["variables"])
            else:
                self.assertNotIn("alert", result["data"]["variables"])
                self.assertNotIn("alert_id", result["data"]["variables"])
                self.assertEqual(result["data"]["variables"]["sub_alert"], 1)
                self.assertEqual(result["data"]["variables"]["master_alert"],
                                 "a")
        # Persist the alert-annotated rows before re-running with more data.
        db_writer = write_to_db.WriteToDb(config, self.session)
        db_writer.engine = self.engine
        db_writer.start_step()
        for r in results:
            db_writer.run(r["form"], r["data"])
        db_writer.end_step(3)
        additional_data = [
            {
                "clinic": 1,
                "uuid": "d",
                "type": "case",
                "date": datetime(2017, 6, 10),
                "variables": {
                    "cmd_1": 1
                }
            }
        ]
        additional_raw_data = [
            {
                "uuid": "d",
                "data": {
                    "SubmissionDate": "2017-06-10",
                    "end": "2017-06-10",
                    "pt1./gender": "male",
                    "pt1./age": 32
                }
            }
        ]
        con.execute(table.__table__.insert(), additional_raw_data)
        con.execute(model.Data.__table__.insert(), additional_data)
        # Second pass: a fourth matching record joins the existing alert
        # rather than creating a new master alert.
        add_alerts.start_step()
        results = add_alerts.run("data", new_data)
        self.assertEqual(len(results), 4)
        for result in results:
            if result["data"]["uuid"] == "a":
                self.assertIn("alert", result["data"]["variables"])
                self.assertIn("alert_id", result["data"]["variables"])
            else:
                self.assertNotIn("alert", result["data"]["variables"])
                self.assertNotIn("alert_id", result["data"]["variables"])
                self.assertEqual(result["data"]["variables"]["sub_alert"], 1)
                self.assertEqual(result["data"]["variables"]["master_alert"],
                                 "a")
class TestAlertTypes(unittest.TestCase):
def setUp(self):
create_db(config.DATABASE_URL, drop=True)
engine = create_engine(config.DATABASE_URL)
model.form_tables(config)
model.Base.metadata.create_all(engine)
self.engine = create_engine(config.DATABASE_URL)
Session = sessionmaker(bind=self.engine)
self.session = Session()
year = 2014
self.year = year
self.threshold = [
model.Data(
date=datetime(year, 1, 3),
clinic=6,
uuid="1",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 3),
clinic=6,
uuid="2",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 3),
clinic=6,
uuid="3",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 10),
clinic=6,
uuid="4",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 10),
clinic=6,
uuid="5",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 11),
clinic=6,
uuid="6",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 11),
clinic=6,
uuid="7",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 12),
clinic=6,
uuid="8",
variables={"cmd_1": 1}),
]
self.double = [
model.Data(
date=datetime(year, 1, 3),
epi_year=year,
epi_week=1,
clinic=6,
uuid="1",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 3),
epi_year=year,
epi_week=1,
clinic=6,
uuid="2",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 10),
epi_year=year,
epi_week=2,
clinic=6,
uuid="3",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 10),
epi_year=year,
epi_week=2,
clinic=6,
uuid="4",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 10),
epi_year=year,
epi_week=2,
clinic=6,
uuid="5",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 10),
epi_year=year,
epi_week=2,
clinic=6,
uuid="6",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 17),
epi_year=year,
epi_week=3,
clinic=6,
uuid="7",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 17),
epi_year=year,
epi_week=3,
clinic=6,
uuid="8",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 17),
epi_year=year,
epi_week=3,
clinic=6,
uuid="9",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 17),
epi_year=year,
epi_week=3,
clinic=6,
uuid="10",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 17),
epi_year=year,
epi_week=3,
clinic=6,
uuid="11",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 17),
epi_year=year,
epi_week=3,
clinic=6,
uuid="12",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 17),
epi_year=year,
epi_week=3,
clinic=6,
uuid="13",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 17),
epi_year=year,
epi_week=3,
clinic=6,
uuid="14",
variables={"cmd_1": 1}),
]
self.double_wrong_clinic = [
model.Data(
date=datetime(year, 1, 3),
epi_year=year,
epi_week=1,
clinic=6,
uuid="1",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 3),
epi_year=year,
epi_week=1,
clinic=7,
uuid="2",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 10),
epi_year=year,
epi_week=2,
clinic=6,
uuid="3",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 10),
epi_year=year,
epi_week=2,
clinic=6,
uuid="4",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 10),
epi_year=year,
epi_week=2,
clinic=6,
uuid="5",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 10),
epi_year=year,
epi_week=2,
clinic=6,
uuid="6",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 17),
epi_year=year,
epi_week=3,
clinic=6,
uuid="7",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 17),
epi_year=year,
epi_week=3,
clinic=6,
uuid="8",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 17),
epi_year=year,
epi_week=3,
clinic=6,
uuid="9",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 17),
epi_year=year,
epi_week=3,
clinic=6,
uuid="10",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 17),
epi_year=year,
epi_week=3,
clinic=6,
uuid="11",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 17),
epi_year=year,
epi_week=3,
clinic=6,
uuid="12",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 17),
epi_year=year,
epi_week=3,
clinic=6,
uuid="13",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 17),
epi_year=year,
epi_week=3,
clinic=6,
uuid="14",
variables={"cmd_1": 1}),
]
self.double_new_year = [
model.Data(
date=datetime(year, 1, 3),
epi_year=year - 1,
epi_week=52,
clinic=6,
uuid="1",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 3),
epi_year=year - 1,
epi_week=52,
clinic=6,
uuid="2",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 10),
epi_year=year,
epi_week=1,
clinic=6,
uuid="3",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 10),
epi_year=year,
epi_week=1,
clinic=6,
uuid="4",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 10),
epi_year=year,
epi_week=1,
clinic=6,
uuid="5",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 10),
epi_year=year,
epi_week=1,
clinic=6,
uuid="6",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 17),
epi_year=year,
epi_week=2,
clinic=6,
uuid="7",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 17),
epi_year=year,
epi_week=2,
clinic=6,
uuid="8",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 17),
epi_year=year,
epi_week=2,
clinic=6,
uuid="9",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 17),
epi_year=year,
epi_week=2,
clinic=6,
uuid="10",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 17),
epi_year=year,
epi_week=2,
clinic=6,
uuid="11",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 17),
epi_year=year,
epi_week=2,
clinic=6,
uuid="12",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 17),
epi_year=year,
epi_week=2,
clinic=6,
uuid="13",
variables={"cmd_1": 1}),
model.Data(
date=datetime(year, 1, 17),
epi_year=year,
epi_week=2,
clinic=6,
uuid="14",
variables={"cmd_1": 1}),
]
self.session.commit()
def tearDown(self):
    """Flush any outstanding transaction, then dispose of the session."""
    session = self.session
    session.commit()
    session.close()
def test_threshold(self):
    """Threshold alerts for cmd_1 with a "threshold:3,5" rule.

    Expects one 1-day alert (uuids 1-3) and one 7-day alert
    (uuids 4-8) from the self.threshold fixture.
    """
    session = self.session
    session.query(model.Data).delete()
    session.commit()
    session.bulk_save_objects(self.threshold)
    session.commit()

    # Three records on a single day -> duration-1 alert.
    new_alerts = add_multiple_alerts.threshold(
        "cmd_1", "threshold:3,5", datetime(self.year, 1, 3), 6, session)
    self.assertEqual(len(new_alerts), 1)
    alert = new_alerts[0]
    self.assertEqual(alert["duration"], 1)
    self.assertEqual(sorted(alert["uuids"]), ["1", "2", "3"])
    self.assertEqual(alert["clinic"], 6)
    self.assertEqual(alert["reason"], "cmd_1")

    # Five records within a week -> duration-7 alert.
    new_alerts = add_multiple_alerts.threshold(
        "cmd_1", "threshold:3,5", datetime(self.year, 1, 11), 6, session)
    self.assertEqual(len(new_alerts), 1)
    alert = new_alerts[0]
    self.assertEqual(alert["duration"], 7)
    self.assertEqual(sorted(alert["uuids"]), ["4", "5", "6", "7", "8"])
    self.assertEqual(alert["clinic"], 6)
    self.assertEqual(alert["reason"], "cmd_1")
def test_double_double(self):
    """double_double alerts for cmd_1 on clinic 6.

    Cases spread over a second clinic must not trigger; the
    self.double fixture must yield exactly one 7-day alert; removing
    one record from the first week must suppress the alert again.
    """
    session = self.session
    session.query(model.Data).delete()
    session.commit()

    # Counts split across clinics 6 and 7: no alert for clinic 6.
    session.bulk_save_objects(self.double_wrong_clinic)
    session.commit()
    new_alerts = add_multiple_alerts.double_double(
        "cmd_1", 1, self.year, 6, self.engine)
    self.assertEqual(len(new_alerts), 0)

    # Clean slate, all records on clinic 6: expect a single alert.
    session.query(model.Data).delete()
    session.commit()
    session.bulk_save_objects(self.double)
    session.commit()
    new_alerts = add_multiple_alerts.double_double(
        "cmd_1", 1, self.year, 6, self.engine)
    self.assertEqual(len(new_alerts), 1)
    alert = new_alerts[0]
    self.assertEqual(alert["duration"], 7)
    self.assertEqual(
        sorted(alert["uuids"]),
        sorted(["7", "8", "9", "10", "11", "12", "13", "14"]))
    self.assertEqual(alert["clinic"], 6)
    self.assertEqual(alert["reason"], "cmd_1")

    # With only one entry left in the first week, no alert is created.
    to_delete = session.query(model.Data).filter(
        model.Data.uuid == "1").one()
    session.delete(to_delete)
    session.commit()
    new_alerts = add_multiple_alerts.double_double(
        "cmd_1", 1, self.year, 6, self.engine)
    self.assertEqual(len(new_alerts), 0)
def test_double_double_corner_cases(self):
    """double_double across the epi-year boundary (weeks 52, 1, 2)."""
    session = self.session
    session.query(model.Data).delete()
    session.commit()
    session.bulk_save_objects(self.double_new_year)
    session.commit()

    expected_uuids = sorted(
        ["7", "8", "9", "10", "11", "12", "13", "14"])
    for week in (52, 1, 2):
        # Week 52 belongs to the previous epi year.
        year = self.year - 1 if week == 52 else self.year
        new_alerts = add_multiple_alerts.double_double(
            "cmd_1", week, year, 6, self.engine)
        self.assertEqual(len(new_alerts), 1)
        alert = new_alerts[0]
        self.assertEqual(alert["duration"], 7)
        self.assertEqual(sorted(alert["uuids"]), expected_uuids)
        self.assertEqual(alert["clinic"], 6)
        self.assertEqual(alert["reason"], "cmd_1")
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,963 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/pipeline_worker/tests/send_alerts_test.py | import unittest
from unittest import mock
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from datetime import datetime
from meerkat_abacus import model
from meerkat_abacus.pipeline_worker.process_steps.send_alerts import SendAlerts
from meerkat_abacus.consumer.database_setup import create_db
from meerkat_abacus.config import config
class TestSendAlerts(unittest.TestCase):
    """Tests for the SendAlerts pipeline step."""

    def setUp(self):
        """Recreate the database schema and open a fresh session."""
        create_db(config.DATABASE_URL, drop=True)
        # One engine suffices for both schema creation and the session
        # factory (the original constructed two identical engines).
        self.engine = create_engine(config.DATABASE_URL)
        model.form_tables(config)
        model.Base.metadata.create_all(self.engine)
        Session = sessionmaker(bind=self.engine)
        self.session = Session()

    def tearDown(self):
        """Release the DB session opened in setUp."""
        self.session.close()

    @mock.patch('meerkat_abacus.pipeline_worker.process_steps.send_alerts.util.send_alert')
    def test_send_alert(self, send_alert_mock):
        """Only "individual" alerts are sent and tagged with an alert_id.

        The expected alert_id "fghijk" is the tail of the record uuid
        "abcdefghijk" (presumably the configured alert-id length —
        confirm against the country config).
        """
        send = SendAlerts(config, self.session)

        # Individual alert: send_alert called, alert_id assigned.
        data = {"uuid": "abcdefghijk",
                "variables": {"alert": 1,
                              "alert_type": "individual"}}
        result = send.run("data", data)
        self.assertEqual(send_alert_mock.call_count, 1)
        self.assertEqual(result[0]["data"]["variables"]["alert_id"],
                         "fghijk")

        # No alert variable: nothing sent, no alert_id added.
        data = {"uuid": "abcdefghijk",
                "variables": {}}
        result = send.run("data", data)
        self.assertEqual(send_alert_mock.call_count, 1)
        self.assertNotIn("alert_id", result[0]["data"]["variables"])

        # Threshold alerts are handled elsewhere: nothing sent here.
        data = {"uuid": "abcdefghijk",
                "variables": {"alert": 1,
                              "alert_type": "threshold"}}
        result = send.run("data", data)
        self.assertEqual(send_alert_mock.call_count, 1)
        self.assertNotIn("alert_id", result[0]["data"]["variables"])
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,964 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/pipeline_worker/pipeline.py | """
Main pipeline for abacus
"""
import datetime
from meerkat_abacus import model
from meerkat_abacus.pipeline_worker.process_steps.quality_control import QualityControl
from meerkat_abacus.pipeline_worker.process_steps.write_to_db import WriteToDb
from meerkat_abacus.pipeline_worker.process_steps.add_links import AddLinks
from meerkat_abacus.pipeline_worker.process_steps.to_codes import ToCodes
from meerkat_abacus.pipeline_worker.process_steps.send_alerts import SendAlerts
from meerkat_abacus.pipeline_worker.process_steps.add_multiple_alerts import AddMultipleAlerts
from meerkat_abacus.pipeline_worker.process_steps.to_data_type import ToDataType
from meerkat_abacus.pipeline_worker.process_steps.initial_visit_control import InitialVisitControl
from meerkat_abacus.pipeline_worker.process_steps import DoNothing
from meerkat_abacus import logger
class Pipeline:
    """
    Builds and runs the data-processing pipeline specified by the
    ``pipeline`` list in ``param_config.country_config``.
    """

    def __init__(self, engine, session, param_config):
        """Construct the configured step objects in order.

        Raises NotImplementedError for unknown step names.
        """
        # Step-name -> class for steps taking (param_config, session).
        step_classes = {
            "quality_control": QualityControl,
            "write_to_db": WriteToDb,
            "initial_visit_control": InitialVisitControl,
            "to_data_type": ToDataType,
            "add_links": AddLinks,
            "to_codes": ToCodes,
            "send_alerts": SendAlerts,
            "add_multiple_alerts": AddMultipleAlerts,
        }
        # Steps that additionally need a DB engine attached.
        needs_engine = {"write_to_db", "initial_visit_control",
                        "add_links", "add_multiple_alerts"}
        pipeline = []
        for step_name in param_config.country_config["pipeline"]:
            if step_name == "do_nothing":
                step_ = DoNothing(session)
            elif step_name in step_classes:
                step_ = step_classes[step_name](param_config, session)
                if step_name in needs_engine:
                    step_.engine = engine
            else:
                raise NotImplementedError(f"Step '{step_name}' is not implemented")
            pipeline.append(step_)
        self.session = session
        self.engine = engine
        self.param_config = param_config  # (was assigned twice before)
        self.pipeline = pipeline

    def process_chunk(self, input_data):
        """
        Run every record in *input_data* through each pipeline step.

        Each record is a dict with "form" and "data" keys. A step's
        run(form, data) returns a list of such records, so a step may
        drop, keep or multiply records. Records whose processing
        raises are logged via handle_exception and excluded from the
        step's success count. Processing stops early once a step
        yields no records.
        """
        data = input_data
        for step in self.pipeline:
            step.start_step()
            n = len(data)
            new_data = []
            for d in data:
                try:
                    new_data += step.run(d["form"], d["data"])
                except Exception as exception:
                    self.handle_exception(d, exception, step)
                    n -= 1
            step.end_step(n)
            data = new_data
            if not new_data:
                break
        return data

    def handle_exception(self, data, exception, step):
        """
        Handle an exception from step.run by logging it and recording
        the failing record in the StepFailiure table.
        """
        form_data = data["data"]
        form = data["form"]
        logger.exception(f"There was an error in step {step}", exc_info=True)
        # Roll back first: the failed step may have left the session dirty.
        self.session.rollback()
        error_str = type(exception).__name__ + ": " + str(exception)
        self.session.add(
            model.StepFailiure(
                data=fix_json(form_data),
                form=form,
                step_name=str(step),
                error=error_str
            )
        )
        self.session.commit()
def fix_json(row):
    """Make *row* JSON-serialisable in place and return it.

    Converts top-level datetime values to ISO-8601 strings; nested
    containers are left untouched.

    Bug fix: the original returned None, so callers using the return
    value (handle_exception passes data=fix_json(form_data)) stored
    None instead of the cleaned row.
    """
    for key, value in row.items():
        if isinstance(value, datetime.datetime):
            row[key] = value.isoformat()
    return row
#### ALERT CODE
# if "alert" in variable_data:
# variable_data["alert_id"] = row[data_type["form"]][data_type[
# "uuid"]][-param_config.country_config["alert_id_length"]:]
# if "alert" in variable_data and not disregard:
# alerts = session.query(model.AggregationVariables).filter(
# model.AggregationVariables.alert == 1)
# alert_variables = {a.id: a for a in alerts}
# alert_id = new_data["uuid"][-param_config.country_config["alert_id_length"]:]
# util.send_alert(alert_id, new_data,
# alert_variables, locations[0], param_config)
### CODE that will be needed again soon
#
# self.quality_control_arguments = quality_control_arguments
# self.locations = util.all_location_data(session)
# self.links = util.get_links(param_config.config_directory +
# param_config.country_config["links_file"])
# uuids = []
# tables = defaultdict(list)
# for data_row in input_data:
# data = data_row["data"]
# form = data_row["form"]
# data = data_import.quality_control(
# form,
# data,
# **self.quality_control_arguments)
# if not data:
# continue
# #consul.flush_dhis2_events()
# corrected = data_management.initial_visit_control(
# form,
# data,
# self.engine,
# self.session,
# param_config=self.param_config
# )
# initial_visit.append(time.time() - s)
# s = time.time()
# insert_data = []
# for row in corrected:
# insert_data.append({
# "uuid": row[kwargs["uuid_field"]],
# "data": row}
# )
# #consul.send_dhis2_events(uuid=data[kwargs["uuid_field"],
# # form_id=corrected,
# # raw_row=data)
# try:
# table = model.form_tables(param_config=param_config)[form]
# except KeyError:
# logger.exception("Error in process buffer", exc_info=True)
# continue
# write_to_db(engine, insert_data, table=table)
# first_db_write.append(time.time() - s)
# s = time.time()
# data = []
# disregarded = []
# data_types = []
# for row in corrected:
# data_i, disregarded_i, data_types_i = data_management.new_data_to_codes(
# form,
# row,
# row[kwargs["uuid_field"]],
# locations,
# links,
# variables,
# session,
# engine,
# debug_enabled=True,
# param_config=param_config,
# )
# data += data_i
# disregarded += disregarded_i
# data_types += data_types_i
# to_data.append(time.time() - s)
# s = time.time()
# for i in range(len(data)):
# write_to_db(engine, data[i],
# table=[model.Data, model.DisregardedData][disregarded[i]],
# delete=("type", data_types[i]))
# second_db_write.append(time.time() - s)
# data_management.add_alerts(session, data,
# param_config=param_config)
# end = time.time() - start #after_insert - after_qc - start
# logger.info(end)
# qc_m = statistics.mean(qc)
# initial_visit_m = statistics.mean(initial_visit)
# first_db_write_m = statistics.mean(first_db_write)
# to_data_m = statistics.mean(to_data)
# second_db_write_m = statistics.mean(second_db_write)
# logger.info(f"{qc_m}, {initial_visit_m}, {first_db_write_m}, {to_data_m}, {second_db_write_m}")
# import sys
# sys.exit()
# import statistics
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,965 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/consul_export.py | import json
import logging
import os
import sys
import pathlib
import boto3
import botocore
from time import sleep
from meerkat_libs import consul_client
from meerkat_abacus.util import get_db_engine
from meerkat_abacus.util.authenticate import abacus_auth_token
import meerkat_abacus.model as abacus_model
from sqlalchemy import Column, Integer, String
from sqlalchemy.dialects.postgresql import JSONB
logger = logging.getLogger(__name__)
def set_logging_level():
    """Quieten the module logger in production; be verbose otherwise."""
    level = logging.ERROR if config.PRODUCTION else logging.DEBUG
    logger.setLevel(level)
# Local cache location for the "last processed row" marker file that is
# synced to/from S3 by the helpers below.
DB_MARKER_DIR = 'build'
DB_MARKER_FILENAME = 'db_marker_file_path.json'
DB_MARKER_FILEPATH = os.path.join(DB_MARKER_DIR, DB_MARKER_FILENAME)
# Module-level state shared by the helper functions below.
# NOTE(review): both start empty here but are used as a form->table map
# and an attribute-bearing config object — presumably rebound at
# runtime before use; confirm against the module's entry point.
form_tables = {}
config = {}
def __create_table(case_form_name):
    """Build a minimal SQLAlchemy ORM class named after the given form."""
    attributes = {
        "__tablename__": case_form_name,
        "id": Column(Integer, primary_key=True),
        "uuid": Column(String, index=True),
        "data": Column(JSONB),
    }
    return type(case_form_name, (abacus_model.Base,), attributes)
def __get_form_tables():
    """Return {form_name: ORM table class} for the forms to export to dhis2.

    Raises:
        Exception: when the country config lacks the dhis2 consul export section.
    """
    try:
        forms_to_export = config.country_config["consul_export_config"]["forms"]["dhis2"]
    except KeyError:
        raise Exception("Could not read dhis2 export config for consul in the country config.")
    all_form_tables = abacus_model.form_tables(config)
    return {name: all_form_tables[name] for name in forms_to_export}
# Module-level S3 handle used to persist/restore the DB row marker file.
s3 = boto3.resource('s3')
def get_last_read_row_marker(export_codename):
    """Fetch the last-exported-row marker from S3, or start fresh.

    Args:
        export_codename: S3 object key of the marker file.

    Returns:
        dict mapping form name -> id of the last exported row (0 when the
        form has never been exported).
    """
    try:
        pathlib.Path(DB_MARKER_DIR).mkdir(parents=True, exist_ok=True)
        s3.meta.client.download_file('meerkat-consul-db-markers', export_codename, DB_MARKER_FILEPATH)
        with open(DB_MARKER_FILEPATH) as f:
            marker = json.load(f)
        # Forms added to the config since the marker was written start at row 0.
        for form_name in form_tables:
            marker.setdefault(form_name, 0)
    except botocore.exceptions.ClientError:
        # No marker object in S3 yet: first run for this export codename.
        logger.info("No db marker found at S3")
        marker = {form_name: 0 for form_name in form_tables}
    return marker
def update_last_read_row_marker(marker, marker_aws_filename):
    """Persist the marker dict to the local cache file and upload it to S3."""
    with open(DB_MARKER_FILEPATH, 'w') as marker_file:
        json.dump(marker, marker_file)
    bucket = 'meerkat-consul-db-markers'
    s3.meta.client.upload_file(DB_MARKER_FILEPATH, bucket, marker_aws_filename)
def get_export_codename(argv):
    """Derive the S3 marker filename from command line arguments.

    Args:
        argv: sys.argv-style list; argv[1] (when present) names the export.

    Returns:
        "<argv[1]>.json", or a fallback name when no argument was given.
    """
    # Bug fix: the old check (len(argv) < 1) still let argv == ['prog']
    # through to argv[1], raising IndexError. We need at least two entries.
    if len(argv) < 2:
        return 'unknown-test-run.json'
    return f"{argv[1]}.json"
def __export_form(form_name, marker, marker_aws_filename, session, table):
    """Stream all unsent rows of one form table to consul as dhis2 events.

    Args:
        form_name: name of the form being exported.
        marker: dict of form name -> id of the last exported row; updated
            in place as rows are sent.
        marker_aws_filename: S3 key under which the marker is persisted.
        session: SQLAlchemy session used to read the rows.
        table: ORM class of the form's table.
    """
    last_read_id = marker[form_name]
    # NOTE(review): the filter uses >=, so the row with id == last_read_id is
    # re-sent on every resumed run — confirm the receiver deduplicates.
    q = session.query(table).order_by(table.id).filter(table.id >= last_read_id).yield_per(2000)
    logger.info(f"There are {q.count()} records")
    for i, row in enumerate(q):
        consul_client.send_dhis2_events(row.uuid, row.data, form_name, abacus_auth_token(), force=True)
        marker[form_name] = row.id
        # Checkpoint the marker and throttle the stream every 100 rows.
        if i != 0 and i % 100 == 0:
            update_last_read_row_marker(marker, marker_aws_filename=marker_aws_filename)
            logger.info(f"{form_name}: send {i} records.")
            sleep(20)
    # Persist the final position once the whole table has been sent.
    update_last_read_row_marker(marker, marker_aws_filename=marker_aws_filename)
def work(argv):
    """Run the full consul export: every configured form, then a flush.

    Args:
        argv: sys.argv-style list; argv[1] (when present) names the export
            and thereby the S3 marker file used to resume previous runs.
    """
    set_logging_level()
    global form_tables
    form_tables = __get_form_tables()
    # Bail out early if consul never becomes reachable.
    if not consul_client.wait_for_consul_start():
        logger.error("Failed to get a response from consul")
        return
    engine, session = get_db_engine()
    marker_aws_filename = get_export_codename(argv)
    logger.info("Running the export for %s", marker_aws_filename)
    marker = get_last_read_row_marker(marker_aws_filename)
    for form_name, table in form_tables.items():
        logger.info("Exporting form %s started.", form_name)
        __export_form(form_name, marker, marker_aws_filename, session, table)
        logger.info("Exporting form %s finished.", form_name)
    # Push any events still buffered on the consul side.
    consul_client.flush_dhis2_events(abacus_auth_token())
def celery_trigger(param_config):
    """Celery entry point: install the given config, then run the export."""
    global config
    config = param_config
    environment = 'production' if config.PRODUCTION else 'development'
    country_name = config.country_config['country_name']
    work(['', f"{country_name}-{environment}"])
if __name__ == '__main__':
    # Run as a script: use the real meerkat config and forward CLI arguments.
    from meerkat_abacus.config import config as meerkat_config
    config = meerkat_config
    argv = sys.argv
work(argv) | {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,966 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/util/fake_data_to_form.py | import pyxform
from meerkat_abacus.config import config
country_config = config.country_config
from pyxform import builder, xls2json
from pyxform.utils import has_external_choices, sheet_to_csv
# For each form configured with fake data, build a minimal XLSForm-style
# survey definition and write it out as an ODK XForm XML file.
for form in country_config["fake_data"].keys():
    json_survey = {
        'type': 'survey', 'name': form, 'title': form, 'id_string': form, 'sms_keyword': 'sample', 'default_language': 'default', 'children': []
    }
    # Maps group name -> that group's children list inside json_survey.
    groups = {}
    # Standard ODK metadata fields expected on every form.
    for field in ["start", "end", "today", "deviceid"]:
        json_survey["children"].append(
            {'hint': 'hint',
             'type': "text",
             'name': field,
             'label': 'Label'})
    for field, type_info in country_config["fake_data"][form].items():
        # Only integer fields keep their type; everything else becomes text.
        # (next(iter(...)) replaces list(type_info.keys())[0].)
        ty = "integer" if next(iter(type_info)) == "integer" else "text"
        if "./" in field:
            # "group./field" notation: place the field inside a form group.
            group_name, field_name = field.split("./")
            if group_name not in groups:
                # First field of this group: create the group node.
                json_survey["children"].append(
                    {'control': {'appearance': 'field-list'},
                     'type': 'group',
                     'name': group_name,
                     'label': 'A group',
                     'children': [
                         {'hint': 'hint',
                          'type': ty,
                          'name': field_name,
                          'label': 'Label'}
                     ]
                     }
                )
                groups[group_name] = json_survey["children"][-1]["children"]
            else:
                groups[group_name].append(
                    {'hint': 'hint',
                     'type': ty,
                     'name': field_name,
                     'label': 'Label'}
                )
        else:
            json_survey["children"].append(
                {'hint': 'hint',
                 'type': ty,
                 'name': field,
                 'label': 'Label'})
    survey = builder.create_survey_element_from_dict(json_survey)
    # Setting validate to False skips ODK Validate, which would require
    # launching a java subprocess.
    survey.print_xform_to_file(
        form + ".xml", validate=False, warnings=True)
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,967 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/util/epi_week.py | from collections import defaultdict
from datetime import datetime, timedelta
from meerkat_abacus.config import config
country_config = config.country_config
def __handle_epi_week_53(epi_year,
                         epi_week_53_strategy=country_config.get("epi_week_53_strategy",
                                                                 "leave_as_is")):
    """Resolve epi week 53 according to the configured strategy.

    Returns an (epi_year, epi_week) tuple. An unknown strategy raises
    KeyError, matching the previous lookup-table behaviour.
    """
    if epi_week_53_strategy == "include_in_52":
        return epi_year, 52
    if epi_week_53_strategy == "include_in_1":
        return epi_year + 1, 1
    if epi_week_53_strategy == "leave_as_is":
        return epi_year, 53
    raise KeyError(epi_week_53_strategy)
def epi_week_for_date(date, param_config=country_config):
    """
    Calculate the epi week for a given date.

    The returned epi week number is in the range 1..53. Week 53 covers dates
    between the end of the 52nd week of the current year and the start of the
    1st epi week of the next year; how it is reported is decided by the
    configured "epi_week_53_strategy".

    Args:
        date: datetime to classify (any tzinfo is stripped before comparing).
        param_config: country config providing "epi_week" and, optionally,
            "epi_week_53_strategy".

    Returns:
        Tuple (epi_year, epi_week).
    """
    # We don't support timezone info in date comparison
    # TODO: .replace(tzinfo=None) should be moved to a common meerkat dateparser
    date = date.replace(tzinfo=None)
    _epi_config = param_config["epi_week"]
    _epi_week_53_strategy=param_config.get("epi_week_53_strategy",
                                           "leave_as_is")
    _epi_year_start_date = epi_year_start_date(date, epi_config=_epi_config)
    _epi_year = epi_year_by_date(date, epi_config=_epi_config)
    # Whole weeks elapsed since the epi year started, 1-based.
    _epi_week_number = (date - _epi_year_start_date).days // 7 + 1
    # Week 0 (date before this year's start) and week 53 both map onto the
    # configured week-53 strategy.
    if _epi_week_number in [0, 53]:
        _epi_year, _epi_week_number = __handle_epi_week_53(epi_year=_epi_year,
                                                           epi_week_53_strategy=_epi_week_53_strategy)
    return _epi_year, _epi_week_number
def epi_year_start_date(date, epi_config=country_config["epi_week"]):
    """
    Get the first day of epi week 1 for the year including the given date.

    if epi_config == "international" epi week 1 starts on the 1st of January
    if epi_config == "day:<weekday>" the first epi week starts on the first
        such weekday on or after the 1st of January, where <weekday> is an
        integer with Monday == 0, Tuesday == 1, etc.
    if epi_config is a dict one can specify custom start dates for epi years
    e.g.
    "epi_week": {
        2011: datetime.datetime(2011, 1, 2),
        2012: datetime.datetime(2011, 12, 31)
    }

    Args:
        date: date for which to return the start of epi year
        epi_config: how epi-weeks are calculated
    Returns:
        start_date: date of start of epi week 1 which includes provided date
    """
    if epi_config == "international":
        return datetime(date.year, 1, 1)
    elif "day" in epi_config:
        # Consistency fix: this weekday arithmetic was duplicated inline;
        # delegate to the shared helper used by epi_year_start_date_by_year.
        return __epi_year_start_date_for_weekday_config(date.year, epi_config)
    elif isinstance(epi_config, dict):
        _epi_year, _epi_year_start_date = __get_epi_week_for_custom_config(date, epi_config)
        return _epi_year_start_date
    else:
        return datetime(date.year, 1, 1)
def epi_year_start_date_by_year(year, epi_config=country_config["epi_week"]):
    """
    Get the first day of epi week 1 for the given year.

    "international" starts epi week 1 on the 1st of January; "day:<weekday>"
    starts it on the first such weekday on or after the 1st of January
    (Monday == 0, Tuesday == 1, ...); a dict maps each year to an explicit
    start datetime, e.g.
    {2011: datetime.datetime(2011, 1, 2), 2012: datetime.datetime(2011, 12, 31)}.

    Args:
        year: year for which to return the start of epi year
        epi_config: how epi-weeks are calculated
    Returns:
        start_date: date of start of epi week 1 in provided year
    """
    if epi_config == "international":
        return datetime(year, 1, 1)
    if "day" in epi_config:
        return __epi_year_start_date_for_weekday_config(year, epi_config)
    if isinstance(epi_config, dict):
        return epi_config[year]
    return datetime(year, 1, 1)
def epi_year_by_date(date, epi_config=country_config["epi_week"]):
    """
    Calculates the epi year for the provided date.

    :param date: date to calculate the epi year for
    :param epi_config: epi year computation logic, "international", "day:X"
        or a custom dict.
        if epi_config == "international" epi week 1 starts on the 1st of January
        if epi_config == "day:<weekday>" the first epi week starts on the first
            such weekday on or after the 1st of January, where <weekday> is an
            integer with Monday == 0, Tuesday == 1, etc.
        if epi_config is a dict one can specify custom start dates for epi years
        e.g.
        "epi_week": {
            2011: datetime.datetime(2011, 1, 2),
            2012: datetime.datetime(2011, 12, 31)
        }
    :return: epi year
    """
    if isinstance(epi_config, dict):
        # Custom config: the epi year is whichever configured start date
        # most recently precedes the given date.
        _epi_year, _epi_year_start_date = __get_epi_week_for_custom_config(date, epi_config)
        return _epi_year
    elif isinstance(epi_config, str) and "day:" in epi_config:
        year = date.year
        _epi_year_start_date = __epi_year_start_date_for_weekday_config(year, epi_config)
        # Dates before this calendar year's epi start still belong to the
        # previous epi year.
        if date < _epi_year_start_date:
            return year - 1
        else:
            return year
    else:
        # "international" and any unrecognised config: calendar year.
        return date.year
def epi_week_start_date(year, epi_week):
    """
    Calculates the start of an epi week in a given year.

    Args:
        year: year
        epi_week: epi week number (1-based)
    Returns:
        start-date: datetime
    """
    week_offset = timedelta(weeks=int(epi_week) - 1)
    return epi_year_start_date_by_year(int(year)) + week_offset
def __epi_year_start_date_for_weekday_config(year, weekday_config):
    """Return the first occurrence of the configured weekday in *year*.

    *weekday_config* has the form "day:<n>" where n == 0 is Monday.
    """
    target_weekday = int(weekday_config.split(":")[1])
    jan_first = datetime(year, 1, 1)
    # Days to move forward from Jan 1 to reach the target weekday (0..6);
    # the modulo is equivalent to the old conditional "+ 7" adjustment.
    days_ahead = (target_weekday - jan_first.weekday()) % 7
    return jan_first + timedelta(days=days_ahead)
def __get_epi_week_for_custom_config(date, dict_config):
    """Find the configured epi year containing *date*.

    Walks the custom {year: start_datetime} mapping from the newest epi year
    backwards and returns the first (year, start_datetime) pair whose start
    is on or before *date*.

    Raises:
        ValueError: when *date* precedes every configured start date.
    """
    for year, start in sorted(dict_config.items(), reverse=True):
        if date >= start:
            return year, start
    raise ValueError("Could not compute epi year for date {!r}".format(date))
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,968 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/util/create_fake_data.py | """
Functionality to create fake data
"""
import random
import datetime
import uuid
from meerkat_abacus import util
from meerkat_abacus import model
from meerkat_abacus import logger
random.seed(1)
def get_value(field, data):
    """
    Takes a field and returns a random value for it.

    A field is a dict with a single key naming the generation method and a
    value holding that method's argument, e.g. field = {"one": ["A", "B", "C"]}
    chooses one of A, B or C at random.

    The available methods are:
       one: choose one of the items in the list
       integer: random integer in [lower, upper]
       multiple: comma-joined random subset of the list
       multiple-spaces: space-joined random subset of the list
       patient_id: random integer in [0, 10000]
       range: random float in [lower, upper]
       date: ISO date within the last 150 days ("age": last 80 years)
       data: one random value from data[argument]

    Args:
        field: a field definition dict
        data: data to be used for the "data" field type

    Returns:
        value: a random value for the field, or None for an unknown field
            type or a missing/empty "data" key
    """
    field_type = list(field)[0]
    argument = field[field_type]
    # Default covers unknown field types AND the missing-"data"-key path.
    # Bug fix: previously a "data" field whose key was absent from `data`
    # left `value` unbound and `return value` raised UnboundLocalError.
    value = None
    if field_type == "integer":
        # (Renamed from the old misleading `upper, lower = argument`; the
        # call is unchanged: randint(argument[0], argument[1]).)
        lower, upper = argument
        value = random.randint(lower, upper)
    elif field_type == "one":
        value = random.sample(argument, 1)[0]
    elif field_type == "multiple":
        number_of_options = random.randint(1, len(argument))
        value = ",".join(random.sample(argument, number_of_options))
    elif field_type == "multiple-spaces":
        number_of_options = random.randint(1, len(argument))
        value = " ".join(random.sample(argument, number_of_options))
    elif field_type == "patient_id":
        value = random.randint(0, 10000)
    elif field_type == "range":
        lower, upper = argument
        value = random.uniform(lower, upper)
    elif field_type == "date":
        now = datetime.datetime.now()
        start_offset = 150
        if argument == "age":
            start_offset = 365 * 80
        start = now - datetime.timedelta(days=start_offset)
        total_days = (now - start).days
        date = start + datetime.timedelta(
            days=random.uniform(0, total_days))
        # Truncate to midnight so the value looks like a plain date.
        value = date.replace(hour=0,
                             second=0,
                             minute=0,
                             microsecond=0).isoformat()
    elif field_type == "data":
        if argument in data.keys():
            if len(data[argument]) == 0:
                value = None
            else:
                value = random.sample(data[argument], 1)[0]
        else:
            print("{} not in data".format(argument))
    return value
def create_form(fields, data=None, N=500, odk=True, dates_is_now=False):
    """
    Creates a list of N records with random data for the given fields.

    The field types are:
    {"integer": [lower, upper]}
        random int between lower and upper
    {"one": ["choice1", "choice2", ....]}
        one random choice from the list
    {"multiple": ["choice1", "choice2", ....]}
        a random subset of choices (comma-joined)
    {"data": "key"}
        a random choice from data[key]
    {"patient_id": "field;condition"}
        a patient id; kept unique among rows where row[field] == condition

    Args:
        fields: dict of field name -> field specification
        data: data from other forms (e.g. "deviceids", "uuids" lists)
        N: number of rows to generate
        odk: Does the form come from odk; if True the special columns
            deviceid, index, meta/instanceID, start, end and
            SubmissionDate are added to every row
        dates_is_now: if True, start/end/SubmissionDate are ~now rather
            than spread over the last 150 days
    Returns:
        list_of_records(list): list of dicts with data
    """
    logger.debug("Creating fields: " + str(fields))
    logger.debug("number of records: " + str(N))
    list_of_records = []
    for i in range(N):
        row = {}
        # NOTE(review): unique_ids is re-created for every row, so the
        # uniqueness check below only sees ids from the current row —
        # possibly intended to live outside the loop; confirm.
        unique_ids = {}
        # First pass: generate a value for every ordinary field.
        for field_name in fields.keys():
            if field_name != "deviceids": # We deal with deviceid in the odk part below
                value = get_value(fields[field_name], data)
                row[field_name] = value
        # Second pass: apply the "patient_id" uniqueness handling.
        for field_name in fields.keys():
            if field_name != "deviceids" and list(fields[field_name].keys())[0] == "patient_id":
                unique_ids.setdefault(field_name, list())
                # Spec format is "<field>;<value>": ids are kept unique
                # while row[<field>] == <value>.
                unique_field, unique_condition = fields[field_name]["patient_id"].split(";")
                if row[unique_field] == unique_condition:
                    current_id = row[field_name]
                    while current_id in unique_ids[field_name]:
                        current_id = random.randint(0, 100000)
                    row[field_name] = current_id
                    unique_ids[field_name].append(row[field_name])
                else:
                    # Reuse a previously seen id when possible, otherwise
                    # pick a fresh random one.
                    if field_name in unique_ids and len(unique_ids[field_name]) > 1:
                        row[field_name] = random.sample(unique_ids[field_name], 1)[0]
                    else:
                        row[field_name] = random.randint(0, 10000)
        if odk:
            # If we are creating fake data for an odk form, we want to add a number of special fields
            if "deviceids" in data.keys():
                row["deviceid"] = random.sample(data["deviceids"],
                                                1)[0]
            else:
                print("No deviceids given for an odk form")
            row["index"] = i
            row["meta/instanceID"] = "uuid:" + str(uuid.uuid4())
            now = datetime.datetime.now()
            if dates_is_now:
                start = now - datetime.timedelta(minutes=1)
                end = now
                submission_date = now
            else:
                # Pick random dates so that start <= end <= submission <= now,
                # with start spread over the last 150 days.
                start = now - datetime.timedelta(days=150)
                total_days = (now - start).days
                start = start + datetime.timedelta(
                    days=random.uniform(0, total_days))
                end_total_days = (now - start).days
                end = start + datetime.timedelta(
                    days=random.uniform(0, end_total_days))
                submission_days = (now - end).days
                submission_date = end + datetime.timedelta(
                    days=random.uniform(0, submission_days))
            row["end"] = end.isoformat()
            row["start"] = start.isoformat()
            row["SubmissionDate"] = submission_date.isoformat()
        list_of_records.append(row)
    return list_of_records
def get_new_fake_data(form, session, N, param_config=None, dates_is_now=False):
    """
    Generate N new fake records for one form.

    Args:
        form: name of the form to generate data for
        session: SQLAlchemy session
        N: number of records to generate
        param_config: application config (provides country_config)
        dates_is_now: passed to create_form; if True, generated dates
            are ~now instead of spread over the last 150 days

    Returns:
        list of (record, uuid) tuples, where uuid is the record's
        meta/instanceID; empty if the form has no fake_data config
    """
    logger.debug("fake data")
    deviceids = util.get_deviceids(session, case_report=True)
    logger.debug("Processing form: %s", form)
    fake_data_config = param_config.country_config["fake_data"]
    if form not in fake_data_config:
        return []
    if "deviceids" in fake_data_config[form]:
        # This is a special way to limit the deviceids for a form in
        # the config file
        form_deviceids = fake_data_config[form]["deviceids"]
    else:
        form_deviceids = deviceids
    alert_ids = []
    # Collect alert ids from already-imported forms for fields configured
    # as {"data": "uuids", "from_form": <form>}.
    # Bug fix: iterate the field *specifications* (.values()), not the
    # field names; the original iterated the dict's keys, so a field-name
    # string would be indexed with value["data"] and raise TypeError.
    for value in fake_data_config[form].values():
        if "data" in value and value["data"] == "uuids" and "from_form" in value:
            from_form = value["from_form"]
            table = model.form_tables(param_config=param_config)[from_form]
            uuids = [r[0] for r in session.query(table.uuid).all()]
            for row in uuids:
                # The alert id is the last alert_id_length characters of
                # the source record's uuid.
                alert_ids.append(row[-param_config.country_config["alert_id_length"]:])
    data = create_form(
        fields=fake_data_config[form],
        data={"deviceids": form_deviceids,
              "uuids": alert_ids},
        N=N,
        dates_is_now=dates_is_now)
    return [(row, row["meta/instanceID"]) for row in data]
def create_fake_data(session, config, N=500, append=False,
                     from_files=False,
                     write_to="file"):
    """
    Creates fake data for each form and writes it to a csv file or the
    local database. We make sure that the forms have deviceids that
    match the imported locations.

    For the case report forms we save the X last characters of
    meta/instanceID to use as alert_ids for the alert_form,
    where X is the alert_id_length from the config file.

    Args:
        session: SQLAlchemy session
        config: application config object
        N: number of rows to create for each form (default=500)
        append: If we should append the new fake data or write
            over the old (default=False)
        from_files: whether to add data from the manual test case
            files defined in country_config
        write_to: "file" to write csv files, "local_db" to write the
            records to the local database
    """
    logger.debug("fake data")
    deviceids = util.get_deviceids(session, case_report=True)
    alert_ids = []
    country_config = config.country_config
    forms = country_config["tables"]
    # Make sure the case report form is handled before the alert form
    for form in forms:
        logger.debug("Processing form: %s", form)
        file_name = config.data_directory + form + ".csv"
        current_form = []
        if form not in country_config["fake_data"]:
            continue
        if append:
            # Keep the previously written rows and add to them.
            current_form = util.read_csv(file_name)
        if "deviceids" in country_config["fake_data"][form]:
            # This is a special way to limit the deviceids for a form in
            # the config file
            form_deviceids = country_config["fake_data"][form]["deviceids"]
        else:
            form_deviceids = deviceids
        manual_test_data = {}
        if from_files and form in country_config.get("manual_test_data", {}).keys():
            current_directory = os.path.dirname(os.path.realpath(__file__))
            for fake_data_file in country_config.get("manual_test_data", {})[form]:
                logger.debug("Adding test data from file: %s.csv", fake_data_file)
                manual_test_data[fake_data_file] = util.read_csv(
                    current_directory + '/test/test_data/test_cases/' +
                    fake_data_file + ".csv")
        generated_data = create_form(
            country_config["fake_data"][form],
            data={"deviceids": form_deviceids,
                  "uuids": alert_ids},
            N=N)
        if "case" in form:
            # Remember this form's instance-id suffixes so later (alert)
            # forms can reference them via the "uuids" data key.
            alert_ids = []
            for row in generated_data:
                alert_ids.append(row["meta/instanceID"][-country_config[
                    "alert_id_length"]:])
        manual_test_data_list = []
        for manual_test_data_file in manual_test_data.keys():
            manual_test_data_list += list(manual_test_data[manual_test_data_file])
        # Fill in columns present in the generated data but missing from
        # the manual test case records.
        for row in manual_test_data_list:
            if len(generated_data) > 0:
                for key in generated_data[0].keys():
                    if key not in row:
                        row[key] = None
        data_to_write = list(current_form) + list(manual_test_data_list) + generated_data
        if write_to == "file":
            util.write_csv(data_to_write, file_name)
        elif write_to == "local_db":
            # Bug fix: this branch referenced the undefined name
            # `param_config` (the parameter is `config`) and raised
            # NameError whenever write_to == "local_db".
            util.write_to_db(data_to_write, form,
                             config.PERSISTENT_DATABASE_URL,
                             param_config=config)
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,969 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py | import unittest
from unittest.mock import patch
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from datetime import datetime
from meerkat_abacus.config import config
from meerkat_abacus import model
from meerkat_abacus.pipeline_worker.process_steps import initial_visit_control
from meerkat_abacus.consumer.database_setup import create_db
class TestInitialVisitControl(unittest.TestCase):
    """Integration tests for the InitialVisitControl pipeline step.

    Re-creates the configured database for every test so the step can
    look up previously stored visits.
    """
    def setUp(self):
        """Drop/re-create the DB, create the form tables and open a session."""
        create_db(config.DATABASE_URL, drop=True)
        engine = create_engine(config.DATABASE_URL)
        # Registers the per-form tables on model.Base before create_all.
        model.form_tables(config)
        model.Base.metadata.create_all(engine)
        self.engine = create_engine(config.DATABASE_URL)
        Session = sessionmaker(bind=self.engine)
        self.session = Session()
    def test_initial_visit_control(self):
        """Only matching later visits on demo_case are rewritten to "return"."""
        # Configure visit control for demo_case only: visits match when
        # patientid + icd_code agree and module == "ncd".
        config.country_config["initial_visit_control"] = {
            "demo_case": {
                "identifier_key_list": ["patientid", "icd_code"],
                "visit_type_key": "intro./visit",
                "visit_date_key": "visit_date",
                "module_key": "module",
                "module_value": "ncd"
            }
        }
        ivc = initial_visit_control.InitialVisitControl(config, self.session)
        # Seed the DB with one existing "new" visit (uuid "a", id "1").
        existing_data = [{
            "uuid": "a",
            "data": {
                "visit_date": "2017-01-14T05:38:33.482144",
                "icd_code": "A01",
                "patientid": "1",
                "module": "ncd",
                "intro./visit": "new",
                "id": "1"
            }
        }]
        table = model.form_tables(config)["demo_case"]
        con = self.engine.connect()
        con.execute(table.__table__.insert(), existing_data)
        con.close()
        # Later visit by the same patient/diagnosis — should be demoted.
        new_data = {
            "visit_date": "2017-02-14T05:38:33.482144",
            "icd_code": "A01",
            "patientid": "1",
            "module": "ncd",
            "intro./visit": "new",
            "id": "2"
        }
        # Same patient/diagnosis but wrong module — should pass untouched.
        new_data_wrong_module = {
            "visit_date": "2017-02-14T05:38:33.482144",
            "icd_code": "A01",
            "patientid": "1",
            "module": "cd",
            "intro./visit": "new"
        }
        # Different patient — no earlier visit matches.
        new_data_different_pid = {
            "visit_date": "2017-02-14T05:38:33.482144",
            "icd_code": "A01",
            "patientid": "2",
            "module": "ncd",
            "intro./visit": "new"
        }
        # Different diagnosis — no earlier visit matches.
        new_data_different_icd = {
            "visit_date": "2017-02-14T05:38:33.482144",
            "icd_code": "A02",
            "patientid": "1",
            "module": "ncd",
            "intro./visit": "new"
        }
        # A form without initial_visit_control config passes through as-is.
        result = ivc.run("demo_register", new_data)[0]
        self.assertEqual(result["form"], "demo_register")
        self.assertEqual(result["data"], new_data)
        # Non-matching records each yield a single, unmodified record.
        result = ivc.run("demo_case", new_data_wrong_module)
        self.assertEqual(len(result), 1)
        result = ivc.run("demo_case", new_data_different_pid)
        self.assertEqual(len(result), 1)
        result = ivc.run("demo_case", new_data_different_icd)
        self.assertEqual(len(result), 1)
        # Matching later visit: two records come back and the later visit
        # (id "2") is rewritten to a "return" visit.
        result = ivc.run("demo_case", new_data)
        self.assertEqual(len(result), 2)
        self.assertEqual(result[1]["data"]["intro./visit"], "return")
        self.assertEqual(result[1]["data"]["id"], "2")
        # Matching *earlier* visit: the stored later visit (id "1") is the
        # one demoted to "return" instead.
        new_data = {
            "visit_date": "2017-01-01T05:38:33.482144",
            "icd_code": "A01",
            "patientid": "1",
            "module": "ncd",
            "intro./visit": "new",
            "id": "2"
        }
        result = ivc.run("demo_case", new_data)
        self.assertEqual(len(result), 2)
        self.assertEqual(result[1]["data"]["intro./visit"], "return")
        self.assertEqual(result[1]["data"]["id"], "1")
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,970 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/consumer/tests/__init__.py | """
Meerkat Abacus Test
Unit tests for Meerkat Abacus
"""
from unittest import mock
import random
import unittest
from meerkat_abacus.consumer import get_data
from meerkat_abacus.config import config as param_config
class TestConsumer(unittest.TestCase):
    """Unit tests for the consumer's get_data helpers."""
    def setUp(self):
        pass
    def tearDown(self):
        pass
    @mock.patch('meerkat_abacus.consumer.get_data.inspect')
    def test_read_data(self, inspect_mock):
        """
        Tests that read_stationary_data gets data from
        a get function and sends appropriate process_data.delay calls
        """
        # Make celery inspect report an idle worker so the reader never
        # waits for reserved tasks to drain.
        inspect_mock_rv = mock.MagicMock()
        inspect_mock_rv.reserved = mock.MagicMock(return_value={"celery@abacus": []})
        inspect_mock.return_value = inspect_mock_rv
        param_config.country_config["tables"] = ["table1", "table2"]
        celery_app_mock = mock.MagicMock()
        # yield_data_function produces 100 records per table, sent in
        # batches of N_send_to_task=9.
        numbers = get_data.read_stationary_data(yield_data_function,
            param_config, celery_app_mock, N_send_to_task=9)
        self.assertEqual(numbers["table1"], 99)
        self.assertEqual(numbers["table2"], 99)
        celery_app_mock.send_task.assert_called()
        self.assertEqual(celery_app_mock.send_task.call_count, 24)
        # 24 = 2 * 12. We get 11 normal calls and one extra for the last record
def yield_data_function(form, param_config=None, N=100):
    """Yield N fake records, each a dict with random "a" and "b" values."""
    for _ in range(N):
        record = {"a": random.random(),
                  "b": random.random()}
        yield record
# Allow running this test module directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,971 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/util/datetime_helper.py | import datetime
# Bug fix: the format string combined %H (24-hour) with %p (AM/PM), so
# strptime parsed "05:38:33 PM" as hour 5 and silently ignored the PM
# marker. The matching PostgreSQL pattern below ("HH:MI:SS AM") is a
# 12-hour clock, so the Python side must use %I.
SUBMISSION_DATE_FORMAT = "%b %d, %Y %I:%M:%S %p"
# to_char()/to_timestamp() patterns mirroring the formats above.
PSQL_SUBMISSION_DATE_FORMAT = "Mon DD, YYYY HH:MI:SS AM"
PSQL_VISIT_DATE_FORMAT = "Mon DD, YYYY"
def strptime(date_string):
return datetime.datetime.strptime(date_string, SUBMISSION_DATE_FORMAT) | {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": 
["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", 
"/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,972 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/pipeline_worker/process_steps/write_to_db.py | from meerkat_abacus.pipeline_worker.process_steps import ProcessingStep
from meerkat_abacus import model
class WriteToDb(ProcessingStep):
    """Pipeline step that batches processed records and persists them.

    Rows accumulate in ``self.data_to_write`` / ``self.data_to_delete``
    during ``run`` and are flushed to the database in one pass by
    ``end_step``.
    """
    def __init__(self, param_config, session):
        super().__init__()
        self.step_name = "write_to_db"
        # Step configuration:
        #   delete        - per-form column used to scope batched deletes
        #   form_to_table - maps a form name to its SQLAlchemy model
        config = {
            "delete": {"data": "type",
                       "disregardedData": "type"},
            "form_to_table": {
                "data": model.Data,
                "disregardedData": model.DisregardedData
            },
            "country_config": param_config.country_config
        }
        # Raw-data forms get their own model tables appended.
        config["form_to_table"].update(model.form_tables(param_config))
        config["raw_data_forms"] = param_config.country_config["tables"]
        self.config = config
        self.session = session
        # Pending rows, keyed by model table; flushed by end_step().
        self.data_to_write = {}
        self.data_to_delete = {}
    @property
    def engine(self):
        # DB engine; the setter mirrors it into self.config for end_step().
        return self._engine
    @engine.setter
    def engine(self, new_engine):
        self._engine = new_engine
        self._update_engine()
    def _update_engine(self):
        # Keep the config's engine reference in sync with the property.
        self.config['engine'] = self._engine
    def end_step(self, n):
        """Flush batched deletes then inserts, and record monitoring data."""
        conn = self.config["engine"].connect()
        # Deletes first: remove old rows matching (uuid, condition column)
        # before re-inserting the fresh versions.
        for table in self.data_to_delete.keys():
            for condition, uuids in self.data_to_delete[table].items():
                conn.execute(table.__table__.delete().where(
                    table.__table__.c.uuid.in_(uuids)).where(
                        getattr(
                            table.__table__.c, self.config["delete"][table]) ==
                        condition))
        for table in self.data_to_write.keys():
            conn.execute(table.__table__.insert(), self.data_to_write[table])
        self.data_to_write = {}
        self.data_to_delete = {}
        super(WriteToDb, self).end_step(n)
    def run(self, form, data):
        """Queue one record for writing (and, when relevant, deleting).

        Raw-data forms are wrapped as {"uuid": ..., "data": ...}; other
        forms are written as-is. Returns the record unchanged so later
        steps can keep processing it.
        """
        table = self.config["form_to_table"][form]
        if form in self.config["raw_data_forms"]:
            insert_data = {"uuid": get_uuid(data, form, self.config),
                           "data": data}
        else:
            insert_data = data
        if form in self.config["delete"]:
            uuid = data["uuid"]
            other_condition = data[self.config["delete"][form]]
            # Remember the delete column per table so end_step can use it.
            self.config["delete"][table] = self.config["delete"][form]
            self.data_to_delete.setdefault(table, {})
            self.data_to_delete[table].setdefault(other_condition, [])
            self.data_to_delete[table][other_condition].append(uuid)
        if data:
            # Drop any stale autoincrement id so the DB assigns a fresh one.
            if "id" in data:
                del data["id"]
            self.data_to_write.setdefault(table, [])
            self.data_to_write[table].append(insert_data)
        return [{"form": form,
                 "data": data}]
def get_uuid(data, form, config):
    """Return the record uuid for *form*.

    The uuid field defaults to "meta/instanceID" but can be overridden
    per form via config["country_config"]["tables_uuid"].
    """
    country_config = config["country_config"]
    field = "meta/instanceID"
    if "tables_uuid" in country_config:
        field = country_config["tables_uuid"].get(form, field)
    return data[field]
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,973 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/pipeline_worker/process_steps/__init__.py | from abc import abstractmethod
import cProfile, pstats, io
import datetime
from meerkat_abacus import model
from meerkat_abacus import logger
class ProcessingStep(object):
    """
    Base class for all ProcessingSteps.

    Subclasses implement :meth:`run`; the pipeline calls ``start_step``
    before a batch and ``end_step`` after, which records timing data via
    ``_write_monitoring_data``.
    """
    def __init__(self):
        self.step_name = "processing_step"  # overridden by subclasses
        self.session = None  # db session, set by subclass / pipeline
        self.start = None  # datetime when start_step() was called
        self.end = None  # datetime when end_step() was called
    def __repr__(self):
        return f'<{self.__class__}, step_name="{self.step_name}">'
    @property
    def duration(self):
        """Elapsed time of the step, or None if not both started and ended."""
        if not self.start or not self.end:
            return None
        return self.end - self.start
    @abstractmethod
    def run(self, form, data):
        """Process one record; return a list of {"form": ..., "data": ...}."""
        pass
    def start_step(self):
        """Record the step's start time."""
        self.start = datetime.datetime.now()
    def end_step(self, n):
        """Record the step's end time and persist monitoring data for *n* rows."""
        self.end = datetime.datetime.now()
        self._write_monitoring_data(n)
    def _write_monitoring_data(self, n=None):
        # Requires self.session to have been set by the subclass.
        monitoring = model.StepMonitoring(
            step=self.step_name,
            end=self.end,
            start=self.start,
            duration=self.duration.total_seconds(),
            n=n)
        self.session.add(monitoring)
        self.session.commit()
class DoNothing(ProcessingStep):
    """A pass-through step: forwards every record unchanged."""
    def __init__(self, session):
        super().__init__()
        self.step_name = "do_nothing"
        self.session = session
    def run(self, form, data):
        # Echo the record back in the shape the pipeline expects.
        return [{"form": form, "data": data}]
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,974 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/util/__init__.py | """
Various utility functions for meerkat abacus
"""
import csv
import itertools
import boto3
from xmljson import badgerfish as bf
from lxml.html import Element, tostring
import requests
from requests.auth import HTTPDigestAuth
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
from botocore.exceptions import ClientError
from datetime import datetime, timedelta
import pytz
from dateutil.parser import parse
from jinja2 import Environment, FileSystemLoader, select_autoescape
from meerkat_abacus.model import Locations, AggregationVariables, Devices, form_tables
from meerkat_abacus.config import config
from meerkat_abacus import logger
import meerkat_libs as libs
# Shortcut to the active country configuration.
country_config = config.country_config
# Alert messages are rendered with Jinja2, setup the Jinja2 env
# (created lazily by get_env() below).
env = None
# Translation settings; both fall back to defaults when the country
# configuration does not specify them.
language = country_config.get("language", 'en')
translation_dir = country_config.get("translation_dir", None)
translator = libs.get_translator(translation_dir, language)
def get_env(param_config=config):
    """Return the module-wide Jinja2 environment, creating it on first use.

    The environment loads templates from <config_directory>/templates/ and
    installs gettext translations from the module-level ``translator``.
    """
    global env
    if env:
        return env
    else:
        env = Environment(
            # Templates live under the deployment's config directory.
            loader=FileSystemLoader(param_config.config_directory + 'templates/'),
            # NOTE(review): 'jinja2.ext.autoescape' was removed in Jinja2 3.0;
            # this list assumes Jinja2 < 3 — confirm the pinned version.
            extensions=['jinja2.ext.i18n', 'jinja2.ext.autoescape'],
            autoescape=select_autoescape(['html'])
        )
        env.install_gettext_translations(translator)
        return env
def is_child(parent, child, locations):
    """Return True when *child* lies under *parent* in the location tree.

    Args:
        parent: parent location id
        child: child location id
        locations: all locations in a dict keyed by id

    Returns:
        bool: True if child equals parent, parent is the root (id 1), or
        walking the parent chain from child reaches parent.
    """
    parent_id = int(parent)
    current = int(child)
    if current == parent_id or parent_id == 1:
        return True
    # Walk up the ancestry until we hit the root.
    while current != 1:
        current = locations[current].parent_location
        if current == parent_id:
            return True
    return False
def get_db_engine(db_url=config.DATABASE_URL):
    """Create a SQLAlchemy engine and session for *db_url*.

    Returns:
        tuple: (engine, session)
    """
    engine = create_engine(db_url)
    session = sessionmaker(bind=engine)()
    return engine, session
def field_to_list(row, key):
    """Normalise row[key] into a list, in place.

    Semicolon-separated values take precedence over comma-separated ones;
    a plain value becomes a one-element list; falsy values are left alone.

    Args:
        row: row of data
        key: key of the field to normalise

    Returns:
        row: the (modified) row
    """
    value = row[key]
    if not value:
        return row
    for separator in (";", ","):
        if separator in value:
            row[key] = [part.strip() for part in value.split(separator)]
            return row
    row[key] = [value]
    return row
def get_links(file_path):
    """Read the links csv at *file_path* and index the rows.

    Returns:
        tuple: (links_by_type, links_by_name)
    """
    by_type = {}
    by_name = {}
    for link in read_csv(file_path):
        by_type.setdefault(link["type"], []).append(link)
        by_name[link["name"]] = link
    return by_type, by_name
def all_location_data(session):
    """Gather every piece of location data used by the pipeline.

    Args:
        session: db session

    Returns:
        tuple: (locations, locations_by_deviceid, zones, regions,
        districts, devices)
    """
    locations = get_locations(session)
    by_deviceid = get_locations_by_deviceid(session)
    zones, regions, districts = get_zones_regions_districts(session)
    device_tags = get_device_tags(session)
    return (locations, by_deviceid, zones, regions, districts, device_tags)
def get_variables(session):
    """Return all aggregation variables indexed by variable id.

    Args:
        session: db session

    Returns:
        dict: id -> AggregationVariables row
    """
    return {row.id: row for row in session.query(AggregationVariables)}
def get_device_tags(session):
    """Return a dict mapping each device_id to its tags.

    Args:
        session: db session

    Returns:
        dict: device_id -> tags
    """
    return {row.device_id: row.tags for row in session.query(Devices)}
def get_zones_regions_districts(session):
    """Split location ids by administrative level.

    Args:
        session: db session

    Returns:
        tuple: (zones, regions, districts)
    """
    locations = get_locations(session)
    zones = [i for i, loc in locations.items() if loc.level == "zone"]
    regions = [i for i, loc in locations.items() if loc.level == "region"]
    # Districts are only included when their parent is a known region.
    districts = [
        i for i, loc in locations.items()
        if loc.level == "district" and loc.parent_location in regions
    ]
    return zones, regions, districts
def get_locations_by_deviceid(session):
    """Return a dict mapping deviceid to location_id.

    A location may list several comma-separated deviceids; each of them
    maps to the same location.

    Args:
        session: db session

    Returns:
        dict: deviceid -> location_id
    """
    mapping = {}
    for loc_id, location in get_locations(session).items():
        if not location.deviceid:
            continue
        # split(",") also handles the single-deviceid case.
        for device_id in location.deviceid.split(","):
            mapping[device_id] = loc_id
    return mapping
def get_start_date_by_deviceid(session):
    """Return a dict mapping deviceid to its location's start_date.

    Args:
        session: db session

    Returns:
        dict: deviceid -> start_date
    """
    locations = get_locations(session)
    by_deviceid = get_locations_by_deviceid(session)
    return {
        device_id: locations[loc_id].start_date
        for device_id, loc_id in by_deviceid.items()
    }
def get_locations(session):
    """Return all locations indexed by location id.

    Args:
        session: db session

    Returns:
        dict: id -> Locations row
    """
    return {row.id: row for row in session.query(Locations)}
def get_deviceids(session, case_report=False):
    """Return a flat list of all known deviceids.

    Args:
        session: SQLAlchemy session
        case_report: when True, restrict to case-reporting clinics

    Returns:
        list: deviceids
    """
    query = session.query(Locations)
    if case_report:
        query = query.filter(Locations.case_report == 1)
    deviceids = []
    for location in query:
        if location.deviceid:
            # split(",") handles both single and comma-separated ids.
            deviceids.extend(location.deviceid.split(","))
    return deviceids
def write_csv(rows, file_path, mode='w'):
    """Write *rows* (a list of dicts) to a csv file.

    Args:
        rows: list of dicts with data
        file_path: path to write the file to
        mode: 'w' to (over)write a new file with a header row, 'a' to
              append without writing a header

    Columns are the sorted keys of the first row. Nothing is written when
    *rows* is empty.
    """
    # Only write if rows were inserted
    if not rows:
        return
    # newline='' is required by the csv module so the writer controls the
    # line terminators (avoids blank lines on \r\n platforms).
    with open(file_path, mode, newline='', encoding='utf-8') as f:
        columns = sorted(rows[0])
        writer = csv.DictWriter(f, columns)
        if mode == 'w':
            writer.writeheader()
        writer.writerows(rows)
def write_to_db(data, form, db_url, param_config=config):
    """Bulk-insert raw *data* rows for *form* into the database.

    Args:
        data: list of dicts with data
        form: form to write to
        db_url: which db url to use
        param_config: configuration object (defaults to the global config)
    """
    if not data:
        return
    # Per-form uuid field override, mirroring get_uuid's behaviour.
    uuid_field = "meta/instanceID"
    country_config = param_config.country_config
    if "tables_uuid" in country_config:
        uuid_field = country_config["tables_uuid"].get(form, uuid_field)
    rows = [{"data": d, "uuid": d[uuid_field]} for d in data]
    table = form_tables(param_config=param_config)[form]
    engine, session = get_db_engine(db_url)
    conn = engine.connect()
    conn.execute(table.__table__.insert(), rows)
    conn.close()
def get_exclusion_list(session, form):
    """Return the list of uuids excluded for *form*.

    Exclusion lists are csv files referenced from the country config.

    Args:
        session: db session
        form: which form to get the exclusion list for
    """
    exclusion_lists = config.country_config.get("exclusion_lists", {})
    excluded = []
    for filename in exclusion_lists.get(form, []):
        for row in read_csv(config.config_directory + filename):
            excluded.append(row["uuid"])
    return excluded
def read_csv_file(filename, param_config=config):
    """Yield rows of <data_directory>/<filename>.csv."""
    yield from read_csv(param_config.data_directory + filename + ".csv")
def get_data_from_rds_persistent_storage(form, param_config=config):
    """Yield the raw 'data' payload of every stored row for *form*."""
    engine, session = get_db_engine(param_config.PERSISTENT_DATABASE_URL)
    logger.info(session)
    query = session.query(form_tables(param_config=param_config)[form])
    # Stream in batches to keep memory bounded.
    for row in query.yield_per(1000).enable_eagerloads(False):
        yield row.__dict__['data']
def subscribe_to_sqs(sqs_endpoint, sqs_queue_name):
    """Connect to SQS and return (client, queue_url).

    The queue is created on demand when resolving its url fails.
    """
    logger.info("Connecting to SQS")
    client_kwargs = {"region_name": "eu-west-1"}
    if sqs_endpoint != 'DEFAULT':
        client_kwargs["endpoint_url"] = sqs_endpoint
    sqs_client = boto3.client('sqs', **client_kwargs)
    logger.info("Getting SQS url")
    try:
        queue_url = __get_queue_url(sqs_client, sqs_queue_name)
    except ClientError:
        logger.debug("Failed to connect to %s", sqs_queue_name)
        logger.info("Creating queue %s", sqs_queue_name)
        sqs_client.create_queue(QueueName=sqs_queue_name)
        queue_url = __get_queue_url(sqs_client, sqs_queue_name)
    logger.info("Subscribed to %s.", queue_url)
    return sqs_client, queue_url
def __get_queue_url(sqs_client, sqs_queue_name):
    """Resolve the url of *sqs_queue_name* via the SQS API."""
    response = sqs_client.get_queue_url(QueueName=sqs_queue_name)
    return response['QueueUrl']
def groupify(data):
    """Nest flat keys of the form "group./field" into sub-dicts.

    Keys without "./" are copied through unchanged.
    """
    nested = {}
    for key, value in data.items():
        if "./" in key:
            group, field = key.split("./")
            nested.setdefault(group, {})[field] = value
        else:
            nested[key] = value
    return nested
def submit_data_to_aggregate(data, form_id, aggregate_config):
    """Submit one record to an ODK Aggregate server; return the status code."""
    # Aggregate assigns its own instance id / submission date.
    data.pop("meta/instanceID", None)
    data.pop("SubmissionDate", None)
    grouped_json = groupify(data)
    grouped_json["@id"] = form_id
    xml_tree = bf.etree(grouped_json, root=Element(form_id))
    auth = HTTPDigestAuth(aggregate_config.get('aggregate_username', None),
                          aggregate_config.get('aggregate_password', None))
    aggregate_url = aggregate_config.get('aggregate_url', None)
    response = requests.post(
        aggregate_url + "/submission", auth=auth,
        files={
            "xml_submission_file": ("tmp.xml", tostring(xml_tree), "text/xml")
        })
    logger.info("Aggregate submission status code: " + str(response.status_code))
    return response.status_code
def read_csv(file_path):
    """
    Lazily read a csv file and yield each row as a dict.

    Args:
        file_path: path of the csv file to read.

    Yields:
        dict: one row per iteration, keyed by the csv header.
    """
    with open(file_path, "r", encoding='utf-8', errors="replace") as handle:
        yield from csv.DictReader(handle)
def create_topic_list(alert, locations, country_config=config.country_config):
    """
    Assemble the appropriate topic ID list for a given alert. Make sure the
    topic list includes all appropriate location levels from clinic to whole
    country.
    So for an alert with reason "rea_1", in country with prefix "null", from
    clinic "4" in district "3" in region "2" in country "1", we get a topic
    list that looks like:
    ['null-rea_1-4', 'null-rea_1-3', 'null-rea_1-2',
     'null-rea_1-1', 'null-allDis-4', 'null-allDis-3',
     'null-allDis-2', 'null-allDis-1']
    """
    prefixes = [country_config["messaging_topic_prefix"]]
    reasons = [alert["variables"]["alert_reason"], 'allDis']
    location_ids = [alert["clinic"], alert["region"], 1]
    # The district isn't stored in the alert model, so calculate it as the
    # parent of the clinic.
    district = locations[alert["clinic"]].parent_location
    if district != alert["region"]:
        location_ids.append(district)
    topics = [
        str(prefix) + "-" + str(location) + "-" + str(reason)
        for prefix, location, reason
        in itertools.product(prefixes, location_ids, reasons)
    ]
    logger.debug("Sending alert to topic list: {}".format(topics))
    return topics
def send_alert(alert_id, alert, variables, locations, param_config=config):
    """
    Assemble the alert message and send it using the hermes API.
    We need to send alerts to four topics to cover all the different possible
    subscriptions.
    There are:
    1-allDis for all locations and all diseases
    1-alert.reason for all locations and the specific disease
    alert.region-allDis for specific region and all diseases
    alert.region-alert.reason for specific region and specific disease

    Args:
        alert_id: identifier included in the message and subject line
        alert: the alert we need to send a message about
        variables: dict with variables
        locations: dict with locations
        param_config: configuration object (defaults to module config)
    """
    # Only message recent alerts; anything older than 7 days is skipped.
    # NOTE(review): alert["date"] is compared to naive datetime.now() —
    # confirm alert dates are stored naive/local, not UTC-aware.
    if alert["date"] > datetime.now() - timedelta(days=7):
        # List the possible strings that construct an alert sms message
        district = ""
        if alert["district"]:
            district = locations[alert["district"]].name

        # To display date-times as a local date string.
        def tostr(date):
            try:
                local_timezone = pytz.timezone(param_config.country_config["timezone"])
                utc_date = parse(date).replace(tzinfo=pytz.utc)
                local_date = utc_date.astimezone(local_timezone)
                return local_date.strftime("%H:%M %d %b %Y")
            except AttributeError:
                return "Not available"  # Catch if date not a date type

        # Assemble the data to be shown in the messsage
        data = {
            "date": alert["date"].strftime("%d %b %Y"),
            "received": tostr(alert["variables"].get('alert_received')),
            "submitted": tostr(alert["variables"].get('alert_submitted')),
            "clinic": locations[alert["clinic"]].name,
            "district": district,
            "region": locations[alert["region"]].name,
            "uuid": alert["uuid"],
            "alert_id": alert_id,
            "reason": variables[alert["variables"]["alert_reason"]].name
        }
        # Merge in raw alert variables; explicit keys above win on clash.
        data = {**alert["variables"], **data}
        # Get the message template to use
        template = variables[alert["variables"]['alert_reason']].alert_message
        if not template:
            template = "case"  # default to case message template
        # Create the alert messages using the Jinja2 templates
        text_template = get_env(param_config).get_template('alerts/{}/text'.format(template))
        text_message = text_template.render(data=data)
        sms_template = get_env(param_config).get_template('alerts/{}/sms'.format(template))
        sms_message = sms_template.render(data=data)
        html_template = get_env(param_config).get_template('alerts/{}/html'.format(template))
        html_message = html_template.render(data=data)
        # Select the correct communication medium using country configs
        medium_settings = dict(param_config.country_config.get(
            'alert_mediums',
            {}
        ))
        # Fall back to email+sms unless a reason-specific medium matches.
        medium = medium_settings.pop('DEFAULT', ['email', 'sms'])
        # Substring match: the configured code need only appear within
        # the alert reason; first match wins.
        for alert_code, alert_mediums in medium_settings.items():
            if alert_code in alert["variables"]["alert_reason"]:
                medium = alert_mediums
                break
        # Structure and send the hermes request
        data = {
            "from": param_config.country_config['messaging_sender'],
            "topics": create_topic_list(
                alert,
                locations,
                country_config=param_config.country_config
            ),
            "id": alert_id,
            "message": text_message,
            "sms-message": sms_message,
            "html-message": html_message,
            "subject": f"{translator.gettext('Public Health Surveillance Alerts')}: #{alert_id}",
            "medium": medium
        }
        logger.info("CREATED ALERT {}".format(data['message']))
        # messaging_silent suppresses the actual publish (dry-run mode).
        if not param_config.country_config["messaging_silent"]:
            libs.hermes('/publish', 'PUT', data, config=param_config)
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,975 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py | import unittest
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from datetime import datetime
from meerkat_abacus import model
from meerkat_abacus.pipeline_worker.process_steps import to_codes
from meerkat_abacus.consumer.database_setup import create_db
from meerkat_abacus.config import config
from geoalchemy2.shape import from_shape
from shapely.geometry import MultiPolygon
class TestToCode(unittest.TestCase):
    """Integration tests for the ToCodes pipeline step.

    Runs against a real database (config.DATABASE_URL): setUp drops and
    recreates the schema, so these tests must never point at live data.
    """
    def setUp(self):
        # Rebuild the database schema from scratch for each test run.
        create_db(config.DATABASE_URL, drop=True)
        engine = create_engine(config.DATABASE_URL)
        model.form_tables(config)
        model.Base.metadata.create_all(engine)
        self.engine = create_engine(config.DATABASE_URL)
        Session = sessionmaker(bind=self.engine)
        self.session = Session()
    def test_to_code(self):
        """End-to-end check of ToCodes.run on two case records."""
        # Aggregation variables covering the main matching methods:
        # not_null, match (raises an alert), sub_match, between (with
        # disregard=1) and a match on a linked alert_investigation form.
        variables = [
            model.AggregationVariables(
                id="var_1", method="not_null", db_column="index", condition="",
                category=[],
                type="case",
                form="demo_case"),
            model.AggregationVariables(
                id="var_2",
                method="match",
                db_column="column1",
                alert=1,
                type="case",
                category=[],
                alert_type="individual",
                condition="A",
                form="demo_case"),
            model.AggregationVariables(
                id="var_3",
                category=[],
                type="case",
                method="sub_match",
                db_column="column2",
                condition="B",
                form="demo_case"),
            model.AggregationVariables(
                id="var_4",
                category=[],
                method="between",
                type="case",
                calculation="column3",
                db_column="column3",
                condition="5,10",
                disregard=1,
                form="demo_case"),
            model.AggregationVariables(
                id="var_confirmed",
                category=[],
                method="match",
                multiple_link="first",
                type="case",
                calculation="column3",
                db_column="confirmed",
                condition="yes",
                form="alert_investigation")
        ]
        for v in variables:
            self.session.add(v)
        self.session.commit()
        # Location tree: country(1) -> regions(2, 3) -> districts(4, 5)
        # -> clinics(6, 7); clinic 8 hangs directly off region 2.
        locations = {1: model.Locations(name="Demo", id=1),
                     2: model.Locations(
                         name="Region 1", parent_location=1, id=2),
                     3: model.Locations(
                         name="Region 2", parent_location=1, id=3),
                     4: model.Locations(
                         name="District 1", parent_location=2,
                         level="district", id=4
                     ),
                     5: model.Locations(
                         name="District 2", parent_location=3,
                         level="district", id=5),
                     6: model.Locations(
                         name="Clinic 1", parent_location=4,
                         deviceid="1",
                         id=6),
                     7: model.Locations(
                         name="Clinic 2",
                         deviceid="2",
                         parent_location=5, id=7),
                     8: model.Locations(
                         name="Clinic with no district", parent_location=2, id=8)}
        for l in locations.values():
            self.session.add(l)
        self.session.commit()
        # column3 == "7" falls in var_4's 5,10 window; var_4 carries
        # disregard=1, so this record should land in disregardedData.
        data = {"type": "Case",
                "original_form": "demo_case",
                "raw_data": {
                    "SubmissionDate": "2017-01-14T05:38:33.482144",
                    "pt./visit_date": "2017-01-14T05:38:33.482144",
                    "meta/instanceID": "a1",
                    "index": 1,
                    "deviceid": "1",
                    "column1": "A",
                    "column2": "C",
                    "column3": "7"
                }
                }
        tc = to_codes.ToCodes(config, self.session)
        result = tc.run("data", data)
        print(result)
        self.assertEqual(len(result), 1)
        self.assertEqual(result[0]["form"], "disregardedData")
        self.assertEqual(result[0]["data"]["date"],
                         datetime(2017, 1, 14))
        # deviceid "1" maps to Clinic 1 (location id 6).
        self.assertEqual(result[0]["data"]["clinic"], 6)
        self.assertIn("var_1", result[0]["data"]["variables"])
        self.assertIn("var_2", result[0]["data"]["variables"])
        # column2 == "C" does not contain "B", so var_3 must not match.
        self.assertNotIn("var_3", result[0]["data"]["variables"])
        self.assertIn("var_4", result[0]["data"]["variables"])
        # column3 == "3" is outside var_4's window (record not
        # disregarded) and the linked alert_investigation record should
        # set var_confirmed.
        data = {"type": "Case",
                "original_form": "demo_case",
                "raw_data": {
                    "SubmissionDate": "2017-01-14T05:38:33.482144",
                    "pt./visit_date": "2017-01-14T05:38:33.482144",
                    "meta/instanceID": "a1",
                    "index": 1,
                    "deviceid": "1",
                    "column1": "A",
                    "column2": "C",
                    "column3": "3"
                },
                "link_data": {
                    "alert_investigation": [{
                        "confirmed": "yes",
                        "meta/instanceID": "b3"
                    }
                    ]
                }
                }
        tc = to_codes.ToCodes(config, self.session)
        result = tc.run("data", data)
        self.assertEqual(len(result), 1)
        self.assertEqual(result[0]["form"], "data")
        self.assertEqual(result[0]["data"]["date"],
                         datetime(2017, 1, 14))
        self.assertIn("var_1", result[0]["data"]["variables"])
        self.assertIn("var_2", result[0]["data"]["variables"])
        self.assertNotIn("var_3", result[0]["data"]["variables"])
        self.assertNotIn("var_4", result[0]["data"]["variables"])
        self.assertIn("var_confirmed", result[0]["data"]["variables"])
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,976 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/pipeline_worker/tests/test_to_data_type.py | import unittest
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from meerkat_abacus.config import config
from meerkat_abacus import model
from meerkat_abacus.pipeline_worker.process_steps import to_data_type
from meerkat_abacus.consumer.database_setup import create_db
class TestToDataType(unittest.TestCase):
    """Integration tests for the ToDataType pipeline step.

    Runs against a real database (config.DATABASE_URL): setUp drops and
    recreates the schema, so these tests must never point at live data.
    """
    def setUp(self):
        # Rebuild the database schema from scratch for each test run.
        create_db(config.DATABASE_URL, drop=True)
        engine = create_engine(config.DATABASE_URL)
        model.form_tables(config)
        model.Base.metadata.create_all(engine)
        self.engine = create_engine(config.DATABASE_URL)
        Session = sessionmaker(bind=self.engine)
        self.session = Session()
    def test_to_data_type(self):
        """Check which data types each demo form maps to."""
        tdt = to_data_type.ToDataType(config, self.session)
        data_1 = {"form": "demo_case",
                  "data": {"intro./visit": "new"}}
        data_2 = {"form": "demo_case",
                  "data": {"intro./visit": "return"}}
        data_3 = {"form": "demo_alert",
                  "data": {"intro./visit": "new"}}
        data_4 = {"form": "demo_register",
                  "data": {"intro./visit": "new"}}
        data_5 = {"form": "demo_does_not_exisit",
                  "data": {"intro./visit": "new"}}
        # A new visit on the case form yields both a Case and a Visit.
        result = tdt.run(data_1["form"], data_1["data"])
        self.assertEqual(len(result), 2)
        types = [d["data"]["type"] for d in result]
        self.assertEqual(["Case", "Visit"], sorted(types))
        self.assertEqual(result[0]["data"].get("raw_data"), data_1["data"])
        # A return visit only counts as a Visit, not a new Case.
        result = tdt.run(data_2["form"], data_2["data"])
        self.assertEqual(len(result), 1)
        types = [d["data"]["type"] for d in result]
        self.assertEqual(["Visit"], sorted(types))
        result = tdt.run(data_3["form"], data_3["data"])
        self.assertEqual(len(result), 1)
        types = [d["data"]["type"] for d in result]
        self.assertEqual(["Case"], sorted(types))
        # data_3["data"] equals data_1["data"], so this comparison holds
        # even though data_3 was the record just run.
        self.assertEqual(result[0]["data"].get("link_data"),
                         {"alert_investigation": [data_1["data"]]})
        result = tdt.run(data_4["form"], data_4["data"])
        self.assertEqual(len(result), 1)
        types = [d["data"]["type"] for d in result]
        self.assertEqual(["Register"], sorted(types))
        # Unknown forms map to no data type at all.
        result = tdt.run(data_5["form"], data_5["data"])
        self.assertEqual(result, [])
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,977 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/pipeline_worker/tests/variable_test.py | import unittest
from meerkat_abacus import model
from meerkat_abacus.codes.variable import Variable
# Expected Variable.test result when the variable applies and matches.
positive = {
    "applicable": 1,
    "value": 1
}
# Expected Variable.test result when the variable does not match.
negative = {
    "applicable": 0,
    "value": 0
}
class VariableTest(unittest.TestCase):
"""
Tests to check that Variables class gives the correct results on test cases
"""
    def setUp(self):
        # No shared fixtures are needed; each test builds its own variables.
        pass
    def tearDown(self):
        # Nothing to clean up.
        pass
def test_not_null(self):
"""
testing the not_null method
"""
agg_variable = model.AggregationVariables(
id=4,
method="not_null",
condition="None",
db_column="index")
variable = Variable(agg_variable)
row = {"index": "hei"}
self.assertEqual(variable.test(row), positive)
row = {"index": ""}
self.assertEqual(variable.test(row), negative)
row = {"index": 0}
self.assertEqual(variable.test(row), negative)
row = {"index": None}
self.assertEqual(variable.test(row), negative)
    def test_value(self):
        """
        testing the value method
        """
        agg_variable = model.AggregationVariables(
            id=4,
            method="value",
            condition="None",
            db_column="index")
        variable = Variable(agg_variable)
        row = {"index": "hei"}
        # The value method returns the column's content itself, not 1.
        self.assertEqual(variable.test(row),
                         {"applicable": True,
                          "value": "hei"
                          })
        row = {"index": ""}
        self.assertEqual(variable.test(row), negative)
        row = {"index": 0}
        self.assertEqual(variable.test(row), negative)
        row = {"index": None}
        self.assertEqual(variable.test(row), negative)
    def test_between(self):
        """
        Test the between method: the value computed by *calculation* must
        fall within the "low,high" range given by *condition*.
        """
        agg_variable = model.AggregationVariables(
            id=4,
            method="between",
            condition="0,1",
            calculation="A**2/(B-4)",
            db_column="A,B")
        variable = Variable(agg_variable)
        row = {"A": "1", "B": "6"}  # 1/2 -> within 0..1
        self.assertEqual(variable.test(row), positive)
        row = {"A": "2", "B": "6"}  # 4/2 = 2 -> outside
        self.assertEqual(variable.test(row), negative)
        row = {"A": "2", "B": "400"}  # 4/396 -> within 0..1
        self.assertEqual(variable.test(row), positive)
        row = {"A": "2", "B": "1"}  # 4/-3 -> outside
        self.assertEqual(variable.test(row), negative)
        row = {"A": "2"}  # test if column is missing
        self.assertEqual(variable.test(row), negative)
        agg_variable = model.AggregationVariables(
            id=4,
            method="between",
            condition="0,1",
            calculation="C/(B-4)",
            db_column="A,B")
        # note we have used C which is not one of the columns, so the test should give an error
        variable = Variable(agg_variable)
        row = {"A": "2", "B": "6"}
        with self.assertRaises(NameError):
            variable.test(row)
        # Test with date: the condition bounds are unix timestamps.
        agg_variable = model.AggregationVariables(
            id=4,
            method="between",
            condition="1388527200,2019679200",  # 2014-2034
            calculation="Variable.to_date(A)",
            db_column="A")
        variable = Variable(agg_variable)
        row = {"A": "01-Jan-2016"}
        self.assertEqual(variable.test(row), positive)
        row = {"A": "01-Jan-2035"}
        self.assertEqual(variable.test(row), negative)
        row = {"A": "01-Jan-2010"}
        self.assertEqual(variable.test(row), negative)
def test_calc(self):
"""
testing the calc_between method
"""
agg_variable = model.AggregationVariables(
id=4,
method="calc",
condition="None",
calculation="A+B",
db_column="A,B")
variable = Variable(agg_variable)
row = {"A": "1", "B": "6"}
self.assertEqual(variable.test(row), {
"applicable": True,
"value": 7
}
)
row = {"A": "2", "B": "400"}
self.assertEqual(variable.test(row), {
"applicable": True,
"value": 402
}
)
row = {"A": "2"} # test if column is missing
self.assertEqual(variable.test(row),
{"applicable": False,
"value": 0})
row = {"A": "2", "B": "-2"} # test if result is 0
self.assertEqual(variable.test(row),
{"applicable": True,
"value": 0})
agg_variable = model.AggregationVariables(
id=4,
method="calc",
condition="None",
calculation="A+C",
db_column="A,B")
# note we have used C which is not one of the columns, so the test should give an error
variable = Variable(agg_variable)
row = {"A": "2", "B": "6"}
with self.assertRaises(NameError):
variable.test(row)
def test_match(self):
agg_variable = model.AggregationVariables(
id=4,
method="match",
db_column="column1",
condition="A")
variable = Variable(agg_variable)
row = {"column1": "A"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "B"}
self.assertEqual(variable.test(row), negative)
row = {"column1": "Aa"}
self.assertEqual(variable.test(row), negative)
agg_variable.condition = "A,C"
variable = Variable(agg_variable)
row = {"column1": "A"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "C"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "B"}
self.assertEqual(variable.test(row), negative)
row = {"column1": "Aa"}
self.assertEqual(variable.test(row), negative)
def test_sub_match(self):
agg_variable = model.AggregationVariables(
id=4,
method="sub_match",
db_column="column1",
condition="A")
variable = Variable(agg_variable)
row = {"column1": "A"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "A3"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "B"}
self.assertEqual(variable.test(row), negative)
agg_variable.condition = "A,C"
variable = Variable(agg_variable)
row = {"column1": "A"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "C"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "A1"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "C3"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "B"}
self.assertEqual(variable.test(row), negative)
def test_and(self):
agg_variable = model.AggregationVariables(
id=4,
method="match and match",
db_column="column1;column2",
condition="A;B")
variable = Variable(agg_variable)
row = {"column1": "A", "column2": "B"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "B", "column2": "A"}
self.assertEqual(variable.test(row), negative)
row = {"column1": "Aa", "column2": "B"}
self.assertEqual(variable.test(row), negative)
agg_variable = model.AggregationVariables(
id=4,
method="match and match",
db_column="column1;column2",
condition="A,C;B")
variable = Variable(agg_variable)
row = {"column1": "A", "column2": "B"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "C", "column2": "B"}
self.assertEqual(variable.test(row), positive)
def test_or(self):
agg_variable = model.AggregationVariables(
id=4,
method="match or match",
db_column="column1;column2",
condition="A;B")
variable = Variable(agg_variable)
row = {"column1": "A", "column2": "B"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "B", "column2": "A"}
self.assertEqual(variable.test(row), negative)
row = {"column1": "Aa", "column2": "B"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "Aa", "column2": "C"}
self.assertEqual(variable.test(row), negative)
agg_variable = model.AggregationVariables(
id=4,
method="match or match",
db_column="column1;column2",
condition="A,C;B")
variable = Variable(agg_variable)
row = {"column1": "A", "column2": "B"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "C", "column2": "D"}
self.assertEqual(variable.test(row), positive)
    def test_different_test_types(self):
        """
        Test that different sub-test methods can be combined in a single
        variable with "and"/"or" (columns/conditions separated by ";").
        """
        # Exact match on column1 AND sub_match on column2.
        agg_variable = model.AggregationVariables(
            id=4,
            method="match and sub_match",
            db_column="column1;column2",
            condition="A;B")
        variable = Variable(agg_variable)
        row = {"column1": "A", "column2": "Bb"}
        self.assertEqual(variable.test(row), positive)
        row = {"column1": "B", "column2": "A"}
        self.assertEqual(variable.test(row), negative)
        row = {"column1": "Aa", "column2": "B"}
        self.assertEqual(variable.test(row), negative)
        # Exact match on column1 AND between 4 and 9 on column2.
        agg_variable = model.AggregationVariables(
            id=4,
            method="match and between",
            db_column="column1;column2",
            calculation="None;column2",
            condition="A;4,9")
        variable = Variable(agg_variable)
        row = {"column1": "A", "column2": "5"}
        self.assertEqual(variable.test(row), positive)
        row = {"column1": "A", "column2": "3"}
        self.assertEqual(variable.test(row), negative)
        row = {"column1": "Aa", "column2": "5"}
        self.assertEqual(variable.test(row), negative)
        # sub_match on column1 OR not_null on column2.
        agg_variable = model.AggregationVariables(
            id=4,
            method="sub_match or not_null",
            db_column="column1;column2",
            condition="A;None")
        variable = Variable(agg_variable)
        row = {"column1": "A", "column2": "5"}
        self.assertEqual(variable.test(row), positive)
        row = {"column1": "A", "column2": ""}
        self.assertEqual(variable.test(row), positive)
        row = {"column1": "B", "column2": ""}
        self.assertEqual(variable.test(row), negative)
        row = {"column1": "Aa", "column2": "5"}
        self.assertEqual(variable.test(row), positive)
def test_no_such_method(self):
agg_variable = model.AggregationVariables(
id=4,
method="no_such_method",
db_column="column1",
condition="A")
with self.assertRaises(NameError):
variable = Variable(agg_variable)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,978 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/pipeline_worker/tests/to_codes_test.py | import unittest
from meerkat_abacus import model
from meerkat_abacus.codes.to_codes import to_code
from meerkat_abacus.codes.variable import Variable
from geoalchemy2.shape import from_shape
from shapely.geometry import Polygon
# Data for the tests
class ToCodeTest(unittest.TestCase):
    """
    Test the to_code functionality: resolving a row's location (by device
    id or by GPS point) and evaluating the configured variables on it.
    """
    def setUp(self):
        # Location tree: country (1) -> regions (2, 3) -> districts (4, 5)
        # -> clinics (6, 7); clinic 8 hangs directly off region 2 with no
        # district. Districts 4 and 5 carry polygon areas so GPS-based
        # resolution ("in_geometry$lat,lng") can be tested.
        locations = {1: model.Locations(name="Demo", id=1),
                     2: model.Locations(
                         name="Region 1", parent_location=1, id=2),
                     3: model.Locations(
                         name="Region 2", parent_location=1, id=3),
                     4: model.Locations(
                         name="District 1", parent_location=2,
                         level="district", id=4,
                         area=from_shape(Polygon([(0, 0), (0, 0.4), (0.2, 0.4),
                                                  (0.2, 0), (0, 0)]))
                     ),
                     5: model.Locations(
                         name="District 2", parent_location=3,
                         level="district", id=5,
                         area=from_shape(Polygon([(0.2, 0.4), (0.4, 0.4), (0.4, 0),
                                                  (0.2, 0), (0.2, 0.4)]))),
                     6: model.Locations(
                         name="Clinic 1", parent_location=4, id=6),
                     7: model.Locations(
                         name="Clinic 2", parent_location=5, id=7),
                     8: model.Locations(
                         name="Clinic with no district", parent_location=2, id=8)}
        # Device ids "1"-"3" map to clinics 6-8 respectively.
        locations_by_deviceid = {"1": 6, "2": 7, "3": 8}
        zones = []
        regions = [2, 3]
        districts = [4, 5]
        # Four variables on form1 exercising different test methods:
        # 1 = not_null, 2 = match (raises an individual alert),
        # 3 = sub_match, 4 = between (marked disregard).
        agg_variables = [
            model.AggregationVariables(
                id=1, method="not_null", db_column="index", condition="",
                category=[],
                form="form1"),
            model.AggregationVariables(
                id=2,
                method="match",
                db_column="column1",
                alert=1,
                category=[],
                alert_type="individual",
                condition="A",
                form="form1"),
            model.AggregationVariables(
                id=3,
                category=[],
                method="sub_match",
                db_column="column2",
                condition="B",
                form="form1"),
            model.AggregationVariables(
                id=4,
                category=[],
                method="between",
                calculation="column3",
                db_column="column3",
                condition="5,10",
                disregard=1,
                form="form1")
        ]
        # Column(s) to copy into the alert payload, keyed by form.
        self.alert_data = {"form1": {"column1": "column1"}}
        devices = {"1": [], "2": [], "3": [], "4": [], "5": [],
                   "6": [], "7": [], "8": []}
        # NOTE: to_code expects this exact tuple ordering.
        self.all_locations = (locations, locations_by_deviceid, zones, regions, districts, devices)
        self.variables = {"case": {1: {}, 2: {}, 3: {}, 4: {}}}
        self.variables_forms = {}
        self.variables_test = {}
        self.variables_groups = {}
        self.mul_forms = []
        # Wrap each AggregationVariables row in a Variable and index its
        # test callable, form and group by variable id.
        for av in agg_variables:
            self.variables["case"][av.id][av.id] = Variable(av)
            self.variables_forms[av.id] = "form1"
            self.variables_test[av.id] = self.variables["case"][av.id][av.id].test
            self.variables_groups[av.id] = [av.id]
    def tearDown(self):
        # No external resources to release; fixtures are plain objects.
        pass
    def test_location_information(self):
        """
        Testing that all the location information is translated correctly
        """
        # Device "1" resolves to clinic 6 -> district 4 -> region 2.
        row = {"form1":
               {"index": 1,
                "column1": "A",
                "column2": "B34",
                "column3": "7",
                "date": "2015-10-25",
                "deviceid": "1",
                "meta/instanceID": "a"},
               "original_form": "form1"}
        var, category, ret_location, disregarded = to_code(
            row,
            (self.variables, self.variables_forms, self.variables_test, self.variables_groups, {}),
            self.all_locations, "case", self.alert_data, self.mul_forms, "deviceid")
        self.assertEqual(ret_location["country"], 1)
        self.assertEqual(ret_location["region"], 2)
        self.assertEqual(ret_location["district"], 4)
        self.assertEqual(ret_location["clinic"], 6)
        self.assertEqual(ret_location["device_id"], '1')
        # Device "2" resolves to clinic 7 -> district 5 -> region 3.
        row["form1"]["deviceid"] = "2"
        var, category, ret_location, disregard = to_code(
            row,
            (self.variables, self.variables_forms, self.variables_test, self.variables_groups, {}),
            self.all_locations, "case", self.alert_data, self.mul_forms, "deviceid")
        self.assertEqual(ret_location["country"], 1)
        self.assertEqual(ret_location["region"], 3)
        self.assertEqual(ret_location["district"], 5)
        self.assertEqual(ret_location["device_id"], '2')
        # Device "3" maps to clinic 8, which has no district level.
        row["form1"]["deviceid"] = "3"
        var, category, ret_location, disregard = to_code(
            row,
            (self.variables, self.variables_forms, self.variables_test, self.variables_groups, {}),
            self.all_locations, "case", self.alert_data, self.mul_forms, "deviceid")
        self.assertEqual(ret_location["country"], 1)
        self.assertEqual(ret_location["region"], 2)
        self.assertEqual(ret_location["district"], None)
        self.assertEqual(ret_location["device_id"], '3')
        # An unknown device id yields no location at all.
        row["form1"]["deviceid"] = "99"
        var, category, ret_location, disregard = to_code(
            row,
            (self.variables, self.variables_forms, self.variables_test, self.variables_groups, {}),
            self.all_locations, "case", self.alert_data, self.mul_forms, "deviceid")
        self.assertEqual(ret_location, None)
        # Test gps in district: point (0.1, 0.1) falls inside district 4's
        # polygon; GPS resolution gives no clinic.
        row = {"form1":
               {"index": 1,
                "column1": "A",
                "column2": "B34",
                "column3": "7",
                "lat": "0.1",
                "lng": "0.1",
                "date": "2015-10-25",
                "deviceid": "1",
                "meta/instanceID": "a"},
               "original_form": "form1"}
        var, category, ret_location, disregard = to_code(
            row,
            (self.variables, self.variables_forms, self.variables_test, self.variables_groups, {}),
            self.all_locations, "case", self.alert_data, self.mul_forms,
            "in_geometry$lat,lng")
        self.assertEqual(ret_location["district"], 4)
        self.assertEqual(ret_location["region"], 2)
        self.assertEqual(ret_location["clinic"], None)
        # Point (0.3, 0.1) falls inside district 5's polygon.
        row = {"form1":
               {"index": 1,
                "column1": "A",
                "column2": "B34",
                "column3": "7",
                "lat": "0.3",
                "lng": "0.1",
                "date": "2015-10-25",
                "deviceid": "1",
                "meta/instanceID": "a"},
               "original_form": "form1"}
        var, category, ret_location, disregard = to_code(
            row,
            (self.variables, self.variables_forms, self.variables_test, self.variables_groups, {}),
            self.all_locations, "case", self.alert_data, self.mul_forms,
            "in_geometry$lat,lng")
        self.assertEqual(ret_location["district"], 5)
        self.assertEqual(ret_location["region"], 3)
        self.assertEqual(ret_location["clinic"], None)
        # Point (0.5, 0.1) is outside every district polygon: no location.
        row = {"form1":
               {"index": 1,
                "column1": "A",
                "column2": "B34",
                "column3": "7",
                "lat": "0.5",
                "lng": "0.1",
                "date": "2015-10-25",
                "deviceid": "1",
                "meta/instanceID": "a"},
               "original_form": "form1"}
        var, category, ret_location, disregard = to_code(
            row,
            (self.variables, self.variables_forms, self.variables_test, self.variables_groups, {}),
            self.all_locations, "case", self.alert_data, self.mul_forms,
            "in_geometry$lat,lng")
        self.assertEqual(ret_location, None)
    def test_variables(self):
        """
        Checking that variables returned and alerts are working
        """
        # row1 triggers all four variables (and hence variable 2's alert).
        row1 = {"form1": {"index": 1,
                          "column1": "A",
                          "column2": "B34",
                          "column3": "7",
                          "date": "2015-10-25",
                          "deviceid": "1",
                          "meta/instanceID": "a"},
                "original_form": "form1"}
        # row2 only satisfies variable 1 (not_null on "index").
        row2 = {"form1": {"index": 2,
                          "column1": "B",
                          "column2": "A",
                          "column3": "4",
                          "date": "2015-10-25",
                          "deviceid": "2",
                          "meta/instanceID": "b"},
                "original_form": "form1"}
        # row3 satisfies variables 1, 2 and 4 (column2 "C" fails sub_match).
        row3 = {"form1": {"index": 1,
                          "column1": "A",
                          "column2": "C",
                          "column3": "7",
                          "date": "2015-10-25",
                          "deviceid": "2",
                          "meta/instanceID": "c"},
                "original_form": "form1"}
        var, category, ret_loc, disregard = to_code(
            row1,
            (self.variables, self.variables_forms, self.variables_test, self.variables_groups, {}),
            self.all_locations, "case", self.alert_data, self.mul_forms, "deviceid")
        # Variable 2 raised an alert: the alert_* keys carry its id as the
        # reason, its type, and the copied alert_data column.
        self.assertEqual(var, {1: 1,
                               2: 1,
                               3: 1,
                               4: 1,
                               'alert_reason': 2,
                               'alert': 1,
                               'alert_type': "individual",
                               'alert_column1': 'A'})
        # Variable 4 (disregard=1) matched, so the row is disregarded.
        self.assertEqual(disregard, True)
        var, category, ret_loc, disregard = to_code(
            row2,
            (self.variables, self.variables_forms, self.variables_test, self.variables_groups, {}),
            self.all_locations, "case", self.alert_data, self.mul_forms, "deviceid")
        self.assertEqual(var, {1: 1})
        self.assertEqual(disregard, False)
        var, category, ret_loc, disregard = to_code(
            row3,
            (self.variables, self.variables_forms, self.variables_test, self.variables_groups, {}),
            self.all_locations, "case", self.alert_data, self.mul_forms, "deviceid")
        self.assertEqual(var, {1: 1,
                               2: 1,
                               4: 1,
                               'alert': 1,
                               'alert_column1': 'A',
                               "alert_type": "individual",
                               'alert_reason': 2})
        self.assertEqual(disregard, True)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,979 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/pipeline_worker/tests/test_quality_control.py | import unittest
from unittest.mock import patch
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from datetime import datetime
import meerkat_abacus
from meerkat_abacus.config import config
from meerkat_abacus import model
from meerkat_abacus.pipeline_worker.process_steps import quality_control
from meerkat_abacus.consumer.database_setup import create_db
# TODO: Test deviceid and exclusion list
class TestQualityControll(unittest.TestCase):
    """
    Test the QualityControl pipeline step against variables stored in the
    database (import-type variables on the demo_case form).
    """
    def setUp(self):
        # Recreate the database schema from scratch for every test.
        create_db(config.DATABASE_URL, drop=True)
        engine = create_engine(config.DATABASE_URL)
        model.Base.metadata.create_all(engine)
        self.engine = create_engine(config.DATABASE_URL)
        Session = sessionmaker(bind=self.engine)
        self.session = Session()
    def test_quality_control(self):
        # Three QC variables:
        #  qul_1: height must lie between 50 and 220,
        #  qul_2: visit date must fall in 2014-2034 (epoch range); its
        #         "discard" category drops the whole row on failure,
        #  qul_3: "replace:SubmissionDate" swaps the matched value for the
        #         row's SubmissionDate.
        variables = [
            model.AggregationVariables(
                id="qul_1",
                type="import",
                form="demo_case",
                db_column="results./bmi_height",
                method="between",
                calculation="results./bmi_height",
                condition="50,220"
            ),
            model.AggregationVariables(
                id="qul_2",
                type="import",
                form="demo_case",
                db_column="pt./visit_date",
                method="between",
                category=["discard"],
                calculation='Variable.to_date(pt./visit_date)',
                condition="1388527200,2019679200"
            ),
            model.AggregationVariables(
                id="qul_3",
                type="import",
                form="demo_case",
                db_column="pt./visit_date2",
                method="match",
                category=["replace:SubmissionDate"],
                condition="15-Apr-2018"
            )
        ]
        # Enable quality control for the demo_case form and load the
        # variables into a clean AggregationVariables table.
        config.country_config["quality_control"] = ["demo_case"]
        self.session.query(model.AggregationVariables).delete()
        self.session.commit()
        for v in variables:
            self.session.add(v)
        self.session.commit()
        qc = quality_control.QualityControl(
            config,
            self.session
        )
        data = {
            "meta/instanceID": 1,
            "deviceid": "1",
            "SubmissionDate": "2016-04-17T02:43:31.306860",
            "pt./visit_date": "2016-04-17",
            "results./bmi_height": 60,
            "intro./visit": "new"
        }
        # A height inside 50-220 passes through unchanged.
        result = qc.run("demo_case", data)[0]
        self.assertEqual(result["data"]["results./bmi_height"], 60)
        # A height below range is blanked out (set to None).
        data["results./bmi_height"] = 20
        result = qc.run("demo_case", data)[0]
        self.assertEqual(result["data"]["results./bmi_height"], None)
        # NOTE(review): key below is "result./bmi_height" (missing the "s")
        # so 220 never reaches the real column — the assertion passes only
        # because the previous step already set it to None. Presumably this
        # was meant to test the upper bound; confirm intended boundary
        # behavior of "between" before fixing the key.
        data["result./bmi_height"] = 220
        result = qc.run("demo_case", data)[0]
        self.assertEqual(result["data"]["results./bmi_height"], None)
        # An out-of-range visit date hits qul_2's "discard" category, so
        # the whole row is dropped (run returns an empty list).
        data["pt./visit_date"] = "15-Apr-2010"
        result = qc.run("demo_case", data)
        self.assertEqual(result, [])
        # qul_3's "replace:SubmissionDate" overwrites visit_date2 with the
        # row's SubmissionDate value.
        data["pt./visit_date"] = "15-Apr-2016"
        data["pt./visit_date2"] = "15-Apr-2019"
        result = qc.run("demo_case", data)[0]
        self.assertEqual(result["data"]["pt./visit_date2"],
                         "2016-04-17T02:43:31.306860")
class ValidateDateToEpiWeekConversionTest(unittest.TestCase):
    """
    Test _validate_date_to_epi_week_convertion: each data type's date
    column must parse and convert to an epi week, otherwise the row is
    rejected (with a debug log naming the device id).

    NOTE(review): several tests mutate the shared ``config`` object
    (country_config["epi_week"]) without restoring it, so test behavior can
    depend on execution order — confirm whether that is intentional.
    """
    # Single data type whose date lives in "date_column".
    test_data_types_list = [{"date": "date_column"}]
    @patch.object(quality_control.data_types, 'data_types_for_form_name', return_value=test_data_types_list)
    def test_validates_proper_date(self, mock):
        test_row = {"date_column": "2017-01-01"}
        self.assertTrue(quality_control._validate_date_to_epi_week_convertion("test_form",
                                                                              test_row,
                                                                              config))
    @patch.object(quality_control.data_types, 'data_types_for_form_name', return_value=test_data_types_list)
    def test_bypass_for_missing_date(self, mock):
        # An empty date string fails validation without raising.
        test_row = {"date_column": ''}
        self.assertFalse(quality_control._validate_date_to_epi_week_convertion("test_form",
                                                                               test_row,
                                                                               config))
    @patch.object(quality_control.data_types, 'data_types_for_form_name', return_value=test_data_types_list)
    def test_bypass_and_logs_incorrect_date(self, mock):
        # An unparseable date is rejected and logged with the device id.
        test_row = {"deviceid": "fake_me", "date_column": '31 Feb 2011'}
        with self.assertLogs(logger=meerkat_abacus.logger, level='DEBUG') as logs:
            quality_control._validate_date_to_epi_week_convertion("test_form", test_row,
                                                                  config)
        self.assertTrue(len(logs.output))
        self.assertIn("Failed to process date column for row with device_id: fake_me", logs.output[0])
    # Two data types sharing one date column: the conversion should only
    # be performed once for that column.
    multiple_data_types_single_date = [
        {
            "db_column": "condition1",
            "condition": "valid",
            "date": "same_date"
        },
        {
            "date": "same_date"
        }
    ]
    @patch.object(quality_control.data_types, 'data_types_for_form_name', return_value=multiple_data_types_single_date)
    def test_dates_should_be_tested_once(self, mock):
        test_row = {
            "condition1": "valid",
            "same_date": "June 14, 2015"
        }
        # NOTE(review): this inner patch rebinds the name ``mock``,
        # shadowing the decorator-injected argument of the same name.
        with patch.object(quality_control, 'epi_week_for_date') as mock:
            quality_control._validate_date_to_epi_week_convertion("test_form", test_row,
                                                                  param_config=config)
            mock.assert_called_once()
            mock.assert_called_with(datetime(2015, 6, 14), param_config=config.country_config)
    # Custom epi-year config: 2015 starts on 5 March 2015 (tuple so it can
    # also be patched into function __defaults__ below).
    test_epi_config = ({2015: datetime(2015, 3, 5)},)
    @patch.object(quality_control.data_types, 'data_types_for_form_name', return_value=test_data_types_list)
    def test_bypass_if_date_out_of_custom_epi_config(self, data_types_mock):
        # A date before the configured epi year start is rejected + logged.
        test_row = {"deviceid": "fake_me", "date_column": "03-05-2014"}
        config.country_config["epi_week"] = self.test_epi_config[0]
        with self.assertLogs(logger=meerkat_abacus.logger, level='DEBUG') as logs:
            quality_control._validate_date_to_epi_week_convertion("test_form", test_row,
                                                                  param_config=config)
        self.assertTrue(len(logs.output))
        # NOTE(review): leftover debug print.
        print(logs)
        self.assertIn("Failed to process date column for row with device_id: fake_me", logs.output[0])
    # Two data types with distinct, conditioned date columns.
    test_multiple_data_types = [
        {
            "db_column": "condition1",
            "condition": "valid",
            "date": "first_date"
        },
        {
            "db_column": "condition2",
            "condition": "valid",
            "date": "second_date"
        }
    ]
    @patch.object(quality_control.data_types, 'data_types_for_form_name', return_value=test_multiple_data_types)
    def test_multiple_data_types_with_valid_dates(self, mock):
        test_row = {
            "condition1": "valid",
            "first_date": "May 5,2015",
            "condition2": "valid",
            "second_date": "June 14, 2015"
        }
        config.country_config["epi_week"] = self.test_epi_config[0]
        self.assertTrue(quality_control._validate_date_to_epi_week_convertion("test_form",
                                                                              test_row,
                                                                              param_config=config))
    @patch.object(quality_control.data_types, 'data_types_for_form_name', return_value=test_multiple_data_types)
    def test_multiple_data_types_fails_if_single_date_invalid(self, mock):
        # second_date (2014) precedes the custom 2015 epi config: reject.
        test_row = {
            "condition1": "valid",
            "first_date": "May 5,2015",
            "condition2": "valid",
            "second_date": "June 14, 2014"
        }
        config.country_config["epi_week"] = self.test_epi_config[0]
        self.assertFalse(quality_control._validate_date_to_epi_week_convertion("test_form",
                                                                               test_row,
                                                                               param_config=config))
    # One conditioned data type plus one unconditioned data type.
    data_types_mixed_condition = [
        {
            "db_column": "condition1",
            "condition": "valid",
            "date": "first_date"
        },
        {
            "date": "second_date"
        }
    ]
    @patch('meerkat_abacus.util.epi_week.epi_year_start_date.__defaults__', new=test_epi_config)
    @patch('meerkat_abacus.util.epi_week.epi_year_by_date.__defaults__', new=test_epi_config)
    @patch.object(quality_control.data_types, 'data_types_for_form_name', return_value=data_types_mixed_condition)
    def test_multiple_data_types_passes_for_mixed_conditions(self, mock):
        test_row = {
            "condition1": "valid",
            "first_date": "May 5,2015",
            "second_date": "June 14, 2015"
        }
        self.assertTrue(quality_control._validate_date_to_epi_week_convertion("test_form", test_row,
                                                                              config))
# Allow running this test module directly: python <module>.py
if __name__ == "__main__":
    unittest.main()
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,980 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/config.py | """
Configuration file for meerkat_abacus
This configuration file sets up application level configurations
and imports the country specific configurations.
Many of the application level configurations can be overwritten by
environmental variables:
MEERKAT_ABACUS_DB_URL: db_url
DATA_DIRECTORY: path to directory where we store data csv files
COUNTRY_CONFIG_DIR: path to directory with country config
COUNTRY_CONFIG: name of country config file
NEW_FAKE_DATA: if we should generate fake data
GET_DATA_FROM_S3: if we should download data from an S3 bucket
START_CELERY: if we want to star the celery hourly tasks
"""
import os
import importlib.util
from unittest.mock import MagicMock
import yaml
from dateutil.parser import parse
import logging
class Config:
    """Application-level configuration for meerkat_abacus.

    Reads environment variables at construction time and loads the
    country-specific configuration module named by COUNTRY_CONFIG.
    """

    def __init__(self):
        """Build configuration from environment variables and the country config."""
        # Logging
        self.LOGGER_NAME = os.environ.get("LOGGER_NAME", "meerkat_abacus")
        self.LOGGING_LEVEL = os.environ.get("LOGGING_LEVEL", "ERROR")
        self.LOGGING_FORMAT = os.environ.get("LOGGING_FORMAT", '%(asctime)s - %(name)-15s - %(levelname)-7s - %(module)s:%(filename)s:%(lineno)d - %(message)s')
        self.DEPLOYMENT = os.environ.get("DEPLOYMENT", "unknown")
        # NOTE(review): bool() on any non-empty string is True, so
        # DEVELOPMENT="False" still enables development mode — confirm intended.
        self.DEVELOPMENT = bool(os.environ.get("DEVELOPMENT", False))
        self.PRODUCTION = os.environ.get("PRODUCTION", False)
        current_directory = os.path.dirname(os.path.realpath(__file__))
        self.DATABASE_URL = os.environ.get(
            "MEERKAT_ABACUS_DB_URL",
            'postgresql+psycopg2://postgres:postgres@db/meerkat_db'
        )
        self.data_directory = os.environ.get("DATA_DIRECTORY",
                                             current_directory + "/data/")
        self.config_directory = os.environ.get("COUNTRY_CONFIG_DIR",
                                               current_directory + "/country_config/")
        self.start_celery = os.environ.get("START_CELERY", False)
        self.setup = True
        # External service credentials / endpoints.
        self.hermes_api_key = os.environ.get("HERMES_API_KEY", "")
        self.hermes_api_root = os.environ.get("HERMES_API_ROOT", "")
        # NOTE(review): int() raises ValueError if HERMES_DEV is set to a
        # non-numeric string — verify deployments only use integer values.
        self.hermes_dev = int(os.environ.get("HERMES_DEV", False))
        self.mailing_key = os.environ.get("MAILING_KEY", "")
        self.mailing_root = os.environ.get("MAILING_ROOT", "")
        self.device_messaging_api = os.environ.get("DEVICE_MESSAGING_API", "")
        self.auth_root = os.environ.get('MEERKAT_AUTH_ROOT', 'http://nginx/auth')
        self.api_root = os.environ.get('MEERKAT_API_ROOT', 'http://nginx/api')
        self.send_test_emails = os.environ.get('MEERKAT_TEST_EMAILS', False)
        self.server_auth_username = os.environ.get('SERVER_AUTH_USERNAME', 'root')
        self.server_auth_password = os.environ.get('SERVER_AUTH_PASSWORD', 'password')
        self.send_test_device_messages = os.environ.get('MEERKAT_TEST_DEVICE_MESSAGES',
                                                        False)
        self.sentry_dns = os.environ.get('SENTRY_DNS', '')
        self.db_dump = os.environ.get('DB_DUMP', '')
        self.db_dump_folder = '/var/www/dumps/'
        self.import_fraction = float(os.environ.get("IMPORT_FRACTION", 0))
        # Optional cut-off: ignore data older than this date if set.
        only_import_after_date = os.environ.get("ONLY_IMPORT_AFTER", None)
        if only_import_after_date:
            self.only_import_after_date = parse(only_import_after_date)
        else:
            self.only_import_after_date = None
        # TODO: log it in a different place
        # self.logger.info(
        #     "Only importing data after {}".format(
        #         self.only_import_after_date)
        # )
        self.consul_enabled = os.environ.get("CONSUL_ENABLED", "False") == "True"
        # Country config: load the module file named by COUNTRY_CONFIG from
        # the config directory and pull its country_config dict.
        country_config_file = os.environ.get("COUNTRY_CONFIG", "demo_config.py")
        spec = importlib.util.spec_from_file_location(
            "country_config_module",
            self.config_directory + country_config_file
        )
        country_config_module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(country_config_module)
        self.country_config = country_config_module.country_config
        if hasattr(country_config_module, 'dhis2_config'):
            # dhis2 export is feature toggled for now
            # proper country configs will be added after feature launch
            self.dhis2_config = country_config_module.dhis2_config
        if self.hermes_dev:
            self.country_config["messaging_silent"] = True
        if not self.country_config.get("timezone"):
            self.country_config["timezone"] = "Europe/Dublin"
        self.s3_bucket = country_config_module.s3_bucket
        # Configure data initialisation
        self.initial_data_source = os.environ.get("INITIAL_DATA_SOURCE", "FAKE_DATA")
        self.PERSISTENT_DATABASE_URL = None
        self.get_data_from_s3 = 0
        self.s3_data_stream_interval = None
        self.initial_data = "FAKE_DATA"
        if self.initial_data_source == "FAKE_DATA":
            self.initial_data = "FAKE_DATA"
        elif self.initial_data_source == "AWS_RDS":
            self.PERSISTENT_DATABASE_URL = os.environ.get("PERSISTENT_DATABASE_URL")
            self.initial_data = "RDS"
        elif self.initial_data_source == "LOCAL_RDS":
            self.PERSISTENT_DATABASE_URL = os.environ.get(
                "PERSISTENT_DATABASE_URL",
                'postgresql+psycopg2://postgres:postgres@db/persistent_demo_db'
            )
            self.initial_data = "RDS"
        elif self.initial_data_source == "AWS_S3":
            self.get_data_from_s3 = 1  # int(os.environ.get("GET_DATA_FROM_S3", False))
            self.initial_data = "S3"
        elif self.initial_data_source == "LOCAL_CSV":
            self.get_data_from_s3 = 0  # int(os.environ.get("GET_DATA_FROM_S3", False))
            self.initial_data = "LOCAL_CSV"
        else:
            msg = f"INITIAL_DATA_SOURCE={self.initial_data_source} unsupported."
            raise ValueError(msg)
        # Configure data streaming
        self.stream_data_source = os.environ.get("STREAM_DATA_SOURCE", "AWS_S3")
        if self.stream_data_source == "LOCAL_SQS":
            self.SQS_ENDPOINT = os.environ.get("SQS_ENDPOINT", 'http://172.18.0.1:9324')
            self.sqs_queue = os.environ.get("SQS_QUEUE", 'nest-queue-demo')
        elif self.stream_data_source == "AWS_SQS":
            self.SQS_ENDPOINT = os.environ.get("SQS_ENDPOINT", "DEFAULT")
            self.sqs_queue = 'nest-queue-' + self.country_config.get("implementation_id", "demo") + '-' + self.DEPLOYMENT
        elif self.stream_data_source == "AWS_S3":
            self.get_data_from_s3 = 1
            self.s3_data_stream_interval = os.environ.get("S3_DATA_STREAM_INTERVAL", 3600)
        elif self.stream_data_source == "FAKE_DATA":
            # NOTE(review): this assignment is overwritten unconditionally by
            # the FAKE_DATA_GENERATION env read below unless that variable is
            # also set to "INTERNAL" — confirm this is the intended behaviour.
            self.fake_data_generation = "INTERNAL"
        elif self.stream_data_source == "NO_STREAMING":
            pass  # Don't set up any streaming.
        else:
            msg = f"STREAM_DATA_SOURCE={self.stream_data_source} unsupported."
            raise ValueError(msg)
        # Configure generating fake data
        self.fake_data = False
        self.internal_fake_data = None
        self.fake_data_interval = 60 * 5
        self.aggregate_password = None
        self.aggregate_username = None
        self.aggregate_url = None
        self.fake_data_generation = os.environ.get("FAKE_DATA_GENERATION", None)
        if self.fake_data_generation == "INTERNAL":
            self.fake_data = True
            self.internal_fake_data = True
        elif self.fake_data_generation == "SEND_TO_AGGREGATE":
            self.fake_data = True
            self.internal_fake_data = False
            self.aggregate_password = os.environ.get("AGGREGATE_PASSWORD", "password")
            self.aggregate_username = os.environ.get("AGGREGATE_USERNAME", "test")
            self.aggregate_url = os.environ.get("AGGREGATE_URL", "http://172.18.0.1:81")
        elif self.fake_data_generation == "SEND_TO_SQS":
            self.fake_data_sqs_queue = os.environ.get("SQS_QUEUE", 'nest-queue-demo')
            self.fake_data_sqs_endpoint = os.environ.get("SQS_ENDPOINT", 'http://172.18.0.1:9324')
            self.SQS_ENDPOINT = self.fake_data_sqs_endpoint
            self.sqs_queue = self.fake_data_sqs_queue

    def __repr__(self):
        """Serialise the whole config object as YAML for debugging output."""
        return yaml.dump(self)
# Module-level singleton: the rest of the application imports this instance.
config = Config()
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,981 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/country_config/demo_config.py | """ Config for Demo Location """
import datetime
# No S3 bucket is configured for the demo deployment.
s3_bucket = False
# Country-specific configuration dict consumed by meerkat_abacus Config.
country_config = {
    "country_name": "Demo",
    # Form tables to ingest for this deployment.
    "tables": [
        "demo_case",
        "demo_alert",
        "demo_register",
        # "plague_data"
    ],
    # Ordered list of processing steps applied to each record.
    "pipeline": ["quality_control",
                 "initial_visit_control", "write_to_db",
                 "to_data_type", "add_links",
                 "to_codes", "write_to_db",
                 "add_multiple_alerts",
                 "send_alerts",
                 "write_to_db"],
    "initial_visit_control": {
        "demo_case": {
            "identifier_key_list": ["patientid", "icd_code"],
            "visit_type_key": "intro./visit",
            "visit_date_key": "pt./visit_date",
            "module_key": "intro./module",
            "module_value": "ncd"
        }
    },
    "require_case_report": ["demo_case", "demo_register"],
    "codes_file": "demo_codes",
    "coding_list": [
        "demo_codes.csv",
        "demographic_codes.csv",
        "icd_codes.csv"
    ],
    "links_file": "demo_links.csv",
    "types_file": "data_types.csv",
    "country_tests": "demo_test.py",
    "epi_week": "day:0",
    # CSV files defining the location hierarchy.
    "locations": {
        "clinics": "demo_clinics.csv",
        "districts": "demo_districts.csv",
        "regions": "demo_regions.csv",
        "zones": "demo_zones.csv"
    },
    "geojson_files": ["demo_json.json"],
    "calculation_parameters": ["vaccination_vials.json","medicine_kits.json"],
    "exclusion_list":"demo_exclusion.csv",
    # Which field holds the record date, per form.
    "form_dates": {
        "case": "pt./visit_date",
        "alert": "end",
        "register": "end",
    },
    # Fake-data generators, keyed by form then field name; each value names a
    # generator strategy ("one", "multiple", "integer", "date", ...).
    "fake_data": {
        "demo_case": {
            "pt1./age": {"integer": [0, 120]},
            "child_age": {"integer": [0, 60]},
            "pt./pid": {"patient_id": "intro./visit;new"},
            "pt1./gender": {"one": ["male", "female"]},
            "pt./visit_date": {"date": "year"},
            "intro./visit": {"one": ["new", "return", "referral"]},
            "intro./module": {"one": ["cd", "ncd", "mh"]},
            "nationality": {"one": ["demo", "null_island"]},
            "pt1./status": {"one": ["refugee", "national"]},
            "intro_module": {
                "multiple": ["mh","imci", "rh", "labs", "px"]
            },
            "symptoms": {"multiple": ["A9_4", "B15-17_2", "A39_3",
                                      "A03_2", "!0_1", "", ""]},
            "pregnant": {"one": ["yes", "no"]},
            "pregnancy_complications": {"one": ["yes", "no"]},
            "smoke_ever": {"one": ["yes", "no"]},
            "smoke_now": {"one": ["yes", "no"]},
            "icd_code": {"one": ["A80.9", "B05.3", "A00.1", "A00", "A39", "A87",
                                 "A03", "A36.8", "A33.3", "A34.4","E10", "G08", "J40",
                                 "A35.4", "A37", "E15", "E16", "E20.4","I10",
                                 "E40", "E41", "E50", "E65", "F40", "O60",
                                 "P70", "S10"]},
            "vaccination_type": {"multiple": ["bcg", "hepb", "diptheria",
                                              "tetanus", "pertussis",
                                              "polio", "hib", "measles",
                                              "mumps", "rubella"]},
            "pip./namru": {"one": range(1, 400)},
            "patientid": {"one": range(1, 3)},
            "sari": {"one": ["yes", "no"]},
            "pip_fu5./icu": {"one": ["yes", "no"]},
            "pip_fu5./ventilated": {"one": ["yes", "no"]},
            "pip_fu7./outcome": {"one": ["death", "not"]},
            "results./bp_systolic": {"integer": [0, 200]},
            "results./bp_diastolic": {"integer": [0, 100]},
            "results./bmi_weight": {"integer": [40, 120]},
            "results./bmi_height": {"integer": [120, 210]},
            "results./glucose_fasting": {"integer": [1, 200]},
            "results./hba1c": {"integer": [0, 20]},
            "vaccination": {"one": ["yes", "no"]},
            "risk_code": {"multiple-spaces": ["A80_5", "D67-67-2",
                                              "E10-14_9"]},
            "sympt_code": {"multiple-spaces": ["A80_3", "B05_8", ""]},
            "breastfeed": {"one": ["yes", "no"]},
            "exclusive_breastfeed": {"one": ["yes", "no"]},
            "formula": {"one": ["yes", "no"]}},
        "demo_register": {
            "intro./module": {"one": ["ncd", "cd"]},
            "consult./consultations": {"integer": [10, 20]},
            "consult./ncd_consultations": {"integer": [10, 20]},
            "consult./consultations_refugee": {"integer": [5, 15]},
            "surveillance./afp": {"integer": [1, 5]},
            "surveillance./measles": {"integer": [1, 5]}
        },
        "demo_alert": {"pt./alert_id": {"data": "uuids",
                                        "from_form": "demo_case"},
                       "alert_labs./return_lab": {"one": ["yes", "no", "unsure"]},
                       "pt./checklist": {"multiple": ["referral",
                                                      "case_management",
                                                      "contact_tracing",
                                                      "return_lab"]}},
        "demo_labs": {
            "labs./namru": {"one": range(1, 400)},
            "pcr./flu_a_pcr": {"one": ["positive", "negative"]},
            "pcr./h1_pcr": {"one": ["positive", "negative"]},
            "pcr./h3_pcr": {"one": ["positive", "negative"]},
            "pcr./h1n1_pcr": {"one": ["positive", "negative"]},
            "pcr./sw_n1_pcr": {"one": ["positive", "negative"]},
            "pcr./flu_b_pcr": {"one": ["positive", "negative"]}
        },
        "plague_data": {
            "pt./visit_date": {"date": "year"},
            "lat": {"range": [0, 0.4]},
            "lng": {"range": [0, 0.4]},
            "pt1./gender1": {"one": ["male", "female"]},
            "pt2./gender2": {"one": ["male", "female"]}
        }
    },
    "manual_test_data": {
        "demo_case":[
            "demo_case_link_test_data",
            "demo_case_duplicate_initial_visits_test_data",
            "demo_case_exclusion_list_test_data"
        ]
    },
    "exclusion_lists": {
        "demo_case": [
            "demo_case_exclusion_list.csv"
        ]
    },
    # Per-form mapping from alert fields to source-form columns.
    "alert_data": {
        "demo_case": {
            "age": "pt1./age",
            "gender": "pt1./gender",
            "submitted": "SubmissionDate",
            "received": "end"
        }
    },
    "alert_id_length": 6,
    "messaging_start_date": datetime.datetime(2016, 2, 15),
    "messaging_topic_prefix": "null",
    "messaging_sender": "",
    "messaging_silent": True,
    "default_start_date": datetime.datetime(2016, 1, 1),
    "reports_schedule": {
        "cd_public_health": {"period": "week", "send_day": "0", "language": "en"},
        # "ncd_public_health": {"period": "month", "send_day": "1", "language": "en"},
        # "communicable_diseases": {"period": "week", "send_day": "0", "language": "en"},
        # "non_communicable_diseases": {"period": "month", "send_day": "1", "language": "en"}
    },
    "device_message_schedule": {
        "thank_you": {
            "period": "week",
            "send_day": "0",
            "message": "Thank you for all your work this week in sending data and please keep up the good work!",
            "distribution": ["/topics/demo"]}
    },
    "translation_dir": "/var/www/meerkat_frontend/country_config/translations",
}
# DHIS2 export settings; the feature is toggled on only when this dict exists
# in the loaded country config module.
# NOTE(review): hard-coded demo credentials below — acceptable for a local
# demo instance only, never for production.
dhis2_config = {
    "url": "http://localhost:8085",
    "apiResource": "/api/26/",
    "credentials": ('admin', 'district'),
    "headers": {
        "Content-Type": "application/json",
        "Authorization": "Basic YWRtaW46ZGlzdHJpY3Q="
    },
    "loggingLevel": "DEBUG",
    # "countryId": "EebWN0q7GpT", # Null Island
    "countryId": "ImspTQPwCqd",  # Sierra Leone
    # Per-form export mapping: which columns hold event/completion dates.
    "forms": [
        {
            "name": "demo_case",
            "event_date": "pt./visit_date",
            "completed_date": "end",
            # "programId": "fgrH0jPDNEP", # optional
            "status": "COMPLETED"
        },
        {
            "name": "demo_alert",
            "date": "end"
        },
        {
            "name": "demo_register",
            "date": "intro./visit_date"
        }
    ]
}
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.