code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
# Flat script: polls the radio.co public status API for station sd71de59b3,
# prints the currently playing track, and — when the track is a Phish show —
# derives a YYYY-MM-DD show date from the track title.
import json
import http.client
conn = http.client.HTTPSConnection("public.radio.co")
station = 'stations/sd71de59b3/status'
payload = "{}"  # body is unused for GET; sent as an empty JSON object
conn.request("GET", station, payload)
res = conn.getresponse()
data = res.read()
json_string = data.decode("utf-8")
now_playing = json.loads(json_string)
print("JEMP is currently playing: " + now_playing["current_track"]["title"])
song = now_playing["current_track"]["title"]
# Assumes titles look like "Phish - <name> (M-D-YY)" — TODO confirm format.
artist = song[0:7]  # first 7 characters, e.g. "Phish -"
showdate = song[song.find("(")+1:song.find(")")]  # text between parentheses
if artist == 'Phish -':
    print("Phish is playing!")
    # Two-digit year heuristic: < 80 means 20xx, otherwise 19xx.
    if int(showdate[-2:]) < 80:
        year = str(int(showdate[-2:]) + 2000)
    else:
        year = str(int(showdate[-2:]) + 1900)
    # Zero-pad month/day by prefixing "000" and keeping the last 2 chars.
    month = showdate[:-3]  # drop the "-YY" suffix, leaving "M-D"
    month = "000" + month[:month.find("-")]
    month = month[-2:]
    day = showdate[:-3]
    day = "000" + day[day.find("-")+1:]
    day = day[-2:]
    show = year + "-" + month + "-" + day
else:
    print("Phish is not playing :(")
    show = "1900-01-01"  # sentinel date for "no Phish show playing"
print(show)
|
[
"json.loads"
] |
[((271, 294), 'json.loads', 'json.loads', (['json_string'], {}), '(json_string)\n', (281, 294), False, 'import json\n')]
|
# Packaging metadata for the `voronoiz` distribution: functions for
# generating Voronoi diagrams with alternate metrics.
from setuptools import setup
setup(
    name='voronoiz',
    version='0.1.0',
    author='<NAME>',
    description="Functions for generating Voronoi diagrams with "
                "alternate metrics.",
    license="MIT",
    url="https://github.com/WarrenWeckesser/voronoiz",
    # Trove classifiers advertise supported Python versions (3.7-3.9).
    classifiers=[
        "License :: OSI Approved :: MIT License",
        "Intended Audience :: Developers",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
    ],
    keywords="voronoi",
    packages=['voronoiz'],
    install_requires=['scipy', 'shapely']
)
|
[
"setuptools.setup"
] |
[((31, 602), 'setuptools.setup', 'setup', ([], {'name': '"""voronoiz"""', 'version': '"""0.1.0"""', 'author': '"""<NAME>"""', 'description': '"""Functions for generating Voronoi diagrams with alternate metrics."""', 'license': '"""MIT"""', 'url': '"""https://github.com/WarrenWeckesser/voronoiz"""', 'classifiers': "['License :: OSI Approved :: MIT License',\n 'Intended Audience :: Developers', 'Operating System :: OS Independent',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9']", 'keywords': '"""voronoi"""', 'packages': "['voronoiz']", 'install_requires': "['scipy', 'shapely']"}), "(name='voronoiz', version='0.1.0', author='<NAME>', description=\n 'Functions for generating Voronoi diagrams with alternate metrics.',\n license='MIT', url='https://github.com/WarrenWeckesser/voronoiz',\n classifiers=['License :: OSI Approved :: MIT License',\n 'Intended Audience :: Developers', 'Operating System :: OS Independent',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9'], keywords='voronoi', packages=\n ['voronoiz'], install_requires=['scipy', 'shapely'])\n", (36, 602), False, 'from setuptools import setup\n')]
|
# ORIE 7590
import numpy as np
from bd_sim_cython import discrete_bessel_sim, discrete_laguerre_sim, cmeixner
from scipy.special import jv, laguerre, poch, eval_laguerre, j0
from scipy.integrate import quad
from math import comb, factorial, exp, sqrt, log
import hankel
def bd_simulator(t, x0, num_paths, method='bessel', num_threads=4):
    """
    Simulate a birth-death chain to the terminal time ``t``.

    :param t: terminal time, double
    :param x0: initial state — either an int (shared by every path) or a
        zero-argument callable sampled once per path
    :param num_paths: number of independent paths
    :param method: 'bessel' selects the discrete-Bessel simulator; any other
        value falls through to the discrete-Laguerre simulator
    :param num_threads: number of worker threads for the Cython backend
    :return: int64 ndarray of length ``num_paths`` holding terminal states
    """
    if isinstance(x0, int):
        starts = [x0] * num_paths
    else:
        starts = [x0() for _ in range(num_paths)]
    x0_array = np.array(starts, dtype=np.int64)
    result = np.zeros(shape=num_paths, dtype=np.int64)
    # Both Cython kernels share the same calling convention and fill `result`.
    simulate = discrete_bessel_sim if method == 'bessel' else discrete_laguerre_sim
    simulate(t, x0_array, num_paths, result, int(num_threads))
    return result
def MC_BESQ_gateway(N = 10**6, t = 0, x0 = 0, test = 'bessel', method = 'bessel', args = [], num_decimal = 4):
    """
    Monte Carlo estimator of an expected BESQ test functional using dBESQ
    simulation or dLaguerre simulation.

    :param N: int, number of simulated paths
    :param t: positive float, simulation horizon
    :param x0: initial value of X (Poissonized into the lattice state)
    :param test: test function selector ('bessel', or 'custom' with the
        function supplied in ``args[0]``)
    :param method: one of {'bessel', 'laguerre', 'bessel-delay',
        'laguerre-delay'}
    :param args: extra arguments defining the test function (never mutated)
    :param num_decimal: decimals kept in the returned estimate
    :return: rounded Monte Carlo estimate
    :raises ValueError: if the (method, test) combination is unsupported
    """
    f = None
    s = None
    if method == 'bessel':
        if test == 'bessel':
            f = lambda n: eval_laguerre(n, 1)
            s = t
        elif test == 'custom':
            # BUG FIX: the original referenced the undefined name `arg` here
            # (NameError); the custom test function lives in args[0].
            f = args[0]
            s = t
    elif method == 'laguerre':
        if test == 'bessel':
            f = lambda n: eval_laguerre(n, 1 + t)
            s = log(t + 1)
    elif method == 'bessel-delay':
        method = 'bessel'
        if test == 'bessel':
            f = lambda n: j0(2 * np.sqrt(np.random.gamma(n + 1)))
            s = t - 1
        elif test == 'custom':
            f = lambda n: args[0](np.random.gamma(n + 1))
            s = t - 1
    elif method == 'laguerre-delay':
        method = 'laguerre'
        if test == 'bessel':
            f = lambda n: j0(2 * np.sqrt(np.random.gamma(n + 1) * (t / 2 + 1 / 2)))
            s = log(t / 2 + 1 / 2)
    # Robustness: the original fell through to an UnboundLocalError when the
    # combination was unsupported; fail loudly and explicitly instead.
    if f is None or s is None:
        raise ValueError(
            "unsupported (method, test) combination: ({0!r}, {1!r})".format(method, test)
        )
    def poisson_x0():
        # Poissonize the (continuous) initial condition into a lattice state.
        return np.random.poisson(x0)
    xt_array = bd_simulator(s, x0=poisson_x0, num_paths=N, method=method, num_threads=4)
    return np.mean(f(xt_array)).round(num_decimal)
def MC_Laguerre_gateway(N = 10**6, t = 0, x0 = 0, test = 'laguerre', method = 'laguerre', args = [], num_decimal = 4):
    """
    Monte Carlo estimator of an expected Laguerre test functional via
    discrete-Laguerre simulation, optionally with a delayed time change.

    :param N: int, number of simulated paths
    :param t: positive float, simulation horizon
    :param x0: initial value of X (Poissonized into the lattice state)
    :param test: test function selector
    :param method: one of {'laguerre', 'laguerre-delay'}
    :param args: test-function arguments; expects args['n'] for 'laguerre'
    :param num_decimal: decimals kept in the returned estimate
    """
    if method == 'laguerre':
        if test == 'laguerre':
            def test_fn(m):
                return eval_meixner(args['n'], m)
            sim_time = t
    elif method == 'laguerre-delay':
        if test == 'laguerre':
            def test_fn(m):
                return eval_laguerre(args['n'], np.random.gamma(m + 1) / 2)
            sim_time = t - log(2)
        elif test == 'relu':
            def test_fn(m):
                return np.maximum(0, np.random.gamma(m + 1) / 2)
            sim_time = t - log(2)
    def poisson_x0():
        # Poissonize the initial condition into a lattice state.
        return np.random.poisson(x0)
    terminal = bd_simulator(sim_time, x0=poisson_x0, num_paths=N, method='laguerre', num_threads=4)
    return np.mean(test_fn(terminal)).round(num_decimal)
def MC_Laguerre(N = 10**6, t = 0, x0 = 0, test = 'laguerre', args = [], num_decimal = 4):
    """
    Monte Carlo estimator of an expected Laguerre test functional using the
    2-d Brownian-motion representation of the squared Bessel process.

    :param N: int, number of simulated samples
    :param t: positive float, simulation horizon
    :param x0: initial value of X
    :param test: 'laguerre' (degree in args['n']) or 'relu'
    :param args: test-function arguments
    :param num_decimal: decimals kept in the returned estimate
    """
    if test == 'laguerre':
        def f(x):
            return eval_laguerre(args['n'], x)
    elif test == 'relu':
        def f(x):
            return np.maximum(0, x)
    variance = exp(t) - 1
    # 2-d Gaussian samples shifted to start at (sqrt(x0), sqrt(x0)).
    samples = np.random.multivariate_normal(np.zeros(2), variance * np.eye(2), size=N)
    shifted = samples + np.sqrt(x0) * np.ones((N, 2))
    xt_array = exp(-t) / 2 * np.sum(np.square(shifted), axis=1)
    return np.mean(f(xt_array)).round(num_decimal)
def MC_dBESQ_gateway(N = 10**6, t = 0, n0 = 0, test = 'laguerre', method = 'laguerre', args = [], num_decimal = 4):
    """
    Monte Carlo estimator of expected dBESQ using birth-death simulation, the
    exact BESQ solution, or dLaguerre simulation.

    :param N: int, number of simulations
    :param t: positive float, simulation horizon
    :param n0: initial lattice state (or Gamma-shape parameter, see branches)
    :param method: simulation method, currently supports
        {'birth-death', 'exact-besq', 'laguerre'}
    :param test: defines the test function
    :param args: arguments to define the test function (unused here)
    :param num_decimal: decimals kept in the returned estimate
    :return: rounded estimate, or None for unsupported combinations
    """
    if method == 'birth-death':
        if test == 'laguerre':
            f = lambda n : eval_laguerre(n, 1)
        xt_array = bd_simulator(t, x0=n0, num_paths=N, method='bessel', num_threads=4)
        return np.mean(f(xt_array)).round(num_decimal)
    elif method == 'exact-besq':
        if test == 'laguerre':
            # NOTE(review): a single Gamma sample is drawn (no size=N), so
            # np.mean averages one value; also exp(-t+1) looks like it may be
            # intended as exp(-t) — confirm against the dBESQ derivation.
            return np.mean(exp(-t+1)*jv(0, 2*np.sqrt(np.random.gamma(n0+1)))).round(num_decimal)
    elif method == 'laguerre':
        if test == 'laguerre':
            f = lambda n : eval_laguerre(n, 1)
            # NOTE(review): log(t / 2) is negative for t < 2 — verify the
            # intended time change (other gateways use log(t + 1)).
            s = log(t / 2)
        def poisson_x0():
            # Gamma-mixed Poisson initial state.
            return np.random.poisson(np.random.gamma(n0+1))
        xt_array = bd_simulator(s, x0=poisson_x0, num_paths=N, method='laguerre', num_threads=4)
        return np.mean(f(np.random.poisson(t/2 *np.random.gamma(xt_array+1)))).round(num_decimal)
def MC_BESQ_hankel(N = 10**6, t = 0, x0 = 0, test = 'custom', function = lambda x : 0, args = [], num_decimal = 4):
    """
    Monte Carlo estimator of expected BESQ using the Hankel transform and
    Exponential random variables.

    :param N: int, number of simulations
    :param t: positive float, simulation horizon (exponential draws have
        mean 1/t, so t must be non-zero)
    :param x0: initial value of X
    :param test: 'bessel', 'poly' (coefficients in args[0]) or 'custom'
    :param function: custom test function, used when test == 'custom'
    :param args: arguments to define the test function
    :param num_decimal: decimals kept in the returned estimate
    """
    # Local lambda shadows the `j0` imported from scipy.special inside this
    # function: this variant takes the BESQ argument x, i.e. J0(2*sqrt(x)).
    j0 = lambda x : jv(0, 2*np.sqrt(x))
    if test == 'bessel':
        f = j0
    elif test == 'poly':
        if len(args) < 1:
            # Best-effort: warn and fall back to the zero polynomial.
            print('No coefficients provided')
            coef = []
        else:
            coef = args[0]
        f = lambda x : np.polyval(coef, x)
    else:
        f = function
    estimates = np.zeros(N)
    for n in range(N):
        # One exponential draw (mean 1/t) per sample.
        Z = np.random.exponential(1/t)
        estimates[n] = j0(x0*Z)*hankel_reparam(Z, f)/t
    return np.mean(estimates).round(num_decimal)
def discrete_poly(n, coef):
    """
    Evaluate a "discrete polynomial" at the lattice point ``n``: the sum of
    coef[i] * poch(n - i + 1, i) over every index i with n >= i, where
    ``poch`` is the Pochhammer (rising factorial) symbol.

    :param n: evaluation point (int)
    :param coef: sequence of coefficients
    :return: value of the discrete polynomial
    """
    total = 0
    for i, c in enumerate(coef):
        if n >= i:
            total += c * poch(n - i + 1, i)
    return total
def exact_BESQ(t = 0, x0 = 0, num_decimal = 4):
    """
    Closed-form expectation exp(-t) * J0(2*sqrt(x0)) for the BESQ test
    functional, rounded to ``num_decimal`` decimals.
    """
    bessel_factor = jv(0, 2 * np.sqrt(x0))
    return (exp(-t) * bessel_factor).round(num_decimal)
def exact_Laguerre(t = 0, x0 = 0, n = 0, num_decimal = 4):
    """
    Closed-form expectation exp(-t*n) * L_n(x0) for the n-th Laguerre
    eigenfunction, rounded to ``num_decimal`` decimals.
    """
    eigen_decay = exp(-t * n)
    return (eigen_decay * eval_laguerre(n, x0)).round(num_decimal)
def eval_meixner(n, m):
    # Evaluate the Meixner-type function via the Cython kernel `cmeixner`.
    # `m` is expected to be a 1-d integer array-like; results are written in
    # place into an int64 buffer of the same length. NOTE(review): the int64
    # output dtype implies the kernel yields integers — confirm vs cmeixner.
    output = np.zeros(dtype=np.int64, shape=len(m))
    cmeixner(n, m, len(m), output)
    return output
def hankel_reparam(z, f):
    """
    Order-zero Hankel transform of ``x -> f(x**2)`` evaluated at 2*sqrt(z),
    scaled by 2 — a re-parametrization used by the BESQ estimators.

    Based on the `hankel` library: "hankel: A Python library for performing
    simple and accurate Hankel transformations", Journal of Open Source
    Software, 4(37), 1397, https://doi.org/10.21105/joss.01397

    :param z: positive float
    :param f: function in L^2(R_+)
    """
    # nu: order of the Bessel kernel; N: integration steps; h: step size.
    transform = hankel.HankelTransform(nu=0, N=120, h=0.03)
    return 2 * transform.transform(lambda x: f(x ** 2), 2 * np.sqrt(z), ret_err=False)
# exp = np.random.exponential
# def bd_one_path(t, x0):
# """
# simulate a birth-death process X at time t.
#
# :param t: float, terminal time
# :param x0: initial value of X
# :return: one realization of X_t
# """
#
# s = 0
# state = x0
#
# while True:
# birth_rate = state + 1
# death_rate = state
# arrival_rate = birth_rate + death_rate
# time_to_arrival = exp(1/arrival_rate)
# s += time_to_arrival
# # stop and return when exceeds target time
# if s > t:
# return state
# # update
# if np.random.rand() < death_rate / arrival_rate:
# state -= 1
# else:
# state += 1
#
#
# def bd_simulator(t, x0):
# """
# :param t: terminal time
# :param x0: list of initial values from certain distribution
# :return: list of simulated X_t
# """
#
# num_iter = len(x0)
# result = np.zeros(num_iter, dtype = np.int64)
#
# for i in range(num_iter):
# result[i] = bd_one_path(t, x0[i])
#
# return result
|
[
"math.exp",
"hankel.HankelTransform",
"numpy.maximum",
"scipy.special.eval_laguerre",
"numpy.polyval",
"numpy.random.exponential",
"numpy.zeros",
"numpy.ones",
"numpy.random.gamma",
"scipy.special.poch",
"numpy.mean",
"numpy.array",
"numpy.random.poisson",
"scipy.special.j0",
"numpy.eye",
"math.log",
"numpy.sqrt"
] |
[((887, 928), 'numpy.zeros', 'np.zeros', ([], {'dtype': 'np.int64', 'shape': 'num_paths'}), '(dtype=np.int64, shape=num_paths)\n', (895, 928), True, 'import numpy as np\n'), ((6930, 6941), 'numpy.zeros', 'np.zeros', (['N'], {}), '(N)\n', (6938, 6941), True, 'import numpy as np\n'), ((7984, 8027), 'hankel.HankelTransform', 'hankel.HankelTransform', ([], {'nu': '(0)', 'N': '(120)', 'h': '(0.03)'}), '(nu=0, N=120, h=0.03)\n', (8006, 8027), False, 'import hankel\n'), ((744, 786), 'numpy.array', 'np.array', (['([x0] * num_paths)'], {'dtype': 'np.int64'}), '([x0] * num_paths, dtype=np.int64)\n', (752, 786), True, 'import numpy as np\n'), ((2542, 2563), 'numpy.random.poisson', 'np.random.poisson', (['x0'], {}), '(x0)\n', (2559, 2563), True, 'import numpy as np\n'), ((3699, 3720), 'numpy.random.poisson', 'np.random.poisson', (['x0'], {}), '(x0)\n', (3716, 3720), True, 'import numpy as np\n'), ((4416, 4422), 'math.exp', 'exp', (['t'], {}), '(t)\n', (4419, 4422), False, 'from math import comb, factorial, exp, sqrt, log\n'), ((6977, 7005), 'numpy.random.exponential', 'np.random.exponential', (['(1 / t)'], {}), '(1 / t)\n', (6998, 7005), True, 'import numpy as np\n'), ((4310, 4337), 'scipy.special.eval_laguerre', 'eval_laguerre', (["args['n']", 'x'], {}), "(args['n'], x)\n", (4323, 4337), False, 'from scipy.special import jv, laguerre, poch, eval_laguerre, j0\n'), ((4442, 4449), 'math.exp', 'exp', (['(-t)'], {}), '(-t)\n', (4445, 4449), False, 'from math import comb, factorial, exp, sqrt, log\n'), ((7070, 7088), 'numpy.mean', 'np.mean', (['estimates'], {}), '(estimates)\n', (7077, 7088), True, 'import numpy as np\n'), ((1765, 1784), 'scipy.special.eval_laguerre', 'eval_laguerre', (['n', '(1)'], {}), '(n, 1)\n', (1778, 1784), False, 'from scipy.special import jv, laguerre, poch, eval_laguerre, j0\n'), ((2000, 2010), 'math.log', 'log', (['(t + 1)'], {}), '(t + 1)\n', (2003, 2010), False, 'from math import comb, factorial, exp, sqrt, log\n'), ((4386, 4402), 'numpy.maximum', 
'np.maximum', (['(0)', 'x'], {}), '(0, x)\n', (4396, 4402), True, 'import numpy as np\n'), ((5341, 5360), 'scipy.special.eval_laguerre', 'eval_laguerre', (['n', '(1)'], {}), '(n, 1)\n', (5354, 5360), False, 'from scipy.special import jv, laguerre, poch, eval_laguerre, j0\n'), ((6626, 6636), 'numpy.sqrt', 'np.sqrt', (['x'], {}), '(x)\n', (6633, 6636), True, 'import numpy as np\n'), ((6862, 6881), 'numpy.polyval', 'np.polyval', (['coef', 'x'], {}), '(coef, x)\n', (6872, 6881), True, 'import numpy as np\n'), ((7027, 7037), 'scipy.special.j0', 'j0', (['(x0 * Z)'], {}), '(x0 * Z)\n', (7029, 7037), False, 'from scipy.special import jv, laguerre, poch, eval_laguerre, j0\n'), ((7161, 7179), 'scipy.special.poch', 'poch', (['(n - i + 1)', 'i'], {}), '(n - i + 1, i)\n', (7165, 7179), False, 'from scipy.special import jv, laguerre, poch, eval_laguerre, j0\n'), ((7279, 7286), 'math.exp', 'exp', (['(-t)'], {}), '(-t)\n', (7282, 7286), False, 'from math import comb, factorial, exp, sqrt, log\n'), ((7400, 7411), 'math.exp', 'exp', (['(-t * n)'], {}), '(-t * n)\n', (7403, 7411), False, 'from math import comb, factorial, exp, sqrt, log\n'), ((7410, 7430), 'scipy.special.eval_laguerre', 'eval_laguerre', (['n', 'x0'], {}), '(n, x0)\n', (7423, 7430), False, 'from scipy.special import jv, laguerre, poch, eval_laguerre, j0\n'), ((8233, 8243), 'numpy.sqrt', 'np.sqrt', (['z'], {}), '(z)\n', (8240, 8243), True, 'import numpy as np\n'), ((1962, 1985), 'scipy.special.eval_laguerre', 'eval_laguerre', (['n', '(1 + t)'], {}), '(n, 1 + t)\n', (1975, 1985), False, 'from scipy.special import jv, laguerre, poch, eval_laguerre, j0\n'), ((3529, 3535), 'math.log', 'log', (['(2)'], {}), '(2)\n', (3532, 3535), False, 'from math import comb, factorial, exp, sqrt, log\n'), ((5815, 5825), 'math.log', 'log', (['(t / 2)'], {}), '(t / 2)\n', (5818, 5825), False, 'from math import comb, factorial, exp, sqrt, log\n'), ((2485, 2503), 'math.log', 'log', (['(t / 2 + 1 / 2)'], {}), '(t / 2 + 1 / 2)\n', (2488, 2503), 
False, 'from math import comb, factorial, exp, sqrt, log\n'), ((3650, 3656), 'math.log', 'log', (['(2)'], {}), '(2)\n', (3653, 3656), False, 'from math import comb, factorial, exp, sqrt, log\n'), ((4501, 4512), 'numpy.zeros', 'np.zeros', (['(2)'], {}), '(2)\n', (4509, 4512), True, 'import numpy as np\n'), ((4580, 4591), 'numpy.sqrt', 'np.sqrt', (['x0'], {}), '(x0)\n', (4587, 4591), True, 'import numpy as np\n'), ((4592, 4607), 'numpy.ones', 'np.ones', (['(N, 2)'], {}), '((N, 2))\n', (4599, 4607), True, 'import numpy as np\n'), ((5779, 5798), 'scipy.special.eval_laguerre', 'eval_laguerre', (['n', '(1)'], {}), '(n, 1)\n', (5792, 5798), False, 'from scipy.special import jv, laguerre, poch, eval_laguerre, j0\n'), ((7295, 7306), 'numpy.sqrt', 'np.sqrt', (['x0'], {}), '(x0)\n', (7302, 7306), True, 'import numpy as np\n'), ((3485, 3507), 'numpy.random.gamma', 'np.random.gamma', (['(m + 1)'], {}), '(m + 1)\n', (3500, 3507), True, 'import numpy as np\n'), ((4516, 4525), 'numpy.eye', 'np.eye', (['(2)'], {}), '(2)\n', (4522, 4525), True, 'import numpy as np\n'), ((5902, 5925), 'numpy.random.gamma', 'np.random.gamma', (['(n0 + 1)'], {}), '(n0 + 1)\n', (5917, 5925), True, 'import numpy as np\n'), ((2252, 2274), 'numpy.random.gamma', 'np.random.gamma', (['(n + 1)'], {}), '(n + 1)\n', (2267, 2274), True, 'import numpy as np\n'), ((3606, 3628), 'numpy.random.gamma', 'np.random.gamma', (['(m + 1)'], {}), '(m + 1)\n', (3621, 3628), True, 'import numpy as np\n'), ((5611, 5622), 'math.exp', 'exp', (['(-t + 1)'], {}), '(-t + 1)\n', (5614, 5622), False, 'from math import comb, factorial, exp, sqrt, log\n'), ((2141, 2163), 'numpy.random.gamma', 'np.random.gamma', (['(n + 1)'], {}), '(n + 1)\n', (2156, 2163), True, 'import numpy as np\n'), ((2432, 2454), 'numpy.random.gamma', 'np.random.gamma', (['(n + 1)'], {}), '(n + 1)\n', (2447, 2454), True, 'import numpy as np\n'), ((5637, 5660), 'numpy.random.gamma', 'np.random.gamma', (['(n0 + 1)'], {}), '(n0 + 1)\n', (5652, 5660), True, 'import 
numpy as np\n'), ((6078, 6107), 'numpy.random.gamma', 'np.random.gamma', (['(xt_array + 1)'], {}), '(xt_array + 1)\n', (6093, 6107), True, 'import numpy as np\n')]
|
from django.shortcuts import render, get_object_or_404
from .models import Contato
from django.http import Http404
def index(request):
    """Render the contact list page with every Contato record."""
    context = {'contatos': Contato.objects.all()}
    return render(request, 'contatos/index.html', context)
def ver_contato(request, contato_id):
    """Render one contact's detail page; raise Http404 for unknown ids."""
    contato = get_object_or_404(Contato, id=contato_id)
    context = {'contatos': contato}
    return render(request, 'contatos/ver_contato.html', context)
|
[
"django.shortcuts.render",
"django.shortcuts.get_object_or_404"
] |
[((185, 247), 'django.shortcuts.render', 'render', (['request', '"""contatos/index.html"""', "{'contatos': contatos}"], {}), "(request, 'contatos/index.html', {'contatos': contatos})\n", (191, 247), False, 'from django.shortcuts import render, get_object_or_404\n'), ((367, 408), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Contato'], {'id': 'contato_id'}), '(Contato, id=contato_id)\n', (384, 408), False, 'from django.shortcuts import render, get_object_or_404\n'), ((420, 487), 'django.shortcuts.render', 'render', (['request', '"""contatos/ver_contato.html"""', "{'contatos': contato}"], {}), "(request, 'contatos/ver_contato.html', {'contatos': contato})\n", (426, 487), False, 'from django.shortcuts import render, get_object_or_404\n')]
|
#!/usr/bin/python3
import os
import sys
import getopt
import json
import re
import xlrd
import openpyxl
from openpyxl.utils import get_column_letter
from pprint import pprint
def usage():
    """Print a one-line usage message naming the invoked script."""
    script_name = sys.argv[0]
    print("Usage : {0}".format(script_name))
def main():
    # Entry point: parse -o/--output plus positional spreadsheet paths, then
    # dump every visible cell of every sheet to the output file as
    # tab-separated text. Hidden rows/columns are skipped for both formats.
    ret = 0
    try:
        opts, args = getopt.getopt(
            sys.argv[1:], "hvo:", ["help", "version", "output="])
    except getopt.GetoptError as err:
        print(str(err))
        sys.exit(2)
    output = None
    for o, a in opts:
        if o == "-v":
            usage()
            sys.exit(0)
        elif o in ("-h", "--help"):
            usage()
            sys.exit(0)
        elif o in ("-o", "--output"):
            output = a
        else:
            assert False, "unknown option"
    if output == None :
        print("no output option")
        ret += 1
    if ret != 0:
        sys.exit(1)
    fp = open(output, mode='w', encoding='utf-8')
    for filepath in args:
        print("arg : {0}".format(filepath))
        fp.write("# file : {0}\n".format(filepath))
        filename, ext = os.path.splitext(filepath)
        if ext == ".xls" :
            # Legacy .xls path: xlrd with formatting_info so that hidden
            # row/column flags are available via rowinfo_map/colinfo_map.
            book = xlrd.open_workbook(filepath, formatting_info=True)
            pprint(book)
            for sheet in book.sheets() :
                pprint(sheet)
                fp.write("# sheet : {0}\n".format(sheet.name))
                pprint(sheet.colinfo_map)
                pprint(sheet.rowinfo_map)
                for row in range(sheet.nrows) :
                    if row in sheet.rowinfo_map:
                        if sheet.rowinfo_map[row].hidden == 1:
                            continue
                    fp.write(" ")
                    for col in range(sheet.ncols) :
                        if col in sheet.colinfo_map:
                            if sheet.colinfo_map[col].hidden == 1:
                                continue
                        cell = sheet.cell(row, col)
                        val = cell.value
                        if col != 0 :
                            fp.write("\t")
                        fp.write("{0}".format(val))
                    fp.write("\n")
        elif ext == ".xlsx" :
            # Modern .xlsx path via openpyxl; hidden rows/columns come from
            # the sheet's dimension maps.
            wb = openpyxl.load_workbook(filename = filepath)
            for sheet_name in wb.sheetnames:
                sheet = wb[sheet_name]
                fp.write("# sheet : {0}\n".format(sheet_name))
                hidden_cols = {}
                for collet, coldim in sheet.column_dimensions.items():
                    if coldim.hidden == True :
                        hidden_cols[collet] = 1
                hidden_rows = {}
                for rowlet, rowdim in sheet.row_dimensions.items():
                    if rowdim.hidden == True :
                        hidden_rows[str(rowlet)] = 1;
                rows = len(tuple(sheet.rows))
                cols = len(tuple(sheet.columns))
                for row in range(rows) :
                    # NOTE(review): row_dimensions keys are 1-based row
                    # numbers while this loop index is 0-based — the hidden
                    # row check looks off by one; confirm with openpyxl docs.
                    if str(row) in hidden_rows :
                        continue
                    fp.write(" ")
                    for col in range(cols):
                        col_let = get_column_letter(col + 1)
                        if col_let in hidden_cols:
                            continue
                        cell = sheet.cell(row=row + 1, column=col + 1)
                        val = cell.value
                        if col != 0 :
                            fp.write("\t")
                        if val == None:
                            val = ''
                        fp.write("{0}".format(val))
                    fp.write("\n")
                pprint(sheet)
        pass
    fp.close()
if __name__ == "__main__":
    main()
|
[
"getopt.getopt",
"xlrd.open_workbook",
"openpyxl.load_workbook",
"openpyxl.utils.get_column_letter",
"os.path.splitext",
"pprint.pprint",
"sys.exit"
] |
[((280, 347), 'getopt.getopt', 'getopt.getopt', (['sys.argv[1:]', '"""hvo:"""', "['help', 'version', 'output=']"], {}), "(sys.argv[1:], 'hvo:', ['help', 'version', 'output='])\n", (293, 347), False, 'import getopt\n'), ((723, 734), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (731, 734), False, 'import sys\n'), ((914, 940), 'os.path.splitext', 'os.path.splitext', (['filepath'], {}), '(filepath)\n', (930, 940), False, 'import os\n'), ((407, 418), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (415, 418), False, 'import sys\n'), ((487, 498), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (495, 498), False, 'import sys\n'), ((975, 1025), 'xlrd.open_workbook', 'xlrd.open_workbook', (['filepath'], {'formatting_info': '(True)'}), '(filepath, formatting_info=True)\n', (993, 1025), False, 'import xlrd\n'), ((1030, 1042), 'pprint.pprint', 'pprint', (['book'], {}), '(book)\n', (1036, 1042), False, 'from pprint import pprint\n'), ((543, 554), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (551, 554), False, 'import sys\n'), ((1079, 1092), 'pprint.pprint', 'pprint', (['sheet'], {}), '(sheet)\n', (1085, 1092), False, 'from pprint import pprint\n'), ((1155, 1180), 'pprint.pprint', 'pprint', (['sheet.colinfo_map'], {}), '(sheet.colinfo_map)\n', (1161, 1180), False, 'from pprint import pprint\n'), ((1185, 1210), 'pprint.pprint', 'pprint', (['sheet.rowinfo_map'], {}), '(sheet.rowinfo_map)\n', (1191, 1210), False, 'from pprint import pprint\n'), ((1701, 1742), 'openpyxl.load_workbook', 'openpyxl.load_workbook', ([], {'filename': 'filepath'}), '(filename=filepath)\n', (1723, 1742), False, 'import openpyxl\n'), ((2682, 2695), 'pprint.pprint', 'pprint', (['sheet'], {}), '(sheet)\n', (2688, 2695), False, 'from pprint import pprint\n'), ((2378, 2404), 'openpyxl.utils.get_column_letter', 'get_column_letter', (['(col + 1)'], {}), '(col + 1)\n', (2395, 2404), False, 'from openpyxl.utils import get_column_letter\n')]
|
import os.path as osp
import sys
import numpy as np
import torch
from matplotlib import pyplot as plt
from scipy.stats import norm
sys.path.append(osp.dirname(sys.path[0]))
from neko import neko_utils
class utils(neko_utils.neko_utils):
    # Utility subclass adding latent-space visualisation on top of the
    # project's neko_utils base class (provides get_now_time / divide_line).
    def __init__(self):
        super(utils, self).__init__()
    def plot_latent_image(self, model, latent_dim, patch_count, patch_side_size):
        # Render a patch_count x patch_count grid of decoded latent samples
        # and save it as "latent-<dim>_space_image_<time>.png".
        # model: assumed to expose .decoder taking a CUDA tensor of latent
        #   codes — requires CUDA to be available; TODO confirm.
        # 2-sigma principle: sample the central 90% of the standard normal.
        xs = norm.ppf(np.linspace(0.05, 0.95, patch_count))
        ys = norm.ppf(np.linspace(0.05, 0.95, patch_count))
        image_size = [patch_count * patch_side_size, patch_count * patch_side_size]
        image = np.zeros(image_size)
        for x_index, x in enumerate(xs):
            for y_index, y in enumerate(ys):
                # Tile the 2-d (x, y) pair out to latent_dim and decode it.
                z = np.tile(np.array([[x, y]]), latent_dim).reshape(-1, latent_dim)
                z = torch.Tensor(z).cuda()
                decoder_image = model.decoder(z)
                decoder_image = decoder_image.reshape(-1, patch_side_size, patch_side_size)
                # Paste the first decoded patch into its grid cell.
                image[x_index * patch_side_size:(x_index + 1) * patch_side_size,
                      y_index * patch_side_size:(y_index + 1) * patch_side_size] = decoder_image[0].cpu().detach().numpy()
        plt.figure(figsize=(10, 10))
        plt.imshow(image, cmap="gray")
        plt.savefig("latent-{}_space_image_{}.png".format(latent_dim, self.get_now_time()))
        self.divide_line("save latent space images !")
        plt.show()
|
[
"matplotlib.pyplot.show",
"matplotlib.pyplot.imshow",
"os.path.dirname",
"numpy.zeros",
"matplotlib.pyplot.figure",
"torch.Tensor",
"numpy.array",
"numpy.linspace"
] |
[((149, 173), 'os.path.dirname', 'osp.dirname', (['sys.path[0]'], {}), '(sys.path[0])\n', (160, 173), True, 'import os.path as osp\n'), ((621, 641), 'numpy.zeros', 'np.zeros', (['image_size'], {}), '(image_size)\n', (629, 641), True, 'import numpy as np\n'), ((1203, 1231), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(10, 10)'}), '(figsize=(10, 10))\n', (1213, 1231), True, 'from matplotlib import pyplot as plt\n'), ((1240, 1270), 'matplotlib.pyplot.imshow', 'plt.imshow', (['image'], {'cmap': '"""gray"""'}), "(image, cmap='gray')\n", (1250, 1270), True, 'from matplotlib import pyplot as plt\n'), ((1426, 1436), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1434, 1436), True, 'from matplotlib import pyplot as plt\n'), ((423, 459), 'numpy.linspace', 'np.linspace', (['(0.05)', '(0.95)', 'patch_count'], {}), '(0.05, 0.95, patch_count)\n', (434, 459), True, 'import numpy as np\n'), ((483, 519), 'numpy.linspace', 'np.linspace', (['(0.05)', '(0.95)', 'patch_count'], {}), '(0.05, 0.95, patch_count)\n', (494, 519), True, 'import numpy as np\n'), ((833, 848), 'torch.Tensor', 'torch.Tensor', (['z'], {}), '(z)\n', (845, 848), False, 'import torch\n'), ((757, 775), 'numpy.array', 'np.array', (['[[x, y]]'], {}), '([[x, y]])\n', (765, 775), True, 'import numpy as np\n')]
|
import tensorflow as tf
import numpy as np
def img2mse(x, y):
    """Mean squared error between two images/tensors."""
    diff = x - y
    return tf.reduce_mean(tf.square(diff))
def mse2psnr(x):
    """Convert an MSE value to PSNR in dB: -10 * log10(x)."""
    log10_x = tf.math.log(x) / tf.math.log(10.)
    return -10. * log10_x
def variance_weighted_loss(tof, gt, c=1.):
    """
    Heteroscedastic (variance-weighted) ToF loss.

    BUG FIX: the original body re-assigned ``tof = outputs['tof_map']`` where
    ``outputs`` is undefined in this scope (NameError at call time); callers
    already pass ``outputs['tof_map']`` as the ``tof`` argument.

    :param tof: predicted ToF map; channels [..., :2] are the values and
        channel [..., -1:] is the predicted std
    :param gt: ground truth; only channels [..., :2] are used
    :param c: weight of the log-std regularizer
    :return: scalar loss tensor
    """
    tof_std = tof[..., -1:]
    tof = tof[..., :2]
    gt = gt[..., :2]
    mse = tf.reduce_mean(tf.square(tof - gt) / (2 * tf.square(tof_std)))
    # The log-std term keeps the network from inflating predicted variance.
    return (mse + c * tf.reduce_mean(tf.math.log(tof_std)))
def tof_loss_variance(target_tof, outputs, tof_weight):
    """Variance-weighted ToF loss for the fine map and the optional coarse map."""
    fine = variance_weighted_loss(outputs['tof_map'], target_tof) * tof_weight
    coarse = 0.0
    if 'tof_map0' in outputs:
        coarse = variance_weighted_loss(outputs['tof_map0'], target_tof) * tof_weight
    return fine, coarse
def tof_loss_default(target_tof, outputs, tof_weight):
    """Weighted MSE ToF loss on the first two channels, fine + optional coarse."""
    target = target_tof[..., :2]
    fine = img2mse(outputs['tof_map'][..., :2], target) * tof_weight
    coarse = 0.0
    if 'tof_map0' in outputs:
        coarse = img2mse(outputs['tof_map0'][..., :2], target) * tof_weight
    return fine, coarse
def color_loss_default(target_color, outputs, color_weight):
    """Weighted MSE color loss for the fine map and the optional coarse map."""
    fine = img2mse(outputs['color_map'], target_color) * color_weight
    coarse = 0.0
    if 'color_map0' in outputs:
        coarse = img2mse(outputs['color_map0'], target_color) * color_weight
    return fine, coarse
def disparity_loss_default(target_depth, outputs, disp_weight, near, far):
    """
    Weighted MSE loss on disparity (1/depth), with depth clipped to
    [near, far] before inversion.

    BUG FIX: removed a stray bare ``target`` expression that raised a
    NameError (no variable of that name exists in this scope).

    :param target_depth: ground-truth depth map
    :param outputs: dict with 'disp_map' and optionally 'disp_map0'
    :param disp_weight: loss weight
    :param near: lower depth clip bound
    :param far: upper depth clip bound
    :return: (fine loss, coarse loss or 0.0)
    """
    target_disp = 1. / np.clip(target_depth, near, far)
    img_loss = img2mse(outputs['disp_map'], target_disp) * disp_weight
    img_loss0 = 0.0
    if 'disp_map0' in outputs:
        img_loss0 = img2mse(outputs['disp_map0'], target_disp) * disp_weight
    return img_loss, img_loss0
def depth_loss_default(target_depth, outputs, depth_weight):
    """Weighted MSE depth loss for the fine map and the optional coarse map."""
    fine = img2mse(outputs['depth_map'], target_depth) * depth_weight
    coarse = 0.0
    if 'depth_map0' in outputs:
        coarse = img2mse(outputs['depth_map0'], target_depth) * depth_weight
    return fine, coarse
def empty_space_loss(outputs):
    """Mean absolute accumulated opacity, summed over fine and coarse maps."""
    total = tf.reduce_mean(tf.abs(outputs['acc_map']))
    if 'acc_map0' in outputs:
        total += tf.reduce_mean(tf.abs(outputs['acc_map0']))
    return total
def make_pose_loss(model, key):
    """Build a smoothness loss penalizing consecutive pose differences for ``key``."""
    def loss_fn(_):
        deltas = model.poses[key][1:] - model.poses[key][:-1]
        return tf.reduce_mean(tf.square(tf.abs(deltas)))
    return loss_fn
|
[
"tensorflow.math.log",
"tensorflow.abs",
"numpy.clip",
"tensorflow.square"
] |
[((89, 105), 'tensorflow.square', 'tf.square', (['(x - y)'], {}), '(x - y)\n', (98, 105), True, 'import tensorflow as tf\n'), ((156, 173), 'tensorflow.math.log', 'tf.math.log', (['(10.0)'], {}), '(10.0)\n', (167, 173), True, 'import tensorflow as tf\n'), ((1490, 1522), 'numpy.clip', 'np.clip', (['target_depth', 'near', 'far'], {}), '(target_depth, near, far)\n', (1497, 1522), True, 'import numpy as np\n'), ((2134, 2160), 'tensorflow.abs', 'tf.abs', (["outputs['acc_map']"], {}), "(outputs['acc_map'])\n", (2140, 2160), True, 'import tensorflow as tf\n'), ((141, 155), 'tensorflow.math.log', 'tf.math.log', (['x'], {}), '(x)\n', (152, 155), True, 'import tensorflow as tf\n'), ((345, 364), 'tensorflow.square', 'tf.square', (['(tof - gt)'], {}), '(tof - gt)\n', (354, 364), True, 'import tensorflow as tf\n'), ((2224, 2251), 'tensorflow.abs', 'tf.abs', (["outputs['acc_map0']"], {}), "(outputs['acc_map0'])\n", (2230, 2251), True, 'import tensorflow as tf\n'), ((372, 390), 'tensorflow.square', 'tf.square', (['tof_std'], {}), '(tof_std)\n', (381, 390), True, 'import tensorflow as tf\n'), ((430, 450), 'tensorflow.math.log', 'tf.math.log', (['tof_std'], {}), '(tof_std)\n', (441, 450), True, 'import tensorflow as tf\n'), ((2376, 2428), 'tensorflow.abs', 'tf.abs', (['(model.poses[key][1:] - model.poses[key][:-1])'], {}), '(model.poses[key][1:] - model.poses[key][:-1])\n', (2382, 2428), True, 'import tensorflow as tf\n')]
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import logging
import signal
import socket
import subprocess
import textwrap
import time
from contextlib import ExitStack, contextmanager
from typing import List, NamedTuple, Optional
from antlir.common import (
FD_UNIX_SOCK_TIMEOUT,
check_popen_returncode,
get_logger,
listen_temporary_unix_socket,
recv_fds_from_unix_sock,
)
from antlir.fs_utils import Path
log = get_logger()
_mockable_popen_for_repo_server = subprocess.Popen
def _make_debug_print(logger_name, fstring):
t = time.time()
ymdhms = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(t))
msecs = int((t - int(t)) * 1000)
return (
"print("
# Emulate the format of `init_logging(debug=True)`
+ repr(f"DEBUG _make_sockets_and_send_via {ymdhms},{msecs:03} ")
+ " + f'Sending {num_socks} FDs to parent', file=sys.stderr)"
)
def _make_sockets_and_send_via(*, num_socks: int, unix_sock_fd: int):
    """
    Creates a TCP stream socket and sends it elsewhere via the provided Unix
    domain socket file descriptor. This is useful for obtaining a socket
    that belongs to a different network namespace (i.e. creating a socket
    inside a container, but binding it from outside the container).
    IMPORTANT: This code must not write anything to stdout, the fd can be 1.
    """
    # NB: Some code here is (sort of) copy-pasta'd in `send_fds_and_run.py`,
    # but it's not obviously worthwhile to reuse it here.
    # Returns an argv list: the embedded script is assembled by splicing the
    # numeric arguments (and an optional debug print) into a dedented source
    # template that is executed by a child `python3 -c`.
    return [
        "python3",
        "-c",
        textwrap.dedent(
            """
            import array, contextlib, socket, sys
            def send_fds(sock, msg: bytes, fds: 'List[int]'):
                num_sent = sock.sendmsg([msg], [(
                    socket.SOL_SOCKET, socket.SCM_RIGHTS,
                    array.array('i', fds).tobytes(),
                    # Future: is `flags=socket.MSG_NOSIGNAL` a good idea?
                )])
                assert len(msg) == num_sent, (msg, num_sent)
            num_socks = """
            + str(num_socks)
            + """
            """  # indentation for the debug print
            + (
                _make_debug_print(
                    "_make_sockets_and_send_via",
                    "f'Sending {num_socks} FDs to parent'",
                )
                if log.isEnabledFor(logging.DEBUG)
                else ""
            )
            + """
            with contextlib.ExitStack() as stack:
                # Make a socket in this netns, and send it to the parent.
                lsock = stack.enter_context(
                    socket.socket(fileno="""
            + str(unix_sock_fd)
            + """)
                )
                lsock.settimeout("""
            + str(FD_UNIX_SOCK_TIMEOUT)
            + """)
                csock = stack.enter_context(lsock.accept()[0])
                csock.settimeout("""
            + str(FD_UNIX_SOCK_TIMEOUT)
            + """)
                send_fds(csock, b'ohai', [
                    stack.enter_context(socket.socket(
                        socket.AF_INET, socket.SOCK_STREAM
                    )).fileno()
                    for _ in range(num_socks)
                ])
            """
        ),
    ]
def _create_sockets_inside_netns(
target_pid: int, num_socks: int
) -> List[socket.socket]:
"""
Creates TCP stream socket inside the container.
Returns the socket.socket() object.
"""
with listen_temporary_unix_socket() as (
unix_sock_path,
list_sock,
), subprocess.Popen(
[
# NB: /usr/local/fbcode/bin must come first because /bin/python3
# may be very outdated
"sudo",
"env",
"PATH=/usr/local/fbcode/bin:/bin",
"nsenter",
"--net",
"--target",
str(target_pid),
# NB: We pass our listening socket as FD 1 to avoid dealing with
# the `sudo` option of `-C`. Nothing here writes to `stdout`:
*_make_sockets_and_send_via(unix_sock_fd=1, num_socks=num_socks),
],
stdout=list_sock.fileno(),
) as sock_proc:
repo_server_socks = [
socket.socket(fileno=fd)
for fd in recv_fds_from_unix_sock(unix_sock_path, num_socks)
]
assert len(repo_server_socks) == num_socks, len(repo_server_socks)
check_popen_returncode(sock_proc)
return repo_server_socks
class RepoServer(NamedTuple):
rpm_repo_snapshot: Path
port: int
# The socket & server are invalid after the `_launch_repo_server` context
sock: socket.socket
proc: Optional[subprocess.Popen] = None
def __format__(self, _spec):
return f"RepoServer({self.rpm_repo_snapshot}, port={self.port})"
@contextmanager
def _launch_repo_server(repo_server_bin: Path, rs: RepoServer) -> RepoServer:
"""
Invokes `repo-server` with the given snapshot; passes it ownership of
the bound TCP socket -- it listens & accepts connections.
Returns a copy of the `RepoServer` with `server` populated.
"""
assert rs.proc is None
rs.sock.bind(("127.0.0.1", rs.port))
# Socket activation: allow requests to queue up, which means that
# we don't have to explicitly wait for the repo servers to start --
# any in-container clients will do so if/when needed. This reduces
# interactive `=container` boot time by hundreds of ms.
rs.sock.listen() # leave the request queue size at default
with rs.sock, _mockable_popen_for_repo_server(
[
repo_server_bin,
f"--socket-fd={rs.sock.fileno()}",
# TODO: Once the committed BAs all have a `repo-server` that
# knows to append `/snapshot` to the path, remove it here, and
# tidy up the snapshot resolution code in `repo_server.py`.
f"--snapshot-dir={rs.rpm_repo_snapshot / 'snapshot'}",
*(["--debug"] if log.isEnabledFor(logging.DEBUG) else []),
],
pass_fds=[rs.sock.fileno()],
) as server_proc:
try:
# pyre-fixme[7]: Expected `RepoServer` but got
# `Generator[RepoServer, None, None]`.
yield rs._replace(proc=server_proc)
finally:
# Uh-oh, the server already exited. Did it crash?
if server_proc.poll() is not None: # pragma: no cover
check_popen_returncode(server_proc)
else:
# Although `repo-server` is a read-only proxy, give it the
# chance to do graceful cleanup.
log.debug("Trying to gracefully terminate `repo-server`")
# `atexit` (used in an FB-specific `repo-server` plugin) only
# works on graceful termination. In `repo_server_main.py`, we
# graceful set up handling of `SIGTERM`. We signal once, and
# need to wait for it to clean up the resources it must to free.
# Signaling twice would interrupt cleanup (because this is
# Python, lol).
server_proc.send_signal(signal.SIGTERM)
try:
server_proc.wait(60.0)
except subprocess.TimeoutExpired: # pragma: no cover
log.warning(
f"Killing unresponsive `repo-server` {server_proc.pid}"
)
server_proc.kill()
@contextmanager
def launch_repo_servers_for_netns(
*, target_pid: int, snapshot_dir: Path, repo_server_bin: Path
) -> List[RepoServer]:
"""
Creates sockets inside the supplied netns, and binds them to the
supplied ports on localhost.
Yields a list of (host, port) pairs where the servers will listen.
"""
with open(snapshot_dir / "ports-for-repo-server") as infile:
repo_server_ports = {int(v) for v in infile.read().split() if v}
with ExitStack() as stack:
# Start a repo-server instance per port. Give each one a socket
# bound to the loopback inside the supplied netns. We don't
# `__enter__` the sockets since the servers take ownership of them.
servers = []
for sock, port in zip(
_create_sockets_inside_netns(target_pid, len(repo_server_ports)),
repo_server_ports,
):
rs = stack.enter_context(
# pyre-fixme[6]: Expected `ContextManager[Variable[
# contextlib._T]]` for 1st param but got `RepoServer`.
_launch_repo_server(
repo_server_bin,
RepoServer(
rpm_repo_snapshot=snapshot_dir,
port=port,
sock=sock,
),
)
)
log.debug(f"Launched {rs} in {target_pid}'s netns")
servers.append(rs)
# pyre-fixme[7]: Expected `List[RepoServer]` but got
# `Generator[List[typing.Any], None, None]`.
yield servers
|
[
"antlir.common.listen_temporary_unix_socket",
"antlir.common.get_logger",
"socket.socket",
"contextlib.ExitStack",
"time.time",
"antlir.common.recv_fds_from_unix_sock",
"time.localtime",
"antlir.common.check_popen_returncode"
] |
[((590, 602), 'antlir.common.get_logger', 'get_logger', ([], {}), '()\n', (600, 602), False, 'from antlir.common import FD_UNIX_SOCK_TIMEOUT, check_popen_returncode, get_logger, listen_temporary_unix_socket, recv_fds_from_unix_sock\n'), ((709, 720), 'time.time', 'time.time', ([], {}), '()\n', (718, 720), False, 'import time\n'), ((4369, 4402), 'antlir.common.check_popen_returncode', 'check_popen_returncode', (['sock_proc'], {}), '(sock_proc)\n', (4391, 4402), False, 'from antlir.common import FD_UNIX_SOCK_TIMEOUT, check_popen_returncode, get_logger, listen_temporary_unix_socket, recv_fds_from_unix_sock\n'), ((769, 786), 'time.localtime', 'time.localtime', (['t'], {}), '(t)\n', (783, 786), False, 'import time\n'), ((3435, 3465), 'antlir.common.listen_temporary_unix_socket', 'listen_temporary_unix_socket', ([], {}), '()\n', (3463, 3465), False, 'from antlir.common import FD_UNIX_SOCK_TIMEOUT, check_popen_returncode, get_logger, listen_temporary_unix_socket, recv_fds_from_unix_sock\n'), ((7893, 7904), 'contextlib.ExitStack', 'ExitStack', ([], {}), '()\n', (7902, 7904), False, 'from contextlib import ExitStack, contextmanager\n'), ((4182, 4206), 'socket.socket', 'socket.socket', ([], {'fileno': 'fd'}), '(fileno=fd)\n', (4195, 4206), False, 'import socket\n'), ((4229, 4279), 'antlir.common.recv_fds_from_unix_sock', 'recv_fds_from_unix_sock', (['unix_sock_path', 'num_socks'], {}), '(unix_sock_path, num_socks)\n', (4252, 4279), False, 'from antlir.common import FD_UNIX_SOCK_TIMEOUT, check_popen_returncode, get_logger, listen_temporary_unix_socket, recv_fds_from_unix_sock\n'), ((6375, 6410), 'antlir.common.check_popen_returncode', 'check_popen_returncode', (['server_proc'], {}), '(server_proc)\n', (6397, 6410), False, 'from antlir.common import FD_UNIX_SOCK_TIMEOUT, check_popen_returncode, get_logger, listen_temporary_unix_socket, recv_fds_from_unix_sock\n')]
|
"""
This is the main entry point for pyproj
e.g. python -m pyproj
"""
import argparse
from pyproj import __proj_version__, __version__, _show_versions
parser = argparse.ArgumentParser()
parser.add_argument(
"-v",
"--verbose",
help="Show verbose debugging version information.",
action="store_true",
)
args = parser.parse_args()
if args.verbose:
_show_versions.show_versions()
else:
print("pyproj version: {} [PROJ version: {}]".format(__version__, __proj_version__))
parser.print_help()
|
[
"pyproj._show_versions.show_versions",
"argparse.ArgumentParser"
] |
[((165, 190), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (188, 190), False, 'import argparse\n'), ((370, 400), 'pyproj._show_versions.show_versions', '_show_versions.show_versions', ([], {}), '()\n', (398, 400), False, 'from pyproj import __proj_version__, __version__, _show_versions\n')]
|
import math
import json
import os
import sys
from datetime import datetime
import pandas as pd
import googlemaps
import populartimes
from apikeys import API_KEY
#%%
PLACE_SEARCHES = [
# ('restaurant', 'restaurant', True),
# ('bar', 'bar', True),
('fast food', None, True),
# ('club', None, True),
('train station', None, None),
# ('tourist information', None, True),
# ('sights', 'tourist_attraction', None),
# ('park', 'park', None),
# ('mall', 'shopping_mall', True),
# ('shopping', 'shopping_mall', True),
# ('supermarket', 'supermarket', True),
# ('street market', None, None),
# ('hardware store', 'hardware_store', True)
]
PLACE_SEARCH_RADIUS = 20000 # in meters
LIMIT_NUM_PLACES = 20
RESULT_FILE = 'data/pois/%s.csv'
RESULT_POP_FILE = 'data/pois/%s_pop.json'
#%%
def haversine(a_lat, a_lng, b_lat, b_lng):
"""
haversine: calculate great circle distance between two points on earth in km
"""
R = 6371 # earth radius in km
a_lat = math.radians(a_lat)
a_lng = math.radians(a_lng)
b_lat = math.radians(b_lat)
b_lng = math.radians(b_lng)
d_lat = b_lat - a_lat
d_lng = b_lng - a_lng
a = math.pow(math.sin(d_lat / 2), 2) + math.cos(a_lat) * math.cos(b_lat) * math.pow(math.sin(d_lng / 2), 2)
c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
return R * c
#%%
if len(sys.argv) >= 2:
t_start_ymdh = sys.argv[1]
assert len(t_start_ymdh) == 14
skip_already_queried_cities = len(sys.argv) == 3 and sys.argv[2] == 'skip_queried_cities'
if skip_already_queried_cities:
print('will skip already queried cities')
else:
t_start_ymdh = datetime.now().strftime('%Y-%m-%d_h%H')
skip_already_queried_cities = False
result_file = RESULT_FILE % t_start_ymdh
result_pop_file = RESULT_POP_FILE % t_start_ymdh
gmaps = googlemaps.Client(key=API_KEY)
cities = pd.read_csv('data/cities_edited.csv')
if os.path.exists(result_file):
print('loading existing POI CSV file', result_file)
existing_pois = pd.read_csv(result_file)
existing_place_ids = set(existing_pois.place_id)
print('> %d existing places' % len(existing_place_ids))
existing_queried_cities = set(existing_pois.city)
print('> %d existing cities' % len(existing_queried_cities))
else:
existing_pois = None
existing_place_ids = set()
existing_queried_cities = set()
if os.path.exists(result_pop_file):
print('loading existing POI initial popularity score JSON file', result_file)
with open(result_pop_file) as f:
resultrows_pop = json.load(f)
print('> %d existing place popularity score entries' % len(resultrows_pop))
else:
resultrows_pop = []
queried_cities = []
#%%
print('querying places in cities ...')
resultrows = []
for city_i, cityrow in cities.iterrows():
print('> city %d/%d: %s' % (city_i+1, len(cities), cityrow.city))
if skip_already_queried_cities and cityrow.city in existing_queried_cities:
print('> skipping (already queried this city)')
continue
for place_query, place_type, open_now in PLACE_SEARCHES:
utcnow = datetime.utcnow()
query_id = t_start_ymdh + cityrow.city + cityrow.country + place_query
if query_id in queried_cities:
print('>> skipping (already queried this city for this kind of places)')
continue
kwargs = {}
if place_type is not None:
kwargs['type'] = place_type
if open_now is not None:
kwargs['open_now'] = open_now
if open_now is not None:
open_now_info = '(open now restriction: ' + str(open_now) + ')'
else:
open_now_info = ''
full_query = place_query + ' in ' + cityrow.city + ', ' + cityrow.country
print('>> query: "%s" %s in lat=%.4f, lng=%.4f' % (full_query, open_now_info, cityrow.lat, cityrow.lng))
places = gmaps.places(query=full_query, location=(cityrow.lat, cityrow.lng), radius=PLACE_SEARCH_RADIUS,
**kwargs)
if places['status'] != 'OK':
print('>> skipping (bad status: %s)' % places['status'])
continue
print('>> got %d results' % len(places['results']))
queried_cities.append(query_id)
n_pois = 0
for i_place, place in enumerate(places['results']):
if i_place >= LIMIT_NUM_PLACES:
break
print('>>> place: %s' % place['name'])
place_lat, place_lng = place['geometry']['location']['lat'], place['geometry']['location']['lng']
dist = haversine(cityrow.lat, cityrow.lng, place_lat, place_lng)
if dist > (PLACE_SEARCH_RADIUS / 1000) * 2: # accept larger radius here
print('>> found place is out of search radius (distance is %.2f)' % dist)
continue
if place['place_id'] in existing_place_ids:
print('>> skipping (already queried place with ID %s)' % place['place_id'])
continue
poptimes = populartimes.get_id(api_key=API_KEY, place_id=place['place_id'])
if 'current_popularity' in poptimes and 'populartimes' in poptimes:
print('>>>> adding this place as place of interest')
resultrows.append(cityrow.to_list() + [
place_query,
place['place_id'],
place['name'],
place.get('formatted_address', ''),
place['geometry']['location']['lat'],
place['geometry']['location']['lng']
])
resultrows_pop.append([
place['place_id'],
utcnow.strftime('%Y-%m-%d'),
utcnow.hour,
poptimes['current_popularity'],
poptimes['populartimes']
])
existing_place_ids.add(place['place_id'])
n_pois += 1
print('>> got %d places of interest for this city and query' % n_pois)
print('preparing and storing dataset')
places_of_interest = pd.DataFrame(resultrows, columns=cities.columns.to_list() + [
'query', 'place_id', 'name', 'addr', 'place_lat', 'place_lng'
])
if existing_pois is not None:
places_of_interest = pd.concat((existing_pois, places_of_interest), ignore_index=True)
places_of_interest = places_of_interest \
.drop_duplicates(['city', 'country', 'iso2', 'query', 'place_id'])\
.sort_values(by=['country', 'city', 'query', 'name'])\
.reset_index(drop=True)
print('got %d places of interest so far' % len(places_of_interest))
places_of_interest.to_csv(result_file, index=False)
with open(result_pop_file, 'w') as f:
json.dump(resultrows_pop, f, indent=2)
print('\n')
print('done.')
|
[
"googlemaps.Client",
"json.dump",
"json.load",
"math.sqrt",
"math.radians",
"pandas.read_csv",
"os.path.exists",
"math.sin",
"datetime.datetime.utcnow",
"math.cos",
"populartimes.get_id",
"datetime.datetime.now",
"pandas.concat"
] |
[((1859, 1889), 'googlemaps.Client', 'googlemaps.Client', ([], {'key': 'API_KEY'}), '(key=API_KEY)\n', (1876, 1889), False, 'import googlemaps\n'), ((1900, 1937), 'pandas.read_csv', 'pd.read_csv', (['"""data/cities_edited.csv"""'], {}), "('data/cities_edited.csv')\n", (1911, 1937), True, 'import pandas as pd\n'), ((1942, 1969), 'os.path.exists', 'os.path.exists', (['result_file'], {}), '(result_file)\n', (1956, 1969), False, 'import os\n'), ((2406, 2437), 'os.path.exists', 'os.path.exists', (['result_pop_file'], {}), '(result_pop_file)\n', (2420, 2437), False, 'import os\n'), ((1023, 1042), 'math.radians', 'math.radians', (['a_lat'], {}), '(a_lat)\n', (1035, 1042), False, 'import math\n'), ((1055, 1074), 'math.radians', 'math.radians', (['a_lng'], {}), '(a_lng)\n', (1067, 1074), False, 'import math\n'), ((1087, 1106), 'math.radians', 'math.radians', (['b_lat'], {}), '(b_lat)\n', (1099, 1106), False, 'import math\n'), ((1119, 1138), 'math.radians', 'math.radians', (['b_lng'], {}), '(b_lng)\n', (1131, 1138), False, 'import math\n'), ((2047, 2071), 'pandas.read_csv', 'pd.read_csv', (['result_file'], {}), '(result_file)\n', (2058, 2071), True, 'import pandas as pd\n'), ((2583, 2595), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2592, 2595), False, 'import json\n'), ((3134, 3151), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (3149, 3151), False, 'from datetime import datetime\n'), ((6354, 6419), 'pandas.concat', 'pd.concat', (['(existing_pois, places_of_interest)'], {'ignore_index': '(True)'}), '((existing_pois, places_of_interest), ignore_index=True)\n', (6363, 6419), True, 'import pandas as pd\n'), ((6819, 6857), 'json.dump', 'json.dump', (['resultrows_pop', 'f'], {'indent': '(2)'}), '(resultrows_pop, f, indent=2)\n', (6828, 6857), False, 'import json\n'), ((1210, 1229), 'math.sin', 'math.sin', (['(d_lat / 2)'], {}), '(d_lat / 2)\n', (1218, 1229), False, 'import math\n'), ((1328, 1340), 'math.sqrt', 'math.sqrt', (['a'], {}), '(a)\n', (1337, 
1340), False, 'import math\n'), ((1342, 1358), 'math.sqrt', 'math.sqrt', (['(1 - a)'], {}), '(1 - a)\n', (1351, 1358), False, 'import math\n'), ((1679, 1693), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1691, 1693), False, 'from datetime import datetime\n'), ((5067, 5131), 'populartimes.get_id', 'populartimes.get_id', ([], {'api_key': 'API_KEY', 'place_id': "place['place_id']"}), "(api_key=API_KEY, place_id=place['place_id'])\n", (5086, 5131), False, 'import populartimes\n'), ((1236, 1251), 'math.cos', 'math.cos', (['a_lat'], {}), '(a_lat)\n', (1244, 1251), False, 'import math\n'), ((1254, 1269), 'math.cos', 'math.cos', (['b_lat'], {}), '(b_lat)\n', (1262, 1269), False, 'import math\n'), ((1281, 1300), 'math.sin', 'math.sin', (['(d_lng / 2)'], {}), '(d_lng / 2)\n', (1289, 1300), False, 'import math\n')]
|
'''
Title: Time Series Deconfounder: Estimating Treatment Effects over Time in the Presence of Hidden Confounders
Authors: <NAME>, <NAME>, <NAME>
International Conference on Machine Learning (ICML) 2020
Last Updated Date: July 20th 2020
Code Author: <NAME> (<EMAIL>)
'''
import logging
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
logging.getLogger().setLevel(logging.INFO)
import numpy as np
import keras
from tqdm import tqdm
import tensorflow as tf
from tensorflow.contrib.rnn import LSTMCell, DropoutWrapper
from tensorflow.python.ops import rnn
from utils.predictive_checks_utils import compute_test_statistic_all_timesteps
from utils.rnn_utils import AutoregressiveLSTMCell, compute_sequence_length
class FactorModel:
def __init__(self, params, hyperparams):
self.num_treatments = params['num_treatments']
self.num_covariates = params['num_covariates']
self.num_confounders = params['num_confounders']
self.max_sequence_length = params['max_sequence_length']
self.num_epochs = params['num_epochs']
self.rnn_hidden_units = hyperparams['rnn_hidden_units']
self.fc_hidden_units = hyperparams['fc_hidden_units']
self.learning_rate = hyperparams['learning_rate']
self.batch_size = hyperparams['batch_size']
self.rnn_keep_prob = hyperparams['rnn_keep_prob']
tf.compat.v1.reset_default_graph()
self.previous_covariates = tf.compat.v1.placeholder(tf.float32, [None, self.max_sequence_length - 1, self.num_covariates])
self.previous_treatments = tf.compat.v1.placeholder(tf.float32, [None, self.max_sequence_length - 1, self.num_treatments])
self.trainable_init_input = tf.compat.v1.get_variable(name='trainable_init_input',
shape=[self.batch_size, 1,
self.num_covariates + self.num_treatments], trainable=True)
self.current_covariates = tf.placeholder(tf.float32, [None, self.max_sequence_length, self.num_covariates])
self.target_treatments = tf.placeholder(tf.float32, [None, self.max_sequence_length, self.num_treatments])
def build_confounders(self, trainable_state=True):
previous_covariates_and_treatments = tf.concat([self.previous_covariates, self.previous_treatments],
axis=-1)
self.rnn_input = tf.concat([self.trainable_init_input, previous_covariates_and_treatments], axis=1)
self.sequence_length = compute_sequence_length(self.rnn_input)
rnn_cell = DropoutWrapper(LSTMCell(self.rnn_hidden_units, state_is_tuple=False),
output_keep_prob=self.rnn_keep_prob,
state_keep_prob=self.rnn_keep_prob, variational_recurrent=True,
dtype=tf.float32)
autoregressive_cell = AutoregressiveLSTMCell(rnn_cell, self.num_confounders)
if trainable_state:
init_state = tf.get_variable(name='init_cell',
shape=[self.batch_size, autoregressive_cell.state_size],
trainable=True)
else:
init_state = autoregressive_cell.zero_state(self.batch_size, dtype=tf.float32)
rnn_output, _ = rnn.dynamic_rnn(
autoregressive_cell,
self.rnn_input,
initial_state=init_state,
dtype=tf.float32,
sequence_length=self.sequence_length)
# Flatten to apply same weights to all time steps.
rnn_output = tf.reshape(rnn_output, [-1, self.num_confounders])
hidden_confounders = rnn_output
covariates = tf.reshape(self.current_covariates, [-1, self.num_covariates])
self.multitask_input = tf.concat([covariates, hidden_confounders], axis=-1)
self.hidden_confounders = tf.reshape(hidden_confounders,
[-1, self.max_sequence_length, self.num_confounders])
def build_treatment_assignments(self):
self.treatment_prob_predictions = dict()
for treatment in range(self.num_treatments):
treatment_network_layer = tf.layers.dense(self.multitask_input, self.fc_hidden_units,
name='treatment_network_%s' % str(treatment),
activation=tf.nn.leaky_relu)
treatment_output = tf.layers.dense(treatment_network_layer, 1, activation=tf.nn.sigmoid,
name='treatment_output_%s' % str(treatment))
self.treatment_prob_predictions[treatment] = treatment_output
self.treatment_prob_predictions = tf.concat(list(self.treatment_prob_predictions.values()), axis=-1)
return self.treatment_prob_predictions
def build_network(self):
self.build_confounders()
self.treatment_prob_predictions = self.build_treatment_assignments()
return self.treatment_prob_predictions
def gen_epoch(self, dataset):
dataset_size = dataset['previous_covariates'].shape[0]
num_batches = int(dataset_size / self.batch_size) + 1
for i in range(num_batches):
if (i == num_batches - 1):
batch_samples = range(dataset_size - self.batch_size, dataset_size)
else:
batch_samples = range(i * self.batch_size, (i + 1) * self.batch_size)
batch_previous_covariates = dataset['previous_covariates'][batch_samples, :, :]
batch_previous_treatments = dataset['previous_treatments'][batch_samples, :, :]
batch_current_covariates = dataset['covariates'][batch_samples, :, :]
batch_target_treatments = dataset['treatments'][batch_samples, :, :].astype(np.int32)
yield (batch_previous_covariates, batch_previous_treatments, batch_current_covariates,
batch_target_treatments)
def eval_network(self, dataset):
validation_losses = []
for (batch_previous_covariates, batch_previous_treatments, batch_current_covariates,
batch_target_treatments) in self.gen_epoch(dataset):
feed_dict = self.build_feed_dictionary(batch_previous_covariates, batch_previous_treatments,
batch_current_covariates, batch_target_treatments)
validation_loss= self.sess.run([self.loss], feed_dict=feed_dict)
validation_losses.append(validation_loss)
validation_loss = np.mean(np.array(validation_losses))
return validation_loss
def compute_test_statistic(self, num_samples, target_treatments, feed_dict, predicted_mask):
test_statistic = np.zeros(shape=(self.max_sequence_length,))
for sample_idx in range(num_samples):
[treatment_probability] = self.sess.run(
[self.treatment_prob_predictions], feed_dict=feed_dict)
treatment_probability = np.reshape(treatment_probability, newshape=(
self.batch_size, self.max_sequence_length, self.num_treatments))
test_statistic_sequence = compute_test_statistic_all_timesteps(target_treatments,
treatment_probability,
self.max_sequence_length, predicted_mask)
test_statistic += test_statistic_sequence
test_statistic = test_statistic / num_samples
return test_statistic
def eval_predictive_checks(self, dataset):
num_replications = 50
num_samples = 50
p_values_over_time = np.zeros(shape=(self.max_sequence_length,))
steps = 0
for (batch_previous_covariates, batch_previous_treatments, batch_current_covariates,
batch_target_treatments) in self.gen_epoch(dataset):
feed_dict = self.build_feed_dictionary(batch_previous_covariates, batch_previous_treatments,
batch_current_covariates, batch_target_treatments)
mask = tf.sign(tf.reduce_max(tf.abs(self.rnn_input), axis=2))
[seq_lenghts, predicted_mask] = self.sess.run([self.sequence_length, mask], feed_dict=feed_dict)
steps = steps + 1
""" Compute test statistics for replicas """
test_statistic_replicas = np.zeros(shape=(num_replications, self.max_sequence_length))
for replication_idx in range(num_replications):
[treatment_replica, treatment_prob_pred] = self.sess.run(
[self.treatment_realizations, self.treatment_prob_predictions], feed_dict=feed_dict)
treatment_replica = np.reshape(treatment_replica, newshape=(
self.batch_size, self.max_sequence_length, self.num_treatments))
test_statistic_replicas[replication_idx] = self.compute_test_statistic(num_samples, treatment_replica,
feed_dict, predicted_mask)
""" Compute test statistic for target """
test_statistic_target = self.compute_test_statistic(num_samples, batch_target_treatments, feed_dict,
predicted_mask)
probability = np.mean(np.less(test_statistic_replicas, test_statistic_target).astype(np.int32), axis=0)
p_values_over_time += probability
p_values_over_time = p_values_over_time / steps
return p_values_over_time
def train(self, dataset_train, dataset_val, verbose=False):
self.treatment_prob_predictions = self.build_network()
self.treatment_realizations = tf.distributions.Bernoulli(probs=self.treatment_prob_predictions).sample()
self.loss = self.compute_loss(self.target_treatments, self.treatment_prob_predictions)
optimizer = self.get_optimizer()
# Setup tensorflow
tf_device = 'gpu'
if tf_device == "cpu":
tf_config = tf.compat.v1.ConfigProto(log_device_placement=False, device_count={'GPU': 0})
else:
tf_config = tf.compat.v1.ConfigProto(log_device_placement=False, device_count={'GPU': 1})
tf_config.gpu_options.allow_growth = True
self.sess = tf.compat.v1.Session(config=tf_config)
self.sess.run(tf.compat.v1.global_variables_initializer())
self.sess.run(tf.compat.v1.local_variables_initializer())
for epoch in tqdm(range(self.num_epochs)):
for (batch_previous_covariates, batch_previous_treatments, batch_current_covariates,
batch_target_treatments) in self.gen_epoch(dataset_train):
feed_dict = self.build_feed_dictionary(batch_previous_covariates, batch_previous_treatments,
batch_current_covariates, batch_target_treatments)
_, training_loss = self.sess.run([optimizer, self.loss], feed_dict=feed_dict)
if (verbose):
logging.info(
"Epoch {} out of {}: Summary| Training loss = {}".format(
(epoch + 1), self.num_epochs, training_loss))
if ((epoch + 1) % 100 == 0):
validation_loss = self.eval_network(dataset_val)
logging.info(
"Epoch {} out of {}: Summary| Validation loss = {}".format(epoch, self.num_epochs, validation_loss))
def build_feed_dictionary(self, batch_previous_covariates, batch_previous_treatments,
batch_current_covariates, batch_target_treatments):
feed_dict = {self.previous_covariates: batch_previous_covariates,
self.previous_treatments: batch_previous_treatments,
self.current_covariates: batch_current_covariates,
self.target_treatments: batch_target_treatments}
return feed_dict
def compute_loss(self, target_treatments, treatment_predictions):
target_treatments_reshape = tf.reshape(target_treatments, [-1, self.num_treatments])
mask = tf.sign(tf.reduce_max(tf.abs(self.rnn_input), axis=2))
flat_mask = tf.reshape(mask, [-1, 1])
cross_entropy = - tf.reduce_sum((target_treatments_reshape * tf.math.log(
tf.clip_by_value(treatment_predictions, 1e-10, 1.0)) + (1 - target_treatments_reshape) * (tf.math.log(
tf.clip_by_value(1 - treatment_predictions, 1e-10, 1.0)))) * flat_mask, axis=0)
self.mask = mask
cross_entropy /= tf.reduce_sum(tf.cast(self.sequence_length, tf.float32), axis=0)
return tf.reduce_mean(cross_entropy)
def get_optimizer(self):
optimizer = tf.train.AdamOptimizer(self.learning_rate).minimize(self.loss)
return optimizer
def compute_hidden_confounders(self, dataset):
dataset_size = dataset['covariates'].shape[0]
hidden_confounders = np.zeros(
shape=(dataset_size, self.max_sequence_length, self.num_confounders))
num_batches = int(dataset_size / self.batch_size) + 1
batch_id = 0
num_samples = 50
for (batch_previous_covariates, batch_previous_treatments, batch_current_covariates,
batch_target_treatments) in self.gen_epoch(dataset):
feed_dict = self.build_feed_dictionary(batch_previous_covariates, batch_previous_treatments,
batch_current_covariates, batch_target_treatments)
total_predicted_hidden_confounders = np.zeros(
shape=(self.batch_size, self.max_sequence_length, self.num_confounders))
for sample in range(num_samples):
predicted_hidden_confounders, predicted_treatment_probs = self.sess.run(
[self.hidden_confounders, self.treatment_prob_predictions], feed_dict=feed_dict)
total_predicted_hidden_confounders += predicted_hidden_confounders
total_predicted_hidden_confounders /= num_samples
if (batch_id == num_batches - 1):
batch_samples = range(dataset_size - self.batch_size, dataset_size)
else:
batch_samples = range(batch_id * self.batch_size, (batch_id + 1) * self.batch_size)
batch_id += 1
hidden_confounders[batch_samples] = total_predicted_hidden_confounders
return hidden_confounders
|
[
"tensorflow.clip_by_value",
"tensorflow.distributions.Bernoulli",
"tensorflow.reshape",
"tensorflow.get_variable",
"tensorflow.compat.v1.global_variables_initializer",
"tensorflow.abs",
"utils.predictive_checks_utils.compute_test_statistic_all_timesteps",
"tensorflow.compat.v1.placeholder",
"tensorflow.concat",
"tensorflow.placeholder",
"tensorflow.compat.v1.Session",
"tensorflow.cast",
"numpy.reshape",
"numpy.less",
"tensorflow.compat.v1.get_variable",
"tensorflow.reduce_mean",
"tensorflow.compat.v1.ConfigProto",
"tensorflow.python.ops.rnn.dynamic_rnn",
"utils.rnn_utils.AutoregressiveLSTMCell",
"logging.basicConfig",
"utils.rnn_utils.compute_sequence_length",
"numpy.zeros",
"tensorflow.compat.v1.local_variables_initializer",
"numpy.array",
"tensorflow.compat.v1.reset_default_graph",
"tensorflow.contrib.rnn.LSTMCell",
"tensorflow.train.AdamOptimizer",
"logging.getLogger"
] |
[((287, 362), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(levelname)s:%(message)s"""', 'level': 'logging.INFO'}), "(format='%(levelname)s:%(message)s', level=logging.INFO)\n", (306, 362), False, 'import logging\n'), ((363, 382), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (380, 382), False, 'import logging\n'), ((1387, 1421), 'tensorflow.compat.v1.reset_default_graph', 'tf.compat.v1.reset_default_graph', ([], {}), '()\n', (1419, 1421), True, 'import tensorflow as tf\n'), ((1457, 1556), 'tensorflow.compat.v1.placeholder', 'tf.compat.v1.placeholder', (['tf.float32', '[None, self.max_sequence_length - 1, self.num_covariates]'], {}), '(tf.float32, [None, self.max_sequence_length - 1,\n self.num_covariates])\n', (1481, 1556), True, 'import tensorflow as tf\n'), ((1588, 1687), 'tensorflow.compat.v1.placeholder', 'tf.compat.v1.placeholder', (['tf.float32', '[None, self.max_sequence_length - 1, self.num_treatments]'], {}), '(tf.float32, [None, self.max_sequence_length - 1,\n self.num_treatments])\n', (1612, 1687), True, 'import tensorflow as tf\n'), ((1720, 1866), 'tensorflow.compat.v1.get_variable', 'tf.compat.v1.get_variable', ([], {'name': '"""trainable_init_input"""', 'shape': '[self.batch_size, 1, self.num_covariates + self.num_treatments]', 'trainable': '(True)'}), "(name='trainable_init_input', shape=[self.\n batch_size, 1, self.num_covariates + self.num_treatments], trainable=True)\n", (1745, 1866), True, 'import tensorflow as tf\n'), ((2008, 2094), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None, self.max_sequence_length, self.num_covariates]'], {}), '(tf.float32, [None, self.max_sequence_length, self.\n num_covariates])\n', (2022, 2094), True, 'import tensorflow as tf\n'), ((2123, 2209), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None, self.max_sequence_length, self.num_treatments]'], {}), '(tf.float32, [None, self.max_sequence_length, self.\n num_treatments])\n', (2137, 2209), True, 
'import tensorflow as tf\n'), ((2306, 2378), 'tensorflow.concat', 'tf.concat', (['[self.previous_covariates, self.previous_treatments]'], {'axis': '(-1)'}), '([self.previous_covariates, self.previous_treatments], axis=-1)\n', (2315, 2378), True, 'import tensorflow as tf\n'), ((2459, 2545), 'tensorflow.concat', 'tf.concat', (['[self.trainable_init_input, previous_covariates_and_treatments]'], {'axis': '(1)'}), '([self.trainable_init_input, previous_covariates_and_treatments],\n axis=1)\n', (2468, 2545), True, 'import tensorflow as tf\n'), ((2573, 2612), 'utils.rnn_utils.compute_sequence_length', 'compute_sequence_length', (['self.rnn_input'], {}), '(self.rnn_input)\n', (2596, 2612), False, 'from utils.rnn_utils import AutoregressiveLSTMCell, compute_sequence_length\n'), ((2955, 3009), 'utils.rnn_utils.AutoregressiveLSTMCell', 'AutoregressiveLSTMCell', (['rnn_cell', 'self.num_confounders'], {}), '(rnn_cell, self.num_confounders)\n', (2977, 3009), False, 'from utils.rnn_utils import AutoregressiveLSTMCell, compute_sequence_length\n'), ((3384, 3523), 'tensorflow.python.ops.rnn.dynamic_rnn', 'rnn.dynamic_rnn', (['autoregressive_cell', 'self.rnn_input'], {'initial_state': 'init_state', 'dtype': 'tf.float32', 'sequence_length': 'self.sequence_length'}), '(autoregressive_cell, self.rnn_input, initial_state=\n init_state, dtype=tf.float32, sequence_length=self.sequence_length)\n', (3399, 3523), False, 'from tensorflow.python.ops import rnn\n'), ((3661, 3711), 'tensorflow.reshape', 'tf.reshape', (['rnn_output', '[-1, self.num_confounders]'], {}), '(rnn_output, [-1, self.num_confounders])\n', (3671, 3711), True, 'import tensorflow as tf\n'), ((3774, 3836), 'tensorflow.reshape', 'tf.reshape', (['self.current_covariates', '[-1, self.num_covariates]'], {}), '(self.current_covariates, [-1, self.num_covariates])\n', (3784, 3836), True, 'import tensorflow as tf\n'), ((3868, 3920), 'tensorflow.concat', 'tf.concat', (['[covariates, hidden_confounders]'], {'axis': '(-1)'}), 
'([covariates, hidden_confounders], axis=-1)\n', (3877, 3920), True, 'import tensorflow as tf\n'), ((3956, 4045), 'tensorflow.reshape', 'tf.reshape', (['hidden_confounders', '[-1, self.max_sequence_length, self.num_confounders]'], {}), '(hidden_confounders, [-1, self.max_sequence_length, self.\n num_confounders])\n', (3966, 4045), True, 'import tensorflow as tf\n'), ((6847, 6890), 'numpy.zeros', 'np.zeros', ([], {'shape': '(self.max_sequence_length,)'}), '(shape=(self.max_sequence_length,))\n', (6855, 6890), True, 'import numpy as np\n'), ((7809, 7852), 'numpy.zeros', 'np.zeros', ([], {'shape': '(self.max_sequence_length,)'}), '(shape=(self.max_sequence_length,))\n', (7817, 7852), True, 'import numpy as np\n'), ((10504, 10542), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {'config': 'tf_config'}), '(config=tf_config)\n', (10524, 10542), True, 'import tensorflow as tf\n'), ((12285, 12341), 'tensorflow.reshape', 'tf.reshape', (['target_treatments', '[-1, self.num_treatments]'], {}), '(target_treatments, [-1, self.num_treatments])\n', (12295, 12341), True, 'import tensorflow as tf\n'), ((12433, 12458), 'tensorflow.reshape', 'tf.reshape', (['mask', '[-1, 1]'], {}), '(mask, [-1, 1])\n', (12443, 12458), True, 'import tensorflow as tf\n'), ((12881, 12910), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['cross_entropy'], {}), '(cross_entropy)\n', (12895, 12910), True, 'import tensorflow as tf\n'), ((13185, 13263), 'numpy.zeros', 'np.zeros', ([], {'shape': '(dataset_size, self.max_sequence_length, self.num_confounders)'}), '(shape=(dataset_size, self.max_sequence_length, self.num_confounders))\n', (13193, 13263), True, 'import numpy as np\n'), ((2648, 2701), 'tensorflow.contrib.rnn.LSTMCell', 'LSTMCell', (['self.rnn_hidden_units'], {'state_is_tuple': '(False)'}), '(self.rnn_hidden_units, state_is_tuple=False)\n', (2656, 2701), False, 'from tensorflow.contrib.rnn import LSTMCell, DropoutWrapper\n'), ((3064, 3174), 'tensorflow.get_variable', 'tf.get_variable', 
([], {'name': '"""init_cell"""', 'shape': '[self.batch_size, autoregressive_cell.state_size]', 'trainable': '(True)'}), "(name='init_cell', shape=[self.batch_size,\n autoregressive_cell.state_size], trainable=True)\n", (3079, 3174), True, 'import tensorflow as tf\n'), ((6663, 6690), 'numpy.array', 'np.array', (['validation_losses'], {}), '(validation_losses)\n', (6671, 6690), True, 'import numpy as np\n'), ((7100, 7213), 'numpy.reshape', 'np.reshape', (['treatment_probability'], {'newshape': '(self.batch_size, self.max_sequence_length, self.num_treatments)'}), '(treatment_probability, newshape=(self.batch_size, self.\n max_sequence_length, self.num_treatments))\n', (7110, 7213), True, 'import numpy as np\n'), ((7265, 7389), 'utils.predictive_checks_utils.compute_test_statistic_all_timesteps', 'compute_test_statistic_all_timesteps', (['target_treatments', 'treatment_probability', 'self.max_sequence_length', 'predicted_mask'], {}), '(target_treatments,\n treatment_probability, self.max_sequence_length, predicted_mask)\n', (7301, 7389), False, 'from utils.predictive_checks_utils import compute_test_statistic_all_timesteps\n'), ((8549, 8609), 'numpy.zeros', 'np.zeros', ([], {'shape': '(num_replications, self.max_sequence_length)'}), '(shape=(num_replications, self.max_sequence_length))\n', (8557, 8609), True, 'import numpy as np\n'), ((10235, 10312), 'tensorflow.compat.v1.ConfigProto', 'tf.compat.v1.ConfigProto', ([], {'log_device_placement': '(False)', 'device_count': "{'GPU': 0}"}), "(log_device_placement=False, device_count={'GPU': 0})\n", (10259, 10312), True, 'import tensorflow as tf\n'), ((10351, 10428), 'tensorflow.compat.v1.ConfigProto', 'tf.compat.v1.ConfigProto', ([], {'log_device_placement': '(False)', 'device_count': "{'GPU': 1}"}), "(log_device_placement=False, device_count={'GPU': 1})\n", (10375, 10428), True, 'import tensorflow as tf\n'), ((10565, 10608), 'tensorflow.compat.v1.global_variables_initializer', 'tf.compat.v1.global_variables_initializer', 
([], {}), '()\n', (10606, 10608), True, 'import tensorflow as tf\n'), ((10632, 10674), 'tensorflow.compat.v1.local_variables_initializer', 'tf.compat.v1.local_variables_initializer', ([], {}), '()\n', (10672, 10674), True, 'import tensorflow as tf\n'), ((12814, 12855), 'tensorflow.cast', 'tf.cast', (['self.sequence_length', 'tf.float32'], {}), '(self.sequence_length, tf.float32)\n', (12821, 12855), True, 'import tensorflow as tf\n'), ((13802, 13888), 'numpy.zeros', 'np.zeros', ([], {'shape': '(self.batch_size, self.max_sequence_length, self.num_confounders)'}), '(shape=(self.batch_size, self.max_sequence_length, self.\n num_confounders))\n', (13810, 13888), True, 'import numpy as np\n'), ((8886, 8995), 'numpy.reshape', 'np.reshape', (['treatment_replica'], {'newshape': '(self.batch_size, self.max_sequence_length, self.num_treatments)'}), '(treatment_replica, newshape=(self.batch_size, self.\n max_sequence_length, self.num_treatments))\n', (8896, 8995), True, 'import numpy as np\n'), ((9914, 9979), 'tensorflow.distributions.Bernoulli', 'tf.distributions.Bernoulli', ([], {'probs': 'self.treatment_prob_predictions'}), '(probs=self.treatment_prob_predictions)\n', (9940, 9979), True, 'import tensorflow as tf\n'), ((12380, 12402), 'tensorflow.abs', 'tf.abs', (['self.rnn_input'], {}), '(self.rnn_input)\n', (12386, 12402), True, 'import tensorflow as tf\n'), ((12961, 13003), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', (['self.learning_rate'], {}), '(self.learning_rate)\n', (12983, 13003), True, 'import tensorflow as tf\n'), ((8281, 8303), 'tensorflow.abs', 'tf.abs', (['self.rnn_input'], {}), '(self.rnn_input)\n', (8287, 8303), True, 'import tensorflow as tf\n'), ((9529, 9584), 'numpy.less', 'np.less', (['test_statistic_replicas', 'test_statistic_target'], {}), '(test_statistic_replicas, test_statistic_target)\n', (9536, 9584), True, 'import numpy as np\n'), ((12554, 12605), 'tensorflow.clip_by_value', 'tf.clip_by_value', (['treatment_predictions', 
'(1e-10)', '(1.0)'], {}), '(treatment_predictions, 1e-10, 1.0)\n', (12570, 12605), True, 'import tensorflow as tf\n'), ((12669, 12724), 'tensorflow.clip_by_value', 'tf.clip_by_value', (['(1 - treatment_predictions)', '(1e-10)', '(1.0)'], {}), '(1 - treatment_predictions, 1e-10, 1.0)\n', (12685, 12724), True, 'import tensorflow as tf\n')]
|
from collections import namedtuple
from datasets import VUPDataset, NUPDataset, MLMDataset
import numpy as np
from data_utils import read_dataset
from models.VUPScorer import VUPScorer
from models.NUPScorer import NUPScorer
from models.MLMScorer import MLMScorer
import argparse
import json
from tqdm.auto import tqdm
import torch
from torch.utils.data import DataLoader
import pytorch_lightning as pl
# Prefer the GPU when CUDA is available; otherwise compute on the CPU.
device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")
def calc_minmax(model, X_data):
    """Score every example in ``X_data`` with ``model`` and return, per metric,
    the bounds used for min/max normalization.

    Returns a dict mapping each metric name to ``{'min': ..., 'max': ...}``.
    NOTE: despite the key names, the bounds are the 25th and 75th percentiles
    of the observed scores (the literal min/max variant is intentionally
    disabled).
    """
    with torch.no_grad():
        all_scores = [model.predict(x) for x in tqdm(X_data)]

    bounds = {}
    # every score dict is assumed to share the keys of the first one
    for metric in all_scores[0].keys():
        values = [score[metric] for score in all_scores]
        bounds[metric] = {
            'min': np.quantile(values, 0.25).item(),
            'max': np.quantile(values, 0.75).item()
        }
    return bounds
if __name__ == "__main__":
    # CLI entry point: score the training set with an MLM checkpoint and dump
    # per-metric normalization bounds to a JSON file.
    parser = argparse.ArgumentParser(description='Calculating min and max of MLM for normalization')
    parser.add_argument('--weight-path', type=str, default='./checkpoints', help='Path to directory that stores the weight')
    parser.add_argument('--data-path', type=str, required=True, help='Path to the directory of training set')
    parser.add_argument('--output-path', type=str, default='mlm_minmax_score.json', help='Output path for the min max values')
    args = parser.parse_args()

    xdata = read_dataset(args.data_path)

    model = MLMScorer.load_from_checkpoint(checkpoint_path=args.weight_path).to(device)
    model.eval()
    print ('[!] loading model complete')

    scores = calc_minmax(model, xdata)
    print ('[!] normalizing complete')

    # the with-block closes the file; the original also called f.close()
    # redundantly inside the block
    with open(args.output_path, 'w') as f:
        json.dump(scores, f, indent=4)
    print ('[!] complete')
|
[
"numpy.quantile",
"argparse.ArgumentParser",
"data_utils.read_dataset",
"models.MLMScorer.MLMScorer.load_from_checkpoint",
"json.dumps",
"tqdm.auto.tqdm",
"torch.cuda.is_available",
"torch.no_grad"
] |
[((1130, 1223), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Calculating min and max of MLM for normalizatiion"""'}), "(description=\n 'Calculating min and max of MLM for normalizatiion')\n", (1153, 1223), False, 'import argparse\n'), ((1625, 1653), 'data_utils.read_dataset', 'read_dataset', (['args.data_path'], {}), '(args.data_path)\n', (1637, 1653), False, 'from data_utils import read_dataset\n'), ((437, 462), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (460, 462), False, 'import torch\n'), ((534, 549), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (547, 549), False, 'import torch\n'), ((568, 580), 'tqdm.auto.tqdm', 'tqdm', (['X_data'], {}), '(X_data)\n', (572, 580), False, 'from tqdm.auto import tqdm\n'), ((1667, 1731), 'models.MLMScorer.MLMScorer.load_from_checkpoint', 'MLMScorer.load_from_checkpoint', ([], {'checkpoint_path': 'args.weight_path'}), '(checkpoint_path=args.weight_path)\n', (1697, 1731), False, 'from models.MLMScorer import MLMScorer\n'), ((1940, 1968), 'json.dumps', 'json.dumps', (['scores'], {'indent': '(4)'}), '(scores, indent=4)\n', (1950, 1968), False, 'import json\n'), ((900, 922), 'numpy.quantile', 'np.quantile', (['arr', '(0.25)'], {}), '(arr, 0.25)\n', (911, 922), True, 'import numpy as np\n'), ((946, 968), 'numpy.quantile', 'np.quantile', (['arr', '(0.75)'], {}), '(arr, 0.75)\n', (957, 968), True, 'import numpy as np\n')]
|
import unittest
class TestReadBatchfile(unittest.TestCase):
def test_read_batchfile(self):
# self.assertEqual(expected, read_batchfile(pythonpath, file_ending))
assert True # TODO: implement your test here
class TestBatchCommandProcessor(unittest.TestCase):
def test_parse_file(self):
# batch_command_processor = BatchCommandProcessor()
# self.assertEqual(expected, batch_command_processor.parse_file(pythonpath))
assert True # TODO: implement your test here
class TestTbFilename(unittest.TestCase):
def test_tb_filename(self):
# self.assertEqual(expected, tb_filename(tb))
assert True # TODO: implement your test here
class TestTbIter(unittest.TestCase):
def test_tb_iter(self):
# self.assertEqual(expected, tb_iter(tb))
assert True # TODO: implement your test here
class TestBatchCodeProcessor(unittest.TestCase):
def test_code_exec(self):
# batch_code_processor = BatchCodeProcessor()
# self.assertEqual(expected, batch_code_processor.code_exec(codedict, extra_environ, debug))
assert True # TODO: implement your test here
def test_parse_file(self):
# batch_code_processor = BatchCodeProcessor()
# self.assertEqual(expected, batch_code_processor.parse_file(pythonpath))
assert True # TODO: implement your test here
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main"
] |
[((1400, 1415), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1413, 1415), False, 'import unittest\n')]
|
from __future__ import annotations
import unittest
from src.austin_heller_repo.socket_queued_message_framework import ClientMessenger, ServerMessenger, ClientServerMessage, ClientServerMessageTypeEnum, Structure, StructureStateEnum, StructureFactory, StructureTransitionException, StructureInfluence, SourceTypeEnum, ClientMessengerFactory, ServerMessengerFactory
from austin_heller_repo.socket import ClientSocketFactory, ServerSocketFactory, ReadWriteSocketClosedException
from austin_heller_repo.common import HostPointer
from austin_heller_repo.kafka_manager import KafkaSequentialQueueFactory, KafkaManager, KafkaWrapper, KafkaManagerFactory
from austin_heller_repo.threading import start_thread, Semaphore, SingletonMemorySequentialQueueFactory
from typing import List, Tuple, Dict, Callable, Type
import uuid
import time
from datetime import datetime
from abc import ABC, abstractmethod
import multiprocessing as mp
import matplotlib.pyplot as plt
import math
# Suite-wide switches; all disabled by default so test runs stay quiet.
is_socket_debug_active = False  # verbose output from the raw socket layer
is_client_messenger_debug_active = False  # verbose output from ClientMessenger
is_server_messenger_debug_active = False  # verbose output from ServerMessenger
is_kafka_debug_active = False  # verbose output from the Kafka manager
is_kafka_sequential_queue = False  # back the server queue with Kafka instead of in-memory
is_plotted = False  # presumably enables matplotlib plotting -- usage not in this chunk
def get_default_local_host_pointer() -> HostPointer:
	"""Return the local host/port the test ServerMessenger listens on."""
	return HostPointer(host_address="0.0.0.0", host_port=36429)
def get_default_kafka_host_pointer() -> HostPointer:
	"""Return the host/port of the local Kafka broker used by these tests."""
	return HostPointer(host_address="0.0.0.0", host_port=9092)
def get_default_kafka_manager_factory() -> KafkaManagerFactory:
	"""Build a KafkaManagerFactory wired to the local broker with the
	polling / single-partition settings shared by this suite."""
	wrapper = KafkaWrapper(
		host_pointer=get_default_kafka_host_pointer()
	)
	return KafkaManagerFactory(
		kafka_wrapper=wrapper,
		read_polling_seconds=0,
		is_cancelled_polling_seconds=0.01,
		new_topic_partitions_total=1,
		new_topic_replication_factor=1,
		remove_topic_cluster_propagation_blocking_timeout_seconds=30,
		is_debug=is_kafka_debug_active
	)
def get_default_client_messenger_factory() -> ClientMessengerFactory:
	"""Build a ClientMessengerFactory that connects to the local test server."""
	socket_factory = ClientSocketFactory(
		to_server_packet_bytes_length=4096,
		is_debug=is_socket_debug_active
	)
	return ClientMessengerFactory(
		client_socket_factory=socket_factory,
		server_host_pointer=get_default_local_host_pointer(),
		client_server_message_class=BaseClientServerMessage,
		is_debug=is_client_messenger_debug_active
	)
def get_default_server_messenger_factory() -> ServerMessengerFactory:
	"""Build the ServerMessengerFactory used by the suite.

	When ``is_kafka_sequential_queue`` is set, a fresh Kafka topic is created
	(one per call, named by a random UUID, to keep runs isolated) and a
	Kafka-backed sequential queue factory is constructed; otherwise an
	in-memory singleton queue factory is used.
	"""
	if is_kafka_sequential_queue:
		topic_name = str(uuid.uuid4())
		kafka_manager = get_default_kafka_manager_factory().get_kafka_manager()
		kafka_manager.add_topic(
			topic_name=topic_name
		).get_result()
		sequential_queue_factory = KafkaSequentialQueueFactory(
			kafka_manager=kafka_manager,
			kafka_topic_name=topic_name
		)
	else:
		sequential_queue_factory = SingletonMemorySequentialQueueFactory()
	# NOTE(review): sequential_queue_factory is built above but never handed to
	# ServerMessengerFactory below -- confirm whether the factory should receive
	# it or whether only the topic-creation side effect is intended.
	return ServerMessengerFactory(
		server_socket_factory_and_local_host_pointer_per_source_type={
			BaseSourceTypeEnum.Main: (
				ServerSocketFactory(
					to_client_packet_bytes_length=4096,
					listening_limit_total=10,
					accept_timeout_seconds=10.0,
					is_debug=is_socket_debug_active
				),
				get_default_local_host_pointer()
			)
		},
		client_server_message_class=BaseClientServerMessage,
		source_type_enum_class=BaseSourceTypeEnum,
		server_messenger_source_type=BaseSourceTypeEnum.ServerMessenger,
		structure_factory=ButtonStructureFactory(),
		is_debug=is_server_messenger_debug_active
	)
class BaseClientServerMessageTypeEnum(ClientServerMessageTypeEnum):
	"""Every message type exchanged in this test suite; each member's value is
	the wire-level type tag used during (de)serialization."""
	HelloWorld = "hello_world"  # basic test
	Announce = "announce"  # announces name to structure
	AnnounceFailed = "announce_failed"  # announce failed to apply to structure
	PressButton = "press_button"  # structural influence, three presses cause broadcast of transmission to users
	ResetButton = "reset_button"  # structural_influence, resets number of presses and informs button pressers that it was reset
	ResetTransmission = "reset_transmission"  # directed to specific users that pressed the button
	ThreePressesTransmission = "three_presses_transmission"  # broadcasts to all users that the button was pressed three times and then resets the button
	PingRequest = "ping_request"  # pings the server and gets a response
	PingResponse = "ping_response"  # the response from the ping request
	EchoRequest = "echo_request"  # records the messages that should be echoed back
	EchoResponse = "echo_response"  # the response containing the echo message
	ErrorOnGetClientServerMessageType = "error_on_get_client_server_message_type"
	ErrorRequest = "error_request"  # a request that throws an exception as defined in the constructor
	ErrorResponse = "error_response"  # the response that will throw a predefined exception
	PowerButton = "power_button"  # increments a child structure by the number of presses processed by the parent structure
	PowerOverloadTransmission = "power_overload_transmission"  # if the power button is pressed three times at any stage of normal button presses an overload transmission is sent out to all clients involved
	PowerButtonFailed = "power_button_failed"  # power was already overloaded when attempted
	TimerRequest = "timer_request"  # set a timer for a later response
	TimerResponse = "timer_response"  # a response scheduled by the timer_request
class BaseSourceTypeEnum(SourceTypeEnum):
	"""Identifies which side of the framework a message originates from."""
	Main = "main"
	ServerMessenger = "server_messenger"
class BaseClientServerMessage(ClientServerMessage, ABC):
	"""Abstract base for every message in this suite; binds the shared type enum."""
	@classmethod
	def get_client_server_message_type_class(cls) -> Type[ClientServerMessageTypeEnum]:
		# all concrete subclasses draw their message type from this single enum
		return BaseClientServerMessageTypeEnum
class HelloWorldBaseClientServerMessage(BaseClientServerMessage):
	"""Minimal broadcast message used for basic connectivity tests."""

	def __init__(self):
		super().__init__(
			destination_uuid=None
		)

	@classmethod
	def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
		return BaseClientServerMessageTypeEnum.HelloWorld

	def to_json(self) -> Dict:
		payload = super().to_json()
		# broadcast message: there is no specific destination to serialize
		payload.pop("destination_uuid")
		return payload

	def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
		# structural errors for this message produce no reply
		return None
class AnnounceBaseClientServerMessage(BaseClientServerMessage):
	"""Announces a client's name to the structure (broadcast, no destination)."""

	def __init__(self, *, name: str):
		super().__init__(
			destination_uuid=None
		)
		self.__name = name

	def get_name(self) -> str:
		return self.__name

	@classmethod
	def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
		return BaseClientServerMessageTypeEnum.Announce

	def to_json(self) -> Dict:
		payload = super().to_json()
		payload.pop("destination_uuid")
		payload["name"] = self.__name
		return payload

	def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
		# an announce can legitimately fail (wrong structure state); log and
		# send a directed failure back to the announcing client
		print(f"{datetime.utcnow()}: AnnounceBaseClientServerMessage: get_structural_error_client_server_message_response: structure state: {structure_transition_exception.get_structure_state()}")
		return AnnounceFailedBaseClientServerMessage(
			destination_uuid=destination_uuid
		)
class AnnounceFailedBaseClientServerMessage(BaseClientServerMessage):
	"""Directed reply telling a client that its announce was rejected."""

	def __init__(self, *, destination_uuid: str):
		super().__init__(
			destination_uuid=destination_uuid
		)

	@classmethod
	def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
		return BaseClientServerMessageTypeEnum.AnnounceFailed

	def to_json(self) -> Dict:
		# directed message: destination_uuid stays in the serialized payload
		return super().to_json()

	def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
		return None
class PressButtonBaseClientServerMessage(BaseClientServerMessage):
	"""Broadcast message indicating that a client pressed the button."""

	def __init__(self):
		super().__init__(
			destination_uuid=None
		)

	@classmethod
	def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
		return BaseClientServerMessageTypeEnum.PressButton

	def to_json(self) -> Dict:
		payload = super().to_json()
		# broadcast message: no destination to serialize
		payload.pop("destination_uuid")
		return payload

	def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
		return None
class ResetButtonBaseClientServerMessage(BaseClientServerMessage):
	"""Broadcast message asking the structure to reset its press count."""

	def __init__(self):
		super().__init__(
			destination_uuid=None
		)

	@classmethod
	def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
		return BaseClientServerMessageTypeEnum.ResetButton

	def to_json(self) -> Dict:
		payload = super().to_json()
		# broadcast message: no destination to serialize
		payload.pop("destination_uuid")
		return payload

	def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
		return None
class ResetTransmissionBaseClientServerMessage(BaseClientServerMessage):
	"""Directed notice to a button presser that the button was reset."""

	def __init__(self, *, destination_uuid: str):
		super().__init__(
			destination_uuid=destination_uuid
		)

	@classmethod
	def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
		return BaseClientServerMessageTypeEnum.ResetTransmission

	def to_json(self) -> Dict:
		# directed message: destination_uuid stays in the serialized payload
		return super().to_json()

	def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
		return None
class ThreePressesTransmissionBaseClientServerMessage(BaseClientServerMessage):
	"""Directed notice that the button reached three presses; carries the
	current power reading as a string."""

	def __init__(self, *, power: str, destination_uuid: str):
		super().__init__(
			destination_uuid=destination_uuid
		)
		self.__power = power

	def get_power(self) -> str:
		return self.__power

	@classmethod
	def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
		return BaseClientServerMessageTypeEnum.ThreePressesTransmission

	def to_json(self) -> Dict:
		payload = super().to_json()
		payload["power"] = self.__power
		return payload

	def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
		return None
class PingRequestBaseClientServerMessage(BaseClientServerMessage):
	"""Broadcast request that asks the server for a ping response."""

	def __init__(self):
		super().__init__(
			destination_uuid=None
		)

	@classmethod
	def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
		return BaseClientServerMessageTypeEnum.PingRequest

	def to_json(self) -> Dict:
		payload = super().to_json()
		# broadcast message: no destination to serialize
		payload.pop("destination_uuid")
		return payload

	def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
		return None
class PingResponseBaseClientServerMessage(BaseClientServerMessage):
	"""Directed reply to a PingRequest, carrying the request's ordinal index."""

	def __init__(self, *, ping_index: int, destination_uuid: str):
		super().__init__(
			destination_uuid=destination_uuid
		)
		self.__ping_index = ping_index

	def get_ping_index(self) -> int:
		return self.__ping_index

	@classmethod
	def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
		return BaseClientServerMessageTypeEnum.PingResponse

	def to_json(self) -> Dict:
		payload = super().to_json()
		payload["ping_index"] = self.__ping_index
		return payload

	def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
		return None
class EchoRequestBaseClientServerMessage(BaseClientServerMessage):
	"""Broadcast request asking the server to echo ``message`` back;
	``is_ordered`` marks echoes whose ordering must be preserved."""

	def __init__(self, *, message: str, is_ordered: bool):
		super().__init__(
			destination_uuid=None
		)
		self.__message = message
		self.__is_ordered = is_ordered

	def get_message(self) -> str:
		return self.__message

	@classmethod
	def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
		return BaseClientServerMessageTypeEnum.EchoRequest

	def to_json(self) -> Dict:
		payload = super().to_json()
		payload.pop("destination_uuid")
		payload["message"] = self.__message
		payload["is_ordered"] = self.__is_ordered
		return payload

	def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
		return None
class EchoResponseBaseClientServerMessage(BaseClientServerMessage):
	"""Directed reply carrying the echoed message back to the requester."""

	def __init__(self, *, message: str, destination_uuid: str):
		super().__init__(
			destination_uuid=destination_uuid
		)
		self.__message = message

	def get_message(self) -> str:
		return self.__message

	@classmethod
	def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
		return BaseClientServerMessageTypeEnum.EchoResponse

	def to_json(self) -> Dict:
		payload = super().to_json()
		payload["message"] = self.__message
		return payload

	def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
		return None
class ErrorRequestBaseClientServerMessage(BaseClientServerMessage):
	"""Test message whose lifecycle hooks can be made to raise on demand.
	Each ``*_exception`` argument, when not None, makes the corresponding
	method (constructor, to_json, or the structural-error hook) raise with
	that text.  ``is_constructor_exception_to_set`` arms the constructor so
	the exception fires only on the NEXT construction, i.e. after one
	serialize/deserialize round trip (see the end of __init__ and to_json).
	"""
	def __init__(self, *, is_constructor_exception_to_set: str = None, constructor_exception: str = None, to_json_exception: str = None, get_structural_error_client_server_message_response_exception: str = None, response_constructor_arguments: Dict = None):
		super().__init__(
			destination_uuid=None
		)
		self.__is_constructor_exception_to_set = is_constructor_exception_to_set
		self.__constructor_exception = constructor_exception
		self.__to_json_exception = to_json_exception
		self.__get_structural_error_client_server_message_response_exception = get_structural_error_client_server_message_response_exception
		self.__response_constructor_arguments = response_constructor_arguments
		# raise immediately when this instance was constructed with a live exception
		if self.__constructor_exception is not None:
			raise Exception(self.__constructor_exception)
		# arm for the next round trip: the "to set" value becomes the live
		# constructor_exception that to_json() will serialize
		if self.__is_constructor_exception_to_set is not None:
			self.__constructor_exception = self.__is_constructor_exception_to_set
			self.__is_constructor_exception_to_set = None
	def get_response_constructor_arguments(self) -> Dict:
		# kwargs the test harness uses to build the corresponding response message
		return self.__response_constructor_arguments
	@classmethod
	def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
		return BaseClientServerMessageTypeEnum.ErrorRequest
	def to_json(self) -> Dict:
		# optionally fail during serialization
		if self.__to_json_exception is not None:
			raise Exception(self.__to_json_exception)
		json_object = super().to_json()
		del json_object["destination_uuid"]
		json_object["is_constructor_exception_to_set"] = self.__is_constructor_exception_to_set
		json_object["constructor_exception"] = self.__constructor_exception
		json_object["to_json_exception"] = self.__to_json_exception
		json_object["get_structural_error_client_server_message_response_exception"] = self.__get_structural_error_client_server_message_response_exception
		json_object["response_constructor_arguments"] = self.__response_constructor_arguments
		return json_object
	def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
		# optionally fail while producing a structural-error response
		if self.__get_structural_error_client_server_message_response_exception is not None:
			raise Exception(self.__get_structural_error_client_server_message_response_exception)
		return None
class ErrorResponseBaseClientServerMessage(BaseClientServerMessage):
	"""Directed counterpart of ErrorRequestBaseClientServerMessage: its hooks
	raise on demand, and ``is_constructor_exception_to_set`` arms the
	constructor exception to fire on the NEXT construction (after one
	serialize/deserialize round trip).
	"""
	def __init__(self, *, destination_uuid: str, is_constructor_exception_to_set: str = None, constructor_exception: str = None, to_json_exception: str = None, get_structural_error_client_server_message_response_exception: str = None):
		super().__init__(
			destination_uuid=destination_uuid
		)
		self.__is_constructor_exception_to_set = is_constructor_exception_to_set
		self.__constructor_exception = constructor_exception
		self.__to_json_exception = to_json_exception
		self.__get_structural_error_client_server_message_response_exception = get_structural_error_client_server_message_response_exception
		# raise immediately when constructed with a live exception
		if self.__constructor_exception is not None:
			raise Exception(self.__constructor_exception)
		# arm for the next round trip (serialized by to_json below)
		if self.__is_constructor_exception_to_set is not None:
			self.__constructor_exception = self.__is_constructor_exception_to_set
			self.__is_constructor_exception_to_set = None
	@classmethod
	def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
		return BaseClientServerMessageTypeEnum.ErrorResponse
	def to_json(self) -> Dict:
		# optionally fail during serialization
		if self.__to_json_exception is not None:
			raise Exception(self.__to_json_exception)
		json_object = super().to_json()
		json_object["is_constructor_exception_to_set"] = self.__is_constructor_exception_to_set
		json_object["constructor_exception"] = self.__constructor_exception
		json_object["to_json_exception"] = self.__to_json_exception
		json_object["get_structural_error_client_server_message_response_exception"] = self.__get_structural_error_client_server_message_response_exception
		return json_object
	def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
		# optionally fail while producing a structural-error response
		if self.__get_structural_error_client_server_message_response_exception is not None:
			raise Exception(self.__get_structural_error_client_server_message_response_exception)
		return None
class PowerButtonBaseClientServerMessage(BaseClientServerMessage):
	"""Broadcast press of the power button; anonymous presses opt out of
	overload notifications."""

	def __init__(self, *, is_anonymous: bool):
		super().__init__(
			destination_uuid=None
		)
		# anonymous pressers should not receive an overload transmission caused
		# by this message
		self.__is_anonymous = is_anonymous

	def is_anonymous(self) -> bool:
		return self.__is_anonymous

	@classmethod
	def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
		return BaseClientServerMessageTypeEnum.PowerButton

	def to_json(self) -> Dict:
		payload = super().to_json()
		payload.pop("destination_uuid")
		payload["is_anonymous"] = self.__is_anonymous
		return payload

	def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
		# pressing while the structure rejects the transition yields a directed failure
		return PowerButtonFailedBaseClientServerMessage(
			destination_uuid=destination_uuid
		)
class PowerOverloadTransmissionBaseClientServerMessage(BaseClientServerMessage):
	"""Directed notice that the power structure overloaded."""

	def __init__(self, *, destination_uuid: str):
		super().__init__(
			destination_uuid=destination_uuid
		)

	@classmethod
	def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
		return BaseClientServerMessageTypeEnum.PowerOverloadTransmission

	def to_json(self) -> Dict:
		# directed message: destination_uuid stays in the serialized payload
		return super().to_json()

	def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
		return None
class PowerButtonFailedBaseClientServerMessage(BaseClientServerMessage):
	"""Directed notice that a power-button press was rejected."""

	def __init__(self, *, destination_uuid: str):
		super().__init__(
			destination_uuid=destination_uuid
		)

	@classmethod
	def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
		return BaseClientServerMessageTypeEnum.PowerButtonFailed

	def to_json(self) -> Dict:
		# directed message: destination_uuid stays in the serialized payload
		return super().to_json()

	def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
		return None
class TimerRequestBaseClientServerMessage(BaseClientServerMessage):
	"""Broadcast request asking the server to send ``message`` back after
	``seconds`` have elapsed."""

	def __init__(self, *, message: str, seconds: float):
		super().__init__(
			destination_uuid=None
		)
		self.__message = message
		self.__seconds = seconds

	def get_message(self) -> str:
		return self.__message

	def get_seconds(self) -> float:
		return self.__seconds

	@classmethod
	def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
		return BaseClientServerMessageTypeEnum.TimerRequest

	def to_json(self) -> Dict:
		payload = super().to_json()
		payload.pop("destination_uuid")
		payload["message"] = self.__message
		payload["seconds"] = self.__seconds
		return payload

	def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
		return None
class TimerResponseBaseClientServerMessage(BaseClientServerMessage):
	"""Directed reply delivered when a TimerRequest's delay elapses."""

	def __init__(self, *, message: str, destination_uuid: str):
		super().__init__(
			destination_uuid=destination_uuid
		)
		self.__message = message

	def get_message(self) -> str:
		return self.__message

	@classmethod
	def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
		return BaseClientServerMessageTypeEnum.TimerResponse

	def to_json(self) -> Dict:
		payload = super().to_json()
		payload["message"] = self.__message
		return payload

	def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
		return None
class PowerStructureStateEnum(StructureStateEnum):
	"""States of PowerStructure: below three presses, exactly three, above three."""
	Underpowered = "underpower"
	Powered = "powered"
	Overpowered = "overpowered"
class PowerStructure(Structure):
	"""Structure that counts power-button presses and notifies interested
	sources when the power overloads (on the fourth press)."""
	def __init__(self):
		super().__init__(
			states=PowerStructureStateEnum,
			initial_state=PowerStructureStateEnum.Underpowered
		)
		# total presses processed so far
		self.__power_total = 0
		# sources that want a PowerOverloadTransmission when overload occurs
		self.__source_uuids_to_inform_on_power_overload = []  # type: List[str]
		# the button may be pressed while underpowered or already powered;
		# both transitions route to the same handler
		self.add_transition(
			client_server_message_type=BaseClientServerMessageTypeEnum.PowerButton,
			from_source_type=BaseSourceTypeEnum.Main,
			start_structure_state=PowerStructureStateEnum.Underpowered,
			end_structure_state=PowerStructureStateEnum.Underpowered,
			on_transition=self.__power_button_pressed
		)
		self.add_transition(
			client_server_message_type=BaseClientServerMessageTypeEnum.PowerButton,
			from_source_type=BaseSourceTypeEnum.Main,
			start_structure_state=PowerStructureStateEnum.Powered,
			end_structure_state=PowerStructureStateEnum.Powered,
			on_transition=self.__power_button_pressed
		)
	def on_client_connected(self, *, source_uuid: str, source_type: SourceTypeEnum, tag_json: Dict):
		# this structure is a child structure; no client should connect to it directly
		raise Exception(f"Unexpected client connected: {source_uuid}: {source_type}: {tag_json}")
	def add_source_uuid_for_power_overload_transmission(self, *, source_uuid: str):
		# register a source for overload notification at most once
		if source_uuid not in self.__source_uuids_to_inform_on_power_overload:
			self.__source_uuids_to_inform_on_power_overload.append(source_uuid)
	def get_power(self) -> str:
		# human-readable power level derived from the press count
		if self.__power_total < 3:
			return "underpowered"
		elif self.__power_total == 3:
			return "powered"
		else:
			return "overpowered"
	def __power_button_pressed(self, structure_influence: StructureInfluence):
		# transition handler: count the press, track non-anonymous pressers,
		# and on the fourth press broadcast the overload to tracked sources
		print(f"{datetime.utcnow()}: PowerStructure: __power_button_pressed: start")
		print(f"get state: {self.get_state()}")
		if structure_influence.get_source_type() != BaseSourceTypeEnum.Main:
			raise Exception(f"Unexpected source type: {structure_influence.get_source_type()}.")
		power_button = structure_influence.get_client_server_message()  # type: PowerButtonBaseClientServerMessage
		source_uuid = structure_influence.get_source_uuid()
		# anonymous presses do not subscribe the presser to overload notifications
		if not power_button.is_anonymous():
			self.add_source_uuid_for_power_overload_transmission(
				source_uuid=source_uuid
			)
		self.__power_total += 1
		if self.__power_total == 3:
			# set the state to "powered"
			self.set_state(
				structure_state=PowerStructureStateEnum.Powered
			)
		elif self.__power_total == 4:
			# set the state to "overpowered"
			# NOTE this will also permit an impossible state change if another power button message is sent
			self.set_state(
				structure_state=PowerStructureStateEnum.Overpowered
			)
			# notify every tracked presser exactly once, then clear the list
			for source_uuid in self.__source_uuids_to_inform_on_power_overload:
				self.send_client_server_message(
					client_server_message=PowerOverloadTransmissionBaseClientServerMessage(
						destination_uuid=source_uuid
					)
				)
			self.__source_uuids_to_inform_on_power_overload.clear()
		print(f"{datetime.utcnow()}: PowerStructure: __power_button_pressed: end")
	def dispose(self):
		# no resources of its own to release
		pass
class ButtonStructureStateEnum(StructureStateEnum):
    """States for ButtonStructure: how many unreset button presses have occurred."""
    ZeroPresses = "zero_presses"
    OnePress = "one_press"
    TwoPresses = "two_presses"
    ThreePresses = "three_presses"
class ButtonStructure(Structure):
    """Top-level structure for the button demo server.

    Counts button presses (0..3), handles announce/reset/ping/echo/error/timer
    requests, and delegates power-button presses to a child PowerStructure.
    On the third press it transmits a ThreePressesTransmission; observing that
    outgoing transmission resets the press state back to zero.
    """

    def __init__(self):
        super().__init__(
            states=ButtonStructureStateEnum,
            initial_state=ButtonStructureStateEnum.ZeroPresses
        )
        # UUIDs of connected Main clients.
        self.__main_source_uuids = []  # type: List[str]
        # Clients that pressed the button since the last reset; they receive
        # the ResetTransmission when a reset arrives.
        self.__pressed_button_source_uuids = []  # type: List[str]
        # Announced display name per client UUID (used only for log output).
        self.__name_per_client_uuid = {}  # type: Dict[str, str]
        # Presses since the last reset / total pings handled so far.
        self.__presses_total = 0
        self.__pings_total = 0
        # Child structure that tracks power-button presses independently.
        self.__power_structure = PowerStructure()
        self.register_child_structure(
            structure=self.__power_structure
        )
        # Announce: only accepted while no presses are pending.
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.Announce,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.ZeroPresses,
            end_structure_state=ButtonStructureStateEnum.ZeroPresses,
            on_transition=self.__name_announced
        )
        # PressButton: advances the press count 0 -> 1 -> 2 -> 3.
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.PressButton,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.ZeroPresses,
            end_structure_state=ButtonStructureStateEnum.OnePress,
            on_transition=self.__button_pressed
        )
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.PressButton,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.OnePress,
            end_structure_state=ButtonStructureStateEnum.TwoPresses,
            on_transition=self.__button_pressed
        )
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.PressButton,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.TwoPresses,
            end_structure_state=ButtonStructureStateEnum.ThreePresses,
            on_transition=self.__button_pressed
        )
        # ResetButton: returns any press count below three back to zero.
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.ResetButton,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.ZeroPresses,
            end_structure_state=ButtonStructureStateEnum.ZeroPresses,
            on_transition=self.__button_reset
        )
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.ResetButton,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.OnePress,
            end_structure_state=ButtonStructureStateEnum.ZeroPresses,
            on_transition=self.__button_reset
        )
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.ResetButton,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.TwoPresses,
            end_structure_state=ButtonStructureStateEnum.ZeroPresses,
            on_transition=self.__button_reset
        )
        # The server's own outgoing ThreePressesTransmission (source type
        # ServerMessenger) drives ThreePresses back to ZeroPresses.
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.ThreePressesTransmission,
            from_source_type=BaseSourceTypeEnum.ServerMessenger,
            start_structure_state=ButtonStructureStateEnum.ThreePresses,
            end_structure_state=ButtonStructureStateEnum.ZeroPresses,
            on_transition=self.__three_presses_transmission_sent
        )
        # Ping / echo / error / timer requests: only handled at zero presses.
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.PingRequest,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.ZeroPresses,
            end_structure_state=ButtonStructureStateEnum.ZeroPresses,
            on_transition=self.__ping_requested
        )
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.EchoRequest,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.ZeroPresses,
            end_structure_state=ButtonStructureStateEnum.ZeroPresses,
            on_transition=self.__echo_requested
        )
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.ErrorRequest,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.ZeroPresses,
            end_structure_state=ButtonStructureStateEnum.ZeroPresses,
            on_transition=self.__error_requested
        )
        # PowerButton: accepted in states 0..2 without changing the press
        # state; the influence is forwarded to the child PowerStructure.
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.PowerButton,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.ZeroPresses,
            end_structure_state=ButtonStructureStateEnum.ZeroPresses,
            on_transition=self.__power_button_pressed
        )
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.PowerButton,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.OnePress,
            end_structure_state=ButtonStructureStateEnum.OnePress,
            on_transition=self.__power_button_pressed
        )
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.PowerButton,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.TwoPresses,
            end_structure_state=ButtonStructureStateEnum.TwoPresses,
            on_transition=self.__power_button_pressed
        )
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.TimerRequest,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.ZeroPresses,
            end_structure_state=ButtonStructureStateEnum.ZeroPresses,
            on_transition=self.__timer_requested
        )

    def on_client_connected(self, *, source_uuid: str, source_type: SourceTypeEnum, tag_json: Dict):
        # Only Main clients may connect; anything else is a programming error.
        if source_type == BaseSourceTypeEnum.Main:
            self.__main_source_uuids.append(source_uuid)
        else:
            raise Exception(f"Unexpected client connected: {source_uuid}: {source_type}: {tag_json}")

    def __name_announced(self, structure_influence: StructureInfluence):
        """Record the announced display name for the sending client."""
        if structure_influence.get_source_type() != BaseSourceTypeEnum.Main:
            raise Exception(f"Unexpected source type: {structure_influence.get_source_type()}.")
        announce = structure_influence.get_client_server_message()  # type: AnnounceBaseClientServerMessage
        source_uuid = structure_influence.get_source_uuid()
        self.__name_per_client_uuid[source_uuid] = announce.get_name()

    def __button_pressed(self, structure_influence: StructureInfluence):
        """Count a press; on the third press, transmit to the third presser.

        Note that only the client whose press reached three receives the
        ThreePressesTransmission, not every presser.
        """
        if structure_influence.get_source_type() != BaseSourceTypeEnum.Main:
            raise Exception(f"Unexpected source type: {structure_influence.get_source_type()}.")
        source_uuid = structure_influence.get_source_uuid()
        # Track each distinct presser so a later reset can notify them.
        if source_uuid not in self.__pressed_button_source_uuids:
            self.__pressed_button_source_uuids.append(source_uuid)
        # Prefer the announced name in the log line when one is known.
        if source_uuid in self.__name_per_client_uuid:
            print(f"button pressed by {self.__name_per_client_uuid[source_uuid]}")
        else:
            print(f"button pressed by {source_uuid}")
        self.__presses_total += 1
        if self.__presses_total == 3:
            self.send_client_server_message(
                client_server_message=ThreePressesTransmissionBaseClientServerMessage(
                    destination_uuid=source_uuid,
                    power=self.__power_structure.get_power()
                )
            )

    def __button_reset(self, structure_influence: StructureInfluence):
        """Send a ResetTransmission to every presser and forget them."""
        if structure_influence.get_source_type() != BaseSourceTypeEnum.Main:
            raise Exception(f"Unexpected source type: {structure_influence.get_source_type()}.")
        for source_uuid in self.__pressed_button_source_uuids:
            client_server_message = ResetTransmissionBaseClientServerMessage(
                destination_uuid=source_uuid
            )
            self.send_client_server_message(
                client_server_message=client_server_message
            )
        self.__pressed_button_source_uuids.clear()
        # NOTE(review): __presses_total is not reset here; the transition
        # table alone returns the state to ZeroPresses — confirm intended.

    def __three_presses_transmission_sent(self, structure_influence: StructureInfluence):
        """Clear presser tracking once the three-presses transmission went out."""
        if structure_influence.get_source_type() != BaseSourceTypeEnum.ServerMessenger:
            raise Exception(f"Unexpected source type: {structure_influence.get_source_type()}.")
        self.__pressed_button_source_uuids.clear()

    def __ping_requested(self, structure_influence: StructureInfluence):
        """Reply with a PingResponse carrying a 0-based running ping index."""
        if structure_influence.get_source_type() != BaseSourceTypeEnum.Main:
            raise Exception(f"Unexpected source type: {structure_influence.get_source_type()}.")
        source_uuid = structure_influence.get_source_uuid()
        # Send first, then increment: the first response carries index 0.
        self.send_client_server_message(
            client_server_message=PingResponseBaseClientServerMessage(
                destination_uuid=source_uuid,
                ping_index=self.__pings_total
            )
        )
        self.__pings_total += 1

    def __echo_requested(self, structure_influence: StructureInfluence):
        """Reply with an EchoResponse containing the request's message."""
        if structure_influence.get_source_type() != BaseSourceTypeEnum.Main:
            raise Exception(f"Unexpected source type: {structure_influence.get_source_type()}.")
        echo_request = structure_influence.get_client_server_message()  # type: EchoRequestBaseClientServerMessage
        source_uuid = structure_influence.get_source_uuid()
        message = echo_request.get_message()
        self.send_client_server_message(
            client_server_message=EchoResponseBaseClientServerMessage(
                message=message,
                destination_uuid=source_uuid
            )
        )

    def __error_requested(self, structure_influence: StructureInfluence):
        """Construct an ErrorResponse from the request's constructor args.

        The destination is always forced to the requesting client, overriding
        any "destination_uuid" the request may have supplied.
        """
        if structure_influence.get_source_type() != BaseSourceTypeEnum.Main:
            raise Exception(f"Unexpected source type: {structure_influence.get_source_type()}.")
        error_request = structure_influence.get_client_server_message()  # type: ErrorRequestBaseClientServerMessage
        source_uuid = structure_influence.get_source_uuid()
        constructor_arguments = error_request.get_response_constructor_arguments()
        if constructor_arguments is None:
            constructor_arguments = {}
        constructor_arguments["destination_uuid"] = source_uuid
        self.send_client_server_message(
            client_server_message=ErrorResponseBaseClientServerMessage(
                **constructor_arguments
            )
        )

    def __power_button_pressed(self, structure_influence: StructureInfluence):
        """Forward a power-button press to the child PowerStructure."""
        if structure_influence.get_source_type() != BaseSourceTypeEnum.Main:
            raise Exception(f"Unexpected source type: {structure_influence.get_source_type()}.")
        self.__power_structure.update_structure(
            structure_influence=structure_influence
        )

    def __timer_requested(self, structure_influence: StructureInfluence):
        """Spawn a background thread that replies after the requested delay."""
        if structure_influence.get_source_type() != BaseSourceTypeEnum.Main:
            raise Exception(f"Unexpected source type: {structure_influence.get_source_type()}.")
        timer_request = structure_influence.get_client_server_message()  # type: TimerRequestBaseClientServerMessage
        source_uuid = structure_influence.get_source_uuid()
        def timer_thread_method():
            # Runs off the main structure thread: sleep for the requested
            # duration, then send the TimerResponse back to the requester.
            nonlocal timer_request
            nonlocal source_uuid
            time.sleep(timer_request.get_seconds())
            self.send_client_server_message(
                client_server_message=TimerResponseBaseClientServerMessage(
                    destination_uuid=source_uuid,
                    message=timer_request.get_message()
                )
            )
        start_thread(timer_thread_method)

    def dispose(self):
        # Release the child structure's resources.
        self.__power_structure.dispose()
class ButtonStructureFactory(StructureFactory):
    """Factory that produces a fresh ButtonStructure per server session."""

    def __init__(self):
        super().__init__()

    def get_structure(self) -> Structure:
        """Create and return a new ButtonStructure instance."""
        return ButtonStructure()
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
class MessengerTest(unittest.TestCase):
    def setUp(self) -> None:
        """Remove all Kafka topics (when the sequential-queue backend is in
        use) so every test starts without leftover state from a prior run."""
        print(f"{datetime.utcnow()}: setUp: start")
        if is_kafka_sequential_queue:
            kafka_manager = get_default_kafka_manager_factory().get_kafka_manager()
            print(f"setUp: initialized: {datetime.utcnow()}")
            topics = kafka_manager.get_topics().get_result()  # type: List[str]
            print(f"setUp: get_topics: {datetime.utcnow()}")
            # Delete each topic and block until the removal has completed.
            for topic in topics:
                print(f"setUp: topic: {topic}: {datetime.utcnow()}")
                async_handle = kafka_manager.remove_topic(
                    topic_name=topic
                )
                print(f"setUp: async: {topic}: {datetime.utcnow()}")
                async_handle.get_result()
                print(f"setUp: result: {topic}: {datetime.utcnow()}")
            # Brief pause to let the broker settle after topic deletion.
            time.sleep(1)
        print(f"{datetime.utcnow()}: setUp: end")
def test_initialize_client_messenger(self):
client_messenger = get_default_client_messenger_factory().get_client_messenger()
self.assertIsNotNone(client_messenger)
client_messenger.dispose()
def test_initialize_server_messenger(self):
server_messenger = get_default_server_messenger_factory().get_server_messenger()
self.assertIsNotNone(server_messenger)
def test_server_messenger_start_and_stop(self):
server_messenger = get_default_server_messenger_factory().get_server_messenger()
server_messenger.start_receiving_from_clients()
time.sleep(3)
print(f"{datetime.utcnow()}: stopping")
server_messenger.stop_receiving_from_clients()
print(f"{datetime.utcnow()}: stopped")
time.sleep(5)
def test_connect_client_to_server_and_client_disposes_first(self):
client_messenger = get_default_client_messenger_factory().get_client_messenger()
server_messenger = get_default_server_messenger_factory().get_server_messenger()
server_messenger.start_receiving_from_clients()
time.sleep(1)
client_messenger.connect_to_server()
client_messenger.send_to_server(
client_server_message=HelloWorldBaseClientServerMessage()
)
time.sleep(1)
client_messenger.dispose()
time.sleep(1)
server_messenger.stop_receiving_from_clients()
time.sleep(1)
def test_connect_client_to_server_and_server_stops_first(self):
client_messenger = get_default_client_messenger_factory().get_client_messenger()
server_messenger = get_default_server_messenger_factory().get_server_messenger()
server_messenger.start_receiving_from_clients()
time.sleep(1)
client_messenger.connect_to_server()
client_messenger.send_to_server(
client_server_message=HelloWorldBaseClientServerMessage()
)
time.sleep(1)
server_messenger.stop_receiving_from_clients()
time.sleep(1)
client_messenger.dispose()
time.sleep(1)
    def test_connect_client_to_server_client_receives_and_client_disposes_first(self):
        """A client with an active receive callback sends a message that is not
        expected to produce a response, then disposes before the server stops;
        no callback delivery (and no receive-loop exception) must occur."""
        client_messenger = get_default_client_messenger_factory().get_client_messenger()
        server_messenger = get_default_server_messenger_factory().get_server_messenger()
        server_messenger.start_receiving_from_clients()
        time.sleep(1)
        client_messenger.connect_to_server()
        time.sleep(1)
        def callback(client_server_message: ClientServerMessage):
            # Any delivered message is unexpected for this scenario.
            raise Exception("Unexpected response")
        found_exception = None  # type: Exception
        def on_exception(exception: Exception):
            # Capture any receive-loop exception to re-raise on the test thread.
            nonlocal found_exception
            found_exception = exception
        client_messenger.receive_from_server(
            callback=callback,
            on_exception=on_exception
        )
        time.sleep(1)
        client_messenger.send_to_server(
            client_server_message=HelloWorldBaseClientServerMessage()
        )
        time.sleep(1)
        client_messenger.dispose()
        time.sleep(1)
        server_messenger.stop_receiving_from_clients()
        time.sleep(1)
        # Surface anything the background receive loop captured.
        if found_exception is not None:
            raise found_exception
    def test_connect_client_to_server_client_receives_and_server_stops_first(self):
        """Same as the previous test, but the server stops before the client
        disposes: the client's receive loop must observe the closed socket as
        a ReadWriteSocketClosedException."""
        client_messenger = get_default_client_messenger_factory().get_client_messenger()
        server_messenger = get_default_server_messenger_factory().get_server_messenger()
        server_messenger.start_receiving_from_clients()
        time.sleep(1)
        client_messenger.connect_to_server()
        time.sleep(1)
        def callback(client_server_message: ClientServerMessage):
            # Any delivered message is unexpected for this scenario.
            raise Exception("Unexpected response")
        found_exception = None  # type: Exception
        def on_exception(exception: Exception):
            # Capture the receive-loop exception for assertion below.
            nonlocal found_exception
            found_exception = exception
        client_messenger.receive_from_server(
            callback=callback,
            on_exception=on_exception
        )
        time.sleep(1)
        client_messenger.send_to_server(
            client_server_message=HelloWorldBaseClientServerMessage()
        )
        time.sleep(1)
        # Stopping the server first closes the socket under the client ...
        server_messenger.stop_receiving_from_clients()
        time.sleep(1)
        client_messenger.dispose()
        time.sleep(1)
        # ... which must surface as a ReadWriteSocketClosedException.
        self.assertIsInstance(found_exception, ReadWriteSocketClosedException)
    def test_press_button_three_times(self):
        """Three presses from one client yield exactly one
        ThreePressesTransmission delivered back to that client."""
        # send three presses and wait for a reply
        client_messenger = get_default_client_messenger_factory().get_client_messenger()
        server_messenger = get_default_server_messenger_factory().get_server_messenger()
        server_messenger.start_receiving_from_clients()
        time.sleep(1)
        client_messenger.connect_to_server()
        callback_total = 0
        def callback(client_server_message: ClientServerMessage):
            # Count deliveries and verify the message type.
            nonlocal callback_total
            print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
            callback_total += 1
            self.assertIsInstance(client_server_message, ThreePressesTransmissionBaseClientServerMessage)
        found_exception = None  # type: Exception
        def on_exception(exception: Exception):
            nonlocal found_exception
            found_exception = exception
        client_messenger.receive_from_server(
            callback=callback,
            on_exception=on_exception
        )
        print(f"{datetime.utcnow()}: sending announcement")
        client_messenger.send_to_server(
            client_server_message=AnnounceBaseClientServerMessage(
                name="<NAME>"
            )
        )
        print(f"{datetime.utcnow()}: sending first press")
        client_messenger.send_to_server(
            client_server_message=PressButtonBaseClientServerMessage()
        )
        print(f"{datetime.utcnow()}: sending second press")
        client_messenger.send_to_server(
            client_server_message=PressButtonBaseClientServerMessage()
        )
        print(f"{datetime.utcnow()}: sending third press")
        client_messenger.send_to_server(
            client_server_message=PressButtonBaseClientServerMessage()
        )
        print(f"{datetime.utcnow()}: waiting for messages")
        time.sleep(1)
        print(f"{datetime.utcnow()}: disposing")
        client_messenger.dispose()
        print(f"{datetime.utcnow()}: disposed")
        print(f"{datetime.utcnow()}: stopping")
        server_messenger.stop_receiving_from_clients()
        print(f"{datetime.utcnow()}: stopped")
        time.sleep(1)
        # Exactly one transmission, and the receive loop saw no errors.
        self.assertEqual(1, callback_total)
        self.assertIsNone(found_exception)
    def test_one_client_sends_two_presses_then_reset(self):
        """Two presses followed by a reset yield exactly one ResetTransmission
        delivered back to the pressing client."""
        # send two presses of the button, then send a reset, and finally wait for a reply
        client_messenger = get_default_client_messenger_factory().get_client_messenger()
        server_messenger = get_default_server_messenger_factory().get_server_messenger()
        server_messenger.start_receiving_from_clients()
        time.sleep(1)
        client_messenger.connect_to_server()
        callback_total = 0
        def callback(client_server_message: ClientServerMessage):
            # Count deliveries and verify the message type.
            nonlocal callback_total
            print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
            callback_total += 1
            self.assertIsInstance(client_server_message, ResetTransmissionBaseClientServerMessage)
        found_exception = None  # type: Exception
        def on_exception(exception: Exception):
            nonlocal found_exception
            found_exception = exception
        client_messenger.receive_from_server(
            callback=callback,
            on_exception=on_exception
        )
        print(f"{datetime.utcnow()}: sending announcement")
        client_messenger.send_to_server(
            client_server_message=AnnounceBaseClientServerMessage(
                name="<NAME>"
            )
        )
        print(f"{datetime.utcnow()}: sending first press")
        client_messenger.send_to_server(
            client_server_message=PressButtonBaseClientServerMessage()
        )
        print(f"{datetime.utcnow()}: sending second press")
        client_messenger.send_to_server(
            client_server_message=PressButtonBaseClientServerMessage()
        )
        print(f"{datetime.utcnow()}: sending reset")
        client_messenger.send_to_server(
            client_server_message=ResetButtonBaseClientServerMessage()
        )
        print(f"{datetime.utcnow()}: waiting for messages")
        time.sleep(1)
        print(f"{datetime.utcnow()}: disposing")
        client_messenger.dispose()
        print(f"{datetime.utcnow()}: disposed")
        print(f"{datetime.utcnow()}: stopping")
        server_messenger.stop_receiving_from_clients()
        print(f"{datetime.utcnow()}: stopped")
        time.sleep(1)
        # One reset transmission, no receive-loop errors.
        self.assertEqual(1, callback_total)
        self.assertIsNone(found_exception)
    def test_two_clients_each_send_one_press_then_reset(self):
        """Two clients press once each; the first then resets. Both pressers
        must receive a ResetTransmission (two callbacks total)."""
        first_client_messenger = get_default_client_messenger_factory().get_client_messenger()
        second_client_messenger = get_default_client_messenger_factory().get_client_messenger()
        server_messenger = get_default_server_messenger_factory().get_server_messenger()
        server_messenger.start_receiving_from_clients()
        time.sleep(1)
        first_client_messenger.connect_to_server()
        second_client_messenger.connect_to_server()
        # Shared counter across both clients' receive callbacks.
        callback_total = 0
        def first_callback(client_server_message: ClientServerMessage):
            nonlocal callback_total
            print(f"{datetime.utcnow()}: first_callback: client_server_message: {client_server_message.to_json()}")
            callback_total += 1
            self.assertIsInstance(client_server_message, ResetTransmissionBaseClientServerMessage)
        first_found_exception = None  # type: Exception
        def first_on_exception(exception: Exception):
            nonlocal first_found_exception
            first_found_exception = exception
        first_client_messenger.receive_from_server(
            callback=first_callback,
            on_exception=first_on_exception
        )
        def second_callback(client_server_message: ClientServerMessage):
            nonlocal callback_total
            print(f"{datetime.utcnow()}: second_callback: client_server_message: {client_server_message.to_json()}")
            callback_total += 1
            self.assertIsInstance(client_server_message, ResetTransmissionBaseClientServerMessage)
        second_found_exception = None  # type: Exception
        def second_on_exception(exception: Exception):
            nonlocal second_found_exception
            second_found_exception = exception
        second_client_messenger.receive_from_server(
            callback=second_callback,
            on_exception=second_on_exception
        )
        print(f"{datetime.utcnow()}: sending first announcement")
        first_client_messenger.send_to_server(
            client_server_message=AnnounceBaseClientServerMessage(
                name="First"
            )
        )
        print(f"{datetime.utcnow()}: sending second announcement")
        second_client_messenger.send_to_server(
            client_server_message=AnnounceBaseClientServerMessage(
                name="Second"
            )
        )
        time.sleep(1)
        print(f"{datetime.utcnow()}: sending first press")
        first_client_messenger.send_to_server(
            client_server_message=PressButtonBaseClientServerMessage()
        )
        print(f"{datetime.utcnow()}: sending second press")
        second_client_messenger.send_to_server(
            client_server_message=PressButtonBaseClientServerMessage()
        )
        print(f"{datetime.utcnow()}: sending reset")
        first_client_messenger.send_to_server(
            client_server_message=ResetButtonBaseClientServerMessage()
        )
        print(f"{datetime.utcnow()}: waiting for messages")
        time.sleep(1)
        print(f"{datetime.utcnow()}: disposing")
        first_client_messenger.dispose()
        second_client_messenger.dispose()
        print(f"{datetime.utcnow()}: disposed")
        print(f"{datetime.utcnow()}: stopping")
        server_messenger.stop_receiving_from_clients()
        print(f"{datetime.utcnow()}: stopped")
        time.sleep(1)
        # Both pressers got the reset transmission; neither loop errored.
        self.assertEqual(2, callback_total)
        self.assertIsNone(first_found_exception)
        self.assertIsNone(second_found_exception)
    def test_two_clients_each_send_one_press_then_third_client_reset(self):
        """Two clients press once each; a third client (who never pressed)
        resets. Only the two pressers receive the ResetTransmission; the third
        must receive nothing."""
        first_client_messenger = get_default_client_messenger_factory().get_client_messenger()
        second_client_messenger = get_default_client_messenger_factory().get_client_messenger()
        third_client_messenger = get_default_client_messenger_factory().get_client_messenger()
        server_messenger = get_default_server_messenger_factory().get_server_messenger()
        server_messenger.start_receiving_from_clients()
        time.sleep(1)
        first_client_messenger.connect_to_server()
        second_client_messenger.connect_to_server()
        third_client_messenger.connect_to_server()
        # Shared counter across all three clients' receive callbacks.
        callback_total = 0
        def first_callback(client_server_message: ClientServerMessage):
            nonlocal callback_total
            print(f"{datetime.utcnow()}: first_callback: client_server_message: {client_server_message.to_json()}")
            callback_total += 1
            self.assertIsInstance(client_server_message, ResetTransmissionBaseClientServerMessage)
        first_found_exception = None  # type: Exception
        def first_on_exception(exception: Exception):
            nonlocal first_found_exception
            first_found_exception = exception
        first_client_messenger.receive_from_server(
            callback=first_callback,
            on_exception=first_on_exception
        )
        def second_callback(client_server_message: ClientServerMessage):
            nonlocal callback_total
            print(f"{datetime.utcnow()}: second_callback: client_server_message: {client_server_message.to_json()}")
            callback_total += 1
            self.assertIsInstance(client_server_message, ResetTransmissionBaseClientServerMessage)
        second_found_exception = None  # type: Exception
        def second_on_exception(exception: Exception):
            nonlocal second_found_exception
            second_found_exception = exception
        second_client_messenger.receive_from_server(
            callback=second_callback,
            on_exception=second_on_exception
        )
        def third_callback(client_server_message: ClientServerMessage):
            # The resetter never pressed, so it must not be notified.
            nonlocal callback_total
            print(f"{datetime.utcnow()}: third_callback: client_server_message: {client_server_message.to_json()}")
            callback_total += 1
            raise Exception(f"Third client should not receive a message.")
        third_found_exception = None  # type: Exception
        def third_on_exception(exception: Exception):
            nonlocal third_found_exception
            third_found_exception = exception
        third_client_messenger.receive_from_server(
            callback=third_callback,
            on_exception=third_on_exception
        )
        print(f"{datetime.utcnow()}: sending first announcement")
        first_client_messenger.send_to_server(
            client_server_message=AnnounceBaseClientServerMessage(
                name="First"
            )
        )
        print(f"{datetime.utcnow()}: sending second announcement")
        second_client_messenger.send_to_server(
            client_server_message=AnnounceBaseClientServerMessage(
                name="Second"
            )
        )
        print(f"{datetime.utcnow()}: sending third announcement")
        third_client_messenger.send_to_server(
            client_server_message=AnnounceBaseClientServerMessage(
                name="Third"
            )
        )
        # Short sleeps between sends keep message ordering deterministic
        # across the three independent client connections.
        time.sleep(0.1)
        print(f"{datetime.utcnow()}: sending first press")
        first_client_messenger.send_to_server(
            client_server_message=PressButtonBaseClientServerMessage()
        )
        time.sleep(0.1)
        print(f"{datetime.utcnow()}: sending second press")
        second_client_messenger.send_to_server(
            client_server_message=PressButtonBaseClientServerMessage()
        )
        time.sleep(0.1)
        print(f"{datetime.utcnow()}: sending reset")
        third_client_messenger.send_to_server(
            client_server_message=ResetButtonBaseClientServerMessage()
        )
        time.sleep(0.1)
        print(f"{datetime.utcnow()}: waiting for messages")
        time.sleep(1)
        print(f"{datetime.utcnow()}: disposing")
        first_client_messenger.dispose()
        second_client_messenger.dispose()
        third_client_messenger.dispose()
        print(f"{datetime.utcnow()}: disposed")
        print(f"{datetime.utcnow()}: stopping")
        server_messenger.stop_receiving_from_clients()
        print(f"{datetime.utcnow()}: stopped")
        time.sleep(1)
        # No loop errored and only the two pressers were notified.
        self.assertIsNone(first_found_exception)
        self.assertIsNone(second_found_exception)
        self.assertIsNone(third_found_exception)
        self.assertEqual(2, callback_total)
    def test_client_disconnects_before_receiving_intended_message(self):
        """A presser disconnects before the reset that would have notified it;
        the server must release the dead socket gracefully and neither client
        receives anything."""
        # the first client sends a press, disconnects, then the second client resets
        # the server messenger should detect that the client disconnected and release the socket gracefully
        first_client_messenger = get_default_client_messenger_factory().get_client_messenger()
        second_client_messenger = get_default_client_messenger_factory().get_client_messenger()
        server_messenger = get_default_server_messenger_factory().get_server_messenger()
        server_messenger.start_receiving_from_clients()
        time.sleep(1)
        first_client_messenger.connect_to_server()
        second_client_messenger.connect_to_server()
        # Shared counter; any callback delivery at all is a failure here.
        callback_total = 0
        def first_callback(client_server_message: ClientServerMessage):
            nonlocal callback_total
            print(f"{datetime.utcnow()}: first_callback: client_server_message: {client_server_message.to_json()}")
            callback_total += 1
            raise Exception("This client should have been disposed of already.")
        first_found_exception = None  # type: Exception
        def first_on_exception(exception: Exception):
            nonlocal first_found_exception
            first_found_exception = exception
        first_client_messenger.receive_from_server(
            callback=first_callback,
            on_exception=first_on_exception
        )
        def second_callback(client_server_message: ClientServerMessage):
            nonlocal callback_total
            print(f"{datetime.utcnow()}: second_callback: client_server_message: {client_server_message.to_json()}")
            callback_total += 1
            raise Exception("This client should not be receiving a message.")
        second_found_exception = None  # type: Exception
        def second_on_exception(exception: Exception):
            nonlocal second_found_exception
            second_found_exception = exception
        second_client_messenger.receive_from_server(
            callback=second_callback,
            on_exception=second_on_exception
        )
        print(f"{datetime.utcnow()}: sending first announcement")
        first_client_messenger.send_to_server(
            client_server_message=AnnounceBaseClientServerMessage(
                name="First"
            )
        )
        time.sleep(0.1)
        print(f"{datetime.utcnow()}: sending second announcement")
        second_client_messenger.send_to_server(
            client_server_message=AnnounceBaseClientServerMessage(
                name="Second"
            )
        )
        time.sleep(0.1)
        print(f"{datetime.utcnow()}: sending first press")
        first_client_messenger.send_to_server(
            client_server_message=PressButtonBaseClientServerMessage()
        )
        time.sleep(1)
        # Disconnect the presser BEFORE the reset that would notify it.
        print(f"{datetime.utcnow()}: disposing first client")
        first_client_messenger.dispose()
        time.sleep(1)
        print(f"{datetime.utcnow()}: sending reset")
        second_client_messenger.send_to_server(
            client_server_message=ResetButtonBaseClientServerMessage()
        )
        print(f"{datetime.utcnow()}: waiting for messages")
        time.sleep(1)
        print(f"{datetime.utcnow()}: disposing")
        second_client_messenger.dispose()
        time.sleep(1)
        print(f"{datetime.utcnow()}: disposed")
        print(f"{datetime.utcnow()}: stopping")
        server_messenger.stop_receiving_from_clients()
        time.sleep(1)
        print(f"{datetime.utcnow()}: stopped")
        time.sleep(1)
        # Nothing was delivered anywhere and neither receive loop errored.
        self.assertEqual(0, callback_total)
        self.assertIsNone(first_found_exception)
        self.assertIsNone(second_found_exception)
def test_ping(self):
client_messenger = get_default_client_messenger_factory().get_client_messenger()
server_messenger = get_default_server_messenger_factory().get_server_messenger()
server_messenger.start_receiving_from_clients()
time.sleep(1)
client_messenger.connect_to_server()
callback_total = 0
def callback(client_server_message: ClientServerMessage):
nonlocal callback_total
print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
callback_total += 1
self.assertIsInstance(client_server_message, PingResponseBaseClientServerMessage)
ping_response_base_client_server_message = client_server_message # type: PingResponseBaseClientServerMessage
self.assertEqual(0, ping_response_base_client_server_message.get_ping_index())
found_exception = None # type: Exception
def on_exception(exception: Exception):
nonlocal found_exception
found_exception = exception
client_messenger.receive_from_server(
callback=callback,
on_exception=on_exception
)
print(f"{datetime.utcnow()}: sending first announcement")
client_messenger.send_to_server(
client_server_message=AnnounceBaseClientServerMessage(
name="First"
)
)
time.sleep(0.1)
print(f"{datetime.utcnow()}: sending ping")
client_messenger.send_to_server(
client_server_message=PingRequestBaseClientServerMessage()
)
time.sleep(0.1)
print(f"{datetime.utcnow()}: waiting for messages")
time.sleep(1)
print(f"{datetime.utcnow()}: disposing")
client_messenger.dispose()
print(f"{datetime.utcnow()}: disposed")
print(f"{datetime.utcnow()}: stopping")
server_messenger.stop_receiving_from_clients()
print(f"{datetime.utcnow()}: stopped")
time.sleep(1)
self.assertEqual(1, callback_total)
self.assertIsNone(found_exception)
    def test_single_client_quickly_pings_using_threading(self):
        """Burst 1000 ping requests from a worker thread, assert every response
        arrives with a sequential ping index, and print throughput figures.
        """
        # spam pings and detect timing differences between sends and receives
        client_messenger = get_default_client_messenger_factory().get_client_messenger()
        server_messenger = get_default_server_messenger_factory().get_server_messenger()
        server_messenger.start_receiving_from_clients()
        time.sleep(1)
        client_messenger.connect_to_server()
        expected_pings_total = 1000
        callback_total = 0
        expected_ping_index = 0
        first_message_datetime = None  # type: datetime
        last_message_datetime = None  # type: datetime
        # Each response must carry the next sequential ping index; the first and
        # last arrival timestamps bound the receive window used for throughput math.
        def callback(client_server_message: ClientServerMessage):
            nonlocal callback_total
            nonlocal expected_ping_index
            nonlocal first_message_datetime
            nonlocal last_message_datetime
            nonlocal expected_pings_total
            #print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
            callback_total += 1
            self.assertIsInstance(client_server_message, PingResponseBaseClientServerMessage)
            ping_response_base_client_server_message = client_server_message  # type: PingResponseBaseClientServerMessage
            self.assertEqual(expected_ping_index, ping_response_base_client_server_message.get_ping_index())
            expected_ping_index += 1
            if expected_ping_index == 1:
                first_message_datetime = datetime.utcnow()
            if expected_ping_index == expected_pings_total:
                last_message_datetime = datetime.utcnow()
        found_exception = None  # type: Exception
        # Any exception surfaced by the receive pipeline is captured for the
        # final assertIsNone check.
        def on_exception(exception: Exception):
            nonlocal found_exception
            found_exception = exception
        client_messenger.receive_from_server(
            callback=callback,
            on_exception=on_exception
        )
        print(f"{datetime.utcnow()}: sending first announcement")
        client_messenger.send_to_server(
            client_server_message=AnnounceBaseClientServerMessage(
                name="First"
            )
        )
        time.sleep(0.1)
        print(f"{datetime.utcnow()}: sending first press")
        sent_first_ping_datetime = None  # type: datetime
        sent_last_ping_datetime = None  # type: datetime
        # Sends all pings as fast as possible; timestamps taken just before the
        # first and last sends bound the send window.
        def ping_thread_method():
            nonlocal client_messenger
            nonlocal expected_pings_total
            nonlocal sent_first_ping_datetime
            nonlocal sent_last_ping_datetime
            sent_first_ping_datetime = datetime.utcnow()
            client_messenger.send_to_server(
                client_server_message=PingRequestBaseClientServerMessage()
            )
            for index in range(expected_pings_total - 2):
                client_messenger.send_to_server(
                    client_server_message=PingRequestBaseClientServerMessage()
                )
            sent_last_ping_datetime = datetime.utcnow()
            client_messenger.send_to_server(
                client_server_message=PingRequestBaseClientServerMessage()
            )
        ping_thread = start_thread(ping_thread_method)
        ping_thread.join()
        time.sleep(0.1)
        print(f"{datetime.utcnow()}: waiting for messages")
        # NOTE(review): loops forever if the final response never arrives —
        # relies on reliable delivery; confirm an outer test timeout exists.
        while last_message_datetime is None:
            time.sleep(1)
        time.sleep(1)
        print(f"{datetime.utcnow()}: disposing")
        client_messenger.dispose()
        print(f"{datetime.utcnow()}: disposed")
        print(f"{datetime.utcnow()}: stopping")
        server_messenger.stop_receiving_from_clients()
        print(f"{datetime.utcnow()}: stopped")
        time.sleep(1)
        self.assertEqual(expected_pings_total, callback_total)
        print(f"Sent first message datetime: {sent_first_ping_datetime}")
        print(f"Received first message datetime: {first_message_datetime}")
        print(f"Diff: {(first_message_datetime - sent_first_ping_datetime).total_seconds()} seconds")
        print(f"Sent last message datetime: {sent_last_ping_datetime}")
        print(f"Received last message datetime: {last_message_datetime}")
        print(f"Diff: {(last_message_datetime - sent_last_ping_datetime).total_seconds()} seconds")
        seconds_total = (last_message_datetime - first_message_datetime).total_seconds()
        messages_per_second = expected_pings_total / seconds_total
        print(f"Messages per seconds: {messages_per_second}")
        print(f"Seconds per message: {1.0 / messages_per_second}")
        self.assertIsNone(found_exception)
    def test_single_client_quickly_pings_burst(self):
        """Burst 1000 ping requests from a worker thread that owns the entire
        client lifecycle, and report send vs. receive throughput figures.
        """
        # spam pings and detect timing differences between sends and receives
        server_messenger = get_default_server_messenger_factory().get_server_messenger()
        server_messenger.start_receiving_from_clients()
        time.sleep(1)
        expected_pings_total = 1000
        print(f"{datetime.utcnow()}: sending first press")
        found_exception = None  # type: Exception
        # The whole client lifecycle runs on this worker thread; only
        # found_exception escapes to the main thread for the final assertion.
        def ping_thread_method():
            nonlocal expected_pings_total
            nonlocal found_exception
            client_messenger = get_default_client_messenger_factory().get_client_messenger()
            client_messenger.connect_to_server()
            expected_ping_index = 0
            received_first_message_datetime = None  # type: datetime
            received_last_message_datetime = None  # type: datetime
            callback_semaphore = Semaphore()
            def callback(client_server_message: ClientServerMessage):
                nonlocal expected_pings_total
                nonlocal expected_ping_index
                nonlocal received_first_message_datetime
                nonlocal received_last_message_datetime
                nonlocal callback_semaphore
                #print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
                self.assertIsInstance(client_server_message, PingResponseBaseClientServerMessage)
                ping_response_base_client_server_message = client_server_message  # type: PingResponseBaseClientServerMessage
                # NOTE(review): the index assertion reads expected_ping_index
                # before the semaphore is acquired — presumably callbacks are
                # invoked serially; confirm.
                self.assertEqual(expected_ping_index, ping_response_base_client_server_message.get_ping_index())
                # The semaphore serializes updates to the shared counter and the
                # first/last arrival timestamps.
                callback_semaphore.acquire()
                expected_ping_index += 1
                if expected_ping_index == 1:
                    received_first_message_datetime = datetime.utcnow()
                if expected_ping_index == expected_pings_total:
                    received_last_message_datetime = datetime.utcnow()
                callback_semaphore.release()
            def on_exception(exception: Exception):
                nonlocal found_exception
                found_exception = exception
            client_messenger.receive_from_server(
                callback=callback,
                on_exception=on_exception
            )
            print(f"{datetime.utcnow()}: sending first announcement")
            client_messenger.send_to_server(
                client_server_message=AnnounceBaseClientServerMessage(
                    name="First"
                )
            )
            sent_first_ping_datetime = datetime.utcnow()
            client_messenger.send_to_server(
                client_server_message=PingRequestBaseClientServerMessage()
            )
            for index in range(expected_pings_total - 2):
                client_messenger.send_to_server(
                    client_server_message=PingRequestBaseClientServerMessage()
                )
            sent_last_ping_datetime = datetime.utcnow()
            client_messenger.send_to_server(
                client_server_message=PingRequestBaseClientServerMessage()
            )
            print(f"{datetime.utcnow()}: waiting for messages")
            # NOTE(review): waits indefinitely for the final response — relies
            # on reliable delivery.
            while received_last_message_datetime is None:
                time.sleep(1)
            time.sleep(1)
            print(f"Sent first message datetime: {sent_first_ping_datetime}")
            print(f"Received first message datetime: {received_first_message_datetime}")
            print(f"Diff: {(received_first_message_datetime - sent_first_ping_datetime).total_seconds()} seconds")
            print(f"Sent last message datetime: {sent_last_ping_datetime}")
            print(f"Received last message datetime: {received_last_message_datetime}")
            print(f"Diff: {(received_last_message_datetime - sent_last_ping_datetime).total_seconds()} seconds")
            seconds_total = (sent_last_ping_datetime - sent_first_ping_datetime).total_seconds()
            messages_per_second = expected_pings_total / seconds_total
            print(f"Seconds to send all messages: {seconds_total}")
            print(f"Sent messages per seconds: {messages_per_second}")
            print(f"Seconds per sent message: {1.0 / messages_per_second}")
            seconds_total = (received_last_message_datetime - received_first_message_datetime).total_seconds()
            messages_per_second = expected_pings_total / seconds_total
            print(f"Seconds to receive all messages: {seconds_total}")
            print(f"Received messages per seconds: {messages_per_second}")
            print(f"Seconds per received message: {1.0 / messages_per_second}")
            print(f"{datetime.utcnow()}: disposing")
            client_messenger.dispose()
            print(f"{datetime.utcnow()}: disposed")
        ping_thread = start_thread(ping_thread_method)
        ping_thread.join()
        time.sleep(0.1)
        print(f"{datetime.utcnow()}: stopping")
        server_messenger.stop_receiving_from_clients()
        print(f"{datetime.utcnow()}: stopped")
        time.sleep(1)
        self.assertIsNone(found_exception)
    def test_single_client_quickly_pings_delayed(self):
        """Send pings at a paced rate (500 msg/s nominal for 10 s), record a
        per-message send and receive timestamp, and report latency statistics;
        optionally scatter-plot the two timelines when is_plotted is set.
        """
        # spam pings and detect timing differences between sends and receives
        server_messenger = get_default_server_messenger_factory().get_server_messenger()
        server_messenger.start_receiving_from_clients()
        time.sleep(1)
        test_seconds = 10
        test_messages_per_second = 500
        expected_pings_total = test_seconds * test_messages_per_second
        # 60% of the nominal message period — presumably leaves headroom for
        # per-send overhead; confirm.
        delay_between_sending_message_seconds = (1.0 / test_messages_per_second) * 0.6
        #expected_pings_total = 1000
        #delay_between_sending_message_seconds = 0.0025
        print(f"{datetime.utcnow()}: sending first press")
        found_exception = None  # type: Exception
        # The whole client lifecycle runs on this worker thread; only
        # found_exception escapes to the main thread for the final assertion.
        def ping_thread_method():
            nonlocal expected_pings_total
            nonlocal delay_between_sending_message_seconds
            nonlocal found_exception
            client_messenger = get_default_client_messenger_factory().get_client_messenger()
            client_messenger.connect_to_server()
            expected_ping_index = 0
            callback_semaphore = Semaphore()
            received_datetimes = []  # type: List[datetime]
            sent_datetimes = []  # type: List[datetime]
            def callback(client_server_message: ClientServerMessage):
                nonlocal expected_pings_total
                nonlocal expected_ping_index
                nonlocal received_datetimes
                nonlocal callback_semaphore
                #print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
                self.assertIsInstance(client_server_message, PingResponseBaseClientServerMessage)
                ping_response_base_client_server_message = client_server_message  # type: PingResponseBaseClientServerMessage
                #self.assertEqual(expected_ping_index, ping_response_base_client_server_message.get_ping_index())
                # The semaphore serializes updates to the shared counter and the
                # received-timestamp list.
                callback_semaphore.acquire()
                expected_ping_index += 1
                received_datetimes.append(datetime.utcnow())
                callback_semaphore.release()
            def on_exception(exception: Exception):
                nonlocal found_exception
                found_exception = exception
            client_messenger.receive_from_server(
                callback=callback,
                on_exception=on_exception
            )
            print(f"{datetime.utcnow()}: sending first announcement")
            client_messenger.send_to_server(
                client_server_message=AnnounceBaseClientServerMessage(
                    name="First"
                )
            )
            print(f"{datetime.utcnow()}: starting to send messages")
            # Each send is timestamped just before it happens; the final send is
            # not followed by a delay.
            sent_datetimes.append(datetime.utcnow())
            client_messenger.send_to_server(
                client_server_message=PingRequestBaseClientServerMessage()
            )
            time.sleep(delay_between_sending_message_seconds)
            for index in range(expected_pings_total - 2):
                sent_datetimes.append(datetime.utcnow())
                client_messenger.send_to_server(
                    client_server_message=PingRequestBaseClientServerMessage()
                )
                time.sleep(delay_between_sending_message_seconds)
            sent_datetimes.append(datetime.utcnow())
            client_messenger.send_to_server(
                client_server_message=PingRequestBaseClientServerMessage()
            )
            print(f"{datetime.utcnow()}: waiting for messages")
            # NOTE(review): waits indefinitely until every response arrives —
            # relies on reliable delivery.
            while len(received_datetimes) != expected_pings_total:
                time.sleep(1)
                print(f"len(received_datetimes): {len(received_datetimes)}")
            time.sleep(1)
            self.assertEqual(expected_pings_total, len(sent_datetimes))
            self.assertEqual(expected_pings_total, len(received_datetimes))
            # Pairwise send->receive latency per message (assumes in-order
            # arrival so the i-th receive matches the i-th send — confirm).
            diff_seconds_totals = []  # type: List[float]
            for sent_datetime, received_datetime in zip(sent_datetimes, received_datetimes):
                seconds_total = (received_datetime - sent_datetime).total_seconds()
                diff_seconds_totals.append(seconds_total)
            print(f"Time to send {(sent_datetimes[-1] - sent_datetimes[0]).total_seconds()} seconds")
            print(f"Messages per second to send: {expected_pings_total / (sent_datetimes[-1] - sent_datetimes[0]).total_seconds()}")
            print(f"Time to receive {(received_datetimes[-1] - received_datetimes[0]).total_seconds()} seconds")
            print(f"Messages per second to receive: {expected_pings_total / (received_datetimes[-1] - received_datetimes[0]).total_seconds()}")
            print(f"Min diff seconds {min(diff_seconds_totals)} at {diff_seconds_totals.index(min(diff_seconds_totals))}")
            print(f"Max diff seconds {max(diff_seconds_totals)} at {diff_seconds_totals.index(max(diff_seconds_totals))}")
            print(f"Ave diff seconds {sum(diff_seconds_totals)/expected_pings_total}")
            if is_plotted:
                plt.scatter(sent_datetimes, range(len(sent_datetimes)), s=1, c="red")
                plt.scatter(received_datetimes, range(len(received_datetimes)), s=1, c="blue")
                plt.show()
            # Re-report the latency stats with the first `cutoff` messages
            # excluded — presumably to drop warm-up noise; confirm.
            cutoff = 150
            print(f"Min diff seconds {min(diff_seconds_totals[cutoff:])} at {diff_seconds_totals.index(min(diff_seconds_totals[cutoff:]))}")
            print(f"Max diff seconds {max(diff_seconds_totals[cutoff:])} at {diff_seconds_totals.index(max(diff_seconds_totals[cutoff:]))}")
            print(f"Ave diff seconds {sum(diff_seconds_totals[cutoff:]) / (expected_pings_total - cutoff)}")
            print(f"{datetime.utcnow()}: disposing")
            client_messenger.dispose()
            print(f"{datetime.utcnow()}: disposed")
        ping_thread = start_thread(ping_thread_method)
        ping_thread.join()
        time.sleep(0.1)
        print(f"{datetime.utcnow()}: stopping")
        server_messenger.stop_receiving_from_clients()
        print(f"{datetime.utcnow()}: stopped")
        time.sleep(1)
        self.assertIsNone(found_exception)
    def test_single_client_quickly_echos_burst_0B(self):
        """Burst 1000 echo requests with an empty (0-byte) payload from a worker
        thread and report send vs. receive throughput figures.
        """
        # spam pings and detect timing differences between sends and receives
        server_messenger = get_default_server_messenger_factory().get_server_messenger()
        server_messenger.start_receiving_from_clients()
        time.sleep(1)
        expected_pings_total = 1000
        message_contents = ""
        print(f"{datetime.utcnow()}: sending first press")
        found_exception = None  # type: Exception
        # The whole client lifecycle runs on this worker thread; only
        # found_exception escapes to the main thread for the final assertion.
        def ping_thread_method():
            nonlocal expected_pings_total
            nonlocal found_exception
            nonlocal message_contents
            client_messenger = get_default_client_messenger_factory().get_client_messenger()
            client_messenger.connect_to_server()
            expected_ping_index = 0
            received_first_message_datetime = None  # type: datetime
            received_last_message_datetime = None  # type: datetime
            callback_semaphore = Semaphore()
            def callback(client_server_message: ClientServerMessage):
                nonlocal expected_pings_total
                nonlocal expected_ping_index
                nonlocal received_first_message_datetime
                nonlocal received_last_message_datetime
                nonlocal callback_semaphore
                # print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
                self.assertIsInstance(client_server_message, EchoResponseBaseClientServerMessage)
                # The semaphore serializes updates to the shared counter and the
                # first/last arrival timestamps.
                callback_semaphore.acquire()
                expected_ping_index += 1
                if expected_ping_index == 1:
                    received_first_message_datetime = datetime.utcnow()
                if expected_ping_index == expected_pings_total:
                    received_last_message_datetime = datetime.utcnow()
                callback_semaphore.release()
            def on_exception(exception: Exception):
                nonlocal found_exception
                found_exception = exception
            client_messenger.receive_from_server(
                callback=callback,
                on_exception=on_exception
            )
            print(f"{datetime.utcnow()}: sending first announcement")
            client_messenger.send_to_server(
                client_server_message=AnnounceBaseClientServerMessage(
                    name="First"
                )
            )
            # is_ordered=True — presumably requests in-order delivery; confirm
            # against EchoRequestBaseClientServerMessage.
            sent_first_ping_datetime = datetime.utcnow()
            client_messenger.send_to_server(
                client_server_message=EchoRequestBaseClientServerMessage(
                    message=message_contents,
                    is_ordered=True
                )
            )
            for index in range(expected_pings_total - 2):
                client_messenger.send_to_server(
                    client_server_message=EchoRequestBaseClientServerMessage(
                        message=message_contents,
                        is_ordered=True
                    )
                )
            sent_last_ping_datetime = datetime.utcnow()
            client_messenger.send_to_server(
                client_server_message=EchoRequestBaseClientServerMessage(
                    message=message_contents,
                    is_ordered=True
                )
            )
            print(f"{datetime.utcnow()}: waiting for messages")
            # NOTE(review): waits indefinitely for the final response — relies
            # on reliable delivery.
            while received_last_message_datetime is None:
                time.sleep(1)
            time.sleep(1)
            print(f"Sent first message datetime: {sent_first_ping_datetime}")
            print(f"Received first message datetime: {received_first_message_datetime}")
            print(f"Diff: {(received_first_message_datetime - sent_first_ping_datetime).total_seconds()} seconds")
            print(f"Sent last message datetime: {sent_last_ping_datetime}")
            print(f"Received last message datetime: {received_last_message_datetime}")
            print(f"Diff: {(received_last_message_datetime - sent_last_ping_datetime).total_seconds()} seconds")
            seconds_total = (sent_last_ping_datetime - sent_first_ping_datetime).total_seconds()
            messages_per_second = expected_pings_total / seconds_total
            print(f"Seconds to send all messages: {seconds_total}")
            print(f"Sent messages per seconds: {messages_per_second}")
            print(f"Seconds per sent message: {1.0 / messages_per_second}")
            seconds_total = (received_last_message_datetime - received_first_message_datetime).total_seconds()
            messages_per_second = expected_pings_total / seconds_total
            print(f"Seconds to receive all messages: {seconds_total}")
            print(f"Received messages per seconds: {messages_per_second}")
            print(f"Seconds per received message: {1.0 / messages_per_second}")
            print(f"{datetime.utcnow()}: disposing")
            client_messenger.dispose()
            print(f"{datetime.utcnow()}: disposed")
        ping_thread = start_thread(ping_thread_method)
        ping_thread.join()
        time.sleep(0.1)
        print(f"{datetime.utcnow()}: stopping")
        server_messenger.stop_receiving_from_clients()
        print(f"{datetime.utcnow()}: stopped")
        time.sleep(1)
        self.assertIsNone(found_exception)
    def test_single_client_quickly_echos_burst_1KB(self):
        """Burst 1000 echo requests with a 1 KiB payload ("12345678" * 128 =
        1024 chars) from a worker thread and report send vs. receive throughput.
        """
        # spam pings and detect timing differences between sends and receives
        server_messenger = get_default_server_messenger_factory().get_server_messenger()
        server_messenger.start_receiving_from_clients()
        time.sleep(1)
        expected_pings_total = 1000
        message_contents = "12345678" * 128
        print(f"{datetime.utcnow()}: sending first press")
        found_exception = None  # type: Exception
        # The whole client lifecycle runs on this worker thread; only
        # found_exception escapes to the main thread for the final assertion.
        def ping_thread_method():
            nonlocal expected_pings_total
            nonlocal found_exception
            nonlocal message_contents
            client_messenger = get_default_client_messenger_factory().get_client_messenger()
            client_messenger.connect_to_server()
            expected_ping_index = 0
            received_first_message_datetime = None  # type: datetime
            received_last_message_datetime = None  # type: datetime
            callback_semaphore = Semaphore()
            def callback(client_server_message: ClientServerMessage):
                nonlocal expected_pings_total
                nonlocal expected_ping_index
                nonlocal received_first_message_datetime
                nonlocal received_last_message_datetime
                nonlocal callback_semaphore
                # print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
                self.assertIsInstance(client_server_message, EchoResponseBaseClientServerMessage)
                # The semaphore serializes updates to the shared counter and the
                # first/last arrival timestamps.
                callback_semaphore.acquire()
                expected_ping_index += 1
                if expected_ping_index == 1:
                    received_first_message_datetime = datetime.utcnow()
                if expected_ping_index == expected_pings_total:
                    received_last_message_datetime = datetime.utcnow()
                callback_semaphore.release()
            def on_exception(exception: Exception):
                nonlocal found_exception
                found_exception = exception
            client_messenger.receive_from_server(
                callback=callback,
                on_exception=on_exception
            )
            print(f"{datetime.utcnow()}: sending first announcement")
            client_messenger.send_to_server(
                client_server_message=AnnounceBaseClientServerMessage(
                    name="First"
                )
            )
            # is_ordered=True — presumably requests in-order delivery; confirm
            # against EchoRequestBaseClientServerMessage.
            sent_first_ping_datetime = datetime.utcnow()
            client_messenger.send_to_server(
                client_server_message=EchoRequestBaseClientServerMessage(
                    message=message_contents,
                    is_ordered=True
                )
            )
            for index in range(expected_pings_total - 2):
                client_messenger.send_to_server(
                    client_server_message=EchoRequestBaseClientServerMessage(
                        message=message_contents,
                        is_ordered=True
                    )
                )
            sent_last_ping_datetime = datetime.utcnow()
            client_messenger.send_to_server(
                client_server_message=EchoRequestBaseClientServerMessage(
                    message=message_contents,
                    is_ordered=True
                )
            )
            print(f"{datetime.utcnow()}: waiting for messages")
            # NOTE(review): waits indefinitely for the final response — relies
            # on reliable delivery.
            while received_last_message_datetime is None:
                time.sleep(1)
            time.sleep(1)
            print(f"Sent first message datetime: {sent_first_ping_datetime}")
            print(f"Received first message datetime: {received_first_message_datetime}")
            print(f"Diff: {(received_first_message_datetime - sent_first_ping_datetime).total_seconds()} seconds")
            print(f"Sent last message datetime: {sent_last_ping_datetime}")
            print(f"Received last message datetime: {received_last_message_datetime}")
            print(f"Diff: {(received_last_message_datetime - sent_last_ping_datetime).total_seconds()} seconds")
            seconds_total = (sent_last_ping_datetime - sent_first_ping_datetime).total_seconds()
            messages_per_second = expected_pings_total / seconds_total
            print(f"Seconds to send all messages: {seconds_total}")
            print(f"Sent messages per seconds: {messages_per_second}")
            print(f"Seconds per sent message: {1.0 / messages_per_second}")
            seconds_total = (received_last_message_datetime - received_first_message_datetime).total_seconds()
            messages_per_second = expected_pings_total / seconds_total
            print(f"Seconds to receive all messages: {seconds_total}")
            print(f"Received messages per seconds: {messages_per_second}")
            print(f"Seconds per received message: {1.0 / messages_per_second}")
            print(f"{datetime.utcnow()}: disposing")
            client_messenger.dispose()
            print(f"{datetime.utcnow()}: disposed")
        ping_thread = start_thread(ping_thread_method)
        ping_thread.join()
        time.sleep(0.1)
        print(f"{datetime.utcnow()}: stopping")
        server_messenger.stop_receiving_from_clients()
        print(f"{datetime.utcnow()}: stopped")
        time.sleep(1)
        self.assertIsNone(found_exception)
    def test_single_client_quickly_echos_burst_5KB(self):
        """Burst 1000 echo requests with a 5 KiB payload ("12345678" * 128 * 5 =
        5120 chars) from a worker thread and report send vs. receive throughput.
        """
        # spam pings and detect timing differences between sends and receives
        server_messenger = get_default_server_messenger_factory().get_server_messenger()
        server_messenger.start_receiving_from_clients()
        time.sleep(1)
        expected_pings_total = 1000
        message_contents = "12345678" * 128 * 5
        print(f"{datetime.utcnow()}: sending first press")
        found_exception = None  # type: Exception
        # The whole client lifecycle runs on this worker thread; only
        # found_exception escapes to the main thread for the final assertion.
        def ping_thread_method():
            nonlocal expected_pings_total
            nonlocal found_exception
            nonlocal message_contents
            client_messenger = get_default_client_messenger_factory().get_client_messenger()
            client_messenger.connect_to_server()
            expected_ping_index = 0
            received_first_message_datetime = None  # type: datetime
            received_last_message_datetime = None  # type: datetime
            callback_semaphore = Semaphore()
            def callback(client_server_message: ClientServerMessage):
                nonlocal expected_pings_total
                nonlocal expected_ping_index
                nonlocal received_first_message_datetime
                nonlocal received_last_message_datetime
                nonlocal callback_semaphore
                # print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
                self.assertIsInstance(client_server_message, EchoResponseBaseClientServerMessage)
                # The semaphore serializes updates to the shared counter and the
                # first/last arrival timestamps.
                callback_semaphore.acquire()
                expected_ping_index += 1
                if expected_ping_index == 1:
                    received_first_message_datetime = datetime.utcnow()
                if expected_ping_index == expected_pings_total:
                    received_last_message_datetime = datetime.utcnow()
                callback_semaphore.release()
            def on_exception(exception: Exception):
                nonlocal found_exception
                found_exception = exception
            client_messenger.receive_from_server(
                callback=callback,
                on_exception=on_exception
            )
            print(f"{datetime.utcnow()}: sending first announcement")
            client_messenger.send_to_server(
                client_server_message=AnnounceBaseClientServerMessage(
                    name="First"
                )
            )
            # is_ordered=True — presumably requests in-order delivery; confirm
            # against EchoRequestBaseClientServerMessage.
            sent_first_ping_datetime = datetime.utcnow()
            client_messenger.send_to_server(
                client_server_message=EchoRequestBaseClientServerMessage(
                    message=message_contents,
                    is_ordered=True
                )
            )
            for index in range(expected_pings_total - 2):
                client_messenger.send_to_server(
                    client_server_message=EchoRequestBaseClientServerMessage(
                        message=message_contents,
                        is_ordered=True
                    )
                )
            sent_last_ping_datetime = datetime.utcnow()
            client_messenger.send_to_server(
                client_server_message=EchoRequestBaseClientServerMessage(
                    message=message_contents,
                    is_ordered=True
                )
            )
            print(f"{datetime.utcnow()}: waiting for messages")
            # NOTE(review): waits indefinitely for the final response — relies
            # on reliable delivery.
            while received_last_message_datetime is None:
                time.sleep(1)
            time.sleep(1)
            print(f"Sent first message datetime: {sent_first_ping_datetime}")
            print(f"Received first message datetime: {received_first_message_datetime}")
            print(f"Diff: {(received_first_message_datetime - sent_first_ping_datetime).total_seconds()} seconds")
            print(f"Sent last message datetime: {sent_last_ping_datetime}")
            print(f"Received last message datetime: {received_last_message_datetime}")
            print(f"Diff: {(received_last_message_datetime - sent_last_ping_datetime).total_seconds()} seconds")
            seconds_total = (sent_last_ping_datetime - sent_first_ping_datetime).total_seconds()
            messages_per_second = expected_pings_total / seconds_total
            print(f"Seconds to send all messages: {seconds_total}")
            print(f"Sent messages per seconds: {messages_per_second}")
            print(f"Seconds per sent message: {1.0 / messages_per_second}")
            seconds_total = (received_last_message_datetime - received_first_message_datetime).total_seconds()
            messages_per_second = expected_pings_total / seconds_total
            print(f"Seconds to receive all messages: {seconds_total}")
            print(f"Received messages per seconds: {messages_per_second}")
            print(f"Seconds per received message: {1.0 / messages_per_second}")
            print(f"{datetime.utcnow()}: disposing")
            client_messenger.dispose()
            print(f"{datetime.utcnow()}: disposed")
        ping_thread = start_thread(ping_thread_method)
        ping_thread.join()
        time.sleep(0.1)
        print(f"{datetime.utcnow()}: stopping")
        server_messenger.stop_receiving_from_clients()
        print(f"{datetime.utcnow()}: stopped")
        time.sleep(1)
        self.assertIsNone(found_exception)
    def test_single_client_quickly_echos_burst_10KB(self):
        """Burst 1000 echo requests with a 10 KiB payload ("12345678" * 128 * 10
        = 10240 chars) from a worker thread and report send/receive throughput.
        """
        # spam pings and detect timing differences between sends and receives
        server_messenger = get_default_server_messenger_factory().get_server_messenger()
        server_messenger.start_receiving_from_clients()
        time.sleep(1)
        expected_pings_total = 1000
        message_contents = "12345678" * 128 * 10
        print(f"{datetime.utcnow()}: sending first press")
        found_exception = None  # type: Exception
        # The whole client lifecycle runs on this worker thread; only
        # found_exception escapes to the main thread for the final assertion.
        def ping_thread_method():
            nonlocal expected_pings_total
            nonlocal found_exception
            nonlocal message_contents
            client_messenger = get_default_client_messenger_factory().get_client_messenger()
            client_messenger.connect_to_server()
            expected_ping_index = 0
            received_first_message_datetime = None  # type: datetime
            received_last_message_datetime = None  # type: datetime
            callback_semaphore = Semaphore()
            def callback(client_server_message: ClientServerMessage):
                nonlocal expected_pings_total
                nonlocal expected_ping_index
                nonlocal received_first_message_datetime
                nonlocal received_last_message_datetime
                nonlocal callback_semaphore
                # print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
                self.assertIsInstance(client_server_message, EchoResponseBaseClientServerMessage)
                # The semaphore serializes updates to the shared counter and the
                # first/last arrival timestamps.
                callback_semaphore.acquire()
                expected_ping_index += 1
                if expected_ping_index == 1:
                    received_first_message_datetime = datetime.utcnow()
                if expected_ping_index == expected_pings_total:
                    received_last_message_datetime = datetime.utcnow()
                callback_semaphore.release()
            def on_exception(exception: Exception):
                nonlocal found_exception
                found_exception = exception
            client_messenger.receive_from_server(
                callback=callback,
                on_exception=on_exception
            )
            print(f"{datetime.utcnow()}: sending first announcement")
            client_messenger.send_to_server(
                client_server_message=AnnounceBaseClientServerMessage(
                    name="First"
                )
            )
            # is_ordered=True — presumably requests in-order delivery; confirm
            # against EchoRequestBaseClientServerMessage.
            sent_first_ping_datetime = datetime.utcnow()
            client_messenger.send_to_server(
                client_server_message=EchoRequestBaseClientServerMessage(
                    message=message_contents,
                    is_ordered=True
                )
            )
            for index in range(expected_pings_total - 2):
                client_messenger.send_to_server(
                    client_server_message=EchoRequestBaseClientServerMessage(
                        message=message_contents,
                        is_ordered=True
                    )
                )
            sent_last_ping_datetime = datetime.utcnow()
            client_messenger.send_to_server(
                client_server_message=EchoRequestBaseClientServerMessage(
                    message=message_contents,
                    is_ordered=True
                )
            )
            print(f"{datetime.utcnow()}: waiting for messages")
            # NOTE(review): waits indefinitely for the final response — relies
            # on reliable delivery.
            while received_last_message_datetime is None:
                time.sleep(1)
            time.sleep(1)
            print(f"Sent first message datetime: {sent_first_ping_datetime}")
            print(f"Received first message datetime: {received_first_message_datetime}")
            print(f"Diff: {(received_first_message_datetime - sent_first_ping_datetime).total_seconds()} seconds")
            print(f"Sent last message datetime: {sent_last_ping_datetime}")
            print(f"Received last message datetime: {received_last_message_datetime}")
            print(f"Diff: {(received_last_message_datetime - sent_last_ping_datetime).total_seconds()} seconds")
            seconds_total = (sent_last_ping_datetime - sent_first_ping_datetime).total_seconds()
            messages_per_second = expected_pings_total / seconds_total
            print(f"Seconds to send all messages: {seconds_total}")
            print(f"Sent messages per seconds: {messages_per_second}")
            print(f"Seconds per sent message: {1.0 / messages_per_second}")
            seconds_total = (received_last_message_datetime - received_first_message_datetime).total_seconds()
            messages_per_second = expected_pings_total / seconds_total
            print(f"Seconds to receive all messages: {seconds_total}")
            print(f"Received messages per seconds: {messages_per_second}")
            print(f"Seconds per received message: {1.0 / messages_per_second}")
            print(f"{datetime.utcnow()}: disposing")
            client_messenger.dispose()
            print(f"{datetime.utcnow()}: disposed")
        ping_thread = start_thread(ping_thread_method)
        ping_thread.join()
        time.sleep(0.1)
        print(f"{datetime.utcnow()}: stopping")
        server_messenger.stop_receiving_from_clients()
        print(f"{datetime.utcnow()}: stopped")
        time.sleep(1)
        self.assertIsNone(found_exception)
def test_client_attempts_message_impossible_for_structure_state_but_exception_in_callback(self):
# attempt to reset the presses without first pressing the button
client_messenger = get_default_client_messenger_factory().get_client_messenger()
server_messenger = get_default_server_messenger_factory().get_server_messenger()
server_messenger.start_receiving_from_clients()
time.sleep(1)
client_messenger.connect_to_server()
callback_total = 0
expected_exception = Exception(f"Client should not receive any messages as part of this test.")
def callback(client_server_message: ClientServerMessage):
nonlocal callback_total
nonlocal expected_exception
print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
callback_total += 1
raise expected_exception
found_exception = None # type: Exception
def on_exception(exception: Exception):
nonlocal found_exception
found_exception = exception
client_messenger.receive_from_server(
callback=callback,
on_exception=on_exception
)
print(f"{datetime.utcnow()}: sending press")
client_messenger.send_to_server(
client_server_message=PressButtonBaseClientServerMessage()
)
time.sleep(1)
print(f"{datetime.utcnow()}: sending announcement")
client_messenger.send_to_server(
client_server_message=AnnounceBaseClientServerMessage(
name="<NAME>"
)
)
print(f"{datetime.utcnow()}: waiting for messages")
time.sleep(1)
print(f"{datetime.utcnow()}: disposing")
client_messenger.dispose()
print(f"{datetime.utcnow()}: disposed")
print(f"{datetime.utcnow()}: stopping")
server_messenger.stop_receiving_from_clients()
print(f"{datetime.utcnow()}: stopped")
time.sleep(1)
self.assertEqual(1, callback_total)
self.assertIsNotNone(found_exception)
self.assertEqual(expected_exception, found_exception)
def test_client_attempts_message_impossible_for_structure_state(self):
    """Sending a message that is invalid for the structure's current state
    should yield exactly one AnnounceFailed response and no raised exception.
    """
    # attempt to reset the presses without first pressing the button
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    client_messenger.connect_to_server()
    callback_total = 0
    def callback(client_server_message: ClientServerMessage):
        # every delivery must be the failure response for the bad announce
        nonlocal callback_total
        print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
        callback_total += 1
        self.assertIsInstance(client_server_message, AnnounceFailedBaseClientServerMessage)
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # remember any exception the messenger surfaces
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    print(f"{datetime.utcnow()}: sending press")
    client_messenger.send_to_server(
        client_server_message=PressButtonBaseClientServerMessage()
    )
    time.sleep(1)
    print(f"{datetime.utcnow()}: sending announcement")
    client_messenger.send_to_server(
        client_server_message=AnnounceBaseClientServerMessage(
            name="<NAME>"
        )
    )
    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(1)
    print(f"{datetime.utcnow()}: disposing")
    client_messenger.dispose()
    print(f"{datetime.utcnow()}: disposed")
    print(f"{datetime.utcnow()}: stopping")
    server_messenger.stop_receiving_from_clients()
    print(f"{datetime.utcnow()}: stopped")
    time.sleep(1)
    # exactly one failure response, and nothing blew up along the way
    self.assertEqual(1, callback_total)
    self.assertIsNone(found_exception)
def test_client_attempts_message_impossible_for_child_structure_state(self):
    """Pressing the power button four times should produce exactly one
    PowerOverloadTransmission message back to the client and no exception.
    """
    # call power 4 times
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    client_messenger.connect_to_server()
    callback_total = 0
    def callback(power_overload_transmission: PowerOverloadTransmissionBaseClientServerMessage):
        # the only expected delivery is the overload transmission
        nonlocal callback_total
        callback_total += 1
        self.assertIsInstance(power_overload_transmission, PowerOverloadTransmissionBaseClientServerMessage)
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # remember any exception the messenger surfaces
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    print(f"{datetime.utcnow()}: sending first announcement")
    client_messenger.send_to_server(
        client_server_message=AnnounceBaseClientServerMessage(
            name="First"
        )
    )
    time.sleep(0.1)
    # the original repeated this send block four times verbatim; looping over
    # the ordinals keeps the log output byte-for-byte identical while removing
    # the duplication
    for ordinal in ("first", "second", "third", "fourth"):
        print(f"{datetime.utcnow()}: {ordinal} power: start")
        client_messenger.send_to_server(
            client_server_message=PowerButtonBaseClientServerMessage(
                is_anonymous=False
            )
        )
        print(f"{datetime.utcnow()}: {ordinal} power: end")
        time.sleep(0.1)
    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(1)
    print(f"{datetime.utcnow()}: disposing")
    client_messenger.dispose()
    print(f"{datetime.utcnow()}: disposed")
    print(f"{datetime.utcnow()}: stopping")
    server_messenger.stop_receiving_from_clients()
    print(f"{datetime.utcnow()}: stopped")
    time.sleep(1)
    # four presses overload the child structure exactly once
    self.assertEqual(1, callback_total)
    self.assertIsNone(found_exception)
def test_order_of_messages(self):
    """Sends 1000 ordered echo requests from one client and verifies the
    responses arrive in exactly the order they were sent.
    """
    # send multiple messages from the same client to the server, expecting the response order to be the same
    messages_total = 1000
    print(f"{datetime.utcnow()}: setting up server")
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    print(f"{datetime.utcnow()}: setting up client")
    time.sleep(1)
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    callback_total = 0
    last_message_index = -1
    failed_at_message_index = None  # type: int
    def callback(client_server_message: ClientServerMessage):
        # each echoed payload is the stringified send index; track that they
        # arrive strictly in sequence, recording the first point of failure
        nonlocal callback_total
        nonlocal last_message_index
        nonlocal failed_at_message_index
        # print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
        self.assertIsInstance(client_server_message, EchoResponseBaseClientServerMessage)
        echo_response_client_server_message = client_server_message  # type: EchoResponseBaseClientServerMessage
        if int(echo_response_client_server_message.get_message()) == last_message_index + 1:
            # correct message received
            last_message_index += 1
        else:
            if failed_at_message_index is None:
                failed_at_message_index = last_message_index
        callback_total += 1
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # only the first surfaced exception is kept
        nonlocal found_exception
        if found_exception is None:
            found_exception = exception
    # TODO determine why the first thread to spawn as a part of the connect_to_server process does not die
    client_messenger.connect_to_server()
    time.sleep(1)
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    time.sleep(1)
    for message_index in range(messages_total):
        client_messenger.send_to_server(
            client_server_message=EchoRequestBaseClientServerMessage(
                message=str(message_index),
                is_ordered=True
            )
        )
    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(1)
    print(f"{datetime.utcnow()}: disposing client messenger: start")
    client_messenger.dispose()
    print(f"{datetime.utcnow()}: disposing client messenger: end")
    time.sleep(1)
    print(f"{datetime.utcnow()}: server_messenger.stop_receiving_from_clients(): start")
    server_messenger.stop_receiving_from_clients()
    print(f"{datetime.utcnow()}: server_messenger.stop_receiving_from_clients(): end")
    if found_exception is not None:
        raise found_exception
    # no out-of-order response was ever observed
    self.assertIsNone(failed_at_message_index)
    print(f"end")
def test_two_clients_becoming_out_of_sync(self):
    """Alternates ordered echo requests between two clients while shrinking
    the inter-message delay geometrically, asserting the delay can drop below
    the accepted disorder threshold before any out-of-order response appears.
    """
    # as the delay between two different clients send messages shrinks, how often are the messages received in the wrong order
    current_delay_between_messages_seconds = 1
    delay_percentage_decrease_delta = 0.1
    minimum_delay_between_messages_seconds = 0.0001
    accepted_delay_between_messages_that_could_result_in_disorder = 0.001
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    print(f"{datetime.utcnow()}: setting up clients")
    time.sleep(1)
    client_messengers = []  # type: List[ClientMessenger]
    client_messengers.append(get_default_client_messenger_factory().get_client_messenger())
    client_messengers.append(get_default_client_messenger_factory().get_client_messenger())
    callback_total = 0
    last_message_index = -1
    failed_at_message_index = None  # type: int
    def callback(client_server_message: ClientServerMessage):
        # responses from both clients funnel through here; the payload index
        # must increase strictly by one regardless of which client sent it
        nonlocal callback_total
        nonlocal last_message_index
        nonlocal failed_at_message_index
        #print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
        self.assertIsInstance(client_server_message, EchoResponseBaseClientServerMessage)
        echo_response_client_server_message = client_server_message  # type: EchoResponseBaseClientServerMessage
        if int(echo_response_client_server_message.get_message()) == last_message_index + 1:
            # correct message received
            last_message_index += 1
        else:
            if failed_at_message_index is None:
                failed_at_message_index = last_message_index
        callback_total += 1
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # only the first surfaced exception is kept
        nonlocal found_exception
        if found_exception is None:
            found_exception = exception
    for client_messenger in client_messengers:
        client_messenger.connect_to_server()
        client_messenger.receive_from_server(
            callback=callback,
            on_exception=on_exception
        )
    print(f"{datetime.utcnow()}: sending messages")
    client_messengers_index = 0
    message_index = 0
    client_messengers[client_messengers_index].send_to_server(
        client_server_message=EchoRequestBaseClientServerMessage(
            message=str(message_index),
            is_ordered=True
        )
    )
    message_index += 1
    # alternate senders, shrinking the gap by 10% each round, until either the
    # minimum delay is reached or a disordered response is detected
    while minimum_delay_between_messages_seconds < current_delay_between_messages_seconds and failed_at_message_index is None:
        time.sleep(current_delay_between_messages_seconds)
        client_messengers_index += 1
        if client_messengers_index == len(client_messengers):
            client_messengers_index = 0
        client_messengers[client_messengers_index].send_to_server(
            client_server_message=EchoRequestBaseClientServerMessage(
                message=str(message_index),
                is_ordered=True
            )
        )
        message_index += 1
        current_delay_between_messages_seconds -= current_delay_between_messages_seconds * delay_percentage_decrease_delta
    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(5)
    print(f"{datetime.utcnow()}: disposing")
    for client_messenger in client_messengers:
        client_messenger.dispose()
    print(f"{datetime.utcnow()}: disposed")
    print(f"{datetime.utcnow()}: stopping")
    server_messenger.stop_receiving_from_clients()
    print(f"{datetime.utcnow()}: stopped")
    time.sleep(1)
    if found_exception is not None:
        raise found_exception
    print(f"{datetime.utcnow()}: last successful index {failed_at_message_index} with delay of {current_delay_between_messages_seconds} seconds")
    self.assertLess(current_delay_between_messages_seconds, accepted_delay_between_messages_that_could_result_in_disorder)
def test_dispose_client_too_quickly_before_receiving_all_messages(self):
    """Disposes the client immediately after sending 1000 ordered echo
    requests; whatever responses did arrive must still be in order.
    """
    # a thread seems to remain alive when this happens
    # NOTE: the client_socket read only gets to 988 before it stops reading
    messages_total = 1000
    print(f"{datetime.utcnow()}: setting up server")
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    print(f"{datetime.utcnow()}: setting up client")
    time.sleep(1)
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    callback_total = 0
    last_message_index = -1
    failed_at_message_index = None  # type: int
    def callback(client_server_message: ClientServerMessage):
        # track that payload indexes arrive strictly in sequence, recording
        # the first point at which ordering breaks
        nonlocal callback_total
        nonlocal last_message_index
        nonlocal failed_at_message_index
        # print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
        self.assertIsInstance(client_server_message, EchoResponseBaseClientServerMessage)
        echo_response_client_server_message = client_server_message  # type: EchoResponseBaseClientServerMessage
        if int(echo_response_client_server_message.get_message()) == last_message_index + 1:
            # correct message received
            last_message_index += 1
        else:
            if failed_at_message_index is None:
                failed_at_message_index = last_message_index
        callback_total += 1
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # only the first surfaced exception is kept
        nonlocal found_exception
        if found_exception is None:
            found_exception = exception
    print(f"{datetime.utcnow()}: connecting to server")
    client_messenger.connect_to_server()
    print(f"{datetime.utcnow()}: receiving from server")
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    print(f"{datetime.utcnow()}: sending messages")
    for message_index in range(messages_total):
        client_messenger.send_to_server(
            client_server_message=EchoRequestBaseClientServerMessage(
                message=str(message_index),
                is_ordered=True
            )
        )
    # deliberately no settle delay before dispose — that is the scenario
    print(f"{datetime.utcnow()}: immediately disposing")
    client_messenger.dispose()
    server_messenger.stop_receiving_from_clients()
    if found_exception is not None:
        raise found_exception
    self.assertIsNone(failed_at_message_index)
def test_parse_client_server_message_raises_exception_when_receiving_in_server_messenger(self):
    """When the server fails to parse an incoming message, the injected
    exception must surface from stop_receiving_from_clients() and the client's
    connection must be closed.
    """
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    client_messenger.connect_to_server()

    callback_total = 0
    found_exception = None  # type: Exception

    def on_exception(exception: Exception):
        # capture whatever the client messenger surfaces
        nonlocal found_exception
        found_exception = exception

    def callback(client_server_message: ClientServerMessage):
        # any delivery must be the error response from the server
        nonlocal callback_total
        print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
        callback_total += 1
        self.assertIsInstance(client_server_message, ErrorResponseBaseClientServerMessage)

    client_messenger.receive_from_server(callback=callback, on_exception=on_exception)

    print(f"{datetime.utcnow()}: sending error messages")
    expected_exception = f"test exception: {uuid.uuid4()}"
    client_messenger.send_to_server(
        client_server_message=ErrorRequestBaseClientServerMessage(
            is_constructor_exception_to_set=expected_exception
        )
    )

    print(f"{datetime.utcnow()}: wait for messages")
    time.sleep(1)
    client_messenger.dispose()
    time.sleep(1)

    with self.assertRaises(Exception) as raised_context:
        server_messenger.stop_receiving_from_clients()
    self.assertEqual(expected_exception, str(raised_context.exception))
    # the server encountered an exception, closing the connection
    self.assertIsInstance(found_exception, ReadWriteSocketClosedException)
def test_getting_json_of_client_server_message_raises_exception_when_sending_to_server_messenger(self):
    """If serializing a message to JSON raises, send_to_server must raise that
    exception directly on the client side; the server keeps running.
    """
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    client_messenger.connect_to_server()

    callback_total = 0
    found_exception = None  # type: Exception

    def on_exception(exception: Exception):
        # capture whatever the client messenger surfaces
        nonlocal found_exception
        found_exception = exception

    def callback(client_server_message: ClientServerMessage):
        # nothing should reach the server, so any delivery would be an error response
        nonlocal callback_total
        print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
        callback_total += 1
        self.assertIsInstance(client_server_message, ErrorResponseBaseClientServerMessage)

    client_messenger.receive_from_server(callback=callback, on_exception=on_exception)

    print(f"{datetime.utcnow()}: sending error messages")
    expected_exception = f"test exception: {uuid.uuid4()}"
    with self.assertRaises(Exception) as raised_context:
        client_messenger.send_to_server(
            client_server_message=ErrorRequestBaseClientServerMessage(
                to_json_exception=expected_exception
            )
        )
    self.assertEqual(expected_exception, str(raised_context.exception))

    time.sleep(1)
    client_messenger.dispose()
    time.sleep(1)
    server_messenger.stop_receiving_from_clients()
    if found_exception is not None:
        raise found_exception
def test_getting_structural_error_client_server_message_response_from_client_server_message_raises_exception_when_processing_in_server_messenger_but_succeeds(self):
    """An exception while building the structural error response must not
    take down the connection: an error response arrives first, and a
    subsequent ping still succeeds.
    """
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    client_messenger.connect_to_server()
    callback_total = 0
    def callback(client_server_message: ClientServerMessage):
        # first delivery is the error response, the second the ping response
        nonlocal callback_total
        print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
        if callback_total == 0:
            self.assertIsInstance(client_server_message, ErrorResponseBaseClientServerMessage)
        else:
            self.assertIsInstance(client_server_message, PingResponseBaseClientServerMessage)
        callback_total += 1
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # remember any exception the messenger surfaces
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    print(f"{datetime.utcnow()}: sending error messages")
    expected_exception = f"test exception: {uuid.uuid4()}"
    client_messenger.send_to_server(
        client_server_message=ErrorRequestBaseClientServerMessage(
            get_structural_error_client_server_message_response_exception=expected_exception
        )
    )
    time.sleep(1)
    client_messenger.send_to_server(
        client_server_message=PingRequestBaseClientServerMessage()
    )
    time.sleep(1)
    client_messenger.dispose()
    time.sleep(1)
    server_messenger.stop_receiving_from_clients()
    # the server encountered an exception but did not close the connect due to it and is still receiving requests
    if found_exception is not None:
        raise found_exception
def test_getting_structural_error_client_server_message_response_from_client_server_message_raises_exception_when_processing_in_server_messenger_and_causes_exception(self):
    """After a press has changed structure state, an exception while building
    the structural error response closes the connection: the next send fails
    and the server re-raises on shutdown.
    """
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    client_messenger.connect_to_server()
    callback_total = 0
    def callback(client_server_message: ClientServerMessage):
        # first delivery (if any) would be the error response, later ones pings
        nonlocal callback_total
        print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
        if callback_total == 0:
            self.assertIsInstance(client_server_message, ErrorResponseBaseClientServerMessage)
        else:
            self.assertIsInstance(client_server_message, PingResponseBaseClientServerMessage)
        callback_total += 1
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # remember any exception the messenger surfaces
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    print(f"{datetime.utcnow()}: sending error messages")
    expected_exception = f"test exception: {uuid.uuid4()}"
    client_messenger.send_to_server(
        client_server_message=PressButtonBaseClientServerMessage()
    )
    time.sleep(1)
    print(f"{datetime.utcnow()}: sending ErrorRequestBaseClientServerMessage")
    client_messenger.send_to_server(
        client_server_message=ErrorRequestBaseClientServerMessage(
            get_structural_error_client_server_message_response_exception=expected_exception
        )
    )
    time.sleep(1)
    print(f"{datetime.utcnow()}: sending PingRequestBaseClientServerMessage")
    # the connection is already closed, so this send must fail
    with self.assertRaises(ReadWriteSocketClosedException):
        client_messenger.send_to_server(
            client_server_message=PingRequestBaseClientServerMessage()
        )
    print(f"{datetime.utcnow()}: waiting")
    time.sleep(1)
    print(f"{datetime.utcnow()}: disposing client_messenger")
    client_messenger.dispose()
    time.sleep(1)
    print(f"{datetime.utcnow()}: server_messenger.stop_receiving_from_clients()")
    with self.assertRaises(Exception) as assertedException:
        server_messenger.stop_receiving_from_clients()
    self.assertEqual(expected_exception, str(assertedException.exception))
    self.assertIsInstance(found_exception, ReadWriteSocketClosedException)
def test_parse_client_server_message_in_response_raises_exception_when_parsing_in_server_messenger(self):
    """A response message whose constructor raises during parsing must
    surface that exception through the client's on_exception hook.
    """
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    client_messenger.connect_to_server()
    callback_total = 0
    def callback(client_server_message: ClientServerMessage):
        # first delivery (if any) would be the error response, later ones pings
        nonlocal callback_total
        print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
        if callback_total == 0:
            self.assertIsInstance(client_server_message, ErrorResponseBaseClientServerMessage)
        else:
            self.assertIsInstance(client_server_message, PingResponseBaseClientServerMessage)
        callback_total += 1
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # remember any exception the messenger surfaces
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    print(f"{datetime.utcnow()}: sending error messages")
    expected_exception = f"test exception: {uuid.uuid4()}"
    # the response built for this request will raise in its constructor
    client_messenger.send_to_server(
        client_server_message=ErrorRequestBaseClientServerMessage(
            response_constructor_arguments={
                "is_constructor_exception_to_set": expected_exception
            }
        )
    )
    time.sleep(1)
    client_messenger.send_to_server(
        client_server_message=PingRequestBaseClientServerMessage()
    )
    print(f"{datetime.utcnow()}: waiting")
    time.sleep(1)
    client_messenger.dispose()
    time.sleep(1)
    server_messenger.stop_receiving_from_clients()
    self.assertEqual(expected_exception, str(found_exception))
# TODO create more server-side ErrorResponse tests
def test_unordered_client_server_messages_100m_10s(self):
    """Sends 100 echo requests in alternating subsets of 10 (ordered, then
    unordered), verifying each stream independently arrives in sequence.
    """
    messages_total = 100
    message_subset_length = 10
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    client_messenger.connect_to_server()
    callback_total = 0
    # start one subset-length behind so the first ordered "jump" check passes
    previous_ordered_index = -1 - message_subset_length
    previous_unordered_index = -1
    is_printing = False
    def callback(echo_response: EchoResponseBaseClientServerMessage):
        # classify each response by which subset its index belongs to, and
        # verify that within each stream indexes either advance by one or
        # jump exactly one subset length at a subset boundary
        nonlocal callback_total
        nonlocal previous_ordered_index
        nonlocal previous_unordered_index
        nonlocal is_printing
        nonlocal message_subset_length
        #print(f"{datetime.utcnow()}: callback: echo_response: {echo_response.to_json()}")
        self.assertIsInstance(echo_response, EchoResponseBaseClientServerMessage)
        callback_total += 1
        index = int(echo_response.get_message())
        print(f"index: {index}")
        subset_index = int(index / message_subset_length) % 2
        print(f"subset_index: {subset_index}")
        previous_subset_index = math.floor((index - 1) / message_subset_length) % 2
        print(f"previous_subset_index: {previous_subset_index}")
        if subset_index == 0:
            if previous_subset_index != subset_index:
                if previous_ordered_index + message_subset_length + 1 != index:
                    raise Exception(f"Failed to jump to next index at index: {index}")
                else:
                    if is_printing:
                        print(f"{datetime.utcnow()}: found ordered index: {index}")
            else:
                if previous_ordered_index + 1 != index:
                    raise Exception(f"Failed to find next index at index: {index}")
                else:
                    if is_printing:
                        print(f"{datetime.utcnow()}: found ordered index: {index}")
            previous_ordered_index = index
        else:
            if previous_subset_index != subset_index:
                if previous_unordered_index + message_subset_length + 1 != index:
                    raise Exception(f"Failed to jump to next index at index: {index}")
                else:
                    if is_printing:
                        print(f"{datetime.utcnow()}: found unordered index: {index}")
            else:
                if previous_unordered_index + 1 != index:
                    raise Exception(f"Failed to find next index at index: {index}")
                else:
                    if is_printing:
                        print(f"{datetime.utcnow()}: found unordered index: {index}")
            previous_unordered_index = index
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # remember any exception the messenger surfaces
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    print(f"{datetime.utcnow()}: sending messages")
    for index in range(messages_total):
        # even subsets are sent ordered, odd subsets unordered
        subset_index = int(index / message_subset_length) % 2
        client_messenger.send_to_server(
            client_server_message=EchoRequestBaseClientServerMessage(
                message=str(index),
                is_ordered=(subset_index == 0)
            )
        )
    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(5)
    print(f"{datetime.utcnow()}: disposing client")
    client_messenger.dispose()
    time.sleep(1)
    server_messenger.stop_receiving_from_clients()
    # the server encountered an exception but did not close the connect due to it and is still receiving requests
    if found_exception is not None:
        raise found_exception
def test_unordered_client_server_messages_100m_1s(self):
    """Sends 100 echo requests strictly alternating ordered/unordered
    (subset length 1), verifying each stream independently stays in sequence.
    """
    messages_total = 100
    message_subset_length = 1
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    client_messenger.connect_to_server()
    callback_total = 0
    # start one subset-length behind so the first ordered "jump" check passes
    previous_ordered_index = -1 - message_subset_length
    previous_unordered_index = -1
    is_printing = False
    def callback(echo_response: EchoResponseBaseClientServerMessage):
        # classify each response by which subset its index belongs to, and
        # verify that within each stream indexes either advance by one or
        # jump exactly one subset length at a subset boundary
        nonlocal callback_total
        nonlocal previous_ordered_index
        nonlocal previous_unordered_index
        nonlocal is_printing
        nonlocal message_subset_length
        #print(f"{datetime.utcnow()}: callback: echo_response: {echo_response.to_json()}")
        self.assertIsInstance(echo_response, EchoResponseBaseClientServerMessage)
        callback_total += 1
        index = int(echo_response.get_message())
        #print(f"index: {index}")
        subset_index = int(index / message_subset_length) % 2
        #print(f"subset_index: {subset_index}")
        previous_subset_index = math.floor((index - 1) / message_subset_length) % 2
        #print(f"previous_subset_index: {previous_subset_index}")
        if subset_index == 0:
            if previous_subset_index != subset_index:
                if previous_ordered_index + message_subset_length + 1 != index:
                    raise Exception(f"Failed to jump to next index at index: {index}")
                else:
                    if is_printing:
                        print(f"{datetime.utcnow()}: found ordered index: {index}")
            else:
                if previous_ordered_index + 1 != index:
                    raise Exception(f"Failed to find next index at index: {index}")
                else:
                    if is_printing:
                        print(f"{datetime.utcnow()}: found ordered index: {index}")
            previous_ordered_index = index
        else:
            if previous_subset_index != subset_index:
                if previous_unordered_index + message_subset_length + 1 != index:
                    raise Exception(f"Failed to jump to next index at index: {index}")
                else:
                    if is_printing:
                        print(f"{datetime.utcnow()}: found unordered index: {index}")
            else:
                if previous_unordered_index + 1 != index:
                    raise Exception(f"Failed to find next index at index: {index}")
                else:
                    if is_printing:
                        print(f"{datetime.utcnow()}: found unordered index: {index}")
            previous_unordered_index = index
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # remember any exception the messenger surfaces
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    print(f"{datetime.utcnow()}: sending messages")
    for index in range(messages_total):
        # even subsets are sent ordered, odd subsets unordered
        subset_index = int(index / message_subset_length) % 2
        client_messenger.send_to_server(
            client_server_message=EchoRequestBaseClientServerMessage(
                message=str(index),
                is_ordered=(subset_index == 0)
            )
        )
    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(5)
    print(f"{datetime.utcnow()}: disposing client")
    client_messenger.dispose()
    time.sleep(1)
    server_messenger.stop_receiving_from_clients()
    # the server encountered an exception but did not close the connect due to it and is still receiving requests
    if found_exception is not None:
        raise found_exception
def test_unordered_client_server_messages_1000m_1s(self):
    """Sends 1000 echo requests strictly alternating ordered/unordered
    (subset length 1), verifying each stream independently stays in sequence.
    """
    messages_total = 1000
    message_subset_length = 1
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    client_messenger.connect_to_server()
    callback_total = 0
    # start one subset-length behind so the first ordered "jump" check passes
    previous_ordered_index = -1 - message_subset_length
    previous_unordered_index = -1
    is_printing = False
    def callback(echo_response: EchoResponseBaseClientServerMessage):
        # classify each response by which subset its index belongs to, and
        # verify that within each stream indexes either advance by one or
        # jump exactly one subset length at a subset boundary
        nonlocal callback_total
        nonlocal previous_ordered_index
        nonlocal previous_unordered_index
        nonlocal is_printing
        nonlocal message_subset_length
        #print(f"{datetime.utcnow()}: callback: echo_response: {echo_response.to_json()}")
        self.assertIsInstance(echo_response, EchoResponseBaseClientServerMessage)
        callback_total += 1
        index = int(echo_response.get_message())
        #print(f"index: {index}")
        subset_index = int(index / message_subset_length) % 2
        #print(f"subset_index: {subset_index}")
        previous_subset_index = math.floor((index - 1) / message_subset_length) % 2
        #print(f"previous_subset_index: {previous_subset_index}")
        if subset_index == 0:
            if previous_subset_index != subset_index:
                if previous_ordered_index + message_subset_length + 1 != index:
                    raise Exception(f"Failed to jump to next index at index: {index}")
                else:
                    if is_printing:
                        print(f"{datetime.utcnow()}: found ordered index: {index}")
            else:
                if previous_ordered_index + 1 != index:
                    raise Exception(f"Failed to find next index at index: {index}")
                else:
                    if is_printing:
                        print(f"{datetime.utcnow()}: found ordered index: {index}")
            previous_ordered_index = index
        else:
            if previous_subset_index != subset_index:
                if previous_unordered_index + message_subset_length + 1 != index:
                    raise Exception(f"Failed to jump to next index at index: {index}")
                else:
                    if is_printing:
                        print(f"{datetime.utcnow()}: found unordered index: {index}")
            else:
                if previous_unordered_index + 1 != index:
                    raise Exception(f"Failed to find next index at index: {index}")
                else:
                    if is_printing:
                        print(f"{datetime.utcnow()}: found unordered index: {index}")
            previous_unordered_index = index
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # remember any exception the messenger surfaces
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    print(f"{datetime.utcnow()}: sending messages")
    for index in range(messages_total):
        # even subsets are sent ordered, odd subsets unordered
        subset_index = int(index / message_subset_length) % 2
        client_messenger.send_to_server(
            client_server_message=EchoRequestBaseClientServerMessage(
                message=str(index),
                is_ordered=(subset_index == 0)
            )
        )
    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(5)
    print(f"{datetime.utcnow()}: disposing client")
    client_messenger.dispose()
    time.sleep(1)
    server_messenger.stop_receiving_from_clients()
    # the server encountered an exception but did not close the connect due to it and is still receiving requests
    if found_exception is not None:
        raise found_exception
def test_child_structure_power_once_then_reset(self):
    """One power press followed by a reset should produce no messages back to
    the client and no exception.
    """
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    client_messenger.connect_to_server()

    callback_total = 0
    found_exception = None  # type: Exception

    def on_exception(exception: Exception):
        # capture whatever the messenger surfaces
        nonlocal found_exception
        found_exception = exception

    def callback(client_server_message: ClientServerMessage):
        # nothing is expected here; count deliveries so the assert can fail loudly
        nonlocal callback_total
        callback_total += 1
        print(f"received callback: {client_server_message}")

    client_messenger.receive_from_server(callback=callback, on_exception=on_exception)

    print(f"{datetime.utcnow()}: sending first announcement")
    client_messenger.send_to_server(
        client_server_message=AnnounceBaseClientServerMessage(name="First")
    )
    print(f"{datetime.utcnow()}: sending first power")
    client_messenger.send_to_server(
        client_server_message=PowerButtonBaseClientServerMessage(is_anonymous=False)
    )
    print(f"{datetime.utcnow()}: sending reset")
    client_messenger.send_to_server(client_server_message=ResetButtonBaseClientServerMessage())

    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(1)
    print(f"{datetime.utcnow()}: disposing")
    client_messenger.dispose()
    print(f"{datetime.utcnow()}: disposed")
    print(f"{datetime.utcnow()}: stopping")
    server_messenger.stop_receiving_from_clients()
    print(f"{datetime.utcnow()}: stopped")
    time.sleep(1)

    # a single press then reset never overloads, so no callback fires
    self.assertEqual(0, callback_total)
    self.assertIsNone(found_exception)
def test_child_structure_power_four_times(self):
    """Pressing the power button four times should produce exactly one
    PowerOverloadTransmission message back to the client and no exception.
    """
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    client_messenger.connect_to_server()
    callback_total = 0
    def callback(power_overload_transmission: PowerOverloadTransmissionBaseClientServerMessage):
        # the only expected delivery is the overload transmission
        nonlocal callback_total
        callback_total += 1
        self.assertIsInstance(power_overload_transmission, PowerOverloadTransmissionBaseClientServerMessage)
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # remember any exception the messenger surfaces
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    print(f"{datetime.utcnow()}: sending first announcement")
    client_messenger.send_to_server(
        client_server_message=AnnounceBaseClientServerMessage(
            name="First"
        )
    )
    time.sleep(0.1)
    # the original repeated this send block four times verbatim; looping over
    # the ordinals keeps the log output byte-for-byte identical while removing
    # the duplication
    for ordinal in ("first", "second", "third", "fourth"):
        print(f"{datetime.utcnow()}: {ordinal} power: start")
        client_messenger.send_to_server(
            client_server_message=PowerButtonBaseClientServerMessage(
                is_anonymous=False
            )
        )
        print(f"{datetime.utcnow()}: {ordinal} power: end")
        time.sleep(0.1)
    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(1)
    print(f"{datetime.utcnow()}: disposing")
    client_messenger.dispose()
    print(f"{datetime.utcnow()}: disposed")
    print(f"{datetime.utcnow()}: stopping")
    server_messenger.stop_receiving_from_clients()
    print(f"{datetime.utcnow()}: stopped")
    time.sleep(1)
    # four presses overload the structure exactly once
    self.assertEqual(1, callback_total)
    self.assertIsNone(found_exception)
	def test_child_structure_power_two_times_anonymous_underpowered_and_power_attempt_with_failure(self):
		"""Two anonymous power presses, three button presses, then one more power press.

		Expectations encoded below: the three presses yield a ThreePressesTransmission
		reporting "underpowered", and the final power press yields a
		PowerButtonFailedBaseClientServerMessage back to the powering client, giving a
		combined callback_total of 2.
		"""
		client_messenger = get_default_client_messenger_factory().get_client_messenger()
		server_messenger = get_default_server_messenger_factory().get_server_messenger()
		server_messenger.start_receiving_from_clients()
		time.sleep(1)
		client_messenger.connect_to_server()
		# shared counter: incremented by both this client's callback and press_callback below
		callback_total = 0
		def callback(client_server_message: ClientServerMessage):
			# only the failed final power press is expected to reach this client
			nonlocal callback_total
			callback_total += 1
			self.assertIsInstance(client_server_message, PowerButtonFailedBaseClientServerMessage)
		found_exception = None  # type: Exception
		def on_exception(exception: Exception):
			# captured here and re-raised at the end of the test
			nonlocal found_exception
			found_exception = exception
		client_messenger.receive_from_server(
			callback=callback,
			on_exception=on_exception
		)
		print(f"{datetime.utcnow()}: sending first announcement")
		client_messenger.send_to_server(
			client_server_message=AnnounceBaseClientServerMessage(
				name="First"
			)
		)
		time.sleep(0.1)
		# first anonymous power press
		print(f"{datetime.utcnow()}: first power: start")
		client_messenger.send_to_server(
			client_server_message=PowerButtonBaseClientServerMessage(
				is_anonymous=True
			)
		)
		print(f"{datetime.utcnow()}: first power: end")
		time.sleep(0.1)
		# second anonymous power press; per the assertion below, two presses leave
		# the structure "underpowered"
		print(f"{datetime.utcnow()}: second power: start")
		client_messenger.send_to_server(
			client_server_message=PowerButtonBaseClientServerMessage(
				is_anonymous=True
			)
		)
		print(f"{datetime.utcnow()}: second power: end")
		time.sleep(0.1)
		# a second client presses the button three times and should be told "underpowered"
		press_client_messenger = get_default_client_messenger_factory().get_client_messenger()
		press_client_messenger.connect_to_server()
		def press_callback(three_presses_transmission: ThreePressesTransmissionBaseClientServerMessage):
			nonlocal callback_total
			callback_total += 1
			self.assertIsInstance(three_presses_transmission, ThreePressesTransmissionBaseClientServerMessage)
			self.assertEqual("underpowered", three_presses_transmission.get_power())
		press_client_messenger.receive_from_server(
			callback=press_callback,
			on_exception=on_exception
		)
		time.sleep(0.1)
		print(f"{datetime.utcnow()}: first press: start")
		press_client_messenger.send_to_server(
			client_server_message=PressButtonBaseClientServerMessage()
		)
		print(f"{datetime.utcnow()}: first press: end")
		time.sleep(0.1)
		print(f"{datetime.utcnow()}: second press: start")
		press_client_messenger.send_to_server(
			client_server_message=PressButtonBaseClientServerMessage()
		)
		print(f"{datetime.utcnow()}: second press: end")
		time.sleep(0.1)
		print(f"{datetime.utcnow()}: third press: start")
		press_client_messenger.send_to_server(
			client_server_message=PressButtonBaseClientServerMessage()
		)
		print(f"{datetime.utcnow()}: third press: end")
		time.sleep(0.1)
		# one more power press after the button presses; callback above expects it to fail
		print(f"{datetime.utcnow()}: third power: start")
		client_messenger.send_to_server(
			client_server_message=PowerButtonBaseClientServerMessage(
				is_anonymous=True
			)
		)
		print(f"{datetime.utcnow()}: third power: end")
		time.sleep(0.1)
		print(f"{datetime.utcnow()}: waiting for messages")
		time.sleep(1)
		print(f"{datetime.utcnow()}: dispose client_messenger: start")
		client_messenger.dispose()
		print(f"{datetime.utcnow()}: dispose client_messenger: end")
		print(f"{datetime.utcnow()}: dispose press_client_messenger: start")
		press_client_messenger.dispose()
		print(f"{datetime.utcnow()}: dispose press_client_messenger: end")
		print(f"{datetime.utcnow()}: stopping")
		server_messenger.stop_receiving_from_clients()
		print(f"{datetime.utcnow()}: stopped")
		time.sleep(1)
		# one ThreePressesTransmission + one PowerButtonFailed message expected
		self.assertEqual(2, callback_total)
		if found_exception is not None:
			raise found_exception
def test_child_structure_power_three_times_anonymous_powered(self):
client_messenger = get_default_client_messenger_factory().get_client_messenger()
server_messenger = get_default_server_messenger_factory().get_server_messenger()
server_messenger.start_receiving_from_clients()
time.sleep(1)
client_messenger.connect_to_server()
callback_total = 0
def callback(client_server_message: ClientServerMessage):
nonlocal callback_total
callback_total += 1
raise Exception("This client should not be receiving messages.")
found_exception = None # type: Exception
def on_exception(exception: Exception):
nonlocal found_exception
found_exception = exception
client_messenger.receive_from_server(
callback=callback,
on_exception=on_exception
)
print(f"{datetime.utcnow()}: sending first announcement")
client_messenger.send_to_server(
client_server_message=AnnounceBaseClientServerMessage(
name="First"
)
)
time.sleep(0.1)
print(f"{datetime.utcnow()}: first power: start")
client_messenger.send_to_server(
client_server_message=PowerButtonBaseClientServerMessage(
is_anonymous=True
)
)
print(f"{datetime.utcnow()}: first power: end")
time.sleep(0.1)
print(f"{datetime.utcnow()}: second power: start")
client_messenger.send_to_server(
client_server_message=PowerButtonBaseClientServerMessage(
is_anonymous=True
)
)
print(f"{datetime.utcnow()}: second power: end")
time.sleep(0.1)
print(f"{datetime.utcnow()}: third power: start")
client_messenger.send_to_server(
client_server_message=PowerButtonBaseClientServerMessage(
is_anonymous=True
)
)
print(f"{datetime.utcnow()}: third power: end")
time.sleep(0.1)
press_client_messenger = get_default_client_messenger_factory().get_client_messenger()
press_client_messenger.connect_to_server()
def press_callback(three_presses_transmission: ThreePressesTransmissionBaseClientServerMessage):
nonlocal callback_total
callback_total += 1
self.assertIsInstance(three_presses_transmission, ThreePressesTransmissionBaseClientServerMessage)
self.assertEqual("powered", three_presses_transmission.get_power())
press_client_messenger.receive_from_server(
callback=press_callback,
on_exception=on_exception
)
time.sleep(0.1)
print(f"{datetime.utcnow()}: first press: start")
press_client_messenger.send_to_server(
client_server_message=PressButtonBaseClientServerMessage()
)
print(f"{datetime.utcnow()}: first press: end")
time.sleep(0.1)
print(f"{datetime.utcnow()}: second press: start")
press_client_messenger.send_to_server(
client_server_message=PressButtonBaseClientServerMessage()
)
print(f"{datetime.utcnow()}: second press: end")
time.sleep(0.1)
print(f"{datetime.utcnow()}: third press: start")
press_client_messenger.send_to_server(
client_server_message=PressButtonBaseClientServerMessage()
)
print(f"{datetime.utcnow()}: third press: end")
time.sleep(0.1)
print(f"{datetime.utcnow()}: waiting for messages")
time.sleep(1)
print(f"{datetime.utcnow()}: dispose client_messenger: start")
client_messenger.dispose()
print(f"{datetime.utcnow()}: dispose client_messenger: end")
print(f"{datetime.utcnow()}: dispose press_client_messenger: start")
press_client_messenger.dispose()
print(f"{datetime.utcnow()}: dispose press_client_messenger: end")
print(f"{datetime.utcnow()}: stopping")
server_messenger.stop_receiving_from_clients()
print(f"{datetime.utcnow()}: stopped")
time.sleep(1)
self.assertEqual(1, callback_total)
if found_exception is not None:
raise found_exception
def test_child_structure_power_four_times_anonymous_overpowered(self):
client_messenger = get_default_client_messenger_factory().get_client_messenger()
server_messenger = get_default_server_messenger_factory().get_server_messenger()
server_messenger.start_receiving_from_clients()
time.sleep(1)
client_messenger.connect_to_server()
callback_total = 0
def callback(client_server_message: ClientServerMessage):
nonlocal callback_total
callback_total += 1
raise Exception("This client should not be receiving messages.")
found_exception = None # type: Exception
def on_exception(exception: Exception):
nonlocal found_exception
found_exception = exception
client_messenger.receive_from_server(
callback=callback,
on_exception=on_exception
)
print(f"{datetime.utcnow()}: sending first announcement")
client_messenger.send_to_server(
client_server_message=AnnounceBaseClientServerMessage(
name="First"
)
)
time.sleep(0.1)
print(f"{datetime.utcnow()}: first power: start")
client_messenger.send_to_server(
client_server_message=PowerButtonBaseClientServerMessage(
is_anonymous=True
)
)
print(f"{datetime.utcnow()}: first power: end")
time.sleep(0.1)
print(f"{datetime.utcnow()}: second power: start")
client_messenger.send_to_server(
client_server_message=PowerButtonBaseClientServerMessage(
is_anonymous=True
)
)
print(f"{datetime.utcnow()}: second power: end")
time.sleep(0.1)
print(f"{datetime.utcnow()}: third power: start")
client_messenger.send_to_server(
client_server_message=PowerButtonBaseClientServerMessage(
is_anonymous=True
)
)
print(f"{datetime.utcnow()}: third power: end")
time.sleep(0.1)
print(f"{datetime.utcnow()}: fourth power: start")
client_messenger.send_to_server(
client_server_message=PowerButtonBaseClientServerMessage(
is_anonymous=True
)
)
print(f"{datetime.utcnow()}: fourth power: end")
time.sleep(0.1)
press_client_messenger = get_default_client_messenger_factory().get_client_messenger()
press_client_messenger.connect_to_server()
def press_callback(three_presses_transmission: ThreePressesTransmissionBaseClientServerMessage):
nonlocal callback_total
callback_total += 1
self.assertIsInstance(three_presses_transmission, ThreePressesTransmissionBaseClientServerMessage)
self.assertEqual("overpowered", three_presses_transmission.get_power())
press_client_messenger.receive_from_server(
callback=press_callback,
on_exception=on_exception
)
time.sleep(0.1)
print(f"{datetime.utcnow()}: first press: start")
press_client_messenger.send_to_server(
client_server_message=PressButtonBaseClientServerMessage()
)
print(f"{datetime.utcnow()}: first press: end")
time.sleep(0.1)
print(f"{datetime.utcnow()}: second press: start")
press_client_messenger.send_to_server(
client_server_message=PressButtonBaseClientServerMessage()
)
print(f"{datetime.utcnow()}: second press: end")
time.sleep(0.1)
print(f"{datetime.utcnow()}: third press: start")
press_client_messenger.send_to_server(
client_server_message=PressButtonBaseClientServerMessage()
)
print(f"{datetime.utcnow()}: third press: end")
time.sleep(0.1)
print(f"{datetime.utcnow()}: waiting for messages")
time.sleep(1)
print(f"{datetime.utcnow()}: dispose client_messenger: start")
client_messenger.dispose()
print(f"{datetime.utcnow()}: dispose client_messenger: end")
print(f"{datetime.utcnow()}: dispose press_client_messenger: start")
press_client_messenger.dispose()
print(f"{datetime.utcnow()}: dispose press_client_messenger: end")
print(f"{datetime.utcnow()}: stopping")
server_messenger.stop_receiving_from_clients()
print(f"{datetime.utcnow()}: stopped")
time.sleep(1)
self.assertEqual(1, callback_total)
if found_exception is not None:
raise found_exception
def test_child_structure_power_five_times_anonymous_impossible_state(self):
client_messenger = get_default_client_messenger_factory().get_client_messenger()
server_messenger = get_default_server_messenger_factory().get_server_messenger()
server_messenger.start_receiving_from_clients()
time.sleep(1)
client_messenger.connect_to_server()
callback_total = 0
def callback(power_button_failed: PowerButtonFailedBaseClientServerMessage):
nonlocal callback_total
callback_total += 1
self.assertIsInstance(power_button_failed, PowerButtonFailedBaseClientServerMessage)
found_exception = None # type: Exception
def on_exception(exception: Exception):
nonlocal found_exception
found_exception = exception
client_messenger.receive_from_server(
callback=callback,
on_exception=on_exception
)
try:
print(f"{datetime.utcnow()}: sending first announcement")
client_messenger.send_to_server(
client_server_message=AnnounceBaseClientServerMessage(
name="First"
)
)
time.sleep(0.1)
print(f"{datetime.utcnow()}: first power: start")
client_messenger.send_to_server(
client_server_message=PowerButtonBaseClientServerMessage(
is_anonymous=True
)
)
print(f"{datetime.utcnow()}: first power: end")
time.sleep(0.1)
print(f"{datetime.utcnow()}: second power: start")
client_messenger.send_to_server(
client_server_message=PowerButtonBaseClientServerMessage(
is_anonymous=True
)
)
print(f"{datetime.utcnow()}: second power: end")
time.sleep(0.1)
print(f"{datetime.utcnow()}: third power: start")
client_messenger.send_to_server(
client_server_message=PowerButtonBaseClientServerMessage(
is_anonymous=True
)
)
print(f"{datetime.utcnow()}: third power: end")
time.sleep(0.1)
print(f"{datetime.utcnow()}: fourth power: start")
client_messenger.send_to_server(
client_server_message=PowerButtonBaseClientServerMessage(
is_anonymous=True
)
)
print(f"{datetime.utcnow()}: fourth power: end")
time.sleep(0.1)
print(f"{datetime.utcnow()}: fifth power: start")
client_messenger.send_to_server(
client_server_message=PowerButtonBaseClientServerMessage(
is_anonymous=True
)
)
print(f"{datetime.utcnow()}: fifth power: end")
time.sleep(0.1)
print(f"{datetime.utcnow()}: waiting for messages")
time.sleep(1)
finally:
print(f"{datetime.utcnow()}: dispose client_messenger: start")
client_messenger.dispose()
print(f"{datetime.utcnow()}: dispose client_messenger: end")
print(f"{datetime.utcnow()}: stopping")
server_messenger.stop_receiving_from_clients()
print(f"{datetime.utcnow()}: stopped")
time.sleep(1)
self.assertEqual(1, callback_total)
if found_exception is not None:
raise found_exception
def test_timer_request_1s(self):
client_messenger = get_default_client_messenger_factory().get_client_messenger()
server_messenger = get_default_server_messenger_factory().get_server_messenger()
server_messenger.start_receiving_from_clients()
time.sleep(1)
client_messenger.connect_to_server()
callback_total = 0
expected_message = str(uuid.uuid4())
def callback(timer_response: TimerResponseBaseClientServerMessage):
nonlocal callback_total
nonlocal expected_message
callback_total += 1
print(f"{datetime.utcnow()}: received message")
self.assertIsInstance(timer_response, TimerResponseBaseClientServerMessage)
self.assertEqual(expected_message, timer_response.get_message())
found_exception = None # type: Exception
def on_exception(exception: Exception):
nonlocal found_exception
found_exception = exception
client_messenger.receive_from_server(
callback=callback,
on_exception=on_exception
)
time.sleep(0.1)
print(f"{datetime.utcnow()}: sending message")
client_messenger.send_to_server(
client_server_message=TimerRequestBaseClientServerMessage(
message=expected_message,
seconds=1.0
)
)
print(f"{datetime.utcnow()}: waiting for messages")
time.sleep(5)
print(f"{datetime.utcnow()}: dispose client_messenger: start")
client_messenger.dispose()
print(f"{datetime.utcnow()}: dispose client_messenger: end")
print(f"{datetime.utcnow()}: stopping")
server_messenger.stop_receiving_from_clients()
print(f"{datetime.utcnow()}: stopped")
time.sleep(5)
self.assertEqual(1, callback_total)
if found_exception is not None:
raise found_exception
def test_timer_request_after_client_disposed(self):
client_messenger = get_default_client_messenger_factory().get_client_messenger()
server_messenger = get_default_server_messenger_factory().get_server_messenger()
server_messenger.start_receiving_from_clients()
time.sleep(1)
client_messenger.connect_to_server()
callback_total = 0
expected_message = str(uuid.uuid4())
def callback(timer_response: TimerResponseBaseClientServerMessage):
nonlocal callback_total
nonlocal expected_message
callback_total += 1
print(f"{datetime.utcnow()}: received message")
self.assertIsInstance(timer_response, TimerResponseBaseClientServerMessage)
self.assertEqual(expected_message, timer_response.get_message())
found_exception = None # type: Exception
def on_exception(exception: Exception):
nonlocal found_exception
found_exception = exception
client_messenger.receive_from_server(
callback=callback,
on_exception=on_exception
)
time.sleep(0.1)
print(f"{datetime.utcnow()}: sending message")
client_messenger.send_to_server(
client_server_message=TimerRequestBaseClientServerMessage(
message=expected_message,
seconds=3.0
)
)
print(f"{datetime.utcnow()}: waiting for messages")
time.sleep(0.5)
print(f"{datetime.utcnow()}: dispose client_messenger: start")
client_messenger.dispose()
print(f"{datetime.utcnow()}: dispose client_messenger: end")
time.sleep(4.0)
print(f"{datetime.utcnow()}: stopping")
server_messenger.stop_receiving_from_clients()
print(f"{datetime.utcnow()}: stopped")
time.sleep(5)
self.assertEqual(0, callback_total)
if found_exception is not None:
raise found_exception
def test_timer_request_after_server_stopped(self):
client_messenger = get_default_client_messenger_factory().get_client_messenger()
server_messenger = get_default_server_messenger_factory().get_server_messenger()
server_messenger.start_receiving_from_clients()
time.sleep(1)
client_messenger.connect_to_server()
callback_total = 0
expected_message = str(uuid.uuid4())
def callback(timer_response: TimerResponseBaseClientServerMessage):
nonlocal callback_total
nonlocal expected_message
callback_total += 1
print(f"{datetime.utcnow()}: received message")
self.assertIsInstance(timer_response, TimerResponseBaseClientServerMessage)
self.assertEqual(expected_message, timer_response.get_message())
found_exception = None # type: Exception
def on_exception(exception: Exception):
nonlocal found_exception
found_exception = exception
client_messenger.receive_from_server(
callback=callback,
on_exception=on_exception
)
time.sleep(0.1)
print(f"{datetime.utcnow()}: sending message")
client_messenger.send_to_server(
client_server_message=TimerRequestBaseClientServerMessage(
message=expected_message,
seconds=10.0
)
)
print(f"{datetime.utcnow()}: waiting for messages")
time.sleep(0.5)
print(f"{datetime.utcnow()}: stopping server")
server_messenger.stop_receiving_from_clients()
print(f"{datetime.utcnow()}: stopped server")
time.sleep(12.0)
print(f"{datetime.utcnow()}: dispose client_messenger: start")
client_messenger.dispose()
print(f"{datetime.utcnow()}: dispose client_messenger: end")
time.sleep(5)
self.assertEqual(0, callback_total)
self.assertIsInstance(found_exception, ReadWriteSocketClosedException)
# TODO determine where the lingering thread is (2021-12-09)
|
[
"uuid.uuid4",
"matplotlib.pyplot.show",
"austin_heller_repo.threading.SingletonMemorySequentialQueueFactory",
"austin_heller_repo.kafka_manager.KafkaSequentialQueueFactory",
"austin_heller_repo.threading.start_thread",
"austin_heller_repo.threading.Semaphore",
"austin_heller_repo.socket.ClientSocketFactory",
"math.floor",
"time.sleep",
"austin_heller_repo.common.HostPointer",
"datetime.datetime.utcnow",
"austin_heller_repo.socket.ServerSocketFactory"
] |
[((1228, 1280), 'austin_heller_repo.common.HostPointer', 'HostPointer', ([], {'host_address': '"""0.0.0.0"""', 'host_port': '(36429)'}), "(host_address='0.0.0.0', host_port=36429)\n", (1239, 1280), False, 'from austin_heller_repo.common import HostPointer\n'), ((1351, 1402), 'austin_heller_repo.common.HostPointer', 'HostPointer', ([], {'host_address': '"""0.0.0.0"""', 'host_port': '(9092)'}), "(host_address='0.0.0.0', host_port=9092)\n", (1362, 1402), False, 'from austin_heller_repo.common import HostPointer\n'), ((2529, 2625), 'austin_heller_repo.kafka_manager.KafkaSequentialQueueFactory', 'KafkaSequentialQueueFactory', ([], {'kafka_manager': 'kafka_manager', 'kafka_topic_name': 'kafka_topic_name'}), '(kafka_manager=kafka_manager, kafka_topic_name=\n kafka_topic_name)\n', (2556, 2625), False, 'from austin_heller_repo.kafka_manager import KafkaSequentialQueueFactory, KafkaManager, KafkaWrapper, KafkaManagerFactory\n'), ((2667, 2706), 'austin_heller_repo.threading.SingletonMemorySequentialQueueFactory', 'SingletonMemorySequentialQueueFactory', ([], {}), '()\n', (2704, 2706), False, 'from austin_heller_repo.threading import start_thread, Semaphore, SingletonMemorySequentialQueueFactory\n'), ((34910, 34943), 'austin_heller_repo.threading.start_thread', 'start_thread', (['timer_thread_method'], {}), '(timer_thread_method)\n', (34922, 34943), False, 'from austin_heller_repo.threading import start_thread, Semaphore, SingletonMemorySequentialQueueFactory\n'), ((38691, 38704), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (38701, 38704), False, 'import time\n'), ((38843, 38856), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (38853, 38856), False, 'import time\n'), ((39148, 39161), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (39158, 39161), False, 'import time\n'), ((39306, 39319), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (39316, 39319), False, 'import time\n'), ((39353, 39366), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (39363, 
39366), False, 'import time\n'), ((39420, 39433), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (39430, 39433), False, 'import time\n'), ((39722, 39735), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (39732, 39735), False, 'import time\n'), ((39880, 39893), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (39890, 39893), False, 'import time\n'), ((39947, 39960), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (39957, 39960), False, 'import time\n'), ((39994, 40007), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (40004, 40007), False, 'import time\n'), ((40315, 40328), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (40325, 40328), False, 'import time\n'), ((40372, 40385), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (40382, 40385), False, 'import time\n'), ((40735, 40748), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (40745, 40748), False, 'import time\n'), ((40853, 40866), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (40863, 40866), False, 'import time\n'), ((40900, 40913), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (40910, 40913), False, 'import time\n'), ((40967, 40980), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (40977, 40980), False, 'import time\n'), ((41345, 41358), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (41355, 41358), False, 'import time\n'), ((41402, 41415), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (41412, 41415), False, 'import time\n'), ((41765, 41778), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (41775, 41778), False, 'import time\n'), ((41883, 41896), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (41893, 41896), False, 'import time\n'), ((41950, 41963), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (41960, 41963), False, 'import time\n'), ((41997, 42010), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (42007, 42010), False, 'import time\n'), ((42394, 42407), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (42404, 42407), False, 
'import time\n'), ((43725, 43738), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (43735, 43738), False, 'import time\n'), ((43994, 44007), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (44004, 44007), False, 'import time\n'), ((44448, 44461), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (44458, 44461), False, 'import time\n'), ((45766, 45779), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (45776, 45779), False, 'import time\n'), ((46035, 46048), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (46045, 46048), False, 'import time\n'), ((46505, 46518), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (46515, 46518), False, 'import time\n'), ((48220, 48233), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (48230, 48233), False, 'import time\n'), ((48774, 48787), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (48784, 48787), False, 'import time\n'), ((49085, 49098), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (49095, 49098), False, 'import time\n'), ((49706, 49719), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (49716, 49719), False, 'import time\n'), ((52228, 52243), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (52238, 52243), False, 'import time\n'), ((52409, 52424), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (52419, 52424), False, 'import time\n'), ((52592, 52607), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (52602, 52607), False, 'import time\n'), ((52767, 52782), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (52777, 52782), False, 'import time\n'), ((52841, 52854), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (52851, 52854), False, 'import time\n'), ((53187, 53200), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (53197, 53200), False, 'import time\n'), ((53941, 53954), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (53951, 53954), False, 'import time\n'), ((55427, 55442), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (55437, 55442), False, 
'import time\n'), ((55636, 55651), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (55646, 55651), False, 'import time\n'), ((55817, 55830), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (55827, 55830), False, 'import time\n'), ((55927, 55940), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (55937, 55940), False, 'import time\n'), ((56156, 56169), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (56166, 56169), False, 'import time\n'), ((56254, 56267), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (56264, 56267), False, 'import time\n'), ((56407, 56420), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (56417, 56420), False, 'import time\n'), ((56466, 56479), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (56476, 56479), False, 'import time\n'), ((56851, 56864), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (56861, 56864), False, 'import time\n'), ((57846, 57861), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (57856, 57861), False, 'import time\n'), ((58014, 58029), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (58024, 58029), False, 'import time\n'), ((58088, 58101), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (58098, 58101), False, 'import time\n'), ((58357, 58370), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (58367, 58370), False, 'import time\n'), ((58803, 58816), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (58813, 58816), False, 'import time\n'), ((60311, 60326), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (60321, 60326), False, 'import time\n'), ((61126, 61158), 'austin_heller_repo.threading.start_thread', 'start_thread', (['ping_thread_method'], {}), '(ping_thread_method)\n', (61138, 61158), False, 'from austin_heller_repo.threading import start_thread, Semaphore, SingletonMemorySequentialQueueFactory\n'), ((61183, 61198), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (61193, 61198), False, 'import time\n'), ((61313, 61326), 'time.sleep', 'time.sleep', 
(['(1)'], {}), '(1)\n', (61323, 61326), False, 'import time\n'), ((61582, 61595), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (61592, 61595), False, 'import time\n'), ((62678, 62691), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (62688, 62691), False, 'import time\n'), ((66506, 66538), 'austin_heller_repo.threading.start_thread', 'start_thread', (['ping_thread_method'], {}), '(ping_thread_method)\n', (66518, 66538), False, 'from austin_heller_repo.threading import start_thread, Semaphore, SingletonMemorySequentialQueueFactory\n'), ((66563, 66578), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (66573, 66578), False, 'import time\n'), ((66717, 66730), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (66727, 66730), False, 'import time\n'), ((67033, 67046), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (67043, 67046), False, 'import time\n'), ((71729, 71761), 'austin_heller_repo.threading.start_thread', 'start_thread', (['ping_thread_method'], {}), '(ping_thread_method)\n', (71741, 71761), False, 'from austin_heller_repo.threading import start_thread, Semaphore, SingletonMemorySequentialQueueFactory\n'), ((71786, 71801), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (71796, 71801), False, 'import time\n'), ((71940, 71953), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (71950, 71953), False, 'import time\n'), ((72257, 72270), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (72267, 72270), False, 'import time\n'), ((76098, 76130), 'austin_heller_repo.threading.start_thread', 'start_thread', (['ping_thread_method'], {}), '(ping_thread_method)\n', (76110, 76130), False, 'from austin_heller_repo.threading import start_thread, Semaphore, SingletonMemorySequentialQueueFactory\n'), ((76155, 76170), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (76165, 76170), False, 'import time\n'), ((76309, 76322), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (76319, 76322), False, 'import time\n'), ((76627, 76640), 
'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (76637, 76640), False, 'import time\n'), ((80482, 80514), 'austin_heller_repo.threading.start_thread', 'start_thread', (['ping_thread_method'], {}), '(ping_thread_method)\n', (80494, 80514), False, 'from austin_heller_repo.threading import start_thread, Semaphore, SingletonMemorySequentialQueueFactory\n'), ((80539, 80554), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (80549, 80554), False, 'import time\n'), ((80693, 80706), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (80703, 80706), False, 'import time\n'), ((81011, 81024), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (81021, 81024), False, 'import time\n'), ((84870, 84902), 'austin_heller_repo.threading.start_thread', 'start_thread', (['ping_thread_method'], {}), '(ping_thread_method)\n', (84882, 84902), False, 'from austin_heller_repo.threading import start_thread, Semaphore, SingletonMemorySequentialQueueFactory\n'), ((84927, 84942), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (84937, 84942), False, 'import time\n'), ((85081, 85094), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (85091, 85094), False, 'import time\n'), ((85400, 85413), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (85410, 85413), False, 'import time\n'), ((89260, 89292), 'austin_heller_repo.threading.start_thread', 'start_thread', (['ping_thread_method'], {}), '(ping_thread_method)\n', (89272, 89292), False, 'from austin_heller_repo.threading import start_thread, Semaphore, SingletonMemorySequentialQueueFactory\n'), ((89317, 89332), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (89327, 89332), False, 'import time\n'), ((89471, 89484), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (89481, 89484), False, 'import time\n'), ((89911, 89924), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (89921, 89924), False, 'import time\n'), ((90753, 90766), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (90763, 90766), False, 'import 
time\n'), ((91001, 91014), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (91011, 91014), False, 'import time\n'), ((91270, 91283), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (91280, 91283), False, 'import time\n'), ((91781, 91794), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (91791, 91794), False, 'import time\n'), ((92552, 92565), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (92562, 92565), False, 'import time\n'), ((92800, 92813), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (92810, 92813), False, 'import time\n'), ((93069, 93082), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (93079, 93082), False, 'import time\n'), ((93483, 93496), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (93493, 93496), False, 'import time\n'), ((94236, 94251), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (94246, 94251), False, 'import time\n'), ((94488, 94503), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (94498, 94503), False, 'import time\n'), ((94742, 94757), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (94752, 94757), False, 'import time\n'), ((94994, 95009), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (95004, 95009), False, 'import time\n'), ((95248, 95263), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (95258, 95263), False, 'import time\n'), ((95322, 95335), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (95332, 95335), False, 'import time\n'), ((95591, 95604), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (95601, 95604), False, 'import time\n'), ((96090, 96103), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (96100, 96103), False, 'import time\n'), ((97331, 97344), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (97341, 97344), False, 'import time\n'), ((97446, 97459), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (97456, 97459), False, 'import time\n'), ((97728, 97741), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (97738, 97741), False, 'import 
time\n'), ((97909, 97922), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (97919, 97922), False, 'import time\n'), ((98843, 98856), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (98853, 98856), False, 'import time\n'), ((101266, 101279), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (101276, 101279), False, 'import time\n'), ((101581, 101594), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (101591, 101594), False, 'import time\n'), ((102388, 102401), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (102398, 102401), False, 'import time\n'), ((104466, 104479), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (104476, 104479), False, 'import time\n'), ((105499, 105512), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (105509, 105512), False, 'import time\n'), ((105546, 105559), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (105556, 105559), False, 'import time\n'), ((106125, 106138), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (106135, 106138), False, 'import time\n'), ((107229, 107242), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (107239, 107242), False, 'import time\n'), ((107276, 107289), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (107286, 107289), False, 'import time\n'), ((107705, 107718), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (107715, 107718), False, 'import time\n'), ((108839, 108852), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (108849, 108852), False, 'import time\n'), ((108958, 108971), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (108968, 108971), False, 'import time\n'), ((109005, 109018), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (109015, 109018), False, 'import time\n'), ((109554, 109567), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (109564, 109567), False, 'import time\n'), ((110598, 110611), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (110608, 110611), False, 'import time\n'), ((110885, 110898), 'time.sleep', 'time.sleep', (['(1)'], 
{}), '(1)\n', (110895, 110898), False, 'import time\n'), ((111184, 111197), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (111194, 111197), False, 'import time\n'), ((111292, 111305), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (111302, 111305), False, 'import time\n'), ((111890, 111903), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (111900, 111903), False, 'import time\n'), ((113041, 113054), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (113051, 113054), False, 'import time\n'), ((113202, 113215), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (113212, 113215), False, 'import time\n'), ((113249, 113262), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (113259, 113262), False, 'import time\n'), ((113678, 113691), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (113688, 113691), False, 'import time\n'), ((116428, 116441), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (116438, 116441), False, 'import time\n'), ((116526, 116539), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (116536, 116539), False, 'import time\n'), ((117011, 117024), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (117021, 117024), False, 'import time\n'), ((119764, 119777), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (119774, 119777), False, 'import time\n'), ((119862, 119875), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (119872, 119875), False, 'import time\n'), ((120349, 120362), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (120359, 120362), False, 'import time\n'), ((123102, 123115), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (123112, 123115), False, 'import time\n'), ((123200, 123213), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (123210, 123213), False, 'import time\n'), ((123714, 123727), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (123724, 123727), False, 'import time\n'), ((124772, 124785), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (124782, 124785), False, 'import time\n'), 
((125041, 125054), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (125051, 125054), False, 'import time\n'), ((125404, 125417), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (125414, 125417), False, 'import time\n'), ((126157, 126172), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (126167, 126172), False, 'import time\n'), ((126409, 126424), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (126419, 126424), False, 'import time\n'), ((126663, 126678), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (126673, 126678), False, 'import time\n'), ((126915, 126930), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (126925, 126930), False, 'import time\n'), ((127169, 127184), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (127179, 127184), False, 'import time\n'), ((127243, 127256), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (127253, 127256), False, 'import time\n'), ((127512, 127525), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (127522, 127525), False, 'import time\n'), ((127928, 127941), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (127938, 127941), False, 'import time\n'), ((128632, 128647), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (128642, 128647), False, 'import time\n'), ((128883, 128898), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (128893, 128898), False, 'import time\n'), ((129136, 129151), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (129146, 129151), False, 'import time\n'), ((129727, 129742), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (129737, 129742), False, 'import time\n'), ((129958, 129973), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (129968, 129973), False, 'import time\n'), ((130191, 130206), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (130201, 130206), False, 'import time\n'), ((130422, 130437), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (130432, 130437), False, 'import time\n'), ((130673, 
130688), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (130683, 130688), False, 'import time\n'), ((130747, 130760), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (130757, 130760), False, 'import time\n'), ((131237, 131250), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (131247, 131250), False, 'import time\n'), ((131642, 131655), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (131652, 131655), False, 'import time\n'), ((132324, 132339), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (132334, 132339), False, 'import time\n'), ((132575, 132590), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (132585, 132590), False, 'import time\n'), ((132828, 132843), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (132838, 132843), False, 'import time\n'), ((133079, 133094), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (133089, 133094), False, 'import time\n'), ((133665, 133680), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (133675, 133680), False, 'import time\n'), ((133896, 133911), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (133906, 133911), False, 'import time\n'), ((134129, 134144), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (134139, 134144), False, 'import time\n'), ((134360, 134375), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (134370, 134375), False, 'import time\n'), ((134434, 134447), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (134444, 134447), False, 'import time\n'), ((134924, 134937), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (134934, 134937), False, 'import time\n'), ((135332, 135345), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (135342, 135345), False, 'import time\n'), ((136014, 136029), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (136024, 136029), False, 'import time\n'), ((136265, 136280), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (136275, 136280), False, 'import time\n'), ((136518, 136533), 
'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (136528, 136533), False, 'import time\n'), ((136769, 136784), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (136779, 136784), False, 'import time\n'), ((137022, 137037), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (137032, 137037), False, 'import time\n'), ((137612, 137627), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (137622, 137627), False, 'import time\n'), ((137843, 137858), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (137853, 137858), False, 'import time\n'), ((138076, 138091), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (138086, 138091), False, 'import time\n'), ((138307, 138322), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (138317, 138322), False, 'import time\n'), ((138381, 138394), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (138391, 138394), False, 'import time\n'), ((138871, 138884), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (138881, 138884), False, 'import time\n'), ((139284, 139297), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (139294, 139297), False, 'import time\n'), ((142096, 142109), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (142106, 142109), False, 'import time\n'), ((142805, 142820), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (142815, 142820), False, 'import time\n'), ((143082, 143095), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (143092, 143095), False, 'import time\n'), ((143394, 143407), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (143404, 143407), False, 'import time\n'), ((143783, 143796), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (143793, 143796), False, 'import time\n'), ((144492, 144507), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (144502, 144507), False, 'import time\n'), ((144769, 144784), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (144779, 144784), False, 'import time\n'), ((144948, 144963), 'time.sleep', 
'time.sleep', (['(4.0)'], {}), '(4.0)\n', (144958, 144963), False, 'import time\n'), ((145102, 145115), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (145112, 145115), False, 'import time\n'), ((145490, 145503), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (145500, 145503), False, 'import time\n'), ((146199, 146214), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (146209, 146214), False, 'import time\n'), ((146477, 146492), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (146487, 146492), False, 'import time\n'), ((146645, 146661), 'time.sleep', 'time.sleep', (['(12.0)'], {}), '(12.0)\n', (146655, 146661), False, 'import time\n'), ((146825, 146838), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (146835, 146838), False, 'import time\n'), ((1946, 2039), 'austin_heller_repo.socket.ClientSocketFactory', 'ClientSocketFactory', ([], {'to_server_packet_bytes_length': '(4096)', 'is_debug': 'is_socket_debug_active'}), '(to_server_packet_bytes_length=4096, is_debug=\n is_socket_debug_active)\n', (1965, 2039), False, 'from austin_heller_repo.socket import ClientSocketFactory, ServerSocketFactory, ReadWriteSocketClosedException\n'), ((2334, 2346), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (2344, 2346), False, 'import uuid\n'), ((38070, 38083), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (38080, 38083), False, 'import time\n'), ((60680, 60697), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (60695, 60697), False, 'from datetime import datetime\n'), ((60987, 61004), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (61002, 61004), False, 'from datetime import datetime\n'), ((61297, 61310), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (61307, 61310), False, 'import time\n'), ((63209, 63220), 'austin_heller_repo.threading.Semaphore', 'Semaphore', ([], {}), '()\n', (63218, 63220), False, 'from austin_heller_repo.threading import start_thread, Semaphore, SingletonMemorySequentialQueueFactory\n'), 
((64593, 64610), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (64608, 64610), False, 'from datetime import datetime\n'), ((64900, 64917), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (64915, 64917), False, 'from datetime import datetime\n'), ((65149, 65162), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (65159, 65162), False, 'import time\n'), ((67746, 67757), 'austin_heller_repo.threading.Semaphore', 'Semaphore', ([], {}), '()\n', (67755, 67757), False, 'from austin_heller_repo.threading import start_thread, Semaphore, SingletonMemorySequentialQueueFactory\n'), ((69206, 69255), 'time.sleep', 'time.sleep', (['delay_between_sending_message_seconds'], {}), '(delay_between_sending_message_seconds)\n', (69216, 69255), False, 'import time\n'), ((69860, 69873), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (69870, 69873), False, 'import time\n'), ((72841, 72852), 'austin_heller_repo.threading.Semaphore', 'Semaphore', ([], {}), '()\n', (72850, 72852), False, 'from austin_heller_repo.threading import start_thread, Semaphore, SingletonMemorySequentialQueueFactory\n'), ((74011, 74028), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (74026, 74028), False, 'from datetime import datetime\n'), ((74435, 74452), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (74450, 74452), False, 'from datetime import datetime\n'), ((74741, 74754), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (74751, 74754), False, 'import time\n'), ((77225, 77236), 'austin_heller_repo.threading.Semaphore', 'Semaphore', ([], {}), '()\n', (77234, 77236), False, 'from austin_heller_repo.threading import start_thread, Semaphore, SingletonMemorySequentialQueueFactory\n'), ((78395, 78412), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (78410, 78412), False, 'from datetime import datetime\n'), ((78819, 78836), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (78834, 78836), False, 'from 
datetime import datetime\n'), ((79125, 79138), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (79135, 79138), False, 'import time\n'), ((81613, 81624), 'austin_heller_repo.threading.Semaphore', 'Semaphore', ([], {}), '()\n', (81622, 81624), False, 'from austin_heller_repo.threading import start_thread, Semaphore, SingletonMemorySequentialQueueFactory\n'), ((82783, 82800), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (82798, 82800), False, 'from datetime import datetime\n'), ((83207, 83224), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (83222, 83224), False, 'from datetime import datetime\n'), ((83513, 83526), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (83523, 83526), False, 'import time\n'), ((86003, 86014), 'austin_heller_repo.threading.Semaphore', 'Semaphore', ([], {}), '()\n', (86012, 86014), False, 'from austin_heller_repo.threading import start_thread, Semaphore, SingletonMemorySequentialQueueFactory\n'), ((87173, 87190), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (87188, 87190), False, 'from datetime import datetime\n'), ((87597, 87614), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (87612, 87614), False, 'from datetime import datetime\n'), ((87903, 87916), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (87913, 87916), False, 'import time\n'), ((100707, 100757), 'time.sleep', 'time.sleep', (['current_delay_between_messages_seconds'], {}), '(current_delay_between_messages_seconds)\n', (100717, 100757), False, 'import time\n'), ((140020, 140035), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (140030, 140035), False, 'import time\n'), ((140279, 140294), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (140289, 140294), False, 'import time\n'), ((140540, 140555), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (140550, 140555), False, 'import time\n'), ((140799, 140814), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (140809, 
140814), False, 'import time\n'), ((141060, 141075), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (141070, 141075), False, 'import time\n'), ((141319, 141334), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (141329, 141334), False, 'import time\n'), ((141395, 141408), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (141405, 141408), False, 'import time\n'), ((141726, 141739), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (141736, 141739), False, 'import time\n'), ((142197, 142209), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (142207, 142209), False, 'import uuid\n'), ((143884, 143896), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (143894, 143896), False, 'import uuid\n'), ((145591, 145603), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (145601, 145603), False, 'import uuid\n'), ((59769, 59786), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (59784, 59786), False, 'from datetime import datetime\n'), ((59866, 59883), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (59881, 59883), False, 'from datetime import datetime\n'), ((65132, 65145), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (65142, 65145), False, 'import time\n'), ((69080, 69097), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (69095, 69097), False, 'from datetime import datetime\n'), ((69461, 69510), 'time.sleep', 'time.sleep', (['delay_between_sending_message_seconds'], {}), '(delay_between_sending_message_seconds)\n', (69471, 69510), False, 'import time\n'), ((69536, 69553), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (69551, 69553), False, 'from datetime import datetime\n'), ((69778, 69791), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (69788, 69791), False, 'import time\n'), ((71200, 71210), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (71208, 71210), True, 'import matplotlib.pyplot as plt\n'), ((74724, 74737), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (74734, 
74737), False, 'import time\n'), ((79108, 79121), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (79118, 79121), False, 'import time\n'), ((83496, 83509), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (83506, 83509), False, 'import time\n'), ((87886, 87899), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (87896, 87899), False, 'import time\n'), ((105267, 105279), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (105277, 105279), False, 'import uuid\n'), ((106926, 106938), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (106936, 106938), False, 'import uuid\n'), ((108629, 108641), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (108639, 108641), False, 'import uuid\n'), ((110478, 110490), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (110488, 110490), False, 'import uuid\n'), ((112814, 112826), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (112824, 112826), False, 'import uuid\n'), ((114558, 114605), 'math.floor', 'math.floor', (['((index - 1) / message_subset_length)'], {}), '((index - 1) / message_subset_length)\n', (114568, 114605), False, 'import math\n'), ((117893, 117940), 'math.floor', 'math.floor', (['((index - 1) / message_subset_length)'], {}), '((index - 1) / message_subset_length)\n', (117903, 117940), False, 'import math\n'), ((121231, 121278), 'math.floor', 'math.floor', (['((index - 1) / message_subset_length)'], {}), '((index - 1) / message_subset_length)\n', (121241, 121278), False, 'import math\n'), ((2839, 2991), 'austin_heller_repo.socket.ServerSocketFactory', 'ServerSocketFactory', ([], {'to_client_packet_bytes_length': '(4096)', 'listening_limit_total': '(10)', 'accept_timeout_seconds': '(10.0)', 'is_debug': 'is_socket_debug_active'}), '(to_client_packet_bytes_length=4096,\n listening_limit_total=10, accept_timeout_seconds=10.0, is_debug=\n is_socket_debug_active)\n', (2858, 2991), False, 'from austin_heller_repo.socket import ClientSocketFactory, ServerSocketFactory, ReadWriteSocketClosedException\n'), ((6766, 6783), 
'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (6781, 6783), False, 'from datetime import datetime\n'), ((22878, 22895), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (22893, 22895), False, 'from datetime import datetime\n'), ((24158, 24175), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (24173, 24175), False, 'from datetime import datetime\n'), ((37436, 37453), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (37451, 37453), False, 'from datetime import datetime\n'), ((38096, 38113), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (38111, 38113), False, 'from datetime import datetime\n'), ((38717, 38734), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (38732, 38734), False, 'from datetime import datetime\n'), ((38810, 38827), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (38825, 38827), False, 'from datetime import datetime\n'), ((43034, 43051), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (43049, 43051), False, 'from datetime import datetime\n'), ((43210, 43227), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (43225, 43227), False, 'from datetime import datetime\n'), ((43366, 43383), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (43381, 43383), False, 'from datetime import datetime\n'), ((43523, 43540), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (43538, 43540), False, 'from datetime import datetime\n'), ((43679, 43696), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (43694, 43696), False, 'from datetime import datetime\n'), ((43751, 43768), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (43766, 43768), False, 'from datetime import datetime\n'), ((43825, 43842), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (43840, 43842), False, 'from datetime import datetime\n'), ((43868, 43885), 'datetime.datetime.utcnow', 
'datetime.utcnow', ([], {}), '()\n', (43883, 43885), False, 'from datetime import datetime\n'), ((43961, 43978), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (43976, 43978), False, 'from datetime import datetime\n'), ((45081, 45098), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (45096, 45098), False, 'from datetime import datetime\n'), ((45257, 45274), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (45272, 45274), False, 'from datetime import datetime\n'), ((45413, 45430), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (45428, 45430), False, 'from datetime import datetime\n'), ((45570, 45587), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (45585, 45587), False, 'from datetime import datetime\n'), ((45720, 45737), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (45735, 45737), False, 'from datetime import datetime\n'), ((45792, 45809), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (45807, 45809), False, 'from datetime import datetime\n'), ((45866, 45883), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (45881, 45883), False, 'from datetime import datetime\n'), ((45909, 45926), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (45924, 45926), False, 'from datetime import datetime\n'), ((46002, 46019), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (46017, 46019), False, 'from datetime import datetime\n'), ((47852, 47869), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (47867, 47869), False, 'from datetime import datetime\n'), ((48039, 48056), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (48054, 48056), False, 'from datetime import datetime\n'), ((48246, 48263), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (48261, 48263), False, 'from datetime import datetime\n'), ((48408, 48425), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], 
{}), '()\n', (48423, 48425), False, 'from datetime import datetime\n'), ((48572, 48589), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (48587, 48589), False, 'from datetime import datetime\n'), ((48728, 48745), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (48743, 48745), False, 'from datetime import datetime\n'), ((48800, 48817), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (48815, 48817), False, 'from datetime import datetime\n'), ((48916, 48933), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (48931, 48933), False, 'from datetime import datetime\n'), ((48959, 48976), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (48974, 48976), False, 'from datetime import datetime\n'), ((49052, 49069), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (49067, 49069), False, 'from datetime import datetime\n'), ((51673, 51690), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (51688, 51690), False, 'from datetime import datetime\n'), ((51860, 51877), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (51875, 51877), False, 'from datetime import datetime\n'), ((52050, 52067), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (52065, 52067), False, 'from datetime import datetime\n'), ((52256, 52273), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (52271, 52273), False, 'from datetime import datetime\n'), ((52437, 52454), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (52452, 52454), False, 'from datetime import datetime\n'), ((52620, 52637), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (52635, 52637), False, 'from datetime import datetime\n'), ((52795, 52812), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (52810, 52812), False, 'from datetime import datetime\n'), ((52867, 52884), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (52882, 
52884), False, 'from datetime import datetime\n'), ((53018, 53035), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (53033, 53035), False, 'from datetime import datetime\n'), ((53061, 53078), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (53076, 53078), False, 'from datetime import datetime\n'), ((53154, 53171), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (53169, 53171), False, 'from datetime import datetime\n'), ((55249, 55266), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (55264, 55266), False, 'from datetime import datetime\n'), ((55455, 55472), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (55470, 55472), False, 'from datetime import datetime\n'), ((55664, 55681), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (55679, 55681), False, 'from datetime import datetime\n'), ((55843, 55860), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (55858, 55860), False, 'from datetime import datetime\n'), ((55953, 55970), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (55968, 55970), False, 'from datetime import datetime\n'), ((56110, 56127), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (56125, 56127), False, 'from datetime import datetime\n'), ((56182, 56199), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (56197, 56199), False, 'from datetime import datetime\n'), ((56280, 56297), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (56295, 56297), False, 'from datetime import datetime\n'), ((56323, 56340), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (56338, 56340), False, 'from datetime import datetime\n'), ((56433, 56450), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (56448, 56450), False, 'from datetime import datetime\n'), ((57674, 57691), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (57689, 57691), False, 'from 
datetime import datetime\n'), ((57874, 57891), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (57889, 57891), False, 'from datetime import datetime\n'), ((58042, 58059), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (58057, 58059), False, 'from datetime import datetime\n'), ((58114, 58131), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (58129, 58131), False, 'from datetime import datetime\n'), ((58188, 58205), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (58203, 58205), False, 'from datetime import datetime\n'), ((58231, 58248), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (58246, 58248), False, 'from datetime import datetime\n'), ((58324, 58341), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (58339, 58341), False, 'from datetime import datetime\n'), ((60139, 60156), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (60154, 60156), False, 'from datetime import datetime\n'), ((60339, 60356), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (60354, 60356), False, 'from datetime import datetime\n'), ((61211, 61228), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (61226, 61228), False, 'from datetime import datetime\n'), ((61339, 61356), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (61354, 61356), False, 'from datetime import datetime\n'), ((61413, 61430), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (61428, 61430), False, 'from datetime import datetime\n'), ((61456, 61473), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (61471, 61473), False, 'from datetime import datetime\n'), ((61549, 61566), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (61564, 61566), False, 'from datetime import datetime\n'), ((62735, 62752), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (62750, 62752), False, 'from datetime import 
datetime\n'), ((64011, 64028), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (64026, 64028), False, 'from datetime import datetime\n'), ((64119, 64136), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (64134, 64136), False, 'from datetime import datetime\n'), ((66591, 66608), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (66606, 66608), False, 'from datetime import datetime\n'), ((66684, 66701), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (66699, 66701), False, 'from datetime import datetime\n'), ((67341, 67358), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (67356, 67358), False, 'from datetime import datetime\n'), ((68549, 68566), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (68564, 68566), False, 'from datetime import datetime\n'), ((69331, 69348), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (69346, 69348), False, 'from datetime import datetime\n'), ((71814, 71831), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (71829, 71831), False, 'from datetime import datetime\n'), ((71907, 71924), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (71922, 71924), False, 'from datetime import datetime\n'), ((72338, 72355), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (72353, 72355), False, 'from datetime import datetime\n'), ((73429, 73446), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (73444, 73446), False, 'from datetime import datetime\n'), ((73537, 73554), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (73552, 73554), False, 'from datetime import datetime\n'), ((76183, 76200), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (76198, 76200), False, 'from datetime import datetime\n'), ((76276, 76293), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (76291, 76293), False, 'from datetime import datetime\n'), ((76722, 
76739), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (76737, 76739), False, 'from datetime import datetime\n'), ((77813, 77830), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (77828, 77830), False, 'from datetime import datetime\n'), ((77921, 77938), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (77936, 77938), False, 'from datetime import datetime\n'), ((80567, 80584), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (80582, 80584), False, 'from datetime import datetime\n'), ((80660, 80677), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (80675, 80677), False, 'from datetime import datetime\n'), ((81110, 81127), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (81125, 81127), False, 'from datetime import datetime\n'), ((82201, 82218), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (82216, 82218), False, 'from datetime import datetime\n'), ((82309, 82326), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (82324, 82326), False, 'from datetime import datetime\n'), ((84955, 84972), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (84970, 84972), False, 'from datetime import datetime\n'), ((85048, 85065), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (85063, 85065), False, 'from datetime import datetime\n'), ((85500, 85517), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (85515, 85517), False, 'from datetime import datetime\n'), ((86591, 86608), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (86606, 86608), False, 'from datetime import datetime\n'), ((86699, 86716), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (86714, 86716), False, 'from datetime import datetime\n'), ((89345, 89362), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (89360, 89362), False, 'from datetime import datetime\n'), ((89438, 89455), 
'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (89453, 89455), False, 'from datetime import datetime\n'), ((90612, 90629), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (90627, 90629), False, 'from datetime import datetime\n'), ((90779, 90796), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (90794, 90796), False, 'from datetime import datetime\n'), ((90955, 90972), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (90970, 90972), False, 'from datetime import datetime\n'), ((91027, 91044), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (91042, 91044), False, 'from datetime import datetime\n'), ((91101, 91118), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (91116, 91118), False, 'from datetime import datetime\n'), ((91144, 91161), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (91159, 91161), False, 'from datetime import datetime\n'), ((91237, 91254), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (91252, 91254), False, 'from datetime import datetime\n'), ((92411, 92428), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (92426, 92428), False, 'from datetime import datetime\n'), ((92578, 92595), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (92593, 92595), False, 'from datetime import datetime\n'), ((92754, 92771), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (92769, 92771), False, 'from datetime import datetime\n'), ((92826, 92843), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (92841, 92843), False, 'from datetime import datetime\n'), ((92900, 92917), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (92915, 92917), False, 'from datetime import datetime\n'), ((92943, 92960), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (92958, 92960), False, 'from datetime import datetime\n'), ((93036, 93053), 
'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (93051, 93053), False, 'from datetime import datetime\n'), ((94064, 94081), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (94079, 94081), False, 'from datetime import datetime\n'), ((94264, 94281), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (94279, 94281), False, 'from datetime import datetime\n'), ((94446, 94463), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (94461, 94463), False, 'from datetime import datetime\n'), ((94516, 94533), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (94531, 94533), False, 'from datetime import datetime\n'), ((94699, 94716), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (94714, 94716), False, 'from datetime import datetime\n'), ((94770, 94787), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (94785, 94787), False, 'from datetime import datetime\n'), ((94952, 94969), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (94967, 94969), False, 'from datetime import datetime\n'), ((95022, 95039), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (95037, 95039), False, 'from datetime import datetime\n'), ((95205, 95222), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (95220, 95222), False, 'from datetime import datetime\n'), ((95276, 95293), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (95291, 95293), False, 'from datetime import datetime\n'), ((95348, 95365), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (95363, 95365), False, 'from datetime import datetime\n'), ((95422, 95439), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (95437, 95439), False, 'from datetime import datetime\n'), ((95465, 95482), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (95480, 95482), False, 'from datetime import datetime\n'), ((95558, 95575), 
'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (95573, 95575), False, 'from datetime import datetime\n'), ((95861, 95878), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (95876, 95878), False, 'from datetime import datetime\n'), ((96048, 96065), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (96063, 96065), False, 'from datetime import datetime\n'), ((97682, 97699), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (97697, 97699), False, 'from datetime import datetime\n'), ((97754, 97771), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (97769, 97771), False, 'from datetime import datetime\n'), ((97852, 97869), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (97867, 97869), False, 'from datetime import datetime\n'), ((97935, 97952), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (97950, 97952), False, 'from datetime import datetime\n'), ((98073, 98090), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (98088, 98090), False, 'from datetime import datetime\n'), ((98800, 98817), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (98815, 98817), False, 'from datetime import datetime\n'), ((100285, 100302), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (100300, 100302), False, 'from datetime import datetime\n'), ((101220, 101237), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (101235, 101237), False, 'from datetime import datetime\n'), ((101292, 101309), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (101307, 101309), False, 'from datetime import datetime\n'), ((101412, 101429), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (101427, 101429), False, 'from datetime import datetime\n'), ((101455, 101472), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (101470, 101472), False, 'from datetime import datetime\n'), ((101548, 101565), 
'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (101563, 101565), False, 'from datetime import datetime\n'), ((101666, 101683), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (101681, 101683), False, 'from datetime import datetime\n'), ((102159, 102176), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (102174, 102176), False, 'from datetime import datetime\n'), ((102346, 102363), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (102361, 102363), False, 'from datetime import datetime\n'), ((103493, 103510), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (103508, 103510), False, 'from datetime import datetime\n'), ((103588, 103605), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (103603, 103605), False, 'from datetime import datetime\n'), ((103740, 103757), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (103755, 103757), False, 'from datetime import datetime\n'), ((104001, 104018), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (104016, 104018), False, 'from datetime import datetime\n'), ((105179, 105196), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (105194, 105196), False, 'from datetime import datetime\n'), ((105456, 105473), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (105471, 105473), False, 'from datetime import datetime\n'), ((106838, 106855), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (106853, 106855), False, 'from datetime import datetime\n'), ((108541, 108558), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (108556, 108558), False, 'from datetime import datetime\n'), ((110390, 110407), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (110405, 110407), False, 'from datetime import datetime\n'), ((110624, 110641), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (110639, 110641), False, 'from datetime import 
datetime\n'), ((110911, 110928), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (110926, 110928), False, 'from datetime import datetime\n'), ((111151, 111168), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (111166, 111168), False, 'from datetime import datetime\n'), ((111210, 111227), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (111225, 111227), False, 'from datetime import datetime\n'), ((111318, 111335), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (111333, 111335), False, 'from datetime import datetime\n'), ((112726, 112743), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (112741, 112743), False, 'from datetime import datetime\n'), ((113169, 113186), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (113184, 113186), False, 'from datetime import datetime\n'), ((116065, 116082), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (116080, 116082), False, 'from datetime import datetime\n'), ((116382, 116399), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (116397, 116399), False, 'from datetime import datetime\n'), ((116454, 116471), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (116469, 116471), False, 'from datetime import datetime\n'), ((119401, 119418), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (119416, 119418), False, 'from datetime import datetime\n'), ((119718, 119735), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (119733, 119735), False, 'from datetime import datetime\n'), ((119790, 119807), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (119805, 119807), False, 'from datetime import datetime\n'), ((122739, 122756), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (122754, 122756), False, 'from datetime import datetime\n'), ((123056, 123073), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (123071, 
123073), False, 'from datetime import datetime\n'), ((123128, 123145), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (123143, 123145), False, 'from datetime import datetime\n'), ((124212, 124229), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (124227, 124229), False, 'from datetime import datetime\n'), ((124393, 124410), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (124408, 124410), False, 'from datetime import datetime\n'), ((124576, 124593), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (124591, 124593), False, 'from datetime import datetime\n'), ((124726, 124743), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (124741, 124743), False, 'from datetime import datetime\n'), ((124798, 124815), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (124813, 124815), False, 'from datetime import datetime\n'), ((124872, 124889), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (124887, 124889), False, 'from datetime import datetime\n'), ((124915, 124932), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (124930, 124932), False, 'from datetime import datetime\n'), ((125008, 125025), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (125023, 125025), False, 'from datetime import datetime\n'), ((125985, 126002), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (126000, 126002), False, 'from datetime import datetime\n'), ((126185, 126202), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (126200, 126202), False, 'from datetime import datetime\n'), ((126367, 126384), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (126382, 126384), False, 'from datetime import datetime\n'), ((126437, 126454), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (126452, 126454), False, 'from datetime import datetime\n'), ((126620, 126637), 'datetime.datetime.utcnow', 
'datetime.utcnow', ([], {}), '()\n', (126635, 126637), False, 'from datetime import datetime\n'), ((126691, 126708), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (126706, 126708), False, 'from datetime import datetime\n'), ((126873, 126890), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (126888, 126890), False, 'from datetime import datetime\n'), ((126943, 126960), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (126958, 126960), False, 'from datetime import datetime\n'), ((127126, 127143), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (127141, 127143), False, 'from datetime import datetime\n'), ((127197, 127214), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (127212, 127214), False, 'from datetime import datetime\n'), ((127269, 127286), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (127284, 127286), False, 'from datetime import datetime\n'), ((127343, 127360), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (127358, 127360), False, 'from datetime import datetime\n'), ((127386, 127403), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (127401, 127403), False, 'from datetime import datetime\n'), ((127479, 127496), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (127494, 127496), False, 'from datetime import datetime\n'), ((128460, 128477), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (128475, 128477), False, 'from datetime import datetime\n'), ((128660, 128677), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (128675, 128677), False, 'from datetime import datetime\n'), ((128841, 128858), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (128856, 128858), False, 'from datetime import datetime\n'), ((128911, 128928), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (128926, 128928), False, 'from datetime import datetime\n'), ((129093, 
129110), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (129108, 129110), False, 'from datetime import datetime\n'), ((129755, 129772), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (129770, 129772), False, 'from datetime import datetime\n'), ((129916, 129933), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (129931, 129933), False, 'from datetime import datetime\n'), ((129986, 130003), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (130001, 130003), False, 'from datetime import datetime\n'), ((130148, 130165), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (130163, 130165), False, 'from datetime import datetime\n'), ((130219, 130236), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (130234, 130236), False, 'from datetime import datetime\n'), ((130380, 130397), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (130395, 130397), False, 'from datetime import datetime\n'), ((130450, 130467), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (130465, 130467), False, 'from datetime import datetime\n'), ((130631, 130648), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (130646, 130648), False, 'from datetime import datetime\n'), ((130701, 130718), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (130716, 130718), False, 'from datetime import datetime\n'), ((130773, 130790), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (130788, 130790), False, 'from datetime import datetime\n'), ((130869, 130886), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (130884, 130886), False, 'from datetime import datetime\n'), ((130933, 130950), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (130948, 130950), False, 'from datetime import datetime\n'), ((131041, 131058), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (131056, 131058), False, 'from datetime 
import datetime\n'), ((131111, 131128), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (131126, 131128), False, 'from datetime import datetime\n'), ((131204, 131221), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (131219, 131221), False, 'from datetime import datetime\n'), ((132152, 132169), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (132167, 132169), False, 'from datetime import datetime\n'), ((132352, 132369), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (132367, 132369), False, 'from datetime import datetime\n'), ((132533, 132550), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (132548, 132550), False, 'from datetime import datetime\n'), ((132603, 132620), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (132618, 132620), False, 'from datetime import datetime\n'), ((132785, 132802), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (132800, 132802), False, 'from datetime import datetime\n'), ((132856, 132873), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (132871, 132873), False, 'from datetime import datetime\n'), ((133037, 133054), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (133052, 133054), False, 'from datetime import datetime\n'), ((133693, 133710), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (133708, 133710), False, 'from datetime import datetime\n'), ((133854, 133871), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (133869, 133871), False, 'from datetime import datetime\n'), ((133924, 133941), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (133939, 133941), False, 'from datetime import datetime\n'), ((134086, 134103), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (134101, 134103), False, 'from datetime import datetime\n'), ((134157, 134174), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (134172, 
134174), False, 'from datetime import datetime\n'), ((134318, 134335), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (134333, 134335), False, 'from datetime import datetime\n'), ((134388, 134405), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (134403, 134405), False, 'from datetime import datetime\n'), ((134460, 134477), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (134475, 134477), False, 'from datetime import datetime\n'), ((134556, 134573), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (134571, 134573), False, 'from datetime import datetime\n'), ((134620, 134637), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (134635, 134637), False, 'from datetime import datetime\n'), ((134728, 134745), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (134743, 134745), False, 'from datetime import datetime\n'), ((134798, 134815), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (134813, 134815), False, 'from datetime import datetime\n'), ((134891, 134908), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (134906, 134908), False, 'from datetime import datetime\n'), ((135842, 135859), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (135857, 135859), False, 'from datetime import datetime\n'), ((136042, 136059), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (136057, 136059), False, 'from datetime import datetime\n'), ((136223, 136240), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (136238, 136240), False, 'from datetime import datetime\n'), ((136293, 136310), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (136308, 136310), False, 'from datetime import datetime\n'), ((136475, 136492), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (136490, 136492), False, 'from datetime import datetime\n'), ((136546, 136563), 'datetime.datetime.utcnow', 
'datetime.utcnow', ([], {}), '()\n', (136561, 136563), False, 'from datetime import datetime\n'), ((136727, 136744), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (136742, 136744), False, 'from datetime import datetime\n'), ((136797, 136814), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (136812, 136814), False, 'from datetime import datetime\n'), ((136979, 136996), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (136994, 136996), False, 'from datetime import datetime\n'), ((137640, 137657), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (137655, 137657), False, 'from datetime import datetime\n'), ((137801, 137818), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (137816, 137818), False, 'from datetime import datetime\n'), ((137871, 137888), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (137886, 137888), False, 'from datetime import datetime\n'), ((138033, 138050), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (138048, 138050), False, 'from datetime import datetime\n'), ((138104, 138121), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (138119, 138121), False, 'from datetime import datetime\n'), ((138265, 138282), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (138280, 138282), False, 'from datetime import datetime\n'), ((138335, 138352), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (138350, 138352), False, 'from datetime import datetime\n'), ((138407, 138424), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (138422, 138424), False, 'from datetime import datetime\n'), ((138503, 138520), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (138518, 138520), False, 'from datetime import datetime\n'), ((138567, 138584), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (138582, 138584), False, 'from datetime import datetime\n'), ((138675, 
138692), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (138690, 138692), False, 'from datetime import datetime\n'), ((138745, 138762), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (138760, 138762), False, 'from datetime import datetime\n'), ((138838, 138855), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (138853, 138855), False, 'from datetime import datetime\n'), ((142833, 142850), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (142848, 142850), False, 'from datetime import datetime\n'), ((143036, 143053), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (143051, 143053), False, 'from datetime import datetime\n'), ((143108, 143125), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (143123, 143125), False, 'from datetime import datetime\n'), ((143204, 143221), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (143219, 143221), False, 'from datetime import datetime\n'), ((143268, 143285), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (143283, 143285), False, 'from datetime import datetime\n'), ((143361, 143378), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (143376, 143378), False, 'from datetime import datetime\n'), ((144520, 144537), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (144535, 144537), False, 'from datetime import datetime\n'), ((144723, 144740), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (144738, 144740), False, 'from datetime import datetime\n'), ((144797, 144814), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (144812, 144814), False, 'from datetime import datetime\n'), ((144893, 144910), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (144908, 144910), False, 'from datetime import datetime\n'), ((144976, 144993), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (144991, 144993), False, 'from datetime 
import datetime\n'), ((145069, 145086), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (145084, 145086), False, 'from datetime import datetime\n'), ((146227, 146244), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (146242, 146244), False, 'from datetime import datetime\n'), ((146431, 146448), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (146446, 146448), False, 'from datetime import datetime\n'), ((146505, 146522), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (146520, 146522), False, 'from datetime import datetime\n'), ((146605, 146622), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (146620, 146622), False, 'from datetime import datetime\n'), ((146674, 146691), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (146689, 146691), False, 'from datetime import datetime\n'), ((146770, 146787), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (146785, 146787), False, 'from datetime import datetime\n'), ((37613, 37630), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (37628, 37630), False, 'from datetime import datetime\n'), ((37738, 37755), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (37753, 37755), False, 'from datetime import datetime\n'), ((42570, 42587), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (42585, 42587), False, 'from datetime import datetime\n'), ((44624, 44641), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (44639, 44641), False, 'from datetime import datetime\n'), ((46739, 46756), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (46754, 46756), False, 'from datetime import datetime\n'), ((47339, 47356), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (47354, 47356), False, 'from datetime import datetime\n'), ((49985, 50002), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (50000, 50002), False, 'from 
datetime import datetime\n'), ((50585, 50602), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (50600, 50602), False, 'from datetime import datetime\n'), ((51192, 51209), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (51207, 51209), False, 'from datetime import datetime\n'), ((54175, 54192), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (54190, 54192), False, 'from datetime import datetime\n'), ((54757, 54774), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (54772, 54774), False, 'from datetime import datetime\n'), ((57027, 57044), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (57042, 57044), False, 'from datetime import datetime\n'), ((64388, 64405), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (64403, 64405), False, 'from datetime import datetime\n'), ((65035, 65052), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (65050, 65052), False, 'from datetime import datetime\n'), ((66382, 66399), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (66397, 66399), False, 'from datetime import datetime\n'), ((66458, 66475), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (66473, 66475), False, 'from datetime import datetime\n'), ((68819, 68836), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (68834, 68836), False, 'from datetime import datetime\n'), ((69006, 69023), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (69021, 69023), False, 'from datetime import datetime\n'), ((69672, 69689), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (69687, 69689), False, 'from datetime import datetime\n'), ((71605, 71622), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (71620, 71622), False, 'from datetime import datetime\n'), ((71681, 71698), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (71696, 71698), False, 'from datetime import 
datetime\n'), ((73806, 73823), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (73821, 73823), False, 'from datetime import datetime\n'), ((74627, 74644), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (74642, 74644), False, 'from datetime import datetime\n'), ((75974, 75991), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (75989, 75991), False, 'from datetime import datetime\n'), ((76050, 76067), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (76065, 76067), False, 'from datetime import datetime\n'), ((78190, 78207), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (78205, 78207), False, 'from datetime import datetime\n'), ((79011, 79028), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (79026, 79028), False, 'from datetime import datetime\n'), ((80358, 80375), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (80373, 80375), False, 'from datetime import datetime\n'), ((80434, 80451), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (80449, 80451), False, 'from datetime import datetime\n'), ((82578, 82595), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (82593, 82595), False, 'from datetime import datetime\n'), ((83399, 83416), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (83414, 83416), False, 'from datetime import datetime\n'), ((84746, 84763), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (84761, 84763), False, 'from datetime import datetime\n'), ((84822, 84839), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (84837, 84839), False, 'from datetime import datetime\n'), ((86968, 86985), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (86983, 86985), False, 'from datetime import datetime\n'), ((87789, 87806), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (87804, 87806), False, 'from datetime import datetime\n'), ((89136, 
89153), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (89151, 89153), False, 'from datetime import datetime\n'), ((89212, 89229), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (89227, 89229), False, 'from datetime import datetime\n'), ((90217, 90234), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (90232, 90234), False, 'from datetime import datetime\n'), ((91957, 91974), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (91972, 91974), False, 'from datetime import datetime\n'), ((104726, 104743), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (104741, 104743), False, 'from datetime import datetime\n'), ((106385, 106402), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (106400, 106402), False, 'from datetime import datetime\n'), ((107965, 107982), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (107980, 107982), False, 'from datetime import datetime\n'), ((109814, 109831), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (109829, 109831), False, 'from datetime import datetime\n'), ((112150, 112167), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (112165, 112167), False, 'from datetime import datetime\n'), ((139842, 139859), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (139857, 139859), False, 'from datetime import datetime\n'), ((140049, 140066), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (140064, 140066), False, 'from datetime import datetime\n'), ((140236, 140253), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (140251, 140253), False, 'from datetime import datetime\n'), ((140308, 140325), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (140323, 140325), False, 'from datetime import datetime\n'), ((140496, 140513), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (140511, 140513), False, 'from datetime import 
datetime\n'), ((140569, 140586), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (140584, 140586), False, 'from datetime import datetime\n'), ((140756, 140773), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (140771, 140773), False, 'from datetime import datetime\n'), ((140828, 140845), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (140843, 140845), False, 'from datetime import datetime\n'), ((141016, 141033), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (141031, 141033), False, 'from datetime import datetime\n'), ((141089, 141106), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (141104, 141106), False, 'from datetime import datetime\n'), ((141276, 141293), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (141291, 141293), False, 'from datetime import datetime\n'), ((141348, 141365), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (141363, 141365), False, 'from datetime import datetime\n'), ((141434, 141451), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (141449, 141451), False, 'from datetime import datetime\n'), ((141532, 141549), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (141547, 141549), False, 'from datetime import datetime\n'), ((141597, 141614), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (141612, 141614), False, 'from datetime import datetime\n'), ((141692, 141709), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (141707, 141709), False, 'from datetime import datetime\n'), ((142373, 142390), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (142388, 142390), False, 'from datetime import datetime\n'), ((144060, 144077), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (144075, 144077), False, 'from datetime import datetime\n'), ((145767, 145784), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (145782, 
145784), False, 'from datetime import datetime\n'), ((37821, 37838), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (37836, 37838), False, 'from datetime import datetime\n'), ((37955, 37972), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (37970, 37972), False, 'from datetime import datetime\n'), ((38045, 38062), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (38060, 38062), False, 'from datetime import datetime\n'), ((114932, 114949), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (114947, 114949), False, 'from datetime import datetime\n'), ((115157, 115174), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (115172, 115174), False, 'from datetime import datetime\n'), ((115491, 115508), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (115506, 115508), False, 'from datetime import datetime\n'), ((115720, 115737), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (115735, 115737), False, 'from datetime import datetime\n'), ((118268, 118285), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (118283, 118285), False, 'from datetime import datetime\n'), ((118493, 118510), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (118508, 118510), False, 'from datetime import datetime\n'), ((118827, 118844), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (118842, 118844), False, 'from datetime import datetime\n'), ((119056, 119073), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (119071, 119073), False, 'from datetime import datetime\n'), ((121606, 121623), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (121621, 121623), False, 'from datetime import datetime\n'), ((121831, 121848), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (121846, 121848), False, 'from datetime import datetime\n'), ((122165, 122182), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], 
{}), '()\n', (122180, 122182), False, 'from datetime import datetime\n'), ((122394, 122411), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (122409, 122411), False, 'from datetime import datetime\n')]
|
from pymongo import MongoClient
import os
class Mongodb:
@classmethod
def db_connect(cls):
DB_URI = os.environ.get('DB_URI')
#print(DB_URI) os.environ.get('DB_URI')
client = MongoClient(DB_URI)
db = client.contentagregatordb
return db
@classmethod
def get_urls(cls):
db = Mongodb.db_connect()
website_collection = db['websites']
websites = website_collection.find()
return websites
@classmethod
def get_url_by(cls, url_category):
db = Mongodb.db_connect()
website_collection = db['websites']
websites = website_collection.find(url_category)
return websites
@classmethod
def get_articels_collection(cls):
db = Mongodb.db_connect()
return db['articles']
@classmethod
def is_saved_to(cls, articles_collection, article_url):
article_found = articles_collection.find_one({'url': article_url})
return article_found
@classmethod
def insert_articles(cls,articles):
articles_collection = Mongodb.get_articels_collection()
for article in articles:
article_found = Mongodb.is_saved_to(articles_collection,
article['url'])
if article_found is None:
articles_collection.insert_one(article)
else:
pass
@classmethod
def find_by(cls, baseurl, articles_collection):
all_article = articles_collection.find({'baseurl': baseurl}, sort=[('_id', -1)]).limit(10)
data =[]
for x in all_article:
datum = {}
datum['category'] = x['category']
datum['baseurl'] = x['baseurl']
datum['webname'] = x['webname']
datum['title'] = x['title']
datum['url'] = x['url']
data.append(datum)
return data
# db = Mongodb.db_connect()
# latest_articles = Mongodb.find_by('https://www.bbc.com/arabic', db['articles'])
# print(latest_articles)
|
[
"os.environ.get",
"pymongo.MongoClient"
] |
[((122, 146), 'os.environ.get', 'os.environ.get', (['"""DB_URI"""'], {}), "('DB_URI')\n", (136, 146), False, 'import os\n'), ((212, 231), 'pymongo.MongoClient', 'MongoClient', (['DB_URI'], {}), '(DB_URI)\n', (223, 231), False, 'from pymongo import MongoClient\n')]
|
from contextlib import contextmanager
from functools import lru_cache
from typing import Generator
@lru_cache(maxsize=None)
def slow_function(message, timeout):
"""This function is slow."""
print(message)
@contextmanager
def feeling_good(x: int, y: int) -> Generator:
"""You'll feel better in this context!"""
yield
|
[
"functools.lru_cache"
] |
[((102, 125), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': 'None'}), '(maxsize=None)\n', (111, 125), False, 'from functools import lru_cache\n')]
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from registry import registry_pb2 as registry_dot_registry__pb2
class RegistryStub(object):
"""Missing associated documentation comment in .proto file."""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.GetService = channel.unary_unary(
'/registry.Registry/GetService',
request_serializer=registry_dot_registry__pb2.GetRequest.SerializeToString,
response_deserializer=registry_dot_registry__pb2.GetResponse.FromString,
)
self.Register = channel.unary_unary(
'/registry.Registry/Register',
request_serializer=registry_dot_registry__pb2.Service.SerializeToString,
response_deserializer=registry_dot_registry__pb2.EmptyResponse.FromString,
)
self.Deregister = channel.unary_unary(
'/registry.Registry/Deregister',
request_serializer=registry_dot_registry__pb2.Service.SerializeToString,
response_deserializer=registry_dot_registry__pb2.EmptyResponse.FromString,
)
self.ListServices = channel.unary_unary(
'/registry.Registry/ListServices',
request_serializer=registry_dot_registry__pb2.ListRequest.SerializeToString,
response_deserializer=registry_dot_registry__pb2.ListResponse.FromString,
)
self.Watch = channel.unary_stream(
'/registry.Registry/Watch',
request_serializer=registry_dot_registry__pb2.WatchRequest.SerializeToString,
response_deserializer=registry_dot_registry__pb2.Result.FromString,
)
class RegistryServicer(object):
"""Missing associated documentation comment in .proto file."""
def GetService(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Register(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Deregister(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListServices(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Watch(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_RegistryServicer_to_server(servicer, server):
rpc_method_handlers = {
'GetService': grpc.unary_unary_rpc_method_handler(
servicer.GetService,
request_deserializer=registry_dot_registry__pb2.GetRequest.FromString,
response_serializer=registry_dot_registry__pb2.GetResponse.SerializeToString,
),
'Register': grpc.unary_unary_rpc_method_handler(
servicer.Register,
request_deserializer=registry_dot_registry__pb2.Service.FromString,
response_serializer=registry_dot_registry__pb2.EmptyResponse.SerializeToString,
),
'Deregister': grpc.unary_unary_rpc_method_handler(
servicer.Deregister,
request_deserializer=registry_dot_registry__pb2.Service.FromString,
response_serializer=registry_dot_registry__pb2.EmptyResponse.SerializeToString,
),
'ListServices': grpc.unary_unary_rpc_method_handler(
servicer.ListServices,
request_deserializer=registry_dot_registry__pb2.ListRequest.FromString,
response_serializer=registry_dot_registry__pb2.ListResponse.SerializeToString,
),
'Watch': grpc.unary_stream_rpc_method_handler(
servicer.Watch,
request_deserializer=registry_dot_registry__pb2.WatchRequest.FromString,
response_serializer=registry_dot_registry__pb2.Result.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'registry.Registry', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class Registry(object):
"""Missing associated documentation comment in .proto file."""
@staticmethod
def GetService(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/registry.Registry/GetService',
registry_dot_registry__pb2.GetRequest.SerializeToString,
registry_dot_registry__pb2.GetResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Register(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/registry.Registry/Register',
registry_dot_registry__pb2.Service.SerializeToString,
registry_dot_registry__pb2.EmptyResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Deregister(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/registry.Registry/Deregister',
registry_dot_registry__pb2.Service.SerializeToString,
registry_dot_registry__pb2.EmptyResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def ListServices(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/registry.Registry/ListServices',
registry_dot_registry__pb2.ListRequest.SerializeToString,
registry_dot_registry__pb2.ListResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Watch(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_stream(request, target, '/registry.Registry/Watch',
registry_dot_registry__pb2.WatchRequest.SerializeToString,
registry_dot_registry__pb2.Result.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
|
[
"grpc.method_handlers_generic_handler",
"grpc.unary_stream_rpc_method_handler",
"grpc.unary_unary_rpc_method_handler",
"grpc.experimental.unary_stream",
"grpc.experimental.unary_unary"
] |
[((5081, 5159), 'grpc.method_handlers_generic_handler', 'grpc.method_handlers_generic_handler', (['"""registry.Registry"""', 'rpc_method_handlers'], {}), "('registry.Registry', rpc_method_handlers)\n", (5117, 5159), False, 'import grpc\n'), ((3551, 3769), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.GetService'], {'request_deserializer': 'registry_dot_registry__pb2.GetRequest.FromString', 'response_serializer': 'registry_dot_registry__pb2.GetResponse.SerializeToString'}), '(servicer.GetService,\n request_deserializer=registry_dot_registry__pb2.GetRequest.FromString,\n response_serializer=registry_dot_registry__pb2.GetResponse.\n SerializeToString)\n', (3586, 3769), False, 'import grpc\n'), ((3857, 4069), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.Register'], {'request_deserializer': 'registry_dot_registry__pb2.Service.FromString', 'response_serializer': 'registry_dot_registry__pb2.EmptyResponse.SerializeToString'}), '(servicer.Register, request_deserializer\n =registry_dot_registry__pb2.Service.FromString, response_serializer=\n registry_dot_registry__pb2.EmptyResponse.SerializeToString)\n', (3892, 4069), False, 'import grpc\n'), ((4162, 4379), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.Deregister'], {'request_deserializer': 'registry_dot_registry__pb2.Service.FromString', 'response_serializer': 'registry_dot_registry__pb2.EmptyResponse.SerializeToString'}), '(servicer.Deregister,\n request_deserializer=registry_dot_registry__pb2.Service.FromString,\n response_serializer=registry_dot_registry__pb2.EmptyResponse.\n SerializeToString)\n', (4197, 4379), False, 'import grpc\n'), ((4471, 4693), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.ListServices'], {'request_deserializer': 'registry_dot_registry__pb2.ListRequest.FromString', 'response_serializer': 
'registry_dot_registry__pb2.ListResponse.SerializeToString'}), '(servicer.ListServices,\n request_deserializer=registry_dot_registry__pb2.ListRequest.FromString,\n response_serializer=registry_dot_registry__pb2.ListResponse.\n SerializeToString)\n', (4506, 4693), False, 'import grpc\n'), ((4778, 4986), 'grpc.unary_stream_rpc_method_handler', 'grpc.unary_stream_rpc_method_handler', (['servicer.Watch'], {'request_deserializer': 'registry_dot_registry__pb2.WatchRequest.FromString', 'response_serializer': 'registry_dot_registry__pb2.Result.SerializeToString'}), '(servicer.Watch, request_deserializer=\n registry_dot_registry__pb2.WatchRequest.FromString, response_serializer\n =registry_dot_registry__pb2.Result.SerializeToString)\n', (4814, 4986), False, 'import grpc\n'), ((5692, 6002), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/registry.Registry/GetService"""', 'registry_dot_registry__pb2.GetRequest.SerializeToString', 'registry_dot_registry__pb2.GetResponse.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/registry.Registry/GetService', registry_dot_registry__pb2.GetRequest.\n SerializeToString, registry_dot_registry__pb2.GetResponse.FromString,\n options, channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (5721, 6002), False, 'import grpc\n'), ((6356, 6663), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/registry.Registry/Register"""', 'registry_dot_registry__pb2.Service.SerializeToString', 'registry_dot_registry__pb2.EmptyResponse.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/registry.Registry/Register', registry_dot_registry__pb2.Service.\n SerializeToString, 
registry_dot_registry__pb2.EmptyResponse.FromString,\n options, channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (6385, 6663), False, 'import grpc\n'), ((7019, 7328), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/registry.Registry/Deregister"""', 'registry_dot_registry__pb2.Service.SerializeToString', 'registry_dot_registry__pb2.EmptyResponse.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/registry.Registry/Deregister', registry_dot_registry__pb2.Service.\n SerializeToString, registry_dot_registry__pb2.EmptyResponse.FromString,\n options, channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (7048, 7328), False, 'import grpc\n'), ((7686, 8001), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/registry.Registry/ListServices"""', 'registry_dot_registry__pb2.ListRequest.SerializeToString', 'registry_dot_registry__pb2.ListResponse.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/registry.Registry/ListServices', registry_dot_registry__pb2.\n ListRequest.SerializeToString, registry_dot_registry__pb2.ListResponse.\n FromString, options, channel_credentials, insecure, call_credentials,\n compression, wait_for_ready, timeout, metadata)\n", (7715, 8001), False, 'import grpc\n'), ((8351, 8653), 'grpc.experimental.unary_stream', 'grpc.experimental.unary_stream', (['request', 'target', '"""/registry.Registry/Watch"""', 'registry_dot_registry__pb2.WatchRequest.SerializeToString', 'registry_dot_registry__pb2.Result.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), 
"(request, target, '/registry.Registry/Watch',\n registry_dot_registry__pb2.WatchRequest.SerializeToString,\n registry_dot_registry__pb2.Result.FromString, options,\n channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (8381, 8653), False, 'import grpc\n')]
|
"""
Query and deal common tables.
"""
from evennia.utils import logger
from django.apps import apps
from django.conf import settings
class ImageResourcesMapper(object):
"""
Object's image.
"""
def __init__(self):
self.model_name = "image_resources"
self.model = apps.get_model(settings.WORLD_DATA_APP, self.model_name)
self.objects = self.model.objects
def get(self, resource):
"""
Get object's image.
Args:
resource: (string) resource's path.
"""
return self.objects.get(resource=resource)
def add(self, path, type, width, height):
"""
Add a new image record.
Args:
path: image's path
type: image's type
width: image's width
height: image's height
Return:
none
"""
record = {
"resource": path,
"type": type,
"image_width": width,
"image_height": height,
}
data = self.model(**record)
data.full_clean()
data.save()
IMAGE_RESOURCES = ImageResourcesMapper()
|
[
"django.apps.apps.get_model"
] |
[((297, 353), 'django.apps.apps.get_model', 'apps.get_model', (['settings.WORLD_DATA_APP', 'self.model_name'], {}), '(settings.WORLD_DATA_APP, self.model_name)\n', (311, 353), False, 'from django.apps import apps\n')]
|
import re
import collections
from enum import Enum
from ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk._core._dm_meta_info import ATTRIBUTE, REFERENCE_CLASS, REFERENCE_LIST, REFERENCE_LEAFLIST, REFERENCE_IDENTITY_CLASS, REFERENCE_ENUM_CLASS, REFERENCE_BITS, REFERENCE_UNION
from ydk.errors import YPYError, YPYModelError
from ydk.providers._importer import _yang_ns
_meta_table = {
'Ipv4DefaultPingEnum' : _MetaInfoEnum('Ipv4DefaultPingEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_io_cfg',
{
'disabled':'disabled',
'enabled':'enabled',
}, 'Cisco-IOS-XR-ipv4-io-cfg', _yang_ns._namespaces['Cisco-IOS-XR-ipv4-io-cfg']),
'Ipv4SelfPingEnum' : _MetaInfoEnum('Ipv4SelfPingEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_io_cfg',
{
'disabled':'disabled',
'enabled':'enabled',
}, 'Cisco-IOS-XR-ipv4-io-cfg', _yang_ns._namespaces['Cisco-IOS-XR-ipv4-io-cfg']),
'Ipv4ReachableEnum' : _MetaInfoEnum('Ipv4ReachableEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_io_cfg',
{
'any':'any',
'received':'received',
}, 'Cisco-IOS-XR-ipv4-io-cfg', _yang_ns._namespaces['Cisco-IOS-XR-ipv4-io-cfg']),
'Ipv4InterfaceQppbEnum' : _MetaInfoEnum('Ipv4InterfaceQppbEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_io_cfg',
{
'ip-precedence':'ip_precedence',
'qos-group':'qos_group',
'both':'both',
}, 'Cisco-IOS-XR-ipv4-io-cfg', _yang_ns._namespaces['Cisco-IOS-XR-ipv4-io-cfg']),
}
|
[
"ydk._core._dm_meta_info._MetaInfoEnum"
] |
[((546, 773), 'ydk._core._dm_meta_info._MetaInfoEnum', '_MetaInfoEnum', (['"""Ipv4DefaultPingEnum"""', '"""ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_io_cfg"""', "{'disabled': 'disabled', 'enabled': 'enabled'}", '"""Cisco-IOS-XR-ipv4-io-cfg"""', "_yang_ns._namespaces['Cisco-IOS-XR-ipv4-io-cfg']"], {}), "('Ipv4DefaultPingEnum',\n 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_io_cfg', {'disabled':\n 'disabled', 'enabled': 'enabled'}, 'Cisco-IOS-XR-ipv4-io-cfg', _yang_ns\n ._namespaces['Cisco-IOS-XR-ipv4-io-cfg'])\n", (559, 773), False, 'from ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum\n'), ((828, 1052), 'ydk._core._dm_meta_info._MetaInfoEnum', '_MetaInfoEnum', (['"""Ipv4SelfPingEnum"""', '"""ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_io_cfg"""', "{'disabled': 'disabled', 'enabled': 'enabled'}", '"""Cisco-IOS-XR-ipv4-io-cfg"""', "_yang_ns._namespaces['Cisco-IOS-XR-ipv4-io-cfg']"], {}), "('Ipv4SelfPingEnum',\n 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_io_cfg', {'disabled':\n 'disabled', 'enabled': 'enabled'}, 'Cisco-IOS-XR-ipv4-io-cfg', _yang_ns\n ._namespaces['Cisco-IOS-XR-ipv4-io-cfg'])\n", (841, 1052), False, 'from ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum\n'), ((1108, 1325), 'ydk._core._dm_meta_info._MetaInfoEnum', '_MetaInfoEnum', (['"""Ipv4ReachableEnum"""', '"""ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_io_cfg"""', "{'any': 'any', 'received': 'received'}", '"""Cisco-IOS-XR-ipv4-io-cfg"""', "_yang_ns._namespaces['Cisco-IOS-XR-ipv4-io-cfg']"], {}), "('Ipv4ReachableEnum',\n 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_io_cfg', {'any': 'any',\n 'received': 'received'}, 'Cisco-IOS-XR-ipv4-io-cfg', _yang_ns.\n _namespaces['Cisco-IOS-XR-ipv4-io-cfg'])\n", (1121, 1325), False, 'from ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum\n'), ((1385, 1648), 'ydk._core._dm_meta_info._MetaInfoEnum', '_MetaInfoEnum', (['"""Ipv4InterfaceQppbEnum"""', 
'"""ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_io_cfg"""', "{'ip-precedence': 'ip_precedence', 'qos-group': 'qos_group', 'both': 'both'}", '"""Cisco-IOS-XR-ipv4-io-cfg"""', "_yang_ns._namespaces['Cisco-IOS-XR-ipv4-io-cfg']"], {}), "('Ipv4InterfaceQppbEnum',\n 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_io_cfg', {'ip-precedence':\n 'ip_precedence', 'qos-group': 'qos_group', 'both': 'both'},\n 'Cisco-IOS-XR-ipv4-io-cfg', _yang_ns._namespaces[\n 'Cisco-IOS-XR-ipv4-io-cfg'])\n", (1398, 1648), False, 'from ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum\n')]
|
#!/usr/bin/env python
from __future__ import print_function, division
from glob import glob
import astropy.io.fits as pyfits
import sys, os
from os import path, remove
from astropy import log
from astropy.table import Table
from subprocess import check_call
import argparse
import re
import numpy as np
# from nicer.values import *
# Array of DET_IDs that are used
IDS = np.array(
[
0,
1,
2,
3,
4,
5,
6,
7,
10,
11,
12,
13,
14,
15,
16,
17,
20,
21,
22,
23,
24,
25,
26,
27,
30,
31,
32,
33,
34,
35,
36,
37,
40,
41,
42,
43,
44,
45,
46,
47,
50,
51,
52,
53,
54,
55,
56,
57,
60,
61,
62,
63,
64,
65,
66,
67,
]
)
import matplotlib.pyplot as plt
import argparse
parser = argparse.ArgumentParser(
description="Compute deadtime correction to an EXPOSURE defined by a GTI extension, for a single OBSID."
)
parser.add_argument("obsdir", help="Directory containing the raw data for this OBSID")
parser.add_argument(
"gtifile",
help="FITS file containing a GTI extension to be used. Can be an event file, PHA file or any FITS file with a 'GTI' extension.",
)
parser.add_argument(
"--mask", help="Mask particular FPMs", nargs="+", type=int, default=[]
)
parser.add_argument("--plot", help="Plot deadtime per FPM", action="store_true")
args = parser.parse_args()
# The GTI file is assumed to apply to all FPMs. This is normally the case since the user
# is operating on a merged event file whose GTI is the AND of all the individual MPU GTIs
# then they may make additional GTI selections that are more restrictive than that.
# So, we can go over each MPU file and apply the GTI before counting up the deadtime.
# Get the names of all the individual MPU files
gstr = path.join(args.obsdir, "xti/event_uf/ni*mpu?_uf.evt*")
log.debug("Glob string {}".format(gstr))
ufiles = glob(gstr)
ufiles.sort()
log.info(
"Reading unfiltered events from these files :\n\t{}".format("\n\t".join(ufiles))
)
if len(ufiles) != 7:
log.error("Did not find 7 MPU files!")
fpm_deadtime = np.zeros(len(IDS))
t_mpu = -1
log.info("Mask {}".format(args.mask))
for i, det_id in enumerate(IDS):
if det_id in args.mask:
continue
mpu = det_id // 10
log.debug("{} DET_ID {} MPU {} File {}".format(i, det_id, mpu, ufiles[mpu]))
# Only read the raw MPU file once per MPU since all the FPMs for this MPU are in this file
if mpu != t_mpu:
cmd = "niextract-events {0} eventsout={1} timefile='{2}[GTI]' clobber=yes".format(
ufiles[mpu], "tmp.evt", args.gtifile
)
st = check_call(cmd, shell=True)
if st != 0:
log.error("niextract-events failed!")
t = Table.read("tmp.evt", hdu=1)
t_mpu = mpu
dets = t["DET_ID"]
if not np.any(dets == det_id):
fpm_deadtime[i] = 0.0
else:
fpm_deadtime[i] = (t["DEADTIME"][dets == det_id]).sum()
gtitable = Table.read("{}".format(args.gtifile), hdu="GTI")
exp = (gtitable["STOP"] - gtitable["START"]).sum()
log.debug("exp {}".format(exp))
active = np.where(fpm_deadtime > 0)[0]
if not np.any(fpm_deadtime > 0):
deadtime = 0.0
mindead = 0.0
maxdead = 0.0
stddead = 0.0
else:
deadtime = fpm_deadtime[active].mean()
mindead = fpm_deadtime[active].min()
maxdead = fpm_deadtime[active].max()
stddead = fpm_deadtime[active].std()
if args.plot:
if exp > 0:
plt.plot(IDS, 100 * fpm_deadtime / exp, "s")
plt.xlabel("DET_ID")
plt.ylabel("Deadtime %")
plt.title(t.meta["OBS_ID"])
# plt.savefig("deadtimeplots/{0}_deadtimes.png".format(t.meta["OBS_ID"]))
plt.show()
if exp == 0.0:
percent_frac = 0.0
else:
percent_frac = 100.0 * deadtime / exp
print(
"\nFile {} Exposure {:12.5f}, Mean Deadtime {:12.5f} ({:.3f} %) -> Livetime {:12.5f}".format(
args.gtifile, exp, deadtime, percent_frac, exp - deadtime
)
)
print(
"Deadtime Statistics for {} FPM: Min {:12.5f} Max {:12.5f} Std {:12.5f}".format(
len(active), mindead, maxdead, stddead
)
)
|
[
"matplotlib.pyplot.title",
"astropy.table.Table.read",
"matplotlib.pyplot.show",
"argparse.ArgumentParser",
"matplotlib.pyplot.plot",
"numpy.any",
"numpy.where",
"numpy.array",
"astropy.log.error",
"glob.glob",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"os.path.join",
"subprocess.check_call"
] |
[((372, 611), 'numpy.array', 'np.array', (['[0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 20, 21, 22, 23, 24,\n 25, 26, 27, 30, 31, 32, 33, 34, 35, 36, 37, 40, 41, 42, 43, 44, 45, 46,\n 47, 50, 51, 52, 53, 54, 55, 56, 57, 60, 61, 62, 63, 64, 65, 66, 67]'], {}), '([0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 20, 21, \n 22, 23, 24, 25, 26, 27, 30, 31, 32, 33, 34, 35, 36, 37, 40, 41, 42, 43,\n 44, 45, 46, 47, 50, 51, 52, 53, 54, 55, 56, 57, 60, 61, 62, 63, 64, 65,\n 66, 67])\n', (380, 611), True, 'import numpy as np\n'), ((1119, 1258), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Compute deadtime correction to an EXPOSURE defined by a GTI extension, for a single OBSID."""'}), "(description=\n 'Compute deadtime correction to an EXPOSURE defined by a GTI extension, for a single OBSID.'\n )\n", (1142, 1258), False, 'import argparse\n'), ((2125, 2179), 'os.path.join', 'path.join', (['args.obsdir', '"""xti/event_uf/ni*mpu?_uf.evt*"""'], {}), "(args.obsdir, 'xti/event_uf/ni*mpu?_uf.evt*')\n", (2134, 2179), False, 'from os import path, remove\n'), ((2230, 2240), 'glob.glob', 'glob', (['gstr'], {}), '(gstr)\n', (2234, 2240), False, 'from glob import glob\n'), ((2377, 2415), 'astropy.log.error', 'log.error', (['"""Did not find 7 MPU files!"""'], {}), "('Did not find 7 MPU files!')\n", (2386, 2415), False, 'from astropy import log\n'), ((3434, 3460), 'numpy.where', 'np.where', (['(fpm_deadtime > 0)'], {}), '(fpm_deadtime > 0)\n', (3442, 3460), True, 'import numpy as np\n'), ((3471, 3495), 'numpy.any', 'np.any', (['(fpm_deadtime > 0)'], {}), '(fpm_deadtime > 0)\n', (3477, 3495), True, 'import numpy as np\n'), ((3829, 3849), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""DET_ID"""'], {}), "('DET_ID')\n", (3839, 3849), True, 'import matplotlib.pyplot as plt\n'), ((3854, 3878), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Deadtime %"""'], {}), "('Deadtime %')\n", (3864, 3878), True, 'import matplotlib.pyplot as plt\n'), 
((3883, 3910), 'matplotlib.pyplot.title', 'plt.title', (["t.meta['OBS_ID']"], {}), "(t.meta['OBS_ID'])\n", (3892, 3910), True, 'import matplotlib.pyplot as plt\n'), ((3993, 4003), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4001, 4003), True, 'import matplotlib.pyplot as plt\n'), ((2960, 2987), 'subprocess.check_call', 'check_call', (['cmd'], {'shell': '(True)'}), '(cmd, shell=True)\n', (2970, 2987), False, 'from subprocess import check_call\n'), ((3070, 3098), 'astropy.table.Table.read', 'Table.read', (['"""tmp.evt"""'], {'hdu': '(1)'}), "('tmp.evt', hdu=1)\n", (3080, 3098), False, 'from astropy.table import Table\n'), ((3153, 3175), 'numpy.any', 'np.any', (['(dets == det_id)'], {}), '(dets == det_id)\n', (3159, 3175), True, 'import numpy as np\n'), ((3780, 3824), 'matplotlib.pyplot.plot', 'plt.plot', (['IDS', '(100 * fpm_deadtime / exp)', '"""s"""'], {}), "(IDS, 100 * fpm_deadtime / exp, 's')\n", (3788, 3824), True, 'import matplotlib.pyplot as plt\n'), ((3020, 3057), 'astropy.log.error', 'log.error', (['"""niextract-events failed!"""'], {}), "('niextract-events failed!')\n", (3029, 3057), False, 'from astropy import log\n')]
|
# Generated by Django 2.0 on 2018-01-06 08:02
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('tangerine', '0016_auto_20180104_2324'),
]
operations = [
migrations.AddField(
model_name='config',
name='show_future',
field=models.BooleanField(default=False, help_text='If enabled, posts dated in the future appear immediately. Default is False (drip-date behavior).'),
),
]
|
[
"django.db.models.BooleanField"
] |
[((339, 493), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'help_text': '"""If enabled, posts dated in the future appear immediately. Default is False (drip-date behavior)."""'}), "(default=False, help_text=\n 'If enabled, posts dated in the future appear immediately. Default is False (drip-date behavior).'\n )\n", (358, 493), False, 'from django.db import migrations, models\n')]
|
#!/usr/bin/env python
from subprocess import check_output
"""
Bus 001 Device 008: ID 239a:d1ed
Bus 001 Device 015: ID 045e:00db Microsoft Corp. Natural Ergonomic Keyboard 4000 V1.0
Bus 001 Device 014: ID 046d:c52f Logitech, Inc. Unifying Receiver
Bus 001 Device 013: ID 0b95:772a ASIX Electronics Corp. AX88772A Fast Ethernet
Bus 001 Device 012: ID 0d8c:0105 C-Media Electronics, Inc. CM108 Audio Controller
Bus 001 Device 011: ID 17e9:0117 DisplayLink
Bus 001 Device 010: ID 1a40:0201 Terminus Technology Inc. FE 2.1 7-port Hub
Bus 001 Device 016: ID 04d9:1603 Holtek Semiconductor, Inc. Keyboard
Bus 001 Device 003: ID 0424:ec00 Standard Microsystems Corp. SMSC9512/9514 Fast Ethernet Adapter
Bus 001 Device 002: ID 0424:9514 Standard Microsystems Corp.
Bus 001 Device 001: ID 1d6b:0002 Linux Foundation 2.0 root hub
"""
def lsusb():
    """Run ``lsusb`` and parse its output.

    Returns:
        list: one ``[bus, device, vid_pid, name]`` entry per USB device,
        where *bus* and *device* are decimal strings with leading zeros
        stripped (``"001"`` -> ``"1"``), *vid_pid* is the ``vvvv:pppp``
        ID string, and *name* is the human-readable description or
        ``None`` when lsusb prints no name (see the sample output in the
        module docstring).

    Raises:
        subprocess.CalledProcessError: if the ``lsusb`` binary exits
        non-zero.
    """
    # universal_newlines=True makes check_output return str rather than
    # bytes on Python 3, so the '\n' split below works on both 2 and 3.
    # (The original bytes output made split('\n') raise TypeError on py3.)
    output = check_output(["lsusb"], universal_newlines=True)

    lsusb_entries = []
    for line in [line.strip(' ') for line in output.split('\n') if line.strip(' ')]:
        # Line shape: "Bus 001 Device 008: ID 239a:d1ed <optional name>"
        location, description = line.split(':', 1)
        id_str, vid_pid_name = description.strip(' ').split(' ', 1)
        # Split once instead of three times: [vid:pid] or [vid:pid, name].
        parts = vid_pid_name.split(' ', 1)
        vid_pid = parts[0]
        name = parts[1] if len(parts) > 1 else None
        bus_str, bus, device_str, device = location.split(' ', 3)
        # Normalise zero-padded numbers but keep them as strings.
        bus = str(int(bus, 10))
        device = str(int(device, 10))
        lsusb_entries.append([bus, device, vid_pid, name])
    return lsusb_entries


if __name__ == "__main__":
    print(lsusb())
[
"subprocess.check_output"
] |
[((880, 903), 'subprocess.check_output', 'check_output', (["['lsusb']"], {}), "(['lsusb'])\n", (892, 903), False, 'from subprocess import check_output\n')]
|
import sys
from plotnn import plotnn
import plotnn.tikzeng as tk
def main():
    """Build a U-Net-style architecture diagram and emit it as TikZ.

    The output file name is derived from the running script's name
    (everything before the first '.') with a '.tex' suffix.
    """
    namefile = str(sys.argv[0]).split('.')[0]
    # The diagram is a flat list of drawable elements; *location* strings
    # anchor each element relative to a previously named one, and
    # tk.Connection draws arrows between named elements.
    arch = [
        tk.Image("input", "./images/dogcat.jpg"),
        # --- Encoder block 1: two 64-channel convs, then 2x downsample ---
        tk.Conv2D(name='conv_0', out_width=570, out_channel=64, activation="relu",
                  offset=(3, 0, 0), location="input", width=2, height=40, depth=40),
        tk.Connection("input", "conv_0", origin_loc=None),
        tk.Conv2D(name='conv_1', out_width=568, out_channel=64, activation="relu",
                  offset=(0, 0, 0), location="conv_0-east", width=2, height=40, depth=40),
        tk.Pool(name="pool_b1", offset=(1, -6, 0), location="conv_1-south",
                width=2, height=20, depth=20, opacity=0.5),
        tk.Connection("conv_1", "pool_b1", origin_loc="east", target_loc="north", path="-|"),
        # --- Encoder block 2: 128 channels ---
        tk.Conv2D(name='conv_2', out_width=282, out_channel=128, activation="relu",
                  offset=(0, 0, 0), location="pool_b1-east", width=4, height=20, depth=20),
        tk.Conv2D(name='conv_3', out_width=280, out_channel=128, activation="relu",
                  offset=(0, 0, 0), location="conv_2-east", width=4, height=20, depth=20),
        tk.Pool(name="pool_b2", offset=(1, -5, 0), location="conv_3-south",
                width=4, height=10, depth=10, opacity=0.5),
        tk.Connection("conv_3", "pool_b2", origin_loc="east", target_loc="north", path="-|"),
        # --- Encoder block 3: 256 channels ---
        tk.Conv2D(name='conv_4', out_width=138, out_channel=256, activation="relu",
                  offset=(0, 0, 0), location="pool_b2-east", width=6, height=10, depth=10),
        tk.Conv2D(name='conv_5', out_width=136, out_channel=256, activation="relu",
                  offset=(0, 0, 0), location="conv_4-east", width=6, height=10, depth=10),
        tk.Pool(name="pool_b3", offset=(1, -4, 0), location="conv_5-south",
                width=6, height=5, depth=5, opacity=0.5),
        tk.Connection("conv_5", "pool_b3", origin_loc="east", target_loc="north", path="-|"),
        # --- Encoder block 4: 512 channels ---
        tk.Conv2D(name='conv_6', out_width=66, out_channel=512, activation="relu",
                  offset=(0, 0, 0), location="pool_b3-east", width=8, height=5, depth=5),
        tk.Conv2D(name='conv_7', out_width=64, out_channel=512, activation="relu",
                  offset=(0, 0, 0), location="conv_6-east", width=8, height=5, depth=5),
        tk.Pool(name="pool_b4", offset=(1, -3, 0), location="conv_7-south",
                width=8, height=4, depth=4, opacity=0.5),
        tk.Connection("conv_7", "pool_b4", origin_loc="east", target_loc="north", path="-|"),
        # --- Bottleneck: 1024 channels ---
        tk.Conv2D(name='conv_8', out_width=30, out_channel=1024, activation="relu",
                  offset=(0, 0, 0), location="pool_b4-east", width=10, height=4, depth=4),
        tk.Conv2D(name='conv_9', out_width=28, out_channel=1024, activation="relu",
                  offset=(0, 0, 0), location="conv_8-east", width=10, height=4, depth=4),
        # --- Decoder block 1: upsample, concat skip from conv_7 (blue
        # double line = copy-and-crop), then two 512-channel convs ---
        tk.ConvTranspose2D(name='unpool_b1', out_channel=512, offset=(1, 3, 0), location="conv_9-northeast", width=8, height=5, depth=5),
        tk.Connection("conv_9", "unpool_b1", path='|-'),
        tk.Concat("concat_b1", location="unpool_b1-east", offset=(1, 0, 0), color="white"),
        tk.Connection("unpool_b1", "concat_b1"),
        tk.Box("concat_1", location="concat_b1-east", offset=(1, 0, 0), xlabel=1024, width=10, height=5, depth=5),
        tk.Connection("concat_b1", "concat_1"),
        tk.Connection('conv_7', 'concat_b1', origin_loc="north", target_loc="north", origin_pos=1.5, target_pos=2,
                      color="blue", linestyle="double", path="|-|"),
        tk.Conv2D(name='conv_10', out_width=54, out_channel=512, activation="relu",
                  offset=(1, 0, 0), location="concat_1-east", width=8, height=5, depth=5),
        tk.Connection("concat_1", "conv_10"),
        tk.Conv2D(name='conv_11', out_width=52, out_channel=512, activation="relu",
                  offset=(0, 0, 0), location="conv_10-east", width=8, height=5, depth=5),
        # --- Decoder block 2: skip from conv_5, 256 channels ---
        tk.ConvTranspose2D(name='unpool_b2', out_channel=256, offset=(1, 5, 0), location="conv_11-northeast", width=6, height=10, depth=10),
        tk.Connection("conv_11", "unpool_b2", path='|-'),
        tk.Concat("concat_b2", location="unpool_b2-east", offset=(1, 0, 0), color="white"),
        tk.Connection("unpool_b2", "concat_b2"),
        tk.Box("concat_2", location="concat_b2-east", offset=(1, 0, 0), xlabel=512, width=8, height=10, depth=10),
        tk.Connection("concat_b2", "concat_2"),
        tk.Connection('conv_5', 'concat_b2', origin_loc="north", target_loc="north", origin_pos=1, target_pos=4,
                      color="blue", linestyle="double", path="|-|"),
        tk.Conv2D(name='conv_12', out_width=102, out_channel=256, activation="relu",
                  offset=(1, 0, 0), location="concat_2-east", width=6, height=10, depth=10),
        tk.Connection("concat_2", "conv_12"),
        tk.Conv2D(name='conv_13', out_width=100, out_channel=256, activation="relu",
                  offset=(0, 0, 0), location="conv_12-east", width=6, height=10, depth=10),
        # --- Decoder block 3: skip from conv_3, 128 channels ---
        tk.ConvTranspose2D(name='unpool_b3', out_channel=128, offset=(2, 6, 0), location="conv_13-northeast", width=4, height=20, depth=20),
        tk.Connection("conv_13", "unpool_b3", path='|-'),
        tk.Concat("concat_b3", location="unpool_b3-east", offset=(1, 0, 0), color="white"),
        tk.Connection("unpool_b3", "concat_b3"),
        tk.Box("concat_3", location="concat_b3-east", offset=(1, 0, 0), xlabel=256, width=6, height=20, depth=20),
        tk.Connection("concat_b3", "concat_3"),
        tk.Connection('conv_3', 'concat_b3', origin_loc="north", target_loc="north", origin_pos=1, target_pos=6,
                      color="blue", linestyle="double", path="|-|"),
        tk.Conv2D(name='conv_14', out_width=198, out_channel=128, activation="relu",
                  offset=(1, 0, 0), location="concat_3-east", width=4, height=20, depth=20),
        tk.Connection("concat_3", "conv_14"),
        tk.Conv2D(name='conv_15', out_width=196, out_channel=128, activation="relu",
                  offset=(0, 0, 0), location="conv_14-east", width=4, height=20, depth=20),
        # --- Decoder block 4: skip from conv_1, 64 channels ---
        tk.ConvTranspose2D(name='unpool_b4', out_channel=64, offset=(2, 8, 0), location="conv_15-northeast", width=2, height=40, depth=40),
        tk.Connection("conv_15", "unpool_b4", path='|-'),
        tk.Concat("concat_b4", location="unpool_b4-east", offset=(2, 0, 0), color="white"),
        tk.Connection("unpool_b4", "concat_b4"),
        tk.Box("concat_4", location="concat_b4-east", offset=(2, 0, 0), xlabel=128, width=4, height=40, depth=40),
        tk.Connection("concat_b4", "concat_4"),
        tk.Connection('conv_1', 'concat_b4', origin_loc="north", target_loc="north", origin_pos=1, target_pos=8,
                      color="blue", linestyle="double", path="|-|"),
        tk.Conv2D(name='conv_16', out_width=390, out_channel=64, activation="relu",
                  offset=(2, 0, 0), location="concat_4-east", width=2, height=40, depth=40),
        tk.Connection("concat_4", "conv_16"),
        tk.Conv2D(name='conv_17', out_width=388, out_channel=64, activation="relu",
                  offset=(0, 0, 0), location="conv_16-east", width=2, height=40, depth=40),
        # --- Output head: 1x1-style projection to 2 classes + softmax ---
        tk.Conv2D(name='conv_18', out_width=388, out_channel=2,
                  offset=(2, 0, 0), location="conv_17-east", width=1, height=40, depth=40),
        tk.Connection("conv_17", "conv_18"),
        tk.Softmax(name="softmax", out_channel=2, offset=(1, 0, 0), location="conv_18-east",
                   width=1, height=40, depth=40, caption="softmax"),
        tk.Connection("conv_18", "softmax"),
        # Legend mapping each element style to its meaning in the figure.
        tk.Legend(
            items=[
                (tk.Conv2D("conv"), "Conv2D"),
                (tk.Conv2D("conva", activation="relu"), "Conv2D+ReLU"),
                (tk.ConvTranspose2D("deconv"), "Upsample+Conv"),
                (tk.Pool("maxpool"), "MaxPooling"),
                (tk.Softmax("softmax"), "Softmax"),
                (tk.Concat("concat", color="white", radius=0.7), "Concat"),
                (tk.Connection((0, 0, 0), (1, 0, 0), color="blue", linestyle="double"), "Copy and Crop"),
            ],
            scale=3.0,
            location="south east",
            offset=(0, 0, 0)
        )
    ]
    plotnn.generate([arch], namefile + '.tex')


if __name__ == '__main__':
    main()
|
[
"plotnn.tikzeng.Conv2D",
"plotnn.tikzeng.ConvTranspose2D",
"plotnn.tikzeng.Image",
"plotnn.tikzeng.Softmax",
"plotnn.tikzeng.Connection",
"plotnn.plotnn.generate",
"plotnn.tikzeng.Concat",
"plotnn.tikzeng.Box",
"plotnn.tikzeng.Pool"
] |
[((8285, 8327), 'plotnn.plotnn.generate', 'plotnn.generate', (['[arch]', "(namefile + '.tex')"], {}), "([arch], namefile + '.tex')\n", (8300, 8327), False, 'from plotnn import plotnn\n'), ((147, 187), 'plotnn.tikzeng.Image', 'tk.Image', (['"""input"""', '"""./images/dogcat.jpg"""'], {}), "('input', './images/dogcat.jpg')\n", (155, 187), True, 'import plotnn.tikzeng as tk\n'), ((198, 342), 'plotnn.tikzeng.Conv2D', 'tk.Conv2D', ([], {'name': '"""conv_0"""', 'out_width': '(570)', 'out_channel': '(64)', 'activation': '"""relu"""', 'offset': '(3, 0, 0)', 'location': '"""input"""', 'width': '(2)', 'height': '(40)', 'depth': '(40)'}), "(name='conv_0', out_width=570, out_channel=64, activation='relu',\n offset=(3, 0, 0), location='input', width=2, height=40, depth=40)\n", (207, 342), True, 'import plotnn.tikzeng as tk\n'), ((366, 415), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""input"""', '"""conv_0"""'], {'origin_loc': 'None'}), "('input', 'conv_0', origin_loc=None)\n", (379, 415), True, 'import plotnn.tikzeng as tk\n'), ((425, 575), 'plotnn.tikzeng.Conv2D', 'tk.Conv2D', ([], {'name': '"""conv_1"""', 'out_width': '(568)', 'out_channel': '(64)', 'activation': '"""relu"""', 'offset': '(0, 0, 0)', 'location': '"""conv_0-east"""', 'width': '(2)', 'height': '(40)', 'depth': '(40)'}), "(name='conv_1', out_width=568, out_channel=64, activation='relu',\n offset=(0, 0, 0), location='conv_0-east', width=2, height=40, depth=40)\n", (434, 575), True, 'import plotnn.tikzeng as tk\n'), ((599, 713), 'plotnn.tikzeng.Pool', 'tk.Pool', ([], {'name': '"""pool_b1"""', 'offset': '(1, -6, 0)', 'location': '"""conv_1-south"""', 'width': '(2)', 'height': '(20)', 'depth': '(20)', 'opacity': '(0.5)'}), "(name='pool_b1', offset=(1, -6, 0), location='conv_1-south', width=2,\n height=20, depth=20, opacity=0.5)\n", (606, 713), True, 'import plotnn.tikzeng as tk\n'), ((735, 823), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""conv_1"""', '"""pool_b1"""'], {'origin_loc': '"""east"""', 
'target_loc': '"""north"""', 'path': '"""-|"""'}), "('conv_1', 'pool_b1', origin_loc='east', target_loc='north',\n path='-|')\n", (748, 823), True, 'import plotnn.tikzeng as tk\n'), ((830, 982), 'plotnn.tikzeng.Conv2D', 'tk.Conv2D', ([], {'name': '"""conv_2"""', 'out_width': '(282)', 'out_channel': '(128)', 'activation': '"""relu"""', 'offset': '(0, 0, 0)', 'location': '"""pool_b1-east"""', 'width': '(4)', 'height': '(20)', 'depth': '(20)'}), "(name='conv_2', out_width=282, out_channel=128, activation='relu',\n offset=(0, 0, 0), location='pool_b1-east', width=4, height=20, depth=20)\n", (839, 982), True, 'import plotnn.tikzeng as tk\n'), ((1006, 1157), 'plotnn.tikzeng.Conv2D', 'tk.Conv2D', ([], {'name': '"""conv_3"""', 'out_width': '(280)', 'out_channel': '(128)', 'activation': '"""relu"""', 'offset': '(0, 0, 0)', 'location': '"""conv_2-east"""', 'width': '(4)', 'height': '(20)', 'depth': '(20)'}), "(name='conv_3', out_width=280, out_channel=128, activation='relu',\n offset=(0, 0, 0), location='conv_2-east', width=4, height=20, depth=20)\n", (1015, 1157), True, 'import plotnn.tikzeng as tk\n'), ((1181, 1295), 'plotnn.tikzeng.Pool', 'tk.Pool', ([], {'name': '"""pool_b2"""', 'offset': '(1, -5, 0)', 'location': '"""conv_3-south"""', 'width': '(4)', 'height': '(10)', 'depth': '(10)', 'opacity': '(0.5)'}), "(name='pool_b2', offset=(1, -5, 0), location='conv_3-south', width=4,\n height=10, depth=10, opacity=0.5)\n", (1188, 1295), True, 'import plotnn.tikzeng as tk\n'), ((1317, 1405), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""conv_3"""', '"""pool_b2"""'], {'origin_loc': '"""east"""', 'target_loc': '"""north"""', 'path': '"""-|"""'}), "('conv_3', 'pool_b2', origin_loc='east', target_loc='north',\n path='-|')\n", (1330, 1405), True, 'import plotnn.tikzeng as tk\n'), ((1412, 1564), 'plotnn.tikzeng.Conv2D', 'tk.Conv2D', ([], {'name': '"""conv_4"""', 'out_width': '(138)', 'out_channel': '(256)', 'activation': '"""relu"""', 'offset': '(0, 0, 0)', 'location': 
'"""pool_b2-east"""', 'width': '(6)', 'height': '(10)', 'depth': '(10)'}), "(name='conv_4', out_width=138, out_channel=256, activation='relu',\n offset=(0, 0, 0), location='pool_b2-east', width=6, height=10, depth=10)\n", (1421, 1564), True, 'import plotnn.tikzeng as tk\n'), ((1588, 1739), 'plotnn.tikzeng.Conv2D', 'tk.Conv2D', ([], {'name': '"""conv_5"""', 'out_width': '(136)', 'out_channel': '(256)', 'activation': '"""relu"""', 'offset': '(0, 0, 0)', 'location': '"""conv_4-east"""', 'width': '(6)', 'height': '(10)', 'depth': '(10)'}), "(name='conv_5', out_width=136, out_channel=256, activation='relu',\n offset=(0, 0, 0), location='conv_4-east', width=6, height=10, depth=10)\n", (1597, 1739), True, 'import plotnn.tikzeng as tk\n'), ((1763, 1875), 'plotnn.tikzeng.Pool', 'tk.Pool', ([], {'name': '"""pool_b3"""', 'offset': '(1, -4, 0)', 'location': '"""conv_5-south"""', 'width': '(6)', 'height': '(5)', 'depth': '(5)', 'opacity': '(0.5)'}), "(name='pool_b3', offset=(1, -4, 0), location='conv_5-south', width=6,\n height=5, depth=5, opacity=0.5)\n", (1770, 1875), True, 'import plotnn.tikzeng as tk\n'), ((1897, 1985), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""conv_5"""', '"""pool_b3"""'], {'origin_loc': '"""east"""', 'target_loc': '"""north"""', 'path': '"""-|"""'}), "('conv_5', 'pool_b3', origin_loc='east', target_loc='north',\n path='-|')\n", (1910, 1985), True, 'import plotnn.tikzeng as tk\n'), ((1992, 2141), 'plotnn.tikzeng.Conv2D', 'tk.Conv2D', ([], {'name': '"""conv_6"""', 'out_width': '(66)', 'out_channel': '(512)', 'activation': '"""relu"""', 'offset': '(0, 0, 0)', 'location': '"""pool_b3-east"""', 'width': '(8)', 'height': '(5)', 'depth': '(5)'}), "(name='conv_6', out_width=66, out_channel=512, activation='relu',\n offset=(0, 0, 0), location='pool_b3-east', width=8, height=5, depth=5)\n", (2001, 2141), True, 'import plotnn.tikzeng as tk\n'), ((2165, 2313), 'plotnn.tikzeng.Conv2D', 'tk.Conv2D', ([], {'name': '"""conv_7"""', 'out_width': '(64)', 
'out_channel': '(512)', 'activation': '"""relu"""', 'offset': '(0, 0, 0)', 'location': '"""conv_6-east"""', 'width': '(8)', 'height': '(5)', 'depth': '(5)'}), "(name='conv_7', out_width=64, out_channel=512, activation='relu',\n offset=(0, 0, 0), location='conv_6-east', width=8, height=5, depth=5)\n", (2174, 2313), True, 'import plotnn.tikzeng as tk\n'), ((2337, 2449), 'plotnn.tikzeng.Pool', 'tk.Pool', ([], {'name': '"""pool_b4"""', 'offset': '(1, -3, 0)', 'location': '"""conv_7-south"""', 'width': '(8)', 'height': '(4)', 'depth': '(4)', 'opacity': '(0.5)'}), "(name='pool_b4', offset=(1, -3, 0), location='conv_7-south', width=8,\n height=4, depth=4, opacity=0.5)\n", (2344, 2449), True, 'import plotnn.tikzeng as tk\n'), ((2471, 2559), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""conv_7"""', '"""pool_b4"""'], {'origin_loc': '"""east"""', 'target_loc': '"""north"""', 'path': '"""-|"""'}), "('conv_7', 'pool_b4', origin_loc='east', target_loc='north',\n path='-|')\n", (2484, 2559), True, 'import plotnn.tikzeng as tk\n'), ((2566, 2717), 'plotnn.tikzeng.Conv2D', 'tk.Conv2D', ([], {'name': '"""conv_8"""', 'out_width': '(30)', 'out_channel': '(1024)', 'activation': '"""relu"""', 'offset': '(0, 0, 0)', 'location': '"""pool_b4-east"""', 'width': '(10)', 'height': '(4)', 'depth': '(4)'}), "(name='conv_8', out_width=30, out_channel=1024, activation='relu',\n offset=(0, 0, 0), location='pool_b4-east', width=10, height=4, depth=4)\n", (2575, 2717), True, 'import plotnn.tikzeng as tk\n'), ((2741, 2891), 'plotnn.tikzeng.Conv2D', 'tk.Conv2D', ([], {'name': '"""conv_9"""', 'out_width': '(28)', 'out_channel': '(1024)', 'activation': '"""relu"""', 'offset': '(0, 0, 0)', 'location': '"""conv_8-east"""', 'width': '(10)', 'height': '(4)', 'depth': '(4)'}), "(name='conv_9', out_width=28, out_channel=1024, activation='relu',\n offset=(0, 0, 0), location='conv_8-east', width=10, height=4, depth=4)\n", (2750, 2891), True, 'import plotnn.tikzeng as tk\n'), ((2916, 3048), 
'plotnn.tikzeng.ConvTranspose2D', 'tk.ConvTranspose2D', ([], {'name': '"""unpool_b1"""', 'out_channel': '(512)', 'offset': '(1, 3, 0)', 'location': '"""conv_9-northeast"""', 'width': '(8)', 'height': '(5)', 'depth': '(5)'}), "(name='unpool_b1', out_channel=512, offset=(1, 3, 0),\n location='conv_9-northeast', width=8, height=5, depth=5)\n", (2934, 3048), True, 'import plotnn.tikzeng as tk\n'), ((3054, 3101), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""conv_9"""', '"""unpool_b1"""'], {'path': '"""|-"""'}), "('conv_9', 'unpool_b1', path='|-')\n", (3067, 3101), True, 'import plotnn.tikzeng as tk\n'), ((3111, 3198), 'plotnn.tikzeng.Concat', 'tk.Concat', (['"""concat_b1"""'], {'location': '"""unpool_b1-east"""', 'offset': '(1, 0, 0)', 'color': '"""white"""'}), "('concat_b1', location='unpool_b1-east', offset=(1, 0, 0), color=\n 'white')\n", (3120, 3198), True, 'import plotnn.tikzeng as tk\n'), ((3203, 3242), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""unpool_b1"""', '"""concat_b1"""'], {}), "('unpool_b1', 'concat_b1')\n", (3216, 3242), True, 'import plotnn.tikzeng as tk\n'), ((3252, 3361), 'plotnn.tikzeng.Box', 'tk.Box', (['"""concat_1"""'], {'location': '"""concat_b1-east"""', 'offset': '(1, 0, 0)', 'xlabel': '(1024)', 'width': '(10)', 'height': '(5)', 'depth': '(5)'}), "('concat_1', location='concat_b1-east', offset=(1, 0, 0), xlabel=1024,\n width=10, height=5, depth=5)\n", (3258, 3361), True, 'import plotnn.tikzeng as tk\n'), ((3367, 3405), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""concat_b1"""', '"""concat_1"""'], {}), "('concat_b1', 'concat_1')\n", (3380, 3405), True, 'import plotnn.tikzeng as tk\n'), ((3415, 3571), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""conv_7"""', '"""concat_b1"""'], {'origin_loc': '"""north"""', 'target_loc': '"""north"""', 'origin_pos': '(1.5)', 'target_pos': '(2)', 'color': '"""blue"""', 'linestyle': '"""double"""', 'path': '"""|-|"""'}), "('conv_7', 'concat_b1', origin_loc='north', target_loc='north',\n 
origin_pos=1.5, target_pos=2, color='blue', linestyle='double', path='|-|')\n", (3428, 3571), True, 'import plotnn.tikzeng as tk\n'), ((3599, 3750), 'plotnn.tikzeng.Conv2D', 'tk.Conv2D', ([], {'name': '"""conv_10"""', 'out_width': '(54)', 'out_channel': '(512)', 'activation': '"""relu"""', 'offset': '(1, 0, 0)', 'location': '"""concat_1-east"""', 'width': '(8)', 'height': '(5)', 'depth': '(5)'}), "(name='conv_10', out_width=54, out_channel=512, activation='relu',\n offset=(1, 0, 0), location='concat_1-east', width=8, height=5, depth=5)\n", (3608, 3750), True, 'import plotnn.tikzeng as tk\n'), ((3774, 3810), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""concat_1"""', '"""conv_10"""'], {}), "('concat_1', 'conv_10')\n", (3787, 3810), True, 'import plotnn.tikzeng as tk\n'), ((3820, 3970), 'plotnn.tikzeng.Conv2D', 'tk.Conv2D', ([], {'name': '"""conv_11"""', 'out_width': '(52)', 'out_channel': '(512)', 'activation': '"""relu"""', 'offset': '(0, 0, 0)', 'location': '"""conv_10-east"""', 'width': '(8)', 'height': '(5)', 'depth': '(5)'}), "(name='conv_11', out_width=52, out_channel=512, activation='relu',\n offset=(0, 0, 0), location='conv_10-east', width=8, height=5, depth=5)\n", (3829, 3970), True, 'import plotnn.tikzeng as tk\n'), ((3996, 4131), 'plotnn.tikzeng.ConvTranspose2D', 'tk.ConvTranspose2D', ([], {'name': '"""unpool_b2"""', 'out_channel': '(256)', 'offset': '(1, 5, 0)', 'location': '"""conv_11-northeast"""', 'width': '(6)', 'height': '(10)', 'depth': '(10)'}), "(name='unpool_b2', out_channel=256, offset=(1, 5, 0),\n location='conv_11-northeast', width=6, height=10, depth=10)\n", (4014, 4131), True, 'import plotnn.tikzeng as tk\n'), ((4137, 4185), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""conv_11"""', '"""unpool_b2"""'], {'path': '"""|-"""'}), "('conv_11', 'unpool_b2', path='|-')\n", (4150, 4185), True, 'import plotnn.tikzeng as tk\n'), ((4195, 4282), 'plotnn.tikzeng.Concat', 'tk.Concat', (['"""concat_b2"""'], {'location': '"""unpool_b2-east"""', 
'offset': '(1, 0, 0)', 'color': '"""white"""'}), "('concat_b2', location='unpool_b2-east', offset=(1, 0, 0), color=\n 'white')\n", (4204, 4282), True, 'import plotnn.tikzeng as tk\n'), ((4287, 4326), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""unpool_b2"""', '"""concat_b2"""'], {}), "('unpool_b2', 'concat_b2')\n", (4300, 4326), True, 'import plotnn.tikzeng as tk\n'), ((4336, 4445), 'plotnn.tikzeng.Box', 'tk.Box', (['"""concat_2"""'], {'location': '"""concat_b2-east"""', 'offset': '(1, 0, 0)', 'xlabel': '(512)', 'width': '(8)', 'height': '(10)', 'depth': '(10)'}), "('concat_2', location='concat_b2-east', offset=(1, 0, 0), xlabel=512,\n width=8, height=10, depth=10)\n", (4342, 4445), True, 'import plotnn.tikzeng as tk\n'), ((4451, 4489), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""concat_b2"""', '"""concat_2"""'], {}), "('concat_b2', 'concat_2')\n", (4464, 4489), True, 'import plotnn.tikzeng as tk\n'), ((4499, 4653), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""conv_5"""', '"""concat_b2"""'], {'origin_loc': '"""north"""', 'target_loc': '"""north"""', 'origin_pos': '(1)', 'target_pos': '(4)', 'color': '"""blue"""', 'linestyle': '"""double"""', 'path': '"""|-|"""'}), "('conv_5', 'concat_b2', origin_loc='north', target_loc='north',\n origin_pos=1, target_pos=4, color='blue', linestyle='double', path='|-|')\n", (4512, 4653), True, 'import plotnn.tikzeng as tk\n'), ((4681, 4835), 'plotnn.tikzeng.Conv2D', 'tk.Conv2D', ([], {'name': '"""conv_12"""', 'out_width': '(102)', 'out_channel': '(256)', 'activation': '"""relu"""', 'offset': '(1, 0, 0)', 'location': '"""concat_2-east"""', 'width': '(6)', 'height': '(10)', 'depth': '(10)'}), "(name='conv_12', out_width=102, out_channel=256, activation='relu',\n offset=(1, 0, 0), location='concat_2-east', width=6, height=10, depth=10)\n", (4690, 4835), True, 'import plotnn.tikzeng as tk\n'), ((4859, 4895), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""concat_2"""', '"""conv_12"""'], {}), "('concat_2', 
'conv_12')\n", (4872, 4895), True, 'import plotnn.tikzeng as tk\n'), ((4905, 5058), 'plotnn.tikzeng.Conv2D', 'tk.Conv2D', ([], {'name': '"""conv_13"""', 'out_width': '(100)', 'out_channel': '(256)', 'activation': '"""relu"""', 'offset': '(0, 0, 0)', 'location': '"""conv_12-east"""', 'width': '(6)', 'height': '(10)', 'depth': '(10)'}), "(name='conv_13', out_width=100, out_channel=256, activation='relu',\n offset=(0, 0, 0), location='conv_12-east', width=6, height=10, depth=10)\n", (4914, 5058), True, 'import plotnn.tikzeng as tk\n'), ((5083, 5218), 'plotnn.tikzeng.ConvTranspose2D', 'tk.ConvTranspose2D', ([], {'name': '"""unpool_b3"""', 'out_channel': '(128)', 'offset': '(2, 6, 0)', 'location': '"""conv_13-northeast"""', 'width': '(4)', 'height': '(20)', 'depth': '(20)'}), "(name='unpool_b3', out_channel=128, offset=(2, 6, 0),\n location='conv_13-northeast', width=4, height=20, depth=20)\n", (5101, 5218), True, 'import plotnn.tikzeng as tk\n'), ((5224, 5272), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""conv_13"""', '"""unpool_b3"""'], {'path': '"""|-"""'}), "('conv_13', 'unpool_b3', path='|-')\n", (5237, 5272), True, 'import plotnn.tikzeng as tk\n'), ((5282, 5369), 'plotnn.tikzeng.Concat', 'tk.Concat', (['"""concat_b3"""'], {'location': '"""unpool_b3-east"""', 'offset': '(1, 0, 0)', 'color': '"""white"""'}), "('concat_b3', location='unpool_b3-east', offset=(1, 0, 0), color=\n 'white')\n", (5291, 5369), True, 'import plotnn.tikzeng as tk\n'), ((5374, 5413), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""unpool_b3"""', '"""concat_b3"""'], {}), "('unpool_b3', 'concat_b3')\n", (5387, 5413), True, 'import plotnn.tikzeng as tk\n'), ((5423, 5532), 'plotnn.tikzeng.Box', 'tk.Box', (['"""concat_3"""'], {'location': '"""concat_b3-east"""', 'offset': '(1, 0, 0)', 'xlabel': '(256)', 'width': '(6)', 'height': '(20)', 'depth': '(20)'}), "('concat_3', location='concat_b3-east', offset=(1, 0, 0), xlabel=256,\n width=6, height=20, depth=20)\n", (5429, 5532), True, 'import 
plotnn.tikzeng as tk\n'), ((5538, 5576), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""concat_b3"""', '"""concat_3"""'], {}), "('concat_b3', 'concat_3')\n", (5551, 5576), True, 'import plotnn.tikzeng as tk\n'), ((5586, 5740), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""conv_3"""', '"""concat_b3"""'], {'origin_loc': '"""north"""', 'target_loc': '"""north"""', 'origin_pos': '(1)', 'target_pos': '(6)', 'color': '"""blue"""', 'linestyle': '"""double"""', 'path': '"""|-|"""'}), "('conv_3', 'concat_b3', origin_loc='north', target_loc='north',\n origin_pos=1, target_pos=6, color='blue', linestyle='double', path='|-|')\n", (5599, 5740), True, 'import plotnn.tikzeng as tk\n'), ((5768, 5922), 'plotnn.tikzeng.Conv2D', 'tk.Conv2D', ([], {'name': '"""conv_14"""', 'out_width': '(198)', 'out_channel': '(128)', 'activation': '"""relu"""', 'offset': '(1, 0, 0)', 'location': '"""concat_3-east"""', 'width': '(4)', 'height': '(20)', 'depth': '(20)'}), "(name='conv_14', out_width=198, out_channel=128, activation='relu',\n offset=(1, 0, 0), location='concat_3-east', width=4, height=20, depth=20)\n", (5777, 5922), True, 'import plotnn.tikzeng as tk\n'), ((5946, 5982), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""concat_3"""', '"""conv_14"""'], {}), "('concat_3', 'conv_14')\n", (5959, 5982), True, 'import plotnn.tikzeng as tk\n'), ((5992, 6145), 'plotnn.tikzeng.Conv2D', 'tk.Conv2D', ([], {'name': '"""conv_15"""', 'out_width': '(196)', 'out_channel': '(128)', 'activation': '"""relu"""', 'offset': '(0, 0, 0)', 'location': '"""conv_14-east"""', 'width': '(4)', 'height': '(20)', 'depth': '(20)'}), "(name='conv_15', out_width=196, out_channel=128, activation='relu',\n offset=(0, 0, 0), location='conv_14-east', width=4, height=20, depth=20)\n", (6001, 6145), True, 'import plotnn.tikzeng as tk\n'), ((6170, 6304), 'plotnn.tikzeng.ConvTranspose2D', 'tk.ConvTranspose2D', ([], {'name': '"""unpool_b4"""', 'out_channel': '(64)', 'offset': '(2, 8, 0)', 'location': 
'"""conv_15-northeast"""', 'width': '(2)', 'height': '(40)', 'depth': '(40)'}), "(name='unpool_b4', out_channel=64, offset=(2, 8, 0),\n location='conv_15-northeast', width=2, height=40, depth=40)\n", (6188, 6304), True, 'import plotnn.tikzeng as tk\n'), ((6310, 6358), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""conv_15"""', '"""unpool_b4"""'], {'path': '"""|-"""'}), "('conv_15', 'unpool_b4', path='|-')\n", (6323, 6358), True, 'import plotnn.tikzeng as tk\n'), ((6368, 6455), 'plotnn.tikzeng.Concat', 'tk.Concat', (['"""concat_b4"""'], {'location': '"""unpool_b4-east"""', 'offset': '(2, 0, 0)', 'color': '"""white"""'}), "('concat_b4', location='unpool_b4-east', offset=(2, 0, 0), color=\n 'white')\n", (6377, 6455), True, 'import plotnn.tikzeng as tk\n'), ((6460, 6499), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""unpool_b4"""', '"""concat_b4"""'], {}), "('unpool_b4', 'concat_b4')\n", (6473, 6499), True, 'import plotnn.tikzeng as tk\n'), ((6509, 6618), 'plotnn.tikzeng.Box', 'tk.Box', (['"""concat_4"""'], {'location': '"""concat_b4-east"""', 'offset': '(2, 0, 0)', 'xlabel': '(128)', 'width': '(4)', 'height': '(40)', 'depth': '(40)'}), "('concat_4', location='concat_b4-east', offset=(2, 0, 0), xlabel=128,\n width=4, height=40, depth=40)\n", (6515, 6618), True, 'import plotnn.tikzeng as tk\n'), ((6624, 6662), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""concat_b4"""', '"""concat_4"""'], {}), "('concat_b4', 'concat_4')\n", (6637, 6662), True, 'import plotnn.tikzeng as tk\n'), ((6672, 6826), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""conv_1"""', '"""concat_b4"""'], {'origin_loc': '"""north"""', 'target_loc': '"""north"""', 'origin_pos': '(1)', 'target_pos': '(8)', 'color': '"""blue"""', 'linestyle': '"""double"""', 'path': '"""|-|"""'}), "('conv_1', 'concat_b4', origin_loc='north', target_loc='north',\n origin_pos=1, target_pos=8, color='blue', linestyle='double', path='|-|')\n", (6685, 6826), True, 'import plotnn.tikzeng as tk\n'), ((6854, 
7007), 'plotnn.tikzeng.Conv2D', 'tk.Conv2D', ([], {'name': '"""conv_16"""', 'out_width': '(390)', 'out_channel': '(64)', 'activation': '"""relu"""', 'offset': '(2, 0, 0)', 'location': '"""concat_4-east"""', 'width': '(2)', 'height': '(40)', 'depth': '(40)'}), "(name='conv_16', out_width=390, out_channel=64, activation='relu',\n offset=(2, 0, 0), location='concat_4-east', width=2, height=40, depth=40)\n", (6863, 7007), True, 'import plotnn.tikzeng as tk\n'), ((7031, 7067), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""concat_4"""', '"""conv_16"""'], {}), "('concat_4', 'conv_16')\n", (7044, 7067), True, 'import plotnn.tikzeng as tk\n'), ((7077, 7229), 'plotnn.tikzeng.Conv2D', 'tk.Conv2D', ([], {'name': '"""conv_17"""', 'out_width': '(388)', 'out_channel': '(64)', 'activation': '"""relu"""', 'offset': '(0, 0, 0)', 'location': '"""conv_16-east"""', 'width': '(2)', 'height': '(40)', 'depth': '(40)'}), "(name='conv_17', out_width=388, out_channel=64, activation='relu',\n offset=(0, 0, 0), location='conv_16-east', width=2, height=40, depth=40)\n", (7086, 7229), True, 'import plotnn.tikzeng as tk\n'), ((7253, 7385), 'plotnn.tikzeng.Conv2D', 'tk.Conv2D', ([], {'name': '"""conv_18"""', 'out_width': '(388)', 'out_channel': '(2)', 'offset': '(2, 0, 0)', 'location': '"""conv_17-east"""', 'width': '(1)', 'height': '(40)', 'depth': '(40)'}), "(name='conv_18', out_width=388, out_channel=2, offset=(2, 0, 0),\n location='conv_17-east', width=1, height=40, depth=40)\n", (7262, 7385), True, 'import plotnn.tikzeng as tk\n'), ((7409, 7444), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""conv_17"""', '"""conv_18"""'], {}), "('conv_17', 'conv_18')\n", (7422, 7444), True, 'import plotnn.tikzeng as tk\n'), ((7455, 7593), 'plotnn.tikzeng.Softmax', 'tk.Softmax', ([], {'name': '"""softmax"""', 'out_channel': '(2)', 'offset': '(1, 0, 0)', 'location': '"""conv_18-east"""', 'width': '(1)', 'height': '(40)', 'depth': '(40)', 'caption': '"""softmax"""'}), "(name='softmax', out_channel=2, 
offset=(1, 0, 0), location=\n 'conv_18-east', width=1, height=40, depth=40, caption='softmax')\n", (7465, 7593), True, 'import plotnn.tikzeng as tk\n'), ((7617, 7652), 'plotnn.tikzeng.Connection', 'tk.Connection', (['"""conv_18"""', '"""softmax"""'], {}), "('conv_18', 'softmax')\n", (7630, 7652), True, 'import plotnn.tikzeng as tk\n'), ((7710, 7727), 'plotnn.tikzeng.Conv2D', 'tk.Conv2D', (['"""conv"""'], {}), "('conv')\n", (7719, 7727), True, 'import plotnn.tikzeng as tk\n'), ((7757, 7794), 'plotnn.tikzeng.Conv2D', 'tk.Conv2D', (['"""conva"""'], {'activation': '"""relu"""'}), "('conva', activation='relu')\n", (7766, 7794), True, 'import plotnn.tikzeng as tk\n'), ((7829, 7857), 'plotnn.tikzeng.ConvTranspose2D', 'tk.ConvTranspose2D', (['"""deconv"""'], {}), "('deconv')\n", (7847, 7857), True, 'import plotnn.tikzeng as tk\n'), ((7894, 7912), 'plotnn.tikzeng.Pool', 'tk.Pool', (['"""maxpool"""'], {}), "('maxpool')\n", (7901, 7912), True, 'import plotnn.tikzeng as tk\n'), ((7946, 7967), 'plotnn.tikzeng.Softmax', 'tk.Softmax', (['"""softmax"""'], {}), "('softmax')\n", (7956, 7967), True, 'import plotnn.tikzeng as tk\n'), ((7998, 8044), 'plotnn.tikzeng.Concat', 'tk.Concat', (['"""concat"""'], {'color': '"""white"""', 'radius': '(0.7)'}), "('concat', color='white', radius=0.7)\n", (8007, 8044), True, 'import plotnn.tikzeng as tk\n'), ((8074, 8143), 'plotnn.tikzeng.Connection', 'tk.Connection', (['(0, 0, 0)', '(1, 0, 0)'], {'color': '"""blue"""', 'linestyle': '"""double"""'}), "((0, 0, 0), (1, 0, 0), color='blue', linestyle='double')\n", (8087, 8143), True, 'import plotnn.tikzeng as tk\n')]
|
import tensorflow as tf
import numpy as np
from PIL import Image
from PIL import ImageDraw
from PIL import ImageColor
import cv2
import time
from styx_msgs.msg import TrafficLight
class TLClassifier(object):
    """Classify traffic-light color with a frozen SSD detection graph.

    NOTE(review): `get_classification` reads `self.category_index`
    (class id -> {'name': ...} label map), which is never assigned in
    this class — confirm where it is meant to be loaded, otherwise the
    method raises AttributeError at runtime.
    """
    def __init__(self):
        # Default to UNKNOWN until an image has been classified.
        self.current_light = TrafficLight.UNKNOWN
        SSD_GRAPH_FILE = './frozen_inference_graph.pb'
        self.detection_graph = self.load_graph(SSD_GRAPH_FILE)
        # The input placeholder for the image.
        # `get_tensor_by_name` returns the Tensor with the associated name in the Graph.
        self.image_tensor = self.detection_graph.get_tensor_by_name('image_tensor:0')
        # Each box represents a part of the image where a particular object was detected.
        self.detection_boxes = self.detection_graph.get_tensor_by_name('detection_boxes:0')
        # Each score represent how level of confidence for each of the objects.
        # Score is shown on the result image, together with the class label.
        self.detection_scores = self.detection_graph.get_tensor_by_name('detection_scores:0')
        # The classification of the object (integer id).
        self.detection_classes = self.detection_graph.get_tensor_by_name('detection_classes:0')
    def load_graph(self,graph_file):
        """Loads a frozen inference graph"""
        # TensorFlow 1.x API: deserialize the GraphDef protobuf from
        # disk and import it into a fresh Graph.
        graph = tf.Graph()
        with graph.as_default():
            od_graph_def = tf.GraphDef()
            with tf.gfile.GFile(graph_file, 'rb') as fid:
                serialized_graph = fid.read()
                od_graph_def.ParseFromString(serialized_graph)
                tf.import_graph_def(od_graph_def, name='')
        return graph
    def get_classification(self, image):
        """Determines the color of the traffic light in the image
        Args:
            image (cv::Mat): image containing the traffic light
        Returns:
            int: ID of traffic light color (specified in styx_msgs/TrafficLight)
        """
        #TODO implement light color prediction
        # Prepend a batch dimension (SSD graphs take batched uint8 input).
        image_np = np.expand_dims(np.asarray(image, dtype=np.uint8), 0)
        with tf.Session(graph=self.detection_graph) as sess:
            # Actual detection.
            (boxes, scores, classes) = sess.run([self.detection_boxes, self.detection_scores, self.detection_classes],
                                                feed_dict={self.image_tensor: image_np})
            # Remove unnecessary dimensions
            boxes = np.squeeze(boxes)
            scores = np.squeeze(scores)
            classes = np.squeeze(classes)
            # confidence_cutoff = 0.8
            # # Filter boxes with a confidence score less than `confidence_cutoff`
            # boxes, scores, classes = filter_boxes(confidence_cutoff, boxes, scores, classes)
            min_score_thresh = .5
            # count1 = detections above threshold; count = how many of
            # those are labelled 'Red'.
            count = 0
            count1 = 0
            # print(scores)
            for i in range(boxes.shape[0]):
                if scores is None or scores[i] > min_score_thresh:
                    count1 += 1
                    class_name = self.category_index[classes[i]]['name']
                    # Traffic light thing
                    if class_name == 'Red':
                        count += 1
            # print(count)
            # Majority vote; ties (and zero detections) fall through to RED.
            if count < count1 - count:
                self.current_light = TrafficLight.GREEN
            else:
                self.current_light = TrafficLight.RED
        return self.current_light
|
[
"numpy.asarray",
"tensorflow.Session",
"tensorflow.gfile.GFile",
"tensorflow.Graph",
"numpy.squeeze",
"tensorflow.import_graph_def",
"tensorflow.GraphDef"
] |
[((1342, 1352), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (1350, 1352), True, 'import tensorflow as tf\n'), ((1413, 1426), 'tensorflow.GraphDef', 'tf.GraphDef', ([], {}), '()\n', (1424, 1426), True, 'import tensorflow as tf\n'), ((2063, 2096), 'numpy.asarray', 'np.asarray', (['image'], {'dtype': 'np.uint8'}), '(image, dtype=np.uint8)\n', (2073, 2096), True, 'import numpy as np\n'), ((2114, 2152), 'tensorflow.Session', 'tf.Session', ([], {'graph': 'self.detection_graph'}), '(graph=self.detection_graph)\n', (2124, 2152), True, 'import tensorflow as tf\n'), ((2484, 2501), 'numpy.squeeze', 'np.squeeze', (['boxes'], {}), '(boxes)\n', (2494, 2501), True, 'import numpy as np\n'), ((2523, 2541), 'numpy.squeeze', 'np.squeeze', (['scores'], {}), '(scores)\n', (2533, 2541), True, 'import numpy as np\n'), ((2564, 2583), 'numpy.squeeze', 'np.squeeze', (['classes'], {}), '(classes)\n', (2574, 2583), True, 'import numpy as np\n'), ((1444, 1476), 'tensorflow.gfile.GFile', 'tf.gfile.GFile', (['graph_file', '"""rb"""'], {}), "(graph_file, 'rb')\n", (1458, 1476), True, 'import tensorflow as tf\n'), ((1610, 1652), 'tensorflow.import_graph_def', 'tf.import_graph_def', (['od_graph_def'], {'name': '""""""'}), "(od_graph_def, name='')\n", (1629, 1652), True, 'import tensorflow as tf\n')]
|
# -*- coding: utf-8 -*-
"""Simple OSC client."""
import socket
try:
from ustruct import pack
except ImportError:
from struct import pack
from uosc.common import Bundle, to_frac
# Python 2/3 shim: on Python 2 a plain "" literal is a bytes object, so
# text is the separate `unicode` type; on Python 3 text is `str`.
if isinstance("", bytes):
    have_bytes = False
    unicodetype = unicode  # noqa (only defined on Python 2)
else:
    have_bytes = True
    unicodetype = str
# Maps a Python type — or, for the singletons True/False/None, the value
# itself — to the corresponding OSC typetag character.
TYPE_MAP = {
    int: "i",
    float: "f",
    bytes: "b",
    bytearray: "b",
    unicodetype: "s",
    True: "T",
    False: "F",
    None: "N",
}
def pack_addr(addr):
    """Resolve *addr* into the sockaddr tuple used by socket methods.

    Already-packed binary addresses pass through untouched; a
    ``(host, port)`` pair is resolved with ``socket.getaddrinfo``.
    """
    if isinstance(addr, (bytes, bytearray)):
        # Caller already supplied a packed address.
        return addr
    if len(addr) != 2:
        raise NotImplementedError("Only IPv4/v6 supported")
    host, port = addr
    results = socket.getaddrinfo(host, port)
    family, socktype, proto, canonname, sockaddr = results[0]
    return sockaddr
def pack_timetag(t):
    """Serialize the OSC timetag *t* into its 64-bit wire form
    (big-endian seconds + fractional part, via ``to_frac``)."""
    seconds_and_fraction = to_frac(t)
    return pack(">II", *seconds_and_fraction)
def pack_string(s, encoding="utf-8"):
    """Encode *s* as a binary OSC string.

    The result is ASCII-only, NUL-terminated, and padded so its total
    length is a multiple of four bytes.
    """
    if isinstance(s, unicodetype):
        s = s.encode(encoding)
    codes = (c if have_bytes else ord(c) for c in s)
    assert all(code < 128 for code in codes), \
        "OSC strings may only contain ASCII chars."
    slen = len(s)
    # Round up to the next multiple of 4, leaving room for >= 1 NUL.
    padded_len = (slen + 4) & ~0x03
    return s + b"\0" * (padded_len - slen)
def pack_blob(b, encoding="utf-8"):
    """Pack *b* into a binary OSC blob.

    Accepts bytes, bytearray, a tuple/list of ints, or a string (encoded
    with *encoding*). Wire format: big-endian 32-bit length, the raw
    data, then NUL padding to a 4-byte boundary.
    """
    if isinstance(b, (tuple, list)):
        b = bytearray(b)
    elif isinstance(b, unicodetype):
        b = b.encode(encoding)
    size = len(b)
    framed = pack(">I", size) + bytes(b)
    padding = ((size + 3) & ~0x03) - size
    return framed + b"\0" * padding
def pack_bundle(bundle):
    """Serialize *bundle* (recursively) into OSC bundle wire format."""
    elements = []
    for item in bundle:
        if isinstance(item, Bundle):
            packed = pack_bundle(item)        # nested bundle
        elif isinstance(item, tuple):
            packed = create_message(*item)    # (address, *args) shorthand
        else:
            packed = item                     # already-serialized message
        elements.append(pack(">I", len(packed)) + packed)
    header = b"#bundle\0" + pack_timetag(bundle.timetag)
    return header + b"".join(elements)
def pack_midi(val):
    """Pack a 4-byte MIDI value (typetags 'm'/'r') into binary form."""
    assert not isinstance(val, unicodetype), (
        "Value with tag 'm' or 'r' must be bytes, bytearray or a sequence of "
        "ints, not %s" % unicodetype
    )
    if not have_bytes and isinstance(val, str):
        # Python 2 byte-string: take each character's ordinal.
        val = (ord(c) for c in val)
    return pack("BBBB", *tuple(val))
def create_message(address, *args):
    """Build a binary OSC message for *address* from *args*.

    Each argument is either a plain Python value — its OSC typetag is
    inferred via ``TYPE_MAP`` (int: i, float: f, str: s, bytes/bytearray:
    b, None: N, True: T, False: F) — or an explicit ``(typetag, value)``
    pair for the remaining tags:

    * c: ``str`` of length 1
    * h: ``int``
    * d: ``float``
    * I: ``None`` (unused)
    * m: ``tuple / list`` of 4 ``int``s or ``bytes / bytearray`` of length 4
    * r: same as 'm'
    * t: OSC timetag as ``int / float`` seconds since the NTP epoch
    * S: ``str``
    """
    assert address.startswith("/"), "Address pattern must start with a slash."
    payload = []
    tags = [","]
    for item in args:
        item_type = type(item)
        if isinstance(item, tuple):
            # Explicit (typetag, value) pair.
            tag, item = item
        else:
            # Infer from the Python type, falling back to the value
            # itself for the singletons True/False/None.
            tag = TYPE_MAP.get(item_type) or TYPE_MAP.get(item)
        if tag in "ifd":
            payload.append(pack(">" + tag, item))
        elif tag in "sS":
            payload.append(pack_string(item))
        elif tag == "b":
            payload.append(pack_blob(item))
        elif tag in "rm":
            payload.append(pack_midi(item))
        elif tag == "c":
            payload.append(pack(">I", ord(item)))
        elif tag == "h":
            payload.append(pack(">q", item))
        elif tag == "t":
            payload.append(pack_timetag(item))
        elif tag not in "IFNT":
            raise TypeError("Argument of type '%s' not supported." % item_type)
        tags.append(tag)
    return pack_string(address) + pack_string("".join(tags)) + b"".join(payload)
class Client:
    """Minimal OSC-over-UDP client, usable as a context manager."""

    def __init__(self, host, port=None):
        """Accept ``(host, port)`` as two arguments, as one 2-sequence,
        or a bare port number (host then defaults to localhost)."""
        if port is None:
            if isinstance(host, (list, tuple)):
                host, port = host
            else:
                host, port = "127.0.0.1", host
        self.dest = pack_addr((host, port))
        self.sock = None

    def send(self, msg, *args, **kw):
        """Serialize *msg* (a Bundle, an address string plus args, or
        raw bytes) and send it via UDP; keyword ``dest`` overrides the
        default destination for this call."""
        target = pack_addr(kw.get("dest", self.dest))
        if not self.sock:
            # Open the UDP socket lazily, on first send.
            self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        if isinstance(msg, Bundle):
            msg = pack_bundle(msg)
        elif args or isinstance(msg, unicodetype):
            msg = create_message(msg, *args)
        self.sock.sendto(msg, target)

    def close(self):
        """Close the UDP socket, if one was opened."""
        if self.sock:
            self.sock.close()
            self.sock = None

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()
def send(dest, address, *args):
    """One-shot convenience: open a Client to *dest*, send one message,
    and always close the socket afterwards."""
    client = Client(dest)
    try:
        client.send(address, *args)
    finally:
        client.close()
|
[
"uosc.common.to_frac",
"socket.getaddrinfo",
"socket.socket",
"struct.pack"
] |
[((781, 817), 'socket.getaddrinfo', 'socket.getaddrinfo', (['addr[0]', 'addr[1]'], {}), '(addr[0], addr[1])\n', (799, 817), False, 'import socket\n'), ((1612, 1628), 'struct.pack', 'pack', (['""">I"""', 'blen'], {}), "('>I', blen)\n", (1616, 1628), False, 'from struct import pack\n'), ((952, 962), 'uosc.common.to_frac', 'to_frac', (['t'], {}), '(t)\n', (959, 962), False, 'from uosc.common import Bundle, to_frac\n'), ((5004, 5052), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (5017, 5052), False, 'import socket\n'), ((3833, 3857), 'struct.pack', 'pack', (["('>' + typetag)", 'arg'], {}), "('>' + typetag, arg)\n", (3837, 3857), False, 'from struct import pack\n'), ((4208, 4223), 'struct.pack', 'pack', (['""">q"""', 'arg'], {}), "('>q', arg)\n", (4212, 4223), False, 'from struct import pack\n')]
|
'''
Created on Jun 21, 2018
@author: moffat
'''
from django.contrib import admin
from ..models import IneligibleSubject
from ..admin_site import tp_screening_admin
class ChoiceInline(admin.TabularInline):
    # Tabular inline editor for IneligibleSubject rows.
    # NOTE(review): defined but not referenced by any ModelAdmin in this
    # module — confirm whether it should be attached via `inlines` or
    # removed.
    model = IneligibleSubject
@admin.register(IneligibleSubject, site=tp_screening_admin)
class IneligibleSubjectAdmin(admin.ModelAdmin):
    """Admin configuration for IneligibleSubject, registered on the
    screening admin site via the decorator above."""
    # Single read-mostly fieldset shown on the change form.
    fieldsets = (
        ('Screening Enrollment Loss', {
            'fields': ('screening_identifier',
                       'report_datetime',
                       'reasons_ineligible'),
        }),
    )
    # Columns shown on the changelist page.
    list_display = ('screening_identifier', 'report_datetime',
                    'reasons_ineligible')
    # list_filter = ['screening_identifier', 'eligible']
    # search_fields = ['screening_identifier']
    readonly_fields = ('screening_identifier',
                       'reasons_ineligible',)
# NOTE(review): IneligibleSubjectAdmin is already registered on
# tp_screening_admin by the @admin.register decorator above; this line
# additionally registers it on Django's default admin site. Confirm
# both registrations are intended.
admin.site.register(IneligibleSubject, IneligibleSubjectAdmin)
|
[
"django.contrib.admin.register",
"django.contrib.admin.site.register"
] |
[((243, 301), 'django.contrib.admin.register', 'admin.register', (['IneligibleSubject'], {'site': 'tp_screening_admin'}), '(IneligibleSubject, site=tp_screening_admin)\n', (257, 301), False, 'from django.contrib import admin\n'), ((868, 930), 'django.contrib.admin.site.register', 'admin.site.register', (['IneligibleSubject', 'IneligibleSubjectAdmin'], {}), '(IneligibleSubject, IneligibleSubjectAdmin)\n', (887, 930), False, 'from django.contrib import admin\n')]
|
#!/usr/bin/env python3
#
# Author: <NAME>
# License: BSD 2-clause
# Last Change: Mon Oct 25, 2021 at 09:41 PM +0200
import sys
import os
import os.path as op
from argparse import ArgumentParser, Action
from os import chdir
from shutil import rmtree
from pyBabyMaker.base import TermColor as TC
sys.path.insert(0, op.dirname(op.abspath(__file__)))
from utils import (
run_cmd_wrapper,
append_path, abs_path, ensure_dir, find_all_input,
aggregate_fltr, aggregate_output, load_yaml_db,
find_year, find_polarity,
generate_step2_name, parse_step2_name,
workflow_compile_cpp, workflow_cached_ntuple
)
#################################
# Command line arguments parser #
#################################
def parse_input():
    """Build the CLI parser and parse ``sys.argv``.

    Returns a namespace with ``job_name`` (positional) and ``debug``
    (boolean, from ``-d``/``--debug``).
    """
    cli = ArgumentParser(description='workflow for R(D(*)).')
    cli.add_argument('job_name', help='specify job name.')
    cli.add_argument(
        '-d', '--debug', action='store_true', help='enable debug mode.')
    return cli.parse_args()
###########
# Helpers #
###########
# Default output filter: keep Dst/D0 ntuples, drop auxiliary files.
rdx_default_fltr = aggregate_fltr(
    keep=[r'^(Dst|D0).*\.root'], blocked=['__aux'])
# Route main ntuples and '__aux' intermediates to separate output sets.
rdx_default_output_fltrs = {
    'ntuple': rdx_default_fltr,
    'ntuple_aux': aggregate_fltr(keep=['__aux']),
}
def rdx_mc_fltr(decay_mode):
    """Return the output-file filter for the MC *decay_mode*.

    Falls back to the default Dst/D0 filter when the decay mode has no
    'Keep' entry in the YAML database.
    """
    raw_db = load_yaml_db()
    # The DB's natural key is the decay ID; re-key by 'Filename' on the
    # fly so we can look up by decay mode.
    keep_db = {entry['Filename']: entry['Keep']
               for entry in raw_db.values() if 'Keep' in entry}
    if decay_mode in keep_db:
        pattern = r'^({}).*\.root'.format('|'.join(keep_db[decay_mode]))
        return aggregate_fltr(keep=[pattern])
    return rdx_default_fltr
def rdx_mc_add_info(decay_mode):
    """Look up tree-blocking info and the decay ID for *decay_mode*.

    Returns ``(blocked_trees, decay_id)`` where ``blocked_trees`` lists
    the trees to BLOCK (those not kept), or is ``None`` when the decay
    mode has no 'Keep' entry; ``decay_id`` defaults to '0' when unknown.
    """
    known_trees = ['D0', 'Dst']
    tree_dict = {
        'D0': 'TupleBminus/DecayTree',
        'Dst': 'TupleB0/DecayTree'
    }
    raw_db = load_yaml_db()
    # The DB's natural key is the decay ID; re-key by 'Filename' so we
    # can look up by decay mode.
    db_keep = {v['Filename']: v['Keep']
               for v in raw_db.values() if 'Keep' in v}
    db_id = {v['Filename']: k for k, v in raw_db.items()}
    decay_id = db_id.get(decay_mode, '0')
    if decay_mode not in db_keep:
        return None, decay_id
    kept = db_keep[decay_mode]
    # NOTE: the returned list names trees to BLOCK, not to keep!
    blocked = [tree_dict[t] for t in known_trees if t not in kept]
    return blocked, decay_id
######################
# Workflows: helpers #
######################
def workflow_ubdt(input_ntp,
                  trees=['TupleB0/DecayTree', 'TupleBminus/DecayTree'],
                  **kwargs):
    # Add the UBDT muon-ID branch (mu_isMuonTight) to *input_ntp*,
    # producing/caching 'ubdt.root' via workflow_cached_ntuple.
    # The mutable default list is safe here: it is only read, never
    # mutated.
    weight_file = abs_path('../run2-rdx/weights_run2_no_cut_ubdt.xml')
    cmd = 'addUBDTBranch {} mu_isMuonTight {} ubdt.root {}'.format(
        input_ntp, weight_file, ' '.join(trees))
    workflow_cached_ntuple(cmd, input_ntp, **kwargs)
    # Best-effort cleanup of the scratch 'weights' directory the tool
    # may leave behind; absence is not an error.
    try:
        rmtree('./weights')
    except FileNotFoundError:
        pass
def workflow_hammer(input_ntp,
                    trees=['TupleB0/DecayTree', 'TupleBminus/DecayTree'],
                    **kwargs):
    # Run the HAMMER form-factor reweighting (ReweightRDX) on each tree
    # of *input_ntp*, producing/caching 'hammer.root'.
    # Pick the run period from the year embedded in the ntuple path.
    run = 'run1' if '2011' in input_ntp or '2012' in input_ntp else 'run2'
    cmd = ['ReweightRDX '+input_ntp+' hammer.root '+t+' '+run for t in trees]
    workflow_cached_ntuple(
        cmd, input_ntp, output_ntp='hammer.root', cache_suffix='__aux_hammer',
        **kwargs)
def workflow_pid(input_ntp, pid_histo_folder, config, **kwargs):
    # Apply PID histogram weights to *input_ntp*, producing/caching
    # 'pid.root'. Year and polarity are parsed from the ntuple path.
    pid_histo_folder = abs_path(pid_histo_folder)
    config = abs_path(config)
    year = find_year(input_ntp)
    polarity = find_polarity(input_ntp)
    # This is in 'scripts' folder!
    cmd = 'apply_histo_weight.py {} {} pid.root -c {} --year {} --polarity {}'.format(
        input_ntp, pid_histo_folder, config, year, polarity)
    workflow_cached_ntuple(
        cmd, input_ntp, output_ntp='pid.root', cache_suffix='__aux_pid',
        **kwargs)
def workflow_data_mc(job_name, inputs,
                     output_dir=abs_path('../gen'),
                     patterns=['*.root'],
                     blocked_patterns=['__aux'],
                     executor=run_cmd_wrapper()
                     ):
    # Shared setup for the data/MC workflows: collect input ntuples,
    # create the job's working directory, and hand back the executor.
    # NOTE(review): the defaults `abs_path(...)` and `run_cmd_wrapper()`
    # are evaluated once at import time, not per call — confirm that is
    # intended.
    print('{}==== Job: {} ===={}'.format(TC.BOLD+TC.GREEN, job_name, TC.END))
    # Need to figure out the absolute path
    input_files = find_all_input(inputs, patterns, blocked_patterns)
    # Map each input's basename (without extension) to its full path;
    # each becomes a per-file subjob working directory.
    subworkdirs = {op.splitext(op.basename(i))[0]: i
                   for i in input_files}
    # Now ensure the working dir
    workdir = ensure_dir(op.join(output_dir, job_name))
    return subworkdirs, workdir, executor
#############
# Workflows #
#############
def workflow_data(job_name, inputs, input_yml,
                  use_ubdt=True,
                  output_ntp_name_gen=generate_step2_name,
                  output_fltr=rdx_default_output_fltrs,
                  cli_vars=None,
                  blocked_input_trees=None,
                  blocked_output_trees=None,
                  directive_override=None,
                  **kwargs):
    # Postprocess real-data ntuples: for each input file, optionally add
    # the UBDT branch, generate and compile a babymaker C++ program from
    # *input_yml*, run it, and aggregate its outputs.
    # NOTE: this function chdir()s around; statement order matters.
    subworkdirs, workdir, executor = workflow_data_mc(
        job_name, inputs, **kwargs)
    chdir(workdir)
    cpp_template = abs_path('../postprocess/cpp_templates/rdx.cpp')
    if cli_vars:
        # Flatten {k: v} into the 'k:v k:v ...' form babymaker expects.
        cli_vars = ' '.join([k+':'+v for k, v in cli_vars.items()])
    for subdir, input_ntp in subworkdirs.items():
        print('{}Working on {}...{}'.format(TC.GREEN, input_ntp, TC.END))
        ensure_dir(subdir, make_absolute=False)
        chdir(subdir)  # Switch to the workdir of the subjob
        if use_ubdt:
            # Generate a ubdt ntuple
            workflow_ubdt(input_ntp, executor=executor)
            bm_cmd = 'babymaker -i {} -o baby.cpp -n {} -t {} -f ubdt.root'
        else:
            bm_cmd = 'babymaker -i {} -o baby.cpp -n {} -t {}'
        if cli_vars:
            bm_cmd += ' -V '+cli_vars
        if blocked_input_trees:
            bm_cmd += ' -B '+' '.join(blocked_input_trees)
        if blocked_output_trees:
            bm_cmd += ' -X '+' '.join(blocked_output_trees)
        if directive_override:
            bm_cmd += ' -D '+' '.join([k+':'+v
                                      for k, v in directive_override.items()])
        executor(bm_cmd.format(abs_path(input_yml), input_ntp, cpp_template))
        workflow_compile_cpp('baby.cpp', executor=executor)
        output_suffix = output_ntp_name_gen(input_ntp)
        executor('./baby.exe --{}'.format(output_suffix))
        # Move the products up into the job directory, split by filter.
        aggregate_output('..', subdir, output_fltr)
        chdir('..')  # Switch back to parent workdir
def workflow_mc(job_name, inputs, input_yml,
                output_ntp_name_gen=generate_step2_name,
                pid_histo_folder='../run2-rdx/reweight/pid/root-run2-rdx_oldcut',
                config='../run2-rdx/reweight/pid/run2-rdx_oldcut.yml',
                output_fltr=rdx_default_output_fltrs,
                **kwargs):
    # Postprocess MC ntuples: per input file, build HAMMER and PID
    # friend ntuples, generate/compile/run babymaker, then aggregate.
    # NOTE: this function chdir()s around; statement order matters.
    subworkdirs, workdir, executor = workflow_data_mc(
        job_name, inputs, **kwargs)
    chdir(workdir)
    cpp_template = abs_path('../postprocess/cpp_templates/rdx.cpp')
    for subdir, input_ntp in subworkdirs.items():
        print('{}Working on {}...{}'.format(TC.GREEN, input_ntp, TC.END))
        ensure_dir(subdir, make_absolute=False)
        chdir(subdir)  # Switch to the workdir of the subjob
        # The decay mode is the third '--'-separated field of the
        # generated step-2 name.
        output_suffix = output_ntp_name_gen(input_ntp)
        decay_mode = output_suffix.split('--')[2]
        blocked_input_trees, decay_id = rdx_mc_add_info(decay_mode)
        # Generate a HAMMER ntuple
        workflow_hammer(input_ntp, executor=executor)
        # Generate PID weights
        workflow_pid(input_ntp, pid_histo_folder, config, executor=executor)
        bm_cmd = 'babymaker -i {} -o baby.cpp -n {} -t {} -f hammer.root pid.root'
        if blocked_input_trees:
            bm_cmd += ' -B '+' '.join(blocked_input_trees)
        bm_cmd += ' -V '+'cli_mc_id:'+decay_id
        executor(bm_cmd.format(abs_path(input_yml), input_ntp, cpp_template))
        workflow_compile_cpp('baby.cpp', executor=executor)
        executor('./baby.exe --{}'.format(output_suffix))
        # Move the products up into the job directory, split by filter.
        aggregate_output('..', subdir, output_fltr)
        chdir('..')  # Switch back to parent workdir
#####################
# Production config #
#####################
# Parse CLI arguments and build the shared command executor (honors
# the --debug flag by echoing instead of running, per run_cmd_wrapper).
args = parse_input()
executor = run_cmd_wrapper(args.debug)
# Registry of runnable jobs. Each value is a thunk taking the job name;
# the name doubles as the output subdirectory under ../gen.
JOBS = {
    # Run 2
    'rdx-ntuple-run2-data-oldcut': lambda name: workflow_data(
        name,
        '../ntuples/0.9.5-bugfix/Dst_D0-cutflow_data',
        '../postprocess/rdx-run2/rdx-run2_oldcut.yml',
        executor=executor
    ),
    'rdx-ntuple-run2-mc-demo': lambda name: workflow_mc(
        name,
        '../ntuples/0.9.5-bugfix/Dst_D0-mc/Dst_D0--21_10_08--mc--MC_2016_Beam6500GeV-2016-MagDown-Nu1.6-25ns-Pythia8_Sim09j_Trig0x6139160F_Reco16_Turbo03a_Filtered_11574011_D0TAUNU.SAFESTRIPTRIG.DST.root',
        '../postprocess/rdx-run2/rdx-run2_oldcut.yml',
        executor=executor
    ),
    # Run 2 debug
    'rdx-ntuple-run2-data-oldcut-no-Dst-veto': lambda name: workflow_data(
        name,
        [
            '../ntuples/0.9.4-trigger_emulation/Dst_D0-std',
            '../ntuples/0.9.5-bugfix/Dst_D0-cutflow_data',
        ],
        '../postprocess/rdx-run2/rdx-run2_oldcut.yml',
        executor=executor,
        cli_vars={'cli_no_dst_veto': '100.0'}
    ),
    # Run 2 cutflow
    'rdx-ntuple-run2-data-oldcut-cutflow': lambda name: workflow_data(
        name,
        '../ntuples/0.9.5-bugfix/Dst_D0-cutflow_data',
        '../postprocess/rdx-run2/rdx-run2_oldcut.yml',
        executor=executor,
        cli_vars={'cli_cutflow': 'true'}
    ),
    # Run 1
    'rdx-ntuple-run1-data': lambda name: workflow_data(
        name,
        '../ntuples/0.9.5-bugfix/Dst_D0-std',
        '../postprocess/rdx-run1/rdx-run1.yml',
        use_ubdt=False,
        executor=executor
    ),
    # Reference Run 1
    'ref-rdx-ntuple-run1-data-Dst': lambda name: workflow_data(
        name,
        '../ntuples/ref-rdx-run1/Dst-mix/Dst--21_10_21--mix--all--2011-2012--md-mu--phoebe.root',
        '../postprocess/ref-rdx-run1/ref-rdx-run1-Dst.yml',
        use_ubdt=False,
        output_ntp_name_gen=parse_step2_name,
        executor=executor,
        directive_override={'one_cand_only/enable': 'false'}
    ),
    'ref-rdx-ntuple-run1-data-D0': lambda name: workflow_data(
        name,
        '../ntuples/ref-rdx-run1/D0-mix/D0--21_10_21--mix--all--2011-2012--md-mu--phoebe.root',
        '../postprocess/ref-rdx-run1/ref-rdx-run1-D0.yml',
        use_ubdt=False,
        output_ntp_name_gen=parse_step2_name,
        executor=executor,
        directive_override={'one_cand_only/enable': 'false'}
    ),
}
# Dispatch the requested job; unknown names are reported, not raised.
if args.job_name in JOBS:
    JOBS[args.job_name](args.job_name)
else:
    print('Unknown job name: {}'.format(args.job_name))
|
[
"utils.aggregate_fltr",
"os.path.abspath",
"argparse.ArgumentParser",
"utils.find_polarity",
"shutil.rmtree",
"utils.ensure_dir",
"os.path.basename",
"utils.workflow_cached_ntuple",
"utils.aggregate_output",
"utils.run_cmd_wrapper",
"utils.find_all_input",
"utils.load_yaml_db",
"utils.find_year",
"utils.workflow_compile_cpp",
"os.path.join",
"os.chdir",
"utils.abs_path"
] |
[((1080, 1142), 'utils.aggregate_fltr', 'aggregate_fltr', ([], {'keep': "['^(Dst|D0).*\\\\.root']", 'blocked': "['__aux']"}), "(keep=['^(Dst|D0).*\\\\.root'], blocked=['__aux'])\n", (1094, 1142), False, 'from utils import run_cmd_wrapper, append_path, abs_path, ensure_dir, find_all_input, aggregate_fltr, aggregate_output, load_yaml_db, find_year, find_polarity, generate_step2_name, parse_step2_name, workflow_compile_cpp, workflow_cached_ntuple\n'), ((8265, 8292), 'utils.run_cmd_wrapper', 'run_cmd_wrapper', (['args.debug'], {}), '(args.debug)\n', (8280, 8292), False, 'from utils import run_cmd_wrapper, append_path, abs_path, ensure_dir, find_all_input, aggregate_fltr, aggregate_output, load_yaml_db, find_year, find_polarity, generate_step2_name, parse_step2_name, workflow_compile_cpp, workflow_cached_ntuple\n'), ((761, 812), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""workflow for R(D(*))."""'}), "(description='workflow for R(D(*)).')\n", (775, 812), False, 'from argparse import ArgumentParser, Action\n'), ((1228, 1258), 'utils.aggregate_fltr', 'aggregate_fltr', ([], {'keep': "['__aux']"}), "(keep=['__aux'])\n", (1242, 1258), False, 'from utils import run_cmd_wrapper, append_path, abs_path, ensure_dir, find_all_input, aggregate_fltr, aggregate_output, load_yaml_db, find_year, find_polarity, generate_step2_name, parse_step2_name, workflow_compile_cpp, workflow_cached_ntuple\n'), ((1302, 1316), 'utils.load_yaml_db', 'load_yaml_db', ([], {}), '()\n', (1314, 1316), False, 'from utils import run_cmd_wrapper, append_path, abs_path, ensure_dir, find_all_input, aggregate_fltr, aggregate_output, load_yaml_db, find_year, find_polarity, generate_step2_name, parse_step2_name, workflow_compile_cpp, workflow_cached_ntuple\n'), ((1829, 1843), 'utils.load_yaml_db', 'load_yaml_db', ([], {}), '()\n', (1841, 1843), False, 'from utils import run_cmd_wrapper, append_path, abs_path, ensure_dir, find_all_input, aggregate_fltr, aggregate_output, load_yaml_db, 
find_year, find_polarity, generate_step2_name, parse_step2_name, workflow_compile_cpp, workflow_cached_ntuple\n'), ((2633, 2685), 'utils.abs_path', 'abs_path', (['"""../run2-rdx/weights_run2_no_cut_ubdt.xml"""'], {}), "('../run2-rdx/weights_run2_no_cut_ubdt.xml')\n", (2641, 2685), False, 'from utils import run_cmd_wrapper, append_path, abs_path, ensure_dir, find_all_input, aggregate_fltr, aggregate_output, load_yaml_db, find_year, find_polarity, generate_step2_name, parse_step2_name, workflow_compile_cpp, workflow_cached_ntuple\n'), ((2807, 2855), 'utils.workflow_cached_ntuple', 'workflow_cached_ntuple', (['cmd', 'input_ntp'], {}), '(cmd, input_ntp, **kwargs)\n', (2829, 2855), False, 'from utils import run_cmd_wrapper, append_path, abs_path, ensure_dir, find_all_input, aggregate_fltr, aggregate_output, load_yaml_db, find_year, find_polarity, generate_step2_name, parse_step2_name, workflow_compile_cpp, workflow_cached_ntuple\n'), ((3231, 3338), 'utils.workflow_cached_ntuple', 'workflow_cached_ntuple', (['cmd', 'input_ntp'], {'output_ntp': '"""hammer.root"""', 'cache_suffix': '"""__aux_hammer"""'}), "(cmd, input_ntp, output_ntp='hammer.root',\n cache_suffix='__aux_hammer', **kwargs)\n", (3253, 3338), False, 'from utils import run_cmd_wrapper, append_path, abs_path, ensure_dir, find_all_input, aggregate_fltr, aggregate_output, load_yaml_db, find_year, find_polarity, generate_step2_name, parse_step2_name, workflow_compile_cpp, workflow_cached_ntuple\n'), ((3442, 3468), 'utils.abs_path', 'abs_path', (['pid_histo_folder'], {}), '(pid_histo_folder)\n', (3450, 3468), False, 'from utils import run_cmd_wrapper, append_path, abs_path, ensure_dir, find_all_input, aggregate_fltr, aggregate_output, load_yaml_db, find_year, find_polarity, generate_step2_name, parse_step2_name, workflow_compile_cpp, workflow_cached_ntuple\n'), ((3482, 3498), 'utils.abs_path', 'abs_path', (['config'], {}), '(config)\n', (3490, 3498), False, 'from utils import run_cmd_wrapper, append_path, abs_path, 
ensure_dir, find_all_input, aggregate_fltr, aggregate_output, load_yaml_db, find_year, find_polarity, generate_step2_name, parse_step2_name, workflow_compile_cpp, workflow_cached_ntuple\n'), ((3511, 3531), 'utils.find_year', 'find_year', (['input_ntp'], {}), '(input_ntp)\n', (3520, 3531), False, 'from utils import run_cmd_wrapper, append_path, abs_path, ensure_dir, find_all_input, aggregate_fltr, aggregate_output, load_yaml_db, find_year, find_polarity, generate_step2_name, parse_step2_name, workflow_compile_cpp, workflow_cached_ntuple\n'), ((3547, 3571), 'utils.find_polarity', 'find_polarity', (['input_ntp'], {}), '(input_ntp)\n', (3560, 3571), False, 'from utils import run_cmd_wrapper, append_path, abs_path, ensure_dir, find_all_input, aggregate_fltr, aggregate_output, load_yaml_db, find_year, find_polarity, generate_step2_name, parse_step2_name, workflow_compile_cpp, workflow_cached_ntuple\n'), ((3760, 3862), 'utils.workflow_cached_ntuple', 'workflow_cached_ntuple', (['cmd', 'input_ntp'], {'output_ntp': '"""pid.root"""', 'cache_suffix': '"""__aux_pid"""'}), "(cmd, input_ntp, output_ntp='pid.root', cache_suffix=\n '__aux_pid', **kwargs)\n", (3782, 3862), False, 'from utils import run_cmd_wrapper, append_path, abs_path, ensure_dir, find_all_input, aggregate_fltr, aggregate_output, load_yaml_db, find_year, find_polarity, generate_step2_name, parse_step2_name, workflow_compile_cpp, workflow_cached_ntuple\n'), ((3948, 3966), 'utils.abs_path', 'abs_path', (['"""../gen"""'], {}), "('../gen')\n", (3956, 3966), False, 'from utils import run_cmd_wrapper, append_path, abs_path, ensure_dir, find_all_input, aggregate_fltr, aggregate_output, load_yaml_db, find_year, find_polarity, generate_step2_name, parse_step2_name, workflow_compile_cpp, workflow_cached_ntuple\n'), ((4089, 4106), 'utils.run_cmd_wrapper', 'run_cmd_wrapper', ([], {}), '()\n', (4104, 4106), False, 'from utils import run_cmd_wrapper, append_path, abs_path, ensure_dir, find_all_input, aggregate_fltr, 
aggregate_output, load_yaml_db, find_year, find_polarity, generate_step2_name, parse_step2_name, workflow_compile_cpp, workflow_cached_ntuple\n'), ((4271, 4321), 'utils.find_all_input', 'find_all_input', (['inputs', 'patterns', 'blocked_patterns'], {}), '(inputs, patterns, blocked_patterns)\n', (4285, 4321), False, 'from utils import run_cmd_wrapper, append_path, abs_path, ensure_dir, find_all_input, aggregate_fltr, aggregate_output, load_yaml_db, find_year, find_polarity, generate_step2_name, parse_step2_name, workflow_compile_cpp, workflow_cached_ntuple\n'), ((5078, 5092), 'os.chdir', 'chdir', (['workdir'], {}), '(workdir)\n', (5083, 5092), False, 'from os import chdir\n'), ((5112, 5160), 'utils.abs_path', 'abs_path', (['"""../postprocess/cpp_templates/rdx.cpp"""'], {}), "('../postprocess/cpp_templates/rdx.cpp')\n", (5120, 5160), False, 'from utils import run_cmd_wrapper, append_path, abs_path, ensure_dir, find_all_input, aggregate_fltr, aggregate_output, load_yaml_db, find_year, find_polarity, generate_step2_name, parse_step2_name, workflow_compile_cpp, workflow_cached_ntuple\n'), ((6946, 6960), 'os.chdir', 'chdir', (['workdir'], {}), '(workdir)\n', (6951, 6960), False, 'from os import chdir\n'), ((6980, 7028), 'utils.abs_path', 'abs_path', (['"""../postprocess/cpp_templates/rdx.cpp"""'], {}), "('../postprocess/cpp_templates/rdx.cpp')\n", (6988, 7028), False, 'from utils import run_cmd_wrapper, append_path, abs_path, ensure_dir, find_all_input, aggregate_fltr, aggregate_output, load_yaml_db, find_year, find_polarity, generate_step2_name, parse_step2_name, workflow_compile_cpp, workflow_cached_ntuple\n'), ((328, 348), 'os.path.abspath', 'op.abspath', (['__file__'], {}), '(__file__)\n', (338, 348), True, 'import os.path as op\n'), ((2873, 2892), 'shutil.rmtree', 'rmtree', (['"""./weights"""'], {}), "('./weights')\n", (2879, 2892), False, 'from shutil import rmtree\n'), ((4475, 4504), 'os.path.join', 'op.join', (['output_dir', 'job_name'], {}), '(output_dir, 
job_name)\n', (4482, 4504), True, 'import os.path as op\n'), ((5380, 5419), 'utils.ensure_dir', 'ensure_dir', (['subdir'], {'make_absolute': '(False)'}), '(subdir, make_absolute=False)\n', (5390, 5419), False, 'from utils import run_cmd_wrapper, append_path, abs_path, ensure_dir, find_all_input, aggregate_fltr, aggregate_output, load_yaml_db, find_year, find_polarity, generate_step2_name, parse_step2_name, workflow_compile_cpp, workflow_cached_ntuple\n'), ((5428, 5441), 'os.chdir', 'chdir', (['subdir'], {}), '(subdir)\n', (5433, 5441), False, 'from os import chdir\n'), ((6241, 6292), 'utils.workflow_compile_cpp', 'workflow_compile_cpp', (['"""baby.cpp"""'], {'executor': 'executor'}), "('baby.cpp', executor=executor)\n", (6261, 6292), False, 'from utils import run_cmd_wrapper, append_path, abs_path, ensure_dir, find_all_input, aggregate_fltr, aggregate_output, load_yaml_db, find_year, find_polarity, generate_step2_name, parse_step2_name, workflow_compile_cpp, workflow_cached_ntuple\n'), ((6416, 6459), 'utils.aggregate_output', 'aggregate_output', (['""".."""', 'subdir', 'output_fltr'], {}), "('..', subdir, output_fltr)\n", (6432, 6459), False, 'from utils import run_cmd_wrapper, append_path, abs_path, ensure_dir, find_all_input, aggregate_fltr, aggregate_output, load_yaml_db, find_year, find_polarity, generate_step2_name, parse_step2_name, workflow_compile_cpp, workflow_cached_ntuple\n'), ((6468, 6479), 'os.chdir', 'chdir', (['""".."""'], {}), "('..')\n", (6473, 6479), False, 'from os import chdir\n'), ((7162, 7201), 'utils.ensure_dir', 'ensure_dir', (['subdir'], {'make_absolute': '(False)'}), '(subdir, make_absolute=False)\n', (7172, 7201), False, 'from utils import run_cmd_wrapper, append_path, abs_path, ensure_dir, find_all_input, aggregate_fltr, aggregate_output, load_yaml_db, find_year, find_polarity, generate_step2_name, parse_step2_name, workflow_compile_cpp, workflow_cached_ntuple\n'), ((7210, 7223), 'os.chdir', 'chdir', (['subdir'], {}), '(subdir)\n', 
(7215, 7223), False, 'from os import chdir\n'), ((7947, 7998), 'utils.workflow_compile_cpp', 'workflow_compile_cpp', (['"""baby.cpp"""'], {'executor': 'executor'}), "('baby.cpp', executor=executor)\n", (7967, 7998), False, 'from utils import run_cmd_wrapper, append_path, abs_path, ensure_dir, find_all_input, aggregate_fltr, aggregate_output, load_yaml_db, find_year, find_polarity, generate_step2_name, parse_step2_name, workflow_compile_cpp, workflow_cached_ntuple\n'), ((8067, 8110), 'utils.aggregate_output', 'aggregate_output', (['""".."""', 'subdir', 'output_fltr'], {}), "('..', subdir, output_fltr)\n", (8083, 8110), False, 'from utils import run_cmd_wrapper, append_path, abs_path, ensure_dir, find_all_input, aggregate_fltr, aggregate_output, load_yaml_db, find_year, find_polarity, generate_step2_name, parse_step2_name, workflow_compile_cpp, workflow_cached_ntuple\n'), ((8119, 8130), 'os.chdir', 'chdir', (['""".."""'], {}), "('..')\n", (8124, 8130), False, 'from os import chdir\n'), ((4353, 4367), 'os.path.basename', 'op.basename', (['i'], {}), '(i)\n', (4364, 4367), True, 'import os.path as op\n'), ((6186, 6205), 'utils.abs_path', 'abs_path', (['input_yml'], {}), '(input_yml)\n', (6194, 6205), False, 'from utils import run_cmd_wrapper, append_path, abs_path, ensure_dir, find_all_input, aggregate_fltr, aggregate_output, load_yaml_db, find_year, find_polarity, generate_step2_name, parse_step2_name, workflow_compile_cpp, workflow_cached_ntuple\n'), ((7892, 7911), 'utils.abs_path', 'abs_path', (['input_yml'], {}), '(input_yml)\n', (7900, 7911), False, 'from utils import run_cmd_wrapper, append_path, abs_path, ensure_dir, find_all_input, aggregate_fltr, aggregate_output, load_yaml_db, find_year, find_polarity, generate_step2_name, parse_step2_name, workflow_compile_cpp, workflow_cached_ntuple\n')]
|
#!/usr/bin/env python
# coding: utf-8
from mpi4py import MPI
from PyQNLPSimulator import PyQNLPSimulator as p
import QNLP as q
import numpy as np
num_qubits = 24
# Create simulator object
use_fusion = False
sim = p(num_qubits, use_fusion)
sim.initRegister()
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
val = 0
sim.applyGateX(0)
sim.applyGateH(2)
sim.applyGateH(4)
sim.applyGateX(7)
sim.applyGateX(22)
val = sim.applyMeasurementToRegister(range(num_qubits), True)
print("RANK={} VAL={}".format(rank,val))
comm.Barrier()
"""
# Note, performing operations on rank=0 only will causes failures. The following example will fail.
if rank == 0:
sim.initRegister()
val = sim.applyMeasurementToRegister(range(num_qubits), True)
print(val)
"""
|
[
"PyQNLPSimulator.PyQNLPSimulator"
] |
[((219, 244), 'PyQNLPSimulator.PyQNLPSimulator', 'p', (['num_qubits', 'use_fusion'], {}), '(num_qubits, use_fusion)\n', (220, 244), True, 'from PyQNLPSimulator import PyQNLPSimulator as p\n')]
|
import pandas as pd
import warnings
from ...pysd import read_vensim
from io import open
def read_tabular(table_file, sheetname='Sheet1'):
    """
    Reads a vensim syntax model which has been formatted as a table.

    This is useful in contexts where model building is performed
    without the aid of Vensim.

    Parameters
    ----------
    table_file: .csv, .tab or .xls(x) file
        Table should have columns titled as in the table below

        | Variable | Equation | Units | Min | Max | Comment          |
        | :------- | :------- | :---- | :-- | :-- | :--------------- |
        | Age      | 5        | Yrs   | 0   | inf | How old are you? |
        | ...      | ...      | ...   | ... | ... | ...              |

    sheetname: basestring
        if the model is specified in an excel file, what sheet?

    Returns
    -------
    PySD Model Object

    Raises
    ------
    ValueError
        if the file type is unrecognized or the mandatory columns
        ("Variable", "Equation") are missing.

    Notes
    -----
    Creates an intermediate file in vensim `.mdl` syntax, just so that
    the existing vensim parsing machinery can be used.
    """
    if not isinstance(table_file, str):
        raise ValueError('Unknown file or table type')

    extension = table_file.split('.')[-1]
    if extension in ['xls', 'xlsx']:
        table = pd.read_excel(table_file, sheetname=sheetname)
    elif extension == 'csv':
        table = pd.read_csv(table_file, encoding='UTF-8')
    elif extension == 'tab':
        table = pd.read_csv(table_file, sep='\t', encoding='UTF-8')
    else:
        raise ValueError('Unknown file or table type')

    if not set(table.columns).issuperset({'Variable', 'Equation'}):
        raise ValueError('Table must contain at least columns "Variable" and "Equation"')

    # Optional columns default to empty strings. "Comment" is included here:
    # previously a missing Comment column raised a KeyError when the mdl
    # template below was formatted.
    for col in ('Units', 'Min', 'Max', 'Comment'):
        if col not in set(table.columns):
            warnings.warn('Column for "%s" not found' % col,
                          RuntimeWarning, stacklevel=2)
            table[col] = ''

    # Bug fix: replace only the trailing extension. str.replace swapped the
    # first occurrence of the extension anywhere in the path, so e.g.
    # 'csv_models/model.csv' became 'mdl_models/model.csv'.
    mdl_file = table_file[:-len(extension)] + 'mdl'

    with open(mdl_file, 'w', encoding='UTF-8') as outfile:
        for element in table.to_dict(orient='records'):
            outfile.write(
                "%(Variable)s = \n"
                "\t %(Equation)s \n"
                "\t~\t %(Units)s [%(Min)s, %(Max)s] \n"
                "\t~\t %(Comment)s \n\t|\n\n" % element
            )
        outfile.write(u'\\\---/// Sketch information - this is where sketch stuff would go.')
    return read_vensim(mdl_file)
|
[
"pandas.read_excel",
"warnings.warn",
"pandas.read_csv",
"io.open"
] |
[((1751, 1826), 'warnings.warn', 'warnings.warn', (['"""Column for "Units" not found"""', 'RuntimeWarning'], {'stacklevel': '(2)'}), '(\'Column for "Units" not found\', RuntimeWarning, stacklevel=2)\n', (1764, 1826), False, 'import warnings\n'), ((1904, 1977), 'warnings.warn', 'warnings.warn', (['"""Column for "Min" not found"""', 'RuntimeWarning'], {'stacklevel': '(2)'}), '(\'Column for "Min" not found\', RuntimeWarning, stacklevel=2)\n', (1917, 1977), False, 'import warnings\n'), ((2053, 2126), 'warnings.warn', 'warnings.warn', (['"""Column for "Max" not found"""', 'RuntimeWarning'], {'stacklevel': '(2)'}), '(\'Column for "Max" not found\', RuntimeWarning, stacklevel=2)\n', (2066, 2126), False, 'import warnings\n'), ((2216, 2253), 'io.open', 'open', (['mdl_file', '"""w"""'], {'encoding': '"""UTF-8"""'}), "(mdl_file, 'w', encoding='UTF-8')\n", (2220, 2253), False, 'from io import open\n'), ((1156, 1202), 'pandas.read_excel', 'pd.read_excel', (['table_file'], {'sheetname': 'sheetname'}), '(table_file, sheetname=sheetname)\n', (1169, 1202), True, 'import pandas as pd\n'), ((1256, 1297), 'pandas.read_csv', 'pd.read_csv', (['table_file'], {'encoding': '"""UTF-8"""'}), "(table_file, encoding='UTF-8')\n", (1267, 1297), True, 'import pandas as pd\n'), ((1351, 1402), 'pandas.read_csv', 'pd.read_csv', (['table_file'], {'sep': '"""\t"""', 'encoding': '"""UTF-8"""'}), "(table_file, sep='\\t', encoding='UTF-8')\n", (1362, 1402), True, 'import pandas as pd\n')]
|
import netscrypt
# Connect to a netscrypt RPC server on localhost:6666; the context manager
# closes the connection on exit.
with netscrypt.Client ('localhost', 6666) as client:
 # calling the client with a name fetches the remote 'dogs' collection
 dogs = client ('dogs')
 for dog in dogs:
  print (dog.name)
  print (dog.speak ('wraff'))  # invoke a remote method with an argument
|
[
"netscrypt.Client"
] |
[((23, 58), 'netscrypt.Client', 'netscrypt.Client', (['"""localhost"""', '(6666)'], {}), "('localhost', 6666)\n", (39, 58), False, 'import netscrypt\n')]
|
import torch
from torch import nn
class ScaleNorm(nn.Module):
    """L2 scale normalization with a single learned scalar gain.

    Normalizes the last dimension of the input to (scaled) unit length,
    then multiplies by the learned gain ``g``. The norm is floored at
    ``eps`` to avoid division by zero.
    """

    def __init__(self, dim, eps=1e-5):
        super().__init__()
        self.scale = dim ** -0.5  # fixed rescaling factor, 1/sqrt(dim)
        self.g = nn.Parameter(torch.ones(1))  # learned scalar gain
        self.eps = eps

    def forward(self, x):
        norm = torch.norm(x, dim=-1, keepdim=True)
        denom = norm.clamp(min=self.eps) * self.scale
        return x / denom * self.g
|
[
"torch.norm",
"torch.ones"
] |
[((201, 214), 'torch.ones', 'torch.ones', (['(1)'], {}), '(1)\n', (211, 214), False, 'import torch\n'), ((282, 317), 'torch.norm', 'torch.norm', (['x'], {'dim': '(-1)', 'keepdim': '(True)'}), '(x, dim=-1, keepdim=True)\n', (292, 317), False, 'import torch\n')]
|
from flask import Blueprint, redirect, url_for, jsonify, make_response, request
from ..models.users import User
from ..utils.database import db
from flask_login import login_user, logout_user
from ..controllers.methods import check_email_exists, check_username_exists
# Blueprint grouping the authentication endpoints (signup/login/logout)
auth_bp = Blueprint("auth", __name__)
@auth_bp.route("/signup", methods=["POST"])
def create_account():
data = request.get_json()
username = data.get("username")
email = data.get("email")
tel_phone = data.get("tel_phone")
password = data.get("password")
confirm = data.get("password")
if not check_email_exists(email) and not check_username_exists(password):
new_user = User(username=username, email=email, tel_phone=tel_phone)
new_user.create_password_hash(password)
new_user.save()
return make_response(
jsonify({"message": "Account Created Successully!", "success": True}), 201
)
else:
return make_response(
jsonify({"message": "Invalid Credentials", "success": False})
)
@auth_bp.route("/login", methods=["POST"])
def sign_in_user():
username = request.form.get("username")
password = request.form.get("password")
user = User.query.filter_by(username=username).first()
if user and user.check_password(password):
login_user(user)
if user.isAdmin:
return redirect(url_for("ui.new_orders"))
else:
return redirect(url_for("ui.users_orders"))
else:
return redirect(url_for("ui.login_failed"))
@auth_bp.route("/logout")
def logout():
logout_user()
return redirect(url_for("ui.index"))
|
[
"flask.Blueprint",
"flask.request.form.get",
"flask_login.login_user",
"flask_login.logout_user",
"flask.url_for",
"flask.jsonify",
"flask.request.get_json"
] |
[((280, 307), 'flask.Blueprint', 'Blueprint', (['"""auth"""', '__name__'], {}), "('auth', __name__)\n", (289, 307), False, 'from flask import Blueprint, redirect, url_for, jsonify, make_response, request\n'), ((388, 406), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (404, 406), False, 'from flask import Blueprint, redirect, url_for, jsonify, make_response, request\n'), ((1146, 1174), 'flask.request.form.get', 'request.form.get', (['"""username"""'], {}), "('username')\n", (1162, 1174), False, 'from flask import Blueprint, redirect, url_for, jsonify, make_response, request\n'), ((1190, 1218), 'flask.request.form.get', 'request.form.get', (['"""password"""'], {}), "('password')\n", (1206, 1218), False, 'from flask import Blueprint, redirect, url_for, jsonify, make_response, request\n'), ((1612, 1625), 'flask_login.logout_user', 'logout_user', ([], {}), '()\n', (1623, 1625), False, 'from flask_login import login_user, logout_user\n'), ((1335, 1351), 'flask_login.login_user', 'login_user', (['user'], {}), '(user)\n', (1345, 1351), False, 'from flask_login import login_user, logout_user\n'), ((1646, 1665), 'flask.url_for', 'url_for', (['"""ui.index"""'], {}), "('ui.index')\n", (1653, 1665), False, 'from flask import Blueprint, redirect, url_for, jsonify, make_response, request\n'), ((856, 925), 'flask.jsonify', 'jsonify', (["{'message': 'Account Created Successully!', 'success': True}"], {}), "({'message': 'Account Created Successully!', 'success': True})\n", (863, 925), False, 'from flask import Blueprint, redirect, url_for, jsonify, make_response, request\n'), ((994, 1055), 'flask.jsonify', 'jsonify', (["{'message': 'Invalid Credentials', 'success': False}"], {}), "({'message': 'Invalid Credentials', 'success': False})\n", (1001, 1055), False, 'from flask import Blueprint, redirect, url_for, jsonify, make_response, request\n'), ((1538, 1564), 'flask.url_for', 'url_for', (['"""ui.login_failed"""'], {}), "('ui.login_failed')\n", (1545, 1564), False, 
'from flask import Blueprint, redirect, url_for, jsonify, make_response, request\n'), ((1406, 1430), 'flask.url_for', 'url_for', (['"""ui.new_orders"""'], {}), "('ui.new_orders')\n", (1413, 1430), False, 'from flask import Blueprint, redirect, url_for, jsonify, make_response, request\n'), ((1475, 1501), 'flask.url_for', 'url_for', (['"""ui.users_orders"""'], {}), "('ui.users_orders')\n", (1482, 1501), False, 'from flask import Blueprint, redirect, url_for, jsonify, make_response, request\n')]
|
from mycroft.skills import MycroftSkill
from mycroft.messagebus.message import Message
from mail_monitor import EmailMonitor
from os.path import dirname, join
class EmailMonitorSkill(MycroftSkill):
    """Mycroft skill that watches a mail inbox in the background and
    replays whitelisted emails onto the messagebus as utterances."""

    def __init__(self):
        super().__init__()
        # account settings come from the core configuration ("email" section)
        self.email_config = self.config_core.get("email", {})
        if "processed_emails" not in self.settings:
            self.settings["processed_emails"] = []

    def initialize(self):
        """Validate configuration and start the background mail monitor."""
        # mail account, password and a non-empty sender whitelist are mandatory
        if "mail" not in self.email_config or "password" not in \
                self.email_config or "whitelist" not in self.email_config or \
                not self.email_config["whitelist"]:
            self.speak_dialog("error")
            raise RuntimeError("email monitor is missing mandatory configuration")
        else:
            # search filter: unseen messages only, unless configured to
            # also include already-read mail
            mail_filter = "(UNSEEN)"  # renamed: previously shadowed the 'filter' builtin
            if self.email_config.get("include_read"):
                mail_filter = "(ALL)"
            if "include_read" in self.email_config:
                self.email_config.pop("include_read")
            self.email_config["filter"] = mail_filter
            try:
                self.mail_client = EmailMonitor(**self.email_config)
                self.mail_client.on_new_email = self.handle_new_email
                self.mail_client.setDaemon(True)  # don't block interpreter shutdown
                self.mail_client.start()
            except Exception:
                # bug fix: was a bare 'except:', which also trapped
                # SystemExit/KeyboardInterrupt before re-raising
                self.speak_dialog("error")
                raise

    def get_intro_message(self):
        self.speak_dialog("intro")

    def handle_new_email(self, email):
        """Forward a newly seen email to the intent service as an utterance."""
        if email in self.settings["processed_emails"]:
            # don't process same email twice
            # important if "include_read" is set
            # some uses cases, like using siri Notes, will mark emails as read
            return
        self.gui.show_animated_image(
            join(dirname(__file__), "ui", "inbox.gif"), override_idle=2)
        self.settings["processed_emails"].append(email)
        self.log.debug(str(email))
        self.bus.emit(Message("recognizer_loop:utterance",
                              {"utterances": [email['payload']]},
                              {"source": email['email'],
                               "destinatary": "skills"}))

    def shutdown(self):
        """Stop the background monitor thread on skill shutdown."""
        self.mail_client.stop()
def create_skill():
    # entry point used by the Mycroft skill loader to instantiate the skill
    return EmailMonitorSkill()
|
[
"mail_monitor.EmailMonitor",
"os.path.dirname",
"mycroft.messagebus.message.Message"
] |
[((1908, 2038), 'mycroft.messagebus.message.Message', 'Message', (['"""recognizer_loop:utterance"""', "{'utterances': [email['payload']]}", "{'source': email['email'], 'destinatary': 'skills'}"], {}), "('recognizer_loop:utterance', {'utterances': [email['payload']]}, {\n 'source': email['email'], 'destinatary': 'skills'})\n", (1915, 2038), False, 'from mycroft.messagebus.message import Message\n'), ((1049, 1082), 'mail_monitor.EmailMonitor', 'EmailMonitor', ([], {}), '(**self.email_config)\n', (1061, 1082), False, 'from mail_monitor import EmailMonitor\n'), ((1739, 1756), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (1746, 1756), False, 'from os.path import dirname, join\n')]
|
'''
Created on Feb. 25, 2020
@author: cefect
helper functions w/ Qgis api
'''
#==============================================================================
# imports------------
#==============================================================================
#python
import os, configparser, logging, inspect, copy, datetime, re
import pandas as pd
import numpy as np
#qgis
from qgis.core import *
from qgis.analysis import QgsNativeAlgorithms
from qgis.gui import QgisInterface
from PyQt5.QtCore import QVariant, QMetaType
from PyQt5.QtWidgets import QProgressBar
"""throws depceciationWarning"""
import processing
#==============================================================================
# customs
#==============================================================================
mod_logger = logging.getLogger('Q') #module-level root logger for this package
from hlpr.exceptions import QError as Error
import hlpr.basic as basic
from hlpr.basic import get_valid_filename
#==============================================================================
# globals
#==============================================================================
#maximum field-name length supported by each vector driver
fieldn_max_d = {'SpatiaLite':50, 'ESRI Shapefile':10, 'Memory storage':50, 'GPKG':50}
#numpy dtype character codes to native python types
npc_pytype_d = {'?':bool,
                'b':int,
                'd':float,
                'e':float,
                'f':float,
                'q':int,
                'h':int,
                'l':int,
                'i':int,
                'g':float,
                'U':str,
                'B':int,
                'L':int,
                'Q':int,
                'H':int,
                'I':int,
                'O':str, #this is the catchall 'object'
                }
type_qvar_py_d = {10:str, 2:int, 135:float, 6:float, 4:int, 1:bool, 16:datetime.datetime, 12:str} #QVariant.types to pythonic types
#parameters for lots of statistic algos
stat_pars_d = {'First': 0, 'Last': 1, 'Count': 2, 'Sum': 3, 'Mean': 4, 'Median': 5,
               'St dev (pop)': 6, 'Minimum': 7, 'Maximum': 8, 'Range': 9, 'Minority': 10,
               'Majority': 11, 'Variety': 12, 'Q1': 13, 'Q3': 14, 'IQR': 15}
#==============================================================================
# classes -------------
#==============================================================================
class Qcoms(basic.ComWrkr): #baseclass for working w/ pyqgis outside the native console
    """Common QGIS worker: session/crs handling plus layer load/write helpers."""
    driverName = 'SpatiaLite' #default data creation driver type
    out_dName = driverName #default output driver/file type
    q_hndls = ['crs', 'crsid', 'algo_init', 'qap', 'vlay_drivers'] #session handles shared between workers
    algo_init = False #flag indicating whether the algos have been initialized
    qap = None #QgsApplication instance (populated for standalone runs)
    mstore = None #QgsMapLayerStore holding intermediate layers
    def __init__(self,
                 feedback=None,
                 #init controls
                 init_q_d = {}, #container of already-initialized QGIS session handles
                 crsid = 'EPSG:4326', #default crs authid if no init_q_d is passed
                 **kwargs
                 ):
        """Initialize the QGIS worker and (if needed) a standalone session.

        Parameters
        ----------
        feedback: optional
            feedback object passed through to processing algorithms;
            a fresh MyFeedBackQ is built when None.
        init_q_d: dict, optional
            pre-initialized session handles (keys must cover q_hndls).
            Pass this from a parent session to avoid re-instancing QGIS;
            leave empty for standalone runs.
        crsid: str, optional
            authid (e.g. 'EPSG:4326') used when no init_q_d is supplied.

        Notes
        -----
        QprojPlugs don't execute this super cascade.
        For single standalone runs all handles are generated and QGIS is
        instanced; console/session runs should pass handles instead.
        For swapping crs, run set_crs() on the session prior to spawning
        the child.
        """
        #=======================================================================
        # defaults
        #=======================================================================
        if feedback is None:
            """by default, building our own feedbacker
            passed to ComWrkr.setup_feedback()
            """
            feedback = MyFeedBackQ()
        #=======================================================================
        # cascade
        #=======================================================================
        super().__init__(
            feedback = feedback,
            **kwargs) #initialize the baseclass
        log = self.logger
        #=======================================================================
        # attachments
        #=======================================================================
        self.fieldn_max_d=fieldn_max_d
        self.crsid=crsid
        #=======================================================================
        # Qgis setup COMMON
        #=======================================================================
        """both Plugin and StandAlone runs should call these"""
        self.qproj = QgsProject.instance()
        """
        each worker will have their own store
        used to wipe any intermediate layers
        """
        self.mstore = QgsMapLayerStore() #build a new map store
        #do your own init (standalone runs)
        if len(init_q_d) == 0:
            self._init_standalone()
        else:
            #check everything is there
            miss_l = set(self.q_hndls).difference(init_q_d.keys())
            assert len(miss_l)==0, 'init_q_d missing handles: %s'%miss_l
            #set the handles
            for k,v in init_q_d.items():
                setattr(self, k, v)
            self._upd_qd()
            self.proj_checks()
        #=======================================================================
        # attach inputs
        #=======================================================================
        self.logger.debug('Qcoms.__init__ finished w/ out_dir: \n    %s'%self.out_dir)
        return
#==========================================================================
# standalone methods-----------
#==========================================================================
    def _init_standalone(self, #setup for qgis runs
                         crsid = None,
                         ):
        """Instance QGIS, initialize processing algos and set the project crs.

        WARNING! do not call twice (phantom crash)

        Parameters
        ----------
        crsid: str, optional
            authid for the session crs; defaults to self.crsid.
        """
        log = self.logger.getChild('_init_standalone')
        if crsid is None: crsid = self.crsid
        #=======================================================================
        # #crs
        #=======================================================================
        crs = QgsCoordinateReferenceSystem(crsid)
        assert isinstance(crs, QgsCoordinateReferenceSystem), 'bad crs type'
        assert crs.isValid()
        self.crs = crs
        self.qproj.setCrs(crs)
        log.info('crs set to \'%s\''%self.crs.authid())
        #=======================================================================
        # setup qgis
        #=======================================================================
        self.qap = self.init_qgis()
        self.algo_init = self.init_algos()
        self.set_vdrivers()
        #=======================================================================
        # wrap
        #=======================================================================
        self._upd_qd()
        log.debug('Qproj._init_standalone finished')
        return
def _upd_qd(self): #set a fresh parameter set
self.init_q_d = {k:getattr(self, k) for k in self.q_hndls}
def init_qgis(self, #instantiate qgis
gui = False):
"""
WARNING: need to hold this app somewhere. call in the module you're working in (scripts)
"""
log = self.logger.getChild('init_qgis')
try:
QgsApplication.setPrefixPath(r'C:/OSGeo4W64/apps/qgis-ltr', True)
app = QgsApplication([], gui)
# Update prefix path
#app.setPrefixPath(r"C:\OSGeo4W64\apps\qgis", True)
app.initQgis()
#logging.debug(QgsApplication.showSettings())
""" was throwing unicode error"""
log.info(u' QgsApplication.initQgis. version: %s, release: %s'%(
Qgis.QGIS_VERSION.encode('utf-8'), Qgis.QGIS_RELEASE_NAME.encode('utf-8')))
return app
except:
raise Error('QGIS failed to initiate')
    def init_algos(self): #initialize processing and add providers
        """Initialize the QGIS processing framework and register native algos.

        WARNING: known to crash without raising an Exception.

        Returns
        -------
        bool: True on success (callers store this on self.algo_init)
        """
        log = self.logger.getChild('init_algos')
        if not isinstance(self.qap, QgsApplication):
            raise Error('qgis has not been properly initlized yet')
        from processing.core.Processing import Processing
        Processing.initialize() #crashing without raising an Exception
        QgsApplication.processingRegistry().addProvider(QgsNativeAlgorithms())
        assert not self.feedback is None, 'instance needs a feedback method for algos to work'
        log.info('processing initilzied w/ feedback: \'%s\''%(type(self.feedback).__name__))
        return True
    def set_vdrivers(self):
        """Build and attach the driverName -> file extension lookup (self.vlay_drivers)."""
        log = self.logger.getChild('set_vdrivers')
        #build vector drivers list by extension
        """couldnt find a good built-in to link extensions with drivers"""
        vlay_drivers = {'SpatiaLite':'sqlite', 'OGR':'shp'}
        #vlay_drivers = {'sqlite':'SpatiaLite', 'shp':'OGR','csv':'delimitedtext'}
        for ext in QgsVectorFileWriter.supportedFormatExtensions():
            dname = QgsVectorFileWriter.driverForExtension(ext)
            if not dname in vlay_drivers.keys():
                vlay_drivers[dname] = ext
        #add in missing/duplicated
        for vdriver in QgsVectorFileWriter.ogrDriverList():
            if not vdriver.driverName in vlay_drivers.keys():
                vlay_drivers[vdriver.driverName] ='?'
        self.vlay_drivers = vlay_drivers
        log.debug('built driver:extensions dict: \n    %s'%vlay_drivers)
        return
def set_crs(self, #load, build, and set the project crs
crsid = None, #integer
crs = None, #QgsCoordinateReferenceSystem
logger=None,
):
#=======================================================================
# setup and defaults
#=======================================================================
if logger is None: logger=self.logger
log = logger.getChild('set_crs')
if crsid is None:
crsid = self.crsid
#=======================================================================
# if not isinstance(crsid, int):
# raise IOError('expected integer for crs')
#=======================================================================
#=======================================================================
# build it
#=======================================================================
if crs is None:
crs = QgsCoordinateReferenceSystem(crsid)
assert isinstance(crs, QgsCoordinateReferenceSystem)
self.crs=crs #overwrite
if not self.crs.isValid():
raise IOError('CRS built from %i is invalid'%self.crs.authid())
#=======================================================================
# attach to project
#=======================================================================
self.qproj.setCrs(self.crs)
self.crsid = self.crs.authid()
if not self.qproj.crs().description() == self.crs.description():
raise Error('qproj crs does not match sessions')
log.info('crs set to EPSG: %s, \'%s\''%(self.crs.authid(), self.crs.description()))
self._upd_qd()
self.proj_checks(logger=log)
return self.crs
    def proj_checks(self,
                    logger=None):
        """Sanity-check the session: drivers, algos, feedback, crs and handles.

        Asserts/raises on any inconsistency; returns None when all pass.
        """
        #log = self.logger.getChild('proj_checks')
        if not self.driverName in self.vlay_drivers:
            raise Error('unrecognized driver name')
        if not self.out_dName in self.vlay_drivers:
            raise Error('unrecognized driver name')
        assert self.algo_init
        assert not self.feedback is None
        assert not self.progressBar is None
        #=======================================================================
        # crs checks
        #=======================================================================
        assert isinstance(self.crs, QgsCoordinateReferenceSystem)
        assert self.crs.isValid()
        assert self.crs.authid()==self.qproj.crs().authid(), 'crs mismatch'
        assert self.crs.authid() == self.crsid, 'crs mismatch'
        assert not self.crs.authid()=='', 'got empty CRS!'
        #=======================================================================
        # handle checks
        #=======================================================================
        assert isinstance(self.init_q_d, dict)
        miss_l = set(self.q_hndls).difference(self.init_q_d.keys())
        assert len(miss_l)==0, 'init_q_d missing handles: %s'%miss_l
        #each stored handle must still match the live attribute
        for k,v in self.init_q_d.items():
            assert getattr(self, k) == v, k
        #log.info('project passed all checks')
        return
def print_qt_version(self):
import inspect
from PyQt5 import Qt
vers = ['%s = %s' % (k,v) for k,v in vars(Qt).items() if k.lower().find('version') >= 0 and not inspect.isbuiltin(v)]
print('\n'.join(sorted(vers)))
#===========================================================================
# LOAD/WRITE LAYERS-----------
#===========================================================================
def load_vlay(self,
fp,
logger=None,
providerLib='ogr',
aoi_vlay = None,
allow_none=True, #control check in saveselectedfeastures
addSpatialIndex=True,
):
assert os.path.exists(fp), 'requested file does not exist: %s'%fp
if logger is None: logger = self.logger
log = logger.getChild('load_vlay')
basefn = os.path.splitext(os.path.split(fp)[1])[0]
log.debug('loading from %s'%fp)
vlay_raw = QgsVectorLayer(fp,basefn,providerLib)
#=======================================================================
# # checks
#=======================================================================
if not isinstance(vlay_raw, QgsVectorLayer):
raise IOError
#check if this is valid
if not vlay_raw.isValid():
raise Error('loaded vlay \'%s\' is not valid. \n \n did you initilize?'%vlay_raw.name())
#check if it has geometry
if vlay_raw.wkbType() == 100:
raise Error('loaded vlay has NoGeometry')
assert isinstance(self.mstore, QgsMapLayerStore)
"""only add intermediate layers to store
self.mstore.addMapLayer(vlay_raw)"""
if not vlay_raw.crs()==self.qproj.crs():
log.warning('crs mismatch: \n %s\n %s'%(
vlay_raw.crs(), self.qproj.crs()))
#=======================================================================
# aoi slice
#=======================================================================
if isinstance(aoi_vlay, QgsVectorLayer):
log.info('slicing by aoi %s'%aoi_vlay.name())
vlay = self.selectbylocation(vlay_raw, aoi_vlay, allow_none=allow_none,
logger=log, result_type='layer')
#check for no selection
if vlay is None:
return None
vlay.setName(vlay_raw.name()) #reset the name
#clear original from memory
self.mstore.addMapLayer(vlay_raw)
self.mstore.removeMapLayers([vlay_raw])
else:
vlay = vlay_raw
#=======================================================================
# clean------
#=======================================================================
#spatial index
if addSpatialIndex and (not vlay_raw.hasSpatialIndex()==QgsFeatureSource.SpatialIndexPresent):
self.createspatialindex(vlay_raw, logger=log)
#=======================================================================
# wrap
#=======================================================================
dp = vlay.dataProvider()
log.info('loaded vlay \'%s\' as \'%s\' %s geo with %i feats from file: \n %s'
%(vlay.name(), dp.storageType(), QgsWkbTypes().displayString(vlay.wkbType()), dp.featureCount(), fp))
return vlay
    def load_rlay(self, fp,
                  aoi_vlay = None,
                  logger=None):
        """Load a raster layer from file, optionally clipping it to an aoi polygon.

        Parameters
        ----------
        fp: str
            filepath to the raster data
        aoi_vlay: QgsVectorLayer, optional
            polygon layer used to clip the loaded raster

        Returns
        -------
        QgsRasterLayer: the loaded (and possibly clipped) raster
        """
        if logger is None: logger = self.logger
        log = logger.getChild('load_rlay')
        assert os.path.exists(fp), 'requested file does not exist: %s'%fp
        assert QgsRasterLayer.isValidRasterFileName(fp), \
            'requested file is not a valid raster file type: %s'%fp
        basefn = os.path.splitext(os.path.split(fp)[1])[0]
        #Import a Raster Layer
        log.debug('QgsRasterLayer(%s, %s)'%(fp, basefn))
        rlayer = QgsRasterLayer(fp, basefn)
        """
        hanging for some reason...
        QgsRasterLayer(C:\LS\03_TOOLS\CanFlood\_git\tutorials\1\haz_rast\haz_1000.tif, haz_1000)
        """
        #=======================================================================
        # rlayer = QgsRasterLayer(r'C:\LS\03_TOOLS\CanFlood\_git\tutorials\1\haz_rast\haz_1000.tif',
        #                         'haz_1000')
        #=======================================================================
        #===========================================================================
        # check
        #===========================================================================
        assert isinstance(rlayer, QgsRasterLayer), 'failed to get a QgsRasterLayer'
        assert rlayer.isValid(), "Layer failed to load!"
        if not rlayer.crs() == self.qproj.crs():
            log.warning('loaded layer \'%s\' crs mismatch!'%rlayer.name())
        log.debug('loaded \'%s\' from \n    %s'%(rlayer.name(), fp))
        #=======================================================================
        # aoi
        #=======================================================================
        if not aoi_vlay is None:
            log.debug('clipping w/ %s'%aoi_vlay.name())
            assert isinstance(aoi_vlay, QgsVectorLayer)
            rlay2 = self.cliprasterwithpolygon(rlayer,aoi_vlay, logger=log, layname=rlayer.name())
            #clean up: the unclipped raw raster is no longer needed
            mstore = QgsMapLayerStore() #build a new store
            mstore.addMapLayers([rlayer]) #add the layers to the store
            mstore.removeAllMapLayers() #remove all the layers
        else:
            rlay2 = rlayer
        return rlay2
    def write_rlay(self, #make a local copy of the passed raster layer
                   rlayer, #raster layer to make a local copy of
                   extent = 'layer', #write extent control
                        #'layer': use the current extent (default)
                        #'mapCanvas': use the current map Canvas
                        #QgsRectangle: use passed extents
                   resolution = 'raw', #resolution for output
                   opts = ["COMPRESS=LZW"], #QgsRasterFileWriter.setCreateOptions
                   out_dir = None, #directory for puts
                   newLayerName = None,
                   logger=None,
                   ):
        """Write a local GeoTiff copy of the passed raster layer.

        Needed because processing tools only work on local copies.

        NOTE: no coordinate conversion is performed here -- the native
        API could not be made to work; use gdal_warp for reprojection.

        Returns
        -------
        str: filepath of the written .tif
        """
        #=======================================================================
        # defaults
        #=======================================================================
        if logger is None: logger=self.logger
        if out_dir is None: out_dir = self.out_dir
        if newLayerName is None: newLayerName = rlayer.name()
        newFn = get_valid_filename('%s.tif'%newLayerName) #clean it
        out_fp = os.path.join(out_dir, newFn)
        log = logger.getChild('write_rlay')
        log.debug('on \'%s\' w/ \n    crs:%s \n    extents:%s\n    xUnits:%.4f'%(
            rlayer.name(), rlayer.crs(), rlayer.extent(), rlayer.rasterUnitsPerPixelX()))
        #=======================================================================
        # precheck
        #=======================================================================
        assert isinstance(rlayer, QgsRasterLayer)
        assert os.path.exists(out_dir)
        if os.path.exists(out_fp):
            msg = 'requested file already exists! and overwrite=%s \n    %s'%(
                self.overwrite, out_fp)
            if self.overwrite:
                log.warning(msg)
            else:
                raise Error(msg)
        #=======================================================================
        # extract info from layer
        #=======================================================================
        """consider loading the layer and duplicating the renderer?
        renderer = rlayer.renderer()"""
        provider = rlayer.dataProvider()
        #build projector
        projector = QgsRasterProjector()
        #projector.setCrs(provider.crs(), provider.crs())
        #build and configure pipe
        pipe = QgsRasterPipe()
        if not pipe.set(provider.clone()): #Insert a new known interface in default place
            raise Error("Cannot set pipe provider")
        if not pipe.insert(2, projector): #insert interface at specified index and connect
            raise Error("Cannot set pipe projector")
        #pipe = rlayer.pipe()
        #coordinate transformation
        """see note"""
        transformContext = self.qproj.transformContext()
        #=======================================================================
        # extents
        #=======================================================================
        if extent == 'layer':
            extent = rlayer.extent()
        elif extent=='mapCanvas':
            assert isinstance(self.iface, QgisInterface), 'bad key for StandAlone?'
            #get the extent, transformed to the current CRS
            extent = QgsCoordinateTransform(
                self.qproj.crs(),
                rlayer.crs(),
                transformContext
                    ).transformBoundingBox(self.iface.mapCanvas().extent())
        assert isinstance(extent, QgsRectangle), 'expected extent=QgsRectangle. got \"%s\''%extent
        #expect the requested extent to be LESS THAN what we have in the raw raster
        assert rlayer.extent().width()>=extent.width(), 'passed extents too wide'
        assert rlayer.extent().height()>=extent.height(), 'passed extents too tall'
        #=======================================================================
        # resolution
        #=======================================================================
        #use the resolution of the raw file
        if resolution == 'raw':
            """this respects the calculated extents"""
            nRows = int(extent.height()/rlayer.rasterUnitsPerPixelY())
            nCols = int(extent.width()/rlayer.rasterUnitsPerPixelX())
        else:
            """dont think theres any decent API support for the GUI behavior"""
            raise Error('not implemented')
        #=======================================================================
        # #build file writer
        #=======================================================================
        file_writer = QgsRasterFileWriter(out_fp)
        #file_writer.Mode(1) #???
        if not opts is None:
            file_writer.setCreateOptions(opts)
        log.debug('writing to file w/ \n    %s'%(
            {'nCols':nCols, 'nRows':nRows, 'extent':extent, 'crs':rlayer.crs()}))
        #execute write
        error = file_writer.writeRaster( pipe, nCols, nRows, extent, rlayer.crs(), transformContext)
        log.info('wrote to file \n    %s'%out_fp)
        #=======================================================================
        # wrap
        #=======================================================================
        if not error == QgsRasterFileWriter.NoError:
            raise Error(error)
        assert os.path.exists(out_fp)
        assert QgsRasterLayer.isValidRasterFileName(out_fp), \
            'requested file is not a valid raster file type: %s'%out_fp
        return out_fp
def vlay_write(self, #write a VectorLayer
vlay,
out_fp=None,
driverName='GPKG',
fileEncoding = "CP1250",
opts = QgsVectorFileWriter.SaveVectorOptions(), #empty options object
overwrite=None,
logger=None):
"""
help(QgsVectorFileWriter.SaveVectorOptions)
QgsVectorFileWriter.SaveVectorOptions.driverName='GPKG'
opt2 = QgsVectorFileWriter.BoolOption(QgsVectorFileWriter.CreateOrOverwriteFile)
help(QgsVectorFileWriter)
"""
#==========================================================================
# defaults
#==========================================================================
if logger is None: logger=self.logger
log = logger.getChild('vlay_write')
if overwrite is None: overwrite=self.overwrite
if out_fp is None: out_fp = os.path.join(self.out_dir, '%s.gpkg'%vlay.name())
#===========================================================================
# assemble options
#===========================================================================
opts.driverName = driverName
opts.fileEncoding = fileEncoding
#===========================================================================
# checks
#===========================================================================
#file extension
fhead, ext = os.path.splitext(out_fp)
if not 'gpkg' in ext:
raise Error('unexpected extension: %s'%ext)
if os.path.exists(out_fp):
msg = 'requested file path already exists!. overwrite=%s \n %s'%(
overwrite, out_fp)
if overwrite:
log.warning(msg)
os.remove(out_fp) #workaround... should be away to overwrite with the QgsVectorFileWriter
else:
raise Error(msg)
if vlay.dataProvider().featureCount() == 0:
raise Error('\'%s\' has no features!'%(
vlay.name()))
if not vlay.isValid():
Error('passed invalid layer')
#=======================================================================
# write
#=======================================================================
error = QgsVectorFileWriter.writeAsVectorFormatV2(
vlay, out_fp,
QgsCoordinateTransformContext(),
opts,
)
#=======================================================================
# wrap and check
#=======================================================================
if error[0] == QgsVectorFileWriter.NoError:
log.info('layer \' %s \' written to: \n %s'%(vlay.name(),out_fp))
return out_fp
raise Error('FAILURE on writing layer \' %s \' with code:\n %s \n %s'%(vlay.name(),error, out_fp))
def load_dtm(self, #convienece loader for assining the correct attribute
fp,
logger=None,
**kwargs):
if logger is None: logger=self.logger
log=logger.getChild('load_dtm')
self.dtm_rlay = self.load_rlay(fp, logger=log, **kwargs)
return self.dtm_rlay
#==========================================================================
# GENERIC METHODS-----------------
#==========================================================================
    def vlay_new_df2(self, #build a vlay from a df
            df_raw,
            geo_d = None, #container of geometry objects {fid: QgsGeometry}
            crs=None,
            gkey = None, #data field linking with geo_d (if None.. uses df index)
            layname='df',
            index = False, #whether to include the index as a field
            logger=None,
            ):
        """Build a new (memory) QgsVectorLayer from a pandas DataFrame.

        performance enhancement over vlay_new_df: simpler, clearer, although
        less versatile.

        df_raw: DataFrame of attribute data, one feature per row. the row
            index is used as the feature id
        geo_d: {key: QgsGeometry}. when None the layer is built geometry-less
        crs: coordinate reference system. defaults to the project crs
        gkey: column on df_raw whose values key into geo_d. when None the
            DataFrame index is matched against geo_d keys directly
        layname: name for the new layer
        index: whether to also write the index out as an attribute field

        returns the assembled QgsVectorLayer
        """
        #=======================================================================
        # setup
        #=======================================================================
        if crs is None: crs = self.qproj.crs()
        if logger is None: logger = self.logger
        log = logger.getChild('vlay_new_df')
        #=======================================================================
        # index fix
        #=======================================================================
        #work on a copy so the caller's frame is never mutated
        df = df_raw.copy()
        if index:
            if not df.index.name is None:
                coln = df.index.name
                df.index.name = None
            else:
                coln = 'index'
            df[coln] = df.index
        #=======================================================================
        # precheck
        #=======================================================================
        #make sure none of hte field names execeed the driver limitations
        max_len = self.fieldn_max_d[self.driverName]
        #check lengths
        boolcol = df_raw.columns.str.len() >= max_len
        if np.any(boolcol):
            log.warning('passed %i columns which exeed the max length=%i for driver \'%s\'.. truncating: \n %s'%(
                boolcol.sum(), max_len, self.driverName, df_raw.columns[boolcol].tolist()))
            df.columns = df.columns.str.slice(start=0, stop=max_len-1)
        #make sure the columns are unique
        assert df.columns.is_unique, 'got duplicated column names: \n %s'%(df.columns.tolist())
        #check datatypes
        assert np.array_equal(df.columns, df.columns.astype(str)), 'got non-string column names'
        #check the geometry
        if not geo_d is None:
            assert isinstance(geo_d, dict)
            if not gkey is None:
                assert gkey in df_raw.columns
                #assert 'int' in df_raw[gkey].dtype.name
                #check gkey match
                l = set(df_raw[gkey].drop_duplicates()).difference(geo_d.keys())
                assert len(l)==0, 'missing %i \'%s\' keys in geo_d: %s'%(len(l), gkey, l)
            #against index
            else:
                #check gkey match
                l = set(df_raw.index).difference(geo_d.keys())
                assert len(l)==0, 'missing %i (of %i) fid keys in geo_d: %s'%(len(l), len(df_raw), l)
        #===========================================================================
        # assemble the fields
        #===========================================================================
        #column name and python type
        fields_d = {coln:np_to_pytype(col.dtype) for coln, col in df.items()}
        #fields container
        qfields = fields_build_new(fields_d = fields_d, logger=log)
        #=======================================================================
        # assemble the features
        #=======================================================================
        #convert form of data
        feats_d = dict()
        for fid, row in df.iterrows():
            feat = QgsFeature(qfields, fid)
            #loop and add data
            for fieldn, value in row.items():
                #skip null values
                if pd.isnull(value): continue
                #get the index for this field
                findx = feat.fieldNameIndex(fieldn)
                #get the qfield
                qfield = feat.fields().at(findx)
                #make the type match
                ndata = qtype_to_pytype(value, qfield.type(), logger=log)
                #set the attribute
                if not feat.setAttribute(findx, ndata):
                    raise Error('failed to setAttribute')
            #setgeometry
            if not geo_d is None:
                if gkey is None:
                    gobj = geo_d[fid]
                else:
                    gobj = geo_d[row[gkey]]
                feat.setGeometry(gobj)
            #stor eit
            feats_d[fid]=feat
        #NOTE(review): 'feat' below is the last feature from the loop — assumes
        #df is non-empty; an empty frame would raise NameError here
        log.debug('built %i \'%s\' features'%(
            len(feats_d),
            QgsWkbTypes.geometryDisplayString(feat.geometry().type()),
            ))
        #=======================================================================
        # get the geo type
        #=======================================================================\
        if not geo_d is None:
            gtype = QgsWkbTypes().displayString(next(iter(geo_d.values())).wkbType())
        else:
            gtype='None'
        #===========================================================================
        # buidl the new layer
        #===========================================================================
        vlay = vlay_new_mlay(gtype,
                             crs,
                             layname,
                             qfields,
                             list(feats_d.values()),
                             logger=log,
                             )
        self.createspatialindex(vlay, logger=log)
        #=======================================================================
        # post check
        #=======================================================================
        if not geo_d is None:
            #wkbType 100 == NoGeometry: geometry was requested but not attached
            if vlay.wkbType() == 100:
                raise Error('constructed layer has NoGeometry')
        return vlay
def check_aoi(self, #special c hecks for AOI layers
vlay):
assert isinstance(vlay, QgsVectorLayer)
assert 'Polygon' in QgsWkbTypes().displayString(vlay.wkbType())
assert vlay.dataProvider().featureCount()==1
assert vlay.crs() == self.qproj.crs(), 'aoi CRS (%s) does not match project (%s)'%(vlay.crs(), self.qproj.crs())
return
#==========================================================================
# ALGOS--------------
#==========================================================================
def deletecolumn(self,
in_vlay,
fieldn_l, #list of field names
invert=False, #whether to invert selected field names
layname = None,
logger=None,
):
#=======================================================================
# presets
#=======================================================================
algo_nm = 'qgis:deletecolumn'
if logger is None: logger=self.logger
log = logger.getChild('deletecolumn')
self.vlay = in_vlay
#=======================================================================
# field manipulations
#=======================================================================
fieldn_l = self._field_handlr(in_vlay, fieldn_l, invert=invert, logger=log)
if len(fieldn_l) == 0:
log.debug('no fields requsted to drop... skipping')
return self.vlay
#=======================================================================
# assemble pars
#=======================================================================
#assemble pars
ins_d = { 'COLUMN' : fieldn_l,
'INPUT' : in_vlay,
'OUTPUT' : 'TEMPORARY_OUTPUT'}
log.debug('executing \'%s\' with ins_d: \n %s'%(algo_nm, ins_d))
res_d = processing.run(algo_nm, ins_d, feedback=self.feedback)
res_vlay = res_d['OUTPUT']
#===========================================================================
# post formatting
#===========================================================================
if layname is None:
layname = '%s_delf'%self.vlay.name()
res_vlay.setName(layname) #reset the name
return res_vlay
    def joinattributesbylocation(self,
                                 #data definitions
                                 vlay,
                                 join_vlay, #layer from which to extract attribue values onto th ebottom vlay
                                 jlay_fieldn_l, #list of field names to extract from the join_vlay
                                 selected_only = False,
                                 jvlay_selected_only = False, #only consider selected features on the join layer
                                 #algo controls
                                 prefix = '',
                                 method=0, #one-to-many
                                 predicate_l = ['intersects'],#list of geometric serach predicates. NOTE: mutable default, but never mutated below
                                 discard_nomatch = False, #Discard records which could not be joined
                                 #data expectations
                                 join_nullvs = True, #allow null values on jlay_fieldn_l on join_vlay
                                 join_df = None, #if join_nullvs=FALSE, data to check for nulls (skips making a vlay_get_fdf)
                                 allow_field_rename = False, #allow joiner fields to be renamed when mapped onto the main
                                 allow_none = False,
                                 #geometry expectations
                                 expect_all_hits = False, #wheter every main feature intersects a join feature
                                 expect_j_overlap = False, #wheter to expect the join_vlay to beoverlapping
                                 expect_m_overlap = False, #wheter to expect the mainvlay to have overlaps
                                 logger=None,
                                 ):
        """Spatially join attributes from join_vlay onto vlay (qgis:joinattributesbylocation).

        Returns (res_vlay, new_fieldnames, join_count).

        TODO: really need to clean this up...
        discard_nomatch:
            TRUE: two resulting layers have no features in common
            FALSE: in layer retains all non matchers, out layer only has the non-matchers?
        METHOD: Join type
        - 0: Create separate feature for each located feature (one-to-many)
        - 1: Take attributes of the first located feature only (one-to-one)
        """
        #=======================================================================
        # presets
        #=======================================================================
        if logger is None: logger=self.logger
        log = logger.getChild('joinattributesbylocation')
        self.vlay = vlay
        algo_nm = 'qgis:joinattributesbylocation'
        #predicate name -> algorithm enum code
        predicate_d = {'intersects':0,'contains':1,'equals':2,'touches':3,'overlaps':4,'within':5, 'crosses':6}
        jlay_fieldn_l = self._field_handlr(join_vlay,
                                           jlay_fieldn_l,
                                           invert=False)
        #=======================================================================
        # jgeot = vlay_get_bgeo_type(join_vlay)
        # mgeot = vlay_get_bgeo_type(self.vlay)
        #=======================================================================
        #main-layer feature count, used for the post-run sanity check below
        mfcnt = self.vlay.dataProvider().featureCount()
        #jfcnt = join_vlay.dataProvider().featureCount()
        mfnl = vlay_fieldnl(self.vlay)
        expect_overlaps = expect_j_overlap or expect_m_overlap
        #=======================================================================
        # geometry expectation prechecks
        #=======================================================================
        """should take any geo
        if not (jgeot == 'polygon' or mgeot == 'polygon'):
            raise Error('one of the layres has to be a polygon')
        if not jgeot=='polygon':
            if expect_j_overlap:
                raise Error('join vlay is not a polygon, expect_j_overlap should =False')
        if not mgeot=='polygon':
            if expect_m_overlap:
                raise Error('main vlay is not a polygon, expect_m_overlap should =False')
        if expect_all_hits:
            if discard_nomatch:
                raise Error('discard_nomatch should =FALSE if you expect all hits')
            if allow_none:
                raise Error('expect_all_hits=TRUE and allow_none=TRUE')
        #method checks
        if method==0:
            if not jgeot == 'polygon':
                raise Error('passed method 1:m but jgeot != polygon')
        if not expect_j_overlap:
            if not method==0:
                raise Error('for expect_j_overlap=False, method must = 0 (1:m) for validation')
        """
        #=======================================================================
        # data expectation checks
        #=======================================================================
        #make sure none of the joiner fields are already on the layer
        if len(mfnl)>0: #see if there are any fields on the main
            l = basic.linr(jlay_fieldn_l, mfnl, result_type='matching')
            if len(l) > 0:
                #w/a prefix
                if not prefix=='':
                    log.debug('%i fields on the joiner \'%s\' are already on \'%s\'... prefixing w/ \'%s\': \n %s'%(
                        len(l), join_vlay.name(), self.vlay.name(), prefix, l))
                else:
                    log.debug('%i fields on the joiner \'%s\' are already on \'%s\'...renameing w/ auto-sufix: \n %s'%(
                        len(l), join_vlay.name(), self.vlay.name(), l))
                if not allow_field_rename:
                    raise Error('%i field names overlap: %s'%(len(l), l))
        #make sure that the joiner attributes are not null
        if not join_nullvs:
            if jvlay_selected_only:
                raise Error('not implmeneted')
            #pull thedata
            if join_df is None:
                join_df = vlay_get_fdf(join_vlay, fieldn_l=jlay_fieldn_l, db_f=self.db_f, logger=log)
            #slice to the columns of interest
            join_df = join_df.loc[:, jlay_fieldn_l]
            #check for nulls
            booldf = join_df.isna()
            if np.any(booldf):
                raise Error('got %i nulls on \'%s\' field %s data'%(
                    booldf.sum().sum(), join_vlay.name(), jlay_fieldn_l))
        #=======================================================================
        # assemble pars
        #=======================================================================
        #convert predicate to code
        pred_code_l = [predicate_d[name] for name in predicate_l]
        #selection flags
        if selected_only:
            """WARNING! This will limit the output to only these features
            (despite the DISCARD_NONMATCHING flag)"""
            main_input = self._get_sel_obj(self.vlay)
        else:
            main_input = self.vlay
        if jvlay_selected_only:
            join_input = self._get_sel_obj(join_vlay)
        else:
            join_input = join_vlay
        #assemble pars
        ins_d = { 'DISCARD_NONMATCHING' : discard_nomatch,
                 'INPUT' : main_input,
                 'JOIN' : join_input,
                 'JOIN_FIELDS' : jlay_fieldn_l,
                 'METHOD' : method,
                 'OUTPUT' : 'TEMPORARY_OUTPUT',
                 #'NON_MATCHING' : 'TEMPORARY_OUTPUT', #not working as expected. see get_misses
                 'PREDICATE' : pred_code_l,
                 'PREFIX' : prefix}
        log.info('extracting %i fields from %i feats from \'%s\' to \'%s\' join fields: %s'%
                  (len(jlay_fieldn_l), join_vlay.dataProvider().featureCount(),
                   join_vlay.name(), self.vlay.name(), jlay_fieldn_l))
        log.debug('executing \'%s\' with ins_d: \n %s'%(algo_nm, ins_d))
        res_d = processing.run(algo_nm, ins_d, feedback=self.feedback)
        res_vlay, join_cnt = res_d['OUTPUT'], res_d['JOINED_COUNT']
        log.debug('got results: \n %s'%res_d)
        #===========================================================================
        # post checks
        #===========================================================================
        hit_fcnt = res_vlay.dataProvider().featureCount()
        if not expect_overlaps:
            if not discard_nomatch:
                #without overlaps and without discarding, counts should be preserved
                if not hit_fcnt == mfcnt:
                    raise Error('in and out fcnts dont match')
        else:
            pass
            #log.debug('expect_overlaps=False, unable to check fcnts')
        #all misses
        if join_cnt == 0:
            log.warning('got no joins from \'%s\' to \'%s\''%(
                self.vlay.name(), join_vlay.name()))
            if not allow_none:
                raise Error('got no joins!')
            if discard_nomatch:
                if not hit_fcnt == 0:
                    raise Error('no joins but got some hits')
        #some hits
        else:
            #check there are no nulls
            if discard_nomatch and not join_nullvs:
                #get data on first joiner
                fid_val_ser = vlay_get_fdata(res_vlay, jlay_fieldn_l[0], logger=log, fmt='ser')
                if np.any(fid_val_ser.isna()):
                    raise Error('discard=True and join null=FALSe but got %i (of %i) null \'%s\' values in the reuslt'%(
                        fid_val_ser.isna().sum(), len(fid_val_ser), fid_val_ser.name
                        ))
        #=======================================================================
        # get the new field names
        #=======================================================================
        new_fn_l = set(vlay_fieldnl(res_vlay)).difference(vlay_fieldnl(self.vlay))
        #=======================================================================
        # wrap
        #=======================================================================
        log.debug('finished joining %i fields from %i (of %i) feats from \'%s\' to \'%s\' join fields: %s'%
                  (len(new_fn_l), join_cnt, self.vlay.dataProvider().featureCount(),
                   join_vlay.name(), self.vlay.name(), new_fn_l))
        return res_vlay, new_fn_l, join_cnt
    def joinbylocationsummary(self,
                                 vlay, #polygon layer to sample from
                                 join_vlay, #layer from which to extract attribue values onto th ebottom vlay
                                 jlay_fieldn_l, #list of field names to extract from the join_vlay
                                 jvlay_selected_only = False, #only consider selected features on the join layer
                                 predicate_l = ['intersects'],#list of geometric serach predicates. NOTE: mutable default, but never mutated below
                                 smry_l = ['sum'], #data summaries to apply. NOTE: mutable default, but never mutated below
                                 discard_nomatch = False, #Discard records which could not be joined
                                 use_raw_fn=False, #whether to convert names back to the originals
                                 layname=None,
                                 ):
        """Spatially join SUMMARIZED attributes onto vlay (qgis:joinbylocationsummary).

        For each feature of vlay, the requested statistics (smry_l) of the
        intersecting join_vlay features' fields (jlay_fieldn_l) are attached
        as new fields (named '<field>_<stat>' unless use_raw_fn=True).
        Returns (res_vlay, new_fieldnames).

        WARNING: This ressets the fids
        discard_nomatch:
            TRUE: two resulting layers have no features in common
            FALSE: in layer retains all non matchers, out layer only has the non-matchers?
        """
        """
        view(join_vlay)
        """
        #=======================================================================
        # presets
        #=======================================================================
        algo_nm = 'qgis:joinbylocationsummary'
        #predicate / summary name -> algorithm enum code
        predicate_d = {'intersects':0,'contains':1,'equals':2,'touches':3,'overlaps':4,'within':5, 'crosses':6}
        summaries_d = {'count':0, 'unique':1, 'min':2, 'max':3, 'range':4, 'sum':5, 'mean':6}
        log = self.logger.getChild('joinbylocationsummary')
        #=======================================================================
        # defaults
        #=======================================================================
        if isinstance(jlay_fieldn_l, set):
            jlay_fieldn_l = list(jlay_fieldn_l)
        #convert predicate to code
        pred_code_l = [predicate_d[pred_name] for pred_name in predicate_l]
        #convert summaries to code
        sum_code_l = [summaries_d[smry_str] for smry_str in smry_l]
        if layname is None: layname = '%s_jsmry'%vlay.name()
        #=======================================================================
        # prechecks
        #=======================================================================
        if not isinstance(jlay_fieldn_l, list):
            raise Error('expected a list')
        #check requested join fields
        fn_l = [f.name() for f in join_vlay.fields()]
        s = set(jlay_fieldn_l).difference(fn_l)
        assert len(s)==0, 'requested join fields not on layer: %s'%s
        #check crs
        assert join_vlay.crs().authid() == vlay.crs().authid()
        #=======================================================================
        # assemble pars
        #=======================================================================
        main_input=vlay
        if jvlay_selected_only:
            join_input = self._get_sel_obj(join_vlay)
        else:
            join_input = join_vlay
        #assemble pars
        ins_d = { 'DISCARD_NONMATCHING' : discard_nomatch,
                 'INPUT' : main_input,
                 'JOIN' : join_input,
                 'JOIN_FIELDS' : jlay_fieldn_l,
                 'OUTPUT' : 'TEMPORARY_OUTPUT',
                 'PREDICATE' : pred_code_l,
                 'SUMMARIES' : sum_code_l,
                 }
        log.debug('executing \'%s\' with ins_d: \n %s'%(algo_nm, ins_d))
        res_d = processing.run(algo_nm, ins_d, feedback=self.feedback)
        res_vlay = res_d['OUTPUT']
        #===========================================================================
        # post formatting
        #===========================================================================
        res_vlay.setName(layname) #reset the name
        #get new field names
        nfn_l = set([f.name() for f in res_vlay.fields()]).difference([f.name() for f in vlay.fields()])
        """
        view(res_vlay)
        """
        #=======================================================================
        # post check
        #=======================================================================
        #warn (but don't fail) when a sampled field came back entirely null
        for fn in nfn_l:
            rser = vlay_get_fdata(res_vlay, fieldn=fn, logger=log, fmt='ser')
            if rser.isna().all().all():
                log.warning('%s \'%s\' got all nulls'%(vlay.name(), fn))
        #=======================================================================
        # rename fields
        #=======================================================================
        #strip the '_<stat>' suffix the algorithm appends, restoring raw names
        if use_raw_fn:
            assert len(smry_l)==1, 'rename only allowed for single sample stat'
            rnm_d = {s:s.replace('_%s'%smry_l[0],'') for s in nfn_l}
            s = set(rnm_d.values()).symmetric_difference(jlay_fieldn_l)
            assert len(s)==0, 'failed to convert field names'
            res_vlay = vlay_rename_fields(res_vlay, rnm_d, logger=log)
            nfn_l = jlay_fieldn_l
        log.info('sampled \'%s\' w/ \'%s\' (%i hits) and \'%s\'to get %i new fields \n %s'%(
            join_vlay.name(), vlay.name(), res_vlay.dataProvider().featureCount(),
            smry_l, len(nfn_l), nfn_l))
        return res_vlay, nfn_l
def joinattributestable(self, #join csv edata to a vector layer
vlay, table_fp, fieldNm,
method = 1, #join type
#- 0: Create separate feature for each matching feature (one-to-many)
#- 1: Take attributes of the first matching feature only (one-to-one)
csv_params = {'encoding':'System',
'type':'csv',
'maxFields':'10000',
'detectTypes':'yes',
'geomType':'none',
'subsetIndex':'no',
'watchFile':'no'},
logger=None,
layname=None,
):
#=======================================================================
# defaults
#=======================================================================
if logger is None: logger = self.logger
if layname is None:
layname = '%s_j'%vlay.name()
algo_nm = 'native:joinattributestable'
log = self.logger.getChild('joinattributestable')
#=======================================================================
# prechecks
#=======================================================================
assert isinstance(vlay, QgsVectorLayer)
assert os.path.exists(table_fp)
assert fieldNm in [f.name() for f in vlay.fields()], 'vlay missing link field %s'%fieldNm
#=======================================================================
# setup table layer
#=======================================================================
uriW = QgsDataSourceUri()
for pName, pValue in csv_params.items():
uriW.setParam(pName, pValue)
table_uri = r'file:///' + table_fp.replace('\\','/') +'?'+ str(uriW.encodedUri(), 'utf-8')
table_vlay = QgsVectorLayer(table_uri,'table',"delimitedtext")
assert fieldNm in [f.name() for f in table_vlay.fields()], 'table missing link field %s'%fieldNm
#=======================================================================
# assemble p ars
#=======================================================================
ins_d = { 'DISCARD_NONMATCHING' : True,
'FIELD' : 'xid', 'FIELDS_TO_COPY' : [],
'FIELD_2' : 'xid',
'INPUT' : vlay,
'INPUT_2' : table_vlay,
'METHOD' : method,
'OUTPUT' : 'TEMPORARY_OUTPUT', 'PREFIX' : '' }
#=======================================================================
# execute
#=======================================================================
log.debug('executing \'native:buffer\' with ins_d: \n %s'%ins_d)
res_d = processing.run(algo_nm, ins_d, feedback=self.feedback)
res_vlay = res_d['OUTPUT']
res_vlay.setName(layname) #reset the name
log.debug('finished w/ %i feats'%res_vlay.dataProvider().featureCount())
return res_vlay
def cliprasterwithpolygon(self,
rlay_raw,
poly_vlay,
layname = None,
#output = 'TEMPORARY_OUTPUT',
logger = None,
):
"""
clipping a raster layer with a polygon mask using gdalwarp
"""
#=======================================================================
# defaults
#=======================================================================
if logger is None: logger = self.logger
log = logger.getChild('cliprasterwithpolygon')
if layname is None:
layname = '%s_clipd'%rlay_raw.name()
algo_nm = 'gdal:cliprasterbymasklayer'
#=======================================================================
# precheck
#=======================================================================
assert isinstance(rlay_raw, QgsRasterLayer)
assert isinstance(poly_vlay, QgsVectorLayer)
assert 'Poly' in QgsWkbTypes().displayString(poly_vlay.wkbType())
assert rlay_raw.crs() == poly_vlay.crs()
#=======================================================================
# run algo
#=======================================================================
ins_d = { 'ALPHA_BAND' : False,
'CROP_TO_CUTLINE' : True,
'DATA_TYPE' : 0,
'EXTRA' : '',
'INPUT' : rlay_raw,
'KEEP_RESOLUTION' : True,
'MASK' : poly_vlay,
'MULTITHREADING' : False,
'NODATA' : None,
'OPTIONS' : '',
'OUTPUT' : 'TEMPORARY_OUTPUT',
'SET_RESOLUTION' : False,
'SOURCE_CRS' : None,
'TARGET_CRS' : None,
'X_RESOLUTION' : None,
'Y_RESOLUTION' : None,
}
log.debug('executing \'%s\' with ins_d: \n %s \n\n'%(algo_nm, ins_d))
res_d = processing.run(algo_nm, ins_d, feedback=self.feedback)
log.debug('finished w/ \n %s'%res_d)
if not os.path.exists(res_d['OUTPUT']):
"""failing intermittently"""
raise Error('failed to get a result')
res_rlay = QgsRasterLayer(res_d['OUTPUT'], layname)
#=======================================================================
# #post check
#=======================================================================
assert isinstance(res_rlay, QgsRasterLayer), 'got bad type: %s'%type(res_rlay)
assert res_rlay.isValid()
res_rlay.setName(layname) #reset the name
log.debug('finished w/ %s'%res_rlay.name())
return res_rlay
def cliprasterwithpolygon2(self, #with saga
rlay_raw,
poly_vlay,
ofp = None,
layname = None,
#output = 'TEMPORARY_OUTPUT',
logger = None,
):
#=======================================================================
# defaults
#=======================================================================
if logger is None: logger = self.logger
log = logger.getChild('cliprasterwithpolygon')
if layname is None:
if not ofp is None:
layname = os.path.splitext(os.path.split(ofp)[1])[0]
else:
layname = '%s_clipd'%rlay_raw.name()
if ofp is None:
ofp = os.path.join(self.out_dir,layname+'.sdat')
if os.path.exists(ofp):
msg = 'requseted filepath exists: %s'%ofp
if self.overwrite:
log.warning('DELETING'+msg)
os.remove(ofp)
else:
raise Error(msg)
algo_nm = 'saga:cliprasterwithpolygon'
#=======================================================================
# precheck
#=======================================================================
if os.path.exists(ofp):
msg = 'requested filepath exists: %s'%ofp
if self.overwrite:
log.warning(msg)
else:
raise Error(msg)
if not os.path.exists(os.path.dirname(ofp)):
os.makedirs(os.path.dirname(ofp))
#assert QgsRasterLayer.isValidRasterFileName(ofp), 'invalid filename: %s'%ofp
assert 'Poly' in QgsWkbTypes().displayString(poly_vlay.wkbType())
assert rlay_raw.crs() == poly_vlay.crs()
#=======================================================================
# run algo
#=======================================================================
ins_d = { 'INPUT' : rlay_raw,
'OUTPUT' : ofp,
'POLYGONS' : poly_vlay }
log.debug('executing \'%s\' with ins_d: \n %s \n\n'%(algo_nm, ins_d))
res_d = processing.run(algo_nm, ins_d, feedback=self.feedback)
log.debug('finished w/ \n %s'%res_d)
if not os.path.exists(res_d['OUTPUT']):
"""failing intermittently"""
raise Error('failed to get a result')
res_rlay = QgsRasterLayer(res_d['OUTPUT'], layname)
#=======================================================================
# #post check
#=======================================================================
assert isinstance(res_rlay, QgsRasterLayer), 'got bad type: %s'%type(res_rlay)
assert res_rlay.isValid()
res_rlay.setName(layname) #reset the name
log.debug('finished w/ %s'%res_rlay.name())
return res_rlay
def srastercalculator(self,
formula,
rlay_d, #container of raster layers to perform calculations on
logger=None,
layname=None,
ofp=None,
):
#=======================================================================
# defaults
#=======================================================================
if logger is None: logger = self.logger
log = logger.getChild('srastercalculator')
assert 'a' in rlay_d
if layname is None:
if not ofp is None:
layname = os.path.splitext(os.path.split(ofp)[1])[0]
else:
layname = '%s_calc'%rlay_d['a'].name()
if ofp is None:
ofp = os.path.join(self.out_dir, layname+'.sdat')
if not os.path.exists(os.path.dirname(ofp)):
log.info('building basedir: %s'%os.path.dirname(ofp))
os.makedirs(os.path.dirname(ofp))
if os.path.exists(ofp):
msg = 'requseted filepath exists: %s'%ofp
if self.overwrite:
log.warning(msg)
os.remove(ofp)
else:
raise Error(msg)
#=======================================================================
# execute
#=======================================================================
algo_nm = 'saga:rastercalculator'
ins_d = { 'FORMULA' : formula,
'GRIDS' : rlay_d.pop('a'),
'RESAMPLING' : 3,
'RESULT' : ofp,
'TYPE' : 7,
'USE_NODATA' : False,
'XGRIDS' : list(rlay_d.values())}
log.debug('executing \'%s\' with ins_d: \n %s'%(algo_nm, ins_d))
res_d = processing.run(algo_nm, ins_d, feedback=self.feedback)
log.debug('finished w/ \n %s'%res_d)
if not os.path.exists(res_d['RESULT']):
raise Error('failed to get a result')
res_rlay = QgsRasterLayer(res_d['RESULT'], layname)
#=======================================================================
# #post check
#=======================================================================
assert isinstance(res_rlay, QgsRasterLayer), 'got bad type: %s'%type(res_rlay)
assert res_rlay.isValid()
res_rlay.setName(layname) #reset the name
log.debug('finished w/ %s'%res_rlay.name())
return res_rlay
def grastercalculator(self, #GDAL raster calculator
formula,
rlay_d, #container of raster layers to perform calculations on
nodata=0,
logger=None,
layname=None,
):
#=======================================================================
# defaults
#=======================================================================
if logger is None: logger = self.logger
log = logger.getChild('grastercalculator')
algo_nm = 'gdal:rastercalculator'
if layname is None:
layname = '%s_calc'%rlay_d['a'].name()
#=======================================================================
# prechecks
#=======================================================================
assert 'A' in rlay_d
#=======================================================================
# populate
#=======================================================================
for rtag in ('A', 'B', 'C', 'D', 'E', 'F'):
#set dummy placeholders for missing rasters
if not rtag in rlay_d:
rlay_d[rtag] = None
#check what the usre pasased
else:
assert isinstance(rlay_d[rtag], QgsRasterLayer), 'passed bad %s'%rtag
assert rtag in formula, 'formula is missing a reference to \'%s\''%rtag
#=======================================================================
# execute
#=======================================================================
ins_d = { 'BAND_A' : 1, 'BAND_B' : -1, 'BAND_C' : -1, 'BAND_D' : -1, 'BAND_E' : -1, 'BAND_F' : -1,
'EXTRA' : '',
'FORMULA' : formula,
'INPUT_A' : rlay_d['A'], 'INPUT_B' : rlay_d['B'], 'INPUT_C' : rlay_d['C'],
'INPUT_D' : rlay_d['D'], 'INPUT_E' : rlay_d['E'], 'INPUT_F' : rlay_d['F'],
'NO_DATA' : nodata,
'OPTIONS' : '',
'OUTPUT' : 'TEMPORARY_OUTPUT',
'RTYPE' : 5 }
log.debug('executing \'%s\' with ins_d: \n %s'%(algo_nm, ins_d))
res_d = processing.run(algo_nm, ins_d, feedback=self.feedback)
log.debug('finished w/ \n %s'%res_d)
assert os.path.exists(res_d['OUTPUT']), 'failed to get result'
res_rlay = QgsRasterLayer(res_d['OUTPUT'], layname)
#=======================================================================
# #post check
#=======================================================================
assert isinstance(res_rlay, QgsRasterLayer), 'got bad type: %s'%type(res_rlay)
assert res_rlay.isValid()
res_rlay.setName(layname) #reset the name
log.debug('finished w/ %s'%res_rlay.name())
return res_rlay
def qrastercalculator(self, #QGIS native raster calculator
formula,
ref_layer = None, #reference layer
logger=None,
layname=None,
):
"""executes the algorhithim... better to use the constructor directly
QgsRasterCalculator"""
#=======================================================================
# defaults
#=======================================================================
if logger is None: logger = self.logger
log = logger.getChild('qrastercalculator')
algo_nm = 'qgis:rastercalculator'
if layname is None:
if ref_layer is None:
layname = 'qrastercalculator'
else:
layname = '%s_calc'%ref_layer.name()
#=======================================================================
# execute
#=======================================================================
"""
formula = '\'haz_100yr_cT2@1\'-\'dtm_cT1@1\''
"""
ins_d = { 'CELLSIZE' : 0,
'CRS' : None,
'EXPRESSION' : formula,
'EXTENT' : None,
'LAYERS' : [ref_layer], #referecnce layer
'OUTPUT' : 'TEMPORARY_OUTPUT' }
log.debug('executing \'%s\' with ins_d: \n %s'%(algo_nm, ins_d))
res_d = processing.run(algo_nm, ins_d, feedback=self.feedback)
log.debug('finished w/ \n %s'%res_d)
if not os.path.exists(res_d['RESULT']):
raise Error('failed to get a result')
res_rlay = QgsRasterLayer(res_d['RESULT'], layname)
#=======================================================================
# #post check
#=======================================================================
assert isinstance(res_rlay, QgsRasterLayer), 'got bad type: %s'%type(res_rlay)
assert res_rlay.isValid()
res_rlay.setName(layname) #reset the name
log.debug('finished w/ %s'%res_rlay.name())
return res_rlay
def addgeometrycolumns(self, #add geometry data as columns
vlay,
layname=None,
logger=None,
):
if logger is None: logger=self.logger
log = logger.getChild('addgeometrycolumns')
algo_nm = 'qgis:exportaddgeometrycolumns'
#=======================================================================
# assemble pars
#=======================================================================
#assemble pars
ins_d = { 'CALC_METHOD' : 0, #use layer's crs
'INPUT' : vlay,
'OUTPUT' : 'TEMPORARY_OUTPUT'}
log.debug('executing \'%s\' with ins_d: \n %s'%(algo_nm, ins_d))
res_d = processing.run(algo_nm, ins_d, feedback=self.feedback)
res_vlay = res_d['OUTPUT']
#===========================================================================
# post formatting
#===========================================================================
if layname is None:
layname = '%s_gcol'%self.vlay.name()
res_vlay.setName(layname) #reset the name
return res_vlay
def buffer(self, vlay,
distance, #buffer distance to apply
dissolve = False,
end_cap_style = 0,
join_style = 0,
miter_limit = 2,
segments = 5,
logger=None,
layname=None,
):
#=======================================================================
# defaults
#=======================================================================
if logger is None: logger = self.logger
if layname is None:
layname = '%s_buf'%vlay.name()
algo_nm = 'native:buffer'
log = self.logger.getChild('buffer')
distance = float(distance)
#=======================================================================
# prechecks
#=======================================================================
if distance==0 or np.isnan(distance):
raise Error('got no buffer!')
#=======================================================================
# build ins
#=======================================================================
"""
distance = 3.0
dcopoy = copy.copy(distance)
"""
ins_d = {
'INPUT': vlay,
'DISSOLVE' : dissolve,
'DISTANCE' : distance,
'END_CAP_STYLE' : end_cap_style,
'JOIN_STYLE' : join_style,
'MITER_LIMIT' : miter_limit,
'OUTPUT' : 'TEMPORARY_OUTPUT',
'SEGMENTS' : segments}
#=======================================================================
# execute
#=======================================================================
log.debug('executing \'native:buffer\' with ins_d: \n %s'%ins_d)
res_d = processing.run(algo_nm, ins_d, feedback=self.feedback)
res_vlay = res_d['OUTPUT']
res_vlay.setName(layname) #reset the name
log.debug('finished')
return res_vlay
def selectbylocation(self, #select features (from main laye) by geoemtric relation with comp_vlay
vlay, #vlay to select features from
comp_vlay, #vlay to compare
result_type = 'select',
method= 'new', #Modify current selection by
pred_l = ['intersect'], #list of geometry predicate names
#expectations
allow_none = False,
logger = None,
):
#=======================================================================
# setups and defaults
#=======================================================================
if logger is None: logger=self.logger
algo_nm = 'native:selectbylocation'
log = logger.getChild('selectbylocation')
#===========================================================================
# #set parameter translation dictoinaries
#===========================================================================
meth_d = {'new':0}
pred_d = {
'are within':6,
'intersect':0,
'overlap':5,
}
#predicate (name to value)
pred_l = [pred_d[pred_nm] for pred_nm in pred_l]
#=======================================================================
# setup
#=======================================================================
ins_d = {
'INPUT' : vlay,
'INTERSECT' : comp_vlay,
'METHOD' : meth_d[method],
'PREDICATE' : pred_l }
log.debug('executing \'%s\' on \'%s\' with: \n %s'
%(algo_nm, vlay.name(), ins_d))
#===========================================================================
# #execute
#===========================================================================
_ = processing.run(algo_nm, ins_d, feedback=self.feedback)
#=======================================================================
# check
#=======================================================================
fcnt = vlay.selectedFeatureCount()
if fcnt == 0:
msg = 'No features selected!'
if allow_none:
log.warning(msg)
else:
raise Error(msg)
#=======================================================================
# wrap
#=======================================================================
log.debug('selected %i (of %i) features from %s'
%(vlay.selectedFeatureCount(),vlay.dataProvider().featureCount(), vlay.name()))
return self._get_sel_res(vlay, result_type=result_type, logger=log, allow_none=allow_none)
def saveselectedfeatures(self,#generate a memory layer from the current selection
vlay,
logger=None,
allow_none = False,
layname=None):
#===========================================================================
# setups and defaults
#===========================================================================
if logger is None: logger = self.logger
log = logger.getChild('saveselectedfeatures')
algo_nm = 'native:saveselectedfeatures'
if layname is None:
layname = '%s_sel'%vlay.name()
#=======================================================================
# precheck
#=======================================================================
fcnt = vlay.selectedFeatureCount()
if fcnt == 0:
msg = 'No features selected!'
if allow_none:
log.warning(msg)
return None
else:
raise Error(msg)
log.debug('on \'%s\' with %i feats selected'%(
vlay.name(), vlay.selectedFeatureCount()))
#=======================================================================
# # build inputs
#=======================================================================
ins_d = {'INPUT' : vlay,
'OUTPUT' : 'TEMPORARY_OUTPUT'}
log.debug('\'native:saveselectedfeatures\' on \'%s\' with: \n %s'
%(vlay.name(), ins_d))
#execute
res_d = processing.run(algo_nm, ins_d, feedback=self.feedback)
res_vlay = res_d['OUTPUT']
assert isinstance(res_vlay, QgsVectorLayer)
#===========================================================================
# wrap
#===========================================================================
res_vlay.setName(layname) #reset the name
return res_vlay
def polygonfromlayerextent(self,
vlay,
round_to=0, #adds a buffer to the result?
logger=None,
layname=None):
"""
This algorithm takes a map layer and generates a new vector layer with the
minimum bounding box (rectangle polygon with N-S orientation) that covers the input layer.
Optionally, the extent can be enlarged to a rounded value.
"""
#===========================================================================
# setups and defaults
#===========================================================================
if logger is None: logger = self.logger
log = logger.getChild('polygonfromlayerextent')
algo_nm = 'qgis:polygonfromlayerextent'
if layname is None:
layname = '%s_exts'%vlay.name()
#=======================================================================
# precheck
#=======================================================================
#=======================================================================
# # build inputs
#=======================================================================
ins_d = {'INPUT' : vlay,
'OUTPUT' : 'TEMPORARY_OUTPUT',
'ROUND_TO':round_to}
log.debug('\'%s\' on \'%s\' with: \n %s'
%(algo_nm, vlay.name(), ins_d))
#execute
res_d = processing.run(algo_nm, ins_d, feedback=self.feedback)
res_vlay = res_d['OUTPUT']
assert isinstance(res_vlay, QgsVectorLayer)
#===========================================================================
# wrap
#===========================================================================
res_vlay.setName(layname) #reset the name
return res_vlay
def fixgeometries(self, vlay,
logger=None,
layname=None,
):
#=======================================================================
# defaults
#=======================================================================
if logger is None: logger = self.logger
if layname is None:
layname = '%s_fix'%vlay.name()
algo_nm = 'native:fixgeometries'
log = self.logger.getChild('fixgeometries')
#=======================================================================
# build ins
#=======================================================================
"""
distance = 3.0
dcopoy = copy.copy(distance)
"""
ins_d = {
'INPUT': vlay,
'OUTPUT' : 'TEMPORARY_OUTPUT',
}
#=======================================================================
# execute
#=======================================================================
log.debug('executing \'%s\' with ins_d: \n %s'%(algo_nm, ins_d))
res_d = processing.run(algo_nm, ins_d, feedback=self.feedback)
res_vlay = res_d['OUTPUT']
res_vlay.setName(layname) #reset the name
log.debug('finished')
return res_vlay
def createspatialindex(self,
in_vlay,
logger=None,
):
#=======================================================================
# presets
#=======================================================================
algo_nm = 'qgis:createspatialindex'
if logger is None: logger=self.logger
log = self.logger.getChild('createspatialindex')
in_vlay
#=======================================================================
# assemble pars
#=======================================================================
#assemble pars
ins_d = { 'INPUT' : in_vlay }
log.debug('executing \'%s\' with ins_d: \n %s'%(algo_nm, ins_d))
res_d = processing.run(algo_nm, ins_d, feedback=self.feedback)
#===========================================================================
# post formatting
#===========================================================================
#=======================================================================
# if layname is None:
# layname = '%s_si'%self.vlay.name()
#
# res_vlay.setName(layname) #reset the name
#=======================================================================
return
def warpreproject(self, #repojrect a raster
rlay_raw,
crsOut = None, #crs to re-project to
layname = None,
options = 'COMPRESS=DEFLATE|PREDICTOR=2|ZLEVEL=9',
output = 'TEMPORARY_OUTPUT',
logger = None,
):
#=======================================================================
# defaults
#=======================================================================
if logger is None: logger = self.logger
log = logger.getChild('warpreproject')
if layname is None:
layname = '%s_rproj'%rlay_raw.name()
algo_nm = 'gdal:warpreproject'
if crsOut is None: crsOut = self.crs #just take the project's
#=======================================================================
# precheck
#=======================================================================
"""the algo accepts 'None'... but not sure why we'd want to do this"""
assert isinstance(crsOut, QgsCoordinateReferenceSystem), 'bad crs type'
assert isinstance(rlay_raw, QgsRasterLayer)
assert rlay_raw.crs() != crsOut, 'layer already on this CRS!'
#=======================================================================
# run algo
#=======================================================================
ins_d = {
'DATA_TYPE' : 0,
'EXTRA' : '',
'INPUT' : rlay_raw,
'MULTITHREADING' : False,
'NODATA' : None,
'OPTIONS' : options,
'OUTPUT' : output,
'RESAMPLING' : 0,
'SOURCE_CRS' : None,
'TARGET_CRS' : crsOut,
'TARGET_EXTENT' : None,
'TARGET_EXTENT_CRS' : None,
'TARGET_RESOLUTION' : None,
}
log.debug('executing \'%s\' with ins_d: \n %s \n\n'%(algo_nm, ins_d))
res_d = processing.run(algo_nm, ins_d, feedback=self.feedback)
log.debug('finished w/ \n %s'%res_d)
if not os.path.exists(res_d['OUTPUT']):
"""failing intermittently"""
raise Error('failed to get a result')
res_rlay = QgsRasterLayer(res_d['OUTPUT'], layname)
#=======================================================================
# #post check
#=======================================================================
assert isinstance(res_rlay, QgsRasterLayer), 'got bad type: %s'%type(res_rlay)
assert res_rlay.isValid()
assert rlay_raw.bandCount()==res_rlay.bandCount(), 'band count mismatch'
res_rlay.setName(layname) #reset the name
log.debug('finished w/ %s'%res_rlay.name())
return res_rlay
#===========================================================================
# ALGOS - CUSTOM--------
#===========================================================================
def vlay_pts_dist(self, #get the distance between points in a given order
vlay_raw,
ifn = 'fid', #fieldName to index by
request = None,
result = 'vlay_append', #result type
logger=None):
#===========================================================================
# defaults
#===========================================================================
if logger is None: logger=self.logger
log = logger.getChild('vlay_pts_dist')
if request is None:
request = QgsFeatureRequest(
).addOrderBy(ifn, ascending=True
).setSubsetOfAttributes([ifn], vlay_raw.fields())
#===========================================================================
# precheck
#===========================================================================
assert 'Point' in QgsWkbTypes().displayString(vlay_raw.wkbType()), 'passed bad geo type'
#see if indexer is unique
ifn_d = vlay_get_fdata(vlay_raw, fieldn=ifn, logger=log)
assert len(set(ifn_d.values()))==len(ifn_d)
#===========================================================================
# loop and calc
#===========================================================================
d = dict()
first, geo_prev = True, None
for i, feat in enumerate(vlay_raw.getFeatures(request)):
assert not feat.attribute(ifn) in d, 'indexer is not unique!'
geo = feat.geometry()
if first:
first=False
else:
d[feat.attribute(ifn)] = geo.distance(geo_prev)
geo_prev = geo
log.info('got %i distances using \"%s\''%(len(d), ifn))
#===========================================================================
# check
#===========================================================================
assert len(d) == (vlay_raw.dataProvider().featureCount() -1)
#===========================================================================
# results typing
#===========================================================================
if result == 'dict': return d
elif result == 'vlay_append':
#data manip
ncoln = '%s_dist'%ifn
df_raw = vlay_get_fdf(vlay_raw, logger=log)
df = df_raw.join(pd.Series(d, name=ncoln), on=ifn)
assert df[ncoln].isna().sum()==1, 'expected 1 null'
#reassemble
geo_d = vlay_get_fdata(vlay_raw, geo_obj=True, logger=log)
return self.vlay_new_df2(df, geo_d=geo_d, logger=log,
layname='%s_%s'%(vlay_raw.name(), ncoln))
#==========================================================================
# privates----------
#==========================================================================
def _field_handlr(self, #common handling for fields
vlay, #layer to check for field presence
fieldn_l, #list of fields to handle
invert = False,
logger=None,
):
if logger is None: logger=self.logger
log = logger.getChild('_field_handlr')
#=======================================================================
# all flag
#=======================================================================
if isinstance(fieldn_l, str):
if fieldn_l == 'all':
fieldn_l = vlay_fieldnl(vlay)
log.debug('user passed \'all\', retrieved %i fields: \n %s'%(
len(fieldn_l), fieldn_l))
else:
raise Error('unrecognized fieldn_l\'%s\''%fieldn_l)
#=======================================================================
# type setting
#=======================================================================
if isinstance(fieldn_l, tuple) or isinstance(fieldn_l, np.ndarray) or isinstance(fieldn_l, set):
fieldn_l = list(fieldn_l)
#=======================================================================
# checking
#=======================================================================
if not isinstance(fieldn_l, list):
raise Error('expected a list for fields, instead got \n %s'%fieldn_l)
#vlay_check(vlay, exp_fieldns=fieldn_l)
#=======================================================================
# #handle inversions
#=======================================================================
if invert:
big_fn_s = set(vlay_fieldnl(vlay)) #get all the fields
#get the difference
fieldn_l = list(big_fn_s.difference(set(fieldn_l)))
log.debug('inverted selection from %i to %i fields'%
(len(big_fn_s), len(fieldn_l)))
return fieldn_l
def _get_sel_obj(self, vlay): #get the processing object for algos with selections
log = self.logger.getChild('_get_sel_obj')
assert isinstance(vlay, QgsVectorLayer)
if vlay.selectedFeatureCount() == 0:
raise Error('Nothing selected on \'%s\'. exepects some pre selection'%(vlay.name()))
#handle project layer store
if self.qproj.mapLayer(vlay.id()) is None:
#layer not on project yet. add it
if self.qproj.addMapLayer(vlay, False) is None:
raise Error('failed to add map layer \'%s\''%vlay.name())
log.debug('based on %i selected features from \'%s\''%(len(vlay.selectedFeatureIds()), vlay.name()))
return QgsProcessingFeatureSourceDefinition(source=vlay.id(),
selectedFeaturesOnly=True,
featureLimit=-1,
geometryCheck=QgsFeatureRequest.GeometryAbortOnInvalid)
def _get_sel_res(self, #handler for returning selection like results
vlay, #result layer (with selection on it
result_type='select',
#expectiions
allow_none = False,
logger=None
):
#=======================================================================
# setup
#=======================================================================
if logger is None: logger = self.logger
log = logger.getChild('_get_sel_res')
#=======================================================================
# precheck
#=======================================================================
if vlay.selectedFeatureCount() == 0:
if not allow_none:
raise Error('nothing selected')
return None
#log.debug('user specified \'%s\' for result_type'%result_type)
#=======================================================================
# by handles
#=======================================================================
if result_type == 'select':
#log.debug('user specified \'select\', doing nothing with %i selected'%vlay.selectedFeatureCount())
result = None
elif result_type == 'fids':
result = vlay.selectedFeatureIds() #get teh selected feature ids
elif result_type == 'feats':
result = {feat.id(): feat for feat in vlay.getSelectedFeatures()}
elif result_type == 'layer':
result = self.saveselectedfeatures(vlay, logger=log)
else:
raise Error('unexpected result_type kwarg')
return result
    def _in_out_checking(self,res_vlay,
                          ):
        """placeholder hook for validating algo result layers; currently a no-op (returns None)"""
    def __exit__(self, #destructor
                 *args,**kwargs):
        #release any layers held by this session's map-layer store before teardown
        self.mstore.removeAllMapLayers()
        super().__exit__(*args,**kwargs) #delegate the remaining teardown to the base class
class MyFeedBackQ(QgsProcessingFeedback):
    """
    wrapper for easier reporting and extended progress
    Dialogs:
        built by QprojPlug.qproj_setup()
    Qworkers:
        built by Qcoms.__init__()
    """
    def __init__(self,
                 logger=mod_logger):
        """attach a child logger and initialize the QGIS feedback base"""
        self.logger=logger.getChild('FeedBack')
        super().__init__()
    def setProgressText(self, text):
        #route algorithm progress text to the debug log
        self.logger.debug(text)
    def pushInfo(self, info):
        self.logger.info(info)
    def pushCommandInfo(self, info):
        self.logger.info(info)
    def pushDebugInfo(self, info):
        self.logger.info(info)
    def pushConsoleInfo(self, info):
        self.logger.info(info)
    def reportError(self, error, fatalError=False):
        self.logger.error(error)
    def upd_prog(self, #advanced progress handling
                 prog_raw, #pass None to reset
                 method='raw', #whether to append value to the progress
                 ):
        """update the progress value

        Parameters
        ----------
        prog_raw: float or None
            progress value to apply. None resets the progress to 0
        method: str, default 'raw'
            'raw':     set the progress directly to prog_raw
            'append':  add prog_raw to the current progress (capped at 100)
            'portion': treat prog_raw as a percentage of the REMAINING progress
        """
        #=======================================================================
        # defaults
        #=======================================================================
        #get the current progress
        progress = self.progress()
        #=======================================================================
        # reseting
        #=======================================================================
        if prog_raw is None:
            """
            would be nice to reset the progressBar.. .but that would be complicated
            """
            self.setProgress(0)
            return
        #=======================================================================
        # setting
        #=======================================================================
        if method=='append':
            prog = min(progress + prog_raw, 100)
        elif method=='raw':
            prog = prog_raw
        elif method == 'portion':
            rem_prog = 100-progress
            prog = progress + rem_prog*(prog_raw/100)
        else:
            #fix: an unrecognized method previously left 'prog' unbound,
            #crashing below with an UnboundLocalError
            raise ValueError('unrecognized method: %s'%method)
        assert prog<=100
        #===================================================================
        # emit signalling
        #===================================================================
        self.setProgress(prog)
#==============================================================================
# FUNCTIONS----------
#==============================================================================
def init_q(gui=False):
    """initialize a standalone QGIS application instance

    Parameters
    ----------
    gui: bool, default False
        whether to enable the GUI for the QgsApplication

    Returns
    -------
    QgsApplication
        the initialized application

    Raises
    ------
    Error
        if QGIS initialization fails (original exception chained)
    """
    try:
        QgsApplication.setPrefixPath(r'C:/OSGeo4W64/apps/qgis-ltr', True)
        app = QgsApplication([], gui)
        # Update prefix path
        #app.setPrefixPath(r"C:\OSGeo4W64\apps\qgis", True)
        app.initQgis()
        #logging.debug(QgsApplication.showSettings())
        """ was throwing unicode error"""
        print(u' QgsApplication.initQgis. version: %s, release: %s'%(
            Qgis.QGIS_VERSION.encode('utf-8'), Qgis.QGIS_RELEASE_NAME.encode('utf-8')))
        return app
    except Exception as e:
        #fix: a bare 'except:' swallowed SystemExit/KeyboardInterrupt and
        #discarded the original traceback; narrow the catch and chain the cause
        raise Error('QGIS failed to initiate') from e
def vlay_check( #helper to check various expectations on the layer
    vlay,
    exp_fieldns = None, #raise error if these field names are OUT
    uexp_fieldns = None, #raise error if these field names are IN
    real_atts = None, #list of field names to check if attribute value are all real
    bgeot = None, #basic geo type checking
    fcnt = None, #feature count checking. accepts INT or QgsVectorLayer
    fkey = None, #optional secondary key to check
    mlay = False, #check if its a memory layer or not
    chk_valid = False, #check layer validty
    logger = mod_logger,
    db_f = False,
    ):
    """run a set of optional sanity checks against a vector layer

    raises Error on the first failed check; returns None when all the
    requested checks pass.

    NOTE: the 'real_atts', 'bgeot', 'fkey' and 'chk_valid' checks are
    currently disabled (the original commented-out implementations were
    removed); the kwargs are retained for interface compatibility.

    Parameters
    ----------
    vlay: QgsVectorLayer
        layer to check
    exp_fieldns: list or 'all', optional
        field names that must be present on the layer
    uexp_fieldns: list, optional
        field names that must NOT be present on the layer
    fcnt: int or QgsVectorLayer, optional
        expected feature count (or a layer whose count to match)
    mlay: bool, default False
        require the layer to use the 'Memory' storage provider
    logger: logging.Logger
        logger to report to
    db_f: bool, default False
        fancy-logging flag passed through to basic.linr()

    Raises
    ------
    Error
        on any failed check
    """
    #=======================================================================
    # prechecks
    #=======================================================================
    if vlay is None:
        raise Error('got passed an empty vlay')
    if not isinstance(vlay, QgsVectorLayer):
        raise Error('unexpected type: %s'%type(vlay))
    log = logger.getChild('vlay_check')
    checks_l = [] #record of checks that passed (for the wrap message)
    #=======================================================================
    # expected field names
    #=======================================================================
    if not basic.is_null(exp_fieldns): #robust null checking
        skip=False
        if isinstance(exp_fieldns, str):
            if exp_fieldns=='all':
                skip=True #'all' trivially passes
        if not skip:
            fnl = basic.linr(exp_fieldns, vlay_fieldnl(vlay),
                             'expected field names', vlay.name(),
                             result_type='missing', logger=log, fancy_log=db_f)
            if len(fnl)>0:
                raise Error('%s missing expected fields: %s'%(
                    vlay.name(), fnl))
        checks_l.append('exp_fieldns=%i'%len(exp_fieldns))
    #=======================================================================
    # unexpected field names
    #=======================================================================
    if not basic.is_null(uexp_fieldns): #robust null checking
        #only meaningful when the layer actually has fields
        if len(vlay_fieldnl(vlay))>0:
            fnl = basic.linr(uexp_fieldns, vlay_fieldnl(vlay),
                             'un expected field names', vlay.name(),
                             result_type='matching', logger=log, fancy_log=db_f)
            if len(fnl)>0:
                raise Error('%s contains unexpected fields: %s'%(
                    vlay.name(), fnl))
        checks_l.append('uexp_fieldns=%i'%len(uexp_fieldns))
    #=======================================================================
    # feature count
    #=======================================================================
    if not fcnt is None:
        if isinstance(fcnt, QgsVectorLayer):
            fcnt=fcnt.dataProvider().featureCount()
        if not fcnt == vlay.dataProvider().featureCount():
            raise Error('\'%s\'s feature count (%i) does not match %i'%(
                vlay.name(), vlay.dataProvider().featureCount(), fcnt))
        checks_l.append('fcnt=%i'%fcnt)
    #=======================================================================
    # storage type
    #=======================================================================
    if mlay:
        if not 'Memory' in vlay.dataProvider().storageType():
            raise Error('\"%s\' unexpected storage type: %s'%(
                vlay.name(), vlay.dataProvider().storageType()))
        checks_l.append('mlay')
    #=======================================================================
    # wrap
    #=======================================================================
    log.debug('\'%s\' passed %i checks: %s'%(
        vlay.name(), len(checks_l), checks_l))
    return
def load_vlay( #load a layer from a file
        fp,
        providerLib='ogr',
        logger=mod_logger):
    """load a vector layer from file and run basic validity checks

    what are we using this for?
    see instanc emethod
    """
    log = logger.getChild('load_vlay')
    assert os.path.exists(fp), 'requested file does not exist: %s'%fp
    #derive the layer name from the file name (extension dropped)
    basefn = os.path.splitext(os.path.split(fp)[1])[0]
    #Import a Raster Layer
    vlay_raw = QgsVectorLayer(fp,basefn,providerLib)
    #validity check
    if not vlay_raw.isValid():
        log.error('loaded vlay \'%s\' is not valid. \n \n did you initilize?'%vlay_raw.name())
        raise Error('vlay loading produced an invalid layer')
    #geometry check (wkbType 100 = NoGeometry)
    if vlay_raw.wkbType() == 100:
        log.error('loaded vlay has NoGeometry')
        raise Error('no geo')
    #==========================================================================
    # report and return
    #==========================================================================
    vlay = vlay_raw
    dp = vlay.dataProvider()
    log.info('loaded vlay \'%s\' as \'%s\' %s geo  with %i feats from file: \n     %s'
        %(vlay.name(), dp.storageType(), QgsWkbTypes().displayString(vlay.wkbType()), dp.featureCount(), fp))
    return vlay
def vlay_write( #write a VectorLayer
        vlay, out_fp,
        driverName='GPKG',
        fileEncoding = "CP1250",
        opts = None, #optional QgsVectorFileWriter.SaveVectorOptions
        overwrite=False,
        logger=mod_logger):
    """write a vector layer to a GeoPackage file

    help(QgsVectorFileWriter.SaveVectorOptions)
    QgsVectorFileWriter.SaveVectorOptions.driverName='GPKG'
    opt2 = QgsVectorFileWriter.BoolOption(QgsVectorFileWriter.CreateOrOverwriteFile)
    help(QgsVectorFileWriter)
    TODO: Move this back onto Qcoms

    Parameters
    ----------
    vlay: QgsVectorLayer
        layer to write. must be valid and have at least one feature
    out_fp: str
        output file path. must end in .gpkg
    driverName, fileEncoding: str
        applied onto the save options
    opts: QgsVectorFileWriter.SaveVectorOptions, optional
        save options object. a fresh one is built when omitted
    overwrite: bool, default False
        whether to remove an existing file at out_fp

    Returns
    -------
    str
        out_fp on success

    Raises
    ------
    Error
        on a bad extension, an existing file (when overwrite=False),
        an empty or invalid layer, or a writer failure
    """
    #==========================================================================
    # defaults
    #==========================================================================
    log = logger.getChild('vlay_write')
    #===========================================================================
    # assemble options
    #===========================================================================
    if opts is None:
        #fix: 'opts=QgsVectorFileWriter.SaveVectorOptions()' as the default
        #argument was built ONCE at import time and then mutated below on
        #every call (shared state across callers); build a fresh one per call
        opts = QgsVectorFileWriter.SaveVectorOptions()
    opts.driverName = driverName
    opts.fileEncoding = fileEncoding
    #===========================================================================
    # checks
    #===========================================================================
    #file extension
    _, ext = os.path.splitext(out_fp)
    if not 'gpkg' in ext:
        raise Error('unexpected extension: %s'%ext)
    if os.path.exists(out_fp):
        msg = 'requested file path already exists!. overwrite=%s \n    %s'%(
            overwrite, out_fp)
        if overwrite:
            log.warning(msg)
            os.remove(out_fp) #workaround... should be away to overwrite with the QgsVectorFileWriter
        else:
            raise Error(msg)
    if vlay.dataProvider().featureCount() == 0:
        raise Error('\'%s\' has no features!'%(
            vlay.name()))
    if not vlay.isValid():
        Error('passed invalid layer')
    #===========================================================================
    # write
    #===========================================================================
    error = QgsVectorFileWriter.writeAsVectorFormatV2(
            vlay, out_fp,
            QgsCoordinateTransformContext(),
            opts,
            )
    #=======================================================================
    # wrap and check
    #=======================================================================
    if error[0] == QgsVectorFileWriter.NoError:
        log.info('layer \' %s \' written to: \n     %s'%(vlay.name(),out_fp))
        return out_fp
    raise Error('FAILURE on writing layer \' %s \'  with code:\n    %s \n    %s'%(vlay.name(),error, out_fp))
def vlay_get_fdf( #pull all the feature data and place into a df
        vlay,
        fmt='df', #result format key.
            #dict: {fid:{fieldname:value}}
            #df: index=fids, columns=fieldnames
        #limiters
        request = None, #request to pull data. for more customized requests.
        fieldn_l = None, #or field name list. for generic requests
        #modifiers
        reindex = None, #optional field name to reindex df by
        #expectations
        expect_all_real = False, #whether to expect all real results
        allow_none = False, #whether an empty result is acceptable (returns empty container instead of raising)
        db_f = False, #debug flag: run extra field-name checks
        logger=mod_logger,
        feedback=MyFeedBackQ()): #NOTE(review): default is evaluated once at import time, so all calls share one feedback object -- confirm intended
    """Pull all feature attribute data from a vector layer into a dict or DataFrame.

    Warning: requests with getFeatures arent working as expected for memory layers

    this could be combined with vlay_get_feats()
    also see vlay_get_fdata() (for a single column)

    Returns
    -------
    dict or pd.DataFrame (per `fmt`):
        'dict': Qgis attribute dictionary format
            key: generally feat.id()
            value: a dictionary of {field name: attribute value}
        'df': index=fids, columns=fieldnames

    Raises
    ------
    Error: on bad kwarg combinations, empty layer/fields, or empty result
        (unless allow_none=True)
    """
    #===========================================================================
    # setups and defaults
    #===========================================================================
    log = logger.getChild('vlay_get_fdf')
    assert isinstance(vlay, QgsVectorLayer)
    all_fnl = [fieldn.name() for fieldn in vlay.fields().toList()]
    if fieldn_l is None: #use all the fields
        fieldn_l = all_fnl
    else:
        #validate the requested field names against the layer
        vlay_check(vlay, fieldn_l, logger=logger, db_f=db_f)
    if allow_none:
        if expect_all_real:
            raise Error('cant allow none and expect all reals')
    #===========================================================================
    # prechecks
    #===========================================================================
    if not reindex is None:
        if not reindex in fieldn_l:
            raise Error('requested reindexer \'%s\' is not a field name'%reindex)
    if not vlay.dataProvider().featureCount()>0:
        raise Error('no features!')
    if len(fieldn_l) == 0:
        raise Error('no fields!')
    #dict results always carry every field, so field slicing is only valid for 'df'
    if fmt=='dict' and not (len(fieldn_l)==len(all_fnl)):
        raise Error('dict results dont respect field slicing')
    assert hasattr(feedback, 'setProgress')
    #===========================================================================
    # build the request
    #===========================================================================
    feedback.setProgress(2)
    if request is None:
        """WARNING: this doesnt seem to be slicing the fields.
        see Alg().deletecolumns()
            but this will re-key things
        request = QgsFeatureRequest().setSubsetOfAttributes(fieldn_l,vlay.fields())"""
        request = QgsFeatureRequest()
    #never want geometry
    request = request.setFlags(QgsFeatureRequest.NoGeometry)
    log.debug('extracting data from \'%s\' on fields: %s'%(vlay.name(), fieldn_l))
    #===========================================================================
    # loop through each feature and extract the data
    #===========================================================================
    fid_attvs = dict() #{fid : {fieldn:value}}
    fcnt = vlay.dataProvider().featureCount()
    for indxr, feat in enumerate(vlay.getFeatures(request)):
        #zip values
        fid_attvs[feat.id()] = feat.attributes()
        feedback.setProgress((indxr/fcnt)*90)
    #===========================================================================
    # post checks
    #===========================================================================
    if not len(fid_attvs) == vlay.dataProvider().featureCount():
        log.debug('data result length does not match feature count')
        if not request.filterType()==3: #check if a filter fids was passed
            """todo: add check to see if the fiter request length matches tresult"""
            raise Error('no filter and data length mismatch')
    #check the field lengthes
    #NOTE: relies on 'feat' leaking out of the loop above (last feature seen)
    if not len(all_fnl) == len(feat.attributes()):
        raise Error('field length mismatch')
    #empty check 1
    if len(fid_attvs) == 0:
        log.warning('failed to get any data on layer \'%s\' with request'%vlay.name())
        if not allow_none:
            raise Error('no data found!')
        else:
            if fmt == 'dict':
                return dict()
            elif fmt == 'df':
                return pd.DataFrame()
            else:
                raise Error('unexpected fmt type')
    #===========================================================================
    # result formatting
    #===========================================================================
    log.debug('got %i data elements for \'%s\''%(
        len(fid_attvs), vlay.name()))
    if fmt == 'dict':
        return fid_attvs
    elif fmt=='df':
        #build the dict
        df_raw = pd.DataFrame.from_dict(fid_attvs, orient='index', columns=all_fnl)
        #handle column slicing and Qnulls
        """if the requester worked... we probably wouldnt have to do this"""
        df = df_raw.loc[:, tuple(fieldn_l)].replace(NULL, np.nan)
        feedback.setProgress(95)
        if isinstance(reindex, str):
            """
            reindex='zid'
            view(df)
            """
            #try and add the index (fids) as a data column
            try:
                df = df.join(pd.Series(df.index,index=df.index, name='fid'))
            except:
                log.debug('failed to preserve the fids.. column already there?')
            #re-index by the passed key... should copy the fids over to 'index
            df = df.set_index(reindex, drop=True)
            log.debug('reindexed data by \'%s\''%reindex)
        return df
    else:
        raise Error('unrecognized fmt kwarg')
def vlay_get_fdata( #get data for a single field from all the features
            vlay,
            fieldn = None, #get a field name. 'None' returns a dictionary of np.nan
            geopropn = None, #get a geometry property
            geo_obj = False, #whether to just get the geometry object
            request = None, #additional requester (limiting fids). fieldn still required.  additional flags added
            selected= False, #whether to limit data to just those selected features
            fmt = 'dict', #format to return results in
                #'singleton' expect and aprovide a unitary value
            rekey = None, #field name to rekey dictionary like results by
            expect_all_real = False, #whether to expect all real results
            dropna = False, #whether to drop nulls from the results
            allow_none = False, #whether an empty result set is acceptable (vs raising)
            logger = mod_logger, db_f=False):
    """Pull a single attribute column (or geometry) from every feature on a layer.

    Exactly one of three pull modes applies: a named field (`fieldn`),
    a geometry property method (`geopropn`, called on each feature's
    geometry), or the raw geometry objects (`geo_obj=True`).

    Returns (per `fmt`): dict {fid: value}, pd.DataFrame, pd.Series, or a
    single scalar ('singleton').

    TODO: combine this with vlay_get_fdatas
    consider combining with vlay_get_feats
    I'm not sure how this will handle requests w/ expressions
    """
    log = logger.getChild('vlay_get_fdata')
    if request is None:
        request = QgsFeatureRequest()
    #===========================================================================
    # prechecks
    #===========================================================================
    if geo_obj:
        #geometry-object pulls are incompatible with df output and geopropn
        if fmt == 'df': raise IOError
        if not geopropn is None: raise IOError
    if dropna:
        if expect_all_real:
            raise Error('cant expect_all_reals AND dropna')
    if allow_none:
        if expect_all_real:
            raise Error('cant allow none and expect all reals')
    vlay_check(vlay, exp_fieldns=[fieldn], logger=log, db_f=db_f)
    #===========================================================================
    # build the request
    #===========================================================================
    #no geometry
    if (geopropn is None) and (not geo_obj):
        if fieldn is None:
            raise Error('no field name provided')
        request = request.setFlags(QgsFeatureRequest.NoGeometry)
        request = request.setSubsetOfAttributes([fieldn],vlay.fields())
    else:
        request = request.setNoAttributes() #dont get any attributes
    #===========================================================================
    # selection limited
    #===========================================================================
    if selected:
        """
        todo: check if there is already a fid filter placed on the reuqester
        """
        log.debug('limiting data pull to %i selected features on \'%s\''%(
            vlay.selectedFeatureCount(), vlay.name()))
        sfids = vlay.selectedFeatureIds()
        request = request.setFilterFids(sfids)
    #===========================================================================
    # loop through and collect hte data
    #===========================================================================
    #if db_f: req_log(request, logger=log)
    d = dict() #empty container for results
    for feat in vlay.getFeatures(request):
        #=======================================================================
        # get geometry
        #=======================================================================
        if geo_obj:
            d[feat.id()] = feat.geometry()
        #=======================================================================
        # get a geometry property
        #=======================================================================
        elif not geopropn is None:
            geo = feat.geometry()
            func = getattr(geo, geopropn) #get the method
            d[feat.id()] = func() #call the method and store
        #=======================================================================
        # field request
        #=======================================================================
        else:
            #empty shortcut
            if qisnull(feat.attribute(fieldn)):
                d[feat.id()] = np.nan
            else: #pull real data
                d[feat.id()] = feat.attribute(fieldn)
    log.debug('retrieved %i attributes from features on \'%s\''%(
        len(d), vlay.name()))
    #===========================================================================
    # null handling
    #===========================================================================
    if selected:
        if not len(d) == vlay.selectedFeatureCount():
            raise Error('failed to get data matching %i selected features'%(
                vlay.selectedFeatureCount()))
    if expect_all_real:
        boolar = pd.isnull(np.array(list(d.values())))
        if np.any(boolar):
            raise Error('got %i nulls'%boolar.sum())
    if dropna:
        """faster to use dfs?"""
        log.debug('dropping nulls from %i'%len(d))
        d2 = dict()
        for k, v in d.items():
            if np.isnan(v):
                continue
            d2[k] = v
        d = d2 #reset
    #===========================================================================
    # post checks
    #===========================================================================
    if len(d) == 0:
        log.warning('got no results! from \'%s\''%(
            vlay.name()))
        if not allow_none:
            raise Error('allow_none=FALSE and no results')
        """
        view(vlay)
        """
    #===========================================================================
    # rekey
    #===========================================================================
    if isinstance(rekey, str):
        assert fmt=='dict'
        d, _ = vlay_key_convert(vlay, d, rekey, id1_type='fid', logger=log)
    #===========================================================================
    # results
    #===========================================================================
    if fmt == 'dict':
        return d
    elif fmt == 'df':
        return pd.DataFrame(pd.Series(d, name=fieldn))
    elif fmt == 'singleton':
        if not len(d)==1:
            raise Error('expected singleton')
        return next(iter(d.values()))
    elif fmt == 'ser':
        return pd.Series(d, name=fieldn)
    else:
        raise IOError
def vlay_new_mlay(#create a new mlay
                      gtype, #"Point", "LineString", "Polygon", "MultiPoint", "MultiLineString", or "MultiPolygon".
                      crs,
                      layname,
                      qfields,
                      feats_l,
                      logger=mod_logger,
                      ):
    """Build a new in-memory vector layer, attach the passed fields and features, and return it.

    Raises Error on invalid layname, failed field/feature insertion, or an
    unexpected NoGeometry result.
    """
    log = logger.getChild('vlay_new_mlay')
    # input validation
    if not isinstance(layname, str):
        raise Error('expected a string for layname, isntead got %s'%type(layname))
    if gtype=='None':
        log.warning('constructing mlay w/ \'None\' type')
    # assemble the memory-provider uri: geometry type + epsg code + spatial index
    epsg_id = int(crs.authid().split(":")[1]) #epsg integer pulled from the crs authid
    provider_uri = '%s?crs=epsg:%s&index=yes'%(gtype, epsg_id)
    mlay = QgsVectorLayer(provider_uri, layname, "memory")
    # attach the fields
    if not mlay.dataProvider().addAttributes(qfields):
        raise Error('failed to add fields')
    mlay.updateFields()
    # attach the features
    if not mlay.dataProvider().addFeatures(feats_l):
        raise Error('failed to addFeatures')
    mlay.updateExtents()
    # verify the resulting geometry type (wkbType 100 == NoGeometry)
    if mlay.wkbType() == 100:
        msg = 'constructed layer \'%s\' has NoGeometry'%mlay.name()
        if gtype == 'None':
            log.debug(msg) #expected when caller asked for a geometry-less layer
        else:
            raise Error(msg)
    log.debug('constructed \'%s\''%mlay.name())
    return mlay
def vlay_new_df(#build a vlay from a df
        df_raw,
        crs,
        geo_d = None, #container of geometry objects {fid: QgsGeometry}
        geo_fn_tup = None, #if geo_d=None, tuple of field names to search for coordinate data
        layname='df_layer',
        allow_fid_mismatch = False, #passed through to feats_build()
        infer_dtypes = True, #whether to referesh the dtyping in the df
        driverName = 'GPKG', #driver whose field-name length limit is enforced
        #expectations
        expect_unique_colns = True, #raise (vs warn+drop) on duplicated column names
        logger=mod_logger, db_f = False,
        ):
    """Build a memory vector layer from a DataFrame (plus optional geometry).

    Column names are truncated to the driver's length limit and de-duplicated
    before features are assembled via feats_build().

    todo: migrate off this
    """
    #=======================================================================
    # setup
    #=======================================================================
    log = logger.getChild('vlay_new_df')
    log.warning('Depcreciate me')
    #=======================================================================
    # precheck
    #=======================================================================
    df = df_raw.copy()
    max_len=50 #column name length limit enforced below
    #check lengths
    boolcol = df_raw.columns.str.len() >= max_len
    if np.any(boolcol):
        log.warning('passed %i columns which exeed the max length %i for driver \'%s\'.. truncating: \n    %s'%(
            boolcol.sum(), max_len, driverName, df_raw.columns.values[boolcol]))
        df.columns = df.columns.str.slice(start=0, stop=max_len-1)
    #make sure the columns are unique
    if not df.columns.is_unique:
        """
        this can happen especially when some field names are super long and have their unique parts truncated
        """
        boolcol = df.columns.duplicated(keep='first')
        log.warning('got %i duplicated columns: \n    %s'%(
            boolcol.sum(), df.columns[boolcol].values))
        if expect_unique_colns:
            raise Error('got non unique columns')
        #drop the duplicates
        log.warning('dropping second duplicate column')
        df = df.loc[:, ~boolcol]
    #===========================================================================
    # assemble the features
    #===========================================================================
    """this does its own index check"""
    feats_d = feats_build(df, logger=log, geo_d = geo_d,infer_dtypes=infer_dtypes,
                          geo_fn_tup = geo_fn_tup,
                          allow_fid_mismatch=allow_fid_mismatch, db_f=db_f)
    #=======================================================================
    # get the geo type
    #=======================================================================
    if not geo_d is None:
        #pull geometry type from first feature
        gtype = QgsWkbTypes().displayString(next(iter(geo_d.values())).wkbType())
    elif not geo_fn_tup is None:
        gtype = 'Point' #coordinate columns always yield point geometry
    else:
        gtype = 'None'
    #===========================================================================
    # buidl the new layer
    #===========================================================================
    vlay = vlay_new_mlay(gtype, #no geo
                         crs,
                         layname,
                         list(feats_d.values())[0].fields(),
                         list(feats_d.values()),
                         logger=log,
                         )
    #=======================================================================
    # post check
    #=======================================================================
    if db_f:
        if vlay.wkbType() == 100:
            raise Error('constructed layer has NoGeometry')
        #vlay_chk_validty(vlay, chk_geo=True, logger=log)
    return vlay
def vlay_fieldnl(vlay):
    """Return the list of field names on the passed vector layer."""
    names = []
    for qfield in vlay.fields():
        names.append(qfield.name())
    return names
def feats_build( #build a set of features from teh passed data
                data, #data from which to build features from (either df or qvlayd)
                geo_d = None, #container of geometry objects {fid: QgsGeometry}
                geo_fn_tup = None, #if geo_d=None, tuple of field names to search for coordinate data
                allow_fid_mismatch = False, #whether to raise an error if the fids set on the layer dont match the data
                infer_dtypes = True, #whether to referesh the dtyping in the df
                logger=mod_logger, db_f=False):
    """Build a dict of QgsFeatures {fid: QgsFeature} from tabular data.

    `data` may be a pd.DataFrame (index = fids) or a qvlayd dict
    {fid: {fieldname: value}}. Geometry is supplied either directly via
    `geo_d` or extracted from two coordinate columns named in `geo_fn_tup`
    (those columns are removed from the attribute data).
    """
    log = logger.getChild('feats_build')
    #===========================================================================
    # precheck
    #===========================================================================
    #geometry input logic: the two geometry sources are mutually exclusive
    if (not geo_d is None) and (not geo_fn_tup is None):
        raise Error('todo: implement non geo layers')
    #index match
    if isinstance(geo_d, dict):
        #get the data fid_l
        if isinstance(data, pd.DataFrame):
            dfid_l = data.index.tolist()
        elif isinstance(data, dict):
            dfid_l = list(data.keys())
        else:
            raise Error('unexpected type')
        if not basic.linr(dfid_l, list(geo_d.keys()),'feat_data', 'geo_d',
                     sort_values=True, result_type='exact', logger=log):
            raise Error('passed geo_d and data indexes dont match')
    #overrides
    if geo_fn_tup:
        geofn_hits = 0 #counter: how many geometry columns were found in the data
        sub_field_match = False #dropping geometry fields
        #NOTE(review): sub_field_match is set but never read in this function -- leftover from feat_build()?
    else:
        sub_field_match = True
    log.debug('for %i data type %s'%(
        len(data), type(data)))
    #===========================================================================
    # data conversion
    #===========================================================================
    if isinstance(data, pd.DataFrame):
        #check the index (this will be the fids)
        if not data.index.dtype.char == 'q':
            raise Error('expected integer index')
        fid_ar = data.index.values
        #infer types
        if infer_dtypes:
            data = data.infer_objects()
        #convert the data
        qvlayd = df_to_qvlayd(data)
        #=======================================================================
        # build fields container from data
        #=======================================================================
        """we need to convert numpy types to pytypes.
            these are later convert to Qtypes"""
        fields_d = dict()
        for coln, col in data.items():
            if not geo_fn_tup is None:
                if coln in geo_fn_tup:
                    geofn_hits +=1
                    continue #skip this one
            #set the type for this name
            fields_d[coln] = np_to_pytype(col.dtype, logger=log)
        qfields = fields_build_new(fields_d = fields_d, logger=log)
        #=======================================================================
        # some checks
        #=======================================================================
        if db_f:
            #calc hte expectation
            if geo_fn_tup is None:
                exp_cnt= len(data.columns)
            else:
                exp_cnt = len(data.columns) - len(geo_fn_tup)
            if not exp_cnt == len(fields_d):
                raise Error('only generated %i fields from %i columns'%(
                    len(data.columns), len(fields_d)))
            #check we got them all
            if not exp_cnt == len(qfields):
                raise Error('failed to create all the fields')
        """
        for field in qfields:
            print(field)
        qfields.toList()
        new_qfield = QgsField(fname, qtype, typeName=QMetaType.typeName(QgsField(fname, qtype).type()))
        """
    else:
        fid_ar = np.array(list(data.keys()))
        #set the data
        qvlayd = data
        #===========================================================================
        # build fields container from data
        #===========================================================================
        #slice out geometry data if there
        sub_d1 = list(qvlayd.values())[0] #just get the first
        sub_d2 = dict()
        for fname, value in sub_d1.items():
            if not geo_fn_tup is None:
                if fname in geo_fn_tup:
                    geofn_hits +=1
                    continue #skip this one
            sub_d2[fname] = value
        #build the fields from this sample data
        qfields = fields_build_new(samp_d = sub_d2, logger=log)
    #check for geometry field names
    if not geo_fn_tup is None:
        if not geofn_hits == len(geo_fn_tup):
            log.error('missing some geometry field names form the data')
            raise IOError
    #===========================================================================
    # extract geometry
    #===========================================================================
    if geo_d is None:
        #check for nulls
        if db_f:
            chk_df= pd.DataFrame.from_dict(qvlayd, orient='index')
            if chk_df.loc[:, geo_fn_tup].isna().any().any():
                raise Error('got some nulls on the geometry fields: %s'%geo_fn_tup)
        geo_d = dict()
        #iterate over a shallow copy so qvlayd can be re-assigned while looping
        for fid, sub_d in copy.copy(qvlayd).items():
            #get the xy
            xval, yval = sub_d.pop(geo_fn_tup[0]), sub_d.pop(geo_fn_tup[1])
            #build the geometry
            geo_d[fid] = QgsGeometry.fromPointXY(QgsPointXY(xval,yval))
            #add the cleaned data back in
            qvlayd[fid] = sub_d
    #===========================================================================
    # check geometry
    #===========================================================================
    if db_f:
        #precheck geometry validty
        for fid, geo in geo_d.items():
            if not geo.isGeosValid():
                raise Error('got invalid geometry on %i'%fid)
    #===========================================================================
    # loop through adn build features
    #===========================================================================
    feats_d = dict()
    for fid, sub_d in qvlayd.items():
        #=======================================================================
        # #log.debug('assembling feature %i'%fid)
        # #=======================================================================
        # # assmble geometry data
        # #=======================================================================
        # if isinstance(geo_d, dict):
        #     geo = geo_d[fid]
        #
        # elif not geo_fn_tup is None:
        #     xval = sub_d[geo_fn_tup[0]]
        #     yval = sub_d[geo_fn_tup[1]]
        #
        #     if pd.isnull(xval) or pd.isnull(yval):
        #         log.error('got null geometry values')
        #         raise IOError
        #
        #     geo = QgsGeometry.fromPointXY(QgsPointXY(xval,yval))
        #     #Point(xval, yval) #make the point geometry
        #
        # else:
        #     geo = None
        #=======================================================================
        #=======================================================================
        # buidl the feature
        #=======================================================================
        #=======================================================================
        # feats_d[fid] = feat_build(fid, sub_d, qfields=qfields, geometry=geo,
        #                           sub_field_match = sub_field_match, #because we are excluding the geometry from the fields
        #                           logger=log, db_f=db_f)
        #=======================================================================
        feat = QgsFeature(qfields, fid)
        for fieldn, value in sub_d.items():
            """
            cut out feat_build() for simplicity
            """
            #skip null values
            if pd.isnull(value): continue
            #get the index for this field
            findx = feat.fieldNameIndex(fieldn)
            #get the qfield
            qfield = feat.fields().at(findx)
            #make the type match
            ndata = qtype_to_pytype(value, qfield.type(), logger=log)
            #set the attribute
            if not feat.setAttribute(findx, ndata):
                raise Error('failed to setAttribute')
        #setgeometry
        feat.setGeometry(geo_d[fid])
        #stor eit
        feats_d[fid]=feat
    #===========================================================================
    # checks
    #===========================================================================
    if db_f:
        #fid match
        nfid_ar = np.array(list(feats_d.keys()))
        if not np.array_equal(nfid_ar, fid_ar):
            log.warning('fid mismatch')
            if not allow_fid_mismatch:
                raise Error('fid mismatch')
        #feature validty
        for fid, feat in feats_d.items():
            if not feat.isValid():
                raise Error('invalid feat %i'%feat.id())
            if not feat.geometry().isGeosValid():
                raise Error('got invalid geometry on feat \'%s\''%(feat.id()))
            """
            feat.geometry().type()
            """
    log.debug('built %i \'%s\' features'%(
        len(feats_d),
        QgsWkbTypes.geometryDisplayString(feat.geometry().type()),
        ))
    return feats_d
def fields_build_new( #build qfields from different data containers
                    samp_d = None, #sample data from which to build qfields {fname: value}
                    fields_d = None, #direct data from which to build qfields {fname: pytype}
                    fields_l = None, #list of QgsField objects
                    logger=mod_logger):
    """Assemble a QgsFields container from one of three input forms.

    Priority: fields_l (used as-is) > fields_d (name->pytype) > samp_d
    (name->sample value, types inferred).

    Raises
    ------
    IOError: if no usable input is provided, or samp_d holds a null/class value
    Error: on field-count mismatch or failure to append a field
    """
    log = logger.getChild('fields_build_new')
    #===========================================================================
    # buidl the fields_d
    #===========================================================================
    if (fields_d is None) and (fields_l is None): #only if we have nothign better to start with
        if samp_d is None:
            log.error('got no data to build fields on!')
            raise IOError
        fields_l = []
        for fname, value in samp_d.items():
            if pd.isnull(value):
                #FIX: message previously had a '%s' placeholder but no argument
                log.error('for field \'%s\' got null value'%fname)
                raise IOError
            elif inspect.isclass(value):
                raise IOError
            #infer the field type from the sample value's python type
            fields_l.append(field_new(fname, pytype=type(value)))
        log.debug('built %i fields from sample data'%len(fields_l))
    #===========================================================================
    # buidl the fields set
    #===========================================================================
    elif fields_l is None:
        fields_l = []
        for fname, ftype in fields_d.items():
            fields_l.append(field_new(fname, pytype=ftype))
        log.debug('built %i fields from explicit name/type'%len(fields_l))
        #check it
        if not len(fields_l) == len(fields_d):
            raise Error('length mismatch')
    elif fields_d is None: #check we have the other
        raise IOError
    #===========================================================================
    # build the Qfields
    #===========================================================================
    Qfields = QgsFields()
    fail_msg_d = dict()
    for indx, field in enumerate(fields_l):
        if not Qfields.append(field):
            fail_msg_d[indx] = ('%i failed to append field \'%s\''%(indx, field.name()), field)
    #report any append failures, then raise
    if len(fail_msg_d)>0:
        for indx, (msg, field) in fail_msg_d.items():
            log.error(msg)
        raise Error('failed to write %i fields'%len(fail_msg_d))
    """
    field.name()
    field.constraints().constraintDescription()
    field.length()
    """
    #check it
    if not len(Qfields) == len(fields_l):
        raise Error('length mismatch')
    return Qfields
def field_new(fname,
              pytype=str,
              driverName = 'SpatiaLite', #desired driver (to check for field name length limitations)
              fname_trunc = True, #whether to truncate field names tha texceed the limit
              logger=mod_logger): #build a QgsField
    """Build a QgsField from a python name and type.

    The field name is checked against the driver's length limit
    (fieldn_max_d) and truncated (or rejected) if too long; the python
    type is mapped to a QVariant type via ptype_to_qtype().
    """
    #===========================================================================
    # precheck
    #===========================================================================
    if not isinstance(fname, str):
        raise IOError('expected string for fname')
    #vector layer field name lim itation
    max_len = fieldn_max_d[driverName]
    """
    fname = 'somereallylongname'
    """
    if len(fname) >max_len:
        log = logger.getChild('field_new')
        log.warning('got %i (>%i)characters for passed field name \'%s\'. truncating'%(len(fname), max_len, fname))
        if fname_trunc:
            fname = fname[:max_len]
        else:
            raise Error('field name too long')
    qtype = ptype_to_qtype(pytype)
    """
    #check this type
    QMetaType.typeName(QgsField(fname, qtype).type())
    QVariant.String
    QVariant.Int
    QMetaType.typeName(new_qfield.type())
    """
    #new_qfield = QgsField(fname, qtype)
    #typeName is derived by round-tripping through a throwaway QgsField
    new_qfield = QgsField(fname, qtype, typeName=QMetaType.typeName(QgsField(fname, qtype).type()))
    return new_qfield
def vlay_get_bgeo_type(vlay,
                   match_flags=re.IGNORECASE,
                   ):
    """Classify the layer's basic geometry as 'polygon', 'point', or 'line'.

    Searches the lowercase wkb display string for each keyword (in that
    order) and returns the first match; raises Error when none match.
    """
    type_str = QgsWkbTypes().displayString(vlay.wkbType()).lower()
    match = next(
        (g for g in ('polygon', 'point', 'line')
         if re.search(g, type_str, flags=match_flags)),
        None)
    if match is None:
        raise Error('failed to match')
    return match
def vlay_rename_fields(
        vlay_raw,
        rnm_d, #field name conversions to apply {old FieldName:newFieldName}
        logger=None,
        feedback=None,
        ):
    """Rename fields on a copy of the passed layer and return the copy.

    The input layer is not modified: all features are copied to a temporary
    layer first, then renames are applied via the data provider.

    todo: replace with coms.hp.Qproj.vlay_rename_fields
    """
    if logger is None: logger=mod_logger
    log=logger.getChild('vlay_rename_fields')
    #get a working layer
    #selectAll + saveselectedfeatures produces an editable copy of the input
    vlay_raw.selectAll()
    vlay = processing.run('native:saveselectedfeatures',
            {'INPUT' : vlay_raw, 'OUTPUT' : 'TEMPORARY_OUTPUT'},
            feedback=feedback)['OUTPUT']
    #get fieldname index conversion for layer
    fni_d = {f.name():vlay.dataProvider().fieldNameIndex(f.name()) for f in vlay.fields()}
    #check it
    for k in rnm_d.keys():
        assert k in fni_d.keys(), 'requested field \'%s\' not on layer'%k
    #re-index rename request: provider API wants {field index: new name}
    fiRn_d = {fni_d[k]:v for k,v in rnm_d.items()}
    #apply renames
    if not vlay.dataProvider().renameAttributes(fiRn_d):
        raise Error('failed to rename')
    vlay.updateFields()
    #check it: every requested new name must now exist on the layer
    fn_l = [f.name() for f in vlay.fields()]
    s = set(rnm_d.values()).difference(fn_l)
    assert len(s)==0, 'failed to rename %i fields: %s'%(len(s), s)
    vlay.setName(vlay_raw.name())
    log.debug('applied renames to \'%s\' \n %s'%(vlay.name(), rnm_d))
    return vlay
def vlay_key_convert(#convert a list of ids in one form to another
        vlay,
        id1_objs, #list of ids (or dict keyed b y ids) to get conversion of
        id_fieldn, #field name for field type ids
        id1_type = 'field', #type of ids passed in the id_l (result will return a dict of th eopposit etype)
            #'field': keys in id1_objs are values from some field (on the vlay)
            #'fid': keys in id1_objs are fids (on the vlay)
        fid_fval_d = None, #optional pre-calced data (for performance improvement)
        logger=mod_logger,
        db_f = False, #extra checks
        ):
    """Convert ids between fid form and field-value form on a layer.

    Returns
    -------
    tuple: (converted objects, fid_fval_d used for the conversion --
        ONLY covers the passed objects)
    """
    log = logger.getChild('vlay_key_convert')
    #===========================================================================
    # handle variable inputs
    #===========================================================================
    if isinstance(id1_objs, dict):
        id1_l = list(id1_objs.keys())
    elif isinstance(id1_objs, list):
        id1_l = id1_objs
    else:
        raise Error('unrecognized id1_objs type')
    #===========================================================================
    # extract the fid to fval conversion
    #===========================================================================
    if fid_fval_d is None:
        #limit the pull by id1s
        if id1_type == 'fid':
            request = QgsFeatureRequest().setFilterFids(id1_l)
            log.debug('pulling \'fid_fval_d\' from %i fids'%(len(id1_l)))
        #by field values
        elif id1_type == 'field': #limit by field value
            #NOTE: this path is not implemented -- raises before building a request
            raise Error(' not implemented')
            #build an expression so we only query features with values matching the id1_l
            #===================================================================
            # qexp = exp_vals_in_field(id1_l, id_fieldn, qfields = vlay.fields(), logger=log)
            # request = QgsFeatureRequest(qexp)
            #
            # log.debug('pulling \'fid_fval_d\' from %i \'%s\' fvals'%(
            #     len(id1_l), id_fieldn))
            #===================================================================
        else:
            raise Error('unrecognized id1_type')
        fid_fval_d = vlay_get_fdata(vlay, fieldn=id_fieldn, request =request, logger=log,
                       expect_all_real=True, fmt='dict')
    #no need
    else:
        log.debug('using passed \'fid_fval_d\' with %i'%len(fid_fval_d))
    #check them: keys and values must each be unique for the mapping to invert
    if db_f:
        #log.debug('\'fid_fval_d\': \n %s'%fid_fval_d)
        for dname, l in (
            ('keys', list(fid_fval_d.keys())),
            ('values', list(fid_fval_d.values()))
            ):
            if not len(np.unique(np.array(l))) == len(l):
                raise Error('got non unique \'%s\' on fid_fval_d'%dname)
    #===========================================================================
    # swap keys
    #===========================================================================
    if id1_type == 'fid':
        id1_id2_d = fid_fval_d #o flip necessary
    elif id1_type == 'field': #limit by field value
        log.debug('swapping keys')
        id1_id2_d = dict(zip(
            fid_fval_d.values(), fid_fval_d.keys()
            ))
    else:
        raise Error('unrecognized id1_type')
    #=======================================================================
    # #make conversion
    #=======================================================================
    #for dictionaries
    if isinstance(id1_objs, dict):
        res_objs = dict()
        for id1, val in id1_objs.items():
            res_objs[id1_id2_d[id1]] = val
        log.debug('got converted DICT results with %i'%len(res_objs))
    #for lists
    elif isinstance(id1_objs, list):
        res_objs = [id1_id2_d[id1] for id1 in id1_objs]
        log.debug('got converted LIST results with %i'%len(res_objs))
    else:
        raise Error('unrecognized id1_objs type')
    return res_objs, fid_fval_d #converted objects, conversion dict ONLY FOR THSE OBJECTS!
#==============================================================================
# type checks-----------------
#==============================================================================
def qisnull(obj):
    """Return True when *obj* is a null: None, a null QVariant, or pandas-null."""
    if obj is None:
        return True
    if isinstance(obj, QVariant):
        return bool(obj.isNull())
    return bool(pd.isnull(obj))
def is_qtype_match(obj, qtype_code, logger=mod_logger): #check if the object matches the qtype code
    """Check whether *obj*'s python type matches the passed QVariant type code.

    Parameters
    ----------
    obj: object to type-check
    qtype_code: QVariant type code (e.g. qfield.type()), looked up in the
        module-level type_qvar_py_d conversion dict

    Returns
    -------
    bool: True if obj is an instance of the mapped python type

    Raises
    ------
    IOError: if qtype_code has no entry in type_qvar_py_d
    """
    log = logger.getChild('is_qtype_match')
    #get pythonic type for this code
    try:
        py_type = type_qvar_py_d[qtype_code]
    except KeyError: #narrowed from a bare 'except:' so unrelated errors are not masked
        log.error('passed qtype_code \'%s\' not in dict from \'%s\''%(qtype_code, type(obj)))
        raise IOError
    return isinstance(obj, py_type)
#==============================================================================
# type conversions----------------
#==============================================================================
def np_to_pytype(npdobj, logger=mod_logger):
    """Convert a numpy dtype to its corresponding python type.

    Looks the dtype's character code up in the module-level npc_pytype_d
    conversion dictionary.

    Raises
    ------
    Error: if npdobj is not a numpy dtype, or its char code has no mapping
    """
    if not isinstance(npdobj, np.dtype):
        raise Error('not passed a numpy type')
    try:
        return npc_pytype_d[npdobj.char]
    except KeyError as e: #narrowed from 'except Exception': only a missing key is expected here
        log = logger.getChild('np_to_pytype')
        log.error('passed npdtype \'%s\' not found in the conversion dictionary'%npdobj.name)
        #FIX: corrected typo in the raised message ('oto' -> 'to')
        raise Error('failed to convert w/ \n %s'%e)
def qtype_to_pytype( #convert object to the pythonic type taht matches the passed qtype code
        obj,
        qtype_code, #qtype code (qfield.type())
        logger=mod_logger):
    """Coerce *obj* to the python type matching the passed QVariant type code.

    Nulls return None; objects already of the right type are returned
    unchanged; otherwise the mapped python type's constructor is applied
    (with a special case for datetime, qtype_code 16).

    Raises
    ------
    IOError: when the object cannot be converted
    """
    if is_qtype_match(obj, qtype_code): #no conversion needed
        return obj
    #===========================================================================
    # shortcut for nulls
    #===========================================================================
    if qisnull(obj):
        return None
    #get pythonic type for this code
    py_type = type_qvar_py_d[qtype_code]
    try:
        return py_type(obj)
    except Exception: #narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit propagate
        #datetime: QDateTime values dont convert via the constructor
        if qtype_code == 16:
            return obj.toPyDateTime()
        log = logger.getChild('qtype_to_pytype')
        if obj is None: #defensive: qisnull() above should already have returned
            log.error('got NONE type')
        elif isinstance(obj, QVariant):
            log.error('got a Qvariant object')
        else:
            log.error('unable to map object \'%s\' of type \'%s\' to type \'%s\''
                      %(obj, type(obj), py_type))
        """
        QMetaType.typeName(obj)
        """
        raise IOError
def ptype_to_qtype(py_type, logger=mod_logger): #get the qtype corresponding to the passed pytype
    """Return the QVariant type code corresponding to a python type class.

    Useful for building Qt objects; this is the reverse of qtype_to_pytype,
    e.g. py_type=str. Raises Error for non-class input, IOError when a
    QVariant cannot be built from a default instance of *py_type*.
    """
    if not inspect.isclass(py_type):
        logger.error('got unexpected type \'%s\''%type(py_type))
        raise Error('bad type')
    #build a QVariant from a default instance of this class, then report
    #that QVariant's type code
    try:
        qv = QVariant(py_type())
    except Exception:
        # narrowed from a bare 'except:' so interrupts are not swallowed
        logger.error('failed to build QVariant from \'%s\''%type(py_type))
        raise IOError
    return qv.type()
def df_to_qvlayd( #convert a data frame into the layer data structure (keyed by index)
        df, #data to convert. df index should match fid index
        logger=mod_logger):
    """Convert a DataFrame into the qgis layer-data structure {fid: {fieldn: value}}."""
    log = logger.getChild('df_to_qvlayd')
    #prechecks
    if not df.index.is_unique:
        log.error('got passed non-unique index')
        raise IOError
    #===========================================================================
    # build one sub-dict of field values per feature id
    #===========================================================================
    d = {fid: {fieldn: value for fieldn, value in row.items()}
         for fid, row in df.iterrows()}
    if not len(df) == len(d):
        log.error('got length mismatch')
        raise IOError
    log.debug('converted df %s into qvlayd'%str(df.shape))
    return d
def view(#view the vector data (or just a df) as a html frame
        obj, logger=mod_logger,
        #**gfd_kwargs, #kwaqrgs to pass to vlay_get_fdatas() 'doesnt work well with the requester'
        ):
    """Open *obj* (DataFrame, Series, or QgsVectorLayer) in the html viewer."""
    if isinstance(obj, (pd.DataFrame, pd.Series)):
        df = obj
    elif isinstance(obj, QgsVectorLayer):
        #vector layers are converted first; the viewed frame is indexed by fid
        df = vlay_get_fdf(obj)
    else:
        raise Error('got unexpected object type: %s'%type(obj))
    basic.view(df)
    logger.info('viewer closed')
    return
if __name__ == '__main__':
    # placeholder CLI entry point -- this helper module is meant to be imported
    print('???')
|
[
"os.remove",
"hlpr.exceptions.QError",
"processing.run",
"numpy.isnan",
"hlpr.basic.linr",
"os.path.join",
"pandas.DataFrame",
"hlpr.basic.view",
"processing.core.Processing.Processing.initialize",
"inspect.isclass",
"os.path.dirname",
"os.path.exists",
"inspect.isbuiltin",
"hlpr.basic.is_null",
"re.search",
"pandas.DataFrame.from_dict",
"hlpr.basic.get_valid_filename",
"qgis.analysis.QgsNativeAlgorithms",
"pandas.Series",
"copy.copy",
"pandas.isnull",
"numpy.any",
"numpy.array",
"os.path.splitext",
"numpy.array_equal",
"os.path.split",
"logging.getLogger"
] |
[((816, 838), 'logging.getLogger', 'logging.getLogger', (['"""Q"""'], {}), "('Q')\n", (833, 838), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((110896, 110914), 'os.path.exists', 'os.path.exists', (['fp'], {}), '(fp)\n', (110910, 110914), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((113188, 113212), 'os.path.splitext', 'os.path.splitext', (['out_fp'], {}), '(out_fp)\n', (113204, 113212), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((113308, 113330), 'os.path.exists', 'os.path.exists', (['out_fp'], {}), '(out_fp)\n', (113322, 113330), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((131175, 131190), 'numpy.any', 'np.any', (['boolcol'], {}), '(boolcol)\n', (131181, 131190), True, 'import numpy as np\n'), ((149062, 149086), 'hlpr.exceptions.QError', 'Error', (['"""failed to match"""'], {}), "('failed to match')\n", (149067, 149086), True, 'from hlpr.exceptions import QError as Error\n'), ((155113, 155127), 'pandas.isnull', 'pd.isnull', (['obj'], {}), '(obj)\n', (155122, 155127), True, 'import pandas as pd\n'), ((160390, 160404), 'hlpr.basic.view', 'basic.view', (['df'], {}), '(df)\n', (160400, 160404), True, 'import hlpr.basic as basic\n'), ((9318, 9341), 'processing.core.Processing.Processing.initialize', 'Processing.initialize', ([], {}), '()\n', (9339, 9341), False, 'from processing.core.Processing import Processing\n'), ((14988, 15006), 'os.path.exists', 'os.path.exists', (['fp'], {}), '(fp)\n', (15002, 15006), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((18180, 18198), 'os.path.exists', 'os.path.exists', (['fp'], {}), '(fp)\n', (18194, 18198), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((21880, 21923), 'hlpr.basic.get_valid_filename', 'get_valid_filename', (["('%s.tif' % newLayerName)"], {}), "('%s.tif' % newLayerName)\n", (21898, 21923), False, 'from hlpr.basic import 
get_valid_filename\n'), ((21958, 21986), 'os.path.join', 'os.path.join', (['out_dir', 'newFn'], {}), '(out_dir, newFn)\n', (21970, 21986), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((22477, 22500), 'os.path.exists', 'os.path.exists', (['out_dir'], {}), '(out_dir)\n', (22491, 22500), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((22521, 22543), 'os.path.exists', 'os.path.exists', (['out_fp'], {}), '(out_fp)\n', (22535, 22543), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((26497, 26519), 'os.path.exists', 'os.path.exists', (['out_fp'], {}), '(out_fp)\n', (26511, 26519), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((28222, 28246), 'os.path.splitext', 'os.path.splitext', (['out_fp'], {}), '(out_fp)\n', (28238, 28246), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((28362, 28384), 'os.path.exists', 'os.path.exists', (['out_fp'], {}), '(out_fp)\n', (28376, 28384), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((32229, 32244), 'numpy.any', 'np.any', (['boolcol'], {}), '(boolcol)\n', (32235, 32244), True, 'import numpy as np\n'), ((38884, 38938), 'processing.run', 'processing.run', (['algo_nm', 'ins_d'], {'feedback': 'self.feedback'}), '(algo_nm, ins_d, feedback=self.feedback)\n', (38898, 38938), False, 'import processing\n'), ((47656, 47710), 'processing.run', 'processing.run', (['algo_nm', 'ins_d'], {'feedback': 'self.feedback'}), '(algo_nm, ins_d, feedback=self.feedback)\n', (47670, 47710), False, 'import processing\n'), ((54019, 54073), 'processing.run', 'processing.run', (['algo_nm', 'ins_d'], {'feedback': 'self.feedback'}), '(algo_nm, ins_d, feedback=self.feedback)\n', (54033, 54073), False, 'import processing\n'), ((57463, 57487), 'os.path.exists', 'os.path.exists', (['table_fp'], {}), '(table_fp)\n', (57477, 57487), False, 'import os, configparser, logging, inspect, copy, datetime, 
re\n'), ((59020, 59074), 'processing.run', 'processing.run', (['algo_nm', 'ins_d'], {'feedback': 'self.feedback'}), '(algo_nm, ins_d, feedback=self.feedback)\n', (59034, 59074), False, 'import processing\n'), ((61586, 61640), 'processing.run', 'processing.run', (['algo_nm', 'ins_d'], {'feedback': 'self.feedback'}), '(algo_nm, ins_d, feedback=self.feedback)\n', (61600, 61640), False, 'import processing\n'), ((63362, 63381), 'os.path.exists', 'os.path.exists', (['ofp'], {}), '(ofp)\n', (63376, 63381), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((63869, 63888), 'os.path.exists', 'os.path.exists', (['ofp'], {}), '(ofp)\n', (63883, 63888), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((64860, 64914), 'processing.run', 'processing.run', (['algo_nm', 'ins_d'], {'feedback': 'self.feedback'}), '(algo_nm, ins_d, feedback=self.feedback)\n', (64874, 64914), False, 'import processing\n'), ((66819, 66838), 'os.path.exists', 'os.path.exists', (['ofp'], {}), '(ofp)\n', (66833, 66838), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((67699, 67753), 'processing.run', 'processing.run', (['algo_nm', 'ins_d'], {'feedback': 'self.feedback'}), '(algo_nm, ins_d, feedback=self.feedback)\n', (67713, 67753), False, 'import processing\n'), ((71038, 71092), 'processing.run', 'processing.run', (['algo_nm', 'ins_d'], {'feedback': 'self.feedback'}), '(algo_nm, ins_d, feedback=self.feedback)\n', (71052, 71092), False, 'import processing\n'), ((71174, 71205), 'os.path.exists', 'os.path.exists', (["res_d['OUTPUT']"], {}), "(res_d['OUTPUT'])\n", (71188, 71205), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((73371, 73425), 'processing.run', 'processing.run', (['algo_nm', 'ins_d'], {'feedback': 'self.feedback'}), '(algo_nm, ins_d, feedback=self.feedback)\n', (73385, 73425), False, 'import processing\n'), ((74982, 75036), 'processing.run', 'processing.run', (['algo_nm', 'ins_d'], 
{'feedback': 'self.feedback'}), '(algo_nm, ins_d, feedback=self.feedback)\n', (74996, 75036), False, 'import processing\n'), ((77481, 77535), 'processing.run', 'processing.run', (['algo_nm', 'ins_d'], {'feedback': 'self.feedback'}), '(algo_nm, ins_d, feedback=self.feedback)\n', (77495, 77535), False, 'import processing\n'), ((79779, 79833), 'processing.run', 'processing.run', (['algo_nm', 'ins_d'], {'feedback': 'self.feedback'}), '(algo_nm, ins_d, feedback=self.feedback)\n', (79793, 79833), False, 'import processing\n'), ((82420, 82474), 'processing.run', 'processing.run', (['algo_nm', 'ins_d'], {'feedback': 'self.feedback'}), '(algo_nm, ins_d, feedback=self.feedback)\n', (82434, 82474), False, 'import processing\n'), ((84455, 84509), 'processing.run', 'processing.run', (['algo_nm', 'ins_d'], {'feedback': 'self.feedback'}), '(algo_nm, ins_d, feedback=self.feedback)\n', (84469, 84509), False, 'import processing\n'), ((86125, 86179), 'processing.run', 'processing.run', (['algo_nm', 'ins_d'], {'feedback': 'self.feedback'}), '(algo_nm, ins_d, feedback=self.feedback)\n', (86139, 86179), False, 'import processing\n'), ((87160, 87214), 'processing.run', 'processing.run', (['algo_nm', 'ins_d'], {'feedback': 'self.feedback'}), '(algo_nm, ins_d, feedback=self.feedback)\n', (87174, 87214), False, 'import processing\n'), ((90006, 90060), 'processing.run', 'processing.run', (['algo_nm', 'ins_d'], {'feedback': 'self.feedback'}), '(algo_nm, ins_d, feedback=self.feedback)\n', (90020, 90060), False, 'import processing\n'), ((104352, 104385), 'hlpr.exceptions.QError', 'Error', (['"""got passed an empty vlay"""'], {}), "('got passed an empty vlay')\n", (104357, 104385), True, 'from hlpr.exceptions import QError as Error\n'), ((104751, 104777), 'hlpr.basic.is_null', 'basic.is_null', (['exp_fieldns'], {}), '(exp_fieldns)\n', (104764, 104777), True, 'import hlpr.basic as basic\n'), ((105634, 105661), 'hlpr.basic.is_null', 'basic.is_null', (['uexp_fieldns'], {}), '(uexp_fieldns)\n', 
(105647, 105661), True, 'import hlpr.basic as basic\n'), ((111275, 111322), 'hlpr.exceptions.QError', 'Error', (['"""vlay loading produced an invalid layer"""'], {}), "('vlay loading produced an invalid layer')\n", (111280, 111322), True, 'from hlpr.exceptions import QError as Error\n'), ((111454, 111469), 'hlpr.exceptions.QError', 'Error', (['"""no geo"""'], {}), "('no geo')\n", (111459, 111469), True, 'from hlpr.exceptions import QError as Error\n'), ((113258, 113297), 'hlpr.exceptions.QError', 'Error', (["('unexpected extension: %s' % ext)"], {}), "('unexpected extension: %s' % ext)\n", (113263, 113297), True, 'from hlpr.exceptions import QError as Error\n'), ((113816, 113845), 'hlpr.exceptions.QError', 'Error', (['"""passed invalid layer"""'], {}), "('passed invalid layer')\n", (113821, 113845), True, 'from hlpr.exceptions import QError as Error\n'), ((116861, 116882), 'hlpr.exceptions.QError', 'Error', (['"""no features!"""'], {}), "('no features!')\n", (116866, 116882), True, 'from hlpr.exceptions import QError as Error\n'), ((116925, 116944), 'hlpr.exceptions.QError', 'Error', (['"""no fields!"""'], {}), "('no fields!')\n", (116930, 116944), True, 'from hlpr.exceptions import QError as Error\n'), ((117022, 117070), 'hlpr.exceptions.QError', 'Error', (['"""dict results dont respect field slicing"""'], {}), "('dict results dont respect field slicing')\n", (117027, 117070), True, 'from hlpr.exceptions import QError as Error\n'), ((118984, 119014), 'hlpr.exceptions.QError', 'Error', (['"""field length mismatch"""'], {}), "('field length mismatch')\n", (118989, 119014), True, 'from hlpr.exceptions import QError as Error\n'), ((125870, 125884), 'numpy.any', 'np.any', (['boolar'], {}), '(boolar)\n', (125876, 125884), True, 'import numpy as np\n'), ((129086, 129115), 'hlpr.exceptions.QError', 'Error', (['"""failed to add fields"""'], {}), "('failed to add fields')\n", (129091, 129115), True, 'from hlpr.exceptions import QError as Error\n'), ((129258, 129288), 
'hlpr.exceptions.QError', 'Error', (['"""failed to addFeatures"""'], {}), "('failed to addFeatures')\n", (129263, 129288), True, 'from hlpr.exceptions import QError as Error\n'), ((135177, 135216), 'hlpr.exceptions.QError', 'Error', (['"""todo: implement non geo layers"""'], {}), "('todo: implement non geo layers')\n", (135182, 135216), True, 'from hlpr.exceptions import QError as Error\n'), ((147271, 147295), 'hlpr.exceptions.QError', 'Error', (['"""length mismatch"""'], {}), "('length mismatch')\n", (147276, 147295), True, 'from hlpr.exceptions import QError as Error\n'), ((148974, 149015), 're.search', 're.search', (['gtype', 'gstr'], {'flags': 'match_flags'}), '(gtype, gstr, flags=match_flags)\n', (148983, 149015), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((149499, 149618), 'processing.run', 'processing.run', (['"""native:saveselectedfeatures"""', "{'INPUT': vlay_raw, 'OUTPUT': 'TEMPORARY_OUTPUT'}"], {'feedback': 'feedback'}), "('native:saveselectedfeatures', {'INPUT': vlay_raw, 'OUTPUT':\n 'TEMPORARY_OUTPUT'}, feedback=feedback)\n", (149513, 149618), False, 'import processing\n'), ((150091, 150116), 'hlpr.exceptions.QError', 'Error', (['"""failed to rename"""'], {}), "('failed to rename')\n", (150096, 150116), True, 'from hlpr.exceptions import QError as Error\n'), ((156141, 156173), 'hlpr.exceptions.QError', 'Error', (['"""not passed a numpy type"""'], {}), "('not passed a numpy type')\n", (156146, 156173), True, 'from hlpr.exceptions import QError as Error\n'), ((158023, 158047), 'inspect.isclass', 'inspect.isclass', (['py_type'], {}), '(py_type)\n', (158038, 158047), False, 'import inspect\n'), ((158128, 158145), 'hlpr.exceptions.QError', 'Error', (['"""bad type"""'], {}), "('bad type')\n", (158133, 158145), True, 'from hlpr.exceptions import QError as Error\n'), ((9188, 9237), 'hlpr.exceptions.QError', 'Error', (['"""qgis has not been properly initlized yet"""'], {}), "('qgis has not been properly initlized yet')\n", 
(9193, 9237), True, 'from hlpr.exceptions import QError as Error\n'), ((9442, 9463), 'qgis.analysis.QgsNativeAlgorithms', 'QgsNativeAlgorithms', ([], {}), '()\n', (9461, 9463), False, 'from qgis.analysis import QgsNativeAlgorithms\n'), ((12404, 12446), 'hlpr.exceptions.QError', 'Error', (['"""qproj crs does not match sessions"""'], {}), "('qproj crs does not match sessions')\n", (12409, 12446), True, 'from hlpr.exceptions import QError as Error\n'), ((12844, 12877), 'hlpr.exceptions.QError', 'Error', (['"""unrecognized driver name"""'], {}), "('unrecognized driver name')\n", (12849, 12877), True, 'from hlpr.exceptions import QError as Error\n'), ((12957, 12990), 'hlpr.exceptions.QError', 'Error', (['"""unrecognized driver name"""'], {}), "('unrecognized driver name')\n", (12962, 12990), True, 'from hlpr.exceptions import QError as Error\n'), ((15895, 15930), 'hlpr.exceptions.QError', 'Error', (['"""loaded vlay has NoGeometry"""'], {}), "('loaded vlay has NoGeometry')\n", (15900, 15930), True, 'from hlpr.exceptions import QError as Error\n'), ((23450, 23483), 'hlpr.exceptions.QError', 'Error', (['"""Cannot set pipe provider"""'], {}), "('Cannot set pipe provider')\n", (23455, 23483), True, 'from hlpr.exceptions import QError as Error\n'), ((23607, 23641), 'hlpr.exceptions.QError', 'Error', (['"""Cannot set pipe projector"""'], {}), "('Cannot set pipe projector')\n", (23612, 23641), True, 'from hlpr.exceptions import QError as Error\n'), ((25473, 25497), 'hlpr.exceptions.QError', 'Error', (['"""not implemented"""'], {}), "('not implemented')\n", (25478, 25497), True, 'from hlpr.exceptions import QError as Error\n'), ((26460, 26472), 'hlpr.exceptions.QError', 'Error', (['error'], {}), '(error)\n', (26465, 26472), True, 'from hlpr.exceptions import QError as Error\n'), ((28304, 28343), 'hlpr.exceptions.QError', 'Error', (["('unexpected extension: %s' % ext)"], {}), "('unexpected extension: %s' % ext)\n", (28309, 28343), True, 'from hlpr.exceptions import QError as 
Error\n'), ((28930, 28959), 'hlpr.exceptions.QError', 'Error', (['"""passed invalid layer"""'], {}), "('passed invalid layer')\n", (28935, 28959), True, 'from hlpr.exceptions import QError as Error\n'), ((44563, 44618), 'hlpr.basic.linr', 'basic.linr', (['jlay_fieldn_l', 'mfnl'], {'result_type': '"""matching"""'}), "(jlay_fieldn_l, mfnl, result_type='matching')\n", (44573, 44618), True, 'import hlpr.basic as basic\n'), ((45887, 45901), 'numpy.any', 'np.any', (['booldf'], {}), '(booldf)\n', (45893, 45901), True, 'import numpy as np\n'), ((52846, 52870), 'hlpr.exceptions.QError', 'Error', (['"""expected a list"""'], {}), "('expected a list')\n", (52851, 52870), True, 'from hlpr.exceptions import QError as Error\n'), ((61722, 61753), 'os.path.exists', 'os.path.exists', (["res_d['OUTPUT']"], {}), "(res_d['OUTPUT'])\n", (61736, 61753), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((61814, 61845), 'hlpr.exceptions.QError', 'Error', (['"""failed to get a result"""'], {}), "('failed to get a result')\n", (61819, 61845), True, 'from hlpr.exceptions import QError as Error\n'), ((63295, 63340), 'os.path.join', 'os.path.join', (['self.out_dir', "(layname + '.sdat')"], {}), "(self.out_dir, layname + '.sdat')\n", (63307, 63340), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((64996, 65027), 'os.path.exists', 'os.path.exists', (["res_d['OUTPUT']"], {}), "(res_d['OUTPUT'])\n", (65010, 65027), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((65088, 65119), 'hlpr.exceptions.QError', 'Error', (['"""failed to get a result"""'], {}), "('failed to get a result')\n", (65093, 65119), True, 'from hlpr.exceptions import QError as Error\n'), ((66573, 66618), 'os.path.join', 'os.path.join', (['self.out_dir', "(layname + '.sdat')"], {}), "(self.out_dir, layname + '.sdat')\n", (66585, 66618), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((67835, 67866), 'os.path.exists', 
'os.path.exists', (["res_d['RESULT']"], {}), "(res_d['RESULT'])\n", (67849, 67866), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((67886, 67917), 'hlpr.exceptions.QError', 'Error', (['"""failed to get a result"""'], {}), "('failed to get a result')\n", (67891, 67917), True, 'from hlpr.exceptions import QError as Error\n'), ((73507, 73538), 'os.path.exists', 'os.path.exists', (["res_d['RESULT']"], {}), "(res_d['RESULT'])\n", (73521, 73538), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((73558, 73589), 'hlpr.exceptions.QError', 'Error', (['"""failed to get a result"""'], {}), "('failed to get a result')\n", (73563, 73589), True, 'from hlpr.exceptions import QError as Error\n'), ((76502, 76520), 'numpy.isnan', 'np.isnan', (['distance'], {}), '(distance)\n', (76510, 76520), True, 'import numpy as np\n'), ((76540, 76563), 'hlpr.exceptions.QError', 'Error', (['"""got no buffer!"""'], {}), "('got no buffer!')\n", (76545, 76563), True, 'from hlpr.exceptions import QError as Error\n'), ((90142, 90173), 'os.path.exists', 'os.path.exists', (["res_d['OUTPUT']"], {}), "(res_d['OUTPUT'])\n", (90156, 90173), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((90234, 90265), 'hlpr.exceptions.QError', 'Error', (['"""failed to get a result"""'], {}), "('failed to get a result')\n", (90239, 90265), True, 'from hlpr.exceptions import QError as Error\n'), ((95755, 95826), 'hlpr.exceptions.QError', 'Error', (['("""expected a list for fields, instead got \n %s""" % fieldn_l)'], {}), '("""expected a list for fields, instead got \n %s""" % fieldn_l)\n', (95760, 95826), True, 'from hlpr.exceptions import QError as Error\n'), ((103305, 103337), 'hlpr.exceptions.QError', 'Error', (['"""QGIS failed to initiate"""'], {}), "('QGIS failed to initiate')\n", (103310, 103337), True, 'from hlpr.exceptions import QError as Error\n'), ((113503, 113520), 'os.remove', 'os.remove', (['out_fp'], {}), '(out_fp)\n', (113512, 
113520), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((113625, 113635), 'hlpr.exceptions.QError', 'Error', (['msg'], {}), '(msg)\n', (113630, 113635), True, 'from hlpr.exceptions import QError as Error\n'), ((116409, 116454), 'hlpr.exceptions.QError', 'Error', (['"""cant allow none and expect all reals"""'], {}), "('cant allow none and expect all reals')\n", (116414, 116454), True, 'from hlpr.exceptions import QError as Error\n'), ((116729, 116792), 'hlpr.exceptions.QError', 'Error', (['("requested reindexer \'%s\' is not a field name" % reindex)'], {}), '("requested reindexer \'%s\' is not a field name" % reindex)\n', (116734, 116792), True, 'from hlpr.exceptions import QError as Error\n'), ((118836, 118879), 'hlpr.exceptions.QError', 'Error', (['"""no filter and data length mismatch"""'], {}), "('no filter and data length mismatch')\n", (118841, 118879), True, 'from hlpr.exceptions import QError as Error\n'), ((119195, 119218), 'hlpr.exceptions.QError', 'Error', (['"""no data found!"""'], {}), "('no data found!')\n", (119200, 119218), True, 'from hlpr.exceptions import QError as Error\n'), ((119864, 119930), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['fid_attvs'], {'orient': '"""index"""', 'columns': 'all_fnl'}), "(fid_attvs, orient='index', columns=all_fnl)\n", (119886, 119930), True, 'import pandas as pd\n'), ((120829, 120860), 'hlpr.exceptions.QError', 'Error', (['"""unrecognized fmt kwarg"""'], {}), "('unrecognized fmt kwarg')\n", (120834, 120860), True, 'from hlpr.exceptions import QError as Error\n'), ((122471, 122512), 'hlpr.exceptions.QError', 'Error', (['"""cant expect_all_reals AND dropna"""'], {}), "('cant expect_all_reals AND dropna')\n", (122476, 122512), True, 'from hlpr.exceptions import QError as Error\n'), ((122583, 122628), 'hlpr.exceptions.QError', 'Error', (['"""cant allow none and expect all reals"""'], {}), "('cant allow none and expect all reals')\n", (122588, 122628), True, 'from 
hlpr.exceptions import QError as Error\n'), ((123013, 123044), 'hlpr.exceptions.QError', 'Error', (['"""no field name provided"""'], {}), "('no field name provided')\n", (123018, 123044), True, 'from hlpr.exceptions import QError as Error\n'), ((126131, 126142), 'numpy.isnan', 'np.isnan', (['v'], {}), '(v)\n', (126139, 126142), True, 'import numpy as np\n'), ((126558, 126598), 'hlpr.exceptions.QError', 'Error', (['"""allow_none=FALSE and no results"""'], {}), "('allow_none=FALSE and no results')\n", (126563, 126598), True, 'from hlpr.exceptions import QError as Error\n'), ((129737, 129747), 'hlpr.exceptions.QError', 'Error', (['msg'], {}), '(msg)\n', (129742, 129747), True, 'from hlpr.exceptions import QError as Error\n'), ((131992, 132023), 'hlpr.exceptions.QError', 'Error', (['"""got non unique columns"""'], {}), "('got non unique columns')\n", (131997, 132023), True, 'from hlpr.exceptions import QError as Error\n'), ((133969, 134010), 'hlpr.exceptions.QError', 'Error', (['"""constructed layer has NoGeometry"""'], {}), "('constructed layer has NoGeometry')\n", (133974, 134010), True, 'from hlpr.exceptions import QError as Error\n'), ((135699, 135748), 'hlpr.exceptions.QError', 'Error', (['"""passed geo_d and data indexes dont match"""'], {}), "('passed geo_d and data indexes dont match')\n", (135704, 135748), True, 'from hlpr.exceptions import QError as Error\n'), ((136337, 136368), 'hlpr.exceptions.QError', 'Error', (['"""expected integer index"""'], {}), "('expected integer index')\n", (136342, 136368), True, 'from hlpr.exceptions import QError as Error\n'), ((139700, 139746), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['qvlayd'], {'orient': '"""index"""'}), "(qvlayd, orient='index')\n", (139722, 139746), True, 'import pandas as pd\n'), ((142812, 142828), 'pandas.isnull', 'pd.isnull', (['value'], {}), '(value)\n', (142821, 142828), True, 'import pandas as pd\n'), ((143742, 143773), 'numpy.array_equal', 'np.array_equal', (['nfid_ar', 'fid_ar'], 
{}), '(nfid_ar, fid_ar)\n', (143756, 143773), True, 'import numpy as np\n'), ((145411, 145427), 'pandas.isnull', 'pd.isnull', (['value'], {}), '(value)\n', (145420, 145427), True, 'import pandas as pd\n'), ((148300, 148328), 'hlpr.exceptions.QError', 'Error', (['"""field name too long"""'], {}), "('field name too long')\n", (148305, 148328), True, 'from hlpr.exceptions import QError as Error\n'), ((151481, 151516), 'hlpr.exceptions.QError', 'Error', (['"""unrecognized id1_objs type"""'], {}), "('unrecognized id1_objs type')\n", (151486, 151516), True, 'from hlpr.exceptions import QError as Error\n'), ((153854, 153884), 'hlpr.exceptions.QError', 'Error', (['"""unrecognized id1_type"""'], {}), "('unrecognized id1_type')\n", (153859, 153884), True, 'from hlpr.exceptions import QError as Error\n'), ((154560, 154595), 'hlpr.exceptions.QError', 'Error', (['"""unrecognized id1_objs type"""'], {}), "('unrecognized id1_objs type')\n", (154565, 154595), True, 'from hlpr.exceptions import QError as Error\n'), ((156488, 156534), 'hlpr.exceptions.QError', 'Error', (['("""failed oto convert w/ \n %s""" % e)'], {}), '("""failed oto convert w/ \n %s""" % e)\n', (156493, 156534), True, 'from hlpr.exceptions import QError as Error\n'), ((8870, 8902), 'hlpr.exceptions.QError', 'Error', (['"""QGIS failed to initiate"""'], {}), "('QGIS failed to initiate')\n", (8875, 8902), True, 'from hlpr.exceptions import QError as Error\n'), ((22768, 22778), 'hlpr.exceptions.QError', 'Error', (['msg'], {}), '(msg)\n', (22773, 22778), True, 'from hlpr.exceptions import QError as Error\n'), ((28577, 28594), 'os.remove', 'os.remove', (['out_fp'], {}), '(out_fp)\n', (28586, 28594), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((28707, 28717), 'hlpr.exceptions.QError', 'Error', (['msg'], {}), '(msg)\n', (28712, 28717), True, 'from hlpr.exceptions import QError as Error\n'), ((34513, 34529), 'pandas.isnull', 'pd.isnull', (['value'], {}), '(value)\n', (34522, 34529), True, 
'import pandas as pd\n'), ((36710, 36751), 'hlpr.exceptions.QError', 'Error', (['"""constructed layer has NoGeometry"""'], {}), "('constructed layer has NoGeometry')\n", (36715, 36751), True, 'from hlpr.exceptions import QError as Error\n'), ((45464, 45488), 'hlpr.exceptions.QError', 'Error', (['"""not implmeneted"""'], {}), "('not implmeneted')\n", (45469, 45488), True, 'from hlpr.exceptions import QError as Error\n'), ((48631, 48653), 'hlpr.exceptions.QError', 'Error', (['"""got no joins!"""'], {}), "('got no joins!')\n", (48636, 48653), True, 'from hlpr.exceptions import QError as Error\n'), ((63528, 63542), 'os.remove', 'os.remove', (['ofp'], {}), '(ofp)\n', (63537, 63542), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((63583, 63593), 'hlpr.exceptions.QError', 'Error', (['msg'], {}), '(msg)\n', (63588, 63593), True, 'from hlpr.exceptions import QError as Error\n'), ((64048, 64058), 'hlpr.exceptions.QError', 'Error', (['msg'], {}), '(msg)\n', (64053, 64058), True, 'from hlpr.exceptions import QError as Error\n'), ((64102, 64122), 'os.path.dirname', 'os.path.dirname', (['ofp'], {}), '(ofp)\n', (64117, 64122), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((64149, 64169), 'os.path.dirname', 'os.path.dirname', (['ofp'], {}), '(ofp)\n', (64164, 64169), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((66660, 66680), 'os.path.dirname', 'os.path.dirname', (['ofp'], {}), '(ofp)\n', (66675, 66680), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((66773, 66793), 'os.path.dirname', 'os.path.dirname', (['ofp'], {}), '(ofp)\n', (66788, 66793), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((66974, 66988), 'os.remove', 'os.remove', (['ofp'], {}), '(ofp)\n', (66983, 66988), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((67029, 67039), 'hlpr.exceptions.QError', 'Error', (['msg'], {}), '(msg)\n', (67034, 
67039), True, 'from hlpr.exceptions import QError as Error\n'), ((80247, 80257), 'hlpr.exceptions.QError', 'Error', (['msg'], {}), '(msg)\n', (80252, 80257), True, 'from hlpr.exceptions import QError as Error\n'), ((81860, 81870), 'hlpr.exceptions.QError', 'Error', (['msg'], {}), '(msg)\n', (81865, 81870), True, 'from hlpr.exceptions import QError as Error\n'), ((95113, 95158), 'hlpr.exceptions.QError', 'Error', (['("unrecognized fieldn_l\'%s\'" % fieldn_l)'], {}), '("unrecognized fieldn_l\'%s\'" % fieldn_l)\n', (95118, 95158), True, 'from hlpr.exceptions import QError as Error\n'), ((98522, 98547), 'hlpr.exceptions.QError', 'Error', (['"""nothing selected"""'], {}), "('nothing selected')\n", (98527, 98547), True, 'from hlpr.exceptions import QError as Error\n'), ((110991, 111008), 'os.path.split', 'os.path.split', (['fp'], {}), '(fp)\n', (111004, 111008), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((127276, 127301), 'pandas.Series', 'pd.Series', (['d'], {'name': 'fieldn'}), '(d, name=fieldn)\n', (127285, 127301), True, 'import pandas as pd\n'), ((135491, 135515), 'hlpr.exceptions.QError', 'Error', (['"""unexpected type"""'], {}), "('unexpected type')\n", (135496, 135515), True, 'from hlpr.exceptions import QError as Error\n'), ((138059, 138099), 'hlpr.exceptions.QError', 'Error', (['"""failed to create all the fields"""'], {}), "('failed to create all the fields')\n", (138064, 138099), True, 'from hlpr.exceptions import QError as Error\n'), ((139843, 139906), 'hlpr.exceptions.QError', 'Error', (["('got some nulls on the geometry fields: %s' % geo_fn_tup)"], {}), "('got some nulls on the geometry fields: %s' % geo_fn_tup)\n", (139848, 139906), True, 'from hlpr.exceptions import QError as Error\n'), ((139977, 139994), 'copy.copy', 'copy.copy', (['qvlayd'], {}), '(qvlayd)\n', (139986, 139994), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((140651, 140692), 'hlpr.exceptions.QError', 'Error', (["('got 
invalid geometry on %i' % fid)"], {}), "('got invalid geometry on %i' % fid)\n", (140656, 140692), True, 'from hlpr.exceptions import QError as Error\n'), ((143263, 143294), 'hlpr.exceptions.QError', 'Error', (['"""failed to setAttribute"""'], {}), "('failed to setAttribute')\n", (143268, 143294), True, 'from hlpr.exceptions import QError as Error\n'), ((143889, 143910), 'hlpr.exceptions.QError', 'Error', (['"""fid mismatch"""'], {}), "('fid mismatch')\n", (143894, 143910), True, 'from hlpr.exceptions import QError as Error\n'), ((145550, 145572), 'inspect.isclass', 'inspect.isclass', (['value'], {}), '(value)\n', (145565, 145572), False, 'import inspect\n'), ((146312, 146336), 'hlpr.exceptions.QError', 'Error', (['"""length mismatch"""'], {}), "('length mismatch')\n", (146317, 146336), True, 'from hlpr.exceptions import QError as Error\n'), ((152063, 152088), 'hlpr.exceptions.QError', 'Error', (['""" not implemented"""'], {}), "(' not implemented')\n", (152068, 152088), True, 'from hlpr.exceptions import QError as Error\n'), ((152646, 152676), 'hlpr.exceptions.QError', 'Error', (['"""unrecognized id1_type"""'], {}), "('unrecognized id1_type')\n", (152651, 152676), True, 'from hlpr.exceptions import QError as Error\n'), ((153309, 153359), 'hlpr.exceptions.QError', 'Error', (['("got non unique \'%s\' on fid_fval_d" % dname)'], {}), '("got non unique \'%s\' on fid_fval_d" % dname)\n', (153314, 153359), True, 'from hlpr.exceptions import QError as Error\n'), ((15190, 15207), 'os.path.split', 'os.path.split', (['fp'], {}), '(fp)\n', (15203, 15207), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((18410, 18427), 'os.path.split', 'os.path.split', (['fp'], {}), '(fp)\n', (18423, 18427), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((35016, 35047), 'hlpr.exceptions.QError', 'Error', (['"""failed to setAttribute"""'], {}), "('failed to setAttribute')\n", (35021, 35047), True, 'from hlpr.exceptions import QError as 
Error\n'), ((48260, 48296), 'hlpr.exceptions.QError', 'Error', (['"""in and out fcnts dont match"""'], {}), "('in and out fcnts dont match')\n", (48265, 48296), True, 'from hlpr.exceptions import QError as Error\n'), ((48763, 48798), 'hlpr.exceptions.QError', 'Error', (['"""no joins but got some hits"""'], {}), "('no joins but got some hits')\n", (48768, 48798), True, 'from hlpr.exceptions import QError as Error\n'), ((66727, 66747), 'os.path.dirname', 'os.path.dirname', (['ofp'], {}), '(ofp)\n', (66742, 66747), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((93665, 93689), 'pandas.Series', 'pd.Series', (['d'], {'name': 'ncoln'}), '(d, name=ncoln)\n', (93674, 93689), True, 'import pandas as pd\n'), ((119348, 119362), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (119360, 119362), True, 'import pandas as pd\n'), ((119403, 119431), 'hlpr.exceptions.QError', 'Error', (['"""unexpected fmt type"""'], {}), "('unexpected fmt type')\n", (119408, 119431), True, 'from hlpr.exceptions import QError as Error\n'), ((127376, 127403), 'hlpr.exceptions.QError', 'Error', (['"""expected singleton"""'], {}), "('expected singleton')\n", (127381, 127403), True, 'from hlpr.exceptions import QError as Error\n'), ((127486, 127511), 'pandas.Series', 'pd.Series', (['d'], {'name': 'fieldn'}), '(d, name=fieldn)\n', (127495, 127511), True, 'import pandas as pd\n'), ((14408, 14428), 'inspect.isbuiltin', 'inspect.isbuiltin', (['v'], {}), '(v)\n', (14425, 14428), False, 'import inspect\n'), ((99505, 99542), 'hlpr.exceptions.QError', 'Error', (['"""unexpected result_type kwarg"""'], {}), "('unexpected result_type kwarg')\n", (99510, 99542), True, 'from hlpr.exceptions import QError as Error\n'), ((120407, 120454), 'pandas.Series', 'pd.Series', (['df.index'], {'index': 'df.index', 'name': '"""fid"""'}), "(df.index, index=df.index, name='fid')\n", (120416, 120454), True, 'import pandas as pd\n'), ((63142, 63160), 'os.path.split', 'os.path.split', (['ofp'], {}), 
'(ofp)\n', (63155, 63160), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((66418, 66436), 'os.path.split', 'os.path.split', (['ofp'], {}), '(ofp)\n', (66431, 66436), False, 'import os, configparser, logging, inspect, copy, datetime, re\n'), ((153262, 153273), 'numpy.array', 'np.array', (['l'], {}), '(l)\n', (153270, 153273), True, 'import numpy as np\n')]
|
import pytest
from tests.unit import a_pkg_import
@pytest.fixture(scope="function")
def read_meta(a_pkg_import):
    """Fixture: import the package under test and hand back its metadata object."""
    return a_pkg_import().pkg.meta
def test_meta(read_meta):
expected = {
"name": "a-pkg",
"version": "1.2.3",
"author": "<NAME>",
"author-email": "<EMAIL>",
"summary": "A description",
}
for name, value in expected.items():
assert getattr(read_meta, name) == value
|
[
"pytest.fixture",
"tests.unit.a_pkg_import"
] |
[((54, 86), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (68, 86), False, 'import pytest\n'), ((128, 142), 'tests.unit.a_pkg_import', 'a_pkg_import', ([], {}), '()\n', (140, 142), False, 'from tests.unit import a_pkg_import\n')]
|
#!/usr/bin/env python3
# 6a-render-model3.py - investigate delauney triangulation for
# individual image surface mesh generation.
# for all the images in the fitted group, generate a 2d polygon
# surface fit. Then project the individual images onto this surface
# and generate an AC3D model.
#
# Note: insufficient image overlap (or long linear image match chains)
# are not good. Ideally we would have a nice mesh of match pairs for
# best results.
#
# this script can also project onto the SRTM surface, or a flat ground
# elevation plane.
import argparse
import cv2
import pickle
import math
import numpy as np
import os.path
import scipy.spatial
from props import getNode
from lib import groups
from lib import panda3d
from lib import project
from lib import srtm
from lib import transformations
# --- run-time configuration and data loading (top-level script section) ---
mesh_steps = 8 # 1 = corners only; grid points per image edge for the surface mesh
r2d = 180 / math.pi  # radians -> degrees conversion factor
tolerance = 0.5      # max surface fit error (meters) before adding more points
parser = argparse.ArgumentParser(description='Set the initial camera poses.')
parser.add_argument('project', help='project directory')
parser.add_argument('--group', type=int, default=0, help='group index')
parser.add_argument('--texture-resolution', type=int, default=512, help='texture resolution (should be 2**n, so numbers like 256, 512, 1024, etc.')
parser.add_argument('--srtm', action='store_true', help='use srtm elevation')
parser.add_argument('--ground', type=float, help='force ground elevation in meters')
parser.add_argument('--direct', action='store_true', help='use direct pose')
args = parser.parse_args()
proj = project.ProjectMgr(args.project)
proj.load_images_info()
# lookup ned reference (lat/lon/alt origin of the local NED frame)
ref_node = getNode("/config/ned_reference", True)
ref = [ ref_node.getFloat('lat_deg'),
        ref_node.getFloat('lon_deg'),
        ref_node.getFloat('alt_m') ]
# setup SRTM ground interpolator
srtm.initialize( ref, 6000, 6000, 30 )
width, height = proj.cam.get_image_params()
print("Loading optimized match points ...")
matches = pickle.load( open( os.path.join(proj.analysis_dir, "matches_grouped"), "rb" ) )
# load the group connections within the image set
group_list = groups.load(proj.analysis_dir)
# initialize temporary structures for vanity stats
for image in proj.image_list:
    image.sum_values = 0.0
    image.sum_count = 0.0
    image.max_z = -9999.0
    image.min_z = 9999.0
    image.pool_xy = []
    image.pool_z = []
    image.pool_uv = []
    image.fit_xy = []
    image.fit_z = []
    image.fit_uv = []
    image.fit_edge = []
# sort through points to build a global list of feature coordinates
# and a per-image list of feature coordinates
print('Reading feature locations from optimized match points ...')
# NOTE(review): match record appears to be [ned, group_id, (image_idx, uv), ...]
# based on the indexing below — confirm against the matches_grouped writer.
raw_points = []
raw_values = []
for match in matches:
    if match[1] == args.group and len(match[2:]) > 2: # used by current group
        ned = match[0]
        # global surface samples are stored as [east, north] with elevation -down
        raw_points.append( [ned[1], ned[0]] )
        raw_values.append( ned[2] )
        for m in match[2:]:
            if proj.image_list[m[0]].name in group_list[args.group]:
                image = proj.image_list[ m[0] ]
                image.pool_xy.append( [ned[1], ned[0]] )
                image.pool_z.append( -ned[2] )
                image.pool_uv.append( m[1] )
                z = -ned[2]
                image.sum_values += z
                image.sum_count += 1
                if z < image.min_z:
                    image.min_z = z
                    #print(min_z, match)
                if z > image.max_z:
                    image.max_z = z
                    #print(max_z, match)
# optimized camera intrinsics and distortion coefficients (used by undistort())
K = proj.cam.get_K(optimized=True)
dist_coeffs = np.array(proj.cam.get_dist_coeffs(optimized=True))
def undistort(uv_orig):
    """Undistort a single pixel coordinate with the optimized camera model.

    cv2.undistortPoints() expects a (1, 1, 2) float32 array; passing P=K
    keeps the result in pixel coordinates.  Returns the corrected [u, v].
    """
    pts = np.array([[[uv_orig[0], uv_orig[1]]]], dtype=np.float32)
    corrected = cv2.undistortPoints(pts, K, dist_coeffs, P=K)
    return corrected[0][0]
# cull points from the per-image pool that project outside the grid boundaries
# NOTE(review): this loop only prints "out of range" — it never actually
# removes the offending points from the pool; confirm whether the deletion
# code was intentionally disabled.
for image in proj.image_list:
    size = len(image.pool_uv)
    for i in reversed(range(len(image.pool_uv))): # iterate in reverse order
        uv_new = undistort(image.pool_uv[i])
        if uv_new[0] < 0 or uv_new[0] >= width or uv_new[1] < 0 or uv_new[1] >= height:
            print("out of range")
# build a global 2d Delaunay triangulation over all feature (east, north)
# points and a linear interpolator for elevation queries against it
print('Generating Delaunay mesh and interpolator ...')
print(len(raw_points))
global_tri_list = scipy.spatial.Delaunay(np.array(raw_points))
interp = scipy.interpolate.LinearNDInterpolator(global_tri_list, raw_values)
def intersect2d(ned, v, avg_ground):
    """Iteratively intersect a projection ray with the triangulated surface.

    ned: camera position in NED coordinates.
    v: unit-ish projection vector (nd, ed, dd); dd must be positive
       (pointing down) or the camera position itself is returned.
    avg_ground: fallback surface elevation when the interpolator returns NaN.

    Returns the intersection point [n, e, d], or [nan, nan, nan] when the
    ray meets the surface at a shallow angle (< 30 deg above horizon).
    """
    p = ned[:] # copy
    # sanity check (always assume camera pose is above ground!)
    if v[2] <= 0.0:
        return p
    eps = 0.01
    count = 0
    #print("start:", p)
    #print("vec:", v)
    #print("ned:", ned)
    # query the global Delaunay interpolator; note it takes (east, north)
    tmp = interp([p[1], p[0]])[0]
    if not np.isnan(tmp):
        surface = tmp
    else:
        print("Notice: starting vector intersect with avg ground elev:", avg_ground)
        surface = avg_ground
    error = abs(p[2] - surface)
    #print("p=%s surface=%s error=%s" % (p, surface, error))
    # fixed-point iteration: re-project the ray to the current surface
    # estimate until converged (or give up after 25 steps)
    while error > eps and count < 25:
        d_proj = -(ned[2] - surface)
        factor = d_proj / v[2]
        n_proj = v[0] * factor
        e_proj = v[1] * factor
        #print(" proj = %s %s" % (n_proj, e_proj))
        p = [ ned[0] + n_proj, ned[1] + e_proj, ned[2] + d_proj ]
        #print(" new p:", p)
        tmp = interp([p[1], p[0]])[0]
        if not np.isnan(tmp):
            surface = tmp
        error = abs(p[2] - surface)
        #print(" p=%s surface=%.2f error = %.3f" % (p, surface, error))
        count += 1
    #print("surface:", surface)
    #if np.isnan(surface):
    #    #print(" returning nans")
    #    return [np.nan, np.nan, np.nan]
    dy = ned[0] - p[0]
    dx = ned[1] - p[1]
    dz = ned[2] - p[2]
    dist = math.sqrt(dx*dx+dy*dy)
    angle = math.atan2(-dz, dist) * r2d # relative to horizon
    if angle < 30:
        # NOTE(review): condition rejects *shallow* rays (< 30 deg above
        # horizon) but the message says "high angle" — confirm wording.
        print("  returning high angle nans:", angle)
        return [np.nan, np.nan, np.nan]
    else:
        return p
def intersect_vectors(ned, v_list, avg_ground):
    """Intersect every projection vector in *v_list* (from camera position
    *ned*) with the fitted surface; returns the list of NED points."""
    return [intersect2d(ned, vec.flatten(), avg_ground) for vec in v_list]
# per-image average feature elevation (used as the fallback ground level)
for image in proj.image_list:
    if image.sum_count > 0:
        image.z_avg = image.sum_values / float(image.sum_count)
        # print(image.name, 'avg elev:', image.z_avg)
    else:
        image.z_avg = 0
# compute the uv grid for each image and project each point out into
# ned space, then intersect each vector with the srtm / ground /
# delauney surface.
#for group in group_list:
if True:
    group = group_list[args.group]
    #if len(group) < 3:
    #    continue
    for name in group:
        image = proj.findImageByName(name)
        print(image.name, image.z_avg)
        # scale the K matrix if we have scaled the images
        K = proj.cam.get_K(optimized=True)
        IK = np.linalg.inv(K)
        # build a ring of uv sample points around the image border
        grid_list = []
        u_list = np.linspace(0, width, mesh_steps + 1)
        v_list = np.linspace(0, height, mesh_steps + 1)
        # horizontal edges
        for u in u_list:
            grid_list.append( [u, 0] )
            grid_list.append( [u, height] )
        # vertical edges (minus corners)
        for v in v_list[1:-1]:
            grid_list.append( [0, v] )
            grid_list.append( [width, v] )
        #print('grid_list:', grid_list)
        # NOTE(review): the redistort() result is immediately overwritten by
        # the raw grid_list — the first assignment is a dead store; confirm
        # which of the two was intended.
        distorted_uv = proj.redistort(grid_list, optimized=True)
        distorted_uv = grid_list
        # project the border uv points out into NED-space vectors using
        # either the direct (raw) or optimized camera pose
        if args.direct:
            proj_list = project.projectVectors( IK, image.get_body2ned(),
                                                image.get_cam2body(),
                                                grid_list )
        else:
            #print(image.get_body2ned(opt=True))
            proj_list = project.projectVectors( IK,
                                                image.get_body2ned(opt=True),
                                                image.get_cam2body(),
                                                grid_list )
        #print 'proj_list:', proj_list
        if args.direct:
            ned, ypr, quat = image.get_camera_pose()
        else:
            ned, ypr, quat = image.get_camera_pose(opt=True)
        #print('cam orig:', image.camera_pose['ned'], 'optimized:', ned)
        # intersect the projection vectors with the selected surface model
        if args.ground:
            pts_ned = project.intersectVectorsWithGroundPlane(ned,
                                                              args.ground,
                                                              proj_list)
        elif args.srtm:
            pts_ned = srtm.interpolate_vectors(ned, proj_list)
        else:
            # intersect with our polygon surface approximation
            pts_ned = intersect_vectors(ned, proj_list, -image.z_avg)
        #print(image.name, "pts_3d (ned):\n", pts_ned)
        # convert ned to xyz and stash the result for each image
        image.grid_list = []
        for p in pts_ned:
            image.fit_xy.append([p[1], p[0]])
            image.fit_z.append(-p[2])
            image.fit_edge.append(True)
        image.fit_uv = distorted_uv
        print('len:', len(image.fit_xy), len(image.fit_z), len(image.fit_uv))
# Triangle fit algorithm: greedily grow each image's surface mesh by adding
# the pooled feature point with the largest elevation error until all
# remaining errors fall under `tolerance`.
group = group_list[args.group]
#if len(group) < 3:
#    continue
for name in group:
    image = proj.findImageByName(name)
    print(image.name, image.z_avg)
    done = False
    dist_uv = []
    while not done:
        tri_list = scipy.spatial.Delaunay(np.array(image.fit_xy))
        interp = scipy.interpolate.LinearNDInterpolator(tri_list, image.fit_z)
        # find the point in the pool furthest from the triangulated surface
        next_index = None
        max_error = 0.0
        for i, pt in enumerate(image.pool_xy):
            z = interp(image.pool_xy[i])[0]
            if not np.isnan(z):
                error = abs(z - image.pool_z[i])
                if error > max_error:
                    max_error = error
                    next_index = i
        if max_error > tolerance:
            print("adding index:", next_index, "error:", max_error)
            image.fit_xy.append(image.pool_xy[next_index])
            image.fit_z.append(image.pool_z[next_index])
            image.fit_uv.append(image.pool_uv[next_index])
            image.fit_edge.append(False)
            del image.pool_xy[next_index]
            del image.pool_z[next_index]
            del image.pool_uv[next_index]
        else:
            print("finished")
            done = True
    # NOTE(review): dist_uv is never populated above, so this extend() is a
    # no-op as written — confirm whether distorted points were meant to be
    # accumulated during the fit loop.
    image.fit_uv.extend(proj.undistort_uvlist(image, dist_uv))
    print(name, 'len:', len(image.fit_xy), len(image.fit_z), len(image.fit_uv))
# generate the panda3d egg models from the fitted per-image meshes
dir_node = getNode('/config/directories', True)
img_src_dir = dir_node.getString('images_source')
panda3d.generate_from_fit(proj, group_list[args.group], src_dir=img_src_dir,
                          analysis_dir=proj.analysis_dir,
                          resolution=args.texture_resolution)
|
[
"lib.project.intersectVectorsWithGroundPlane",
"cv2.undistortPoints",
"argparse.ArgumentParser",
"math.sqrt",
"math.atan2",
"lib.srtm.interpolate_vectors",
"lib.panda3d.generate_from_fit",
"lib.groups.load",
"numpy.zeros",
"numpy.isnan",
"lib.project.ProjectMgr",
"numpy.array",
"numpy.linalg.inv",
"numpy.linspace",
"props.getNode",
"lib.srtm.initialize"
] |
[((905, 973), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Set the initial camera poses."""'}), "(description='Set the initial camera poses.')\n", (928, 973), False, 'import argparse\n'), ((1527, 1559), 'lib.project.ProjectMgr', 'project.ProjectMgr', (['args.project'], {}), '(args.project)\n', (1545, 1559), False, 'from lib import project\n'), ((1619, 1657), 'props.getNode', 'getNode', (['"""/config/ned_reference"""', '(True)'], {}), "('/config/ned_reference', True)\n", (1626, 1657), False, 'from props import getNode\n'), ((1807, 1843), 'lib.srtm.initialize', 'srtm.initialize', (['ref', '(6000)', '(6000)', '(30)'], {}), '(ref, 6000, 6000, 30)\n', (1822, 1843), False, 'from lib import srtm\n'), ((2090, 2120), 'lib.groups.load', 'groups.load', (['proj.analysis_dir'], {}), '(proj.analysis_dir)\n', (2101, 2120), False, 'from lib import groups\n'), ((10779, 10815), 'props.getNode', 'getNode', (['"""/config/directories"""', '(True)'], {}), "('/config/directories', True)\n", (10786, 10815), False, 'from props import getNode\n'), ((10866, 11014), 'lib.panda3d.generate_from_fit', 'panda3d.generate_from_fit', (['proj', 'group_list[args.group]'], {'src_dir': 'img_src_dir', 'analysis_dir': 'proj.analysis_dir', 'resolution': 'args.texture_resolution'}), '(proj, group_list[args.group], src_dir=img_src_dir,\n analysis_dir=proj.analysis_dir, resolution=args.texture_resolution)\n', (10891, 11014), False, 'from lib import panda3d\n'), ((3703, 3740), 'numpy.zeros', 'np.zeros', (['(1, 1, 2)'], {'dtype': 'np.float32'}), '((1, 1, 2), dtype=np.float32)\n', (3711, 3740), True, 'import numpy as np\n'), ((3826, 3874), 'cv2.undistortPoints', 'cv2.undistortPoints', (['uv_raw', 'K', 'dist_coeffs'], {'P': 'K'}), '(uv_raw, K, dist_coeffs, P=K)\n', (3845, 3874), False, 'import cv2\n'), ((4450, 4470), 'numpy.array', 'np.array', (['raw_points'], {}), '(raw_points)\n', (4458, 4470), True, 'import numpy as np\n'), ((5861, 5889), 'math.sqrt', 'math.sqrt', (['(dx * dx 
+ dy * dy)'], {}), '(dx * dx + dy * dy)\n', (5870, 5889), False, 'import math\n'), ((4856, 4869), 'numpy.isnan', 'np.isnan', (['tmp'], {}), '(tmp)\n', (4864, 4869), True, 'import numpy as np\n'), ((5896, 5917), 'math.atan2', 'math.atan2', (['(-dz)', 'dist'], {}), '(-dz, dist)\n', (5906, 5917), False, 'import math\n'), ((6976, 6992), 'numpy.linalg.inv', 'np.linalg.inv', (['K'], {}), '(K)\n', (6989, 6992), True, 'import numpy as np\n'), ((7034, 7071), 'numpy.linspace', 'np.linspace', (['(0)', 'width', '(mesh_steps + 1)'], {}), '(0, width, mesh_steps + 1)\n', (7045, 7071), True, 'import numpy as np\n'), ((7089, 7127), 'numpy.linspace', 'np.linspace', (['(0)', 'height', '(mesh_steps + 1)'], {}), '(0, height, mesh_steps + 1)\n', (7100, 7127), True, 'import numpy as np\n'), ((5477, 5490), 'numpy.isnan', 'np.isnan', (['tmp'], {}), '(tmp)\n', (5485, 5490), True, 'import numpy as np\n'), ((8427, 8495), 'lib.project.intersectVectorsWithGroundPlane', 'project.intersectVectorsWithGroundPlane', (['ned', 'args.ground', 'proj_list'], {}), '(ned, args.ground, proj_list)\n', (8466, 8495), False, 'from lib import project\n'), ((9561, 9583), 'numpy.array', 'np.array', (['image.fit_xy'], {}), '(image.fit_xy)\n', (9569, 9583), True, 'import numpy as np\n'), ((8666, 8706), 'lib.srtm.interpolate_vectors', 'srtm.interpolate_vectors', (['ned', 'proj_list'], {}), '(ned, proj_list)\n', (8690, 8706), False, 'from lib import srtm\n'), ((9900, 9911), 'numpy.isnan', 'np.isnan', (['z'], {}), '(z)\n', (9908, 9911), True, 'import numpy as np\n')]
|
# coding:utf-8:
from lisp.sexpressions import (
SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol,
SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList,
SexprBool, bool_value, num_value, string_value, is_keyword,
intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr,
is_null, build_list, is_number,
)
def generic_fold(py2lisp, lisp2py, op, z, lst):
    """Left-fold *op* over *lst*, converting each element to a Python value
    with *lisp2py* first, starting from *z*; wrap the result with *py2lisp*."""
    acc = z
    for value in map(lisp2py, lst):
        acc = op(acc, value)
    return py2lisp(acc)
def num_fold(op, z, lst):
    """Fold numeric s-expressions with *op*; result is a SexprNumber."""
    return generic_fold(py2lisp=SexprNumber, lisp2py=num_value, op=op, z=z, lst=lst)
def bool_fold(op, z, lst):
    """Fold boolean s-expressions with *op*; result is a SexprBool."""
    return generic_fold(py2lisp=SexprBool, lisp2py=bool_value, op=op, z=z, lst=lst)
def string_fold(op, z, lst):
    """Fold string s-expressions with *op*; result is a SexprString."""
    return generic_fold(py2lisp=SexprString, lisp2py=string_value, op=op, z=z, lst=lst)
def procedure_definition(environment, x):
    """Return the source form of a procedure: ``(fun <params> <body>...)``
    for an anonymous procedure, or ``(def (<name> <params>) <body>...)``
    when *x* is a symbol bound to a procedure in *environment*."""
    if isinstance(x, SexprProcedure):
        return SexprCons(intern_symbol('fun'),
                         SexprCons(x.parameters(), x.body()))
    if isinstance(x, SexprSymbol):
        p = env_lookup(environment, x)
        # isinstance() is False for None, so this also covers a missing binding
        if not isinstance(p, SexprProcedure):
            raise Exception(u'El símbolo no está ligado a un procedimiento.')
        return SexprCons(intern_symbol('def'),
                         SexprCons(SexprCons(x, p.parameters()), p.body()))
    raise Exception(u'Se esperaba un símbolo o booleano')
def global_environment():
    """Build the top-level environment populated with the builtin procedures.

    The environment is a cons list of ribs; the global rib is a dict for
    O(1) lookup.  Each builtin is registered via the local helper db().
    """
    env = SexprCons({}, SexprNil())
    def db(name, function):
        # register one builtin under `name`
        env_define(env, intern_symbol(name), SexprBuiltin(name, function))
    # arithmetic
    db('+', lambda env, *args: num_fold(lambda x, y: x + y, 0, args))
    db('-', lambda env, a, b: SexprNumber(num_value(a) - num_value(b)))
    db('*', lambda env, *args: num_fold(lambda x, y: x * y, 1, args))
    db('/', lambda env, a, b: SexprNumber(num_value(a) / num_value(b)))
    db('%', lambda env, a, b: SexprNumber(num_value(a) % num_value(b)))
    # boolean logic
    db('and', lambda env, *args: bool_fold(lambda x, y: x and y, True, args))
    db('&&', lambda env, *args: bool_fold(lambda x, y: x and y, True, args))
    db('not', lambda env, x: SexprBool(not bool_value(x)))
    db('=', lambda env, x, y: SexprBool(is_number(x) and is_number(y) and num_value(x) == num_value(y)))
    db('or', lambda env, *args: bool_fold(lambda x, y: x or y, False, args))
    db('||', lambda env, *args: bool_fold(lambda x, y: x or y, False, args))
    # numeric comparison
    db('>', lambda env, x, y: SexprBool(num_value(x) > num_value(y)))
    db('>=', lambda env, x, y: SexprBool(num_value(x) >= num_value(y)))
    db('<', lambda env, x, y: SexprBool(num_value(x) < num_value(y)))
    db('<=', lambda env, x, y: SexprBool(num_value(x) <= num_value(y)))
    # strings, cons cells and lists
    db('str+', lambda env, *args: string_fold(lambda x, y: x + y, "", args))
    db('cons', lambda env, x, y: SexprCons(x, y))
    db('car', lambda env, x: car(x))
    db('cdr', lambda env, x: cdr(x))
    db('consp', lambda env, x: SexprBool(consp(x)))
    db('set-car', lambda env, x, y: set_car(x, y))
    db('set-cdr', lambda env, x, y: set_cdr(x, y))
    db('symbolp', lambda env, x: SexprBool(symbolp(x)))
    db('null', lambda env, x: SexprBool(is_null(x)))
    db('eq', lambda env, x, y: SexprBool(x == y))
    db('list', lambda env, *args: SexprList(args))
    # higher-order / reflective builtins
    db('apply', lambda env, f, *args: fun_apply(f, args, env))
    db('procedure-definition', procedure_definition)
    db('string->int', lambda env, x: SexprNumber(int(string_value(x))))
    return env
def rib_keys(rib):
    """Return the symbols bound in one environment rib.

    A rib is either a dict or a cons association list of (symbol . value).
    """
    if isinstance(rib, dict):
        return rib.keys()
    seen = {}
    node = rib
    while consp(node):
        seen[car(car(node))] = 1
        node = cdr(node)
    return seen.keys()
def rib_lookup(rib, symbol):
    """Look *symbol* up in one rib; return its value, or None when absent."""
    if isinstance(rib, dict):
        return rib.get(symbol, None)
    node = rib
    while consp(node):
        pair = car(node)
        if car(pair) == symbol:
            return cdr(pair)
        node = cdr(node)
    return None
def rib_define(rib, symbol, value):
    """Bind *symbol* to *value* in *rib*; return the (possibly new) rib.

    Dict ribs are mutated in place; cons ribs get a new cell prepended.
    """
    if not isinstance(rib, dict):
        return SexprCons(SexprCons(symbol, value), rib)
    rib[symbol] = value
    return rib
def rib_set(rib, symbol, value):
    """Mutate the binding of *symbol* in *rib* to *value* (in place)."""
    if isinstance(rib, dict):
        rib[symbol] = value
        return
    node = rib
    while consp(node):
        pair = car(node)
        if car(pair) == symbol:
            set_cdr(pair, value)
            return
        node = cdr(node)
def env_keys(environment):
    """Collect every symbol visible anywhere in *environment* (all ribs)."""
    found = {}
    frame = environment
    while consp(frame):
        for symbol in rib_keys(car(frame)):
            found[symbol] = 1
        frame = cdr(frame)
    return found.keys()
def env_lookup(environment, symbol):
    """Resolve *symbol* in *environment*, innermost rib first.

    Keywords evaluate to themselves.  Raises Exception when unbound.
    """
    if is_keyword(symbol):
        return symbol
    frame = environment
    while consp(frame):
        value = rib_lookup(car(frame), symbol)
        if value is not None:
            return value
        frame = cdr(frame)
    raise Exception('Variable no definida: ' + symbol_name(symbol))
def env_define(environment, symbol, value):
    """Define *symbol* as *value* in the innermost rib of *environment*.

    An existing binding in that rib is overwritten in place.
    """
    if is_keyword(symbol):
        raise Exception('No se puede definir una keyword.')
    if isinstance(value, SexprProcedure):
        # procedures remember the name they were bound to (for printing)
        value.set_name(symbol_name(symbol))
    rib = car(environment)
    if rib_lookup(rib, symbol) is not None:
        rib_set(rib, symbol, value)
    else:
        set_car(environment, rib_define(rib, symbol, value))
def env_set(environment, symbol, value):
    """Assign *value* to the nearest existing binding of *symbol*.

    Raises Exception when the symbol is unbound or is a keyword.
    """
    if is_keyword(symbol):
        raise Exception('No se puede definir una keyword.')
    frame = environment
    while consp(frame):
        rib = car(frame)
        if rib_lookup(rib, symbol) is not None:
            rib_set(rib, symbol, value)
            return
        frame = cdr(frame)
    raise Exception('Variable no definida: ' + symbol_name(symbol))
def env_bind(environment, parameters, arguments):
    """Extend *environment* with a fresh rib binding *parameters* to *arguments*.

    Supports a trailing rest parameter: when the parameter list is an
    improper list ending in a symbol, that symbol is bound to the remaining
    argument list.  Raises on arity mismatch or a malformed parameter list.
    """
    environment = SexprCons(SexprNil(), environment)
    # bind positional parameters one-to-one
    while consp(parameters):
        if not consp(arguments):
            raise Exception(u'Faltan parámetros')
        env_define(environment, car(parameters), car(arguments))
        parameters = cdr(parameters)
        arguments = cdr(arguments)
    if parameters == SexprNil():
        if arguments != SexprNil():
            raise Exception(u'Sobran parámetros')
    elif isinstance(parameters, SexprSymbol):
        # dotted rest parameter: capture all remaining arguments as a list
        env_define(environment, parameters, arguments)
    else:
        raise Exception(u'Lista de parámetros deforme')
    return environment
def first(expr):
    """Return the 1st element of a cons list."""
    return car(expr)
def second(expr):
    """Return the 2nd element of a cons list."""
    return car(cdr(expr))
def third(expr):
    """Return the 3rd element of a cons list."""
    return car(cdr(cdr(expr)))
def fourth(expr):
    """Return the 4th element of a cons list."""
    return car(cdr(cdr(cdr(expr))))
def eval_expression(expr, environment):
    """Evaluate one s-expression in *environment* and return its value.

    Numbers and strings are self-evaluating; symbols are looked up; cons
    lists dispatch on their head: the special forms quote/do/def/let/let*/
    set/if/fun, or otherwise a procedure application.
    """
    if isinstance(expr, SexprNumber):
        return expr
    elif isinstance(expr, SexprString):
        return expr
    elif isinstance(expr, SexprSymbol):
        return env_lookup(environment, expr)
    elif isinstance(expr, SexprCons):
        head = first(expr)
        if head == intern_symbol('quote'):
            # (quote x) -> x unevaluated
            return second(expr)
        elif head == intern_symbol('do'):
            # (do e1 e2 ...) -> evaluate in order, return the last value
            return eval_block(cdr(expr), environment)
        elif head == intern_symbol('def'):
            if consp(second(expr)):
                # sugar: (def (name params...) body...) rewrites to
                # (def name (fun (params...) body...))
                expr2 = SexprList([
                    intern_symbol('def'),
                    car(second(expr)),
                    SexprCons(
                        intern_symbol('fun'),
                        SexprCons(
                            cdr(second(expr)),
                            cdr(cdr(expr))
                        )
                    )
                ])
                return eval_expression(expr2, environment)
            else:
                value = eval_expression(third(expr), environment)
                env_define(environment, second(expr), value)
                return value
        elif head == intern_symbol('let'):
            # (let (v1 e1 v2 e2 ...) body...): initializers are evaluated in
            # the *outer* environment
            local_environment = SexprCons(SexprNil(), environment)
            decls = second(expr)
            body = cdr(cdr(expr))  # NOTE(review): unused; body is re-derived below
            while consp(decls):
                variable = first(decls)
                value = eval_expression(second(decls), environment)
                env_define(local_environment, variable, value)
                decls = cdr(cdr(decls))
            return eval_block(cdr(cdr(expr)), local_environment)
        elif head == intern_symbol('let*'):
            # like let, but initializers see the bindings made so far
            local_environment = SexprCons(SexprNil(), environment)
            decls = second(expr)
            body = cdr(cdr(expr))  # NOTE(review): unused; body is re-derived below
            while consp(decls):
                variable = first(decls)
                value = eval_expression(second(decls), local_environment)
                env_define(local_environment, variable, value)
                decls = cdr(cdr(decls))
            return eval_block(cdr(cdr(expr)), local_environment)
        elif head == intern_symbol('set'):
            value = eval_expression(third(expr), environment)
            env_set(environment, second(expr), value)
            return value
        elif head == intern_symbol('if'):
            # (if c1 e1 c2 e2 ... [else-expr]): multi-branch conditional;
            # a trailing lone expression acts as the else clause
            rest = cdr(expr)
            while consp(rest):
                if consp(cdr(rest)):
                    cond = eval_expression(first(rest), environment)
                    if cond != SexprFalse():
                        return eval_expression(second(rest), environment)
                    rest = cdr(cdr(rest))
                else:
                    return eval_expression(car(rest), environment)
            return SexprNil()
        elif head == intern_symbol('fun'):
            # (fun (params...) body...) -> closure over the current environment
            return SexprProcedure(environment, second(expr), cdr(cdr(expr)))
        else:
            # ordinary application: evaluate operator and operands, then apply
            function = eval_expression(head, environment)
            arguments = eval_list(cdr(expr), environment)
            return eval_application(function, arguments, environment)
    else:
        raise Exception(u'Expresión no reconocida: ' + repr(expr))
def eval_block(block, environment):
    """Evaluate each expression in *block* in order; return the last value
    (nil for an empty block)."""
    result = SexprNil()
    rest = block
    while consp(rest):
        result = eval_expression(car(rest), environment)
        rest = cdr(rest)
    return result
def eval_list(expr, environment):
    """Evaluate every element of the cons list *expr*; return a SexprList
    of the resulting values."""
    values = []
    node = expr
    while consp(node):
        values.append(eval_expression(car(node), environment))
        node = cdr(node)
    return SexprList(values)
def eval_application(function, arguments, environment):
    """Apply *function* to already-evaluated *arguments*.

    User procedures run their body in the closure environment extended
    with the argument bindings; builtins are called directly.
    """
    if isinstance(function, SexprProcedure):
        closure_env = function.environment()
        call_env = env_bind(closure_env,
                            function.parameters(),
                            arguments)
        return eval_block(function.body(), call_env)
    if isinstance(function, SexprBuiltin):
        return function.call(environment, arguments)
    raise Exception(u'El valor no es aplicable.')
def fun_apply(function, arguments, environment):
    """Implement the ``apply`` builtin.

    *arguments* is a Python sequence whose last element is the tail of the
    Lisp argument list (as in classic Lisp ``apply``); the leading elements
    are prepended onto it.  An empty sequence applies *function* to nil.
    """
    if not arguments:  # truthiness instead of len() == 0
        return eval_application(function, SexprNil(), environment)
    # idiomatic unpacking replaces the manual copy loop over arguments[:-1]
    *leading, rest = arguments
    return eval_application(function, build_list(leading, rest), environment)
|
[
"lisp.sexpressions.consp",
"lisp.sexpressions.SexprList",
"lisp.sexpressions.symbolp",
"lisp.sexpressions.symbol_name",
"lisp.sexpressions.is_null",
"lisp.sexpressions.num_value",
"lisp.sexpressions.SexprFalse",
"lisp.sexpressions.bool_value",
"lisp.sexpressions.SexprCons",
"lisp.sexpressions.is_number",
"lisp.sexpressions.cdr",
"lisp.sexpressions.build_list",
"lisp.sexpressions.car",
"lisp.sexpressions.set_car",
"lisp.sexpressions.SexprBool",
"lisp.sexpressions.string_value",
"lisp.sexpressions.intern_symbol",
"lisp.sexpressions.set_cdr",
"lisp.sexpressions.is_keyword",
"lisp.sexpressions.SexprNil",
"lisp.sexpressions.SexprBuiltin"
] |
[((4543, 4561), 'lisp.sexpressions.consp', 'consp', (['environment'], {}), '(environment)\n', (4548, 4561), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((4751, 4769), 'lisp.sexpressions.is_keyword', 'is_keyword', (['symbol'], {}), '(symbol)\n', (4761, 4769), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((4803, 4821), 'lisp.sexpressions.consp', 'consp', (['environment'], {}), '(environment)\n', (4808, 4821), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((5111, 5129), 'lisp.sexpressions.is_keyword', 'is_keyword', (['symbol'], {}), '(symbol)\n', (5121, 5129), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((5289, 5305), 'lisp.sexpressions.car', 'car', (['environment'], {}), '(environment)\n', (5292, 5305), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, 
SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((5612, 5630), 'lisp.sexpressions.is_keyword', 'is_keyword', (['symbol'], {}), '(symbol)\n', (5622, 5630), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((5702, 5720), 'lisp.sexpressions.consp', 'consp', (['environment'], {}), '(environment)\n', (5707, 5720), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((6081, 6098), 'lisp.sexpressions.consp', 'consp', (['parameters'], {}), '(parameters)\n', (6086, 6098), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((6658, 6667), 'lisp.sexpressions.car', 'car', (['expr'], {}), '(expr)\n', (6661, 6667), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((10168, 10178), 'lisp.sexpressions.SexprNil', 'SexprNil', ([], {}), '()\n', 
(10176, 10178), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((10189, 10201), 'lisp.sexpressions.consp', 'consp', (['block'], {}), '(block)\n', (10194, 10201), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((10358, 10369), 'lisp.sexpressions.consp', 'consp', (['expr'], {}), '(expr)\n', (10363, 10369), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((10467, 10481), 'lisp.sexpressions.SexprList', 'SexprList', (['res'], {}), '(res)\n', (10476, 10481), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((1499, 1509), 'lisp.sexpressions.SexprNil', 'SexprNil', ([], {}), '()\n', (1507, 1509), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, 
set_car, set_cdr, is_null, build_list, is_number\n'), ((3610, 3620), 'lisp.sexpressions.consp', 'consp', (['rib'], {}), '(rib)\n', (3615, 3620), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((3865, 3875), 'lisp.sexpressions.consp', 'consp', (['rib'], {}), '(rib)\n', (3870, 3875), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((4338, 4348), 'lisp.sexpressions.consp', 'consp', (['rib'], {}), '(rib)\n', (4343, 4348), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((4577, 4593), 'lisp.sexpressions.car', 'car', (['environment'], {}), '(environment)\n', (4580, 4593), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((4669, 4685), 'lisp.sexpressions.cdr', 'cdr', (['environment'], {}), '(environment)\n', (4672, 4685), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, 
SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((4837, 4853), 'lisp.sexpressions.car', 'car', (['environment'], {}), '(environment)\n', (4840, 4853), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((4974, 4990), 'lisp.sexpressions.cdr', 'cdr', (['environment'], {}), '(environment)\n', (4977, 4990), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((5736, 5752), 'lisp.sexpressions.car', 'car', (['environment'], {}), '(environment)\n', (5739, 5752), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((5882, 5898), 'lisp.sexpressions.cdr', 'cdr', (['environment'], {}), '(environment)\n', (5885, 5898), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((6046, 6056), 'lisp.sexpressions.SexprNil', 'SexprNil', ([], {}), '()\n', 
(6054, 6056), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((6269, 6284), 'lisp.sexpressions.cdr', 'cdr', (['parameters'], {}), '(parameters)\n', (6272, 6284), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((6305, 6319), 'lisp.sexpressions.cdr', 'cdr', (['arguments'], {}), '(arguments)\n', (6308, 6319), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((6341, 6351), 'lisp.sexpressions.SexprNil', 'SexprNil', ([], {}), '()\n', (6349, 6351), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((6702, 6711), 'lisp.sexpressions.cdr', 'cdr', (['expr'], {}), '(expr)\n', (6705, 6711), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, 
set_cdr, is_null, build_list, is_number\n'), ((10274, 10284), 'lisp.sexpressions.cdr', 'cdr', (['block'], {}), '(block)\n', (10277, 10284), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((10446, 10455), 'lisp.sexpressions.cdr', 'cdr', (['expr'], {}), '(expr)\n', (10449, 10455), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((11297, 11317), 'lisp.sexpressions.build_list', 'build_list', (['xs', 'rest'], {}), '(xs, rest)\n', (11307, 11317), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((886, 906), 'lisp.sexpressions.intern_symbol', 'intern_symbol', (['"""fun"""'], {}), "('fun')\n", (899, 906), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((1563, 1582), 'lisp.sexpressions.intern_symbol', 'intern_symbol', (['name'], {}), '(name)\n', (1576, 1582), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, 
SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((1584, 1612), 'lisp.sexpressions.SexprBuiltin', 'SexprBuiltin', (['name', 'function'], {}), '(name, function)\n', (1596, 1612), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((2837, 2852), 'lisp.sexpressions.SexprCons', 'SexprCons', (['x', 'y'], {}), '(x, y)\n', (2846, 2852), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((2883, 2889), 'lisp.sexpressions.car', 'car', (['x'], {}), '(x)\n', (2886, 2889), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((2920, 2926), 'lisp.sexpressions.cdr', 'cdr', (['x'], {}), '(x)\n', (2923, 2926), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((3016, 3029), 'lisp.sexpressions.set_car', 'set_car', 
(['x', 'y'], {}), '(x, y)\n', (3023, 3029), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((3067, 3080), 'lisp.sexpressions.set_cdr', 'set_cdr', (['x', 'y'], {}), '(x, y)\n', (3074, 3080), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((3222, 3239), 'lisp.sexpressions.SexprBool', 'SexprBool', (['(x == y)'], {}), '(x == y)\n', (3231, 3239), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((3275, 3290), 'lisp.sexpressions.SexprList', 'SexprList', (['args'], {}), '(args)\n', (3284, 3290), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((3648, 3656), 'lisp.sexpressions.car', 'car', (['rib'], {}), '(rib)\n', (3651, 3656), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, 
consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((3711, 3719), 'lisp.sexpressions.cdr', 'cdr', (['rib'], {}), '(rib)\n', (3714, 3719), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((3903, 3911), 'lisp.sexpressions.car', 'car', (['rib'], {}), '(rib)\n', (3906, 3911), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((4013, 4021), 'lisp.sexpressions.cdr', 'cdr', (['rib'], {}), '(rib)\n', (4016, 4021), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((4191, 4215), 'lisp.sexpressions.SexprCons', 'SexprCons', (['symbol', 'value'], {}), '(symbol, value)\n', (4200, 4215), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((4376, 4384), 'lisp.sexpressions.car', 'car', (['rib'], {}), '(rib)\n', (4379, 4384), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, 
SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((5038, 5057), 'lisp.sexpressions.symbol_name', 'symbol_name', (['symbol'], {}), '(symbol)\n', (5049, 5057), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((5257, 5276), 'lisp.sexpressions.symbol_name', 'symbol_name', (['symbol'], {}), '(symbol)\n', (5268, 5276), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((5946, 5965), 'lisp.sexpressions.symbol_name', 'symbol_name', (['symbol'], {}), '(symbol)\n', (5957, 5965), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((6115, 6131), 'lisp.sexpressions.consp', 'consp', (['arguments'], {}), '(arguments)\n', (6120, 6131), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((6215, 6230), 
'lisp.sexpressions.car', 'car', (['parameters'], {}), '(parameters)\n', (6218, 6230), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((6232, 6246), 'lisp.sexpressions.car', 'car', (['arguments'], {}), '(arguments)\n', (6235, 6246), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((6377, 6387), 'lisp.sexpressions.SexprNil', 'SexprNil', ([], {}), '()\n', (6385, 6387), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((6750, 6759), 'lisp.sexpressions.cdr', 'cdr', (['expr'], {}), '(expr)\n', (6753, 6759), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((10233, 10243), 'lisp.sexpressions.car', 'car', (['block'], {}), '(block)\n', (10236, 10243), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, 
intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((11147, 11157), 'lisp.sexpressions.SexprNil', 'SexprNil', ([], {}), '()\n', (11155, 11157), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((1243, 1263), 'lisp.sexpressions.intern_symbol', 'intern_symbol', (['"""def"""'], {}), "('def')\n", (1256, 1263), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((2969, 2977), 'lisp.sexpressions.consp', 'consp', (['x'], {}), '(x)\n', (2974, 2977), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((3125, 3135), 'lisp.sexpressions.symbolp', 'symbolp', (['x'], {}), '(x)\n', (3132, 3135), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((3178, 3188), 'lisp.sexpressions.is_null', 'is_null', (['x'], {}), '(x)\n', (3185, 3188), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, 
SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((3671, 3687), 'lisp.sexpressions.car', 'car', (['association'], {}), '(association)\n', (3674, 3687), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((3927, 3943), 'lisp.sexpressions.car', 'car', (['association'], {}), '(association)\n', (3930, 3943), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((3978, 3994), 'lisp.sexpressions.cdr', 'cdr', (['association'], {}), '(association)\n', (3981, 3994), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((4400, 4416), 'lisp.sexpressions.car', 'car', (['association'], {}), '(association)\n', (4403, 4416), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((4444, 4471), 
'lisp.sexpressions.set_cdr', 'set_cdr', (['association', 'value'], {}), '(association, value)\n', (4451, 4471), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((5529, 5545), 'lisp.sexpressions.car', 'car', (['environment'], {}), '(environment)\n', (5532, 5545), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((6804, 6813), 'lisp.sexpressions.cdr', 'cdr', (['expr'], {}), '(expr)\n', (6807, 6813), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((10406, 10415), 'lisp.sexpressions.car', 'car', (['expr'], {}), '(expr)\n', (10409, 10415), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((1726, 1738), 'lisp.sexpressions.num_value', 'num_value', (['a'], {}), '(a)\n', (1735, 1738), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, 
num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((1741, 1753), 'lisp.sexpressions.num_value', 'num_value', (['b'], {}), '(b)\n', (1750, 1753), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((1868, 1880), 'lisp.sexpressions.num_value', 'num_value', (['a'], {}), '(a)\n', (1877, 1880), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((1883, 1895), 'lisp.sexpressions.num_value', 'num_value', (['b'], {}), '(b)\n', (1892, 1895), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((1940, 1952), 'lisp.sexpressions.num_value', 'num_value', (['a'], {}), '(a)\n', (1949, 1952), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((1955, 1967), 'lisp.sexpressions.num_value', 'num_value', (['b'], {}), '(b)\n', (1964, 1967), False, 'from lisp.sexpressions import SexprNumber, 
SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((2168, 2181), 'lisp.sexpressions.bool_value', 'bool_value', (['x'], {}), '(x)\n', (2178, 2181), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((2224, 2236), 'lisp.sexpressions.is_number', 'is_number', (['x'], {}), '(x)\n', (2233, 2236), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((2241, 2253), 'lisp.sexpressions.is_number', 'is_number', (['y'], {}), '(y)\n', (2250, 2253), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((2483, 2495), 'lisp.sexpressions.num_value', 'num_value', (['x'], {}), '(x)\n', (2492, 2495), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((2498, 2510), 
'lisp.sexpressions.num_value', 'num_value', (['y'], {}), '(y)\n', (2507, 2510), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((2554, 2566), 'lisp.sexpressions.num_value', 'num_value', (['x'], {}), '(x)\n', (2563, 2566), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((2570, 2582), 'lisp.sexpressions.num_value', 'num_value', (['y'], {}), '(y)\n', (2579, 2582), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((2625, 2637), 'lisp.sexpressions.num_value', 'num_value', (['x'], {}), '(x)\n', (2634, 2637), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((2640, 2652), 'lisp.sexpressions.num_value', 'num_value', (['y'], {}), '(y)\n', (2649, 2652), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, 
is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((2696, 2708), 'lisp.sexpressions.num_value', 'num_value', (['x'], {}), '(x)\n', (2705, 2708), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((2712, 2724), 'lisp.sexpressions.num_value', 'num_value', (['y'], {}), '(y)\n', (2721, 2724), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((3461, 3476), 'lisp.sexpressions.string_value', 'string_value', (['x'], {}), '(x)\n', (3473, 3476), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((2258, 2270), 'lisp.sexpressions.num_value', 'num_value', (['x'], {}), '(x)\n', (2267, 2270), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((2274, 2286), 'lisp.sexpressions.num_value', 'num_value', (['y'], {}), '(y)\n', (2283, 2286), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, 
SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((7145, 7167), 'lisp.sexpressions.intern_symbol', 'intern_symbol', (['"""quote"""'], {}), "('quote')\n", (7158, 7167), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((7222, 7241), 'lisp.sexpressions.intern_symbol', 'intern_symbol', (['"""do"""'], {}), "('do')\n", (7235, 7241), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((7273, 7282), 'lisp.sexpressions.cdr', 'cdr', (['expr'], {}), '(expr)\n', (7276, 7282), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((7318, 7338), 'lisp.sexpressions.intern_symbol', 'intern_symbol', (['"""def"""'], {}), "('def')\n", (7331, 7338), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, 
is_number\n'), ((8096, 8116), 'lisp.sexpressions.intern_symbol', 'intern_symbol', (['"""let"""'], {}), "('let')\n", (8109, 8116), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((8270, 8282), 'lisp.sexpressions.consp', 'consp', (['decls'], {}), '(decls)\n', (8275, 8282), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((8160, 8170), 'lisp.sexpressions.SexprNil', 'SexprNil', ([], {}), '()\n', (8168, 8170), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((8241, 8250), 'lisp.sexpressions.cdr', 'cdr', (['expr'], {}), '(expr)\n', (8244, 8250), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((8581, 8602), 'lisp.sexpressions.intern_symbol', 'intern_symbol', (['"""let*"""'], {}), "('let*')\n", (8594, 8602), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, 
SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((8756, 8768), 'lisp.sexpressions.consp', 'consp', (['decls'], {}), '(decls)\n', (8761, 8768), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((7440, 7460), 'lisp.sexpressions.intern_symbol', 'intern_symbol', (['"""def"""'], {}), "('def')\n", (7453, 7460), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((8483, 8493), 'lisp.sexpressions.cdr', 'cdr', (['decls'], {}), '(decls)\n', (8486, 8493), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((8529, 8538), 'lisp.sexpressions.cdr', 'cdr', (['expr'], {}), '(expr)\n', (8532, 8538), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((8646, 8656), 'lisp.sexpressions.SexprNil', 'SexprNil', ([], {}), '()\n', (8654, 8656), False, 'from 
lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((8727, 8736), 'lisp.sexpressions.cdr', 'cdr', (['expr'], {}), '(expr)\n', (8730, 8736), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((9073, 9093), 'lisp.sexpressions.intern_symbol', 'intern_symbol', (['"""set"""'], {}), "('set')\n", (9086, 9093), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((7580, 7600), 'lisp.sexpressions.intern_symbol', 'intern_symbol', (['"""fun"""'], {}), "('fun')\n", (7593, 7600), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((8975, 8985), 'lisp.sexpressions.cdr', 'cdr', (['decls'], {}), '(decls)\n', (8978, 8985), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, 
set_car, set_cdr, is_null, build_list, is_number\n'), ((9021, 9030), 'lisp.sexpressions.cdr', 'cdr', (['expr'], {}), '(expr)\n', (9024, 9030), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((9257, 9276), 'lisp.sexpressions.intern_symbol', 'intern_symbol', (['"""if"""'], {}), "('if')\n", (9270, 9276), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((9297, 9306), 'lisp.sexpressions.cdr', 'cdr', (['expr'], {}), '(expr)\n', (9300, 9306), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((9325, 9336), 'lisp.sexpressions.consp', 'consp', (['rest'], {}), '(rest)\n', (9330, 9336), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((9713, 9723), 'lisp.sexpressions.SexprNil', 'SexprNil', ([], {}), '()\n', (9721, 9723), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, 
SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((9745, 9765), 'lisp.sexpressions.intern_symbol', 'intern_symbol', (['"""fun"""'], {}), "('fun')\n", (9758, 9765), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((7740, 7749), 'lisp.sexpressions.cdr', 'cdr', (['expr'], {}), '(expr)\n', (7743, 7749), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((9363, 9372), 'lisp.sexpressions.cdr', 'cdr', (['rest'], {}), '(rest)\n', (9366, 9372), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((9950, 9959), 'lisp.sexpressions.cdr', 'cdr', (['expr'], {}), '(expr)\n', (9953, 9959), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((9475, 9487), 'lisp.sexpressions.SexprFalse', 'SexprFalse', ([], {}), '()\n', (9485, 9487), False, 'from 
lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((9594, 9603), 'lisp.sexpressions.cdr', 'cdr', (['rest'], {}), '(rest)\n', (9597, 9603), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((9670, 9679), 'lisp.sexpressions.car', 'car', (['rest'], {}), '(rest)\n', (9673, 9679), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n'), ((9832, 9841), 'lisp.sexpressions.cdr', 'cdr', (['expr'], {}), '(expr)\n', (9835, 9841), False, 'from lisp.sexpressions import SexprNumber, SexprString, SexprCons, SexprNil, SexprSymbol, SexprTrue, SexprFalse, SexprProcedure, SexprBuiltin, SexprList, SexprBool, bool_value, num_value, string_value, is_keyword, intern_symbol, consp, symbolp, symbol_name, car, cdr, set_car, set_cdr, is_null, build_list, is_number\n')]
|
from setuptools import setup, find_packages

# Read the long description and the pinned requirements up front so the
# file handles are closed promptly (the previous open(...).read() calls
# leaked the handles).
with open('README.md') as readme_file:
    long_description = readme_file.read()
with open('requirements.txt') as req_file:
    install_requires = req_file.read().strip().split('\n')

setup(
    name="ceres_infer",
    version="1.0",
    author="<NAME>",
    description='CERES inference',
    long_description=long_description,
    package_dir={"": "src"},
    # BUG FIX: packages live under src/ (see package_dir above), so search
    # there.  find_packages("ceres_infer") looked for a "ceres_infer"
    # directory next to setup.py and found no packages at all.
    packages=find_packages("src"),
    include_package_data=True,
    zip_safe=False,
    install_requires=install_requires,
)
|
[
"setuptools.find_packages"
] |
[((240, 268), 'setuptools.find_packages', 'find_packages', (['"""ceres_infer"""'], {}), "('ceres_infer')\n", (253, 268), False, 'from setuptools import setup, find_packages\n')]
|
from flask import Flask,render_template,request
import random
import sqlite3
# Flask application object; the route handlers below register themselves on it.
app = Flask(__name__)
# SQLite database file holding complaint records (relative to the working directory).
DATABASE='mydb.db'
def connect_db():
    """Open and return a fresh connection to the application's SQLite database."""
    connection = sqlite3.connect(DATABASE)
    return connection
@app.route('/')
def index():
    """Serve the home page."""
    home_page = render_template('Home.html')
    return home_page
@app.route('/details')
def details():
    """Serve the complaint details page."""
    details_page = render_template('Details.html')
    return details_page
@app.route('/addrec')
def addrec():
    """Insert a new complaint record built from query-string fields.

    Renders the details page echoing the stored values plus the generated
    reference number.

    NOTE(review): this handler writes to the database on a GET request; a
    POST form would be more appropriate, but the route is left unchanged to
    avoid breaking existing callers.
    """
    # Pseudo-unique reference number shown back to the user; collisions are
    # possible since it is purely random -- TODO confirm this is acceptable.
    refno = random.randint(600001, 899999)
    name = request.args.get('name')
    email = request.args.get('email')
    consignment_no = request.args.get('cn')
    date = request.args.get('date')
    product_name = request.args.get('pn')
    nature_of_complaint = request.args.get('noc')
    db = connect_db()
    try:
        sql = ("insert into crform(refno,name, email,invoice_no, invoice_date,"
               "product_name,nature_of_complaint) values(?,?,?,?,?,?,?)")
        db.execute(sql, [refno, name, email, consignment_no, date,
                         product_name, nature_of_complaint])
        db.commit()
    finally:
        # BUG FIX: close the connection even when the insert raises;
        # previously an exception leaked the open connection.
        db.close()
    return render_template('Details.html', name=name, email=email,
                           cn=consignment_no, date=date, pn=product_name,
                           noc=nature_of_complaint, refno=refno)
@app.route('/registrationform')
def registrationform():
    """Serve the complaint registration form."""
    form_page = render_template('RegistrationForm.html')
    return form_page
@app.route('/retrieveform')
def retrieveform():
    """Serve the complaint retrieval form."""
    form_page = render_template('RetrieveForm.html')
    return form_page
@app.route('/showdetails')
def showdetails():
    """Look up a complaint by reference number and render the result table."""
    print('successfully done')
    db = connect_db()
    try:
        cur = db.cursor()
        refno = request.args.get('Enter Reference number')
        # SECURITY FIX: the previous version concatenated the user-supplied
        # reference number straight into the SQL text, an SQL-injection
        # vector.  Use a bound parameter instead; int() both validates the
        # input and matches the integer literal the old query produced.
        cur.execute(
            "select name,email,invoice_no,invoice_date,product_name,"
            "nature_of_complaint,status from crform where refno=?",
            (int(refno),),
        )
        rows = cur.fetchall()
    finally:
        # Release the connection even if the query fails (it was never
        # closed before).
        db.close()
    return render_template('Result.html', rows=rows)
@app.route('/result')
def result():
    """Serve the result page."""
    print('hi there i am printed')
    result_page = render_template('Result.html')
    return result_page
if __name__=='__main__':
    # Development server only; debug=True must not be used in production.
    app.run(debug=True)
|
[
"random.randint",
"flask.request.args.get",
"flask.Flask",
"sqlite3.connect",
"flask.render_template"
] |
[((88, 103), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (93, 103), False, 'from flask import Flask, render_template, request\n'), ((159, 184), 'sqlite3.connect', 'sqlite3.connect', (['DATABASE'], {}), '(DATABASE)\n', (174, 184), False, 'import sqlite3\n'), ((230, 258), 'flask.render_template', 'render_template', (['"""Home.html"""'], {}), "('Home.html')\n", (245, 258), False, 'from flask import Flask, render_template, request\n'), ((313, 344), 'flask.render_template', 'render_template', (['"""Details.html"""'], {}), "('Details.html')\n", (328, 344), False, 'from flask import Flask, render_template, request\n'), ((404, 434), 'random.randint', 'random.randint', (['(600001)', '(899999)'], {}), '(600001, 899999)\n', (418, 434), False, 'import random\n'), ((471, 495), 'flask.request.args.get', 'request.args.get', (['"""name"""'], {}), "('name')\n", (487, 495), False, 'from flask import Flask, render_template, request\n'), ((509, 534), 'flask.request.args.get', 'request.args.get', (['"""email"""'], {}), "('email')\n", (525, 534), False, 'from flask import Flask, render_template, request\n'), ((558, 580), 'flask.request.args.get', 'request.args.get', (['"""cn"""'], {}), "('cn')\n", (574, 580), False, 'from flask import Flask, render_template, request\n'), ((593, 617), 'flask.request.args.get', 'request.args.get', (['"""date"""'], {}), "('date')\n", (609, 617), False, 'from flask import Flask, render_template, request\n'), ((638, 660), 'flask.request.args.get', 'request.args.get', (['"""pn"""'], {}), "('pn')\n", (654, 660), False, 'from flask import Flask, render_template, request\n'), ((688, 711), 'flask.request.args.get', 'request.args.get', (['"""noc"""'], {}), "('noc')\n", (704, 711), False, 'from flask import Flask, render_template, request\n'), ((1003, 1155), 'flask.render_template', 'render_template', (['"""Details.html"""'], {'name': 'name', 'email': 'email', 'cn': 'consignment_no', 'date': 'date', 'pn': 'product_name', 'noc': 
'nature_of_complaint', 'refno': 'random_number'}), "('Details.html', name=name, email=email, cn=consignment_no,\n date=date, pn=product_name, noc=nature_of_complaint, refno=random_number)\n", (1018, 1155), False, 'from flask import Flask, render_template, request\n'), ((1219, 1259), 'flask.render_template', 'render_template', (['"""RegistrationForm.html"""'], {}), "('RegistrationForm.html')\n", (1234, 1259), False, 'from flask import Flask, render_template, request\n'), ((1324, 1360), 'flask.render_template', 'render_template', (['"""RetrieveForm.html"""'], {}), "('RetrieveForm.html')\n", (1339, 1360), False, 'from flask import Flask, render_template, request\n'), ((1500, 1542), 'flask.request.args.get', 'request.args.get', (['"""Enter Reference number"""'], {}), "('Enter Reference number')\n", (1516, 1542), False, 'from flask import Flask, render_template, request\n'), ((1720, 1761), 'flask.render_template', 'render_template', (['"""Result.html"""'], {'rows': 'rows'}), "('Result.html', rows=rows)\n", (1735, 1761), False, 'from flask import Flask, render_template, request\n'), ((1850, 1880), 'flask.render_template', 'render_template', (['"""Result.html"""'], {}), "('Result.html')\n", (1865, 1880), False, 'from flask import Flask, render_template, request\n')]
|
import numpy as np
def sherman_morrison_row(e, inv, vec):
    """Sherman-Morrison update of a batch of matrix inverses when row ``e`` changes.

    Given ``inv[c]`` = A[c]^-1 for each configuration c, compute the inverse
    of the matrix obtained by replacing row ``e`` of A[c] with ``vec[c]``.

    :param e: index of the replaced row
    :param inv: (nconf, n, n) batch of inverses
    :param vec: (nconf, n) replacement row, one per configuration
    :returns: ``(ratio, invnew)`` -- the (nconf,) determinant ratios
        det(A')/det(A) and the (nconf, n, n) updated inverses
    """
    col = inv[:, :, e]                         # e-th column of each inverse
    ratio = (vec * col).sum(axis=1)            # det ratio per configuration
    row_dot_inv = np.einsum("ck,ckj->cj", vec, inv)
    # Rank-1 correction: outer product of the old column with vec . inv.
    correction = col[:, :, None] * row_dot_inv[:, None, :]
    invnew = inv - correction / ratio[:, None, None]
    # Column e needs the special-case value of the Sherman-Morrison formula.
    invnew[:, :, e] = col / ratio[:, None]
    return ratio, invnew
class PySCFSlaterUHF:
    """Slater-determinant wave function built from a PySCF mean-field object.

    A wave function object has a state defined by a reference configuration of
    electrons.  The functions recompute() and updateinternals() change the
    state of the object, and the rest compute and return values from that
    state.  Spin channels are indexed s=0 (alpha) and s=1 (beta) throughout.
    """

    def __init__(self, mol, mf):
        """Extract occupied MO coefficients from a PySCF mean-field result.

        mol -- PySCF molecule, used later for AO evaluation via eval_gto.
        mf  -- converged mean-field object (RHF/ROHF or UHF).
        """
        # Boolean mask of occupied molecular orbitals (occupation > 0.9).
        self.occ = np.asarray(mf.mo_occ > 0.9)
        self.parameters = {}
        # Determine if we're initializing from an RHF or UHF object...
        # UHF: mo_occ carries a spin axis, so select occupied columns per spin.
        if len(mf.mo_occ.shape) == 2:
            self.parameters["mo_coeff_alpha"] = mf.mo_coeff[0][:, self.occ[0]]
            self.parameters["mo_coeff_beta"] = mf.mo_coeff[1][:, self.occ[1]]
        else:
            # RHF/ROHF: a single orbital set.  Alpha uses all occupied
            # orbitals (> 0.9), beta only the doubly occupied ones (> 1.1).
            self.parameters["mo_coeff_alpha"] = mf.mo_coeff[
                :, np.asarray(mf.mo_occ > 0.9)
            ]
            self.parameters["mo_coeff_beta"] = mf.mo_coeff[
                :, np.asarray(mf.mo_occ > 1.1)
            ]
        # Index with spin channel s to get the parameter key for that spin.
        self._coefflookup = ("mo_coeff_alpha", "mo_coeff_beta")
        self._mol = mol
        # (n_alpha, n_beta) electron counts.
        self._nelec = tuple(mol.nelec)

    def recompute(self, configs):
        """This computes the value from scratch. Returns the logarithm of the wave function as
        (phase,logdet). If the wf is real, phase will be +/- 1."""
        # Flatten (nconf, nelec, 3) positions into one coordinate list so the
        # AOs can be evaluated in a single eval_gto call, then restore shape.
        mycoords = configs.reshape(
            (configs.shape[0] * configs.shape[1], configs.shape[2])
        )
        ao = self._mol.eval_gto("GTOval_sph", mycoords).reshape(
            (configs.shape[0], configs.shape[1], -1)
        )
        self._aovals = ao
        self._dets = []
        self._inverse = []
        for s in [0, 1]:
            if s == 0:
                # Alpha block: the first nelec[0] electrons.
                mo = ao[:, 0 : self._nelec[0], :].dot(
                    self.parameters[self._coefflookup[s]]
                )
            else:
                # Beta block: the remaining nelec[1] electrons.
                mo = ao[:, self._nelec[0] : self._nelec[0] + self._nelec[1], :].dot(
                    self.parameters[self._coefflookup[s]]
                )
            # This could be done faster; we are doubling our effort here.
            self._dets.append(np.linalg.slogdet(mo))
            self._inverse.append(np.linalg.inv(mo))
        return self.value()

    def updateinternals(self, e, epos, mask=None):
        """Update any internals given that electron e moved to epos. mask is a Boolean array
        which allows us to update only certain walkers"""
        # Spin channel of the moved electron (0=alpha, 1=beta).
        s = int(e >= self._nelec[0])
        if mask is None:
            mask = [True] * epos.shape[0]
        # Electron index within its own spin block.
        eeff = e - s * self._nelec[0]
        ao = self._mol.eval_gto("GTOval_sph", epos)
        mo = ao.dot(self.parameters[self._coefflookup[s]])
        # Sherman-Morrison rank-1 row update of the inverse for masked walkers.
        ratio, self._inverse[s][mask, :, :] = sherman_morrison_row(
            eeff, self._inverse[s][mask, :, :], mo[mask, :]
        )
        self._updateval(ratio, s, mask)

    ### not state-changing functions

    def value(self):
        """Return logarithm of the wave function as noted in recompute()"""
        # Combine the two spin determinants: phases multiply, logs add.
        return self._dets[0][0] * self._dets[1][0], self._dets[0][1] + self._dets[1][1]

    def _updateval(self, ratio, s, mask):
        # Fold the determinant ratio into the stored (phase, log|det|) pair.
        self._dets[s][0][mask] *= np.sign(ratio)  # will not work for complex!
        self._dets[s][1][mask] += np.log(np.abs(ratio))

    def _testrow(self, e, vec):
        """vec is a nconfig,nmo vector which replaces row e"""
        s = int(e >= self._nelec[0])
        ratio = np.einsum(
            "ij,ij->i", vec, self._inverse[s][:, :, e - s * self._nelec[0]]
        )
        return ratio

    def _testcol(self, i, s, vec):
        """vec is a nconfig,nmo vector which replaces column i"""
        ratio = np.einsum("ij,ij->i", vec, self._inverse[s][:, i, :])
        return ratio

    def gradient(self, e, epos):
        """ Compute the gradient of the log wave function
        Note that this can be called even if the internals have not been updated for electron e,
        if epos differs from the current position of electron e."""
        s = int(e >= self._nelec[0])
        # "GTOval_ip_sph" evaluates AO derivatives; one ratio per component.
        aograd = self._mol.eval_gto("GTOval_ip_sph", epos)
        mograd = aograd.dot(self.parameters[self._coefflookup[s]])
        ratios = [self._testrow(e, x) for x in mograd]
        return np.asarray(ratios) / self.testvalue(e, epos)[np.newaxis, :]

    def laplacian(self, e, epos):
        """ Compute the laplacian Psi/ Psi. """
        s = int(e >= self._nelec[0])
        # aograd=self._mol.eval_gto('GTOval_sph_deriv2',epos)
        # Indices [4, 7, 9] select the second derivatives that are summed into
        # the Laplacian -- TODO confirm against the eval_gto deriv2 layout.
        aolap = np.sum(self._mol.eval_gto("GTOval_sph_deriv2", epos)[[4, 7, 9]], axis=0)
        molap = aolap.dot(self.parameters[self._coefflookup[s]])
        ratios = self._testrow(e, molap)
        return ratios / self.testvalue(e, epos)

    def testvalue(self, e, epos):
        """ return the ratio between the current wave function and the wave function if
        electron e's position is replaced by epos"""
        s = int(e >= self._nelec[0])
        ao = self._mol.eval_gto("GTOval_sph", epos)
        mo = ao.dot(self.parameters[self._coefflookup[s]])
        return self._testrow(e, mo)

    def pgradient(self):
        """Compute the parameter gradient of Psi.
        Returns d_p \Psi/\Psi as a dictionary of numpy arrays,
        which correspond to the parameter dictionary.
        """
        d = {}
        for parm in self.parameters:
            s = 0
            if "beta" in parm:
                s = 1
            # Get AOs for our spin channel only
            ao = self._aovals[
                :, s * self._nelec[0] : self._nelec[s] + s * self._nelec[0], :
            ]  # (config, electron, ao)
            pgrad_shape = (ao.shape[0],) + self.parameters[parm].shape
            pgrad = np.zeros(pgrad_shape)
            # Compute derivatives w.r.t MO coefficients
            for i in range(self._nelec[s]):  # MO loop
                for j in range(ao.shape[2]):  # AO loop
                    vec = ao[:, :, j]
                    pgrad[:, j, i] = self._testcol(i, s, vec)  # nconfig
            d[parm] = np.array(pgrad)  # Returns config, coeff
        return d
return d
def test():
    """Smoke-test PySCFSlaterUHF against RHF, ROHF and UHF references on LiH."""
    from pyscf import lib, gto, scf
    import pyqmc.testwf as testwf

    mol = gto.M(atom="Li 0. 0. 0.; H 0. 0. 1.5", basis="cc-pvtz", unit="bohr", spin=0)
    reference_calcs = [scf.RHF(mol).run(), scf.ROHF(mol).run(), scf.UHF(mol).run()]
    checks = (
        ("Testing gradient", testwf.test_wf_gradient),
        ("Testing laplacian", testwf.test_wf_laplacian),
        ("Testing pgradient", testwf.test_wf_pgradient),
    )
    for mf in reference_calcs:
        print("")
        nconf = 10
        nelec = np.sum(mol.nelec)
        slater = PySCFSlaterUHF(mol, mf)
        configs = np.random.randn(nconf, nelec, 3)
        print("testing internals:", testwf.test_updateinternals(slater, configs))
        # Finite-difference checks at progressively smaller step sizes.
        for delta in [1e-3, 1e-4, 1e-5, 1e-6, 1e-7]:
            for label, check in checks:
                print("delta", delta, label, check(slater, configs, delta=delta))
if __name__ == "__main__":
    # Run the smoke tests when executed as a script.
    test()
|
[
"numpy.sum",
"numpy.abs",
"numpy.random.randn",
"numpy.asarray",
"numpy.einsum",
"numpy.zeros",
"pyqmc.testwf.test_updateinternals",
"pyqmc.testwf.test_wf_gradient",
"pyscf.gto.M",
"pyscf.scf.RHF",
"numpy.array",
"numpy.linalg.slogdet",
"numpy.linalg.inv",
"numpy.sign",
"pyscf.scf.ROHF",
"pyscf.scf.UHF",
"pyqmc.testwf.test_wf_pgradient",
"pyqmc.testwf.test_wf_laplacian"
] |
[((76, 116), 'numpy.einsum', 'np.einsum', (['"""ij,ij->i"""', 'vec', 'inv[:, :, e]'], {}), "('ij,ij->i', vec, inv[:, :, e])\n", (85, 116), True, 'import numpy as np\n'), ((128, 161), 'numpy.einsum', 'np.einsum', (['"""ek,ekj->ej"""', 'vec', 'inv'], {}), "('ek,ekj->ej', vec, inv)\n", (137, 161), True, 'import numpy as np\n'), ((6563, 6639), 'pyscf.gto.M', 'gto.M', ([], {'atom': '"""Li 0. 0. 0.; H 0. 0. 1.5"""', 'basis': '"""cc-pvtz"""', 'unit': '"""bohr"""', 'spin': '(0)'}), "(atom='Li 0. 0. 0.; H 0. 0. 1.5', basis='cc-pvtz', unit='bohr', spin=0)\n", (6568, 6639), False, 'from pyscf import lib, gto, scf\n'), ((700, 727), 'numpy.asarray', 'np.asarray', (['(mf.mo_occ > 0.9)'], {}), '(mf.mo_occ > 0.9)\n', (710, 727), True, 'import numpy as np\n'), ((3496, 3510), 'numpy.sign', 'np.sign', (['ratio'], {}), '(ratio)\n', (3503, 3510), True, 'import numpy as np\n'), ((3752, 3826), 'numpy.einsum', 'np.einsum', (['"""ij,ij->i"""', 'vec', 'self._inverse[s][:, :, e - s * self._nelec[0]]'], {}), "('ij,ij->i', vec, self._inverse[s][:, :, e - s * self._nelec[0]])\n", (3761, 3826), True, 'import numpy as np\n'), ((3995, 4048), 'numpy.einsum', 'np.einsum', (['"""ij,ij->i"""', 'vec', 'self._inverse[s][:, i, :]'], {}), "('ij,ij->i', vec, self._inverse[s][:, i, :])\n", (4004, 4048), True, 'import numpy as np\n'), ((6774, 6791), 'numpy.sum', 'np.sum', (['mol.nelec'], {}), '(mol.nelec)\n', (6780, 6791), True, 'import numpy as np\n'), ((6853, 6885), 'numpy.random.randn', 'np.random.randn', (['nconf', 'nelec', '(3)'], {}), '(nconf, nelec, 3)\n', (6868, 6885), True, 'import numpy as np\n'), ((202, 244), 'numpy.einsum', 'np.einsum', (['"""ki,kj->kij"""', 'inv[:, :, e]', 'tmp'], {}), "('ki,kj->kij', inv[:, :, e], tmp)\n", (211, 244), True, 'import numpy as np\n'), ((3583, 3596), 'numpy.abs', 'np.abs', (['ratio'], {}), '(ratio)\n', (3589, 3596), True, 'import numpy as np\n'), ((4572, 4590), 'numpy.asarray', 'np.asarray', (['ratios'], {}), '(ratios)\n', (4582, 4590), True, 'import numpy as 
np\n'), ((6074, 6095), 'numpy.zeros', 'np.zeros', (['pgrad_shape'], {}), '(pgrad_shape)\n', (6082, 6095), True, 'import numpy as np\n'), ((6402, 6417), 'numpy.array', 'np.array', (['pgrad'], {}), '(pgrad)\n', (6410, 6417), True, 'import numpy as np\n'), ((6923, 6967), 'pyqmc.testwf.test_updateinternals', 'testwf.test_updateinternals', (['slater', 'configs'], {}), '(slater, configs)\n', (6950, 6967), True, 'import pyqmc.testwf as testwf\n'), ((2430, 2451), 'numpy.linalg.slogdet', 'np.linalg.slogdet', (['mo'], {}), '(mo)\n', (2447, 2451), True, 'import numpy as np\n'), ((2487, 2504), 'numpy.linalg.inv', 'np.linalg.inv', (['mo'], {}), '(mo)\n', (2500, 2504), True, 'import numpy as np\n'), ((6656, 6668), 'pyscf.scf.RHF', 'scf.RHF', (['mol'], {}), '(mol)\n', (6663, 6668), False, 'from pyscf import lib, gto, scf\n'), ((6676, 6689), 'pyscf.scf.ROHF', 'scf.ROHF', (['mol'], {}), '(mol)\n', (6684, 6689), False, 'from pyscf import lib, gto, scf\n'), ((6697, 6709), 'pyscf.scf.UHF', 'scf.UHF', (['mol'], {}), '(mol)\n', (6704, 6709), False, 'from pyscf import lib, gto, scf\n'), ((7147, 7200), 'pyqmc.testwf.test_wf_gradient', 'testwf.test_wf_gradient', (['slater', 'configs'], {'delta': 'delta'}), '(slater, configs, delta=delta)\n', (7170, 7200), True, 'import pyqmc.testwf as testwf\n'), ((7342, 7396), 'pyqmc.testwf.test_wf_laplacian', 'testwf.test_wf_laplacian', (['slater', 'configs'], {'delta': 'delta'}), '(slater, configs, delta=delta)\n', (7366, 7396), True, 'import pyqmc.testwf as testwf\n'), ((7538, 7592), 'pyqmc.testwf.test_wf_pgradient', 'testwf.test_wf_pgradient', (['slater', 'configs'], {'delta': 'delta'}), '(slater, configs, delta=delta)\n', (7562, 7592), True, 'import pyqmc.testwf as testwf\n'), ((1127, 1154), 'numpy.asarray', 'np.asarray', (['(mf.mo_occ > 0.9)'], {}), '(mf.mo_occ > 0.9)\n', (1137, 1154), True, 'import numpy as np\n'), ((1251, 1278), 'numpy.asarray', 'np.asarray', (['(mf.mo_occ > 1.1)'], {}), '(mf.mo_occ > 1.1)\n', (1261, 1278), True, 'import numpy as 
np\n')]
|
from dateutil.relativedelta import relativedelta
from custom.icds_reports.const import AGG_DAILY_FEEDING_TABLE
from custom.icds_reports.utils.aggregation_helpers import (
month_formatter,
transform_day_to_month,
)
from custom.icds_reports.utils.aggregation_helpers.distributed.base import (
StateBasedAggregationDistributedHelper,
)
class DailyFeedingFormsChildHealthAggregationDistributedHelper(StateBasedAggregationDistributedHelper):
    """Aggregates one state-month of daily-feeding child-health form data."""

    helper_key = 'daily-feeding-forms-child-health'
    ucr_data_source_id = 'dashboard_child_health_daily_feeding_forms'
    aggregate_parent_table = AGG_DAILY_FEEDING_TABLE

    def drop_index_queries(self):
        """Queries that drop the (state_id, month) index before aggregation."""
        return [
            'DROP INDEX IF EXISTS "icds_dashboard_daily_feeding_forms_state_id_month_273d19dd_idx"',
        ]

    def create_index_queries(self):
        """Queries that recreate the (state_id, month) index afterwards."""
        return [
            f'CREATE INDEX IF NOT EXISTS "icds_dashboard_daily_feeding_forms_state_id_month_273d19dd_idx" ON "{self.aggregate_parent_table}" (state_id, month)',
        ]

    def aggregation_query(self):
        """Return (sql, params) inserting this state-month's daily-feeding rows."""
        month_start = month_formatter(self.month)
        next_month_start = month_formatter(self.month + relativedelta(months=1))
        query_params = {
            "month": month_start,
            "current_month_start": month_start,
            "next_month_start": next_month_start,
            "state_id": self.state_id,
        }

        # Notes on the (strange) query plan:
        # * The join target, daily_attendance, keeps only the most recently
        #   submitted form per AWW per day (an AWW often submits several
        #   daily attendance forms in one day).
        # * The docs in the result set actually come from daily_attendance,
        #   so the JOIN uses that table's primary key
        #   (supervisor_id, doc_id, repeat_iteration); because of this the
        #   UCR does not have an index on (state_id, timeend).
        return f"""
        INSERT INTO "{self.aggregate_parent_table}" (
          state_id, supervisor_id, month, case_id, latest_time_end_processed,
          sum_attended_child_ids, lunch_count
        ) (
          SELECT DISTINCT ON (ucr.child_health_case_id)
            ucr.state_id AS state_id,
            ucr.supervisor_id,
            %(month)s AS month,
            ucr.child_health_case_id AS case_id,
            MAX(ucr.timeend) OVER w AS latest_time_end_processed,
            SUM(ucr.attended_child_ids) OVER w AS sum_attended_child_ids,
            SUM(ucr.lunch) OVER w AS lunch_count
          FROM "{self.ucr_tablename}" ucr
          INNER JOIN daily_attendance ON (
            ucr.doc_id = daily_attendance.doc_id AND
            ucr.supervisor_id = daily_attendance.supervisor_id AND
            ucr.state_id = daily_attendance.state_id AND
            daily_attendance.month=%(current_month_start)s
          )
          WHERE ucr.timeend >= %(current_month_start)s AND ucr.timeend < %(next_month_start)s
            AND ucr.child_health_case_id IS NOT NULL
            AND ucr.state_id = %(state_id)s
          WINDOW w AS (PARTITION BY ucr.supervisor_id, ucr.child_health_case_id)
        )
        """, query_params

    def delete_old_data_query(self):
        """Intentionally a no-op for this helper."""
        return None

    def delete_previous_run_query(self):
        """Intentionally a no-op for this helper."""
        return None
|
[
"custom.icds_reports.utils.aggregation_helpers.month_formatter",
"dateutil.relativedelta.relativedelta"
] |
[((1087, 1114), 'custom.icds_reports.utils.aggregation_helpers.month_formatter', 'month_formatter', (['self.month'], {}), '(self.month)\n', (1102, 1114), False, 'from custom.icds_reports.utils.aggregation_helpers import month_formatter, transform_day_to_month\n'), ((1243, 1270), 'custom.icds_reports.utils.aggregation_helpers.month_formatter', 'month_formatter', (['self.month'], {}), '(self.month)\n', (1258, 1270), False, 'from custom.icds_reports.utils.aggregation_helpers import month_formatter, transform_day_to_month\n'), ((1171, 1194), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'months': '(1)'}), '(months=1)\n', (1184, 1194), False, 'from dateutil.relativedelta import relativedelta\n')]
|
'''
Script to monitor the ssh service on
a Raspberry Pi. If ssh is not
currently active, restart the service.
'''
import subprocess
def main():
    """Check `service ssh status` and restart the ssh service if inactive.

    Scans the status output for the "Active: " line; if the word "active"
    does not appear on it, restarts ssh via `sudo service ssh restart`.
    """
    # BUG FIX: universal_newlines=True decodes stdout to str.  Under
    # Python 3 the pipe yields bytes, so the `in` checks against str
    # literals below raised TypeError / never matched.
    proc = subprocess.Popen(
        "service ssh status",
        shell=True,
        stdout=subprocess.PIPE,
        universal_newlines=True,
    )
    try:
        for line in proc.stdout:
            if "Active: " in line:
                # split(' ') so "inactive" does not match as a substring.
                if "active" in line.split(' '):
                    return
                subprocess.call(['sudo', 'service', 'ssh', 'restart'])
                return
    finally:
        proc.stdout.close()
        # BUG FIX: reap the child so it does not linger as a zombie.
        proc.wait()
if __name__ == '__main__':
    # Entry point when run as a script.
    main()
|
[
"subprocess.Popen",
"subprocess.call"
] |
[((142, 216), 'subprocess.Popen', 'subprocess.Popen', (['"""service ssh status"""'], {'shell': '(True)', 'stdout': 'subprocess.PIPE'}), "('service ssh status', shell=True, stdout=subprocess.PIPE)\n", (158, 216), False, 'import subprocess\n'), ((327, 381), 'subprocess.call', 'subprocess.call', (["['sudo', 'service', 'ssh', 'restart']"], {}), "(['sudo', 'service', 'ssh', 'restart'])\n", (342, 381), False, 'import subprocess\n')]
|
''' Script constants '''
import os

from portfolio.settings import BASE_DIR
import constants.common as common

# Arguments accepted by the db scripts.
FOR_PROD = 'for_prod'
UPDATING = 'updating'
DB_UPDATE = 'db_update'
TEST_UPDATE = 'test_update'

# Arguments accepted by the s3 updater script.
HOME = 'home'
DELETE = 'delete'

# Every static-file key plus the image key.  Note that S3_DATA_KEYS is an
# alias for the same list object as static_files.
static_files = list(common.STATIC_FILE_KEYS) + ['image']
S3_DATA_KEYS = static_files

# Filename substrings.
JSON_SUB = '.json'
PY_SUB = '.py'
SCRIPT_PARAM_SUBSTR = {'filename': '.json', 'process': 'process=', }

# Input/output directories.
INPUT_CREATE_JSON = os.path.join(BASE_DIR, 'data/data_for_creates')
INPUT_TO_UPDATER_STEP_ONE = os.path.join(BASE_DIR, 'data/data_for_updates/dev_input_step_one')
INPUT_TO_UPDATER_STEP_THREE = os.path.join(BASE_DIR, 'data/data_for_updates/dev_input_step_three')
PROD_INPUT_JSON = os.path.join(BASE_DIR, 'data/data_for_updates/prod_input_json')
# Screen-scraping input html and any other one-off input.
GENERAL_INPUT = os.path.join(BASE_DIR, 'data/input')

# Filename prefixes.
PROD_PROCESS_IND = 'prod_input_'
DEFAULT_PREFIX = 'input_'

# Used in utilities.random methods to clear out data, to make things easier
# to work with.
ALWAYS_ARCHIVE_INPUT_DIRECTORIES = [
    'data/data_for_updates/dev_input_step_three/done',
    'data/data_for_creates/loaded',
]
NOT_DONE_INPUT_DIRECTORIES = [
    'data/data_for_updates/dev_input_step_three',
    'data/data_for_creates',
]
PROD_INPUT_DIRECTORY = 'data/data_for_updates/prod_input_json'
|
[
"os.path.join"
] |
[((547, 594), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""data/data_for_creates"""'], {}), "(BASE_DIR, 'data/data_for_creates')\n", (559, 594), False, 'import os\n'), ((623, 689), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""data/data_for_updates/dev_input_step_one"""'], {}), "(BASE_DIR, 'data/data_for_updates/dev_input_step_one')\n", (635, 689), False, 'import os\n'), ((720, 788), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""data/data_for_updates/dev_input_step_three"""'], {}), "(BASE_DIR, 'data/data_for_updates/dev_input_step_three')\n", (732, 788), False, 'import os\n'), ((807, 870), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""data/data_for_updates/prod_input_json"""'], {}), "(BASE_DIR, 'data/data_for_updates/prod_input_json')\n", (819, 870), False, 'import os\n'), ((939, 975), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""data/input"""'], {}), "(BASE_DIR, 'data/input')\n", (951, 975), False, 'import os\n')]
|
#!/usr/bin/env python
# ~*~ coding: utf-8 ~*~
from __future__ import absolute_import
from django.urls import path
from users.views import login, users, groups, project, permission,role,keys
# URL namespace: routes here reverse as 'users:<name>', e.g. 'users:login'.
app_name = 'users'
urlpatterns = [
    # Login / logout views
    path('login/', login.UserLoginView.as_view(), name='login'),
    path('logout/', login.UserLogoutView.as_view(), name='logout'),
    # User management views (list / add / update / bulk delete / password /
    # detail)
    path('users-list/', users.UsersListAll.as_view(), name='users_list'),
    path('users-add/', users.UsersAdd.as_view(), name='users_add'),
    path('users-update/<int:pk>/', users.UsersUpdate.as_view(), name='users_update'),
    path('users-all-del/', users.UsersAllDel.as_view(), name='users_all_del'),
    path('users-change-password/', users.UsersChangePassword.as_view(), name='users_change_password'),
    path('users-detail/<int:pk>/', users.UsersDetail.as_view(), name='users_detail'),
    # Department / group views
    path('groups-list/', groups.GroupsListAll.as_view(), name='groups_list'),
    path('groups-add/', groups.GroupsAdd.as_view(), name='groups_add'),
    path('groups-update/<int:pk>/', groups.GroupsUpdate.as_view(), name='groups_update'),
    path('groups-all-del/', groups.GroupsAllDel.as_view(), name='groups_all_del'),
    # Project views
    path('project-list/', project.ProjectListAll.as_view(), name='project_list'),
    path('project-add/', project.ProjectAdd.as_view(), name='project_add'),
    path('project-update/<int:pk>/', project.ProjectUpdate.as_view(), name='project_update'),
    path('project-all-del/', project.ProjectDel.as_view(), name='project_all_del'),
    # Key management views -- note keys use a UUID primary key, unlike the
    # integer pks elsewhere.
    path('key-list/', keys.KeyListAll.as_view(), name='key_list'),
    path('key-add/', keys.KeyAdd.as_view(), name='key_add'),
    path('key-update/<uuid:pk>/', keys.KeyUpdate.as_view(), name='key_update'),
    path('key-all-del/', keys.KeyAllDel.as_view(), name='key_all_del'),
    # Permission views
    path('permission-list/', permission.PermissionListAll.as_view(), name='permission_list'),
    path('permission-add/', permission.PermissionAdd.as_view(), name='permission_add'),
    path('permission-update/<int:pk>/', permission.PermissionUpdate.as_view(), name='permission_update'),
    path('permission-all-del/', permission.PermissionAllDel.as_view(), name='permission_all_del'),
    # Role views
    path('role-list/', role.RoleAll.as_view(), name='role_list'),
    path('role-edit/<int:pk>/', role.RoleEdit.as_view(), name='role_edit'),
    path('role-all-del/', role.RoleAllDel.as_view(), name='role_all_del'),
]
|
[
"users.views.keys.KeyAdd.as_view",
"users.views.users.UsersDetail.as_view",
"users.views.users.UsersChangePassword.as_view",
"users.views.permission.PermissionUpdate.as_view",
"users.views.permission.PermissionListAll.as_view",
"users.views.groups.GroupsListAll.as_view",
"users.views.role.RoleEdit.as_view",
"users.views.groups.GroupsAllDel.as_view",
"users.views.project.ProjectAdd.as_view",
"users.views.groups.GroupsUpdate.as_view",
"users.views.project.ProjectUpdate.as_view",
"users.views.keys.KeyListAll.as_view",
"users.views.role.RoleAll.as_view",
"users.views.permission.PermissionAllDel.as_view",
"users.views.users.UsersAdd.as_view",
"users.views.login.UserLoginView.as_view",
"users.views.users.UsersAllDel.as_view",
"users.views.groups.GroupsAdd.as_view",
"users.views.role.RoleAllDel.as_view",
"users.views.keys.KeyUpdate.as_view",
"users.views.users.UsersUpdate.as_view",
"users.views.project.ProjectDel.as_view",
"users.views.keys.KeyAllDel.as_view",
"users.views.login.UserLogoutView.as_view",
"users.views.project.ProjectListAll.as_view",
"users.views.users.UsersListAll.as_view",
"users.views.permission.PermissionAdd.as_view"
] |
[((277, 306), 'users.views.login.UserLoginView.as_view', 'login.UserLoginView.as_view', ([], {}), '()\n', (304, 306), False, 'from users.views import login, users, groups, project, permission, role, keys\n'), ((344, 374), 'users.views.login.UserLogoutView.as_view', 'login.UserLogoutView.as_view', ([], {}), '()\n', (372, 374), False, 'from users.views import login, users, groups, project, permission, role, keys\n'), ((436, 464), 'users.views.users.UsersListAll.as_view', 'users.UsersListAll.as_view', ([], {}), '()\n', (462, 464), False, 'from users.views import login, users, groups, project, permission, role, keys\n'), ((510, 534), 'users.views.users.UsersAdd.as_view', 'users.UsersAdd.as_view', ([], {}), '()\n', (532, 534), False, 'from users.views import login, users, groups, project, permission, role, keys\n'), ((591, 618), 'users.views.users.UsersUpdate.as_view', 'users.UsersUpdate.as_view', ([], {}), '()\n', (616, 618), False, 'from users.views import login, users, groups, project, permission, role, keys\n'), ((670, 697), 'users.views.users.UsersAllDel.as_view', 'users.UsersAllDel.as_view', ([], {}), '()\n', (695, 697), False, 'from users.views import login, users, groups, project, permission, role, keys\n'), ((758, 793), 'users.views.users.UsersChangePassword.as_view', 'users.UsersChangePassword.as_view', ([], {}), '()\n', (791, 793), False, 'from users.views import login, users, groups, project, permission, role, keys\n'), ((862, 889), 'users.views.users.UsersDetail.as_view', 'users.UsersDetail.as_view', ([], {}), '()\n', (887, 889), False, 'from users.views import login, users, groups, project, permission, role, keys\n'), ((964, 994), 'users.views.groups.GroupsListAll.as_view', 'groups.GroupsListAll.as_view', ([], {}), '()\n', (992, 994), False, 'from users.views import login, users, groups, project, permission, role, keys\n'), ((1042, 1068), 'users.views.groups.GroupsAdd.as_view', 'groups.GroupsAdd.as_view', ([], {}), '()\n', (1066, 1068), False, 'from 
users.views import login, users, groups, project, permission, role, keys\n'), ((1127, 1156), 'users.views.groups.GroupsUpdate.as_view', 'groups.GroupsUpdate.as_view', ([], {}), '()\n', (1154, 1156), False, 'from users.views import login, users, groups, project, permission, role, keys\n'), ((1210, 1239), 'users.views.groups.GroupsAllDel.as_view', 'groups.GroupsAllDel.as_view', ([], {}), '()\n', (1237, 1239), False, 'from users.views import login, users, groups, project, permission, role, keys\n'), ((1314, 1346), 'users.views.project.ProjectListAll.as_view', 'project.ProjectListAll.as_view', ([], {}), '()\n', (1344, 1346), False, 'from users.views import login, users, groups, project, permission, role, keys\n'), ((1396, 1424), 'users.views.project.ProjectAdd.as_view', 'project.ProjectAdd.as_view', ([], {}), '()\n', (1422, 1424), False, 'from users.views import login, users, groups, project, permission, role, keys\n'), ((1485, 1516), 'users.views.project.ProjectUpdate.as_view', 'project.ProjectUpdate.as_view', ([], {}), '()\n', (1514, 1516), False, 'from users.views import login, users, groups, project, permission, role, keys\n'), ((1572, 1600), 'users.views.project.ProjectDel.as_view', 'project.ProjectDel.as_view', ([], {}), '()\n', (1598, 1600), False, 'from users.views import login, users, groups, project, permission, role, keys\n'), ((1674, 1699), 'users.views.keys.KeyListAll.as_view', 'keys.KeyListAll.as_view', ([], {}), '()\n', (1697, 1699), False, 'from users.views import login, users, groups, project, permission, role, keys\n'), ((1741, 1762), 'users.views.keys.KeyAdd.as_view', 'keys.KeyAdd.as_view', ([], {}), '()\n', (1760, 1762), False, 'from users.views import login, users, groups, project, permission, role, keys\n'), ((1816, 1840), 'users.views.keys.KeyUpdate.as_view', 'keys.KeyUpdate.as_view', ([], {}), '()\n', (1838, 1840), False, 'from users.views import login, users, groups, project, permission, role, keys\n'), ((1888, 1912), 
'users.views.keys.KeyAllDel.as_view', 'keys.KeyAllDel.as_view', ([], {}), '()\n', (1910, 1912), False, 'from users.views import login, users, groups, project, permission, role, keys\n'), ((1994, 2032), 'users.views.permission.PermissionListAll.as_view', 'permission.PermissionListAll.as_view', ([], {}), '()\n', (2030, 2032), False, 'from users.views import login, users, groups, project, permission, role, keys\n'), ((2088, 2122), 'users.views.permission.PermissionAdd.as_view', 'permission.PermissionAdd.as_view', ([], {}), '()\n', (2120, 2122), False, 'from users.views import login, users, groups, project, permission, role, keys\n'), ((2189, 2226), 'users.views.permission.PermissionUpdate.as_view', 'permission.PermissionUpdate.as_view', ([], {}), '()\n', (2224, 2226), False, 'from users.views import login, users, groups, project, permission, role, keys\n'), ((2288, 2325), 'users.views.permission.PermissionAllDel.as_view', 'permission.PermissionAllDel.as_view', ([], {}), '()\n', (2323, 2325), False, 'from users.views import login, users, groups, project, permission, role, keys\n'), ((2402, 2424), 'users.views.role.RoleAll.as_view', 'role.RoleAll.as_view', ([], {}), '()\n', (2422, 2424), False, 'from users.views import login, users, groups, project, permission, role, keys\n'), ((2478, 2501), 'users.views.role.RoleEdit.as_view', 'role.RoleEdit.as_view', ([], {}), '()\n', (2499, 2501), False, 'from users.views import login, users, groups, project, permission, role, keys\n'), ((2549, 2574), 'users.views.role.RoleAllDel.as_view', 'role.RoleAllDel.as_view', ([], {}), '()\n', (2572, 2574), False, 'from users.views import login, users, groups, project, permission, role, keys\n')]
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
import responses
from testing.util import read_json_file
def _business_lookup_response(business_id, fixture_name):
    """Build a canned 200 GET response for a business-lookup endpoint."""
    return responses.Response(
        method="GET",
        url="https://api.yelp.com/v3/businesses/" + business_id,
        json=read_json_file(fixture_name),
        status=200,
    )


YELP_SAN_FRANCISCO = _business_lookup_response(
    "yelp-san-francisco",
    "business_lookup_yelp_san_francisco.json",
)
SACRE_COEUR_PARIS = _business_lookup_response(
    "basilique-du-sacré-cœur-de-montmartre-paris-3",
    "business_lookup_sacre_coeur_paris.json",
)
|
[
"testing.util.read_json_file"
] |
[((297, 354), 'testing.util.read_json_file', 'read_json_file', (['"""business_lookup_yelp_san_francisco.json"""'], {}), "('business_lookup_yelp_san_francisco.json')\n", (311, 354), False, 'from testing.util import read_json_file\n'), ((549, 605), 'testing.util.read_json_file', 'read_json_file', (['"""business_lookup_sacre_coeur_paris.json"""'], {}), "('business_lookup_sacre_coeur_paris.json')\n", (563, 605), False, 'from testing.util import read_json_file\n')]
|
from flask import Flask, render_template, request
import json
from thrift import Thrift
from thrift.transport import TSocket,TTransport
from thrift.protocol import TBinaryProtocol
from hbase import Hbase
from hbase.ttypes import ColumnDescriptor,Mutation,BatchMutation,TRegionInfo
from hbase.ttypes import IOError,AlreadyExists
app = Flask(__name__)
@app.route('/getallcustomers')
def getallcustomers():
    """Return up to 10 rows of the C360_STREAM HBase table as a JSON list.

    Opens a fresh Thrift connection to the `hbase` host for each request,
    scans the table, and maps a fixed set of columns into plain dicts.
    DOB, PHONE_NUM and PIN were commented out of the original mapping and
    stay excluded from the response.
    """
    socket = TSocket.TSocket('hbase', 9090)
    socket.setTimeout(5000)
    transport = TTransport.TBufferedTransport(socket)
    protocol = TBinaryProtocol.TBinaryProtocol(transport)
    client = Hbase.Client(protocol)
    transport.open()
    try:
        scan_id = client.scannerOpen("C360_STREAM", "", [])
        try:
            result = client.scannerGetList(scan_id, 10)
        finally:
            # Release the server-side scanner; the original never called
            # scannerClose and leaked a scanner per request.
            client.scannerClose(scan_id)
        customers = []
        for row in result:
            customer = {
                "CUSTOMER_ID": row.row,
                "FIRST_NAME": row.columns.get('C360_STREAM:FIRST_NAME').value,
                "LAST_NAME": row.columns.get('C360_STREAM:LAST_NAME').value,
                "PHONE_TYPE": row.columns.get('C360_STREAM:PHONE_TYPE').value,
                "ADDRESS_LINE_1": row.columns.get('C360_STREAM:ADDRESS_LINE_1').value,
                "ADDRESS_LINE_2": row.columns.get('C360_STREAM:ADDRESS_LINE_2').value,
                "ADDRESS_TYPE": row.columns.get('C360_STREAM:ADDRESS_TYPE').value,
            }
            customers.append(customer)
    finally:
        # Close the Thrift transport even when the scan or mapping raises;
        # the original left the socket open on any exception.
        transport.close()
    return json.dumps(customers)
if __name__ == '__main__':
app.run(debug = True)
|
[
"hbase.Hbase.Client",
"thrift.transport.TSocket.TSocket",
"flask.Flask",
"json.dumps",
"thrift.protocol.TBinaryProtocol.TBinaryProtocol",
"thrift.transport.TTransport.TBufferedTransport"
] |
[((337, 352), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (342, 352), False, 'from flask import Flask, render_template, request\n'), ((420, 450), 'thrift.transport.TSocket.TSocket', 'TSocket.TSocket', (['"""hbase"""', '(9090)'], {}), "('hbase', 9090)\n", (435, 450), False, 'from thrift.transport import TSocket, TTransport\n'), ((493, 530), 'thrift.transport.TTransport.TBufferedTransport', 'TTransport.TBufferedTransport', (['socket'], {}), '(socket)\n', (522, 530), False, 'from thrift.transport import TSocket, TTransport\n'), ((549, 591), 'thrift.protocol.TBinaryProtocol.TBinaryProtocol', 'TBinaryProtocol.TBinaryProtocol', (['transport'], {}), '(transport)\n', (580, 591), False, 'from thrift.protocol import TBinaryProtocol\n'), ((604, 626), 'hbase.Hbase.Client', 'Hbase.Client', (['protocol'], {}), '(protocol)\n', (616, 626), False, 'from hbase import Hbase\n'), ((1637, 1658), 'json.dumps', 'json.dumps', (['customers'], {}), '(customers)\n', (1647, 1658), False, 'import json\n')]
|
__title__ = "playground"
__author__ = "murlux"
__copyright__ = "Copyright 2019, " + __author__
__credits__ = (__author__, )
__license__ = "MIT"
__email__ = "<EMAIL>"
import json
import logging
from typing import Any, Dict, Optional
from dateutil.relativedelta import relativedelta
from datetime import datetime
from jsonschema import validate, exceptions
from playground import enums
def timestamp_to_date(timestamp=None):
    """
    Convert a UNIX timestamp (int, float or numeric string) into a
    datetime object via ``datetime.fromtimestamp``.
    """
    seconds = int(timestamp)
    return datetime.fromtimestamp(seconds)
def setup_logger(name: str = None) -> logging.Logger:
    """Configure basic INFO-level logging and return the named logger."""
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    )
    return logging.getLogger(name)
class ArgumentDebugger:
    """Helpers for inspecting keyword arguments passed into a function."""

    @staticmethod
    def print_kwargs(**kwargs):
        """Dump the raw kwargs mapping to stdout."""
        print(kwargs)

    @staticmethod
    def print_values(**kwargs):
        """Print each keyword argument on its own line."""
        for name, val in kwargs.items():
            print("The value of {} is {}".format(name, val))
def validateJSONString(json_string: str = None) -> Optional[Dict[str, Any]]:
    """
    Parse *json_string* and return the decoded object, or None when the
    string is not valid JSON. Raises when no string is supplied.
    """
    logger: logging.Logger = setup_logger(name='JSONStringValidator')
    if json_string is None:
        raise Exception('JSONStringValidator expects `json_string` param to not be None')
    try:
        return json.loads(json_string)
    except ValueError as err:
        # json.JSONDecodeError subclasses ValueError, so this covers it.
        logger.error('Invalid json provided. exc:', exc_info=err)
        return None
def validateJSONFile(filename: str = None) -> Optional[Dict[str, Any]]:
    """
    Reads a file and validates whether its content is JSON.

    Returns the parsed JSON object, or None when the file cannot be read
    or does not contain valid JSON. Raises when `filename` is None.
    """
    logger: logging.Logger = setup_logger(name='JSONFileValidator')
    if filename is None:
        raise Exception('JSONFileValidator expects `filename` param to not be None')
    try:
        with open(file=filename, mode='r') as file:
            file_content: str = file.read()
            return validateJSONString(json_string=file_content)
    except FileNotFoundError as fnf_exc:
        # Must come before OSError: in Python 3, IOError is an alias of
        # OSError and FileNotFoundError subclasses it, so the original
        # `except IOError` listed first made this branch unreachable.
        logger.error('File not found. exc:', exc_info=fnf_exc)
    except OSError as io_exc:
        # Covers the former separate IOError/OSError handlers (the same
        # class in Python 3).
        logger.error('I/O Error occurred. exc:', exc_info=io_exc)
    return None
def validateJSONSchema(filename: str = None, schema_file: str = None) -> Optional[Dict[str, Any]]:
    """
    Reads a file and validates that its content is JSON and conforms to
    the JSON schema stored in `schema_file`.

    Returns the parsed data on success, None when the data violates the
    schema, and raises when `filename` is None or the schema itself
    cannot be loaded.
    """
    logger: logging.Logger = setup_logger(name='JSONSchemaValidator')

    def _load_json_schema(schema_filename: str = None) -> Optional[Dict[str, Any]]:
        """Load and parse the schema file; None on any read failure."""
        try:
            # Local handle name so the outer `schema_file` parameter is
            # not shadowed (the original shadowed it inside the `with`).
            with open(schema_filename) as schema_fh:
                return json.loads(schema_fh.read())
        except FileNotFoundError as fnf_exc:
            # Caught before OSError: FileNotFoundError subclasses
            # OSError (== IOError in Python 3), so the original ordering
            # made this branch unreachable.
            logger.error('File not found. exc:', exc_info=fnf_exc)
        except OSError as io_exc:
            logger.error('I/O Error occurred. exc:', exc_info=io_exc)
        return None

    if filename is None:
        raise Exception('JSONSchemaValidator expects `filename` param to not be None')
    data = validateJSONFile(filename=filename)
    schema = _load_json_schema(schema_filename=schema_file)
    if schema is None:
        raise Exception('JSONSchemaValidator expects schema file to be valid')
    try:
        validate(data, schema)
    except exceptions.ValidationError:
        # Schema violations are reported to the caller as None.
        return None
    return data
|
[
"jsonschema.validate",
"logging.basicConfig",
"json.loads",
"logging.getLogger"
] |
[((602, 625), 'logging.getLogger', 'logging.getLogger', (['name'], {}), '(name)\n', (619, 625), False, 'import logging\n'), ((630, 737), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'format': '"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""'}), "(level=logging.INFO, format=\n '%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", (649, 737), False, 'import logging\n'), ((1533, 1556), 'json.loads', 'json.loads', (['json_string'], {}), '(json_string)\n', (1543, 1556), False, 'import json\n'), ((3686, 3708), 'jsonschema.validate', 'validate', (['data', 'schema'], {}), '(data, schema)\n', (3694, 3708), False, 'from jsonschema import validate, exceptions\n')]
|
"""Test the rules that protect AVU records having attributes with a
given prefix.
"""
import ConfigParser
import subprocess
import unittest
class AVUProtectTest(unittest.TestCase): #pylint: disable=R0904
    """Test suite based on the unittest framework.

    Exercises iRODS command-line tools (imeta, iput, irm, ichmod) via
    subprocess to check that AVUs whose attribute names start with a
    configured prefix can only be changed by an admin, and that data
    objects flagged with the "<prefix>archive" AVU cannot be deleted.
    """
    def __init__(self, *args, **kwargs):
        """Read configuration."""
        super(AVUProtectTest, self).__init__(*args, **kwargs)
        # All settings live in avuprotecttest.cfg, section [AVUProtectTest].
        config = ConfigParser.RawConfigParser()
        config.read('avuprotecttest.cfg')
        self.rodspw = config.get('AVUProtectTest', 'admin_password')
        self.expcoll = config.get('AVUProtectTest', 'expcoll')
        self.testfile = config.get('AVUProtectTest', 'testfile')
        self.attrprefix = config.get('AVUProtectTest', 'attrprefix')
        # Full iRODS path of the temporary data object used by every test.
        self.fpath = self.expcoll + "/" + self.testfile
    def listavus(self):
        """Print the AVUs for the test file."""
        ret = subprocess.call("imeta ls -d '" + self.fpath + "'", shell=True)
        self.assertEqual(ret, 0)
    def listfiles(self):
        """Print the list of files in the collection."""
        ret = subprocess.call("ils '" + self.expcoll + "'", shell=True)
        self.assertEqual(ret, 0)
    def setUp(self): #pylint: disable=C0103
        """Setup done before each test is called."""
        # NOTE(review): exit() aborts the entire run when the config is
        # incomplete; unittest's skipTest would be gentler -- confirm intent.
        if (self.rodspw == "" or
                self.expcoll == "" or
                self.testfile == "" or
                self.attrprefix == ""):
            print ("Edit avuprotesttest.cfg to specify collection, "
                   "temporary filename, "
                   "and attribute name prefix to use for testing.")
            exit()
        # Create a local scratch file and upload it into the test collection.
        ret = subprocess.call("touch '" + self.testfile + "'", shell=True)
        self.assertEqual(ret, 0)
        ret = subprocess.call("iput '"
                        + self.testfile + "' '"
                        + self.expcoll + "'",
                        shell=True)
        self.assertEqual(ret, 0)
    def tearDown(self): #pylint: disable=C0103
        """Cleanup done after each test is called."""
        # Remove both the local scratch file and the uploaded data object.
        ret = subprocess.call(["rm", self.testfile])
        self.assertEqual(ret, 0)
        ret = subprocess.call("irm -f '" + self.fpath + "'", shell=True)
        self.assertEqual(ret, 0)
    def test_01_disallow_add_nonadmin(self):
        """Do not allow non-admin users to add protected AVU."""
        ret = subprocess.call("imeta add -d '"
                        + self.fpath
                        + "' '" + self.attrprefix + "archive' 'true'",
                        shell=True)
        # Non-zero status expected (observed: 4) -- presumably imeta fails
        # because the protection rule rejects the change; confirm against
        # the rule implementation.
        self.assertEqual(ret, 4)
    def test_02_allow_add_nonadmin(self):
        """Confirm non-protected AVUs can be added."""
        ret = subprocess.call("imeta add -d '"
                        + self.fpath
                        + "' 'T2T3' 'nonadmin'",
                        shell=True)
        self.assertEqual(ret, 0)
    def test_03_allow_rm_nonadmin(self):
        """Confirm non-protected AVUs can be removed."""
        # Add the non-protected AVU.
        ret = subprocess.call("imeta add -d '"
                        + self.fpath
                        + "' 'T2T3' 'nonadmin'",
                        shell=True)
        self.assertEqual(ret, 0)
        # Remove the non-protected AVU.
        ret = subprocess.call("imeta rm -d '"
                        + self.fpath
                        + "' 'T2T3' 'nonadmin'",
                        shell=True)
        self.assertEqual(ret, 0)
    def test_04_allow_mod_nonadmin(self):
        """Confirm non-protected AVUs can be modified."""
        # Add the non-protected AVU.
        ret = subprocess.call("imeta add -d '"
                        + self.fpath
                        + "' 'T4-1' 'nonadmin'",
                        shell=True)
        self.assertEqual(ret, 0)
        # Modify the non-protected AVU.
        ret = subprocess.call("imeta mod -d '"
                        + self.fpath
                        + "' 'T4-1' 'nonadmin' 'n:T4-2' 'v:nonadmin-2'",
                        shell=True)
        self.assertEqual(ret, 0)
        # Remove the non-protected AVU.
        ret = subprocess.call("imeta rm -d '"
                        + self.fpath
                        + "' 'T4-2' 'nonadmin-2'",
                        shell=True)
        self.assertEqual(ret, 0)
    def test_05_allow_add_admin(self):
        """Confirm an admin can still add protected AVUs."""
        # Grant admin write permission on the test file.
        ret = subprocess.call("ichmod write rods '"
                        + self.fpath
                        + "'",
                        shell=True)
        self.assertEqual(ret, 0)
        # Add the protected AVU. The command re-authenticates as the
        # 'rods' admin account via iinit before calling imeta.
        ret = subprocess.call("export irodsUserName='rods'; "
                        + "export irodsAuthScheme='password'; "
                        + "echo '" + self.rodspw + "' | iinit ; "
                        + "imeta add -d '"
                        + self.fpath
                        + "' '" + self.attrprefix + "archive' 'true'",
                        shell=True)
        self.assertEqual(ret, 0)
        self.listavus()
        # Remove the protected AVU.
        ret = subprocess.call("export irodsUserName='rods'; "
                        + "export irodsAuthScheme='password'; "
                        + "echo '" + self.rodspw + "' | iinit ; "
                        + "imeta rm -d '"
                        + self.fpath
                        + "' '" + self.attrprefix + "archive' 'true'",
                        shell=True)
        self.assertEqual(ret, 0)
    def test_06_disallow_del_archive(self):
        """Prevent deletion of data objects having the archive AVU set."""
        # Grant admin write permission on the test file.
        ret = subprocess.call("ichmod write rods '"
                        + self.fpath
                        + "'",
                        shell=True)
        self.assertEqual(ret, 0)
        # Add the protected AVU with archive set to true (as admin).
        ret = subprocess.call("export irodsUserName='rods'; "
                        + "export irodsAuthScheme='password'; "
                        + "echo '" + self.rodspw + "' | iinit ; "
                        + "imeta add -d '"
                        + self.fpath
                        + "' '" + self.attrprefix + "archive' 'true'",
                        shell=True)
        self.assertEqual(ret, 0)
        self.listavus()
        # Verify that removal fails (observed exit status: 3) -- presumably
        # blocked because the archive AVU is set; confirm against the rule.
        ret = subprocess.call("irm -f '" + self.fpath + "'", shell=True)
        self.assertEqual(ret, 3)
        self.listfiles()
        # Remove the protected archive attribute so tearDown can delete
        # the object.
        ret = subprocess.call("export irodsUserName='rods'; "
                        + "export irodsAuthScheme='password'; "
                        + "echo '" + self.rodspw + "' | iinit ; "
                        + "imeta rm -d '"
                        + self.fpath
                        + "' '" + self.attrprefix + "archive' 'true'",
                        shell=True)
        self.assertEqual(ret, 0)
        self.listavus()
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"ConfigParser.RawConfigParser",
"subprocess.call"
] |
[((7490, 7505), 'unittest.main', 'unittest.main', ([], {}), '()\n', (7503, 7505), False, 'import unittest\n'), ((425, 455), 'ConfigParser.RawConfigParser', 'ConfigParser.RawConfigParser', ([], {}), '()\n', (453, 455), False, 'import ConfigParser\n'), ((907, 970), 'subprocess.call', 'subprocess.call', (['("imeta ls -d \'" + self.fpath + "\'")'], {'shell': '(True)'}), '("imeta ls -d \'" + self.fpath + "\'", shell=True)\n', (922, 970), False, 'import subprocess\n'), ((1101, 1158), 'subprocess.call', 'subprocess.call', (['("ils \'" + self.expcoll + "\'")'], {'shell': '(True)'}), '("ils \'" + self.expcoll + "\'", shell=True)\n', (1116, 1158), False, 'import subprocess\n'), ((1644, 1704), 'subprocess.call', 'subprocess.call', (['("touch \'" + self.testfile + "\'")'], {'shell': '(True)'}), '("touch \'" + self.testfile + "\'", shell=True)\n', (1659, 1704), False, 'import subprocess\n'), ((1752, 1838), 'subprocess.call', 'subprocess.call', (['("iput \'" + self.testfile + "\' \'" + self.expcoll + "\'")'], {'shell': '(True)'}), '("iput \'" + self.testfile + "\' \'" + self.expcoll + "\'",\n shell=True)\n', (1767, 1838), False, 'import subprocess\n'), ((2077, 2115), 'subprocess.call', 'subprocess.call', (["['rm', self.testfile]"], {}), "(['rm', self.testfile])\n", (2092, 2115), False, 'import subprocess\n'), ((2163, 2221), 'subprocess.call', 'subprocess.call', (['("irm -f \'" + self.fpath + "\'")'], {'shell': '(True)'}), '("irm -f \'" + self.fpath + "\'", shell=True)\n', (2178, 2221), False, 'import subprocess\n'), ((2381, 2489), 'subprocess.call', 'subprocess.call', (['("imeta add -d \'" + self.fpath + "\' \'" + self.attrprefix + "archive\' \'true\'")'], {'shell': '(True)'}), '("imeta add -d \'" + self.fpath + "\' \'" + self.attrprefix +\n "archive\' \'true\'", shell=True)\n', (2396, 2489), False, 'import subprocess\n'), ((2724, 2810), 'subprocess.call', 'subprocess.call', (['("imeta add -d \'" + self.fpath + "\' \'T2T3\' \'nonadmin\'")'], {'shell': '(True)'}), '("imeta add -d 
\'" + self.fpath + "\' \'T2T3\' \'nonadmin\'",\n shell=True)\n', (2739, 2810), False, 'import subprocess\n'), ((3085, 3171), 'subprocess.call', 'subprocess.call', (['("imeta add -d \'" + self.fpath + "\' \'T2T3\' \'nonadmin\'")'], {'shell': '(True)'}), '("imeta add -d \'" + self.fpath + "\' \'T2T3\' \'nonadmin\'",\n shell=True)\n', (3100, 3171), False, 'import subprocess\n'), ((3350, 3436), 'subprocess.call', 'subprocess.call', (['("imeta rm -d \'" + self.fpath + "\' \'T2T3\' \'nonadmin\'")'], {'shell': '(True)'}), '("imeta rm -d \'" + self.fpath + "\' \'T2T3\' \'nonadmin\'", shell\n =True)\n', (3365, 3436), False, 'import subprocess\n'), ((3712, 3798), 'subprocess.call', 'subprocess.call', (['("imeta add -d \'" + self.fpath + "\' \'T4-1\' \'nonadmin\'")'], {'shell': '(True)'}), '("imeta add -d \'" + self.fpath + "\' \'T4-1\' \'nonadmin\'",\n shell=True)\n', (3727, 3798), False, 'import subprocess\n'), ((3977, 4087), 'subprocess.call', 'subprocess.call', (['("imeta mod -d \'" + self.fpath + "\' \'T4-1\' \'nonadmin\' \'n:T4-2\' \'v:nonadmin-2\'")'], {'shell': '(True)'}), '("imeta mod -d \'" + self.fpath +\n "\' \'T4-1\' \'nonadmin\' \'n:T4-2\' \'v:nonadmin-2\'", shell=True)\n', (3992, 4087), False, 'import subprocess\n'), ((4265, 4352), 'subprocess.call', 'subprocess.call', (['("imeta rm -d \'" + self.fpath + "\' \'T4-2\' \'nonadmin-2\'")'], {'shell': '(True)'}), '("imeta rm -d \'" + self.fpath + "\' \'T4-2\' \'nonadmin-2\'",\n shell=True)\n', (4280, 4352), False, 'import subprocess\n'), ((4648, 4717), 'subprocess.call', 'subprocess.call', (['("ichmod write rods \'" + self.fpath + "\'")'], {'shell': '(True)'}), '("ichmod write rods \'" + self.fpath + "\'", shell=True)\n', (4663, 4717), False, 'import subprocess\n'), ((4893, 5126), 'subprocess.call', 'subprocess.call', (['("export irodsUserName=\'rods\'; " + "export irodsAuthScheme=\'password\'; " +\n "echo \'" + self.rodspw + "\' | iinit ; " + "imeta add -d \'" + self.fpath +\n "\' \'" + self.attrprefix + "archive\' 
\'true\'")'], {'shell': '(True)'}), '("export irodsUserName=\'rods\'; " +\n "export irodsAuthScheme=\'password\'; " + "echo \'" + self.rodspw +\n "\' | iinit ; " + "imeta add -d \'" + self.fpath + "\' \'" + self.\n attrprefix + "archive\' \'true\'", shell=True)\n', (4908, 5126), False, 'import subprocess\n'), ((5403, 5634), 'subprocess.call', 'subprocess.call', (['("export irodsUserName=\'rods\'; " + "export irodsAuthScheme=\'password\'; " +\n "echo \'" + self.rodspw + "\' | iinit ; " + "imeta rm -d \'" + self.fpath +\n "\' \'" + self.attrprefix + "archive\' \'true\'")'], {'shell': '(True)'}), '("export irodsUserName=\'rods\'; " +\n "export irodsAuthScheme=\'password\'; " + "echo \'" + self.rodspw +\n "\' | iinit ; " + "imeta rm -d \'" + self.fpath + "\' \'" + self.attrprefix +\n "archive\' \'true\'", shell=True)\n', (5418, 5634), False, 'import subprocess\n'), ((6029, 6098), 'subprocess.call', 'subprocess.call', (['("ichmod write rods \'" + self.fpath + "\'")'], {'shell': '(True)'}), '("ichmod write rods \'" + self.fpath + "\'", shell=True)\n', (6044, 6098), False, 'import subprocess\n'), ((6299, 6532), 'subprocess.call', 'subprocess.call', (['("export irodsUserName=\'rods\'; " + "export irodsAuthScheme=\'password\'; " +\n "echo \'" + self.rodspw + "\' | iinit ; " + "imeta add -d \'" + self.fpath +\n "\' \'" + self.attrprefix + "archive\' \'true\'")'], {'shell': '(True)'}), '("export irodsUserName=\'rods\'; " +\n "export irodsAuthScheme=\'password\'; " + "echo \'" + self.rodspw +\n "\' | iinit ; " + "imeta add -d \'" + self.fpath + "\' \'" + self.\n attrprefix + "archive\' \'true\'", shell=True)\n', (6314, 6532), False, 'import subprocess\n'), ((6818, 6876), 'subprocess.call', 'subprocess.call', (['("irm -f \'" + self.fpath + "\'")'], {'shell': '(True)'}), '("irm -f \'" + self.fpath + "\'", shell=True)\n', (6833, 6876), False, 'import subprocess\n'), ((7001, 7232), 'subprocess.call', 'subprocess.call', (['("export irodsUserName=\'rods\'; " + "export 
irodsAuthScheme=\'password\'; " +\n "echo \'" + self.rodspw + "\' | iinit ; " + "imeta rm -d \'" + self.fpath +\n "\' \'" + self.attrprefix + "archive\' \'true\'")'], {'shell': '(True)'}), '("export irodsUserName=\'rods\'; " +\n "export irodsAuthScheme=\'password\'; " + "echo \'" + self.rodspw +\n "\' | iinit ; " + "imeta rm -d \'" + self.fpath + "\' \'" + self.attrprefix +\n "archive\' \'true\'", shell=True)\n', (7016, 7232), False, 'import subprocess\n')]
|
import logging
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect
from django.views.generic.base import View
from django.views.generic.detail import DetailView
from getpaid.backends.payu import PaymentProcessor
from getpaid.models import Payment
logger = logging.getLogger('getpaid.backends.payu')
class OnlineView(View):
    """
    Answers PayU "online" requests, which acknowledge a payment status
    change.

    The actual processing is delegated to ``PaymentProcessor.online()``;
    this view only extracts the expected POST fields and reports
    malformed requests.
    """
    def post(self, request, *args, **kwargs):
        # These four fields must all be present on a valid notification.
        try:
            pos_id = request.POST['pos_id']
            session_id = request.POST['session_id']
            ts = request.POST['ts']
            sig = request.POST['sig']
        except KeyError:
            logger.warning('Got malformed POST request: %s' % str(request.POST))
            return HttpResponse('MALFORMED')
        # Whatever the processor returns is sent back as the response body.
        status = PaymentProcessor.online(pos_id, session_id, ts, sig)
        return HttpResponse(status)
class SuccessView(DetailView):
    """Redirect the client to the standard getpaid success fallback URL."""

    model = Payment

    def render_to_response(self, context, **response_kwargs):
        target = reverse('getpaid-success-fallback', kwargs={'pk': self.object.pk})
        return HttpResponseRedirect(target)
class FailureView(DetailView):
    """Log the backend error, then redirect to the standard getpaid failure fallback URL."""

    model = Payment

    def render_to_response(self, context, **response_kwargs):
        logger.error("Payment %s failed on backend error %s" % (self.kwargs['pk'], self.kwargs['error']))
        target = reverse('getpaid-failure-fallback', kwargs={'pk': self.object.pk})
        return HttpResponseRedirect(target)
|
[
"django.core.urlresolvers.reverse",
"django.http.HttpResponse",
"logging.getLogger",
"getpaid.backends.payu.PaymentProcessor.online"
] |
[((309, 351), 'logging.getLogger', 'logging.getLogger', (['"""getpaid.backends.payu"""'], {}), "('getpaid.backends.payu')\n", (326, 351), False, 'import logging\n'), ((982, 1034), 'getpaid.backends.payu.PaymentProcessor.online', 'PaymentProcessor.online', (['pos_id', 'session_id', 'ts', 'sig'], {}), '(pos_id, session_id, ts, sig)\n', (1005, 1034), False, 'from getpaid.backends.payu import PaymentProcessor\n'), ((1050, 1070), 'django.http.HttpResponse', 'HttpResponse', (['status'], {}), '(status)\n', (1062, 1070), False, 'from django.http import HttpResponse, HttpResponseRedirect\n'), ((1301, 1367), 'django.core.urlresolvers.reverse', 'reverse', (['"""getpaid-success-fallback"""'], {'kwargs': "{'pk': self.object.pk}"}), "('getpaid-success-fallback', kwargs={'pk': self.object.pk})\n", (1308, 1367), False, 'from django.core.urlresolvers import reverse\n'), ((1705, 1771), 'django.core.urlresolvers.reverse', 'reverse', (['"""getpaid-failure-fallback"""'], {'kwargs': "{'pk': self.object.pk}"}), "('getpaid-failure-fallback', kwargs={'pk': self.object.pk})\n", (1712, 1771), False, 'from django.core.urlresolvers import reverse\n'), ((938, 963), 'django.http.HttpResponse', 'HttpResponse', (['"""MALFORMED"""'], {}), "('MALFORMED')\n", (950, 963), False, 'from django.http import HttpResponse, HttpResponseRedirect\n')]
|
# Unless explicitly stated otherwise all files in this repository are licensed under the the Apache License Version 2.0.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2021 Datadog, Inc.
from utils import BaseTestCase, context, released, interfaces, coverage
import pytest
if context.library == "cpp":
pytestmark = pytest.mark.skip("not relevant")
@released(golang="?", dotnet="?", java="?", nodejs="?", php="?", python="?", ruby="?")
@coverage.not_implemented
class Test_Scrubbing(BaseTestCase):
"""Appsec scrubs all sensitive data"""
|
[
"pytest.mark.skip",
"utils.released"
] |
[((407, 496), 'utils.released', 'released', ([], {'golang': '"""?"""', 'dotnet': '"""?"""', 'java': '"""?"""', 'nodejs': '"""?"""', 'php': '"""?"""', 'python': '"""?"""', 'ruby': '"""?"""'}), "(golang='?', dotnet='?', java='?', nodejs='?', php='?', python='?',\n ruby='?')\n", (415, 496), False, 'from utils import BaseTestCase, context, released, interfaces, coverage\n'), ((371, 403), 'pytest.mark.skip', 'pytest.mark.skip', (['"""not relevant"""'], {}), "('not relevant')\n", (387, 403), False, 'import pytest\n')]
|
'''
Created on Jan, 2017
@author: hugo
'''
from __future__ import absolute_import
import multiprocessing
from gensim.models import Doc2Vec
class MyDoc2Vec(object):
def __init__(self, dim, hs=0, window=5, negative=5, epoches=5, dm=1, dm_concat=1):
super(MyDoc2Vec, self).__init__()
self.dim = dim
self.hs = hs
self.window = window
self.negative = negative
self.epoches = epoches
self.dm = dm
self.dm_concat = dm_concat
def train(self, corpus):
self.model = Doc2Vec(min_count=1, window=self.window, size=self.dim, \
workers=multiprocessing.cpu_count(), hs=self.hs,\
negative=self.negative, iter=1, dm=self.dm, dm_concat=self.dm_concat)
self.model.build_vocab(corpus())
for each in range(self.epoches):
self.model.train(corpus())
return self
def predict(model, corpus):
doc_codes = {}
for doc_words, doc_name in corpus():
doc_codes[doc_name[0]] = model.infer_vector(doc_words).tolist()
return doc_codes
def save_doc2vec(model, outfile):
model.save(outfile)
def load_doc2vec(mod_file):
return Doc2Vec.load(mod_file)
|
[
"gensim.models.Doc2Vec.load",
"multiprocessing.cpu_count"
] |
[((1167, 1189), 'gensim.models.Doc2Vec.load', 'Doc2Vec.load', (['mod_file'], {}), '(mod_file)\n', (1179, 1189), False, 'from gensim.models import Doc2Vec\n'), ((619, 646), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (644, 646), False, 'import multiprocessing\n')]
|
from enum import Enum
from typing import Any
from app.schemas.debit import DebitCreate, DebitUpdate
from fastapi import APIRouter, Depends, HTTPException
from fastapi import status as sts
from sqlalchemy.orm import Session
from app import crud, models, schemas
from app.api import deps
from app.core.celery_app import celery_app
router = APIRouter()
class StatusRequest(str, Enum):
canceled = "canceled"
approved = "approved"
rejected = "rejected"
@router.get("/request", response_model=schemas.Debit)
async def get_automatic_debit_request(
db: Session = Depends(deps.get_db),
current_user: models.User = Depends(deps.get_current_active_user)
) -> Any:
"""
Request Automatic Debit.
"""
debit = crud.debit.get_by_owner(db, owner_id=current_user.id)
if debit:
raise HTTPException(status_code=sts.HTTP_400_BAD_REQUEST,
detail="Automatic debit request already made.")
obj_in = DebitCreate()
return crud.debit.create_with_owner(db, obj_in=obj_in,
owner_id=current_user.id)
@router.put("/{owner_id}", response_model=schemas.Debit)
async def update_status(
owner_id: int,
status: StatusRequest,
db: Session = Depends(deps.get_db),
current_user: models.User = Depends(deps.get_current_active_superuser),
) -> Any:
"""
Update Automatic Debit Status.
"""
debit = crud.debit.get_by_owner(db, owner_id=owner_id)
if not debit:
raise HTTPException(status_code=sts.HTTP_404_NOT_FOUND,
detail="Not Found automatic debit request by id")
obj_in = DebitUpdate(status=status)
debit_out = crud.debit.update_status(db, db_obj=debit, obj_in=obj_in)
if status in (StatusRequest.canceled, StatusRequest.approved):
user = crud.user.get(db, id=debit.owner_id)
celery_app.send_task("app.tasks.send_email.email_task",
args=[status, user.email])
return debit_out
@router.get("/{owner_id}", response_model=schemas.Debit)
async def get_automatic_debit_by_owner_id(
owner_id: int,
db: Session = Depends(deps.get_db),
current_user: models.User = Depends(deps.get_current_active_user)
) -> Any:
"""
Get Automatic Debit By Owner Id.
"""
debit = crud.debit.get_by_owner(db, owner_id=owner_id)
if not debit:
raise HTTPException(status_code=sts.HTTP_404_NOT_FOUND,
detail="Automatic debit request not found.")
return debit
|
[
"app.crud.debit.get_by_owner",
"app.crud.user.get",
"fastapi.HTTPException",
"app.crud.debit.create_with_owner",
"app.core.celery_app.celery_app.send_task",
"app.schemas.debit.DebitCreate",
"fastapi.Depends",
"app.schemas.debit.DebitUpdate",
"app.crud.debit.update_status",
"fastapi.APIRouter"
] |
[((341, 352), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (350, 352), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((578, 598), 'fastapi.Depends', 'Depends', (['deps.get_db'], {}), '(deps.get_db)\n', (585, 598), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((632, 669), 'fastapi.Depends', 'Depends', (['deps.get_current_active_user'], {}), '(deps.get_current_active_user)\n', (639, 669), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((737, 790), 'app.crud.debit.get_by_owner', 'crud.debit.get_by_owner', (['db'], {'owner_id': 'current_user.id'}), '(db, owner_id=current_user.id)\n', (760, 790), False, 'from app import crud, models, schemas\n'), ((961, 974), 'app.schemas.debit.DebitCreate', 'DebitCreate', ([], {}), '()\n', (972, 974), False, 'from app.schemas.debit import DebitCreate, DebitUpdate\n'), ((986, 1059), 'app.crud.debit.create_with_owner', 'crud.debit.create_with_owner', (['db'], {'obj_in': 'obj_in', 'owner_id': 'current_user.id'}), '(db, obj_in=obj_in, owner_id=current_user.id)\n', (1014, 1059), False, 'from app import crud, models, schemas\n'), ((1248, 1268), 'fastapi.Depends', 'Depends', (['deps.get_db'], {}), '(deps.get_db)\n', (1255, 1268), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((1302, 1344), 'fastapi.Depends', 'Depends', (['deps.get_current_active_superuser'], {}), '(deps.get_current_active_superuser)\n', (1309, 1344), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((1419, 1465), 'app.crud.debit.get_by_owner', 'crud.debit.get_by_owner', (['db'], {'owner_id': 'owner_id'}), '(db, owner_id=owner_id)\n', (1442, 1465), False, 'from app import crud, models, schemas\n'), ((1640, 1666), 'app.schemas.debit.DebitUpdate', 'DebitUpdate', ([], {'status': 'status'}), '(status=status)\n', (1651, 1666), False, 'from app.schemas.debit import DebitCreate, DebitUpdate\n'), ((1683, 1740), 'app.crud.debit.update_status', 'crud.debit.update_status', 
(['db'], {'db_obj': 'debit', 'obj_in': 'obj_in'}), '(db, db_obj=debit, obj_in=obj_in)\n', (1707, 1740), False, 'from app import crud, models, schemas\n'), ((2145, 2165), 'fastapi.Depends', 'Depends', (['deps.get_db'], {}), '(deps.get_db)\n', (2152, 2165), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((2199, 2236), 'fastapi.Depends', 'Depends', (['deps.get_current_active_user'], {}), '(deps.get_current_active_user)\n', (2206, 2236), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((2312, 2358), 'app.crud.debit.get_by_owner', 'crud.debit.get_by_owner', (['db'], {'owner_id': 'owner_id'}), '(db, owner_id=owner_id)\n', (2335, 2358), False, 'from app import crud, models, schemas\n'), ((819, 923), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': 'sts.HTTP_400_BAD_REQUEST', 'detail': '"""Automatic debit request already made."""'}), "(status_code=sts.HTTP_400_BAD_REQUEST, detail=\n 'Automatic debit request already made.')\n", (832, 923), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((1498, 1602), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': 'sts.HTTP_404_NOT_FOUND', 'detail': '"""Not Found automatic debit request by id"""'}), "(status_code=sts.HTTP_404_NOT_FOUND, detail=\n 'Not Found automatic debit request by id')\n", (1511, 1602), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((1828, 1864), 'app.crud.user.get', 'crud.user.get', (['db'], {'id': 'debit.owner_id'}), '(db, id=debit.owner_id)\n', (1841, 1864), False, 'from app import crud, models, schemas\n'), ((1873, 1960), 'app.core.celery_app.celery_app.send_task', 'celery_app.send_task', (['"""app.tasks.send_email.email_task"""'], {'args': '[status, user.email]'}), "('app.tasks.send_email.email_task', args=[status, user.\n email])\n", (1893, 1960), False, 'from app.core.celery_app import celery_app\n'), ((2391, 2490), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': 'sts.HTTP_404_NOT_FOUND', 
'detail': '"""Automatic debit request not found."""'}), "(status_code=sts.HTTP_404_NOT_FOUND, detail=\n 'Automatic debit request not found.')\n", (2404, 2490), False, 'from fastapi import APIRouter, Depends, HTTPException\n')]
|
import warnings
warnings.filterwarnings('ignore')
from autox.autox_server.model import model_util
def lgb_with_fe(G_df_dict, G_data_info, G_hist, is_train, remain_time, params, lgb_para_dict, data_name, exp_name):
remain_time = model_util.lgb_model(G_df_dict['BIG_FE'], G_data_info, G_hist, is_train, remain_time, exp_name, params,
lgb_para_dict, data_name)
return remain_time
|
[
"autox.autox_server.model.model_util.lgb_model",
"warnings.filterwarnings"
] |
[((16, 49), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (39, 49), False, 'import warnings\n'), ((233, 366), 'autox.autox_server.model.model_util.lgb_model', 'model_util.lgb_model', (["G_df_dict['BIG_FE']", 'G_data_info', 'G_hist', 'is_train', 'remain_time', 'exp_name', 'params', 'lgb_para_dict', 'data_name'], {}), "(G_df_dict['BIG_FE'], G_data_info, G_hist, is_train,\n remain_time, exp_name, params, lgb_para_dict, data_name)\n", (253, 366), False, 'from autox.autox_server.model import model_util\n')]
|
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
"""Tests for MicroBenchmark and MicroBenchmarkWithInvoke modules."""
import os
import re
import shutil
from superbench.benchmarks import BenchmarkType, ReturnCode
from superbench.benchmarks.micro_benchmarks import MicroBenchmark, MicroBenchmarkWithInvoke
class FakeMicroBenchmark(MicroBenchmark):
"""Fake benchmark inherit from MicroBenchmark."""
def __init__(self, name, parameters=''):
"""Constructor.
Args:
name: benchmark name.
parameters: benchmark parameters.
"""
super().__init__(name, parameters)
def _benchmark(self):
"""Implementation for benchmarking.
Return:
True if run benchmark successfully.
"""
return True
class FakeMicroBenchmarkWithInvoke(MicroBenchmarkWithInvoke):
"""Fake benchmark inherit from MicroBenchmarkWithInvoke."""
def __init__(self, name, parameters=''):
"""Constructor.
Args:
name: benchmark name.
parameters: benchmark parameters.
"""
super().__init__(name, parameters)
def _preprocess(self):
"""Preprocess/preparation operations before the benchmarking.
Return:
True if _preprocess() succeed.
"""
if not super()._preprocess():
return False
command = os.path.join(self._args.bin_dir, self._bin_name)
command += " -n 'cost1: 10.2, cost2: 20.2'"
self._commands.append(command)
return True
def _process_raw_result(self, cmd_idx, raw_output):
"""Function to process raw results and save the summarized results.
self._result.add_raw_data() and self._result.add_result() need to be called to save the results.
Args:
cmd_idx (int): the index of command corresponding with the raw_output.
raw_output (str): raw output string of the micro-benchmark.
Return:
True if the raw output string is valid and result can be extracted.
"""
self._result.add_raw_data('raw_output_' + str(cmd_idx), raw_output)
pattern = r'\d+\.\d+'
result = re.findall(pattern, raw_output)
if len(result) != 2:
return False
try:
result = [float(item) for item in result]
except BaseException:
return False
self._result.add_result('cost1', result[0])
self._result.add_result('cost2', result[1])
return True
def test_micro_benchmark_base():
"""Test MicroBenchmark."""
benchmark = FakeMicroBenchmark('fake')
assert (benchmark._benchmark_type == BenchmarkType.MICRO)
assert (benchmark.run())
assert (benchmark.return_code == ReturnCode.SUCCESS)
benchmark._process_numeric_result('metric1', [1, 2, 3, 4, 5, 6])
assert (benchmark.result['metric1'] == [3.5])
assert (benchmark.raw_data['metric1'] == [[1, 2, 3, 4, 5, 6]])
def test_micro_benchmark_with_invoke_base():
"""Test MicroBenchmarkWithInvoke."""
# Negative case - MICROBENCHMARK_BINARY_NAME_NOT_SET.
benchmark = FakeMicroBenchmarkWithInvoke('fake')
assert (benchmark._benchmark_type == BenchmarkType.MICRO)
assert (benchmark.run() is False)
assert (benchmark.return_code == ReturnCode.MICROBENCHMARK_BINARY_NAME_NOT_SET)
# Negative case - MICROBENCHMARK_BINARY_NOT_EXIST.
benchmark = FakeMicroBenchmarkWithInvoke('fake')
benchmark._bin_name = 'not_existed_binary'
assert (benchmark.run() is False)
assert (benchmark.return_code == ReturnCode.MICROBENCHMARK_BINARY_NOT_EXIST)
# Positive case.
benchmark = FakeMicroBenchmarkWithInvoke('fake')
benchmark._bin_name = 'echo'
assert (benchmark.run())
assert (benchmark.return_code == ReturnCode.SUCCESS)
assert (os.path.join(benchmark._args.bin_dir, benchmark._bin_name) == shutil.which(benchmark._bin_name))
assert (benchmark._commands[0] == (shutil.which(benchmark._bin_name) + " -n 'cost1: 10.2, cost2: 20.2'"))
assert (benchmark.raw_data['raw_output_0'] == ['cost1: 10.2, cost2: 20.2'])
assert (benchmark.result['cost1'] == [10.2])
assert (benchmark.result['cost2'] == [20.2])
|
[
"re.findall",
"os.path.join",
"shutil.which"
] |
[((1415, 1463), 'os.path.join', 'os.path.join', (['self._args.bin_dir', 'self._bin_name'], {}), '(self._args.bin_dir, self._bin_name)\n', (1427, 1463), False, 'import os\n'), ((2219, 2250), 're.findall', 're.findall', (['pattern', 'raw_output'], {}), '(pattern, raw_output)\n', (2229, 2250), False, 'import re\n'), ((3861, 3919), 'os.path.join', 'os.path.join', (['benchmark._args.bin_dir', 'benchmark._bin_name'], {}), '(benchmark._args.bin_dir, benchmark._bin_name)\n', (3873, 3919), False, 'import os\n'), ((3923, 3956), 'shutil.which', 'shutil.which', (['benchmark._bin_name'], {}), '(benchmark._bin_name)\n', (3935, 3956), False, 'import shutil\n'), ((3997, 4030), 'shutil.which', 'shutil.which', (['benchmark._bin_name'], {}), '(benchmark._bin_name)\n', (4009, 4030), False, 'import shutil\n')]
|
# -*- coding: utf-8 -*-
import pdb,importlib,inspect,time,datetime,json
# from PyFin.api import advanceDateByCalendar
# from data.polymerize import DBPolymerize
from data.storage_engine import StorageEngine
import time
import pandas as pd
import numpy as np
from datetime import timedelta, datetime
from financial import factor_earning
from data.model import BalanceMRQ, BalanceTTM, BalanceReport
from data.model import CashFlowTTM, CashFlowReport
from data.model import IndicatorReport
from data.model import IncomeReport, IncomeTTM
from vision.db.signletion_engine import *
from data.sqlengine import sqlEngine
# pd.set_option('display.max_columns', None)
# pd.set_option('display.max_rows', None)
# from ultron.cluster.invoke.cache_data import cache_data
class CalcEngine(object):
def __init__(self, name, url, methods=[{'packet':'financial.factor_earning','class':'FactorEarning'},]):
self._name = name
self._methods = methods
self._url = url
def get_trade_date(self, trade_date, n, days=365):
"""
获取当前时间前n年的时间点,且为交易日,如果非交易日,则往前提取最近的一天。
:param days:
:param trade_date: 当前交易日
:param n:
:return:
"""
syn_util = SyncUtil()
trade_date_sets = syn_util.get_all_trades('001002', '19900101', trade_date)
trade_date_sets = trade_date_sets['TRADEDATE'].values
time_array = datetime.strptime(str(trade_date), "%Y%m%d")
time_array = time_array - timedelta(days=days) * n
date_time = int(datetime.strftime(time_array, "%Y%m%d"))
if str(date_time) < min(trade_date_sets):
# print('date_time %s is out of trade_date_sets' % date_time)
return str(date_time)
else:
while str(date_time) not in trade_date_sets:
date_time = date_time - 1
# print('trade_date pre %s year %s' % (n, date_time))
return str(date_time)
def _func_sets(self, method):
# 私有函数和保护函数过滤
return list(filter(lambda x: not x.startswith('_') and callable(getattr(method,x)), dir(method)))
def loading_data(self, trade_date):
"""
获取基础数据
按天获取当天交易日所有股票的基础数据
:param trade_date: 交易日
:return:
"""
# 转换时间格式
time_array = datetime.strptime(trade_date, "%Y-%m-%d")
trade_date = datetime.strftime(time_array, '%Y%m%d')
# 读取目前涉及到的因子
trade_date_pre_year = self.get_trade_date(trade_date, 1)
trade_date_pre_year_2 = self.get_trade_date(trade_date, 2)
trade_date_pre_year_3 = self.get_trade_date(trade_date, 3)
trade_date_pre_year_4 = self.get_trade_date(trade_date, 4)
trade_date_pre_year_5 = self.get_trade_date(trade_date, 5)
engine = sqlEngine()
columns = ['COMPCODE', 'PUBLISHDATE', 'ENDDATE', 'symbol', 'company_id', 'trade_date']
# Report Data
cash_flow_sets = engine.fetch_fundamentals_pit_extend_company_id(CashFlowReport,
[CashFlowReport.LABORGETCASH,
CashFlowReport.FINALCASHBALA,
], dates=[trade_date])
for column in columns:
if column in list(cash_flow_sets.keys()):
cash_flow_sets = cash_flow_sets.drop(column, axis=1)
cash_flow_sets = cash_flow_sets.rename(
columns={'LABORGETCASH': 'goods_sale_and_service_render_cash', # 销售商品、提供劳务收到的现金
'FINALCASHBALA': 'cash_and_equivalents_at_end', # 期末现金及现金等价物余额
})
income_sets = engine.fetch_fundamentals_pit_extend_company_id(IncomeReport,
[IncomeReport.BIZTOTINCO,
IncomeReport.BIZINCO,
IncomeReport.PERPROFIT,
IncomeReport.PARENETP,
IncomeReport.NETPROFIT,
], dates=[trade_date])
for column in columns:
if column in list(income_sets.keys()):
income_sets = income_sets.drop(column, axis=1)
income_sets = income_sets.rename(columns={'NETPROFIT': 'net_profit', # 净利润
'BIZTOTINCO': 'total_operating_revenue', # 营业总收入
'BIZINCO': 'operating_revenue', # 营业收入
'PERPROFIT': 'operating_profit', # 营业利润
'PARENETP': 'np_parent_company_owners', # 归属于母公司所有者的净利润
})
indicator_sets = engine.fetch_fundamentals_pit_extend_company_id(IndicatorReport,
[
IndicatorReport.NETPROFITCUT,
# 扣除非经常损益后的净利润
IndicatorReport.MGTEXPRT
], dates=[trade_date])
for column in columns:
if column in list(indicator_sets.keys()):
indicator_sets = indicator_sets.drop(column, axis=1)
indicator_sets = indicator_sets.rename(columns={'NETPROFITCUT': 'adjusted_profit', # 扣除非经常损益后的净利润
})
balance_sets = engine.fetch_fundamentals_pit_extend_company_id(BalanceReport,
[BalanceReport.PARESHARRIGH,
], dates=[trade_date])
for column in columns:
if column in list(balance_sets.keys()):
balance_sets = balance_sets.drop(column, axis=1)
balance_sets = balance_sets.rename(columns={'PARESHARRIGH': 'equities_parent_company_owners', # 归属于母公司股东权益合计
})
income_sets_pre_year_1 = engine.fetch_fundamentals_pit_extend_company_id(IncomeReport,
[IncomeReport.BIZINCO, # 营业收入
IncomeReport.NETPROFIT, # 净利润
], dates=[trade_date_pre_year])
for column in columns:
if column in list(income_sets_pre_year_1.keys()):
income_sets_pre_year_1 = income_sets_pre_year_1.drop(column, axis=1)
income_sets_pre_year_1 = income_sets_pre_year_1.rename(columns={'NETPROFIT': 'net_profit_pre_year_1', # 净利润
'BIZINCO': 'operating_revenue_pre_year_1',
# 营业收入
})
income_sets_pre_year_2 = engine.fetch_fundamentals_pit_extend_company_id(IncomeReport,
[IncomeReport.BIZINCO,
IncomeReport.NETPROFIT,
], dates=[trade_date_pre_year_2])
for column in columns:
if column in list(income_sets_pre_year_2.keys()):
income_sets_pre_year_2 = income_sets_pre_year_2.drop(column, axis=1)
income_sets_pre_year_2 = income_sets_pre_year_2.rename(columns={'NETPROFIT': 'net_profit_pre_year_2', # 净利润
'BIZINCO': 'operating_revenue_pre_year_2',
# 营业收入
})
income_sets_pre_year_3 = engine.fetch_fundamentals_pit_extend_company_id(IncomeReport,
[IncomeReport.BIZINCO,
IncomeReport.NETPROFIT,
], dates=[trade_date_pre_year_3])
for column in columns:
if column in list(income_sets_pre_year_3.keys()):
income_sets_pre_year_3 = income_sets_pre_year_3.drop(column, axis=1)
income_sets_pre_year_3 = income_sets_pre_year_3.rename(columns={'NETPROFIT': 'net_profit_pre_year_3', # 净利润
'BIZINCO': 'operating_revenue_pre_year_3',
# 营业收入
})
income_sets_pre_year_4 = engine.fetch_fundamentals_pit_extend_company_id(IncomeReport,
[IncomeReport.BIZINCO,
IncomeReport.NETPROFIT,
], dates=[trade_date_pre_year_4])
for column in columns:
if column in list(income_sets_pre_year_4.keys()):
income_sets_pre_year_4 = income_sets_pre_year_4.drop(column, axis=1)
income_sets_pre_year_4 = income_sets_pre_year_4.rename(columns={'NETPROFIT': 'net_profit_pre_year_4', # 净利润
'BIZINCO': 'operating_revenue_pre_year_4',
# 营业收入
})
tp_earning = pd.merge(cash_flow_sets, income_sets, how='outer', on='security_code')
tp_earning = pd.merge(indicator_sets, tp_earning, how='outer', on='security_code')
tp_earning = pd.merge(balance_sets, tp_earning, how='outer', on='security_code')
tp_earning = pd.merge(income_sets_pre_year_1, tp_earning, how='outer', on='security_code')
tp_earning = pd.merge(income_sets_pre_year_2, tp_earning, how='outer', on='security_code')
tp_earning = pd.merge(income_sets_pre_year_3, tp_earning, how='outer', on='security_code')
tp_earning = pd.merge(income_sets_pre_year_4, tp_earning, how='outer', on='security_code')
# MRQ
balance_mrq_sets = engine.fetch_fundamentals_pit_extend_company_id(BalanceMRQ,
[BalanceMRQ.TOTASSET, # 资产总计
BalanceMRQ.PARESHARRIGH, # 归属于母公司股东权益合计
BalanceMRQ.RIGHAGGR, # 所有者权益(或股东权益)合计
BalanceMRQ.LONGBORR, # 长期借款
], dates=[trade_date])
for column in columns:
if column in list(balance_mrq_sets.keys()):
balance_mrq_sets = balance_mrq_sets.drop(column, axis=1)
balance_mrq_sets = balance_mrq_sets.rename(columns={'TOTASSET': 'total_assets_mrq',
'PARESHARRIGH': 'equities_parent_company_owners_mrq',
# 归属于母公司股东权益合计
'RIGHAGGR': 'total_owner_equities_mrq', # 所有者权益(或股东权益)合计
'LONGBORR': 'longterm_loan_mrq', # 长期借款
})
balance_mrq_sets_pre = engine.fetch_fundamentals_pit_extend_company_id(BalanceMRQ,
[BalanceMRQ.TOTASSET, # 资产总计
BalanceMRQ.RIGHAGGR, # 所有者权益(或股东权益)合计
BalanceMRQ.LONGBORR, # 长期借款
], dates=[trade_date])
for column in columns:
if column in list(balance_mrq_sets_pre.keys()):
balance_mrq_sets_pre = balance_mrq_sets_pre.drop(column, axis=1)
balance_mrq_sets_pre = balance_mrq_sets_pre.rename(columns={'TOTASSET': 'total_assets_mrq_pre',
'RIGHAGGR': 'total_owner_equities_mrq_pre',
# 所有者权益(或股东权益)合计
'LONGBORR': 'longterm_loan_mrq_pre', # 长期借款
})
# TTM Data
cash_flow_ttm_sets = engine.fetch_fundamentals_pit_extend_company_id(CashFlowTTM,
[CashFlowTTM.FINNETCFLOW,
], dates=[trade_date])
for column in columns:
if column in list(cash_flow_ttm_sets.keys()):
cash_flow_ttm_sets = cash_flow_ttm_sets.drop(column, axis=1)
cash_flow_ttm_sets = cash_flow_ttm_sets.rename(columns={'FINNETCFLOW': 'net_finance_cash_flow'})
income_ttm_sets = engine.fetch_fundamentals_pit_extend_company_id(IncomeTTM,
[IncomeTTM.BIZINCO, # 营业收入
IncomeTTM.NETPROFIT, # 净利润
IncomeTTM.MANAEXPE, # 管理费用
IncomeTTM.BIZTOTINCO, # 营业总收入
IncomeTTM.TOTPROFIT, # 利润总额
IncomeTTM.FINEXPE, # 财务费用
IncomeTTM.INTEINCO, # 利息收入
IncomeTTM.SALESEXPE, # 销售费用
IncomeTTM.BIZTOTCOST, # 营业总成本
IncomeTTM.PERPROFIT, # 营业利润
IncomeTTM.PARENETP, # 归属于母公司所有者的净利润
IncomeTTM.BIZCOST, # 营业成本
IncomeTTM.ASSOINVEPROF, # 对联营企业和合营企业的投资收益
IncomeTTM.BIZTAX, # 营业税金及附加
IncomeTTM.ASSEIMPALOSS, # 资产减值损失
], dates=[trade_date])
for column in columns:
if column in list(income_ttm_sets.keys()):
income_ttm_sets = income_ttm_sets.drop(column, axis=1)
income_ttm_sets = income_ttm_sets.rename(columns={'BIZINCO': 'operating_revenue', # 营业收入
'NETPROFIT': 'net_profit', # 净利润
'MANAEXPE': 'administration_expense', # 管理费用
'BIZTOTINCO': 'total_operating_revenue', # 营业总收入
'TOTPROFIT': 'total_profit', # 利润总额
'FINEXPE': 'financial_expense', # 财务费用
'INTEINCO': 'interest_income', # 利息收入
'SALESEXPE': 'sale_expense', # 销售费用
'BIZTOTCOST': 'total_operating_cost', # 营业总成本
'PERPROFIT': 'operating_profit', # 营业利润
'PARENETP': 'np_parent_company_owners', # 归属于母公司所有者的净利润
'BIZCOST': 'operating_cost', # 营业成本
'ASSOINVEPROF': 'invest_income_associates', # 对联营企业和合营企业的投资收益
'BIZTAX': 'operating_tax_surcharges', # 营业税金及附加
'ASSEIMPALOSS': 'asset_impairment_loss', # 资产减值损失
})
balance_ttm_sets = engine.fetch_fundamentals_pit_extend_company_id(BalanceTTM,
[BalanceTTM.TOTASSET, # 资产总计
BalanceTTM.RIGHAGGR, # 所有者权益(或股东权益)合计
BalanceTTM.PARESHARRIGH, # 归属于母公司股东权益合计
], dates=[trade_date])
for column in columns:
if column in list(balance_ttm_sets.keys()):
balance_ttm_sets = balance_ttm_sets.drop(column, axis=1)
balance_ttm_sets = balance_ttm_sets.rename(
columns={'PARESHARRIGH': 'equities_parent_company_owners', # 归属于母公司股东权益合计
'RIGHAGGR': 'total_owner_equities', # 所有者权益(或股东权益)合计
'TOTASSET': 'total_assets', # 资产总计
})
income_ttm_sets_pre_year_1 = engine.fetch_fundamentals_pit_extend_company_id(IncomeTTM,
[IncomeTTM.BIZINCO,
IncomeTTM.NETPROFIT,
], dates=[trade_date_pre_year])
for column in columns:
if column in list(income_ttm_sets_pre_year_1.keys()):
income_ttm_sets_pre_year_1 = income_ttm_sets_pre_year_1.drop(column, axis=1)
income_ttm_sets_pre_year_1 = income_ttm_sets_pre_year_1.rename(
columns={'BIZINCO': 'operating_revenue_pre_year_1', # 营业收入
'NETPROFIT': 'net_profit_pre_year_1', # 净利润
})
income_ttm_sets_pre_year_2 = engine.fetch_fundamentals_pit_extend_company_id(IncomeTTM,
[IncomeTTM.BIZINCO,
IncomeTTM.NETPROFIT,
], dates=[trade_date_pre_year_2])
for column in columns:
if column in list(income_ttm_sets_pre_year_2.keys()):
income_ttm_sets_pre_year_2 = income_ttm_sets_pre_year_2.drop(column, axis=1)
income_ttm_sets_pre_year_2 = income_ttm_sets_pre_year_2.rename(
columns={'BIZINCO': 'operating_revenue_pre_year_2', # 营业收入
'NETPROFIT': 'net_profit_pre_year_2', # 净利润
})
income_ttm_sets_pre_year_3 = engine.fetch_fundamentals_pit_extend_company_id(IncomeTTM,
[IncomeTTM.BIZINCO,
IncomeTTM.NETPROFIT,
], dates=[trade_date_pre_year_3])
for column in columns:
if column in list(income_ttm_sets_pre_year_3.keys()):
income_ttm_sets_pre_year_3 = income_ttm_sets_pre_year_3.drop(column, axis=1)
income_ttm_sets_pre_year_3 = income_ttm_sets_pre_year_3.rename(
columns={'BIZINCO': 'operating_revenue_pre_year_3', # 营业收入
'NETPROFIT': 'net_profit_pre_year_3', # 净利润
})
income_ttm_sets_pre_year_4 = engine.fetch_fundamentals_pit_extend_company_id(IncomeTTM,
[IncomeTTM.BIZINCO,
IncomeTTM.NETPROFIT,
], dates=[trade_date_pre_year_4])
for column in columns:
if column in list(income_ttm_sets_pre_year_4.keys()):
income_ttm_sets_pre_year_4 = income_ttm_sets_pre_year_4.drop(column, axis=1)
income_ttm_sets_pre_year_4 = income_ttm_sets_pre_year_4.rename(
columns={'BIZINCO': 'operating_revenue_pre_year_4', # 营业收入
'NETPROFIT': 'net_profit_pre_year_4', # 净利润
})
# indicator_ttm_sets = engine.fetch_fundamentals_pit_extend_company_id(IndicatorTTM,
# [IndicatorTTM.ROIC, # 投入资本回报率
# ], dates=[trade_date]).drop(columns, axis=1)
#
# indicator_ttm_sets = indicator_ttm_sets.rename(columns={'ROIC': '',
# })
ttm_earning = pd.merge(income_ttm_sets, balance_ttm_sets, how='outer', on='security_code')
ttm_earning = pd.merge(ttm_earning, cash_flow_ttm_sets, how='outer', on='security_code')
ttm_earning = pd.merge(ttm_earning, income_ttm_sets_pre_year_1, how='outer', on='security_code')
ttm_earning = pd.merge(ttm_earning, income_ttm_sets_pre_year_2, how='outer', on='security_code')
ttm_earning = pd.merge(ttm_earning, income_ttm_sets_pre_year_3, how='outer', on='security_code')
ttm_earning = pd.merge(ttm_earning, income_ttm_sets_pre_year_4, how='outer', on='security_code')
ttm_earning = pd.merge(ttm_earning, balance_mrq_sets, how='outer', on='security_code')
ttm_earning = pd.merge(ttm_earning, balance_mrq_sets_pre, how='outer', on='security_code')
balance_con_sets = engine.fetch_fundamentals_pit_extend_company_id(BalanceTTM,
[BalanceTTM.TOTASSET, # 资产总计
BalanceTTM.RIGHAGGR, # 所有者权益(或股东权益)合计
],
dates=[trade_date,
trade_date_pre_year,
trade_date_pre_year_2,
trade_date_pre_year_3,
trade_date_pre_year_4,
])
for column in columns:
if column in list(balance_con_sets.keys()):
balance_con_sets = balance_con_sets.drop(column, axis=1)
balance_con_sets = balance_con_sets.groupby(['security_code'])
balance_con_sets = balance_con_sets.sum()
balance_con_sets = balance_con_sets.rename(columns={'TOTASSET': 'total_assets',
'RIGHAGGR': 'total_owner_equities'})
# cash_flow_con_sets = engine.fetch_fundamentals_pit_extend_company_id(CashFlowReport,
# [CashFlowReport.FINALCASHBALA,
# ],
# dates=[trade_date,
# trade_date_pre_year,
# trade_date_pre_year_2,
# trade_date_pre_year_3,
# trade_date_pre_year_4,
# trade_date_pre_year_5,
# ]).drop(columns, axis=1)
# cash_flow_con_sets = cash_flow_con_sets.groupby(['security_code'])
# cash_flow_con_sets = cash_flow_con_sets.sum()
# cash_flow_con_sets = cash_flow_con_sets.rename(columns={'FINALCASHBALA':'cash_and_equivalents_at_end'})
income_con_sets = engine.fetch_fundamentals_pit_extend_company_id(IncomeReport,
[IncomeReport.NETPROFIT,
],
dates=[trade_date,
trade_date_pre_year,
trade_date_pre_year_2,
trade_date_pre_year_3,
trade_date_pre_year_4,
trade_date_pre_year_5,
])
for column in columns:
if column in list(income_con_sets.keys()):
income_con_sets = income_con_sets.drop(column, axis=1)
income_con_sets = income_con_sets.groupby(['security_code'])
income_con_sets = income_con_sets.sum()
income_con_sets = income_con_sets.rename(columns={'NETPROFIT': 'net_profit'}).reset_index()
ttm_earning_5y = pd.merge(balance_con_sets, income_con_sets, how='outer', on='security_code')
return tp_earning, ttm_earning, ttm_earning_5y
def process_calc_factor(self, trade_date, tp_earning, ttm_earning, ttm_earning_5y):
tp_earning = tp_earning.set_index('security_code')
ttm_earning = ttm_earning.set_index('security_code')
ttm_earning_5y = ttm_earning_5y.set_index('security_code')
earning = factor_earning.FactorEarning()
# 因子计算
earning_sets = pd.DataFrame()
earning_sets['security_code'] = tp_earning.index
earning_sets = earning_sets.set_index('security_code')
earning_sets = earning.ROA5YChg(ttm_earning_5y, earning_sets)
earning_sets = earning.ROE5Y(ttm_earning_5y, earning_sets)
earning_sets = earning.NPCutToNP(tp_earning, earning_sets)
earning_sets = earning.ROE(tp_earning, earning_sets)
earning_sets = earning.ROEAvg(tp_earning, earning_sets)
earning_sets = earning.ROEcut(tp_earning, earning_sets)
# factor_earning = earning.invest_r_associates_to_tp_latest(tp_earning, earning_sets)
earning_sets = earning.NetPft5YAvgChgTTM(ttm_earning, earning_sets)
earning_sets = earning.Sales5YChgTTM(ttm_earning, earning_sets)
# factor_earning = earning.roa(ttm_earning, earning_sets)
earning_sets = earning.AdminExpTTM(ttm_earning, earning_sets)
earning_sets = earning.BerryRtTTM(ttm_earning, earning_sets)
earning_sets = earning.CFARatioMinusROATTM(ttm_earning, earning_sets)
earning_sets = earning.SalesCostTTM(ttm_earning, earning_sets)
earning_sets = earning.EBITToTORevTTM(ttm_earning, earning_sets)
earning_sets = earning.PeridCostTTM(ttm_earning, earning_sets)
earning_sets = earning.FinExpTTM(ttm_earning, earning_sets)
earning_sets = earning.ImpLossToTOITTM(ttm_earning, earning_sets)
earning_sets = earning.OIAToOITTM(ttm_earning, earning_sets)
earning_sets = earning.ROAexTTM(ttm_earning, earning_sets)
earning_sets = earning.NetNonOToTP(ttm_earning, earning_sets)
earning_sets = earning.NetProfitRtTTM(ttm_earning, earning_sets)
earning_sets = earning.NPToTORevTTM(ttm_earning, earning_sets)
earning_sets = earning.OperExpRtTTM(ttm_earning, earning_sets)
earning_sets = earning.OptProfitRtTTM(ttm_earning, earning_sets)
# factor_earning = earning.operating_profit_to_tor(ttm_earning, earning_sets)
earning_sets = earning.ROCTTM(ttm_earning, earning_sets)
earning_sets = earning.ROTATTM(ttm_earning, earning_sets)
earning_sets = earning.ROETTM(ttm_earning, earning_sets)
earning_sets = earning.ROICTTM(ttm_earning, earning_sets)
earning_sets = earning.OwnROETTM(ttm_earning, earning_sets)
earning_sets = earning.SalesGrossMarginTTM(ttm_earning, earning_sets)
earning_sets = earning.TaxRTTM(ttm_earning, earning_sets)
earning_sets = earning.TotaProfRtTTM(ttm_earning, earning_sets)
# factor_earning = earning.invest_r_associates_to_tp_ttm(ttm_earning, earning_sets)
earning_sets = earning_sets.reset_index()
earning_sets['trade_date'] = str(trade_date)
earning_sets.replace([-np.inf, np.inf, None], np.nan, inplace=True)
return earning_sets
def local_run(self, trade_date):
print('trade_date %s' % trade_date)
tic = time.time()
tp_earning, ttm_earning, ttm_earning_5y = self.loading_data(trade_date)
print('data load time %s' % (time.time()-tic))
storage_engine = StorageEngine(self._url)
result = self.process_calc_factor(trade_date, tp_earning, ttm_earning, ttm_earning_5y)
print('cal_time %s' % (time.time() - tic))
storage_engine.update_destdb(str(self._methods[-1]['packet'].split('.')[-1]), trade_date, result)
# storage_engine.update_destdb('factor_earning', trade_date, result)
# def remote_run(self, trade_date):
# total_data = self.loading_data(trade_date)
# #存储数据
# session = str(int(time.time() * 1000000 + datetime.datetime.now().microsecond))
# cache_data.set_cache(session, 'alphax', total_data.to_json(orient='records'))
# distributed_factor.delay(session, json.dumps(self._methods), self._name)
#
# def distributed_factor(self, total_data):
# mkt_df = self.calc_factor_by_date(total_data,trade_date)
# result = self.calc_factor('alphax.alpha191','Alpha191',mkt_df,trade_date)
# @app.task
# def distributed_factor(session, trade_date, packet_sets, name):
# calc_engines = CalcEngine(name, packet_sets)
# content = cache_data.get_cache(session, factor_name)
# total_data = json_normalize(json.loads(content))
# calc_engines.distributed_factor(total_data)
#
# # @app.task()
# def factor_calculate(**kwargs):
# print("constrain_kwargs: {}".format(kwargs))
# date_index = kwargs['date_index']
# session = kwargs['session']
# factor_name = kwargs['factor_name']
# content1 = cache_data.get_cache(session + str(date_index) + "1", date_index)
# content2 = cache_data.get_cache(session + str(date_index) + "2", date_index)
# content3 = cache_data.get_cache(session + str(date_index) + "3", date_index)
# print("len_con1: %s" % len(content1))
# print("len_con2: %s" % len(content2))
# print("len_con3: %s" % len(content3))
# tp_earning = json_normalize(json.loads(str(content1, encoding='utf8')))
# ttm_earning_5y = json_normalize(json.loads(str(content2, encoding='utf8')))
# ttm_earning = json_normalize(json.loads(str(content3, encoding='utf8')))
# # cache_date.get_cache使得index的名字丢失, 所以数据需要按照下面的方式设置index
# tp_earning.set_index('security_code', inplace=True)
# ttm_earning.set_index('security_code', inplace=True)
# ttm_earning_5y.set_index('security_code', inplace=True)
# # total_earning_data = {'tp_earning': tp_earning, 'ttm_earning_5y': ttm_earning_5y, 'ttm_earning': ttm_earning}
# calculate(date_index, tp_earning, ttm_earning, ttm_earning_5y, factor_name)
|
[
"datetime.datetime.strftime",
"pandas.DataFrame",
"data.storage_engine.StorageEngine",
"pandas.merge",
"data.sqlengine.sqlEngine",
"financial.factor_earning.FactorEarning",
"time.time",
"datetime.datetime.strptime",
"datetime.timedelta"
] |
[((2290, 2331), 'datetime.datetime.strptime', 'datetime.strptime', (['trade_date', '"""%Y-%m-%d"""'], {}), "(trade_date, '%Y-%m-%d')\n", (2307, 2331), False, 'from datetime import timedelta, datetime\n'), ((2353, 2392), 'datetime.datetime.strftime', 'datetime.strftime', (['time_array', '"""%Y%m%d"""'], {}), "(time_array, '%Y%m%d')\n", (2370, 2392), False, 'from datetime import timedelta, datetime\n'), ((2765, 2776), 'data.sqlengine.sqlEngine', 'sqlEngine', ([], {}), '()\n', (2774, 2776), False, 'from data.sqlengine import sqlEngine\n'), ((10479, 10549), 'pandas.merge', 'pd.merge', (['cash_flow_sets', 'income_sets'], {'how': '"""outer"""', 'on': '"""security_code"""'}), "(cash_flow_sets, income_sets, how='outer', on='security_code')\n", (10487, 10549), True, 'import pandas as pd\n'), ((10571, 10640), 'pandas.merge', 'pd.merge', (['indicator_sets', 'tp_earning'], {'how': '"""outer"""', 'on': '"""security_code"""'}), "(indicator_sets, tp_earning, how='outer', on='security_code')\n", (10579, 10640), True, 'import pandas as pd\n'), ((10662, 10729), 'pandas.merge', 'pd.merge', (['balance_sets', 'tp_earning'], {'how': '"""outer"""', 'on': '"""security_code"""'}), "(balance_sets, tp_earning, how='outer', on='security_code')\n", (10670, 10729), True, 'import pandas as pd\n'), ((10751, 10828), 'pandas.merge', 'pd.merge', (['income_sets_pre_year_1', 'tp_earning'], {'how': '"""outer"""', 'on': '"""security_code"""'}), "(income_sets_pre_year_1, tp_earning, how='outer', on='security_code')\n", (10759, 10828), True, 'import pandas as pd\n'), ((10850, 10927), 'pandas.merge', 'pd.merge', (['income_sets_pre_year_2', 'tp_earning'], {'how': '"""outer"""', 'on': '"""security_code"""'}), "(income_sets_pre_year_2, tp_earning, how='outer', on='security_code')\n", (10858, 10927), True, 'import pandas as pd\n'), ((10949, 11026), 'pandas.merge', 'pd.merge', (['income_sets_pre_year_3', 'tp_earning'], {'how': '"""outer"""', 'on': '"""security_code"""'}), "(income_sets_pre_year_3, tp_earning, 
how='outer', on='security_code')\n", (10957, 11026), True, 'import pandas as pd\n'), ((11048, 11125), 'pandas.merge', 'pd.merge', (['income_sets_pre_year_4', 'tp_earning'], {'how': '"""outer"""', 'on': '"""security_code"""'}), "(income_sets_pre_year_4, tp_earning, how='outer', on='security_code')\n", (11056, 11125), True, 'import pandas as pd\n'), ((22687, 22763), 'pandas.merge', 'pd.merge', (['income_ttm_sets', 'balance_ttm_sets'], {'how': '"""outer"""', 'on': '"""security_code"""'}), "(income_ttm_sets, balance_ttm_sets, how='outer', on='security_code')\n", (22695, 22763), True, 'import pandas as pd\n'), ((22786, 22860), 'pandas.merge', 'pd.merge', (['ttm_earning', 'cash_flow_ttm_sets'], {'how': '"""outer"""', 'on': '"""security_code"""'}), "(ttm_earning, cash_flow_ttm_sets, how='outer', on='security_code')\n", (22794, 22860), True, 'import pandas as pd\n'), ((22883, 22970), 'pandas.merge', 'pd.merge', (['ttm_earning', 'income_ttm_sets_pre_year_1'], {'how': '"""outer"""', 'on': '"""security_code"""'}), "(ttm_earning, income_ttm_sets_pre_year_1, how='outer', on=\n 'security_code')\n", (22891, 22970), True, 'import pandas as pd\n'), ((22988, 23075), 'pandas.merge', 'pd.merge', (['ttm_earning', 'income_ttm_sets_pre_year_2'], {'how': '"""outer"""', 'on': '"""security_code"""'}), "(ttm_earning, income_ttm_sets_pre_year_2, how='outer', on=\n 'security_code')\n", (22996, 23075), True, 'import pandas as pd\n'), ((23093, 23180), 'pandas.merge', 'pd.merge', (['ttm_earning', 'income_ttm_sets_pre_year_3'], {'how': '"""outer"""', 'on': '"""security_code"""'}), "(ttm_earning, income_ttm_sets_pre_year_3, how='outer', on=\n 'security_code')\n", (23101, 23180), True, 'import pandas as pd\n'), ((23198, 23285), 'pandas.merge', 'pd.merge', (['ttm_earning', 'income_ttm_sets_pre_year_4'], {'how': '"""outer"""', 'on': '"""security_code"""'}), "(ttm_earning, income_ttm_sets_pre_year_4, how='outer', on=\n 'security_code')\n", (23206, 23285), True, 'import pandas as pd\n'), ((23303, 
23375), 'pandas.merge', 'pd.merge', (['ttm_earning', 'balance_mrq_sets'], {'how': '"""outer"""', 'on': '"""security_code"""'}), "(ttm_earning, balance_mrq_sets, how='outer', on='security_code')\n", (23311, 23375), True, 'import pandas as pd\n'), ((23398, 23474), 'pandas.merge', 'pd.merge', (['ttm_earning', 'balance_mrq_sets_pre'], {'how': '"""outer"""', 'on': '"""security_code"""'}), "(ttm_earning, balance_mrq_sets_pre, how='outer', on='security_code')\n", (23406, 23474), True, 'import pandas as pd\n'), ((27541, 27617), 'pandas.merge', 'pd.merge', (['balance_con_sets', 'income_con_sets'], {'how': '"""outer"""', 'on': '"""security_code"""'}), "(balance_con_sets, income_con_sets, how='outer', on='security_code')\n", (27549, 27617), True, 'import pandas as pd\n'), ((27968, 27998), 'financial.factor_earning.FactorEarning', 'factor_earning.FactorEarning', ([], {}), '()\n', (27996, 27998), False, 'from financial import factor_earning\n'), ((28038, 28052), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (28050, 28052), True, 'import pandas as pd\n'), ((30969, 30980), 'time.time', 'time.time', ([], {}), '()\n', (30978, 30980), False, 'import time\n'), ((31142, 31166), 'data.storage_engine.StorageEngine', 'StorageEngine', (['self._url'], {}), '(self._url)\n', (31155, 31166), False, 'from data.storage_engine import StorageEngine\n'), ((1522, 1561), 'datetime.datetime.strftime', 'datetime.strftime', (['time_array', '"""%Y%m%d"""'], {}), "(time_array, '%Y%m%d')\n", (1539, 1561), False, 'from datetime import timedelta, datetime\n'), ((1473, 1493), 'datetime.timedelta', 'timedelta', ([], {'days': 'days'}), '(days=days)\n', (1482, 1493), False, 'from datetime import timedelta, datetime\n'), ((31098, 31109), 'time.time', 'time.time', ([], {}), '()\n', (31107, 31109), False, 'import time\n'), ((31293, 31304), 'time.time', 'time.time', ([], {}), '()\n', (31302, 31304), False, 'import time\n')]
|
from app.mongodb_models.user import User as DBUser
from typing import Optional, List
from app.fields.user import Permission
from app.fields.role import Role
from app.core.permission import enforcer
from app import crud
def get_all_roles() -> List[Role]:
    """Collect every role known to casbin, excluding plain usernames.

    A "role" is any casbin subject or grouping-policy target whose name does
    not match an existing user.  Each role is returned together with the
    users assigned to it and the permissions granted to it.
    """
    usernames = [db_user.Username for db_user in DBUser.objects()]
    subjects = enforcer.get_all_subjects()
    grouped = [policy[1] for policy in enforcer.get_grouping_policy()]
    role_names = set(subjects + grouped) - set(usernames)

    all_roles = []
    for role_name in role_names:
        # Users assigned to this role.
        members = [crud.user.get_user_base(username)
                   for username in enforcer.get_users_for_role(role_name)]
        # Permissions granted to this role (policy rows are [sub, obj, act]).
        permissions = [
            Permission(**dict(zip(['sub', 'obj', 'act'], policy)))
            for policy in enforcer.get_filtered_policy(0, role_name)
        ]
        all_roles.append(Role(name=role_name, users=members, permissions=permissions))
    return all_roles
|
[
"app.core.permission.enforcer.get_all_subjects",
"app.core.permission.enforcer.get_grouping_policy",
"app.fields.role.Role",
"app.core.permission.enforcer.get_users_for_role",
"app.mongodb_models.user.User.objects",
"app.crud.user.get_user_base",
"app.fields.user.Permission",
"app.core.permission.enforcer.get_filtered_policy"
] |
[((271, 287), 'app.mongodb_models.user.User.objects', 'DBUser.objects', ([], {}), '()\n', (285, 287), True, 'from app.mongodb_models.user import User as DBUser\n'), ((373, 400), 'app.core.permission.enforcer.get_all_subjects', 'enforcer.get_all_subjects', ([], {}), '()\n', (398, 400), False, 'from app.core.permission import enforcer\n'), ((430, 460), 'app.core.permission.enforcer.get_grouping_policy', 'enforcer.get_grouping_policy', ([], {}), '()\n', (458, 460), False, 'from app.core.permission import enforcer\n'), ((747, 780), 'app.core.permission.enforcer.get_users_for_role', 'enforcer.get_users_for_role', (['role'], {}), '(role)\n', (774, 780), False, 'from app.core.permission import enforcer\n'), ((956, 993), 'app.core.permission.enforcer.get_filtered_policy', 'enforcer.get_filtered_policy', (['(0)', 'role'], {}), '(0, role)\n', (984, 993), False, 'from app.core.permission import enforcer\n'), ((798, 831), 'app.crud.user.get_user_base', 'crud.user.get_user_base', (['username'], {}), '(username)\n', (821, 831), False, 'from app import crud\n'), ((1129, 1147), 'app.fields.user.Permission', 'Permission', ([], {}), '(**perm)\n', (1139, 1147), False, 'from app.fields.user import Permission\n'), ((1228, 1281), 'app.fields.role.Role', 'Role', ([], {'name': 'role', 'users': 'users', 'permissions': 'permissions'}), '(name=role, users=users, permissions=permissions)\n', (1232, 1281), False, 'from app.fields.role import Role\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-12-05 19:01
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds Product.main_image and alters
    # Product.categories.

    dependencies = [
        ('store', '0002_auto_20161204_2335'),
    ]
    operations = [
        migrations.AddField(
            model_name='product',
            name='main_image',
            # null=True with default=None: the image is optional.
            field=models.ImageField(default=None, null=True, upload_to=b'', verbose_name='Main image of the product'),
        ),
        migrations.AlterField(
            model_name='product',
            name='categories',
            field=models.ManyToManyField(related_name='products', to='store.ProductCategory', verbose_name='Product categories'),
        ),
    ]
|
[
"django.db.models.ImageField",
"django.db.models.ManyToManyField"
] |
[((400, 504), 'django.db.models.ImageField', 'models.ImageField', ([], {'default': 'None', 'null': '(True)', 'upload_to': "b''", 'verbose_name': '"""Main image of the product"""'}), "(default=None, null=True, upload_to=b'', verbose_name=\n 'Main image of the product')\n", (417, 504), False, 'from django.db import migrations, models\n'), ((626, 740), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'related_name': '"""products"""', 'to': '"""store.ProductCategory"""', 'verbose_name': '"""Product categories"""'}), "(related_name='products', to='store.ProductCategory',\n verbose_name='Product categories')\n", (648, 740), False, 'from django.db import migrations, models\n')]
|
# coding:utf-8
import hashlib
import time
import math
import platform
class DataHub:
    """In-memory data hub for temporarily parking arbitrary payloads.

    Each stored payload is keyed by a generated MD5-based ID.  A hub can
    optionally be capped at a maximum number of entries via ``size_max``.
    """

    def __init__(self, size_max=0):
        # Per-instance storage.  The original implementation used shared
        # class attributes, which made every DataHub instance share one
        # data store -- a latent bug when more than one hub exists.
        self._tmpData = {}
        self._regID = []
        # Maximum number of entries; 0 (the default) means unlimited (-1).
        self._size_max = size_max if size_max != 0 else -1

    def put(self, data):
        """Store *data* and return its generated ID, or 0 on failure."""
        if self._size_max != -1 and len(self._regID) >= self._size_max:
            print('DataHub中已经达到最大值,无法继续添加')
            return 0
        ID = self._createID()
        if ID in self._regID:
            # Extremely unlikely: duplicate ID (identical timestamp or a
            # hash collision).  Refuse the insert rather than overwrite.
            print('插入时间间隔太短,或者发生哈希碰撞')
            return 0
        self._regID.append(ID)
        self._tmpData[ID] = [data]
        return ID

    def get(self, ID):
        """Return the payload stored under *ID*, or 0 if unknown."""
        if ID in self._regID:
            return self._tmpData[ID][0]
        print('没有这个ID对于的数据')
        return 0

    def update(self, ID, data):
        """Replace the payload under *ID*; return 1 on success, 0 otherwise."""
        if ID in self._regID:
            self._tmpData[ID] = [data]
            return 1
        print('没有这个ID对于的数据')
        return 0

    def dele(self, ID):
        """Delete the entry for *ID*; return 1 on success, 0 otherwise."""
        if not self._regID:
            print('DataHub中已经没有数据了')
            return 0
        if ID in self._regID:
            self._tmpData.pop(ID)
            self._regID.remove(ID)
            return 1
        print('没有这个ID对于的数据')
        return 0

    def _createID(self):
        """Generate a pseudo-unique ID from the current time and platform."""
        # time.clock() was removed in Python 3.8; perf_counter() is the
        # documented replacement for a monotonically increasing timer.
        seed = (hashlib.md5(self._NowTime().encode('utf-8')).hexdigest() +
                str(time.perf_counter()) +
                platform.platform().replace('-', '').replace('.', ''))
        return hashlib.md5(seed.encode('utf-8')).hexdigest()

    def showAllDataItem(self):
        """Return the whole internal ID -> payload mapping."""
        return self._tmpData

    def showDataItem(self):
        """Return the number of stored entries."""
        return len(self._regID)

    def clearAll(self):
        """Remove every stored entry."""
        self._tmpData.clear()
        self._regID.clear()

    @staticmethod
    def _NowTime():
        """Current local time as an uppercase string without ':' or spaces."""
        return str(time.asctime()).replace(':', '').replace(' ', '').upper()
# def main():
# DH = DataHub(size_max=1)
# i = DH.put([1, 2, 3])
# data = DH.get(i)
# print(data)
# data[0] = data[1] + data[2]
# DH.update(i, data)
# j = DH.put([5, 5, 444])
# print(DH.showDataItem())
# print(DH.showAllDataItem())
#
# DH.dele(i)
# print(DH.showDataItem())
# print(DH.showAllDataItem())
#
# DH.dele(j)
# print(DH.showDataItem())
# print(DH.showAllDataItem())
#
#
# if __name__ == '__main__':
# main()
|
[
"time.asctime",
"platform.platform",
"time.clock"
] |
[((2066, 2080), 'time.asctime', 'time.asctime', ([], {}), '()\n', (2078, 2080), False, 'import time\n'), ((1674, 1686), 'time.clock', 'time.clock', ([], {}), '()\n', (1684, 1686), False, 'import time\n'), ((1706, 1725), 'platform.platform', 'platform.platform', ([], {}), '()\n', (1723, 1725), False, 'import platform\n')]
|
"""
Forum member model admin definitions
====================================
This module defines admin classes used to populate the Django administration dashboard.
"""
from django.contrib import admin
from machina.core.db.models import get_model
from machina.models.fields import MarkupTextField, MarkupTextFieldWidget
# Resolve the (possibly overridden) ForumProfile model via machina's registry.
ForumProfile = get_model('forum_member', 'ForumProfile')
class ForumProfileAdmin(admin.ModelAdmin):
    """ The Forum Profile model admin. """
    # Columns shown in the changelist; both id and user link to the detail page.
    list_display = ('id', 'user', 'posts_count', )
    list_filter = ('posts_count', )
    list_display_links = ('id', 'user', )
    # Raw ID input instead of a full <select> of all users.
    raw_id_fields = ('user', )
    search_fields = ('user__username',)
    # Render MarkupTextField contents with the matching markup-aware widget.
    formfield_overrides = {
        MarkupTextField: {'widget': MarkupTextFieldWidget},
    }
admin.site.register(ForumProfile, ForumProfileAdmin)
|
[
"machina.core.db.models.get_model",
"django.contrib.admin.site.register"
] |
[((354, 395), 'machina.core.db.models.get_model', 'get_model', (['"""forum_member"""', '"""ForumProfile"""'], {}), "('forum_member', 'ForumProfile')\n", (363, 395), False, 'from machina.core.db.models import get_model\n'), ((782, 834), 'django.contrib.admin.site.register', 'admin.site.register', (['ForumProfile', 'ForumProfileAdmin'], {}), '(ForumProfile, ForumProfileAdmin)\n', (801, 834), False, 'from django.contrib import admin\n')]
|
import binascii
import hashlib
import os
import sys
from ecdsa import SigningKey, VerifyingKey, curves
from ecdsa import ecdsa
from ecdsa import util as ecdsautil
# Curve used by genkey() for new identity keys (NIST P-192).
DEFAULT_KEYTYPE = curves.NIST192p
def get_keys_folder(datafolder):
    """Return the path of the ``keys`` subdirectory inside *datafolder*.

    :param datafolder: base data directory
    :return: path of the key storage folder
    """
    keys_subdir = "keys"
    return os.path.join(datafolder, keys_subdir)
def get_pub_keyfilename(datafolder):
    """Return the full path of the public identity key PEM file.

    :param datafolder: base data directory
    :return: path of ``<datafolder>/keys/identity.pub``
    """
    return os.path.join(datafolder, "keys", "identity.pub")
def get_priv_keyfilename(datafolder):
    """Return the full path of the private identity key PEM file.

    :param datafolder: base data directory
    :return: path of ``<datafolder>/keys/identity.priv``
    """
    return os.path.join(datafolder, "keys", "identity.priv")
def first_run(datafolder):
    """Initialise the key store on first run.

    Creates the key directory if necessary and generates a fresh identity
    keypair when no private key exists yet.

    :param datafolder: base data directory
    """
    keyfolder = get_keys_folder(datafolder)
    # exist_ok avoids the TOCTOU race between the existence check and the
    # directory creation that the previous exists()/makedirs() pair had.
    os.makedirs(keyfolder, exist_ok=True)
    if not os.path.isfile(get_priv_keyfilename(datafolder)):
        key = genkey()
        savekey(key, keyfolder, "identity")
        sys.stderr.write("ident key generated\n")
def pubkeyhash(pubkey):
    """Return the SHA-512 hex digest of *pubkey*'s DER encoding.

    :param pubkey: key object exposing ``to_der()``
    :return: hex digest string
    """
    der_bytes = pubkey.to_der()
    return hashlib.sha512(der_bytes).hexdigest()
def genkey():
    """Generate a fresh ECDSA signing key on the default curve.

    :return: a new ``SigningKey``
    """
    keypair = SigningKey.generate(curve=DEFAULT_KEYTYPE)
    return keypair
def savekey(keypair, path, name):
    """Write *keypair* to disk as two PEM files (``<name>.priv`` / ``<name>.pub``).

    :param keypair: key exposing ``to_pem()`` and ``get_verifying_key()``
    :param path: destination directory
    :param name: base file name
    """
    outputs = (
        (".priv", keypair.to_pem),
        (".pub", lambda: keypair.get_verifying_key().to_pem()),
    )
    for suffix, dump_pem in outputs:
        with open(os.path.join(path, name + suffix), "wb") as handle:
            handle.write(dump_pem())
def load(privkeypem):
    """Load a private signing key from a PEM file on disk.

    :param privkeypem: path of the private key PEM file
    :return: the loaded ``SigningKey``
    """
    with open(privkeypem, "rb") as privfile:
        pem_data = privfile.read()
    return SigningKey.from_pem(pem_data)
def loadpub(pubkeypem):
    """Load a public key from a PEM file on disk.

    :param pubkeypem: path of the public key PEM file
    :return: the loaded ``VerifyingKey``
    """
    with open(pubkeypem, "rb") as pubfile:
        pem_data = pubfile.read()
    return loadpubstr(pem_data)
def loadpubstr(pemstring):
    """Parse a public key from in-memory PEM data.

    :param pemstring: PEM-encoded public key data
    :return: the parsed ``VerifyingKey``
    """
    key = VerifyingKey.from_pem(pemstring)
    return key
def get_pubkey(datafolder):
    """Return the contents of the public identity key PEM file, or ``None``.

    :param datafolder: base data directory
    :return: PEM text when the key file exists, otherwise ``None``
    """
    filename = get_pub_keyfilename(datafolder)
    # EAFP: opening directly removes the exists()/open() race of the
    # previous implementation; a missing file simply yields None.
    try:
        with open(filename, "r") as filehandle:
            return filehandle.read()
    except FileNotFoundError:
        return None
def sign_string(privkey, message):
    """Sign *message* with *privkey* and return the signature as hex.

    :param privkey: ECDSA signing key
    :param message: value to sign (converted with ``str`` first)
    :return: hex-encoded DER signature
    """
    payload = str(message)
    signature = privkey.sign(payload, hashfunc=hashlib.sha1, sigencode=ecdsautil.sigencode_der)
    return binascii.hexlify(signature)
def verify_string(pubkey, signature, message):
    """Check a hex-encoded DER signature over *message*.

    :param pubkey: ECDSA verifying key
    :param signature: hex-encoded DER signature
    :param message: value that was signed (converted with ``str`` first)
    :return: result of ``pubkey.verify``
    """
    payload = str(message)
    raw_sig = binascii.unhexlify(signature)
    return pubkey.verify(raw_sig, payload, hashfunc=hashlib.sha1, sigdecode=ecdsautil.sigdecode_der)
def ecdh(privkey, pubkey):
    """
    Given a loaded private key and a loaded public key, perform an ECDH exchange
    :param privkey: local ECDSA private key
    :param pubkey: remote party's public key
    :return: whatever ``ecdsa.ecdh`` produces (presumably the shared
        secret -- confirm against the installed ecdsa version's API)
    """
    return ecdsa.ecdh(privkey, pubkey)
|
[
"os.makedirs",
"ecdsa.SigningKey.generate",
"binascii.hexlify",
"os.path.exists",
"binascii.unhexlify",
"ecdsa.ecdsa.ecdh",
"sys.stderr.write",
"ecdsa.VerifyingKey.from_pem",
"os.path.join"
] |
[((296, 328), 'os.path.join', 'os.path.join', (['datafolder', '"""keys"""'], {}), "(datafolder, 'keys')\n", (308, 328), False, 'import os\n'), ((474, 513), 'os.path.join', 'os.path.join', (['keyfolder', '"""identity.pub"""'], {}), "(keyfolder, 'identity.pub')\n", (486, 513), False, 'import os\n'), ((661, 701), 'os.path.join', 'os.path.join', (['keyfolder', '"""identity.priv"""'], {}), "(keyfolder, 'identity.priv')\n", (673, 701), False, 'import os\n'), ((1355, 1397), 'ecdsa.SigningKey.generate', 'SigningKey.generate', ([], {'curve': 'DEFAULT_KEYTYPE'}), '(curve=DEFAULT_KEYTYPE)\n', (1374, 1397), False, 'from ecdsa import SigningKey, VerifyingKey, curves\n'), ((1564, 1598), 'os.path.join', 'os.path.join', (['path', "(name + '.priv')"], {}), "(path, name + '.priv')\n", (1576, 1598), False, 'import os\n'), ((1613, 1646), 'os.path.join', 'os.path.join', (['path', "(name + '.pub')"], {}), "(path, name + '.pub')\n", (1625, 1646), False, 'import os\n'), ((2368, 2400), 'ecdsa.VerifyingKey.from_pem', 'VerifyingKey.from_pem', (['pemstring'], {}), '(pemstring)\n', (2389, 2400), False, 'from ecdsa import SigningKey, VerifyingKey, curves\n'), ((2572, 2596), 'os.path.exists', 'os.path.exists', (['filename'], {}), '(filename)\n', (2586, 2596), False, 'import os\n'), ((2945, 2966), 'binascii.hexlify', 'binascii.hexlify', (['sig'], {}), '(sig)\n', (2961, 2966), False, 'import binascii\n'), ((3157, 3186), 'binascii.unhexlify', 'binascii.unhexlify', (['signature'], {}), '(signature)\n', (3175, 3186), False, 'import binascii\n'), ((3476, 3503), 'ecdsa.ecdsa.ecdh', 'ecdsa.ecdh', (['privkey', 'pubkey'], {}), '(privkey, pubkey)\n', (3486, 3503), False, 'from ecdsa import ecdsa\n'), ((877, 902), 'os.path.exists', 'os.path.exists', (['keyfolder'], {}), '(keyfolder)\n', (891, 902), False, 'import os\n'), ((912, 934), 'os.makedirs', 'os.makedirs', (['keyfolder'], {}), '(keyfolder)\n', (923, 934), False, 'import os\n'), ((1071, 1112), 'sys.stderr.write', 'sys.stderr.write', (['"""ident key 
generated\n"""'], {}), "('ident key generated\\n')\n", (1087, 1112), False, 'import sys\n')]
|
import ast
import re
import parsy
# All token regexes also consume any trailing whitespace.
# A reference: optional "name." qualifier followed by an identifier.
R_REFERENCE = re.compile(r'(?:(?:[^\d\W]\w*)?\.)?[^\d\W]\w*\s*', re.IGNORECASE)
# A run of whitespace.
R_SPACE = re.compile(r'\s+', re.IGNORECASE)
# Single- or double-quoted string literal with backslash escapes.
R_STRING = re.compile(r"('[^'\\]*(?:\\.[^'\\]*)*'|\"[^\"\\]*(?:\\.[^\"\\]*)*\")\s*", re.IGNORECASE)
# Prefix unary operators: + - ! ~
R_OPERATOR_UNARY = re.compile(r'[+\-!~]\s*', re.IGNORECASE)
# Binary operators: multi-character forms (** << <= >> >= .. == != <> //)
# plus the single-character set - + * % & | ^
R_OPERATOR_BINARY = re.compile(r'(\*\*|\<{1,2}|\<=|\>{1,2}|\>=|\.\.|==|!=|\<\>|\/{1,2}|[-+*%&|^])\s*', re.IGNORECASE)
# Integer literal with radix prefix (0x/0h hex, 0o/0q octal, 0b/0y binary,
# optional 0d/0t decimal) and underscore digit separators.
R_INTEGER = re.compile(r'(0[xh][0-9a-f]+(?:_[0-9a-f]+)*|0[oq][0-7]+(?:_[0-7]+)*|0[by][01]+(?:_[01]+)*|(?:0[dt])?[0-9]+(?:_[0-9]+)*)\s*', re.IGNORECASE)
# Parentheses delimiting grouped sub-expressions.
R_PAREN_OPEN = re.compile(r'\(\s*', re.IGNORECASE)
R_PAREN_CLOSE = re.compile(r'\)\s*', re.IGNORECASE)
# Expression terminators: ';', ',', ')', ']' or end of input.
R_END_ALL = re.compile(r'(?:;|,|\)|\])\s*|$', re.IGNORECASE)
@parsy.generate
def parse_integer():
    """Parse an optional integer literal, honouring radix prefixes.

    Supports 0x/0h (hex), 0o/0q (octal), 0b/0y (binary) and 0d/0t
    (explicit decimal) prefixes, plus underscore digit separators.
    Produces the integer value, or None when no literal is present.
    """
    token = yield parsy.regex(R_INTEGER).desc('integer').optional()
    if not token:
        return None
    token = token.strip().replace('_', '').lower()
    radix_by_prefix = {
        '0x': 16, '0h': 16,
        '0o': 8, '0q': 8,
        '0b': 2, '0y': 2,
        '0d': 10, '0t': 10,
    }
    radix = radix_by_prefix.get(token[:2])
    if radix is not None:
        return int(token[2:], radix)
    return int(token, 10)
@parsy.generate
def parse_expression():
    """Parse a full expression into a single expression-tree node.

    Operand and operator nodes are pushed onto ``result`` (an operator
    stack); each binary operator triggers precedence-based folding via
    reduce_expression(), and the remaining stack is collapsed and
    validated once a terminator is reached.
    """
    yield parsy.regex(R_SPACE).optional()
    result = []
    while True:
        # Any number of prefix unary operators may precede an operand.
        c = yield parsy.regex(R_OPERATOR_UNARY).desc('unary operator').optional()
        if c:
            c = c.strip()
            result.append(UnaryExpression(c))
            continue
        # Operand: integer literal, reference, string, or a parenthesised
        # sub-expression -- tried in that order.
        c = yield parse_integer
        if c is not None:
            result.append(ConstantExpression(c))
        else:
            c = yield parsy.regex(R_REFERENCE).desc('reference').optional()
            if c:
                c = c.strip()
                result.append(ReferenceExpression(c))
            else:
                c = yield parsy.regex(R_STRING).desc('string').optional()
                if c:
                    # Let the Python parser decode quoting and escapes.
                    c = ast.literal_eval(c)
                    result.append(ConstantExpression(c))
                else:
                    c = yield parsy.regex(R_PAREN_OPEN).desc('expression')
                    if c:
                        c = yield parse_expression
                        result.append(c)
                        yield parsy.regex(R_PAREN_CLOSE).desc('closing )')
        # After an operand: either a binary operator continues the
        # expression, or a terminator ends it.
        c = yield parsy.regex(R_OPERATOR_BINARY).desc('binary operator').optional()
        if c:
            c = c.strip()
            bin = BinaryExpression(c)
            # '**' skips the fold, which makes it right-associative; every
            # other operator folds anything of equal or higher precedence.
            if c != '**':
                reduce_expression(result, bin.level())
            bin.A = result.pop()
            result.append(bin)
        else:
            # Peek (do not consume) the terminator; it belongs to the caller.
            c = yield parsy.peek(parsy.regex(R_END_ALL).desc('expression'))
            break
    reduce_expression(result)
    return validate_expression(result.pop())
def reduce_expression(ops, level=0):
    """Fold the expression stack until one item remains or a
    lower-precedence operator is reached.

    Repeatedly pops the top item and attaches it as the pending operand
    of the expression below it (``B`` for binary, ``A`` for unary) while
    that expression's precedence is at least *level*.

    :param ops: stack of expression nodes; mutated in place
    :param level: minimum precedence at which folding continues
    :raises RuntimeError: if a non-operator node would receive an operand
    """
    while len(ops) > 1:
        operand = ops.pop()
        parent = ops[-1]
        if parent.level() < level:
            # Lower-precedence operator: stop folding, restore the operand.
            ops.append(operand)
            break
        if isinstance(parent, BinaryExpression):
            parent.B = operand
        elif isinstance(parent, UnaryExpression):
            parent.A = operand
        else:
            raise RuntimeError('Cannot reduce expression stack!')
def validate_expression(e):
    """Recursively ensure every operator node has its operand slots filled.

    :param e: expression node to check; ``None`` marks a missing operand
    :return: *e* unchanged when valid
    :raises RuntimeError: when an operand slot is empty
    """
    if e is None:
        raise RuntimeError('Expected expression!')
    if isinstance(e, UnaryExpression):
        validate_expression(e.A)
    elif isinstance(e, BinaryExpression):
        validate_expression(e.A)
        validate_expression(e.B)
    return e
class BinaryExpression:
    """A binary operator node in the expression tree.

    ``operations`` maps each operator token to a ``(precedence, function)``
    pair; ``A`` and ``B`` hold the left and right operand nodes.
    """

    operations = {
        '**': (55, lambda a, b: a ** b),
        '*': (43, lambda a, b: a * b),
        '/': (43, lambda a, b: a / b),
        '//': (43, lambda a, b: a // b),
        '%': (43, lambda a, b: a % b),
        '+': (42, lambda a, b: a + b),
        '-': (42, lambda a, b: a - b),
        '<<': (35, lambda a, b: a << b),
        '>>': (35, lambda a, b: a >> b),
        '==': (20, lambda a, b: a == b and 1 or 0),
        '!=': (20, lambda a, b: a != b and 1 or 0),
        '<>': (20, lambda a, b: a != b and 1 or 0),
        '&': (12, lambda a, b: a & b),
        '^': (11, lambda a, b: a ^ b),
        '|': (10, lambda a, b: a | b),
    }

    def __init__(self, text):
        self.text = text
        self.A = None
        self.B = None

    def __repr__(self):
        return '(' + repr(self.A) + str(self.text) + repr(self.B) + ')'

    def level(self):
        """Return the operator's precedence."""
        precedence, _ = BinaryExpression.operations[self.text]
        return precedence

    def execute(self, options, labels):
        """Evaluate both operands (left first) and apply the operator."""
        left = self.A.execute(options, labels)
        right = self.B.execute(options, labels)
        _, apply_op = BinaryExpression.operations[self.text]
        return apply_op(left, right)

    def reduce(self, options, labels):
        """Return a constant-folded copy when both operands reduce to
        constants, a rebuilt node when either operand changed, or self."""
        left = self.A.reduce(options, labels)
        right = self.B.reduce(options, labels)
        node = self
        if left is not self.A or right is not self.B:
            node = BinaryExpression(self.text)
            node.A = left
            node.B = right
        if isinstance(left, ConstantExpression) and isinstance(right, ConstantExpression):
            node = ConstantExpression(node.execute(options, labels))
        return node
class UnaryExpression:
    """A unary (prefix) operator node in the expression tree.

    ``operations`` maps each operator token to a ``(precedence, function)``
    pair; ``A`` holds the single operand node.
    """

    operations = {
        '+': (50, lambda a: a),
        '-': (50, lambda a: -a),
        '!': (50, lambda a: 1 - (a and 1 or 0)),
        '~': (50, lambda a: ~a),
    }

    def __init__(self, text):
        self.text = text
        self.A = None

    def __repr__(self):
        return str(self.text) + repr(self.A)

    def level(self):
        """Return the operator's precedence (all unary operators share 50)."""
        precedence, _ = UnaryExpression.operations[self.text]
        return precedence

    def execute(self, options, labels):
        """Evaluate the operand and apply the operator."""
        _, apply_op = UnaryExpression.operations[self.text]
        return apply_op(self.A.execute(options, labels))

    def reduce(self, options, labels):
        """Return a constant-folded copy when the operand reduces to a
        constant, a rebuilt node when the operand changed, or self."""
        operand = self.A.reduce(options, labels)
        node = self
        if operand is not self.A:
            node = UnaryExpression(self.text)
            node.A = operand
        if isinstance(operand, ConstantExpression):
            node = ConstantExpression(node.execute(options, labels))
        return node
class ConstantExpression:
    """A literal value in the expression tree."""

    def __init__(self, text):
        self.text = text

    def __repr__(self):
        return 'constant(' + repr(self.text) + ')'

    def level(self):
        """Constants bind tighter than any operator."""
        return 65

    def execute(self, options, labels):
        # Coerce to int on evaluation.
        # TODO: How to encode strings?
        return int(self.text)

    def reduce(self, options, labels):
        """Already fully reduced; return self."""
        return self
class ReferenceExpression:
    """A named reference resolved against the label table at execute time."""

    def __init__(self, text):
        self.text = text

    def __repr__(self):
        return 'reference(' + repr(self.text) + ')'

    def level(self):
        """References bind tighter than any operator."""
        return 65

    def execute(self, options, labels):
        """Look up this reference's value in *labels*."""
        return labels[self.text]

    def reduce(self, options, labels):
        """References cannot be folded ahead of time; return self."""
        return self
|
[
"ast.literal_eval",
"parsy.regex",
"re.compile"
] |
[((49, 121), 're.compile', 're.compile', (['"""(?:(?:[^\\\\d\\\\W]\\\\w*)?\\\\.)?[^\\\\d\\\\W]\\\\w*\\\\s*"""', 're.IGNORECASE'], {}), "('(?:(?:[^\\\\d\\\\W]\\\\w*)?\\\\.)?[^\\\\d\\\\W]\\\\w*\\\\s*', re.IGNORECASE)\n", (59, 121), False, 'import re\n'), ((125, 158), 're.compile', 're.compile', (['"""\\\\s+"""', 're.IGNORECASE'], {}), "('\\\\s+', re.IGNORECASE)\n", (135, 158), False, 'import re\n'), ((170, 288), 're.compile', 're.compile', (['"""(\'[^\'\\\\\\\\]*(?:\\\\\\\\.[^\'\\\\\\\\]*)*\'|\\\\"[^\\\\"\\\\\\\\]*(?:\\\\\\\\.[^\\\\"\\\\\\\\]*)*\\\\")\\\\s*"""', 're.IGNORECASE'], {}), '(\n \'(\\\'[^\\\'\\\\\\\\]*(?:\\\\\\\\.[^\\\'\\\\\\\\]*)*\\\'|\\\\"[^\\\\"\\\\\\\\]*(?:\\\\\\\\.[^\\\\"\\\\\\\\]*)*\\\\")\\\\s*\'\n , re.IGNORECASE)\n', (180, 288), False, 'import re\n'), ((278, 319), 're.compile', 're.compile', (['"""[+\\\\-!~]\\\\s*"""', 're.IGNORECASE'], {}), "('[+\\\\-!~]\\\\s*', re.IGNORECASE)\n", (288, 319), False, 'import re\n'), ((339, 457), 're.compile', 're.compile', (['"""(\\\\*\\\\*|\\\\<{1,2}|\\\\<=|\\\\>{1,2}|\\\\>=|\\\\.\\\\.|==|!=|\\\\<\\\\>|\\\\/{1,2}|[-+*%&|^])\\\\s*"""', 're.IGNORECASE'], {}), "(\n '(\\\\*\\\\*|\\\\<{1,2}|\\\\<=|\\\\>{1,2}|\\\\>=|\\\\.\\\\.|==|!=|\\\\<\\\\>|\\\\/{1,2}|[-+*%&|^])\\\\s*'\n , re.IGNORECASE)\n", (349, 457), False, 'import re\n'), ((449, 598), 're.compile', 're.compile', (['"""(0[xh][0-9a-f]+(?:_[0-9a-f]+)*|0[oq][0-7]+(?:_[0-7]+)*|0[by][01]+(?:_[01]+)*|(?:0[dt])?[0-9]+(?:_[0-9]+)*)\\\\s*"""', 're.IGNORECASE'], {}), "(\n '(0[xh][0-9a-f]+(?:_[0-9a-f]+)*|0[oq][0-7]+(?:_[0-7]+)*|0[by][01]+(?:_[01]+)*|(?:0[dt])?[0-9]+(?:_[0-9]+)*)\\\\s*'\n , re.IGNORECASE)\n", (459, 598), False, 'import re\n'), ((604, 640), 're.compile', 're.compile', (['"""\\\\(\\\\s*"""', 're.IGNORECASE'], {}), "('\\\\(\\\\s*', re.IGNORECASE)\n", (614, 640), False, 'import re\n'), ((656, 692), 're.compile', 're.compile', (['"""\\\\)\\\\s*"""', 're.IGNORECASE'], {}), "('\\\\)\\\\s*', re.IGNORECASE)\n", (666, 692), False, 'import re\n'), ((704, 754), 
're.compile', 're.compile', (['"""(?:;|,|\\\\)|\\\\])\\\\s*|$"""', 're.IGNORECASE'], {}), "('(?:;|,|\\\\)|\\\\])\\\\s*|$', re.IGNORECASE)\n", (714, 754), False, 'import re\n'), ((1316, 1336), 'parsy.regex', 'parsy.regex', (['R_SPACE'], {}), '(R_SPACE)\n', (1327, 1336), False, 'import parsy\n'), ((1844, 1863), 'ast.literal_eval', 'ast.literal_eval', (['c'], {}), '(c)\n', (1860, 1863), False, 'import ast\n'), ((804, 826), 'parsy.regex', 'parsy.regex', (['R_INTEGER'], {}), '(R_INTEGER)\n', (815, 826), False, 'import parsy\n'), ((1386, 1415), 'parsy.regex', 'parsy.regex', (['R_OPERATOR_UNARY'], {}), '(R_OPERATOR_UNARY)\n', (1397, 1415), False, 'import parsy\n'), ((2112, 2142), 'parsy.regex', 'parsy.regex', (['R_OPERATOR_BINARY'], {}), '(R_OPERATOR_BINARY)\n', (2123, 2142), False, 'import parsy\n'), ((2370, 2392), 'parsy.regex', 'parsy.regex', (['R_END_ALL'], {}), '(R_END_ALL)\n', (2381, 2392), False, 'import parsy\n'), ((1631, 1655), 'parsy.regex', 'parsy.regex', (['R_REFERENCE'], {}), '(R_REFERENCE)\n', (1642, 1655), False, 'import parsy\n'), ((1931, 1956), 'parsy.regex', 'parsy.regex', (['R_PAREN_OPEN'], {}), '(R_PAREN_OPEN)\n', (1942, 1956), False, 'import parsy\n'), ((1777, 1798), 'parsy.regex', 'parsy.regex', (['R_STRING'], {}), '(R_STRING)\n', (1788, 1798), False, 'import parsy\n'), ((2055, 2081), 'parsy.regex', 'parsy.regex', (['R_PAREN_CLOSE'], {}), '(R_PAREN_CLOSE)\n', (2066, 2081), False, 'import parsy\n')]
|
import datetime,os,torch
from torch.utils.data import Dataset
from loadData import *
from lib import *
from fit import *
from model import *
from skimage.measure import block_reduce
import sys
# Timestamp used to build the dated model output directory in main().
date = datetime.datetime.now()
# Restrict visible GPUs for this run; adjust per machine.
os.environ['CUDA_VISIBLE_DEVICES'] = '6' # change
# UnetAE_preRoll
# UnetAE_preIP_preRoll -> only predict instrument at buttom
# UnetAE_preIP_prePP_prePNZ_preRoll -> with pitch as adv training predict zero
# UnetAE_preIP_prePP_prePNN_preRoll -> with pitch as adv training negative loss
# DuoAE_preIP_prePP -> no pianoroll conntected
# DuoAE_preIP_preINZ_prePP_prePNZ -> no pianoroll conntected and adv training predict zero
# DuoAE_preIP_preINN_prePP_prePNN -> no pianoroll conntected and adv training negative loss
# DuoAE_preIP_prePP_preRoll -> pianoroll conntected
# DuoAE_preIP_preINZ_prePP_prePNZ_preRoll -> pianoroll conntected and adv training predict zero
# DuoAE_preIP_preINN_prePP_prePNN_preRoll -> pianoroll conntected and adv training predict negative
def main(args):
name = args[1]
batch_size = 10
epoch = 100
lr = 0.01
out_model_fn = '../data/model/%d%d%d/%s/'%(date.year,date.month,date.day,name)
if not os.path.exists(out_model_fn):
os.makedirs(out_model_fn)
# load data
t_kwargs = {'batch_size': batch_size, 'num_workers': 2, 'pin_memory': True,'drop_last': True}
Xtr,Ytr,Ytr_p,Ytr_s = load('musescore')
# Xtr (batch_size, 1, note_bin, time_length)
# Ytr (batch_size, instrument_categories, time_length)
# Ytr_p (batch_size, note_bin, time_length)
# Ytr_s (batch_size, instrument_categories, note_bin, time_length)
Xtr_mel,Ytr_mel = load_melody('musescore')
avg, std = np.load('../../data/cqt_avg_std.npy')
trdata = [Xtr, Ytr, Ytr_p, Ytr_s, Xtr_mel, Ytr_mel]
tr_loader = torch.utils.data.DataLoader(Data2Torch(trdata), shuffle=True, **t_kwargs)
print('finishing data building...')
# build model
model = Net(name).cuda()
model.apply(model_init)
# balance data
weight = [get_weight(Ytr)]
# start training
Trer = Trainer(model, lr, epoch, out_model_fn, avg, std)
Trer.fit(tr_loader, weight, name)
print( out_model_fn)
if __name__ == "__main__":
main(sys.argv)
|
[
"datetime.datetime.now",
"os.makedirs",
"os.path.exists"
] |
[((200, 223), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (221, 223), False, 'import datetime, os, torch\n'), ((1181, 1209), 'os.path.exists', 'os.path.exists', (['out_model_fn'], {}), '(out_model_fn)\n', (1195, 1209), False, 'import datetime, os, torch\n'), ((1216, 1241), 'os.makedirs', 'os.makedirs', (['out_model_fn'], {}), '(out_model_fn)\n', (1227, 1241), False, 'import datetime, os, torch\n')]
|
# -*- coding: utf-8 -*-
name = 'ue4'
version = '4.20.2'
author = ['ue4']
requires = ["python-2.7.11"]
variants = []
def commands():
import os
applications_path = os.environ["APPLICATIONS_PATH"]
python_path = os.path.join(applications_path, "python", "2.7.11").replace("/", os.sep)
ue4_path = os.path.join(applications_path, "ue4", "%s"%version).replace("/", os.sep)
env.UE_PYTHON_DIR.set(python_path)
env.PATH.append(os.path.join(ue4_path, "Engine", "Binaries", "Win64").replace("/", os.sep))
|
[
"os.path.join"
] |
[((224, 275), 'os.path.join', 'os.path.join', (['applications_path', '"""python"""', '"""2.7.11"""'], {}), "(applications_path, 'python', '2.7.11')\n", (236, 275), False, 'import os\n'), ((317, 371), 'os.path.join', 'os.path.join', (['applications_path', '"""ue4"""', "('%s' % version)"], {}), "(applications_path, 'ue4', '%s' % version)\n", (329, 371), False, 'import os\n'), ((460, 513), 'os.path.join', 'os.path.join', (['ue4_path', '"""Engine"""', '"""Binaries"""', '"""Win64"""'], {}), "(ue4_path, 'Engine', 'Binaries', 'Win64')\n", (472, 513), False, 'import os\n')]
|
import numpy as np
from pmesh.pm import ParticleMesh
from nbodykit.lab import BigFileCatalog, MultipleSpeciesCatalog,\
BigFileMesh, FFTPower
from nbodykit import setup_logging
from mpi4py import MPI
import HImodels
# enable logging, we have some clue what's going on.
setup_logging('info')
#
#Global, fixed things
scratchyf = '/global/cscratch1/sd/yfeng1/m3127/'
scratchcm = '/global/cscratch1/sd/chmodi/m3127/H1mass/'
project = '/project/projectdirs/m3127/H1mass/'
cosmodef = {'omegam':0.309167, 'h':0.677, 'omegab':0.048}
alist = [0.1429,0.1538,0.1667,0.1818,0.2000,0.2222,0.2500,0.2857,0.3333]
#Parameters, box size, number of mesh cells, simulation, ...
bs, nc, ncsim, sim, prefix = 256, 512, 2560, 'highres/%d-9100-fixed'%2560, 'highres'
#bs,nc,ncsim, sim, prefic = 1024, 1024, 10240, 'highres/%d-9100-fixed'%ncsim, 'highres'
# It's useful to have my rank for printing...
pm = ParticleMesh(BoxSize=bs, Nmesh=[nc, nc, nc])
rank = pm.comm.rank
comm = pm.comm
#Which model to use
HImodel = HImodels.ModelA
ofolder = '../data/outputs/'
def calc_bias(aa,h1mesh,suff):
'''Compute the bias(es) for the HI'''
if rank==0:
print("Processing a={:.4f}...".format(aa))
print('Reading DM mesh...')
if ncsim == 10240:
dm = BigFileMesh(scratchyf+sim+'/fastpm_%0.4f/'%aa+\
'/1-mesh/N%04d'%nc,'').paint()
else:
dm = BigFileMesh(project+sim+'/fastpm_%0.4f/'%aa+\
'/dmesh_N%04d/1/'%nc,'').paint()
dm /= dm.cmean()
if rank==0: print('Computing DM P(k)...')
pkmm = FFTPower(dm,mode='1d').power
k,pkmm= pkmm['k'],pkmm['power'] # Ignore shotnoise.
if rank==0: print('Done.')
#
pkh1h1 = FFTPower(h1mesh,mode='1d').power
pkh1h1 = pkh1h1['power']-pkh1h1.attrs['shotnoise']
pkh1mm = FFTPower(h1mesh,second=dm,mode='1d').power['power']
if rank==0: print('Done.')
# Compute the biases.
b1x = np.abs(pkh1mm/(pkmm+1e-10))
b1a = np.abs(pkh1h1/(pkmm+1e-10))**0.5
if rank==0: print("Finishing processing a={:.4f}.".format(aa))
return(k,b1x,b1a,np.abs(pkmm))
#
if __name__=="__main__":
#satsuff='-m1_5p0min-alpha_0p8-16node'
suff='-m1_00p3mh-alpha-0p8-subvol'
outfolder = ofolder + suff[1:] + "/modelA/"
try: os.makedirs(outfolder)
except : pass
if rank==0:
print('Starting')
for aa in alist:
if rank == 0: print('\n ############## Redshift = %0.2f ############## \n'%(1/aa-1))
halocat = BigFileCatalog(scratchyf + sim+ '/fastpm_%0.4f//'%aa, dataset='LL-0.200')
mp = halocat.attrs['MassTable'][1]*1e10##
halocat['Mass'] = halocat['Length'].compute() * mp
cencat = BigFileCatalog(scratchcm + sim+'/fastpm_%0.4f/cencat'%aa+suff)
satcat = BigFileCatalog(scratchcm + sim+'/fastpm_%0.4f/satcat'%aa+suff)
rsdfac = read_conversions(scratchyf + sim+'/fastpm_%0.4f/'%aa)
#
HImodelz = HImodel(aa)
los = [0,0,1]
halocat['HImass'], cencat['HImass'], satcat['HImass'] = HImodelz.assignHI(halocat, cencat, satcat)
halocat['RSDpos'], cencat['RSDpos'], satcat['RSDpos'] = HImodelz.assignrsd(rsdfac, halocat, cencat, satcat, los=los)
h1mesh = HImodelz.createmesh(bs, nc, halocat, cencat, satcat, mode='galaxies', position='RSDpos', weight='HImass')
kk,b1x,b1a,pkmm = calc_bias(aa,h1mesh,suff)
#
if rank==0:
fout = open(outfolder + "HI_bias_{:6.4f}.txt".format(aa),"w")
fout.write("# Mcut={:12.4e}Msun/h.\n".format(mcut))
fout.write("# {:>8s} {:>10s} {:>10s} {:>15s}\n".\
format("k","b1_x","b1_a","Pkmm"))
for i in range(1,kk.size):
fout.write("{:10.5f} {:10.5f} {:10.5f} {:15.5e}\n".\
format(kk[i],b1x[i],b1a[i],pkmm[i]))
fout.close()
#
|
[
"nbodykit.lab.BigFileCatalog",
"numpy.abs",
"nbodykit.setup_logging",
"nbodykit.lab.FFTPower",
"pmesh.pm.ParticleMesh",
"nbodykit.lab.BigFileMesh"
] |
[((308, 329), 'nbodykit.setup_logging', 'setup_logging', (['"""info"""'], {}), "('info')\n", (321, 329), False, 'from nbodykit import setup_logging\n'), ((933, 977), 'pmesh.pm.ParticleMesh', 'ParticleMesh', ([], {'BoxSize': 'bs', 'Nmesh': '[nc, nc, nc]'}), '(BoxSize=bs, Nmesh=[nc, nc, nc])\n', (945, 977), False, 'from pmesh.pm import ParticleMesh\n'), ((1983, 2014), 'numpy.abs', 'np.abs', (['(pkh1mm / (pkmm + 1e-10))'], {}), '(pkh1mm / (pkmm + 1e-10))\n', (1989, 2014), True, 'import numpy as np\n'), ((1626, 1649), 'nbodykit.lab.FFTPower', 'FFTPower', (['dm'], {'mode': '"""1d"""'}), "(dm, mode='1d')\n", (1634, 1649), False, 'from nbodykit.lab import BigFileCatalog, MultipleSpeciesCatalog, BigFileMesh, FFTPower\n'), ((1763, 1790), 'nbodykit.lab.FFTPower', 'FFTPower', (['h1mesh'], {'mode': '"""1d"""'}), "(h1mesh, mode='1d')\n", (1771, 1790), False, 'from nbodykit.lab import BigFileCatalog, MultipleSpeciesCatalog, BigFileMesh, FFTPower\n'), ((2021, 2052), 'numpy.abs', 'np.abs', (['(pkh1h1 / (pkmm + 1e-10))'], {}), '(pkh1h1 / (pkmm + 1e-10))\n', (2027, 2052), True, 'import numpy as np\n'), ((2142, 2154), 'numpy.abs', 'np.abs', (['pkmm'], {}), '(pkmm)\n', (2148, 2154), True, 'import numpy as np\n'), ((2558, 2634), 'nbodykit.lab.BigFileCatalog', 'BigFileCatalog', (["(scratchyf + sim + '/fastpm_%0.4f//' % aa)"], {'dataset': '"""LL-0.200"""'}), "(scratchyf + sim + '/fastpm_%0.4f//' % aa, dataset='LL-0.200')\n", (2572, 2634), False, 'from nbodykit.lab import BigFileCatalog, MultipleSpeciesCatalog, BigFileMesh, FFTPower\n'), ((2758, 2826), 'nbodykit.lab.BigFileCatalog', 'BigFileCatalog', (["(scratchcm + sim + '/fastpm_%0.4f/cencat' % aa + suff)"], {}), "(scratchcm + sim + '/fastpm_%0.4f/cencat' % aa + suff)\n", (2772, 2826), False, 'from nbodykit.lab import BigFileCatalog, MultipleSpeciesCatalog, BigFileMesh, FFTPower\n'), ((2838, 2906), 'nbodykit.lab.BigFileCatalog', 'BigFileCatalog', (["(scratchcm + sim + '/fastpm_%0.4f/satcat' % aa + suff)"], {}), "(scratchcm + sim + 
'/fastpm_%0.4f/satcat' % aa + suff)\n", (2852, 2906), False, 'from nbodykit.lab import BigFileCatalog, MultipleSpeciesCatalog, BigFileMesh, FFTPower\n'), ((1864, 1902), 'nbodykit.lab.FFTPower', 'FFTPower', (['h1mesh'], {'second': 'dm', 'mode': '"""1d"""'}), "(h1mesh, second=dm, mode='1d')\n", (1872, 1902), False, 'from nbodykit.lab import BigFileCatalog, MultipleSpeciesCatalog, BigFileMesh, FFTPower\n'), ((1309, 1388), 'nbodykit.lab.BigFileMesh', 'BigFileMesh', (["(scratchyf + sim + '/fastpm_%0.4f/' % aa + '/1-mesh/N%04d' % nc)", '""""""'], {}), "(scratchyf + sim + '/fastpm_%0.4f/' % aa + '/1-mesh/N%04d' % nc, '')\n", (1320, 1388), False, 'from nbodykit.lab import BigFileCatalog, MultipleSpeciesCatalog, BigFileMesh, FFTPower\n'), ((1442, 1521), 'nbodykit.lab.BigFileMesh', 'BigFileMesh', (["(project + sim + '/fastpm_%0.4f/' % aa + '/dmesh_N%04d/1/' % nc)", '""""""'], {}), "(project + sim + '/fastpm_%0.4f/' % aa + '/dmesh_N%04d/1/' % nc, '')\n", (1453, 1521), False, 'from nbodykit.lab import BigFileCatalog, MultipleSpeciesCatalog, BigFileMesh, FFTPower\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 2 19:15:26 2020
@author: Diego
"""
import pandas as pd
import sqlite3
import wget
import os
from urllib.request import urlopen
from bs4 import BeautifulSoup
import urllib.request
import datetime
import zipfile
import io
import requests
if not os.path.exists('data'):
os.makedirs('data')
if not os.path.exists(os.path.join('data', 'temp')):
os.makedirs(os.path.join('data', 'temp'))
conn = sqlite3.connect(os.path.join('data', 'fundos.db'))
db = conn.cursor()
# %% functions
def create_tables():
"""
Creates all tables in the database.
Returns
-------
None.
"""
db.execute("""CREATE TABLE IF NOT EXISTS files
(file_name TEXT,
last_modified DATE)""")
db.execute("""CREATE TABLE IF NOT EXISTS quotas
(cnpj TEXT,
date DATE,
quota REAL)""")
db.execute("CREATE INDEX idx_quotas_cnpj ON quotas(cnpj);")
db.execute("""CREATE TABLE IF NOT EXISTS inf_cadastral
(cnpj TEXT,
denom_social TEXT,
classe text,
rentab_fundo TEXT,
taxa_perfm INTEGER,
taxa_adm REAL)""")
db.execute("""CREATE TABLE IF NOT EXISTS cdi
(date DATE,
cdi REAL,
d_factor REAL)""")
def update_register():
"""
Updates the mutual funds register.
Returns
-------
None.
"""
url = 'http://dados.cvm.gov.br/dados/FI/CAD/DADOS/'
files = {}
i = 0
html = urlopen(url)
soup = BeautifulSoup(html, 'lxml')
table = soup.find('table')
tr = table.find_all('tr')
for t in tr:
if t.text[0:17] == 'inf_cadastral_fi_':
file_name = t.text[0:29]
last_modified = pd.to_datetime(t.text[29:45])
files[i] = {'file_name': file_name, 'url_date': last_modified}
i += 1
available_files = pd.DataFrame.from_dict(files, orient='index')
available_files['url_date'] = pd.to_datetime(available_files['url_date'])
last_file = available_files['file_name'][available_files['url_date'] == max(available_files['url_date'])].values[0]
file_url = f"http://dados.cvm.gov.br/dados/FI/CAD/DADOS/{last_file}"
response = requests.get(file_url)
df = pd.read_csv(io.BytesIO(response.content), sep=';', header=0, encoding='latin-1')
df.columns = df.columns.str.lower()
df = df.rename(columns={'cnpj_fundo': 'cnpj'})
# drop inactive
df = df[df['sit'] == 'EM FUNCIONAMENTO NORMAL']
# drop closed
df = df[df['condom'] == 'Aberto']
# drop no equity
df = df[df['vl_patrim_liq'] != 0]
df = df.drop_duplicates(subset=['cnpj'], keep='last')
df = df[['cnpj', 'denom_social', 'classe', 'rentab_fundo', 'taxa_perfm', 'taxa_adm']]
df[['taxa_perfm', 'taxa_adm']] = df[['taxa_perfm', 'taxa_adm']].fillna(value=0)
db.execute("DELETE FROM inf_cadastral")
df.to_sql('inf_cadastral', conn, if_exists='append', index=False)
conn.commit()
return
def update_quotes():
"""
Updates the mutual funds quotes.
Returns
-------
None.
"""
db_files = pd.read_sql("SELECT * FROM files", conn)
urls = ['http://dados.cvm.gov.br/dados/FI/DOC/INF_DIARIO/DADOS/HIST/',
'http://dados.cvm.gov.br/dados/FI/DOC/INF_DIARIO/DADOS/']
files = {}
i = 0
for url in urls:
html = urlopen(url)
soup = BeautifulSoup(html, 'lxml')
table = soup.find('table')
tr = table.find_all('tr')
for t in tr:
if t.text[0:14] == 'inf_diario_fi_':
if url == 'http://dados.cvm.gov.br/dados/FI/DOC/INF_DIARIO/DADOS/':
file_name = t.text[0:24]
last_modified = pd.to_datetime(t.text[24:40]).date()
else:
file_name = t.text[0:22]
last_modified = pd.to_datetime(t.text[22:38]).date()
files[i] = {'file_name': file_name, 'url_date': last_modified}
i += 1
available_files = pd.DataFrame.from_dict(files, orient='index')
new_files = available_files.merge(db_files, how='left', right_on='file_name', left_on='file_name')
new_files = new_files.fillna(pd.to_datetime('1900-01-01'))
new_files = new_files[new_files['url_date'] > pd.to_datetime(new_files['last_modified'])]
for idx, file in new_files.iterrows():
if len(file['file_name']) == 22:
url = 'http://dados.cvm.gov.br/dados/FI/DOC/INF_DIARIO/DADOS/HIST/'
zip_or_csv = 'zip'
else:
url = 'http://dados.cvm.gov.br/dados/FI/DOC/INF_DIARIO/DADOS/'
zip_or_csv = 'csv'
file_url = url + file['file_name']
file_data = requests.get(file_url).content
db.execute(f"""DELETE FROM files
WHERE file_name = '{file['file_name']}'""")
load_file(file_data, zip_or_csv=zip_or_csv)
db.execute(f"""INSERT INTO files
VALUES ('{file['file_name']}', '{file['url_date']}')""")
print(f"{file['file_name']} downloaded successfully.")
conn.commit()
return
def load_file(file_data, zip_or_csv):
"""
Loads the file with the new quotes.
Parameters
----------
file_name : string
Returns
-------
None.
"""
active = pd.read_sql("SELECT cnpj FROM inf_cadastral", conn)['cnpj']
if zip_or_csv == 'zip':
zip_file = zipfile.ZipFile(io.BytesIO(file_data))
# dict with all csv files
files_dict = {}
for i in range(len(zip_file.namelist())):
files_dict[zip_file.namelist()[i]] = zip_file.read(zip_file.namelist()[i])
else:
files_dict = {'any_name': file_data }
for key in files_dict.keys():
df = pd.read_csv(io.BytesIO(files_dict[key]), sep=';', header=0, encoding='latin-1')
df.columns = df.columns.str.lower()
df = df.rename(columns={'cnpj_fundo': 'cnpj', 'dt_comptc': 'date', 'vl_quota': 'quota'})
df = df[df['cnpj'].isin(list(active))]
df = df[['cnpj', 'date', 'quota']]
year = df['date'].str[:4].unique()[0]
month = df['date'].str[5:7].unique()[0]
db.execute(f"""DELETE FROM quotas
WHERE SUBSTR(date, 1, 4) = '{year}' AND
SUBSTR(date, 6, 2) = '{month}'""")
df.to_sql('quotas', conn, if_exists='append', index=False)
conn.commit()
return
def update_cdi():
"""
Updates the CDI (Brazilian reference rate).
Returns
-------
None.
"""
# Files in the ftp:
url = 'ftp://ftp.cetip.com.br/MediaCDI/'
req = urllib.request.Request(url)
r = urllib.request.urlopen(req)
text = str(r.read())
text = text.replace('\\n', ' ')
text = text.replace('\\r', '')
text = text.replace("b'", "")
text = text.replace("'", "")
text = text.split()
available_files = []
for file_name in text:
if file_name[-4:] == '.txt':
available_files.append(file_name)
# Files in the database:
db_files = pd.read_sql("SELECT * FROM files", conn)
db_files = db_files['file_name'].to_list()
# check if the file is new, process and update files table
for file in available_files:
if file not in db_files:
for fl in os.listdir(os.path.join('data', 'temp')):
os.remove(os.path.join('data', 'temp', fl))
file_url = f"ftp://ftp.cetip.com.br/MediaCDI/{file}"
wget.download(file_url, os.path.join('data', 'temp'))
with open(os.path.join('data', 'temp', file), 'r') as content:
cdi = int(content.readline()) / 100
d_factor = ((cdi / 100) + 1) ** (1 / 252)
date = datetime.datetime.strptime(file[:8], '%Y%m%d')
db.execute(f"""INSERT INTO cdi
VALUES ('{date}', {cdi}, {d_factor})""")
# These files are not updated by the provider (cetip.com.br).
# Because of that, the last_modified is not important, and set to 1900-01-01
db.execute(f"""INSERT INTO files
VALUES ('{file}', '1900-01-01')""")
conn.commit()
print("CDI file " + file + " downloaded successfully.")
return
def update_pipeline():
# create database tables
query = "SELECT name FROM sqlite_master WHERE type='table' AND name='quotas';"
if db.execute(query).fetchone() == None:
create_tables()
update_register()
update_quotes()
update_cdi()
return
|
[
"io.BytesIO",
"os.makedirs",
"pandas.DataFrame.from_dict",
"os.path.exists",
"urllib.request.urlopen",
"datetime.datetime.strptime",
"pandas.to_datetime",
"requests.get",
"pandas.read_sql",
"bs4.BeautifulSoup",
"os.path.join"
] |
[((294, 316), 'os.path.exists', 'os.path.exists', (['"""data"""'], {}), "('data')\n", (308, 316), False, 'import os\n'), ((322, 341), 'os.makedirs', 'os.makedirs', (['"""data"""'], {}), "('data')\n", (333, 341), False, 'import os\n'), ((465, 498), 'os.path.join', 'os.path.join', (['"""data"""', '"""fundos.db"""'], {}), "('data', 'fundos.db')\n", (477, 498), False, 'import os\n'), ((1628, 1640), 'urllib.request.urlopen', 'urlopen', (['url'], {}), '(url)\n', (1635, 1640), False, 'from urllib.request import urlopen\n'), ((1652, 1679), 'bs4.BeautifulSoup', 'BeautifulSoup', (['html', '"""lxml"""'], {}), "(html, 'lxml')\n", (1665, 1679), False, 'from bs4 import BeautifulSoup\n'), ((2017, 2062), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['files'], {'orient': '"""index"""'}), "(files, orient='index')\n", (2039, 2062), True, 'import pandas as pd\n'), ((2097, 2140), 'pandas.to_datetime', 'pd.to_datetime', (["available_files['url_date']"], {}), "(available_files['url_date'])\n", (2111, 2140), True, 'import pandas as pd\n'), ((2349, 2371), 'requests.get', 'requests.get', (['file_url'], {}), '(file_url)\n', (2361, 2371), False, 'import requests\n'), ((3245, 3285), 'pandas.read_sql', 'pd.read_sql', (['"""SELECT * FROM files"""', 'conn'], {}), "('SELECT * FROM files', conn)\n", (3256, 3285), True, 'import pandas as pd\n'), ((4153, 4198), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['files'], {'orient': '"""index"""'}), "(files, orient='index')\n", (4175, 4198), True, 'import pandas as pd\n'), ((7203, 7243), 'pandas.read_sql', 'pd.read_sql', (['"""SELECT * FROM files"""', 'conn'], {}), "('SELECT * FROM files', conn)\n", (7214, 7243), True, 'import pandas as pd\n'), ((364, 392), 'os.path.join', 'os.path.join', (['"""data"""', '"""temp"""'], {}), "('data', 'temp')\n", (376, 392), False, 'import os\n'), ((411, 439), 'os.path.join', 'os.path.join', (['"""data"""', '"""temp"""'], {}), "('data', 'temp')\n", (423, 439), False, 'import os\n'), ((2393, 2421), 
'io.BytesIO', 'io.BytesIO', (['response.content'], {}), '(response.content)\n', (2403, 2421), False, 'import io\n'), ((3492, 3504), 'urllib.request.urlopen', 'urlopen', (['url'], {}), '(url)\n', (3499, 3504), False, 'from urllib.request import urlopen\n'), ((3520, 3547), 'bs4.BeautifulSoup', 'BeautifulSoup', (['html', '"""lxml"""'], {}), "(html, 'lxml')\n", (3533, 3547), False, 'from bs4 import BeautifulSoup\n'), ((4335, 4363), 'pandas.to_datetime', 'pd.to_datetime', (['"""1900-01-01"""'], {}), "('1900-01-01')\n", (4349, 4363), True, 'import pandas as pd\n'), ((5450, 5501), 'pandas.read_sql', 'pd.read_sql', (['"""SELECT cnpj FROM inf_cadastral"""', 'conn'], {}), "('SELECT cnpj FROM inf_cadastral', conn)\n", (5461, 5501), True, 'import pandas as pd\n'), ((1871, 1900), 'pandas.to_datetime', 'pd.to_datetime', (['t.text[29:45]'], {}), '(t.text[29:45])\n', (1885, 1900), True, 'import pandas as pd\n'), ((4415, 4457), 'pandas.to_datetime', 'pd.to_datetime', (["new_files['last_modified']"], {}), "(new_files['last_modified'])\n", (4429, 4457), True, 'import pandas as pd\n'), ((4838, 4860), 'requests.get', 'requests.get', (['file_url'], {}), '(file_url)\n', (4850, 4860), False, 'import requests\n'), ((5574, 5595), 'io.BytesIO', 'io.BytesIO', (['file_data'], {}), '(file_data)\n', (5584, 5595), False, 'import io\n'), ((5908, 5935), 'io.BytesIO', 'io.BytesIO', (['files_dict[key]'], {}), '(files_dict[key])\n', (5918, 5935), False, 'import io\n'), ((7877, 7923), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['file[:8]', '"""%Y%m%d"""'], {}), "(file[:8], '%Y%m%d')\n", (7903, 7923), False, 'import datetime\n'), ((7454, 7482), 'os.path.join', 'os.path.join', (['"""data"""', '"""temp"""'], {}), "('data', 'temp')\n", (7466, 7482), False, 'import os\n'), ((7646, 7674), 'os.path.join', 'os.path.join', (['"""data"""', '"""temp"""'], {}), "('data', 'temp')\n", (7658, 7674), False, 'import os\n'), ((7511, 7543), 'os.path.join', 'os.path.join', (['"""data"""', '"""temp"""', 
'fl'], {}), "('data', 'temp', fl)\n", (7523, 7543), False, 'import os\n'), ((7698, 7732), 'os.path.join', 'os.path.join', (['"""data"""', '"""temp"""', 'file'], {}), "('data', 'temp', file)\n", (7710, 7732), False, 'import os\n'), ((3852, 3881), 'pandas.to_datetime', 'pd.to_datetime', (['t.text[24:40]'], {}), '(t.text[24:40])\n', (3866, 3881), True, 'import pandas as pd\n'), ((3992, 4021), 'pandas.to_datetime', 'pd.to_datetime', (['t.text[22:38]'], {}), '(t.text[22:38])\n', (4006, 4021), True, 'import pandas as pd\n')]
|
# Copyright (c) The Libra Core Contributors
# SPDX-License-Identifier: Apache-2.0
from ..protocol import VASPPairChannel
from ..status_logic import Status, KYCResult, State, InvalidStateException
from ..payment_command import PaymentCommand, PaymentLogicError
from ..business import BusinessForceAbort, BusinessValidationFailure
from os import urandom
from ..payment import PaymentObject, StatusObject, PaymentActor, PaymentAction
from ..libra_address import LibraAddress
from ..asyncnet import Aionet
from ..storage import StorableFactory
from ..payment_logic import PaymentProcessor
from ..utils import JSONFlag
from ..errors import OffChainErrorCode
from .basic_business_context import TestBusinessContext
from unittest.mock import MagicMock
from mock import AsyncMock
import pytest
import copy
@pytest.fixture
def payment():
sender_addr = LibraAddress.from_bytes("lbr", b'B'*16, b'b'*8)
sender = PaymentActor(sender_addr.as_str(), StatusObject(Status.none))
receiver_addr = LibraAddress.from_bytes("lbr", b'A'*16, b'a'*8)
receiver = PaymentActor(receiver_addr.as_str(), StatusObject(Status.none))
action = PaymentAction(5, 'TIK', 'charge', 7784993)
ref_id = f'{LibraAddress.from_encoded_str(sender_addr.get_onchain_encoded_str())}_{urandom(16).hex()}'
return PaymentObject(
sender, receiver, ref_id, None,
'Human readable payment information.', action
)
def test_SINIT(payment):
payment.sender.change_status(StatusObject(Status.needs_kyc_data))
assert State.from_payment_object(payment) == State.SINIT
payment2 = payment.new_version()
payment2.sender.add_additional_kyc_data("additional_kyc")
with pytest.raises(InvalidStateException):
State.from_payment_object(payment2)
payment3 = payment.new_version()
payment3.receiver.add_additional_kyc_data("additional_kyc")
with pytest.raises(InvalidStateException):
State.from_payment_object(payment3)
def test_RSEND(payment):
payment.sender.change_status(StatusObject(Status.needs_kyc_data))
payment.receiver.change_status(StatusObject(Status.ready_for_settlement))
assert State.from_payment_object(payment) == State.RSEND
payment2 = payment.new_version()
payment2.sender.add_additional_kyc_data("additional_kyc")
assert State.from_payment_object(payment2) == State.RSEND
payment3 = payment.new_version()
payment3.receiver.add_additional_kyc_data("additional_kyc")
with pytest.raises(InvalidStateException):
State.from_payment_object(payment3)
payment4 = payment2.new_version()
payment4.receiver.add_additional_kyc_data("additional_kyc")
with pytest.raises(InvalidStateException):
State.from_payment_object(payment4)
def test_RABORT(payment):
payment.sender.change_status(StatusObject(Status.needs_kyc_data))
payment.receiver.change_status(StatusObject(Status.abort, "", ""))
assert State.from_payment_object(payment) == State.RABORT
payment2 = payment.new_version()
payment2.sender.add_additional_kyc_data("additional_kyc")
assert State.from_payment_object(payment2) == State.RABORT
payment3 = payment.new_version()
payment3.receiver.add_additional_kyc_data("additional_kyc")
with pytest.raises(InvalidStateException):
State.from_payment_object(payment3)
def test_SABORT(payment):
payment.sender.change_status(StatusObject(Status.abort, "", ""))
payment.receiver.change_status(StatusObject(Status.ready_for_settlement))
assert State.from_payment_object(payment) == State.SABORT
payment2 = payment.new_version()
payment2.sender.add_additional_kyc_data("additional_kyc")
assert State.from_payment_object(payment2) == State.SABORT
payment3 = payment.new_version()
payment3.receiver.add_additional_kyc_data("additional_kyc")
assert State.from_payment_object(payment3) == State.SABORT
def test_READY(payment):
payment.sender.change_status(StatusObject(Status.ready_for_settlement))
payment.receiver.change_status(StatusObject(Status.ready_for_settlement))
assert State.from_payment_object(payment) == State.READY
payment2 = payment.new_version()
payment2.sender.add_additional_kyc_data("additional_kyc")
assert State.from_payment_object(payment2) == State.READY
payment3 = payment.new_version()
payment3.receiver.add_additional_kyc_data("additional_kyc")
assert State.from_payment_object(payment3) == State.READY
def test_RSOFT(payment):
payment.sender.change_status(StatusObject(Status.needs_kyc_data))
payment.receiver.change_status(StatusObject(Status.soft_match))
assert State.from_payment_object(payment) == State.RSOFT
payment3 = payment.new_version()
payment3.receiver.add_additional_kyc_data("additional_kyc")
with pytest.raises(InvalidStateException):
State.from_payment_object(payment3)
def test_SSOFTSEND(payment):
payment.sender.change_status(StatusObject(Status.needs_kyc_data))
payment.receiver.change_status(StatusObject(Status.soft_match))
payment.sender.add_additional_kyc_data("additional_kyc")
assert State.from_payment_object(payment) == State.SSOFTSEND
payment2 = payment.new_version()
payment2.receiver.add_additional_kyc_data("additional_kyc")
with pytest.raises(InvalidStateException):
State.from_payment_object(payment2)
def test_SSOFT(payment):
payment.sender.change_status(StatusObject(Status.soft_match))
payment.receiver.change_status(StatusObject(Status.ready_for_settlement))
assert State.from_payment_object(payment) == State.SSOFT
payment2 = payment.new_version()
payment2.sender.add_additional_kyc_data("additional_kyc")
assert State.from_payment_object(payment) == State.SSOFT
def test_RSOFTSEND(payment):
payment.sender.change_status(StatusObject(Status.soft_match))
payment.receiver.change_status(StatusObject(Status.ready_for_settlement))
payment.receiver.add_additional_kyc_data("additional_kyc")
assert State.from_payment_object(payment) == State.RSOFTSEND
payment2 = payment.new_version()
payment2.sender.add_additional_kyc_data("additional_kyc")
assert State.from_payment_object(payment) == State.RSOFTSEND
|
[
"pytest.raises",
"os.urandom"
] |
[((1679, 1715), 'pytest.raises', 'pytest.raises', (['InvalidStateException'], {}), '(InvalidStateException)\n', (1692, 1715), False, 'import pytest\n'), ((1872, 1908), 'pytest.raises', 'pytest.raises', (['InvalidStateException'], {}), '(InvalidStateException)\n', (1885, 1908), False, 'import pytest\n'), ((2463, 2499), 'pytest.raises', 'pytest.raises', (['InvalidStateException'], {}), '(InvalidStateException)\n', (2476, 2499), False, 'import pytest\n'), ((2657, 2693), 'pytest.raises', 'pytest.raises', (['InvalidStateException'], {}), '(InvalidStateException)\n', (2670, 2693), False, 'import pytest\n'), ((3243, 3279), 'pytest.raises', 'pytest.raises', (['InvalidStateException'], {}), '(InvalidStateException)\n', (3256, 3279), False, 'import pytest\n'), ((4793, 4829), 'pytest.raises', 'pytest.raises', (['InvalidStateException'], {}), '(InvalidStateException)\n', (4806, 4829), False, 'import pytest\n'), ((5281, 5317), 'pytest.raises', 'pytest.raises', (['InvalidStateException'], {}), '(InvalidStateException)\n', (5294, 5317), False, 'import pytest\n'), ((1266, 1277), 'os.urandom', 'urandom', (['(16)'], {}), '(16)\n', (1273, 1277), False, 'from os import urandom\n')]
|
from design_patterns.strategy.fahrenheit_celsius_strategy import ConverterStrategy
class ApplicationRunner:
def __init__(self, application):
self.application = application
def run(self):
self.application.init()
while not self.application.done:
self.application.idle()
self.application.cleanup()
if __name__ == '__main__': # pragma: no cover
runner = ApplicationRunner(ConverterStrategy())
runner.run()
|
[
"design_patterns.strategy.fahrenheit_celsius_strategy.ConverterStrategy"
] |
[((430, 449), 'design_patterns.strategy.fahrenheit_celsius_strategy.ConverterStrategy', 'ConverterStrategy', ([], {}), '()\n', (447, 449), False, 'from design_patterns.strategy.fahrenheit_celsius_strategy import ConverterStrategy\n')]
|
import json, os
labels = ['ID', '标题', '副标题', '总价', '总价单位', '均价', '小区名称', '所在区域', '房屋户型', '所在楼层', '建筑面积', '户型结构', '套内面积', '建筑类型',
'房屋朝向', '建筑结构', '装修情况', '梯户比例', '配备电梯', '产权年限', '挂牌时间', '交易权属', '上次交易', '房屋用途', '房屋年限', '产权所属', '抵押信息',
'房本备件', '房源标签', '税费解析', '交通出行', '核心卖点', '别墅类型', '售房详情', '周边配套', '小区介绍', '户型介绍', '装修描述', '权属抵押', '适宜人群',
'投资分析']
path1 = "data/"
path2 = "data2/"
files = os.listdir(path1)
for i in files:
with open(path1+i, 'r') as file:
tmp = json.load(file)
for j in range(len(tmp)):
for key in labels:
if key not in tmp[j]:
tmp[j][key] = ''
with open(path2+i, 'w') as file:
json.dump(tmp, file)
|
[
"json.dump",
"json.load",
"os.listdir"
] |
[((418, 435), 'os.listdir', 'os.listdir', (['path1'], {}), '(path1)\n', (428, 435), False, 'import json, os\n'), ((504, 519), 'json.load', 'json.load', (['file'], {}), '(file)\n', (513, 519), False, 'import json, os\n'), ((689, 709), 'json.dump', 'json.dump', (['tmp', 'file'], {}), '(tmp, file)\n', (698, 709), False, 'import json, os\n')]
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import os
import pandas as pd
from datetime import datetime, timedelta
from msrest.serialization import UTC
from azure.monitor.query import LogsQueryClient
from azure.identity import DefaultAzureCredential
credential = DefaultAzureCredential()
client = LogsQueryClient(credential)
# Response time trend
# request duration over the last 12 hours.
query = """AppRequests |
summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId"""
# returns LogsQueryResult
response = client.query_workspace(os.environ['LOG_WORKSPACE_ID'], query, timespan=timedelta(days=1))
try:
table = response.tables[0]
df = pd.DataFrame(table.rows, columns=[col.name for col in table.columns])
key_value = df.to_dict(orient='records')
print(key_value)
except TypeError:
print(response.error)
"""
[
{
'TimeGenerated': '2021-07-21T04:40:00Z',
'_ResourceId': '/subscriptions/faa080af....',
'avgRequestDuration': 19.7987
},
{
'TimeGenerated': '2021-07-21T04:50:00Z',
'_ResourceId': '/subscriptions/faa08....',
'avgRequestDuration': 33.9654
},
{
'TimeGenerated': '2021-07-21T05:00:00Z',
'_ResourceId': '/subscriptions/faa080....',
'avgRequestDuration': 44.13115
}
]
"""
|
[
"azure.monitor.query.LogsQueryClient",
"datetime.timedelta",
"pandas.DataFrame",
"azure.identity.DefaultAzureCredential"
] |
[((316, 340), 'azure.identity.DefaultAzureCredential', 'DefaultAzureCredential', ([], {}), '()\n', (338, 340), False, 'from azure.identity import DefaultAzureCredential\n'), ((350, 377), 'azure.monitor.query.LogsQueryClient', 'LogsQueryClient', (['credential'], {}), '(credential)\n', (365, 377), False, 'from azure.monitor.query import LogsQueryClient\n'), ((734, 803), 'pandas.DataFrame', 'pd.DataFrame', (['table.rows'], {'columns': '[col.name for col in table.columns]'}), '(table.rows, columns=[col.name for col in table.columns])\n', (746, 803), True, 'import pandas as pd\n'), ((669, 686), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (678, 686), False, 'from datetime import datetime, timedelta\n')]
|
import pickle
dbfile = open('people-pickle', 'rb') # use binary mode files in 3.X
db = pickle.load(dbfile)
for key in db:
print(key, '=>\n ', db[key])
print(db['sue']['name'])
|
[
"pickle.load"
] |
[((103, 122), 'pickle.load', 'pickle.load', (['dbfile'], {}), '(dbfile)\n', (114, 122), False, 'import pickle\n')]
|
import torch
import argparse
import os
import random
import numpy as np
from tensorboardX import SummaryWriter
from misc.utils import set_log, visualize
from torch.optim import SGD, Adam
from torch.nn.modules.loss import MSELoss
from inner_loop import InnerLoop
from omniglot_net import OmniglotNet
from score import *
from misc.batch_sampler import BatchSampler
from misc.replay_buffer import ReplayBuffer
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
class MetaLearner(object):
def __init__(self, log, tb_writer, args):
super(self.__class__, self).__init__()
self.log = log
self.tb_writer = tb_writer
self.args = args
self.loss_fn = MSELoss()
self.net = OmniglotNet(self.loss_fn, args).to(device)
self.fast_net = InnerLoop(self.loss_fn, args).to(device)
self.opt = Adam(self.net.parameters(), lr=args.meta_lr)
self.sampler = BatchSampler(args)
self.memory = ReplayBuffer()
def meta_update(self, episode_i, ls):
in_ = episode_i.observations[:, :, 0]
target = episode_i.rewards[:, :, 0]
# We use a dummy forward / backward pass to get the correct grads into self.net
loss, out = forward_pass(self.net, in_, target)
# Unpack the list of grad dicts
gradients = {k: sum(d[k] for d in ls) for k in ls[0].keys()}
# Register a hook on each parameter in the net that replaces the current dummy grad
# with our grads accumulated across the meta-batch
hooks = []
for (k, v) in self.net.named_parameters():
def get_closure():
key = k
def replace_grad(grad):
return gradients[key]
return replace_grad
hooks.append(v.register_hook(get_closure()))
# Compute grads for current step, replace with summed gradients as defined by hook
self.opt.zero_grad()
loss.backward()
# Update the net parameters with the accumulated gradient according to optimizer
self.opt.step()
# Remove the hooks before next training phase
for h in hooks:
h.remove()
def test(self, i_task, episode_i_):
predictions_ = []
for i_agent in range(self.args.n_agent):
test_net = OmniglotNet(self.loss_fn, self.args).to(device)
# Make a test net with same parameters as our current net
test_net.copy_weights(self.net)
test_opt = SGD(test_net.parameters(), lr=self.args.fast_lr)
episode_i = self.memory.storage[i_task - 1]
# Train on the train examples, using the same number of updates as in training
for i in range(self.args.fast_num_update):
in_ = episode_i.observations[:, :, i_agent]
target = episode_i.rewards[:, :, i_agent]
loss, _ = forward_pass(test_net, in_, target)
print("loss {} at {}".format(loss, i_task))
test_opt.zero_grad()
loss.backward()
test_opt.step()
# Evaluate the trained model on train and val examples
tloss, _ = evaluate(test_net, episode_i, i_agent)
vloss, prediction_ = evaluate(test_net, episode_i_, i_agent)
mtr_loss = tloss / 10.
mval_loss = vloss / 10.
print('-------------------------')
print('Meta train:', mtr_loss)
print('Meta val:', mval_loss)
print('-------------------------')
del test_net
predictions_.append(prediction_)
visualize(episode_i, episode_i_, predictions_, i_task, self.args)
def train(self):
for i_task in range(10000):
# Sample episode from current task
self.sampler.reset_task(i_task)
episodes = self.sampler.sample()
# Add to memory
self.memory.add(i_task, episodes)
# Evaluate on test tasks
if len(self.memory) > 1:
self.test(i_task, episodes)
# Collect a meta batch update
if len(self.memory) > 2:
meta_grads = []
for i in range(self.args.meta_batch_size):
if i == 0:
episodes_i = self.memory.storage[i_task - 1]
episodes_i_ = self.memory.storage[i_task]
else:
episodes_i, episodes_i_ = self.memory.sample()
self.fast_net.copy_weights(self.net)
for i_agent in range(self.args.n_agent):
meta_grad = self.fast_net.forward(episodes_i, episodes_i_, i_agent)
meta_grads.append(meta_grad)
# Perform the meta update
self.meta_update(episodes_i, meta_grads)
def main(args):
# Create dir
if not os.path.exists("./logs"):
os.makedirs("./logs")
if not os.path.exists("./pytorch_models"):
os.makedirs("./pytorch_models")
# Set logs
tb_writer = SummaryWriter('./logs/tb_{0}'.format(args.log_name))
log = set_log(args)
# Set seeds
random.seed(args.seed)
np.random.seed(args.seed)
torch.manual_seed(args.seed)
if device == torch.device("cuda"):
torch.cuda.manual_seed(args.seed)
torch.cuda.manual_seed_all(args.seed)
# Set the gpu
learner = MetaLearner(log, tb_writer, args)
learner.train()
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="")
# General
parser.add_argument(
"--policy-type", type=str,
choices=["discrete", "continuous", "normal"],
help="Policy type available only for discrete, normal, and continuous")
parser.add_argument(
"--learner-type", type=str,
choices=["meta", "finetune"],
help="Learner type available only for meta, finetune")
parser.add_argument(
"--n-hidden", default=64, type=int,
help="Number of hidden units")
parser.add_argument(
"--n-traj", default=1, type=int,
help="Number of trajectory to collect from each task")
# Meta-learning
parser.add_argument(
"--meta-batch-size", default=25, type=int,
help="Number of tasks to sample for meta parameter update")
parser.add_argument(
"--fast-num-update", default=5, type=int,
help="Number of updates for adaptation")
parser.add_argument(
"--meta-lr", default=0.03, type=float,
help="Meta learning rate")
parser.add_argument(
"--fast-lr", default=10.0, type=float,
help="Adaptation learning rate")
parser.add_argument(
"--first-order", action="store_true",
help="Adaptation learning rate")
# Env
parser.add_argument(
"--env-name", default="", type=str,
help="OpenAI gym environment name")
parser.add_argument(
"--ep-max-timesteps", default=10, type=int,
help="Episode is terminated when max timestep is reached.")
parser.add_argument(
"--n-agent", default=1, type=int,
help="Number of agents in the environment")
# Misc
parser.add_argument(
"--seed", default=0, type=int,
help="Sets Gym, PyTorch and Numpy seeds")
parser.add_argument(
"--prefix", default="", type=str,
help="Prefix for tb_writer and logging")
args = parser.parse_args()
# Set log name
args.log_name = \
"env::%s_seed::%s_learner_type::%s_meta_batch_size::%s_meta_lr::%s_fast_num_update::%s_" \
"fast_lr::%s_prefix::%s_log" % (
args.env_name, str(args.seed), args.learner_type, args.meta_batch_size, args.meta_lr,
args.fast_num_update, args.fast_lr, args.prefix)
main(args=args)
|
[
"numpy.random.seed",
"argparse.ArgumentParser",
"torch.nn.modules.loss.MSELoss",
"os.makedirs",
"torch.manual_seed",
"inner_loop.InnerLoop",
"misc.utils.set_log",
"os.path.exists",
"torch.cuda.manual_seed",
"omniglot_net.OmniglotNet",
"torch.cuda.manual_seed_all",
"misc.replay_buffer.ReplayBuffer",
"random.seed",
"torch.cuda.is_available",
"misc.batch_sampler.BatchSampler",
"torch.device",
"misc.utils.visualize"
] |
[((5202, 5215), 'misc.utils.set_log', 'set_log', (['args'], {}), '(args)\n', (5209, 5215), False, 'from misc.utils import set_log, visualize\n'), ((5241, 5263), 'random.seed', 'random.seed', (['args.seed'], {}), '(args.seed)\n', (5252, 5263), False, 'import random\n'), ((5268, 5293), 'numpy.random.seed', 'np.random.seed', (['args.seed'], {}), '(args.seed)\n', (5282, 5293), True, 'import numpy as np\n'), ((5298, 5326), 'torch.manual_seed', 'torch.manual_seed', (['args.seed'], {}), '(args.seed)\n', (5315, 5326), False, 'import torch\n'), ((5587, 5626), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '""""""'}), "(description='')\n", (5610, 5626), False, 'import argparse\n'), ((440, 465), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (463, 465), False, 'import torch\n'), ((706, 715), 'torch.nn.modules.loss.MSELoss', 'MSELoss', ([], {}), '()\n', (713, 715), False, 'from torch.nn.modules.loss import MSELoss\n'), ((942, 960), 'misc.batch_sampler.BatchSampler', 'BatchSampler', (['args'], {}), '(args)\n', (954, 960), False, 'from misc.batch_sampler import BatchSampler\n'), ((983, 997), 'misc.replay_buffer.ReplayBuffer', 'ReplayBuffer', ([], {}), '()\n', (995, 997), False, 'from misc.replay_buffer import ReplayBuffer\n'), ((3653, 3718), 'misc.utils.visualize', 'visualize', (['episode_i', 'episode_i_', 'predictions_', 'i_task', 'self.args'], {}), '(episode_i, episode_i_, predictions_, i_task, self.args)\n', (3662, 3718), False, 'from misc.utils import set_log, visualize\n'), ((4964, 4988), 'os.path.exists', 'os.path.exists', (['"""./logs"""'], {}), "('./logs')\n", (4978, 4988), False, 'import os\n'), ((4998, 5019), 'os.makedirs', 'os.makedirs', (['"""./logs"""'], {}), "('./logs')\n", (5009, 5019), False, 'import os\n'), ((5031, 5065), 'os.path.exists', 'os.path.exists', (['"""./pytorch_models"""'], {}), "('./pytorch_models')\n", (5045, 5065), False, 'import os\n'), ((5075, 5106), 'os.makedirs', 'os.makedirs', 
(['"""./pytorch_models"""'], {}), "('./pytorch_models')\n", (5086, 5106), False, 'import os\n'), ((5344, 5364), 'torch.device', 'torch.device', (['"""cuda"""'], {}), "('cuda')\n", (5356, 5364), False, 'import torch\n'), ((5374, 5407), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', (['args.seed'], {}), '(args.seed)\n', (5396, 5407), False, 'import torch\n'), ((5416, 5453), 'torch.cuda.manual_seed_all', 'torch.cuda.manual_seed_all', (['args.seed'], {}), '(args.seed)\n', (5442, 5453), False, 'import torch\n'), ((745, 776), 'omniglot_net.OmniglotNet', 'OmniglotNet', (['self.loss_fn', 'args'], {}), '(self.loss_fn, args)\n', (756, 776), False, 'from omniglot_net import OmniglotNet\n'), ((813, 842), 'inner_loop.InnerLoop', 'InnerLoop', (['self.loss_fn', 'args'], {}), '(self.loss_fn, args)\n', (822, 842), False, 'from inner_loop import InnerLoop\n'), ((2339, 2375), 'omniglot_net.OmniglotNet', 'OmniglotNet', (['self.loss_fn', 'self.args'], {}), '(self.loss_fn, self.args)\n', (2350, 2375), False, 'from omniglot_net import OmniglotNet\n')]
|
"""
733. Flood Fill
An image is represented by an m x n integer grid image where image[i][j] represents the pixel value of the image.
You are also given three integers sr, sc, and newColor. You should perform a flood fill on the image starting from the pixel image[sr][sc].
To perform a flood fill, consider the starting pixel, plus any pixels connected 4-directionally to the starting pixel of the same color as the starting pixel, plus any pixels connected 4-directionally to those pixels (also with the same color), and so on. Replace the color of all of the aforementioned pixels with newColor.
Return the modified image after performing the flood fill.
Example 1:
Input: image = [[1,1,1],[1,1,0],[1,0,1]], sr = 1, sc = 1, newColor = 2
Output: [[2,2,2],[2,2,0],[2,0,1]]
Explanation: From the center of the image with position (sr, sc) = (1, 1) (i.e., the red pixel), all pixels connected by a path of the same color as the starting pixel (i.e., the blue pixels) are colored with the new color.
Note the bottom corner is not colored 2, because it is not 4-directionally connected to the starting pixel.
Example 2:
Input: image = [[0,0,0],[0,0,0]], sr = 0, sc = 0, newColor = 2
Output: [[2,2,2],[2,2,2]]
Constraints:
m == image.length
n == image[i].length
1 <= m, n <= 50
0 <= image[i][j], newColor < 216
0 <= sr < m
0 <= sc < n
来源:力扣(LeetCode)
链接:https://leetcode-cn.com/problems/flood-fill
著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
"""
import collections
from typing import List
class Solution:
def floodFill(
self, image: List[List[int]], sr: int, sc: int, newColor: int
) -> List[List[int]]:
curColor = image[sr][sc]
if curColor == newColor:
return image
n, m = len(image), len(image[0])
que = collections.deque([(sr, sc)])
image[sr][sc] = newColor
while que:
(x, y) = que.popleft()
for mx, my in [(x - 1, y), (x + 1, y), (x, y - 1), (x, y + 1)]:
if 0 <= mx < n and 0 <= my < m and image[mx][my] == curColor:
que.append((mx, my))
image[mx][my] = newColor
return image
if __name__ == "__main__":
image = [[1, 1, 1], [1, 1, 0], [1, 0, 1]]
sr = 1
sc = 1
newColor = 2
print(
"Input: image = {}, sr = {}, sc = {}, newColor = {}".format(
image, sr, sc, newColor
)
)
print("Output:", Solution().floodFill(image, sr, sc, newColor))
|
[
"collections.deque"
] |
[((1769, 1798), 'collections.deque', 'collections.deque', (['[(sr, sc)]'], {}), '([(sr, sc)])\n', (1786, 1798), False, 'import collections\n')]
|
import time
import random
import requests
# def tieba_spider(keyword, page_start, page_end):
# url = 'https://tieba.baidu.com/f?kw={}&ie=utf-8&pn={}'
# headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64)'
# ' AppleWebKit/535.1 (KHTML, like Gecko) Chrome/14.0.835.163 Safari/535.1'}
# for page in range(page_start, page_end):
# res = requests.get(url=url.format(keyword, (page - 1) * 50), headers=headers).content.decode(encoding='utf8',
# errors='ignore')
# filename = './tieba/{}吧_第{}页.html'.format(keyword, page)
# with open(filename, 'w') as f:
# f.write(res)
#
#
# keyword = input('关键字')
# page_start = input('开始页')
# page_end = input('结束页')
# print(keyword,page_start,page_end)
# tieba_spider(keyword, page_start, page_end)
class TieBaSpider:
def __init__(self):
self.url = url = 'https://tieba.baidu.com/f?kw={}&ie=utf-8&pn={}'
self.headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64)'
' AppleWebKit/535.1 (KHTML, like Gecko) Chrome/14.0.835.163 Safari/535.1'}
def get_html(self, url):
"""请求函数"""
html = requests.get(url=url, headers=self.headers).text
return html
def parse_html(self):
"""解析函数"""
pass
def save_html(self, filename, html):
"""数据处理"""
with open(filename, 'w') as f:
f.write(html)
def crawl(self):
"""爬虫逻辑函数"""
name = input('贴吧名:')
start = int(input('开始页'))
end = int(input('结束页'))
for page in range(start, end + 1):
url = self.url.format(name, (page - 1) * 50)
html = self.get_html(url)
filename = './tieba/{}吧_第{}页.html'.format(name, page)
self.save_html(filename, html)
time.sleep(random.randint(1, 3))
if __name__ == '__main__':
spider = TieBaSpider()
spider.crawl()
|
[
"random.randint",
"requests.get"
] |
[((1290, 1333), 'requests.get', 'requests.get', ([], {'url': 'url', 'headers': 'self.headers'}), '(url=url, headers=self.headers)\n', (1302, 1333), False, 'import requests\n'), ((1953, 1973), 'random.randint', 'random.randint', (['(1)', '(3)'], {}), '(1, 3)\n', (1967, 1973), False, 'import random\n')]
|
import numpy as np
from pandas import (
DataFrame,
IndexSlice,
)
class Render:
params = [[12, 24, 36], [12, 120]]
param_names = ["cols", "rows"]
def setup(self, cols, rows):
self.df = DataFrame(
np.random.randn(rows, cols),
columns=[f"float_{i+1}" for i in range(cols)],
index=[f"row_{i+1}" for i in range(rows)],
)
def time_apply_render(self, cols, rows):
self._style_apply()
self.st._render_html(True, True)
def peakmem_apply_render(self, cols, rows):
self._style_apply()
self.st._render_html(True, True)
def time_classes_render(self, cols, rows):
self._style_classes()
self.st._render_html(True, True)
def peakmem_classes_render(self, cols, rows):
self._style_classes()
self.st._render_html(True, True)
def time_tooltips_render(self, cols, rows):
self._style_tooltips()
self.st._render_html(True, True)
def peakmem_tooltips_render(self, cols, rows):
self._style_tooltips()
self.st._render_html(True, True)
def time_format_render(self, cols, rows):
self._style_format()
self.st._render_html(True, True)
def peakmem_format_render(self, cols, rows):
self._style_format()
self.st._render_html(True, True)
def time_apply_format_hide_render(self, cols, rows):
self._style_apply_format_hide()
self.st._render_html(True, True)
def peakmem_apply_format_hide_render(self, cols, rows):
self._style_apply_format_hide()
self.st._render_html(True, True)
def _style_apply(self):
def _apply_func(s):
return [
"background-color: lightcyan" if s.name == "row_1" else "" for v in s
]
self.st = self.df.style.apply(_apply_func, axis=1)
def _style_classes(self):
classes = self.df.applymap(lambda v: ("cls-1" if v > 0 else ""))
classes.index, classes.columns = self.df.index, self.df.columns
self.st = self.df.style.set_td_classes(classes)
def _style_format(self):
ic = int(len(self.df.columns) / 4 * 3)
ir = int(len(self.df.index) / 4 * 3)
# apply a formatting function
# subset is flexible but hinders vectorised solutions
self.st = self.df.style.format(
"{:,.3f}", subset=IndexSlice["row_1":f"row_{ir}", "float_1":f"float_{ic}"]
)
def _style_apply_format_hide(self):
self.st = self.df.style.applymap(lambda v: "color: red;")
self.st.format("{:.3f}")
self.st.hide_index(self.st.index[1:])
self.st.hide_columns(self.st.columns[1:])
def _style_tooltips(self):
ttips = DataFrame("abc", index=self.df.index[::2], columns=self.df.columns[::2])
self.st = self.df.style.set_tooltips(ttips)
self.st.hide_index(self.st.index[12:])
self.st.hide_columns(self.st.columns[12:])
|
[
"pandas.DataFrame",
"numpy.random.randn"
] |
[((2743, 2815), 'pandas.DataFrame', 'DataFrame', (['"""abc"""'], {'index': 'self.df.index[::2]', 'columns': 'self.df.columns[::2]'}), "('abc', index=self.df.index[::2], columns=self.df.columns[::2])\n", (2752, 2815), False, 'from pandas import DataFrame, IndexSlice\n'), ((240, 267), 'numpy.random.randn', 'np.random.randn', (['rows', 'cols'], {}), '(rows, cols)\n', (255, 267), True, 'import numpy as np\n')]
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from typing import List
from unittest.mock import patch
from unittest import TestCase
import genty
import numpy as np
from . import core
@genty.genty
class BenchmarkTests(TestCase):
@genty.genty_dataset( # type: ignore
bragg=("bragg", [2.93, 2.18, 2.35, 2.12, 31.53, 15.98, 226.69, 193.11]),
morpho=("morpho", [280.36, 52.96, 208.16, 72.69, 89.92, 60.37, 226.69, 193.11]),
chirped=("chirped", [280.36, 52.96, 104.08, 36.34, 31.53, 15.98, 226.69, 193.11]),
)
def test_photonics_transforms(self, pb: str, expected: List[float]) -> None:
np.random.seed(24)
with patch("shutil.which", return_value="here"):
func = core.Photonics(pb, 16) # should be 8... but it is actually not allowed. Nevermind here
x = np.random.normal(0, 1, size=8)
output = func.transform(x)
np.testing.assert_almost_equal(output, expected, decimal=2)
np.random.seed(24)
x2 = np.random.normal(0, 1, size=8)
np.testing.assert_almost_equal(x, x2, decimal=2, err_msg="x was modified in the process")
def test_tanh_crop() -> None:
output = core.tanh_crop([-1e9, 1e9, 0], -12, 16)
np.testing.assert_almost_equal(output, [-12, 16, 2])
def test_morpho_transform_constraints() -> None:
with patch("shutil.which", return_value="here"):
func = core.Photonics("morpho", 60)
x = np.random.normal(0, 5, size=60) # std 5 to play with boundaries
output = func.transform(x)
assert np.all(output >= 0)
q = len(x) // 4
assert np.all(output[:q] <= 300)
assert np.all(output[q: 3 * q] <= 600)
assert np.all(output[2 * q: 3 * q] >= 30)
assert np.all(output[3 * q:] <= 300)
def test_photonics() -> None:
with patch("shutil.which", return_value="here"):
photo = core.Photonics("bragg", 16)
with patch("nevergrad.instrumentation.utils.CommandFunction.__call__", return_value="line1\n12\n"):
with patch("nevergrad.instrumentation.utils.CommandFunction.__call__", return_value="line1\n12\n"):
output = photo(np.zeros(16))
np.testing.assert_equal(output, 12)
# check error
with patch("nevergrad.instrumentation.utils.CommandFunction.__call__", return_value="line1\n"):
np.testing.assert_raises(RuntimeError, photo, np.zeros(16).tolist())
np.testing.assert_raises(AssertionError, photo, np.zeros(12).tolist())
|
[
"numpy.random.seed",
"genty.genty_dataset",
"numpy.testing.assert_almost_equal",
"numpy.zeros",
"unittest.mock.patch",
"numpy.testing.assert_equal",
"numpy.random.normal",
"numpy.all"
] |
[((389, 660), 'genty.genty_dataset', 'genty.genty_dataset', ([], {'bragg': "('bragg', [2.93, 2.18, 2.35, 2.12, 31.53, 15.98, 226.69, 193.11])", 'morpho': "('morpho', [280.36, 52.96, 208.16, 72.69, 89.92, 60.37, 226.69, 193.11])", 'chirped': "('chirped', [280.36, 52.96, 104.08, 36.34, 31.53, 15.98, 226.69, 193.11])"}), "(bragg=('bragg', [2.93, 2.18, 2.35, 2.12, 31.53, 15.98, \n 226.69, 193.11]), morpho=('morpho', [280.36, 52.96, 208.16, 72.69, \n 89.92, 60.37, 226.69, 193.11]), chirped=('chirped', [280.36, 52.96, \n 104.08, 36.34, 31.53, 15.98, 226.69, 193.11]))\n", (408, 660), False, 'import genty\n'), ((1369, 1421), 'numpy.testing.assert_almost_equal', 'np.testing.assert_almost_equal', (['output', '[-12, 16, 2]'], {}), '(output, [-12, 16, 2])\n', (1399, 1421), True, 'import numpy as np\n'), ((1578, 1609), 'numpy.random.normal', 'np.random.normal', (['(0)', '(5)'], {'size': '(60)'}), '(0, 5, size=60)\n', (1594, 1609), True, 'import numpy as np\n'), ((1685, 1704), 'numpy.all', 'np.all', (['(output >= 0)'], {}), '(output >= 0)\n', (1691, 1704), True, 'import numpy as np\n'), ((1736, 1761), 'numpy.all', 'np.all', (['(output[:q] <= 300)'], {}), '(output[:q] <= 300)\n', (1742, 1761), True, 'import numpy as np\n'), ((1773, 1803), 'numpy.all', 'np.all', (['(output[q:3 * q] <= 600)'], {}), '(output[q:3 * q] <= 600)\n', (1779, 1803), True, 'import numpy as np\n'), ((1816, 1849), 'numpy.all', 'np.all', (['(output[2 * q:3 * q] >= 30)'], {}), '(output[2 * q:3 * q] >= 30)\n', (1822, 1849), True, 'import numpy as np\n'), ((1862, 1891), 'numpy.all', 'np.all', (['(output[3 * q:] <= 300)'], {}), '(output[3 * q:] <= 300)\n', (1868, 1891), True, 'import numpy as np\n'), ((2278, 2313), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (['output', '(12)'], {}), '(output, 12)\n', (2301, 2313), True, 'import numpy as np\n'), ((782, 800), 'numpy.random.seed', 'np.random.seed', (['(24)'], {}), '(24)\n', (796, 800), True, 'import numpy as np\n'), ((977, 1007), 'numpy.random.normal', 
'np.random.normal', (['(0)', '(1)'], {'size': '(8)'}), '(0, 1, size=8)\n', (993, 1007), True, 'import numpy as np\n'), ((1051, 1110), 'numpy.testing.assert_almost_equal', 'np.testing.assert_almost_equal', (['output', 'expected'], {'decimal': '(2)'}), '(output, expected, decimal=2)\n', (1081, 1110), True, 'import numpy as np\n'), ((1119, 1137), 'numpy.random.seed', 'np.random.seed', (['(24)'], {}), '(24)\n', (1133, 1137), True, 'import numpy as np\n'), ((1151, 1181), 'numpy.random.normal', 'np.random.normal', (['(0)', '(1)'], {'size': '(8)'}), '(0, 1, size=8)\n', (1167, 1181), True, 'import numpy as np\n'), ((1190, 1284), 'numpy.testing.assert_almost_equal', 'np.testing.assert_almost_equal', (['x', 'x2'], {'decimal': '(2)', 'err_msg': '"""x was modified in the process"""'}), "(x, x2, decimal=2, err_msg=\n 'x was modified in the process')\n", (1220, 1284), True, 'import numpy as np\n'), ((1482, 1524), 'unittest.mock.patch', 'patch', (['"""shutil.which"""'], {'return_value': '"""here"""'}), "('shutil.which', return_value='here')\n", (1487, 1524), False, 'from unittest.mock import patch\n'), ((1933, 1975), 'unittest.mock.patch', 'patch', (['"""shutil.which"""'], {'return_value': '"""here"""'}), "('shutil.which', return_value='here')\n", (1938, 1975), False, 'from unittest.mock import patch\n'), ((2030, 2127), 'unittest.mock.patch', 'patch', (['"""nevergrad.instrumentation.utils.CommandFunction.__call__"""'], {'return_value': '"""line1\n12\n"""'}), "('nevergrad.instrumentation.utils.CommandFunction.__call__',\n return_value='line1\\n12\\n')\n", (2035, 2127), False, 'from unittest.mock import patch\n'), ((2341, 2434), 'unittest.mock.patch', 'patch', (['"""nevergrad.instrumentation.utils.CommandFunction.__call__"""'], {'return_value': '"""line1\n"""'}), "('nevergrad.instrumentation.utils.CommandFunction.__call__',\n return_value='line1\\n')\n", (2346, 2434), False, 'from unittest.mock import patch\n'), ((814, 856), 'unittest.mock.patch', 'patch', (['"""shutil.which"""'], 
{'return_value': '"""here"""'}), "('shutil.which', return_value='here')\n", (819, 856), False, 'from unittest.mock import patch\n'), ((2138, 2235), 'unittest.mock.patch', 'patch', (['"""nevergrad.instrumentation.utils.CommandFunction.__call__"""'], {'return_value': '"""line1\n12\n"""'}), "('nevergrad.instrumentation.utils.CommandFunction.__call__',\n return_value='line1\\n12\\n')\n", (2143, 2235), False, 'from unittest.mock import patch\n'), ((2260, 2272), 'numpy.zeros', 'np.zeros', (['(16)'], {}), '(16)\n', (2268, 2272), True, 'import numpy as np\n'), ((2561, 2573), 'numpy.zeros', 'np.zeros', (['(12)'], {}), '(12)\n', (2569, 2573), True, 'import numpy as np\n'), ((2486, 2498), 'numpy.zeros', 'np.zeros', (['(16)'], {}), '(16)\n', (2494, 2498), True, 'import numpy as np\n')]
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe, erpnext
import frappe.defaults
from frappe import msgprint, _
from frappe.utils import cstr, flt, cint
from erpnext.controllers.stock_controller import StockController
from erpnext.accounts.utils import get_company_default
from erpnext.stock.doctype.serial_no.serial_no import get_serial_nos
from erpnext.stock.utils import get_stock_balance, get_incoming_rate, get_available_serial_nos
from erpnext.stock.doctype.batch.batch import get_batch_qty
class OpeningEntryAccountError(frappe.ValidationError): pass
class EmptyStockReconciliationItemsError(frappe.ValidationError): pass
class StockReconciliation(StockController):
def __init__(self, *args, **kwargs):
super(StockReconciliation, self).__init__(*args, **kwargs)
self.head_row = ["Item Code", "Warehouse", "Quantity", "Valuation Rate"]
def validate(self):
if not self.expense_account:
self.expense_account = frappe.get_cached_value('Company', self.company, "stock_adjustment_account")
if not self.cost_center:
self.cost_center = frappe.get_cached_value('Company', self.company, "cost_center")
self.validate_posting_time()
self.remove_items_with_no_change()
self.validate_data()
self.validate_expense_account()
self.set_total_qty_and_amount()
self.validate_putaway_capacity()
if self._action=="submit":
self.make_batches('warehouse')
def on_submit(self):
self.update_stock_ledger()
self.make_gl_entries()
self.repost_future_sle_and_gle()
from erpnext.stock.doctype.serial_no.serial_no import update_serial_nos_after_submit
update_serial_nos_after_submit(self, "items")
def on_cancel(self):
self.ignore_linked_doctypes = ('GL Entry', 'Stock Ledger Entry', 'Repost Item Valuation')
self.make_sle_on_cancel()
self.make_gl_entries_on_cancel()
self.repost_future_sle_and_gle()
def remove_items_with_no_change(self):
"""Remove items if qty or rate is not changed"""
self.difference_amount = 0.0
def _changed(item):
item_dict = get_stock_balance_for(item.item_code, item.warehouse,
self.posting_date, self.posting_time, batch_no=item.batch_no)
if ((item.qty is None or item.qty==item_dict.get("qty")) and
(item.valuation_rate is None or item.valuation_rate==item_dict.get("rate")) and
(not item.serial_no or (item.serial_no == item_dict.get("serial_nos")) )):
return False
else:
# set default as current rates
if item.qty is None:
item.qty = item_dict.get("qty")
if item.valuation_rate is None:
item.valuation_rate = item_dict.get("rate")
if item_dict.get("serial_nos"):
item.current_serial_no = item_dict.get("serial_nos")
if self.purpose == "Stock Reconciliation":
item.serial_no = item.current_serial_no
item.current_qty = item_dict.get("qty")
item.current_valuation_rate = item_dict.get("rate")
self.difference_amount += (flt(item.qty, item.precision("qty")) * \
flt(item.valuation_rate or item_dict.get("rate"), item.precision("valuation_rate")) \
- flt(item_dict.get("qty"), item.precision("qty")) * flt(item_dict.get("rate"), item.precision("valuation_rate")))
return True
items = list(filter(lambda d: _changed(d), self.items))
if not items:
frappe.throw(_("None of the items have any change in quantity or value."),
EmptyStockReconciliationItemsError)
elif len(items) != len(self.items):
self.items = items
for i, item in enumerate(self.items):
item.idx = i + 1
frappe.msgprint(_("Removed items with no change in quantity or value."))
def validate_data(self):
def _get_msg(row_num, msg):
return _("Row # {0}: ").format(row_num+1) + msg
self.validation_messages = []
item_warehouse_combinations = []
default_currency = frappe.db.get_default("currency")
for row_num, row in enumerate(self.items):
# find duplicates
key = [row.item_code, row.warehouse]
for field in ['serial_no', 'batch_no']:
if row.get(field):
key.append(row.get(field))
if key in item_warehouse_combinations:
self.validation_messages.append(_get_msg(row_num, _("Duplicate entry")))
else:
item_warehouse_combinations.append(key)
self.validate_item(row.item_code, row)
# validate warehouse
if not frappe.db.get_value("Warehouse", row.warehouse):
self.validation_messages.append(_get_msg(row_num, _("Warehouse not found in the system")))
# if both not specified
if row.qty in ["", None] and row.valuation_rate in ["", None]:
self.validation_messages.append(_get_msg(row_num,
_("Please specify either Quantity or Valuation Rate or both")))
# do not allow negative quantity
if flt(row.qty) < 0:
self.validation_messages.append(_get_msg(row_num,
_("Negative Quantity is not allowed")))
# do not allow negative valuation
if flt(row.valuation_rate) < 0:
self.validation_messages.append(_get_msg(row_num,
_("Negative Valuation Rate is not allowed")))
if row.qty and row.valuation_rate in ["", None]:
row.valuation_rate = get_stock_balance(row.item_code, row.warehouse,
self.posting_date, self.posting_time, with_valuation_rate=True)[1]
if not row.valuation_rate:
# try if there is a buying price list in default currency
buying_rate = frappe.db.get_value("Item Price", {"item_code": row.item_code,
"buying": 1, "currency": default_currency}, "price_list_rate")
if buying_rate:
row.valuation_rate = buying_rate
else:
# get valuation rate from Item
row.valuation_rate = frappe.get_value('Item', row.item_code, 'valuation_rate')
# throw all validation messages
if self.validation_messages:
for msg in self.validation_messages:
msgprint(msg)
raise frappe.ValidationError(self.validation_messages)
def validate_item(self, item_code, row):
from erpnext.stock.doctype.item.item import validate_end_of_life, \
validate_is_stock_item, validate_cancelled_item
# using try except to catch all validation msgs and display together
try:
item = frappe.get_doc("Item", item_code)
# end of life and stock item
validate_end_of_life(item_code, item.end_of_life, item.disabled, verbose=0)
validate_is_stock_item(item_code, item.is_stock_item, verbose=0)
# item should not be serialized
if item.has_serial_no and not row.serial_no and not item.serial_no_series:
raise frappe.ValidationError(_("Serial no(s) required for serialized item {0}").format(item_code))
# item managed batch-wise not allowed
if item.has_batch_no and not row.batch_no and not item.create_new_batch:
raise frappe.ValidationError(_("Batch no is required for batched item {0}").format(item_code))
# docstatus should be < 2
validate_cancelled_item(item_code, item.docstatus, verbose=0)
except Exception as e:
self.validation_messages.append(_("Row # ") + ("%d: " % (row.idx)) + cstr(e))
	def update_stock_ledger(self):
		"""Find the difference between current and expected stock entries and
		create Stock Ledger Entries based on that difference.

		Serialized/batched rows are expanded via
		``get_sle_for_serialized_items``; plain rows are compared against the
		previous SLE and skipped when nothing changed.
		"""
		from erpnext.stock.stock_ledger import get_previous_sle
		sl_entries = []
		has_serial_no = False
		has_batch_no = False
		for row in self.items:
			item = frappe.get_doc("Item", row.item_code)
			if item.has_batch_no:
				has_batch_no = True
			if item.has_serial_no or item.has_batch_no:
				has_serial_no = True
				self.get_sle_for_serialized_items(row, sl_entries)
			else:
				# a plain item must not carry serial/batch numbers
				if row.serial_no or row.batch_no:
					frappe.throw(_("Row #{0}: Item {1} is not a Serialized/Batched Item. It cannot have a Serial No/Batch No against it.") \
						.format(row.idx, frappe.bold(row.item_code)))
				previous_sle = get_previous_sle({
					"item_code": row.item_code,
					"warehouse": row.warehouse,
					"posting_date": self.posting_date,
					"posting_time": self.posting_time
				})
				# fill missing qty / valuation rate from the last known ledger state
				if previous_sle:
					if row.qty in ("", None):
						row.qty = previous_sle.get("qty_after_transaction", 0)
					if row.valuation_rate in ("", None):
						row.valuation_rate = previous_sle.get("valuation_rate", 0)
				if row.qty and not row.valuation_rate:
					frappe.throw(_("Valuation Rate required for Item {0} at row {1}").format(row.item_code, row.idx))
				# skip rows that would produce no change in qty or valuation
				if ((previous_sle and row.qty == previous_sle.get("qty_after_transaction")
					and (row.valuation_rate == previous_sle.get("valuation_rate") or row.qty == 0))
					or (not previous_sle and not row.qty)):
					continue
				sl_entries.append(self.get_sle_for_items(row))
		if sl_entries:
			if has_serial_no:
				sl_entries = self.merge_similar_item_serial_nos(sl_entries)
			# batched items may temporarily drive stock negative during posting
			allow_negative_stock = False
			if has_batch_no:
				allow_negative_stock = True
			self.make_sl_entries(sl_entries, allow_negative_stock=allow_negative_stock)
		if has_serial_no and sl_entries:
			self.update_valuation_rate_for_serial_no()
	def get_sle_for_serialized_items(self, row, sl_entries):
		"""Append the Stock Ledger Entries needed for a serialized/batched row.

		Issues the currently-held qty/serial nos first, pulls serial nos that
		live in a different warehouse back out of that warehouse, then books
		the incoming qty at the row's valuation rate.  Entries are appended to
		*sl_entries* in place.
		"""
		from erpnext.stock.stock_ledger import get_previous_sle
		serial_nos = get_serial_nos(row.serial_no)
		# To issue existing serial nos
		if row.current_qty and (row.current_serial_no or row.batch_no):
			args = self.get_sle_for_items(row)
			args.update({
				'actual_qty': -1 * row.current_qty,
				'serial_no': row.current_serial_no,
				'batch_no': row.batch_no,
				'valuation_rate': row.current_valuation_rate
			})
			if row.current_serial_no:
				args.update({
					'qty_after_transaction': 0,
				})
			sl_entries.append(args)
		qty_after_transaction = 0
		for serial_no in serial_nos:
			args = self.get_sle_for_items(row, [serial_no])
			previous_sle = get_previous_sle({
				"item_code": row.item_code,
				"posting_date": self.posting_date,
				"posting_time": self.posting_time,
				"serial_no": serial_no
			})
			if previous_sle and row.warehouse != previous_sle.get("warehouse"):
				# If serial no exists in different warehouse, issue it from there
				warehouse = previous_sle.get("warehouse", '') or row.warehouse
				if not qty_after_transaction:
					# initialise running balance from the source warehouse once
					qty_after_transaction = get_stock_balance(row.item_code,
						warehouse, self.posting_date, self.posting_time)
				qty_after_transaction -= 1
				new_args = args.copy()
				new_args.update({
					'actual_qty': -1,
					'qty_after_transaction': qty_after_transaction,
					'warehouse': warehouse,
					'valuation_rate': previous_sle.get("valuation_rate")
				})
				sl_entries.append(new_args)
		if row.qty:
			# book the reconciled (incoming) quantity at the row's rate
			args = self.get_sle_for_items(row)
			args.update({
				'actual_qty': row.qty,
				'incoming_rate': row.valuation_rate,
				'valuation_rate': row.valuation_rate
			})
			sl_entries.append(args)
		if serial_nos == get_serial_nos(row.current_serial_no):
			# same serial set as before: only the valuation rate changed
			self.update_valuation_rate_for_serial_nos(row, serial_nos)
def update_valuation_rate_for_serial_no(self):
for d in self.items:
if not d.serial_no: continue
serial_nos = get_serial_nos(d.serial_no)
self.update_valuation_rate_for_serial_nos(d, serial_nos)
def update_valuation_rate_for_serial_nos(self, row, serial_nos):
valuation_rate = row.valuation_rate if self.docstatus == 1 else row.current_valuation_rate
if valuation_rate is None:
return
for d in serial_nos:
frappe.db.set_value("Serial No", d, 'purchase_rate', valuation_rate)
	def get_sle_for_items(self, row, serial_nos=None):
		"""Build (not insert) the Stock Ledger Entry dict for *row*.

		On cancellation (docstatus 2) of non-batched rows the entry reverses
		the reconciliation: it restores the previous qty/rate and negates the
		booked amount difference.
		"""
		if not serial_nos and row.serial_no:
			serial_nos = get_serial_nos(row.serial_no)
		data = frappe._dict({
			"doctype": "Stock Ledger Entry",
			"item_code": row.item_code,
			"warehouse": row.warehouse,
			"posting_date": self.posting_date,
			"posting_time": self.posting_time,
			"voucher_type": self.doctype,
			"voucher_no": self.name,
			"voucher_detail_no": row.name,
			"company": self.company,
			"stock_uom": frappe.db.get_value("Item", row.item_code, "stock_uom"),
			"is_cancelled": 1 if self.docstatus == 2 else 0,
			"serial_no": '\n'.join(serial_nos) if serial_nos else '',
			"batch_no": row.batch_no,
			"valuation_rate": flt(row.valuation_rate, row.precision("valuation_rate"))
		})
		if not row.batch_no:
			data.qty_after_transaction = flt(row.qty, row.precision("qty"))
		if self.docstatus == 2 and not row.batch_no:
			if row.current_qty:
				# reverse back to the pre-reconciliation quantity and rate
				data.actual_qty = -1 * row.current_qty
				data.qty_after_transaction = flt(row.current_qty)
				data.valuation_rate = flt(row.current_valuation_rate)
				data.stock_value = data.qty_after_transaction * data.valuation_rate
				data.stock_value_difference = -1 * flt(row.amount_difference)
			else:
				# nothing existed before the reconciliation: zero the balance
				data.actual_qty = row.qty
				data.qty_after_transaction = 0.0
				data.valuation_rate = flt(row.valuation_rate)
				data.stock_value_difference = -1 * flt(row.amount_difference)
		return data
	def make_sle_on_cancel(self):
		"""Create reversing Stock Ledger Entries when the document is cancelled.

		Builds one entry per row (using the row's current serial nos where
		present), merges duplicate serialized entries, and posts them in
		reverse order so the ledger unwinds cleanly.
		"""
		sl_entries = []
		has_serial_no = False
		for row in self.items:
			if row.serial_no or row.batch_no or row.current_serial_no:
				has_serial_no = True
				serial_nos = ''
				if row.current_serial_no:
					serial_nos = get_serial_nos(row.current_serial_no)
				sl_entries.append(self.get_sle_for_items(row, serial_nos))
			else:
				sl_entries.append(self.get_sle_for_items(row))
		if sl_entries:
			if has_serial_no:
				sl_entries = self.merge_similar_item_serial_nos(sl_entries)
			# reverse so entries are applied in the opposite order of submission
			sl_entries.reverse()
			allow_negative_stock = frappe.db.get_value("Stock Settings", None, "allow_negative_stock")
			self.make_sl_entries(sl_entries, allow_negative_stock=allow_negative_stock)
def merge_similar_item_serial_nos(self, sl_entries):
# If user has put the same item in multiple row with different serial no
new_sl_entries = []
merge_similar_entries = {}
for d in sl_entries:
if not d.serial_no or d.actual_qty < 0:
new_sl_entries.append(d)
continue
key = (d.item_code, d.warehouse)
if key not in merge_similar_entries:
merge_similar_entries[key] = d
elif d.serial_no:
data = merge_similar_entries[key]
data.actual_qty += d.actual_qty
data.qty_after_transaction += d.qty_after_transaction
data.valuation_rate = (data.valuation_rate + d.valuation_rate) / data.actual_qty
data.serial_no += '\n' + d.serial_no
if data.incoming_rate:
data.incoming_rate = (data.incoming_rate + d.incoming_rate) / data.actual_qty
for key, value in merge_similar_entries.items():
new_sl_entries.append(value)
return new_sl_entries
def get_gl_entries(self, warehouse_account=None):
if not self.cost_center:
msgprint(_("Please enter Cost Center"), raise_exception=1)
return super(StockReconciliation, self).get_gl_entries(warehouse_account,
self.expense_account, self.cost_center)
	def validate_expense_account(self):
		"""Validate the difference (expense) account.

		Only applies under perpetual inventory.  The account is mandatory,
		and for an opening entry (explicit "Opening Stock" purpose, or no
		Stock Ledger Entry exists yet) it must be a balance-sheet account.
		"""
		if not cint(erpnext.is_perpetual_inventory_enabled(self.company)):
			return
		if not self.expense_account:
			frappe.throw(_("Please enter Expense Account"))
		elif self.purpose == "Opening Stock" or not frappe.db.sql("""select name from `tabStock Ledger Entry` limit 1"""):
			# opening entries may not book against a P&L account
			if frappe.db.get_value("Account", self.expense_account, "report_type") == "Profit and Loss":
				frappe.throw(_("Difference Account must be a Asset/Liability type account, since this Stock Reconciliation is an Opening Entry"), OpeningEntryAccountError)
def set_total_qty_and_amount(self):
for d in self.get("items"):
d.amount = flt(d.qty, d.precision("qty")) * flt(d.valuation_rate, d.precision("valuation_rate"))
d.current_amount = (flt(d.current_qty,
d.precision("current_qty")) * flt(d.current_valuation_rate, d.precision("current_valuation_rate")))
d.quantity_difference = flt(d.qty) - flt(d.current_qty)
d.amount_difference = flt(d.amount) - flt(d.current_amount)
def get_items_for(self, warehouse):
self.items = []
for item in get_items(warehouse, self.posting_date, self.posting_time, self.company):
self.append("items", item)
def submit(self):
if len(self.items) > 100:
msgprint(_("The task has been enqueued as a background job. In case there is any issue on processing in background, the system will add a comment about the error on this Stock Reconciliation and revert to the Draft stage"))
self.queue_action('submit')
else:
self._submit()
@frappe.whitelist()
def get_items(warehouse, posting_date, posting_time, company):
	"""Return plain stock items (no variants, serial or batch nos) located in
	*warehouse*, with their balances as of the given posting date/time.
	"""
	# lft/rgt bound the warehouse subtree (nested-set model), so child
	# warehouses are covered by the "lft >= %s and rgt <= %s" conditions.
	lft, rgt = frappe.db.get_value("Warehouse", warehouse, ["lft", "rgt"])
	# items that currently have a Bin in a warehouse of the subtree
	items = frappe.db.sql("""
		select i.name, i.item_name, bin.warehouse
		from tabBin bin, tabItem i
		where i.name=bin.item_code and i.disabled=0 and i.is_stock_item = 1
		and i.has_variants = 0 and i.has_serial_no = 0 and i.has_batch_no = 0
		and exists(select name from `tabWarehouse` where lft >= %s and rgt <= %s and name=bin.warehouse)
	""", (lft, rgt))
	# plus items whose Item Default warehouse (for this company) is in the subtree
	items += frappe.db.sql("""
		select i.name, i.item_name, id.default_warehouse
		from tabItem i, `tabItem Default` id
		where i.name = id.parent
			and exists(select name from `tabWarehouse` where lft >= %s and rgt <= %s and name=id.default_warehouse)
			and i.is_stock_item = 1 and i.has_serial_no = 0 and i.has_batch_no = 0
			and i.has_variants = 0 and i.disabled = 0 and id.company=%s
		group by i.name
	""", (lft, rgt, company))
	res = []
	# set() drops duplicate (name, item_name, warehouse) tuples across queries
	for d in set(items):
		stock_bal = get_stock_balance(d[0], d[2], posting_date, posting_time,
			with_valuation_rate=True)
		# NOTE(review): one get_value per item (N+1 queries); the queries above
		# already filter disabled=0, so this re-check looks redundant -- confirm
		if frappe.db.get_value("Item", d[0], "disabled") == 0:
			res.append({
				"item_code": d[0],
				"warehouse": d[2],
				"qty": stock_bal[0],
				"item_name": d[1],
				"valuation_rate": stock_bal[1],
				"current_qty": stock_bal[0],
				"current_valuation_rate": stock_bal[1]
			})
	return res
@frappe.whitelist()
def get_stock_balance_for(item_code, warehouse,
	posting_date, posting_time, batch_no=None, with_valuation_rate=True):
	"""Whitelisted helper: return the qty, rate and serial nos of *item_code*
	in *warehouse* as of the given posting date/time.
	"""
	frappe.has_permission("Stock Reconciliation", "write", throw=True)
	item_dict = frappe.db.get_value("Item", item_code,
		["has_serial_no", "has_batch_no"], as_dict=1)
	with_serial_no = bool(item_dict.get("has_serial_no"))
	balance = get_stock_balance(item_code, warehouse, posting_date, posting_time,
		with_valuation_rate=with_valuation_rate, with_serial_no=with_serial_no)
	serial_nos = ""
	if with_serial_no:
		qty, rate, serial_nos = balance
	else:
		qty, rate = balance
	if item_dict.get("has_batch_no"):
		# batched items take their quantity from the batch ledger instead
		qty = get_batch_qty(batch_no, warehouse, posting_date=posting_date, posting_time=posting_time) or 0
	return {'qty': qty, 'rate': rate, 'serial_nos': serial_nos}
@frappe.whitelist()
def get_difference_account(purpose, company):
	"""Return the account used to book the stock value difference."""
	if purpose == 'Stock Reconciliation':
		return get_company_default(company, "stock_adjustment_account")
	# any other purpose books against a temporary account of the company
	return frappe.db.get_value('Account', {'is_group': 0,
		'company': company, 'account_type': 'Temporary'}, 'name')
|
[
"erpnext.accounts.utils.get_company_default",
"frappe.ValidationError",
"frappe.has_permission",
"erpnext.stock.doctype.item.item.validate_cancelled_item",
"erpnext.stock.doctype.batch.batch.get_batch_qty",
"erpnext.is_perpetual_inventory_enabled",
"frappe.bold",
"frappe.get_cached_value",
"erpnext.stock.doctype.serial_no.serial_no.update_serial_nos_after_submit",
"erpnext.stock.utils.get_stock_balance",
"frappe.db.sql",
"frappe.get_doc",
"frappe._",
"frappe.utils.flt",
"frappe.msgprint",
"frappe.whitelist",
"erpnext.stock.doctype.item.item.validate_is_stock_item",
"erpnext.stock.stock_ledger.get_previous_sle",
"erpnext.stock.doctype.serial_no.serial_no.get_serial_nos",
"frappe.get_value",
"frappe.utils.cstr",
"frappe.db.get_value",
"frappe.db.set_value",
"frappe.db.get_default",
"erpnext.stock.doctype.item.item.validate_end_of_life"
] |
[((16167, 16185), 'frappe.whitelist', 'frappe.whitelist', ([], {}), '()\n', (16183, 16185), False, 'import frappe, erpnext\n'), ((17550, 17568), 'frappe.whitelist', 'frappe.whitelist', ([], {}), '()\n', (17566, 17568), False, 'import frappe, erpnext\n'), ((18385, 18403), 'frappe.whitelist', 'frappe.whitelist', ([], {}), '()\n', (18401, 18403), False, 'import frappe, erpnext\n'), ((16261, 16320), 'frappe.db.get_value', 'frappe.db.get_value', (['"""Warehouse"""', 'warehouse', "['lft', 'rgt']"], {}), "('Warehouse', warehouse, ['lft', 'rgt'])\n", (16280, 16320), False, 'import frappe, erpnext\n'), ((16330, 16689), 'frappe.db.sql', 'frappe.db.sql', (['"""\n\t\tselect i.name, i.item_name, bin.warehouse\n\t\tfrom tabBin bin, tabItem i\n\t\twhere i.name=bin.item_code and i.disabled=0 and i.is_stock_item = 1\n\t\tand i.has_variants = 0 and i.has_serial_no = 0 and i.has_batch_no = 0\n\t\tand exists(select name from `tabWarehouse` where lft >= %s and rgt <= %s and name=bin.warehouse)\n\t"""', '(lft, rgt)'], {}), '(\n """\n\t\tselect i.name, i.item_name, bin.warehouse\n\t\tfrom tabBin bin, tabItem i\n\t\twhere i.name=bin.item_code and i.disabled=0 and i.is_stock_item = 1\n\t\tand i.has_variants = 0 and i.has_serial_no = 0 and i.has_batch_no = 0\n\t\tand exists(select name from `tabWarehouse` where lft >= %s and rgt <= %s and name=bin.warehouse)\n\t"""\n , (lft, rgt))\n', (16343, 16689), False, 'import frappe, erpnext\n'), ((16691, 17124), 'frappe.db.sql', 'frappe.db.sql', (['"""\n\t\tselect i.name, i.item_name, id.default_warehouse\n\t\tfrom tabItem i, `tabItem Default` id\n\t\twhere i.name = id.parent\n\t\t\tand exists(select name from `tabWarehouse` where lft >= %s and rgt <= %s and name=id.default_warehouse)\n\t\t\tand i.is_stock_item = 1 and i.has_serial_no = 0 and i.has_batch_no = 0\n\t\t\tand i.has_variants = 0 and i.disabled = 0 and id.company=%s\n\t\tgroup by i.name\n\t"""', '(lft, rgt, company)'], {}), '(\n """\n\t\tselect i.name, i.item_name, 
id.default_warehouse\n\t\tfrom tabItem i, `tabItem Default` id\n\t\twhere i.name = id.parent\n\t\t\tand exists(select name from `tabWarehouse` where lft >= %s and rgt <= %s and name=id.default_warehouse)\n\t\t\tand i.is_stock_item = 1 and i.has_serial_no = 0 and i.has_batch_no = 0\n\t\t\tand i.has_variants = 0 and i.disabled = 0 and id.company=%s\n\t\tgroup by i.name\n\t"""\n , (lft, rgt, company))\n', (16704, 17124), False, 'import frappe, erpnext\n'), ((17690, 17756), 'frappe.has_permission', 'frappe.has_permission', (['"""Stock Reconciliation"""', '"""write"""'], {'throw': '(True)'}), "('Stock Reconciliation', 'write', throw=True)\n", (17711, 17756), False, 'import frappe, erpnext\n'), ((17773, 17861), 'frappe.db.get_value', 'frappe.db.get_value', (['"""Item"""', 'item_code', "['has_serial_no', 'has_batch_no']"], {'as_dict': '(1)'}), "('Item', item_code, ['has_serial_no', 'has_batch_no'],\n as_dict=1)\n", (17792, 17861), False, 'import frappe, erpnext\n'), ((17954, 18097), 'erpnext.stock.utils.get_stock_balance', 'get_stock_balance', (['item_code', 'warehouse', 'posting_date', 'posting_time'], {'with_valuation_rate': 'with_valuation_rate', 'with_serial_no': 'with_serial_no'}), '(item_code, warehouse, posting_date, posting_time,\n with_valuation_rate=with_valuation_rate, with_serial_no=with_serial_no)\n', (17971, 18097), False, 'from erpnext.stock.utils import get_stock_balance, get_incoming_rate, get_available_serial_nos\n'), ((1715, 1760), 'erpnext.stock.doctype.serial_no.serial_no.update_serial_nos_after_submit', 'update_serial_nos_after_submit', (['self', '"""items"""'], {}), "(self, 'items')\n", (1745, 1760), False, 'from erpnext.stock.doctype.serial_no.serial_no import update_serial_nos_after_submit\n'), ((3871, 3904), 'frappe.db.get_default', 'frappe.db.get_default', (['"""currency"""'], {}), "('currency')\n", (3892, 3904), False, 'import frappe, erpnext\n'), ((9059, 9088), 'erpnext.stock.doctype.serial_no.serial_no.get_serial_nos', 'get_serial_nos', 
(['row.serial_no'], {}), '(row.serial_no)\n', (9073, 9088), False, 'from erpnext.stock.doctype.serial_no.serial_no import get_serial_nos\n'), ((17162, 17249), 'erpnext.stock.utils.get_stock_balance', 'get_stock_balance', (['d[0]', 'd[2]', 'posting_date', 'posting_time'], {'with_valuation_rate': '(True)'}), '(d[0], d[2], posting_date, posting_time,\n with_valuation_rate=True)\n', (17179, 17249), False, 'from erpnext.stock.utils import get_stock_balance, get_incoming_rate, get_available_serial_nos\n'), ((18501, 18557), 'erpnext.accounts.utils.get_company_default', 'get_company_default', (['company', '"""stock_adjustment_account"""'], {}), "(company, 'stock_adjustment_account')\n", (18520, 18557), False, 'from erpnext.accounts.utils import get_company_default\n'), ((18577, 18685), 'frappe.db.get_value', 'frappe.db.get_value', (['"""Account"""', "{'is_group': 0, 'company': company, 'account_type': 'Temporary'}", '"""name"""'], {}), "('Account', {'is_group': 0, 'company': company,\n 'account_type': 'Temporary'}, 'name')\n", (18596, 18685), False, 'import frappe, erpnext\n'), ((1061, 1137), 'frappe.get_cached_value', 'frappe.get_cached_value', (['"""Company"""', 'self.company', '"""stock_adjustment_account"""'], {}), "('Company', self.company, 'stock_adjustment_account')\n", (1084, 1137), False, 'import frappe, erpnext\n'), ((1189, 1252), 'frappe.get_cached_value', 'frappe.get_cached_value', (['"""Company"""', 'self.company', '"""cost_center"""'], {}), "('Company', self.company, 'cost_center')\n", (1212, 1252), False, 'import frappe, erpnext\n'), ((5833, 5881), 'frappe.ValidationError', 'frappe.ValidationError', (['self.validation_messages'], {}), '(self.validation_messages)\n', (5855, 5881), False, 'import frappe, erpnext\n'), ((6136, 6169), 'frappe.get_doc', 'frappe.get_doc', (['"""Item"""', 'item_code'], {}), "('Item', item_code)\n", (6150, 6169), False, 'import frappe, erpnext\n'), ((6206, 6281), 'erpnext.stock.doctype.item.item.validate_end_of_life', 
'validate_end_of_life', (['item_code', 'item.end_of_life', 'item.disabled'], {'verbose': '(0)'}), '(item_code, item.end_of_life, item.disabled, verbose=0)\n', (6226, 6281), False, 'from erpnext.stock.doctype.item.item import validate_end_of_life, validate_is_stock_item, validate_cancelled_item\n'), ((6285, 6349), 'erpnext.stock.doctype.item.item.validate_is_stock_item', 'validate_is_stock_item', (['item_code', 'item.is_stock_item'], {'verbose': '(0)'}), '(item_code, item.is_stock_item, verbose=0)\n', (6307, 6349), False, 'from erpnext.stock.doctype.item.item import validate_end_of_life, validate_is_stock_item, validate_cancelled_item\n'), ((6817, 6878), 'erpnext.stock.doctype.item.item.validate_cancelled_item', 'validate_cancelled_item', (['item_code', 'item.docstatus'], {'verbose': '(0)'}), '(item_code, item.docstatus, verbose=0)\n', (6840, 6878), False, 'from erpnext.stock.doctype.item.item import validate_end_of_life, validate_is_stock_item, validate_cancelled_item\n'), ((7299, 7336), 'frappe.get_doc', 'frappe.get_doc', (['"""Item"""', 'row.item_code'], {}), "('Item', row.item_code)\n", (7313, 7336), False, 'import frappe, erpnext\n'), ((9656, 9801), 'erpnext.stock.stock_ledger.get_previous_sle', 'get_previous_sle', (["{'item_code': row.item_code, 'posting_date': self.posting_date,\n 'posting_time': self.posting_time, 'serial_no': serial_no}"], {}), "({'item_code': row.item_code, 'posting_date': self.\n posting_date, 'posting_time': self.posting_time, 'serial_no': serial_no})\n", (9672, 9801), False, 'from erpnext.stock.stock_ledger import get_previous_sle\n'), ((10678, 10715), 'erpnext.stock.doctype.serial_no.serial_no.get_serial_nos', 'get_serial_nos', (['row.current_serial_no'], {}), '(row.current_serial_no)\n', (10692, 10715), False, 'from erpnext.stock.doctype.serial_no.serial_no import get_serial_nos\n'), ((10927, 10954), 'erpnext.stock.doctype.serial_no.serial_no.get_serial_nos', 'get_serial_nos', (['d.serial_no'], {}), '(d.serial_no)\n', (10941, 10954), 
False, 'from erpnext.stock.doctype.serial_no.serial_no import get_serial_nos\n'), ((11241, 11309), 'frappe.db.set_value', 'frappe.db.set_value', (['"""Serial No"""', 'd', '"""purchase_rate"""', 'valuation_rate'], {}), "('Serial No', d, 'purchase_rate', valuation_rate)\n", (11260, 11309), False, 'import frappe, erpnext\n'), ((11455, 11484), 'erpnext.stock.doctype.serial_no.serial_no.get_serial_nos', 'get_serial_nos', (['row.serial_no'], {}), '(row.serial_no)\n', (11469, 11484), False, 'from erpnext.stock.doctype.serial_no.serial_no import get_serial_nos\n'), ((13338, 13405), 'frappe.db.get_value', 'frappe.db.get_value', (['"""Stock Settings"""', 'None', '"""allow_negative_stock"""'], {}), "('Stock Settings', None, 'allow_negative_stock')\n", (13357, 13405), False, 'import frappe, erpnext\n'), ((17255, 17300), 'frappe.db.get_value', 'frappe.db.get_value', (['"""Item"""', 'd[0]', '"""disabled"""'], {}), "('Item', d[0], 'disabled')\n", (17274, 17300), False, 'import frappe, erpnext\n'), ((18218, 18311), 'erpnext.stock.doctype.batch.batch.get_batch_qty', 'get_batch_qty', (['batch_no', 'warehouse'], {'posting_date': 'posting_date', 'posting_time': 'posting_time'}), '(batch_no, warehouse, posting_date=posting_date, posting_time=\n posting_time)\n', (18231, 18311), False, 'from erpnext.stock.doctype.batch.batch import get_batch_qty\n'), ((3372, 3432), 'frappe._', '_', (['"""None of the items have any change in quantity or value."""'], {}), "('None of the items have any change in quantity or value.')\n", (3373, 3432), False, 'from frappe import msgprint, _\n'), ((4361, 4408), 'frappe.db.get_value', 'frappe.db.get_value', (['"""Warehouse"""', 'row.warehouse'], {}), "('Warehouse', row.warehouse)\n", (4380, 4408), False, 'import frappe, erpnext\n'), ((4765, 4777), 'frappe.utils.flt', 'flt', (['row.qty'], {}), '(row.qty)\n', (4768, 4777), False, 'from frappe.utils import cstr, flt, cint\n'), ((4926, 4949), 'frappe.utils.flt', 'flt', (['row.valuation_rate'], {}), 
'(row.valuation_rate)\n', (4929, 4949), False, 'from frappe.utils import cstr, flt, cint\n'), ((5809, 5822), 'frappe.msgprint', 'msgprint', (['msg'], {}), '(msg)\n', (5817, 5822), False, 'from frappe import msgprint, _\n'), ((7759, 7907), 'erpnext.stock.stock_ledger.get_previous_sle', 'get_previous_sle', (["{'item_code': row.item_code, 'warehouse': row.warehouse, 'posting_date':\n self.posting_date, 'posting_time': self.posting_time}"], {}), "({'item_code': row.item_code, 'warehouse': row.warehouse,\n 'posting_date': self.posting_date, 'posting_time': self.posting_time})\n", (7775, 7907), False, 'from erpnext.stock.stock_ledger import get_previous_sle\n'), ((11823, 11878), 'frappe.db.get_value', 'frappe.db.get_value', (['"""Item"""', 'row.item_code', '"""stock_uom"""'], {}), "('Item', row.item_code, 'stock_uom')\n", (11842, 11878), False, 'import frappe, erpnext\n'), ((12343, 12363), 'frappe.utils.flt', 'flt', (['row.current_qty'], {}), '(row.current_qty)\n', (12346, 12363), False, 'from frappe.utils import cstr, flt, cint\n'), ((12390, 12421), 'frappe.utils.flt', 'flt', (['row.current_valuation_rate'], {}), '(row.current_valuation_rate)\n', (12393, 12421), False, 'from frappe.utils import cstr, flt, cint\n'), ((12662, 12685), 'frappe.utils.flt', 'flt', (['row.valuation_rate'], {}), '(row.valuation_rate)\n', (12665, 12685), False, 'from frappe.utils import cstr, flt, cint\n'), ((14479, 14508), 'frappe._', '_', (['"""Please enter Cost Center"""'], {}), "('Please enter Cost Center')\n", (14480, 14508), False, 'from frappe import msgprint, _\n'), ((14701, 14753), 'erpnext.is_perpetual_inventory_enabled', 'erpnext.is_perpetual_inventory_enabled', (['self.company'], {}), '(self.company)\n', (14739, 14753), False, 'import frappe, erpnext\n'), ((14814, 14847), 'frappe._', '_', (['"""Please enter Expense Account"""'], {}), "('Please enter Expense Account')\n", (14815, 14847), False, 'from frappe import msgprint, _\n'), ((15564, 15574), 'frappe.utils.flt', 'flt', 
(['d.qty'], {}), '(d.qty)\n', (15567, 15574), False, 'from frappe.utils import cstr, flt, cint\n'), ((15577, 15595), 'frappe.utils.flt', 'flt', (['d.current_qty'], {}), '(d.current_qty)\n', (15580, 15595), False, 'from frappe.utils import cstr, flt, cint\n'), ((15621, 15634), 'frappe.utils.flt', 'flt', (['d.amount'], {}), '(d.amount)\n', (15624, 15634), False, 'from frappe.utils import cstr, flt, cint\n'), ((15637, 15658), 'frappe.utils.flt', 'flt', (['d.current_amount'], {}), '(d.current_amount)\n', (15640, 15658), False, 'from frappe.utils import cstr, flt, cint\n'), ((15893, 16111), 'frappe._', '_', (['"""The task has been enqueued as a background job. In case there is any issue on processing in background, the system will add a comment about the error on this Stock Reconciliation and revert to the Draft stage"""'], {}), "('The task has been enqueued as a background job. In case there is any issue on processing in background, the system will add a comment about the error on this Stock Reconciliation and revert to the Draft stage'\n )\n", (15894, 16111), False, 'from frappe import msgprint, _\n'), ((3616, 3671), 'frappe._', '_', (['"""Removed items with no change in quantity or value."""'], {}), "('Removed items with no change in quantity or value.')\n", (3617, 3671), False, 'from frappe import msgprint, _\n'), ((5138, 5254), 'erpnext.stock.utils.get_stock_balance', 'get_stock_balance', (['row.item_code', 'row.warehouse', 'self.posting_date', 'self.posting_time'], {'with_valuation_rate': '(True)'}), '(row.item_code, row.warehouse, self.posting_date, self.\n posting_time, with_valuation_rate=True)\n', (5155, 5254), False, 'from erpnext.stock.utils import get_stock_balance, get_incoming_rate, get_available_serial_nos\n'), ((5373, 5502), 'frappe.db.get_value', 'frappe.db.get_value', (['"""Item Price"""', "{'item_code': row.item_code, 'buying': 1, 'currency': default_currency}", '"""price_list_rate"""'], {}), "('Item Price', {'item_code': row.item_code, 'buying': 
1,\n 'currency': default_currency}, 'price_list_rate')\n", (5392, 5502), False, 'import frappe, erpnext\n'), ((10071, 10157), 'erpnext.stock.utils.get_stock_balance', 'get_stock_balance', (['row.item_code', 'warehouse', 'self.posting_date', 'self.posting_time'], {}), '(row.item_code, warehouse, self.posting_date, self.\n posting_time)\n', (10088, 10157), False, 'from erpnext.stock.utils import get_stock_balance, get_incoming_rate, get_available_serial_nos\n'), ((12533, 12559), 'frappe.utils.flt', 'flt', (['row.amount_difference'], {}), '(row.amount_difference)\n', (12536, 12559), False, 'from frappe.utils import cstr, flt, cint\n'), ((12725, 12751), 'frappe.utils.flt', 'flt', (['row.amount_difference'], {}), '(row.amount_difference)\n', (12728, 12751), False, 'from frappe.utils import cstr, flt, cint\n'), ((13022, 13059), 'erpnext.stock.doctype.serial_no.serial_no.get_serial_nos', 'get_serial_nos', (['row.current_serial_no'], {}), '(row.current_serial_no)\n', (13036, 13059), False, 'from erpnext.stock.doctype.serial_no.serial_no import get_serial_nos\n'), ((14895, 14960), 'frappe.db.sql', 'frappe.db.sql', (['"""select name from `tabStock Ledger Entry` limit 1"""'], {}), "('select name from `tabStock Ledger Entry` limit 1')\n", (14908, 14960), False, 'import frappe, erpnext\n'), ((14972, 15039), 'frappe.db.get_value', 'frappe.db.get_value', (['"""Account"""', 'self.expense_account', '"""report_type"""'], {}), "('Account', self.expense_account, 'report_type')\n", (14991, 15039), False, 'import frappe, erpnext\n'), ((3740, 3756), 'frappe._', '_', (['"""Row # {0}: """'], {}), "('Row # {0}: ')\n", (3741, 3756), False, 'from frappe import msgprint, _\n'), ((4207, 4227), 'frappe._', '_', (['"""Duplicate entry"""'], {}), "('Duplicate entry')\n", (4208, 4227), False, 'from frappe import msgprint, _\n'), ((4464, 4502), 'frappe._', '_', (['"""Warehouse not found in the system"""'], {}), "('Warehouse not found in the system')\n", (4465, 4502), False, 'from frappe import 
msgprint, _\n'), ((4658, 4719), 'frappe._', '_', (['"""Please specify either Quantity or Valuation Rate or both"""'], {}), "('Please specify either Quantity or Valuation Rate or both')\n", (4659, 4719), False, 'from frappe import msgprint, _\n'), ((4842, 4879), 'frappe._', '_', (['"""Negative Quantity is not allowed"""'], {}), "('Negative Quantity is not allowed')\n", (4843, 4879), False, 'from frappe import msgprint, _\n'), ((5014, 5057), 'frappe._', '_', (['"""Negative Valuation Rate is not allowed"""'], {}), "('Negative Valuation Rate is not allowed')\n", (5015, 5057), False, 'from frappe import msgprint, _\n'), ((5641, 5698), 'frappe.get_value', 'frappe.get_value', (['"""Item"""', 'row.item_code', '"""valuation_rate"""'], {}), "('Item', row.item_code, 'valuation_rate')\n", (5657, 5698), False, 'import frappe, erpnext\n'), ((6977, 6984), 'frappe.utils.cstr', 'cstr', (['e'], {}), '(e)\n', (6981, 6984), False, 'from frappe.utils import cstr, flt, cint\n'), ((15079, 15199), 'frappe._', '_', (['"""Difference Account must be a Asset/Liability type account, since this Stock Reconciliation is an Opening Entry"""'], {}), "('Difference Account must be a Asset/Liability type account, since this Stock Reconciliation is an Opening Entry'\n )\n", (15080, 15199), False, 'from frappe import msgprint, _\n'), ((6497, 6547), 'frappe._', '_', (['"""Serial no(s) required for serialized item {0}"""'], {}), "('Serial no(s) required for serialized item {0}')\n", (6498, 6547), False, 'from frappe import msgprint, _\n'), ((6718, 6764), 'frappe._', '_', (['"""Batch no is required for batched item {0}"""'], {}), "('Batch no is required for batched item {0}')\n", (6719, 6764), False, 'from frappe import msgprint, _\n'), ((6940, 6951), 'frappe._', '_', (['"""Row # """'], {}), "('Row # ')\n", (6941, 6951), False, 'from frappe import msgprint, _\n'), ((7710, 7736), 'frappe.bold', 'frappe.bold', (['row.item_code'], {}), '(row.item_code)\n', (7721, 7736), False, 'import frappe, erpnext\n'), 
((7579, 7689), 'frappe._', '_', (['"""Row #{0}: Item {1} is not a Serialized/Batched Item. It cannot have a Serial No/Batch No against it."""'], {}), "('Row #{0}: Item {1} is not a Serialized/Batched Item. It cannot have a Serial No/Batch No against it.'\n )\n", (7580, 7689), False, 'from frappe import msgprint, _\n'), ((8214, 8266), 'frappe._', '_', (['"""Valuation Rate required for Item {0} at row {1}"""'], {}), "('Valuation Rate required for Item {0} at row {1}')\n", (8215, 8266), False, 'from frappe import msgprint, _\n')]
|
import sqlite3
class sqdb3:
    """Small convenience wrapper around a sqlite3 connection.

    Error codes from failed operations are collected per instance
    (code 24 = "operation attempted while disconnected") and retrieved
    with ``ReturnErrorCodes``.
    """

    def __init__(self):
        # BUG FIX: these used to be class-level attributes, so the error
        # list (a mutable class attribute) was shared by every instance.
        # They are now per-instance state.
        self.__ERROR_CODES = []
        self.__CONNECTION_FLAG = False
        self.ConnectDB()

    def ConnectDB(self, dbname="player_infos.db"):
        """Open (creating if necessary) the SQLite database *dbname*."""
        self.__CONNECTION_FLAG = True
        self.__connection = sqlite3.connect(dbname)

    def DisconnectDB(self):
        """Close the connection and mark the wrapper as disconnected."""
        self.__CONNECTION_FLAG = False
        self.__connection.close()

    def GetMessages(self):
        """Print all rows of the MESSAGES table.

        When called while disconnected, records error code 24 instead of
        touching the closed connection.
        """
        if self.__CONNECTION_FLAG:
            self.__cursor = self.__connection.cursor()
            raw_Messages = self.__cursor.execute("SELECT * FROM MESSAGES").fetchall()
            print(raw_Messages)
        else:
            self.__ERROR_CODES.append(24)

    def ReturnErrorCodes(self):
        """Return the recorded error codes, or None when there are none."""
        if len(self.__ERROR_CODES) > 0:
            return self.__ERROR_CODES
# Quick smoke test: open the default DB, dump MESSAGES, report any errors.
x = sqdb3()
x.GetMessages()  # NOTE(review): raises sqlite3.OperationalError if MESSAGES does not exist -- confirm the DB is pre-populated
print(x.ReturnErrorCodes())  # None when no error codes were recorded
x.DisconnectDB()
|
[
"sqlite3.connect"
] |
[((249, 272), 'sqlite3.connect', 'sqlite3.connect', (['dbname'], {}), '(dbname)\n', (264, 272), False, 'import sqlite3\n')]
|
#
# Copyright (C) [2020] Futurewei Technologies, Inc.
#
# FORCE-RISCV is licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR
# FIT FOR A PARTICULAR PURPOSE.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import PyInterface
from base.Sequence import Sequence
from base.GenThreadExecutor import GenThreadExecutorFactory
import Log
import RandomUtils
## GlobalInitSequence class
#  Base class of arch level global init sequence class.
class GlobalInitSequence(Sequence):
    def __init__(self, gen_thread, name):
        super().__init__(gen_thread, name)

    def generate(self, **kargs):
        """Run the global-init hooks in their fixed order.

        The order is part of the contract: reset region, handler-set
        memory, memory fill pattern, then thread grouping.
        """
        self.setupResetRegion()
        self.allocateHandlerSetMemory()
        self.setupMemoryFillPattern()
        self.setupThreadGroup()

    # The hooks below are deliberate no-ops; arch-level subclasses override
    # the ones they need.
    def setupResetRegion(self):
        pass

    def allocateHandlerSetMemory(self):
        pass

    def setupMemoryFillPattern(self):
        pass

    def setupThreadGroup(self):
        pass
## Env class
# Top level class in a test template
class Env(object):
    """Top level class in a test template.

    Owns one front-end generator thread per hardware thread (chips x
    cores x threads), the back-end executor that drives them, and the
    before/after sequences run on a randomly chosen main generator.

    defaultGenClass / defaultSeqClass / defaultInitSeqClass and
    genThreadInitFunc start as None; the arch-specific template is
    expected to assign them before setup() is called.
    """

    def __init__(self, interface):
        # interface is the back-end API object; topology counts are
        # queried once and cached here.
        self.interface = interface
        self.numberChips = self.interface.numberOfChips()
        self.numberCores = self.interface.numberOfCores()
        self.numberThreads = self.interface.numberOfThreads()
        self.genThreads = list()
        self.mGenMain = None
        self.beforeSequences = list()
        self.afterSequences = list()
        self.defaultGenClass = None
        self.defaultSeqClass = None
        self.defaultInitSeqClass = None
        self.genThreadInitFunc = None
        # One executor managing every hardware thread in the system.
        self.executor = GenThreadExecutorFactory.createGenThreadExecutor(self.numberChips * self.numberCores * self.numberThreads, interface)

    ## Configure generator memory
    def configureMemory(self, memfile_module):
        """Import *memfile_module* and let it configure back-end memory."""
        import importlib
        mem_module = importlib.import_module(memfile_module)
        mem_module.configure_memory(self.interface)

    def configureChoicesModifier(self, modfile_module):
        """Import *modfile_module* and let it adjust the main generator's choices."""
        import importlib
        choices_module = importlib.import_module(modfile_module)
        choices_module.configure_choices(self.mGenMain)

    ## Setup generator threads
    def setup(self):
        """Create one back-end and one front-end generator thread per
        (chip, core, thread) triple, then assign the main generator."""
        for i_chip in range(self.numberChips):
            for i_core in range(self.numberCores):
                for i_thread in range(self.numberThreads):
                    gen_thread_id = self.createBackEndGenThread(i_thread, i_core, i_chip) # create back end generator thread
                    new_gen_thread = self.createGenThread(gen_thread_id, i_thread, i_core, i_chip) # create front end generator thread
                    self.genThreads.append(new_gen_thread)
                    self.setupGenThread(new_gen_thread)
        self.assignMainGen()
        self.addThreadSplitterSequence()

    ## Assign a main generator for the before and after main test processing.
    def assignMainGen(self):
        """Randomly pick one generator thread as the main generator."""
        num_gen = len(self.genThreads )
        if num_gen == 0:
            self.interface.error("[assignMainGen] number of threads = 0")
        gen_index = RandomUtils.random32(0, num_gen - 1)
        self.mGenMain = self.genThreads[gen_index]
        self.executor.setMainThreadId(self.mGenMain.genThreadID)
        Log.notice("Main generator is 0x%x" % self.mGenMain.genThreadID)

    ## Create back end generator thread
    def createBackEndGenThread(self, i_thread, i_core, i_chip):
        """Ask the back end to create a generator thread; return its id."""
        ret_thread_id = self.interface.createGeneratorThread(i_thread, i_core, i_chip)
        return ret_thread_id

    ## Create front end generator thread.
    def createGenThread(self, gen_thread_id, i_thread, i_core, i_chip):
        """Instantiate the front-end generator class for a back-end thread id."""
        return self.defaultGenClass(gen_thread_id, self.interface)

    ## Setting up newly created generator thread.
    def setupGenThread(self, gen_thread):
        """Attach the default main sequence and init function to *gen_thread*."""
        main_seq = self.defaultSeqClass(gen_thread, self.defaultSeqClass.__name__)
        gen_thread.addSequence(main_seq)
        gen_thread.setGenThreadInitFunc(self.genThreadInitFunc)

    ## Start all the generator threads
    def generate(self):
        """Run before-sequences, execute all generator threads, then
        run after-sequences and the final Summary sequence."""
        for seq in self.beforeSequences:
            seq.genThread = self.mGenMain
            seq.run()
        # TODO(Noah): Remove this logic if and when a more abstracted mechanism to assign the exceptions manager can be
        # determined. All threads share the same exception handler sets, so we need to propagate their locations to each
        # thread. We do this by creating copies of the exceptions manager.
        self.mGenMain.setup()
        for gen_thread in self.genThreads:
            if gen_thread is not self.mGenMain:
                gen_thread.exceptionHandlerManager = self.mGenMain.exceptionHandlerManager.createShallowCopy(gen_thread)
                gen_thread.addressTableManager = self.mGenMain.addressTableManager.createShallowCopy(gen_thread)
        self.executor.executeGenThreads(self.genThreads)
        for seq in self.afterSequences:
            seq.genThread = self.mGenMain
            seq.run()
        self.mGenMain.genSequence("Summary")

    ## set Sequence class like bnt, eret on a thread
    def setSequenceClass(self, thread_id, seq_type, sequence):
        """Install *sequence* on a thread; seq_type 0 = BNT, 1 = ERET preamble."""
        thread_obj = self.getThreadObject(thread_id)
        if thread_obj is not None:
            if seq_type == 0:
                thread_obj.setBntSequence(sequence)
            elif seq_type == 1:
                thread_obj.setEretPreambleSequence(sequence)
            else:
                self.interface.error("invalid sequence type: %d" % seq_type)
        else:
            self.interface.error("invalid thread id: %d" % thread_id)

    ## run Sequence on a thread
    ## TBD: to optimize thread list for better performance
    def runSequence(self, thread_id, seq_type, primary, param_dict):
        """Run the BNT (seq_type 0) or ERET preamble (seq_type 1) sequence on a thread."""
        thread_obj = self.getThreadObject(thread_id)
        if thread_obj is not None:
            if seq_type == 0:
                thread_obj.runBntSequence(primary, param_dict)
            elif seq_type == 1:
                thread_obj.runEretPreambleSequence(param_dict)
            else:
                self.interface.error("invalid sequence type: %d" % seq_type)
        else:
            self.interface.error("invalid thread id: %d" % thread_id)

    def getThreadObject(self, thread_id):
        """Linear search for the front-end thread with *thread_id*; None if absent."""
        for thread in self.genThreads:
            if thread.genThreadID == thread_id:
                return thread
        return None

    ## Add a sequence to be run before generating the main test.
    def addInitialSequence(self, init_class):
        """Queue *init_class* (or the default init sequence class) to run before the test."""
        if init_class is not None:
            self.beforeSequences.append(init_class(None, init_class.__name__))
        else:
            self.beforeSequences.append(self.defaultInitSeqClass(None, self.defaultInitSeqClass.__name__))

    def addThreadSplitterSequence(self):
        """Arch-specific hook; subclasses must supply the thread splitter."""
        raise NotImplementedError
|
[
"RandomUtils.random32",
"base.GenThreadExecutor.GenThreadExecutorFactory.createGenThreadExecutor",
"importlib.import_module",
"Log.notice"
] |
[((1984, 2106), 'base.GenThreadExecutor.GenThreadExecutorFactory.createGenThreadExecutor', 'GenThreadExecutorFactory.createGenThreadExecutor', (['(self.numberChips * self.numberCores * self.numberThreads)', 'interface'], {}), '(self.numberChips * self.\n numberCores * self.numberThreads, interface)\n', (2032, 2106), False, 'from base.GenThreadExecutor import GenThreadExecutorFactory\n'), ((2231, 2270), 'importlib.import_module', 'importlib.import_module', (['memfile_module'], {}), '(memfile_module)\n', (2254, 2270), False, 'import importlib\n'), ((2431, 2470), 'importlib.import_module', 'importlib.import_module', (['modfile_module'], {}), '(modfile_module)\n', (2454, 2470), False, 'import importlib\n'), ((3455, 3491), 'RandomUtils.random32', 'RandomUtils.random32', (['(0)', '(num_gen - 1)'], {}), '(0, num_gen - 1)\n', (3475, 3491), False, 'import RandomUtils\n'), ((3616, 3680), 'Log.notice', 'Log.notice', (["('Main generator is 0x%x' % self.mGenMain.genThreadID)"], {}), "('Main generator is 0x%x' % self.mGenMain.genThreadID)\n", (3626, 3680), False, 'import Log\n')]
|
from flask_restful import Resource
from flask import request
from prx.TrainProxy import TrainProxy
from prx.TestProxy import TestProxy
class TestApi(Resource):
    """REST endpoint dispatching picture/directory test commands to TestProxy."""

    @staticmethod
    def _read_required(names):
        """Fetch required form fields in order.

        Returns (values, None) on success, or (None, error_response)
        naming the first missing field.
        """
        values = []
        for name in names:
            value = request.form.get(name)
            if not value:
                # NOTE: the 'nesserary' misspelling is kept verbatim so
                # existing clients that match on the message keep working.
                return None, {'success': False, 'error': "%s is nesserary" % name}
            values.append(value)
        return values, None

    @staticmethod
    def testPicture():
        """Run a single-picture test for (projectname, tag, path)."""
        values, error = TestApi._read_required(('projectname', 'tag', 'path'))
        if error is not None:
            return error
        projectname, tag, path = values
        return TestProxy.testPicture(projectname, tag, path)

    @staticmethod
    def testDirectory():
        """Run a directory test for (projectname, tag, path)."""
        values, error = TestApi._read_required(('projectname', 'tag', 'path'))
        if error is not None:
            return error
        projectname, tag, path = values
        return TestProxy.testDirectory(projectname, tag, path)

    def post(self):
        """Dispatch on the 'cmd' form field.

        Returns None for an unknown cmd (unchanged from the original
        behaviour).
        """
        cmd = request.form.get('cmd')
        if cmd == "testPicture":
            return TestApi.testPicture()
        if cmd == "testDirectory":
            return TestApi.testDirectory()
|
[
"prx.TestProxy.TestProxy.testDirectory",
"prx.TestProxy.TestProxy.testPicture",
"flask.request.form.get"
] |
[((239, 270), 'flask.request.form.get', 'request.form.get', (['"""projectname"""'], {}), "('projectname')\n", (255, 270), False, 'from flask import request\n'), ((401, 424), 'flask.request.form.get', 'request.form.get', (['"""tag"""'], {}), "('tag')\n", (417, 424), False, 'from flask import request\n'), ((540, 564), 'flask.request.form.get', 'request.form.get', (['"""path"""'], {}), "('path')\n", (556, 564), False, 'from flask import request\n'), ((690, 738), 'prx.TestProxy.TestProxy.testPicture', 'TestProxy.testPicture', (['_projectname', '_tag', '_path'], {}), '(_projectname, _tag, _path)\n', (711, 738), False, 'from prx.TestProxy import TestProxy\n'), ((827, 858), 'flask.request.form.get', 'request.form.get', (['"""projectname"""'], {}), "('projectname')\n", (843, 858), False, 'from flask import request\n'), ((989, 1012), 'flask.request.form.get', 'request.form.get', (['"""tag"""'], {}), "('tag')\n", (1005, 1012), False, 'from flask import request\n'), ((1128, 1152), 'flask.request.form.get', 'request.form.get', (['"""path"""'], {}), "('path')\n", (1144, 1152), False, 'from flask import request\n'), ((1278, 1328), 'prx.TestProxy.TestProxy.testDirectory', 'TestProxy.testDirectory', (['_projectname', '_tag', '_path'], {}), '(_projectname, _tag, _path)\n', (1301, 1328), False, 'from prx.TestProxy import TestProxy\n'), ((1367, 1390), 'flask.request.form.get', 'request.form.get', (['"""cmd"""'], {}), "('cmd')\n", (1383, 1390), False, 'from flask import request\n')]
|
__author__ = 'DafniAntotsiou'
import os
from pso import pso, particle2actuator
from functions import *
import mujoco_py as mp
from math import ceil
from mjviewerext import MjViewerExt
import glob
import argparse
from replay_trajectories import play
def argsparser():
    """Build and parse the command-line arguments for the retargeting tool."""
    # BUG FIX: ArgumentParser's first positional parameter is `prog`, not
    # `description`; pass the text as description so `-h` renders sensibly.
    parser = argparse.ArgumentParser(
        description="Implementation of Task Oriented Hand Motion Retargeting")
    parser.add_argument('--model_path', help='path to model xml', type=str, default="model/MPL/MPL_Sphere_6.xml")
    parser.add_argument('--traj_path', help='path to the trajectory file or directory', default='trajectories')
    parser.add_argument('--out_dir', help='directory to save the output results', default='trajectories/result')
    parser.add_argument('--seed', help='RNG seed', type=int, default=0)
    boolean_flag(parser, 'play', default=False, help='playback the original and optimised trajectories')
    boolean_flag(parser, 'rot_scene', default=True, help='set if scene was rotated during HPE acquisition')
    return parser.parse_args()
def boolean_flag(parser, name, default=False, help=None):
    """Register a paired on/off switch on *parser*.

    ``--<name>`` stores True and ``--no-<name>`` stores False into the
    same destination (dashes in *name* become underscores in the
    attribute name); *default* applies when neither switch is given.

    (Helper adapted from OpenAI's baselines.)
    """
    dest_name = name.replace('-', '_')
    on_switch = "--%s" % name
    off_switch = "--no-%s" % name
    parser.add_argument(on_switch, action="store_true", default=default,
                        dest=dest_name, help=help)
    parser.add_argument(off_switch, action="store_false", dest=dest_name)
def optimise_actions(model_path, traj_path, rot_scene=False, fps=60, render=False, name=None, replay=False):
    """Re-optimise a recorded hand trajectory with PSO and save the result.

    Loads an .npz trajectory (must contain 'obs', 'acs' and 'hpe' entries),
    replays it in a MuJoCo simulation of *model_path*, retargets each HPE
    frame onto the hand actuators and - when the hand is within contact
    range of the object - refines the first 23 actuator values with
    particle swarm optimisation.  The optimised trajectory is written via
    np.savez to *name* (default 'pso_optimise') and optionally replayed
    next to the original.

    NOTE(review): the iterations/swarms/c_tasks/c_angles lists are
    single-valued here but the nested loops suggest they were once used
    for hyper-parameter grid searches - confirm before simplifying.
    """
    per_hpe = False  # apply pso only on new hpe frame regardless of simulation fps
    data = read_npz(traj_path)
    # Hyper-parameter grids (currently degenerate, one value each).
    iterations = [100]
    swarms = [100]
    c_tasks = [0.8]
    c_angles = [0.5]
    for it in iterations:
        for swarmsize in swarms:
            for c_task in c_tasks:
                for c_a in c_angles:
                    trajectory = {'obs': [], 'acs': [], 'hpe': []}
                    assert 'hpe' in data and 'obs' in data and 'acs' in data
                    if 'hpe' in data and 'obs' in data and 'acs' in data:
                        model = mp.load_model_from_path(model_path)
                        # Choose substeps so one sim.step() spans one output frame at *fps*.
                        nsubstep = int(ceil(1/(fps * model.opt.timestep)))
                        sim = mp.MjSim(model, nsubsteps=nsubstep)
                        sim.reset()
                        if render:
                            viewer = MjViewerExt(sim)
                        # initialise environment
                        idvA, default_q = get_model_info(model)
                        # Base hand orientation: 180-degree rotation about z applied to the model default.
                        default_mat = array([[-1, 0, 0], [0, -1, 0], [0, 0, 1]])
                        default_q2 = rotmat2quat(default_mat)
                        default_q = default_q2 * default_q
                        init_pos = array([0, -0.8, 0.1])
                        # m_in: per-finger reference positions taken from every 4th actuator
                        # starting at index 3 (assumes 5 fingers x 4 actuators layout - TODO confirm).
                        m_in = np.zeros(shape=(5, 3))
                        for i in range(5):
                            for j in range(3):
                                m_in[i, j] = idvA[3 + i * 4].get_pos()[j]
                        for i in range(len(data['acs'])):
                            # set actions: acs rows are [mocap_pos(3), mocap_quat(4), ctrl(...)]
                            mocap_pos = data['acs'][i][0:3]
                            mocap_rot = data['acs'][i][3:7]
                            ctrl = data['acs'][i][7:]
                            sim.data.ctrl[:] = ctrl[:]
                            sim.data.mocap_pos[:] = mocap_pos[:]
                            sim.data.mocap_quat[:] = mocap_rot[:]
                            if i == 0:
                                # first frame
                                # set state: obs rows are [qpos, qvel] concatenated
                                qpos = data['obs'][i][:len(sim.data.qpos)]
                                qvel = data['obs'][i][len(sim.data.qpos):]
                                sim.data.qpos[:] = qpos[:]
                                sim.data.qvel[:] = qvel[:]
                                # copy state to actuators
                                for j in range(len(idvA)):
                                    idvA[j].get_val_from_sim(sim)
                            if i == 0 or not np.array_equal(data['hpe'][i], data['hpe'][i-1]) or not per_hpe:
                                # first or new frame - retarget and apply pso
                                idvA = obs2actions(data['hpe'][i], idvA=idvA, init_pos=init_pos,
                                                   default_q=default_q, default_mat=default_mat, m_in=m_in, ad_hoc=False,
                                                   rot_scene=rot_scene)
                                curr_state = (sim.data.qpos, sim.data.qvel)
                                obj_name = "Object"
                                obj_state = get_joint_state(obj_name, sim.data)
                                if obj_state is not None:
                                    objects = {obj_name: obj_state}
                                else:
                                    objects = None
                                # Distances of currently active contact pairs; drives the PSO budget.
                                pair_dist = get_active_contacts_dist(data=sim.data,
                                                                      contact_pairs=get_pair_contacts(model=model))
                                if pair_dist and len(pair_dist) > 2:
                                    # there are at least 2 fingers close to the object - enable contact
                                    pso_params = {'contact': True, 'swarmsize': swarmsize, 'maxiter': it,
                                                  'minfunc': 1e-4, 'minstep': 1e-4, 'hybrid_prc': 10}
                                else:
                                    # no object close enough to grab - tiny budget, PSO effectively skipped
                                    pso_params = {'contact': False, 'swarmsize': 2, 'maxiter': 3,
                                                  'minfunc': 1e-1, 'minstep': 1e-1, 'hybrid_prc': 0.5}
                                if pso_params['contact']:  # apply pso only for contact
                                    for j in range(len(idvA) - 1):
                                        idvA[j].set_value(idvA[j].get_value(), safe=True)
                                    # Only the first 23 actuators take part in the swarm optimisation.
                                    sub_params = idvA[0:23]
                                    actions, error = pso(params=sub_params, obs=data['hpe'][i], model=model, norm=True, fps=10,
                                                         visualise=False,
                                                         default_mat=default_mat, hybrid_prc=pso_params['hybrid_prc'],
                                                         contact=pso_params['contact'], swarmsize=pso_params['swarmsize'],
                                                         initial_act=idvA, omega=0.1, phip=0.3, phig=0.7,
                                                         minstep=pso_params['minstep'], maxiter=pso_params['maxiter'],
                                                         minfunc=pso_params['minfunc'], hybrid_space=True, objects=objects
                                                         , initial_state=curr_state, rot_scene=rot_scene,
                                                         c_task=c_task, c_angle=c_a)
                                    # Write the optimised particle back into the actuators.
                                    sub_params = particle2actuator(actions, sub_params)
                            for j in range(len(idvA)):
                                idvA[j].assign(sim)
                            # record frame
                            trajectory['obs'].append(np.concatenate((np.asarray(sim.data.qpos),
                                                                     np.asarray(sim.data.qvel)), axis=0))
                            mocap = np.concatenate((sim.data.mocap_pos.flatten(), sim.data.mocap_quat.flatten()), axis=0)
                            trajectory['acs'].append(np.concatenate((np.asarray(mocap), np.asarray(sim.data.ctrl)), axis=0))
                            trajectory['hpe'].append(np.array(data['hpe'][i]))
                            sim.step()
                            if render:
                                viewer.render()
                    if name is None:
                        name = 'pso_optimise'
                    np.savez(name, **trajectory)
                    if replay:
                        play(model_path, data=trajectory, fps=fps, loop=False, second_data=data)
def main(args):
    """Validate the CLI paths, collect trajectory files and optimise each one.

    Exits with status 1 (after printing a message) when the model file or
    the trajectory path does not exist.
    """
    if not os.path.isfile(args.model_path):
        print("model path does not exist. Terminating...")
        exit(1)
    args.model_path = os.path.abspath(args.model_path)

    # Accept either a directory of .npz trajectories or a single file.
    if os.path.isdir(args.traj_path):
        traj_files = sorted(glob.glob(os.path.join(args.traj_path, "*.npz")))
    elif os.path.isfile(args.traj_path):
        traj_files = [args.traj_path]
    else:
        print("trajectory path does not exist. Terminating...")
        exit(1)

    os.makedirs(args.out_dir, exist_ok=True)
    for traj_file in traj_files:
        # Re-seed per file so every trajectory is optimised reproducibly.
        np.random.seed(args.seed)
        base = os.path.basename(traj_file)[:-4]
        out_name = os.path.join(args.out_dir, base + '_pso')
        print("now working on " + traj_file)
        optimise_actions(args.model_path, traj_file, rot_scene=args.rot_scene, fps=60,
                         render=False, name=out_name, replay=args.play)
if __name__ == "__main__":
args = argsparser()
main(args)
|
[
"mujoco_py.MjSim",
"pso.particle2actuator",
"os.path.abspath",
"mujoco_py.load_model_from_path",
"argparse.ArgumentParser",
"os.makedirs",
"os.path.basename",
"os.path.isdir",
"math.ceil",
"replay_trajectories.play",
"os.path.isfile",
"glob.glob",
"pso.pso",
"mjviewerext.MjViewerExt",
"os.path.join"
] |
[((283, 370), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""Implementation of Task Oriented Hand Motion Retargeting"""'], {}), "(\n 'Implementation of Task Oriented Hand Motion Retargeting')\n", (306, 370), False, 'import argparse\n'), ((8747, 8779), 'os.path.abspath', 'os.path.abspath', (['args.model_path'], {}), '(args.model_path)\n', (8762, 8779), False, 'import os\n'), ((8804, 8833), 'os.path.isdir', 'os.path.isdir', (['args.traj_path'], {}), '(args.traj_path)\n', (8817, 8833), False, 'import os\n'), ((9118, 9158), 'os.makedirs', 'os.makedirs', (['args.out_dir'], {'exist_ok': '(True)'}), '(args.out_dir, exist_ok=True)\n', (9129, 9158), False, 'import os\n'), ((8617, 8648), 'os.path.isfile', 'os.path.isfile', (['args.model_path'], {}), '(args.model_path)\n', (8631, 8648), False, 'import os\n'), ((8854, 8891), 'os.path.join', 'os.path.join', (['args.traj_path', '"""*.npz"""'], {}), "(args.traj_path, '*.npz')\n", (8866, 8891), False, 'import os\n'), ((8908, 8927), 'glob.glob', 'glob.glob', (['filesExp'], {}), '(filesExp)\n', (8917, 8927), False, 'import glob\n'), ((8958, 8988), 'os.path.isfile', 'os.path.isfile', (['args.traj_path'], {}), '(args.traj_path)\n', (8972, 8988), False, 'import os\n'), ((9260, 9279), 'os.path.basename', 'os.path.basename', (['f'], {}), '(f)\n', (9276, 9279), False, 'import os\n'), ((2367, 2402), 'mujoco_py.load_model_from_path', 'mp.load_model_from_path', (['model_path'], {}), '(model_path)\n', (2390, 2402), True, 'import mujoco_py as mp\n'), ((2508, 2543), 'mujoco_py.MjSim', 'mp.MjSim', (['model'], {'nsubsteps': 'nsubstep'}), '(model, nsubsteps=nsubstep)\n', (2516, 2543), True, 'import mujoco_py as mp\n'), ((2442, 2478), 'math.ceil', 'ceil', (['(1 / (fps * model.opt.timestep))'], {}), '(1 / (fps * model.opt.timestep))\n', (2446, 2478), False, 'from math import ceil\n'), ((2652, 2668), 'mjviewerext.MjViewerExt', 'MjViewerExt', (['sim'], {}), '(sim)\n', (2663, 2668), False, 'from mjviewerext import MjViewerExt\n'), ((8515, 
8587), 'replay_trajectories.play', 'play', (['model_path'], {'data': 'trajectory', 'fps': 'fps', 'loop': '(False)', 'second_data': 'data'}), '(model_path, data=trajectory, fps=fps, loop=False, second_data=data)\n', (8519, 8587), False, 'from replay_trajectories import play\n'), ((6464, 6964), 'pso.pso', 'pso', ([], {'params': 'sub_params', 'obs': "data['hpe'][i]", 'model': 'model', 'norm': '(True)', 'fps': '(10)', 'visualise': '(False)', 'default_mat': 'default_mat', 'hybrid_prc': "pso_params['hybrid_prc']", 'contact': "pso_params['contact']", 'swarmsize': "pso_params['swarmsize']", 'initial_act': 'idvA', 'omega': '(0.1)', 'phip': '(0.3)', 'phig': '(0.7)', 'minstep': "pso_params['minstep']", 'maxiter': "pso_params['maxiter']", 'minfunc': "pso_params['minfunc']", 'hybrid_space': '(True)', 'objects': 'objects', 'initial_state': 'curr_state', 'rot_scene': 'rot_scene', 'c_task': 'c_task', 'c_angle': 'c_a'}), "(params=sub_params, obs=data['hpe'][i], model=model, norm=True, fps=10,\n visualise=False, default_mat=default_mat, hybrid_prc=pso_params[\n 'hybrid_prc'], contact=pso_params['contact'], swarmsize=pso_params[\n 'swarmsize'], initial_act=idvA, omega=0.1, phip=0.3, phig=0.7, minstep=\n pso_params['minstep'], maxiter=pso_params['maxiter'], minfunc=\n pso_params['minfunc'], hybrid_space=True, objects=objects,\n initial_state=curr_state, rot_scene=rot_scene, c_task=c_task, c_angle=c_a)\n", (6467, 6964), False, 'from pso import pso, particle2actuator\n'), ((7444, 7482), 'pso.particle2actuator', 'particle2actuator', (['actions', 'sub_params'], {}), '(actions, sub_params)\n', (7461, 7482), False, 'from pso import pso, particle2actuator\n')]
|
import frappe
no_cache = 1  # NOTE(review): presumably disables frappe page caching for this route - confirm against frappe website docs
def _member_names(membership_type):
	"""Return the names of all Members with the given membership type."""
	return [d.name for d in frappe.get_all('Member', dict(membership_type=membership_type))]

def _providers_for(member_names, base_filters):
	"""Return website Service Providers belonging to *member_names*.

	Returns an empty list when there are no members of that type.  (The
	original code dropped the member filter entirely in that case - and
	could even reuse the previous tier's member filter, since one shared
	dict was mutated across tiers - so every provider was listed under
	the wrong tier.  Each call now works on its own copy of the filters.)
	"""
	if not member_names:
		return []
	filters = dict(base_filters)
	filters['member'] = ('in', member_names)
	return frappe.get_all('Service Provider',
		'title, introduction, `image`, route, website_url, country', filters)

def get_context(context):
	"""Build the template context for the Service Providers listing page.

	Groups providers into Gold / Silver / Individual tiers; an empty
	tier is replaced with a promotional placeholder entry.
	"""
	context.form_dict = frappe.form_dict
	context.title = 'Service Providers'

	if frappe.form_dict.country:
		context.parents = [dict(label='All Service Providers',
			route='service-providers', title='All Service Providers')]

	# Filters shared by all tiers; each tier adds its own member filter
	# on a copy (see _providers_for).
	base_filters = dict(show_in_website=1)
	if frappe.form_dict.country:
		base_filters['country'] = frappe.form_dict.country

	context.gold_members = _providers_for(_member_names('Gold'), base_filters)
	if context.gold_members:
		context.has_gold_member = 1
	else:
		context.gold_members = [dict(
			title='Your Company',
			introduction='Become a Gold Member today and get your company featured here',
			image='/assets/foundation/img/gold.png',
			route='/members',
			placeholder=True
		)]

	context.silver_members = _providers_for(_member_names('Silver'), base_filters)
	if context.silver_members:
		context.has_silver_member = 1
	else:
		context.silver_members = [dict(
			title='Your Company',
			introduction='Become a silver Member today and get your company featured here',
			image='/assets/foundation/img/silver.png',
			route='/members',
			placeholder=True
		)]

	context.individual_members = _providers_for(_member_names('Individual'), base_filters)
	if context.individual_members:
		context.has_individual_member = 1
	else:
		context.individual_members = [dict(
			title='Your Company',
			introduction='Become an invidual member to list here',
			route='/members'
		)]
|
[
"frappe.get_all"
] |
[((611, 719), 'frappe.get_all', 'frappe.get_all', (['"""Service Provider"""', '"""title, introduction, `image`, route, website_url, country"""', 'filters'], {}), "('Service Provider',\n 'title, introduction, `image`, route, website_url, country', filters)\n", (625, 719), False, 'import frappe\n'), ((1230, 1338), 'frappe.get_all', 'frappe.get_all', (['"""Service Provider"""', '"""title, introduction, `image`, route, website_url, country"""', 'filters'], {}), "('Service Provider',\n 'title, introduction, `image`, route, website_url, country', filters)\n", (1244, 1338), False, 'import frappe\n'), ((1885, 1993), 'frappe.get_all', 'frappe.get_all', (['"""Service Provider"""', '"""title, introduction, `image`, route, website_url, country"""', 'filters'], {}), "('Service Provider',\n 'title, introduction, `image`, route, website_url, country', filters)\n", (1899, 1993), False, 'import frappe\n')]
|
from contextlib import contextmanager
from filecmp import cmp, dircmp
from pathlib import Path
from shutil import copyfile, copytree, rmtree
import pytest
from demisto_sdk.commands.common.constants import PACKS_DIR, TEST_PLAYBOOKS_DIR
from demisto_sdk.commands.common.logger import logging_setup
from demisto_sdk.commands.common.tools import src_root
from TestSuite.test_tools import ChangeCWD
# Locations of the test fixtures, resolved relative to the demisto-sdk
# source root so the tests work from any working directory.
TEST_DATA = src_root() / 'tests' / 'test_files'
TEST_CONTENT_REPO = TEST_DATA / 'content_slim'  # slimmed-down content repo fixture
TEST_PRIVATE_CONTENT_REPO = TEST_DATA / 'private_content_slim'  # created on demand by the private_repo fixture
UNIT_TEST_DATA = (src_root() / 'commands' / 'create_artifacts' / 'tests' / 'data')
COMMON_SERVER = UNIT_TEST_DATA / 'common_server'
ARTIFACTS_EXPECTED_RESULTS = TEST_DATA / 'artifacts'  # golden outputs the tests compare against
def same_folders(src1, src2):
    """Return True when the two directory trees contain the same file names.

    Only names are compared (dircmp's shallow listing), not file contents;
    a name present on one side only - at any depth - makes the result False.
    """
    dcmp = dircmp(src1, src2)
    if dcmp.left_only or dcmp.right_only:
        return False
    # BUG FIX: the recursive result was previously discarded, so mismatches
    # below the top level went unnoticed and the function always returned
    # True whenever the top level matched.
    return all(same_folders(sub.left, sub.right) for sub in dcmp.subdirs.values())
@contextmanager
def destroy_by_ext(suffix: str):
    """Temporarily corrupt one fixture file (json or yaml) in the content repo.

    On enter the chosen file's text is replaced with garbage; on exit the
    original text is restored, even if the managed body raised.
    """
    if suffix == 'json':
        target = TEST_CONTENT_REPO / "Packs" / "Sample01" / "Classifiers" / "classifier-sample_new.json"
    else:
        target = TEST_CONTENT_REPO / "Packs" / "Sample01" / "TestPlaybooks" / "playbook-sample_test1.yml"
    original_text = target.read_text()
    target.write_text("{123dfdsf,}\nfdsfdsf")
    try:
        yield
    finally:
        target.write_text(original_text)
@contextmanager
def duplicate_file():
    """Temporarily duplicate a test playbook into a second pack.

    The copy keeps the same file name but lives in Sample02; it is
    removed again on exit.
    """
    source = TEST_CONTENT_REPO / PACKS_DIR / "Sample01" / TEST_PLAYBOOKS_DIR / "playbook-sample_test1.yml"
    duplicate = TEST_CONTENT_REPO / PACKS_DIR / "Sample02" / TEST_PLAYBOOKS_DIR / "playbook-sample_test1.yml"
    try:
        copyfile(source, duplicate)
        yield
    finally:
        duplicate.unlink()
@contextmanager
def temp_dir():
    """Yield a scratch directory under the unit-test data dir; removed on exit."""
    scratch = UNIT_TEST_DATA / 'temp'
    try:
        scratch.mkdir(parents=True, exist_ok=True)
        yield scratch
    finally:
        rmtree(scratch)
@pytest.fixture()
def mock_git(mocker):
    """Point Content's git repo at the slim test content checkout.

    Patches Content.git so working_tree_dir resolves to TEST_CONTENT_REPO
    instead of the real repository.
    """
    from demisto_sdk.commands.common.content import Content
    # Mock git working directory
    mocker.patch.object(Content, 'git')
    Content.git().working_tree_dir = TEST_CONTENT_REPO
    yield
@pytest.fixture()
def private_repo():
    """Yield a private-content variant of the test content repo.

    Built by copying the regular content fixture and stripping its
    top-level TestPlaybooks directory; removed again on teardown.
    """
    try:
        copytree(TEST_CONTENT_REPO, TEST_PRIVATE_CONTENT_REPO)
        rmtree(TEST_PRIVATE_CONTENT_REPO / TEST_PLAYBOOKS_DIR)
        yield TEST_PRIVATE_CONTENT_REPO
    finally:
        rmtree(TEST_PRIVATE_CONTENT_REPO)
def test_modify_common_server_constants():
    """modify_common_server_constants should rewrite the version/branch globals.

    CommonServerPython.py starts with CONTENT_RELEASE_VERSION = '0.0.0' and
    CONTENT_BRANCH_NAME = ''; after patching with version 6.0.0 and branch
    'test' it must equal the pre-modified expected copy.  The original text
    is restored afterwards so the fixture stays clean.
    """
    from demisto_sdk.commands.create_artifacts.content_artifacts_creator import \
        modify_common_server_constants
    source = COMMON_SERVER / 'CommonServerPython.py'
    expected = COMMON_SERVER / 'CommonServerPython_modified.py'
    original_text = source.read_text()
    modify_common_server_constants(source, '6.0.0', 'test')
    assert cmp(source, expected)
    source.write_text(original_text)
def test_dump_pack(mock_git):
    """Dumping a single pack should reproduce the expected artifact layout."""
    import demisto_sdk.commands.create_artifacts.content_artifacts_creator as cca
    from demisto_sdk.commands.create_artifacts.content_artifacts_creator import (
        ArtifactsManager, Pack, create_dirs, dump_pack)
    cca.logger = logging_setup(0)
    with temp_dir() as temp:
        manager = ArtifactsManager(artifacts_path=temp, content_version='6.0.0',
                                  zip=False, suffix='', cpus=1, packs=False)
        create_dirs(artifact_manager=manager)
        dump_pack(artifact_manager=manager, pack=Pack(TEST_CONTENT_REPO / PACKS_DIR / 'Sample01'))
        assert same_folders(src1=temp / 'content_packs' / 'Sample01',
                            src2=ARTIFACTS_EXPECTED_RESULTS / 'content' / 'content_packs' / 'Sample01')
def test_create_content_artifacts(mock_git):
    """Full artifact creation should succeed and match the golden output."""
    from demisto_sdk.commands.create_artifacts.content_artifacts_creator import \
        ArtifactsManager
    with temp_dir() as temp:
        manager = ArtifactsManager(artifacts_path=temp, content_version='6.0.0',
                                  zip=False, suffix='', cpus=1, packs=False)
        assert manager.create_content_artifacts() == 0
        assert same_folders(temp, ARTIFACTS_EXPECTED_RESULTS / 'content')
def test_create_private_content_artifacts(private_repo):
    """Artifact creation over a private repo should match the private golden output."""
    from demisto_sdk.commands.common.content import Content
    from demisto_sdk.commands.create_artifacts.content_artifacts_creator import \
        ArtifactsManager
    with temp_dir() as temp:
        manager = ArtifactsManager(artifacts_path=temp, content_version='6.0.0',
                                  zip=False, suffix='', cpus=1, packs=False)
        # Swap in the private content checkout before generating.
        manager.content = Content(private_repo)
        manager.packs = manager.content.packs
        exit_code = manager.create_content_artifacts()
        assert same_folders(temp, ARTIFACTS_EXPECTED_RESULTS / 'private')
        assert exit_code == 0
@pytest.mark.parametrize(argnames="suffix", argvalues=["yml", "json"])
def test_malformed_file_failure(suffix: str, mock_git):
from demisto_sdk.commands.create_artifacts.content_artifacts_creator import \
ArtifactsManager
with temp_dir() as temp:
config = ArtifactsManager(artifacts_path=temp,
content_version='6.0.0',
zip=False,
suffix='',
cpus=1,
packs=False)
with destroy_by_ext(suffix):
exit_code = config.create_content_artifacts()
assert exit_code == 1
def test_duplicate_file_failure(mock_git):
    """Two packs carrying the same playbook file name should abort with exit code 1."""
    from demisto_sdk.commands.create_artifacts.content_artifacts_creator import \
        ArtifactsManager
    with temp_dir() as temp:
        manager = ArtifactsManager(artifacts_path=temp, content_version='6.0.0',
                                  zip=False, suffix='', cpus=1, packs=False)
        with duplicate_file():
            exit_code = manager.create_content_artifacts()
        assert exit_code == 1
@pytest.mark.parametrize('key, tool', [('some_key', False), ('', True)])
def test_sign_packs_failure(repo, capsys, key, tool):
    """sign_packs must log an error when only half the signing config is given.

    Parametrized over the two half-configured combinations: a signature
    key without the signing tool, and the tool without a key.  Either way
    the "Failed to sign packs" message must reach the captured output.
    """
    import demisto_sdk.commands.create_artifacts.content_artifacts_creator as cca
    from demisto_sdk.commands.create_artifacts.content_artifacts_creator import (
        ArtifactsManager, sign_packs)
    cca.logger = logging_setup(2)

    with ChangeCWD(repo.path):
        with temp_dir() as temp:
            manager = ArtifactsManager(artifacts_path=temp, content_version='6.0.0',
                                      zip=False, suffix='', cpus=1, packs=True,
                                      signature_key=key)
            if tool:
                with open('./tool', 'w') as tool_file:
                    tool_file.write('some tool')
                manager.signDirectory = Path(temp / 'tool')
            sign_packs(manager)

    captured = capsys.readouterr()
    assert 'Failed to sign packs. In order to do so, you need to provide both signature_key and ' \
           'sign_directory arguments.' in captured.out
[
"demisto_sdk.commands.common.tools.src_root",
"demisto_sdk.commands.create_artifacts.content_artifacts_creator.Pack",
"demisto_sdk.commands.common.content.Content",
"shutil.copytree",
"shutil.rmtree",
"TestSuite.test_tools.ChangeCWD",
"demisto_sdk.commands.create_artifacts.content_artifacts_creator.sign_packs",
"demisto_sdk.commands.create_artifacts.content_artifacts_creator.modify_common_server_constants",
"pytest.fixture",
"demisto_sdk.commands.create_artifacts.content_artifacts_creator.create_dirs",
"demisto_sdk.commands.create_artifacts.content_artifacts_creator.ArtifactsManager",
"pathlib.Path",
"demisto_sdk.commands.common.content.Content.git",
"shutil.copyfile",
"pytest.mark.parametrize",
"filecmp.cmp",
"demisto_sdk.commands.common.logger.logging_setup",
"filecmp.dircmp"
] |
[((2532, 2548), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (2546, 2548), False, 'import pytest\n'), ((2804, 2820), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (2818, 2820), False, 'import pytest\n'), ((6746, 6815), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ([], {'argnames': '"""suffix"""', 'argvalues': "['yml', 'json']"}), "(argnames='suffix', argvalues=['yml', 'json'])\n", (6769, 6815), False, 'import pytest\n'), ((8018, 8089), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""key, tool"""', "[('some_key', False), ('', True)]"], {}), "('key, tool', [('some_key', False), ('', True)])\n", (8041, 8089), False, 'import pytest\n'), ((835, 853), 'filecmp.dircmp', 'dircmp', (['src1', 'src2'], {}), '(src1, src2)\n', (841, 853), False, 'from filecmp import cmp, dircmp\n'), ((4237, 4297), 'demisto_sdk.commands.create_artifacts.content_artifacts_creator.modify_common_server_constants', 'modify_common_server_constants', (['path_before', '"""6.0.0"""', '"""test"""'], {}), "(path_before, '6.0.0', 'test')\n", (4267, 4297), False, 'from demisto_sdk.commands.create_artifacts.content_artifacts_creator import modify_common_server_constants\n'), ((4310, 4341), 'filecmp.cmp', 'cmp', (['path_before', 'path_excepted'], {}), '(path_before, path_excepted)\n', (4313, 4341), False, 'from filecmp import cmp, dircmp\n'), ((4650, 4666), 'demisto_sdk.commands.common.logger.logging_setup', 'logging_setup', (['(0)'], {}), '(0)\n', (4663, 4666), False, 'from demisto_sdk.commands.common.logger import logging_setup\n'), ((8588, 8604), 'demisto_sdk.commands.common.logger.logging_setup', 'logging_setup', (['(2)'], {}), '(2)\n', (8601, 8604), False, 'from demisto_sdk.commands.common.logger import logging_setup\n'), ((9314, 9342), 'demisto_sdk.commands.create_artifacts.content_artifacts_creator.sign_packs', 'sign_packs', (['artifact_manager'], {}), '(artifact_manager)\n', (9324, 9342), False, 'from demisto_sdk.commands.create_artifacts.content_artifacts_creator 
import ArtifactsManager, sign_packs\n'), ((409, 419), 'demisto_sdk.commands.common.tools.src_root', 'src_root', ([], {}), '()\n', (417, 419), False, 'from demisto_sdk.commands.common.tools import src_root\n'), ((2135, 2159), 'shutil.copyfile', 'copyfile', (['file', 'new_file'], {}), '(file, new_file)\n', (2143, 2159), False, 'from shutil import copyfile, copytree, rmtree\n'), ((2516, 2528), 'shutil.rmtree', 'rmtree', (['temp'], {}), '(temp)\n', (2522, 2528), False, 'from shutil import copyfile, copytree, rmtree\n'), ((2740, 2753), 'demisto_sdk.commands.common.content.Content.git', 'Content.git', ([], {}), '()\n', (2751, 2753), False, 'from demisto_sdk.commands.common.content import Content\n'), ((3096, 3150), 'shutil.copytree', 'copytree', (['TEST_CONTENT_REPO', 'TEST_PRIVATE_CONTENT_REPO'], {}), '(TEST_CONTENT_REPO, TEST_PRIVATE_CONTENT_REPO)\n', (3104, 3150), False, 'from shutil import copyfile, copytree, rmtree\n'), ((3234, 3259), 'shutil.rmtree', 'rmtree', (['test_playbook_dir'], {}), '(test_playbook_dir)\n', (3240, 3259), False, 'from shutil import copyfile, copytree, rmtree\n'), ((3321, 3354), 'shutil.rmtree', 'rmtree', (['TEST_PRIVATE_CONTENT_REPO'], {}), '(TEST_PRIVATE_CONTENT_REPO)\n', (3327, 3354), False, 'from shutil import copyfile, copytree, rmtree\n'), ((4714, 4823), 'demisto_sdk.commands.create_artifacts.content_artifacts_creator.ArtifactsManager', 'ArtifactsManager', ([], {'artifacts_path': 'temp', 'content_version': '"""6.0.0"""', 'zip': '(False)', 'suffix': '""""""', 'cpus': '(1)', 'packs': '(False)'}), "(artifacts_path=temp, content_version='6.0.0', zip=False,\n suffix='', cpus=1, packs=False)\n", (4730, 4823), False, 'from demisto_sdk.commands.create_artifacts.content_artifacts_creator import ArtifactsManager, sign_packs\n'), ((4999, 5035), 'demisto_sdk.commands.create_artifacts.content_artifacts_creator.create_dirs', 'create_dirs', ([], {'artifact_manager': 'config'}), '(artifact_manager=config)\n', (5010, 5035), False, 'from 
demisto_sdk.commands.create_artifacts.content_artifacts_creator import ArtifactsManager, Pack, create_dirs, dump_pack\n'), ((5509, 5618), 'demisto_sdk.commands.create_artifacts.content_artifacts_creator.ArtifactsManager', 'ArtifactsManager', ([], {'artifacts_path': 'temp', 'content_version': '"""6.0.0"""', 'zip': '(False)', 'suffix': '""""""', 'cpus': '(1)', 'packs': '(False)'}), "(artifacts_path=temp, content_version='6.0.0', zip=False,\n suffix='', cpus=1, packs=False)\n", (5525, 5618), False, 'from demisto_sdk.commands.create_artifacts.content_artifacts_creator import ArtifactsManager, sign_packs\n'), ((6217, 6326), 'demisto_sdk.commands.create_artifacts.content_artifacts_creator.ArtifactsManager', 'ArtifactsManager', ([], {'artifacts_path': 'temp', 'content_version': '"""6.0.0"""', 'zip': '(False)', 'suffix': '""""""', 'cpus': '(1)', 'packs': '(False)'}), "(artifacts_path=temp, content_version='6.0.0', zip=False,\n suffix='', cpus=1, packs=False)\n", (6233, 6326), False, 'from demisto_sdk.commands.create_artifacts.content_artifacts_creator import ArtifactsManager, sign_packs\n'), ((6518, 6539), 'demisto_sdk.commands.common.content.Content', 'Content', (['private_repo'], {}), '(private_repo)\n', (6525, 6539), False, 'from demisto_sdk.commands.common.content import Content\n'), ((7025, 7134), 'demisto_sdk.commands.create_artifacts.content_artifacts_creator.ArtifactsManager', 'ArtifactsManager', ([], {'artifacts_path': 'temp', 'content_version': '"""6.0.0"""', 'zip': '(False)', 'suffix': '""""""', 'cpus': '(1)', 'packs': '(False)'}), "(artifacts_path=temp, content_version='6.0.0', zip=False,\n suffix='', cpus=1, packs=False)\n", (7041, 7134), False, 'from demisto_sdk.commands.create_artifacts.content_artifacts_creator import ArtifactsManager, sign_packs\n'), ((7622, 7731), 'demisto_sdk.commands.create_artifacts.content_artifacts_creator.ArtifactsManager', 'ArtifactsManager', ([], {'artifacts_path': 'temp', 'content_version': '"""6.0.0"""', 'zip': '(False)', 
'suffix': '""""""', 'cpus': '(1)', 'packs': '(False)'}), "(artifacts_path=temp, content_version='6.0.0', zip=False,\n suffix='', cpus=1, packs=False)\n", (7638, 7731), False, 'from demisto_sdk.commands.create_artifacts.content_artifacts_creator import ArtifactsManager, sign_packs\n'), ((8615, 8635), 'TestSuite.test_tools.ChangeCWD', 'ChangeCWD', (['repo.path'], {}), '(repo.path)\n', (8624, 8635), False, 'from TestSuite.test_tools import ChangeCWD\n'), ((8701, 8828), 'demisto_sdk.commands.create_artifacts.content_artifacts_creator.ArtifactsManager', 'ArtifactsManager', ([], {'artifacts_path': 'temp', 'content_version': '"""6.0.0"""', 'zip': '(False)', 'suffix': '""""""', 'cpus': '(1)', 'packs': '(True)', 'signature_key': 'key'}), "(artifacts_path=temp, content_version='6.0.0', zip=False,\n suffix='', cpus=1, packs=True, signature_key=key)\n", (8717, 8828), False, 'from demisto_sdk.commands.create_artifacts.content_artifacts_creator import ArtifactsManager, sign_packs\n'), ((573, 583), 'demisto_sdk.commands.common.tools.src_root', 'src_root', ([], {}), '()\n', (581, 583), False, 'from demisto_sdk.commands.common.tools import src_root\n'), ((5084, 5132), 'demisto_sdk.commands.create_artifacts.content_artifacts_creator.Pack', 'Pack', (["(TEST_CONTENT_REPO / PACKS_DIR / 'Sample01')"], {}), "(TEST_CONTENT_REPO / PACKS_DIR / 'Sample01')\n", (5088, 5132), False, 'from demisto_sdk.commands.create_artifacts.content_artifacts_creator import ArtifactsManager, Pack, create_dirs, dump_pack\n'), ((9289, 9308), 'pathlib.Path', 'Path', (["(temp / 'tool')"], {}), "(temp / 'tool')\n", (9293, 9308), False, 'from pathlib import Path\n')]
|
from weather import Weather, Unit
import inkyphat
from PIL import Image, ImageDraw, ImageFont
weather = Weather(unit=Unit.CELSIUS)
location = weather.lookup_by_location('west lafayette')
condition = location.condition
day, date, month, year, time, am_pm, zone = condition.date.split(" ")
inkyphat.set_colour("yellow")
inkyphat.set_border(inkyphat.BLACK)
img = Image.open("resources/ShibaInu_resources/cute.png")
draw = ImageDraw.Draw(img)
icon_image = Image.open("resources/Weather/"+str(condition.code)+".png")
date_font = ImageFont.truetype(inkyphat.fonts.FredokaOne, 14)
font = ImageFont.truetype(inkyphat.fonts.FredokaOne, 12)
draw.text((18, 12), day + " " + date + " " + month + " " + year, inkyphat.BLACK, font=date_font)
draw.text((18, 32), time + " " + am_pm + " " + zone, inkyphat.BLACK, font=date_font)
draw.text((50, 52), condition.text, inkyphat.BLACK, font=font)
draw.text((50, 72), condition.temp + " " + chr(176) + "C", inkyphat.BLACK, font=font)
img.paste(icon_image, (18, 52))
# Display the weather data on Inky pHAT
inkyphat.set_image(img,colswap=[2,0,1])
inkyphat.show()
|
[
"inkyphat.set_colour",
"inkyphat.set_border",
"PIL.Image.open",
"PIL.ImageFont.truetype",
"PIL.ImageDraw.Draw",
"inkyphat.show",
"inkyphat.set_image",
"weather.Weather"
] |
[((105, 131), 'weather.Weather', 'Weather', ([], {'unit': 'Unit.CELSIUS'}), '(unit=Unit.CELSIUS)\n', (112, 131), False, 'from weather import Weather, Unit\n'), ((291, 320), 'inkyphat.set_colour', 'inkyphat.set_colour', (['"""yellow"""'], {}), "('yellow')\n", (310, 320), False, 'import inkyphat\n'), ((321, 356), 'inkyphat.set_border', 'inkyphat.set_border', (['inkyphat.BLACK'], {}), '(inkyphat.BLACK)\n', (340, 356), False, 'import inkyphat\n'), ((364, 415), 'PIL.Image.open', 'Image.open', (['"""resources/ShibaInu_resources/cute.png"""'], {}), "('resources/ShibaInu_resources/cute.png')\n", (374, 415), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((423, 442), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['img'], {}), '(img)\n', (437, 442), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((529, 578), 'PIL.ImageFont.truetype', 'ImageFont.truetype', (['inkyphat.fonts.FredokaOne', '(14)'], {}), '(inkyphat.fonts.FredokaOne, 14)\n', (547, 578), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((586, 635), 'PIL.ImageFont.truetype', 'ImageFont.truetype', (['inkyphat.fonts.FredokaOne', '(12)'], {}), '(inkyphat.fonts.FredokaOne, 12)\n', (604, 635), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((1042, 1084), 'inkyphat.set_image', 'inkyphat.set_image', (['img'], {'colswap': '[2, 0, 1]'}), '(img, colswap=[2, 0, 1])\n', (1060, 1084), False, 'import inkyphat\n'), ((1082, 1097), 'inkyphat.show', 'inkyphat.show', ([], {}), '()\n', (1095, 1097), False, 'import inkyphat\n')]
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# File : functional.py
# Author : <NAME>
# Email : <EMAIL>
# Date : 03/03/2018
#
# This file is part of Jacinle.
# Distributed under terms of the MIT license.
import math
from PIL import Image
import numpy as np
import torchvision.transforms.functional as TF
import jactorch.transforms.image.functional as jac_tf
from jacinle.utils.argument import get_2dshape
def normalize_coor(img, coor):
    """Scale (x, y) pixel coordinates into the 0..1 range of ``img``.

    Returns the image unchanged together with a normalized copy of
    ``coor`` (the input array is not mutated).
    """
    scaled = coor.copy()
    scaled[:, 0] /= img.width
    scaled[:, 1] /= img.height
    return img, scaled
def denormalize_coor(img, coor):
    """Scale 0/1-normalized (x, y) coordinates back into pixel units.

    Returns the image unchanged together with a denormalized copy of
    ``coor`` (the input array is not mutated).
    """
    pixels = coor.copy()
    pixels[:, 0] *= img.width
    pixels[:, 1] *= img.height
    return img, pixels
def crop(img, coor, i, j, h, w):
    """Crop ``img`` to the (i, j, h, w) box and remap normalized coords.

    ``i``/``j`` are the top/left pixel offsets of the crop, ``h``/``w``
    its pixel size; ``coor`` is shifted and rescaled so it stays
    normalized relative to the cropped image.
    """
    remapped = coor.copy()
    remapped[:, 0] = (remapped[:, 0] - j / img.width) * (img.width / w)
    remapped[:, 1] = (remapped[:, 1] - i / img.height) * (img.height / h)
    return TF.crop(img, i, j, h, w), remapped
def center_crop(img, coor, output_size):
    """Crop the central ``output_size`` (h, w) region, remapping coords."""
    th, tw = get_2dshape(output_size)
    w, h = img.size
    top = int(round((h - th) / 2.))
    left = int(round((w - tw) / 2.))
    return crop(img, coor, top, left, th, tw)
def pad(img, coor, padding, mode='constant', fill=0):
    """Pad ``img`` and remap normalized coords onto the padded canvas.

    ``padding`` may be a single int (all four sides), a (horizontal,
    vertical) pair, or an explicit 4-tuple (left, top, right, bottom).
    """
    if isinstance(padding, int):
        padding = (padding,) * 4
    elif len(padding) == 2:
        padding = (padding[0], padding[1], padding[0], padding[1])
    else:
        assert len(padding) == 4
    padded = jac_tf.pad(img, padding, mode=mode, fill=fill)
    remapped = coor.copy()
    remapped[:, 0] = (remapped[:, 0] + padding[0] / img.width) * (img.width / padded.width)
    remapped[:, 1] = (remapped[:, 1] + padding[1] / img.height) * (img.height / padded.height)
    return padded, remapped
def hflip(img, coor):
    """Flip the image horizontally; mirror the normalized x coordinates."""
    mirrored = coor.copy()
    mirrored[:, 0] = 1 - mirrored[:, 0]
    return TF.hflip(img), mirrored
def vflip(img, coor):
    """Flip the image vertically; mirror the normalized y coordinates."""
    mirrored = coor.copy()
    mirrored[:, 1] = 1 - mirrored[:, 1]
    return TF.vflip(img), mirrored
def resize(img, coor, size, interpolation=Image.BILINEAR):
    """Resize the image; 0/1-normalized coordinates need no adjustment."""
    # Assuming coordinates are 0/1-normalized.
    return TF.resize(img, size, interpolation=interpolation), coor
def resized_crop(img, coor, i, j, h, w, size, interpolation=Image.BILINEAR):
    """Crop the (i, j, h, w) box, then resize to ``size``, keeping coords."""
    cropped, coor = crop(img, coor, i, j, h, w)
    return resize(cropped, coor, size, interpolation)
def refresh_valid(img, coor, force=False):
    """Normalize coordinates to (x, y, valid) form and zero out invalid ones.

    ``coor`` is an (n, 2) or (n, 3) array of pixel coordinates.  With a
    2-column input, ``force=True`` appends a validity column of ones;
    otherwise the input is returned untouched.  For 3-column input, any
    point that is marked invalid (v != 1) or lies outside the image is
    replaced by (0, 0, 0).

    Returns ``(img, coor)`` where the coordinate array is float32.
    """
    if coor.shape[1] == 2:
        if force:
            # Bug fix: np.ones_like(coor[:, 0]) is 1-D and cannot be
            # concatenated with the (n, 2) array along axis=1 (raises
            # ValueError); slicing with [:, :1] keeps the column 2-D.
            coor = np.concatenate([coor, np.ones_like(coor[:, :1])], axis=1)
        else:
            return img, coor
    assert coor.shape[1] == 3, 'Support only (x, y, valid) or (x, y) typed coordinates.'
    out = []
    for x, y, v in coor:
        valid = (v == 1) and (0 <= x < img.width) and (0 <= y < img.height)
        out.append((x, y, v) if valid else (0., 0., 0.))
    return img, np.array(out, dtype='float32')
def rotate(img, coor, angle, resample, crop_, expand, center=None, translate=None):
    """Rotate the image and remap its normalized coordinates.

    Rotates with torchvision, then pushes every point through the same
    affine transform in pixel space, re-normalizes against the rotated
    image, and finally applies the extra crop computed for ``crop_``.
    """
    assert translate is None
    rotated = TF.rotate(img, angle, resample=resample, expand=expand, center=center)
    matrix, extra_crop = get_rotation_matrix(img, angle, crop_, expand, center, translate)
    _, coor = denormalize_coor(img, coor)
    for idx in range(coor.shape[0]):
        coor[idx, :2] = apply_affine_transform(*coor[idx, :2], matrix)
    _, coor = normalize_coor(rotated, coor)
    if extra_crop is not None:
        rotated, coor = crop(rotated, coor, *extra_crop)
    return rotated, coor
def pad_multiple_of(img, coor, multiple, mode='constant', fill=0):
    """Pad on the right/bottom so both image dimensions divide ``multiple``.

    When the image already satisfies the constraint, the inputs are
    returned unchanged.
    """
    h, w = img.height, img.width
    hh = ((h + multiple - 1) // multiple) * multiple
    ww = ((w + multiple - 1) // multiple) * multiple
    if (hh, ww) != (h, w):
        return pad(img, coor, (0, 0, ww - w, hh - h), mode=mode, fill=fill)
    return img, coor
def get_rotation_matrix(image, angle, crop, expand, center, translate):
    """Build the flat 2x3 affine matrix used to remap coordinates for rotation.

    Returns ``(matrix, extra_crop)``: ``matrix`` is ``[a, b, c, d, e, f]``
    describing rotation by ``angle`` degrees about ``center`` (default:
    image center) plus ``translate``; ``extra_crop`` is ``None`` unless
    ``crop`` is requested, in which case it is a crop box centered on the
    image.  ``crop`` and ``expand`` are mutually exclusive.
    """
    w, h = image.size
    if translate is None:
        translate = (0, 0)
    if center is None:
        center = (w / 2.0, h / 2.0)
    angle = math.radians(angle % 360)
    # Rounding to 15 decimals snaps cos/sin of right angles to exact 0/±1.
    matrix = [
        round(math.cos(angle), 15), round(math.sin(angle), 15), 0.0,
        round(-math.sin(angle), 15), round(math.cos(angle), 15), 0.0
    ]
    # Fold "translate to origin, rotate, translate back (+offset)" into the
    # matrix's translation components.
    matrix[2], matrix[5] = apply_affine_transform(-center[0], -center[1], matrix)
    matrix[2] += center[0] + translate[0]
    matrix[5] += center[1] + translate[1]
    if crop or expand:
        # Transform the four image corners; xx/yy end up sorted so the
        # middle pair bounds the inscribed box and the outer pair bounds
        # the circumscribed (expanded) box.
        xx = []
        yy = []
        for x, y in ((0, 0), (w, 0), (w, h), (0, h)):
            x, y = apply_affine_transform(x, y, matrix)
            xx.append(x)
            yy.append(y)
        xx.sort()
        yy.sort()
    extra_crop = None
    if crop:
        assert not expand, 'Cannot use both expand and crop.'
        nw = int(math.ceil(xx[2]) - math.floor(xx[1]))
        nh = int(math.ceil(yy[2]) - math.floor(yy[1]))
        # CAUTION! extra_crop is of format (dy, dx, h, w)
        extra_crop = ((h - nh) // 2, (w - nw) // 2, nh, nw)
    if expand:
        nw = int(math.ceil(xx[3]) - math.floor(xx[0]))
        nh = int(math.ceil(yy[3]) - math.floor(yy[0]))
        # Shift so the rotated content stays centered in the expanded frame.
        matrix[2] += (nw - w) / 2.
        matrix[5] += (nh - h) / 2.
    return matrix, extra_crop
def apply_affine_transform(x, y, matrix):
    """Apply a flat 2x3 affine matrix (a, b, c, d, e, f) to point (x, y)."""
    a, b, c, d, e, f = matrix
    return a * x + b * y + c, d * x + e * y + f
|
[
"numpy.ones_like",
"torchvision.transforms.functional.rotate",
"torchvision.transforms.functional.hflip",
"math.radians",
"math.ceil",
"jactorch.transforms.image.functional.pad",
"torchvision.transforms.functional.resize",
"math.floor",
"math.sin",
"torchvision.transforms.functional.crop",
"numpy.array",
"torchvision.transforms.functional.vflip",
"math.cos",
"jacinle.utils.argument.get_2dshape"
] |
[((978, 1002), 'jacinle.utils.argument.get_2dshape', 'get_2dshape', (['output_size'], {}), '(output_size)\n', (989, 1002), False, 'from jacinle.utils.argument import get_2dshape\n'), ((1454, 1500), 'jactorch.transforms.image.functional.pad', 'jac_tf.pad', (['img', 'padding'], {'mode': 'mode', 'fill': 'fill'}), '(img, padding, mode=mode, fill=fill)\n', (1464, 1500), True, 'import jactorch.transforms.image.functional as jac_tf\n'), ((3023, 3093), 'torchvision.transforms.functional.rotate', 'TF.rotate', (['img', 'angle'], {'resample': 'resample', 'expand': 'expand', 'center': 'center'}), '(img, angle, resample=resample, expand=expand, center=center)\n', (3032, 3093), True, 'import torchvision.transforms.functional as TF\n'), ((4059, 4084), 'math.radians', 'math.radians', (['(angle % 360)'], {}), '(angle % 360)\n', (4071, 4084), False, 'import math\n'), ((886, 910), 'torchvision.transforms.functional.crop', 'TF.crop', (['img', 'i', 'j', 'h', 'w'], {}), '(img, i, j, h, w)\n', (893, 910), True, 'import torchvision.transforms.functional as TF\n'), ((1812, 1825), 'torchvision.transforms.functional.hflip', 'TF.hflip', (['img'], {}), '(img)\n', (1820, 1825), True, 'import torchvision.transforms.functional as TF\n'), ((1922, 1935), 'torchvision.transforms.functional.vflip', 'TF.vflip', (['img'], {}), '(img)\n', (1930, 1935), True, 'import torchvision.transforms.functional as TF\n'), ((2061, 2110), 'torchvision.transforms.functional.resize', 'TF.resize', (['img', 'size'], {'interpolation': 'interpolation'}), '(img, size, interpolation=interpolation)\n', (2070, 2110), True, 'import torchvision.transforms.functional as TF\n'), ((2863, 2893), 'numpy.array', 'np.array', (['out'], {'dtype': '"""float32"""'}), "(out, dtype='float32')\n", (2871, 2893), True, 'import numpy as np\n'), ((4115, 4130), 'math.cos', 'math.cos', (['angle'], {}), '(angle)\n', (4123, 4130), False, 'import math\n'), ((4143, 4158), 'math.sin', 'math.sin', (['angle'], {}), '(angle)\n', (4151, 4158), False, 
'import math\n'), ((4213, 4228), 'math.cos', 'math.cos', (['angle'], {}), '(angle)\n', (4221, 4228), False, 'import math\n'), ((4185, 4200), 'math.sin', 'math.sin', (['angle'], {}), '(angle)\n', (4193, 4200), False, 'import math\n'), ((4879, 4895), 'math.ceil', 'math.ceil', (['xx[2]'], {}), '(xx[2])\n', (4888, 4895), False, 'import math\n'), ((4898, 4915), 'math.floor', 'math.floor', (['xx[1]'], {}), '(xx[1])\n', (4908, 4915), False, 'import math\n'), ((4934, 4950), 'math.ceil', 'math.ceil', (['yy[2]'], {}), '(yy[2])\n', (4943, 4950), False, 'import math\n'), ((4953, 4970), 'math.floor', 'math.floor', (['yy[1]'], {}), '(yy[1])\n', (4963, 4970), False, 'import math\n'), ((5124, 5140), 'math.ceil', 'math.ceil', (['xx[3]'], {}), '(xx[3])\n', (5133, 5140), False, 'import math\n'), ((5143, 5160), 'math.floor', 'math.floor', (['xx[0]'], {}), '(xx[0])\n', (5153, 5160), False, 'import math\n'), ((5179, 5195), 'math.ceil', 'math.ceil', (['yy[3]'], {}), '(yy[3])\n', (5188, 5195), False, 'import math\n'), ((5198, 5215), 'math.floor', 'math.floor', (['yy[0]'], {}), '(yy[0])\n', (5208, 5215), False, 'import math\n'), ((2447, 2471), 'numpy.ones_like', 'np.ones_like', (['coor[:, 0]'], {}), '(coor[:, 0])\n', (2459, 2471), True, 'import numpy as np\n')]
|
from engine.Database import db
from prettytable import PrettyTable
from utils import GeneralHelper
class Project:
def __init__(self, ref, name=None, database=None):
self.ref_name = GeneralHelper.prepare_string(ref)
if name:
self.name = GeneralHelper.prepare_name(name)
self.name = name
self.database = database
|
[
"utils.GeneralHelper.prepare_string",
"utils.GeneralHelper.prepare_name"
] |
[((196, 229), 'utils.GeneralHelper.prepare_string', 'GeneralHelper.prepare_string', (['ref'], {}), '(ref)\n', (224, 229), False, 'from utils import GeneralHelper\n'), ((271, 303), 'utils.GeneralHelper.prepare_name', 'GeneralHelper.prepare_name', (['name'], {}), '(name)\n', (297, 303), False, 'from utils import GeneralHelper\n')]
|
# Copyright (c) 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
YAQLIZATION_ATTR = '__yaqlization__'
def yaqlize(class_or_object=None, yaqlize_attributes=True,
            yaqlize_methods=True, yaqlize_indexer=True,
            auto_yaqlize_result=False, whitelist=None, blacklist=None,
            attribute_remapping=None, blacklist_remapped_attributes=True):
    """Attach yaqlization settings to a class or object.

    Usable both as a bare decorator (``@yaqlize``) and as a configured
    decorator/call (``@yaqlize(...)`` or ``yaqlize(target, ...)``).
    Targets that already carry settings are left untouched.
    """
    def func(something):
        if not hasattr(something, YAQLIZATION_ATTR):
            setattr(something, YAQLIZATION_ATTR, build_yaqlization_settings(
                yaqlize_attributes=yaqlize_attributes,
                yaqlize_methods=yaqlize_methods,
                yaqlize_indexer=yaqlize_indexer,
                auto_yaqlize_result=auto_yaqlize_result,
                whitelist=whitelist,
                blacklist=blacklist,
                attribute_remapping=attribute_remapping,
                # Bug fix: this flag was accepted but never forwarded, so
                # passing blacklist_remapped_attributes=False had no effect.
                blacklist_remapped_attributes=blacklist_remapped_attributes,
            ))
        return something

    if class_or_object is None:
        return func
    else:
        return func(class_or_object)
def get_yaqlization_settings(class_or_object):
    """Return the yaqlization settings attached to the target, or None."""
    return getattr(class_or_object, YAQLIZATION_ATTR, None)
def is_yaqlized(class_or_object):
    """Return True if the target carries yaqlization settings."""
    return hasattr(class_or_object, YAQLIZATION_ATTR)
def build_yaqlization_settings(
        yaqlize_attributes=True, yaqlize_methods=True, yaqlize_indexer=True,
        auto_yaqlize_result=False, whitelist=None, blacklist=None,
        attribute_remapping=None, blacklist_remapped_attributes=True):
    """Normalize yaqlization options into the settings dict stored on targets.

    ``whitelist``/``blacklist`` become sets and ``attribute_remapping``
    becomes a dict.  When ``blacklist_remapped_attributes`` is true, each
    remapping target name is also added to the blacklist.
    """
    whitelist = set(whitelist or [])
    blacklist = set(blacklist or [])
    attribute_remapping = attribute_remapping or {}
    if blacklist_remapped_attributes:
        for target in six.itervalues(attribute_remapping):
            # A remapping value is either a plain name, or a sequence whose
            # first element is the name.
            blacklist.add(target if isinstance(target, six.string_types)
                          else target[0])
    return {
        'yaqlizeAttributes': yaqlize_attributes,
        'yaqlizeMethods': yaqlize_methods,
        'yaqlizeIndexer': yaqlize_indexer,
        'autoYaqlizeResult': auto_yaqlize_result,
        'whitelist': whitelist,
        'blacklist': blacklist,
        'attributeRemapping': attribute_remapping
    }
|
[
"six.itervalues"
] |
[((2192, 2227), 'six.itervalues', 'six.itervalues', (['attribute_remapping'], {}), '(attribute_remapping)\n', (2206, 2227), False, 'import six\n')]
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['AzureAdlsGen2MountArgs', 'AzureAdlsGen2Mount']
@pulumi.input_type
class AzureAdlsGen2MountArgs:
    def __init__(__self__, *,
                 client_id: pulumi.Input[str],
                 client_secret_key: pulumi.Input[str],
                 client_secret_scope: pulumi.Input[str],
                 container_name: pulumi.Input[str],
                 initialize_file_system: pulumi.Input[bool],
                 mount_name: pulumi.Input[str],
                 storage_account_name: pulumi.Input[str],
                 tenant_id: pulumi.Input[str],
                 cluster_id: Optional[pulumi.Input[str]] = None,
                 directory: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a AzureAdlsGen2Mount resource.

        client_id, client_secret_key, client_secret_scope, container_name,
        initialize_file_system, mount_name, storage_account_name and
        tenant_id are required keyword arguments; cluster_id and directory
        are optional and are only stored when a non-None value is supplied.
        """
        pulumi.set(__self__, "client_id", client_id)
        pulumi.set(__self__, "client_secret_key", client_secret_key)
        pulumi.set(__self__, "client_secret_scope", client_secret_scope)
        pulumi.set(__self__, "container_name", container_name)
        pulumi.set(__self__, "initialize_file_system", initialize_file_system)
        pulumi.set(__self__, "mount_name", mount_name)
        pulumi.set(__self__, "storage_account_name", storage_account_name)
        pulumi.set(__self__, "tenant_id", tenant_id)
        if cluster_id is not None:
            pulumi.set(__self__, "cluster_id", cluster_id)
        if directory is not None:
            pulumi.set(__self__, "directory", directory)
    # Typed getter/setter accessors, generated one pair per argument.
    @property
    @pulumi.getter(name="clientId")
    def client_id(self) -> pulumi.Input[str]:
        return pulumi.get(self, "client_id")
    @client_id.setter
    def client_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "client_id", value)
    @property
    @pulumi.getter(name="clientSecretKey")
    def client_secret_key(self) -> pulumi.Input[str]:
        return pulumi.get(self, "client_secret_key")
    @client_secret_key.setter
    def client_secret_key(self, value: pulumi.Input[str]):
        pulumi.set(self, "client_secret_key", value)
    @property
    @pulumi.getter(name="clientSecretScope")
    def client_secret_scope(self) -> pulumi.Input[str]:
        return pulumi.get(self, "client_secret_scope")
    @client_secret_scope.setter
    def client_secret_scope(self, value: pulumi.Input[str]):
        pulumi.set(self, "client_secret_scope", value)
    @property
    @pulumi.getter(name="containerName")
    def container_name(self) -> pulumi.Input[str]:
        return pulumi.get(self, "container_name")
    @container_name.setter
    def container_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "container_name", value)
    @property
    @pulumi.getter(name="initializeFileSystem")
    def initialize_file_system(self) -> pulumi.Input[bool]:
        return pulumi.get(self, "initialize_file_system")
    @initialize_file_system.setter
    def initialize_file_system(self, value: pulumi.Input[bool]):
        pulumi.set(self, "initialize_file_system", value)
    @property
    @pulumi.getter(name="mountName")
    def mount_name(self) -> pulumi.Input[str]:
        return pulumi.get(self, "mount_name")
    @mount_name.setter
    def mount_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "mount_name", value)
    @property
    @pulumi.getter(name="storageAccountName")
    def storage_account_name(self) -> pulumi.Input[str]:
        return pulumi.get(self, "storage_account_name")
    @storage_account_name.setter
    def storage_account_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "storage_account_name", value)
    @property
    @pulumi.getter(name="tenantId")
    def tenant_id(self) -> pulumi.Input[str]:
        return pulumi.get(self, "tenant_id")
    @tenant_id.setter
    def tenant_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "tenant_id", value)
    @property
    @pulumi.getter(name="clusterId")
    def cluster_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "cluster_id")
    @cluster_id.setter
    def cluster_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "cluster_id", value)
    @property
    @pulumi.getter
    def directory(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "directory")
    @directory.setter
    def directory(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "directory", value)
@pulumi.input_type
class _AzureAdlsGen2MountState:
    def __init__(__self__, *,
                 client_id: Optional[pulumi.Input[str]] = None,
                 client_secret_key: Optional[pulumi.Input[str]] = None,
                 client_secret_scope: Optional[pulumi.Input[str]] = None,
                 cluster_id: Optional[pulumi.Input[str]] = None,
                 container_name: Optional[pulumi.Input[str]] = None,
                 directory: Optional[pulumi.Input[str]] = None,
                 initialize_file_system: Optional[pulumi.Input[bool]] = None,
                 mount_name: Optional[pulumi.Input[str]] = None,
                 source: Optional[pulumi.Input[str]] = None,
                 storage_account_name: Optional[pulumi.Input[str]] = None,
                 tenant_id: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering AzureAdlsGen2Mount resources.

        Every field is optional and only non-None values are stored, so an
        instance may describe a partial resource state.
        """
        if client_id is not None:
            pulumi.set(__self__, "client_id", client_id)
        if client_secret_key is not None:
            pulumi.set(__self__, "client_secret_key", client_secret_key)
        if client_secret_scope is not None:
            pulumi.set(__self__, "client_secret_scope", client_secret_scope)
        if cluster_id is not None:
            pulumi.set(__self__, "cluster_id", cluster_id)
        if container_name is not None:
            pulumi.set(__self__, "container_name", container_name)
        if directory is not None:
            pulumi.set(__self__, "directory", directory)
        if initialize_file_system is not None:
            pulumi.set(__self__, "initialize_file_system", initialize_file_system)
        if mount_name is not None:
            pulumi.set(__self__, "mount_name", mount_name)
        if source is not None:
            pulumi.set(__self__, "source", source)
        if storage_account_name is not None:
            pulumi.set(__self__, "storage_account_name", storage_account_name)
        if tenant_id is not None:
            pulumi.set(__self__, "tenant_id", tenant_id)
    # Typed getter/setter accessors, generated one pair per state field.
    @property
    @pulumi.getter(name="clientId")
    def client_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "client_id")
    @client_id.setter
    def client_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "client_id", value)
    @property
    @pulumi.getter(name="clientSecretKey")
    def client_secret_key(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "client_secret_key")
    @client_secret_key.setter
    def client_secret_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "client_secret_key", value)
    @property
    @pulumi.getter(name="clientSecretScope")
    def client_secret_scope(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "client_secret_scope")
    @client_secret_scope.setter
    def client_secret_scope(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "client_secret_scope", value)
    @property
    @pulumi.getter(name="clusterId")
    def cluster_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "cluster_id")
    @cluster_id.setter
    def cluster_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "cluster_id", value)
    @property
    @pulumi.getter(name="containerName")
    def container_name(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "container_name")
    @container_name.setter
    def container_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "container_name", value)
    @property
    @pulumi.getter
    def directory(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "directory")
    @directory.setter
    def directory(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "directory", value)
    @property
    @pulumi.getter(name="initializeFileSystem")
    def initialize_file_system(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "initialize_file_system")
    @initialize_file_system.setter
    def initialize_file_system(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "initialize_file_system", value)
    @property
    @pulumi.getter(name="mountName")
    def mount_name(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "mount_name")
    @mount_name.setter
    def mount_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "mount_name", value)
    @property
    @pulumi.getter
    def source(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "source")
    @source.setter
    def source(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "source", value)
    @property
    @pulumi.getter(name="storageAccountName")
    def storage_account_name(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "storage_account_name")
    @storage_account_name.setter
    def storage_account_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "storage_account_name", value)
    @property
    @pulumi.getter(name="tenantId")
    def tenant_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "tenant_id")
    @tenant_id.setter
    def tenant_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "tenant_id", value)
class AzureAdlsGen2Mount(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
client_id: Optional[pulumi.Input[str]] = None,
client_secret_key: Optional[pulumi.Input[str]] = None,
client_secret_scope: Optional[pulumi.Input[str]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
container_name: Optional[pulumi.Input[str]] = None,
directory: Optional[pulumi.Input[str]] = None,
initialize_file_system: Optional[pulumi.Input[bool]] = None,
mount_name: Optional[pulumi.Input[str]] = None,
storage_account_name: Optional[pulumi.Input[str]] = None,
tenant_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Create a AzureAdlsGen2Mount resource with the given unique name, props, and options.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: AzureAdlsGen2MountArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Create a AzureAdlsGen2Mount resource with the given unique name, props, and options.
:param str resource_name: The name of the resource.
:param AzureAdlsGen2MountArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(AzureAdlsGen2MountArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
client_id: Optional[pulumi.Input[str]] = None,
client_secret_key: Optional[pulumi.Input[str]] = None,
client_secret_scope: Optional[pulumi.Input[str]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
container_name: Optional[pulumi.Input[str]] = None,
directory: Optional[pulumi.Input[str]] = None,
initialize_file_system: Optional[pulumi.Input[bool]] = None,
mount_name: Optional[pulumi.Input[str]] = None,
storage_account_name: Optional[pulumi.Input[str]] = None,
tenant_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = AzureAdlsGen2MountArgs.__new__(AzureAdlsGen2MountArgs)
if client_id is None and not opts.urn:
raise TypeError("Missing required property 'client_id'")
__props__.__dict__["client_id"] = client_id
if client_secret_key is None and not opts.urn:
raise TypeError("Missing required property 'client_secret_key'")
__props__.__dict__["client_secret_key"] = client_secret_key
if client_secret_scope is None and not opts.urn:
raise TypeError("Missing required property 'client_secret_scope'")
__props__.__dict__["client_secret_scope"] = client_secret_scope
__props__.__dict__["cluster_id"] = cluster_id
if container_name is None and not opts.urn:
raise TypeError("Missing required property 'container_name'")
__props__.__dict__["container_name"] = container_name
__props__.__dict__["directory"] = directory
if initialize_file_system is None and not opts.urn:
raise TypeError("Missing required property 'initialize_file_system'")
__props__.__dict__["initialize_file_system"] = initialize_file_system
if mount_name is None and not opts.urn:
raise TypeError("Missing required property 'mount_name'")
__props__.__dict__["mount_name"] = mount_name
if storage_account_name is None and not opts.urn:
raise TypeError("Missing required property 'storage_account_name'")
__props__.__dict__["storage_account_name"] = storage_account_name
if tenant_id is None and not opts.urn:
raise TypeError("Missing required property 'tenant_id'")
__props__.__dict__["tenant_id"] = tenant_id
__props__.__dict__["source"] = None
super(AzureAdlsGen2Mount, __self__).__init__(
'databricks:index/azureAdlsGen2Mount:AzureAdlsGen2Mount',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
client_id: Optional[pulumi.Input[str]] = None,
client_secret_key: Optional[pulumi.Input[str]] = None,
client_secret_scope: Optional[pulumi.Input[str]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
container_name: Optional[pulumi.Input[str]] = None,
directory: Optional[pulumi.Input[str]] = None,
initialize_file_system: Optional[pulumi.Input[bool]] = None,
mount_name: Optional[pulumi.Input[str]] = None,
source: Optional[pulumi.Input[str]] = None,
storage_account_name: Optional[pulumi.Input[str]] = None,
tenant_id: Optional[pulumi.Input[str]] = None) -> 'AzureAdlsGen2Mount':
"""
Get an existing AzureAdlsGen2Mount resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _AzureAdlsGen2MountState.__new__(_AzureAdlsGen2MountState)
__props__.__dict__["client_id"] = client_id
__props__.__dict__["client_secret_key"] = client_secret_key
__props__.__dict__["client_secret_scope"] = client_secret_scope
__props__.__dict__["cluster_id"] = cluster_id
__props__.__dict__["container_name"] = container_name
__props__.__dict__["directory"] = directory
__props__.__dict__["initialize_file_system"] = initialize_file_system
__props__.__dict__["mount_name"] = mount_name
__props__.__dict__["source"] = source
__props__.__dict__["storage_account_name"] = storage_account_name
__props__.__dict__["tenant_id"] = tenant_id
return AzureAdlsGen2Mount(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="clientId")
    def client_id(self) -> pulumi.Output[str]:
        """Return the ``client_id`` output property of this resource."""
        return pulumi.get(self, "client_id")
    @property
    @pulumi.getter(name="clientSecretKey")
    def client_secret_key(self) -> pulumi.Output[str]:
        """Return the ``client_secret_key`` output property of this resource."""
        return pulumi.get(self, "client_secret_key")
    @property
    @pulumi.getter(name="clientSecretScope")
    def client_secret_scope(self) -> pulumi.Output[str]:
        """Return the ``client_secret_scope`` output property of this resource."""
        return pulumi.get(self, "client_secret_scope")
    @property
    @pulumi.getter(name="clusterId")
    def cluster_id(self) -> pulumi.Output[Optional[str]]:
        """Return the optional ``cluster_id`` output property of this resource."""
        return pulumi.get(self, "cluster_id")
    @property
    @pulumi.getter(name="containerName")
    def container_name(self) -> pulumi.Output[str]:
        """Return the ``container_name`` output property of this resource."""
        return pulumi.get(self, "container_name")
    @property
    @pulumi.getter
    def directory(self) -> pulumi.Output[str]:
        """Return the ``directory`` output property of this resource."""
        return pulumi.get(self, "directory")
    @property
    @pulumi.getter(name="initializeFileSystem")
    def initialize_file_system(self) -> pulumi.Output[bool]:
        """Return the ``initialize_file_system`` output property of this resource."""
        return pulumi.get(self, "initialize_file_system")
    @property
    @pulumi.getter(name="mountName")
    def mount_name(self) -> pulumi.Output[str]:
        """Return the ``mount_name`` output property of this resource."""
        return pulumi.get(self, "mount_name")
    @property
    @pulumi.getter
    def source(self) -> pulumi.Output[str]:
        """Return the ``source`` output property of this resource."""
        return pulumi.get(self, "source")
    @property
    @pulumi.getter(name="storageAccountName")
    def storage_account_name(self) -> pulumi.Output[str]:
        """Return the ``storage_account_name`` output property of this resource."""
        return pulumi.get(self, "storage_account_name")
    @property
    @pulumi.getter(name="tenantId")
    def tenant_id(self) -> pulumi.Output[str]:
        """Return the ``tenant_id`` output property of this resource."""
        return pulumi.get(self, "tenant_id")
|
[
"pulumi.get",
"pulumi.getter",
"pulumi.ResourceOptions",
"pulumi.set"
] |
[((1852, 1882), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""clientId"""'}), "(name='clientId')\n", (1865, 1882), False, 'import pulumi\n'), ((2113, 2150), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""clientSecretKey"""'}), "(name='clientSecretKey')\n", (2126, 2150), False, 'import pulumi\n'), ((2421, 2460), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""clientSecretScope"""'}), "(name='clientSecretScope')\n", (2434, 2460), False, 'import pulumi\n'), ((2741, 2776), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""containerName"""'}), "(name='containerName')\n", (2754, 2776), False, 'import pulumi\n'), ((3032, 3074), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""initializeFileSystem"""'}), "(name='initializeFileSystem')\n", (3045, 3074), False, 'import pulumi\n'), ((3372, 3403), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""mountName"""'}), "(name='mountName')\n", (3385, 3403), False, 'import pulumi\n'), ((3639, 3679), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""storageAccountName"""'}), "(name='storageAccountName')\n", (3652, 3679), False, 'import pulumi\n'), ((3965, 3995), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""tenantId"""'}), "(name='tenantId')\n", (3978, 3995), False, 'import pulumi\n'), ((4226, 4257), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""clusterId"""'}), "(name='clusterId')\n", (4239, 4257), False, 'import pulumi\n'), ((6864, 6894), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""clientId"""'}), "(name='clientId')\n", (6877, 6894), False, 'import pulumi\n'), ((7145, 7182), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""clientSecretKey"""'}), "(name='clientSecretKey')\n", (7158, 7182), False, 'import pulumi\n'), ((7473, 7512), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""clientSecretScope"""'}), "(name='clientSecretScope')\n", (7486, 7512), False, 'import pulumi\n'), ((7813, 7844), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""clusterId"""'}), "(name='clusterId')\n", 
(7826, 7844), False, 'import pulumi\n'), ((8100, 8135), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""containerName"""'}), "(name='containerName')\n", (8113, 8135), False, 'import pulumi\n'), ((8675, 8717), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""initializeFileSystem"""'}), "(name='initializeFileSystem')\n", (8688, 8717), False, 'import pulumi\n'), ((9035, 9066), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""mountName"""'}), "(name='mountName')\n", (9048, 9066), False, 'import pulumi\n'), ((9571, 9611), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""storageAccountName"""'}), "(name='storageAccountName')\n", (9584, 9611), False, 'import pulumi\n'), ((9917, 9947), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""tenantId"""'}), "(name='tenantId')\n", (9930, 9947), False, 'import pulumi\n'), ((17935, 17965), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""clientId"""'}), "(name='clientId')\n", (17948, 17965), False, 'import pulumi\n'), ((18078, 18115), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""clientSecretKey"""'}), "(name='clientSecretKey')\n", (18091, 18115), False, 'import pulumi\n'), ((18244, 18283), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""clientSecretScope"""'}), "(name='clientSecretScope')\n", (18257, 18283), False, 'import pulumi\n'), ((18416, 18447), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""clusterId"""'}), "(name='clusterId')\n", (18429, 18447), False, 'import pulumi\n'), ((18572, 18607), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""containerName"""'}), "(name='containerName')\n", (18585, 18607), False, 'import pulumi\n'), ((18856, 18898), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""initializeFileSystem"""'}), "(name='initializeFileSystem')\n", (18869, 18898), False, 'import pulumi\n'), ((19038, 19069), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""mountName"""'}), "(name='mountName')\n", (19051, 19069), False, 'import pulumi\n'), ((19304, 19344), 'pulumi.getter', 
'pulumi.getter', ([], {'name': '"""storageAccountName"""'}), "(name='storageAccountName')\n", (19317, 19344), False, 'import pulumi\n'), ((19479, 19509), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""tenantId"""'}), "(name='tenantId')\n", (19492, 19509), False, 'import pulumi\n'), ((1135, 1179), 'pulumi.set', 'pulumi.set', (['__self__', '"""client_id"""', 'client_id'], {}), "(__self__, 'client_id', client_id)\n", (1145, 1179), False, 'import pulumi\n'), ((1188, 1248), 'pulumi.set', 'pulumi.set', (['__self__', '"""client_secret_key"""', 'client_secret_key'], {}), "(__self__, 'client_secret_key', client_secret_key)\n", (1198, 1248), False, 'import pulumi\n'), ((1257, 1321), 'pulumi.set', 'pulumi.set', (['__self__', '"""client_secret_scope"""', 'client_secret_scope'], {}), "(__self__, 'client_secret_scope', client_secret_scope)\n", (1267, 1321), False, 'import pulumi\n'), ((1330, 1384), 'pulumi.set', 'pulumi.set', (['__self__', '"""container_name"""', 'container_name'], {}), "(__self__, 'container_name', container_name)\n", (1340, 1384), False, 'import pulumi\n'), ((1393, 1463), 'pulumi.set', 'pulumi.set', (['__self__', '"""initialize_file_system"""', 'initialize_file_system'], {}), "(__self__, 'initialize_file_system', initialize_file_system)\n", (1403, 1463), False, 'import pulumi\n'), ((1472, 1518), 'pulumi.set', 'pulumi.set', (['__self__', '"""mount_name"""', 'mount_name'], {}), "(__self__, 'mount_name', mount_name)\n", (1482, 1518), False, 'import pulumi\n'), ((1527, 1593), 'pulumi.set', 'pulumi.set', (['__self__', '"""storage_account_name"""', 'storage_account_name'], {}), "(__self__, 'storage_account_name', storage_account_name)\n", (1537, 1593), False, 'import pulumi\n'), ((1602, 1646), 'pulumi.set', 'pulumi.set', (['__self__', '"""tenant_id"""', 'tenant_id'], {}), "(__self__, 'tenant_id', tenant_id)\n", (1612, 1646), False, 'import pulumi\n'), ((1944, 1973), 'pulumi.get', 'pulumi.get', (['self', '"""client_id"""'], {}), "(self, 'client_id')\n", (1954, 
1973), False, 'import pulumi\n'), ((2056, 2092), 'pulumi.set', 'pulumi.set', (['self', '"""client_id"""', 'value'], {}), "(self, 'client_id', value)\n", (2066, 2092), False, 'import pulumi\n'), ((2220, 2257), 'pulumi.get', 'pulumi.get', (['self', '"""client_secret_key"""'], {}), "(self, 'client_secret_key')\n", (2230, 2257), False, 'import pulumi\n'), ((2356, 2400), 'pulumi.set', 'pulumi.set', (['self', '"""client_secret_key"""', 'value'], {}), "(self, 'client_secret_key', value)\n", (2366, 2400), False, 'import pulumi\n'), ((2532, 2571), 'pulumi.get', 'pulumi.get', (['self', '"""client_secret_scope"""'], {}), "(self, 'client_secret_scope')\n", (2542, 2571), False, 'import pulumi\n'), ((2674, 2720), 'pulumi.set', 'pulumi.set', (['self', '"""client_secret_scope"""', 'value'], {}), "(self, 'client_secret_scope', value)\n", (2684, 2720), False, 'import pulumi\n'), ((2843, 2877), 'pulumi.get', 'pulumi.get', (['self', '"""container_name"""'], {}), "(self, 'container_name')\n", (2853, 2877), False, 'import pulumi\n'), ((2970, 3011), 'pulumi.set', 'pulumi.set', (['self', '"""container_name"""', 'value'], {}), "(self, 'container_name', value)\n", (2980, 3011), False, 'import pulumi\n'), ((3150, 3192), 'pulumi.get', 'pulumi.get', (['self', '"""initialize_file_system"""'], {}), "(self, 'initialize_file_system')\n", (3160, 3192), False, 'import pulumi\n'), ((3302, 3351), 'pulumi.set', 'pulumi.set', (['self', '"""initialize_file_system"""', 'value'], {}), "(self, 'initialize_file_system', value)\n", (3312, 3351), False, 'import pulumi\n'), ((3466, 3496), 'pulumi.get', 'pulumi.get', (['self', '"""mount_name"""'], {}), "(self, 'mount_name')\n", (3476, 3496), False, 'import pulumi\n'), ((3581, 3618), 'pulumi.set', 'pulumi.set', (['self', '"""mount_name"""', 'value'], {}), "(self, 'mount_name', value)\n", (3591, 3618), False, 'import pulumi\n'), ((3752, 3792), 'pulumi.get', 'pulumi.get', (['self', '"""storage_account_name"""'], {}), "(self, 'storage_account_name')\n", (3762, 
3792), False, 'import pulumi\n'), ((3897, 3944), 'pulumi.set', 'pulumi.set', (['self', '"""storage_account_name"""', 'value'], {}), "(self, 'storage_account_name', value)\n", (3907, 3944), False, 'import pulumi\n'), ((4057, 4086), 'pulumi.get', 'pulumi.get', (['self', '"""tenant_id"""'], {}), "(self, 'tenant_id')\n", (4067, 4086), False, 'import pulumi\n'), ((4169, 4205), 'pulumi.set', 'pulumi.set', (['self', '"""tenant_id"""', 'value'], {}), "(self, 'tenant_id', value)\n", (4179, 4205), False, 'import pulumi\n'), ((4330, 4360), 'pulumi.get', 'pulumi.get', (['self', '"""cluster_id"""'], {}), "(self, 'cluster_id')\n", (4340, 4360), False, 'import pulumi\n'), ((4455, 4492), 'pulumi.set', 'pulumi.set', (['self', '"""cluster_id"""', 'value'], {}), "(self, 'cluster_id', value)\n", (4465, 4492), False, 'import pulumi\n'), ((4598, 4627), 'pulumi.get', 'pulumi.get', (['self', '"""directory"""'], {}), "(self, 'directory')\n", (4608, 4627), False, 'import pulumi\n'), ((4720, 4756), 'pulumi.set', 'pulumi.set', (['self', '"""directory"""', 'value'], {}), "(self, 'directory', value)\n", (4730, 4756), False, 'import pulumi\n'), ((6966, 6995), 'pulumi.get', 'pulumi.get', (['self', '"""client_id"""'], {}), "(self, 'client_id')\n", (6976, 6995), False, 'import pulumi\n'), ((7088, 7124), 'pulumi.set', 'pulumi.set', (['self', '"""client_id"""', 'value'], {}), "(self, 'client_id', value)\n", (7098, 7124), False, 'import pulumi\n'), ((7262, 7299), 'pulumi.get', 'pulumi.get', (['self', '"""client_secret_key"""'], {}), "(self, 'client_secret_key')\n", (7272, 7299), False, 'import pulumi\n'), ((7408, 7452), 'pulumi.set', 'pulumi.set', (['self', '"""client_secret_key"""', 'value'], {}), "(self, 'client_secret_key', value)\n", (7418, 7452), False, 'import pulumi\n'), ((7594, 7633), 'pulumi.get', 'pulumi.get', (['self', '"""client_secret_scope"""'], {}), "(self, 'client_secret_scope')\n", (7604, 7633), False, 'import pulumi\n'), ((7746, 7792), 'pulumi.set', 'pulumi.set', (['self', 
'"""client_secret_scope"""', 'value'], {}), "(self, 'client_secret_scope', value)\n", (7756, 7792), False, 'import pulumi\n'), ((7917, 7947), 'pulumi.get', 'pulumi.get', (['self', '"""cluster_id"""'], {}), "(self, 'cluster_id')\n", (7927, 7947), False, 'import pulumi\n'), ((8042, 8079), 'pulumi.set', 'pulumi.set', (['self', '"""cluster_id"""', 'value'], {}), "(self, 'cluster_id', value)\n", (8052, 8079), False, 'import pulumi\n'), ((8212, 8246), 'pulumi.get', 'pulumi.get', (['self', '"""container_name"""'], {}), "(self, 'container_name')\n", (8222, 8246), False, 'import pulumi\n'), ((8349, 8390), 'pulumi.set', 'pulumi.set', (['self', '"""container_name"""', 'value'], {}), "(self, 'container_name', value)\n", (8359, 8390), False, 'import pulumi\n'), ((8496, 8525), 'pulumi.get', 'pulumi.get', (['self', '"""directory"""'], {}), "(self, 'directory')\n", (8506, 8525), False, 'import pulumi\n'), ((8618, 8654), 'pulumi.set', 'pulumi.set', (['self', '"""directory"""', 'value'], {}), "(self, 'directory', value)\n", (8628, 8654), False, 'import pulumi\n'), ((8803, 8845), 'pulumi.get', 'pulumi.get', (['self', '"""initialize_file_system"""'], {}), "(self, 'initialize_file_system')\n", (8813, 8845), False, 'import pulumi\n'), ((8965, 9014), 'pulumi.set', 'pulumi.set', (['self', '"""initialize_file_system"""', 'value'], {}), "(self, 'initialize_file_system', value)\n", (8975, 9014), False, 'import pulumi\n'), ((9139, 9169), 'pulumi.get', 'pulumi.get', (['self', '"""mount_name"""'], {}), "(self, 'mount_name')\n", (9149, 9169), False, 'import pulumi\n'), ((9264, 9301), 'pulumi.set', 'pulumi.set', (['self', '"""mount_name"""', 'value'], {}), "(self, 'mount_name', value)\n", (9274, 9301), False, 'import pulumi\n'), ((9404, 9430), 'pulumi.get', 'pulumi.get', (['self', '"""source"""'], {}), "(self, 'source')\n", (9414, 9430), False, 'import pulumi\n'), ((9517, 9550), 'pulumi.set', 'pulumi.set', (['self', '"""source"""', 'value'], {}), "(self, 'source', value)\n", (9527, 9550), False, 
'import pulumi\n'), ((9694, 9734), 'pulumi.get', 'pulumi.get', (['self', '"""storage_account_name"""'], {}), "(self, 'storage_account_name')\n", (9704, 9734), False, 'import pulumi\n'), ((9849, 9896), 'pulumi.set', 'pulumi.set', (['self', '"""storage_account_name"""', 'value'], {}), "(self, 'storage_account_name', value)\n", (9859, 9896), False, 'import pulumi\n'), ((10019, 10048), 'pulumi.get', 'pulumi.get', (['self', '"""tenant_id"""'], {}), "(self, 'tenant_id')\n", (10029, 10048), False, 'import pulumi\n'), ((10141, 10177), 'pulumi.set', 'pulumi.set', (['self', '"""tenant_id"""', 'value'], {}), "(self, 'tenant_id', value)\n", (10151, 10177), False, 'import pulumi\n'), ((18028, 18057), 'pulumi.get', 'pulumi.get', (['self', '"""client_id"""'], {}), "(self, 'client_id')\n", (18038, 18057), False, 'import pulumi\n'), ((18186, 18223), 'pulumi.get', 'pulumi.get', (['self', '"""client_secret_key"""'], {}), "(self, 'client_secret_key')\n", (18196, 18223), False, 'import pulumi\n'), ((18356, 18395), 'pulumi.get', 'pulumi.get', (['self', '"""client_secret_scope"""'], {}), "(self, 'client_secret_scope')\n", (18366, 18395), False, 'import pulumi\n'), ((18521, 18551), 'pulumi.get', 'pulumi.get', (['self', '"""cluster_id"""'], {}), "(self, 'cluster_id')\n", (18531, 18551), False, 'import pulumi\n'), ((18675, 18709), 'pulumi.get', 'pulumi.get', (['self', '"""container_name"""'], {}), "(self, 'container_name')\n", (18685, 18709), False, 'import pulumi\n'), ((18806, 18835), 'pulumi.get', 'pulumi.get', (['self', '"""directory"""'], {}), "(self, 'directory')\n", (18816, 18835), False, 'import pulumi\n'), ((18975, 19017), 'pulumi.get', 'pulumi.get', (['self', '"""initialize_file_system"""'], {}), "(self, 'initialize_file_system')\n", (18985, 19017), False, 'import pulumi\n'), ((19133, 19163), 'pulumi.get', 'pulumi.get', (['self', '"""mount_name"""'], {}), "(self, 'mount_name')\n", (19143, 19163), False, 'import pulumi\n'), ((19257, 19283), 'pulumi.get', 'pulumi.get', (['self', 
'"""source"""'], {}), "(self, 'source')\n", (19267, 19283), False, 'import pulumi\n'), ((19418, 19458), 'pulumi.get', 'pulumi.get', (['self', '"""storage_account_name"""'], {}), "(self, 'storage_account_name')\n", (19428, 19458), False, 'import pulumi\n'), ((19572, 19601), 'pulumi.get', 'pulumi.get', (['self', '"""tenant_id"""'], {}), "(self, 'tenant_id')\n", (19582, 19601), False, 'import pulumi\n'), ((1694, 1740), 'pulumi.set', 'pulumi.set', (['__self__', '"""cluster_id"""', 'cluster_id'], {}), "(__self__, 'cluster_id', cluster_id)\n", (1704, 1740), False, 'import pulumi\n'), ((1787, 1831), 'pulumi.set', 'pulumi.set', (['__self__', '"""directory"""', 'directory'], {}), "(__self__, 'directory', directory)\n", (1797, 1831), False, 'import pulumi\n'), ((5751, 5795), 'pulumi.set', 'pulumi.set', (['__self__', '"""client_id"""', 'client_id'], {}), "(__self__, 'client_id', client_id)\n", (5761, 5795), False, 'import pulumi\n'), ((5850, 5910), 'pulumi.set', 'pulumi.set', (['__self__', '"""client_secret_key"""', 'client_secret_key'], {}), "(__self__, 'client_secret_key', client_secret_key)\n", (5860, 5910), False, 'import pulumi\n'), ((5967, 6031), 'pulumi.set', 'pulumi.set', (['__self__', '"""client_secret_scope"""', 'client_secret_scope'], {}), "(__self__, 'client_secret_scope', client_secret_scope)\n", (5977, 6031), False, 'import pulumi\n'), ((6079, 6125), 'pulumi.set', 'pulumi.set', (['__self__', '"""cluster_id"""', 'cluster_id'], {}), "(__self__, 'cluster_id', cluster_id)\n", (6089, 6125), False, 'import pulumi\n'), ((6177, 6231), 'pulumi.set', 'pulumi.set', (['__self__', '"""container_name"""', 'container_name'], {}), "(__self__, 'container_name', container_name)\n", (6187, 6231), False, 'import pulumi\n'), ((6278, 6322), 'pulumi.set', 'pulumi.set', (['__self__', '"""directory"""', 'directory'], {}), "(__self__, 'directory', directory)\n", (6288, 6322), False, 'import pulumi\n'), ((6382, 6452), 'pulumi.set', 'pulumi.set', (['__self__', 
'"""initialize_file_system"""', 'initialize_file_system'], {}), "(__self__, 'initialize_file_system', initialize_file_system)\n", (6392, 6452), False, 'import pulumi\n'), ((6500, 6546), 'pulumi.set', 'pulumi.set', (['__self__', '"""mount_name"""', 'mount_name'], {}), "(__self__, 'mount_name', mount_name)\n", (6510, 6546), False, 'import pulumi\n'), ((6590, 6628), 'pulumi.set', 'pulumi.set', (['__self__', '"""source"""', 'source'], {}), "(__self__, 'source', source)\n", (6600, 6628), False, 'import pulumi\n'), ((6686, 6752), 'pulumi.set', 'pulumi.set', (['__self__', '"""storage_account_name"""', 'storage_account_name'], {}), "(__self__, 'storage_account_name', storage_account_name)\n", (6696, 6752), False, 'import pulumi\n'), ((6799, 6843), 'pulumi.set', 'pulumi.set', (['__self__', '"""tenant_id"""', 'tenant_id'], {}), "(__self__, 'tenant_id', tenant_id)\n", (6809, 6843), False, 'import pulumi\n'), ((13208, 13232), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {}), '()\n', (13230, 13232), False, 'import pulumi\n'), ((17058, 17087), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {'id': 'id'}), '(id=id)\n', (17080, 17087), False, 'import pulumi\n')]
|
#%%
# Cell 1: connectivity check -- POST a plain-text "connection" command and
# print whatever the endpoint answers.  The target is presumably a device or
# test station on the local network -- TODO confirm.
import requests
# url = "http://1172.16.31.10"
url = "http://192.168.1.10"
# data = bytes("connection\r\n")
data = "connection\r\n"
res = requests.post(url=url,
              data=data,
              headers={'Content-Type': 'text/plain'})
print(res.text)
# %%
# Cell 2: push one ADD_RECORD row (URL-encoded-style key=value pairs in the
# request body) to a second host and print its reply.
import requests
res = requests.post(url="http://192.168.1.5/",data="""C=ADD_RECORD&product=X1521&sn=PPPYWWDSSSSEEEERX+FFGGCUUCLPPPVHSSS&station_name=D-INSPECTION&station_id=SPWX_W03-2FT-01_1_D-INSPECTION&start_time=2019-06-28 08:11:28&stop_time=2019-06-28 08:11:39&result=PASS&reason=&stage=1&mac_address=88:51:FB:42:A1:35&value1=0.052&value2=0.052&value3=0.052&value4=0.052&value5=0.052&value6=0.052&value7=0.052&value8=0.052
""")
print(res.text)
|
[
"requests.post"
] |
[((142, 215), 'requests.post', 'requests.post', ([], {'url': 'url', 'data': 'data', 'headers': "{'Content-Type': 'text/plain'}"}), "(url=url, data=data, headers={'Content-Type': 'text/plain'})\n", (155, 215), False, 'import requests\n'), ((303, 723), 'requests.post', 'requests.post', ([], {'url': '"""http://192.168.1.5/"""', 'data': '"""C=ADD_RECORD&product=X1521&sn=PPPYWWDSSSSEEEERX+FFGGCUUCLPPPVHSSS&station_name=D-INSPECTION&station_id=SPWX_W03-2FT-01_1_D-INSPECTION&start_time=2019-06-28 08:11:28&stop_time=2019-06-28 08:11:39&result=PASS&reason=&stage=1&mac_address=88:51:FB:42:A1:35&value1=0.052&value2=0.052&value3=0.052&value4=0.052&value5=0.052&value6=0.052&value7=0.052&value8=0.052\n"""'}), '(url=\'http://192.168.1.5/\', data=\n """C=ADD_RECORD&product=X1521&sn=PPPYWWDSSSSEEEERX+FFGGCUUCLPPPVHSSS&station_name=D-INSPECTION&station_id=SPWX_W03-2FT-01_1_D-INSPECTION&start_time=2019-06-28 08:11:28&stop_time=2019-06-28 08:11:39&result=PASS&reason=&stage=1&mac_address=88:51:FB:42:A1:35&value1=0.052&value2=0.052&value3=0.052&value4=0.052&value5=0.052&value6=0.052&value7=0.052&value8=0.052\n"""\n )\n', (316, 723), False, 'import requests\n')]
|
"""opdelete module
Implementation of "delete" operation for "realtime" etl process for
transferring data from MongoDB nested collections to PostgreSQL flat data
with using pregenerated schema and tailing records (events) in oplog.rs
collection.
How to use:
del = op_delete_stmts(dbreq, schema, path, str_id, database_name,
schema_name)
parameters:
dbreq - connection to PostgreSQL,
schema - schema of nested data represented as json object,
path - path to object for deleteion
str_id - string representation of root ObjectID
database_name - database name for destionation database (PostgreSQL)
schema_name - schema name for destionation database (PostgreSQL)
return value:
as delete operation is an complicated operation it usualy comes in
combination of sets two kinds of single operations UPDATE and DELETE for
PostgreSQL and retruned value has following view
{
'upd': {
'UPDATE database_name.schema_name."table_name" SET idx=(%s) WHERE
idx=(%s), parent_id_iod=(%s);': [1, 2, 'abc'],
'UPDATE database_name.schema_name."table_name" SET idx=(%s) WHERE
idx=(%s), parent_id_iod=(%s);': [2, 3, 'abc']
},
'del': {
'DELETE FROM database_name.schema_name."table_name" WHERE
idx=(%s), parent_id_iod=(%s);': [1, 'abc']
}
"""
__author__ = '<NAME>'
__email__ = "<EMAIL>"
from gizer.util import get_idx_column_name_from_list, SELECT_TMPLT, \
UPDATE_TMPLT, DELETE_TMPLT, get_indexes_dictionary_idx, \
get_root_table_from_path, get_ids_list, get_table_name_from_list, \
get_tables_structure, get_table_name_schema, get_last_idx_from_path, \
get_part_schema
import psycopg2
def op_delete_stmts(dbreq, schema, path, str_id, database_info):
    """Delete operation wrapper: public entry point of this module.

    :param dbreq: open connection to the destination PostgreSQL database
    :param schema: schema of the nested data represented as a json object
    :param path: dot-separated path to the object to delete
    :param str_id: string representation of the root ObjectID
    :param database_info: holder of the destination database / schema names
    :return: dict with 'del' and 'upd' statement maps (see module docstring)
    """
    return gen_statements(dbreq, schema, path, str_id, database_info)
def get_max_id_in_array(dbreq, table, condition_list, database_info):
    """Return the max index in the array the object is being deleted from.

    Queries the destination table using every target condition except the
    'idx' column itself; a missing result (NULL) is reported as 0.
    """
    # Narrow the target conditions: the row's own index must not constrain
    # the max() lookup.
    narrowed = {col: val for col, val in condition_list['target'].items()
                if col != 'idx'}
    where = get_where_templates({'target': narrowed, 'child': {}})['target']
    qualified_table = '.'.join(filter(None, [database_info.database_name,
                                             database_info.schema_name,
                                             table]))
    sql_query = SELECT_TMPLT.format(table=qualified_table,
                                    conditions=where['template'])
    cursor = dbreq.cursor()
    cursor.execute(sql_query, tuple(where['values']))
    max_idx = cursor.fetchone()[0]
    return 0 if max_idx is None else max_idx
def get_conditions_list(schema, path, str_id):
    """Build WHERE-condition dictionaries for the target table and its
    'child' tables from the path and the string-represented root object ID.

    :param schema: schema of the nested data represented as a json object
    :param path: dot-separated path to the object to delete
    :param str_id: string representation of the root ObjectID
    :return: {'target': {column: value}, 'child': {column: value}}
    """
    spath = path.split('.')
    parental_tables_idx_list = get_indexes_dictionary_idx(path)
    target_table = get_table_name_from_list(spath)
    target_table_idxname_for_child = get_idx_column_name_from_list(spath)
    params_target = {}
    params_child = {}
    root_table = get_root_table_from_path(path)
    for parent_table_idx in parental_tables_idx_list:
        if parent_table_idx == root_table:
            # The root table contributes the object ID below, not an index.
            continue
        if target_table_idxname_for_child != parent_table_idx:
            params_target[parent_table_idx + '_idx'] = \
                parental_tables_idx_list[parent_table_idx]
        else:
            # The target table's own index column is plain 'idx'.
            params_target['idx'] = parental_tables_idx_list[parent_table_idx]
        # Child tables always reference the parent's prefixed index column.
        params_child[parent_table_idx + '_idx'] = \
            parental_tables_idx_list[parent_table_idx]
    ids = get_ids_list(schema)
    # next(iter(...)) instead of the Python-2-only ids.iterkeys().next():
    # same first key, but works on both Python 2.7 and Python 3.
    root_id = next(iter(ids))
    if root_table == target_table:
        params_target[root_id] = str(str_id)
    else:
        params_target[root_table + '_' + root_id] = str(str_id)
    params_child[root_table + '_' + root_id] = str(str_id)
    return {'target': params_target, 'child': params_child}
def get_where_templates(conditions_list):
    """Generate WHERE templates for the target and 'child' tables from a
    conditions list.

    Fixes two defects of the previous version: the inner helper had an
    if/else whose branches were identical (dead conditional, left over from
    a quoting scheme that no longer exists), and the template was ordered by
    sorting the *formatted* strings while the values were ordered by sorting
    the *keys* -- two orderings that can disagree when one column name is a
    prefix of another.  Both the template and the values are now derived
    from a single sorted key list, so placeholders and parameters always
    line up.

    :param conditions_list: {'target': {column: value},
                             'child': {column: value}}
    :return: {'target': {'template': str, 'values': list},
              'child': {'template': str, 'values': list}}
    """
    def build(cond):
        # One sort drives both the placeholder order and the value order.
        keys = sorted(cond)
        template = ' and '.join('({0}=(%s))'.format(key) for key in keys)
        values = [cond[key] for key in keys]
        return {'template': template, 'values': values}

    return {'target': build(conditions_list['target']),
            'child': build(conditions_list['child'])}
def gen_statements(dbreq, schema, path, str_id, database_info):
    """Generate all SQL statements (with parameters) for an oplog event
    related to a delete operation.

    :param dbreq: open connection to the destination PostgreSQL database
    :param schema: schema of the nested data represented as a json object
    :param path: dot-separated path to the object to delete
    :param str_id: string representation of the root ObjectID
    :param database_info: holder of the destination database / schema names
    :return: {'del': {sql: params, ...}, 'upd': {sql: params, ...}}
    """
    # Full table mappings for the whole collection schema.
    tables_mappings = get_tables_structure(schema, path.split('.')[0], {}, {},
                                           1, '')
    # getting partial table mappings
    if len(path.split('.')) <= 1:
        schema_partial = schema
    else:
        schema_partial = get_part_schema(schema, path.split('.')[1:])
        if schema_partial == None:  # NOTE(review): prefer "is None"
            schema_partial = {}
    tables_mappings_partial = get_tables_structure(schema_partial,
                                                 path.split('.')[0], {}, {},
                                                 1, '')
    conditions_list = get_conditions_list(schema, path, str_id)
    where_clauses = get_where_templates(conditions_list)
    target_table = get_table_name_from_list(path.split('.'))
    # Nothing to delete if the path does not map to a known table.
    if not target_table in tables_mappings.keys():
        return {'del': {}, 'upd': {}}
    tables_list = []
    tables_mappings_partial_fixed = {}
    # fixing prefixes for partial table mappings
    for table_partial in tables_mappings_partial:
        tables_mappings_partial_fixed[table_partial.replace(
            path.split('.')[0][:-1], target_table[:-1],)] = \
            tables_mappings_partial[table_partial]
    # Collect child tables: names sharing the target table's prefix, present
    # in the full mapping, and not the target table itself.
    for table in tables_mappings_partial_fixed.keys():
        if str.startswith(str(table), target_table[:-1], 0,
                          len(table)) and not table == target_table and \
                        table in tables_mappings.keys():
            tables_list.append(table)
    del_statements = {}
    # DELETE for the target table itself ...
    del_statements[DELETE_TMPLT.format(
        table=get_table_name_schema([database_info.database_name,
                                     database_info.schema_name, target_table]),
        conditions=where_clauses['target']['template'])] = \
        where_clauses['target']['values']
    # ... and for every child table hanging off it.
    for table in tables_list:
        del_statements[DELETE_TMPLT.format(
            table=get_table_name_schema([database_info.database_name,
                                         database_info.schema_name, table]),
            conditions=where_clauses['child']['template'])] = \
            where_clauses['child']['values']
    update_statements = {}
    idx = get_last_idx_from_path(path)
    if idx == None:  # NOTE(review): prefer "is None"
        # Deleted object is not an array element -> no index shifting needed.
        return {'del': del_statements, 'upd': update_statements}
    max_idx = get_max_id_in_array(dbreq, target_table, conditions_list,
                                  database_info)
    if idx <= max_idx:
        # NOTE(review): this early return makes the shifting loop below
        # unreachable -- range(int(idx) + 1, int(max_idx) + 1) is empty
        # whenever idx > max_idx.  The guard looks inverted (probably meant
        # "if idx > max_idx: return"); confirm against the etl test-suite
        # before changing it.
        return {'del': del_statements, 'upd': update_statements}
    # Shift every array element after the deleted one down by one index,
    # in both the target table and its child tables.
    for ind in range(int(idx) + 1, int(max_idx) + 1):
        spath = path.split('.')
        del spath[-1]
        spath.append(str(ind - 1))
        path_to_update = '.'.join(spath)
        udpate_where = get_where_templates(
            get_conditions_list(schema, path_to_update, str_id))
        update_statements[UPDATE_TMPLT.format(table=get_table_name_schema(
            [database_info.database_name, database_info.schema_name,
             target_table]),
                                              statements='idx=' + str(ind - 1),
                                              conditions=udpate_where['target'][
                                                  'template'])] = \
            udpate_where['target'][
                'values']
        for table in tables_list:
            update_statements[UPDATE_TMPLT.format(table=get_table_name_schema(
                [database_info.database_name, database_info.schema_name,
                 table]),
                statements=get_idx_column_name_from_list(path.split('.')) +
                           '_idx=' + str(ind - 1),
                conditions=
                udpate_where['child'][
                    'template'])] = \
                udpate_where['child'][
                    'values']
    return {'del': del_statements, 'upd': update_statements}
|
[
"gizer.util.get_last_idx_from_path",
"gizer.util.get_table_name_from_list",
"gizer.util.get_indexes_dictionary_idx",
"gizer.util.get_ids_list",
"gizer.util.get_root_table_from_path",
"gizer.util.get_table_name_schema",
"gizer.util.get_idx_column_name_from_list"
] |
[((2959, 2991), 'gizer.util.get_indexes_dictionary_idx', 'get_indexes_dictionary_idx', (['path'], {}), '(path)\n', (2985, 2991), False, 'from gizer.util import get_idx_column_name_from_list, SELECT_TMPLT, UPDATE_TMPLT, DELETE_TMPLT, get_indexes_dictionary_idx, get_root_table_from_path, get_ids_list, get_table_name_from_list, get_tables_structure, get_table_name_schema, get_last_idx_from_path, get_part_schema\n'), ((3011, 3042), 'gizer.util.get_table_name_from_list', 'get_table_name_from_list', (['spath'], {}), '(spath)\n', (3035, 3042), False, 'from gizer.util import get_idx_column_name_from_list, SELECT_TMPLT, UPDATE_TMPLT, DELETE_TMPLT, get_indexes_dictionary_idx, get_root_table_from_path, get_ids_list, get_table_name_from_list, get_tables_structure, get_table_name_schema, get_last_idx_from_path, get_part_schema\n'), ((3080, 3116), 'gizer.util.get_idx_column_name_from_list', 'get_idx_column_name_from_list', (['spath'], {}), '(spath)\n', (3109, 3116), False, 'from gizer.util import get_idx_column_name_from_list, SELECT_TMPLT, UPDATE_TMPLT, DELETE_TMPLT, get_indexes_dictionary_idx, get_root_table_from_path, get_ids_list, get_table_name_from_list, get_tables_structure, get_table_name_schema, get_last_idx_from_path, get_part_schema\n'), ((3179, 3209), 'gizer.util.get_root_table_from_path', 'get_root_table_from_path', (['path'], {}), '(path)\n', (3203, 3209), False, 'from gizer.util import get_idx_column_name_from_list, SELECT_TMPLT, UPDATE_TMPLT, DELETE_TMPLT, get_indexes_dictionary_idx, get_root_table_from_path, get_ids_list, get_table_name_from_list, get_tables_structure, get_table_name_schema, get_last_idx_from_path, get_part_schema\n'), ((3875, 3895), 'gizer.util.get_ids_list', 'get_ids_list', (['schema'], {}), '(schema)\n', (3887, 3895), False, 'from gizer.util import get_idx_column_name_from_list, SELECT_TMPLT, UPDATE_TMPLT, DELETE_TMPLT, get_indexes_dictionary_idx, get_root_table_from_path, get_ids_list, get_table_name_from_list, get_tables_structure, 
get_table_name_schema, get_last_idx_from_path, get_part_schema\n'), ((7675, 7703), 'gizer.util.get_last_idx_from_path', 'get_last_idx_from_path', (['path'], {}), '(path)\n', (7697, 7703), False, 'from gizer.util import get_idx_column_name_from_list, SELECT_TMPLT, UPDATE_TMPLT, DELETE_TMPLT, get_indexes_dictionary_idx, get_root_table_from_path, get_ids_list, get_table_name_from_list, get_tables_structure, get_table_name_schema, get_last_idx_from_path, get_part_schema\n'), ((7073, 7171), 'gizer.util.get_table_name_schema', 'get_table_name_schema', (['[database_info.database_name, database_info.schema_name, target_table]'], {}), '([database_info.database_name, database_info.\n schema_name, target_table])\n', (7094, 7171), False, 'from gizer.util import get_idx_column_name_from_list, SELECT_TMPLT, UPDATE_TMPLT, DELETE_TMPLT, get_indexes_dictionary_idx, get_root_table_from_path, get_ids_list, get_table_name_from_list, get_tables_structure, get_table_name_schema, get_last_idx_from_path, get_part_schema\n'), ((7400, 7491), 'gizer.util.get_table_name_schema', 'get_table_name_schema', (['[database_info.database_name, database_info.schema_name, table]'], {}), '([database_info.database_name, database_info.\n schema_name, table])\n', (7421, 7491), False, 'from gizer.util import get_idx_column_name_from_list, SELECT_TMPLT, UPDATE_TMPLT, DELETE_TMPLT, get_indexes_dictionary_idx, get_root_table_from_path, get_ids_list, get_table_name_from_list, get_tables_structure, get_table_name_schema, get_last_idx_from_path, get_part_schema\n'), ((8344, 8442), 'gizer.util.get_table_name_schema', 'get_table_name_schema', (['[database_info.database_name, database_info.schema_name, target_table]'], {}), '([database_info.database_name, database_info.\n schema_name, target_table])\n', (8365, 8442), False, 'from gizer.util import get_idx_column_name_from_list, SELECT_TMPLT, UPDATE_TMPLT, DELETE_TMPLT, get_indexes_dictionary_idx, get_root_table_from_path, get_ids_list, get_table_name_from_list, 
get_tables_structure, get_table_name_schema, get_last_idx_from_path, get_part_schema\n'), ((8745, 8836), 'gizer.util.get_table_name_schema', 'get_table_name_schema', (['[database_info.database_name, database_info.schema_name, table]'], {}), '([database_info.database_name, database_info.\n schema_name, table])\n', (8766, 8836), False, 'from gizer.util import get_idx_column_name_from_list, SELECT_TMPLT, UPDATE_TMPLT, DELETE_TMPLT, get_indexes_dictionary_idx, get_root_table_from_path, get_ids_list, get_table_name_from_list, get_tables_structure, get_table_name_schema, get_last_idx_from_path, get_part_schema\n')]
|
# -*- encoding: utf-8 -*-
'''
Emacs Behavior
==============
The :class:`~kivy.uix.behaviors.emacs.EmacsBehavior`
`mixin <https://en.wikipedia.org/wiki/Mixin>`_ allows you to add
`Emacs <https://www.gnu.org/software/emacs/>`_ keyboard shortcuts for basic
movement and editing to the :class:`~kivy.uix.textinput.TextInput` widget.
The shortcuts currently available are listed below:
Emacs shortcuts
---------------
=============== ========================================================
Shortcut Description
--------------- --------------------------------------------------------
Control + a Move cursor to the beginning of the line
Control + e Move cursor to the end of the line
Control + f Move cursor one character to the right
Control + b Move cursor one character to the left
Alt + f Move cursor to the end of the word to the right
Alt + b Move cursor to the start of the word to the left
Alt + Backspace Delete text left of the cursor to the beginning of word
Alt + d Delete text right of the cursor to the end of the word
Alt + w Copy selection
Control + w Cut selection
Control + y Paste selection
=============== ========================================================
.. warning::
If you have the :mod:`~kivy.modules.inspector` module enabled, the
shortcut for opening the inspector (Control + e) conflicts with the
Emacs shortcut to move to the end of the line (it will still move the
cursor to the end of the line, but the inspector will open as well).
'''
from kivy.properties import StringProperty
__all__ = ('EmacsBehavior', )
class EmacsBehavior(object):
    '''Mixin that adds Emacs-style keyboard shortcuts to a
    :class:`~kivy.uix.textinput.TextInput`.

    See the :mod:`kivy.uix.behaviors.emacs` module documentation for the
    full table of supported shortcuts.

    .. versionadded:: 1.9.1
    '''

    key_bindings = StringProperty('emacs')
    '''Selects the active key-binding scheme. Emacs shortcuts are honoured
    only while this is exactly ``'emacs'``; any other value (for example
    ``'default'``) falls back to the stock TextInput bindings.

    :attr:`key_bindings` is a :class:`~kivy.properties.StringProperty`
    and defaults to ``'emacs'``.

    .. versionadded:: 1.9.2
    '''

    def __init__(self, **kwargs):
        super(EmacsBehavior, self).__init__(**kwargs)
        # Modifier name -> {printable key: zero-argument handler}.
        self.bindings = {
            'ctrl': {
                'a': lambda: self.do_cursor_movement('cursor_home'),
                'e': lambda: self.do_cursor_movement('cursor_end'),
                'f': lambda: self.do_cursor_movement('cursor_right'),
                'b': lambda: self.do_cursor_movement('cursor_left'),
                'w': lambda: self._cut(self.selection_text),
                'y': self.paste,
            },
            'alt': {
                'w': self.copy,
                'f': lambda: self.do_cursor_movement('cursor_right',
                                                     control=True),
                'b': lambda: self.do_cursor_movement('cursor_left',
                                                     control=True),
                'd': self.delete_word_right,
                '\x08': self.delete_word_left,  # alt + backspace
            },
        }

    def keyboard_on_key_down(self, window, keycode, text, modifiers):
        key, key_str = keycode
        mod = modifiers[0] if modifiers else None
        handler = None
        # Only single-byte keys can map to a binding, and only while the
        # Emacs scheme is active.
        if 0 <= key <= 255 and self.key_bindings == 'emacs' and \
                mod in ('ctrl', 'alt'):
            handler = self.bindings[mod].get(chr(key))
        if handler is not None:
            handler()
        else:
            # Not one of ours -- let TextInput process the key as usual.
            super(EmacsBehavior, self).keyboard_on_key_down(window, keycode,
                                                            text, modifiers)

    def delete_word_right(self):
        '''Delete text right of the cursor to the end of the word'''
        if self._selection:
            return
        initial_index = self.cursor_index()
        initial_cursor = self.cursor
        self.do_cursor_movement('cursor_right', control=True)
        final_index = self.cursor_index()
        if initial_index == final_index:
            return
        removed = self.text[initial_index:final_index]
        self._set_unredo_delsel(initial_index, final_index, removed,
                                from_undo=False)
        self.text = self.text[:initial_index] + self.text[final_index:]
        self._set_cursor(pos=initial_cursor)

    def delete_word_left(self):
        '''Delete text left of the cursor to the beginning of word'''
        if self._selection:
            return
        initial_index = self.cursor_index()
        self.do_cursor_movement('cursor_left', control=True)
        final_cursor = self.cursor
        final_index = self.cursor_index()
        if initial_index == final_index:
            return
        removed = self.text[final_index:initial_index]
        self._set_unredo_delsel(final_index, initial_index, removed,
                                from_undo=False)
        self.text = self.text[:final_index] + self.text[initial_index:]
        self._set_cursor(pos=final_cursor)
|
[
"kivy.properties.StringProperty"
] |
[((1996, 2019), 'kivy.properties.StringProperty', 'StringProperty', (['"""emacs"""'], {}), "('emacs')\n", (2010, 2019), False, 'from kivy.properties import StringProperty\n')]
|
from sklearn.tree import DecisionTreeRegressor
from sklearn.ensemble import RandomForestRegressor
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.ensemble import AdaBoostRegressor
from sklearn.model_selection import train_test_split, cross_val_score
from sklearn.model_selection import GridSearchCV
from sklearn.metrics import mean_squared_error
import matplotlib.pyplot as plt
import numpy as np
def cross_val(estimator, X_train, y_train, nfolds):
    '''Print and return the cross-validated RMSLE of a model.

    Parameters: estimator: model object
                X_train: 2d numpy array
                y_train: 1d numpy array
                nfolds: the number of folds in the kfold cross-validation

    Returns: square root of the average mean-squared error over the folds
    '''
    # cross_val_score's 'neg_mean_squared_error' yields negated MSEs, so
    # flip the sign before averaging. kfold parallelizes: n_jobs=-1.
    neg_mse = cross_val_score(estimator, X_train, y_train,
                              scoring='neg_mean_squared_error',
                              cv=nfolds, n_jobs=-1)
    mean_rmsle = np.sqrt((neg_mse * -1).mean())
    name = estimator.__class__.__name__
    print("{0:<25s} Train CV | RMSLE: {1:0.3f} ".format(name,
                                                          mean_rmsle))
    return mean_rmsle
def stage_score_plot(estimator, X_train, y_train, X_test, y_test):
    '''Plot per-iteration train/test RMSLE for a staged boosting model.

    Parameters: estimator: GradientBoostingRegressor or AdaBoostRegressor
                X_train: 2d numpy array
                y_train: 1d numpy array
                X_test: 2d numpy array
                y_test: 1d numpy array
    Returns: A plot of the number of iterations vs the MSE for the model for
             both the training set and test set.
    '''
    estimator.fit(X_train, y_train)
    name = estimator.__class__.__name__.replace('Regressor', '')
    learn_rate = estimator.learning_rate
    n_stages = estimator.n_estimators
    train_mse = np.zeros((n_stages,), dtype=np.float64)
    test_mse = np.zeros((n_stages,), dtype=np.float64)
    # One prediction per boosting stage, for both splits.
    for stage, y_pred in enumerate(estimator.staged_predict(X_train)):
        train_mse[stage] = mean_squared_error(y_train, y_pred)
    for stage, y_pred in enumerate(estimator.staged_predict(X_test)):
        test_mse[stage] = mean_squared_error(y_test, y_pred)
    plt.subplots(figsize=(8, 10))
    plt.plot(np.sqrt(train_mse), alpha=.5,
             label="{0} Train - learning rate {1}".format(name, learn_rate))
    plt.plot(np.sqrt(test_mse), alpha=.5,
             label="{0} Test - learning rate {1}".format(name, learn_rate),
             ls='--')
    plt.title(name, fontsize=16, fontweight='bold')
    plt.ylabel('RMSLE', fontsize=14)
    plt.xlabel('Iterations', fontsize=14)
def rf_score_plot(randforest, X_train, y_train, X_test, y_test):
    '''Fit a random forest and draw its test RMSLE as a horizontal line.

    Parameters: randforest: RandomForestRegressor
                X_train: 2d numpy array
                y_train: 1d numpy array
                X_test: 2d numpy array
                y_test: 1d numpy array
    Returns: None; adds a reference line to the current matplotlib axes.
    '''
    randforest.fit(X_train, y_train)
    predictions = randforest.predict(X_test)
    rmsle = np.sqrt(mean_squared_error(y_test, predictions))
    plt.axhline(rmsle, alpha=0.7, c='grey', lw=1, ls='-.',
                label='Random Forest Test')
def gridsearch_with_output(estimator, parameter_grid, X_train, y_train):
    '''Run an exhaustive grid search and print a summary table.

    Parameters: estimator: the type of model (e.g. RandomForestRegressor())
                parameter_grid: dictionary defining the gridsearch parameters
                X_train: 2d numpy array
                y_train: 1d numpy array
    Returns: best parameters and model fit with those parameters
    '''
    searcher = GridSearchCV(estimator, parameter_grid, verbose=True,
                            n_jobs=-1, scoring='neg_mean_squared_error')
    searcher.fit(X_train, y_train)
    best_params = searcher.best_params_
    model_best = searcher.best_estimator_
    # One row per searched parameter: name, chosen value, candidate values.
    print("\nResult of gridsearch:")
    print("{0:<20s} | {1:<8s} | {2}".format("Parameter", "Optimal",
                                            "Gridsearch values"))
    print("-" * 55)
    for param, vals in parameter_grid.items():
        print("{0:<20s} | {1:<8s} | {2}".format(str(param),
                                                str(best_params[param]),
                                                str(vals)))
    return best_params, model_best
def display_default_and_gsearch_model_results(model_default, model_gridsearch,
                                              X_test, y_test):
    '''Print test-set RMSLE for a default-parameter model and its
    grid-searched counterpart.

    Parameters: model_default: fit model using initial parameters
                model_gridsearch: fit model using parameters from gridsearch
                X_test: 2d numpy array
                y_test: 1d numpy array
    Return: None, but prints out the RMSLE for the default model and the
            model with gridsearched parameters
    '''
    name = model_default.__class__.__name__.replace('Regressor', '')  # for printing
    y_test_pred = model_gridsearch.predict(X_test)
    rmsle = np.sqrt(mean_squared_error(y_test, y_test_pred))
    print("Results for {0}".format(name))
    # Fixed label: previously printed "rmlse" with a stray closing paren.
    print("Gridsearched model rmsle: {0:0.3f}".format(rmsle))
    y_test_pred = model_default.predict(X_test)
    rmsle = np.sqrt(mean_squared_error(y_test, y_test_pred))
    print("    Default model rmsle: {0:0.3f}".format(rmsle))
|
[
"matplotlib.pyplot.title",
"matplotlib.pyplot.axhline",
"sklearn.model_selection.GridSearchCV",
"sklearn.model_selection.cross_val_score",
"numpy.zeros",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.subplots",
"sklearn.metrics.mean_squared_error",
"numpy.sqrt"
] |
[((2361, 2414), 'numpy.zeros', 'np.zeros', (['(estimator.n_estimators,)'], {'dtype': 'np.float64'}), '((estimator.n_estimators,), dtype=np.float64)\n', (2369, 2414), True, 'import numpy as np\n'), ((2433, 2486), 'numpy.zeros', 'np.zeros', (['(estimator.n_estimators,)'], {'dtype': 'np.float64'}), '((estimator.n_estimators,), dtype=np.float64)\n', (2441, 2486), True, 'import numpy as np\n'), ((2858, 2887), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(8, 10)'}), '(figsize=(8, 10))\n', (2870, 2887), True, 'import matplotlib.pyplot as plt\n'), ((3241, 3288), 'matplotlib.pyplot.title', 'plt.title', (['name'], {'fontsize': '(16)', 'fontweight': '"""bold"""'}), "(name, fontsize=16, fontweight='bold')\n", (3250, 3288), True, 'import matplotlib.pyplot as plt\n'), ((3293, 3325), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""RMSLE"""'], {'fontsize': '(14)'}), "('RMSLE', fontsize=14)\n", (3303, 3325), True, 'import matplotlib.pyplot as plt\n'), ((3330, 3367), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Iterations"""'], {'fontsize': '(14)'}), "('Iterations', fontsize=14)\n", (3340, 3367), True, 'import matplotlib.pyplot as plt\n'), ((3924, 4016), 'matplotlib.pyplot.axhline', 'plt.axhline', (['test_score'], {'alpha': '(0.7)', 'c': '"""grey"""', 'lw': '(1)', 'ls': '"""-."""', 'label': '"""Random Forest Test"""'}), "(test_score, alpha=0.7, c='grey', lw=1, ls='-.', label=\n 'Random Forest Test')\n", (3935, 4016), True, 'import matplotlib.pyplot as plt\n'), ((4507, 4610), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', (['estimator', 'parameter_grid'], {'verbose': '(True)', 'n_jobs': '(-1)', 'scoring': '"""neg_mean_squared_error"""'}), "(estimator, parameter_grid, verbose=True, n_jobs=-1, scoring=\n 'neg_mean_squared_error')\n", (4519, 4610), False, 'from sklearn.model_selection import GridSearchCV\n'), ((1289, 1394), 'sklearn.model_selection.cross_val_score', 'cross_val_score', (['estimator', 'X_train', 'y_train'], {'scoring': 
'"""neg_mean_squared_error"""', 'cv': 'nfolds', 'n_jobs': '(-1)'}), "(estimator, X_train, y_train, scoring=\n 'neg_mean_squared_error', cv=nfolds, n_jobs=-1)\n", (1304, 1394), False, 'from sklearn.model_selection import train_test_split, cross_val_score\n'), ((2624, 2665), 'sklearn.metrics.mean_squared_error', 'mean_squared_error', (['y_train', 'y_train_pred'], {}), '(y_train, y_train_pred)\n', (2642, 2665), False, 'from sklearn.metrics import mean_squared_error\n'), ((2799, 2838), 'sklearn.metrics.mean_squared_error', 'mean_squared_error', (['y_test', 'y_test_pred'], {}), '(y_test, y_test_pred)\n', (2817, 2838), False, 'from sklearn.metrics import mean_squared_error\n'), ((2902, 2923), 'numpy.sqrt', 'np.sqrt', (['train_scores'], {}), '(train_scores)\n', (2909, 2923), True, 'import numpy as np\n'), ((3077, 3097), 'numpy.sqrt', 'np.sqrt', (['test_scores'], {}), '(test_scores)\n', (3084, 3097), True, 'import numpy as np\n'), ((3879, 3918), 'sklearn.metrics.mean_squared_error', 'mean_squared_error', (['y_test', 'y_test_pred'], {}), '(y_test, y_test_pred)\n', (3897, 3918), False, 'from sklearn.metrics import mean_squared_error\n'), ((5986, 6025), 'sklearn.metrics.mean_squared_error', 'mean_squared_error', (['y_test', 'y_test_pred'], {}), '(y_test, y_test_pred)\n', (6004, 6025), False, 'from sklearn.metrics import mean_squared_error\n'), ((6197, 6236), 'sklearn.metrics.mean_squared_error', 'mean_squared_error', (['y_test', 'y_test_pred'], {}), '(y_test, y_test_pred)\n', (6215, 6236), False, 'from sklearn.metrics import mean_squared_error\n')]
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
from third_party.models.base_model import BaseModel
from . import networks
class Kp2uvModel(BaseModel):
    """This class implements the keypoint-to-UV model (inference only)."""
    @staticmethod
    def modify_commandline_options(parser, is_train=True):
        # Force the dataset mode this model expects; adds no new options.
        parser.set_defaults(dataset_mode='kpuv')
        return parser
    def __init__(self, opt):
        """Initialize this model class.
        Parameters:
            opt -- test options
        """
        BaseModel.__init__(self, opt)
        # Names the BaseModel machinery uses to collect visuals / networks.
        self.visual_names = ['keypoints', 'output_uv']
        self.model_names = ['Kp2uv']
        self.netKp2uv = networks.define_kp2uv(gpu_ids=self.gpu_ids)
        self.isTrain = False  # only test mode supported
        # Our program will automatically call <model.setup> to define schedulers, load networks, and print networks
    def set_input(self, input):
        """Unpack input data from the dataloader.
        Parameters:
            input: a dictionary that contains the data itself and its metadata information.
        """
        self.keypoints = input['keypoints'].to(self.device)
        self.image_paths = input['path']
    def forward(self):
        """Run forward pass. This will be called by <test>."""
        output = self.netKp2uv.forward(self.keypoints)
        self.output_uv = self.output2rgb(output)
    def output2rgb(self, output):
        """Convert network outputs to RGB image.

        ``output`` is a pair ``(pred_id, pred_uv)``; the indexing below
        implies pred_id holds per-pixel part-class scores (argmax over dim 1
        gives IDs used as 1..24 here) and pred_uv holds 48 channels, i.e. a
        (U, V) pair per part ID. NOTE(review): exact channel semantics come
        from networks.define_kp2uv -- confirm there.
        """
        pred_id, pred_uv = output
        _, pred_id_class = pred_id.max(1)
        pred_id_class = pred_id_class.unsqueeze(1)
        # extract UV from pred_uv (48 channels); select based on class ID
        # Pixels whose class never matches any part ID keep the -1 fill.
        selected_uv = -1 * torch.ones(pred_uv.shape[0], 2, pred_uv.shape[2], pred_uv.shape[3], device=pred_uv.device)
        for partid in range(1, 25):
            # Where this part was predicted, overwrite the placeholder with
            # that part's (U, V) channel pair.
            mask = (pred_id_class == partid).float()
            selected_uv *= (1. - mask)
            selected_uv += mask * pred_uv[:, (partid - 1) * 2:(partid - 1) * 2 + 2]
        pred_uv = selected_uv
        # Channel 0: part ID rescaled (id*10 on a 0..255 gray scale, mapped
        # to [-1, 1]); channels 1-2: the selected UV values.
        rgb = torch.cat([pred_id_class.float() * 10 / 255. * 2 - 1, pred_uv], 1)
        return rgb
    def optimize_parameters(self):
        # Inference-only model: nothing to optimize.
        pass
|
[
"torch.ones",
"third_party.models.base_model.BaseModel.__init__"
] |
[((1061, 1090), 'third_party.models.base_model.BaseModel.__init__', 'BaseModel.__init__', (['self', 'opt'], {}), '(self, opt)\n', (1079, 1090), False, 'from third_party.models.base_model import BaseModel\n'), ((2240, 2335), 'torch.ones', 'torch.ones', (['pred_uv.shape[0]', '(2)', 'pred_uv.shape[2]', 'pred_uv.shape[3]'], {'device': 'pred_uv.device'}), '(pred_uv.shape[0], 2, pred_uv.shape[2], pred_uv.shape[3], device=\n pred_uv.device)\n', (2250, 2335), False, 'import torch\n')]
|
from django.db import models
# Create your models here.
# class Essay(models.Model):
# score = models.IntegerField()
# essayA = models.TextField()
# essayQ = models.TextField()
# name = models.CharField(max_length=20)
#
# def __str__(self):
# return self.score
class choice(models.Model):
    # Quiz/survey entry: a title, a score, and one question/answer pair.
    # NOTE(review): lowercase class name and Korean field names are kept
    # as-is -- renaming model fields would require a schema migration.
    제목 = models.TextField()  # title
    점수 = models.IntegerField()  # score
    질문 = models.TextField()  # question
    답변 = models.TextField()  # answer
class lotto_data(models.Model):
    # Thirteen decimal feature columns (a..m), one row per record.
    # NOTE(review): DecimalField(max_digits=20, decimal_places=20) leaves
    # zero integer digits, so only values strictly between -1 and 1 fit --
    # confirm these columns are meant to hold probabilities/fractions.
    a = models.DecimalField(max_digits=20,decimal_places=20)
    b = models.DecimalField(max_digits=20,decimal_places=20)
    c = models.DecimalField(max_digits=20,decimal_places=20)
    d = models.DecimalField(max_digits=20,decimal_places=20)
    e = models.DecimalField(max_digits=20,decimal_places=20)
    f = models.DecimalField(max_digits=20,decimal_places=20)
    g = models.DecimalField(max_digits=20,decimal_places=20)
    h = models.DecimalField(max_digits=20,decimal_places=20)
    i = models.DecimalField(max_digits=20,decimal_places=20)
    j = models.DecimalField(max_digits=20,decimal_places=20)
    k = models.DecimalField(max_digits=20,decimal_places=20)
    l = models.DecimalField(max_digits=20,decimal_places=20)
    m = models.DecimalField(max_digits=20,decimal_places=20)
|
[
"django.db.models.DecimalField",
"django.db.models.TextField",
"django.db.models.IntegerField"
] |
[((334, 352), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (350, 352), False, 'from django.db import models\n'), ((362, 383), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (381, 383), False, 'from django.db import models\n'), ((393, 411), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (409, 411), False, 'from django.db import models\n'), ((421, 439), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (437, 439), False, 'from django.db import models\n'), ((477, 530), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(20)', 'decimal_places': '(20)'}), '(max_digits=20, decimal_places=20)\n', (496, 530), False, 'from django.db import models\n'), ((538, 591), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(20)', 'decimal_places': '(20)'}), '(max_digits=20, decimal_places=20)\n', (557, 591), False, 'from django.db import models\n'), ((599, 652), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(20)', 'decimal_places': '(20)'}), '(max_digits=20, decimal_places=20)\n', (618, 652), False, 'from django.db import models\n'), ((660, 713), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(20)', 'decimal_places': '(20)'}), '(max_digits=20, decimal_places=20)\n', (679, 713), False, 'from django.db import models\n'), ((721, 774), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(20)', 'decimal_places': '(20)'}), '(max_digits=20, decimal_places=20)\n', (740, 774), False, 'from django.db import models\n'), ((782, 835), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(20)', 'decimal_places': '(20)'}), '(max_digits=20, decimal_places=20)\n', (801, 835), False, 'from django.db import models\n'), ((843, 896), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(20)', 'decimal_places': '(20)'}), 
'(max_digits=20, decimal_places=20)\n', (862, 896), False, 'from django.db import models\n'), ((904, 957), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(20)', 'decimal_places': '(20)'}), '(max_digits=20, decimal_places=20)\n', (923, 957), False, 'from django.db import models\n'), ((965, 1018), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(20)', 'decimal_places': '(20)'}), '(max_digits=20, decimal_places=20)\n', (984, 1018), False, 'from django.db import models\n'), ((1026, 1079), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(20)', 'decimal_places': '(20)'}), '(max_digits=20, decimal_places=20)\n', (1045, 1079), False, 'from django.db import models\n'), ((1087, 1140), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(20)', 'decimal_places': '(20)'}), '(max_digits=20, decimal_places=20)\n', (1106, 1140), False, 'from django.db import models\n'), ((1148, 1201), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(20)', 'decimal_places': '(20)'}), '(max_digits=20, decimal_places=20)\n', (1167, 1201), False, 'from django.db import models\n'), ((1209, 1262), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(20)', 'decimal_places': '(20)'}), '(max_digits=20, decimal_places=20)\n', (1228, 1262), False, 'from django.db import models\n')]
|
"""Unit tests for the OpenVAS source."""
from datetime import datetime, timezone
import unittest
from unittest.mock import Mock, patch
from src.collector import MetricCollector
class OpenVASTest(unittest.TestCase):
    """Unit tests for the OpenVAS metrics."""

    def setUp(self):
        # Shared mocked HTTP response; each test fills in the body it needs.
        self.mock_response = Mock()
        self.sources = {
            "source_id": {
                "type": "openvas",
                "parameters": {"url": "http://openvas.xml"},
            },
        }

    def test_warnings(self):
        """Test that the number of warnings is returned."""
        self.mock_response.text = """<?xml version="1.0"?>
        <report>
            <results>
                <result id="id">
                    <name>Name</name>
                    <description>Description</description>
                    <threat>Low</threat>
                    <host>1.2.3.4</host>
                    <port>80/tcp</port>
                </result>
            </results>
        </report>"""
        metric = {"type": "security_warnings", "addition": "sum",
                  "sources": self.sources}
        with patch("requests.get", return_value=self.mock_response):
            response = MetricCollector(metric).get()
        expected_entity = {"key": "id", "severity": "Low", "name": "Name",
                           "description": "Description", "host": "1.2.3.4",
                           "port": "80/tcp"}
        self.assertEqual([expected_entity],
                         response["sources"][0]["entities"])
        self.assertEqual("1", response["sources"][0]["value"])

    def test_source_up_to_dateness(self):
        """Test that the report age in days is returned."""
        self.mock_response.text = """
        <report extension="xml" type="scan" content_type="text/xml">
            <name>2019-04-09T17:56:14Z</name>
            <creation_time>2019-04-09T17:56:14Z</creation_time>
            <modification_time>2019-04-09T18:05:40Z</modification_time>
        </report>"""
        metric = {"type": "source_up_to_dateness", "addition": "max",
                  "sources": self.sources}
        with patch("requests.get", return_value=self.mock_response):
            response = MetricCollector(metric).get()
        report_date = datetime(2019, 4, 9, 17, 56, 14, tzinfo=timezone.utc)
        expected_age = (datetime.now(timezone.utc) - report_date).days
        self.assertEqual(str(expected_age), response["sources"][0]["value"])
|
[
"unittest.mock.Mock",
"datetime.datetime",
"unittest.mock.patch",
"src.collector.MetricCollector",
"datetime.datetime.now"
] |
[((316, 322), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (320, 322), False, 'from unittest.mock import Mock, patch\n'), ((947, 1001), 'unittest.mock.patch', 'patch', (['"""requests.get"""'], {'return_value': 'self.mock_response'}), "('requests.get', return_value=self.mock_response)\n", (952, 1001), False, 'from unittest.mock import Mock, patch\n'), ((1786, 1840), 'unittest.mock.patch', 'patch', (['"""requests.get"""'], {'return_value': 'self.mock_response'}), "('requests.get', return_value=self.mock_response)\n", (1791, 1840), False, 'from unittest.mock import Mock, patch\n'), ((1919, 1945), 'datetime.datetime.now', 'datetime.now', (['timezone.utc'], {}), '(timezone.utc)\n', (1931, 1945), False, 'from datetime import datetime, timezone\n'), ((1948, 2001), 'datetime.datetime', 'datetime', (['(2019)', '(4)', '(9)', '(17)', '(56)', '(14)'], {'tzinfo': 'timezone.utc'}), '(2019, 4, 9, 17, 56, 14, tzinfo=timezone.utc)\n', (1956, 2001), False, 'from datetime import datetime, timezone\n'), ((1026, 1049), 'src.collector.MetricCollector', 'MetricCollector', (['metric'], {}), '(metric)\n', (1041, 1049), False, 'from src.collector import MetricCollector\n'), ((1865, 1888), 'src.collector.MetricCollector', 'MetricCollector', (['metric'], {}), '(metric)\n', (1880, 1888), False, 'from src.collector import MetricCollector\n')]
|
from django.urls import path
from blog_app01 import views
# URL routes for blog_app01: each path maps a URL prefix to a view callable.
urlpatterns = [
    path('index/', views.index),  # landing page
    path('login/', views.login),  # login form / handler
    path('regist/', views.regist),  # user registration
    path('valid_img/', views.valid_img),  # presumably a CAPTCHA image endpoint -- confirm in views
]
|
[
"django.urls.path"
] |
[((79, 106), 'django.urls.path', 'path', (['"""index/"""', 'views.index'], {}), "('index/', views.index)\n", (83, 106), False, 'from django.urls import path\n'), ((112, 139), 'django.urls.path', 'path', (['"""login/"""', 'views.login'], {}), "('login/', views.login)\n", (116, 139), False, 'from django.urls import path\n'), ((145, 174), 'django.urls.path', 'path', (['"""regist/"""', 'views.regist'], {}), "('regist/', views.regist)\n", (149, 174), False, 'from django.urls import path\n'), ((180, 215), 'django.urls.path', 'path', (['"""valid_img/"""', 'views.valid_img'], {}), "('valid_img/', views.valid_img)\n", (184, 215), False, 'from django.urls import path\n')]
|
"""
Name: SampleEvent
breif: Samples events for particles provided in a phase space for MCDC-TNT
Author: <NAME> (OR State Univ - <EMAIL>) CEMeNT
Date: Dec 2nd 2021
"""
import numpy as np
import pykokkos as pk
@pk.workload
class SampleEvent:
    """Classify each live particle as a scatter, capture, or fission event.

    For particle ``i`` the cell's three cross sections are normalized to
    partition [0, 1); the pre-drawn random number ``rands[i]`` then selects
    the event. Captured/fissioned particles are killed; per-event tallies
    are exported through ``clever_out`` as [scatters, captures, fissions].
    """
    def __init__(self, p_mesh_cell, p_alive, mesh_cap_xsec, mesh_scat_xsec, mesh_fis_xsec, scatter_event_index, capture_event_index, fission_event_index, num_part, nu_new_neutrons, rands, clever_out):
        self.p_mesh_cell: pk.View1D[int] = p_mesh_cell
        self.p_alive: pk.View1D[int] = p_alive
        self.mesh_cap_xsec: pk.View1D[pk.double] = mesh_cap_xsec
        self.mesh_scat_xsec: pk.View1D[pk.double] = mesh_scat_xsec
        self.mesh_fis_xsec: pk.View1D[pk.double] = mesh_fis_xsec
        self.scatter_event_index: pk.View1D[int] = scatter_event_index
        self.capture_event_index: pk.View1D[int] = capture_event_index
        self.fission_event_index: pk.View1D[int] = fission_event_index
        self.num_part: int = num_part
        # BUG FIX: this previously assigned num_part, so every fission would
        # have spawned num_part neutrons instead of nu.
        self.nu_new_neutrons: int = nu_new_neutrons
        self.rands: pk.View1D[pk.double] = rands
        self.fissions_to_add: int = 0
        self.scat_count: int = 0
        self.cap_count: int = 0
        self.fis_count: int = 0
        self.killed: int = 0
        self.clever_out: pk.View1D[int] = clever_out

    @pk.main
    def run(self):
        for i in range(self.num_part):
            # Normalize this cell's cross sections so the three fractions
            # partition [0, 1) for the event roll below.
            total_xsec: pk.double = self.mesh_scat_xsec[self.p_mesh_cell[i]] + self.mesh_cap_xsec[self.p_mesh_cell[i]] + self.mesh_fis_xsec[self.p_mesh_cell[i]]
            mesh_scat_xsec_temp: pk.double = self.mesh_scat_xsec[self.p_mesh_cell[i]] / total_xsec
            mesh_cap_xsec_temp: pk.double = self.mesh_cap_xsec[self.p_mesh_cell[i]] / total_xsec
            mesh_fis_xsec_temp: pk.double = self.mesh_fis_xsec[self.p_mesh_cell[i]] / total_xsec
            if self.p_alive[i] == 1:
                event_rand: pk.double = self.rands[i]
                # scatter: particle survives, record its index
                if event_rand < mesh_scat_xsec_temp:
                    self.scatter_event_index[self.scat_count] = i
                    self.scat_count += 1
                # capture: kill the particle
                elif mesh_scat_xsec_temp < event_rand and event_rand < mesh_scat_xsec_temp + mesh_cap_xsec_temp:
                    self.p_alive[i] = 0
                    self.killed += 1
                    self.capture_event_index[self.cap_count] = i
                    self.cap_count += 1
                # fission: kill the particle, bank nu daughters
                elif mesh_scat_xsec_temp + mesh_cap_xsec_temp < event_rand and event_rand < mesh_scat_xsec_temp + mesh_cap_xsec_temp + mesh_fis_xsec_temp:
                    self.p_alive[i] = 0
                    self.killed += 1
                    self.fissions_to_add += self.nu_new_neutrons
                    self.fission_event_index[self.fis_count] = i
                    self.fis_count += 1
                else:
                    pk.printf('Well shoot dang')
        # Export the tallies for the host side.
        self.clever_out[0] = self.scat_count
        self.clever_out[1] = self.cap_count
        self.clever_out[2] = self.fis_count
def test_SampleEvent():
    """Drive SampleEvent with rigged random draws so that particle 0
    scatters, particle 1 is captured, and particle 2 fissions."""
    p_mesh_cell = pk.from_numpy(np.array([0, 1, 0, 5], dtype=np.int32))
    p_alive = pk.from_numpy(np.array([1, 1, 1, 0], dtype=np.int32))
    mesh_cap_xsec = pk.from_numpy(1 / 3 * np.ones(2, dtype=float))
    mesh_scat_xsec = pk.from_numpy(1 / 3 * np.ones(2, dtype=float))
    mesh_fis_xsec = pk.from_numpy(1 / 2 * np.ones(2, dtype=float))
    scatter_event_index = pk.from_numpy(np.zeros(3, dtype=np.int32))
    capture_event_index = pk.from_numpy(np.zeros(3, dtype=np.int32))
    fission_event_index = pk.from_numpy(np.zeros(3, dtype=np.int32))
    controlled_rands = pk.from_numpy(np.array([.2, .4, .8], dtype=float))
    clever_out = pk.from_numpy(np.zeros(3, dtype=np.int32))
    nu = 2
    num_part = 3

    print("Running!")
    pk.execute(pk.ExecutionSpace.OpenMP,
               SampleEvent(p_mesh_cell, p_alive, mesh_cap_xsec,
                           mesh_scat_xsec, mesh_fis_xsec,
                           scatter_event_index, capture_event_index,
                           fission_event_index, num_part, nu,
                           controlled_rands, clever_out))
    print('Made it through')

    scat_count = clever_out[0]
    cap_count = clever_out[1]
    fis_count = clever_out[2]
    print(scat_count)

    assert (scat_count == 1)
    assert (cap_count == 1)
    assert (fis_count == 1)
    assert (scatter_event_index[0] == 0)
    assert (capture_event_index[0] == 1)
    assert (fission_event_index[0] == 2)
if __name__ == '__main__':
    # Run the smoke test when this module is executed directly.
    test_SampleEvent()
|
[
"pykokkos.printf",
"numpy.zeros",
"numpy.ones",
"pykokkos.from_numpy",
"numpy.array"
] |
[((3753, 3791), 'numpy.array', 'np.array', (['[0, 1, 0, 5]'], {'dtype': 'np.int32'}), '([0, 1, 0, 5], dtype=np.int32)\n', (3761, 3791), True, 'import numpy as np\n'), ((3807, 3845), 'numpy.array', 'np.array', (['[1, 1, 1, 0]'], {'dtype': 'np.int32'}), '([1, 1, 1, 0], dtype=np.int32)\n', (3815, 3845), True, 'import numpy as np\n'), ((4048, 4075), 'numpy.zeros', 'np.zeros', (['(3)'], {'dtype': 'np.int32'}), '(3, dtype=np.int32)\n', (4056, 4075), True, 'import numpy as np\n'), ((4106, 4133), 'numpy.zeros', 'np.zeros', (['(3)'], {'dtype': 'np.int32'}), '(3, dtype=np.int32)\n', (4114, 4133), True, 'import numpy as np\n'), ((4164, 4191), 'numpy.zeros', 'np.zeros', (['(3)'], {'dtype': 'np.int32'}), '(3, dtype=np.int32)\n', (4172, 4191), True, 'import numpy as np\n'), ((4246, 4284), 'numpy.array', 'np.array', (['[0.2, 0.4, 0.8]'], {'dtype': 'float'}), '([0.2, 0.4, 0.8], dtype=float)\n', (4254, 4284), True, 'import numpy as np\n'), ((4358, 4384), 'pykokkos.from_numpy', 'pk.from_numpy', (['p_mesh_cell'], {}), '(p_mesh_cell)\n', (4371, 4384), True, 'import pykokkos as pk\n'), ((4403, 4425), 'pykokkos.from_numpy', 'pk.from_numpy', (['p_alive'], {}), '(p_alive)\n', (4416, 4425), True, 'import pykokkos as pk\n'), ((4459, 4487), 'pykokkos.from_numpy', 'pk.from_numpy', (['mesh_cap_xsec'], {}), '(mesh_cap_xsec)\n', (4472, 4487), True, 'import pykokkos as pk\n'), ((4513, 4542), 'pykokkos.from_numpy', 'pk.from_numpy', (['mesh_scat_xsec'], {}), '(mesh_scat_xsec)\n', (4526, 4542), True, 'import pykokkos as pk\n'), ((4567, 4595), 'pykokkos.from_numpy', 'pk.from_numpy', (['mesh_fis_xsec'], {}), '(mesh_fis_xsec)\n', (4580, 4595), True, 'import pykokkos as pk\n'), ((4635, 4669), 'pykokkos.from_numpy', 'pk.from_numpy', (['scatter_event_index'], {}), '(scatter_event_index)\n', (4648, 4669), True, 'import pykokkos as pk\n'), ((4700, 4734), 'pykokkos.from_numpy', 'pk.from_numpy', (['capture_event_index'], {}), '(capture_event_index)\n', (4713, 4734), True, 'import pykokkos as pk\n'), ((4765, 
4799), 'pykokkos.from_numpy', 'pk.from_numpy', (['fission_event_index'], {}), '(fission_event_index)\n', (4778, 4799), True, 'import pykokkos as pk\n'), ((4835, 4865), 'pykokkos.from_numpy', 'pk.from_numpy', (['controled_rands'], {}), '(controled_rands)\n', (4848, 4865), True, 'import pykokkos as pk\n'), ((4896, 4923), 'numpy.zeros', 'np.zeros', (['(3)'], {'dtype': 'np.int32'}), '(3, dtype=np.int32)\n', (4904, 4923), True, 'import numpy as np\n'), ((4946, 4971), 'pykokkos.from_numpy', 'pk.from_numpy', (['clever_out'], {}), '(clever_out)\n', (4959, 4971), True, 'import pykokkos as pk\n'), ((3880, 3903), 'numpy.ones', 'np.ones', (['(2)'], {'dtype': 'float'}), '(2, dtype=float)\n', (3887, 3903), True, 'import numpy as np\n'), ((3933, 3956), 'numpy.ones', 'np.ones', (['(2)'], {'dtype': 'float'}), '(2, dtype=float)\n', (3940, 3956), True, 'import numpy as np\n'), ((3985, 4008), 'numpy.ones', 'np.ones', (['(2)'], {'dtype': 'float'}), '(2, dtype=float)\n', (3992, 4008), True, 'import numpy as np\n'), ((3509, 3537), 'pykokkos.printf', 'pk.printf', (['"""Well shoot dang"""'], {}), "('Well shoot dang')\n", (3518, 3537), True, 'import pykokkos as pk\n')]
|
import torch
from torch.nn import Parameter
import torch.nn.functional as F
from torch_geometric.nn.conv import MessagePassing
from torch_geometric.utils import remove_self_loops
from torch_geometric.nn.inits import glorot, zeros
class KGCNConv(MessagePassing):
def __init__(
self, in_channels, out_channels,
negative_slope=0.2, bias=True, **kwargs):
super(KGCNConv, self).__init__(aggr='add', **kwargs)
self.in_channels = in_channels
self.out_channels = out_channels
self.negative_slope = negative_slope
self.weight = Parameter(torch.Tensor(in_channels, out_channels))
if bias:
self.bias = Parameter(torch.Tensor(out_channels))
else:
self.register_parameter('bias', None)
self.reset_parameters()
def reset_parameters(self):
glorot(self.weight)
zeros(self.bias)
def forward(self, x, edge_index, att_map, size=None):
""""""
if size is None and torch.is_tensor(x):
edge_index, _ = remove_self_loops(edge_index)
return self.propagate(edge_index, size=size, x=x, att_map=att_map)
def message(self, x_j, att_map):
return x_j * att_map.view(-1, 1)
def update(self, aggr_out, x):
aggr_out = F.relu(torch.mm(aggr_out + x, self.weight) + self.bias)
return aggr_out
def __repr__(self):
return '{}({}, {}, heads={})'.format(self.__class__.__name__,
self.in_channels,
self.out_channels, self.heads)
|
[
"torch_geometric.nn.inits.zeros",
"torch_geometric.nn.inits.glorot",
"torch.mm",
"torch.Tensor",
"torch_geometric.utils.remove_self_loops",
"torch.is_tensor"
] |
[((860, 879), 'torch_geometric.nn.inits.glorot', 'glorot', (['self.weight'], {}), '(self.weight)\n', (866, 879), False, 'from torch_geometric.nn.inits import glorot, zeros\n'), ((888, 904), 'torch_geometric.nn.inits.zeros', 'zeros', (['self.bias'], {}), '(self.bias)\n', (893, 904), False, 'from torch_geometric.nn.inits import glorot, zeros\n'), ((601, 640), 'torch.Tensor', 'torch.Tensor', (['in_channels', 'out_channels'], {}), '(in_channels, out_channels)\n', (613, 640), False, 'import torch\n'), ((1007, 1025), 'torch.is_tensor', 'torch.is_tensor', (['x'], {}), '(x)\n', (1022, 1025), False, 'import torch\n'), ((1055, 1084), 'torch_geometric.utils.remove_self_loops', 'remove_self_loops', (['edge_index'], {}), '(edge_index)\n', (1072, 1084), False, 'from torch_geometric.utils import remove_self_loops\n'), ((694, 720), 'torch.Tensor', 'torch.Tensor', (['out_channels'], {}), '(out_channels)\n', (706, 720), False, 'import torch\n'), ((1302, 1337), 'torch.mm', 'torch.mm', (['(aggr_out + x)', 'self.weight'], {}), '(aggr_out + x, self.weight)\n', (1310, 1337), False, 'import torch\n')]
|
import json
from django.apps import apps
from django.contrib.auth import get_user_model
from django.core.exceptions import ObjectDoesNotExist
from django.core.serializers.json import DjangoJSONEncoder
from django.db.models import TextField
from django.db.models.functions import Cast
from django.template.defaultfilters import truncatechars
from django.utils.html import format_html, format_html_join, mark_safe, format_html_join
from django.utils.translation import ugettext_lazy as _
from django.contrib.contenttypes.models import ContentType
from pyston.filters.default_filters import SimpleMethodEqualFilter
from pyston.utils.decorators import filter_by, order_by, filter_class
from is_core.generic_views.inlines.inline_table_views import InlineTableView
from is_core.generic_views.mixins import TabItem, TabsViewMixin
from is_core.generic_views.table_views import TableView
from is_core.main import UIRESTModelISCore
from is_core.utils import render_model_objects_with_link, render_model_object_with_link
from is_core.utils.decorators import short_description
from security.config import settings
from security.models import CommandLog, InputLoggedRequest, OutputLoggedRequest, CeleryTaskLog, CeleryTaskRunLog
from ansi2html import Ansi2HTMLConverter
def display_json(value):
return json.dumps(value, indent=4, ensure_ascii=False, cls=DjangoJSONEncoder)
def display_as_code(value):
return format_html('<code style="white-space:pre-wrap;">{}</code>', value) if value else value
def display_related_objects(request, related_objects):
related_object_instances = []
for related_object in related_objects:
try:
related_object_instances.append(related_object.object)
except (ObjectDoesNotExist, AttributeError):
pass
return render_model_objects_with_link(request, related_object_instances)
def get_content_type_pks_of_parent_related_classes():
return {
ContentType.objects.get_for_model(model_class).pk
for model_class in (CommandLog, InputLoggedRequest, OutputLoggedRequest, CeleryTaskLog, CeleryTaskRunLog)
}
class UsernameUserFilter(SimpleMethodEqualFilter):
def get_filter_term(self, value, operator_slug, request):
user_model = get_user_model()
return {
'user_id__in': list(
user_model.objects.filter(
**{'{}__contains'.format(user_model.USERNAME_FIELD): value}
).annotate(
str_id=Cast('id', output_field=TextField())
).values_list('str_id', flat=True)
)
}
class SecurityISCoreMixin:
@short_description(_('related objects'))
def display_related_objects(self, obj, request):
return display_related_objects(
request, obj.related_objects.exclude(object_ct_id__in=get_content_type_pks_of_parent_related_classes())
)
@short_description(_('source'))
def display_source(self, obj, request):
return display_related_objects(
request, obj.related_objects.filter(object_ct_id__in=get_content_type_pks_of_parent_related_classes())
)
@short_description(_('raised output logged requests'))
def display_output_logged_requests(self, obj, request):
return render_model_objects_with_link(
request,
OutputLoggedRequest.objects.filter(
related_objects__object_id=obj.pk,
related_objects__object_ct_id=ContentType.objects.get_for_model(obj).pk
)
)
@short_description(_('raised command logs'))
def display_command_logs(self, obj, request):
return render_model_objects_with_link(
request,
CommandLog.objects.filter(
related_objects__object_id=obj.pk,
related_objects__object_ct_id=ContentType.objects.get_for_model(obj).pk
)
)
@short_description(_('raised celery task logs'))
def display_celery_task_logs(self, obj, request):
return render_model_objects_with_link(
request,
CeleryTaskLog.objects.filter(
related_objects__object_id=obj.pk,
related_objects__object_ct_id=ContentType.objects.get_for_model(obj).pk
)
)
class RequestsLogISCore(SecurityISCoreMixin, UIRESTModelISCore):
abstract = True
can_create = can_update = can_delete = False
@short_description(_('queries'))
def queries_code(self, obj):
return display_as_code(display_json(obj.queries)) if obj else None
@short_description(_('request body'))
def request_body_code(self, obj):
return display_as_code(obj.request_body) if obj else None
@short_description(_('request headers'))
def request_headers_code(self, obj):
return display_as_code(display_json(obj.request_headers)) if obj else None
@short_description(_('response body'))
def response_body_code(self, obj):
return display_as_code(obj.response_body) if obj else None
@short_description(_('response headers'))
def response_headers_code(self, obj):
return display_as_code(display_json(obj.response_headers)) if obj else None
@short_description(_('error description'))
def error_description_code(self, obj):
return display_as_code(obj.error_description) if obj else None
class InputRequestsLogISCore(RequestsLogISCore):
model = InputLoggedRequest
abstract = True
ui_list_fields = (
'id', 'created_at', 'changed_at', 'request_timestamp', 'response_timestamp', 'response_time', 'status',
'response_code', 'host', 'short_path', 'slug', 'ip', 'user', 'method', 'type', 'short_response_body',
'short_request_body', 'short_queries', 'short_request_headers'
)
form_fieldsets = (
(_('Request'), {'fields': ('created_at', 'changed_at', 'request_timestamp', 'host', 'method', 'path',
'queries_code', 'request_headers_code', 'request_body_code', 'is_secure')}),
(_('Response'), {'fields': ('response_timestamp', 'response_code', 'status', 'response_headers_code',
'response_body_code', 'type', 'error_description_code')}),
(_('User information'), {'fields': ('user', 'ip')}),
(_('Extra information'), {'fields': ('slug', 'response_time', 'display_related_objects',
'display_output_logged_requests', 'display_command_logs',
'display_celery_task_logs')}),
)
def get_form_fieldsets(self, request, obj=None):
form_fieldsets = list(super().get_form_fieldsets(request, obj))
app_names = {app.name for app in apps.get_app_configs()}
if (settings.SHOW_DEBUG_TOOLBAR and 'security.contrib.debug_toolbar_log' in app_names
and obj and hasattr(obj, 'input_logged_request_toolbar')):
form_fieldsets.append((None, {'fields': ('debug_toolbar',)}))
return form_fieldsets
@short_description(_('user'))
@filter_class(UsernameUserFilter)
def user(self, obj):
return obj.user
@short_description('')
def debug_toolbar(self, obj):
return mark_safe(obj.input_logged_request_toolbar.toolbar)
class OutputRequestsLogISCore(RequestsLogISCore):
model = OutputLoggedRequest
abstract = True
ui_list_fields = (
'id', 'created_at', 'changed_at', 'request_timestamp', 'response_timestamp', 'response_time', 'status',
'response_code', 'host', 'short_path', 'method', 'slug', 'short_response_body', 'short_request_body',
'short_queries', 'short_request_headers'
)
form_fieldsets = (
(_('Request'), {'fields': ('created_at', 'changed_at', 'request_timestamp', 'host', 'method', 'path',
'queries_code', 'request_headers_code', 'request_body_code', 'is_secure')}),
(_('Response'), {'fields': ('response_timestamp', 'response_code', 'status', 'response_headers_code',
'response_body_code', 'error_description_code')}),
(_('Extra information'), {'fields': ('slug', 'response_time', 'display_related_objects', 'display_source')}),
)
class CommandLogISCore(SecurityISCoreMixin, UIRESTModelISCore):
model = CommandLog
can_create = can_update = can_delete = False
ui_list_fields = (
'id', 'created_at', 'changed_at', 'name', 'start', 'stop', 'time', 'executed_from_command_line', 'is_successful'
)
form_fieldsets = (
(None, {
'fields': ('created_at', 'changed_at', 'name', 'input', 'output_html', 'error_message',
'display_related_objects', 'display_source', 'display_output_logged_requests',
'display_command_logs', 'display_celery_task_logs'),
'class': 'col-sm-6'
}),
(None, {
'fields': ('start', 'stop', 'time', 'executed_from_command_line', 'is_successful'),
'class': 'col-sm-6'
}),
)
abstract = True
@short_description(_('output'))
def output_html(self, obj=None):
if obj and obj.output is not None:
conv = Ansi2HTMLConverter()
output = mark_safe(conv.convert(obj.output, full=False))
return display_as_code(output)
return None
class CeleryTaskLogTabs(TabsViewMixin):
tabs = (
TabItem('list-celerytasklog', _('celery task')),
TabItem('list-celerytaskrunlog', _('celery task run')),
)
class CeleryTaskLogTableView(CeleryTaskLogTabs, TableView):
pass
class CeleryTaskRunLogISCore(SecurityISCoreMixin, UIRESTModelISCore):
model = CeleryTaskRunLog
abstract = True
can_create = can_update = can_delete = False
rest_extra_filter_fields = (
'celery_task_id',
)
ui_list_fields = (
'celery_task_id', 'created_at', 'changed_at', 'name', 'state', 'start', 'stop', 'time', 'result', 'retries',
'get_task_log'
)
form_fields = (
'celery_task_id', 'task_log', 'start', 'stop', 'time', 'state', 'result', 'error_message', 'output_html',
'retries', 'estimated_time_of_next_retry', 'display_related_objects', 'display_output_logged_requests',
'display_command_logs', 'display_celery_task_logs'
)
ui_list_view = CeleryTaskLogTableView
default_ordering = ('-created_at',)
@short_description(_('celery task log'))
def task_log(self, obj):
return obj.get_task_log()
@short_description(_('output'))
def output_html(self, obj):
if obj and obj.output is not None:
conv = Ansi2HTMLConverter()
output = mark_safe(conv.convert(obj.output, full=False))
return display_as_code(output)
return None
class CeleryTaskRunLogInlineTableView(InlineTableView):
model = CeleryTaskRunLog
fields = (
'created_at', 'changed_at', 'start', 'stop', 'time', 'state', 'result', 'retries'
)
def _get_list_filter(self):
return {
'filter': {
'celery_task_id': self.parent_instance.celery_task_id
}
}
class CeleryTaskLogISCore(SecurityISCoreMixin, UIRESTModelISCore):
model = CeleryTaskLog
abstract = True
can_create = can_update = can_delete = False
ui_list_fields = (
'celery_task_id', 'created_at', 'changed_at', 'name', 'short_input', 'state', 'get_start', 'get_stop',
'queue_name'
)
form_fieldsets = (
(None, {
'fields': (
'celery_task_id', 'created_at', 'changed_at', 'name', 'state', 'get_start', 'get_stop',
'estimated_time_of_first_arrival', 'expires', 'stale', 'queue_name', 'input', 'display_related_objects',
'display_source'
)
}),
(_('celery task runs'), {'inline_view': CeleryTaskRunLogInlineTableView}),
)
ui_list_view = CeleryTaskLogTableView
@filter_by('input')
@order_by('input')
@short_description(_('input'))
def short_input(self, obj):
return truncatechars(obj.input, 50)
def is_active_menu_item(self, request, active_group):
return active_group in {
self.menu_group,
'celerytaskrunlog',
}
|
[
"django.db.models.TextField",
"pyston.utils.decorators.filter_by",
"django.apps.apps.get_app_configs",
"django.contrib.auth.get_user_model",
"json.dumps",
"django.contrib.contenttypes.models.ContentType.objects.get_for_model",
"django.template.defaultfilters.truncatechars",
"pyston.utils.decorators.order_by",
"django.utils.html.mark_safe",
"ansi2html.Ansi2HTMLConverter",
"is_core.utils.render_model_objects_with_link",
"django.utils.translation.ugettext_lazy",
"pyston.utils.decorators.filter_class",
"django.utils.html.format_html",
"is_core.utils.decorators.short_description"
] |
[((1299, 1369), 'json.dumps', 'json.dumps', (['value'], {'indent': '(4)', 'ensure_ascii': '(False)', 'cls': 'DjangoJSONEncoder'}), '(value, indent=4, ensure_ascii=False, cls=DjangoJSONEncoder)\n', (1309, 1369), False, 'import json\n'), ((1795, 1860), 'is_core.utils.render_model_objects_with_link', 'render_model_objects_with_link', (['request', 'related_object_instances'], {}), '(request, related_object_instances)\n', (1825, 1860), False, 'from is_core.utils import render_model_objects_with_link, render_model_object_with_link\n'), ((7103, 7135), 'pyston.utils.decorators.filter_class', 'filter_class', (['UsernameUserFilter'], {}), '(UsernameUserFilter)\n', (7115, 7135), False, 'from pyston.utils.decorators import filter_by, order_by, filter_class\n'), ((7191, 7212), 'is_core.utils.decorators.short_description', 'short_description', (['""""""'], {}), "('')\n", (7208, 7212), False, 'from is_core.utils.decorators import short_description\n'), ((12049, 12067), 'pyston.utils.decorators.filter_by', 'filter_by', (['"""input"""'], {}), "('input')\n", (12058, 12067), False, 'from pyston.utils.decorators import filter_by, order_by, filter_class\n'), ((12073, 12090), 'pyston.utils.decorators.order_by', 'order_by', (['"""input"""'], {}), "('input')\n", (12081, 12090), False, 'from pyston.utils.decorators import filter_by, order_by, filter_class\n'), ((1411, 1478), 'django.utils.html.format_html', 'format_html', (['"""<code style="white-space:pre-wrap;">{}</code>"""', 'value'], {}), '(\'<code style="white-space:pre-wrap;">{}</code>\', value)\n', (1422, 1478), False, 'from django.utils.html import format_html, format_html_join, mark_safe, format_html_join\n'), ((2245, 2261), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (2259, 2261), False, 'from django.contrib.auth import get_user_model\n'), ((2654, 2674), 'django.utils.translation.ugettext_lazy', '_', (['"""related objects"""'], {}), "('related objects')\n", (2655, 2674), True, 'from 
django.utils.translation import ugettext_lazy as _\n'), ((2919, 2930), 'django.utils.translation.ugettext_lazy', '_', (['"""source"""'], {}), "('source')\n", (2920, 2930), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3165, 3199), 'django.utils.translation.ugettext_lazy', '_', (['"""raised output logged requests"""'], {}), "('raised output logged requests')\n", (3166, 3199), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3564, 3588), 'django.utils.translation.ugettext_lazy', '_', (['"""raised command logs"""'], {}), "('raised command logs')\n", (3565, 3588), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3934, 3962), 'django.utils.translation.ugettext_lazy', '_', (['"""raised celery task logs"""'], {}), "('raised celery task logs')\n", (3935, 3962), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((4453, 4465), 'django.utils.translation.ugettext_lazy', '_', (['"""queries"""'], {}), "('queries')\n", (4454, 4465), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((4599, 4616), 'django.utils.translation.ugettext_lazy', '_', (['"""request body"""'], {}), "('request body')\n", (4600, 4616), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((4746, 4766), 'django.utils.translation.ugettext_lazy', '_', (['"""request headers"""'], {}), "('request headers')\n", (4747, 4766), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((4916, 4934), 'django.utils.translation.ugettext_lazy', '_', (['"""response body"""'], {}), "('response body')\n", (4917, 4934), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((5066, 5087), 'django.utils.translation.ugettext_lazy', '_', (['"""response headers"""'], {}), "('response headers')\n", (5067, 5087), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((5239, 5261), 'django.utils.translation.ugettext_lazy', '_', (['"""error description"""'], {}), "('error 
description')\n", (5240, 5261), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((7087, 7096), 'django.utils.translation.ugettext_lazy', '_', (['"""user"""'], {}), "('user')\n", (7088, 7096), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((7262, 7313), 'django.utils.html.mark_safe', 'mark_safe', (['obj.input_logged_request_toolbar.toolbar'], {}), '(obj.input_logged_request_toolbar.toolbar)\n', (7271, 7313), False, 'from django.utils.html import format_html, format_html_join, mark_safe, format_html_join\n'), ((9149, 9160), 'django.utils.translation.ugettext_lazy', '_', (['"""output"""'], {}), "('output')\n", (9150, 9160), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((10497, 10517), 'django.utils.translation.ugettext_lazy', '_', (['"""celery task log"""'], {}), "('celery task log')\n", (10498, 10517), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((10606, 10617), 'django.utils.translation.ugettext_lazy', '_', (['"""output"""'], {}), "('output')\n", (10607, 10617), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((12173, 12201), 'django.template.defaultfilters.truncatechars', 'truncatechars', (['obj.input', '(50)'], {}), '(obj.input, 50)\n', (12186, 12201), False, 'from django.template.defaultfilters import truncatechars\n'), ((12114, 12124), 'django.utils.translation.ugettext_lazy', '_', (['"""input"""'], {}), "('input')\n", (12115, 12124), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1938, 1984), 'django.contrib.contenttypes.models.ContentType.objects.get_for_model', 'ContentType.objects.get_for_model', (['model_class'], {}), '(model_class)\n', (1971, 1984), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((5836, 5848), 'django.utils.translation.ugettext_lazy', '_', (['"""Request"""'], {}), "('Request')\n", (5837, 5848), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6058, 6071), 
'django.utils.translation.ugettext_lazy', '_', (['"""Response"""'], {}), "('Response')\n", (6059, 6071), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6263, 6284), 'django.utils.translation.ugettext_lazy', '_', (['"""User information"""'], {}), "('User information')\n", (6264, 6284), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6324, 6346), 'django.utils.translation.ugettext_lazy', '_', (['"""Extra information"""'], {}), "('Extra information')\n", (6325, 6346), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((7753, 7765), 'django.utils.translation.ugettext_lazy', '_', (['"""Request"""'], {}), "('Request')\n", (7754, 7765), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((7975, 7988), 'django.utils.translation.ugettext_lazy', '_', (['"""Response"""'], {}), "('Response')\n", (7976, 7988), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((8172, 8194), 'django.utils.translation.ugettext_lazy', '_', (['"""Extra information"""'], {}), "('Extra information')\n", (8173, 8194), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((9261, 9281), 'ansi2html.Ansi2HTMLConverter', 'Ansi2HTMLConverter', ([], {}), '()\n', (9279, 9281), False, 'from ansi2html import Ansi2HTMLConverter\n'), ((9508, 9524), 'django.utils.translation.ugettext_lazy', '_', (['"""celery task"""'], {}), "('celery task')\n", (9509, 9524), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((9568, 9588), 'django.utils.translation.ugettext_lazy', '_', (['"""celery task run"""'], {}), "('celery task run')\n", (9569, 9588), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((10713, 10733), 'ansi2html.Ansi2HTMLConverter', 'Ansi2HTMLConverter', ([], {}), '()\n', (10731, 10733), False, 'from ansi2html import Ansi2HTMLConverter\n'), ((11920, 11941), 'django.utils.translation.ugettext_lazy', '_', (['"""celery task runs"""'], {}), "('celery task runs')\n", 
(11921, 11941), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6765, 6787), 'django.apps.apps.get_app_configs', 'apps.get_app_configs', ([], {}), '()\n', (6785, 6787), False, 'from django.apps import apps\n'), ((3474, 3512), 'django.contrib.contenttypes.models.ContentType.objects.get_for_model', 'ContentType.objects.get_for_model', (['obj'], {}), '(obj)\n', (3507, 3512), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((3844, 3882), 'django.contrib.contenttypes.models.ContentType.objects.get_for_model', 'ContentType.objects.get_for_model', (['obj'], {}), '(obj)\n', (3877, 3882), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((4225, 4263), 'django.contrib.contenttypes.models.ContentType.objects.get_for_model', 'ContentType.objects.get_for_model', (['obj'], {}), '(obj)\n', (4258, 4263), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((2513, 2524), 'django.db.models.TextField', 'TextField', ([], {}), '()\n', (2522, 2524), False, 'from django.db.models import TextField\n')]
|
import os
from argparse import ArgumentParser
from imgtools.io import (ImageFileLoader, ImageFileWriter,
read_dicom_rtstruct, read_dicom_series, read_dicom_rtdose, read_dicom_pet)
from imgtools.ops import StructureSetToSegmentation, ImageFileInput, ImageFileOutput, Resample
from imgtools.pipeline import Pipeline
###############################################################
# Example usage:
# python radcure_simple.py ./data/RADCURE/data ./RADCURE_output
###############################################################
class RADCUREPipeline(Pipeline):
"""Example processing pipeline for the RADCURE dataset.
This pipeline loads the CT images and structure sets, re-samples the images,
and draws the GTV contour using the resampled image.
"""
def __init__(self,
input_directory,
output_directory,
spacing=(1., 1., 0.),
n_jobs=-1,
missing_strategy="drop",
show_progress=False,
warn_on_error=False):
super().__init__(
n_jobs=n_jobs,
missing_strategy=missing_strategy,
show_progress=show_progress,
warn_on_error=warn_on_error)
# pipeline configuration
self.input_directory = input_directory
self.output_directory = output_directory
self.spacing = spacing
# pipeline ops
# input ops
self.image_input = ImageFileInput(
self.input_directory, # where to look for the images
get_subject_id_from="subject_directory", # how to extract the subject ID, 'subject_directory' means use the name of the subject directory
subdir_path="*/ImageSet_*", # whether the images are stored in a subdirectory of the subject directory (also accepts glob patterns)
reader=read_dicom_series # the function used to read individual images
)
self.structure_set_input = ImageFileInput(
self.input_directory,
get_subject_id_from="subject_directory",
subdir_path="*/structures/RTSTRUCT.dcm",
reader=read_dicom_rtstruct
)
self.rtdose_input = ImageFileInput(
self.input_directory,
get_subject_id_from="subject_directory",
subdir_path="*/dose/DOSE.dcm",
reader=read_dicom_rtdose
)
self.petscan_input = ImageFileInput(
self.input_directory,
get_subject_id_from="subject_directory",
subdir_path="*/pet_*",
reader=read_dicom_pet
)
# image processing ops
self.resample = Resample(spacing=self.spacing)
# Note: the ROI name is temporarily changed to match the example data
# since RADCURE is still not public. The correct ROI name for RADCURE is 'GTV'.
self.make_binary_mask = StructureSetToSegmentation(roi_names="GTV-1")#"GTV")
# output ops
self.image_output = ImageFileOutput(
os.path.join(self.output_directory, "images"), # where to save the processed images
filename_format="{subject_id}_image.nrrd", # the filename template, {subject_id} will be replaced by each subject's ID at runtime
create_dirs=True, # whether to create directories that don't exists already
compress=True # enable compression for NRRD format
)
self.mask_output = ImageFileOutput(
os.path.join(self.output_directory, "masks"),
filename_format="{subject_id}_mask.nrrd",
create_dirs=True,
compress=True
)
self.dose_output = ImageFileOutput(
os.path.join(self.output_directory, "doses"),
filename_format="{subject_id}_dose.nrrd",
create_dirs=True,
compress=True
)
self.petscan_output = ImageFileOutput(
os.path.join(self.output_directory, "petscan"),
filename_format="{subject_id}_petscan.nrrd",
create_dirs=True,
compress=True
)
def process_one_subject(self, subject_id):
"""Define the processing operations for one subject.
This method must be defined for all pipelines. It is used to define
the preprocessing steps for a single subject (note: that might mean
multiple images, structures, etc.). During pipeline execution, this
method will receive one argument, subject_id, which can be used to
retrieve inputs and save outputs.
Parameters
----------
subject_id : str
The ID of currently processed subject
"""
image = self.image_input(subject_id)
structure_set = self.structure_set_input(subject_id)
dose_set = self.rtdose_input(subject_id)
pet_set = self.petscan_input(subject_id)
image = self.resample(image)
# note that the binary mask can be generated with correct spacing using
# the resampled image, eliminating the need to resample it separately
# mask = self.make_binary_mask(structure_set, image)
self.image_output(subject_id, image)
# self.mask_output(subject_id, mask)
self.dose_output(subject_id, dose_set)
self.petscan_output(subject_id, pet_set)
if __name__ == "__main__":
parser = ArgumentParser("Example RADCURE processing pipeline.")
parser.add_argument(
"input_directory",
type=str,
help="Path to the input directory of RADCURE dataset.")
parser.add_argument(
"output_directory",
type=str,
help="Path to the directory where the processed images will be saved.")
parser.add_argument(
"--spacing",
nargs=3,
type=float,
default=(1., 1., 0.),
help="The resampled voxel spacing in (x, y, z) directions.")
parser.add_argument(
"--n_jobs",
type=int,
default=1,
help="The number of parallel processes to use.")
parser.add_argument(
"--show_progress",
action="store_true",
help="Whether to print progress to standard output.")
args = parser.parse_args()
pipeline = RADCUREPipeline(
input_directory=args.input_directory,
output_directory=args.output_directory,
spacing=args.spacing,
n_jobs=args.n_jobs,
show_progress=args.show_progress)
pipeline.run()
|
[
"imgtools.ops.ImageFileInput",
"argparse.ArgumentParser",
"imgtools.ops.Resample",
"os.path.join",
"imgtools.ops.StructureSetToSegmentation"
] |
[((5497, 5551), 'argparse.ArgumentParser', 'ArgumentParser', (['"""Example RADCURE processing pipeline."""'], {}), "('Example RADCURE processing pipeline.')\n", (5511, 5551), False, 'from argparse import ArgumentParser\n'), ((1486, 1622), 'imgtools.ops.ImageFileInput', 'ImageFileInput', (['self.input_directory'], {'get_subject_id_from': '"""subject_directory"""', 'subdir_path': '"""*/ImageSet_*"""', 'reader': 'read_dicom_series'}), "(self.input_directory, get_subject_id_from=\n 'subject_directory', subdir_path='*/ImageSet_*', reader=read_dicom_series)\n", (1500, 1622), False, 'from imgtools.ops import StructureSetToSegmentation, ImageFileInput, ImageFileOutput, Resample\n'), ((2037, 2193), 'imgtools.ops.ImageFileInput', 'ImageFileInput', (['self.input_directory'], {'get_subject_id_from': '"""subject_directory"""', 'subdir_path': '"""*/structures/RTSTRUCT.dcm"""', 'reader': 'read_dicom_rtstruct'}), "(self.input_directory, get_subject_id_from=\n 'subject_directory', subdir_path='*/structures/RTSTRUCT.dcm', reader=\n read_dicom_rtstruct)\n", (2051, 2193), False, 'from imgtools.ops import StructureSetToSegmentation, ImageFileInput, ImageFileOutput, Resample\n'), ((2270, 2414), 'imgtools.ops.ImageFileInput', 'ImageFileInput', (['self.input_directory'], {'get_subject_id_from': '"""subject_directory"""', 'subdir_path': '"""*/dose/DOSE.dcm"""', 'reader': 'read_dicom_rtdose'}), "(self.input_directory, get_subject_id_from=\n 'subject_directory', subdir_path='*/dose/DOSE.dcm', reader=\n read_dicom_rtdose)\n", (2284, 2414), False, 'from imgtools.ops import StructureSetToSegmentation, ImageFileInput, ImageFileOutput, Resample\n'), ((2493, 2621), 'imgtools.ops.ImageFileInput', 'ImageFileInput', (['self.input_directory'], {'get_subject_id_from': '"""subject_directory"""', 'subdir_path': '"""*/pet_*"""', 'reader': 'read_dicom_pet'}), "(self.input_directory, get_subject_id_from=\n 'subject_directory', subdir_path='*/pet_*', reader=read_dicom_pet)\n", (2507, 2621), False, 'from 
imgtools.ops import StructureSetToSegmentation, ImageFileInput, ImageFileOutput, Resample\n'), ((2730, 2760), 'imgtools.ops.Resample', 'Resample', ([], {'spacing': 'self.spacing'}), '(spacing=self.spacing)\n', (2738, 2760), False, 'from imgtools.ops import StructureSetToSegmentation, ImageFileInput, ImageFileOutput, Resample\n'), ((2959, 3004), 'imgtools.ops.StructureSetToSegmentation', 'StructureSetToSegmentation', ([], {'roi_names': '"""GTV-1"""'}), "(roi_names='GTV-1')\n", (2985, 3004), False, 'from imgtools.ops import StructureSetToSegmentation, ImageFileInput, ImageFileOutput, Resample\n'), ((3091, 3136), 'os.path.join', 'os.path.join', (['self.output_directory', '"""images"""'], {}), "(self.output_directory, 'images')\n", (3103, 3136), False, 'import os\n'), ((3600, 3644), 'os.path.join', 'os.path.join', (['self.output_directory', '"""masks"""'], {}), "(self.output_directory, 'masks')\n", (3612, 3644), False, 'import os\n'), ((3822, 3866), 'os.path.join', 'os.path.join', (['self.output_directory', '"""doses"""'], {}), "(self.output_directory, 'doses')\n", (3834, 3866), False, 'import os\n'), ((4047, 4093), 'os.path.join', 'os.path.join', (['self.output_directory', '"""petscan"""'], {}), "(self.output_directory, 'petscan')\n", (4059, 4093), False, 'import os\n')]
|
import logging
logging.basicConfig(format='%(levelname)s::%(module)s(l%(lineno)s)::%(funcName)s::%(message)s')
|
[
"logging.basicConfig"
] |
[((16, 116), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(levelname)s::%(module)s(l%(lineno)s)::%(funcName)s::%(message)s"""'}), "(format=\n '%(levelname)s::%(module)s(l%(lineno)s)::%(funcName)s::%(message)s')\n", (35, 116), False, 'import logging\n')]
|