code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
import yfinance as yf
from datetime import datetime
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
from arch import arch_model
from volatility.utils import get_percent_chg
# Fit GARCH(2, 2) models to SPY return series over several horizons and
# plot realized returns against rolling one-step-ahead predicted volatility.
start = datetime(2000, 1, 1)
end = datetime(2020, 9, 11)
symbol = 'SPY'
tickerData = yf.Ticker(symbol)
df = tickerData.history(period='1d', start=start, end=end)

# Presumably adds 'ret_<n>' percent-change columns to df in place for the
# n-day horizons used in keyList below — confirm in volatility.utils.
get_percent_chg(df, 1)
get_percent_chg(df, 5)
get_percent_chg(df, 10)
get_percent_chg(df, 15)
get_percent_chg(df, 21)

returns = df.Close.pct_change().dropna()
df['ret_1a'] = returns  # NOTE(review): stored but not used below — confirm intent

# Number of trading days held out for the rolling forecast window.
# (Removed dead assignment `test_size = 365*5` that was immediately overwritten.)
test_size = 365

keyList = ['ret_1', 'ret_5', 'ret_10', 'ret_15', 'ret_21']
fig, ax = plt.subplots(figsize=(10, 5), nrows=5, ncols=1)
k = 0
for key in keyList:
    # Scale to percent units, which stabilizes GARCH estimation.
    returns = 100 * df[key].dropna()
    predictions = []
    print('key', key)
    for i in range(test_size):
        # Expanding window: train on everything before the forecast day.
        train = returns[:-(test_size - i)]
        model = arch_model(train, p=2, q=2)
        model_fit = model.fit(disp='off')
        pred_val = model_fit.forecast(horizon=1)
        # Predicted volatility = sqrt of the one-step-ahead conditional variance.
        predictions.append(np.sqrt(pred_val.variance.values[-1, :][0]))
    predictions = pd.Series(predictions, index=returns.index[-test_size:])
    ax[k].plot(returns[-test_size:], label=key, color='r')
    ax[k].plot(predictions, label=key + ' volpred', color='b')
    ax[k].set_ylabel(key)
    k += 1
ax[k - 1].set_xlabel('Date')
plt.legend(['True Returns', 'Predicted Volatility'], loc=2, fontsize=8)
plt.show() | [
"datetime.datetime",
"pandas.Series",
"numpy.sqrt",
"arch.arch_model",
"volatility.utils.get_percent_chg",
"yfinance.Ticker",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show"
] | [((205, 225), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)'], {}), '(2000, 1, 1)\n', (213, 225), False, 'from datetime import datetime\n'), ((232, 253), 'datetime.datetime', 'datetime', (['(2020)', '(9)', '(11)'], {}), '(2020, 9, 11)\n', (240, 253), False, 'from datetime import datetime\n'), ((282, 299), 'yfinance.Ticker', 'yf.Ticker', (['symbol'], {}), '(symbol)\n', (291, 299), True, 'import yfinance as yf\n'), ((359, 381), 'volatility.utils.get_percent_chg', 'get_percent_chg', (['df', '(1)'], {}), '(df, 1)\n', (374, 381), False, 'from volatility.utils import get_percent_chg\n'), ((382, 404), 'volatility.utils.get_percent_chg', 'get_percent_chg', (['df', '(5)'], {}), '(df, 5)\n', (397, 404), False, 'from volatility.utils import get_percent_chg\n'), ((405, 428), 'volatility.utils.get_percent_chg', 'get_percent_chg', (['df', '(10)'], {}), '(df, 10)\n', (420, 428), False, 'from volatility.utils import get_percent_chg\n'), ((429, 452), 'volatility.utils.get_percent_chg', 'get_percent_chg', (['df', '(15)'], {}), '(df, 15)\n', (444, 452), False, 'from volatility.utils import get_percent_chg\n'), ((453, 476), 'volatility.utils.get_percent_chg', 'get_percent_chg', (['df', '(21)'], {}), '(df, 21)\n', (468, 476), False, 'from volatility.utils import get_percent_chg\n'), ((645, 692), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(10, 5)', 'nrows': '(5)', 'ncols': '(1)'}), '(figsize=(10, 5), nrows=5, ncols=1)\n', (657, 692), True, 'import matplotlib.pyplot as plt\n'), ((1336, 1407), 'matplotlib.pyplot.legend', 'plt.legend', (["['True Returns', 'Predicted Volatility']"], {'loc': '(2)', 'fontsize': '(8)'}), "(['True Returns', 'Predicted Volatility'], loc=2, fontsize=8)\n", (1346, 1407), True, 'import matplotlib.pyplot as plt\n'), ((1408, 1418), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1416, 1418), True, 'import matplotlib.pyplot as plt\n'), ((1095, 1151), 'pandas.Series', 'pd.Series', (['predictions'], {'index': 
'returns.index[-test_size:]'}), '(predictions, index=returns.index[-test_size:])\n', (1104, 1151), True, 'import pandas as pd\n'), ((887, 914), 'arch.arch_model', 'arch_model', (['train'], {'p': '(2)', 'q': '(2)'}), '(train, p=2, q=2)\n', (897, 914), False, 'from arch import arch_model\n'), ((1033, 1076), 'numpy.sqrt', 'np.sqrt', (['pred_val.variance.values[-1, :][0]'], {}), '(pred_val.variance.values[-1, :][0])\n', (1040, 1076), True, 'import numpy as np\n')] |
# Copyright 2003, 2007 by <NAME>. <EMAIL>
# All rights reserved. This code is part of the Biopython
# distribution and governed by its license.
# Please see the LICENSE file that should have been included as part
# of this package.
import math
def lcc_mult(seq, wsize):
    """Local Composition Complexity (LCC) values over sliding window.

    Returns a list of floats, the LCC values for a sliding window over
    the sequence.

    seq - an unambiguous DNA sequence (a string or Seq object)
    wsize - window size, integer

    The result is the same as applying lcc_simp multiple times, but this
    version is optimized for speed. The optimization works by using the
    value of previous window as a base to compute the next one.
    """
    l2 = math.log(2)
    tamseq = len(seq)
    try:
        # Assume it's a string.
        upper = seq.upper()
    except AttributeError:
        # Should be a Seq object then (legacy Biopython .tostring() API).
        upper = seq.tostring().upper()
    # compone[k] = (k/wsize) * log2(k/wsize), precomputed so that each
    # window's complexity is a table lookup per base.
    compone = [0]
    for i in range(wsize):
        frac = (i + 1) / float(wsize)
        compone.append(frac * (math.log(frac) / l2))
    # Count bases in the first window.  Counting on the upper-cased copy
    # (the original counted on the raw ``seq``) fixes wrong results for
    # lower-case input, and makes the first value consistent with the
    # upper-cased sliding windows used below.
    counts = {base: upper[0:wsize].count(base) for base in 'ACTG'}
    lccsal = [0]
    lccsal.append(-sum(compone[counts[base]] for base in 'ACTG'))
    # ``tail`` is the base about to slide out of the window.
    tail = upper[0]
    for x in range(tamseq - wsize):
        window = upper[x + 1:wsize + x + 1]
        head = window[-1]  # base that just slid into the window
        if tail == head:
            # Composition unchanged; reuse the previous value.
            lccsal.append(lccsal[-1])
        else:
            # Only two counts change; recompute from the lookup table.
            # (Assumes unambiguous A/C/T/G input, as documented.)
            counts[tail] -= 1
            counts[head] += 1
            lccsal.append(-sum(compone[counts[base]] for base in 'ACTG'))
        tail = window[0]
    return lccsal
def lcc_simp(seq):
    """Local Composition Complexity (LCC) for a sequence.

    seq - an unambiguous DNA sequence (a string or Seq object)

    Returns the Local Composition Complexity (LCC) value for the entire
    sequence (as a float).

    Reference:
    <NAME> (2005) Sequence Complexity and Composition
    DOI: 10.1038/npg.els.0005260
    """
    wsize = len(seq)
    try:
        # Assume it's a string.
        upper = seq.upper()
    except AttributeError:
        # Should be a Seq object then (legacy Biopython .tostring() API).
        upper = seq.tostring().upper()
    l2 = math.log(2)
    total = 0.0
    for base in ('A', 'C', 'T', 'G'):
        count = upper.count(base)
        # Skip absent bases to avoid math.log(0).  Checking the count on the
        # upper-cased copy (rather than ``base in seq``) also fixes the bug
        # where lower-case input always produced a complexity of 0.
        if count:
            frac = count / float(wsize)
            total += frac * (math.log(frac) / l2)
    return -total
| [
"math.log"
] | [((750, 761), 'math.log', 'math.log', (['(2)'], {}), '(2)\n', (758, 761), False, 'import math\n'), ((4891, 4902), 'math.log', 'math.log', (['(2)'], {}), '(2)\n', (4899, 4902), False, 'import math\n')] |
import logging
import random
from collections import namedtuple
from typing import NamedTuple
from queue import PriorityQueue
from objects import BaseObject
from constants import NORTH, SOUTH, EAST, WEST
# A discrete 2-D coordinate on the map grid.
Space = namedtuple("Space", ["x", "y"])
# TODO: Big TODO - Re-implement space with z/t value for terrain???
# SpaceMeta = namedtuple("SpaceMeta", ["actor", "terrain"], defaults=[None, "Blank"])
class SpaceMeta(NamedTuple):
    # Contents of one grid cell: the occupying actor (None means empty,
    # as the neighbor-traversal code below treats it) and a terrain label.
    actor: BaseObject
    terrain: str = "Blank"
DIRECTIONS = [NORTH, SOUTH, WEST, EAST] # Maintained order, just cuz
# Unit offsets matching DIRECTIONS order above (N, S, W, E).
# NOTE(review): grid_direction() indexes this list by the raw constant value,
# which assumes NORTH..EAST are defined as 0-3 in this order — confirm in
# constants.py (the import order there is NORTH, SOUTH, EAST, WEST).
GRID_DIRECTIONS = [Space(0, -1), Space(0, 1), Space(-1, 0), Space(1, 0)]
def grid_direction(direction: int) -> Space:
    """Return the unit grid offset for ``direction``.

    NOTE(review): indexes GRID_DIRECTIONS (ordered N, S, W, E) by the raw
    constant value — confirm the direction constants are 0-3 in that order.
    """
    return GRID_DIRECTIONS[direction]
def grid_space_add(a: Space, b: Space) -> Space:
    """Component-wise addition of two grid coordinates."""
    return Space(a.x + b.x, a.y + b.y)
def grid_space_neighbor(space: Space, direction: int) -> Space:
    """Return the space adjacent to ``space`` in the given direction."""
    return grid_space_add(space, grid_direction(direction))
class Grid(dict):
    """Sparse 2-D map from Space to SpaceMeta, backed by a plain dict.

    Keys may be given as any (x, y) pair; they are normalized to Space,
    and stored values are wrapped in SpaceMeta.
    """

    def __setitem__(self, key, values):
        col, row = key
        super().__setitem__(Space(col, row), SpaceMeta(*values))

    def __getitem__(self, key) -> SpaceMeta:
        col, row = key
        return super().__getitem__(Space(col, row))

    def neighbors(self, space: Space):
        """Yield adjacent spaces that exist and are not blocked by a solid actor."""
        for direction in DIRECTIONS:
            candidate = grid_space_neighbor(space, direction)
            if candidate not in self:
                continue
            occupant = self[candidate].actor
            if occupant is None or not occupant.solid:
                yield candidate

    def random_neighbor(self, space: Space) -> Space:
        """Pick one traversable neighbor of ``space`` uniformly at random."""
        options = list(self.neighbors(space))
        logging.info(options)
        index = random.randint(0, len(options) - 1)
        return options[index]

    def cost(self, start: Space, end: Space):
        return 1  # TODO: More complex movement cost
# A* Pathfinding
def path_find(start: tuple, goal: tuple, graph: Grid):
    """A* search from ``start`` to ``goal`` over ``graph``.

    Returns (came_from, cost_so_far): the parent map and the cheapest
    known cost to every expanded space.
    """
    origin = Space(*start)
    target = Space(*goal)
    frontier = PriorityQueue()
    frontier.put((0, origin))
    came_from = {origin: None}
    cost_so_far = {origin: 0}
    while not frontier.empty():
        current = frontier.get()[1]
        if current == target:
            break
        for neighbor in graph.neighbors(current):
            candidate_cost = cost_so_far[current] + graph.cost(current, neighbor)
            known = cost_so_far.get(neighbor)
            if known is None or candidate_cost < known:
                cost_so_far[neighbor] = candidate_cost
                # A* priority: path cost so far plus admissible heuristic.
                priority = candidate_cost + grid_distance(neighbor, target)
                frontier.put((priority, neighbor))
                came_from[neighbor] = current
    return came_from, cost_so_far
# Dijkstra's Rangefinding
def range_find(start: tuple, range: int, graph: Grid):
    """Dijkstra flood-fill outward within a movement budget of ``range``.

    Returns (came_from, cost_so_far) covering every space reachable with
    accumulated cost strictly less than ``range``.
    """
    origin = Space(*start)
    frontier = PriorityQueue()
    frontier.put((0, origin))
    came_from = {origin: None}
    cost_so_far = {origin: 0}
    while not frontier.empty():
        current = frontier.get()[1]
        # No goal — expand until the cost budget prunes everything.
        for neighbor in graph.neighbors(current):
            candidate_cost = cost_so_far[current] + graph.cost(current, neighbor)
            within_budget = candidate_cost < range
            improved = (neighbor not in cost_so_far
                        or candidate_cost < cost_so_far[neighbor])
            if within_budget and improved:
                cost_so_far[neighbor] = candidate_cost
                frontier.put((candidate_cost, neighbor))
                came_from[neighbor] = current
    return came_from, cost_so_far
def path_reconstruct(start: tuple, goal: tuple, search_result: dict) -> list:
    """Rebuild the goal-to-start walk from a ``came_from`` map.

    The returned list runs from goal back to start; callers reverse it if
    a forward path is needed.
    """
    origin = Space(*start)
    node = Space(*goal)
    backtrack = []
    # Follow parent links until we hit the start (whose parent is None).
    while search_result[node] is not None:
        backtrack.append(node)
        node = search_result[node]
    backtrack.append(origin)
    return backtrack
def grid_distance(a: Space, b: Space):
    """Manhattan distance between two spaces on a square grid."""
    dx = abs(a.x - b.x)
    dy = abs(a.y - b.y)
    return dx + dy
# ----- Testing Area -----
def test_hash():
    """Two Space tuples built from the same coordinates must hash identically."""
    test_coordinates = (0, 0)
    # Bug fix: Space takes x and y as separate arguments; passing the tuple
    # as one positional argument raised TypeError (missing 'y').
    space_a = Space(*test_coordinates)
    space_b = Space(*test_coordinates)
    if not hash(space_a) == hash(space_b):
        print("Hash equality NOT working")
    else:
        print("Hash equality working on named tuples")
def test_hash_to_non_named_tuple():
    """A plain tuple and a Space with equal contents share a hash."""
    test_coordinates = (1, 1)
    # Bug fix: was Space((1, 1)), which raised TypeError (missing 'y').
    test_grid_space = Space(1, 1)
    if not hash(test_coordinates) == hash(test_grid_space):
        print("CanNOT compare hashes from named/non-named tuples")
    else:
        print("Can indeed compare hashes of regular and named tuples across")
def test_compare_tuple_named_tuple():
    """A plain tuple and a Space with equal contents compare equal."""
    test_coordinates = (1, 1)
    # Bug fix: was Space((1, 1)), which raised TypeError (missing 'y').
    test_grid_space = Space(1, 1)
    if not test_coordinates == test_grid_space:
        print("CanNOT compare equality from named/non-named tuples")
    else:
        print("Can indeed compare equality of regular and named tuples across")
def test_grid_dict_subclass():
    """Grid stores and retrieves SpaceMeta values keyed by raw coordinates."""
    test_coordinates = (0, 0)
    # Bug fixes: Grid (a dict subclass) takes no two positional ints, and
    # Grid.__setitem__ unpacks the value into SpaceMeta(*values), so the
    # stored value must be an iterable — store a 1-tuple and check .actor.
    grid = Grid()
    grid[test_coordinates] = (5,)
    test_get = grid[test_coordinates]
    if not test_get.actor == 5:
        print("Hashtable set/fetch NOT working")
    else:
        print("Hashtable set and get by Space coordinates working")
def test_all():
    """Run every module self-check in order."""
    for check in (test_hash,
                  test_grid_dict_subclass,
                  test_hash_to_non_named_tuple,
                  test_compare_tuple_named_tuple):
        check()
if __name__ == "__main__":
# Run Tests
test_all() | [
"queue.PriorityQueue",
"collections.namedtuple",
"logging.info"
] | [((216, 247), 'collections.namedtuple', 'namedtuple', (['"""Space"""', "['x', 'y']"], {}), "('Space', ['x', 'y'])\n", (226, 247), False, 'from collections import namedtuple\n'), ((2226, 2241), 'queue.PriorityQueue', 'PriorityQueue', ([], {}), '()\n', (2239, 2241), False, 'from queue import PriorityQueue\n'), ((3040, 3055), 'queue.PriorityQueue', 'PriorityQueue', ([], {}), '()\n', (3053, 3055), False, 'from queue import PriorityQueue\n'), ((1742, 1771), 'logging.info', 'logging.info', (['valid_neighbors'], {}), '(valid_neighbors)\n', (1754, 1771), False, 'import logging\n')] |
"""
Copyright European Organization for Nuclear Research (CERN)
Licensed under the Apache License, Version 2.0 (the "License");
You may not use this file except in compliance with the License.
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
Authors:
- <NAME>, <<EMAIL>>, 2014-2017
change index on table requests
Revision ID: 35ef10d1e11b
Revises: 3152492b110b
Create Date: 2014-06-20 09:01:52.704794
"""
from alembic.op import create_index, drop_index
# revision identifiers, used by Alembic.
# These link the migration into the chain: 3152492b110b is applied before
# this revision (35ef10d1e11b).
revision = '35ef10d1e11b'  # pylint:disable=invalid-name
down_revision = '3152492b110b'  # pylint:disable=invalid-name
def upgrade():
    '''
    upgrade method

    Re-keys the composite index on the requests table: creates an index on
    (request_type, state, updated_at) and drops the old one that was keyed
    on created_at.
    '''
    create_index('REQUESTS_TYP_STA_UPD_IDX', 'requests', ["request_type", "state", "updated_at"])
    drop_index('REQUESTS_TYP_STA_CRE_IDX', 'requests')
def downgrade():
    '''
    downgrade method

    Reverses upgrade(): restores the (request_type, state, created_at)
    index and drops the updated_at-keyed one.
    '''
    create_index('REQUESTS_TYP_STA_CRE_IDX', 'requests', ["request_type", "state", "created_at"])
    drop_index('REQUESTS_TYP_STA_UPD_IDX', 'requests')
| [
"alembic.op.drop_index",
"alembic.op.create_index"
] | [((716, 813), 'alembic.op.create_index', 'create_index', (['"""REQUESTS_TYP_STA_UPD_IDX"""', '"""requests"""', "['request_type', 'state', 'updated_at']"], {}), "('REQUESTS_TYP_STA_UPD_IDX', 'requests', ['request_type',\n 'state', 'updated_at'])\n", (728, 813), False, 'from alembic.op import create_index, drop_index\n'), ((814, 864), 'alembic.op.drop_index', 'drop_index', (['"""REQUESTS_TYP_STA_CRE_IDX"""', '"""requests"""'], {}), "('REQUESTS_TYP_STA_CRE_IDX', 'requests')\n", (824, 864), False, 'from alembic.op import create_index, drop_index\n'), ((925, 1022), 'alembic.op.create_index', 'create_index', (['"""REQUESTS_TYP_STA_CRE_IDX"""', '"""requests"""', "['request_type', 'state', 'created_at']"], {}), "('REQUESTS_TYP_STA_CRE_IDX', 'requests', ['request_type',\n 'state', 'created_at'])\n", (937, 1022), False, 'from alembic.op import create_index, drop_index\n'), ((1023, 1073), 'alembic.op.drop_index', 'drop_index', (['"""REQUESTS_TYP_STA_UPD_IDX"""', '"""requests"""'], {}), "('REQUESTS_TYP_STA_UPD_IDX', 'requests')\n", (1033, 1073), False, 'from alembic.op import create_index, drop_index\n')] |
from pywizard.userSettings import settings
import scipy as sp
class PreEmphasizer(object):
    """Applies a one-tap pre-emphasis filter to a sample buffer in place.

    Each sample after the first becomes s[n] + alpha * s[n-1] (the first
    sample is kept unchanged), and the buffer is then rescaled so its
    energy matches the pre-filter energy.
    """

    @classmethod
    def processBuffer(cls, buf):
        """Filter ``buf.samples`` in place, preserving total energy.

        ``buf`` is expected to expose ``samples`` (a numpy array) and an
        ``energy()`` method — presumably pywizard's buffer type; confirm.
        """
        preEnergy = buf.energy()
        alpha = cls.alpha()
        # (Removed dead locals `unmodifiedPreviousSample` and `tempSample`.)
        first_sample = buf.samples[0]
        buf.samples = buf.samples[1:] + (buf.samples[:-1] * alpha)
        # scipy.insert was a deprecated alias of numpy.insert and has been
        # removed from modern SciPy; use numpy directly (always available,
        # as SciPy depends on it).
        import numpy as np
        buf.samples = np.insert(buf.samples, 0, first_sample)
        cls.scaleBuffer(buf, preEnergy, buf.energy())

    @classmethod
    def alpha(cls):
        """Pre-emphasis coefficient, taken from user settings."""
        return settings.preEmphasisAlpha

    @classmethod
    def scaleBuffer(cls, buf, preEnergy, postEnergy):
        """Rescale ``buf.samples`` so the post-filter energy matches preEnergy."""
        # ``** 0.5`` replaces scipy.sqrt, which was removed from modern SciPy.
        scale = (preEnergy / postEnergy) ** 0.5
        buf.samples *= scale
| [
"scipy.sqrt",
"scipy.insert"
] | [((408, 447), 'scipy.insert', 'sp.insert', (['buf.samples', '(0)', 'first_sample'], {}), '(buf.samples, 0, first_sample)\n', (417, 447), True, 'import scipy as sp\n'), ((670, 701), 'scipy.sqrt', 'sp.sqrt', (['(preEnergy / postEnergy)'], {}), '(preEnergy / postEnergy)\n', (677, 701), True, 'import scipy as sp\n')] |
import numpy as np
import random
import numexpr as ne
def gen_layer(rin, rout, nsize):
R = 1.0
phi = np.random.uniform(0, 2*np.pi, size=(nsize))
costheta = np.random.uniform(-1, 1, size=(nsize))
u = np.random.uniform(rin**3, rout**3, size=(nsize))
theta = np.arccos( costheta )
r = R * np.cbrt( u )
x = r * np.sin( theta ) * np.cos( phi )
y = r * np.sin( theta ) * np.sin( phi )
z = r * np.cos( theta )
return( x, y, z )
def LPFbead(qrange, sigmabead):
    '''
    Compute the spherical form factor given a range of q values.

    Parameters
    ----------
    qrange: numpy.array
        array of values in q-space to compute form factor for.
    sigmabead: float
        diameter of the sphere.

    Return
    -------
    Fqb: numpy.array
        array of values of the spherical form factors (F(q)) computed at q-points listed in qrange.
    '''
    bead_radius = np.true_divide(sigmabead, 2)
    qr = np.multiply(qrange, bead_radius)
    # F(q) = 3 * [sin(qR) - qR*cos(qR)] / (qR)^3
    numerator = np.sin(qr) - np.multiply(qr, np.cos(qr))
    Fqb = np.multiply(np.true_divide(numerator, np.power(qr, 3)), 3)
    return Fqb
def LPOmega(qrange, nAin, nAout, nB, r): # qvalues number_of_B number_of_A scatterer_coordinates
    """Pairwise Debye sum omega(q) over all scatterers of one replicate.

    ``r`` holds the coordinates as produced by genLP (replicate, xyz, bead);
    returns an array of shape (1, len(qrange)).  Each unordered pair
    contributes sin(q*rij)/(q*rij); the NaN patch below handles coincident
    beads (rij == 0), whose contribution is 1.
    NOTE(review): requires the third-party ``numexpr`` package (``ne``).
    """
    Ntot=nAin+nB+nAout # Total number of scatterers to loop through
    omegaarrt=np.zeros((1,len(qrange))) # initiating array
    omegaarr=np.zeros((1,len(qrange))) # initiating array
    rur=r[0,:,:]# selects
    rur=rur.transpose()
    for i in range(Ntot-1): # loops through index and all further indexes to prevent double counting
        all_disp = rur[i,:]-rur[(i+1):,:]
        rij = np.sqrt(np.sum(np.square(all_disp),axis=1))
        rij = rij.transpose()
        rs = rij[:,np.newaxis] # reshapes array for consistency
        Q = qrange[np.newaxis,:] # reshapes array for consistency
        vals = ne.evaluate("sin(Q*rs)/(Q*rs)") # ne is efficient at calculations
        inds=np.argwhere(np.isnan(vals)) # error catching in case there are NaN values
        if len(inds)>0:
            for val in inds:
                vals[val[0],val[1]]=1
            inds_double_check=np.argwhere(np.isnan(vals))
            if len(inds_double_check)>0:
                print('nan error!')
        vals = ne.evaluate("sum((vals), axis=0)") # adds together scatterer contributions for each q value
        omegaarr+=vals
    omegaarr=np.true_divide(2*omegaarr,Ntot)+1 # 1 accounts for the guarenteed overlap of same bead # 2* accounts for double counting avoided to reduce computational expense by looping for all other pairs
    omegaarrt+=omegaarr # stores values between loops
    return omegaarrt
def visualize(r, Rcore, dR_Ain, dR_B, dR_Aout, sigmabead):
    """Render one replicate's scatterers with py3Dmol.

    Beads outside the B layer (solvophilic A beads) are drawn blue;
    beads inside the B layer are drawn red.  Returns the py3Dmol view.
    """
    import py3Dmol
    view = py3Dmol.view()
    inner_edge = Rcore + dR_Ain
    outer_edge = Rcore + dR_Ain + dR_B
    for position in r[0, :, :].transpose():
        distance = np.linalg.norm(position)
        if distance < inner_edge or distance > outer_edge:
            color = 'blue'
        else:
            color = 'red'
        view.addSphere({
            'center': {'x': position[0], 'y': position[1], 'z': position[2]},
            'radius': sigmabead / 2,
            'color': color,
            'alpha': 0.9,
        })
    #view.zoomTo()
    view.show()
    return view
def genLP(Rcore, dR_Ain, dR_B, dR_Aout, sigmabead, nAin, nAout, nB):
    """Place scatterers for one A-B-A layered vesicle replicate.

    Parameters: core radius, inner A layer thickness, B layer thickness,
    outer A layer thickness, bead diameter (unused here; kept for
    interface compatibility), and the bead counts for the inner A,
    outer A and B layers.

    Returns
    -------
    r : numpy array of shape (1, 3, nAin + nB + nAout) with x/y/z rows.

    (Removed dead code from the original: the unused ``power`` constant,
    the never-used ``types`` array, and a single-iteration outer loop.)
    """
    ntot = nAin + nB + nAout
    r = np.zeros((1, 3, ntot))
    # Inner A layer: shell from Rcore to Rcore + dR_Ain.
    x, y, z = gen_layer(Rcore, Rcore + dR_Ain, nAin)
    r[0, 0, :nAin] = x
    r[0, 1, :nAin] = y
    r[0, 2, :nAin] = z
    # B middle layer: shell of thickness dR_B.
    x, y, z = gen_layer(Rcore + dR_Ain, Rcore + dR_Ain + dR_B, nB)
    r[0, 0, nAin:nAin + nB] = x
    r[0, 1, nAin:nAin + nB] = y
    r[0, 2, nAin:nAin + nB] = z
    # Outer A layer: outermost shell of thickness dR_Aout.
    x, y, z = gen_layer(Rcore + dR_Ain + dR_B, Rcore + dR_Ain + dR_B + dR_Aout, nAout)
    r[0, 0, nAin + nB:] = x
    r[0, 1, nAin + nB:] = y
    r[0, 2, nAin + nB:] = z
    return r
class scatterer_generator:
    '''
    The wrapper class for vesicle shape. Default length unit: Angstrom.

    Notes
    -----
    **The following 7 shape-specific descriptors are to be specified by user (see
    *Attributes*) as
    a list, in the precise order as listed, while calling `Model.load_shape`
    to load this shape:**

    num_scatterers:
        Number of scatterers used to represent a chain. Default: 24
    N:
        Number of monomers in a chain. Default: 54
    eta_B:
        Packing fraction of scatterers in B layer. Default: 0.5
    lmono_b:
        Diameter of a monomer of chemistry B. Default: 50.4 A
    lmono_a:
        Diameter of a monomer of chemistry A. Default: 50.4 A
    fb:
        Fraction of monomers in chain that are of B type. fa = 1-fb. Default: 0.55
    nLP:
        Number of replicates for each individual. Default: 7

    **The following 7 parameters are to be predicted, in the precise order
    as listed, by GA:**

    R_core:
        Core radius. Default [min,max]: [50 A, 400 A]
    t_Ain:
        Thickness of inner A layer. Default [min,max]: [30 A, 200 A]
    t_B:
        Thickness of B layer. Default [min,max]: [30 A, 200 A]
    t_Aout:
        Thickness of outer A layer. Default [min,max]: [30 A, 200 A]
    sigma_Ain:
        Split of solvophilic scatterers between inner and outer layers.
        Default [min,max]: [0.1, 0.45]
    sigma_R:
        Dispersity in vesicle size as implemented in the core radius.
        Default [min,max]: [0.0, 0.45]
    log10(bg):
        Negative log10 of background intensity.
        E.g. an background intensity of 0.001 leads to this value being 3.
        Default [min,max]: [0.1,4]

    See also
    --------
    crease_ga.Model.load_shape
    '''

    def __init__(self,
                 shape_params = [24,54,0.5,50.4,50.4,0.55,7],
                 minvalu = (50, 30, 30, 30, 0.1, 0.0, 0.1),
                 maxvalu = (400, 200, 200, 200, 0.45, 0.45, 4)):
        # NOTE(review): the mutable list default for shape_params is shared
        # across calls; it is only read here, so currently harmless.
        num_scatterers = shape_params[0]
        N = shape_params[1]
        rho_B = shape_params[2]
        lmono_a = shape_params[3]
        lmono_b= shape_params[4]
        fb = shape_params[5]
        nLP = shape_params[6]
        self._numvars = 7
        self.minvalu = minvalu
        self.maxvalu = maxvalu
        self.num_scatterers=num_scatterers ## number of scatterers per chain
        self.N=N ## Number of beads on chain
        self.rho_B=rho_B ## density/volume fraction of beads in B layer
        self.lmono_a=lmono_a ## Angstrom 'monomer contour length'
        self.lmono_b=lmono_b ## Angstrom 'monomer contour length'
        self.MB=np.pi/6*(self.lmono_b)**3 ## volume of B monomer
        self.sigmabead=np.true_divide(self.N*self.lmono_b,self.num_scatterers) ## scatterer bead diameter
        self.fb=fb ## fraction of B type monomers in chain
        self.nLP=nLP ## number of replicates

    @property
    def numvars(self):
        # Number of GA-predicted parameters (fixed at 7 for this shape).
        return self._numvars

    def converttoIQ(self, qrange, param):
        '''
        Calculate computed scattering intensity profile.

        Parameters
        ----------
        qrange: numpy.array
            q values.
        param: numpy.array
            Decoded input parameters. See *Notes* section of the class
            documentation.

        Returns
        -------
        IQid: A numpy array holding I(q).
        '''
        # q values, decoded parameters,
        # number of repeat units per chain, fraction of B beads per chain, core density,
        # scatterer diameter, molar mass of B chemistry,
        # length of A chemistry bond, length of B chemistry bond,
        # number of scatterers per chain, # of replicates, stdev in Rcore size
        sigmabead = self.sigmabead
        N = self.N
        fb = self.fb
        rho_B = self.rho_B
        MB = self.MB
        lmono_a = self.lmono_a
        lmono_b = self.lmono_b
        num_scatterers = self.num_scatterers
        nLP = self.nLP
        IQid=np.zeros((len(qrange))) #initiates array for output IQ
        ### Parameters used to generate scatterer placements ###
        Rcore=param[0]
        dR_Ain=param[1]
        dR_B=param[2]
        dR_Aout=param[3]
        sAin=param[4] # split of type A scatterer
        sigmaR=param[5] # variation in Rcore, dispersity
        #print(Rcore, dR_Ain, dR_B, dR_Aout, sAin)
        Background=10**(-param[6])
        varR = Rcore*sigmaR # variation in Rcore
        disper = np.array([-2.0, -1.5, -1.0, -0.5, 0.0, 0.5, 1.0, 1.5, 2.0]) # fixed intervals of sigma
        sum_omegaarr=np.zeros((1,len(qrange)))
        for step in range(0, nLP):
            # Center the nLP dispersity samples within the 9 fixed intervals.
            Rcore = param[0] + varR*disper[step + int((9-nLP)/2.)] ## add displacement to Rcore
            # print("disper = ", disper[step + int((9-nLP)/2.)])
            # print("Rcore = ", Rcore)
            vol_B = (4/3.0)*np.pi*(np.power(Rcore + dR_Ain + dR_B, 3)
                                   - np.power(Rcore + dR_Ain, 3)) ## volume of solvophobic layer B
            nagg = int(np.true_divide( rho_B*vol_B, N*fb*MB )) ## number of chains in vesicle
            ntot = nagg*num_scatterers ## total number of scatterers
            nB = int(ntot*fb) ## number of scatterers in B
            nAin = int(ntot*(1-fb)*sAin) ## number of scatterers in A_in
            nAout = int(ntot*(1-fb)*(1-sAin)) ## number of scatterers in A_out
            for reps in range(0, 3):
                ### Generates scatterer positions in structure ###
                r = genLP(Rcore, dR_Ain, dR_B, dR_Aout, sigmabead, nAin, nAout, nB)
                ### Calculates omega from scatterers in shape ###
                sum_omegaarr += LPOmega(qrange, nAin, nAout, nB, r)
        omegaarr=np.true_divide(sum_omegaarr,nLP*3) # average omega
        omegaarr=omegaarr.reshape(len(qrange),)
        Fqb=LPFbead(qrange,sigmabead) # calcualtes sphere shape factor
        F2qb=np.multiply(Fqb,Fqb) # Sphere shape factor square
        sqmm=np.ones((np.shape(Fqb))) # assuming dilute mixture the micelle-micelle structure factor = 1
        F2qb_sqmm=np.multiply(F2qb,sqmm) # determines the micelle form factor
        IQid=np.multiply(omegaarr,F2qb_sqmm) # calculates Icomp
        maxIQ=np.max(IQid)
        IQid=np.true_divide(IQid,maxIQ) # normalizes the I(q) to have its maximum = 1
        IQid+=Background # add background
        return IQid
| [
"numpy.multiply",
"numpy.arccos",
"numexpr.evaluate",
"numpy.power",
"numpy.linalg.norm",
"numpy.max",
"numpy.square",
"numpy.array",
"numpy.zeros",
"numpy.isnan",
"numpy.true_divide",
"numpy.cos",
"numpy.random.uniform",
"numpy.sin",
"numpy.cbrt",
"numpy.shape",
"py3Dmol.view"
] | [((119, 162), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(2 * np.pi)'], {'size': 'nsize'}), '(0, 2 * np.pi, size=nsize)\n', (136, 162), True, 'import numpy as np\n'), ((182, 218), 'numpy.random.uniform', 'np.random.uniform', (['(-1)', '(1)'], {'size': 'nsize'}), '(-1, 1, size=nsize)\n', (199, 218), True, 'import numpy as np\n'), ((233, 283), 'numpy.random.uniform', 'np.random.uniform', (['(rin ** 3)', '(rout ** 3)'], {'size': 'nsize'}), '(rin ** 3, rout ** 3, size=nsize)\n', (250, 283), True, 'import numpy as np\n'), ((299, 318), 'numpy.arccos', 'np.arccos', (['costheta'], {}), '(costheta)\n', (308, 318), True, 'import numpy as np\n'), ((959, 987), 'numpy.true_divide', 'np.true_divide', (['sigmabead', '(2)'], {}), '(sigmabead, 2)\n', (973, 987), True, 'import numpy as np\n'), ((994, 1016), 'numpy.multiply', 'np.multiply', (['qrange', 'R'], {}), '(qrange, R)\n', (1005, 1016), True, 'import numpy as np\n'), ((2902, 2916), 'py3Dmol.view', 'py3Dmol.view', ([], {}), '()\n', (2914, 2916), False, 'import py3Dmol\n'), ((3747, 3769), 'numpy.zeros', 'np.zeros', (['(1, 3, ntot)'], {}), '((1, 3, ntot))\n', (3755, 3769), True, 'import numpy as np\n'), ((3786, 3800), 'numpy.zeros', 'np.zeros', (['ntot'], {}), '(ntot)\n', (3794, 3800), True, 'import numpy as np\n'), ((337, 347), 'numpy.cbrt', 'np.cbrt', (['u'], {}), '(u)\n', (344, 347), True, 'import numpy as np\n'), ((385, 396), 'numpy.cos', 'np.cos', (['phi'], {}), '(phi)\n', (391, 396), True, 'import numpy as np\n'), ((433, 444), 'numpy.sin', 'np.sin', (['phi'], {}), '(phi)\n', (439, 444), True, 'import numpy as np\n'), ((463, 476), 'numpy.cos', 'np.cos', (['theta'], {}), '(theta)\n', (469, 476), True, 'import numpy as np\n'), ((1982, 2013), 'numexpr.evaluate', 'ne.evaluate', (['"""sin(Q*rs)/(Q*rs)"""'], {}), "('sin(Q*rs)/(Q*rs)')\n", (1993, 2013), True, 'import numexpr as ne\n'), ((2380, 2414), 'numexpr.evaluate', 'ne.evaluate', (['"""sum((vals), axis=0)"""'], {}), "('sum((vals), axis=0)')\n", (2391, 2414), 
True, 'import numexpr as ne\n'), ((2510, 2544), 'numpy.true_divide', 'np.true_divide', (['(2 * omegaarr)', 'Ntot'], {}), '(2 * omegaarr, Ntot)\n', (2524, 2544), True, 'import numpy as np\n'), ((7415, 7473), 'numpy.true_divide', 'np.true_divide', (['(self.N * self.lmono_b)', 'self.num_scatterers'], {}), '(self.N * self.lmono_b, self.num_scatterers)\n', (7429, 7473), True, 'import numpy as np\n'), ((9218, 9277), 'numpy.array', 'np.array', (['[-2.0, -1.5, -1.0, -0.5, 0.0, 0.5, 1.0, 1.5, 2.0]'], {}), '([-2.0, -1.5, -1.0, -0.5, 0.0, 0.5, 1.0, 1.5, 2.0])\n', (9226, 9277), True, 'import numpy as np\n'), ((10594, 10631), 'numpy.true_divide', 'np.true_divide', (['sum_omegaarr', '(nLP * 3)'], {}), '(sum_omegaarr, nLP * 3)\n', (10608, 10631), True, 'import numpy as np\n'), ((10805, 10826), 'numpy.multiply', 'np.multiply', (['Fqb', 'Fqb'], {}), '(Fqb, Fqb)\n', (10816, 10826), True, 'import numpy as np\n'), ((11024, 11047), 'numpy.multiply', 'np.multiply', (['F2qb', 'sqmm'], {}), '(F2qb, sqmm)\n', (11035, 11047), True, 'import numpy as np\n'), ((11115, 11147), 'numpy.multiply', 'np.multiply', (['omegaarr', 'F2qb_sqmm'], {}), '(omegaarr, F2qb_sqmm)\n', (11126, 11147), True, 'import numpy as np\n'), ((11194, 11206), 'numpy.max', 'np.max', (['IQid'], {}), '(IQid)\n', (11200, 11206), True, 'import numpy as np\n'), ((11254, 11281), 'numpy.true_divide', 'np.true_divide', (['IQid', 'maxIQ'], {}), '(IQid, maxIQ)\n', (11268, 11281), True, 'import numpy as np\n'), ((367, 380), 'numpy.sin', 'np.sin', (['theta'], {}), '(theta)\n', (373, 380), True, 'import numpy as np\n'), ((415, 428), 'numpy.sin', 'np.sin', (['theta'], {}), '(theta)\n', (421, 428), True, 'import numpy as np\n'), ((1089, 1104), 'numpy.power', 'np.power', (['QR', '(3)'], {}), '(QR, 3)\n', (1097, 1104), True, 'import numpy as np\n'), ((2078, 2092), 'numpy.isnan', 'np.isnan', (['vals'], {}), '(vals)\n', (2086, 2092), True, 'import numpy as np\n'), ((2280, 2294), 'numpy.isnan', 'np.isnan', (['vals'], {}), '(vals)\n', (2288, 
2294), True, 'import numpy as np\n'), ((10902, 10915), 'numpy.shape', 'np.shape', (['Fqb'], {}), '(Fqb)\n', (10910, 10915), True, 'import numpy as np\n'), ((1051, 1061), 'numpy.sin', 'np.sin', (['QR'], {}), '(QR)\n', (1057, 1061), True, 'import numpy as np\n'), ((1738, 1757), 'numpy.square', 'np.square', (['all_disp'], {}), '(all_disp)\n', (1747, 1757), True, 'import numpy as np\n'), ((2969, 2987), 'numpy.linalg.norm', 'np.linalg.norm', (['ri'], {}), '(ri)\n', (2983, 2987), True, 'import numpy as np\n'), ((3006, 3024), 'numpy.linalg.norm', 'np.linalg.norm', (['ri'], {}), '(ri)\n', (3020, 3024), True, 'import numpy as np\n'), ((9790, 9832), 'numpy.true_divide', 'np.true_divide', (['(rho_B * vol_B)', '(N * fb * MB)'], {}), '(rho_B * vol_B, N * fb * MB)\n', (9804, 9832), True, 'import numpy as np\n'), ((1077, 1087), 'numpy.cos', 'np.cos', (['QR'], {}), '(QR)\n', (1083, 1087), True, 'import numpy as np\n'), ((9632, 9666), 'numpy.power', 'np.power', (['(Rcore + dR_Ain + dR_B)', '(3)'], {}), '(Rcore + dR_Ain + dR_B, 3)\n', (9640, 9666), True, 'import numpy as np\n'), ((9705, 9732), 'numpy.power', 'np.power', (['(Rcore + dR_Ain)', '(3)'], {}), '(Rcore + dR_Ain, 3)\n', (9713, 9732), True, 'import numpy as np\n')] |
"""Strategies for selecting actions for value-based policies."""
from abc import ABC, abstractmethod
from typing import List, Optional
from numpy.typing import ArrayLike
import numpy as np
from rl.action_selectors import (
ActionSelector,
DeterministicActionSelector,
UniformDiscreteActionSelector,
NoisyActionSelector,
)
class ActionSelectionStrategy(ABC):
    """Base class for action selection strategies.

    Implementations map estimated action values (and, for strategies that
    use them, per-action visit counts) to an ActionSelector that chooses
    the next action.
    """

    @abstractmethod
    def __call__(
        self,
        action_values: List[float],
        action_counts: List[int],
    ) -> ActionSelector:
        pass
class EpsilonGreedy(ActionSelectionStrategy):
"""Implementation of epsilon greedy action selection.
Args:
epsilon: probability of taking action to explore rather than exploing
random_state: `None`, `int`, or `np.random.Generator` to initialise
RNG
"""
def __init__(self, epsilon: float = 0.0, random_state=None):
self.epsilon = epsilon
self._rng = np.random.default_rng(random_state)
def __call__(
self,
action_values: List[float],
action_counts: Optional[List[int]] = None,
) -> NoisyActionSelector:
"""Action counts do not matter for this strategy."""
greedy_action = int(np.argmax(action_values))
preferred = DeterministicActionSelector(greedy_action)
noise = UniformDiscreteActionSelector(
len(action_values), random_state=self._rng
)
return NoisyActionSelector(
self.epsilon, preferred, noise, random_state=self._rng
)
class UCB(ActionSelectionStrategy):
"""Upper confidence bound action selection strategy.
As defined in Sutton & Barto equation 2.10. However we floor action
counts at `eps` to avoid divide-by-zero.
`t` is inferred by summing the action counts vector and adding 1.
(Because `t` refers to the time step at which action values are being
estimated, i.e. the next time step since the last observation).
Args:
c: confidence parameter
eps: small number to floor zero counts at
"""
def __init__(self, c: float, eps: float = 1.0e-8):
self.c = c
self._eps = eps
def __call__(
self,
action_values: List[float],
action_counts: List[int],
) -> DeterministicActionSelector:
chosen_action = int(np.argmax(self.ucb(action_values, action_counts)))
return DeterministicActionSelector(chosen_action)
def ucb(
self,
action_values: List[float],
action_counts: List[int],
) -> ArrayLike:
log_t = np.log(np.sum(action_counts) + 1)
floored_counts = np.maximum(action_counts, self._eps)
return action_values + self.c * np.sqrt(log_t / floored_counts)
| [
"numpy.sqrt",
"numpy.random.default_rng",
"rl.action_selectors.DeterministicActionSelector",
"numpy.argmax",
"numpy.sum",
"rl.action_selectors.NoisyActionSelector",
"numpy.maximum"
] | [((996, 1031), 'numpy.random.default_rng', 'np.random.default_rng', (['random_state'], {}), '(random_state)\n', (1017, 1031), True, 'import numpy as np\n'), ((1317, 1359), 'rl.action_selectors.DeterministicActionSelector', 'DeterministicActionSelector', (['greedy_action'], {}), '(greedy_action)\n', (1344, 1359), False, 'from rl.action_selectors import ActionSelector, DeterministicActionSelector, UniformDiscreteActionSelector, NoisyActionSelector\n'), ((1487, 1562), 'rl.action_selectors.NoisyActionSelector', 'NoisyActionSelector', (['self.epsilon', 'preferred', 'noise'], {'random_state': 'self._rng'}), '(self.epsilon, preferred, noise, random_state=self._rng)\n', (1506, 1562), False, 'from rl.action_selectors import ActionSelector, DeterministicActionSelector, UniformDiscreteActionSelector, NoisyActionSelector\n'), ((2442, 2484), 'rl.action_selectors.DeterministicActionSelector', 'DeterministicActionSelector', (['chosen_action'], {}), '(chosen_action)\n', (2469, 2484), False, 'from rl.action_selectors import ActionSelector, DeterministicActionSelector, UniformDiscreteActionSelector, NoisyActionSelector\n'), ((2678, 2714), 'numpy.maximum', 'np.maximum', (['action_counts', 'self._eps'], {}), '(action_counts, self._eps)\n', (2688, 2714), True, 'import numpy as np\n'), ((1271, 1295), 'numpy.argmax', 'np.argmax', (['action_values'], {}), '(action_values)\n', (1280, 1295), True, 'import numpy as np\n'), ((2626, 2647), 'numpy.sum', 'np.sum', (['action_counts'], {}), '(action_counts)\n', (2632, 2647), True, 'import numpy as np\n'), ((2755, 2786), 'numpy.sqrt', 'np.sqrt', (['(log_t / floored_counts)'], {}), '(log_t / floored_counts)\n', (2762, 2786), True, 'import numpy as np\n')] |
from unittest import TestSuite, TextTestRunner
import hashlib
def run(test):
suite = TestSuite()
suite.addTest(test)
TextTestRunner().run(suite)
def hash256(s):
'''two rounds of sha256'''
return hashlib.sha256(hashlib.sha256(s).digest()).digest()
| [
"unittest.TestSuite",
"hashlib.sha256",
"unittest.TextTestRunner"
] | [((92, 103), 'unittest.TestSuite', 'TestSuite', ([], {}), '()\n', (101, 103), False, 'from unittest import TestSuite, TextTestRunner\n'), ((132, 148), 'unittest.TextTestRunner', 'TextTestRunner', ([], {}), '()\n', (146, 148), False, 'from unittest import TestSuite, TextTestRunner\n'), ((235, 252), 'hashlib.sha256', 'hashlib.sha256', (['s'], {}), '(s)\n', (249, 252), False, 'import hashlib\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Program to denoise a short speech sample using a pre-trained autoencoder.
PATH_TO_TRAINED_MODEL : path to the pre-trained model (.h5)
PATH_TO_AUDIO : path to the noisy audio file (.wav)
PATH_TO_SAVE : path to save the denoised audio output (.wav)
@author: nk
"""
#%% Dependencies
import numpy as np
import librosa
import soundfile
from tensorflow import keras
#%%
PATH_TO_TRAINED_MODEL = "./trained_models/audio_denoise_AE.h5"
PATH_TO_AUDIO = "./audio_files/test_noisy.wav"
PATH_TO_SAVE = "./audio_files/new_denoised.wav"
#%%
class _Denoise_AE:
'''
Singleton class for denoising short audio samples of spoken words.
'''
model = None
_instance = None
# This is the fitting constant, saved from the training session!
fitting_constant = 7.259422170994068
# This is the sample rate that the model is configured to work with.
SAMPLE_RATE = 22050
def preprocess(self, path_to_audio):
'''
Preprocesses audio file located at specified path.
- Fixes length to 1s
- Extracts spectrogram
'''
data, _ = librosa.load(path_to_audio, sr = self.SAMPLE_RATE)
duration = self.SAMPLE_RATE
# Pad to appropriate length...
if len(data) < duration:
max_offset = np.abs(len(data) - duration)
offset = np.random.randint(max_offset)
data = np.pad(data, (offset, duration-len(data)-offset), "constant")
# ... or cut to appropriate length...
elif len(data) > duration:
max_offset = np.abs(len(data) - duration)
offset = np.random.randint(max_offset)
data = data[offset:len(data)-max_offset+offset]
# ... or leave as is.
else:
offset = 0
# Spectrogram
S = np.abs(librosa.stft(data))[:-1,:]
return S
def denoise(self, path_to_audio):
'''
Denoises input with autoencoder.
'''
# Load spectrogram
S = self.preprocess(path_to_audio)
# Get dimensions
dim_1 = S.shape[0]
dim_2 = S.shape[1]
# Reshape as input tensor
S = np.reshape(S, (1, dim_1, dim_2, 1))
S /= self.fitting_constant
# Get denoised spectrogram from autoencoder
S_denoised = self.model.predict(S).reshape((dim_1, dim_2))
# Convert denoised spectrogram to time series waveform
denoised = librosa.griffinlim(S_denoised) * self.fitting_constant
return denoised
#%%
def Denoise_AE():
# Ensure single instance of AE
if _Denoise_AE()._instance is None:
_Denoise_AE._instance = _Denoise_AE()
_Denoise_AE.model = keras.models.load_model(PATH_TO_TRAINED_MODEL)
return _Denoise_AE._instance
#%%
if __name__ == "__main__":
dnae = Denoise_AE()
dnae2 = Denoise_AE()
assert dnae is dnae2
denoised = dnae.denoise(PATH_TO_AUDIO)
soundfile.write(PATH_TO_SAVE, denoised, dnae.SAMPLE_RATE) | [
"numpy.reshape",
"librosa.griffinlim",
"soundfile.write",
"numpy.random.randint",
"tensorflow.keras.models.load_model",
"librosa.stft",
"librosa.load"
] | [((3027, 3084), 'soundfile.write', 'soundfile.write', (['PATH_TO_SAVE', 'denoised', 'dnae.SAMPLE_RATE'], {}), '(PATH_TO_SAVE, denoised, dnae.SAMPLE_RATE)\n', (3042, 3084), False, 'import soundfile\n'), ((1162, 1210), 'librosa.load', 'librosa.load', (['path_to_audio'], {'sr': 'self.SAMPLE_RATE'}), '(path_to_audio, sr=self.SAMPLE_RATE)\n', (1174, 1210), False, 'import librosa\n'), ((2235, 2270), 'numpy.reshape', 'np.reshape', (['S', '(1, dim_1, dim_2, 1)'], {}), '(S, (1, dim_1, dim_2, 1))\n', (2245, 2270), True, 'import numpy as np\n'), ((2775, 2821), 'tensorflow.keras.models.load_model', 'keras.models.load_model', (['PATH_TO_TRAINED_MODEL'], {}), '(PATH_TO_TRAINED_MODEL)\n', (2798, 2821), False, 'from tensorflow import keras\n'), ((1414, 1443), 'numpy.random.randint', 'np.random.randint', (['max_offset'], {}), '(max_offset)\n', (1431, 1443), True, 'import numpy as np\n'), ((2516, 2546), 'librosa.griffinlim', 'librosa.griffinlim', (['S_denoised'], {}), '(S_denoised)\n', (2534, 2546), False, 'import librosa\n'), ((1681, 1710), 'numpy.random.randint', 'np.random.randint', (['max_offset'], {}), '(max_offset)\n', (1698, 1710), True, 'import numpy as np\n'), ((1879, 1897), 'librosa.stft', 'librosa.stft', (['data'], {}), '(data)\n', (1891, 1897), False, 'import librosa\n')] |
#!/usr/bin/python
import spidev
class mcp2515:
SPI_RESET = 0xC0
SPI_READ = 0x03
SPI_READ_RX = 0x90
SPI_WRITE = 0x02
SPI_WRITE_TX = 0x40
SPI_RTS = 0x80
SPI_READ_STATUS = 0xA0
SPI_RX_STATUS = 0xB0
SPI_BIT_MODIFY = 0x05
#/* Configuration Registers */
CANSTAT = 0x0E
CANCTRL = 0x0F
BFPCTRL = 0x0C
TEC = 0x1C
REC = 0x1D
CNF3 = 0x28
CNF2 = 0x29
CNF1 = 0x2A
CANINTE = 0x2B
CANINTF = 0x2C
EFLG = 0x2D
TXRTSCTRL = 0x0D
#/* Recieve Filters */
RXF0SIDH = 0x00
RXF0SIDL = 0x01
RXF0EID8 = 0x02
RXF0EID0 = 0x03
RXF1SIDH = 0x04
RXF1SIDL = 0x05
RXF1EID8 = 0x06
RXF1EID0 = 0x07
RXF2SIDH = 0x08
RXF2SIDL = 0x09
RXF2EID8 = 0x0A
RXF2EID0 = 0x0B
RXF3SIDH = 0x10
RXF3SIDL = 0x11
RXF3EID8 = 0x12
RXF3EID0 = 0x13
RXF4SIDH = 0x14
RXF4SIDL = 0x15
RXF4EID8 = 0x16
RXF4EID0 = 0x17
RXF5SIDH = 0x18
RXF5SIDL = 0x19
RXF5EID8 = 0x1A
RXF5EID0 = 0x1B
#/* Receive Masks */
RXM0SIDH = 0x20
RXM0SIDL = 0x21
RXM0EID8 = 0x22
RXM0EID0 = 0x23
RXM1SIDH = 0x24
RXM1SIDL = 0x25
RXM1EID8 = 0x26
RXM1EID0 = 0x27
#/* Tx Buffer 0 */
TXB0CTRL = 0x30
TXB0SIDH = 0x31
TXB0SIDL = 0x32
TXB0EID8 = 0x33
TXB0EID0 = 0x34
TXB0DLC = 0x35
TXB0D0 = 0x36
TXB0D1 = 0x37
TXB0D2 = 0x38
TXB0D3 = 0x39
TXB0D4 = 0x3A
TXB0D5 = 0x3B
TXB0D6 = 0x3C
TXB0D7 = 0x3D
#/* Tx Buffer 1 */
TXB1CTRL = 0x40
TXB1SIDH = 0x41
TXB1SIDL = 0x42
TXB1EID8 = 0x43
TXB1EID0 = 0x44
TXB1DLC = 0x45
TXB1D0 = 0x46
TXB1D1 = 0x47
TXB1D2 = 0x48
TXB1D3 = 0x49
TXB1D4 = 0x4A
TXB1D5 = 0x4B
TXB1D6 = 0x4C
TXB1D7 = 0x4D
#/* Tx Buffer 2 */
TXB2CTRL = 0x50
TXB2SIDH = 0x51
TXB2SIDL = 0x52
TXB2EID8 = 0x53
TXB2EID0 = 0x54
TXB2DLC = 0x55
TXB2D0 = 0x56
TXB2D1 = 0x57
TXB2D2 = 0x58
TXB2D3 = 0x59
TXB2D4 = 0x5A
TXB2D5 = 0x5B
TXB2D6 = 0x5C
TXB2D7 = 0x5D
#/* Rx Buffer 0 */
RXB0CTRL = 0x60
RXB0SIDH = 0x61
RXB0SIDL = 0x62
RXB0EID8 = 0x63
RXB0EID0 = 0x64
RXB0DLC = 0x65
RXB0D0 = 0x66
RXB0D1 = 0x67
RXB0D2 = 0x68
RXB0D3 = 0x69
RXB0D4 = 0x6A
RXB0D5 = 0x6B
RXB0D6 = 0x6C
RXB0D7 = 0x6D
#/* Rx Buffer 1 */
RXB1CTRL = 0x70
RXB1SIDH = 0x71
RXB1SIDL = 0x72
RXB1EID8 = 0x73
RXB1EID0 = 0x74
RXB1DLC = 0x75
RXB1D0 = 0x76
RXB1D1 = 0x77
RXB1D2 = 0x78
RXB1D3 = 0x79
RXB1D4 = 0x7A
RXB1D5 = 0x7B
RXB1D6 = 0x7C
RXB1D7 = 0x7D
#/*******************************************************************
# * Bit register masks *
# *******************************************************************/
#/* TXBnCTRL */
TXREQ = 0x08
TXP = 0x03
#/* RXBnCTRL */
RXM = 0x60
BUKT = 0x04
#/* CANCTRL */
REQOP = 0xE0
ABAT = 0x10
OSM = 0x08
CLKEN = 0x04
CLKPRE = 0x03
#/* CANSTAT */
REQOP = 0xE0
ICOD = 0x0E
#/* CANINTE */
RX0IE = 0x01
RX1IE = 0x02
TX0IE = 0x04
TX1IE = 0x80
TX2IE = 0x10
ERRIE = 0x20
WAKIE = 0x40
MERRE = 0x80
#/* CANINTF */
RX0IF = 0x01
RX1IF = 0x02
TX0IF = 0x04
TX1IF = 0x80
TX2IF = 0x10
ERRIF = 0x20
WAKIF = 0x40
MERRF = 0x80
#/* BFPCTRL */
B1BFS = 0x20
B0BFS = 0x10
B1BFE = 0x08
B0BFE = 0x04
B1BFM = 0x02
B0BFM = 0x01
#/* CNF1 Masks */
SJW = 0xC0
BRP = 0x3F
#/* CNF2 Masks */
BTLMODE = 0x80
SAM = 0x40
PHSEG1 = 0x38
PRSEG = 0x07
#/* CNF3 Masks */
WAKFIL = 0x40
PHSEG2 = 0x07
#/* TXRTSCTRL Masks */
TXB2RTS = 0x04
TXB1RTS = 0x02
TXB0RTS = 0x01
#/*******************************************************************
# * Bit Timing Configuration *
# *******************************************************************/
#/* CNF1 */
SJW_1TQ = 0x40
SJW_2TQ = 0x80
SJW_3TQ = 0x90
SJW_4TQ = 0xC0
#/* CNF2 */
BTLMODE_CNF3 = 0x80
BTLMODE_PH1_IPT = 0x00
SMPL_3X = 0x40
SMPL_1X = 0x00
PHSEG1_8TQ = 0x38
PHSEG1_7TQ = 0x30
PHSEG1_6TQ = 0x28
PHSEG1_5TQ = 0x20
PHSEG1_4TQ = 0x18
PHSEG1_3TQ = 0x10
PHSEG1_2TQ = 0x08
PHSEG1_1TQ = 0x00
PRSEG_8TQ = 0x07
PRSEG_7TQ = 0x06
PRSEG_6TQ = 0x05
PRSEG_5TQ = 0x04
PRSEG_4TQ = 0x03
PRSEG_3TQ = 0x02
PRSEG_2TQ = 0x01
PRSEG_1TQ = 0x00
#/* CNF3 */
PHSEG2_8TQ = 0x07
PHSEG2_7TQ = 0x06
PHSEG2_6TQ = 0x05
PHSEG2_5TQ = 0x04
PHSEG2_4TQ = 0x03
PHSEG2_3TQ = 0x02
PHSEG2_2TQ = 0x01
PHSEG2_1TQ = 0x00
SOF_ENABLED = 0x80
WAKFIL_ENABLED = 0x40
WAKFIL_DISABLED = 0x00
#/*******************************************************************
# * Control/Configuration Registers *
# *******************************************************************/
#/* CANINTE */
RX0IE_ENABLED = 0x01
RX0IE_DISABLED = 0x00
RX1IE_ENABLED = 0x02
RX1IE_DISABLED = 0x00
G_RXIE_ENABLED = 0x03
G_RXIE_DISABLED = 0x00
TX0IE_ENABLED = 0x04
TX0IE_DISABLED = 0x00
TX1IE_ENABLED = 0x08
TX2IE_DISABLED = 0x00
TX2IE_ENABLED = 0x10
TX2IE_DISABLED = 0x00
G_TXIE_ENABLED = 0x1C
G_TXIE_DISABLED = 0x00
ERRIE_ENABLED = 0x20
ERRIE_DISABLED = 0x00
WAKIE_ENABLED = 0x40
WAKIE_DISABLED = 0x00
IVRE_ENABLED = 0x80
IVRE_DISABLED = 0x00
#/* CANINTF */
RX0IF_SET = 0x01
RX0IF_RESET = 0x00
RX1IF_SET = 0x02
RX1IF_RESET = 0x00
TX0IF_SET = 0x04
TX0IF_RESET = 0x00
TX1IF_SET = 0x08
TX2IF_RESET = 0x00
TX2IF_SET = 0x10
TX2IF_RESET = 0x00
ERRIF_SET = 0x20
ERRIF_RESET = 0x00
WAKIF_SET = 0x40
WAKIF_RESET = 0x00
IVRF_SET = 0x80
IVRF_RESET = 0x00
#/* CANCTRL */
REQOP_CONFIG = 0x80
REQOP_LISTEN = 0x60
REQOP_LOOPBACK = 0x40
REQOP_SLEEP = 0x20
REQOP_NORMAL = 0x00
ABORT = 0x10
OSM_ENABLED = 0x08
CLKOUT_ENABLED = 0x04
CLKOUT_DISABLED = 0x00
CLKOUT_PRE_8 = 0x03
CLKOUT_PRE_4 = 0x02
CLKOUT_PRE_2 = 0x01
CLKOUT_PRE_1 = 0x00
#/* CANSTAT */
OPMODE_CONFIG = 0x80
OPMODE_LISTEN = 0x60
OPMODE_LOOPBACK = 0x40
OPMODE_SLEEP = 0x20
OPMODE_NORMAL = 0x00
#/* RXBnCTRL */
RXM_RCV_ALL = 0x60
RXM_VALID_EXT = 0x40
RXM_VALID_STD = 0x20
RXM_VALID_ALL = 0x00
RXRTR_REMOTE = 0x08
RXRTR_NO_REMOTE = 0x00
BUKT_ROLLOVER = 0x04
BUKT_NO_ROLLOVER = 0x00
FILHIT0_FLTR_1 = 0x01
FILHIT0_FLTR_0 = 0x00
FILHIT1_FLTR_5 = 0x05
FILHIT1_FLTR_4 = 0x04
FILHIT1_FLTR_3 = 0x03
FILHIT1_FLTR_2 = 0x02
FILHIT1_FLTR_1 = 0x01
FILHIT1_FLTR_0 = 0x00
#/* TXBnCTRL */
TXREQ_SET = 0x08
TXREQ_CLEAR = 0x00
TXP_HIGHEST = 0x03
TXP_INTER_HIGH = 0x02
TXP_INTER_LOW = 0x01
TXP_LOWEST = 0x00
#/*******************************************************************
# * Register Bit Masks *
# *******************************************************************/
DLC_0 = 0x00
DLC_1 = 0x01
DLC_2 = 0x02
DLC_3 = 0x03
DLC_4 = 0x04
DLC_5 = 0x05
DLC_6 = 0x06
DLC_7 = 0x07
DLC_8 = 0x08
#/*******************************************************************
# * CAN SPI commands *
# *******************************************************************/
CAN_RESET = 0xC0
CAN_READ = 0x03
CAN_WRITE = 0x02
CAN_RTS = 0x80
CAN_RTS_TXB0 = 0x81
CAN_RTS_TXB1 = 0x82
CAN_RTS_TXB2 = 0x84
CAN_RD_STATUS = 0xA0
CAN_BIT_MODIFY = 0x05
CAN_RX_STATUS = 0xB0
CAN_RD_RX_BUFF = 0x90
CAN_LOAD_TX = 0x40
#/*******************************************************************
# * Miscellaneous *
# *******************************************************************/
DUMMY_BYTE = 0x00
TXB0 = 0x31
TXB1 = 0x41
TXB2 = 0x51
RXB0 = 0x61
RXB1 = 0x71
EXIDE_SET = 0x08
EXIDE_RESET = 0x00
#MCP2515
CAN_10Kbps = 0x31
CAN_25Kbps = 0x13
CAN_50Kbps = 0x09
CAN_100Kbps = 0x04
CAN_125Kbps = 0x03
CAN_250Kbps = 0x01
CAN_500Kbps = 0x00
def __init__(self):
self.spi = spidev.SpiDev()
self.spi.open(0, 0)
self.spi.max_speed_hz = 500000
self.spi.mode = 0b11
command = [self.SPI_RESET]
self.spi.writebytes(command)
def WriteRegister(self, Register, Data):
command = [self.SPI_WRITE, Register] + Data
self.spi.writebytes(command)
def ReadRegister(self, Register, n):
Data = [self.SPI_READ, Register] + [0]*n
#print Data
self.spi.xfer(Data)
#print Data
return(Data[2:])
def BitModify(self, Register, Mask, Value):
# print "BitModify(Reg=0x%x, Mask=0x%x, Value=0x%x)" % (Register, Mask, Value)
command = [self.SPI_BIT_MODIFY, Register] + [Mask, Value]
self.spi.writebytes(command)
def ReadStatus(self, type):
Data = [type, 0xFF]
self.spi.xfer(Data)
return(Data[1])
| [
"spidev.SpiDev"
] | [((9431, 9446), 'spidev.SpiDev', 'spidev.SpiDev', ([], {}), '()\n', (9444, 9446), False, 'import spidev\n')] |
import sqlalchemy
from .base import Base
from .track import Track
import model.tracktime
class GroupTrack(Base):
'''
a link between group and track (association pattern)
backrefs group and track (not listed here)
To use this first create the group, then group tracks,
then tracks and add them to grouptracks,
then add grouptracks to group
Association Object
'''
__tablename__ = "grouptracks"
idno = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
trackid = sqlalchemy.Column(sqlalchemy.Integer, sqlalchemy.ForeignKey("tracks.idno"))#, primary_key = True)
groupid = sqlalchemy.Column(sqlalchemy.Integer, sqlalchemy.ForeignKey("groups.idno"))#, primary_key = True)
no = sqlalchemy.Column(sqlalchemy.Integer)
# time doesnt work well with qt mvc and with python
fromms = sqlalchemy.Column(sqlalchemy.Integer)
lengthms = sqlalchemy.Column(sqlalchemy.Integer)
#track = sqlalchemy.orm.relationship("Track", backref="grouptracks")
headers = Track.headers[:]
headers.insert(0, "No")
headers.append("From")
headers.append("Length")
innerheaders = 1 # the track headers start from second index
def __init__(self, no, froms, lengths):
self.no = no
self.fromms = froms
self.lengthms = lengths
def bycol(self, col, newvalue=None, edit=False):
col = GroupTrack.translatecol(col)
if col < 0:
return self.track.bycol(-1-col, newvalue, edit)
if col == 0:
if newvalue != None:
self.no = newvalue
return True
return self.no
elif col == 1:
if newvalue != None:
nw, ok = model.tracktime.strtototal(newvalue)
if ok: self.fromms = nw
return ok
return model.tracktime.totaltostr(self.fromms, edit)
elif col == 2:
if newvalue != None:
nw, ok = model.tracktime.strtototal(newvalue)
if ok: self.lengthms = nw
return ok
return model.tracktime.totaltostr(self.lengthms, edit)
return None
def tipbycol(self, col):
col = GroupTrack.translatecol(col)
if col < 0:
return self.track.tipbycol(-1-col)
return None
@classmethod
def colbycol(cls, col):
col = cls.translatecol(col)
if col < 0:
return Track.colbycol(-1-col)
if col == 0:
return cls.no
elif col == 1:
return cls.fromms
elif col == 2:
return cls.lengthms
@classmethod
def isStar(cls,col):
col = cls.translatecol(col)
if col < 0:
return Track.isStar(-1-col)
else:
return False
@classmethod
def isCheck(cls, col):
col = cls.translatecol(col)
if col < 0:
return Track.isCheck(-1-col)
else:
return False
@classmethod
def translatecol(cls, col):
'''
"translate" the column number to the Track or to local index
a column in grouptrack (positive) or track (negative - 1)
'''
if col >= cls.innerheaders and col < cls.innerheaders + len(Track.headers):
return cls.innerheaders - col - 1
elif col >= cls.innerheaders + len(Track.headers):
return col - len(Track.headers)
return col
@classmethod
def translateorder(cls, direction, col):
"""translate the column number to/from Track
if direction then translate to track
return < 0 if the col doesn't have counterpart
"""
if direction: # gt -> t
return -1 - cls.translatecol(col)
else: # t -> gt
return cls.innerheaders + col
| [
"sqlalchemy.ForeignKey",
"sqlalchemy.Column"
] | [((445, 500), 'sqlalchemy.Column', 'sqlalchemy.Column', (['sqlalchemy.Integer'], {'primary_key': '(True)'}), '(sqlalchemy.Integer, primary_key=True)\n', (462, 500), False, 'import sqlalchemy\n'), ((734, 771), 'sqlalchemy.Column', 'sqlalchemy.Column', (['sqlalchemy.Integer'], {}), '(sqlalchemy.Integer)\n', (751, 771), False, 'import sqlalchemy\n'), ((841, 878), 'sqlalchemy.Column', 'sqlalchemy.Column', (['sqlalchemy.Integer'], {}), '(sqlalchemy.Integer)\n', (858, 878), False, 'import sqlalchemy\n'), ((894, 931), 'sqlalchemy.Column', 'sqlalchemy.Column', (['sqlalchemy.Integer'], {}), '(sqlalchemy.Integer)\n', (911, 931), False, 'import sqlalchemy\n'), ((553, 589), 'sqlalchemy.ForeignKey', 'sqlalchemy.ForeignKey', (['"""tracks.idno"""'], {}), "('tracks.idno')\n", (574, 589), False, 'import sqlalchemy\n'), ((665, 701), 'sqlalchemy.ForeignKey', 'sqlalchemy.ForeignKey', (['"""groups.idno"""'], {}), "('groups.idno')\n", (686, 701), False, 'import sqlalchemy\n')] |
import unittest
from reflexy.base import reflex
class TestReflexModule(unittest.TestCase):
sof = 'datasetname|file1.fits;PRO_CATG1;PURPOSE1:PURPOSE2,file2;' \
'PRO_CAT2;PURPOSE1'
sopexp = [('long_param1', '3'), ('param2', '3'), ('param3', 'ser'),
('param_not_shown', 'none')]
sop = 'recipe_name:long_param1=3,recipe_name:param2=3,' \
'recipe_name:param3=ser,recipe_name:param_not_shown=none'
def test_parseSof(self):
r = reflex.parseSof(self.sof)
self.assertEqual(len(r), 2)
self.assertEqual(r.datasetName, 'datasetname')
f1, f2 = r.files
self.assertEqual(f1.name, 'file1.fits')
self.assertEqual(f1.category, 'PRO_CATG1')
self.assertEqual(len(f1.purposes), 2)
self.assertIn('PURPOSE1', f1.purposes)
self.assertIn('PURPOSE2', f1.purposes)
self.assertEqual(f2.name, 'file2')
self.assertEqual(f2.category, 'PRO_CAT2')
self.assertEqual(len(f2.purposes), 1)
self.assertEqual(f2.purposes[0], 'PURPOSE1')
def test_parseRoundTripJson(self):
r = reflex.parseSof(self.sof)
j = r.toJSON()
r2 = reflex.parseSofJson(j)
self.assertEqual(r, r2)
def test_parseSop(self):
r = reflex.parseSop(self.sop)
self.assertEqual(len(r), len(self.sopexp))
for p, ep in zip(r, self.sopexp):
self.assertEqual(p.recipe, 'recipe_name')
self.assertEqual(p.displayName, ep[0])
self.assertEqual(p.value, ep[1])
if __name__ == "__main__":
unittest.main()
| [
"unittest.main",
"reflexy.base.reflex.parseSop",
"reflexy.base.reflex.parseSofJson",
"reflexy.base.reflex.parseSof"
] | [((1568, 1583), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1581, 1583), False, 'import unittest\n'), ((482, 507), 'reflexy.base.reflex.parseSof', 'reflex.parseSof', (['self.sof'], {}), '(self.sof)\n', (497, 507), False, 'from reflexy.base import reflex\n'), ((1108, 1133), 'reflexy.base.reflex.parseSof', 'reflex.parseSof', (['self.sof'], {}), '(self.sof)\n', (1123, 1133), False, 'from reflexy.base import reflex\n'), ((1170, 1192), 'reflexy.base.reflex.parseSofJson', 'reflex.parseSofJson', (['j'], {}), '(j)\n', (1189, 1192), False, 'from reflexy.base import reflex\n'), ((1267, 1292), 'reflexy.base.reflex.parseSop', 'reflex.parseSop', (['self.sop'], {}), '(self.sop)\n', (1282, 1292), False, 'from reflexy.base import reflex\n')] |
import sys
import argparse
import os
import re
import yaml
from . import workflow
class Runner(object):
tasks = [
]
out_and_cache_subfolder_with_sumatra_label = True
def run(self):
parser = argparse.ArgumentParser(description='Run workflow')
parser.add_argument('config_path', type=str)
parser.add_argument('--workdir', type=str, default=None)
parser.add_argument('--out', type=str, default=None)
parser.add_argument('--cache', type=str, default=None)
parser.add_argument('--range', type=str, default=None)
args = parser.parse_args()
config = self._load_config(args.config_path)
out_path = args.out
cache_path = args.cache
if self.out_and_cache_subfolder_with_sumatra_label and 'sumatra_label' in config:
if out_path:
out_path = os.path.join(out_path, config['sumatra_label'])
if cache_path:
cache_path = os.path.join(cache_path, config['sumatra_label'])
tasks = []
if args.range:
single_id_match = re.match(r'^(\d*)$', args.range)
start_end_match = re.match(r'^(\d*)-(\d*)$', args.range)
if single_id_match is not None:
tasks = [int(single_id_match.group(1))]
elif start_end_match is not None:
start = int(start_end_match.group(1))
end = int(start_end_match.group(2))
if end >= start:
tasks = [x for x in range(start, end + 1)]
wf = workflow.Workflow(config, available_tasks=self._get_task_dictionary(), work_dir=args.workdir, output_path=out_path,
cache_path=cache_path)
wf.run(tasks_to_execute=tasks)
def _get_task_dictionary(self):
return {k.name: k for k in self.tasks}
def _load_config(self, path):
with open(path, 'r') as yml_file:
cfg = yaml.load(yml_file)
return cfg
| [
"os.path.join",
"re.match",
"yaml.load",
"argparse.ArgumentParser"
] | [((221, 272), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Run workflow"""'}), "(description='Run workflow')\n", (244, 272), False, 'import argparse\n'), ((1099, 1131), 're.match', 're.match', (['"""^(\\\\d*)$"""', 'args.range'], {}), "('^(\\\\d*)$', args.range)\n", (1107, 1131), False, 'import re\n'), ((1162, 1201), 're.match', 're.match', (['"""^(\\\\d*)-(\\\\d*)$"""', 'args.range'], {}), "('^(\\\\d*)-(\\\\d*)$', args.range)\n", (1170, 1201), False, 'import re\n'), ((1953, 1972), 'yaml.load', 'yaml.load', (['yml_file'], {}), '(yml_file)\n', (1962, 1972), False, 'import yaml\n'), ((871, 918), 'os.path.join', 'os.path.join', (['out_path', "config['sumatra_label']"], {}), "(out_path, config['sumatra_label'])\n", (883, 918), False, 'import os\n'), ((975, 1024), 'os.path.join', 'os.path.join', (['cache_path', "config['sumatra_label']"], {}), "(cache_path, config['sumatra_label'])\n", (987, 1024), False, 'import os\n')] |
# Copyright (C) 2016 <NAME>.
# This file is part of CodexGigas - https://github.com/codexgigassys/
# See the file 'LICENSE' for copying permission.
import pathmagic
from pymongo import MongoClient
import ssdeep
from env import envget
def searchFuzzy(fuzz, limit, thresh):
client = MongoClient(envget('metadata.host'), envget('metadata.port'))
db = client[envget('db_metadata_name')]
coll_meta = db["db_metadata_collection"]
f1 = coll_meta.find({}, {"file_id": 1, "fuzzy_hash": 1}).limit(limit)
l = []
for f in f1:
l.append(f)
ret = {}
for a in l:
res = -1
try:
res = ssdeep.compare(a["fuzzy_hash"], fuzz)
except InternalError:
print(str(res) + "------" +
str(a["fuzzy_hash"]) + "-----" + str(a["file_id"]))
continue
if(res >= thresh):
ret[a["file_id"]] = res
return ret
def searchFull(search, limit):
# print("1")
client = MongoClient(envget('metadata.host'), envget('metadata.port'))
# print("2")
db = client[envget('db_metadata_name')]
# print("3")
coll_meta = db["db_metadata_collection"]
# print("4")
f1 = coll_meta.find(search).limit(limit)
# print("5")
l = []
for f in f1:
l.append(f)
# print("6")
ret = []
for a in l:
ret.append(str(a["file_id"]))
# print("7")
return ret
| [
"env.envget",
"ssdeep.compare"
] | [((299, 322), 'env.envget', 'envget', (['"""metadata.host"""'], {}), "('metadata.host')\n", (305, 322), False, 'from env import envget\n'), ((324, 347), 'env.envget', 'envget', (['"""metadata.port"""'], {}), "('metadata.port')\n", (330, 347), False, 'from env import envget\n'), ((365, 391), 'env.envget', 'envget', (['"""db_metadata_name"""'], {}), "('db_metadata_name')\n", (371, 391), False, 'from env import envget\n'), ((992, 1015), 'env.envget', 'envget', (['"""metadata.host"""'], {}), "('metadata.host')\n", (998, 1015), False, 'from env import envget\n'), ((1017, 1040), 'env.envget', 'envget', (['"""metadata.port"""'], {}), "('metadata.port')\n", (1023, 1040), False, 'from env import envget\n'), ((1075, 1101), 'env.envget', 'envget', (['"""db_metadata_name"""'], {}), "('db_metadata_name')\n", (1081, 1101), False, 'from env import envget\n'), ((639, 676), 'ssdeep.compare', 'ssdeep.compare', (["a['fuzzy_hash']", 'fuzz'], {}), "(a['fuzzy_hash'], fuzz)\n", (653, 676), False, 'import ssdeep\n')] |
#!/usr/bin/env python3
"""
Summary:
buildrpm (python3): branchdiff binary operating system package (.rpm, Redhat, Redhat-based systems)
- Automatic determination of version to be built
- Build version can optionally be forced to a specific version
- Resulting rpm ackage produced in packaging/rpm directory
- To execute build, from the directory of this module, run:
.. code-block:: python
$ cd ../<project dir>
$ make buildrpm
Author:
<NAME>
Copyright 2017-2018, All Rights Reserved.
License:
General Public License v3
Additional terms may be found in the complete license agreement:
https://bitbucket.org/blakeca00/branchdiffthon3/src/master/LICENSE.md
OS Support:
- Redhat, CentOS, Fedora, Redhat-based variants
Dependencies:
- Requires python3, developed and tested under python3.6
"""
import argparse
import inspect
import json
import os
import sys
import subprocess
import tarfile
import fileinput
from shutil import copy2 as copyfile
from shutil import copytree, rmtree, which
import distro
import docker
import loggers
from pyaws.utils import stdout_message, export_json_object
from pyaws.colors import Colors
from common import debug_header
try:
from pyaws.core.oscodes_unix import exit_codes
except Exception:
from pyaws.core.oscodes_win import exit_codes # non-specific os-safe codes
# globals
PROJECT = 'branchdiff'
module = os.path.basename(__file__)
TMPDIR = '/tmp/build'
VOLMNT = '/tmp/rpm'
CONTAINER_VOLMNT = '/mnt/rpm'
DISTRO_LIST = ['centos7', 'amazonlinux', 'redhat7']
# docker
dclient = docker.from_env()
# formatting
act = Colors.ORANGE # accent highlight (bright orange)
bd = Colors.BOLD + Colors.WHITE # title formatting
bn = Colors.CYAN # color for main binary highlighting
lk = Colors.DARK_BLUE # color for filesystem path confirmations
red = Colors.RED # color for failed operations
yl = Colors.GOLD3 # color when copying, creating paths
rst = Colors.RESET # reset all color, formatting
arrow = yl + Colors.BOLD + '-->' + rst
# global logger
logger = loggers.getLogger('1.0')
def git_root():
"""
Summary.
Returns root directory of git repository
"""
cmd = 'git rev-parse --show-toplevel 2>/dev/null'
return subprocess.getoutput(cmd).strip()
def help_menu():
"""
Summary.
Command line parameter options (Help Menu)
"""
menu = '''
''' + bd + module + rst + ''' help contents
''' + bd + '''DESCRIPTION''' + rst + '''
Builds an installable package (.rpm) for Redhat, CentOS, and Fedora
variants of the Linux Operatining System
''' + bd + '''OPTIONS''' + rst + '''
$ python3 ''' + act + module + rst + ''' --build [ --force-version <VERSION> ]
-b, --build
-d, --distro <value>
[-D, --debug ]
[-f, --force ]
[-h, --help ]
[-p, --parameter-file <value> ]
[-s, --set-version <value> ]
''' + bd + '''-b''' + rst + ''', ''' + bd + '''--build''' + rst + ''': Build Operating System package ( *.rpm, Redhat systems )
When given without the --set-version parameter switch, build ver-
sion is extracted from the project repository information
''' + bd + '''-d''' + rst + ''', ''' + bd + '''--debug''' + rst + ''': Debug mode, verbose output.
''' + bd + '''-d''' + rst + ''', ''' + bd + '''--distro''' + rst + ''' <value>: Specifies the Docker Operating System Image to
use when building. Allowable Values:
- centos7 (DEFAULT)
- amazonlinux
- redhat7
''' + bd + '''-F''' + rst + ''', ''' + bd + '''--force''' + rst + ''': When given, overwrites any pre-existing build artifacts.
DEFAULT: False
''' + bd + '''-h''' + rst + ''', ''' + bd + '''--help''' + rst + ''': Print this help menu
''' + bd + '''-p''' + rst + ''', ''' + bd + '''--parameter-file''' + rst + ''' <value>: Optional json format configuration file
containing all configuration parameters to build rpm package (key,
value format)
''' + bd + '''-s''' + rst + ''', ''' + bd + '''--set-version''' + rst + ''' (string): When given, overrides all version infor-
mation contained in the project to build the exact version speci-
fied by VERSION parameter
'''
print(menu)
return True
def clean(directory, debug):
"""
Summary.
rm residual installation files from build directory
"""
bytecode_list = list(
filter(
lambda x: x.endswith('.pyc') or x.endswith('.pyo'), os.listdir(directory)
)
)
if debug:
stdout_message(
message=f'bytecode_list contents: {bytecode_list}',
prefix='DEBUG'
)
for artifact in bytecode_list:
os.remove(directory + '/' + artifact)
logger.info('Artifact {} cleaned from {}'.format(artifact, directory))
return True
def current_branch(path):
"""
Returns:
git repository source url, TYPE: str
"""
cmd = 'git branch'
pwd = os.getcwd()
os.chdir(path)
try:
if '.git' in os.listdir('.'):
branch = subprocess.getoutput('git branch').split('*')[1].split('\n')[0][1:]
else:
ex = Exception(
'%s: Unable to identify current branch - path not a git repository: %s' %
(inspect.stack()[0][3], path))
raise ex
os.chdir(pwd) # return cursor
except IndexError:
logger.exception(
'%s: problem retrieving git branch for %s' %
(inspect.stack()[0][3], path)
)
return ''
return branch
def read(fname):
    """
    Return the text contents of *fname*, resolved relative to the directory
    containing the invoking script (sys.argv[0]).

    Args:
        :fname (str): file name or path; an absolute path is used as-is
            (os.path.join discards the base when the second part is absolute)

    Returns:
        file contents, TYPE: str
    """
    basedir = os.path.dirname(sys.argv[0])
    # context manager guarantees the handle is closed; the original
    # open(...).read() leaked an open file descriptor
    with open(os.path.join(basedir, fname)) as f:
        return f.read()
def masterbranch_version(version_module):
    """
    Return the __version__ string denoted in the master branch of the
    repository.

    Args:
        :version_module (str): filename of the module containing __version__

    Returns:
        master branch version string | None on any failure
    """
    branch = current_branch(git_root())
    commands = ['git checkout master', 'git checkout {}'.format(branch)]
    try:
        # checkout master
        #stdout_message('Checkout master branch:\n\n%s' % subprocess.getoutput(commands[0]))
        masterversion = read(version_module).split('=')[1].strip().strip('"')
        # return to working branch. BUGFIX: the original called str.format on
        # a %-style template with no '{}' fields, which printed the template
        # text verbatim instead of the branch name
        stdout_message(
            'Returning to working branch: checkout {}'.format(branch)
        )
        stdout_message(subprocess.getoutput(f'git checkout {branch}'))
    except Exception:
        return None
    return masterversion
def current_version(binary, version_modpath):
    """
    Summary:
        Returns current binary package version if locally
        installed, master branch __version__ if the binary
        being built is not installed locally
    Args:
        :binary (str): Name of main project executable
        :version_modpath (str): path to the module containing __version__
    Returns:
        current version number of the project, TYPE: str
    """
    if which(binary):
        # query the OS package manager for the installed package's version
        os_type = distro.linux_distribution()[0]
        if os_type == 'Redhat' and which('yum'):
            cmd = 'yum info ' + binary + ' 2>/dev/null | grep Version'
        elif os_type == 'Redhat' and which('rpm'):
            cmd = 'rpm -qi ' + binary + ' 2>/dev/null | grep Version'
        elif os_type == 'Ubuntu' and which('apt'):
            cmd = 'apt show ' + binary + ' 2>/dev/null | grep Version | head -n1'
        # NOTE(review): if os_type matches none of the branches above, cmd is
        # unbound here; the resulting NameError is swallowed by the except below
        try:
            installed_version = subprocess.getoutput(cmd).split(':')[1].strip()
            return greater_version(installed_version, __version__)
        except Exception:
            logger.info(
                '%s: Build binary %s not installed, comparing current branch version to master branch version' %
                (inspect.stack()[0][3], binary))
    # binary not installed (or lookup failed): compare master branch version
    # against the working-copy __version__ and return the greater
    return greater_version(masterbranch_version(version_modpath), __version__)
def greater_version(versionA, versionB):
    """
    Summary:
        Compares two '.'-separated version strings with multiple digits
        and returns the greater. BUGFIX: versions of unequal length are
        now compared correctly ('1.2' vs '1.2.3' returns '1.2.3'; the
        original stopped at the shorter version's length and returned '1.2').
    Args:
        :versionA (str | None): first version string, e.g. '1.6.7'
        :versionB (str | None): second version string
    Returns:
        greater version, TYPE: str (the non-None / non-empty operand when
        one of them is None, empty, or non-numeric)
    """
    from itertools import zip_longest    # stdlib; local import keeps module-level deps unchanged
    try:
        list_a = versionA.split('.')
        list_b = versionB.split('.')
    except AttributeError:
        return versionA or versionB  # either A or B is None
    try:
        # missing trailing components count as 0 ('1.2' == '1.2.0')
        for digit_a, digit_b in zip_longest(list_a, list_b, fillvalue='0'):
            if int(digit_a) > int(digit_b):
                return versionA
            elif int(digit_a) < int(digit_b):
                return versionB
    except ValueError:
        return versionA or versionB  # either A or B is '' or non-numeric
    return versionA
def increment_version(current):
    """
    Return *current* with its last (minor) component incremented by 1.

    Example: '1.6.7' -> '1.6.8'
    """
    components = current.split('.')
    bumped = int(components[-1]) + 1
    return '.'.join(components[:-1]) + '.' + str(bumped)
def tar_archive(archive, source_dir, debug):
    """
    Summary.

        Creates a gzip-compressed tar archive of *source_dir* at *archive*
        and verifies the file exists afterwards.

    Args:
        :archive (str): destination path of the .tar.gz file
        :source_dir (str): directory to archive
        :debug (bool): passed through to clean()

    Returns:
        Success | Failure, TYPE: bool
    """
    try:
        # strip python bytecode artifacts before archiving
        clean(source_dir, debug)
        with tarfile.open(archive, "w:gz") as tar:
            tar.add(source_dir, arcname=os.path.basename(source_dir))
        return os.path.exists(archive)
    except OSError:
        logger.exception(
            '{}: Unable to create tar archive {}'.format(inspect.stack()[0][3], archive))
    except Exception as e:
        logger.exception(
            '%s: Unknown problem while creating tar archive %s:\n%s' %
            (inspect.stack()[0][3], archive, str(e)))
    return False
def builddir_structure(param_dict, force):
    """
    Assemble the rpm build directory: copies the main executable, library
    modules, the rpm spec file, and bash completion artifacts into the
    temporary build tree.

    Args:
        :param_dict (dict): build parameters (spec file name, build dir
            name, bash completion file name, ...)
        :force (bool): NOTE(review) — accepted but never used in this body;
            the caller in main() passes VERSION here — confirm intent

    Vars:
        :lib_path (str): src path to library modules in project root
        :builddir_path (str): dst path to root of the current build directory
            (/<path>/branchdiff-1.X.X dir)

    Returns:
        Success | Failure, TYPE: bool
    """
    root = git_root()
    build_root = TMPDIR
    arrow = yl + Colors.BOLD + '-->' + rst
    # files
    specfile = param_dict['SpecFile']['Name']
    compfile = param_dict['BashCompletion']
    builddir = param_dict['SpecFile']['BuildDirName']
    # full paths
    rpm_src = root + '/packaging/rpm'
    builddir_path = build_root + '/' + builddir
    lib_path = root + '/' + 'core'
    try:
        stdout_message(f'Assembling build directory artifacts in {bn + builddir + rst}')
        # create build directory (wipe any pre-existing one first)
        if os.path.exists(builddir_path):
            rmtree(builddir_path)
        os.makedirs(builddir_path)
        stdout_message(
            message='Created:\t{}'.format(yl + builddir_path + rst),
            prefix='OK')
        # place main bin to builddir
        if not os.path.exists(builddir_path + '/' + PROJECT_BIN):
            binary_src = root + '/' + PROJECT_BIN
            binary_dst = builddir_path + '/' + PROJECT_BIN
            copyfile(binary_src, binary_dst)
            stdout_message(
                message='Copied: {} {} {}'.format(lk + binary_src + rst, arrow, lk + binary_dst + rst),
                prefix='OK')
        # place library dependencies (skip python bytecode)
        for libfile in os.listdir(lib_path):
            if libfile.endswith('.pyc') or libfile.endswith('.pyo'):
                continue
            else:
                lib_src = lib_path + '/' + libfile
                lib_dst = builddir_path + '/' + libfile
                copyfile(lib_src, lib_dst)
                stdout_message(
                    message='Copied: {} {} {}'.format(lk + lib_src + rst, arrow, lk + lib_dst + rst),
                    prefix='OK')
        # place specfile in build_root (NOT inside builddir_path)
        spec_dst = build_root + '/' + specfile
        if os.path.exists(spec_dst):
            os.remove(spec_dst)
        copyfile(rpm_src + '/' + specfile, spec_dst)
        # verify build spec placement
        if os.path.exists(spec_dst):
            stdout_message(
                message='Copied: {} {} {}'.format(
                    lk + rpm_src + '/' + specfile + rst, arrow, lk + spec_dst + rst),
                prefix='OK')
        # place bash completion artifacts
        comp_src = root + '/' + 'bash' + '/' + compfile
        comp_dst = builddir_path + '/' + compfile
        if os.path.exists(comp_src):
            if os.path.exists(comp_dst):
                os.remove(comp_dst)
            copyfile(comp_src, comp_dst)
            # verify build spec placement
            if os.path.exists(comp_dst):
                stdout_message(
                    message='Copied: {} {} {}'.format(
                        lk + comp_src + rst, arrow, lk + comp_dst + rst),
                    prefix='OK')
    except OSError as e:
        logger.exception(
            '{}: Problem creating dirs on local fs'.format(inspect.stack()[0][3]))
        return False
    return True
def build_package(build_root, builddir):
    """
    Create the final os-installable package for the current build version
    by running rpmbuild inside *builddir*.

    Args:
        :build_root (str): NOTE(review) — accepted but never used in this body
        :builddir (str): directory containing the SPECS/ tree for rpmbuild

    Returns:
        Success | Failure, TYPE: bool
    """
    try:
        pwd = os.getcwd()
        if os.path.exists(builddir):
            os.chdir(builddir)
            cmd = 'rpmbuild -ba SPECS/branchdiff.spec'
            stdout_message('Building {}... '.format(bn + builddir + rst))
            stdout_message(subprocess.getoutput(cmd))
            os.chdir(pwd)    # restore original working directory
        else:
            # NOTE(review): warning text says '.deb' but this function builds
            # an rpm package — likely copied from a debian build script
            logger.warning(
                'Build directory {} not found. Failed to create .deb package'.format(builddir))
            os.chdir(pwd)
            return False
    except OSError as e:
        logger.exception(
            '{}: Error during os package creation: {}'.format(inspect.stack()[0][3], e))
        return False
    except Exception as e:
        logger.exception(
            '{}: Unknown Error during os package creation: {}'.format(inspect.stack()[0][3], e))
        return False
    return True
def builddir_content_updates(param_dict, osimage, version, debug):
    """
    Summary:
        Updates builddir contents:
        - main executable has its library path updated
        - the build spec file placeholders (MAJOR_VERSION, MINOR_VERSION,
          DOCKERUSER, DEPLIST, PROJECT_URL) are substituted in place
        - updates the version.py file if version != to __version__
          contained in the file (occurs when user invokes --set-version)
    Args:
        :param_dict (dict): build parameters (spec file, version module,
            docker user, project url, dependency list)
        :osimage (str): NOTE(review) — accepted but never used in this body
        :version (str): version label for this build (e.g. '1.6.8')
        :debug (bool): passed through to clean()
    Returns:
        Success | Failure, TYPE: bool
    """
    root = git_root()
    build_root = TMPDIR
    rpm_src = root + '/packaging/rpm'
    project_dirname = root.split('/')[-1]
    major = '.'.join(version.split('.')[:2])
    minor = version.split('.')[-1]
    # files
    specfile = param_dict['SpecFile']['Name']
    builddir = param_dict['SpecFile']['BuildDirName']
    version_module = param_dict['VersionModule']
    dockeruser = param_dict['DockerUser']
    project_url = param_dict['ProjectUrl']
    # full paths
    builddir_path = build_root + '/' + builddir
    binary_path = builddir_path + '/' + PROJECT_BIN
    lib_src = root + '/' + 'core'
    # dependencies: fold the list into a single comma-separated string
    deplist = None
    for dep in param_dict['DependencyList']:
        if deplist is None:
            deplist = str(dep)
        else:
            deplist = deplist + ', ' + str(dep)
    try:
        # NOTE(review): % binds tighter than +, so only `yl` is substituted
        # into %s; builddir_path and rst are concatenated after formatting —
        # the message still renders but not as presumably intended
        stdout_message(
            'Generating build spec file and build artifacts in %s' %
            yl + builddir_path + rst
        )
        # main exec bin: rewrite pkg_lib= and LOG_DIR= lines for the
        # installed filesystem layout
        with open(binary_path) as f1:
            f2 = f1.readlines()
            for index, line in enumerate(f2):
                if line.startswith('pkg_lib='):
                    newline = 'pkg_lib=' + '\"' + '/usr/local/lib/' + PROJECT + '\"\n'
                    f2[index] = newline
                elif line.startswith('LOG_DIR='):
                    logline = 'LOG_DIR=' + '\"' + '/var/log' + '\"\n'
                    f2[index] = logline
        f1.close()
        # rewrite bin
        with open(binary_path, 'w') as f3:
            f3.writelines(f2)
            path = binary_path
            stdout_message('Bin {} successfully updated.'.format(yl + path + rst))
        # rewrite version file with current build ver
        # NOTE(review): the list below shadows the f3 file handle name
        with open(builddir_path + '/' + version_module, 'w') as f4:
            f3 = ['__version__=\"' + version + '\"\n']
            f4.writelines(f3)
            path = builddir_path + '/' + version_module
            stdout_message('Module {} successfully updated.'.format(yl + path + rst))
        # rewrite git project version file with current build version in case delta
        with open(lib_src + '/' + version_module, 'w') as f5:
            f4 = ['__version__=\"' + version + '\"\n']
            f5.writelines(f4)
            path = '../' + project_dirname + (lib_src + '/' + version_module)[len(root):]
            stdout_message('Module {} successfully updated.'.format(yl + path + rst))
        if os.path.exists(build_root + '/' + specfile):
            # update specfile - major version (in-place via fileinput)
            for line in fileinput.input([build_root + '/' + specfile], inplace=True):
                print(line.replace('MAJOR_VERSION', major), end='')
            stdout_message(f'Updated {specfile} with MAJOR_VERSION', prefix='OK')
            # update specfile - minor version
            for line in fileinput.input([build_root + '/' + specfile], inplace=True):
                print(line.replace('MINOR_VERSION', minor), end='')
            stdout_message(f'Updated {specfile} with MINOR_VERSION', prefix='OK')
            # update specfile - DOCKERUSER
            for line in fileinput.input([build_root + '/' + specfile], inplace=True):
                print(line.replace('DOCKERUSER', dockeruser), end='')
            stdout_message(f'Updated {specfile} with DOCKERUSER ({dockeruser})', prefix='OK')
            # update specfile - Dependencies
            for line in fileinput.input([build_root + '/' + specfile], inplace=True):
                print(line.replace('DEPLIST', deplist), end='')
            stdout_message(f'Updated {specfile} with Dependencies ({deplist})', prefix='OK')
            # update specfile - project url
            for line in fileinput.input([build_root + '/' + specfile], inplace=True):
                print(line.replace('PROJECT_URL', project_url), end='')
            stdout_message(f'Updated {specfile} with PROJECT_URL', prefix='OK')
        else:
            stdout_message(
                message=f'{specfile} not found in build directory. Cannot update... halting build.',
                prefix='WARN')
            sys.exit(1)
        # rm residual installation files from build directory
        clean(builddir_path, debug)
    except OSError as e:
        logger.exception(
            '%s: Problem while updating builddir contents: %s' %
            (inspect.stack()[0][3], str(e)))
        return False
    return True
def cp_dockerfiles(src, dst):
    """
    Copy dockerfiles and associated build artifacts to build_root.

    >> NOT CURRENTLY USED <<

    NOTE(review): this function is dead code and would fail if called —
    it references names that are undefined in this scope: dockerscript,
    build_root, docker_path, builddir_path, and arrow.
    """
    # place docker build script
    script_src = src + '/' + dockerscript
    script_dst = build_root + '/' + dockerscript
    build_list = os.listdir(src)
    for file in build_list:
        copyfile(file, dst + '/' + file)
    # cp Dockerfile to build root
    copyfile(
        docker_path + '/' + 'Dockerfile',
        builddir_path + '/' + 'Dockerfile'
    )
    # verify build spec placement
    stdout_message(
        message='Copied: {} {} {}'.format(
            lk + script_src + rst, arrow, lk + script_dst + rst),
        prefix='OK')
    return build_list
def container_running(cid, debug=False):
    """
    Summary:
        Verifies whether a container is actively running.
    Args:
        :cid (str): Container name or hex identifier
        :debug (bool): emit a status message when the SDK reports running
    Returns:
        True (running) | False (stopped), TYPE: bool
        (implicitly None when the SDK finds the container but it is
        not in the 'running' state)
    """
    success_msg = f'Container {cid} running'
    try:
        if dclient.containers.get(cid).status == 'running':
            if debug:
                stdout_message(success_msg, prefix='OK')
            return True
    except Exception:
        # SDK lookup failed; fall back to parsing the docker CLI
        if cid in subprocess.getoutput('docker ps'):
            stdout_message(success_msg, prefix='OK')
            return True
        stdout_message(f'Container {cid} stopped', prefix='WARN')
        return False
def display_package_contents(rpm_path, contents):
    """
    Summary:
        Output newly built package contents as a formatted table
        (permissions, owner/group, ctime, file path).
    Args:
        :rpm_path (str): location of newly built rpm package (used only
            for the display header)
        :contents (str): path to the text file listing package contents
            (`ls -l`-style lines — presumably produced inside the build
            container; verify against the buildscript)
    Returns:
        Success | Failure, TYPE: bool
    """
    tab = '\t'.expandtabs(2)
    tab4 = '\t'.expandtabs(4)
    width = 90
    package = os.path.split(rpm_path)[1]
    path, discard = os.path.split(contents)
    pwd = os.getcwd()
    # cd to the directory holding the contents file (no-op when the path
    # has no directory component)
    os.chdir('.') if not path else os.chdir(path)
    with open(contents) as f1:
        unformatted = f1.readlines()
    # title header and subheader
    header = '\n\t\tPackage Contents: ' + bd + package + rst + '\n'
    print(header)
    subheader = tab + 'Permission' + tab + ' Owner/Group' + '\t' + 'ctime' \
        + '\t'.expandtabs(8) + 'File'
    print(subheader)
    # divider line of `width` dashes
    list(filter(lambda x: print('-', end=''), range(0, width + 1))), print('\r')
    # content: re-assemble each ls-style line into aligned columns
    for line in unformatted:
        permissions = [tab + line.split()[0]]
        raw = tab4 + 'root root'
        ctime = line.split()[5:8]
        f_ctime = tab4 + ''.join([x + ' ' for x in ctime])
        content_path = tab4 + yl + line.split()[-1] + rst
        fline = permissions[0] + raw + f_ctime + content_path
        print(fline)
    print('\n')
    os.chdir(pwd)    # restore original working directory
    return True
def docker_daemon_up():
    """
    Summary:
        Determines whether docker is installed and the daemon reachable
        by evaluating the exit code of `docker images`.
    Returns:
        True (running) | False, TYPE: bool
    """
    cmd = 'docker images >/dev/null 2>&1; echo $?'
    daemon_ok = which('docker') and int(subprocess.getoutput(cmd)) == 0
    if daemon_ok:
        return True
    stdout_message('Docker engine not running or not accessible', prefix='WARN')
    return False
def docker_init(src, builddir, osimage, param_dict, debug):
    """
    Summary:
        Creates the docker image (if absent) and a detached container,
        copies the build artifacts into the container, and executes the
        rpmbuild script inside it.
    Args:
        :src (str): directory containing the Dockerfile and build script
        :builddir (str): directory where build artifacts were assembled
        :osimage (str): OS image label used as the image name prefix
        :param_dict (dict): build parameters (DockerImage, DockerContainer)
        :debug (bool): verbose output of the assembled build file list
    Returns:
        Container object on success | False (container failed to start)
        | None (OSError)
    """
    imagename = osimage + ':' + param_dict['DockerImage']     # image name
    cname = param_dict['DockerContainer']                     # container id
    host_mnt = VOLMNT                       # host volume mount point
    container_mnt = CONTAINER_VOLMNT        # container volume internal mnt pt
    docker_user = 'builder'
    bash_cmd = '/bin/sleep 30'
    buildscript = 'docker-buildrpm.sh'
    # copy buildscript to directory where build files assembled
    copyfile(src + '/' + buildscript, builddir + '/' + buildscript)
    try:
        # create host mount for container volume
        if not os.path.exists(host_mnt):
            os.makedirs(host_mnt)
            stdout_message(f'Created host mount {host_mnt} for container volume')
        # if image rpmbuild not exist, create
        try:
            image = dclient.images.get(imagename)
            if image:
                stdout_message('Image already exists. Creating Container...')
        except Exception:
            # create new docker image
            os.chdir(src)
            cmd = 'docker build -t {} . '.format(imagename)
            subprocess.call([cmd], shell=True, cwd=src)
            stdout_message('Built image', prefix='OK')
        # start container detached
        container = dclient.containers.run(
            name=cname,
            image=imagename,
            command=bash_cmd,
            volumes={host_mnt: {'bind': container_mnt, 'mode': 'rw'}},
            user=docker_user,
            detach=True
        )
        # verify container is running
        if not container_running(cname):
            stdout_message(f'Container {cname} not started - abort', prefix='WARN')
            return False
        # copy build files to container
        stdout_message('Begin cp files into container')
        # copy files from temporary build directory to container
        os.chdir(builddir)
        buildfile_list = list(
            filter(
                lambda x: x.endswith('.tar.gz') or x.endswith('.spec') or x.endswith('.sh'), os.listdir('.')
            )
        )
        if debug:
            print(f'buildfile_list contains:\n\n\t%s' % export_json_object(buildfile_list))
            print(f'osimage is: {osimage}')
            print(f'imagename is: {imagename}')
            print(f'container name is: {container.name}')
        for file in buildfile_list:
            # local fs >> container:/home/builder
            cmd = f'docker cp {file} {container.name}:/home/builder/{file}'
            # `docker cp` is silent on success; any output indicates a problem
            if not subprocess.getoutput(cmd):
                stdout_message(f'{file} copied to container {container.name} successfully')
            else:
                stdout_message(
                    f'Problem copying {file} to container {container.name}',
                    prefix='WARN'
                )
        # exec rpmbuild script
        cmd = f'docker exec -i {container.name} sh -c \'cd /home/builder && bash {buildscript}\''
        stdout_message(subprocess.getoutput(cmd))
        if container_running(container.name):
            return container
    except OSError as e:
        logger.exception(
            '%s: Problem while updating builddir contents: %s' %
            (inspect.stack()[0][3], str(e)))
        return None
def main(setVersion, environment, package_configpath, force=False, debug=False):
    """
    Summary:
        Create build directories, populate contents, update contents,
        then launch the docker-based rpm build.
    Args:
        :setVersion (str): version number of rpm created
        :environment (str): docker OS image label (centos7/amazonlinux/redhat7)
        :package_configpath (str): full path to json configuration file
        :force (bool): If True, overwrites any pre-existing build artifacts
            NOTE(review): never used in this body
        :debug (bool): debug output
    Returns:
        result of postbuild() on success | False, TYPE: bool | tuple
    """
    # all globals declared here
    global PROJECT_BIN
    PROJECT_BIN = 'branchdiff'
    global PROJECT_ROOT
    PROJECT_ROOT = git_root()
    global SCRIPT_DIR
    SCRIPT_DIR = PROJECT_ROOT + '/' + 'scripts'
    global BUILD_ROOT
    BUILD_ROOT = TMPDIR
    global RPM_SRC
    RPM_SRC = PROJECT_ROOT + '/packaging/rpm'
    global LIB_DIR
    LIB_DIR = PROJECT_ROOT + '/' + 'core'
    global CURRENT_VERSION
    CURRENT_VERSION = current_version(PROJECT_BIN, LIB_DIR + '/' 'version.py')
    # sort out version numbers, forceVersion is overwrite of pre-existing build artifacts
    global VERSION
    if setVersion:
        VERSION = setVersion
    elif CURRENT_VERSION:
        VERSION = increment_version(CURRENT_VERSION)
    else:
        stdout_message('Could not determine current {} version'.format(bd + PROJECT + rst))
        sys.exit(exit_codes['E_DEPENDENCY']['Code'])
    # log
    stdout_message(f'Current version of last build: {bd + CURRENT_VERSION + rst}')
    stdout_message(f'Version to be used for this build: {act + VERSION + rst}')
    # create initial binary working dir
    BUILDDIRNAME = PROJECT + '-' + '.'.join(VERSION.split('.')[:2])
    # sub in current values
    parameter_obj = ParameterSet(package_configpath, VERSION)
    vars = parameter_obj.create()
    VERSION_FILE = vars['VersionModule']
    if debug:
        print(json.dumps(vars, indent=True, sort_keys=True))
    # NOTE(review): VERSION is passed as builddir_structure's `force`
    # parameter (which that function ignores) — confirm intent
    r_struture = builddir_structure(vars, VERSION)
    r_updates = builddir_content_updates(vars, environment, VERSION, debug)
    # create tar archive
    target_archive = BUILD_ROOT + '/' + PROJECT_BIN + '-' + VERSION + '.tar.gz'
    source_dir = BUILD_ROOT + '/' + BUILDDIRNAME
    r_tarfile = tar_archive(target_archive, source_dir, debug)
    # launch docker container and execute final build steps
    if r_struture and r_updates and r_tarfile:
        # status
        msg = yl + BUILD_ROOT + '/' + target_archive + rst
        stdout_message('tgz archive built: %s' % msg)
        # trigger docker build based on environment:
        container = docker_init(
            PROJECT_ROOT + '/packaging/docker/' + environment,
            BUILD_ROOT,
            environment,
            vars,
            debug
        )
        if container:
            return postbuild(PROJECT_ROOT, container, RPM_SRC, SCRIPT_DIR, VERSION_FILE, VERSION)
    return False
def options(parser, help_menu=False):
    """
    Summary:
        Registers cli parameter options on *parser* and parses sys.argv.
    Args:
        :parser (argparse.ArgumentParser): parser to populate
        :help_menu (bool): unused; retained for interface compatibility
    Returns:
        TYPE: argparse Namespace, parsed argument set
    """
    parser.add_argument(
        "-b", "--build", dest='build',
        default=False, action='store_true', required=False)
    parser.add_argument(
        "-D", "--debug", dest='debug',
        default=False, action='store_true', required=False)
    parser.add_argument(
        "-d", "--distro", dest='distro',
        default='centos7', nargs='?', type=str, required=False)
    parser.add_argument(
        "-F", "--force", dest='force',
        default=False, action='store_true', required=False)
    parser.add_argument(
        "-p", "--parameter-file", dest='parameter_file',
        default='.rpm.json', nargs='?', required=False)
    parser.add_argument(
        "-s", "--set-version", dest='set',
        default=None, nargs='?', type=str, required=False)
    parser.add_argument(
        "-h", "--help", dest='help',
        default=False, action='store_true', required=False)
    return parser.parse_args()
def is_installed(binary):
    """
    Verifies whether *binary* is installed on a Redhat-based Linux system.
    Any output from `rpm -qa | grep <binary>` counts as installed.
    """
    return bool(subprocess.getoutput('rpm -qa | grep ' + binary))
def ospackages(pkg_list):
    """
    Install OS package prerequisites via yum or dnf, skipping packages
    that are already installed.

    Args:
        :pkg_list (list): package names to install

    Returns:
        Success | Failure, TYPE: bool
    """
    try:
        for pkg in pkg_list:
            if is_installed(pkg):
                logger.info(f'{pkg} binary is already installed - skip')
                continue
            elif which('yum'):
                cmd = 'sudo yum install ' + pkg + ' 2>/dev/null'
                print(subprocess.getoutput(cmd))
            elif which('dnf'):
                cmd = 'sudo dnf install ' + pkg + ' 2>/dev/null'
                print(subprocess.getoutput(cmd))
            else:
                # neither yum nor dnf present on this host
                logger.warning(
                    '%s: Dependent OS binaries not installed - package manager not identified' %
                    inspect.stack()[0][3])
    except OSError as e:
        logger.exception('{}: Problem installing os package {}'.format(inspect.stack()[0][3], pkg))
        return False
    return True
def prebuild(builddir, libsrc, volmnt, parameter_file):
    """Summary:
    Prerequisites and dependencies for build execution: removes residual
    artifacts, recreates the build and volume mount directories, copies
    the version module into scripts/, and verifies the docker daemon.
    Args:
        :builddir (str): temporary build directory (recreated)
        :volmnt (str): host volume mount point (recreated)
        :libsrc (str): project library source directory
        :parameter_file (str): json config file naming the version module
    Returns:
        Success | Failure, TYPE: bool
    """
    def preclean(dir, artifact=''):
        """Remove a build-artifact directory, or a named artifact inside libsrc.

        NOTE(review): when *artifact* is given, this operates on the
        enclosing-scope `libsrc`, not on *dir*.
        """
        try:
            if artifact:
                if os.path.exists(libsrc + '/' + artifact):
                    rmtree(libsrc + '/' + artifact)     # clean artifact from inside an existing dir
            elif os.path.exists(dir):
                rmtree(dir)     # rm entire directory
        except OSError as e:
            logger.exception(
                '%s: Error while cleaning residual build artifacts: %s' %
                (inspect.stack()[0][3], str(e)))
            return False
        return True
    version_module = json.loads(read(parameter_file))['VersionModule']
    try:
        if preclean(builddir) and preclean(volmnt) and preclean(libsrc, '__pycache__'):
            stdout_message(f'Removed pre-existing build artifacts ({builddir}, {volmnt})')
        os.makedirs(builddir)
        os.makedirs(volmnt)
        root = git_root()
        src = root + '/core' + '/' + version_module
        dst = root + '/scripts' + '/' + version_module
        # deal with leftover build artifacts
        if os.path.exists(dst):
            os.remove(dst)
        r_cf = copyfile(src, dst)    # copy2 returns the destination path
        # import version module just copied into scripts/
        global __version__
        from version import __version__
        if r_cf and __version__ and docker_daemon_up():
            return True
    except Exception as e:
        logger.exception(
            '{}: Failure to import __version__ parameter'.format(inspect.stack()[0][3])
        )
    return False
def locate_artifact(filext, origin):
    """
    Summary.

        Recursively searches *origin* for the first file whose name ends
        with *filext* (e.g. '.rpm').

    Args:
        :filext (str): file extension searching for (".rpm")
        :origin (str): starting directory for recursive search

    Returns:
        full path to matching file | None if not found
    """
    for dirpath, _, filenames in os.walk(origin):
        match = next((f for f in filenames if f.endswith(filext)), None)
        if match is not None:
            return os.path.abspath(os.path.join(dirpath, match))
    return None
def postbuild(root, container, rpm_root, scripts_dir, version_module, version):
    """
    Summary:
        Post-build clean up: copies the built rpm out of the container
        volume, stops and removes the container, removes the temporary
        version module, and rewrites the project version file.
    Args:
        :root (str): project root full fs path
        :container (object): Docker container object
        :rpm_root (str): target dir for rpm package files
        :scripts_dir (str): directory where scripts
        :version_module (str): name of module containing version number
        :version (str): current version label (Example: 1.6.8)
    Returns:
        (package_path, contents) tuple | '' on OSError
        NOTE(review): if no .rpm is found in the volume mount,
        package_path (and possibly contents) is unbound and the final
        return raises NameError — confirm the no-package path
    """
    project_dirname = root.split('/')[-1]
    major = '.'.join(version.split('.')[:2])
    minor = version.split('.')[-1]
    volmnt = VOLMNT
    delete = True
    try:
        # cp rpm created to repo
        package = locate_artifact('.rpm', volmnt)
        if package:
            copyfile(locate_artifact('.rpm', volmnt), rpm_root)
            package_path = rpm_root + '/' + os.path.split(package)[1]
        # rpm contents text file
        contents = locate_artifact('.txt', volmnt)
        # stop and rm container
        cmd = f'docker stop {container.name}'
        subprocess.getoutput(cmd)
        # status
        if not container_running(container.name):
            stdout_message(f'{container.name} successfully halted', prefix='OK')
            cmd = f'docker rm {container.name}'
            subprocess.getoutput(cmd)
        # remove temp version module copied to scripts dir
        if os.path.exists(scripts_dir + '/' + version_module):
            os.remove(scripts_dir + '/' + version_module)
        # rewrite version file with current build version
        with open(root + '/core/' + version_module, 'w') as f3:
            f2 = ['__version__=\"' + version + '\"\n']
            f3.writelines(f2)
            path = project_dirname + (root + '/core/' + version_module)[len(root):]
            stdout_message(
                '{}: Module {} successfully updated.'.format(inspect.stack()[0][3], yl + path + rst)
            )
    except OSError as e:
        logger.exception('{}: Postbuild clean up failure'.format(inspect.stack()[0][3]))
        return ''
    return package_path, contents
class ParameterSet():
    """Recursion class for processing complex dictionary schema."""

    def __init__(self, parameter_file, version):
        """
        Summary.

            Retains major and minor version numbers plus the raw json
            parameter dictionary for later substitution.

        Args:
            :parameter_file (str): path to json file obj containing
             parameter keys and values
            :version (str): current build version
        """
        self.parameter_dict = json.loads(read(parameter_file))
        self.version = version
        self.major = '.'.join(self.version.split('.')[:2])
        self.minor = self.version.split('.')[-1]

    def create(self, parameters=None):
        """
        Summary.

            Recursively walks the parameter dictionary, substituting
            build-specific values (in place) for the placeholder keys
            Version, Release, Source, and BuildDirName.

        Args:
            :parameters (dict): dictionary of all parameters used to gen rpm

        Returns:
            parameters (updated in place), TYPE: dict
        """
        if parameters is None:
            parameters = self.parameter_dict
        for key, value in parameters.items():
            if isinstance(value, dict):
                # descend into nested mapping
                self.create(value)
            elif key == 'Version':
                parameters[key] = self.major
            elif key == 'Release':
                parameters[key] = self.minor
            elif key == 'Source':
                parameters[key] = PROJECT + '-' + self.major + '.' + self.minor + '.tar.gz'
            elif key == 'BuildDirName':
                parameters[key] = PROJECT + '-' + self.major
        return parameters
def valid_version(parameter, min=0, max=100):
    """
    Summary.

        User input validation. Validates version string made up of integers.
        Example: '1.6.2'. Each integer in the version sequence must be in
        a range of >= min and <= max. Maximum version string components is 3
        (Example: 0.2.3). BUGFIX: strings with more than 3 components
        ('1.2.3.4') are now rejected; the original skipped validation for
        them entirely and returned True.

    Args:
        :parameter (str): Version string from user input
        :min (int): Minimum allowable integer value a single digit in version
            string provided as a parameter (name shadows builtin; kept for
            interface compatibility)
        :max (int): Maximum allowable integer value a single digit in a version
            string provided as a parameter

    Returns:
        True if parameter valid or None, False if invalid, TYPE: bool
    """
    # type correction and validation
    if parameter is None:
        return True    # None means "use default version" downstream
    elif isinstance(parameter, int):
        return False
    elif isinstance(parameter, float):
        parameter = str(parameter)
    component_list = parameter.split('.')
    if len(component_list) > 3:
        return False
    try:
        for component in component_list:
            if int(component) not in range(min, max + 1):
                return False
    except ValueError:
        # non-numeric component
        return False
    return True
def init_cli():
    """Collect parameters and call main.

    Returns:
        exit code from exit_codes table, TYPE: int (or bool on one
        failure path — see NOTE below)
    """
    try:
        parser = argparse.ArgumentParser(add_help=False)
        args = options(parser)
    except Exception as e:
        help_menu()
        stdout_message(str(e), 'ERROR')
        return exit_codes['E_MISC']['Code']
    if not os.path.isfile(args.parameter_file):
        stdout_message(
            message='Path to parmeters file not found. Abort',
            prefix='WARN'
        )
        return exit_codes['E_DEPENDENCY']['Code']
    if args.debug:
        # echo all parsed parameters
        print(debug_header)
        stdout_message(
            message='Set (--set-version):\t{}'.format(args.set),
            prefix='DBUG'
        )
        stdout_message(
            message='Build Flag (--build):\t{}'.format(args.build),
            prefix='DBUG'
        )
        stdout_message(
            message='Docker Image (--distro):\t{}'.format(args.distro),
            prefix='DBUG'
        )
        stdout_message(
            message='Parameter File (--parameters):\t{}'.format(args.parameter_file),
            prefix='DBUG'
        )
        stdout_message(
            message='Debug Flag:\t\t{}'.format(args.debug),
            prefix='DBUG'
        )
    if len(sys.argv) == 1:
        # no arguments given: show help
        help_menu()
        return exit_codes['EX_OK']['Code']
    elif args.help:
        help_menu()
        return exit_codes['EX_OK']['Code']
    elif args.build:
        libsrc = git_root() + '/' + 'core'
        if valid_version(args.set) and prebuild(TMPDIR, libsrc, VOLMNT, git_root() + '/' + args.parameter_file):
            package, contents = main(
                setVersion=args.set,
                environment=args.distro,
                package_configpath=git_root() + '/' + args.parameter_file,
                force=args.force,
                debug=args.debug
            )
            if package:
                stdout_message(f'New package created: {yl + package + rst}')
                stdout_message(f'RPM build process completed successfully. End', prefix='OK')
                if contents:
                    display_package_contents(package, contents)
                else:
                    # NOTE(review): build_root is not defined in this scope;
                    # reaching this branch would raise NameError
                    stdout_message(
                        message=f'Unable to locate a rpm contents file in {build_root}.',
                        prefix='WARN')
                    return False
                return exit_codes['EX_OK']['Code']
        else:
            stdout_message(
                '{}: Problem creating rpm installation package. Exit'.format(inspect.stack()[0][3]),
                prefix='WARN',
                severity='WARNING'
            )
            return exit_codes['E_MISC']['Code']
    elif not valid_version(args.set):
        stdout_message(
            'You must enter a valid version when using --set-version parameter. Ex: 1.6.3',
            prefix='WARN',
            severity='WARNING'
        )
        return exit_codes['E_DEPENDENCY']['Code']
    else:
        logger.warning('{} Failure in prebuild stage'.format(inspect.stack()[0][3]))
        return exit_codes['E_DEPENDENCY']['Code']
    return True
# module entry point: exit to the shell with init_cli()'s status code
sys.exit(init_cli())
| [
"subprocess.getoutput",
"tarfile.open",
"pyaws.utils.export_json_object",
"loggers.getLogger",
"pyaws.utils.stdout_message",
"sys.exit",
"os.walk",
"os.remove",
"os.path.exists",
"os.listdir",
"argparse.ArgumentParser",
"shutil.copy2",
"json.dumps",
"os.path.split",
"subprocess.call",
... | [((1443, 1469), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (1459, 1469), False, 'import os\n'), ((1614, 1631), 'docker.from_env', 'docker.from_env', ([], {}), '()\n', (1629, 1631), False, 'import docker\n'), ((2223, 2247), 'loggers.getLogger', 'loggers.getLogger', (['"""1.0"""'], {}), "('1.0')\n", (2240, 2247), False, 'import loggers\n'), ((5542, 5553), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (5551, 5553), False, 'import os\n'), ((5558, 5572), 'os.chdir', 'os.chdir', (['path'], {}), '(path)\n', (5566, 5572), False, 'import os\n'), ((6196, 6224), 'os.path.dirname', 'os.path.dirname', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (6211, 6224), False, 'import os\n'), ((7447, 7460), 'shutil.which', 'which', (['binary'], {}), '(binary)\n', (7452, 7460), False, 'from shutil import copytree, rmtree, which\n'), ((20163, 20178), 'os.listdir', 'os.listdir', (['src'], {}), '(src)\n', (20173, 20178), False, 'import os\n'), ((20287, 20365), 'shutil.copy2', 'copyfile', (["(docker_path + '/' + 'Dockerfile')", "(builddir_path + '/' + 'Dockerfile')"], {}), "(docker_path + '/' + 'Dockerfile', builddir_path + '/' + 'Dockerfile')\n", (20295, 20365), True, 'from shutil import copy2 as copyfile\n'), ((21891, 21914), 'os.path.split', 'os.path.split', (['contents'], {}), '(contents)\n', (21904, 21914), False, 'import os\n'), ((21925, 21936), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (21934, 21936), False, 'import os\n'), ((22791, 22804), 'os.chdir', 'os.chdir', (['pwd'], {}), '(pwd)\n', (22799, 22804), False, 'import os\n'), ((23993, 24056), 'shutil.copy2', 'copyfile', (["(src + '/' + buildscript)", "(builddir + '/' + buildscript)"], {}), "(src + '/' + buildscript, builddir + '/' + buildscript)\n", (24001, 24056), True, 'from shutil import copy2 as copyfile\n'), ((28276, 28354), 'pyaws.utils.stdout_message', 'stdout_message', (['f"""Current version of last build: {bd + CURRENT_VERSION + rst}"""'], {}), "(f'Current version of last build: {bd + 
CURRENT_VERSION + rst}')\n", (28290, 28354), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((28359, 28434), 'pyaws.utils.stdout_message', 'stdout_message', (['f"""Version to be used for this build: {act + VERSION + rst}"""'], {}), "(f'Version to be used for this build: {act + VERSION + rst}')\n", (28373, 28434), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((33994, 34009), 'os.walk', 'os.walk', (['origin'], {}), '(origin)\n', (34001, 34009), False, 'import os\n'), ((5102, 5189), 'pyaws.utils.stdout_message', 'stdout_message', ([], {'message': 'f"""bytecode_list contents: {bytecode_list}"""', 'prefix': '"""DEBUG"""'}), "(message=f'bytecode_list contents: {bytecode_list}', prefix=\n 'DEBUG')\n", (5116, 5189), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((5274, 5311), 'os.remove', 'os.remove', (["(directory + '/' + artifact)"], {}), "(directory + '/' + artifact)\n", (5283, 5311), False, 'import os\n'), ((5921, 5934), 'os.chdir', 'os.chdir', (['pwd'], {}), '(pwd)\n', (5929, 5934), False, 'import os\n'), ((9823, 9846), 'os.path.exists', 'os.path.exists', (['archive'], {}), '(archive)\n', (9837, 9846), False, 'import os\n'), ((11158, 11243), 'pyaws.utils.stdout_message', 'stdout_message', (['f"""Assembling build directory artifacts in {bn + builddir + rst}"""'], {}), "(f'Assembling build directory artifacts in {bn + builddir + rst}'\n )\n", (11172, 11243), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((11284, 11313), 'os.path.exists', 'os.path.exists', (['builddir_path'], {}), '(builddir_path)\n', (11298, 11313), False, 'import os\n'), ((11357, 11383), 'os.makedirs', 'os.makedirs', (['builddir_path'], {}), '(builddir_path)\n', (11368, 11383), False, 'import os\n'), ((11983, 12003), 'os.listdir', 'os.listdir', (['lib_path'], {}), '(lib_path)\n', (11993, 12003), False, 'import os\n'), ((12533, 12557), 'os.path.exists', 'os.path.exists', (['spec_dst'], {}), 
'(spec_dst)\n', (12547, 12557), False, 'import os\n'), ((12599, 12643), 'shutil.copy2', 'copyfile', (["(rpm_src + '/' + specfile)", 'spec_dst'], {}), "(rpm_src + '/' + specfile, spec_dst)\n", (12607, 12643), True, 'from shutil import copy2 as copyfile\n'), ((12694, 12718), 'os.path.exists', 'os.path.exists', (['spec_dst'], {}), '(spec_dst)\n', (12708, 12718), False, 'import os\n'), ((13076, 13100), 'os.path.exists', 'os.path.exists', (['comp_src'], {}), '(comp_src)\n', (13090, 13100), False, 'import os\n'), ((13270, 13294), 'os.path.exists', 'os.path.exists', (['comp_dst'], {}), '(comp_dst)\n', (13284, 13294), False, 'import os\n'), ((13875, 13886), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (13884, 13886), False, 'import os\n'), ((13899, 13923), 'os.path.exists', 'os.path.exists', (['builddir'], {}), '(builddir)\n', (13913, 13923), False, 'import os\n'), ((16277, 16378), 'pyaws.utils.stdout_message', 'stdout_message', (["('Generating build spec file and build artifacts in %s' % yl +\n builddir_path + rst)"], {}), "('Generating build spec file and build artifacts in %s' % yl +\n builddir_path + rst)\n", (16291, 16378), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((17902, 17945), 'os.path.exists', 'os.path.exists', (["(build_root + '/' + specfile)"], {}), "(build_root + '/' + specfile)\n", (17916, 17945), False, 'import os\n'), ((20215, 20247), 'shutil.copy2', 'copyfile', (['file', "(dst + '/' + file)"], {}), "(file, dst + '/' + file)\n", (20223, 20247), True, 'from shutil import copy2 as copyfile\n'), ((21844, 21867), 'os.path.split', 'os.path.split', (['rpm_path'], {}), '(rpm_path)\n', (21857, 21867), False, 'import os\n'), ((21941, 21954), 'os.chdir', 'os.chdir', (['"""."""'], {}), "('.')\n", (21949, 21954), False, 'import os\n'), ((21972, 21986), 'os.chdir', 'os.chdir', (['path'], {}), '(path)\n', (21980, 21986), False, 'import os\n'), ((23098, 23113), 'shutil.which', 'which', (['"""docker"""'], {}), "('docker')\n", (23103, 23113), 
False, 'from shutil import copytree, rmtree, which\n'), ((23193, 23269), 'pyaws.utils.stdout_message', 'stdout_message', (['"""Docker engine not running or not accessible"""'], {'prefix': '"""WARN"""'}), "('Docker engine not running or not accessible', prefix='WARN')\n", (23207, 23269), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((25312, 25359), 'pyaws.utils.stdout_message', 'stdout_message', (['"""Begin cp files into container"""'], {}), "('Begin cp files into container')\n", (25326, 25359), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((25434, 25452), 'os.chdir', 'os.chdir', (['builddir'], {}), '(builddir)\n', (25442, 25452), False, 'import os\n'), ((29325, 29370), 'pyaws.utils.stdout_message', 'stdout_message', (["('tgz archive built: %s' % msg)"], {}), "('tgz archive built: %s' % msg)\n", (29339, 29370), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((30910, 30935), 'subprocess.getoutput', 'subprocess.getoutput', (['cmd'], {}), '(cmd)\n', (30930, 30935), False, 'import subprocess\n'), ((32981, 33002), 'os.makedirs', 'os.makedirs', (['builddir'], {}), '(builddir)\n', (32992, 33002), False, 'import os\n'), ((33011, 33030), 'os.makedirs', 'os.makedirs', (['volmnt'], {}), '(volmnt)\n', (33022, 33030), False, 'import os\n'), ((33222, 33241), 'os.path.exists', 'os.path.exists', (['dst'], {}), '(dst)\n', (33236, 33241), False, 'import os\n'), ((33285, 33303), 'shutil.copy2', 'copyfile', (['src', 'dst'], {}), '(src, dst)\n', (33293, 33303), True, 'from shutil import copy2 as copyfile\n'), ((35235, 35260), 'subprocess.getoutput', 'subprocess.getoutput', (['cmd'], {}), '(cmd)\n', (35255, 35260), False, 'import subprocess\n'), ((35567, 35617), 'os.path.exists', 'os.path.exists', (["(scripts_dir + '/' + version_module)"], {}), "(scripts_dir + '/' + version_module)\n", (35581, 35617), False, 'import os\n'), ((39404, 39443), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], 
{'add_help': '(False)'}), '(add_help=False)\n', (39427, 39443), False, 'import argparse\n'), ((39618, 39653), 'os.path.isfile', 'os.path.isfile', (['args.parameter_file'], {}), '(args.parameter_file)\n', (39632, 39653), False, 'import os\n'), ((39663, 39748), 'pyaws.utils.stdout_message', 'stdout_message', ([], {'message': '"""Path to parmeters file not found. Abort"""', 'prefix': '"""WARN"""'}), "(message='Path to parmeters file not found. Abort', prefix='WARN'\n )\n", (39677, 39748), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((2411, 2436), 'subprocess.getoutput', 'subprocess.getoutput', (['cmd'], {}), '(cmd)\n', (2431, 2436), False, 'import subprocess\n'), ((5010, 5031), 'os.listdir', 'os.listdir', (['directory'], {}), '(directory)\n', (5020, 5031), False, 'import os\n'), ((5604, 5619), 'os.listdir', 'os.listdir', (['"""."""'], {}), "('.')\n", (5614, 5619), False, 'import os\n'), ((6894, 6940), 'subprocess.getoutput', 'subprocess.getoutput', (['f"""git checkout {branch}"""'], {}), "(f'git checkout {branch}')\n", (6914, 6940), False, 'import subprocess\n'), ((7481, 7508), 'distro.linux_distribution', 'distro.linux_distribution', ([], {}), '()\n', (7506, 7508), False, 'import distro\n'), ((7548, 7560), 'shutil.which', 'which', (['"""yum"""'], {}), "('yum')\n", (7553, 7560), False, 'from shutil import copytree, rmtree, which\n'), ((9703, 9732), 'tarfile.open', 'tarfile.open', (['archive', '"""w:gz"""'], {}), "(archive, 'w:gz')\n", (9715, 9732), False, 'import tarfile\n'), ((11327, 11348), 'shutil.rmtree', 'rmtree', (['builddir_path'], {}), '(builddir_path)\n', (11333, 11348), False, 'from shutil import copytree, rmtree, which\n'), ((11555, 11604), 'os.path.exists', 'os.path.exists', (["(builddir_path + '/' + PROJECT_BIN)"], {}), "(builddir_path + '/' + PROJECT_BIN)\n", (11569, 11604), False, 'import os\n'), ((11727, 11759), 'shutil.copy2', 'copyfile', (['binary_src', 'binary_dst'], {}), '(binary_src, binary_dst)\n', (11735, 11759), 
True, 'from shutil import copy2 as copyfile\n'), ((12571, 12590), 'os.remove', 'os.remove', (['spec_dst'], {}), '(spec_dst)\n', (12580, 12590), False, 'import os\n'), ((13117, 13141), 'os.path.exists', 'os.path.exists', (['comp_dst'], {}), '(comp_dst)\n', (13131, 13141), False, 'import os\n'), ((13191, 13219), 'shutil.copy2', 'copyfile', (['comp_src', 'comp_dst'], {}), '(comp_src, comp_dst)\n', (13199, 13219), True, 'from shutil import copy2 as copyfile\n'), ((13937, 13955), 'os.chdir', 'os.chdir', (['builddir'], {}), '(builddir)\n', (13945, 13955), False, 'import os\n'), ((14152, 14165), 'os.chdir', 'os.chdir', (['pwd'], {}), '(pwd)\n', (14160, 14165), False, 'import os\n'), ((14317, 14330), 'os.chdir', 'os.chdir', (['pwd'], {}), '(pwd)\n', (14325, 14330), False, 'import os\n'), ((18017, 18077), 'fileinput.input', 'fileinput.input', (["[build_root + '/' + specfile]"], {'inplace': '(True)'}), "([build_root + '/' + specfile], inplace=True)\n", (18032, 18077), False, 'import fileinput\n'), ((18159, 18228), 'pyaws.utils.stdout_message', 'stdout_message', (['f"""Updated {specfile} with MAJOR_VERSION"""'], {'prefix': '"""OK"""'}), "(f'Updated {specfile} with MAJOR_VERSION', prefix='OK')\n", (18173, 18228), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((18300, 18360), 'fileinput.input', 'fileinput.input', (["[build_root + '/' + specfile]"], {'inplace': '(True)'}), "([build_root + '/' + specfile], inplace=True)\n", (18315, 18360), False, 'import fileinput\n'), ((18442, 18511), 'pyaws.utils.stdout_message', 'stdout_message', (['f"""Updated {specfile} with MINOR_VERSION"""'], {'prefix': '"""OK"""'}), "(f'Updated {specfile} with MINOR_VERSION', prefix='OK')\n", (18456, 18511), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((18580, 18640), 'fileinput.input', 'fileinput.input', (["[build_root + '/' + specfile]"], {'inplace': '(True)'}), "([build_root + '/' + specfile], inplace=True)\n", (18595, 18640), False, 'import 
fileinput\n'), ((18724, 18810), 'pyaws.utils.stdout_message', 'stdout_message', (['f"""Updated {specfile} with DOCKERUSER ({dockeruser})"""'], {'prefix': '"""OK"""'}), "(f'Updated {specfile} with DOCKERUSER ({dockeruser})', prefix\n ='OK')\n", (18738, 18810), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((18876, 18936), 'fileinput.input', 'fileinput.input', (["[build_root + '/' + specfile]"], {'inplace': '(True)'}), "([build_root + '/' + specfile], inplace=True)\n", (18891, 18936), False, 'import fileinput\n'), ((19014, 19099), 'pyaws.utils.stdout_message', 'stdout_message', (['f"""Updated {specfile} with Dependencies ({deplist})"""'], {'prefix': '"""OK"""'}), "(f'Updated {specfile} with Dependencies ({deplist})', prefix='OK'\n )\n", (19028, 19099), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((19166, 19226), 'fileinput.input', 'fileinput.input', (["[build_root + '/' + specfile]"], {'inplace': '(True)'}), "([build_root + '/' + specfile], inplace=True)\n", (19181, 19226), False, 'import fileinput\n'), ((19312, 19379), 'pyaws.utils.stdout_message', 'stdout_message', (['f"""Updated {specfile} with PROJECT_URL"""'], {'prefix': '"""OK"""'}), "(f'Updated {specfile} with PROJECT_URL', prefix='OK')\n", (19326, 19379), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((19407, 19531), 'pyaws.utils.stdout_message', 'stdout_message', ([], {'message': 'f"""{specfile} not found in build directory. Cannot update... halting build."""', 'prefix': '"""WARN"""'}), "(message=\n f'{specfile} not found in build directory. Cannot update... 
halting build.'\n , prefix='WARN')\n", (19421, 19531), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((19567, 19578), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (19575, 19578), False, 'import sys\n'), ((24132, 24156), 'os.path.exists', 'os.path.exists', (['host_mnt'], {}), '(host_mnt)\n', (24146, 24156), False, 'import os\n'), ((24170, 24191), 'os.makedirs', 'os.makedirs', (['host_mnt'], {}), '(host_mnt)\n', (24181, 24191), False, 'import os\n'), ((24204, 24273), 'pyaws.utils.stdout_message', 'stdout_message', (['f"""Created host mount {host_mnt} for container volume"""'], {}), "(f'Created host mount {host_mnt} for container volume')\n", (24218, 24273), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((25166, 25237), 'pyaws.utils.stdout_message', 'stdout_message', (['f"""Container {cname} not started - abort"""'], {'prefix': '"""WARN"""'}), "(f'Container {cname} not started - abort', prefix='WARN')\n", (25180, 25237), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((26565, 26590), 'subprocess.getoutput', 'subprocess.getoutput', (['cmd'], {}), '(cmd)\n', (26585, 26590), False, 'import subprocess\n'), ((28216, 28260), 'sys.exit', 'sys.exit', (["exit_codes['E_DEPENDENCY']['Code']"], {}), "(exit_codes['E_DEPENDENCY']['Code'])\n", (28224, 28260), False, 'import sys\n'), ((28740, 28785), 'json.dumps', 'json.dumps', (['vars'], {'indent': '(True)', 'sort_keys': '(True)'}), '(vars, indent=True, sort_keys=True)\n', (28750, 28785), False, 'import json\n'), ((32894, 32972), 'pyaws.utils.stdout_message', 'stdout_message', (['f"""Removed pre-existing build artifacts ({builddir}, {volmnt})"""'], {}), "(f'Removed pre-existing build artifacts ({builddir}, {volmnt})')\n", (32908, 32972), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((33255, 33269), 'os.remove', 'os.remove', (['dst'], {}), '(dst)\n', (33264, 33269), False, 'import os\n'), ((35341, 35409), 
'pyaws.utils.stdout_message', 'stdout_message', (['f"""{container.name} successfully halted"""'], {'prefix': '"""OK"""'}), "(f'{container.name} successfully halted', prefix='OK')\n", (35355, 35409), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((35470, 35495), 'subprocess.getoutput', 'subprocess.getoutput', (['cmd'], {}), '(cmd)\n', (35490, 35495), False, 'import subprocess\n'), ((35631, 35676), 'os.remove', 'os.remove', (["(scripts_dir + '/' + version_module)"], {}), "(scripts_dir + '/' + version_module)\n", (35640, 35676), False, 'import os\n'), ((6241, 6269), 'os.path.join', 'os.path.join', (['basedir', 'fname'], {}), '(basedir, fname)\n', (6253, 6269), False, 'import os\n'), ((7671, 7683), 'shutil.which', 'which', (['"""rpm"""'], {}), "('rpm')\n", (7676, 7683), False, 'from shutil import copytree, rmtree, which\n'), ((12240, 12266), 'shutil.copy2', 'copyfile', (['lib_src', 'lib_dst'], {}), '(lib_src, lib_dst)\n', (12248, 12266), True, 'from shutil import copy2 as copyfile\n'), ((13159, 13178), 'os.remove', 'os.remove', (['comp_dst'], {}), '(comp_dst)\n', (13168, 13178), False, 'import os\n'), ((14113, 14138), 'subprocess.getoutput', 'subprocess.getoutput', (['cmd'], {}), '(cmd)\n', (14133, 14138), False, 'import subprocess\n'), ((21099, 21139), 'pyaws.utils.stdout_message', 'stdout_message', (['success_msg'], {'prefix': '"""OK"""'}), "(success_msg, prefix='OK')\n", (21113, 21139), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((21204, 21237), 'subprocess.getoutput', 'subprocess.getoutput', (['"""docker ps"""'], {}), "('docker ps')\n", (21224, 21237), False, 'import subprocess\n'), ((21251, 21291), 'pyaws.utils.stdout_message', 'stdout_message', (['success_msg'], {'prefix': '"""OK"""'}), "(success_msg, prefix='OK')\n", (21265, 21291), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((21342, 21399), 'pyaws.utils.stdout_message', 'stdout_message', (['f"""Container {cid} stopped"""'], 
{'prefix': '"""WARN"""'}), "(f'Container {cid} stopped', prefix='WARN')\n", (21356, 21399), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((23122, 23147), 'subprocess.getoutput', 'subprocess.getoutput', (['cmd'], {}), '(cmd)\n', (23142, 23147), False, 'import subprocess\n'), ((24424, 24485), 'pyaws.utils.stdout_message', 'stdout_message', (['"""Image already exists. Creating Container..."""'], {}), "('Image already exists. Creating Container...')\n", (24438, 24485), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((24563, 24576), 'os.chdir', 'os.chdir', (['src'], {}), '(src)\n', (24571, 24576), False, 'import os\n'), ((24649, 24692), 'subprocess.call', 'subprocess.call', (['[cmd]'], {'shell': '(True)', 'cwd': 'src'}), '([cmd], shell=True, cwd=src)\n', (24664, 24692), False, 'import subprocess\n'), ((24705, 24747), 'pyaws.utils.stdout_message', 'stdout_message', (['"""Built image"""'], {'prefix': '"""OK"""'}), "('Built image', prefix='OK')\n", (24719, 24747), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((25598, 25613), 'os.listdir', 'os.listdir', (['"""."""'], {}), "('.')\n", (25608, 25613), False, 'import os\n'), ((26102, 26127), 'subprocess.getoutput', 'subprocess.getoutput', (['cmd'], {}), '(cmd)\n', (26122, 26127), False, 'import subprocess\n'), ((26145, 26220), 'pyaws.utils.stdout_message', 'stdout_message', (['f"""{file} copied to container {container.name} successfully"""'], {}), "(f'{file} copied to container {container.name} successfully')\n", (26159, 26220), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((26255, 26345), 'pyaws.utils.stdout_message', 'stdout_message', (['f"""Problem copying {file} to container {container.name}"""'], {'prefix': '"""WARN"""'}), "(f'Problem copying {file} to container {container.name}',\n prefix='WARN')\n", (26269, 26345), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((31279, 31291), 
'shutil.which', 'which', (['"""yum"""'], {}), "('yum')\n", (31284, 31291), False, 'from shutil import copytree, rmtree, which\n'), ((32251, 32290), 'os.path.exists', 'os.path.exists', (["(libsrc + '/' + artifact)"], {}), "(libsrc + '/' + artifact)\n", (32265, 32290), False, 'import os\n'), ((32409, 32428), 'os.path.exists', 'os.path.exists', (['dir'], {}), '(dir)\n', (32423, 32428), False, 'import os\n'), ((7793, 7805), 'shutil.which', 'which', (['"""apt"""'], {}), "('apt')\n", (7798, 7805), False, 'from shutil import copytree, rmtree, which\n'), ((9781, 9809), 'os.path.basename', 'os.path.basename', (['source_dir'], {}), '(source_dir)\n', (9797, 9809), False, 'import os\n'), ((25713, 25747), 'pyaws.utils.export_json_object', 'export_json_object', (['buildfile_list'], {}), '(buildfile_list)\n', (25731, 25747), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((31425, 31437), 'shutil.which', 'which', (['"""dnf"""'], {}), "('dnf')\n", (31430, 31437), False, 'from shutil import copytree, rmtree, which\n'), ((32312, 32343), 'shutil.rmtree', 'rmtree', (["(libsrc + '/' + artifact)"], {}), "(libsrc + '/' + artifact)\n", (32318, 32343), False, 'from shutil import copytree, rmtree, which\n'), ((32446, 32457), 'shutil.rmtree', 'rmtree', (['dir'], {}), '(dir)\n', (32452, 32457), False, 'from shutil import copytree, rmtree, which\n'), ((34115, 34139), 'os.path.join', 'os.path.join', (['root', 'file'], {}), '(root, file)\n', (34127, 34139), False, 'import os\n'), ((35037, 35059), 'os.path.split', 'os.path.split', (['package'], {}), '(package)\n', (35050, 35059), False, 'import os\n'), ((31380, 31405), 'subprocess.getoutput', 'subprocess.getoutput', (['cmd'], {}), '(cmd)\n', (31400, 31405), False, 'import subprocess\n'), ((41302, 41362), 'pyaws.utils.stdout_message', 'stdout_message', (['f"""New package created: {yl + package + rst}"""'], {}), "(f'New package created: {yl + package + rst}')\n", (41316, 41362), False, 'from pyaws.utils import 
stdout_message, export_json_object\n'), ((41379, 41456), 'pyaws.utils.stdout_message', 'stdout_message', (['f"""RPM build process completed successfully. End"""'], {'prefix': '"""OK"""'}), "(f'RPM build process completed successfully. End', prefix='OK')\n", (41393, 41456), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((42176, 42315), 'pyaws.utils.stdout_message', 'stdout_message', (['"""You must enter a valid version when using --set-version parameter. Ex: 1.6.3"""'], {'prefix': '"""WARN"""', 'severity': '"""WARNING"""'}), "(\n 'You must enter a valid version when using --set-version parameter. Ex: 1.6.3'\n , prefix='WARN', severity='WARNING')\n", (42190, 42315), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((9976, 9991), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (9989, 9991), False, 'import inspect\n'), ((13586, 13601), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (13599, 13601), False, 'import inspect\n'), ((14470, 14485), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (14483, 14485), False, 'import inspect\n'), ((14641, 14656), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (14654, 14656), False, 'import inspect\n'), ((31526, 31551), 'subprocess.getoutput', 'subprocess.getoutput', (['cmd'], {}), '(cmd)\n', (31546, 31551), False, 'import subprocess\n'), ((31841, 31856), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (31854, 31856), False, 'import inspect\n'), ((33604, 33619), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (33617, 33619), False, 'import inspect\n'), ((36058, 36073), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (36071, 36073), False, 'import inspect\n'), ((36207, 36222), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (36220, 36222), False, 'import inspect\n'), ((41593, 41693), 'pyaws.utils.stdout_message', 'stdout_message', ([], {'message': 'f"""Unable to locate a rpm contents file in {build_root}."""', 'prefix': '"""WARN"""'}), "(message=\n 
f'Unable to locate a rpm contents file in {build_root}.', prefix='WARN')\n", (41607, 41693), False, 'from pyaws.utils import stdout_message, export_json_object\n'), ((5861, 5876), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (5874, 5876), False, 'import inspect\n'), ((6084, 6099), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (6097, 6099), False, 'import inspect\n'), ((7936, 7961), 'subprocess.getoutput', 'subprocess.getoutput', (['cmd'], {}), '(cmd)\n', (7956, 7961), False, 'import subprocess\n'), ((10146, 10161), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (10159, 10161), False, 'import inspect\n'), ((19808, 19823), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (19821, 19823), False, 'import inspect\n'), ((26797, 26812), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (26810, 26812), False, 'import inspect\n'), ((8233, 8248), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (8246, 8248), False, 'import inspect\n'), ((32634, 32649), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (32647, 32649), False, 'import inspect\n'), ((5643, 5677), 'subprocess.getoutput', 'subprocess.getoutput', (['"""git branch"""'], {}), "('git branch')\n", (5663, 5677), False, 'import subprocess\n'), ((31721, 31736), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (31734, 31736), False, 'import inspect\n'), ((41953, 41968), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (41966, 41968), False, 'import inspect\n'), ((42505, 42520), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (42518, 42520), False, 'import inspect\n')] |
import torch
import torch.nn.functional as F
from agent.td3 import TD3
class TD3MT(TD3):
def __init__(self,
state_dim,
action_dim,
max_action,
num_env,
discount=0.99,
tau=0.005,
policy_noise=0.2,
noise_clip=0.5,
policy_freq=2,
cuda_index=None
):
super().__init__(state_dim, action_dim, max_action,
discount, tau,
policy_noise, noise_clip,
policy_freq, cuda_index)
self.it = 0
self.total_it = [0 for _ in range(num_env)]
self.state_dim = state_dim
self.action_dim = action_dim
self.actor_optimizer_online = torch.optim.Adam(self.actor.parameters(), lr=3e-4)
self.critic_optimizer_online = torch.optim.Adam(self.critic.parameters(), lr=3e-4)
def save(self, filename):
super().save(filename)
torch.save(self.actor_optimizer_online.state_dict(), filename + "_actor_optimizer_online.pt")
torch.save(self.critic_optimizer_online.state_dict(), filename + "_critic_optimizer_online.pt")
def load(self, filename):
super().load(filename)
self.actor_optimizer_online.load_state_dict(torch.load(filename + "_actor_optimizer_online.pt"))
self.critic_optimizer_online.load_state_dict(torch.load(filename + "_critic_optimizer_online.pt"))
def pad_state(self, state):
return torch.cat([state,
torch.zeros(state.shape[0], self.state_dim - state.shape[1]).to(self.device)],
dim=1)
def pad_action(self, action):
return torch.cat([action,
torch.zeros(action.shape[0], self.action_dim - action.shape[1]).to(self.device)],
dim=1)
def train_mt(self, idx, teacher, replay, batch_size=100, is_offline=True):
self.total_it[idx] += 1
state, action, next_state, reward, not_done = replay.sample(batch_size)
state_dim_org = state.shape[1]
action_dim_org = action.shape[1]
with torch.no_grad():
state_pad = self.pad_state(state)
action_pad = self.pad_action(action)
if is_offline:
teacher_q1, teacher_q2 = teacher.critic(state, action)
else:
next_state_pad = self.pad_state(next_state)
next_action = self.actor_target(next_state_pad)
noise = (
torch.rand_like(next_action) * self.policy_noise
).clamp(-self.noise_clip, self.noise_clip)
next_action = (next_action + noise).clamp(-self.max_action, self.max_action)
next_action = next_action[:, :action_dim_org]
next_action_pad = self.pad_action(next_action)
target_q1, target_q2 = self.critic_target(next_state_pad, next_action_pad)
target_q = torch.min(target_q1, target_q2)
target_q = reward + not_done * self.discount * target_q
current_q1, current_q2 = self.critic(state_pad, action_pad)
if is_offline:
critic_loss = F.mse_loss(current_q1, teacher_q1) + F.mse_loss(current_q2, teacher_q2)
self.critic_optimizer.zero_grad()
critic_loss.backward()
self.critic_optimizer.step()
else:
critic_loss = F.mse_loss(current_q1, target_q) + F.mse_loss(current_q2, target_q)
self.critic_optimizer_online.zero_grad()
critic_loss.backward()
self.critic_optimizer_online.step()
loss = [None, critic_loss.cpu().data.numpy()]
if is_offline or self.total_it[idx] % self.policy_freq == 0:
current_action = self.actor(state_pad)[:, :action_dim_org]
current_action_pad = self.pad_action(current_action)
actor_loss_t = -teacher.critic.Q1(state, current_action)
if is_offline:
actor_loss = actor_loss_t.mean()
self.actor_optimizer.zero_grad()
actor_loss.backward()
self.actor_optimizer.step()
else:
actor_loss = -self.critic.Q1(state_pad, current_action_pad)
actor_loss = 1.0 * actor_loss + 1.0 * actor_loss_t
actor_loss = actor_loss.mean()
self.actor_optimizer_online.zero_grad()
actor_loss.backward()
self.actor_optimizer_online.step()
self.update_target_network()
loss[0] = actor_loss.cpu().data.numpy()
return loss
| [
"torch.nn.functional.mse_loss",
"torch.rand_like",
"torch.load",
"torch.min",
"torch.no_grad",
"torch.zeros"
] | [((1351, 1402), 'torch.load', 'torch.load', (["(filename + '_actor_optimizer_online.pt')"], {}), "(filename + '_actor_optimizer_online.pt')\n", (1361, 1402), False, 'import torch\n'), ((1457, 1509), 'torch.load', 'torch.load', (["(filename + '_critic_optimizer_online.pt')"], {}), "(filename + '_critic_optimizer_online.pt')\n", (1467, 1509), False, 'import torch\n'), ((2211, 2226), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2224, 2226), False, 'import torch\n'), ((3062, 3093), 'torch.min', 'torch.min', (['target_q1', 'target_q2'], {}), '(target_q1, target_q2)\n', (3071, 3093), False, 'import torch\n'), ((3284, 3318), 'torch.nn.functional.mse_loss', 'F.mse_loss', (['current_q1', 'teacher_q1'], {}), '(current_q1, teacher_q1)\n', (3294, 3318), True, 'import torch.nn.functional as F\n'), ((3321, 3355), 'torch.nn.functional.mse_loss', 'F.mse_loss', (['current_q2', 'teacher_q2'], {}), '(current_q2, teacher_q2)\n', (3331, 3355), True, 'import torch.nn.functional as F\n'), ((3518, 3550), 'torch.nn.functional.mse_loss', 'F.mse_loss', (['current_q1', 'target_q'], {}), '(current_q1, target_q)\n', (3528, 3550), True, 'import torch.nn.functional as F\n'), ((3553, 3585), 'torch.nn.functional.mse_loss', 'F.mse_loss', (['current_q2', 'target_q'], {}), '(current_q2, target_q)\n', (3563, 3585), True, 'import torch.nn.functional as F\n'), ((1603, 1663), 'torch.zeros', 'torch.zeros', (['state.shape[0]', '(self.state_dim - state.shape[1])'], {}), '(state.shape[0], self.state_dim - state.shape[1])\n', (1614, 1663), False, 'import torch\n'), ((1809, 1872), 'torch.zeros', 'torch.zeros', (['action.shape[0]', '(self.action_dim - action.shape[1])'], {}), '(action.shape[0], self.action_dim - action.shape[1])\n', (1820, 1872), False, 'import torch\n'), ((2616, 2644), 'torch.rand_like', 'torch.rand_like', (['next_action'], {}), '(next_action)\n', (2631, 2644), False, 'import torch\n')] |
# Standard libraries
import os
import json
import logging
from typing import Text
# Azure functions
import azure.functions as func
# Inference runtime
import onnxruntime as ort
from tokenizers import BertWordPieceTokenizer
# Helper scripts
from .PreprocessData import normalize_text, truncate_text
from .Predict import get_ids_and_masks, predict
# Initialize ONNX runtime and language model tokenizer
vocab_file_path = os.path.join(os.path.dirname(__file__), "Model/bert-base-uncased-vocab.txt")
onnx_file_path = os.path.join(os.path.dirname(__file__), "Model/watchdog_model.onnx")
tokenizer = BertWordPieceTokenizer(vocab_file_path)
tokenizer.enable_padding(pad_id=0, pad_token="[PAD]", length=128)
tokenizer.enable_truncation(max_length=128)
ort_session = ort.InferenceSession(onnx_file_path)
def main(req: func.HttpRequest) -> func.HttpResponse:
logging.info('Invoked TextQualityWatchdog Skill.')
try:
body = json.dumps(req.get_json())
if body:
logging.info(body)
values = json.loads(body)['values']
results = {}
results["values"] = []
for value in values:
text = value['data']['text']
# Apply puntuation and whitespace normalization, and convert to lowercase
text = normalize_text(text)
# Truncate the text to a maximum of 128 (default) whitespace separated tokens
text = truncate_text(text)
# Compute the input tokens and attention masks for the text sequence
input_ids, attention_masks = get_ids_and_masks(tokenizer, text)
# Call the ONNX model to perform inference on the input
flat_prediction = predict(ort_session, input_ids, attention_masks)
payload = (
{
"recordId": value['recordId'],
"data": {
"text_quality_warning": int(flat_prediction[0])
}
}
)
results["values"].append(payload)
result = json.dumps(results, ensure_ascii=False)
return func.HttpResponse(result, mimetype="application/json")
else:
return func.HttpResponse(
"Invalid body",
status_code=400
)
except ValueError:
return func.HttpResponse(
"Invalid body",
status_code=400
)
| [
"json.loads",
"tokenizers.BertWordPieceTokenizer",
"azure.functions.HttpResponse",
"json.dumps",
"onnxruntime.InferenceSession",
"os.path.dirname",
"logging.info"
] | [((599, 638), 'tokenizers.BertWordPieceTokenizer', 'BertWordPieceTokenizer', (['vocab_file_path'], {}), '(vocab_file_path)\n', (621, 638), False, 'from tokenizers import BertWordPieceTokenizer\n'), ((764, 800), 'onnxruntime.InferenceSession', 'ort.InferenceSession', (['onnx_file_path'], {}), '(onnx_file_path)\n', (784, 800), True, 'import onnxruntime as ort\n'), ((436, 461), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (451, 461), False, 'import os\n'), ((530, 555), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (545, 555), False, 'import os\n'), ((860, 910), 'logging.info', 'logging.info', (['"""Invoked TextQualityWatchdog Skill."""'], {}), "('Invoked TextQualityWatchdog Skill.')\n", (872, 910), False, 'import logging\n'), ((993, 1011), 'logging.info', 'logging.info', (['body'], {}), '(body)\n', (1005, 1011), False, 'import logging\n'), ((2222, 2261), 'json.dumps', 'json.dumps', (['results'], {'ensure_ascii': '(False)'}), '(results, ensure_ascii=False)\n', (2232, 2261), False, 'import json\n'), ((2282, 2336), 'azure.functions.HttpResponse', 'func.HttpResponse', (['result'], {'mimetype': '"""application/json"""'}), "(result, mimetype='application/json')\n", (2299, 2336), True, 'import azure.functions as func\n'), ((2371, 2421), 'azure.functions.HttpResponse', 'func.HttpResponse', (['"""Invalid body"""'], {'status_code': '(400)'}), "('Invalid body', status_code=400)\n", (2388, 2421), True, 'import azure.functions as func\n'), ((2507, 2557), 'azure.functions.HttpResponse', 'func.HttpResponse', (['"""Invalid body"""'], {'status_code': '(400)'}), "('Invalid body', status_code=400)\n", (2524, 2557), True, 'import azure.functions as func\n'), ((1033, 1049), 'json.loads', 'json.loads', (['body'], {}), '(body)\n', (1043, 1049), False, 'import json\n')] |
import tensorflow as tf
def get_record_parser_qqp(config, is_test=False):
def parse(example):
ques_limit = config.test_ques_limit if is_test else config.ques_limit
features = tf.parse_single_example(example,
features={
"ques1_idxs": tf.FixedLenFeature([], tf.string),
"ques2_idxs": tf.FixedLenFeature([], tf.string),
"label": tf.FixedLenFeature([], tf.string),
"id": tf.FixedLenFeature([], tf.int64)
})
ques1_idxs = tf.reshape(tf.decode_raw(
features["ques1_idxs"], tf.int32), [ques_limit + 2])
ques2_idxs = tf.reshape(tf.decode_raw(
features["ques2_idxs"], tf.int32), [ques_limit + 2])
label = tf.reshape(tf.decode_raw(
features["label"], tf.float32), [2])
qa_id = features["id"]
return ques1_idxs, ques2_idxs, label, qa_id
return parse
| [
"tensorflow.decode_raw",
"tensorflow.FixedLenFeature"
] | [((733, 780), 'tensorflow.decode_raw', 'tf.decode_raw', (["features['ques1_idxs']", 'tf.int32'], {}), "(features['ques1_idxs'], tf.int32)\n", (746, 780), True, 'import tensorflow as tf\n'), ((849, 896), 'tensorflow.decode_raw', 'tf.decode_raw', (["features['ques2_idxs']", 'tf.int32'], {}), "(features['ques2_idxs'], tf.int32)\n", (862, 896), True, 'import tensorflow as tf\n'), ((960, 1004), 'tensorflow.decode_raw', 'tf.decode_raw', (["features['label']", 'tf.float32'], {}), "(features['label'], tf.float32)\n", (973, 1004), True, 'import tensorflow as tf\n'), ((346, 379), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', (['[]', 'tf.string'], {}), '([], tf.string)\n', (364, 379), True, 'import tensorflow as tf\n'), ((442, 475), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', (['[]', 'tf.string'], {}), '([], tf.string)\n', (460, 475), True, 'import tensorflow as tf\n'), ((533, 566), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', (['[]', 'tf.string'], {}), '([], tf.string)\n', (551, 566), True, 'import tensorflow as tf\n'), ((621, 653), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', (['[]', 'tf.int64'], {}), '([], tf.int64)\n', (639, 653), True, 'import tensorflow as tf\n')] |
import numpy
import requests
import Quandl
import datetime
from pyrnkr.application import App
from pyrnkr.widgets import Line
from pyrnkr.formula import Trace
def extract_date_index(ts, format='%Y-%m-%d'):
    """Render the index of *ts* as a list of formatted date strings.

    Args:
        ts: pandas object whose index entries expose ``strftime``.
        format: strftime pattern applied to each index entry.

    Returns:
        List of formatted strings, in index order.
    """
    return list(map(lambda stamp: stamp.strftime(format), ts.index.tolist()))
class oil(App):
    """RNKR dashboard giving an overview of oil supply, consumption and prices.

    Each class-level string constant is a Quandl dataset code; ``execute``
    fetches every series, wraps it in a :class:`Line` widget and lays the
    widgets out on one page.
    """
    # This must be consistent with config.json
    required_parameters = []
    title = 'Overview of Oil'  # Appears on top of your application page
    subtitle = 'Supply, Consumption, and Prices of Oil Products'  # Subtitle to the above
    # Production Chart Primary
    MultiLineMultiTypeRigCount = "BKRHUGHES/RIGS_BY_BASIN_TOTAL_US_RIG_COUNT"  # Example data
    SingleLineRigTotal = "BKRHUGHES/RIGS_BY_STATE_TOTALUS_TOTAL"
    # Price Charts
    BRENT = "EIA/PET_RBRTE_D"
    WTI = "EIA/PET_RWTC_D"
    # Oil Secondary
    MiningUSOilProduction = "FRED/IPG211111CN"
    ImportsEndUseCrude = "FRED/IR10000"
    OilAndGasWells = "FRED/IPN213111N"
    PrivateFixedInvestmentWellsExploration = "FRED/E318RC1Q027SBEA"
    # Quandl Token
    TOKEN = ''  # YOUR TOKEN HERE

    def __init__(self, *args, **kwargs):
        super(oil, self).__init__(*args, **kwargs)

    def get_trace(self, symbol):
        """Fetch *symbol* from Quandl and return it as a :class:`Trace`.

        Raises:
            Exception: when the series is empty or the x/y lengths disagree.
        """
        data = Quandl.get(symbol, authtoken=self.TOKEN)
        if data.empty:
            # If you'd like to handle network errors or retry, do it here.
            raise Exception('could not load series from data source')
        # Data can be manipulated in node.js / python (pandas or numpy)
        datay = data[data.columns[0]].tolist()
        x = extract_date_index(data)
        if len(datay) != len(x):
            # Sanity check: trust but verify data feeds.
            raise Exception('x and y length do not match')
        tr = Trace(
            x = x,
            y = datay,
            extra = {
                'name': symbol
            }
        )
        return tr  # This returns the data to be plotted

    def execute(self, parameters):
        """Fetch every configured series and render the page layout.

        Returns:
            Tuple ``(layout, error)`` where *error* is always ``None`` here
            (failures raise instead).
        """
        # Create object to plot: one entry per Quandl code with its widget
        # title, subtitle and Bootstrap column width.
        res = {
            self.SingleLineRigTotal: {
                'title': 'Total U.S. Rig Counts',  # Title of individual plot
                'subtitle': 'Total U.S. Rotary Rig Counts',  # Subtitle of individual plot
                'dimension': 'col-md-12',  # Bootstrap column dimensions
            },
            self.BRENT: {
                'title': 'Brent Crude',
                'subtitle': 'USD Price of Brent Crude Oil',
                'dimension': 'col-md-6',
            },
            self.WTI: {
                'title': 'WTI Crude',
                'subtitle': 'USD Price of WTI Crude Oil',
                'dimension': 'col-md-6',
            },
            self.MiningUSOilProduction: {
                'title': 'US Oil Production',
                'subtitle': 'US Oil Production (Indexed 2012 = 100)',
                'dimension': 'col-md-6',
            },
            self.ImportsEndUseCrude: {
                'title': 'US Crude Imports',
                'subtitle': 'Crude Oil Imports (Indexed 2000 = 100) (Not Seasonally Adjusted)',
                'dimension': 'col-md-6',
            },
            self.OilAndGasWells: {
                'title': 'US Oil and Gas Wells',
                'subtitle': 'Drilling oil and gas wells',
                'dimension': 'col-md-6',
            },
            self.PrivateFixedInvestmentWellsExploration: {
                'title': 'Fixed Investment Wells and Exploration',
                'subtitle': '(In Billions USD) (Quarterly Seasonally Adjusted)',
                'dimension': 'col-md-6',
            }
        }
        # Because we are using only RNKR line plots iterate through the above
        # and create line widgets of variable size.
        # NOTE: .items() replaces the Python-2-only .iteritems().
        for k, v in res.items():
            ts = self.get_trace(k)
            res[k]['widget'] = Line(
                title=v['title'],
                subtitle=v['subtitle'],
                dimension=v['dimension'],
                traces=[ts]
            )
        # Render the layout object: primary array is the app page, secondary
        # arrays are each a Bootstrap row. Styling is dictated here and by
        # the column dimensions above.
        layout = self.render([
            [res[self.SingleLineRigTotal]['widget']],
            [
                res[self.WTI]['widget'],
                res[self.BRENT]['widget']
            ],
            [
                res[self.MiningUSOilProduction]['widget'],
                res[self.ImportsEndUseCrude]['widget']
            ],
            [
                res[self.OilAndGasWells]['widget'],
                res[self.PrivateFixedInvestmentWellsExploration]['widget']
            ],
        ])
        return layout, None
def handler(event, context):
    """AWS Lambda entry point.

    Inputs depend on your config.json.
    """
    layout, error = oil().run(event)
    if not error:
        return layout
    raise Exception(error)
# Left for convenience / example of debugging aws lambdas prior to upload
#import json
#print json.dumps(handler({}, {})) | [
"pyrnkr.widgets.Line",
"Quandl.get",
"pyrnkr.formula.Trace"
] | [((1239, 1279), 'Quandl.get', 'Quandl.get', (['symbol'], {'authtoken': 'self.TOKEN'}), '(symbol, authtoken=self.TOKEN)\n', (1249, 1279), False, 'import Quandl\n'), ((1751, 1794), 'pyrnkr.formula.Trace', 'Trace', ([], {'x': 'x', 'y': 'datay', 'extra': "{'name': symbol}"}), "(x=x, y=datay, extra={'name': symbol})\n", (1756, 1794), False, 'from pyrnkr.formula import Trace\n'), ((3812, 3901), 'pyrnkr.widgets.Line', 'Line', ([], {'title': "v['title']", 'subtitle': "v['subtitle']", 'dimension': "v['dimension']", 'traces': '[ts]'}), "(title=v['title'], subtitle=v['subtitle'], dimension=v['dimension'],\n traces=[ts])\n", (3816, 3901), False, 'from pyrnkr.widgets import Line\n')] |
from django.contrib.auth.password_validation import validate_password
from django.contrib.auth.tokens import PasswordResetTokenGenerator
from django.utils.encoding import force_str
from django.utils.http import urlsafe_base64_decode
from rest_framework.exceptions import AuthenticationFailed
from rest_framework.serializers import (
CharField, EmailField, ModelSerializer, Serializer, ValidationError)
from rest_framework.validators import UniqueValidator
from rest_framework_simplejwt.serializers import TokenObtainPairSerializer
from accounts.models import NGO, CustomUser, Donner
class MyTokenObtainPairSerializer(TokenObtainPairSerializer):
    """JWT pair serializer that embeds the user's email and type as claims."""

    @classmethod
    def get_token(cls, user):
        # Start from the standard simplejwt payload for this user.
        token = super(MyTokenObtainPairSerializer, cls).get_token(user)
        # Add custom claims
        token["email"] = user.email
        token["type"] = user.type
        return token
class DonnerDetailSerializer(ModelSerializer):
    """Read serializer for donor accounts, hiding auth/permission internals."""

    class Meta:
        model = Donner
        exclude = (
            "is_superuser",
            "is_staff",
            "last_login",
            "password",
            "country",
            "user_permissions",
            "groups",
        )
class NGODetailSerializer(ModelSerializer):
    """Read serializer for NGO accounts, hiding auth/permission internals."""

    class Meta:
        model = NGO
        # NOTE(review): unlike DonnerDetailSerializer, "groups" is not
        # excluded here — confirm that exposing it for NGOs is intended.
        exclude = (
            "is_superuser",
            "is_staff",
            "last_login",
            "password",
            "country",
            "user_permissions",
        )
class DonnerRegisterSerializer(ModelSerializer):
    """Registration serializer for donor accounts with password confirmation."""

    # Email must be unique among existing Donner accounts.
    email = EmailField(
        required=True, validators=[UniqueValidator(queryset=Donner.objects.all())]
    )
    # Validated against Django's configured password validators.
    password = CharField(write_only=True, required=True,
                         validators=[validate_password])
    # Confirmation field; checked against ``password`` in validate().
    password2 = CharField(write_only=True, required=True)

    class Meta:
        model = Donner
        fields = (
            "id",
            "type",
            "email",
            "password",
            "password2",
            "first_name",
            "last_name",
            "phone_number",
            "country",
            "state",
            "city",
            "pin",
            "DOB",
            "profile_photo",
        )
        extra_kwargs = {
            "first_name": {"required": True},
            "last_name": {"required": True},
            "password": {"write_only": True},
            "password2": {"write_only": True},
        }

    def validate(self, attrs):
        # Reject mismatched password/confirmation pairs.
        if attrs["password"] != attrs["password2"]:
            raise ValidationError(
                {"password": "Password fields didn't match."})
        return attrs

    def create(self, validated_data):
        # Build the user first, then hash the password via set_password
        # (never store the raw password).
        user = Donner.objects.create(
            email=validated_data["email"],
            first_name=validated_data["first_name"],
            last_name=validated_data["last_name"],
            type=validated_data["type"],
            country=validated_data["country"],
            phone_number=validated_data["phone_number"],
            state=validated_data["state"],
            city=validated_data["city"],
            pin=validated_data["pin"],
            DOB=validated_data["DOB"],
            profile_photo=validated_data["profile_photo"],
        )
        user.set_password(validated_data["password"])
        user.save()
        return user
class NGORegisterSerializer(ModelSerializer):
    """Registration serializer for NGO accounts with password confirmation."""

    # Email must be unique among existing NGO accounts.
    # FIX: the queryset previously checked Donner accounts (copy-paste from
    # DonnerRegisterSerializer), which let duplicate NGO emails through.
    email = EmailField(
        required=True, validators=[UniqueValidator(queryset=NGO.objects.all())]
    )
    # Validated against Django's configured password validators.
    password = CharField(write_only=True, required=True,
                         validators=[validate_password])
    # Confirmation field; checked against ``password`` in validate().
    password2 = CharField(write_only=True, required=True)

    class Meta:
        model = NGO
        fields = (
            "id",
            "email",
            "password",
            "password2",
            "name",
            "phone_number",
            "type",
            "country",
            "state",
            "city",
            "pin",
            "ngo_approval_cert",
        )
        extra_kwargs = {
            "name": {"required": True},
            # FIX: restore the scrubbed "write_only" key so the password is
            # never echoed back in responses.
            "password": {"write_only": True},
            "password2": {"write_only": True},
        }

    def validate(self, attrs):
        # Reject mismatched password/confirmation pairs.
        if attrs["password"] != attrs["password2"]:
            raise ValidationError(
                {"password": "Password fields didn't match."})
        return attrs

    def create(self, validated_data):
        # Build the user first, then hash the password via set_password.
        user = NGO.objects.create(
            email=validated_data["email"],
            name=validated_data["name"],
            type=validated_data["type"],
            phone_number=validated_data["phone_number"],
            country=validated_data["country"],
            state=validated_data["state"],
            city=validated_data["city"],
            pin=validated_data["pin"],
            ngo_approval_cert=validated_data["ngo_approval_cert"],
        )
        user.set_password(validated_data["password"])
        user.save()
        return user
class DonnerChangePasswordSerializer(ModelSerializer):
    """Change a donor's password after verifying the old one."""

    password = CharField(
        write_only=True, required=True, validators=[validate_password])
    password2 = CharField(write_only=True, required=True)
    old_password = CharField(write_only=True, required=True)

    class Meta:
        model = Donner
        # FIX: restore the scrubbed confirmation field name 'password2' so
        # validate() can actually receive it.
        fields = ('old_password', 'password', 'password2')

    def validate(self, attrs):
        # Reject mismatched password/confirmation pairs.
        if attrs['password'] != attrs['password2']:
            raise ValidationError(
                {"password": "Password fields didn't match."})
        return attrs

    def validate_old_password(self, value):
        # The requesting user must prove knowledge of the current password.
        user = self.context['request'].user
        if not user.check_password(value):
            raise ValidationError(
                {"old_password": "Old password is not correct"})
        return value

    def update(self, instance, validated_data):
        # Hash and persist the new password.
        instance.set_password(validated_data['password'])
        instance.save()
        return instance
class NGOChangePasswordSerializer(ModelSerializer):
    """Change an NGO account's password after verifying the old one."""

    password = CharField(
        write_only=True, required=True, validators=[validate_password])
    password2 = CharField(write_only=True, required=True)
    old_password = CharField(write_only=True, required=True)

    class Meta:
        model = NGO
        # FIX: restore the scrubbed confirmation field name 'password2' so
        # validate() can actually receive it.
        fields = ('old_password', 'password', 'password2')

    def validate(self, attrs):
        # Reject mismatched password/confirmation pairs.
        if attrs['password'] != attrs['password2']:
            raise ValidationError(
                {"password": "Password fields didn't match."})
        return attrs

    def validate_old_password(self, value):
        # The requesting user must prove knowledge of the current password.
        user = self.context['request'].user
        if not user.check_password(value):
            raise ValidationError(
                {"old_password": "Old password is not correct"})
        return value

    def update(self, instance, validated_data):
        # Hash and persist the new password.
        instance.set_password(validated_data['password'])
        instance.save()
        return instance
class DonnerUpdateUserSerializer(ModelSerializer):
    """Update a donor's profile fields (name, contact, location, photo)."""

    class Meta:
        model = Donner
        fields = (
            "first_name",
            "last_name",
            "phone_number",
            "country",
            "state",
            "city",
            "pin",
            "DOB",
            "profile_photo",
        )

    def validate_email(self, value):
        # NOTE(review): 'email' is not in Meta.fields, so DRF will never
        # invoke this validator — confirm whether email updates were meant
        # to be supported here.
        user = self.context['request'].user
        if Donner.objects.exclude(pk=user.pk).filter(email=value).exists():
            raise ValidationError({"email": "This email is already in use."})
        return value

    def update(self, instance, validated_data):
        # Copy every editable field onto the instance and persist it.
        instance.first_name = validated_data['first_name']
        instance.last_name = validated_data['last_name']
        instance.phone_number = validated_data['phone_number']
        instance.country = validated_data['country']
        instance.state = validated_data['state']
        instance.city = validated_data['city']
        instance.pin = validated_data['pin']
        instance.DOB = validated_data['DOB']
        instance.profile_photo = validated_data['profile_photo']
        instance.save()
        return instance
class NGOUpdateUserSerializer(ModelSerializer):
    """Update an NGO account's profile fields (name, contact, location, cert)."""

    class Meta:
        model = NGO
        fields = (
            "name",
            "phone_number",
            "country",
            "state",
            "city",
            "pin",
            "ngo_approval_cert",
        )

    def validate_email(self, value):
        # NOTE(review): 'email' is not in Meta.fields, so DRF will never
        # invoke this validator — confirm whether email updates were meant
        # to be supported here.
        user = self.context['request'].user
        if NGO.objects.exclude(pk=user.pk).filter(email=value).exists():
            raise ValidationError({"email": "This email is already in use."})
        return value

    def update(self, instance, validated_data):
        # Copy every editable field onto the instance and persist it.
        instance.name = validated_data['name']
        instance.phone_number = validated_data['phone_number']
        instance.country = validated_data['country']
        instance.state = validated_data['state']
        instance.city = validated_data['city']
        instance.pin = validated_data['pin']
        instance.ngo_approval_cert = validated_data['ngo_approval_cert']
        instance.save()
        return instance
class EmailResetPasswordSerializer(Serializer):
    """Accepts the email address for which a password-reset link is requested."""

    email = EmailField(min_length=5)

    class Meta:
        fields = ['email']
class SetNewPasswordSerializer(Serializer):
    """Validate a password-reset link (uidb64 + token) and set the new password."""

    password = CharField(min_length=6, max_length=100, write_only=True)
    uidb64 = CharField(min_length=1, write_only=True)
    token = CharField(min_length=1, write_only=True)

    class Meta:
        fields = ["password", "uidb64", "token"]

    def validate(self, attrs):
        # NOTE(review): unconventionally returns the user object instead of
        # ``attrs``; callers appear to rely on this — confirm before changing.
        try:
            password = attrs.get("password")
            uidb64 = attrs.get("uidb64")
            token = attrs.get("token")
            # Decode the base64-encoded user id embedded in the reset link.
            id = force_str(urlsafe_base64_decode(uidb64))
            user = CustomUser.objects.get(id=id)
            # Reject stale or tampered reset tokens.
            if not PasswordResetTokenGenerator().check_token(user, token):
                raise AuthenticationFailed("The reset link is invalid", 401)
            user.set_password(password)
            user.save()
            return (user)
        except Exception:
            # NOTE(review): this broad handler also swallows errors from
            # set_password()/save() and reports them as an invalid link —
            # confirm that is intended.
            raise AuthenticationFailed("The reset link is invalid", 401)
| [
"rest_framework.serializers.EmailField",
"accounts.models.Donner.objects.create",
"accounts.models.NGO.objects.create",
"accounts.models.CustomUser.objects.get",
"accounts.models.Donner.objects.exclude",
"rest_framework.serializers.ValidationError",
"rest_framework.exceptions.AuthenticationFailed",
"d... | [((1638, 1711), 'rest_framework.serializers.CharField', 'CharField', ([], {'write_only': '(True)', 'required': '(True)', 'validators': '[validate_password]'}), '(write_only=True, required=True, validators=[validate_password])\n', (1647, 1711), False, 'from rest_framework.serializers import CharField, EmailField, ModelSerializer, Serializer, ValidationError\n'), ((1753, 1794), 'rest_framework.serializers.CharField', 'CharField', ([], {'write_only': '(True)', 'required': '(True)'}), '(write_only=True, required=True)\n', (1762, 1794), False, 'from rest_framework.serializers import CharField, EmailField, ModelSerializer, Serializer, ValidationError\n'), ((3477, 3550), 'rest_framework.serializers.CharField', 'CharField', ([], {'write_only': '(True)', 'required': '(True)', 'validators': '[validate_password]'}), '(write_only=True, required=True, validators=[validate_password])\n', (3486, 3550), False, 'from rest_framework.serializers import CharField, EmailField, ModelSerializer, Serializer, ValidationError\n'), ((3592, 3633), 'rest_framework.serializers.CharField', 'CharField', ([], {'write_only': '(True)', 'required': '(True)'}), '(write_only=True, required=True)\n', (3601, 3633), False, 'from rest_framework.serializers import CharField, EmailField, ModelSerializer, Serializer, ValidationError\n'), ((5015, 5088), 'rest_framework.serializers.CharField', 'CharField', ([], {'write_only': '(True)', 'required': '(True)', 'validators': '[validate_password]'}), '(write_only=True, required=True, validators=[validate_password])\n', (5024, 5088), False, 'from rest_framework.serializers import CharField, EmailField, ModelSerializer, Serializer, ValidationError\n'), ((5114, 5155), 'rest_framework.serializers.CharField', 'CharField', ([], {'write_only': '(True)', 'required': '(True)'}), '(write_only=True, required=True)\n', (5123, 5155), False, 'from rest_framework.serializers import CharField, EmailField, ModelSerializer, Serializer, ValidationError\n'), ((5175, 5216), 
'rest_framework.serializers.CharField', 'CharField', ([], {'write_only': '(True)', 'required': '(True)'}), '(write_only=True, required=True)\n', (5184, 5216), False, 'from rest_framework.serializers import CharField, EmailField, ModelSerializer, Serializer, ValidationError\n'), ((6000, 6073), 'rest_framework.serializers.CharField', 'CharField', ([], {'write_only': '(True)', 'required': '(True)', 'validators': '[validate_password]'}), '(write_only=True, required=True, validators=[validate_password])\n', (6009, 6073), False, 'from rest_framework.serializers import CharField, EmailField, ModelSerializer, Serializer, ValidationError\n'), ((6099, 6140), 'rest_framework.serializers.CharField', 'CharField', ([], {'write_only': '(True)', 'required': '(True)'}), '(write_only=True, required=True)\n', (6108, 6140), False, 'from rest_framework.serializers import CharField, EmailField, ModelSerializer, Serializer, ValidationError\n'), ((6160, 6201), 'rest_framework.serializers.CharField', 'CharField', ([], {'write_only': '(True)', 'required': '(True)'}), '(write_only=True, required=True)\n', (6169, 6201), False, 'from rest_framework.serializers import CharField, EmailField, ModelSerializer, Serializer, ValidationError\n'), ((9156, 9180), 'rest_framework.serializers.EmailField', 'EmailField', ([], {'min_length': '(5)'}), '(min_length=5)\n', (9166, 9180), False, 'from rest_framework.serializers import CharField, EmailField, ModelSerializer, Serializer, ValidationError\n'), ((9286, 9342), 'rest_framework.serializers.CharField', 'CharField', ([], {'min_length': '(6)', 'max_length': '(100)', 'write_only': '(True)'}), '(min_length=6, max_length=100, write_only=True)\n', (9295, 9342), False, 'from rest_framework.serializers import CharField, EmailField, ModelSerializer, Serializer, ValidationError\n'), ((9356, 9396), 'rest_framework.serializers.CharField', 'CharField', ([], {'min_length': '(1)', 'write_only': '(True)'}), '(min_length=1, write_only=True)\n', (9365, 9396), False, 'from 
rest_framework.serializers import CharField, EmailField, ModelSerializer, Serializer, ValidationError\n'), ((9409, 9449), 'rest_framework.serializers.CharField', 'CharField', ([], {'min_length': '(1)', 'write_only': '(True)'}), '(min_length=1, write_only=True)\n', (9418, 9449), False, 'from rest_framework.serializers import CharField, EmailField, ModelSerializer, Serializer, ValidationError\n'), ((2659, 3084), 'accounts.models.Donner.objects.create', 'Donner.objects.create', ([], {'email': "validated_data['email']", 'first_name': "validated_data['first_name']", 'last_name': "validated_data['last_name']", 'type': "validated_data['type']", 'country': "validated_data['country']", 'phone_number': "validated_data['phone_number']", 'state': "validated_data['state']", 'city': "validated_data['city']", 'pin': "validated_data['pin']", 'DOB': "validated_data['DOB']", 'profile_photo': "validated_data['profile_photo']"}), "(email=validated_data['email'], first_name=\n validated_data['first_name'], last_name=validated_data['last_name'],\n type=validated_data['type'], country=validated_data['country'],\n phone_number=validated_data['phone_number'], state=validated_data[\n 'state'], city=validated_data['city'], pin=validated_data['pin'], DOB=\n validated_data['DOB'], profile_photo=validated_data['profile_photo'])\n", (2680, 3084), False, 'from accounts.models import NGO, CustomUser, Donner\n'), ((4398, 4752), 'accounts.models.NGO.objects.create', 'NGO.objects.create', ([], {'email': "validated_data['email']", 'name': "validated_data['name']", 'type': "validated_data['type']", 'phone_number': "validated_data['phone_number']", 'country': "validated_data['country']", 'state': "validated_data['state']", 'city': "validated_data['city']", 'pin': "validated_data['pin']", 'ngo_approval_cert': "validated_data['ngo_approval_cert']"}), "(email=validated_data['email'], name=validated_data[\n 'name'], type=validated_data['type'], phone_number=validated_data[\n 'phone_number'], 
country=validated_data['country'], state=\n validated_data['state'], city=validated_data['city'], pin=\n validated_data['pin'], ngo_approval_cert=validated_data[\n 'ngo_approval_cert'])\n", (4416, 4752), False, 'from accounts.models import NGO, CustomUser, Donner\n'), ((2503, 2565), 'rest_framework.serializers.ValidationError', 'ValidationError', (['{\'password\': "Password fields didn\'t match."}'], {}), '({\'password\': "Password fields didn\'t match."})\n', (2518, 2565), False, 'from rest_framework.serializers import CharField, EmailField, ModelSerializer, Serializer, ValidationError\n'), ((4242, 4304), 'rest_framework.serializers.ValidationError', 'ValidationError', (['{\'password\': "Password fields didn\'t match."}'], {}), '({\'password\': "Password fields didn\'t match."})\n', (4257, 4304), False, 'from rest_framework.serializers import CharField, EmailField, ModelSerializer, Serializer, ValidationError\n'), ((5419, 5481), 'rest_framework.serializers.ValidationError', 'ValidationError', (['{\'password\': "Password fields didn\'t match."}'], {}), '({\'password\': "Password fields didn\'t match."})\n', (5434, 5481), False, 'from rest_framework.serializers import CharField, EmailField, ModelSerializer, Serializer, ValidationError\n'), ((5671, 5735), 'rest_framework.serializers.ValidationError', 'ValidationError', (["{'old_password': 'Old password is not correct'}"], {}), "({'old_password': 'Old password is not correct'})\n", (5686, 5735), False, 'from rest_framework.serializers import CharField, EmailField, ModelSerializer, Serializer, ValidationError\n'), ((6401, 6463), 'rest_framework.serializers.ValidationError', 'ValidationError', (['{\'password\': "Password fields didn\'t match."}'], {}), '({\'password\': "Password fields didn\'t match."})\n', (6416, 6463), False, 'from rest_framework.serializers import CharField, EmailField, ModelSerializer, Serializer, ValidationError\n'), ((6653, 6717), 'rest_framework.serializers.ValidationError', 'ValidationError', 
(["{'old_password': 'Old password is not correct'}"], {}), "({'old_password': 'Old password is not correct'})\n", (6668, 6717), False, 'from rest_framework.serializers import CharField, EmailField, ModelSerializer, Serializer, ValidationError\n'), ((7421, 7480), 'rest_framework.serializers.ValidationError', 'ValidationError', (["{'email': 'This email is already in use.'}"], {}), "({'email': 'This email is already in use.'})\n", (7436, 7480), False, 'from rest_framework.serializers import CharField, EmailField, ModelSerializer, Serializer, ValidationError\n'), ((8537, 8596), 'rest_framework.serializers.ValidationError', 'ValidationError', (["{'email': 'This email is already in use.'}"], {}), "({'email': 'This email is already in use.'})\n", (8552, 8596), False, 'from rest_framework.serializers import CharField, EmailField, ModelSerializer, Serializer, ValidationError\n'), ((9764, 9793), 'accounts.models.CustomUser.objects.get', 'CustomUser.objects.get', ([], {'id': 'id'}), '(id=id)\n', (9786, 9793), False, 'from accounts.models import NGO, CustomUser, Donner\n'), ((9714, 9743), 'django.utils.http.urlsafe_base64_decode', 'urlsafe_base64_decode', (['uidb64'], {}), '(uidb64)\n', (9735, 9743), False, 'from django.utils.http import urlsafe_base64_decode\n'), ((9892, 9946), 'rest_framework.exceptions.AuthenticationFailed', 'AuthenticationFailed', (['"""The reset link is invalid"""', '(401)'], {}), "('The reset link is invalid', 401)\n", (9912, 9946), False, 'from rest_framework.exceptions import AuthenticationFailed\n'), ((10083, 10137), 'rest_framework.exceptions.AuthenticationFailed', 'AuthenticationFailed', (['"""The reset link is invalid"""', '(401)'], {}), "('The reset link is invalid', 401)\n", (10103, 10137), False, 'from rest_framework.exceptions import AuthenticationFailed\n'), ((1593, 1613), 'accounts.models.Donner.objects.all', 'Donner.objects.all', ([], {}), '()\n', (1611, 1613), False, 'from accounts.models import NGO, CustomUser, Donner\n'), ((3432, 3452), 
'accounts.models.Donner.objects.all', 'Donner.objects.all', ([], {}), '()\n', (3450, 3452), False, 'from accounts.models import NGO, CustomUser, Donner\n'), ((7338, 7372), 'accounts.models.Donner.objects.exclude', 'Donner.objects.exclude', ([], {'pk': 'user.pk'}), '(pk=user.pk)\n', (7360, 7372), False, 'from accounts.models import NGO, CustomUser, Donner\n'), ((8457, 8488), 'accounts.models.NGO.objects.exclude', 'NGO.objects.exclude', ([], {'pk': 'user.pk'}), '(pk=user.pk)\n', (8476, 8488), False, 'from accounts.models import NGO, CustomUser, Donner\n'), ((9814, 9843), 'django.contrib.auth.tokens.PasswordResetTokenGenerator', 'PasswordResetTokenGenerator', ([], {}), '()\n', (9841, 9843), False, 'from django.contrib.auth.tokens import PasswordResetTokenGenerator\n')] |
#!/usr/bin/env python
import argparse
import os
from PIL import Image
# Android launcher-icon edge length (pixels) for each mipmap density bucket.
densities = {
    'mdpi': 48,
    'hdpi': 72,
    'xhdpi': 96,
    'xxhdpi': 144,
    'xxxhdpi': 192
}
class PathAction(argparse.Action):
    """argparse action that requires an existing directory path.

    A single trailing ``/`` is stripped before the value is stored on the
    namespace.
    """

    def __call__(self, parser, namespace, value, options_string=None):
        # Existence is checked first so the error message distinguishes a
        # missing path from a non-directory one.
        if not os.path.exists(value):
            raise argparse.ArgumentError(
                self, 'Path does not exist: {}'.format(value))
        if not os.path.isdir(value):
            raise argparse.ArgumentError(
                self, 'Path is not a directory: {}'.format(value))
        cleaned = value[:-1] if value.endswith('/') else value
        setattr(namespace, self.dest, cleaned)
def main():
    """Render the source icon into every Android mipmap density size."""
    args = _parse_args()
    master = Image.open(args.source)
    for density_name, edge in densities.items():
        target = _create_output_file(args.res_dir, density_name, args.filename)
        print('Writing: {}'.format(target))
        scaled = master.resize((edge, edge), Image.LANCZOS)
        scaled.save(target, 'PNG')
        scaled.close()
    print('...done!')
def _parse_args():
    """Parse CLI arguments: source image, output icon filename, res dir."""
    cli = argparse.ArgumentParser()
    cli.add_argument('source', type=argparse.FileType('r'))
    cli.add_argument('filename')
    cli.add_argument('res_dir', action=PathAction)
    return cli.parse_args()
def _create_output_file(res_dir, density, filename):
path = '{}/mipmap-{}'.format(res_dir, density)
if not os.path.exists(path):
os.makedirs(path)
return '{}/{}'.format(path, filename)
# Script entry point: generate the launcher icons when run directly.
if __name__ == '__main__':
    main()
| [
"os.path.exists",
"argparse.FileType",
"PIL.Image.open",
"os.makedirs",
"argparse.ArgumentParser",
"os.path.isdir"
] | [((622, 645), 'PIL.Image.open', 'Image.open', (['args.source'], {}), '(args.source)\n', (632, 645), False, 'from PIL import Image\n'), ((1007, 1032), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1030, 1032), False, 'import argparse\n'), ((1315, 1335), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (1329, 1335), False, 'import os\n'), ((1339, 1356), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (1350, 1356), False, 'import os\n'), ((273, 294), 'os.path.exists', 'os.path.exists', (['value'], {}), '(value)\n', (287, 294), False, 'import os\n'), ((384, 404), 'os.path.isdir', 'os.path.isdir', (['value'], {}), '(value)\n', (397, 404), False, 'import os\n'), ((1069, 1091), 'argparse.FileType', 'argparse.FileType', (['"""r"""'], {}), "('r')\n", (1086, 1091), False, 'import argparse\n')] |
import re
# Python 2/3 compatibility: the builtin ``long`` is gone on Python 3, so
# alias it to ``int`` when it is not defined.
try:
    long
except NameError:
    long = int
# list of prod source label for pilot tests
list_ptest_prod_sources = ['ptest', 'rc_test', 'rc_test2', 'rc_alrb']
# mapping with prodsourcelabels that belong to analysis and production
analy_sources = ['user', 'panda']
prod_sources = ['managed', 'prod_test']
# labels whose job type depends on the queue type rather than the label
neutral_sources = ['install'] + list_ptest_prod_sources
# canonical job-type labels
ANALY_PS = 'user'
PROD_PS = 'managed'
# canonical task-type labels
ANALY_TASKTYPE = 'anal'
PROD_TASKTYPE = 'prod'
job_labels = [ANALY_PS, PROD_PS]
# priority of tasks to jumbo over others
priorityTasksToJumpOver = 1500
def translate_resourcetype_to_cores(resource_type, cores_queue):
    """Map a resource type to its core multiplier.

    Multi-core ('MCORE') resource types use the queue's core count; all
    other resource types count as a single core.
    """
    return cores_queue if 'MCORE' in resource_type else 1
def translate_prodsourcelabel_to_jobtype(queue_type, prodsourcelabel):
    """Resolve a prodsourcelabel to a job type (analysis or production).

    Neutral labels (install / pilot tests) inherit the job type from the
    queue type; labels with no known mapping are returned unchanged.
    """
    if prodsourcelabel in analy_sources:
        return ANALY_PS
    if prodsourcelabel in prod_sources:
        return PROD_PS
    if prodsourcelabel in neutral_sources:
        if queue_type in ('unified', 'production'):
            return PROD_PS
        if queue_type == 'analysis':
            return ANALY_PS
    # currently unmapped
    return prodsourcelabel
def translate_tasktype_to_jobtype(task_type):
    """Map a task type to a job type; unrecognized types default to production."""
    return ANALY_PS if task_type == ANALY_TASKTYPE else PROD_PS
# get core count
def getCoreCount(actualCoreCount, defCoreCount, jobMetrics):
    """Determine a job's core count.

    Precedence: the measured *actualCoreCount*, then a ``coreCount=N``
    token found in *jobMetrics*, then the job definition's *defCoreCount*
    (when set and non-zero), and finally 1. Any unexpected error falls
    back to the value resolved so far.
    """
    coreCount = 1
    try:
        if actualCoreCount is not None:
            coreCount = actualCoreCount
        else:
            tmpMatch = None
            if jobMetrics is not None:
                # extract coreCount from a "coreCount=N" token
                tmpMatch = re.search(r'coreCount=(\d+)', jobMetrics)
            if tmpMatch is not None:
                # int() replaces the Python-2 long() call; the module-level
                # shim makes them equivalent, and int() works standalone.
                coreCount = int(tmpMatch.group(1))
            else:
                # use the job definition, treating 0 as "unset"
                if defCoreCount not in [None, 0]:
                    coreCount = defCoreCount
    except Exception:
        pass
    return coreCount
# get HS06sec
def getHS06sec(startTime, endTime, corePower, coreCount, baseWalltime=0, cpuEfficiency=100):
    """Compute the HS06-seconds consumed over a job interval.

    Returns 0 when efficiency is zero or the walltime does not exceed
    *baseWalltime*, and None when the inputs cannot be processed.
    """
    try:
        if cpuEfficiency == 0:
            # no scaling requested
            return 0
        delta = endTime - startTime
        walltime = delta.days * 24 * 3600 + delta.seconds
        if walltime <= baseWalltime:
            return 0
        return float(walltime - baseWalltime) * corePower * coreCount * float(cpuEfficiency) / 100.0
    except Exception:
        return None
# parse string for number of standby jobs
def parseNumStandby(catchall):
    """Parse the ``nStandby=`` token of a comma-separated catchall string.

    The token has the form ``nStandby=<sw_id>:<resource_type>:<num>|...``;
    an empty ``<num>`` means 0, and a non-numeric ``<sw_id>`` is kept as a
    string. Only the first ``nStandby=`` token found is used.

    Returns a dict mapping sw_id -> {resource_type: num}; empty when the
    token is absent or *catchall* is None.
    """
    standby_map = {}
    if catchall is None:
        return standby_map
    for token in catchall.split(','):
        match = re.search('^nStandby=(.+)', token)
        if match is None:
            continue
        for entry in match.group(1).split('|'):
            fields = entry.split(':')
            if len(fields) != 3:
                continue
            sw_id, resource_type, num = fields
            try:
                sw_id = int(sw_id)
            except Exception:
                pass
            num = 0 if num == '' else int(num)
            standby_map.setdefault(sw_id, {})[resource_type] = num
        # only the first nStandby= token is honoured
        break
    return standby_map
# compensate memory count to prevent jobs with ramCount close to the HIMEM border from going to HIMEM PQs
def compensate_ram_count(ram_count):
    """Scale ramCount down by 10% so values near the HIMEM border stay below it.

    Both the literal string 'NULL' and None map to None.
    """
    if ram_count == 'NULL' or ram_count is None:
        return None
    return int(ram_count * 0.90)
| [
"re.search"
] | [((2824, 2860), 're.search', 're.search', (['"""^nStandby=(.+)"""', 'tmpItem'], {}), "('^nStandby=(.+)', tmpItem)\n", (2833, 2860), False, 'import re\n'), ((1758, 1799), 're.search', 're.search', (['"""coreCount=(\\\\d+)"""', 'jobMetrics'], {}), "('coreCount=(\\\\d+)', jobMetrics)\n", (1767, 1799), False, 'import re\n')] |
# Detect whether we are running inside an interactive IPython/Jupyter
# session and pick the matching plotting backend. The misspelled flag
# name ``i_am_in_interatcive`` is kept as-is since other modules may
# import it by that name.
try:
    from IPython import get_ipython
    # get_ipython() returns None-classed shells outside IPython kernels.
    if get_ipython().__class__.__name__ not in ['NoneType']:
        from IPython import display
        i_am_in_interatcive = True
        import pylab as pl
        pl.rcParams['figure.figsize'] = [13, 13]
        # print("INTERACTIVE")
    else:
        import matplotlib.pyplot as pl
        i_am_in_interatcive = False
        # print("NOT INTERACTIVE")
except:
    # IPython not installed (or probing failed): fall back to matplotlib.
    import matplotlib.pyplot as pl
    i_am_in_interatcive = False
# print("__INIT__ EXECUTED")
from .Agent import Agent
from .Enviroment_with_agents import Enviroment_with_agents
from .Enviroment import Enviroment
from .InOut_Simple_Laberinth import InOut_Simple_Laberinth, No_Walls_Laberinth
import numpy as np
| [
"IPython.get_ipython"
] | [((49, 62), 'IPython.get_ipython', 'get_ipython', ([], {}), '()\n', (60, 62), False, 'from IPython import get_ipython\n')] |
#! /usr/bin/python3
#
# Copyright (c) 2021 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
import copy
import os
import subprocess
import commonl
import ttbl
import ttbl.images
import ttbl.power
class pgm_c(ttbl.images.flash_shell_cmd_c):
    """Flash using Intel's Quartus PGM tool

    This allows to flash images to an Altera MAX10, using the Quartus
    tools, freely downloadable from
    https://www.intel.com/content/www/us/en/collections/products/fpga/software/downloads.html?s=Newest

    Exports the following interfaces:

    - power control (using any AC power switch, such as the
      :class:`Digital Web Power Switch 7 <ttbl.pc.dlwps7>`)
    - serial console
    - image (in hex format) flashing (using the Quartus Prime tools
      package)

    Multiple instances at the same time are supported; however, due to
    the JTAG interface not exporting a serial number, addressing has
    to be done by USB path, which is risky (as it will change when the
    cable is plugged to another port or might be enumerated in a
    different number).

    :param str usb_serial_number: USB serial number of the USB device to use
      (USB-BlasterII or similar)

    :param dict image_map:

    :param str name: (optional; default 'Intel Quartus PGM #<DEVICEID>')
      instrument's name.

    :param dict args: (optional) dictionary of extra command line options to
      *quartus_pgm*; these are expanded with the target keywords with
      *%(FIELD)s* templates, with fields being the target's
      :ref:`metadata <finding_testcase_metadata>`:

      FIXME: move to common flash_shell_cmd_c

    :param dict jtagconfig: (optional) jtagconfig --setparam commands
      to run before starting.

      These are expanded with the target keywords with
      *%(FIELD)s* templates, with fields being the target's
      :ref:`metadata <finding_testcase_metadata>` and then run as::

        jtagconfig --setparam CABLENAME KEY VALUE

    :param int tcp_port: (optional, default *None*) if a TCP port
      number is given, it is a assumed the flashing server is in
      localhost in the given TCP port.

    :param str sibling_serial_number: (optional, default *None*) USB serial
      number of the USB device that is a sibling to the one defined by
      usb_serial_number

    :param int usb_port: (optional, default *None*) port that the USB device is
      connected to, used in combination with sibling_serial_number to find
      the USB path for devices that do not have unique serial numbers (USB
      Blaster I)

    Other parameters described in :class:ttbl.images.impl_c.

    **Command line reference**

    https://www.intel.com/content/dam/www/programmable/us/en/pdfs/literature/manual/tclscriptrefmnl.pdf

    Section Quartus_PGM (2-50)

    **System setup**

    - Download and install Quartus Programmer::

        $ wget http://download.altera.com/akdlm/software/acdsinst/20.1std/711/ib_installers/QuartusProgrammerSetup-20.1.0.711-linux.run
        # chmod a+x QuartusProgrammerSetup-20.1.0.711-linux.run
        # ./QuartusProgrammerSetup-20.1.0.711-linux.run --unattendedmodeui none --mode unattended --installdir /opt/quartus --accept_eula 1

    - if installing to a different location than */opt/quartus*,
      adjust the value of :data:`path` in a FIXME:ttbd configuration
      file.

    **Troubleshooting**

    When it fails to flash, the error log is reported in the server in
    a file called *flash-COMPONENTS.log* in the target's state
    directory (FIXME: we need a better way for this--the admin shall
    be able to read it, but not the users as it might leak sensitive
    information?).

    Common error messages:

    - *Error (213019): Can't scan JTAG chain. Error code 87*

      Also seen when manually running in the server::

        $ /opt/quartus/qprogrammer/bin/jtagconfig
        1) USB-BlasterII [3-1.4.4.3]
          Unable to read device chain - JTAG chain broken

      In many cases this has been:

      - a powered off main board: power it on

      - a misconnected USB-BlasterII: reconnect properly

      - a broken USB-BlasterII: replace unit

    - *Error (209012): Operation failed*

      this usually happens when flashing one component of a multiple
      component chain; the log might read something like::

        Info (209060): Started Programmer operation at Mon Jul 20 12:05:22 2020
        Info (209017): Device 2 contains JTAG ID code 0x038301DD
        Info (209060): Started Programmer operation at Mon Jul 20 12:05:22 2020
        Info (209016): Configuring device index 2
        Info (209017): Device 2 contains JTAG ID code 0x018303DD
        Info (209007): Configuration succeeded -- 1 device(s) configured
        Info (209011): Successfully performed operation(s)
        Info (209061): Ended Programmer operation at Mon Jul 20 12:05:22 2020
        Error (209012): Operation failed
        Info (209061): Ended Programmer operation at Mon Jul 20 12:05:22 2020
        Error: Quartus Prime Programmer was unsuccessful. 1 error, 0 warnings

      This case has been found to be because the **--bgp** option is
      needed (which seems to map to the *Enable Realtime ISP
      programming* in the Quartus UI, *quartus_pgmw*)

    - *Warning (16328): The real-time ISP option for Max 10 is
      selected. Ensure all Max 10 devices being programmed are in user
      mode when requesting this programming option*

      Followed by:

      *Error (209012): Operation failed*

      This case comes when a previous flashing process was interrupted
      half way or the target is corrupted.

      It needs a special one-time recovery; currently the
      workaround seems to run the flashing with out the *--bgp* switch
      that as of now is hardcoded.

      FIXME: move the --bgp and --mode=JTAG switches to the args (vs
      hardcoded) so a recovery target can be implemented as
      NAME-nobgp

    *Using Quartus tool with a remote jtagd*

    The service port for *jtagd* can be tunneled in and used by the
    Quartus toolsuite::

      $ tcf property-get r013s001 interfaces.power.jtagd.tcp_port
      5337
      $ tcf power-on -c jtagd TARGET
      $ tcf tunnel-add TARGET 5337 tcp 127.0.01
      SERVERNAME:1234

    Now the Quartus Qprogrammer tools need to be told which server to
    add::

      $ jtagdconfig --addserver SERVERNAME:1234 ""

    (second entry is an empty password); this adds an entry to
    *~/.jtagd.conf*::

      # /home/USERNAME/.jtag.conf
      Remote1 {
              Host = "SERVERNAME:1234";
              Password = "";
      }

    Note the port number changes with each tunnel, you will have to
    *jtagconfig --addserver* and delete the old one (you can edit the
    file by hand too).

    Now list remote targets::

      $ jtagconfig
      1) USB-BlasterII on SERVERNAME:1234 [3-1.4.1]
        031050DD   10M50DA(.|ES)/10M50DC
        031040DD   10M25D(A|C)

    Note this connection is open to anyone until the tunnel is removed
    or the allocation is released with *tcf alloc-rm* or
    equivalent. *PENDING* use SSL to secure access.

    [ see also for the Quartus GUI, follow
    https://www.intel.com/content/www/us/en/programmable/quartushelp/13.0/mergedProjects/program/pgm/pgm_pro_add_server.htm ]

    **Quartus Lite**

    Download from https://www.intel.com/content/www/us/en/software-kit/684215/intel-quartus-prime-lite-edition-design-software-version-21-1-for-linux.html?

    Install with::

      $ tar xf Quartus-lite-21.1.0.842-linux.tar
      $ cd components
      $ chmod a+x ./Quartus-lite-21.1.0.842-linux.tar
      $ ./Quartus-lite-21.1.0.842-linux.tar

    Quartus will use the same *~/.jtagd.conf* if you have used
    *jtagconfig* to configure as above

    1. Start Quartus::

       $ INSTALLPATH/intelFPGA_lite/21.1/quartus/bin/quartus

    2. Go to Programmer > Edit > Hardware Setup

    3. Click on *Add Hardware*

    4. Enter as *Server Name* and *Server Port* the name of the server
       that is doing the tunnel (as printed by *tcf tunnel-add*
       above); leave the password blank.

    5. Click *OK*

    **Troubleshooting**

    - can't connect to port::

        $ ./jtagconfig
        1) Remote server SERVERNAME:1234: Unable to connect

      - ensure jtagd in the target is on
      - ensure the tunnel is on
    """

    #: Path to *quartus_pgm*
    #:
    #: We need to use an ABSOLUTE PATH if the tool is not in the
    #: normal search path (which usually won't).
    #:
    #: Change by setting, in a :ref:`server configuration file
    #: <ttbd_configuration>`:
    #:
    #: >>> ttbl.quartus.pgm_c.path = "/opt/quartus/qprogrammer/bin/quartus_pgm"
    #:
    #: or for a single instance that then will be added to config:
    #:
    #: >>> imager = ttbl.quartus.pgm_c(...)
    #: >>> imager.path = "/opt/quartus/qprogrammer/bin/quartus_pgm"
    path = "/opt/quartus/qprogrammer/bin/quartus_pgm"

    #: Path to the *jtagconfig* tool, used to run per-cable
    #: ``--setparam`` commands before flashing.
    path_jtagconfig = "/opt/quartus/qprogrammer/bin/jtagconfig"

    def __init__(self, usb_serial_number, image_map, args = None, name = None,
                 jtagconfig = None, tcp_port = None,
                 sibling_serial_number = None, usb_port = None,
                 **kwargs):
        assert isinstance(usb_serial_number, str)
        commonl.assert_dict_of_ints(image_map, "image_map")
        commonl.assert_none_or_dict_of_strings(jtagconfig, "jtagconfig")
        assert name == None or isinstance(name, str)
        assert tcp_port == None or isinstance(tcp_port, int)
        self.usb_serial_number = usb_serial_number
        self.tcp_port = tcp_port
        self.image_map = image_map
        self.jtagconfig = jtagconfig
        self.sibling_serial_number = sibling_serial_number
        self.usb_port = usb_port
        if args:
            commonl.assert_dict_of_strings(args, "args")
            self.args = args
        else:
            self.args = {}
        # Base command line; the --operation arguments are appended per
        # image in flash_start(), once we know what is being flashed.
        cmdline = [
            "stdbuf", "-o0", "-e0", "-i0",
            self.path,
            # FIXME: move this to args, enable value-less args (None)
            "--bgp",		# Real time background programming
            "--mode=JTAG",	# this is a JTAG
            # when using a server, if the target is called
            # SOMETHING in SERVERNAME:PORT CABLENAME, it seems PGM
            # goes straight there. Weird
            "-c", "%(device_path)s", # will resolve in flash_start()
            # in flash_start() call we'll map the image names to targets
            # to add these
            #
            #'--operation=PVB;%(image.NAME)s@1',
            #'--operation=PVB;%(image.NAME)s@2',
            #...
            # (P)rogram (V)erify, (B)lank-check
            #
            # note like this we can support burning multiple images into the
            # same chain with a single call
        ]
        if args:
            for arg, value in args.items():
                if value != None:
                    cmdline += [ arg, value ]
        # we do this because in flash_start() we need to add
        # --operation as we find images we are supposed to flash
        self.cmdline_orig = cmdline
        ttbl.images.flash_shell_cmd_c.__init__(self, cmdline, cwd = '%(file_path)s',
                                               **kwargs)
        if name == None:
            self.name = "quartus"
        self.upid_set(
            f"Intel Quartus PGM @ USB#{usb_serial_number}",
            usb_serial_number = usb_serial_number)

    def flash_start(self, target, images, context):
        # Finalize preparing the command line for flashing the images

        # find the device path; quartus_pgm doesn't seem to be able to
        # address by serial and expects a cable name as 'PRODUCT NAME
        # [PATH]', like 'USB BlasterII [1-3.3]'; we can't do this on
        # object creation because the USB path might change when we power
        # it on/off (rare, but could happen). Since USB Blaster I do not
        # have unique serial numbers we use a combination of usb_port
        # and sibling_serial_number to find the correct usb_path
        if self.usb_port != None:
            usb_path, _vendor, product = ttbl.usb_serial_to_path(
                self.sibling_serial_number, self.usb_port)
        else:
            usb_path, _vendor, product = ttbl.usb_serial_to_path(
                self.usb_serial_number)
        if self.tcp_port:
            # server based cable name
            device_path = f"{product} on localhost:{self.tcp_port} [{usb_path}]"
            jtag_config_filename = f"{target.state_dir}/jtag-{'_'.join(images.keys())}.conf"
            # Create the jtag client config file to ensure that
            # the correct jtag daemon is connected to, then use the
            # environment variable QUARTUS_JTAG_CLIENT_CONFIG to have
            # the quartus software find it
            with open(jtag_config_filename, "w+") as jtag_config:
                jtag_config.write(
                    f'ReplaceLocalJtagServer = "localhost:{self.tcp_port}";')
            self.env_add["QUARTUS_JTAG_CLIENT_CONFIG"] = jtag_config_filename
        else:
            # local cable name, starts server on its own
            device_path = f"{product} [{usb_path}]"
        context['kws'] = {
            # HACK: we assume all images are in the same directory, so
            # we are going to cwd there (see in __init__ how we set
            # cwd to %(file_path)s. Reason is some of our paths might
            # include @, which the tool considers illegal as it uses
            # it to separate arguments--see below --operation
            'file_path': os.path.dirname(list(images.values())[0]),
            'device_path': device_path,
            # flash_shell_cmd_c.flash_start() will add others
        }
        # for each image we are burning, map it to a target name in
        # the cable (@NUMBER)
        # make sure we don't modify the originals
        cmdline = copy.deepcopy(self.cmdline_orig)
        for image_type, filename in images.items():
            target_index = self.image_map.get(image_type, None)
            # pass only the relative filename, as we are going to
            # change working dir into the path (see above in
            # context[kws][file_path]
            cmdline.append("--operation=PVB;%s@%d" % (
                os.path.basename(filename), target_index))
        # now set it for flash_shell_cmd_c.flash_start()
        self.cmdline = cmdline
        if self.jtagconfig:
            # Run one jtagconfig --setparam invocation per configured
            # option before the actual flashing starts.
            for option, value in self.jtagconfig.items():
                cmdline = [
                    self.path_jtagconfig,
                    "--addserver", f"localhost:{self.tcp_port}", "",	# empty password
                    "--setparam",
                    device_path,
                    option, value
                ]
                target.log.info("running per-config: %s" % " ".join(cmdline))
                subprocess.check_output(
                    cmdline, shell = False, stderr = subprocess.STDOUT)
        ttbl.images.flash_shell_cmd_c.flash_start(self, target, images, context)
class jtagd_c(ttbl.power.daemon_c):
    """Driver for the jtag daemon

    This driver starts the jtag daemon on the server for a specific
    USB Blaster II

    Does not override any of the default methods except for verify

    **Arguments**

    :param str usb_serial_number: serial number of the USB Blaster II

    :param int tcp_port: (1024 - 65536) Number of the TCP port on
      localhost where the daemon will listen

    :param str jtagd_path: (optional) override :data:`jtagd_path`;

    :param str explicit: (optional; default *off*) control when this
      is started on/off:

      - *None*: for normal behaviour; component will be
        powered-on/started with the whole power rail

      - *both*: explicit for both powering on and off: only
        power-on/start and power-off/stop if explicity called by
        name

      - *on*: explicit for powering on: only power-on/start if explicity
        powered on by name, power off normally

      - *off*: explicit for powering off: only power-off/stop if explicity
        powered off by name, power on normally

      By default it is set to *off*, so that when the target is powered
      off existing network connections to the daemon are maintained.

    Any other arguments as taken by :class:ttbl.power.daemon_c and
    :class:ttbl.power.impl_c.
    """
    #: Default absolute path to the *jtagd* binary; can be overriden
    #: per-instance with the *jtagd_path* constructor argument.
    jtagd_path = "/opt/quartus/qprogrammer/bin/jtagd"

    def __init__(self, usb_serial_number, tcp_port, jtagd_path = None,
                 check_path = None, explicit = "off", **kwargs):
        assert isinstance(usb_serial_number, str), \
            "usb_serial_number: expected a string, got %s" % type(usb_serial_number)
        assert isinstance(tcp_port, int), \
            "tcp_port: expected an integer between 1024 and 65536, got %s" \
            % type(usb_serial_number)
        if jtagd_path:
            self.jtagd_path = jtagd_path
        # NOTE(review): the message says "openipc_path" but this checks
        # jtagd_path -- looks like a copy/paste leftover; confirm and fix.
        assert isinstance(self.jtagd_path, str), \
            "openipc_path: expected a string, got %s" % type(jtagd_path)
        self.usb_serial_number = usb_serial_number
        self.tcp_port = tcp_port
        cmdline = [
            self.jtagd_path,
            "--no-config",
            "--auto-detect-filter", usb_serial_number,
            "--port", str(tcp_port),
            "--debug",
            "--foreground",
        ]
        ttbl.power.daemon_c.__init__(
            self, cmdline, precheck_wait = 0.5, mkpidfile = True,
            name = "jtagd", explicit = explicit,
            # ...linux64/jtagd renames itself to jtagd and it makes it hard to kill
            path = "jtagd",
            check_path = "/opt/quartus/qprogrammer/linux64/jtagd",
            **kwargs)
        # Register the instrument like this, so it matches pgm_c and
        # others and they all point to the same instrument
        self.upid_set(
            f"Intel Quartus PGM @ USB#{usb_serial_number}",
            usb_serial_number = usb_serial_number)

    def target_setup(self, target, iface_name, component):
        # Publish the daemon's TCP port in the target's inventory so
        # clients know where to connect.
        target.fsdb.set(f"interfaces.{iface_name}.{component}.tcp_port",
                        self.tcp_port)
        # Set the local ports that is able to be reached via tunneling
        target.tunnel.allowed_local_ports.add(("127.0.0.1", "tcp",
                                               self.tcp_port))
        ttbl.power.daemon_c.target_setup(self, target, iface_name, component)

    def verify(self, target, component, cmdline_expanded):
        # The daemon is considered up when its PID file points to a live
        # process and it is listening on the configured TCP port.
        pidfile = os.path.join(target.state_dir, component + "-jtagd.pid")
        return commonl.process_alive(pidfile, self.check_path) \
            and commonl.tcp_port_busy(self.tcp_port)

    def on(self, target, component):
        return ttbl.power.daemon_c.on(self, target, component)
| [
"ttbl.usb_serial_to_path",
"subprocess.check_output",
"ttbl.power.daemon_c.on",
"commonl.assert_dict_of_ints",
"ttbl.power.daemon_c.__init__",
"ttbl.images.flash_shell_cmd_c.__init__",
"os.path.join",
"commonl.assert_none_or_dict_of_strings",
"ttbl.images.flash_shell_cmd_c.flash_start",
"commonl.p... | [((9249, 9300), 'commonl.assert_dict_of_ints', 'commonl.assert_dict_of_ints', (['image_map', '"""image_map"""'], {}), "(image_map, 'image_map')\n", (9276, 9300), False, 'import commonl\n'), ((9309, 9373), 'commonl.assert_none_or_dict_of_strings', 'commonl.assert_none_or_dict_of_strings', (['jtagconfig', '"""jtagconfig"""'], {}), "(jtagconfig, 'jtagconfig')\n", (9347, 9373), False, 'import commonl\n'), ((11109, 11197), 'ttbl.images.flash_shell_cmd_c.__init__', 'ttbl.images.flash_shell_cmd_c.__init__', (['self', 'cmdline'], {'cwd': '"""%(file_path)s"""'}), "(self, cmdline, cwd='%(file_path)s',\n **kwargs)\n", (11147, 11197), False, 'import ttbl\n'), ((13898, 13930), 'copy.deepcopy', 'copy.deepcopy', (['self.cmdline_orig'], {}), '(self.cmdline_orig)\n', (13911, 13930), False, 'import copy\n'), ((14976, 15048), 'ttbl.images.flash_shell_cmd_c.flash_start', 'ttbl.images.flash_shell_cmd_c.flash_start', (['self', 'target', 'images', 'context'], {}), '(self, target, images, context)\n', (15017, 15048), False, 'import ttbl\n'), ((17385, 17583), 'ttbl.power.daemon_c.__init__', 'ttbl.power.daemon_c.__init__', (['self', 'cmdline'], {'precheck_wait': '(0.5)', 'mkpidfile': '(True)', 'name': '"""jtagd"""', 'explicit': 'explicit', 'path': '"""jtagd"""', 'check_path': '"""/opt/quartus/qprogrammer/linux64/jtagd"""'}), "(self, cmdline, precheck_wait=0.5, mkpidfile=\n True, name='jtagd', explicit=explicit, path='jtagd', check_path=\n '/opt/quartus/qprogrammer/linux64/jtagd', **kwargs)\n", (17413, 17583), False, 'import ttbl\n'), ((18374, 18443), 'ttbl.power.daemon_c.target_setup', 'ttbl.power.daemon_c.target_setup', (['self', 'target', 'iface_name', 'component'], {}), '(self, target, iface_name, component)\n', (18406, 18443), False, 'import ttbl\n'), ((18522, 18578), 'os.path.join', 'os.path.join', (['target.state_dir', "(component + '-jtagd.pid')"], {}), "(target.state_dir, component + '-jtagd.pid')\n", (18534, 18578), False, 'import os\n'), ((18750, 18797), 
'ttbl.power.daemon_c.on', 'ttbl.power.daemon_c.on', (['self', 'target', 'component'], {}), '(self, target, component)\n', (18772, 18797), False, 'import ttbl\n'), ((9766, 9810), 'commonl.assert_dict_of_strings', 'commonl.assert_dict_of_strings', (['args', '"""args"""'], {}), "(args, 'args')\n", (9796, 9810), False, 'import commonl\n'), ((12116, 12182), 'ttbl.usb_serial_to_path', 'ttbl.usb_serial_to_path', (['self.sibling_serial_number', 'self.usb_port'], {}), '(self.sibling_serial_number, self.usb_port)\n', (12139, 12182), False, 'import ttbl\n'), ((12255, 12302), 'ttbl.usb_serial_to_path', 'ttbl.usb_serial_to_path', (['self.usb_serial_number'], {}), '(self.usb_serial_number)\n', (12278, 12302), False, 'import ttbl\n'), ((18594, 18641), 'commonl.process_alive', 'commonl.process_alive', (['pidfile', 'self.check_path'], {}), '(pidfile, self.check_path)\n', (18615, 18641), False, 'import commonl\n'), ((18660, 18696), 'commonl.tcp_port_busy', 'commonl.tcp_port_busy', (['self.tcp_port'], {}), '(self.tcp_port)\n', (18681, 18696), False, 'import commonl\n'), ((14871, 14942), 'subprocess.check_output', 'subprocess.check_output', (['cmdline'], {'shell': '(False)', 'stderr': 'subprocess.STDOUT'}), '(cmdline, shell=False, stderr=subprocess.STDOUT)\n', (14894, 14942), False, 'import subprocess\n'), ((14283, 14309), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (14299, 14309), False, 'import os\n')] |
import pandas as pd
from config import WEBSCRAPE_DATA_PATH, OUTPUT_DATA_PATH
import os
def get_understat_filepaths(file_path):
    """Collect the per-team understat CSV paths below *file_path*.

    Walks the directory tree and keeps files whose name contains
    'understat' but not 'team' or 'player' (i.e. the per-team match
    files).  The team name is recovered from the filename: everything
    after the first underscore, minus the extension, with remaining
    underscores turned into spaces.

    :param file_path: root directory to walk.
    :return: DataFrame indexed by team name with a 'Filepath' column.
    """
    filepaths = []
    team = []
    for root, dirs, files in os.walk(file_path):
        for filename in files:
            if ('understat' in filename) and ('team' not in filename) and ('player' not in filename):
                # Join root with the actual filename so the path points
                # at the CSV itself (usable by pd.read_csv later); the
                # previous literal placeholder produced invalid paths.
                filepaths.append(f"{root}/{filename}")
                team.append(filename.split('_', 1)[1].split('.')[0].replace("_", " "))
    return pd.DataFrame({'Filepath': filepaths,
                         }, index=team)
def create_teams_data(filepath):
    """Assemble per-season team xG data from the webscraped understat CSVs.

    For each season sub-directory of *filepath* the per-team understat
    files are read, concatenated and each match row is merged with its
    opponent's row (matched by swapped xG/xGA values).

    :param filepath: directory containing one sub-directory per season.
    :return: dict mapping season start year (int) to a DataFrame of
        per-match rows, including the opponent team's id.
    """
    understat_year_df_dict = {}
    understat_year_df_with_opps_dict = {}
    for subdir in os.listdir(filepath):
        # Season directories are expected to start with the 4-digit year.
        year = int(subdir[:4])
        understat_dict = {}
        players_path = os.path.join(os.path.normpath(filepath), subdir)
        understat_paths = get_understat_filepaths(os.path.join(players_path, 'understat'))
        print(understat_paths)
        # Assign sequential 1-based ids in the order the teams were found.
        teams_list = pd.DataFrame({'name': understat_paths.index.values})
        teams_list['id'] = teams_list.index + 1
        for team in understat_paths.index:
            understat_dict[team] = pd.read_csv(understat_paths.loc[team, 'Filepath'],
                                               usecols=['date', 'xG', 'xGA', 'xpts'])
            understat_dict[team]['Team'] = team
            understat_dict[team]['date'] = pd.to_datetime(understat_dict[team].date).dt.date
            understat_dict[team].sort_values(['date'], ascending=True, inplace=True)
            # NOTE(review): Games_played is derived from the original CSV
            # row index, not the date-sorted order -- confirm the source
            # files are already chronological.
            understat_dict[team]['Games_played'] = understat_dict[team].index + 1
        understat_year_df_dict[year] = pd.concat(understat_dict.values())
        understat_year_df_dict[year]['season'] = year
        # Attach the numeric team id by name, then drop the helper column.
        understat_year_with_id = understat_year_df_dict[year].merge(teams_list,
                                                                    left_on='Team',
                                                                    right_on='name').drop('name', axis=1)
        understat_opponents_filtered = understat_year_with_id[['xG', 'xGA', 'xpts', 'Team', 'id']]
        # Self-merge to find each match's opponent row: the opponent's
        # xG/xGA are this row's xGA/xG.
        understat_year_df_with_opps_dict[year] = understat_year_with_id.\
            merge(understat_opponents_filtered, left_on=['xG', 'xGA'], right_on=['xGA', 'xG'],
                  suffixes=('', '_opponent'), how='outer').drop(['xG_opponent', 'xGA_opponent'], axis=1)
    return understat_year_df_with_opps_dict
def main():
    """Combine every season's team data and write it out as one CSV."""
    season_frames = create_teams_data(filepath=WEBSCRAPE_DATA_PATH)
    all_seasons = pd.concat(list(season_frames.values()))
    output_file = os.path.join(OUTPUT_DATA_PATH, 'teams_data.csv')
    all_seasons.to_csv(output_file, index=False)


if __name__ == "__main__":
    main()
| [
"os.listdir",
"pandas.read_csv",
"pandas.to_datetime",
"os.path.join",
"os.path.normpath",
"pandas.DataFrame",
"os.walk"
] | [((191, 209), 'os.walk', 'os.walk', (['file_path'], {}), '(file_path)\n', (198, 209), False, 'import os\n'), ((497, 546), 'pandas.DataFrame', 'pd.DataFrame', (["{'Filepath': filepaths}"], {'index': 'team'}), "({'Filepath': filepaths}, index=team)\n", (509, 546), True, 'import pandas as pd\n'), ((701, 721), 'os.listdir', 'os.listdir', (['filepath'], {}), '(filepath)\n', (711, 721), False, 'import os\n'), ((999, 1051), 'pandas.DataFrame', 'pd.DataFrame', (["{'name': understat_paths.index.values}"], {}), "({'name': understat_paths.index.values})\n", (1011, 1051), True, 'import pandas as pd\n'), ((2629, 2677), 'os.path.join', 'os.path.join', (['OUTPUT_DATA_PATH', '"""teams_data.csv"""'], {}), "(OUTPUT_DATA_PATH, 'teams_data.csv')\n", (2641, 2677), False, 'import os\n'), ((820, 846), 'os.path.normpath', 'os.path.normpath', (['filepath'], {}), '(filepath)\n', (836, 846), False, 'import os\n'), ((906, 945), 'os.path.join', 'os.path.join', (['players_path', '"""understat"""'], {}), "(players_path, 'understat')\n", (918, 945), False, 'import os\n'), ((1179, 1272), 'pandas.read_csv', 'pd.read_csv', (["understat_paths.loc[team, 'Filepath']"], {'usecols': "['date', 'xG', 'xGA', 'xpts']"}), "(understat_paths.loc[team, 'Filepath'], usecols=['date', 'xG',\n 'xGA', 'xpts'])\n", (1190, 1272), True, 'import pandas as pd\n'), ((1407, 1448), 'pandas.to_datetime', 'pd.to_datetime', (['understat_dict[team].date'], {}), '(understat_dict[team].date)\n', (1421, 1448), True, 'import pandas as pd\n')] |
import numpy as np
import pytest
from respy import RespyCls
from respy.python.shared.shared_constants import IS_PARALLELISM_MPI
from respy.python.shared.shared_constants import IS_PARALLELISM_OMP
from respy.tests.codes.auxiliary import compare_est_log
from respy.tests.codes.auxiliary import simulate_observed
from respy.tests.codes.random_model import generate_random_model
@pytest.mark.skipif(
    not IS_PARALLELISM_MPI and not IS_PARALLELISM_OMP, reason="No PARALLELISM available"
)
class TestClass(object):
    """This class groups together some tests."""

    def test_1(self):
        """Ensure that it makes no difference whether the
        criterion function is evaluated in parallel or not.
        """
        # Generate random initialization file
        constr = {
            "program": {"version": "fortran"},
            "estimation": {"maxfun": np.random.randint(0, 50)},
        }
        params_spec, options_spec = generate_random_model(point_constr=constr)

        # If delta is a not fixed, we need to ensure a bound-constraint optimizer.
        # However, this is not the standard flag_estimation as the number of function
        # evaluation is possibly much larger to detect and differences in the updates of
        # the optimizer steps depending on the implementation.
        if params_spec.loc[("delta", "delta"), "fixed"] is False:
            options_spec["estimation"]["optimizer"] = "FORT-BOBYQA"

        base = None
        for is_parallel in [True, False]:
            options_spec["program"]["threads"] = 1
            options_spec["program"]["procs"] = 1

            if is_parallel:
                # Only request the parallel resources actually available
                # in this build of the package.
                if IS_PARALLELISM_OMP:
                    options_spec["program"]["threads"] = np.random.randint(2, 5)
                if IS_PARALLELISM_MPI:
                    options_spec["program"]["procs"] = np.random.randint(2, 5)

            respy_obj = RespyCls(params_spec, options_spec)
            respy_obj = simulate_observed(respy_obj)

            _, crit_val = respy_obj.fit()

            # First pass records the reference criterion value; every
            # later pass must reproduce it exactly.
            if base is None:
                base = crit_val
            np.testing.assert_equal(base, crit_val)

    def test_2(self):
        """ This test ensures that the record files are identical.
        """
        # Generate random initialization file. The number of periods is higher than
        # usual as only FORTRAN implementations are used to solve the random request.
        # This ensures that also some cases of interpolation are explored.
        constr = {
            "program": {"version": "fortran"},
            "num_periods": np.random.randint(3, 10),
            "estimation": {"maxfun": 0},
        }
        params_spec, options_spec = generate_random_model(point_constr=constr)

        base_sol_log, base_est_info_log = None, None
        base_est_log = None

        for is_parallel in [False, True]:
            options_spec["program"]["threads"] = 1
            options_spec["program"]["procs"] = 1

            if is_parallel:
                if IS_PARALLELISM_OMP:
                    options_spec["program"]["threads"] = np.random.randint(2, 5)
                if IS_PARALLELISM_MPI:
                    options_spec["program"]["procs"] = np.random.randint(2, 5)

            respy_obj = RespyCls(params_spec, options_spec)

            file_sim = respy_obj.get_attr("file_sim")

            simulate_observed(respy_obj)

            respy_obj.fit()

            # Check for identical records
            fname = file_sim + ".respy.sol"
            if base_sol_log is None:
                base_sol_log = open(fname, "r").read()
            assert open(fname, "r").read() == base_sol_log

            if base_est_info_log is None:
                base_est_info_log = open("est.respy.info", "r").read()
            assert open("est.respy.info", "r").read() == base_est_info_log

            if base_est_log is None:
                base_est_log = open("est.respy.log", "r").readlines()
            compare_est_log(base_est_log)
| [
"numpy.testing.assert_equal",
"respy.tests.codes.random_model.generate_random_model",
"respy.tests.codes.auxiliary.compare_est_log",
"numpy.random.randint",
"respy.RespyCls",
"pytest.mark.skipif",
"respy.tests.codes.auxiliary.simulate_observed"
] | [((379, 487), 'pytest.mark.skipif', 'pytest.mark.skipif', (['(not IS_PARALLELISM_MPI and not IS_PARALLELISM_OMP)'], {'reason': '"""No PARALLELISM available"""'}), "(not IS_PARALLELISM_MPI and not IS_PARALLELISM_OMP,\n reason='No PARALLELISM available')\n", (397, 487), False, 'import pytest\n'), ((940, 982), 'respy.tests.codes.random_model.generate_random_model', 'generate_random_model', ([], {'point_constr': 'constr'}), '(point_constr=constr)\n', (961, 982), False, 'from respy.tests.codes.random_model import generate_random_model\n'), ((2693, 2735), 'respy.tests.codes.random_model.generate_random_model', 'generate_random_model', ([], {'point_constr': 'constr'}), '(point_constr=constr)\n', (2714, 2735), False, 'from respy.tests.codes.random_model import generate_random_model\n'), ((1894, 1929), 'respy.RespyCls', 'RespyCls', (['params_spec', 'options_spec'], {}), '(params_spec, options_spec)\n', (1902, 1929), False, 'from respy import RespyCls\n'), ((1954, 1982), 'respy.tests.codes.auxiliary.simulate_observed', 'simulate_observed', (['respy_obj'], {}), '(respy_obj)\n', (1971, 1982), False, 'from respy.tests.codes.auxiliary import simulate_observed\n'), ((2099, 2138), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (['base', 'crit_val'], {}), '(base, crit_val)\n', (2122, 2138), True, 'import numpy as np\n'), ((2579, 2603), 'numpy.random.randint', 'np.random.randint', (['(3)', '(10)'], {}), '(3, 10)\n', (2596, 2603), True, 'import numpy as np\n'), ((3254, 3289), 'respy.RespyCls', 'RespyCls', (['params_spec', 'options_spec'], {}), '(params_spec, options_spec)\n', (3262, 3289), False, 'from respy import RespyCls\n'), ((3358, 3386), 'respy.tests.codes.auxiliary.simulate_observed', 'simulate_observed', (['respy_obj'], {}), '(respy_obj)\n', (3375, 3386), False, 'from respy.tests.codes.auxiliary import simulate_observed\n'), ((3965, 3994), 'respy.tests.codes.auxiliary.compare_est_log', 'compare_est_log', (['base_est_log'], {}), '(base_est_log)\n', (3980, 3994), 
False, 'from respy.tests.codes.auxiliary import compare_est_log\n'), ((866, 890), 'numpy.random.randint', 'np.random.randint', (['(0)', '(50)'], {}), '(0, 50)\n', (883, 890), True, 'import numpy as np\n'), ((1727, 1750), 'numpy.random.randint', 'np.random.randint', (['(2)', '(5)'], {}), '(2, 5)\n', (1744, 1750), True, 'import numpy as np\n'), ((1845, 1868), 'numpy.random.randint', 'np.random.randint', (['(2)', '(5)'], {}), '(2, 5)\n', (1862, 1868), True, 'import numpy as np\n'), ((3087, 3110), 'numpy.random.randint', 'np.random.randint', (['(2)', '(5)'], {}), '(2, 5)\n', (3104, 3110), True, 'import numpy as np\n'), ((3205, 3228), 'numpy.random.randint', 'np.random.randint', (['(2)', '(5)'], {}), '(2, 5)\n', (3222, 3228), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
"""
"""
from __future__ import unicode_literals
from __future__ import print_function
import logging
from json import dumps
from flask import request
logger = logging.getLogger('plutil.http.s')
def define_common_routes(plugin, app, server):
    """Register the HTTP routes that every plugin server exposes.

    :param plugin: owning plugin instance (kept for signature parity
        with other route definers; not used directly here).
    :param app: Flask application the routes are attached to.
    :param server: object holding the pending ``messages`` dict and the
        ``messages_lock`` that guards it.
    """

    @app.route('/', methods=['GET', 'POST'])
    def route_index():
        """Echo the request's query arguments back as JSON."""
        logger.debug("Server reached on root path")
        the_args = request.args
        for arg in the_args:
            # Pass the format arguments individually: wrapping them in a
            # tuple made %-formatting fail with "not enough arguments
            # for format string" when the record was emitted.
            logger.debug(" - argument %s: %s", arg, the_args[arg])
        return dumps({
            'status': 'OK',
            'result': the_args
        })

    @app.route('/shut_me_down_used_for_restarts', methods=['POST'])
    def route_shutdown():
        """Ask the werkzeug development server to shut itself down."""
        func = request.environ.get('werkzeug.server.shutdown')
        if func is None:
            raise RuntimeError('Not running with the Werkzeug Server')
        func()
        return dumps({
            'status': 'OK',
            'result': "Shutting down..."
        })

    @app.route('/result', methods=['GET', 'POST'])
    def route_result():
        """Return (and consume) the result stored for message ``id``."""
        logger.debug("We're being asked about a result")
        try:
            message_id = str(request.args['id'])
            with server.messages_lock:
                if message_id in server.messages:
                    logger.debug("message %r found in queue", message_id)
                    message = server.messages[message_id]
                    # Results are one-shot: remove once retrieved.
                    del server.messages[message_id]
                    result_type = message.result_type
                    result_data = message.result_data
                else:
                    logger.debug("message %r NOT found in queue", message_id)
                    result_type = 'NotFound'
                    result_data = 'Result may not be ready or it ' \
                                  'might have expired'
        except Exception:
            # Boundary handler: report the failure to the client instead
            # of leaking a traceback; details go to the server log.
            result_data = 'Exception in server while attempting to reply'
            result_type = 'Error'
            logger.error(result_data, exc_info=True)
        return dumps({
            'status': result_type,
            'result': result_data
        })
| [
"logging.getLogger",
"flask.request.environ.get",
"json.dumps"
] | [((186, 220), 'logging.getLogger', 'logging.getLogger', (['"""plutil.http.s"""'], {}), "('plutil.http.s')\n", (203, 220), False, 'import logging\n'), ((583, 626), 'json.dumps', 'dumps', (["{'status': 'OK', 'result': the_args}"], {}), "({'status': 'OK', 'result': the_args})\n", (588, 626), False, 'from json import dumps\n'), ((771, 818), 'flask.request.environ.get', 'request.environ.get', (['"""werkzeug.server.shutdown"""'], {}), "('werkzeug.server.shutdown')\n", (790, 818), False, 'from flask import request\n'), ((945, 998), 'json.dumps', 'dumps', (["{'status': 'OK', 'result': 'Shutting down...'}"], {}), "({'status': 'OK', 'result': 'Shutting down...'})\n", (950, 998), False, 'from json import dumps\n'), ((2082, 2135), 'json.dumps', 'dumps', (["{'status': result_type, 'result': result_data}"], {}), "({'status': result_type, 'result': result_data})\n", (2087, 2135), False, 'from json import dumps\n')] |
'''Train Siamese NIMA model networks.'''
from model.siamese_nima import SiameseNIMA
if __name__ == '__main__':
# dirs and paths to load data
train_image_dir = './assets/demo/train_images'
train_data_path = './assets/demo/train_data.csv'
# load data and train model
siamese = SiameseNIMA(output_dir='./assets')
train_raw = siamese.load_data(train_image_dir, train_data_path)
siamese.train(train_raw,
epochs=5,
batch_size=16,
nima_weight_path='./assets/weights/nima_weights_pre_trained.h5')
| [
"model.siamese_nima.SiameseNIMA"
] | [((299, 333), 'model.siamese_nima.SiameseNIMA', 'SiameseNIMA', ([], {'output_dir': '"""./assets"""'}), "(output_dir='./assets')\n", (310, 333), False, 'from model.siamese_nima import SiameseNIMA\n')] |
# -*- coding: utf-8 -*-
import datetime
from django.db.models import Count, Q
from django.utils import timezone
from trojsten.events.models import EventParticipant
from trojsten.people.constants import SCHOOL_YEAR_END_MONTH
from trojsten.results.constants import COEFFICIENT_COLUMN_KEY
from trojsten.results.generator import CategoryTagKeyGeneratorMixin, ResultsGenerator
from trojsten.results.representation import ResultsCell, ResultsCol, ResultsTag
from trojsten.submit.models import Submit
from .default import CompetitionRules
from .default import FinishedRoundsResultsRulesMixin as FinishedRounds
KMS_ALFA = "alfa"
KMS_BETA = "beta"
KMS_ALFA_MAX_COEFFICIENT = 3
KMS_ELIGIBLE_FOR_TASK_BOUND = [0, 2, 3, 5, 100, 100, 100, 100, 100, 100, 100]
KMS_FULL_POINTS_BOUND = [0, 1, 2, 3, 5, 8, 100, 100, 100, 100, 100]
KMS_CAMP_TYPE = "KMS sústredenie"
KMS_MO_FINALS_TYPE = "CKMO"
KMS_YEARS_OF_CAMPS_HISTORY = 10
class KMSResultsGenerator(CategoryTagKeyGeneratorMixin, ResultsGenerator):
def __init__(self, tag):
super(KMSResultsGenerator, self).__init__(tag)
self.camps = None
self.mo_finals = None
self.coefficients = {}
def get_user_coefficient(self, user, round):
if user not in self.coefficients:
if not self.camps or not self.mo_finals:
self.prepare_coefficients(round)
year = user.school_year_at(round.end_time)
successful_semesters = self.camps.get(user.pk, 0)
mo_finals = self.mo_finals.get(user.pk, 0)
self.coefficients[user] = year + successful_semesters + mo_finals
return self.coefficients[user]
def prepare_coefficients(self, round):
"""
Fetch from the db number of successful semester and number of participation
in MO final for each user and store them in dictionaries. The prepared
data in dictionaries are used to compute the coefficient of a given user.
We consider only events happened before given round, so the coefficients are computed
correct in older results.
"""
# We count only MO finals in previous school years, the user coefficient remains the same
# during a semester. We assume that the MO finals are held in the last semester
# of a year.
school_year = round.end_time.year - int(round.end_time.month < SCHOOL_YEAR_END_MONTH)
prev_school_year_end = timezone.make_aware(
datetime.datetime(school_year, SCHOOL_YEAR_END_MONTH, 28)
)
self.mo_finals = dict(
EventParticipant.objects.filter(
event__type__name=KMS_MO_FINALS_TYPE, event__end_time__lt=prev_school_year_end
)
.values("user")
.annotate(mo_finals=Count("event"))
.values_list("user", "mo_finals")
)
# We ignore camps that happened before KMS_YEARS_OF_CAMPS_HISTORY years, so we don't
# produce too big dictionaries of users.
self.camps = dict(
EventParticipant.objects.filter(
Q(
event__type__name=KMS_CAMP_TYPE,
event__end_time__lt=round.end_time,
event__end_time__year__gte=round.end_time.year - KMS_YEARS_OF_CAMPS_HISTORY,
),
Q(going=True) | Q(type=EventParticipant.PARTICIPANT),
)
.values("user")
.annotate(camps=Count("event__semester", distinct=True))
.values_list("user", "camps")
)
def get_cell_points_for_row_total(self, res_request, cell, key, coefficient):
return (
(1 + self.get_cell_total(res_request, cell)) // 2
if KMS_FULL_POINTS_BOUND[key] < coefficient or (self.tag.key == KMS_BETA and key == 3)
else self.get_cell_total(res_request, cell)
)
def run(self, res_request):
self.prepare_coefficients(res_request.round)
res_request.has_submit_in_beta = set()
for submit in Submit.objects.filter(
task__round__semester=res_request.round.semester, task__number__in=[8, 9, 10]
).select_related("user"):
res_request.has_submit_in_beta.add(submit.user)
return super(KMSResultsGenerator, self).run(res_request)
def is_user_active(self, request, user):
active = super(KMSResultsGenerator, self).is_user_active(request, user)
coefficient = self.get_user_coefficient(user, request.round)
if self.tag.key == KMS_ALFA:
active = active and (coefficient <= KMS_ALFA_MAX_COEFFICIENT)
if self.tag.key == KMS_BETA:
active = active and (
coefficient > KMS_ALFA_MAX_COEFFICIENT or user in request.has_submit_in_beta
)
return active
def deactivate_row_cells(self, request, row, cols):
coefficient = self.get_user_coefficient(row.user, request.round)
# Count only tasks your coefficient is eligible for
for key in row.cells_by_key:
if KMS_ELIGIBLE_FOR_TASK_BOUND[key] < coefficient:
row.cells_by_key[key].active = False
# Prepare list of piars consisting of cell and its points.
tasks = [
(cell, self.get_cell_points_for_row_total(request, cell, key, coefficient))
for key, cell in row.cells_by_key.items()
if row.cells_by_key[key].active
]
# Count only the best 5 tasks
for cell, _ in sorted(tasks, key=lambda x: x[1])[:-5]:
cell.active = False
def calculate_row_round_total(self, res_request, row, cols):
coefficient = self.get_user_coefficient(row.user, res_request.round)
row.round_total = sum(
self.get_cell_points_for_row_total(res_request, cell, key, coefficient)
for key, cell in row.cells_by_key.items()
if cell.active
)
def add_special_row_cells(self, res_request, row, cols):
super(KMSResultsGenerator, self).add_special_row_cells(res_request, row, cols)
coefficient = self.get_user_coefficient(row.user, res_request.round)
row.cells_by_key[COEFFICIENT_COLUMN_KEY] = ResultsCell(str(coefficient))
def create_results_cols(self, res_request):
yield ResultsCol(key=COEFFICIENT_COLUMN_KEY, name="K.")
for col in super(KMSResultsGenerator, self).create_results_cols(res_request):
yield col
class KMSRules(FinishedRounds, CompetitionRules):
RESULTS_TAGS = {
KMS_ALFA: ResultsTag(key=KMS_ALFA, name="Alfa"),
KMS_BETA: ResultsTag(key=KMS_BETA, name="Beta"),
}
RESULTS_GENERATOR_CLASS = KMSResultsGenerator
| [
"datetime.datetime",
"trojsten.submit.models.Submit.objects.filter",
"trojsten.results.representation.ResultsTag",
"django.db.models.Count",
"trojsten.results.representation.ResultsCol",
"trojsten.events.models.EventParticipant.objects.filter",
"django.db.models.Q"
] | [((6526, 6563), 'trojsten.results.representation.ResultsTag', 'ResultsTag', ([], {'key': 'KMS_ALFA', 'name': '"""Alfa"""'}), "(key=KMS_ALFA, name='Alfa')\n", (6536, 6563), False, 'from trojsten.results.representation import ResultsCell, ResultsCol, ResultsTag\n'), ((6583, 6620), 'trojsten.results.representation.ResultsTag', 'ResultsTag', ([], {'key': 'KMS_BETA', 'name': '"""Beta"""'}), "(key=KMS_BETA, name='Beta')\n", (6593, 6620), False, 'from trojsten.results.representation import ResultsCell, ResultsCol, ResultsTag\n'), ((2456, 2513), 'datetime.datetime', 'datetime.datetime', (['school_year', 'SCHOOL_YEAR_END_MONTH', '(28)'], {}), '(school_year, SCHOOL_YEAR_END_MONTH, 28)\n', (2473, 2513), False, 'import datetime\n'), ((6276, 6325), 'trojsten.results.representation.ResultsCol', 'ResultsCol', ([], {'key': 'COEFFICIENT_COLUMN_KEY', 'name': '"""K."""'}), "(key=COEFFICIENT_COLUMN_KEY, name='K.')\n", (6286, 6325), False, 'from trojsten.results.representation import ResultsCell, ResultsCol, ResultsTag\n'), ((4015, 4119), 'trojsten.submit.models.Submit.objects.filter', 'Submit.objects.filter', ([], {'task__round__semester': 'res_request.round.semester', 'task__number__in': '[8, 9, 10]'}), '(task__round__semester=res_request.round.semester,\n task__number__in=[8, 9, 10])\n', (4036, 4119), False, 'from trojsten.submit.models import Submit\n'), ((2770, 2784), 'django.db.models.Count', 'Count', (['"""event"""'], {}), "('event')\n", (2775, 2784), False, 'from django.db.models import Count, Q\n'), ((3440, 3479), 'django.db.models.Count', 'Count', (['"""event__semester"""'], {'distinct': '(True)'}), "('event__semester', distinct=True)\n", (3445, 3479), False, 'from django.db.models import Count, Q\n'), ((2568, 2683), 'trojsten.events.models.EventParticipant.objects.filter', 'EventParticipant.objects.filter', ([], {'event__type__name': 'KMS_MO_FINALS_TYPE', 'event__end_time__lt': 'prev_school_year_end'}), '(event__type__name=KMS_MO_FINALS_TYPE,\n 
event__end_time__lt=prev_school_year_end)\n', (2599, 2683), False, 'from trojsten.events.models import EventParticipant\n'), ((3072, 3228), 'django.db.models.Q', 'Q', ([], {'event__type__name': 'KMS_CAMP_TYPE', 'event__end_time__lt': 'round.end_time', 'event__end_time__year__gte': '(round.end_time.year - KMS_YEARS_OF_CAMPS_HISTORY)'}), '(event__type__name=KMS_CAMP_TYPE, event__end_time__lt=round.end_time,\n event__end_time__year__gte=round.end_time.year - KMS_YEARS_OF_CAMPS_HISTORY\n )\n', (3073, 3228), False, 'from django.db.models import Count, Q\n'), ((3316, 3329), 'django.db.models.Q', 'Q', ([], {'going': '(True)'}), '(going=True)\n', (3317, 3329), False, 'from django.db.models import Count, Q\n'), ((3332, 3368), 'django.db.models.Q', 'Q', ([], {'type': 'EventParticipant.PARTICIPANT'}), '(type=EventParticipant.PARTICIPANT)\n', (3333, 3368), False, 'from django.db.models import Count, Q\n')] |
import obj as obj_lib
import road_artifact
import drive as drive_lib
import utilities as u
class Sensor(obj_lib.Obj):
"""
parent object class for car sensors
returns instruction
driving instruction - (heading, speed)
no driving instruction (no new process or process has completed) - None
arrived at destination - 'arrived'
"""
def __init__(self, pygame, screen):
super().__init__(pygame, screen)
self.classifiers = []
self.init_classifiers()
def add_classifier(self, object):
self.classifiers.append(object(self.pygame, self.screen))
def init_classifiers(self):
pass
def collect(self, status):
raw_data = self.retrieve(status)
for c in self.classifiers:
new_process = c.evaluate(status, raw_data)
if new_process:
return new_process
return None
def retrieve(self, status):
pass
def reset(self):
for c in self.classifiers:
c.reset()
class SensorSimulator(Sensor):
def __init__(self, pygame, screen):
super().__init__(pygame, screen)
def init_classifiers(self):
self.add_classifier(ClassiferSimulatorStationaryDestination)
self.add_classifier(ClassiferSimulatorStationarySignSpeed15)
self.add_classifier(ClassiferSimulatorStationarySignSpeed25)
self.add_classifier(ClassiferSimulatorStationarySignSpeed45)
self.add_classifier(ClassiferSimulatorStationarySignSpeed55)
self.add_classifier(ClassiferSimulatorStationarySignSpeed65)
self.add_classifier(ClassiferSimulatorStationarySignStop)
self.add_classifier(ClassiferSimulatorStationarySignTrafficLight)
self.add_classifier(ClassiferSimulatorMoveVehicle)
self.add_classifier(ClassiferSimulatorMovePedestrian)
def retrieve(self, status):
# return artifacts that the car has not passed
car = status['car']
road = status['location']['road']
artifacts = status['location']['road'].artifacts
car_bottom = car.gnav('midbottom')
visible_artifacts = []
for artifact in artifacts:
artifact_top = artifact.gnav('midtop')
if road.dir_val_exceeds(artifact_top, car_bottom):
visible_artifacts.append(artifact)
return visible_artifacts
class Classifier(obj_lib.Obj):
"""parent object class for sensor classifiers"""
def __init__(self, pygame, screen, activate_distance):
super().__init__(pygame, screen)
self.status = {}
self.activate_distance = activate_distance
self.activate_distance_buffer = 0
def evaluate(self, status, raw_data):
feature = self.extract(status, raw_data)
if not feature:
return None
if not self.activate(status, feature):
return None
return self.get_process(status, feature)
def extract(self, status, raw_data):
# if raw_data handled by classifier, then return structured data from raw data
pass
def status_is_inactive(self, id):
# if status for feature with id has not been set, set it to inactive
# otherwise, return true if feature with id's status is inactive
if id not in self.status:
self.status[id] = 'inactive'
return True
else:
return self.status[id] == 'inactive'
def status_set_active(self, feature):
# set the status for feature active
id = feature['id']
self.status[id] = 'active'
def status_set_inactive(self, feature):
# set the status for feature active
id = feature['id']
self.status[id] = 'inactive'
def status_set_complete(self, feature):
# set the status for feature to complete
id = feature['id']
self.status[id] = 'complete'
def activate(self, status, feature):
# return true if process for feature should be activated, false otherwise
pass
def get_process(self, status, feature):
self.status_set_active(feature)
data = self.get_process_data(status, feature)
return (data, self.process_function)
def get_process_data(self, status, feature):
return {'status': status, 'feature': feature}
def process_function(self, data):
pass
def send_instruction(self, car, heading, speed, text):
car.draw_outline(text)
return car.make_instruction(heading, speed)
def reset(self):
self.status = {}
class ClassifierSimulator(Classifier):
def __init__(self, pygame, screen, artifact_class, activate_distance, activate_pos):
super().__init__(pygame, screen, activate_distance)
self.activate_distance_buffer = 5 # length of car
self.artifact_class = artifact_class
self.activate_pos = activate_pos
self.status = {}
def get_artifact_id(self, artifact):
road_id = artifact.road.id
artifact_id = artifact.id
return (road_id, artifact_id)
def extract(self, status, raw_data):
for artifact in raw_data:
if isinstance(artifact, self.artifact_class):
if self.status_is_inactive(self.get_artifact_id(artifact)):
return self.extract_data(status, artifact)
return None
def extract_data(self, status, artifact):
feature = {'artifact': artifact, 'id': self.get_artifact_id(artifact)}
car = status['car']
road = status['location']['road']
# distance - difference between artifact position and bottom of the car
if self.activate_pos:
pos_artifact = artifact.gnav(self.activate_pos)
else:
# segment position
location_road = artifact.pos_parms['length_attribute_road']
pos_artifact = road.gnav(location_road)
pos_car = car.gnav('top')
feature['distance'] = (pos_artifact - pos_car) * road.graph_dir_length
feature['heading'] = u.heading(car.center, artifact.center)
# same_lane
# * none - artifact is not in a lane
# * True - artifact is in the same lane as the car
# * False - artifact is in a lane, but not the car's lane
artifact_lane_id = artifact.pos_width
if type(artifact_lane_id) is int:
feature['same_lane'] = artifact_lane_id == status['location']['lane'].lane_id
else:
feature['same_lane'] = None
return feature
def activate(self, status, feature):
same_lane = feature['same_lane']
if same_lane is False:
return False
return feature['distance'] <= (self.activate_distance + self.activate_distance_buffer)
def process_complete(self, feature):
pass
def in_collision_buffer(self, car, artifact):
return car.collision_buffer and not car.collision_buffer.is_clear([artifact])
class ClassiferSimulatorStationary(ClassifierSimulator):
def __init__(self, pygame, screen, artifact_class, activate_distance, activate_pos):
super().__init__(pygame, screen, artifact_class, activate_distance, activate_pos)
def process_complete(self, feature):
self.status_set_complete(feature)
class ClassiferSimulatorStationaryDestination(ClassiferSimulatorStationary):
def __init__(self, pygame, screen):
super().__init__(pygame, screen, road_artifact.ObjRoadArtifactStationaryDestination, 0, None)
def get_process_data(self, status, feature):
data = super().get_process_data(status, feature)
wait_time = 3 # seconds
data['complete'] = self.pygame.time.get_ticks() + (wait_time * 1000)
return data
def process_function(self, data):
car = data['status']['car']
ticks = self.pygame.time.get_ticks()
if ticks < data['complete']:
return self.send_instruction(car, None, 0, 'Waiting at destination')
else:
return 'arrived'
class ClassiferSimulatorStationarySignSpeed(ClassiferSimulatorStationary):
def __init__(self, pygame, screen, artifact_class, activate_distance, activate_pos, speed):
self.speed = speed
super().__init__(pygame, screen, artifact_class, activate_distance, activate_pos)
def process_function(self, data):
car = data['status']['car']
if car.speed != self.speed:
car.speed_prev = self.speed # allow temporary speed changes to be reset
return self.send_instruction(car, None, self.speed, f'Setting speed to: {self.speed}')
else:
feature = data['feature']
self.process_complete(feature)
class ClassiferSimulatorStationarySignSpeed15(ClassiferSimulatorStationarySignSpeed):
def __init__(self, pygame, screen):
super().__init__(pygame, screen, road_artifact.ObjRoadArtifactStationarySignSpeed15, 0, None, 15)
class ClassiferSimulatorStationarySignSpeed25(ClassiferSimulatorStationarySignSpeed):
def __init__(self, pygame, screen):
super().__init__(pygame, screen, road_artifact.ObjRoadArtifactStationarySignSpeed25, 0, None, 25)
class ClassiferSimulatorStationarySignSpeed45(ClassiferSimulatorStationarySignSpeed):
def __init__(self, pygame, screen):
super().__init__(pygame, screen, road_artifact.ObjRoadArtifactStationarySignSpeed45, 0, None, 45)
class ClassiferSimulatorStationarySignSpeed55(ClassiferSimulatorStationarySignSpeed):
def __init__(self, pygame, screen):
super().__init__(pygame, screen, road_artifact.ObjRoadArtifactStationarySignSpeed55, 0, None, 55)
class ClassiferSimulatorStationarySignSpeed65(ClassiferSimulatorStationarySignSpeed):
def __init__(self, pygame, screen):
super().__init__(pygame, screen, road_artifact.ObjRoadArtifactStationarySignSpeed65, 0, None, 65)
class ClassiferSimulatorStationarySignStop(ClassiferSimulatorStationary):
def __init__(self, pygame, screen):
super().__init__(pygame, screen, road_artifact.ObjRoadArtifactStationarySignStop, 0, None)
def get_process_data(self, status, feature):
data = super().get_process_data(status, feature)
wait_time = 3 # seconds
data['complete'] = self.pygame.time.get_ticks() + (wait_time * 1000)
return data
def process_function(self, data):
car = data['status']['car']
ticks = self.pygame.time.get_ticks()
if ticks < data['complete']:
return self.send_instruction(car, None, 0, 'Waiting at stop sign')
else:
car.restore_speed()
self.process_complete(data['feature'])
class ClassiferSimulatorStationarySignTrafficLight(ClassiferSimulatorStationary):
def __init__(self, pygame, screen):
super().__init__(pygame, screen, road_artifact.ObjRoadArtifactStationarySignTrafficLight, 0, None)
def process_function(self, data):
car = data['status']['car']
feature = data['feature']
artifact = feature['artifact']
if artifact.red:
return self.send_instruction(car, None, 0, 'Waiting at red traffic light')
else:
car.restore_speed()
self.process_complete(feature)
class ClassifierSimulatorMove(ClassifierSimulator):
def __init__(self, pygame, screen, artifact_class, activate_distance, activate_pos):
super().__init__(pygame, screen, artifact_class, activate_distance, activate_pos)
def process_complete(self, feature):
self.status_set_inactive(feature)
def status_set_inactive(self, data):
feature = data['feature']
return super().status_set_inactive(feature)
class ClassiferSimulatorMoveVehicle(ClassifierSimulatorMove):
def __init__(self, pygame, screen):
super().__init__(pygame, screen, road_artifact.ObjRoadArtifactMoveVehicle, 50, 'bottom')
def get_process_data(self, status, feature):
data = super().get_process_data(status, feature)
car = data['status']['car']
road = data['status']['location']['road']
artifact = feature['artifact']
if road.lane_cnt == 1:
# only 1 lane - match speed of car
data['type'] = 'single_lane'
data['artifact_pos'] = artifact.gnav('bottom')
else:
# select adjoining lane
data['type'] = 'multiple_lane'
car.set_collision_buffer_parms('top-front')
lane_id_current = status['location']['lane'].lane_id
if lane_id_current - 1 >= 0:
lane_id_new = lane_id_current - 1
else:
lane_id_new = lane_id_current + 1
# create drive guide
data['drive'] = drive_lib.DriveArcChangeLane(self.pygame, self.screen, car, road, lane_id_current, lane_id_new)
return data
def process_function(self, data):
def change_lane(data):
car = data['status']['car']
drive = data['drive']
target_heading = drive.get_heading(car)
if target_heading is not None:
return self.send_instruction(car, target_heading, car.speed_prev, f'Changing lane to avoid slow moving vehicle')
else:
return self.status_set_inactive(data)
def slow_down(data):
car = data['status']['car']
feature = data['feature']
artifact = feature['artifact']
distance = abs(artifact.gnav('bottom') - car.gnav('top'))
if self.activate_distance > distance:
pos_prev = data['artifact_pos']
pos_current = artifact.gnav('bottom')
speed = (pos_current - pos_prev) # speed is distance per clock cycle
data['artifact_pos'] = pos_current
return self.send_instruction(car, artifact.heading, speed, 'Reducing speed for slow vehicle')
else:
car.restore_speed()
return self.status_set_inactive(data)
## process_function()
if data['type'] == 'single_lane':
return slow_down(data)
else:
return change_lane(data)
class ClassiferSimulatorMovePedestrian(ClassifierSimulatorMove):
def __init__(self, pygame, screen):
super().__init__(pygame, screen, road_artifact.ObjRoadArtifactMovePedestrian, 18, 'bottom')
def activate(self, status, feature):
if not super().activate(status, feature):
return False
pedestrian = feature['artifact']
car = status['car']
return self.in_collision_buffer(car, pedestrian)
def process_function(self, data):
car = data['status']['car']
pedestrian = data['feature']['artifact']
if self.in_collision_buffer(car, pedestrian):
car.draw_collision_buffer()
return self.send_instruction(car, None, 0, 'Waiting for pedestrian')
else:
car.restore_speed()
return super().status_set_inactive(data)
| [
"utilities.heading",
"drive.DriveArcChangeLane"
] | [((6048, 6086), 'utilities.heading', 'u.heading', (['car.center', 'artifact.center'], {}), '(car.center, artifact.center)\n', (6057, 6086), True, 'import utilities as u\n'), ((12729, 12828), 'drive.DriveArcChangeLane', 'drive_lib.DriveArcChangeLane', (['self.pygame', 'self.screen', 'car', 'road', 'lane_id_current', 'lane_id_new'], {}), '(self.pygame, self.screen, car, road,\n lane_id_current, lane_id_new)\n', (12757, 12828), True, 'import drive as drive_lib\n')] |
#!/usr/bin/env python3
import os
import sys
if os.getuid() != 0:
print ("Must be run as root, sorry.")
sys.exit(-1)
from solcx import install_solc_pragma
install_solc_pragma('>0.5.0 <0.6.0')
print ("Done.")
| [
"os.getuid",
"solcx.install_solc_pragma",
"sys.exit"
] | [((158, 194), 'solcx.install_solc_pragma', 'install_solc_pragma', (['""">0.5.0 <0.6.0"""'], {}), "('>0.5.0 <0.6.0')\n", (177, 194), False, 'from solcx import install_solc_pragma\n'), ((48, 59), 'os.getuid', 'os.getuid', ([], {}), '()\n', (57, 59), False, 'import os\n'), ((106, 118), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (114, 118), False, 'import sys\n')] |
"""
An implementation of a greedy transition-based dependency parser (unlabeled parsing only).
Released under BSD license.
Code is an adapted version of <NAME>'s parser:
https://explosion.ai/blog/parsing-english-in-python
-- change: move core logic to separate myparserutils file
modified by bplank, 03/2017
"""
#### Helper classes - do not modify ####
import os
from collections import defaultdict
import pickle
## Global
SHIFT = 0; RIGHT = 1; LEFT = 2
MOVES = (SHIFT, RIGHT, LEFT)
class DefaultList(list):
"""A list that returns a default value if index out of bounds."""
def __init__(self, default=None):
self.default = default
list.__init__(self)
def __getitem__(self, index):
try:
return list.__getitem__(self, index)
except IndexError:
return self.default
class Parse(object):
"""
Structure that keeps current set of edges/arcs A
lefts: left-branching edges
rights: right-branching edges
"""
def __init__(self, n):
self.n = n
self.heads = [None] * (n-1)
self.labels = [None] * (n-1)
self.lefts = []
self.rights = []
for i in range(n+1):
self.lefts.append(DefaultList(0))
self.rights.append(DefaultList(0))
def add(self, head, child, label=None):
self.heads[child] = head
self.labels[child] = label
if child < head:
self.lefts[head].append(child)
else:
self.rights[head].append(child)
#### End helper classes ####
class Perceptron(object):
"""
Learn weights for the features using the Perceptron algorithm
"""
def __init__(self, classes=None):
self.classes = classes
# Each feature gets its own weight vector, so weights is a dict-of-arrays
self.weights = {}
# The accumulated values, for the averaging. These will be keyed by
# feature/clas tuples
self._totals = defaultdict(int)
# The last time the feature was changed, for the averaging. Also
# keyed by feature/clas tuples
# (tstamps is short for timestamps)
self._tstamps = defaultdict(int)
# Number of instances seen
self.i = 0
def predict(self, features):
'''Dot-product the features and current weights and return the best class.'''
scores = self.score(features)
# Do a secondary alphabetic sort, for stability
return max(self.classes, key=lambda clas: (scores[clas], clas))
def score(self, features):
"""
Scores = features \cdot weights
"""
all_weights = self.weights
scores = dict((clas, 0) for clas in self.classes)
for feat, value in features:
if value == 0:
continue
if feat not in all_weights:
continue
weights = all_weights[feat]
for clas, weight in list(weights.items()):
scores[clas] += value * weight
return scores
def update(self, truth, guess, features):
"""
Update parameters
"""
def upd_feat(c, f, w, v):
param = (f, c)
self._totals[param] += (self.i - self._tstamps[param]) * w
self._tstamps[param] = self.i
self.weights[f][c] = w + v
self.i += 1
if truth == guess:
return None
for f, val in features:
weights = self.weights.setdefault(f, {})
upd_feat(truth, f, weights.get(truth, 0.0), 1.0)
upd_feat(guess, f, weights.get(guess, 0.0), -1.0)
def average_weights(self):
for feat, weights in list(self.weights.items()):
new_feat_weights = {}
for clas, weight in list(weights.items()):
param = (feat, clas)
total = self._totals[param]
total += (self.i - self._tstamps[param]) * weight
averaged = round(total / float(self.i), 3)
if averaged:
new_feat_weights[clas] = averaged
self.weights[feat] = new_feat_weights
def save(self, path):
print("Saving model to %s" % path)
pickle.dump(self.weights, open(path, 'wb'))
def load(self, path):
self.weights = pickle.load(open(path, 'rb')) ## fixed as gives an error in python3.8 GB
| [
"collections.defaultdict"
] | [((1973, 1989), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (1984, 1989), False, 'from collections import defaultdict\n'), ((2170, 2186), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (2181, 2186), False, 'from collections import defaultdict\n')] |
# -*- coding: utf-8 -*-
import json
import threading
import time
from abc import abstractmethod
from typing import Optional
from dmtp.mtp import tlv
from dmtp import mtp
import dmtp
import stun
from .manager import ContactManager, FieldValueEncoder, Session
def time_string(timestamp: int) -> str:
    """Format a Unix timestamp as 'yy-mm-dd HH:MM:SS' in local time."""
    return time.strftime('%y-%m-%d %H:%M:%S', time.localtime(timestamp))
"""
DMTP Client
~~~~~~~~~~~
"""
class DMTPClientHandler:
    """Callback interface for DMTPClient: receives parsed commands and messages."""

    @abstractmethod
    def process_command(self, cmd: dmtp.Command, source: tuple) -> bool:
        """Handle an incoming DMTP command arriving from *source*."""

    @abstractmethod
    def process_message(self, msg: dmtp.Message, source: tuple):
        """Handle an incoming DMTP message arriving from *source*."""
class DMTPClient(dmtp.Client):
def __init__(self, port: int, host: str='127.0.0.1'):
super().__init__(local_address=(host, port))
self.__server_address = None
self.nat = 'Unknown'
# database for location of contacts
db = self._create_contact_manager()
db.identifier = 'moky-%d' % port
self.__database = db
self.delegate = db
# delegate for show message
self.handler: DMTPClientHandler = None
# punching threads
self.__punching = {}
def _create_contact_manager(self) -> ContactManager:
db = ContactManager(peer=self.peer)
db.identifier = 'anyone@anywhere'
return db
@property
def server_address(self) -> Optional[tuple]:
return self.__server_address
@server_address.setter
def server_address(self, value: tuple):
self.__server_address = value
@property
def identifier(self) -> str:
return self.__database.identifier
@identifier.setter
def identifier(self, value: str):
self.__database.identifier = value
def connect(self, remote_address: tuple): # -> Optional[dmtp.Connection]:
print('connecting to %s' % str(remote_address))
conn = self.peer.connect(remote_address=remote_address)
if conn is not None:
local_address = self.peer.local_address
self.__keep_punching(destination=remote_address, source=local_address)
return conn
#
# Client actions
#
def say_hello(self, destination: tuple) -> bool:
if super().say_hello(destination=destination):
return True
cmd = dmtp.HelloCommand.new(identifier=self.identifier)
print('send cmd: %s' % cmd)
self.send_command(cmd=cmd, destination=destination)
return True
def call(self, identifier: str) -> bool:
cmd = dmtp.CallCommand.new(identifier=identifier)
print('send cmd: %s' % cmd)
self.send_command(cmd=cmd, destination=self.__server_address)
return True
def login(self, identifier: str, server_address: tuple=None):
if server_address is None:
server_address = self.server_address
else:
self.__server_address = server_address
self.peer.connect(remote_address=server_address)
assert server_address is not None, 'server address not set'
self.__database.identifier = identifier
self.say_hello(destination=server_address)
def ping(self, remote_address: tuple, local_address: tuple=None):
res = self.peer.hub.send(data=b'PING', destination=remote_address, source=local_address)
return res == 4
def __keep_punching(self, destination: tuple, source: tuple):
t = self.__punching.get(destination)
if t is None:
print('start punching for %s ...' % str(destination))
t = PunchThread(dmtp_client=self, remote_address=destination, local_address=source)
self.__punching[destination] = t
t.start()
def __stop_punching(self, destination: tuple):
t = self.__punching.get(destination)
if t is not None:
assert isinstance(t, PunchThread), 'punching thread error: %s' % t
print('stop punching for %s' % str(destination))
t.stop()
self.__punching.pop(destination)
def process_command(self, cmd: dmtp.Command, source: tuple) -> bool:
print('received cmd from %s:\n\t%s' % (source, cmd))
if self.handler is not None:
self.handler.process_command(cmd=cmd, source=source)
return super().process_command(cmd=cmd, source=source)
def process_message(self, msg: dmtp.Message, source: tuple) -> bool:
print('received msg from %s:\n\t%s' % (source, msg))
if self.handler is not None:
self.handler.process_message(msg=msg, source=source)
# return super().process_message(msg=msg, source=source)
return True
def send_command(self, cmd: dmtp.Command, destination: tuple) -> mtp.Departure:
print('sending cmd to %s:\n\t%s' % (destination, cmd))
return super().send_command(cmd=cmd, destination=destination)
def send_message(self, msg: dmtp.Message, destination: tuple) -> mtp.Departure:
print('sending msg to %s:\n\t%s' % (destination, json.dumps(msg, cls=FieldValueEncoder)))
return super().send_message(msg=msg, destination=destination)
def get_sessions(self, identifier: str) -> list:
"""
Get connected locations for user ID
:param identifier: user ID
:return: connected locations and addresses
"""
sessions = []
assert self.delegate is not None, 'location delegate not set'
locations = self.delegate.get_locations(identifier=identifier)
now = int(time.time())
for loc in locations:
assert isinstance(loc, dmtp.LocationValue), 'location error: %s' % loc
source_address = loc.source_address
if source_address is not None:
conn = self.peer.get_connection(remote_address=source_address)
if conn is not None and conn.is_connected(now=now):
sessions.append(Session(location=loc, address=source_address))
continue
mapped_address = loc.mapped_address
if mapped_address is not None:
conn = self.peer.get_connection(remote_address=mapped_address)
if conn is not None and conn.is_connected(now=now):
sessions.append(Session(location=loc, address=mapped_address))
continue
return sessions
#
# Send
#
def send_text(self, receiver: str, msg: str) -> Optional[dmtp.Message]:
sessions = self.get_sessions(identifier=receiver)
if len(sessions) == 0:
print('user (%s) not login ...' % receiver)
# ask the server to help building a connection
self.call(identifier=receiver)
return None
content = msg.encode('utf-8')
msg = dmtp.Message.new(info={
'sender': self.identifier,
'receiver': receiver,
'time': int(time.time()),
'data': content,
})
for item in sessions:
assert isinstance(item, Session), 'session error: %s' % item
print('send msg to %s:\n\t%s' % (item.address, msg))
self.send_message(msg=msg, destination=item.address)
return msg
#
# PeerHandler
#
    def received_command(self, cmd: tlv.Data, source: tuple, destination: tuple) -> bool:
        """A command arrived from *source*, so hole punching succeeded — stop the punch thread."""
        self.__stop_punching(destination=source)
        return super().received_command(cmd=cmd, source=source, destination=destination)
    def received_message(self, msg: tlv.Data, source: tuple, destination: tuple) -> bool:
        """A message arrived from *source*, so hole punching succeeded — stop the punch thread."""
        self.__stop_punching(destination=source)
        return super().received_message(msg=msg, source=source, destination=destination)
class PunchThread(threading.Thread):
    """Background thread that keeps a UDP hole open by pinging a peer.

    Pings *remote_address* every 0.5 s for at most 60 seconds (or until
    stop() is called), then greets the peer with a hello command.
    """

    def __init__(self, dmtp_client: DMTPClient, remote_address: tuple, local_address: tuple=None):
        super().__init__()
        self.running = True
        self.__client = dmtp_client
        self.__remote = remote_address
        self.__local = local_address

    def stop(self):
        # signal run() to leave its ping loop
        self.running = False

    def run(self):
        client = self.__client
        remote = self.__remote
        local = self.__local
        now = int(time.time())
        deadline = now + 60
        while self.running and now < deadline:
            when = time_string(now)
            print('[%s] sending "PING" to %s' % (when, remote))
            client.ping(remote_address=remote, local_address=local)
            time.sleep(0.5)
            now = int(time.time())
        # say HI after ping
        client.say_hello(destination=remote)
"""
STUN Client
~~~~~~~~~~~
"""
class STUNClientHandler:
    """Callback interface: receives human-readable status lines from STUNClient."""
    @abstractmethod
    def feedback(self, msg: str):
        pass
class STUNClient(stun.Client):
    def __init__(self, host: str, port: int):
        super().__init__(host=host, port=port)
        # remote STUN server address; None until the caller configures it
        self.server_address = None
        # optional feedback sink for info() output
        self.handler: STUNClientHandler = None
        # self.retries = 5
def info(self, msg: str):
when = time_string(int(time.time()))
message = '[%s] %s' % (when, msg)
print(message)
if self.handler is not None:
self.handler.feedback(msg=message)
| [
"dmtp.HelloCommand.new",
"time.strftime",
"json.dumps",
"time.sleep",
"dmtp.CallCommand.new",
"time.localtime",
"time.time"
] | [((320, 345), 'time.localtime', 'time.localtime', (['timestamp'], {}), '(timestamp)\n', (334, 345), False, 'import time\n'), ((357, 403), 'time.strftime', 'time.strftime', (['"""%y-%m-%d %H:%M:%S"""', 'time_array'], {}), "('%y-%m-%d %H:%M:%S', time_array)\n", (370, 403), False, 'import time\n'), ((2349, 2398), 'dmtp.HelloCommand.new', 'dmtp.HelloCommand.new', ([], {'identifier': 'self.identifier'}), '(identifier=self.identifier)\n', (2370, 2398), False, 'import dmtp\n'), ((2575, 2618), 'dmtp.CallCommand.new', 'dmtp.CallCommand.new', ([], {'identifier': 'identifier'}), '(identifier=identifier)\n', (2595, 2618), False, 'import dmtp\n'), ((5556, 5567), 'time.time', 'time.time', ([], {}), '()\n', (5565, 5567), False, 'import time\n'), ((8276, 8287), 'time.time', 'time.time', ([], {}), '()\n', (8285, 8287), False, 'import time\n'), ((8542, 8557), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (8552, 8557), False, 'import time\n'), ((8580, 8591), 'time.time', 'time.time', ([], {}), '()\n', (8589, 8591), False, 'import time\n'), ((9101, 9112), 'time.time', 'time.time', ([], {}), '()\n', (9110, 9112), False, 'import time\n'), ((5055, 5093), 'json.dumps', 'json.dumps', (['msg'], {'cls': 'FieldValueEncoder'}), '(msg, cls=FieldValueEncoder)\n', (5065, 5093), False, 'import json\n'), ((6953, 6964), 'time.time', 'time.time', ([], {}), '()\n', (6962, 6964), False, 'import time\n')] |
from rest_framework import serializers
from projects.models import (
Project,
ProjectVolunteers,
ProjectVolunteersRegistration,
ProjectAttendees,
ProjectAttendeesRegistration,
ProjectDiscussion,
ProjectAnswerDiscussion,
ProjectHub,
)
class ProjectVolunteersRegistrationSerializer(serializers.HyperlinkedModelSerializer):
    """Serializer for a volunteer signing up for a ProjectVolunteers slot."""

    class Meta:
        model = ProjectVolunteersRegistration
        fields = ('url', 'profile', 'project_volunteers', 'project_volunteers_ref')

    def create(self, validated_data):
        """Create the registration and refresh the slot's denormalized counter."""
        slot = ProjectVolunteers.objects.get(pk=validated_data['project_volunteers_ref'])
        registration = ProjectVolunteersRegistration.objects.create(
            project_volunteers=slot,
            **validated_data
        )
        # keep the cached `registered` count in sync with the actual rows
        slot.registered = ProjectVolunteersRegistration.objects.filter(
            project_volunteers=slot
        ).count()
        slot.save()
        return registration
class ProjectVolunteersSerializer(serializers.HyperlinkedModelSerializer):
    """Serializer for a volunteer slot of a project, with its nested registrations."""
    volunteers_registration = ProjectVolunteersRegistrationSerializer(many=True, read_only=True)
    class Meta:
        model = ProjectVolunteers
        fields = (
            'url',
            'id',
            'project',
            'role',
            'description',
            'seats',
            'registered',
            'minimum_registration',
            'volunteers_registration',
        )
        # `registered` is maintained by the registration serializer, not by clients
        read_only_fields = ('registered', 'project', 'id')
class ProjectAttendeesRegistrationSerializer(serializers.HyperlinkedModelSerializer):
    """Serializer for an attendee signing up for a ProjectAttendees slot."""

    class Meta:
        model = ProjectAttendeesRegistration
        fields = ('url', 'profile', 'project_attendees', 'project_attendees_ref')

    def create(self, validated_data):
        """Create the registration and refresh the slot's denormalized counter."""
        slot = ProjectAttendees.objects.get(pk=validated_data['project_attendees_ref'])
        registration = ProjectAttendeesRegistration.objects.create(project_attendees=slot, **validated_data)
        # keep the cached `registered` count in sync with the actual rows
        slot.registered = ProjectAttendeesRegistration.objects.filter(project_attendees=slot).count()
        slot.save()
        return registration
class ProjectAttendeesSerializer(serializers.HyperlinkedModelSerializer):
    """Serializer for the attendee capacity of a project, with its nested registrations."""
    attendees_registration = ProjectAttendeesRegistrationSerializer(many=True, read_only=True)
    class Meta:
        model = ProjectAttendees
        fields = (
            'url',
            'id',
            'project',
            'seats',
            'registered',
            'attendees_registration',
            'minimum_registration',
        )
        # `registered` is maintained by the registration serializer, not by clients
        read_only_fields = ('registered', 'project',)
class ProjectAnswerDiscussionSerializer(serializers.HyperlinkedModelSerializer):
    """Serializer for a reply posted under a project discussion thread."""

    class Meta:
        model = ProjectAnswerDiscussion
        fields = ('url', 'id', 'discussion_ref', 'discussion', 'text', 'profile', 'created', 'updated')
        read_only_fields = ('discussion', 'profile')

    def create(self, validated_data):
        """Attach the new answer to the thread referenced by `discussion_ref`."""
        thread = ProjectDiscussion.objects.get(pk=validated_data['discussion_ref'])
        return ProjectAnswerDiscussion.objects.create(discussion=thread, **validated_data)
class ProjectDiscussionSerializer(serializers.HyperlinkedModelSerializer):
    """Serializer for a discussion thread on a project, with its nested answers."""

    answer_discussion_project = ProjectAnswerDiscussionSerializer(many=True, read_only=True)

    class Meta:
        model = ProjectDiscussion
        fields = (
            'url',
            'id',
            'project',
            'project_ref',
            'title',
            'text',
            'profile',
            'created',
            'updated',
            'answer_discussion_project',
        )
        read_only_fields = ('profile', 'project', 'id')

    def create(self, validated_data):
        """Attach the new discussion to the project referenced by `project_ref`."""
        parent = Project.objects.get(pk=validated_data['project_ref'])
        return ProjectDiscussion.objects.create(project=parent, **validated_data)
class ProjectSerializer(serializers.HyperlinkedModelSerializer):
    """Full project representation, including attendees, volunteers and discussions."""

    attendees = ProjectAttendeesSerializer()
    volunteers = ProjectVolunteersSerializer(many=True)
    # NOTE: a plain serializers.HyperlinkedRelatedField(many=True,
    # view_name='discussion_project', read_only=True) used to raise here;
    # the nested serializer below is the working replacement.
    discussion_project = ProjectDiscussionSerializer(many=True, read_only=True)

    class Meta:
        model = Project
        fields = ('url', 'id', 'name', 'start',
                  'end', 'description', 'category',
                  'sub_category', 'oth_category', 'oth_sub_cat','place_name', 'number', 'street',
                  'postal_code', 'city', 'organizer', 'created',
                  'updated', 'project_type', 'attendees',
                  'volunteers', 'discussion_project')
        read_only_fields = ('organizer', 'id')

    def create(self, validated_data):
        """Create the project plus its attendee/volunteer capacity records.

        `project_type` selects what gets created:
        'CO' -> attendees only, 'CP' -> volunteers only, anything else -> both.
        """
        attendees_data = validated_data.pop('attendees')
        volunteers_data = validated_data.pop('volunteers')
        project = Project.objects.create(**validated_data)
        project_type = validated_data['project_type']
        if project_type != 'CP':
            ProjectAttendees.objects.create(project=project, **attendees_data)
        if project_type != 'CO':
            for volunteer_data in volunteers_data:
                ProjectVolunteers.objects.create(project=project, **volunteer_data)
        return project
class ProjectShortSerializer(serializers.HyperlinkedModelSerializer):
    """Compact project representation for list views."""
    class Meta:
        model = Project
        fields = ('url', 'id', 'name', 'start', 'created', 'updated',)
class ProjectHubSerializer(serializers.HyperlinkedModelSerializer):
    """Project plus geolocation data (distance and coordinates) for hub views."""
    project = ProjectSerializer()
    class Meta:
        model = ProjectHub
        fields = ('project', 'distance_km', 'lat', 'lng')
| [
"projects.models.ProjectDiscussion.objects.create",
"projects.models.ProjectVolunteersRegistration.objects.filter",
"projects.models.ProjectAnswerDiscussion.objects.create",
"projects.models.ProjectAttendees.objects.create",
"projects.models.ProjectAttendeesRegistration.objects.create",
"projects.models.P... | [((570, 644), 'projects.models.ProjectVolunteers.objects.get', 'ProjectVolunteers.objects.get', ([], {'pk': "validated_data['project_volunteers_ref']"}), "(pk=validated_data['project_volunteers_ref'])\n", (599, 644), False, 'from projects.models import Project, ProjectVolunteers, ProjectVolunteersRegistration, ProjectAttendees, ProjectAttendeesRegistration, ProjectDiscussion, ProjectAnswerDiscussion, ProjectHub\n'), ((668, 774), 'projects.models.ProjectVolunteersRegistration.objects.create', 'ProjectVolunteersRegistration.objects.create', ([], {'project_volunteers': 'project_volunteers'}), '(project_volunteers=\n project_volunteers, **validated_data)\n', (712, 774), False, 'from projects.models import Project, ProjectVolunteers, ProjectVolunteersRegistration, ProjectAttendees, ProjectAttendeesRegistration, ProjectDiscussion, ProjectAnswerDiscussion, ProjectHub\n'), ((1882, 1954), 'projects.models.ProjectAttendees.objects.get', 'ProjectAttendees.objects.get', ([], {'pk': "validated_data['project_attendees_ref']"}), "(pk=validated_data['project_attendees_ref'])\n", (1910, 1954), False, 'from projects.models import Project, ProjectVolunteers, ProjectVolunteersRegistration, ProjectAttendees, ProjectAttendeesRegistration, ProjectDiscussion, ProjectAnswerDiscussion, ProjectHub\n'), ((1978, 2081), 'projects.models.ProjectAttendeesRegistration.objects.create', 'ProjectAttendeesRegistration.objects.create', ([], {'project_attendees': 'project_attendees'}), '(project_attendees=\n project_attendees, **validated_data)\n', (2021, 2081), False, 'from projects.models import Project, ProjectVolunteers, ProjectVolunteersRegistration, ProjectAttendees, ProjectAttendeesRegistration, ProjectDiscussion, ProjectAnswerDiscussion, ProjectHub\n'), ((3137, 3203), 'projects.models.ProjectDiscussion.objects.get', 'ProjectDiscussion.objects.get', ([], {'pk': "validated_data['discussion_ref']"}), "(pk=validated_data['discussion_ref'])\n", (3166, 3203), False, 'from 
projects.models import Project, ProjectVolunteers, ProjectVolunteersRegistration, ProjectAttendees, ProjectAttendeesRegistration, ProjectDiscussion, ProjectAnswerDiscussion, ProjectHub\n'), ((3221, 3313), 'projects.models.ProjectAnswerDiscussion.objects.create', 'ProjectAnswerDiscussion.objects.create', ([], {'discussion': 'project_discussion'}), '(discussion=project_discussion, **\n validated_data)\n', (3259, 3313), False, 'from projects.models import Project, ProjectVolunteers, ProjectVolunteersRegistration, ProjectAttendees, ProjectAttendeesRegistration, ProjectDiscussion, ProjectAnswerDiscussion, ProjectHub\n'), ((3932, 3985), 'projects.models.Project.objects.get', 'Project.objects.get', ([], {'pk': "validated_data['project_ref']"}), "(pk=validated_data['project_ref'])\n", (3951, 3985), False, 'from projects.models import Project, ProjectVolunteers, ProjectVolunteersRegistration, ProjectAttendees, ProjectAttendeesRegistration, ProjectDiscussion, ProjectAnswerDiscussion, ProjectHub\n'), ((4011, 4078), 'projects.models.ProjectDiscussion.objects.create', 'ProjectDiscussion.objects.create', ([], {'project': 'project'}), '(project=project, **validated_data)\n', (4043, 4078), False, 'from projects.models import Project, ProjectVolunteers, ProjectVolunteersRegistration, ProjectAttendees, ProjectAttendeesRegistration, ProjectDiscussion, ProjectAnswerDiscussion, ProjectHub\n'), ((5161, 5201), 'projects.models.Project.objects.create', 'Project.objects.create', ([], {}), '(**validated_data)\n', (5183, 5201), False, 'from projects.models import Project, ProjectVolunteers, ProjectVolunteersRegistration, ProjectAttendees, ProjectAttendeesRegistration, ProjectDiscussion, ProjectAnswerDiscussion, ProjectHub\n'), ((5265, 5335), 'projects.models.ProjectAttendees.objects.create', 'ProjectAttendees.objects.create', ([], {'project': 'new_project'}), '(project=new_project, **attendees_data)\n', (5296, 5335), False, 'from projects.models import Project, ProjectVolunteers, 
ProjectVolunteersRegistration, ProjectAttendees, ProjectAttendeesRegistration, ProjectDiscussion, ProjectAnswerDiscussion, ProjectHub\n'), ((820, 908), 'projects.models.ProjectVolunteersRegistration.objects.filter', 'ProjectVolunteersRegistration.objects.filter', ([], {'project_volunteers': 'project_volunteers'}), '(project_volunteers=\n project_volunteers)\n', (864, 908), False, 'from projects.models import Project, ProjectVolunteers, ProjectVolunteersRegistration, ProjectAttendees, ProjectAttendeesRegistration, ProjectDiscussion, ProjectAnswerDiscussion, ProjectHub\n'), ((2093, 2178), 'projects.models.ProjectAttendeesRegistration.objects.filter', 'ProjectAttendeesRegistration.objects.filter', ([], {'project_attendees': 'project_attendees'}), '(project_attendees=project_attendees\n )\n', (2136, 2178), False, 'from projects.models import Project, ProjectVolunteers, ProjectVolunteersRegistration, ProjectAttendees, ProjectAttendeesRegistration, ProjectDiscussion, ProjectAnswerDiscussion, ProjectHub\n'), ((5554, 5624), 'projects.models.ProjectAttendees.objects.create', 'ProjectAttendees.objects.create', ([], {'project': 'new_project'}), '(project=new_project, **attendees_data)\n', (5585, 5624), False, 'from projects.models import Project, ProjectVolunteers, ProjectVolunteersRegistration, ProjectAttendees, ProjectAttendeesRegistration, ProjectDiscussion, ProjectAnswerDiscussion, ProjectHub\n'), ((5456, 5527), 'projects.models.ProjectVolunteers.objects.create', 'ProjectVolunteers.objects.create', ([], {'project': 'new_project'}), '(project=new_project, **volunteer_data)\n', (5488, 5527), False, 'from projects.models import Project, ProjectVolunteers, ProjectVolunteersRegistration, ProjectAttendees, ProjectAttendeesRegistration, ProjectDiscussion, ProjectAnswerDiscussion, ProjectHub\n'), ((5692, 5763), 'projects.models.ProjectVolunteers.objects.create', 'ProjectVolunteers.objects.create', ([], {'project': 'new_project'}), '(project=new_project, **volunteer_data)\n', 
(5724, 5763), False, 'from projects.models import Project, ProjectVolunteers, ProjectVolunteersRegistration, ProjectAttendees, ProjectAttendeesRegistration, ProjectDiscussion, ProjectAnswerDiscussion, ProjectHub\n')] |
"""
@Author: <EMAIL>
@Created: 2021/3/10
@Application: 作用在mongodb去重
"""
import pymongo
from NewsCrawler.settings import MONGO_URL
# Shared MongoDB client for the whole script (connection pool capped at 1024).
client = pymongo.MongoClient(MONGO_URL, maxPoolSize=1024)
def find_duplicate(collection, out_col_name=None):
    """Write the duplicate groups of *collection* into a scratch collection.

    Documents are grouped by (title, published, link); every key that occurs
    more than once is emitted — with the duplicated ``_id`` values collected
    in ``uniqueIds`` and the repeat count in ``count`` — into the collection
    named *out_col_name* via the ``$out`` stage.

    :param collection: source pymongo collection to scan
    :param out_col_name: name of the output collection; defaults to the
        module-level ``tmp_colName`` for backward compatibility (previously
        the global was the only option, which broke when this module was
        imported rather than run as a script)
    """
    if out_col_name is None:
        out_col_name = tmp_colName
    collection.aggregate([
        {'$group': {
            '_id': {'title': "$title", 'published': "$published", "link": "$link"},  # dedup key
            'uniqueIds': {'$addToSet': "$_id"},  # ids of the duplicated documents
            'count': {'$sum': 1}  # number of occurrences
        }},
        {'$match': {
            'count': {'$gt': 1}  # keep only keys that occur more than once
        }},
        {'$out': out_col_name}  # materialize results into the scratch collection
    ], allowDiskUse=True)
def del_dup(tmp_collection, source_collection):
    """Delete the duplicates listed in *tmp_collection* from *source_collection*.

    For every duplicate group the first ``_id`` is kept and the remaining
    documents are removed from the source collection; the scratch collection
    is dropped afterwards.
    """
    for group in tmp_collection.find():
        # keep the first id, delete the documents behind the remaining ids
        for dup_id in group['uniqueIds'][1:]:
            source_collection.delete_one({"_id": dup_id})
    # fix: drop the collection that was passed in — the original dropped the
    # module-level global `tmp_col`, which broke any caller outside __main__
    tmp_collection.drop()
if __name__ == '__main__':
    tmp_colName = "tmp_news"  # name of the scratch collection shared by both passes
    # all news collections to deduplicate
    col_list = ['caijing', 'ce', 'eastmoney', 'hexun', 'news', 'newsqq', 'sina', 'wangyi']
    for i in col_list:
        col = client['news'][i]
        tmp_col = client['news'][tmp_colName]
        find_duplicate(col)
        del_dup(tmp_col, col)
"pymongo.MongoClient"
] | [((141, 189), 'pymongo.MongoClient', 'pymongo.MongoClient', (['MONGO_URL'], {'maxPoolSize': '(1024)'}), '(MONGO_URL, maxPoolSize=1024)\n', (160, 189), False, 'import pymongo\n')] |
import codecs
# process-local backing store for the 'memory' storage backend
__memory_storage = {}
def file_read(path):
    """Return the full contents of the UTF-8 text file at *path*."""
    with codecs.open(path, 'r', 'utf-8') as handle:
        return handle.read()
def file_write(path, text):
    """Write *text* to *path* encoded as UTF-8, propagating the writer's return value."""
    with codecs.open(path, 'w', 'utf-8') as handle:
        return handle.write(text)
def memory_write(key, data):
    """Store *data* under *key* in the in-process memory backend."""
    __memory_storage[key] = data
# Registry of storage backends; each backend maps 'read'/'write' to a callable.
STORAGES = {
    'file': {
        'read': file_read,
        'write': file_write
    },
    'memory': {
        'read': lambda x: __memory_storage[x],
        'write': memory_write
    }
}
| [
"codecs.open"
] | [((68, 99), 'codecs.open', 'codecs.open', (['path', '"""r"""', '"""utf-8"""'], {}), "(path, 'r', 'utf-8')\n", (79, 99), False, 'import codecs\n'), ((168, 199), 'codecs.open', 'codecs.open', (['path', '"""w"""', '"""utf-8"""'], {}), "(path, 'w', 'utf-8')\n", (179, 199), False, 'import codecs\n')] |
import torch
import torchaudio
import numpy as np
from torch.nn.functional import normalize
from audio_utils.common.utilities import _check_transform_input
class BaseAudioParser(object):
    """Common base for audio parsers.

    *mode* selects how many dimensions inputs are expected to carry:
    'after_batch' operates on batched input (3-D), 'per_instance' on a
    single example (2-D).
    """

    def __init__(self, mode="after_batch"):
        super().__init__()
        assert mode in ['after_batch', "per_instance"]
        # batched input keeps a leading batch dimension; per-instance drops it
        self.desired_dims = 3 if mode == "after_batch" else 2

    def check_sample(self, audio_sample):
        """Delegate dimensionality checking of *audio_sample* to the shared helper."""
        return _check_transform_input(audio_sample, desired_dims=self.desired_dims)

    def __call__(self, audio):
        raise NotImplementedError("Abstract method called")
class SpectrogramParser(BaseAudioParser):
    """Compute complex-valued STFT spectrograms.

    The output is always a complex tensor (power=None, normalized=False),
    which keeps phase information available for downstream transforms such
    as SpecAugment; convert to magnitude/power afterwards (e.g. with a
    SpectrogramPostProcess step).
    """

    def __init__(self,
                 window_length=400,
                 hop_length=160,
                 n_fft=400,
                 center=True,
                 window_fn=torch.hann_window,
                 pad=0,
                 pad_mode="reflect",
                 mode="after_batch"):
        super(SpectrogramParser, self).__init__(mode)
        # STFT geometry
        self.window_length = window_length
        self.hop_length = hop_length
        self.n_fft = n_fft
        self.center = center
        # padding configuration
        self.pad = pad
        self.pad_mode = pad_mode
        # analysis window, materialized once up front
        self.window = window_fn(window_length)
        self.return_complex = True

    def __call__(self, batch):
        """
        :param batch: float array/tensor of shape (N, T) or (T,) for a single input
        :return: tensor of dtype complex
        """
        samples = self.check_sample(batch)
        return torchaudio.functional.spectrogram(
            samples,
            self.pad,
            self.window,
            self.n_fft,
            self.hop_length,
            self.window_length,
            power=None,        # power other than None would force a real-valued spec
            normalized=False,  # normalization would force a real-valued spec
            center=self.center,
            pad_mode=self.pad_mode,
            onesided=True,
            return_complex=True
        )
class SpectrogramPostProcess:
    """Convert a (complex) spectrogram to magnitude/power, optionally log-compressed.

    Steps applied in order: window-energy normalization (optional, in place),
    magnitude/power via *power*, clamp + log compression (optional), and a
    batch-dim squeeze when *mode* is 'per_instance'.
    """

    def __init__(self,
                 window_length=400,
                 window_fn=torch.hann_window,
                 power=2,
                 normalize=False,
                 log_compress=True,
                 mode="after_batch",
                 mel_spec_override=False):
        super(SpectrogramPostProcess, self).__init__()
        self.power = power
        self.normalize = normalize
        self.window = window_fn(window_length)
        self.log_compress = log_compress
        self.mode = mode
        if mel_spec_override:
            # a downstream mel stage does its own log, so disable it here
            self.log_compress = False
            print("mel_spec_override flag is set, forcing log_compress to false")
        if log_compress:
            print("log_compression is set to True in SpectrogramPostProcess. If using MelScale down the line, disable it")

    def __call__(self, batch):
        """
        :param batch: float tensor of shape (N, F, T)
        :return: processed spectrogram tensor
        """
        spec = batch
        if self.normalize:
            # in-place scale by the window energy
            spec /= self.window.pow(2.).sum().sqrt()
        if self.power:
            spec = spec.abs() if self.power == 1.0 else spec.abs().pow(self.power)
        if self.log_compress:
            spec = torch.log(torch.clamp(spec, min=1e-8, max=1e8))
        if self.mode == "per_instance":
            spec = spec.squeeze(0)
        return spec
class ToMelScale(BaseAudioParser):
    """Map a (power) spectrogram onto the mel scale and log-compress it.

    Intended to run after SpectrogramParser/SpectrogramPostProcess so that
    transforms operating on linear spectrograms (e.g. time stretching) can
    be applied before the mel projection.
    """
    def __init__(self,
                 sample_rate=16000,
                 # window_length=400,
                 hop_length=160,
                 n_fft=1024,
                 n_mels=64,
                 fmin=60.0,
                 fmax=7800.0,
                 norm=None,
                 # center=True,
                 mel_scale="htk"):
        super(ToMelScale, self).__init__()
        self.sample_rate = sample_rate
        # self.window_length = window_length
        self.hop_length = hop_length
        self.n_fft = n_fft
        self.n_mels = n_mels
        self.fmin = fmin
        self.fmax = fmax
        # self.center = center
        # positional args: n_mels, sample_rate, f_min, f_max, n_stft, norm, mel_scale
        self.mel_scale = torchaudio.transforms.MelScale(
            self.n_mels,
            self.sample_rate,
            self.fmin,
            self.fmax,
            self.n_fft // 2 + 1,
            norm,
            mel_scale
        )
    def __call__(self, batch):
        """
        Accepts output of SpectrogramParser -> ... -> SpectrogramPostProcess and converts it to MelScale
        This pipeline allows us to use torchaudio.transforms.TimeStretching
        :param batch: spectrogram tensor with n_fft // 2 + 1 frequency bins
        :return: log-compressed mel spectrogram tensor
        """
        batch = self.mel_scale(batch)
        # clamp before log to avoid -inf on zero bins
        batch = torch.clamp(batch, min=1e-8, max=1e8)
        batch = torch.log(batch)
        return batch
class RawAudioParser(BaseAudioParser):
    """Wrap a raw numpy waveform as a float tensor of shape (1, T).

    :param normalize_waveform: if True, normalize the waveform with
        torch.nn.functional.normalize (p=2 along the last dimension)
        before returning it
    """

    def __init__(self, normalize_waveform=False):
        super().__init__()
        self.normalize_waveform = normalize_waveform
        if self.normalize_waveform:
            print("ATTENTION!!! Normalizing waveform")

    def normalize_sample(self, audio):
        # L2-normalize along the time (last) dimension
        return normalize(audio, 2, dim=-1)

    def __call__(self, audio):
        """Convert *audio* (numpy array) to a (tensor, None) pair.

        The second element is always None here.
        """
        waveform = torch.from_numpy(audio.astype("float32")).float()
        if self.normalize_waveform:
            waveform = self.normalize_sample(waveform)
        return waveform.unsqueeze(0), None
| [
"torch.log",
"torchaudio.transforms.MelScale",
"torch.nn.functional.normalize",
"torchaudio.functional.spectrogram",
"audio_utils.common.utilities._check_transform_input",
"torch.clamp"
] | [((489, 557), 'audio_utils.common.utilities._check_transform_input', '_check_transform_input', (['audio_sample'], {'desired_dims': 'self.desired_dims'}), '(audio_sample, desired_dims=self.desired_dims)\n', (511, 557), False, 'from audio_utils.common.utilities import _check_transform_input\n'), ((1799, 2033), 'torchaudio.functional.spectrogram', 'torchaudio.functional.spectrogram', (['batch', 'self.pad', 'self.window', 'self.n_fft', 'self.hop_length', 'self.window_length'], {'power': 'None', 'normalized': '(False)', 'center': 'self.center', 'pad_mode': 'self.pad_mode', 'onesided': '(True)', 'return_complex': '(True)'}), '(batch, self.pad, self.window, self.n_fft,\n self.hop_length, self.window_length, power=None, normalized=False,\n center=self.center, pad_mode=self.pad_mode, onesided=True,\n return_complex=True)\n', (1832, 2033), False, 'import torchaudio\n'), ((4447, 4572), 'torchaudio.transforms.MelScale', 'torchaudio.transforms.MelScale', (['self.n_mels', 'self.sample_rate', 'self.fmin', 'self.fmax', '(self.n_fft // 2 + 1)', 'norm', 'mel_scale'], {}), '(self.n_mels, self.sample_rate, self.fmin,\n self.fmax, self.n_fft // 2 + 1, norm, mel_scale)\n', (4477, 4572), False, 'import torchaudio\n'), ((4993, 5039), 'torch.clamp', 'torch.clamp', (['batch'], {'min': '(1e-08)', 'max': '(100000000.0)'}), '(batch, min=1e-08, max=100000000.0)\n', (5004, 5039), False, 'import torch\n'), ((5047, 5063), 'torch.log', 'torch.log', (['batch'], {}), '(batch)\n', (5056, 5063), False, 'import torch\n'), ((5499, 5526), 'torch.nn.functional.normalize', 'normalize', (['audio', '(2)'], {'dim': '(-1)'}), '(audio, 2, dim=-1)\n', (5508, 5526), False, 'from torch.nn.functional import normalize\n'), ((3571, 3617), 'torch.clamp', 'torch.clamp', (['batch'], {'min': '(1e-08)', 'max': '(100000000.0)'}), '(batch, min=1e-08, max=100000000.0)\n', (3582, 3617), False, 'import torch\n'), ((3629, 3645), 'torch.log', 'torch.log', (['batch'], {}), '(batch)\n', (3638, 3645), False, 'import torch\n')] |
import itertools
from typing import Any, Callable, Sequence, Tuple
import dill as pickle
import jax.numpy as np
import numpy as onp
import pandas as pd
from jax import grad, jit, ops, random
from jax.experimental.optimizers import Optimizer, adam
from pzflow import distributions
from pzflow.bijectors import Bijector_Info, InitFunction, Pytree
from pzflow.utils import build_bijector_from_info, gaussian_error_model
class Flow:
"""A normalizing flow that models tabular data.
Attributes
----------
data_columns : tuple
List of DataFrame columns that the flow expects/produces.
conditional_columns : tuple
List of DataFrame columns on which the flow is conditioned.
info : Any
Object containing any kind of info included with the flow.
Often describes the data the flow is trained on.
latent
The latent distribution of the normalizing flow.
Has it's own sample and log_prob methods.
"""
    def __init__(
        self,
        data_columns: Sequence[str] = None,
        bijector: Tuple[InitFunction, Bijector_Info] = None,
        conditional_columns: Sequence[str] = None,
        latent=None,
        data_error_model: Callable = None,
        condition_error_model: Callable = None,
        autoscale_conditions: bool = True,
        seed: int = 0,
        info: Any = None,
        file: str = None,
        _dictionary: dict = None,
    ):
        """Instantiate a normalizing flow.

        Note that while all of the init parameters are technically optional,
        you must provide either data_columns and bijector OR file.
        In addition, if a file is provided, all other parameters must be None.

        Parameters
        ----------
        data_columns : Sequence[str], optional
            Tuple, list, or other container of column names.
            These are the columns the flow expects/produces in DataFrames.
        bijector : Bijector Call, optional
            A Bijector call that consists of the bijector InitFunction that
            initializes the bijector and the tuple of Bijector Info.
            Can be the output of any Bijector, e.g. Reverse(), Chain(...), etc.
        conditional_columns : Sequence[str], optional
            Names of columns on which to condition the normalizing flow.
        latent : distribution, optional
            The latent distribution for the normalizing flow. Can be any of
            the distributions from pzflow.distributions. If not provided,
            a normal distribution is used with the number of dimensions
            inferred.
        data_error_model : Callable, optional
            A callable that defines the error model for data variables.
            data_error_model must take key, X, Xerr, nsamples as arguments where:
                key is a jax rng key, e.g. jax.random.PRNGKey(0)
                X is a 2 dimensional array of data variables, where the order
                of variables matches the order of the columns in data_columns
                Xerr is the corresponding 2 dimensional array of errors
                nsamples is the number of samples to draw from the error distribution
            data_error_model must return an array of samples with the shape
            (X.shape[0], nsamples, X.shape[1]).
            If data_error_model is not provided, a Gaussian error model is assumed.
        condition_error_model : Callable, optional
            A callable that defines the error model for conditional variables.
            condition_error_model must take key, X, Xerr, nsamples as arguments where:
                key is a jax rng key, e.g. jax.random.PRNGKey(0)
                X is a 2 dimensional array of conditional variables, where the order
                of variables matches the order of the columns in conditional_columns
                Xerr is the corresponding 2 dimensional array of errors
                nsamples is the number of samples to draw from the error distribution
            condition_error_model must return an array of samples with the shape
            (X.shape[0], nsamples, X.shape[1]).
            If condition_error_model is not provided, a Gaussian error model is assumed.
        autoscale_conditions : bool, default=True
            Sets whether or not conditions are automatically standard scaled when
            passed to a conditional flow. I recommend you leave this as True.
        seed : int, default=0
            The random seed for initial parameters
        info : Any, optional
            An object to attach to the info attribute.
        file : str, optional
            Path to file from which to load a pretrained flow.
            If a file is provided, all other parameters must be None.
        """
        # validate parameters: exactly one of (data_columns + bijector),
        # file, or _dictionary must be supplied
        if (
            data_columns is None
            and bijector is None
            and file is None
            and _dictionary is None
        ):
            raise ValueError("You must provide data_columns and bijector OR file.")
        if data_columns is not None and bijector is None:
            raise ValueError("Please also provide a bijector.")
        if data_columns is None and bijector is not None:
            raise ValueError("Please also provide data_columns.")
        if any(
            (
                data_columns is not None,
                bijector is not None,
                conditional_columns is not None,
                latent is not None,
                data_error_model is not None,
                condition_error_model is not None,
                info is not None,
            )
        ):
            if file is not None:
                raise ValueError(
                    "If providing a file, please do not provide any other parameters."
                )
            if _dictionary is not None:
                raise ValueError(
                    "If providing a dictionary, please do not provide any other parameters."
                )
        if file is not None and _dictionary is not None:
            raise ValueError("Only provide file or _dictionary, not both.")

        # if file or dictionary is provided, load everything from it
        if file is not None or _dictionary is not None:
            save_dict = self._save_dict()
            if file is not None:
                with open(file, "rb") as handle:
                    save_dict.update(pickle.load(handle))
            else:
                save_dict.update(_dictionary)

            if save_dict["class"] != self.__class__.__name__:
                raise TypeError(
                    f"This save file isn't a {self.__class__.__name__}."
                    + f"It is a {save_dict['class']}"
                )

            # load columns and dimensions
            self.data_columns = save_dict["data_columns"]
            self.conditional_columns = save_dict["conditional_columns"]
            self._input_dim = len(self.data_columns)
            self.info = save_dict["info"]

            # load the latent distribution
            self._latent_info = save_dict["latent_info"]
            self.latent = getattr(distributions, self._latent_info[0])(
                *self._latent_info[1]
            )

            # load the error models
            self.data_error_model = save_dict["data_error_model"]
            self.condition_error_model = save_dict["condition_error_model"]

            # load the bijector
            self._bijector_info = save_dict["bijector_info"]
            init_fun, _ = build_bijector_from_info(self._bijector_info)
            # the saved params are used below, so the PRNG key here is irrelevant
            _, self._forward, self._inverse = init_fun(
                random.PRNGKey(0), self._input_dim
            )
            self._params = save_dict["params"]

            # load the conditional means and stds
            self._condition_means = save_dict["condition_means"]
            self._condition_stds = save_dict["condition_stds"]

            # set whether or not to automatically standard scale any
            # conditions passed to the normalizing flow
            self._autoscale_conditions = save_dict["autoscale_conditions"]

        # if no file is provided, use provided parameters
        else:
            self.data_columns = tuple(data_columns)
            self._input_dim = len(self.data_columns)
            self.info = info

            if conditional_columns is None:
                self.conditional_columns = None
                self._condition_means = None
                self._condition_stds = None
            else:
                self.conditional_columns = tuple(conditional_columns)
                # identity scaling until set from data
                self._condition_means = np.zeros(len(self.conditional_columns))
                self._condition_stds = np.ones(len(self.conditional_columns))

            # set whether or not to automatically standard scale any
            # conditions passed to the normalizing flow
            self._autoscale_conditions = autoscale_conditions

            # set up the latent distribution
            if latent is None:
                self.latent = distributions.Normal(self._input_dim)
            else:
                self.latent = latent
            self._latent_info = self.latent.info

            # set up the error models
            if data_error_model is None:
                self.data_error_model = gaussian_error_model
            else:
                self.data_error_model = data_error_model
            if condition_error_model is None:
                self.condition_error_model = gaussian_error_model
            else:
                self.condition_error_model = condition_error_model

            # set up the bijector with random params
            init_fun, self._bijector_info = bijector
            bijector_params, self._forward, self._inverse = init_fun(
                random.PRNGKey(seed), self._input_dim
            )
            self._params = (self.latent._params, bijector_params)
def _get_conditions(self, inputs: pd.DataFrame) -> np.ndarray:
"""Return an array of the bijector conditions."""
# if this isn't a conditional flow, just return empty conditions
if self.conditional_columns is None:
conditions = np.zeros((inputs.shape[0], 1))
# if this a conditional flow, return an array of the conditions
else:
columns = list(self.conditional_columns)
conditions = np.array(inputs[columns].values)
conditions = (conditions - self._condition_means) / self._condition_stds
return conditions
    def _get_err_samples(
        self,
        key,
        inputs: pd.DataFrame,
        err_samples: int,
        type: str = "data",
        skip: str = None,
    ) -> np.ndarray:
        """Draw error samples for each row of inputs.

        Parameters
        ----------
        key : jax PRNG key used by the error model.
        inputs : pd.DataFrame
            Rows for which error samples are drawn. Error columns are
            expected to be named `<col>_err`; missing ones are treated
            as zero error.
        err_samples : int
            Number of samples drawn per row.
        type : str, default="data"
            Either "data" or "conditions" - selects which column set and
            error model to use.
        skip : str, optional
            Column to exclude from the returned samples (filled with NaN
            before sampling, then deleted from the output).

        Returns
        -------
        np.ndarray
            Array of shape (err_samples * inputs.shape[0], n_columns),
            where consecutive groups of `err_samples` rows correspond to
            one input row.
        """
        X = inputs.copy()
        # get list of columns
        if type == "data":
            columns = list(self.data_columns)
            error_model = self.data_error_model
        elif type == "conditions":
            # unconditional flow: return dummy zero conditions of the
            # expected length so callers can treat both cases uniformly
            if self.conditional_columns is None:
                return np.zeros((err_samples * X.shape[0], 1))
            else:
                columns = list(self.conditional_columns)
                error_model = self.condition_error_model
        else:
            raise ValueError("type must be `data` or `conditions`.")
        # make sure all relevant variables have error columns
        for col in columns:
            # if errors not provided for the column, fill in zeros
            if f"{col}_err" not in inputs.columns and col != skip:
                X[f"{col}_err"] = np.zeros(X.shape[0])
            # if we are skipping this column, fill in nan's
            elif col == skip:
                X[col] = np.nan * np.zeros(X.shape[0])
                X[f"{col}_err"] = np.nan * np.zeros(X.shape[0])
        # pull out relevant columns
        err_columns = [col + "_err" for col in columns]
        X, Xerr = np.array(X[columns].values), np.array(X[err_columns].values)
        # generate samples
        Xsamples = error_model(key, X, Xerr, err_samples)
        # flatten the sample axis so each sampled row is its own row
        Xsamples = Xsamples.reshape(X.shape[0] * err_samples, X.shape[1])
        # delete the column corresponding to skip
        if skip is not None:
            idx = columns.index(skip)
            Xsamples = np.delete(Xsamples, idx, axis=1)
        # if these are samples of conditions, standard scale them!
        if type == "conditions":
            Xsamples = (Xsamples - self._condition_means) / self._condition_stds
        return Xsamples
def _log_prob(
self, params: Pytree, inputs: np.ndarray, conditions: np.ndarray
) -> np.ndarray:
"""Log prob for arrays."""
# calculate log_prob
u, log_det = self._forward(params[1], inputs, conditions=conditions)
log_prob = self.latent.log_prob(params[0], u) + log_det
# set NaN's to negative infinity (i.e. zero probability)
log_prob = np.nan_to_num(log_prob, nan=np.NINF)
return log_prob
    def log_prob(
        self, inputs: pd.DataFrame, err_samples: int = None, seed: int = None
    ) -> np.ndarray:
        """Calculates log probability density of inputs.
        Parameters
        ----------
        inputs : pd.DataFrame
            Input data for which log probability density is calculated.
            Every column in self.data_columns must be present.
            If self.conditional_columns is not None, those must be present
            as well. If other columns are present, they are ignored.
        err_samples : int, default=None
            Number of samples from the error distribution to average over for
            the log_prob calculation. If provided, Gaussian errors are assumed,
            and method will look for error columns in `inputs`. Error columns
            must end in `_err`. E.g. the error column for the variable `u` must
            be `u_err`. Zero error assumed for any missing error columns.
        seed : int, default=None
            Random seed for drawing the samples with Gaussian errors.
        Returns
        -------
        np.ndarray
            Device array of shape (inputs.shape[0],).
        """
        if err_samples is None:
            # convert data to an array with columns ordered
            columns = list(self.data_columns)
            X = np.array(inputs[columns].values)
            # get conditions
            conditions = self._get_conditions(inputs)
            # calculate log_prob
            return self._log_prob(self._params, X, conditions)
        else:
            # validate nsamples
            assert isinstance(
                err_samples, int
            ), "err_samples must be a positive integer."
            assert err_samples > 0, "err_samples must be a positive integer."
            # get Gaussian samples
            seed = onp.random.randint(1e18) if seed is None else seed
            key = random.PRNGKey(seed)
            X = self._get_err_samples(key, inputs, err_samples, type="data")
            C = self._get_err_samples(key, inputs, err_samples, type="conditions")
            # calculate log_probs
            log_probs = self._log_prob(self._params, X, C)
            # average over the error samples in *probability* space,
            # then go back to log space (more accurate than averaging logs)
            probs = np.exp(log_probs.reshape(-1, err_samples))
            return np.log(probs.mean(axis=1))
    def posterior(
        self,
        inputs: pd.DataFrame,
        column: str,
        grid: np.ndarray,
        marg_rules: dict = None,
        normalize: bool = True,
        err_samples: int = None,
        seed: int = None,
        batch_size: int = None,
        nan_to_zero: bool = True,
    ) -> np.ndarray:
        """Calculates posterior distributions for the provided column.
        Calculates the conditional posterior distribution, assuming the
        data values in the other columns of the DataFrame.
        Parameters
        ----------
        inputs : pd.DataFrame
            Data on which the posterior distributions are conditioned.
            Must have columns matching self.data_columns, *except*
            for the column specified for the posterior (see below).
        column : str
            Name of the column for which the posterior distribution
            is calculated. Must be one of the columns in self.data_columns.
            However, whether or not this column is one of the columns in
            `inputs` is irrelevant.
        grid : np.ndarray
            Grid on which to calculate the posterior.
        marg_rules : dict, optional
            Dictionary with rules for marginalizing over missing variables.
            The dictionary must contain the key "flag", which gives the flag
            that indicates a missing value. E.g. if missing values are given
            the value 99, the dictionary should contain {"flag": 99}.
            The dictionary must also contain {"name": callable} for any
            variables that will need to be marginalized over, where name is
            the name of the variable, and callable is a callable that takes
            the row of variables nad returns a grid over which to marginalize
            the variable. E.g. {"y": lambda row: np.linspace(0, row["x"], 10)}.
            Note: the callable for a given name must *always* return an array
            of the same length, regardless of the input row.
        err_samples : int, default=None
            Number of samples from the error distribution to average over for
            the posterior calculation. If provided, Gaussian errors are assumed,
            and method will look for error columns in `inputs`. Error columns
            must end in `_err`. E.g. the error column for the variable `u` must
            be `u_err`. Zero error assumed for any missing error columns.
        seed : int, default=None
            Random seed for drawing the samples with Gaussian errors.
        batch_size : int, default=None
            Size of batches in which to calculate posteriors. If None, all
            posteriors are calculated simultaneously. Simultaneous calculation
            is faster, but memory intensive for large data sets.
        normalize : boolean, default=True
            Whether to normalize the posterior so that it integrates to 1.
        nan_to_zero : bool, default=True
            Whether to convert NaN's to zero probability in the final pdfs.
        Returns
        -------
        np.ndarray
            Device array of shape (inputs.shape[0], grid.size).
        """
        # get the index of the provided column, and remove it from the list
        columns = list(self.data_columns)
        idx = columns.index(column)
        columns.remove(column)
        nrows = inputs.shape[0]
        batch_size = nrows if batch_size is None else batch_size
        # make sure indices run 0 -> nrows
        inputs = inputs.reset_index(drop=True)
        if err_samples is not None:
            # validate nsamples
            assert isinstance(
                err_samples, int
            ), "err_samples must be a positive integer."
            assert err_samples > 0, "err_samples must be a positive integer."
            # set the seed
            seed = onp.random.randint(1e18) if seed is None else seed
            # NOTE: `key` is only defined here; the batch loop below only
            # uses it on the err_samples branches, so this is safe.
            key = random.PRNGKey(seed)
        # empty array to hold pdfs
        pdfs = np.zeros((nrows, len(grid)))
        # if marginalization rules were passed, we will loop over the rules
        # and repeatedly call this method
        if marg_rules is not None:
            # if the flag is NaN, we must use np.isnan to check for flags
            if onp.isnan(marg_rules["flag"]):
                def check_flags(data):
                    return onp.isnan(data)
            # else we use np.isclose to check for flags
            else:
                def check_flags(data):
                    return onp.isclose(data, marg_rules["flag"])
            # first calculate pdfs for unflagged rows
            unflagged_idx = inputs[
                ~check_flags(inputs[columns]).any(axis=1)
            ].index.tolist()
            unflagged_pdfs = self.posterior(
                inputs=inputs.iloc[unflagged_idx],
                column=column,
                grid=grid,
                err_samples=err_samples,
                seed=seed,
                batch_size=batch_size,
                normalize=False,
                nan_to_zero=nan_to_zero,
            )
            # save these pdfs in the big array
            # NOTE(review): jax.ops.index_update was deprecated and later
            # removed in newer JAX releases (use pdfs.at[...].set(...)) —
            # left as-is here to preserve behavior on the pinned version.
            pdfs = ops.index_update(
                pdfs,
                ops.index[unflagged_idx, :],
                unflagged_pdfs,
                indices_are_sorted=True,
                unique_indices=True,
            )
            # we will keep track of all the rows we've already calculated
            # posteriors for
            already_done = unflagged_idx
            # now we will loop over the rules in marg_rules
            for name, rule in marg_rules.items():
                # ignore the flag, because that's not a column in the data
                if name == "flag":
                    continue
                # get the list of new rows for which we need to calculate posteriors
                flagged_idx = inputs[check_flags(inputs[name])].index.tolist()
                flagged_idx = list(set(flagged_idx).difference(already_done))
                # if flagged_idx is empty, move on!
                if len(flagged_idx) == 0:
                    continue
                # get the marginalization grid for each row
                marg_grids = (
                    inputs.iloc[flagged_idx]
                    .apply(rule, axis=1, result_type="expand")
                    .values
                )
                # make a new data frame with the marginalization grids replacing
                # the values of the flag in the column
                marg_inputs = pd.DataFrame(
                    np.repeat(
                        inputs.iloc[flagged_idx].values, marg_grids.shape[1], axis=0
                    ),
                    columns=inputs.columns,
                )
                marg_inputs[name] = marg_grids.reshape(marg_inputs.shape[0], 1)
                # remove the error column if it's present
                marg_inputs.drop(f"{name}_err", axis=1, inplace=True, errors="ignore")
                # calculate posteriors for these
                # (recursive call: remaining flagged columns are handled by
                # the same marg_rules on the next level of recursion)
                marg_pdfs = self.posterior(
                    inputs=marg_inputs,
                    column=column,
                    grid=grid,
                    marg_rules=marg_rules,
                    err_samples=err_samples,
                    seed=seed,
                    batch_size=batch_size,
                    normalize=False,
                    nan_to_zero=nan_to_zero,
                )
                # sum over the marginalized dimension
                marg_pdfs = marg_pdfs.reshape(
                    len(flagged_idx), marg_grids.shape[1], grid.size
                )
                marg_pdfs = marg_pdfs.sum(axis=1)
                # save the new pdfs in the big array
                pdfs = ops.index_update(
                    pdfs,
                    ops.index[flagged_idx, :],
                    marg_pdfs,
                    indices_are_sorted=True,
                    unique_indices=True,
                )
                # add these flagged indices to the list of rows already done
                already_done += flagged_idx
        # now for the main posterior calculation loop
        else:
            # loop through batches
            for batch_idx in range(0, nrows, batch_size):
                # get the data batch
                # and, if this is a conditional flow, the correpsonding conditions
                batch = inputs.iloc[batch_idx : batch_idx + batch_size]
                # if not drawing samples, just grab batch and conditions
                if err_samples is None:
                    conditions = self._get_conditions(batch)
                    batch = np.array(batch[columns].values)
                # if only drawing condition samples...
                elif len(self.data_columns) == 1:
                    conditions = self._get_err_samples(
                        key, batch, err_samples, type="conditions"
                    )
                    batch = np.repeat(batch[columns].values, err_samples, axis=0)
                # if drawing data and condition samples...
                else:
                    conditions = self._get_err_samples(
                        key, batch, err_samples, type="conditions"
                    )
                    batch = self._get_err_samples(
                        key, batch, err_samples, skip=column, type="data"
                    )
                # make a new copy of each row for each value of the column
                # for which we are calculating the posterior
                # (the grid values are spliced in at position `idx`)
                batch = np.hstack(
                    (
                        np.repeat(batch[:, :idx], len(grid), axis=0,),
                        np.tile(grid, len(batch))[:, None],
                        np.repeat(batch[:, idx:], len(grid), axis=0,),
                    )
                )
                # make similar copies of the conditions
                conditions = np.repeat(conditions, len(grid), axis=0)
                # calculate probability densities
                log_prob = self._log_prob(self._params, batch, conditions).reshape(
                    (-1, len(grid))
                )
                prob = np.exp(log_prob)
                # if we were Gaussian sampling, average over the samples
                if err_samples is not None:
                    prob = prob.reshape(-1, err_samples, len(grid))
                    prob = prob.mean(axis=1)
                # add the pdfs to the bigger list
                pdfs = ops.index_update(
                    pdfs,
                    ops.index[batch_idx : batch_idx + batch_size, :],
                    prob,
                    indices_are_sorted=True,
                    unique_indices=True,
                )
        if normalize:
            # normalize so they integrate to one
            pdfs = pdfs / np.trapz(y=pdfs, x=grid).reshape(-1, 1)
        if nan_to_zero:
            # set NaN's equal to zero probability
            pdfs = np.nan_to_num(pdfs, nan=0.0)
        return pdfs
    def sample(
        self,
        nsamples: int = 1,
        conditions: pd.DataFrame = None,
        save_conditions: bool = True,
        seed: int = None,
    ) -> pd.DataFrame:
        """Returns samples from the normalizing flow.
        Parameters
        ----------
        nsamples : int, default=1
            The number of samples to be returned.
        conditions : pd.DataFrame, optional
            If this is a conditional flow, you must pass conditions for
            each sample. nsamples will be drawn for each row in conditions.
        save_conditions : bool, default=True
            If true, conditions will be saved in the DataFrame of samples
            that is returned.
        seed : int, optional
            Sets the random seed for the samples.
        Returns
        -------
        pd.DataFrame
            Pandas DataFrame of samples.
        """
        # validate nsamples
        assert isinstance(nsamples, int), "nsamples must be a positive integer."
        assert nsamples > 0, "nsamples must be a positive integer."
        if self.conditional_columns is not None and conditions is None:
            raise ValueError(
                f"Must provide the following conditions\n{self.conditional_columns}"
            )
        # if this isn't a conditional flow, get empty conditions
        if self.conditional_columns is None:
            conditions = np.zeros((nsamples, 1))
        # otherwise get conditions and make `nsamples` copies of each
        else:
            conditions = self._get_conditions(conditions)
            conditions = np.repeat(conditions, nsamples, axis=0)
        # draw from latent distribution
        u = self.latent.sample(self._params[0], conditions.shape[0], seed)
        # take the inverse back to the data distribution
        x = self._inverse(self._params[1], u, conditions=conditions)[0]
        # if not conditional, or save_conditions is False, this is all we need
        if self.conditional_columns is None or save_conditions is False:
            x = pd.DataFrame(x, columns=self.data_columns)
        # but if conditional and save_conditions is True,
        # save conditions with samples
        else:
            # unscale the conditions (undo the standard scaling applied
            # in _get_conditions before returning them to the caller)
            conditions = conditions * self._condition_stds + self._condition_means
            x = pd.DataFrame(
                np.hstack((x, conditions)),
                columns=self.data_columns + self.conditional_columns,
            )
        # return the samples!
        return x
def _save_dict(self):
"""Returns the dictionary of all flow params to be saved."""
save_dict = {"class": self.__class__.__name__}
keys = [
"data_columns",
"conditional_columns",
"condition_means",
"condition_stds",
"data_error_model",
"condition_error_model",
"autoscale_conditions",
"info",
"latent_info",
"bijector_info",
"params",
]
for key in keys:
try:
save_dict[key] = getattr(self, key)
except AttributeError:
try:
save_dict[key] = getattr(self, "_" + key)
except AttributeError:
save_dict[key] = None
return save_dict
def save(self, file: str):
"""Saves the flow to a file.
Pickles the flow and saves it to a file that can be passed as
the `file` argument during flow instantiation.
WARNING: Currently, this method only works for bijectors that are
implemented in the `bijectors` module. If you want to save a flow
with a custom bijector, you either need to add the bijector to that
module, or handle the saving and loading on your end.
Parameters
----------
file : str
Path to where the flow will be saved.
Extension `.pkl` will be appended if not already present.
"""
save_dict = self._save_dict()
with open(file, "wb") as handle:
pickle.dump(save_dict, handle, recurse=True)
    def train(
        self,
        inputs: pd.DataFrame,
        epochs: int = 50,
        batch_size: int = 1024,
        optimizer: Optimizer = None,
        loss_fn: Callable = None,
        convolve_errs: bool = False,
        seed: int = 0,
        verbose: bool = False,
    ) -> list:
        """Trains the normalizing flow on the provided inputs.
        Parameters
        ----------
        inputs : pd.DataFrame
            Data on which to train the normalizing flow.
            Must have columns matching self.data_columns.
        epochs : int, default=50
            Number of epochs to train.
        batch_size : int, default=1024
            Batch size for training.
        optimizer : jax Optimizer, default=adam(step_size=1e-3)
            An optimizer from jax.experimental.optimizers.
        loss_fn : Callable, optional
            A function to calculate the loss: loss = loss_fn(params, x).
            If not provided, will be -mean(log_prob).
        convolve_errs : bool, default=False
            Whether to draw new data from the error distributions during
            each epoch of training. Assumes errors are Gaussian, and method
            will look for error columns in `inputs`. Error columns must end
            in `_err`. E.g. the error column for the variable `u` must be
            `u_err`. Zero error assumed for any missing error columns.
        seed : int, default=0
            A random seed to control the batching and the (optional)
            error sampling.
        verbose : bool, default=False
            If true, print the training loss every 5% of epochs.
        Returns
        -------
        list
            List of training losses from every epoch.
        """
        # validate epochs
        if not isinstance(epochs, int) or epochs <= 0:
            raise ValueError("epochs must be a positive integer.")
        # if no loss_fn is provided, use the default loss function
        if loss_fn is None:
            @jit
            def loss_fn(params, x, c):
                return -np.mean(self._log_prob(params, x, c))
        # initialize the optimizer
        optimizer = adam(step_size=1e-3) if optimizer is None else optimizer
        opt_init, opt_update, get_params = optimizer
        opt_state = opt_init(self._params)
        # define the training step function
        @jit
        def step(i, opt_state, x, c):
            params = get_params(opt_state)
            gradients = grad(loss_fn)(params, x, c)
            return opt_update(i, gradients, opt_state)
        # get list of data columns
        columns = list(self.data_columns)
        # if this is a conditional flow, and autoscale_conditions == True
        # save the means and stds of the conditional columns
        if self.conditional_columns is not None and self._autoscale_conditions:
            self._condition_means = np.array(
                inputs[list(self.conditional_columns)].values.mean(axis=0)
            )
            condition_stds = np.array(
                inputs[list(self.conditional_columns)].values.std(axis=0)
            )
            # guard against zero std (constant columns) to avoid div-by-zero
            self._condition_stds = np.where(condition_stds != 0, condition_stds, 1)
        # define a function to return batches
        if convolve_errs:
            def get_batch(sample_key, x, type):
                return self._get_err_samples(sample_key, x, 1, type=type)
        else:
            def get_batch(sample_key, x, type):
                if type == "conditions":
                    return self._get_conditions(x)
                else:
                    return np.array(x[columns].values)
        # get random seed for training loop
        key = random.PRNGKey(seed)
        if verbose:
            print(f"Training {epochs} epochs \nLoss:")
        # save the initial loss
        X = np.array(inputs[columns].values)
        C = self._get_conditions(inputs)
        losses = [loss_fn(self._params, X, C)]
        if verbose:
            print(f"(0) {losses[-1]:.4f}")
        # loop through training
        itercount = itertools.count()
        for epoch in range(epochs):
            # new permutation of batches
            permute_key, sample_key, key = random.split(key, num=3)
            idx = random.permutation(permute_key, inputs.shape[0])
            X = inputs.iloc[idx]
            # loop through batches and step optimizer
            for batch_idx in range(0, len(X), batch_size):
                # if sampling from the error distribution, this returns a
                # Gaussian sample of the batch. Else just returns batch as a
                # jax array
                batch = get_batch(
                    sample_key, X.iloc[batch_idx : batch_idx + batch_size], type="data"
                )
                batch_conditions = get_batch(
                    sample_key,
                    X.iloc[batch_idx : batch_idx + batch_size],
                    type="conditions",
                )
                opt_state = step(next(itercount), opt_state, batch, batch_conditions,)
            # save end-of-epoch training loss
            params = get_params(opt_state)
            losses.append(
                loss_fn(params, np.array(X[columns].values), self._get_conditions(X),)
            )
            if verbose and (
                epoch % max(int(0.05 * epochs), 1) == 0 or (epoch + 1) == epochs
            ):
                print(f"({epoch+1}) {losses[-1]:.4f}")
        # update the flow parameters with the final training state
        self._params = get_params(opt_state)
        return losses
| [
"jax.random.split",
"pzflow.utils.build_bijector_from_info",
"dill.load",
"jax.numpy.repeat",
"jax.random.PRNGKey",
"jax.numpy.hstack",
"jax.numpy.delete",
"jax.experimental.optimizers.adam",
"pandas.DataFrame",
"jax.numpy.nan_to_num",
"jax.numpy.where",
"dill.dump",
"numpy.isnan",
"jax.nu... | [((12923, 12959), 'jax.numpy.nan_to_num', 'np.nan_to_num', (['log_prob'], {'nan': 'np.NINF'}), '(log_prob, nan=np.NINF)\n', (12936, 12959), True, 'import jax.numpy as np\n'), ((34142, 34162), 'jax.random.PRNGKey', 'random.PRNGKey', (['seed'], {}), '(seed)\n', (34156, 34162), False, 'from jax import grad, jit, ops, random\n'), ((34284, 34316), 'jax.numpy.array', 'np.array', (['inputs[columns].values'], {}), '(inputs[columns].values)\n', (34292, 34316), True, 'import jax.numpy as np\n'), ((34521, 34538), 'itertools.count', 'itertools.count', ([], {}), '()\n', (34536, 34538), False, 'import itertools\n'), ((7535, 7580), 'pzflow.utils.build_bijector_from_info', 'build_bijector_from_info', (['self._bijector_info'], {}), '(self._bijector_info)\n', (7559, 7580), False, 'from pzflow.utils import build_bijector_from_info, gaussian_error_model\n'), ((10195, 10225), 'jax.numpy.zeros', 'np.zeros', (['(inputs.shape[0], 1)'], {}), '((inputs.shape[0], 1))\n', (10203, 10225), True, 'import jax.numpy as np\n'), ((10390, 10422), 'jax.numpy.array', 'np.array', (['inputs[columns].values'], {}), '(inputs[columns].values)\n', (10398, 10422), True, 'import jax.numpy as np\n'), ((11918, 11945), 'jax.numpy.array', 'np.array', (['X[columns].values'], {}), '(X[columns].values)\n', (11926, 11945), True, 'import jax.numpy as np\n'), ((11947, 11978), 'jax.numpy.array', 'np.array', (['X[err_columns].values'], {}), '(X[err_columns].values)\n', (11955, 11978), True, 'import jax.numpy as np\n'), ((12280, 12312), 'jax.numpy.delete', 'np.delete', (['Xsamples', 'idx'], {'axis': '(1)'}), '(Xsamples, idx, axis=1)\n', (12289, 12312), True, 'import jax.numpy as np\n'), ((14313, 14345), 'jax.numpy.array', 'np.array', (['inputs[columns].values'], {}), '(inputs[columns].values)\n', (14321, 14345), True, 'import jax.numpy as np\n'), ((14894, 14914), 'jax.random.PRNGKey', 'random.PRNGKey', (['seed'], {}), '(seed)\n', (14908, 14914), False, 'from jax import grad, jit, ops, random\n'), ((19200, 
19220), 'jax.random.PRNGKey', 'random.PRNGKey', (['seed'], {}), '(seed)\n', (19214, 19220), False, 'from jax import grad, jit, ops, random\n'), ((19545, 19574), 'numpy.isnan', 'onp.isnan', (["marg_rules['flag']"], {}), "(marg_rules['flag'])\n", (19554, 19574), True, 'import numpy as onp\n'), ((20433, 20550), 'jax.ops.index_update', 'ops.index_update', (['pdfs', 'ops.index[unflagged_idx, :]', 'unflagged_pdfs'], {'indices_are_sorted': '(True)', 'unique_indices': '(True)'}), '(pdfs, ops.index[unflagged_idx, :], unflagged_pdfs,\n indices_are_sorted=True, unique_indices=True)\n', (20449, 20550), False, 'from jax import grad, jit, ops, random\n'), ((26251, 26279), 'jax.numpy.nan_to_num', 'np.nan_to_num', (['pdfs'], {'nan': '(0.0)'}), '(pdfs, nan=0.0)\n', (26264, 26279), True, 'import jax.numpy as np\n'), ((27706, 27729), 'jax.numpy.zeros', 'np.zeros', (['(nsamples, 1)'], {}), '((nsamples, 1))\n', (27714, 27729), True, 'import jax.numpy as np\n'), ((27897, 27936), 'jax.numpy.repeat', 'np.repeat', (['conditions', 'nsamples'], {'axis': '(0)'}), '(conditions, nsamples, axis=0)\n', (27906, 27936), True, 'import jax.numpy as np\n'), ((28350, 28392), 'pandas.DataFrame', 'pd.DataFrame', (['x'], {'columns': 'self.data_columns'}), '(x, columns=self.data_columns)\n', (28362, 28392), True, 'import pandas as pd\n'), ((30417, 30461), 'dill.dump', 'pickle.dump', (['save_dict', 'handle'], {'recurse': '(True)'}), '(save_dict, handle, recurse=True)\n', (30428, 30461), True, 'import dill as pickle\n'), ((32615, 32636), 'jax.experimental.optimizers.adam', 'adam', ([], {'step_size': '(0.001)'}), '(step_size=0.001)\n', (32619, 32636), False, 'from jax.experimental.optimizers import Optimizer, adam\n'), ((33605, 33653), 'jax.numpy.where', 'np.where', (['(condition_stds != 0)', 'condition_stds', '(1)'], {}), '(condition_stds != 0, condition_stds, 1)\n', (33613, 33653), True, 'import jax.numpy as np\n'), ((34659, 34683), 'jax.random.split', 'random.split', (['key'], {'num': '(3)'}), '(key, 
num=3)\n', (34671, 34683), False, 'from jax import grad, jit, ops, random\n'), ((34702, 34750), 'jax.random.permutation', 'random.permutation', (['permute_key', 'inputs.shape[0]'], {}), '(permute_key, inputs.shape[0])\n', (34720, 34750), False, 'from jax import grad, jit, ops, random\n'), ((7653, 7670), 'jax.random.PRNGKey', 'random.PRNGKey', (['(0)'], {}), '(0)\n', (7667, 7670), False, 'from jax import grad, jit, ops, random\n'), ((9059, 9096), 'pzflow.distributions.Normal', 'distributions.Normal', (['self._input_dim'], {}), '(self._input_dim)\n', (9079, 9096), False, 'from pzflow import distributions\n'), ((9807, 9827), 'jax.random.PRNGKey', 'random.PRNGKey', (['seed'], {}), '(seed)\n', (9821, 9827), False, 'from jax import grad, jit, ops, random\n'), ((11577, 11597), 'jax.numpy.zeros', 'np.zeros', (['X.shape[0]'], {}), '(X.shape[0])\n', (11585, 11597), True, 'import jax.numpy as np\n'), ((14825, 14850), 'numpy.random.randint', 'onp.random.randint', (['(1e+18)'], {}), '(1e+18)\n', (14843, 14850), True, 'import numpy as onp\n'), ((19131, 19156), 'numpy.random.randint', 'onp.random.randint', (['(1e+18)'], {}), '(1e+18)\n', (19149, 19156), True, 'import numpy as onp\n'), ((23037, 23147), 'jax.ops.index_update', 'ops.index_update', (['pdfs', 'ops.index[flagged_idx, :]', 'marg_pdfs'], {'indices_are_sorted': '(True)', 'unique_indices': '(True)'}), '(pdfs, ops.index[flagged_idx, :], marg_pdfs,\n indices_are_sorted=True, unique_indices=True)\n', (23053, 23147), False, 'from jax import grad, jit, ops, random\n'), ((25456, 25472), 'jax.numpy.exp', 'np.exp', (['log_prob'], {}), '(log_prob)\n', (25462, 25472), True, 'import jax.numpy as np\n'), ((25776, 25902), 'jax.ops.index_update', 'ops.index_update', (['pdfs', 'ops.index[batch_idx:batch_idx + batch_size, :]', 'prob'], {'indices_are_sorted': '(True)', 'unique_indices': '(True)'}), '(pdfs, ops.index[batch_idx:batch_idx + batch_size, :], prob,\n indices_are_sorted=True, unique_indices=True)\n', (25792, 25902), False, 'from 
jax import grad, jit, ops, random\n'), ((28669, 28695), 'jax.numpy.hstack', 'np.hstack', (['(x, conditions)'], {}), '((x, conditions))\n', (28678, 28695), True, 'import jax.numpy as np\n'), ((32931, 32944), 'jax.grad', 'grad', (['loss_fn'], {}), '(loss_fn)\n', (32935, 32944), False, 'from jax import grad, jit, ops, random\n'), ((11063, 11102), 'jax.numpy.zeros', 'np.zeros', (['(err_samples * X.shape[0], 1)'], {}), '((err_samples * X.shape[0], 1))\n', (11071, 11102), True, 'import jax.numpy as np\n'), ((19643, 19658), 'numpy.isnan', 'onp.isnan', (['data'], {}), '(data)\n', (19652, 19658), True, 'import numpy as onp\n'), ((19801, 19838), 'numpy.isclose', 'onp.isclose', (['data', "marg_rules['flag']"], {}), "(data, marg_rules['flag'])\n", (19812, 19838), True, 'import numpy as onp\n'), ((21852, 21923), 'jax.numpy.repeat', 'np.repeat', (['inputs.iloc[flagged_idx].values', 'marg_grids.shape[1]'], {'axis': '(0)'}), '(inputs.iloc[flagged_idx].values, marg_grids.shape[1], axis=0)\n', (21861, 21923), True, 'import jax.numpy as np\n'), ((23944, 23975), 'jax.numpy.array', 'np.array', (['batch[columns].values'], {}), '(batch[columns].values)\n', (23952, 23975), True, 'import jax.numpy as np\n'), ((34055, 34082), 'jax.numpy.array', 'np.array', (['x[columns].values'], {}), '(x[columns].values)\n', (34063, 34082), True, 'import jax.numpy as np\n'), ((35655, 35682), 'jax.numpy.array', 'np.array', (['X[columns].values'], {}), '(X[columns].values)\n', (35663, 35682), True, 'import jax.numpy as np\n'), ((6417, 6436), 'dill.load', 'pickle.load', (['handle'], {}), '(handle)\n', (6428, 6436), True, 'import dill as pickle\n'), ((11722, 11742), 'jax.numpy.zeros', 'np.zeros', (['X.shape[0]'], {}), '(X.shape[0])\n', (11730, 11742), True, 'import jax.numpy as np\n'), ((11786, 11806), 'jax.numpy.zeros', 'np.zeros', (['X.shape[0]'], {}), '(X.shape[0])\n', (11794, 11806), True, 'import jax.numpy as np\n'), ((24254, 24307), 'jax.numpy.repeat', 'np.repeat', (['batch[columns].values', 
'err_samples'], {'axis': '(0)'}), '(batch[columns].values, err_samples, axis=0)\n', (24263, 24307), True, 'import jax.numpy as np\n'), ((26118, 26142), 'jax.numpy.trapz', 'np.trapz', ([], {'y': 'pdfs', 'x': 'grid'}), '(y=pdfs, x=grid)\n', (26126, 26142), True, 'import jax.numpy as np\n')] |
import yaml
import pprint
def read_yaml():
    """Load every YAML document from ``configs.yml`` into a list."""
    with open('configs.yml') as stream:
        documents = list(yaml.safe_load_all(stream))
    return documents
def write_yaml(data):
    """Append the given YAML documents to ``toyaml.yml``."""
    with open('toyaml.yml', 'a') as out:
        yaml.dump_all(data, out, default_flow_style=False)
if __name__ == "__main__":
    # Load the documents from configs.yml, echo them for inspection,
    # then append them to toyaml.yml.
    loaded_config = read_yaml()
    pprint.pprint(loaded_config)
    write_yaml(loaded_config)
| [
"pprint.pprint",
"yaml.safe_load_all",
"yaml.dump_all"
] | [((464, 488), 'pprint.pprint', 'pprint.pprint', (['my_config'], {}), '(my_config)\n', (477, 488), False, 'import pprint\n'), ((296, 344), 'yaml.dump_all', 'yaml.dump_all', (['data', 'f'], {'default_flow_style': '(False)'}), '(data, f, default_flow_style=False)\n', (309, 344), False, 'import yaml\n'), ((142, 163), 'yaml.safe_load_all', 'yaml.safe_load_all', (['f'], {}), '(f)\n', (160, 163), False, 'import yaml\n')] |
"""pypyr step saves the current utc datetime to context."""
from datetime import datetime, timezone
import logging
# logger means the log level will be set correctly
logger = logging.getLogger(__name__)
def run_step(context):
    """Store the current UTC timestamp in context under 'nowUtc'.

    Args:
        context: pypyr.context.Context. Mandatory.
            Optional key 'nowUtcIn': a strftime formatting expression
            (pypyr formatting expressions are resolved first). When
            absent or empty, the timestamp is written in ISO 8601.

    Returns:
        None. Mutates the context arg in place.
    """
    logger.debug("started")
    fmt = context.get('nowUtcIn', None)
    if fmt:
        # resolve any pypyr {substitutions} before handing to strftime
        pattern = context.get_formatted_string(fmt)
        context['nowUtc'] = datetime.now(timezone.utc).strftime(pattern)
    else:
        # no format requested -> ISO 8601 default
        context['nowUtc'] = datetime.now(timezone.utc).isoformat()
    logger.info("timestamp %s saved to context nowUtc", context['nowUtc'])
    logger.debug("done")
| [
"logging.getLogger",
"datetime.datetime.now"
] | [((176, 203), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (193, 203), False, 'import logging\n'), ((1210, 1236), 'datetime.datetime.now', 'datetime.now', (['timezone.utc'], {}), '(timezone.utc)\n', (1222, 1236), False, 'from datetime import datetime, timezone\n'), ((1319, 1345), 'datetime.datetime.now', 'datetime.now', (['timezone.utc'], {}), '(timezone.utc)\n', (1331, 1345), False, 'from datetime import datetime, timezone\n')] |
import os
import sys
from time import sleep as sleep
import glob
import cv2
from PIL import Image
# ANSI escape prefix bytes used to reposition the cursor each frame.
ESC = b'\033'
CSI = ESC + b'['
# NOTE(review): the original line read "Фuse_ansi_escape_sequences = True" —
# a mojibake-corrupted identifier that made the check below raise NameError.
use_ansi_escape_sequences = True
if not use_ansi_escape_sequences:
    # Fall back to the Win32 console API for cursor positioning.
    import ctypes
    from ctypes import c_long
    console_handle = ctypes.windll.kernel32.GetStdHandle(c_long(-11))
# Character-cell resolution of the rendered "video".
video_columns, video_lines = 140, 70
has_inverted_colors = True
def set_console_size(columns, lines):
    """Resize the (Windows) console window and clear the screen."""
    resize_command = f'mode con cols={columns} lines={lines} '
    os.system(resize_command)
    os.system('cls')
# --- video selection menu ---
set_console_size(40, 20)
selected_video_number = 0
# list every mp4 in the working directory and show a numbered menu
videos = glob.glob('*.mp4')
for video_index, video_name in enumerate(videos):
    print(f'[{video_index + 1}] - {video_name}')
selected_video_number = input('\nВведите номер видео: ')
try:
    # menu is 1-based; anything non-numeric or out of range lands in except
    selected_video = videos[int(selected_video_number) - 1]
except:
    set_console_size(100, 30)
    print(f'{selected_video_number} - неверный номер X_X')
    exit()
# --- playback: decode frames and render them as ASCII art ---
vidcap = cv2.VideoCapture(selected_video)
success, image = vidcap.read()
set_console_size(video_columns, video_lines)
# brightness ramp: index 0 = densest glyph; reversed for dark-on-light terminals
symbols = list(r'$@B%8&WM#*oahkbdpqwmZO0QLCJUYXzcvunxrjft/\|()1[]?-_+~<>i!lI;:, ')
if has_inverted_colors:
    symbols.reverse()
# raw (binary, buffered) stdout so whole frames are flushed at once
stdout = os.fdopen(sys.stdout.fileno(), 'wb', video_columns * video_lines * 2)
try:
    while True:
        success, image = vidcap.read()
        if not success:
            # end of video: restore a normal console size and stop
            set_console_size(100, 30)
            break
        # downscale the frame to the console grid and convert to grayscale
        im = Image.fromarray(image)
        im = im.resize((video_columns, video_lines))
        im = im.convert('L')
        pixels = im.load()
        result = []
        for y in range(1, video_lines):
            for x in range(1, video_columns):
                # map 0-255 luminance onto the symbol ramp (bucket width 36)
                result.append(symbols[int(pixels[x, y] / 36) - 1])
            result.append('\n')
        # Set cursor to the top left corner
        if use_ansi_escape_sequences:
            stdout.write(CSI + b'1;1H')
        else:
            ctypes.windll.kernel32.SetConsoleCursorPosition(console_handle, 0)
        stdout.write(''.join(result).encode())
        stdout.flush()
        sleep(1 / 60) # Sleep one sixtieth of a second (60 fps)
except KeyboardInterrupt:
    # Ctrl+C: restore a normal console size before exiting
    set_console_size(100, 30)
| [
"ctypes.c_long",
"PIL.Image.fromarray",
"time.sleep",
"ctypes.windll.kernel32.SetConsoleCursorPosition",
"cv2.VideoCapture",
"os.system",
"sys.stdout.fileno",
"glob.glob"
] | [((484, 500), 'os.system', 'os.system', (['"""cls"""'], {}), "('cls')\n", (493, 500), False, 'import os\n'), ((562, 580), 'glob.glob', 'glob.glob', (['"""*.mp4"""'], {}), "('*.mp4')\n", (571, 580), False, 'import glob\n'), ((923, 955), 'cv2.VideoCapture', 'cv2.VideoCapture', (['selected_video'], {}), '(selected_video)\n', (939, 955), False, 'import cv2\n'), ((429, 481), 'os.system', 'os.system', (['f"""mode con cols={columns} lines={lines} """'], {}), "(f'mode con cols={columns} lines={lines} ')\n", (438, 481), False, 'import os\n'), ((1185, 1204), 'sys.stdout.fileno', 'sys.stdout.fileno', ([], {}), '()\n', (1202, 1204), False, 'import sys\n'), ((307, 318), 'ctypes.c_long', 'c_long', (['(-11)'], {}), '(-11)\n', (313, 318), False, 'from ctypes import c_long\n'), ((1401, 1423), 'PIL.Image.fromarray', 'Image.fromarray', (['image'], {}), '(image)\n', (1416, 1423), False, 'from PIL import Image\n'), ((2035, 2048), 'time.sleep', 'sleep', (['(1 / 60)'], {}), '(1 / 60)\n', (2040, 2048), True, 'from time import sleep as sleep\n'), ((1888, 1954), 'ctypes.windll.kernel32.SetConsoleCursorPosition', 'ctypes.windll.kernel32.SetConsoleCursorPosition', (['console_handle', '(0)'], {}), '(console_handle, 0)\n', (1935, 1954), False, 'import ctypes\n')] |
# uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\careers\acting\performance_object_data.py
# Compiled at: 2018-09-18 00:30:33
# Size of source mod 2**32: 2272 bytes
import services
class PerformanceObjectData:
def __init__(self, objects, pre_performance_states, performance_states, post_performance_states):
self._objects = objects
self._pre_performance_states = pre_performance_states
self._performance_states = performance_states
self._post_performance_states = post_performance_states
def set_performance_states(self):
self._set_states(self._performance_states)
def set_pre_performance_states(self):
bucks_tracker = services.active_sim_info().get_bucks_tracker()
for state_data in self._pre_performance_states:
skip_perk = state_data.skip_with_perk
state_value = state_data.state_value
if skip_perk is not None:
if bucks_tracker is not None:
if bucks_tracker.is_perk_unlocked(skip_perk):
continue
for obj in self._objects:
if obj.has_state(state_value.state):
obj.set_state((state_value.state), state_value, immediate=True, force_update=True)
def set_post_performance_states(self):
self._set_states(self._post_performance_states)
def _set_states(self, states):
for state_value in states:
for obj in self._objects:
if obj.has_state(state_value.state):
obj.set_state((state_value.state), state_value, immediate=True, force_update=True) | [
"services.active_sim_info"
] | [((854, 880), 'services.active_sim_info', 'services.active_sim_info', ([], {}), '()\n', (878, 880), False, 'import services\n')] |
import numpy as np
import torch
import os
import sys
import re
import math
from torch.utils.data import Dataset, DataLoader
from apex import amp
from apex.parallel import DistributedDataParallel as DDP
from lamb import Lamb
#tensorboard for accuracy graphs
import tensorflow as tf
def getCombinations(inputTensor, N, c, d):#input shape=(batch_size, obj_count, obj_dim) #batch_size=N, obj_count=c, obj_dim=d
tensorA = inputTensor.reshape(N, 1, c, d).expand(N, c, c, d)
tensorB = tensorA.transpose(1, 2)
return torch.cat((tensorB, tensorA), 3)
dataset_name = 'neutral'#'interpolation'#'extrapolation'
if len(sys.argv) < 2:
print("Missing data path!")
exit()
datapath_preprocessed = os.path.join(sys.argv[1], dataset_name + '_preprocessed')
class PgmDataset(Dataset):
def __init__(self, filenames):
'Initialization'
self.filenames = filenames
def __len__(self):
return len(self.filenames)
def __getitem__(self, index):
filename = self.filenames[index]
with np.load(os.path.join(datapath_preprocessed, filename)) as data:
image = data['image'].astype(np.uint8).reshape(16, 80, 80)
target = data['target']
return image, target
class WReN(torch.nn.Module):
def __init__(self, m):
super(WReN, self).__init__()
self.relation_network_depth = m
self.g_dim = 512
self.h_dim = 256
self.f_dim = 256
self.use_mag_enc = True #switch between scalar input and magnitude encoded input
self.mag_enc_type_relu = False #switch between gaussian magnitude encoding and relu based magnitude encoding
self.magnitude_encoding_dim = 20
#model
#magnitude encoding
self.input_scale = 2.0/255.0
self.input_offset = -1.0
std_dev = 0.28
self.input_encoding_variance_inv = 1.0 / (math.sqrt(2.0) * std_dev)
self.normalization_factor = 1.0 / (math.sqrt(2*math.pi) * std_dev)
self.mag_scale = torch.nn.Parameter(torch.linspace(-1.0, 1.0, steps=self.magnitude_encoding_dim), requires_grad=False)
if self.use_mag_enc:
conv_input_dim = self.magnitude_encoding_dim
else:
conv_input_dim = 1
self.conv = torch.nn.Sequential(
torch.nn.Conv2d(conv_input_dim, 32, 3, stride=2),
torch.nn.LeakyReLU(),
torch.nn.Conv2d(32, 32, 3, stride=2),
torch.nn.LeakyReLU(),
torch.nn.Conv2d(32, 32, 3, stride=2),
torch.nn.LeakyReLU(),
torch.nn.Conv2d(32, 32, 3, stride=2),
torch.nn.LeakyReLU()
)
self.post_cnn_linear = torch.nn.Linear(32*4*4, 256-9)
self.tag_matrix = torch.nn.Parameter(torch.eye(9).repeat(8, 1), requires_grad=False)
self.g = torch.nn.Sequential(
torch.nn.Linear(2*256, self.g_dim),
torch.nn.LeakyReLU(),
torch.nn.Linear(self.g_dim, self.g_dim),
torch.nn.LeakyReLU(),
torch.nn.Linear(self.g_dim, self.g_dim),
torch.nn.LeakyReLU(),
torch.nn.Linear(self.g_dim, self.h_dim),
torch.nn.LeakyReLU()
)
h = []
for i in range(m):
rel_layer_func = torch.nn.Sequential(
torch.nn.Linear(2*self.h_dim, self.h_dim),
torch.nn.LeakyReLU(),
torch.nn.Linear(self.h_dim, self.h_dim),
torch.nn.LeakyReLU(),
torch.nn.Linear(self.h_dim, self.h_dim),
torch.nn.LeakyReLU()
)
h.append(rel_layer_func)
self.h = torch.nn.ModuleList(h)
f_in_dim = self.h_dim
self.f = torch.nn.Sequential(
torch.nn.Linear(f_in_dim, self.f_dim),
torch.nn.LeakyReLU(),
torch.nn.Linear(self.f_dim, self.f_dim),
torch.nn.LeakyReLU()
)
self.f_final = torch.nn.Linear(self.f_dim, 1)
def forward(self, batch):
batch_size = batch.size()[0]
#Panel preprocessor CNN
batch_flat = batch.reshape(batch_size*16, 1, 80, 80)
if self.use_mag_enc:
with torch.no_grad():
#magnitude encoding
batch_flat = batch_flat.transpose(1, 3)
if self.mag_enc_type_relu:
#first order
batch_flat = batch_flat.add_(255/self.magnitude_encoding_dim)
batch_flat = torch.nn.functional.relu_(batch_flat.mul_(self.input_scale).add_(self.input_offset).add(-self.mag_scale))
#second order
batch_flat = torch.cat((batch_flat[:, :, :, :-1] - 2*batch_flat[:, :, :, 1:], batch_flat[:, :, :, -1].unsqueeze(dim=-1)), dim=-1).mul_(self.magnitude_encoding_dim/2)
batch_flat = torch.nn.functional.relu_(batch_flat)
else:
batch_flat = batch_flat.mul_(self.input_scale).add_(self.input_offset).tanh_().add(self.mag_scale).mul_(self.input_encoding_variance_inv).pow_(2).mul_(-1).exp_().mul_(self.normalization_factor)
batch_flat = batch_flat.transpose(3, 1)
conv_out = self.conv(batch_flat)
#scatter context
objectsWithoutPos = self.post_cnn_linear(conv_out.reshape(batch_size*16, -1))
panel_vectors = objectsWithoutPos.reshape(batch_size, 16, 256-9)
given, option1, option2, option3, option4, option5, option6, option7, option8 = panel_vectors.split((8, 1, 1, 1, 1, 1, 1, 1, 1), dim=1)
optionsWithContext = torch.cat((
given, option1,
given, option2,
given, option3,
given, option4,
given, option5,
given, option6,
given, option7,
given, option8
), 1)
optionsWithoutPos = optionsWithContext.reshape(batch_size*8*9, 256-9)
objects = torch.cat((optionsWithoutPos, self.tag_matrix.repeat(batch_size, 1)), dim=1).reshape(batch_size*8, 9, 256-9+9)
#MLRN
objPairs2D = getCombinations(objects, batch_size*8, 9, 256)
objPairs = objPairs2D.reshape(batch_size*8*(9*9), 2*256)
gResult = self.g(objPairs)#apply MLP
prev_result = gResult
prev_dim = self.h_dim
prev_result_2d = prev_result.reshape(batch_size*8, 9, 9, prev_dim)
sum_j = prev_result_2d.sum(dim=2)
for i, h_layer in enumerate(self.h):
residual = sum_j
intermed_obj_pairs_2d = getCombinations(sum_j, batch_size*8, 9, prev_dim)
intermed_obj_pairs = intermed_obj_pairs_2d.reshape(batch_size*8*(9*9), 2*prev_dim)
prev_result = h_layer(intermed_obj_pairs)#apply MLP
prev_dim = self.h_dim
prev_result_2d = prev_result.reshape(batch_size*8, 9, 9, prev_dim)
sum_j = prev_result_2d.sum(dim=2)
hSum = sum_j.sum(dim=1)
result = self.f_final(self.f(hSum))#pre-softmax scores for every possible answer
answer = result.reshape(batch_size, 8)
#attempt to stabilize training (avoiding inf value activations in last layers)
activation_loss = hSum.pow(2).mean() + result.pow(2).mean()
return answer, activation_loss
def worker_fn(rank, world_size):
setup(rank, world_size)
weights_filename = "weights.pt"
batch_size = 512
epochs = 240
warmup_epochs = 8
use_mixed_precision = True
batch_size = batch_size // world_size #batch size per worker
#Data
all_data = os.listdir(datapath_preprocessed)
train_filenames = [p for p in all_data if re.match(r'^PGM_' + re.escape(dataset_name) + r'_train_(\d+)\.npz$', p) is not None]
val_filenames = [p for p in all_data if re.match(r'^PGM_' + re.escape(dataset_name) + r'_val_(\d+)\.npz$', p) is not None]
train_dataset = PgmDataset(train_filenames)
train_sampler = torch.utils.data.distributed.DistributedSampler(train_dataset)
train_dataloader = DataLoader(train_dataset, batch_size=batch_size, num_workers=8, pin_memory=False, sampler=train_sampler)#shuffle is done by the sampler
val_dataloader = DataLoader(PgmDataset(val_filenames), batch_size=batch_size, shuffle=False, num_workers=4, pin_memory=False)
#Model
device_ids = [rank]
model = WReN(2).to(device_ids[0])#3-layer MLRN
if weights_filename is not None and os.path.isfile("./" + weights_filename):
model.load_state_dict(torch.load(weights_filename, map_location='cpu'))
print('Weights loaded')
cold_start = False
else:
print('No weights found')
cold_start = True
#Loss and optimizer
final_lr = 2e-3
def add_module_params_with_decay(module, weight_decay, param_groups):#adds parameters with decay unless they are bias parameters, which shouldn't receive decay
group_with_decay = []
group_without_decay = []
for name, param in module.named_parameters():
if not param.requires_grad: continue
if name == 'bias' or name.endswith('bias'):
group_without_decay.append(param)
else:
group_with_decay.append(param)
param_groups.append({"params": group_with_decay, "weight_decay": weight_decay})
param_groups.append({"params": group_without_decay})
optimizer_param_groups = [
]
add_module_params_with_decay(model.conv, 2e-1, optimizer_param_groups)
add_module_params_with_decay(model.post_cnn_linear, 2e-1, optimizer_param_groups)
add_module_params_with_decay(model.g, 2e-1, optimizer_param_groups)
add_module_params_with_decay(model.h, 2e-1, optimizer_param_groups)
add_module_params_with_decay(model.f, 2e-1, optimizer_param_groups)
add_module_params_with_decay(model.f_final, 2e-1, optimizer_param_groups)
optimizer = Lamb(optimizer_param_groups, lr=final_lr)
base_model = model
if use_mixed_precision:
model, optimizer = amp.initialize(model, optimizer, opt_level="O1") #Mixed Precision
lossFunc = torch.nn.CrossEntropyLoss()
softmax = torch.nn.Softmax(dim=1)
#Parallel distributed model
device = device_ids[0]
torch.cuda.set_device(device)
parallel_model = torch.nn.parallel.DistributedDataParallel(model, device_ids)
if rank == 0:
#accuracy logging
sess = tf.Session()
train_acc_placeholder = tf.placeholder(tf.float32, shape=())
train_acc_summary = tf.summary.scalar('training_acc', train_acc_placeholder)
val_acc_placeholder = tf.placeholder(tf.float32, shape=())
val_acc_summary = tf.summary.scalar('validation_acc', val_acc_placeholder)
writer = tf.summary.FileWriter("log", sess.graph)
#training loop
acc = []
global_step = 0
for epoch in range(epochs):
train_sampler.set_epoch(epoch)
# Validation
val_acc = []
parallel_model.eval()
with torch.no_grad():
for i, (local_batch, local_labels) in enumerate(val_dataloader):
local_batch, targets = local_batch.to(device), local_labels.to(device)
#answer = model(local_batch.type(torch.float32))
answer, _ = parallel_model(local_batch.type(torch.float32))
#Calc accuracy
answerSoftmax = softmax(answer)
maxIndex = answerSoftmax.argmax(dim=1)
correct = maxIndex.eq(targets)
accuracy = correct.type(dtype=torch.float16).mean(dim=0)
val_acc.append(accuracy)
if i % 50 == 0 and rank == 0:
print("batch " + str(i))
total_val_acc = sum(val_acc) / len(val_acc)
print('Validation accuracy: ' + str(total_val_acc.item()))
if rank == 0:
summary = sess.run(val_acc_summary, feed_dict={val_acc_placeholder: total_val_acc.item()})
writer.add_summary(summary, global_step=global_step)
# Training
parallel_model.train()
for i, (local_batch, local_labels) in enumerate(train_dataloader):
global_step = global_step + 1
if cold_start and epoch < warmup_epochs:#linear scaling of the lr for warmup during the first few epochs
lr = final_lr * global_step / (warmup_epochs*len(train_dataset) / (batch_size * world_size))
for param_group in optimizer.param_groups:
param_group['lr'] = lr
local_batch, targets = local_batch.to(device_ids[0]), local_labels.to(device_ids[0])
optimizer.zero_grad()
answer, activation_loss = parallel_model(local_batch.type(torch.float32))
loss = lossFunc(answer, targets) + activation_loss * 2e-3
#Calc accuracy
answerSoftmax = softmax(answer)
maxIndex = answerSoftmax.argmax(dim=1)
correct = maxIndex.eq(targets)
accuracy = correct.type(dtype=torch.float16).mean(dim=0)
acc.append(accuracy)
#Training step
if use_mixed_precision:
with amp.scale_loss(loss, optimizer) as scaled_loss: #Mixed precision
scaled_loss.backward()
else:
loss.backward()
grad_norm = torch.nn.utils.clip_grad_norm_(parallel_model.parameters(), 1e1)
optimizer.step()
if i % 50 == 0 and rank == 0:
print("epoch " + str(epoch) + " batch " + str(i))
print("loss", loss)
print("activation loss", activation_loss)
print(grad_norm)
#logging and saving weights
if i % 1000 == 999:
trainAcc = sum(acc) / len(acc)
acc = []
print('Training accuracy: ' + str(trainAcc.item()))
if rank == 0:
if weights_filename is not None:
torch.save(base_model.state_dict(), weights_filename)
print('Weights saved')
summary = sess.run(train_acc_summary, feed_dict={train_acc_placeholder: trainAcc.item()})
writer.add_summary(summary, global_step=global_step)
if cold_start and weights_filename is not None and epoch % 10 == 0 and rank == 0:
torch.save(base_model.state_dict(), weights_filename + "_cp" + str(epoch))
print('Checkpoint saved')
cleanup()
def setup(rank, world_size):
os.environ['MASTER_ADDR'] = 'localhost'
os.environ['MASTER_PORT'] = '12355'
# initialize the process group
torch.distributed.init_process_group("nccl", rank=rank, world_size=world_size)
# Explicitly setting seed to make sure that models created in two processes
# start from same random weights and biases.
torch.manual_seed(42)
def cleanup():
torch.distributed.destroy_process_group()
def run(world_size):
torch.multiprocessing.spawn(worker_fn, args=(world_size,), nprocs=world_size, join=True)
if __name__ == "__main__":
run(4)#4 GPUs
| [
"apex.amp.scale_loss",
"re.escape",
"torch.nn.CrossEntropyLoss",
"torch.distributed.destroy_process_group",
"math.sqrt",
"torch.utils.data.distributed.DistributedSampler",
"apex.amp.initialize",
"os.listdir",
"torch.nn.ModuleList",
"torch.eye",
"tensorflow.Session",
"tensorflow.placeholder",
... | [((707, 764), 'os.path.join', 'os.path.join', (['sys.argv[1]', "(dataset_name + '_preprocessed')"], {}), "(sys.argv[1], dataset_name + '_preprocessed')\n", (719, 764), False, 'import os\n'), ((524, 556), 'torch.cat', 'torch.cat', (['(tensorB, tensorA)', '(3)'], {}), '((tensorB, tensorA), 3)\n', (533, 556), False, 'import torch\n'), ((7599, 7632), 'os.listdir', 'os.listdir', (['datapath_preprocessed'], {}), '(datapath_preprocessed)\n', (7609, 7632), False, 'import os\n'), ((7959, 8021), 'torch.utils.data.distributed.DistributedSampler', 'torch.utils.data.distributed.DistributedSampler', (['train_dataset'], {}), '(train_dataset)\n', (8006, 8021), False, 'import torch\n'), ((8045, 8154), 'torch.utils.data.DataLoader', 'DataLoader', (['train_dataset'], {'batch_size': 'batch_size', 'num_workers': '(8)', 'pin_memory': '(False)', 'sampler': 'train_sampler'}), '(train_dataset, batch_size=batch_size, num_workers=8, pin_memory=\n False, sampler=train_sampler)\n', (8055, 8154), False, 'from torch.utils.data import Dataset, DataLoader\n'), ((9897, 9938), 'lamb.Lamb', 'Lamb', (['optimizer_param_groups'], {'lr': 'final_lr'}), '(optimizer_param_groups, lr=final_lr)\n', (9901, 9938), False, 'from lamb import Lamb\n'), ((10100, 10127), 'torch.nn.CrossEntropyLoss', 'torch.nn.CrossEntropyLoss', ([], {}), '()\n', (10125, 10127), False, 'import torch\n'), ((10142, 10165), 'torch.nn.Softmax', 'torch.nn.Softmax', ([], {'dim': '(1)'}), '(dim=1)\n', (10158, 10165), False, 'import torch\n'), ((10230, 10259), 'torch.cuda.set_device', 'torch.cuda.set_device', (['device'], {}), '(device)\n', (10251, 10259), False, 'import torch\n'), ((10281, 10341), 'torch.nn.parallel.DistributedDataParallel', 'torch.nn.parallel.DistributedDataParallel', (['model', 'device_ids'], {}), '(model, device_ids)\n', (10322, 10341), False, 'import torch\n'), ((14671, 14749), 'torch.distributed.init_process_group', 'torch.distributed.init_process_group', (['"""nccl"""'], {'rank': 'rank', 'world_size': 
'world_size'}), "('nccl', rank=rank, world_size=world_size)\n", (14707, 14749), False, 'import torch\n'), ((14884, 14905), 'torch.manual_seed', 'torch.manual_seed', (['(42)'], {}), '(42)\n', (14901, 14905), False, 'import torch\n'), ((14926, 14967), 'torch.distributed.destroy_process_group', 'torch.distributed.destroy_process_group', ([], {}), '()\n', (14965, 14967), False, 'import torch\n'), ((14994, 15087), 'torch.multiprocessing.spawn', 'torch.multiprocessing.spawn', (['worker_fn'], {'args': '(world_size,)', 'nprocs': 'world_size', 'join': '(True)'}), '(worker_fn, args=(world_size,), nprocs=\n world_size, join=True)\n', (15021, 15087), False, 'import torch\n'), ((2675, 2711), 'torch.nn.Linear', 'torch.nn.Linear', (['(32 * 4 * 4)', '(256 - 9)'], {}), '(32 * 4 * 4, 256 - 9)\n', (2690, 2711), False, 'import torch\n'), ((3681, 3703), 'torch.nn.ModuleList', 'torch.nn.ModuleList', (['h'], {}), '(h)\n', (3700, 3703), False, 'import torch\n'), ((4000, 4030), 'torch.nn.Linear', 'torch.nn.Linear', (['self.f_dim', '(1)'], {}), '(self.f_dim, 1)\n', (4015, 4030), False, 'import torch\n'), ((5636, 5782), 'torch.cat', 'torch.cat', (['(given, option1, given, option2, given, option3, given, option4, given,\n option5, given, option6, given, option7, given, option8)', '(1)'], {}), '((given, option1, given, option2, given, option3, given, option4,\n given, option5, given, option6, given, option7, given, option8), 1)\n', (5645, 5782), False, 'import torch\n'), ((8440, 8479), 'os.path.isfile', 'os.path.isfile', (["('./' + weights_filename)"], {}), "('./' + weights_filename)\n", (8454, 8479), False, 'import os\n'), ((10018, 10066), 'apex.amp.initialize', 'amp.initialize', (['model', 'optimizer'], {'opt_level': '"""O1"""'}), "(model, optimizer, opt_level='O1')\n", (10032, 10066), False, 'from apex import amp\n'), ((10402, 10414), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (10412, 10414), True, 'import tensorflow as tf\n'), ((10447, 10483), 'tensorflow.placeholder', 
'tf.placeholder', (['tf.float32'], {'shape': '()'}), '(tf.float32, shape=())\n', (10461, 10483), True, 'import tensorflow as tf\n'), ((10512, 10568), 'tensorflow.summary.scalar', 'tf.summary.scalar', (['"""training_acc"""', 'train_acc_placeholder'], {}), "('training_acc', train_acc_placeholder)\n", (10529, 10568), True, 'import tensorflow as tf\n'), ((10599, 10635), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': '()'}), '(tf.float32, shape=())\n', (10613, 10635), True, 'import tensorflow as tf\n'), ((10662, 10718), 'tensorflow.summary.scalar', 'tf.summary.scalar', (['"""validation_acc"""', 'val_acc_placeholder'], {}), "('validation_acc', val_acc_placeholder)\n", (10679, 10718), True, 'import tensorflow as tf\n'), ((10736, 10776), 'tensorflow.summary.FileWriter', 'tf.summary.FileWriter', (['"""log"""', 'sess.graph'], {}), "('log', sess.graph)\n", (10757, 10776), True, 'import tensorflow as tf\n'), ((2026, 2086), 'torch.linspace', 'torch.linspace', (['(-1.0)', '(1.0)'], {'steps': 'self.magnitude_encoding_dim'}), '(-1.0, 1.0, steps=self.magnitude_encoding_dim)\n', (2040, 2086), False, 'import torch\n'), ((2295, 2343), 'torch.nn.Conv2d', 'torch.nn.Conv2d', (['conv_input_dim', '(32)', '(3)'], {'stride': '(2)'}), '(conv_input_dim, 32, 3, stride=2)\n', (2310, 2343), False, 'import torch\n'), ((2358, 2378), 'torch.nn.LeakyReLU', 'torch.nn.LeakyReLU', ([], {}), '()\n', (2376, 2378), False, 'import torch\n'), ((2392, 2428), 'torch.nn.Conv2d', 'torch.nn.Conv2d', (['(32)', '(32)', '(3)'], {'stride': '(2)'}), '(32, 32, 3, stride=2)\n', (2407, 2428), False, 'import torch\n'), ((2443, 2463), 'torch.nn.LeakyReLU', 'torch.nn.LeakyReLU', ([], {}), '()\n', (2461, 2463), False, 'import torch\n'), ((2477, 2513), 'torch.nn.Conv2d', 'torch.nn.Conv2d', (['(32)', '(32)', '(3)'], {'stride': '(2)'}), '(32, 32, 3, stride=2)\n', (2492, 2513), False, 'import torch\n'), ((2528, 2548), 'torch.nn.LeakyReLU', 'torch.nn.LeakyReLU', ([], {}), '()\n', (2546, 2548), False, 
'import torch\n'), ((2562, 2598), 'torch.nn.Conv2d', 'torch.nn.Conv2d', (['(32)', '(32)', '(3)'], {'stride': '(2)'}), '(32, 32, 3, stride=2)\n', (2577, 2598), False, 'import torch\n'), ((2613, 2633), 'torch.nn.LeakyReLU', 'torch.nn.LeakyReLU', ([], {}), '()\n', (2631, 2633), False, 'import torch\n'), ((2855, 2891), 'torch.nn.Linear', 'torch.nn.Linear', (['(2 * 256)', 'self.g_dim'], {}), '(2 * 256, self.g_dim)\n', (2870, 2891), False, 'import torch\n'), ((2908, 2928), 'torch.nn.LeakyReLU', 'torch.nn.LeakyReLU', ([], {}), '()\n', (2926, 2928), False, 'import torch\n'), ((2946, 2985), 'torch.nn.Linear', 'torch.nn.Linear', (['self.g_dim', 'self.g_dim'], {}), '(self.g_dim, self.g_dim)\n', (2961, 2985), False, 'import torch\n'), ((3004, 3024), 'torch.nn.LeakyReLU', 'torch.nn.LeakyReLU', ([], {}), '()\n', (3022, 3024), False, 'import torch\n'), ((3042, 3081), 'torch.nn.Linear', 'torch.nn.Linear', (['self.g_dim', 'self.g_dim'], {}), '(self.g_dim, self.g_dim)\n', (3057, 3081), False, 'import torch\n'), ((3100, 3120), 'torch.nn.LeakyReLU', 'torch.nn.LeakyReLU', ([], {}), '()\n', (3118, 3120), False, 'import torch\n'), ((3138, 3177), 'torch.nn.Linear', 'torch.nn.Linear', (['self.g_dim', 'self.h_dim'], {}), '(self.g_dim, self.h_dim)\n', (3153, 3177), False, 'import torch\n'), ((3195, 3215), 'torch.nn.LeakyReLU', 'torch.nn.LeakyReLU', ([], {}), '()\n', (3213, 3215), False, 'import torch\n'), ((3789, 3826), 'torch.nn.Linear', 'torch.nn.Linear', (['f_in_dim', 'self.f_dim'], {}), '(f_in_dim, self.f_dim)\n', (3804, 3826), False, 'import torch\n'), ((3845, 3865), 'torch.nn.LeakyReLU', 'torch.nn.LeakyReLU', ([], {}), '()\n', (3863, 3865), False, 'import torch\n'), ((3883, 3922), 'torch.nn.Linear', 'torch.nn.Linear', (['self.f_dim', 'self.f_dim'], {}), '(self.f_dim, self.f_dim)\n', (3898, 3922), False, 'import torch\n'), ((3941, 3961), 'torch.nn.LeakyReLU', 'torch.nn.LeakyReLU', ([], {}), '()\n', (3959, 3961), False, 'import torch\n'), ((8511, 8559), 'torch.load', 'torch.load', 
(['weights_filename'], {'map_location': '"""cpu"""'}), "(weights_filename, map_location='cpu')\n", (8521, 8559), False, 'import torch\n'), ((10989, 11004), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (11002, 11004), False, 'import torch\n'), ((1044, 1089), 'os.path.join', 'os.path.join', (['datapath_preprocessed', 'filename'], {}), '(datapath_preprocessed, filename)\n', (1056, 1089), False, 'import os\n'), ((1881, 1895), 'math.sqrt', 'math.sqrt', (['(2.0)'], {}), '(2.0)\n', (1890, 1895), False, 'import math\n'), ((1950, 1972), 'math.sqrt', 'math.sqrt', (['(2 * math.pi)'], {}), '(2 * math.pi)\n', (1959, 1972), False, 'import math\n'), ((3339, 3382), 'torch.nn.Linear', 'torch.nn.Linear', (['(2 * self.h_dim)', 'self.h_dim'], {}), '(2 * self.h_dim, self.h_dim)\n', (3354, 3382), False, 'import torch\n'), ((3399, 3419), 'torch.nn.LeakyReLU', 'torch.nn.LeakyReLU', ([], {}), '()\n', (3417, 3419), False, 'import torch\n'), ((3437, 3476), 'torch.nn.Linear', 'torch.nn.Linear', (['self.h_dim', 'self.h_dim'], {}), '(self.h_dim, self.h_dim)\n', (3452, 3476), False, 'import torch\n'), ((3495, 3515), 'torch.nn.LeakyReLU', 'torch.nn.LeakyReLU', ([], {}), '()\n', (3513, 3515), False, 'import torch\n'), ((3533, 3572), 'torch.nn.Linear', 'torch.nn.Linear', (['self.h_dim', 'self.h_dim'], {}), '(self.h_dim, self.h_dim)\n', (3548, 3572), False, 'import torch\n'), ((3591, 3611), 'torch.nn.LeakyReLU', 'torch.nn.LeakyReLU', ([], {}), '()\n', (3609, 3611), False, 'import torch\n'), ((4248, 4263), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (4261, 4263), False, 'import torch\n'), ((2752, 2764), 'torch.eye', 'torch.eye', (['(9)'], {}), '(9)\n', (2761, 2764), False, 'import torch\n'), ((4907, 4944), 'torch.nn.functional.relu_', 'torch.nn.functional.relu_', (['batch_flat'], {}), '(batch_flat)\n', (4932, 4944), False, 'import torch\n'), ((13164, 13195), 'apex.amp.scale_loss', 'amp.scale_loss', (['loss', 'optimizer'], {}), '(loss, optimizer)\n', (13178, 13195), False, 'from apex 
import amp\n'), ((7699, 7722), 're.escape', 're.escape', (['dataset_name'], {}), '(dataset_name)\n', (7708, 7722), False, 'import re\n'), ((7828, 7851), 're.escape', 're.escape', (['dataset_name'], {}), '(dataset_name)\n', (7837, 7851), False, 'import re\n')] |
# Copyright 2022 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import functools
import os
from typing import Any, Callable, List, Tuple, Union
from assembly.common import FileEntry
__all__ = [
"create_fast_copy_mock_instance", "fast_copy_mock", "mock_fast_copy_in"
]
FilePath = Union[str, os.PathLike]
def fast_copy_mock(
src: FilePath, dst: FilePath, tracked_copies: List[FileEntry]) -> None:
"""A bindable-mock of assembly.fast_copy() that tracks all of the copies
that it's asked to perform in the passed-in list.
"""
tracked_copies.append(FileEntry(source=src, destination=dst))
def create_fast_copy_mock_instance() -> Tuple[Callable, List[FileEntry]]:
"""Create a mock implementation of fast_copy() that's bound to a list of
FileEntries in which it records all the copies it's asked to make.
"""
copies = []
return (functools.partial(fast_copy_mock, tracked_copies=copies), copies)
def mock_fast_copy_in(context: Any) -> Tuple[Callable, List[FileEntry]]:
"""Insert a new mock of `fast_copy` into the context, and return it.
"""
(mock_instance, copies) = create_fast_copy_mock_instance()
context.fast_copy = mock_instance
return (mock_instance, copies)
| [
"assembly.common.FileEntry",
"functools.partial"
] | [((675, 713), 'assembly.common.FileEntry', 'FileEntry', ([], {'source': 'src', 'destination': 'dst'}), '(source=src, destination=dst)\n', (684, 713), False, 'from assembly.common import FileEntry\n'), ((975, 1031), 'functools.partial', 'functools.partial', (['fast_copy_mock'], {'tracked_copies': 'copies'}), '(fast_copy_mock, tracked_copies=copies)\n', (992, 1031), False, 'import functools\n')] |
#!/usr/bin/env python3
import os
import sys
import subprocess
import traceback
from datetime import datetime
try:
sys.path.append(snakemake.config['args']['mcc_path'])
import scripts.mccutils as mccutils
import config.preprocessing.trimgalore as trimgalore
from Bio import SeqIO
except Exception as e:
track = traceback.format_exc()
print(track, file=sys.stderr)
print("ERROR...unable to locate required external scripts at: "+snakemake.config['args']['mcc_path']+"/scripts/ or "+snakemake.config['args']['mcc_path']+"/config/preprocessing/", file=sys.stderr)
sys.exit(1)
class fileFormatError(Exception):
def __init__(self, message):
self.message = message
pass
def main():
fq1 = snakemake.input.fq1
fq2 = snakemake.params.fq2
methods = snakemake.params.methods.split(",")
processors = snakemake.threads
mcc_out = snakemake.params.out
run_id = snakemake.params.run_id
log = snakemake.params.log
# now = datetime.now()
# start = now.strftime("%Y-%m-%d %H:%M:%S")
mccutils.log("processing", "prepping reads for McClintock")
# trims adaptors of input fastq(s)
trimmedfq = fq1
trimmedfq2 = fq2
try:
check_fastqs(fq1, fq2, mcc_out, min_length=30, log=log)
if "trimgalore" in methods:
mccutils.log("processing", "running trim_galore", log=log)
if fq2 == "None":
flags = trimgalore.SINGLE_END_FLAGS
trimmedfq = run_trim_galore(fq1, run_id, log, mcc_out, cores=processors, flags=flags)
else:
flags = trimgalore.PAIRED_END_FLAGS
trimmedfq, trimmedfq2 = run_trim_galore(fq1, run_id, log, mcc_out, fq2=fq2, cores=processors, flags=flags)
run_multiqc(mcc_out+"/results/trimgalore/")
# make unzipped copies in mcc input dir
make_copies(trimmedfq, trimmedfq2, snakemake.output[0], snakemake.output[1])
# removes trimmed read files from trimgalore directory
if trimmedfq != fq1:
mccutils.remove(trimmedfq)
if trimmedfq2 != fq2:
mccutils.remove(trimmedfq2)
except Exception as e:
track = traceback.format_exc()
print(track, file=sys.stderr)
print("ERROR processing of FastQ files failed...check that your FastQ files are formatted correctly...Exiting...", file=sys.stderr)
mccutils.remove(snakemake.output[0])
mccutils.remove(snakemake.output[1])
sys.exit(1)
# now = datetime.now()
# end = now.strftime("%Y-%m-%d %H:%M:%S")
# mccutils.log("setup_reads", "start: "+start)
# mccutils.log("setup_reads", "end: "+end)
mccutils.log("processing", "read setup complete")
def make_copies(fq1, fq2, fq1copy, fq2copy):
if "gz" in fq1.split(".")[-1]:
mccutils.run_command_stdout(["zcat",fq1], fq1copy)
else:
mccutils.run_command(["cp", fq1, fq1copy])
if fq2 == "None":
mccutils.run_command(["touch", fq2copy])
elif "gz" in fq2.split(".")[-1]:
mccutils.run_command_stdout(["zcat",fq2], fq2copy)
else:
mccutils.run_command(["cp", fq2, fq2copy])
return fq1copy, fq2copy
def has_valid_read_lengths(fq1, fq2, min_length=30, paired=False):
if paired:
fqs_to_check = [fq1, fq2]
else:
fqs_to_check = [fq1]
for x,fq in enumerate(fqs_to_check):
has_valid_reads = False
for record in SeqIO.parse(fq, "fastq"):
if len(str(record.seq)) >= min_length:
has_valid_reads = True
break
if not has_valid_reads:
raise fileFormatError("fastq "+str(x+1)+" lacks any reads >= the minimum length of:"+str(min_length))
def has_valid_read_ids(fq1, fq2, log=None):
passed = mccutils.run_command(["fastq_info", fq1, fq2], log=log, fatal=False)
if not passed:
raise fileFormatError("Paired fastq files failed validation, see: "+log+" for details")
def check_fastqs(fq1, fq2, out, min_length=30, log=None):
mccutils.mkdir(out+"/tmp")
if fq2 == "None":
paired = False
else:
paired =True
fq1, fq2 = make_copies(fq1, fq2, out+"/tmp/tmp_val_fq_1.fq", out+"/tmp/tmp_val_fq_2.fq")
has_valid_read_lengths(fq1, fq2, min_length=min_length, paired=paired)
if paired:
has_valid_read_ids(fq1, fq2, log=log)
def run_trim_galore(fq1, run_id, log, out, fq2=None, cores=1, flags=[]):
mccutils.mkdir(out+"/results/")
command = ['trim_galore'] + flags + ["-j", str(cores), "-o", out+"/results/trimgalore"]
if fq2 is None:
command.append(fq1)
else:
command += [fq1, fq2]
mccutils.run_command(command, log=log)
if fq2 is None:
outfq = ""
for f in os.listdir(out+"/results/trimgalore"):
if "_trimmed.fq" in f:
outfq = out+"/results/trimgalore/"+f
file_exists = mccutils.check_file_exists(outfq)
return outfq
else:
outfq1 = ""
outfq2 = ""
for f in os.listdir(out+"/results/trimgalore"):
if "_val_1.fq" in f:
outfq1 = out+"/results/trimgalore/"+f
elif "_val_2.fq" in f:
outfq2= out+"/results/trimgalore/"+f
file_exists = mccutils.check_file_exists(outfq1)
file_exists = mccutils.check_file_exists(outfq2)
return outfq1, outfq2
def run_multiqc(trimgalore_dir):
os.chdir(trimgalore_dir)
mccutils.run_command(["multiqc","."])
if __name__ == "__main__":
main()
| [
"traceback.format_exc",
"os.listdir",
"scripts.mccutils.remove",
"scripts.mccutils.check_file_exists",
"os.chdir",
"scripts.mccutils.run_command",
"scripts.mccutils.mkdir",
"Bio.SeqIO.parse",
"sys.exit",
"scripts.mccutils.log",
"sys.path.append",
"scripts.mccutils.run_command_stdout"
] | [((119, 172), 'sys.path.append', 'sys.path.append', (["snakemake.config['args']['mcc_path']"], {}), "(snakemake.config['args']['mcc_path'])\n", (134, 172), False, 'import sys\n'), ((1060, 1119), 'scripts.mccutils.log', 'mccutils.log', (['"""processing"""', '"""prepping reads for McClintock"""'], {}), "('processing', 'prepping reads for McClintock')\n", (1072, 1119), True, 'import scripts.mccutils as mccutils\n'), ((2730, 2779), 'scripts.mccutils.log', 'mccutils.log', (['"""processing"""', '"""read setup complete"""'], {}), "('processing', 'read setup complete')\n", (2742, 2779), True, 'import scripts.mccutils as mccutils\n'), ((3865, 3933), 'scripts.mccutils.run_command', 'mccutils.run_command', (["['fastq_info', fq1, fq2]"], {'log': 'log', 'fatal': '(False)'}), "(['fastq_info', fq1, fq2], log=log, fatal=False)\n", (3885, 3933), True, 'import scripts.mccutils as mccutils\n'), ((4114, 4142), 'scripts.mccutils.mkdir', 'mccutils.mkdir', (["(out + '/tmp')"], {}), "(out + '/tmp')\n", (4128, 4142), True, 'import scripts.mccutils as mccutils\n'), ((4532, 4565), 'scripts.mccutils.mkdir', 'mccutils.mkdir', (["(out + '/results/')"], {}), "(out + '/results/')\n", (4546, 4565), True, 'import scripts.mccutils as mccutils\n'), ((4753, 4791), 'scripts.mccutils.run_command', 'mccutils.run_command', (['command'], {'log': 'log'}), '(command, log=log)\n', (4773, 4791), True, 'import scripts.mccutils as mccutils\n'), ((5519, 5543), 'os.chdir', 'os.chdir', (['trimgalore_dir'], {}), '(trimgalore_dir)\n', (5527, 5543), False, 'import os\n'), ((5548, 5586), 'scripts.mccutils.run_command', 'mccutils.run_command', (["['multiqc', '.']"], {}), "(['multiqc', '.'])\n", (5568, 5586), True, 'import scripts.mccutils as mccutils\n'), ((331, 353), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (351, 353), False, 'import traceback\n'), ((593, 604), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (601, 604), False, 'import sys\n'), ((2870, 2921), 
'scripts.mccutils.run_command_stdout', 'mccutils.run_command_stdout', (["['zcat', fq1]", 'fq1copy'], {}), "(['zcat', fq1], fq1copy)\n", (2897, 2921), True, 'import scripts.mccutils as mccutils\n'), ((2939, 2981), 'scripts.mccutils.run_command', 'mccutils.run_command', (["['cp', fq1, fq1copy]"], {}), "(['cp', fq1, fq1copy])\n", (2959, 2981), True, 'import scripts.mccutils as mccutils\n'), ((3017, 3057), 'scripts.mccutils.run_command', 'mccutils.run_command', (["['touch', fq2copy]"], {}), "(['touch', fq2copy])\n", (3037, 3057), True, 'import scripts.mccutils as mccutils\n'), ((3514, 3538), 'Bio.SeqIO.parse', 'SeqIO.parse', (['fq', '"""fastq"""'], {}), "(fq, 'fastq')\n", (3525, 3538), False, 'from Bio import SeqIO\n'), ((4849, 4888), 'os.listdir', 'os.listdir', (["(out + '/results/trimgalore')"], {}), "(out + '/results/trimgalore')\n", (4859, 4888), False, 'import os\n'), ((4999, 5032), 'scripts.mccutils.check_file_exists', 'mccutils.check_file_exists', (['outfq'], {}), '(outfq)\n', (5025, 5032), True, 'import scripts.mccutils as mccutils\n'), ((5122, 5161), 'os.listdir', 'os.listdir', (["(out + '/results/trimgalore')"], {}), "(out + '/results/trimgalore')\n", (5132, 5161), False, 'import os\n'), ((5359, 5393), 'scripts.mccutils.check_file_exists', 'mccutils.check_file_exists', (['outfq1'], {}), '(outfq1)\n', (5385, 5393), True, 'import scripts.mccutils as mccutils\n'), ((5416, 5450), 'scripts.mccutils.check_file_exists', 'mccutils.check_file_exists', (['outfq2'], {}), '(outfq2)\n', (5442, 5450), True, 'import scripts.mccutils as mccutils\n'), ((1323, 1381), 'scripts.mccutils.log', 'mccutils.log', (['"""processing"""', '"""running trim_galore"""'], {'log': 'log'}), "('processing', 'running trim_galore', log=log)\n", (1335, 1381), True, 'import scripts.mccutils as mccutils\n'), ((2100, 2126), 'scripts.mccutils.remove', 'mccutils.remove', (['trimmedfq'], {}), '(trimmedfq)\n', (2115, 2126), True, 'import scripts.mccutils as mccutils\n'), ((2169, 2196), 
'scripts.mccutils.remove', 'mccutils.remove', (['trimmedfq2'], {}), '(trimmedfq2)\n', (2184, 2196), True, 'import scripts.mccutils as mccutils\n'), ((2241, 2263), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (2261, 2263), False, 'import traceback\n'), ((2450, 2486), 'scripts.mccutils.remove', 'mccutils.remove', (['snakemake.output[0]'], {}), '(snakemake.output[0])\n', (2465, 2486), True, 'import scripts.mccutils as mccutils\n'), ((2495, 2531), 'scripts.mccutils.remove', 'mccutils.remove', (['snakemake.output[1]'], {}), '(snakemake.output[1])\n', (2510, 2531), True, 'import scripts.mccutils as mccutils\n'), ((2540, 2551), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2548, 2551), False, 'import sys\n'), ((3108, 3159), 'scripts.mccutils.run_command_stdout', 'mccutils.run_command_stdout', (["['zcat', fq2]", 'fq2copy'], {}), "(['zcat', fq2], fq2copy)\n", (3135, 3159), True, 'import scripts.mccutils as mccutils\n'), ((3182, 3224), 'scripts.mccutils.run_command', 'mccutils.run_command', (["['cp', fq2, fq2copy]"], {}), "(['cp', fq2, fq2copy])\n", (3202, 3224), True, 'import scripts.mccutils as mccutils\n')] |
from django import forms
class EmptyForm(forms.Form):
    """A form with no fields, used where a view expects a form but no input is needed."""
    pass
class LoginForm(forms.Form):
    """Credentials form for logging in to siptrack."""
    username = forms.CharField(max_length=50, label='Username')
    password = forms.CharField(max_length=32, label='Password',
                               widget=forms.PasswordInput(), required=True)
class DeleteForm(forms.Form):
    """Confirmation form for delete operations; carries a single hidden flag."""
    verify = forms.CharField(
        initial='true',
        widget=forms.HiddenInput()
    )
class ConfirmForm(forms.Form):
    """Generic confirmation form; carries a single hidden flag."""
    verify = forms.CharField(
        initial='true',
        widget=forms.HiddenInput()
    )
class ViewAddForm(forms.Form):
    """Form for creating a new view."""
    name = forms.CharField(
        max_length=50,
        label='Name'
    )
    description = forms.CharField(
        max_length=100,
        required=False,
        label='Description'
    )
class ViewUpdateForm(forms.Form):
    """Form for updating an existing view's name and description."""
    name = forms.CharField(
        max_length=50,
        label='Name'
    )
    description = forms.CharField(
        max_length=100,
        required=False,
        label='Description'
    )
class ViewSearchForm(forms.Form):
    """Simple free-text search form; the widget id is used by the UI's search box."""
    searchstring = forms.CharField(
        max_length=50,
        required=True,
        widget=forms.TextInput(attrs={'id': 'searchbox'})
    )
class ViewAdvancedSearchForm(forms.Form):
    """Advanced search form: attribute/value filter plus result-type selection."""
    searchAttribute = forms.CharField(
        max_length=50,
        required=True
    )
    searchValue = forms.CharField(
        max_length=50,
        required=False
    )
    attributesList = forms.CharField(
        max_length=256,
        required=False
    )
    # Node categories a search result may be narrowed down to.
    OPTIONS = (
        ('devices', 'devices'),
        ('device categories', 'device categories'),
        ('passwords', 'passwords'),
        ('password categories', 'password categories'),
        ('networks', 'networks')
    )
    displayTypes = forms.MultipleChoiceField(
        choices=OPTIONS,
        required=False
    )
class NetworkTreeAddForm(forms.Form):
    """Form for creating a network tree with a fixed protocol (ipv4/ipv6)."""
    name = forms.CharField(
        max_length=50,
        label='Name'
    )
    protocol = forms.ChoiceField(
        label='Protocol',
        choices=(('ipv4', 'ipv4'), ('ipv6', 'ipv6'))
    )
class NetworkAddForm(forms.Form):
    """Form for adding a network/address in CIDR notation to a network tree."""
    name = forms.CharField(
        max_length=50,
        label='Address',
        help_text='The network/address in CIDR form (x.x.x.x or x.x.x.x/xx)'
    )
    description = forms.CharField(
        max_length=100,
        required=False,
        label='Description'
    )
class NetworkRangeAddForm(forms.Form):
    """Form for adding a network range."""
    range = forms.CharField(
        max_length=50,
        label='Range'
    )
    description = forms.CharField(
        max_length=100,
        required=False,
        label='Description'
    )
class NetworkDeleteForm(forms.Form):
    """Deletion form for networks, with an optional recursive-delete flag."""
    recursive = forms.BooleanField(
        label='Recursive delete',
        required=False
    )
class PasswordKeyAddForm(forms.Form):
    """Form for creating a password key; the key is entered twice for verification."""
    name = forms.CharField(
        max_length=50,
        label='Name'
    )
    key = forms.CharField(
        max_length=32,
        label='Key',
        widget=forms.PasswordInput(),
        required=False
    )
    # Must match `key`; validation is presumably done in the view — confirm.
    validate = forms.CharField(
        max_length=32,
        label='Key (again)',
        widget=forms.PasswordInput(),
        required=False
    )
    description = forms.CharField(
        max_length=100,
        required=False,
        label='Description'
    )
class CounterAddBasicForm(forms.Form):
    """Form for creating a basic (numeric) counter."""
    name = forms.CharField(
        max_length=50,
        label='Name'
    )
    description = forms.CharField(
        max_length=100,
        required=False,
        label='Description'
    )
class CounterAddLoopingForm(forms.Form):
    """Form for creating a looping counter that cycles through a fixed list of values."""
    name = forms.CharField(
        max_length=50,
        label='Name'
    )
    description = forms.CharField(
        max_length=100,
        required=False,
        label='Description'
    )
    # Newline-separated list of values the counter loops over.
    values = forms.CharField(
        max_length=5000,
        label='Values',
        help_text='one value per row',
        widget=forms.Textarea(attrs={'cols':'30', 'rows': '5'})
    )
class CounterUpdateBasicForm(forms.Form):
    """Form for updating a basic counter, including its current numeric value."""
    name = forms.CharField(
        max_length=50,
        label='Name'
    )
    description = forms.CharField(
        max_length=100,
        required=False,
        label='Description'
    )
    value = forms.DecimalField(
        min_value=0,
        decimal_places=0,
        label='Value'
    )
class CounterUpdateLoopingForm(forms.Form):
    """Form for updating a looping counter: current value plus the value list."""
    name = forms.CharField(
        max_length=50,
        label='Name'
    )
    description = forms.CharField(
        max_length=100,
        required=False,
        label='Description'
    )
    value = forms.CharField(
        max_length=50,
        label='Value'
    )
    # Newline-separated list of values the counter loops over.
    values = forms.CharField(
        max_length=5000,
        label='Values',
        help_text='one value per row',
        widget=forms.Textarea(attrs={'cols':'30', 'rows': '5'})
    )
class CounterSetForm(forms.Form):
    """Form for setting a counter to a specific non-negative integer value."""
    value = forms.DecimalField(
        min_value=0,
        decimal_places=0,
        label='Value'
    )
class PasswordAddForm(forms.Form):
    """Form for adding a password entry.

    The password-key choice field is built at runtime from the
    ``password_keys`` iterable passed to the constructor.
    """
    pw_username = forms.CharField(
        max_length=50,
        label='Username',
        required=False
    )
    pw_password = forms.CharField(
        max_length=250,
        label='Password',
        widget=forms.PasswordInput(),
        required=False,
        help_text='Max length: 250, leave empty for generated password.'
    )
    validate = forms.CharField(
        max_length=250,
        label='Password (again)',
        widget=forms.PasswordInput(),
        required=False
    )
    description = forms.CharField(
        max_length=100,
        required=False,
        label='Description'
    )

    def __init__(self, password_keys, *args, **kwargs):
        """Build the 'passwordkey' choice field from the given password keys."""
        super(PasswordAddForm, self).__init__(*args, **kwargs)
        keylist = [('__no-password-key__', 'None')]
        for key in password_keys:
            value = (key.oid, key.attributes['name'])
            # The default key (if any) is listed first, before 'None'.
            if key.attributes.get('default', False) is True:
                keylist.insert(0, value)
            else:
                keylist.append(value)
        field = forms.ChoiceField(
            label='Password key',
            choices=keylist
        )
        self.fields['passwordkey'] = field
class PasswordUpdateForm(forms.Form):
    """Form for updating an existing password entry.

    The password-key choice field is built at runtime from the
    ``password_keys`` iterable passed to the constructor.
    """
    pw_username = forms.CharField(
        max_length=50,
        label='Username',
        required=False
    )
    pw_password = forms.CharField(
        max_length=250,
        label='Password',
        widget=forms.PasswordInput(),
        required=False,
        help_text='Max length: 250, leave empty for generated password.'
    )
    validate = forms.CharField(
        max_length=250,
        label='Password (again)',
        widget=forms.PasswordInput(),
        required=False
    )
    description = forms.CharField(
        max_length=100,
        required=False,
        label='Description'
    )

    def __init__(self, password_keys, *args, **kwargs):
        """Build the 'passwordkey' choice field from the given password keys."""
        super(PasswordUpdateForm, self).__init__(*args, **kwargs)
        choices = [('__no-password-key__', 'None')]
        for key in password_keys:
            entry = (key.oid, key.attributes['name'])
            # The default key (if any) is listed first, before 'None'.
            if key.attributes.get('default', False) is True:
                choices.insert(0, entry)
            else:
                choices.append(entry)
        self.fields['passwordkey'] = forms.ChoiceField(
            label='Password key',
            choices=choices
        )
class DeviceTemplateAddForm(forms.Form):
    """Form for creating a device template.

    The inherited-templates choice field is built at runtime from the
    ``templates`` iterable passed to the constructor.
    """
    name = forms.CharField(max_length = 50, label = 'Name')
    description = forms.CharField(max_length = 80, label = 'Description',
            help_text = 'Description of this template.', required = False)
    inheritance_only = forms.BooleanField(label = 'Inheritance only',
            required = False,
            initial = False,
            help_text = 'Template is used for inheritance only.')
    device_creation = forms.BooleanField(label = 'Device creation',
            required = False,
            initial = False,
            help_text = 'Template is used for device creation.')

    def __init__(self, templates, *args, **kwargs):
        super(DeviceTemplateAddForm, self).__init__(*args, **kwargs)
        choices = []
        for template in templates:
            # Fixed typo: placeholder was '[UKNOWN]', now matches the
            # '[UNKNOWN]' placeholder used elsewhere in this module.
            choices.append((template.oid,
                template.attributes.get('name', '[UNKNOWN]')))
        field = forms.MultipleChoiceField(required = False,
                label = 'Inherited templates',
                choices = choices)
        self.fields['inherited_templates'] = field
class NetworkTemplateAddForm(forms.Form):
    """Form for creating a network template.

    The inherited-templates choice field is built at runtime from the
    ``templates`` iterable passed to the constructor.
    """
    name = forms.CharField(max_length = 50, label = 'Name')
    description = forms.CharField(max_length = 80, label = 'Description',
            help_text = 'Description of this template.', required = False)
    inheritance_only = forms.BooleanField(label = 'Inheritance only',
            required = False,
            initial = False,
            help_text = 'Template is used for inheritance only.')

    def __init__(self, templates, *args, **kwargs):
        super(NetworkTemplateAddForm, self).__init__(*args, **kwargs)
        choices = []
        for template in templates:
            # Fixed typo: placeholder was '[UKNOWN]', now matches the
            # '[UNKNOWN]' placeholder used elsewhere in this module.
            choices.append((template.oid,
                template.attributes.get('name', '[UNKNOWN]')))
        field = forms.MultipleChoiceField(required = False,
                label = 'Inherited templates',
                choices = choices)
        self.fields['inherited_templates'] = field
class DeviceTemplateUpdateForm(forms.Form):
    """Form for updating a device template.

    The inherited-templates choice field is built at runtime from the
    ``templates`` iterable passed to the constructor.
    """
    name = forms.CharField(max_length = 50, label = 'Name')
    description = forms.CharField(max_length = 80, label = 'Description',
            help_text = 'Description of this template.', required = False)
    inheritance_only = forms.BooleanField(label = 'Inheritance only',
            required = False,
            initial = False,
            help_text = 'Template is used for inheritance only.')
    device_creation = forms.BooleanField(label = 'Device creation',
            required = False,
            initial = False,
            help_text = 'Template is used for device creation.')

    def __init__(self, templates, *args, **kwargs):
        super(DeviceTemplateUpdateForm, self).__init__(*args, **kwargs)
        choices = []
        for template in templates:
            # Fixed typo: placeholder was '[UKNOWN]', now matches the
            # '[UNKNOWN]' placeholder used elsewhere in this module.
            choices.append((template.oid,
                template.attributes.get('name', '[UNKNOWN]')))
        field = forms.MultipleChoiceField(required = False,
                label = 'Inherited templates',
                choices = choices)
        self.fields['inherited_templates'] = field
class NetworkTemplateUpdateForm(forms.Form):
    """Form for updating a network template.

    The inherited-templates choice field is built at runtime from the
    ``templates`` iterable passed to the constructor.
    """
    name = forms.CharField(max_length = 50, label = 'Name')
    description = forms.CharField(max_length = 80, label = 'Description',
            help_text = 'Description of this template.', required = False)
    inheritance_only = forms.BooleanField(label = 'Inheritance only',
            required = False,
            initial = False,
            help_text = 'Template is used for inheritance only.')

    def __init__(self, templates, *args, **kwargs):
        super(NetworkTemplateUpdateForm, self).__init__(*args, **kwargs)
        choices = []
        for template in templates:
            # Fixed typo: placeholder was '[UKNOWN]', now matches the
            # '[UNKNOWN]' placeholder used elsewhere in this module.
            choices.append((template.oid,
                template.attributes.get('name', '[UNKNOWN]')))
        field = forms.MultipleChoiceField(required = False,
                label = 'Inherited templates',
                choices = choices)
        self.fields['inherited_templates'] = field
class TemplateRuleTextAddForm(forms.Form):
    """Form for adding a 'text' rule to a template."""
    attr_name = forms.CharField(max_length = 50, label = 'Attribute name',
            help_text = 'Name of attribute to create.')
    hidden = forms.BooleanField(label = 'Hide attribute',
            required = False,
            initial = False,
            help_text = 'If true, the attribute will hidden per default if it is large/wikitext.')
    important = forms.BooleanField(label = 'Important attribute',
            required = False,
            initial = False,
            help_text = 'If true, the attribute will be displayed on the device/entity overview page.')
    large = forms.BooleanField(label = 'Large attribute',
            required = False,
            initial = False,
            help_text = 'If true, the attribute will have a separate display box.')
    wikitext = forms.BooleanField(label = 'Wikitext attribute',
            required = False,
            initial = False,
            help_text = 'If true, the attribute will be displayed using wikitext parsing, implies "large".')
    description = forms.CharField(max_length = 80, label = 'Description',
            help_text = 'Description of this rule.', required = False)
    priority = forms.IntegerField(label = 'Priority',
            min_value = 0, initial = 10,
            help_text = 'The priority of this rule when using the templates, lower value will be displayed first.')
    versions = forms.IntegerField(label = 'Versions',
            min_value = 1, initial = 1,
            help_text = 'Number of stored versions of the attribute.')
class TemplateRuleFixedAddForm(forms.Form):
    """Form for adding a 'fixed value' rule to a template."""
    attr_name = forms.CharField(max_length = 50, label = 'Attribute name',
            help_text = 'Name of attribute to create.')
    string_value = forms.CharField(max_length = 100, label = 'String value',
            help_text = 'The created attributes value.')
    variable_expansion = forms.BooleanField(label = 'Expand variables',
            required = False,
            initial = False)
    important = forms.BooleanField(label = 'Important attribute',
            required = False,
            initial = False,
            help_text = 'If true, the attribute will be displayed on the device/entity overview page.')
    description = forms.CharField(max_length = 80, label = 'Description',
            help_text = 'Description of this rule.', required = False)
    priority = forms.IntegerField(label = 'Priority',
            min_value = 0, initial = 10,
            help_text = 'The priority of this rule when using the templates, lower value will be displayed first.')
    versions = forms.IntegerField(label = 'Versions',
            min_value = 1, initial = 1,
            help_text = 'Number of stored versions of the attribute.')
class TemplateRuleRegmatchAddForm(forms.Form):
    """Form for adding a 'regexp match' rule to a template."""
    attr_name = forms.CharField(max_length = 50, label = 'Attribute name',
            help_text = 'Name of attribute to create.')
    regexp = forms.CharField(max_length = 50, label = 'Regexp',
            help_text = 'Regular expression that must match the input value.')
    description = forms.CharField(max_length = 80, label = 'Description',
            help_text = 'Description of this rule.', required = False)
    versions = forms.IntegerField(label = 'Versions',
            min_value = 1, initial = 1,
            help_text = 'Number of stored versions of the attribute.')
    priority = forms.IntegerField(label = 'Priority',
            min_value = 0, initial = 10,
            help_text = 'The priority of this rule when using the templates, lower value will be displayed first.')
    important = forms.BooleanField(label = 'Important attribute',
            required = False,
            initial = False,
            help_text = 'If true, the attribute will be displayed on the device/entity overview page.')
class TemplateRuleBoolAddForm(forms.Form):
    """Form for adding a 'boolean' rule to a template."""
    attr_name = forms.CharField(max_length = 50, label = 'Attribute name',
            help_text = 'Name of attribute to create.')
    default = forms.ChoiceField(label = 'Default',
            choices = (('true', 'True'), ('false', 'False')),
            help_text = 'Default value for attribute.')
    description = forms.CharField(max_length = 80, label = 'Description',
            help_text = 'Description of this rule.', required = False)
    versions = forms.IntegerField(label = 'Versions',
            min_value = 1, initial = 1,
            help_text = 'Number of stored versions of the attribute.')
    priority = forms.IntegerField(label = 'Priority',
            min_value = 0, initial = 10,
            help_text = 'The priority of this rule when using the templates, lower value will be displayed first.')
    important = forms.BooleanField(label = 'Important attribute',
            required = False,
            initial = False,
            help_text = 'If true, the attribute will be displayed on the device/entity overview page.')
class TemplateRuleIntAddForm(forms.Form):
    """Form for adding an 'integer' rule to a template."""
    attr_name = forms.CharField(max_length = 50, label = 'Attribute name',
            help_text = 'Name of attribute to create.')
    default = forms.IntegerField(label = 'Default',
            initial = 0,
            help_text = 'Default value.')
    description = forms.CharField(max_length = 80, label = 'Description',
            help_text = 'Description of this rule.', required = False)
    versions = forms.IntegerField(label = 'Versions',
            min_value = 1, initial = 1,
            help_text = 'Number of stored versions of the attribute.')
    priority = forms.IntegerField(label = 'Priority',
            min_value = 0, initial = 10,
            help_text = 'The priority of this rule when using the templates, lower value will be displayed first.')
    important = forms.BooleanField(label = 'Important attribute',
            required = False,
            initial = False,
            help_text = 'If true, the attribute will be displayed on the device/entity overview page.')
class TemplateRuleDeleteAttributeAddForm(forms.Form):
    """Form for adding a 'delete attribute' rule to a template."""
    attr_name = forms.CharField(max_length = 50, label = 'Attribute name',
            help_text = 'Name of attribute to delete.')
    description = forms.CharField(max_length = 80, label = 'Description',
            help_text = 'Description of this rule.', required = False)
    priority = forms.IntegerField(label = 'Priority',
            min_value = 0, initial = 10,
            help_text = 'The priority of this rule when using the templates, lower value will be displayed first.')
class TemplateRuleFlushNodesAddForm(forms.Form):
    """Form for adding a 'flush nodes' rule; include/exclude choice fields
    are built at runtime from the ``node_types`` iterable."""
    description = forms.CharField(max_length = 80, label = 'Description',
            help_text = 'Description of this rule.', required = False)
    priority = forms.IntegerField(label = 'Priority',
            min_value = 0, initial = 10,
            help_text = 'The priority of this rule when using the templates, lower value will be displayed first.')
    def __init__(self, node_types, *args, **kwargs):
        super(TemplateRuleFlushNodesAddForm, self).__init__(*args, **kwargs)
        choices = []
        for node_type in node_types:
            choices.append((node_type, node_type))
        field = forms.MultipleChoiceField(required = False,
                label = 'Included node types',
                choices = choices,
                help_text = 'If no node types are chosen for include, all types will match.')
        self.fields['include'] = field
        field = forms.MultipleChoiceField(required = False,
                label = 'Excluded node types',
                choices = choices)
        self.fields['exclude'] = field
class TemplateRuleFlushAssociationsAddForm(forms.Form):
    """Form for adding a 'flush associations' rule; include/exclude choice
    fields are built at runtime from the ``node_types`` iterable."""
    description = forms.CharField(max_length = 80, label = 'Description',
            help_text = 'Description of this rule.', required = False)
    priority = forms.IntegerField(label = 'Priority',
            min_value = 0, initial = 10,
            help_text = 'The priority of this rule when using the templates, lower value will be displayed first.')
    def __init__(self, node_types, *args, **kwargs):
        super(TemplateRuleFlushAssociationsAddForm, self).__init__(*args, **kwargs)
        choices = []
        for node_type in node_types:
            choices.append((node_type, node_type))
        field = forms.MultipleChoiceField(required = False,
                label = 'Included node types',
                choices = choices,
                help_text = 'If no node types are chosen for include, all types will match.')
        self.fields['include'] = field
        field = forms.MultipleChoiceField(required = False,
                label = 'Excluded node types',
                choices = choices)
        self.fields['exclude'] = field
class TemplateRulePasswordAddForm(forms.Form):
    """Form for adding a 'password' rule to a template; the password-key
    choice field is built at runtime from ``password_keys``."""
    username = forms.CharField(max_length = 50, label = 'Username',
            required = False)
    passwd_description = forms.CharField(max_length = 50, label = 'Description',
            required = False, help_text = 'Description of the added password.')
    description = forms.CharField(max_length = 80, label = 'Description',
            help_text = 'Description of this rule.', required = False)
    priority = forms.IntegerField(label = 'Priority',
            min_value = 0, initial = 10,
            help_text = 'The priority of this rule when using the templates, lower value will be displayed first.')
    def __init__(self, password_keys, *args, **kwargs):
        super(TemplateRulePasswordAddForm, self).__init__(*args, **kwargs)
        keylist = [('__no-password-key__', 'None')]
        for key in password_keys:
            keylist.append((key.oid, key.attributes['name']))
        field = forms.ChoiceField(label = 'Password key', choices = keylist)
        self.fields['passwordkey'] = field
class TemplateRuleSubdeviceAddForm(forms.Form):
    """Form for adding a 'subdevice' rule; the template choice field is
    built at runtime from the ``templates`` iterable."""
    description = forms.CharField(max_length = 80, label = 'Description',
            help_text = 'Description of this rule.', required = False)
    num_devices = forms.IntegerField(label = 'Number of subdevices',
            min_value = 1, initial = 1,
            help_text = 'Number of subdevices to create.')
    sequence_offset = forms.IntegerField(label = 'Sequence offset',
            initial = 0,
            help_text = 'Base offset of sequence counter used when applying subdevice templates.')
    priority = forms.IntegerField(label = 'Priority',
            min_value = 0, initial = 10,
            help_text = 'The priority of this rule when using the templates, lower value will be displayed first.')
    def __init__(self, templates, *args, **kwargs):
        super(TemplateRuleSubdeviceAddForm, self).__init__(*args, **kwargs)
        templatelist = [('none', 'None')]
        for template in templates:
            templatelist.append((template.oid, template.attributes['name']))
        field = forms.ChoiceField(label = 'Template', choices = templatelist)
        self.fields['template'] = field
class TemplateRuleAssignNetworkAddForm(forms.Form):
    """Form for adding an 'assign network' rule to a template."""
    description = forms.CharField(max_length = 80, label = 'Description',
            help_text = 'Description of this rule.', required = False)
    priority = forms.IntegerField(label = 'Priority',
            min_value = 0, initial = 10,
            help_text = 'The priority of this rule when using the templates, lower value will be displayed first.')
class NetworkAttributeAddSelectTypeForm(forms.Form):
    """Selector for the type of a new network attribute (text or boolean)."""
    ruletype = forms.ChoiceField(label = 'Attribute type',
            choices = (('text', 'text'),
                ('bool','boolean')))
class AttributeAddSelectTypeForm(forms.Form):
    """Selector for the type of a new attribute (text, boolean or int)."""
    ruletype = forms.ChoiceField(label = 'Attribute type',
            choices = (
                ('text', 'text'),
                ('bool', 'boolean'),
                ('int', 'int')
                ))
class AttributeUpdateTextForm(forms.Form):
    """Form for updating a text attribute's value."""
    value = forms.CharField(max_length = 50, label = 'New value',
            required = False)
class AttributeUpdateBoolForm(forms.Form):
    """Form for updating a boolean attribute's value."""
    value = forms.BooleanField(label = 'New value (true/false)',
            required = False)
class AttributeUpdateIntForm(forms.Form):
    """Form for updating an integer attribute's value."""
    value = forms.IntegerField(label = 'New value', initial = 0)
class AttributeUpdateLargeTextForm(forms.Form):
    """Form for updating a large text attribute in a textarea.

    The single 'value' field is built at runtime so its label and initial
    content come from the ``attribute`` passed to the constructor.
    """
    def __init__(self, attribute, *args, **kwargs):
        super(AttributeUpdateLargeTextForm, self).__init__(*args, **kwargs)
        field = forms.CharField(
            max_length=5000,
            label=attribute.name,
            initial=attribute.value,
            required=False,
            widget=forms.Textarea(attrs={'cols':'100', 'rows': '20'})
        )
        self.fields['value'] = field
class AttributeAddTextForm(forms.Form):
    """Form for adding a text attribute with display options."""
    name = forms.CharField(
        max_length=50,
        label='Name',
        widget=forms.TextInput(
            attrs={
                'placeholder': 'Name'
            }
        )
    )
    value = forms.CharField(
        max_length=50,
        label='Value',
        required=False,
        widget=forms.TextInput(
            attrs={
                'placeholder': 'Value'
            }
        )
    )
    # Fixed rule type carried along in the POST; not user-editable.
    ruletype = forms.CharField(
        initial='text',
        widget=forms.HiddenInput()
    )
    large = forms.BooleanField(
        label='Large attribute',
        required=False,
        help_text='Attribute will have a separate display box.'
    )
    wikitext = forms.BooleanField(
        label='Wikitext attribute',
        required=False,
        help_text='Attribute will be displayed using textile wikitext parsing, implies "large".'
    )
    hidden = forms.BooleanField(
        label='Hidden attribute',
        required=False,
        help_text='Attribute will hidden per default if it is large/wikitext.'
    )
    important = forms.BooleanField(
        label='Important attribute',
        required=False,
        help_text='Attribute will be displayed on a device/entities overview page.'
    )
    versions = forms.IntegerField(
        label='Versions',
        min_value=1,
        initial=1,
        help_text='If set to > 1 a versioned attribute will be created.'
    )
class PasswordAttributeAddTextForm(AttributeAddTextForm):
    """Variant of AttributeAddTextForm for attributes attached to passwords,
    adding an encryption option."""
    encrypted = forms.BooleanField(
        label='Encrypted attribute',
        required=False,
        help_text='Attribute will be encrypted using the same key as the parent password.'
    )
class AttributeAddBoolForm(forms.Form):
    """Form for adding a boolean attribute."""
    name = forms.CharField(max_length = 50, label = 'Name')
    value = forms.ChoiceField(label = 'Value',
            choices = (('true', 'True'), ('false', 'False')))
    # Fixed rule type carried along in the POST; not user-editable.
    ruletype = forms.CharField(initial = 'bool',
            widget = forms.HiddenInput())
    versions = forms.IntegerField(label = 'Versions',
            min_value = 1, initial = 1,
            help_text = 'If set to > 1 a versioned attribute will be created.')
    important = forms.BooleanField(label = 'Important attribute',
            required = False,
            help_text = 'If true, the attribute will be displayed on a device/entities overview page.')
class AttributeAddIntForm(forms.Form):
    """Form for adding an integer attribute."""
    name = forms.CharField(max_length = 50, label = 'Name')
    value = forms.IntegerField(label = 'Integer Value', initial = 0)
    # Fixed rule type carried along in the POST; not user-editable.
    ruletype = forms.CharField(initial = 'int',
            widget = forms.HiddenInput())
    versions = forms.IntegerField(label = 'Versions',
            min_value = 1, initial = 1,
            help_text = 'If set to > 1 a versioned attribute will be created.')
    important = forms.BooleanField(label = 'Important attribute',
            required = False,
            help_text = 'If true, the attribute will be displayed on a device/entities overview page.')
class DeviceCategoryAddForm(forms.Form):
    """Form for creating a device category."""
    name = forms.CharField(max_length = 50, label = 'Name')
    description = forms.CharField(max_length = 100, required = False,
            label = 'Description')
class DeviceCategoryUpdateForm(forms.Form):
    """Form for updating a device category."""
    name = forms.CharField(max_length = 50, label = 'Name')
    description = forms.CharField(max_length = 100, required = False,
            label = 'Description')
class TemplateSelectForm(forms.Form):
    """Form for selecting one template from a list.

    The choice field is built at runtime from ``templates``; when
    ``permit_none`` is true a 'None' option (oid -1) is listed first.
    """
    def __init__(self, templates, permit_none = True, *args, **kwargs):
        super(TemplateSelectForm, self).__init__(*args, **kwargs)
        tmpllist = []
        if permit_none:
            tmpllist.append((-1, 'None'))
        for template in templates:
            tmpllist.append((template.oid,
                template.attributes.get('name', '[UNKNOWN]')))
        field = forms.ChoiceField(label = 'Select template',
                choices = tmpllist)
        self.fields['template'] = field
class TemplateSetForm(forms.Form):
    """Dynamic form used when applying a template to a node.

    One field named 'argument-<rule oid>' is created per template rule.
    Editable rule types (text/regmatch/bool/int/subdevice) are added
    first, sorted by the rule's 'priority' attribute (lower values
    first); the remaining rule types are then added as read-only or
    hidden fields so the user can see what the template will do.
    """
    def __init__(self, template, *args, **kwargs):
        super(TemplateSetForm, self).__init__(*args, **kwargs)
        rules = list(template.combinedRules())
        # Sort by priority, lower values displayed first.  Uses key=
        # instead of the cmp= argument (removed in Python 3); behavior
        # is identical.
        rules.sort(key=lambda rule: rule.attributes.get('priority', 10))
        for rule in rules:
            field = None
            if rule.class_name == 'template rule text':
                # Wikitext attributes are handled in the second pass below.
                wikitext = rule.attributes.get('wikitext', False)
                if not wikitext:
                    field = forms.CharField(max_length = 50,
                            label = rule.attr_name,
                            required = False,
                            help_text = rule.attributes.get('description', None))
            elif rule.class_name == 'template rule regmatch':
                if rule.attributes.get('description', None):
                    help_text = '%s (must match: %s)' % (
                        rule.attributes.get('description'),
                        rule.regexp
                    )
                else:
                    help_text = 'Must match: "%s"' % (rule.regexp)
                field = forms.RegexField(max_length = 50,
                        label = rule.attr_name,
                        regex = rule.regexp, required = False,
                        help_text = help_text)
            elif rule.class_name == 'template rule bool':
                field = forms.BooleanField(label = rule.attr_name,
                        required = False,
                        initial = rule.default_value,
                        help_text = rule.attributes.get('description', None))
            elif rule.class_name == 'template rule int':
                field = forms.IntegerField(label = rule.attr_name,
                        initial = rule.default_value,
                        help_text = rule.attributes.get('description', None))
            elif rule.class_name == 'template rule subdevice':
                field = forms.IntegerField(label = 'Number of subdevices',
                        required = False,
                        initial = rule.num_devices,
                        help_text = rule.attributes.get('description', None))
            if field:
                self.fields['argument-%s' % (rule.oid)] = field
        for rule in template.combinedRules():
            # Rule types already handled (or deliberately skipped) above.
            if rule.class_name in [
                    'template rule regmatch', 'template rule bool',
                    'template rule int', 'template rule subdevice']:
                continue
            if rule.class_name == 'template rule text':
                wikitext = rule.attributes.get('wikitext', False)
                if wikitext:
                    field = forms.CharField(max_length = 50,
                            label = rule.attr_name,
                            required = False,
                            widget = forms.HiddenInput(),
                            help_text = rule.attributes.get('description', None))
                else:
                    continue
            elif rule.class_name == 'template rule password':
                initial = ''
                if rule.username:
                    initial = '%s' % (rule.username)
                else:
                    initial = '[no username]'
                if rule.description:
                    initial = '%s - %s' % (initial, rule.description)
                field = forms.CharField(label = 'Add password',
                        required = False,
                        initial = initial,
                        widget=forms.TextInput(attrs={'readonly':'readonly'}),
                        help_text = rule.attributes.get('description', ''))
            elif rule.class_name == 'template rule assign network':
                field = forms.CharField(label = 'Auto-assign ip-address',
                        required = False,
                        widget = forms.HiddenInput(),
                        help_text = rule.attributes.get('description', ''))
            elif rule.class_name == 'template rule fixed':
                field = forms.CharField(label = rule.attr_name,
                        required = False,
                        initial = rule.value,
                        widget=forms.TextInput(attrs={'readonly':'readonly'}),
                        help_text = rule.attributes.get('description', ''))
            elif rule.class_name == 'template rule flush nodes':
                field = forms.CharField(label = 'Flush existing nodes',
                        required = False,
                        widget = forms.HiddenInput(),
                        help_text = rule.attributes.get('description', ''))
            elif rule.class_name == 'template rule flush associations':
                field = forms.CharField(label = 'Flush existing associations',
                        required = False,
                        widget = forms.HiddenInput(),
                        help_text = rule.attributes.get('description', ''))
            elif rule.class_name == 'template rule delete attribute':
                field = forms.CharField(label = 'Delete attribute',
                        required = False,
                        initial = rule.attr_name,
                        widget=forms.TextInput(attrs={'readonly':'readonly'}),
                        help_text = rule.attributes.get('description', ''))
            else:
                # Unknown rule types get a hidden placeholder field.
                field = forms.CharField(label = rule.class_name,
                        required = False,
                        widget = forms.HiddenInput(),
                        help_text = rule.attributes.get('description', ''))
            self.fields['argument-%s' % (rule.oid)] = field
class DeviceSetValuesForm(forms.Form):
    """Dynamic form for setting device attribute values from template rules.

    One field named 'attr-<rule oid>' is created per rule, based on the
    rule's dtype ('text', 'regmatch' or 'bool').  Wikitext text rules get
    a hidden field with a single-space initial value.
    """
    def __init__(self, rules, *args, **kwargs):
        super(DeviceSetValuesForm, self).__init__(*args, **kwargs)
        for rule in rules:
            is_wikitext = rule.attributes.get('wikitext', False)
            if rule.dtype == 'text' and not is_wikitext:
                field = forms.CharField(max_length = 50, label = rule.name,
                        required = False,
                        help_text = rule.attributes.get('description', None))
                self.fields['attr-%s' % (rule.oid)] = field
            elif rule.dtype == 'text' and is_wikitext:
                widget = forms.HiddenInput()
                field = forms.CharField(label = rule.name, widget = widget,
                        initial = ' ')
                self.fields['attr-%s' % (rule.oid)] = field
            elif rule.dtype == 'regmatch':
                field = forms.RegexField(max_length = 50, label = rule.name,
                        regex = rule.value, required = False,
                        help_text = 'Must match: "%s"' % (rule.value))
                self.fields['attr-%s' % (rule.oid)] = field
#            elif rule.dtype == 'fixed':
#                widget = forms.HiddenInput()
#                field = forms.CharField(max_length = 50, label = rule.name,
#                        widget = widget, initial = rule.value)
#                self.fields['attr-%s' % (rule.oid)] = field
            # NOTE: separate 'if' (not 'elif'), so this is checked even
            # after a text/regmatch match above; dtype can't be both, so
            # behavior is unchanged.
            if rule.dtype == 'bool':
                field = forms.BooleanField(label = rule.name, required = False,
                        initial = rule.attributes.get('default', True),
                        help_text = rule.attributes.get('description', None))
                self.fields['attr-%s' % (rule.oid)] = field
            else:
                pass
class DeviceNetworkAddForm(forms.Form):
    """Form for attaching a network address to a device.

    The network-tree choice field is built at runtime from
    ``network_trees``; the default tree (if any) is listed first.
    """
    def __init__(self, network_trees, *args, **kwargs):
        super(DeviceNetworkAddForm, self).__init__(*args, **kwargs)
        nt_choices = []
        for tree in network_trees:
            value = (tree.oid, tree.attributes.get('name', '[UNKNOWN]'))
            if tree.attributes.get('default', False) is True:
                nt_choices.insert(0, value)
            else:
                nt_choices.append(value)
        field = forms.ChoiceField(label = 'Network Tree',
                choices = nt_choices,
                help_text = 'Network tree for address.')
        self.fields['networktree'] = field
        self.fields['network_name'] = \
                forms.CharField(max_length = 50, label = 'IP-Address',
                        help_text = 'Valid forms: host: "a.b.c.d", '
                        'cidr subnet: "a.b.c.d/nn"')
        self.fields['description'] = forms.CharField(max_length = 50, label = 'Description (optional)',
            required = False)
class UserAddForm(forms.Form):
    """Form for creating a new user account.

    The password is entered twice for verification; comparing the two
    values is presumably done in the view — confirm.
    """
    # Label fixed: was corrupted placeholder text ('<NAME>'); 'User Name'
    # matches UserUpdateAdminForm/UserUpdateForm.
    user_name = forms.CharField(max_length = 50, label = 'User Name')
    real_name = forms.CharField(max_length = 50, label = 'Real Name (optional)',
            required = False)
    description = forms.CharField(max_length = 50, label = 'Description (optional)',
            required = False)
    administrator = forms.BooleanField(label = 'Administrator',
            required = False,
            initial = False)
    password = forms.CharField(max_length = 32, label = 'Password',
            widget = forms.PasswordInput(), required = True)
    validate = forms.CharField(max_length = 32, label = 'Password (again)',
            widget = forms.PasswordInput(), required = True)
class UserUpdateAdminForm(forms.Form):
    """Form for updating a user, including the administrator flag (admin-only view)."""
    user_name = forms.CharField(max_length = 50, label = 'User Name')
    real_name = forms.CharField(max_length = 50, label = 'Real Name (optional)',
            required = False)
    description = forms.CharField(max_length = 50, label = 'Description (optional)',
            required = False)
    administrator = forms.BooleanField(label = 'Administrator',
            required = False,
            initial = False)
class UserUpdateForm(forms.Form):
    """Form for a user updating their own details (no administrator flag)."""
    user_name = forms.CharField(max_length = 50, label = 'User Name')
    real_name = forms.CharField(max_length = 50, label = 'Real Name (optional)',
            required = False)
    description = forms.CharField(max_length = 50, label = 'Description (optional)',
            required = False)
class UserResetPasswordForm(forms.Form):
    """Form for resetting a user's password without knowing the old one."""
    password = forms.CharField(max_length = 32, label = 'Password',
            widget = forms.PasswordInput(), required = False,
            help_text = 'Reseting the password for a user will disconnect all subkeys etc. Use this if the old password for the user is unknown.')
    validate = forms.CharField(max_length = 32, label = 'Password (again)',
            widget = forms.PasswordInput(), required = False)
class UserUpdatePasswordForm(forms.Form):
    """Form for changing a password; the old password is needed when
    changing another user's password."""
    password = forms.CharField(max_length = 32, label = 'New Password',
            widget = forms.PasswordInput(), required = False)
    validate = forms.CharField(max_length = 32, label = 'New Password (again)',
            widget = forms.PasswordInput(), required = False)
    old_password = forms.CharField(max_length = 32, label = 'Old Password',
            widget = forms.PasswordInput(), required = False,
            help_text = 'Needs to be supplied if you are changing the password of a user other than your own.')
class UserConnectKeyForm(forms.Form):
    """Form for connecting a password key to a user.

    The password-key choice field is built at runtime from
    ``password_keys``; a 'user_password' field is added only when
    ``require_user_password`` is true.
    """
    password_key_key = forms.CharField(max_length = 32, label = 'Password key password',
            widget = forms.PasswordInput(), required = False,
            help_text = 'Required if the current active user doesn\'t have the selected password key connected.')
    def __init__(self, password_keys, require_user_password, *args, **kwargs):
        super(UserConnectKeyForm, self).__init__(*args, **kwargs)
        # Informational text displayed alongside the form by the view.
        self.message = '''
If you're connecting a password key for another user, keep in mind; that
user must logout and login to siptrack before the key will be connected.
'''
        keylist = []
        for key in password_keys:
            value = (key.oid, key.attributes['name'])
            # The default key (if any) is listed first.
            if key.attributes.get('default', False) is True:
                keylist.insert(0, value)
            else:
                keylist.append(value)
        field = forms.ChoiceField(label = 'Password key', choices = keylist)
        self.fields['passwordkey'] = field
        if require_user_password:
            field = forms.CharField(
                max_length=32,
                label='User\'s password',
                help_text='Required to create the users keypair if they\'ve never logged in before.',
                widget=forms.PasswordInput(),
                required=False
            )
            self.fields['user_password'] = field
class UserManagerLocalAddForm(forms.Form):
    """Add a local (siptrack-internal) user manager."""
    name = forms.CharField(label='Name', max_length=50)
    description = forms.CharField(label='Description (optional)',
                                  max_length=50, required=False)
class UserManagerLDAPAddForm(forms.Form):
    """Add an LDAP-backed user manager."""
    name = forms.CharField(label='Name', max_length=50)
    description = forms.CharField(label='Description (optional)',
                                  max_length=50, required=False)
    connection_type = forms.ChoiceField(
        label='Connection type',
        choices=(('ldap', 'ldap'), ('ldaps', 'ldaps')))
    server = forms.CharField(label='LDAP server', max_length=256)
    port = forms.CharField(label='LDAP server port', max_length=5)
    base_dn = forms.CharField(label='Base DN', max_length=128)
    valid_groups = forms.CharField(
        label='Valid LDAP group', max_length=1000, required=False,
        help_text='Only members of the given group will be able to log in, use ":" to seperate groups.')
class UserManagerActiveDirectoryAddForm(forms.Form):
    """Add an Active Directory-backed user manager."""
    name = forms.CharField(label='Name', max_length=50)
    description = forms.CharField(label='Description (optional)',
                                  max_length=50, required=False)
    server = forms.CharField(label='AD server', max_length=256)
    base_dn = forms.CharField(label='Base DN', max_length=128)
    valid_groups = forms.CharField(
        label='Valid LDAP group', max_length=1000, required=False,
        help_text='Only members of the given group will be able to log in, use ":" to seperate groups.')
    user_domain = forms.CharField(label='User Domain', max_length=128)
class DeviceResetForm(forms.Form):
    """Select which aspects of a device to reset."""
    reset_attributes = forms.BooleanField(label='Reset attributes',
                                          initial=True, required=False)
    reset_device_links = forms.BooleanField(label='Reset device links',
                                            initial=False, required=False)
    reset_passwords = forms.BooleanField(label='Reset passwords',
                                         initial=True, required=False)
    reset_subdevices = forms.BooleanField(label='Reset subdevices',
                                          initial=True, required=False)
class ConfigAddSelectTypeForm(forms.Form):
    """Pick the type of config item to add under a node.

    Network auto assignment is only offered for parent node types that
    are not views, networks, network trees or network ranges.
    """
    def __init__(self, parent, *args, **kwargs):
        super(ConfigAddSelectTypeForm, self).__init__(*args, **kwargs)
        excluded = ['view tree', 'ipv4 network', 'ipv6 network',
                    'network tree', 'ipv4 network range',
                    'ipv6 network range']
        choices = []
        if parent.class_name not in excluded:
            choices.append(('netautoassign', 'Network auto assignment'))
        choices.append(('value', 'Config value'))
        self.fields['config_type'] = forms.ChoiceField(
            label='Config type', choices=choices)
class ConfigAddNetworkAutoassignForm(forms.Form):
    """Configure automatic network address assignment.

    The selectable network trees are supplied at construction time; a
    tree whose ``default`` attribute is True is listed first.
    """
    # Hidden discriminator letting the view tell config types apart.
    config_type = forms.CharField(initial = 'netautoassign',
            widget = forms.HiddenInput())
    def __init__(self, network_trees, *args, **kwargs):
        super(ConfigAddNetworkAutoassignForm, self).__init__(*args, **kwargs)
        nt_choices = []
        for tree in network_trees:
            value = (tree.oid, tree.attributes.get('name', '[UNKNOWN]'))
            # Default-flagged trees go to the front of the choice list.
            if tree.attributes.get('default', False) is True:
                nt_choices.insert(0, value)
            else:
                nt_choices.append(value)
        field = forms.ChoiceField(label = 'Network Tree',
                choices = nt_choices,
                help_text = 'Network tree for address.')
        self.fields['networktree'] = field
        # Fix: the help texts previously ended with a stray '"' character
        # ('... used for assignment"') left over from an editing mistake.
        self.fields['range_start'] = \
                forms.CharField(max_length = 50, label = 'Range Start',
                help_text = 'Enter the start address of the range used for assignment')
        self.fields['range_end'] = \
                forms.CharField(max_length = 50, label = 'Range End',
                help_text = 'Enter the end address of the range used for assignment')
class ConfigAddValueForm(forms.Form):
    """Add a simple name/value config entry."""
    name = forms.CharField(label='Name', max_length=50)
    value = forms.CharField(label='Value', max_length=50, required=False)
    # Hidden discriminator letting the view tell config types apart.
    config_type = forms.CharField(initial='value',
                                  widget=forms.HiddenInput())
class PermissionAddForm(forms.Form):
    """Grant read/write permissions on a node to users and groups.

    The user and group choice lists are supplied at construction time.
    """
    read_access = forms.BooleanField(label='Read access', required=False)
    write_access = forms.BooleanField(label='Write access', required=False)
    all_users = forms.BooleanField(label='Applies to all users',
                                   required=False)
    recursive = forms.BooleanField(
        label='Recursive', required=False,
        help_text='Applies recursively up the node tree.')

    def __init__(self, users, groups, *args, **kwargs):
        super(PermissionAddForm, self).__init__(*args, **kwargs)
        self.fields['users'] = forms.MultipleChoiceField(
            label='Users', choices=users, required=False,
            help_text='Included users.')
        self.fields['groups'] = forms.MultipleChoiceField(
            label='Groups', choices=groups, required=False,
            help_text='Included groups.')
class UserGroupAddForm(forms.Form):
    """Create a user group and select its member users."""
    name = forms.CharField(label='Name', max_length=50)
    description = forms.CharField(label='Description', max_length=50,
                                  required=False)

    def __init__(self, users, *args, **kwargs):
        super(UserGroupAddForm, self).__init__(*args, **kwargs)
        self.fields['users'] = forms.MultipleChoiceField(
            label='Users', choices=users, required=False,
            help_text='Included users.')
class UserGroupUpdateForm(forms.Form):
    """Update a user group's details and membership."""
    name = forms.CharField(label='Name', max_length=50)
    description = forms.CharField(label='Description (optional)',
                                  max_length=50, required=False)

    def __init__(self, users, *args, **kwargs):
        super(UserGroupUpdateForm, self).__init__(*args, **kwargs)
        self.fields['users'] = forms.MultipleChoiceField(
            label='Users', choices=users, required=False,
            help_text='Included users.')
class CommandAddForm(forms.Form):
    """Add a free-text command."""
    freetext = forms.CharField(label='Command text', max_length=200)
class CommandUpdateForm(forms.Form):
    """Update a free-text command."""
    freetext = forms.CharField(label='Command text', max_length=200)
class CommandQueueAddForm(forms.Form):
    """Add a command queue."""
    name = forms.CharField(label='Name', max_length=50)
class CommandQueueUpdateForm(forms.Form):
    """Rename a command queue."""
    name = forms.CharField(label='Name', max_length=50)
class EventTriggerAddForm(forms.Form):
    """Add an event trigger."""
    name = forms.CharField(label='Name', max_length=50)
class EventTriggerUpdateForm(forms.Form):
    """Rename an event trigger."""
    name = forms.CharField(label='Name', max_length=50)
class EventTriggerRulePythonAddForm(forms.Form):
    """Add a python-code rule to an event trigger."""
    name = forms.CharField(label='Name', max_length=50)
    code = forms.CharField(
        label='Code', max_length=5000, help_text='python code',
        widget=forms.Textarea(attrs={'cols': '80', 'rows': '50'}))
class EventTriggerRulePythonUpdateForm(forms.Form):
    """Update a python-code rule on an event trigger."""
    name = forms.CharField(label='Name', max_length=50)
    code = forms.CharField(
        label='Code', max_length=5000, help_text='python code',
        widget=forms.Textarea(attrs={'cols': '80', 'rows': '50'}))
class UsermanagerADSyncUsersForm(forms.Form):
    """Credentials used to sync users from Active Directory."""
    username = forms.CharField(label='Username', max_length=50)
    password = forms.CharField(label='Password', max_length=32,
                               required=True,
                               widget=forms.PasswordInput())
class PasswordCategoryAddForm(forms.Form):
    """Add a password category."""
    name = forms.CharField(label='Name', max_length=50)
    description = forms.CharField(label='Description', max_length=100,
                                  required=False)
class DeviceCopyForm(forms.Form):
    """Choose what to exclude when copying a device."""
    skip_attributes = forms.BooleanField(label='Skip attributes',
                                         initial=False, required=False)
    skip_devices = forms.BooleanField(label='Skip sub-devices',
                                      initial=False, required=False)
    skip_networks = forms.BooleanField(label='Skip networks',
                                       initial=True, required=False)
class AttributeEditNotesForm(forms.Form):
    """Large free-form notes editor backed by a textarea."""
    notes = forms.CharField(
        label='', help_text='', max_length=50000, required=False,
        widget=forms.Textarea(attrs={'cols': '100', 'rows': '15'}))
class AttributeQuickeditForm(forms.Form):
    """Quick single-value attribute edit."""
    value = forms.CharField(label='Value', max_length=100, required=False)
class RackUnitOccupiedForm(forms.Form):
    """Mark a rack unit as occupied, with an optional reason."""
    reason = forms.CharField(
        label='Reason', max_length=500, required=False,
        help_text='Describe what is occupying this unit.')
class RackUnitReservedForm(forms.Form):
    """Mark a rack unit as reserved, with an optional reason."""
    reason = forms.CharField(
        label='Reason', max_length=500, required=False,
        help_text='Describe why this unit is reserved.')
class DeviceConfigAddForm(forms.Form):
    """Create a device config store with a version-retention limit."""
    name = forms.CharField(label='Name', max_length=50)
    description = forms.CharField(label='Description', max_length=100,
                                  required=False)
    max_versions = forms.IntegerField(
        label='Retained versions', min_value=0, initial=10,
        help_text='The number of config versions to retain, set to 0 for unlimited.')
class DeviceConfigSubmitForm(forms.Form):
    """Submit a new device config version as raw text."""
    data = forms.CharField(
        label='', help_text='', max_length=1000000, required=True,
        widget=forms.Textarea(attrs={'cols': '100', 'rows': '15'}))
class DeviceConfigTemplateAddForm(forms.Form):
    """Create a device config template with its initial content."""
    name = forms.CharField(label='Name', max_length=50)
    description = forms.CharField(label='Description', max_length=100,
                                  required=False)
    data = forms.CharField(
        label='', help_text='', max_length=1000000, required=True,
        widget=forms.Textarea(attrs={'cols': '100', 'rows': '15'}))
class DeviceConfigTemplateSubmitForm(forms.Form):
    """Submit updated data for an existing device config template."""
    data = forms.CharField(
            required=True,
            max_length=1000000,
            label='',
            help_text='',
            widget=forms.Textarea(attrs={'cols': '100', 'rows': '15'}))
| [
"django.forms.HiddenInput",
"django.forms.BooleanField",
"django.forms.CharField",
"django.forms.PasswordInput",
"django.forms.RegexField",
"django.forms.ChoiceField",
"django.forms.IntegerField",
"django.forms.Textarea",
"django.forms.TextInput",
"django.forms.MultipleChoiceField",
"django.form... | [((109, 157), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Username"""'}), "(max_length=50, label='Username')\n", (124, 157), False, 'from django import forms\n'), ((623, 667), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (638, 667), False, 'from django import forms\n'), ((708, 776), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'required': '(False)', 'label': '"""Description"""'}), "(max_length=100, required=False, label='Description')\n", (723, 776), False, 'from django import forms\n'), ((853, 897), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (868, 897), False, 'from django import forms\n'), ((938, 1006), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'required': '(False)', 'label': '"""Description"""'}), "(max_length=100, required=False, label='Description')\n", (953, 1006), False, 'from django import forms\n'), ((1284, 1329), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'required': '(True)'}), '(max_length=50, required=True)\n', (1299, 1329), False, 'from django import forms\n'), ((1371, 1417), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'required': '(False)'}), '(max_length=50, required=False)\n', (1386, 1417), False, 'from django import forms\n'), ((1462, 1509), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(256)', 'required': '(False)'}), '(max_length=256, required=False)\n', (1477, 1509), False, 'from django import forms\n'), ((1784, 1842), 'django.forms.MultipleChoiceField', 'forms.MultipleChoiceField', ([], {'choices': 'OPTIONS', 'required': '(False)'}), '(choices=OPTIONS, required=False)\n', (1809, 1842), False, 'from django import forms\n'), ((1915, 1959), 'django.forms.CharField', 
'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (1930, 1959), False, 'from django import forms\n'), ((1997, 2082), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'label': '"""Protocol"""', 'choices': "(('ipv4', 'ipv4'), ('ipv6', 'ipv6'))"}), "(label='Protocol', choices=(('ipv4', 'ipv4'), ('ipv6',\n 'ipv6')))\n", (2014, 2082), False, 'from django import forms\n'), ((2147, 2269), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Address"""', 'help_text': '"""The network/address in CIDR form (x.x.x.x or x.x.x.x/xx)"""'}), "(max_length=50, label='Address', help_text=\n 'The network/address in CIDR form (x.x.x.x or x.x.x.x/xx)')\n", (2162, 2269), False, 'from django import forms\n'), ((2313, 2381), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'required': '(False)', 'label': '"""Description"""'}), "(max_length=100, required=False, label='Description')\n", (2328, 2381), False, 'from django import forms\n'), ((2464, 2509), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Range"""'}), "(max_length=50, label='Range')\n", (2479, 2509), False, 'from django import forms\n'), ((2550, 2618), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'required': '(False)', 'label': '"""Description"""'}), "(max_length=100, required=False, label='Description')\n", (2565, 2618), False, 'from django import forms\n'), ((2703, 2763), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Recursive delete"""', 'required': '(False)'}), "(label='Recursive delete', required=False)\n", (2721, 2763), False, 'from django import forms\n'), ((2836, 2880), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (2851, 2880), False, 'from django import forms\n'), ((3210, 3278), 'django.forms.CharField', 'forms.CharField', ([], 
{'max_length': '(100)', 'required': '(False)', 'label': '"""Description"""'}), "(max_length=100, required=False, label='Description')\n", (3225, 3278), False, 'from django import forms\n'), ((3360, 3404), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (3375, 3404), False, 'from django import forms\n'), ((3445, 3513), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'required': '(False)', 'label': '"""Description"""'}), "(max_length=100, required=False, label='Description')\n", (3460, 3513), False, 'from django import forms\n'), ((3597, 3641), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (3612, 3641), False, 'from django import forms\n'), ((3682, 3750), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'required': '(False)', 'label': '"""Description"""'}), "(max_length=100, required=False, label='Description')\n", (3697, 3750), False, 'from django import forms\n'), ((4023, 4067), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (4038, 4067), False, 'from django import forms\n'), ((4108, 4176), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'required': '(False)', 'label': '"""Description"""'}), "(max_length=100, required=False, label='Description')\n", (4123, 4176), False, 'from django import forms\n'), ((4219, 4283), 'django.forms.DecimalField', 'forms.DecimalField', ([], {'min_value': '(0)', 'decimal_places': '(0)', 'label': '"""Value"""'}), "(min_value=0, decimal_places=0, label='Value')\n", (4237, 4283), False, 'from django import forms\n'), ((4370, 4414), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (4385, 4414), False, 'from django import 
forms\n'), ((4455, 4523), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'required': '(False)', 'label': '"""Description"""'}), "(max_length=100, required=False, label='Description')\n", (4470, 4523), False, 'from django import forms\n'), ((4566, 4611), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Value"""'}), "(max_length=50, label='Value')\n", (4581, 4611), False, 'from django import forms\n'), ((4869, 4933), 'django.forms.DecimalField', 'forms.DecimalField', ([], {'min_value': '(0)', 'decimal_places': '(0)', 'label': '"""Value"""'}), "(min_value=0, decimal_places=0, label='Value')\n", (4887, 4933), False, 'from django import forms\n'), ((5018, 5082), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Username"""', 'required': '(False)'}), "(max_length=50, label='Username', required=False)\n", (5033, 5082), False, 'from django import forms\n'), ((5514, 5582), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'required': '(False)', 'label': '"""Description"""'}), "(max_length=100, required=False, label='Description')\n", (5529, 5582), False, 'from django import forms\n'), ((6238, 6302), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Username"""', 'required': '(False)'}), "(max_length=50, label='Username', required=False)\n", (6253, 6302), False, 'from django import forms\n'), ((6692, 6760), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'required': '(False)', 'label': '"""Description"""'}), "(max_length=100, required=False, label='Description')\n", (6707, 6760), False, 'from django import forms\n'), ((7373, 7417), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (7388, 7417), False, 'from django import forms\n'), ((7440, 7555), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': 
'(80)', 'label': '"""Description"""', 'help_text': '"""Description of this template."""', 'required': '(False)'}), "(max_length=80, label='Description', help_text=\n 'Description of this template.', required=False)\n", (7455, 7555), False, 'from django import forms\n'), ((7594, 7725), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Inheritance only"""', 'required': '(False)', 'initial': '(False)', 'help_text': '"""Template is used for inheritance only."""'}), "(label='Inheritance only', required=False, initial=False,\n help_text='Template is used for inheritance only.')\n", (7612, 7725), False, 'from django import forms\n'), ((7788, 7917), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Device creation"""', 'required': '(False)', 'initial': '(False)', 'help_text': '"""Template is used for device creation."""'}), "(label='Device creation', required=False, initial=False,\n help_text='Template is used for device creation.')\n", (7806, 7917), False, 'from django import forms\n'), ((8487, 8531), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (8502, 8531), False, 'from django import forms\n'), ((8554, 8669), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(80)', 'label': '"""Description"""', 'help_text': '"""Description of this template."""', 'required': '(False)'}), "(max_length=80, label='Description', help_text=\n 'Description of this template.', required=False)\n", (8569, 8669), False, 'from django import forms\n'), ((8708, 8839), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Inheritance only"""', 'required': '(False)', 'initial': '(False)', 'help_text': '"""Template is used for inheritance only."""'}), "(label='Inheritance only', required=False, initial=False,\n help_text='Template is used for inheritance only.')\n", (8726, 8839), False, 'from django import forms\n'), ((9413, 9457), 
'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (9428, 9457), False, 'from django import forms\n'), ((9480, 9595), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(80)', 'label': '"""Description"""', 'help_text': '"""Description of this template."""', 'required': '(False)'}), "(max_length=80, label='Description', help_text=\n 'Description of this template.', required=False)\n", (9495, 9595), False, 'from django import forms\n'), ((9634, 9765), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Inheritance only"""', 'required': '(False)', 'initial': '(False)', 'help_text': '"""Template is used for inheritance only."""'}), "(label='Inheritance only', required=False, initial=False,\n help_text='Template is used for inheritance only.')\n", (9652, 9765), False, 'from django import forms\n'), ((9828, 9957), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Device creation"""', 'required': '(False)', 'initial': '(False)', 'help_text': '"""Template is used for device creation."""'}), "(label='Device creation', required=False, initial=False,\n help_text='Template is used for device creation.')\n", (9846, 9957), False, 'from django import forms\n'), ((10533, 10577), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (10548, 10577), False, 'from django import forms\n'), ((10600, 10715), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(80)', 'label': '"""Description"""', 'help_text': '"""Description of this template."""', 'required': '(False)'}), "(max_length=80, label='Description', help_text=\n 'Description of this template.', required=False)\n", (10615, 10715), False, 'from django import forms\n'), ((10754, 10885), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Inheritance only"""', 'required': '(False)', 
'initial': '(False)', 'help_text': '"""Template is used for inheritance only."""'}), "(label='Inheritance only', required=False, initial=False,\n help_text='Template is used for inheritance only.')\n", (10772, 10885), False, 'from django import forms\n'), ((11465, 11566), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Attribute name"""', 'help_text': '"""Name of attribute to create."""'}), "(max_length=50, label='Attribute name', help_text=\n 'Name of attribute to create.')\n", (11480, 11566), False, 'from django import forms\n'), ((11593, 11760), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Hide attribute"""', 'required': '(False)', 'initial': '(False)', 'help_text': '"""If true, the attribute will hidden per default if it is large/wikitext."""'}), "(label='Hide attribute', required=False, initial=False,\n help_text=\n 'If true, the attribute will hidden per default if it is large/wikitext.')\n", (11611, 11760), False, 'from django import forms\n'), ((11872, 12055), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Important attribute"""', 'required': '(False)', 'initial': '(False)', 'help_text': '"""If true, the attribute will be displayed on the device/entity overview page."""'}), "(label='Important attribute', required=False, initial=\n False, help_text=\n 'If true, the attribute will be displayed on the device/entity overview page.'\n )\n", (11890, 12055), False, 'from django import forms\n'), ((12097, 12245), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Large attribute"""', 'required': '(False)', 'initial': '(False)', 'help_text': '"""If true, the attribute will have a separate display box."""'}), "(label='Large attribute', required=False, initial=False,\n help_text='If true, the attribute will have a separate display box.')\n", (12115, 12245), False, 'from django import forms\n'), ((12301, 12488), 'django.forms.BooleanField', 'forms.BooleanField', 
([], {'label': '"""Wikitext attribute"""', 'required': '(False)', 'initial': '(False)', 'help_text': '"""If true, the attribute will be displayed using wikitext parsing, implies "large"."""'}), '(label=\'Wikitext attribute\', required=False, initial=\n False, help_text=\n \'If true, the attribute will be displayed using wikitext parsing, implies "large".\'\n )\n', (12319, 12488), False, 'from django import forms\n'), ((12536, 12647), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(80)', 'label': '"""Description"""', 'help_text': '"""Description of this rule."""', 'required': '(False)'}), "(max_length=80, label='Description', help_text=\n 'Description of this rule.', required=False)\n", (12551, 12647), False, 'from django import forms\n'), ((12678, 12851), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'label': '"""Priority"""', 'min_value': '(0)', 'initial': '(10)', 'help_text': '"""The priority of this rule when using the templates, lower value will be displayed first."""'}), "(label='Priority', min_value=0, initial=10, help_text=\n 'The priority of this rule when using the templates, lower value will be displayed first.'\n )\n", (12696, 12851), False, 'from django import forms\n'), ((12933, 13055), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'label': '"""Versions"""', 'min_value': '(1)', 'initial': '(1)', 'help_text': '"""Number of stored versions of the attribute."""'}), "(label='Versions', min_value=1, initial=1, help_text=\n 'Number of stored versions of the attribute.')\n", (12951, 13055), False, 'from django import forms\n'), ((13144, 13245), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Attribute name"""', 'help_text': '"""Name of attribute to create."""'}), "(max_length=50, label='Attribute name', help_text=\n 'Name of attribute to create.')\n", (13159, 13245), False, 'from django import forms\n'), ((13278, 13379), 'django.forms.CharField', 'forms.CharField', ([], 
{'max_length': '(100)', 'label': '"""String value"""', 'help_text': '"""The created attributes value."""'}), "(max_length=100, label='String value', help_text=\n 'The created attributes value.')\n", (13293, 13379), False, 'from django import forms\n'), ((13418, 13493), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Expand variables"""', 'required': '(False)', 'initial': '(False)'}), "(label='Expand variables', required=False, initial=False)\n", (13436, 13493), False, 'from django import forms\n'), ((13540, 13723), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Important attribute"""', 'required': '(False)', 'initial': '(False)', 'help_text': '"""If true, the attribute will be displayed on the device/entity overview page."""'}), "(label='Important attribute', required=False, initial=\n False, help_text=\n 'If true, the attribute will be displayed on the device/entity overview page.'\n )\n", (13558, 13723), False, 'from django import forms\n'), ((13840, 13951), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(80)', 'label': '"""Description"""', 'help_text': '"""Description of this rule."""', 'required': '(False)'}), "(max_length=80, label='Description', help_text=\n 'Description of this rule.', required=False)\n", (13855, 13951), False, 'from django import forms\n'), ((13982, 14155), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'label': '"""Priority"""', 'min_value': '(0)', 'initial': '(10)', 'help_text': '"""The priority of this rule when using the templates, lower value will be displayed first."""'}), "(label='Priority', min_value=0, initial=10, help_text=\n 'The priority of this rule when using the templates, lower value will be displayed first.'\n )\n", (14000, 14155), False, 'from django import forms\n'), ((14237, 14359), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'label': '"""Versions"""', 'min_value': '(1)', 'initial': '(1)', 'help_text': '"""Number of stored versions of 
the attribute."""'}), "(label='Versions', min_value=1, initial=1, help_text=\n 'Number of stored versions of the attribute.')\n", (14255, 14359), False, 'from django import forms\n'), ((14451, 14552), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Attribute name"""', 'help_text': '"""Name of attribute to create."""'}), "(max_length=50, label='Attribute name', help_text=\n 'Name of attribute to create.')\n", (14466, 14552), False, 'from django import forms\n'), ((14579, 14695), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Regexp"""', 'help_text': '"""Regular expression that must match the input value."""'}), "(max_length=50, label='Regexp', help_text=\n 'Regular expression that must match the input value.')\n", (14594, 14695), False, 'from django import forms\n'), ((14727, 14838), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(80)', 'label': '"""Description"""', 'help_text': '"""Description of this rule."""', 'required': '(False)'}), "(max_length=80, label='Description', help_text=\n 'Description of this rule.', required=False)\n", (14742, 14838), False, 'from django import forms\n'), ((14869, 14991), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'label': '"""Versions"""', 'min_value': '(1)', 'initial': '(1)', 'help_text': '"""Number of stored versions of the attribute."""'}), "(label='Versions', min_value=1, initial=1, help_text=\n 'Number of stored versions of the attribute.')\n", (14887, 14991), False, 'from django import forms\n'), ((15034, 15207), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'label': '"""Priority"""', 'min_value': '(0)', 'initial': '(10)', 'help_text': '"""The priority of this rule when using the templates, lower value will be displayed first."""'}), "(label='Priority', min_value=0, initial=10, help_text=\n 'The priority of this rule when using the templates, lower value will be displayed first.'\n )\n", (15052, 15207), 
False, 'from django import forms\n'), ((15290, 15473), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Important attribute"""', 'required': '(False)', 'initial': '(False)', 'help_text': '"""If true, the attribute will be displayed on the device/entity overview page."""'}), "(label='Important attribute', required=False, initial=\n False, help_text=\n 'If true, the attribute will be displayed on the device/entity overview page.'\n )\n", (15308, 15473), False, 'from django import forms\n'), ((15632, 15733), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Attribute name"""', 'help_text': '"""Name of attribute to create."""'}), "(max_length=50, label='Attribute name', help_text=\n 'Name of attribute to create.')\n", (15647, 15733), False, 'from django import forms\n'), ((15761, 15889), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'label': '"""Default"""', 'choices': "(('true', 'True'), ('false', 'False'))", 'help_text': '"""Default value for attribute."""'}), "(label='Default', choices=(('true', 'True'), ('false',\n 'False')), help_text='Default value for attribute.')\n", (15778, 15889), False, 'from django import forms\n'), ((15934, 16045), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(80)', 'label': '"""Description"""', 'help_text': '"""Description of this rule."""', 'required': '(False)'}), "(max_length=80, label='Description', help_text=\n 'Description of this rule.', required=False)\n", (15949, 16045), False, 'from django import forms\n'), ((16076, 16198), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'label': '"""Versions"""', 'min_value': '(1)', 'initial': '(1)', 'help_text': '"""Number of stored versions of the attribute."""'}), "(label='Versions', min_value=1, initial=1, help_text=\n 'Number of stored versions of the attribute.')\n", (16094, 16198), False, 'from django import forms\n'), ((16241, 16414), 'django.forms.IntegerField', 'forms.IntegerField', ([], 
{'label': '"""Priority"""', 'min_value': '(0)', 'initial': '(10)', 'help_text': '"""The priority of this rule when using the templates, lower value will be displayed first."""'}), "(label='Priority', min_value=0, initial=10, help_text=\n 'The priority of this rule when using the templates, lower value will be displayed first.'\n )\n", (16259, 16414), False, 'from django import forms\n'), ((16497, 16680), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Important attribute"""', 'required': '(False)', 'initial': '(False)', 'help_text': '"""If true, the attribute will be displayed on the device/entity overview page."""'}), "(label='Important attribute', required=False, initial=\n False, help_text=\n 'If true, the attribute will be displayed on the device/entity overview page.'\n )\n", (16515, 16680), False, 'from django import forms\n'), ((16838, 16939), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Attribute name"""', 'help_text': '"""Name of attribute to create."""'}), "(max_length=50, label='Attribute name', help_text=\n 'Name of attribute to create.')\n", (16853, 16939), False, 'from django import forms\n'), ((16967, 17041), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'label': '"""Default"""', 'initial': '(0)', 'help_text': '"""Default value."""'}), "(label='Default', initial=0, help_text='Default value.')\n", (16985, 17041), False, 'from django import forms\n'), ((17090, 17201), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(80)', 'label': '"""Description"""', 'help_text': '"""Description of this rule."""', 'required': '(False)'}), "(max_length=80, label='Description', help_text=\n 'Description of this rule.', required=False)\n", (17105, 17201), False, 'from django import forms\n'), ((17232, 17354), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'label': '"""Versions"""', 'min_value': '(1)', 'initial': '(1)', 'help_text': '"""Number of stored versions of the 
attribute."""'}), "(label='Versions', min_value=1, initial=1, help_text=\n 'Number of stored versions of the attribute.')\n", (17250, 17354), False, 'from django import forms\n'), ((17397, 17570), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'label': '"""Priority"""', 'min_value': '(0)', 'initial': '(10)', 'help_text': '"""The priority of this rule when using the templates, lower value will be displayed first."""'}), "(label='Priority', min_value=0, initial=10, help_text=\n 'The priority of this rule when using the templates, lower value will be displayed first.'\n )\n", (17415, 17570), False, 'from django import forms\n'), ((17653, 17836), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Important attribute"""', 'required': '(False)', 'initial': '(False)', 'help_text': '"""If true, the attribute will be displayed on the device/entity overview page."""'}), "(label='Important attribute', required=False, initial=\n False, help_text=\n 'If true, the attribute will be displayed on the device/entity overview page.'\n )\n", (17671, 17836), False, 'from django import forms\n'), ((18006, 18107), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Attribute name"""', 'help_text': '"""Name of attribute to delete."""'}), "(max_length=50, label='Attribute name', help_text=\n 'Name of attribute to delete.')\n", (18021, 18107), False, 'from django import forms\n'), ((18139, 18250), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(80)', 'label': '"""Description"""', 'help_text': '"""Description of this rule."""', 'required': '(False)'}), "(max_length=80, label='Description', help_text=\n 'Description of this rule.', required=False)\n", (18154, 18250), False, 'from django import forms\n'), ((18281, 18454), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'label': '"""Priority"""', 'min_value': '(0)', 'initial': '(10)', 'help_text': '"""The priority of this rule when using the templates, 
lower value will be displayed first."""'}), "(label='Priority', min_value=0, initial=10, help_text=\n 'The priority of this rule when using the templates, lower value will be displayed first.'\n )\n", (18299, 18454), False, 'from django import forms\n'), ((18589, 18700), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(80)', 'label': '"""Description"""', 'help_text': '"""Description of this rule."""', 'required': '(False)'}), "(max_length=80, label='Description', help_text=\n 'Description of this rule.', required=False)\n", (18604, 18700), False, 'from django import forms\n'), ((18731, 18904), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'label': '"""Priority"""', 'min_value': '(0)', 'initial': '(10)', 'help_text': '"""The priority of this rule when using the templates, lower value will be displayed first."""'}), "(label='Priority', min_value=0, initial=10, help_text=\n 'The priority of this rule when using the templates, lower value will be displayed first.'\n )\n", (18749, 18904), False, 'from django import forms\n'), ((19742, 19853), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(80)', 'label': '"""Description"""', 'help_text': '"""Description of this rule."""', 'required': '(False)'}), "(max_length=80, label='Description', help_text=\n 'Description of this rule.', required=False)\n", (19757, 19853), False, 'from django import forms\n'), ((19884, 20057), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'label': '"""Priority"""', 'min_value': '(0)', 'initial': '(10)', 'help_text': '"""The priority of this rule when using the templates, lower value will be displayed first."""'}), "(label='Priority', min_value=0, initial=10, help_text=\n 'The priority of this rule when using the templates, lower value will be displayed first.'\n )\n", (19902, 20057), False, 'from django import forms\n'), ((20890, 20954), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Username"""', 
'required': '(False)'}), "(max_length=50, label='Username', required=False)\n", (20905, 20954), False, 'from django import forms\n'), ((20998, 21117), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Description"""', 'required': '(False)', 'help_text': '"""Description of the added password."""'}), "(max_length=50, label='Description', required=False,\n help_text='Description of the added password.')\n", (21013, 21117), False, 'from django import forms\n'), ((21152, 21263), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(80)', 'label': '"""Description"""', 'help_text': '"""Description of this rule."""', 'required': '(False)'}), "(max_length=80, label='Description', help_text=\n 'Description of this rule.', required=False)\n", (21167, 21263), False, 'from django import forms\n'), ((21294, 21467), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'label': '"""Priority"""', 'min_value': '(0)', 'initial': '(10)', 'help_text': '"""The priority of this rule when using the templates, lower value will be displayed first."""'}), "(label='Priority', min_value=0, initial=10, help_text=\n 'The priority of this rule when using the templates, lower value will be displayed first.'\n )\n", (21312, 21467), False, 'from django import forms\n'), ((22001, 22112), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(80)', 'label': '"""Description"""', 'help_text': '"""Description of this rule."""', 'required': '(False)'}), "(max_length=80, label='Description', help_text=\n 'Description of this rule.', required=False)\n", (22016, 22112), False, 'from django import forms\n'), ((22146, 22267), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'label': '"""Number of subdevices"""', 'min_value': '(1)', 'initial': '(1)', 'help_text': '"""Number of subdevices to create."""'}), "(label='Number of subdevices', min_value=1, initial=1,\n help_text='Number of subdevices to create.')\n", (22164, 22267), False, 'from 
django import forms\n'), ((22318, 22462), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'label': '"""Sequence offset"""', 'initial': '(0)', 'help_text': '"""Base offset of sequence counter used when applying subdevice templates."""'}), "(label='Sequence offset', initial=0, help_text=\n 'Base offset of sequence counter used when applying subdevice templates.')\n", (22336, 22462), False, 'from django import forms\n'), ((22503, 22676), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'label': '"""Priority"""', 'min_value': '(0)', 'initial': '(10)', 'help_text': '"""The priority of this rule when using the templates, lower value will be displayed first."""'}), "(label='Priority', min_value=0, initial=10, help_text=\n 'The priority of this rule when using the templates, lower value will be displayed first.'\n )\n", (22521, 22676), False, 'from django import forms\n'), ((23215, 23326), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(80)', 'label': '"""Description"""', 'help_text': '"""Description of this rule."""', 'required': '(False)'}), "(max_length=80, label='Description', help_text=\n 'Description of this rule.', required=False)\n", (23230, 23326), False, 'from django import forms\n'), ((23357, 23530), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'label': '"""Priority"""', 'min_value': '(0)', 'initial': '(10)', 'help_text': '"""The priority of this rule when using the templates, lower value will be displayed first."""'}), "(label='Priority', min_value=0, initial=10, help_text=\n 'The priority of this rule when using the templates, lower value will be displayed first.'\n )\n", (23375, 23530), False, 'from django import forms\n'), ((23666, 23761), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'label': '"""Attribute type"""', 'choices': "(('text', 'text'), ('bool', 'boolean'))"}), "(label='Attribute type', choices=(('text', 'text'), (\n 'bool', 'boolean')))\n", (23683, 23761), False, 'from django import 
forms\n'), ((23851, 23962), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'label': '"""Attribute type"""', 'choices': "(('text', 'text'), ('bool', 'boolean'), ('int', 'int'))"}), "(label='Attribute type', choices=(('text', 'text'), (\n 'bool', 'boolean'), ('int', 'int')))\n", (23868, 23962), False, 'from django import forms\n'), ((24096, 24161), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""New value"""', 'required': '(False)'}), "(max_length=50, label='New value', required=False)\n", (24111, 24161), False, 'from django import forms\n'), ((24236, 24302), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""New value (true/false)"""', 'required': '(False)'}), "(label='New value (true/false)', required=False)\n", (24254, 24302), False, 'from django import forms\n'), ((24374, 24422), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'label': '"""New value"""', 'initial': '(0)'}), "(label='New value', initial=0)\n", (24392, 24422), False, 'from django import forms\n'), ((25506, 25627), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Large attribute"""', 'required': '(False)', 'help_text': '"""Attribute will have a separate display box."""'}), "(label='Large attribute', required=False, help_text=\n 'Attribute will have a separate display box.')\n", (25524, 25627), False, 'from django import forms\n'), ((25680, 25842), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Wikitext attribute"""', 'required': '(False)', 'help_text': '"""Attribute will be displayed using textile wikitext parsing, implies "large"."""'}), '(label=\'Wikitext attribute\', required=False, help_text=\n \'Attribute will be displayed using textile wikitext parsing, implies "large".\'\n )\n', (25698, 25842), False, 'from django import forms\n'), ((25888, 26025), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Hidden attribute"""', 'required': '(False)', 'help_text': 
'"""Attribute will hidden per default if it is large/wikitext."""'}), "(label='Hidden attribute', required=False, help_text=\n 'Attribute will hidden per default if it is large/wikitext.')\n", (25906, 26025), False, 'from django import forms\n'), ((26079, 26224), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Important attribute"""', 'required': '(False)', 'help_text': '"""Attribute will be displayed on a device/entities overview page."""'}), "(label='Important attribute', required=False, help_text=\n 'Attribute will be displayed on a device/entities overview page.')\n", (26097, 26224), False, 'from django import forms\n'), ((26277, 26408), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'label': '"""Versions"""', 'min_value': '(1)', 'initial': '(1)', 'help_text': '"""If set to > 1 a versioned attribute will be created."""'}), "(label='Versions', min_value=1, initial=1, help_text=\n 'If set to > 1 a versioned attribute will be created.')\n", (26295, 26408), False, 'from django import forms\n'), ((26534, 26686), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Encrypted attribute"""', 'required': '(False)', 'help_text': '"""Attribute will be encrypted using the same key as the parent password."""'}), "(label='Encrypted attribute', required=False, help_text=\n 'Attribute will be encrypted using the same key as the parent password.')\n", (26552, 26686), False, 'from django import forms\n'), ((26777, 26821), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (26792, 26821), False, 'from django import forms\n'), ((26838, 26923), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'label': '"""Value"""', 'choices': "(('true', 'True'), ('false', 'False'))"}), "(label='Value', choices=(('true', 'True'), ('false', 'False'))\n )\n", (26855, 26923), False, 'from django import forms\n'), ((27041, 27172), 'django.forms.IntegerField', 
'forms.IntegerField', ([], {'label': '"""Versions"""', 'min_value': '(1)', 'initial': '(1)', 'help_text': '"""If set to > 1 a versioned attribute will be created."""'}), "(label='Versions', min_value=1, initial=1, help_text=\n 'If set to > 1 a versioned attribute will be created.')\n", (27059, 27172), False, 'from django import forms\n'), ((27216, 27379), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Important attribute"""', 'required': '(False)', 'help_text': '"""If true, the attribute will be displayed on a device/entities overview page."""'}), "(label='Important attribute', required=False, help_text=\n 'If true, the attribute will be displayed on a device/entities overview page.'\n )\n", (27234, 27379), False, 'from django import forms\n'), ((27497, 27541), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (27512, 27541), False, 'from django import forms\n'), ((27558, 27610), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'label': '"""Integer Value"""', 'initial': '(0)'}), "(label='Integer Value', initial=0)\n", (27576, 27610), False, 'from django import forms\n'), ((27720, 27851), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'label': '"""Versions"""', 'min_value': '(1)', 'initial': '(1)', 'help_text': '"""If set to > 1 a versioned attribute will be created."""'}), "(label='Versions', min_value=1, initial=1, help_text=\n 'If set to > 1 a versioned attribute will be created.')\n", (27738, 27851), False, 'from django import forms\n'), ((27895, 28058), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Important attribute"""', 'required': '(False)', 'help_text': '"""If true, the attribute will be displayed on a device/entities overview page."""'}), "(label='Important attribute', required=False, help_text=\n 'If true, the attribute will be displayed on a device/entities overview page.'\n )\n", (27913, 28058), False, 
'from django import forms\n'), ((28178, 28222), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (28193, 28222), False, 'from django import forms\n'), ((28245, 28313), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'required': '(False)', 'label': '"""Description"""'}), "(max_length=100, required=False, label='Description')\n", (28260, 28313), False, 'from django import forms\n'), ((28388, 28432), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (28403, 28432), False, 'from django import forms\n'), ((28455, 28523), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'required': '(False)', 'label': '"""Description"""'}), "(max_length=100, required=False, label='Description')\n", (28470, 28523), False, 'from django import forms\n'), ((38194, 38240), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""<NAME>"""'}), "(max_length=50, label='<NAME>')\n", (38209, 38240), False, 'from django import forms\n'), ((38261, 38337), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Real Name (optional)"""', 'required': '(False)'}), "(max_length=50, label='Real Name (optional)', required=False)\n", (38276, 38337), False, 'from django import forms\n'), ((38374, 38452), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Description (optional)"""', 'required': '(False)'}), "(max_length=50, label='Description (optional)', required=False)\n", (38389, 38452), False, 'from django import forms\n'), ((38491, 38563), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Administrator"""', 'required': '(False)', 'initial': '(False)'}), "(label='Administrator', required=False, initial=False)\n", (38509, 38563), False, 'from django import forms\n'), ((38916, 
38965), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""User Name"""'}), "(max_length=50, label='User Name')\n", (38931, 38965), False, 'from django import forms\n'), ((38986, 39062), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Real Name (optional)"""', 'required': '(False)'}), "(max_length=50, label='Real Name (optional)', required=False)\n", (39001, 39062), False, 'from django import forms\n'), ((39099, 39177), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Description (optional)"""', 'required': '(False)'}), "(max_length=50, label='Description (optional)', required=False)\n", (39114, 39177), False, 'from django import forms\n'), ((39216, 39288), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Administrator"""', 'required': '(False)', 'initial': '(False)'}), "(label='Administrator', required=False, initial=False)\n", (39234, 39288), False, 'from django import forms\n'), ((39370, 39419), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""User Name"""'}), "(max_length=50, label='User Name')\n", (39385, 39419), False, 'from django import forms\n'), ((39440, 39516), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Real Name (optional)"""', 'required': '(False)'}), "(max_length=50, label='Real Name (optional)', required=False)\n", (39455, 39516), False, 'from django import forms\n'), ((39553, 39631), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Description (optional)"""', 'required': '(False)'}), "(max_length=50, label='Description (optional)', required=False)\n", (39568, 39631), False, 'from django import forms\n'), ((42158, 42202), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (42173, 42202), False, 'from django import forms\n'), 
((42225, 42303), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Description (optional)"""', 'required': '(False)'}), "(max_length=50, label='Description (optional)', required=False)\n", (42240, 42303), False, 'from django import forms\n'), ((42376, 42420), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (42391, 42420), False, 'from django import forms\n'), ((42443, 42521), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Description (optional)"""', 'required': '(False)'}), "(max_length=50, label='Description (optional)', required=False)\n", (42458, 42521), False, 'from django import forms\n'), ((42562, 42657), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'label': '"""Connection type"""', 'choices': "(('ldap', 'ldap'), ('ldaps', 'ldaps'))"}), "(label='Connection type', choices=(('ldap', 'ldap'), (\n 'ldaps', 'ldaps')))\n", (42579, 42657), False, 'from django import forms\n'), ((42682, 42734), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(256)', 'label': '"""LDAP server"""'}), "(max_length=256, label='LDAP server')\n", (42697, 42734), False, 'from django import forms\n'), ((42750, 42805), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(5)', 'label': '"""LDAP server port"""'}), "(max_length=5, label='LDAP server port')\n", (42765, 42805), False, 'from django import forms\n'), ((42824, 42872), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(128)', 'label': '"""Base DN"""'}), "(max_length=128, label='Base DN')\n", (42839, 42872), False, 'from django import forms\n'), ((42896, 43077), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(1000)', 'label': '"""Valid LDAP group"""', 'help_text': '"""Only members of the given group will be able to log in, use ":" to seperate groups."""', 'required': '(False)'}), '(max_length=1000, 
label=\'Valid LDAP group\', help_text=\n \'Only members of the given group will be able to log in, use ":" to seperate groups.\'\n , required=False)\n', (42911, 43077), False, 'from django import forms\n'), ((43165, 43209), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (43180, 43209), False, 'from django import forms\n'), ((43232, 43310), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Description (optional)"""', 'required': '(False)'}), "(max_length=50, label='Description (optional)', required=False)\n", (43247, 43310), False, 'from django import forms\n'), ((43342, 43392), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(256)', 'label': '"""AD server"""'}), "(max_length=256, label='AD server')\n", (43357, 43392), False, 'from django import forms\n'), ((43411, 43459), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(128)', 'label': '"""Base DN"""'}), "(max_length=128, label='Base DN')\n", (43426, 43459), False, 'from django import forms\n'), ((43483, 43664), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(1000)', 'label': '"""Valid LDAP group"""', 'help_text': '"""Only members of the given group will be able to log in, use ":" to seperate groups."""', 'required': '(False)'}), '(max_length=1000, label=\'Valid LDAP group\', help_text=\n \'Only members of the given group will be able to log in, use ":" to seperate groups.\'\n , required=False)\n', (43498, 43664), False, 'from django import forms\n'), ((43705, 43757), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(128)', 'label': '"""User Domain"""'}), "(max_length=128, label='User Domain')\n", (43720, 43757), False, 'from django import forms\n'), ((43821, 43895), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Reset attributes"""', 'required': '(False)', 'initial': '(True)'}), "(label='Reset 
attributes', required=False, initial=True)\n", (43839, 43895), False, 'from django import forms\n'), ((43951, 44028), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Reset device links"""', 'required': '(False)', 'initial': '(False)'}), "(label='Reset device links', required=False, initial=False)\n", (43969, 44028), False, 'from django import forms\n'), ((44081, 44154), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Reset passwords"""', 'required': '(False)', 'initial': '(True)'}), "(label='Reset passwords', required=False, initial=True)\n", (44099, 44154), False, 'from django import forms\n'), ((44208, 44282), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Reset subdevices"""', 'required': '(False)', 'initial': '(True)'}), "(label='Reset subdevices', required=False, initial=True)\n", (44226, 44282), False, 'from django import forms\n'), ((46157, 46201), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (46172, 46201), False, 'from django import forms\n'), ((46218, 46279), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Value"""', 'required': '(False)'}), "(max_length=50, label='Value', required=False)\n", (46233, 46279), False, 'from django import forms\n'), ((46437, 46492), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Read access"""', 'required': '(False)'}), "(label='Read access', required=False)\n", (46455, 46492), False, 'from django import forms\n'), ((46528, 46584), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Write access"""', 'required': '(False)'}), "(label='Write access', required=False)\n", (46546, 46584), False, 'from django import forms\n'), ((46617, 46681), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Applies to all users"""', 'required': '(False)'}), "(label='Applies to all users', 
required=False)\n", (46635, 46681), False, 'from django import forms\n'), ((46714, 46823), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Recursive"""', 'required': '(False)', 'help_text': '"""Applies recursively up the node tree."""'}), "(label='Recursive', required=False, help_text=\n 'Applies recursively up the node tree.')\n", (46732, 46823), False, 'from django import forms\n'), ((47443, 47487), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (47458, 47487), False, 'from django import forms\n'), ((47510, 47577), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Description"""', 'required': '(False)'}), "(max_length=50, label='Description', required=False)\n", (47525, 47577), False, 'from django import forms\n'), ((47970, 48014), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (47985, 48014), False, 'from django import forms\n'), ((48037, 48115), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Description (optional)"""', 'required': '(False)'}), "(max_length=50, label='Description (optional)', required=False)\n", (48052, 48115), False, 'from django import forms\n'), ((48510, 48563), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(200)', 'label': '"""Command text"""'}), "(max_length=200, label='Command text')\n", (48525, 48563), False, 'from django import forms\n'), ((48621, 48674), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(200)', 'label': '"""Command text"""'}), "(max_length=200, label='Command text')\n", (48636, 48674), False, 'from django import forms\n'), ((48730, 48774), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (48745, 48774), False, 'from django import 
forms\n'), ((48833, 48877), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (48848, 48877), False, 'from django import forms\n'), ((48933, 48977), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (48948, 48977), False, 'from django import forms\n'), ((49036, 49080), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (49051, 49080), False, 'from django import forms\n'), ((49146, 49190), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (49161, 49190), False, 'from django import forms\n'), ((49440, 49484), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (49455, 49484), False, 'from django import forms\n'), ((49732, 49780), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Username"""'}), "(max_length=50, label='Username')\n", (49747, 49780), False, 'from django import forms\n'), ((49969, 50013), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (49984, 50013), False, 'from django import forms\n'), ((50036, 50104), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'required': '(False)', 'label': '"""Description"""'}), "(max_length=100, required=False, label='Description')\n", (50051, 50104), False, 'from django import forms\n'), ((50179, 50253), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Skip attributes"""', 'required': '(False)', 'initial': '(False)'}), "(label='Skip attributes', required=False, initial=False)\n", (50197, 50253), False, 'from django import forms\n'), ((50321, 50396), 
'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Skip sub-devices"""', 'required': '(False)', 'initial': '(False)'}), "(label='Skip sub-devices', required=False, initial=False)\n", (50339, 50396), False, 'from django import forms\n'), ((50465, 50536), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'label': '"""Skip networks"""', 'required': '(False)', 'initial': '(True)'}), "(label='Skip networks', required=False, initial=True)\n", (50483, 50536), False, 'from django import forms\n'), ((50923, 50985), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'required': '(False)', 'label': '"""Value"""'}), "(max_length=100, required=False, label='Value')\n", (50938, 50985), False, 'from django import forms\n'), ((51058, 51177), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(500)', 'required': '(False)', 'label': '"""Reason"""', 'help_text': '"""Describe what is occupying this unit."""'}), "(max_length=500, required=False, label='Reason', help_text=\n 'Describe what is occupying this unit.')\n", (51073, 51177), False, 'from django import forms\n'), ((51293, 51410), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(500)', 'required': '(False)', 'label': '"""Reason"""', 'help_text': '"""Describe why this unit is reserved."""'}), "(max_length=500, required=False, label='Reason', help_text=\n 'Describe why this unit is reserved.')\n", (51308, 51410), False, 'from django import forms\n'), ((51528, 51572), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (51543, 51572), False, 'from django import forms\n'), ((51595, 51663), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'required': '(False)', 'label': '"""Description"""'}), "(max_length=100, required=False, label='Description')\n", (51610, 51663), False, 'from django import forms\n'), ((51701, 51858), 'django.forms.IntegerField', 
'forms.IntegerField', ([], {'label': '"""Retained versions"""', 'min_value': '(0)', 'initial': '(10)', 'help_text': '"""The number of config versions to retain, set to 0 for unlimited."""'}), "(label='Retained versions', min_value=0, initial=10,\n help_text=\n 'The number of config versions to retain, set to 0 for unlimited.')\n", (51719, 51858), False, 'from django import forms\n'), ((52267, 52311), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Name"""'}), "(max_length=50, label='Name')\n", (52282, 52311), False, 'from django import forms\n'), ((52334, 52402), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'required': '(False)', 'label': '"""Description"""'}), "(max_length=100, required=False, label='Description')\n", (52349, 52402), False, 'from django import forms\n'), ((6047, 6103), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'label': '"""Password key"""', 'choices': 'keylist'}), "(label='Password key', choices=keylist)\n", (6064, 6103), False, 'from django import forms\n'), ((7216, 7272), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'label': '"""Password key"""', 'choices': 'keylist'}), "(label='Password key', choices=keylist)\n", (7233, 7272), False, 'from django import forms\n'), ((8256, 8347), 'django.forms.MultipleChoiceField', 'forms.MultipleChoiceField', ([], {'required': '(False)', 'label': '"""Inherited templates"""', 'choices': 'choices'}), "(required=False, label='Inherited templates',\n choices=choices)\n", (8281, 8347), False, 'from django import forms\n'), ((9180, 9271), 'django.forms.MultipleChoiceField', 'forms.MultipleChoiceField', ([], {'required': '(False)', 'label': '"""Inherited templates"""', 'choices': 'choices'}), "(required=False, label='Inherited templates',\n choices=choices)\n", (9205, 9271), False, 'from django import forms\n'), ((10299, 10390), 'django.forms.MultipleChoiceField', 'forms.MultipleChoiceField', ([], {'required': '(False)', 'label': 
'"""Inherited templates"""', 'choices': 'choices'}), "(required=False, label='Inherited templates',\n choices=choices)\n", (10324, 10390), False, 'from django import forms\n'), ((11228, 11319), 'django.forms.MultipleChoiceField', 'forms.MultipleChoiceField', ([], {'required': '(False)', 'label': '"""Inherited templates"""', 'choices': 'choices'}), "(required=False, label='Inherited templates',\n choices=choices)\n", (11253, 11319), False, 'from django import forms\n'), ((19227, 19399), 'django.forms.MultipleChoiceField', 'forms.MultipleChoiceField', ([], {'required': '(False)', 'label': '"""Included node types"""', 'choices': 'choices', 'help_text': '"""If no node types are chosen for include, all types will match."""'}), "(required=False, label='Included node types',\n choices=choices, help_text=\n 'If no node types are chosen for include, all types will match.')\n", (19252, 19399), False, 'from django import forms\n'), ((19502, 19593), 'django.forms.MultipleChoiceField', 'forms.MultipleChoiceField', ([], {'required': '(False)', 'label': '"""Excluded node types"""', 'choices': 'choices'}), "(required=False, label='Excluded node types',\n choices=choices)\n", (19527, 19593), False, 'from django import forms\n'), ((20387, 20559), 'django.forms.MultipleChoiceField', 'forms.MultipleChoiceField', ([], {'required': '(False)', 'label': '"""Included node types"""', 'choices': 'choices', 'help_text': '"""If no node types are chosen for include, all types will match."""'}), "(required=False, label='Included node types',\n choices=choices, help_text=\n 'If no node types are chosen for include, all types will match.')\n", (20412, 20559), False, 'from django import forms\n'), ((20662, 20753), 'django.forms.MultipleChoiceField', 'forms.MultipleChoiceField', ([], {'required': '(False)', 'label': '"""Excluded node types"""', 'choices': 'choices'}), "(required=False, label='Excluded node types',\n choices=choices)\n", (20687, 20753), False, 'from django import forms\n'), ((21830, 
21886), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'label': '"""Password key"""', 'choices': 'keylist'}), "(label='Password key', choices=keylist)\n", (21847, 21886), False, 'from django import forms\n'), ((23042, 23099), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'label': '"""Template"""', 'choices': 'templatelist'}), "(label='Template', choices=templatelist)\n", (23059, 23099), False, 'from django import forms\n'), ((28964, 29024), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'label': '"""Select template"""', 'choices': 'tmpllist'}), "(label='Select template', choices=tmpllist)\n", (28981, 29024), False, 'from django import forms\n'), ((37595, 37698), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'label': '"""Network Tree"""', 'choices': 'nt_choices', 'help_text': '"""Network tree for address."""'}), "(label='Network Tree', choices=nt_choices, help_text=\n 'Network tree for address.')\n", (37612, 37698), False, 'from django import forms\n'), ((37831, 37955), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""IP-Address"""', 'help_text': '"""Valid forms: host: "a.b.c.d", cidr subnet: "a.b.c.d/nn\\""""'}), '(max_length=50, label=\'IP-Address\', help_text=\n \'Valid forms: host: "a.b.c.d", cidr subnet: "a.b.c.d/nn"\')\n', (37846, 37955), False, 'from django import forms\n'), ((38045, 38123), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Description (optional)"""', 'required': '(False)'}), "(max_length=50, label='Description (optional)', required=False)\n", (38060, 38123), False, 'from django import forms\n'), ((41613, 41669), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'label': '"""Password key"""', 'choices': 'keylist'}), "(label='Password key', choices=keylist)\n", (41630, 41669), False, 'from django import forms\n'), ((44812, 44867), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'label': '"""Config type"""', 'choices': 
'choices'}), "(label='Config type', choices=choices)\n", (44829, 44867), False, 'from django import forms\n'), ((45517, 45620), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'label': '"""Network Tree"""', 'choices': 'nt_choices', 'help_text': '"""Network tree for address."""'}), "(label='Network Tree', choices=nt_choices, help_text=\n 'Network tree for address.')\n", (45534, 45620), False, 'from django import forms\n'), ((45752, 45879), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Range Start"""', 'help_text': '"""Enter the start address of the range used for assignment\\""""'}), '(max_length=50, label=\'Range Start\', help_text=\n \'Enter the start address of the range used for assignment"\')\n', (45767, 45879), False, 'from django import forms\n'), ((45958, 46081), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(50)', 'label': '"""Range End"""', 'help_text': '"""Enter the end address of the range used for assignment\\""""'}), '(max_length=50, label=\'Range End\', help_text=\n \'Enter the end address of the range used for assignment"\')\n', (45973, 46081), False, 'from django import forms\n'), ((46987, 47091), 'django.forms.MultipleChoiceField', 'forms.MultipleChoiceField', ([], {'required': '(False)', 'label': '"""Users"""', 'choices': 'users', 'help_text': '"""Included users."""'}), "(required=False, label='Users', choices=users,\n help_text='Included users.')\n", (47012, 47091), False, 'from django import forms\n'), ((47197, 47304), 'django.forms.MultipleChoiceField', 'forms.MultipleChoiceField', ([], {'required': '(False)', 'label': '"""Groups"""', 'choices': 'groups', 'help_text': '"""Included groups."""'}), "(required=False, label='Groups', choices=groups,\n help_text='Included groups.')\n", (47222, 47304), False, 'from django import forms\n'), ((47725, 47829), 'django.forms.MultipleChoiceField', 'forms.MultipleChoiceField', ([], {'required': '(False)', 'label': '"""Users"""', 'choices': 
'users', 'help_text': '"""Included users."""'}), "(required=False, label='Users', choices=users,\n help_text='Included users.')\n", (47750, 47829), False, 'from django import forms\n'), ((48266, 48370), 'django.forms.MultipleChoiceField', 'forms.MultipleChoiceField', ([], {'required': '(False)', 'label': '"""Users"""', 'choices': 'users', 'help_text': '"""Included users."""'}), "(required=False, label='Users', choices=users,\n help_text='Included users.')\n", (48291, 48370), False, 'from django import forms\n'), ((276, 297), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {}), '()\n', (295, 297), False, 'from django import forms\n'), ((427, 446), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (444, 446), False, 'from django import forms\n'), ((554, 573), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (571, 573), False, 'from django import forms\n'), ((1169, 1211), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'id': 'searchbox'}"}), "(attrs={'id': 'searchbox'})\n", (1184, 1211), False, 'from django import forms\n'), ((2989, 3010), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {}), '()\n', (3008, 3010), False, 'from django import forms\n'), ((3140, 3161), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {}), '()\n', (3159, 3161), False, 'from django import forms\n'), ((3914, 3963), 'django.forms.Textarea', 'forms.Textarea', ([], {'attrs': "{'cols': '30', 'rows': '5'}"}), "(attrs={'cols': '30', 'rows': '5'})\n", (3928, 3963), False, 'from django import forms\n'), ((4767, 4816), 'django.forms.Textarea', 'forms.Textarea', ([], {'attrs': "{'cols': '30', 'rows': '5'}"}), "(attrs={'cols': '30', 'rows': '5'})\n", (4781, 4816), False, 'from django import forms\n'), ((5213, 5234), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {}), '()\n', (5232, 5234), False, 'from django import forms\n'), ((5444, 5465), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {}), '()\n', 
(5463, 5465), False, 'from django import forms\n'), ((6414, 6435), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {}), '()\n', (6433, 6435), False, 'from django import forms\n'), ((6633, 6654), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {}), '()\n', (6652, 6654), False, 'from django import forms\n'), ((25032, 25078), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'placeholder': 'Name'}"}), "(attrs={'placeholder': 'Name'})\n", (25047, 25078), False, 'from django import forms\n'), ((25267, 25314), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'placeholder': 'Value'}"}), "(attrs={'placeholder': 'Value'})\n", (25282, 25314), False, 'from django import forms\n'), ((25468, 25487), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (25485, 25487), False, 'from django import forms\n'), ((27005, 27024), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (27022, 27024), False, 'from django import forms\n'), ((27684, 27703), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (27701, 27703), False, 'from django import forms\n'), ((38683, 38704), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {}), '()\n', (38702, 38704), False, 'from django import forms\n'), ((38820, 38841), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {}), '()\n', (38839, 38841), False, 'from django import forms\n'), ((39781, 39802), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {}), '()\n', (39800, 39802), False, 'from django import forms\n'), ((40066, 40087), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {}), '()\n', (40085, 40087), False, 'from django import forms\n'), ((40243, 40264), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {}), '()\n', (40262, 40264), False, 'from django import forms\n'), ((40385, 40406), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {}), '()\n', (40404, 40406), False, 'from django import forms\n'), ((40523, 
40544), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {}), '()\n', (40542, 40544), False, 'from django import forms\n'), ((40826, 40847), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {}), '()\n', (40845, 40847), False, 'from django import forms\n'), ((45048, 45067), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (45065, 45067), False, 'from django import forms\n'), ((46360, 46379), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (46377, 46379), False, 'from django import forms\n'), ((49325, 49375), 'django.forms.Textarea', 'forms.Textarea', ([], {'attrs': "{'cols': '80', 'rows': '50'}"}), "(attrs={'cols': '80', 'rows': '50'})\n", (49339, 49375), False, 'from django import forms\n'), ((49619, 49669), 'django.forms.Textarea', 'forms.Textarea', ([], {'attrs': "{'cols': '80', 'rows': '50'}"}), "(attrs={'cols': '80', 'rows': '50'})\n", (49633, 49669), False, 'from django import forms\n'), ((49874, 49895), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {}), '()\n', (49893, 49895), False, 'from django import forms\n'), ((50816, 50867), 'django.forms.Textarea', 'forms.Textarea', ([], {'attrs': "{'cols': '100', 'rows': '15'}"}), "(attrs={'cols': '100', 'rows': '15'})\n", (50830, 50867), False, 'from django import forms\n'), ((52156, 52207), 'django.forms.Textarea', 'forms.Textarea', ([], {'attrs': "{'cols': '100', 'rows': '15'}"}), "(attrs={'cols': '100', 'rows': '15'})\n", (52170, 52207), False, 'from django import forms\n'), ((52608, 52659), 'django.forms.Textarea', 'forms.Textarea', ([], {'attrs': "{'cols': '100', 'rows': '15'}"}), "(attrs={'cols': '100', 'rows': '15'})\n", (52622, 52659), False, 'from django import forms\n'), ((52898, 52949), 'django.forms.Textarea', 'forms.Textarea', ([], {'attrs': "{'cols': '100', 'rows': '15'}"}), "(attrs={'cols': '100', 'rows': '15'})\n", (52912, 52949), False, 'from django import forms\n'), ((24804, 24855), 'django.forms.Textarea', 'forms.Textarea', ([], 
{'attrs': "{'cols': '100', 'rows': '20'}"}), "(attrs={'cols': '100', 'rows': '20'})\n", (24818, 24855), False, 'from django import forms\n'), ((30266, 30379), 'django.forms.RegexField', 'forms.RegexField', ([], {'max_length': '(50)', 'label': 'rule.attr_name', 'regex': 'rule.regexp', 'required': '(False)', 'help_text': 'help_text'}), '(max_length=50, label=rule.attr_name, regex=rule.regexp,\n required=False, help_text=help_text)\n', (30282, 30379), False, 'from django import forms\n'), ((35954, 35973), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (35971, 35973), False, 'from django import forms\n'), ((35998, 36058), 'django.forms.CharField', 'forms.CharField', ([], {'label': 'rule.name', 'widget': 'widget', 'initial': '""" """'}), "(label=rule.name, widget=widget, initial=' ')\n", (36013, 36058), False, 'from django import forms\n'), ((41986, 42007), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {}), '()\n', (42005, 42007), False, 'from django import forms\n'), ((36216, 36346), 'django.forms.RegexField', 'forms.RegexField', ([], {'max_length': '(50)', 'label': 'rule.name', 'regex': 'rule.value', 'required': '(False)', 'help_text': '(\'Must match: "%s"\' % rule.value)'}), '(max_length=50, label=rule.name, regex=rule.value, required\n =False, help_text=\'Must match: "%s"\' % rule.value)\n', (36232, 36346), False, 'from django import forms\n'), ((32000, 32019), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (32017, 32019), False, 'from django import forms\n'), ((32687, 32734), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'readonly': 'readonly'}"}), "(attrs={'readonly': 'readonly'})\n", (32702, 32734), False, 'from django import forms\n'), ((33028, 33047), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (33045, 33047), False, 'from django import forms\n'), ((33367, 33414), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'readonly': 'readonly'}"}), 
"(attrs={'readonly': 'readonly'})\n", (33382, 33414), False, 'from django import forms\n'), ((33788, 33807), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (33805, 33807), False, 'from django import forms\n'), ((34111, 34130), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (34128, 34130), False, 'from django import forms\n'), ((34469, 34516), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'readonly': 'readonly'}"}), "(attrs={'readonly': 'readonly'})\n", (34484, 34516), False, 'from django import forms\n'), ((34751, 34770), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (34768, 34770), False, 'from django import forms\n')] |
"""
Author(s):
<NAME> (<EMAIL>)
Date: 02/21/2020
Description:
This action will return several lines, with each line being a JSON
representation of the user
"""
from azure_utility_tool.utils import paginate
from azure_utility_tool.graph_endpoints import USER_GET_ENDPOINT
from azure_utility_tool.test_cases import TestCases
from azure_utility_tool.transformers import expand_onPremisesExtensionAttributes
def list_all_users(parsed_args, config, app):
"""
This action returns a dictionary of all the users indexed by
userPrincipalName
"""
user_data = []
paginate(
USER_GET_ENDPOINT,
user_data,
'value',
parsed_args,
config,
app,
test_data=TestCases().get_test_user_graph_data(),
std_output=False,
transformer=expand_onPremisesExtensionAttributes)
# Return a dictionary of all the users in the tenant, indexed by
# userPrincipalName
users = {}
for user in user_data:
users[user["userPrincipalName"]] = user
return users
| [
"azure_utility_tool.test_cases.TestCases"
] | [((758, 769), 'azure_utility_tool.test_cases.TestCases', 'TestCases', ([], {}), '()\n', (767, 769), False, 'from azure_utility_tool.test_cases import TestCases\n')] |
import torch
import torch.nn as nn
from oncopolicy.models.factory import RegisterModel
import pdb
class AbstractDeterministicGuideline(nn.Module):
def __init__(self, args):
super(AbstractDeterministicGuideline, self).__init__()
self.args = args
self.max_steps = args.max_steps
def get_logprob(self, z):
z = z.unsqueeze(1)
return torch.log(torch.cat([1-z, z], dim =1))
def get_prob(self, z):
return z
@RegisterModel("last_observed_risk")
class LastObservedRisk(AbstractDeterministicGuideline):
'''
Deterministic risk progression model. Predict
observed risk doesnt change from last observation
'''
def __init__(self, args):
super(LastObservedRisk, self).__init__(args)
self.max_pool = nn.MaxPool1d(kernel_size=self.max_steps, stride=1)
def forward(self, x, batch):
'''
Forward func used in training/eval risk progression model.
args:
- x: tensor of shape [B, self.max_steps, args.risk_dimension], with 0s for unobserved
- batch: full batch obj, contains 'oberved tensor'
returns:
- z: tensor of shape [B, self.max_steps, args.risk_dimension], with last observed risk for each dim
'''
B, _, D = x.size()
obsereved_key = 'observed' if 'observed' in batch else 'progression_observed'
obs = batch[obsereved_key] # shape [B, self.max_steps]
indicies = torch.arange(start=0, end=self.max_steps).unsqueeze(0).expand([B,self.max_steps]).to(self.args.device)
obs_indicies = (obs.float() * indicies.float()).unsqueeze(1)
obs_indicies_w_pad = torch.cat([torch.zeros([B, 1, self.max_steps]).to(self.args.device), obs_indicies[:,:,:-1]], dim=-1)
indices_of_most_recent = self.max_pool(obs_indicies_w_pad).long().transpose(1,2).expand(B, self.max_steps, D)
z = torch.gather(x, dim=1, index=indices_of_most_recent)
return z, None
@RegisterModel("static_risk")
class StaticRisk(AbstractDeterministicGuideline):
'''
Deterministic risk progression model. Predict
observed risk doesnt change from first observation. Assume static
'''
def __init__(self, args):
super(StaticRisk, self).__init__(args)
def forward(self, x, batch):
'''
Forward func used in training/eval risk progression model.
args:
- x: tensor of shape [B, self.max_steps, args.risk_dimension], with 0s for unobserved
- batch: full batch obj, contains 'oberved tensor'
returns:
- z: tensor of shape [B, self.max_steps, args.risk_dimension], with last observed risk for each dim
'''
z = x[:,0,:].unsqueeze(1).expand_as(x).contiguous()
return z, None
@RegisterModel("random")
class Random(AbstractDeterministicGuideline):
'''
Predict rand risk at each timestep.
'''
def __init__(self, args):
super(Random, self).__init__(args)
def forward(self, x, batch):
'''
Forward func used in training/eval risk progression model.
args:
- x: tensor of shape [B, self.max_steps, args.risk_dimension], with 0s for unobserved
- batch: full batch obj, contains 'oberved tensor'
returns:
- z: tensor of shape [B, MAX_STEPS, args.risk_dimension], with last observed risk for each dim
'''
z = torch.sigmoid( torch.randn_like(x).to(x.device))
return z, None
| [
"torch.nn.MaxPool1d",
"oncopolicy.models.factory.RegisterModel",
"torch.randn_like",
"torch.arange",
"torch.zeros",
"torch.gather",
"torch.cat"
] | [((467, 502), 'oncopolicy.models.factory.RegisterModel', 'RegisterModel', (['"""last_observed_risk"""'], {}), "('last_observed_risk')\n", (480, 502), False, 'from oncopolicy.models.factory import RegisterModel\n'), ((1994, 2022), 'oncopolicy.models.factory.RegisterModel', 'RegisterModel', (['"""static_risk"""'], {}), "('static_risk')\n", (2007, 2022), False, 'from oncopolicy.models.factory import RegisterModel\n'), ((2821, 2844), 'oncopolicy.models.factory.RegisterModel', 'RegisterModel', (['"""random"""'], {}), "('random')\n", (2834, 2844), False, 'from oncopolicy.models.factory import RegisterModel\n'), ((794, 844), 'torch.nn.MaxPool1d', 'nn.MaxPool1d', ([], {'kernel_size': 'self.max_steps', 'stride': '(1)'}), '(kernel_size=self.max_steps, stride=1)\n', (806, 844), True, 'import torch.nn as nn\n'), ((1916, 1968), 'torch.gather', 'torch.gather', (['x'], {'dim': '(1)', 'index': 'indices_of_most_recent'}), '(x, dim=1, index=indices_of_most_recent)\n', (1928, 1968), False, 'import torch\n'), ((390, 418), 'torch.cat', 'torch.cat', (['[1 - z, z]'], {'dim': '(1)'}), '([1 - z, z], dim=1)\n', (399, 418), False, 'import torch\n'), ((3487, 3506), 'torch.randn_like', 'torch.randn_like', (['x'], {}), '(x)\n', (3503, 3506), False, 'import torch\n'), ((1695, 1730), 'torch.zeros', 'torch.zeros', (['[B, 1, self.max_steps]'], {}), '([B, 1, self.max_steps])\n', (1706, 1730), False, 'import torch\n'), ((1482, 1523), 'torch.arange', 'torch.arange', ([], {'start': '(0)', 'end': 'self.max_steps'}), '(start=0, end=self.max_steps)\n', (1494, 1523), False, 'import torch\n')] |
"""
Base settings to build other settings files upon.
"""
import environ
ROOT_DIR = environ.Path(__file__) - 3 # (histonets/config/settings/base.py - 3 = histonets/)
APPS_DIR = ROOT_DIR.path('histonets')
env = environ.Env()
READ_DOT_ENV_FILE = env.bool('DJANGO_READ_DOT_ENV_FILE', default=False)
if READ_DOT_ENV_FILE:
# OS environment variables take precedence over variables from .env
env.read_env(str(ROOT_DIR.path('.env')))
# GENERAL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = env.bool('DJANGO_DEBUG', False)
# Local time zone. Choices are
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# though not all of them may be available with every OS.
# In Windows, this must be set to your system time zone.
TIME_ZONE = 'UTC'
# https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'en-us'
# https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
# DATABASES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
'default': env.db('DATABASE_URL'),
}
DATABASES['default']['ATOMIC_REQUESTS'] = True
# URLS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#root-urlconf
ROOT_URLCONF = 'config.urls'
# https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'config.wsgi.application'
# APPS
# ------------------------------------------------------------------------------
DJANGO_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# 'django.contrib.humanize', # Handy template tags
'django.contrib.admin',
]
THIRD_PARTY_APPS = [
'crispy_forms',
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.box',
'webpack_loader',
'health_check',
'health_check.db',
# 'health_check.cache',
# 'health_check.storage',
# 'health_check.contrib.celery', # requires celery
# 'health_check.contrib.psutil', # disk and memory utilization; requires psutil
# 'health_check.contrib.s3boto_storage', # requires boto and S3BotoStorage backend
]
LOCAL_APPS = [
'histonets.users.apps.UsersConfig',
'histonets.apps.HistonetsConfig',
'histonets.collections.apps.CollectionsConfig',
]
# https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
# MIGRATIONS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#migration-modules
MIGRATION_MODULES = {
'sites': 'histonets.contrib.sites.migrations'
}
# AUTHENTICATION
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#authentication-backends
AUTHENTICATION_BACKENDS = [
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend',
]
# https://docs.djangoproject.com/en/dev/ref/settings/#auth-user-model
AUTH_USER_MODEL = 'users.User'
# https://docs.djangoproject.com/en/dev/ref/settings/#login-redirect-url
LOGIN_REDIRECT_URL = 'users:redirect'
# https://docs.djangoproject.com/en/dev/ref/settings/#login-url
LOGIN_URL = 'account_login'
# PASSWORDS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#password-hashers
PASSWORD_HASHERS = [
# https://docs.djangoproject.com/en/dev/topics/auth/passwords/#using-argon2-with-django
'django.contrib.auth.hashers.Argon2PasswordHasher',
'django.contrib.auth.hashers.PBKDF2PasswordHasher',
'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
'django.contrib.auth.hashers.BCryptPasswordHasher',
]
# https://docs.djangoproject.com/en/dev/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# MIDDLEWARE
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#middleware
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
# STATIC
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = str(ROOT_DIR('static'))
# https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = [
str(APPS_DIR.path('static')),
str(ROOT_DIR('assets'))
]
# https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]
# webpack loader
WEBPACK_LOADER = {
'DEFAULT': {
'BUNDLE_DIR_NAME': 'bundles/',
'STATS_FILE': str(ROOT_DIR('webpack-stats.json')),
}
}
# MEDIA
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = str(ROOT_DIR('media'))
# https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
# IIIF
# ------------------------------------------------------------------------------
IIIF_DIR = 'iiif' # Relative to the default storage, e.g., /media/iiif
IIIF_CANONICAL_URI_PATTERN = "{}/iiif/2/{{}}/full/max/0/default.jpg"
IIIF_CANONICAL_URI = IIIF_CANONICAL_URI_PATTERN.format(env('CANTALOUPE_SERVER', default='http://localhost'))
IIIF_CANONICAL_CONTAINER_URI = None
if env('CANTALOUPE_CONTAINER_SERVER', default=False):
IIIF_CANONICAL_CONTAINER_URI = IIIF_CANONICAL_URI_PATTERN.format(
env('CANTALOUPE_CONTAINER_SERVER', default='http://localhost')
)
IIIF_IMAGE_FORMATS = ["jpg", "jpeg", "tif", "tiff", "gif", "png"]
# TEMPLATES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#templates
TEMPLATES = [
{
# https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-TEMPLATES-BACKEND
'BACKEND': 'django.template.backends.django.DjangoTemplates',
# https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
'DIRS': [
str(APPS_DIR.path('templates')),
],
'OPTIONS': {
# https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
'debug': DEBUG,
# https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders
# https://docs.djangoproject.com/en/dev/ref/templates/api/#loader-types
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
],
# https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
],
},
},
]
# http://django-crispy-forms.readthedocs.io/en/latest/install.html#template-packs
CRISPY_TEMPLATE_PACK = 'bootstrap4'
# FIXTURES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#fixture-dirs
FIXTURE_DIRS = (
str(APPS_DIR.path('fixtures')),
)
# EMAIL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND', default='django.core.mail.backends.smtp.EmailBackend')
# ADMIN
# ------------------------------------------------------------------------------
# Django Admin URL regex.
ADMIN_URL = r'^admin/'
# https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = [
("""Center for Interdisciplinary Digital Research (CIDR)""", '<EMAIL>'),
]
# https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
# Celery
# ------------------------------------------------------------------------------
INSTALLED_APPS += ['histonets.taskapp.celery.CeleryConfig']
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-broker_url
CELERY_BROKER_URL = env('CELERY_BROKER_URL', default='django://')
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-result_backend
if CELERY_BROKER_URL == 'django://':
CELERY_RESULT_BACKEND = 'redis://'
else:
CELERY_RESULT_BACKEND = CELERY_BROKER_URL
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-accept_content
CELERY_ACCEPT_CONTENT = ['json']
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-task_serializer
CELERY_TASK_SERIALIZER = 'json'
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-result_serializer
CELERY_RESULT_SERIALIZER = 'json'
CELERY_ALWAYS_EAGER = False # set to True for emulation
CELERYD_TASK_TERMINATES_WORKER = True # custom option
CELERYD_MAX_TASKS_PER_CHILD = 1
# django-allauth
# ------------------------------------------------------------------------------
# https://django-allauth.readthedocs.io/en/latest/configuration.html
ACCOUNT_ALLOW_REGISTRATION = env.bool('DJANGO_ACCOUNT_ALLOW_REGISTRATION', True)
ACCOUNT_AUTHENTICATION_METHOD = 'username_email'
ACCOUNT_EMAIL_REQUIRED = env.bool('DJANGO_ACCOUNT_EMAIL_REQUIRED', default=True)
ACCOUNT_EMAIL_VERIFICATION = env.bool('DJANGO_ACCOUNT_EMAIL_VERIFICATION', default="mandatory")
ACCOUNT_ADAPTER = 'histonets.users.adapters.AccountAdapter'
SOCIALACCOUNT_ADAPTER = 'histonets.users.adapters.SocialAccountAdapter'
# django-compressor
# ------------------------------------------------------------------------------
# https://django-compressor.readthedocs.io/en/latest/quickstart/#installation
INSTALLED_APPS += ['compressor']
STATICFILES_FINDERS += ['compressor.finders.CompressorFinder']
COMPRESS_PRECOMPILERS = (
('text/x-sass', 'sass {infile} {outfile}'),
)
# Histonets
# ------------------------------------------------------------------------------
| [
"environ.Path",
"environ.Env"
] | [((214, 227), 'environ.Env', 'environ.Env', ([], {}), '()\n', (225, 227), False, 'import environ\n'), ((86, 108), 'environ.Path', 'environ.Path', (['__file__'], {}), '(__file__)\n', (98, 108), False, 'import environ\n')] |
#
# voice-skill-sdk
#
# (C) 2020, Deutsche Telekom AG
#
# Deutsche Telekom AG and all other contributors /
# copyright owners license this file to you under the MIT
# License (the "License"); you may not use this file
# except in compliance with the License.
# You may obtain a copy of the License at
#
# https://opensource.org/licenses/MIT
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from pathlib import Path
from bottle import get, static_file, redirect
here: Path = Path(__file__).absolute().parent
UI_ROOT = here / 'node_modules/swagger-ui-dist'
@get('/')
def root():
return redirect('/swagger-ui/')
@get('/swagger-ui/')
@get('/swagger-ui/<filename:path>')
def send_static(filename=None):
return static_file(filename or 'index.html', root=UI_ROOT)
| [
"bottle.redirect",
"bottle.static_file",
"bottle.get",
"pathlib.Path"
] | [((835, 843), 'bottle.get', 'get', (['"""/"""'], {}), "('/')\n", (838, 843), False, 'from bottle import get, static_file, redirect\n'), ((895, 914), 'bottle.get', 'get', (['"""/swagger-ui/"""'], {}), "('/swagger-ui/')\n", (898, 914), False, 'from bottle import get, static_file, redirect\n'), ((916, 950), 'bottle.get', 'get', (['"""/swagger-ui/<filename:path>"""'], {}), "('/swagger-ui/<filename:path>')\n", (919, 950), False, 'from bottle import get, static_file, redirect\n'), ((867, 891), 'bottle.redirect', 'redirect', (['"""/swagger-ui/"""'], {}), "('/swagger-ui/')\n", (875, 891), False, 'from bottle import get, static_file, redirect\n'), ((994, 1045), 'bottle.static_file', 'static_file', (["(filename or 'index.html')"], {'root': 'UI_ROOT'}), "(filename or 'index.html', root=UI_ROOT)\n", (1005, 1045), False, 'from bottle import get, static_file, redirect\n'), ((750, 764), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (754, 764), False, 'from pathlib import Path\n')] |
# Generated by Django 2.1.7 on 2019-05-24 19:17
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("courses", "0008_enrollment_company")]
operations = [
migrations.AddField(
model_name="courserunenrollment",
name="active",
field=models.BooleanField(
default=True,
help_text="Indicates whether or not this enrollment should be considered active",
),
),
migrations.AddField(
model_name="courserunenrollment",
name="edx_enrolled",
field=models.BooleanField(
default=False,
help_text="Indicates whether or not the request succeeded to enroll via the edX API",
),
),
migrations.AddField(
model_name="programenrollment",
name="active",
field=models.BooleanField(
default=True,
help_text="Indicates whether or not this enrollment should be considered active",
),
),
]
| [
"django.db.models.BooleanField"
] | [((332, 452), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)', 'help_text': '"""Indicates whether or not this enrollment should be considered active"""'}), "(default=True, help_text=\n 'Indicates whether or not this enrollment should be considered active')\n", (351, 452), False, 'from django.db import migrations, models\n'), ((633, 758), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'help_text': '"""Indicates whether or not the request succeeded to enroll via the edX API"""'}), "(default=False, help_text=\n 'Indicates whether or not the request succeeded to enroll via the edX API')\n", (652, 758), False, 'from django.db import migrations, models\n'), ((931, 1051), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)', 'help_text': '"""Indicates whether or not this enrollment should be considered active"""'}), "(default=True, help_text=\n 'Indicates whether or not this enrollment should be considered active')\n", (950, 1051), False, 'from django.db import migrations, models\n')] |
from flask import Blueprint, current_app, jsonify
from seventweets.exceptions import error_handler
from seventweets import tweet
base = Blueprint('base', __name__)
@base.route('/')
@error_handler
def index():
original = tweet.count('original')
retweets = tweet.count('retweet')
return jsonify({
'name': current_app.config['ST_OWN_NAME'],
'address': current_app.config['ST_OWN_ADDRESS'],
'stats': {
'original': original,
'retweets': retweets,
'total': original + retweets,
}
})
| [
"flask.jsonify",
"flask.Blueprint",
"seventweets.tweet.count"
] | [((137, 164), 'flask.Blueprint', 'Blueprint', (['"""base"""', '__name__'], {}), "('base', __name__)\n", (146, 164), False, 'from flask import Blueprint, current_app, jsonify\n'), ((227, 250), 'seventweets.tweet.count', 'tweet.count', (['"""original"""'], {}), "('original')\n", (238, 250), False, 'from seventweets import tweet\n'), ((266, 288), 'seventweets.tweet.count', 'tweet.count', (['"""retweet"""'], {}), "('retweet')\n", (277, 288), False, 'from seventweets import tweet\n'), ((300, 495), 'flask.jsonify', 'jsonify', (["{'name': current_app.config['ST_OWN_NAME'], 'address': current_app.config[\n 'ST_OWN_ADDRESS'], 'stats': {'original': original, 'retweets': retweets,\n 'total': original + retweets}}"], {}), "({'name': current_app.config['ST_OWN_NAME'], 'address': current_app.\n config['ST_OWN_ADDRESS'], 'stats': {'original': original, 'retweets':\n retweets, 'total': original + retweets}})\n", (307, 495), False, 'from flask import Blueprint, current_app, jsonify\n')] |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from datetime import datetime
# import rasahub_google_calendar
from time import gmtime, time, strftime
import json
import locale
import logging
import math
import mysql.connector
from mysql.connector import errorcode
import os
import string
import random
import re
import yaml
from nltk.stem.snowball import SnowballStemmer
import httplib2
stemmer = SnowballStemmer("german")
logger = logging.getLogger(__name__)
offlinemode = False
locale.setlocale(locale.LC_ALL, "de_DE.utf8")
class NotAuthenticatedError(Exception):
"""
Class NotAuthenticatedError is thrown everytime a google user is not
authenticated properly.
"""
def __init__(self):
"""
Exception initialization, sets error message.
"""
self.msg = "Not Authenticated"
def __str__(self):
"""
to-String method
:return: Error message
:rtype: str
"""
return self.msg
def connectToDB(dbHost, dbName, dbPort, dbUser, dbPwd):
"""
Establishes connection to the database
:param dbHost: database host address
:type state: str.
:param dbName: database name
:type state: str.
:param dbPort: database host port
:type state: int.
:param dbUser: database username
:type name: str.
:param dbPwd: <PASSWORD>
:type state: str.
:return: Instance of class MySQLConnection
:rtype: MySQLConnection
"""
try:
cnx = mysql.connector.connect(user=dbUser, port=int(dbPort), password=dbPwd, host=dbHost, database=dbName, autocommit=True)
except mysql.connector.Error as err:
if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
print("Something is wrong with your user name or password")
elif err.errno == errorcode.ER_BAD_DB_ERROR:
print("Database does not exist")
else:
print(err)
else:
return cnx
def getBotID(cursor):
"""
Gets a suitable Bot User ID from a Humhub User Group called 'Bots'
:return: Bots Humhub User ID
:rtype: int
"""
query = "SELECT `user_id` FROM `group` JOIN `group_user` ON `group`.`id` = `group_user`.`group_id` WHERE `group`.`name` = 'Bots' ORDER BY user_id DESC LIMIT 1;"
cursor.execute(query)
return cursor.fetchone()[0]
def getNextID(cursor, current_id, bot_id, trigger):
"""
Gets the next message ID from Humhub
:return: Next message ID to process
:rtype: int
"""
query = ("SELECT id FROM message_entry WHERE user_id <> %(bot_id)s AND (content LIKE %(trigger)s OR message_entry.message_id IN "
"(SELECT DISTINCT message_entry.message_id FROM message_entry JOIN user_message "
"ON message_entry.message_id=user_message.message_id WHERE user_message.user_id = 5 ORDER BY message_entry.message_id)) "
"AND id > %(current_id)s ORDER BY id ASC")
data = {
'bot_id': bot_id,
'trigger': trigger + '%', # wildcard for SQL
'current_id': current_id,
}
row_count = cursor.execute(query, data)
results = cursor.fetchall()
if len(results) > 0: # fetchall returns list of results, each as a tuple
return results[0][0]
else:
return current_id
def getMessage(cursor, msg_id, trigger):
"""
Gets the newest message
:returns: Containing the message itself as string and the conversation ID
:rtype: dict
"""
query = "SELECT message_id, content FROM message_entry WHERE (user_id <> 5 AND id = {})".format(msg_id)
cursor.execute(query)
result = cursor.fetchone()
message_id = result[0]
if result[1][:len(trigger)] == trigger:
message = result[1][len(trigger):].strip()
else:
message = result[1].strip()
messagedata = {
'message': message,
'message_id': message_id
}
return messagedata
def create_new_conversation(cursor, title, message, user_id, bot_id):
"""
Creates new conversation in Humhub.
:param cursor: MySQL Cursor for database processes
:param str title: Title of conversation
:param str message: First message of created conversation
:param int user_id: Humhub User ID to create conversation with
:param int bot_id: User ID to use for the bot
"""
query = "INSERT INTO message (title, created_by, updated_by) VALUES ({}, {}, {})".format(title, bot_id, bot_id)
cursor.execute(query)
message_id = cursor.lastrowid
query = "INSERT INTO user_message (message_id, user_id, created_by, updated_by) VALUES ({}, {}, {}, {})".format(message_id, user_id, bot_id, bot_id)
cursor.execute(query)
query = "INSERT INTO message_entry (message_id, user_id, content, created_by, updated_by) VALUES ({}, {}, {}, {})".format(message_id, user_id, message, bot_id, bot_id)
cursor.execute(query)
def check_google_access(message_id, cursor, bot_id):
    """
    Checks Google calendar access for every participant of a conversation.

    Each participant's Google calendar is fetched once; the first user
    without access is sent an authentication link.

    Fixes vs. the previous version: the except branch called the undefined
    name ``send_auth`` (the function defined in this module is
    ``send_auth_link``), and the unused ``calendars``/``calendar`` locals
    were removed.

    :param int message_id: ID of message (conversation)
    :param cursor: MySQL Cursor for database processes
    :param bot_id: Humhub User ID of bot to exclude from calendar
    :return: True if every user has granted access, otherwise an empty list
    """
    users = getUsersInConversation(cursor, message_id, bot_id)
    for userID in users:
        try:
            get_google_calendar_items(userID)
        except Exception:
            # user has not authenticated yet -> send the auth URL
            send_auth_link(cursor, userID, bot_id)
            return []
    return True
def getCurrentID(cursor):
    """
    Looks up the highest id currently stored in Humhub's message_entry table.

    :param cursor: MySQL Cursor for database processes
    :return: Current max message ID
    :rtype: int
    """
    cursor.execute("SELECT MAX(id) FROM message_entry;")
    row = cursor.fetchone()
    return row[0]
def send_auth_link(cursor, user_id, bot_id):
    """
    Opens a new conversation containing the Google auth URL for a user who
    has not authenticated yet.

    :param cursor: MySQL Cursor for database processes
    :param user_id: Humhub User ID to send URL to
    :param bot_id: Humhub User ID of bot to exclude from calendar
    """
    auth_url = "http://localhost:8080/" + str(user_id)
    create_new_conversation(
        cursor, "Bitte authentifizieren Sie sich", auth_url, user_id, bot_id
    )
def getUsersInConversation(cursor, sender_id, bot_id):
    """
    Returns a list of Humhub User IDs participating in the conversation
    using the sender ID.

    Fixes vs. the previous version: the cursor yields one-element tuples,
    but the old code compared the whole tuple against the integer
    ``bot_id`` — the comparison was always unequal, so the bot was never
    excluded. The query is also parameterized now instead of using
    ``str.format``.

    :param cursor: Mysql Cursor
    :type cursor: mysql.connector.cursor.MySQLCursor
    :param sender_id: Humhub conversation sender ID
    :type sender_id: int
    :param bot_id: Bot Humhub User ID
    :type bot_id: int
    :return: List of users in conversation (bot excluded)
    :rtype: list
    """
    query = "SELECT user_id FROM user_message WHERE message_id = %s"
    cursor.execute(query, (sender_id,))
    users = []
    for row in cursor:
        # row is a 1-tuple; compare its value, not the tuple itself
        if row[0] != bot_id:
            users.append(row[0])
    return users
def getCalendar(user_id, date, cursor):
    """
    Gets the busy/free calendar pattern of a given Humhub User ID.

    The busy slots are taken from the user's Google calendar; the former
    Humhub-database lookup is kept below as commented-out code.

    :param user_id: Humhub user ID to get the calendar information from
    :type user_id: int
    :param date: Specific date to get the calendar information
    :type date: datetime
    :param cursor: Mysql Cursor
    :type cursor: mysql.connector.cursor.MySQLCursor
    :return: Calendar pattern with set busy dates of user_id
    :rtype: dict
    :raises NotAuthenticatedError: if the user has not granted Google access
    """
    # create blank (all-free) calendar pattern
    calendarPattern = createCalendarPattern()
    # get busy appointments
    # NOTE(review): startdate/enddate only feed the commented-out Humhub
    # query below; on the Google path they are dead code.
    startdate = date.strftime("%Y-%m-%d 00:00:00")
    enddate = date.strftime("%Y-%m-%d 23:59:59")
    startdate = "'" + startdate + "'"
    enddate = "'" + enddate + "'"
    #query = ("""SELECT start_datetime, end_datetime FROM calendar_entry
    #         INNER JOIN calendar_entry_participant ON
    #         calendar_entry.id =
    #         calendar_entry_participant.calendar_entry_id
    #         WHERE calendar_entry_participant.user_id = {} AND
    #         calendar_entry_participant.participation_state = 3 AND
    #         calendar_entry.start_datetime BETWEEN {} AND {}
    #         """).format(user_id, startdate, enddate)
    #cursor.execute(query)
    try:
        dates = get_google_calendar_items(user_id)
    except:
        # not authenticated -> send the auth URL and abort
        # NOTE(review): bare except also swallows unrelated errors (network,
        # bugs) and reports them as "not authenticated" — confirm intent.
        bot_id = getBotID(cursor)
        send_auth_link(cursor, user_id, bot_id)
        raise NotAuthenticatedError
    #for (start_datetime, end_datetime) in cursor:
    #    busydates.append([start_datetime, end_datetime])
    #cnx.close()
    return setBusyDates(calendarPattern, dates)
def setBusyDates(calendarPattern, dates):
    """
    Marks the busy quarter-hour slots of a blank calendar pattern.

    Busy slots are set to 1; free slots keep their value (0 for a blank
    pattern from createCalendarPattern()).

    :param calendarPattern: Blank calendar pattern (24 hours x 4 quarters)
    :type calendarPattern: array
    :param dates: Appointments, each entry with 'start'/'end' ISO timestamps
    :type dates: dict
    :return: Calendarpattern with set busy dates
    :rtype: dict
    """
    # Google Edition
    for appointment in dates:
        start = dates[appointment]['start']  # format: 2018-05-24T17:00:00
        end = dates[appointment]['end']
        # NOTE(review): assumes `strptime` returns objects exposing
        # .hour/.minute (i.e. datetime.datetime.strptime); time.strptime
        # would expose tm_hour/tm_min instead — confirm the import.
        start_datetime = strptime(start, "%Y-%m-%dT%H:%M:%S")
        end_datetime = strptime(end, "%Y-%m-%dT%H:%M:%S")
        # convert minute to array index, round down as its starting time
        startIndex = int(float(start_datetime.minute) / 15.)
        # end minute index is round up
        endIndex = int(math.ceil(float(end_datetime.minute) / 15.))
        endAtZero = False
        if endIndex == 0:
            endAtZero = True
        else:
            endIndex -= 1  # correct index for all cases except 0
        # mark every slot between start and end indexes as busy
        # NOTE(review): the original comments said "set to 0", but busy
        # slots are set to 1 here (consistent with getDateSuggestion
        # treating 0 as free).
        for i in range(start_datetime.hour, end_datetime.hour + 1):
            # appointment starts and ends within the same hour
            if start_datetime.hour == end_datetime.hour:
                for j in range(startIndex, endIndex + 1):
                    calendarPattern[i][j] = 1
                break
            # three cases: i = start.hour, i = end.hour or between
            if i == start_datetime.hour:
                # partial first hour: from startIndex to the hour's end
                for j in range(startIndex, 4):
                    calendarPattern[i][j] = 1
            elif i == end_datetime.hour:
                if endAtZero:
                    break
                # partial last hour: from the hour's start to endIndex
                for j in range(endIndex + 1):
                    calendarPattern[i][j] = 1
            else:
                # full hour in between: mark all four quarters
                for j in range(0, 4):
                    calendarPattern[i][j] = 1
    # Humhub Edition (legacy database-backed variant, kept for reference)
    #for (start_datetime, end_datetime) in cursor:
    #    # convert minute to array index, round down as its starting time
    #    startIndex = int(float(start_datetime.minute) / 15.)
    #    # end minute index is round up
    #    endIndex = int(math.ceil(float(end_datetime.minute) / 15.))
    #    endAtZero = False
    #    if endIndex == 0:
    #        endAtZero = True
    #    else:
    #        endIndex -= 1  # correct index for all cases except 0
    #    # set all patterns to 0 between start and end indezes
    #    for i in range(start_datetime.hour, end_datetime.hour + 1):
    #        if start_datetime.hour == end_datetime.hour:
    #            for j in range(startIndex, endIndex + 1):
    #                calendarPattern[i][j] = 1
    #            break
    #        # three cases: i = start.hour, i = end.hour or between
    #        if i == start_datetime.hour:
    #            # only set to 0 beginning from startIndex to 3
    #            for j in range(startIndex, 4):
    #                calendarPattern[i][j] = 1
    #        elif i == end_datetime.hour:
    #            if endAtZero:
    #                break
    #            # only set to 0 beginning from 0 to endIndex
    #            for j in range(endIndex + 1):
    #                calendarPattern[i][j] = 1
    #        else:
    #            # set all to 0
    #            for j in range(0, 4):
    #                calendarPattern[i][j] = 1
    return calendarPattern
def createCalendarPattern(datefrom=None, dateto=None):
    """
    Builds a one-day availability grid of 24 hours x 4 quarter-hours.

    Without arguments every slot is free (0). With a datefrom/dateto pair
    every slot is busy (1) except the slots inside the given timeframe.

    Indexing: pattern[5][0] is 05:00, pattern[5][1] is 05:15,
    pattern[5][2] is 05:30, pattern[5][3] is 05:45, pattern[6][0] is 06:00
    and so on.

    :param datefrom: Start datetime of the free timeframe
                     (ISO string ending in .000Z)
    :param dateto: End datetime of the free timeframe
                   (ISO string ending in .000Z)
    :return: Blank calendarpattern
    :rtype: dict
    """
    if datefrom and dateto:
        # everything busy by default, then carve the free window out
        pattern = [[1, 1, 1, 1] for _ in range(24)]
        timefrom = datetime.strptime(datefrom, '%Y-%m-%dT%H:%M:%S.000Z')
        timeto = datetime.strptime(dateto, '%Y-%m-%dT%H:%M:%S.000Z')
        # round the start up and the end down to the enclosing quarter
        startIndex = int(math.ceil(float(timefrom.minute) / 15.))
        endIndex = int(float(timeto.minute) / 15.) - 1
        for hour in range(timefrom.hour, timeto.hour + 1):
            if hour == timefrom.hour:
                # partial first hour of the window
                for quarter in range(startIndex, 4):
                    pattern[hour][quarter] = 0
            elif hour == timeto.hour:
                # partial last hour of the window
                for quarter in range(endIndex + 1):
                    pattern[hour][quarter] = 0
            else:
                # full hour inside the window
                for quarter in range(4):
                    pattern[hour][quarter] = 0
        return pattern
    # no timeframe given: the whole day is free
    return [[0, 0, 0, 0] for _ in range(24)]
def matchCalendars(calendars):
    """
    Merges several day-grid calendar patterns into one.

    A quarter-hour slot is marked 1 in the result as soon as any of the
    given calendars has a 1 there; otherwise it stays 0. ``None`` entries
    in the list are skipped.

    :param calendars: calendar patterns to merge (entries may be None)
    :type calendars: array
    :return: Matched calendarpattern
    :rtype: dict
    """
    merged = [[0, 0, 0, 0] for _ in range(24)]
    for pattern in calendars:
        if pattern is None:
            continue
        for hour in range(24):
            for quarter in range(4):
                if pattern[hour][quarter] == 1:
                    merged[hour][quarter] = 1
    return merged
def getDateSuggestion(calendar,
                      duration,
                      timesSearched,
                      beginHour,
                      beginMinuteIndex,
                      endHour,
                      endHourIndex):
    """
    Gets date suggestion from a filled calendar
    :param calendar: The calendar to search for a free date
    :type calendar: array (Calendarpattern)
    :param duration: Needed duration of the free date to be searched in minutes
    :type duration: int
    :param timesSearched: Number of date suggestions to skip - when we already
                          searched two times we want to skip the first two
                          occurences
    :type timesSearched: int
    :param beginHour: Index of starting hour to be searched
    :type beginHour: int
    :param beginMinuteIndex: Index of starting quarter to be searched
                             (x times 15)
    :type beginMinuteIndex: int
    :param endHour: Index of ending hour to be searched
    :type endHour: int
    :param endHourIndex: Index of ending quarter to be searched (x times 15)
                         (note: unused in the body)
    :type endHourIndex: int
    :return: Hour and Minute of free appointment, or the remaining skip
             count as a 1-element list if no slot was found
    :rtype: list
    """
    # default to the minimum bookable duration of one quarter hour
    if duration == 0 or duration is None:
        duration = 15
    # transfer duration to minute indezes (number of quarter-hour slots)
    durationindezes = int(math.ceil(float(duration) / 15.))
    if timesSearched is None:
        timesSearched = 0
    # scan for the next slot where all required quarters are free (== 0)
    for i in range(beginHour, endHour):
        if beginHour == i:
            rangej = beginMinuteIndex
        else:
            rangej = 0
        for j in range(rangej, 4):
            if calendar[i][j] == 0:
                founddate = True
                # check the following durationindezes - 1 quarters too
                for d in range(1, durationindezes):
                    n = j + d
                    if calendar[i + int(n / 4)][n % 4] == 1:
                        founddate = False
                        # set i and j to new values
                        # to skip already checked dates
                        # NOTE(review): reassigning the loop variables has
                        # no effect — `for` rebinds i/j from the range on
                        # the next iteration, so nothing is skipped.
                        i = i + int(n / 4)
                        j = n % 4
                        break
                else:
                    # NOTE(review): this for-else runs exactly when NO busy
                    # quarter was found (founddate stays True), yet it
                    # `continue`s past the success branch below — so the
                    # [i, j * 15] return appears unreachable. Confirm
                    # intended semantics; the `if founddate:` branch is
                    # only reached after a break, where founddate is False.
                    continue
                # hour = i, minute = j
                if founddate:
                    if timesSearched == 0:
                        return [i, j * 15]
                    else:
                        # skip this occurrence and advance past it
                        i = i + int((durationindezes + j) / 4)
                        j = (durationindezes + j) % 4
                        timesSearched -= 1
    # no slot found on this day: report how many skips are still pending
    return [timesSearched]
def suggestDate(
        datefrom,
        dateto,
        duration,
        users,
        timesSearched,
        beginHour,
        beginMinuteIndex,
        endHour,
        endHourIndex,
        cnx
        ):
    """
    Gets calendars of users and calls the free date searching method
    getDateSuggestion, advancing day by day from datefrom to dateto.

    :param datefrom: Starting datetime to be searched
                     (ISO string ending in .000Z)
    :type datefrom: str
    :param dateto: Ending datetime to be searched (ISO string ending in .000Z)
    :type dateto: str
    :param duration: Needed duration of the free date to be searched in minutes
    :type duration: int
    :param users: List of Humhub User IDs to get calendars from
    :type users: list
    :param timesSearched: Number of date suggestions to skip - when we already
                          searched two times we want to skip the first two
                          occurences
    :type timesSearched: int
    :param beginHour: Index of starting hour to be searched
    :type beginHour: int
    :param beginMinuteIndex: Index of starting quarter to be searched
                             (x times 15)
    :type beginMinuteIndex: int
    :param endHour: Index of ending hour to be searched
    :type endHour: int
    :param endHourIndex: Index of ending quarter to be searched (x times 15)
    :type endHourIndex: int
    :param cnx: database connection/cursor passed through to getCalendar
    :return: [hour, minute] of the first free slot, or [] if none was found
    :rtype: list
    :raises NotAuthenticatedError: if fetching any user's calendar fails
    """
    calendarPattern = []
    dtfrom = datetime.strptime(datefrom, '%Y-%m-%dT%H:%M:%S.000Z')
    dtto = datetime.strptime(dateto, '%Y-%m-%dT%H:%M:%S.000Z')
    while dtfrom < dtto:
        # fresh all-free pattern that represents the searchable day
        calendarPattern = createCalendarPattern()
        # get users calendars
        calendars = []
        auth = True
        for user in users:
            try:
                calendar = getCalendar(user, dtfrom, cnx)
                calendars.append(calendar)
            except:
                # NOTE(review): bare except treats ANY failure as a missing
                # Google authentication — confirm this is intended.
                auth = False
        if auth == False:
            raise NotAuthenticatedError
        # merge all user calendars (plus the blank day) into one pattern
        calendars.append(calendarPattern)
        datesuggest = None
        datesuggest = matchCalendars(calendars)
        # gets hour and minute, needs to be combined with extracted date
        suggestion = getDateSuggestion(
            datesuggest,
            duration,
            timesSearched,
            beginHour,
            beginMinuteIndex,
            endHour,
            endHourIndex
        )
        # 1-element result: nothing found today, carry the skip count over
        if len(suggestion) == 1:
            timesSearched = suggestion[0]
        dtfrom = dtfrom + timedelta(days=1)
        # 2-element result: [hour, minute] of a free slot
        if len(suggestion) == 2:
            return suggestion
    return []
def getEndTime(datetime, duration):
    """
    Computes the end time of an appointment from its start and duration.

    The duration is rounded up to the next full quarter hour before being
    added to the start time.

    :param datetime: Starting datetime (note: the parameter name shadows
                     the datetime class inside this function)
    :type datetime: datetime
    :param duration: Duration in minutes
    :type duration: int
    :return: End datetime
    :rtype: datetime
    """
    # round the duration up to a multiple of 15 minutes
    rounded = int(math.ceil(float(duration) / 15.)) * 15
    extra_hours, extra_minutes = divmod(rounded, 60)
    end_hour = datetime.hour + extra_hours
    end_minute = datetime.minute + extra_minutes
    # carry full hours out of the minute part
    end_hour += end_minute // 60
    end_minute %= 60
    return datetime.replace(hour=end_hour, minute=end_minute)
def getUserName(userID):
    """
    Resolves a Humhub user id to "firstname lastname".

    In offline mode a placeholder name is returned without touching the
    database.

    :param userID: Humhub user id
    :return: Full username (empty string if the user does not exist)
    :rtype: str
    """
    global offlinemode
    if offlinemode:
        return "<NAME>"
    # look the profile up in the Humhub database
    cnx = establishDBConnection(dbconfig)
    cursor = cnx.cursor()
    query = ("""SELECT firstname, lastname FROM profile WHERE user_id = {}
             """).format(userID)
    cursor.execute(query)
    username = ''
    for (first, last) in cursor:
        username = first + " " + last
    cnx.close()
    return username
def bookdate(cnx, datefrom, duration, users):
    """
    Books an appointment in the Humhub database.

    For every participant a calendar_entry plus the related
    user_message/content/activity/follow rows are inserted, mirroring what
    the Humhub calendar module would create.

    :param cnx: open MySQL connection (commits are issued per statement)
    :param datefrom: start datetime of the appointment
    :param duration: appointment length in minutes
    :param users: Humhub user ids taking part in the appointment
    :return: empty list (kept for caller compatibility)
    """
    # create calendar entry, duration in minutes
    cursor = cnx.cursor()
    # NOTE(review): datetimeNow already carries literal quotes AND is later
    # passed as a %s parameter — the stored value will contain the quotes.
    datetimeNow = "'" + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + "'"
    # compute the end time, carrying minute overflow into the hour
    # NOTE(review): hour may exceed 23 for late appointments, which would
    # make datetime.replace raise ValueError — confirm callers prevent this.
    if (datefrom.minute + duration) >= 60:
        dateto = datefrom.replace(
            hour=datefrom.hour + int((datefrom.minute + duration) / 60),
            minute=int(datefrom.minute + duration) % 60
        )
    else:
        dateto = datefrom.replace(minute=datefrom.minute + duration)
    # create one calendar entry for each user
    for user in users:
        # get user names for description except own id
        description = 'Termin mit '
        for user2 in users:
            if user is not user2:
                description += getUserName(user2) + ", "
        description = description[:-2]
        # get container id
        query = ("""SELECT `id` AS `cID` FROM `contentcontainer` WHERE
                 `class` = 'humhub\\\\modules\\\\user\\\\models\\\\User' AND
                 `pk` = %s AND `owner_user_id` = %s""")
        data = (user, user)
        cursor.execute(query, data)
        # NOTE(review): if the query yields no row, containerID stays
        # unbound (first iteration) or keeps the previous user's value.
        for cID in cursor:
            containerID = cID[0]
        # create entry
        # NOTE(review): description/date strings are interpolated via
        # str.format here (unlike the parameterized queries below).
        query = (("""INSERT INTO calendar_entry(title, description,
                 start_datetime, end_datetime, all_day, participation_mode,
                 color, allow_decline, allow_maybe, time_zone,
                 participant_info, closed) VALUES
                 ('Termin', '{}', {}, {}, 0, 2, '#59d6e4', 1, 1,
                 'Europe/Berlin', '', 0);""").format(
            description,
            str("'" + datefrom.strftime("%Y-%m-%d %H:%M:%S") + "'"),
            str("'" + dateto.strftime("%Y-%m-%d %H:%M:%S") + "'")))
        cursor.execute(query)
        cnx.commit()
        # get id of entry created
        calendarEntryID = cursor.lastrowid
        # insert activity
        query = ("""INSERT INTO `activity`
                 (`class`, `module`, `object_model`, `object_id`)
                 VALUES (%s, 'content', %s, %s);""")
        data = ('humhub\\modules\\content\\activities\\ContentCreated',
                'humhub\\modules\\calendar\\models\\CalendarEntry',
                calendarEntryID)
        cursor.execute(query, data)
        cnx.commit()
        # insert participation
        query = (("""INSERT INTO calendar_entry_participant
                 (calendar_entry_id, user_id, participation_state)
                 VALUES ({}, {}, 3);""").format(calendarEntryID, user))
        cursor.execute(query)
        cnx.commit()
        # content row that links the calendar entry to the user's container
        query = ("""INSERT INTO `content`
                 (`guid`, `object_model`, `object_id`, `visibility`, `pinned`,
                 `archived`, `created_at`, `created_by`, `updated_at`,
                 `updated_by`, `contentcontainer_id`, `stream_sort_date`,
                 `stream_channel`) VALUES
                 (%s, %s, %s, 1, 0, '0', %s, 5, %s, 5, %s, %s, 'default');""")
        data = (buildGUID(cnx),
                'humhub\\modules\\calendar\\models\\CalendarEntry',
                calendarEntryID,
                datetimeNow,
                datetimeNow,
                containerID,
                datetimeNow)
        cursor.execute(query, data)
        cnx.commit()
        # subscribe the user to notifications for the entry
        query = (("""INSERT INTO user_follow
                 (object_model, object_id, user_id, send_notifications)
                 VALUES (%s, %s, %s, 1);"""))
        data = ('humhub\\modules\\calendar\\models\\CalendarEntry',
                calendarEntryID,
                user)
        cursor.execute(query, data)
        cnx.commit()
        # "user attends" activity
        query = ("""INSERT INTO `activity`
                 (`class`, `module`, `object_model`, `object_id`)
                 VALUES (%s, 'calendar', %s, %s);""")
        data = ('humhub\\modules\\calendar\\activities\\ResponseAttend',
                'humhub\\modules\\calendar\\models\\CalendarEntry',
                calendarEntryID)
        cursor.execute(query, data)
        cnx.commit()
        activityID = cursor.lastrowid
        # content row for the activity itself
        query = ("""INSERT INTO `content`
                 (`guid`, `object_model`, `object_id`, `visibility`,
                 `pinned`, `archived`, `created_at`, `created_by`,
                 `updated_at`, `updated_by`, `contentcontainer_id`,
                 `stream_sort_date`, `stream_channel`) VALUES
                 (%s, %s, %s, 1, 0, '0', %s, 5, %s, 5, 5, %s,
                 'activity');""")
        data = (buildGUID(cnx),
                'humhub\\modules\\activity\\models\\Activity',
                activityID,
                datetimeNow,
                datetimeNow,
                datetimeNow)
        cursor.execute(query, data)
        cnx.commit()
    return []
def buildGUID(cnx):
    """
    Builds a random GUID that is not yet used in Humhub's `content` table.

    Random hex groups are generated until they form a valid GUID string and
    that GUID does not exist in the database yet.

    Fix vs. the previous version: ``os.urandom(...).encode('hex')`` only
    works on Python 2 (on Python 3 ``bytes`` has no ``encode`` and the call
    raised AttributeError); ``binascii.hexlify`` works on both.

    :param cnx: open MySQL connection
    :return: single-quoted GUID string, e.g. "'a1b2c3d4-...-...'"
    :rtype: str
    """
    import binascii  # local import: keeps the module's import block untouched

    unique = 0
    while (unique == 0):
        match = None
        while (match is None):
            hexstr = str(
                binascii.hexlify(os.urandom(4)).decode('ascii') +
                "-" + binascii.hexlify(os.urandom(2)).decode('ascii') +
                "-" + hex(random.randint(0, 0x0fff) | 0x4000)[2:] +
                "-" + hex(random.randint(0, 0x3fff) | 0x8000)[2:] +
                "-" + binascii.hexlify(os.urandom(6)).decode('ascii')
            )
            match = re.search(
                '[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}',
                hexstr
            )
        hexstr = "'" + hexstr + "'"
        # check if GUID is already used; retry with a fresh one if so
        cursor = cnx.cursor(buffered=True)
        query = "SELECT id FROM `content` WHERE guid = {}".format(hexstr)
        cursor.execute(query)
        if cursor.rowcount == 0:
            unique = 1
        else:
            unique = 0
    return hexstr
def searchCompetence(search, dictionary):
    """
    Returns the path for a competence from the searched (most specific)
    competence up to its most general parent category.

    :param search: competence name or synonym to look for
    :param dictionary: (nested) competence dictionary
    :return: list of competence names from specific to general
    :raises ValueError: if the competence is not found anywhere
    """
    needle = stemmer.stem(search.lower())
    for entry in dictionary:
        # direct match against the competence name
        direct_hit = (
            'competence' in entry and
            stemmer.stem(entry['competence']) == needle
        )
        # match against any of the listed synonyms
        synonym_hit = (
            'synonyms' in entry and
            needle in [stemmer.stem(syn) for syn in entry['synonyms']]
        )
        if direct_hit or synonym_hit:
            return [entry['competence']]
        # descend into subcategories; a miss there just moves on
        if 'subcategories' in entry:
            try:
                return (searchCompetence(search, entry['subcategories']) +
                        [entry['competence']])
            except ValueError:
                pass
    raise ValueError("Not found")
def getUserCompetencies(cnx, exceptUserIDs):
    """
    Returns a dict of persons ("firstname lastname") mapped to their
    competences (lower-cased, stripped).

    Fix vs. the previous version: ``cnx.close()`` was called BEFORE the
    cursor rows were iterated, so the result set was read from a closed
    connection; the connection is now closed only after consuming all rows.

    :param cnx: unused; a fresh connection is opened internally
                (kept for caller compatibility)
    :param exceptUserIDs: Humhub user ids to exclude from the result
    :return: mapping of full name -> list of competences
    :rtype: dict
    """
    competencies = {}
    cnx = establishDBConnection(dbconfig)
    cursor = cnx.cursor()
    # one %s placeholder per excluded user id
    placeholders = ', '.join('%s' for unused in exceptUserIDs)
    query = ("""SELECT firstname, lastname, competence FROM profile
             WHERE user_id NOT IN ({}) AND competence IS NOT NULL
             """).format(placeholders)
    cursor.execute(query, tuple(exceptUserIDs))
    for (firstname, lastname, competence) in cursor:
        competencies[firstname + " " + lastname] = (
            [comp.strip().lower() for comp in competence.split(',')]
        )
    # close only after all rows have been consumed
    cnx.close()
    return competencies
def getUsersWithCompetencies(categories, usercompetencies):
    """
    Finds the users that have one of the requested competences.

    The user->competences mapping is inverted into competence->users, then
    the requested categories are tried in order (from most specific to most
    general) and the first category with at least one user wins.

    :param categories: Needed competence categories, specific first
    :type categories: list
    :param usercompetencies: User names and their competences
    :type usercompetencies: dict
    :return: dict with the matched "competence" and its "users",
             or None if no category matched
    """
    # invert: user -> competences  ==>  competence -> users
    competence_to_users = {}
    for user, competence_list in usercompetencies.items():
        for competence in competence_list:
            competence_to_users.setdefault(competence, []).append(user)
    # first requested category that anybody offers wins
    for wanted in categories:
        if wanted in competence_to_users:
            return {
                "competence": wanted,
                "users": competence_to_users[wanted]
            }
    return None
def getMatchingCompetence(dictionary, lastmessage):
    """
    Scans a message for words that match a known competence or synonym.

    :param dictionary: (nested) competence dictionary
    :param lastmessage: message text to scan
    :return: matched words, lower-cased and stripped
    :rtype: list
    """
    # stem the full competence vocabulary once up front
    known = [stemmer.stem(comp) for comp in getAllCompetences(dictionary)]
    found = []
    for token in re.split('[ .!?]', lastmessage):
        cleaned = token.strip().lower()
        if stemmer.stem(cleaned) in known:
            found.append(cleaned)
    return found
def getAllCompetences(dictionary, competences=None):
    """
    Gets all competences and synonyms in the competence dictionary as one
    flat (non-hierarchical) list.

    Fix vs. the previous version: the accumulator used a MUTABLE DEFAULT
    ARGUMENT (``competences=[]``), so results accumulated across separate
    calls; the default is now None and a fresh list is created per call.

    :param dictionary: (nested) competence dictionary
    :param competences: accumulator used by the recursive calls; callers
                        normally omit it
    :return: flat list of competence names and synonyms
    :rtype: list
    """
    if competences is None:
        competences = []
    for competence in dictionary:
        competences.append(competence['competence'])
        if 'synonyms' in competence:
            for synonym in competence['synonyms']:
                competences.append(synonym)
        if 'subcategories' in competence:
            # recurse, sharing the same accumulator
            getAllCompetences(competence['subcategories'], competences)
    return competences
def getUserID(person):
    """
    Gets the Humhub User ID for a person's name.

    Accepts either just a lastname or "firstname lastname" (extra name
    parts beyond the second are ignored, as before).

    Fixes vs. the previous version: the name strings were interpolated
    unquoted into the SQL (invalid statements / SQL injection) — the
    queries are parameterized now; and ``userid`` was unbound (NameError)
    when no profile matched — None is returned in that case.

    :param person: Name of the person to get the Humhub User ID for
    :type person: str.
    :return: row tuple from the profile query (as before), 8 in offline
             mode, or None if no profile matched
    """
    firstname = ''
    lastname = ''
    parts = person.split()
    if len(parts) == 1:
        # only lastname
        lastname = person
    else:
        firstname = parts[0]
        lastname = parts[1]
    global offlinemode
    if offlinemode:
        return 8
    # search in humhub db
    cnx = establishDBConnection(dbconfig)
    cursor = cnx.cursor()
    if firstname == '':
        query = "SELECT user_id FROM profile WHERE lastname = %s"
        data = (lastname,)
    else:
        query = "SELECT user_id FROM profile WHERE firstname = %s AND lastname = %s"
        data = (firstname, lastname)
    cursor.execute(query, data)
    userid = None  # stays None when no row matches
    for user_id in cursor:
        userid = user_id
    cnx.close()
    return userid
| [
"logging.getLogger",
"re.split",
"datetime.datetime.replace",
"locale.setlocale",
"datetime.datetime.strptime",
"os.urandom",
"nltk.stem.snowball.SnowballStemmer",
"datetime.datetime.now",
"random.randint",
"re.search"
] | [((501, 526), 'nltk.stem.snowball.SnowballStemmer', 'SnowballStemmer', (['"""german"""'], {}), "('german')\n", (516, 526), False, 'from nltk.stem.snowball import SnowballStemmer\n'), ((536, 563), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (553, 563), False, 'import logging\n'), ((584, 629), 'locale.setlocale', 'locale.setlocale', (['locale.LC_ALL', '"""de_DE.utf8"""'], {}), "(locale.LC_ALL, 'de_DE.utf8')\n", (600, 629), False, 'import locale\n'), ((18782, 18835), 'datetime.datetime.strptime', 'datetime.strptime', (['datefrom', '"""%Y-%m-%dT%H:%M:%S.000Z"""'], {}), "(datefrom, '%Y-%m-%dT%H:%M:%S.000Z')\n", (18799, 18835), False, 'from datetime import datetime\n'), ((18847, 18898), 'datetime.datetime.strptime', 'datetime.strptime', (['dateto', '"""%Y-%m-%dT%H:%M:%S.000Z"""'], {}), "(dateto, '%Y-%m-%dT%H:%M:%S.000Z')\n", (18864, 18898), False, 'from datetime import datetime\n'), ((20640, 20694), 'datetime.datetime.replace', 'datetime.replace', ([], {'hour': 'newEndHour', 'minute': 'newEndMinute'}), '(hour=newEndHour, minute=newEndMinute)\n', (20656, 20694), False, 'from datetime import datetime\n'), ((30055, 30086), 're.split', 're.split', (['"""[ .!?]"""', 'lastmessage'], {}), "('[ .!?]', lastmessage)\n", (30063, 30086), False, 'import re\n'), ((12884, 12937), 'datetime.datetime.strptime', 'datetime.strptime', (['datefrom', '"""%Y-%m-%dT%H:%M:%S.000Z"""'], {}), "(datefrom, '%Y-%m-%dT%H:%M:%S.000Z')\n", (12901, 12937), False, 'from datetime import datetime\n'), ((12955, 13006), 'datetime.datetime.strptime', 'datetime.strptime', (['dateto', '"""%Y-%m-%dT%H:%M:%S.000Z"""'], {}), "(dateto, '%Y-%m-%dT%H:%M:%S.000Z')\n", (12972, 13006), False, 'from datetime import datetime\n'), ((26578, 26663), 're.search', 're.search', (['"""[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}"""', 'hexstr'], {}), "('[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}',\n hexstr)\n", (26587, 26663), False, 'import re\n'), ((21529, 
21543), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (21541, 21543), False, 'from datetime import datetime\n'), ((26516, 26529), 'os.urandom', 'os.urandom', (['(6)'], {}), '(6)\n', (26526, 26529), False, 'import os\n'), ((26452, 26476), 'random.randint', 'random.randint', (['(0)', '(16383)'], {}), '(0, 16383)\n', (26466, 26476), False, 'import random\n'), ((26384, 26407), 'random.randint', 'random.randint', (['(0)', '(4095)'], {}), '(0, 4095)\n', (26398, 26407), False, 'import random\n'), ((26328, 26341), 'os.urandom', 'os.urandom', (['(2)'], {}), '(2)\n', (26338, 26341), False, 'import os\n'), ((26276, 26289), 'os.urandom', 'os.urandom', (['(4)'], {}), '(4)\n', (26286, 26289), False, 'import os\n')] |
from pycocotools.coco import COCO
import json
with open('person_keypoints_val2017.json') as f:
data = json.load(f)
coco = COCO('person_keypoints_val2017.json')
def search(id):
for annotation in data['annotations']:
if annotation['image_id']==id:
print(annotation)
#print(data['images'])
keys = list(coco.imgs.keys())
img_idx = coco.imgs[keys[0]]['id']
print(img_idx)
ann_idx = coco.getAnnIds(imgIds=img_idx)
print(ann_idx[0].__class__)
annotations = coco.loadAnns(ann_idx)
for ann in annotations:
print(ann.get('num_keypoints', 0))
# image_ids = {}
# for anno in data['annotations']:
# if anno['image_id'] in image_ids.keys():
# image_ids[anno['image_id']] += 1
# else:
# image_ids[anno['image_id']] = 0
# for i in image_ids:
# if image_ids[i] > 1:
# print(i)
# search(8021) | [
"json.load",
"pycocotools.coco.COCO"
] | [((123, 160), 'pycocotools.coco.COCO', 'COCO', (['"""person_keypoints_val2017.json"""'], {}), "('person_keypoints_val2017.json')\n", (127, 160), False, 'from pycocotools.coco import COCO\n'), ((103, 115), 'json.load', 'json.load', (['f'], {}), '(f)\n', (112, 115), False, 'import json\n')] |
import discord
from discord.ext import commands
class System(commands.Cog):
    """Core cog: startup logging plus load/unload/reload of bot extensions."""

    def __init__(self, perceus):
        # the bot instance this cog is attached to
        self.perceus = perceus

    @commands.Cog.listener()
    async def on_ready(self):
        """Prints the bot's identity once the Discord connection is ready."""
        print('Logged in as: ')
        print(self.perceus.user.name)
        print(self.perceus.user.id)
        print('--------------')

    @commands.command()
    async def load(self, ctx, cogname):
        """Loads the extension ``cogs.<cogname>`` and confirms in chat."""
        self.perceus.load_extension(f'cogs.{cogname}')
        await ctx.send(f'Loaded {cogname}')

    @commands.command()
    async def unload(self, ctx, cogname):
        """Unloads ``cogs.<cogname>``; the System cog itself is protected."""
        if cogname in ['System']:
            return await ctx.send('You cannot unload system')
        self.perceus.unload_extension(f'cogs.{cogname}')
        await ctx.send(f'Unloaded {cogname}')

    @commands.command()
    async def reload(self, ctx, cogname):
        """Reloads ``cogs.<cogname>`` in place and confirms in chat."""
        self.perceus.reload_extension(f'cogs.{cogname}')
        await ctx.send(f'Reloaded {cogname}')
def setup(perceus):
    """discord.py extension entry point: registers the System cog."""
    perceus.add_cog(System(perceus))
"discord.ext.commands.Cog.listener",
"discord.ext.commands.command"
] | [((148, 171), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (169, 171), False, 'from discord.ext import commands\n'), ((346, 364), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (362, 364), False, 'from discord.ext import commands\n'), ((510, 528), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (526, 528), False, 'from discord.ext import commands\n'), ((777, 795), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (793, 795), False, 'from discord.ext import commands\n')] |
# -*- coding: utf-8 -*-
import argparse
import os
import sys
from base64 import b64encode
from datetime import datetime, timedelta
from cryptography import x509
from cryptography.hazmat.primitives import serialization, hashes
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives.hashes import SHA256
from cryptography.x509 import NameOID
__author__ = 'lundberg'
def main(args: argparse.Namespace):
    """
    Generates a 4096-bit RSA key and a self-signed X.509 certificate.

    Both artifacts are written to ``args.out`` (refusing to overwrite
    existing files and exiting with status 1) or, without ``--out``,
    printed to stdout. The certificate's base64-encoded SHA-256
    fingerprint is printed at the end either way.
    """
    # Generate key
    key = rsa.generate_private_key(public_exponent=65537, key_size=4096)
    # encrypt the key file only when a passphrase was supplied
    passphrase = serialization.NoEncryption()
    if args.passphrase is not None:
        passphrase = serialization.BestAvailableEncryption(args.passphrase.encode())
    private_bytes = key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.TraditionalOpenSSL,
        encryption_algorithm=passphrase,
    )
    # Write key
    if args.out is not None:
        key_path = f'{args.out}{os.sep}{args.common_name}.key'
        if os.path.exists(key_path):
            sys.stderr.write(f'{key_path} already exists\n')
            sys.exit(1)
        with open(key_path, 'wb') as f:
            f.write(private_bytes)
    else:
        # NOTE(review): writelines() with a plain str writes it char by
        # char; sys.stdout.write would be the idiomatic call (same output).
        sys.stdout.writelines(f'Private key for {args.common_name}:\n')
        sys.stdout.writelines(private_bytes.decode('utf-8'))
        sys.stdout.writelines('\n')
    # Various details about who we are. For a self-signed certificate the
    # subject and issuer are always the same.
    subject = issuer = x509.Name(
        [
            x509.NameAttribute(NameOID.COUNTRY_NAME, args.country),
            x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, args.province),
            x509.NameAttribute(NameOID.LOCALITY_NAME, args.locality),
            x509.NameAttribute(NameOID.ORGANIZATION_NAME, args.organization),
            x509.NameAttribute(NameOID.COMMON_NAME, args.common_name),
        ]
    )
    alt_names = [x509.DNSName(alt_name) for alt_name in args.alt_names]
    # build and self-sign the certificate
    # NOTE(review): datetime.utcnow() yields naive timestamps and is
    # deprecated in newer Python; the cryptography API accepts them here.
    cert = (
        x509.CertificateBuilder()
        .subject_name(subject)
        .issuer_name(issuer)
        .public_key(key.public_key())
        .serial_number(x509.random_serial_number())
        .not_valid_before(datetime.utcnow())
        .not_valid_after(datetime.utcnow() + timedelta(days=args.expires))
        .add_extension(
            x509.SubjectAlternativeName(alt_names),
            critical=False,
            # Sign our certificate with our private key
        )
        .sign(key, hashes.SHA256())
    )
    public_bytes = cert.public_bytes(serialization.Encoding.PEM)
    # Write certificate
    if args.out is not None:
        cert_path = f'{args.out}{os.sep}{args.common_name}.crt'
        if os.path.exists(cert_path):
            sys.stderr.write(f'{cert_path} already exists\n')
            sys.exit(1)
        with open(cert_path, 'wb') as f:
            f.write(public_bytes)
    else:
        sys.stdout.writelines(f'Certificate for {args.common_name}:\n')
        sys.stdout.writelines(public_bytes.decode('utf-8'))
        sys.stdout.writelines('\n')
    # Print additional info
    sys.stdout.writelines('cert#S256 fingerprint:\n')
    sys.stdout.writelines(b64encode(cert.fingerprint(algorithm=SHA256())).decode('utf-8'))
    sys.stdout.writelines('\n')
if __name__ == '__main__':
    # CLI: every option has a sensible default except the required
    # --common-name; --alt-names takes zero or more DNS names.
    parser = argparse.ArgumentParser(description='Generate key and cert')
    parser.add_argument('--country', '-c', default='SE', help='country (default: SE)', type=str)
    parser.add_argument('--province', '-p', default='Stockholm', help='province (default: Stockholm)', type=str)
    parser.add_argument('--locality', '-l', default='Stockholm', help='locality (default: Stockholm)', type=str)
    parser.add_argument('--organization', '-o', default='Sunet', help='organization (default: Sunet)', type=str)
    parser.add_argument('--common-name', '-cn', help='common name', type=str, required=True)
    parser.add_argument('--expires', '-e', default=365, help='expires in X days (default: 365)', type=int)
    parser.add_argument('--alt-names', help='alternative names', nargs='*', default=[], type=str)
    parser.add_argument('--passphrase', help='passphrase for key', nargs='?', default=None, type=str)
    parser.add_argument('--out', help='output directory', nargs='?', default=None, type=str)
    main(args=parser.parse_args())
| [
"os.path.exists",
"cryptography.x509.NameAttribute",
"cryptography.x509.random_serial_number",
"argparse.ArgumentParser",
"cryptography.x509.SubjectAlternativeName",
"datetime.datetime.utcnow",
"cryptography.x509.CertificateBuilder",
"cryptography.x509.DNSName",
"cryptography.hazmat.primitives.seria... | [((472, 534), 'cryptography.hazmat.primitives.asymmetric.rsa.generate_private_key', 'rsa.generate_private_key', ([], {'public_exponent': '(65537)', 'key_size': '(4096)'}), '(public_exponent=65537, key_size=4096)\n', (496, 534), False, 'from cryptography.hazmat.primitives.asymmetric import rsa\n'), ((552, 580), 'cryptography.hazmat.primitives.serialization.NoEncryption', 'serialization.NoEncryption', ([], {}), '()\n', (578, 580), False, 'from cryptography.hazmat.primitives import serialization, hashes\n'), ((3121, 3170), 'sys.stdout.writelines', 'sys.stdout.writelines', (['"""cert#S256 fingerprint:\n"""'], {}), "('cert#S256 fingerprint:\\n')\n", (3142, 3170), False, 'import sys\n'), ((3266, 3293), 'sys.stdout.writelines', 'sys.stdout.writelines', (['"""\n"""'], {}), "('\\n')\n", (3287, 3293), False, 'import sys\n'), ((3336, 3396), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Generate key and cert"""'}), "(description='Generate key and cert')\n", (3359, 3396), False, 'import argparse\n'), ((1016, 1040), 'os.path.exists', 'os.path.exists', (['key_path'], {}), '(key_path)\n', (1030, 1040), False, 'import os\n'), ((1220, 1283), 'sys.stdout.writelines', 'sys.stdout.writelines', (['f"""Private key for {args.common_name}:\n"""'], {}), "(f'Private key for {args.common_name}:\\n')\n", (1241, 1283), False, 'import sys\n'), ((1353, 1380), 'sys.stdout.writelines', 'sys.stdout.writelines', (['"""\n"""'], {}), "('\\n')\n", (1374, 1380), False, 'import sys\n'), ((1945, 1967), 'cryptography.x509.DNSName', 'x509.DNSName', (['alt_name'], {}), '(alt_name)\n', (1957, 1967), False, 'from cryptography import x509\n'), ((2506, 2521), 'cryptography.hazmat.primitives.hashes.SHA256', 'hashes.SHA256', ([], {}), '()\n', (2519, 2521), False, 'from cryptography.hazmat.primitives import serialization, hashes\n'), ((2722, 2747), 'os.path.exists', 'os.path.exists', (['cert_path'], {}), '(cert_path)\n', (2736, 2747), False, 
'import os\n'), ((2928, 2991), 'sys.stdout.writelines', 'sys.stdout.writelines', (['f"""Certificate for {args.common_name}:\n"""'], {}), "(f'Certificate for {args.common_name}:\\n')\n", (2949, 2991), False, 'import sys\n'), ((3060, 3087), 'sys.stdout.writelines', 'sys.stdout.writelines', (['"""\n"""'], {}), "('\\n')\n", (3081, 3087), False, 'import sys\n'), ((1054, 1102), 'sys.stderr.write', 'sys.stderr.write', (['f"""{key_path} already exists\n"""'], {}), "(f'{key_path} already exists\\n')\n", (1070, 1102), False, 'import sys\n'), ((1115, 1126), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1123, 1126), False, 'import sys\n'), ((1558, 1612), 'cryptography.x509.NameAttribute', 'x509.NameAttribute', (['NameOID.COUNTRY_NAME', 'args.country'], {}), '(NameOID.COUNTRY_NAME, args.country)\n', (1576, 1612), False, 'from cryptography import x509\n'), ((1626, 1691), 'cryptography.x509.NameAttribute', 'x509.NameAttribute', (['NameOID.STATE_OR_PROVINCE_NAME', 'args.province'], {}), '(NameOID.STATE_OR_PROVINCE_NAME, args.province)\n', (1644, 1691), False, 'from cryptography import x509\n'), ((1705, 1761), 'cryptography.x509.NameAttribute', 'x509.NameAttribute', (['NameOID.LOCALITY_NAME', 'args.locality'], {}), '(NameOID.LOCALITY_NAME, args.locality)\n', (1723, 1761), False, 'from cryptography import x509\n'), ((1775, 1839), 'cryptography.x509.NameAttribute', 'x509.NameAttribute', (['NameOID.ORGANIZATION_NAME', 'args.organization'], {}), '(NameOID.ORGANIZATION_NAME, args.organization)\n', (1793, 1839), False, 'from cryptography import x509\n'), ((1853, 1910), 'cryptography.x509.NameAttribute', 'x509.NameAttribute', (['NameOID.COMMON_NAME', 'args.common_name'], {}), '(NameOID.COMMON_NAME, args.common_name)\n', (1871, 1910), False, 'from cryptography import x509\n'), ((2761, 2810), 'sys.stderr.write', 'sys.stderr.write', (['f"""{cert_path} already exists\n"""'], {}), "(f'{cert_path} already exists\\n')\n", (2777, 2810), False, 'import sys\n'), ((2823, 2834), 'sys.exit', 
'sys.exit', (['(1)'], {}), '(1)\n', (2831, 2834), False, 'import sys\n'), ((2353, 2391), 'cryptography.x509.SubjectAlternativeName', 'x509.SubjectAlternativeName', (['alt_names'], {}), '(alt_names)\n', (2380, 2391), False, 'from cryptography import x509\n'), ((2267, 2284), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2282, 2284), False, 'from datetime import datetime, timedelta\n'), ((2287, 2315), 'datetime.timedelta', 'timedelta', ([], {'days': 'args.expires'}), '(days=args.expires)\n', (2296, 2315), False, 'from datetime import datetime, timedelta\n'), ((3234, 3242), 'cryptography.hazmat.primitives.hashes.SHA256', 'SHA256', ([], {}), '()\n', (3240, 3242), False, 'from cryptography.hazmat.primitives.hashes import SHA256\n'), ((2223, 2240), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2238, 2240), False, 'from datetime import datetime, timedelta\n'), ((2168, 2195), 'cryptography.x509.random_serial_number', 'x509.random_serial_number', ([], {}), '()\n', (2193, 2195), False, 'from cryptography import x509\n'), ((2021, 2046), 'cryptography.x509.CertificateBuilder', 'x509.CertificateBuilder', ([], {}), '()\n', (2044, 2046), False, 'from cryptography import x509\n')] |
from math import pi
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from scipy.optimize import minimize_scalar
__author__ = "<NAME>"
__credits__ = ["<NAME>"]
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__version__ = "0.1"
__license__ = "MIT"
# gravitational acceleration
g = 9.81 # m/s²
# kinematic viscosity
ny = 1.3e-6 # m^2/s (10°C water)
# _________________________________________________________________________________________________________________
def log_scale(start, end, minor=False, lower=None, upper=None):
"""
get the log scale ticks for the diagram
Args:
start (int):
end (int):
minor (bool):
lower (int | float):
upper (int | float):
Returns:
numpy.array: ticks of the scale
"""
if minor:
std = np.array([1., 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9, 2.,
2.2, 2.4, 2.6, 2.8, 3., 3.2, 3.4, 3.6, 3.8, 4., 4.2,
4.4, 4.6, 4.8, 5., 5.5, 6., 6.5, 7., 7.5, 8., 8.5,
9., 9.5, 10.])
else:
std = np.array([1., 1.5, 2., 3., 4., 5., 6., 8., 10.])
res = np.array([])
for x in range(start, end):
res = np.append(res, std * 10. ** x)
res = np.unique(res.round(3))
if lower is not None:
res = res[res >= lower]
if upper is not None:
res = res[res <= upper]
return res
def nomogram(k=0.1):
"""
make the nomogram
Args:
k (float): roughness in (mm)
Returns:
matplotlib.pyplot.Figure: of the plot
"""
# diameter
d = np.array(
[0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.1, 0.125, 0.15, 0.2, 0.25, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0,
1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9, 2.0]) # m
# velocity
v = np.array(
[0.1, 0.15, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.2, 1.4, 1.6, 1.8, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5.0,
6.0, 7.0, 8.0, 9.0, 10.0, 12.0, 14.0, 16.0, 18.0, 20.0]) # m/s
# head loss
J = log_scale(-1, 3, minor=True) # mm/m
J_labels = log_scale(-1, 3, minor=False)
# flow
Q = log_scale(-1, 5, minor=True, upper=20000) # L/s
Q_labels = log_scale(-1, 5, minor=False, upper=20000)
# _________________________________________________________________________________________________________________
def area(d):
return d ** 2 * pi / 4
# _________________________________________________________________________________________________________________
def velocity(J, d):
return -2 * np.log10(2.51 * ny / (d * np.sqrt(2 * g * (J / 1000) * d)) +
(k / 1000) / (3.71 * d)) * \
np.sqrt(2 * g * d * (J / 1000))
# _________________________________________________________________________________________________________________
def get_diameter(v, J):
res = minimize_scalar(lambda x: abs(velocity(J, x) - v), bounds=(min(d), max(d)), method='bounded').x
if (round(res, 5) >= max(d)) or (round(res, 5) <= min(d)):
return np.NaN
return res
# _________________________________________________________________________________________________________________
fig, ax = plt.subplots()
def bbox(pad):
return {'facecolor': 'white', 'alpha': 0.8, 'pad': pad, 'linewidth': 0}
# _________________________________________________________________________________________________________________
# diameter lines
df_d = pd.DataFrame(index=J, columns=d)
first = True
for d_ in df_d:
vi = velocity(df_d.index.values, d_)
df_d[d_] = area(d_) * vi * 1000
# change_d = 0.6
# low, up = [0.34, 5.4]
change_d = np.NaN
low, up = [2.2, 2.2]
if d_ == change_d:
tvs = [low, up]
elif d_ < change_d:
tvs = [low]
else:
tvs = [up]
for tv in tvs:
tx = np.interp(tv, vi, J)
ty = area(d_) * tv * 1000
if first or d_ in (change_d, max(d)):
txt = 'd={}m'.format(d_)
if first:
first = False
else:
txt = d_
ax.text(tx, ty, txt, fontsize=5, rotation=30, horizontalalignment='center', verticalalignment='bottom',
bbox=bbox(1))
ax = df_d.plot(c='black', legend=False, logy=True, logx=True, ax=ax, lw=0.5)
# _________________________________________________________________________________________________________________
# velocity lines
print('0')
df_v = pd.DataFrame(index=np.logspace(-1, 3, num=500), columns=v)
# df_v = pd.DataFrame(index=J, columns=v)
first = True
for v_ in df_v:
d_ = df_v.index.to_series().apply(lambda Ji: get_diameter(v_, Ji)).values
# d_ = np.array([get_d(v_, Ji) for Ji in df_v.index.values])
Ai = area(d_)
df_v[v_] = Ai * v_ * 1000
# change_v = 5.
# low, up = [0.043, 0.43]
change_v = 9.
low, up = [0.11, 0.43]
if v_ == change_v:
tds = [low, up]
elif v_ < change_v:
tds = [low]
else:
tds = [up]
for td in tds:
data = pd.DataFrame()
data['d'] = d_
data['J'] = df_v.index.values
data.dropna(inplace=True)
data.sort_values('d', inplace=True)
tx = np.interp(td, data['d'].values, data['J'].values)
ty = area(td) * v_ * 1000
if first or (v_ in (change_v, max(v))):
txt = 'v={}m/s'.format(v_).replace('.0', '')
if first:
first = False
else:
txt = v_
if pd.notna(tx) and pd.notna(ty):
ax.text(tx, ty, txt, fontsize=5, rotation=-60, horizontalalignment='center', verticalalignment='bottom',
bbox=bbox(1))
print('1')
ax = df_v.plot(c='black', legend=False, logy=True, logx=True, ax=ax, lw=0.5)
# _________________________________________________________________________________________________________________
ax.set_xticks(J, minor=True)
ax.set_yticks(Q, minor=True)
ax.set_xticks(J_labels, minor=False)
ax.set_yticks(Q_labels, minor=False)
ax.set_xticklabels([])
ax.set_yticklabels([])
ax.set_xticklabels([], minor=True)
ax.set_yticklabels([], minor=True)
ax.set_xticklabels([str(x).replace('.00', '').replace('.0', '') for x in J_labels], fontsize=6,
fontstretch='ultra-condensed')
ax.set_yticklabels([str(x).replace('.00', '').replace('.0', '') for x in Q_labels], fontsize=6)
ax.grid(linestyle=':', lw=0.2, c='grey', which='minor')
ax.grid(linestyle='-', lw=0.4, c='darkgrey')
ax.set_xlabel('Druckhöhengefälle J (mm/m)')
ax.set_ylabel('Durchfluss Q (l/s)')
ax.set_ylim([min(Q), max(Q)])
ax.set_xlim([min(J), max(J)])
ax.tick_params(direction='out', bottom=True, top=True, left=True, right=True, labelbottom=True, labeltop=True,
labelleft=True, labelright=True, which='both')
ax.text(0.15, 11000, 'k = {:0.01f} mm'.format(k), fontsize=22, fontstretch='ultra-condensed', bbox=bbox(5))
ax.text(340, 1.7, 'v (m/s)', fontsize=12, rotation=-60, bbox=bbox(2))
ax.text(300, 0.6, 'd (m)', fontsize=12, rotation=30, bbox=bbox(2))
# _________________________________________________________________________________________________________________
# figure post processing
fig.set_size_inches(h=29.7 / 2.54, w=21 / 2.54)
fig.tight_layout()
return fig
if __name__ == '__main__':
fig = nomogram()
k = 0.1 # mm
fig.savefig('Nomogramm k_{:0.1f}mm'.format(k).replace('.', '') + '.pdf')
plt.close(fig)
| [
"numpy.sqrt",
"numpy.append",
"numpy.array",
"matplotlib.pyplot.close",
"numpy.interp",
"pandas.DataFrame",
"numpy.logspace",
"matplotlib.pyplot.subplots",
"pandas.notna"
] | [((1168, 1180), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (1176, 1180), True, 'import numpy as np\n'), ((1620, 1801), 'numpy.array', 'np.array', (['[0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.1, 0.125, 0.15, 0.2, 0.25, 0.3,\n 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, \n 1.8, 1.9, 2.0]'], {}), '([0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.1, 0.125, 0.15, 0.2, \n 0.25, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3, 1.4, 1.5, \n 1.6, 1.7, 1.8, 1.9, 2.0])\n', (1628, 1801), True, 'import numpy as np\n'), ((1839, 2025), 'numpy.array', 'np.array', (['[0.1, 0.15, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.2, 1.4, 1.6, 1.8,\n 2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 12.0, 14.0,\n 16.0, 18.0, 20.0]'], {}), '([0.1, 0.15, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.2, 1.4,\n 1.6, 1.8, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, \n 12.0, 14.0, 16.0, 18.0, 20.0])\n', (1847, 2025), True, 'import numpy as np\n'), ((3282, 3296), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (3294, 3296), True, 'import matplotlib.pyplot as plt\n'), ((3550, 3582), 'pandas.DataFrame', 'pd.DataFrame', ([], {'index': 'J', 'columns': 'd'}), '(index=J, columns=d)\n', (3562, 3582), True, 'import pandas as pd\n'), ((7862, 7876), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (7871, 7876), True, 'import matplotlib.pyplot as plt\n'), ((830, 1031), 'numpy.array', 'np.array', (['[1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9, 2.0, 2.2, 2.4, 2.6, 2.8,\n 3.0, 3.2, 3.4, 3.6, 3.8, 4.0, 4.2, 4.4, 4.6, 4.8, 5.0, 5.5, 6.0, 6.5, \n 7.0, 7.5, 8.0, 8.5, 9.0, 9.5, 10.0]'], {}), '([1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9, 2.0, 2.2, 2.4, \n 2.6, 2.8, 3.0, 3.2, 3.4, 3.6, 3.8, 4.0, 4.2, 4.4, 4.6, 4.8, 5.0, 5.5, \n 6.0, 6.5, 7.0, 7.5, 8.0, 8.5, 9.0, 9.5, 10.0])\n', (838, 1031), True, 'import numpy as np\n'), ((1108, 1164), 'numpy.array', 'np.array', (['[1.0, 1.5, 2.0, 3.0, 4.0, 5.0, 
6.0, 8.0, 10.0]'], {}), '([1.0, 1.5, 2.0, 3.0, 4.0, 5.0, 6.0, 8.0, 10.0])\n', (1116, 1164), True, 'import numpy as np\n'), ((1227, 1258), 'numpy.append', 'np.append', (['res', '(std * 10.0 ** x)'], {}), '(res, std * 10.0 ** x)\n', (1236, 1258), True, 'import numpy as np\n'), ((2744, 2775), 'numpy.sqrt', 'np.sqrt', (['(2 * g * d * (J / 1000))'], {}), '(2 * g * d * (J / 1000))\n', (2751, 2775), True, 'import numpy as np\n'), ((4004, 4024), 'numpy.interp', 'np.interp', (['tv', 'vi', 'J'], {}), '(tv, vi, J)\n', (4013, 4024), True, 'import numpy as np\n'), ((4678, 4705), 'numpy.logspace', 'np.logspace', (['(-1)', '(3)'], {'num': '(500)'}), '(-1, 3, num=500)\n', (4689, 4705), True, 'import numpy as np\n'), ((5310, 5324), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (5322, 5324), True, 'import pandas as pd\n'), ((5498, 5547), 'numpy.interp', 'np.interp', (['td', "data['d'].values", "data['J'].values"], {}), "(td, data['d'].values, data['J'].values)\n", (5507, 5547), True, 'import numpy as np\n'), ((5819, 5831), 'pandas.notna', 'pd.notna', (['tx'], {}), '(tx)\n', (5827, 5831), True, 'import pandas as pd\n'), ((5836, 5848), 'pandas.notna', 'pd.notna', (['ty'], {}), '(ty)\n', (5844, 5848), True, 'import pandas as pd\n'), ((2636, 2667), 'numpy.sqrt', 'np.sqrt', (['(2 * g * (J / 1000) * d)'], {}), '(2 * g * (J / 1000) * d)\n', (2643, 2667), True, 'import numpy as np\n')] |
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APIClient, APITestCase
from bbbs.afisha.factories import EventFactory
from bbbs.afisha.models import EventParticipant
from bbbs.common.factories import CityFactory
from bbbs.users.factories import UserFactory
from bbbs.users.models import Profile
class AfishaURLTests(APITestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.city = CityFactory(name="Воркута")
cls.mentor = UserFactory(
profile__role=Profile.Role.MENTOR,
profile__city=cls.city,
)
cls.moderator_reg = UserFactory(
profile__role=Profile.Role.MODERATOR_REG,
profile__city=cls.city,
)
cls.moderator_gen = UserFactory(
profile__role=Profile.Role.MODERATOR_GEN,
profile__city=cls.city,
)
cls.admin = UserFactory(
profile__role=Profile.Role.ADMIN,
profile__city=cls.city,
)
cls.users = [
cls.mentor,
cls.moderator_reg,
cls.moderator_gen,
cls.admin,
]
cls.event = EventFactory(
city=cls.mentor.profile.city,
)
cls.booking = EventParticipant.objects.create(
user=cls.mentor,
event=cls.event,
)
cls.unauthorized_client = APIClient()
cls.path_events_participants = reverse("event-participants-list")
cls.path_individual_booking = reverse(
"event-participants-detail",
args=[cls.mentor.profile.id],
)
cls.path_events = reverse("events")
def return_authorized_user_client(self, user):
authorized_client = APIClient()
authorized_client.force_authenticate(user=user)
return authorized_client
def url_returns_405_not_allowed_test_utility(
self, client, url, method_names
):
"""Helper. Tests "url" for not allowed methods.
It translates "methods_names" to correspond methods on "client" and
asserts when error different from 405 (not allowed) returns.
"""
for method_name in method_names:
with self.subTest(method_name):
method = getattr(client, method_name)
response = method(url)
self.assertEqual(
response.status_code,
status.HTTP_405_METHOD_NOT_ALLOWED,
msg=(
f"Убедитесь, что для '{url}' "
f"метод '{method_name}' запрещен и возвращает "
f"правильный номер ошибки."
),
)
def url_returns_404_not_found_test_utility(
self, client, url, method_names
):
"""Helper. Tests "url" for 404 with provided methods.
It translates "methods_names" to correspond methods on "client" and
asserts when error different from 404 (not found) returns.
"""
for method_name in method_names:
with self.subTest(method_name):
method = getattr(client, method_name)
response = method(url)
self.assertEqual(
response.status_code,
status.HTTP_404_NOT_FOUND,
msg=(
f"Убедитесь, для индивидуальных URL, таких как"
f"'{url}' при запросе методом '{method_name}'"
f"возвращается ошибка 404"
),
)
def test_events_unauthorized_client(self):
"""Unauthorized client gets 401 error on 'events' url."""
client = AfishaURLTests.unauthorized_client
response = client.get(AfishaURLTests.path_events)
self.assertEqual(
response.status_code,
status.HTTP_401_UNAUTHORIZED,
msg=(
f"Проверьте что неавторизованный пользователь не имеет "
f"доступ к '{AfishaURLTests.path_events}'."
),
)
def test_events_participants_unauthorized_client(self):
"""Unauthorized client gets 401 error on 'event-participants' url."""
client = AfishaURLTests.unauthorized_client
response = client.get(AfishaURLTests.path_events_participants)
self.assertEqual(
response.status_code,
status.HTTP_401_UNAUTHORIZED,
msg=(
f"Проверьте что неавторизованный пользователь не имеет доступ "
f"к '{AfishaURLTests.path_events_participants}'."
),
)
def test_events_mentor_has_access(self):
"""Mentor gets response with 200 code on 'events'."""
user = AfishaURLTests.mentor
client = self.return_authorized_user_client(user=user)
response = client.get(AfishaURLTests.path_events)
self.assertEqual(
response.status_code,
status.HTTP_200_OK,
msg=(
f"Проверьте что пользователь с ролью "
f"'{user.profile.role}' "
f"имеет доступ к "
f"'{AfishaURLTests.path_events_participants}'."
),
)
def test_event_participants_mentor_has_access(self):
"""Mentor gets response with 200 code on 'events_participants'."""
user = AfishaURLTests.mentor
client = self.return_authorized_user_client(user=user)
response = client.get(AfishaURLTests.path_events_participants)
self.assertEqual(
response.status_code,
status.HTTP_200_OK,
msg=(
f"Проверьте что пользователь с ролью "
f"'{user.profile.role}' "
f"имеет доступ к "
f"'{AfishaURLTests.path_events_participants}'."
),
)
def test_events_individual_urls_return_404(self):
"""URLs like '/events/{id}' should return 404 for tested methods."""
methods_to_test = [
"get",
"patch",
"post",
"put",
"delete",
]
event_id = AfishaURLTests.event.id
individual_event_url = AfishaURLTests.path_events + str(event_id)
client = self.return_authorized_user_client(AfishaURLTests.mentor)
self.url_returns_404_not_found_test_utility(
client=client,
url=individual_event_url,
method_names=methods_to_test,
)
def test_events_participants_individual_urls_return_405(self):
"""URLs like '/events-participants/{id}' should return 405.
HTTP_405_METHOD_NOT_ALLOWED should be returned only for methods in
'not_allowed_method_names' list.
"""
not_allowed_method_names = [
"get",
"patch",
"post",
"put",
]
individual_booking_url = AfishaURLTests.path_individual_booking
client = self.return_authorized_user_client(AfishaURLTests.mentor)
self.url_returns_405_not_allowed_test_utility(
client=client,
url=individual_booking_url,
method_names=not_allowed_method_names,
)
def test_events_list_url_returns_405(self):
"""URL '/events/' should return 405.
HTTP_405_METHOD_NOT_ALLOWED should be returned only for methods in
'not_allowed_method_names' list.
"""
not_allowed_method_names = [
"patch",
"post",
"put",
"delete",
]
events_url = AfishaURLTests.path_events
client = self.return_authorized_user_client(AfishaURLTests.mentor)
self.url_returns_405_not_allowed_test_utility(
client=client,
url=events_url,
method_names=not_allowed_method_names,
)
def test_events_participants_list_returns_405(self):
"""URL '/events_participants/' should return 405.
HTTP_405_METHOD_NOT_ALLOWED should be returned only for methods in
'not_allowed_method_names' list.
"""
not_allowed_method_names = [
"patch",
"put",
"delete",
]
events_participants_url = AfishaURLTests.path_events_participants
client = self.return_authorized_user_client(AfishaURLTests.mentor)
self.url_returns_405_not_allowed_test_utility(
client=client,
url=events_participants_url,
method_names=not_allowed_method_names,
)
| [
"bbbs.common.factories.CityFactory",
"bbbs.users.factories.UserFactory",
"rest_framework.test.APIClient",
"bbbs.afisha.models.EventParticipant.objects.create",
"django.urls.reverse",
"bbbs.afisha.factories.EventFactory"
] | [((488, 515), 'bbbs.common.factories.CityFactory', 'CityFactory', ([], {'name': '"""Воркута"""'}), "(name='Воркута')\n", (499, 515), False, 'from bbbs.common.factories import CityFactory\n'), ((538, 608), 'bbbs.users.factories.UserFactory', 'UserFactory', ([], {'profile__role': 'Profile.Role.MENTOR', 'profile__city': 'cls.city'}), '(profile__role=Profile.Role.MENTOR, profile__city=cls.city)\n', (549, 608), False, 'from bbbs.users.factories import UserFactory\n'), ((676, 753), 'bbbs.users.factories.UserFactory', 'UserFactory', ([], {'profile__role': 'Profile.Role.MODERATOR_REG', 'profile__city': 'cls.city'}), '(profile__role=Profile.Role.MODERATOR_REG, profile__city=cls.city)\n', (687, 753), False, 'from bbbs.users.factories import UserFactory\n'), ((821, 898), 'bbbs.users.factories.UserFactory', 'UserFactory', ([], {'profile__role': 'Profile.Role.MODERATOR_GEN', 'profile__city': 'cls.city'}), '(profile__role=Profile.Role.MODERATOR_GEN, profile__city=cls.city)\n', (832, 898), False, 'from bbbs.users.factories import UserFactory\n'), ((958, 1027), 'bbbs.users.factories.UserFactory', 'UserFactory', ([], {'profile__role': 'Profile.Role.ADMIN', 'profile__city': 'cls.city'}), '(profile__role=Profile.Role.ADMIN, profile__city=cls.city)\n', (969, 1027), False, 'from bbbs.users.factories import UserFactory\n'), ((1234, 1276), 'bbbs.afisha.factories.EventFactory', 'EventFactory', ([], {'city': 'cls.mentor.profile.city'}), '(city=cls.mentor.profile.city)\n', (1246, 1276), False, 'from bbbs.afisha.factories import EventFactory\n'), ((1325, 1390), 'bbbs.afisha.models.EventParticipant.objects.create', 'EventParticipant.objects.create', ([], {'user': 'cls.mentor', 'event': 'cls.event'}), '(user=cls.mentor, event=cls.event)\n', (1356, 1390), False, 'from bbbs.afisha.models import EventParticipant\n'), ((1466, 1477), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (1475, 1477), False, 'from rest_framework.test import APIClient, APITestCase\n'), ((1518, 1552), 
'django.urls.reverse', 'reverse', (['"""event-participants-list"""'], {}), "('event-participants-list')\n", (1525, 1552), False, 'from django.urls import reverse\n'), ((1592, 1658), 'django.urls.reverse', 'reverse', (['"""event-participants-detail"""'], {'args': '[cls.mentor.profile.id]'}), "('event-participants-detail', args=[cls.mentor.profile.id])\n", (1599, 1658), False, 'from django.urls import reverse\n'), ((1724, 1741), 'django.urls.reverse', 'reverse', (['"""events"""'], {}), "('events')\n", (1731, 1741), False, 'from django.urls import reverse\n'), ((1825, 1836), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (1834, 1836), False, 'from rest_framework.test import APIClient, APITestCase\n')] |
#!/usr/bin/env python2.7
import pdb_structure
import sys
import os.path
if __name__ == "__main__":
if len(sys.argv) != 2:
print("Usage: "+sys.argv[0]+" <pdb-file>")
sys.exit(1)
pdbFile = sys.argv[1]
struc = pdb_structure.PDBFile(pdbFile)
name = os.path.basename(pdbFile).replace(".pdb", "")
from collections import defaultdict
chain_map = defaultdict(list)
for a in struc.models[0]:
# if not a.hetatm:
chain_map[a.chain].append(a)
protresnmap = {'ALA': 'A',
'ARG': 'R',
'ASN': 'N',
'ASP': 'D',
'ASX': 'B',
'CYS': 'C',
'GLU': 'E',
'GLN': 'Q',
'GLX': 'Z',
'GLY': 'G',
'HIS': 'H',
'ILE': 'I',
'LEU': 'L',
'LYS': 'K',
'MET': 'M',
'PHE': 'F',
'PRO': 'P',
'SER': 'S',
'THR': 'T',
'TRP': 'W',
'TYR': 'Y',
'VAL': 'V',
'A': 'A',
'C': 'C',
'G': 'G',
'U': 'U',
'T': 'T'
}
for c in chain_map:
print(">"+name+"_"+c)
resi_map = {a.resi: protresnmap[a.resn] for a in chain_map[c]}
seq = []
for r in range(min(resi_map.keys()), max(resi_map.keys())+1):
seq.append("-" if r not in resi_map else resi_map[r])
print("".join(seq))
| [
"pdb_structure.PDBFile",
"collections.defaultdict",
"sys.exit"
] | [((241, 271), 'pdb_structure.PDBFile', 'pdb_structure.PDBFile', (['pdbFile'], {}), '(pdbFile)\n', (262, 271), False, 'import pdb_structure\n'), ((388, 405), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (399, 405), False, 'from collections import defaultdict\n'), ((186, 197), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (194, 197), False, 'import sys\n')] |
# Создайте модель мероприятия для сайта-афиши.
# У модели должны быть такие поля:
# Название мероприятия (name), не больше 200 символов
# Дата и время проведения мероприятия (start_at)
# Описание мероприятия (description)
# Адрес электронной почты организатора мероприятия (contact)
# Пользователь, который создал мероприятие (author,
# related_name этого поля должно быть events)
# Название места проведения мероприятия (location), не более 400 символов
from django.db import models
from django.contrib.auth import get_user_model
class Event(models.Model):
name = models.CharField(max_length=200)
start_at = models.DateTimeField('event published', auto_now_add=True)
description = models.TextField
contact = models.EmailField()
author = models.ForeignKey(get_user_model(), on_delete=models.CASCADE,
related_name="event_author")
location = models.CharField(max_length=400)
| [
"django.db.models.DateTimeField",
"django.db.models.EmailField",
"django.contrib.auth.get_user_model",
"django.db.models.CharField"
] | [((571, 603), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (587, 603), False, 'from django.db import models\n'), ((619, 677), 'django.db.models.DateTimeField', 'models.DateTimeField', (['"""event published"""'], {'auto_now_add': '(True)'}), "('event published', auto_now_add=True)\n", (639, 677), False, 'from django.db import models\n'), ((727, 746), 'django.db.models.EmailField', 'models.EmailField', ([], {}), '()\n', (744, 746), False, 'from django.db import models\n'), ((897, 929), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(400)'}), '(max_length=400)\n', (913, 929), False, 'from django.db import models\n'), ((778, 794), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (792, 794), False, 'from django.contrib.auth import get_user_model\n')] |
from __future__ import division, print_function
import os
import os.path as fs
import numpy as np
import pandas as pd
import re
### PURPOSE: Takes a directory containing N files of the form mXXXXXX.ovf ###
### and imports them to an N x X x Y x Z x 3 numpy array ###
### where X,Y,Z are the number of cells in x,y,z ###
### Files will have the naming convention m*.ovf where * is 6 digit decimal number ###
### eg. 000000, 000001, 000123, etc ###
### So use regex to find something of the form m/<number>*/.ovf ###
def import_dir(path='.', which_files='all', skyrmion=False, core_slice='h', average=True):
#default path is this folder
#which files gives a range of files (default to all in dir)
ls = sorted(os.listdir(path)) #list and sort all files in given path
magnetisation_files=[] #init list of filenames in this dir
for el in ls: #test the regex for magnetisation file format, if found add to filename list
if re.match('m\d*\.ovf' ,el) is not None:
magnetisation_files.append(el)
file_name=fs.join(path, str(magnetisation_files[0])) #creates the filename for the first mag field
data_dimensions=getOvfAttributes(file_name) #gets the file attributes x,y,z nodes (eg 2x2x128)
num_files_to_import=len(magnetisation_files)
if which_files!='all':
print("not importing all files, importing files ",which_files[0], " to ", which_files[1])
num_files_to_import=which_files[1]-which_files[0]
all_mag_data=np.empty((num_files_to_import, data_dimensions[2]), dtype=(float, 3) )
i=0
first_time=True
percentages=[]
for n, fname in enumerate(magnetisation_files):
if which_files!='all':
if n<which_files[0]:
continue
if n>=which_files[1]:
break
if first_time:
print("starting to read ",num_files_to_import," files")
first_time=False
this_filename=fs.join(path, fname)
all_mag_data[i]=importOvfFilePandas(this_filename, data_dimensions, core_slice=core_slice, skyrmion=skyrmion, average_to_1D=average)
if i/num_files_to_import*100%10<0.2:
if np.floor(i*100/num_files_to_import) not in percentages:
print(np.floor(i*100.0/num_files_to_import),"% done")
percentages.append(np.floor(i*100/num_files_to_import))
i+=1
#print data_array.shape
print("100% done!")
print("read ",i," files")
return all_mag_data
def getOvfAttributes(filename):
if filename[-4:]!='.ovf': #if filetype is not ovf, exit with error code 1
print("FATAL ERROR, NOT AN OVF FILE")
return -1
f=open(filename, 'r')
j=0
for line in f:
if re.match('.*Binary.*', line) is not None: #if the data type is a binary, just exit with error code -2
print("FATAL ERROR: BINARY NOT SUPPORTED")
return -2
if j==20:
x_nodes=int(line[10:])
if j==21:
y_nodes=int(line[10:])
if j==22:
z_nodes=int(line[10:])
break
#print (str(j)+'\t'+str(line))
j+=1
f.close()
return(x_nodes, y_nodes, z_nodes)
# takes filename, imports ovf as pandas dataframe, takes data dimensions in (x,y,z) nodes format
def importOvfFilePandas(this_filename, data_dims, average_to_1D=False, skyrmion=False, core_slice='h'):
ave_axis=None
raw_data=pd.read_csv(this_filename, header=None, skiprows=28, skipfooter=2, delimiter="\s+")
magnetisation_array=np.reshape(raw_data.as_matrix(), np.append(data_dims[::-1],3))
if skyrmion:
m1=int(data_dims[1]/2-1)
m2=int(data_dims[1]/2+1)
if core_slice=='h':
magnetisation_array=magnetisation_array[:,m1:m2,:]
ave_axis=1
elif core_slice=='v':
magnetisation_array=magnetisation_array[:,:,m1:m2]
ave_axis=2
if average_to_1D:
magnetisation_array=np.mean(magnetisation_array, axis=ave_axis)
magnetisation_array=np.mean(magnetisation_array, axis=0)
elif average_to_1D:
for i in [1,2]:
magnetisation_array=np.mean(magnetisation_array, axis=1)
#print(magnetisation_array.shape)
return magnetisation_array
if __name__=="__main__":
#test=importOvfFilePandas('/home/michael/Desktop/Honours/MuMax3/DataProcessing/SkyrmionData/ovfimporttest/m000035.ovf', (128,128,1), skyrmion=True, h_core_slice=True, average_to_1D=True)
test=import_dir('/home/michael/Desktop/Honours/MuMax3/DataProcessing/HelicoidData/helicoidv8_mid.out/')
#test=importOvfFilePandas('/home/michael/Desktop/Honours/MuMax3/DataProcessing/SkyrmionData/ovfimporttest/m000035.ovf', (128,128,1), skyrmion=True, v_core_slice=True, average_to_1D=True)
#test=importOvfFilePandas('/home/michael/Desktop/Honours/MuMax3/DataProcessing/HelicoidData/helicoidv6.out/m000035.ovf', (2,2,128), average_to_1D=True)
| [
"numpy.mean",
"os.listdir",
"pandas.read_csv",
"os.path.join",
"re.match",
"numpy.floor",
"numpy.append",
"numpy.empty"
] | [((1449, 1518), 'numpy.empty', 'np.empty', (['(num_files_to_import, data_dimensions[2])'], {'dtype': '(float, 3)'}), '((num_files_to_import, data_dimensions[2]), dtype=(float, 3))\n', (1457, 1518), True, 'import numpy as np\n'), ((3116, 3204), 'pandas.read_csv', 'pd.read_csv', (['this_filename'], {'header': 'None', 'skiprows': '(28)', 'skipfooter': '(2)', 'delimiter': '"""\\\\s+"""'}), "(this_filename, header=None, skiprows=28, skipfooter=2,\n delimiter='\\\\s+')\n", (3127, 3204), True, 'import pandas as pd\n'), ((730, 746), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (740, 746), False, 'import os\n'), ((1817, 1837), 'os.path.join', 'fs.join', (['path', 'fname'], {}), '(path, fname)\n', (1824, 1837), True, 'import os.path as fs\n'), ((3254, 3283), 'numpy.append', 'np.append', (['data_dims[::-1]', '(3)'], {}), '(data_dims[::-1], 3)\n', (3263, 3283), True, 'import numpy as np\n'), ((945, 972), 're.match', 're.match', (['"""m\\\\d*\\\\.ovf"""', 'el'], {}), "('m\\\\d*\\\\.ovf', el)\n", (953, 972), False, 'import re\n'), ((2514, 2542), 're.match', 're.match', (['""".*Binary.*"""', 'line'], {}), "('.*Binary.*', line)\n", (2522, 2542), False, 'import re\n'), ((3583, 3626), 'numpy.mean', 'np.mean', (['magnetisation_array'], {'axis': 'ave_axis'}), '(magnetisation_array, axis=ave_axis)\n', (3590, 3626), True, 'import numpy as np\n'), ((3650, 3686), 'numpy.mean', 'np.mean', (['magnetisation_array'], {'axis': '(0)'}), '(magnetisation_array, axis=0)\n', (3657, 3686), True, 'import numpy as np\n'), ((2022, 2061), 'numpy.floor', 'np.floor', (['(i * 100 / num_files_to_import)'], {}), '(i * 100 / num_files_to_import)\n', (2030, 2061), True, 'import numpy as np\n'), ((3750, 3786), 'numpy.mean', 'np.mean', (['magnetisation_array'], {'axis': '(1)'}), '(magnetisation_array, axis=1)\n', (3757, 3786), True, 'import numpy as np\n'), ((2088, 2129), 'numpy.floor', 'np.floor', (['(i * 100.0 / num_files_to_import)'], {}), '(i * 100.0 / num_files_to_import)\n', (2096, 2129), 
True, 'import numpy as np\n'), ((2160, 2199), 'numpy.floor', 'np.floor', (['(i * 100 / num_files_to_import)'], {}), '(i * 100 / num_files_to_import)\n', (2168, 2199), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
"""
-------------------------------------------------
File Name: proxyFetcher
Description :
Author : JHao
date: 2016/11/25
-------------------------------------------------
Change Activity:
2016/11/25: proxyFetcher
-------------------------------------------------
"""
__author__ = 'JHao'
import re
import requests
from time import sleep
from datetime import date, timedelta
import pandas as pd
import os
from util.webRequest import WebRequest
class ProxyFetcher(object):
"""
proxy getter
"""
@staticmethod
def freeProxy01():
"""
米扑代理 https://proxy.mimvp.com/
:return:
"""
url_list = [
'https://proxy.mimvp.com/freeopen',
'https://proxy.mimvp.com/freeopen?proxy=in_tp'
]
port_img_map = {'DMxMjg': '3128', 'Dgw': '80', 'DgwODA': '8080',
'DgwOA': '808', 'DgwMDA': '8000', 'Dg4ODg': '8888',
'DgwODE': '8081', 'Dk5OTk': '9999'}
for url in url_list:
html_tree = WebRequest().get(url).tree
for tr in html_tree.xpath(".//table[@class='mimvp-tbl free-proxylist-tbl']/tbody/tr"):
try:
ip = ''.join(tr.xpath('./td[2]/text()'))
port_img = ''.join(tr.xpath('./td[3]/img/@src')).split("port=")[-1]
port = port_img_map.get(port_img[14:].replace('O0O', ''))
if port:
yield '%s:%s' % (ip, port)
except Exception as e:
print(e)
@staticmethod
def freeProxy02():
"""
代理66 http://www.66ip.cn/
:return:
"""
url = "http://www.66ip.cn/mo.php"
resp = WebRequest().get(url, timeout=10)
proxies = re.findall(r'(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d{1,5})', resp.text)
for proxy in proxies:
yield proxy
@staticmethod
def freeProxy03():
"""
pzzqz https://pzzqz.com/
"""
from requests import Session
from lxml import etree
session = Session()
try:
index_resp = session.get("https://pzzqz.com/", timeout=20, verify=False).text
x_csrf_token = re.findall('X-CSRFToken": "(.*?)"', index_resp)
if x_csrf_token:
data = {"http": "on", "ping": "3000", "country": "cn", "ports": ""}
proxy_resp = session.post("https://pzzqz.com/", verify=False,
headers={"X-CSRFToken": x_csrf_token[0]}, json=data).json()
tree = etree.HTML(proxy_resp["proxy_html"])
for tr in tree.xpath("//tr"):
ip = "".join(tr.xpath("./td[1]/text()"))
port = "".join(tr.xpath("./td[2]/text()"))
yield "%s:%s" % (ip, port)
except Exception as e:
print(e)
@staticmethod
def freeProxy04():
"""
神鸡代理 http://www.shenjidaili.com/
:return:
"""
url = "http://www.shenjidaili.com/product/open/"
tree = WebRequest().get(url).tree
for table in tree.xpath("//table[@class='table table-hover text-white text-center table-borderless']"):
for tr in table.xpath("./tr")[1:]:
proxy = ''.join(tr.xpath("./td[1]/text()"))
yield proxy.strip()
@staticmethod
def freeProxy05(page_count=1):
"""
快代理 https://www.kuaidaili.com
"""
url_pattern = [
'https://www.kuaidaili.com/free/inha/{}/',
'https://www.kuaidaili.com/free/intr/{}/'
]
url_list = []
for page_index in range(1, page_count + 1):
for pattern in url_pattern:
url_list.append(pattern.format(page_index))
for url in url_list:
tree = WebRequest().get(url).tree
proxy_list = tree.xpath('.//table//tr')
sleep(1) # 必须sleep 不然第二条请求不到数据
for tr in proxy_list[1:]:
yield ':'.join(tr.xpath('./td/text()')[0:2])
@staticmethod
def freeProxy06(page=2):
"""
极速代理 https://www.superfastip.com/
:return:
"""
url = "https://api.superfastip.com/ip/freeip?page={page}"
for i in range(page):
page_url = url.format(page=i + 1)
try:
resp_json = WebRequest().get(page_url).json
for each in resp_json.get("freeips", []):
yield "%s:%s" % (each.get("ip", ""), each.get("port", ""))
except Exception as e:
print(e)
@staticmethod
def freeProxy07():
"""
云代理 http://www.ip3366.net/free/
:return:
"""
urls = ['http://www.ip3366.net/free/?stype=1',
"http://www.ip3366.net/free/?stype=2"]
for url in urls:
r = WebRequest().get(url, timeout=10)
proxies = re.findall(r'<td>(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})</td>[\s\S]*?<td>(\d+)</td>', r.text)
for proxy in proxies:
yield ":".join(proxy)
@staticmethod
def freeProxy08():
"""
小幻代理 https://ip.ihuan.me/
:return:
"""
urls = [
'https://ip.ihuan.me/address/5Lit5Zu9.html',
]
for url in urls:
r = WebRequest().get(url, timeout=10)
proxies = re.findall(r'>\s*?(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\s*?</a></td><td>(\d+)</td>',
r.text)
for proxy in proxies:
yield ":".join(proxy)
@staticmethod
def freeProxy09(page_count=1):
"""
http://ip.jiangxianli.com/
免费代理库
:return:
"""
for i in range(1, page_count + 1):
url = 'http://ip.jiangxianli.com/?country=中国&page={}'.format(i)
html_tree = WebRequest().get(url).tree
for index, tr in enumerate(html_tree.xpath("//table//tr")):
if index == 0:
continue
yield ":".join(tr.xpath("./td/text()")[0:2]).strip()
@staticmethod
def freeProxy10():
"""
墙外网站 cn-proxy
:return:
"""
urls = ['http://cn-proxy.com/', 'http://cn-proxy.com/archives/218']
request = WebRequest()
for url in urls:
r = request.get(url, timeout=10)
proxies = re.findall(r'<td>(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})</td>[\w\W]<td>(\d+)</td>', r.text)
for proxy in proxies:
yield ':'.join(proxy)
@staticmethod
def freeProxy11():
"""
https://proxy-list.org/english/index.php
:return:
"""
urls = ['https://proxy-list.org/english/index.php?p=%s' % n for n in range(1, 10)]
request = WebRequest()
import base64
for url in urls:
r = request.get(url, timeout=10)
proxies = re.findall(r"Proxy\('(.*?)'\)", r.text)
for proxy in proxies:
yield base64.b64decode(proxy).decode()
@staticmethod
def freeProxy12():
urls = ['https://list.proxylistplus.com/Fresh-HTTP-Proxy-List-1']
request = WebRequest()
for url in urls:
r = request.get(url, timeout=10)
proxies = re.findall(r'<td>(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})</td>[\s\S]*?<td>(\d+)</td>', r.text)
for proxy in proxies:
yield ':'.join(proxy)
@staticmethod
def freeProxy13(max_page=2):
"""
http://www.89ip.cn/index.html
89免费代理
:param max_page:
:return:
"""
base_url = 'http://www.89ip.cn/index_{}.html'
for page in range(1, max_page + 1):
url = base_url.format(page)
r = WebRequest().get(url, timeout=10)
proxies = re.findall(
r'<td.*?>[\s\S]*?(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})[\s\S]*?</td>[\s\S]*?<td.*?>[\s\S]*?(\d+)[\s\S]*?</td>',
r.text)
for proxy in proxies:
yield ':'.join(proxy)
@staticmethod
def freeProxy14():
"""
http://www.xiladaili.com/
西拉代理
:return:
"""
urls = ['http://www.xiladaili.com/']
for url in urls:
r = WebRequest().get(url, timeout=10)
ips = re.findall(r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d{1,5}", r.text)
for ip in ips:
yield ip.strip()
@staticmethod
def freeProxy14():
"""
http://www.xiladaili.com/
西拉代理
:return:
"""
urls = ['http://www.xiladaili.com/']
for url in urls:
r = WebRequest().get(url, timeout=10)
ips = re.findall(r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d{1,5}", r.text)
for ip in ips:
yield ip.strip()
@staticmethod
def freeProxy15(): # 命名不和已有重复即可
proxies = requests.get('https://raw.githubusercontent.com/clarketm/proxy-list/master/proxy-list-raw.txt').text.split()
for proxy in proxies:
yield proxy
# 确保每个proxy都是 host:ip正确的格式返回
@staticmethod
def freeProxy16(): # 命名不和已有重复即可
ip_list=[]
for day in range(0, 30, 1):
dd=date.today()-timedelta(days=day)
ip_url='https://webanetlabs.net/proxylist2021/spisok_proksi_na_'+dd.strftime("%d.%m.%Y")+'.html'
source=requests.get(ip_url)
if source.status_code==200:
ip_list+=re.findall(r'[0-9]+(?:\.[0-9]+){3}:[0-9]+', source.text)
for proxy in ip_list:
yield proxy
# 确保每个proxy都是 host:ip正确的格式返回
@staticmethod
def freeProxy17(): # 命名不和已有重复即可
http_ips=requests.get('https://www.proxy-list.download/api/v1/get?type=http').text.split()
https_ips=requests.get('https://www.proxy-list.download/api/v1/get?type=https').text.split()
for proxy in http_ips+https_ips:
yield proxy
# 确保每个proxy都是 host:ip正确的格式返回
@staticmethod
def freeProxy18(): # 命名不和已有重复即可
json_results=requests.get('https://proxylist.geonode.com/api/proxy-list?limit=4000&page=1&sort_by=lastChecked&sort_type=desc&protocols=https%2Csocks4%2Csocks5').json()
ip_list=[s['ip']+':'+s['port'] for s in json_results['data']]
for proxy in ip_list:
yield proxy
# 确保每个proxy都是 host:ip正确的格式返回
@staticmethod
def freeProxy19(): # 命名不和已有重复即可
ip_list=[]
for pg_num in range(1, 7):
df_ips=pd.read_html(requests.get('https://list.proxylistplus.com/Fresh-HTTP-Proxy-List-'+str(pg_num)).text)[2]
ip_list+=[s[0]+':'+str(s[1]) for s in df_ips[['IP Address.1', 'Port']].values]
for proxy in ip_list:
yield proxy
# 确保每个proxy都是 host:ip正确的格式返回
@staticmethod
def freeProxy20(): # 命名不和已有重复即可
proxy_list=requests.get('https://api.proxyscrape.com/?request=displayproxies&proxytype=all').text.split()
for proxy in proxy_list:
yield proxy
# 确保每个proxy都是 host:ip正确的格式返回
@staticmethod
def freeProxy21(): # 命名不和已有重复即可
proxy_list=requests.get('https://www.proxyscan.io/download?type=https').text.split()
proxy_list+=requests.get('https://www.proxyscan.io/download?type=socks4').text.split()
proxy_list+=requests.get('https://www.proxyscan.io/download?type=socks5').text.split()
for proxy in set(proxy_list):
yield proxy
# 确保每个proxy都是 host:ip正确的格式返回
@staticmethod
def freeProxy22(): # 命名不和已有重复即可
proxy_list=[s[0]+':'+str(int(s[1])) for s in
pd.read_html(requests.get('https://www.socks-proxy.net/').text)[0][['IP Address', 'Port']].values
if str(s[0])!='nan']
proxy_list+=[s[0]+':'+str(int(s[1])) for s in
pd.read_html(requests.get('https://www.sslproxies.org/').text)[0][['IP Address', 'Port']].values
if str(s[0])!='nan']
proxy_list+=[s[0]+':'+str(int(s[1])) for s in
pd.read_html(requests.get('https://free-proxy-list.net/').text)[0][['IP Address', 'Port']].values
if str(s[0])!='nan']
proxy_list+=[s[0]+':'+str(int(s[1])) for s in
pd.read_html(requests.get('https://www.us-proxy.org/').text)[0][['IP Address', 'Port']].values
if str(s[0])!='nan']
proxy_list+=[s[0]+':'+str(int(s[1])) for s in
pd.read_html(requests.get('https://free-proxy-list.net/uk-proxy.html').text)[0][['IP Address', 'Port']].values
if str(s[0])!='nan']
proxy_list+=[s[0]+':'+str(int(s[1])) for s in
pd.read_html(requests.get('https://www.sslproxies.org/').text)[0][['IP Address', 'Port']].values
if str(s[0])!='nan']
proxy_list+=[s[0]+':'+str(int(s[1])) for s in
pd.read_html(requests.get('https://free-proxy-list.net/anonymous-proxy.html').text)[0][['IP Address', 'Port']].values
if str(s[0])!='nan']
for proxy in set(proxy_list):
yield proxy
# 确保每个proxy都是 host:ip正确的格式返回
@staticmethod
def freeProxy23(): # 命名不和已有重复即可
# proxy_list=requests.get('https://raw.githubusercontent.com/ma-ji/proxy_pool/master/fetcher/slow_rotate.txt').text.split()
dirname = os.path.dirname(os.path.abspath(__file__))
fn = os.path.join(dirname, "slow_rotate.txt")
with open(fn) as f:
proxy_list = f.read().split()
for proxy in set(proxy_list):
yield proxy
# 确保每个proxy都是 host:ip正确的格式返回
if __name__ == '__main__':
p = ProxyFetcher()
for _ in p.freeProxy13():
print(_)
| [
"requests.Session",
"os.path.join",
"datetime.date.today",
"time.sleep",
"requests.get",
"datetime.timedelta",
"base64.b64decode",
"util.webRequest.WebRequest",
"lxml.etree.HTML",
"os.path.abspath",
"re.findall"
] | [((1856, 1933), 're.findall', 're.findall', (['"""(\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}:\\\\d{1,5})"""', 'resp.text'], {}), "('(\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}:\\\\d{1,5})', resp.text)\n", (1866, 1933), False, 'import re\n'), ((2166, 2175), 'requests.Session', 'Session', ([], {}), '()\n', (2173, 2175), False, 'from requests import Session\n'), ((6407, 6419), 'util.webRequest.WebRequest', 'WebRequest', ([], {}), '()\n', (6417, 6419), False, 'from util.webRequest import WebRequest\n'), ((6914, 6926), 'util.webRequest.WebRequest', 'WebRequest', ([], {}), '()\n', (6924, 6926), False, 'from util.webRequest import WebRequest\n'), ((7304, 7316), 'util.webRequest.WebRequest', 'WebRequest', ([], {}), '()\n', (7314, 7316), False, 'from util.webRequest import WebRequest\n'), ((13674, 13714), 'os.path.join', 'os.path.join', (['dirname', '"""slow_rotate.txt"""'], {}), "(dirname, 'slow_rotate.txt')\n", (13686, 13714), False, 'import os\n'), ((2306, 2353), 're.findall', 're.findall', (['"""X-CSRFToken": "(.*?)\\""""', 'index_resp'], {}), '(\'X-CSRFToken": "(.*?)"\', index_resp)\n', (2316, 2353), False, 'import re\n'), ((4027, 4035), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (4032, 4035), False, 'from time import sleep\n'), ((5035, 5144), 're.findall', 're.findall', (['"""<td>(\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3})</td>[\\\\s\\\\S]*?<td>(\\\\d+)</td>"""', 'r.text'], {}), "(\n '<td>(\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3})</td>[\\\\s\\\\S]*?<td>(\\\\d+)</td>'\n , r.text)\n", (5045, 5144), False, 'import re\n'), ((5496, 5606), 're.findall', 're.findall', (['""">\\\\s*?(\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3})\\\\s*?</a></td><td>(\\\\d+)</td>"""', 'r.text'], {}), "(\n '>\\\\s*?(\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3})\\\\s*?</a></td><td>(\\\\d+)</td>'\n , r.text)\n", (5506, 5606), False, 'import re\n'), ((6512, 6619), 're.findall', 're.findall', 
(['"""<td>(\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3})</td>[\\\\w\\\\W]<td>(\\\\d+)</td>"""', 'r.text'], {}), "(\n '<td>(\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3})</td>[\\\\w\\\\W]<td>(\\\\d+)</td>'\n , r.text)\n", (6522, 6619), False, 'import re\n'), ((7041, 7081), 're.findall', 're.findall', (['"""Proxy\\\\(\'(.*?)\'\\\\)"""', 'r.text'], {}), '("Proxy\\\\(\'(.*?)\'\\\\)", r.text)\n', (7051, 7081), False, 'import re\n'), ((7409, 7518), 're.findall', 're.findall', (['"""<td>(\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3})</td>[\\\\s\\\\S]*?<td>(\\\\d+)</td>"""', 'r.text'], {}), "(\n '<td>(\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3})</td>[\\\\s\\\\S]*?<td>(\\\\d+)</td>'\n , r.text)\n", (7419, 7518), False, 'import re\n'), ((7953, 8108), 're.findall', 're.findall', (['"""<td.*?>[\\\\s\\\\S]*?(\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3})[\\\\s\\\\S]*?</td>[\\\\s\\\\S]*?<td.*?>[\\\\s\\\\S]*?(\\\\d+)[\\\\s\\\\S]*?</td>"""', 'r.text'], {}), "(\n '<td.*?>[\\\\s\\\\S]*?(\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3})[\\\\s\\\\S]*?</td>[\\\\s\\\\S]*?<td.*?>[\\\\s\\\\S]*?(\\\\d+)[\\\\s\\\\S]*?</td>'\n , r.text)\n", (7963, 8108), False, 'import re\n'), ((8455, 8527), 're.findall', 're.findall', (['"""\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}:\\\\d{1,5}"""', 'r.text'], {}), "('\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}:\\\\d{1,5}', r.text)\n", (8465, 8527), False, 'import re\n'), ((8865, 8937), 're.findall', 're.findall', (['"""\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}:\\\\d{1,5}"""', 'r.text'], {}), "('\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}:\\\\d{1,5}', r.text)\n", (8875, 8937), False, 'import re\n'), ((9567, 9587), 'requests.get', 'requests.get', (['ip_url'], {}), '(ip_url)\n', (9579, 9587), False, 'import requests\n'), ((13634, 13659), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (13649, 13659), False, 'import os\n'), 
((1804, 1816), 'util.webRequest.WebRequest', 'WebRequest', ([], {}), '()\n', (1814, 1816), False, 'from util.webRequest import WebRequest\n'), ((2670, 2706), 'lxml.etree.HTML', 'etree.HTML', (["proxy_resp['proxy_html']"], {}), "(proxy_resp['proxy_html'])\n", (2680, 2706), False, 'from lxml import etree\n'), ((9406, 9418), 'datetime.date.today', 'date.today', ([], {}), '()\n', (9416, 9418), False, 'from datetime import date, timedelta\n'), ((9419, 9438), 'datetime.timedelta', 'timedelta', ([], {'days': 'day'}), '(days=day)\n', (9428, 9438), False, 'from datetime import date, timedelta\n'), ((9653, 9709), 're.findall', 're.findall', (['"""[0-9]+(?:\\\\.[0-9]+){3}:[0-9]+"""', 'source.text'], {}), "('[0-9]+(?:\\\\.[0-9]+){3}:[0-9]+', source.text)\n", (9663, 9709), False, 'import re\n'), ((10252, 10409), 'requests.get', 'requests.get', (['"""https://proxylist.geonode.com/api/proxy-list?limit=4000&page=1&sort_by=lastChecked&sort_type=desc&protocols=https%2Csocks4%2Csocks5"""'], {}), "(\n 'https://proxylist.geonode.com/api/proxy-list?limit=4000&page=1&sort_by=lastChecked&sort_type=desc&protocols=https%2Csocks4%2Csocks5'\n )\n", (10264, 10409), False, 'import requests\n'), ((3172, 3184), 'util.webRequest.WebRequest', 'WebRequest', ([], {}), '()\n', (3182, 3184), False, 'from util.webRequest import WebRequest\n'), ((4979, 4991), 'util.webRequest.WebRequest', 'WebRequest', ([], {}), '()\n', (4989, 4991), False, 'from util.webRequest import WebRequest\n'), ((5440, 5452), 'util.webRequest.WebRequest', 'WebRequest', ([], {}), '()\n', (5450, 5452), False, 'from util.webRequest import WebRequest\n'), ((7897, 7909), 'util.webRequest.WebRequest', 'WebRequest', ([], {}), '()\n', (7907, 7909), False, 'from util.webRequest import WebRequest\n'), ((8403, 8415), 'util.webRequest.WebRequest', 'WebRequest', ([], {}), '()\n', (8413, 8415), False, 'from util.webRequest import WebRequest\n'), ((8813, 8825), 'util.webRequest.WebRequest', 'WebRequest', ([], {}), '()\n', (8823, 8825), False, 
'from util.webRequest import WebRequest\n'), ((9072, 9177), 'requests.get', 'requests.get', (['"""https://raw.githubusercontent.com/clarketm/proxy-list/master/proxy-list-raw.txt"""'], {}), "(\n 'https://raw.githubusercontent.com/clarketm/proxy-list/master/proxy-list-raw.txt'\n )\n", (9084, 9177), False, 'import requests\n'), ((9882, 9950), 'requests.get', 'requests.get', (['"""https://www.proxy-list.download/api/v1/get?type=http"""'], {}), "('https://www.proxy-list.download/api/v1/get?type=http')\n", (9894, 9950), False, 'import requests\n'), ((9982, 10051), 'requests.get', 'requests.get', (['"""https://www.proxy-list.download/api/v1/get?type=https"""'], {}), "('https://www.proxy-list.download/api/v1/get?type=https')\n", (9994, 10051), False, 'import requests\n'), ((11074, 11160), 'requests.get', 'requests.get', (['"""https://api.proxyscrape.com/?request=displayproxies&proxytype=all"""'], {}), "(\n 'https://api.proxyscrape.com/?request=displayproxies&proxytype=all')\n", (11086, 11160), False, 'import requests\n'), ((11346, 11406), 'requests.get', 'requests.get', (['"""https://www.proxyscan.io/download?type=https"""'], {}), "('https://www.proxyscan.io/download?type=https')\n", (11358, 11406), False, 'import requests\n'), ((11440, 11501), 'requests.get', 'requests.get', (['"""https://www.proxyscan.io/download?type=socks4"""'], {}), "('https://www.proxyscan.io/download?type=socks4')\n", (11452, 11501), False, 'import requests\n'), ((11535, 11596), 'requests.get', 'requests.get', (['"""https://www.proxyscan.io/download?type=socks5"""'], {}), "('https://www.proxyscan.io/download?type=socks5')\n", (11547, 11596), False, 'import requests\n'), ((1108, 1120), 'util.webRequest.WebRequest', 'WebRequest', ([], {}), '()\n', (1118, 1120), False, 'from util.webRequest import WebRequest\n'), ((3936, 3948), 'util.webRequest.WebRequest', 'WebRequest', ([], {}), '()\n', (3946, 3948), False, 'from util.webRequest import WebRequest\n'), ((5980, 5992), 'util.webRequest.WebRequest', 
'WebRequest', ([], {}), '()\n', (5990, 5992), False, 'from util.webRequest import WebRequest\n'), ((4476, 4488), 'util.webRequest.WebRequest', 'WebRequest', ([], {}), '()\n', (4486, 4488), False, 'from util.webRequest import WebRequest\n'), ((7137, 7160), 'base64.b64decode', 'base64.b64decode', (['proxy'], {}), '(proxy)\n', (7153, 7160), False, 'import base64\n'), ((11860, 11904), 'requests.get', 'requests.get', (['"""https://www.socks-proxy.net/"""'], {}), "('https://www.socks-proxy.net/')\n", (11872, 11904), False, 'import requests\n'), ((12075, 12118), 'requests.get', 'requests.get', (['"""https://www.sslproxies.org/"""'], {}), "('https://www.sslproxies.org/')\n", (12087, 12118), False, 'import requests\n'), ((12289, 12333), 'requests.get', 'requests.get', (['"""https://free-proxy-list.net/"""'], {}), "('https://free-proxy-list.net/')\n", (12301, 12333), False, 'import requests\n'), ((12504, 12545), 'requests.get', 'requests.get', (['"""https://www.us-proxy.org/"""'], {}), "('https://www.us-proxy.org/')\n", (12516, 12545), False, 'import requests\n'), ((12716, 12773), 'requests.get', 'requests.get', (['"""https://free-proxy-list.net/uk-proxy.html"""'], {}), "('https://free-proxy-list.net/uk-proxy.html')\n", (12728, 12773), False, 'import requests\n'), ((12944, 12987), 'requests.get', 'requests.get', (['"""https://www.sslproxies.org/"""'], {}), "('https://www.sslproxies.org/')\n", (12956, 12987), False, 'import requests\n'), ((13158, 13222), 'requests.get', 'requests.get', (['"""https://free-proxy-list.net/anonymous-proxy.html"""'], {}), "('https://free-proxy-list.net/anonymous-proxy.html')\n", (13170, 13222), False, 'import requests\n')] |
# Code generated by lark_sdk_gen. DO NOT EDIT.
import unittest
import pylark
import pytest
from tests.test_conf import app_all_permission, app_no_permission
from tests.test_helper import mock_get_tenant_access_token_failed
def mock(*args, **kwargs):
    """Stand-in callable that unconditionally fails with a canned PyLarkError."""
    err = pylark.PyLarkError(scope="scope", func="func", code=1, msg="mock-failed")
    raise err
def mock_raw_request(*args, **kwargs):
    """Stand-in for the low-level request layer; unconditionally raises."""
    raise pylark.PyLarkError(
        scope="scope",
        func="func",
        code=1,
        msg="mock-raw-request-failed",
    )
# Tests below run with tenant/app token acquisition mocked to fail.
class TestDriveSampleMockGetTokenFailed(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(TestDriveSampleMockGetTokenFailed, self).__init__(*args, **kwargs)
self.cli = app_all_permission.ins()
self.cli.auth.get_tenant_access_token = mock_get_tenant_access_token_failed
self.cli.auth.get_app_access_token = mock_get_tenant_access_token_failed
self.module_cli = self.cli.drive
def test_mock_get_token_get_drive_file_meta(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_file_meta(pylark.GetDriveFileMetaReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_create_drive_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_file(pylark.CreateDriveFileReq())
assert "msg=failed" in f"{e}"
    # NOTE(review): auto-generated negative tests (file header says DO NOT EDIT) —
    # each drive file/folder/upload wrapper must raise PyLarkError when the
    # token fetchers patched in __init__ fail.
    def test_mock_get_token_copy_drive_file(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.copy_drive_file(pylark.CopyDriveFileReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_delete_drive_file(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.delete_drive_file(pylark.DeleteDriveFileReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_delete_drive_sheet_file(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.delete_drive_sheet_file(pylark.DeleteDriveSheetFileReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_create_drive_folder(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_drive_folder(pylark.CreateDriveFolderReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_drive_folder_meta(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_folder_meta(pylark.GetDriveFolderMetaReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_drive_root_folder_meta(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_root_folder_meta(
                pylark.GetDriveRootFolderMetaReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_drive_folder_children(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_folder_children(
                pylark.GetDriveFolderChildrenReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_drive_file_statistics(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_file_statistics(
                pylark.GetDriveFileStatisticsReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_download_drive_file(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.download_drive_file(pylark.DownloadDriveFileReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_upload_drive_file(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.upload_drive_file(pylark.UploadDriveFileReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_prepare_upload_drive_file(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.prepare_upload_drive_file(
                pylark.PrepareUploadDriveFileReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_part_upload_drive_file(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.part_upload_drive_file(pylark.PartUploadDriveFileReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_finish_upload_drive_file(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.finish_upload_drive_file(pylark.FinishUploadDriveFileReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_download_drive_media(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.download_drive_media(pylark.DownloadDriveMediaReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_upload_drive_media(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.upload_drive_media(pylark.UploadDriveMediaReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_prepare_upload_drive_media(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.prepare_upload_drive_media(
                pylark.PrepareUploadDriveMediaReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_part_upload_drive_media(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.part_upload_drive_media(pylark.PartUploadDriveMediaReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_finish_upload_drive_media(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.finish_upload_drive_media(
                pylark.FinishUploadDriveMediaReq()
            )
        assert "msg=failed" in f"{e}"
    # NOTE(review): auto-generated negative tests for the permission wrappers —
    # same failing-token pattern as above.
    def test_mock_get_token_create_drive_member_permission_old(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_drive_member_permission_old(
                pylark.CreateDriveMemberPermissionOldReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_transfer_drive_member_permission(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.transfer_drive_member_permission(
                pylark.TransferDriveMemberPermissionReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_drive_member_permission_list(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_member_permission_list(
                pylark.GetDriveMemberPermissionListReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_create_drive_member_permission(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_drive_member_permission(
                pylark.CreateDriveMemberPermissionReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_delete_drive_member_permission_old(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.delete_drive_member_permission_old(
                pylark.DeleteDriveMemberPermissionOldReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_delete_drive_member_permission(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.delete_drive_member_permission(
                pylark.DeleteDriveMemberPermissionReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_drive_member_permission_old(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_drive_member_permission_old(
                pylark.UpdateDriveMemberPermissionOldReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_drive_member_permission(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_drive_member_permission(
                pylark.UpdateDriveMemberPermissionReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_check_drive_member_permission(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.check_drive_member_permission(
                pylark.CheckDriveMemberPermissionReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_drive_public_permission_v1_old(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_drive_public_permission_v1_old(
                pylark.UpdateDrivePublicPermissionV1OldReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_drive_public_permission_v2_old(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_drive_public_permission_v2_old(
                pylark.UpdateDrivePublicPermissionV2OldReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_drive_public_permission_v2(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_public_permission_v2(
                pylark.GetDrivePublicPermissionV2Req()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_drive_public_permission(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_drive_public_permission(
                pylark.UpdateDrivePublicPermissionReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_batch_get_drive_media_tmp_download_url(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.batch_get_drive_media_tmp_download_url(
                pylark.BatchGetDriveMediaTmpDownloadURLReq()
            )
        assert "msg=failed" in f"{e}"
    # NOTE(review): auto-generated negative tests for the comment / doc / sheet
    # wrappers — same failing-token pattern as above.
    def test_mock_get_token_get_drive_comment_list(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_comment_list(pylark.GetDriveCommentListReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_drive_comment(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_comment(pylark.GetDriveCommentReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_create_drive_comment(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_drive_comment(pylark.CreateDriveCommentReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_drive_comment(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_drive_comment(pylark.UpdateDriveCommentReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_delete_drive_comment(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.delete_drive_comment(pylark.DeleteDriveCommentReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_drive_comment_patch(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_drive_comment_patch(
                pylark.UpdateDriveCommentPatchReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_create_drive_doc(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_drive_doc(pylark.CreateDriveDocReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_drive_doc_content(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_doc_content(pylark.GetDriveDocContentReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_drive_doc_raw_content(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_doc_raw_content(pylark.GetDriveDocRawContentReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_drive_doc_meta(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_doc_meta(pylark.GetDriveDocMetaReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_create_sheet(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_sheet(pylark.CreateSheetReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_sheet_meta(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_sheet_meta(pylark.GetSheetMetaReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_sheet_property(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_sheet_property(pylark.UpdateSheetPropertyReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_batch_update_sheet(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.batch_update_sheet(pylark.BatchUpdateSheetReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_import_sheet(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.import_sheet(pylark.ImportSheetReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_create_drive_import_task(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_drive_import_task(pylark.CreateDriveImportTaskReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_drive_import_task(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_import_task(pylark.GetDriveImportTaskReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_move_sheet_dimension(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.move_sheet_dimension(pylark.MoveSheetDimensionReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_prepend_sheet_value(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.prepend_sheet_value(pylark.PrependSheetValueReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_append_sheet_value(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.append_sheet_value(pylark.AppendSheetValueReq())
        assert "msg=failed" in f"{e}"
    # NOTE(review): auto-generated negative tests for the sheet dimension /
    # style / condition-format wrappers — same failing-token pattern as above.
    def test_mock_get_token_insert_sheet_dimension_range(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.insert_sheet_dimension_range(
                pylark.InsertSheetDimensionRangeReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_add_sheet_dimension_range(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.add_sheet_dimension_range(
                pylark.AddSheetDimensionRangeReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_sheet_dimension_range(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_sheet_dimension_range(
                pylark.UpdateSheetDimensionRangeReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_delete_sheet_dimension_range(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.delete_sheet_dimension_range(
                pylark.DeleteSheetDimensionRangeReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_sheet_value(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_sheet_value(pylark.GetSheetValueReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_batch_get_sheet_value(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.batch_get_sheet_value(pylark.BatchGetSheetValueReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_set_sheet_value(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.set_sheet_value(pylark.SetSheetValueReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_batch_set_sheet_value(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.batch_set_sheet_value(pylark.BatchSetSheetValueReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_set_sheet_style(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.set_sheet_style(pylark.SetSheetStyleReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_batch_set_sheet_style(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.batch_set_sheet_style(pylark.BatchSetSheetStyleReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_merge_sheet_cell(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.merge_sheet_cell(pylark.MergeSheetCellReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_unmerge_sheet_cell(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.unmerge_sheet_cell(pylark.UnmergeSheetCellReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_set_sheet_value_image(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.set_sheet_value_image(pylark.SetSheetValueImageReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_find_sheet(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.find_sheet(pylark.FindSheetReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_replace_sheet(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.replace_sheet(pylark.ReplaceSheetReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_create_sheet_condition_format(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_sheet_condition_format(
                pylark.CreateSheetConditionFormatReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_sheet_condition_format(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_sheet_condition_format(
                pylark.GetSheetConditionFormatReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_sheet_condition_format(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_sheet_condition_format(
                pylark.UpdateSheetConditionFormatReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_delete_sheet_condition_format(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.delete_sheet_condition_format(
                pylark.DeleteSheetConditionFormatReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_create_sheet_protected_dimension(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_sheet_protected_dimension(
                pylark.CreateSheetProtectedDimensionReq()
            )
        assert "msg=failed" in f"{e}"
def test_mock_get_token_get_sheet_protected_dimension(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_protected_dimension(
pylark.GetSheetProtectedDimensionReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_update_sheet_protected_dimension(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_protected_dimension(
pylark.UpdateSheetProtectedDimensionReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_delete_sheet_protected_dimension(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_protected_dimension(
pylark.DeleteSheetProtectedDimensionReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_create_sheet_data_validation_dropdown(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_data_validation_dropdown(
pylark.CreateSheetDataValidationDropdownReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_delete_sheet_data_validation_dropdown(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_data_validation_dropdown(
pylark.DeleteSheetDataValidationDropdownReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_update_sheet_data_validation_dropdown(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_data_validation_dropdown(
pylark.UpdateSheetDataValidationDropdownReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_get_sheet_data_validation_dropdown(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_data_validation_dropdown(
pylark.GetSheetDataValidationDropdownReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_create_sheet_filter(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_filter(pylark.CreateSheetFilterReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_delete_sheet_filter(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_filter(pylark.DeleteSheetFilterReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_update_sheet_filter(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_filter(pylark.UpdateSheetFilterReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_get_sheet_filter(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_filter(pylark.GetSheetFilterReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_create_sheet_filter_view(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_filter_view(pylark.CreateSheetFilterViewReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_delete_sheet_filter_view(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_filter_view(pylark.DeleteSheetFilterViewReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_update_sheet_filter_view(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_filter_view(pylark.UpdateSheetFilterViewReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_get_sheet_filter_view(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_filter_view(pylark.GetSheetFilterViewReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_query_sheet_filter_view(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.query_sheet_filter_view(pylark.QuerySheetFilterViewReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_create_sheet_filter_view_condition(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_filter_view_condition(
pylark.CreateSheetFilterViewConditionReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_delete_sheet_filter_view_condition(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_filter_view_condition(
pylark.DeleteSheetFilterViewConditionReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_update_sheet_filter_view_condition(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_filter_view_condition(
pylark.UpdateSheetFilterViewConditionReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_get_sheet_filter_view_condition(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_filter_view_condition(
pylark.GetSheetFilterViewConditionReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_query_sheet_filter_view_condition(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.query_sheet_filter_view_condition(
pylark.QuerySheetFilterViewConditionReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_create_sheet_float_image(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_float_image(pylark.CreateSheetFloatImageReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_delete_sheet_float_image(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_float_image(pylark.DeleteSheetFloatImageReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_update_sheet_float_image(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_float_image(pylark.UpdateSheetFloatImageReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_get_sheet_float_image(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_float_image(pylark.GetSheetFloatImageReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_query_sheet_float_image(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.query_sheet_float_image(pylark.QuerySheetFloatImageReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_get_wiki_space_list(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_wiki_space_list(pylark.GetWikiSpaceListReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_get_wiki_space(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_wiki_space(pylark.GetWikiSpaceReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_update_wiki_space_setting(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_wiki_space_setting(
pylark.UpdateWikiSpaceSettingReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_add_wiki_space_member(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.add_wiki_space_member(pylark.AddWikiSpaceMemberReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_create_wiki_node(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_wiki_node(pylark.CreateWikiNodeReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_get_wiki_node_list(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_wiki_node_list(pylark.GetWikiNodeListReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_get_wiki_node(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_wiki_node(pylark.GetWikiNodeReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_move_docs_to_wiki(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.move_docs_to_wiki(pylark.MoveDocsToWikiReq())
assert "msg=failed" in f"{e}"
# mock: mock self func
class TestDriveSampleMockSelfFuncFailed(unittest.TestCase):
    """Drive API tests where each client method is replaced by ``mock``.

    Each test patches one method on the shared drive client so it raises a
    ``pylark.PyLarkError`` containing "msg=mock-failed", asserts that the
    error propagates to the caller, and restores the original method.
    """

    def __init__(self, *args, **kwargs):
        # NOTE(review): the client is built in __init__ rather than setUp, so
        # it is created once per test-case instantiation — presumably
        # app_all_permission.ins() is a cheap/shared fixture; confirm.
        super(TestDriveSampleMockSelfFuncFailed, self).__init__(*args, **kwargs)
        self.cli = app_all_permission.ins()
        self.module_cli = self.cli.drive
    # --- mock self-func failure cases --------------------------------------
    # Generated boilerplate, one test per drive/sheet API. Pattern:
    #   1. save the real client method,
    #   2. replace it with `mock` (the asserts below expect it to raise a
    #      pylark.PyLarkError whose message contains "msg=mock-failed"),
    #   3. call the method with an empty request and assert the error
    #      propagates,
    #   4. restore the original method so later tests are unaffected.
    def test_mock_self_func_get_drive_file_meta(self):
        origin_func = self.module_cli.get_drive_file_meta
        self.module_cli.get_drive_file_meta = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_file_meta(pylark.GetDriveFileMetaReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.get_drive_file_meta = origin_func
    def test_mock_self_func_create_drive_file(self):
        origin_func = self.module_cli.create_drive_file
        self.module_cli.create_drive_file = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_drive_file(pylark.CreateDriveFileReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.create_drive_file = origin_func
    def test_mock_self_func_copy_drive_file(self):
        origin_func = self.module_cli.copy_drive_file
        self.module_cli.copy_drive_file = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.copy_drive_file(pylark.CopyDriveFileReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.copy_drive_file = origin_func
    def test_mock_self_func_delete_drive_file(self):
        origin_func = self.module_cli.delete_drive_file
        self.module_cli.delete_drive_file = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.delete_drive_file(pylark.DeleteDriveFileReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.delete_drive_file = origin_func
    def test_mock_self_func_delete_drive_sheet_file(self):
        origin_func = self.module_cli.delete_drive_sheet_file
        self.module_cli.delete_drive_sheet_file = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.delete_drive_sheet_file(pylark.DeleteDriveSheetFileReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.delete_drive_sheet_file = origin_func
    def test_mock_self_func_create_drive_folder(self):
        origin_func = self.module_cli.create_drive_folder
        self.module_cli.create_drive_folder = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_drive_folder(pylark.CreateDriveFolderReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.create_drive_folder = origin_func
    def test_mock_self_func_get_drive_folder_meta(self):
        origin_func = self.module_cli.get_drive_folder_meta
        self.module_cli.get_drive_folder_meta = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_folder_meta(pylark.GetDriveFolderMetaReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.get_drive_folder_meta = origin_func
    def test_mock_self_func_get_drive_root_folder_meta(self):
        origin_func = self.module_cli.get_drive_root_folder_meta
        self.module_cli.get_drive_root_folder_meta = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_root_folder_meta(
                pylark.GetDriveRootFolderMetaReq()
            )
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.get_drive_root_folder_meta = origin_func
    def test_mock_self_func_get_drive_folder_children(self):
        origin_func = self.module_cli.get_drive_folder_children
        self.module_cli.get_drive_folder_children = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_folder_children(
                pylark.GetDriveFolderChildrenReq()
            )
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.get_drive_folder_children = origin_func
    def test_mock_self_func_get_drive_file_statistics(self):
        origin_func = self.module_cli.get_drive_file_statistics
        self.module_cli.get_drive_file_statistics = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_file_statistics(
                pylark.GetDriveFileStatisticsReq()
            )
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.get_drive_file_statistics = origin_func
    # upload / download APIs
    def test_mock_self_func_download_drive_file(self):
        origin_func = self.module_cli.download_drive_file
        self.module_cli.download_drive_file = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.download_drive_file(pylark.DownloadDriveFileReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.download_drive_file = origin_func
    def test_mock_self_func_upload_drive_file(self):
        origin_func = self.module_cli.upload_drive_file
        self.module_cli.upload_drive_file = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.upload_drive_file(pylark.UploadDriveFileReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.upload_drive_file = origin_func
    def test_mock_self_func_prepare_upload_drive_file(self):
        origin_func = self.module_cli.prepare_upload_drive_file
        self.module_cli.prepare_upload_drive_file = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.prepare_upload_drive_file(
                pylark.PrepareUploadDriveFileReq()
            )
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.prepare_upload_drive_file = origin_func
    def test_mock_self_func_part_upload_drive_file(self):
        origin_func = self.module_cli.part_upload_drive_file
        self.module_cli.part_upload_drive_file = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.part_upload_drive_file(pylark.PartUploadDriveFileReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.part_upload_drive_file = origin_func
    def test_mock_self_func_finish_upload_drive_file(self):
        origin_func = self.module_cli.finish_upload_drive_file
        self.module_cli.finish_upload_drive_file = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.finish_upload_drive_file(pylark.FinishUploadDriveFileReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.finish_upload_drive_file = origin_func
    def test_mock_self_func_download_drive_media(self):
        origin_func = self.module_cli.download_drive_media
        self.module_cli.download_drive_media = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.download_drive_media(pylark.DownloadDriveMediaReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.download_drive_media = origin_func
    def test_mock_self_func_upload_drive_media(self):
        origin_func = self.module_cli.upload_drive_media
        self.module_cli.upload_drive_media = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.upload_drive_media(pylark.UploadDriveMediaReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.upload_drive_media = origin_func
    def test_mock_self_func_prepare_upload_drive_media(self):
        origin_func = self.module_cli.prepare_upload_drive_media
        self.module_cli.prepare_upload_drive_media = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.prepare_upload_drive_media(
                pylark.PrepareUploadDriveMediaReq()
            )
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.prepare_upload_drive_media = origin_func
    def test_mock_self_func_part_upload_drive_media(self):
        origin_func = self.module_cli.part_upload_drive_media
        self.module_cli.part_upload_drive_media = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.part_upload_drive_media(pylark.PartUploadDriveMediaReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.part_upload_drive_media = origin_func
    def test_mock_self_func_finish_upload_drive_media(self):
        origin_func = self.module_cli.finish_upload_drive_media
        self.module_cli.finish_upload_drive_media = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.finish_upload_drive_media(
                pylark.FinishUploadDriveMediaReq()
            )
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.finish_upload_drive_media = origin_func
    # permission APIs
    def test_mock_self_func_create_drive_member_permission_old(self):
        origin_func = self.module_cli.create_drive_member_permission_old
        self.module_cli.create_drive_member_permission_old = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_drive_member_permission_old(
                pylark.CreateDriveMemberPermissionOldReq()
            )
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.create_drive_member_permission_old = origin_func
    def test_mock_self_func_transfer_drive_member_permission(self):
        origin_func = self.module_cli.transfer_drive_member_permission
        self.module_cli.transfer_drive_member_permission = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.transfer_drive_member_permission(
                pylark.TransferDriveMemberPermissionReq()
            )
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.transfer_drive_member_permission = origin_func
    def test_mock_self_func_get_drive_member_permission_list(self):
        origin_func = self.module_cli.get_drive_member_permission_list
        self.module_cli.get_drive_member_permission_list = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_member_permission_list(
                pylark.GetDriveMemberPermissionListReq()
            )
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.get_drive_member_permission_list = origin_func
    def test_mock_self_func_create_drive_member_permission(self):
        origin_func = self.module_cli.create_drive_member_permission
        self.module_cli.create_drive_member_permission = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_drive_member_permission(
                pylark.CreateDriveMemberPermissionReq()
            )
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.create_drive_member_permission = origin_func
    def test_mock_self_func_delete_drive_member_permission_old(self):
        origin_func = self.module_cli.delete_drive_member_permission_old
        self.module_cli.delete_drive_member_permission_old = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.delete_drive_member_permission_old(
                pylark.DeleteDriveMemberPermissionOldReq()
            )
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.delete_drive_member_permission_old = origin_func
    def test_mock_self_func_delete_drive_member_permission(self):
        origin_func = self.module_cli.delete_drive_member_permission
        self.module_cli.delete_drive_member_permission = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.delete_drive_member_permission(
                pylark.DeleteDriveMemberPermissionReq()
            )
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.delete_drive_member_permission = origin_func
    def test_mock_self_func_update_drive_member_permission_old(self):
        origin_func = self.module_cli.update_drive_member_permission_old
        self.module_cli.update_drive_member_permission_old = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_drive_member_permission_old(
                pylark.UpdateDriveMemberPermissionOldReq()
            )
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.update_drive_member_permission_old = origin_func
    def test_mock_self_func_update_drive_member_permission(self):
        origin_func = self.module_cli.update_drive_member_permission
        self.module_cli.update_drive_member_permission = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_drive_member_permission(
                pylark.UpdateDriveMemberPermissionReq()
            )
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.update_drive_member_permission = origin_func
    def test_mock_self_func_check_drive_member_permission(self):
        origin_func = self.module_cli.check_drive_member_permission
        self.module_cli.check_drive_member_permission = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.check_drive_member_permission(
                pylark.CheckDriveMemberPermissionReq()
            )
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.check_drive_member_permission = origin_func
    def test_mock_self_func_update_drive_public_permission_v1_old(self):
        origin_func = self.module_cli.update_drive_public_permission_v1_old
        self.module_cli.update_drive_public_permission_v1_old = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_drive_public_permission_v1_old(
                pylark.UpdateDrivePublicPermissionV1OldReq()
            )
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.update_drive_public_permission_v1_old = origin_func
    def test_mock_self_func_update_drive_public_permission_v2_old(self):
        origin_func = self.module_cli.update_drive_public_permission_v2_old
        self.module_cli.update_drive_public_permission_v2_old = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_drive_public_permission_v2_old(
                pylark.UpdateDrivePublicPermissionV2OldReq()
            )
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.update_drive_public_permission_v2_old = origin_func
    def test_mock_self_func_get_drive_public_permission_v2(self):
        origin_func = self.module_cli.get_drive_public_permission_v2
        self.module_cli.get_drive_public_permission_v2 = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_public_permission_v2(
                pylark.GetDrivePublicPermissionV2Req()
            )
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.get_drive_public_permission_v2 = origin_func
    def test_mock_self_func_update_drive_public_permission(self):
        origin_func = self.module_cli.update_drive_public_permission
        self.module_cli.update_drive_public_permission = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_drive_public_permission(
                pylark.UpdateDrivePublicPermissionReq()
            )
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.update_drive_public_permission = origin_func
    def test_mock_self_func_batch_get_drive_media_tmp_download_url(self):
        origin_func = self.module_cli.batch_get_drive_media_tmp_download_url
        self.module_cli.batch_get_drive_media_tmp_download_url = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.batch_get_drive_media_tmp_download_url(
                pylark.BatchGetDriveMediaTmpDownloadURLReq()
            )
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.batch_get_drive_media_tmp_download_url = origin_func
    # comment APIs
    def test_mock_self_func_get_drive_comment_list(self):
        origin_func = self.module_cli.get_drive_comment_list
        self.module_cli.get_drive_comment_list = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_comment_list(pylark.GetDriveCommentListReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.get_drive_comment_list = origin_func
    def test_mock_self_func_get_drive_comment(self):
        origin_func = self.module_cli.get_drive_comment
        self.module_cli.get_drive_comment = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_comment(pylark.GetDriveCommentReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.get_drive_comment = origin_func
    def test_mock_self_func_create_drive_comment(self):
        origin_func = self.module_cli.create_drive_comment
        self.module_cli.create_drive_comment = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_drive_comment(pylark.CreateDriveCommentReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.create_drive_comment = origin_func
    def test_mock_self_func_update_drive_comment(self):
        origin_func = self.module_cli.update_drive_comment
        self.module_cli.update_drive_comment = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_drive_comment(pylark.UpdateDriveCommentReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.update_drive_comment = origin_func
    def test_mock_self_func_delete_drive_comment(self):
        origin_func = self.module_cli.delete_drive_comment
        self.module_cli.delete_drive_comment = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.delete_drive_comment(pylark.DeleteDriveCommentReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.delete_drive_comment = origin_func
    def test_mock_self_func_update_drive_comment_patch(self):
        origin_func = self.module_cli.update_drive_comment_patch
        self.module_cli.update_drive_comment_patch = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_drive_comment_patch(
                pylark.UpdateDriveCommentPatchReq()
            )
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.update_drive_comment_patch = origin_func
    # doc APIs
    def test_mock_self_func_create_drive_doc(self):
        origin_func = self.module_cli.create_drive_doc
        self.module_cli.create_drive_doc = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_drive_doc(pylark.CreateDriveDocReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.create_drive_doc = origin_func
    def test_mock_self_func_get_drive_doc_content(self):
        origin_func = self.module_cli.get_drive_doc_content
        self.module_cli.get_drive_doc_content = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_doc_content(pylark.GetDriveDocContentReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.get_drive_doc_content = origin_func
    def test_mock_self_func_get_drive_doc_raw_content(self):
        origin_func = self.module_cli.get_drive_doc_raw_content
        self.module_cli.get_drive_doc_raw_content = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_doc_raw_content(pylark.GetDriveDocRawContentReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.get_drive_doc_raw_content = origin_func
    def test_mock_self_func_get_drive_doc_meta(self):
        origin_func = self.module_cli.get_drive_doc_meta
        self.module_cli.get_drive_doc_meta = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_doc_meta(pylark.GetDriveDocMetaReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.get_drive_doc_meta = origin_func
    # sheet APIs
    def test_mock_self_func_create_sheet(self):
        origin_func = self.module_cli.create_sheet
        self.module_cli.create_sheet = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_sheet(pylark.CreateSheetReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.create_sheet = origin_func
    def test_mock_self_func_get_sheet_meta(self):
        origin_func = self.module_cli.get_sheet_meta
        self.module_cli.get_sheet_meta = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_sheet_meta(pylark.GetSheetMetaReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.get_sheet_meta = origin_func
    def test_mock_self_func_update_sheet_property(self):
        origin_func = self.module_cli.update_sheet_property
        self.module_cli.update_sheet_property = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_sheet_property(pylark.UpdateSheetPropertyReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.update_sheet_property = origin_func
    def test_mock_self_func_batch_update_sheet(self):
        origin_func = self.module_cli.batch_update_sheet
        self.module_cli.batch_update_sheet = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.batch_update_sheet(pylark.BatchUpdateSheetReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.batch_update_sheet = origin_func
    def test_mock_self_func_import_sheet(self):
        origin_func = self.module_cli.import_sheet
        self.module_cli.import_sheet = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.import_sheet(pylark.ImportSheetReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.import_sheet = origin_func
    def test_mock_self_func_create_drive_import_task(self):
        origin_func = self.module_cli.create_drive_import_task
        self.module_cli.create_drive_import_task = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_drive_import_task(pylark.CreateDriveImportTaskReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.create_drive_import_task = origin_func
    def test_mock_self_func_get_drive_import_task(self):
        origin_func = self.module_cli.get_drive_import_task
        self.module_cli.get_drive_import_task = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_import_task(pylark.GetDriveImportTaskReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.get_drive_import_task = origin_func
    def test_mock_self_func_move_sheet_dimension(self):
        origin_func = self.module_cli.move_sheet_dimension
        self.module_cli.move_sheet_dimension = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.move_sheet_dimension(pylark.MoveSheetDimensionReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.move_sheet_dimension = origin_func
    def test_mock_self_func_prepend_sheet_value(self):
        origin_func = self.module_cli.prepend_sheet_value
        self.module_cli.prepend_sheet_value = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.prepend_sheet_value(pylark.PrependSheetValueReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.prepend_sheet_value = origin_func
    def test_mock_self_func_append_sheet_value(self):
        origin_func = self.module_cli.append_sheet_value
        self.module_cli.append_sheet_value = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.append_sheet_value(pylark.AppendSheetValueReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.append_sheet_value = origin_func
    def test_mock_self_func_insert_sheet_dimension_range(self):
        origin_func = self.module_cli.insert_sheet_dimension_range
        self.module_cli.insert_sheet_dimension_range = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.insert_sheet_dimension_range(
                pylark.InsertSheetDimensionRangeReq()
            )
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.insert_sheet_dimension_range = origin_func
    def test_mock_self_func_add_sheet_dimension_range(self):
        origin_func = self.module_cli.add_sheet_dimension_range
        self.module_cli.add_sheet_dimension_range = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.add_sheet_dimension_range(
                pylark.AddSheetDimensionRangeReq()
            )
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.add_sheet_dimension_range = origin_func
    def test_mock_self_func_update_sheet_dimension_range(self):
        origin_func = self.module_cli.update_sheet_dimension_range
        self.module_cli.update_sheet_dimension_range = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_sheet_dimension_range(
                pylark.UpdateSheetDimensionRangeReq()
            )
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.update_sheet_dimension_range = origin_func
    def test_mock_self_func_delete_sheet_dimension_range(self):
        origin_func = self.module_cli.delete_sheet_dimension_range
        self.module_cli.delete_sheet_dimension_range = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.delete_sheet_dimension_range(
                pylark.DeleteSheetDimensionRangeReq()
            )
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.delete_sheet_dimension_range = origin_func
    def test_mock_self_func_get_sheet_value(self):
        origin_func = self.module_cli.get_sheet_value
        self.module_cli.get_sheet_value = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_sheet_value(pylark.GetSheetValueReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.get_sheet_value = origin_func
    def test_mock_self_func_batch_get_sheet_value(self):
        origin_func = self.module_cli.batch_get_sheet_value
        self.module_cli.batch_get_sheet_value = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.batch_get_sheet_value(pylark.BatchGetSheetValueReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.batch_get_sheet_value = origin_func
    def test_mock_self_func_set_sheet_value(self):
        origin_func = self.module_cli.set_sheet_value
        self.module_cli.set_sheet_value = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.set_sheet_value(pylark.SetSheetValueReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.set_sheet_value = origin_func
    def test_mock_self_func_batch_set_sheet_value(self):
        origin_func = self.module_cli.batch_set_sheet_value
        self.module_cli.batch_set_sheet_value = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.batch_set_sheet_value(pylark.BatchSetSheetValueReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.batch_set_sheet_value = origin_func
    def test_mock_self_func_set_sheet_style(self):
        origin_func = self.module_cli.set_sheet_style
        self.module_cli.set_sheet_style = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.set_sheet_style(pylark.SetSheetStyleReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.set_sheet_style = origin_func
    def test_mock_self_func_batch_set_sheet_style(self):
        origin_func = self.module_cli.batch_set_sheet_style
        self.module_cli.batch_set_sheet_style = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.batch_set_sheet_style(pylark.BatchSetSheetStyleReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.batch_set_sheet_style = origin_func
    def test_mock_self_func_merge_sheet_cell(self):
        origin_func = self.module_cli.merge_sheet_cell
        self.module_cli.merge_sheet_cell = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.merge_sheet_cell(pylark.MergeSheetCellReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.merge_sheet_cell = origin_func
    def test_mock_self_func_unmerge_sheet_cell(self):
        origin_func = self.module_cli.unmerge_sheet_cell
        self.module_cli.unmerge_sheet_cell = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.unmerge_sheet_cell(pylark.UnmergeSheetCellReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.unmerge_sheet_cell = origin_func
    def test_mock_self_func_set_sheet_value_image(self):
        origin_func = self.module_cli.set_sheet_value_image
        self.module_cli.set_sheet_value_image = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.set_sheet_value_image(pylark.SetSheetValueImageReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.set_sheet_value_image = origin_func
    def test_mock_self_func_find_sheet(self):
        origin_func = self.module_cli.find_sheet
        self.module_cli.find_sheet = mock
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.find_sheet(pylark.FindSheetReq())
        assert "msg=mock-failed" in f"{e}"
        self.module_cli.find_sheet = origin_func
def test_mock_self_func_replace_sheet(self):
origin_func = self.module_cli.replace_sheet
self.module_cli.replace_sheet = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.replace_sheet(pylark.ReplaceSheetReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.replace_sheet = origin_func
def test_mock_self_func_create_sheet_condition_format(self):
origin_func = self.module_cli.create_sheet_condition_format
self.module_cli.create_sheet_condition_format = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_condition_format(
pylark.CreateSheetConditionFormatReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_sheet_condition_format = origin_func
def test_mock_self_func_get_sheet_condition_format(self):
origin_func = self.module_cli.get_sheet_condition_format
self.module_cli.get_sheet_condition_format = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_condition_format(
pylark.GetSheetConditionFormatReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_sheet_condition_format = origin_func
def test_mock_self_func_update_sheet_condition_format(self):
origin_func = self.module_cli.update_sheet_condition_format
self.module_cli.update_sheet_condition_format = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_condition_format(
pylark.UpdateSheetConditionFormatReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_sheet_condition_format = origin_func
def test_mock_self_func_delete_sheet_condition_format(self):
origin_func = self.module_cli.delete_sheet_condition_format
self.module_cli.delete_sheet_condition_format = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_condition_format(
pylark.DeleteSheetConditionFormatReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.delete_sheet_condition_format = origin_func
def test_mock_self_func_create_sheet_protected_dimension(self):
origin_func = self.module_cli.create_sheet_protected_dimension
self.module_cli.create_sheet_protected_dimension = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_protected_dimension(
pylark.CreateSheetProtectedDimensionReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_sheet_protected_dimension = origin_func
def test_mock_self_func_get_sheet_protected_dimension(self):
origin_func = self.module_cli.get_sheet_protected_dimension
self.module_cli.get_sheet_protected_dimension = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_protected_dimension(
pylark.GetSheetProtectedDimensionReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_sheet_protected_dimension = origin_func
def test_mock_self_func_update_sheet_protected_dimension(self):
origin_func = self.module_cli.update_sheet_protected_dimension
self.module_cli.update_sheet_protected_dimension = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_protected_dimension(
pylark.UpdateSheetProtectedDimensionReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_sheet_protected_dimension = origin_func
def test_mock_self_func_delete_sheet_protected_dimension(self):
origin_func = self.module_cli.delete_sheet_protected_dimension
self.module_cli.delete_sheet_protected_dimension = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_protected_dimension(
pylark.DeleteSheetProtectedDimensionReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.delete_sheet_protected_dimension = origin_func
def test_mock_self_func_create_sheet_data_validation_dropdown(self):
origin_func = self.module_cli.create_sheet_data_validation_dropdown
self.module_cli.create_sheet_data_validation_dropdown = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_data_validation_dropdown(
pylark.CreateSheetDataValidationDropdownReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_sheet_data_validation_dropdown = origin_func
def test_mock_self_func_delete_sheet_data_validation_dropdown(self):
origin_func = self.module_cli.delete_sheet_data_validation_dropdown
self.module_cli.delete_sheet_data_validation_dropdown = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_data_validation_dropdown(
pylark.DeleteSheetDataValidationDropdownReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.delete_sheet_data_validation_dropdown = origin_func
def test_mock_self_func_update_sheet_data_validation_dropdown(self):
origin_func = self.module_cli.update_sheet_data_validation_dropdown
self.module_cli.update_sheet_data_validation_dropdown = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_data_validation_dropdown(
pylark.UpdateSheetDataValidationDropdownReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_sheet_data_validation_dropdown = origin_func
def test_mock_self_func_get_sheet_data_validation_dropdown(self):
origin_func = self.module_cli.get_sheet_data_validation_dropdown
self.module_cli.get_sheet_data_validation_dropdown = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_data_validation_dropdown(
pylark.GetSheetDataValidationDropdownReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_sheet_data_validation_dropdown = origin_func
def test_mock_self_func_create_sheet_filter(self):
origin_func = self.module_cli.create_sheet_filter
self.module_cli.create_sheet_filter = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_filter(pylark.CreateSheetFilterReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_sheet_filter = origin_func
def test_mock_self_func_delete_sheet_filter(self):
origin_func = self.module_cli.delete_sheet_filter
self.module_cli.delete_sheet_filter = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_filter(pylark.DeleteSheetFilterReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.delete_sheet_filter = origin_func
def test_mock_self_func_update_sheet_filter(self):
origin_func = self.module_cli.update_sheet_filter
self.module_cli.update_sheet_filter = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_filter(pylark.UpdateSheetFilterReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_sheet_filter = origin_func
def test_mock_self_func_get_sheet_filter(self):
origin_func = self.module_cli.get_sheet_filter
self.module_cli.get_sheet_filter = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_filter(pylark.GetSheetFilterReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_sheet_filter = origin_func
def test_mock_self_func_create_sheet_filter_view(self):
origin_func = self.module_cli.create_sheet_filter_view
self.module_cli.create_sheet_filter_view = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_filter_view(pylark.CreateSheetFilterViewReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_sheet_filter_view = origin_func
def test_mock_self_func_delete_sheet_filter_view(self):
origin_func = self.module_cli.delete_sheet_filter_view
self.module_cli.delete_sheet_filter_view = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_filter_view(pylark.DeleteSheetFilterViewReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.delete_sheet_filter_view = origin_func
def test_mock_self_func_update_sheet_filter_view(self):
origin_func = self.module_cli.update_sheet_filter_view
self.module_cli.update_sheet_filter_view = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_filter_view(pylark.UpdateSheetFilterViewReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_sheet_filter_view = origin_func
def test_mock_self_func_get_sheet_filter_view(self):
origin_func = self.module_cli.get_sheet_filter_view
self.module_cli.get_sheet_filter_view = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_filter_view(pylark.GetSheetFilterViewReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_sheet_filter_view = origin_func
def test_mock_self_func_query_sheet_filter_view(self):
origin_func = self.module_cli.query_sheet_filter_view
self.module_cli.query_sheet_filter_view = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.query_sheet_filter_view(pylark.QuerySheetFilterViewReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.query_sheet_filter_view = origin_func
def test_mock_self_func_create_sheet_filter_view_condition(self):
origin_func = self.module_cli.create_sheet_filter_view_condition
self.module_cli.create_sheet_filter_view_condition = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_filter_view_condition(
pylark.CreateSheetFilterViewConditionReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_sheet_filter_view_condition = origin_func
def test_mock_self_func_delete_sheet_filter_view_condition(self):
origin_func = self.module_cli.delete_sheet_filter_view_condition
self.module_cli.delete_sheet_filter_view_condition = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_filter_view_condition(
pylark.DeleteSheetFilterViewConditionReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.delete_sheet_filter_view_condition = origin_func
def test_mock_self_func_update_sheet_filter_view_condition(self):
origin_func = self.module_cli.update_sheet_filter_view_condition
self.module_cli.update_sheet_filter_view_condition = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_filter_view_condition(
pylark.UpdateSheetFilterViewConditionReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_sheet_filter_view_condition = origin_func
def test_mock_self_func_get_sheet_filter_view_condition(self):
origin_func = self.module_cli.get_sheet_filter_view_condition
self.module_cli.get_sheet_filter_view_condition = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_filter_view_condition(
pylark.GetSheetFilterViewConditionReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_sheet_filter_view_condition = origin_func
def test_mock_self_func_query_sheet_filter_view_condition(self):
origin_func = self.module_cli.query_sheet_filter_view_condition
self.module_cli.query_sheet_filter_view_condition = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.query_sheet_filter_view_condition(
pylark.QuerySheetFilterViewConditionReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.query_sheet_filter_view_condition = origin_func
def test_mock_self_func_create_sheet_float_image(self):
origin_func = self.module_cli.create_sheet_float_image
self.module_cli.create_sheet_float_image = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_float_image(pylark.CreateSheetFloatImageReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_sheet_float_image = origin_func
def test_mock_self_func_delete_sheet_float_image(self):
origin_func = self.module_cli.delete_sheet_float_image
self.module_cli.delete_sheet_float_image = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_float_image(pylark.DeleteSheetFloatImageReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.delete_sheet_float_image = origin_func
def test_mock_self_func_update_sheet_float_image(self):
origin_func = self.module_cli.update_sheet_float_image
self.module_cli.update_sheet_float_image = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_float_image(pylark.UpdateSheetFloatImageReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_sheet_float_image = origin_func
def test_mock_self_func_get_sheet_float_image(self):
origin_func = self.module_cli.get_sheet_float_image
self.module_cli.get_sheet_float_image = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_float_image(pylark.GetSheetFloatImageReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_sheet_float_image = origin_func
def test_mock_self_func_query_sheet_float_image(self):
origin_func = self.module_cli.query_sheet_float_image
self.module_cli.query_sheet_float_image = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.query_sheet_float_image(pylark.QuerySheetFloatImageReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.query_sheet_float_image = origin_func
def test_mock_self_func_get_wiki_space_list(self):
origin_func = self.module_cli.get_wiki_space_list
self.module_cli.get_wiki_space_list = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_wiki_space_list(pylark.GetWikiSpaceListReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_wiki_space_list = origin_func
def test_mock_self_func_get_wiki_space(self):
origin_func = self.module_cli.get_wiki_space
self.module_cli.get_wiki_space = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_wiki_space(pylark.GetWikiSpaceReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_wiki_space = origin_func
def test_mock_self_func_update_wiki_space_setting(self):
origin_func = self.module_cli.update_wiki_space_setting
self.module_cli.update_wiki_space_setting = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_wiki_space_setting(
pylark.UpdateWikiSpaceSettingReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_wiki_space_setting = origin_func
def test_mock_self_func_add_wiki_space_member(self):
origin_func = self.module_cli.add_wiki_space_member
self.module_cli.add_wiki_space_member = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.add_wiki_space_member(pylark.AddWikiSpaceMemberReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.add_wiki_space_member = origin_func
def test_mock_self_func_create_wiki_node(self):
origin_func = self.module_cli.create_wiki_node
self.module_cli.create_wiki_node = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_wiki_node(pylark.CreateWikiNodeReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_wiki_node = origin_func
def test_mock_self_func_get_wiki_node_list(self):
origin_func = self.module_cli.get_wiki_node_list
self.module_cli.get_wiki_node_list = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_wiki_node_list(pylark.GetWikiNodeListReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_wiki_node_list = origin_func
def test_mock_self_func_get_wiki_node(self):
origin_func = self.module_cli.get_wiki_node
self.module_cli.get_wiki_node = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_wiki_node(pylark.GetWikiNodeReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_wiki_node = origin_func
def test_mock_self_func_move_docs_to_wiki(self):
origin_func = self.module_cli.move_docs_to_wiki
self.module_cli.move_docs_to_wiki = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.move_docs_to_wiki(pylark.MoveDocsToWikiReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.move_docs_to_wiki = origin_func
# mock raw request
class TestDriveSampleMockRawRequestFailed(unittest.TestCase):
    def __init__(self, *args, **kwargs):
        """Set up a drive-module client whose raw transport always fails."""
        super(TestDriveSampleMockRawRequestFailed, self).__init__(*args, **kwargs)
        # Client with full permissions; only its drive module is exercised here.
        self.cli = app_all_permission.ins()
        self.module_cli = self.cli.drive
        # Replace the low-level transport so every API call in this class
        # surfaces the mocked "mock-raw-request-failed" error.
        self.cli.raw_request = mock_raw_request
def test_mock_raw_request_get_drive_file_meta(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_file_meta(pylark.GetDriveFileMetaReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_drive_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_file(
pylark.CreateDriveFileReq(
folder_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_copy_drive_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.copy_drive_file(
pylark.CopyDriveFileReq(
file_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_drive_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_drive_file(
pylark.DeleteDriveFileReq(
doc_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_drive_sheet_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_drive_sheet_file(
pylark.DeleteDriveSheetFileReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_drive_folder(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_folder(
pylark.CreateDriveFolderReq(
folder_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_drive_folder_meta(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_folder_meta(
pylark.GetDriveFolderMetaReq(
folder_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_drive_root_folder_meta(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_root_folder_meta(
pylark.GetDriveRootFolderMetaReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_drive_folder_children(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_folder_children(
pylark.GetDriveFolderChildrenReq(
folder_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_drive_file_statistics(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_file_statistics(
pylark.GetDriveFileStatisticsReq(
file_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_download_drive_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.download_drive_file(
pylark.DownloadDriveFileReq(
file_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_upload_drive_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.upload_drive_file(pylark.UploadDriveFileReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_prepare_upload_drive_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.prepare_upload_drive_file(
pylark.PrepareUploadDriveFileReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_part_upload_drive_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.part_upload_drive_file(pylark.PartUploadDriveFileReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_finish_upload_drive_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.finish_upload_drive_file(pylark.FinishUploadDriveFileReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_download_drive_media(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.download_drive_media(
pylark.DownloadDriveMediaReq(
file_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_upload_drive_media(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.upload_drive_media(pylark.UploadDriveMediaReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_prepare_upload_drive_media(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.prepare_upload_drive_media(
pylark.PrepareUploadDriveMediaReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_part_upload_drive_media(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.part_upload_drive_media(pylark.PartUploadDriveMediaReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_finish_upload_drive_media(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.finish_upload_drive_media(
pylark.FinishUploadDriveMediaReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_drive_member_permission_old(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_member_permission_old(
pylark.CreateDriveMemberPermissionOldReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_transfer_drive_member_permission(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.transfer_drive_member_permission(
pylark.TransferDriveMemberPermissionReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_drive_member_permission_list(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_member_permission_list(
pylark.GetDriveMemberPermissionListReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_drive_member_permission(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_member_permission(
pylark.CreateDriveMemberPermissionReq(
token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_drive_member_permission_old(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_drive_member_permission_old(
pylark.DeleteDriveMemberPermissionOldReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_drive_member_permission(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_drive_member_permission(
pylark.DeleteDriveMemberPermissionReq(
token="x",
member_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_drive_member_permission_old(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_member_permission_old(
pylark.UpdateDriveMemberPermissionOldReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_drive_member_permission(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_member_permission(
pylark.UpdateDriveMemberPermissionReq(
token="x",
member_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_check_drive_member_permission(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.check_drive_member_permission(
pylark.CheckDriveMemberPermissionReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_drive_public_permission_v1_old(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_public_permission_v1_old(
pylark.UpdateDrivePublicPermissionV1OldReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_drive_public_permission_v2_old(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_public_permission_v2_old(
pylark.UpdateDrivePublicPermissionV2OldReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_drive_public_permission_v2(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_public_permission_v2(
pylark.GetDrivePublicPermissionV2Req()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_drive_public_permission(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_public_permission(
pylark.UpdateDrivePublicPermissionReq(
token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_batch_get_drive_media_tmp_download_url(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_get_drive_media_tmp_download_url(
pylark.BatchGetDriveMediaTmpDownloadURLReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_drive_comment_list(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_comment_list(
pylark.GetDriveCommentListReq(
file_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_drive_comment(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_comment(
pylark.GetDriveCommentReq(
file_token="x",
comment_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_drive_comment(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_comment(
pylark.CreateDriveCommentReq(
file_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_drive_comment(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_comment(
pylark.UpdateDriveCommentReq(
file_token="x",
comment_id="x",
reply_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_drive_comment(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_drive_comment(
pylark.DeleteDriveCommentReq(
file_token="x",
comment_id="x",
reply_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_drive_comment_patch(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_comment_patch(
pylark.UpdateDriveCommentPatchReq(
file_token="x",
comment_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_drive_doc(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_doc(pylark.CreateDriveDocReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_drive_doc_content(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_doc_content(
pylark.GetDriveDocContentReq(
doc_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_drive_doc_raw_content(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_doc_raw_content(
pylark.GetDriveDocRawContentReq(
doc_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_drive_doc_meta(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_doc_meta(
pylark.GetDriveDocMetaReq(
doc_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_sheet(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet(pylark.CreateSheetReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_sheet_meta(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_meta(
pylark.GetSheetMetaReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_sheet_property(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_property(
pylark.UpdateSheetPropertyReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_batch_update_sheet(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_update_sheet(
pylark.BatchUpdateSheetReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_import_sheet(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.import_sheet(pylark.ImportSheetReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_drive_import_task(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_import_task(pylark.CreateDriveImportTaskReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_drive_import_task(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_import_task(
pylark.GetDriveImportTaskReq(
ticket="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_move_sheet_dimension(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.move_sheet_dimension(
pylark.MoveSheetDimensionReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_prepend_sheet_value(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.prepend_sheet_value(
pylark.PrependSheetValueReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_append_sheet_value(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.append_sheet_value(
pylark.AppendSheetValueReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_insert_sheet_dimension_range(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.insert_sheet_dimension_range(
pylark.InsertSheetDimensionRangeReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_add_sheet_dimension_range(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.add_sheet_dimension_range(
pylark.AddSheetDimensionRangeReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_sheet_dimension_range(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_dimension_range(
pylark.UpdateSheetDimensionRangeReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_sheet_dimension_range(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_dimension_range(
pylark.DeleteSheetDimensionRangeReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_sheet_value(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_value(
pylark.GetSheetValueReq(
spreadsheet_token="x",
range_="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_batch_get_sheet_value(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_get_sheet_value(
pylark.BatchGetSheetValueReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_set_sheet_value(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.set_sheet_value(
pylark.SetSheetValueReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_batch_set_sheet_value(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_set_sheet_value(
pylark.BatchSetSheetValueReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_set_sheet_style(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.set_sheet_style(
pylark.SetSheetStyleReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_batch_set_sheet_style(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_set_sheet_style(
pylark.BatchSetSheetStyleReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_merge_sheet_cell(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.merge_sheet_cell(
pylark.MergeSheetCellReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_unmerge_sheet_cell(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.unmerge_sheet_cell(
pylark.UnmergeSheetCellReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_set_sheet_value_image(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.set_sheet_value_image(
pylark.SetSheetValueImageReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_find_sheet(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.find_sheet(
pylark.FindSheetReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_replace_sheet(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.replace_sheet(
pylark.ReplaceSheetReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_sheet_condition_format(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_condition_format(
pylark.CreateSheetConditionFormatReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_sheet_condition_format(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_condition_format(
pylark.GetSheetConditionFormatReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_sheet_condition_format(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_condition_format(
pylark.UpdateSheetConditionFormatReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_sheet_condition_format(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_condition_format(
pylark.DeleteSheetConditionFormatReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_sheet_protected_dimension(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_protected_dimension(
pylark.CreateSheetProtectedDimensionReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_sheet_protected_dimension(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_protected_dimension(
pylark.GetSheetProtectedDimensionReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_sheet_protected_dimension(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_protected_dimension(
pylark.UpdateSheetProtectedDimensionReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_sheet_protected_dimension(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_protected_dimension(
pylark.DeleteSheetProtectedDimensionReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_sheet_data_validation_dropdown(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_data_validation_dropdown(
pylark.CreateSheetDataValidationDropdownReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_sheet_data_validation_dropdown(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_data_validation_dropdown(
pylark.DeleteSheetDataValidationDropdownReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_sheet_data_validation_dropdown(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_data_validation_dropdown(
pylark.UpdateSheetDataValidationDropdownReq(
spreadsheet_token="x",
sheet_id="x",
data_validation_id=1,
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_sheet_data_validation_dropdown(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_data_validation_dropdown(
pylark.GetSheetDataValidationDropdownReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_sheet_filter(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_filter(
pylark.CreateSheetFilterReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_sheet_filter(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_filter(
pylark.DeleteSheetFilterReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_sheet_filter(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_filter(
pylark.UpdateSheetFilterReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_sheet_filter(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_filter(
pylark.GetSheetFilterReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_sheet_filter_view(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_filter_view(
pylark.CreateSheetFilterViewReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_sheet_filter_view(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_filter_view(
pylark.DeleteSheetFilterViewReq(
spreadsheet_token="x",
sheet_id="x",
filter_view_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_sheet_filter_view(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_filter_view(
pylark.UpdateSheetFilterViewReq(
spreadsheet_token="x",
sheet_id="x",
filter_view_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_sheet_filter_view(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_filter_view(
pylark.GetSheetFilterViewReq(
spreadsheet_token="x",
sheet_id="x",
filter_view_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_query_sheet_filter_view(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.query_sheet_filter_view(
pylark.QuerySheetFilterViewReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_sheet_filter_view_condition(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_filter_view_condition(
pylark.CreateSheetFilterViewConditionReq(
spreadsheet_token="x",
sheet_id="x",
filter_view_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_sheet_filter_view_condition(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_filter_view_condition(
pylark.DeleteSheetFilterViewConditionReq(
spreadsheet_token="x",
sheet_id="x",
filter_view_id="x",
condition_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_sheet_filter_view_condition(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_filter_view_condition(
pylark.UpdateSheetFilterViewConditionReq(
spreadsheet_token="x",
sheet_id="x",
filter_view_id="x",
condition_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_sheet_filter_view_condition(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_filter_view_condition(
pylark.GetSheetFilterViewConditionReq(
spreadsheet_token="x",
sheet_id="x",
filter_view_id="x",
condition_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_query_sheet_filter_view_condition(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.query_sheet_filter_view_condition(
pylark.QuerySheetFilterViewConditionReq(
spreadsheet_token="x",
sheet_id="x",
filter_view_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_sheet_float_image(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_float_image(
pylark.CreateSheetFloatImageReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_sheet_float_image(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_float_image(
pylark.DeleteSheetFloatImageReq(
spreadsheet_token="x",
sheet_id="x",
float_image_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_sheet_float_image(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_float_image(
pylark.UpdateSheetFloatImageReq(
spreadsheet_token="x",
sheet_id="x",
float_image_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_sheet_float_image(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_float_image(
pylark.GetSheetFloatImageReq(
spreadsheet_token="x",
sheet_id="x",
float_image_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_query_sheet_float_image(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.query_sheet_float_image(
pylark.QuerySheetFloatImageReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_wiki_space_list(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_wiki_space_list(pylark.GetWikiSpaceListReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_wiki_space(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_wiki_space(
pylark.GetWikiSpaceReq(
space_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_wiki_space_setting(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_wiki_space_setting(
pylark.UpdateWikiSpaceSettingReq(
space_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_add_wiki_space_member(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.add_wiki_space_member(
pylark.AddWikiSpaceMemberReq(
space_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_wiki_node(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_wiki_node(
pylark.CreateWikiNodeReq(
space_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_wiki_node_list(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_wiki_node_list(
pylark.GetWikiNodeListReq(
space_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
    def test_mock_raw_request_get_wiki_node(self):
        """Mocked raw request: get_wiki_node must raise PyLarkError carrying the mock failure message."""
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_wiki_node(pylark.GetWikiNodeReq())
        # The mocked transport always fails, so the error code is positive and
        # the message identifies the mock as the source.
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
        assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_move_docs_to_wiki(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.move_docs_to_wiki(
pylark.MoveDocsToWikiReq(
space_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
# real request
class TestDriveSampleRealRequestFailed(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(TestDriveSampleRealRequestFailed, self).__init__(*args, **kwargs)
self.cli = app_no_permission.ins()
self.module_cli = self.cli.drive
def test_real_request_get_drive_file_meta(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_file_meta(pylark.GetDriveFileMetaReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_create_drive_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_file(
pylark.CreateDriveFileReq(
folder_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_copy_drive_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.copy_drive_file(
pylark.CopyDriveFileReq(
file_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_delete_drive_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_drive_file(
pylark.DeleteDriveFileReq(
doc_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_delete_drive_sheet_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_drive_sheet_file(
pylark.DeleteDriveSheetFileReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_create_drive_folder(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_folder(
pylark.CreateDriveFolderReq(
folder_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_drive_folder_meta(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_folder_meta(
pylark.GetDriveFolderMetaReq(
folder_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_drive_root_folder_meta(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_root_folder_meta(
pylark.GetDriveRootFolderMetaReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_drive_folder_children(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_folder_children(
pylark.GetDriveFolderChildrenReq(
folder_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_drive_file_statistics(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_file_statistics(
pylark.GetDriveFileStatisticsReq(
file_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_download_drive_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.download_drive_file(
pylark.DownloadDriveFileReq(
file_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_upload_drive_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.upload_drive_file(pylark.UploadDriveFileReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_prepare_upload_drive_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.prepare_upload_drive_file(
pylark.PrepareUploadDriveFileReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_part_upload_drive_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.part_upload_drive_file(pylark.PartUploadDriveFileReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_finish_upload_drive_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.finish_upload_drive_file(pylark.FinishUploadDriveFileReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_download_drive_media(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.download_drive_media(
pylark.DownloadDriveMediaReq(
file_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_upload_drive_media(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.upload_drive_media(pylark.UploadDriveMediaReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_prepare_upload_drive_media(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.prepare_upload_drive_media(
pylark.PrepareUploadDriveMediaReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_part_upload_drive_media(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.part_upload_drive_media(pylark.PartUploadDriveMediaReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_finish_upload_drive_media(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.finish_upload_drive_media(
pylark.FinishUploadDriveMediaReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_create_drive_member_permission_old(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_member_permission_old(
pylark.CreateDriveMemberPermissionOldReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_transfer_drive_member_permission(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.transfer_drive_member_permission(
pylark.TransferDriveMemberPermissionReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_drive_member_permission_list(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_member_permission_list(
pylark.GetDriveMemberPermissionListReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_create_drive_member_permission(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_member_permission(
pylark.CreateDriveMemberPermissionReq(
token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_delete_drive_member_permission_old(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_drive_member_permission_old(
pylark.DeleteDriveMemberPermissionOldReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_delete_drive_member_permission(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_drive_member_permission(
pylark.DeleteDriveMemberPermissionReq(
token="x",
member_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_update_drive_member_permission_old(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_member_permission_old(
pylark.UpdateDriveMemberPermissionOldReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_update_drive_member_permission(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_member_permission(
pylark.UpdateDriveMemberPermissionReq(
token="x",
member_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_check_drive_member_permission(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.check_drive_member_permission(
pylark.CheckDriveMemberPermissionReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_update_drive_public_permission_v1_old(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_public_permission_v1_old(
pylark.UpdateDrivePublicPermissionV1OldReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_update_drive_public_permission_v2_old(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_public_permission_v2_old(
pylark.UpdateDrivePublicPermissionV2OldReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_drive_public_permission_v2(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_public_permission_v2(
pylark.GetDrivePublicPermissionV2Req()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_update_drive_public_permission(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_public_permission(
pylark.UpdateDrivePublicPermissionReq(
token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_batch_get_drive_media_tmp_download_url(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_get_drive_media_tmp_download_url(
pylark.BatchGetDriveMediaTmpDownloadURLReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_drive_comment_list(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_comment_list(
pylark.GetDriveCommentListReq(
file_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_drive_comment(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_comment(
pylark.GetDriveCommentReq(
file_token="x",
comment_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_create_drive_comment(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_comment(
pylark.CreateDriveCommentReq(
file_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_update_drive_comment(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_comment(
pylark.UpdateDriveCommentReq(
file_token="x",
comment_id="x",
reply_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_delete_drive_comment(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_drive_comment(
pylark.DeleteDriveCommentReq(
file_token="x",
comment_id="x",
reply_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_update_drive_comment_patch(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_comment_patch(
pylark.UpdateDriveCommentPatchReq(
file_token="x",
comment_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_create_drive_doc(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_doc(pylark.CreateDriveDocReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_drive_doc_content(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_doc_content(
pylark.GetDriveDocContentReq(
doc_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_drive_doc_raw_content(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_doc_raw_content(
pylark.GetDriveDocRawContentReq(
doc_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_drive_doc_meta(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_doc_meta(
pylark.GetDriveDocMetaReq(
doc_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_create_sheet(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet(pylark.CreateSheetReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_sheet_meta(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_meta(
pylark.GetSheetMetaReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_update_sheet_property(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_property(
pylark.UpdateSheetPropertyReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_batch_update_sheet(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_update_sheet(
pylark.BatchUpdateSheetReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_import_sheet(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.import_sheet(pylark.ImportSheetReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_create_drive_import_task(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_import_task(pylark.CreateDriveImportTaskReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_drive_import_task(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_import_task(
pylark.GetDriveImportTaskReq(
ticket="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_move_sheet_dimension(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.move_sheet_dimension(
pylark.MoveSheetDimensionReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_prepend_sheet_value(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.prepend_sheet_value(
pylark.PrependSheetValueReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_append_sheet_value(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.append_sheet_value(
pylark.AppendSheetValueReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_insert_sheet_dimension_range(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.insert_sheet_dimension_range(
pylark.InsertSheetDimensionRangeReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_add_sheet_dimension_range(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.add_sheet_dimension_range(
pylark.AddSheetDimensionRangeReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_update_sheet_dimension_range(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_dimension_range(
pylark.UpdateSheetDimensionRangeReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_delete_sheet_dimension_range(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_dimension_range(
pylark.DeleteSheetDimensionRangeReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_sheet_value(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_value(
pylark.GetSheetValueReq(
spreadsheet_token="x",
range_="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_batch_get_sheet_value(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_get_sheet_value(
pylark.BatchGetSheetValueReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_set_sheet_value(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.set_sheet_value(
pylark.SetSheetValueReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_batch_set_sheet_value(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_set_sheet_value(
pylark.BatchSetSheetValueReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_set_sheet_style(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.set_sheet_style(
pylark.SetSheetStyleReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_batch_set_sheet_style(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_set_sheet_style(
pylark.BatchSetSheetStyleReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_merge_sheet_cell(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.merge_sheet_cell(
pylark.MergeSheetCellReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_unmerge_sheet_cell(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.unmerge_sheet_cell(
pylark.UnmergeSheetCellReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_set_sheet_value_image(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.set_sheet_value_image(
pylark.SetSheetValueImageReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_find_sheet(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.find_sheet(
pylark.FindSheetReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_replace_sheet(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.replace_sheet(
pylark.ReplaceSheetReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_create_sheet_condition_format(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_condition_format(
pylark.CreateSheetConditionFormatReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_sheet_condition_format(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_condition_format(
pylark.GetSheetConditionFormatReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_update_sheet_condition_format(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_condition_format(
pylark.UpdateSheetConditionFormatReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_delete_sheet_condition_format(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_condition_format(
pylark.DeleteSheetConditionFormatReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_create_sheet_protected_dimension(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_protected_dimension(
pylark.CreateSheetProtectedDimensionReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_sheet_protected_dimension(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_protected_dimension(
pylark.GetSheetProtectedDimensionReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_update_sheet_protected_dimension(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_protected_dimension(
pylark.UpdateSheetProtectedDimensionReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_delete_sheet_protected_dimension(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_protected_dimension(
pylark.DeleteSheetProtectedDimensionReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_create_sheet_data_validation_dropdown(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_data_validation_dropdown(
pylark.CreateSheetDataValidationDropdownReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_delete_sheet_data_validation_dropdown(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_data_validation_dropdown(
pylark.DeleteSheetDataValidationDropdownReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_update_sheet_data_validation_dropdown(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_data_validation_dropdown(
pylark.UpdateSheetDataValidationDropdownReq(
spreadsheet_token="x",
sheet_id="x",
data_validation_id=1,
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_sheet_data_validation_dropdown(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_data_validation_dropdown(
pylark.GetSheetDataValidationDropdownReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_create_sheet_filter(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_filter(
pylark.CreateSheetFilterReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_delete_sheet_filter(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_filter(
pylark.DeleteSheetFilterReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_update_sheet_filter(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_filter(
pylark.UpdateSheetFilterReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_sheet_filter(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_filter(
pylark.GetSheetFilterReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_create_sheet_filter_view(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_filter_view(
pylark.CreateSheetFilterViewReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_delete_sheet_filter_view(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_filter_view(
pylark.DeleteSheetFilterViewReq(
spreadsheet_token="x",
sheet_id="x",
filter_view_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_update_sheet_filter_view(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_filter_view(
pylark.UpdateSheetFilterViewReq(
spreadsheet_token="x",
sheet_id="x",
filter_view_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_sheet_filter_view(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_filter_view(
pylark.GetSheetFilterViewReq(
spreadsheet_token="x",
sheet_id="x",
filter_view_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_query_sheet_filter_view(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.query_sheet_filter_view(
pylark.QuerySheetFilterViewReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_create_sheet_filter_view_condition(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_filter_view_condition(
pylark.CreateSheetFilterViewConditionReq(
spreadsheet_token="x",
sheet_id="x",
filter_view_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_delete_sheet_filter_view_condition(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_filter_view_condition(
pylark.DeleteSheetFilterViewConditionReq(
spreadsheet_token="x",
sheet_id="x",
filter_view_id="x",
condition_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_update_sheet_filter_view_condition(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_filter_view_condition(
pylark.UpdateSheetFilterViewConditionReq(
spreadsheet_token="x",
sheet_id="x",
filter_view_id="x",
condition_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_sheet_filter_view_condition(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_filter_view_condition(
pylark.GetSheetFilterViewConditionReq(
spreadsheet_token="x",
sheet_id="x",
filter_view_id="x",
condition_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_query_sheet_filter_view_condition(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.query_sheet_filter_view_condition(
pylark.QuerySheetFilterViewConditionReq(
spreadsheet_token="x",
sheet_id="x",
filter_view_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_create_sheet_float_image(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_float_image(
pylark.CreateSheetFloatImageReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_delete_sheet_float_image(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_float_image(
pylark.DeleteSheetFloatImageReq(
spreadsheet_token="x",
sheet_id="x",
float_image_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_update_sheet_float_image(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_float_image(
pylark.UpdateSheetFloatImageReq(
spreadsheet_token="x",
sheet_id="x",
float_image_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_sheet_float_image(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_float_image(
pylark.GetSheetFloatImageReq(
spreadsheet_token="x",
sheet_id="x",
float_image_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_query_sheet_float_image(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.query_sheet_float_image(
pylark.QuerySheetFloatImageReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_wiki_space_list(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_wiki_space_list(pylark.GetWikiSpaceListReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_wiki_space(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_wiki_space(
pylark.GetWikiSpaceReq(
space_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_update_wiki_space_setting(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_wiki_space_setting(
pylark.UpdateWikiSpaceSettingReq(
space_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_add_wiki_space_member(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.add_wiki_space_member(
pylark.AddWikiSpaceMemberReq(
space_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_create_wiki_node(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_wiki_node(
pylark.CreateWikiNodeReq(
space_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_wiki_node_list(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_wiki_node_list(
pylark.GetWikiNodeListReq(
space_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_wiki_node(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_wiki_node(pylark.GetWikiNodeReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_move_docs_to_wiki(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.move_docs_to_wiki(
pylark.MoveDocsToWikiReq(
space_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
| [
"pylark.UpdateSheetDimensionRangeReq",
"pylark.UpdateDriveCommentReq",
"pylark.BatchUpdateSheetReq",
"pylark.GetSheetDataValidationDropdownReq",
"pylark.GetWikiSpaceListReq",
"pylark.UploadDriveMediaReq",
"pylark.TransferDriveMemberPermissionReq",
"pylark.GetSheetMetaReq",
"pylark.FindSheetReq",
"... | [((263, 336), 'pylark.PyLarkError', 'pylark.PyLarkError', ([], {'scope': '"""scope"""', 'func': '"""func"""', 'code': '(1)', 'msg': '"""mock-failed"""'}), "(scope='scope', func='func', code=1, msg='mock-failed')\n", (281, 336), False, 'import pylark\n'), ((388, 478), 'pylark.PyLarkError', 'pylark.PyLarkError', ([], {'scope': '"""scope"""', 'func': '"""func"""', 'code': '(1)', 'msg': '"""mock-raw-request-failed"""'}), "(scope='scope', func='func', code=1, msg=\n 'mock-raw-request-failed')\n", (406, 478), False, 'import pylark\n'), ((708, 732), 'tests.test_conf.app_all_permission.ins', 'app_all_permission.ins', ([], {}), '()\n', (730, 732), False, 'from tests.test_conf import app_all_permission, app_no_permission\n'), ((28399, 28423), 'tests.test_conf.app_all_permission.ins', 'app_all_permission.ins', ([], {}), '()\n', (28421, 28423), False, 'from tests.test_conf import app_all_permission, app_no_permission\n'), ((76131, 76155), 'tests.test_conf.app_all_permission.ins', 'app_all_permission.ins', ([], {}), '()\n', (76153, 76155), False, 'from tests.test_conf import app_all_permission, app_no_permission\n'), ((122031, 122054), 'tests.test_conf.app_no_permission.ins', 'app_no_permission.ins', ([], {}), '()\n', (122052, 122054), False, 'from tests.test_conf import app_all_permission, app_no_permission\n'), ((1008, 1041), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (1021, 1041), False, 'import pytest\n'), ((1232, 1265), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (1245, 1265), False, 'import pytest\n'), ((1451, 1484), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (1464, 1484), False, 'import pytest\n'), ((1668, 1701), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (1681, 1701), False, 'import pytest\n'), ((1895, 1928), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), 
'(pylark.PyLarkError)\n', (1908, 1928), False, 'import pytest\n'), ((2129, 2162), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (2142, 2162), False, 'import pytest\n'), ((2358, 2391), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (2371, 2391), False, 'import pytest\n'), ((2595, 2628), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (2608, 2628), False, 'import pytest\n'), ((2870, 2903), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (2883, 2903), False, 'import pytest\n'), ((3144, 3177), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (3157, 3177), False, 'import pytest\n'), ((3412, 3445), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (3425, 3445), False, 'import pytest\n'), ((3637, 3670), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (3650, 3670), False, 'import pytest\n'), ((3866, 3899), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (3879, 3899), False, 'import pytest\n'), ((4137, 4170), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (4150, 4170), False, 'import pytest\n'), ((4374, 4407), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (4387, 4407), False, 'import pytest\n'), ((4611, 4644), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (4624, 4644), False, 'import pytest\n'), ((4839, 4872), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (4852, 4872), False, 'import pytest\n'), ((5071, 5104), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (5084, 5104), False, 'import pytest\n'), ((5345, 5378), 'pytest.raises', 
'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (5358, 5378), False, 'import pytest\n'), ((5585, 5618), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (5598, 5618), False, 'import pytest\n'), ((5868, 5901), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (5881, 5901), False, 'import pytest\n'), ((6166, 6199), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (6179, 6199), False, 'import pytest\n'), ((6461, 6494), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (6474, 6494), False, 'import pytest\n'), ((6753, 6786), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (6766, 6786), False, 'import pytest\n'), ((7046, 7079), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (7059, 7079), False, 'import pytest\n'), ((7342, 7375), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (7355, 7375), False, 'import pytest\n'), ((7635, 7668), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (7648, 7668), False, 'import pytest\n'), ((7931, 7964), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (7944, 7964), False, 'import pytest\n'), ((8219, 8252), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (8232, 8252), False, 'import pytest\n'), ((8513, 8546), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (8526, 8546), False, 'import pytest\n'), ((8821, 8854), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (8834, 8854), False, 'import pytest\n'), ((9122, 9155), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (9135, 9155), False, 'import 
pytest\n'), ((9410, 9443), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (9423, 9443), False, 'import pytest\n'), ((9707, 9740), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (9720, 9740), False, 'import pytest\n'), ((10001, 10034), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (10014, 10034), False, 'import pytest\n'), ((10231, 10264), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (10244, 10264), False, 'import pytest\n'), ((10455, 10488), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (10468, 10488), False, 'import pytest\n'), ((10685, 10718), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (10698, 10718), False, 'import pytest\n'), ((10915, 10948), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (10928, 10948), False, 'import pytest\n'), ((11151, 11184), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (11164, 11184), False, 'import pytest\n'), ((11418, 11451), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (11431, 11451), False, 'import pytest\n'), ((11641, 11674), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (11654, 11674), False, 'import pytest\n'), ((11877, 11910), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (11890, 11910), False, 'import pytest\n'), ((12113, 12146), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (12126, 12146), False, 'import pytest\n'), ((12330, 12363), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (12343, 12363), False, 'import pytest\n'), ((12539, 12572), 'pytest.raises', 
'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (12552, 12572), False, 'import pytest\n'), ((12758, 12791), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (12771, 12791), False, 'import pytest\n'), ((12988, 13021), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (13001, 13021), False, 'import pytest\n'), ((13206, 13239), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (13219, 13239), False, 'import pytest\n'), ((13425, 13458), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (13438, 13458), False, 'import pytest\n'), ((13663, 13696), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (13676, 13696), False, 'import pytest\n'), ((13894, 13927), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (13907, 13927), False, 'import pytest\n'), ((14123, 14156), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (14136, 14156), False, 'import pytest\n'), ((14349, 14382), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (14362, 14382), False, 'import pytest\n'), ((14583, 14616), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (14596, 14616), False, 'import pytest\n'), ((14863, 14896), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (14876, 14896), False, 'import pytest\n'), ((15140, 15173), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (15153, 15173), False, 'import pytest\n'), ((15423, 15456), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (15436, 15456), False, 'import pytest\n'), ((15693, 15726), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), 
'(pylark.PyLarkError)\n', (15706, 15726), False, 'import pytest\n'), ((15914, 15947), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (15927, 15947), False, 'import pytest\n'), ((16140, 16173), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (16153, 16173), False, 'import pytest\n'), ((16361, 16394), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (16374, 16394), False, 'import pytest\n'), ((16587, 16620), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (16600, 16620), False, 'import pytest\n'), ((16808, 16841), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (16821, 16841), False, 'import pytest\n'), ((17035, 17068), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (17048, 17068), False, 'import pytest\n'), ((17255, 17288), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (17268, 17288), False, 'import pytest\n'), ((17482, 17515), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (17495, 17515), False, 'import pytest\n'), ((17703, 17736), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (17716, 17736), False, 'import pytest\n'), ((17907, 17940), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (17920, 17940), False, 'import pytest\n'), ((18133, 18166), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (18146, 18166), False, 'import pytest\n'), ((18416, 18449), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (18429, 18449), False, 'import pytest\n'), ((18696, 18729), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (18709, 18729), False, 
'import pytest\n'), ((18982, 19015), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (18995, 19015), False, 'import pytest\n'), ((19271, 19304), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (19284, 19304), False, 'import pytest\n'), ((19563, 19596), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (19576, 19596), False, 'import pytest\n'), ((19852, 19885), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (19865, 19885), False, 'import pytest\n'), ((20147, 20180), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (20160, 20180), False, 'import pytest\n'), ((20447, 20480), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (20460, 20480), False, 'import pytest\n'), ((20756, 20789), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (20769, 20789), False, 'import pytest\n'), ((21065, 21098), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (21078, 21098), False, 'import pytest\n'), ((21371, 21404), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (21384, 21404), False, 'import pytest\n'), ((21656, 21689), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (21669, 21689), False, 'import pytest\n'), ((21883, 21916), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (21896, 21916), False, 'import pytest\n'), ((22110, 22143), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (22123, 22143), False, 'import pytest\n'), ((22334, 22367), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (22347, 22367), False, 'import pytest\n'), ((22560, 22593), 
'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (22573, 22593), False, 'import pytest\n'), ((22801, 22834), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (22814, 22834), False, 'import pytest\n'), ((23042, 23075), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (23055, 23075), False, 'import pytest\n'), ((23280, 23313), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (23293, 23313), False, 'import pytest\n'), ((23514, 23547), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (23527, 23547), False, 'import pytest\n'), ((23763, 23796), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (23776, 23796), False, 'import pytest\n'), ((24063, 24096), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (24076, 24096), False, 'import pytest\n'), ((24363, 24396), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (24376, 24396), False, 'import pytest\n'), ((24660, 24693), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (24673, 24693), False, 'import pytest\n'), ((24953, 24986), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (24966, 24986), False, 'import pytest\n'), ((25241, 25274), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (25254, 25274), False, 'import pytest\n'), ((25482, 25515), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (25495, 25515), False, 'import pytest\n'), ((25723, 25756), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (25736, 25756), False, 'import pytest\n'), ((25961, 25994), 'pytest.raises', 'pytest.raises', 
(['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (25974, 25994), False, 'import pytest\n'), ((26195, 26228), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (26208, 26228), False, 'import pytest\n'), ((26429, 26462), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (26442, 26462), False, 'import pytest\n'), ((26650, 26683), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (26663, 26683), False, 'import pytest\n'), ((26873, 26906), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (26886, 26906), False, 'import pytest\n'), ((27143, 27176), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (27156, 27176), False, 'import pytest\n'), ((27370, 27403), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (27383, 27403), False, 'import pytest\n'), ((27590, 27623), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (27603, 27623), False, 'import pytest\n'), ((27808, 27841), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (27821, 27841), False, 'import pytest\n'), ((28021, 28054), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (28034, 28054), False, 'import pytest\n'), ((28644, 28677), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (28657, 28677), False, 'import pytest\n'), ((29037, 29070), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (29050, 29070), False, 'import pytest\n'), ((29419, 29452), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (29432, 29452), False, 'import pytest\n'), ((29801, 29834), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), 
'(pylark.PyLarkError)\n', (29814, 29834), False, 'import pytest\n'), ((30207, 30240), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (30220, 30240), False, 'import pytest\n'), ((30618, 30651), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (30631, 30651), False, 'import pytest\n'), ((31024, 31057), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (31037, 31057), False, 'import pytest\n'), ((31450, 31483), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (31463, 31483), False, 'import pytest\n'), ((31917, 31950), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (31930, 31950), False, 'import pytest\n'), ((32382, 32415), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (32395, 32415), False, 'import pytest\n'), ((32829, 32862), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (32842, 32862), False, 'import pytest\n'), ((33223, 33256), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (33236, 33256), False, 'import pytest\n'), ((33635, 33668), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (33648, 33668), False, 'import pytest\n'), ((34091, 34124), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (34104, 34124), False, 'import pytest\n'), ((34514, 34547), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (34527, 34547), False, 'import pytest\n'), ((34931, 34964), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (34944, 34964), False, 'import pytest\n'), ((35331, 35364), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (35344, 35364), False, 
'import pytest\n'), ((35749, 35782), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (35762, 35782), False, 'import pytest\n'), ((36211, 36244), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (36224, 36244), False, 'import pytest\n'), ((36640, 36673), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (36653, 36673), False, 'import pytest\n'), ((37132, 37165), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (37145, 37165), False, 'import pytest\n'), ((37644, 37677), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (37657, 37677), False, 'import pytest\n'), ((38151, 38184), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (38164, 38184), False, 'import pytest\n'), ((38651, 38684), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (38664, 38684), False, 'import pytest\n'), ((39158, 39191), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (39171, 39191), False, 'import pytest\n'), ((39664, 39697), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (39677, 39697), False, 'import pytest\n'), ((40171, 40204), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (40184, 40204), False, 'import pytest\n'), ((40677, 40710), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (40690, 40710), False, 'import pytest\n'), ((41169, 41202), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (41182, 41202), False, 'import pytest\n'), ((41682, 41715), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (41695, 41715), False, 'import pytest\n'), ((42217, 42250), 
'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (42230, 42250), False, 'import pytest\n'), ((42731, 42764), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (42744, 42764), False, 'import pytest\n'), ((43225, 43258), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (43238, 43258), False, 'import pytest\n'), ((43744, 43777), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (43757, 43777), False, 'import pytest\n'), ((44236, 44269), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (44249, 44269), False, 'import pytest\n'), ((44638, 44671), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (44651, 44671), False, 'import pytest\n'), ((45035, 45068), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (45048, 45068), False, 'import pytest\n'), ((45441, 45474), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (45454, 45474), False, 'import pytest\n'), ((45847, 45880), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (45860, 45880), False, 'import pytest\n'), ((46271, 46304), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (46284, 46304), False, 'import pytest\n'), ((46712, 46745), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (46725, 46745), False, 'import pytest\n'), ((47109, 47142), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (47122, 47142), False, 'import pytest\n'), ((47532, 47565), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (47545, 47565), False, 'import pytest\n'), ((47945, 47978), 'pytest.raises', 'pytest.raises', 
(['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (47958, 47978), False, 'import pytest\n'), ((48320, 48353), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (48333, 48353), False, 'import pytest\n'), ((48685, 48718), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (48698, 48718), False, 'import pytest\n'), ((49076, 49109), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (49089, 49109), False, 'import pytest\n'), ((49479, 49512), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (49492, 49512), False, 'import pytest\n'), ((49855, 49888), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (49868, 49888), False, 'import pytest\n'), ((50250, 50283), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (50263, 50283), False, 'import pytest\n'), ((50670, 50703), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (50683, 50703), False, 'import pytest\n'), ((51078, 51111), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (51091, 51111), False, 'import pytest\n'), ((51481, 51514), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (51494, 51514), False, 'import pytest\n'), ((51878, 51911), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (51891, 51911), False, 'import pytest\n'), ((52302, 52335), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (52315, 52335), False, 'import pytest\n'), ((52776, 52809), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (52789, 52809), False, 'import pytest\n'), ((53250, 53283), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), 
'(pylark.PyLarkError)\n', (53263, 53283), False, 'import pytest\n'), ((53733, 53766), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (53746, 53766), False, 'import pytest\n'), ((54177, 54210), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (54190, 54210), False, 'import pytest\n'), ((54571, 54604), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (54584, 54604), False, 'import pytest\n'), ((54964, 54997), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (54977, 54997), False, 'import pytest\n'), ((55358, 55391), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (55371, 55391), False, 'import pytest\n'), ((55751, 55784), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (55764, 55784), False, 'import pytest\n'), ((56145, 56178), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (56158, 56178), False, 'import pytest\n'), ((56541, 56574), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (56554, 56574), False, 'import pytest\n'), ((56929, 56962), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (56942, 56962), False, 'import pytest\n'), ((57332, 57365), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (57345, 57365), False, 'import pytest\n'), ((57710, 57743), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (57723, 57743), False, 'import pytest\n'), ((58066, 58099), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (58079, 58099), False, 'import pytest\n'), ((58479, 58512), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (58492, 58512), False, 
'import pytest\n'), ((58959, 58992), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (58972, 58992), False, 'import pytest\n'), ((59439, 59472), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (59452, 59472), False, 'import pytest\n'), ((59928, 59961), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (59941, 59961), False, 'import pytest\n'), ((60426, 60459), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (60439, 60459), False, 'import pytest\n'), ((60924, 60957), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (60937, 60957), False, 'import pytest\n'), ((61422, 61455), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (61435, 61455), False, 'import pytest\n'), ((61929, 61962), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (61942, 61962), False, 'import pytest\n'), ((62451, 62484), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (62464, 62484), False, 'import pytest\n'), ((62987, 63020), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (63000, 63020), False, 'import pytest\n'), ((63523, 63556), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (63536, 63556), False, 'import pytest\n'), ((64050, 64083), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (64063, 64083), False, 'import pytest\n'), ((64523, 64556), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (64536, 64556), False, 'import pytest\n'), ((64923, 64956), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (64936, 64956), False, 'import pytest\n'), ((65323, 65356), 
'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (65336, 65356), False, 'import pytest\n'), ((65714, 65747), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (65727, 65747), False, 'import pytest\n'), ((66120, 66153), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (66133, 66153), False, 'import pytest\n'), ((66549, 66582), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (66562, 66582), False, 'import pytest\n'), ((66978, 67011), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (66991, 67011), False, 'import pytest\n'), ((67398, 67431), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (67411, 67431), False, 'import pytest\n'), ((67815, 67848), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (67828, 67848), False, 'import pytest\n'), ((68271, 68304), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (68284, 68304), False, 'import pytest\n'), ((68789, 68822), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (68802, 68822), False, 'import pytest\n'), ((69307, 69340), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (69320, 69340), False, 'import pytest\n'), ((69816, 69849), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (69829, 69849), False, 'import pytest\n'), ((70322, 70355), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (70335, 70355), False, 'import pytest\n'), ((70807, 70840), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (70820, 70840), False, 'import pytest\n'), ((71236, 71269), 'pytest.raises', 'pytest.raises', 
(['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (71249, 71269), False, 'import pytest\n'), ((71665, 71698), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (71678, 71698), False, 'import pytest\n'), ((72085, 72118), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (72098, 72118), False, 'import pytest\n'), ((72502, 72535), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (72515, 72535), False, 'import pytest\n'), ((72913, 72946), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (72926, 72946), False, 'import pytest\n'), ((73297, 73330), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (73310, 73330), False, 'import pytest\n'), ((73700, 73733), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (73713, 73733), False, 'import pytest\n'), ((74153, 74186), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (74166, 74186), False, 'import pytest\n'), ((74549, 74582), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (74562, 74582), False, 'import pytest\n'), ((74937, 74970), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (74950, 74970), False, 'import pytest\n'), ((75315, 75348), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (75328, 75348), False, 'import pytest\n'), ((75691, 75724), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (75704, 75724), False, 'import pytest\n'), ((76316, 76349), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (76329, 76349), False, 'import pytest\n'), ((76636, 76669), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), 
'(pylark.PyLarkError)\n', (76649, 76669), False, 'import pytest\n'), ((77036, 77069), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (77049, 77069), False, 'import pytest\n'), ((77432, 77465), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (77445, 77465), False, 'import pytest\n'), ((77837, 77870), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (77850, 77870), False, 'import pytest\n'), ((78257, 78290), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (78270, 78290), False, 'import pytest\n'), ((78667, 78700), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (78680, 78700), False, 'import pytest\n'), ((79085, 79118), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (79098, 79118), False, 'import pytest\n'), ((79456, 79489), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (79469, 79489), False, 'import pytest\n'), ((79881, 79914), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (79894, 79914), False, 'import pytest\n'), ((80298, 80331), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (80311, 80331), False, 'import pytest\n'), ((80702, 80735), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (80715, 80735), False, 'import pytest\n'), ((81027, 81060), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (81040, 81060), False, 'import pytest\n'), ((81394, 81427), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (81407, 81427), False, 'import pytest\n'), ((81727, 81760), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (81740, 81760), False, 
'import pytest\n'), ((82060, 82093), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (82073, 82093), False, 'import pytest\n'), ((82467, 82500), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (82480, 82500), False, 'import pytest\n'), ((82795, 82828), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (82808, 82828), False, 'import pytest\n'), ((83165, 83198), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (83178, 83198), False, 'import pytest\n'), ((83501, 83534), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (83514, 83534), False, 'import pytest\n'), ((83880, 83913), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (83893, 83913), False, 'import pytest\n'), ((84274, 84307), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (84287, 84307), False, 'import pytest\n'), ((84665, 84698), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (84678, 84698), False, 'import pytest\n'), ((85053, 85086), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (85066, 85086), False, 'import pytest\n'), ((85490, 85523), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (85503, 85523), False, 'import pytest\n'), ((85882, 85915), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (85895, 85915), False, 'import pytest\n'), ((86354, 86387), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (86367, 86387), False, 'import pytest\n'), ((86746, 86779), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (86759, 86779), False, 'import pytest\n'), ((87213, 87246), 
'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (87226, 87246), False, 'import pytest\n'), ((87603, 87636), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (87616, 87636), False, 'import pytest\n'), ((88007, 88040), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (88020, 88040), False, 'import pytest\n'), ((88404, 88437), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (88417, 88437), False, 'import pytest\n'), ((88788, 88821), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (88801, 88821), False, 'import pytest\n'), ((89229, 89262), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (89242, 89262), False, 'import pytest\n'), ((89619, 89652), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (89632, 89652), False, 'import pytest\n'), ((90028, 90061), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (90041, 90061), False, 'import pytest\n'), ((90467, 90500), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (90480, 90500), False, 'import pytest\n'), ((90876, 90909), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (90889, 90909), False, 'import pytest\n'), ((91355, 91388), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (91368, 91388), False, 'import pytest\n'), ((91840, 91873), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (91853, 91873), False, 'import pytest\n'), ((92292, 92325), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (92305, 92325), False, 'import pytest\n'), ((92611, 92644), 'pytest.raises', 'pytest.raises', 
(['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (92624, 92644), False, 'import pytest\n'), ((93025, 93058), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (93038, 93058), False, 'import pytest\n'), ((93439, 93472), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (93452, 93472), False, 'import pytest\n'), ((93834, 93867), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (93847, 93867), False, 'import pytest\n'), ((94139, 94172), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (94152, 94172), False, 'import pytest\n'), ((94544, 94577), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (94557, 94577), False, 'import pytest\n'), ((94960, 94993), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (94973, 94993), False, 'import pytest\n'), ((95364, 95397), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (95377, 95397), False, 'import pytest\n'), ((95679, 95712), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (95692, 95712), False, 'import pytest\n'), ((96013, 96046), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (96026, 96046), False, 'import pytest\n'), ((96419, 96452), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (96432, 96452), False, 'import pytest\n'), ((96868, 96901), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (96881, 96901), False, 'import pytest\n'), ((97280, 97313), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (97293, 97313), False, 'import pytest\n'), ((97700, 97733), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), 
'(pylark.PyLarkError)\n', (97713, 97733), False, 'import pytest\n'), ((98136, 98169), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (98149, 98169), False, 'import pytest\n'), ((98569, 98602), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (98582, 98602), False, 'import pytest\n'), ((99008, 99041), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (99021, 99041), False, 'import pytest\n'), ((99434, 99467), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (99447, 99467), False, 'import pytest\n'), ((99873, 99906), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (99886, 99906), False, 'import pytest\n'), ((100285, 100318), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (100298, 100318), False, 'import pytest\n'), ((100692, 100725), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (100705, 100725), False, 'import pytest\n'), ((101104, 101137), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (101117, 101137), False, 'import pytest\n'), ((101511, 101544), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (101524, 101544), False, 'import pytest\n'), ((101924, 101957), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (101937, 101957), False, 'import pytest\n'), ((102330, 102363), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (102343, 102363), False, 'import pytest\n'), ((102743, 102776), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (102756, 102776), False, 'import pytest\n'), ((103150, 103183), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), 
'(pylark.PyLarkError)\n', (103163, 103183), False, 'import pytest\n'), ((103574, 103607), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (103587, 103607), False, 'import pytest\n'), ((104020, 104053), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (104033, 104053), False, 'import pytest\n'), ((104459, 104492), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (104472, 104492), False, 'import pytest\n'), ((104895, 104928), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (104908, 104928), False, 'import pytest\n'), ((105337, 105370), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (105350, 105370), False, 'import pytest\n'), ((105782, 105815), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (105795, 105815), False, 'import pytest\n'), ((106230, 106263), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (106243, 106263), False, 'import pytest\n'), ((106675, 106708), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (106688, 106708), False, 'import pytest\n'), ((107126, 107159), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (107139, 107159), False, 'import pytest\n'), ((107582, 107615), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (107595, 107615), False, 'import pytest\n'), ((108047, 108080), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (108060, 108080), False, 'import pytest\n'), ((108512, 108545), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (108525, 108545), False, 'import pytest\n'), ((109050, 109083), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], 
{}), '(pylark.PyLarkError)\n', (109063, 109083), False, 'import pytest\n'), ((109491, 109524), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (109504, 109524), False, 'import pytest\n'), ((109938, 109971), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (109951, 109971), False, 'import pytest\n'), ((110385, 110418), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (110398, 110418), False, 'import pytest\n'), ((110829, 110862), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (110842, 110862), False, 'import pytest\n'), ((111275, 111308), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (111288, 111308), False, 'import pytest\n'), ((111736, 111769), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (111749, 111769), False, 'import pytest\n'), ((112237, 112270), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (112250, 112270), False, 'import pytest\n'), ((112735, 112768), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (112748, 112768), False, 'import pytest\n'), ((113229, 113262), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (113242, 113262), False, 'import pytest\n'), ((113698, 113731), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (113711, 113731), False, 'import pytest\n'), ((114228, 114261), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (114241, 114261), False, 'import pytest\n'), ((114796, 114829), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (114809, 114829), False, 'import pytest\n'), ((115361, 115394), 'pytest.raises', 'pytest.raises', 
(['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (115374, 115394), False, 'import pytest\n'), ((115922, 115955), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (115935, 115955), False, 'import pytest\n'), ((116440, 116473), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (116453, 116473), False, 'import pytest\n'), ((116901, 116934), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (116914, 116934), False, 'import pytest\n'), ((117402, 117435), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (117415, 117435), False, 'import pytest\n'), ((117900, 117933), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (117913, 117933), False, 'import pytest\n'), ((118394, 118427), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (118407, 118427), False, 'import pytest\n'), ((118848, 118881), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (118861, 118881), False, 'import pytest\n'), ((119165, 119198), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (119178, 119198), False, 'import pytest\n'), ((119565, 119598), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (119578, 119598), False, 'import pytest\n'), ((119982, 120015), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (119995, 120015), False, 'import pytest\n'), ((120386, 120419), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (120399, 120419), False, 'import pytest\n'), ((120783, 120816), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (120796, 120816), False, 'import pytest\n'), ((121178, 121211), 'pytest.raises', 
'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (121191, 121211), False, 'import pytest\n'), ((121487, 121520), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (121500, 121520), False, 'import pytest\n'), ((122163, 122196), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (122176, 122196), False, 'import pytest\n'), ((122423, 122456), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (122436, 122456), False, 'import pytest\n'), ((122763, 122796), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (122776, 122796), False, 'import pytest\n'), ((123099, 123132), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (123112, 123132), False, 'import pytest\n'), ((123444, 123477), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (123457, 123477), False, 'import pytest\n'), ((123804, 123837), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (123817, 123837), False, 'import pytest\n'), ((124154, 124187), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (124167, 124187), False, 'import pytest\n'), ((124512, 124545), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (124525, 124545), False, 'import pytest\n'), ((124823, 124856), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (124836, 124856), False, 'import pytest\n'), ((125188, 125221), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (125201, 125221), False, 'import pytest\n'), ((125545, 125578), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (125558, 125578), False, 'import pytest\n'), ((125889, 125922), 
'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (125902, 125922), False, 'import pytest\n'), ((126154, 126187), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (126167, 126187), False, 'import pytest\n'), ((126461, 126494), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (126474, 126494), False, 'import pytest\n'), ((126734, 126767), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (126747, 126767), False, 'import pytest\n'), ((127007, 127040), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (127020, 127040), False, 'import pytest\n'), ((127354, 127387), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (127367, 127387), False, 'import pytest\n'), ((127622, 127655), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (127635, 127655), False, 'import pytest\n'), ((127932, 127965), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (127945, 127965), False, 'import pytest\n'), ((128208, 128241), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (128221, 128241), False, 'import pytest\n'), ((128527, 128560), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (128540, 128560), False, 'import pytest\n'), ((128861, 128894), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (128874, 128894), False, 'import pytest\n'), ((129192, 129225), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (129205, 129225), False, 'import pytest\n'), ((129520, 129553), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (129533, 129553), False, 'import pytest\n'), ((129897, 
129930), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (129910, 129930), False, 'import pytest\n'), ((130229, 130262), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (130242, 130262), False, 'import pytest\n'), ((130641, 130674), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (130654, 130674), False, 'import pytest\n'), ((130973, 131006), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (130986, 131006), False, 'import pytest\n'), ((131380, 131413), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (131393, 131413), False, 'import pytest\n'), ((131710, 131743), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (131723, 131743), False, 'import pytest\n'), ((132054, 132087), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (132067, 132087), False, 'import pytest\n'), ((132391, 132424), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (132404, 132424), False, 'import pytest\n'), ((132715, 132748), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (132728, 132748), False, 'import pytest\n'), ((133096, 133129), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (133109, 133129), False, 'import pytest\n'), ((133426, 133459), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (133439, 133459), False, 'import pytest\n'), ((133775, 133808), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (133788, 133808), False, 'import pytest\n'), ((134154, 134187), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (134167, 134187), False, 'import pytest\n'), 
((134503, 134536), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (134516, 134536), False, 'import pytest\n'), ((134922, 134955), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (134935, 134955), False, 'import pytest\n'), ((135347, 135380), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (135360, 135380), False, 'import pytest\n'), ((135739, 135772), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (135752, 135772), False, 'import pytest\n'), ((135998, 136031), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (136011, 136031), False, 'import pytest\n'), ((136352, 136385), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (136365, 136385), False, 'import pytest\n'), ((136706, 136739), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (136719, 136739), False, 'import pytest\n'), ((137041, 137074), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (137054, 137074), False, 'import pytest\n'), ((137286, 137319), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (137299, 137319), False, 'import pytest\n'), ((137631, 137664), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (137644, 137664), False, 'import pytest\n'), ((137987, 138020), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (138000, 138020), False, 'import pytest\n'), ((138331, 138364), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (138344, 138364), False, 'import pytest\n'), ((138586, 138619), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (138599, 138619), False, 'import 
pytest\n'), ((138860, 138893), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (138873, 138893), False, 'import pytest\n'), ((139206, 139239), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (139219, 139239), False, 'import pytest\n'), ((139595, 139628), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (139608, 139628), False, 'import pytest\n'), ((139947, 139980), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (139960, 139980), False, 'import pytest\n'), ((140307, 140340), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (140320, 140340), False, 'import pytest\n'), ((140683, 140716), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (140696, 140716), False, 'import pytest\n'), ((141056, 141089), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (141069, 141089), False, 'import pytest\n'), ((141435, 141468), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (141448, 141468), False, 'import pytest\n'), ((141801, 141834), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (141814, 141834), False, 'import pytest\n'), ((142180, 142213), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (142193, 142213), False, 'import pytest\n'), ((142532, 142565), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (142545, 142565), False, 'import pytest\n'), ((142879, 142912), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (142892, 142912), False, 'import pytest\n'), ((143231, 143264), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (143244, 143264), False, 
'import pytest\n'), ((143578, 143611), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (143591, 143611), False, 'import pytest\n'), ((143931, 143964), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (143944, 143964), False, 'import pytest\n'), ((144277, 144310), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (144290, 144310), False, 'import pytest\n'), ((144630, 144663), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (144643, 144663), False, 'import pytest\n'), ((144977, 145010), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (144990, 145010), False, 'import pytest\n'), ((145341, 145374), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (145354, 145374), False, 'import pytest\n'), ((145727, 145760), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (145740, 145760), False, 'import pytest\n'), ((146106, 146139), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (146119, 146139), False, 'import pytest\n'), ((146482, 146515), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (146495, 146515), False, 'import pytest\n'), ((146864, 146897), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (146877, 146897), False, 'import pytest\n'), ((147249, 147282), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (147262, 147282), False, 'import pytest\n'), ((147637, 147670), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (147650, 147670), False, 'import pytest\n'), ((148022, 148055), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (148035, 148055), 
False, 'import pytest\n'), ((148413, 148446), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (148426, 148446), False, 'import pytest\n'), ((148809, 148842), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (148822, 148842), False, 'import pytest\n'), ((149214, 149247), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (149227, 149247), False, 'import pytest\n'), ((149619, 149652), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (149632, 149652), False, 'import pytest\n'), ((150097, 150130), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (150110, 150130), False, 'import pytest\n'), ((150478, 150511), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (150491, 150511), False, 'import pytest\n'), ((150865, 150898), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (150878, 150898), False, 'import pytest\n'), ((151252, 151285), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (151265, 151285), False, 'import pytest\n'), ((151636, 151669), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (151649, 151669), False, 'import pytest\n'), ((152022, 152055), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (152035, 152055), False, 'import pytest\n'), ((152423, 152456), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (152436, 152456), False, 'import pytest\n'), ((152864, 152897), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (152877, 152897), False, 'import pytest\n'), ((153302, 153335), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (153315, 
153335), False, 'import pytest\n'), ((153736, 153769), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (153749, 153769), False, 'import pytest\n'), ((154145, 154178), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (154158, 154178), False, 'import pytest\n'), ((154615, 154648), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (154628, 154648), False, 'import pytest\n'), ((155123, 155156), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (155136, 155156), False, 'import pytest\n'), ((155628, 155661), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (155641, 155661), False, 'import pytest\n'), ((156129, 156162), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (156142, 156162), False, 'import pytest\n'), ((156587, 156620), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (156600, 156620), False, 'import pytest\n'), ((156988, 157021), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (157001, 157021), False, 'import pytest\n'), ((157429, 157462), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (157442, 157462), False, 'import pytest\n'), ((157867, 157900), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (157880, 157900), False, 'import pytest\n'), ((158301, 158334), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (158314, 158334), False, 'import pytest\n'), ((158695, 158728), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (158708, 158728), False, 'import pytest\n'), ((158952, 158985), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', 
(158965, 158985), False, 'import pytest\n'), ((159292, 159325), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (159305, 159325), False, 'import pytest\n'), ((159649, 159682), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (159662, 159682), False, 'import pytest\n'), ((159993, 160026), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (160006, 160026), False, 'import pytest\n'), ((160330, 160363), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (160343, 160363), False, 'import pytest\n'), ((160665, 160698), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (160678, 160698), False, 'import pytest\n'), ((160914, 160947), 'pytest.raises', 'pytest.raises', (['pylark.PyLarkError'], {}), '(pylark.PyLarkError)\n', (160927, 160947), False, 'import pytest\n'), ((1096, 1124), 'pylark.GetDriveFileMetaReq', 'pylark.GetDriveFileMetaReq', ([], {}), '()\n', (1122, 1124), False, 'import pylark\n'), ((1318, 1345), 'pylark.CreateDriveFileReq', 'pylark.CreateDriveFileReq', ([], {}), '()\n', (1343, 1345), False, 'import pylark\n'), ((1535, 1560), 'pylark.CopyDriveFileReq', 'pylark.CopyDriveFileReq', ([], {}), '()\n', (1558, 1560), False, 'import pylark\n'), ((1754, 1781), 'pylark.DeleteDriveFileReq', 'pylark.DeleteDriveFileReq', ([], {}), '()\n', (1779, 1781), False, 'import pylark\n'), ((1987, 2019), 'pylark.DeleteDriveSheetFileReq', 'pylark.DeleteDriveSheetFileReq', ([], {}), '()\n', (2017, 2019), False, 'import pylark\n'), ((2217, 2246), 'pylark.CreateDriveFolderReq', 'pylark.CreateDriveFolderReq', ([], {}), '()\n', (2244, 2246), False, 'import pylark\n'), ((2448, 2478), 'pylark.GetDriveFolderMetaReq', 'pylark.GetDriveFolderMetaReq', ([], {}), '()\n', (2476, 2478), False, 'import pylark\n'), ((2707, 2741), 'pylark.GetDriveRootFolderMetaReq', 'pylark.GetDriveRootFolderMetaReq', 
([], {}), '()\n', (2739, 2741), False, 'import pylark\n'), ((2981, 3015), 'pylark.GetDriveFolderChildrenReq', 'pylark.GetDriveFolderChildrenReq', ([], {}), '()\n', (3013, 3015), False, 'import pylark\n'), ((3255, 3289), 'pylark.GetDriveFileStatisticsReq', 'pylark.GetDriveFileStatisticsReq', ([], {}), '()\n', (3287, 3289), False, 'import pylark\n'), ((3500, 3529), 'pylark.DownloadDriveFileReq', 'pylark.DownloadDriveFileReq', ([], {}), '()\n', (3527, 3529), False, 'import pylark\n'), ((3723, 3750), 'pylark.UploadDriveFileReq', 'pylark.UploadDriveFileReq', ([], {}), '()\n', (3748, 3750), False, 'import pylark\n'), ((3977, 4011), 'pylark.PrepareUploadDriveFileReq', 'pylark.PrepareUploadDriveFileReq', ([], {}), '()\n', (4009, 4011), False, 'import pylark\n'), ((4228, 4259), 'pylark.PartUploadDriveFileReq', 'pylark.PartUploadDriveFileReq', ([], {}), '()\n', (4257, 4259), False, 'import pylark\n'), ((4467, 4500), 'pylark.FinishUploadDriveFileReq', 'pylark.FinishUploadDriveFileReq', ([], {}), '()\n', (4498, 4500), False, 'import pylark\n'), ((4700, 4730), 'pylark.DownloadDriveMediaReq', 'pylark.DownloadDriveMediaReq', ([], {}), '()\n', (4728, 4730), False, 'import pylark\n'), ((4926, 4954), 'pylark.UploadDriveMediaReq', 'pylark.UploadDriveMediaReq', ([], {}), '()\n', (4952, 4954), False, 'import pylark\n'), ((5183, 5218), 'pylark.PrepareUploadDriveMediaReq', 'pylark.PrepareUploadDriveMediaReq', ([], {}), '()\n', (5216, 5218), False, 'import pylark\n'), ((5437, 5469), 'pylark.PartUploadDriveMediaReq', 'pylark.PartUploadDriveMediaReq', ([], {}), '()\n', (5467, 5469), False, 'import pylark\n'), ((5696, 5730), 'pylark.FinishUploadDriveMediaReq', 'pylark.FinishUploadDriveMediaReq', ([], {}), '()\n', (5728, 5730), False, 'import pylark\n'), ((5988, 6030), 'pylark.CreateDriveMemberPermissionOldReq', 'pylark.CreateDriveMemberPermissionOldReq', ([], {}), '()\n', (6028, 6030), False, 'import pylark\n'), ((6284, 6325), 'pylark.TransferDriveMemberPermissionReq', 
'pylark.TransferDriveMemberPermissionReq', ([], {}), '()\n', (6323, 6325), False, 'import pylark\n'), ((6579, 6619), 'pylark.GetDriveMemberPermissionListReq', 'pylark.GetDriveMemberPermissionListReq', ([], {}), '()\n', (6617, 6619), False, 'import pylark\n'), ((6869, 6908), 'pylark.CreateDriveMemberPermissionReq', 'pylark.CreateDriveMemberPermissionReq', ([], {}), '()\n', (6906, 6908), False, 'import pylark\n'), ((7166, 7208), 'pylark.DeleteDriveMemberPermissionOldReq', 'pylark.DeleteDriveMemberPermissionOldReq', ([], {}), '()\n', (7206, 7208), False, 'import pylark\n'), ((7458, 7497), 'pylark.DeleteDriveMemberPermissionReq', 'pylark.DeleteDriveMemberPermissionReq', ([], {}), '()\n', (7495, 7497), False, 'import pylark\n'), ((7755, 7797), 'pylark.UpdateDriveMemberPermissionOldReq', 'pylark.UpdateDriveMemberPermissionOldReq', ([], {}), '()\n', (7795, 7797), False, 'import pylark\n'), ((8047, 8086), 'pylark.UpdateDriveMemberPermissionReq', 'pylark.UpdateDriveMemberPermissionReq', ([], {}), '()\n', (8084, 8086), False, 'import pylark\n'), ((8334, 8372), 'pylark.CheckDriveMemberPermissionReq', 'pylark.CheckDriveMemberPermissionReq', ([], {}), '()\n', (8370, 8372), False, 'import pylark\n'), ((8636, 8680), 'pylark.UpdateDrivePublicPermissionV1OldReq', 'pylark.UpdateDrivePublicPermissionV1OldReq', ([], {}), '()\n', (8678, 8680), False, 'import pylark\n'), ((8944, 8988), 'pylark.UpdateDrivePublicPermissionV2OldReq', 'pylark.UpdateDrivePublicPermissionV2OldReq', ([], {}), '()\n', (8986, 8988), False, 'import pylark\n'), ((9238, 9276), 'pylark.GetDrivePublicPermissionV2Req', 'pylark.GetDrivePublicPermissionV2Req', ([], {}), '()\n', (9274, 9276), False, 'import pylark\n'), ((9526, 9565), 'pylark.UpdateDrivePublicPermissionReq', 'pylark.UpdateDrivePublicPermissionReq', ([], {}), '()\n', (9563, 9565), False, 'import pylark\n'), ((9831, 9875), 'pylark.BatchGetDriveMediaTmpDownloadURLReq', 'pylark.BatchGetDriveMediaTmpDownloadURLReq', ([], {}), '()\n', (9873, 9875), False, 
'import pylark\n'), ((10092, 10123), 'pylark.GetDriveCommentListReq', 'pylark.GetDriveCommentListReq', ([], {}), '()\n', (10121, 10123), False, 'import pylark\n'), ((10317, 10344), 'pylark.GetDriveCommentReq', 'pylark.GetDriveCommentReq', ([], {}), '()\n', (10342, 10344), False, 'import pylark\n'), ((10544, 10574), 'pylark.CreateDriveCommentReq', 'pylark.CreateDriveCommentReq', ([], {}), '()\n', (10572, 10574), False, 'import pylark\n'), ((10774, 10804), 'pylark.UpdateDriveCommentReq', 'pylark.UpdateDriveCommentReq', ([], {}), '()\n', (10802, 10804), False, 'import pylark\n'), ((11004, 11034), 'pylark.DeleteDriveCommentReq', 'pylark.DeleteDriveCommentReq', ([], {}), '()\n', (11032, 11034), False, 'import pylark\n'), ((11263, 11298), 'pylark.UpdateDriveCommentPatchReq', 'pylark.UpdateDriveCommentPatchReq', ([], {}), '()\n', (11296, 11298), False, 'import pylark\n'), ((11503, 11529), 'pylark.CreateDriveDocReq', 'pylark.CreateDriveDocReq', ([], {}), '()\n', (11527, 11529), False, 'import pylark\n'), ((11731, 11761), 'pylark.GetDriveDocContentReq', 'pylark.GetDriveDocContentReq', ([], {}), '()\n', (11759, 11761), False, 'import pylark\n'), ((11971, 12004), 'pylark.GetDriveDocRawContentReq', 'pylark.GetDriveDocRawContentReq', ([], {}), '()\n', (12002, 12004), False, 'import pylark\n'), ((12200, 12227), 'pylark.GetDriveDocMetaReq', 'pylark.GetDriveDocMetaReq', ([], {}), '()\n', (12225, 12227), False, 'import pylark\n'), ((12411, 12434), 'pylark.CreateSheetReq', 'pylark.CreateSheetReq', ([], {}), '()\n', (12432, 12434), False, 'import pylark\n'), ((12622, 12646), 'pylark.GetSheetMetaReq', 'pylark.GetSheetMetaReq', ([], {}), '()\n', (12644, 12646), False, 'import pylark\n'), ((12848, 12879), 'pylark.UpdateSheetPropertyReq', 'pylark.UpdateSheetPropertyReq', ([], {}), '()\n', (12877, 12879), False, 'import pylark\n'), ((13075, 13103), 'pylark.BatchUpdateSheetReq', 'pylark.BatchUpdateSheetReq', ([], {}), '()\n', (13101, 13103), False, 'import pylark\n'), ((13287, 13310), 
'pylark.ImportSheetReq', 'pylark.ImportSheetReq', ([], {}), '()\n', (13308, 13310), False, 'import pylark\n'), ((13518, 13551), 'pylark.CreateDriveImportTaskReq', 'pylark.CreateDriveImportTaskReq', ([], {}), '()\n', (13549, 13551), False, 'import pylark\n'), ((13753, 13783), 'pylark.GetDriveImportTaskReq', 'pylark.GetDriveImportTaskReq', ([], {}), '()\n', (13781, 13783), False, 'import pylark\n'), ((13983, 14013), 'pylark.MoveSheetDimensionReq', 'pylark.MoveSheetDimensionReq', ([], {}), '()\n', (14011, 14013), False, 'import pylark\n'), ((14211, 14240), 'pylark.PrependSheetValueReq', 'pylark.PrependSheetValueReq', ([], {}), '()\n', (14238, 14240), False, 'import pylark\n'), ((14436, 14464), 'pylark.AppendSheetValueReq', 'pylark.AppendSheetValueReq', ([], {}), '()\n', (14462, 14464), False, 'import pylark\n'), ((14697, 14734), 'pylark.InsertSheetDimensionRangeReq', 'pylark.InsertSheetDimensionRangeReq', ([], {}), '()\n', (14732, 14734), False, 'import pylark\n'), ((14974, 15008), 'pylark.AddSheetDimensionRangeReq', 'pylark.AddSheetDimensionRangeReq', ([], {}), '()\n', (15006, 15008), False, 'import pylark\n'), ((15254, 15291), 'pylark.UpdateSheetDimensionRangeReq', 'pylark.UpdateSheetDimensionRangeReq', ([], {}), '()\n', (15289, 15291), False, 'import pylark\n'), ((15537, 15574), 'pylark.DeleteSheetDimensionRangeReq', 'pylark.DeleteSheetDimensionRangeReq', ([], {}), '()\n', (15572, 15574), False, 'import pylark\n'), ((15777, 15802), 'pylark.GetSheetValueReq', 'pylark.GetSheetValueReq', ([], {}), '()\n', (15800, 15802), False, 'import pylark\n'), ((16004, 16034), 'pylark.BatchGetSheetValueReq', 'pylark.BatchGetSheetValueReq', ([], {}), '()\n', (16032, 16034), False, 'import pylark\n'), ((16224, 16249), 'pylark.SetSheetValueReq', 'pylark.SetSheetValueReq', ([], {}), '()\n', (16247, 16249), False, 'import pylark\n'), ((16451, 16481), 'pylark.BatchSetSheetValueReq', 'pylark.BatchSetSheetValueReq', ([], {}), '()\n', (16479, 16481), False, 'import pylark\n'), ((16671, 
16696), 'pylark.SetSheetStyleReq', 'pylark.SetSheetStyleReq', ([], {}), '()\n', (16694, 16696), False, 'import pylark\n'), ((16898, 16928), 'pylark.BatchSetSheetStyleReq', 'pylark.BatchSetSheetStyleReq', ([], {}), '()\n', (16926, 16928), False, 'import pylark\n'), ((17120, 17146), 'pylark.MergeSheetCellReq', 'pylark.MergeSheetCellReq', ([], {}), '()\n', (17144, 17146), False, 'import pylark\n'), ((17342, 17370), 'pylark.UnmergeSheetCellReq', 'pylark.UnmergeSheetCellReq', ([], {}), '()\n', (17368, 17370), False, 'import pylark\n'), ((17572, 17602), 'pylark.SetSheetValueImageReq', 'pylark.SetSheetValueImageReq', ([], {}), '()\n', (17600, 17602), False, 'import pylark\n'), ((17782, 17803), 'pylark.FindSheetReq', 'pylark.FindSheetReq', ([], {}), '()\n', (17801, 17803), False, 'import pylark\n'), ((17989, 18013), 'pylark.ReplaceSheetReq', 'pylark.ReplaceSheetReq', ([], {}), '()\n', (18011, 18013), False, 'import pylark\n'), ((18248, 18286), 'pylark.CreateSheetConditionFormatReq', 'pylark.CreateSheetConditionFormatReq', ([], {}), '()\n', (18284, 18286), False, 'import pylark\n'), ((18528, 18563), 'pylark.GetSheetConditionFormatReq', 'pylark.GetSheetConditionFormatReq', ([], {}), '()\n', (18561, 18563), False, 'import pylark\n'), ((18811, 18849), 'pylark.UpdateSheetConditionFormatReq', 'pylark.UpdateSheetConditionFormatReq', ([], {}), '()\n', (18847, 18849), False, 'import pylark\n'), ((19097, 19135), 'pylark.DeleteSheetConditionFormatReq', 'pylark.DeleteSheetConditionFormatReq', ([], {}), '()\n', (19133, 19135), False, 'import pylark\n'), ((19389, 19430), 'pylark.CreateSheetProtectedDimensionReq', 'pylark.CreateSheetProtectedDimensionReq', ([], {}), '()\n', (19428, 19430), False, 'import pylark\n'), ((19678, 19716), 'pylark.GetSheetProtectedDimensionReq', 'pylark.GetSheetProtectedDimensionReq', ([], {}), '()\n', (19714, 19716), False, 'import pylark\n'), ((19970, 20011), 'pylark.UpdateSheetProtectedDimensionReq', 'pylark.UpdateSheetProtectedDimensionReq', ([], {}), 
'()\n', (20009, 20011), False, 'import pylark\n'), ((20265, 20306), 'pylark.DeleteSheetProtectedDimensionReq', 'pylark.DeleteSheetProtectedDimensionReq', ([], {}), '()\n', (20304, 20306), False, 'import pylark\n'), ((20570, 20615), 'pylark.CreateSheetDataValidationDropdownReq', 'pylark.CreateSheetDataValidationDropdownReq', ([], {}), '()\n', (20613, 20615), False, 'import pylark\n'), ((20879, 20924), 'pylark.DeleteSheetDataValidationDropdownReq', 'pylark.DeleteSheetDataValidationDropdownReq', ([], {}), '()\n', (20922, 20924), False, 'import pylark\n'), ((21188, 21233), 'pylark.UpdateSheetDataValidationDropdownReq', 'pylark.UpdateSheetDataValidationDropdownReq', ([], {}), '()\n', (21231, 21233), False, 'import pylark\n'), ((21491, 21533), 'pylark.GetSheetDataValidationDropdownReq', 'pylark.GetSheetDataValidationDropdownReq', ([], {}), '()\n', (21531, 21533), False, 'import pylark\n'), ((21744, 21773), 'pylark.CreateSheetFilterReq', 'pylark.CreateSheetFilterReq', ([], {}), '()\n', (21771, 21773), False, 'import pylark\n'), ((21971, 22000), 'pylark.DeleteSheetFilterReq', 'pylark.DeleteSheetFilterReq', ([], {}), '()\n', (21998, 22000), False, 'import pylark\n'), ((22198, 22227), 'pylark.UpdateSheetFilterReq', 'pylark.UpdateSheetFilterReq', ([], {}), '()\n', (22225, 22227), False, 'import pylark\n'), ((22419, 22445), 'pylark.GetSheetFilterReq', 'pylark.GetSheetFilterReq', ([], {}), '()\n', (22443, 22445), False, 'import pylark\n'), ((22653, 22686), 'pylark.CreateSheetFilterViewReq', 'pylark.CreateSheetFilterViewReq', ([], {}), '()\n', (22684, 22686), False, 'import pylark\n'), ((22894, 22927), 'pylark.DeleteSheetFilterViewReq', 'pylark.DeleteSheetFilterViewReq', ([], {}), '()\n', (22925, 22927), False, 'import pylark\n'), ((23135, 23168), 'pylark.UpdateSheetFilterViewReq', 'pylark.UpdateSheetFilterViewReq', ([], {}), '()\n', (23166, 23168), False, 'import pylark\n'), ((23370, 23400), 'pylark.GetSheetFilterViewReq', 'pylark.GetSheetFilterViewReq', ([], {}), '()\n', 
(23398, 23400), False, 'import pylark\n'), ((23606, 23638), 'pylark.QuerySheetFilterViewReq', 'pylark.QuerySheetFilterViewReq', ([], {}), '()\n', (23636, 23638), False, 'import pylark\n'), ((23883, 23925), 'pylark.CreateSheetFilterViewConditionReq', 'pylark.CreateSheetFilterViewConditionReq', ([], {}), '()\n', (23923, 23925), False, 'import pylark\n'), ((24183, 24225), 'pylark.DeleteSheetFilterViewConditionReq', 'pylark.DeleteSheetFilterViewConditionReq', ([], {}), '()\n', (24223, 24225), False, 'import pylark\n'), ((24483, 24525), 'pylark.UpdateSheetFilterViewConditionReq', 'pylark.UpdateSheetFilterViewConditionReq', ([], {}), '()\n', (24523, 24525), False, 'import pylark\n'), ((24777, 24816), 'pylark.GetSheetFilterViewConditionReq', 'pylark.GetSheetFilterViewConditionReq', ([], {}), '()\n', (24814, 24816), False, 'import pylark\n'), ((25072, 25113), 'pylark.QuerySheetFilterViewConditionReq', 'pylark.QuerySheetFilterViewConditionReq', ([], {}), '()\n', (25111, 25113), False, 'import pylark\n'), ((25334, 25367), 'pylark.CreateSheetFloatImageReq', 'pylark.CreateSheetFloatImageReq', ([], {}), '()\n', (25365, 25367), False, 'import pylark\n'), ((25575, 25608), 'pylark.DeleteSheetFloatImageReq', 'pylark.DeleteSheetFloatImageReq', ([], {}), '()\n', (25606, 25608), False, 'import pylark\n'), ((25816, 25849), 'pylark.UpdateSheetFloatImageReq', 'pylark.UpdateSheetFloatImageReq', ([], {}), '()\n', (25847, 25849), False, 'import pylark\n'), ((26051, 26081), 'pylark.GetSheetFloatImageReq', 'pylark.GetSheetFloatImageReq', ([], {}), '()\n', (26079, 26081), False, 'import pylark\n'), ((26287, 26319), 'pylark.QuerySheetFloatImageReq', 'pylark.QuerySheetFloatImageReq', ([], {}), '()\n', (26317, 26319), False, 'import pylark\n'), ((26517, 26545), 'pylark.GetWikiSpaceListReq', 'pylark.GetWikiSpaceListReq', ([], {}), '()\n', (26543, 26545), False, 'import pylark\n'), ((26733, 26757), 'pylark.GetWikiSpaceReq', 'pylark.GetWikiSpaceReq', ([], {}), '()\n', (26755, 26757), False, 'import 
pylark\n'), ((26984, 27018), 'pylark.UpdateWikiSpaceSettingReq', 'pylark.UpdateWikiSpaceSettingReq', ([], {}), '()\n', (27016, 27018), False, 'import pylark\n'), ((27233, 27263), 'pylark.AddWikiSpaceMemberReq', 'pylark.AddWikiSpaceMemberReq', ([], {}), '()\n', (27261, 27263), False, 'import pylark\n'), ((27455, 27481), 'pylark.CreateWikiNodeReq', 'pylark.CreateWikiNodeReq', ([], {}), '()\n', (27479, 27481), False, 'import pylark\n'), ((27677, 27704), 'pylark.GetWikiNodeListReq', 'pylark.GetWikiNodeListReq', ([], {}), '()\n', (27702, 27704), False, 'import pylark\n'), ((27890, 27913), 'pylark.GetWikiNodeReq', 'pylark.GetWikiNodeReq', ([], {}), '()\n', (27911, 27913), False, 'import pylark\n'), ((28107, 28133), 'pylark.MoveDocsToWikiReq', 'pylark.MoveDocsToWikiReq', ([], {}), '()\n', (28131, 28133), False, 'import pylark\n'), ((28732, 28760), 'pylark.GetDriveFileMetaReq', 'pylark.GetDriveFileMetaReq', ([], {}), '()\n', (28758, 28760), False, 'import pylark\n'), ((29123, 29150), 'pylark.CreateDriveFileReq', 'pylark.CreateDriveFileReq', ([], {}), '()\n', (29148, 29150), False, 'import pylark\n'), ((29503, 29528), 'pylark.CopyDriveFileReq', 'pylark.CopyDriveFileReq', ([], {}), '()\n', (29526, 29528), False, 'import pylark\n'), ((29887, 29914), 'pylark.DeleteDriveFileReq', 'pylark.DeleteDriveFileReq', ([], {}), '()\n', (29912, 29914), False, 'import pylark\n'), ((30299, 30331), 'pylark.DeleteDriveSheetFileReq', 'pylark.DeleteDriveSheetFileReq', ([], {}), '()\n', (30329, 30331), False, 'import pylark\n'), ((30706, 30735), 'pylark.CreateDriveFolderReq', 'pylark.CreateDriveFolderReq', ([], {}), '()\n', (30733, 30735), False, 'import pylark\n'), ((31114, 31144), 'pylark.GetDriveFolderMetaReq', 'pylark.GetDriveFolderMetaReq', ([], {}), '()\n', (31142, 31144), False, 'import pylark\n'), ((31562, 31596), 'pylark.GetDriveRootFolderMetaReq', 'pylark.GetDriveRootFolderMetaReq', ([], {}), '()\n', (31594, 31596), False, 'import pylark\n'), ((32028, 32062), 
'pylark.GetDriveFolderChildrenReq', 'pylark.GetDriveFolderChildrenReq', ([], {}), '()\n', (32060, 32062), False, 'import pylark\n'), ((32493, 32527), 'pylark.GetDriveFileStatisticsReq', 'pylark.GetDriveFileStatisticsReq', ([], {}), '()\n', (32525, 32527), False, 'import pylark\n'), ((32917, 32946), 'pylark.DownloadDriveFileReq', 'pylark.DownloadDriveFileReq', ([], {}), '()\n', (32944, 32946), False, 'import pylark\n'), ((33309, 33336), 'pylark.UploadDriveFileReq', 'pylark.UploadDriveFileReq', ([], {}), '()\n', (33334, 33336), False, 'import pylark\n'), ((33746, 33780), 'pylark.PrepareUploadDriveFileReq', 'pylark.PrepareUploadDriveFileReq', ([], {}), '()\n', (33778, 33780), False, 'import pylark\n'), ((34182, 34213), 'pylark.PartUploadDriveFileReq', 'pylark.PartUploadDriveFileReq', ([], {}), '()\n', (34211, 34213), False, 'import pylark\n'), ((34607, 34640), 'pylark.FinishUploadDriveFileReq', 'pylark.FinishUploadDriveFileReq', ([], {}), '()\n', (34638, 34640), False, 'import pylark\n'), ((35020, 35050), 'pylark.DownloadDriveMediaReq', 'pylark.DownloadDriveMediaReq', ([], {}), '()\n', (35048, 35050), False, 'import pylark\n'), ((35418, 35446), 'pylark.UploadDriveMediaReq', 'pylark.UploadDriveMediaReq', ([], {}), '()\n', (35444, 35446), False, 'import pylark\n'), ((35861, 35896), 'pylark.PrepareUploadDriveMediaReq', 'pylark.PrepareUploadDriveMediaReq', ([], {}), '()\n', (35894, 35896), False, 'import pylark\n'), ((36303, 36335), 'pylark.PartUploadDriveMediaReq', 'pylark.PartUploadDriveMediaReq', ([], {}), '()\n', (36333, 36335), False, 'import pylark\n'), ((36751, 36785), 'pylark.FinishUploadDriveMediaReq', 'pylark.FinishUploadDriveMediaReq', ([], {}), '()\n', (36783, 36785), False, 'import pylark\n'), ((37252, 37294), 'pylark.CreateDriveMemberPermissionOldReq', 'pylark.CreateDriveMemberPermissionOldReq', ([], {}), '()\n', (37292, 37294), False, 'import pylark\n'), ((37762, 37803), 'pylark.TransferDriveMemberPermissionReq', 'pylark.TransferDriveMemberPermissionReq', 
([], {}), '()\n', (37801, 37803), False, 'import pylark\n'), ((38269, 38309), 'pylark.GetDriveMemberPermissionListReq', 'pylark.GetDriveMemberPermissionListReq', ([], {}), '()\n', (38307, 38309), False, 'import pylark\n'), ((38767, 38806), 'pylark.CreateDriveMemberPermissionReq', 'pylark.CreateDriveMemberPermissionReq', ([], {}), '()\n', (38804, 38806), False, 'import pylark\n'), ((39278, 39320), 'pylark.DeleteDriveMemberPermissionOldReq', 'pylark.DeleteDriveMemberPermissionOldReq', ([], {}), '()\n', (39318, 39320), False, 'import pylark\n'), ((39780, 39819), 'pylark.DeleteDriveMemberPermissionReq', 'pylark.DeleteDriveMemberPermissionReq', ([], {}), '()\n', (39817, 39819), False, 'import pylark\n'), ((40291, 40333), 'pylark.UpdateDriveMemberPermissionOldReq', 'pylark.UpdateDriveMemberPermissionOldReq', ([], {}), '()\n', (40331, 40333), False, 'import pylark\n'), ((40793, 40832), 'pylark.UpdateDriveMemberPermissionReq', 'pylark.UpdateDriveMemberPermissionReq', ([], {}), '()\n', (40830, 40832), False, 'import pylark\n'), ((41284, 41322), 'pylark.CheckDriveMemberPermissionReq', 'pylark.CheckDriveMemberPermissionReq', ([], {}), '()\n', (41320, 41322), False, 'import pylark\n'), ((41805, 41849), 'pylark.UpdateDrivePublicPermissionV1OldReq', 'pylark.UpdateDrivePublicPermissionV1OldReq', ([], {}), '()\n', (41847, 41849), False, 'import pylark\n'), ((42340, 42384), 'pylark.UpdateDrivePublicPermissionV2OldReq', 'pylark.UpdateDrivePublicPermissionV2OldReq', ([], {}), '()\n', (42382, 42384), False, 'import pylark\n'), ((42847, 42885), 'pylark.GetDrivePublicPermissionV2Req', 'pylark.GetDrivePublicPermissionV2Req', ([], {}), '()\n', (42883, 42885), False, 'import pylark\n'), ((43341, 43380), 'pylark.UpdateDrivePublicPermissionReq', 'pylark.UpdateDrivePublicPermissionReq', ([], {}), '()\n', (43378, 43380), False, 'import pylark\n'), ((43868, 43912), 'pylark.BatchGetDriveMediaTmpDownloadURLReq', 'pylark.BatchGetDriveMediaTmpDownloadURLReq', ([], {}), '()\n', (43910, 43912), 
False, 'import pylark\n'), ((44327, 44358), 'pylark.GetDriveCommentListReq', 'pylark.GetDriveCommentListReq', ([], {}), '()\n', (44356, 44358), False, 'import pylark\n'), ((44724, 44751), 'pylark.GetDriveCommentReq', 'pylark.GetDriveCommentReq', ([], {}), '()\n', (44749, 44751), False, 'import pylark\n'), ((45124, 45154), 'pylark.CreateDriveCommentReq', 'pylark.CreateDriveCommentReq', ([], {}), '()\n', (45152, 45154), False, 'import pylark\n'), ((45530, 45560), 'pylark.UpdateDriveCommentReq', 'pylark.UpdateDriveCommentReq', ([], {}), '()\n', (45558, 45560), False, 'import pylark\n'), ((45936, 45966), 'pylark.DeleteDriveCommentReq', 'pylark.DeleteDriveCommentReq', ([], {}), '()\n', (45964, 45966), False, 'import pylark\n'), ((46383, 46418), 'pylark.UpdateDriveCommentPatchReq', 'pylark.UpdateDriveCommentPatchReq', ([], {}), '()\n', (46416, 46418), False, 'import pylark\n'), ((46797, 46823), 'pylark.CreateDriveDocReq', 'pylark.CreateDriveDocReq', ([], {}), '()\n', (46821, 46823), False, 'import pylark\n'), ((47199, 47229), 'pylark.GetDriveDocContentReq', 'pylark.GetDriveDocContentReq', ([], {}), '()\n', (47227, 47229), False, 'import pylark\n'), ((47626, 47659), 'pylark.GetDriveDocRawContentReq', 'pylark.GetDriveDocRawContentReq', ([], {}), '()\n', (47657, 47659), False, 'import pylark\n'), ((48032, 48059), 'pylark.GetDriveDocMetaReq', 'pylark.GetDriveDocMetaReq', ([], {}), '()\n', (48057, 48059), False, 'import pylark\n'), ((48401, 48424), 'pylark.CreateSheetReq', 'pylark.CreateSheetReq', ([], {}), '()\n', (48422, 48424), False, 'import pylark\n'), ((48768, 48792), 'pylark.GetSheetMetaReq', 'pylark.GetSheetMetaReq', ([], {}), '()\n', (48790, 48792), False, 'import pylark\n'), ((49166, 49197), 'pylark.UpdateSheetPropertyReq', 'pylark.UpdateSheetPropertyReq', ([], {}), '()\n', (49195, 49197), False, 'import pylark\n'), ((49566, 49594), 'pylark.BatchUpdateSheetReq', 'pylark.BatchUpdateSheetReq', ([], {}), '()\n', (49592, 49594), False, 'import pylark\n'), ((49936, 
49959), 'pylark.ImportSheetReq', 'pylark.ImportSheetReq', ([], {}), '()\n', (49957, 49959), False, 'import pylark\n'), ((50343, 50376), 'pylark.CreateDriveImportTaskReq', 'pylark.CreateDriveImportTaskReq', ([], {}), '()\n', (50374, 50376), False, 'import pylark\n'), ((50760, 50790), 'pylark.GetDriveImportTaskReq', 'pylark.GetDriveImportTaskReq', ([], {}), '()\n', (50788, 50790), False, 'import pylark\n'), ((51167, 51197), 'pylark.MoveSheetDimensionReq', 'pylark.MoveSheetDimensionReq', ([], {}), '()\n', (51195, 51197), False, 'import pylark\n'), ((51569, 51598), 'pylark.PrependSheetValueReq', 'pylark.PrependSheetValueReq', ([], {}), '()\n', (51596, 51598), False, 'import pylark\n'), ((51965, 51993), 'pylark.AppendSheetValueReq', 'pylark.AppendSheetValueReq', ([], {}), '()\n', (51991, 51993), False, 'import pylark\n'), ((52416, 52453), 'pylark.InsertSheetDimensionRangeReq', 'pylark.InsertSheetDimensionRangeReq', ([], {}), '()\n', (52451, 52453), False, 'import pylark\n'), ((52887, 52921), 'pylark.AddSheetDimensionRangeReq', 'pylark.AddSheetDimensionRangeReq', ([], {}), '()\n', (52919, 52921), False, 'import pylark\n'), ((53364, 53401), 'pylark.UpdateSheetDimensionRangeReq', 'pylark.UpdateSheetDimensionRangeReq', ([], {}), '()\n', (53399, 53401), False, 'import pylark\n'), ((53847, 53884), 'pylark.DeleteSheetDimensionRangeReq', 'pylark.DeleteSheetDimensionRangeReq', ([], {}), '()\n', (53882, 53884), False, 'import pylark\n'), ((54261, 54286), 'pylark.GetSheetValueReq', 'pylark.GetSheetValueReq', ([], {}), '()\n', (54284, 54286), False, 'import pylark\n'), ((54661, 54691), 'pylark.BatchGetSheetValueReq', 'pylark.BatchGetSheetValueReq', ([], {}), '()\n', (54689, 54691), False, 'import pylark\n'), ((55048, 55073), 'pylark.SetSheetValueReq', 'pylark.SetSheetValueReq', ([], {}), '()\n', (55071, 55073), False, 'import pylark\n'), ((55448, 55478), 'pylark.BatchSetSheetValueReq', 'pylark.BatchSetSheetValueReq', ([], {}), '()\n', (55476, 55478), False, 'import pylark\n'), 
((55835, 55860), 'pylark.SetSheetStyleReq', 'pylark.SetSheetStyleReq', ([], {}), '()\n', (55858, 55860), False, 'import pylark\n'), ((56235, 56265), 'pylark.BatchSetSheetStyleReq', 'pylark.BatchSetSheetStyleReq', ([], {}), '()\n', (56263, 56265), False, 'import pylark\n'), ((56626, 56652), 'pylark.MergeSheetCellReq', 'pylark.MergeSheetCellReq', ([], {}), '()\n', (56650, 56652), False, 'import pylark\n'), ((57016, 57044), 'pylark.UnmergeSheetCellReq', 'pylark.UnmergeSheetCellReq', ([], {}), '()\n', (57042, 57044), False, 'import pylark\n'), ((57422, 57452), 'pylark.SetSheetValueImageReq', 'pylark.SetSheetValueImageReq', ([], {}), '()\n', (57450, 57452), False, 'import pylark\n'), ((57789, 57810), 'pylark.FindSheetReq', 'pylark.FindSheetReq', ([], {}), '()\n', (57808, 57810), False, 'import pylark\n'), ((58148, 58172), 'pylark.ReplaceSheetReq', 'pylark.ReplaceSheetReq', ([], {}), '()\n', (58170, 58172), False, 'import pylark\n'), ((58594, 58632), 'pylark.CreateSheetConditionFormatReq', 'pylark.CreateSheetConditionFormatReq', ([], {}), '()\n', (58630, 58632), False, 'import pylark\n'), ((59071, 59106), 'pylark.GetSheetConditionFormatReq', 'pylark.GetSheetConditionFormatReq', ([], {}), '()\n', (59104, 59106), False, 'import pylark\n'), ((59554, 59592), 'pylark.UpdateSheetConditionFormatReq', 'pylark.UpdateSheetConditionFormatReq', ([], {}), '()\n', (59590, 59592), False, 'import pylark\n'), ((60043, 60081), 'pylark.DeleteSheetConditionFormatReq', 'pylark.DeleteSheetConditionFormatReq', ([], {}), '()\n', (60079, 60081), False, 'import pylark\n'), ((60544, 60585), 'pylark.CreateSheetProtectedDimensionReq', 'pylark.CreateSheetProtectedDimensionReq', ([], {}), '()\n', (60583, 60585), False, 'import pylark\n'), ((61039, 61077), 'pylark.GetSheetProtectedDimensionReq', 'pylark.GetSheetProtectedDimensionReq', ([], {}), '()\n', (61075, 61077), False, 'import pylark\n'), ((61540, 61581), 'pylark.UpdateSheetProtectedDimensionReq', 'pylark.UpdateSheetProtectedDimensionReq', ([], 
{}), '()\n', (61579, 61581), False, 'import pylark\n'), ((62047, 62088), 'pylark.DeleteSheetProtectedDimensionReq', 'pylark.DeleteSheetProtectedDimensionReq', ([], {}), '()\n', (62086, 62088), False, 'import pylark\n'), ((62574, 62619), 'pylark.CreateSheetDataValidationDropdownReq', 'pylark.CreateSheetDataValidationDropdownReq', ([], {}), '()\n', (62617, 62619), False, 'import pylark\n'), ((63110, 63155), 'pylark.DeleteSheetDataValidationDropdownReq', 'pylark.DeleteSheetDataValidationDropdownReq', ([], {}), '()\n', (63153, 63155), False, 'import pylark\n'), ((63646, 63691), 'pylark.UpdateSheetDataValidationDropdownReq', 'pylark.UpdateSheetDataValidationDropdownReq', ([], {}), '()\n', (63689, 63691), False, 'import pylark\n'), ((64170, 64212), 'pylark.GetSheetDataValidationDropdownReq', 'pylark.GetSheetDataValidationDropdownReq', ([], {}), '()\n', (64210, 64212), False, 'import pylark\n'), ((64611, 64640), 'pylark.CreateSheetFilterReq', 'pylark.CreateSheetFilterReq', ([], {}), '()\n', (64638, 64640), False, 'import pylark\n'), ((65011, 65040), 'pylark.DeleteSheetFilterReq', 'pylark.DeleteSheetFilterReq', ([], {}), '()\n', (65038, 65040), False, 'import pylark\n'), ((65411, 65440), 'pylark.UpdateSheetFilterReq', 'pylark.UpdateSheetFilterReq', ([], {}), '()\n', (65438, 65440), False, 'import pylark\n'), ((65799, 65825), 'pylark.GetSheetFilterReq', 'pylark.GetSheetFilterReq', ([], {}), '()\n', (65823, 65825), False, 'import pylark\n'), ((66213, 66246), 'pylark.CreateSheetFilterViewReq', 'pylark.CreateSheetFilterViewReq', ([], {}), '()\n', (66244, 66246), False, 'import pylark\n'), ((66642, 66675), 'pylark.DeleteSheetFilterViewReq', 'pylark.DeleteSheetFilterViewReq', ([], {}), '()\n', (66673, 66675), False, 'import pylark\n'), ((67071, 67104), 'pylark.UpdateSheetFilterViewReq', 'pylark.UpdateSheetFilterViewReq', ([], {}), '()\n', (67102, 67104), False, 'import pylark\n'), ((67488, 67518), 'pylark.GetSheetFilterViewReq', 'pylark.GetSheetFilterViewReq', ([], {}), '()\n', 
(67516, 67518), False, 'import pylark\n'), ((67907, 67939), 'pylark.QuerySheetFilterViewReq', 'pylark.QuerySheetFilterViewReq', ([], {}), '()\n', (67937, 67939), False, 'import pylark\n'), ((68391, 68433), 'pylark.CreateSheetFilterViewConditionReq', 'pylark.CreateSheetFilterViewConditionReq', ([], {}), '()\n', (68431, 68433), False, 'import pylark\n'), ((68909, 68951), 'pylark.DeleteSheetFilterViewConditionReq', 'pylark.DeleteSheetFilterViewConditionReq', ([], {}), '()\n', (68949, 68951), False, 'import pylark\n'), ((69427, 69469), 'pylark.UpdateSheetFilterViewConditionReq', 'pylark.UpdateSheetFilterViewConditionReq', ([], {}), '()\n', (69467, 69469), False, 'import pylark\n'), ((69933, 69972), 'pylark.GetSheetFilterViewConditionReq', 'pylark.GetSheetFilterViewConditionReq', ([], {}), '()\n', (69970, 69972), False, 'import pylark\n'), ((70441, 70482), 'pylark.QuerySheetFilterViewConditionReq', 'pylark.QuerySheetFilterViewConditionReq', ([], {}), '()\n', (70480, 70482), False, 'import pylark\n'), ((70900, 70933), 'pylark.CreateSheetFloatImageReq', 'pylark.CreateSheetFloatImageReq', ([], {}), '()\n', (70931, 70933), False, 'import pylark\n'), ((71329, 71362), 'pylark.DeleteSheetFloatImageReq', 'pylark.DeleteSheetFloatImageReq', ([], {}), '()\n', (71360, 71362), False, 'import pylark\n'), ((71758, 71791), 'pylark.UpdateSheetFloatImageReq', 'pylark.UpdateSheetFloatImageReq', ([], {}), '()\n', (71789, 71791), False, 'import pylark\n'), ((72175, 72205), 'pylark.GetSheetFloatImageReq', 'pylark.GetSheetFloatImageReq', ([], {}), '()\n', (72203, 72205), False, 'import pylark\n'), ((72594, 72626), 'pylark.QuerySheetFloatImageReq', 'pylark.QuerySheetFloatImageReq', ([], {}), '()\n', (72624, 72626), False, 'import pylark\n'), ((73001, 73029), 'pylark.GetWikiSpaceListReq', 'pylark.GetWikiSpaceListReq', ([], {}), '()\n', (73027, 73029), False, 'import pylark\n'), ((73380, 73404), 'pylark.GetWikiSpaceReq', 'pylark.GetWikiSpaceReq', ([], {}), '()\n', (73402, 73404), False, 'import 
pylark\n'), ((73811, 73845), 'pylark.UpdateWikiSpaceSettingReq', 'pylark.UpdateWikiSpaceSettingReq', ([], {}), '()\n', (73843, 73845), False, 'import pylark\n'), ((74243, 74273), 'pylark.AddWikiSpaceMemberReq', 'pylark.AddWikiSpaceMemberReq', ([], {}), '()\n', (74271, 74273), False, 'import pylark\n'), ((74634, 74660), 'pylark.CreateWikiNodeReq', 'pylark.CreateWikiNodeReq', ([], {}), '()\n', (74658, 74660), False, 'import pylark\n'), ((75024, 75051), 'pylark.GetWikiNodeListReq', 'pylark.GetWikiNodeListReq', ([], {}), '()\n', (75049, 75051), False, 'import pylark\n'), ((75397, 75420), 'pylark.GetWikiNodeReq', 'pylark.GetWikiNodeReq', ([], {}), '()\n', (75418, 75420), False, 'import pylark\n'), ((75777, 75803), 'pylark.MoveDocsToWikiReq', 'pylark.MoveDocsToWikiReq', ([], {}), '()\n', (75801, 75803), False, 'import pylark\n'), ((76404, 76432), 'pylark.GetDriveFileMetaReq', 'pylark.GetDriveFileMetaReq', ([], {}), '()\n', (76430, 76432), False, 'import pylark\n'), ((76739, 76782), 'pylark.CreateDriveFileReq', 'pylark.CreateDriveFileReq', ([], {'folder_token': '"""x"""'}), "(folder_token='x')\n", (76764, 76782), False, 'import pylark\n'), ((77137, 77176), 'pylark.CopyDriveFileReq', 'pylark.CopyDriveFileReq', ([], {'file_token': '"""x"""'}), "(file_token='x')\n", (77160, 77176), False, 'import pylark\n'), ((77535, 77575), 'pylark.DeleteDriveFileReq', 'pylark.DeleteDriveFileReq', ([], {'doc_token': '"""x"""'}), "(doc_token='x')\n", (77560, 77575), False, 'import pylark\n'), ((77946, 77999), 'pylark.DeleteDriveSheetFileReq', 'pylark.DeleteDriveSheetFileReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (77976, 77999), False, 'import pylark\n'), ((78362, 78407), 'pylark.CreateDriveFolderReq', 'pylark.CreateDriveFolderReq', ([], {'folder_token': '"""x"""'}), "(folder_token='x')\n", (78389, 78407), False, 'import pylark\n'), ((78774, 78820), 'pylark.GetDriveFolderMetaReq', 'pylark.GetDriveFolderMetaReq', ([], {'folder_token': '"""x"""'}), 
"(folder_token='x')\n", (78802, 78820), False, 'import pylark\n'), ((79197, 79231), 'pylark.GetDriveRootFolderMetaReq', 'pylark.GetDriveRootFolderMetaReq', ([], {}), '()\n', (79229, 79231), False, 'import pylark\n'), ((79567, 79617), 'pylark.GetDriveFolderChildrenReq', 'pylark.GetDriveFolderChildrenReq', ([], {'folder_token': '"""x"""'}), "(folder_token='x')\n", (79599, 79617), False, 'import pylark\n'), ((79992, 80040), 'pylark.GetDriveFileStatisticsReq', 'pylark.GetDriveFileStatisticsReq', ([], {'file_token': '"""x"""'}), "(file_token='x')\n", (80024, 80040), False, 'import pylark\n'), ((80403, 80446), 'pylark.DownloadDriveFileReq', 'pylark.DownloadDriveFileReq', ([], {'file_token': '"""x"""'}), "(file_token='x')\n", (80430, 80446), False, 'import pylark\n'), ((80788, 80815), 'pylark.UploadDriveFileReq', 'pylark.UploadDriveFileReq', ([], {}), '()\n', (80813, 80815), False, 'import pylark\n'), ((81138, 81172), 'pylark.PrepareUploadDriveFileReq', 'pylark.PrepareUploadDriveFileReq', ([], {}), '()\n', (81170, 81172), False, 'import pylark\n'), ((81485, 81516), 'pylark.PartUploadDriveFileReq', 'pylark.PartUploadDriveFileReq', ([], {}), '()\n', (81514, 81516), False, 'import pylark\n'), ((81820, 81853), 'pylark.FinishUploadDriveFileReq', 'pylark.FinishUploadDriveFileReq', ([], {}), '()\n', (81851, 81853), False, 'import pylark\n'), ((82166, 82210), 'pylark.DownloadDriveMediaReq', 'pylark.DownloadDriveMediaReq', ([], {'file_token': '"""x"""'}), "(file_token='x')\n", (82194, 82210), False, 'import pylark\n'), ((82554, 82582), 'pylark.UploadDriveMediaReq', 'pylark.UploadDriveMediaReq', ([], {}), '()\n', (82580, 82582), False, 'import pylark\n'), ((82907, 82942), 'pylark.PrepareUploadDriveMediaReq', 'pylark.PrepareUploadDriveMediaReq', ([], {}), '()\n', (82940, 82942), False, 'import pylark\n'), ((83257, 83289), 'pylark.PartUploadDriveMediaReq', 'pylark.PartUploadDriveMediaReq', ([], {}), '()\n', (83287, 83289), False, 'import pylark\n'), ((83612, 83646), 
'pylark.FinishUploadDriveMediaReq', 'pylark.FinishUploadDriveMediaReq', ([], {}), '()\n', (83644, 83646), False, 'import pylark\n'), ((84000, 84042), 'pylark.CreateDriveMemberPermissionOldReq', 'pylark.CreateDriveMemberPermissionOldReq', ([], {}), '()\n', (84040, 84042), False, 'import pylark\n'), ((84392, 84433), 'pylark.TransferDriveMemberPermissionReq', 'pylark.TransferDriveMemberPermissionReq', ([], {}), '()\n', (84431, 84433), False, 'import pylark\n'), ((84783, 84823), 'pylark.GetDriveMemberPermissionListReq', 'pylark.GetDriveMemberPermissionListReq', ([], {}), '()\n', (84821, 84823), False, 'import pylark\n'), ((85169, 85217), 'pylark.CreateDriveMemberPermissionReq', 'pylark.CreateDriveMemberPermissionReq', ([], {'token': '"""x"""'}), "(token='x')\n", (85206, 85217), False, 'import pylark\n'), ((85610, 85652), 'pylark.DeleteDriveMemberPermissionOldReq', 'pylark.DeleteDriveMemberPermissionOldReq', ([], {}), '()\n', (85650, 85652), False, 'import pylark\n'), ((85998, 86061), 'pylark.DeleteDriveMemberPermissionReq', 'pylark.DeleteDriveMemberPermissionReq', ([], {'token': '"""x"""', 'member_id': '"""x"""'}), "(token='x', member_id='x')\n", (86035, 86061), False, 'import pylark\n'), ((86474, 86516), 'pylark.UpdateDriveMemberPermissionOldReq', 'pylark.UpdateDriveMemberPermissionOldReq', ([], {}), '()\n', (86514, 86516), False, 'import pylark\n'), ((86862, 86925), 'pylark.UpdateDriveMemberPermissionReq', 'pylark.UpdateDriveMemberPermissionReq', ([], {'token': '"""x"""', 'member_id': '"""x"""'}), "(token='x', member_id='x')\n", (86899, 86925), False, 'import pylark\n'), ((87328, 87366), 'pylark.CheckDriveMemberPermissionReq', 'pylark.CheckDriveMemberPermissionReq', ([], {}), '()\n', (87364, 87366), False, 'import pylark\n'), ((87726, 87770), 'pylark.UpdateDrivePublicPermissionV1OldReq', 'pylark.UpdateDrivePublicPermissionV1OldReq', ([], {}), '()\n', (87768, 87770), False, 'import pylark\n'), ((88130, 88174), 'pylark.UpdateDrivePublicPermissionV2OldReq', 
'pylark.UpdateDrivePublicPermissionV2OldReq', ([], {}), '()\n', (88172, 88174), False, 'import pylark\n'), ((88520, 88558), 'pylark.GetDrivePublicPermissionV2Req', 'pylark.GetDrivePublicPermissionV2Req', ([], {}), '()\n', (88556, 88558), False, 'import pylark\n'), ((88904, 88952), 'pylark.UpdateDrivePublicPermissionReq', 'pylark.UpdateDrivePublicPermissionReq', ([], {'token': '"""x"""'}), "(token='x')\n", (88941, 88952), False, 'import pylark\n'), ((89353, 89397), 'pylark.BatchGetDriveMediaTmpDownloadURLReq', 'pylark.BatchGetDriveMediaTmpDownloadURLReq', ([], {}), '()\n', (89395, 89397), False, 'import pylark\n'), ((89727, 89772), 'pylark.GetDriveCommentListReq', 'pylark.GetDriveCommentListReq', ([], {'file_token': '"""x"""'}), "(file_token='x')\n", (89756, 89772), False, 'import pylark\n'), ((90131, 90188), 'pylark.GetDriveCommentReq', 'pylark.GetDriveCommentReq', ([], {'file_token': '"""x"""', 'comment_id': '"""x"""'}), "(file_token='x', comment_id='x')\n", (90156, 90188), False, 'import pylark\n'), ((90573, 90617), 'pylark.CreateDriveCommentReq', 'pylark.CreateDriveCommentReq', ([], {'file_token': '"""x"""'}), "(file_token='x')\n", (90601, 90617), False, 'import pylark\n'), ((90982, 91056), 'pylark.UpdateDriveCommentReq', 'pylark.UpdateDriveCommentReq', ([], {'file_token': '"""x"""', 'comment_id': '"""x"""', 'reply_id': '"""x"""'}), "(file_token='x', comment_id='x', reply_id='x')\n", (91010, 91056), False, 'import pylark\n'), ((91461, 91535), 'pylark.DeleteDriveCommentReq', 'pylark.DeleteDriveCommentReq', ([], {'file_token': '"""x"""', 'comment_id': '"""x"""', 'reply_id': '"""x"""'}), "(file_token='x', comment_id='x', reply_id='x')\n", (91489, 91535), False, 'import pylark\n'), ((91952, 92017), 'pylark.UpdateDriveCommentPatchReq', 'pylark.UpdateDriveCommentPatchReq', ([], {'file_token': '"""x"""', 'comment_id': '"""x"""'}), "(file_token='x', comment_id='x')\n", (91985, 92017), False, 'import pylark\n'), ((92377, 92403), 'pylark.CreateDriveDocReq', 
'pylark.CreateDriveDocReq', ([], {}), '()\n', (92401, 92403), False, 'import pylark\n'), ((92718, 92761), 'pylark.GetDriveDocContentReq', 'pylark.GetDriveDocContentReq', ([], {'doc_token': '"""x"""'}), "(doc_token='x')\n", (92746, 92761), False, 'import pylark\n'), ((93136, 93182), 'pylark.GetDriveDocRawContentReq', 'pylark.GetDriveDocRawContentReq', ([], {'doc_token': '"""x"""'}), "(doc_token='x')\n", (93167, 93182), False, 'import pylark\n'), ((93543, 93583), 'pylark.GetDriveDocMetaReq', 'pylark.GetDriveDocMetaReq', ([], {'doc_token': '"""x"""'}), "(doc_token='x')\n", (93568, 93583), False, 'import pylark\n'), ((93915, 93938), 'pylark.CreateSheetReq', 'pylark.CreateSheetReq', ([], {}), '()\n', (93936, 93938), False, 'import pylark\n'), ((94239, 94284), 'pylark.GetSheetMetaReq', 'pylark.GetSheetMetaReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (94261, 94284), False, 'import pylark\n'), ((94651, 94703), 'pylark.UpdateSheetPropertyReq', 'pylark.UpdateSheetPropertyReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (94680, 94703), False, 'import pylark\n'), ((95064, 95113), 'pylark.BatchUpdateSheetReq', 'pylark.BatchUpdateSheetReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (95090, 95113), False, 'import pylark\n'), ((95445, 95468), 'pylark.ImportSheetReq', 'pylark.ImportSheetReq', ([], {}), '()\n', (95466, 95468), False, 'import pylark\n'), ((95772, 95805), 'pylark.CreateDriveImportTaskReq', 'pylark.CreateDriveImportTaskReq', ([], {}), '()\n', (95803, 95805), False, 'import pylark\n'), ((96120, 96160), 'pylark.GetDriveImportTaskReq', 'pylark.GetDriveImportTaskReq', ([], {'ticket': '"""x"""'}), "(ticket='x')\n", (96148, 96160), False, 'import pylark\n'), ((96525, 96590), 'pylark.MoveSheetDimensionReq', 'pylark.MoveSheetDimensionReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x')\n", (96553, 96590), False, 'import pylark\n'), ((96973, 
97023), 'pylark.PrependSheetValueReq', 'pylark.PrependSheetValueReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (97000, 97023), False, 'import pylark\n'), ((97384, 97433), 'pylark.AppendSheetValueReq', 'pylark.AppendSheetValueReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (97410, 97433), False, 'import pylark\n'), ((97814, 97872), 'pylark.InsertSheetDimensionRangeReq', 'pylark.InsertSheetDimensionRangeReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (97849, 97872), False, 'import pylark\n'), ((98247, 98302), 'pylark.AddSheetDimensionRangeReq', 'pylark.AddSheetDimensionRangeReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (98279, 98302), False, 'import pylark\n'), ((98683, 98741), 'pylark.UpdateSheetDimensionRangeReq', 'pylark.UpdateSheetDimensionRangeReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (98718, 98741), False, 'import pylark\n'), ((99122, 99180), 'pylark.DeleteSheetDimensionRangeReq', 'pylark.DeleteSheetDimensionRangeReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (99157, 99180), False, 'import pylark\n'), ((99535, 99593), 'pylark.GetSheetValueReq', 'pylark.GetSheetValueReq', ([], {'spreadsheet_token': '"""x"""', 'range_': '"""x"""'}), "(spreadsheet_token='x', range_='x')\n", (99558, 99593), False, 'import pylark\n'), ((99980, 100031), 'pylark.BatchGetSheetValueReq', 'pylark.BatchGetSheetValueReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (100008, 100031), False, 'import pylark\n'), ((100386, 100432), 'pylark.SetSheetValueReq', 'pylark.SetSheetValueReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (100409, 100432), False, 'import pylark\n'), ((100799, 100850), 'pylark.BatchSetSheetValueReq', 'pylark.BatchSetSheetValueReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (100827, 100850), False, 'import pylark\n'), ((101205, 
101251), 'pylark.SetSheetStyleReq', 'pylark.SetSheetStyleReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (101228, 101251), False, 'import pylark\n'), ((101618, 101669), 'pylark.BatchSetSheetStyleReq', 'pylark.BatchSetSheetStyleReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (101646, 101669), False, 'import pylark\n'), ((102026, 102073), 'pylark.MergeSheetCellReq', 'pylark.MergeSheetCellReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (102050, 102073), False, 'import pylark\n'), ((102434, 102483), 'pylark.UnmergeSheetCellReq', 'pylark.UnmergeSheetCellReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (102460, 102483), False, 'import pylark\n'), ((102850, 102901), 'pylark.SetSheetValueImageReq', 'pylark.SetSheetValueImageReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (102878, 102901), False, 'import pylark\n'), ((103246, 103302), 'pylark.FindSheetReq', 'pylark.FindSheetReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x')\n", (103265, 103302), False, 'import pylark\n'), ((103673, 103732), 'pylark.ReplaceSheetReq', 'pylark.ReplaceSheetReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x')\n", (103695, 103732), False, 'import pylark\n'), ((104135, 104194), 'pylark.CreateSheetConditionFormatReq', 'pylark.CreateSheetConditionFormatReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (104171, 104194), False, 'import pylark\n'), ((104571, 104627), 'pylark.GetSheetConditionFormatReq', 'pylark.GetSheetConditionFormatReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (104604, 104627), False, 'import pylark\n'), ((105010, 105069), 'pylark.UpdateSheetConditionFormatReq', 'pylark.UpdateSheetConditionFormatReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (105046, 105069), 
False, 'import pylark\n'), ((105452, 105511), 'pylark.DeleteSheetConditionFormatReq', 'pylark.DeleteSheetConditionFormatReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (105488, 105511), False, 'import pylark\n'), ((105900, 105962), 'pylark.CreateSheetProtectedDimensionReq', 'pylark.CreateSheetProtectedDimensionReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (105939, 105962), False, 'import pylark\n'), ((106345, 106404), 'pylark.GetSheetProtectedDimensionReq', 'pylark.GetSheetProtectedDimensionReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (106381, 106404), False, 'import pylark\n'), ((106793, 106855), 'pylark.UpdateSheetProtectedDimensionReq', 'pylark.UpdateSheetProtectedDimensionReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (106832, 106855), False, 'import pylark\n'), ((107244, 107306), 'pylark.DeleteSheetProtectedDimensionReq', 'pylark.DeleteSheetProtectedDimensionReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (107283, 107306), False, 'import pylark\n'), ((107705, 107771), 'pylark.CreateSheetDataValidationDropdownReq', 'pylark.CreateSheetDataValidationDropdownReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (107748, 107771), False, 'import pylark\n'), ((108170, 108236), 'pylark.DeleteSheetDataValidationDropdownReq', 'pylark.DeleteSheetDataValidationDropdownReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (108213, 108236), False, 'import pylark\n'), ((108635, 108742), 'pylark.UpdateSheetDataValidationDropdownReq', 'pylark.UpdateSheetDataValidationDropdownReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""', 'data_validation_id': '(1)'}), "(spreadsheet_token='x', sheet_id\n ='x', data_validation_id=1)\n", (108678, 108742), False, 'import pylark\n'), ((109170, 109233), 'pylark.GetSheetDataValidationDropdownReq', 'pylark.GetSheetDataValidationDropdownReq', ([], 
{'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (109210, 109233), False, 'import pylark\n'), ((109596, 109660), 'pylark.CreateSheetFilterReq', 'pylark.CreateSheetFilterReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x')\n", (109623, 109660), False, 'import pylark\n'), ((110043, 110107), 'pylark.DeleteSheetFilterReq', 'pylark.DeleteSheetFilterReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x')\n", (110070, 110107), False, 'import pylark\n'), ((110490, 110554), 'pylark.UpdateSheetFilterReq', 'pylark.UpdateSheetFilterReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x')\n", (110517, 110554), False, 'import pylark\n'), ((110931, 110992), 'pylark.GetSheetFilterReq', 'pylark.GetSheetFilterReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x')\n", (110955, 110992), False, 'import pylark\n'), ((111385, 111453), 'pylark.CreateSheetFilterViewReq', 'pylark.CreateSheetFilterViewReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x')\n", (111416, 111453), False, 'import pylark\n'), ((111846, 111938), 'pylark.DeleteSheetFilterViewReq', 'pylark.DeleteSheetFilterViewReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""', 'filter_view_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x',\n filter_view_id='x')\n", (111877, 111938), False, 'import pylark\n'), ((112347, 112439), 'pylark.UpdateSheetFilterViewReq', 'pylark.UpdateSheetFilterViewReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""', 'filter_view_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x',\n filter_view_id='x')\n", (112378, 112439), False, 'import pylark\n'), ((112842, 112931), 'pylark.GetSheetFilterViewReq', 'pylark.GetSheetFilterViewReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""', 
'filter_view_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x',\n filter_view_id='x')\n", (112870, 112931), False, 'import pylark\n'), ((113338, 113405), 'pylark.QuerySheetFilterViewReq', 'pylark.QuerySheetFilterViewReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x')\n", (113368, 113405), False, 'import pylark\n'), ((113818, 113920), 'pylark.CreateSheetFilterViewConditionReq', 'pylark.CreateSheetFilterViewConditionReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""', 'filter_view_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id=\n 'x', filter_view_id='x')\n", (113858, 113920), False, 'import pylark\n'), ((114348, 114468), 'pylark.DeleteSheetFilterViewConditionReq', 'pylark.DeleteSheetFilterViewConditionReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""', 'filter_view_id': '"""x"""', 'condition_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id=\n 'x', filter_view_id='x', condition_id='x')\n", (114388, 114468), False, 'import pylark\n'), ((114916, 115036), 'pylark.UpdateSheetFilterViewConditionReq', 'pylark.UpdateSheetFilterViewConditionReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""', 'filter_view_id': '"""x"""', 'condition_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id=\n 'x', filter_view_id='x', condition_id='x')\n", (114956, 115036), False, 'import pylark\n'), ((115478, 115594), 'pylark.GetSheetFilterViewConditionReq', 'pylark.GetSheetFilterViewConditionReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""', 'filter_view_id': '"""x"""', 'condition_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x',\n filter_view_id='x', condition_id='x')\n", (115515, 115594), False, 'import pylark\n'), ((116041, 116141), 'pylark.QuerySheetFilterViewConditionReq', 'pylark.QuerySheetFilterViewConditionReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""', 'filter_view_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x',\n 
filter_view_id='x')\n", (116080, 116141), False, 'import pylark\n'), ((116550, 116618), 'pylark.CreateSheetFloatImageReq', 'pylark.CreateSheetFloatImageReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x')\n", (116581, 116618), False, 'import pylark\n'), ((117011, 117103), 'pylark.DeleteSheetFloatImageReq', 'pylark.DeleteSheetFloatImageReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""', 'float_image_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x',\n float_image_id='x')\n", (117042, 117103), False, 'import pylark\n'), ((117512, 117604), 'pylark.UpdateSheetFloatImageReq', 'pylark.UpdateSheetFloatImageReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""', 'float_image_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x',\n float_image_id='x')\n", (117543, 117604), False, 'import pylark\n'), ((118007, 118096), 'pylark.GetSheetFloatImageReq', 'pylark.GetSheetFloatImageReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""', 'float_image_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x',\n float_image_id='x')\n", (118035, 118096), False, 'import pylark\n'), ((118503, 118570), 'pylark.QuerySheetFloatImageReq', 'pylark.QuerySheetFloatImageReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x')\n", (118533, 118570), False, 'import pylark\n'), ((118936, 118964), 'pylark.GetWikiSpaceListReq', 'pylark.GetWikiSpaceListReq', ([], {}), '()\n', (118962, 118964), False, 'import pylark\n'), ((119265, 119301), 'pylark.GetWikiSpaceReq', 'pylark.GetWikiSpaceReq', ([], {'space_id': '"""x"""'}), "(space_id='x')\n", (119287, 119301), False, 'import pylark\n'), ((119676, 119722), 'pylark.UpdateWikiSpaceSettingReq', 'pylark.UpdateWikiSpaceSettingReq', ([], {'space_id': '"""x"""'}), "(space_id='x')\n", (119708, 119722), False, 'import pylark\n'), ((120089, 120131), 'pylark.AddWikiSpaceMemberReq', 'pylark.AddWikiSpaceMemberReq', ([], 
{'space_id': '"""x"""'}), "(space_id='x')\n", (120117, 120131), False, 'import pylark\n'), ((120488, 120526), 'pylark.CreateWikiNodeReq', 'pylark.CreateWikiNodeReq', ([], {'space_id': '"""x"""'}), "(space_id='x')\n", (120512, 120526), False, 'import pylark\n'), ((120887, 120926), 'pylark.GetWikiNodeListReq', 'pylark.GetWikiNodeListReq', ([], {'space_id': '"""x"""'}), "(space_id='x')\n", (120912, 120926), False, 'import pylark\n'), ((121260, 121283), 'pylark.GetWikiNodeReq', 'pylark.GetWikiNodeReq', ([], {}), '()\n', (121281, 121283), False, 'import pylark\n'), ((121590, 121628), 'pylark.MoveDocsToWikiReq', 'pylark.MoveDocsToWikiReq', ([], {'space_id': '"""x"""'}), "(space_id='x')\n", (121614, 121628), False, 'import pylark\n'), ((122251, 122279), 'pylark.GetDriveFileMetaReq', 'pylark.GetDriveFileMetaReq', ([], {}), '()\n', (122277, 122279), False, 'import pylark\n'), ((122526, 122569), 'pylark.CreateDriveFileReq', 'pylark.CreateDriveFileReq', ([], {'folder_token': '"""x"""'}), "(folder_token='x')\n", (122551, 122569), False, 'import pylark\n'), ((122864, 122903), 'pylark.CopyDriveFileReq', 'pylark.CopyDriveFileReq', ([], {'file_token': '"""x"""'}), "(file_token='x')\n", (122887, 122903), False, 'import pylark\n'), ((123202, 123242), 'pylark.DeleteDriveFileReq', 'pylark.DeleteDriveFileReq', ([], {'doc_token': '"""x"""'}), "(doc_token='x')\n", (123227, 123242), False, 'import pylark\n'), ((123553, 123606), 'pylark.DeleteDriveSheetFileReq', 'pylark.DeleteDriveSheetFileReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (123583, 123606), False, 'import pylark\n'), ((123909, 123954), 'pylark.CreateDriveFolderReq', 'pylark.CreateDriveFolderReq', ([], {'folder_token': '"""x"""'}), "(folder_token='x')\n", (123936, 123954), False, 'import pylark\n'), ((124261, 124307), 'pylark.GetDriveFolderMetaReq', 'pylark.GetDriveFolderMetaReq', ([], {'folder_token': '"""x"""'}), "(folder_token='x')\n", (124289, 124307), False, 'import pylark\n'), ((124624, 
124658), 'pylark.GetDriveRootFolderMetaReq', 'pylark.GetDriveRootFolderMetaReq', ([], {}), '()\n', (124656, 124658), False, 'import pylark\n'), ((124934, 124984), 'pylark.GetDriveFolderChildrenReq', 'pylark.GetDriveFolderChildrenReq', ([], {'folder_token': '"""x"""'}), "(folder_token='x')\n", (124966, 124984), False, 'import pylark\n'), ((125299, 125347), 'pylark.GetDriveFileStatisticsReq', 'pylark.GetDriveFileStatisticsReq', ([], {'file_token': '"""x"""'}), "(file_token='x')\n", (125331, 125347), False, 'import pylark\n'), ((125650, 125693), 'pylark.DownloadDriveFileReq', 'pylark.DownloadDriveFileReq', ([], {'file_token': '"""x"""'}), "(file_token='x')\n", (125677, 125693), False, 'import pylark\n'), ((125975, 126002), 'pylark.UploadDriveFileReq', 'pylark.UploadDriveFileReq', ([], {}), '()\n', (126000, 126002), False, 'import pylark\n'), ((126265, 126299), 'pylark.PrepareUploadDriveFileReq', 'pylark.PrepareUploadDriveFileReq', ([], {}), '()\n', (126297, 126299), False, 'import pylark\n'), ((126552, 126583), 'pylark.PartUploadDriveFileReq', 'pylark.PartUploadDriveFileReq', ([], {}), '()\n', (126581, 126583), False, 'import pylark\n'), ((126827, 126860), 'pylark.FinishUploadDriveFileReq', 'pylark.FinishUploadDriveFileReq', ([], {}), '()\n', (126858, 126860), False, 'import pylark\n'), ((127113, 127157), 'pylark.DownloadDriveMediaReq', 'pylark.DownloadDriveMediaReq', ([], {'file_token': '"""x"""'}), "(file_token='x')\n", (127141, 127157), False, 'import pylark\n'), ((127441, 127469), 'pylark.UploadDriveMediaReq', 'pylark.UploadDriveMediaReq', ([], {}), '()\n', (127467, 127469), False, 'import pylark\n'), ((127734, 127769), 'pylark.PrepareUploadDriveMediaReq', 'pylark.PrepareUploadDriveMediaReq', ([], {}), '()\n', (127767, 127769), False, 'import pylark\n'), ((128024, 128056), 'pylark.PartUploadDriveMediaReq', 'pylark.PartUploadDriveMediaReq', ([], {}), '()\n', (128054, 128056), False, 'import pylark\n'), ((128319, 128353), 'pylark.FinishUploadDriveMediaReq', 
'pylark.FinishUploadDriveMediaReq', ([], {}), '()\n', (128351, 128353), False, 'import pylark\n'), ((128647, 128689), 'pylark.CreateDriveMemberPermissionOldReq', 'pylark.CreateDriveMemberPermissionOldReq', ([], {}), '()\n', (128687, 128689), False, 'import pylark\n'), ((128979, 129020), 'pylark.TransferDriveMemberPermissionReq', 'pylark.TransferDriveMemberPermissionReq', ([], {}), '()\n', (129018, 129020), False, 'import pylark\n'), ((129310, 129350), 'pylark.GetDriveMemberPermissionListReq', 'pylark.GetDriveMemberPermissionListReq', ([], {}), '()\n', (129348, 129350), False, 'import pylark\n'), ((129636, 129684), 'pylark.CreateDriveMemberPermissionReq', 'pylark.CreateDriveMemberPermissionReq', ([], {'token': '"""x"""'}), "(token='x')\n", (129673, 129684), False, 'import pylark\n'), ((130017, 130059), 'pylark.DeleteDriveMemberPermissionOldReq', 'pylark.DeleteDriveMemberPermissionOldReq', ([], {}), '()\n', (130057, 130059), False, 'import pylark\n'), ((130345, 130408), 'pylark.DeleteDriveMemberPermissionReq', 'pylark.DeleteDriveMemberPermissionReq', ([], {'token': '"""x"""', 'member_id': '"""x"""'}), "(token='x', member_id='x')\n", (130382, 130408), False, 'import pylark\n'), ((130761, 130803), 'pylark.UpdateDriveMemberPermissionOldReq', 'pylark.UpdateDriveMemberPermissionOldReq', ([], {}), '()\n', (130801, 130803), False, 'import pylark\n'), ((131089, 131152), 'pylark.UpdateDriveMemberPermissionReq', 'pylark.UpdateDriveMemberPermissionReq', ([], {'token': '"""x"""', 'member_id': '"""x"""'}), "(token='x', member_id='x')\n", (131126, 131152), False, 'import pylark\n'), ((131495, 131533), 'pylark.CheckDriveMemberPermissionReq', 'pylark.CheckDriveMemberPermissionReq', ([], {}), '()\n', (131531, 131533), False, 'import pylark\n'), ((131833, 131877), 'pylark.UpdateDrivePublicPermissionV1OldReq', 'pylark.UpdateDrivePublicPermissionV1OldReq', ([], {}), '()\n', (131875, 131877), False, 'import pylark\n'), ((132177, 132221), 'pylark.UpdateDrivePublicPermissionV2OldReq', 
'pylark.UpdateDrivePublicPermissionV2OldReq', ([], {}), '()\n', (132219, 132221), False, 'import pylark\n'), ((132507, 132545), 'pylark.GetDrivePublicPermissionV2Req', 'pylark.GetDrivePublicPermissionV2Req', ([], {}), '()\n', (132543, 132545), False, 'import pylark\n'), ((132831, 132879), 'pylark.UpdateDrivePublicPermissionReq', 'pylark.UpdateDrivePublicPermissionReq', ([], {'token': '"""x"""'}), "(token='x')\n", (132868, 132879), False, 'import pylark\n'), ((133220, 133264), 'pylark.BatchGetDriveMediaTmpDownloadURLReq', 'pylark.BatchGetDriveMediaTmpDownloadURLReq', ([], {}), '()\n', (133262, 133264), False, 'import pylark\n'), ((133534, 133579), 'pylark.GetDriveCommentListReq', 'pylark.GetDriveCommentListReq', ([], {'file_token': '"""x"""'}), "(file_token='x')\n", (133563, 133579), False, 'import pylark\n'), ((133878, 133935), 'pylark.GetDriveCommentReq', 'pylark.GetDriveCommentReq', ([], {'file_token': '"""x"""', 'comment_id': '"""x"""'}), "(file_token='x', comment_id='x')\n", (133903, 133935), False, 'import pylark\n'), ((134260, 134304), 'pylark.CreateDriveCommentReq', 'pylark.CreateDriveCommentReq', ([], {'file_token': '"""x"""'}), "(file_token='x')\n", (134288, 134304), False, 'import pylark\n'), ((134609, 134683), 'pylark.UpdateDriveCommentReq', 'pylark.UpdateDriveCommentReq', ([], {'file_token': '"""x"""', 'comment_id': '"""x"""', 'reply_id': '"""x"""'}), "(file_token='x', comment_id='x', reply_id='x')\n", (134637, 134683), False, 'import pylark\n'), ((135028, 135102), 'pylark.DeleteDriveCommentReq', 'pylark.DeleteDriveCommentReq', ([], {'file_token': '"""x"""', 'comment_id': '"""x"""', 'reply_id': '"""x"""'}), "(file_token='x', comment_id='x', reply_id='x')\n", (135056, 135102), False, 'import pylark\n'), ((135459, 135524), 'pylark.UpdateDriveCommentPatchReq', 'pylark.UpdateDriveCommentPatchReq', ([], {'file_token': '"""x"""', 'comment_id': '"""x"""'}), "(file_token='x', comment_id='x')\n", (135492, 135524), False, 'import pylark\n'), ((135824, 135850), 
'pylark.CreateDriveDocReq', 'pylark.CreateDriveDocReq', ([], {}), '()\n', (135848, 135850), False, 'import pylark\n'), ((136105, 136148), 'pylark.GetDriveDocContentReq', 'pylark.GetDriveDocContentReq', ([], {'doc_token': '"""x"""'}), "(doc_token='x')\n", (136133, 136148), False, 'import pylark\n'), ((136463, 136509), 'pylark.GetDriveDocRawContentReq', 'pylark.GetDriveDocRawContentReq', ([], {'doc_token': '"""x"""'}), "(doc_token='x')\n", (136494, 136509), False, 'import pylark\n'), ((136810, 136850), 'pylark.GetDriveDocMetaReq', 'pylark.GetDriveDocMetaReq', ([], {'doc_token': '"""x"""'}), "(doc_token='x')\n", (136835, 136850), False, 'import pylark\n'), ((137122, 137145), 'pylark.CreateSheetReq', 'pylark.CreateSheetReq', ([], {}), '()\n', (137143, 137145), False, 'import pylark\n'), ((137386, 137431), 'pylark.GetSheetMetaReq', 'pylark.GetSheetMetaReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (137408, 137431), False, 'import pylark\n'), ((137738, 137790), 'pylark.UpdateSheetPropertyReq', 'pylark.UpdateSheetPropertyReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (137767, 137790), False, 'import pylark\n'), ((138091, 138140), 'pylark.BatchUpdateSheetReq', 'pylark.BatchUpdateSheetReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (138117, 138140), False, 'import pylark\n'), ((138412, 138435), 'pylark.ImportSheetReq', 'pylark.ImportSheetReq', ([], {}), '()\n', (138433, 138435), False, 'import pylark\n'), ((138679, 138712), 'pylark.CreateDriveImportTaskReq', 'pylark.CreateDriveImportTaskReq', ([], {}), '()\n', (138710, 138712), False, 'import pylark\n'), ((138967, 139007), 'pylark.GetDriveImportTaskReq', 'pylark.GetDriveImportTaskReq', ([], {'ticket': '"""x"""'}), "(ticket='x')\n", (138995, 139007), False, 'import pylark\n'), ((139312, 139377), 'pylark.MoveSheetDimensionReq', 'pylark.MoveSheetDimensionReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""'}), 
"(spreadsheet_token='x', sheet_id='x')\n", (139340, 139377), False, 'import pylark\n'), ((139700, 139750), 'pylark.PrependSheetValueReq', 'pylark.PrependSheetValueReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (139727, 139750), False, 'import pylark\n'), ((140051, 140100), 'pylark.AppendSheetValueReq', 'pylark.AppendSheetValueReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (140077, 140100), False, 'import pylark\n'), ((140421, 140479), 'pylark.InsertSheetDimensionRangeReq', 'pylark.InsertSheetDimensionRangeReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (140456, 140479), False, 'import pylark\n'), ((140794, 140849), 'pylark.AddSheetDimensionRangeReq', 'pylark.AddSheetDimensionRangeReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (140826, 140849), False, 'import pylark\n'), ((141170, 141228), 'pylark.UpdateSheetDimensionRangeReq', 'pylark.UpdateSheetDimensionRangeReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (141205, 141228), False, 'import pylark\n'), ((141549, 141607), 'pylark.DeleteSheetDimensionRangeReq', 'pylark.DeleteSheetDimensionRangeReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (141584, 141607), False, 'import pylark\n'), ((141902, 141960), 'pylark.GetSheetValueReq', 'pylark.GetSheetValueReq', ([], {'spreadsheet_token': '"""x"""', 'range_': '"""x"""'}), "(spreadsheet_token='x', range_='x')\n", (141925, 141960), False, 'import pylark\n'), ((142287, 142338), 'pylark.BatchGetSheetValueReq', 'pylark.BatchGetSheetValueReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (142315, 142338), False, 'import pylark\n'), ((142633, 142679), 'pylark.SetSheetValueReq', 'pylark.SetSheetValueReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (142656, 142679), False, 'import pylark\n'), ((142986, 143037), 'pylark.BatchSetSheetValueReq', 
'pylark.BatchSetSheetValueReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (143014, 143037), False, 'import pylark\n'), ((143332, 143378), 'pylark.SetSheetStyleReq', 'pylark.SetSheetStyleReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (143355, 143378), False, 'import pylark\n'), ((143685, 143736), 'pylark.BatchSetSheetStyleReq', 'pylark.BatchSetSheetStyleReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (143713, 143736), False, 'import pylark\n'), ((144033, 144080), 'pylark.MergeSheetCellReq', 'pylark.MergeSheetCellReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (144057, 144080), False, 'import pylark\n'), ((144381, 144430), 'pylark.UnmergeSheetCellReq', 'pylark.UnmergeSheetCellReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (144407, 144430), False, 'import pylark\n'), ((144737, 144788), 'pylark.SetSheetValueImageReq', 'pylark.SetSheetValueImageReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (144765, 144788), False, 'import pylark\n'), ((145073, 145129), 'pylark.FindSheetReq', 'pylark.FindSheetReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x')\n", (145092, 145129), False, 'import pylark\n'), ((145440, 145499), 'pylark.ReplaceSheetReq', 'pylark.ReplaceSheetReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x')\n", (145462, 145499), False, 'import pylark\n'), ((145842, 145901), 'pylark.CreateSheetConditionFormatReq', 'pylark.CreateSheetConditionFormatReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (145878, 145901), False, 'import pylark\n'), ((146218, 146274), 'pylark.GetSheetConditionFormatReq', 'pylark.GetSheetConditionFormatReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (146251, 146274), False, 'import pylark\n'), ((146597, 146656), 
'pylark.UpdateSheetConditionFormatReq', 'pylark.UpdateSheetConditionFormatReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (146633, 146656), False, 'import pylark\n'), ((146979, 147038), 'pylark.DeleteSheetConditionFormatReq', 'pylark.DeleteSheetConditionFormatReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (147015, 147038), False, 'import pylark\n'), ((147367, 147429), 'pylark.CreateSheetProtectedDimensionReq', 'pylark.CreateSheetProtectedDimensionReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (147406, 147429), False, 'import pylark\n'), ((147752, 147811), 'pylark.GetSheetProtectedDimensionReq', 'pylark.GetSheetProtectedDimensionReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (147788, 147811), False, 'import pylark\n'), ((148140, 148202), 'pylark.UpdateSheetProtectedDimensionReq', 'pylark.UpdateSheetProtectedDimensionReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (148179, 148202), False, 'import pylark\n'), ((148531, 148593), 'pylark.DeleteSheetProtectedDimensionReq', 'pylark.DeleteSheetProtectedDimensionReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (148570, 148593), False, 'import pylark\n'), ((148932, 148998), 'pylark.CreateSheetDataValidationDropdownReq', 'pylark.CreateSheetDataValidationDropdownReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (148975, 148998), False, 'import pylark\n'), ((149337, 149403), 'pylark.DeleteSheetDataValidationDropdownReq', 'pylark.DeleteSheetDataValidationDropdownReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (149380, 149403), False, 'import pylark\n'), ((149742, 149849), 'pylark.UpdateSheetDataValidationDropdownReq', 'pylark.UpdateSheetDataValidationDropdownReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""', 'data_validation_id': '(1)'}), "(spreadsheet_token='x', sheet_id\n ='x', 
data_validation_id=1)\n", (149785, 149849), False, 'import pylark\n'), ((150217, 150280), 'pylark.GetSheetDataValidationDropdownReq', 'pylark.GetSheetDataValidationDropdownReq', ([], {'spreadsheet_token': '"""x"""'}), "(spreadsheet_token='x')\n", (150257, 150280), False, 'import pylark\n'), ((150583, 150647), 'pylark.CreateSheetFilterReq', 'pylark.CreateSheetFilterReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x')\n", (150610, 150647), False, 'import pylark\n'), ((150970, 151034), 'pylark.DeleteSheetFilterReq', 'pylark.DeleteSheetFilterReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x')\n", (150997, 151034), False, 'import pylark\n'), ((151357, 151421), 'pylark.UpdateSheetFilterReq', 'pylark.UpdateSheetFilterReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x')\n", (151384, 151421), False, 'import pylark\n'), ((151738, 151799), 'pylark.GetSheetFilterReq', 'pylark.GetSheetFilterReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x')\n", (151762, 151799), False, 'import pylark\n'), ((152132, 152200), 'pylark.CreateSheetFilterViewReq', 'pylark.CreateSheetFilterViewReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x')\n", (152163, 152200), False, 'import pylark\n'), ((152533, 152625), 'pylark.DeleteSheetFilterViewReq', 'pylark.DeleteSheetFilterViewReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""', 'filter_view_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x',\n filter_view_id='x')\n", (152564, 152625), False, 'import pylark\n'), ((152974, 153066), 'pylark.UpdateSheetFilterViewReq', 'pylark.UpdateSheetFilterViewReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""', 'filter_view_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x',\n filter_view_id='x')\n", (153005, 
153066), False, 'import pylark\n'), ((153409, 153498), 'pylark.GetSheetFilterViewReq', 'pylark.GetSheetFilterViewReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""', 'filter_view_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x',\n filter_view_id='x')\n", (153437, 153498), False, 'import pylark\n'), ((153845, 153912), 'pylark.QuerySheetFilterViewReq', 'pylark.QuerySheetFilterViewReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x')\n", (153875, 153912), False, 'import pylark\n'), ((154265, 154367), 'pylark.CreateSheetFilterViewConditionReq', 'pylark.CreateSheetFilterViewConditionReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""', 'filter_view_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id=\n 'x', filter_view_id='x')\n", (154305, 154367), False, 'import pylark\n'), ((154735, 154855), 'pylark.DeleteSheetFilterViewConditionReq', 'pylark.DeleteSheetFilterViewConditionReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""', 'filter_view_id': '"""x"""', 'condition_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id=\n 'x', filter_view_id='x', condition_id='x')\n", (154775, 154855), False, 'import pylark\n'), ((155243, 155363), 'pylark.UpdateSheetFilterViewConditionReq', 'pylark.UpdateSheetFilterViewConditionReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""', 'filter_view_id': '"""x"""', 'condition_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id=\n 'x', filter_view_id='x', condition_id='x')\n", (155283, 155363), False, 'import pylark\n'), ((155745, 155861), 'pylark.GetSheetFilterViewConditionReq', 'pylark.GetSheetFilterViewConditionReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""', 'filter_view_id': '"""x"""', 'condition_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x',\n filter_view_id='x', condition_id='x')\n", (155782, 155861), False, 'import pylark\n'), ((156248, 156348), 'pylark.QuerySheetFilterViewConditionReq', 
'pylark.QuerySheetFilterViewConditionReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""', 'filter_view_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x',\n filter_view_id='x')\n", (156287, 156348), False, 'import pylark\n'), ((156697, 156765), 'pylark.CreateSheetFloatImageReq', 'pylark.CreateSheetFloatImageReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x')\n", (156728, 156765), False, 'import pylark\n'), ((157098, 157190), 'pylark.DeleteSheetFloatImageReq', 'pylark.DeleteSheetFloatImageReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""', 'float_image_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x',\n float_image_id='x')\n", (157129, 157190), False, 'import pylark\n'), ((157539, 157631), 'pylark.UpdateSheetFloatImageReq', 'pylark.UpdateSheetFloatImageReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""', 'float_image_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x',\n float_image_id='x')\n", (157570, 157631), False, 'import pylark\n'), ((157974, 158063), 'pylark.GetSheetFloatImageReq', 'pylark.GetSheetFloatImageReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""', 'float_image_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x',\n float_image_id='x')\n", (158002, 158063), False, 'import pylark\n'), ((158410, 158477), 'pylark.QuerySheetFloatImageReq', 'pylark.QuerySheetFloatImageReq', ([], {'spreadsheet_token': '"""x"""', 'sheet_id': '"""x"""'}), "(spreadsheet_token='x', sheet_id='x')\n", (158440, 158477), False, 'import pylark\n'), ((158783, 158811), 'pylark.GetWikiSpaceListReq', 'pylark.GetWikiSpaceListReq', ([], {}), '()\n', (158809, 158811), False, 'import pylark\n'), ((159052, 159088), 'pylark.GetWikiSpaceReq', 'pylark.GetWikiSpaceReq', ([], {'space_id': '"""x"""'}), "(space_id='x')\n", (159074, 159088), False, 'import pylark\n'), ((159403, 159449), 'pylark.UpdateWikiSpaceSettingReq', 'pylark.UpdateWikiSpaceSettingReq', ([], 
{'space_id': '"""x"""'}), "(space_id='x')\n", (159435, 159449), False, 'import pylark\n'), ((159756, 159798), 'pylark.AddWikiSpaceMemberReq', 'pylark.AddWikiSpaceMemberReq', ([], {'space_id': '"""x"""'}), "(space_id='x')\n", (159784, 159798), False, 'import pylark\n'), ((160095, 160133), 'pylark.CreateWikiNodeReq', 'pylark.CreateWikiNodeReq', ([], {'space_id': '"""x"""'}), "(space_id='x')\n", (160119, 160133), False, 'import pylark\n'), ((160434, 160473), 'pylark.GetWikiNodeListReq', 'pylark.GetWikiNodeListReq', ([], {'space_id': '"""x"""'}), "(space_id='x')\n", (160459, 160473), False, 'import pylark\n'), ((160747, 160770), 'pylark.GetWikiNodeReq', 'pylark.GetWikiNodeReq', ([], {}), '()\n', (160768, 160770), False, 'import pylark\n'), ((161017, 161055), 'pylark.MoveDocsToWikiReq', 'pylark.MoveDocsToWikiReq', ([], {'space_id': '"""x"""'}), "(space_id='x')\n", (161041, 161055), False, 'import pylark\n')] |
# -*- coding: utf-8 -*-
#
# Implementation of Granger-Geweke causality
#
#
# Builtin/3rd party package imports
import numpy as np
def granger(CSD, Hfunc, Sigma):
    """
    Pairwise Granger-Geweke causality for every ordered channel pair,
    following Eq. 8 of Dhamala et al. (2008).

    The spectral transfer functions `Hfunc` and the noise covariance
    `Sigma` must already be available (e.g. from `wilson_sf`).

    Parameters
    ----------
    CSD : (nFreq, N, N) :class:`numpy.ndarray`
        Complex cross-spectral densities for all channel pairs ``i,j``;
        `N` is the number of channels.
    Hfunc : (nFreq, N, N) :class:`numpy.ndarray`
        Spectral transfer functions for all channel pairs ``i,j``.
    Sigma : (N, N) :class:`numpy.ndarray`
        Noise covariance matrix.

    Returns
    -------
    Granger : (nFreq, N, N) :class:`numpy.ndarray`
        Spectral Granger-Geweke causalities; ``Granger[:, i, j]`` is the
        causality from channel ``i`` to channel ``j``.

    See also
    --------
    wilson_sf : :func:`~syncopy.connectivity.wilson_sf.wilson_sf
        Spectral matrix factorization that yields the
        transfer functions and noise covariances
        from a cross spectral density.

    Notes
    -----
    .. [1] Dhamala, Mukeshwar, <NAME>, and <NAME>.
       "Estimating Granger causality from Fourier and wavelet transforms
       of time series data." Physical review letters 100.1 (2008): 018701.
    """
    num_channels = CSD.shape[1]

    # The auto-spectra S_ii(f) sit on the diagonal; they are real-valued,
    # so strip any residual imaginary part via the magnitude.
    diag_spectra = np.abs(CSD.transpose(1, 2, 0).diagonal())

    # Broadcast the auto-spectra into stacked columns (nChannel=3):
    #              S_11 S_22 S_33
    #   Smat(f) =  S_11 S_22 S_33
    #              S_11 S_22 S_33
    stacked_spectra = diag_spectra[:, None, :] * np.ones(num_channels)[:, None]

    # Causality i -> j needs the transposed H_ji entries (squared magnitude).
    transfer_sq = np.abs(Hfunc.transpose(0, 2, 1)) ** 2

    # Causality i -> j needs the Sigma_ji noise entries.
    noise_ji = np.abs(Sigma.T)

    # Stack the (real) noise auto-covariances exactly like the auto-spectra,
    # just without a frequency axis.
    noise_ii = np.abs(Sigma.diagonal())[None, :] * np.ones(num_channels)[:, None]

    # Assemble the denominator of Eq. 8 and take the log-ratio.
    denominator = stacked_spectra - (noise_ii.T - noise_ji ** 2 / noise_ii) * transfer_sq
    return np.log(stacked_spectra / denominator)
| [
"numpy.abs",
"numpy.log",
"numpy.ones"
] | [((1711, 1731), 'numpy.abs', 'np.abs', (['auto_spectra'], {}), '(auto_spectra)\n', (1717, 1731), True, 'import numpy as np\n'), ((2119, 2134), 'numpy.abs', 'np.abs', (['Sigma.T'], {}), '(Sigma.T)\n', (2125, 2134), True, 'import numpy as np\n'), ((2475, 2495), 'numpy.log', 'np.log', (['(Smat / denom)'], {}), '(Smat / denom)\n', (2481, 2495), True, 'import numpy as np\n'), ((1953, 1971), 'numpy.ones', 'np.ones', (['nChannels'], {}), '(nChannels)\n', (1960, 1971), True, 'import numpy as np\n'), ((2302, 2320), 'numpy.ones', 'np.ones', (['nChannels'], {}), '(nChannels)\n', (2309, 2320), True, 'import numpy as np\n')] |
import os
import sys
import maya.standalone
import mayaLib
# Smoke test for the mayaLib engine wrapper: boot Maya standalone,
# create an engine, then exercise save / open-as / open round-trips.
print("=" * 30)
print("This is mayaLib package test")
print("=" * 30)

print("Initializing maya standalone ...")
maya.standalone.initialize(name="python")

# Create engine
maya_engine = mayaLib.MayaEngine()
print("Engine : " + str(maya_engine))

# Get engine path (freshly initialized, so this is the unsaved scene path)
print("Current file location : " + str(maya_engine.get_file_path()))

# Save to the user's desktop.
# FIX: the inner os.path.join() call took a single argument and was a
# no-op wrapper; flattened into one join. NOTE(review): USERPROFILE is
# Windows-only — this script presumably targets Windows; confirm.
maya_engine_scene = os.path.join(os.environ["USERPROFILE"], "Desktop", "test.ma")
maya_engine.save(maya_engine_scene)
print("Current file location after save : " + maya_engine.get_file_path())

# Open as (re-open the scene under its own path)
maya_engine.open_as(maya_engine.get_file_path())
print("Open as ")
print("Current file location after open as : " + maya_engine.get_file_path())

# Open (plain open of the saved scene)
maya_engine.open(maya_engine_scene)
print("Current file location after open : " + maya_engine.get_file_path())

print("Uninitialized maya standalone ...")
maya.standalone.uninitialize()

sys.exit(0)
| [
"mayaLib.MayaEngine",
"os.path.join",
"sys.exit"
] | [((247, 267), 'mayaLib.MayaEngine', 'mayaLib.MayaEngine', ([], {}), '()\n', (265, 267), False, 'import mayaLib\n'), ((955, 966), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (963, 966), False, 'import sys\n'), ((433, 472), 'os.path.join', 'os.path.join', (["os.environ['USERPROFILE']"], {}), "(os.environ['USERPROFILE'])\n", (445, 472), False, 'import os\n')] |
"""Initial Migration
Revision ID: b0c12eb8ae59
Revises: <PASSWORD>
Create Date: 2020-07-15 11:44:46.190193
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '<KEY>'
down_revision = '<PASSWORD>'
branch_labels = None
depends_on = None
def upgrade():
    """Add the nullable ``password_hash`` column and drop ``pass_secure``."""
    password_hash = sa.Column('password_hash', sa.String(length=255), nullable=True)
    op.add_column('users', password_hash)
    op.drop_column('users', 'pass_secure')
def downgrade():
    """Restore the legacy ``pass_secure`` column and drop ``password_hash``."""
    pass_secure = sa.Column('pass_secure', sa.VARCHAR(length=255), autoincrement=False, nullable=True)
    op.add_column('users', pass_secure)
    op.drop_column('users', 'password_hash')
| [
"sqlalchemy.String",
"alembic.op.drop_column",
"sqlalchemy.VARCHAR"
] | [((470, 508), 'alembic.op.drop_column', 'op.drop_column', (['"""users"""', '"""pass_secure"""'], {}), "('users', 'pass_secure')\n", (484, 508), False, 'from alembic import op\n'), ((746, 786), 'alembic.op.drop_column', 'op.drop_column', (['"""users"""', '"""password_hash"""'], {}), "('users', 'password_hash')\n", (760, 786), False, 'from alembic import op\n'), ((427, 448), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (436, 448), True, 'import sqlalchemy as sa\n'), ((681, 703), 'sqlalchemy.VARCHAR', 'sa.VARCHAR', ([], {'length': '(255)'}), '(length=255)\n', (691, 703), True, 'import sqlalchemy as sa\n')] |
# Exercício Python #045 - GAME: <NAME> e Tesoura
#
# Crie um programa que faça o computador jogar JOKENPÔ com você.
# Aprenda a arrumar as cores nas respostas!
from random import choice
from random import randint # Maneira utilizada na resolução deste exercício
from time import sleep
# Jokenpô (rock-paper-scissors) against the computer.
# BUG FIX: the computer's pick (PCc) used to be drawn from a list of
# ANSI-coloured strings such as '\033[1;35mPEDRA\033[m'. Comparing those
# against the player's plain-text input meant every branch below was
# unreachable except the final else — the player always "won", and ties
# were impossible. The pick is now a plain name; colours are applied
# only when printing.
print('\033[1;31mATENÇÃO! ESTE É UM JOGO ALTAMENTE PERIGOSO ONDE NÃO HÁ CHANCES DE VITÓRIA PARA VOCÊ!\033[m')
Uc = input('\033[0;30mMe diga, \033[1;34mó grande jogador, \033[0;30mvocê escolhe \033[1;35mPEDRA, \033[1;31mPAPEL, '
           '\033[0;30mou \033[1;36mTESOURA? ').strip().upper()
# Plain option names mapped to their coloured display strings.
CORES = {'PEDRA': '\033[1;35mPEDRA\033[m',
         'PAPEL': '\033[1;31mPAPEL\033[m',
         'TESOURA': '\033[1;36mTESOURA\033[m'}
PCc = choice(list(CORES))
PCc_cor = CORES[PCc]  # coloured form, used only for display
sleep(0.5)
print('JO')
sleep(1)
print('KEN')
sleep(1)
print('PO!')
# Computer wins when its pick beats the player's; equal picks tie.
if PCc == 'PEDRA' and Uc == 'TESOURA' or PCc == 'TESOURA' and Uc == 'PAPEL' or PCc == 'PAPEL' and Uc == 'PEDRA':
    print(f'\033[1;31mHAHAHA! Eu venci! \033[0;30mEu escolhi \033[m{PCc_cor} \033[0;30me você \033[m{Uc}\033[0;30m!')
elif PCc == Uc:
    print(f'\033[1;33mEMPATE! Vamos jogar novamente! Eu escolhi \033[m{PCc_cor} \033[0;30me você \033[m{Uc}')
else:
    print(f'\033[0;34mT-T Infelizmente,\033[1;32mvocê venceu... \033[0;30mEu escolhi \033[m{PCc_cor}, \033[0;30me você '
          f'escolheu \033[m{Uc}\033[0;30m...\033[m')
"random.choice",
"time.sleep"
] | [((670, 680), 'random.choice', 'choice', (['PC'], {}), '(PC)\n', (676, 680), False, 'from random import choice\n'), ((682, 692), 'time.sleep', 'sleep', (['(0.5)'], {}), '(0.5)\n', (687, 692), False, 'from time import sleep\n'), ((706, 714), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (711, 714), False, 'from time import sleep\n'), ((729, 737), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (734, 737), False, 'from time import sleep\n')] |
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides definitions for non-regularized training or test losses."""
import sys
import numpy as np
import tensorflow as tf
from tensorflow import flags
import tf_cider
FLAGS = flags.FLAGS
# Global switch for the debug logging below.
LOG_TENSOR = True

def log_tensor(name, g=None, l=None):
  """Evaluate the expression string `name` and print "name value" to stderr.

  Does nothing when LOG_TENSOR is False.

  Args:
    name: a Python expression as a string (typically a variable name).
      NOTE: this is passed to eval(); only ever call it with trusted,
      hard-coded debug strings.
    g: optional globals mapping for eval.
    l: optional locals mapping for eval.
  """
  if LOG_TENSOR:
    if g is None and l is None:
      # BUG FIX: this branch previously did eval(name, {"self": self}),
      # but `self` does not exist at module scope, so it always raised a
      # NameError. Evaluate in this module's namespace instead.
      value = eval(name)
    else:
      value = eval(name, g, l)
    # sys.stderr.write instead of the py2-only `print >> sys.stderr`
    # statement; output format ("name value\n") is unchanged and this
    # works under both Python 2 and 3.
    sys.stderr.write('%s %s\n' % (name, value))
class BaseLoss(object):
  """Abstract base class for training/evaluation losses.

  Subclasses override ``calculate_loss`` to turn a mini-batch of
  predictions and ground-truth labels into a single scalar loss tensor.
  """

  def calculate_loss(self, unused_predictions, unused_labels, **unused_params):
    """Return the average loss over the examples of a mini-batch.

    Args:
      unused_predictions: 2-d tensor of prediction scores; one row per
        example, one column per class.
      unused_labels: 2-d tensor with the same shape as the predictions,
        holding labels in the range [0, 1].
      unused_params: extra, loss-specific keyword parameters.

    Returns:
      A scalar loss tensor.

    Raises:
      NotImplementedError: always; subclasses must override this method.
    """
    raise NotImplementedError()
class CrossEntropyLoss(BaseLoss):
  """Sigmoid cross-entropy loss, optionally weighted per example."""

  def calculate_loss(self, predictions, labels, weights=None,
                     epsilon=1e-9, **unused_params):
    """Compute the (optionally weighted) mean sigmoid cross-entropy.

    Args:
      predictions: logits tensor, same shape as `labels`.
      labels: tensor of targets in [0, 1].
      weights: optional per-element weights; when given, the loss is the
        weighted average instead of the plain mean.
      epsilon: small constant added to the weight sum to avoid division
        by zero.

    Returns:
      A scalar loss tensor.
    """
    loss = tf.nn.sigmoid_cross_entropy_with_logits(labels=labels,
                                                   logits=predictions)
    if weights is not None:
      # BUG FIX: `epsilon` was referenced here but never defined, so any
      # call that supplied `weights` raised a NameError. It is now a
      # keyword argument with the same default as in
      # SparseSoftmaxCrossEntropyLoss, keeping the two classes consistent.
      loss = tf.div(tf.reduce_sum(loss * weights),
                    tf.reduce_sum(weights) + epsilon)
    else:
      loss = tf.reduce_mean(loss)
    return loss
class SparseSoftmaxCrossEntropyLoss(BaseLoss):
  """Sparse softmax cross-entropy loss, optionally weighted per example."""

  def calculate_loss(self, predictions, labels, weights=None,
                     epsilon=1e-9, **unused_params):
    """Compute the (optionally weighted) mean sparse softmax cross-entropy.

    Args:
      predictions: logits tensor.
      labels: tensor of sparse (integer class-index) targets.
      weights: optional per-element weights; when given, the loss is the
        weighted average instead of the plain mean.
      epsilon: small constant added to the weight sum to avoid division
        by zero.

    Returns:
      A scalar loss tensor.
    """
    per_example = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=labels,
                                                                  logits=predictions)
    if weights is None:
      return tf.reduce_mean(per_example)
    return tf.div(tf.reduce_sum(per_example * weights),
                  tf.reduce_sum(weights) + epsilon)
class SelfCriticalLoss(BaseLoss):
  """Self-critical sequence-training (SCST) loss for captioning.

  Uses a CIDEr score of a greedily decoded caption as the reward baseline
  for sampled captions; only the sampled captions' log-probabilities
  receive gradients.
  """

  def __init__(self):
    # Scorer used to compute CIDEr rewards for both greedy and sampled
    # captions (project-local helper; exact behavior defined in tf_cider).
    self.cider_scorer = tf_cider.CiderScorer()

  def calculate_loss(self,
                     target_caption_words,
                     target_caption_lengths,
                     greedy_caption_words,
                     greedy_caption_lengths,
                     sample_caption_words,
                     sample_caption_lengths,
                     sample_caption_logits,
                     epsilon=1e-9, **unused_params):
    """Compute the SCST policy-gradient loss.

    Args:
      target_caption_words: ground-truth caption word ids.
      target_caption_lengths: lengths of the ground-truth captions.
      greedy_caption_words: word ids of greedily decoded captions
        (reward baseline).
      greedy_caption_lengths: lengths of the greedy captions.
      sample_caption_words: word ids of sampled captions.
      sample_caption_lengths: lengths of the sampled captions.
      sample_caption_logits: per-step logits of the sampled captions;
        rank 3 — the code below unpacks (batch, max_len, ...) from its
        static shape, so both must be statically known.
      epsilon: small constant inside the log to avoid log(0).

    Returns:
      Scalar `rl_loss` tensor (length-masked mean of reward-weighted
      negative-advantage log-probabilities).
    """
    cider_scorer = self.cider_scorer
    # Debug dumps of all inputs (no-ops unless LOG_TENSOR is set).
    log_tensor("greedy_caption_words", l=locals())
    log_tensor("greedy_caption_lengths", l=locals())
    log_tensor("sample_caption_logits", l=locals())
    log_tensor("sample_caption_words", l=locals())
    log_tensor("sample_caption_lengths", l=locals())
    log_tensor("target_caption_words", l=locals())
    log_tensor("target_caption_lengths", l=locals())

    # CIDEr scores of the greedy (baseline) and sampled captions against
    # the ground-truth captions.
    greedy_score = cider_scorer.score(greedy_caption_words,
                                      greedy_caption_lengths,
                                      target_caption_words,
                                      target_caption_lengths)
    sample_score = cider_scorer.score(sample_caption_words,
                                      sample_caption_lengths,
                                      target_caption_words,
                                      target_caption_lengths)

    # TensorBoard diagnostics.
    tf.summary.scalar("losses/average_greedy_score", tf.reduce_mean(greedy_score))
    tf.summary.scalar("losses/average_sample_score", tf.reduce_mean(sample_score))
    tf.summary.histogram("losses/greedy_score", greedy_score)
    tf.summary.histogram("losses/sample_score", sample_score)
    tf.summary.histogram("losses/greedy_caption_lengths", greedy_caption_lengths)
    tf.summary.histogram("losses/sample_caption_lengths", sample_caption_lengths)

    # reward = -1 * reward
    # Negative advantage: (baseline - sample). Minimizing
    # reward * logprob then *increases* the probability of samples that
    # beat the greedy baseline — the standard SCST objective.
    reward = greedy_score - sample_score
    # The reward is a constant w.r.t. the policy parameters.
    reward = tf.stop_gradient(reward)

    # extract the logprobs of each word in sample_captions
    sample_probs = tf.nn.softmax(sample_caption_logits)
    batch_size, max_sample_length, _ = sample_probs.get_shape().as_list()
    # Build a (batch, time, 3) index tensor selecting, for each position,
    # the probability of the word that was actually sampled.
    sample_batch_index = tf.tile(tf.reshape(tf.range(0, batch_size),
                                           shape=[batch_size,1]),
                               multiples=[1, max_sample_length])
    sample_seq_index = tf.tile(tf.reshape(tf.range(0, max_sample_length),
                                         shape=[1, max_sample_length]),
                             multiples=[batch_size, 1])
    sample_gather_index = tf.stack([sample_batch_index,
                                    sample_seq_index,
                                    sample_caption_words], axis=2)
    # epsilon keeps log() finite for (near-)zero probabilities.
    sample_caption_logprobs = tf.log(tf.gather_nd(sample_probs, sample_gather_index) + epsilon)
    tf.summary.histogram("losses/sample_caption_logprobs", sample_caption_logprobs)

    # Mask out positions past each sampled caption's end.
    sample_caption_mask = tf.sequence_mask(sample_caption_lengths,
                                            maxlen=max_sample_length)
    sample_caption_mask = tf.cast(sample_caption_mask, dtype=tf.float32)

    # Policy-gradient loss: reward-weighted log-probs, averaged over the
    # valid (unmasked) time steps.
    rl_loss = tf.expand_dims(reward, 1) * sample_caption_logprobs
    rl_loss = tf.div(tf.reduce_sum(rl_loss * sample_caption_mask),
                     tf.reduce_sum(sample_caption_mask),
                     name="rl_loss")
    tf.summary.scalar("losses/rl_loss", rl_loss)

    log_tensor("reward", l=locals())
    log_tensor("rl_loss", l=locals())
    return rl_loss
| [
"tensorflow.cast",
"tensorflow.expand_dims",
"tensorflow.reduce_sum",
"tf_cider.CiderScorer",
"tensorflow.nn.sparse_softmax_cross_entropy_with_logits",
"tensorflow.range",
"tensorflow.stop_gradient",
"tensorflow.summary.histogram",
"tensorflow.nn.softmax",
"tensorflow.nn.sigmoid_cross_entropy_with... | [((1878, 1952), 'tensorflow.nn.sigmoid_cross_entropy_with_logits', 'tf.nn.sigmoid_cross_entropy_with_logits', ([], {'labels': 'labels', 'logits': 'predictions'}), '(labels=labels, logits=predictions)\n', (1917, 1952), True, 'import tensorflow as tf\n'), ((2372, 2458), 'tensorflow.nn.sparse_softmax_cross_entropy_with_logits', 'tf.nn.sparse_softmax_cross_entropy_with_logits', ([], {'labels': 'labels', 'logits': 'predictions'}), '(labels=labels, logits=\n predictions)\n', (2418, 2458), True, 'import tensorflow as tf\n'), ((2786, 2808), 'tf_cider.CiderScorer', 'tf_cider.CiderScorer', ([], {}), '()\n', (2806, 2808), False, 'import tf_cider\n'), ((4268, 4325), 'tensorflow.summary.histogram', 'tf.summary.histogram', (['"""losses/greedy_score"""', 'greedy_score'], {}), "('losses/greedy_score', greedy_score)\n", (4288, 4325), True, 'import tensorflow as tf\n'), ((4330, 4387), 'tensorflow.summary.histogram', 'tf.summary.histogram', (['"""losses/sample_score"""', 'sample_score'], {}), "('losses/sample_score', sample_score)\n", (4350, 4387), True, 'import tensorflow as tf\n'), ((4392, 4469), 'tensorflow.summary.histogram', 'tf.summary.histogram', (['"""losses/greedy_caption_lengths"""', 'greedy_caption_lengths'], {}), "('losses/greedy_caption_lengths', greedy_caption_lengths)\n", (4412, 4469), True, 'import tensorflow as tf\n'), ((4474, 4551), 'tensorflow.summary.histogram', 'tf.summary.histogram', (['"""losses/sample_caption_lengths"""', 'sample_caption_lengths'], {}), "('losses/sample_caption_lengths', sample_caption_lengths)\n", (4494, 4551), True, 'import tensorflow as tf\n'), ((4634, 4658), 'tensorflow.stop_gradient', 'tf.stop_gradient', (['reward'], {}), '(reward)\n', (4650, 4658), True, 'import tensorflow as tf\n'), ((4738, 4774), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['sample_caption_logits'], {}), '(sample_caption_logits)\n', (4751, 4774), True, 'import tensorflow as tf\n'), ((5287, 5365), 'tensorflow.stack', 
'tf.stack', (['[sample_batch_index, sample_seq_index, sample_caption_words]'], {'axis': '(2)'}), '([sample_batch_index, sample_seq_index, sample_caption_words], axis=2)\n', (5295, 5365), True, 'import tensorflow as tf\n'), ((5541, 5620), 'tensorflow.summary.histogram', 'tf.summary.histogram', (['"""losses/sample_caption_logprobs"""', 'sample_caption_logprobs'], {}), "('losses/sample_caption_logprobs', sample_caption_logprobs)\n", (5561, 5620), True, 'import tensorflow as tf\n'), ((5647, 5713), 'tensorflow.sequence_mask', 'tf.sequence_mask', (['sample_caption_lengths'], {'maxlen': 'max_sample_length'}), '(sample_caption_lengths, maxlen=max_sample_length)\n', (5663, 5713), True, 'import tensorflow as tf\n'), ((5784, 5830), 'tensorflow.cast', 'tf.cast', (['sample_caption_mask'], {'dtype': 'tf.float32'}), '(sample_caption_mask, dtype=tf.float32)\n', (5791, 5830), True, 'import tensorflow as tf\n'), ((6067, 6111), 'tensorflow.summary.scalar', 'tf.summary.scalar', (['"""losses/rl_loss"""', 'rl_loss'], {}), "('losses/rl_loss', rl_loss)\n", (6084, 6111), True, 'import tensorflow as tf\n'), ((2160, 2180), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['loss'], {}), '(loss)\n', (2174, 2180), True, 'import tensorflow as tf\n'), ((2668, 2688), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['loss'], {}), '(loss)\n', (2682, 2688), True, 'import tensorflow as tf\n'), ((4151, 4179), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['greedy_score'], {}), '(greedy_score)\n', (4165, 4179), True, 'import tensorflow as tf\n'), ((4234, 4262), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['sample_score'], {}), '(sample_score)\n', (4248, 4262), True, 'import tensorflow as tf\n'), ((5850, 5875), 'tensorflow.expand_dims', 'tf.expand_dims', (['reward', '(1)'], {}), '(reward, 1)\n', (5864, 5875), True, 'import tensorflow as tf\n'), ((5923, 5967), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['(rl_loss * sample_caption_mask)'], {}), '(rl_loss * sample_caption_mask)\n', (5936, 5967), True, 'import 
tensorflow as tf\n'), ((5990, 6024), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['sample_caption_mask'], {}), '(sample_caption_mask)\n', (6003, 6024), True, 'import tensorflow as tf\n'), ((2052, 2081), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['(loss * weights)'], {}), '(loss * weights)\n', (2065, 2081), True, 'import tensorflow as tf\n'), ((2560, 2589), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['(loss * weights)'], {}), '(loss * weights)\n', (2573, 2589), True, 'import tensorflow as tf\n'), ((4893, 4916), 'tensorflow.range', 'tf.range', (['(0)', 'batch_size'], {}), '(0, batch_size)\n', (4901, 4916), True, 'import tensorflow as tf\n'), ((5096, 5126), 'tensorflow.range', 'tf.range', (['(0)', 'max_sample_length'], {}), '(0, max_sample_length)\n', (5104, 5126), True, 'import tensorflow as tf\n'), ((5478, 5525), 'tensorflow.gather_nd', 'tf.gather_nd', (['sample_probs', 'sample_gather_index'], {}), '(sample_probs, sample_gather_index)\n', (5490, 5525), True, 'import tensorflow as tf\n'), ((2103, 2125), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['weights'], {}), '(weights)\n', (2116, 2125), True, 'import tensorflow as tf\n'), ((2611, 2633), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['weights'], {}), '(weights)\n', (2624, 2633), True, 'import tensorflow as tf\n')] |
# Generated by Django 2.0.5 on 2019-06-23 19:14
from django.db import migrations
class Migration(migrations.Migration):
    """Remove the address-related fields from the ``devolution`` model."""

    dependencies = [
        ('provarme_dashboard', '0005_devolution_traffic'),
    ]

    # One RemoveField per dropped column, in the original order.
    operations = [
        migrations.RemoveField(model_name='devolution', name=field_name)
        for field_name in ('address', 'city', 'state', 'zipcode')
    ]
| [
"django.db.migrations.RemoveField"
] | [((238, 301), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""devolution"""', 'name': '"""address"""'}), "(model_name='devolution', name='address')\n", (260, 301), False, 'from django.db import migrations\n'), ((346, 406), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""devolution"""', 'name': '"""city"""'}), "(model_name='devolution', name='city')\n", (368, 406), False, 'from django.db import migrations\n'), ((451, 512), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""devolution"""', 'name': '"""state"""'}), "(model_name='devolution', name='state')\n", (473, 512), False, 'from django.db import migrations\n'), ((557, 620), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""devolution"""', 'name': '"""zipcode"""'}), "(model_name='devolution', name='zipcode')\n", (579, 620), False, 'from django.db import migrations\n')] |
#!/usr/bin/env python3
import json
import os
from pybytom.script import (
get_public_key_hash, get_p2pkh_program, get_p2wpkh_program, get_p2wpkh_address
)
# Test Values: load the shared fixture dict `_` from ../values.json.
base_path = os.path.dirname(__file__)
file_path = os.path.abspath(os.path.join(base_path, "..", "values.json"))
# FIX: use a context manager so the file handle is closed even when
# json parsing raises (the old open/read/close left it open on error).
with open(file_path, "r") as values:
    _ = json.loads(values.read())
def test_p2pk():
    """Check hash/program/address derivation against the p2pk fixtures."""
    p2pk = _["script"]["p2pk"]
    pkh = p2pk["public_key_hash"]

    assert get_public_key_hash(public_key=p2pk["public_key"]) == pkh
    assert get_p2pkh_program(public_key_hash=pkh) == p2pk["program"]["p2pkh"]
    assert get_p2wpkh_program(public_key_hash=pkh) == p2pk["program"]["p2wpkh"]

    # Native and vapor addresses for every supported network.
    for network in ("mainnet", "solonet", "testnet"):
        assert get_p2wpkh_address(
            public_key_hash=pkh, network=network, vapor=False
        ) == p2pk["address"][network]
        assert get_p2wpkh_address(
            public_key_hash=pkh, network=network, vapor=True
        ) == p2pk["vapor_address"][network]
| [
"pybytom.script.get_p2wpkh_program",
"os.path.join",
"os.path.dirname",
"pybytom.script.get_p2wpkh_address",
"pybytom.script.get_public_key_hash",
"pybytom.script.get_p2pkh_program"
] | [((188, 213), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (203, 213), False, 'import os\n'), ((242, 286), 'os.path.join', 'os.path.join', (['base_path', '""".."""', '"""values.json"""'], {}), "(base_path, '..', 'values.json')\n", (254, 286), False, 'import os\n'), ((394, 459), 'pybytom.script.get_public_key_hash', 'get_public_key_hash', ([], {'public_key': "_['script']['p2pk']['public_key']"}), "(public_key=_['script']['p2pk']['public_key'])\n", (413, 459), False, 'from pybytom.script import get_public_key_hash, get_p2pkh_program, get_p2wpkh_program, get_p2wpkh_address\n'), ((528, 601), 'pybytom.script.get_p2pkh_program', 'get_p2pkh_program', ([], {'public_key_hash': "_['script']['p2pk']['public_key_hash']"}), "(public_key_hash=_['script']['p2pk']['public_key_hash'])\n", (545, 601), False, 'from pybytom.script import get_public_key_hash, get_p2pkh_program, get_p2wpkh_program, get_p2wpkh_address\n'), ((670, 744), 'pybytom.script.get_p2wpkh_program', 'get_p2wpkh_program', ([], {'public_key_hash': "_['script']['p2pk']['public_key_hash']"}), "(public_key_hash=_['script']['p2pk']['public_key_hash'])\n", (688, 744), False, 'from pybytom.script import get_public_key_hash, get_p2pkh_program, get_p2wpkh_program, get_p2wpkh_address\n'), ((815, 925), 'pybytom.script.get_p2wpkh_address', 'get_p2wpkh_address', ([], {'public_key_hash': "_['script']['p2pk']['public_key_hash']", 'network': '"""mainnet"""', 'vapor': '(False)'}), "(public_key_hash=_['script']['p2pk']['public_key_hash'],\n network='mainnet', vapor=False)\n", (833, 925), False, 'from pybytom.script import get_public_key_hash, get_p2pkh_program, get_p2wpkh_program, get_p2wpkh_address\n'), ((992, 1102), 'pybytom.script.get_p2wpkh_address', 'get_p2wpkh_address', ([], {'public_key_hash': "_['script']['p2pk']['public_key_hash']", 'network': '"""solonet"""', 'vapor': '(False)'}), "(public_key_hash=_['script']['p2pk']['public_key_hash'],\n network='solonet', vapor=False)\n", (1010, 1102), 
False, 'from pybytom.script import get_public_key_hash, get_p2pkh_program, get_p2wpkh_program, get_p2wpkh_address\n'), ((1169, 1279), 'pybytom.script.get_p2wpkh_address', 'get_p2wpkh_address', ([], {'public_key_hash': "_['script']['p2pk']['public_key_hash']", 'network': '"""testnet"""', 'vapor': '(False)'}), "(public_key_hash=_['script']['p2pk']['public_key_hash'],\n network='testnet', vapor=False)\n", (1187, 1279), False, 'from pybytom.script import get_public_key_hash, get_p2pkh_program, get_p2wpkh_program, get_p2wpkh_address\n'), ((1347, 1456), 'pybytom.script.get_p2wpkh_address', 'get_p2wpkh_address', ([], {'public_key_hash': "_['script']['p2pk']['public_key_hash']", 'network': '"""mainnet"""', 'vapor': '(True)'}), "(public_key_hash=_['script']['p2pk']['public_key_hash'],\n network='mainnet', vapor=True)\n", (1365, 1456), False, 'from pybytom.script import get_public_key_hash, get_p2pkh_program, get_p2wpkh_program, get_p2wpkh_address\n'), ((1529, 1638), 'pybytom.script.get_p2wpkh_address', 'get_p2wpkh_address', ([], {'public_key_hash': "_['script']['p2pk']['public_key_hash']", 'network': '"""solonet"""', 'vapor': '(True)'}), "(public_key_hash=_['script']['p2pk']['public_key_hash'],\n network='solonet', vapor=True)\n", (1547, 1638), False, 'from pybytom.script import get_public_key_hash, get_p2pkh_program, get_p2wpkh_program, get_p2wpkh_address\n'), ((1711, 1820), 'pybytom.script.get_p2wpkh_address', 'get_p2wpkh_address', ([], {'public_key_hash': "_['script']['p2pk']['public_key_hash']", 'network': '"""testnet"""', 'vapor': '(True)'}), "(public_key_hash=_['script']['p2pk']['public_key_hash'],\n network='testnet', vapor=True)\n", (1729, 1820), False, 'from pybytom.script import get_public_key_hash, get_p2pkh_program, get_p2wpkh_program, get_p2wpkh_address\n')] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
:copyright: (c) 2008-2017 Sigasi
:license: BSD, see LICENSE for more details.
"""
import os
from SigasiProjectCreator.ArgsAndFileParser import ArgsAndFileParser
from SigasiProjectCreator.Creator import SigasiProjectCreator
from SigasiProjectCreator import VhdlVersion
usage = """usage: %prog project-name hdl-file hdl-file...
this script creates a sigasi project in the current working directory:
* adds one linked folder to the project that points to the common
folder of all listed hdl-files
* unmaps all hdl-files in the common folder, except the listed files.
These files are mapped to the 'work' library
example: %prog MyProjectName foo.vhdl bar.sv
"""
def main():
parser = ArgsAndFileParser(usage)
args = parser.parse_args(2)
project_name = args[0]
hdl_files = args[1:]
destination = os.getcwd()
# Find common directory of the hdl files
abs_paths = [os.path.abspath(x) for x in hdl_files]
folder = os.path.dirname(os.path.commonprefix([p + os.path.sep for p in abs_paths]))
sigasi_project_file_creator = SigasiProjectCreator(project_name, VhdlVersion.NINETY_THREE)
# Create Project File and add a link the common source folder
folder_name = os.path.basename(os.path.normpath(folder))
sigasi_project_file_creator.add_link(folder_name, folder, True)
# Create Library Mapping File
# Unmap everything except the list of files (map those to work)
sigasi_project_file_creator.unmap("/")
for path in abs_paths:
relative_file_path = os.path.relpath(path, folder)
sigasi_project_file_creator.add_mapping(folder_name + "/" + relative_file_path, "work")
sigasi_project_file_creator.write(destination)
if __name__ == '__main__':
main()
| [
"SigasiProjectCreator.Creator.SigasiProjectCreator",
"os.getcwd",
"SigasiProjectCreator.ArgsAndFileParser.ArgsAndFileParser",
"os.path.normpath",
"os.path.commonprefix",
"os.path.abspath",
"os.path.relpath"
] | [((782, 806), 'SigasiProjectCreator.ArgsAndFileParser.ArgsAndFileParser', 'ArgsAndFileParser', (['usage'], {}), '(usage)\n', (799, 806), False, 'from SigasiProjectCreator.ArgsAndFileParser import ArgsAndFileParser\n'), ((909, 920), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (918, 920), False, 'import os\n'), ((1147, 1207), 'SigasiProjectCreator.Creator.SigasiProjectCreator', 'SigasiProjectCreator', (['project_name', 'VhdlVersion.NINETY_THREE'], {}), '(project_name, VhdlVersion.NINETY_THREE)\n', (1167, 1207), False, 'from SigasiProjectCreator.Creator import SigasiProjectCreator\n'), ((984, 1002), 'os.path.abspath', 'os.path.abspath', (['x'], {}), '(x)\n', (999, 1002), False, 'import os\n'), ((1052, 1112), 'os.path.commonprefix', 'os.path.commonprefix', (['[(p + os.path.sep) for p in abs_paths]'], {}), '([(p + os.path.sep) for p in abs_paths])\n', (1072, 1112), False, 'import os\n'), ((1309, 1333), 'os.path.normpath', 'os.path.normpath', (['folder'], {}), '(folder)\n', (1325, 1333), False, 'import os\n'), ((1605, 1634), 'os.path.relpath', 'os.path.relpath', (['path', 'folder'], {}), '(path, folder)\n', (1620, 1634), False, 'import os\n')] |
#!/usr/bin/env python3
"""Benchmark icontract against deal when used together with hypothesis."""
import os
import sys
import timeit
from typing import List
import deal
import dpcontracts
import hypothesis
import hypothesis.extra.dpcontracts
import hypothesis.strategies
import icontract
import tabulate
import icontract_hypothesis
def benchmark_icontract_assume_preconditions(arg_count: int = 1) -> None:
"""Benchmark the Hypothesis testing with icontract and rejection sampling."""
count = 0
if arg_count == 1:
@icontract.require(lambda a: a > 0)
def some_func(a: int) -> None:
nonlocal count
count += 1
pass
assume_preconditions = icontract_hypothesis.make_assume_preconditions(some_func)
@hypothesis.settings(
suppress_health_check=(hypothesis.HealthCheck.filter_too_much,)
)
@hypothesis.given(a=hypothesis.strategies.integers())
def execute(a: int) -> None:
assume_preconditions(a)
some_func(a)
elif arg_count == 2:
@icontract.require(lambda a: a > 0)
@icontract.require(lambda b: b > 0)
def some_func(a: int, b: int) -> None:
nonlocal count
count += 1
pass
assume_preconditions = icontract_hypothesis.make_assume_preconditions(some_func)
@hypothesis.settings(
suppress_health_check=(hypothesis.HealthCheck.filter_too_much,)
)
@hypothesis.given(
a=hypothesis.strategies.integers(), b=hypothesis.strategies.integers()
)
def execute(a: int, b: int) -> None:
assume_preconditions(a=a, b=b)
some_func(a, b)
elif arg_count == 3:
@icontract.require(lambda a: a > 0)
@icontract.require(lambda b: b > 0)
@icontract.require(lambda c: c > 0)
def some_func(a: int, b: int, c: int) -> None:
nonlocal count
count += 1
pass
assume_preconditions = icontract_hypothesis.make_assume_preconditions(some_func)
@hypothesis.settings(
suppress_health_check=(hypothesis.HealthCheck.filter_too_much,)
)
@hypothesis.given(
a=hypothesis.strategies.integers(),
b=hypothesis.strategies.integers(),
c=hypothesis.strategies.integers(),
)
def execute(a: int, b: int, c: int) -> None:
assume_preconditions(a=a, b=b, c=c)
some_func(a, b, c)
else:
raise NotImplementedError("arg_count {}".format(arg_count))
execute()
# Assert the count of function executions for fair tests
assert count == 100
def benchmark_icontract_inferred_strategy(arg_count: int = 1) -> None:
"""Benchmark the Hypothesis testing with icontract and inferred search strategies."""
count = 0
if arg_count == 1:
@icontract.require(lambda a: a > 0)
def some_func(a: int) -> None:
nonlocal count
count += 1
pass
elif arg_count == 2:
@icontract.require(lambda a: a > 0)
@icontract.require(lambda b: b > 0)
def some_func(a: int, b: int) -> None:
nonlocal count
count += 1
pass
elif arg_count == 3:
@icontract.require(lambda a: a > 0)
@icontract.require(lambda b: b > 0)
@icontract.require(lambda c: c > 0)
def some_func(a: int, b: int, c: int) -> None:
nonlocal count
count += 1
pass
else:
raise NotImplementedError("arg_count {}".format(arg_count))
icontract_hypothesis.test_with_inferred_strategy(some_func)
# Assert the count of function executions for fair tests
assert count == 100
def benchmark_dpcontracts(arg_count: int = 1) -> None:
"""Benchmark the Hypothesis testing with dpcontracts."""
count = 0
if arg_count == 1:
@dpcontracts.require("some dummy contract", lambda args: args.a > 0)
def some_func(a: int) -> None:
nonlocal count
count += 1
pass
@hypothesis.settings(
suppress_health_check=(hypothesis.HealthCheck.filter_too_much,)
)
@hypothesis.given(a=hypothesis.strategies.integers())
def execute(a: int) -> None:
hypothesis.extra.dpcontracts.fulfill(some_func)(a)
elif arg_count == 2:
@dpcontracts.require("some dummy contract", lambda args: args.a > 0)
@dpcontracts.require("some dummy contract", lambda args: args.b > 0)
def some_func(a: int, b: int) -> None:
nonlocal count
count += 1
pass
@hypothesis.settings(
suppress_health_check=(hypothesis.HealthCheck.filter_too_much,)
)
@hypothesis.given(
a=hypothesis.strategies.integers(), b=hypothesis.strategies.integers()
)
def execute(a: int, b: int) -> None:
hypothesis.extra.dpcontracts.fulfill(some_func)(a, b)
elif arg_count == 3:
@dpcontracts.require("some dummy contract", lambda args: args.a > 0)
@dpcontracts.require("some dummy contract", lambda args: args.b > 0)
@dpcontracts.require("some dummy contract", lambda args: args.c > 0)
def some_func(a: int, b: int, c: int) -> None:
nonlocal count
count += 1
pass
@hypothesis.settings(
suppress_health_check=(hypothesis.HealthCheck.filter_too_much,)
)
@hypothesis.given(
a=hypothesis.strategies.integers(),
b=hypothesis.strategies.integers(),
c=hypothesis.strategies.integers(),
)
def execute(a: int, b: int, c: int) -> None:
hypothesis.extra.dpcontracts.fulfill(some_func)(a, b, c)
else:
raise NotImplementedError("arg_count {}".format(arg_count))
execute()
# Assert the count of function executions for fair tests
assert count == 100
def benchmark_deal(arg_count: int = 1) -> None:
"""Benchmark the Hypothesis testing with deal."""
count = 0
if arg_count == 1:
@deal.pre(lambda _: _.a > 0)
def some_func(a: int) -> None:
nonlocal count
count += 1
pass
for case in deal.cases(some_func, count=100):
case()
elif arg_count == 2:
@deal.pre(lambda _: _.a > 0)
@deal.pre(lambda _: _.b > 0)
def some_func(a: int, b: int) -> None:
nonlocal count
count += 1
pass
for case in deal.cases(some_func, count=100):
case()
elif arg_count == 3:
@deal.pre(lambda _: _.a > 0)
@deal.pre(lambda _: _.b > 0)
@deal.pre(lambda _: _.c > 0)
def some_func(a: int, b: int, c: int) -> None:
nonlocal count
count += 1
pass
for case in deal.cases(some_func, count=100):
case()
else:
raise NotImplementedError("arg_count {}".format(arg_count))
assert count == 100
def writeln_utf8(text: str = "") -> None:
"""
Write the text to STDOUT using UTF-8 encoding followed by a new-line character.
We can not use ``print()`` as we can not rely on the correct encoding in Windows.
See: https://stackoverflow.com/questions/31469707/changing-the-locale-preferred-encoding-in-python-3-in-windows
"""
sys.stdout.buffer.write(text.encode("utf-8"))
sys.stdout.buffer.write(os.linesep.encode("utf-8"))
def measure_functions() -> None:
# yapf: disable
funcs = [
'benchmark_icontract_inferred_strategy',
'benchmark_icontract_assume_preconditions',
'benchmark_dpcontracts',
'benchmark_deal',
]
# yapf: enable
durations = [0.0] * len(funcs)
number = 10
for arg_count in [1, 2, 3]:
for i, func in enumerate(funcs):
duration = timeit.timeit(
"{}(arg_count={})".format(func, arg_count),
setup="from __main__ import {}".format(func),
number=number,
)
durations[i] = duration
table = [] # type: List[List[str]]
for func, duration in zip(funcs, durations):
# yapf: disable
table.append([
'`{}`'.format(func),
'{:.2f} s'.format(duration),
'{:.2f} ms'.format(duration * 1000 / number),
'{:.0f}%'.format(duration * 100 / durations[0])
])
# yapf: enable
# yapf: disable
table_str = tabulate.tabulate(
table,
headers=['Case', 'Total time', 'Time per run', 'Relative time per run'],
colalign=('left', 'right', 'right', 'right'),
tablefmt='rst')
# yapf: enable
writeln_utf8()
writeln_utf8("Argument count: {}".format(arg_count))
writeln_utf8()
writeln_utf8(table_str)
if __name__ == "__main__":
writeln_utf8("Benchmarking Hypothesis testing:")
writeln_utf8("")
measure_functions()
| [
"os.linesep.encode",
"tabulate.tabulate",
"icontract_hypothesis.make_assume_preconditions",
"icontract_hypothesis.test_with_inferred_strategy",
"hypothesis.strategies.integers",
"dpcontracts.require",
"deal.cases",
"deal.pre",
"hypothesis.settings",
"icontract.require",
"hypothesis.extra.dpcontr... | [((3648, 3707), 'icontract_hypothesis.test_with_inferred_strategy', 'icontract_hypothesis.test_with_inferred_strategy', (['some_func'], {}), '(some_func)\n', (3696, 3707), False, 'import icontract_hypothesis\n'), ((541, 575), 'icontract.require', 'icontract.require', (['(lambda a: a > 0)'], {}), '(lambda a: a > 0)\n', (558, 575), False, 'import icontract\n'), ((714, 771), 'icontract_hypothesis.make_assume_preconditions', 'icontract_hypothesis.make_assume_preconditions', (['some_func'], {}), '(some_func)\n', (760, 771), False, 'import icontract_hypothesis\n'), ((782, 871), 'hypothesis.settings', 'hypothesis.settings', ([], {'suppress_health_check': '(hypothesis.HealthCheck.filter_too_much,)'}), '(suppress_health_check=(hypothesis.HealthCheck.\n filter_too_much,))\n', (801, 871), False, 'import hypothesis\n'), ((2913, 2947), 'icontract.require', 'icontract.require', (['(lambda a: a > 0)'], {}), '(lambda a: a > 0)\n', (2930, 2947), False, 'import icontract\n'), ((3960, 4027), 'dpcontracts.require', 'dpcontracts.require', (['"""some dummy contract"""', '(lambda args: args.a > 0)'], {}), "('some dummy contract', lambda args: args.a > 0)\n", (3979, 4027), False, 'import dpcontracts\n'), ((4144, 4233), 'hypothesis.settings', 'hypothesis.settings', ([], {'suppress_health_check': '(hypothesis.HealthCheck.filter_too_much,)'}), '(suppress_health_check=(hypothesis.HealthCheck.\n filter_too_much,))\n', (4163, 4233), False, 'import hypothesis\n'), ((6188, 6215), 'deal.pre', 'deal.pre', (['(lambda _: _.a > 0)'], {}), '(lambda _: _.a > 0)\n', (6196, 6215), False, 'import deal\n'), ((6343, 6375), 'deal.cases', 'deal.cases', (['some_func'], {'count': '(100)'}), '(some_func, count=100)\n', (6353, 6375), False, 'import deal\n'), ((7547, 7573), 'os.linesep.encode', 'os.linesep.encode', (['"""utf-8"""'], {}), "('utf-8')\n", (7564, 7573), False, 'import os\n'), ((8646, 8813), 'tabulate.tabulate', 'tabulate.tabulate', (['table'], {'headers': "['Case', 'Total 
time', 'Time per run', 'Relative time per run']", 'colalign': "('left', 'right', 'right', 'right')", 'tablefmt': '"""rst"""'}), "(table, headers=['Case', 'Total time', 'Time per run',\n 'Relative time per run'], colalign=('left', 'right', 'right', 'right'),\n tablefmt='rst')\n", (8663, 8813), False, 'import tabulate\n'), ((1085, 1119), 'icontract.require', 'icontract.require', (['(lambda a: a > 0)'], {}), '(lambda a: a > 0)\n', (1102, 1119), False, 'import icontract\n'), ((1129, 1163), 'icontract.require', 'icontract.require', (['(lambda b: b > 0)'], {}), '(lambda b: b > 0)\n', (1146, 1163), False, 'import icontract\n'), ((1310, 1367), 'icontract_hypothesis.make_assume_preconditions', 'icontract_hypothesis.make_assume_preconditions', (['some_func'], {}), '(some_func)\n', (1356, 1367), False, 'import icontract_hypothesis\n'), ((1378, 1467), 'hypothesis.settings', 'hypothesis.settings', ([], {'suppress_health_check': '(hypothesis.HealthCheck.filter_too_much,)'}), '(suppress_health_check=(hypothesis.HealthCheck.\n filter_too_much,))\n', (1397, 1467), False, 'import hypothesis\n'), ((3090, 3124), 'icontract.require', 'icontract.require', (['(lambda a: a > 0)'], {}), '(lambda a: a > 0)\n', (3107, 3124), False, 'import icontract\n'), ((3134, 3168), 'icontract.require', 'icontract.require', (['(lambda b: b > 0)'], {}), '(lambda b: b > 0)\n', (3151, 3168), False, 'import icontract\n'), ((4449, 4516), 'dpcontracts.require', 'dpcontracts.require', (['"""some dummy contract"""', '(lambda args: args.a > 0)'], {}), "('some dummy contract', lambda args: args.a > 0)\n", (4468, 4516), False, 'import dpcontracts\n'), ((4526, 4593), 'dpcontracts.require', 'dpcontracts.require', (['"""some dummy contract"""', '(lambda args: args.b > 0)'], {}), "('some dummy contract', lambda args: args.b > 0)\n", (4545, 4593), False, 'import dpcontracts\n'), ((4718, 4807), 'hypothesis.settings', 'hypothesis.settings', ([], {'suppress_health_check': '(hypothesis.HealthCheck.filter_too_much,)'}), 
'(suppress_health_check=(hypothesis.HealthCheck.\n filter_too_much,))\n', (4737, 4807), False, 'import hypothesis\n'), ((6432, 6459), 'deal.pre', 'deal.pre', (['(lambda _: _.a > 0)'], {}), '(lambda _: _.a > 0)\n', (6440, 6459), False, 'import deal\n'), ((6469, 6496), 'deal.pre', 'deal.pre', (['(lambda _: _.b > 0)'], {}), '(lambda _: _.b > 0)\n', (6477, 6496), False, 'import deal\n'), ((6632, 6664), 'deal.cases', 'deal.cases', (['some_func'], {'count': '(100)'}), '(some_func, count=100)\n', (6642, 6664), False, 'import deal\n'), ((917, 949), 'hypothesis.strategies.integers', 'hypothesis.strategies.integers', ([], {}), '()\n', (947, 949), False, 'import hypothesis\n'), ((1757, 1791), 'icontract.require', 'icontract.require', (['(lambda a: a > 0)'], {}), '(lambda a: a > 0)\n', (1774, 1791), False, 'import icontract\n'), ((1801, 1835), 'icontract.require', 'icontract.require', (['(lambda b: b > 0)'], {}), '(lambda b: b > 0)\n', (1818, 1835), False, 'import icontract\n'), ((1845, 1879), 'icontract.require', 'icontract.require', (['(lambda c: c > 0)'], {}), '(lambda c: c > 0)\n', (1862, 1879), False, 'import icontract\n'), ((2034, 2091), 'icontract_hypothesis.make_assume_preconditions', 'icontract_hypothesis.make_assume_preconditions', (['some_func'], {}), '(some_func)\n', (2080, 2091), False, 'import icontract_hypothesis\n'), ((2102, 2191), 'hypothesis.settings', 'hypothesis.settings', ([], {'suppress_health_check': '(hypothesis.HealthCheck.filter_too_much,)'}), '(suppress_health_check=(hypothesis.HealthCheck.\n filter_too_much,))\n', (2121, 2191), False, 'import hypothesis\n'), ((3319, 3353), 'icontract.require', 'icontract.require', (['(lambda a: a > 0)'], {}), '(lambda a: a > 0)\n', (3336, 3353), False, 'import icontract\n'), ((3363, 3397), 'icontract.require', 'icontract.require', (['(lambda b: b > 0)'], {}), '(lambda b: b > 0)\n', (3380, 3397), False, 'import icontract\n'), ((3407, 3441), 'icontract.require', 'icontract.require', (['(lambda c: c > 0)'], {}), 
'(lambda c: c > 0)\n', (3424, 3441), False, 'import icontract\n'), ((4362, 4409), 'hypothesis.extra.dpcontracts.fulfill', 'hypothesis.extra.dpcontracts.fulfill', (['some_func'], {}), '(some_func)\n', (4398, 4409), False, 'import hypothesis\n'), ((4279, 4311), 'hypothesis.strategies.integers', 'hypothesis.strategies.integers', ([], {}), '()\n', (4309, 4311), False, 'import hypothesis\n'), ((5092, 5159), 'dpcontracts.require', 'dpcontracts.require', (['"""some dummy contract"""', '(lambda args: args.a > 0)'], {}), "('some dummy contract', lambda args: args.a > 0)\n", (5111, 5159), False, 'import dpcontracts\n'), ((5169, 5236), 'dpcontracts.require', 'dpcontracts.require', (['"""some dummy contract"""', '(lambda args: args.b > 0)'], {}), "('some dummy contract', lambda args: args.b > 0)\n", (5188, 5236), False, 'import dpcontracts\n'), ((5246, 5313), 'dpcontracts.require', 'dpcontracts.require', (['"""some dummy contract"""', '(lambda args: args.c > 0)'], {}), "('some dummy contract', lambda args: args.c > 0)\n", (5265, 5313), False, 'import dpcontracts\n'), ((5446, 5535), 'hypothesis.settings', 'hypothesis.settings', ([], {'suppress_health_check': '(hypothesis.HealthCheck.filter_too_much,)'}), '(suppress_health_check=(hypothesis.HealthCheck.\n filter_too_much,))\n', (5465, 5535), False, 'import hypothesis\n'), ((6721, 6748), 'deal.pre', 'deal.pre', (['(lambda _: _.a > 0)'], {}), '(lambda _: _.a > 0)\n', (6729, 6748), False, 'import deal\n'), ((6758, 6785), 'deal.pre', 'deal.pre', (['(lambda _: _.b > 0)'], {}), '(lambda _: _.b > 0)\n', (6766, 6785), False, 'import deal\n'), ((6795, 6822), 'deal.pre', 'deal.pre', (['(lambda _: _.c > 0)'], {}), '(lambda _: _.c > 0)\n', (6803, 6822), False, 'import deal\n'), ((6966, 6998), 'deal.cases', 'deal.cases', (['some_func'], {'count': '(100)'}), '(some_func, count=100)\n', (6976, 6998), False, 'import deal\n'), ((1526, 1558), 'hypothesis.strategies.integers', 'hypothesis.strategies.integers', ([], {}), '()\n', (1556, 1558), 
False, 'import hypothesis\n'), ((1562, 1594), 'hypothesis.strategies.integers', 'hypothesis.strategies.integers', ([], {}), '()\n', (1592, 1594), False, 'import hypothesis\n'), ((5002, 5049), 'hypothesis.extra.dpcontracts.fulfill', 'hypothesis.extra.dpcontracts.fulfill', (['some_func'], {}), '(some_func)\n', (5038, 5049), False, 'import hypothesis\n'), ((4866, 4898), 'hypothesis.strategies.integers', 'hypothesis.strategies.integers', ([], {}), '()\n', (4896, 4898), False, 'import hypothesis\n'), ((4902, 4934), 'hypothesis.strategies.integers', 'hypothesis.strategies.integers', ([], {}), '()\n', (4932, 4934), False, 'import hypothesis\n'), ((2250, 2282), 'hypothesis.strategies.integers', 'hypothesis.strategies.integers', ([], {}), '()\n', (2280, 2282), False, 'import hypothesis\n'), ((2298, 2330), 'hypothesis.strategies.integers', 'hypothesis.strategies.integers', ([], {}), '()\n', (2328, 2330), False, 'import hypothesis\n'), ((2346, 2378), 'hypothesis.strategies.integers', 'hypothesis.strategies.integers', ([], {}), '()\n', (2376, 2378), False, 'import hypothesis\n'), ((5799, 5846), 'hypothesis.extra.dpcontracts.fulfill', 'hypothesis.extra.dpcontracts.fulfill', (['some_func'], {}), '(some_func)\n', (5835, 5846), False, 'import hypothesis\n'), ((5594, 5626), 'hypothesis.strategies.integers', 'hypothesis.strategies.integers', ([], {}), '()\n', (5624, 5626), False, 'import hypothesis\n'), ((5642, 5674), 'hypothesis.strategies.integers', 'hypothesis.strategies.integers', ([], {}), '()\n', (5672, 5674), False, 'import hypothesis\n'), ((5690, 5722), 'hypothesis.strategies.integers', 'hypothesis.strategies.integers', ([], {}), '()\n', (5720, 5722), False, 'import hypothesis\n')] |
from pathlib import Path
import matplotlib.pyplot as plt
import pandas as pd
results_dir = Path('results')
results_dir.mkdir(exist_ok=True)
# Performance plot
for scale in [3, 4]:
for test_set in ['Set5', 'Set14']:
time = []
psnr = []
model = []
for save_dir in sorted(Path('.').glob(f'*-sc{scale}')):
if 'bicubic' not in save_dir.stem:
model += [save_dir.stem.rsplit('-', 1)[0].upper()]
metrics_file = save_dir / f'test/{test_set}/metrics.csv'
metrics = pd.read_csv(str(metrics_file), index_col='name')
time += [metrics.time.average]
psnr += [metrics.psnr.average]
plt.figure()
plt.semilogx(time, psnr, '.')
plt.grid(True, which='both')
for x, y, s in zip(time, psnr, model):
if 'NS' in s:
s = s.split('-')[1]
plt.text(x, y, s)
plt.xlabel('Run time (sec)')
plt.ylabel('PSNR (dB)')
plt.title(f'Scale {scale} on {test_set}')
plt.savefig(str(results_dir / f'performance-sc{scale}-{test_set}.png'))
plt.close()
# History plot
for scale in [3, 4]:
plt.figure()
for save_dir in sorted(Path('.').glob(f'*-sc{scale}')):
if 'bicubic' not in save_dir.stem:
model = save_dir.stem.rsplit('-', 1)[0].upper()
history_file = save_dir / f'train/history.csv'
history = pd.read_csv(str(history_file))
plt.plot(history.epoch, history.val_psnr, label=model, alpha=0.8)
plt.legend()
plt.xlabel('Epochs')
plt.ylabel('Average test PSNR (dB)')
plt.savefig(str(results_dir / f'history-sc{scale}.png'))
plt.xlim(0, 500)
if scale == 3:
plt.ylim(31.5, 34.5)
if scale == 4:
plt.ylim(29, 32)
plt.savefig(str(results_dir / f'history-sc{scale}-zoom.png'))
plt.close()
| [
"matplotlib.pyplot.text",
"matplotlib.pyplot.grid",
"matplotlib.pyplot.ylabel",
"pathlib.Path",
"matplotlib.pyplot.semilogx",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.close",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.title",
"matplotlib.pyplot.xlim",
"matpl... | [((94, 109), 'pathlib.Path', 'Path', (['"""results"""'], {}), "('results')\n", (98, 109), False, 'from pathlib import Path\n'), ((1193, 1205), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1203, 1205), True, 'import matplotlib.pyplot as plt\n'), ((1563, 1575), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (1573, 1575), True, 'import matplotlib.pyplot as plt\n'), ((1580, 1600), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Epochs"""'], {}), "('Epochs')\n", (1590, 1600), True, 'import matplotlib.pyplot as plt\n'), ((1605, 1641), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Average test PSNR (dB)"""'], {}), "('Average test PSNR (dB)')\n", (1615, 1641), True, 'import matplotlib.pyplot as plt\n'), ((1707, 1723), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(0)', '(500)'], {}), '(0, 500)\n', (1715, 1723), True, 'import matplotlib.pyplot as plt\n'), ((1886, 1897), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (1895, 1897), True, 'import matplotlib.pyplot as plt\n'), ((706, 718), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (716, 718), True, 'import matplotlib.pyplot as plt\n'), ((727, 756), 'matplotlib.pyplot.semilogx', 'plt.semilogx', (['time', 'psnr', '"""."""'], {}), "(time, psnr, '.')\n", (739, 756), True, 'import matplotlib.pyplot as plt\n'), ((765, 793), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {'which': '"""both"""'}), "(True, which='both')\n", (773, 793), True, 'import matplotlib.pyplot as plt\n'), ((941, 969), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Run time (sec)"""'], {}), "('Run time (sec)')\n", (951, 969), True, 'import matplotlib.pyplot as plt\n'), ((978, 1001), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""PSNR (dB)"""'], {}), "('PSNR (dB)')\n", (988, 1001), True, 'import matplotlib.pyplot as plt\n'), ((1010, 1051), 'matplotlib.pyplot.title', 'plt.title', (['f"""Scale {scale} on {test_set}"""'], {}), "(f'Scale {scale} on {test_set}')\n", (1019, 1051), True, 'import 
matplotlib.pyplot as plt\n'), ((1140, 1151), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (1149, 1151), True, 'import matplotlib.pyplot as plt\n'), ((1751, 1771), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(31.5)', '(34.5)'], {}), '(31.5, 34.5)\n', (1759, 1771), True, 'import matplotlib.pyplot as plt\n'), ((1799, 1815), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(29)', '(32)'], {}), '(29, 32)\n', (1807, 1815), True, 'import matplotlib.pyplot as plt\n'), ((915, 932), 'matplotlib.pyplot.text', 'plt.text', (['x', 'y', 's'], {}), '(x, y, s)\n', (923, 932), True, 'import matplotlib.pyplot as plt\n'), ((1493, 1558), 'matplotlib.pyplot.plot', 'plt.plot', (['history.epoch', 'history.val_psnr'], {'label': 'model', 'alpha': '(0.8)'}), '(history.epoch, history.val_psnr, label=model, alpha=0.8)\n', (1501, 1558), True, 'import matplotlib.pyplot as plt\n'), ((1233, 1242), 'pathlib.Path', 'Path', (['"""."""'], {}), "('.')\n", (1237, 1242), False, 'from pathlib import Path\n'), ((309, 318), 'pathlib.Path', 'Path', (['"""."""'], {}), "('.')\n", (313, 318), False, 'from pathlib import Path\n')] |
# Matplotlib
# 파이썬 데이터과학 관련 시각화 페키지
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
#%matplotlib inline # 주피터 노트북에서 show() 호출없이도
# 그래프를 그릴수 있게 해 줌
# data = np.arange(10)
# plt.plot(data)
# plt.show()
# 산점도 - 100의 표준정규분포 난수 생성
list = []
for i in range(100): # 0 ~ 99
x = np.random.normal(0,1) # 표준정규분포 난수
y = x + 0.1 + 0.2 + np.random.normal(0,1)
list.append([x, y])
print(list)
x_data = [ v[0] for v in list ] # v= [x, ]
y_data = [ v[1] for v in list ] # v= [, y]
plt.plot(x_data, y_data, 'ro')
plt.show()
# 성적데이터 읽어오기
df = pd.read_excel('c:/Java/sungjuk.xlsx')
#총점, 평균 계산후 df 에 추가
subj = ['국어', '영어', '수학', '과학']
df['총점'] = df[subj].sum(axis=1)
df['평균'] = df['총점'] / len(subj)
df.sort_values(['평균'], ascending=[False]) # 평균으로 정렬
import matplotlib as mpl
mpl.rc('font', family='Malgun Gothic') # 그래프 한글 설정
sj = df.sort_values(['평균'], ascending=[False])
sj.index = sj['이름']
sj['평균'].plot(kind='bar', figsize=(8,4))
# 성적 비교 - 어느 반이 잘했나?
ban1 = df[df['반'] == 1]
ban2 = df[df['반'] == 2]
ban1_mean = ban1['총점'].sum() / (6 * 4)
ban2_mean = ban2['총점'].sum() / (6 * 4)
print(ban1_mean, ban2_mean) # 79.042 vs 77.125
# 두집단 간의 평균운 유의미하게 차이 나는것인가? (t검증)
# p-value 값이 0.005 이하일때 - 차이가 난다고 할 수 있음
import scipy.stats as stats
result = stats.ttest_ind(ban1['평균'], ban2['평균'])
print(result) # pvalue=0.755583336185639
# 그럼, 과목별 평균은 차이가 나는가? (t검증)
for sub in subj:
print(sub, stats.ttest_ind(ban1[sub], ban2[sub]))
# 국어 pvalue=0.031982494983816424
# 영어 pvalue=0.5518533781528807
# 수학 pvalue=0.1654958420079056
# 과학 pvalue=0.0014931977711732465
# 전체 성적데이터에 대한 그래프 출력
sj[subj].plot(kind='bar', figsize=(10,6))
# 과목별 점수 분포 - 박스수염 그래프 작성
df[subj].boxplot(return_type='axes')
# 일반, 이반 과목별 점수 분포
ban1[subj].boxplot(return_type='axes') # 일반
ban2[subj].boxplot(return_type='axes') # 이반
# 과목별 상관관계 - '수학:과학' 와 '국어:영어'
df.plot(kind='scatter', x='수학', y='과학')
print( stats.pearsonr( df['수학'], df['과학'] ) ) # 피어슨 상관계수
# 0.5632890597067751(상관계수), 0.05650580486155532(p검증값)
# 과목별 상관관계 - '수학:과학' 와 '국어:영어'
df.plot(kind='scatter', x='국어', y='영어')
print( stats.pearsonr( df['국어'], df['영어'] ) ) # 피어슨 상관계수
# 0.10566562777973997(상관계수), 0.7437959551857836(p검증값)
| [
"numpy.random.normal",
"scipy.stats.pearsonr",
"matplotlib.pyplot.plot",
"scipy.stats.ttest_ind",
"matplotlib.rc",
"pandas.read_excel",
"matplotlib.pyplot.show"
] | [((550, 580), 'matplotlib.pyplot.plot', 'plt.plot', (['x_data', 'y_data', '"""ro"""'], {}), "(x_data, y_data, 'ro')\n", (558, 580), True, 'import matplotlib.pyplot as plt\n'), ((582, 592), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (590, 592), True, 'import matplotlib.pyplot as plt\n'), ((617, 654), 'pandas.read_excel', 'pd.read_excel', (['"""c:/Java/sungjuk.xlsx"""'], {}), "('c:/Java/sungjuk.xlsx')\n", (630, 654), True, 'import pandas as pd\n'), ((861, 899), 'matplotlib.rc', 'mpl.rc', (['"""font"""'], {'family': '"""Malgun Gothic"""'}), "('font', family='Malgun Gothic')\n", (867, 899), True, 'import matplotlib as mpl\n'), ((1348, 1387), 'scipy.stats.ttest_ind', 'stats.ttest_ind', (["ban1['평균']", "ban2['평균']"], {}), "(ban1['평균'], ban2['평균'])\n", (1363, 1387), True, 'import scipy.stats as stats\n'), ((334, 356), 'numpy.random.normal', 'np.random.normal', (['(0)', '(1)'], {}), '(0, 1)\n', (350, 356), True, 'import numpy as np\n'), ((2004, 2038), 'scipy.stats.pearsonr', 'stats.pearsonr', (["df['수학']", "df['과학']"], {}), "(df['수학'], df['과학'])\n", (2018, 2038), True, 'import scipy.stats as stats\n'), ((2192, 2226), 'scipy.stats.pearsonr', 'stats.pearsonr', (["df['국어']", "df['영어']"], {}), "(df['국어'], df['영어'])\n", (2206, 2226), True, 'import scipy.stats as stats\n'), ((393, 415), 'numpy.random.normal', 'np.random.normal', (['(0)', '(1)'], {}), '(0, 1)\n', (409, 415), True, 'import numpy as np\n'), ((1498, 1535), 'scipy.stats.ttest_ind', 'stats.ttest_ind', (['ban1[sub]', 'ban2[sub]'], {}), '(ban1[sub], ban2[sub])\n', (1513, 1535), True, 'import scipy.stats as stats\n')] |
from django.contrib.auth.models import User
from rollservice.models import DiceSequence
import rest_framework.test as rf_test
import rest_framework.status as status
import rest_framework.reverse as reverse
import hypothesis.extra.django
import hypothesis.strategies as strategies
import unittest
class DiceSeqStrategies:
dice_rolls = strategies.lists(
elements=strategies.sampled_from([4, 6, 8, 10, 12, 20, 100]),
min_size=1
)
user = strategies.just(dict(
username='dungeon_master',
email='<EMAIL>',
password='<PASSWORD>'
))
@strategies.composite
def seq_name(draw):
seq_number = draw(strategies.integers(min_value=1))
return f'Roll {seq_number}'
@strategies.composite
def dice_sequence(draw, seq_name=seq_name(), dice_rolls=dice_rolls):
seq_name = draw(seq_name)
dice_sequence = draw(dice_rolls)
return dict(
seq_name=seq_name,
dice_sequence=dice_sequence
)
dice_sequence_list = strategies.lists(elements=dice_sequence(), min_size=1)
@strategies.composite
def existing_uuid(draw, queryset):
max_value = len(queryset) - 1
index = draw(strategies.integers(min_value=0, max_value=max_value))
return queryset[index].uuid
non_existing_uuid = strategies.uuids()
invalid_uuid = strategies.text(max_size=100)
@strategies.composite
def existing_uuid_url(draw, queryset):
max_value = len(queryset) - 1
index = draw(strategies.integers(min_value=0, max_value=max_value))
uuid = queryset[index].uuid
url = reverse.reverse('dice-seq-by-uuid', args=[uuid])
return url
@strategies.composite
def non_existing_uuid_url(draw, queryset, non_existing_uuid=non_existing_uuid):
uuid = draw(non_existing_uuid)
url = reverse.reverse('dice-seq-by-uuid', args=[uuid])
return url
@strategies.composite
def invalid_uuid_url(draw, invalid_uuid=invalid_uuid):
uuid = draw(invalid_uuid)
url_root = reverse.reverse('dice-seq')
url = url_root + '/by_uuid/' + uuid + '/'
return url
class DiceSequenceByUUIDTests(hypothesis.extra.django.TestCase):
    """Property-based tests for the dice-seq-by-uuid endpoint."""

    @classmethod
    def setUpTestData(cls):
        # Populate the test database once with example sequences owned by
        # a single example user.
        sequences = DiceSeqStrategies.dice_sequence_list.example()
        new_user = DiceSeqStrategies.user.example()
        owner = User.objects.create(**new_user)
        for sequence in sequences:
            dice_sequence = DiceSequence.objects.create(seq_name=sequence['seq_name'], owner=owner)
            dice_sequence.sequence.set(sequence['dice_sequence'])

    # Class attributes: evaluated at class-definition time so the strategy
    # decorators below can reference `queryset` directly.
    queryset = DiceSequence.objects.all()
    client_class = rf_test.APIClient

    @hypothesis.given(DiceSeqStrategies.existing_uuid_url(queryset=queryset))
    def test_dice_seq_by_uuid_GET_with_existing_uuid_should_return_OK(self, url):
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    @hypothesis.given(DiceSeqStrategies.non_existing_uuid_url(queryset=queryset))
    def test_dice_seq_by_uuid_GET_with_non_existing_uuid_should_return_NOT_FOUND(self, url):
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    @hypothesis.given(DiceSeqStrategies.invalid_uuid_url())
    def test_dice_seq_by_uuid_GET_with_invalid_uuid_should_return_BAD_REQUEST(self, url):
        # URL routing may reject the malformed uuid with 404 before the view
        # runs, so both 404 and 400 are accepted here.
        response = self.client.get(url)
        self.assertIn(response.status_code, [status.HTTP_404_NOT_FOUND, status.HTTP_400_BAD_REQUEST])

    @hypothesis.given(strategies.one_of([
        DiceSeqStrategies.existing_uuid_url(queryset=queryset),
        DiceSeqStrategies.non_existing_uuid_url(queryset=queryset),
        DiceSeqStrategies.invalid_uuid_url(),
    ]))
    def test_dice_seq_by_uuid_GET_idempotent(self, url):
        # Two identical GETs must produce the same status code.
        response1 = self.client.get(url)
        response2 = self.client.get(url)
        self.assertEqual(response1.status_code, response2.status_code)
| [
"hypothesis.strategies.text",
"hypothesis.strategies.sampled_from",
"hypothesis.strategies.integers",
"rollservice.models.DiceSequence.objects.create",
"rollservice.models.DiceSequence.objects.all",
"hypothesis.strategies.uuids",
"django.contrib.auth.models.User.objects.create",
"rest_framework.revers... | [((1358, 1376), 'hypothesis.strategies.uuids', 'strategies.uuids', ([], {}), '()\n', (1374, 1376), True, 'import hypothesis.strategies as strategies\n'), ((1396, 1425), 'hypothesis.strategies.text', 'strategies.text', ([], {'max_size': '(100)'}), '(max_size=100)\n', (1411, 1425), True, 'import hypothesis.strategies as strategies\n'), ((2724, 2750), 'rollservice.models.DiceSequence.objects.all', 'DiceSequence.objects.all', ([], {}), '()\n', (2748, 2750), False, 'from rollservice.models import DiceSequence\n'), ((1660, 1708), 'rest_framework.reverse.reverse', 'reverse.reverse', (['"""dice-seq-by-uuid"""'], {'args': '[uuid]'}), "('dice-seq-by-uuid', args=[uuid])\n", (1675, 1708), True, 'import rest_framework.reverse as reverse\n'), ((1901, 1949), 'rest_framework.reverse.reverse', 'reverse.reverse', (['"""dice-seq-by-uuid"""'], {'args': '[uuid]'}), "('dice-seq-by-uuid', args=[uuid])\n", (1916, 1949), True, 'import rest_framework.reverse as reverse\n'), ((2117, 2144), 'rest_framework.reverse.reverse', 'reverse.reverse', (['"""dice-seq"""'], {}), "('dice-seq')\n", (2132, 2144), True, 'import rest_framework.reverse as reverse\n'), ((2462, 2493), 'django.contrib.auth.models.User.objects.create', 'User.objects.create', ([], {}), '(**new_user)\n', (2481, 2493), False, 'from django.contrib.auth.models import User\n'), ((384, 435), 'hypothesis.strategies.sampled_from', 'strategies.sampled_from', (['[4, 6, 8, 10, 12, 20, 100]'], {}), '([4, 6, 8, 10, 12, 20, 100])\n', (407, 435), True, 'import hypothesis.strategies as strategies\n'), ((671, 703), 'hypothesis.strategies.integers', 'strategies.integers', ([], {'min_value': '(1)'}), '(min_value=1)\n', (690, 703), True, 'import hypothesis.strategies as strategies\n'), ((1225, 1278), 'hypothesis.strategies.integers', 'strategies.integers', ([], {'min_value': '(0)', 'max_value': 'max_value'}), '(min_value=0, max_value=max_value)\n', (1244, 1278), True, 'import hypothesis.strategies as strategies\n'), 
((1555, 1608), 'hypothesis.strategies.integers', 'strategies.integers', ([], {'min_value': '(0)', 'max_value': 'max_value'}), '(min_value=0, max_value=max_value)\n', (1574, 1608), True, 'import hypothesis.strategies as strategies\n'), ((2557, 2628), 'rollservice.models.DiceSequence.objects.create', 'DiceSequence.objects.create', ([], {'seq_name': "sequence['seq_name']", 'owner': 'owner'}), "(seq_name=sequence['seq_name'], owner=owner)\n", (2584, 2628), False, 'from rollservice.models import DiceSequence\n')] |
# -*- coding: utf-8 -*-
import unittest
import numpy as np
import torch
from comet.metrics import RegressionReport, WMTKendall
class TestMetrics(unittest.TestCase):
    """Checks comet's RegressionReport and WMTKendall metric implementations."""

    def test_regression_report(self):
        report = RegressionReport()
        labels = np.array([0, 0, 0, 1, 1, 1, 1])
        scores = np.arange(7)
        expected = {
            "pearson": torch.tensor(0.8660254, dtype=torch.float32),
            "kendall": torch.tensor(0.7559289, dtype=torch.float32),
            "spearman": torch.tensor(0.866025, dtype=torch.float32),
        }
        computed = report.compute(labels, scores)
        # Compare after rounding to 4 decimals to tolerate float noise.
        self.assertDictEqual(
            {name: round(value.item(), 4) for name, value in computed.items()},
            {name: round(value.item(), 4) for name, value in expected.items()},
        )

    def test_wmt_kendall(self):
        metric = WMTKendall()
        positive = torch.tensor([0, 0.5, 1])
        negative = torch.tensor([1, 0.5, 0])
        expected = (1 - 2) / (1 + 2)
        self.assertEqual(metric.compute(positive, negative), expected)
| [
"comet.metrics.RegressionReport",
"numpy.array",
"torch.tensor",
"comet.metrics.WMTKendall",
"numpy.arange"
] | [((224, 242), 'comet.metrics.RegressionReport', 'RegressionReport', ([], {}), '()\n', (240, 242), False, 'from comet.metrics import RegressionReport, WMTKendall\n'), ((255, 286), 'numpy.array', 'np.array', (['[0, 0, 0, 1, 1, 1, 1]'], {}), '([0, 0, 0, 1, 1, 1, 1])\n', (263, 286), True, 'import numpy as np\n'), ((299, 311), 'numpy.arange', 'np.arange', (['(7)'], {}), '(7)\n', (308, 311), True, 'import numpy as np\n'), ((808, 820), 'comet.metrics.WMTKendall', 'WMTKendall', ([], {}), '()\n', (818, 820), False, 'from comet.metrics import RegressionReport, WMTKendall\n'), ((836, 861), 'torch.tensor', 'torch.tensor', (['[0, 0.5, 1]'], {}), '([0, 0.5, 1])\n', (848, 861), False, 'import torch\n'), ((876, 901), 'torch.tensor', 'torch.tensor', (['[1, 0.5, 0]'], {}), '([1, 0.5, 0])\n', (888, 901), False, 'import torch\n'), ((356, 400), 'torch.tensor', 'torch.tensor', (['(0.8660254)'], {'dtype': 'torch.float32'}), '(0.8660254, dtype=torch.float32)\n', (368, 400), False, 'import torch\n'), ((425, 469), 'torch.tensor', 'torch.tensor', (['(0.7559289)'], {'dtype': 'torch.float32'}), '(0.7559289, dtype=torch.float32)\n', (437, 469), False, 'import torch\n'), ((495, 538), 'torch.tensor', 'torch.tensor', (['(0.866025)'], {'dtype': 'torch.float32'}), '(0.866025, dtype=torch.float32)\n', (507, 538), False, 'import torch\n')] |
import tensorflow as tf
from nalp.corpus import TextCorpus
from nalp.datasets import LanguageModelingDataset
from nalp.encoders import IntegerEncoder
from nalp.models import RelGAN
# Creating a character TextCorpus from file
corpus = TextCorpus(from_file='data/text/chapter1_harry.txt', corpus_type='char')

# Creating an IntegerEncoder, learning encoding and encoding tokens
encoder = IntegerEncoder()
encoder.learn(corpus.vocab_index, corpus.index_vocab)
encoded_tokens = encoder.encode(corpus.tokens)

# Creating Language Modeling Dataset
dataset = LanguageModelingDataset(encoded_tokens, max_contiguous_pad_length=10, batch_size=64)

# Creating the RelGAN
relgan = RelGAN(encoder=encoder, vocab_size=corpus.vocab_size, max_length=10,
       embedding_size=256, n_slots=5, n_heads=5, head_size=25, n_blocks=1, n_layers=3,
       n_filters=(64, 128, 256), filters_size=(3, 5, 5), dropout_rate=0.25, tau=5)

# Compiling the RelGAN: pre-training uses a larger learning rate than the
# adversarial discriminator/generator optimizers
relgan.compile(pre_optimizer=tf.optimizers.Adam(learning_rate=0.01),
               d_optimizer=tf.optimizers.Adam(learning_rate=0.0001),
               g_optimizer=tf.optimizers.Adam(learning_rate=0.0001))

# Pre-fitting the RelGAN (maximum-likelihood pre-training)
relgan.pre_fit(dataset.batches, epochs=200)

# Fitting the RelGAN (adversarial training)
relgan.fit(dataset.batches, epochs=50)

# Saving RelGAN weights in TensorFlow format
relgan.save_weights('trained/relgan', save_format='tf')
| [
"nalp.encoders.IntegerEncoder",
"nalp.datasets.LanguageModelingDataset",
"nalp.models.RelGAN",
"tensorflow.optimizers.Adam",
"nalp.corpus.TextCorpus"
] | [((236, 308), 'nalp.corpus.TextCorpus', 'TextCorpus', ([], {'from_file': '"""data/text/chapter1_harry.txt"""', 'corpus_type': '"""char"""'}), "(from_file='data/text/chapter1_harry.txt', corpus_type='char')\n", (246, 308), False, 'from nalp.corpus import TextCorpus\n'), ((388, 404), 'nalp.encoders.IntegerEncoder', 'IntegerEncoder', ([], {}), '()\n', (402, 404), False, 'from nalp.encoders import IntegerEncoder\n'), ((554, 642), 'nalp.datasets.LanguageModelingDataset', 'LanguageModelingDataset', (['encoded_tokens'], {'max_contiguous_pad_length': '(10)', 'batch_size': '(64)'}), '(encoded_tokens, max_contiguous_pad_length=10,\n batch_size=64)\n', (577, 642), False, 'from nalp.datasets import LanguageModelingDataset\n'), ((671, 907), 'nalp.models.RelGAN', 'RelGAN', ([], {'encoder': 'encoder', 'vocab_size': 'corpus.vocab_size', 'max_length': '(10)', 'embedding_size': '(256)', 'n_slots': '(5)', 'n_heads': '(5)', 'head_size': '(25)', 'n_blocks': '(1)', 'n_layers': '(3)', 'n_filters': '(64, 128, 256)', 'filters_size': '(3, 5, 5)', 'dropout_rate': '(0.25)', 'tau': '(5)'}), '(encoder=encoder, vocab_size=corpus.vocab_size, max_length=10,\n embedding_size=256, n_slots=5, n_heads=5, head_size=25, n_blocks=1,\n n_layers=3, n_filters=(64, 128, 256), filters_size=(3, 5, 5),\n dropout_rate=0.25, tau=5)\n', (677, 907), False, 'from nalp.models import RelGAN\n'), ((980, 1018), 'tensorflow.optimizers.Adam', 'tf.optimizers.Adam', ([], {'learning_rate': '(0.01)'}), '(learning_rate=0.01)\n', (998, 1018), True, 'import tensorflow as tf\n'), ((1047, 1087), 'tensorflow.optimizers.Adam', 'tf.optimizers.Adam', ([], {'learning_rate': '(0.0001)'}), '(learning_rate=0.0001)\n', (1065, 1087), True, 'import tensorflow as tf\n'), ((1116, 1156), 'tensorflow.optimizers.Adam', 'tf.optimizers.Adam', ([], {'learning_rate': '(0.0001)'}), '(learning_rate=0.0001)\n', (1134, 1156), True, 'import tensorflow as tf\n')] |
from setuptools import setup
# Packaging metadata for the filecompare distribution.
setup(name='filecompare',
      version='0.1',
      description='A package for comparing text and JSON files.',
      url='https://github.com/thomasms/filecompare',
      author='<NAME>',
      author_email='<EMAIL>',
      license='MIT',
      # Subpackages are listed explicitly rather than via find_packages().
      packages=[
          'filecompare',
          'filecompare.compare',
          'filecompare.tools',
          'filecompare.utils'
      ],
      install_requires=[],
      python_requires='>=3',
      scripts=['filecompare/tools/docompare.py'],
      # `python setup.py test` is wired to pytest via pytest-runner.
      setup_requires=['pytest-runner'],
      test_suite='tests.testsuite',
      tests_require=['pytest'],
      zip_safe=False)
| [
"setuptools.setup"
] | [((31, 550), 'setuptools.setup', 'setup', ([], {'name': '"""filecompare"""', 'version': '"""0.1"""', 'description': '"""A package for comparing text and JSON files."""', 'url': '"""https://github.com/thomasms/filecompare"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'license': '"""MIT"""', 'packages': "['filecompare', 'filecompare.compare', 'filecompare.tools', 'filecompare.utils'\n ]", 'install_requires': '[]', 'python_requires': '""">=3"""', 'scripts': "['filecompare/tools/docompare.py']", 'setup_requires': "['pytest-runner']", 'test_suite': '"""tests.testsuite"""', 'tests_require': "['pytest']", 'zip_safe': '(False)'}), "(name='filecompare', version='0.1', description=\n 'A package for comparing text and JSON files.', url=\n 'https://github.com/thomasms/filecompare', author='<NAME>',\n author_email='<EMAIL>', license='MIT', packages=['filecompare',\n 'filecompare.compare', 'filecompare.tools', 'filecompare.utils'],\n install_requires=[], python_requires='>=3', scripts=[\n 'filecompare/tools/docompare.py'], setup_requires=['pytest-runner'],\n test_suite='tests.testsuite', tests_require=['pytest'], zip_safe=False)\n", (36, 550), False, 'from setuptools import setup\n')] |
from text import longest_common_substring
from text._utils import suffix_array
import itertools
class HelperTestMixin:
    """
    Shared verification helper for suffix-array tests.

    author: Anonta (https://stackoverflow.com/users/5798361/anonta)
    source: https://stackoverflow.com/questions/51456472/python-fastest-algorithm-to-get-the-most-common-prefix-out-of-a-list-of-strings/51457611
    """
    def suffix_verify(self, text, step=16):
        """Assert that suffix_array(text) returns a valid (sa, rank, lcp) triple.

        Checks that ``sa`` is a permutation of every suffix start index, and
        that each adjacent pair of suffixes shares exactly ``lcp`` leading
        characters and is in strictly increasing lexicographic order.
        """
        sa, _, lcp = suffix_array(text=text, _step=step)
        # sa must be a permutation of 0..len(text)-1.
        assert set(sa) == set(range(len(text)))
        for prev, curr, h in zip(sa[:-1], sa[1:], lcp[1:]):
            # Adjacent suffixes share a common prefix of exactly h chars...
            assert text[curr : curr + h] == text[prev : prev + h]
            # ...and strictly increase at the first character after it.
            assert text[curr + h : curr + h + 1] > text[prev + h : prev + h + 1]
            # The shared prefix cannot extend past the end of either suffix.
            assert max(prev, curr) <= len(text) - h
        # The original implementation carried an `ok` flag that was never set
        # to False and ended with the vacuous `assert ok == True`; that dead
        # code has been removed.
class TestSuffixArray(HelperTestMixin):
    """
    author: Anonta (https://stackoverflow.com/users/5798361/anonta)
    source: https://stackoverflow.com/questions/51456472/python-fastest-algorithm-to-get-the-most-common-prefix-out-of-a-list-of-strings/51457611
    """
    def test_16(self):
        # 'a' < 'ana' < 'anana' < 'banana' < 'na' < 'nana'
        expect = ([5, 3, 1, 0, 4, 2], [3, 2, 5, 1, 4, 0], [0, 1, 3, 0, 0, 2])
        assert suffix_array(text="banana", _step=16) == expect

    def test_1(self):
        # Same expectation as test_16: _step must not change the result.
        expect = ([5, 3, 1, 0, 4, 2], [3, 2, 5, 1, 4, 0], [0, 1, 3, 0, 0, 2])
        assert suffix_array(text="banana", _step=1) == expect

    def test_mini(self):
        # Degenerate and repetitive inputs.
        assert suffix_array(text="", _step=1) == ([], [], [])
        assert suffix_array(text="a", _step=1) == ([0], [0], [0])
        assert suffix_array(text="aa", _step=1) == ([1, 0], [1, 0], [0, 1])
        assert suffix_array(text="aaa", _step=1) == ([2, 1, 0], [2, 1, 0], [0, 1, 2])

    def test_example(self):
        self.suffix_verify("abracadabra")

    def test_cartesian(self):
        """Test all combinations of alphabet "ABC" up to length 6 characters"""
        for size in range(7):
            for cartesian in itertools.product(*(size * ["ABC"])):
                text = "".join(cartesian)
                self.suffix_verify(text, 1)

    def test_lcp(self):
        # longest_common_substring maps each repeated substring to the list
        # of positions where it occurs.
        expect = {"ana": [1, 3]}
        assert longest_common_substring("banana") == expect
        expect = {" s": [3, 21], "no": [0, 13], "o ": [5, 20, 38]}
        assert longest_common_substring("not so Agamemnon, who spoke fiercely to ") == expect
| [
"text._utils.suffix_array",
"itertools.product",
"text.longest_common_substring"
] | [((435, 468), 'text._utils.suffix_array', 'suffix_array', ([], {'text': 'tx', '_step': 'step'}), '(text=tx, _step=step)\n', (447, 468), False, 'from text._utils import suffix_array\n'), ((1282, 1319), 'text._utils.suffix_array', 'suffix_array', ([], {'text': '"""banana"""', '_step': '(16)'}), "(text='banana', _step=16)\n", (1294, 1319), False, 'from text._utils import suffix_array\n'), ((1446, 1482), 'text._utils.suffix_array', 'suffix_array', ([], {'text': '"""banana"""', '_step': '(1)'}), "(text='banana', _step=1)\n", (1458, 1482), False, 'from text._utils import suffix_array\n'), ((1534, 1564), 'text._utils.suffix_array', 'suffix_array', ([], {'text': '""""""', '_step': '(1)'}), "(text='', _step=1)\n", (1546, 1564), False, 'from text._utils import suffix_array\n'), ((1596, 1627), 'text._utils.suffix_array', 'suffix_array', ([], {'text': '"""a"""', '_step': '(1)'}), "(text='a', _step=1)\n", (1608, 1627), False, 'from text._utils import suffix_array\n'), ((1662, 1694), 'text._utils.suffix_array', 'suffix_array', ([], {'text': '"""aa"""', '_step': '(1)'}), "(text='aa', _step=1)\n", (1674, 1694), False, 'from text._utils import suffix_array\n'), ((1738, 1771), 'text._utils.suffix_array', 'suffix_array', ([], {'text': '"""aaa"""', '_step': '(1)'}), "(text='aaa', _step=1)\n", (1750, 1771), False, 'from text._utils import suffix_array\n'), ((2050, 2086), 'itertools.product', 'itertools.product', (["*(size * ['ABC'])"], {}), "(*(size * ['ABC']))\n", (2067, 2086), False, 'import itertools\n'), ((2247, 2281), 'text.longest_common_substring', 'longest_common_substring', (['"""banana"""'], {}), "('banana')\n", (2271, 2281), False, 'from text import longest_common_substring\n'), ((2374, 2442), 'text.longest_common_substring', 'longest_common_substring', (['"""not so Agamemnon, who spoke fiercely to """'], {}), "('not so Agamemnon, who spoke fiercely to ')\n", (2398, 2442), False, 'from text import longest_common_substring\n')] |
import os
import re
import subprocess
from collections import Counter
from django.conf import settings
from django.core.management.base import BaseCommand
import datadog
from dimagi.ext.couchdbkit import Document
from corehq.feature_previews import all_previews
from corehq.toggles import all_toggles
class DatadogLogger:
    """Writes metrics to stdout and, on Travis cron builds, to Datadog."""

    def __init__(self, stdout):
        self.stdout = stdout
        # Only submit to Datadog during the daily cron job, not every build.
        self.datadog = os.environ.get("TRAVIS_EVENT_TYPE") == 'cron'
        if self.datadog:
            api_key = os.environ.get("DATADOG_API_KEY")
            app_key = os.environ.get("DATADOG_APP_KEY")
            assert api_key and app_key, "DATADOG_API_KEY and DATADOG_APP_KEY must both be set"
            datadog.initialize(api_key=api_key, app_key=app_key)
        # Metrics are buffered here and submitted in one batch by send_all().
        self.metrics = []

    def log(self, metric, value, tags=None):
        """Print a metric and, when enabled, queue it for Datadog submission."""
        self.stdout.write(f"{metric}: {value} {tags or ''}")
        if self.datadog:
            self.metrics.append({
                'metric': metric,
                'points': value,
                'type': "gauge",
                'host': "travis-ci.org",
                'tags': [
                    "environment:travis",
                    f"travis_build:{os.environ.get('TRAVIS_BUILD_ID')}",
                    f"travis_number:{os.environ.get('TRAVIS_BUILD_NUMBER')}",
                    f"travis_job_number:{os.environ.get('TRAVIS_JOB_NUMBER')}",
                ] + (tags or []),
            })

    def send_all(self):
        """Flush all buffered metrics to Datadog in a single API call."""
        if self.datadog:
            datadog.api.Metric.send(self.metrics)
            self.metrics = []
class Command(BaseCommand):
    help = ("Display a variety of code-quality metrics. This is run on every travis "
            "build, but only submitted to datadog during the daily cron job.")

    def handle(self, **options):
        self.stdout.write("----------> Begin Static Analysis <----------")
        self.logger = DatadogLogger(self.stdout)
        self.show_couch_model_count()
        self.show_custom_modules()
        self.show_js_dependencies()
        self.show_toggles()
        self.show_complexity()
        self.logger.send_all()
        self.stdout.write("----------> End Static Analysis <----------")

    def show_couch_model_count(self):
        """Count every couchdbkit Document subclass, transitively."""
        def all_subclasses(cls):
            # Recursive union of direct and indirect subclasses.
            return set(cls.__subclasses__()).union([
                s for c in cls.__subclasses__() for s in all_subclasses(c)
            ])

        model_count = len(all_subclasses(Document))
        self.logger.log("commcare.static_analysis.couch_model_count", model_count)

    def show_custom_modules(self):
        """Report how many custom modules and custom domains are configured."""
        custom_module_count = len(set(settings.DOMAIN_MODULE_MAP.values()))
        custom_domain_count = len(settings.DOMAIN_MODULE_MAP)
        self.logger.log("commcare.static_analysis.custom_module_count", custom_module_count)
        self.logger.log("commcare.static_analysis.custom_domain_count", custom_domain_count)

    def show_js_dependencies(self):
        """Report JS migration progress, parsed from the hqDefine shell script.

        The script prints three space-separated counts: unmigrated,
        hqdefine-only, and fully requirejs-migrated files.
        """
        proc = subprocess.Popen(["./scripts/codechecks/hqDefine.sh", "static-analysis"], stdout=subprocess.PIPE)
        output = proc.communicate()[0].strip().decode("utf-8")
        (step1, step2, step3) = output.split(" ")

        self.logger.log("commcare.static_analysis.hqdefine_file_count", int(step1), tags=[
            'status:unmigrated',
        ])
        self.logger.log("commcare.static_analysis.hqdefine_file_count", int(step2), tags=[
            'status:hqdefine_only',
        ])
        self.logger.log("commcare.static_analysis.requirejs_file_count", int(step3), tags=[
            'status:migrated',
        ])

    def show_toggles(self):
        """Report toggle/preview counts grouped by tag name."""
        counts = Counter(t.tag.name for t in all_toggles() + all_previews())
        for tag, count in counts.items():
            self.logger.log("commcare.static_analysis.toggle_count", count, [f"toggle_tag:{tag}"])

    def show_complexity(self):
        """Report radon cyclomatic-complexity metrics for the code base."""
        # We can use `--json` for more granularity, but it doesn't provide a summary
        output = subprocess.run([
            "radon", "cc", ".",
            "--min=C",
            "--total-average",
            "--exclude=node_modules/*,staticfiles/*",
        ], stdout=subprocess.PIPE).stdout.decode('utf-8').strip()
        # The last two lines of radon's output hold the summary.
        raw_blocks, raw_complexity = output.split('\n')[-2:]

        blocks_pattern = r'^(\d+) blocks \(classes, functions, methods\) analyzed.$'
        blocks = int(re.match(blocks_pattern, raw_blocks).group(1))
        self.logger.log("commcare.static_analysis.code_blocks", blocks)

        complexity_pattern = r'^Average complexity: A \(([\d.]+)\)$'
        complexity = round(float(re.match(complexity_pattern, raw_complexity).group(1)), 3)
        self.logger.log("commcare.static_analysis.avg_complexity", complexity)

        # Count individual blocks per complexity grade C..F.
        for grade in ["C", "D", "E", "F"]:
            count = len(re.findall(f" - {grade}\n", output))
            self.logger.log(
                "commcare.static_analysis.complex_block_count",
                count,
                tags=[f"complexity_grade:{grade}"],
            )
| [
"corehq.feature_previews.all_previews",
"datadog.initialize",
"subprocess.Popen",
"subprocess.run",
"os.environ.get",
"re.match",
"datadog.api.Metric.send",
"django.conf.settings.DOMAIN_MODULE_MAP.values",
"re.findall",
"corehq.toggles.all_toggles"
] | [((2957, 3058), 'subprocess.Popen', 'subprocess.Popen', (["['./scripts/codechecks/hqDefine.sh', 'static-analysis']"], {'stdout': 'subprocess.PIPE'}), "(['./scripts/codechecks/hqDefine.sh', 'static-analysis'],\n stdout=subprocess.PIPE)\n", (2973, 3058), False, 'import subprocess\n'), ((412, 447), 'os.environ.get', 'os.environ.get', (['"""TRAVIS_EVENT_TYPE"""'], {}), "('TRAVIS_EVENT_TYPE')\n", (426, 447), False, 'import os\n'), ((505, 538), 'os.environ.get', 'os.environ.get', (['"""DATADOG_API_KEY"""'], {}), "('DATADOG_API_KEY')\n", (519, 538), False, 'import os\n'), ((561, 594), 'os.environ.get', 'os.environ.get', (['"""DATADOG_APP_KEY"""'], {}), "('DATADOG_APP_KEY')\n", (575, 594), False, 'import os\n'), ((702, 754), 'datadog.initialize', 'datadog.initialize', ([], {'api_key': 'api_key', 'app_key': 'app_key'}), '(api_key=api_key, app_key=app_key)\n', (720, 754), False, 'import datadog\n'), ((1502, 1539), 'datadog.api.Metric.send', 'datadog.api.Metric.send', (['self.metrics'], {}), '(self.metrics)\n', (1525, 1539), False, 'import datadog\n'), ((2619, 2654), 'django.conf.settings.DOMAIN_MODULE_MAP.values', 'settings.DOMAIN_MODULE_MAP.values', ([], {}), '()\n', (2652, 2654), False, 'from django.conf import settings\n'), ((4776, 4811), 're.findall', 're.findall', (['f""" - {grade}\n"""', 'output'], {}), "(f' - {grade}\\n', output)\n", (4786, 4811), False, 'import re\n'), ((4348, 4384), 're.match', 're.match', (['blocks_pattern', 'raw_blocks'], {}), '(blocks_pattern, raw_blocks)\n', (4356, 4384), False, 'import re\n'), ((3650, 3663), 'corehq.toggles.all_toggles', 'all_toggles', ([], {}), '()\n', (3661, 3663), False, 'from corehq.toggles import all_toggles\n'), ((3666, 3680), 'corehq.feature_previews.all_previews', 'all_previews', ([], {}), '()\n', (3678, 3680), False, 'from corehq.feature_previews import all_previews\n'), ((4570, 4614), 're.match', 're.match', (['complexity_pattern', 'raw_complexity'], {}), '(complexity_pattern, raw_complexity)\n', (4578, 4614), 
False, 'import re\n'), ((3957, 4093), 'subprocess.run', 'subprocess.run', (["['radon', 'cc', '.', '--min=C', '--total-average',\n '--exclude=node_modules/*,staticfiles/*']"], {'stdout': 'subprocess.PIPE'}), "(['radon', 'cc', '.', '--min=C', '--total-average',\n '--exclude=node_modules/*,staticfiles/*'], stdout=subprocess.PIPE)\n", (3971, 4093), False, 'import subprocess\n'), ((1196, 1229), 'os.environ.get', 'os.environ.get', (['"""TRAVIS_BUILD_ID"""'], {}), "('TRAVIS_BUILD_ID')\n", (1210, 1229), False, 'import os\n'), ((1270, 1307), 'os.environ.get', 'os.environ.get', (['"""TRAVIS_BUILD_NUMBER"""'], {}), "('TRAVIS_BUILD_NUMBER')\n", (1284, 1307), False, 'import os\n'), ((1352, 1387), 'os.environ.get', 'os.environ.get', (['"""TRAVIS_JOB_NUMBER"""'], {}), "('TRAVIS_JOB_NUMBER')\n", (1366, 1387), False, 'import os\n')] |
import datetime
import datetime as dt
import pytz
def current_time():
    """Return the current local time formatted as HH:MM:SS."""
    timestamp = dt.datetime.now()
    return timestamp.strftime("%H:%M:%S")
def time_string_to_js_timestamp(time: dt.datetime) -> int:
    """Convert a naive datetime (interpreted as UTC) to a JavaScript timestamp.

    JavaScript timestamps count milliseconds since the epoch, hence the
    * 1000 relative to Python's seconds-based timestamps.
    """
    # Attach the UTC zone with the stdlib instead of the previous
    # pytz.timezone("UTC").localize(...) call — identical result for UTC,
    # no third-party dependency.  The annotation is also fixed: it used to
    # reference the `datetime` *module*, not the datetime class.
    return round(time.replace(tzinfo=dt.timezone.utc).timestamp() * 1000)
| [
"pytz.timezone",
"datetime.datetime.now"
] | [((247, 267), 'pytz.timezone', 'pytz.timezone', (['"""UTC"""'], {}), "('UTC')\n", (260, 267), False, 'import pytz\n'), ((83, 100), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (98, 100), True, 'import datetime as dt\n')] |
"""
Tests for snakelize
module: django_auto_model.utils
"""
import datetime
from django_auto_model.utils import get_now
def test_is_datetime():
    """Should be a datetime instance"""
    assert isinstance(get_now(), datetime.datetime)
def test_value_is_close_to_now():
    """Should be close enough to the test execution time"""
    lower = datetime.datetime.now()
    value = get_now()
    upper = datetime.datetime.now()
    assert lower <= value <= upper
def test_objects_are_not_singleton():
    """Different calls to the function return different instances"""
    first, second = get_now(), get_now()
    assert first is not second
| [
"datetime.datetime.now",
"django_auto_model.utils.get_now"
] | [((195, 204), 'django_auto_model.utils.get_now', 'get_now', ([], {}), '()\n', (202, 204), False, 'from django_auto_model.utils import get_now\n'), ((359, 382), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (380, 382), False, 'import datetime\n'), ((393, 402), 'django_auto_model.utils.get_now', 'get_now', ([], {}), '()\n', (400, 402), False, 'from django_auto_model.utils import get_now\n'), ((415, 438), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (436, 438), False, 'import datetime\n'), ((608, 617), 'django_auto_model.utils.get_now', 'get_now', ([], {}), '()\n', (615, 617), False, 'from django_auto_model.utils import get_now\n'), ((629, 638), 'django_auto_model.utils.get_now', 'get_now', ([], {}), '()\n', (636, 638), False, 'from django_auto_model.utils import get_now\n')] |
from setuptools import setup, find_packages
from setuptools.command.test import test
from distutils.util import convert_path
# We can't import the submodule normally as that would "run" the main module
# code while the setup script is meant to *build* the module.
# Besides preventing a whole possible mess of issues with an un-built package,
# this also prevents the vapoursynth import which breaks the docs on RTD.
# convert_path is used here because according to the distutils docs:
# '...filenames in the setup script are always supplied in Unix
# style, and have to be converted to the local convention before we can
# actually use them in the filesystem.'
meta = {}
# Execute only the metadata module to populate `meta`, avoiding an import
# of the package itself (see the explanation in the comment above).
exec(open(convert_path('vsutil/_metadata.py')).read(), meta)
class DiscoverTest(test):
    """`python setup.py test` command that discovers and runs unittest tests."""

    def finalize_options(self):
        test.finalize_options(self)
        self.test_args = []
        self.test_suite = True

    def run_tests(self):
        # Imported lazily so regular (non-test) builds don't need them loaded.
        import os
        import unittest
        path = os.path.join(os.path.dirname(__file__), "tests")
        runner = unittest.TextTestRunner(verbosity=2)
        suite = unittest.TestLoader().discover(path, pattern="test_*.py")
        runner.run(suite)
setup(
    name='vsutil',
    version=meta['__version__'],
    packages=find_packages(exclude=['tests']),
    # Ship the PEP 561 marker so type checkers pick up inline annotations.
    package_data={
        'vsutil': ['py.typed']
    },
    url='https://encode.moe/vsutil',
    license='MIT',
    # __author__ is presumably formatted as "Name <email>"; the slicing
    # strips the angle brackets from the email part.
    author=meta['__author__'].split()[0],
    author_email=meta['__author__'].split()[1][1:-1],
    description='A collection of general-purpose Vapoursynth functions to be reused in modules and scripts.',
    install_requires=[
        "vapoursynth"
    ],
    cmdclass={
        'test': DiscoverTest
    },
    python_requires='>=3.8',
    project_urls={
        'Documentation': 'http://vsutil.encode.moe/en/latest/',
        'Source': 'https://github.com/Irrational-Encoding-Wizardry/vsutil',
        'Tracker': 'https://github.com/Irrational-Encoding-Wizardry/vsutil/issues',
    },
    keywords='encoding vapoursynth video',
    classifiers=[
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python",
        "Topic :: Multimedia :: Video",
        "Typing :: Typed",
    ],
)
| [
"distutils.util.convert_path",
"setuptools.find_packages",
"setuptools.command.test.test.finalize_options",
"os.path.dirname",
"unittest.TextTestRunner",
"unittest.TestLoader"
] | [((805, 832), 'setuptools.command.test.test.finalize_options', 'test.finalize_options', (['self'], {}), '(self)\n', (826, 832), False, 'from setuptools.command.test import test\n'), ((1041, 1077), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {'verbosity': '(2)'}), '(verbosity=2)\n', (1064, 1077), False, 'import unittest\n'), ((1252, 1284), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['tests']"}), "(exclude=['tests'])\n", (1265, 1284), False, 'from setuptools import setup, find_packages\n'), ((988, 1013), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1003, 1013), False, 'import os\n'), ((685, 720), 'distutils.util.convert_path', 'convert_path', (['"""vsutil/_metadata.py"""'], {}), "('vsutil/_metadata.py')\n", (697, 720), False, 'from distutils.util import convert_path\n'), ((1094, 1115), 'unittest.TestLoader', 'unittest.TestLoader', ([], {}), '()\n', (1113, 1115), False, 'import unittest\n')] |
from main import dp
from aiogram import types
from aiogram.dispatcher.filters.builtin import Text
@dp.message_handler(Text(equals="Все задания 🤩"))
async def vse_zadaniya(msg: types.Message):
    """Send the user's task list (placeholder content for now)."""
    reply_text = "<b>Ваши задания:</b>\n\nскоро наполню"
    await msg.answer(text=reply_text)
@dp.message_handler(Text(equals="Добавить 📝"))
async def dobavit(msg: types.Message):
    # Prompt the user to attach the task they want to add.
    await msg.answer(text="прикрепите ваше задание")
@dp.message_handler(Text(equals="Скрыть клавиутуру 😤"))
async def skryt_klaviaturu(msg: types.Message):
    """Remove the reply keyboard; it can be restored with /start.

    Renamed: this handler was also called `dobavit`, shadowing the handler
    of the same name above.  Both handlers stayed registered through the
    decorator, but the module attribute silently pointed at the wrong
    coroutine.
    """
    await msg.answer(text="Клавиатура скрыта\nДля вызова /start",
                     reply_markup=types.ReplyKeyboardRemove())
| [
"aiogram.dispatcher.filters.builtin.Text",
"aiogram.types.ReplyKeyboardRemove"
] | [((121, 149), 'aiogram.dispatcher.filters.builtin.Text', 'Text', ([], {'equals': '"""Все задания 🤩"""'}), "(equals='Все задания 🤩')\n", (125, 149), False, 'from aiogram.dispatcher.filters.builtin import Text\n'), ((284, 309), 'aiogram.dispatcher.filters.builtin.Text', 'Text', ([], {'equals': '"""Добавить 📝"""'}), "(equals='Добавить 📝')\n", (288, 309), False, 'from aiogram.dispatcher.filters.builtin import Text\n'), ((425, 459), 'aiogram.dispatcher.filters.builtin.Text', 'Text', ([], {'equals': '"""Скрыть клавиутуру 😤"""'}), "(equals='Скрыть клавиутуру 😤')\n", (429, 459), False, 'from aiogram.dispatcher.filters.builtin import Text\n'), ((604, 631), 'aiogram.types.ReplyKeyboardRemove', 'types.ReplyKeyboardRemove', ([], {}), '()\n', (629, 631), False, 'from aiogram import types\n')] |
#!/usr/bin/env python
import Bio
from Bio import SeqIO
import sys
# Usage: <script> <input.fasta> <minlen> <maxlen> <output.fasta>
minlen = int(sys.argv[2])
maxlen = int(sys.argv[3])
output = sys.argv[4]

# Stream records instead of materialising the whole input file in memory
# (the original wrapped SeqIO.parse in list()), and keep only records whose
# sequence length falls within [minlen, maxlen] inclusive.
filt = [
    rec for rec in SeqIO.parse(sys.argv[1], 'fasta')
    if minlen <= len(rec.seq) <= maxlen
]
SeqIO.write(filt, output, 'fasta')
| [
"Bio.SeqIO.parse",
"Bio.SeqIO.write"
] | [((323, 357), 'Bio.SeqIO.write', 'SeqIO.write', (['filt', 'output', '"""fasta"""'], {}), "(filt, output, 'fasta')\n", (334, 357), False, 'from Bio import SeqIO\n'), ((91, 124), 'Bio.SeqIO.parse', 'SeqIO.parse', (['sys.argv[1]', '"""fasta"""'], {}), "(sys.argv[1], 'fasta')\n", (102, 124), False, 'from Bio import SeqIO\n')] |
import json
import datetime
import requests
import urlobject
from .utils import format_faf_date
# Root of the FAF public API; all request URLs are derived from this.
API_BASE = urlobject.URLObject('https://api.faforever.com')

# Default date field to filter/sort on, keyed by API entity type.
ENTITY_TYPE_TO_DEFAULT_DATE_FIELD = {
    'game': 'startTime',
    'player': 'createTime',
    'map': 'createTime',
    'mapVersion': 'createTime',
}
def construct_url(entity, include, date_field, page_size, start_date, end_date, page_number=1, sort='ASC', filters=(), api_base=API_BASE):
    """Build a /data/<entity> API URL with paging, date-range filters and sort.

    `filters` may hold extra filter expressions; the start/end date
    constraints are appended to them and joined with ';' (logical AND).
    """
    url = api_base.with_path(f'/data/{entity}')
    url = url.add_query_param('page[size]', page_size)
    url = url.add_query_param('page[number]', page_number)
    # Empty value: presumably requests page totals in the response meta
    # (yield_pages reads meta.page.totalPages) — TODO confirm against API.
    url = url.add_query_param('page[totals]', '')

    # Copy so the caller's sequence (often the default tuple) isn't mutated.
    filters = list(filters)
    start_date = format_faf_date(start_date)
    filters.append(f'{date_field}=ge={start_date}')
    end_date = format_faf_date(end_date)
    filters.append(f'{date_field}=le={end_date}')
    url = url.add_query_param('filter', ';'.join(filters))

    if include:
        url = url.add_query_param('include', ','.join(include))
    # A leading '-' on the sort field requests descending order.
    url = url.add_query_param('sort', f'-{date_field}' if sort == 'DESC' else f'{date_field}')
    return url
def fetch_page(url):
    """GET *url* and return the decoded JSON body, raising on HTTP errors."""
    resp = requests.get(url)
    resp.raise_for_status()
    return resp.json()
def yield_pages(url_constructor, start_page=1, max_pages=float('inf')):
    """Yield successive result pages from the API.

    `url_constructor` is called with a `page_number` keyword for each page.
    The first page's meta information caps how many pages are fetched.
    """
    current_page = start_page
    page = fetch_page(url_constructor(page_number=current_page))
    yield page
    # The API reports the real total page count; never fetch beyond it.
    max_pages = min(max_pages, page['meta']['page']['totalPages'])
    while current_page < max_pages:
        current_page += 1
        yield fetch_page(url_constructor(page_number=current_page))
def write_json(path, doc, pretty):
    """Serialize *doc* as JSON to the file at *path*.

    When *pretty* is true the output is indented with 4 spaces; otherwise
    it is written compactly.
    """
    # Pin the encoding: without it, open() falls back to the locale's
    # preferred encoding, which can corrupt non-ASCII output on some systems.
    with open(path, 'w', encoding='utf-8') as handle:
        json.dump(doc, handle, indent=(4 if pretty else None))
| [
"urlobject.URLObject",
"json.dump",
"requests.get"
] | [((109, 157), 'urlobject.URLObject', 'urlobject.URLObject', (['"""https://api.faforever.com"""'], {}), "('https://api.faforever.com')\n", (128, 157), False, 'import urlobject\n'), ((1163, 1180), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1175, 1180), False, 'import requests\n'), ((1700, 1752), 'json.dump', 'json.dump', (['doc', 'handle'], {'indent': '(4 if pretty else None)'}), '(doc, handle, indent=4 if pretty else None)\n', (1709, 1752), False, 'import json\n')] |
from nornir import InitNornir
from nornir.core.filter import F
def main():
    """Demonstrate nornir inventory filtering with F-objects (exercises 3a-3d)."""
    nr = InitNornir()
    divider = "-" * 20

    def show(title, filtered):
        # One exercise: heading, divider, matching hosts, divider.
        print(title)
        print(divider)
        print(filtered.inventory.hosts)
        print(divider)

    show("\nExercise 3a (role AGG)", nr.filter(F(role__contains="AGG")))
    show(
        "\nExercise 3b (sea or sfo group)",
        nr.filter(F(groups__contains="sea") | F(groups__contains="sfo")),
    )
    show(
        "\nExercise 3c (WAN-role and WIFI password '<PASSWORD>')",
        nr.filter(F(site_details__wifi_password__contains="<PASSWORD>") & F(role="WAN")),
    )
    show(
        "\nExercise 3d (WAN-role and not WIFI password '<PASSWORD>')",
        nr.filter(~F(site_details__wifi_password__contains="<PASSWORD>") & F(role="WAN")),
    )
    print()
# Run the filtering demo only when executed as a script.
if __name__ == "__main__":
    main()
| [
"nornir.core.filter.F",
"nornir.InitNornir"
] | [((86, 98), 'nornir.InitNornir', 'InitNornir', ([], {}), '()\n', (96, 98), False, 'from nornir import InitNornir\n'), ((183, 206), 'nornir.core.filter.F', 'F', ([], {'role__contains': '"""AGG"""'}), "(role__contains='AGG')\n", (184, 206), False, 'from nornir.core.filter import F\n'), ((353, 378), 'nornir.core.filter.F', 'F', ([], {'groups__contains': '"""sea"""'}), "(groups__contains='sea')\n", (354, 378), False, 'from nornir.core.filter import F\n'), ((381, 406), 'nornir.core.filter.F', 'F', ([], {'groups__contains': '"""sfo"""'}), "(groups__contains='sfo')\n", (382, 406), False, 'from nornir.core.filter import F\n'), ((584, 637), 'nornir.core.filter.F', 'F', ([], {'site_details__wifi_password__contains': '"""<PASSWORD>"""'}), "(site_details__wifi_password__contains='<PASSWORD>')\n", (585, 637), False, 'from nornir.core.filter import F\n'), ((640, 653), 'nornir.core.filter.F', 'F', ([], {'role': '"""WAN"""'}), "(role='WAN')\n", (641, 653), False, 'from nornir.core.filter import F\n'), ((903, 916), 'nornir.core.filter.F', 'F', ([], {'role': '"""WAN"""'}), "(role='WAN')\n", (904, 916), False, 'from nornir.core.filter import F\n'), ((847, 900), 'nornir.core.filter.F', 'F', ([], {'site_details__wifi_password__contains': '"""<PASSWORD>"""'}), "(site_details__wifi_password__contains='<PASSWORD>')\n", (848, 900), False, 'from nornir.core.filter import F\n')] |
# -*- coding: utf-8 -*-
from collections import Iterable
from aserializer.utils import py2to3, registry
from aserializer.fields.fields import BaseSerializerField, SerializerFieldValueError
class SerializerObjectField(BaseSerializerField):
    """Base class for fields whose value is itself a (nested) serializer."""

    def __init__(self, fields=None, exclude=None, *args, **kwargs):
        super(SerializerObjectField, self).__init__(*args, **kwargs)
        # Field-name whitelist/blacklist forwarded to the nested serializer.
        self.only_fields = fields if fields else []
        self.exclude = exclude if exclude else []
        self.unknown_error = None
        self.extras = {}
        self._serializer_cls = None

    @staticmethod
    def normalize_serializer_cls(serializer_cls):
        # A serializer given by name is resolved through the registry;
        # anything else is assumed to already be a class.
        if not isinstance(serializer_cls, py2to3.string):
            return serializer_cls
        return registry.get_serializer(serializer_cls)

    def get_serializer_cls(self):
        """Return the resolved serializer class for this field."""
        return self.normalize_serializer_cls(self._serializer_cls)

    def pre_value(self, fields=None, exclude=None, **extras):
        """Merge additional field restrictions and stash remaining options."""
        if isinstance(fields, (list, tuple, set)):
            self.only_fields = set(list(self.only_fields) + list(fields))
        if isinstance(exclude, (list, tuple, set)):
            self.exclude = set(list(self.exclude) + list(exclude))
        self.unknown_error = extras.pop('unknown_error', None)
        self.extras = extras

    def get_instance(self):
        """Return the wrapped serializer instance; the base class has none."""
        return None

    def __get__(self, instance, owner):
        # Descriptor access: on an instance, surface the nested serializer
        # instead of the field object itself.
        if instance is None:
            return self
        field, _field_name = self._get_field_from_instance(instance=instance)
        return field.get_instance() if field else self
class SerializerField(SerializerObjectField):
    """A field holding a single nested serializer."""

    def __init__(self, serializer, *args, **kwargs):
        super(SerializerField, self).__init__(*args, **kwargs)
        # May be a class or a dotted name; resolved lazily in set_value().
        self._serializer_cls = serializer
        self._serializer = None

    def get_instance(self):
        # The nested serializer instance (None until a value is set).
        return self._serializer

    def validate(self):
        # Delegate validation to the nested serializer; an unset but
        # required field is an error in its own right.
        if self._serializer:
            if not self._serializer.is_valid():
                raise SerializerFieldValueError(self._serializer.errors, field_names=self.names)
        elif self.required:
            raise SerializerFieldValueError(self._error_messages['required'], field_names=self.names)

    def set_value(self, value):
        # None clears the nested serializer; otherwise instantiate it on
        # first use and re-initialize it on subsequent assignments.
        if value is None:
            self._serializer = None
            return
        if self._serializer is None:
            self._serializer_cls = self.normalize_serializer_cls(self._serializer_cls)
            self._serializer = self._serializer_cls(source=value,
                                                    fields=self.only_fields,
                                                    exclude=self.exclude,
                                                    unknown_error=self.unknown_error,
                                                    **self.extras)
        else:
            self._serializer.initial(source=value)

    def _to_native(self):
        # Serialized (dumped) representation, or None when unset.
        if self._serializer:
            return self._serializer.dump()
        return None

    def _to_python(self):
        # Python dict representation, or None when unset.
        if self._serializer:
            return self._serializer.to_dict()
        return None
class ListSerializerField(SerializerObjectField):
    """A field holding a list of nested serializers, optionally sorted on dump."""

    error_messages = {
        'required': 'This list is empty.',
    }

    def __init__(self, serializer, sort_by=None, *args, **kwargs):
        super(ListSerializerField, self).__init__(*args, **kwargs)
        self._serializer_cls = serializer
        self.items = []
        # Caches for the two output representations; cleared on set_value().
        self._python_items = []
        self._native_items = []
        self._sort_by = None
        if sort_by:
            # Accept a single key name or a sequence of key names.
            self._sort_by = [sort_by, ] if isinstance(sort_by, py2to3.string) else sort_by

    def validate(self):
        # Collect errors from every item; an empty but required list is an
        # error in its own right.
        if self.items:
            _errors = []
            for item in self.items:
                if not item.is_valid():
                    _errors.append(item.errors)
            if _errors:
                raise SerializerFieldValueError(_errors)
        elif self.required:
            raise SerializerFieldValueError(self._error_messages['required'], field_names=self.names)

    def get_instance(self):
        # The list of nested serializer instances.
        return self.items

    def add_item(self, source):
        """Wrap ``source`` in a new nested serializer and append it."""
        self._serializer_cls = self.normalize_serializer_cls(self._serializer_cls)
        _serializer = self._serializer_cls(source=source,
                                           fields=self.only_fields,
                                           exclude=self.exclude,
                                           unknown_error=self.unknown_error,
                                           **self.extras)
        self.items.append(_serializer)

    def set_value(self, value):
        # Reset items and caches, then wrap each element of any iterable.
        # NOTE(review): `Iterable` is imported from `collections` at the top
        # of this file, which breaks on Python 3.10+ (moved to collections.abc).
        self.items[:] = []
        self._native_items[:] = []
        self._python_items[:] = []
        if isinstance(value, Iterable):
            for item in value:
                self.add_item(source=item)

    def _to_native(self):
        # Dump each item once and cache; sort the dumped dicts by the
        # configured keys if requested.
        if not self._native_items:
            for item in self.items:
                self._native_items.append(item.dump())
            if self._sort_by:
                self._native_items = sorted(self._native_items,
                                            key=lambda item: [item.get(k, None) for k in self._sort_by])
        return self._native_items

    def _to_python(self):
        # Convert each item once and cache (no sorting on this path).
        if not self._python_items:
            for item in self.items:
                self._python_items.append(item.to_dict())
        # TODO: what about deserialization? do we want/need sorting here as well or do we trust the order of items from json?
        # if self._sort_by:
        #     return sorted(unsorted,
        #                   key=lambda item: [getattr(item, k, None) for k in self._sort_by])
        return self._python_items
| [
"aserializer.utils.registry.get_serializer",
"aserializer.fields.fields.SerializerFieldValueError"
] | [((705, 744), 'aserializer.utils.registry.get_serializer', 'registry.get_serializer', (['serializer_cls'], {}), '(serializer_cls)\n', (728, 744), False, 'from aserializer.utils import py2to3, registry\n'), ((1998, 2072), 'aserializer.fields.fields.SerializerFieldValueError', 'SerializerFieldValueError', (['self._serializer.errors'], {'field_names': 'self.names'}), '(self._serializer.errors, field_names=self.names)\n', (2023, 2072), False, 'from aserializer.fields.fields import BaseSerializerField, SerializerFieldValueError\n'), ((2119, 2207), 'aserializer.fields.fields.SerializerFieldValueError', 'SerializerFieldValueError', (["self._error_messages['required']"], {'field_names': 'self.names'}), "(self._error_messages['required'], field_names=\n self.names)\n", (2144, 2207), False, 'from aserializer.fields.fields import BaseSerializerField, SerializerFieldValueError\n'), ((3891, 3925), 'aserializer.fields.fields.SerializerFieldValueError', 'SerializerFieldValueError', (['_errors'], {}), '(_errors)\n', (3916, 3925), False, 'from aserializer.fields.fields import BaseSerializerField, SerializerFieldValueError\n'), ((3972, 4060), 'aserializer.fields.fields.SerializerFieldValueError', 'SerializerFieldValueError', (["self._error_messages['required']"], {'field_names': 'self.names'}), "(self._error_messages['required'], field_names=\n self.names)\n", (3997, 4060), False, 'from aserializer.fields.fields import BaseSerializerField, SerializerFieldValueError\n')] |
from django.db import models
from django.utils.translation import ugettext_lazy as _
class DirectoryAccessGroup(models.Model):
    """
    Grants expiring group access to the personnel directory.
    """

    organization = models.ForeignKey('core.Organization', on_delete=models.CASCADE)
    group = models.ForeignKey('auth.Group', on_delete=models.CASCADE)
    # Validity window for the grant.
    # NOTE(review): NULL bounds presumably mean open-ended on that side —
    # confirm against the code that checks access.
    active_from = models.DateTimeField(blank=True, null=True)
    active_until = models.DateTimeField(blank=True, null=True)
    # Audit timestamps, maintained automatically by Django.
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        verbose_name = _('directory access group')
        verbose_name_plural = _('directory access groups')
        ordering = ('organization', 'group')
| [
"django.db.models.DateTimeField",
"django.utils.translation.ugettext_lazy",
"django.db.models.ForeignKey"
] | [((226, 290), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""core.Organization"""'], {'on_delete': 'models.CASCADE'}), "('core.Organization', on_delete=models.CASCADE)\n", (243, 290), False, 'from django.db import models\n'), ((303, 360), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""auth.Group"""'], {'on_delete': 'models.CASCADE'}), "('auth.Group', on_delete=models.CASCADE)\n", (320, 360), False, 'from django.db import models\n'), ((379, 422), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (399, 422), False, 'from django.db import models\n'), ((442, 485), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (462, 485), False, 'from django.db import models\n'), ((504, 543), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (524, 543), False, 'from django.db import models\n'), ((561, 596), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (581, 596), False, 'from django.db import models\n'), ((637, 664), 'django.utils.translation.ugettext_lazy', '_', (['"""directory access group"""'], {}), "('directory access group')\n", (638, 664), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((695, 723), 'django.utils.translation.ugettext_lazy', '_', (['"""directory access groups"""'], {}), "('directory access groups')\n", (696, 723), True, 'from django.utils.translation import ugettext_lazy as _\n')] |
""" MACD Indicator
"""
import math
import pandas
from talib import abstract
from analyzers.utils import IndicatorUtils
class SAR(IndicatorUtils):
    def analyze(self, historical_data, signal=('sar',), hot_thresh=None, cold_thresh=None):
        """Performs a parabolic SAR analysis on the historical data.

        (Docstring previously said "macd" — copy-paste error from the MACD
        indicator. The mutable list default for ``signal`` was also replaced
        with an equivalent tuple.)

        Args:
            historical_data (list): A matrix of historical OHCLV data.
            signal (sequence, optional): Defaults to ('sar',). The indicator line to check
                hot/cold against.
            hot_thresh (float, optional): Defaults to None. The threshold at which this might be
                good to purchase.
            cold_thresh (float, optional): Defaults to None. The threshold at which this might be
                good to sell.

        Returns:
            pandas.DataFrame: A dataframe containing the indicators and hot/cold values.
        """

        dataframe = self.convert_to_dataframe(historical_data)
        sar_values = abstract.SAR(dataframe).iloc[:]
        # Drop leading rows where the indicator could not yet be computed.
        sar_values.dropna(how='all', inplace=True)

        if sar_values[signal[0]].shape[0]:
            # Hot above the purchase threshold, cold below the sell threshold.
            sar_values['is_hot'] = sar_values[signal[0]] > hot_thresh
            sar_values['is_cold'] = sar_values[signal[0]] < cold_thresh

        return sar_values
| [
"talib.abstract.SAR"
] | [((967, 990), 'talib.abstract.SAR', 'abstract.SAR', (['dataframe'], {}), '(dataframe)\n', (979, 990), False, 'from talib import abstract\n')] |
import sqlite3
from scripts.artifact_report import ArtifactHtmlReport
from scripts.ilapfuncs import logfunc, tsv, open_sqlite_db_readonly
def get_installedappsGass(files_found, report_folder, seeker, wrap_text):
    """Parse GMS ``app_info`` databases and report distinct installed bundle IDs.

    Args:
        files_found: iterable of candidate file paths; only ``*.db`` files are read.
        report_folder: destination folder for the HTML report and TSV export.
        seeker: unused here; part of the standard artifact-plugin signature.
        wrap_text: unused here; part of the standard artifact-plugin signature.
    """
    for file_found in files_found:
        file_found = str(file_found)

        if file_found.endswith('.db'):
            db = open_sqlite_db_readonly(file_found)
            cursor = db.cursor()
            cursor.execute('''
            SELECT
            distinct(package_name)
            FROM
            app_info
            ''')

            # Multi-user Android paths carry the user number four levels up
            # from the database file; use it to disambiguate per-user reports.
            if 'user' in file_found:
                usernum = file_found.split("/")
                usernum = '_' + str(usernum[-4])
            else:
                usernum = ''

            all_rows = cursor.fetchall()
            usageentries = len(all_rows)
            if usageentries > 0:
                report = ArtifactHtmlReport('Installed Apps')
                report.start_artifact_report(report_folder, f'Installed Apps (GMS){usernum}')
                report.add_script()
                data_headers = ('Bundle ID',)  # Trailing comma keeps this a 1-tuple
                data_list = [(row[0],) for row in all_rows]
                report.write_artifact_data_table(data_headers, data_list, file_found)
                report.end_artifact_report()

                tsvname = f'installed apps - GMS{usernum}'
                tsv(report_folder, data_headers, data_list, tsvname)
            else:
                # BUG FIX: was a plain string literal, so "{usernum}" was
                # logged verbatim instead of interpolated.
                logfunc(f'No Installed Apps data available{usernum}')

            db.close()
| [
"scripts.artifact_report.ArtifactHtmlReport",
"scripts.ilapfuncs.tsv",
"scripts.ilapfuncs.logfunc",
"scripts.ilapfuncs.open_sqlite_db_readonly"
] | [((355, 390), 'scripts.ilapfuncs.open_sqlite_db_readonly', 'open_sqlite_db_readonly', (['file_found'], {}), '(file_found)\n', (378, 390), False, 'from scripts.ilapfuncs import logfunc, tsv, open_sqlite_db_readonly\n'), ((913, 949), 'scripts.artifact_report.ArtifactHtmlReport', 'ArtifactHtmlReport', (['"""Installed Apps"""'], {}), "('Installed Apps')\n", (931, 949), False, 'from scripts.artifact_report import ArtifactHtmlReport\n'), ((1565, 1617), 'scripts.ilapfuncs.tsv', 'tsv', (['report_folder', 'data_headers', 'data_list', 'tsvname'], {}), '(report_folder, data_headers, data_list, tsvname)\n', (1568, 1617), False, 'from scripts.ilapfuncs import logfunc, tsv, open_sqlite_db_readonly\n'), ((1652, 1704), 'scripts.ilapfuncs.logfunc', 'logfunc', (['"""No Installed Apps data available{usernum}"""'], {}), "('No Installed Apps data available{usernum}')\n", (1659, 1704), False, 'from scripts.ilapfuncs import logfunc, tsv, open_sqlite_db_readonly\n')] |
import click
import collections
# Root command group; the dayN sub-groups attach to it below.
@click.group()
def cli():
    pass
@cli.group()
def day1():
    pass
@day1.command()
@click.argument('input_file', type=click.File())
def part1(input_file):
    # For each input line, print the final floor reached by the directions.
    from day1 import part1_stanta_floor_positioning_system
    for directions in input_file:
        floor = part1_stanta_floor_positioning_system(directions)
        print('Final Floor: {0}'.format(floor))
@day1.command()
@click.argument('input_file', type=click.File())
@click.option('--halt', type=int)
def part2(input_file, halt):
    # Print the position of the first step that reaches floor --halt,
    # or a message if that floor is never reached.
    from day1 import part2_santa_fps_halt
    for directions in input_file:
        position = part2_santa_fps_halt(directions, halt)
        if position is not None:
            print('Position: {0}'.format(position))
        else:
            print('Never reached floor: {0}'.format(halt))
@cli.group()
def day2():
    pass
@day2.command()
@click.argument('input_file', type=click.File())
def part1(input_file):
    # Sum the wrapping-paper estimate over all box dimensions in the file.
    from day2 import part1_wrapping_paper_estimate
    total = 0
    for dimensions in input_file:
        total += part1_wrapping_paper_estimate(dimensions)
    print('Total wrapping paper required: {0} sq ft'.format(total))
@day2.command()
@click.argument('input_file', type=click.File())
def part2(input_file):
    # Sum the ribbon estimate over all box dimensions in the file.
    from day2 import part2_ribbon_estimate
    total = 0
    for dimensions in input_file:
        total += part2_ribbon_estimate(dimensions)
    print('Total ribbon required: {0} ft'.format(total))
@cli.group()
def day3():
    pass
@day3.command()
@click.argument('input_file', type=click.File())
def part1(input_file):
    # Print the result of the santa GPS walk for each line of directions.
    from day3 import part1_santa_gps
    for directions in input_file:
        print(part1_santa_gps(directions.strip()))
@day3.command()
@click.argument('input_file', type=click.File())
def part2(input_file):
    # Same walk, but with directions alternating between santa and robo-santa.
    from day3 import part2_santa_and_robo_gps
    for directions in input_file:
        print(part2_santa_and_robo_gps(directions.strip()))
@cli.group()
def day4():
    pass
@day4.command()
@click.argument('secret_key')
def part1(secret_key):
    # Mine an AdventCoin nonce for the given secret key.
    from day4 import part1_adventcoin_miner
    print(part1_adventcoin_miner(secret_key.strip()))
@day4.command()
@click.argument('secret_key')
@click.argument('match', default='000000')
def part2(secret_key, match):
    # Same mining, with a configurable hash prefix to match (default six zeros).
    from day4 import part2_adventcoin_miner
    print(part2_adventcoin_miner(secret_key.strip(), match))
@cli.group()
def day5():
    pass
@day5.command()
@click.argument('input_file', type=click.File())
def part1(input_file):
    # Count how many input lines are "nice" strings and print that count.
    from day5 import part1_is_nice_string
    results = collections.Counter()
    results.update([part1_is_nice_string(directions.strip()) for directions in input_file])
    print(results[True])
| [
"day1.part2_santa_fps_halt",
"click.argument",
"click.group",
"click.option",
"day2.part1_wrapping_paper_estimate",
"click.File",
"collections.Counter",
"day2.part2_ribbon_estimate",
"day1.part1_stanta_floor_positioning_system"
] | [((35, 48), 'click.group', 'click.group', ([], {}), '()\n', (46, 48), False, 'import click\n'), ((470, 502), 'click.option', 'click.option', (['"""--halt"""'], {'type': 'int'}), "('--halt', type=int)\n", (482, 502), False, 'import click\n'), ((1998, 2026), 'click.argument', 'click.argument', (['"""secret_key"""'], {}), "('secret_key')\n", (2012, 2026), False, 'import click\n'), ((2167, 2195), 'click.argument', 'click.argument', (['"""secret_key"""'], {}), "('secret_key')\n", (2181, 2195), False, 'import click\n'), ((2197, 2238), 'click.argument', 'click.argument', (['"""match"""'], {'default': '"""000000"""'}), "('match', default='000000')\n", (2211, 2238), False, 'import click\n'), ((2556, 2577), 'collections.Counter', 'collections.Counter', ([], {}), '()\n', (2575, 2577), False, 'import collections\n'), ((304, 353), 'day1.part1_stanta_floor_positioning_system', 'part1_stanta_floor_positioning_system', (['directions'], {}), '(directions)\n', (341, 353), False, 'from day1 import part1_stanta_floor_positioning_system\n'), ((158, 170), 'click.File', 'click.File', ([], {}), '()\n', (168, 170), False, 'import click\n'), ((627, 665), 'day1.part2_santa_fps_halt', 'part2_santa_fps_halt', (['directions', 'halt'], {}), '(directions, halt)\n', (647, 665), False, 'from day1 import part2_santa_fps_halt\n'), ((455, 467), 'click.File', 'click.File', ([], {}), '()\n', (465, 467), False, 'import click\n'), ((1066, 1107), 'day2.part1_wrapping_paper_estimate', 'part1_wrapping_paper_estimate', (['dimensions'], {}), '(dimensions)\n', (1095, 1107), False, 'from day2 import part1_wrapping_paper_estimate\n'), ((913, 925), 'click.File', 'click.File', ([], {}), '()\n', (923, 925), False, 'import click\n'), ((1374, 1407), 'day2.part2_ribbon_estimate', 'part2_ribbon_estimate', (['dimensions'], {}), '(dimensions)\n', (1395, 1407), False, 'from day2 import part2_ribbon_estimate\n'), ((1229, 1241), 'click.File', 'click.File', ([], {}), '()\n', (1239, 1241), False, 'import click\n'), ((1554, 
1566), 'click.File', 'click.File', ([], {}), '()\n', (1564, 1566), False, 'import click\n'), ((1766, 1778), 'click.File', 'click.File', ([], {}), '()\n', (1776, 1778), False, 'import click\n'), ((2463, 2475), 'click.File', 'click.File', ([], {}), '()\n', (2473, 2475), False, 'import click\n')] |
# Utilization Checks
# https://aonecode.com/amazon-online-assessment-utilization-checks
import math
class UtilizationChecks:
    """Auto-scaler simulation (Amazon OA "Utilization Checks").

    Walks per-second average-utilization readings and scales the instance
    count: double when utilization exceeds 60%, halve (rounding up) when it
    drops below 25%, and skip the next 10 readings after any scaling action.
    """

    MAX_INSTANCES = 2 * 10 ** 8  # scaling ceiling: never double past this

    def solve(self, instances, averageUtil):
        """Return (and print) the final instance count after all readings."""
        i = 0
        while i < len(averageUtil):
            util = averageUtil[i]
            if 25 <= util <= 60:
                # In band: no action, check the next second.
                i += 1
                continue
            # High or low utilization: scale the number of instances.
            if util > 60:
                if 2 * instances > self.MAX_INSTANCES:
                    # BUG FIX: previously `continue` without advancing `i`,
                    # which looped forever once the ceiling was reached.
                    i += 1
                    continue
                instances *= 2
            else:
                if instances == 1:
                    # Cannot scale below one instance; move on.
                    i += 1
                    continue
                instances = math.ceil(instances / 2)
            # After a scaling action, utilization is not checked for 10 seconds.
            i += 10
        print(instances)
        return instances
# Smoke-test the solver against the sample scenarios.
uc = UtilizationChecks()
uc.solve(2, [25, 23, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 76, 80])
uc.solve(3, [5, 10, 80])
uc.solve(5, [30, 5, 4, 8, 19, 89])
| [
"math.ceil"
] | [((706, 730), 'math.ceil', 'math.ceil', (['(instances / 2)'], {}), '(instances / 2)\n', (715, 730), False, 'import math\n')] |
#!/usr/bin/env python
import dynamic_reconfigure.server
from jsk_topic_tools import ConnectionBasedTransport
import json
import os.path as osp
import rospy
from std_msgs.msg import String
from jsk_arc2017_common.cfg import CandidatesPublisherConfig
from jsk_recognition_msgs.msg import Label
from jsk_recognition_msgs.msg import LabelArray
class CandidatesPublisher(ConnectionBasedTransport):
    """Periodically publish the candidate object labels for the current
    target location (a bin or the tote), read from item_location_file.json."""

    def __init__(self):
        super(CandidatesPublisher, self).__init__()
        self.pub = self.advertise(
            '~output/candidates', LabelArray, queue_size=1)
        # target_location is supplied via dynamic_reconfigure (_config_cb).
        self.srv = dynamic_reconfigure.server.Server(
            CandidatesPublisherConfig, self._config_cb)
        self.label_names = rospy.get_param('~label_names')
        self.json_dir = rospy.get_param('~json_dir', None)
        hz = rospy.get_param('~hz', 10.0)
        self.timer = rospy.Timer(rospy.Duration(1.0 / hz), self._timer_cb)

    def subscribe(self):
        # The json_dir can also be updated at runtime through this topic.
        self.sub = rospy.Subscriber('~input/json_dir', String, self._cb)

    def unsubscribe(self):
        self.sub.unregister()

    def _config_cb(self, config, level):
        # Dynamic-reconfigure callback: pick which location to publish for.
        self.target_location = config.target_location
        return config

    def _cb(self, msg):
        # Topic callback: switch to a new json directory.
        self.json_dir = msg.data

    def _timer_cb(self, event):
        # Periodic publish: read the item-location file, select the contents
        # of the configured location, and publish the candidate labels.
        if self.json_dir is None:
            rospy.logwarn_throttle(10, 'Input json_dir is not set.')
            return
        if not osp.isdir(self.json_dir):
            rospy.logfatal_throttle(
                10, 'Input json_dir is not directory: %s' % self.json_dir)
            return
        filename = osp.join(self.json_dir, 'item_location_file.json')
        # NOTE(review): if the file does not exist, bin_contents/tote_contents
        # below are unbound and this callback raises NameError — confirm the
        # file is guaranteed to be present.
        if osp.exists(filename):
            with open(filename) as location_f:
                data = json.load(location_f)
            bin_contents = {}
            for bin_ in data['bins']:
                bin_contents[bin_['bin_id']] = bin_['contents']
            tote_contents = data['tote']['contents']
        if self.target_location[:3] == 'bin':
            # e.g. 'bin_A' -> contents of bin 'A'.
            contents = bin_contents[self.target_location[4]]
        elif self.target_location == 'tote':
            contents = tote_contents
        else:
            return
        # Labels starting with '__' (e.g. background classes) are always kept.
        candidates_fixed = [l for l in self.label_names
                            if l.startswith('__')]
        candidates = candidates_fixed + contents
        label_list = [self.label_names.index(x) for x in candidates]
        label_list = sorted(label_list)
        labels = []
        for label in label_list:
            label_msg = Label()
            label_msg.id = label
            label_msg.name = self.label_names[label]
            labels.append(label_msg)
        msg = LabelArray()
        msg.labels = labels
        msg.header.stamp = rospy.Time.now()
        self.pub.publish(msg)
if __name__ == '__main__':
    # Start the ROS node and spin until shutdown.
    rospy.init_node('candidates_publisher')
    candidates_publisher = CandidatesPublisher()
    rospy.spin()
| [
"os.path.exists",
"rospy.logfatal_throttle",
"rospy.init_node",
"rospy.get_param",
"jsk_recognition_msgs.msg.Label",
"os.path.join",
"jsk_recognition_msgs.msg.LabelArray",
"rospy.Time.now",
"json.load",
"rospy.logwarn_throttle",
"os.path.isdir",
"rospy.spin",
"rospy.Duration",
"rospy.Subsc... | [((2897, 2936), 'rospy.init_node', 'rospy.init_node', (['"""candidates_publisher"""'], {}), "('candidates_publisher')\n", (2912, 2936), False, 'import rospy\n'), ((2990, 3002), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (3000, 3002), False, 'import rospy\n'), ((706, 737), 'rospy.get_param', 'rospy.get_param', (['"""~label_names"""'], {}), "('~label_names')\n", (721, 737), False, 'import rospy\n'), ((762, 796), 'rospy.get_param', 'rospy.get_param', (['"""~json_dir"""', 'None'], {}), "('~json_dir', None)\n", (777, 796), False, 'import rospy\n'), ((810, 838), 'rospy.get_param', 'rospy.get_param', (['"""~hz"""', '(10.0)'], {}), "('~hz', 10.0)\n", (825, 838), False, 'import rospy\n'), ((959, 1012), 'rospy.Subscriber', 'rospy.Subscriber', (['"""~input/json_dir"""', 'String', 'self._cb'], {}), "('~input/json_dir', String, self._cb)\n", (975, 1012), False, 'import rospy\n'), ((1594, 1644), 'os.path.join', 'osp.join', (['self.json_dir', '"""item_location_file.json"""'], {}), "(self.json_dir, 'item_location_file.json')\n", (1602, 1644), True, 'import os.path as osp\n'), ((1656, 1676), 'os.path.exists', 'osp.exists', (['filename'], {}), '(filename)\n', (1666, 1676), True, 'import os.path as osp\n'), ((872, 896), 'rospy.Duration', 'rospy.Duration', (['(1.0 / hz)'], {}), '(1.0 / hz)\n', (886, 896), False, 'import rospy\n'), ((1326, 1382), 'rospy.logwarn_throttle', 'rospy.logwarn_throttle', (['(10)', '"""Input json_dir is not set."""'], {}), "(10, 'Input json_dir is not set.')\n", (1348, 1382), False, 'import rospy\n'), ((1418, 1442), 'os.path.isdir', 'osp.isdir', (['self.json_dir'], {}), '(self.json_dir)\n', (1427, 1442), True, 'import os.path as osp\n'), ((1456, 1543), 'rospy.logfatal_throttle', 'rospy.logfatal_throttle', (['(10)', "('Input json_dir is not directory: %s' % self.json_dir)"], {}), "(10, 'Input json_dir is not directory: %s' % self.\n json_dir)\n", (1479, 1543), False, 'import rospy\n'), ((2738, 2750), 'jsk_recognition_msgs.msg.LabelArray', 
'LabelArray', ([], {}), '()\n', (2748, 2750), False, 'from jsk_recognition_msgs.msg import LabelArray\n'), ((2814, 2830), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (2828, 2830), False, 'import rospy\n'), ((1748, 1769), 'json.load', 'json.load', (['location_f'], {}), '(location_f)\n', (1757, 1769), False, 'import json\n'), ((2577, 2584), 'jsk_recognition_msgs.msg.Label', 'Label', ([], {}), '()\n', (2582, 2584), False, 'from jsk_recognition_msgs.msg import Label\n')] |
#!/usr/bin/env python
from redis import Redis
import uuid
import sys
import os
import subprocess
import shutil
import numpy as np
import itertools as it
import json
from rdkit import Chem
from rdkit.Chem import AllChem, ChemicalForceFields
# Redis connection for job inputs/outputs; address comes from EXECUTOR_CONSTR.
redis = Redis.from_url("redis://" + os.environ.get("EXECUTOR_CONSTR", "127.0.0.1:6379/0"))

# NOTE(review): energy window for treating conformers as duplicates — units
# depend on the (currently disabled) xtb step; confirm before re-enabling.
ENERGY_THRESHOLD = 1e-4
# Half-width (degrees) of the torsion constraint window around each angle.
ANGLE_DELTA = 1e-7
# Max MMFF minimization iterations used to relieve bad contacts.
FF_RELAX_STEPS = 50
def clockwork(resolution):
    """Return (start, step, n_steps) for a clockwork dihedral scan.

    Resolution 0 is the single 0-degree point; each higher resolution
    halves the step and offsets the start so new points interleave with
    all coarser resolutions.
    """
    if resolution == 0:
        return 0, 360, 1
    n_steps = 2 ** (resolution - 1)
    return 360.0 / (2 * n_steps), 360.0 / n_steps, n_steps
def get_classical_constrained_geometry(sdfstr, torsions, molname, dihedrals, angles):
    """Build an XYZ geometry with the requested dihedrals set and MMFF-relaxed.

    Args:
        sdfstr: molecule as an SDF/mol-block string (hydrogens included).
        torsions: list of 4-atom index tuples, one per known torsion.
        molname: molecule identifier (unused here; kept for the worker API).
        dihedrals: indices into ``torsions`` selecting which angles to drive.
        angles: target angle in degrees for each selected dihedral.

    Returns:
        str: XYZ-format block ("<natoms>", blank line, then "element x y z" lines).
    """
    mol = Chem.MolFromMolBlock(sdfstr, removeHs=False)
    ffprop = ChemicalForceFields.MMFFGetMoleculeProperties(mol)
    ffc = ChemicalForceFields.MMFFGetMoleculeForceField(mol, ffprop)
    conformer = mol.GetConformer()

    # Set angles and constraints for all driven torsions.
    for dih_id, angle in zip(dihedrals, angles):
        # Apply the clockwork angle; RDKit can raise for degenerate geometries.
        # BUG FIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit.
        try:
            Chem.rdMolTransforms.SetDihedralDeg(conformer, *torsions[dih_id], float(angle))
        except Exception:
            pass
        # Pin the torsion with a near-zero-width, very stiff constraint.
        ffc.MMFFAddTorsionConstraint(*torsions[dih_id], False, angle - ANGLE_DELTA, angle + ANGLE_DELTA, 1.0e10)

    # Short minimization to relieve bad contacts without leaving the well.
    try:
        ffc.Minimize(maxIts=FF_RELAX_STEPS, energyTol=1e-2, forceTol=1e-3)
    except RuntimeError:
        pass

    atoms = [atom.GetSymbol() for atom in mol.GetAtoms()]
    coordinates = conformer.GetPositions()
    body = '\n'.join(f'{element} {coords[0]} {coords[1]} {coords[2]}'
                     for element, coords in zip(atoms, coordinates))
    return f'{len(atoms)}\n\n' + body
def do_workpackage(molname, dihedrals, resolution):
    # Scan every combination of clockwork angles for the selected dihedrals
    # of `molname` (inputs fetched from redis) and return results as JSON.
    # The xtb re-optimization / deduplication pipeline is currently disabled,
    # so the geometry/energy lists stay empty.
    ndih = len(dihedrals)
    start, step, n_steps = clockwork(resolution)
    scanangles = np.arange(start, start+step*n_steps, step)

    # fetch input
    sdfstr = redis.get(f'clockwork:{molname}:sdf').decode("ascii")
    torsions = json.loads(redis.get(f'clockwork:{molname}:dihedrals').decode("ascii"))

    accepted_geometries = []
    accepted_energies = []
    # Cartesian product: every scan angle for every driven dihedral.
    for angles in it.product(scanangles, repeat=ndih):
        xyzfile = get_classical_constrained_geometry(sdfstr, torsions, molname, dihedrals, angles)
        print (xyzfile)
        #optxyzfile, energy, bonds = get_xtb_geoopt(xyzfile)
        #if set(bonds) != set(refbonds):
        #	continue
        #for i in range(len(accepted_energies)):
        #	if abs(accepted_energies[i] - energy) < ENERGY_THRESHOLD:
        #		# compare geometries optxyzfile vs accepted_geometries
        #	#else:
        #	#	accepted_energies.append(energy)
        #	#	accepted_geometries.append(optxyzfile)

    results = {}
    results['mol'] = molname
    results['ndih'] = ndih
    results['res'] = resolution
    results['geometries'] = accepted_geometries
    results['energies'] = accepted_energies
    return json.dumps(results)
# Ad-hoc smoke run against the "debug" molecule.
do_workpackage("debug", (1, 2, 3), 2)
| [
"rdkit.Chem.MolFromMolBlock",
"itertools.product",
"json.dumps",
"os.environ.get",
"rdkit.Chem.ChemicalForceFields.MMFFGetMoleculeForceField",
"rdkit.Chem.ChemicalForceFields.MMFFGetMoleculeProperties",
"numpy.arange"
] | [((760, 804), 'rdkit.Chem.MolFromMolBlock', 'Chem.MolFromMolBlock', (['sdfstr'], {'removeHs': '(False)'}), '(sdfstr, removeHs=False)\n', (780, 804), False, 'from rdkit import Chem\n'), ((818, 868), 'rdkit.Chem.ChemicalForceFields.MMFFGetMoleculeProperties', 'ChemicalForceFields.MMFFGetMoleculeProperties', (['mol'], {}), '(mol)\n', (863, 868), False, 'from rdkit.Chem import AllChem, ChemicalForceFields\n'), ((877, 935), 'rdkit.Chem.ChemicalForceFields.MMFFGetMoleculeForceField', 'ChemicalForceFields.MMFFGetMoleculeForceField', (['mol', 'ffprop'], {}), '(mol, ffprop)\n', (922, 935), False, 'from rdkit.Chem import AllChem, ChemicalForceFields\n'), ((1846, 1892), 'numpy.arange', 'np.arange', (['start', '(start + step * n_steps)', 'step'], {}), '(start, start + step * n_steps, step)\n', (1855, 1892), True, 'import numpy as np\n'), ((2127, 2162), 'itertools.product', 'it.product', (['scanangles'], {'repeat': 'ndih'}), '(scanangles, repeat=ndih)\n', (2137, 2162), True, 'import itertools as it\n'), ((2842, 2861), 'json.dumps', 'json.dumps', (['results'], {}), '(results)\n', (2852, 2861), False, 'import json\n'), ((294, 347), 'os.environ.get', 'os.environ.get', (['"""EXECUTOR_CONSTR"""', '"""127.0.0.1:6379/0"""'], {}), "('EXECUTOR_CONSTR', '127.0.0.1:6379/0')\n", (308, 347), False, 'import os\n')] |
# -*- coding: utf-8 -*-
"""
Javelin Web2Py Admin Controller
"""
# metadata
# Module metadata consumed by the Javelin module registry / UI.
__author__ = "<NAME>"
__copyright__ = "(c) 2013, Jacobson and Varni, LLC"
__date__ = "7/12/2013"
__email__ = "<EMAIL>"
__data__ = {'name' : 'jadmin', 'label' : 'Admin', 'description' : 'Only accessible to admins',
			'icon' : 'briefcase', 'u-icon' : u'\uf0b1', 'color':'orange', 'required' : True}
import time
from datetime import datetime
from applications.javelin.ctr_data import ctr_enabled, get_ctr_data
from gluon.contrib import simplejson as json
from gluon.tools import Service
from gluon.storage import Storage
# JSON-RPC service wrapper for this controller's callable actions.
service = Service(globals())

# Document kinds create_doc() can render, with their dispatch values.
DOC_TYPES = Storage(
	CALLSLIP=Storage(value=0, label="Call Slips"),
	ATTSHEETS=Storage(value=1, label="Attendance Sheets"),
	NAMETAGS=Storage(value=2, label="Nametags")
)
@auth.requires_login()
@auth.requires_membership('admin')
def index():
	"""Loads the index page for the 'Admin' controller

	:returns: a dictionary to pass to the view with the list of ctr_enabled and the active module ('admin')
	"""
	ctr_data = get_ctr_data()
	# All users, plus those still awaiting registration approval.
	users = db().select(db.auth_user.ALL)
	approvals = db(db.auth_user.registration_key=='pending').select(db.auth_user.ALL)
	return dict(ctr_enabled=ctr_enabled, ctr_data=ctr_data, active_module='jadmin', users=users, approvals=approvals, doctypes=DOC_TYPES)
@auth.requires_login()
@auth.requires_membership('admin')
@service.json
def create_doc(doctype, data):
logger.debug("CREATE DOC CALLED")
import StringIO
from reportlab.platypus import SimpleDocTemplate, Paragraph, Table, TableStyle, Image, Spacer
from reportlab.platypus.flowables import PageBreak
from reportlab.lib.styles import ParagraphStyle
from reportlab.lib.enums import TA_CENTER, TA_LEFT
from reportlab.lib.pagesizes import letter, inch
from reportlab.lib import colors
io = StringIO.StringIO()
doc = SimpleDocTemplate(io, pagesize=letter,
rightMargin=0.18*inch, leftMargin=0.18*inch, topMargin=0.18*inch, bottomMargin=0)
elements = list()
doctype = int(doctype)
if data: data = json.loads(data)
if doctype == DOC_TYPES.CALLSLIP.value:
doc_title = "Call_Slips"
people = data['people']
message = data['message']
persons = list()
for p in people:
if p.startswith('group_'):
group = db(db.group_rec.group_id==p.replace('group_', '')).select(db.person.id,
join=db.group_rec.on(db.person.id==db.group_rec.person_id))
for g in group:
if g.id not in persons:
persons.append(g.id)
elif p.startswith('grade_'):
grade = db(db.person.grade==p.replace('grade_', '')).select(db.person.id)
for g in grade:
if g.id not in persons:
persons.append(g.id)
elif p == 'all_leaders':
leaders = db(db.person.leader==True).select(db.person.id)
for l in leaders:
if l.id not in persons:
persons.append(l.id)
elif p == 'all_people':
allpeople = db().select(db.person.id)
for a in allpeople:
if a.id not in persons:
persons.append(a.id)
else:
if p not in persons:
persons.append(p)
people = [Storage(id=pid, last_name=db(db.person.id==pid).select(db.person.last_name).first().last_name,
first_name=db(db.person.id==pid).select(db.person.first_name).first().first_name,
courses=['{}: {}'.format(c.period, c.room) for c in db().select(db.course.period, db.course.room,
join=db.course_rec.on((db.course.id==db.course_rec.course_id) & (db.course_rec.student_id==pid)),
orderby=db.course.period)]
) for pid in persons]
i = 0
centerStyle = ParagraphStyle(name='Center', alignment=TA_CENTER)
leftStyle = ParagraphStyle(name='Left', alignment=TA_LEFT)
tableStyle = TableStyle([('VALIGN',(0,0),(-1,-1),'TOP'),
('INNERGRID', (0,0), (-1,-1), 0.25, colors.black)])
page = list()
for person in people:
page.append([Paragraph("<para alignment='left'><br></para>" +\
"<para alignment='center'><font face='Times-Bold' size=16>Vintage Crusher Crew</font><br><br><br></para>" +\
"<para alignment='left'><font face='Times' size=14><b>Name:</b> {} {}</font><br><br></para>".format(person.first_name, person.last_name) +\
"<para alignment='left'><font face='Times' size=12><b>Rooms:</b> {}</font><br><br></para>".format(', '.join(person.courses)) +\
"<para alignment='left'><font face='Times' size=12><b>Message:</b></font><br></para>" +\
"<para alignment='left'><font face='Times' size=12>{}</font></para>".format(message), leftStyle)])
i = (i+1)%4
if i == 0:
table = Table(page, colWidths=[8*inch], rowHeights=[2.5*inch]*len(page))
table.setStyle(tableStyle)
elements.append(table)
elements.append(PageBreak())
page = list()
elif doctype == DOC_TYPES.ATTSHEETS.value:
pass
elif doctype == DOC_TYPES.NAMETAGS.value:
people = data['people']
event_name = data['event_name']
events = data['events']
present = data['present']
persons = list()
for p in people:
if p.startswith('group_'):
group = db(db.group_rec.group_id==p.replace('group_', '')).select(db.person.id,
join=db.group_rec.on(db.person.id==db.group_rec.person_id))
for g in group:
if g.id not in persons:
persons.append(g.id)
elif p.startswith('grade_'):
grade = db(db.person.grade==p.replace('grade_', '')).select(db.person.id)
for g in grade:
if g.id not in persons:
persons.append(g.id)
elif p == 'all_leaders':
leaders = db(db.person.leader==True).select(db.person.id)
for l in leaders:
if l.id not in persons:
persons.append(l.id)
elif p == 'all_people':
allpeople = db().select(db.person.id)
for a in allpeople:
if a.id not in persons:
persons.append(a.id)
else:
if p not in persons:
persons.append(p)
centerStyle = ParagraphStyle(name='Center', alignment=TA_CENTER)
leftStyle = ParagraphStyle(name='Left', alignment=TA_LEFT)
tableStyle = TableStyle([('VALIGN',(0,-1),(-1,-1),'TOP')])
label_num = 0
row_num = 0
labels = list()
for pid in persons:
row = db(db.person.id==pid).select(db.person.ALL).first()
label = list()
if label_num == 2:
table = Table([labels], colWidths=[4*inch,0.14*inch,4*inch], rowHeights=[2*inch]*(len(labels)/2))
table.setStyle(tableStyle)
elements.append(table)
label_num = 0
labels = list()
row_num += 1
if row_num == 5:
row_num = 0
elements.append(PageBreak())
header = Paragraph("<font face='Times-Bold' size=11>{} {}</font>".format(year, event_name), centerStyle)
label.append(header)
label.append(Spacer(1,11))
firstName = Paragraph("<font face='Times-Bold' size=18>{}</font>".format(row.first_name), centerStyle)
label.append(firstName)
label.append(Spacer(1, 11))
lastName = Paragraph("<font face='Times-Roman' size=11>{}</font>".format(row.last_name), centerStyle)
label.append(lastName)
label.append(Spacer(1,20))
# if row.crew.wefsk != '' or row.crew.wefsk != None or row.crew.wefsk != 'N/A':
# try:
# rooms = rotation(row.crew.wefsk.split('-')[0], row.crew.wefsk.split('-')[1])
# except:
# rooms = 'N/A'
# else:
# rooms = 'N/A'
label.append(Paragraph("<font face='Times-Roman' size=11>ID#: {}</font>".format(row.student_id), leftStyle))
label.append(Paragraph("<font face='Times-Roman' size=11>Crew #: {}</font>".format(row.crew), leftStyle))
# label.append(Paragraph("<font face='Times-Roman' size=11>Crew Room: {}</font>".format(row.crew.room), leftStyle))
# label.append(Paragraph("<font face='Times-Roman' size=11>W.E.F.S.K. Rotation: {}</font>".format(rooms), leftStyle))
labels.append(label)
if label_num == 0:
labels.append(Spacer(14, 144))
label_num += 1
doc_title = '_'.join(event_name.split())
doc.build(elements)
io.seek(0)
now = datetime.now().strftime('%Y-%m-%d')
filename = "{}_{}_{}.pdf".format(doc_title, now, int(time.time()))
file_id = db.file.insert(name=filename, file=db.file.file.store(io, filename))
db_file = db.file(file_id).file
return dict(filename=db_file)
@auth.requires_login()
@auth.requires_membership('admin')
@service.json
def update_names(names):
names = json.loads(names)
response = []
for name in names:
r = db.module_names.update_or_insert(name=name['name'], label=name['value'])
response.append(r)
errors = list()
for i in range(len(response)):
if response[i] == 0:
errors.append(names[i])
return dict(errors=errors)
@auth.requires_login()
@auth.requires_membership('admin')
@service.json
def approve_user(id):
response = db(db.auth_user.id==id).update(registration_key='')
return dict(response=response)
@auth.requires_login()
@auth.requires_membership('admin')
@service.json
def disapprove_user(id):
response = db(db.auth_user.id==id).delete()
return dict(response=response)
@auth.requires_login()
@auth.requires_membership('admin')
@service.json
def import_from_csv(csv_file):
"""Imports records into the database from a CSV file
:param file: the file to be imported
:param contains_ids: a boolean value which specifies if the records have ids; default is True
:returns: a dictionary with a response, either a 0 or 1, depending on success
"""
response = list()
lines = csv_file.rstrip().splitlines()
if len(lines) > 0:
columns = lines.pop(0).split(',')
for i in range(len(columns)):
columns[i] = '_'.join(columns[i].lower().split())
for line in lines:
record = dict()
line = line.split(',')
for i in range(len(line)):
record[columns[i]] = line[i]
record = dict((k,v) for k,v in record.items() if k in db.person.fields)
response.append(db.person.update_or_insert(db.person.id==record['id'], **record))
return dict(response=response)
@auth.requires_login()
@auth.requires_membership('admin')
@service.json
def import_from_query(csv_file, leaders):
"""Imports records into the database from a CSV file (in the form of the queries from VHS)
:param file: the file to be imported
:returns: a dictionary with a response, either a 0 or 1, depending on success
"""
import csv
import StringIO
leaders = True if leaders=="true" else False
def phone_format(n):
try:
return format(int(n[:-1]), ",").replace(",", "-") + n[-1]
except:
return None
if not leaders:
file_string = StringIO.StringIO(csv_file)
lines = list(csv.reader(file_string, skipinitialspace=True))
del file_string
del csv_file
# INSERT STUDENTS
student_ids = list()
teacher_ids = list()
course_ids = list()
columns = lines.pop(0)
while len(lines) > 0:
record = dict()
line = lines.pop(0)
student_id = line[columns.index('student_id')]
teacher_id = line[columns.index('teacher_id')]
course_id = line[columns.index('course_id')]
if student_id and student_id not in student_ids:
student_ids.append(student_id)
for i in range(len(line)):
record[columns[i]] = line[i]
record = dict((k,v) for k,v in record.items() if k in db.person.fields)
if record.get('cell_phone', None):
record['cell_phone'] = phone_format(record['cell_phone'])
if record.get('home_phone', None):
record['home_phone'] = phone_format(record['home_phone'])
db.person.update_or_insert(db.person.student_id==student_id, **record)
if teacher_id and teacher_id not in teacher_ids:
teacher_ids.append(teacher_id)
db.teacher.update_or_insert(db.teacher.teacher_id==teacher_id, **{
'teacher_id':line[columns.index('teacher_id')],
'teacher_name':line[columns.index('teacher_name')]})
if course_id and teacher_id and course_id not in course_ids:
course_ids.append(course_id)
teacher = db(db.teacher.teacher_id==teacher_id).select(db.teacher.id).first()
if teacher:
db.course.update_or_insert(db.course.course_id==course_id, **{
'course_id':line[columns.index('course_id')],
'code':line[columns.index('course_code')],
'title':line[columns.index('course_title')],
'period':line[columns.index('period')],
'room':line[columns.index('room')],
'teacher_id':teacher.id})
if course_id and student_id:
course = db(db.course.course_id==course_id).select().first()
student = db(db.person.student_id==student_id).select().first()
if course and student:
db.course_rec.update_or_insert((db.course_rec.course_id==course.id) &
(db.course_rec.student_id==student.id),
course_id=course.id,
student_id=student.id)
db.commit()
del record
del line
return dict(response=True)
else:
errors = list()
lines = list(csv.reader(StringIO.StringIO(csv_file), skipinitialspace=True))
columns = lines.pop(0)
short_tasks = {
'Team Sacrifice (Must have a car and willingness to work later than others)' : 'Team Sacrifice',
"Peer Support (Must be enrolled in Mr. Ward's Psychology or Peer Support class)" : 'Peer Support',
"Tutor/Study Buddy (Academic credits are available for this option)" : 'Tutor/Study Buddy',
"Database Manager (Must know Excel, Mail merge, and other technologies)" : 'Database Manager',
"Facebook Maintenance (You are responsible for up keeping on our page. Must be a FB addict)" : "Facebook Maintenance",
"Fundraising Team" : "Fundraising Team",
"TAs (Work with freshmen and Mr. Varni, Mr. Ward, or Mrs. Housley during the school day (Academic credits are available for this option)": "TAs",
"Posters & Propaganda" : "Posters & Propaganda",
"Public Outreach (Attend Parent Night, Back-to-School, other public events)" : 'Public Outreach',
"ASB Support (Those enrolled in 4th period Leadership class should check this option, but others are welcome as well)" : "ASB Support",
"L.O.C.s (Loyal Order of the Crushers. Attend home athletic and extracurricular events)": "L.O.C.s",
"Dirty 30 (Explain various aspects of high school culture to freshmen on Orientation Day afternoon)" : "Dirty 30",
"Set-up (Room Mapping) and Clean-up (Orientation Day only)": "Set-up and Clean-up",
"Homecoming Parade (Dress up and ride on our float! Easy!)" : "Homecoming Parade",
"Security/Safety (Helps keep freshmen in line; works with Peer Support on Orientation Day)": "Security/Safety",
"Food Prep & Clean-up (Orientation Day only)": "Food Prep & Clean-up",
"Fashion (Make costumes for House Hotties and Homecoming Parade)" : "Fashion",
'Burgundy Beauties and Golden Guns (Formerly "House Hotties")' : "Burgundy Beauties and Golden Guns",
"Audio-Visual (Responsible for music and videos during Orientation)" : "Audio-Visual",
"A-Team (Alumni only)": "A-Team"
}
task_teams = [task.name for task in db().select(db.groups.name)]
for line in lines:
record = dict()
for i in range(len(line)):
if columns[i] == 'last_name' or columns[i] == 'first_name':
line[i] = line[i].capitalize()
record[columns[i]] = line[i]
record = dict((k,v) for k,v in record.items() if k in db.person.fields)
if record.get('cell_phone', None):
record['cell_phone'] = phone_format(record['cell_phone'])
try:
person = db((db.person.last_name==record['last_name']) &
(db.person.first_name==record['first_name'])).select(db.person.ALL).first()
if person:
person_id = person.id
db(db.person.id==person_id).update(**record)
db(db.person.id==person_id).update(leader=True)
aTasks = line[columns.index('a_tasks')].split(',')
bTasks = line[columns.index('b_tasks')].split(',')
cTasks = line[columns.index('c_tasks')].split(',')
tasks_to_add = list()
for task in aTasks:
if task not in task_teams and task in short_tasks.values():
task_id = db.groups.insert(name=task)
tasks_to_add.append(task_id)
task_teams.append(task)
elif task in task_teams and task in short_tasks.values():
task_row = db(db.groups.name==task).select().first()
if task_row:
task_id = task_row.id
tasks_to_add.append(task_id)
for task in bTasks:
if task not in task_teams and task in short_tasks.values():
task_id = db.groups.insert(name=task)
tasks_to_add.append(task_id)
task_teams.append(task)
elif task in task_teams and task in short_tasks.values():
task_row = db(db.groups.name==task).select().first()
if task_row:
task_id = task_row.id
tasks_to_add.append(task_id)
for task in cTasks:
if task not in task_teams and task in short_tasks.values():
task_id = db.groups.insert(name=task)
tasks_to_add.append(task_id)
task_teams.append(task)
elif task in task_teams and task in short_tasks.values():
task_row = db(db.groups.name==task).select().first()
if task_row:
task_id = task_row.id
tasks_to_add.append(task_id)
for task in tasks_to_add:
if not db((db.group_rec.group_id==task_id) & (db.group_rec.person_id==person_id)).select().first():
db.group_rec.insert(group_id=task_id, person_id=person_id)
except:
errors.append(record['last_name'] + ", " + record['first_name'])
return dict(errors=errors)
@auth.requires_login()
@auth.requires_membership('admin')
@service.json
def get_person_group_data(query=None):
if query:
qlist = query.split()
query = query.lower()
students = db(((db.person.last_name.contains(qlist, all=True)) |
(db.person.first_name.contains(qlist, all=True))) ).select(
db.person.id, db.person.last_name, db.person.first_name,
orderby=db.person.last_name|db.person.first_name).as_list()
allfields = [{'text': 'All', 'children':[d for d in [{'id':'all_people', 'last_name':'All Students', 'first_name' : ''},
{'id':'all_leaders', 'last_name':'All Leaders', 'first_name' : ''}] if query in d['last_name'].lower()]}]
allfields = [] if not allfields[0]['children'] else allfields
gradefields = [{'text': 'By Grade', 'children':[d for d in [{'id':'grade_9', 'last_name': 'Freshmen', 'first_name': ''},
{'id':'grade_10', 'last_name': 'Sophomores', 'first_name': ''},
{'id':'grade_11', 'last_name': 'Juniors', 'first_name': ''},
{'id':'grade_12', 'last_name': 'Seniors', 'first_name': ''}] if query in d['last_name'].lower()]}]
gradefields = [] if not gradefields[0]['children'] else gradefields
taskteams = [{'text': 'Task Teams', 'children': [{'id':'group_' + str(g.id),
'last_name': g.name,
'first_name':''}
for g in db(db.groups.name.contains(qlist)).select(db.groups.ALL, orderby=db.groups.name)]}]
taskteams = [] if not taskteams[0]['children'] else taskteams
students = [] if not students else [{'text': 'Students', 'children':students}]
people = allfields +\
gradefields +\
taskteams +\
students
else:
students = db().select(db.person.id, db.person.last_name, db.person.first_name,
orderby=db.person.last_name|db.person.first_name).as_list()
people = [{'text': 'All', 'children':[{'id':'all_people', 'last_name':'All Students', 'first_name' : ''},
{'id':'all_leaders', 'last_name':'All Leaders', 'first_name' : ''}]}] +\
[{'text': 'By Grade', 'children':[{'id':'grade_9', 'last_name': 'Freshmen', 'first_name': ''},
{'id':'grade_10', 'last_name': 'Sophomores', 'first_name': ''},
{'id':'grade_11', 'last_name': 'Juniors', 'first_name': ''},
{'id':'grade_12', 'last_name': 'Seniors', 'first_name': ''} ]}] +\
[{'text': 'Task Teams', 'children': [{'id':'group_' + str(g.id),
'last_name': g.name,
'first_name':''}
for g in db().select(db.groups.ALL, orderby=db.groups.name)]}] +\
[{'text': 'Students', 'children':students}]
return people
@auth.requires_login()
@auth.requires_membership('admin')
def call():
"""Call function used when calling a function from an HTTP request"""
return service() | [
"StringIO.StringIO",
"reportlab.platypus.TableStyle",
"reportlab.platypus.flowables.PageBreak",
"gluon.contrib.simplejson.loads",
"reportlab.lib.styles.ParagraphStyle",
"reportlab.platypus.Spacer",
"datetime.datetime.now",
"csv.reader",
"time.time",
"reportlab.platypus.SimpleDocTemplate",
"appli... | [((1045, 1059), 'applications.javelin.ctr_data.get_ctr_data', 'get_ctr_data', ([], {}), '()\n', (1057, 1059), False, 'from applications.javelin.ctr_data import ctr_enabled, get_ctr_data\n'), ((1815, 1834), 'StringIO.StringIO', 'StringIO.StringIO', ([], {}), '()\n', (1832, 1834), False, 'import StringIO\n'), ((1843, 1974), 'reportlab.platypus.SimpleDocTemplate', 'SimpleDocTemplate', (['io'], {'pagesize': 'letter', 'rightMargin': '(0.18 * inch)', 'leftMargin': '(0.18 * inch)', 'topMargin': '(0.18 * inch)', 'bottomMargin': '(0)'}), '(io, pagesize=letter, rightMargin=0.18 * inch, leftMargin=\n 0.18 * inch, topMargin=0.18 * inch, bottomMargin=0)\n', (1860, 1974), False, 'from reportlab.platypus import SimpleDocTemplate, Paragraph, Table, TableStyle, Image, Spacer\n'), ((8138, 8155), 'gluon.contrib.simplejson.loads', 'json.loads', (['names'], {}), '(names)\n', (8148, 8155), True, 'from gluon.contrib import simplejson as json\n'), ((657, 693), 'gluon.storage.Storage', 'Storage', ([], {'value': '(0)', 'label': '"""Call Slips"""'}), "(value=0, label='Call Slips')\n", (664, 693), False, 'from gluon.storage import Storage\n'), ((706, 749), 'gluon.storage.Storage', 'Storage', ([], {'value': '(1)', 'label': '"""Attendance Sheets"""'}), "(value=1, label='Attendance Sheets')\n", (713, 749), False, 'from gluon.storage import Storage\n'), ((761, 795), 'gluon.storage.Storage', 'Storage', ([], {'value': '(2)', 'label': '"""Nametags"""'}), "(value=2, label='Nametags')\n", (768, 795), False, 'from gluon.storage import Storage\n'), ((2030, 2046), 'gluon.contrib.simplejson.loads', 'json.loads', (['data'], {}), '(data)\n', (2040, 2046), True, 'from gluon.contrib import simplejson as json\n'), ((3515, 3565), 'reportlab.lib.styles.ParagraphStyle', 'ParagraphStyle', ([], {'name': '"""Center"""', 'alignment': 'TA_CENTER'}), "(name='Center', alignment=TA_CENTER)\n", (3529, 3565), False, 'from reportlab.lib.styles import ParagraphStyle\n'), ((3580, 3626), 
'reportlab.lib.styles.ParagraphStyle', 'ParagraphStyle', ([], {'name': '"""Left"""', 'alignment': 'TA_LEFT'}), "(name='Left', alignment=TA_LEFT)\n", (3594, 3626), False, 'from reportlab.lib.styles import ParagraphStyle\n'), ((3642, 3748), 'reportlab.platypus.TableStyle', 'TableStyle', (["[('VALIGN', (0, 0), (-1, -1), 'TOP'), ('INNERGRID', (0, 0), (-1, -1), 0.25,\n colors.black)]"], {}), "([('VALIGN', (0, 0), (-1, -1), 'TOP'), ('INNERGRID', (0, 0), (-1,\n -1), 0.25, colors.black)])\n", (3652, 3748), False, 'from reportlab.platypus import SimpleDocTemplate, Paragraph, Table, TableStyle, Image, Spacer\n'), ((10246, 10273), 'StringIO.StringIO', 'StringIO.StringIO', (['csv_file'], {}), '(csv_file)\n', (10263, 10273), False, 'import StringIO\n'), ((7779, 7793), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (7791, 7793), False, 'from datetime import datetime\n'), ((7870, 7881), 'time.time', 'time.time', ([], {}), '()\n', (7879, 7881), False, 'import time\n'), ((10289, 10335), 'csv.reader', 'csv.reader', (['file_string'], {'skipinitialspace': '(True)'}), '(file_string, skipinitialspace=True)\n', (10299, 10335), False, 'import csv\n'), ((5756, 5806), 'reportlab.lib.styles.ParagraphStyle', 'ParagraphStyle', ([], {'name': '"""Center"""', 'alignment': 'TA_CENTER'}), "(name='Center', alignment=TA_CENTER)\n", (5770, 5806), False, 'from reportlab.lib.styles import ParagraphStyle\n'), ((5821, 5867), 'reportlab.lib.styles.ParagraphStyle', 'ParagraphStyle', ([], {'name': '"""Left"""', 'alignment': 'TA_LEFT'}), "(name='Left', alignment=TA_LEFT)\n", (5835, 5867), False, 'from reportlab.lib.styles import ParagraphStyle\n'), ((5883, 5933), 'reportlab.platypus.TableStyle', 'TableStyle', (["[('VALIGN', (0, -1), (-1, -1), 'TOP')]"], {}), "([('VALIGN', (0, -1), (-1, -1), 'TOP')])\n", (5893, 5933), False, 'from reportlab.platypus import SimpleDocTemplate, Paragraph, Table, TableStyle, Image, Spacer\n'), ((12519, 12546), 'StringIO.StringIO', 'StringIO.StringIO', (['csv_file'], 
{}), '(csv_file)\n', (12536, 12546), False, 'import StringIO\n'), ((4643, 4654), 'reportlab.platypus.flowables.PageBreak', 'PageBreak', ([], {}), '()\n', (4652, 4654), False, 'from reportlab.platypus.flowables import PageBreak\n'), ((6541, 6554), 'reportlab.platypus.Spacer', 'Spacer', (['(1)', '(11)'], {}), '(1, 11)\n', (6547, 6554), False, 'from reportlab.platypus import SimpleDocTemplate, Paragraph, Table, TableStyle, Image, Spacer\n'), ((6705, 6718), 'reportlab.platypus.Spacer', 'Spacer', (['(1)', '(11)'], {}), '(1, 11)\n', (6711, 6718), False, 'from reportlab.platypus import SimpleDocTemplate, Paragraph, Table, TableStyle, Image, Spacer\n'), ((6868, 6881), 'reportlab.platypus.Spacer', 'Spacer', (['(1)', '(20)'], {}), '(1, 20)\n', (6874, 6881), False, 'from reportlab.platypus import SimpleDocTemplate, Paragraph, Table, TableStyle, Image, Spacer\n'), ((7656, 7671), 'reportlab.platypus.Spacer', 'Spacer', (['(14)', '(144)'], {}), '(14, 144)\n', (7662, 7671), False, 'from reportlab.platypus import SimpleDocTemplate, Paragraph, Table, TableStyle, Image, Spacer\n'), ((6378, 6389), 'reportlab.platypus.flowables.PageBreak', 'PageBreak', ([], {}), '()\n', (6387, 6389), False, 'from reportlab.platypus.flowables import PageBreak\n')] |
import unittest
import sys
#sys.path.append('../')
from models.Individual import Individual
from models.Family import Family
from models.Gedcom import Gedcom
class TestSprint1(unittest.TestCase):
def setUp(self):
SUPPORT_TAGS = {"INDI", "NAME", "SEX", "BIRT", "DEAT", "FAMC", "FAMS", "FAM", "MARR", "HUSB", "WIFE", "CHIL",
"DIV", "DATE", "HEAD", "TRLR", "NOTE"}
self.G1 = Gedcom('../testing_files/right.ged', SUPPORT_TAGS)
self.G2 = Gedcom('../testing_files/wrong.ged', SUPPORT_TAGS)
self.ind_1 = Individual("01")
self.ind_2 = Individual("02")
self.ind_3 = Individual("03")
self.fam_1 = Family("01")
self.fam_2 = Family("02")
def tearDown(self):
self.ind_1 = Individual("01")
self.ind_2 = Individual("02")
self.ind_3 = Individual("03")
self.fam_1 = Family("01")
self.fam_2 = Family("02")
def test_US11_no_bigamy(self):
self.ind_1.set_birthDate(["09", "APR", "1997"])
self.ind_2.set_birthDate(["19", "DEC", "1997"])
self.ind_1.add_to_family(self.fam_1)
self.fam_1.set_marriedDate(["01", "JUN", "2017"])
self.assertTrue(self.ind_1.no_bigamy())
self.fam_2.set_marriedDate(["05", "JUN", "2016"])
self.ind_1.add_to_family(self.fam_2)
self.assertFalse(self.ind_1.no_bigamy())
self.fam_2.set_divorcedDate(("01", "JAN", "2017"))
self.assertTrue(self.ind_1.no_bigamy())
self.fam_2.set_divorcedDate(("01", "AUG", "2017"))
self.assertFalse(self.ind_1.no_bigamy())
self.fam_1.set_divorcedDate(("01", "DEC", "2018"))
self.assertFalse(self.ind_1.no_bigamy())
self.fam_2.set_divorcedDate(("01", "JAN", "2017"))
self.assertTrue(self.ind_1.no_bigamy())
def test_US02_birth_before_marriage(self):
self.ind_1.set_birthDate(["09", "APR", "1997"])
self.ind_1.add_to_family(self.fam_1)
self.fam_1.set_marriedDate(["01", "JUN", "2018"])
self.assertTrue(self.ind_1.birth_before_marriage())
def test_US03_birth_before_death(self):
self.ind_1.set_birthDate(["09", "APR", "1997"])
self.ind_1.set_deathDate(["01", "JUN", "2018"])
self.assertTrue(self.ind_1.birth_before_death())
def test_US07_less_then_150_years_old(self):
self.ind_1.set_birthDate(["09", "APR", "1997"])
self.assertTrue(self.ind_1.less_then_150_years_old())
self.ind_2.set_birthDate(["09", "APR", "997"])
self.assertFalse(self.ind_2.less_then_150_years_old())
def test_US04_marriage_before_divorce(self):
t1 = Family("F01")
male1 = Individual("P01")
female1 = Individual("P02")
male1.set_deathDate(['8', 'SEP', '2010'])
female1.set_deathDate(['8', 'SEP', '2011'])
t1.set_husband(male1)
t1.set_wife(female1)
t1.set_marriedDate(['8', 'SEP', '2000'])
t1.set_divorcedDate(['8', 'SEP', '2009'])
# ---------------------------------
t2 = Family("F02")
male2 = Individual("P03")
female2 = Individual("P04")
male2.set_deathDate(['8', 'SEP', '2012'])
female2.set_deathDate(['8', 'SEP', '2013'])
t2.set_husband(male2)
t2.set_wife(female2)
t2.set_marriedDate(['8', 'SEP', '2005'])
t2.set_divorcedDate(['8', 'SEP', '2004'])
# ---------------------------------
assert t1.marriage_before_divorce() == True
assert t2.marriage_before_divorce() == False
def test_US05_marriage_before_death(self):
t1 = Family("F01")
male1 = Individual("P01")
female1 = Individual("P02")
male1.set_deathDate(['8', 'SEP', '2010'])
female1.set_deathDate(['8', 'SEP', '2011'])
t1.set_husband(male1)
t1.set_wife(female1)
t1.set_marriedDate(['8', 'SEP', '2000'])
# ---------------------------------
t2 = Family("F02")
male2 = Individual("P03")
female2 = Individual("P04")
male2.set_deathDate(['8', 'SEP', '1999'])
female2.set_deathDate(['9', 'SEP', '2011'])
t2.set_husband(male2)
t2.set_wife(female2)
t2.set_marriedDate(['8', 'SEP', '2000'])
# ---------------------------------
t3 = Family("F03")
male3 = Individual("P05")
female3 = Individual("P06")
male3.set_deathDate(['8', 'SEP', '2003'])
female3.set_deathDate(['9', 'SEP', '1998'])
t3.set_husband(male3)
t3.set_wife(female3)
t3.set_marriedDate(['8', 'SEP', '2000'])
# ---------------------------------
t4 = Family("F04")
male4 = Individual("P07")
female4 = Individual("P08")
male4.set_deathDate(['8', 'SEP', '1998'])
female4.set_deathDate(['9', 'SEP', '1999'])
t4.set_husband(male4)
t4.set_wife(female4)
t4.set_marriedDate(['8', 'SEP', '2000'])
# ---------------------------------
t5 = Family("F05")
male5 = Individual("P09")
female5 = Individual("P10")
male5.set_deathDate(['8', 'SEP', '2009'])
female5.set_deathDate(['8', 'SEP', '2009'])
t5.set_husband(male5)
t5.set_wife(female5)
t5.set_marriedDate(['8', 'SEP', '2009'])
# ---------------------------------
assert t1.marriage_before_death() == True
assert t2.marriage_before_death() == False
assert t3.marriage_before_death() == False
assert t4.marriage_before_death() == False
assert t5.marriage_before_death() == True
def test_US06_divorse_before_death(self):
t1 = Family("F01")
male1 = Individual("P01")
female1 = Individual("P02")
male1.set_deathDate(["5", "MAR", "2000"])
female1.set_deathDate(["9", "APR", "2002"])
t1.set_husband(male1)
t1.set_wife(female1)
t1.set_divorcedDate(["1", "JAN", "1999"])
# ---------------------------------
t2 = Family("F02")
male2 = Individual("P03")
female2 = Individual("P04")
male2.set_deathDate(["5", "MAR", "2000"])
female2.set_deathDate(["9", "APR", "2002"])
t2.set_husband(male2)
t2.set_wife(female2)
t2.set_divorcedDate(["1", "JAN", "2003"])
# ---------------------------------
assert t1.divorce_before_death() == True
assert t2.divorce_before_death() == False
def test_US08_birth_before_marriage_of_parents(self):
t1 = Family("F01")
male1 = Individual("P01")
female1 = Individual("P02")
child1 = Individual("P03")
t1.add_child(child1)
t1.set_marriedDate(['8', 'SEP', '2000'])
child1.set_birthDate(["6", "JAN", "1998"])
t1.set_husband(male1)
t1.set_wife(female1)
# ---------------------------------
t2 = Family("F02")
male2 = Individual("P04")
female2 = Individual("P05")
child2 = Individual("P06")
t2.add_child(child2)
t2.set_marriedDate(['8', 'SEP', '2000'])
child2.set_birthDate(["6", "JAN", "2001"])
t2.set_husband(male2)
t2.set_wife(female2)
# ---------------------------------
t3 = Family("F03")
male3 = Individual("P07")
female3 = Individual("P08")
child3 = Individual("P09")
t3.add_child(child3)
t3.set_marriedDate(['6', 'MAR', '2000'])
child3.set_birthDate(["6", "MAR", "2000"])
t3.set_husband(male3)
t3.set_wife(female3)
# ---------------------------------
assert t1.birth_before_marriage_of_parents() == False
assert t2.birth_before_marriage_of_parents() == True
assert t3.birth_before_marriage_of_parents() == False
def test_US09_birth_before_death_of_parent(self):
t1 = Family("F01")
male1 = Individual("P01")
female1 = Individual("P02")
child1 = Individual("P03")
t1.add_child(child1)
male1.set_deathDate(["5", "MAR", "2000"])
female1.set_deathDate(["9", "APR", "2002"])
child1.set_birthDate(["6", "JAN", "1998"])
t1.set_husband(male1)
t1.set_wife(female1)
# ---------------------------------
t2 = Family("F02")
male2 = Individual("P04")
female2 = Individual("P05")
child2 = Individual("P06")
t2.add_child(child2)
male2.set_deathDate(["5", "MAR", "2000"])
female2.set_deathDate(["9", "APR", "2002"])
child2.set_birthDate(["6", "JAN", "2001"])
t2.set_husband(male2)
t2.set_wife(female2)
# ---------------------------------
t3 = Family("F03")
male3 = Individual("P07")
female3 = Individual("P08")
child3 = Individual("P09")
t3.add_child(child3)
male3.set_deathDate(["5", "MAR", "2000"])
female3.set_deathDate(["9", "APR", "2002"])
child3.set_birthDate(["6", "MAR", "2000"])
t3.set_husband(male3)
t3.set_wife(female3)
# ---------------------------------
assert t1.birth_before_death_of_parents() == True
assert t2.birth_before_death_of_parents() == False
assert t3.birth_before_death_of_parents() == True
def test_US10_marriage_after_14(self):
t1 = Family("F01")
male1 = Individual("P01")
female1 = Individual("P02")
male1.set_birthDate(['8', 'SEP', '2000'])
female1.set_birthDate(['8', 'SEP', '2000'])
t1.set_husband(male1)
t1.set_wife(female1)
t1.set_marriedDate(['8', 'SEP', '2014'])
# --------------------------------------------------
t2 = Family("F02")
male2 = Individual("P03")
female2 = Individual("P04")
male2.set_birthDate(['7', 'SEP', '2000'])
female2.set_birthDate(['8', 'SEP', '2000'])
t2.set_husband(male2)
t2.set_wife(female2)
t2.set_marriedDate(['8', 'SEP', '2014'])
# --------------------------------------------------
t3 = Family("F03")
male3 = Individual("P05")
female3 = Individual("P06")
male3.set_birthDate(['8', 'SEP', '2000'])
female3.set_birthDate(['7', 'SEP', '2000'])
t3.set_husband(male3)
t3.set_wife(female3)
t3.set_marriedDate(['8', 'SEP', '2014'])
# --------------------------------------------------
t4 = Family("F04")
male4 = Individual("P07")
female4 = Individual("P08")
male4.set_birthDate(['1', 'SEP', '1990'])
female4.set_birthDate(['2', 'SEP', '1990'])
t4.set_husband(male4)
t4.set_wife(female4)
t4.set_marriedDate(['8', 'SEP', '2014'])
# --------------------------------------------------
t5 = Family("F05")
male5 = Individual("P09")
female5 = Individual("P10")
male5.set_birthDate(['09', 'APR', '1997'])
female5.set_birthDate(['19', 'DEC', '1997'])
t5.set_husband(male5)
t5.set_wife(female5)
t5.set_marriedDate(['1', 'JUN', '2007'])
# --------------------------------------------------
assert t1.marriage_after_14() == False
assert t2.marriage_after_14() == False
assert t3.marriage_after_14() == False
assert t4.marriage_after_14() == True
assert t5.marriage_after_14() == False
def test_US13_siblings_spacing(self):
t1 = Family("t1")
t2 = Family("t2")
t3 = Family("t3")
t4 = Family("t4")
t5 = Family("t5")
t6 = Family("t6")
p1 = Individual("p1")
p1.set_birthDate(("1", "JAN", "1990"))
p2 = Individual("p2")
p2.set_birthDate(("1", "JAN", "1990"))
p3 = Individual("p3")
p3.set_birthDate(("1", "SEP", "1990"))
p4 = Individual("p4")
p4.set_birthDate(("2", "JAN", "1990"))
p5 = Individual("p5")
p5.set_birthDate(("3", "JAN", "1990"))
p6 = Individual("p6")
p6.set_birthDate(("30", "MAY", "1990"))
# --------------------------------------------------
t1.add_child(p1)
t1.add_child(p2)
t2.add_child(p1)
t2.add_child(p3)
t3.add_child(p1)
t3.add_child(p4)
t4.add_child(p1)
t4.add_child(p5)
t5.add_child(p1)
t5.add_child(p6)
t6.add_child(p1)
t6.add_child(p3)
t6.add_child(p6)
# --------------------------------------------------
assert t1.siblings_spacing() == True
assert t2.siblings_spacing() == True
assert t3.siblings_spacing() == True
assert t4.siblings_spacing() == False
assert t5.siblings_spacing() == False
assert t6.siblings_spacing() == False
def test_US14_multiple_births_lessOrEqual_than_5(self):
    """US14: no more than five siblings of a family should be born
    at (nearly) the same time.
    """
    dates = [
        ("1", "JAN", "1990"), ("1", "JAN", "1990"), ("1", "JAN", "1990"),
        ("3", "JAN", "1990"), ("2", "JAN", "1990"), ("30", "MAY", "1990"),
        ("2", "JAN", "1990"), ("2", "JAN", "1990"), ("2", "SEP", "1990"),
        ("2", "SEP", "1990"), ("3", "SEP", "1990"), ("3", "SEP", "1990"),
        ("3", "SEP", "1990"),
    ]
    # people[k] is individual "p{k+1}" with dates[k] as birth date.
    people = []
    for idx, date in enumerate(dates, start=1):
        person = Individual("p%d" % idx)
        person.set_birthDate(date)
        people.append(person)
    fam_a = Family("t1")
    fam_b = Family("t2")
    fam_c = Family("t3")
    for child in people[:8]:      # p1 .. p8
        fam_a.add_child(child)
    for child in people[:6]:      # p1 .. p6
        fam_b.add_child(child)
    for child in people[2:]:      # p3 .. p13
        fam_c.add_child(child)
    assert fam_a.multiple_births_lessOrEqual_than_5() == False
    assert fam_b.multiple_births_lessOrEqual_than_5() == True
    assert fam_c.multiple_births_lessOrEqual_than_5() == False
def test_US12_parents_not_too_old(self):
    """US12: parents should not be too much older than their children."""
    ok_family = Family("t1")
    bad_family = Family("t2")
    # (family, wife id, husband id, child id, parents' birth, child's birth)
    cases = [
        (ok_family, "p1", "p2", "p3", ["1", "JAN", "1990"], ["1", "JAN", "2010"]),
        (bad_family, "p4", "p5", "p6", ["1", "JAN", "1790"], ["1", "JAN", "2000"]),
    ]
    for fam, wid, hid, cid, parent_birth, child_birth in cases:
        wife = Individual(wid)
        husband = Individual(hid)
        child = Individual(cid)
        wife.set_birthDate(parent_birth)
        husband.set_birthDate(parent_birth)
        child.set_birthDate(child_birth)
        fam.set_wife(wife)
        fam.set_husband(husband)
        fam.add_child(child)
    # 20-year gap is fine; a 210-year gap is not.
    assert ok_family.parents_not_too_old() == True
    assert bad_family.parents_not_too_old() == False
def test_US15_Fewer_than_15_siblings(self):
    """US15: a family should contain fewer than 15 children."""
    small_family = Family("t1")
    for i in range(1, 13):        # p1 .. p12 -> 12 children, allowed
        small_family.add_child(Individual("p%d" % i))
    large_family = Family("t2")
    for i in range(13, 29):       # p13 .. p28 -> 16 children, too many
        large_family.add_child(Individual("p%d" % i))
    assert small_family.fewer_than_15_siblings() == True
    assert large_family.fewer_than_15_siblings() == False
def test_US21_Correct_Gender_For_Role(self):
    """US21: husband must be male and wife must be female."""
    correct_fam = Family("t1")
    swapped_fam = Family("t2")
    wife_ok = Individual("p1")
    husb_ok = Individual("p2")
    wife_bad = Individual("p3")
    husb_bad = Individual("p4")
    correct_fam.set_wife(wife_ok)
    correct_fam.set_husband(husb_ok)
    swapped_fam.set_wife(wife_bad)
    swapped_fam.set_husband(husb_bad)
    wife_ok.set_gender('F')
    husb_ok.set_gender('M')
    # Genders deliberately swapped on the second couple.
    husb_bad.set_gender('F')
    wife_bad.set_gender('M')
    assert correct_fam.correct_gender_for_role() == True
    assert swapped_fam.correct_gender_for_role() == False
def test_US24_Unique_families_by_spouses(self):
    """US24: no more than one family shares the same spouse pairing."""
    tags = {"INDI", "NAME", "SEX", "BIRT", "DEAT", "FAMC", "FAMS", "FAM",
            "MARR", "HUSB", "WIFE", "CHIL", "DIV", "DATE", "HEAD", "TRLR",
            "NOTE"}
    self.G1 = Gedcom('testing_files/Jiashu_Wang.ged', tags)
    gedcoms = [
        self.G1,
        Gedcom('testing_files/MichealFahimGEDCOM.ged', tags),
        Gedcom('testing_files/mock-family.ged', tags),
    ]
    for ged in gedcoms:
        assert ged.unique_families_by_spouses() == True
def test_US25_Unique_first_names_in_families(self):
    """US25: first names within one family are unique."""
    tags = {"INDI", "NAME", "SEX", "BIRT", "DEAT", "FAMC", "FAMS", "FAM",
            "MARR", "HUSB", "WIFE", "CHIL", "DIV", "DATE", "HEAD", "TRLR",
            "NOTE"}
    self.G1 = Gedcom('testing_files/Jiashu_Wang.ged', tags)
    gedcoms = [
        self.G1,
        Gedcom('testing_files/MichealFahimGEDCOM.ged', tags),
        Gedcom('testing_files/mock-family.ged', tags),
    ]
    for ged in gedcoms:
        assert ged.unique_first_names_in_families() == True
def test_US22_UniqueId(self):
    """US22: all individual and family IDs are unique.

    Intentionally empty: uniqueness is validated in the main function,
    so there is nothing to assert here.
    """
    pass
    # finished in the main function (validation lives there, not here)
def test_US23_unique_name_and_birth_date(self):
    """US23: no two individuals share both a name and a birth date."""
    tags = {"INDI", "NAME", "SEX", "BIRT", "DEAT", "FAMC", "FAMS", "FAM",
            "MARR", "HUSB", "WIFE", "CHIL", "DIV", "DATE", "HEAD", "TRLR",
            "NOTE"}
    self.G1 = Gedcom('testing_files/Jiashu_Wang.ged', tags)
    gedcoms = [
        self.G1,
        Gedcom('testing_files/MichealFahimGEDCOM.ged', tags),
        Gedcom('testing_files/mock-family.ged', tags),
    ]
    for ged in gedcoms:
        assert ged.unique_name_and_birth_date() == True
def test_US18_Siblings_should_not_marry(self):
    """US18: spouses sharing a parent family are siblings and must not marry."""
    fams = {fid: Family(fid) for fid in ("t1", "t2", "t3", "t4", "t5", "t6")}
    folk = {pid: Individual(pid) for pid in ("p1", "p2", "p3", "p4")}
    # Couple 1: p1 and p2 come from different parent families -> OK.
    fams["t1"].set_husband(folk["p1"])
    fams["t1"].set_wife(folk["p2"])
    folk["p1"].set_parentFamily(fams["t2"])
    folk["p2"].set_parentFamily(fams["t3"])
    # Couple 2: p3 and p4 share parent family t5 -> siblings married.
    fams["t4"].set_husband(folk["p3"])
    fams["t4"].set_wife(folk["p4"])
    folk["p3"].set_parentFamily(fams["t5"])
    folk["p4"].set_parentFamily(fams["t5"])
    assert fams["t1"].siblings_should_not_marry() == True
    assert fams["t4"].siblings_should_not_marry() == False
def test_US19_First_cousins_should_not_marry(self):
    """US19: first cousins should not marry one another."""
    fams = {i: Family("t%d" % i) for i in range(1, 10)}
    folk = {i: Individual("p%d" % i) for i in range(1, 9)}
    # p3 is a child of t1; its spouses p8 and p7 descend from the
    # unrelated families t2 and t3, so no cousin marriage exists.
    folk[3].set_parentFamily(fams[1])
    fams[1].set_husband(folk[8])
    fams[1].set_wife(folk[7])
    folk[8].set_parentFamily(fams[2])
    folk[7].set_parentFamily(fams[3])
    fams[2].add_child(folk[8])
    fams[3].add_child(folk[7])
    assert folk[3].first_cousins_should_not_marry() == True
    # NOTE(review): the negative case was already disabled in the original:
    # assert p4.first_cousins_should_not_marry() == True
def test_US16_Male_last_names(self):
    """US16: all male members of a family share the same last name."""
    fams = {i: Family("t%d" % i) for i in range(1, 11)}
    men = {}
    for i in range(1, 11):
        man = Individual("p%d" % i)
        man.set_gender("M")
        man.set_name("<NAME>")   # anonymised placeholder from the original
        men[i] = man
    # Family i is headed by man i; children link the two male lines
    # t1->t2/t3->t4/t5 and t6->t7/t8->t9/t10.
    for i in range(1, 11):
        fams[i].set_husband(men[i])
    children_of = {1: (2, 3), 2: (4,), 3: (5,), 6: (7, 8), 7: (9,), 8: (10,)}
    for fid, kid_ids in children_of.items():
        for pid in kid_ids:
            fams[fid].add_child(men[pid])
    assert fams[3].male_last_names() == True
    assert fams[8].male_last_names() == False
def test_US17_No_marriages_to_descendants(self):
    """US17: nobody may marry one of their own descendants.

    Only the fixture is built here; the assertions were disabled in the
    original and stay disabled to keep behavior identical.
    """
    fams = {i: Family("t%d" % i) for i in range(1, 5)}
    folk = {i: Individual("p%d" % i) for i in range(1, 10)}
    # t1: p1 x p2 -> child p3
    fams[1].set_husband(folk[1])
    fams[1].set_wife(folk[2])
    fams[1].add_child(folk[3])
    # t2: p3 x p4 -> child p5
    fams[2].set_wife(folk[3])
    fams[2].set_husband(folk[4])
    fams[2].add_child(folk[5])
    # t3: p6 x p7 -> child p8
    fams[3].set_husband(folk[6])
    fams[3].set_wife(folk[7])
    fams[3].add_child(folk[8])
    # t4: p6 marries his own child p8 -> violation fixture
    fams[4].set_husband(folk[6])
    fams[4].set_wife(folk[8])
    fams[4].add_child(folk[9])
    # assert p3.no_marriages_to_descendants() == True
    # assert p6.no_marriages_to_descendants() == False
    # assert p8.no_marriages_to_descendants() == True
def test_US27_eInclude_individual_ags(self):
    """US27: individual listings include each person's age.

    The assertions were disabled in the original (dead string literal);
    they stay disabled here to keep behavior identical.
    """
    tags = {"INDI", "NAME", "SEX", "BIRT", "DEAT", "FAMC", "FAMS", "FAM",
            "MARR", "HUSB", "WIFE", "CHIL", "DIV", "DATE", "HEAD", "TRLR",
            "NOTE"}
    self.G1 = Gedcom('testing_files/Jiashu_Wang.ged', tags)
    G2 = Gedcom('testing_files/MichealFahimGEDCOM.ged', tags)
    G3 = Gedcom('testing_files/mock-family.ged', tags)
    # assert self.G1.include_individual_ages() == True
    # assert G2.include_individual_ages() == True
    # assert G3.include_individual_ages() == True
def test_US28_Order_siblings_by_age(self):
    """US28: siblings are listed oldest first."""
    # NOTE(review): birth dates here are (year, month, day) int tuples,
    # unlike the string tuples used by most other tests in this file.
    birth = {
        1: (1990, 4, 1), 2: (1990, 1, 1), 3: (1990, 9, 1), 4: (1987, 1, 1),
        5: (2019, 1, 1), 6: (2017, 5, 30), 7: (2018, 3, 30), 8: (2019, 8, 30),
    }
    kids = {}
    for num, date in birth.items():
        kid = Individual("p%d" % num)
        kid.set_birthDate(date)
        kids[num] = kid
    first_fam = Family("t1")
    second_fam = Family("t2")
    for num in (1, 2, 3, 4, 5, 6):
        first_fam.add_child(kids[num])
    for num in (1, 2, 3, 4, 7, 8):
        second_fam.add_child(kids[num])
    assert first_fam.order_siblings_by_age() == [
        kids[4], kids[2], kids[1], kids[3], kids[6], kids[5]]
    assert second_fam.order_siblings_by_age() == [
        kids[4], kids[2], kids[1], kids[3], kids[7], kids[8]]
def test_US20_Aunts_and_uncles(self):
    """US20: aunts and uncles should not marry their nieces or nephews."""
    fams = {i: Family("t%d" % i) for i in range(1, 13)}
    folk = {i: Individual("p%d" % i) for i in range(1, 12)}
    # p11 is a child of t1; parents p1/p2 each have two siblings
    # (p7/p8 via t2 and p9/p10 via t3), none married to p11.
    folk[11].set_parentFamily(fams[1])
    fams[1].set_husband(folk[1])
    fams[1].set_wife(folk[2])
    folk[1].set_parentFamily(fams[2])
    folk[2].set_parentFamily(fams[3])
    fams[2].set_children([folk[1], folk[7], folk[8]])
    fams[3].set_children([folk[2], folk[9], folk[10]])
    assert folk[11].aunts_and_uncles() == True
    # NOTE(review): the negative case was already disabled in the original:
    # assert p10.aunts_and_uncles() == False
def test_US26_Corresponding_entries(self):
    """US26: individual and family records cross-reference consistently."""
    tags = {"INDI", "NAME", "SEX", "BIRT", "DEAT", "FAMC", "FAMS", "FAM",
            "MARR", "HUSB", "WIFE", "CHIL", "DIV", "DATE", "HEAD", "TRLR",
            "NOTE"}
    self.G1 = Gedcom('testing_files/Jiashu_Wang.ged', tags)
    gedcoms = [
        self.G1,
        Gedcom('testing_files/MichealFahimGEDCOM.ged', tags),
        Gedcom('testing_files/mock-family.ged', tags),
    ]
    for ged in gedcoms:
        assert ged.corresponding_entries() == True
def test_US29_list_deceased(self):
    """US29: list all deceased individuals in the GEDCOM file.

    Fix: Python sequences have no ``.len()`` method — ``x.len()`` raises
    AttributeError; the built-in ``len(x)`` is used instead.
    """
    deceased = self.G1.listDeceased()
    self.assertEqual(len(deceased), 5)
    self.assertNotEqual(len(deceased), 3)
    # Placeholder list of known deceased people; populate to pin membership.
    deceasedPeople = []
    for indi in deceasedPeople:
        self.assertIn(indi, deceased)
#List all living married people in a GEDCOM file
def test_US30_list_living_married(self):
    """US30: list all living married people in the GEDCOM file.

    Fixes: ``.len()`` is not a Python method (use built-in ``len``), and
    the loop called ``listLivingmarried`` (lowercase m), inconsistent with
    the ``listLivingMarried`` used two lines above.
    """
    married = self.G1.listLivingMarried()
    self.assertEqual(len(married), 5)
    self.assertNotEqual(len(married), 3)
    # Placeholder list of known married people; populate to pin membership.
    marriedPeople = []
    for indi in marriedPeople:
        self.assertIn(indi, married)
#List all living people over 30 who have never been married in a GEDCOM file
def test_US31_list_living_single(self):
    """US31: list living people over 30 who have never been married.

    Fix: ``len(x)`` replaces the invalid ``x.len()`` calls.
    """
    singles = self.G1.listLivingSingle()
    self.assertEqual(len(singles), 5)
    self.assertNotEqual(len(singles), 3)
    # Placeholder list of known singles; populate to pin membership.
    singlePeople = []
    for indi in singlePeople:
        self.assertIn(indi, singles)
#List all multiple births in a GEDCOM file
def test_US32_list_multiple_births(self):
    """US32: list all multiple births in the GEDCOM file.

    Fix: ``len(x)`` replaces the invalid ``x.len()`` call.
    """
    births = self.G1.listMultipleBirths()
    self.assertEqual(len(births), 4)
    # Placeholder list of known multiple births; populate to pin membership.
    MultipleBirths = []
    for birt in MultipleBirths:
        self.assertIn(birt, births)
#List all orphaned children (both parents dead and child < 18 years old) in a GEDCOM file
def test_US33_list_orphans(self):
    """US33: list all orphans (both parents dead, child under 18).

    Fix: ``len(x)`` replaces the invalid ``x.len()`` call.
    """
    orphans = self.G1.listOrphans()
    self.assertEqual(len(orphans), 4)
    # Placeholder list of known orphans; populate to pin membership.
    OrphansPeople = []
    for indi in OrphansPeople:
        self.assertIn(indi, orphans)
#List all couples who were married when the older spouse was more than twice as old as the younger spouse
def test_US34_list_large_age_differences(self):
    """US34: list couples where the older spouse was more than twice
    as old as the younger spouse when they married.

    Fix: ``len(x)`` replaces the invalid ``x.len()`` call.
    """
    couples = self.G1.listLargeAgeDifferences()
    self.assertEqual(len(couples), 4)
    # Placeholder list of known couples; populate to pin membership.
    ageDifferences = []
    for birt in ageDifferences:
        self.assertIn(birt, couples)
#List all people in a GEDCOM file who were born in the last 30 days
def test_US35_list_recent_births(self):
    """US35: list all people born in the last 30 days.

    Fix: ``len(x)`` replaces the invalid ``x.len()`` calls.
    """
    recent = self.G1.listRecentBirths()
    self.assertEqual(len(recent), 5)
    self.assertNotEqual(len(recent), 3)
    # Placeholder list of recently born people; populate to pin membership.
    bornPeople = []
    for indi in bornPeople:
        self.assertIn(indi, recent)
#list all people in a GEDCOM file who died in the last 30 days
def test_US36_ListRecentDeaths(self):
    """US36: list all people who died in the last 30 days.

    Fix: ``len(x)`` replaces the invalid ``x.len()`` calls; the local
    ``deceasedProple`` typo is corrected.
    """
    recent = self.G1.listRecentDeaths()
    self.assertEqual(len(recent), 5)
    self.assertNotEqual(len(recent), 3)
    # Manually input deceased people and append to the list to pin membership.
    deceasedPeople = []
    for indi in deceasedPeople:
        self.assertIn(indi, recent)
#list all living spouses and descendants of people in the GEDCOM who died in the last 30 days
def test_US37_listRecentSurvivors(self):
    """US37: list living spouses/descendants of people who died in the
    last 30 days.

    Fix: ``len(x)`` replaces the invalid ``x.len()`` calls.
    NOTE(review): ``listRecentSurviors`` (missing 'v') appears to be the
    actual project method name — used consistently in the original — so
    it is kept as-is; confirm against the Gedcom class.
    """
    survivors = self.G1.listRecentSurviors()
    self.assertEqual(len(survivors), 7)
    self.assertNotEqual(len(survivors), 8)
    # Manually input survivors of the deceased to pin membership.
    survivingRelatives = []
    for indi in survivingRelatives:
        self.assertIn(indi, survivors)
#list all living people in a GEDCOM file whose birthdays occur in the next 30 days
def test_US38_listUpcomingBirthdays(self):
    """US38: list living people whose birthdays occur in the next 30 days.

    Fix: ``len(x)`` replaces the invalid ``x.len()`` call.
    """
    upcoming = self.G1.listUpcomingBirthdays()
    self.assertEqual(len(upcoming), 6)
    # Manually input people with upcoming birthdays to pin membership.
    birthdayPeople = []
    for indi in birthdayPeople:
        self.assertIn(indi, upcoming)
# list all living people in a GEDCOM file whose marriage anniversaries occur in the next 30 days
def test_US39_UpcomingAnniversaries(self):
    """US39: list living couples whose marriage anniversaries occur in
    the next 30 days.

    Fix: ``len(x)`` replaces the invalid ``x.len()`` call.
    """
    upcoming = self.G1.upcomingAnniversaries()
    self.assertEqual(len(upcoming), 4)
    # Manually input individuals with upcoming anniversaries to pin membership.
    AnniversaryIndi = []
    for indi in AnniversaryIndi:
        self.assertIn(indi, upcoming)
# list line numbers from GEDCOM source file when reporting errors
def test_US40_includeInputLineNumbers(self):
    """US40: errors report GEDCOM source line numbers.

    Fixes: ``len(x)`` replaces the invalid ``x.len()`` call, and
    ``assertEqual`` replaces ``assertTrue(a == b)`` so failures show a
    useful diff instead of just "False is not true".
    """
    lines_g1 = self.G1.includeInputLineNumbers()
    self.assertEqual(len(lines_g1), 2)
    self.assertEqual(lines_g1, ['20', '25'])
    self.assertEqual(self.G2.includeInputLineNumbers(), ['15'])
# Accept and use dates without days or without days and months
def test_US41_IncludePartialDates(self):
    """US41: dates missing a day, or a day and month, are still accepted."""
    self.assertTrue(self.G1.IncludePartialDates())
# All dates should be legitimate dates for the months specified(e.g. 2/30/2015 is not legitimate)
def test_US42_RejectIllegitimateDates(self):
    """US42: impossible calendar dates (e.g. 2/30/2015) are rejected.

    G1 contains only legitimate dates; G2 is expected to contain at
    least one illegitimate date.
    """
    self.assertTrue(self.G1.rejectIllegitimateDates())
    self.assertFalse(self.G2.rejectIllegitimateDates())
def testInputValidation(self):
pass
if __name__ == '__main__':
print('Running unit tests')
unittest.main() | [
"unittest.main",
"models.Family.Family",
"models.Gedcom.Gedcom",
"models.Individual.Individual"
] | [((35999, 36014), 'unittest.main', 'unittest.main', ([], {}), '()\n', (36012, 36014), False, 'import unittest\n'), ((420, 470), 'models.Gedcom.Gedcom', 'Gedcom', (['"""../testing_files/right.ged"""', 'SUPPORT_TAGS'], {}), "('../testing_files/right.ged', SUPPORT_TAGS)\n", (426, 470), False, 'from models.Gedcom import Gedcom\n'), ((489, 539), 'models.Gedcom.Gedcom', 'Gedcom', (['"""../testing_files/wrong.ged"""', 'SUPPORT_TAGS'], {}), "('../testing_files/wrong.ged', SUPPORT_TAGS)\n", (495, 539), False, 'from models.Gedcom import Gedcom\n'), ((562, 578), 'models.Individual.Individual', 'Individual', (['"""01"""'], {}), "('01')\n", (572, 578), False, 'from models.Individual import Individual\n'), ((600, 616), 'models.Individual.Individual', 'Individual', (['"""02"""'], {}), "('02')\n", (610, 616), False, 'from models.Individual import Individual\n'), ((638, 654), 'models.Individual.Individual', 'Individual', (['"""03"""'], {}), "('03')\n", (648, 654), False, 'from models.Individual import Individual\n'), ((677, 689), 'models.Family.Family', 'Family', (['"""01"""'], {}), "('01')\n", (683, 689), False, 'from models.Family import Family\n'), ((711, 723), 'models.Family.Family', 'Family', (['"""02"""'], {}), "('02')\n", (717, 723), False, 'from models.Family import Family\n'), ((770, 786), 'models.Individual.Individual', 'Individual', (['"""01"""'], {}), "('01')\n", (780, 786), False, 'from models.Individual import Individual\n'), ((808, 824), 'models.Individual.Individual', 'Individual', (['"""02"""'], {}), "('02')\n", (818, 824), False, 'from models.Individual import Individual\n'), ((846, 862), 'models.Individual.Individual', 'Individual', (['"""03"""'], {}), "('03')\n", (856, 862), False, 'from models.Individual import Individual\n'), ((885, 897), 'models.Family.Family', 'Family', (['"""01"""'], {}), "('01')\n", (891, 897), False, 'from models.Family import Family\n'), ((919, 931), 'models.Family.Family', 'Family', (['"""02"""'], {}), "('02')\n", (925, 931), False, 
'from models.Family import Family\n'), ((2651, 2664), 'models.Family.Family', 'Family', (['"""F01"""'], {}), "('F01')\n", (2657, 2664), False, 'from models.Family import Family\n'), ((2685, 2702), 'models.Individual.Individual', 'Individual', (['"""P01"""'], {}), "('P01')\n", (2695, 2702), False, 'from models.Individual import Individual\n'), ((2725, 2742), 'models.Individual.Individual', 'Individual', (['"""P02"""'], {}), "('P02')\n", (2735, 2742), False, 'from models.Individual import Individual\n'), ((3092, 3105), 'models.Family.Family', 'Family', (['"""F02"""'], {}), "('F02')\n", (3098, 3105), False, 'from models.Family import Family\n'), ((3126, 3143), 'models.Individual.Individual', 'Individual', (['"""P03"""'], {}), "('P03')\n", (3136, 3143), False, 'from models.Individual import Individual\n'), ((3166, 3183), 'models.Individual.Individual', 'Individual', (['"""P04"""'], {}), "('P04')\n", (3176, 3183), False, 'from models.Individual import Individual\n'), ((3694, 3707), 'models.Family.Family', 'Family', (['"""F01"""'], {}), "('F01')\n", (3700, 3707), False, 'from models.Family import Family\n'), ((3728, 3745), 'models.Individual.Individual', 'Individual', (['"""P01"""'], {}), "('P01')\n", (3738, 3745), False, 'from models.Individual import Individual\n'), ((3768, 3785), 'models.Individual.Individual', 'Individual', (['"""P02"""'], {}), "('P02')\n", (3778, 3785), False, 'from models.Individual import Individual\n'), ((4081, 4094), 'models.Family.Family', 'Family', (['"""F02"""'], {}), "('F02')\n", (4087, 4094), False, 'from models.Family import Family\n'), ((4115, 4132), 'models.Individual.Individual', 'Individual', (['"""P03"""'], {}), "('P03')\n", (4125, 4132), False, 'from models.Individual import Individual\n'), ((4155, 4172), 'models.Individual.Individual', 'Individual', (['"""P04"""'], {}), "('P04')\n", (4165, 4172), False, 'from models.Individual import Individual\n'), ((4468, 4481), 'models.Family.Family', 'Family', (['"""F03"""'], {}), "('F03')\n", 
(4474, 4481), False, 'from models.Family import Family\n'), ((4502, 4519), 'models.Individual.Individual', 'Individual', (['"""P05"""'], {}), "('P05')\n", (4512, 4519), False, 'from models.Individual import Individual\n'), ((4542, 4559), 'models.Individual.Individual', 'Individual', (['"""P06"""'], {}), "('P06')\n", (4552, 4559), False, 'from models.Individual import Individual\n'), ((4855, 4868), 'models.Family.Family', 'Family', (['"""F04"""'], {}), "('F04')\n", (4861, 4868), False, 'from models.Family import Family\n'), ((4889, 4906), 'models.Individual.Individual', 'Individual', (['"""P07"""'], {}), "('P07')\n", (4899, 4906), False, 'from models.Individual import Individual\n'), ((4929, 4946), 'models.Individual.Individual', 'Individual', (['"""P08"""'], {}), "('P08')\n", (4939, 4946), False, 'from models.Individual import Individual\n'), ((5242, 5255), 'models.Family.Family', 'Family', (['"""F05"""'], {}), "('F05')\n", (5248, 5255), False, 'from models.Family import Family\n'), ((5276, 5293), 'models.Individual.Individual', 'Individual', (['"""P09"""'], {}), "('P09')\n", (5286, 5293), False, 'from models.Individual import Individual\n'), ((5316, 5333), 'models.Individual.Individual', 'Individual', (['"""P10"""'], {}), "('P10')\n", (5326, 5333), False, 'from models.Individual import Individual\n'), ((5949, 5962), 'models.Family.Family', 'Family', (['"""F01"""'], {}), "('F01')\n", (5955, 5962), False, 'from models.Family import Family\n'), ((5983, 6000), 'models.Individual.Individual', 'Individual', (['"""P01"""'], {}), "('P01')\n", (5993, 6000), False, 'from models.Individual import Individual\n'), ((6023, 6040), 'models.Individual.Individual', 'Individual', (['"""P02"""'], {}), "('P02')\n", (6033, 6040), False, 'from models.Individual import Individual\n'), ((6337, 6350), 'models.Family.Family', 'Family', (['"""F02"""'], {}), "('F02')\n", (6343, 6350), False, 'from models.Family import Family\n'), ((6371, 6388), 'models.Individual.Individual', 'Individual', 
(['"""P03"""'], {}), "('P03')\n", (6381, 6388), False, 'from models.Individual import Individual\n'), ((6411, 6428), 'models.Individual.Individual', 'Individual', (['"""P04"""'], {}), "('P04')\n", (6421, 6428), False, 'from models.Individual import Individual\n'), ((6893, 6906), 'models.Family.Family', 'Family', (['"""F01"""'], {}), "('F01')\n", (6899, 6906), False, 'from models.Family import Family\n'), ((6927, 6944), 'models.Individual.Individual', 'Individual', (['"""P01"""'], {}), "('P01')\n", (6937, 6944), False, 'from models.Individual import Individual\n'), ((6967, 6984), 'models.Individual.Individual', 'Individual', (['"""P02"""'], {}), "('P02')\n", (6977, 6984), False, 'from models.Individual import Individual\n'), ((7006, 7023), 'models.Individual.Individual', 'Individual', (['"""P03"""'], {}), "('P03')\n", (7016, 7023), False, 'from models.Individual import Individual\n'), ((7297, 7310), 'models.Family.Family', 'Family', (['"""F02"""'], {}), "('F02')\n", (7303, 7310), False, 'from models.Family import Family\n'), ((7331, 7348), 'models.Individual.Individual', 'Individual', (['"""P04"""'], {}), "('P04')\n", (7341, 7348), False, 'from models.Individual import Individual\n'), ((7371, 7388), 'models.Individual.Individual', 'Individual', (['"""P05"""'], {}), "('P05')\n", (7381, 7388), False, 'from models.Individual import Individual\n'), ((7410, 7427), 'models.Individual.Individual', 'Individual', (['"""P06"""'], {}), "('P06')\n", (7420, 7427), False, 'from models.Individual import Individual\n'), ((7701, 7714), 'models.Family.Family', 'Family', (['"""F03"""'], {}), "('F03')\n", (7707, 7714), False, 'from models.Family import Family\n'), ((7735, 7752), 'models.Individual.Individual', 'Individual', (['"""P07"""'], {}), "('P07')\n", (7745, 7752), False, 'from models.Individual import Individual\n'), ((7775, 7792), 'models.Individual.Individual', 'Individual', (['"""P08"""'], {}), "('P08')\n", (7785, 7792), False, 'from models.Individual import Individual\n'), 
((7814, 7831), 'models.Individual.Individual', 'Individual', (['"""P09"""'], {}), "('P09')\n", (7824, 7831), False, 'from models.Individual import Individual\n'), ((8357, 8370), 'models.Family.Family', 'Family', (['"""F01"""'], {}), "('F01')\n", (8363, 8370), False, 'from models.Family import Family\n'), ((8391, 8408), 'models.Individual.Individual', 'Individual', (['"""P01"""'], {}), "('P01')\n", (8401, 8408), False, 'from models.Individual import Individual\n'), ((8431, 8448), 'models.Individual.Individual', 'Individual', (['"""P02"""'], {}), "('P02')\n", (8441, 8448), False, 'from models.Individual import Individual\n'), ((8470, 8487), 'models.Individual.Individual', 'Individual', (['"""P03"""'], {}), "('P03')\n", (8480, 8487), False, 'from models.Individual import Individual\n'), ((8818, 8831), 'models.Family.Family', 'Family', (['"""F02"""'], {}), "('F02')\n", (8824, 8831), False, 'from models.Family import Family\n'), ((8852, 8869), 'models.Individual.Individual', 'Individual', (['"""P04"""'], {}), "('P04')\n", (8862, 8869), False, 'from models.Individual import Individual\n'), ((8892, 8909), 'models.Individual.Individual', 'Individual', (['"""P05"""'], {}), "('P05')\n", (8902, 8909), False, 'from models.Individual import Individual\n'), ((8931, 8948), 'models.Individual.Individual', 'Individual', (['"""P06"""'], {}), "('P06')\n", (8941, 8948), False, 'from models.Individual import Individual\n'), ((9279, 9292), 'models.Family.Family', 'Family', (['"""F03"""'], {}), "('F03')\n", (9285, 9292), False, 'from models.Family import Family\n'), ((9313, 9330), 'models.Individual.Individual', 'Individual', (['"""P07"""'], {}), "('P07')\n", (9323, 9330), False, 'from models.Individual import Individual\n'), ((9353, 9370), 'models.Individual.Individual', 'Individual', (['"""P08"""'], {}), "('P08')\n", (9363, 9370), False, 'from models.Individual import Individual\n'), ((9392, 9409), 'models.Individual.Individual', 'Individual', (['"""P09"""'], {}), "('P09')\n", (9402, 
9409), False, 'from models.Individual import Individual\n'), ((9971, 9984), 'models.Family.Family', 'Family', (['"""F01"""'], {}), "('F01')\n", (9977, 9984), False, 'from models.Family import Family\n'), ((10005, 10022), 'models.Individual.Individual', 'Individual', (['"""P01"""'], {}), "('P01')\n", (10015, 10022), False, 'from models.Individual import Individual\n'), ((10045, 10062), 'models.Individual.Individual', 'Individual', (['"""P02"""'], {}), "('P02')\n", (10055, 10062), False, 'from models.Individual import Individual\n'), ((10375, 10388), 'models.Family.Family', 'Family', (['"""F02"""'], {}), "('F02')\n", (10381, 10388), False, 'from models.Family import Family\n'), ((10409, 10426), 'models.Individual.Individual', 'Individual', (['"""P03"""'], {}), "('P03')\n", (10419, 10426), False, 'from models.Individual import Individual\n'), ((10449, 10466), 'models.Individual.Individual', 'Individual', (['"""P04"""'], {}), "('P04')\n", (10459, 10466), False, 'from models.Individual import Individual\n'), ((10779, 10792), 'models.Family.Family', 'Family', (['"""F03"""'], {}), "('F03')\n", (10785, 10792), False, 'from models.Family import Family\n'), ((10813, 10830), 'models.Individual.Individual', 'Individual', (['"""P05"""'], {}), "('P05')\n", (10823, 10830), False, 'from models.Individual import Individual\n'), ((10853, 10870), 'models.Individual.Individual', 'Individual', (['"""P06"""'], {}), "('P06')\n", (10863, 10870), False, 'from models.Individual import Individual\n'), ((11183, 11196), 'models.Family.Family', 'Family', (['"""F04"""'], {}), "('F04')\n", (11189, 11196), False, 'from models.Family import Family\n'), ((11217, 11234), 'models.Individual.Individual', 'Individual', (['"""P07"""'], {}), "('P07')\n", (11227, 11234), False, 'from models.Individual import Individual\n'), ((11257, 11274), 'models.Individual.Individual', 'Individual', (['"""P08"""'], {}), "('P08')\n", (11267, 11274), False, 'from models.Individual import Individual\n'), ((11587, 11600), 
'models.Family.Family', 'Family', (['"""F05"""'], {}), "('F05')\n", (11593, 11600), False, 'from models.Family import Family\n'), ((11621, 11638), 'models.Individual.Individual', 'Individual', (['"""P09"""'], {}), "('P09')\n", (11631, 11638), False, 'from models.Individual import Individual\n'), ((11661, 11678), 'models.Individual.Individual', 'Individual', (['"""P10"""'], {}), "('P10')\n", (11671, 11678), False, 'from models.Individual import Individual\n'), ((12290, 12302), 'models.Family.Family', 'Family', (['"""t1"""'], {}), "('t1')\n", (12296, 12302), False, 'from models.Family import Family\n'), ((12320, 12332), 'models.Family.Family', 'Family', (['"""t2"""'], {}), "('t2')\n", (12326, 12332), False, 'from models.Family import Family\n'), ((12350, 12362), 'models.Family.Family', 'Family', (['"""t3"""'], {}), "('t3')\n", (12356, 12362), False, 'from models.Family import Family\n'), ((12380, 12392), 'models.Family.Family', 'Family', (['"""t4"""'], {}), "('t4')\n", (12386, 12392), False, 'from models.Family import Family\n'), ((12410, 12422), 'models.Family.Family', 'Family', (['"""t5"""'], {}), "('t5')\n", (12416, 12422), False, 'from models.Family import Family\n'), ((12440, 12452), 'models.Family.Family', 'Family', (['"""t6"""'], {}), "('t6')\n", (12446, 12452), False, 'from models.Family import Family\n'), ((12470, 12486), 'models.Individual.Individual', 'Individual', (['"""p1"""'], {}), "('p1')\n", (12480, 12486), False, 'from models.Individual import Individual\n'), ((12555, 12571), 'models.Individual.Individual', 'Individual', (['"""p2"""'], {}), "('p2')\n", (12565, 12571), False, 'from models.Individual import Individual\n'), ((12640, 12656), 'models.Individual.Individual', 'Individual', (['"""p3"""'], {}), "('p3')\n", (12650, 12656), False, 'from models.Individual import Individual\n'), ((12725, 12741), 'models.Individual.Individual', 'Individual', (['"""p4"""'], {}), "('p4')\n", (12735, 12741), False, 'from models.Individual import Individual\n'), 
((12810, 12826), 'models.Individual.Individual', 'Individual', (['"""p5"""'], {}), "('p5')\n", (12820, 12826), False, 'from models.Individual import Individual\n'), ((12895, 12911), 'models.Individual.Individual', 'Individual', (['"""p6"""'], {}), "('p6')\n", (12905, 12911), False, 'from models.Individual import Individual\n'), ((13846, 13858), 'models.Family.Family', 'Family', (['"""t1"""'], {}), "('t1')\n", (13852, 13858), False, 'from models.Family import Family\n'), ((13876, 13888), 'models.Family.Family', 'Family', (['"""t2"""'], {}), "('t2')\n", (13882, 13888), False, 'from models.Family import Family\n'), ((13906, 13918), 'models.Family.Family', 'Family', (['"""t3"""'], {}), "('t3')\n", (13912, 13918), False, 'from models.Family import Family\n'), ((13936, 13952), 'models.Individual.Individual', 'Individual', (['"""p1"""'], {}), "('p1')\n", (13946, 13952), False, 'from models.Individual import Individual\n'), ((14021, 14037), 'models.Individual.Individual', 'Individual', (['"""p2"""'], {}), "('p2')\n", (14031, 14037), False, 'from models.Individual import Individual\n'), ((14106, 14122), 'models.Individual.Individual', 'Individual', (['"""p3"""'], {}), "('p3')\n", (14116, 14122), False, 'from models.Individual import Individual\n'), ((14191, 14207), 'models.Individual.Individual', 'Individual', (['"""p4"""'], {}), "('p4')\n", (14201, 14207), False, 'from models.Individual import Individual\n'), ((14276, 14292), 'models.Individual.Individual', 'Individual', (['"""p5"""'], {}), "('p5')\n", (14286, 14292), False, 'from models.Individual import Individual\n'), ((14361, 14377), 'models.Individual.Individual', 'Individual', (['"""p6"""'], {}), "('p6')\n", (14371, 14377), False, 'from models.Individual import Individual\n'), ((14447, 14463), 'models.Individual.Individual', 'Individual', (['"""p7"""'], {}), "('p7')\n", (14457, 14463), False, 'from models.Individual import Individual\n'), ((14532, 14548), 'models.Individual.Individual', 'Individual', (['"""p8"""'], 
{}), "('p8')\n", (14542, 14548), False, 'from models.Individual import Individual\n'), ((14617, 14633), 'models.Individual.Individual', 'Individual', (['"""p9"""'], {}), "('p9')\n", (14627, 14633), False, 'from models.Individual import Individual\n'), ((14703, 14720), 'models.Individual.Individual', 'Individual', (['"""p10"""'], {}), "('p10')\n", (14713, 14720), False, 'from models.Individual import Individual\n'), ((14791, 14808), 'models.Individual.Individual', 'Individual', (['"""p11"""'], {}), "('p11')\n", (14801, 14808), False, 'from models.Individual import Individual\n'), ((14879, 14896), 'models.Individual.Individual', 'Individual', (['"""p12"""'], {}), "('p12')\n", (14889, 14896), False, 'from models.Individual import Individual\n'), ((14967, 14984), 'models.Individual.Individual', 'Individual', (['"""p13"""'], {}), "('p13')\n", (14977, 14984), False, 'from models.Individual import Individual\n'), ((16122, 16134), 'models.Family.Family', 'Family', (['"""t1"""'], {}), "('t1')\n", (16128, 16134), False, 'from models.Family import Family\n'), ((16148, 16160), 'models.Family.Family', 'Family', (['"""t2"""'], {}), "('t2')\n", (16154, 16160), False, 'from models.Family import Family\n'), ((16174, 16190), 'models.Individual.Individual', 'Individual', (['"""p1"""'], {}), "('p1')\n", (16184, 16190), False, 'from models.Individual import Individual\n'), ((16204, 16220), 'models.Individual.Individual', 'Individual', (['"""p2"""'], {}), "('p2')\n", (16214, 16220), False, 'from models.Individual import Individual\n'), ((16234, 16250), 'models.Individual.Individual', 'Individual', (['"""p3"""'], {}), "('p3')\n", (16244, 16250), False, 'from models.Individual import Individual\n'), ((16264, 16280), 'models.Individual.Individual', 'Individual', (['"""p4"""'], {}), "('p4')\n", (16274, 16280), False, 'from models.Individual import Individual\n'), ((16294, 16310), 'models.Individual.Individual', 'Individual', (['"""p5"""'], {}), "('p5')\n", (16304, 16310), False, 'from 
models.Individual import Individual\n'), ((16324, 16340), 'models.Individual.Individual', 'Individual', (['"""p6"""'], {}), "('p6')\n", (16334, 16340), False, 'from models.Individual import Individual\n'), ((16979, 16991), 'models.Family.Family', 'Family', (['"""t1"""'], {}), "('t1')\n", (16985, 16991), False, 'from models.Family import Family\n'), ((17005, 17017), 'models.Family.Family', 'Family', (['"""t2"""'], {}), "('t2')\n", (17011, 17017), False, 'from models.Family import Family\n'), ((17031, 17047), 'models.Individual.Individual', 'Individual', (['"""p1"""'], {}), "('p1')\n", (17041, 17047), False, 'from models.Individual import Individual\n'), ((17061, 17077), 'models.Individual.Individual', 'Individual', (['"""p2"""'], {}), "('p2')\n", (17071, 17077), False, 'from models.Individual import Individual\n'), ((17091, 17107), 'models.Individual.Individual', 'Individual', (['"""p3"""'], {}), "('p3')\n", (17101, 17107), False, 'from models.Individual import Individual\n'), ((17121, 17137), 'models.Individual.Individual', 'Individual', (['"""p4"""'], {}), "('p4')\n", (17131, 17137), False, 'from models.Individual import Individual\n'), ((17151, 17167), 'models.Individual.Individual', 'Individual', (['"""p5"""'], {}), "('p5')\n", (17161, 17167), False, 'from models.Individual import Individual\n'), ((17181, 17197), 'models.Individual.Individual', 'Individual', (['"""p6"""'], {}), "('p6')\n", (17191, 17197), False, 'from models.Individual import Individual\n'), ((17211, 17227), 'models.Individual.Individual', 'Individual', (['"""p7"""'], {}), "('p7')\n", (17221, 17227), False, 'from models.Individual import Individual\n'), ((17241, 17257), 'models.Individual.Individual', 'Individual', (['"""p8"""'], {}), "('p8')\n", (17251, 17257), False, 'from models.Individual import Individual\n'), ((17271, 17287), 'models.Individual.Individual', 'Individual', (['"""p9"""'], {}), "('p9')\n", (17281, 17287), False, 'from models.Individual import Individual\n'), ((17302, 17319), 
'models.Individual.Individual', 'Individual', (['"""p10"""'], {}), "('p10')\n", (17312, 17319), False, 'from models.Individual import Individual\n'), ((17334, 17351), 'models.Individual.Individual', 'Individual', (['"""p11"""'], {}), "('p11')\n", (17344, 17351), False, 'from models.Individual import Individual\n'), ((17366, 17383), 'models.Individual.Individual', 'Individual', (['"""p12"""'], {}), "('p12')\n", (17376, 17383), False, 'from models.Individual import Individual\n'), ((17398, 17415), 'models.Individual.Individual', 'Individual', (['"""p13"""'], {}), "('p13')\n", (17408, 17415), False, 'from models.Individual import Individual\n'), ((17430, 17447), 'models.Individual.Individual', 'Individual', (['"""p14"""'], {}), "('p14')\n", (17440, 17447), False, 'from models.Individual import Individual\n'), ((17461, 17478), 'models.Individual.Individual', 'Individual', (['"""p15"""'], {}), "('p15')\n", (17471, 17478), False, 'from models.Individual import Individual\n'), ((17492, 17509), 'models.Individual.Individual', 'Individual', (['"""p16"""'], {}), "('p16')\n", (17502, 17509), False, 'from models.Individual import Individual\n'), ((17523, 17540), 'models.Individual.Individual', 'Individual', (['"""p17"""'], {}), "('p17')\n", (17533, 17540), False, 'from models.Individual import Individual\n'), ((17554, 17571), 'models.Individual.Individual', 'Individual', (['"""p18"""'], {}), "('p18')\n", (17564, 17571), False, 'from models.Individual import Individual\n'), ((17585, 17602), 'models.Individual.Individual', 'Individual', (['"""p19"""'], {}), "('p19')\n", (17595, 17602), False, 'from models.Individual import Individual\n'), ((17616, 17633), 'models.Individual.Individual', 'Individual', (['"""p20"""'], {}), "('p20')\n", (17626, 17633), False, 'from models.Individual import Individual\n'), ((17647, 17664), 'models.Individual.Individual', 'Individual', (['"""p21"""'], {}), "('p21')\n", (17657, 17664), False, 'from models.Individual import Individual\n'), ((17679, 
17696), 'models.Individual.Individual', 'Individual', (['"""p22"""'], {}), "('p22')\n", (17689, 17696), False, 'from models.Individual import Individual\n'), ((17711, 17728), 'models.Individual.Individual', 'Individual', (['"""p23"""'], {}), "('p23')\n", (17721, 17728), False, 'from models.Individual import Individual\n'), ((17743, 17760), 'models.Individual.Individual', 'Individual', (['"""p24"""'], {}), "('p24')\n", (17753, 17760), False, 'from models.Individual import Individual\n'), ((17774, 17791), 'models.Individual.Individual', 'Individual', (['"""p25"""'], {}), "('p25')\n", (17784, 17791), False, 'from models.Individual import Individual\n'), ((17806, 17823), 'models.Individual.Individual', 'Individual', (['"""p26"""'], {}), "('p26')\n", (17816, 17823), False, 'from models.Individual import Individual\n'), ((17838, 17855), 'models.Individual.Individual', 'Individual', (['"""p27"""'], {}), "('p27')\n", (17848, 17855), False, 'from models.Individual import Individual\n'), ((17870, 17887), 'models.Individual.Individual', 'Individual', (['"""p28"""'], {}), "('p28')\n", (17880, 17887), False, 'from models.Individual import Individual\n'), ((18858, 18870), 'models.Family.Family', 'Family', (['"""t1"""'], {}), "('t1')\n", (18864, 18870), False, 'from models.Family import Family\n'), ((18884, 18896), 'models.Family.Family', 'Family', (['"""t2"""'], {}), "('t2')\n", (18890, 18896), False, 'from models.Family import Family\n'), ((18910, 18926), 'models.Individual.Individual', 'Individual', (['"""p1"""'], {}), "('p1')\n", (18920, 18926), False, 'from models.Individual import Individual\n'), ((18940, 18956), 'models.Individual.Individual', 'Individual', (['"""p2"""'], {}), "('p2')\n", (18950, 18956), False, 'from models.Individual import Individual\n'), ((18970, 18986), 'models.Individual.Individual', 'Individual', (['"""p3"""'], {}), "('p3')\n", (18980, 18986), False, 'from models.Individual import Individual\n'), ((19000, 19016), 'models.Individual.Individual', 
'Individual', (['"""p4"""'], {}), "('p4')\n", (19010, 19016), False, 'from models.Individual import Individual\n'), ((19665, 19718), 'models.Gedcom.Gedcom', 'Gedcom', (['"""testing_files/Jiashu_Wang.ged"""', 'SUPPORT_TAGS'], {}), "('testing_files/Jiashu_Wang.ged', SUPPORT_TAGS)\n", (19671, 19718), False, 'from models.Gedcom import Gedcom\n'), ((19732, 19792), 'models.Gedcom.Gedcom', 'Gedcom', (['"""testing_files/MichealFahimGEDCOM.ged"""', 'SUPPORT_TAGS'], {}), "('testing_files/MichealFahimGEDCOM.ged', SUPPORT_TAGS)\n", (19738, 19792), False, 'from models.Gedcom import Gedcom\n'), ((19806, 19859), 'models.Gedcom.Gedcom', 'Gedcom', (['"""testing_files/mock-family.ged"""', 'SUPPORT_TAGS'], {}), "('testing_files/mock-family.ged', SUPPORT_TAGS)\n", (19812, 19859), False, 'from models.Gedcom import Gedcom\n'), ((20331, 20384), 'models.Gedcom.Gedcom', 'Gedcom', (['"""testing_files/Jiashu_Wang.ged"""', 'SUPPORT_TAGS'], {}), "('testing_files/Jiashu_Wang.ged', SUPPORT_TAGS)\n", (20337, 20384), False, 'from models.Gedcom import Gedcom\n'), ((20398, 20458), 'models.Gedcom.Gedcom', 'Gedcom', (['"""testing_files/MichealFahimGEDCOM.ged"""', 'SUPPORT_TAGS'], {}), "('testing_files/MichealFahimGEDCOM.ged', SUPPORT_TAGS)\n", (20404, 20458), False, 'from models.Gedcom import Gedcom\n'), ((20472, 20525), 'models.Gedcom.Gedcom', 'Gedcom', (['"""testing_files/mock-family.ged"""', 'SUPPORT_TAGS'], {}), "('testing_files/mock-family.ged', SUPPORT_TAGS)\n", (20478, 20525), False, 'from models.Gedcom import Gedcom\n'), ((21086, 21139), 'models.Gedcom.Gedcom', 'Gedcom', (['"""testing_files/Jiashu_Wang.ged"""', 'SUPPORT_TAGS'], {}), "('testing_files/Jiashu_Wang.ged', SUPPORT_TAGS)\n", (21092, 21139), False, 'from models.Gedcom import Gedcom\n'), ((21153, 21213), 'models.Gedcom.Gedcom', 'Gedcom', (['"""testing_files/MichealFahimGEDCOM.ged"""', 'SUPPORT_TAGS'], {}), "('testing_files/MichealFahimGEDCOM.ged', SUPPORT_TAGS)\n", (21159, 21213), False, 'from models.Gedcom import Gedcom\n'), ((21227, 
21280), 'models.Gedcom.Gedcom', 'Gedcom', (['"""testing_files/mock-family.ged"""', 'SUPPORT_TAGS'], {}), "('testing_files/mock-family.ged', SUPPORT_TAGS)\n", (21233, 21280), False, 'from models.Gedcom import Gedcom\n'), ((21577, 21589), 'models.Family.Family', 'Family', (['"""t1"""'], {}), "('t1')\n", (21583, 21589), False, 'from models.Family import Family\n'), ((21603, 21615), 'models.Family.Family', 'Family', (['"""t2"""'], {}), "('t2')\n", (21609, 21615), False, 'from models.Family import Family\n'), ((21629, 21641), 'models.Family.Family', 'Family', (['"""t3"""'], {}), "('t3')\n", (21635, 21641), False, 'from models.Family import Family\n'), ((21655, 21667), 'models.Family.Family', 'Family', (['"""t4"""'], {}), "('t4')\n", (21661, 21667), False, 'from models.Family import Family\n'), ((21681, 21693), 'models.Family.Family', 'Family', (['"""t5"""'], {}), "('t5')\n", (21687, 21693), False, 'from models.Family import Family\n'), ((21707, 21719), 'models.Family.Family', 'Family', (['"""t6"""'], {}), "('t6')\n", (21713, 21719), False, 'from models.Family import Family\n'), ((21733, 21749), 'models.Individual.Individual', 'Individual', (['"""p1"""'], {}), "('p1')\n", (21743, 21749), False, 'from models.Individual import Individual\n'), ((21763, 21779), 'models.Individual.Individual', 'Individual', (['"""p2"""'], {}), "('p2')\n", (21773, 21779), False, 'from models.Individual import Individual\n'), ((21793, 21809), 'models.Individual.Individual', 'Individual', (['"""p3"""'], {}), "('p3')\n", (21803, 21809), False, 'from models.Individual import Individual\n'), ((21823, 21839), 'models.Individual.Individual', 'Individual', (['"""p4"""'], {}), "('p4')\n", (21833, 21839), False, 'from models.Individual import Individual\n'), ((22727, 22739), 'models.Family.Family', 'Family', (['"""t1"""'], {}), "('t1')\n", (22733, 22739), False, 'from models.Family import Family\n'), ((22753, 22765), 'models.Family.Family', 'Family', (['"""t2"""'], {}), "('t2')\n", (22759, 22765), 
False, 'from models.Family import Family\n'), ((22779, 22791), 'models.Family.Family', 'Family', (['"""t3"""'], {}), "('t3')\n", (22785, 22791), False, 'from models.Family import Family\n'), ((22805, 22817), 'models.Family.Family', 'Family', (['"""t4"""'], {}), "('t4')\n", (22811, 22817), False, 'from models.Family import Family\n'), ((22831, 22843), 'models.Family.Family', 'Family', (['"""t5"""'], {}), "('t5')\n", (22837, 22843), False, 'from models.Family import Family\n'), ((22857, 22869), 'models.Family.Family', 'Family', (['"""t6"""'], {}), "('t6')\n", (22863, 22869), False, 'from models.Family import Family\n'), ((22883, 22895), 'models.Family.Family', 'Family', (['"""t7"""'], {}), "('t7')\n", (22889, 22895), False, 'from models.Family import Family\n'), ((22909, 22921), 'models.Family.Family', 'Family', (['"""t8"""'], {}), "('t8')\n", (22915, 22921), False, 'from models.Family import Family\n'), ((22935, 22947), 'models.Family.Family', 'Family', (['"""t9"""'], {}), "('t9')\n", (22941, 22947), False, 'from models.Family import Family\n'), ((22961, 22977), 'models.Individual.Individual', 'Individual', (['"""p1"""'], {}), "('p1')\n", (22971, 22977), False, 'from models.Individual import Individual\n'), ((22991, 23007), 'models.Individual.Individual', 'Individual', (['"""p2"""'], {}), "('p2')\n", (23001, 23007), False, 'from models.Individual import Individual\n'), ((23021, 23037), 'models.Individual.Individual', 'Individual', (['"""p3"""'], {}), "('p3')\n", (23031, 23037), False, 'from models.Individual import Individual\n'), ((23051, 23067), 'models.Individual.Individual', 'Individual', (['"""p4"""'], {}), "('p4')\n", (23061, 23067), False, 'from models.Individual import Individual\n'), ((23081, 23097), 'models.Individual.Individual', 'Individual', (['"""p5"""'], {}), "('p5')\n", (23091, 23097), False, 'from models.Individual import Individual\n'), ((23111, 23127), 'models.Individual.Individual', 'Individual', (['"""p6"""'], {}), "('p6')\n", (23121, 23127), 
False, 'from models.Individual import Individual\n'), ((23141, 23157), 'models.Individual.Individual', 'Individual', (['"""p7"""'], {}), "('p7')\n", (23151, 23157), False, 'from models.Individual import Individual\n'), ((23171, 23187), 'models.Individual.Individual', 'Individual', (['"""p8"""'], {}), "('p8')\n", (23181, 23187), False, 'from models.Individual import Individual\n'), ((24111, 24123), 'models.Family.Family', 'Family', (['"""t1"""'], {}), "('t1')\n", (24117, 24123), False, 'from models.Family import Family\n'), ((24137, 24149), 'models.Family.Family', 'Family', (['"""t2"""'], {}), "('t2')\n", (24143, 24149), False, 'from models.Family import Family\n'), ((24163, 24175), 'models.Family.Family', 'Family', (['"""t3"""'], {}), "('t3')\n", (24169, 24175), False, 'from models.Family import Family\n'), ((24189, 24201), 'models.Family.Family', 'Family', (['"""t4"""'], {}), "('t4')\n", (24195, 24201), False, 'from models.Family import Family\n'), ((24215, 24227), 'models.Family.Family', 'Family', (['"""t5"""'], {}), "('t5')\n", (24221, 24227), False, 'from models.Family import Family\n'), ((24241, 24253), 'models.Family.Family', 'Family', (['"""t6"""'], {}), "('t6')\n", (24247, 24253), False, 'from models.Family import Family\n'), ((24267, 24279), 'models.Family.Family', 'Family', (['"""t7"""'], {}), "('t7')\n", (24273, 24279), False, 'from models.Family import Family\n'), ((24293, 24305), 'models.Family.Family', 'Family', (['"""t8"""'], {}), "('t8')\n", (24299, 24305), False, 'from models.Family import Family\n'), ((24319, 24331), 'models.Family.Family', 'Family', (['"""t9"""'], {}), "('t9')\n", (24325, 24331), False, 'from models.Family import Family\n'), ((24346, 24359), 'models.Family.Family', 'Family', (['"""t10"""'], {}), "('t10')\n", (24352, 24359), False, 'from models.Family import Family\n'), ((24373, 24389), 'models.Individual.Individual', 'Individual', (['"""p1"""'], {}), "('p1')\n", (24383, 24389), False, 'from models.Individual import 
Individual\n'), ((24403, 24419), 'models.Individual.Individual', 'Individual', (['"""p2"""'], {}), "('p2')\n", (24413, 24419), False, 'from models.Individual import Individual\n'), ((24433, 24449), 'models.Individual.Individual', 'Individual', (['"""p3"""'], {}), "('p3')\n", (24443, 24449), False, 'from models.Individual import Individual\n'), ((24463, 24479), 'models.Individual.Individual', 'Individual', (['"""p4"""'], {}), "('p4')\n", (24473, 24479), False, 'from models.Individual import Individual\n'), ((24493, 24509), 'models.Individual.Individual', 'Individual', (['"""p5"""'], {}), "('p5')\n", (24503, 24509), False, 'from models.Individual import Individual\n'), ((24523, 24539), 'models.Individual.Individual', 'Individual', (['"""p6"""'], {}), "('p6')\n", (24533, 24539), False, 'from models.Individual import Individual\n'), ((24553, 24569), 'models.Individual.Individual', 'Individual', (['"""p7"""'], {}), "('p7')\n", (24563, 24569), False, 'from models.Individual import Individual\n'), ((24583, 24599), 'models.Individual.Individual', 'Individual', (['"""p8"""'], {}), "('p8')\n", (24593, 24599), False, 'from models.Individual import Individual\n'), ((24613, 24629), 'models.Individual.Individual', 'Individual', (['"""p9"""'], {}), "('p9')\n", (24623, 24629), False, 'from models.Individual import Individual\n'), ((24644, 24661), 'models.Individual.Individual', 'Individual', (['"""p10"""'], {}), "('p10')\n", (24654, 24661), False, 'from models.Individual import Individual\n'), ((26006, 26018), 'models.Family.Family', 'Family', (['"""t1"""'], {}), "('t1')\n", (26012, 26018), False, 'from models.Family import Family\n'), ((26032, 26044), 'models.Family.Family', 'Family', (['"""t2"""'], {}), "('t2')\n", (26038, 26044), False, 'from models.Family import Family\n'), ((26058, 26070), 'models.Family.Family', 'Family', (['"""t3"""'], {}), "('t3')\n", (26064, 26070), False, 'from models.Family import Family\n'), ((26084, 26096), 'models.Family.Family', 'Family', 
(['"""t4"""'], {}), "('t4')\n", (26090, 26096), False, 'from models.Family import Family\n'), ((26110, 26126), 'models.Individual.Individual', 'Individual', (['"""p1"""'], {}), "('p1')\n", (26120, 26126), False, 'from models.Individual import Individual\n'), ((26140, 26156), 'models.Individual.Individual', 'Individual', (['"""p2"""'], {}), "('p2')\n", (26150, 26156), False, 'from models.Individual import Individual\n'), ((26170, 26186), 'models.Individual.Individual', 'Individual', (['"""p3"""'], {}), "('p3')\n", (26180, 26186), False, 'from models.Individual import Individual\n'), ((26200, 26216), 'models.Individual.Individual', 'Individual', (['"""p4"""'], {}), "('p4')\n", (26210, 26216), False, 'from models.Individual import Individual\n'), ((26230, 26246), 'models.Individual.Individual', 'Individual', (['"""p5"""'], {}), "('p5')\n", (26240, 26246), False, 'from models.Individual import Individual\n'), ((26260, 26276), 'models.Individual.Individual', 'Individual', (['"""p6"""'], {}), "('p6')\n", (26270, 26276), False, 'from models.Individual import Individual\n'), ((26290, 26306), 'models.Individual.Individual', 'Individual', (['"""p7"""'], {}), "('p7')\n", (26300, 26306), False, 'from models.Individual import Individual\n'), ((26320, 26336), 'models.Individual.Individual', 'Individual', (['"""p8"""'], {}), "('p8')\n", (26330, 26336), False, 'from models.Individual import Individual\n'), ((26350, 26366), 'models.Individual.Individual', 'Individual', (['"""p9"""'], {}), "('p9')\n", (26360, 26366), False, 'from models.Individual import Individual\n'), ((27145, 27198), 'models.Gedcom.Gedcom', 'Gedcom', (['"""testing_files/Jiashu_Wang.ged"""', 'SUPPORT_TAGS'], {}), "('testing_files/Jiashu_Wang.ged', SUPPORT_TAGS)\n", (27151, 27198), False, 'from models.Gedcom import Gedcom\n'), ((27212, 27272), 'models.Gedcom.Gedcom', 'Gedcom', (['"""testing_files/MichealFahimGEDCOM.ged"""', 'SUPPORT_TAGS'], {}), "('testing_files/MichealFahimGEDCOM.ged', SUPPORT_TAGS)\n", (27218, 
27272), False, 'from models.Gedcom import Gedcom\n'), ((27286, 27339), 'models.Gedcom.Gedcom', 'Gedcom', (['"""testing_files/mock-family.ged"""', 'SUPPORT_TAGS'], {}), "('testing_files/mock-family.ged', SUPPORT_TAGS)\n", (27292, 27339), False, 'from models.Gedcom import Gedcom\n'), ((27648, 27660), 'models.Family.Family', 'Family', (['"""t1"""'], {}), "('t1')\n", (27654, 27660), False, 'from models.Family import Family\n'), ((27674, 27686), 'models.Family.Family', 'Family', (['"""t2"""'], {}), "('t2')\n", (27680, 27686), False, 'from models.Family import Family\n'), ((27700, 27716), 'models.Individual.Individual', 'Individual', (['"""p1"""'], {}), "('p1')\n", (27710, 27716), False, 'from models.Individual import Individual\n'), ((27769, 27785), 'models.Individual.Individual', 'Individual', (['"""p2"""'], {}), "('p2')\n", (27779, 27785), False, 'from models.Individual import Individual\n'), ((27838, 27854), 'models.Individual.Individual', 'Individual', (['"""p3"""'], {}), "('p3')\n", (27848, 27854), False, 'from models.Individual import Individual\n'), ((27907, 27923), 'models.Individual.Individual', 'Individual', (['"""p4"""'], {}), "('p4')\n", (27917, 27923), False, 'from models.Individual import Individual\n'), ((27976, 27992), 'models.Individual.Individual', 'Individual', (['"""p5"""'], {}), "('p5')\n", (27986, 27992), False, 'from models.Individual import Individual\n'), ((28045, 28061), 'models.Individual.Individual', 'Individual', (['"""p6"""'], {}), "('p6')\n", (28055, 28061), False, 'from models.Individual import Individual\n'), ((28115, 28131), 'models.Individual.Individual', 'Individual', (['"""p7"""'], {}), "('p7')\n", (28125, 28131), False, 'from models.Individual import Individual\n'), ((28185, 28201), 'models.Individual.Individual', 'Individual', (['"""p8"""'], {}), "('p8')\n", (28195, 28201), False, 'from models.Individual import Individual\n'), ((28860, 28872), 'models.Family.Family', 'Family', (['"""t1"""'], {}), "('t1')\n", (28866, 28872), False, 
'from models.Family import Family\n'), ((28886, 28898), 'models.Family.Family', 'Family', (['"""t2"""'], {}), "('t2')\n", (28892, 28898), False, 'from models.Family import Family\n'), ((28912, 28924), 'models.Family.Family', 'Family', (['"""t3"""'], {}), "('t3')\n", (28918, 28924), False, 'from models.Family import Family\n'), ((28938, 28950), 'models.Family.Family', 'Family', (['"""t4"""'], {}), "('t4')\n", (28944, 28950), False, 'from models.Family import Family\n'), ((28964, 28976), 'models.Family.Family', 'Family', (['"""t5"""'], {}), "('t5')\n", (28970, 28976), False, 'from models.Family import Family\n'), ((28990, 29002), 'models.Family.Family', 'Family', (['"""t6"""'], {}), "('t6')\n", (28996, 29002), False, 'from models.Family import Family\n'), ((29016, 29028), 'models.Family.Family', 'Family', (['"""t7"""'], {}), "('t7')\n", (29022, 29028), False, 'from models.Family import Family\n'), ((29042, 29054), 'models.Family.Family', 'Family', (['"""t8"""'], {}), "('t8')\n", (29048, 29054), False, 'from models.Family import Family\n'), ((29068, 29080), 'models.Family.Family', 'Family', (['"""t9"""'], {}), "('t9')\n", (29074, 29080), False, 'from models.Family import Family\n'), ((29095, 29108), 'models.Family.Family', 'Family', (['"""t10"""'], {}), "('t10')\n", (29101, 29108), False, 'from models.Family import Family\n'), ((29123, 29136), 'models.Family.Family', 'Family', (['"""t11"""'], {}), "('t11')\n", (29129, 29136), False, 'from models.Family import Family\n'), ((29151, 29164), 'models.Family.Family', 'Family', (['"""t12"""'], {}), "('t12')\n", (29157, 29164), False, 'from models.Family import Family\n'), ((29178, 29194), 'models.Individual.Individual', 'Individual', (['"""p1"""'], {}), "('p1')\n", (29188, 29194), False, 'from models.Individual import Individual\n'), ((29208, 29224), 'models.Individual.Individual', 'Individual', (['"""p2"""'], {}), "('p2')\n", (29218, 29224), False, 'from models.Individual import Individual\n'), ((29238, 29254), 
'models.Individual.Individual', 'Individual', (['"""p3"""'], {}), "('p3')\n", (29248, 29254), False, 'from models.Individual import Individual\n'), ((29268, 29284), 'models.Individual.Individual', 'Individual', (['"""p4"""'], {}), "('p4')\n", (29278, 29284), False, 'from models.Individual import Individual\n'), ((29298, 29314), 'models.Individual.Individual', 'Individual', (['"""p5"""'], {}), "('p5')\n", (29308, 29314), False, 'from models.Individual import Individual\n'), ((29328, 29344), 'models.Individual.Individual', 'Individual', (['"""p6"""'], {}), "('p6')\n", (29338, 29344), False, 'from models.Individual import Individual\n'), ((29358, 29374), 'models.Individual.Individual', 'Individual', (['"""p7"""'], {}), "('p7')\n", (29368, 29374), False, 'from models.Individual import Individual\n'), ((29388, 29404), 'models.Individual.Individual', 'Individual', (['"""p8"""'], {}), "('p8')\n", (29398, 29404), False, 'from models.Individual import Individual\n'), ((29418, 29434), 'models.Individual.Individual', 'Individual', (['"""p9"""'], {}), "('p9')\n", (29428, 29434), False, 'from models.Individual import Individual\n'), ((29449, 29466), 'models.Individual.Individual', 'Individual', (['"""p10"""'], {}), "('p10')\n", (29459, 29466), False, 'from models.Individual import Individual\n'), ((29481, 29498), 'models.Individual.Individual', 'Individual', (['"""p11"""'], {}), "('p11')\n", (29491, 29498), False, 'from models.Individual import Individual\n'), ((30682, 30735), 'models.Gedcom.Gedcom', 'Gedcom', (['"""testing_files/Jiashu_Wang.ged"""', 'SUPPORT_TAGS'], {}), "('testing_files/Jiashu_Wang.ged', SUPPORT_TAGS)\n", (30688, 30735), False, 'from models.Gedcom import Gedcom\n'), ((30749, 30809), 'models.Gedcom.Gedcom', 'Gedcom', (['"""testing_files/MichealFahimGEDCOM.ged"""', 'SUPPORT_TAGS'], {}), "('testing_files/MichealFahimGEDCOM.ged', SUPPORT_TAGS)\n", (30755, 30809), False, 'from models.Gedcom import Gedcom\n'), ((30823, 30876), 'models.Gedcom.Gedcom', 'Gedcom', 
(['"""testing_files/mock-family.ged"""', 'SUPPORT_TAGS'], {}), "('testing_files/mock-family.ged', SUPPORT_TAGS)\n", (30829, 30876), False, 'from models.Gedcom import Gedcom\n')] |
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 17 09:36:07 2015
@author: Ben
"""
from shared_classes import Stock, StockItem, SpecifiedStock
from datamapfunctions import DataMapFunctions, Abstract
import util
import numpy as np
import config as cfg
class SupplyStock(Stock, StockItem):
    """A supply-side stock, combining generic Stock behavior with StockItem bookkeeping."""

    def __init__(self, id, drivers, sql_id_table='SupplyStock', sql_data_table='SupplyStockData',
                 primary_key='node_id', **kwargs):
        # Bug fix: the table/key arguments were previously hard-coded in the
        # Stock.__init__ call, silently ignoring any caller-supplied overrides.
        # Forward them so the defaults still apply but overrides are honored.
        Stock.__init__(self, id, drivers, sql_id_table=sql_id_table, sql_data_table=sql_data_table,
                       primary_key=primary_key, **kwargs)
        StockItem.__init__(self)

    def return_stock_slice(self, elements):
        """Return the specified-stock rows for *elements*, transposed."""
        group = self.specified.loc[elements].transpose()
        return group
class SupplySales(Abstract, DataMapFunctions):
    """Total sales (new capacity additions) for a supply node.

    ``reference=True`` reads node-level data directly from the id/data tables;
    otherwise the generic ``Abstract`` machinery loads measure-specific data.
    """
    def __init__(self, id, supply_node_id, sql_id_table, sql_data_table, primary_key, data_id_key, reference=False, scenario=None):
        self.id = id
        self.input_type = 'total'  # sales are absolute totals, not shares
        self.supply_node_id = supply_node_id
        self.sql_id_table = sql_id_table
        self.sql_data_table = sql_data_table
        self.scenario = scenario
        self.mapped = False  # remap() has not run yet
        if reference:
            # Copy node-level attributes from the id table onto this object.
            # NOTE(review): unlike SupplySalesShare.__init__, this loop does not
            # skip None attribute values — confirm whether that is intentional.
            for col, att in util.object_att_from_table(self.sql_id_table, self.supply_node_id, primary_key):
                setattr(self, col, att)
            DataMapFunctions.__init__(self, data_id_key)
            self.read_timeseries_data(supply_node_id=self.supply_node_id)
            self.raw_values = util.remove_df_levels(self.raw_values, 'supply_technology')
        else:
            # measure specific sales does not require technology filtering
            Abstract.__init__(self, self.id, primary_key=primary_key, data_id_key=data_id_key)

    def calculate(self, vintages, years, interpolation_method=None, extrapolation_method=None):
        """Remap raw sales onto the model vintages and convert to model units."""
        self.vintages = vintages
        self.years = years
        # Interpolate/extrapolate over the vintage index; gaps fill with NaN.
        self.remap(time_index_name='vintage',fill_timeseries=True, interpolation_method=interpolation_method, extrapolation_method=extrapolation_method, fill_value=np.nan)
        self.convert()

    def convert(self):
        """Convert ``self.values`` to model capacity units (energy_unit / time_step)."""
        model_energy_unit = cfg.calculation_energy_unit
        model_time_step = cfg.cfgfile.get('case', 'time_step')
        if self.time_unit is not None:
            # if sales has a time_unit, then the unit is energy and must be converted to capacity
            self.values = util.unit_convert(self.values, unit_from_num=self.capacity_or_energy_unit,
                                            unit_from_den=self.time_unit, unit_to_num=model_energy_unit,
                                            unit_to_den=model_time_step)
        else:
            # if sales is a capacity unit, the model must convert the unit type to an energy unit
            # by multiplying through by one model time step before converting
            self.values = util.unit_convert(self.values, unit_from_num=cfg.ureg.Quantity(self.capacity_or_energy_unit)
                                                                        * cfg.ureg.Quantity(model_time_step),
                                              unit_from_den=model_time_step,
                                              unit_to_num=model_energy_unit,
                                              unit_to_den=model_time_step)

    def reconcile_with_stock_levels(self, needed_sales_share_levels, needed_sales_names):
        """Expand sales to carry every index level present in the stock.

        Raises:
            ValueError: if sales carry index levels the stock does not have.
        """
        if not set(needed_sales_names).issubset(self.values.index.names):
            # we can't have more specificity in sales share than in stock
            raise ValueError('Sales share expressed as an intensity cannot have levels not in stock')
        # pick up extra levels
        self.values = util.expand_multi(self.values, needed_sales_share_levels,
                                          needed_sales_names).sort_index()
class SupplySalesShare(Abstract, DataMapFunctions):
    """Sales shares (fraction of new sales by technology) for a supply node.

    ``reference=True`` reads node-level data directly from the id/data tables;
    otherwise the generic ``Abstract`` machinery loads measure-specific data.
    The static helpers operate on 3-D arrays shaped
    (num_years, replacement_tech, retiring_tech).
    """
    def __init__(self, id, supply_node_id, sql_id_table, sql_data_table, primary_key, data_id_key, reference=False, scenario=None):
        self.id = id
        self.supply_node_id = supply_node_id
        self.sql_id_table = sql_id_table
        self.sql_data_table = sql_data_table
        self.scenario = scenario
        self.mapped = False  # remap() has not run yet
        self.input_type = 'intensity'  # shares are fractions of total sales
        if reference:
            # Copy node-level attributes from the id table onto this object,
            # skipping Nones so defaults are not clobbered.
            for col, att in util.object_att_from_table(self.sql_id_table, self.supply_node_id, primary_key):
                if att is not None:
                    setattr(self, col, att)
            DataMapFunctions.__init__(self, data_id_key)
            self.read_timeseries_data(supply_node_id=self.supply_node_id)
            self.raw_values = util.remove_df_levels(self.raw_values, ['supply_node', 'supply_technology'])
        else:
            # measure specific sales share does not require technology filtering
            Abstract.__init__(self, self.id, primary_key=primary_key, data_id_key=data_id_key)

    def calculate(self, vintages, years):
        """Remap raw sales shares onto the model vintages."""
        self.vintages = vintages
        self.years = years
        self.remap(time_index_name='vintage')

    def reconcile_with_stock_levels(self, needed_sales_share_levels, needed_sales_share_names):
        """Expand sales shares to carry every index level present in the stock.

        Raises:
            ValueError: if shares carry levels not in the stock, or if
                ``input_type`` is 'total' (not supported).
        """
        if self.input_type == 'intensity':
            if not set(self.values.index.names).issubset(needed_sales_share_names):
                # we can't have more specificity in sales share than in stock
                raise ValueError('Sales share expressed as an intensity cannot have levels not in stock')
            # pick up extra levels
            self.values = util.expand_multi(self.values, needed_sales_share_levels,
                                              needed_sales_share_names).sort_index()
            self.values.fillna(0, inplace=True)
        elif self.input_type == 'total':
            raise ValueError(
                'A sales share type of total is not currently supported. Please normalize to sales share as a percentage')
            # if not set(sales_share.values.index.names).issubset(stock.values.index.names):
            # we have extra salesshare levels and we need to do a groupby sum
            # sales_share.values = sales_share.values.groupby(level=needed_sales_share_levels).sum()
            # todo: add logic here so that if stock and service demand
            # has more specificity than sales share, we raise an exception

    @staticmethod
    def scale_reference_array_to_gap(ss_array, space_for_reference):
        """Scale reference sales shares so each column sums to the remaining gap.

        ``space_for_reference`` is a (num_years, num_techs) array of the share
        left over after specified sales shares are accounted for.
        """
        # The last two dimensions are both num_techs (square retiring x
        # replacement matrix), so the duplicate unpack target is harmless.
        num_years, num_techs, num_techs = np.shape(ss_array)
        ref_sums = np.sum(ss_array, axis=1)
        # ignore where no reference is specified to avoid dividing by zero
        vintage_no_ref, retiring_no_ref = np.nonzero(ref_sums)
        factors = np.zeros(np.shape(ref_sums))
        factors[vintage_no_ref, retiring_no_ref] += space_for_reference[vintage_no_ref, retiring_no_ref] / ref_sums[
            vintage_no_ref, retiring_no_ref]
        # Broadcast the per-(year, retiring) factor across the replacement axis.
        factors = np.reshape(np.repeat(factors, num_techs, axis=0), (num_years, num_techs, num_techs))
        # gross up reference sales share with the need
        return ss_array * factors

    @staticmethod
    def normalize_array(ss_array, retiring_must_have_replacement=True):
        """Normalize each (year, retiring-tech) column of shares to sum to 1.

        Raises:
            ValueError: if a column sums to zero while
                ``retiring_must_have_replacement`` is True.
        """
        # Normalize to 1
        sums = np.sum(ss_array, axis=1)
        if np.any(sums == 0) and retiring_must_have_replacement:
            raise ValueError('Every retiring technology must have a replacement specified in sales share')
        # indices needing scaling
        vintage, retiring = np.nonzero(sums != 1)
        # normalize all to 1 (note: mutates ss_array in place)
        ss_array[vintage, :, retiring] = (ss_array[vintage, :, retiring].T / sums[vintage, retiring]).T
        return ss_array

    @staticmethod
    def cap_array_at_1(ss_array):
        """Scale down any (year, retiring-tech) column whose shares exceed 1."""
        # Normalize down to 1
        sums = np.sum(ss_array, axis=1)
        vintage, retiring = np.nonzero(sums > 1)
        # normalize those greater than 1 (note: mutates ss_array in place)
        ss_array[vintage, :, retiring] = (ss_array[vintage, :, retiring].T / sums[vintage, retiring]).T
        return ss_array
class SupplySpecifiedStock(SpecifiedStock):
    """A supply-side specified stock with unit conversion to model capacity."""

    def __init__(self, id, sql_id_table, sql_data_table, scenario):
        SpecifiedStock.__init__(self, id, sql_id_table, sql_data_table, scenario)

    def convert(self):
        """Convert specified values to model capacity units (energy_unit / time_step).

        No-op when no stock values were specified.
        """
        if self.values is None:
            return
        energy_unit = cfg.calculation_energy_unit
        time_step = cfg.cfgfile.get('case', 'time_step')
        if self.time_unit is None:
            # Input is a capacity unit: multiply by one time step's worth of
            # energy so the conversion can run through energy units.
            from_num = cfg.ureg.Quantity(self.capacity_or_energy_unit) * cfg.ureg.Quantity(time_step)
            self.values = util.unit_convert(self.values, unit_from_num=from_num,
                                           unit_from_den=time_step,
                                           unit_to_num=energy_unit,
                                           unit_to_den=time_step)
        else:
            # Input already carries an energy-per-time unit.
            self.values = util.unit_convert(self.values, unit_from_num=self.capacity_or_energy_unit,
                                           unit_from_den=self.time_unit, unit_to_num=energy_unit,
                                           unit_to_den=time_step)
| [
"datamapfunctions.DataMapFunctions.__init__",
"numpy.repeat",
"shared_classes.SpecifiedStock.__init__",
"numpy.any",
"util.expand_multi",
"numpy.sum",
"shared_classes.StockItem.__init__",
"util.remove_df_levels",
"datamapfunctions.Abstract.__init__",
"util.unit_convert",
"numpy.nonzero",
"util... | [((458, 590), 'shared_classes.Stock.__init__', 'Stock.__init__', (['self', 'id', 'drivers'], {'sql_id_table': '"""SupplyStock"""', 'sql_data_table': '"""SupplyStockData"""', 'primary_key': '"""node_id"""'}), "(self, id, drivers, sql_id_table='SupplyStock',\n sql_data_table='SupplyStockData', primary_key='node_id', **kwargs)\n", (472, 590), False, 'from shared_classes import Stock, StockItem, SpecifiedStock\n'), ((616, 640), 'shared_classes.StockItem.__init__', 'StockItem.__init__', (['self'], {}), '(self)\n', (634, 640), False, 'from shared_classes import Stock, StockItem, SpecifiedStock\n'), ((2234, 2270), 'config.cfgfile.get', 'cfg.cfgfile.get', (['"""case"""', '"""time_step"""'], {}), "('case', 'time_step')\n", (2249, 2270), True, 'import config as cfg\n'), ((6446, 6464), 'numpy.shape', 'np.shape', (['ss_array'], {}), '(ss_array)\n', (6454, 6464), True, 'import numpy as np\n'), ((6485, 6509), 'numpy.sum', 'np.sum', (['ss_array'], {'axis': '(1)'}), '(ss_array, axis=1)\n', (6491, 6509), True, 'import numpy as np\n'), ((6628, 6648), 'numpy.nonzero', 'np.nonzero', (['ref_sums'], {}), '(ref_sums)\n', (6638, 6648), True, 'import numpy as np\n'), ((7184, 7208), 'numpy.sum', 'np.sum', (['ss_array'], {'axis': '(1)'}), '(ss_array, axis=1)\n', (7190, 7208), True, 'import numpy as np\n'), ((7446, 7467), 'numpy.nonzero', 'np.nonzero', (['(sums != 1)'], {}), '(sums != 1)\n', (7456, 7467), True, 'import numpy as np\n'), ((7724, 7748), 'numpy.sum', 'np.sum', (['ss_array'], {'axis': '(1)'}), '(ss_array, axis=1)\n', (7730, 7748), True, 'import numpy as np\n'), ((7777, 7797), 'numpy.nonzero', 'np.nonzero', (['(sums > 1)'], {}), '(sums > 1)\n', (7787, 7797), True, 'import numpy as np\n'), ((8088, 8161), 'shared_classes.SpecifiedStock.__init__', 'SpecifiedStock.__init__', (['self', 'id', 'sql_id_table', 'sql_data_table', 'scenario'], {}), '(self, id, sql_id_table, sql_data_table, scenario)\n', (8111, 8161), False, 'from shared_classes import Stock, StockItem, 
SpecifiedStock\n'), ((1250, 1329), 'util.object_att_from_table', 'util.object_att_from_table', (['self.sql_id_table', 'self.supply_node_id', 'primary_key'], {}), '(self.sql_id_table, self.supply_node_id, primary_key)\n', (1276, 1329), False, 'import util\n'), ((1383, 1427), 'datamapfunctions.DataMapFunctions.__init__', 'DataMapFunctions.__init__', (['self', 'data_id_key'], {}), '(self, data_id_key)\n', (1408, 1427), False, 'from datamapfunctions import DataMapFunctions, Abstract\n'), ((1532, 1591), 'util.remove_df_levels', 'util.remove_df_levels', (['self.raw_values', '"""supply_technology"""'], {}), "(self.raw_values, 'supply_technology')\n", (1553, 1591), False, 'import util\n'), ((1693, 1780), 'datamapfunctions.Abstract.__init__', 'Abstract.__init__', (['self', 'self.id'], {'primary_key': 'primary_key', 'data_id_key': 'data_id_key'}), '(self, self.id, primary_key=primary_key, data_id_key=\n data_id_key)\n', (1710, 1780), False, 'from datamapfunctions import DataMapFunctions, Abstract\n'), ((2434, 2606), 'util.unit_convert', 'util.unit_convert', (['self.values'], {'unit_from_num': 'self.capacity_or_energy_unit', 'unit_from_den': 'self.time_unit', 'unit_to_num': 'model_energy_unit', 'unit_to_den': 'model_time_step'}), '(self.values, unit_from_num=self.capacity_or_energy_unit,\n unit_from_den=self.time_unit, unit_to_num=model_energy_unit,\n unit_to_den=model_time_step)\n', (2451, 2606), False, 'import util\n'), ((4308, 4387), 'util.object_att_from_table', 'util.object_att_from_table', (['self.sql_id_table', 'self.supply_node_id', 'primary_key'], {}), '(self.sql_id_table, self.supply_node_id, primary_key)\n', (4334, 4387), False, 'import util\n'), ((4481, 4525), 'datamapfunctions.DataMapFunctions.__init__', 'DataMapFunctions.__init__', (['self', 'data_id_key'], {}), '(self, data_id_key)\n', (4506, 4525), False, 'from datamapfunctions import DataMapFunctions, Abstract\n'), ((4630, 4706), 'util.remove_df_levels', 'util.remove_df_levels', (['self.raw_values', 
"['supply_node', 'supply_technology']"], {}), "(self.raw_values, ['supply_node', 'supply_technology'])\n", (4651, 4706), False, 'import util\n'), ((4814, 4901), 'datamapfunctions.Abstract.__init__', 'Abstract.__init__', (['self', 'self.id'], {'primary_key': 'primary_key', 'data_id_key': 'data_id_key'}), '(self, self.id, primary_key=primary_key, data_id_key=\n data_id_key)\n', (4831, 4901), False, 'from datamapfunctions import DataMapFunctions, Abstract\n'), ((6677, 6695), 'numpy.shape', 'np.shape', (['ref_sums'], {}), '(ref_sums)\n', (6685, 6695), True, 'import numpy as np\n'), ((6889, 6926), 'numpy.repeat', 'np.repeat', (['factors', 'num_techs'], {'axis': '(0)'}), '(factors, num_techs, axis=0)\n', (6898, 6926), True, 'import numpy as np\n'), ((7221, 7238), 'numpy.any', 'np.any', (['(sums == 0)'], {}), '(sums == 0)\n', (7227, 7238), True, 'import numpy as np\n'), ((8422, 8458), 'config.cfgfile.get', 'cfg.cfgfile.get', (['"""case"""', '"""time_step"""'], {}), "('case', 'time_step')\n", (8437, 8458), True, 'import config as cfg\n'), ((3687, 3764), 'util.expand_multi', 'util.expand_multi', (['self.values', 'needed_sales_share_levels', 'needed_sales_names'], {}), '(self.values, needed_sales_share_levels, needed_sales_names)\n', (3704, 3764), False, 'import util\n'), ((8532, 8704), 'util.unit_convert', 'util.unit_convert', (['self.values'], {'unit_from_num': 'self.capacity_or_energy_unit', 'unit_from_den': 'self.time_unit', 'unit_to_num': 'model_energy_unit', 'unit_to_den': 'model_time_step'}), '(self.values, unit_from_num=self.capacity_or_energy_unit,\n unit_from_den=self.time_unit, unit_to_num=model_energy_unit,\n unit_to_den=model_time_step)\n', (8549, 8704), False, 'import util\n'), ((5515, 5602), 'util.expand_multi', 'util.expand_multi', (['self.values', 'needed_sales_share_levels', 'needed_sales_share_names'], {}), '(self.values, needed_sales_share_levels,\n needed_sales_share_names)\n', (5532, 5602), False, 'import util\n'), ((2888, 2935), 'config.ureg.Quantity', 
'cfg.ureg.Quantity', (['self.capacity_or_energy_unit'], {}), '(self.capacity_or_energy_unit)\n', (2905, 2935), True, 'import config as cfg\n'), ((3013, 3047), 'config.ureg.Quantity', 'cfg.ureg.Quantity', (['model_time_step'], {}), '(model_time_step)\n', (3030, 3047), True, 'import config as cfg\n'), ((8877, 8924), 'config.ureg.Quantity', 'cfg.ureg.Quantity', (['self.capacity_or_energy_unit'], {}), '(self.capacity_or_energy_unit)\n', (8894, 8924), True, 'import config as cfg\n'), ((9002, 9036), 'config.ureg.Quantity', 'cfg.ureg.Quantity', (['model_time_step'], {}), '(model_time_step)\n', (9019, 9036), True, 'import config as cfg\n')] |
"""
2.Question 2
This problem also asks you to solve a knapsack instance, but a much bigger one.
This file (knapsack_big.txt) describes a knapsack instance, and it has the following format:
[knapsack_size][number_of_items]
[value_1] [weight_1]
[value_2] [weight_2]
...
For example, the third line of the file is "50074 834558", indicating that the second item has value 50074 and size 834558, respectively. As before, you should assume that item weights and the knapsack capacity are integers.
This instance is so big that the straightforward iterative implementation uses an infeasible amount of time and space. So you will have to be creative to compute an optimal solution. One idea is to go back to a recursive implementation, solving subproblems --- and, of course, caching the results to avoid redundant work --- only on an "as needed" basis. Also, be sure to think about appropriate data structures for storing and looking up solutions to subproblems.
In the box below, type in the value of the optimal solution.
ADVICE: If you're not getting the correct answer, try debugging your algorithm using some small test cases. And then post them to the discussion forum!
"""
import sys
# Raise the recursion limit well above the default (~1000): the memoized
# knapsack solver recurses once per item and would otherwise overflow.
sys.setrecursionlimit(10**6)
def dataReader(filePath):
    """Parse a knapsack instance file.

    The first line holds ``knapsack_size number_of_items``; every following
    line holds ``value weight`` for one item.

    Returns:
        Tuple ``(size, numItems, values, weights)`` where ``values`` and
        ``weights`` are parallel lists of ints.
    """
    with open(filePath) as f:
        header, *item_lines = f.readlines()
    size, numItems = (int(tok) for tok in header.split())
    values, weights = [], []
    for line in item_lines:
        value, weight = (int(tok) for tok in line.split())
        values.append(value)
        weights.append(weight)
    return size, numItems, values, weights
def knapsackMemorization(size, numItems, values, weights):
    """Solve the 0/1 knapsack problem top-down with memoization.

    Only subproblems actually reached by the recursion are computed and
    cached, which is far cheaper in time and memory than filling the full
    (numItems x size) table when the capacity is huge.

    Args:
        size: knapsack capacity (non-negative int).
        numItems: number of items.
        values: item values; values[i] belongs to item i + 1.
        weights: item weights; weights[i] belongs to item i + 1.

    Returns:
        The maximum total value achievable within the capacity.
    """
    # use recursion with memoization to calculate only the "needed" values
    def helper(size, numItems):
        # Negative capacity means the item that brought us here did not fit.
        if size < 0: return None
        # Idiom fix: membership test directly on the dict (was `in dp.keys()`).
        if (numItems, size) in dp:
            return dp[(numItems, size)]
        op1 = helper(size - weights[numItems - 1], numItems - 1)  # take item numItems
        op2 = helper(size, numItems - 1)                          # skip item numItems
        # Idiom fix: identity comparison with None (was `op1 != None`).
        dp[(numItems, size)] = max(op1 + values[numItems - 1], op2) if op1 is not None else op2
        return dp[(numItems, size)]
    # Base case: with 0 items the best value is 0 for every capacity.
    # A dict (not a list) keeps memory proportional to visited subproblems.
    dp = {(0, i): 0 for i in range(size + 1)}
    return helper(size, numItems)
def main():
    """Load the big knapsack instance, solve it, and print the optimal value."""
    instance = dataReader("data/knapsack_big.txt")
    print(knapsackMemorization(*instance))


if __name__ == "__main__":
    main()
| [
"sys.setrecursionlimit"
] | [((1220, 1250), 'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(10 ** 6)'], {}), '(10 ** 6)\n', (1241, 1250), False, 'import sys\n')] |
import json
import urllib.request
from linebot import (LineBotApi, WebhookHandler)
from linebot.models import (MessageEvent, TextMessage, PostbackEvent, FollowEvent, UnfollowEvent)
from linebot.exceptions import (LineBotApiError, InvalidSignatureError)
import os
import sys
import logging
import boto3
from boto3.dynamodb.conditions import Key
#他ファイルimport
from textmessage import textmessage
from postbackevent import postbackevent
import dynamodbfunctions
dynamodb = boto3.resource('dynamodb')
# Load the LINE channel credentials from environment variables.
channel_secret = os.environ["CHANNEL_SECRET"]
channel_access_token = os.environ["ACCESS_TOKEN"]
# Exit immediately if either credential is missing.
# NOTE(review): os.environ["..."] raises KeyError when the variable is unset,
# so these names can never be None here and this guard is effectively dead.
# Use os.environ.get(...) above if exit-on-missing is the intended behavior.
if (channel_secret is None) or (channel_access_token is None):
    sys.exit(1)
line_bot_api = LineBotApi(channel_access_token)
handler = WebhookHandler(channel_secret)
def lambda_handler(event, context):
    """AWS Lambda entry point for LINE Messaging API webhook events.

    Verifies the request signature, registers per-event handlers, dispatches
    the events, and returns an API Gateway-style status response.

    Args:
        event: API Gateway proxy event (headers + raw body).
        context: Lambda context object (unused).

    Returns:
        dict with a ``statusCode`` key: 200 on success, 400 on a missing or
        invalid signature or a LINE API error.
    """
    # Header casing varies depending on the upstream proxy.
    # Bug fix: previously `signature` was left unbound when neither header
    # was present, raising NameError below; such a request is simply invalid.
    if "x-line-signature" in event["headers"]:
        signature = event["headers"]["x-line-signature"]
    elif "X-Line-Signature" in event["headers"]:
        signature = event["headers"]["X-Line-Signature"]
    else:
        return{
            'statusCode': 400,
        }
    body = event["body"]

    # Text message received: delegate to the text-message module.
    @handler.add(MessageEvent, message=TextMessage)
    def handle_text_message(line_event):
        textmessage(line_event, line_bot_api)

    # Postback action (e.g. button tap): delegate to the postback module.
    # (Renamed from `message`, which shadowed the handler above.)
    @handler.add(PostbackEvent)
    def handle_postback(line_event):
        postbackevent(line_event, line_bot_api)

    # Friend added: register the user with the next sequential user id.
    @handler.add(FollowEvent)
    def handle_follow(line_event):
        line_user_id = line_event.source.user_id
        # Ids are assigned as ascending serial numbers; fetch the next one first.
        user_id = dynamodbfunctions.get_new_user_id(os.environ["LINE_USER_TABLE"])
        dynamodbfunctions.user_regist(line_user_id, user_id)

    # Friend removed or blocked: delete the user record.
    @handler.add(UnfollowEvent)
    def handle_unfollow(line_event):
        line_user_id = line_event.source.user_id
        dynamodbfunctions.user_delete(line_user_id)

    try:
        handler.handle(body, signature)
    except LineBotApiError:
        return{
            'statusCode': 400,
        }
    except InvalidSignatureError:
        return{
            'statusCode': 400,
        }
    return{
        'statusCode': 200,
    }
| [
"linebot.WebhookHandler",
"linebot.LineBotApi",
"textmessage.textmessage",
"postbackevent.postbackevent",
"dynamodbfunctions.user_regist",
"boto3.resource",
"sys.exit",
"dynamodbfunctions.get_new_user_id",
"dynamodbfunctions.user_delete"
] | [((472, 498), 'boto3.resource', 'boto3.resource', (['"""dynamodb"""'], {}), "('dynamodb')\n", (486, 498), False, 'import boto3\n'), ((722, 754), 'linebot.LineBotApi', 'LineBotApi', (['channel_access_token'], {}), '(channel_access_token)\n', (732, 754), False, 'from linebot import LineBotApi, WebhookHandler\n'), ((765, 795), 'linebot.WebhookHandler', 'WebhookHandler', (['channel_secret'], {}), '(channel_secret)\n', (779, 795), False, 'from linebot import LineBotApi, WebhookHandler\n'), ((694, 705), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (702, 705), False, 'import sys\n'), ((1187, 1224), 'textmessage.textmessage', 'textmessage', (['line_event', 'line_bot_api'], {}), '(line_event, line_bot_api)\n', (1198, 1224), False, 'from textmessage import textmessage\n'), ((1308, 1347), 'postbackevent.postbackevent', 'postbackevent', (['line_event', 'line_bot_api'], {}), '(line_event, line_bot_api)\n', (1321, 1347), False, 'from postbackevent import postbackevent\n'), ((1535, 1599), 'dynamodbfunctions.get_new_user_id', 'dynamodbfunctions.get_new_user_id', (["os.environ['LINE_USER_TABLE']"], {}), "(os.environ['LINE_USER_TABLE'])\n", (1568, 1599), False, 'import dynamodbfunctions\n'), ((1608, 1660), 'dynamodbfunctions.user_regist', 'dynamodbfunctions.user_regist', (['line_user_id', 'user_id'], {}), '(line_user_id, user_id)\n', (1637, 1660), False, 'import dynamodbfunctions\n'), ((1805, 1848), 'dynamodbfunctions.user_delete', 'dynamodbfunctions.user_delete', (['line_user_id'], {}), '(line_user_id)\n', (1834, 1848), False, 'import dynamodbfunctions\n')] |