code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
from tkinter import *
from tkinter import ttk

# --- main window and message-entry widgets ---
GUI = Tk()
GUI.geometry('600x700')

F1 = Frame(GUI)
F1.place(x=15, y=600)

FONT1 = ('Angsana New', 25)

# v_msg holds the text currently typed into the entry box.
v_msg = StringVar()
E1 = ttk.Entry(F1, textvariable=v_msg, font=FONT1, width=40)
E1.grid(row=0, column=0)

# History of every sent message.
# Fix: the original `global allmsg` statement was a no-op at module
# level (global only has meaning inside a function) and was removed.
allmsg = []
def SendMessage(event=None):
    """Store the typed message (if any) and display the last 10 messages.

    Bound both to the Send button and to <Return> in the entry box
    (hence the unused ``event`` parameter).
    """
    global linenumber
    # A new message resets the <Up>-history browsing offset.
    linenumber = 0
    msg = v_msg.get()
    if msg != '':
        allmsg.append(msg)
    # Re-render the ten most recent messages, one per line.
    rendered = ''.join(f'{line}\n' for line in allmsg[-10:])
    result.set(rendered)
    v_msg.set('')
# --- send button, key bindings, and the message display label ---
B1 = ttk.Button(F1, text='Send', command=SendMessage)
B1.grid(row=0, column=1, padx=10, ipadx=15, ipady=10)
E1.bind('<Return>', SendMessage)
E1.focus()

########text#######
result = StringVar()
result.set('-------ข้อความจะปรากฏที่นี่-------')
L1 = ttk.Label(GUI, textvariable=result, font=FONT1, width=50)
L1.place(x=20, y=20)

############History##############
# Offset used by Uphistory() when browsing older messages.
# Fix: the original `global linenumber` statement here was a no-op at
# module level and was removed; plain assignment creates the global.
linenumber = 0
def Uphistory(event=None):
    """Step the history view on <Up>: each press advances the start of
    the displayed slice by one, scrolling older messages out of view.

    Fixes:
    - the offset is now clamped so repeated presses can no longer slice
      past the end of ``allmsg`` and blank the display;
    - leftover debug ``print`` calls were removed.
    """
    global linenumber
    # Clamp: at linenumber == len(allmsg) + 9 the slice below is
    # allmsg[len(allmsg) - 1:], i.e. the last message alone; one more
    # step would yield an empty list and clear the label.
    linenumber = min(linenumber + 1, len(allmsg) + 9)
    allmessage = ''
    for m in allmsg[-10 + linenumber:]:
        allmessage += m + '\n'
    result.set(allmessage)
# Browse message history with the Up-arrow key, then start the UI loop.
GUI.bind('<Up>',Uphistory)
GUI.mainloop()
|
[
"tkinter.ttk.Label",
"tkinter.ttk.Entry",
"tkinter.ttk.Button"
] |
[((186, 241), 'tkinter.ttk.Entry', 'ttk.Entry', (['F1'], {'textvariable': 'v_msg', 'font': 'FONT1', 'width': '(40)'}), '(F1, textvariable=v_msg, font=FONT1, width=40)\n', (195, 241), False, 'from tkinter import ttk\n'), ((563, 611), 'tkinter.ttk.Button', 'ttk.Button', (['F1'], {'text': '"""Send"""', 'command': 'SendMessage'}), "(F1, text='Send', command=SendMessage)\n", (573, 611), False, 'from tkinter import ttk\n'), ((809, 866), 'tkinter.ttk.Label', 'ttk.Label', (['GUI'], {'textvariable': 'result', 'font': 'FONT1', 'width': '(50)'}), '(GUI, textvariable=result, font=FONT1, width=50)\n', (818, 866), False, 'from tkinter import ttk\n')]
|
"""
Train on images split into directories. This assumes we've split
our videos into frames and moved them to their respective folders.
Based on:
https://keras.io/preprocessing/image/
and
https://keras.io/applications/
"""
import argparse
import numpy as np
import os
from keras.applications.inception_v3 import InceptionV3
from keras.optimizers import SGD # Stochastic gradient descent: use 1 example for gradient descent in each iteration
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Model
from keras.layers import Dense, GlobalAveragePooling2D
from keras.callbacks import ModelCheckpoint, TensorBoard, EarlyStopping,CSVLogger
from data import DataSet
import os.path
import settings
import function_list as ff
# Experiment configuration and dataset metadata (class list etc.).
cg = settings.Experiment()
data = DataSet()
# All outputs (checkpoints, logs) live under <oct_main_dir>/UCF101.
main_folder = os.path.join(cg.oct_main_dir,'UCF101')
os.makedirs(os.path.join(main_folder,'checkpoints','inception'),exist_ok=True)
model_name = 'inception'
# Helper: Save the model.
# Keeps only the best weights (lowest validation loss) at
# <main_folder>/checkpoints/inception/inception.hdf5.
checkpointer = ModelCheckpoint(
    filepath=os.path.join(main_folder, 'checkpoints', model_name,model_name+'.hdf5'),
    monitor='val_loss',
    verbose=1,
    save_best_only=True)
# Helper: save the log
# Appends per-epoch metrics to a CSV file. NOTE(review): the 'logs'
# directory is not created above — confirm it exists before training.
csv_logger = CSVLogger(os.path.join(main_folder, 'logs', model_name + '-' + 'training-log' + '.csv'))
# Helper: Stop when we stop learning.
#early_stopper = EarlyStopping(patience=10)
# Helper: TensorBoard
#tensorboard = TensorBoard(log_dir=os.path.join('data', 'logs'))
def get_generators():
    """Build the (train, validation) image generator pair.

    Training images receive light augmentation; validation images are
    only rescaled. See the flow_from_directory tutorial:
    https://medium.com/@vijayabhaskar96/tutorial-image-classification-with-keras-flow-from-directory-and-generators-95f75ebe5720
    """
    augmenting_datagen = ImageDataGenerator(
        rescale=1. / 255,
        shear_range=0.2,
        horizontal_flip=True,
        rotation_range=10.,
        width_shift_range=0.2,
        height_shift_range=0.2)
    plain_datagen = ImageDataGenerator(rescale=1. / 255)

    # Options shared by both generators.
    shared_options = dict(
        target_size=(299, 299),  # InceptionV3 input size; every image is resized
        color_mode='rgb',        # set to "greyscale" for single-channel input
        batch_size=32,
        classes=data.classes,
        class_mode='categorical')

    train_generator = augmenting_datagen.flow_from_directory(
        os.path.join(main_folder, 'train_image'), **shared_options)
    validation_generator = plain_datagen.flow_from_directory(
        os.path.join(main_folder, 'test_image'), **shared_options)
    return train_generator, validation_generator
def get_model(weights='imagenet'):
    """Return InceptionV3 with a fresh classification head.

    The base network (no top) is loaded with the given weights; on top
    we add global average pooling, one 1024-unit ReLU layer, and a
    softmax over the dataset's classes.
    """
    base_model = InceptionV3(weights=weights, include_top=False)
    pooled = GlobalAveragePooling2D()(base_model.output)
    hidden = Dense(1024, activation='relu')(pooled)
    predictions = Dense(len(data.classes), activation='softmax')(hidden)
    return Model(inputs=base_model.input, outputs=predictions)
def freeze_all_but_top(model):
    """Train only the two layers we appended (the fully-connected layer
    and the logistic layer); everything else is frozen.

    Compiling must happen *after* toggling ``trainable`` for the change
    to take effect.
    """
    for frozen_layer in model.layers[:-2]:
        frozen_layer.trainable = False
    model.compile(optimizer='rmsprop',
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    return model
def freeze_all_but_mid_and_top(model):
    """Unfreeze the top two Inception blocks for fine-tuning.

    Layers 0-171 stay frozen; layers 172+ become trainable (the model
    has 313 layers in total). Recompiled with a low-learning-rate SGD
    so the pre-trained weights are only nudged.
    """
    for index, layer in enumerate(model.layers):
        layer.trainable = index >= 172
    model.compile(
        optimizer=SGD(lr=0.0001, momentum=0.9),  # 0.9 is SGD's customary momentum
        loss='categorical_crossentropy',
        metrics=['accuracy', 'top_k_categorical_accuracy'])
    return model
def train_model(model, nb_epoch, generators, callbacks=None):
    """Fit ``model`` on the (train, validation) generator pair.

    Parameters
    ----------
    model : compiled Keras model.
    nb_epoch : int, number of epochs to train.
    generators : tuple of (train_generator, validation_generator).
    callbacks : list of Keras callbacks, or None for no callbacks.

    Returns
    -------
    (model, history) — the trained model and the Keras History object.
    """
    # Bug fix: the original default was a mutable `callbacks=[]`, which
    # is shared across calls; use None as the sentinel instead.
    if callbacks is None:
        callbacks = []
    train_generator, validation_generator = generators
    hist = model.fit_generator(
        train_generator,
        steps_per_epoch=100,
        validation_data=validation_generator,
        validation_steps=10,
        epochs=nb_epoch,
        callbacks=callbacks)
    return model, hist
def main(weights_file):
    """Two-stage transfer learning: warm up the new head, then
    fine-tune the upper Inception blocks.

    weights_file -- path to previously saved weights, or None to start
    from ImageNet weights (which triggers the head warm-up stage).
    """
    model = get_model()
    generators = get_generators()

    if weights_file is None:
        print("Loading network from ImageNet weights.")
        # Stage 1: train only the freshly-added top layers.
        model = freeze_all_but_top(model)
        model, _ = train_model(model, 10, generators)
    else:
        print("Loading saved model: %s." % weights_file)
        model.load_weights(weights_file)

    # Stage 2: fine-tune the mid and top layers with checkpointing and
    # CSV logging enabled.
    model, hist = train_model(model, 200, generators, [checkpointer, csv_logger])
    # (A block that saved the training history arrays to .npy files used
    # to live here; recover it from version control if needed.)
# Entry point: no saved weights by default, so training starts from
# ImageNet weights and runs both stages.
if __name__ == '__main__':
    weights_file = None
    main(weights_file)
|
[
"settings.Experiment",
"keras.preprocessing.image.ImageDataGenerator",
"os.path.join",
"keras.optimizers.SGD",
"keras.models.Model",
"keras.layers.GlobalAveragePooling2D",
"keras.layers.Dense",
"keras.applications.inception_v3.InceptionV3",
"data.DataSet"
] |
[((758, 779), 'settings.Experiment', 'settings.Experiment', ([], {}), '()\n', (777, 779), False, 'import settings\n'), ((789, 798), 'data.DataSet', 'DataSet', ([], {}), '()\n', (796, 798), False, 'from data import DataSet\n'), ((813, 852), 'os.path.join', 'os.path.join', (['cg.oct_main_dir', '"""UCF101"""'], {}), "(cg.oct_main_dir, 'UCF101')\n", (825, 852), False, 'import os\n'), ((865, 918), 'os.path.join', 'os.path.join', (['main_folder', '"""checkpoints"""', '"""inception"""'], {}), "(main_folder, 'checkpoints', 'inception')\n", (877, 918), False, 'import os\n'), ((1212, 1289), 'os.path.join', 'os.path.join', (['main_folder', '"""logs"""', "(model_name + '-' + 'training-log' + '.csv')"], {}), "(main_folder, 'logs', model_name + '-' + 'training-log' + '.csv')\n", (1224, 1289), False, 'import os\n'), ((1708, 1856), 'keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {'rescale': '(1.0 / 255)', 'shear_range': '(0.2)', 'horizontal_flip': '(True)', 'rotation_range': '(10.0)', 'width_shift_range': '(0.2)', 'height_shift_range': '(0.2)'}), '(rescale=1.0 / 255, shear_range=0.2, horizontal_flip=True,\n rotation_range=10.0, width_shift_range=0.2, height_shift_range=0.2)\n', (1726, 1856), False, 'from keras.preprocessing.image import ImageDataGenerator\n'), ((1918, 1955), 'keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {'rescale': '(1.0 / 255)'}), '(rescale=1.0 / 255)\n', (1936, 1955), False, 'from keras.preprocessing.image import ImageDataGenerator\n'), ((2725, 2772), 'keras.applications.inception_v3.InceptionV3', 'InceptionV3', ([], {'weights': 'weights', 'include_top': '(False)'}), '(weights=weights, include_top=False)\n', (2736, 2772), False, 'from keras.applications.inception_v3 import InceptionV3\n'), ((3113, 3164), 'keras.models.Model', 'Model', ([], {'inputs': 'base_model.input', 'outputs': 'predictions'}), '(inputs=base_model.input, outputs=predictions)\n', (3118, 3164), False, 'from keras.models import Model\n'), 
((1028, 1102), 'os.path.join', 'os.path.join', (['main_folder', '"""checkpoints"""', 'model_name', "(model_name + '.hdf5')"], {}), "(main_folder, 'checkpoints', model_name, model_name + '.hdf5')\n", (1040, 1102), False, 'import os\n'), ((2019, 2059), 'os.path.join', 'os.path.join', (['main_folder', '"""train_image"""'], {}), "(main_folder, 'train_image')\n", (2031, 2059), False, 'import os\n'), ((2395, 2434), 'os.path.join', 'os.path.join', (['main_folder', '"""test_image"""'], {}), "(main_folder, 'test_image')\n", (2407, 2434), False, 'import os\n'), ((2857, 2881), 'keras.layers.GlobalAveragePooling2D', 'GlobalAveragePooling2D', ([], {}), '()\n', (2879, 2881), False, 'from keras.layers import Dense, GlobalAveragePooling2D\n'), ((2933, 2963), 'keras.layers.Dense', 'Dense', (['(1024)'], {'activation': '"""relu"""'}), "(1024, activation='relu')\n", (2938, 2963), False, 'from keras.layers import Dense, GlobalAveragePooling2D\n'), ((4288, 4316), 'keras.optimizers.SGD', 'SGD', ([], {'lr': '(0.0001)', 'momentum': '(0.9)'}), '(lr=0.0001, momentum=0.9)\n', (4291, 4316), False, 'from keras.optimizers import SGD\n')]
|
# -*- coding: utf-8 -*-
# @Time : 23.03.21 14:42
# @Author : sing_sd
import numpy as np
import matplotlib.pyplot as plt
import numpy as np
from rdp import rdp
import pickle
import src.clustering.COStransforms as ct
from frechetdist import frdist
# Reference origin used by all WGS84 <-> local (ENU/ECEF) conversions.
ref = {'lon': 12.0, 'lat': 54.35, 'ECEF': ct.WGS84toECEF(12.0, 54.35)}
# One color per graph edge; the trailing "k" (black) marks outliers.
colour_array = ["r", "g", "b", "y", "c", "m", "#9475FC", "k"]  # an extra k
# pylab.ion()
# pylab.clf()
########## A Graph associatiation method by frdist distance
def assign_to_graph(data1):
    """Assign each point of one trajectory to a graph edge via the
    discrete Frechet distance.

    The ENU trajectory is simplified with RDP (epsilon = 1000); each
    simplified segment is matched to the graph edge with the smallest
    Frechet distance, and every raw point between the segment's
    endpoints inherits that edge id. Distances above 25000 map to the
    outlier label ``len(edges)`` (plotted black).

    Returns an (n, 1) int array of edge ids.

    NOTE(review): if RDP returns fewer than two points the loop never
    runs and the final ``assignments[end, 0]`` line raises NameError —
    confirm callers always pass trajectories with >= 2 distinct points.
    """
    ENUcoord, nodes, edges = get_data_nodes_edges(data1)
    data = ENUcoord.tolist()
    assignments = np.zeros(shape=(len(data), 1), dtype=int)
    idx_start = 0
    EPSILON = 1000
    idx_end = idx_start + data1.shape[0]
    ENUcoord_mmsi = ENUcoord[idx_start:idx_end, :]
    # Simplify; rdpNodes is a subset of the raw trajectory points.
    ENUcoord_subset = rdp(ENUcoord_mmsi, EPSILON)
    rdpNodes = np.array(ENUcoord_subset)
    cost_matrix = np.zeros((len(rdpNodes) - 1, len(edges)))
    start = idx_start
    for i in range(1, len(rdpNodes)):
        # Raw index of this simplified point: first occurrence after
        # `start` (the same coordinates can repeat in the trajectory).
        ends = np.where((ENUcoord_mmsi == rdpNodes[i, :]).all(axis=1))[0]
        end = ends[np.argmax(ends > start)] + idx_start
        for e_idx, e in enumerate(edges):
            cost_matrix[i-1, e_idx] = frdist([rdpNodes[i-1, :], rdpNodes[i, :]], [nodes[e[0],:], nodes[e[1],:]])
        e_idx = np.argmin(cost_matrix[i-1, :])  # edge id
        if min(cost_matrix[i-1, :]) > 25000:
            e_idx = len(edges)  # ensures color as black
        assignments[start:end, 0] = e_idx
        start = end
    # Propagate the last segment's label to the final point.
    assignments[end, 0] = assignments[end-1, 0]
    return assignments  # np.array(assignments[:, 0]).reshape(len(assignments),1)
def get_data_nodes_edges(data1):
    """Convert raw lon/lat rows to 2-D ENU coordinates and load the
    refined graph (nodes in ENU, edges as node-index pairs) from disk.

    Columns 0 and 1 of ``data1`` are read as longitude and latitude.
    """
    clm_idx = 0
    data1 = np.array(data1)
    row_count = data1.shape[0]
    lon = np.array(data1[:, clm_idx], dtype=float).reshape([1, row_count])
    lat = np.array(data1[:, clm_idx + 1], dtype=float).reshape([1, row_count])

    # Project to local East-North-Up and drop the third (Up) column.
    enu = np.transpose(ct.WGS84toENU(lon, lat, ref))
    enu = np.delete(enu, np.s_[2], axis=1)

    with open("../resources/graph_nodes_refined.pkl", 'rb') as f:
        nodesWGS = pickle.load(f)
    graph_nodes = np.transpose(ct.WGS84toENU(nodesWGS[:, 0].T, nodesWGS[:, 1].T, ref))
    graph_nodes = np.delete(graph_nodes, np.s_[2], axis=1)

    with open("../resources/graph_edges_refined.pkl", 'rb') as f:
        graph_edges = pickle.load(f)
    # i. e., now same as before edges = [[1, 2], [0, 1], [0, 3], [0, 4], [1, 4], [4, 6], [0, 5]]
    return enu, graph_nodes, graph_edges
def point2edge(point, edge_start, edge_end):
    """Distance from ``point`` to the segment ``edge_start``-``edge_end``.

    The point is projected onto the segment's supporting line; the
    projection parameter ``t`` is clamped to [0, 1] so positions beyond
    either endpoint measure to the nearest endpoint instead.
    """
    seg = np.subtract(edge_end, edge_start)
    rel = np.subtract(point, edge_start)
    seg_len = np.linalg.norm(seg)
    seg_dir = seg / np.linalg.norm(seg)
    # Fraction of the way along the segment where the projection falls.
    t = np.dot(seg_dir, np.multiply(rel, 1.0 / seg_len))
    if t < 0.0:
        t = 0.0
    elif t > 1.0:
        t = 1.0
    closest = np.multiply(seg, t)
    return np.linalg.norm(np.subtract(closest, rel))
def cosine_angle(a, b, c):
    """Cosine of the angle at vertex ``b`` formed by points a-b-c
    (inputs are numpy arrays)."""
    ba = a - b
    bc = c - b
    return np.dot(ba, bc) / (np.linalg.norm(ba) * np.linalg.norm(bc))
def get_slope_edges(edges, nodes):
    """Slope (dy/dx) of every edge, plus a trailing 0.0 slot for the
    outlier pseudo-edge.

    NOTE: a perfectly vertical edge (dx == 0) raises ZeroDivisionError,
    matching the original behaviour.
    """
    slopes = []
    for edge in edges:
        rise = nodes[edge[0]][1] - nodes[edge[1]][1]
        run = nodes[edge[0]][0] - nodes[edge[1]][0]
        slopes.append(rise / run)
    slopes.append(0.0)  # outlier edge
    return np.array(slopes)
def get_assignment(data1):
    """Assign every trajectory point to its nearest graph edge, then
    smooth the labels over RDP-simplified segments.

    Stage 1: per-point point-to-edge distances; a point farther than
    DIST_THR from every edge gets the outlier label ``len(edges)``.
    Stage 2 (single trajectory, ``data1.shape[1] == 2``): within each
    RDP segment the label is re-chosen by comparing the segment's slope
    with the candidate edges' slopes.

    Returns the label column (one int per input point).
    """
    DIST_THR = 7000
    ENUcoord, nodes, edges = get_data_nodes_edges(data1)
    data = ENUcoord.tolist()
    # One extra column for the outlier pseudo-edge, pre-filled just
    # above the threshold so argmin only picks it when all real edges
    # are even worse.
    cost_matrix = np.zeros((len(data), len(edges)+1))
    cost_matrix[:, len(edges)] = DIST_THR + 1
    assignments = []
    for ii, points in enumerate(data):
        for jj, items in enumerate(edges):
            start = nodes[edges[jj][0]][0:2]
            end = nodes[edges[jj][1]][0:2]
            cost_matrix[ii, jj] = point2edge(points, start, end)
        e_idx = np.argmin(cost_matrix[ii, :])  # edge id
        if min(cost_matrix[ii, :]) > DIST_THR:  # 5000:
            e_idx = len(edges)  # ensures color as black
        assignments.append((ii, e_idx))
    assignments = np.array(assignments)
    EPSILON = 500  # 1000
    if data1.shape[1] == 2:
        idx_start = 0
        idx_end = data1.shape[0]
        ENUcoord_mmsi = ENUcoord[idx_start:idx_end, :]
        ENUcoord_subset = rdp(ENUcoord_mmsi, EPSILON)
        rdpNodes = np.array(ENUcoord_subset)
        start = idx_start
        slope_edges = get_slope_edges(edges, nodes)
        for i in range(1, len(rdpNodes)):
            # Raw index of this simplified point: first occurrence
            # after `start` (coordinates can repeat).
            ends = np.where((ENUcoord_mmsi == rdpNodes[i, :]).all(axis=1))[0]
            end = ends[np.argmax(ends > start)] + idx_start
            unique_labels = np.unique(assignments[start:end, 1])  # numpy unique does not preserve order
            max_num = np.inf
            slope_rdpnodes = (rdpNodes[i, 1] - rdpNodes[i - 1, 1]) / (rdpNodes[i, 0] - rdpNodes[i - 1, 0])
            # Default: the candidate whose slope best matches the segment.
            label = unique_labels[np.argmin(np.abs(slope_rdpnodes - slope_edges[unique_labels]))]  # unique_labels[0]
            for j in unique_labels:
                # Only consider edges close to both segment endpoints.
                if max(cost_matrix[start,j], cost_matrix[end, j]) < DIST_THR:
                    if abs(slope_rdpnodes - slope_edges[j]) < max_num:
                        # and not (-0.8 < slope_edges[j]*slope_rdpnodes < -1.2):
                        label = j
                        max_num = abs(slope_rdpnodes - slope_edges[j])
            assignments[start:end, 1] = label
            start = end
        assignments[end, 1] = assignments[end - 1, 1]  # assign the cluster number to last point
    else:
        print("need to be updated like single vessel data code in Else loop")
        exit(0)
        # NOTE(review): everything below is unreachable (exit(0) above).
        # It looks like retained multi-vessel smoothing code that picks,
        # per RDP segment, the majority label — confirm before reviving.
        angle_thr = 0.2
        # mmsi_index = data1.columns.get_loc("mmsi")
        for mmsi in data1.mmsi.unique():
            idx_start = np.argmax(data1["mmsi"] == mmsi)
            idx_end = idx_start + np.sum(data1["mmsi"] == mmsi)
            ENUcoord_mmsi = ENUcoord[idx_start:idx_end, :]
            ENUcoord_subset = rdp(ENUcoord_mmsi, EPSILON)
            rdpNodes = np.array(ENUcoord_subset)
            start = idx_start
            for i in range(1, len(rdpNodes)):
                ends = np.where((ENUcoord_mmsi == rdpNodes[i, :]).all(axis=1))[0]
                end = ends[np.argmax(ends>start)] + idx_start
                unique_labels = np.unique(assignments[start:end, 1])
                label = unique_labels[0]
                max_num = 0
                for j in unique_labels:
                    if max_num < sum(assignments[start:end, 1] == j):
                        label = j
                        max_num = sum(assignments[start:end, 1] == j)
                # if label != len(edges) and \
                #         (-angle_thr < cosine_angle(rdpNodes[i-1, :], nodes[edges[label][0]][0:2],
                #                                    rdpNodes[i, :]) < angle_thr and \
                #          -angle_thr < cosine_angle(rdpNodes[i-1, :], nodes[edges[label][1]][0:2],
                #                                    rdpNodes[i, :]) < angle_thr):
                #     label = len(edges)
                assignments[start:end, 1] = label
                start = end
            assignments[end, 1] = assignments[end - 1, 1]
    print('data associated')
    return assignments[:, 1]  # np.array(assignments[:, 1]).reshape(len(assignments),1)
|
[
"src.clustering.COStransforms.WGS84toECEF",
"frechetdist.frdist",
"numpy.multiply",
"numpy.subtract",
"numpy.sum",
"numpy.argmax",
"numpy.abs",
"src.clustering.COStransforms.WGS84toENU",
"numpy.transpose",
"rdp.rdp",
"numpy.argmin",
"pickle.load",
"numpy.array",
"numpy.linalg.norm",
"numpy.dot",
"numpy.delete",
"numpy.unique"
] |
[((294, 321), 'src.clustering.COStransforms.WGS84toECEF', 'ct.WGS84toECEF', (['(12.0)', '(54.35)'], {}), '(12.0, 54.35)\n', (308, 321), True, 'import src.clustering.COStransforms as ct\n'), ((813, 840), 'rdp.rdp', 'rdp', (['ENUcoord_mmsi', 'EPSILON'], {}), '(ENUcoord_mmsi, EPSILON)\n', (816, 840), False, 'from rdp import rdp\n'), ((856, 881), 'numpy.array', 'np.array', (['ENUcoord_subset'], {}), '(ENUcoord_subset)\n', (864, 881), True, 'import numpy as np\n'), ((1702, 1717), 'numpy.array', 'np.array', (['data1'], {}), '(data1)\n', (1710, 1717), True, 'import numpy as np\n'), ((1910, 1938), 'src.clustering.COStransforms.WGS84toENU', 'ct.WGS84toENU', (['lon', 'lat', 'ref'], {}), '(lon, lat, ref)\n', (1923, 1938), True, 'import src.clustering.COStransforms as ct\n'), ((1954, 1976), 'numpy.transpose', 'np.transpose', (['ENUcoord'], {}), '(ENUcoord)\n', (1966, 1976), True, 'import numpy as np\n'), ((1992, 2029), 'numpy.delete', 'np.delete', (['ENUcoord', 'np.s_[2]'], {'axis': '(1)'}), '(ENUcoord, np.s_[2], axis=1)\n', (2001, 2029), True, 'import numpy as np\n'), ((2143, 2197), 'src.clustering.COStransforms.WGS84toENU', 'ct.WGS84toENU', (['nodesWGS[:, 0].T', 'nodesWGS[:, 1].T', 'ref'], {}), '(nodesWGS[:, 0].T, nodesWGS[:, 1].T, ref)\n', (2156, 2197), True, 'import src.clustering.COStransforms as ct\n'), ((2210, 2229), 'numpy.transpose', 'np.transpose', (['nodes'], {}), '(nodes)\n', (2222, 2229), True, 'import numpy as np\n'), ((2242, 2276), 'numpy.delete', 'np.delete', (['nodes', 'np.s_[2]'], {'axis': '(1)'}), '(nodes, np.s_[2], axis=1)\n', (2251, 2276), True, 'import numpy as np\n'), ((2572, 2605), 'numpy.subtract', 'np.subtract', (['edge_end', 'edge_start'], {}), '(edge_end, edge_start)\n', (2583, 2605), True, 'import numpy as np\n'), ((2620, 2650), 'numpy.subtract', 'np.subtract', (['point', 'edge_start'], {}), '(point, edge_start)\n', (2631, 2650), True, 'import numpy as np\n'), ((2666, 2690), 'numpy.linalg.norm', 'np.linalg.norm', (['line_vec'], {}), '(line_vec)\n', 
(2680, 2690), True, 'import numpy as np\n'), ((2767, 2803), 'numpy.multiply', 'np.multiply', (['pnt_vec', '(1.0 / line_len)'], {}), '(pnt_vec, 1.0 / line_len)\n', (2778, 2803), True, 'import numpy as np\n'), ((2812, 2848), 'numpy.dot', 'np.dot', (['line_unitvec', 'pnt_vec_scaled'], {}), '(line_unitvec, pnt_vec_scaled)\n', (2818, 2848), True, 'import numpy as np\n'), ((2929, 2953), 'numpy.multiply', 'np.multiply', (['line_vec', 't'], {}), '(line_vec, t)\n', (2940, 2953), True, 'import numpy as np\n'), ((3434, 3455), 'numpy.array', 'np.array', (['slope_edges'], {}), '(slope_edges)\n', (3442, 3455), True, 'import numpy as np\n'), ((4175, 4196), 'numpy.array', 'np.array', (['assignments'], {}), '(assignments)\n', (4183, 4196), True, 'import numpy as np\n'), ((1304, 1336), 'numpy.argmin', 'np.argmin', (['cost_matrix[i - 1, :]'], {}), '(cost_matrix[i - 1, :])\n', (1313, 1336), True, 'import numpy as np\n'), ((2116, 2130), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (2127, 2130), False, 'import pickle\n'), ((2360, 2374), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (2371, 2374), False, 'import pickle\n'), ((2721, 2745), 'numpy.linalg.norm', 'np.linalg.norm', (['line_vec'], {}), '(line_vec)\n', (2735, 2745), True, 'import numpy as np\n'), ((2984, 3013), 'numpy.subtract', 'np.subtract', (['nearest', 'pnt_vec'], {}), '(nearest, pnt_vec)\n', (2995, 3013), True, 'import numpy as np\n'), ((3088, 3108), 'numpy.dot', 'np.dot', (['(a - b)', '(c - b)'], {}), '(a - b, c - b)\n', (3094, 3108), True, 'import numpy as np\n'), ((3963, 3992), 'numpy.argmin', 'np.argmin', (['cost_matrix[ii, :]'], {}), '(cost_matrix[ii, :])\n', (3972, 3992), True, 'import numpy as np\n'), ((4387, 4414), 'rdp.rdp', 'rdp', (['ENUcoord_mmsi', 'EPSILON'], {}), '(ENUcoord_mmsi, EPSILON)\n', (4390, 4414), False, 'from rdp import rdp\n'), ((4434, 4459), 'numpy.array', 'np.array', (['ENUcoord_subset'], {}), '(ENUcoord_subset)\n', (4442, 4459), True, 'import numpy as np\n'), ((1212, 1290), 
'frechetdist.frdist', 'frdist', (['[rdpNodes[i - 1, :], rdpNodes[i, :]]', '[nodes[e[0], :], nodes[e[1], :]]'], {}), '([rdpNodes[i - 1, :], rdpNodes[i, :]], [nodes[e[0], :], nodes[e[1], :]])\n', (1218, 1290), False, 'from frechetdist import frdist\n'), ((1756, 1796), 'numpy.array', 'np.array', (['data1[:, clm_idx]'], {'dtype': 'float'}), '(data1[:, clm_idx], dtype=float)\n', (1764, 1796), True, 'import numpy as np\n'), ((1828, 1872), 'numpy.array', 'np.array', (['data1[:, clm_idx + 1]'], {'dtype': 'float'}), '(data1[:, clm_idx + 1], dtype=float)\n', (1836, 1872), True, 'import numpy as np\n'), ((3108, 3129), 'numpy.linalg.norm', 'np.linalg.norm', (['(a - b)'], {}), '(a - b)\n', (3122, 3129), True, 'import numpy as np\n'), ((3130, 3151), 'numpy.linalg.norm', 'np.linalg.norm', (['(c - b)'], {}), '(c - b)\n', (3144, 3151), True, 'import numpy as np\n'), ((4746, 4782), 'numpy.unique', 'np.unique', (['assignments[start:end, 1]'], {}), '(assignments[start:end, 1])\n', (4755, 4782), True, 'import numpy as np\n'), ((5858, 5890), 'numpy.argmax', 'np.argmax', (["(data1['mmsi'] == mmsi)"], {}), "(data1['mmsi'] == mmsi)\n", (5867, 5890), True, 'import numpy as np\n'), ((6044, 6071), 'rdp.rdp', 'rdp', (['ENUcoord_mmsi', 'EPSILON'], {}), '(ENUcoord_mmsi, EPSILON)\n', (6047, 6071), False, 'from rdp import rdp\n'), ((6095, 6120), 'numpy.array', 'np.array', (['ENUcoord_subset'], {}), '(ENUcoord_subset)\n', (6103, 6120), True, 'import numpy as np\n'), ((1095, 1118), 'numpy.argmax', 'np.argmax', (['(ends > start)'], {}), '(ends > start)\n', (1104, 1118), True, 'import numpy as np\n'), ((5925, 5954), 'numpy.sum', 'np.sum', (["(data1['mmsi'] == mmsi)"], {}), "(data1['mmsi'] == mmsi)\n", (5931, 5954), True, 'import numpy as np\n'), ((6374, 6410), 'numpy.unique', 'np.unique', (['assignments[start:end, 1]'], {}), '(assignments[start:end, 1])\n', (6383, 6410), True, 'import numpy as np\n'), ((4681, 4704), 'numpy.argmax', 'np.argmax', (['(ends > start)'], {}), '(ends > start)\n', (4690, 
4704), True, 'import numpy as np\n'), ((5002, 5053), 'numpy.abs', 'np.abs', (['(slope_rdpnodes - slope_edges[unique_labels])'], {}), '(slope_rdpnodes - slope_edges[unique_labels])\n', (5008, 5053), True, 'import numpy as np\n'), ((6307, 6330), 'numpy.argmax', 'np.argmax', (['(ends > start)'], {}), '(ends > start)\n', (6316, 6330), True, 'import numpy as np\n')]
|
import math
from typing import Tuple
import numpy as np
import open3d as o3d
import open3d.core as o3c
import pytest
from dq3d import quat, dualquat
import nnrt
import nnrt.geometry as nnrt_geom
from image_processing import compute_normals
from image_processing.numba_cuda.preprocessing import cuda_compute_normal
from image_processing.numpy_cpu.preprocessing import cpu_compute_normal
def generate_xy_plane_depth_image(resolution: Tuple[int, int], depth: int) -> np.ndarray:
    """Constant-depth uint16 image: every pixel equals ``depth``."""
    return np.full(resolution, depth, dtype=np.uint16)
def generate_xy_plane_color_image(resolution: Tuple[int, int], value: Tuple[int, int, int]) -> np.ndarray:
    """Uniform RGB uint8 image where every pixel equals ``value``."""
    canvas = np.empty((resolution[0], resolution[1], 3), dtype=np.uint8)
    canvas[:, :] = value
    return canvas
def construct_intrinsic_matrix1_3x3():
    """3x3 pinhole intrinsics: fx = fy = 100, principal point (50, 50)."""
    camera_matrix = np.eye(3, dtype=np.float32)
    camera_matrix[0, 0] = camera_matrix[1, 1] = 100.0  # focal lengths
    camera_matrix[0, 2] = camera_matrix[1, 2] = 50.0   # principal point
    return camera_matrix
def construct_intrinsic_matrix1_4x4():
    """4x4 homogeneous intrinsics with the same fx/fy/cx/cy as
    construct_intrinsic_matrix1_3x3."""
    camera_matrix = np.eye(4, dtype=np.float32)
    camera_matrix[0, 0] = camera_matrix[1, 1] = 100.0  # focal lengths
    camera_matrix[0, 2] = camera_matrix[1, 2] = 50.0   # principal point
    return camera_matrix
def construct_test_volume1(device=o3d.core.Device('cuda:0')):
    """Build a WarpableTSDFVoxelGrid and integrate one synthetic frame:
    a flat plane at 50 mm depth with uniform grey color, seen by a
    100x100 camera (fx = fy = 100, cx = cy = 50) at identity extrinsics.

    NOTE(review): the default ``device`` expression is evaluated once at
    import time — confirm constructing a cuda:0 Device on a CUDA-less
    machine is harmless in this open3d build.
    """
    # initialize volume
    voxel_size = 0.01  # 1 cm voxel size
    sdf_truncation_distance = 0.02  # truncation distance = 2cm
    block_resolution = 8  # 8^3 voxel blocks
    initial_block_count = 128  # initially allocated number of voxel blocks
    volume = nnrt.geometry.WarpableTSDFVoxelGrid(
        {
            'tsdf': o3d.core.Dtype.Float32,
            'weight': o3d.core.Dtype.UInt16,
            'color': o3d.core.Dtype.UInt16
        },
        voxel_size=voxel_size,
        sdf_trunc=sdf_truncation_distance,
        block_resolution=block_resolution,
        block_count=initial_block_count,
        device=device)
    # generate image
    image_width = 100
    image_height = 100
    image_resolution = (image_width, image_height)
    depth = 50  # mm
    depth_image = generate_xy_plane_depth_image(image_resolution, depth)
    depth_image_gpu = o3d.t.geometry.Image(o3c.Tensor(depth_image, device=device))
    value = (100, 100, 100)
    color_image = generate_xy_plane_color_image(image_resolution, value)
    color_image_gpu = o3d.t.geometry.Image(o3c.Tensor(color_image, device=device))
    # set up matrix parameters
    intrinsics = construct_intrinsic_matrix1_3x3()
    intrinsics_open3d_gpu = o3c.Tensor(intrinsics, device=device)
    extrinsics_open3d_gpu = o3c.Tensor(np.eye(4, dtype=np.float32), device=device)
    # integrate volume (trailing args: presumably depth scale 1000.0 and
    # depth max 3.0 m — confirm against the integrate() signature)
    volume.integrate(depth_image_gpu, color_image_gpu, intrinsics_open3d_gpu, extrinsics_open3d_gpu, 1000.0, 3.0)
    return volume
@pytest.mark.parametrize("device", [o3d.core.Device('cuda:0'), o3d.core.Device('cpu:0')])
def test_integrate_warped_simple_motion_dq(device):
    """Warped integration with dual-quaternion node motion: one node
    moves 1 cm toward the camera, the depth image gets a matching
    radial "pinch", and selected voxels are compared against stored
    reference TSDF/weight/cosine values."""
    camera_rotation = np.ascontiguousarray(np.eye(3, dtype=np.float32))
    camera_translation = np.ascontiguousarray(np.zeros(3, dtype=np.float32))
    # we need at least four nodes this time, otherwise psdf computation will consider voxel invalid and produce "NaN".
    # Make it five.
    nodes = np.array([[0.0, 0.0, 0.05],
                      [0.02, 0.0, 0.05],
                      [-0.02, 0.0, 0.05],
                      [0.00, 0.02, 0.05],
                      [0.00, -0.02, 0.05]],
                     dtype=np.float32)
    volume = construct_test_volume1(device)
    voxel_tsdf_and_weights: o3c.Tensor = volume.extract_tsdf_values_and_weights()
    voxel_tsdf_and_weights_np_originals = voxel_tsdf_and_weights.cpu().numpy()
    # the first node moves 1 cm along the negative z axis (towards the camera);
    # all the other nodes get the identity transform.
    node_dual_quaternions_dq3d = [dualquat(quat.identity(), quat(1.0, 0.0, 0.0, -0.005))] + [dualquat(quat.identity())] * (len(nodes) - 1)
    # Pack each dual quaternion as an 8-vector (real part, dual part).
    node_dual_quaternions = np.array([np.concatenate((dq.real.data, dq.dual.data)) for dq in node_dual_quaternions_dq3d])
    depth = 50  # mm
    image_width = 100
    image_height = 100
    image_resolution = (image_width, image_height)
    depth_image = generate_xy_plane_depth_image(image_resolution, depth)
    # let's imagine that the central surface point is 1 cm closer to the camera as well, so we alter the depth
    # to 40 mm there. Make the motion cease at the other four nodes, e.g. their depth should remain at 50.
    # We can make a radial "pinch" in the center of the depth image.
    # For our predefined camera, 1 px = 0.005 m, and the nodes are around the 0.002 m radius,
    # which puts our pixel radius at 0.002 / 0.0005 = 40 px
    pinch_diameter = 40
    pinch_radius = pinch_diameter // 2
    pinch_height = 10
    y_coordinates = np.linspace(-1, 1, pinch_diameter)[None, :] * pinch_height
    x_coordinates = np.linspace(-1, 1, pinch_diameter)[:, None] * pinch_height
    # Cone-shaped depression: deepest (-pinch_height) at the center.
    delta = -pinch_height + np.sqrt(x_coordinates ** 2 + y_coordinates ** 2)
    half_image_width = image_width // 2
    half_image_height = image_height // 2
    # @formatter:off
    depth_image[half_image_height - pinch_radius:half_image_height + pinch_radius,
                half_image_width - pinch_radius:half_image_width + pinch_radius] += np.round(delta).astype(np.uint16)
    # @formatter:on
    # ---- compute normals ----
    intrinsic_matrix = construct_intrinsic_matrix1_3x3()
    fx, fy, cx, cy = intrinsic_matrix[0, 0], intrinsic_matrix[1, 1], intrinsic_matrix[0, 2], intrinsic_matrix[1, 2]
    point_image = nnrt.backproject_depth_ushort(depth_image, fx, fy, cx, cy, 1000.0)
    normals = compute_normals(device, point_image)
    # ---- compute updates ----
    truncation_distance = 0.02  # same value as in construct_test_volume1
    node_coverage = 0.05
    depth_image_o3d = o3d.t.geometry.Image.from_legacy_image(o3d.geometry.Image(depth_image), device=device)
    normals_o3d = o3c.Tensor(normals, dtype=o3c.Dtype.Float32, device=device)
    intrinsic_matrix_o3d = o3c.Tensor(intrinsic_matrix, dtype=o3c.Dtype.Float32, device=device)
    extrinsic_matrix_o3d = o3c.Tensor.eye(4, dtype=o3c.Dtype.Float32, device=device)
    node_dual_quaternions_o3d = o3c.Tensor(node_dual_quaternions, dtype=o3c.Dtype.Float32, device=device)
    nodes_o3d = o3c.Tensor(nodes, dtype=o3c.Dtype.Float32, device=device)
    node_edges_o3d = o3c.Tensor((1, 1))
    cos_voxel_ray_to_normal = volume.integrate_warped_dq(
        depth_image_o3d, normals_o3d, intrinsic_matrix_o3d, extrinsic_matrix_o3d,
        nodes_o3d, node_edges_o3d, node_dual_quaternions_o3d, node_coverage,
        anchor_count=4, minimum_valid_anchor_count=3, depth_scale=1000.0, depth_max=3.0,
        compute_anchors_using=nnrt_geom.AnchorComputationMethod.EUCLIDEAN, use_node_distance_thresholding=False)
    cos_voxel_ray_to_normal = np.squeeze(cos_voxel_ray_to_normal.cpu().numpy())
    voxel_tsdf_and_weights: o3c.Tensor = volume.extract_tsdf_values_and_weights()
    voxel_tsdf_and_weights_np = voxel_tsdf_and_weights.cpu().numpy()
    # voxel in the center of the plane is at 0, 0, 0.05,
    # which should coincide with the first and only node
    # voxel global position is (0, 0, 5) (in voxels)
    # voxel is, presumably, in block 3
    # voxel's index in block 0 is 5 * (8*8) = 320
    # each block holds 512 voxels
    center_plane_voxel_index = 512 + 512 + 512 + 320
    indices_to_test = [center_plane_voxel_index,
                       center_plane_voxel_index + 1,
                       center_plane_voxel_index + 8,
                       center_plane_voxel_index + 16,
                       center_plane_voxel_index + 64]
    # generated using the above function.
    # Note: if anything about the reference implementation changes, these residuals need to be re-computed.
    # each array row contains:
    # u, v, cosine, tsdf, weight
    ground_truth_data = np.array([
        [50, 50, 0.4970065653324127, 0.0, 0.0],
        [71, 50, 0.9784621335214618, 0.06499883711342021, 2.0],
        [50, 71, 0.9784621335214618, 0.06499883711342021, 2.0],
        [50, 92, 0.9215041958391356, 0.06362117264804237, 2.0],
        [50, 50, 0.4970065653324127, 0.0, 0.0]
    ])

    def check_voxel_at(index, ground_truth):
        # Cosine must match at (u, v); TSDF/weight checked only where
        # the voxel was actually updated (cosine above 0.5).
        assert math.isclose(cos_voxel_ray_to_normal[int(ground_truth[0]), int(ground_truth[1])], ground_truth[2], abs_tol=1e-7)
        if ground_truth[2] > 0.5:
            assert np.allclose(voxel_tsdf_and_weights_np[index], ground_truth[3:])

    for index, ground_truth in zip(indices_to_test, ground_truth_data):
        check_voxel_at(index, ground_truth)
@pytest.mark.parametrize("device", [o3d.core.Device('cuda:0'), o3d.core.Device('cpu:0')])
def test_integrate_warped_simple_motion_mat(device):
camera_rotation = np.ascontiguousarray(np.eye(3, dtype=np.float32))
camera_translation = np.ascontiguousarray(np.zeros(3, dtype=np.float32))
# we need at least four nodes this time, otherwise psdf computation will consider voxel invalid and produce "NaN".
# Make it five.
nodes = np.array([[0.0, 0.0, 0.05],
[0.02, 0.0, 0.05],
[-0.02, 0.0, 0.05],
[0.00, 0.02, 0.05],
[0.00, -0.02, 0.05]],
dtype=np.float32)
# voxel size = 0.01 m
volume = construct_test_volume1(device)
voxel_tsdf_and_weights: o3c.Tensor = volume.extract_tsdf_values_and_weights()
voxel_tsdf_and_weights_np_originals = voxel_tsdf_and_weights.cpu().numpy()
# the first node moves 1 cm along the negative z axis (towards the camera).
node_dual_quaternions_dq3d = [dualquat(quat.identity(), quat(1.0, 0.0, 0.0, -0.005))] + [dualquat(quat.identity())] * (len(nodes) - 1)
node_rotations_mat = np.array([dq.rotation().to_rotation_matrix().astype(np.float32) for dq in node_dual_quaternions_dq3d])
node_translations_vec = np.array([dq.translation().astype(np.float32) for dq in node_dual_quaternions_dq3d])
depth = 50 # mm
image_width = 100
image_height = 100
image_resolution = (image_width, image_height)
depth_image = generate_xy_plane_depth_image(image_resolution, depth)
color_image = np.zeros((image_height, image_width, 3), dtype=np.uint8)
# let's imagine that the central surface point is 1 cm closer to the camera as well, so we alter the depth
# to 40 mm there. Make the motion cease at the other four nodes, e.g. their depth should remain at 50.
# We can make a radial "pinch" in the center of the depth image.
# For our predefined camera, 1 px = 0.005 m, and the nodes are around the 0.002 m radius,
# which puts our pixel radius at 0.002 / 0.0005 = 40 px
pinch_diameter = 40
pinch_radius = pinch_diameter // 2
pinch_height = 10
y_coordinates = np.linspace(-1, 1, pinch_diameter)[None, :] * pinch_height
x_coordinates = np.linspace(-1, 1, pinch_diameter)[:, None] * pinch_height
delta = -pinch_height + np.sqrt(x_coordinates ** 2 + y_coordinates ** 2)
half_image_width = image_width // 2
half_image_height = image_height // 2
# @formatter:off
depth_image[half_image_height - pinch_radius:half_image_height + pinch_radius,
half_image_width - pinch_radius:half_image_width + pinch_radius] += np.round(delta).astype(np.uint16)
# @formatter:on
# ---- compute normals ----
intrinsic_matrix = construct_intrinsic_matrix1_3x3()
fx, fy, cx, cy = intrinsic_matrix[0, 0], intrinsic_matrix[1, 1], intrinsic_matrix[0, 2], intrinsic_matrix[1, 2]
point_image = nnrt.backproject_depth_ushort(depth_image, fx, fy, cx, cy, 1000.0)
normals = compute_normals(device, point_image)
# ---- compute updates ----
node_coverage = 0.05
depth_image_o3d = o3d.t.geometry.Image.from_legacy_image(o3d.geometry.Image(depth_image), device=device)
color_image_o3d = o3d.t.geometry.Image.from_legacy_image(o3d.geometry.Image(color_image), device=device)
normals_o3d = o3c.Tensor(normals, dtype=o3c.Dtype.Float32, device=device)
intrinsic_matrix_o3d = o3c.Tensor(intrinsic_matrix, dtype=o3c.Dtype.Float32, device=device)
extrinsic_matrix_o3d = o3c.Tensor.eye(4, dtype=o3c.Dtype.Float32, device=device)
node_rotations_o3d = o3c.Tensor(node_rotations_mat, dtype=o3c.Dtype.Float32, device=device)
node_translations_o3d = o3c.Tensor(node_translations_vec, dtype=o3c.Dtype.Float32, device=device)
nodes_o3d = o3c.Tensor(nodes, dtype=o3c.Dtype.Float32, device=device)
edges_o3d = o3c.Tensor((1, 1))
cos_voxel_ray_to_normal = volume.integrate_warped_mat(
depth_image_o3d, color_image_o3d, normals_o3d, intrinsic_matrix_o3d, extrinsic_matrix_o3d,
nodes_o3d, edges_o3d, node_rotations_o3d, node_translations_o3d, node_coverage,
anchor_count=4, minimum_valid_anchor_count=3, depth_scale=1000.0, depth_max=3.0,
compute_anchors_using=nnrt_geom.AnchorComputationMethod.EUCLIDEAN, use_node_distance_thresholding=False)
cos_voxel_ray_to_normal = np.squeeze(cos_voxel_ray_to_normal.cpu().numpy())
voxel_tsdf_and_weights: o3c.Tensor = volume.extract_tsdf_values_and_weights()
voxel_tsdf_and_weights_np = voxel_tsdf_and_weights.cpu().numpy()
# voxel in the center of the plane is at 0, 0, 0.05,
# which should coincide with the first and only node
# voxel global position is (0, 0, 5) (in voxels)
# voxel is, presumably, in block 3
# voxel's index in block 0 is 5 * (8*8) = 320
# each block holds 512 voxels
center_plane_voxel_index = 512 + 512 + 512 + 320
indices_to_test = [center_plane_voxel_index,
center_plane_voxel_index + 1, # x + 1
center_plane_voxel_index + 8, # y + 1
center_plane_voxel_index + 16, # y + 2
center_plane_voxel_index + 64] # z + 1
# generated using the above function.
# Note: if anything about the reference implementation changes, these residuals need to be re-computed.
# each array row contains:
# v, u, cosine, tsdf, weight
ground_truth_data = np.array([
[50, 50, 0.4970065653324127, 0.0, 0.0],
[71, 50, 0.9784621335214618, 0.06499883711342021, 2.0],
[50, 71, 0.9784621335214618, 0.06499883711342021, 2.0],
[50, 92, 0.9215041958391356, 0.06362117264804237, 2.0],
[50, 50, 0.4970065653324127, 0.0, 0.0]
])
def check_voxel_at(index, ground_truth):
assert math.isclose(cos_voxel_ray_to_normal[int(ground_truth[0]), int(ground_truth[1])], ground_truth[2], abs_tol=1e-7)
if ground_truth[2] > 0.5:
assert np.allclose(voxel_tsdf_and_weights_np[index], ground_truth[3:])
for index, ground_truth in zip(indices_to_test, ground_truth_data):
check_voxel_at(index, ground_truth)
|
[
"numpy.allclose",
"numpy.ones",
"numpy.round",
"numpy.ndarray",
"open3d.core.Device",
"open3d.geometry.Image",
"image_processing.compute_normals",
"dq3d.quat",
"numpy.linspace",
"nnrt.backproject_depth_ushort",
"open3d.core.Tensor.eye",
"open3d.core.Tensor",
"dq3d.quat.identity",
"numpy.concatenate",
"numpy.zeros",
"numpy.array",
"numpy.eye",
"nnrt.geometry.WarpableTSDFVoxelGrid",
"numpy.sqrt"
] |
[((676, 737), 'numpy.ndarray', 'np.ndarray', (['(resolution[0], resolution[1], 3)'], {'dtype': 'np.uint8'}), '((resolution[0], resolution[1], 3), dtype=np.uint8)\n', (686, 737), True, 'import numpy as np\n'), ((837, 864), 'numpy.eye', 'np.eye', (['(3)'], {'dtype': 'np.float32'}), '(3, dtype=np.float32)\n', (843, 864), True, 'import numpy as np\n'), ((1059, 1086), 'numpy.eye', 'np.eye', (['(4)'], {'dtype': 'np.float32'}), '(4, dtype=np.float32)\n', (1065, 1086), True, 'import numpy as np\n'), ((1259, 1284), 'open3d.core.Device', 'o3d.core.Device', (['"""cuda:0"""'], {}), "('cuda:0')\n", (1274, 1284), True, 'import open3d as o3d\n'), ((1551, 1842), 'nnrt.geometry.WarpableTSDFVoxelGrid', 'nnrt.geometry.WarpableTSDFVoxelGrid', (["{'tsdf': o3d.core.Dtype.Float32, 'weight': o3d.core.Dtype.UInt16, 'color':\n o3d.core.Dtype.UInt16}"], {'voxel_size': 'voxel_size', 'sdf_trunc': 'sdf_truncation_distance', 'block_resolution': 'block_resolution', 'block_count': 'initial_block_count', 'device': 'device'}), "({'tsdf': o3d.core.Dtype.Float32,\n 'weight': o3d.core.Dtype.UInt16, 'color': o3d.core.Dtype.UInt16},\n voxel_size=voxel_size, sdf_trunc=sdf_truncation_distance,\n block_resolution=block_resolution, block_count=initial_block_count,\n device=device)\n", (1586, 1842), False, 'import nnrt\n'), ((2512, 2549), 'open3d.core.Tensor', 'o3c.Tensor', (['intrinsics'], {'device': 'device'}), '(intrinsics, device=device)\n', (2522, 2549), True, 'import open3d.core as o3c\n'), ((3233, 3362), 'numpy.array', 'np.array', (['[[0.0, 0.0, 0.05], [0.02, 0.0, 0.05], [-0.02, 0.0, 0.05], [0.0, 0.02, 0.05],\n [0.0, -0.02, 0.05]]'], {'dtype': 'np.float32'}), '([[0.0, 0.0, 0.05], [0.02, 0.0, 0.05], [-0.02, 0.0, 0.05], [0.0, \n 0.02, 0.05], [0.0, -0.02, 0.05]], dtype=np.float32)\n', (3241, 3362), True, 'import numpy as np\n'), ((5507, 5573), 'nnrt.backproject_depth_ushort', 'nnrt.backproject_depth_ushort', (['depth_image', 'fx', 'fy', 'cx', 'cy', '(1000.0)'], {}), '(depth_image, fx, fy, cx, cy, 
1000.0)\n', (5536, 5573), False, 'import nnrt\n'), ((5588, 5624), 'image_processing.compute_normals', 'compute_normals', (['device', 'point_image'], {}), '(device, point_image)\n', (5603, 5624), False, 'from image_processing import compute_normals\n'), ((5884, 5943), 'open3d.core.Tensor', 'o3c.Tensor', (['normals'], {'dtype': 'o3c.Dtype.Float32', 'device': 'device'}), '(normals, dtype=o3c.Dtype.Float32, device=device)\n', (5894, 5943), True, 'import open3d.core as o3c\n'), ((5971, 6039), 'open3d.core.Tensor', 'o3c.Tensor', (['intrinsic_matrix'], {'dtype': 'o3c.Dtype.Float32', 'device': 'device'}), '(intrinsic_matrix, dtype=o3c.Dtype.Float32, device=device)\n', (5981, 6039), True, 'import open3d.core as o3c\n'), ((6067, 6124), 'open3d.core.Tensor.eye', 'o3c.Tensor.eye', (['(4)'], {'dtype': 'o3c.Dtype.Float32', 'device': 'device'}), '(4, dtype=o3c.Dtype.Float32, device=device)\n', (6081, 6124), True, 'import open3d.core as o3c\n'), ((6157, 6230), 'open3d.core.Tensor', 'o3c.Tensor', (['node_dual_quaternions'], {'dtype': 'o3c.Dtype.Float32', 'device': 'device'}), '(node_dual_quaternions, dtype=o3c.Dtype.Float32, device=device)\n', (6167, 6230), True, 'import open3d.core as o3c\n'), ((6247, 6304), 'open3d.core.Tensor', 'o3c.Tensor', (['nodes'], {'dtype': 'o3c.Dtype.Float32', 'device': 'device'}), '(nodes, dtype=o3c.Dtype.Float32, device=device)\n', (6257, 6304), True, 'import open3d.core as o3c\n'), ((6326, 6344), 'open3d.core.Tensor', 'o3c.Tensor', (['(1, 1)'], {}), '((1, 1))\n', (6336, 6344), True, 'import open3d.core as o3c\n'), ((7846, 8124), 'numpy.array', 'np.array', (['[[50, 50, 0.4970065653324127, 0.0, 0.0], [71, 50, 0.9784621335214618, \n 0.06499883711342021, 2.0], [50, 71, 0.9784621335214618, \n 0.06499883711342021, 2.0], [50, 92, 0.9215041958391356, \n 0.06362117264804237, 2.0], [50, 50, 0.4970065653324127, 0.0, 0.0]]'], {}), '([[50, 50, 0.4970065653324127, 0.0, 0.0], [71, 50, \n 0.9784621335214618, 0.06499883711342021, 2.0], [50, 71, \n 0.9784621335214618, 
0.06499883711342021, 2.0], [50, 92, \n 0.9215041958391356, 0.06362117264804237, 2.0], [50, 50, \n 0.4970065653324127, 0.0, 0.0]])\n', (7854, 8124), True, 'import numpy as np\n'), ((9004, 9133), 'numpy.array', 'np.array', (['[[0.0, 0.0, 0.05], [0.02, 0.0, 0.05], [-0.02, 0.0, 0.05], [0.0, 0.02, 0.05],\n [0.0, -0.02, 0.05]]'], {'dtype': 'np.float32'}), '([[0.0, 0.0, 0.05], [0.02, 0.0, 0.05], [-0.02, 0.0, 0.05], [0.0, \n 0.02, 0.05], [0.0, -0.02, 0.05]], dtype=np.float32)\n', (9012, 9133), True, 'import numpy as np\n'), ((10142, 10198), 'numpy.zeros', 'np.zeros', (['(image_height, image_width, 3)'], {'dtype': 'np.uint8'}), '((image_height, image_width, 3), dtype=np.uint8)\n', (10150, 10198), True, 'import numpy as np\n'), ((11510, 11576), 'nnrt.backproject_depth_ushort', 'nnrt.backproject_depth_ushort', (['depth_image', 'fx', 'fy', 'cx', 'cy', '(1000.0)'], {}), '(depth_image, fx, fy, cx, cy, 1000.0)\n', (11539, 11576), False, 'import nnrt\n'), ((11591, 11627), 'image_processing.compute_normals', 'compute_normals', (['device', 'point_image'], {}), '(device, point_image)\n', (11606, 11627), False, 'from image_processing import compute_normals\n'), ((11922, 11981), 'open3d.core.Tensor', 'o3c.Tensor', (['normals'], {'dtype': 'o3c.Dtype.Float32', 'device': 'device'}), '(normals, dtype=o3c.Dtype.Float32, device=device)\n', (11932, 11981), True, 'import open3d.core as o3c\n'), ((12009, 12077), 'open3d.core.Tensor', 'o3c.Tensor', (['intrinsic_matrix'], {'dtype': 'o3c.Dtype.Float32', 'device': 'device'}), '(intrinsic_matrix, dtype=o3c.Dtype.Float32, device=device)\n', (12019, 12077), True, 'import open3d.core as o3c\n'), ((12105, 12162), 'open3d.core.Tensor.eye', 'o3c.Tensor.eye', (['(4)'], {'dtype': 'o3c.Dtype.Float32', 'device': 'device'}), '(4, dtype=o3c.Dtype.Float32, device=device)\n', (12119, 12162), True, 'import open3d.core as o3c\n'), ((12188, 12258), 'open3d.core.Tensor', 'o3c.Tensor', (['node_rotations_mat'], {'dtype': 'o3c.Dtype.Float32', 'device': 'device'}), 
'(node_rotations_mat, dtype=o3c.Dtype.Float32, device=device)\n', (12198, 12258), True, 'import open3d.core as o3c\n'), ((12287, 12360), 'open3d.core.Tensor', 'o3c.Tensor', (['node_translations_vec'], {'dtype': 'o3c.Dtype.Float32', 'device': 'device'}), '(node_translations_vec, dtype=o3c.Dtype.Float32, device=device)\n', (12297, 12360), True, 'import open3d.core as o3c\n'), ((12377, 12434), 'open3d.core.Tensor', 'o3c.Tensor', (['nodes'], {'dtype': 'o3c.Dtype.Float32', 'device': 'device'}), '(nodes, dtype=o3c.Dtype.Float32, device=device)\n', (12387, 12434), True, 'import open3d.core as o3c\n'), ((12451, 12469), 'open3d.core.Tensor', 'o3c.Tensor', (['(1, 1)'], {}), '((1, 1))\n', (12461, 12469), True, 'import open3d.core as o3c\n'), ((14036, 14314), 'numpy.array', 'np.array', (['[[50, 50, 0.4970065653324127, 0.0, 0.0], [71, 50, 0.9784621335214618, \n 0.06499883711342021, 2.0], [50, 71, 0.9784621335214618, \n 0.06499883711342021, 2.0], [50, 92, 0.9215041958391356, \n 0.06362117264804237, 2.0], [50, 50, 0.4970065653324127, 0.0, 0.0]]'], {}), '([[50, 50, 0.4970065653324127, 0.0, 0.0], [71, 50, \n 0.9784621335214618, 0.06499883711342021, 2.0], [50, 71, \n 0.9784621335214618, 0.06499883711342021, 2.0], [50, 92, \n 0.9215041958391356, 0.06362117264804237, 2.0], [50, 50, \n 0.4970065653324127, 0.0, 0.0]])\n', (14044, 14314), True, 'import numpy as np\n'), ((493, 529), 'numpy.ones', 'np.ones', (['resolution'], {'dtype': 'np.uint16'}), '(resolution, dtype=np.uint16)\n', (500, 529), True, 'import numpy as np\n'), ((2177, 2215), 'open3d.core.Tensor', 'o3c.Tensor', (['depth_image'], {'device': 'device'}), '(depth_image, device=device)\n', (2187, 2215), True, 'import open3d.core as o3c\n'), ((2361, 2399), 'open3d.core.Tensor', 'o3c.Tensor', (['color_image'], {'device': 'device'}), '(color_image, device=device)\n', (2371, 2399), True, 'import open3d.core as o3c\n'), ((2589, 2616), 'numpy.eye', 'np.eye', (['(4)'], {'dtype': 'np.float32'}), '(4, dtype=np.float32)\n', (2595, 2616), 
True, 'import numpy as np\n'), ((2976, 3003), 'numpy.eye', 'np.eye', (['(3)'], {'dtype': 'np.float32'}), '(3, dtype=np.float32)\n', (2982, 3003), True, 'import numpy as np\n'), ((3051, 3080), 'numpy.zeros', 'np.zeros', (['(3)'], {'dtype': 'np.float32'}), '(3, dtype=np.float32)\n', (3059, 3080), True, 'import numpy as np\n'), ((4921, 4969), 'numpy.sqrt', 'np.sqrt', (['(x_coordinates ** 2 + y_coordinates ** 2)'], {}), '(x_coordinates ** 2 + y_coordinates ** 2)\n', (4928, 4969), True, 'import numpy as np\n'), ((5818, 5849), 'open3d.geometry.Image', 'o3d.geometry.Image', (['depth_image'], {}), '(depth_image)\n', (5836, 5849), True, 'import open3d as o3d\n'), ((2827, 2852), 'open3d.core.Device', 'o3d.core.Device', (['"""cuda:0"""'], {}), "('cuda:0')\n", (2842, 2852), True, 'import open3d as o3d\n'), ((2854, 2878), 'open3d.core.Device', 'o3d.core.Device', (['"""cpu:0"""'], {}), "('cpu:0')\n", (2869, 2878), True, 'import open3d as o3d\n'), ((8747, 8774), 'numpy.eye', 'np.eye', (['(3)'], {'dtype': 'np.float32'}), '(3, dtype=np.float32)\n', (8753, 8774), True, 'import numpy as np\n'), ((8822, 8851), 'numpy.zeros', 'np.zeros', (['(3)'], {'dtype': 'np.float32'}), '(3, dtype=np.float32)\n', (8830, 8851), True, 'import numpy as np\n'), ((10912, 10960), 'numpy.sqrt', 'np.sqrt', (['(x_coordinates ** 2 + y_coordinates ** 2)'], {}), '(x_coordinates ** 2 + y_coordinates ** 2)\n', (10919, 10960), True, 'import numpy as np\n'), ((11747, 11778), 'open3d.geometry.Image', 'o3d.geometry.Image', (['depth_image'], {}), '(depth_image)\n', (11765, 11778), True, 'import open3d as o3d\n'), ((11856, 11887), 'open3d.geometry.Image', 'o3d.geometry.Image', (['color_image'], {}), '(color_image)\n', (11874, 11887), True, 'import open3d as o3d\n'), ((8597, 8622), 'open3d.core.Device', 'o3d.core.Device', (['"""cuda:0"""'], {}), "('cuda:0')\n", (8612, 8622), True, 'import open3d as o3d\n'), ((8624, 8648), 'open3d.core.Device', 'o3d.core.Device', (['"""cpu:0"""'], {}), "('cpu:0')\n", (8639, 8648), True, 
'import open3d as o3d\n'), ((3933, 3977), 'numpy.concatenate', 'np.concatenate', (['(dq.real.data, dq.dual.data)'], {}), '((dq.real.data, dq.dual.data))\n', (3947, 3977), True, 'import numpy as np\n'), ((4755, 4789), 'numpy.linspace', 'np.linspace', (['(-1)', '(1)', 'pinch_diameter'], {}), '(-1, 1, pinch_diameter)\n', (4766, 4789), True, 'import numpy as np\n'), ((4834, 4868), 'numpy.linspace', 'np.linspace', (['(-1)', '(1)', 'pinch_diameter'], {}), '(-1, 1, pinch_diameter)\n', (4845, 4868), True, 'import numpy as np\n'), ((5228, 5243), 'numpy.round', 'np.round', (['delta'], {}), '(delta)\n', (5236, 5243), True, 'import numpy as np\n'), ((8378, 8441), 'numpy.allclose', 'np.allclose', (['voxel_tsdf_and_weights_np[index]', 'ground_truth[3:]'], {}), '(voxel_tsdf_and_weights_np[index], ground_truth[3:])\n', (8389, 8441), True, 'import numpy as np\n'), ((10746, 10780), 'numpy.linspace', 'np.linspace', (['(-1)', '(1)', 'pinch_diameter'], {}), '(-1, 1, pinch_diameter)\n', (10757, 10780), True, 'import numpy as np\n'), ((10825, 10859), 'numpy.linspace', 'np.linspace', (['(-1)', '(1)', 'pinch_diameter'], {}), '(-1, 1, pinch_diameter)\n', (10836, 10859), True, 'import numpy as np\n'), ((11231, 11246), 'numpy.round', 'np.round', (['delta'], {}), '(delta)\n', (11239, 11246), True, 'import numpy as np\n'), ((14568, 14631), 'numpy.allclose', 'np.allclose', (['voxel_tsdf_and_weights_np[index]', 'ground_truth[3:]'], {}), '(voxel_tsdf_and_weights_np[index], ground_truth[3:])\n', (14579, 14631), True, 'import numpy as np\n'), ((3799, 3814), 'dq3d.quat.identity', 'quat.identity', ([], {}), '()\n', (3812, 3814), False, 'from dq3d import quat, dualquat\n'), ((3816, 3843), 'dq3d.quat', 'quat', (['(1.0)', '(0.0)', '(0.0)', '(-0.005)'], {}), '(1.0, 0.0, 0.0, -0.005)\n', (3820, 3843), False, 'from dq3d import quat, dualquat\n'), ((9596, 9611), 'dq3d.quat.identity', 'quat.identity', ([], {}), '()\n', (9609, 9611), False, 'from dq3d import quat, dualquat\n'), ((9613, 9640), 'dq3d.quat', 
'quat', (['(1.0)', '(0.0)', '(0.0)', '(-0.005)'], {}), '(1.0, 0.0, 0.0, -0.005)\n', (9617, 9640), False, 'from dq3d import quat, dualquat\n'), ((3858, 3873), 'dq3d.quat.identity', 'quat.identity', ([], {}), '()\n', (3871, 3873), False, 'from dq3d import quat, dualquat\n'), ((9655, 9670), 'dq3d.quat.identity', 'quat.identity', ([], {}), '()\n', (9668, 9670), False, 'from dq3d import quat, dualquat\n')]
|
##############################################################################
#
# Copyright (c) 2016, <NAME>
#
# This file is part of arlpy which is released under Simplified BSD License.
# See file LICENSE or go to http://www.opensource.org/licenses/BSD-3-Clause
# for full license details.
#
##############################################################################
"""DTLA support toolbox."""
import os as _os
import numpy as _np
from scipy import signal as _sig
_fs = 1/(1.6e-6*26)
_framelen = 2*26
_channels = 24
_magic = 0xc0de
def check(filename):
"""Check if a file is likely to be a valid DTLA datafile."""
statinfo = _os.stat(filename)
if statinfo.st_size >= 2*2*_channels:
with open(filename, 'rb') as f:
data = _np.fromfile(f, dtype=_np.uint16, count=_framelen/2)
if data[0] == _magic & data[1] == _magic:
return True
return False
def get_sampling_rate(filename=None):
"""Get the sampling rate in Hz."""
return _fs
def get_channels(filename=None):
"""Get the number of available data channels."""
return _channels
def get_data_length(filename):
"""Get the length of the datafile in samples."""
statinfo = _os.stat(filename)
return statinfo.st_size//_framelen
def get_data(filename, channel=None, start=0, length=None, detrend='linear'):
"""Load selected data from DTLA recording.
:param filename: name of the datafile
:param channel: list of channels to read (base 0, None to read all channels)
:param start: sample index to start from
:param length: number of samples to read (None means read all available samples)
:param detrend: processing to be applied to each channel to remove offset/bias
(supported values: ``'linear'``, ``'constant'``, ``None``)
"""
if channel is None:
channel = range(_channels)
elif isinstance(channel, int):
channel = [channel]
if length is None:
length = get_data_length(filename)-start
with open(filename, 'rb') as f:
f.seek(start*_framelen, _os.SEEK_SET)
data = _np.fromfile(f, dtype=_np.uint16, count=_framelen//2*length)
data = _np.reshape(data, [length,_framelen//2])
data = data[:,2:]
data = _np.take(data, channel, axis=1).astype(_np.float)
if len(channel) == 1:
data = data.ravel()
data = 5*data/65536-2.5
if detrend is not None:
data = _sig.detrend(data, axis=0, type=detrend)
return data
|
[
"os.stat",
"numpy.fromfile",
"numpy.take",
"numpy.reshape",
"scipy.signal.detrend"
] |
[((645, 663), 'os.stat', '_os.stat', (['filename'], {}), '(filename)\n', (653, 663), True, 'import os as _os\n'), ((1210, 1228), 'os.stat', '_os.stat', (['filename'], {}), '(filename)\n', (1218, 1228), True, 'import os as _os\n'), ((2181, 2224), 'numpy.reshape', '_np.reshape', (['data', '[length, _framelen // 2]'], {}), '(data, [length, _framelen // 2])\n', (2192, 2224), True, 'import numpy as _np\n'), ((2109, 2173), 'numpy.fromfile', '_np.fromfile', (['f'], {'dtype': '_np.uint16', 'count': '(_framelen // 2 * length)'}), '(f, dtype=_np.uint16, count=_framelen // 2 * length)\n', (2121, 2173), True, 'import numpy as _np\n'), ((2430, 2470), 'scipy.signal.detrend', '_sig.detrend', (['data'], {'axis': '(0)', 'type': 'detrend'}), '(data, axis=0, type=detrend)\n', (2442, 2470), True, 'from scipy import signal as _sig\n'), ((765, 819), 'numpy.fromfile', '_np.fromfile', (['f'], {'dtype': '_np.uint16', 'count': '(_framelen / 2)'}), '(f, dtype=_np.uint16, count=_framelen / 2)\n', (777, 819), True, 'import numpy as _np\n'), ((2255, 2286), 'numpy.take', '_np.take', (['data', 'channel'], {'axis': '(1)'}), '(data, channel, axis=1)\n', (2263, 2286), True, 'import numpy as _np\n')]
|
# -*- coding: utf-8 -*-
import unittest
import os
import pickle
import pandas as pd
import numpy as np
from td_query import ROOT_PATH
from td_query.data_manipulate import data_manipulate_instance as instance
from teradata import UdaExec
class TestDataManipulate(unittest.TestCase):
@classmethod
def setUpClass(cls):
print("**************************************** setUpClass ****************************************")
instance.init()
print(instance.teradata)
@classmethod
def tearDownClass(cls):
print("************************************** tearDownClass ***************************************")
def setUp(self):
print("****** setUp *******")
def tearDown(self):
print("***** tearDown *****")
def _example(self):
df = instance.query_sample()
# with open(ROOT_PATH + '/external/df_dispatch_bna.pickle', 'wb') as f: # save
# pickle.dump(df, f)
print(df)
def _query(self):
query = '''select top 10 * from pp_scratch_risk.ms_auto_trend_us_bad;'''
df = instance.query(query)
print(df)
def _query_table_schema(self):
dest_db = "pp_scratch_risk"
dest_table = "ms_auto_trend_us2_1_3_100_100_1_1_1"
result_cursor = instance.teradata.execute("show select * from {}.{};".format(dest_db, dest_table))
last_row = result_cursor.fetchall()
print(last_row)
def _query_table_top_rows(self):
table = "pp_scratch_risk.ms_auto_trend_us_bad"
df = instance.query_table_top_rows(table)
print(df)
def _drop_table(self):
dest_db = "pp_scratch_risk"
dest_table = "ms_auto_trend_us2_1_3_100_100_1_1_1"
instance.drop_table(dest_db, dest_table)
def _transalte_100_63_22_14_1(self):
rules = [
"(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '<missing>') & (amt2 == 'a-1k') & (SELLER_CONSUMER_SEG == 'C')",
"(SELLER_CONSUMER_SEG == 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string == '10008') & (amt2 != 'c-1h') & (amt2 != 'e-<50')",
]
result = instance.translate_hyperloop_rules_to_sql(rules)
print(result)
def _duplicate_rows_to_new_table(self):
src_db = "pp_scratch_risk"
src_table = 'ms_auto_trend_us2_1_3'
dest_db = "pp_scratch_risk"
weight_a = 900
weight_b = 400
weight_c = 9
weight_d = 16
weight_e = 1
dest_table = "ms_auto_trend_us2_1_3_{}_{}_{}_{}_{}".format(weight_a, weight_b, weight_c, weight_d, weight_e)
instance.duplicate_rows_to_new_table(src_db, src_table, dest_db, dest_table, weight_a, weight_b, weight_c, weight_d, weight_e)
def _duplicate_rows_from_bad_and_sample_from_good_into_new_table(self):
src_db = "pp_scratch_risk"
src_table = 'ms_auto_trend_us'
dest_db = "pp_scratch_risk"
bad_scale = 1
good_scale = 3
weight_a = 52
weight_b = 16
weight_c = 23
weight_d = 5
weight_e = 4
dest_table = "ms_auto_trend_us_{}_{}__{}_{}_{}_{}_{}_v2".format(bad_scale, good_scale, weight_a, weight_b, weight_c, weight_d, weight_e)
instance.duplicate_rows_from_bad_and_sample_from_good_into_new_table(src_db, src_table, dest_db, dest_table,
bad_scale, good_scale,
weight_a, weight_b, weight_c, weight_d, weight_e)
def _generate_hl_job_json(self):
training_table = "ms_auto_trend_us2_1_3"
testing_table = "ms_auto_trend_us_t"
instance.generate_hl_job_json(training_table, testing_table, template_name='hl_job_template_na.json')
def _add_weight_col_to_table(self):
src_db = "pp_scratch_risk"
src_table = 'ms_auto_trend_us2_1_3'
# weight_a = 0.312
# weight_b = 0.140
# weight_c = 0.011
# weight_d = 0.011
# weight_e = 0.001
weight_a = 10 * 30
weight_b = 8 * 20
weight_c = 4.6 * 3
weight_d = 3.7 * 4
weight_e = 1 * 1
instance.add_weight_col_to_table(src_db, src_table, weight_a, weight_b, weight_c, weight_d, weight_e)
def _update_weight_col_in_table(self):
src_db = "pp_scratch_risk"
src_table = 'ms_auto_trend_us2_1_3'
src_col = 'PMT_USD_AMT'
instance.update_weight_col_in_table(src_db, src_table, src_col)
def _update_custom_weight_col_in_table(self):
src_db = "pp_scratch_risk"
src_table = 'ms_auto_trend_us2_1_3'
src_col = 'PMT_USD_AMT'
instance.update_custom_weight_col_in_table(src_db, src_table, src_col)
|
[
"td_query.data_manipulate.data_manipulate_instance.update_custom_weight_col_in_table",
"td_query.data_manipulate.data_manipulate_instance.query",
"td_query.data_manipulate.data_manipulate_instance.init",
"td_query.data_manipulate.data_manipulate_instance.generate_hl_job_json",
"td_query.data_manipulate.data_manipulate_instance.drop_table",
"td_query.data_manipulate.data_manipulate_instance.duplicate_rows_from_bad_and_sample_from_good_into_new_table",
"td_query.data_manipulate.data_manipulate_instance.query_sample",
"td_query.data_manipulate.data_manipulate_instance.update_weight_col_in_table",
"td_query.data_manipulate.data_manipulate_instance.add_weight_col_to_table",
"td_query.data_manipulate.data_manipulate_instance.duplicate_rows_to_new_table",
"td_query.data_manipulate.data_manipulate_instance.translate_hyperloop_rules_to_sql",
"td_query.data_manipulate.data_manipulate_instance.query_table_top_rows"
] |
[((445, 460), 'td_query.data_manipulate.data_manipulate_instance.init', 'instance.init', ([], {}), '()\n', (458, 460), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((811, 834), 'td_query.data_manipulate.data_manipulate_instance.query_sample', 'instance.query_sample', ([], {}), '()\n', (832, 834), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((1091, 1112), 'td_query.data_manipulate.data_manipulate_instance.query', 'instance.query', (['query'], {}), '(query)\n', (1105, 1112), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((1543, 1579), 'td_query.data_manipulate.data_manipulate_instance.query_table_top_rows', 'instance.query_table_top_rows', (['table'], {}), '(table)\n', (1572, 1579), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((1729, 1769), 'td_query.data_manipulate.data_manipulate_instance.drop_table', 'instance.drop_table', (['dest_db', 'dest_table'], {}), '(dest_db, dest_table)\n', (1748, 1769), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((2144, 2192), 'td_query.data_manipulate.data_manipulate_instance.translate_hyperloop_rules_to_sql', 'instance.translate_hyperloop_rules_to_sql', (['rules'], {}), '(rules)\n', (2185, 2192), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((2611, 2741), 'td_query.data_manipulate.data_manipulate_instance.duplicate_rows_to_new_table', 'instance.duplicate_rows_to_new_table', (['src_db', 'src_table', 'dest_db', 'dest_table', 'weight_a', 'weight_b', 'weight_c', 'weight_d', 'weight_e'], {}), '(src_db, src_table, dest_db, dest_table,\n weight_a, weight_b, weight_c, weight_d, weight_e)\n', (2647, 2741), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((3232, 3421), 
'td_query.data_manipulate.data_manipulate_instance.duplicate_rows_from_bad_and_sample_from_good_into_new_table', 'instance.duplicate_rows_from_bad_and_sample_from_good_into_new_table', (['src_db', 'src_table', 'dest_db', 'dest_table', 'bad_scale', 'good_scale', 'weight_a', 'weight_b', 'weight_c', 'weight_d', 'weight_e'], {}), '(src_db,\n src_table, dest_db, dest_table, bad_scale, good_scale, weight_a,\n weight_b, weight_c, weight_d, weight_e)\n', (3300, 3421), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((3708, 3814), 'td_query.data_manipulate.data_manipulate_instance.generate_hl_job_json', 'instance.generate_hl_job_json', (['training_table', 'testing_table'], {'template_name': '"""hl_job_template_na.json"""'}), "(training_table, testing_table, template_name=\n 'hl_job_template_na.json')\n", (3737, 3814), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((4205, 4310), 'td_query.data_manipulate.data_manipulate_instance.add_weight_col_to_table', 'instance.add_weight_col_to_table', (['src_db', 'src_table', 'weight_a', 'weight_b', 'weight_c', 'weight_d', 'weight_e'], {}), '(src_db, src_table, weight_a, weight_b,\n weight_c, weight_d, weight_e)\n', (4237, 4310), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((4470, 4533), 'td_query.data_manipulate.data_manipulate_instance.update_weight_col_in_table', 'instance.update_weight_col_in_table', (['src_db', 'src_table', 'src_col'], {}), '(src_db, src_table, src_col)\n', (4505, 4533), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((4704, 4774), 'td_query.data_manipulate.data_manipulate_instance.update_custom_weight_col_in_table', 'instance.update_custom_weight_col_in_table', (['src_db', 'src_table', 'src_col'], {}), '(src_db, src_table, src_col)\n', (4746, 4774), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n')]
|
#!/usr/bin/env python
# This script needs to control its startup sequence to interface with ParaView's
# `pvpython`.
#
# 1. The user launches `gwrender.py` in a Python environment of their choice.
# They have `gwpv` and its dependencies installed in this environment.
# The `pvpython` executable is available in the `PATH`.
# 2. CLI arguments are parsed.
# a. The `scene` entrypoint is dispatched to `pvpython` in a subprocess,
# passing along the path to the active Python environment.
# b. The `scenes` entrypoint launches subprocesses with the `pvpython`
# executable that each call the `scene` entrypoint.
# 3. Now running in `pvpython`, the Python environment is activated using its
# `activate_this.py` script.
# 4. The `gwpv.render.frames` module is imported in the global namespace so
# ParaView plugins are loaded and work with `multiprocessing`.
#
# FIXME:
# - Installing in editable mode with `pip install -e` is broken
# - Generated state file doesn't `UpdatePipeline()` in between adding the
# reader and the filter, so the timesteps are not loaded from the file yet.
# This generates an error in the GUI and timesteps are unavailable.
# I had no success propagating the time range from the reader to the filter
# in `RequestInformation` so far, neither using information keys nor
# `vtkFieldData`.
from __future__ import division
import logging
import json
def _render_frame_window(job_id_and_frame_window, **kwargs):
from gwpv.render.frames import render_frames
render_frames(job_id=job_id_and_frame_window[0],
frame_window=job_id_and_frame_window[1],
**kwargs)
def render_parallel(num_jobs, scene, frame_window=None, **kwargs):
import functools
import h5py
import multiprocessing
from gwpv.scene_configuration import parse_as, animate
from tqdm import tqdm
logger = logging.getLogger(__name__)
# Infer frame window if needed
if 'FreezeTime' in scene['Animation']:
frame_window = (0, 1)
elif frame_window is None:
if 'Crop' in scene['Animation']:
max_animation_length = (scene['Animation']['Crop'][1] -
scene['Animation']['Crop'][0])
else:
waveform_file_and_subfile = parse_as.file_and_subfile(
scene['Datasources']['Waveform'])
with h5py.File(waveform_file_and_subfile[0], 'r') as waveform_file:
waveform_times = waveform_file[
waveform_file_and_subfile[1]]['Y_l2_m2.dat'][:, 0]
max_animation_length = waveform_times[-1] - waveform_times[0]
logger.debug(
"Inferred max. animation length {}M from waveform data.".
format(max_animation_length))
frame_window = (0,
animate.num_frames(
max_animation_length=max_animation_length,
animation_speed=scene['Animation']['Speed'],
frame_rate=scene['Animation']['FrameRate']))
logger.debug("Inferred total frame window: {}".format(frame_window))
num_frames = frame_window[1] - frame_window[0]
frames_per_job = int(num_frames / num_jobs)
extra_frames = num_frames % num_jobs
logger.debug(
"Using {} jobs with {} frames per job ({} jobs render an additional frame)."
.format(num_jobs, frames_per_job, extra_frames))
frame_windows = []
distributed_frames = frame_window[0]
for i in range(num_jobs):
frames_this_job = frames_per_job + (1 if i < extra_frames else 0)
frame_windows.append(
(distributed_frames, distributed_frames + frames_this_job))
distributed_frames += frames_this_job
logger.debug("Frame windows: {}".format(frame_windows))
pool = multiprocessing.Pool(num_jobs,
initializer=tqdm.set_lock,
initargs=(tqdm.get_lock(), ))
render_frame_window = functools.partial(_render_frame_window,
scene=scene,
**kwargs)
pool.map(render_frame_window, enumerate(frame_windows))
def render_scene_entrypoint(scene_files, keypath_overrides, scene_paths,
num_jobs, render_movie_to_file,
force_offscreen_rendering, **kwargs):
from gwpv.scene_configuration.load import load_scene
from gwpv.download_data import download_data
from gwpv.swsh_cache import precompute_cached_swsh_grid
# Validate options
assert (
kwargs['frames_dir'] is not None or kwargs['no_render']
or render_movie_to_file is not None
), "Provide the `--frames-dir` option, the '--render-movie-to-file' option, or disable rendering with `--no-render`."
if kwargs['frames_dir'] is None and render_movie_to_file is not None:
kwargs['frames_dir'] = render_movie_to_file + '_frames'
# Load scene configuration file
scene = load_scene(scene_files, keypath_overrides, paths=scene_paths)
# Download data files
download_data(scene['Datasources'])
# Cache SWSH grid
precompute_cached_swsh_grid(scene)
if num_jobs == 1:
render_frames(scene=scene, **kwargs)
else:
render_parallel(num_jobs=num_jobs, scene=scene, **kwargs)
if (render_movie_to_file is not None
and 'FreezeTime' not in scene['Animation']):
from gwpv.render.movie import render_movie
render_movie(output_filename=render_movie_to_file,
frame_rate=scene['Animation']['FrameRate'],
frames_dir=kwargs['frames_dir'])
def dispatch_to_pvpython(force_offscreen_rendering, cli_args):
import os
import sys
import subprocess
logger = logging.getLogger(__name__)
# Check if we're running in a virtual environment and pass that
# information on
activate_venv_script = os.path.join(sys.prefix, 'bin', 'activate_this.py')
pvpython_command = (['pvpython'] + (['--force-offscreen-rendering']
if force_offscreen_rendering else []) +
cli_args +
(['--activate-venv', sys.prefix]
if os.path.exists(activate_venv_script) else []))
logger.debug("Dispatching to 'pvpython' as: {}".format(pvpython_command))
return subprocess.call(pvpython_command)
def render_scenes_entrypoint(scenes_file, output_dir, output_prefix,
output_suffix, scene_overrides, scene_paths,
keypath_overrides, render_missing_frames,
num_jobs, force_offscreen_rendering, verbose,
logging_config):
import itertools
import os
import yaml
from tqdm import tqdm
common_args = (
list(
itertools.chain(*[('--override', "=".join(override))
for override in keypath_overrides])) +
(['--render-missing-frames'] if render_missing_frames else []) + list(
itertools.chain(*[('-p', scene_path)
for scene_path in scene_paths])) +
['-n', str(num_jobs)] + ['-v'] * verbose +
(['--logging-config', "'" + json.dumps(logging_config) +
"'"] if logging_config is not None else []))
with tqdm(yaml.safe_load(open(scenes_file, 'r'))['Scenes'],
desc='Scenes',
unit='scene') as scenes:
for scene in scenes:
scenes.set_postfix(current_scene=scene['Name'])
scene_files = [scenes_file + ':' + scene['Name']] + scene_overrides
movie_file = os.path.join(
output_dir, output_prefix + scene['Name'] + output_suffix)
# Run as a subprocess instead of calling `render_scene_entrypoint`
# directly to make sure ParaView releases memory after each run
dispatch_to_pvpython(
force_offscreen_rendering, [__file__, 'scene'] + scene_files +
['--render-movie-to-file', movie_file] + common_args)
def render_waveform_entrypoint(scene_files, keypath_overrides, scene_paths,
**kwargs):
from gwpv.render.waveform import render_waveform
from gwpv.scene_configuration.load import load_scene
from gwpv.download_data import download_data
scene = load_scene(scene_files, keypath_overrides, paths=scene_paths)
download_data(scene['Datasources'])
render_waveform(scene, **kwargs)
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(
'gwrender.py',
description="Visualize gravitational waves with ParaView")
subparsers = parser.add_subparsers(dest='entrypoint')
subparsers.required = True
# `scene` CLI
parser_scene = subparsers.add_parser(
'scene', help="Render frames for a single scene.")
parser_scene.set_defaults(subcommand=render_scene_entrypoint)
parser_scene.add_argument(
'scene_files',
help=
"Path to one or more YAML scene configuration files. Entries in later files override those in earlier files.",
nargs='+')
parser_scene.add_argument('--frames-dir',
'-o',
help="Output directory for frames",
required=False)
parser_scene.add_argument(
'--frame-window',
help=
"Subset of frames to render. Includes lower bound and excludes upper bound.",
type=int,
nargs=2)
parser_scene.add_argument(
'--render-movie-to-file',
help=
"Name of a file (excluding extension) to render a movie from all frames to."
)
parser_scene.add_argument(
'--save-state-to-file',
help=
"Name of a file (excluding the 'pvsm' extension) to save the ParaView state to. The file can be loaded with ParaView to inspect the scene interactively."
)
parser_scene.add_argument(
'--no-render',
action='store_true',
help="Skip rendering any frames, e.g. to produce only a state file.")
parser_scene_preview_group = parser_scene.add_mutually_exclusive_group()
parser_scene_preview_group.add_argument(
'--show-preview',
action='store_true',
help="Show a window with a preview of the full movie.")
parser_scene.add_argument('--hide-progress',
dest='show_progress',
action='store_false',
help="Hide the progress bar")
# `scenes` CLI
parser_scenes = subparsers.add_parser(
'scenes', help="Render a set of scenes consecutively.")
parser_scenes.set_defaults(subcommand=render_scenes_entrypoint)
parser_scenes.add_argument(
'scenes_file',
help="Path to a YAML file listing the scenes to render.")
parser_scenes.add_argument('scene_overrides',
help="Overrides to apply to all scenes",
nargs='*',
default=[])
parser_scenes.add_argument('--output-dir', '-o')
parser_scenes.add_argument('--output-prefix', default="")
parser_scenes.add_argument('--output-suffix', default="")
# Common CLI for `scene` and `scenes`
for subparser in [parser_scene, parser_scenes]:
subparser.add_argument(
'--render-missing-frames',
help="Only render missing frames without replacing existing files.",
action='store_true')
subparser.add_argument('--num-jobs',
'-n',
help="Render frames in parallel",
type=int,
default=1)
subparser.add_argument('--force-offscreen-rendering',
'-x',
action='store_true')
subparser.add_argument('--activate-venv')
# `waveform` CLI
parser_waveform = subparsers.add_parser(
'waveform', help="Render waveform for a scene.")
parser_waveform.set_defaults(subcommand=render_waveform_entrypoint)
parser_waveform.add_argument(
'scene_files',
help=
"Path to one or more YAML scene configuration files. Entries in later files override those in earlier files.",
nargs='+')
parser_waveform.add_argument('--output-file', '-o', required=True)
parser_waveform.add_argument('--time-merger', type=float, required=True)
parser_waveform.add_argument('--mass', type=float, required=True)
parser_waveform.add_argument('--bounds', type=float, nargs=2)
# Common CLI for all entrypoints
for subparser in [parser_scene, parser_scenes, parser_waveform]:
subparser.add_argument(
'--scene-path',
'-p',
help="Append search paths for scene configuration files",
action='append',
dest='scene_paths',
default=[])
subparser.add_argument(
'--override',
help=
"A key-value pair that replaces an entry in the scene file, e.g. '--override Animation.FrameRate=30'. The value is parsed as YAML.",
action='append',
type=lambda kv: kv.split('='),
dest='keypath_overrides',
default=[])
subparser.add_argument('--verbose',
'-v',
action='count',
default=0,
help="Logging verbosity (-v, -vv, ...)")
subparser.add_argument('--logging-config', type=json.loads)
args = parser.parse_args()
# Setup logging
logging.basicConfig(level=logging.WARNING - args.verbose * 10)
if args.logging_config is not None:
import logging.config
if 'version' not in args.logging_config:
args.logging_config['version'] = 1
logging.config.dictConfig(args.logging_config)
if args.entrypoint != 'scenes':
del args.verbose
del args.logging_config
logger = logging.getLogger(__name__)
# Re-launch the script with `pvpython` if necessary
if args.entrypoint == 'scene':
try:
logger.debug("Checking if we're running with 'pvpython'...")
import paraview
except ImportError:
import sys
logger.debug("Not running with 'pvpython', dispatching...")
sys.exit(
dispatch_to_pvpython(args.force_offscreen_rendering, sys.argv))
logger.debug("Running with 'pvpython'.")
# Activate the virtual environment if requested before trying to import
# from `gwpv` below
if args.entrypoint in ['scene', 'scenes']:
if args.activate_venv:
activate_venv = args.activate_venv
logger.debug('Activating venv: {}'.format(activate_venv))
import os
activate_venv_script = os.path.join(activate_venv, 'bin',
'activate_this.py')
assert os.path.exists(activate_venv_script), (
"No 'bin/activate_this.py' script found in '{}'.".format(
activate_venv))
with open(activate_venv_script, 'r') as f:
exec(f.read(), {'__file__': activate_venv_script})
del args.activate_venv
# Import render_frames here to make loading the ParaView plugins work with
# `multiprocessing`
if args.entrypoint == 'scene':
from gwpv.render.frames import render_frames
# Forward to the user-selected entrypoint
subcommand = args.subcommand
del args.subcommand
del args.entrypoint
subcommand(**vars(args))
|
[
"argparse.ArgumentParser",
"json.dumps",
"os.path.join",
"os.path.exists",
"itertools.chain",
"functools.partial",
"h5py.File",
"tqdm.tqdm.get_lock",
"gwpv.render.frames.render_frames",
"gwpv.swsh_cache.precompute_cached_swsh_grid",
"subprocess.call",
"gwpv.render.waveform.render_waveform",
"logging.basicConfig",
"gwpv.render.movie.render_movie",
"gwpv.download_data.download_data",
"gwpv.scene_configuration.parse_as.file_and_subfile",
"gwpv.scene_configuration.load.load_scene",
"logging.config.dictConfig",
"gwpv.scene_configuration.animate.num_frames",
"logging.getLogger"
] |
[((1520, 1624), 'gwpv.render.frames.render_frames', 'render_frames', ([], {'job_id': 'job_id_and_frame_window[0]', 'frame_window': 'job_id_and_frame_window[1]'}), '(job_id=job_id_and_frame_window[0], frame_window=\n job_id_and_frame_window[1], **kwargs)\n', (1533, 1624), False, 'from gwpv.render.frames import render_frames\n'), ((1888, 1915), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1905, 1915), False, 'import logging\n'), ((4031, 4093), 'functools.partial', 'functools.partial', (['_render_frame_window'], {'scene': 'scene'}), '(_render_frame_window, scene=scene, **kwargs)\n', (4048, 4093), False, 'import functools\n'), ((5063, 5124), 'gwpv.scene_configuration.load.load_scene', 'load_scene', (['scene_files', 'keypath_overrides'], {'paths': 'scene_paths'}), '(scene_files, keypath_overrides, paths=scene_paths)\n', (5073, 5124), False, 'from gwpv.scene_configuration.load import load_scene\n'), ((5156, 5191), 'gwpv.download_data.download_data', 'download_data', (["scene['Datasources']"], {}), "(scene['Datasources'])\n", (5169, 5191), False, 'from gwpv.download_data import download_data\n'), ((5219, 5253), 'gwpv.swsh_cache.precompute_cached_swsh_grid', 'precompute_cached_swsh_grid', (['scene'], {}), '(scene)\n', (5246, 5253), False, 'from gwpv.swsh_cache import precompute_cached_swsh_grid\n'), ((5855, 5882), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (5872, 5882), False, 'import logging\n'), ((5999, 6050), 'os.path.join', 'os.path.join', (['sys.prefix', '"""bin"""', '"""activate_this.py"""'], {}), "(sys.prefix, 'bin', 'activate_this.py')\n", (6011, 6050), False, 'import os\n'), ((6459, 6492), 'subprocess.call', 'subprocess.call', (['pvpython_command'], {}), '(pvpython_command)\n', (6474, 6492), False, 'import subprocess\n'), ((8485, 8546), 'gwpv.scene_configuration.load.load_scene', 'load_scene', (['scene_files', 'keypath_overrides'], {'paths': 'scene_paths'}), '(scene_files, keypath_overrides, 
paths=scene_paths)\n', (8495, 8546), False, 'from gwpv.scene_configuration.load import load_scene\n'), ((8551, 8586), 'gwpv.download_data.download_data', 'download_data', (["scene['Datasources']"], {}), "(scene['Datasources'])\n", (8564, 8586), False, 'from gwpv.download_data import download_data\n'), ((8591, 8623), 'gwpv.render.waveform.render_waveform', 'render_waveform', (['scene'], {}), '(scene, **kwargs)\n', (8606, 8623), False, 'from gwpv.render.waveform import render_waveform\n'), ((8686, 8788), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""gwrender.py"""'], {'description': '"""Visualize gravitational waves with ParaView"""'}), "('gwrender.py', description=\n 'Visualize gravitational waves with ParaView')\n", (8709, 8788), False, 'import argparse\n'), ((13860, 13922), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': '(logging.WARNING - args.verbose * 10)'}), '(level=logging.WARNING - args.verbose * 10)\n', (13879, 13922), False, 'import logging\n'), ((14250, 14277), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (14267, 14277), False, 'import logging\n'), ((5285, 5321), 'gwpv.render.frames.render_frames', 'render_frames', ([], {'scene': 'scene'}), '(scene=scene, **kwargs)\n', (5298, 5321), False, 'from gwpv.render.frames import render_frames\n'), ((5556, 5688), 'gwpv.render.movie.render_movie', 'render_movie', ([], {'output_filename': 'render_movie_to_file', 'frame_rate': "scene['Animation']['FrameRate']", 'frames_dir': "kwargs['frames_dir']"}), "(output_filename=render_movie_to_file, frame_rate=scene[\n 'Animation']['FrameRate'], frames_dir=kwargs['frames_dir'])\n", (5568, 5688), False, 'from gwpv.render.movie import render_movie\n'), ((14097, 14143), 'logging.config.dictConfig', 'logging.config.dictConfig', (['args.logging_config'], {}), '(args.logging_config)\n', (14122, 14143), False, 'import logging\n'), ((6323, 6359), 'os.path.exists', 'os.path.exists', (['activate_venv_script'], {}), 
'(activate_venv_script)\n', (6337, 6359), False, 'import os\n'), ((7767, 7838), 'os.path.join', 'os.path.join', (['output_dir', "(output_prefix + scene['Name'] + output_suffix)"], {}), "(output_dir, output_prefix + scene['Name'] + output_suffix)\n", (7779, 7838), False, 'import os\n'), ((15111, 15165), 'os.path.join', 'os.path.join', (['activate_venv', '"""bin"""', '"""activate_this.py"""'], {}), "(activate_venv, 'bin', 'activate_this.py')\n", (15123, 15165), False, 'import os\n'), ((15233, 15269), 'os.path.exists', 'os.path.exists', (['activate_venv_script'], {}), '(activate_venv_script)\n', (15247, 15269), False, 'import os\n'), ((2286, 2345), 'gwpv.scene_configuration.parse_as.file_and_subfile', 'parse_as.file_and_subfile', (["scene['Datasources']['Waveform']"], {}), "(scene['Datasources']['Waveform'])\n", (2311, 2345), False, 'from gwpv.scene_configuration import parse_as, animate\n'), ((2849, 3008), 'gwpv.scene_configuration.animate.num_frames', 'animate.num_frames', ([], {'max_animation_length': 'max_animation_length', 'animation_speed': "scene['Animation']['Speed']", 'frame_rate': "scene['Animation']['FrameRate']"}), "(max_animation_length=max_animation_length,\n animation_speed=scene['Animation']['Speed'], frame_rate=scene[\n 'Animation']['FrameRate'])\n", (2867, 3008), False, 'from gwpv.scene_configuration import parse_as, animate\n'), ((3985, 4000), 'tqdm.tqdm.get_lock', 'tqdm.get_lock', ([], {}), '()\n', (3998, 4000), False, 'from tqdm import tqdm\n'), ((2380, 2424), 'h5py.File', 'h5py.File', (['waveform_file_and_subfile[0]', '"""r"""'], {}), "(waveform_file_and_subfile[0], 'r')\n", (2389, 2424), False, 'import h5py\n'), ((7167, 7235), 'itertools.chain', 'itertools.chain', (["*[('-p', scene_path) for scene_path in scene_paths]"], {}), "(*[('-p', scene_path) for scene_path in scene_paths])\n", (7182, 7235), False, 'import itertools\n'), ((7356, 7382), 'json.dumps', 'json.dumps', (['logging_config'], {}), '(logging_config)\n', (7366, 7382), False, 'import 
json\n')]
|
import sys
from magma import *
from mantle import *
from loam.boards.papilio import Papilio
papilio = Papilio()
papilio.Clock.on()
papilio.Timer.on()
papilio.C[8].rename('LED').output().on()
main = papilio.main()
wire( main.systimer.O[24], main.LED )
compile(sys.argv[1], main)
|
[
"loam.boards.papilio.Papilio"
] |
[((103, 112), 'loam.boards.papilio.Papilio', 'Papilio', ([], {}), '()\n', (110, 112), False, 'from loam.boards.papilio import Papilio\n')]
|
from copy import deepcopy
import mushroom_rl.environments
class EnvironmentBuilder:
"""
Class to spawn instances of a MushroomRL environment
"""
def __init__(self, env_name, env_params):
"""
Constructor
Args:
env_name: name of the environment to build
env_params: required parameters to build the specified environment
"""
self.env_name = env_name
self.env_params = env_params
def build(self):
"""
Build and return an environment
"""
environment = getattr(mushroom_rl.environments, self.env_name)
return environment(*self.env_params.values())
def copy(self):
"""
Create a deepcopy of the environment_builder and return it
"""
return deepcopy(self)
|
[
"copy.deepcopy"
] |
[((806, 820), 'copy.deepcopy', 'deepcopy', (['self'], {}), '(self)\n', (814, 820), False, 'from copy import deepcopy\n')]
|
#! -*- encoding:utf-8 -*-
import logging
from lib.file_util import read_csv_to_list
from task_def import VarifyProxyTask
logger = logging.getLogger()
handler = logging.StreamHandler()
formatter = logging.Formatter(
'%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
def choose_one_useful_proxy():
proxies = []
read_csv_to_list('useful.csv', proxies)
for proxy in proxies:
# print 'Proxy is {}'.format(proxy)
logger.debug("chose proxy is %s", proxy)
task = VarifyProxyTask(url=proxy, timeout=1)
if task():
return proxy
return None
if __name__ == "__main__":
proxy = choose_one_useful_proxy()
if proxy:
logger.debug("Find One useful proxy: %s", proxy)
|
[
"lib.file_util.read_csv_to_list",
"logging.StreamHandler",
"task_def.VarifyProxyTask",
"logging.Formatter",
"logging.getLogger"
] |
[((132, 151), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (149, 151), False, 'import logging\n'), ((162, 185), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (183, 185), False, 'import logging\n'), ((198, 270), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s %(name)-12s %(levelname)-8s %(message)s"""'], {}), "('%(asctime)s %(name)-12s %(levelname)-8s %(message)s')\n", (215, 270), False, 'import logging\n'), ((420, 459), 'lib.file_util.read_csv_to_list', 'read_csv_to_list', (['"""useful.csv"""', 'proxies'], {}), "('useful.csv', proxies)\n", (436, 459), False, 'from lib.file_util import read_csv_to_list\n'), ((594, 631), 'task_def.VarifyProxyTask', 'VarifyProxyTask', ([], {'url': 'proxy', 'timeout': '(1)'}), '(url=proxy, timeout=1)\n', (609, 631), False, 'from task_def import VarifyProxyTask\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-16 16:15
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Bff',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
],
),
migrations.CreateModel(
name='BffState',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('state_name', models.CharField(max_length=10)),
('state_num', models.IntegerField()),
],
),
migrations.CreateModel(
name='Brand',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('logo_pic_url', models.CharField(max_length=1000)),
('name', models.CharField(max_length=200)),
],
),
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('comment', models.TextField()),
],
),
migrations.CreateModel(
name='Follow',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
],
),
migrations.CreateModel(
name='Like',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
],
),
migrations.CreateModel(
name='PhotoItem',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('is_bff_item', models.BooleanField()),
('time_created', models.DateTimeField(verbose_name='date published')),
('title', models.CharField(max_length=500)),
('photo_url', models.CharField(max_length=1000)),
('brand_assosiated', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='stylemuzeapp.Brand')),
],
),
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('profile_pic_url', models.CharField(max_length=1000, null=True)),
('website', models.CharField(max_length=1000, null=True)),
('email', models.CharField(max_length=1000, null=True)),
('gender', models.BooleanField()),
('first_name', models.CharField(max_length=50)),
('last_name', models.CharField(max_length=50)),
('facebook_id', models.CharField(max_length=128)),
('date_registered', models.DateTimeField(verbose_name='date registered')),
('last_login', models.DateTimeField(verbose_name='last login time')),
],
),
migrations.CreateModel(
name='Vote',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('from_user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='stylemuzeapp.User')),
('item', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='stylemuzeapp.PhotoItem')),
],
),
migrations.CreateModel(
name='VoteOption',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('vote_name', models.CharField(max_length=10)),
('vote_num', models.IntegerField()),
],
),
migrations.AddField(
model_name='vote',
name='vote',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='stylemuzeapp.VoteOption'),
),
migrations.AddField(
model_name='photoitem',
name='user_created',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='stylemuzeapp.User'),
),
migrations.AddField(
model_name='like',
name='from_user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='stylemuzeapp.User'),
),
migrations.AddField(
model_name='like',
name='item',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='stylemuzeapp.PhotoItem'),
),
migrations.AddField(
model_name='follow',
name='follower_user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='follow_user_requested', to='stylemuzeapp.User'),
),
migrations.AddField(
model_name='follow',
name='following_user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='follow_user_being_followed', to='stylemuzeapp.User'),
),
migrations.AddField(
model_name='comment',
name='from_user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='stylemuzeapp.User'),
),
migrations.AddField(
model_name='comment',
name='item',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='stylemuzeapp.PhotoItem'),
),
migrations.AddField(
model_name='bff',
name='follower_user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='bff_user_requested', to='stylemuzeapp.User'),
),
migrations.AddField(
model_name='bff',
name='following_user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='bff_user_being_bffed', to='stylemuzeapp.User'),
),
migrations.AddField(
model_name='bff',
name='state',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='stylemuzeapp.BffState'),
),
]
|
[
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.CharField",
"django.db.models.BooleanField",
"django.db.models.AutoField",
"django.db.models.IntegerField",
"django.db.models.DateTimeField"
] |
[((4324, 4432), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': '"""stylemuzeapp.VoteOption"""'}), "(null=True, on_delete=django.db.models.deletion.SET_NULL,\n to='stylemuzeapp.VoteOption')\n", (4341, 4432), False, 'from django.db import migrations, models\n'), ((4557, 4648), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""stylemuzeapp.User"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'stylemuzeapp.User')\n", (4574, 4648), False, 'from django.db import migrations, models\n'), ((4764, 4855), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""stylemuzeapp.User"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'stylemuzeapp.User')\n", (4781, 4855), False, 'from django.db import migrations, models\n'), ((4966, 5062), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""stylemuzeapp.PhotoItem"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'stylemuzeapp.PhotoItem')\n", (4983, 5062), False, 'from django.db import migrations, models\n'), ((5184, 5313), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""follow_user_requested"""', 'to': '"""stylemuzeapp.User"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='follow_user_requested', to='stylemuzeapp.User')\n", (5201, 5313), False, 'from django.db import migrations, models\n'), ((5436, 5570), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""follow_user_being_followed"""', 'to': '"""stylemuzeapp.User"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='follow_user_being_followed', to='stylemuzeapp.User')\n", (5453, 
5570), False, 'from django.db import migrations, models\n'), ((5689, 5780), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""stylemuzeapp.User"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'stylemuzeapp.User')\n", (5706, 5780), False, 'from django.db import migrations, models\n'), ((5894, 5990), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""stylemuzeapp.PhotoItem"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'stylemuzeapp.PhotoItem')\n", (5911, 5990), False, 'from django.db import migrations, models\n'), ((6109, 6235), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""bff_user_requested"""', 'to': '"""stylemuzeapp.User"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='bff_user_requested', to='stylemuzeapp.User')\n", (6126, 6235), False, 'from django.db import migrations, models\n'), ((6355, 6483), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""bff_user_being_bffed"""', 'to': '"""stylemuzeapp.User"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='bff_user_being_bffed', to='stylemuzeapp.User')\n", (6372, 6483), False, 'from django.db import migrations, models\n'), ((6594, 6700), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': '"""stylemuzeapp.BffState"""'}), "(null=True, on_delete=django.db.models.deletion.SET_NULL,\n to='stylemuzeapp.BffState')\n", (6611, 6700), False, 'from django.db import migrations, models\n'), ((396, 489), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, 
primary_key=True, serialize=False,\n verbose_name='ID')\n", (412, 489), False, 'from django.db import migrations, models\n'), ((619, 712), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (635, 712), False, 'from django.db import migrations, models\n'), ((742, 773), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)'}), '(max_length=10)\n', (758, 773), False, 'from django.db import migrations, models\n'), ((806, 827), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (825, 827), False, 'from django.db import migrations, models\n'), ((958, 1051), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (974, 1051), False, 'from django.db import migrations, models\n'), ((1083, 1116), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1000)'}), '(max_length=1000)\n', (1099, 1116), False, 'from django.db import migrations, models\n'), ((1144, 1176), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (1160, 1176), False, 'from django.db import migrations, models\n'), ((1309, 1402), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1325, 1402), False, 'from django.db import migrations, models\n'), ((1429, 1447), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (1445, 1447), False, 'from django.db import migrations, models\n'), ((1579, 1672), 'django.db.models.AutoField', 
'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1595, 1672), False, 'from django.db import migrations, models\n'), ((1798, 1891), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1814, 1891), False, 'from django.db import migrations, models\n'), ((2022, 2115), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (2038, 2115), False, 'from django.db import migrations, models\n'), ((2146, 2167), 'django.db.models.BooleanField', 'models.BooleanField', ([], {}), '()\n', (2165, 2167), False, 'from django.db import migrations, models\n'), ((2203, 2254), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'verbose_name': '"""date published"""'}), "(verbose_name='date published')\n", (2223, 2254), False, 'from django.db import migrations, models\n'), ((2283, 2315), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(500)'}), '(max_length=500)\n', (2299, 2315), False, 'from django.db import migrations, models\n'), ((2348, 2381), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1000)'}), '(max_length=1000)\n', (2364, 2381), False, 'from django.db import migrations, models\n'), ((2421, 2523), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""stylemuzeapp.Brand"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n to='stylemuzeapp.Brand')\n", (2438, 2523), False, 'from django.db import 
migrations, models\n'), ((2649, 2742), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (2665, 2742), False, 'from django.db import migrations, models\n'), ((2777, 2821), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1000)', 'null': '(True)'}), '(max_length=1000, null=True)\n', (2793, 2821), False, 'from django.db import migrations, models\n'), ((2852, 2896), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1000)', 'null': '(True)'}), '(max_length=1000, null=True)\n', (2868, 2896), False, 'from django.db import migrations, models\n'), ((2925, 2969), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1000)', 'null': '(True)'}), '(max_length=1000, null=True)\n', (2941, 2969), False, 'from django.db import migrations, models\n'), ((2999, 3020), 'django.db.models.BooleanField', 'models.BooleanField', ([], {}), '()\n', (3018, 3020), False, 'from django.db import migrations, models\n'), ((3054, 3085), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (3070, 3085), False, 'from django.db import migrations, models\n'), ((3118, 3149), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (3134, 3149), False, 'from django.db import migrations, models\n'), ((3184, 3216), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)'}), '(max_length=128)\n', (3200, 3216), False, 'from django.db import migrations, models\n'), ((3255, 3307), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'verbose_name': '"""date registered"""'}), "(verbose_name='date registered')\n", (3275, 3307), False, 'from django.db import migrations, models\n'), ((3341, 3393), 'django.db.models.DateTimeField', 
'models.DateTimeField', ([], {'verbose_name': '"""last login time"""'}), "(verbose_name='last login time')\n", (3361, 3393), False, 'from django.db import migrations, models\n'), ((3523, 3616), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (3539, 3616), False, 'from django.db import migrations, models\n'), ((3645, 3736), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""stylemuzeapp.User"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'stylemuzeapp.User')\n", (3662, 3736), False, 'from django.db import migrations, models\n'), ((3759, 3855), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""stylemuzeapp.PhotoItem"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'stylemuzeapp.PhotoItem')\n", (3776, 3855), False, 'from django.db import migrations, models\n'), ((3986, 4079), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (4002, 4079), False, 'from django.db import migrations, models\n'), ((4108, 4139), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)'}), '(max_length=10)\n', (4124, 4139), False, 'from django.db import migrations, models\n'), ((4171, 4192), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (4190, 4192), False, 'from django.db import migrations, models\n')]
|
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
class vcs_details(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module brocade-vcs - based on the path /brocade_vcs_rpc/get-vcs-details/output/vcs-details. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  """
  # __slots__ pins the attribute set; generated bindings never grow ad-hoc attributes.
  __slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__principal_switch_wwn','__co_ordinator_wwn','__local_switch_wwn','__node_vcs_mode','__node_vcs_type','__node_vcs_id',)
  _yang_name = 'vcs-details'
  _rest_name = 'vcs-details'
  _pybind_generated_by = 'container'
  # Resolve the XPath helper and extension-method registry: explicit kwarg first,
  # then inherited from the parent container, otherwise disabled (False).
  def __init__(self, *args, **kwargs):
    path_helper_ = kwargs.pop("path_helper", None)
    if path_helper_ is False:
      self._path_helper = False
    elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
      self._path_helper = path_helper_
    elif hasattr(self, "_parent"):
      path_helper_ = getattr(self._parent, "_path_helper", False)
      self._path_helper = path_helper_
    else:
      self._path_helper = False
    extmethods = kwargs.pop("extmethods", None)
    if extmethods is False:
      self._extmethods = False
    elif extmethods is not None and isinstance(extmethods, dict):
      self._extmethods = extmethods
    elif hasattr(self, "_parent"):
      extmethods = getattr(self._parent, "_extmethods", None)
      self._extmethods = extmethods
    else:
      self._extmethods = False
    # Each YANG leaf is wrapped in a YANGDynClass carrying its type restrictions and metadata.
    self.__principal_switch_wwn = YANGDynClass(base=unicode, is_leaf=True, yang_name="principal-switch-wwn", rest_name="principal-switch-wwn", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='string', is_config=True)
    self.__local_switch_wwn = YANGDynClass(base=unicode, is_leaf=True, yang_name="local-switch-wwn", rest_name="local-switch-wwn", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='string', is_config=True)
    self.__node_vcs_id = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="node-vcs-id", rest_name="node-vcs-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='uint32', is_config=True)
    self.__node_vcs_type = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'vcs-fabric-cluster': {'value': 3}, u'vcs-unknown-cluster': {'value': 1}, u'vcs-stand-alone': {'value': 2}, u'vcs-management-cluster': {'value': 4}},), is_leaf=True, yang_name="node-vcs-type", rest_name="node-vcs-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='vcs-cluster-type', is_config=True)
    self.__node_vcs_mode = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="node-vcs-mode", rest_name="node-vcs-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='boolean', is_config=True)
    self.__co_ordinator_wwn = YANGDynClass(base=unicode, is_leaf=True, yang_name="co-ordinator-wwn", rest_name="co-ordinator-wwn", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='string', is_config=True)
    # A single positional argument may supply an existing object whose leaf values
    # are copied in via the generated _set_<leaf> methods (only changed leaves).
    load = kwargs.pop("load", None)
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
  # Path of this container within the YANG schema tree (parent path + own name when nested).
  def _path(self):
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return [u'brocade_vcs_rpc', u'get-vcs-details', u'output', u'vcs-details']
  # Path of this container within the REST API tree.
  def _rest_path(self):
    if hasattr(self, "_parent"):
      if self._rest_name:
        return self._parent._rest_path()+[self._rest_name]
      else:
        return self._parent._rest_path()
    else:
      return [u'get-vcs-details', u'output', u'vcs-details']
  def _get_principal_switch_wwn(self):
    """
    Getter method for principal_switch_wwn, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details/output/vcs_details/principal_switch_wwn (string)
    YANG Description: WWN of principal switch
    """
    return self.__principal_switch_wwn
  def _set_principal_switch_wwn(self, v, load=False):
    """
    Setter method for principal_switch_wwn, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details/output/vcs_details/principal_switch_wwn (string)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_principal_switch_wwn is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_principal_switch_wwn() directly.
    YANG Description: WWN of principal switch
    """
    # Values coming from another pyangbind type are first unwrapped to their base type.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="principal-switch-wwn", rest_name="principal-switch-wwn", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='string', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """principal_switch_wwn must be of a type compatible with string""",
          'defined-type': "string",
          'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="principal-switch-wwn", rest_name="principal-switch-wwn", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='string', is_config=True)""",
        })
    self.__principal_switch_wwn = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_principal_switch_wwn(self):
    self.__principal_switch_wwn = YANGDynClass(base=unicode, is_leaf=True, yang_name="principal-switch-wwn", rest_name="principal-switch-wwn", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='string', is_config=True)
  def _get_co_ordinator_wwn(self):
    """
    Getter method for co_ordinator_wwn, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details/output/vcs_details/co_ordinator_wwn (string)
    YANG Description: WWN of Co-ordinator switch
    """
    return self.__co_ordinator_wwn
  def _set_co_ordinator_wwn(self, v, load=False):
    """
    Setter method for co_ordinator_wwn, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details/output/vcs_details/co_ordinator_wwn (string)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_co_ordinator_wwn is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_co_ordinator_wwn() directly.
    YANG Description: WWN of Co-ordinator switch
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="co-ordinator-wwn", rest_name="co-ordinator-wwn", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='string', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """co_ordinator_wwn must be of a type compatible with string""",
          'defined-type': "string",
          'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="co-ordinator-wwn", rest_name="co-ordinator-wwn", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='string', is_config=True)""",
        })
    self.__co_ordinator_wwn = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_co_ordinator_wwn(self):
    self.__co_ordinator_wwn = YANGDynClass(base=unicode, is_leaf=True, yang_name="co-ordinator-wwn", rest_name="co-ordinator-wwn", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='string', is_config=True)
  def _get_local_switch_wwn(self):
    """
    Getter method for local_switch_wwn, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details/output/vcs_details/local_switch_wwn (string)
    YANG Description: WWN of local switch
    """
    return self.__local_switch_wwn
  def _set_local_switch_wwn(self, v, load=False):
    """
    Setter method for local_switch_wwn, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details/output/vcs_details/local_switch_wwn (string)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_local_switch_wwn is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_local_switch_wwn() directly.
    YANG Description: WWN of local switch
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="local-switch-wwn", rest_name="local-switch-wwn", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='string', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """local_switch_wwn must be of a type compatible with string""",
          'defined-type': "string",
          'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="local-switch-wwn", rest_name="local-switch-wwn", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='string', is_config=True)""",
        })
    self.__local_switch_wwn = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_local_switch_wwn(self):
    self.__local_switch_wwn = YANGDynClass(base=unicode, is_leaf=True, yang_name="local-switch-wwn", rest_name="local-switch-wwn", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='string', is_config=True)
  def _get_node_vcs_mode(self):
    """
    Getter method for node_vcs_mode, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details/output/vcs_details/node_vcs_mode (boolean)
    YANG Description: Node's VCS mode
    """
    return self.__node_vcs_mode
  def _set_node_vcs_mode(self, v, load=False):
    """
    Setter method for node_vcs_mode, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details/output/vcs_details/node_vcs_mode (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_node_vcs_mode is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_node_vcs_mode() directly.
    YANG Description: Node's VCS mode
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="node-vcs-mode", rest_name="node-vcs-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='boolean', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """node_vcs_mode must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="node-vcs-mode", rest_name="node-vcs-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='boolean', is_config=True)""",
        })
    self.__node_vcs_mode = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_node_vcs_mode(self):
    self.__node_vcs_mode = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="node-vcs-mode", rest_name="node-vcs-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='boolean', is_config=True)
  def _get_node_vcs_type(self):
    """
    Getter method for node_vcs_type, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details/output/vcs_details/node_vcs_type (vcs-cluster-type)
    YANG Description: Vcs Type
    """
    return self.__node_vcs_type
  def _set_node_vcs_type(self, v, load=False):
    """
    Setter method for node_vcs_type, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details/output/vcs_details/node_vcs_type (vcs-cluster-type)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_node_vcs_type is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_node_vcs_type() directly.
    YANG Description: Vcs Type
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'vcs-fabric-cluster': {'value': 3}, u'vcs-unknown-cluster': {'value': 1}, u'vcs-stand-alone': {'value': 2}, u'vcs-management-cluster': {'value': 4}},), is_leaf=True, yang_name="node-vcs-type", rest_name="node-vcs-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='vcs-cluster-type', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """node_vcs_type must be of a type compatible with vcs-cluster-type""",
          'defined-type': "brocade-vcs:vcs-cluster-type",
          'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'vcs-fabric-cluster': {'value': 3}, u'vcs-unknown-cluster': {'value': 1}, u'vcs-stand-alone': {'value': 2}, u'vcs-management-cluster': {'value': 4}},), is_leaf=True, yang_name="node-vcs-type", rest_name="node-vcs-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='vcs-cluster-type', is_config=True)""",
        })
    self.__node_vcs_type = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_node_vcs_type(self):
    self.__node_vcs_type = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'vcs-fabric-cluster': {'value': 3}, u'vcs-unknown-cluster': {'value': 1}, u'vcs-stand-alone': {'value': 2}, u'vcs-management-cluster': {'value': 4}},), is_leaf=True, yang_name="node-vcs-type", rest_name="node-vcs-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='vcs-cluster-type', is_config=True)
  def _get_node_vcs_id(self):
    """
    Getter method for node_vcs_id, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details/output/vcs_details/node_vcs_id (uint32)
    YANG Description: Vcs Id
    """
    return self.__node_vcs_id
  def _set_node_vcs_id(self, v, load=False):
    """
    Setter method for node_vcs_id, mapped from YANG variable /brocade_vcs_rpc/get_vcs_details/output/vcs_details/node_vcs_id (uint32)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_node_vcs_id is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_node_vcs_id() directly.
    YANG Description: Vcs Id
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="node-vcs-id", rest_name="node-vcs-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='uint32', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """node_vcs_id must be of a type compatible with uint32""",
          'defined-type': "uint32",
          'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="node-vcs-id", rest_name="node-vcs-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='uint32', is_config=True)""",
        })
    self.__node_vcs_id = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_node_vcs_id(self):
    self.__node_vcs_id = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="node-vcs-id", rest_name="node-vcs-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='uint32', is_config=True)
  # Each leaf is exposed as a plain property wrapping its generated getter/setter pair.
  principal_switch_wwn = __builtin__.property(_get_principal_switch_wwn, _set_principal_switch_wwn)
  co_ordinator_wwn = __builtin__.property(_get_co_ordinator_wwn, _set_co_ordinator_wwn)
  local_switch_wwn = __builtin__.property(_get_local_switch_wwn, _set_local_switch_wwn)
  node_vcs_mode = __builtin__.property(_get_node_vcs_mode, _set_node_vcs_mode)
  node_vcs_type = __builtin__.property(_get_node_vcs_type, _set_node_vcs_type)
  node_vcs_id = __builtin__.property(_get_node_vcs_id, _set_node_vcs_id)
  _pyangbind_elements = {'principal_switch_wwn': principal_switch_wwn, 'co_ordinator_wwn': co_ordinator_wwn, 'local_switch_wwn': local_switch_wwn, 'node_vcs_mode': node_vcs_mode, 'node_vcs_type': node_vcs_type, 'node_vcs_id': node_vcs_id, }
|
[
"pyangbind.lib.yangtypes.RestrictedClassType",
"pyangbind.lib.yangtypes.YANGDynClass",
"__builtin__.property"
] |
[((19635, 19709), '__builtin__.property', '__builtin__.property', (['_get_principal_switch_wwn', '_set_principal_switch_wwn'], {}), '(_get_principal_switch_wwn, _set_principal_switch_wwn)\n', (19655, 19709), False, 'import __builtin__\n'), ((19731, 19797), '__builtin__.property', '__builtin__.property', (['_get_co_ordinator_wwn', '_set_co_ordinator_wwn'], {}), '(_get_co_ordinator_wwn, _set_co_ordinator_wwn)\n', (19751, 19797), False, 'import __builtin__\n'), ((19819, 19885), '__builtin__.property', '__builtin__.property', (['_get_local_switch_wwn', '_set_local_switch_wwn'], {}), '(_get_local_switch_wwn, _set_local_switch_wwn)\n', (19839, 19885), False, 'import __builtin__\n'), ((19904, 19964), '__builtin__.property', '__builtin__.property', (['_get_node_vcs_mode', '_set_node_vcs_mode'], {}), '(_get_node_vcs_mode, _set_node_vcs_mode)\n', (19924, 19964), False, 'import __builtin__\n'), ((19983, 20043), '__builtin__.property', '__builtin__.property', (['_get_node_vcs_type', '_set_node_vcs_type'], {}), '(_get_node_vcs_type, _set_node_vcs_type)\n', (20003, 20043), False, 'import __builtin__\n'), ((20060, 20116), '__builtin__.property', '__builtin__.property', (['_get_node_vcs_id', '_set_node_vcs_id'], {}), '(_get_node_vcs_id, _set_node_vcs_id)\n', (20080, 20116), False, 'import __builtin__\n'), ((1912, 2246), 'pyangbind.lib.yangtypes.YANGDynClass', 'YANGDynClass', ([], {'base': 'unicode', 'is_leaf': '(True)', 'yang_name': '"""principal-switch-wwn"""', 'rest_name': '"""principal-switch-wwn"""', 'parent': 'self', 'path_helper': 'self._path_helper', 'extmethods': 'self._extmethods', 'register_paths': '(False)', 'namespace': '"""urn:brocade.com:mgmt:brocade-vcs"""', 'defining_module': '"""brocade-vcs"""', 'yang_type': '"""string"""', 'is_config': '(True)'}), "(base=unicode, is_leaf=True, yang_name='principal-switch-wwn',\n rest_name='principal-switch-wwn', parent=self, path_helper=self.\n _path_helper, extmethods=self._extmethods, register_paths=False,\n 
namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module=\n 'brocade-vcs', yang_type='string', is_config=True)\n", (1924, 2246), False, 'from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType\n'), ((2259, 2585), 'pyangbind.lib.yangtypes.YANGDynClass', 'YANGDynClass', ([], {'base': 'unicode', 'is_leaf': '(True)', 'yang_name': '"""local-switch-wwn"""', 'rest_name': '"""local-switch-wwn"""', 'parent': 'self', 'path_helper': 'self._path_helper', 'extmethods': 'self._extmethods', 'register_paths': '(False)', 'namespace': '"""urn:brocade.com:mgmt:brocade-vcs"""', 'defining_module': '"""brocade-vcs"""', 'yang_type': '"""string"""', 'is_config': '(True)'}), "(base=unicode, is_leaf=True, yang_name='local-switch-wwn',\n rest_name='local-switch-wwn', parent=self, path_helper=self.\n _path_helper, extmethods=self._extmethods, register_paths=False,\n namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module=\n 'brocade-vcs', yang_type='string', is_config=True)\n", (2271, 2585), False, 'from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType\n'), ((3648, 3969), 'pyangbind.lib.yangtypes.YANGDynClass', 'YANGDynClass', ([], {'base': 'YANGBool', 'is_leaf': '(True)', 'yang_name': '"""node-vcs-mode"""', 'rest_name': '"""node-vcs-mode"""', 'parent': 'self', 'path_helper': 'self._path_helper', 'extmethods': 'self._extmethods', 'register_paths': '(False)', 'namespace': '"""urn:brocade.com:mgmt:brocade-vcs"""', 'defining_module': '"""brocade-vcs"""', 'yang_type': '"""boolean"""', 'is_config': '(True)'}), "(base=YANGBool, is_leaf=True, yang_name='node-vcs-mode',\n rest_name='node-vcs-mode', parent=self, path_helper=self._path_helper,\n extmethods=self._extmethods, register_paths=False, namespace=\n 'urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs',\n yang_type='boolean', is_config=True)\n", (3660, 3969), False, 'from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, 
ReferenceType\n'), ((3983, 4309), 'pyangbind.lib.yangtypes.YANGDynClass', 'YANGDynClass', ([], {'base': 'unicode', 'is_leaf': '(True)', 'yang_name': '"""co-ordinator-wwn"""', 'rest_name': '"""co-ordinator-wwn"""', 'parent': 'self', 'path_helper': 'self._path_helper', 'extmethods': 'self._extmethods', 'register_paths': '(False)', 'namespace': '"""urn:brocade.com:mgmt:brocade-vcs"""', 'defining_module': '"""brocade-vcs"""', 'yang_type': '"""string"""', 'is_config': '(True)'}), "(base=unicode, is_leaf=True, yang_name='co-ordinator-wwn',\n rest_name='co-ordinator-wwn', parent=self, path_helper=self.\n _path_helper, extmethods=self._extmethods, register_paths=False,\n namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module=\n 'brocade-vcs', yang_type='string', is_config=True)\n", (3995, 4309), False, 'from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType\n'), ((7404, 7738), 'pyangbind.lib.yangtypes.YANGDynClass', 'YANGDynClass', ([], {'base': 'unicode', 'is_leaf': '(True)', 'yang_name': '"""principal-switch-wwn"""', 'rest_name': '"""principal-switch-wwn"""', 'parent': 'self', 'path_helper': 'self._path_helper', 'extmethods': 'self._extmethods', 'register_paths': '(False)', 'namespace': '"""urn:brocade.com:mgmt:brocade-vcs"""', 'defining_module': '"""brocade-vcs"""', 'yang_type': '"""string"""', 'is_config': '(True)'}), "(base=unicode, is_leaf=True, yang_name='principal-switch-wwn',\n rest_name='principal-switch-wwn', parent=self, path_helper=self.\n _path_helper, extmethods=self._extmethods, register_paths=False,\n namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module=\n 'brocade-vcs', yang_type='string', is_config=True)\n", (7416, 7738), False, 'from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType\n'), ((9602, 9928), 'pyangbind.lib.yangtypes.YANGDynClass', 'YANGDynClass', ([], {'base': 'unicode', 'is_leaf': '(True)', 'yang_name': '"""co-ordinator-wwn"""', 'rest_name': 
'"""co-ordinator-wwn"""', 'parent': 'self', 'path_helper': 'self._path_helper', 'extmethods': 'self._extmethods', 'register_paths': '(False)', 'namespace': '"""urn:brocade.com:mgmt:brocade-vcs"""', 'defining_module': '"""brocade-vcs"""', 'yang_type': '"""string"""', 'is_config': '(True)'}), "(base=unicode, is_leaf=True, yang_name='co-ordinator-wwn',\n rest_name='co-ordinator-wwn', parent=self, path_helper=self.\n _path_helper, extmethods=self._extmethods, register_paths=False,\n namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module=\n 'brocade-vcs', yang_type='string', is_config=True)\n", (9614, 9928), False, 'from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType\n'), ((11778, 12104), 'pyangbind.lib.yangtypes.YANGDynClass', 'YANGDynClass', ([], {'base': 'unicode', 'is_leaf': '(True)', 'yang_name': '"""local-switch-wwn"""', 'rest_name': '"""local-switch-wwn"""', 'parent': 'self', 'path_helper': 'self._path_helper', 'extmethods': 'self._extmethods', 'register_paths': '(False)', 'namespace': '"""urn:brocade.com:mgmt:brocade-vcs"""', 'defining_module': '"""brocade-vcs"""', 'yang_type': '"""string"""', 'is_config': '(True)'}), "(base=unicode, is_leaf=True, yang_name='local-switch-wwn',\n rest_name='local-switch-wwn', parent=self, path_helper=self.\n _path_helper, extmethods=self._extmethods, register_paths=False,\n namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module=\n 'brocade-vcs', yang_type='string', is_config=True)\n", (11790, 12104), False, 'from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType\n'), ((13903, 14224), 'pyangbind.lib.yangtypes.YANGDynClass', 'YANGDynClass', ([], {'base': 'YANGBool', 'is_leaf': '(True)', 'yang_name': '"""node-vcs-mode"""', 'rest_name': '"""node-vcs-mode"""', 'parent': 'self', 'path_helper': 'self._path_helper', 'extmethods': 'self._extmethods', 'register_paths': '(False)', 'namespace': '"""urn:brocade.com:mgmt:brocade-vcs"""', 'defining_module': 
'"""brocade-vcs"""', 'yang_type': '"""boolean"""', 'is_config': '(True)'}), "(base=YANGBool, is_leaf=True, yang_name='node-vcs-mode',\n rest_name='node-vcs-mode', parent=self, path_helper=self._path_helper,\n extmethods=self._extmethods, register_paths=False, namespace=\n 'urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs',\n yang_type='boolean', is_config=True)\n", (13915, 14224), False, 'from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType\n'), ((6369, 6705), 'pyangbind.lib.yangtypes.YANGDynClass', 'YANGDynClass', (['v'], {'base': 'unicode', 'is_leaf': '(True)', 'yang_name': '"""principal-switch-wwn"""', 'rest_name': '"""principal-switch-wwn"""', 'parent': 'self', 'path_helper': 'self._path_helper', 'extmethods': 'self._extmethods', 'register_paths': '(False)', 'namespace': '"""urn:brocade.com:mgmt:brocade-vcs"""', 'defining_module': '"""brocade-vcs"""', 'yang_type': '"""string"""', 'is_config': '(True)'}), "(v, base=unicode, is_leaf=True, yang_name=\n 'principal-switch-wwn', rest_name='principal-switch-wwn', parent=self,\n path_helper=self._path_helper, extmethods=self._extmethods,\n register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs',\n defining_module='brocade-vcs', yang_type='string', is_config=True)\n", (6381, 6705), False, 'from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType\n'), ((8599, 8928), 'pyangbind.lib.yangtypes.YANGDynClass', 'YANGDynClass', (['v'], {'base': 'unicode', 'is_leaf': '(True)', 'yang_name': '"""co-ordinator-wwn"""', 'rest_name': '"""co-ordinator-wwn"""', 'parent': 'self', 'path_helper': 'self._path_helper', 'extmethods': 'self._extmethods', 'register_paths': '(False)', 'namespace': '"""urn:brocade.com:mgmt:brocade-vcs"""', 'defining_module': '"""brocade-vcs"""', 'yang_type': '"""string"""', 'is_config': '(True)'}), "(v, base=unicode, is_leaf=True, yang_name='co-ordinator-wwn',\n rest_name='co-ordinator-wwn', parent=self, 
path_helper=self.\n _path_helper, extmethods=self._extmethods, register_paths=False,\n namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module=\n 'brocade-vcs', yang_type='string', is_config=True)\n", (8611, 8928), False, 'from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType\n'), ((10775, 11104), 'pyangbind.lib.yangtypes.YANGDynClass', 'YANGDynClass', (['v'], {'base': 'unicode', 'is_leaf': '(True)', 'yang_name': '"""local-switch-wwn"""', 'rest_name': '"""local-switch-wwn"""', 'parent': 'self', 'path_helper': 'self._path_helper', 'extmethods': 'self._extmethods', 'register_paths': '(False)', 'namespace': '"""urn:brocade.com:mgmt:brocade-vcs"""', 'defining_module': '"""brocade-vcs"""', 'yang_type': '"""string"""', 'is_config': '(True)'}), "(v, base=unicode, is_leaf=True, yang_name='local-switch-wwn',\n rest_name='local-switch-wwn', parent=self, path_helper=self.\n _path_helper, extmethods=self._extmethods, register_paths=False,\n namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module=\n 'brocade-vcs', yang_type='string', is_config=True)\n", (10787, 11104), False, 'from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType\n'), ((12918, 13242), 'pyangbind.lib.yangtypes.YANGDynClass', 'YANGDynClass', (['v'], {'base': 'YANGBool', 'is_leaf': '(True)', 'yang_name': '"""node-vcs-mode"""', 'rest_name': '"""node-vcs-mode"""', 'parent': 'self', 'path_helper': 'self._path_helper', 'extmethods': 'self._extmethods', 'register_paths': '(False)', 'namespace': '"""urn:brocade.com:mgmt:brocade-vcs"""', 'defining_module': '"""brocade-vcs"""', 'yang_type': '"""boolean"""', 'is_config': '(True)'}), "(v, base=YANGBool, is_leaf=True, yang_name='node-vcs-mode',\n rest_name='node-vcs-mode', parent=self, path_helper=self._path_helper,\n extmethods=self._extmethods, register_paths=False, namespace=\n 'urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs',\n yang_type='boolean', is_config=True)\n", (12930, 
13242), False, 'from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType\n'), ((2611, 2711), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'long', 'restriction_dict': "{'range': ['0..4294967295']}", 'int_size': '(32)'}), "(base_type=long, restriction_dict={'range': [\n '0..4294967295']}, int_size=32)\n", (2630, 2711), False, 'from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType\n'), ((3025, 3272), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'unicode', 'restriction_type': '"""dict_key"""', 'restriction_arg': "{u'vcs-fabric-cluster': {'value': 3}, u'vcs-unknown-cluster': {'value': 1},\n u'vcs-stand-alone': {'value': 2}, u'vcs-management-cluster': {'value': 4}}"}), "(base_type=unicode, restriction_type='dict_key',\n restriction_arg={u'vcs-fabric-cluster': {'value': 3},\n u'vcs-unknown-cluster': {'value': 1}, u'vcs-stand-alone': {'value': 2},\n u'vcs-management-cluster': {'value': 4}})\n", (3044, 3272), False, 'from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType\n'), ((16694, 16941), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'unicode', 'restriction_type': '"""dict_key"""', 'restriction_arg': "{u'vcs-fabric-cluster': {'value': 3}, u'vcs-unknown-cluster': {'value': 1},\n u'vcs-stand-alone': {'value': 2}, u'vcs-management-cluster': {'value': 4}}"}), "(base_type=unicode, restriction_type='dict_key',\n restriction_arg={u'vcs-fabric-cluster': {'value': 3},\n u'vcs-unknown-cluster': {'value': 1}, u'vcs-stand-alone': {'value': 2},\n u'vcs-management-cluster': {'value': 4}})\n", (16713, 16941), False, 'from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType\n'), ((19240, 19340), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 
'long', 'restriction_dict': "{'range': ['0..4294967295']}", 'int_size': '(32)'}), "(base_type=long, restriction_dict={'range': [\n '0..4294967295']}, int_size=32)\n", (19259, 19340), False, 'from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType\n'), ((15063, 15310), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'unicode', 'restriction_type': '"""dict_key"""', 'restriction_arg': "{u'vcs-fabric-cluster': {'value': 3}, u'vcs-unknown-cluster': {'value': 1},\n u'vcs-stand-alone': {'value': 2}, u'vcs-management-cluster': {'value': 4}}"}), "(base_type=unicode, restriction_type='dict_key',\n restriction_arg={u'vcs-fabric-cluster': {'value': 3},\n u'vcs-unknown-cluster': {'value': 1}, u'vcs-stand-alone': {'value': 2},\n u'vcs-management-cluster': {'value': 4}})\n", (15082, 15310), False, 'from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType\n'), ((18103, 18203), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'long', 'restriction_dict': "{'range': ['0..4294967295']}", 'int_size': '(32)'}), "(base_type=long, restriction_dict={'range': [\n '0..4294967295']}, int_size=32)\n", (18122, 18203), False, 'from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType\n')]
|
from typing import Collection, Dict, List, Sequence, Type, TypeVar
import numpy as np
import pandas as pd
from athenian.api.controllers.features.metric import Metric, MetricInt
from athenian.api.controllers.features.metric_calculator import AnyMetricCalculator, \
BinnedMetricCalculator, \
MetricCalculator, MetricCalculatorEnsemble, SumMetricCalculator
from athenian.api.controllers.miners.github.developer import developer_changed_lines_column, \
developer_identity_column, DeveloperTopic
from athenian.api.controllers.miners.github.pull_request import ReviewResolution
from athenian.api.models.metadata.github import PullRequest, PullRequestComment, \
PullRequestReview, PullRequestReviewComment, PushCommit, \
Release
# Registry mapping DeveloperTopic.value -> calculator class; populated by
# the @register_metric decorator below.
metric_calculators: Dict[str, Type[MetricCalculator]] = {}
# NOTE(review): T appears unused in this module — confirm before removing.
T = TypeVar("T")
def register_metric(topic: DeveloperTopic):
    """Keep track of the developer metric calculators.

    Returns a class decorator that files the decorated calculator class
    under ``topic.value`` in the module-level ``metric_calculators``
    registry and hands the class back unchanged.
    """
    assert isinstance(topic, DeveloperTopic)

    def _record(calc_cls: Type[MetricCalculator]):
        # Register, then return the class so this works as a decorator.
        metric_calculators[topic.value] = calc_cls
        return calc_cls

    return _record
class DeveloperMetricCalculatorEnsemble(MetricCalculatorEnsemble):
    """MetricCalculatorEnsemble adapted for developers."""
    def __init__(self, *metrics: str, quantiles: Sequence[float], quantile_stride: int):
        """Initialize a new instance of DeveloperMetricCalculatorEnsemble class."""
        # Resolve metric names through the module-level developer registry.
        super().__init__(*metrics,
                         quantiles=quantiles,
                         quantile_stride=quantile_stride,
                         class_mapping=metric_calculators)
class DeveloperBinnedMetricCalculator(BinnedMetricCalculator):
    """BinnedMetricCalculator adapted for developers."""
    # Plug in the developer-specific ensemble; all behavior is inherited.
    ensemble_class = DeveloperMetricCalculatorEnsemble
class DeveloperTopicCounter(SumMetricCalculator[int]):
    """Count all `topic` events in each time interval."""

    may_have_negative_values = False
    metric = MetricInt
    # Subclasses name the timestamp column the events are counted by.
    timestamp_column: str

    def _analyze(self,
                 facts: pd.DataFrame,
                 min_times: np.ndarray,
                 max_times: np.ndarray,
                 **kwargs) -> np.array:
        # One row per interval, one column per fact; start fully unset.
        out = np.full((len(min_times), len(facts)), self.nan, self.dtype)
        stamps = facts[self.timestamp_column].astype(min_times.dtype, copy=False).values
        # Mark each cell whose timestamp lies inside [min_time, max_time).
        in_window = (stamps >= min_times[:, None]) & (stamps < max_times[:, None])
        out[in_window] = 1
        return out
class DeveloperTopicSummator(SumMetricCalculator[int]):
    """Sum all `topic` events in each time interval."""

    may_have_negative_values = False
    metric = MetricInt
    # Subclasses name the value column to sum and the timestamp column.
    topic_column: str
    timestamp_column: str

    def _analyze(self,
                 facts: pd.DataFrame,
                 min_times: np.ndarray,
                 max_times: np.ndarray,
                 **kwargs) -> np.array:
        out = np.full((len(min_times), len(facts)), self.nan, self.dtype)
        values = facts[self.topic_column].values
        stamps = facts[self.timestamp_column].values
        in_window = (stamps >= min_times[:, None]) & (stamps < max_times[:, None])
        # Copy the topic value into every cell that falls inside its
        # interval; cells outside stay "missing".
        for out_row, mask_row in zip(out, in_window):
            out_row[mask_row] = values[mask_row]
        return out
@register_metric(DeveloperTopic.commits_pushed)
class CommitsPushedCounter(DeveloperTopicCounter):
    """Calculate "dev-commits-pushed" metric."""
    # One event per commit at its commit timestamp.
    timestamp_column = PushCommit.committed_date.name
@register_metric(DeveloperTopic.lines_changed)
class LinesChangedCounter(DeveloperTopicSummator):
    """Calculate "dev-lines-changed" metric."""
    # Sum the changed-lines value of each commit at its commit timestamp.
    topic_column = developer_changed_lines_column
    timestamp_column = PushCommit.committed_date.name
@register_metric(DeveloperTopic.active)
class ActiveCounter(MetricCalculator[int]):
    """Calculate "dev-active" metric."""
    # A developer counts as "active" in an interval when they committed on
    # more than this fraction of the interval's days.
    ACTIVITY_DAYS_THRESHOLD_DENSITY = 0.2
    may_have_negative_values = False
    metric = MetricInt
    def _value(self, samples: np.ndarray) -> Metric[int]:
        if len(samples) > 0:
            # Each sample is a day-floored nanosecond timestamp plus the
            # interval length in days. Day-aligned ns timestamps are exact
            # multiples of 86400e9, so the low six decimal digits carry the
            # interval length intact.
            days = samples[0] % 1000000
            # Stripping the low digits recovers the distinct commit days.
            active = len(np.unique(samples // 1000000))
        else:
            days = 1
            active = 0
        assert days > 0
        value = int(active / days > self.ACTIVITY_DAYS_THRESHOLD_DENSITY)
        return self.metric.from_fields(True, value, None, None)
    def _analyze(self,
                 facts: pd.DataFrame,
                 min_times: np.ndarray,
                 max_times: np.ndarray,
                 **kwargs) -> np.array:
        # Floor each commit timestamp to whole days.
        column = facts[PushCommit.committed_date.name].dt.floor(freq="D").values
        column_in_range = (min_times[:, None] <= column) & (column < max_times[:, None])
        timestamps = np.repeat(column[None, :], len(min_times), axis=0)
        result = timestamps.view(int)
        # Interval lengths in whole days, packed into the low digits of the
        # in-range timestamps; _value() decodes this again.
        lengths = (max_times - min_times).astype("timedelta64[D]").view(int)
        result += lengths[:, None]
        result[~column_in_range] = self.nan
        return result
@register_metric(DeveloperTopic.active0)
class Active0Counter(AnyMetricCalculator[int]):
    """Calculate "dev-active0" metric."""
    # Reuses ActiveCounter's per-interval samples verbatim via the
    # dependency mechanism; no extra computation happens here.
    deps = (ActiveCounter,)
    may_have_negative_values = False
    metric = MetricInt
    def _analyze(self,
                 facts: pd.DataFrame,
                 min_times: np.ndarray,
                 max_times: np.ndarray,
                 **kwargs) -> np.array:
        # Forward the dependency's raw matrix unchanged.
        return self._calcs[0].peek
@register_metric(DeveloperTopic.prs_created)
class PRsCreatedCounter(DeveloperTopicCounter):
    """Calculate "dev-prs-created" metric."""
    # One event per PR at its creation timestamp.
    timestamp_column = PullRequest.created_at.name
@register_metric(DeveloperTopic.prs_merged)
class PRsMergedCounter(DeveloperTopicCounter):
    """Calculate "dev-prs-merged" metric."""
    # One event per PR at its merge timestamp.
    timestamp_column = PullRequest.merged_at.name
@register_metric(DeveloperTopic.releases)
class ReleasesCounter(DeveloperTopicCounter):
    """Calculate "dev-releases" metric."""
    # One event per release at its publication timestamp.
    timestamp_column = Release.published_at.name
@register_metric(DeveloperTopic.regular_pr_comments)
class RegularPRCommentsCounter(DeveloperTopicCounter):
    """Calculate "dev-regular-pr-comments" metric."""
    # One event per non-review PR comment.
    timestamp_column = PullRequestComment.created_at.name
@register_metric(DeveloperTopic.review_pr_comments)
class ReviewPRCommentsCounter(DeveloperTopicCounter):
    """Calculate "dev-review-pr-comments" metric."""
    # One event per review comment.
    timestamp_column = PullRequestReviewComment.created_at.name
@register_metric(DeveloperTopic.pr_comments)
class PRCommentsCounter(DeveloperTopicCounter):
    """Calculate "dev-pr-comments" metric."""
    # NOTE(review): a bare column name, unlike the siblings that take it
    # from a model class — presumably the facts frame here mixes both
    # comment kinds; confirm against the miner that builds it.
    timestamp_column = "created_at"
@register_metric(DeveloperTopic.prs_reviewed)
class PRReviewedCounter(SumMetricCalculator[int]):
    """Calculate "dev-prs-reviewed" metric."""

    may_have_negative_values = False
    metric = MetricInt

    def _analyze(self,
                 facts: pd.DataFrame,
                 min_times: np.ndarray,
                 max_times: np.ndarray,
                 **kwargs) -> np.array:
        submitted = facts[PullRequestReview.submitted_at.name].values
        in_window = (submitted >= min_times[:, None]) & (submitted < max_times[:, None])
        # Count a (PR, reviewer) pair at most once: repeated reviews of the
        # same PR by the same developer are dropped.
        repeats = facts.duplicated([
            PullRequestReview.pull_request_node_id.name, developer_identity_column,
        ]).values
        out = np.full((len(min_times), len(facts)), self.nan, self.dtype)
        in_window[np.broadcast_to(repeats[None, :], out.shape)] = False
        out[in_window] = 1
        return out
@register_metric(DeveloperTopic.reviews)
class ReviewsCounter(DeveloperTopicCounter):
    """Calculate "dev-reviews" metric."""
    # One event per submitted review (duplicates NOT collapsed, unlike
    # PRReviewedCounter).
    timestamp_column = PullRequestReview.submitted_at.name
class ReviewStatesCounter(SumMetricCalculator[int]):
    """Count reviews with the specified outcome in `state`."""

    may_have_negative_values = False
    metric = MetricInt
    # Subclasses set this to a ReviewResolution member.
    state = None

    def _analyze(self,
                 facts: pd.DataFrame,
                 min_times: np.ndarray,
                 max_times: np.ndarray,
                 **kwargs) -> np.array:
        submitted = facts[PullRequestReview.submitted_at.name].values
        in_window = (submitted >= min_times[:, None]) & (submitted < max_times[:, None])
        out = np.full((len(min_times), len(facts)), self.nan, self.dtype)
        # Exclude reviews whose outcome differs from the configured state.
        mismatched = facts[PullRequestReview.state.name].values != self.state.value
        in_window[np.broadcast_to(mismatched[None, :], out.shape)] = False
        out[in_window] = 1
        return out
@register_metric(DeveloperTopic.review_approvals)
class ApprovalsCounter(ReviewStatesCounter):
    """Calculate "dev-review-approved" metric."""
    # Count only reviews whose outcome is APPROVED.
    state = ReviewResolution.APPROVED
@register_metric(DeveloperTopic.review_rejections)
class RejectionsCounter(ReviewStatesCounter):
    """Calculate "dev-review-rejected" metric."""
    # Count only reviews whose outcome is CHANGES_REQUESTED.
    state = ReviewResolution.CHANGES_REQUESTED
@register_metric(DeveloperTopic.review_neutrals)
class NeutralReviewsCounter(ReviewStatesCounter):
    """Calculate "dev-review-neutrals" metric."""
    # Count only reviews whose outcome is COMMENTED.
    state = ReviewResolution.COMMENTED
@register_metric(DeveloperTopic.worked)
class WorkedCounter(AnyMetricCalculator[int]):
    """Calculate "dev-worked" metric."""
    # A developer "worked" in an interval if any of these activity metrics
    # registered at least one event there.
    deps = (
        PRsCreatedCounter,
        PRsMergedCounter,
        ReleasesCounter,
        CommitsPushedCounter,
        ReviewsCounter,
        RegularPRCommentsCounter,
    )
    may_have_negative_values = False
    metric = MetricInt
    def _analyze(self,
                 facts: pd.DataFrame,
                 min_times: np.ndarray,
                 max_times: np.ndarray,
                 **kwargs) -> np.array:
        result = np.full((len(min_times), len(facts)), 0, self.dtype)
        for calc in self._calcs:
            # OR-in every cell where the dependency saw activity.
            result |= calc.peek > 0
        # Cells with no activity at all become "missing" rather than 0.
        result[result == 0] = self.nan
        return result
def group_actions_by_developers(devs: Sequence[Collection[str]],
                                df: pd.DataFrame,
                                ) -> List[np.ndarray]:
    """Group developer actions by developer groups.

    :param devs: Groups of developer identities (native string logins).
    :param df: Actions table that contains `developer_identity_column`.
    :return: For each group, the row indexes of `df` that belong to its \
             members.
    """
    indexes = []
    identities = df[developer_identity_column].values.astype("S")
    for group in devs:
        if len(group) == 1:
            # Fast path: a single developer needs no set membership test.
            dev = next(iter(group))
            indexes.append(np.nonzero(identities == dev.encode())[0])
            continue
        if isinstance(group, set):
            group = list(group)  # np.array() cannot consume a set directly
        # np.isin supersedes the deprecated np.in1d; identical semantics
        # for 1-D inputs.
        indexes.append(np.nonzero(np.isin(identities, np.array(group, dtype="S")))[0])
    return indexes
|
[
"typing.TypeVar",
"numpy.broadcast_to",
"numpy.array",
"numpy.unique"
] |
[((808, 820), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {}), "('T')\n", (815, 820), False, 'from typing import Collection, Dict, List, Sequence, Type, TypeVar\n'), ((7516, 7566), 'numpy.broadcast_to', 'np.broadcast_to', (['duplicated[None, :]', 'result.shape'], {}), '(duplicated[None, :], result.shape)\n', (7531, 7566), True, 'import numpy as np\n'), ((8544, 8595), 'numpy.broadcast_to', 'np.broadcast_to', (['wrong_state[None, :]', 'result.shape'], {}), '(wrong_state[None, :], result.shape)\n', (8559, 8595), True, 'import numpy as np\n'), ((4206, 4235), 'numpy.unique', 'np.unique', (['(samples // 1000000)'], {}), '(samples // 1000000)\n', (4215, 4235), True, 'import numpy as np\n'), ((10604, 10630), 'numpy.array', 'np.array', (['group'], {'dtype': '"""S"""'}), "(group, dtype='S')\n", (10612, 10630), True, 'import numpy as np\n')]
|
#!/usr/bin/env python3
import socket
import uuid
import paho.mqtt.client as mqtt
from select import select
from time import time
# Random client id doubles as the topic so concurrent runs do not collide.
client_id = 'paho-mqtt-python/issue72/' + str(uuid.uuid4())
topic = client_id
print("Using client_id / topic: " + client_id)
class SelectMqttExample:
    """Drive paho-mqtt with an explicit select() loop instead of loop_forever().

    Publishes a large payload roughly every 5 seconds, counts the echoed
    copies in ``self.state`` and disconnects after three round trips.
    """

    def __init__(self):
        pass

    def on_connect(self, client, userdata, flags, rc):
        """Subscribe once the broker accepts the connection."""
        print("Subscribing")
        client.subscribe(topic)

    def on_message(self, client, userdata, msg):
        """Count expected echoes; states 1/3/5 mean "waiting for an echo"."""
        if self.state not in {1, 3, 5}:
            # BUG FIX: MQTTMessage has no .decode(); the payload bytes do.
            print("Got unexpected message: {}".format(msg.payload.decode()))
            return
        print("Got message with len {}".format(len(msg.payload)))
        self.state += 1
        self.t = time()

    def on_disconnect(self, client, userdata, rc):
        self.disconnected = True, rc

    def do_select(self):
        """Wait up to 1 s for the socket, then run the matching loop_* calls."""
        sock = self.client.socket()
        if not sock:
            raise Exception("Socket is gone")
        print("Selecting for reading" + (" and writing" if self.client.want_write() else ""))
        r, w, e = select(
            [sock],
            [sock] if self.client.want_write() else [],
            [],
            1
        )
        if sock in r:
            print("Socket is readable, calling loop_read")
            self.client.loop_read()
        if sock in w:
            print("Socket is writable, calling loop_write")
            self.client.loop_write()
        # Housekeeping: keepalives, retries, etc.
        self.client.loop_misc()

    def main(self):
        """Connect, then pump the select loop until disconnected."""
        self.disconnected = (False, None)
        self.t = time()
        self.state = 0
        self.client = mqtt.Client(client_id=client_id)
        self.client.on_connect = self.on_connect
        self.client.on_message = self.on_message
        self.client.on_disconnect = self.on_disconnect
        self.client.connect('mqtt.eclipseprojects.io', 1883, 60)
        print("Socket opened")
        # Shrink the send buffer so the large publish exercises want_write().
        self.client.socket().setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 2048)
        while not self.disconnected[0]:
            self.do_select()
            if self.state in {0, 2, 4}:
                if time() - self.t >= 5:
                    print("Publishing")
                    self.client.publish(topic, b'Hello' * 40000)
                    self.state += 1
            if self.state == 6:
                # All three round trips done; bump past 6 so we do not call
                # disconnect() again while the DISCONNECT packet drains.
                self.state += 1
                self.client.disconnect()
        print("Disconnected: {}".format(self.disconnected[1]))
# Script entry: run one full publish/echo session against the public broker.
print("Starting")
SelectMqttExample().main()
print("Finished")
|
[
"paho.mqtt.client.Client",
"uuid.uuid4",
"time.time"
] |
[((177, 189), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (187, 189), False, 'import uuid\n'), ((723, 729), 'time.time', 'time', ([], {}), '()\n', (727, 729), False, 'from time import time\n'), ((1536, 1542), 'time.time', 'time', ([], {}), '()\n', (1540, 1542), False, 'from time import time\n'), ((1589, 1621), 'paho.mqtt.client.Client', 'mqtt.Client', ([], {'client_id': 'client_id'}), '(client_id=client_id)\n', (1600, 1621), True, 'import paho.mqtt.client as mqtt\n'), ((2085, 2091), 'time.time', 'time', ([], {}), '()\n', (2089, 2091), False, 'from time import time\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# recorder.py
#
# Copyright 2019 <<EMAIL>>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
import os
import time
from AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTClient
import logging
import time
import argparse
import json
from random import randint
# Configure logging
logger = logging.getLogger("AWSIoTPythonSDK.core")
logger.setLevel(logging.DEBUG)
streamHandler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
streamHandler.setFormatter(formatter)
logger.addHandler(streamHandler)
# AWS IoT endpoint and TLS credentials (files expected in the working dir).
host = 'a6zwxk9vm9bfa-ats.iot.us-west-2.amazonaws.com'
rootCAPath = 'root-CA.crt'
certificatePath = 'pi.cert.pem'
privateKeyPath = 'pi.private.key'
port = 8883
clientId = 'basicPubSub'
topic = 'sdk/test/Python'
myAWSIoTMQTTClient = None
# NOTE(review): this module-level fileId is shadowed by same-named locals in
# customCallback/main — confirm it is actually read anywhere.
fileId= ''
def recordAudio(filename, key):
    """Record 3 s of audio to *filename* and upload it to S3 under *key*.

    For the 'greetings' key, additionally launch the intro mp3 and the
    follow-up greeting script in the background.
    """
    print("Starting Recorder")
    record_and_upload = (
        'arecord /home/pi/Desktop/' + filename
        + ' -t wav -D sysdefault:CARD=1 -d 3 -r 48000'
        + ';aws s3 cp /home/pi/Desktop/' + filename
        + ' s3://bdfairdev/' + key + '/' + filename
    )
    os.system(record_and_upload)
    print("Recording complete and sent to S3")
    if key == 'greetings':
        os.system("ffplay -nodisp -autoexit /home/pi/PolyglotRobot/generalIntro1.mp3 >/dev/null 2>&1 &")
        os.system(" python3 /home/pi/PolyglotRobot/avainitialgreetings.py &")
def recordRawAudio(filename, key):
    """Record 3 s of stereo 16-bit/22.05 kHz audio and upload it to S3."""
    print("Starting Recorder")
    os.system(
        'arecord /home/pi/Desktop/' + filename
        + ' -c 2 -f S16_LE -r 22050 -t wav -D sysdefault:CARD=1 -d 3'
        + ';aws s3 cp /home/pi/Desktop/' + filename
        + ' s3://bdfairdev/' + key + '/' + filename
    )
    print("Recording complete and sent to S3")
# Custom MQTT message callback
def customCallback(client, userdata, message):
    """Handle a response: fetch the synthesized mp3 from S3, play it, then
    record the user's next utterance."""
    print("Received a new message: ")
    print(message.payload)
    # Expected payload keys: "language", "s3output", "type".
    payload = json.loads(message.payload)
    langDetected = payload["language"]
    output=payload["s3output"]
    outputType=payload["type"]
    print(output)
    # Download the synthesized speech locally before playback.
    cmd ='aws s3 cp ' + output + ' /home/pi/Desktop/output.mp3'
    os.system(cmd)
    if outputType == 'weather':
        # Play the language-specific greeting first.
        cmd='python3 ' + langDetected+'_avagreeting.py & >/dev/null 2>&1'
        print(cmd)
        os.system(cmd)
        time.sleep(2)
    cmd='ffplay -nodisp -autoexit /home/pi/Desktop/output.mp3 >/dev/null 2>&1'
    os.system(cmd)
    print("from topic: ")
    print(message.topic)
    print("--------------\n\n")
    # Local fileId shadows the module-level fileId of the same name.
    fileId = str(randint(123456,234532))
    fileId= fileId +'.wav'
    if outputType == 'goodbye':
        # 'goodbye' restarts the whole greeting flow before re-recording.
        main(None)
    recordRawAudio(langDetected+'_'+fileId, outputType)
def waitForResponse():
    """Connect to AWS IoT over TLS, subscribe to the response topic, and
    block forever while the SDK dispatches messages to customCallback.
    """
    myAWSIoTMQTTClient = AWSIoTMQTTClient(clientId)
    myAWSIoTMQTTClient.configureEndpoint(host, port)
    myAWSIoTMQTTClient.configureCredentials(rootCAPath, privateKeyPath, certificatePath)
    # Connection tuning as recommended by the AWS IoT SDK samples.
    myAWSIoTMQTTClient.configureAutoReconnectBackoffTime(1, 32, 20)
    myAWSIoTMQTTClient.configureOfflinePublishQueueing(-1)  # Infinite offline Publish queueing
    myAWSIoTMQTTClient.configureDrainingFrequency(2)  # Draining: 2 Hz
    myAWSIoTMQTTClient.configureConnectDisconnectTimeout(10)  # 10 sec
    myAWSIoTMQTTClient.configureMQTTOperationTimeout(5)  # 5 sec
    myAWSIoTMQTTClient.connect()
    myAWSIoTMQTTClient.subscribe(topic, 1, customCallback)
    time.sleep(2)
    # Keep the process alive; the SDK delivers messages on its own thread.
    # (Removed the unused loopCount local.)
    while True:
        time.sleep(2)
def main(args):
    """Record an initial greeting clip, then wait (forever) for responses."""
    time.sleep(2)
    # Random wav name avoids clobbering earlier recordings in S3.
    wav_name = str(randint(123456,234532)) + '.wav'
    recordAudio(wav_name, 'greetings')
    waitForResponse()
    return 0
if __name__ == '__main__':
    import sys
    # Exit status comes from main(); in practice waitForResponse() never returns.
    sys.exit(main(sys.argv))
|
[
"AWSIoTPythonSDK.MQTTLib.AWSIoTMQTTClient",
"json.loads",
"random.randint",
"logging.StreamHandler",
"os.system",
"time.sleep",
"logging.Formatter",
"logging.getLogger"
] |
[((1019, 1060), 'logging.getLogger', 'logging.getLogger', (['"""AWSIoTPythonSDK.core"""'], {}), "('AWSIoTPythonSDK.core')\n", (1036, 1060), False, 'import logging\n'), ((1108, 1131), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (1129, 1131), False, 'import logging\n'), ((1144, 1217), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""'], {}), "('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", (1161, 1217), False, 'import logging\n'), ((1775, 1789), 'os.system', 'os.system', (['cmd'], {}), '(cmd)\n', (1784, 1789), False, 'import os\n'), ((2341, 2355), 'os.system', 'os.system', (['cmd'], {}), '(cmd)\n', (2350, 2355), False, 'import os\n'), ((2564, 2591), 'json.loads', 'json.loads', (['message.payload'], {}), '(message.payload)\n', (2574, 2591), False, 'import json\n'), ((2785, 2799), 'os.system', 'os.system', (['cmd'], {}), '(cmd)\n', (2794, 2799), False, 'import os\n'), ((3053, 3067), 'os.system', 'os.system', (['cmd'], {}), '(cmd)\n', (3062, 3067), False, 'import os\n'), ((3376, 3402), 'AWSIoTPythonSDK.MQTTLib.AWSIoTMQTTClient', 'AWSIoTMQTTClient', (['clientId'], {}), '(clientId)\n', (3392, 3402), False, 'from AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTClient\n'), ((4012, 4025), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (4022, 4025), False, 'import time\n'), ((4160, 4173), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (4170, 4173), False, 'import time\n'), ((1973, 1987), 'os.system', 'os.system', (['cmd'], {}), '(cmd)\n', (1982, 1987), False, 'import os\n'), ((2070, 2085), 'os.system', 'os.system', (['cmd1'], {}), '(cmd1)\n', (2079, 2085), False, 'import os\n'), ((2933, 2947), 'os.system', 'os.system', (['cmd'], {}), '(cmd)\n', (2942, 2947), False, 'import os\n'), ((2956, 2969), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (2966, 2969), False, 'import time\n'), ((3168, 3191), 'random.randint', 'randint', (['(123456)', '(234532)'], {}), '(123456, 234532)\n', 
(3175, 3191), False, 'from random import randint\n'), ((4119, 4132), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (4129, 4132), False, 'import time\n'), ((4191, 4214), 'random.randint', 'randint', (['(123456)', '(234532)'], {}), '(123456, 234532)\n', (4198, 4214), False, 'from random import randint\n')]
|
'''
Test the FFT functions
'''
import numpy as np
from .. import LombScargle as LS
import matplotlib.pyplot as plt
import matplotlib.colors as colors
from mpl_toolkits.axes_grid1 import make_axes_locatable
from ..Tools.mode import mode
from ..Tools.DetectGaps import DetectGaps
from ..Spectrogram.SpectrogramPlotter import SpectrogramPlotter
def Spectrum():
    """Plot a two-tone, irregularly-sampled test signal and its Lomb-Scargle spectrum.

    Builds v(t) = A0*cos(2*pi*f0*t + p0) + A1*cos(2*pi*f1*t + p1) on a
    jittered time base, computes the periodogram and draws both panels.
    """
    # Two test tones (Hz), their amplitudes and phases.
    f0 = 0.002
    f1 = 0.005
    A0 = 2.0
    A1 = 1.5
    p0 = np.pi/2.0
    p1 = 0.0
    # Irregular time series: unit steps plus up to 5 s of random jitter.
    t = np.arange(1000.0)
    dt = np.random.rand(t.size)*5
    t = t + dt
    t.sort()
    v0 = A0*np.cos(2*np.pi*f0*t + p0)
    v1 = A1*np.cos(2*np.pi*f1*t + p1)
    v = v0 + v1
    # Frequency axis: 2000 bins up to (just under) 0.5 Hz.
    freq = np.arange(2000,dtype='float32')/(np.float32(4000*1.0))
    # Lomb-Scargle periodogram (removed a leftover debug print of freq.max()).
    power,A,phase,fr,fi = LS.LombScargle(t,v,freq)
    # Figure: time series on top, spectrum below.
    fig = plt
    fig.figure(figsize=(8,11))
    ax0 = fig.subplot2grid((2,1),(0,0))
    ax1 = fig.subplot2grid((2,1),(1,0))
    ax0.plot(t,v0,color='red',linestyle='--')
    ax0.plot(t,v1,color='orange',linestyle='--')
    ax0.plot(t,v,color='black',linestyle='-')
    ax1.plot(freq*1000.0,power,color='blue')
    ax0.set_xlabel('$t$ (s)')
    ax1.set_ylabel('Power')
    ax1.set_xlabel('Frequency (mHz)')
    # Zoom the frequency axis to 1.5x the highest injected tone (in mHz).
    fmx = np.min([freq.max(),1.5*np.max([f0,f1])])
    ax1.set_xlim(0,fmx*1000)
def Spectrogram():
    """Build a 3-hour two-tone test signal and draw its Lomb-Scargle spectrogram."""
    # Tone parameters: frequencies (Hz), amplitudes and phases.
    freq_a, freq_b = 0.002, 0.005
    amp_a, amp_b = 2.0, 1.5
    phase_a, phase_b = np.pi/2.0, 0.0
    # Irregular time base: 10800 unit steps with up to 5 s of random jitter.
    t = np.arange(10800.0)
    t = t + np.random.rand(t.size)*5
    t.sort()
    tone_a = amp_a*np.cos(2*np.pi*freq_a*t + phase_a)
    tone_b = amp_b*np.cos(2*np.pi*freq_b*t + phase_b)
    signal = tone_a + tone_b
    # Sliding-window length and step (seconds).
    window = 1800
    step = 200
    # Frequency axis: 900 bins up to (just under) 0.5 Hz.
    f_axis = np.arange(900,dtype='float32')/(np.float32(1800*1.0))
    # Figure: time series on top, spectrogram below.
    fig = plt
    fig.figure(figsize=(8,11))
    ax_sig = fig.subplot2grid((2,1),(0,0))
    ax_spec = fig.subplot2grid((2,1),(1,0))
    ax_sig.plot(t,tone_a,color='red',linestyle='--')
    ax_sig.plot(t,tone_b,color='orange',linestyle='--')
    ax_sig.plot(t,signal,color='black',linestyle='-')
    ax_sig.set_xlabel('Time (s)')
    # Compute and draw the spectrogram in the lower panel.
    ax_spec,Nw,Freq,Spec = LS.PlotSpectrogram(t,signal,window,step,Freq=f_axis,FreqAxisUnits='mHz',fig=fig,maps=[1,2,0,1])
    # Restrict the frequency axis to 1.5x the highest injected tone (mHz).
    top = np.min([Freq.max(),1.5*np.max([freq_a,freq_b])])
    ax_spec.set_ylim(0,top*1000)
def Spectrogram2():
    """Same as Spectrogram(), but computes the spectrogram first and plots it separately."""
    # Tone parameters: frequencies (Hz), amplitudes and phases.
    freq_a, freq_b = 0.002, 0.005
    amp_a, amp_b = 2.0, 1.5
    phase_a, phase_b = np.pi/2.0, 0.0
    # Irregular time base: 10800 unit steps with up to 5 s of random jitter.
    t = np.arange(10800.0)
    t = t + np.random.rand(t.size)*5
    t.sort()
    tone_a = amp_a*np.cos(2*np.pi*freq_a*t + phase_a)
    tone_b = amp_b*np.cos(2*np.pi*freq_b*t + phase_b)
    signal = tone_a + tone_b
    # Sliding-window length and step (seconds).
    window = 1800
    step = 200
    # Frequency axis: 900 bins up to (just under) 0.5 Hz.
    f_axis = np.arange(900,dtype='float32')/(np.float32(1800*1.0))
    # Figure: time series on top, spectrogram below.
    fig = plt
    fig.figure(figsize=(8,11))
    ax_sig = fig.subplot2grid((2,1),(0,0))
    ax_spec = fig.subplot2grid((2,1),(1,0))
    ax_sig.plot(t,tone_a,color='red',linestyle='--')
    ax_sig.plot(t,tone_b,color='orange',linestyle='--')
    ax_sig.plot(t,signal,color='black',linestyle='-')
    ax_sig.set_xlabel('Time (s)')
    # Compute the spectrogram first, then hand the result to the plotter.
    Nw,Freq,Spec = LS.Spectrogram(t,signal,window,step,Freq=f_axis)
    ax_spec,Nw,Freq,Spec = LS.PlotSpectrogram(Freq,Spec,FreqAxisUnits='mHz',fig=fig,maps=[1,2,0,1])
    # Restrict the frequency axis to 1.5x the highest injected tone (mHz).
    top = np.min([Freq.max(),1.5*np.max([freq_a,freq_b])])
    ax_spec.set_ylim(0,top*1000)
def Spectrogram3():
    """Same as Spectrogram2(), but draws via the generic SpectrogramPlotter."""
    # Tone parameters: frequencies (Hz), amplitudes and phases.
    freq_a, freq_b = 0.002, 0.005
    amp_a, amp_b = 2.0, 1.5
    phase_a, phase_b = np.pi/2.0, 0.0
    # Irregular time base: 10800 unit steps with up to 5 s of random jitter.
    t = np.arange(10800.0)
    t = t + np.random.rand(t.size)*5
    t.sort()
    tone_a = amp_a*np.cos(2*np.pi*freq_a*t + phase_a)
    tone_b = amp_b*np.cos(2*np.pi*freq_b*t + phase_b)
    signal = tone_a + tone_b
    # Sliding-window length and step (seconds).
    window = 1800
    step = 200
    # Frequency axis: 900 bins up to (just under) 0.5 Hz.
    f_axis = np.arange(900,dtype='float32')/(np.float32(1800*1.0))
    # Figure: time series on top, spectrogram below.
    fig = plt
    fig.figure(figsize=(8,11))
    ax_sig = fig.subplot2grid((2,1),(0,0))
    ax_spec = fig.subplot2grid((2,1),(1,0))
    ax_sig.plot(t,tone_a,color='red',linestyle='--')
    ax_sig.plot(t,tone_b,color='orange',linestyle='--')
    ax_sig.plot(t,signal,color='black',linestyle='-')
    ax_sig.set_xlabel('Time (s)')
    # Compute the spectrogram, then plot its power with the generic plotter
    # (frequencies converted to mHz here rather than by the plotter).
    Nw,Freq,Spec = LS.Spectrogram(t,signal,window,step,Freq=f_axis)
    ax_spec = SpectrogramPlotter(Spec.Tspec,Freq*1000,Spec.Pow,fig=fig,maps=[1,2,0,1])
    # Restrict the frequency axis to 1.5x the highest injected tone (mHz).
    top = np.min([Freq.max(),1.5*np.max([freq_a,freq_b])])
    ax_spec.set_ylim(0,top*1000)
def Spectrogram3D():
    """Test the 3-component Lomb-Scargle spectrogram with two tones per axis.

    Builds x(t) and y(t) from two tones each (z is zero), computes the
    combined-component spectrogram, and plots the xy power panel section by
    section so data gaps are not smeared across. Returns the Spec object.
    """
    # Two tones per component (Hz); shared amplitudes and phases.
    fx0 = 0.002
    fx1 = 0.007
    fy0 = 0.007
    fy1 = 0.010
    A0 = 2.0
    A1 = 1.5
    p0 = np.pi/2.0
    p1 = 0.0
    # Irregular time base: 10800 unit steps with up to 5 s of random jitter.
    t = np.arange(10800.0)
    dt = np.random.rand(t.size)*5
    t = t + dt
    t.sort()
    x0 = A0*np.cos(2*np.pi*fx0*t + p0)
    x1 = A1*np.cos(2*np.pi*fx1*t + p1)
    x = x0 + x1
    y0 = A0*np.cos(2*np.pi*fy0*t + p0)
    y1 = A1*np.cos(2*np.pi*fy1*t + p1)
    y = y0 + y1
    z = np.zeros(t.size,dtype='float32')
    # Frequency axis: 900 bins up to (just under) 0.5 Hz.
    freq = np.arange(900,dtype='float32')/(np.float32(1800*1.0))
    # Sliding-window spectrogram over all three components.
    wind = 1800
    slip = 200
    Nw,Freq,Spec = LS.Spectrogram3D(t,x,y,z,wind,slip,Freq=freq,CombineComps=True)
    Nf = Freq.size - 1
    S = Spec.xyPow
    f = Freq[:Nf+1]*1000.0
    ts = Spec.Tspec
    xlabel = 'Time (s)'
    # Half the typical window spacing, used to centre the pcolormesh cells.
    dt = mode(ts[1:] - ts[:-1])/2.0
    scale = [np.nanmin(S),np.nanmax(S)]
    norm = colors.Normalize(vmin=scale[0],vmax=scale[1])
    cmap = plt.cm.get_cmap('gnuplot')
    # Split the spectrogram into contiguous gap-free sections.
    # (Removed an unused `gaps = np.where(...)` local.)
    ngd,T0,T1 = DetectGaps(S[:,1])
    # Figure: the two input components on top, the spectrogram below.
    fig = plt
    fig.figure(figsize=(8,11))
    ax0 = fig.subplot2grid((2,1),(0,0))
    ax1 = fig.subplot2grid((2,1),(1,0))
    ax0.plot(t,x,color='red')
    ax0.plot(t,y,color='orange')
    sm = None
    for i in range(0,ngd):
        # Select this contiguous good section of the spectrogram.
        use = np.arange(T0[i],T1[i]+1)
        tax = np.append(ts[use]-dt,ts[use[-1]]+dt)
        Stmp = S[use]
        # Mesh the axes and plot the section.
        tm,fm = np.meshgrid(tax,f)
        sm = ax1.pcolormesh(tm.T,fm.T,Stmp,cmap=cmap,norm=norm)
    # Colour bar (only if at least one section was drawn).
    fig.subplots_adjust(right=0.8)
    box = ax1.get_position()
    if sm is not None:
        cax = plt.axes([0.05*box.width + box.x1,box.y0+0.1*box.height,box.width*0.025,box.height*0.8])
        cbar = fig.colorbar(sm,cax=cax)
        cbar.set_label('Power')
    # Axis labels and a zoom to 1.5x the highest injected tone (mHz).
    ax1.set_xlabel(xlabel)
    ax1.set_ylabel('$f$ (mHz)')
    fmx = np.min([Freq.max(),1.5*np.max([fx0,fx1,fy0,fy1])])
    ax1.set_ylim(0,fmx*1000)
    return Spec
|
[
"numpy.meshgrid",
"matplotlib.colors.Normalize",
"matplotlib.pyplot.axes",
"numpy.float32",
"numpy.zeros",
"numpy.isfinite",
"numpy.nanmin",
"numpy.append",
"numpy.max",
"numpy.arange",
"numpy.cos",
"numpy.random.rand",
"matplotlib.pyplot.cm.get_cmap",
"numpy.nanmax"
] |
[((502, 519), 'numpy.arange', 'np.arange', (['(1000.0)'], {}), '(1000.0)\n', (511, 519), True, 'import numpy as np\n'), ((1455, 1473), 'numpy.arange', 'np.arange', (['(10800.0)'], {}), '(10800.0)\n', (1464, 1473), True, 'import numpy as np\n'), ((2364, 2382), 'numpy.arange', 'np.arange', (['(10800.0)'], {}), '(10800.0)\n', (2373, 2382), True, 'import numpy as np\n'), ((3320, 3338), 'numpy.arange', 'np.arange', (['(10800.0)'], {}), '(10800.0)\n', (3329, 3338), True, 'import numpy as np\n'), ((4292, 4310), 'numpy.arange', 'np.arange', (['(10800.0)'], {}), '(10800.0)\n', (4301, 4310), True, 'import numpy as np\n'), ((4541, 4574), 'numpy.zeros', 'np.zeros', (['t.size'], {'dtype': '"""float32"""'}), "(t.size, dtype='float32')\n", (4549, 4574), True, 'import numpy as np\n'), ((4949, 4995), 'matplotlib.colors.Normalize', 'colors.Normalize', ([], {'vmin': 'scale[0]', 'vmax': 'scale[1]'}), '(vmin=scale[0], vmax=scale[1])\n', (4965, 4995), True, 'import matplotlib.colors as colors\n'), ((5004, 5030), 'matplotlib.pyplot.cm.get_cmap', 'plt.cm.get_cmap', (['"""gnuplot"""'], {}), "('gnuplot')\n", (5019, 5030), True, 'import matplotlib.pyplot as plt\n'), ((526, 548), 'numpy.random.rand', 'np.random.rand', (['t.size'], {}), '(t.size)\n', (540, 548), True, 'import numpy as np\n'), ((584, 615), 'numpy.cos', 'np.cos', (['(2 * np.pi * f0 * t + p0)'], {}), '(2 * np.pi * f0 * t + p0)\n', (590, 615), True, 'import numpy as np\n'), ((619, 650), 'numpy.cos', 'np.cos', (['(2 * np.pi * f1 * t + p1)'], {}), '(2 * np.pi * f1 * t + p1)\n', (625, 650), True, 'import numpy as np\n'), ((682, 714), 'numpy.arange', 'np.arange', (['(2000)'], {'dtype': '"""float32"""'}), "(2000, dtype='float32')\n", (691, 714), True, 'import numpy as np\n'), ((715, 737), 'numpy.float32', 'np.float32', (['(4000 * 1.0)'], {}), '(4000 * 1.0)\n', (725, 737), True, 'import numpy as np\n'), ((1480, 1502), 'numpy.random.rand', 'np.random.rand', (['t.size'], {}), '(t.size)\n', (1494, 1502), True, 'import numpy as np\n'), 
((1537, 1568), 'numpy.cos', 'np.cos', (['(2 * np.pi * f0 * t + p0)'], {}), '(2 * np.pi * f0 * t + p0)\n', (1543, 1568), True, 'import numpy as np\n'), ((1572, 1603), 'numpy.cos', 'np.cos', (['(2 * np.pi * f1 * t + p1)'], {}), '(2 * np.pi * f1 * t + p1)\n', (1578, 1603), True, 'import numpy as np\n'), ((1661, 1692), 'numpy.arange', 'np.arange', (['(900)'], {'dtype': '"""float32"""'}), "(900, dtype='float32')\n", (1670, 1692), True, 'import numpy as np\n'), ((1693, 1715), 'numpy.float32', 'np.float32', (['(1800 * 1.0)'], {}), '(1800 * 1.0)\n', (1703, 1715), True, 'import numpy as np\n'), ((2389, 2411), 'numpy.random.rand', 'np.random.rand', (['t.size'], {}), '(t.size)\n', (2403, 2411), True, 'import numpy as np\n'), ((2446, 2477), 'numpy.cos', 'np.cos', (['(2 * np.pi * f0 * t + p0)'], {}), '(2 * np.pi * f0 * t + p0)\n', (2452, 2477), True, 'import numpy as np\n'), ((2481, 2512), 'numpy.cos', 'np.cos', (['(2 * np.pi * f1 * t + p1)'], {}), '(2 * np.pi * f1 * t + p1)\n', (2487, 2512), True, 'import numpy as np\n'), ((2570, 2601), 'numpy.arange', 'np.arange', (['(900)'], {'dtype': '"""float32"""'}), "(900, dtype='float32')\n", (2579, 2601), True, 'import numpy as np\n'), ((2602, 2624), 'numpy.float32', 'np.float32', (['(1800 * 1.0)'], {}), '(1800 * 1.0)\n', (2612, 2624), True, 'import numpy as np\n'), ((3345, 3367), 'numpy.random.rand', 'np.random.rand', (['t.size'], {}), '(t.size)\n', (3359, 3367), True, 'import numpy as np\n'), ((3402, 3433), 'numpy.cos', 'np.cos', (['(2 * np.pi * f0 * t + p0)'], {}), '(2 * np.pi * f0 * t + p0)\n', (3408, 3433), True, 'import numpy as np\n'), ((3437, 3468), 'numpy.cos', 'np.cos', (['(2 * np.pi * f1 * t + p1)'], {}), '(2 * np.pi * f1 * t + p1)\n', (3443, 3468), True, 'import numpy as np\n'), ((3526, 3557), 'numpy.arange', 'np.arange', (['(900)'], {'dtype': '"""float32"""'}), "(900, dtype='float32')\n", (3535, 3557), True, 'import numpy as np\n'), ((3558, 3580), 'numpy.float32', 'np.float32', (['(1800 * 1.0)'], {}), '(1800 * 1.0)\n', 
(3568, 3580), True, 'import numpy as np\n'), ((4317, 4339), 'numpy.random.rand', 'np.random.rand', (['t.size'], {}), '(t.size)\n', (4331, 4339), True, 'import numpy as np\n'), ((4375, 4407), 'numpy.cos', 'np.cos', (['(2 * np.pi * fx0 * t + p0)'], {}), '(2 * np.pi * fx0 * t + p0)\n', (4381, 4407), True, 'import numpy as np\n'), ((4411, 4443), 'numpy.cos', 'np.cos', (['(2 * np.pi * fx1 * t + p1)'], {}), '(2 * np.pi * fx1 * t + p1)\n', (4417, 4443), True, 'import numpy as np\n'), ((4460, 4492), 'numpy.cos', 'np.cos', (['(2 * np.pi * fy0 * t + p0)'], {}), '(2 * np.pi * fy0 * t + p0)\n', (4466, 4492), True, 'import numpy as np\n'), ((4496, 4528), 'numpy.cos', 'np.cos', (['(2 * np.pi * fy1 * t + p1)'], {}), '(2 * np.pi * fy1 * t + p1)\n', (4502, 4528), True, 'import numpy as np\n'), ((4597, 4628), 'numpy.arange', 'np.arange', (['(900)'], {'dtype': '"""float32"""'}), "(900, dtype='float32')\n", (4606, 4628), True, 'import numpy as np\n'), ((4629, 4651), 'numpy.float32', 'np.float32', (['(1800 * 1.0)'], {}), '(1800 * 1.0)\n', (4639, 4651), True, 'import numpy as np\n'), ((4914, 4926), 'numpy.nanmin', 'np.nanmin', (['S'], {}), '(S)\n', (4923, 4926), True, 'import numpy as np\n'), ((4927, 4939), 'numpy.nanmax', 'np.nanmax', (['S'], {}), '(S)\n', (4936, 4939), True, 'import numpy as np\n'), ((5391, 5418), 'numpy.arange', 'np.arange', (['T0[i]', '(T1[i] + 1)'], {}), '(T0[i], T1[i] + 1)\n', (5400, 5418), True, 'import numpy as np\n'), ((5424, 5465), 'numpy.append', 'np.append', (['(ts[use] - dt)', '(ts[use[-1]] + dt)'], {}), '(ts[use] - dt, ts[use[-1]] + dt)\n', (5433, 5465), True, 'import numpy as np\n'), ((5510, 5529), 'numpy.meshgrid', 'np.meshgrid', (['tax', 'f'], {}), '(tax, f)\n', (5521, 5529), True, 'import numpy as np\n'), ((5708, 5813), 'matplotlib.pyplot.axes', 'plt.axes', (['[0.05 * box.width + box.x1, box.y0 + 0.1 * box.height, box.width * 0.025, \n box.height * 0.8]'], {}), '([0.05 * box.width + box.x1, box.y0 + 0.1 * box.height, box.width *\n 0.025, box.height * 
0.8])\n', (5716, 5813), True, 'import matplotlib.pyplot as plt\n'), ((1247, 1263), 'numpy.max', 'np.max', (['[f0, f1]'], {}), '([f0, f1])\n', (1253, 1263), True, 'import numpy as np\n'), ((2157, 2173), 'numpy.max', 'np.max', (['[f0, f1]'], {}), '([f0, f1])\n', (2163, 2173), True, 'import numpy as np\n'), ((3111, 3127), 'numpy.max', 'np.max', (['[f0, f1]'], {}), '([f0, f1])\n', (3117, 3127), True, 'import numpy as np\n'), ((4054, 4070), 'numpy.max', 'np.max', (['[f0, f1]'], {}), '([f0, f1])\n', (4060, 4070), True, 'import numpy as np\n'), ((5061, 5081), 'numpy.isfinite', 'np.isfinite', (['S[:, 1]'], {}), '(S[:, 1])\n', (5072, 5081), True, 'import numpy as np\n'), ((5960, 5988), 'numpy.max', 'np.max', (['[fx0, fx1, fy0, fy1]'], {}), '([fx0, fx1, fy0, fy1])\n', (5966, 5988), True, 'import numpy as np\n')]
|
from unittest import TestCase
from contentful_management.content_type_field_validation import ContentTypeFieldValidation
class ContentTypeFieldValidationTest(TestCase):
def test_content_type_field_validation(self):
validation = ContentTypeFieldValidation({
'size': {'min': 3}
})
self.assertEqual(str(validation), "<ContentTypeFieldValidation size='{'min': 3}'>")
self.assertEqual(validation.to_json(), {
'size': {'min': 3}
})
self.assertEqual(validation.size, {'min': 3})
with self.assertRaises(Exception):
validation.foobar
validation.foo = 'bar'
self.assertEqual(validation.to_json(), {
'size': {'min': 3},
'foo': 'bar'
})
|
[
"contentful_management.content_type_field_validation.ContentTypeFieldValidation"
] |
[((242, 290), 'contentful_management.content_type_field_validation.ContentTypeFieldValidation', 'ContentTypeFieldValidation', (["{'size': {'min': 3}}"], {}), "({'size': {'min': 3}})\n", (268, 290), False, 'from contentful_management.content_type_field_validation import ContentTypeFieldValidation\n')]
|
# AAAAA
from parsing.item_parser import ItemParser
from parsing.list_parser import ListParser
import csv
import threading
import time
class Manager:
"""Manage data provided by parsers and to parsers"""
order = [
'bodyType', 'brand', 'color', 'fuelType', 'modelDate', 'name', 'numberOfDoors', 'productionDate',
'vehicleConfiguration', 'vehicleTransmission', 'engineDisplacement', 'enginePower', 'description',
'mileage', 'Комплектация', 'Привод', 'Руль', 'Состояние', 'Владельцы', 'ПТС', 'Таможня', 'Владение', 'id',
'Price'
]
def __init__(self):
pass
def load(self):
# 600
list_parser = ListParser(pages=(600, 5000))
item_parser = ItemParser()
finished = False
parsed_data = []
ids_to_parse = []
def save_data():
_id = 1
with open('data.csv', 'a', newline='', encoding='utf-8') as csv_f:
writer = csv.writer(csv_f, delimiter=',', quotechar='"')
while not finished or parsed_data:
try:
item = parsed_data.pop()
item['id'] = _id
_id += 1
writer.writerow([item.get(key) for key in self.order])
csv_f.flush()
if _id % 20 == 0:
print("Saved %d records" % (_id,))
except IndexError:
time.sleep(0.5)
continue
print("Saved %d records" % (_id,))
save_thread = threading.Thread(target=save_data)
save_thread.start()
def parse_list():
nonlocal finished
generator = list_parser.parse()
while True:
if len(ids_to_parse) >= 1000:
time.sleep(1)
continue
try:
car_id = next(generator)
ids_to_parse.append(car_id)
except Exception as error:
print(error)
finished = True
list_parser_thread = threading.Thread(target=parse_list)
list_parser_thread.start()
def parse_car():
while not finished or ids_to_parse:
try:
car_id = ids_to_parse.pop()
except IndexError:
time.sleep(0.5)
continue
try:
parsed_data.append(item_parser.parse_page(car_id))
except Exception as error:
print('Cannot parse car page: %s' % (error,))
car_parsers = [threading.Thread(target=parse_car) for _ in range(3)]
for car_parser in car_parsers: car_parser.start()
for car_parser in car_parsers: car_parser.join()
finished = True
save_thread.join()
print("DONE")
if __name__ == '__main__':
m = Manager()
m.load()
|
[
"threading.Thread",
"csv.writer",
"parsing.list_parser.ListParser",
"time.sleep",
"parsing.item_parser.ItemParser"
] |
[((671, 700), 'parsing.list_parser.ListParser', 'ListParser', ([], {'pages': '(600, 5000)'}), '(pages=(600, 5000))\n', (681, 700), False, 'from parsing.list_parser import ListParser\n'), ((723, 735), 'parsing.item_parser.ItemParser', 'ItemParser', ([], {}), '()\n', (733, 735), False, 'from parsing.item_parser import ItemParser\n'), ((1612, 1646), 'threading.Thread', 'threading.Thread', ([], {'target': 'save_data'}), '(target=save_data)\n', (1628, 1646), False, 'import threading\n'), ((2157, 2192), 'threading.Thread', 'threading.Thread', ([], {'target': 'parse_list'}), '(target=parse_list)\n', (2173, 2192), False, 'import threading\n'), ((2696, 2730), 'threading.Thread', 'threading.Thread', ([], {'target': 'parse_car'}), '(target=parse_car)\n', (2712, 2730), False, 'import threading\n'), ((961, 1008), 'csv.writer', 'csv.writer', (['csv_f'], {'delimiter': '""","""', 'quotechar': '"""\\""""'}), '(csv_f, delimiter=\',\', quotechar=\'"\')\n', (971, 1008), False, 'import csv\n'), ((1866, 1879), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1876, 1879), False, 'import time\n'), ((2426, 2441), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (2436, 2441), False, 'import time\n'), ((1493, 1508), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (1503, 1508), False, 'import time\n')]
|
from AI import trie
import itertools
def test_trie():
target_trie = trie.Trie()
letters = "ABCDEFG"
words = []
for i in range(7, 1, -1):
permutations = list(itertools.permutations(letters, i))
for p in permutations:
word = ''.join(p)
if target_trie.search(word):
words.append(word)
assert words != []
|
[
"AI.trie.Trie",
"itertools.permutations"
] |
[((74, 85), 'AI.trie.Trie', 'trie.Trie', ([], {}), '()\n', (83, 85), False, 'from AI import trie\n'), ((183, 217), 'itertools.permutations', 'itertools.permutations', (['letters', 'i'], {}), '(letters, i)\n', (205, 217), False, 'import itertools\n')]
|
# (C) Datadog, Inc. 2010-present
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
import ipaddress
from collections import defaultdict
from pyasn1.type.univ import OctetString
from pysnmp import hlapi
from pysnmp.smi import builder, view
from datadog_checks.base import ConfigurationError, is_affirmative
from .resolver import OIDResolver
def to_oid_tuple(oid_string):
"""Return a OID tuple from a OID string."""
return tuple(map(int, oid_string.lstrip('.').split('.')))
class ParsedMetric(object):
__slots__ = ('name', 'metric_tags', 'forced_type', 'enforce_scalar')
def __init__(self, name, metric_tags, forced_type, enforce_scalar=True):
self.name = name
self.metric_tags = metric_tags
self.forced_type = forced_type
self.enforce_scalar = enforce_scalar
class ParsedTableMetric(object):
__slots__ = ('name', 'index_tags', 'column_tags', 'forced_type')
def __init__(self, name, index_tags, column_tags, forced_type):
self.name = name
self.index_tags = index_tags
self.column_tags = column_tags
self.forced_type = forced_type
class InstanceConfig:
"""Parse and hold configuration about a single instance."""
DEFAULT_RETRIES = 5
DEFAULT_TIMEOUT = 1
DEFAULT_ALLOWED_FAILURES = 3
DEFAULT_BULK_THRESHOLD = 0
def __init__(self, instance, warning, log, global_metrics, mibs_path, profiles, profiles_by_oid):
self.instance = instance
self.tags = instance.get('tags', [])
self.metrics = instance.get('metrics', [])
profile = instance.get('profile')
if is_affirmative(instance.get('use_global_metrics', True)):
self.metrics.extend(global_metrics)
if profile:
if profile not in profiles:
raise ConfigurationError("Unknown profile '{}'".format(profile))
self.metrics.extend(profiles[profile]['definition']['metrics'])
self.enforce_constraints = is_affirmative(instance.get('enforce_mib_constraints', True))
self._snmp_engine, mib_view_controller = self.create_snmp_engine(mibs_path)
self._resolver = OIDResolver(mib_view_controller, self.enforce_constraints)
self.ip_address = None
self.ip_network = None
self.discovered_instances = {}
self.failing_instances = defaultdict(int)
self.allowed_failures = int(instance.get('discovery_allowed_failures', self.DEFAULT_ALLOWED_FAILURES))
self.bulk_threshold = int(instance.get('bulk_threshold', self.DEFAULT_BULK_THRESHOLD))
timeout = int(instance.get('timeout', self.DEFAULT_TIMEOUT))
retries = int(instance.get('retries', self.DEFAULT_RETRIES))
ip_address = instance.get('ip_address')
network_address = instance.get('network_address')
if not ip_address and not network_address:
raise ConfigurationError('An IP address or a network address needs to be specified')
if ip_address and network_address:
raise ConfigurationError('Only one of IP address and network address must be specified')
if ip_address:
self._transport = self.get_transport_target(instance, timeout, retries)
self.ip_address = ip_address
self.tags.append('snmp_device:{}'.format(self.ip_address))
if network_address:
if isinstance(network_address, bytes):
network_address = network_address.decode('utf-8')
self.ip_network = ipaddress.ip_network(network_address)
if not self.metrics and not profiles_by_oid:
raise ConfigurationError('Instance should specify at least one metric or profiles should be defined')
self._auth_data = self.get_auth_data(instance)
self.all_oids, self.bulk_oids, self.parsed_metrics = self.parse_metrics(self.metrics, warning, log)
self._context_data = hlapi.ContextData(*self.get_context_data(instance))
def resolve_oid(self, oid):
return self._resolver.resolve_oid(oid)
def refresh_with_profile(self, profile, warning, log):
self.metrics.extend(profile['definition']['metrics'])
self.all_oids, self.bulk_oids, self.parsed_metrics = self.parse_metrics(self.metrics, warning, log)
def call_cmd(self, cmd, *args, **kwargs):
return cmd(self._snmp_engine, self._auth_data, self._transport, self._context_data, *args, **kwargs)
@staticmethod
def create_snmp_engine(mibs_path):
"""
Create a command generator to perform all the snmp query.
If mibs_path is not None, load the mibs present in the custom mibs
folder. (Need to be in pysnmp format)
"""
snmp_engine = hlapi.SnmpEngine()
mib_builder = snmp_engine.getMibBuilder()
if mibs_path is not None:
mib_builder.addMibSources(builder.DirMibSource(mibs_path))
mib_view_controller = view.MibViewController(mib_builder)
return snmp_engine, mib_view_controller
@staticmethod
def get_transport_target(instance, timeout, retries):
"""
Generate a Transport target object based on the instance's configuration
"""
ip_address = instance['ip_address']
port = int(instance.get('port', 161)) # Default SNMP port
return hlapi.UdpTransportTarget((ip_address, port), timeout=timeout, retries=retries)
@staticmethod
def get_auth_data(instance):
"""
Generate a Security Parameters object based on the instance's
configuration.
"""
if 'community_string' in instance:
# SNMP v1 - SNMP v2
# See http://snmplabs.com/pysnmp/docs/api-reference.html#pysnmp.hlapi.CommunityData
if int(instance.get('snmp_version', 2)) == 1:
return hlapi.CommunityData(instance['community_string'], mpModel=0)
return hlapi.CommunityData(instance['community_string'], mpModel=1)
if 'user' in instance:
# SNMP v3
user = instance['user']
auth_key = None
priv_key = None
auth_protocol = None
priv_protocol = None
if 'authKey' in instance:
auth_key = instance['authKey']
auth_protocol = hlapi.usmHMACMD5AuthProtocol
if 'privKey' in instance:
priv_key = instance['privKey']
auth_protocol = hlapi.usmHMACMD5AuthProtocol
priv_protocol = hlapi.usmDESPrivProtocol
if 'authProtocol' in instance:
auth_protocol = getattr(hlapi, instance['authProtocol'])
if 'privProtocol' in instance:
priv_protocol = getattr(hlapi, instance['privProtocol'])
return hlapi.UsmUserData(user, auth_key, priv_key, auth_protocol, priv_protocol)
raise ConfigurationError('An authentication method needs to be provided')
@staticmethod
def get_context_data(instance):
"""
Generate a Context Parameters object based on the instance's
configuration.
We do not use the hlapi currently, but the rfc3413.oneliner.cmdgen
accepts Context Engine Id (always None for now) and Context Name parameters.
"""
context_engine_id = None
context_name = ''
if 'user' in instance:
if 'context_engine_id' in instance:
context_engine_id = OctetString(instance['context_engine_id'])
if 'context_name' in instance:
context_name = instance['context_name']
return context_engine_id, context_name
def parse_metrics(self, metrics, warning, log):
"""Parse configuration and returns data to be used for SNMP queries.
`oids` is a dictionnary of SNMP tables to symbols to query.
"""
table_oids = {}
parsed_metrics = []
def extract_symbol(mib, symbol):
if isinstance(symbol, dict):
symbol_oid = symbol['OID']
symbol = symbol['name']
self._resolver.register(to_oid_tuple(symbol_oid), symbol)
identity = hlapi.ObjectIdentity(symbol_oid)
else:
identity = hlapi.ObjectIdentity(mib, symbol)
return identity, symbol
def get_table_symbols(mib, table):
identity, table = extract_symbol(mib, table)
key = (mib, table)
if key in table_oids:
return table_oids[key][1], table
table_object = hlapi.ObjectType(identity)
symbols = []
table_oids[key] = (table_object, symbols)
return symbols, table
# Check the metrics completely defined
for metric in metrics:
forced_type = metric.get('forced_type')
metric_tags = metric.get('metric_tags', [])
if 'MIB' in metric:
if not ('table' in metric or 'symbol' in metric):
raise ConfigurationError('When specifying a MIB, you must specify either table or symbol')
if 'symbol' in metric:
to_query = metric['symbol']
try:
_, parsed_metric_name = get_table_symbols(metric['MIB'], to_query)
except Exception as e:
warning("Can't generate MIB object for variable : %s\nException: %s", metric, e)
else:
parsed_metric = ParsedMetric(parsed_metric_name, metric_tags, forced_type)
parsed_metrics.append(parsed_metric)
continue
elif 'symbols' not in metric:
raise ConfigurationError('When specifying a table, you must specify a list of symbols')
symbols, _ = get_table_symbols(metric['MIB'], metric['table'])
index_tags = []
column_tags = []
for metric_tag in metric_tags:
if not ('tag' in metric_tag and ('index' in metric_tag or 'column' in metric_tag)):
raise ConfigurationError(
'When specifying metric tags, you must specify a tag, and an index or column'
)
tag_key = metric_tag['tag']
if 'column' in metric_tag:
# In case it's a column, we need to query it as well
mib = metric_tag.get('MIB', metric['MIB'])
identity, column = extract_symbol(mib, metric_tag['column'])
column_tags.append((tag_key, column))
try:
object_type = hlapi.ObjectType(identity)
except Exception as e:
warning("Can't generate MIB object for variable : %s\nException: %s", metric, e)
else:
if 'table' in metric_tag:
tag_symbols, _ = get_table_symbols(mib, metric_tag['table'])
tag_symbols.append(object_type)
elif mib != metric['MIB']:
raise ConfigurationError(
'When tagging from a different MIB, the table must be specified'
)
else:
symbols.append(object_type)
elif 'index' in metric_tag:
index_tags.append((tag_key, metric_tag['index']))
if 'mapping' in metric_tag:
# Need to do manual resolution
for symbol in metric['symbols']:
self._resolver.register_index(
symbol['name'], metric_tag['index'], metric_tag['mapping']
)
for tag in metric['metric_tags']:
if 'column' in tag:
self._resolver.register_index(
tag['column']['name'], metric_tag['index'], metric_tag['mapping']
)
for symbol in metric['symbols']:
identity, parsed_metric_name = extract_symbol(metric['MIB'], symbol)
try:
symbols.append(hlapi.ObjectType(identity))
except Exception as e:
warning("Can't generate MIB object for variable : %s\nException: %s", metric, e)
parsed_metric = ParsedTableMetric(parsed_metric_name, index_tags, column_tags, forced_type)
parsed_metrics.append(parsed_metric)
elif 'OID' in metric:
oid_object = hlapi.ObjectType(hlapi.ObjectIdentity(metric['OID']))
table_oids[metric['OID']] = (oid_object, [])
self._resolver.register(to_oid_tuple(metric['OID']), metric['name'])
parsed_metric = ParsedMetric(metric['name'], metric_tags, forced_type, enforce_scalar=False)
parsed_metrics.append(parsed_metric)
else:
raise ConfigurationError('Unsupported metric in config file: {}'.format(metric))
oids = []
all_oids = []
bulk_oids = []
# Use bulk for SNMP version > 1 and there are enough symbols
bulk_limit = self.bulk_threshold if self._auth_data.mpModel else 0
for table, symbols in table_oids.values():
if not symbols:
# No table to browse, just one symbol
oids.append(table)
elif bulk_limit and len(symbols) > bulk_limit:
bulk_oids.append(table)
else:
all_oids.append(symbols)
if oids:
all_oids.insert(0, oids)
return all_oids, bulk_oids, parsed_metrics
|
[
"ipaddress.ip_network",
"pysnmp.hlapi.UsmUserData",
"pysnmp.smi.view.MibViewController",
"pyasn1.type.univ.OctetString",
"pysnmp.hlapi.ObjectType",
"collections.defaultdict",
"pysnmp.hlapi.SnmpEngine",
"pysnmp.hlapi.CommunityData",
"pysnmp.hlapi.UdpTransportTarget",
"pysnmp.smi.builder.DirMibSource",
"datadog_checks.base.ConfigurationError",
"pysnmp.hlapi.ObjectIdentity"
] |
[((2369, 2385), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (2380, 2385), False, 'from collections import defaultdict\n'), ((4740, 4758), 'pysnmp.hlapi.SnmpEngine', 'hlapi.SnmpEngine', ([], {}), '()\n', (4756, 4758), False, 'from pysnmp import hlapi\n'), ((4946, 4981), 'pysnmp.smi.view.MibViewController', 'view.MibViewController', (['mib_builder'], {}), '(mib_builder)\n', (4968, 4981), False, 'from pysnmp.smi import builder, view\n'), ((5339, 5417), 'pysnmp.hlapi.UdpTransportTarget', 'hlapi.UdpTransportTarget', (['(ip_address, port)'], {'timeout': 'timeout', 'retries': 'retries'}), '((ip_address, port), timeout=timeout, retries=retries)\n', (5363, 5417), False, 'from pysnmp import hlapi\n'), ((6886, 6953), 'datadog_checks.base.ConfigurationError', 'ConfigurationError', (['"""An authentication method needs to be provided"""'], {}), "('An authentication method needs to be provided')\n", (6904, 6953), False, 'from datadog_checks.base import ConfigurationError, is_affirmative\n'), ((2909, 2987), 'datadog_checks.base.ConfigurationError', 'ConfigurationError', (['"""An IP address or a network address needs to be specified"""'], {}), "('An IP address or a network address needs to be specified')\n", (2927, 2987), False, 'from datadog_checks.base import ConfigurationError, is_affirmative\n'), ((3050, 3137), 'datadog_checks.base.ConfigurationError', 'ConfigurationError', (['"""Only one of IP address and network address must be specified"""'], {}), "(\n 'Only one of IP address and network address must be specified')\n", (3068, 3137), False, 'from datadog_checks.base import ConfigurationError, is_affirmative\n'), ((3530, 3567), 'ipaddress.ip_network', 'ipaddress.ip_network', (['network_address'], {}), '(network_address)\n', (3550, 3567), False, 'import ipaddress\n'), ((3640, 3745), 'datadog_checks.base.ConfigurationError', 'ConfigurationError', (['"""Instance should specify at least one metric or profiles should be defined"""'], {}), "(\n 'Instance 
should specify at least one metric or profiles should be defined'\n )\n", (3658, 3745), False, 'from datadog_checks.base import ConfigurationError, is_affirmative\n'), ((5919, 5979), 'pysnmp.hlapi.CommunityData', 'hlapi.CommunityData', (["instance['community_string']"], {'mpModel': '(1)'}), "(instance['community_string'], mpModel=1)\n", (5938, 5979), False, 'from pysnmp import hlapi\n'), ((6797, 6870), 'pysnmp.hlapi.UsmUserData', 'hlapi.UsmUserData', (['user', 'auth_key', 'priv_key', 'auth_protocol', 'priv_protocol'], {}), '(user, auth_key, priv_key, auth_protocol, priv_protocol)\n', (6814, 6870), False, 'from pysnmp import hlapi\n'), ((8574, 8600), 'pysnmp.hlapi.ObjectType', 'hlapi.ObjectType', (['identity'], {}), '(identity)\n', (8590, 8600), False, 'from pysnmp import hlapi\n'), ((4882, 4913), 'pysnmp.smi.builder.DirMibSource', 'builder.DirMibSource', (['mibs_path'], {}), '(mibs_path)\n', (4902, 4913), False, 'from pysnmp.smi import builder, view\n'), ((5839, 5899), 'pysnmp.hlapi.CommunityData', 'hlapi.CommunityData', (["instance['community_string']"], {'mpModel': '(0)'}), "(instance['community_string'], mpModel=0)\n", (5858, 5899), False, 'from pysnmp import hlapi\n'), ((7460, 7502), 'pyasn1.type.univ.OctetString', 'OctetString', (["instance['context_engine_id']"], {}), "(instance['context_engine_id'])\n", (7471, 7502), False, 'from pyasn1.type.univ import OctetString\n'), ((8181, 8213), 'pysnmp.hlapi.ObjectIdentity', 'hlapi.ObjectIdentity', (['symbol_oid'], {}), '(symbol_oid)\n', (8201, 8213), False, 'from pysnmp import hlapi\n'), ((8259, 8292), 'pysnmp.hlapi.ObjectIdentity', 'hlapi.ObjectIdentity', (['mib', 'symbol'], {}), '(mib, symbol)\n', (8279, 8292), False, 'from pysnmp import hlapi\n'), ((9028, 9117), 'datadog_checks.base.ConfigurationError', 'ConfigurationError', (['"""When specifying a MIB, you must specify either table or symbol"""'], {}), "(\n 'When specifying a MIB, you must specify either table or symbol')\n", (9046, 9117), False, 'from 
datadog_checks.base import ConfigurationError, is_affirmative\n'), ((9755, 9841), 'datadog_checks.base.ConfigurationError', 'ConfigurationError', (['"""When specifying a table, you must specify a list of symbols"""'], {}), "(\n 'When specifying a table, you must specify a list of symbols')\n", (9773, 9841), False, 'from datadog_checks.base import ConfigurationError, is_affirmative\n'), ((10164, 10271), 'datadog_checks.base.ConfigurationError', 'ConfigurationError', (['"""When specifying metric tags, you must specify a tag, and an index or column"""'], {}), "(\n 'When specifying metric tags, you must specify a tag, and an index or column'\n )\n", (10182, 10271), False, 'from datadog_checks.base import ConfigurationError, is_affirmative\n'), ((12988, 13023), 'pysnmp.hlapi.ObjectIdentity', 'hlapi.ObjectIdentity', (["metric['OID']"], {}), "(metric['OID'])\n", (13008, 13023), False, 'from pysnmp import hlapi\n'), ((10776, 10802), 'pysnmp.hlapi.ObjectType', 'hlapi.ObjectType', (['identity'], {}), '(identity)\n', (10792, 10802), False, 'from pysnmp import hlapi\n'), ((12561, 12587), 'pysnmp.hlapi.ObjectType', 'hlapi.ObjectType', (['identity'], {}), '(identity)\n', (12577, 12587), False, 'from pysnmp import hlapi\n'), ((11293, 11382), 'datadog_checks.base.ConfigurationError', 'ConfigurationError', (['"""When tagging from a different MIB, the table must be specified"""'], {}), "(\n 'When tagging from a different MIB, the table must be specified')\n", (11311, 11382), False, 'from datadog_checks.base import ConfigurationError, is_affirmative\n')]
|
import bot
if __name__ == "__main__":
print('Refreshing all...')
links = bot.db.getLinksList()
for link in links:
bot.refreshMatch(link)
print('Refreshed succesfully')
|
[
"bot.db.getLinksList",
"bot.refreshMatch"
] |
[((76, 97), 'bot.db.getLinksList', 'bot.db.getLinksList', ([], {}), '()\n', (95, 97), False, 'import bot\n'), ((120, 142), 'bot.refreshMatch', 'bot.refreshMatch', (['link'], {}), '(link)\n', (136, 142), False, 'import bot\n')]
|
import asyncio
from tests.fixtures import auth_data, media_data
from wpclient.client import Client
from wpclient.models.media import MediaModel
async def create(auth_data, media_data):
async with Client(**auth_data) as client:
media = MediaModel.parse_obj(media_data)
coro = client.media.create(media)
client.add(coro)
await client.perform()
return client.result
async def get(auth_data, id):
async with Client(**auth_data) as client:
coro = client.media.get(id)
client.add(coro)
await client.perform()
return client.result
async def update(auth_data, id, media_updated):
async with Client(**auth_data) as client:
coro = client.media.update(id, media_updated)
client.add(coro)
await client.perform()
return client.result
async def delete(auth_data, id):
async with Client(**auth_data) as client:
coro = client.media.delete(id)
client.add(coro)
await client.perform()
return client.result
def test_async_crud(auth_data, media_data):
r = asyncio.run(create(auth_data, media_data))
media = r[0]
assert isinstance(media, MediaModel)
r = asyncio.run(get(auth_data, media.id))
media = r[0]
assert media.alt_text == media_data['alt_text']
media_updated = media_data.copy()
media_updated['alt_text'] = 'My updated alt text'
r = asyncio.run(update(auth_data, media.id, media_updated))
media = r[0]
assert media.alt_text == media_updated['alt_text']
r = asyncio.run(delete(auth_data, media.id))
assert r[0] == True
|
[
"wpclient.client.Client",
"wpclient.models.media.MediaModel.parse_obj",
"tests.fixtures.media_data.copy"
] |
[((1338, 1355), 'tests.fixtures.media_data.copy', 'media_data.copy', ([], {}), '()\n', (1353, 1355), False, 'from tests.fixtures import auth_data, media_data\n'), ((204, 223), 'wpclient.client.Client', 'Client', ([], {}), '(**auth_data)\n', (210, 223), False, 'from wpclient.client import Client\n'), ((251, 283), 'wpclient.models.media.MediaModel.parse_obj', 'MediaModel.parse_obj', (['media_data'], {}), '(media_data)\n', (271, 283), False, 'from wpclient.models.media import MediaModel\n'), ((457, 476), 'wpclient.client.Client', 'Client', ([], {}), '(**auth_data)\n', (463, 476), False, 'from wpclient.client import Client\n'), ((673, 692), 'wpclient.client.Client', 'Client', ([], {}), '(**auth_data)\n', (679, 692), False, 'from wpclient.client import Client\n'), ((893, 912), 'wpclient.client.Client', 'Client', ([], {}), '(**auth_data)\n', (899, 912), False, 'from wpclient.client import Client\n')]
|
import constants.cdr_cleaner.clean_cdr as cdr_consts
CLEAN_PPI_NUMERIC_FIELDS = """
UPDATE
`{project}.{dataset}.observation` u1
SET
u1.value_as_number = NULL,
u1.value_as_concept_id = 2000000010
FROM
(
SELECT
*
FROM
`{project}.{dataset}.observation`
WHERE
observation_concept_id = 1585889 AND (value_as_number < 0 OR value_as_number > 20)
UNION ALL
SELECT
*
FROM
`{project}.{dataset}.observation`
WHERE
observation_concept_id = 1585890 AND (value_as_number < 0 OR value_as_number > 20)
UNION ALL
SELECT
*
FROM
`{project}.{dataset}.observation`
WHERE
observation_concept_id = 1585795 AND (value_as_number < 0 OR value_as_number > 99)
UNION ALL
SELECT
*
FROM
`{project}.{dataset}.observation`
WHERE
observation_concept_id = 1585802 AND (value_as_number < 0 OR value_as_number > 99)
UNION ALL
SELECT
*
FROM
`{project}.{dataset}.observation`
WHERE
observation_concept_id = 1585820 AND (value_as_number < 0 OR value_as_number > 255)
UNION ALL
SELECT
*
FROM
`{project}.{dataset}.observation`
WHERE
observation_concept_id = 1585864 AND (value_as_number < 0 OR value_as_number > 99)
UNION ALL
SELECT
*
FROM
`{project}.{dataset}.observation`
WHERE
observation_concept_id = 1585870 AND (value_as_number < 0 OR value_as_number > 99)
UNION ALL
SELECT
*
FROM
`{project}.{dataset}.observation`
WHERE
observation_concept_id = 1585873 AND (value_as_number < 0 OR value_as_number > 99)
UNION ALL
SELECT
*
FROM
`{project}.{dataset}.observation`
WHERE
observation_concept_id = 1586159 AND (value_as_number < 0 OR value_as_number > 99)
UNION ALL
SELECT
*
FROM
`{project}.{dataset}.observation`
WHERE
observation_concept_id = 1586162 AND (value_as_number < 0 OR value_as_number > 99) ) a
WHERE
u1.observation_id = a.observation_id
"""
def get_clean_ppi_num_fields_using_parameters_queries(project_id, dataset_id):
"""
runs the query which updates the ppi numeric fields in observation table based on the
upper and lower bounds specified.
:param project_id: Name of the project
:param dataset_id: Name of the dataset where the queries should be run
:return:
"""
queries_list = []
query = dict()
query[cdr_consts.QUERY] = CLEAN_PPI_NUMERIC_FIELDS.format(
dataset=dataset_id,
project=project_id,
)
queries_list.append(query)
return queries_list
if __name__ == '__main__':
import cdr_cleaner.args_parser as parser
import cdr_cleaner.clean_cdr_engine as clean_engine
ARGS = parser.parse_args()
clean_engine.add_console_logging(ARGS.console_log)
query_list = get_clean_ppi_num_fields_using_parameters_queries(
ARGS.project_id, ARGS.dataset_id)
clean_engine.clean_dataset(ARGS.project_id, query_list)
|
[
"cdr_cleaner.clean_cdr_engine.add_console_logging",
"cdr_cleaner.args_parser.parse_args",
"cdr_cleaner.clean_cdr_engine.clean_dataset"
] |
[((2527, 2546), 'cdr_cleaner.args_parser.parse_args', 'parser.parse_args', ([], {}), '()\n', (2544, 2546), True, 'import cdr_cleaner.args_parser as parser\n'), ((2551, 2601), 'cdr_cleaner.clean_cdr_engine.add_console_logging', 'clean_engine.add_console_logging', (['ARGS.console_log'], {}), '(ARGS.console_log)\n', (2583, 2601), True, 'import cdr_cleaner.clean_cdr_engine as clean_engine\n'), ((2716, 2771), 'cdr_cleaner.clean_cdr_engine.clean_dataset', 'clean_engine.clean_dataset', (['ARGS.project_id', 'query_list'], {}), '(ARGS.project_id, query_list)\n', (2742, 2771), True, 'import cdr_cleaner.clean_cdr_engine as clean_engine\n')]
|
from ...exceptions.tardisexceptions import TardisError
from ...interfaces.siteadapter import ResourceStatus
from ...interfaces.siteadapter import SiteAdapter
from ...utilities.attributedict import AttributeDict
from ...utilities.staticmapping import StaticMapping
from contextlib import contextmanager
from datetime import datetime
from datetime import timedelta
from functools import partial
from uuid import uuid4
import asyncio
class FakeSiteAdapter(SiteAdapter):
def __init__(self, machine_type: str, site_name: str) -> None:
self._machine_type = machine_type
self._site_name = site_name
self._api_response_delay = self.configuration.api_response_delay
self._resource_boot_time = self.configuration.resource_boot_time
key_translator = StaticMapping(
remote_resource_uuid="remote_resource_uuid",
resource_status="resource_status",
created="created",
updated="updated",
resource_boot_time="resource_boot_time",
)
self.handle_response = partial(
self.handle_response,
key_translator=key_translator,
translator_functions=StaticMapping(),
)
self._stopped_n_terminated_resources = {}
async def deploy_resource(
self, resource_attributes: AttributeDict
) -> AttributeDict:
await asyncio.sleep(self._api_response_delay.get_value())
now = datetime.now()
response = AttributeDict(
remote_resource_uuid=uuid4().hex,
resource_status=ResourceStatus.Booting,
created=now,
updated=now,
resource_boot_time=self._resource_boot_time.get_value(),
)
return self.handle_response(response)
def get_resource_boot_time(self, resource_attributes: AttributeDict) -> float:
try:
return resource_attributes.resource_boot_time
except AttributeError:
# In case tardis is restarted, resource_boot_time is not set, so re-set
resource_boot_time = resource_attributes[
"resource_boot_time"
] = self._resource_boot_time.get_value()
return resource_boot_time
async def resource_status(
self, resource_attributes: AttributeDict
) -> AttributeDict:
await asyncio.sleep(self._api_response_delay.get_value())
try: # check if resource has been stopped or terminated
resource_status = self._stopped_n_terminated_resources[
resource_attributes.drone_uuid
]
except KeyError:
pass
else:
return self.handle_response(AttributeDict(resource_status=resource_status))
created_time = resource_attributes.created
resource_boot_time = self.get_resource_boot_time(resource_attributes)
# check if resource is already running
if (datetime.now() - created_time) > timedelta(seconds=resource_boot_time):
return self.handle_response(
AttributeDict(resource_status=ResourceStatus.Running)
)
return self.handle_response(resource_attributes)
async def stop_resource(self, resource_attributes: AttributeDict):
await asyncio.sleep(self._api_response_delay.get_value())
self._stopped_n_terminated_resources[
resource_attributes.drone_uuid
] = ResourceStatus.Stopped
return self.handle_response(
AttributeDict(resource_status=ResourceStatus.Stopped)
)
async def terminate_resource(self, resource_attributes: AttributeDict):
await asyncio.sleep(self._api_response_delay.get_value())
self._stopped_n_terminated_resources[
resource_attributes.drone_uuid
] = ResourceStatus.Deleted
return self.handle_response(
AttributeDict(resource_status=ResourceStatus.Deleted)
)
@contextmanager
def handle_exceptions(self) -> None:
try:
yield
except Exception as ex:
raise TardisError from ex
|
[
"uuid.uuid4",
"datetime.datetime.now",
"datetime.timedelta"
] |
[((1446, 1460), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1458, 1460), False, 'from datetime import datetime\n'), ((2952, 2989), 'datetime.timedelta', 'timedelta', ([], {'seconds': 'resource_boot_time'}), '(seconds=resource_boot_time)\n', (2961, 2989), False, 'from datetime import timedelta\n'), ((2919, 2933), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2931, 2933), False, 'from datetime import datetime\n'), ((1528, 1535), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (1533, 1535), False, 'from uuid import uuid4\n')]
|
# import required libraries
import sounddevice as sd
from .recognize import *
from .record import *
from .ml import *
def speech(using,freq = 44100,duration = 5,key=None, language="en-US", show_all=False):
# Start recorder with the given values of
# duration and sample frequency
recording = sd.rec(int(duration * freq),
samplerate=freq, channels=2)
# Record audio for the given number of seconds
sd.wait()
write("recording.wav", recording, freq, sampwidth=2)
if using.lower()=='google':
r = Recognizer()
recording = AudioFile('recording.wav')
with recording as source:
audio = r.record(source)
text=r.recognize_google(audio,key, language, show_all)
elif using.lower()=='ml':
text=ml('recording.wav')
else:
text='engine not found'
return text
def google_audio(file,key=None, language="en-US", show_all=False):
r = Recognizer()
recording = AudioFile(file)
with recording as source:
audio = r.record(source)
text=r.recognize_google(audio,key, language, show_all)
return text
def recorder(name,duration = 5,freq = 44100):
recording = sd.rec(int(duration * freq),samplerate=freq, channels=2)
sd.wait()
write(name, recording, freq, sampwidth=2)
|
[
"sounddevice.wait"
] |
[((456, 465), 'sounddevice.wait', 'sd.wait', ([], {}), '()\n', (463, 465), True, 'import sounddevice as sd\n'), ((1299, 1308), 'sounddevice.wait', 'sd.wait', ([], {}), '()\n', (1306, 1308), True, 'import sounddevice as sd\n')]
|
from pycordia import errors, utils
import typing
import enum
class ComponentType(enum.Enum):
action_row = 1
button = 2
select_menu = 3
class ButtonStyles(enum.Enum):
primary = 1
secondary = 2
success = 3
danger = 4
link = 5
class SelectMenuOption:
def __init__(self, *, label: str, value: str,
description: str = None, emoji: dict = None,
default: bool = False
) -> None:
self.label = label
self.value = value
self.description = description
self.emoji = emoji
self.default = default
@classmethod
def from_json(cls, data: dict):
return utils.obj_from_dict(data, cls)
def to_json(self):
return utils.obj_to_dict(self)
class SelectMenu:
def __init__(self, *, custom_id: str, placeholder: str = None,
min_values: int = 1, max_values: int = 1, disabled: bool = False
) -> None:
pass
class Button:
def __init__(self, *, custom_id: str = None, disabled: bool = False,
style: ButtonStyles, label: str, emoji = None, url: str = None
) -> None:
self.__on_click_func = None
self.custom_id = custom_id
self.disabled = disabled
self.style = style
self.label = label
self.emoji = emoji
self.url = url
self.__verify_component()
@classmethod
def from_json(cls, data: dict):
obj = utils.obj_from_dict(data, cls)
obj.style = ComponentType(obj.style)
return obj
def to_json(self):
obj = utils.obj_to_dict(self)
obj["type"] = ComponentType.button.value
obj["style"] = obj["style"].value
return obj
def __verify_component(self):
if self.url and self.custom_id:
raise errors.ComponentError(
"A link button cannot have a custom ID."
)
elif not self.custom_id:
raise errors.ComponentError(
"Non-link buttons must contain a custom ID"
)
def on_click(self, fun):
self.__on_click_func = fun
def wrapper():
fun()
return wrapper
# class ActionRow:
# def __init__(self, *components: typing.List[typing.Union[SelectMenu, Button]]):
# self.__verify_components(components)
# self.component_type = ComponentType.action_row
# self.__components = [*components]
# @classmethod
# def from_list(cls, data: list):
# comps = []
# for elem in data:
# comp_type = int(elem["type"])
# if comp_type == ComponentType.action_row.value:
# raise errors.ComponentError(
# "An ActionRow cannot contain another ActionRow"
# )
# elif comp_type == ComponentType.button.value:
# comps.append(Button(**elem))
# elif comp_type == ComponentType.
# return ActionRow(*comps)
# @property
# def components(self):
# return self.__components
# def __verify_components(self, components):
# for comp in components:
# if isinstance(comp, ActionRow):
# raise errors.ComponentError(
# "An ActionRow cannot contain another ActionRow"
# )
|
[
"pycordia.utils.obj_to_dict",
"pycordia.errors.ComponentError",
"pycordia.utils.obj_from_dict"
] |
[((656, 686), 'pycordia.utils.obj_from_dict', 'utils.obj_from_dict', (['data', 'cls'], {}), '(data, cls)\n', (675, 686), False, 'from pycordia import errors, utils\n'), ((726, 749), 'pycordia.utils.obj_to_dict', 'utils.obj_to_dict', (['self'], {}), '(self)\n', (743, 749), False, 'from pycordia import errors, utils\n'), ((1434, 1464), 'pycordia.utils.obj_from_dict', 'utils.obj_from_dict', (['data', 'cls'], {}), '(data, cls)\n', (1453, 1464), False, 'from pycordia import errors, utils\n'), ((1567, 1590), 'pycordia.utils.obj_to_dict', 'utils.obj_to_dict', (['self'], {}), '(self)\n', (1584, 1590), False, 'from pycordia import errors, utils\n'), ((1798, 1861), 'pycordia.errors.ComponentError', 'errors.ComponentError', (['"""A link button cannot have a custom ID."""'], {}), "('A link button cannot have a custom ID.')\n", (1819, 1861), False, 'from pycordia import errors, utils\n'), ((1943, 2009), 'pycordia.errors.ComponentError', 'errors.ComponentError', (['"""Non-link buttons must contain a custom ID"""'], {}), "('Non-link buttons must contain a custom ID')\n", (1964, 2009), False, 'from pycordia import errors, utils\n')]
|
import sre_parse
import re
import pytest
def extract_literals(r):
ops = sre_parse.parse(r.pattern)
results = []
extract_literals_from_ops(ops, results)
return results
def extract_literals_from_ops(ops, results):
i = 0
while i < len(ops):
op, val = ops[i]
if op == sre_parse.LITERAL:
start_i = i
while i < len(ops) and ops[i][0] == sre_parse.LITERAL:
i += 1
results.append("".join(chr(c) for _, c in ops[start_i:i]))
continue
elif op == sre_parse.BRANCH:
_, branches = val
for branch in branches:
extract_literals_from_ops(branch, results)
elif op == sre_parse.SUBPATTERN:
_, _, _, sub_ops = val
extract_literals_from_ops(sub_ops, results)
elif op == sre_parse.MAX_REPEAT:
_, _, sub_ops = val
extract_literals_from_ops(sub_ops, results)
elif op == sre_parse.ASSERT or op == sre_parse.ASSERT_NOT:
_, sub_ops = val
extract_literals_from_ops(sub_ops, results)
i += 1
return results
@pytest.mark.parametrize(
("r", "expected"),
[
(r"^abc$", ["abc"]),
(r"abc|def", ["abc", "def"]),
(r"(abc|\d+)", ["abc"]),
(r"(?:abc){3,}", ["abc"]),
(r"(?:abc){,3}", ["abc"]),
(r"(?=abc)", ["abc"]),
(r"(?!abc)", ["abc"]),
(r"(?<=abc)", ["abc"]),
(r"(?<!abc)", ["abc"]),
]
)
def test_extract_literals(r, expected):
actual = extract_literals(re.compile(r))
assert actual == expected
|
[
"pytest.mark.parametrize",
"sre_parse.parse",
"re.compile"
] |
[((1146, 1420), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["('r', 'expected')", "[('^abc$', ['abc']), ('abc|def', ['abc', 'def']), ('(abc|\\\\d+)', ['abc']),\n ('(?:abc){3,}', ['abc']), ('(?:abc){,3}', ['abc']), ('(?=abc)', ['abc']\n ), ('(?!abc)', ['abc']), ('(?<=abc)', ['abc']), ('(?<!abc)', ['abc'])]"], {}), "(('r', 'expected'), [('^abc$', ['abc']), ('abc|def',\n ['abc', 'def']), ('(abc|\\\\d+)', ['abc']), ('(?:abc){3,}', ['abc']), (\n '(?:abc){,3}', ['abc']), ('(?=abc)', ['abc']), ('(?!abc)', ['abc']), (\n '(?<=abc)', ['abc']), ('(?<!abc)', ['abc'])])\n", (1169, 1420), False, 'import pytest\n'), ((79, 105), 'sre_parse.parse', 'sre_parse.parse', (['r.pattern'], {}), '(r.pattern)\n', (94, 105), False, 'import sre_parse\n'), ((1574, 1587), 're.compile', 're.compile', (['r'], {}), '(r)\n', (1584, 1587), False, 'import re\n')]
|
import json
from entries.fcd import FCD
from entries.models import Nutrient, Recipe
def calculate_consumption(ndbno, measure, quantity):
nutrients = FCD.get_nutrients(ndbno)
intake = []
for nutrient in nutrients:
for i_measure in nutrient["measures"]:
if i_measure["label"] == measure and i_measure["value"] != 0:
intake.append({
"category": "i",
"label": nutrient["name"],
"unit": nutrient["unit"],
"quantity": float(i_measure["value"]) * quantity
})
return intake
def insert_food_consumption(entry, data):
entry.extra = {"ndbno": data["ndbno"]}
entry.extra = json.dumps({"ndbno": data["ndbno"]})
entry.save()
nutrients = calculate_consumption(
data["ndbno"], entry.measure, entry.quantity)
for nutrient_data in nutrients:
try:
nutrient = Nutrient(**nutrient_data)
nutrient.entry = entry
nutrient.save()
except Exception as e:
pass
def insert_recipe(entry, data):
recipe = Recipe.objects.get(id=data["id"])
for ingredient in recipe.recipeingredient_set.all():
for n in ingredient.getNutrients():
nutrient = Nutrient(**n)
nutrient.quantity = nutrient.quantity * entry.quantity
nutrient.entry = entry
nutrient.save()
def insert_nutrients(entry, data):
if data["category"] == "c" and "ndbno" not in data and "id" in data:
insert_recipe(entry, data)
elif data["category"] == "c" and "ndbno" in data:
entry.insert_food_nutrients(data)
else:
entry.insert_activity_nutrients()
|
[
"entries.fcd.FCD.get_nutrients",
"entries.models.Recipe.objects.get",
"entries.models.Nutrient",
"json.dumps"
] |
[((156, 180), 'entries.fcd.FCD.get_nutrients', 'FCD.get_nutrients', (['ndbno'], {}), '(ndbno)\n', (173, 180), False, 'from entries.fcd import FCD\n'), ((742, 778), 'json.dumps', 'json.dumps', (["{'ndbno': data['ndbno']}"], {}), "({'ndbno': data['ndbno']})\n", (752, 778), False, 'import json\n'), ((1146, 1179), 'entries.models.Recipe.objects.get', 'Recipe.objects.get', ([], {'id': "data['id']"}), "(id=data['id'])\n", (1164, 1179), False, 'from entries.models import Nutrient, Recipe\n'), ((962, 987), 'entries.models.Nutrient', 'Nutrient', ([], {}), '(**nutrient_data)\n', (970, 987), False, 'from entries.models import Nutrient, Recipe\n'), ((1304, 1317), 'entries.models.Nutrient', 'Nutrient', ([], {}), '(**n)\n', (1312, 1317), False, 'from entries.models import Nutrient, Recipe\n')]
|
import random
letters = ["A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L",
"M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z"]
numbers = ["1", "2", "3", "4", "5", "6", "7", "8", "9",]
symbols = ["@", "#", "%", "&", "*", "?", "-", "_"]
print("Welcome to PyPasswordGenerator")
nr_letters = int(input('How many letters would you like in your password?\n'))
nr_numbers = int(input('How many numbers would you like in your password?\n'))
nr_symbols = int(input('How many symbols would you like in your password?\n'))
chosen_chars = []
for i in range(nr_letters):
chosen_chars.append(random.choice(letters))
for i in range(nr_numbers):
chosen_chars.append(random.choice(numbers))
for i in range(nr_symbols):
chosen_chars.append(random.choice(symbols))
random.shuffle(chosen_chars)
password = ''.join(chosen_chars)
print(f"Here is your password: \t{password} \nkeep it safe.")
|
[
"random.shuffle",
"random.choice"
] |
[((795, 823), 'random.shuffle', 'random.shuffle', (['chosen_chars'], {}), '(chosen_chars)\n', (809, 823), False, 'import random\n'), ((618, 640), 'random.choice', 'random.choice', (['letters'], {}), '(letters)\n', (631, 640), False, 'import random\n'), ((694, 716), 'random.choice', 'random.choice', (['numbers'], {}), '(numbers)\n', (707, 716), False, 'import random\n'), ((770, 792), 'random.choice', 'random.choice', (['symbols'], {}), '(symbols)\n', (783, 792), False, 'import random\n')]
|
# -*- coding: utf-8 -*-
"""
Copyright (c) 2017 Sightengine
http://sightengine.com/
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import requests, json, os
import sightengine
from .check import Check
VERSION = sightengine.__version__
headers = requests.utils.default_headers()
headers.update(
{
'User-Agent': 'SE-SDK-Python ' + VERSION,
}
)
class SightengineClient(object):
modelVersions = {}
def __init__(self, api_user, api_secret):
self.api_user = api_user
self.api_secret = api_secret
self.endpoint = 'https://api.sightengine.com/'
def feedback(self, model, modelClass, image):
if not model:
raise Exception('Please provide the version of the model ' + model)
if image.lower().startswith(('http://', 'https://')):
url = self.endpoint + '1.0/feedback.json'
r = requests.get(url, params={'model': model, 'class': modelClass, 'url': image, 'api_user': self.api_user, 'api_secret': self.api_secret}, headers=headers)
else:
url = self.endpoint + '1.0/feedback.json'
r = requests.post(url, files={'media': open(image, 'rb')}, data={'model': model, 'class': modelClass, 'api_user': self.api_user, 'api_secret': self.api_secret}, headers=headers)
output = json.loads(r.text)
return output
def check(self, *args):
return Check(self.api_user,self.api_secret, *args)
|
[
"requests.utils.default_headers",
"json.loads",
"requests.get"
] |
[((1225, 1257), 'requests.utils.default_headers', 'requests.utils.default_headers', ([], {}), '()\n', (1255, 1257), False, 'import requests, json, os\n'), ((2283, 2301), 'json.loads', 'json.loads', (['r.text'], {}), '(r.text)\n', (2293, 2301), False, 'import requests, json, os\n'), ((1853, 2009), 'requests.get', 'requests.get', (['url'], {'params': "{'model': model, 'class': modelClass, 'url': image, 'api_user': self.\n api_user, 'api_secret': self.api_secret}", 'headers': 'headers'}), "(url, params={'model': model, 'class': modelClass, 'url': image,\n 'api_user': self.api_user, 'api_secret': self.api_secret}, headers=headers)\n", (1865, 2009), False, 'import requests, json, os\n')]
|
from os import listdir
from os.path import isdir, exists
from shutil import copyfile, copytree, rmtree
from tkinter import Tk, filedialog
done = False
ip = ""
packs = listdir("./datapacks")
target = ""
Tk().withdraw()
print("Welcome to Matt's Minecraft Datapack install utility")
print("First, choose the 'datapacks' folder of the world where you want to install datapacks")
while target == "":
mode = input("First, enter 1 to manually enter the path, or enter 2 to use a gui, or q to quit:")
if mode == "1":
print("Enter the full path to the directory where you wish to install datapacks (you don't need quote marks around multi-word folders, nor do you need backquoted spaces)")
print("Example: /Users/xxxx/Library/Application Support/minecraft/saves/WORLD/datapacks")
target = input("Enter path: ")
if not isdir(target):
target = ""
if mode == "2":
target = filedialog.askdirectory()
if mode == "q":
target = "NO"
done = True
print("Data packs will be installed to " + target)
while not done:
#todo: just enter the world name, this finds the proper directory
#print("(minecraft/saves/WORLD/datapacks)")
print("Available datapacks:")
for datapack in packs:
print("* " + datapack)
ip = input("Select a datapack to install in this world, or type 'exit' to quit: ")
if ip == "exit":
break;
if ip in packs:
copyTo = target + "/" + ip
if exists(copyTo):
print("This datapack is already installed, so I'll delete it and reinstall")
rmtree(copyTo)
copytree("./datapacks/" + ip, copyTo)
else:
print("'" + ip + "\' is not in the list of datapacks")
|
[
"os.listdir",
"shutil.rmtree",
"os.path.isdir",
"os.path.exists",
"tkinter.filedialog.askdirectory",
"shutil.copytree",
"tkinter.Tk"
] |
[((168, 190), 'os.listdir', 'listdir', (['"""./datapacks"""'], {}), "('./datapacks')\n", (175, 190), False, 'from os import listdir\n'), ((203, 207), 'tkinter.Tk', 'Tk', ([], {}), '()\n', (205, 207), False, 'from tkinter import Tk, filedialog\n'), ((927, 952), 'tkinter.filedialog.askdirectory', 'filedialog.askdirectory', ([], {}), '()\n', (950, 952), False, 'from tkinter import Tk, filedialog\n'), ((1501, 1515), 'os.path.exists', 'exists', (['copyTo'], {}), '(copyTo)\n', (1507, 1515), False, 'from os.path import isdir, exists\n'), ((1641, 1678), 'shutil.copytree', 'copytree', (["('./datapacks/' + ip)", 'copyTo'], {}), "('./datapacks/' + ip, copyTo)\n", (1649, 1678), False, 'from shutil import copyfile, copytree, rmtree\n'), ((851, 864), 'os.path.isdir', 'isdir', (['target'], {}), '(target)\n', (856, 864), False, 'from os.path import isdir, exists\n'), ((1618, 1632), 'shutil.rmtree', 'rmtree', (['copyTo'], {}), '(copyTo)\n', (1624, 1632), False, 'from shutil import copyfile, copytree, rmtree\n')]
|
#!/usr/bin/env python
import logging
import os
class ProtocolsFlavor(object):
"""Base class for protocols"""
def binlog_player_protocol_flags(self):
"""Returns the flags to pass to process to set the binlog player protocol."""
return []
def vtctl_client_protocol(self):
"""Returns the protocol to use for vtctl connections. Needs to be supported both in python and go."""
return ""
def tablet_manager_protocol_flags(self):
"""Returns the flags to use for specifying the tablet manager protocol."""
return ['-tablet_manager_protocol', 'bson']
def tabletconn_protocol_flags(self):
"""Returns the flags to use for specifying the query service protocol."""
return ['-tablet_protocol', 'gorpc']
def rpc_timeout_message(self):
"""Returns the error message used by the protocol to indicate a timeout."""
raise NotImplementedError('Implementations need to overwrite this')
class GoRpcProtocolsFlavor(ProtocolsFlavor):
"""Overrides to use go rpc everywhere"""
def binlog_player_protocol_flags(self):
return ['-binlog_player_protocol', 'gorpc']
def vtctl_client_protocol(self):
return 'gorpc'
def tablet_manager_protocol_flags(self):
return ['-tablet_manager_protocol', 'bson']
def tabletconn_protocol_flags(self):
return ['-tablet_protocol', 'gorpc']
def rpc_timeout_message(self):
return 'timeout waiting for'
__knows_protocols_flavor_map = {
'gorpc': GoRpcProtocolsFlavor,
}
__protocols_flavor = None
def protocols_flavor():
return __protocols_flavor
def set_protocols_flavor(flavor):
global __protocols_flavor
if not flavor:
flavor = 'gorpc'
klass = __knows_protocols_flavor_map.get(flavor, None)
if not klass:
logging.error('Unknown protocols flavor %s', flavor)
exit(1)
__protocols_flavor = klass()
logging.debug('Using protocols flavor %s', flavor)
|
[
"logging.error",
"logging.debug"
] |
[((1826, 1876), 'logging.debug', 'logging.debug', (['"""Using protocols flavor %s"""', 'flavor'], {}), "('Using protocols flavor %s', flavor)\n", (1839, 1876), False, 'import logging\n'), ((1727, 1779), 'logging.error', 'logging.error', (['"""Unknown protocols flavor %s"""', 'flavor'], {}), "('Unknown protocols flavor %s', flavor)\n", (1740, 1779), False, 'import logging\n')]
|
import unittest
from kattis import k_everywhere
###############################################################################
class SampleInput(unittest.TestCase):
'''Problem statement sample inputs and outputs'''
def test_sample_input_1(self):
'''Run and assert problem statement sample 1 input and output.'''
input1 = []
input1.append('saskatoon')
input1.append('toronto')
input1.append('winnipeg')
input1.append('toronto')
input1.append('vancouver')
input1.append('saskatoon')
input1.append('toronto')
self.assertEqual(k_everywhere.uniques(input1), 4)
def test_sample_input_2(self):
'''Run and assert problem statement sample 2 input and output.'''
input2 = []
input2.append('edmonton')
input2.append('edmonton')
input2.append('edmonton')
self.assertEqual(k_everywhere.uniques(input2), 1)
###############################################################################
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"kattis.k_everywhere.uniques"
] |
[((1053, 1068), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1066, 1068), False, 'import unittest\n'), ((616, 644), 'kattis.k_everywhere.uniques', 'k_everywhere.uniques', (['input1'], {}), '(input1)\n', (636, 644), False, 'from kattis import k_everywhere\n'), ((907, 935), 'kattis.k_everywhere.uniques', 'k_everywhere.uniques', (['input2'], {}), '(input2)\n', (927, 935), False, 'from kattis import k_everywhere\n')]
|
#!/usr/bin/env python3
# This variable defines all the external programs that this module
# relies on. lxbuildenv reads this variable in order to ensure
# the build will finish without exiting due to missing third-party
# programs.
LX_DEPENDENCIES = ["riscv", "icestorm", "yosys", "nextpnr-ice40"]
# Import lxbuildenv to integrate the deps/ directory
import lxbuildenv
# Disable pylint's E1101, which breaks completely on migen
#pylint:disable=E1101
#from migen import *
from migen import Module, Signal, Instance, ClockDomain, If
from migen.fhdl.specials import TSTriple
from migen.fhdl.decorators import ClockDomainsRenamer
from litex.build.lattice.platform import LatticePlatform
from litex.build.generic_platform import Pins, Subsignal
from litex.soc.integration.doc import AutoDoc, ModuleDoc
from litex.soc.integration.soc_core import SoCCore
from litex.soc.cores.cpu import CPUNone
from litex.soc.integration.builder import Builder
from litex.soc.interconnect import wishbone
from litex.soc.cores import spi_flash
from valentyusb.usbcore import io as usbio
from valentyusb.usbcore.cpu import epmem, unififo, epfifo, dummyusb, eptri
from valentyusb.usbcore.endpoint import EndpointType
import litex.soc.doc as lxsocdoc
import spibone
import argparse
import os
import subprocess
from rtl.version import Version
from rtl.romgen import RandomFirmwareROM, FirmwareROMHex
from rtl.messible import Messible
class BaseSoC(SoCCore, AutoDoc):
"""Fomu Bootloader and Base SoC
Fomu is an FPGA that fits in your USB port. This reference manual
documents the basic SoC that runs the bootloader, and that can be
reused to run your own RISC-V programs.
This reference manual only describes a particular version of the SoC.
The register sets described here are guaranteed to be available
with a given ``major version``, but are not guaranteed to be available on
any other version. Naturally, you are free to create your own SoC
that does not provide these hardware blocks. To see what the version of the
bitstream you're running, check the ``VERSION`` registers.
"""
csr_map = {
"ctrl": 0, # provided by default (optional)
"crg": 1, # user
"uart_phy": 2, # provided by default (optional)
"uart": 3, # provided by default (optional)
"identifier_mem": 4, # provided by default (optional)
"timer0": 5, # provided by default (optional)
"cpu_or_bridge": 8,
"usb": 9,
"picorvspi": 10,
"touch": 11,
"reboot": 12,
"rgb": 13,
"version": 14,
"lxspi": 15,
"messible": 16,
"button": 17,
}
SoCCore.mem_map = {
"rom": 0x00000000, # (default shadow @0x80000000)
"sram": 0x10000000, # (default shadow @0xa0000000)
"spiflash": 0x20000000, # (default shadow @0xa0000000)
"main_ram": 0x40000000, # (default shadow @0xc0000000)
"csr": 0xe0000000, # (default shadow @0xe0000000)
"vexriscv_debug": 0xf00f0000,
}
interrupt_map = {
"timer0": 2,
"usb": 3,
}
interrupt_map.update(SoCCore.interrupt_map)
def __init__(self, platform, boot_source="rand",
debug=None, bios_file=None,
use_dsp=False, placer="heap", output_dir="build",
pnr_seed=0,
**kwargs):
# Disable integrated RAM as we'll add it later
self.integrated_sram_size = 0
if hasattr(platform, "get_integrated_sram_size"):
self.integrated_sram_size = platform.get_integrated_sram_size()
self.output_dir = output_dir
clk_freq = int(12e6)
platform.add_crg(self)
SoCCore.__init__(self, platform, clk_freq, integrated_sram_size=self.integrated_sram_size, with_uart=False, csr_data_width=32, **kwargs)
usb_debug = False
if debug is not None:
if debug == "uart":
from litex.soc.cores.uart import UARTWishboneBridge
self.submodules.uart_bridge = UARTWishboneBridge(platform.request("serial"), clk_freq, baudrate=115200)
self.add_wb_master(self.uart_bridge.wishbone)
elif debug == "usb":
usb_debug = True
elif debug == "spi":
import spibone
# Add SPI Wishbone bridge
debug_device = [
("spidebug", 0,
Subsignal("mosi", Pins("dbg:0")),
Subsignal("miso", Pins("dbg:1")),
Subsignal("clk", Pins("dbg:2")),
Subsignal("cs_n", Pins("dbg:3")),
)
]
platform.add_extension(debug_device)
spi_pads = platform.request("spidebug")
self.submodules.spibone = ClockDomainsRenamer("usb_12")(spibone.SpiWishboneBridge(spi_pads, wires=4))
self.add_wb_master(self.spibone.wishbone)
if hasattr(self, "cpu") and not isinstance(self.cpu, CPUNone):
platform.add_cpu_variant(self, debug=True)
self.register_mem("vexriscv_debug", 0xf00f0000, self.cpu.debug_bus, 0x100)
else:
if hasattr(self, "cpu") and not isinstance(self.cpu, CPUNone):
platform.add_cpu_variant(self)
if hasattr(platform, "add_sram"):
# SPRAM- UP5K has single port RAM, might as well use it as SRAM to
# free up scarce block RAM.
spram_size = platform.add_sram(self)
self.register_mem("sram", self.mem_map["sram"], self.spram.bus, spram_size)
# Add a Messible for device->host communications
self.submodules.messible = Messible()
if boot_source == "rand":
kwargs['cpu_reset_address'] = 0
bios_size = 0x2000
self.submodules.random_rom = RandomFirmwareROM(bios_size)
self.add_constant("ROM_DISABLE", 1)
self.register_rom(self.random_rom.bus, bios_size)
elif boot_source == "bios":
kwargs['cpu_reset_address'] = 0
if bios_file is None:
self.integrated_rom_size = bios_size = 0x4000
self.submodules.rom = wishbone.SRAM(bios_size, read_only=True, init=[])
self.register_rom(self.rom.bus, bios_size)
else:
bios_size = 0x4000
self.submodules.firmware_rom = FirmwareROMHex(bios_size, bios_file)
self.add_constant("ROM_DISABLE", 1)
self.register_rom(self.firmware_rom.bus, bios_size)
elif boot_source == "spi":
kwargs['cpu_reset_address'] = 0
self.integrated_rom_size = bios_size = 0x2000
gateware_size = 0x1a000
self.flash_boot_address = self.mem_map["spiflash"] + gateware_size
self.submodules.rom = wishbone.SRAM(bios_size, read_only=True, init=[])
self.register_rom(self.rom.bus, bios_size)
else:
raise ValueError("unrecognized boot_source: {}".format(boot_source))
# The litex SPI module supports memory-mapped reads, as well as a bit-banged mode
# for doing writes.
spi_pads = platform.request("spiflash4x")
self.submodules.lxspi = spi_flash.SpiFlashDualQuad(spi_pads, dummy=platform.spi_dummy, endianness="little")
self.lxspi.add_clk_primitive(platform.device)
self.register_mem("spiflash", self.mem_map["spiflash"], self.lxspi.bus, size=platform.spi_size)
# Add USB pads, as well as the appropriate USB controller. If no CPU is
# present, use the DummyUsb controller.
usb_pads = platform.request_usb()
usb_iobuf = usbio.IoBuf(usb_pads.d_p, usb_pads.d_n, usb_pads.pullup)
if hasattr(self, "cpu") and not isinstance(self.cpu, CPUNone):
self.submodules.usb = eptri.TriEndpointInterface(usb_iobuf, debug=usb_debug)
else:
self.submodules.usb = dummyusb.DummyUsb(usb_iobuf, debug=usb_debug)
if usb_debug:
self.add_wb_master(self.usb.debug_bridge.wishbone)
# For the EVT board, ensure the pulldown pin is tristated as an input
if hasattr(usb_pads, "pulldown"):
pulldown = TSTriple()
self.specials += pulldown.get_tristate(usb_pads.pulldown)
self.comb += pulldown.oe.eq(0)
# Add GPIO pads for the touch buttons
if hasattr(platform, "add_touch"):
platform.add_touch(self)
if hasattr(platform, "add_button"):
platform.add_button(self)
bootloader_size = 512*1024
self.add_constant("FLASH_MAX_ADDR", value=platform.spi_size - bootloader_size)
# Allow the user to reboot the FPGA. Additionally, connect the CPU
# RESET line to a register that can be modified, to allow for
# us to debug programs even during reset.
platform.add_reboot(self)
if hasattr(self, "cpu") and not isinstance(self.cpu, CPUNone):
self.cpu.cpu_params.update(
i_externalResetVector=self.reboot.addr.storage,
)
platform.add_rgb(self)
self.submodules.version = Version(platform.revision, platform.hw_platform, self, pnr_seed, models=[
("0x45", "E", "Fomu EVT"),
("0x44", "D", "Fomu DVT"),
("0x50", "P", "Fomu PVT (production)"),
("0x48", "H", "Fomu Hacker"),
("0x11", "1", "OrangeCrab r0.1"),
("0x12", "2", "OrangeCrab r0.2"),
("0x63", "c", "OrangeCart"),
("0x3f", "?", "Unknown model"),
])
if hasattr(platform, "build_templates"):
platform.build_templates(use_dsp, pnr_seed, placer)
git_version_subprocess = subprocess.Popen("git describe --tags", shell=True, stdout=subprocess.PIPE)
git_version = git_version_subprocess.stdout.read().decode("utf-8").strip()
for (name,value) in platform.get_config(git_version):
self.add_constant("CONFIG_" + name, value)
def main():
parser = argparse.ArgumentParser(
description="Build Fomu Main Gateware")
parser.add_argument(
"--boot-source", choices=["spi", "rand", "bios"], default="bios",
help="where to have the CPU obtain its executable code from"
)
parser.add_argument(
"--document-only", default=False, action="store_true",
help="Don't build gateware or software, only build documentation"
)
parser.add_argument(
"--platform", choices=["fomu", "orangecrab", "orangecart"], required=True,
help="build foboot for a particular hardware"
)
parser.add_argument(
"--bios", help="use specified file as a BIOS, rather than building one"
)
parser.add_argument(
"--with-debug", help="enable debug support", choices=["usb", "uart", "spi", None]
)
parser.add_argument(
"--with-dsp", help="use dsp inference in yosys (not all yosys builds have -dsp)", action="store_true"
)
parser.add_argument(
"--no-cpu", help="disable cpu generation for debugging purposes", action="store_true"
)
parser.add_argument(
"--placer", choices=["sa", "heap"], default="heap", help="which placer to use in nextpnr"
)
parser.add_argument(
"--seed", default=0, help="seed to use in nextpnr"
)
parser.add_argument(
"--export-random-rom-file", help="Generate a random ROM file and save it to a file"
)
parser.add_argument(
"--skip-gateware", help="Skip generating gateware", default=False
)
args, _ = parser.parse_known_args()
# Select platform based arguments
if args.platform == "orangecrab":
from rtl.platform.orangecrab import Platform, add_platform_args
elif args.platform == "orangecart":
from rtl.platform.orangecart import Platform, add_platform_args
elif args.platform == "fomu":
from rtl.platform.fomu import Platform, add_platform_args
# Add any platform independent args
add_platform_args(parser)
args = parser.parse_args()
# load our platform file
if args.platform == "orangecrab":
platform = Platform(revision=args.revision, device=args.device)
elif args.platform == "orangecart":
platform = Platform(device=args.device)
elif args.platform == "fomu":
platform = Platform(revision=args.revision)
output_dir = 'build'
#if args.export_random_rom_file is not None:
rom_rand = os.path.join(output_dir, "gateware", "rand_rom.hex")
os.system(f"ecpbram --generate {rom_rand} --seed {0} --width {32} --depth {int(0x4000/4)}")
compile_software = False
if (args.boot_source == "bios" or args.boot_source == "spi") and args.bios is None:
compile_software = True
compile_gateware = True
if args.skip_gateware:
compile_gateware = False
cpu_type = "vexriscv"
cpu_variant = "minimal"
if args.with_debug:
cpu_variant = cpu_variant + "+debug"
if args.no_cpu:
cpu_type = None
cpu_variant = None
if args.document_only:
compile_gateware = False
compile_software = False
os.environ["LITEX"] = "1" # Give our Makefile something to look for
soc = BaseSoC(platform, cpu_type=cpu_type, cpu_variant=cpu_variant,
debug=args.with_debug, boot_source=args.boot_source,
bios_file=args.bios,
use_dsp=args.with_dsp, placer=args.placer,
pnr_seed=int(args.seed),
output_dir=output_dir)
builder = Builder(soc, output_dir=output_dir, csr_csv="build/csr.csv", csr_svd="build/soc.svd",
compile_software=compile_software, compile_gateware=compile_gateware)
if compile_software:
builder.software_packages = [
("bios", os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "sw")))
]
vns = builder.build()
soc.do_exit(vns)
lxsocdoc.generate_docs(soc, "build/documentation/", project_name="Fomu Bootloader", author="<NAME>")
if not args.document_only:
platform.finalise(output_dir)
if __name__ == "__main__":
main()
def export_random_rom_file(filename):
size = 0x2000
def xorshift32(x):
x = x ^ (x << 13) & 0xffffffff
x = x ^ (x >> 17) & 0xffffffff
x = x ^ (x << 5) & 0xffffffff
return x & 0xffffffff
def get_rand(x):
out = 0
for i in range(32):
x = xorshift32(x)
if (x & 1) == 1:
out = out | (1 << i)
return out & 0xffffffff
seed = 1
with open(filename, "w", newline="\n") as output:
for _ in range(int(size / 4)):
seed = get_rand(seed)
print("{:08x}".format(seed), file=output)
return 0
|
[
"argparse.ArgumentParser",
"rtl.messible.Messible",
"migen.fhdl.specials.TSTriple",
"rtl.romgen.FirmwareROMHex",
"os.path.join",
"valentyusb.usbcore.io.IoBuf",
"litex.soc.doc.generate_docs",
"rtl.platform.fomu.add_platform_args",
"os.path.dirname",
"litex.soc.integration.builder.Builder",
"spibone.SpiWishboneBridge",
"litex.build.generic_platform.Pins",
"valentyusb.usbcore.cpu.dummyusb.DummyUsb",
"litex.soc.integration.soc_core.SoCCore.__init__",
"subprocess.Popen",
"litex.soc.cores.spi_flash.SpiFlashDualQuad",
"litex.soc.interconnect.wishbone.SRAM",
"rtl.version.Version",
"rtl.platform.fomu.Platform",
"valentyusb.usbcore.cpu.eptri.TriEndpointInterface",
"migen.fhdl.decorators.ClockDomainsRenamer",
"rtl.romgen.RandomFirmwareROM"
] |
[((10355, 10418), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Build Fomu Main Gateware"""'}), "(description='Build Fomu Main Gateware')\n", (10378, 10418), False, 'import argparse\n'), ((12329, 12354), 'rtl.platform.fomu.add_platform_args', 'add_platform_args', (['parser'], {}), '(parser)\n', (12346, 12354), False, 'from rtl.platform.fomu import Platform, add_platform_args\n'), ((12790, 12842), 'os.path.join', 'os.path.join', (['output_dir', '"""gateware"""', '"""rand_rom.hex"""'], {}), "(output_dir, 'gateware', 'rand_rom.hex')\n", (12802, 12842), False, 'import os\n'), ((13945, 14110), 'litex.soc.integration.builder.Builder', 'Builder', (['soc'], {'output_dir': 'output_dir', 'csr_csv': '"""build/csr.csv"""', 'csr_svd': '"""build/soc.svd"""', 'compile_software': 'compile_software', 'compile_gateware': 'compile_gateware'}), "(soc, output_dir=output_dir, csr_csv='build/csr.csv', csr_svd=\n 'build/soc.svd', compile_software=compile_software, compile_gateware=\n compile_gateware)\n", (13952, 14110), False, 'from litex.soc.integration.builder import Builder\n'), ((14338, 14443), 'litex.soc.doc.generate_docs', 'lxsocdoc.generate_docs', (['soc', '"""build/documentation/"""'], {'project_name': '"""Fomu Bootloader"""', 'author': '"""<NAME>"""'}), "(soc, 'build/documentation/', project_name=\n 'Fomu Bootloader', author='<NAME>')\n", (14360, 14443), True, 'import litex.soc.doc as lxsocdoc\n'), ((3913, 4054), 'litex.soc.integration.soc_core.SoCCore.__init__', 'SoCCore.__init__', (['self', 'platform', 'clk_freq'], {'integrated_sram_size': 'self.integrated_sram_size', 'with_uart': '(False)', 'csr_data_width': '(32)'}), '(self, platform, clk_freq, integrated_sram_size=self.\n integrated_sram_size, with_uart=False, csr_data_width=32, **kwargs)\n', (3929, 4054), False, 'from litex.soc.integration.soc_core import SoCCore\n'), ((5948, 5958), 'rtl.messible.Messible', 'Messible', ([], {}), '()\n', (5956, 5958), False, 'from rtl.messible import 
Messible\n'), ((7517, 7605), 'litex.soc.cores.spi_flash.SpiFlashDualQuad', 'spi_flash.SpiFlashDualQuad', (['spi_pads'], {'dummy': 'platform.spi_dummy', 'endianness': '"""little"""'}), "(spi_pads, dummy=platform.spi_dummy, endianness=\n 'little')\n", (7543, 7605), False, 'from litex.soc.cores import spi_flash\n'), ((7951, 8007), 'valentyusb.usbcore.io.IoBuf', 'usbio.IoBuf', (['usb_pads.d_p', 'usb_pads.d_n', 'usb_pads.pullup'], {}), '(usb_pads.d_p, usb_pads.d_n, usb_pads.pullup)\n', (7962, 8007), True, 'from valentyusb.usbcore import io as usbio\n'), ((9435, 9778), 'rtl.version.Version', 'Version', (['platform.revision', 'platform.hw_platform', 'self', 'pnr_seed'], {'models': "[('0x45', 'E', 'Fomu EVT'), ('0x44', 'D', 'Fomu DVT'), ('0x50', 'P',\n 'Fomu PVT (production)'), ('0x48', 'H', 'Fomu Hacker'), ('0x11', '1',\n 'OrangeCrab r0.1'), ('0x12', '2', 'OrangeCrab r0.2'), ('0x63', 'c',\n 'OrangeCart'), ('0x3f', '?', 'Unknown model')]"}), "(platform.revision, platform.hw_platform, self, pnr_seed, models=[(\n '0x45', 'E', 'Fomu EVT'), ('0x44', 'D', 'Fomu DVT'), ('0x50', 'P',\n 'Fomu PVT (production)'), ('0x48', 'H', 'Fomu Hacker'), ('0x11', '1',\n 'OrangeCrab r0.1'), ('0x12', '2', 'OrangeCrab r0.2'), ('0x63', 'c',\n 'OrangeCart'), ('0x3f', '?', 'Unknown model')])\n", (9442, 9778), False, 'from rtl.version import Version\n'), ((10053, 10128), 'subprocess.Popen', 'subprocess.Popen', (['"""git describe --tags"""'], {'shell': '(True)', 'stdout': 'subprocess.PIPE'}), "('git describe --tags', shell=True, stdout=subprocess.PIPE)\n", (10069, 10128), False, 'import subprocess\n'), ((12473, 12525), 'rtl.platform.fomu.Platform', 'Platform', ([], {'revision': 'args.revision', 'device': 'args.device'}), '(revision=args.revision, device=args.device)\n', (12481, 12525), False, 'from rtl.platform.fomu import Platform, add_platform_args\n'), ((6110, 6138), 'rtl.romgen.RandomFirmwareROM', 'RandomFirmwareROM', (['bios_size'], {}), '(bios_size)\n', (6127, 6138), False, 'from rtl.romgen 
import RandomFirmwareROM, FirmwareROMHex\n'), ((8113, 8167), 'valentyusb.usbcore.cpu.eptri.TriEndpointInterface', 'eptri.TriEndpointInterface', (['usb_iobuf'], {'debug': 'usb_debug'}), '(usb_iobuf, debug=usb_debug)\n', (8139, 8167), False, 'from valentyusb.usbcore.cpu import epmem, unififo, epfifo, dummyusb, eptri\n'), ((8216, 8261), 'valentyusb.usbcore.cpu.dummyusb.DummyUsb', 'dummyusb.DummyUsb', (['usb_iobuf'], {'debug': 'usb_debug'}), '(usb_iobuf, debug=usb_debug)\n', (8233, 8261), False, 'from valentyusb.usbcore.cpu import epmem, unififo, epfifo, dummyusb, eptri\n'), ((8491, 8501), 'migen.fhdl.specials.TSTriple', 'TSTriple', ([], {}), '()\n', (8499, 8501), False, 'from migen.fhdl.specials import TSTriple\n'), ((12585, 12613), 'rtl.platform.fomu.Platform', 'Platform', ([], {'device': 'args.device'}), '(device=args.device)\n', (12593, 12613), False, 'from rtl.platform.fomu import Platform, add_platform_args\n'), ((12667, 12699), 'rtl.platform.fomu.Platform', 'Platform', ([], {'revision': 'args.revision'}), '(revision=args.revision)\n', (12675, 12699), False, 'from rtl.platform.fomu import Platform, add_platform_args\n'), ((6463, 6512), 'litex.soc.interconnect.wishbone.SRAM', 'wishbone.SRAM', (['bios_size'], {'read_only': '(True)', 'init': '[]'}), '(bios_size, read_only=True, init=[])\n', (6476, 6512), False, 'from litex.soc.interconnect import wishbone\n'), ((6672, 6708), 'rtl.romgen.FirmwareROMHex', 'FirmwareROMHex', (['bios_size', 'bios_file'], {}), '(bios_size, bios_file)\n', (6686, 6708), False, 'from rtl.romgen import RandomFirmwareROM, FirmwareROMHex\n'), ((7116, 7165), 'litex.soc.interconnect.wishbone.SRAM', 'wishbone.SRAM', (['bios_size'], {'read_only': '(True)', 'init': '[]'}), '(bios_size, read_only=True, init=[])\n', (7129, 7165), False, 'from litex.soc.interconnect import wishbone\n'), ((14236, 14261), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (14251, 14261), False, 'import os\n'), ((5061, 5090), 
'migen.fhdl.decorators.ClockDomainsRenamer', 'ClockDomainsRenamer', (['"""usb_12"""'], {}), "('usb_12')\n", (5080, 5090), False, 'from migen.fhdl.decorators import ClockDomainsRenamer\n'), ((5091, 5135), 'spibone.SpiWishboneBridge', 'spibone.SpiWishboneBridge', (['spi_pads'], {'wires': '(4)'}), '(spi_pads, wires=4)\n', (5116, 5135), False, 'import spibone\n'), ((4680, 4693), 'litex.build.generic_platform.Pins', 'Pins', (['"""dbg:0"""'], {}), "('dbg:0')\n", (4684, 4693), False, 'from litex.build.generic_platform import Pins, Subsignal\n'), ((4738, 4751), 'litex.build.generic_platform.Pins', 'Pins', (['"""dbg:1"""'], {}), "('dbg:1')\n", (4742, 4751), False, 'from litex.build.generic_platform import Pins, Subsignal\n'), ((4796, 4809), 'litex.build.generic_platform.Pins', 'Pins', (['"""dbg:2"""'], {}), "('dbg:2')\n", (4800, 4809), False, 'from litex.build.generic_platform import Pins, Subsignal\n'), ((4854, 4867), 'litex.build.generic_platform.Pins', 'Pins', (['"""dbg:3"""'], {}), "('dbg:3')\n", (4858, 4867), False, 'from litex.build.generic_platform import Pins, Subsignal\n')]
|
import unittest
from code.permutation import load
class TestLoadData(unittest.TestCase):
features, box_office = load()
def test_load_returns_not_None(self):
self.assertIsNotNone(load())
def test_load_features_shape(self):
self.assertEquals((39360, 2), self.features.shape)
def test_load_box_office_shape(self):
self.assertEquals((8304, 2), self.box_office.shape)
|
[
"code.permutation.load"
] |
[((120, 126), 'code.permutation.load', 'load', ([], {}), '()\n', (124, 126), False, 'from code.permutation import load\n'), ((199, 205), 'code.permutation.load', 'load', ([], {}), '()\n', (203, 205), False, 'from code.permutation import load\n')]
|
# -*- coding: utf-8 -*-
# Copyright 2015-2016 Telefónica Investigación y Desarrollo, S.A.U
#
# This file is part of FIWARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with <EMAIL>
from qautils.remote.fabric_utils import FabricUtils
from subprocess import Popen, PIPE
import os
__copyright__ = "Copyright 2015-2016"
__license__ = " Apache License, Version 2.0"
COMMAND_SYNC = "sync.py"
OUTPUT_PARALLEL_LOGS = "sync_*"
class GlanceSyncRemoteCmdClient:
""" Remote GlanceSync client for testing purposes """
def __init__(self, master_hostname, master_username, master_password, configuration_file_path,
master_keyfile=None, glancesyc_bin_path=None):
"""
Init GlanceSync client.
:param master_hostname (string): Hostname of Master.
:param master_username (string): Username.
:param master_password (string): Password.
:param configuration_file_path (string): Path where configuration file is located
:param master_keyfile (string): SSH private key file
:param glancesyc_bin_path (string): Path where GlanceSyn binary are located
:return:
"""
self.fabric_utils = FabricUtils(master_hostname, master_username, master_password, master_keyfile)
self.conf_file_path = configuration_file_path
self.conf_file_backup_path = None
self.bin_path = glancesyc_bin_path
self.host = master_hostname
def change_configuration_file(self, section, key, value):
"""
Change properties in the configuration file.
:param section (String): Section.
:param key (String): Property name.
:param value (String): Property value.
:return (String): Command output
"""
command = "crudini --set {config_file} {section} {key} {value}".format(config_file=self.conf_file_path,
section=section, key=key, value=value)
return self.execute_command(command)
def backup_glancesync_config_file(self, backup_dir):
"""
Create a backup of configuration file.
:param backup_dir (String): Copy the GlanceSync configuration file to tmp backup_dir
:return: None
"""
self.conf_file_backup_path = "{backup_dir}/glancesync.conf.backup".format(backup_dir=backup_dir)
command = "cp -f {config_file} {backup_file}".format(config_file=self.conf_file_path,
backup_file=self.conf_file_backup_path)
return self.execute_command(command)
def restore_backup(self):
"""
Restore backup of the configuration file.
:return: None
"""
if self.conf_file_backup_path:
command = "cp -f {backup_file} {config_file}".format(backup_file=self.conf_file_backup_path,
config_file=self.conf_file_path)
return self.execute_command(command)
def get_output_log_list(self):
"""
This method return the content of executing a 'ls' command filtering by output parallel logs dir name
:return (String): Command output
"""
command = "ls -d {output_files_pater}*/*".format(bin_path=self.bin_path,
output_files_pater=OUTPUT_PARALLEL_LOGS)
return self.execute_command(command)
def get_output_log_content(self, file_absolute_path):
"""
This method return the content of the given file.
:param file_absolute_path: Absolute path of the file (given by get_output_log_list function)
:return (String): Command output (content of the file)
"""
command = "cat {file_absolute_path}".format(file_absolute_path=file_absolute_path)
return self.execute_command(command)
def clean_all_parallel_output_logs(self):
"""
Remove all output files coming from a parallel execution
:return (String): Command output
"""
command = "rm -rf {output_files_pater}".format(bin_path=self.bin_path,
output_files_pater=OUTPUT_PARALLEL_LOGS)
return self.execute_command(command)
def sync(self, list_nodes=None, options=None):
"""
Execute SYNC command. If options are given, they will be passed to the GlanceSync CLI.
:param list_nodes (String): String with the list of nodes. e.i:
"Burgos"
"master:Burgos"
"Burgos target2:Madrid"
"master:Burgos target2:Madrid"
:param options (String): GlanceSync CLI options.
:return (String): Command output
"""
command = "{}/{}".format(self.bin_path, COMMAND_SYNC) if self.bin_path is not None else "sync"
command = "{command} {options}".format(command=command, options=options) if options else command
command = "{command} {list_nodes}".format(command=command, list_nodes=list_nodes) if list_nodes else command
return self.execute_command(command)
def execute_command(self, command):
if self.host == "localhost" or self.host == "fiwareglancesync":
return self.execute_command_locally(command)
else:
return self.fabric_utils.execute_command(command)
def execute_command_locally(self, command):
p = Popen(command, shell=True, stdout=PIPE)
metadatajson, err = p.communicate()
if err:
return None
if not metadatajson:
return 'ok'
return metadatajson
|
[
"qautils.remote.fabric_utils.FabricUtils",
"subprocess.Popen"
] |
[((1775, 1853), 'qautils.remote.fabric_utils.FabricUtils', 'FabricUtils', (['master_hostname', 'master_username', 'master_password', 'master_keyfile'], {}), '(master_hostname, master_username, master_password, master_keyfile)\n', (1786, 1853), False, 'from qautils.remote.fabric_utils import FabricUtils\n'), ((6067, 6106), 'subprocess.Popen', 'Popen', (['command'], {'shell': '(True)', 'stdout': 'PIPE'}), '(command, shell=True, stdout=PIPE)\n', (6072, 6106), False, 'from subprocess import Popen, PIPE\n')]
|
#!/usr/bin/env python
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2012,2013,2014,2015,2016,2017 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for testing the vulcan2 related commands."""
import unittest
if __name__ == "__main__":
import utils
utils.import_depends()
from brokertest import TestBrokerCommand
from notificationtest import VerifyNotificationsMixin
class TestVulcan20(VerifyNotificationsMixin, TestBrokerCommand):
def add_utcluster(self, name, metacluster):
command = ["add_esx_cluster", "--cluster=%s" % name,
"--metacluster=%s" % metacluster, "--room=utroom1",
"--buildstatus=build",
"--domain=unittest", "--down_hosts_threshold=0",
"--archetype=esx_cluster",
"--personality=vulcan2-server-dev"]
self.noouttest(command)
# metacluster aligned svc tests
def test_400_addvcenterservices(self):
command = ["add_required_service", "--service", "vcenter",
"--archetype", "vmhost", "--personality", "vulcan2-server-dev"]
self.noouttest(command)
command = ["add_required_service", "--service", "vcenter",
"--archetype", "metacluster", "--personality", "vulcan2"]
self.noouttest(command)
def test_410_bindvcenterservices(self):
command = ["bind_client", "--metacluster", "utmc8",
"--service", "vcenter", "--instance", "ut"]
err = self.statustest(command)
# The service should be bound to the metacluster and to the hosts, but
# not to the clusters as they do not require it
self.matchoutput(err, "Metacluster utmc8 adding binding for "
"service instance vcenter/ut", command)
self.matchoutput(err, "Host evh80.aqd-unittest.ms.com adding binding "
"for service instance vcenter/ut", command)
self.matchoutput(err, "Host evh81.aqd-unittest.ms.com adding binding "
"for service instance vcenter/ut", command)
self.matchclean(err, "utecl", command)
command = ["show", "host", "--hostname", "evh80.aqd-unittest.ms.com"]
out = self.commandtest(command)
self.matchoutput(out,
"Uses Service: vcenter Instance: ut",
command)
command = "show metacluster --metacluster utmc8"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "Member Alignment: Service vcenter Instance ut", command)
def test_420_failmaxclientcount(self):
command = ["update_service", "--service", "vcenter", "--instance", "ut",
"--max_clients", "17"]
self.noouttest(command)
command = ["map", "service", "--service", "vcenter", "--instance", "ut",
"--building", "ut"] + self.valid_just_sn
self.noouttest(command)
self.add_utcluster("utpgcl2", "utmc8")
command = ["make", "cluster", "--cluster", "utmc8"]
out = self.badrequesttest(command)
self.matchoutput(out, "Please use the --metacluster option for "
"metaclusters.", command)
self.matchoutput(out,
"The available instances ['ut'] for service vcenter "
"are at full capacity.",
command)
command = ["unmap", "service", "--service", "vcenter",
"--instance", "ut", "--building", "ut"] + self.valid_just_sn
self.noouttest(command)
self.statustest(["del_cluster", "--cluster=utpgcl2"])
def test_430_unbindvcenterservices(self):
command = ["del_required_service", "--service", "vcenter",
"--archetype", "metacluster", "--personality", "vulcan2"]
self.noouttest(command)
command = ["del_required_service", "--service", "vcenter",
"--archetype", "vmhost", "--personality", "vulcan2-server-dev"]
self.noouttest(command)
self.noouttest(["unbind_client", "--metacluster", "utmc8",
"--service", "vcenter"])
def test_440_unmapvcenterservices(self):
command = ["unmap", "service", "--service", "vcenter",
"--instance", "ut", "--building", "ut",
"--personality", "vulcan2-server-dev", "--archetype", "vmhost"]
self.noouttest(command)
command = ["make", "--hostname", "evh80.aqd-unittest.ms.com"]
err = self.statustest(command)
self.matchoutput(err, "Host evh80.aqd-unittest.ms.com removing "
"binding for service instance vcenter/ut", command)
command = ["show", "host", "--hostname", "evh80.aqd-unittest.ms.com"]
out = self.commandtest(command)
self.matchclean(out,
"Uses Service: vcenter Instance: ut",
command)
#
# service binding conflicts
#
def test_500_add_mc_esx_service(self):
command = ["add", "service", "--service", "esx_management_server", "--instance", "ut.mc"]
self.noouttest(command)
command = ["add_required_service", "--service", "esx_management_server",
"--archetype", "metacluster", "--personality", "vulcan2"]
self.noouttest(command)
command = ["map", "service", "--service", "esx_management_server", "--instance", "ut.mc",
"--building", "ut", "--personality", "vulcan2",
"--archetype", "metacluster"]
self.noouttest(command)
command = ["rebind_client", "--metacluster", "utmc8",
"--service", "esx_management_server", "--instance", "ut.mc"]
err = self.statustest(command)
self.matchoutput(err,
"Metacluster utmc8 adding binding for service "
"instance esx_management_server/ut.mc",
command)
for cluster in ["utecl12", "utecl13"]:
self.searchoutput(err,
"ESX Cluster %s removing binding for service "
"instance esx_management_server/ut.[ab]" % cluster,
command)
self.matchoutput(err,
"ESX Cluster %s adding binding for service "
"instance esx_management_server/ut.mc" % cluster,
command)
for host in ["evh80", "evh81"]:
self.searchoutput(err,
"Host %s.aqd-unittest.ms.com removing binding for "
"service instance esx_management_server/ut.[ab]" % host,
command)
self.matchoutput(err,
"Host %s.aqd-unittest.ms.com adding binding for "
"service instance esx_management_server/ut.mc" % host,
command)
def test_510_fail_make_host(self):
command = ["make", "--hostname", "evh80.aqd-unittest.ms.com"]
out = self.badrequesttest(command)
self.matchoutput(out,
"ESX Metacluster utmc8 is set to use service instance "
"esx_management_server/ut.mc, but that instance is "
"not in a service map for "
"host evh80.aqd-unittest.ms.com.",
command)
def test_510_fail_make_cluster(self):
command = ["make", "cluster", "--cluster", "utecl12"]
out = self.badrequesttest(command)
self.matchoutput(out,
"ESX Metacluster utmc8 is set to use service instance "
"esx_management_server/ut.mc, but that instance is "
"not in a service map for ESX cluster utecl12.",
command)
self.matchoutput(out,
"ESX Metacluster utmc8 is set to use service instance "
"esx_management_server/ut.mc, but that instance is "
"not in a service map for "
"host evh80.aqd-unittest.ms.com.",
command)
def test_520_verify_client_count(self):
command = ["show_service", "--service=esx_management_server",
"--instance=ut.mc"]
out = self.commandtest(command)
self.searchoutput(out, r"^ Client Count: 16$", command)
def test_530_verify_mixed_client_count(self):
self.add_utcluster("utpgcl3", "utmc8")
command = ["bind_client", "--cluster", "utpgcl3", "--service",
"esx_management_server", "--instance", "ut.mc"]
err = self.statustest(command)
self.matchoutput(err, "ESX Cluster utpgcl3 adding binding for service "
"instance esx_management_server/ut.mc", command)
command = ["show_service", "--service=esx_management_server",
"--instance=ut.mc"]
out = self.commandtest(command)
self.searchoutput(out, r"^ Client Count: 24$", command)
# Can't unbind an an aligned service here and don't want unalign it
def test_538_del_utpgcl3(self):
self.statustest(["del_cluster", "--cluster=utpgcl3"])
def test_540_remove_mc_esx_service(self):
command = ["del_required_service", "--service", "esx_management_server",
"--archetype", "metacluster", "--personality", "vulcan2"]
self.noouttest(command)
command = ["unbind_client", "--metacluster", "utmc8",
"--service", "esx_management_server"]
self.noouttest(command)
command = ["unmap", "service", "--service", "esx_management_server", "--instance", "ut.mc",
"--building", "ut", "--personality", "vulcan2",
"--archetype", "metacluster"]
self.noouttest(command)
out = self.statustest(["make_cluster", "--cluster", "utecl12"])
self.matchoutput(out, "removing binding for service instance "
"esx_management_server/ut.mc", command)
self.searchoutput(out, "adding binding for service instance "
"esx_management_server/ut.[ab]", command)
out = self.statustest(["make_cluster", "--cluster", "utecl13"])
self.matchoutput(out, "removing binding for service instance "
"esx_management_server/ut.mc", command)
self.searchoutput(out, "adding binding for service instance "
"esx_management_server/ut.[ab]", command)
command = ["del", "service", "--service", "esx_management_server", "--instance", "ut.mc"]
self.noouttest(command)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestVulcan20)
unittest.TextTestRunner(verbosity=2).run(suite)
|
[
"unittest.TextTestRunner",
"unittest.TestLoader",
"utils.import_depends"
] |
[((851, 873), 'utils.import_depends', 'utils.import_depends', ([], {}), '()\n', (871, 873), False, 'import utils\n'), ((11466, 11487), 'unittest.TestLoader', 'unittest.TestLoader', ([], {}), '()\n', (11485, 11487), False, 'import unittest\n'), ((11528, 11564), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {'verbosity': '(2)'}), '(verbosity=2)\n', (11551, 11564), False, 'import unittest\n')]
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from submarine.client.api.environment_client import EnvironmentClient
from submarine.client.models.environment_spec import EnvironmentSpec
from submarine.client.models.kernel_spec import KernelSpec
@pytest.mark.e2e
def test_environment_e2e():
submarine_client = EnvironmentClient(host="http://localhost:8080")
kernel_spec = KernelSpec(
name="submarine_jupyter_py3",
channels=["defaults"],
conda_dependencies=[],
pip_dependencies=[],
)
environment_spec = EnvironmentSpec(
name="mytest",
kernel_spec=kernel_spec,
docker_image="apache/submarine:jupyter-notebook-gpu-0.7.0",
)
environment = submarine_client.create_environment(environment_spec=environment_spec)
environment_name = environment["environmentSpec"]["name"]
submarine_client.get_environment(environment_name)
submarine_client.list_environments()
submarine_client.delete_environment(environment_name)
|
[
"submarine.client.models.environment_spec.EnvironmentSpec",
"submarine.client.models.kernel_spec.KernelSpec",
"submarine.client.api.environment_client.EnvironmentClient"
] |
[((1059, 1106), 'submarine.client.api.environment_client.EnvironmentClient', 'EnvironmentClient', ([], {'host': '"""http://localhost:8080"""'}), "(host='http://localhost:8080')\n", (1076, 1106), False, 'from submarine.client.api.environment_client import EnvironmentClient\n'), ((1125, 1236), 'submarine.client.models.kernel_spec.KernelSpec', 'KernelSpec', ([], {'name': '"""submarine_jupyter_py3"""', 'channels': "['defaults']", 'conda_dependencies': '[]', 'pip_dependencies': '[]'}), "(name='submarine_jupyter_py3', channels=['defaults'],\n conda_dependencies=[], pip_dependencies=[])\n", (1135, 1236), False, 'from submarine.client.models.kernel_spec import KernelSpec\n'), ((1295, 1415), 'submarine.client.models.environment_spec.EnvironmentSpec', 'EnvironmentSpec', ([], {'name': '"""mytest"""', 'kernel_spec': 'kernel_spec', 'docker_image': '"""apache/submarine:jupyter-notebook-gpu-0.7.0"""'}), "(name='mytest', kernel_spec=kernel_spec, docker_image=\n 'apache/submarine:jupyter-notebook-gpu-0.7.0')\n", (1310, 1415), False, 'from submarine.client.models.environment_spec import EnvironmentSpec\n')]
|
from . import base
import logging
logger = logging.getLogger(__name__)
class Team(base.Base):
pass
|
[
"logging.getLogger"
] |
[((45, 72), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (62, 72), False, 'import logging\n')]
|
from .proxy import get_proxy
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
class Driver:
def __init__(self, *, ad_block: bool = False, use_proxy: bool = False, proxy: str = None):
self.ad_block = ad_block
self.use_proxy = use_proxy
self.proxy = proxy
self.driver = None
def __exit__(self, exc_type, exc_value, traceback):
if self.driver:
self.driver.close()
def __enter__(self):
if self.use_proxy or self.proxy:
if not self.proxy:
addr = self.get_proxy()
else:
addr = self.proxy
else:
proxy, addr = None, None
chrome_options = Options()
if addr:
chrome_options.add_argument(f"--proxy-server={addr}")
chrome_options.add_argument("--headless")
chrome_options.add_argument("--no-sandbox")
if self.ad_block:
chrome_options.add_extension("/home/ubuntu/adblock_ext.crx")
self.driver = webdriver.Chrome(
"/home/ubuntu/chromedriver", options=chrome_options
)
return self.driver
@staticmethod
def get_proxy():
proxy = get_proxy()
if not proxy:
return None
return proxy["host"] + ":" + str(proxy["port"])
|
[
"selenium.webdriver.chrome.options.Options",
"selenium.webdriver.Chrome"
] |
[((728, 737), 'selenium.webdriver.chrome.options.Options', 'Options', ([], {}), '()\n', (735, 737), False, 'from selenium.webdriver.chrome.options import Options\n'), ((1048, 1117), 'selenium.webdriver.Chrome', 'webdriver.Chrome', (['"""/home/ubuntu/chromedriver"""'], {'options': 'chrome_options'}), "('/home/ubuntu/chromedriver', options=chrome_options)\n", (1064, 1117), False, 'from selenium import webdriver\n')]
|
import sys
from PyQt5.QtWidgets import QApplication
from PyQt5.QtWidgets import QStyle
from PyQt5.QtCore import Qt
import MainWindow
if __name__ == '__main__':
app = QApplication(sys.argv)
# Got a lot of errors related to Gtk on Linux:
# Gtk-CRITICAL **: IA__gtk_widget_style_get: assertion 'GTK_IS_WIDGET (widget)' failed
# A possible fix is provided here, by setting the fusion style:
# http://stackoverflow.com/questions/35351024/pyqt5-gtk-critical-ia-gtk-widget-style-get-assertion-gtk-is-widget-widg
if sys.platform == "linux" or sys.platform == "linux2":
app.setStyle("fusion")
window = MainWindow.MainWindow("Alertor")
# Put in the center of the screen.
window.setGeometry(QStyle.alignedRect(Qt.LeftToRight,
Qt.AlignCenter,
window.size(),
app.desktop().availableGeometry()))
window.show()
sys.exit(app.exec_())
|
[
"PyQt5.QtWidgets.QApplication",
"MainWindow.MainWindow"
] |
[((173, 195), 'PyQt5.QtWidgets.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (185, 195), False, 'from PyQt5.QtWidgets import QApplication\n'), ((639, 671), 'MainWindow.MainWindow', 'MainWindow.MainWindow', (['"""Alertor"""'], {}), "('Alertor')\n", (660, 671), False, 'import MainWindow\n')]
|
import numpy as np
from unittest import TestCase
from diffprivlib.mechanisms import Exponential
from diffprivlib.utils import global_seed
class TestExponential(TestCase):
def setup_method(self, method):
if method.__name__ .endswith("prob"):
global_seed(314159)
self.mech = Exponential()
def teardown_method(self, method):
del self.mech
def test_not_none(self):
self.assertIsNotNone(self.mech)
def test_class(self):
from diffprivlib.mechanisms import DPMechanism
self.assertTrue(issubclass(Exponential, DPMechanism))
def test_no_params(self):
with self.assertRaises(ValueError):
self.mech.randomise("A")
def test_no_epsilon(self):
utility_list = [
["A", "B", 1],
["A", "C", 2],
["B", "C", 2]
]
self.mech.set_utility(utility_list)
with self.assertRaises(ValueError):
self.mech.randomise("A")
def test_inf_epsilon(self):
utility_list = [
["A", "B", 1],
["A", "C", 2],
["B", "C", 2]
]
self.mech.set_utility(utility_list).set_epsilon(float("inf"))
# print(_mech.randomise("A"))
for i in range(1000):
self.assertEqual(self.mech.randomise("A"), "A")
def test_neg_epsilon(self):
with self.assertRaises(ValueError):
self.mech.set_epsilon(-1)
def test_complex_epsilon(self):
with self.assertRaises(TypeError):
self.mech.set_epsilon(1+2j)
def test_string_epsilon(self):
with self.assertRaises(TypeError):
self.mech.set_epsilon("Two")
def test_non_zero_delta(self):
with self.assertRaises(ValueError):
self.mech.set_epsilon_delta(1, 0.5)
def test_no_utility(self):
self.mech.set_epsilon(1)
with self.assertRaises(ValueError):
self.mech.randomise("1")
def test_hierarchy_first(self):
utility_list = [
["A", "B", 1],
["A", "2", 2],
["B", "2", 2]
]
self.mech.set_utility(utility_list)
self.assertIsNotNone(self.mech)
def test_non_string_hierarchy(self):
utility_list = [
["A", "B", 1],
["A", 2, 2],
["B", 2, 2]
]
with self.assertRaises(TypeError):
self.mech.set_utility(utility_list)
def test_missing_utilities(self):
utility_list = [
["A", "B", 1],
["A", "C", 2]
]
with self.assertRaises(ValueError):
self.mech.set_utility(utility_list)
def test_wrong_utilities(self):
utility_list = (
["A", "B", 1],
["A", "C", 2],
["B", "C", 2]
)
with self.assertRaises(TypeError):
self.mech.set_utility(utility_list)
utility_list = [
["A", "B", 1],
["A", "C", 2],
["B", "C", "2"]
]
with self.assertRaises(TypeError):
self.mech.set_utility(utility_list)
utility_list = [
["A", "B", 1],
["A", "C", 2],
["B", "C", -2]
]
with self.assertRaises(ValueError):
self.mech.set_utility(utility_list)
def test_non_string_input(self):
utility_list = [
["A", "B", 1],
["A", "C", 2],
["B", "C", 2]
]
self.mech.set_epsilon(1).set_utility(utility_list)
with self.assertRaises(TypeError):
self.mech.randomise(2)
def test_outside_domain(self):
utility_list = [
["A", "B", 1],
["A", "C", 2],
["B", "C", 2]
]
self.mech.set_epsilon(1).set_utility(utility_list)
with self.assertRaises(ValueError):
self.mech.randomise("D")
def test_get_utility_list(self):
self.assertIsNone(self.mech.get_utility_list())
utility_list = [
["A", "B", 1],
["A", "C", 2],
["B", "C", 2]
]
self.mech.set_epsilon(1).set_utility(utility_list)
_utility_list = self.mech.get_utility_list()
self.assertEqual(len(_utility_list), len(utility_list))
def test_self_in_utility(self):
utility_list = [
["A", "B", 1],
["A", "C", 2],
["B", "C", 2],
["A", "A", 5]
]
self.mech.set_epsilon(1).set_utility(utility_list)
_utility_list = self.mech.get_utility_list()
self.assertEqual(len(_utility_list) + 1, len(utility_list))
self.assertEqual(self.mech._get_utility("A", "A"), 0)
def test_distrib_prob(self):
epsilon = np.log(2)
runs = 20000
utility_list = [
["A", "B", 1],
["A", "C", 2],
["B", "C", 2]
]
self.mech.set_epsilon(epsilon).set_utility(utility_list)
count = [0, 0, 0]
for i in range(runs):
val = self.mech.randomise("A")
if val == "A":
count[0] += 1
elif val == "B":
count[1] += 1
elif val == "C":
count[2] += 1
# print("A: %d, B: %d, C: %d" % (count[0], count[1], count[2]))
self.assertLessEqual(count[0] / runs, np.exp(epsilon) * count[2] / runs + 0.05)
self.assertAlmostEqual(count[0] / count[1], count[1] / count[2], delta=0.1)
def test_repr(self):
repr_ = repr(self.mech.set_epsilon(1))
self.assertIn(".Exponential(", repr_)
|
[
"diffprivlib.utils.global_seed",
"numpy.exp",
"diffprivlib.mechanisms.Exponential",
"numpy.log"
] |
[((309, 322), 'diffprivlib.mechanisms.Exponential', 'Exponential', ([], {}), '()\n', (320, 322), False, 'from diffprivlib.mechanisms import Exponential\n'), ((4765, 4774), 'numpy.log', 'np.log', (['(2)'], {}), '(2)\n', (4771, 4774), True, 'import numpy as np\n'), ((268, 287), 'diffprivlib.utils.global_seed', 'global_seed', (['(314159)'], {}), '(314159)\n', (279, 287), False, 'from diffprivlib.utils import global_seed\n'), ((5370, 5385), 'numpy.exp', 'np.exp', (['epsilon'], {}), '(epsilon)\n', (5376, 5385), True, 'import numpy as np\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Mar 14 10:53:06 2018
@author: anonymous
"""
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
import sys
sys.path.append('..')
if __name__ == '__main__':
from distributions import Categorical
from ego_map import EgoMap
else:
from .distributions import Categorical
from .ego_map import EgoMap
# A temporary solution from the master branch.
# https://github.com/pytorch/pytorch/blob/7752fe5d4e50052b3b0bbc9109e599f8157febc0/torch/nn/init.py#L312
# Remove after the next version of PyTorch gets release.
def orthogonal(tensor, gain=1):
if tensor.ndimension() < 2:
raise ValueError("Only tensors with 2 or more dimensions are supported")
rows = tensor.size(0)
cols = tensor[0].numel()
flattened = torch.Tensor(rows, cols).normal_(0, 1)
if rows < cols:
flattened.t_()
# Compute the qr factorization
q, r = torch.qr(flattened)
# Make Q uniform according to https://arxiv.org/pdf/math-ph/0609050.pdf
d = torch.diag(r, 0)
ph = d.sign()
q *= ph.expand_as(q)
if rows < cols:
q.t_()
tensor.view_as(q).copy_(q)
tensor.mul_(gain)
return tensor
def weights_init(m):
classname = m.__class__.__name__
if classname.find('Conv') != -1 or classname.find('Linear') != -1:
orthogonal(m.weight.data)
print(m.__class__.__name__)
print(m.weight.size())
if m.bias is not None:
m.bias.data.fill_(0)
class Lin_View(nn.Module):
def __init__(self):
super(Lin_View, self).__init__()
def forward(self, x):
return x.view(x.size()[0], -1)
class FFPolicy(nn.Module):
def __init__(self):
super(FFPolicy, self).__init__()
def forward(self, inputs, states, masks):
raise NotImplementedError
def act(self, inputs, states, masks,
deterministic=False, **kwargs):
result = self(inputs, states, masks, **kwargs)
x = result['x']
actions = self.dist.sample(x, deterministic=deterministic)
action_log_probs, dist_entropy, action_probs = self.dist.logprobs_and_entropy(x, actions)
del result['x']
result['actions'] = actions
result['dist_entropy'] = dist_entropy
result['action_log_probs'] = action_log_probs
result['action_probs'] = action_probs
return result
def evaluate_actions(self, inputs, states,
masks, actions, pred_depths=False, **kwargs):
if pred_depths:
result = self(inputs, states, masks, pred_depths=pred_depths, **kwargs)
x = result['x']
action_log_probs, dist_entropy, action_probs = self.dist.logprobs_and_entropy(x, actions)
del result['x']
result['actions'] = actions
result['dist_entropy'] = dist_entropy
result['action_log_probs'] = action_log_probs
result['action_probs'] = action_probs
return result
else:
result = self(inputs, states, masks, **kwargs)
x = result['x']
action_log_probs, dist_entropy, action_probs = self.dist.logprobs_and_entropy(x, actions)
del result['x']
result['actions'] = actions
result['dist_entropy'] = dist_entropy
result['action_log_probs'] = action_log_probs
result['action_probs'] = action_probs
return result
def get_action_value_and_probs(self, inputs, states, masks,
deterministic=False, **kwargs):
result = self(inputs, states, masks, **kwargs)
x = result['x']
actions = self.dist.sample(x, deterministic=deterministic)
result['actions'] = actions
result['action_softmax'] = F.softmax(self.dist(x),dim=1)
del result['x']
return result
class CNNPolicy(FFPolicy):
def __init__(self, num_inputs, input_shape, params):
super(CNNPolicy, self).__init__()
if params.pretrained_vae:
class Args:
pass
args=Args()
args.hidden2=params.hidden_size
self.vae = VAE2(args)
else:
self.conv_head = nn.Sequential(nn.Conv2d(num_inputs, params.conv1_size, 8, stride=4),
nn.ReLU(True),
nn.Conv2d(params.conv1_size, params.conv2_size, 4, stride=2),
nn.ReLU(True),
nn.Conv2d(params.conv2_size, params.conv3_size, 3, stride=1),
nn.ReLU(True))
if params.action_prediction:
# an additional conv head for action prediction
self.cnn_action_head = nn.Conv2d(params.conv3_size, params.num_actions,1,1)
if params.predict_depth:
# predict depth with a 1x1 conv
self.depth_head = nn.Conv2d(params.conv3_size, 8, 1, 1)
conv_input = torch.Tensor(torch.randn((1,) + input_shape))
print(conv_input.size(), self.conv_head(conv_input).size(), self.conv_head(conv_input).size())
self.conv_out_size = self.conv_head(conv_input).nelement()
self.hidden_size = params.hidden_size
if params.skip_fc:
if params.recurrent_policy:
assert params.use_lstm == False, 'Cannot have both GRU and LSTM!'
#self.gru = MaskedGRU(self.conv_out_size, self.hidden_size)
self.gru = nn.GRUCell(self.conv_out_size, self.hidden_size)
if params.stacked_gru:
#self.gru2 = MaskedGRU(self.hidden_size, self.hidden_size)
self.gru2 = nn.GRUCell(self.hidden_size, self.hidden_size)
if params.use_lstm:
self.lstm = nn.LSTMCell(self.conv_out_size, self.hidden_size)
else:
if not params.pretrained_vae:
self.linear1 = nn.Linear(self.conv_out_size, self.hidden_size)
if params.recurrent_policy:
assert params.use_lstm == False, 'Cannot have both GRU and LSTM!'
#self.gru = MaskedGRU(self.hidden_size, self.hidden_size)
if params.pos_as_obs:
self.gru = nn.GRUCell(self.hidden_size + 4, self.hidden_size) # x, y sin(orientation), cos(orientation)
else:
self.gru = nn.GRUCell(self.hidden_size, self.hidden_size)
if params.stacked_gru:
#self.gru2 = MaskedGRU(self.hidden_size, self.hidden_size)
self.gru2 = nn.GRUCell(self.hidden_size, self.hidden_size)
if params.use_lstm:
self.lstm = nn.LSTMCell(self.hidden_size, self.hidden_size)
if params.gru_skip:
self.critic_linear = nn.Linear(self.hidden_size*2, 1)
self.dist = Categorical(self.hidden_size*2, params.num_actions)
else:
self.critic_linear = nn.Linear(self.hidden_size, 1)
self.dist = Categorical(self.hidden_size, params.num_actions)
if params.loop_detect:
self.loop_linear = nn.Linear(self.hidden_size, 1)
self.params = params
self.train()
self.reset_parameters()
if params.pretrained_vae:
enc_checkpoint = torch.load(params.pretrained_vae, map_location=lambda storage, loc: storage)
self.vae.load_state_dict(enc_checkpoint['model'])
self.vae.eval()
@property
def state_size(self):
if hasattr(self, 'gru') or hasattr(self, 'lstm'):
return self.hidden_size
else:
return 1
def load_conv_head(self, old_model):
for i in range(0, 6, 2):
self.conv_head[i].weight.data = old_model.conv_head[i].weight.data.clone()
self.conv_head[i].bias.data = old_model.conv_head[i].bias.data.clone()
self.conv_head[i].weight.requires_grad = False
self.conv_head[i].bias.requires_grad = False
def load_linear_layer(self, old_model):
self.linear1.weight.data = old_model.linear1.weight.data.clone()
self.linear1.bias.data = old_model.linear1.bias.data.clone()
self.linear1.weight.requires_grad = False
self.linear1.bias.requires_grad = False
def reset_parameters(self):
self.apply(weights_init)
relu_gain = nn.init.calculate_gain('relu')
if not self.params.pretrained_vae:
for i in range(0, 6, 2):
self.conv_head[i].weight.data.mul_(relu_gain)
self.linear1.weight.data.mul_(relu_gain)
if self.params.loop_detect:
self.loop_linear.weight.data.mul_(relu_gain)
if hasattr(self, 'gru'):
#self.gru.reset_parameters()
orthogonal(self.gru.weight_ih.data)
orthogonal(self.gru.weight_hh.data)
self.gru.bias_ih.data.fill_(0)
self.gru.bias_hh.data.fill_(0)
if self.params.learn_init_state:
self.init_state = nn.Parameter(torch.randn(1, self.hidden_size) * 0.00)
if self.params.gru_forget_init:
self.gru.bias_ih.data.uniform_(-self.params.gru_bias_range, self.params.gru_bias_range)
self.gru.bias_hh.data.uniform_(-self.params.gru_bias_range, self.params.gru_bias_range)
if hasattr(self, 'gru2'):
#self.gru2.reset_parameters()
orthogonal(self.gru2.weight_ih.data)
orthogonal(self.gru2.weight_hh.data)
self.gru2.bias_ih.data.fill_(0)
self.gru2.bias_hh.data.fill_(0)
if self.params.learn_init_state:
self.init_state2 = nn.Parameter(torch.randn(1, self.hidden_size) * 0.00)
if self.params.gru_forget_init:
self.gru2.bias_ih.data.uniform_(-self.params.gru_bias_range, self.params.gru_bias_range)
self.gru2.bias_hh.data.uniform_(-self.params.gru_bias_range, self.params.gru_bias_range)
if hasattr(self, 'lstm'):
orthogonal(self.lstm.weight_ih.data)
orthogonal(self.lstm.weight_hh.data)
self.lstm.bias_ih.data.fill_(1)
self.lstm.bias_hh.data.fill_(1)
if self.params.gate_init:
self.lstm.bias_ih.data.fill_(0)
self.lstm.bias_hh.data.fill_(0)
if self.dist.__class__.__name__ == "DiagGaussian":
self.dist.fc_mean.weight.data.mul_(0.01)
def pred_depth(self, inputs):
x = self.conv_head(inputs * (1.0/255.0))
return self.depth_head(x)
def forward(self, inputs, states, masks,
pred_depth=False, pos_deltas_origins=None, **kwargs):
depth_preds = None
action_preds = None
if self.params.pos_as_obs:
assert pos_deltas_origins is not None
if self.params.pretrained_vae:
with torch.no_grad():
self.vae.eval()
x = self.vae.encode(inputs * (1.0/128.0))[0].detach()
x = x.view(-1, self.params.hidden_size)
else:
x = self.conv_head(inputs * (1.0/self.params.image_scalar))
if pred_depth:
depth_preds = self.depth_head(x)
if self.params.action_prediction:
action_preds = self.cnn_action_head(x)
x = x.view(-1, self.conv_out_size)
if not self.params.skip_fc:
x = self.linear1(x)
x = F.relu(x)
if self.params.gru_skip:
rnn_input = x
if hasattr(self, 'gru'):
if hasattr(self,'gru2'):
states2 = kwargs['states2']
assert states2 is not None
else:
states2 = None
# x, states = self.gru(x, states, masks)
# if hasattr(self,'gru2'):
# x, states2 = self.gru2(x, states2, masks)
if inputs.size(0) == states.size(0):
if self.params.pos_as_obs:
dxys, dthetas = pos_deltas_origins[:,3:5], pos_deltas_origins[:,5:6]
x = torch.cat([x, dxys, torch.sin(dthetas), torch.cos(dthetas)], dim=1)
x = states = self.gru(x, states*masks)
if hasattr(self,'gru2'):
x = states2 = self.gru2(x, states2*masks)
else:
x = x.view(-1, states.size(0), x.size(1))
masks = masks.view(-1, states.size(0), 1)
outputs = []
for i in range(x.size(0)):
if hasattr(self, 'gru2'):
hx = states = self.gru(x[i], states * masks[i])
hx2 = states2 = self.gru2(hx, states2 * masks[i])
outputs.append(hx2)
else:
inp = x[i]
if self.params.pos_as_obs:
dxys, dthetas = pos_deltas_origins[i, :,3:5], pos_deltas_origins[i, :,5:6]
inp = torch.cat([inp, dxys, torch.sin(dthetas), torch.cos(dthetas)], dim=1)
hx = states = self.gru(inp, states * masks[i])
outputs.append(hx)
x = torch.cat(outputs, 0)
loop_preds = None
if self.params.loop_detect:
loop_preds = F.sigmoid(self.loop_linear(x))
if self.params.gru_skip:
x = torch.cat([rnn_input, x], 1)
result = {'values': self.critic_linear(x),
'x': x,
'states': states,
'states2': states2,
'depth_preds': depth_preds,
'action_preds': action_preds,
'loop_preds': loop_preds}
return result
class EgoMap0_Policy(FFPolicy):
def __init__(self, num_inputs, input_shape, params):
super(EgoMap0_Policy, self).__init__()
self.params = params
if params.new_padding:
if params.skip_cnn_relu:
self.conv_head = nn.Sequential(nn.Conv2d(num_inputs, params.conv1_size, 8, stride=4, padding=2),
nn.ReLU(True),
nn.Conv2d(params.conv1_size, params.conv2_size, 4, stride=2, padding=1),
nn.ReLU(True),
nn.Conv2d(params.conv2_size, params.ego_num_chans, 3, stride=1, padding=1))
else:
self.conv_head = nn.Sequential(nn.Conv2d(num_inputs, params.conv1_size, 8, stride=4, padding=2),
nn.ReLU(True),
nn.Conv2d(params.conv1_size, params.conv2_size, 4, stride=2, padding=1),
nn.ReLU(True),
nn.Conv2d(params.conv2_size, params.ego_num_chans, 3, stride=1, padding=1),
nn.ReLU())
else:
if params.skip_cnn_relu:
self.conv_head = nn.Sequential(nn.Conv2d(num_inputs, params.conv1_size, 8, stride=4),
nn.ReLU(True),
nn.Conv2d(params.conv1_size, params.conv2_size, 4, stride=2),
nn.ReLU(True),
nn.Conv2d(params.conv2_size, params.ego_num_chans, 3, stride=1))
else:
self.conv_head = nn.Sequential(nn.Conv2d(num_inputs, params.conv1_size, 8, stride=4),
nn.ReLU(True),
nn.Conv2d(params.conv1_size, params.conv2_size, 4, stride=2),
nn.ReLU(True),
nn.Conv2d(params.conv2_size, params.ego_num_chans, 3, stride=1),
nn.ReLU())
self.ego_map = EgoMap(params)
ac = 0 # addtional channel
if params.ego_curiousity:
ac = 1
if not params.ego_skip_global:
self.ego_head = nn.Sequential(nn.Conv2d(params.ego_num_chans + ac, params.ego_num_chans, 3, stride=1, padding=1),
nn.ReLU(True),
nn.Conv2d(params.ego_num_chans, params.ego_num_chans, 4, stride=2, padding=0),
nn.ReLU(True),
nn.Conv2d(params.ego_num_chans, params.ego_num_chans, 4, stride=2, padding=0),
nn.ReLU())
ego_head_input = torch.Tensor(torch.randn((1,) + (params.ego_num_chans + ac,
2*params.ego_half_size,
2*params.ego_half_size,)))
ego_out_size = self.ego_head(ego_head_input).nelement()
# This has to be defined as an extension as I do not know the size in advance
if params.ego_use_tanh:
ego_extension = nn.Sequential(Lin_View(),
nn.Linear(ego_out_size, 256),
nn.ReLU(True),
nn.Linear(256, params.ego_hidden_size),
nn.Tanh())
else:
ego_extension = nn.Sequential(Lin_View(),
nn.Linear(ego_out_size, 256),
nn.ReLU(True),
nn.Linear(256, params.ego_hidden_size),
nn.ReLU(True))
self.ego_head = nn.Sequential(self.ego_head, ego_extension)
ego_out_size = params.ego_hidden_size
self.ego_out_size = ego_out_size
conv_input = torch.Tensor(torch.randn((1,) + input_shape))
conv_out_size = self.conv_head(conv_input).nelement()
if params.ego_curiousity:
conv_out_size += 4*10
self.conv_out_size = conv_out_size
print('conv out size', self.conv_out_size)
if params.ego_skip and not params.ego_skip_global and not params.merge_later:
self.linear1 = nn.Linear(ego_out_size + conv_out_size, params.hidden_size)
elif params.ego_skip and (params.merge_later or params.ego_skip_global):
print('params.ego_skip and (params.merge_later or params.ego_skip_global)')
self.linear1 = nn.Linear(conv_out_size, params.hidden_size)
else:
self.linear1 = nn.Linear(ego_out_size, params.hidden_size)
if params.recurrent_policy:
assert params.use_lstm == False, 'Cannot have both GRU and LSTM!'
if params.merge_later and not params.ego_skip_global:
self.gru = nn.GRUCell(params.hidden_size + ego_out_size, params.hidden_size)
else:
self.gru = nn.GRUCell(params.hidden_size, params.hidden_size)
if params.ego_query:
if params.ego_query_scalar:
query_out_size = params.ego_num_chans + 1 + ac
else:
query_out_size = params.ego_num_chans + ac
if params.ego_skip_global:
self.query_head = nn.Linear(params.hidden_size, query_out_size)
else:
self.query_head = nn.Linear(ego_out_size + params.hidden_size, query_out_size)
if params.query_position:
self.critic_linear = nn.Linear(params.hidden_size + params.ego_num_chans + 2 + ac, 1)
self.dist = Categorical(params.hidden_size + params.ego_num_chans + 2 + ac, params.num_actions)
else:
self.critic_linear = nn.Linear(params.hidden_size + params.ego_num_chans + ac, 1)
self.dist = Categorical(params.hidden_size + params.ego_num_chans + ac, params.num_actions)
else:
self.critic_linear = nn.Linear(params.hidden_size, 1)
self.dist = Categorical(params.hidden_size, params.num_actions)
print(params.num_actions, ' actions')
self.train()
self.reset_parameters()
if params.ego_curiousity:
# Load the forward model
class Args:
pass
args = Args()
args.hidden2 = 128
args.load_vae = ''
args.shared_action_size = 32
args.load_forward = ''
save_name = params.forward_model_name
model = ForwardModel(args)
checkpoint = torch.load(save_name, map_location=lambda storage, loc: storage)
model.load_state_dict(checkpoint['model'])
model.eval()
self.forward_model = model
@property
def state_size(self):
if hasattr(self, 'gru') or hasattr(self, 'lstm'):
return self.params.hidden_size
else:
return 1
def load_conv_head(self, old_model):
for i in range(0, 6, 2):
self.conv_head[i].weight.data = old_model.conv_head[i].weight.data.clone()
self.conv_head[i].bias.data = old_model.conv_head[i].bias.data.clone()
self.conv_head[i].weight.requires_grad = False
self.conv_head[i].bias.requires_grad = False
def reset_parameters(self):
self.apply(weights_init)
relu_gain = nn.init.calculate_gain('relu')
for i in range(0, 6, 2):
self.conv_head[i].weight.data.mul_(relu_gain)
if not self.params.ego_skip_global:
self.ego_head[0][i].weight.data.mul_(relu_gain)
if not self.params.ego_skip_global:
for i in range(1, 5, 2):
self.ego_head[1][i].weight.data.mul_(relu_gain)
self.linear1.weight.data.mul_(relu_gain)
if hasattr(self, 'gru'):
orthogonal(self.gru.weight_ih.data)
orthogonal(self.gru.weight_hh.data)
self.gru.bias_ih.data.fill_(0)
self.gru.bias_hh.data.fill_(0)
if self.params.learn_init_state:
self.init_state = nn.Parameter(torch.randn(1, self.params.hidden_size) * 0.05)
if hasattr(self, 'lstm'):
orthogonal(self.lstm.weight_ih.data)
orthogonal(self.lstm.weight_hh.data)
self.lstm.bias_ih.data.fill_(0)
self.lstm.bias_hh.data.fill_(0)
if self.params.gate_init:
self.lstm.bias_ih.data[self.hidden_size: 2*self.params.hidden_size].fill_(1)
print('hidden gate initialized')
if self.dist.__class__.__name__ == "DiagGaussian":
self.dist.fc_mean.weight.data.mul_(0.01)
def forward(self, inputs, states, masks,
pred_depth=False,
ego_states=None, ego_depths=None,
pos_deltas_origins=None, states2=None,
prev_obs=None, prev_actions=None, curiousity=None):
assert ego_states is not None, 'Trying to apply ego update with states'
assert ego_depths is not None, 'Trying to apply ego update with no depths'
assert pos_deltas_origins is not None, 'Trying to apply ego update with no pdo'
if self.params.ego_curiousity:
assert prev_obs is not None
assert prev_actions is not None
if curiousity is None:
curiousity = self.calculate_curiousity(inputs, prev_obs, pos_deltas_origins, prev_actions)
conv_out = self.conv_head(inputs * (1.0/255.0))
if self.params.ego_curiousity: # concat the curiousity vectors for ego reading
assert conv_out.size(0) == curiousity.size(0)
assert masks.size(0) == curiousity.size(0)
conv_out = torch.cat([conv_out, curiousity*masks.view(-1,1,1,1)], 1)
if inputs.size(0) == states.size(0):
xxs, yys, thetas = pos_deltas_origins[:,0], pos_deltas_origins[:,1], pos_deltas_origins[:,2]
origin_x, origin_y = pos_deltas_origins[:,6], pos_deltas_origins[:,7]
ego_states = self.ego_map.ego_mapper(conv_out, ego_depths, ego_states * masks.view(-1,1,1,1), xxs, yys, thetas, masks, origin_x, origin_y)
x = ego_rots = self.ego_map.rotate_for_read(ego_states, xxs, yys, thetas, origin_x, origin_y)
else:
x = conv_out.view(-1, states.size(0), * conv_out.size()[1:])
masks = masks.view(-1, states.size(0), 1)
read_outputs = []
for i in range(x.size(0)):
xxs, yys, thetas = pos_deltas_origins[i, :,0], pos_deltas_origins[i, :,1], pos_deltas_origins[i, :,2]
origin_x, origin_y = pos_deltas_origins[i, :,6], pos_deltas_origins[i, :,7]
ego_states = self.ego_map.ego_mapper(x[i], ego_depths[i], ego_states * masks[i].unsqueeze(-1).unsqueeze(-1),
xxs, yys, thetas, masks[i], origin_x, origin_y)
read = self.ego_map.rotate_for_read(ego_states, xxs, yys, thetas, origin_x, origin_y)
read_outputs.append(read)
x = ego_rots = torch.cat(read_outputs, 0)
if self.params.ego_skip_global: #do not include a global CNN read on egomap
if self.params.skip_cnn_relu: # Relu was not applied earlier so apply now
conv_out = F.relu(conv_out.view(-1, self.conv_out_size))
else:
conv_out = conv_out.view(-1, self.conv_out_size)
x = self.linear1(conv_out)
x = F.relu(x)
else:
x = ego_reads = self.ego_head(x).view(-1, self.ego_out_size)
if not self.params.ego_skip: #do not include skip connect
x = self.linear1(x)
x = F.relu(x)
if self.params.ego_skip and not self.params.merge_later:
if self.params.skip_cnn_relu: # Relu was not applied earlier so apply now
conv_out = F.relu(conv_out.view(-1, self.conv_out_size))
else:
conv_out = conv_out.view(-1, self.conv_out_size)
x = torch.cat([x, conv_out], dim=1)
x = self.linear1(x)
x = F.relu(x)
if self.params.ego_skip and self.params.merge_later:
if self.params.skip_cnn_relu: # Relu was not applied earlier so apply now
conv_out = F.relu(conv_out.view(-1, self.conv_out_size))
else:
conv_out = conv_out.view(-1, self.conv_out_size)
y = self.linear1(conv_out)
y = F.relu(y)
x = torch.cat([x, y], dim=1)
if hasattr(self, 'gru'):
if inputs.size(0) == states.size(0):
if self.params.learn_init_state:
x = states = self.gru(x, states * masks + (1-masks)*self.init_state.clone().repeat(states.size(0), 1))
else:
x = states = self.gru(x, states * masks)
else:
x = x.view(-1, states.size(0), x.size(1))
outputs = []
for i in range(x.size(0)):
if self.params.learn_init_state:
hx = states = self.gru(x[i], states * masks[i] + (1-masks[i])*self.init_state.clone().repeat(states.size(0), 1))
else:
hx = states = self.gru(x[i], states * masks[i])
outputs.append(hx)
x = torch.cat(outputs, 0)
# note as gru and lstm gave comparable results, just gru is used
if self.params.ego_query:
if self.params.ego_skip_global:
query_vectors = self.query_head(x)
else:
query_vectors = self.query_head(torch.cat([x, ego_reads], dim=1))
if self.params.query_relu:
query_vectors = F.relu(query_vectors)
context_vectors = self.ego_map.query(query_vectors, ego_rots)
# print('context norms')
# print(context_vectors.norm(), x.norm())
x = torch.cat([x, context_vectors], dim=1)
result = {'values': self.critic_linear(x),
'x': x,
'states': states,
'ego_states': ego_states,
'curiousity': curiousity}
return result
def calculate_curiousity(self, inputs, prev_obs, pos_deltas_origins, actions):
with torch.no_grad():
self.forward_model.eval()
actions = actions.view(-1)
# calulate
# 1. VAE encoder + decode of current obs
o2_hat, mus1, logvars1 = self.forward_model.vae(inputs/128.0)
# 2. forward prediction of previous obs with actions and deltas
o1_hat, mus0, logvars0 = self.forward_model.vae(prev_obs/128.0)
deltas = pos_deltas_origins[:, 3:5]
action_emb = self.forward_model.action_embedding(actions, deltas)
# TODO: test logvar conversion
p_in = torch.cat([action_emb, mus0, logvars0], 1)
p_out = self.forward_model.p_mlp(p_in)
mus_t1_hat, logvars_t1_hat = p_out[:,:self.forward_model.args.hidden2], p_out[:,self.forward_model.args.hidden2:]
z = self.forward_model.vae.reparametrize(mus_t1_hat, logvars_t1_hat)
o2t1_hat = self.forward_model.vae.decode(z)
# TODO: compare with abs error
error = ((o2_hat.detach() - o2t1_hat.detach())**2).mean(dim=1, keepdim=True)
error = F.interpolate(error, size=(8,14))
if not self.params.new_padding:
error = error[:,:,2:-2,2:-2]
return error * (1.0/100.0) # normlizing to range of around 0-5
if __name__ == '__main__':
    # Smoke test: build a CNNPolicy and push a dummy observation through the
    # first four layers of its conv head, printing the resulting shape.
    from doom_a2c.arguments import parse_game_args
    params = parse_game_args()
    params.num_actions = 5
    # Earlier interactive experiments with EgoMap0_Policy / NeuralMapPolicy
    # (construction, ego-map queries) were exploratory and are not executed.
    policy = CNNPolicy(3, (3, 64, 112), params)
    dummy_obs = torch.randn(1, 3, 64, 112)
    features = policy.conv_head[:4](dummy_obs)
    print(features.size())
|
[
"torch.nn.LSTMCell",
"torch.randn",
"distributions.Categorical",
"torch.cat",
"torch.cos",
"torch.nn.init.calculate_gain",
"torch.no_grad",
"doom_a2c.arguments.parse_game_args",
"sys.path.append",
"torch.load",
"torch.diag",
"torch.Tensor",
"torch.nn.functional.relu",
"torch.nn.Linear",
"torch.nn.GRUCell",
"torch.qr",
"torch.nn.Tanh",
"torch.nn.Conv2d",
"torch.nn.ReLU",
"torch.nn.Sequential",
"ego_map.EgoMap",
"torch.nn.functional.interpolate",
"torch.sin"
] |
[((201, 222), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (216, 222), False, 'import sys\n'), ((966, 985), 'torch.qr', 'torch.qr', (['flattened'], {}), '(flattened)\n', (974, 985), False, 'import torch\n'), ((1070, 1086), 'torch.diag', 'torch.diag', (['r', '(0)'], {}), '(r, 0)\n', (1080, 1086), False, 'import torch\n'), ((31343, 31360), 'doom_a2c.arguments.parse_game_args', 'parse_game_args', ([], {}), '()\n', (31358, 31360), False, 'from doom_a2c.arguments import parse_game_args\n'), ((31956, 31982), 'torch.randn', 'torch.randn', (['(1)', '(3)', '(64)', '(112)'], {}), '(1, 3, 64, 112)\n', (31967, 31982), False, 'import torch\n'), ((8789, 8819), 'torch.nn.init.calculate_gain', 'nn.init.calculate_gain', (['"""relu"""'], {}), "('relu')\n", (8811, 8819), True, 'import torch.nn as nn\n'), ((16744, 16758), 'ego_map.EgoMap', 'EgoMap', (['params'], {}), '(params)\n', (16750, 16758), False, 'from ego_map import EgoMap\n'), ((22393, 22423), 'torch.nn.init.calculate_gain', 'nn.init.calculate_gain', (['"""relu"""'], {}), "('relu')\n", (22415, 22423), True, 'import torch.nn as nn\n'), ((836, 860), 'torch.Tensor', 'torch.Tensor', (['rows', 'cols'], {}), '(rows, cols)\n', (848, 860), False, 'import torch\n'), ((7164, 7198), 'torch.nn.Linear', 'nn.Linear', (['(self.hidden_size * 2)', '(1)'], {}), '(self.hidden_size * 2, 1)\n', (7173, 7198), True, 'import torch.nn as nn\n'), ((7221, 7274), 'distributions.Categorical', 'Categorical', (['(self.hidden_size * 2)', 'params.num_actions'], {}), '(self.hidden_size * 2, params.num_actions)\n', (7232, 7274), False, 'from distributions import Categorical\n'), ((7320, 7350), 'torch.nn.Linear', 'nn.Linear', (['self.hidden_size', '(1)'], {}), '(self.hidden_size, 1)\n', (7329, 7350), True, 'import torch.nn as nn\n'), ((7375, 7424), 'distributions.Categorical', 'Categorical', (['self.hidden_size', 'params.num_actions'], {}), '(self.hidden_size, params.num_actions)\n', (7386, 7424), False, 'from distributions import 
Categorical\n'), ((7496, 7526), 'torch.nn.Linear', 'nn.Linear', (['self.hidden_size', '(1)'], {}), '(self.hidden_size, 1)\n', (7505, 7526), True, 'import torch.nn as nn\n'), ((7682, 7758), 'torch.load', 'torch.load', (['params.pretrained_vae'], {'map_location': '(lambda storage, loc: storage)'}), '(params.pretrained_vae, map_location=lambda storage, loc: storage)\n', (7692, 7758), False, 'import torch\n'), ((14100, 14128), 'torch.cat', 'torch.cat', (['[rnn_input, x]', '(1)'], {}), '([rnn_input, x], 1)\n', (14109, 14128), False, 'import torch\n'), ((18553, 18596), 'torch.nn.Sequential', 'nn.Sequential', (['self.ego_head', 'ego_extension'], {}), '(self.ego_head, ego_extension)\n', (18566, 18596), True, 'import torch.nn as nn\n'), ((18742, 18773), 'torch.randn', 'torch.randn', (['((1,) + input_shape)'], {}), '((1,) + input_shape)\n', (18753, 18773), False, 'import torch\n'), ((19135, 19194), 'torch.nn.Linear', 'nn.Linear', (['(ego_out_size + conv_out_size)', 'params.hidden_size'], {}), '(ego_out_size + conv_out_size, params.hidden_size)\n', (19144, 19194), True, 'import torch.nn as nn\n'), ((20908, 20940), 'torch.nn.Linear', 'nn.Linear', (['params.hidden_size', '(1)'], {}), '(params.hidden_size, 1)\n', (20917, 20940), True, 'import torch.nn as nn\n'), ((20965, 21016), 'distributions.Categorical', 'Categorical', (['params.hidden_size', 'params.num_actions'], {}), '(params.hidden_size, params.num_actions)\n', (20976, 21016), False, 'from distributions import Categorical\n'), ((21543, 21607), 'torch.load', 'torch.load', (['save_name'], {'map_location': '(lambda storage, loc: storage)'}), '(save_name, map_location=lambda storage, loc: storage)\n', (21553, 21607), False, 'import torch\n'), ((26361, 26387), 'torch.cat', 'torch.cat', (['read_outputs', '(0)'], {}), '(read_outputs, 0)\n', (26370, 26387), False, 'import torch\n'), ((26789, 26798), 'torch.nn.functional.relu', 'F.relu', (['x'], {}), '(x)\n', (26795, 26798), True, 'import torch.nn.functional as F\n'), ((29524, 
29562), 'torch.cat', 'torch.cat', (['[x, context_vectors]'], {'dim': '(1)'}), '([x, context_vectors], dim=1)\n', (29533, 29562), False, 'import torch\n'), ((29898, 29913), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (29911, 29913), False, 'import torch\n'), ((30495, 30537), 'torch.cat', 'torch.cat', (['[action_emb, mus0, logvars0]', '(1)'], {}), '([action_emb, mus0, logvars0], 1)\n', (30504, 30537), False, 'import torch\n'), ((31023, 31057), 'torch.nn.functional.interpolate', 'F.interpolate', (['error'], {'size': '(8, 14)'}), '(error, size=(8, 14))\n', (31036, 31057), True, 'import torch.nn.functional as F\n'), ((4451, 4504), 'torch.nn.Conv2d', 'nn.Conv2d', (['num_inputs', 'params.conv1_size', '(8)'], {'stride': '(4)'}), '(num_inputs, params.conv1_size, 8, stride=4)\n', (4460, 4504), True, 'import torch.nn as nn\n'), ((4549, 4562), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (4556, 4562), True, 'import torch.nn as nn\n'), ((4607, 4667), 'torch.nn.Conv2d', 'nn.Conv2d', (['params.conv1_size', 'params.conv2_size', '(4)'], {'stride': '(2)'}), '(params.conv1_size, params.conv2_size, 4, stride=2)\n', (4616, 4667), True, 'import torch.nn as nn\n'), ((4712, 4725), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (4719, 4725), True, 'import torch.nn as nn\n'), ((4770, 4830), 'torch.nn.Conv2d', 'nn.Conv2d', (['params.conv2_size', 'params.conv3_size', '(3)'], {'stride': '(1)'}), '(params.conv2_size, params.conv3_size, 3, stride=1)\n', (4779, 4830), True, 'import torch.nn as nn\n'), ((4875, 4888), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (4882, 4888), True, 'import torch.nn as nn\n'), ((5034, 5088), 'torch.nn.Conv2d', 'nn.Conv2d', (['params.conv3_size', 'params.num_actions', '(1)', '(1)'], {}), '(params.conv3_size, params.num_actions, 1, 1)\n', (5043, 5088), True, 'import torch.nn as nn\n'), ((5216, 5253), 'torch.nn.Conv2d', 'nn.Conv2d', (['params.conv3_size', '(8)', '(1)', '(1)'], {}), '(params.conv3_size, 8, 1, 1)\n', (5225, 
5253), True, 'import torch.nn as nn\n'), ((5297, 5328), 'torch.randn', 'torch.randn', (['((1,) + input_shape)'], {}), '((1,) + input_shape)\n', (5308, 5328), False, 'import torch\n'), ((5836, 5884), 'torch.nn.GRUCell', 'nn.GRUCell', (['self.conv_out_size', 'self.hidden_size'], {}), '(self.conv_out_size, self.hidden_size)\n', (5846, 5884), True, 'import torch.nn as nn\n'), ((6144, 6193), 'torch.nn.LSTMCell', 'nn.LSTMCell', (['self.conv_out_size', 'self.hidden_size'], {}), '(self.conv_out_size, self.hidden_size)\n', (6155, 6193), True, 'import torch.nn as nn\n'), ((6281, 6328), 'torch.nn.Linear', 'nn.Linear', (['self.conv_out_size', 'self.hidden_size'], {}), '(self.conv_out_size, self.hidden_size)\n', (6290, 6328), True, 'import torch.nn as nn\n'), ((7046, 7093), 'torch.nn.LSTMCell', 'nn.LSTMCell', (['self.hidden_size', 'self.hidden_size'], {}), '(self.hidden_size, self.hidden_size)\n', (7057, 7093), True, 'import torch.nn as nn\n'), ((11414, 11429), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (11427, 11429), False, 'import torch\n'), ((12014, 12023), 'torch.nn.functional.relu', 'F.relu', (['x'], {}), '(x)\n', (12020, 12023), True, 'import torch.nn.functional as F\n'), ((13860, 13881), 'torch.cat', 'torch.cat', (['outputs', '(0)'], {}), '(outputs, 0)\n', (13869, 13881), False, 'import torch\n'), ((16940, 17026), 'torch.nn.Conv2d', 'nn.Conv2d', (['(params.ego_num_chans + ac)', 'params.ego_num_chans', '(3)'], {'stride': '(1)', 'padding': '(1)'}), '(params.ego_num_chans + ac, params.ego_num_chans, 3, stride=1,\n padding=1)\n', (16949, 17026), True, 'import torch.nn as nn\n'), ((17067, 17080), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (17074, 17080), True, 'import torch.nn as nn\n'), ((17125, 17202), 'torch.nn.Conv2d', 'nn.Conv2d', (['params.ego_num_chans', 'params.ego_num_chans', '(4)'], {'stride': '(2)', 'padding': '(0)'}), '(params.ego_num_chans, params.ego_num_chans, 4, stride=2, padding=0)\n', (17134, 17202), True, 'import torch.nn as nn\n'), 
((17247, 17260), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (17254, 17260), True, 'import torch.nn as nn\n'), ((17305, 17382), 'torch.nn.Conv2d', 'nn.Conv2d', (['params.ego_num_chans', 'params.ego_num_chans', '(4)'], {'stride': '(2)', 'padding': '(0)'}), '(params.ego_num_chans, params.ego_num_chans, 4, stride=2, padding=0)\n', (17314, 17382), True, 'import torch.nn as nn\n'), ((17427, 17436), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (17434, 17436), True, 'import torch.nn as nn\n'), ((17485, 17588), 'torch.randn', 'torch.randn', (['((1,) + (params.ego_num_chans + ac, 2 * params.ego_half_size, 2 * params.\n ego_half_size))'], {}), '((1,) + (params.ego_num_chans + ac, 2 * params.ego_half_size, 2 *\n params.ego_half_size))\n', (17496, 17588), False, 'import torch\n'), ((19394, 19438), 'torch.nn.Linear', 'nn.Linear', (['conv_out_size', 'params.hidden_size'], {}), '(conv_out_size, params.hidden_size)\n', (19403, 19438), True, 'import torch.nn as nn\n'), ((19480, 19523), 'torch.nn.Linear', 'nn.Linear', (['ego_out_size', 'params.hidden_size'], {}), '(ego_out_size, params.hidden_size)\n', (19489, 19523), True, 'import torch.nn as nn\n'), ((19732, 19797), 'torch.nn.GRUCell', 'nn.GRUCell', (['(params.hidden_size + ego_out_size)', 'params.hidden_size'], {}), '(params.hidden_size + ego_out_size, params.hidden_size)\n', (19742, 19797), True, 'import torch.nn as nn\n'), ((19843, 19893), 'torch.nn.GRUCell', 'nn.GRUCell', (['params.hidden_size', 'params.hidden_size'], {}), '(params.hidden_size, params.hidden_size)\n', (19853, 19893), True, 'import torch.nn as nn\n'), ((20209, 20254), 'torch.nn.Linear', 'nn.Linear', (['params.hidden_size', 'query_out_size'], {}), '(params.hidden_size, query_out_size)\n', (20218, 20254), True, 'import torch.nn as nn\n'), ((20307, 20367), 'torch.nn.Linear', 'nn.Linear', (['(ego_out_size + params.hidden_size)', 'query_out_size'], {}), '(ego_out_size + params.hidden_size, query_out_size)\n', (20316, 20367), True, 'import torch.nn as 
nn\n'), ((20460, 20524), 'torch.nn.Linear', 'nn.Linear', (['(params.hidden_size + params.ego_num_chans + 2 + ac)', '(1)'], {}), '(params.hidden_size + params.ego_num_chans + 2 + ac, 1)\n', (20469, 20524), True, 'import torch.nn as nn\n'), ((20553, 20641), 'distributions.Categorical', 'Categorical', (['(params.hidden_size + params.ego_num_chans + 2 + ac)', 'params.num_actions'], {}), '(params.hidden_size + params.ego_num_chans + 2 + ac, params.\n num_actions)\n', (20564, 20641), False, 'from distributions import Categorical\n'), ((20692, 20752), 'torch.nn.Linear', 'nn.Linear', (['(params.hidden_size + params.ego_num_chans + ac)', '(1)'], {}), '(params.hidden_size + params.ego_num_chans + ac, 1)\n', (20701, 20752), True, 'import torch.nn as nn\n'), ((20781, 20860), 'distributions.Categorical', 'Categorical', (['(params.hidden_size + params.ego_num_chans + ac)', 'params.num_actions'], {}), '(params.hidden_size + params.ego_num_chans + ac, params.num_actions)\n', (20792, 20860), False, 'from distributions import Categorical\n'), ((27036, 27045), 'torch.nn.functional.relu', 'F.relu', (['x'], {}), '(x)\n', (27042, 27045), True, 'import torch.nn.functional as F\n'), ((27422, 27453), 'torch.cat', 'torch.cat', (['[x, conv_out]'], {'dim': '(1)'}), '([x, conv_out], dim=1)\n', (27431, 27453), False, 'import torch\n'), ((27510, 27519), 'torch.nn.functional.relu', 'F.relu', (['x'], {}), '(x)\n', (27516, 27519), True, 'import torch.nn.functional as F\n'), ((27942, 27951), 'torch.nn.functional.relu', 'F.relu', (['y'], {}), '(y)\n', (27948, 27951), True, 'import torch.nn.functional as F\n'), ((27972, 27996), 'torch.cat', 'torch.cat', (['[x, y]'], {'dim': '(1)'}), '([x, y], dim=1)\n', (27981, 27996), False, 'import torch\n'), ((28872, 28893), 'torch.cat', 'torch.cat', (['outputs', '(0)'], {}), '(outputs, 0)\n', (28881, 28893), False, 'import torch\n'), ((29293, 29314), 'torch.nn.functional.relu', 'F.relu', (['query_vectors'], {}), '(query_vectors)\n', (29299, 29314), True, 'import 
torch.nn.functional as F\n'), ((6036, 6082), 'torch.nn.GRUCell', 'nn.GRUCell', (['self.hidden_size', 'self.hidden_size'], {}), '(self.hidden_size, self.hidden_size)\n', (6046, 6082), True, 'import torch.nn as nn\n'), ((6595, 6645), 'torch.nn.GRUCell', 'nn.GRUCell', (['(self.hidden_size + 4)', 'self.hidden_size'], {}), '(self.hidden_size + 4, self.hidden_size)\n', (6605, 6645), True, 'import torch.nn as nn\n'), ((6741, 6787), 'torch.nn.GRUCell', 'nn.GRUCell', (['self.hidden_size', 'self.hidden_size'], {}), '(self.hidden_size, self.hidden_size)\n', (6751, 6787), True, 'import torch.nn as nn\n'), ((6939, 6985), 'torch.nn.GRUCell', 'nn.GRUCell', (['self.hidden_size', 'self.hidden_size'], {}), '(self.hidden_size, self.hidden_size)\n', (6949, 6985), True, 'import torch.nn as nn\n'), ((14738, 14802), 'torch.nn.Conv2d', 'nn.Conv2d', (['num_inputs', 'params.conv1_size', '(8)'], {'stride': '(4)', 'padding': '(2)'}), '(num_inputs, params.conv1_size, 8, stride=4, padding=2)\n', (14747, 14802), True, 'import torch.nn as nn\n'), ((14848, 14861), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (14855, 14861), True, 'import torch.nn as nn\n'), ((14907, 14978), 'torch.nn.Conv2d', 'nn.Conv2d', (['params.conv1_size', 'params.conv2_size', '(4)'], {'stride': '(2)', 'padding': '(1)'}), '(params.conv1_size, params.conv2_size, 4, stride=2, padding=1)\n', (14916, 14978), True, 'import torch.nn as nn\n'), ((15024, 15037), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (15031, 15037), True, 'import torch.nn as nn\n'), ((15083, 15157), 'torch.nn.Conv2d', 'nn.Conv2d', (['params.conv2_size', 'params.ego_num_chans', '(3)'], {'stride': '(1)', 'padding': '(1)'}), '(params.conv2_size, params.ego_num_chans, 3, stride=1, padding=1)\n', (15092, 15157), True, 'import torch.nn as nn\n'), ((15224, 15288), 'torch.nn.Conv2d', 'nn.Conv2d', (['num_inputs', 'params.conv1_size', '(8)'], {'stride': '(4)', 'padding': '(2)'}), '(num_inputs, params.conv1_size, 8, stride=4, padding=2)\n', 
(15233, 15288), True, 'import torch.nn as nn\n'), ((15334, 15347), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (15341, 15347), True, 'import torch.nn as nn\n'), ((15393, 15464), 'torch.nn.Conv2d', 'nn.Conv2d', (['params.conv1_size', 'params.conv2_size', '(4)'], {'stride': '(2)', 'padding': '(1)'}), '(params.conv1_size, params.conv2_size, 4, stride=2, padding=1)\n', (15402, 15464), True, 'import torch.nn as nn\n'), ((15510, 15523), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (15517, 15523), True, 'import torch.nn as nn\n'), ((15569, 15643), 'torch.nn.Conv2d', 'nn.Conv2d', (['params.conv2_size', 'params.ego_num_chans', '(3)'], {'stride': '(1)', 'padding': '(1)'}), '(params.conv2_size, params.ego_num_chans, 3, stride=1, padding=1)\n', (15578, 15643), True, 'import torch.nn as nn\n'), ((15689, 15698), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (15696, 15698), True, 'import torch.nn as nn\n'), ((15811, 15864), 'torch.nn.Conv2d', 'nn.Conv2d', (['num_inputs', 'params.conv1_size', '(8)'], {'stride': '(4)'}), '(num_inputs, params.conv1_size, 8, stride=4)\n', (15820, 15864), True, 'import torch.nn as nn\n'), ((15910, 15923), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (15917, 15923), True, 'import torch.nn as nn\n'), ((15969, 16029), 'torch.nn.Conv2d', 'nn.Conv2d', (['params.conv1_size', 'params.conv2_size', '(4)'], {'stride': '(2)'}), '(params.conv1_size, params.conv2_size, 4, stride=2)\n', (15978, 16029), True, 'import torch.nn as nn\n'), ((16075, 16088), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (16082, 16088), True, 'import torch.nn as nn\n'), ((16134, 16197), 'torch.nn.Conv2d', 'nn.Conv2d', (['params.conv2_size', 'params.ego_num_chans', '(3)'], {'stride': '(1)'}), '(params.conv2_size, params.ego_num_chans, 3, stride=1)\n', (16143, 16197), True, 'import torch.nn as nn\n'), ((16264, 16317), 'torch.nn.Conv2d', 'nn.Conv2d', (['num_inputs', 'params.conv1_size', '(8)'], {'stride': '(4)'}), '(num_inputs, 
params.conv1_size, 8, stride=4)\n', (16273, 16317), True, 'import torch.nn as nn\n'), ((16363, 16376), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (16370, 16376), True, 'import torch.nn as nn\n'), ((16422, 16482), 'torch.nn.Conv2d', 'nn.Conv2d', (['params.conv1_size', 'params.conv2_size', '(4)'], {'stride': '(2)'}), '(params.conv1_size, params.conv2_size, 4, stride=2)\n', (16431, 16482), True, 'import torch.nn as nn\n'), ((16528, 16541), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (16535, 16541), True, 'import torch.nn as nn\n'), ((16587, 16650), 'torch.nn.Conv2d', 'nn.Conv2d', (['params.conv2_size', 'params.ego_num_chans', '(3)'], {'stride': '(1)'}), '(params.conv2_size, params.ego_num_chans, 3, stride=1)\n', (16596, 16650), True, 'import torch.nn as nn\n'), ((16696, 16705), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (16703, 16705), True, 'import torch.nn as nn\n'), ((18002, 18030), 'torch.nn.Linear', 'nn.Linear', (['ego_out_size', '(256)'], {}), '(ego_out_size, 256)\n', (18011, 18030), True, 'import torch.nn as nn\n'), ((18066, 18079), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (18073, 18079), True, 'import torch.nn as nn\n'), ((18115, 18153), 'torch.nn.Linear', 'nn.Linear', (['(256)', 'params.ego_hidden_size'], {}), '(256, params.ego_hidden_size)\n', (18124, 18153), True, 'import torch.nn as nn\n'), ((18189, 18198), 'torch.nn.Tanh', 'nn.Tanh', ([], {}), '()\n', (18196, 18198), True, 'import torch.nn as nn\n'), ((18310, 18338), 'torch.nn.Linear', 'nn.Linear', (['ego_out_size', '(256)'], {}), '(ego_out_size, 256)\n', (18319, 18338), True, 'import torch.nn as nn\n'), ((18374, 18387), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (18381, 18387), True, 'import torch.nn as nn\n'), ((18423, 18461), 'torch.nn.Linear', 'nn.Linear', (['(256)', 'params.ego_hidden_size'], {}), '(256, params.ego_hidden_size)\n', (18432, 18461), True, 'import torch.nn as nn\n'), ((18497, 18510), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], 
{}), '(True)\n', (18504, 18510), True, 'import torch.nn as nn\n'), ((29171, 29203), 'torch.cat', 'torch.cat', (['[x, ego_reads]'], {'dim': '(1)'}), '([x, ego_reads], dim=1)\n', (29180, 29203), False, 'import torch\n'), ((9470, 9502), 'torch.randn', 'torch.randn', (['(1)', 'self.hidden_size'], {}), '(1, self.hidden_size)\n', (9481, 9502), False, 'import torch\n'), ((10133, 10165), 'torch.randn', 'torch.randn', (['(1)', 'self.hidden_size'], {}), '(1, self.hidden_size)\n', (10144, 10165), False, 'import torch\n'), ((23211, 23250), 'torch.randn', 'torch.randn', (['(1)', 'self.params.hidden_size'], {}), '(1, self.params.hidden_size)\n', (23222, 23250), False, 'import torch\n'), ((12688, 12706), 'torch.sin', 'torch.sin', (['dthetas'], {}), '(dthetas)\n', (12697, 12706), False, 'import torch\n'), ((12708, 12726), 'torch.cos', 'torch.cos', (['dthetas'], {}), '(dthetas)\n', (12717, 12726), False, 'import torch\n'), ((13651, 13669), 'torch.sin', 'torch.sin', (['dthetas'], {}), '(dthetas)\n', (13660, 13669), False, 'import torch\n'), ((13671, 13689), 'torch.cos', 'torch.cos', (['dthetas'], {}), '(dthetas)\n', (13680, 13689), False, 'import torch\n')]
|
#!/usr/bin/env python
from collections import namedtuple
from typing import (
Dict,
List,
Optional,
)
from hummingbot.core.data_type.order_book_row import OrderBookRow
from hummingbot.core.data_type.order_book_message import (
OrderBookMessage,
OrderBookMessageType,
)
# Raw rows as delivered in NDAX websocket payloads: order-book (Level2)
# entries and trade entries.
_ORDER_BOOK_ENTRY_FIELDS = [
    "mdUpdateId",
    "accountId",
    "actionDateTime",
    "actionType",
    "lastTradePrice",
    "orderId",
    "price",
    "productPairCode",
    "quantity",
    "side",
]
_TRADE_ENTRY_FIELDS = [
    "tradeId",
    "productPairCode",
    "quantity",
    "price",
    "order1",
    "order2",
    "tradeTime",
    "direction",
    "takerSide",
    "blockTrade",
    "orderClientId",
]
NdaxOrderBookEntry = namedtuple("NdaxOrderBookEntry", _ORDER_BOOK_ENTRY_FIELDS)
NdaxTradeEntry = namedtuple("NdaxTradeEntry", _TRADE_ENTRY_FIELDS)
class NdaxOrderBookMessage(OrderBookMessage):
    """Order book message with NDAX-specific payload handling.

    ``content["data"]`` holds ``NdaxOrderBookEntry`` rows for snapshot/diff
    messages and ``NdaxTradeEntry`` rows for trade messages.
    """

    _DELETE_ACTION_TYPE = 2
    _BUY_SIDE = 0
    _SELL_SIDE = 1

    def __new__(
        cls,
        message_type: OrderBookMessageType,
        content: Dict[str, any],
        timestamp: Optional[float] = None,
        *args,
        **kwargs,
    ):
        # Snapshots must be stamped explicitly; other message types may fall
        # back to the timestamp embedded in the payload.
        if timestamp is None and message_type is OrderBookMessageType.SNAPSHOT:
            raise ValueError("timestamp must not be None when initializing snapshot messages.")
        effective_timestamp = content["timestamp"] if timestamp is None else timestamp
        return super(NdaxOrderBookMessage, cls).__new__(
            cls, message_type, content, timestamp=effective_timestamp, *args, **kwargs
        )

    @property
    def update_id(self) -> int:
        message_type = self.type
        if message_type == OrderBookMessageType.TRADE:
            entry: NdaxTradeEntry = self.content["data"][0]
            return int(entry.tradeTime)
        if message_type in (OrderBookMessageType.DIFF, OrderBookMessageType.SNAPSHOT):
            entry: NdaxOrderBookEntry = self.content["data"][0]
            return int(entry.actionDateTime)

    @property
    def trade_id(self) -> int:
        first_trade: NdaxTradeEntry = self.content["data"][0]
        return first_trade.tradeId

    @property
    def trading_pair(self) -> str:
        return self.content["trading_pair"]

    @property
    def asks(self) -> List[OrderBookRow]:
        return self._rows_for_side(self._SELL_SIDE)

    @property
    def bids(self) -> List[OrderBookRow]:
        return self._rows_for_side(self._BUY_SIDE)

    def _rows_for_side(self, side: int) -> List[OrderBookRow]:
        # One row per payload entry of the requested side, in payload order.
        return [
            self._order_book_row_for_entry(entry)
            for entry in self.content["data"]
            if entry.side == side
        ]

    def _order_book_row_for_entry(self, entry: NdaxOrderBookEntry) -> OrderBookRow:
        # Delete actions are represented as a zero-amount row at the entry price.
        if entry.actionType == self._DELETE_ACTION_TYPE:
            amount = 0.0
        else:
            amount = float(entry.quantity)
        return OrderBookRow(float(entry.price), amount, entry.mdUpdateId)

    def __eq__(self, other) -> bool:
        if type(self) != type(other):
            return False
        return self.type == other.type and self.timestamp == other.timestamp

    def __lt__(self, other) -> bool:
        # Ties on timestamp order as snapshot < diff < trade.
        if self.timestamp == other.timestamp:
            return self.type.value < other.type.value
        return self.timestamp < other.timestamp

    def __hash__(self) -> int:
        return hash((self.type, self.timestamp))
|
[
"collections.namedtuple",
"hummingbot.core.data_type.order_book_row.OrderBookRow"
] |
[((313, 463), 'collections.namedtuple', 'namedtuple', (['"""NdaxOrderBookEntry"""', '"""mdUpdateId accountId actionDateTime actionType lastTradePrice orderId price productPairCode quantity side"""'], {}), "('NdaxOrderBookEntry',\n 'mdUpdateId accountId actionDateTime actionType lastTradePrice orderId price productPairCode quantity side'\n )\n", (323, 463), False, 'from collections import namedtuple\n'), ((472, 620), 'collections.namedtuple', 'namedtuple', (['"""NdaxTradeEntry"""', '"""tradeId productPairCode quantity price order1 order2 tradeTime direction takerSide blockTrade orderClientId"""'], {}), "('NdaxTradeEntry',\n 'tradeId productPairCode quantity price order1 order2 tradeTime direction takerSide blockTrade orderClientId'\n )\n", (482, 620), False, 'from collections import namedtuple\n'), ((2647, 2685), 'hummingbot.core.data_type.order_book_row.OrderBookRow', 'OrderBookRow', (['price', 'amount', 'update_id'], {}), '(price, amount, update_id)\n', (2659, 2685), False, 'from hummingbot.core.data_type.order_book_row import OrderBookRow\n')]
|
# Bulk-download a training image set from Bing image search.
from bing_image_downloader import downloader
# Arguments: the search query; limit = maximum number of images to fetch;
# output_dir = destination directory; adult_filter_off=False leaves Bing's
# adult-content filter ENABLED (despite the parameter name).
downloader.download("people from drone view", limit=1000, output_dir='images', adult_filter_off=False)
# Note: the first few results may be irrelevant; that is expected — just let
# the program keep running.
|
[
"bing_image_downloader.downloader.download"
] |
[((46, 153), 'bing_image_downloader.downloader.download', 'downloader.download', (['"""people from drone view"""'], {'limit': '(1000)', 'output_dir': '"""images"""', 'adult_filter_off': '(False)'}), "('people from drone view', limit=1000, output_dir=\n 'images', adult_filter_off=False)\n", (65, 153), False, 'from bing_image_downloader import downloader\n')]
|
# Copyright (c) 2019-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
import pytest
from codegen_sources.preprocessing.lang_processors.java_processor import JavaProcessor
from pathlib import Path
# Shared Java tokenizer under test; the tree-sitter grammar directory is
# resolved relative to this test file's location in the repository.
processor = JavaProcessor(root_folder=Path(__file__).parents[4].joinpath("tree-sitter"))
# (java snippet, expected tokens) pairs for tokenization WITHOUT comments.
# String literals are processed: spaces become ▁ and escapes become separate
# tokens (see the expected token lists below).
TESTS = []
TESTS.append(
    (
        r"""
public class HelloWorld
{
public void main(String[] args) {
System.out.println("Hello \n World!");
}
}""",
        [
            "public",
            "class",
            "HelloWorld",
            "{",
            "public",
            "void",
            "main",
            "(",
            "String",
            "[",
            "]",
            "args",
            ")",
            "{",
            "System",
            ".",
            "out",
            ".",
            "println",
            "(",
            '" Hello ▁ \\n ▁ World ! "',
            ")",
            ";",
            "}",
            "}",
        ],
    )
)
TESTS.append(
    (
        r"""
overload((byte)1);
overload(1L);
overload(1.0f);""",
        [
            "overload",
            "(",
            "(",
            "byte",
            ")",
            "1",
            ")",
            ";",
            "overload",
            "(",
            "1L",
            ")",
            ";",
            "overload",
            "(",
            "1.0f",
            ")",
            ";",
        ],
    )
)
TESTS.append(
    (
        r"""Runnable r = ()-> System.out.print("Run method");""",
        [
            "Runnable",
            "r",
            "=",
            "(",
            ")",
            "->",
            "System",
            ".",
            "out",
            ".",
            "print",
            "(",
            '" Run ▁ method "',
            ")",
            ";",
        ],
    )
)
TESTS.append(
    (
        r"""String s = "Hi I am\nMarie";""",
        ["String", "s", "=", '" Hi ▁ I ▁ am \\n Marie "', ";"],
    )
)
# Java snippets used only for tokenize -> detokenize round-trip checks (no
# expected token list); some deliberately contain unusual or invalid syntax
# to exercise tokenizer robustness.
TESTS2 = []
TESTS2.append(
    r"""
import java.util.concurrent.TimeUnit;
public class Mensuration{ //mensuration of a child
private int height;
private int weight;
private String child_name;
public Mensuration(int height, int weight, String name):{
this.height = height;
this.weight = weight;
this.child_name = name;
}
public int get_height(){
return height;
}
public int get_weight(){
return weight;
}
public String get_name(){
String s = "Name:\n" + child_name;
return s;
}
}"""
)
TESTS2.append(
    r"""
private enum Answer {
YES {
@Override public String toString() {
return "yes";
}
},
NO,
MAYBE
}"""
)
TESTS2.append(
    r"""
return new MyClass() {
@Override public void method() {
if (condition()) {
try {
something();
} catch (ProblemException e) {
recover();
}
} else if (otherCondition()) {
somethingElse();
} else {
lastThing();
}
}
};"""
)
TESTS2.append(
    r"""
public boolean equals(Object o_) {
if ( o_ == null ) {
return false;
}
if ( o_.getClass() != this.getClass() ) {
return false;
}
Pair<?, ?> o = (Pair<?, ?>) o_;
return x.equals(o.x) && y.equals(o.y);
}
}
"""
)
# (java snippet, expected tokens) pairs for tokenization KEEPING comments;
# comments become single tokens carrying STRNEWLINE / ENDCOM markers.
TESTS3 = []
TESTS3.append(
    (
        r"""/*
This is the docstring !!
*/
/* ---------- */
public class HelloWorld
{
public void main(String[] args) {
System.out.println("Hello \n World!");
}
}""",
        [
            "/* STRNEWLINE This ▁ is ▁ the ▁ docstring ▁ ! ! STRNEWLINE */",
            "public",
            "class",
            "HelloWorld",
            "{",
            "public",
            "void",
            "main",
            "(",
            "String",
            "[",
            "]",
            "args",
            ")",
            "{",
            "System",
            ".",
            "out",
            ".",
            "println",
            "(",
            '" Hello ▁ \\n ▁ World ! "',
            ")",
            ";",
            "}",
            "}",
        ],
    )
)
TESTS3.append(
    (
        r"""
overload((byte)1);
// this is my comfff
// ----- ***
overload(1L); // this is my comfff
overload(1.0f);""",
        [
            "overload",
            "(",
            "(",
            "byte",
            ")",
            "1",
            ")",
            ";",
            "// ▁ this ▁ is ▁ my ▁ comfff ENDCOM",
            "overload",
            "(",
            "1L",
            ")",
            ";",
            "// ▁ this ▁ is ▁ my ▁ comfff ENDCOM",
            "overload",
            "(",
            "1.0f",
            ")",
            ";",
        ],
    )
)
# Snippets already written in tokenized form; detokenize(tokenize(x)) must
# return x up to surrounding whitespace (see test_tokenize_detokenize).
TESTS_TOKENIZE_DETOKENIZE_STRING = [
    r"""public int read ( ) throws IOException {
int current = super . read ( ) ;
if ( current == '\r' || ( current == '\n' && lastChar != '\r' ) ) {
lineCounter ++ ;
}
lastChar = current ;
return lastChar ;
}""",
    r"""public int curly_brackets ( ) throws IOException {
System . out . println ( "This } is the output" ) ;
System . out . println ( "This {} is the output" ) ;
System . out . println ( '}' ) ;
}""",
    r"""public int commas ( ) throws IOException {
System . out . println ( "This ; is the output" ) ;
System . out . println ( "This , is the output" ) ;
System . out . println ( ';' ) ;
System . out . println ( ',' ) ;
}""",
    r"""public void inException ( ) {
throw new IllegalArgumentException ( "Type \'" + typeToEvaluate + "\' is not a Class, " + "ParameterizedType, GenericArrayType or TypeVariable. Can't extract type." ) ;
}
""",
]
# (java snippet, expected tokens) pairs for process_strings=False: string
# literals and comments are kept verbatim as single tokens (no ▁ markers or
# escape splitting inside them).
TESTS_DONT_PROCESS_STRINGS = [
    (
        r"""
public class HelloWorld
{
// This is a comment
public void main(String[] args) {
System.out.println("Hello \n World!");
}
}""",
        [
            "public",
            "class",
            "HelloWorld",
            "{",
            "// This is a comment ENDCOM",
            "public",
            "void",
            "main",
            "(",
            "String",
            "[",
            "]",
            "args",
            ")",
            "{",
            "System",
            ".",
            "out",
            ".",
            "println",
            "(",
            '"Hello \\n World!"',
            ")",
            ";",
            "}",
            "}",
        ],
    ),
    (
        r"""
public class HelloEarth
{
/* This is a
multiline
comment */
public void main(String[] args) {
System.out.println("Hello \nEarth!");
}
}""",
        [
            "public",
            "class",
            "HelloEarth",
            "{",
            "/* This is a\\n multiline\\n comment */",
            "public",
            "void",
            "main",
            "(",
            "String",
            "[",
            "]",
            "args",
            ")",
            "{",
            "System",
            ".",
            "out",
            ".",
            "println",
            "(",
            '"Hello \\nEarth!"',
            ")",
            ";",
            "}",
            "}",
        ],
    ),
]
# Fixture containing literal carriage-return characters (note: NOT a raw
# string); the expected tokens show the \r characters being dropped.
TESTS_BACK_R_CHAR = [
    (
        """
public class HelloWorld
{\r
public void main(String[] args) {
System.out.println("Hello \rWorld!");
}
}""",
        [
            "public",
            "class",
            "HelloWorld",
            "{",
            "public",
            "void",
            "main",
            "(",
            "String",
            "[",
            "]",
            "args",
            ")",
            "{",
            "System",
            ".",
            "out",
            ".",
            "println",
            "(",
            '"Hello World!"',
            ")",
            ";",
            "}",
            "}",
        ],
    )
]
# (java snippet, expected tokens) checking that import statements are
# tokenized together with the class body.
TESTS_IMPORTS = [
    (
        (
            r"""
import java.lang.*;
import javafx.util.Pair;
public class HelloWorld
{
public void main(String[] args) {
System.out.println("Hello \n World!");
}
}""",
            [
                "import",
                "java",
                ".",
                "lang",
                ".",
                "*",
                ";",
                "import",
                "javafx",
                ".",
                "util",
                ".",
                "Pair",
                ";",
                "public",
                "class",
                "HelloWorld",
                "{",
                "public",
                "void",
                "main",
                "(",
                "String",
                "[",
                "]",
                "args",
                ")",
                "{",
                "System",
                ".",
                "out",
                ".",
                "println",
                "(",
                '" Hello ▁ \\n ▁ World ! "',
                ")",
                ";",
                "}",
                "}",
            ],
        )
    )
]
def test_java_tokenizer_discarding_comments():
    """Tokenizing without comments must reproduce the expected token streams."""
    for x, expected in TESTS:
        actual = processor.tokenize_code(x)
        if actual == expected:
            continue
        mismatches = [
            j for j, (want, got) in enumerate(zip(expected, actual)) if want != got
        ]
        last_diff = mismatches[-1] if mismatches else -1
        raise Exception(
            f"Difference at {last_diff}\nExpected:\n==========\n{expected}\nbut found:\n==========\n{actual}"
        )
def test_tokenize_imports():
    """Import statements must be tokenized exactly as expected."""
    for x, expected in TESTS_IMPORTS:
        actual = processor.tokenize_code(x)
        if actual == expected:
            continue
        mismatches = [
            j for j, (want, got) in enumerate(zip(expected, actual)) if want != got
        ]
        last_diff = mismatches[-1] if mismatches else -1
        raise Exception(
            f"Difference at {last_diff}\nExpected:\n==========\n{expected}\nbut found:\n==========\n{actual}"
        )
def test_java_detokenizer_discarding_comments():
    """Round-trip check: tokenize -> detokenize -> tokenize must be stable
    when comments are discarded.

    Bug fix: the original condition was ``if tokens != tokens:`` — a variable
    compared against itself, which is always False, so this test could never
    fail. It now compares against the re-tokenized stream ``tokens_``.
    """
    for x in [t[0] for t in TESTS] + [t[0] for t in TESTS3] + TESTS2:
        tokens = processor.tokenize_code(x)
        detokenized = processor.detokenize_code(tokens)
        tokens_ = processor.tokenize_code(detokenized)
        if tokens != tokens_:  # was `tokens != tokens`: a no-op comparison
            line_diff = [
                j
                for j, (line, line_) in enumerate(zip(tokens, tokens_))
                if line != line_
            ]
            raise Exception(
                f"Difference at {line_diff}\n========== Original:\n{x}\n========== Tokenized {tokens} \n Detokenized:\n{detokenized} \n Retokenized {tokens_}"
            )
def test_java_tokenizer_keeping_comments():
    """Tokenizing with keep_comments=True must reproduce the expected tokens."""
    for x, expected in TESTS3:
        actual = processor.tokenize_code(x, keep_comments=True)
        if actual == expected:
            continue
        mismatches = [
            j for j, (want, got) in enumerate(zip(expected, actual)) if want != got
        ]
        last_diff = mismatches[-1] if mismatches else -1
        raise Exception(
            f"Difference at {last_diff}\nExpected:\n==========\n{expected}\nbut found:\n==========\n{actual}"
        )
def test_dont_process_strings():
for i, (x, y) in enumerate(TESTS_DONT_PROCESS_STRINGS):
y_ = processor.tokenize_code(x, keep_comments=True, process_strings=False)
print(y_)
if y_ != y:
line_diff = [
j for j, (line, line_) in enumerate(zip(y, y_)) if line != line_
]
line_diff = line_diff[-1] if len(line_diff) > 0 else -1
raise Exception(
f"Difference at {line_diff}\nExpected:\n==========\n{y}\nbut found:\n==========\n{y_}"
)
def test_backr_chars():
for i, (x, y) in enumerate(TESTS_BACK_R_CHAR):
y_ = processor.tokenize_code(x, keep_comments=True, process_strings=False)
print(y_)
if y_ != y:
line_diff = [
j for j, (line, line_) in enumerate(zip(y, y_)) if line != line_
]
line_diff = line_diff[-1] if len(line_diff) > 0 else -1
raise Exception(
f"Difference at {line_diff}\nExpected:\n==========\n{y}\nbut found:\n==========\n{y_}"
)
def test_java_detokenizer_keeping_comments():
for i, x in enumerate([x[0] for x in TESTS] + [x[0] for x in TESTS3] + TESTS2):
tokens = processor.tokenize_code(x, keep_comments=True)
x_ = processor.detokenize_code(tokens)
tokens_ = processor.tokenize_code(x_, keep_comments=True)
if tokens != tokens_:
line_diff = [
j
for j, (line, line_) in enumerate(zip(tokens, tokens_))
if line != line_
]
raise Exception(
f"Difference at {line_diff}\n========== Original:\n{x}\n========== Tokenized {tokens} \n Detokenized:\n{x_} \n Retokenized {tokens_}"
)
def test_tokenize_detokenize():
test_detokenize_invertible(TESTS_TOKENIZE_DETOKENIZE_STRING)
@pytest.mark.skip("Helper function")
def test_detokenize_invertible(test_examples):
for i, x in enumerate(test_examples):
x_ = processor.detokenize_code(processor.tokenize_code(x, keep_comments=True))
if x_.strip() != x.strip():
raise Exception(
f"Expected:\n==========\n{x.strip()}\nbut found:\n==========\n{x_.strip()}"
)
|
[
"pathlib.Path",
"pytest.mark.skip"
] |
[((13180, 13215), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Helper function"""'], {}), "('Helper function')\n", (13196, 13215), False, 'import pytest\n'), ((359, 373), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (363, 373), False, 'from pathlib import Path\n')]
|
"""
Script to find all contrast curve .csv files,
read them, and re-write them in the TRICERATOPS
contrast folder using the required data format.
"""
# CDD
# Created: 5/28/22
# Updated: 5/28/22
import numpy as np
import pandas as pd
import os as os
import glob as glob
#Defaults
verbose = False
#Files and directories
tridir = '/Users/courtney/Documents/data/toi_paper_data/contrast_curves_for_triceratops/'
#Get list of final images
namestr = '/Users/courtney/Documents/data/shaneAO/*/reduced*/*/*/contrast_curve.csv'
flist = glob.glob(namestr)
print('Files: ', len(flist))
#Loop through these files!
counter = 0
for ff in np.arange(len(flist)):
file = flist[ff]
#Use current filename to set final filename
parts = file.split('/')
filt = parts[-2] #filter is second to last part of filename
tic = parts[-3] #TIC is third to last part of filename
night = parts[-4].split('reduced_')[1] #reduced_[NIGHT] is fourth to last part of filename
#Don't include Kepler or K2 targets
if tic[0] == 'K': #Catch Kepler or K2 prefixes
continue
if tic[0] == 'E': #catch EPIC prefixes
continue
#Remove T or TIC prefix
if 'TIC' in tic:
if verbose:
print('TIC name: ', tic)
tic = tic.split('TIC')[1]
if verbose:
print('renamed as : ', tic)
if 'T' in tic:
if verbose:
print('T name: ', tic)
tic = tic.split('T')[1]
if verbose:
print('renamed as : ', tic)
#Recast to drop leading zeros and spaces
tic = str(int(tic))
#Set output file
outname = tic+'_'+filt+'_'+night+'_contrast_curve.csv'
#Read in the contrast curve
c = pd.read_csv(file)
#Drop the error column
c = c[['arcsec','dmag']]
#Don't keep any rows with missing values
c = c.dropna()
#Write TRICERATOPS-friendly output file
c.to_csv(tridir+outname,index=False,header=False)
counter += 1
print('Saved ', counter, ' contrast curves in TRICERATOPS format.')
|
[
"pandas.read_csv",
"glob.glob"
] |
[((533, 551), 'glob.glob', 'glob.glob', (['namestr'], {}), '(namestr)\n', (542, 551), True, 'import glob as glob\n'), ((1703, 1720), 'pandas.read_csv', 'pd.read_csv', (['file'], {}), '(file)\n', (1714, 1720), True, 'import pandas as pd\n')]
|
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS
Community Edition) available.
Copyright (C) 2017-2018 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import urllib.parse
from django.contrib.auth import authenticate
from .utils import gen_oauth_login_url
from bklogin.bkauth import actions
from bklogin.bkauth.constants import REDIRECT_FIELD_NAME
from bklogin.common.log import logger
def login(request):
"""
登录处理
"""
# 获取用户实际请求的URL, 目前account.REDIRECT_FIELD_NAME = 'c_url'
redirect_to = request.GET.get(REDIRECT_FIELD_NAME, "")
# 获取用户实际访问的蓝鲸应用
app_id = request.GET.get("app_id", "")
# 来自注销
is_from_logout = bool(request.GET.get("is_from_logout") or 0)
# google登录回调后会自动添加code参数
code = request.GET.get("code")
# 若没有code参数,则表示需要跳转到google登录
if code is None or is_from_logout:
# 生成跳转到google登录的链接
google_oauth_login_url, state = gen_oauth_login_url({"app_id": app_id, REDIRECT_FIELD_NAME: redirect_to})
# 将state 设置于session,Oauth2.0特有的,防止csrf攻击的
request.session["state"] = state
# 直接调用蓝鲸登录重定向方法
response = actions.login_redirect_response(request, google_oauth_login_url, is_from_logout)
logger.debug(
"custom_login:oauth.google code is None or is_from_logout! code=%s, is_from_logout=%s",
code,
is_from_logout,
)
return response
# 已经有企业认证票据参数(如code参数),表示企业登录后的回调或企业认证票据还存在
# oauth2.0 特有处理逻辑,防止csrf攻击
# 处理state参数
state = request.GET.get("state", "")
state_dict = dict(urllib.parse.parse_qsl(state))
app_id = state_dict.get("app_id")
redirect_to = state_dict.get(REDIRECT_FIELD_NAME, "")
state_from_session = request.session.get("state")
# 校验state,防止csrf攻击
if state != state_from_session:
logger.debug(
"custom_login:oauth.google state != state_from_session [state=%s, state_from_session=%s]",
state,
state_from_session,
)
return actions.login_failed_response(request, redirect_to, app_id)
# 验证用户登录是否OK
user = authenticate(code=code)
if user is None:
logger.debug("custom_login:oauth.google user is None, will redirect_to=%s", redirect_to)
# 直接调用蓝鲸登录失败处理方法
return actions.login_failed_response(request, redirect_to, app_id)
# 成功,则调用蓝鲸登录成功的处理函数,并返回响应
logger.debug("custom_login:oauth.google login success, will redirect_to=%s", redirect_to)
return actions.login_success_response(request, user, redirect_to, app_id)
|
[
"bklogin.bkauth.actions.login_failed_response",
"bklogin.common.log.logger.debug",
"bklogin.bkauth.actions.login_success_response",
"django.contrib.auth.authenticate",
"bklogin.bkauth.actions.login_redirect_response"
] |
[((2659, 2682), 'django.contrib.auth.authenticate', 'authenticate', ([], {'code': 'code'}), '(code=code)\n', (2671, 2682), False, 'from django.contrib.auth import authenticate\n'), ((2935, 3028), 'bklogin.common.log.logger.debug', 'logger.debug', (['"""custom_login:oauth.google login success, will redirect_to=%s"""', 'redirect_to'], {}), "('custom_login:oauth.google login success, will redirect_to=%s',\n redirect_to)\n", (2947, 3028), False, 'from bklogin.common.log import logger\n'), ((3036, 3102), 'bklogin.bkauth.actions.login_success_response', 'actions.login_success_response', (['request', 'user', 'redirect_to', 'app_id'], {}), '(request, user, redirect_to, app_id)\n', (3066, 3102), False, 'from bklogin.bkauth import actions\n'), ((1687, 1772), 'bklogin.bkauth.actions.login_redirect_response', 'actions.login_redirect_response', (['request', 'google_oauth_login_url', 'is_from_logout'], {}), '(request, google_oauth_login_url, is_from_logout\n )\n', (1718, 1772), False, 'from bklogin.bkauth import actions\n'), ((1776, 1908), 'bklogin.common.log.logger.debug', 'logger.debug', (['"""custom_login:oauth.google code is None or is_from_logout! code=%s, is_from_logout=%s"""', 'code', 'is_from_logout'], {}), "(\n 'custom_login:oauth.google code is None or is_from_logout! 
code=%s, is_from_logout=%s'\n , code, is_from_logout)\n", (1788, 1908), False, 'from bklogin.common.log import logger\n'), ((2377, 2517), 'bklogin.common.log.logger.debug', 'logger.debug', (['"""custom_login:oauth.google state != state_from_session [state=%s, state_from_session=%s]"""', 'state', 'state_from_session'], {}), "(\n 'custom_login:oauth.google state != state_from_session [state=%s, state_from_session=%s]'\n , state, state_from_session)\n", (2389, 2517), False, 'from bklogin.common.log import logger\n'), ((2570, 2629), 'bklogin.bkauth.actions.login_failed_response', 'actions.login_failed_response', (['request', 'redirect_to', 'app_id'], {}), '(request, redirect_to, app_id)\n', (2599, 2629), False, 'from bklogin.bkauth import actions\n'), ((2712, 2804), 'bklogin.common.log.logger.debug', 'logger.debug', (['"""custom_login:oauth.google user is None, will redirect_to=%s"""', 'redirect_to'], {}), "('custom_login:oauth.google user is None, will redirect_to=%s',\n redirect_to)\n", (2724, 2804), False, 'from bklogin.common.log import logger\n'), ((2841, 2900), 'bklogin.bkauth.actions.login_failed_response', 'actions.login_failed_response', (['request', 'redirect_to', 'app_id'], {}), '(request, redirect_to, app_id)\n', (2870, 2900), False, 'from bklogin.bkauth import actions\n')]
|
"""
ShakeDrop-ResNet for CIFAR/SVHN, implemented in Gluon.
Original paper: 'ShakeDrop Regularization for Deep Residual Learning,' https://arxiv.org/abs/1802.02375.
"""
__all__ = ['CIFARShakeDropResNet', 'shakedropresnet20_cifar10', 'shakedropresnet20_cifar100', 'shakedropresnet20_svhn']
import os
import numpy as np
import mxnet as mx
from mxnet import cpu
from mxnet.gluon import nn, HybridBlock
from .common import conv1x1_block, conv3x3_block
from .resnet import ResBlock, ResBottleneck
class ShakeDrop(mx.autograd.Function):
"""
ShakeDrop function.
Parameters:
----------
p : float
ShakeDrop specific probability (of life) for Bernoulli random variable.
"""
def __init__(self, p):
super(ShakeDrop, self).__init__()
self.p = p
def forward(self, x):
if mx.autograd.is_training():
b = np.random.binomial(n=1, p=self.p)
alpha = mx.nd.random.uniform_like(x.slice(begin=(None, 0, 0, 0), end=(None, 1, 1, 1)), low=-1.0, high=1.0)
y = mx.nd.broadcast_mul(b + alpha - b * alpha, x)
self.save_for_backward(b)
else:
y = self.p * x
return y
def backward(self, dy):
b, = self.saved_tensors
beta = mx.nd.random.uniform_like(dy.slice(begin=(None, 0, 0, 0), end=(None, 1, 1, 1)), low=0.0, high=1.0)
return mx.nd.broadcast_mul(b + beta - b * beta, dy)
class ShakeDropResUnit(HybridBlock):
"""
ShakeDrop-ResNet unit with residual connection.
Parameters:
----------
in_channels : int
Number of input channels.
out_channels : int
Number of output channels.
strides : int or tuple/list of 2 int
Strides of the convolution.
bn_use_global_stats : bool
Whether global moving statistics is used instead of local batch-norm for BatchNorm layers.
bottleneck : bool
Whether to use a bottleneck or simple block in units.
life_prob : float
Residual branch life probability.
"""
def __init__(self,
in_channels,
out_channels,
strides,
bn_use_global_stats,
bottleneck,
life_prob,
**kwargs):
super(ShakeDropResUnit, self).__init__(**kwargs)
self.life_prob = life_prob
self.resize_identity = (in_channels != out_channels) or (strides != 1)
body_class = ResBottleneck if bottleneck else ResBlock
with self.name_scope():
self.body = body_class(
in_channels=in_channels,
out_channels=out_channels,
strides=strides,
bn_use_global_stats=bn_use_global_stats)
if self.resize_identity:
self.identity_conv = conv1x1_block(
in_channels=in_channels,
out_channels=out_channels,
strides=strides,
bn_use_global_stats=bn_use_global_stats,
activation=None)
self.activ = nn.Activation("relu")
# self.shake_drop = ShakeDrop(self.life_prob)
def hybrid_forward(self, F, x):
if self.resize_identity:
identity = self.identity_conv(x)
else:
identity = x
x = self.body(x)
x = ShakeDrop(self.life_prob)(x) + identity
# x = self.shake_drop(x) + identity
x = self.activ(x)
return x
class CIFARShakeDropResNet(HybridBlock):
"""
ShakeDrop-ResNet model for CIFAR from 'ShakeDrop Regularization for Deep Residual Learning,'
https://arxiv.org/abs/1802.02375.
Parameters:
----------
channels : list of list of int
Number of output channels for each unit.
init_block_channels : int
Number of output channels for the initial unit.
bottleneck : bool
Whether to use a bottleneck or simple block in units.
life_probs : list of float
Residual branch life probability for each unit.
bn_use_global_stats : bool, default False
Whether global moving statistics is used instead of local batch-norm for BatchNorm layers.
Useful for fine-tuning.
in_channels : int, default 3
Number of input channels.
in_size : tuple of two ints, default (32, 32)
Spatial size of the expected input image.
classes : int, default 10
Number of classification classes.
"""
def __init__(self,
channels,
init_block_channels,
bottleneck,
life_probs,
bn_use_global_stats=False,
in_channels=3,
in_size=(32, 32),
classes=10,
**kwargs):
super(CIFARShakeDropResNet, self).__init__(**kwargs)
self.in_size = in_size
self.classes = classes
with self.name_scope():
self.features = nn.HybridSequential(prefix="")
self.features.add(conv3x3_block(
in_channels=in_channels,
out_channels=init_block_channels,
bn_use_global_stats=bn_use_global_stats))
in_channels = init_block_channels
k = 0
for i, channels_per_stage in enumerate(channels):
stage = nn.HybridSequential(prefix="stage{}_".format(i + 1))
with stage.name_scope():
for j, out_channels in enumerate(channels_per_stage):
strides = 2 if (j == 0) and (i != 0) else 1
stage.add(ShakeDropResUnit(
in_channels=in_channels,
out_channels=out_channels,
strides=strides,
bn_use_global_stats=bn_use_global_stats,
bottleneck=bottleneck,
life_prob=life_probs[k]))
in_channels = out_channels
k += 1
self.features.add(stage)
self.features.add(nn.AvgPool2D(
pool_size=8,
strides=1))
self.output = nn.HybridSequential(prefix="")
self.output.add(nn.Flatten())
self.output.add(nn.Dense(
units=classes,
in_units=in_channels))
def hybrid_forward(self, F, x):
x = self.features(x)
x = self.output(x)
return x
def get_shakedropresnet_cifar(classes,
blocks,
bottleneck,
model_name=None,
pretrained=False,
ctx=cpu(),
root=os.path.join("~", ".mxnet", "models"),
**kwargs):
"""
Create ShakeDrop-ResNet model for CIFAR with specific parameters.
Parameters:
----------
classes : int
Number of classification classes.
blocks : int
Number of blocks.
bottleneck : bool
Whether to use a bottleneck or simple block in units.
model_name : str or None, default None
Model name for loading pretrained model.
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default '~/.mxnet/models'
Location for keeping the model parameters.
"""
assert (classes in [10, 100])
if bottleneck:
assert ((blocks - 2) % 9 == 0)
layers = [(blocks - 2) // 9] * 3
else:
assert ((blocks - 2) % 6 == 0)
layers = [(blocks - 2) // 6] * 3
init_block_channels = 16
channels_per_layers = [16, 32, 64]
channels = [[ci] * li for (ci, li) in zip(channels_per_layers, layers)]
if bottleneck:
channels = [[cij * 4 for cij in ci] for ci in channels]
total_layers = sum(layers)
final_death_prob = 0.5
life_probs = [1.0 - float(i + 1) / float(total_layers) * final_death_prob for i in range(total_layers)]
net = CIFARShakeDropResNet(
channels=channels,
init_block_channels=init_block_channels,
bottleneck=bottleneck,
life_probs=life_probs,
classes=classes,
**kwargs)
if pretrained:
if (model_name is None) or (not model_name):
raise ValueError("Parameter `model_name` should be properly initialized for loading pretrained model.")
from .model_store import get_model_file
net.load_parameters(
filename=get_model_file(
model_name=model_name,
local_model_store_dir_path=root),
ctx=ctx)
return net
def shakedropresnet20_cifar10(classes=10, **kwargs):
"""
ShakeDrop-ResNet-20 model for CIFAR-10 from 'ShakeDrop Regularization for Deep Residual Learning,'
https://arxiv.org/abs/1802.02375.
Parameters:
----------
classes : int, default 10
Number of classification classes.
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default '~/.mxnet/models'
Location for keeping the model parameters.
"""
return get_shakedropresnet_cifar(classes=classes, blocks=20, bottleneck=False,
model_name="shakedropresnet20_cifar10", **kwargs)
def shakedropresnet20_cifar100(classes=100, **kwargs):
"""
ShakeDrop-ResNet-20 model for CIFAR-100 from 'ShakeDrop Regularization for Deep Residual Learning,'
https://arxiv.org/abs/1802.02375.
Parameters:
----------
classes : int, default 100
Number of classification classes.
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default '~/.mxnet/models'
Location for keeping the model parameters.
"""
return get_shakedropresnet_cifar(classes=classes, blocks=20, bottleneck=False,
model_name="shakedropresnet20_cifar100", **kwargs)
def shakedropresnet20_svhn(classes=10, **kwargs):
"""
ShakeDrop-ResNet-20 model for SVHN from 'ShakeDrop Regularization for Deep Residual Learning,'
https://arxiv.org/abs/1802.02375.
Parameters:
----------
classes : int, default 10
Number of classification classes.
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default '~/.mxnet/models'
Location for keeping the model parameters.
"""
return get_shakedropresnet_cifar(classes=classes, blocks=20, bottleneck=False,
model_name="shakedropresnet20_svhn", **kwargs)
def _test():
import numpy as np
import mxnet as mx
pretrained = False
models = [
(shakedropresnet20_cifar10, 10),
(shakedropresnet20_cifar100, 100),
(shakedropresnet20_svhn, 10),
]
for model, classes in models:
net = model(pretrained=pretrained)
ctx = mx.cpu()
if not pretrained:
net.initialize(ctx=ctx)
# net.hybridize()
net_params = net.collect_params()
weight_count = 0
for param in net_params.values():
if (param.shape is None) or (not param._differentiable):
continue
weight_count += np.prod(param.shape)
print("m={}, {}".format(model.__name__, weight_count))
assert (model != shakedropresnet20_cifar10 or weight_count == 272474)
assert (model != shakedropresnet20_cifar100 or weight_count == 278324)
assert (model != shakedropresnet20_svhn or weight_count == 272474)
x = mx.nd.zeros((14, 3, 32, 32), ctx=ctx)
# y = net(x)
with mx.autograd.record():
y = net(x)
y.backward()
assert (y.shape == (14, classes))
if __name__ == "__main__":
_test()
|
[
"numpy.random.binomial",
"mxnet.autograd.is_training",
"mxnet.autograd.record",
"mxnet.gluon.nn.HybridSequential",
"mxnet.gluon.nn.Dense",
"mxnet.gluon.nn.Activation",
"mxnet.nd.zeros",
"mxnet.nd.broadcast_mul",
"mxnet.cpu",
"mxnet.gluon.nn.AvgPool2D",
"os.path.join",
"mxnet.gluon.nn.Flatten",
"numpy.prod"
] |
[((6743, 6748), 'mxnet.cpu', 'cpu', ([], {}), '()\n', (6746, 6748), False, 'from mxnet import cpu\n'), ((6785, 6822), 'os.path.join', 'os.path.join', (['"""~"""', '""".mxnet"""', '"""models"""'], {}), "('~', '.mxnet', 'models')\n", (6797, 6822), False, 'import os\n'), ((834, 859), 'mxnet.autograd.is_training', 'mx.autograd.is_training', ([], {}), '()\n', (857, 859), True, 'import mxnet as mx\n'), ((1378, 1422), 'mxnet.nd.broadcast_mul', 'mx.nd.broadcast_mul', (['(b + beta - b * beta)', 'dy'], {}), '(b + beta - b * beta, dy)\n', (1397, 1422), True, 'import mxnet as mx\n'), ((11422, 11430), 'mxnet.cpu', 'mx.cpu', ([], {}), '()\n', (11428, 11430), True, 'import mxnet as mx\n'), ((12081, 12118), 'mxnet.nd.zeros', 'mx.nd.zeros', (['(14, 3, 32, 32)'], {'ctx': 'ctx'}), '((14, 3, 32, 32), ctx=ctx)\n', (12092, 12118), True, 'import mxnet as mx\n'), ((877, 910), 'numpy.random.binomial', 'np.random.binomial', ([], {'n': '(1)', 'p': 'self.p'}), '(n=1, p=self.p)\n', (895, 910), True, 'import numpy as np\n'), ((1046, 1091), 'mxnet.nd.broadcast_mul', 'mx.nd.broadcast_mul', (['(b + alpha - b * alpha)', 'x'], {}), '(b + alpha - b * alpha, x)\n', (1065, 1091), True, 'import mxnet as mx\n'), ((3082, 3103), 'mxnet.gluon.nn.Activation', 'nn.Activation', (['"""relu"""'], {}), "('relu')\n", (3095, 3103), False, 'from mxnet.gluon import nn, HybridBlock\n'), ((4961, 4991), 'mxnet.gluon.nn.HybridSequential', 'nn.HybridSequential', ([], {'prefix': '""""""'}), "(prefix='')\n", (4980, 4991), False, 'from mxnet.gluon import nn, HybridBlock\n'), ((6202, 6232), 'mxnet.gluon.nn.HybridSequential', 'nn.HybridSequential', ([], {'prefix': '""""""'}), "(prefix='')\n", (6221, 6232), False, 'from mxnet.gluon import nn, HybridBlock\n'), ((11752, 11772), 'numpy.prod', 'np.prod', (['param.shape'], {}), '(param.shape)\n', (11759, 11772), True, 'import numpy as np\n'), ((12153, 12173), 'mxnet.autograd.record', 'mx.autograd.record', ([], {}), '()\n', (12171, 12173), True, 'import mxnet as mx\n'), ((6104, 
6140), 'mxnet.gluon.nn.AvgPool2D', 'nn.AvgPool2D', ([], {'pool_size': '(8)', 'strides': '(1)'}), '(pool_size=8, strides=1)\n', (6116, 6140), False, 'from mxnet.gluon import nn, HybridBlock\n'), ((6261, 6273), 'mxnet.gluon.nn.Flatten', 'nn.Flatten', ([], {}), '()\n', (6271, 6273), False, 'from mxnet.gluon import nn, HybridBlock\n'), ((6303, 6348), 'mxnet.gluon.nn.Dense', 'nn.Dense', ([], {'units': 'classes', 'in_units': 'in_channels'}), '(units=classes, in_units=in_channels)\n', (6311, 6348), False, 'from mxnet.gluon import nn, HybridBlock\n')]
|
# Bare ``settings.py`` for running tests for url_filter
import os
from sqlalchemy import create_engine
DEBUG = True
INTERNAL_IPS = ['127.0.0.1']
if os.environ.get('USE_POSTGRES') == 'True':
SQLALCHEMY_ENGINE = create_engine('postgresql://postgres:test@localhost:5432', echo=True)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'postgres',
'USER': 'postgres',
'PASSWORD': '<PASSWORD>',
'HOST': '127.0.0.1',
'PORT': '5432',
}
}
else:
SQLALCHEMY_ENGINE = create_engine('sqlite:///url_filter.sqlite', echo=True)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'url_filter.sqlite'
}
}
INSTALLED_APPS = (
'test_project.generic',
'test_project.many_to_many',
'test_project.many_to_one',
'test_project.one_to_one',
'url_filter',
'debug_toolbar',
'debug_toolbar_alchemy',
'django_extensions',
'rest_framework',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.staticfiles',
)
STATIC_URL = '/static/'
SECRET_KEY = 'foo'
MIDDLEWARE = [
'test_project.middleware.SQLAlchemySessionMiddleware',
'debug_toolbar.middleware.DebugToolbarMiddleware',
]
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
},
]
ROOT_URLCONF = 'test_project.urls'
REST_FRAMEWORK = {
'DEFAULT_FILTER_BACKENDS': [
'url_filter.integrations.drf.DjangoFilterBackend',
],
}
DEBUG_TOOLBAR_PANELS = [
'debug_toolbar.panels.versions.VersionsPanel',
'debug_toolbar.panels.timer.TimerPanel',
'debug_toolbar.panels.settings.SettingsPanel',
'debug_toolbar.panels.headers.HeadersPanel',
'debug_toolbar.panels.request.RequestPanel',
'debug_toolbar.panels.sql.SQLPanel',
'debug_toolbar_alchemy.panels.sql.SQLPanel',
'debug_toolbar.panels.staticfiles.StaticFilesPanel',
'debug_toolbar.panels.templates.TemplatesPanel',
'debug_toolbar.panels.cache.CachePanel',
'debug_toolbar.panels.signals.SignalsPanel',
'debug_toolbar.panels.logging.LoggingPanel',
'debug_toolbar.panels.redirects.RedirectsPanel',
]
DEBUG_TOOLBAR_CONFIG = {
'ALCHEMY_DB_ALIASES': 'test_project.middleware.dbs',
}
|
[
"os.environ.get",
"sqlalchemy.create_engine"
] |
[((152, 182), 'os.environ.get', 'os.environ.get', (['"""USE_POSTGRES"""'], {}), "('USE_POSTGRES')\n", (166, 182), False, 'import os\n'), ((218, 287), 'sqlalchemy.create_engine', 'create_engine', (['"""postgresql://postgres:test@localhost:5432"""'], {'echo': '(True)'}), "('postgresql://postgres:test@localhost:5432', echo=True)\n", (231, 287), False, 'from sqlalchemy import create_engine\n'), ((591, 646), 'sqlalchemy.create_engine', 'create_engine', (['"""sqlite:///url_filter.sqlite"""'], {'echo': '(True)'}), "('sqlite:///url_filter.sqlite', echo=True)\n", (604, 646), False, 'from sqlalchemy import create_engine\n')]
|
from django.db import models
from django.conf import settings
from django.contrib.auth.models import User
from ckeditor.fields import RichTextField
from ckeditor_uploader.fields import RichTextUploadingField
from django.urls import reverse
import django.db.models.deletion
def upload_location(instance,filename):
return "%s/%s" %(instance.id,filename)
# //blog categories
class Category(models.Model):
name = models.CharField(max_length=100)
slug = models.SlugField(max_length=100,unique=True)
# created_at = models.DateTimeField(auto_now_add=True,blank=True,default=None)
# updated_at = models.DateTimeField(auto_now=True, blank=True,default= None)
class Meta:
ordering = ('name',)
verbose_name ='category'
verbose_name_plural ='categories'
# def get_absolute_url(self):
# return reverse('blog:post_category',args=[self.slug])
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse('blog:post_by_category',args=[self.slug])
# return "/blog/%s/" % self.slug
class Post(models.Model):
user = models.ForeignKey(User,default=None,null=True, on_delete=django.db.models.deletion.SET_NULL)
title = models.CharField(max_length=100)
body = RichTextUploadingField(blank=True,null=True)
date_published = models.DateTimeField(auto_now_add=True)
tags = models.CharField(max_length=100)
category = models.ForeignKey(Category, null=True, on_delete=django.db.models.deletion.SET_NULL)
image = models.ImageField(upload_to=upload_location)
post_file = models.FileField(blank=True,null=True,upload_to="blog/files/%Y/m/$D/")
def __str__(self):
return self.title
class BlogComment(models.Model):
post = models.ForeignKey(Post,default=None,on_delete=django.db.models.deletion.SET_NULL, null=True, related_name='comments')
commented_by = models.ForeignKey(User,default=None,on_delete=django.db.models.deletion.SET_NULL,null=True)
date=models.DateTimeField(auto_now_add=True)
comment= models.TextField()
approved_comment = models.BooleanField(default=False)
def approve(self):
self.approved_comment = True
self.save()
def __str__(self):
return self.comment
|
[
"django.db.models.FileField",
"django.db.models.TextField",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"ckeditor_uploader.fields.RichTextUploadingField",
"django.db.models.SlugField",
"django.db.models.BooleanField",
"django.db.models.ImageField",
"django.urls.reverse",
"django.db.models.DateTimeField"
] |
[((411, 443), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (427, 443), False, 'from django.db import models\n'), ((452, 497), 'django.db.models.SlugField', 'models.SlugField', ([], {'max_length': '(100)', 'unique': '(True)'}), '(max_length=100, unique=True)\n', (468, 497), False, 'from django.db import models\n'), ((1047, 1146), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'default': 'None', 'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL'}), '(User, default=None, null=True, on_delete=django.db.models\n .deletion.SET_NULL)\n', (1064, 1146), False, 'from django.db import models\n'), ((1149, 1181), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1165, 1181), False, 'from django.db import models\n'), ((1190, 1235), 'ckeditor_uploader.fields.RichTextUploadingField', 'RichTextUploadingField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (1212, 1235), False, 'from ckeditor_uploader.fields import RichTextUploadingField\n'), ((1253, 1292), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1273, 1292), False, 'from django.db import models\n'), ((1301, 1333), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1317, 1333), False, 'from django.db import models\n'), ((1346, 1435), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Category'], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL'}), '(Category, null=True, on_delete=django.db.models.deletion.\n SET_NULL)\n', (1363, 1435), False, 'from django.db import models\n'), ((1440, 1484), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': 'upload_location'}), '(upload_to=upload_location)\n', (1457, 1484), False, 'from django.db import models\n'), ((1498, 1570), 
'django.db.models.FileField', 'models.FileField', ([], {'blank': '(True)', 'null': '(True)', 'upload_to': '"""blog/files/%Y/m/$D/"""'}), "(blank=True, null=True, upload_to='blog/files/%Y/m/$D/')\n", (1514, 1570), False, 'from django.db import models\n'), ((1651, 1775), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Post'], {'default': 'None', 'on_delete': 'django.db.models.deletion.SET_NULL', 'null': '(True)', 'related_name': '"""comments"""'}), "(Post, default=None, on_delete=django.db.models.deletion.\n SET_NULL, null=True, related_name='comments')\n", (1668, 1775), False, 'from django.db import models\n'), ((1785, 1884), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'default': 'None', 'on_delete': 'django.db.models.deletion.SET_NULL', 'null': '(True)'}), '(User, default=None, on_delete=django.db.models.deletion.\n SET_NULL, null=True)\n', (1802, 1884), False, 'from django.db import models\n'), ((1883, 1922), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1903, 1922), False, 'from django.db import models\n'), ((1933, 1951), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (1949, 1951), False, 'from django.db import models\n'), ((1972, 2006), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (1991, 2006), False, 'from django.db import models\n'), ((924, 974), 'django.urls.reverse', 'reverse', (['"""blog:post_by_category"""'], {'args': '[self.slug]'}), "('blog:post_by_category', args=[self.slug])\n", (931, 974), False, 'from django.urls import reverse\n')]
|
import numpy as np
import pandas as pd
import pulp
from laptimize.curve_approximation import CurveApproximator
from laptimize.log import LogFactory
class LAPModel(object):
    """Solve the linear-approximated LP problem and its branching sub problems."""

    def __init__(self, name='nlp_problem'):
        self.logger = LogFactory.get_logger()
        self.lp_variables = dict()
        self.segment = pd.DataFrame()
        self.curve = pd.DataFrame()
        # Single slack variable shared by the objective and every constraint.
        self.lp_slack = pulp.LpVariable.dict('p_%s', ['p1'], lowBound=0)
        self.model = pulp.LpProblem(name, pulp.LpMinimize)
        self.objective_expressions = []
        self.constraint_expression = []

    def initialize(self, segment=None, curve=None, name='nlp_sub_problem'):
        """
        initialize variables for branching sub problems
        Parameters
        ----------
        segment: pandas data frame, optional
            updated piecewise segment for decision variables (default: empty frame)
        curve: pandas data frame, optional
            function values of objective and constraints function for each
            segment value (default: empty frame)
        name: string
            problem name
        Returns
        -------
        self
        """
        self.lp_variables = dict()
        # None sentinels replace the original mutable default arguments
        # (segment=pd.DataFrame()), which were created once at import time
        # and shared between every call.
        self.segment = segment if segment is not None else pd.DataFrame()
        self.curve = curve if curve is not None else pd.DataFrame()
        self.model = pulp.LpProblem(name, pulp.LpMinimize)
        self.objective_expressions = []
        self.constraint_expression = []
        return self

    def generate_variable_names(self, no_of_segments, node_name):
        """
        generate weight variables names for approximated lp problem
        Parameters
        ---------
        no_of_segments: int
            no of piecewise linear segment
        node_name: string
            non-linear decision variable name
        Returns
        ------
        variable_names: list
            weight variable names, ex: [x1_0, x1_1, x1_2]
        """
        variable_names = []
        for i in range(0, no_of_segments):
            variable_names.append("%s_%s" % (node_name, i))
        return variable_names

    def define_weights_for_segment(self, variable_names, name):
        """
        create linear problem related variables using piece wise variables
        Parameters
        ---------
        variable_names: list
            piece wise variable list
        name: string
            decision variable name
        Returns
        -------
        self.lp_variables[name] : dict
            weight variables (each bounded to [0, 1]) keyed by segment name
        """
        self.lp_variables[name] = pulp.LpVariable.dict('l_%s', variable_names, lowBound=0, upBound=1)
        return self.lp_variables[name]

    def fill_constraint_objective_arrays(self, lp_allocation, constraint):
        """
        update objective and constraints expression lists for linear problem
        Parameters
        ---------
        lp_allocation: dict
            linear problem variables
        constraint: pandas data frame
            problem data frame
        Returns
        -------
        weights: list
            weight variables used in the expressions
        problem_expressions: pandas data frame
            collection of objective and constraints expression
        """
        try:
            problem_expressions = pd.DataFrame()
            # Pre-initialise so an empty constraint index returns ([], empty
            # frame) instead of raising UnboundLocalError on the return line.
            weights = []
            for index in constraint.index:
                constraint_expression = []
                weights = []
                for key in lp_allocation:
                    constraint_expression.append(lp_allocation[key] * self.curve.loc[key][index])
                    weights.append(lp_allocation[key])
                problem_expressions[index] = list(constraint_expression)
            return weights, problem_expressions
        except Exception as err:
            self.logger.info('fill_constraint_objective_arrays method ended with error ')
            self.logger.error(str(err))
            raise

    def add_sub_problem(self, segment_key, k):
        """
        add sub problem constraint related to the weight variable
        Parameters
        ----------
        segment_key: string
            branching variable key
        k: list
            branching sub variables key ex : [x1_1, x1_2]
        Returns
        -------
        self
        """
        # Pin every weight outside the branch to zero and drop its segment /
        # curve rows so the sub problem only sees the branched region.
        for key in self.lp_variables[segment_key]:
            if key in k:
                continue
            else:
                self.model += self.lp_variables[segment_key][key] == 0
                self.segment = self.segment.drop([key])
                self.curve = self.curve.drop([key])

    def add_weights_sum_constraint_to_model(self, weights):
        # Convexity constraint: the weights of one variable must sum to 1.
        self.model += pulp.lpSum(weights) == 1

    def add_model_constraint_and_objective(self, constraints, values):
        """
        add constraint and objective function to the pulp lp problem
        Parameters
        ----------
        constraints: pandas data frame
            problem data frame
        values: pandas series
            right side values for the constraints
        Returns
        -------
        self
        """
        try:
            # Add objective function to model.
            self.model += pulp.lpSum(constraints.objective) + self.lp_slack['p1']
            constraints = constraints.drop(['objective'], axis=1)
            for constraint_expression in constraints:
                self.model += (pulp.lpSum(constraints[constraint_expression]) + self.lp_slack['p1']) <= values[
                    constraint_expression]
        except Exception as err:
            self.logger.info('add_model_constraint_and_objective method ended with error ')
            self.logger.error(str(err))
            raise

    def solve_model(self):
        """
        problem solve method for lp problems
        """
        try:
            solver = pulp.PULP_CBC_CMD(msg=0)
            self.model.solve(solver)
        except Exception as err:
            self.logger.info('solve_model method ended with error ')
            self.logger.error(str(err))
            raise

    def model_solver(self, constraints_df, partition_len):
        """
        solve the initial lp problem with piecewise linear variables(weights)
        Parameters
        ----------
        constraints_df: pandas data frame
            which include problem related details,data frame version of problem dictionary
        partition_len: numeric
            step size used to discretise each variable's capacity range
        Returns
        -------
        lp_variables: dict
            pulp solution for the lp weight variables
        segment: pandas data frame
            segment values for each decision variable
        curve: pandas data frame
            function values of objective and constraints function for each segment values
        """
        try:
            constraint_values = pd.DataFrame()
            constraints = constraints_df.drop(['value'])
            # Iterate over constrains and build model.
            for _, constraint in constraints.iterrows():
                # piecewise linear segments (always include the upper bound).
                x_array = np.append(np.arange(constraint.capacity[0], constraint.capacity[1], partition_len),
                                    constraint.capacity[1])
                no_of_segments = len(x_array)
                constraint = constraint.drop(['capacity'])
                variable_names = self.generate_variable_names(no_of_segments, constraint.name)
                # lp variable.
                lp_allocation = self.define_weights_for_segment(variable_names, constraint.name)
                # segment value.
                segment = pd.DataFrame({'key': [constraint.name] * len(x_array), 'segment': x_array})
                segment.index = variable_names
                self.segment = pd.concat([self.segment, segment])
                # curve approximation for each segment.
                curve = pd.DataFrame(CurveApproximator().get_curve_approximation(constraint, x_array))
                curve.index = variable_names
                self.curve = pd.concat([self.curve, curve])
                weights, problem_values = self.fill_constraint_objective_arrays(lp_allocation, constraint)
                constraint_values = pd.concat([constraint_values, problem_values], axis=0)
                self.add_weights_sum_constraint_to_model(weights)
            self.add_model_constraint_and_objective(constraint_values, constraints_df.loc['value'])
            self.solve_model()
            return self.lp_variables, self.segment, self.curve
        except Exception as err:
            self.logger.info('model_solver method ended with error ')
            self.logger.error(str(err))
            raise

    def global_solver(self, segment_key, k, constraints_df):
        """
        solve the given sub lp problem with branching rule
        Parameters
        ----------
        segment_key: str
            branching variable key ex: x1
        k: list
            branching sub variables key ex : [x1_1, x1_2]
        constraints_df: pandas data frame
            which include problem related details,data frame version of problem dictionary
        Returns
        -------
        lp_variables: dict
            pulp solution for the lp weight variables
        segment: pandas data frame
            segment values for each decision variable
        curve: pandas data frame
            function values of objective and constraints functions for each segment values
        """
        # Iterate over constrains and build model.
        try:
            constraint_values = pd.DataFrame()
            constraints = constraints_df.drop(['value'])
            for _, constraint in constraints.iterrows():
                constraint = constraint.drop(['capacity'])
                # Reuse the segments discovered by model_solver for this variable.
                segment = self.segment[self.segment.key == constraint.name]['segment'].to_dict()
                variable_names = list(segment.keys())
                lp_allocation = self.define_weights_for_segment(variable_names, constraint.name)
                weights, problem_values = self.fill_constraint_objective_arrays(lp_allocation, constraint)
                constraint_values = pd.concat([constraint_values, problem_values], axis=0)
                self.add_weights_sum_constraint_to_model(weights)
            # adding sub problem
            self.add_sub_problem(segment_key, k)
            self.add_model_constraint_and_objective(constraint_values, constraints_df.loc['value'])
            self.solve_model()
            return self.lp_variables, self.segment, self.curve
        except Exception as err:
            self.logger.info('global_solver method ended with error ')
            self.logger.error(str(err))
            raise
|
[
"pandas.DataFrame",
"pulp.lpSum",
"laptimize.curve_approximation.CurveApproximator",
"pulp.LpVariable.dict",
"numpy.arange",
"pulp.LpProblem",
"pulp.PULP_CBC_CMD",
"pandas.concat",
"laptimize.log.LogFactory.get_logger"
] |
[((310, 333), 'laptimize.log.LogFactory.get_logger', 'LogFactory.get_logger', ([], {}), '()\n', (331, 333), False, 'from laptimize.log import LogFactory\n'), ((392, 406), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (404, 406), True, 'import pandas as pd\n'), ((428, 442), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (440, 442), True, 'import pandas as pd\n'), ((467, 515), 'pulp.LpVariable.dict', 'pulp.LpVariable.dict', (['"""p_%s"""', "['p1']"], {'lowBound': '(0)'}), "('p_%s', ['p1'], lowBound=0)\n", (487, 515), False, 'import pulp\n'), ((537, 574), 'pulp.LpProblem', 'pulp.LpProblem', (['name', 'pulp.LpMinimize'], {}), '(name, pulp.LpMinimize)\n', (551, 574), False, 'import pulp\n'), ((689, 703), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (701, 703), True, 'import pandas as pd\n'), ((711, 725), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (723, 725), True, 'import pandas as pd\n'), ((1368, 1405), 'pulp.LpProblem', 'pulp.LpProblem', (['name', 'pulp.LpMinimize'], {}), '(name, pulp.LpMinimize)\n', (1382, 1405), False, 'import pulp\n'), ((2636, 2703), 'pulp.LpVariable.dict', 'pulp.LpVariable.dict', (['"""l_%s"""', 'variable_names'], {'lowBound': '(0)', 'upBound': '(1)'}), "('l_%s', variable_names, lowBound=0, upBound=1)\n", (2656, 2703), False, 'import pulp\n'), ((3334, 3348), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (3346, 3348), True, 'import pandas as pd\n'), ((4733, 4752), 'pulp.lpSum', 'pulp.lpSum', (['weights'], {}), '(weights)\n', (4743, 4752), False, 'import pulp\n'), ((5893, 5917), 'pulp.PULP_CBC_CMD', 'pulp.PULP_CBC_CMD', ([], {'msg': '(0)'}), '(msg=0)\n', (5910, 5917), False, 'import pulp\n'), ((6885, 6899), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (6897, 6899), True, 'import pandas as pd\n'), ((9694, 9708), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (9706, 9708), True, 'import pandas as pd\n'), ((5247, 5280), 'pulp.lpSum', 'pulp.lpSum', (['constraints.objective'], {}), 
'(constraints.objective)\n', (5257, 5280), False, 'import pulp\n'), ((7826, 7860), 'pandas.concat', 'pd.concat', (['[self.segment, segment]'], {}), '([self.segment, segment])\n', (7835, 7860), True, 'import pandas as pd\n'), ((8094, 8124), 'pandas.concat', 'pd.concat', (['[self.curve, curve]'], {}), '([self.curve, curve])\n', (8103, 8124), True, 'import pandas as pd\n'), ((8269, 8323), 'pandas.concat', 'pd.concat', (['[constraint_values, problem_values]'], {'axis': '(0)'}), '([constraint_values, problem_values], axis=0)\n', (8278, 8323), True, 'import pandas as pd\n'), ((10273, 10327), 'pandas.concat', 'pd.concat', (['[constraint_values, problem_values]'], {'axis': '(0)'}), '([constraint_values, problem_values], axis=0)\n', (10282, 10327), True, 'import pandas as pd\n'), ((7150, 7222), 'numpy.arange', 'np.arange', (['constraint.capacity[0]', 'constraint.capacity[1]', 'partition_len'], {}), '(constraint.capacity[0], constraint.capacity[1], partition_len)\n', (7159, 7222), True, 'import numpy as np\n'), ((5454, 5500), 'pulp.lpSum', 'pulp.lpSum', (['constraints[constraint_expression]'], {}), '(constraints[constraint_expression])\n', (5464, 5500), False, 'import pulp\n'), ((7954, 7973), 'laptimize.curve_approximation.CurveApproximator', 'CurveApproximator', ([], {}), '()\n', (7971, 7973), False, 'from laptimize.curve_approximation import CurveApproximator\n')]
|
# Generated by Django 3.2.7 on 2021-10-17 07:36
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    """Move CRL storage off KeyStore into a dedicated CrlStore model."""

    dependencies = [
        ('x509_pki', '0009_auto_20211017_0921'),
    ]

    operations = [
        # Drop the inline CRL field from KeyStore ...
        migrations.RemoveField(
            model_name='keystore',
            name='crl',
        ),
        # ... and keep serialized CRLs in their own table, exactly one per
        # certificate (one-to-one, cascade-deleted with the certificate).
        migrations.CreateModel(
            name='CrlStore',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('crl', models.TextField(blank=True, null=True, verbose_name='Serialized CRL certificate')),
                ('certificate', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='x509_pki.certificate')),
            ],
        ),
    ]
|
[
"django.db.migrations.RemoveField",
"django.db.models.TextField",
"django.db.models.OneToOneField",
"django.db.models.AutoField"
] |
[((269, 326), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""keystore"""', 'name': '"""crl"""'}), "(model_name='keystore', name='crl')\n", (291, 326), False, 'from django.db import migrations, models\n'), ((468, 561), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (484, 561), False, 'from django.db import migrations, models\n'), ((584, 671), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Serialized CRL certificate"""'}), "(blank=True, null=True, verbose_name=\n 'Serialized CRL certificate')\n", (600, 671), False, 'from django.db import migrations, models\n'), ((701, 798), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""x509_pki.certificate"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'x509_pki.certificate')\n", (721, 798), False, 'from django.db import migrations, models\n')]
|
import websockets
import asyncio
import time
import json
from normalise.phemex_normalisation import NormalisePhemex
from helpers.read_config import get_symbols
from sink_connector.kafka_producer import KafkaProducer
from sink_connector.ws_to_kafka import produce_messages
from source_connector.websocket_connector import connect
url = 'wss://phemex.com/ws'
async def main():
    """Entry point: create one Kafka producer per topic and start streaming
    Phemex order-book and trade data through the websocket connector."""
    producers = [KafkaProducer(topic)
                 for topic in ("phemex-raw", "phemex-normalised", "phemex-trades")]
    raw_producer, normalised_producer, trades_producer = producers
    symbols = get_symbols('phemex')
    await connect(url, handle_phemex, raw_producer, normalised_producer,
                  trades_producer, symbols)
async def handle_phemex(ws, raw_producer, normalised_producer, trades_producer, symbols):
    """Subscribe to the order-book and trade channels for every symbol, then
    hand the socket over to the Kafka message pump."""
    for symbol in symbols:
        for channel in ("orderbook.subscribe", "trade.subscribe"):
            request = {
                "id": 1234,  # arbitrary request id echoed back by the server
                "method": channel,
                "params": [symbol]
            }
            await ws.send(json.dumps(request))
    await produce_messages(ws, raw_producer, normalised_producer, trades_producer, NormalisePhemex().normalise)
# Script entry point: run the async pipeline until the connection closes.
if __name__ == "__main__":
    asyncio.run(main())
|
[
"sink_connector.kafka_producer.KafkaProducer",
"helpers.read_config.get_symbols",
"json.dumps",
"source_connector.websocket_connector.connect",
"normalise.phemex_normalisation.NormalisePhemex"
] |
[((411, 438), 'sink_connector.kafka_producer.KafkaProducer', 'KafkaProducer', (['"""phemex-raw"""'], {}), "('phemex-raw')\n", (424, 438), False, 'from sink_connector.kafka_producer import KafkaProducer\n'), ((466, 500), 'sink_connector.kafka_producer.KafkaProducer', 'KafkaProducer', (['"""phemex-normalised"""'], {}), "('phemex-normalised')\n", (479, 500), False, 'from sink_connector.kafka_producer import KafkaProducer\n'), ((524, 554), 'sink_connector.kafka_producer.KafkaProducer', 'KafkaProducer', (['"""phemex-trades"""'], {}), "('phemex-trades')\n", (537, 554), False, 'from sink_connector.kafka_producer import KafkaProducer\n'), ((570, 591), 'helpers.read_config.get_symbols', 'get_symbols', (['"""phemex"""'], {}), "('phemex')\n", (581, 591), False, 'from helpers.read_config import get_symbols\n'), ((603, 695), 'source_connector.websocket_connector.connect', 'connect', (['url', 'handle_phemex', 'raw_producer', 'normalised_producer', 'trades_producer', 'symbols'], {}), '(url, handle_phemex, raw_producer, normalised_producer,\n trades_producer, symbols)\n', (610, 695), False, 'from source_connector.websocket_connector import connect\n'), ((1010, 1039), 'json.dumps', 'json.dumps', (['subscribe_message'], {}), '(subscribe_message)\n', (1020, 1039), False, 'import json\n'), ((1123, 1152), 'json.dumps', 'json.dumps', (['subscribe_message'], {}), '(subscribe_message)\n', (1133, 1152), False, 'import json\n'), ((1244, 1261), 'normalise.phemex_normalisation.NormalisePhemex', 'NormalisePhemex', ([], {}), '()\n', (1259, 1261), False, 'from normalise.phemex_normalisation import NormalisePhemex\n')]
|
# Copyright (C) 2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
from mpa.utils.logger import get_logger
logger = get_logger()
class CDLIterator:
    """Iterate a ComposedDL for up to ``cdl.max_iter`` steps.

    Each step takes a batch dict from the first (primary) loader and merges
    in one batch from every extra loader under the keys ``extra_0``,
    ``extra_1``, ...; exhausted extra loaders are restarted transparently.
    """
    def __init__(self, cdl):
        self._cdl = cdl
        self._index = 0
        self._cdl_iter = [iter(dl) for dl in self._cdl.loaders]

    def __next__(self):
        if self._index >= self._cdl.max_iter:
            raise StopIteration
        batches = {}
        for i, it in enumerate(self._cdl_iter):
            if i == 0:
                # Primary loader drives iteration; its batch dict is the base.
                batches = next(it)
            else:
                try:
                    batches[f'extra_{i-1}'] = next(it)
                except StopIteration:
                    # Restart the exhausted i-th loader. (The previous code
                    # always restarted loader 1, which mixed up batches when
                    # three or more loaders were composed.)
                    self._cdl_iter[i] = iter(self._cdl.loaders[i])
                    batches[f'extra_{i-1}'] = next(self._cdl_iter[i])
        self._index += 1
        return batches
class ComposedDL(object):
    """Compose multiple data loaders into a single loader-like object whose
    length is that of the first (primary) loader."""

    class DummySampler(object):
        ''' dummy sampler class to relay set_epoch() call to the
            list of data loaders in the CDL
        '''
        def __init__(self, cdl):
            self.cdl = cdl

        def set_epoch(self, epoch):
            for loader in self.cdl.loaders:
                loader.sampler.set_epoch(epoch)

    def __init__(self, loaders=None):
        # None sentinel instead of the original mutable default ``loaders=[]``
        # (a shared list across all instances).
        self.loaders = loaders if loaders is not None else []
        # The first loader bounds the number of iterations per epoch.
        self.max_iter = len(self.loaders[0])
        logger.info(f'possible max iterations = {self.max_iter}')
        self._sampler = ComposedDL.DummySampler(self)

    def __len__(self):
        return self.max_iter

    def __iter__(self):
        return CDLIterator(self)

    @property
    def sampler(self):
        return self._sampler
|
[
"mpa.utils.logger.get_logger"
] |
[((130, 142), 'mpa.utils.logger.get_logger', 'get_logger', ([], {}), '()\n', (140, 142), False, 'from mpa.utils.logger import get_logger\n')]
|
#!/usr/bin/env python
"""
Command line tool for distance 2 self calculation
"""
import sys
import argparse
import csv
import logging
import os
import pandas
import itertools as itr
from Distance2SelfBinding import Distance2Self
from DistanceMatrix import DistanceMatrix
from Fred2.Core import Allele
def read_hla_input(input, hla_header):
    """
    Read the HLA allele column from a tab-separated input file.

    :param input: path to a tab-separated file containing an HLA column
    :param hla_header: name of the HLA allele column
    :return: map of Allele objects, one per distinct column entry
    """
    # pandas.DataFrame.from_csv was deprecated in 0.21 and removed in 1.0;
    # read_csv with the same sep / index_col settings is the documented
    # replacement (parse_dates only affected the index, which is disabled
    # here via index_col=False).
    hla_column = pandas.read_csv(input, sep="\t", index_col=False)[hla_header]
    return map(Allele, set(hla_column))
def load_blossum(blos):
    """
    Load a BLOSUM distance matrix by attribute name, falling back to BLOSUM50.

    :param str blos: name of the matrix attribute in the DistanceMatrices module
    :return: the matrix attribute, or a DistanceMatrix wrapping BLOSUM50_distances
             when *blos* cannot be resolved
    :raises ImportError: if the DistanceMatrices module itself is unavailable
    """
    try:
        mod = __import__('DistanceMatrices', fromlist=[blos])
        # NOTE(review): this path returns the raw attribute while the fallback
        # wraps it in DistanceMatrix — confirm whether DistanceMatrices entries
        # are already DistanceMatrix instances.
        return getattr(mod, blos)
    except (ImportError, AttributeError):
        # Narrowed from a bare ``except:`` — only a missing module or missing
        # attribute should trigger the BLOSUM50 fallback.
        mod = __import__('DistanceMatrices', fromlist=["BLOSUM50_distances"])
        return DistanceMatrix(getattr(mod, "BLOSUM50_distances"))
def main():
    # CLI with two sub-commands: "generate" builds distance tries from a
    # peptide list, "predict" computes distance-to-self against a trie.
    parser = argparse.ArgumentParser(
        description="Distance to self calculation",
    )
    subparsers = parser.add_subparsers(help='Distance2Self offers two sub-command', dest="sub_command")
    parser_gen = subparsers.add_parser('generate',
                                       help='Command lets you generate an distance trie based on a provided peptide list')
    parser_gen.add_argument("-i", "--input",
                            required=True,
                            type=str,
                            help="Peptide with immunogenicity file (from epitopeprediction)",
                            )
    parser_gen.add_argument("-s", "--sequence",
                            required=False,
                            default="neopeptide",
                            type=str,
                            help="The columns name of the peptide sequences",
                            )
    parser_gen.add_argument("-o", "--output",
                            required=True,
                            type=str,
                            help="Specifies the output path. Results will be written to CSV",
                            )
    parser_gen.add_argument("-b", "--blosum",
                            required=False,
                            default="BLOSUM50",
                            type=str,
                            help="Specifies BLOSUM distance matrix (default BLOSUM50; available BLOSUM45, BLOSUM90)",
                            )
    #Prediction sub-command
    parser_pred = subparsers.add_parser('predict',
                                        help='Command calculates the distance to self for a provided list of peptides')
    parser_pred.add_argument("-t", "--trie",
                             required=False,
                             default=None,
                             type=str,
                             help="Specifies a custom distance trie to use"
                             )
    parser_pred.add_argument("-s", "--sequence",
                             required=False,
                             default="neopeptide",
                             type=str,
                             help="The columns name of the peptide sequences",
                             )
    parser_pred.add_argument("-k", "--k",
                             required=False,
                             default=1,
                             type=int,
                             help="Specifies the number of closest self-peptides to find"
                             )
    parser_pred.add_argument("-b", "--blosum",
                             required=False,
                             default="BLOSUM50",
                             type=str,
                             help="Specifies BLOSUm distance matrix (default BLOSUM50; available BLOSUM45, BLOSUM90)",
                             )
    parser_pred.add_argument("-a", "--alleles",
                             required=False,
                             default="HLA",
                             type=str,
                             help="Specifies the HLA allele column header of the peptide input file",
                             )
    parser_pred.add_argument("-i", "--input",
                             required=True,
                             type=str,
                             help="Peptide with immunogenicity file (from epitopeprediction)",
                             )
    parser_pred.add_argument("-o", "--output",
                             required=True,
                             type=str,
                             help="Specifies the output path. Results will be written to CSV",
                             )
    args = parser.parse_args()
    # Resolve e.g. "blosum50" -> "BLOSUM50_distance" attribute name.
    blos = load_blossum("{blos}_distance".format(blos=args.blosum.strip().upper()))
    dist2self = Distance2Self(blos,saveTrieFile=True)
    # NOTE(review): DataFrame.from_csv was removed in pandas 1.0 — this only
    # runs on legacy pandas; read_csv(sep="\t", index_col=False) is the
    # modern equivalent.
    df = pandas.DataFrame.from_csv(args.input, sep="\t", index_col=False)
    peps = list(set(df[args.sequence]))
    if args.sub_command == "generate":
        # groupby requires the sort by the same key (peptide length) first;
        # one trie file is written per peptide length.
        peps.sort(key=len)
        for plength, peps in itr.groupby(peps, key=len):
            dist2self.generate_trie(peps, peptideLength=plength, outfile="{path}_l{peplength}.trie".format(
                path=os.path.splitext(args.output)[0],
                peplength=plength))
    else:
        peps.sort(key=len)
        for plength, peps in itr.groupby(peps, key=len):
            # NOTE(review): the HLA column is re-read from the input file on
            # every length group — could be hoisted out of the loop.
            alleles = read_hla_input(args.input, args.alleles)
            pathToTrie = args.trie if args.trie is not None and os.path.isfile(args.trie) else None
            res = dist2self.calculate_distances(peps, alleles=alleles, hla_header=args.alleles, pep_header=args.sequence,
                                                pathToTrie=pathToTrie, n=args.k)
            merged = pandas.merge(df, res, how="outer",on=[args.sequence,args.alleles])
            # NOTE(review): the output file is overwritten for every peptide
            # length group, so only the last group's results survive —
            # confirm whether this is intended.
            merged.to_csv(args.output, sep="\t",index=False)
# Script entry point: propagate main()'s return value as the exit status.
if __name__ == "__main__":
    sys.exit(main())
|
[
"argparse.ArgumentParser",
"pandas.DataFrame.from_csv",
"pandas.merge",
"Distance2SelfBinding.Distance2Self",
"os.path.isfile",
"os.path.splitext",
"itertools.groupby"
] |
[((1036, 1103), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Distance to self calculation"""'}), "(description='Distance to self calculation')\n", (1059, 1103), False, 'import argparse\n'), ((4702, 4740), 'Distance2SelfBinding.Distance2Self', 'Distance2Self', (['blos'], {'saveTrieFile': '(True)'}), '(blos, saveTrieFile=True)\n', (4715, 4740), False, 'from Distance2SelfBinding import Distance2Self\n'), ((4749, 4813), 'pandas.DataFrame.from_csv', 'pandas.DataFrame.from_csv', (['args.input'], {'sep': '"""\t"""', 'index_col': '(False)'}), "(args.input, sep='\\t', index_col=False)\n", (4774, 4813), False, 'import pandas\n'), ((4950, 4976), 'itertools.groupby', 'itr.groupby', (['peps'], {'key': 'len'}), '(peps, key=len)\n', (4961, 4976), True, 'import itertools as itr\n'), ((5344, 5370), 'itertools.groupby', 'itr.groupby', (['peps'], {'key': 'len'}), '(peps, key=len)\n', (5355, 5370), True, 'import itertools as itr\n'), ((5759, 5827), 'pandas.merge', 'pandas.merge', (['df', 'res'], {'how': '"""outer"""', 'on': '[args.sequence, args.alleles]'}), "(df, res, how='outer', on=[args.sequence, args.alleles])\n", (5771, 5827), False, 'import pandas\n'), ((489, 548), 'pandas.DataFrame.from_csv', 'pandas.DataFrame.from_csv', (['input'], {'sep': '"""\t"""', 'index_col': '(False)'}), "(input, sep='\\t', index_col=False)\n", (514, 548), False, 'import pandas\n'), ((5499, 5524), 'os.path.isfile', 'os.path.isfile', (['args.trie'], {}), '(args.trie)\n', (5513, 5524), False, 'import os\n'), ((5157, 5186), 'os.path.splitext', 'os.path.splitext', (['args.output'], {}), '(args.output)\n', (5173, 5186), False, 'import os\n')]
|
"""
index.py
Copyright 2015 <NAME>
This file is part of w3af, http://w3af.org/ .
w3af is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation version 2 of the License.
w3af is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with w3af; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
from w3af.core.ui.api import app
from w3af.core.ui.api.utils.auth import requires_auth
from flask import jsonify
@app.route('/', methods=['GET'])
@requires_auth
def index():
    """Root endpoint: point authenticated API consumers at the hosted docs."""
    links = {'docs': 'http://docs.w3af.org/en/latest/api/index.html'}
    return jsonify(links)
|
[
"flask.jsonify",
"w3af.core.ui.api.app.route"
] |
[((811, 842), 'w3af.core.ui.api.app.route', 'app.route', (['"""/"""'], {'methods': "['GET']"}), "('/', methods=['GET'])\n", (820, 842), False, 'from w3af.core.ui.api import app\n'), ((882, 948), 'flask.jsonify', 'jsonify', (["{'docs': 'http://docs.w3af.org/en/latest/api/index.html'}"], {}), "({'docs': 'http://docs.w3af.org/en/latest/api/index.html'})\n", (889, 948), False, 'from flask import jsonify\n')]
|
# coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class LogAnalyticsEmBridgeSummaryReport(object):
    """
    Log-Analytics EM Bridge counts summary.
    """

    def __init__(self, **kwargs):
        """
        Initializes a new LogAnalyticsEmBridgeSummaryReport object with values from keyword arguments.
        The following keyword arguments are supported (corresponding to the getters/setters of this class):

        :param compartment_id:
            The value to assign to the compartment_id property of this LogAnalyticsEmBridgeSummaryReport.
        :type compartment_id: str

        :param active_em_bridge_count:
            The value to assign to the active_em_bridge_count property of this LogAnalyticsEmBridgeSummaryReport.
        :type active_em_bridge_count: int

        :param creating_em_bridge_count:
            The value to assign to the creating_em_bridge_count property of this LogAnalyticsEmBridgeSummaryReport.
        :type creating_em_bridge_count: int

        :param needs_attention_em_bridge_count:
            The value to assign to the needs_attention_em_bridge_count property of this LogAnalyticsEmBridgeSummaryReport.
        :type needs_attention_em_bridge_count: int

        :param deleted_em_bridge_count:
            The value to assign to the deleted_em_bridge_count property of this LogAnalyticsEmBridgeSummaryReport.
        :type deleted_em_bridge_count: int

        :param total_em_bridge_count:
            The value to assign to the total_em_bridge_count property of this LogAnalyticsEmBridgeSummaryReport.
        :type total_em_bridge_count: int

        """
        # Attribute name -> swagger type, used by the SDK (de)serializer.
        self.swagger_types = {
            'compartment_id': 'str',
            'active_em_bridge_count': 'int',
            'creating_em_bridge_count': 'int',
            'needs_attention_em_bridge_count': 'int',
            'deleted_em_bridge_count': 'int',
            'total_em_bridge_count': 'int'
        }

        # Attribute name -> JSON key in the REST payload.
        self.attribute_map = {
            'compartment_id': 'compartmentId',
            'active_em_bridge_count': 'activeEmBridgeCount',
            'creating_em_bridge_count': 'creatingEmBridgeCount',
            'needs_attention_em_bridge_count': 'needsAttentionEmBridgeCount',
            'deleted_em_bridge_count': 'deletedEmBridgeCount',
            'total_em_bridge_count': 'totalEmBridgeCount'
        }

        # Backing fields; populated from kwargs by @init_model_state_from_kwargs.
        self._compartment_id = None
        self._active_em_bridge_count = None
        self._creating_em_bridge_count = None
        self._needs_attention_em_bridge_count = None
        self._deleted_em_bridge_count = None
        self._total_em_bridge_count = None

    @property
    def compartment_id(self):
        """
        **[Required]** Gets the compartment_id of this LogAnalyticsEmBridgeSummaryReport.
        Compartment Identifier `OCID]`__.

        __ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm


        :return: The compartment_id of this LogAnalyticsEmBridgeSummaryReport.
        :rtype: str
        """
        return self._compartment_id

    @compartment_id.setter
    def compartment_id(self, compartment_id):
        """
        Sets the compartment_id of this LogAnalyticsEmBridgeSummaryReport.
        Compartment Identifier `OCID]`__.

        __ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm


        :param compartment_id: The compartment_id of this LogAnalyticsEmBridgeSummaryReport.
        :type: str
        """
        self._compartment_id = compartment_id

    @property
    def active_em_bridge_count(self):
        """
        **[Required]** Gets the active_em_bridge_count of this LogAnalyticsEmBridgeSummaryReport.
        Total number of ACTIVE enterprise manager bridges.


        :return: The active_em_bridge_count of this LogAnalyticsEmBridgeSummaryReport.
        :rtype: int
        """
        return self._active_em_bridge_count

    @active_em_bridge_count.setter
    def active_em_bridge_count(self, active_em_bridge_count):
        """
        Sets the active_em_bridge_count of this LogAnalyticsEmBridgeSummaryReport.
        Total number of ACTIVE enterprise manager bridges.


        :param active_em_bridge_count: The active_em_bridge_count of this LogAnalyticsEmBridgeSummaryReport.
        :type: int
        """
        self._active_em_bridge_count = active_em_bridge_count

    @property
    def creating_em_bridge_count(self):
        """
        **[Required]** Gets the creating_em_bridge_count of this LogAnalyticsEmBridgeSummaryReport.
        Number of enterprise manager bridges in CREATING state.


        :return: The creating_em_bridge_count of this LogAnalyticsEmBridgeSummaryReport.
        :rtype: int
        """
        return self._creating_em_bridge_count

    @creating_em_bridge_count.setter
    def creating_em_bridge_count(self, creating_em_bridge_count):
        """
        Sets the creating_em_bridge_count of this LogAnalyticsEmBridgeSummaryReport.
        Number of enterprise manager bridges in CREATING state.


        :param creating_em_bridge_count: The creating_em_bridge_count of this LogAnalyticsEmBridgeSummaryReport.
        :type: int
        """
        self._creating_em_bridge_count = creating_em_bridge_count

    @property
    def needs_attention_em_bridge_count(self):
        """
        **[Required]** Gets the needs_attention_em_bridge_count of this LogAnalyticsEmBridgeSummaryReport.
        Number of enterprise manager bridges in NEEDS_ATTENTION state.


        :return: The needs_attention_em_bridge_count of this LogAnalyticsEmBridgeSummaryReport.
        :rtype: int
        """
        return self._needs_attention_em_bridge_count

    @needs_attention_em_bridge_count.setter
    def needs_attention_em_bridge_count(self, needs_attention_em_bridge_count):
        """
        Sets the needs_attention_em_bridge_count of this LogAnalyticsEmBridgeSummaryReport.
        Number of enterprise manager bridges in NEEDS_ATTENTION state.


        :param needs_attention_em_bridge_count: The needs_attention_em_bridge_count of this LogAnalyticsEmBridgeSummaryReport.
        :type: int
        """
        self._needs_attention_em_bridge_count = needs_attention_em_bridge_count

    @property
    def deleted_em_bridge_count(self):
        """
        **[Required]** Gets the deleted_em_bridge_count of this LogAnalyticsEmBridgeSummaryReport.
        Number of enterprise manager bridges in DELETED state.


        :return: The deleted_em_bridge_count of this LogAnalyticsEmBridgeSummaryReport.
        :rtype: int
        """
        return self._deleted_em_bridge_count

    @deleted_em_bridge_count.setter
    def deleted_em_bridge_count(self, deleted_em_bridge_count):
        """
        Sets the deleted_em_bridge_count of this LogAnalyticsEmBridgeSummaryReport.
        Number of enterprise manager bridges in DELETED state.


        :param deleted_em_bridge_count: The deleted_em_bridge_count of this LogAnalyticsEmBridgeSummaryReport.
        :type: int
        """
        self._deleted_em_bridge_count = deleted_em_bridge_count

    @property
    def total_em_bridge_count(self):
        """
        **[Required]** Gets the total_em_bridge_count of this LogAnalyticsEmBridgeSummaryReport.
        Total number of enterprise manager bridges.


        :return: The total_em_bridge_count of this LogAnalyticsEmBridgeSummaryReport.
        :rtype: int
        """
        return self._total_em_bridge_count

    @total_em_bridge_count.setter
    def total_em_bridge_count(self, total_em_bridge_count):
        """
        Sets the total_em_bridge_count of this LogAnalyticsEmBridgeSummaryReport.
        Total number of enterprise manager bridges.


        :param total_em_bridge_count: The total_em_bridge_count of this LogAnalyticsEmBridgeSummaryReport.
        :type: int
        """
        self._total_em_bridge_count = total_em_bridge_count

    def __repr__(self):
        return formatted_flat_dict(self)

    def __eq__(self, other):
        if other is None:
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
|
[
"oci.util.formatted_flat_dict"
] |
[((8403, 8428), 'oci.util.formatted_flat_dict', 'formatted_flat_dict', (['self'], {}), '(self)\n', (8422, 8428), False, 'from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel\n')]
|
import re
import decimal
import datetime
import collections
from . import processors
from . import _compat
import mt940
class Model(object):
    """Base class for all mt940 model objects (marker only)."""
    pass
class Date(datetime.date, Model):
'''Just a regular date object which supports dates given as strings
Args:
year (str): The year (0-100), will automatically add 2000 when needed
month (str): The month
day (str): The day
'''
def __new__(cls, *args, **kwargs):
if kwargs:
year = kwargs.get('year')
month = kwargs.get('month')
day = kwargs.get('day')
year = int(year, 10)
if year < 1000:
year += 2000
month = int(month, 10)
day = int(day, 10)
return datetime.date.__new__(cls, year, month, day)
else:
# For pickling the date object uses it's own binary format
# No need to do anything special there :)
return datetime.date.__new__(cls, *args, **kwargs)
class Amount(Model):
'''Amount object containing currency and amount
Args:
amount (str): Amount using either a , or a . as decimal separator
status (str): Either C or D for credit or debit respectively
currency (str): A 3 letter currency (e.g. EUR)
>>> Amount('123.45', 'C', 'EUR')
<123.45 EUR>
>>> Amount('123.45', 'D', 'EUR')
<-123.45 EUR>
'''
def __init__(self, amount, status, currency=None, **kwargs):
self.amount = decimal.Decimal(amount.replace(',', '.'))
self.currency = currency
# C = credit, D = debit
if status == 'D':
self.amount = -self.amount
def __repr__(self):
return '<%s %s>' % (
self.amount,
self.currency,
)
class Balance(Model):
'''Parse balance statement
Args:
status (str): Either C or D for credit or debit respectively
amount (Amount): Object containing the amount and currency
date (date): The balance date
>>> balance = Balance('C', '0.00', Date(2010, 7, 22))
>>> balance.status
'C'
>>> balance.amount.amount
Decimal('0.00')
>>> isinstance(balance.date, Date)
True
>>> balance.date.year, balance.date.month, balance.date.day
(2010, 7, 22)
>>> Balance()
<None @ None>
'''
def __init__(self, status=None, amount=None, date=None, **kwargs):
if amount and not isinstance(amount, Amount):
amount = Amount(amount, status, kwargs.get('currency'))
self.status = status
self.amount = amount
self.date = date
def __repr__(self):
return '<%s>' % self
def __str__(self):
return '%s @ %s' % (
self.amount,
self.date,
)
class Transactions(collections.Sequence):
'''
Collection of :py:class:`Transaction` objects with global properties such
as begin and end balance
'''
#: Using the processors you can pre-process data before creating objects
#: and modify them after creating the objects
DEFAULT_PROCESSORS = dict(
pre_account_identification=[],
post_account_identification=[],
pre_available_balance=[],
post_available_balance=[],
pre_closing_balance=[],
post_closing_balance=[],
pre_intermediate_closing_balance=[],
post_intermediate_closing_balance=[],
pre_final_closing_balance=[],
post_final_closing_balance=[],
pre_forward_available_balance=[],
post_forward_available_balance=[],
pre_opening_balance=[],
post_opening_balance=[],
pre_intermediate_opening_balance=[],
post_intermediate_opening_balance=[],
pre_final_opening_balance=[],
post_final_opening_balance=[],
pre_related_reference=[],
post_related_reference=[],
pre_statement=[],
post_statement=[processors.date_cleanup_post_processor],
pre_statement_number=[],
post_statement_number=[],
pre_transaction_details=[],
post_transaction_details=[],
pre_transaction_reference_number=[],
post_transaction_reference_number=[],
)
def __init__(self, processors=None):
self.processors = self.DEFAULT_PROCESSORS.copy()
if processors:
self.processors.update(processors)
self.transactions = []
self.data = {}
@property
def currency(self):
balance = mt940.utils.coalesce(
self.data.get('final_opening_balance'),
self.data.get('opening_balance'),
self.data.get('intermediate_opening_balance'),
self.data.get('available_balance'),
self.data.get('forward_available_balance'),
self.data.get('final_closing_balance'),
self.data.get('closing_balance'),
self.data.get('intermediate_closing_balance'),
)
if balance:
return balance.amount.currency
def parse(self, data):
'''Parses mt940 data, expects a string with data
Args:
data (str): The MT940 data
Returns: :py:class:`list` of :py:class:`Transaction`
'''
# We don't like carriage returns in case of Windows files so let's just
# replace them with nothing
data = data.replace('\r', '')
# The pattern is a bit annoying to match by regex, even with a greedy
# match it's difficult to get both the beginning and the end so we're
# working around it in a safer way to get everything.
tag_re = re.compile(
r'^:(?P<full_tag>(?P<tag>[0-9]{2})(?P<sub_tag>[A-Z])?):',
re.MULTILINE)
matches = list(tag_re.finditer(data))
transaction = Transaction(self)
self.transactions.append(transaction)
for i, match in enumerate(matches):
tag_id = int(match.group('tag'))
assert tag_id in mt940.tags.TAG_BY_ID, 'Unknown tag %r' \
'in line: %r' % (tag_id, match.group(0))
tag = mt940.tags.TAG_BY_ID.get(match.group('full_tag')) \
or mt940.tags.TAG_BY_ID[tag_id]
# Nice trick to get all the text that is part of this tag, python
# regex matches have a `end()` and `start()` to indicate the start
# and end index of the match.
if matches[i + 1:]:
tag_data = data[match.end():matches[i + 1].start()].strip()
else:
tag_data = data[match.end():].strip()
tag_dict = tag.parse(self, tag_data)
# Preprocess data before creating the object
for processor in self.processors.get('pre_%s' % tag.slug):
tag_dict = processor(self, tag, tag_dict)
result = tag(self, tag_dict)
# Postprocess the object
for processor in self.processors.get('post_%s' % tag.slug):
result = processor(self, tag, tag_dict, result)
if isinstance(tag, mt940.tags.Statement):
if transaction.data.get('id'):
transaction = Transaction(self, result)
self.transactions.append(transaction)
else:
transaction.data.update(result)
elif tag.scope is Transaction:
# Combine multiple results together as one string, Rabobank has
# multiple :86: tags for a single transaction
for k, v in _compat.iteritems(result):
if k in transaction.data:
transaction.data[k] += '\n%s' % v.strip()
else:
transaction.data[k] = v
elif tag.scope is Transactions: # pragma: no branch
self.data.update(result)
return self.transactions
def __getitem__(self, key):
return self.transactions[key]
def __len__(self):
return len(self.transactions)
def __repr__(self):
return '<%s[%s]>' % (
self.__class__.__name__,
']['.join('%s: %s' % (k.replace('_balance', ''), v)
for k, v in _compat.iteritems(self.data)
if k.endswith('balance'))
)
class Transaction(Model):
def __init__(self, transactions, data=None):
self.transactions = transactions
self.data = {}
self.update(data)
def update(self, data):
if data:
self.data.update(data)
def __repr__(self):
return '<%s[%s] %s>' % (
self.__class__.__name__,
self.data.get('date'),
self.data.get('amount'),
)
|
[
"datetime.date.__new__",
"re.compile"
] |
[((5605, 5691), 're.compile', 're.compile', (['"""^:(?P<full_tag>(?P<tag>[0-9]{2})(?P<sub_tag>[A-Z])?):"""', 're.MULTILINE'], {}), "('^:(?P<full_tag>(?P<tag>[0-9]{2})(?P<sub_tag>[A-Z])?):', re.\n MULTILINE)\n", (5615, 5691), False, 'import re\n'), ((764, 808), 'datetime.date.__new__', 'datetime.date.__new__', (['cls', 'year', 'month', 'day'], {}), '(cls, year, month, day)\n', (785, 808), False, 'import datetime\n'), ((967, 1010), 'datetime.date.__new__', 'datetime.date.__new__', (['cls', '*args'], {}), '(cls, *args, **kwargs)\n', (988, 1010), False, 'import datetime\n')]
|
#!/usr/bin/env python
# Name: testManager.py
# Abstract:
# A python tool to launch and manage EPICS CA and PVA stress tests
# Uses threading and paramiko ssh transport to run needed clients and servers on
# each host machine which will be used in the test.
#
# Example:
# stressTest/testManager.py --testDir /path/to/test/top --testName yourTestName
#
# Requested features to be added:
#
#==============================================================
from __future__ import print_function
import argparse
import concurrent.futures
import io
import datetime
import glob
import locale
import os
import re
import pprint
#import paramiko
#import procServUtils
import signal
import socket
import string
import subprocess
import sys
import tempfile
import textwrap
import threading
import time
procList = []
activeTests = []
testFutures = {}
testExecutor = None
testDir = None
def makePrintable( rawOutput ):
if isinstance( rawOutput, str ) and rawOutput.startswith( "b'" ):
rawOutput = eval(rawOutput)
if isinstance( rawOutput, bytes ):
rawOutput = rawOutput.decode()
if isinstance( rawOutput, list ):
filtered = []
for line in rawOutput:
filtered.append( makePrintable( line ) )
return filtered
if not isinstance( rawOutput, str ):
return str(rawOutput)
# Filter string for printable characters
printable = string.printable.replace( '\r', '' )
return ''.join(c for c in rawOutput if c in printable )
def getDateTimeFromFile( filePath ):
dateTime = None
try:
with open( filePath, 'r' ) as f:
lines = f.readlines()
for line in lines:
line = line.strip()
if len(line.strip()) == 0:
continue
dateTime = datetime.datetime.strptime( line, "%a %b %d %H:%M:%S %Z %Y" )
break
except:
pass
#if dateTime:
# print( "file %s dateTime: %s" % ( filePath, dateTime ) )
return dateTime
class StressTest(object):
'''class StressTest( pathToTestTop )
Path must contain ...
'''
def __init__( self, pathToTestTop ):
self._pathToTestTop = pathToTestTop
self._clientList = []
self._testDuration = None
self._startTest = None
def startTest( self ):
self._startTest = datetime.datetime.now()
print( "Start: %s at %s" % ( self._pathToTestTop, self._startTest.strftime("%c") ) )
try:
# Remove any stale stopTest file
os.remove( os.path.join( self._pathToTestTop, "stopTest" ) )
except:
pass
def stopTest( self ):
print( "Stop: %s" % self._pathToTestTop )
activeTests.remove( self )
def monitorTest( self ):
print( "Monitor: %s" % self._pathToTestTop )
stopTime = self.getStopTime()
if stopTime:
currentTime = datetime.datetime.now()
if currentTime > stopTime:
self.stopTest()
def getTestTop( self ):
return self._pathToTestTop
def getTestDuration( self ):
return self._testDuration
def getStopTime( self ):
stopTime = getDateTimeFromFile( os.path.join( self._pathToTestTop, "stopTest" ) )
if self._testDuration is not None:
schedStop = self._startTest + datetime.timedelta( seconds=self._testDuration )
if not stopTime or stopTime > schedStop:
stopTime = schedStop
#if stopTime:
# print( "test %s stopTime: %s" % ( self._pathToTestTop, stopTime ) )
return stopTime
def isActiveTest( pathToTestTop ):
for test in activeTests:
if pathToTestTop == test.getTestTop():
return True
return False
def checkStartTest( startTestPath, options ):
stressTestTop = os.path.split( startTestPath )[0]
if isActiveTest( stressTestTop ):
return
#print( "checkStartTime( %s )" % ( startTestPath ) )
currentTime = datetime.datetime.now()
startTime = getDateTimeFromFile( startTestPath )
if startTime is None:
return
timeSinceStart = currentTime - startTime
if timeSinceStart.total_seconds() > 2:
#print( "checkStartTime( %s ) was %d seconds ago." % ( startTestPath, timeSinceStart.total_seconds() ) )
return
stressTest = StressTest( stressTestTop )
activeTests.append( stressTest )
stressTest.startTest()
return
# Pre-compile regular expressions for speed
macroDefRegExp = re.compile( r"^\s*([a-zA-Z0-9_]*)\s*=\s*(\S*)\s*$" )
macroDefQuotedRegExp = re.compile( r"^\s*([a-zA-Z0-9_]*)\s*=\s*'([^']*)'\s*$" )
macroDefDQuotedRegExp = re.compile( r'^\s*([a-zA-Z0-9_]*)\s*=\s*"([^"]*)"\s*$' )
macroRefRegExp = re.compile( r"^([^\$]*)\$([a-zA-Z0-9_]+)(.*)$" )
def expandMacros( strWithMacros, macroDict ):
#print( "expandMacros(%s)" % strWithMacros )
global macroRefRegExp
if type(strWithMacros) is list:
expandedStrList = []
for unexpandedStr in strWithMacros:
expandedStr = expandMacros( unexpandedStr, macroDict )
expandedStrList += [ expandedStr ]
return expandedStrList
while True:
macroMatch = macroRefRegExp.search( strWithMacros )
if not macroMatch:
break
macroName = macroMatch.group(2)
if macroName in macroDict:
# Expand this macro and continue
strWithMacros = macroMatch.group(1) + macroDict[macroName] + macroMatch.group(3)
#print( "expandMacros: Expanded %s in %s ..." % ( macroName, strWithMacros ) )
continue
# Check for other macros in the string
return macroMatch.group(1) + '$' + macroMatch.group(2) + expandMacros( macroMatch.group(3), macroDict )
return strWithMacros
def hasMacros( strWithMacros ):
global macroRefRegExp
macrosFound = False
if type(strWithMacros) is list:
for unexpandedStr in strWithMacros:
if ( hasMacros( unexpandedStr ) ):
macrosFound = True
return macrosFound
if macroRefRegExp.search( strWithMacros ) is not None:
macrosFound = True
return macrosFound
def getClientConfig( config, clientName ):
for c in config.get('servers'):
if c.get('CLIENT_NAME') == clientName:
return c
for c in config.get('clients'):
if c.get('CLIENT_NAME') == clientName:
return c
return None
def getEnvFromFile( fileName, env, verbose=False ):
if verbose:
print( "getEnvFromFile: %s" % fileName )
try:
with open( fileName, 'r' ) as f:
lines = f.readlines()
for line in lines:
line = line.strip()
if line.startswith('#'):
continue
match = ( macroDefRegExp.search(line) or \
macroDefQuotedRegExp.search(line) or \
macroDefDQuotedRegExp.search(line) )
if not match:
continue
macroName = match.group(1)
macroValue = match.group(2)
env[macroName] = macroValue
if verbose:
print( "getEnvFromFile: %s = %s" % ( macroName, macroValue ) )
except:
pass
return env
def readClientConfig( clientConfig, clientName, verbose=False ):
'''Duplicates the readIfFound env handling in launch_client.sh.'''
clientConfig[ 'CLIENT_NAME' ] = clientName
SCRIPTDIR = clientConfig[ 'SCRIPTDIR' ]
testTop = clientConfig[ 'TEST_TOP' ]
getEnvFromFile( os.path.join( SCRIPTDIR, 'stressTestDefault.env' ), clientConfig, verbose=verbose )
getEnvFromFile( os.path.join( SCRIPTDIR, 'stressTestDefault.env.local' ), clientConfig, verbose=verbose )
getEnvFromFile( os.path.join( testTop, '..', 'siteDefault.env' ), clientConfig, verbose=verbose )
getEnvFromFile( os.path.join( testTop, 'siteDefault.env' ), clientConfig, verbose=verbose )
#getEnvFromFile( os.path.join( TEST_HOST_DIR, 'host.env' ), clientConfig, verbose=verbose )
getEnvFromFile( os.path.join( testTop, 'test.env' ), clientConfig, verbose=verbose )
# Read env from clientName.env to get TEST_APPTYPE
if 'TEST_APPTYPE' in clientConfig:
print( "TODO: TEST_APPTYPE %s already defined in %s clientConfig!" % ( clientConfig['TEST_APPTYPE'], clientName ) )
else:
getEnvFromFile( os.path.join( testTop, clientName + '.env' ), clientConfig, verbose=verbose )
if 'TEST_APPTYPE' in clientConfig:
getEnvFromFile( os.path.join( SCRIPTDIR, clientConfig['TEST_APPTYPE'] + 'Default.env' ), clientConfig, verbose=verbose )
# Reread env from clientName.env to override ${TEST_APPTYPE}Default.env
getEnvFromFile( os.path.join( testTop, clientName + '.env' ), clientConfig, verbose=verbose )
# Make sure PYPROC_ID isn't in the clientConfig so it doesn't get expanded
if 'PYPROC_ID' in clientConfig:
del clientConfig['PYPROC_ID']
# Expand macros in clientConfig
for key in clientConfig:
clientConfig[key] = expandMacros( clientConfig[key], clientConfig )
return clientConfig
def runRemote( *args, **kws ):
config = args[0]
clientName = args[1]
testTop = config[ 'TEST_TOP' ]
verbose = kws.get( 'verbose', False )
if verbose:
print( "runRemote client %s:" % clientName )
clientConfig = getClientConfig( config, clientName )
if not clientConfig:
print( "runRemote client %s unable to read test config!" % clientName )
return None
TEST_START_DELAY = clientConfig.get( 'TEST_START_DELAY', 0 )
if TEST_START_DELAY:
try:
TEST_START_DELAY = float(TEST_START_DELAY)
time.sleep( TEST_START_DELAY )
except ValueError:
print( "client %s config has invalid TEST_START_DELAY: %s" % ( clientName, TEST_START_DELAY ) )
else:
TEST_START_DELAY = 0.0
TEST_LAUNCHER = clientConfig.get('TEST_LAUNCHER')
TEST_LAUNCHER = expandMacros( TEST_LAUNCHER, clientConfig )
if hasMacros( TEST_LAUNCHER ):
print( "runRemote Error: TEST_LAUNCHER has unexpanded macros!\n\t%s\n" % TEST_LAUNCHER )
return
hostName = clientConfig.get('TEST_HOST')
if not hostName:
print( "runRemote Error: client %s TEST_HOST not specified!\n" % clientName )
return
cmdList = [ 'ssh', '-t', '-t', hostName ]
cmdList += TEST_LAUNCHER.split()
sshRemote = subprocess.Popen( cmdList, stdin=subprocess.DEVNULL, stdout=subprocess.PIPE )
#sshRemote = subprocess.Popen( cmdList, stdin=None, stdout=subprocess.PIPE )
procList.append( sshRemote )
TEST_DURATION = clientConfig.get( 'TEST_DURATION' )
if TEST_DURATION:
try:
TEST_DURATION = float(TEST_DURATION)
print( "client %s sleeping for TEST_DURATION %f" % ( clientName, TEST_DURATION ), flush=True )
time.sleep( TEST_DURATION )
except ValueError:
print( "client %s config has invalid TEST_DURATION: %s" % ( clientName, TEST_DURATION ) )
print( "client %s terminate remote" % ( clientName ), flush=True )
#testRemote.stop()
sshRemote.terminate()
while True:
if verbose:
print( "client %s fetching output ...\r" % ( clientName ), flush=True )
try:
(out,err) = sshRemote.communicate( timeout=1 )
break
except subprocess.TimeoutExpired:
pass
print( "ssh client %s done." % ( clientName ), flush=True )
#print( "ssh output type is %s." % ( type(out) ), flush=True )
return makePrintable( out )
def generateGatewayPVLists( clientConfig, verbose=False ):
gwPrefix = clientConfig['TEST_GW_PREFIX']
testTop = clientConfig['TEST_TOP']
provider = clientConfig['TEST_PROVIDER']
gwPvList = []
if provider == 'pva':
gwPvList.append( gwPrefix + 'cache' )
gwPvList.append( gwPrefix + 'clients' )
gwPvList.append( gwPrefix + 'ds:byhost:rx' )
gwPvList.append( gwPrefix + 'ds:byhost:tx' )
gwPvList.append( gwPrefix + 'ds:bypv:rx' )
gwPvList.append( gwPrefix + 'ds:bypv:tx' )
gwPvList.append( gwPrefix + 'refs' )
gwPvList.append( gwPrefix + 'stats' )
gwPvList.append( gwPrefix + 'us:byhost:rx' )
gwPvList.append( gwPrefix + 'us:byhost:tx' )
gwPvList.append( gwPrefix + 'us:bypv:rx' )
gwPvList.append( gwPrefix + 'us:bypv:tx' )
elif provider == 'ca':
gwPvList.append( gwPrefix + 'vctotal' )
gwPvList.append( gwPrefix + 'pvtotal' )
gwPvList.append( gwPrefix + 'connected' )
gwPvList.append( gwPrefix + 'active' )
gwPvList.append( gwPrefix + 'inactive' )
gwPvList.append( gwPrefix + 'unconnected' )
gwPvList.append( gwPrefix + 'connecting' )
gwPvList.append( gwPrefix + 'disconnected' )
gwPvList.append( gwPrefix + 'dead' )
gwPvList.append( gwPrefix + 'clientEventRate' )
gwPvList.append( gwPrefix + 'clientPostRate' )
gwPvList.append( gwPrefix + 'existTestRate' )
gwPvList.append( gwPrefix + 'loopRate' )
gwPvList.append( gwPrefix + 'cpuFract' )
gwPvList.append( gwPrefix + 'load' )
gwPvList.append( gwPrefix + 'serverEventRate' )
gwPvList.append( gwPrefix + 'serverPostRate' )
else:
print( "generateGatewayPVLists: Invalid TEST_PROVIDER: %s" % provider )
return
clientHost = clientConfig.get( 'TEST_HOST' )
clientName = clientConfig.get( 'CLIENT_NAME' )
nClients = int( clientConfig.get( 'TEST_N_CLIENTS' ) )
clientPvFileName = os.path.join( testTop, clientHost, 'clients', '%s00' % ( clientName ), "pvs.list" )
os.makedirs( os.path.dirname( clientPvFileName ), mode=0o775, exist_ok=True )
print( "generateGatewayPVLists: Writing %d pvs to %s" % ( len(gwPvList), clientPvFileName ) )
with open( clientPvFileName, 'w' ) as f:
for pv in gwPvList:
f.write( "%s\n" % pv )
def generateClientPVLists( testTop, config, verbose=False ):
'''Create PV Lists for clients.'''
allCounterPvs = []
allCircBuffPvs = []
allRatePvs = []
servers = config.get( 'servers' )
for s in servers:
serverConfig = getClientConfig( config, s.get('CLIENT_NAME') )
pvPrefix = serverConfig[ 'TEST_PV_PREFIX' ]
serverHost = serverConfig[ 'TEST_HOST' ]
serverName = serverConfig[ 'CLIENT_NAME' ]
nCounters = int( serverConfig[ 'TEST_N_COUNTERS' ] )
nServers = int( serverConfig[ 'TEST_N_SERVERS' ] )
for iServer in range( nServers ):
# Generate list of Count and CircBuff PVs for each server
CounterPvs = [ "%s%02u:Count%02u" % ( pvPrefix, iServer, n ) for n in range( nCounters ) ]
CircBuffPvs = [ "%s%02u:CircBuff%02u" % ( pvPrefix, iServer, n ) for n in range( nCounters ) ]
RatePvs = [ "%s%02u:Rate%02u" % ( pvPrefix, iServer, n ) for n in range( nCounters ) ]
allCounterPvs += CounterPvs
allCircBuffPvs += CircBuffPvs
allRatePvs += RatePvs
# Write server pvs.list (not read by loadServer)
# Each loadServer instance gets it's PV's via $TEST_DB
serverPvFileName = os.path.join( testTop, serverHost, 'clients', '%s%02u' % ( serverName, iServer ), "pvs.list" )
os.makedirs( os.path.dirname( serverPvFileName ), mode=0o775, exist_ok=True )
if verbose:
print( "generateClientPVLists: Writing %d pvs to\n%s" % ( len(CounterPvs) *3, serverPvFileName ) )
with open( serverPvFileName, 'w' ) as f:
for pv in CounterPvs:
f.write( "%s\n" % pv )
for pv in CircBuffPvs:
f.write( "%s\n" % pv )
for pv in RatePvs:
f.write( "%s\n" % pv )
clients = config.get( 'clients' )
nPvs = len(allCounterPvs)
for clientConfig in clients:
appType = clientConfig.get( 'TEST_APPTYPE' )
if appType == 'pvGetGateway':
generateGatewayPVLists( clientConfig, verbose=False )
continue
clientHost = clientConfig[ 'TEST_HOST' ]
clientName = clientConfig[ 'CLIENT_NAME' ]
nClients = int( clientConfig[ 'TEST_N_CLIENTS' ] )
nClientsTotal = nClients * len(clients)
nPvPerClient = int( len(allCounterPvs) / nClients )
for iClient in range( nClients ):
if appType == 'pvGetArray':
clientPvList = allCircBuffPvs[ iClient : len(allCircBuffPvs) : nClients ]
else:
clientPvList = allCounterPvs[ iClient : len(allCounterPvs) : nClients ]
clientPvList += allRatePvs[ iClient : len(allRatePvs) : nClients ]
clientPvFileName = os.path.join( testTop, clientHost, 'clients', '%s%02u' % ( clientName, iClient ), "pvs.list" )
os.makedirs( os.path.dirname( clientPvFileName ), mode=0o775, exist_ok=True )
if verbose:
print( "generateClientPVLists: Writing %d of %d pvs to\n%s" % ( len(clientPvList), nPvs, clientPvFileName ) )
with open( clientPvFileName, 'w' ) as f:
for pv in clientPvList:
f.write( "%s\n" % pv )
return
def clientFetchResult( future ):
clientName = testFutures[future]
try:
clientResult = future.result()
except Exception as e:
print( "%s: Exception: %s" % ( clientName, e ) )
else:
print( "clientResult for %s:" % ( clientName ) )
if clientResult:
#print( "clientResult type is %s." % ( type(clientResult) ), flush=True )
#if isinstance( clientResult, str ) and clientResult.startswith( "b'" ):
# clientResult = eval(clientResult)
# print( "eval clientResult type is %s." % ( type(clientResult) ), flush=True )
#if isinstance( clientResult, bytes ):
# clientResult = clientResult.decode()
# print( "decoded clientResult type is %s." % ( type(clientResult) ), flush=True )
clientResult = makePrintable( clientResult )
#print( "filtered clientResult type is %s." % ( type(clientResult) ), flush=True )
if isinstance( clientResult, list ):
for line in clientResult:
print( "%s" % line )
else:
#if isinstance( clientResult, str ):
# clientResult = clientResult.splitlines()
# print( "split clientResult type is %s." % ( type(clientResult) ), flush=True )
print( clientResult )
else:
print( clientResult )
def runTest( testTop, config, verbose=False ):
servers = config.get( 'servers' )
clients = config.get( 'clients' )
TEST_NAME = config[ 'TEST_NAME' ]
if verbose:
print( "runTest %s for %d servers and %d clients:" % ( TEST_NAME, len(servers), len(clients) ) )
for s in servers:
print( "%20s: host %16s, TEST_LAUNCHER: %s" % ( s.get('CLIENT_NAME'), s.get('TEST_HOST'), s.get('TEST_LAUNCHER') ) )
for c in clients:
print( "%20s: host %16s, TEST_LAUNCHER: %s" % ( c.get('CLIENT_NAME'), c.get('TEST_HOST'), c.get('TEST_LAUNCHER') ) )
# Update test configuration
with open( os.path.join( testTop, 'testConfig.json' ), 'w' ) as f:
f.write( '# Generated file: Updated on each test run from $TEST_TOP/*.env\n' )
pprint.pprint( config, stream = f )
# Create PV lists
generateClientPVLists( testTop, config, verbose=verbose )
global testExecutor
global testFutures
testExecutor = concurrent.futures.ThreadPoolExecutor( max_workers=None )
testFutures = {}
for c in servers:
clientName = c.get('CLIENT_NAME')
testFutures[ testExecutor.submit( runRemote, config, clientName, verbose=verbose ) ] = clientName
for c in clients:
clientName = c.get('CLIENT_NAME')
testFutures[ testExecutor.submit( runRemote, config, clientName, verbose=verbose ) ] = clientName
print( "Launched %d testFutures ..." % len(testFutures), flush=True )
for future in testFutures:
future.add_done_callback( clientFetchResult )
while True:
( done, not_done ) = concurrent.futures.wait( testFutures, timeout=1.0 )
if len(not_done) == 0:
break
if verbose:
print( "Waiting on %d futures ...\r" % len(not_done) )
print( "shutdown testExecutor...", flush=True )
testExecutor.shutdown( wait=True )
return
def killProcesses( ):
global procList
global testDir
global testFutures
if testDir:
killGlob = os.path.join( testDir, "*", "clients", "*.killer" )
print( 'killProcesses: Checking for killFiles: %s' % killGlob )
for killFile in glob.glob( os.path.join( testDir, "*", "*.killer" ) ):
hostName = os.path.split( os.path.split( os.path.split(killFile)[0] )[0] )[1]
print( 'killProcesses: ssh %s %s' % ( hostName, killFile ), flush=True )
#subprocess.check_status( "ssh %s %s" % ( hostName, killFile ) )
# killFile already has "ssh $host pid"
subprocess.check_status( "%s" % ( killFile ) )
time.sleep(0.5)
time.sleep(1.0)
for proc in procList:
if proc is not None and proc.returncode is None:
print( 'killProcesses: kill process %d' % ( proc.pid ), flush=True )
proc.kill()
#proc.terminate()
time.sleep(1.0)
print( 'killProcesses: Checking %d testFutures ...' % ( len(testFutures) ), flush=True )
# First kill clients
for future in testFutures:
if not future.done():
clientName = testFutures[future]
if clientName.find('Server') < 0:
print( 'killProcesses: Cancel future for %s' % ( clientName ), flush=True )
time.sleep(0.5)
future.cancel()
time.sleep(1.0)
# kill remaining futures
for future in testFutures:
if not future.done():
clientName = testFutures[future]
print( 'killProcesses: Cancel future for %s' % ( clientName ), flush=True )
time.sleep(0.5)
future.cancel()
print( 'killProcesses: Shutdown testExecutor', flush=True )
time.sleep(0.5)
testExecutor.shutdown( wait=True )
def stressTest_signal_handler( signum, frame ):
print( "\nstressTest_signal_handler: Received signal %d" % signum, flush=True )
killProcesses()
print( 'stressTest_signal_handler: done.', flush=True )
time.sleep(0.5)
# Install signal handler
signal.signal( signal.SIGINT, stressTest_signal_handler )
signal.signal( signal.SIGTERM, stressTest_signal_handler )
# Can't catch SIGKILL
#signal.signal( signal.SIGKILL, stressTest_signal_handler )
def process_options():
#if argv is None:
# argv = sys.argv[1:]
description = 'stressTest/testManager.py manages launching one or more remote stressTest clients and/or servers.\n'
epilog_fmt = '\nExamples:\n' \
'stressTest/testManager.py -t "/path/to/testTop/*"\n'
epilog = textwrap.dedent( epilog_fmt )
parser = argparse.ArgumentParser( description=description, formatter_class=argparse.RawDescriptionHelpFormatter, epilog=epilog )
#parser.add_argument( 'cmd', help='Command to launch. Should be an executable file.' )
#parser.add_argument( 'arg', nargs='*', help='Arguments for command line. Enclose options in quotes.' )
parser.add_argument( '-v', '--verbose', action="store_true", help='show more verbose output.' )
parser.add_argument( '-t', '--testDir', action="store", required=True, help='Path to test directory. Can contain * and other glob syntax.' )
options = parser.parse_args( )
return options
def main( options, argv=None):
#if options.verbose:
# print( "logDir=%s\n" % options.logDir )
if options.verbose:
print( "testDir=%s\n" % options.testDir )
global testDir
testDir = options.testDir
testConfig = {}
servers = []
clients = []
# Read test.env
SCRIPTDIR = os.path.abspath( os.path.dirname( __file__ ) )
TEST_NAME = os.path.split(testDir)[1]
testConfig[ 'SCRIPTDIR'] = SCRIPTDIR
testConfig[ 'TEST_NAME'] = TEST_NAME
testConfig[ 'TEST_TOP' ] = testDir
getEnvFromFile( os.path.join( options.testDir, "test.env" ), testConfig, verbose=options.verbose )
for envFile in glob.glob( os.path.join( options.testDir, "*.env" ) ):
baseName = os.path.split( envFile )[1]
if baseName == "test.env":
continue
# Client configuration
clientConfig = testConfig.copy()
clientName = baseName.replace( ".env", "" )
readClientConfig( clientConfig, clientName, verbose=options.verbose )
if baseName.find( "Server" ) >= 0:
servers.append( clientConfig.copy() )
else:
clients.append( clientConfig.copy() )
testConfig[ 'servers' ] = servers
testConfig[ 'clients' ] = clients
return runTest( options.testDir, testConfig, verbose=options.verbose )
if __name__ == '__main__':
status = 0
options = process_options()
debug = 1
if debug:
status = main( options )
try:
if not debug:
status = main( options )
print( "main() status=" , status )
except BaseException as e:
print( e )
print( "Caught exception during main!" )
pass
# Kill any processes still running
killProcesses()
sys.exit(status)
|
[
"textwrap.dedent",
"subprocess.Popen",
"subprocess.check_status",
"os.path.join",
"argparse.ArgumentParser",
"os.path.dirname",
"time.sleep",
"datetime.datetime.strptime",
"datetime.timedelta",
"pprint.pprint",
"string.printable.replace",
"signal.signal",
"os.path.split",
"datetime.datetime.now",
"sys.exit",
"re.compile"
] |
[((4487, 4541), 're.compile', 're.compile', (['"""^\\\\s*([a-zA-Z0-9_]*)\\\\s*=\\\\s*(\\\\S*)\\\\s*$"""'], {}), "('^\\\\s*([a-zA-Z0-9_]*)\\\\s*=\\\\s*(\\\\S*)\\\\s*$')\n", (4497, 4541), False, 'import re\n'), ((4564, 4621), 're.compile', 're.compile', (['"""^\\\\s*([a-zA-Z0-9_]*)\\\\s*=\\\\s*\'([^\']*)\'\\\\s*$"""'], {}), '("^\\\\s*([a-zA-Z0-9_]*)\\\\s*=\\\\s*\'([^\']*)\'\\\\s*$")\n', (4574, 4621), False, 'import re\n'), ((4645, 4702), 're.compile', 're.compile', (['"""^\\\\s*([a-zA-Z0-9_]*)\\\\s*=\\\\s*"([^"]*)"\\\\s*$"""'], {}), '(\'^\\\\s*([a-zA-Z0-9_]*)\\\\s*=\\\\s*"([^"]*)"\\\\s*$\')\n', (4655, 4702), False, 'import re\n'), ((4726, 4773), 're.compile', 're.compile', (['"""^([^\\\\$]*)\\\\$([a-zA-Z0-9_]+)(.*)$"""'], {}), "('^([^\\\\$]*)\\\\$([a-zA-Z0-9_]+)(.*)$')\n", (4736, 4773), False, 'import re\n'), ((22781, 22836), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'stressTest_signal_handler'], {}), '(signal.SIGINT, stressTest_signal_handler)\n', (22794, 22836), False, 'import signal\n'), ((22840, 22896), 'signal.signal', 'signal.signal', (['signal.SIGTERM', 'stressTest_signal_handler'], {}), '(signal.SIGTERM, stressTest_signal_handler)\n', (22853, 22896), False, 'import signal\n'), ((1392, 1426), 'string.printable.replace', 'string.printable.replace', (["'\\r'", '""""""'], {}), "('\\r', '')\n", (1416, 1426), False, 'import string\n'), ((3962, 3985), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3983, 3985), False, 'import datetime\n'), ((10496, 10571), 'subprocess.Popen', 'subprocess.Popen', (['cmdList'], {'stdin': 'subprocess.DEVNULL', 'stdout': 'subprocess.PIPE'}), '(cmdList, stdin=subprocess.DEVNULL, stdout=subprocess.PIPE)\n', (10512, 10571), False, 'import subprocess\n'), ((13688, 13765), 'os.path.join', 'os.path.join', (['testTop', 'clientHost', '"""clients"""', "('%s00' % clientName)", '"""pvs.list"""'], {}), "(testTop, clientHost, 'clients', '%s00' % clientName, 'pvs.list')\n", (13700, 13765), False, 'import os\n'), 
((21417, 21432), 'time.sleep', 'time.sleep', (['(1.0)'], {}), '(1.0)\n', (21427, 21432), False, 'import time\n'), ((21656, 21671), 'time.sleep', 'time.sleep', (['(1.0)'], {}), '(1.0)\n', (21666, 21671), False, 'import time\n'), ((22103, 22118), 'time.sleep', 'time.sleep', (['(1.0)'], {}), '(1.0)\n', (22113, 22118), False, 'import time\n'), ((22467, 22482), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (22477, 22482), False, 'import time\n'), ((22739, 22754), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (22749, 22754), False, 'import time\n'), ((23297, 23324), 'textwrap.dedent', 'textwrap.dedent', (['epilog_fmt'], {}), '(epilog_fmt)\n', (23312, 23324), False, 'import textwrap\n'), ((23340, 23462), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': 'description', 'formatter_class': 'argparse.RawDescriptionHelpFormatter', 'epilog': 'epilog'}), '(description=description, formatter_class=argparse.\n RawDescriptionHelpFormatter, epilog=epilog)\n', (23363, 23462), False, 'import argparse\n'), ((25713, 25729), 'sys.exit', 'sys.exit', (['status'], {}), '(status)\n', (25721, 25729), False, 'import sys\n'), ((2319, 2342), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2340, 2342), False, 'import datetime\n'), ((3800, 3828), 'os.path.split', 'os.path.split', (['startTestPath'], {}), '(startTestPath)\n', (3813, 3828), False, 'import os\n'), ((7596, 7644), 'os.path.join', 'os.path.join', (['SCRIPTDIR', '"""stressTestDefault.env"""'], {}), "(SCRIPTDIR, 'stressTestDefault.env')\n", (7608, 7644), False, 'import os\n'), ((7700, 7754), 'os.path.join', 'os.path.join', (['SCRIPTDIR', '"""stressTestDefault.env.local"""'], {}), "(SCRIPTDIR, 'stressTestDefault.env.local')\n", (7712, 7754), False, 'import os\n'), ((7810, 7856), 'os.path.join', 'os.path.join', (['testTop', '""".."""', '"""siteDefault.env"""'], {}), "(testTop, '..', 'siteDefault.env')\n", (7822, 7856), False, 'import os\n'), ((7912, 7952), 'os.path.join', 
'os.path.join', (['testTop', '"""siteDefault.env"""'], {}), "(testTop, 'siteDefault.env')\n", (7924, 7952), False, 'import os\n'), ((8104, 8137), 'os.path.join', 'os.path.join', (['testTop', '"""test.env"""'], {}), "(testTop, 'test.env')\n", (8116, 8137), False, 'import os\n'), ((13789, 13822), 'os.path.dirname', 'os.path.dirname', (['clientPvFileName'], {}), '(clientPvFileName)\n', (13804, 13822), False, 'import os\n'), ((19593, 19624), 'pprint.pprint', 'pprint.pprint', (['config'], {'stream': 'f'}), '(config, stream=f)\n', (19606, 19624), False, 'import pprint\n'), ((20819, 20868), 'os.path.join', 'os.path.join', (['testDir', '"""*"""', '"""clients"""', '"""*.killer"""'], {}), "(testDir, '*', 'clients', '*.killer')\n", (20831, 20868), False, 'import os\n'), ((24299, 24324), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (24314, 24324), False, 'import os\n'), ((24345, 24367), 'os.path.split', 'os.path.split', (['testDir'], {}), '(testDir)\n', (24358, 24367), False, 'import os\n'), ((24513, 24554), 'os.path.join', 'os.path.join', (['options.testDir', '"""test.env"""'], {}), "(options.testDir, 'test.env')\n", (24525, 24554), False, 'import os\n'), ((24626, 24664), 'os.path.join', 'os.path.join', (['options.testDir', '"""*.env"""'], {}), "(options.testDir, '*.env')\n", (24638, 24664), False, 'import os\n'), ((1777, 1836), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['line', '"""%a %b %d %H:%M:%S %Z %Y"""'], {}), "(line, '%a %b %d %H:%M:%S %Z %Y')\n", (1803, 1836), False, 'import datetime\n'), ((2885, 2908), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2906, 2908), False, 'import datetime\n'), ((3182, 3227), 'os.path.join', 'os.path.join', (['self._pathToTestTop', '"""stopTest"""'], {}), "(self._pathToTestTop, 'stopTest')\n", (3194, 3227), False, 'import os\n'), ((8426, 8468), 'os.path.join', 'os.path.join', (['testTop', "(clientName + '.env')"], {}), "(testTop, clientName + '.env')\n", (8438, 8468), 
False, 'import os\n'), ((8567, 8636), 'os.path.join', 'os.path.join', (['SCRIPTDIR', "(clientConfig['TEST_APPTYPE'] + 'Default.env')"], {}), "(SCRIPTDIR, clientConfig['TEST_APPTYPE'] + 'Default.env')\n", (8579, 8636), False, 'import os\n'), ((8776, 8818), 'os.path.join', 'os.path.join', (['testTop', "(clientName + '.env')"], {}), "(testTop, clientName + '.env')\n", (8788, 8818), False, 'import os\n'), ((9755, 9783), 'time.sleep', 'time.sleep', (['TEST_START_DELAY'], {}), '(TEST_START_DELAY)\n', (9765, 9783), False, 'import time\n'), ((10949, 10974), 'time.sleep', 'time.sleep', (['TEST_DURATION'], {}), '(TEST_DURATION)\n', (10959, 10974), False, 'import time\n'), ((15341, 15435), 'os.path.join', 'os.path.join', (['testTop', 'serverHost', '"""clients"""', "('%s%02u' % (serverName, iServer))", '"""pvs.list"""'], {}), "(testTop, serverHost, 'clients', '%s%02u' % (serverName,\n iServer), 'pvs.list')\n", (15353, 15435), False, 'import os\n'), ((16919, 17013), 'os.path.join', 'os.path.join', (['testTop', 'clientHost', '"""clients"""', "('%s%02u' % (clientName, iClient))", '"""pvs.list"""'], {}), "(testTop, clientHost, 'clients', '%s%02u' % (clientName,\n iClient), 'pvs.list')\n", (16931, 17013), False, 'import os\n'), ((19442, 19482), 'os.path.join', 'os.path.join', (['testTop', '"""testConfig.json"""'], {}), "(testTop, 'testConfig.json')\n", (19454, 19482), False, 'import os\n'), ((20978, 21016), 'os.path.join', 'os.path.join', (['testDir', '"""*"""', '"""*.killer"""'], {}), "(testDir, '*', '*.killer')\n", (20990, 21016), False, 'import os\n'), ((21337, 21377), 'subprocess.check_status', 'subprocess.check_status', (["('%s' % killFile)"], {}), "('%s' % killFile)\n", (21360, 21377), False, 'import subprocess\n'), ((21396, 21411), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (21406, 21411), False, 'import time\n'), ((22354, 22369), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (22364, 22369), False, 'import time\n'), ((24689, 24711), 'os.path.split', 
'os.path.split', (['envFile'], {}), '(envFile)\n', (24702, 24711), False, 'import os\n'), ((2519, 2564), 'os.path.join', 'os.path.join', (['self._pathToTestTop', '"""stopTest"""'], {}), "(self._pathToTestTop, 'stopTest')\n", (2531, 2564), False, 'import os\n'), ((3317, 3363), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': 'self._testDuration'}), '(seconds=self._testDuration)\n', (3335, 3363), False, 'import datetime\n'), ((15461, 15494), 'os.path.dirname', 'os.path.dirname', (['serverPvFileName'], {}), '(serverPvFileName)\n', (15476, 15494), False, 'import os\n'), ((17039, 17072), 'os.path.dirname', 'os.path.dirname', (['clientPvFileName'], {}), '(clientPvFileName)\n', (17054, 17072), False, 'import os\n'), ((22050, 22065), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (22060, 22065), False, 'import time\n'), ((21075, 21098), 'os.path.split', 'os.path.split', (['killFile'], {}), '(killFile)\n', (21088, 21098), False, 'import os\n')]
|
import boto3
import click
elbList = boto3.client('elbv2')
rgapi = boto3.client('resourcegroupstaggingapi')
"""
def lst_targets(lbar):
tgs = []
tgroups = elbList.describe_target_groups(LoadBalancerArn=lbar)
for tg in tgroups['TargetGroups']:
targetgps = tg['TargetGroupArn']
print(targetgps)
return tgs
@click.group()
def cli():
""awssnapelb manages snapshots""
@cli.group('tgroups')
def tgroups():
""Commands for listing target groups based on loadbalancers""
@tgroups.command('listtgs')
@click.option('--lbar', default=None,help="only the elb's for the project (tag Project:<name>)")
def lst_tgroups(lbar):
target_groups = lst_targets(lbar)
for tg in target_groups:
print(tg)
if __name__ == '__main__':
cli()
"""
"""
def lst_tg(project):
tgs = []
resources = rgapi.get_resources(TagFilters=[{'Key':'Project','Values':[project]}],ResourceTypeFilters=['elasticloadbalancing:targetgroup'])
for tg in resources['ResourceTagMappingList']:
print(tg['ResourceARN'])
return tgs
@click.group()
def cli():
""list the target groups""
@cli.group('tgroups')
def tgroups():
"" Command for listing targetgroups""
@tgroups.command('list')
@click.option('--project', default=None,help="only the elb's for the project (tag Project:<name>)")
def list_tg(project):
target_groups = lst_tg(project)
for tg in target_groups:
print(tg)
if __name__ == '__main__':
cli()
"""
import boto3
import click
elbList = boto3.client('elbv2')
rgapi = boto3.client('resourcegroupstaggingapi')
def list_tg(lbname):
target = []
loadbalancers = elbList.describe_load_balancers(Names=[lbname])
for lb in loadbalancers['LoadBalancers']:
lbalancer = lb['LoadBalancerName']
lbalancerarn = lb['LoadBalancerArn']
tgs = elbList.describe_target_groups(LoadBalancerArn=lbalancerarn)
for tg in tgs['TargetGroups']:
targetgps = tg['TargetGroupArn']
print(targetgps)
return target
@click.group()
def cli():
"""list the target groups"""
@cli.group('tgroups')
def tgroups():
""" Command for listing targetgroups"""
@tgroups.command('list')
@click.option('--lbname', default=None,help="only the elb's for the project (tag Project:<name>)")
def lst_tg(lbname):
target_groups = list_tg(lbname)
for tg in target_groups:
print(tg)
if __name__ == '__main__':
cli()
|
[
"click.group",
"click.option",
"boto3.client"
] |
[((36, 57), 'boto3.client', 'boto3.client', (['"""elbv2"""'], {}), "('elbv2')\n", (48, 57), False, 'import boto3\n'), ((66, 106), 'boto3.client', 'boto3.client', (['"""resourcegroupstaggingapi"""'], {}), "('resourcegroupstaggingapi')\n", (78, 106), False, 'import boto3\n'), ((1504, 1525), 'boto3.client', 'boto3.client', (['"""elbv2"""'], {}), "('elbv2')\n", (1516, 1525), False, 'import boto3\n'), ((1534, 1574), 'boto3.client', 'boto3.client', (['"""resourcegroupstaggingapi"""'], {}), "('resourcegroupstaggingapi')\n", (1546, 1574), False, 'import boto3\n'), ((2022, 2035), 'click.group', 'click.group', ([], {}), '()\n', (2033, 2035), False, 'import click\n'), ((2187, 2290), 'click.option', 'click.option', (['"""--lbname"""'], {'default': 'None', 'help': '"""only the elb\'s for the project (tag Project:<name>)"""'}), '(\'--lbname\', default=None, help=\n "only the elb\'s for the project (tag Project:<name>)")\n', (2199, 2290), False, 'import click\n')]
|
import uuid
import pytest
from circle_core.message import ModuleMessage
from circle_core.serialize import serialize
from circle_core.types import BlobMetadata
from circle_core.workers.blobstore import StoredBlob
@pytest.mark.parametrize( # noqa: F811
('payload', 'expected'),
[
(
{'data': BlobMetadata('text/plain', 'deadbeafdeadbeafdeadbeafdeadbeaf', None)},
'''\
{"boxId": "539ce356a7cb4bfc853ec1a8147f021f", "counter": 0, "payload": {"data": {"$data": null, \
"$source": "text/plain", "$type": "deadbeafdeadbeafdeadbeafdeadbeaf"}}, "timestamp": "1545895047.000"}\
'''
),
(
{'data': StoredBlob('text/plain', 'deadbeafdeadbeafdeadbeafdeadbeaf', None)},
'''\
{"boxId": "539ce356a7cb4bfc853ec1a8147f021f", "counter": 0, "payload": {"data": {"$data": null, \
"$source": "text/plain", "$type": "deadbeafdeadbeafdeadbeafdeadbeaf"}}, "timestamp": "1545895047.000"}\
'''
)
]
)
def test_message_jsonize(payload, expected):
message = ModuleMessage(uuid.UUID('539CE356-A7CB-4BFC-853E-C1A8147F021F'), '1545895047.000', 0, payload)
serialized = serialize(message)
assert serialized == expected
|
[
"circle_core.types.BlobMetadata",
"circle_core.serialize.serialize",
"uuid.UUID",
"circle_core.workers.blobstore.StoredBlob"
] |
[((1139, 1157), 'circle_core.serialize.serialize', 'serialize', (['message'], {}), '(message)\n', (1148, 1157), False, 'from circle_core.serialize import serialize\n'), ((1041, 1090), 'uuid.UUID', 'uuid.UUID', (['"""539CE356-A7CB-4BFC-853E-C1A8147F021F"""'], {}), "('539CE356-A7CB-4BFC-853E-C1A8147F021F')\n", (1050, 1090), False, 'import uuid\n'), ((322, 390), 'circle_core.types.BlobMetadata', 'BlobMetadata', (['"""text/plain"""', '"""deadbeafdeadbeafdeadbeafdeadbeaf"""', 'None'], {}), "('text/plain', 'deadbeafdeadbeafdeadbeafdeadbeaf', None)\n", (334, 390), False, 'from circle_core.types import BlobMetadata\n'), ((658, 724), 'circle_core.workers.blobstore.StoredBlob', 'StoredBlob', (['"""text/plain"""', '"""deadbeafdeadbeafdeadbeafdeadbeaf"""', 'None'], {}), "('text/plain', 'deadbeafdeadbeafdeadbeafdeadbeaf', None)\n", (668, 724), False, 'from circle_core.workers.blobstore import StoredBlob\n')]
|
"""
Testcases for the pytest plugin of Yagot.
Note: 'testdir' is a fixture provided by the pytester plugin of pytest.
See https://docs.pytest.org/en/latest/reference.html#testdir for details.
"""
import pytest
def test_help_message(testdir):
"""
Test that the Yagot plugin's options appear in the pytest help message.
"""
result = testdir.runpytest(
'--help',
)
result.stdout.fnmatch_lines([
'*Yagot:',
'* --yagot*',
'* --yagot-leaks-only*',
'* --yagot-ignore-types=*',
])
assert result.ret == 0
def test_disabled(testdir):
"""
Test with the Yagot plugin disabled.
"""
test_code = """
def test_clean():
_ = dict()
"""
testdir.makepyfile(test_code)
result = testdir.runpytest()
assert result.ret == 0
def test_collected_clean(testdir):
"""
Test with the Yagot plugin enabled for collected objects but no collected
objects produced.
"""
test_code = """
def test_clean():
_ = dict()
"""
testdir.makepyfile(test_code)
result = testdir.runpytest('--yagot')
assert result.ret == 0
def test_uncollectable_clean(testdir):
"""
Test with the Yagot plugin enabled for uncollectable objects but no
uncollectable objects produced.
"""
test_code = """
def test_clean():
_ = dict()
"""
testdir.makepyfile(test_code)
result = testdir.runpytest('--yagot', '--yagot-leaks-only')
assert result.ret == 0
def test_collected_selfref(testdir):
"""
Test with the Yagot plugin enabled for collected objects and collected
objects produced as self-referencing dict.
"""
test_code = """
def test_clean():
d1 = dict()
d1['self'] = d1
"""
testdir.makepyfile(test_code)
result = testdir.runpytest('--yagot')
result.stdout.fnmatch_lines([
'*There were 1 collected or uncollectable object(s) '
'caused by function test_collected_selfref.py::test_clean*',
])
assert result.ret == 1
def test_collected_selfref_ignored(testdir):
"""
Test with the Yagot plugin enabled for collected objects and collected
objects produced as self-referencing dict, ignoring dict types.
"""
test_code = """
def test_clean():
d1 = dict()
d1['self'] = d1
"""
testdir.makepyfile(test_code)
result = testdir.runpytest('--yagot', '--yagot-ignore-types=dict,list')
assert result.ret == 0
def test_collected_selfref_failed(testdir):
"""
Test with the Yagot plugin enabled for collected objects and collected
objects produced as self-referencing dict, but testcase failed.
"""
test_code = """
def test_fail():
d1 = dict()
d1['self'] = d1
assert False
"""
testdir.makepyfile(test_code)
result = testdir.runpytest('--yagot')
result.stdout.fnmatch_lines([
'*test_collected_selfref_failed.py:4: AssertionError*',
])
assert result.ret == 1
@pytest.mark.xfail(reason="Increased reference count is not detected")
def test_uncollectable_incref(testdir):
"""
Test with the Yagot plugin enabled for uncollectable objects and
uncollectable object produced with increased reference count.
"""
test_code = """
import sys
import gc
import yagot
import test_leaky
def test_leak():
l1 = [1, 2]
assert gc.is_tracked(l1)
assert sys.getrefcount(l1) == 2
test_leaky.incref(l1)
assert sys.getrefcount(l1) == 3
"""
testdir.makepyfile(test_code)
result = testdir.runpytest('--yagot', '--yagot-leaks-only')
result.stdout.fnmatch_lines([
'*There were 1 uncollectable object(s) '
'caused by function test_leak.py::test_leak*',
])
assert result.ret == 1
|
[
"pytest.mark.xfail"
] |
[((3019, 3088), 'pytest.mark.xfail', 'pytest.mark.xfail', ([], {'reason': '"""Increased reference count is not detected"""'}), "(reason='Increased reference count is not detected')\n", (3036, 3088), False, 'import pytest\n')]
|
#!/usr/bin/env python3
"""
Author : fleide <<EMAIL>>
Date : 2020-12-04
Purpose: Picnic
"""
import argparse
# --------------------------------------------------
def get_args():
"""Get command-line arguments"""
parser = argparse.ArgumentParser(
description='Rock the Casbah',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('foods',
metavar='foods',
nargs='+', # we can take more than one
help='Item(s) to bring')
parser.add_argument('-s',
'--sorted',
help='Sort the items (default: False)',
action='store_true') # magic!
return parser.parse_args()
# --------------------------------------------------
def main():
"""Make a jazz noise here"""
args = get_args()
sorted = args.sorted
foods = args.foods
if sorted:
foods.sort()
bringing = ''
if (len(foods) == 1):
bringing = foods[0]
elif (len(foods) == 2):
bringing = f'{foods[0]} and {foods[1]}'
else:
last_food = foods.pop(-1)
bringing = ', '.join(foods) + f', and {last_food}'
print(f'You are bringing {bringing}.')
# --------------------------------------------------
if __name__ == '__main__':
main()
|
[
"argparse.ArgumentParser"
] |
[((232, 347), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Rock the Casbah"""', 'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), "(description='Rock the Casbah', formatter_class=\n argparse.ArgumentDefaultsHelpFormatter)\n", (255, 347), False, 'import argparse\n')]
|
"""Cross-reference object definition.
"""
import typing
import fastobo
from .utils.meta import roundrepr, typechecked
__all__ = ["Xref"]
@roundrepr
class Xref(object):
"""A cross-reference to another document or resource.
Cross-references (xrefs for short) can be used to back-up definitions of
entities, synonyms, or to link ontological entities to other resources
they may have been derived from. Although originally intended to provide
links to databases, cross-references in OBO ontologies gained additional
purposes, such as helping for header macros expansion, or being used to
alias external relationships with local unprefixed IDs.
The OBO format version 1.4 expects references to be proper OBO identifiers
that can be translated to actual IRIs, which is a breaking change from the
previous format. Therefore, cross-references are encouraged to be given as
plain IRIs or as prefixed IDs using an ID from the IDspace mapping defined
in the header.
Example:
A cross-reference in the Mammalian Phenotype ontology linking a term
to some related Web resource:
>>> mp = pronto.Ontology.from_obo_library("mp.obo")
>>> mp["MP:0030151"].name
'abnormal buccinator muscle morphology'
>>> mp["MP:0030151"].xrefs
frozenset({Xref('https://en.wikipedia.org/wiki/Buccinator_muscle')})
Caution:
`Xref` instances compare only using their identifiers; this means it
is not possible to have several cross-references with the same
identifier and different descriptions in the same set.
Todo:
Make sure to resolve header macros for xrefs expansion (such as
``treat-xrefs-as-is_a``) when creating an ontology, or provide a
method on `~pronto.Ontology` doing so when called.
"""
id: str
description: typing.Optional[str]
__slots__ = ("__weakref__", "id", "description") # noqa: E0602
@typechecked()
def __init__(self, id: str, description: typing.Optional[str] = None):
"""Create a new cross-reference.
Arguments:
id (str): the identifier of the cross-reference, either as a URL,
a prefixed identifier, or an unprefixed identifier.
description (str or None): a human-readable description of the
cross-reference, if any.
"""
# check the id is valid using fastobo
if not fastobo.id.is_valid(id):
raise ValueError("invalid identifier: {}".format(id))
self.id: str = id
self.description = description
def __eq__(self, other: object) -> bool:
if isinstance(other, Xref):
return self.id == other.id
return False
def __gt__(self, other: object) -> bool:
if isinstance(other, Xref):
return self.id > other.id
return NotImplemented
def __ge__(self, other: object) -> bool:
if isinstance(other, Xref):
return self.id >= other.id
return NotImplemented
def __lt__(self, other: object) -> bool:
if isinstance(other, Xref):
return self.id < other.id
return NotImplemented
def __le__(self, other: object) -> bool:
if isinstance(other, Xref):
return self.id <= other.id
return NotImplemented
def __hash__(self):
return hash(self.id)
|
[
"fastobo.id.is_valid"
] |
[((2457, 2480), 'fastobo.id.is_valid', 'fastobo.id.is_valid', (['id'], {}), '(id)\n', (2476, 2480), False, 'import fastobo\n')]
|
from blesuite.pybt.roles import LECentral, LEPeripheral
from blesuite.pybt.core import Connection
from blesuite.pybt.gatt import UUID, AttributeDatabase, Server
from blesuite.pybt.gap import GAP
from blesuite.gatt_procedures import gatt_procedure_write_handle, gatt_procedure_write_handle_async, \
gatt_procedure_read_handle, gatt_procedure_read_handle_async, \
gatt_procedure_read_uuid, gatt_procedure_read_uuid_async, \
gatt_procedure_discover_primary_services, \
gatt_procedure_discover_secondary_services, \
gatt_procedure_discover_characteristics, \
gatt_procedure_discover_includes, \
gatt_procedure_discover_descriptors, gatt_procedure_prepare_write_handle, \
gatt_procedure_prepare_write_handle_async, gatt_procedure_execute_write, \
gatt_procedure_execute_write_async, gatt_procedure_write_command_handle, \
gatt_procedure_read_multiple_handles, \
gatt_procedure_read_multiple_handles_async, \
gatt_procedure_read_blob_handle, gatt_procedure_read_blob_handle_async
from blesuite.smart_scan import blesuite_smart_scan
from blesuite.entities.gatt_device import BLEDevice
from blesuite.event_handler import BTEventHandler
import logging
import gevent
import os
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
ROLE_CENTRAL = 0x00
ROLE_PERIPHERAL = 0x01
PUBLIC_DEVICE_ADDRESS = 0x00
RANDOM_DEVICE_ADDRESS = 0x01
class BLEConnection(object):
"""
BLEConnection is used to represent a connection between the BLEConnection manager
and a BLE device. This object is commonly returned to the user to represent a connection and is passed
to further BLEConnectionManager functions to interact with the connections.
:param address: The address of the peer BLEDevice that the HCI device is connected to.
:param address_type: The address type of the peer BLEDevice [Central = 0x00 | Peripheral = 0x01]
:param connection_handle: The connection handle used to interact with the associated peer BLE device.
:type address: str
:type address_type: int
:type connection_handle: int
"""
def __init__(self, address, address_type, connection_handle=None):
self.address = address
self.address_type = address_type
self.connection_handle = connection_handle
self.interval_min = None
self.interval_max = None
self.mtu = 23 # default as per spec
def __repr__(self):
return '<{} address={}, type={}>'.format(
self.__class__.__name__,
self.address,
{0: "random", 1: "public"}.get(self.address_type, "Unknown")
)
class BLEConnectionManager(object):
"""
BLEConnectionManager is used to manage connections to Bluetooth Low Energy Devices.
The connection manager is associated with an HCI device, such as a Bluetooth USB adapter,
and is responsible for creating the BLE stack and providing a user-friendly interface for
interacting with the BLE stack in order to send and receive packets.
:param adapter: BTLE adapter on host machine to use for connection (defaults to first found adapter). If left blank, the host's default adapter is used.
:param role: Type of role to create for the HCI device [central | peripheral]
:param our_address_type: Type of address for our Bluetooth Adapter. [public | random] (default: "public"). Note: We currently only support static random addresses, not resolvable or non-resolvable private addresses.
:param random_address: If our address type is set to random, supply a random address or one will be randomly generated ("AA:BB:CC:DD:EE:FF") (default: None)
:param psm: Specific PSM (default: 0)
:param mtu: Specific MTU (default: 23 as per spec BLUETOOTH SPECIFICATION Version 4.2 [Vol 3, Part G] 5.2.1)
:param gatt_server: GATT Server from pybt. Used to assign a custom blesuite.pybt.gatt Server object as the server for a peripheral. Alternatively, by default if the peripheral role is configured, a GATT Server object will be created with no services or characteristics that the user can add to through BLEConnectionManager class methods.
:param event_handler: BTEventHandler class instance that will be called when packets are received by the blesuite.pybt.core packet routing class (SocketHandler).
:param att_operation_event_hook: ATT operation hook functions triggered when the ATT server receives an ATT request
:param att_security_event_hook: ATT security hook functions triggered when the ATT server receives an ATT request and security checks are made
:type att_security_event_hook: blesuite.event_handler.ATTSecurityHook
:type att_operation_event_hook: blesuite.event_handler.ATTEventHook
:type adapter: int
:type role: str
:type our_address_type: str
:type random_address: str
:type psm: int
:type mtu: int
:type gatt_server: Server
:type event_handler: BTEventHandler
"""
def __init__(self, adapter, role, our_address_type="public", random_address=None,
psm=0, mtu=23, gatt_server=None, event_handler=None, att_operation_event_hook=None,
att_security_event_hook=None):
self.role_name = role
self.adapter = adapter
self.requester = None
self.responses = []
self.response_counter = 0
self.psm = psm
self.mtu = mtu
self.gatt_server = gatt_server
self.event_handler = event_handler
self.att_operation_event_hook = att_operation_event_hook
self.att_security_event_hook = att_security_event_hook
self.address = None
self.our_address_type_name = our_address_type
if self.our_address_type_name.lower() == "random":
self.our_address_type = RANDOM_DEVICE_ADDRESS
else:
self.our_address_type = PUBLIC_DEVICE_ADDRESS
if self.our_address_type == RANDOM_DEVICE_ADDRESS and random_address is None:
self.random_address = ':'.join(map(lambda x: x.encode('hex'), os.urandom(6)))
elif self.our_address_type == RANDOM_DEVICE_ADDRESS:
self.random_address = random_address
else:
self.random_address = None
self.central = None
self.stack_connection = None
self.connections = []
if role is 'central':
logger.debug("creating central")
self._create_central()
logger.debug("creating PyBT connection")
self._create_stack_connection(ROLE_CENTRAL)
logger.debug("creating listeners")
self._start_listeners()
elif role is 'peripheral':
logger.debug("creating peripheral role")
self._create_peripheral()
logger.debug("creating PyBT connection")
self._create_stack_connection(ROLE_PERIPHERAL)
logger.debug("creating listeners")
self._start_listeners()
else:
logger.error("Unknown role: %s" % role)
raise RuntimeError("Unknown role: %s" % role)
self.address = self.role.stack.addr
def __enter__(self):
return self
def __del__(self):
if self.stack_connection is not None:
for connection in self.connections:
if self.stack_connection.is_connected(connection.connection_handle):
self.stack_connection.disconnect(connection.connection_handle, 0x16)
self.stack_connection.destroy()
self.stack_connection = None
def __exit__(self, exc_type, exc_val, exc_tb):
logger.debug("Exiting bleConnectionManager. exc_type:%s exc_val:%s exc_tb:%s" % (exc_type, exc_val, exc_tb))
if self.stack_connection is not None:
self.stack_connection.destroy()
self.stack_connection = None
if self.role is not None:
self.role.destroy()
self.role = None
def _create_central(self):
if self.adapter is None:
self.role = LECentral(address_type=self.our_address_type, random=self.random_address,
att_operation_event_hook=self.att_operation_event_hook)
else:
self.role = LECentral(adapter=self.adapter, address_type=self.our_address_type, random=self.random_address,
att_operation_event_hook=self.att_operation_event_hook)
def _create_peripheral(self):
if self.gatt_server is None:
self.attribute_db = AttributeDatabase(event_handler=self.att_security_event_hook)
self.gatt_server = Server(self.attribute_db)
self.gatt_server.set_mtu(self.mtu)
if self.adapter is None:
self.role = LEPeripheral(self.gatt_server, mtu=self.mtu, address_type=self.our_address_type,
random=self.random_address,
att_operation_event_hook=self.att_operation_event_hook)
else:
self.role = LEPeripheral(self.gatt_server, adapter=self.adapter, mtu=self.mtu,
address_type=self.our_address_type, random=self.random_address,
att_operation_event_hook=self.att_operation_event_hook)
def _create_stack_connection(self, role_type):
if self.event_handler is None:
self.event_handler = BTEventHandler(self)
self.stack_connection = Connection(self.role, role_type, self.event_handler)
def _start_listeners(self):
self.stack_connection.start()
def get_address(self):
""" Get the address of the HCI device represented by the BLEConnectionManager.
:return: The HCI device address
:rtype: str
"""
return self.address
def get_discovered_devices(self):
"""
Get a dictionary of address seen during a scan and the associated advertising data.
:return: Dictionary of seen addresses and advertising data
:rtype: dict {"<address>":(<addressTypeInt>, "<advertisingData>")}
"""
return self.stack_connection.seen
def set_event_handler(self, event_class):
"""
Set the BTEventHandler for the pybt.core.SocketHandler class that will trigger when a Bluetooth Event
is received by the stack.
:param event_class: Event handler class instance.
:type event_class: BTEventHandler
:return: Success state
:rtype: bool
"""
logger.debug("Trying to set event handler")
self.event_handler = event_class
if self.stack_connection.socket_handler is not None:
logger.debug("Stack connection found, setting event handler")
self.stack_connection.set_event_handler(event_class)
return True
return False
def set_att_operation_hook(self, event_class):
"""
Set the ATTEventHook for the pybt.att.AttributeProtocol class that will trigger when an ATT operation
against the ATT database running locally is received.
:param event_class: ATT event class hook instance.
:type event_class: ATTEventHook
:return: Success state
:rtype: bool
"""
logger.debug("Trying to set ATT operation hook")
self.att_operation_event_hook = event_class
self.role.att.event_handler = self.att_operation_event_hook
return True
def set_att_security_hook(self, event_class):
"""
Set the ATTSecurityHook for the pybt.gatt.AttributeDatabase class that will trigger when a security
check against an ATT operation acting on the ATT database occurs. These checks cover encryption,
authentication, and authorization.
:param event_class: ATT security event hook class instance.
:type event_class: ATTSecurityHook
:return: Success state
:rtype: bool
"""
logger.debug("Trying to set ATT security hook")
self.att_security_event_hook = event_class
if self.gatt_server is None:
logger.debug("No GATT server running, setting security hook failed.")
return False
self.gatt_server.db.att_security_hooks = self.att_security_event_hook
return True
def is_connected(self, connection):
""" Return whether the specified connection is connected to the peer device.
:return: Return connection status
:rtype: bool
"""
return self.stack_connection.is_connected(connection.connection_handle)
def init_connection(self, address, address_type):
"""
Create BLEConnection object that represents the host's connection to a BLE peripheral.
:param address: BD_ADDR of target BLE Peripheral
:param address_type: Address type of target BLE Peripheral [public | random]
:type address: string
:type address_type: string
:return: Return BLEConnection object that is used in any communication function.
:rtype: BLEConnection
"""
address = address.upper()
if address_type == "public":
address_type = PUBLIC_DEVICE_ADDRESS
elif address_type == "private":
address_type = RANDOM_DEVICE_ADDRESS
ble_connection = BLEConnection(address, address_type)
self.connections.append(ble_connection)
return ble_connection
def get_bleconnection_from_connection_handle(self, connection_handle):
"""
Lookup a BLEConnection based on a supplied connection handle value.
:param connection_handle: Connection handle used to look up an existing BLEConnection
:type connection_handle: int
:return: BLEConnection or None
:rtype: BLEConnection or None
"""
for connection in self.connections:
if connection.connection_handle is not None and connection.connection_handle == connection_handle:
return connection
return None
def connect(self, ble_connection, timeout=15):
"""
Initiate a connection with a peer BLEDevice.
:param ble_connection: BLEConnection that represents the connection between our HCI device and the peer
:type ble_connection: BLEConnection
:param timeout: Connection timeout in seconds (default: 15)
:type timeout: int
:return: Connected status
:rtype: bool
"""
import time
start = time.time()
if not self.stack_connection.is_connected(ble_connection.connection_handle):
request = self.stack_connection.connect(ble_connection.connection_handle, ble_connection.address,
kind=ble_connection.address_type)
while not request.has_response():
if timeout is not None and time.time() - start >= timeout:
logger.debug("Connection failed: Connection timeout reached.")
return False
logger.debug("Is not connected")
gevent.sleep(1)
ble_connection.connection_handle = request.response.conn_handle
logger.debug("Connected")
return True
def disconnect(self, connection, reason=0x16):
"""
Disconnect from a peer BLE device.
:param connection: BLEConnection to disconnect
:type connection: BLEConnection
:param reason: The reason for the disconnection (default: 0x16 - Connection terminated by local host). Reasons defined in BLUETOOTH SPECIFICATION Version 5.0 | Vol 2, Part E page 777
:type reason: int
"""
self.stack_connection.disconnect(connection.connection_handle, reason)
    def pair(self, ble_connection, timeout=15):
        """
        Initiate pairing with a peer BLE device. This method is blocking and will wait
        until a paired (encrypted) connection is observed, pairing fails, or the
        timeout is reached.

        If custom pairing request parameters are required, configure
        the parameters prior to calling this function.

        :param ble_connection: The BLEConnection to initiate pairing on
        :type ble_connection: BLEConnection
        :param timeout: Pairing timeout in seconds (default: 15). Pass None to wait forever.
        :type timeout: int
        :return: True once the link reports encrypted, False on failure or timeout
        :rtype: bool
        """
        import time
        self.initiate_pairing(ble_connection)
        start = time.time()
        # Success is detected indirectly: pairing completion turns on link
        # encryption, so poll the SMP encryption status once per second.
        while not self.role.smp.get_connection_encryption_status(ble_connection.connection_handle):
            if self.role.smp.did_pairing_fail(ble_connection.address):
                logger.debug("Pairing Failed")
                return False
            if timeout is not None and time.time() - start >= timeout:
                return False
            logger.debug("Pairing in progress. Pairing Failed: %s " % self.role.smp.did_pairing_fail(ble_connection.address))
            gevent.sleep(1)
        logger.debug("Paired")
        return True
def initiate_pairing(self, ble_connection):
"""
Send pairing request to peer device. This is meant as an asynchronous way for a user to initiate pairing
and manage the connection while waiting for the pairing process to complete. Use BLEConnectionManager.pair
for a synchronous pairing procedure.
:param ble_connection: The BLEConnection to initiate pairing on
:type ble_connection: BLEConnection
:return:
:rtype:
"""
if not self.is_connected(ble_connection):
self.connect(ble_connection)
self.role.smp.send_pairing_request(ble_connection.address, ble_connection.connection_handle)
def is_pairing_in_progress(self, ble_connection):
"""
Retrieve pairing status of BLEConnection
:param ble_connection: The BLEConnection to view the pairing status of
:type ble_connection: BLEConnection
:return: Status of BLE pairing
:rtype: bool
"""
return self.role.smp.is_pairing_in_progress(ble_connection.address)
def did_pairing_fail(self, ble_connection):
"""
Lookup whether a pairing failed status was triggered
:param ble_connection: The BLEConnection to check for a pairing failure
:type ble_connection: BLEConnection
:return: Pairing failure status (True means failure was triggered)
:rtype: bool
"""
return self.role.smp.did_pairing_fail(ble_connection.address)
def is_connection_encrypted(self, ble_connection):
"""
Retrieve BLEConnection encryption status
:param ble_connection: The BLEConnection to check the encryption status of
:type ble_connection: BLEConnection
:return: Encryption status
:rtype: bool
"""
return self.role.smp.get_connection_encryption_status(ble_connection.connection_handle)
def resume_connection_encryption(self, ble_connection):
"""
Initiate BLEConnection encryption with encryption keys present in the Security Manager's LongTermKeyDatabase.
Encryption key look-up is done based on the address of the peer device's address.
:param ble_connection: The BLEConnection to resume encryption on
:type ble_connection: BLEConnection
:return: Result of encryption initiation with existing keys (True if encryption initiation was successfully start, False if encryption keys were not found)
:rtype: bool
"""
result = self.role.smp.initiate_encryption_with_existing_keys(ble_connection.address,
ble_connection.address_type,
ble_connection.connection_handle, self.address,
self.our_address_type, self.role)
return result
def get_security_manager_long_term_key_database(self):
"""
Retrieve the LongTermKeyDatabase from the Security Manager
:return: LongTermKeyDatabase from the Security Manager
:rtype: blesuite.pybt.sm.LongTermKeyDatabase
"""
return self.role.smp.long_term_key_db
def add_key_to_security_manager_long_term_key_database(self, address, address_type, ltk, ediv, rand, irk, csrk, security_mode,
security_level):
"""
Add an entry to the LongTermKeyDatabase that will be used for encryption key lookups when encryption
on a BLEConnection is initiated
:param address: Address of peer device (byte form, big-endian)
:type address: str
:param address_type: Address type of peer device
:type address_type: int
:param ltk: Long term key for peer (big-endian)
:type ltk: str
:param ediv: EDIV for peer. Required for LE Legacy encryption resumption
:type ediv: int
:param rand: Encryption Random for peer (big-endian). Required for LE Legacy encryption resumption
:type rand: str
:param irk: IRK for peer (big-endian)
:type irk: str
:param csrk: CSRK for peer
:type csrk: str
:param security_mode: Security mode associated with encryption keys. This mode will be applied to a connection encrypted with these keys.
:type security_mode: int
:param security_level: Security level associated with encryption keys. This level will be applied to a connection encrypted with these keys.
:type security_level: int
:return:
:rtype:
"""
self.role.smp.long_term_key_db.add_long_term_key_entry(address, address_type,
ltk, ediv, rand, irk, csrk, security_mode,
security_level)
def export_security_manager_long_term_key_database_for_storage(self):
"""
Export Security Manager LongTermKeyDatabase as a list of dictionary containing BLE
encryption properties (LTK, EDIV, random,
CSRK, IRK, security mode, security level) with integers and hex encoded strings
:return: LongTermKeyDatabase as a list of dictionaries with integers and hex encoded strings (user-friendly exportable version)
:rtype: dict
"""
ltk_db = self.role.smp.long_term_key_db.get_long_term_key_database()
for entry in ltk_db:
temp = entry['address']
if temp is not None:
temp = temp.encode('hex')
entry['address'] = temp
temp = entry['ltk']
if temp is not None:
temp = temp.encode('hex')
entry['ltk'] = temp
temp = entry['rand']
if temp is not None:
temp = temp.encode('hex')
entry['rand'] = temp
temp = entry['irk']
if temp is not None:
temp = temp.encode('hex')
entry['irk'] = temp
temp = entry['csrk']
if temp is not None:
temp = temp.encode('hex')
entry['csrk'] = temp
return ltk_db
def import_long_term_key_database_to_security_manager(self, long_term_key_database):
"""
Import LongTermKeyDatabase and apply it to the Security Manager. Import database format is identical
to the LongTermKeyDatabase export format with integers and hex encoded strings. The function will perform
some input validation to ensure proper encoding and value types.
:param long_term_key_database: List of dictionaries of LongTermKeyDatabase entries with integers and hex encoded strings
:type long_term_key_database: list of dict
:return:
:rtype:
"""
import blesuite.utils.validators as validator
for entry in long_term_key_database:
keys = entry.keys()
if 'address' in keys:
peer_address = entry['address'].decode('hex')
else:
peer_address = "00" * 6
if 'address_type' in keys:
peer_address_type = entry['address_type']
else:
peer_address_type = 0
if 'ltk' in keys:
ltk = validator.validate_ltk(entry['ltk']).decode('hex')
else:
raise validator.InvalidSMLTK(None)
if 'ediv' in keys:
ediv = entry['ediv']
else:
ediv = 0
if 'rand' in keys:
rand = validator.validate_rand(entry['rand']).decode('hex')
else:
rand = '\x00' * 8
if 'irk' in keys:
irk = validator.validate_irk(entry['irk']).decode('hex')
else:
irk = '\x00' * 16
if 'csrk' in keys:
csrk = validator.validate_csrk(entry['csrk']).decode('hex')
else:
csrk = '\x00' * 16
if 'security_mode' in keys:
mode = entry['security_mode']
else:
mode = 1
if 'security_level' in keys:
level = entry['security_level']
else:
level = 1
mode, level = validator.validate_att_security_mode(mode, level)
self.role.smp.long_term_key_db.add_long_term_key_entry(peer_address, peer_address_type, ltk, ediv, rand,
irk, csrk, mode, level)
def get_security_manager_protocol_default_pairing_parameters(self):
"""
Get the default pairing parameters that will be applied to Security Managers by default.
The pairing parameters are used by the devices to determine the type of pairing to use, the temporary key
sharing method (association model), and which keys will be exchanged when pairing is complete (if any).
See BLUETOOTH SPECIFICATION Version 5.0 | Vol 3, Part H
page 2340 - 2342 for more details.
(Security Managers are created per BLE connection and can be modified independently)
:return: {io_cap, oob, mitm, bond, lesc, keypress, ct2, rfu, max_key_size, initiator_key_distribution, responder_key_distribution}
:rtype: dict
"""
return self.role.smp.get_default_pairing_parameters()
def set_security_manager_protocol_default_pairing_parameters(self, default_io_cap=0x03, default_oob=0x00,
default_mitm=0x00,
default_bond=0x01, default_lesc=0x00,
default_keypress=0x00,
default_ct2=0x01, default_rfu=0x00,
default_max_key_size=16,
default_initiator_key_distribution=0x01,
default_responder_key_distribution=0x01):
"""
Set the default pairing parameters that will be applied to Security Managers by default.
The pairing parameters are used by the devices to determine the type of pairing to use, the temporary key
sharing method (association model), and which keys will be exchanged when pairing is complete (if any).
See BLUETOOTH SPECIFICATION Version 5.0 | Vol 3, Part H
page 2340 - 2342 for more details.
(Security Managers are created per BLE connection and can be modified independently)
:param default_io_cap: IO Capabilities (default: 0x03 - No Input, No Output)
:type default_io_cap: int
:param default_oob: Out-of-band Data present and available (default: 0x00)
:type default_oob: int
:param default_mitm: Request man-in-the-middle pairing protections (default: 0x01)
:type default_mitm: int
:param default_bond: Request bonding (default: 0x01)
:type default_bond: int
:param default_lesc: LE Secure Connections supported (default: 0x00)
:type default_lesc: int
:param default_keypress: Keypress notifications (default: 0x00)
:type default_keypress: int
:param default_ct2: CT2 (default: 0x01)
:type default_ct2: int
:param default_rfu: Reserved for future use bits (default: 0x00)
:type default_rfu: int
:param default_max_key_size: Max encryption key size (default: 16)
:type default_max_key_size: int
:param default_initiator_key_distribution: Requested keys to be sent by the initiator (central) (default: 0x01)
:type default_initiator_key_distribution: int
:param default_responder_key_distribution: Requested keys to be sent by the responder (peripheral) (default: 0x01)
:type default_responder_key_distribution: int
:return:
:rtype:
"""
self.role.smp.set_default_pairing_parameters(default_io_cap, default_oob, default_mitm, default_bond,
default_lesc, default_keypress, default_ct2, default_rfu,
default_max_key_size, default_initiator_key_distribution,
default_responder_key_distribution)
def get_security_manager_protocol_pairing_parameters_for_connection(self, ble_connection):
"""
Get the default pairing parameters for the Security Manager associated with a BLEConnection (based on the
peer address).
The pairing parameters are used by the devices to determine the type of pairing to use, the temporary key
sharing method (association model), and which keys will be exchanged when pairing is complete (if any).
See BLUETOOTH SPECIFICATION Version 5.0 | Vol 3, Part H
page 2340 - 2342 for more details.
:param ble_connection: BLEConnection to modify Security Manager pairing parameters of
:type ble_connection: BLEConnection
:return: {io_cap, oob, mitm, bond, lesc, keypress, ct2, rfu, max_key_size, initiator_key_distribution, responder_key_distribution}
:rtype: dict
"""
return self.role.smp.get_pairing_parameters_for_connection(ble_connection.address)
def set_security_manager_protocol_pairing_parameters_for_connection(self, ble_connection, io_cap=0x03, oob=0x00,
mitm=0x00,
bond=0x01, lesc=0x00, keypress=0x0, ct2=0x01,
rfu=0x00, max_key_size=16,
initiator_key_distribution=0x01,
responder_key_distribution=0x01):
"""
Set the default pairing parameters for the Security Manager associated with a BLEConnection (based on the
peer address).
The pairing parameters are used by the devices to determine the type of pairing to use, the temporary key
sharing method (association model), and which keys will be exchanged when pairing is complete (if any).
See BLUETOOTH SPECIFICATION Version 5.0 | Vol 3, Part H
page 2340 - 2342 for more details.
:param ble_connection: BLEConnection to modify Security Manager pairing parameters of
:type ble_connection: BLEConnection
:param io_cap: IO Capabilities (default: 0x03 - No Input, No Output)
:type io_cap: int
:param oob: Out-of-band Data present and available (default: 0x00)
:type oob: int
:param mitm: Request man-in-the-middle pairing protections (default: 0x01)
:type mitm: int
:param bond: Request bonding (default: 0x01)
:type bond: int
:param lesc: LE Secure Connections supported (default: 0x00)
:type lesc: int
:param keypress: Keypress notifications (default: 0x00)
:type keypress: int
:param ct2: CT2 (default: 0x01)
:type ct2: int
:param rfu: Reserved for future use bits (default: 0x00)
:type rfu: int
:param max_key_size: Max encryption key size (default: 16)
:type max_key_size: int
:param initiator_key_distribution: Requested keys to be sent by the initiator (central) (default: 0x01)
:type initiator_key_distribution: int
:param responder_key_distribution: Requested keys to be sent by the responder (peripheral) (default: 0x01)
:type responder_key_distribution: int
:return: Success status of pairing parameter configuration (False is returned if BLEConnection does not have a valid connection or a security manager set)
:rtype: bool
"""
return self.role.smp.set_pairing_parameters_for_connection(ble_connection.address, io_cap, oob, mitm,
bond, lesc, keypress, ct2, rfu, max_key_size,
initiator_key_distribution,
responder_key_distribution)
def decode_gap_data(self, data):
"""
Decode GAP data into GAP class object
:param data: GAP binary data
:type data: str
:return: GAP object containing the GAP data that has been parsed
:rtype: blesuite.pybt.gap.GAP
"""
gap = GAP()
try:
gap.decode(data)
except Exception as e:
if "Data too short" in str(e):
logger.debug("Data too short, leaving off malformed data")
else:
raise e
return gap
def generate_gap_data_dict(self, gap):
"""
Generates a dictionary of user-friendly strings that describe the GAP data in the supplied GAP object.
:param gap: GAP object to retrieve data from
:type gap: blesuite.pybt.gap.GAP
:return: Dictionary of readable strings that represent the GAP data stored in the object
:rtype: dict
"""
return gap.gap_dict()
# Scanning/Discovery Functions
    def scan(self, timeout):
        """
        Carry out a BLE scan for the specified timeout and return discovered devices.

        NOTE(review): the loop below compares elapsed time in milliseconds
        (time.time() * 1000), so *timeout* is in milliseconds - consistent with
        the 15 * 1000 defaults used by other methods in this class - not seconds
        as previously documented.

        :param timeout: Scan duration in milliseconds
        :type timeout: int
        :return: Discovered devices
        :rtype: dict
        """
        import time
        self.start_scan()
        start = time.time() * 1000
        logger.debug("Starting sleep loop")
        # comparing time in ms
        while ((time.time() * 1000) - start) < timeout:
            logger.debug("Scanning...")
            gevent.sleep(1)
        self.stop_scan()
        logger.debug("Done scanning!")
        discovered_devices = self.get_discovered_devices()
        return discovered_devices
def start_scan(self):
"""
Enable scanning on HCI device.
:return:
:rtype:
"""
self.stack_connection.scan("on")
def stop_scan(self):
"""
Stop scanning on HCI device
:return:
:rtype:
"""
self.stack_connection.scan("off")
    def advertise_and_wait_for_connection(self):
        """
        Begin advertising with the HCI device and block until a peer connects
        (advertising stops when a connection is established).

        :return: Status of connection with a peer device and the BLEConnection
        :rtype: tuple - bool, (BLEConnection | None)
        """
        self.start_advertising()
        # Advertising is switched off by the controller once a central connects,
        # so poll until we are no longer advertising.
        while self.is_advertising():
            gevent.sleep(1)
        if len(self.stack_connection.connection_statuses.keys()) > 0:
            # NOTE(review): keys()[0] indexing is Python 2 only; picks an
            # arbitrary live connection handle - assumes exactly one connection.
            connection_handle = self.stack_connection.connection_statuses.keys()[0]
            peer_address = self.stack_connection.peer_addresses_by_connection_handle[connection_handle]
            peer_address_type = self.stack_connection.connected_addr_type_by_connection_handle[connection_handle]
            return True, BLEConnection(peer_address, peer_address_type, connection_handle=connection_handle)
        else:
            logger.error("Advertising stopped and no connections are present. Something went wrong.")
            return False, None
def start_advertising(self):
"""
Enable advertising on HCI device.
:return:
:rtype:
"""
self.stack_connection.start_advertising()
def stop_advertising(self):
"""
Disable advertising on HCI device.
:return:
:rtype:
"""
self.stack_connection.stop_advertising()
def is_advertising(self):
"""
Retrieve advertising status of HCI device.
:return: Status of advertising
:rtype: bool
"""
return self.stack_connection.is_advertising()
def set_advertising_data(self, data):
"""
Set advertising data.
:param data: Data to include in advertising packets
:type data: str
:return:
:rtype:
"""
self.stack_connection.set_advertising_data(data)
def set_scan_response_data(self, data):
"""
Set scan response data.
:param data: Data to return when a scan packet is received.
:type data: str
:return:
:rtype:
"""
self.stack_connection.set_scan_response_data(data)
def set_advertising_parameters(self, advertisement_type, channel_map, interval_min, interval_max,
destination_addr, destination_addr_type):
"""
Set advertising parameters. See: BLUETOOTH SPECIFICATION Version 5.0 | Vol 2, Part E page 1251
:param advertisement_type: Advertising packet type (see blesuite.utils.GAP_ADV_TYPES)
:type advertisement_type: int
:param channel_map: Bit field that indicates the advertising channels to use. (Channel 37 - 0x01, Channel 38 - 0x02, Channel 39 - 0x04, all channels - 0x07)
:type channel_map: int
:param interval_min: Minimum advertising interval for undirected and low duty cycle directed advertising. (Range 0x00020 - 0x4000, default 0x0800 or 1.28 seconds. Time conversion = interval * 0.625ms)
:type interval_min: int
:param interval_max: Maximum advertising interval for undirected and low duty cycle directed advertising. (Range 0x00020 - 0x4000, default 0x0800 or 1.28 seconds. Time conversion = interval * 0.625ms)
:type interval_max: int
:param destination_addr: Destination address for directed advertising (set to 00:00:00:00:00:00 if using undirected advertising)
:type destination_addr: str
:param destination_addr_type: Destination address type (set to 0x00 if using undirected advertising)
:type destination_addr_type: int
:return:
:rtype:
"""
self.stack_connection.set_advertising_parameters(advertisement_type, channel_map, interval_min, interval_max,
destination_addr, destination_addr_type)
def set_local_name(self, name, enforce_null_termination=True):
"""
Set the local name of the HCI device. (Bluetooth Spec says the value needs to be null terminated. If it is
intended to write a string that is not null terminated, then set the enforcement flag to False).
:param name: Local name to write to HCI device
:type name: str
:param enforce_null_termination: Flag to enforce null termination (default: True)
:type enforce_null_termination: bool
:return:
:rtype:
"""
if enforce_null_termination:
if len(name) != 248:
padding = 248 - len(name)
name = name + ('\0' * padding)
self.stack_connection.set_local_name(name)
def get_gatt_server(self):
"""
Retrieve the GATT server for the BLEConnectionManager instance.
:return: GATT Server
:rtype: blesuite.pybt.gatt.Server
"""
return self.gatt_server
def set_server_mtu(self, mtu):
"""
Configures the MTU (max transmission unit) on the GATT server and ATT class instance. MTU is used
to restrict the size of data the stack returns in ATT packets. Note: The MTU used by the class
is determined by the MTUs exchanged by both connected BLE devices (uses the minimum value of the
exchanged MTUs).
:param mtu: MTU size in bytes (Bluetooth Spec default is 23 bytes)
:type mtu: int
:return:
:rtype:
"""
self.mtu = mtu
self.role.att.set_mtu(mtu)
def get_server_mtu(self):
"""
Returns the MTU size from the GATT server.
:return: GATT server MTU (bytes)
:rtype: int
"""
if self.role.att.gatt_server is not None:
return self.role.att.gatt_server.mtu
def initialize_gatt_server_from_ble_device(self, ble_device, use_handles_from_ble_device=False):
"""
Initializes the GATT server based on a supplied BLEDevice entity. All services, includes, characteristics,
and descriptors are retrieved from the BLEDevice entity and added to the GATT server using the
properties and permissions configured in the BLEDevice object.
:param ble_device: BLEDevice object to replicate with the GATT server
:type ble_device: BLEDevice
:param use_handles_from_ble_device: Flag to indicate that the GATT server should use the attribute handles specified in each BLE entity withhin the BLEDevice. If set to false (default), then the GATT server will automatically assign handles in the order that entites are added to the server.
:type use_handles_from_ble_device: bool
:return:
:rtype:
"""
from pybt.gatt import GATTService, GATTCharacteristic, GATTCharacteristicDescriptorDeclaration,\
GATTInclude, UUID
if self.gatt_server is None:
att_db = AttributeDatabase()
self.gatt_server = Server(att_db)
self.gatt_server.set_mtu(self.mtu)
for service in ble_device.get_services():
gatt_service = GATTService(UUID(service.attribute_type), UUID(service.uuid))
gatt_service.start = service.start
gatt_service.end = service.end
gatt_service.handle = service.start
for incl in service.get_includes():
include_1 = GATTInclude(incl.included_service_att_handle, incl.included_service_end_group_handle,
UUID(incl.included_service_uuid),
incl.include_definition_attribute_properties,
incl.include_definition_attribute_read_permission,
incl.include_definition_attribute_write_permission,
incl.include_definition_attribute_require_authorization)
include_1.handle = incl.handle
gatt_service.add_include(include_1)
for characteristic in service.get_characteristics():
# create general characteristic (note: this method doesn't apply permissions and properties to the
# characteristic declaration descriptor)
characteristic_1 = GATTCharacteristic(characteristic.value, characteristic.gatt_properties,
UUID(characteristic.uuid),
characteristic.characteristic_value_attribute_properties,
characteristic.characteristic_value_attribute_read_permission,
characteristic.characteristic_value_attribute_write_permission,
characteristic.characteristic_value_attribute_require_authorization)
# update characteristic declaration descriptor with configured permissions and authz
characteristic_1.declaration.attribute_properties = characteristic.characteristic_definition_attribute_properties
characteristic_1.declaration.attribute_read_permission = characteristic.characteristic_definition_attribute_read_permission
characteristic_1.declaration.attribute_write_permission = characteristic.characteristic_definition_attribute_write_permission
characteristic_1.declaration.require_authorization = characteristic.characteristic_definition_attribute_require_authorization
characteristic_1.declaration.handle = characteristic.handle
characteristic_1.declaration.value_attribute_handle = characteristic.value_handle
characteristic_1.value_declaration.handle = characteristic.value_handle
for descriptor in characteristic.get_descriptors():
# characteristic declaration is already created when we created the characteristic attribute
if descriptor.type == 0x2803:
pass
descriptor_1 = GATTCharacteristicDescriptorDeclaration(UUID(descriptor.uuid),
descriptor.value,
descriptor.characteristic_descriptor_attribute_properties,
descriptor.characteristic_descriptor_attribute_read_permission,
descriptor.characteristic_descriptor_attribute_write_permission,
descriptor.characteristic_descriptor_attribute_require_authorization)
descriptor_1.handle = descriptor.handle
characteristic_1.add_descriptor(descriptor_1)
gatt_service.add_characteristic(characteristic_1)
self.gatt_server.add_service(gatt_service)
self.gatt_server.refresh_database(calculate_handles=(not use_handles_from_ble_device))
def set_extended_inquiry_response(self, fec_required=0, formatted_eir_data=None):
"""
Set the extended inquiry response on the HCI device.
:param fec_required: FEC required (default: 0)
:type fec_required: 0
:param formatted_eir_data: Formatted extended inquiry response data (default: None)
:type formatted_eir_data: str
:return:
:rtype:
"""
self.stack_connection.set_eir_response(fec_required=fec_required, formatted_eir_data=formatted_eir_data)
def read_remote_used_features(self, connection):
"""
Issues a read remote used features command to the connected peer device.
:param connection: BLEConnection of target connection
:type connection: BLEConnection
:return:
:rtype:
"""
self.stack_connection.read_remote_used_features(connection.connection_handle)
return
# ATT Packets / GATT Procedures
def exchange_mtu(self, connection, mtu, timeout=15 * 1000):
"""
Sends Exchange MTU packet using the supplied BLEConnection object
and returns a GATTRequest object containing the request or any received errors.
Synchronous method. Note: Sending this packet as a peripheral will not
change the MTU configured on the GATT server.
:param connection: BLEConnection with connection to target device
:param mtu: Desired MTU (bytes)
:param timeout: Timeout for exhange MTU response (in milliseconds)
:type connection: BLEConnection
:type mtu: int
:rtype: blesuite.pybt.core.GATTRequest
"""
request = self.stack_connection.exchange_mtu_sync(mtu, connection.connection_handle, timeout=timeout)
if request.has_error():
logger.debug("Exchange MTU Response Error")
else:
logger.debug("Exchange MTU Response Data(str): %s" % request.response.data)
if not request.has_error() and request.has_response():
connection.mtu = mtu
return request
def gatt_discover_primary_services(self, connection, device=None):
"""
Discover primary GATT services of a peer GATT server and populate (or generate) a BLEDevice object
with the discovered entities.
:param connection: BLEConnection with the connected GATT server
:type connection: BLEConnection
:param device: BLEDevice to populate. If None is supplied (default) a new BLEDevice object with the discovered entities will be added.
:type device: BLEDevice
:return: Populated BLEDevice
:rtype: BLEDevice
"""
if device is None:
device = BLEDevice(connection.address)
return gatt_procedure_discover_primary_services(self, connection, device)
def gatt_discover_secondary_services(self, connection, device=None):
"""
Discover secondary GATT services of a peer GATT server and populate (or generate) a BLEDevice object
with the discovered entities.
:param connection: BLEConnection with the connected GATT server
:type connection: BLEConnection
:param device: BLEDevice to populate. If None is supplied (default) a new BLEDevice object with the discovered entities will be added.
:type device: BLEDevice
:return: Populated BLEDevice
:rtype: BLEDevice
"""
if device is None:
device = BLEDevice(connection.address)
return gatt_procedure_discover_secondary_services(self, connection, device)
def gatt_discover_characteristics(self, connection, device=None):
"""
Discover GATT characteristics of a peer GATT server and populate (or generate) a BLEDevice object
with the discovered entities.
:param connection: BLEConnection with the connected GATT server
:type connection: BLEConnection
:param device: BLEDevice to populate. If None is supplied (default) a new BLEDevice object with the discovered entities will be added.
:type device: BLEDevice
:return: Populated BLEDevice
:rtype: BLEDevice
"""
if device is None:
device = BLEDevice(connection.address)
return gatt_procedure_discover_characteristics(self, connection, device)
def gatt_discover_includes(self, connection, device=None):
"""
Discover GATT service includes of a peer GATT server and populate (or generate) a BLEDevice object
with the discovered entities.
:param connection: BLEConnection with the connected GATT server
:type connection: BLEConnection
:param device: BLEDevice to populate. If None is supplied (default) a new BLEDevice object with the discovered entities will be added.
:type device: BLEDevice
:return: Populated BLEDevice
:rtype: BLEDevice
"""
if device is None:
device = BLEDevice(connection.address)
return gatt_procedure_discover_includes(self, connection, device)
def gatt_discover_descriptors(self, connection, device):
"""
Discover GATT characteristic descriptors of a peer GATT server and populate (or generate) a BLEDevice object
with the discovered entities.
:param connection: BLEConnection with the connected GATT server
:type connection: BLEConnection
:param device: BLEDevice to populate. If None is supplied (default) a new BLEDevice object with the discovered entities will be added.
:type device: BLEDevice
:return: Populated BLEDevice
:rtype: BLEDevice
"""
return gatt_procedure_discover_descriptors(self, connection, device)
def smart_scan(self, connection, device=None, look_for_device_info=True, attempt_desc_read=False,
timeout=15 * 1000):
"""
Initiate a BLE Smart Scan, which is an all inclusive way to scan a BLE peripheral for all
services, includes, characteristics, and descriptors. The scan can also attempt to reach from each
attribute handle discovered during the scan (regardless of GATT properties returned by the server) in
order to quickly view data exposed by the device.
:param connection: BLEConnection with the connected GATT server
:type connection: BLEConnection
:param device: BLEDevice to populate. If None is supplied (default) a new BLEDevice object with the discovered entities will be added.
:type device: BLEDevice
:param look_for_device_info: Flag to indicate the scan should scan for several basic types of information based on UUIDs defined by the Bluetooth Special Interest Group (default: True)
:type look_for_device_info: bool
:param attempt_desc_read: Flag to indicate the scan should attempt to read from each attribute discovered during the scan (default: False). Note: This may significantly slow down the scan. If the target peripheral disconnects, the scan will attempt to reconnect to the server.
:type attempt_desc_read: bool
:param timeout: Request timeout (milliseconds)
:type timeout: int
:return: Populated BLEDevice
:rtype: BLEDevice
"""
if device is None:
device = BLEDevice(connection.address)
return blesuite_smart_scan(self, connection, device, look_for_device_info=look_for_device_info,
attempt_desc_read=attempt_desc_read, timeout=timeout)
def gatt_write_handle(self, connection, handle, data, timeout=15 * 1000):
"""
Send an ATT Write request to the peer device associated with the supplied BLEConnection, attribute
handle, and data. This is a synchronous call that will wait for either a successful response, error response,
or the specified timeout (milliseconds) to be reached.
:param connection: BLEConnection with the connected GATT server
:type connection: BLEConnection
:param handle: Attribute handle of target attribute (0x01 - 0xFFFF)
:type handle: int
:param data: Data to place in ATT write request.
:type data: str
:param timeout: Request timeout (milliseconds)
:type timeout: int
:return: GATTRequest that contains the GATTResponse or GATTError result
:rtype: blesuite.pybt.core.GATTRequest
"""
return gatt_procedure_write_handle(self.stack_connection, connection.connection_handle, handle,
data, timeout=timeout)
def gatt_write_handle_async(self, connection, handle, data, timeout=15 * 1000):
"""
Send an ATT Write request to the peer device associated with the supplied BLEConnection, attribute
handle, and data. This is an asynchronous call that will send the request to the peer device and
return a GATTRequest object that can be monitored for a GATTResponse or GATTError (either through a valid
peer response, peer error response, or timeout error triggering).
:param connection: BLEConnection with the connected GATT server
:type connection: BLEConnection
:param handle: Attribute handle of target attribute (0x01 - 0xFFFF)
:type handle: int
:param data: Data to place in ATT write request.
:type data: str
:param timeout: Request timeout (milliseconds)
:type timeout: int
:return: GATTRequest that contains the GATTResponse or GATTError result
:rtype: blesuite.pybt.core.GATTRequest
"""
return gatt_procedure_write_handle_async(self.stack_connection, connection.connection_handle, handle, data,
timeout=timeout)
    def gatt_write_command_handle(self, connection, handle, data):
        """
        Send an ATT Write Command to the peer device for the supplied attribute
        handle and data. This is fire-and-forget: a Write Command never receives
        a response from the peer, so no GATTRequest is created or returned.

        :param connection: BLEConnection with the connected GATT server
        :type connection: BLEConnection
        :param handle: Attribute handle of target attribute (0x01 - 0xFFFF)
        :type handle: int
        :param data: Data to place in ATT write command.
        :type data: str
        """
        gatt_procedure_write_command_handle(self.stack_connection, connection.connection_handle, handle, data)
def gatt_prepare_write_handle(self, connection, handle, data, offset, timeout=15 * 1000):
"""
Send an ATT Prepare Write request to the peer device associated with the supplied BLEConnection, attribute
handle, offset, and data. This is a synchronous call that will wait for either a successful response,
error response,
or the specified timeout (milliseconds) to be reached.
Note: Prepare write is used in conjunction with execute write to write a large set of data.
The user will send a series of prepare
write requests with data and the correct offsets to set a large value for a write operation. An execute
write request will then be issued to carry out the write. (Permission / Auth checks should happen on the
prepare write request).
:param connection: BLEConnection with the connected GATT server
:type connection: BLEConnection
:param handle: Attribute handle of target attribute (0x01 - 0xFFFF)
:type handle: int
:param data: Data to place in ATT write request.
:type data: str
:param offset: Offset to write the data
:type offset: int
:param timeout: Request timeout (milliseconds)
:type timeout: int
:return: GATTRequest that contains the GATTResponse or GATTError result
:rtype: blesuite.pybt.core.GATTRequest
"""
return gatt_procedure_prepare_write_handle(self.stack_connection, connection.connection_handle, handle,
data, offset, timeout=timeout)
def gatt_prepare_write_handle_async(self, connection, handle, data, offset, timeout=15 * 1000):
"""
Send an ATT Prepare Write request to the peer device associated with the supplied BLEConnection, attribute
handle, offset, and data. This is an asynchronous call that will send the request to the peer device and
return a GATTRequest object that can be monitored for a GATTResponse or GATTError (either through a valid
peer response, peer error response, or timeout error triggering).
Note: Prepare write is used in conjunction with execute write to write a large set of data.
The user will send a series of prepare
write requests with data and the correct offsets to set a large value for a write operation. An execute
write request will then be issued to carry out the write. (Permission / Auth checks should happen on the
prepare write request).
:param connection: BLEConnection with the connected GATT server
:type connection: BLEConnection
:param handle: Attribute handle of target attribute (0x01 - 0xFFFF)
:type handle: int
:param data: Data to place in ATT write request.
:type data: str
:param offset: Offset to write the data
:type offset: int
:param timeout: Request timeout (milliseconds)
:type timeout: int
:return: GATTRequest that contains the GATTResponse or GATTError result
:rtype: blesuite.pybt.core.GATTRequest
"""
return gatt_procedure_prepare_write_handle_async(self.stack_connection, connection.connection_handle,
handle, data, offset, timeout=timeout)
def gatt_execute_write(self, connection, flags, timeout=15 * 1000):
"""
Send an ATT Execute Write request to the peer device associated with the supplied BLEConnection and flag.
This is a synchronous call that will wait for either a successful response, error response,
or the specified timeout (milliseconds) to be reached.
Note: Execute write is used in conjunction with prepare write
to write a large set of data. The user will send a series of prepare
write requests with data and the correct offsets to set a large value for a write operation. An execute
write request will then be issued to carry out the write. (Permission / Auth checks should happen on the
prepare write request).
:param connection: BLEConnection with the connected GATT server
:type connection: BLEConnection
:param flags: Specifies which execute write operation should be performed (0x00 - Cancel all prepared writes, 0x01 - Immediately write all pending prepared values.
:type flags: int
:param timeout: Request timeout (milliseconds)
:type timeout: int
:return: GATTRequest that contains the GATTResponse or GATTError result
:rtype: blesuite.pybt.core.GATTRequest
"""
return gatt_procedure_execute_write(self.stack_connection, connection.connection_handle, flags, timeout=timeout)
def gatt_execute_write_async(self, connection, flags, timeout=15 * 1000):
"""
Send an ATT Execute Write request to the peer device associated with the supplied BLEConnection and flag.
This is an asynchronous call that will send the request to the peer device and
return a GATTRequest object that can be monitored for a GATTResponse or GATTError (either through a valid
peer response, peer error response, or timeout error triggering).
Note: Execute write is used in conjunction with prepare write
to write a large set of data. The user will send a series of prepare
write requests with data and the correct offsets to set a large value for a write operation. An execute
write request will then be issued to carry out the write. (Permission / Auth checks should happen on the
prepare write request).
:param connection: BLEConnection with the connected GATT server
:type connection: BLEConnection
:param flags: Specifies which execute write operation should be performed (0x00 - Cancel all prepared writes, 0x01 - Immediately write all pending prepared values.
:type flags: int
:param timeout: Request timeout (milliseconds)
:type timeout: int
:return: GATTRequest that contains the GATTResponse or GATTError result
:rtype: blesuite.pybt.core.GATTRequest
"""
return gatt_procedure_execute_write_async(self.stack_connection, connection.connection_handle, flags,
timeout=timeout)
def gatt_read_handle(self, connection, handle, timeout=15 * 1000):
"""
Send an ATT Read request to the peer device associated with the supplied BLEConnection and attribute
handle. This is a synchronous call that will wait for either a successful response, error response,
or the specified timeout (milliseconds) to be reached.
:param connection: BLEConnection with the connected GATT server
:type connection: BLEConnection
:param handle: Attribute handle of target attribute (0x01 - 0xFFFF)
:type handle: int
:param timeout: Request timeout (milliseconds)
:type timeout: int
:return: GATTRequest that contains the GATTResponse or GATTError result
:rtype: blesuite.pybt.core.GATTRequest
"""
return gatt_procedure_read_handle(self.stack_connection, connection.connection_handle, handle, timeout=timeout)
def gatt_read_handle_async(self, connection, handle, timeout=15 * 1000):
"""
Send an ATT Read request to the peer device associated with the supplied BLEConnection and attribute
handle. This is an asynchronous call that will send the request to the peer device and
return a GATTRequest object that can be monitored for a GATTResponse or GATTError (either through a valid
peer response, peer error response, or timeout error triggering).
:param connection: BLEConnection with the connected GATT server
:type connection: BLEConnection
:param handle: Attribute handle of target attribute (0x01 - 0xFFFF)
:type handle: int
:param timeout: Request timeout (milliseconds)
:type timeout: int
:return: GATTRequest that contains the GATTResponse or GATTError result
:rtype: blesuite.pybt.core.GATTRequest
"""
return gatt_procedure_read_handle_async(self.stack_connection, connection.connection_handle, handle,
timeout=timeout)
def gatt_read_multiple_handles(self, connection, handles, timeout=15 * 1000):
"""
Send an ATT Read Multiple request to the peer device associated with the supplied BLEConnection and
a set of attribute handles.
This is a synchronous call that will wait for either a successful response, error response,
or the specified timeout (milliseconds) to be reached.
:param connection: BLEConnection with the connected GATT server
:type connection: BLEConnection
:param handles: A list of attribute handles for target attributes (0x01 - 0xFFFF)
:type handles: list of int
:param timeout: Request timeout (milliseconds)
:type timeout: int
:return: GATTRequest that contains the GATTResponse or GATTError result
:rtype: blesuite.pybt.core.GATTRequest
"""
return gatt_procedure_read_multiple_handles(self.stack_connection, connection.connection_handle,
handles, timeout=timeout)
def gatt_read_multiple_handles_async(self, connection, handles, timeout=15 * 1000):
"""
Send an ATT Read Multiple request to the peer device associated with the supplied BLEConnection and
a set of attribute handles.
This is an asynchronous call that will send the request to the peer device and
return a GATTRequest object that can be monitorged for a GATTResponse or GATTError (either through a valid
return a GATTRequest object that can be monitored for a GATTResponse or GATTError (either through a valid
peer response, peer error response, or timeout error triggering).
:param connection: BLEConnection with the connected GATT server
:type connection: BLEConnection
:param handles: A list of attribute handles for target attributes (0x01 - 0xFFFF)
:type handles: list of int
:param timeout: Request timeout (milliseconds)
:type timeout: int
:return: GATTRequest that contains the GATTResponse or GATTError result
:rtype: blesuite.pybt.core.GATTRequest
"""
return gatt_procedure_read_multiple_handles_async(self.stack_connection, connection.connection_handle, handles,
timeout=timeout)
def gatt_read_blob_handle(self, connection, handle, offset, timeout=15 * 1000):
"""
Send an ATT Blob Read request to the peer device associated with the supplied BLEConnection, attribute
handle, and an offset. This is a synchronous call that will wait for either a successful response,
error response,
or the specified timeout (milliseconds) to be reached.
:param connection: BLEConnection with the connected GATT server
:type connection: BLEConnection
:param handle: Attribute handle of target attribute (0x01 - 0xFFFF)
:type handle: int
:param offset: Offset to begin reading attribute value
:type offset: int
:param timeout: Request timeout (milliseconds)
:type timeout: int
:return: GATTRequest that contains the GATTResponse or GATTError result
:rtype: blesuite.pybt.core.GATTRequest
"""
return gatt_procedure_read_blob_handle(self.stack_connection, connection.connection_handle, handle, offset,
timeout=timeout)
def gatt_read_blob_handle_async(self, connection, handle, offset, timeout=15 * 1000):
"""
Send an ATT Blob Read request to the peer device associated with the supplied BLEConnection, attribute
handle, and an offset. This is an asynchronous call that will send the request to the peer device and
return a GATTRequest object that can be monitored for a GATTResponse or GATTError (either through a valid
peer response, peer error response, or timeout error triggering).
:param connection: BLEConnection with the connected GATT server
:type connection: BLEConnection
:param handle: Attribute handle of target attribute (0x01 - 0xFFFF)
:type handle: int
:param offset: Offset to begin reading attribute value
:type offset: int
:param timeout: Request timeout (milliseconds)
:type timeout: int
:return: GATTRequest that contains the GATTResponse or GATTError result
:rtype: blesuite.pybt.core.GATTRequest
"""
return gatt_procedure_read_blob_handle_async(self.stack_connection, connection.connection_handle, handle,
offset, timeout=timeout)
def gatt_read_uuid(self, connection, uuid, timeout=15 * 1000):
"""
Send an ATT Read request to the peer device associated with the supplied BLEConnection and GATT UUID.
This is a synchronous call that will wait for either a successful response, error response,
or the specified timeout (milliseconds) to be reached.
:param connection: BLEConnection with the connected GATT server
:type connection: BLEConnection
:param uuid: UUID of target GATT entity (16-bit and 128-bit UUIDs are accepted)
:type uuid: str
:param timeout: Request timeout (milliseconds)
:type timeout: int
:return: GATTRequest that contains the GATTResponse or GATTError result
:rtype: blesuite.pybt.core.GATTRequest
"""
return gatt_procedure_read_uuid(self.stack_connection, connection.connection_handle, UUID(uuid),
timeout=timeout)
def gatt_read_uuid_async(self, connection, uuid, timeout=15 * 1000):
"""
Send an ATT Read request to the peer device associated with the supplied BLEConnection and GATT UUID.
This is an asynchronous call that will send the request to the peer device and
return a GATTRequest object that can be monitored for a GATTResponse or GATTError (either through a valid
peer response, peer error response, or timeout error triggering).
:param connection: BLEConnection with the connected GATT server
:type connection: BLEConnection
:param uuid: UUID of target GATT entity (16-bit and 128-bit UUIDs are accepted)
:type uuid: str
:param timeout: Request timeout (milliseconds)
:type timeout: int
:return: GATTRequest that contains the GATTResponse or GATTError result
:rtype: blesuite.pybt.core.GATTRequest
"""
return gatt_procedure_read_uuid_async(self.stack_connection, connection.connection_handle, UUID(uuid),
timeout=timeout)
def att_send_raw(self, connection, body):
"""
Sends a raw ATT packet using the supplied BLEConnection object
and data supplied. The function does not apply a standard ATT header the supplied body, but L2CAP
and HCI encapsulation is handled.
Note: Valid ATT packets can be constructed using
packets defined in scapy.layers.bluetooth
or using random data for fuzzing.
:param connection: BLEConnection to target device
:param body: ATT request body
:rtype: GATTRequest
"""
request = self.stack_connection.send_raw_att(body, connection.connection_handle)
return request
def l2cap_send_raw(self, connection, body):
"""
Sends a raw L2CAP packet using the supplied BLEConnection object
and data supplied. The function does not apply a standard L2CAP header to the user supplied value,
but HCI encapsulation is applied.
Note: Valid L2CAP packets can be constructed using packets defined in scapy.layers.bluetooth
or using random data for fuzzing.
:param connection: BLEConnection to target device
:param body: L2CAP request body
:rtype: GATTRequest
"""
request = self.stack_connection.send_raw_l2cap(body, connection.connection_handle)
return request
|
[
"blesuite.entities.gatt_device.BLEDevice",
"blesuite.gatt_procedures.gatt_procedure_execute_write",
"blesuite.smart_scan.blesuite_smart_scan",
"blesuite.pybt.gatt.AttributeDatabase",
"blesuite.gatt_procedures.gatt_procedure_read_multiple_handles",
"blesuite.pybt.roles.LEPeripheral",
"blesuite.utils.validators.validate_irk",
"logging.NullHandler",
"blesuite.gatt_procedures.gatt_procedure_read_handle_async",
"blesuite.gatt_procedures.gatt_procedure_prepare_write_handle_async",
"pybt.gatt.UUID",
"blesuite.gatt_procedures.gatt_procedure_write_handle_async",
"blesuite.gatt_procedures.gatt_procedure_write_command_handle",
"blesuite.gatt_procedures.gatt_procedure_execute_write_async",
"blesuite.pybt.roles.LECentral",
"os.urandom",
"blesuite.gatt_procedures.gatt_procedure_discover_characteristics",
"blesuite.utils.validators.validate_att_security_mode",
"blesuite.event_handler.BTEventHandler",
"blesuite.gatt_procedures.gatt_procedure_discover_includes",
"blesuite.pybt.gap.GAP",
"blesuite.utils.validators.validate_ltk",
"blesuite.utils.validators.validate_rand",
"blesuite.gatt_procedures.gatt_procedure_write_handle",
"blesuite.gatt_procedures.gatt_procedure_read_blob_handle",
"blesuite.utils.validators.InvalidSMLTK",
"gevent.sleep",
"blesuite.gatt_procedures.gatt_procedure_prepare_write_handle",
"blesuite.gatt_procedures.gatt_procedure_discover_primary_services",
"blesuite.pybt.gatt.Server",
"blesuite.gatt_procedures.gatt_procedure_read_blob_handle_async",
"blesuite.gatt_procedures.gatt_procedure_discover_secondary_services",
"blesuite.gatt_procedures.gatt_procedure_read_multiple_handles_async",
"time.time",
"blesuite.gatt_procedures.gatt_procedure_discover_descriptors",
"blesuite.pybt.core.Connection",
"blesuite.gatt_procedures.gatt_procedure_read_handle",
"blesuite.utils.validators.validate_csrk",
"logging.getLogger"
] |
[((1647, 1674), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1664, 1674), False, 'import logging\n'), ((1693, 1714), 'logging.NullHandler', 'logging.NullHandler', ([], {}), '()\n', (1712, 1714), False, 'import logging\n'), ((9844, 9896), 'blesuite.pybt.core.Connection', 'Connection', (['self.role', 'role_type', 'self.event_handler'], {}), '(self.role, role_type, self.event_handler)\n', (9854, 9896), False, 'from blesuite.pybt.core import Connection\n'), ((14950, 14961), 'time.time', 'time.time', ([], {}), '()\n', (14959, 14961), False, 'import time\n'), ((16945, 16956), 'time.time', 'time.time', ([], {}), '()\n', (16954, 16956), False, 'import time\n'), ((34463, 34468), 'blesuite.pybt.gap.GAP', 'GAP', ([], {}), '()\n', (34466, 34468), False, 'from blesuite.pybt.gap import GAP\n'), ((50225, 50291), 'blesuite.gatt_procedures.gatt_procedure_discover_primary_services', 'gatt_procedure_discover_primary_services', (['self', 'connection', 'device'], {}), '(self, connection, device)\n', (50265, 50291), False, 'from blesuite.gatt_procedures import gatt_procedure_write_handle, gatt_procedure_write_handle_async, gatt_procedure_read_handle, gatt_procedure_read_handle_async, gatt_procedure_read_uuid, gatt_procedure_read_uuid_async, gatt_procedure_discover_primary_services, gatt_procedure_discover_secondary_services, gatt_procedure_discover_characteristics, gatt_procedure_discover_includes, gatt_procedure_discover_descriptors, gatt_procedure_prepare_write_handle, gatt_procedure_prepare_write_handle_async, gatt_procedure_execute_write, gatt_procedure_execute_write_async, gatt_procedure_write_command_handle, gatt_procedure_read_multiple_handles, gatt_procedure_read_multiple_handles_async, gatt_procedure_read_blob_handle, gatt_procedure_read_blob_handle_async\n'), ((50989, 51057), 'blesuite.gatt_procedures.gatt_procedure_discover_secondary_services', 'gatt_procedure_discover_secondary_services', (['self', 'connection', 'device'], {}), '(self, 
connection, device)\n', (51031, 51057), False, 'from blesuite.gatt_procedures import gatt_procedure_write_handle, gatt_procedure_write_handle_async, gatt_procedure_read_handle, gatt_procedure_read_handle_async, gatt_procedure_read_uuid, gatt_procedure_read_uuid_async, gatt_procedure_discover_primary_services, gatt_procedure_discover_secondary_services, gatt_procedure_discover_characteristics, gatt_procedure_discover_includes, gatt_procedure_discover_descriptors, gatt_procedure_prepare_write_handle, gatt_procedure_prepare_write_handle_async, gatt_procedure_execute_write, gatt_procedure_execute_write_async, gatt_procedure_write_command_handle, gatt_procedure_read_multiple_handles, gatt_procedure_read_multiple_handles_async, gatt_procedure_read_blob_handle, gatt_procedure_read_blob_handle_async\n'), ((51749, 51814), 'blesuite.gatt_procedures.gatt_procedure_discover_characteristics', 'gatt_procedure_discover_characteristics', (['self', 'connection', 'device'], {}), '(self, connection, device)\n', (51788, 51814), False, 'from blesuite.gatt_procedures import gatt_procedure_write_handle, gatt_procedure_write_handle_async, gatt_procedure_read_handle, gatt_procedure_read_handle_async, gatt_procedure_read_uuid, gatt_procedure_read_uuid_async, gatt_procedure_discover_primary_services, gatt_procedure_discover_secondary_services, gatt_procedure_discover_characteristics, gatt_procedure_discover_includes, gatt_procedure_discover_descriptors, gatt_procedure_prepare_write_handle, gatt_procedure_prepare_write_handle_async, gatt_procedure_execute_write, gatt_procedure_execute_write_async, gatt_procedure_write_command_handle, gatt_procedure_read_multiple_handles, gatt_procedure_read_multiple_handles_async, gatt_procedure_read_blob_handle, gatt_procedure_read_blob_handle_async\n'), ((52500, 52558), 'blesuite.gatt_procedures.gatt_procedure_discover_includes', 'gatt_procedure_discover_includes', (['self', 'connection', 'device'], {}), '(self, connection, device)\n', (52532, 52558), 
False, 'from blesuite.gatt_procedures import gatt_procedure_write_handle, gatt_procedure_write_handle_async, gatt_procedure_read_handle, gatt_procedure_read_handle_async, gatt_procedure_read_uuid, gatt_procedure_read_uuid_async, gatt_procedure_discover_primary_services, gatt_procedure_discover_secondary_services, gatt_procedure_discover_characteristics, gatt_procedure_discover_includes, gatt_procedure_discover_descriptors, gatt_procedure_prepare_write_handle, gatt_procedure_prepare_write_handle_async, gatt_procedure_execute_write, gatt_procedure_execute_write_async, gatt_procedure_write_command_handle, gatt_procedure_read_multiple_handles, gatt_procedure_read_multiple_handles_async, gatt_procedure_read_blob_handle, gatt_procedure_read_blob_handle_async\n'), ((53174, 53235), 'blesuite.gatt_procedures.gatt_procedure_discover_descriptors', 'gatt_procedure_discover_descriptors', (['self', 'connection', 'device'], {}), '(self, connection, device)\n', (53209, 53235), False, 'from blesuite.gatt_procedures import gatt_procedure_write_handle, gatt_procedure_write_handle_async, gatt_procedure_read_handle, gatt_procedure_read_handle_async, gatt_procedure_read_uuid, gatt_procedure_read_uuid_async, gatt_procedure_discover_primary_services, gatt_procedure_discover_secondary_services, gatt_procedure_discover_characteristics, gatt_procedure_discover_includes, gatt_procedure_discover_descriptors, gatt_procedure_prepare_write_handle, gatt_procedure_prepare_write_handle_async, gatt_procedure_execute_write, gatt_procedure_execute_write_async, gatt_procedure_write_command_handle, gatt_procedure_read_multiple_handles, gatt_procedure_read_multiple_handles_async, gatt_procedure_read_blob_handle, gatt_procedure_read_blob_handle_async\n'), ((54867, 55014), 'blesuite.smart_scan.blesuite_smart_scan', 'blesuite_smart_scan', (['self', 'connection', 'device'], {'look_for_device_info': 'look_for_device_info', 'attempt_desc_read': 'attempt_desc_read', 'timeout': 'timeout'}), '(self, connection, 
device, look_for_device_info=\n look_for_device_info, attempt_desc_read=attempt_desc_read, timeout=timeout)\n', (54886, 55014), False, 'from blesuite.smart_scan import blesuite_smart_scan\n'), ((55964, 56080), 'blesuite.gatt_procedures.gatt_procedure_write_handle', 'gatt_procedure_write_handle', (['self.stack_connection', 'connection.connection_handle', 'handle', 'data'], {'timeout': 'timeout'}), '(self.stack_connection, connection.\n connection_handle, handle, data, timeout=timeout)\n', (55991, 56080), False, 'from blesuite.gatt_procedures import gatt_procedure_write_handle, gatt_procedure_write_handle_async, gatt_procedure_read_handle, gatt_procedure_read_handle_async, gatt_procedure_read_uuid, gatt_procedure_read_uuid_async, gatt_procedure_discover_primary_services, gatt_procedure_discover_secondary_services, gatt_procedure_discover_characteristics, gatt_procedure_discover_includes, gatt_procedure_discover_descriptors, gatt_procedure_prepare_write_handle, gatt_procedure_prepare_write_handle_async, gatt_procedure_execute_write, gatt_procedure_execute_write_async, gatt_procedure_write_command_handle, gatt_procedure_read_multiple_handles, gatt_procedure_read_multiple_handles_async, gatt_procedure_read_blob_handle, gatt_procedure_read_blob_handle_async\n'), ((57156, 57278), 'blesuite.gatt_procedures.gatt_procedure_write_handle_async', 'gatt_procedure_write_handle_async', (['self.stack_connection', 'connection.connection_handle', 'handle', 'data'], {'timeout': 'timeout'}), '(self.stack_connection, connection.\n connection_handle, handle, data, timeout=timeout)\n', (57189, 57278), False, 'from blesuite.gatt_procedures import gatt_procedure_write_handle, gatt_procedure_write_handle_async, gatt_procedure_read_handle, gatt_procedure_read_handle_async, gatt_procedure_read_uuid, gatt_procedure_read_uuid_async, gatt_procedure_discover_primary_services, gatt_procedure_discover_secondary_services, gatt_procedure_discover_characteristics, gatt_procedure_discover_includes, 
gatt_procedure_discover_descriptors, gatt_procedure_prepare_write_handle, gatt_procedure_prepare_write_handle_async, gatt_procedure_execute_write, gatt_procedure_execute_write_async, gatt_procedure_write_command_handle, gatt_procedure_read_multiple_handles, gatt_procedure_read_multiple_handles_async, gatt_procedure_read_blob_handle, gatt_procedure_read_blob_handle_async\n'), ((58136, 58243), 'blesuite.gatt_procedures.gatt_procedure_write_command_handle', 'gatt_procedure_write_command_handle', (['self.stack_connection', 'connection.connection_handle', 'handle', 'data'], {}), '(self.stack_connection, connection.\n connection_handle, handle, data)\n', (58171, 58243), False, 'from blesuite.gatt_procedures import gatt_procedure_write_handle, gatt_procedure_write_handle_async, gatt_procedure_read_handle, gatt_procedure_read_handle_async, gatt_procedure_read_uuid, gatt_procedure_read_uuid_async, gatt_procedure_discover_primary_services, gatt_procedure_discover_secondary_services, gatt_procedure_discover_characteristics, gatt_procedure_discover_includes, gatt_procedure_discover_descriptors, gatt_procedure_prepare_write_handle, gatt_procedure_prepare_write_handle_async, gatt_procedure_execute_write, gatt_procedure_execute_write_async, gatt_procedure_write_command_handle, gatt_procedure_read_multiple_handles, gatt_procedure_read_multiple_handles_async, gatt_procedure_read_blob_handle, gatt_procedure_read_blob_handle_async\n'), ((59676, 59808), 'blesuite.gatt_procedures.gatt_procedure_prepare_write_handle', 'gatt_procedure_prepare_write_handle', (['self.stack_connection', 'connection.connection_handle', 'handle', 'data', 'offset'], {'timeout': 'timeout'}), '(self.stack_connection, connection.\n connection_handle, handle, data, offset, timeout=timeout)\n', (59711, 59808), False, 'from blesuite.gatt_procedures import gatt_procedure_write_handle, gatt_procedure_write_handle_async, gatt_procedure_read_handle, gatt_procedure_read_handle_async, gatt_procedure_read_uuid, 
gatt_procedure_read_uuid_async, gatt_procedure_discover_primary_services, gatt_procedure_discover_secondary_services, gatt_procedure_discover_characteristics, gatt_procedure_discover_includes, gatt_procedure_discover_descriptors, gatt_procedure_prepare_write_handle, gatt_procedure_prepare_write_handle_async, gatt_procedure_execute_write, gatt_procedure_execute_write_async, gatt_procedure_write_command_handle, gatt_procedure_read_multiple_handles, gatt_procedure_read_multiple_handles_async, gatt_procedure_read_blob_handle, gatt_procedure_read_blob_handle_async\n'), ((61402, 61540), 'blesuite.gatt_procedures.gatt_procedure_prepare_write_handle_async', 'gatt_procedure_prepare_write_handle_async', (['self.stack_connection', 'connection.connection_handle', 'handle', 'data', 'offset'], {'timeout': 'timeout'}), '(self.stack_connection, connection\n .connection_handle, handle, data, offset, timeout=timeout)\n', (61443, 61540), False, 'from blesuite.gatt_procedures import gatt_procedure_write_handle, gatt_procedure_write_handle_async, gatt_procedure_read_handle, gatt_procedure_read_handle_async, gatt_procedure_read_uuid, gatt_procedure_read_uuid_async, gatt_procedure_discover_primary_services, gatt_procedure_discover_secondary_services, gatt_procedure_discover_characteristics, gatt_procedure_discover_includes, gatt_procedure_discover_descriptors, gatt_procedure_prepare_write_handle, gatt_procedure_prepare_write_handle_async, gatt_procedure_execute_write, gatt_procedure_execute_write_async, gatt_procedure_write_command_handle, gatt_procedure_read_multiple_handles, gatt_procedure_read_multiple_handles_async, gatt_procedure_read_blob_handle, gatt_procedure_read_blob_handle_async\n'), ((62913, 63023), 'blesuite.gatt_procedures.gatt_procedure_execute_write', 'gatt_procedure_execute_write', (['self.stack_connection', 'connection.connection_handle', 'flags'], {'timeout': 'timeout'}), '(self.stack_connection, connection.\n connection_handle, flags, timeout=timeout)\n', (62941, 
63023), False, 'from blesuite.gatt_procedures import gatt_procedure_write_handle, gatt_procedure_write_handle_async, gatt_procedure_read_handle, gatt_procedure_read_handle_async, gatt_procedure_read_uuid, gatt_procedure_read_uuid_async, gatt_procedure_discover_primary_services, gatt_procedure_discover_secondary_services, gatt_procedure_discover_characteristics, gatt_procedure_discover_includes, gatt_procedure_discover_descriptors, gatt_procedure_prepare_write_handle, gatt_procedure_prepare_write_handle_async, gatt_procedure_execute_write, gatt_procedure_execute_write_async, gatt_procedure_write_command_handle, gatt_procedure_read_multiple_handles, gatt_procedure_read_multiple_handles_async, gatt_procedure_read_blob_handle, gatt_procedure_read_blob_handle_async\n'), ((64457, 64573), 'blesuite.gatt_procedures.gatt_procedure_execute_write_async', 'gatt_procedure_execute_write_async', (['self.stack_connection', 'connection.connection_handle', 'flags'], {'timeout': 'timeout'}), '(self.stack_connection, connection.\n connection_handle, flags, timeout=timeout)\n', (64491, 64573), False, 'from blesuite.gatt_procedures import gatt_procedure_write_handle, gatt_procedure_write_handle_async, gatt_procedure_read_handle, gatt_procedure_read_handle_async, gatt_procedure_read_uuid, gatt_procedure_read_uuid_async, gatt_procedure_discover_primary_services, gatt_procedure_discover_secondary_services, gatt_procedure_discover_characteristics, gatt_procedure_discover_includes, gatt_procedure_discover_descriptors, gatt_procedure_prepare_write_handle, gatt_procedure_prepare_write_handle_async, gatt_procedure_execute_write, gatt_procedure_execute_write_async, gatt_procedure_write_command_handle, gatt_procedure_read_multiple_handles, gatt_procedure_read_multiple_handles_async, gatt_procedure_read_blob_handle, gatt_procedure_read_blob_handle_async\n'), ((65442, 65551), 'blesuite.gatt_procedures.gatt_procedure_read_handle', 'gatt_procedure_read_handle', (['self.stack_connection', 
'connection.connection_handle', 'handle'], {'timeout': 'timeout'}), '(self.stack_connection, connection.\n connection_handle, handle, timeout=timeout)\n', (65468, 65551), False, 'from blesuite.gatt_procedures import gatt_procedure_write_handle, gatt_procedure_write_handle_async, gatt_procedure_read_handle, gatt_procedure_read_handle_async, gatt_procedure_read_uuid, gatt_procedure_read_uuid_async, gatt_procedure_discover_primary_services, gatt_procedure_discover_secondary_services, gatt_procedure_discover_characteristics, gatt_procedure_discover_includes, gatt_procedure_discover_descriptors, gatt_procedure_prepare_write_handle, gatt_procedure_prepare_write_handle_async, gatt_procedure_execute_write, gatt_procedure_execute_write_async, gatt_procedure_write_command_handle, gatt_procedure_read_multiple_handles, gatt_procedure_read_multiple_handles_async, gatt_procedure_read_blob_handle, gatt_procedure_read_blob_handle_async\n'), ((66480, 66595), 'blesuite.gatt_procedures.gatt_procedure_read_handle_async', 'gatt_procedure_read_handle_async', (['self.stack_connection', 'connection.connection_handle', 'handle'], {'timeout': 'timeout'}), '(self.stack_connection, connection.\n connection_handle, handle, timeout=timeout)\n', (66512, 66595), False, 'from blesuite.gatt_procedures import gatt_procedure_write_handle, gatt_procedure_write_handle_async, gatt_procedure_read_handle, gatt_procedure_read_handle_async, gatt_procedure_read_uuid, gatt_procedure_read_uuid_async, gatt_procedure_discover_primary_services, gatt_procedure_discover_secondary_services, gatt_procedure_discover_characteristics, gatt_procedure_discover_includes, gatt_procedure_discover_descriptors, gatt_procedure_prepare_write_handle, gatt_procedure_prepare_write_handle_async, gatt_procedure_execute_write, gatt_procedure_execute_write_async, gatt_procedure_write_command_handle, gatt_procedure_read_multiple_handles, gatt_procedure_read_multiple_handles_async, gatt_procedure_read_blob_handle, 
gatt_procedure_read_blob_handle_async\n'), ((67515, 67635), 'blesuite.gatt_procedures.gatt_procedure_read_multiple_handles', 'gatt_procedure_read_multiple_handles', (['self.stack_connection', 'connection.connection_handle', 'handles'], {'timeout': 'timeout'}), '(self.stack_connection, connection.\n connection_handle, handles, timeout=timeout)\n', (67551, 67635), False, 'from blesuite.gatt_procedures import gatt_procedure_write_handle, gatt_procedure_write_handle_async, gatt_procedure_read_handle, gatt_procedure_read_handle_async, gatt_procedure_read_uuid, gatt_procedure_read_uuid_async, gatt_procedure_discover_primary_services, gatt_procedure_discover_secondary_services, gatt_procedure_discover_characteristics, gatt_procedure_discover_includes, gatt_procedure_discover_descriptors, gatt_procedure_prepare_write_handle, gatt_procedure_prepare_write_handle_async, gatt_procedure_execute_write, gatt_procedure_execute_write_async, gatt_procedure_write_command_handle, gatt_procedure_read_multiple_handles, gatt_procedure_read_multiple_handles_async, gatt_procedure_read_blob_handle, gatt_procedure_read_blob_handle_async\n'), ((68792, 68917), 'blesuite.gatt_procedures.gatt_procedure_read_multiple_handles_async', 'gatt_procedure_read_multiple_handles_async', (['self.stack_connection', 'connection.connection_handle', 'handles'], {'timeout': 'timeout'}), '(self.stack_connection,\n connection.connection_handle, handles, timeout=timeout)\n', (68834, 68917), False, 'from blesuite.gatt_procedures import gatt_procedure_write_handle, gatt_procedure_write_handle_async, gatt_procedure_read_handle, gatt_procedure_read_handle_async, gatt_procedure_read_uuid, gatt_procedure_read_uuid_async, gatt_procedure_discover_primary_services, gatt_procedure_discover_secondary_services, gatt_procedure_discover_characteristics, gatt_procedure_discover_includes, gatt_procedure_discover_descriptors, gatt_procedure_prepare_write_handle, gatt_procedure_prepare_write_handle_async, 
gatt_procedure_execute_write, gatt_procedure_execute_write_async, gatt_procedure_write_command_handle, gatt_procedure_read_multiple_handles, gatt_procedure_read_multiple_handles_async, gatt_procedure_read_blob_handle, gatt_procedure_read_blob_handle_async\n'), ((69914, 70036), 'blesuite.gatt_procedures.gatt_procedure_read_blob_handle', 'gatt_procedure_read_blob_handle', (['self.stack_connection', 'connection.connection_handle', 'handle', 'offset'], {'timeout': 'timeout'}), '(self.stack_connection, connection.\n connection_handle, handle, offset, timeout=timeout)\n', (69945, 70036), False, 'from blesuite.gatt_procedures import gatt_procedure_write_handle, gatt_procedure_write_handle_async, gatt_procedure_read_handle, gatt_procedure_read_handle_async, gatt_procedure_read_uuid, gatt_procedure_read_uuid_async, gatt_procedure_discover_primary_services, gatt_procedure_discover_secondary_services, gatt_procedure_discover_characteristics, gatt_procedure_discover_includes, gatt_procedure_discover_descriptors, gatt_procedure_prepare_write_handle, gatt_procedure_prepare_write_handle_async, gatt_procedure_execute_write, gatt_procedure_execute_write_async, gatt_procedure_write_command_handle, gatt_procedure_read_multiple_handles, gatt_procedure_read_multiple_handles_async, gatt_procedure_read_blob_handle, gatt_procedure_read_blob_handle_async\n'), ((71131, 71259), 'blesuite.gatt_procedures.gatt_procedure_read_blob_handle_async', 'gatt_procedure_read_blob_handle_async', (['self.stack_connection', 'connection.connection_handle', 'handle', 'offset'], {'timeout': 'timeout'}), '(self.stack_connection, connection.\n connection_handle, handle, offset, timeout=timeout)\n', (71168, 71259), False, 'from blesuite.gatt_procedures import gatt_procedure_write_handle, gatt_procedure_write_handle_async, gatt_procedure_read_handle, gatt_procedure_read_handle_async, gatt_procedure_read_uuid, gatt_procedure_read_uuid_async, gatt_procedure_discover_primary_services, 
gatt_procedure_discover_secondary_services, gatt_procedure_discover_characteristics, gatt_procedure_discover_includes, gatt_procedure_discover_descriptors, gatt_procedure_prepare_write_handle, gatt_procedure_prepare_write_handle_async, gatt_procedure_execute_write, gatt_procedure_execute_write_async, gatt_procedure_write_command_handle, gatt_procedure_read_multiple_handles, gatt_procedure_read_multiple_handles_async, gatt_procedure_read_blob_handle, gatt_procedure_read_blob_handle_async\n'), ((8414, 8547), 'blesuite.pybt.roles.LECentral', 'LECentral', ([], {'address_type': 'self.our_address_type', 'random': 'self.random_address', 'att_operation_event_hook': 'self.att_operation_event_hook'}), '(address_type=self.our_address_type, random=self.random_address,\n att_operation_event_hook=self.att_operation_event_hook)\n', (8423, 8547), False, 'from blesuite.pybt.roles import LECentral, LEPeripheral\n'), ((8616, 8777), 'blesuite.pybt.roles.LECentral', 'LECentral', ([], {'adapter': 'self.adapter', 'address_type': 'self.our_address_type', 'random': 'self.random_address', 'att_operation_event_hook': 'self.att_operation_event_hook'}), '(adapter=self.adapter, address_type=self.our_address_type, random=\n self.random_address, att_operation_event_hook=self.att_operation_event_hook\n )\n', (8625, 8777), False, 'from blesuite.pybt.roles import LECentral, LEPeripheral\n'), ((8906, 8967), 'blesuite.pybt.gatt.AttributeDatabase', 'AttributeDatabase', ([], {'event_handler': 'self.att_security_event_hook'}), '(event_handler=self.att_security_event_hook)\n', (8923, 8967), False, 'from blesuite.pybt.gatt import UUID, AttributeDatabase, Server\n'), ((8999, 9024), 'blesuite.pybt.gatt.Server', 'Server', (['self.attribute_db'], {}), '(self.attribute_db)\n', (9005, 9024), False, 'from blesuite.pybt.gatt import UUID, AttributeDatabase, Server\n'), ((9129, 9303), 'blesuite.pybt.roles.LEPeripheral', 'LEPeripheral', (['self.gatt_server'], {'mtu': 'self.mtu', 'address_type': 
'self.our_address_type', 'random': 'self.random_address', 'att_operation_event_hook': 'self.att_operation_event_hook'}), '(self.gatt_server, mtu=self.mtu, address_type=self.\n our_address_type, random=self.random_address, att_operation_event_hook=\n self.att_operation_event_hook)\n', (9141, 9303), False, 'from blesuite.pybt.roles import LECentral, LEPeripheral\n'), ((9406, 9600), 'blesuite.pybt.roles.LEPeripheral', 'LEPeripheral', (['self.gatt_server'], {'adapter': 'self.adapter', 'mtu': 'self.mtu', 'address_type': 'self.our_address_type', 'random': 'self.random_address', 'att_operation_event_hook': 'self.att_operation_event_hook'}), '(self.gatt_server, adapter=self.adapter, mtu=self.mtu,\n address_type=self.our_address_type, random=self.random_address,\n att_operation_event_hook=self.att_operation_event_hook)\n', (9418, 9600), False, 'from blesuite.pybt.roles import LECentral, LEPeripheral\n'), ((9791, 9811), 'blesuite.event_handler.BTEventHandler', 'BTEventHandler', (['self'], {}), '(self)\n', (9805, 9811), False, 'from blesuite.event_handler import BTEventHandler\n'), ((17443, 17458), 'gevent.sleep', 'gevent.sleep', (['(1)'], {}), '(1)\n', (17455, 17458), False, 'import gevent\n'), ((25956, 26005), 'blesuite.utils.validators.validate_att_security_mode', 'validator.validate_att_security_mode', (['mode', 'level'], {}), '(mode, level)\n', (25992, 26005), True, 'import blesuite.utils.validators as validator\n'), ((35530, 35541), 'time.time', 'time.time', ([], {}), '()\n', (35539, 35541), False, 'import time\n'), ((35732, 35747), 'gevent.sleep', 'gevent.sleep', (['(1)'], {}), '(1)\n', (35744, 35747), False, 'import gevent\n'), ((36638, 36653), 'gevent.sleep', 'gevent.sleep', (['(1)'], {}), '(1)\n', (36650, 36653), False, 'import gevent\n'), ((43188, 43207), 'blesuite.pybt.gatt.AttributeDatabase', 'AttributeDatabase', ([], {}), '()\n', (43205, 43207), False, 'from blesuite.pybt.gatt import UUID, AttributeDatabase, Server\n'), ((43239, 43253), 
'blesuite.pybt.gatt.Server', 'Server', (['att_db'], {}), '(att_db)\n', (43245, 43253), False, 'from blesuite.pybt.gatt import UUID, AttributeDatabase, Server\n'), ((50180, 50209), 'blesuite.entities.gatt_device.BLEDevice', 'BLEDevice', (['connection.address'], {}), '(connection.address)\n', (50189, 50209), False, 'from blesuite.entities.gatt_device import BLEDevice\n'), ((50944, 50973), 'blesuite.entities.gatt_device.BLEDevice', 'BLEDevice', (['connection.address'], {}), '(connection.address)\n', (50953, 50973), False, 'from blesuite.entities.gatt_device import BLEDevice\n'), ((51704, 51733), 'blesuite.entities.gatt_device.BLEDevice', 'BLEDevice', (['connection.address'], {}), '(connection.address)\n', (51713, 51733), False, 'from blesuite.entities.gatt_device import BLEDevice\n'), ((52455, 52484), 'blesuite.entities.gatt_device.BLEDevice', 'BLEDevice', (['connection.address'], {}), '(connection.address)\n', (52464, 52484), False, 'from blesuite.entities.gatt_device import BLEDevice\n'), ((54821, 54850), 'blesuite.entities.gatt_device.BLEDevice', 'BLEDevice', (['connection.address'], {}), '(connection.address)\n', (54830, 54850), False, 'from blesuite.entities.gatt_device import BLEDevice\n'), ((72200, 72210), 'pybt.gatt.UUID', 'UUID', (['uuid'], {}), '(uuid)\n', (72204, 72210), False, 'from pybt.gatt import GATTService, GATTCharacteristic, GATTCharacteristicDescriptorDeclaration, GATTInclude, UUID\n'), ((73285, 73295), 'pybt.gatt.UUID', 'UUID', (['uuid'], {}), '(uuid)\n', (73289, 73295), False, 'from pybt.gatt import GATTService, GATTCharacteristic, GATTCharacteristicDescriptorDeclaration, GATTInclude, UUID\n'), ((15545, 15560), 'gevent.sleep', 'gevent.sleep', (['(1)'], {}), '(1)\n', (15557, 15560), False, 'import gevent\n'), ((25048, 25076), 'blesuite.utils.validators.InvalidSMLTK', 'validator.InvalidSMLTK', (['None'], {}), '(None)\n', (25070, 25076), True, 'import blesuite.utils.validators as validator\n'), ((43391, 43419), 'pybt.gatt.UUID', 'UUID', 
(['service.attribute_type'], {}), '(service.attribute_type)\n', (43395, 43419), False, 'from pybt.gatt import GATTService, GATTCharacteristic, GATTCharacteristicDescriptorDeclaration, GATTInclude, UUID\n'), ((43421, 43439), 'pybt.gatt.UUID', 'UUID', (['service.uuid'], {}), '(service.uuid)\n', (43425, 43439), False, 'from pybt.gatt import GATTService, GATTCharacteristic, GATTCharacteristicDescriptorDeclaration, GATTInclude, UUID\n'), ((6440, 6453), 'os.urandom', 'os.urandom', (['(6)'], {}), '(6)\n', (6450, 6453), False, 'import os\n'), ((35640, 35651), 'time.time', 'time.time', ([], {}), '()\n', (35649, 35651), False, 'import time\n'), ((43781, 43813), 'pybt.gatt.UUID', 'UUID', (['incl.included_service_uuid'], {}), '(incl.included_service_uuid)\n', (43785, 43813), False, 'from pybt.gatt import GATTService, GATTCharacteristic, GATTCharacteristicDescriptorDeclaration, GATTInclude, UUID\n'), ((44679, 44704), 'pybt.gatt.UUID', 'UUID', (['characteristic.uuid'], {}), '(characteristic.uuid)\n', (44683, 44704), False, 'from pybt.gatt import GATTService, GATTCharacteristic, GATTCharacteristicDescriptorDeclaration, GATTInclude, UUID\n'), ((17244, 17255), 'time.time', 'time.time', ([], {}), '()\n', (17253, 17255), False, 'import time\n'), ((24957, 24993), 'blesuite.utils.validators.validate_ltk', 'validator.validate_ltk', (["entry['ltk']"], {}), "(entry['ltk'])\n", (24979, 24993), True, 'import blesuite.utils.validators as validator\n'), ((25244, 25282), 'blesuite.utils.validators.validate_rand', 'validator.validate_rand', (["entry['rand']"], {}), "(entry['rand'])\n", (25267, 25282), True, 'import blesuite.utils.validators as validator\n'), ((25402, 25438), 'blesuite.utils.validators.validate_irk', 'validator.validate_irk', (["entry['irk']"], {}), "(entry['irk'])\n", (25424, 25438), True, 'import blesuite.utils.validators as validator\n'), ((25560, 25598), 'blesuite.utils.validators.validate_csrk', 'validator.validate_csrk', (["entry['csrk']"], {}), "(entry['csrk'])\n", 
(25583, 25598), True, 'import blesuite.utils.validators as validator\n'), ((46429, 46450), 'pybt.gatt.UUID', 'UUID', (['descriptor.uuid'], {}), '(descriptor.uuid)\n', (46433, 46450), False, 'from pybt.gatt import GATTService, GATTCharacteristic, GATTCharacteristicDescriptorDeclaration, GATTInclude, UUID\n'), ((15332, 15343), 'time.time', 'time.time', ([], {}), '()\n', (15341, 15343), False, 'import time\n')]
|
#!/usr/bin/env python
import itertools
from cloud.clouds import get_region, Cloud, get_regions
from test_steps.do_test import do_batch
from util.utils import set_cwd, random_id, Timer, init_logger
init_logger()
def test1():
run_id = random_id()
t1 = (get_region(Cloud.GCP, "us-east1"), {})
t2 = (get_region(Cloud.GCP, "us-central1"), {})
t3 = (get_region(Cloud.AWS, "us-east-1"), {})
t4 = (get_region(Cloud.AWS, "us-east-2"), {})
test_input = [
(t1, t2),
(t2, t1),
(t3, t4),
(t4, t3),
]
do_batch(run_id, test_input)
def test2():
run_id = random_id()
regions = get_regions()[:40]
region_pairs = itertools.product(regions, regions)
test_input = [((r[0], {}), (r[1], {})) for r in region_pairs]
do_batch(run_id, test_input)
if __name__ == "__main__":
with Timer("Full run"):
set_cwd()
test2()
|
[
"util.utils.Timer",
"util.utils.random_id",
"util.utils.init_logger",
"test_steps.do_test.do_batch",
"cloud.clouds.get_region",
"util.utils.set_cwd",
"itertools.product",
"cloud.clouds.get_regions"
] |
[((199, 212), 'util.utils.init_logger', 'init_logger', ([], {}), '()\n', (210, 212), False, 'from util.utils import set_cwd, random_id, Timer, init_logger\n'), ((241, 252), 'util.utils.random_id', 'random_id', ([], {}), '()\n', (250, 252), False, 'from util.utils import set_cwd, random_id, Timer, init_logger\n'), ((556, 584), 'test_steps.do_test.do_batch', 'do_batch', (['run_id', 'test_input'], {}), '(run_id, test_input)\n', (564, 584), False, 'from test_steps.do_test import do_batch\n'), ((613, 624), 'util.utils.random_id', 'random_id', ([], {}), '()\n', (622, 624), False, 'from util.utils import set_cwd, random_id, Timer, init_logger\n'), ((677, 712), 'itertools.product', 'itertools.product', (['regions', 'regions'], {}), '(regions, regions)\n', (694, 712), False, 'import itertools\n'), ((783, 811), 'test_steps.do_test.do_batch', 'do_batch', (['run_id', 'test_input'], {}), '(run_id, test_input)\n', (791, 811), False, 'from test_steps.do_test import do_batch\n'), ((263, 296), 'cloud.clouds.get_region', 'get_region', (['Cloud.GCP', '"""us-east1"""'], {}), "(Cloud.GCP, 'us-east1')\n", (273, 296), False, 'from cloud.clouds import get_region, Cloud, get_regions\n'), ((312, 348), 'cloud.clouds.get_region', 'get_region', (['Cloud.GCP', '"""us-central1"""'], {}), "(Cloud.GCP, 'us-central1')\n", (322, 348), False, 'from cloud.clouds import get_region, Cloud, get_regions\n'), ((364, 398), 'cloud.clouds.get_region', 'get_region', (['Cloud.AWS', '"""us-east-1"""'], {}), "(Cloud.AWS, 'us-east-1')\n", (374, 398), False, 'from cloud.clouds import get_region, Cloud, get_regions\n'), ((414, 448), 'cloud.clouds.get_region', 'get_region', (['Cloud.AWS', '"""us-east-2"""'], {}), "(Cloud.AWS, 'us-east-2')\n", (424, 448), False, 'from cloud.clouds import get_region, Cloud, get_regions\n'), ((639, 652), 'cloud.clouds.get_regions', 'get_regions', ([], {}), '()\n', (650, 652), False, 'from cloud.clouds import get_region, Cloud, get_regions\n'), ((850, 867), 'util.utils.Timer', 'Timer', 
(['"""Full run"""'], {}), "('Full run')\n", (855, 867), False, 'from util.utils import set_cwd, random_id, Timer, init_logger\n'), ((877, 886), 'util.utils.set_cwd', 'set_cwd', ([], {}), '()\n', (884, 886), False, 'from util.utils import set_cwd, random_id, Timer, init_logger\n')]
|
# Copyright (c) 2019 SAP SE or an SAP affiliate company. All rights reserved. This file is licensed
# under the Apache Software License, v. 2 except as noted otherwise in the LICENSE file
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import toposort
from concourse.model.base import (
ModelBase,
select_attr,
)
from util import not_none
from concourse.model.resources import RepositoryConfig, ResourceIdentifier
class JobVariant(ModelBase):
def __init__(self, name: str, raw_dict: dict, resource_registry, *args, **kwargs):
self._main_repository_name = None
self._resource_registry = not_none(resource_registry)
self.variant_name = name
super().__init__(raw_dict=raw_dict, *args, **kwargs)
def _known_attributes(self):
return {
'steps',
'traits',
'repo',
'repos',
}
def _children(self):
yield from self.steps()
yield from self.traits().values()
yield from self.repositories()
def traits(self):
return self._traits_dict
def trait(self, name):
return self._traits_dict[name]
def has_trait(self, name):
return name in self.traits()
def job_name(self):
return '{b}-{n}-job'.format(
b=self.main_repository().branch(),
n=self.variant_name,
)
def meta_resource_name(self):
meta_res = self._resource_registry.resource(
ResourceIdentifier(type_name='meta', base_name=self.variant_name)
)
return meta_res.resource_identifier().name()
def steps(self):
return self._steps_dict.values()
def step_names(self):
return map(select_attr('name'), self.steps())
def ordered_steps(self):
dependencies = {
step.name: step.depends() for step in self.steps()
}
try:
result = list(toposort.toposort(dependencies))
except toposort.CircularDependencyError as de:
# remove cirular dependencies caused by synthetic steps
# (custom steps' dependencies should "win")
for step_name, step_dependencies in de.data.items():
step = self.step(step_name)
if not step.is_synthetic:
continue # only patch away synthetic steps' dependencies
for step_dependency_name in step_dependencies:
step_dependency = self.step(step_dependency_name)
if step_dependency.is_synthetic:
continue # leave dependencies between synthetic steps
# patch out dependency from synthetic step to custom step
dependencies[step_name].remove(step_dependency_name)
# try again - if there is still a cyclic dependency, this is probably caused
# by a user error - so let it propagate
result = toposort.toposort(dependencies)
# result contains a generator yielding tuples of step name in the correct execution order.
# each tuple can/should be parallelised
return result
def add_step(self, step: 'PipelineStep'):
if self.has_step(step.name):
raise ValueError('conflict: pipeline definition already contained step {s}'.format(
s=step.name
)
)
self._steps_dict[step.name] = step
def step(self, name):
return self._steps_dict[name]
def has_step(self, step_name):
return step_name in self.step_names()
def pr_repository(self, name):
pr_repo = self.repository(name)
return RepositoryConfig(
raw_dict=dict(pr_repo.raw),
logical_name=name,
qualifier='pr',
is_pull_request=True
)
def repositories(self):
# TODO: introduce a common base class for "input resources"
# (where Github and PR are two examples, and "time" will be the third)
return self._repos_dict.values()
def repository_names(self):
return self._repos_dict.keys()
def repository(self, name):
return self._repos_dict[name]
def has_main_repository(self):
return self._main_repository_name is not None
def main_repository(self):
return self.repository(self._main_repository_name)
def publish_repositories(self):
return self._publish_repos_dict.values()
def publish_repository(self, name):
return self._publish_repos_dict[name]
def has_publish_repository(self, name):
return name in self._publish_repos_dict
def __repr__(self):
return f'JobVariant: {self.variant_name}'
|
[
"concourse.model.resources.ResourceIdentifier",
"util.not_none",
"toposort.toposort",
"concourse.model.base.select_attr"
] |
[((1117, 1144), 'util.not_none', 'not_none', (['resource_registry'], {}), '(resource_registry)\n', (1125, 1144), False, 'from util import not_none\n'), ((1967, 2032), 'concourse.model.resources.ResourceIdentifier', 'ResourceIdentifier', ([], {'type_name': '"""meta"""', 'base_name': 'self.variant_name'}), "(type_name='meta', base_name=self.variant_name)\n", (1985, 2032), False, 'from concourse.model.resources import RepositoryConfig, ResourceIdentifier\n'), ((2205, 2224), 'concourse.model.base.select_attr', 'select_attr', (['"""name"""'], {}), "('name')\n", (2216, 2224), False, 'from concourse.model.base import ModelBase, select_attr\n'), ((2407, 2438), 'toposort.toposort', 'toposort.toposort', (['dependencies'], {}), '(dependencies)\n', (2424, 2438), False, 'import toposort\n'), ((3424, 3455), 'toposort.toposort', 'toposort.toposort', (['dependencies'], {}), '(dependencies)\n', (3441, 3455), False, 'import toposort\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `simu_linear` package."""
import pytest
import numpy as np
from thermo.chemical import Chemical
from pandangas import simu_linear as sim
from pandangas import topology as top
from fixtures import simple_network
def test_solve():
# 3 * x0 + x1 = 9 and x0 + 2 * x1 = 8 <=> x0 = 2, x1 = 3
a = np.array([[3, 1], [1, 2]])
b = np.array([9, 8])
assert np.array_equal(sim.solve(a, b), np.array([2.0, 3.0]))
def test_weird():
a = np.array([1, 0, 0, 1, 0, 1])
waited = np.array([[1, 0, 0, 0, 0, 0], [0, 0, 0, 1, 0, 0], [0, 0, 0, 0, 0, 1]])
assert np.array_equal(sim.weird(a), waited)
def test_create_a(simple_network):
gas = Chemical("natural gas", T=10 + 273.15, P=1.022e5)
net = simple_network
g = top.graphs_by_level_as_dict(net)
graph = g["BP"]
a = sim.create_a(graph, gas)
assert a.shape == (20, 20)
def test_create_k(simple_network):
gas = Chemical("natural gas", T=10 + 273.15, P=1.022e5)
net = simple_network
g = top.graphs_by_level_as_dict(net)
graph = g["BP"]
k = sim.create_k(graph, gas)
assert k.shape == (len(graph.edges),)
for ik in k:
assert int(ik) == 49975
def test_create_b(simple_network):
net = simple_network
loads = sim._scaled_loads_as_dict(net)
p_ops = sim._operating_pressures_as_dict(net)
g = top.graphs_by_level_as_dict(net)
graph = g["BP"]
b = sim.create_b(graph, loads, p_ops)
assert b.shape == (20,)
def test_run_one_level_BP_shape(simple_network):
net = simple_network
g = top.graphs_by_level_as_dict(net)
graph = g["BP"]
p_nodes, m_dot_pipes, m_dot_nodes, gas = sim.run_one_level(net, "BP")
assert p_nodes.shape == (len(graph.nodes),)
assert m_dot_pipes.shape == (len(graph.edges),)
assert m_dot_nodes.shape == (len(graph.nodes),)
def test_run_one_level_BP_values(simple_network):
net = simple_network
g = top.graphs_by_level_as_dict(net)
graph = g["BP"]
p_nodes, m_dot_pipes, m_dot_nodes, gas = sim.run_one_level(net, "BP")
assert p_nodes.round().tolist() == [102200.0, 102190.0, 102188.0, 102193.0, 102190.0, 102200.0]
assert m_dot_pipes.round(5).tolist() == [2.1e-04, 2.4e-04, 3.0e-05, 7.0e-05, -1.4e-04, 7.0e-05, -2.0e-04, 1.0e-05]
assert m_dot_nodes.round(5).tolist() == [-0.00045, 0.00026, 0.00026, 0.0, 0.00026, -0.00034]
|
[
"pandangas.simu_linear.create_b",
"pandangas.simu_linear.solve",
"pandangas.simu_linear.create_a",
"pandangas.simu_linear.run_one_level",
"pandangas.simu_linear._operating_pressures_as_dict",
"numpy.array",
"pandangas.topology.graphs_by_level_as_dict",
"thermo.chemical.Chemical",
"pandangas.simu_linear._scaled_loads_as_dict",
"pandangas.simu_linear.create_k",
"pandangas.simu_linear.weird"
] |
[((363, 389), 'numpy.array', 'np.array', (['[[3, 1], [1, 2]]'], {}), '([[3, 1], [1, 2]])\n', (371, 389), True, 'import numpy as np\n'), ((398, 414), 'numpy.array', 'np.array', (['[9, 8]'], {}), '([9, 8])\n', (406, 414), True, 'import numpy as np\n'), ((508, 536), 'numpy.array', 'np.array', (['[1, 0, 0, 1, 0, 1]'], {}), '([1, 0, 0, 1, 0, 1])\n', (516, 536), True, 'import numpy as np\n'), ((550, 620), 'numpy.array', 'np.array', (['[[1, 0, 0, 0, 0, 0], [0, 0, 0, 1, 0, 0], [0, 0, 0, 0, 0, 1]]'], {}), '([[1, 0, 0, 0, 0, 0], [0, 0, 0, 1, 0, 0], [0, 0, 0, 0, 0, 1]])\n', (558, 620), True, 'import numpy as np\n'), ((716, 766), 'thermo.chemical.Chemical', 'Chemical', (['"""natural gas"""'], {'T': '(10 + 273.15)', 'P': '(102200.0)'}), "('natural gas', T=10 + 273.15, P=102200.0)\n", (724, 766), False, 'from thermo.chemical import Chemical\n'), ((799, 831), 'pandangas.topology.graphs_by_level_as_dict', 'top.graphs_by_level_as_dict', (['net'], {}), '(net)\n', (826, 831), True, 'from pandangas import topology as top\n'), ((860, 884), 'pandangas.simu_linear.create_a', 'sim.create_a', (['graph', 'gas'], {}), '(graph, gas)\n', (872, 884), True, 'from pandangas import simu_linear as sim\n'), ((963, 1013), 'thermo.chemical.Chemical', 'Chemical', (['"""natural gas"""'], {'T': '(10 + 273.15)', 'P': '(102200.0)'}), "('natural gas', T=10 + 273.15, P=102200.0)\n", (971, 1013), False, 'from thermo.chemical import Chemical\n'), ((1046, 1078), 'pandangas.topology.graphs_by_level_as_dict', 'top.graphs_by_level_as_dict', (['net'], {}), '(net)\n', (1073, 1078), True, 'from pandangas import topology as top\n'), ((1107, 1131), 'pandangas.simu_linear.create_k', 'sim.create_k', (['graph', 'gas'], {}), '(graph, gas)\n', (1119, 1131), True, 'from pandangas import simu_linear as sim\n'), ((1297, 1327), 'pandangas.simu_linear._scaled_loads_as_dict', 'sim._scaled_loads_as_dict', (['net'], {}), '(net)\n', (1322, 1327), True, 'from pandangas import simu_linear as sim\n'), ((1340, 1377), 
'pandangas.simu_linear._operating_pressures_as_dict', 'sim._operating_pressures_as_dict', (['net'], {}), '(net)\n', (1372, 1377), True, 'from pandangas import simu_linear as sim\n'), ((1386, 1418), 'pandangas.topology.graphs_by_level_as_dict', 'top.graphs_by_level_as_dict', (['net'], {}), '(net)\n', (1413, 1418), True, 'from pandangas import topology as top\n'), ((1447, 1480), 'pandangas.simu_linear.create_b', 'sim.create_b', (['graph', 'loads', 'p_ops'], {}), '(graph, loads, p_ops)\n', (1459, 1480), True, 'from pandangas import simu_linear as sim\n'), ((1593, 1625), 'pandangas.topology.graphs_by_level_as_dict', 'top.graphs_by_level_as_dict', (['net'], {}), '(net)\n', (1620, 1625), True, 'from pandangas import topology as top\n'), ((1691, 1719), 'pandangas.simu_linear.run_one_level', 'sim.run_one_level', (['net', '"""BP"""'], {}), "(net, 'BP')\n", (1708, 1719), True, 'from pandangas import simu_linear as sim\n'), ((1957, 1989), 'pandangas.topology.graphs_by_level_as_dict', 'top.graphs_by_level_as_dict', (['net'], {}), '(net)\n', (1984, 1989), True, 'from pandangas import topology as top\n'), ((2055, 2083), 'pandangas.simu_linear.run_one_level', 'sim.run_one_level', (['net', '"""BP"""'], {}), "(net, 'BP')\n", (2072, 2083), True, 'from pandangas import simu_linear as sim\n'), ((441, 456), 'pandangas.simu_linear.solve', 'sim.solve', (['a', 'b'], {}), '(a, b)\n', (450, 456), True, 'from pandangas import simu_linear as sim\n'), ((458, 478), 'numpy.array', 'np.array', (['[2.0, 3.0]'], {}), '([2.0, 3.0])\n', (466, 478), True, 'import numpy as np\n'), ((647, 659), 'pandangas.simu_linear.weird', 'sim.weird', (['a'], {}), '(a)\n', (656, 659), True, 'from pandangas import simu_linear as sim\n')]
|
from django.urls import path, include
from .views import BookViewSet, LoanViewSet, UserViewSet, SelfView, AllBooksView
from rest_framework import routers
router = routers.DefaultRouter()
router.register(r'books', BookViewSet)
router.register(r'loans', LoanViewSet)
router.register(r'users', UserViewSet)
urlpatterns = [
path('self/', SelfView.as_view()),
path('allbooks/', AllBooksView.as_view()),
path('', include(router.urls)),
]
|
[
"rest_framework.routers.DefaultRouter",
"django.urls.include"
] |
[((164, 187), 'rest_framework.routers.DefaultRouter', 'routers.DefaultRouter', ([], {}), '()\n', (185, 187), False, 'from rest_framework import routers\n'), ((421, 441), 'django.urls.include', 'include', (['router.urls'], {}), '(router.urls)\n', (428, 441), False, 'from django.urls import path, include\n')]
|
# Copyright 2019 NeuroData (http://neurodata.io)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
from sklearn.utils.validation import check_is_fitted
from ..utils import import_graph, is_almost_symmetric
from .base import BaseEmbedMulti
from .svd import select_dimension, selectSVD
class MultipleASE(BaseEmbedMulti):
r"""
Multiple Adjacency Spectral Embedding (MASE) embeds arbitrary number of input
graphs with matched vertex sets.
For a population of undirected graphs, MASE assumes that the population of graphs
is sampled from :math:`VR^{(i)}V^T` where :math:`V \in \mathbb{R}^{n\times d}` and
:math:`R^{(i)} \in \mathbb{R}^{d\times d}`. Score matrices, :math:`R^{(i)}`, are
allowed to vary for each graph, but are symmetric. All graphs share a common a
latent position matrix :math:`V`.
For a population of directed graphs, MASE assumes that the population is sampled
from :math:`UR^{(i)}V^T` where :math:`U \in \mathbb{R}^{n\times d_1}`,
:math:`V \in \mathbb{R}^{n\times d_2}`, and
:math:`R^{(i)} \in \mathbb{R}^{d_1\times d_2}`. In this case, score matrices
:math:`R^{(i)}` can be assymetric and non-square, but all graphs still share a
common latent position matrices :math:`U` and :math:`V`.
Parameters
----------
n_components : int or None, default = None
Desired dimensionality of output data. If "full",
n_components must be <= min(X.shape). Otherwise, n_components must be
< min(X.shape). If None, then optimal dimensions will be chosen by
:func:`~graspy.embed.select_dimension` using ``n_elbows`` argument.
n_elbows : int, optional, default: 2
If ``n_components=None``, then compute the optimal embedding dimension using
:func:`~graspy.embed.select_dimension`. Otherwise, ignored.
algorithm : {'randomized' (default), 'full', 'truncated'}, optional
SVD solver to use:
- 'randomized'
Computes randomized svd using
:func:`sklearn.utils.extmath.randomized_svd`
- 'full'
Computes full svd using :func:`scipy.linalg.svd`
- 'truncated'
Computes truncated svd using :func:`scipy.sparse.linalg.svds`
n_iter : int, optional (default = 5)
Number of iterations for randomized SVD solver. Not used by 'full' or
'truncated'. The default is larger than the default in randomized_svd
to handle sparse matrices that may have large slowly decaying spectrum.
scaled : bool, optional (default=False)
Whether to scale individual eigenvectors with eigenvalues in first embedding
stage.
Attributes
----------
n_graphs_ : int
Number of graphs
n_vertices_ : int
Number of vertices in each graph
latent_left_ : array, shape (n_samples, n_components)
Estimated left latent positions of the graph.
latent_right_ : array, shape (n_samples, n_components), or None
Estimated right latent positions of the graph. Only computed when the an input
graph is directed, or adjacency matrix is assymetric. Otherwise, None.
scores_ : array, shape (n_samples, n_components, n_components)
Estimated :math:`\hat{R}` matrices for each input graph.
Notes
-----
When an input graph is directed, `n_components` of `latent_left_` may not be equal
to `n_components` of `latent_right_`.
"""
def __init__(
self,
n_components=None,
n_elbows=2,
algorithm="randomized",
n_iter=5,
scaled=False,
):
if not isinstance(scaled, bool):
msg = "scaled must be a boolean, not {}".format(scaled)
raise TypeError(msg)
super().__init__(
n_components=n_components,
n_elbows=n_elbows,
algorithm=algorithm,
n_iter=n_iter,
)
self.scaled = scaled
def _reduce_dim(self, graphs):
# first embed into log2(n_vertices) for each graph
n_components = int(np.ceil(np.log2(np.min(self.n_vertices_))))
# embed individual graphs
embeddings = [
selectSVD(
graph,
n_components=n_components,
algorithm=self.algorithm,
n_iter=self.n_iter,
)
for graph in graphs
]
Us, Ds, Vs = zip(*embeddings)
# Choose the best embedding dimension for each graphs
if self.n_components is None:
embedding_dimensions = []
for D in Ds:
elbows, _ = select_dimension(D, n_elbows=self.n_elbows)
embedding_dimensions.append(elbows[-1])
# Choose the max of all of best embedding dimension of all graphs
best_dimension = int(np.ceil(np.max(embedding_dimensions)))
else:
best_dimension = self.n_components
if not self.scaled:
Us = np.hstack([U[:, :best_dimension] for U in Us])
Vs = np.hstack([V.T[:, :best_dimension] for V in Vs])
else:
# Equivalent to ASE
Us = np.hstack(
[
U[:, :best_dimension] @ np.diag(np.sqrt(D[:best_dimension]))
for U, D in zip(Us, Ds)
]
)
Vs = np.hstack(
[
V.T[:, :best_dimension] @ np.diag(np.sqrt(D[:best_dimension]))
for V, D in zip(Vs, Ds)
]
)
# Second SVD for vertices
# The notation is slightly different than the paper
Uhat, _, _ = selectSVD(
Us,
n_components=self.n_components,
n_elbows=self.n_elbows,
algorithm=self.algorithm,
n_iter=self.n_iter,
)
Vhat, _, _ = selectSVD(
Vs,
n_components=self.n_components,
n_elbows=self.n_elbows,
algorithm=self.algorithm,
n_iter=self.n_iter,
)
return Uhat, Vhat
def fit(self, graphs, y=None):
"""
Fit the model with graphs.
Parameters
----------
graphs : list of nx.Graph or ndarray, or ndarray
If list of nx.Graph, each Graph must contain same number of nodes.
If list of ndarray, each array must have shape (n_vertices, n_vertices).
If ndarray, then array must have shape (n_graphs, n_vertices, n_vertices).
Returns
-------
self : object
Returns an instance of self.
"""
graphs = self._check_input_graphs(graphs)
# Check if undirected
undirected = all(is_almost_symmetric(g) for g in graphs)
# embed
Uhat, Vhat = self._reduce_dim(graphs)
self.latent_left_ = Uhat
if not undirected:
self.latent_right_ = Vhat
self.scores_ = Uhat.T @ graphs @ Vhat
else:
self.latent_right_ = None
self.scores_ = Uhat.T @ graphs @ Uhat
return self
def fit_transform(self, graphs, y=None):
"""
Fit the model with graphs and apply the embedding on graphs.
n_components is either automatically determined or based on user input.
Parameters
----------
graphs : list of nx.Graph or ndarray, or ndarray
If list of nx.Graph, each Graph must contain same number of nodes.
If list of ndarray, each array must have shape (n_vertices, n_vertices).
If ndarray, then array must have shape (n_graphs, n_vertices, n_vertices).
Returns
-------
out : array-like, shape (n_vertices, n_components) if input
graphs were symmetric. If graphs were directed, returns tuple of
two arrays (same shape as above) where the first corresponds to the
left latent positions, and the right to the right latent positions
"""
return self._fit_transform(graphs)
|
[
"numpy.min",
"numpy.sqrt",
"numpy.max",
"numpy.hstack"
] |
[((5492, 5538), 'numpy.hstack', 'np.hstack', (['[U[:, :best_dimension] for U in Us]'], {}), '([U[:, :best_dimension] for U in Us])\n', (5501, 5538), True, 'import numpy as np\n'), ((5556, 5604), 'numpy.hstack', 'np.hstack', (['[V.T[:, :best_dimension] for V in Vs]'], {}), '([V.T[:, :best_dimension] for V in Vs])\n', (5565, 5604), True, 'import numpy as np\n'), ((4595, 4619), 'numpy.min', 'np.min', (['self.n_vertices_'], {}), '(self.n_vertices_)\n', (4601, 4619), True, 'import numpy as np\n'), ((5354, 5382), 'numpy.max', 'np.max', (['embedding_dimensions'], {}), '(embedding_dimensions)\n', (5360, 5382), True, 'import numpy as np\n'), ((5749, 5776), 'numpy.sqrt', 'np.sqrt', (['D[:best_dimension]'], {}), '(D[:best_dimension])\n', (5756, 5776), True, 'import numpy as np\n'), ((5954, 5981), 'numpy.sqrt', 'np.sqrt', (['D[:best_dimension]'], {}), '(D[:best_dimension])\n', (5961, 5981), True, 'import numpy as np\n')]
|
import logging
import sys
import traceback
from cliff import show
import pkg_resources
LOG = logging.getLogger(__name__)
class EntryPointShow(show.ShowOne):
"""Shows the details for a single entry point.
"""
def get_parser(self, prog_name):
p = super(EntryPointShow, self).get_parser(prog_name)
p.add_argument(
'group',
help='the name of the group to show',
)
p.add_argument(
'name',
help='the name of the entry point to show',
)
p.add_argument(
'--distribution',
default=None,
help='the name of the distribution if name is not unique',
)
return p
def take_action(self, parsed_args):
if parsed_args.distribution:
LOG.debug(
'Loading %s from %s using distribution %s',
parsed_args.name,
parsed_args.group,
parsed_args.distribution,
)
dist = pkg_resources.get_distribution(parsed_args.distribution)
ep = pkg_resources.get_entry_info(
dist,
parsed_args.group,
parsed_args.name,
)
else:
LOG.debug(
'Looking for %s in group %s',
parsed_args.name,
parsed_args.group,
)
try:
ep = next(pkg_resources.iter_entry_points(
parsed_args.group,
parsed_args.name,
))
except StopIteration:
raise ValueError('Could not find %r in %r' % (
parsed_args.name,
parsed_args.group,
))
try:
ep.load()
except Exception:
tb = traceback.format_exception(*sys.exc_info())
else:
tb = ''
return (
('Module', 'Member', 'Distribution', 'Path', 'Error'),
(ep.module_name,
'.'.join(ep.attrs),
str(ep.dist),
ep.dist.location,
tb),
)
|
[
"pkg_resources.get_distribution",
"sys.exc_info",
"pkg_resources.get_entry_info",
"pkg_resources.iter_entry_points",
"logging.getLogger"
] |
[((96, 123), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (113, 123), False, 'import logging\n'), ((1019, 1075), 'pkg_resources.get_distribution', 'pkg_resources.get_distribution', (['parsed_args.distribution'], {}), '(parsed_args.distribution)\n', (1049, 1075), False, 'import pkg_resources\n'), ((1093, 1164), 'pkg_resources.get_entry_info', 'pkg_resources.get_entry_info', (['dist', 'parsed_args.group', 'parsed_args.name'], {}), '(dist, parsed_args.group, parsed_args.name)\n', (1121, 1164), False, 'import pkg_resources\n'), ((1437, 1505), 'pkg_resources.iter_entry_points', 'pkg_resources.iter_entry_points', (['parsed_args.group', 'parsed_args.name'], {}), '(parsed_args.group, parsed_args.name)\n', (1468, 1505), False, 'import pkg_resources\n'), ((1865, 1879), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (1877, 1879), False, 'import sys\n')]
|
from flask import Flask, request, redirect, render_template, session
from flask.json import jsonify
import os
app = Flask(__name__)
# convert transaction Hach to a 6 base
def reVal(num):
if (num >= 0 and num <= 9):
return chr(num + ord('0'))
else:
return chr(num - 10 + ord('A'))
# Utility function to reverse a string
def strev(str):
len = len(str)
for i in range(int(len / 2)):
temp = str[i]
str[i] = str[len - i - 1]
str[len - i - 1] = temp
def fromDeci(res, base, inputNum):
#index = 0 Initialize index of result
# Convert input number is given base
# by repeatedly dividing it by base
# and taking remainder
while (inputNum > 0):
res+= reVal(inputNum % base)
inputNum = int(inputNum / base)
# Reverse the result
res = res[::-1]
return res
# Driver Code
#convert hach of a transaction to 6 base
Hash=0x755e1278c22c92c4ea0b5a44b3dd52a8a84ca59531849d0e279c84eb289da8f2
Hash_in_dec = int(Hash[1:],16)
base = 6
res = ""
Hash_in_dec_list=str(fromDeci(res, base, Hash_in_dec))
# address of transaction converted to decimal
address_bradg=0xfff923f5a1016e422ddb5d5b7d3ef8152957d2a5
address1=int(address_bradg[1:], 16)
# write in text file note using the disposition from Hash_in_dec_list and note number from address
Hash_in_dec_list=[int(i) for i in str(list)]
j=0
while j < len(Hash_in_dec_list):
with open("note.txt",'r+') as f:
line = f.readlines()
note=""
f.seek(0)
for index,line in enumerate(line):
if index==Hash_in_dec_list[j]:
note+=line.strip()+ str(address1[j])+'\n'
else:
note+=line.strip()+ '-\n'
f.write(note)
f.close()
j+=1
# import Hash from javascrypt web page
@app.route('/', methods=['POST'])
def postmethod():
TxHash = request.get_json()
print(TxHash)
return jsonify()
|
[
"flask.json.jsonify",
"flask.Flask",
"flask.request.get_json"
] |
[((117, 132), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (122, 132), False, 'from flask import Flask, request, redirect, render_template, session\n'), ((1912, 1930), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (1928, 1930), False, 'from flask import Flask, request, redirect, render_template, session\n'), ((1960, 1969), 'flask.json.jsonify', 'jsonify', ([], {}), '()\n', (1967, 1969), False, 'from flask.json import jsonify\n')]
|
from indy_node.server.upgrade_log import UpgradeLog
from indy_node.test import waits
from indy_node.test.upgrade.helper import checkUpgradeScheduled, sdk_ensure_upgrade_sent
from plenum.common.constants import VERSION
from plenum.common.messages.node_messages import Propagate
from plenum.common.request import Request
from plenum.test.delayers import req_delay, ppgDelay
from plenum.test.test_node import getNonPrimaryReplicas
def test_forced_upgrade_handled_once_if_request_received_after_propagate(
looper, nodeSet, sdk_pool_handle, sdk_wallet_trustee,
validUpgradeExpForceTrue):
"""
Verifies that POOL_UPGRADE force=true request is handled one time in case
the node commits the transaction to the ledger but during the 3PC-process
receives the request directly from the client after a PROPAGATE from some
other node
"""
slow_node = getNonPrimaryReplicas(nodeSet, instId=0)[-1].node
slow_node.clientIbStasher.delay(req_delay())
slow_node.nodeIbStasher.delay(ppgDelay(sender_filter='Beta'))
slow_node.nodeIbStasher.delay(ppgDelay(sender_filter='Gamma'))
original_process_propagate = slow_node.nodeMsgRouter.routes[Propagate]
original_process_request = slow_node.clientMsgRouter.routes[Request]
def patched_process_propagate(msg: Propagate, frm: str):
original_process_propagate(msg, frm)
slow_node.clientIbStasher.reset_delays_and_process_delayeds()
slow_node.nodeMsgRouter.routes[Propagate] = original_process_propagate
def patched_process_request(request: Request, frm: str):
original_process_request(request, frm)
slow_node.nodeIbStasher.reset_delays_and_process_delayeds()
slow_node.clientMsgRouter.routes[Request] = original_process_request
slow_node.nodeMsgRouter.routes[Propagate] = patched_process_propagate
slow_node.clientMsgRouter.routes[Request] = patched_process_request
init_len = len(list(slow_node.upgrader._actionLog))
sdk_ensure_upgrade_sent(looper, sdk_pool_handle, sdk_wallet_trustee,
validUpgradeExpForceTrue)
looper.runFor(waits.expectedUpgradeScheduled())
checkUpgradeScheduled([slow_node], validUpgradeExpForceTrue[VERSION])
if init_len ==0:
# first upgrade - should be only one scheduled
assert len(list(slow_node.upgrader._actionLog)) == 1
else:
# one upgrade were already scheduled. we should cancel it and schedule new one
# so action log should be increased by 2
assert len(list(slow_node.upgrader._actionLog)) == init_len + 2
assert slow_node.upgrader._actionLog.last_event.ev_type == UpgradeLog.Events.scheduled
|
[
"indy_node.test.waits.expectedUpgradeScheduled",
"plenum.test.delayers.ppgDelay",
"plenum.test.delayers.req_delay",
"indy_node.test.upgrade.helper.sdk_ensure_upgrade_sent",
"indy_node.test.upgrade.helper.checkUpgradeScheduled",
"plenum.test.test_node.getNonPrimaryReplicas"
] |
[((1983, 2081), 'indy_node.test.upgrade.helper.sdk_ensure_upgrade_sent', 'sdk_ensure_upgrade_sent', (['looper', 'sdk_pool_handle', 'sdk_wallet_trustee', 'validUpgradeExpForceTrue'], {}), '(looper, sdk_pool_handle, sdk_wallet_trustee,\n validUpgradeExpForceTrue)\n', (2006, 2081), False, 'from indy_node.test.upgrade.helper import checkUpgradeScheduled, sdk_ensure_upgrade_sent\n'), ((2164, 2233), 'indy_node.test.upgrade.helper.checkUpgradeScheduled', 'checkUpgradeScheduled', (['[slow_node]', 'validUpgradeExpForceTrue[VERSION]'], {}), '([slow_node], validUpgradeExpForceTrue[VERSION])\n', (2185, 2233), False, 'from indy_node.test.upgrade.helper import checkUpgradeScheduled, sdk_ensure_upgrade_sent\n'), ((969, 980), 'plenum.test.delayers.req_delay', 'req_delay', ([], {}), '()\n', (978, 980), False, 'from plenum.test.delayers import req_delay, ppgDelay\n'), ((1016, 1046), 'plenum.test.delayers.ppgDelay', 'ppgDelay', ([], {'sender_filter': '"""Beta"""'}), "(sender_filter='Beta')\n", (1024, 1046), False, 'from plenum.test.delayers import req_delay, ppgDelay\n'), ((1082, 1113), 'plenum.test.delayers.ppgDelay', 'ppgDelay', ([], {'sender_filter': '"""Gamma"""'}), "(sender_filter='Gamma')\n", (1090, 1113), False, 'from plenum.test.delayers import req_delay, ppgDelay\n'), ((2125, 2157), 'indy_node.test.waits.expectedUpgradeScheduled', 'waits.expectedUpgradeScheduled', ([], {}), '()\n', (2155, 2157), False, 'from indy_node.test import waits\n'), ((882, 922), 'plenum.test.test_node.getNonPrimaryReplicas', 'getNonPrimaryReplicas', (['nodeSet'], {'instId': '(0)'}), '(nodeSet, instId=0)\n', (903, 922), False, 'from plenum.test.test_node import getNonPrimaryReplicas\n')]
|
# Copyright (C) 2021, Pyronear contributors.
# This program is licensed under the Apache License version 2.
# See LICENSE or go to <https://www.apache.org/licenses/LICENSE-2.0.txt> for full license details.
from typing import List
from fastapi import APIRouter, Path, Security
from app.api import crud
from app.db import accesses
from app.api.schemas import AccessRead, AccessType
from app.api.deps import get_current_access
router = APIRouter()
@router.get("/{access_id}/", response_model=AccessRead, summary="Get information about a specific access")
async def get_access(access_id: int = Path(..., gt=0), _=Security(get_current_access, scopes=[AccessType.admin])):
"""
Based on a access_id, retrieves information about the specified access
"""
return await crud.get_entry(accesses, access_id)
@router.get("/", response_model=List[AccessRead], summary="Get the list of all accesses")
async def fetch_accesses(_=Security(get_current_access, scopes=[AccessType.admin])):
"""
Retrieves the list of all accesses and their information
"""
return await crud.fetch_all(accesses)
|
[
"app.api.crud.get_entry",
"fastapi.Path",
"fastapi.Security",
"app.api.crud.fetch_all",
"fastapi.APIRouter"
] |
[((438, 449), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (447, 449), False, 'from fastapi import APIRouter, Path, Security\n'), ((597, 612), 'fastapi.Path', 'Path', (['...'], {'gt': '(0)'}), '(..., gt=0)\n', (601, 612), False, 'from fastapi import APIRouter, Path, Security\n'), ((616, 671), 'fastapi.Security', 'Security', (['get_current_access'], {'scopes': '[AccessType.admin]'}), '(get_current_access, scopes=[AccessType.admin])\n', (624, 671), False, 'from fastapi import APIRouter, Path, Security\n'), ((937, 992), 'fastapi.Security', 'Security', (['get_current_access'], {'scopes': '[AccessType.admin]'}), '(get_current_access, scopes=[AccessType.admin])\n', (945, 992), False, 'from fastapi import APIRouter, Path, Security\n'), ((782, 817), 'app.api.crud.get_entry', 'crud.get_entry', (['accesses', 'access_id'], {}), '(accesses, access_id)\n', (796, 817), False, 'from app.api import crud\n'), ((1089, 1113), 'app.api.crud.fetch_all', 'crud.fetch_all', (['accesses'], {}), '(accesses)\n', (1103, 1113), False, 'from app.api import crud\n')]
|
#############################################################################
#
# A test for the PyroNS_NTService program
# Author: <NAME> <EMAIL>
#
# This is part of "Pyro" - Python Remote Objects
# Which is (c) <NAME> - <EMAIL>
#
#############################################################################
import unittest
import win32serviceutil
import win32service
import time
import Pyro.nsc
ServiceName = 'PyroNS'
class Test(unittest.TestCase):
def setUp(self):
win32serviceutil.StartService(ServiceName)
def testStartPending(self):
svcType, svcState, svcControls, err, svcErr, svcCP, svcWH = \
win32serviceutil.QueryServiceStatus(ServiceName)
assert svcState & win32service.SERVICE_START_PENDING
def testFullyStarted(self):
self._waitForStarted()
svcType, svcState, svcControls, err, svcErr, svcCP, svcWH = \
win32serviceutil.QueryServiceStatus(ServiceName)
assert svcType & win32service.SERVICE_WIN32_OWN_PROCESS
assert svcState & win32service.SERVICE_RUNNING
assert svcControls & win32service.SERVICE_ACCEPT_STOP
def testStop(self):
self._waitForStarted()
svcType, svcState, svcControls, err, svcErr, svcCP, svcWH = \
win32serviceutil.StopService(ServiceName)
assert svcState & win32service.SERVICE_STOPPED
assert svcType & win32service.SERVICE_WIN32_OWN_PROCESS
def testNameserverAvailable(self):
self._waitForStarted()
ctrl = Pyro.nsc.PyroNSControl()
ctrl.args(None)
ctrl.ping()
def testNameserverShutdownFromNsc(self):
self._waitForStarted()
ctrl = Pyro.nsc.PyroNSControl()
ctrl.args(None)
ctrl.shutdown()
for each in range(100):
svcType, svcState, svcControls, err, svcErr, svcCP, svcWH = \
win32serviceutil.QueryServiceStatus(ServiceName)
if svcState & win32service.SERVICE_STOPPED:
return
time.sleep(0.20)
self.fail()
def tearDown(self):
for each in range(1000):
svcType, svcState, svcControls, err, svcErr, svcCP, svcWH = \
win32serviceutil.QueryServiceStatus(ServiceName)
if svcState & win32service.SERVICE_RUNNING:
svcType, svcState, svcControls, err, svcErr, svcCP, svcWH = \
win32serviceutil.StopService(ServiceName)
time.sleep(0.1)
elif svcState & win32service.SERVICE_STOPPED:
time.sleep(0.10)
break
else:
time.sleep(0.10)
assert svcState & win32service.SERVICE_STOPPED
time.sleep(3)
def _waitForStarted(self):
for each in range(100):
svcType, svcState, svcControls, err, svcErr, svcCP, svcWH = \
win32serviceutil.QueryServiceStatus(ServiceName)
if svcState & win32service.SERVICE_RUNNING:
break
else:
time.sleep(0.10)
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"win32serviceutil.StartService",
"win32serviceutil.StopService",
"time.sleep",
"win32serviceutil.QueryServiceStatus"
] |
[((2653, 2668), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2666, 2668), False, 'import unittest\n'), ((478, 520), 'win32serviceutil.StartService', 'win32serviceutil.StartService', (['ServiceName'], {}), '(ServiceName)\n', (507, 520), False, 'import win32serviceutil\n'), ((622, 670), 'win32serviceutil.QueryServiceStatus', 'win32serviceutil.QueryServiceStatus', (['ServiceName'], {}), '(ServiceName)\n', (657, 670), False, 'import win32serviceutil\n'), ((852, 900), 'win32serviceutil.QueryServiceStatus', 'win32serviceutil.QueryServiceStatus', (['ServiceName'], {}), '(ServiceName)\n', (887, 900), False, 'import win32serviceutil\n'), ((1180, 1221), 'win32serviceutil.StopService', 'win32serviceutil.StopService', (['ServiceName'], {}), '(ServiceName)\n', (1208, 1221), False, 'import win32serviceutil\n'), ((2345, 2358), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (2355, 2358), False, 'import time\n'), ((1692, 1740), 'win32serviceutil.QueryServiceStatus', 'win32serviceutil.QueryServiceStatus', (['ServiceName'], {}), '(ServiceName)\n', (1727, 1740), False, 'import win32serviceutil\n'), ((1802, 1817), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (1812, 1817), False, 'import time\n'), ((1953, 2001), 'win32serviceutil.QueryServiceStatus', 'win32serviceutil.QueryServiceStatus', (['ServiceName'], {}), '(ServiceName)\n', (1988, 2001), False, 'import win32serviceutil\n'), ((2485, 2533), 'win32serviceutil.QueryServiceStatus', 'win32serviceutil.QueryServiceStatus', (['ServiceName'], {}), '(ServiceName)\n', (2520, 2533), False, 'import win32serviceutil\n'), ((2122, 2163), 'win32serviceutil.StopService', 'win32serviceutil.StopService', (['ServiceName'], {}), '(ServiceName)\n', (2150, 2163), False, 'import win32serviceutil\n'), ((2168, 2183), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (2178, 2183), False, 'import time\n'), ((2604, 2619), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (2614, 2619), False, 'import time\n'), ((2237, 2252), 
'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (2247, 2252), False, 'import time\n'), ((2277, 2292), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (2287, 2292), False, 'import time\n')]
|
#!/usr/bin/python3
if __name__=="__main__":
# executable in ubuntu: https://stackoverflow.com/a/64641595/2351696
import models
from forms import home
models.create_tables_if_not_exist()
root=home.Tk()
root['bg']='black'
root.resizable(0,0)
frmmenu=home.FormMenu(root)
root.mainloop()
|
[
"models.create_tables_if_not_exist",
"forms.home.Tk",
"forms.home.FormMenu"
] |
[((159, 194), 'models.create_tables_if_not_exist', 'models.create_tables_if_not_exist', ([], {}), '()\n', (192, 194), False, 'import models\n'), ((201, 210), 'forms.home.Tk', 'home.Tk', ([], {}), '()\n', (208, 210), False, 'from forms import home\n'), ((261, 280), 'forms.home.FormMenu', 'home.FormMenu', (['root'], {}), '(root)\n', (274, 280), False, 'from forms import home\n')]
|
# -*- coding: utf-8 -*-
'''
Manage Data Pipelines
.. versionadded:: 2016.3.0
Be aware that this interacts with Amazon's services, and so may incur charges.
This module uses ``boto3``, which can be installed via package, or pip.
This module accepts explicit AWS credentials but can also utilize
IAM roles assigned to the instance through Instance Profiles. Dynamic
credentials are then automatically obtained from AWS API and no further
configuration is necessary. More information available `here
<http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html>`_.
If IAM roles are not used you need to specify them either in a pillar file or
in the minion's config file:
.. code-block:: yaml
datapipeline.keyid: <KEY>
datapipeline.key: <KEY>
It's also possible to specify ``key``, ``keyid`` and ``region`` via a profile,
either passed in as a dict, or as a string to pull from pillars or minion
config:
.. code-block:: yaml
myprofile:
keyid: <KEY>
key: <KEY>
region: us-east-1
.. code-block:: yaml
Ensure daily data pipeline exists:
boto_datapipeline.present:
- name: my-datapipeline
- pipeline_objects:
DefaultSchedule:
name: Every 1 day
fields:
period: 1 Day
type: Schedule
startAt: FIRST_ACTIVATION_DATE_TIME
- parameter_values:
myDDBTableName: my-dynamo-table
'''
# Import Python libs
from __future__ import absolute_import
import copy
import datetime
import difflib
# Import Salt lobs
import salt.utils.json
from salt.ext import six
from salt.ext.six.moves import zip
def __virtual__():
'''
Only load if boto is available.
'''
return 'boto_datapipeline' if 'boto_datapipeline.create_pipeline' in __salt__ else False
def present(name, pipeline_objects=None,
pipeline_objects_from_pillars='boto_datapipeline_pipeline_objects',
parameter_objects=None,
parameter_objects_from_pillars='boto_datapipeline_parameter_objects',
parameter_values=None,
parameter_values_from_pillars='boto_datapipeline_parameter_values',
region=None,
key=None, keyid=None, profile=None):
'''
Ensure the data pipeline exists with matching definition.
name
Name of the service to ensure a data pipeline exists for.
pipeline_objects
Pipeline objects to use. Will override objects read from pillars.
pipeline_objects_from_pillars
The pillar key to use for lookup.
parameter_objects
Parameter objects to use. Will override objects read from pillars.
parameter_objects_from_pillars
The pillar key to use for lookup.
parameter_values
Parameter values to use. Will override values read from pillars.
parameter_values_from_pillars
The pillar key to use for lookup.
region
Region to connect to.
key
Secret key to be used.
keyid
Access key to be used.
profile
A dict with region, key and keyid, or a pillar key (string)
that contains a dict with region, key and keyid.
'''
ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}
pipeline_objects = pipeline_objects or {}
parameter_objects = parameter_objects or {}
parameter_values = parameter_values or {}
present, old_pipeline_definition = _pipeline_present_with_definition(
name,
_pipeline_objects(pipeline_objects_from_pillars, pipeline_objects),
_parameter_objects(parameter_objects_from_pillars, parameter_objects),
_parameter_values(parameter_values_from_pillars, parameter_values),
region=region,
key=key,
keyid=keyid,
profile=profile,
)
if present:
ret['comment'] = 'AWS data pipeline {0} present'.format(name)
return ret
if __opts__['test']:
ret['comment'] = 'Data pipeline {0} is set to be created or updated'.format(name)
ret['result'] = None
return ret
result_create_pipeline = __salt__['boto_datapipeline.create_pipeline'](
name,
name,
region=region,
key=key,
keyid=keyid,
profile=profile,
)
if 'error' in result_create_pipeline:
ret['result'] = False
ret['comment'] = 'Failed to create data pipeline {0}: {1}'.format(
name, result_create_pipeline['error'])
return ret
pipeline_id = result_create_pipeline['result']
result_pipeline_definition = __salt__['boto_datapipeline.put_pipeline_definition'](
pipeline_id,
_pipeline_objects(pipeline_objects_from_pillars, pipeline_objects),
parameter_objects=_parameter_objects(parameter_objects_from_pillars, parameter_objects),
parameter_values=_parameter_values(parameter_values_from_pillars, parameter_values),
region=region,
key=key,
keyid=keyid,
profile=profile,
)
if 'error' in result_pipeline_definition:
if _immutable_fields_error(result_pipeline_definition):
# If update not possible, delete and retry
result_delete_pipeline = __salt__['boto_datapipeline.delete_pipeline'](
pipeline_id,
region=region,
key=key,
keyid=keyid,
profile=profile,
)
if 'error' in result_delete_pipeline:
ret['result'] = False
ret['comment'] = 'Failed to delete data pipeline {0}: {1}'.format(
pipeline_id, result_delete_pipeline['error'])
return ret
result_create_pipeline = __salt__['boto_datapipeline.create_pipeline'](
name,
name,
region=region,
key=key,
keyid=keyid,
profile=profile,
)
if 'error' in result_create_pipeline:
ret['result'] = False
ret['comment'] = 'Failed to create data pipeline {0}: {1}'.format(
name, result_create_pipeline['error'])
return ret
pipeline_id = result_create_pipeline['result']
result_pipeline_definition = __salt__['boto_datapipeline.put_pipeline_definition'](
pipeline_id,
_pipeline_objects(pipeline_objects_from_pillars, pipeline_objects),
parameter_objects=_parameter_objects(parameter_objects_from_pillars, parameter_objects),
parameter_values=_parameter_values(parameter_values_from_pillars, parameter_values),
region=region,
key=key,
keyid=keyid,
profile=profile,
)
if 'error' in result_pipeline_definition:
# Still erroring after possible retry
ret['result'] = False
ret['comment'] = 'Failed to create data pipeline {0}: {1}'.format(
name, result_pipeline_definition['error'])
return ret
result_activate_pipeline = __salt__['boto_datapipeline.activate_pipeline'](
pipeline_id,
region=region,
key=key,
keyid=keyid,
profile=profile,
)
if 'error' in result_activate_pipeline:
ret['result'] = False
ret['comment'] = 'Failed to create data pipeline {0}: {1}'.format(
name, result_pipeline_definition['error'])
return ret
pipeline_definition_result = __salt__['boto_datapipeline.get_pipeline_definition'](
pipeline_id,
version='active',
region=region,
key=key,
keyid=keyid,
profile=profile,
)
if 'error' in pipeline_definition_result:
new_pipeline_definition = {}
else:
new_pipeline_definition = _standardize(pipeline_definition_result['result'])
if not old_pipeline_definition:
ret['changes']['new'] = 'Pipeline created.'
ret['comment'] = 'Data pipeline {0} created'.format(name)
else:
ret['changes']['diff'] = _diff(old_pipeline_definition, new_pipeline_definition)
ret['comment'] = 'Data pipeline {0} updated'.format(name)
return ret
def _immutable_fields_error(result_pipeline_definition):
'''Return true if update pipeline failed due to immutable fields
Some fields cannot be changed after a pipeline has been activated.
http://docs.aws.amazon.com/datapipeline/latest/DeveloperGuide/dp-manage-pipeline-modify-console.html#dp-edit-pipeline-limits
'''
for e in result_pipeline_definition['error']:
for e2 in e["errors"]:
if "can not be changed" in e2:
return True
return False
def _pipeline_present_with_definition(name, expected_pipeline_objects,
expected_parameter_objects,
expected_parameter_values, region, key,
keyid, profile):
'''
Return true if the pipeline exists and the definition matches.
name
The name of the pipeline.
expected_pipeline_objects
Pipeline objects that must match the definition.
expected_parameter_objects
Parameter objects that must match the definition.
expected_parameter_values
Parameter values that must match the definition.
region
Region to connect to.
key
Secret key to be used.
keyid
Access key to be used.
profile
A dict with region, key and keyid, or a pillar key (string)
that contains a dict with region, key and keyid.
'''
result_pipeline_id = __salt__['boto_datapipeline.pipeline_id_from_name'](
name,
region=region,
key=key,
keyid=keyid,
profile=profile,
)
if 'error' in result_pipeline_id:
return False, {}
pipeline_id = result_pipeline_id['result']
pipeline_definition_result = __salt__['boto_datapipeline.get_pipeline_definition'](
pipeline_id,
version='active',
region=region,
key=key,
keyid=keyid,
profile=profile,
)
if 'error' in pipeline_definition_result:
return False, {}
pipeline_definition = _standardize(pipeline_definition_result['result'])
pipeline_objects = pipeline_definition.get('pipelineObjects')
parameter_objects = pipeline_definition.get('parameterObjects')
parameter_values = pipeline_definition.get('parameterValues')
present = (_recursive_compare(_cleaned(pipeline_objects), _cleaned(expected_pipeline_objects)) and
_recursive_compare(parameter_objects, expected_parameter_objects) and
_recursive_compare(parameter_values, expected_parameter_values))
return present, pipeline_definition
def _cleaned(_pipeline_objects):
"""Return standardized pipeline objects to be used for comparing
Remove year, month, and day components of the startDateTime so that data
pipelines with the same time of day but different days are considered
equal.
"""
pipeline_objects = copy.deepcopy(_pipeline_objects)
for pipeline_object in pipeline_objects:
if pipeline_object['id'] == 'DefaultSchedule':
for field_object in pipeline_object['fields']:
if field_object['key'] == 'startDateTime':
start_date_time_string = field_object['stringValue']
start_date_time = datetime.datetime.strptime(start_date_time_string,
"%Y-%m-%dT%H:%M:%S")
field_object['stringValue'] = start_date_time.strftime("%H:%M:%S")
return pipeline_objects
def _recursive_compare(v1, v2):
'''
Return v1 == v2. Compares list, dict, recursively.
'''
if isinstance(v1, list):
if v2 is None:
v2 = []
if len(v1) != len(v2):
return False
v1.sort(key=_id_or_key)
v2.sort(key=_id_or_key)
for x, y in zip(v1, v2):
if not _recursive_compare(x, y):
return False
return True
elif isinstance(v1, dict):
if v2 is None:
v2 = {}
v1 = dict(v1)
v2 = dict(v2)
if sorted(v1) != sorted(v2):
return False
for k in v1:
if not _recursive_compare(v1[k], v2[k]):
return False
return True
else:
return v1 == v2
def _id_or_key(list_item):
'''
Return the value at key 'id' or 'key'.
'''
if isinstance(list_item, dict):
if 'id' in list_item:
return list_item['id']
if 'key' in list_item:
return list_item['key']
return list_item
def _diff(old_pipeline_definition, new_pipeline_definition):
'''
Return string diff of pipeline definitions.
'''
old_pipeline_definition.pop('ResponseMetadata', None)
new_pipeline_definition.pop('ResponseMetadata', None)
diff = difflib.unified_diff(
salt.utils.json.dumps(old_pipeline_definition, indent=4).splitlines(True),
salt.utils.json.dumps(new_pipeline_definition, indent=4).splitlines(True),
)
return str('').join(diff) # future lint: disable=blacklisted-function
def _standardize(structure):
'''
Return standardized format for lists/dictionaries.
Lists of dictionaries are sorted by the value of the dictionary at
its primary key ('id' or 'key'). OrderedDict's are converted to
basic dictionaries.
'''
def mutating_helper(structure):
if isinstance(structure, list):
structure.sort(key=_id_or_key)
for each in structure:
mutating_helper(each)
elif isinstance(structure, dict):
structure = dict(structure)
for k, v in six.iteritems(structure):
mutating_helper(k)
mutating_helper(v)
new_structure = copy.deepcopy(structure)
mutating_helper(new_structure)
return new_structure
def _pipeline_objects(pipeline_objects_from_pillars, pipeline_object_overrides):
'''
Return a list of pipeline objects that compose the pipeline
pipeline_objects_from_pillars
The pillar key to use for lookup
pipeline_object_overrides
Pipeline objects to use. Will override objects read from pillars.
'''
from_pillars = copy.deepcopy(__salt__['pillar.get'](pipeline_objects_from_pillars))
from_pillars.update(pipeline_object_overrides)
pipeline_objects = _standardize(_dict_to_list_ids(from_pillars))
for pipeline_object in pipeline_objects:
pipeline_object['fields'] = _properties_from_dict(pipeline_object['fields'])
return pipeline_objects
def _parameter_objects(parameter_objects_from_pillars, parameter_object_overrides):
'''
Return a list of parameter objects that configure the pipeline
parameter_objects_from_pillars
The pillar key to use for lookup
parameter_object_overrides
Parameter objects to use. Will override objects read from pillars.
'''
from_pillars = copy.deepcopy(__salt__['pillar.get'](parameter_objects_from_pillars))
from_pillars.update(parameter_object_overrides)
parameter_objects = _standardize(_dict_to_list_ids(from_pillars))
for parameter_object in parameter_objects:
parameter_object['attributes'] = _properties_from_dict(parameter_object['attributes'])
return parameter_objects
def _parameter_values(parameter_values_from_pillars, parameter_value_overrides):
'''
Return a dictionary of parameter values that configure the pipeline
parameter_values_from_pillars
The pillar key to use for lookup
parameter_value_overrides
Parameter values to use. Will override values read from pillars.
'''
from_pillars = copy.deepcopy(__salt__['pillar.get'](parameter_values_from_pillars))
from_pillars.update(parameter_value_overrides)
parameter_values = _standardize(from_pillars)
return _properties_from_dict(parameter_values, key_name='id')
def _dict_to_list_ids(objects):
'''
Convert a dictionary to a list of dictionaries, where each element has
a key value pair {'id': key}. This makes it easy to override pillar values
while still satisfying the boto api.
'''
list_with_ids = []
for key, value in six.iteritems(objects):
element = {'id': key}
element.update(value)
list_with_ids.append(element)
return list_with_ids
def _properties_from_dict(d, key_name='key'):
'''
Transforms dictionary into pipeline object properties.
The output format conforms to boto's specification.
Example input:
{
'a': '1',
'b': {
'ref': '2'
},
}
Example output:
[
{
'key': 'a',
'stringValue': '1',
},
{
'key': 'b',
'refValue': '2',
},
]
'''
fields = []
for key, value in six.iteritems(d):
if isinstance(value, dict):
fields.append({
key_name: key,
'refValue': value['ref'],
})
else:
fields.append({
key_name: key,
'stringValue': value,
})
return fields
def absent(name, region=None, key=None, keyid=None, profile=None):
'''
Ensure a pipeline with the service_name does not exist
name
Name of the service to ensure a data pipeline does not exist for.
region
Region to connect to.
key
Secret key to be used.
keyid
Access key to be used.
profile
A dict with region, key and keyid, or a pillar key (string)
that contains a dict with region, key and keyid.
'''
ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}
result_pipeline_id = __salt__['boto_datapipeline.pipeline_id_from_name'](
name,
region=region,
key=key,
keyid=keyid,
profile=profile,
)
if 'error' not in result_pipeline_id:
pipeline_id = result_pipeline_id['result']
if __opts__['test']:
ret['comment'] = 'Data pipeline {0} set to be deleted.'.format(name)
ret['result'] = None
return ret
else:
__salt__['boto_datapipeline.delete_pipeline'](
pipeline_id,
region=region,
key=key,
keyid=keyid,
profile=profile,
)
ret['changes']['old'] = {'pipeline_id': pipeline_id}
ret['changes']['new'] = None
else:
ret['comment'] = 'AWS data pipeline {0} absent.'.format(name)
return ret
|
[
"salt.ext.six.moves.zip",
"copy.deepcopy",
"datetime.datetime.strptime",
"salt.ext.six.iteritems"
] |
[((11166, 11198), 'copy.deepcopy', 'copy.deepcopy', (['_pipeline_objects'], {}), '(_pipeline_objects)\n', (11179, 11198), False, 'import copy\n'), ((14023, 14047), 'copy.deepcopy', 'copy.deepcopy', (['structure'], {}), '(structure)\n', (14036, 14047), False, 'import copy\n'), ((16449, 16471), 'salt.ext.six.iteritems', 'six.iteritems', (['objects'], {}), '(objects)\n', (16462, 16471), False, 'from salt.ext import six\n'), ((17161, 17177), 'salt.ext.six.iteritems', 'six.iteritems', (['d'], {}), '(d)\n', (17174, 17177), False, 'from salt.ext import six\n'), ((12097, 12108), 'salt.ext.six.moves.zip', 'zip', (['v1', 'v2'], {}), '(v1, v2)\n', (12100, 12108), False, 'from salt.ext.six.moves import zip\n'), ((13906, 13930), 'salt.ext.six.iteritems', 'six.iteritems', (['structure'], {}), '(structure)\n', (13919, 13930), False, 'from salt.ext import six\n'), ((11528, 11599), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['start_date_time_string', '"""%Y-%m-%dT%H:%M:%S"""'], {}), "(start_date_time_string, '%Y-%m-%dT%H:%M:%S')\n", (11554, 11599), False, 'import datetime\n')]
|
import numpy as np
from numpy.core.function_base import linspace
EPS = 1e-9
class CameraSensor:
"""
Defines the camera sensor properties
"""
def __init__(self, image_size, pitch, RGB=False, name=None, bayer_pattern=None):
self.img_size = np.array([image_size[0], image_size[1]]) # image height, width (in pixels)
self.img_cntr = np.array([int(image_size[0]/2), int(image_size[1]/2)])
if RGB:
self.type = 'RGB'
self.C = 3 # number of channels
if bayer_pattern is not None:
self.bayer_pattern = bayer_pattern
else:
self.bayer_pattern = 'RGGB'
else:
self.type = 'Mono'
self.C = 1
if len(pitch)==1:
self.px_size = np.array([pitch, pitch]) # should be in meters
else:
self.px_size = np.array([pitch[0], pitch[1]]) # should be in meters
self.name = name # name of camera sensor (optional)
# create coordinate system for image plane
dh, dw = self.px_size[0], self.px_size[1]
h , w = dh*self.img_size[0], dw*self.img_size[1]
self.x_sensor = np.linspace( -w/2 + dw/2 , +w/2 - dw/2 + EPS, self.img_size[1])
self.y_sensor = np.linspace( -h/2 + dh/2 , +h/2 - dh/2 + EPS, self.img_size[0])
self.X_sensor, self.Y_sensor = np.meshgrid(self.x_sensor, self.y_sensor)
def get_physical_sensor_size(self):
"""
Returns the physical sensor size (in units of mm x mm)
"""
height_mm = np.float(self.px_size[0]*self.img_size[0])*1000
width_mm = np.float(self.px_size[1]*self.img_size[1])*1000
return height_mm, width_mm
class Lens:
def __init__(self, f, D=None):
self.f = f
self.D = D # D is set to None if its value is irrelevant
class Mask:
"""
Class for creating an amplitude/phase mask.
"""
def __init__(self, mask_pattern, mask_size):
"""
mask_pattern (numpy.ndarray): 2D array of values (real or complex)
mask_size (list or numpy.array): Physical size of mask (h x w). Units of meters
mask_pattern array values should have magnitude should be between [0, 1] for realistic mask patterns.
"""
self.mask = mask_pattern # mask pattern can be a complex-valued as well (numpy 2D array)
self.mask_size = np.array([mask_size[0], mask_size[1]])
self.mask_pitch = np.array([mask_size[0]/mask_pattern.shape[0], mask_size[1]/mask_pattern.shape[1]])
# create coordinate system on mask-plane
h, w = self.mask_size[0], self.mask_size[1]
dh, dw = self.mask_pitch[0], self.mask_pitch[1]
self.x_mask = np.linspace( -w/2 + dw/2 , +w/2 - dw/2 + EPS, num=self.mask.shape[1])
self.y_mask = np.linspace( -h/2 + dh/2 , +h/2 - dh/2 + EPS, num=self.mask.shape[0])
self.X_mask, self.Y_mask = np.meshgrid(self.x_mask, self.y_mask)
|
[
"numpy.meshgrid",
"numpy.array",
"numpy.float",
"numpy.linspace"
] |
[((264, 304), 'numpy.array', 'np.array', (['[image_size[0], image_size[1]]'], {}), '([image_size[0], image_size[1]])\n', (272, 304), True, 'import numpy as np\n'), ((1217, 1286), 'numpy.linspace', 'np.linspace', (['(-w / 2 + dw / 2)', '(+w / 2 - dw / 2 + EPS)', 'self.img_size[1]'], {}), '(-w / 2 + dw / 2, +w / 2 - dw / 2 + EPS, self.img_size[1])\n', (1228, 1286), True, 'import numpy as np\n'), ((1305, 1374), 'numpy.linspace', 'np.linspace', (['(-h / 2 + dh / 2)', '(+h / 2 - dh / 2 + EPS)', 'self.img_size[0]'], {}), '(-h / 2 + dh / 2, +h / 2 - dh / 2 + EPS, self.img_size[0])\n', (1316, 1374), True, 'import numpy as np\n'), ((1408, 1449), 'numpy.meshgrid', 'np.meshgrid', (['self.x_sensor', 'self.y_sensor'], {}), '(self.x_sensor, self.y_sensor)\n', (1419, 1449), True, 'import numpy as np\n'), ((2438, 2476), 'numpy.array', 'np.array', (['[mask_size[0], mask_size[1]]'], {}), '([mask_size[0], mask_size[1]])\n', (2446, 2476), True, 'import numpy as np\n'), ((2503, 2594), 'numpy.array', 'np.array', (['[mask_size[0] / mask_pattern.shape[0], mask_size[1] / mask_pattern.shape[1]]'], {}), '([mask_size[0] / mask_pattern.shape[0], mask_size[1] / mask_pattern\n .shape[1]])\n', (2511, 2594), True, 'import numpy as np\n'), ((2765, 2840), 'numpy.linspace', 'np.linspace', (['(-w / 2 + dw / 2)', '(+w / 2 - dw / 2 + EPS)'], {'num': 'self.mask.shape[1]'}), '(-w / 2 + dw / 2, +w / 2 - dw / 2 + EPS, num=self.mask.shape[1])\n', (2776, 2840), True, 'import numpy as np\n'), ((2857, 2932), 'numpy.linspace', 'np.linspace', (['(-h / 2 + dh / 2)', '(+h / 2 - dh / 2 + EPS)'], {'num': 'self.mask.shape[0]'}), '(-h / 2 + dh / 2, +h / 2 - dh / 2 + EPS, num=self.mask.shape[0])\n', (2868, 2932), True, 'import numpy as np\n'), ((2962, 2999), 'numpy.meshgrid', 'np.meshgrid', (['self.x_mask', 'self.y_mask'], {}), '(self.x_mask, self.y_mask)\n', (2973, 2999), True, 'import numpy as np\n'), ((788, 812), 'numpy.array', 'np.array', (['[pitch, pitch]'], {}), '([pitch, pitch])\n', (796, 812), True, 'import 
numpy as np\n'), ((884, 914), 'numpy.array', 'np.array', (['[pitch[0], pitch[1]]'], {}), '([pitch[0], pitch[1]])\n', (892, 914), True, 'import numpy as np\n'), ((1602, 1646), 'numpy.float', 'np.float', (['(self.px_size[0] * self.img_size[0])'], {}), '(self.px_size[0] * self.img_size[0])\n', (1610, 1646), True, 'import numpy as np\n'), ((1669, 1713), 'numpy.float', 'np.float', (['(self.px_size[1] * self.img_size[1])'], {}), '(self.px_size[1] * self.img_size[1])\n', (1677, 1713), True, 'import numpy as np\n')]
|
###############################################################################
# WordTokenizer
from nimbusml import Pipeline, FileDataStream
from nimbusml.datasets import get_dataset
from nimbusml.preprocessing.text import WordTokenizer
# data input (as a FileDataStream)
path = get_dataset("wiki_detox_train").as_filepath()
data = FileDataStream.read_csv(path, sep='\t')
print(data.head())
# Sentiment SentimentText
# 0 1 ==RUDE== Dude, you are rude upload that carl p...
# 1 1 == OK! == IM GOING TO VANDALIZE WILD ONES WIK...
# 2 1 Stop trolling, zapatancas, calling me a liar m...
# 3 1 ==You're cool== You seem like a really cool g...
# 4 1 ::::: Why are you threatening me? I'm not bein...
tokenize = WordTokenizer(char_array_term_separators=[" "]) << {'wt': 'SentimentText'}
pipeline = Pipeline([tokenize])
tokenize.fit(data)
y = tokenize.transform(data)
print(y.drop(labels='SentimentText', axis=1).head())
# Sentiment wt.000 wt.001 wt.002 wt.003 wt.004 wt.005 ... wt.366 wt.367 wt.368 wt.369 wt.370 wt.371 wt.372
# 0 1 ==RUDE== Dude, you are rude upload ... None None None None None None None
# 1 1 == OK! == IM GOING TO ... None None None None None None None
# 2 1 Stop trolling, zapatancas, calling me a ... None None None None None None None
# 3 1 ==You're cool== You seem like a ... None None None None None None None
# 4 1 ::::: Why are you threatening me? ... None None None None None None None
|
[
"nimbusml.FileDataStream.read_csv",
"nimbusml.Pipeline",
"nimbusml.datasets.get_dataset",
"nimbusml.preprocessing.text.WordTokenizer"
] |
[((337, 376), 'nimbusml.FileDataStream.read_csv', 'FileDataStream.read_csv', (['path'], {'sep': '"""\t"""'}), "(path, sep='\\t')\n", (360, 376), False, 'from nimbusml import Pipeline, FileDataStream\n'), ((889, 909), 'nimbusml.Pipeline', 'Pipeline', (['[tokenize]'], {}), '([tokenize])\n', (897, 909), False, 'from nimbusml import Pipeline, FileDataStream\n'), ((803, 850), 'nimbusml.preprocessing.text.WordTokenizer', 'WordTokenizer', ([], {'char_array_term_separators': "[' ']"}), "(char_array_term_separators=[' '])\n", (816, 850), False, 'from nimbusml.preprocessing.text import WordTokenizer\n'), ((283, 314), 'nimbusml.datasets.get_dataset', 'get_dataset', (['"""wiki_detox_train"""'], {}), "('wiki_detox_train')\n", (294, 314), False, 'from nimbusml.datasets import get_dataset\n')]
|
import argparse
import sys
import json
from zsearch_definitions import protocols
from ztag.stream import Stream, Incoming, Outgoing, InputFile, OutputFile
from ztag.transform import Transform, Decoder, Encoder
from ztag.decoders import JSONDecoder
from ztag.encoders import JSONEncoder
from ztag.annotation import Annotation
from ztag.annotator import Annotator, AnnotationTesting
from ztag.transformer import ZMapTransformer
from ztag.log import Logger
from ztag.classargs import subclass_of
from datetime import datetime
def non_negative(s):
x = int(s)
if x < 0:
raise argparse.ArgumentTypeError
return x
def uint16(s):
x = int(s)
if x < 0 or x > 65535:
raise argparse.ArgumentTypeError
return x
def zsearch_protocol(s):
try:
result = protocols.Protocol.from_pretty_name(s)
return result
except KeyError as e:
raise argparse.ArgumentTypeError(e)
def zsearch_subprotocol(s):
try:
return protocols.Subprotocol.from_pretty_name(s)
except KeyError as e:
raise argparse.ArgumentTypeError(e)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-p', '--port', type=uint16,
help="Target port")
parser.add_argument('-P', '--protocol',
type=zsearch_protocol)
parser.add_argument('-S', '--subprotocol',
type=zsearch_subprotocol)
parser.add_argument('-T', '--destination', default="full_ipv4",
type=str, choices=["full_ipv4", "alexa_top1mil"])
parser.add_argument('-s', '--scan-id', required=False, type=non_negative)
# parser.add_argument('-t', '--tags', type=tag_class)
parser.add_argument('-I', '--incoming', type=subclass_of(Incoming),
default=None)
parser.add_argument('-D', '--decoder', type=subclass_of(Decoder),
default=JSONDecoder)
parser.add_argument('-X', '--transform', type=subclass_of(Transform),
default=None)
parser.add_argument('-E', '--encoder', type=subclass_of(Encoder),
default=JSONEncoder)
parser.add_argument('-O', '--outgoing', type=subclass_of(Outgoing),
default=OutputFile)
parser.add_argument('-i', '--input-file', default=sys.stdin,
type=argparse.FileType('r'))
parser.add_argument('-l', '--log-file', type=argparse.FileType('w'),
default=sys.stderr)
parser.add_argument('--updates-file', default=sys.stderr,
type=argparse.FileType('w'))
parser.add_argument('-v', '--log-level', type=int, default=Logger.INFO,
choices=range(0, Logger.TRACE + 1))
parser.add_argument('-m', '--metadata-file', type=argparse.FileType('w'),
default=sys.stderr)
parser.add_argument('--strip-domain-prefix', type=str, default=None)
parser.add_argument('-d', '--debug', action='store_true')
parser.add_argument('-t', '--tests', action='store_true')
parser.add_argument('--safe-import', action='store_true')
parser.add_argument('--no-safe-tests', action='store_true')
args = parser.parse_args()
Annotation.load_annotations(args.safe_import)
if args.tests:
sys.exit(AnnotationTesting().run(args.no_safe_tests))
if not args.port:
sys.stderr.write("ERROR: port (-p/--port) required\n")
sys.exit(1)
if not args.protocol:
proto_string = ", ".join(protocols.Protocol._by_pretty_name.keys())
sys.stderr.write("ERROR: protocol (-P/--protocol) required\n")
sys.stderr.write("Registered Protocols: %s\n" % proto_string)
sys.exit(1)
if not args.subprotocol:
subproto_string = ", ".join(
protocols.Subprotocol._by_pretty_name.keys())
sys.stderr.write("ERROR: subprotocol (-S/--subprotocol) required\n")
sys.stderr.write("Registered SubProtocols: %s\n" % subproto_string)
sys.exit(1)
metadata = dict()
port = args.port
protocol = args.protocol
subprotocol = args.subprotocol
scan_id = args.scan_id or 0
transform_kwargs = dict()
transform_args = list()
logger = Logger(args.log_file, log_level=args.log_level)
if args.strip_domain_prefix:
if not args.strip_domain_prefix.endswith("."):
args.strip_domain_prefix += "."
logger.info("stripping prefix %s" % args.strip_domain_prefix)
transform_kwargs['strip_domain_prefix'] = args.strip_domain_prefix
if args.transform is not None:
transform = args.transform(port, protocol, subprotocol, scan_id,
*transform_args, **transform_kwargs)
else:
transform = ZMapTransformer.find_transform(port, protocol, subprotocol,
scan_id, *transform_args, **transform_kwargs)
if args.incoming is not None:
incoming = args.incoming(input_file=args.input_file)
elif transform.incoming is not None:
incoming = transform.incoming(input_file=args.input_file)
else:
incoming = InputFile(input_file=args.input_file)
if args.decoder is not None:
decoder = args.decoder(logger=logger)
elif transform.decoder is not None:
decoder = transform.decoder(logger=logger)
else:
decoder = JSONDecoder(logger=logger)
encoder = args.encoder(port, protocol, subprotocol, scan_id)
outgoing = args.outgoing(output_file=sys.stdout, logger=logger,
destination=args.destination)
tagger = Annotator(port, protocol, subprotocol,
debug=args.debug, logger=logger)
num_tags = len(tagger.eligible_tags)
logger.info("found %d tags" % num_tags)
metadata['eligible_tags'] = num_tags
transforms = [
decoder,
transform,
tagger,
encoder,
]
s = Stream(incoming, outgoing, transforms=transforms, logger=logger, updates=args.updates_file)
start_time = datetime.utcnow()
handled, skipped = s.run()
end_time = datetime.utcnow()
duration = end_time - start_time
logger.info("handled %d records" % handled)
logger.info("skipped %d records" % skipped)
metadata['records_handled'] = handled
metadata['records_skipped'] = skipped
metadata['start_time'] = Logger.rfc_time_from_utc(start_time)
metadata['end_time'] = Logger.rfc_time_from_utc(end_time)
metadata['duration'] = int(duration.total_seconds())
args.metadata_file.write(json.dumps(metadata))
args.metadata_file.write("\n")
args.metadata_file.flush()
if __name__ == "__main__":
main()
|
[
"zsearch_definitions.protocols.Subprotocol._by_pretty_name.keys",
"argparse.ArgumentParser",
"ztag.annotator.Annotator",
"json.dumps",
"datetime.datetime.utcnow",
"ztag.annotator.AnnotationTesting",
"zsearch_definitions.protocols.Protocol._by_pretty_name.keys",
"argparse.ArgumentTypeError",
"zsearch_definitions.protocols.Subprotocol.from_pretty_name",
"ztag.stream.Stream",
"ztag.stream.InputFile",
"zsearch_definitions.protocols.Protocol.from_pretty_name",
"argparse.FileType",
"ztag.transformer.ZMapTransformer.find_transform",
"ztag.annotation.Annotation.load_annotations",
"ztag.decoders.JSONDecoder",
"ztag.log.Logger",
"sys.exit",
"ztag.log.Logger.rfc_time_from_utc",
"ztag.classargs.subclass_of",
"sys.stderr.write"
] |
[((1122, 1147), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1145, 1147), False, 'import argparse\n'), ((3244, 3289), 'ztag.annotation.Annotation.load_annotations', 'Annotation.load_annotations', (['args.safe_import'], {}), '(args.safe_import)\n', (3271, 3289), False, 'from ztag.annotation import Annotation\n'), ((4251, 4298), 'ztag.log.Logger', 'Logger', (['args.log_file'], {'log_level': 'args.log_level'}), '(args.log_file, log_level=args.log_level)\n', (4257, 4298), False, 'from ztag.log import Logger\n'), ((5647, 5718), 'ztag.annotator.Annotator', 'Annotator', (['port', 'protocol', 'subprotocol'], {'debug': 'args.debug', 'logger': 'logger'}), '(port, protocol, subprotocol, debug=args.debug, logger=logger)\n', (5656, 5718), False, 'from ztag.annotator import Annotator, AnnotationTesting\n'), ((5971, 6067), 'ztag.stream.Stream', 'Stream', (['incoming', 'outgoing'], {'transforms': 'transforms', 'logger': 'logger', 'updates': 'args.updates_file'}), '(incoming, outgoing, transforms=transforms, logger=logger, updates=\n args.updates_file)\n', (5977, 6067), False, 'from ztag.stream import Stream, Incoming, Outgoing, InputFile, OutputFile\n'), ((6080, 6097), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (6095, 6097), False, 'from datetime import datetime\n'), ((6146, 6163), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (6161, 6163), False, 'from datetime import datetime\n'), ((6412, 6448), 'ztag.log.Logger.rfc_time_from_utc', 'Logger.rfc_time_from_utc', (['start_time'], {}), '(start_time)\n', (6436, 6448), False, 'from ztag.log import Logger\n'), ((6476, 6510), 'ztag.log.Logger.rfc_time_from_utc', 'Logger.rfc_time_from_utc', (['end_time'], {}), '(end_time)\n', (6500, 6510), False, 'from ztag.log import Logger\n'), ((798, 836), 'zsearch_definitions.protocols.Protocol.from_pretty_name', 'protocols.Protocol.from_pretty_name', (['s'], {}), '(s)\n', (833, 836), False, 'from zsearch_definitions import 
protocols\n'), ((983, 1024), 'zsearch_definitions.protocols.Subprotocol.from_pretty_name', 'protocols.Subprotocol.from_pretty_name', (['s'], {}), '(s)\n', (1021, 1024), False, 'from zsearch_definitions import protocols\n'), ((3403, 3457), 'sys.stderr.write', 'sys.stderr.write', (['"""ERROR: port (-p/--port) required\n"""'], {}), "('ERROR: port (-p/--port) required\\n')\n", (3419, 3457), False, 'import sys\n'), ((3466, 3477), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3474, 3477), False, 'import sys\n'), ((3588, 3650), 'sys.stderr.write', 'sys.stderr.write', (['"""ERROR: protocol (-P/--protocol) required\n"""'], {}), "('ERROR: protocol (-P/--protocol) required\\n')\n", (3604, 3650), False, 'import sys\n'), ((3659, 3720), 'sys.stderr.write', 'sys.stderr.write', (["('Registered Protocols: %s\\n' % proto_string)"], {}), "('Registered Protocols: %s\\n' % proto_string)\n", (3675, 3720), False, 'import sys\n'), ((3729, 3740), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3737, 3740), False, 'import sys\n'), ((3873, 3941), 'sys.stderr.write', 'sys.stderr.write', (['"""ERROR: subprotocol (-S/--subprotocol) required\n"""'], {}), "('ERROR: subprotocol (-S/--subprotocol) required\\n')\n", (3889, 3941), False, 'import sys\n'), ((3950, 4017), 'sys.stderr.write', 'sys.stderr.write', (["('Registered SubProtocols: %s\\n' % subproto_string)"], {}), "('Registered SubProtocols: %s\\n' % subproto_string)\n", (3966, 4017), False, 'import sys\n'), ((4026, 4037), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (4034, 4037), False, 'import sys\n'), ((4788, 4898), 'ztag.transformer.ZMapTransformer.find_transform', 'ZMapTransformer.find_transform', (['port', 'protocol', 'subprotocol', 'scan_id', '*transform_args'], {}), '(port, protocol, subprotocol, scan_id, *\n transform_args, **transform_kwargs)\n', (4818, 4898), False, 'from ztag.transformer import ZMapTransformer\n'), ((6598, 6618), 'json.dumps', 'json.dumps', (['metadata'], {}), '(metadata)\n', (6608, 6618), False, 'import 
json\n'), ((899, 928), 'argparse.ArgumentTypeError', 'argparse.ArgumentTypeError', (['e'], {}), '(e)\n', (925, 928), False, 'import argparse\n'), ((1065, 1094), 'argparse.ArgumentTypeError', 'argparse.ArgumentTypeError', (['e'], {}), '(e)\n', (1091, 1094), False, 'import argparse\n'), ((1758, 1779), 'ztag.classargs.subclass_of', 'subclass_of', (['Incoming'], {}), '(Incoming)\n', (1769, 1779), False, 'from ztag.classargs import subclass_of\n'), ((1867, 1887), 'ztag.classargs.subclass_of', 'subclass_of', (['Decoder'], {}), '(Decoder)\n', (1878, 1887), False, 'from ztag.classargs import subclass_of\n'), ((1984, 2006), 'ztag.classargs.subclass_of', 'subclass_of', (['Transform'], {}), '(Transform)\n', (1995, 2006), False, 'from ztag.classargs import subclass_of\n'), ((2094, 2114), 'ztag.classargs.subclass_of', 'subclass_of', (['Encoder'], {}), '(Encoder)\n', (2105, 2114), False, 'from ztag.classargs import subclass_of\n'), ((2210, 2231), 'ztag.classargs.subclass_of', 'subclass_of', (['Outgoing'], {}), '(Outgoing)\n', (2221, 2231), False, 'from ztag.classargs import subclass_of\n'), ((2371, 2393), 'argparse.FileType', 'argparse.FileType', (['"""r"""'], {}), "('r')\n", (2388, 2393), False, 'import argparse\n'), ((2444, 2466), 'argparse.FileType', 'argparse.FileType', (['"""w"""'], {}), "('w')\n", (2461, 2466), False, 'import argparse\n'), ((2603, 2625), 'argparse.FileType', 'argparse.FileType', (['"""w"""'], {}), "('w')\n", (2620, 2625), False, 'import argparse\n'), ((2817, 2839), 'argparse.FileType', 'argparse.FileType', (['"""w"""'], {}), "('w')\n", (2834, 2839), False, 'import argparse\n'), ((3537, 3578), 'zsearch_definitions.protocols.Protocol._by_pretty_name.keys', 'protocols.Protocol._by_pretty_name.keys', ([], {}), '()\n', (3576, 3578), False, 'from zsearch_definitions import protocols\n'), ((3819, 3863), 'zsearch_definitions.protocols.Subprotocol._by_pretty_name.keys', 'protocols.Subprotocol._by_pretty_name.keys', ([], {}), '()\n', (3861, 3863), False, 'from 
zsearch_definitions import protocols\n'), ((5176, 5213), 'ztag.stream.InputFile', 'InputFile', ([], {'input_file': 'args.input_file'}), '(input_file=args.input_file)\n', (5185, 5213), False, 'from ztag.stream import Stream, Incoming, Outgoing, InputFile, OutputFile\n'), ((5413, 5439), 'ztag.decoders.JSONDecoder', 'JSONDecoder', ([], {'logger': 'logger'}), '(logger=logger)\n', (5424, 5439), False, 'from ztag.decoders import JSONDecoder\n'), ((3327, 3346), 'ztag.annotator.AnnotationTesting', 'AnnotationTesting', ([], {}), '()\n', (3344, 3346), False, 'from ztag.annotator import Annotator, AnnotationTesting\n')]
|
import os
import sys
import argparse
import logging
# import project modules
# Add base project path (two directories up)
currPath = os.path.dirname(os.path.realpath(__file__))
rootPath = os.path.dirname(os.path.dirname(currPath))
sys.path.append(rootPath)
from rtCommon.utils import loadConfigFile, installLoggers
from rtCommon.structDict import StructDict
from rtCommon.projectInterface import Web
defaultConfig = os.path.join(currPath, 'conf/amygActivation.toml')
expScript = os.path.join(currPath, 'amygActivation.py')
initScript = os.path.join(currPath, 'initialize.py')
finalizeScript = os.path.join(currPath, 'finalize.py')
if __name__ == "__main__":
installLoggers(logging.INFO, logging.INFO, filename=os.path.join(currPath, 'logs/webServer.log'))
argParser = argparse.ArgumentParser()
argParser.add_argument('--filesremote', '-x', default=False, action='store_true',
help='dicom files retrieved from remote server')
argParser.add_argument('--config', '-c', default=defaultConfig, type=str,
help='experiment file (.json or .toml)')
args = argParser.parse_args()
# HERE: Set the path to the fMRI Python script to run here
params = StructDict({'fmriPyScript': expScript,
'initScript': initScript,
'finalizeScript': finalizeScript,
'filesremote': args.filesremote,
'port': 8888,
})
cfg = loadConfigFile(args.config)
web = Web()
web.start(params, cfg)
|
[
"sys.path.append",
"argparse.ArgumentParser",
"os.path.realpath",
"os.path.dirname",
"rtCommon.projectInterface.Web",
"rtCommon.structDict.StructDict",
"os.path.join",
"rtCommon.utils.loadConfigFile"
] |
[((231, 256), 'sys.path.append', 'sys.path.append', (['rootPath'], {}), '(rootPath)\n', (246, 256), False, 'import sys\n'), ((417, 467), 'os.path.join', 'os.path.join', (['currPath', '"""conf/amygActivation.toml"""'], {}), "(currPath, 'conf/amygActivation.toml')\n", (429, 467), False, 'import os\n'), ((480, 523), 'os.path.join', 'os.path.join', (['currPath', '"""amygActivation.py"""'], {}), "(currPath, 'amygActivation.py')\n", (492, 523), False, 'import os\n'), ((537, 576), 'os.path.join', 'os.path.join', (['currPath', '"""initialize.py"""'], {}), "(currPath, 'initialize.py')\n", (549, 576), False, 'import os\n'), ((594, 631), 'os.path.join', 'os.path.join', (['currPath', '"""finalize.py"""'], {}), "(currPath, 'finalize.py')\n", (606, 631), False, 'import os\n'), ((149, 175), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (165, 175), False, 'import os\n'), ((204, 229), 'os.path.dirname', 'os.path.dirname', (['currPath'], {}), '(currPath)\n', (219, 229), False, 'import os\n'), ((779, 804), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (802, 804), False, 'import argparse\n'), ((1223, 1377), 'rtCommon.structDict.StructDict', 'StructDict', (["{'fmriPyScript': expScript, 'initScript': initScript, 'finalizeScript':\n finalizeScript, 'filesremote': args.filesremote, 'port': 8888}"], {}), "({'fmriPyScript': expScript, 'initScript': initScript,\n 'finalizeScript': finalizeScript, 'filesremote': args.filesremote,\n 'port': 8888})\n", (1233, 1377), False, 'from rtCommon.structDict import StructDict\n'), ((1509, 1536), 'rtCommon.utils.loadConfigFile', 'loadConfigFile', (['args.config'], {}), '(args.config)\n', (1523, 1536), False, 'from rtCommon.utils import loadConfigFile, installLoggers\n'), ((1548, 1553), 'rtCommon.projectInterface.Web', 'Web', ([], {}), '()\n', (1551, 1553), False, 'from rtCommon.projectInterface import Web\n'), ((716, 760), 'os.path.join', 'os.path.join', (['currPath', '"""logs/webServer.log"""'], 
{}), "(currPath, 'logs/webServer.log')\n", (728, 760), False, 'import os\n')]
|
from cosalib.cmdlib import run_verbose
def remove_azure_image(image, resource_group, auth, profile):
print(f"Azure: removing image {image}")
try:
run_verbose(['ore', 'azure',
'--azure-auth', auth,
'--azure-profile', profile,
'delete-image-arm',
'--image-name', image,
'--resource-group', resource_group])
except SystemExit:
raise Exception("Failed to remove image")
|
[
"cosalib.cmdlib.run_verbose"
] |
[((164, 330), 'cosalib.cmdlib.run_verbose', 'run_verbose', (["['ore', 'azure', '--azure-auth', auth, '--azure-profile', profile,\n 'delete-image-arm', '--image-name', image, '--resource-group',\n resource_group]"], {}), "(['ore', 'azure', '--azure-auth', auth, '--azure-profile',\n profile, 'delete-image-arm', '--image-name', image, '--resource-group',\n resource_group])\n", (175, 330), False, 'from cosalib.cmdlib import run_verbose\n')]
|
#!/usr/bin/env python
"""
Segmentation methods to find regions of interest in the time and frequency domain.
"""
#
# Authors: <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
#
# License: New BSD License
# =============================================================================
# Load the modules
# =============================================================================
# Import external modules
import numpy as np
from scipy.stats import iqr
from skimage import measure
import pandas as pd
import sys
_MIN_ = sys.float_info.min
# Import internal modules
from maad.util import (plot2d, rand_cmap)
#%%
#****************************************************************************
# private functions
#****************************************************************************
def _double_threshold_rel (im, bin_std=6, bin_per=0.5,
verbose=False, display=False, savefig=None, **kwargs):
"""
Binarize an image based on a double relative threshold.
The values used for the thresholding depends on the values found in the
image. => relative threshold
Parameters
----------
im : 2d ndarray of scalars
Spectrogram (or image)
bin_std : scalar, optional, default is 6
Set the first threshold. This threshold is not an absolute value but
depends on values that are similar to 75th percentile (pseudo_mean) and
a sort of std value of the image.
threshold1 = "pseudo_mean" + "std" * bin_std
Value higher than threshold1 are set to 1, they are the seeds for
the second step. The others are set to 0.
bin_per: scalar, optional, defautl is 0.5
Set how much the second threshold is lower than the first
threshold value. From 0 to 1. ex: 0.1 = 10 %.
threshold2 = threshold1 (1-bin_per)
Value higher than threshold2 and connected (directly or not) to the
seeds are set to 1, the other remains 0
verbose : boolean, optional, default is False
print messages
display : boolean, optional, default is False
Display the signal if True
savefig : string, optional, default is None
Root filename (with full path) is required to save the figures. Postfix
is added to the root filename.
\*\*kwargs, optional. This parameter is used by plt.plot and savefig functions
- savefilename : str, optional, default :'_spectro_after_noise_subtraction.png'
Postfix of the figure filename
- figsize : tuple of integers, optional, default: (4,10)
width, height in inches.
- title : string, optional, default : 'Spectrogram'
title of the figure
- xlabel : string, optional, default : 'Time [s]'
label of the horizontal axis
- ylabel : string, optional, default : 'Amplitude [AU]'
label of the vertical axis
- cmap : string or Colormap object, optional, default is 'gray'
See https://matplotlib.org/examples/color/colormaps_reference.html
in order to get all the existing colormaps
examples: 'hsv', 'hot', 'bone', 'tab20c', 'jet', 'seismic',
'viridis'...
- vmin, vmax : scalar, optional, default: None
`vmin` and `vmax` are used in conjunction with norm to normalize
luminance data. Note if you pass a `norm` instance, your
settings for `vmin` and `vmax` will be ignored.
- extent : scalars (left, right, bottom, top), optional, default: None
The location, in data-coordinates, of the lower-left and
upper-right corners. If `None`, the image is positioned such that
the pixel centers fall on zero-based (row, column) indices.
- dpi : integer, optional, default is 96
Dot per inch.
For printed version, choose high dpi (i.e. dpi=300) => slow
For screen version, choose low dpi (i.e. dpi=96) => fast
- format : string, optional, default is 'png'
Format to save the figure
... and more, see matplotlib
Returns
-------
im_out: binary image
References
----------
.. [1] from MATLAB: Threshold estimation (from Oliveira et al, 2015)
Adapted by <NAME> Dec 12, 2017
"""
# test if im is full of zeros
if not im.any() :
im_out = np.zeros(im.shape)
else:
# Compute the qth percentile of the data along the specified axis
val1 = np.percentile(im[np.where(im>0)],75) # value corresponding to the limit between the 75% lowest value and 25% largest value
# The interquartile range (IQR) is the difference between the 75th and
# 25th percentile of the data. It is a measure of the dispersion similar
# to standard deviation or variance, but is much more robust against outliers
val2 = iqr(im[np.where(im>0)])*bin_std
# Threshold : qth percentile + sort of std
h_th = val1 + val2
# Low threshold limit
l_th = (h_th-h_th*bin_per)
if verbose :
print(72 * '_')
print('Double thresholding with values relative to the image...')
print ('**********************************************************')
print (' high threshold value %.2f | low threshold value %.2f' % (h_th, l_th))
print ('**********************************************************')
# binarisation
im_t1 = im > h_th # mask1
im_t2 = im > l_th # mask2
im_t3 = im * im_t1 # selected parts of the image
#find index of regions which meet the criteria
conncomp_t2 = measure.label(im_t2) #Find connected components in binary image
rprops = measure.regionprops(conncomp_t2,im_t3)
rprops_mean_intensity = [region['mean_intensity'] for region in rprops]
rprops_mean_intensity = np.asarray(rprops_mean_intensity)
rprops_label = [region['label'] for region in rprops]
rprops_label = np.asarray(rprops_label)
[ind]=np.where(rprops_mean_intensity>0)
im_out = np.isin(conncomp_t2, rprops_label[ind]) # test if the indice is in the matrix of indices
im_out =im_out*1 # boolean to 0,1 conversion
# Display
if display :
ylabel =kwargs.pop('ylabel','Frequency [Hz]')
xlabel =kwargs.pop('xlabel','Time [sec]')
title =kwargs.pop('title','binary image => MASK')
cmap =kwargs.pop('cmap','gray')
vmin=kwargs.pop('vmin',0)
vmax=kwargs.pop('vmax',1)
extent=kwargs.pop('extent',None)
if extent is None :
xlabel = 'pseudotime [points]'
ylabel = 'pseudofrequency [points]'
_, fig = plot2d (im_out,
extent = extent,
title = title,
ylabel = ylabel,
xlabel = xlabel,
vmin = vmin,
vmax = vmax,
cmap = cmap,
**kwargs)
# SAVE FIGURE
if savefig is not None :
dpi =kwargs.pop('dpi',96)
format=kwargs.pop('format','png')
filename=kwargs.pop('filename','_spectro_binary')
filename = savefig+filename+'.'+format
if verbose :
print('\n''save figure : %s' %filename)
fig.savefig(filename, bbox_inches='tight', dpi=dpi, format=format,
**kwargs)
return im_out
#%%
def _double_threshold_abs(im, bin_h=0.7, bin_l=0.2,
verbose=False,display=False, savefig=None, **kwargs):
"""
Binarize an image based on a double relative threshold.
The values used for the thresholding are independent of the values in the
image => absolute threshold
Parameters
----------
im : 2d ndarray of scalars
Spectrogram (or image)
bin_h : scalar, optional, default is 0.7
Set the first threshold. Value higher than this value are set to 1,
the others are set to 0. They are the seeds for the second step
bin_l: scalar, optional, defautl is 0.2
Set the second threshold. Value higher than this value and connected
to the seeds or to other pixels connected to the seeds are set to 1,
the other remains 0
verbose : boolean, optional, default is False
print messages
display : boolean, optional, default is False
Display the signal if True
savefig : string, optional, default is None
Root filename (with full path) is required to save the figures. Postfix
is added to the root filename.
\*\*kwargs, optional. This parameter is used by plt.plot and savefig functions
- savefilename : str, optional, default :'_spectro_after_noise_subtraction.png'
Postfix of the figure filename
- figsize : tuple of integers, optional, default: (4,10)
width, height in inches.
- title : string, optional, default : 'Spectrogram'
title of the figure
- xlabel : string, optional, default : 'Time [s]'
label of the horizontal axis
- ylabel : string, optional, default : 'Amplitude [AU]'
label of the vertical axis
- cmap : string or Colormap object, optional, default is 'gray'
See https://matplotlib.org/examples/color/colormaps_reference.html
in order to get all the existing colormaps
examples: 'hsv', 'hot', 'bone', 'tab20c', 'jet', 'seismic',
'viridis'...
- vmin, vmax : scalar, optional, default: None
`vmin` and `vmax` are used in conjunction with norm to normalize
luminance data. Note if you pass a `norm` instance, your
settings for `vmin` and `vmax` will be ignored.
- extent : scalars (left, right, bottom, top), optional, default: None
The location, in data-coordinates, of the lower-left and
upper-right corners. If `None`, the image is positioned such that
the pixel centers fall on zero-based (row, column) indices.
- dpi : integer, optional, default is 96
Dot per inch.
For printed version, choose high dpi (i.e. dpi=300) => slow
For screen version, choose low dpi (i.e. dpi=96) => fast
- format : string, optional, default is 'png'
Format to save the figure
... and more, see matplotlib
Returns
-------
im_out: binary image
References
----------
.. [1] <NAME>. A computational approach to edge detection. IEEE Transactions on Pattern Analysis and Machine Intelligence. 1986; vol. 8, pp.679-698. `DOI: 10.1109/TPAMI.1986.4767851 <https://doi.org/10.1109/TPAMI.1986.4767851>`_
"""
# binarisation
im_t1 = im > bin_h # mask1
im_t2 = im > bin_l # mask2
im_t3 = im * im_t1 # selected parts of the image
#find index of regions which meet the criteria
conncomp_t2 = measure.label(im_t2) #Find connected components in binary image
rprops = measure.regionprops(conncomp_t2,im_t3)
rprops_mean_intensity = [region['mean_intensity'] for region in rprops]
rprops_mean_intensity = np.asarray(rprops_mean_intensity)
rprops_label = [region['label'] for region in rprops]
rprops_label = np.asarray(rprops_label)
[ind]=np.where(rprops_mean_intensity>0)
im_out = np.isin(conncomp_t2, rprops_label[ind]) # test if the indice is in the maxtrix of indices
im_out =im_out*1 # boolean to 0,1 conversion
if verbose :
print(72 * '_')
print('Double thresholding with absolute values...')
print ('**********************************************************')
print (' Number of rois %.2f | Rois cover %.2f%' % (len(rprops_label),
sum(im_out)/(im_out.shape[1]*im_out.shape[0])*100))
print ('**********************************************************')
# Display
if display :
ylabel =kwargs.pop('ylabel','Frequency [Hz]')
xlabel =kwargs.pop('xlabel','Time [sec]')
title =kwargs.pop('title','binary image => MASK')
cmap =kwargs.pop('cmap','gray')
vmin=kwargs.pop('vmin',0)
vmax=kwargs.pop('vmax',1)
extent=kwargs.pop('extent',None)
if extent is None :
xlabel = 'pseudotime [points]'
ylabel = 'pseudofrequency [points]'
_, fig = plot2d (im_out,
extent = extent,
title = title,
ylabel = ylabel,
xlabel = xlabel,
vmin = vmin,
vmax = vmax,
cmap = cmap,
**kwargs)
# SAVE FIGURE
if savefig is not None :
dpi =kwargs.pop('dpi',96)
format=kwargs.pop('format','png')
filename=kwargs.pop('filename','_spectro_binary')
filename = savefig+filename+'.'+format
if verbose :
print('\n''save figure : %s' %filename)
fig.savefig(filename, bbox_inches='tight', dpi=dpi, format=format,
**kwargs)
return im_out
#%%
# =============================================================================
# public functions
# =============================================================================
def create_mask(im, mode_bin = 'relative',
verbose= False, display = False, savefig = None, **kwargs):
"""
Binarize an image based on a double threshold.
Parameters
----------
im : 2d ndarray of scalars
Spectrogram (or image)
mode_bin : string in {'relative', 'absolute'}, optional, default is 'relative'
if 'absolute' [1]_ , a double threshold with absolute value is performed
with two parameters (see \*\*kwargs section)
if 'relative' [2]_, a relative double threshold is performed with two
parameters (see \*\*kwargs section)
verbose : boolean, optional, default is False
print messages
display : boolean, optional, default is False
Display the signal if True
savefig : string, optional, default is None
Root filename (with full path) is required to save the figures. Postfix
is added to the root filename.
\*\*kwargs, optional. This parameter is used by the maad functions as well
as the plt.plot and savefig functions.
All the input arguments required or optional in the signature of the
functions above can be passed as kwargs :
if 'absolute' [1]_
- bin_h : scalar, optional, default is 0.7
Set the first threshold. Value higher than this value are set to 1,
the others are set to 0. They are the seeds for the second step
- bin_l: scalar, optional, defautl is 0.2
Set the second threshold. Value higher than this value and connected
to the seeds or to other pixels connected to the seeds (6-connectivity)
are set to 1, the other remains 0
if 'relative' [2]_ :
- bin_std : scalar, optional, default is 6
bin_std is needed to compute the threshold1.
This threshold is not an absolute value but depends on values that are
similar to 75th percentile (pseudo_mean) and a sort of std value of
the image.
threshold1 = "pseudo_mean" + "std" * bin_std
Value higher than threshold1 are set to 1, they are the seeds for
the second step. The others are set to 0.
- bin_per: scalar, optional, defautl is 0.5
Set how much the second threshold is lower than the first
threshold value. From 0 to 1. ex: 0.1 = 10 %.
threshold2 = threshold1 (1-bin_per)
Value higher than threshold2 and connected (6-connectivity) to the
seeds are set to 1, the other remains 0
... and more, see matplotlib
Returns
-------
im_bin: binary image
References
----------
.. [1] <NAME>. A computational approach to edge detection. IEEE Transactions on Pattern Analysis and Machine Intelligence. 1986; vol. 8, pp.679-698. `DOI: 10.1109/TPAMI.1986.4767851 <https://doi.org/10.1109/TPAMI.1986.4767851>`_
.. [2] from MATLAB: Threshold estimation (Oliveira et al, 2015)
Examples
--------
Load audio recording and convert it into spectrogram
>>> s, fs = maad.sound.load('../data/cold_forest_daylight.wav')
>>> Sxx,tn,fn,ext = maad.sound.spectrogram (s, fs, fcrop=(0,10000))
Convert linear spectrogram into dB
>>> Sxx_dB = maad.util.power2dB(Sxx) +96
Smooth the spectrogram
>>> Sxx_dB_blurred = maad.rois.smooth(Sxx_dB)
Detection of the acoustic signature => creation of a mask
>>> im_bin = maad.rois.create_mask(Sxx_dB_blurred, bin_std=1.5, bin_per=0.25, mode='relative')
Plot spectrograms
>>> import matplotlib.pyplot as plt
>>> fig, (ax1, ax2) = plt.subplots(2, 1)
>>> maad.util.plot2d(Sxx_dB, ax=ax1, extent=ext, title='original', vmin=10, vmax=70)
>>> maad.util.plot2d(im_bin, ax=ax2, extent=ext, title='mask)')
>>> fig.set_size_inches(13,8)
>>> fig.tight_layout()
"""
if mode_bin == 'relative':
bin_std=kwargs.pop('bin_std', 6)
bin_per=kwargs.pop('bin_per', 0.5)
im_bin = _double_threshold_rel(im, bin_std, bin_per,
verbose, display, savefig, **kwargs)
elif mode_bin == 'absolute':
bin_h=kwargs.pop('bin_h', 0.7)
bin_l=kwargs.pop('bin_l', 0.3)
im_bin = _double_threshold_abs(im, bin_h, bin_l,
verbose, display, savefig, **kwargs)
return im_bin
#%%
def select_rois(im_bin, min_roi=None ,max_roi=None,
verbose=False, display=False, savefig = None, **kwargs):
"""
Select regions of interest based on its dimensions.
The input is a binary mask, and the output is an image with labelled pixels.
Parameters
----------
im : 2d ndarray of scalars
Spectrogram (or image)
min_roi, max_roi : scalars, optional, default : None
Define the minimum and the maximum area possible for an ROI. If None,
the minimum ROI area is 1 pixel and the maximum ROI area is the area of
the image
verbose : boolean, optional, default is False
print messages
display : boolean, optional, default is False
Display the signal if True
savefig : string, optional, default is None
Root filename (with full path) is required to save the figures. Postfix
is added to the root filename.
\*\*kwargs, optional. This parameter is used by plt.plot and savefig functions
- savefilename : str, optional, default :'_spectro_after_noise_subtraction.png'
Postfix of the figure filename
- figsize : tuple of integers, optional, default: (4,10)
width, height in inches.
- title : string, optional, default : 'Spectrogram'
title of the figure
- xlabel : string, optional, default : 'Time [s]'
label of the horizontal axis
- ylabel : string, optional, default : 'Amplitude [AU]'
label of the vertical axis
- cmap : string or Colormap object, optional, default is 'gray'
See https://matplotlib.org/examples/color/colormaps_reference.html
in order to get all the existing colormaps
examples: 'hsv', 'hot', 'bone', 'tab20c', 'jet', 'seismic',
'viridis'...
- vmin, vmax : scalar, optional, default: None
`vmin` and `vmax` are used in conjunction with norm to normalize
luminance data. Note if you pass a `norm` instance, your
settings for `vmin` and `vmax` will be ignored.
- extent : scalars (left, right, bottom, top), optional, default: None
The location, in data-coordinates, of the lower-left and
upper-right corners. If `None`, the image is positioned such that
the pixel centers fall on zero-based (row, column) indices.
- dpi : integer, optional, default is 96
Dot per inch.
For printed version, choose high dpi (i.e. dpi=300) => slow
For screen version, choose low dpi (i.e. dpi=96) => fast
- format : string, optional, default is 'png'
Format to save the figure
... and more, see matplotlib
Returns
-------
im_rois: 2d ndarray
image with labels as values
rois: pandas DataFrame
Regions of interest with future descriptors will be computed.
Array have column names: ``labelID``, ``label``, ``min_y``, ``min_x``,
``max_y``, ``max_x``,
Use the function ``maad.util.format_features`` before using
centroid_features to format of the ``rois`` DataFrame
correctly.
Examples
--------
Load audio recording compute the spectrogram in dB.
>>> s, fs = maad.sound.load('../data/cold_forest_daylight.wav')
>>> Sxx,tn,fn,ext = maad.sound.spectrogram (s, fs, fcrop=(0,20000), display=True)
>>> Sxx_dB = maad.util.power2dB(Sxx) +96
Smooth the spectrogram
>>> Sxx_dB_blurred = maad.sound.smooth(Sxx_dB)
Using image binarization, detect isolated region in the time-frequency domain with high density of energy, i.e. regions of interest (ROIs).
>>> im_bin = maad.rois.create_mask(Sxx_dB_blurred, bin_std=1.5, bin_per=0.5, mode='relative')
Select ROIs from the binary mask.
>>> im_rois, df_rois = maad.rois.select_rois(im_bin, display=True)
We detected the background noise as a ROI, and that multiple ROIs are mixed in a single region. To have better results, it is adviced to preprocess the spectrogram to remove the background noise before creating the mask.
>>> Sxx_noNoise = maad.sound.median_equalizer(Sxx)
>>> Sxx_noNoise_dB = maad.util.power2dB(Sxx_noNoise)
>>> Sxx_noNoise_dB_blurred = maad.sound.smooth(Sxx_noNoise_dB)
>>> im_bin2 = maad.rois.create_mask(Sxx_noNoise_dB_blurred, bin_std=6, bin_per=0.5, mode='relative')
>>> im_rois2, df_rois2 = maad.rois.select_rois(im_bin2, display=True)
"""
# test if max_roi and min_roi are defined
if max_roi is None:
# the maximum ROI is set to the aera of the image
max_roi=im_bin.shape[0]*im_bin.shape[1]
if min_roi is None:
# the min ROI area is set to 1 pixel
min_roi = 1
if verbose :
print(72 * '_')
print('Automatic ROIs selection in progress...')
print ('**********************************************************')
print (' Min ROI area %d pix² | Max ROI area %d pix²' % (min_roi, max_roi))
print ('**********************************************************')
labels = measure.label(im_bin) #Find connected components in binary image
rprops = measure.regionprops(labels)
rois_bbox = []
rois_label = []
for roi in rprops:
# select the rois depending on their size
if (roi.area >= min_roi) & (roi.area <= max_roi):
# get the label
rois_label.append(roi.label)
# get rectangle coordonates
rois_bbox.append (roi.bbox)
im_rois = np.isin(labels, rois_label) # test if the indice is in the matrix of indices
im_rois = im_rois* labels
# create a list with labelID and labelName (None in this case)
rois_label = list(zip(rois_label,['unknown']*len(rois_label)))
# test if there is a roi
if len(rois_label)>0 :
# create a dataframe rois containing the coordonates and the label
rois = np.concatenate((np.asarray(rois_label), np.asarray(rois_bbox)), axis=1)
rois = pd.DataFrame(rois, columns = ['labelID', 'label', 'min_y','min_x','max_y', 'max_x'])
# force type to integer
rois = rois.astype({'label': str,'min_y':int,'min_x':int,'max_y':int, 'max_x':int})
# compensate half-open interval of bbox from skimage
rois.max_y -= 1
rois.max_x -= 1
else :
rois = []
rois = pd.DataFrame(rois, columns = ['labelID', 'label', 'min_y','min_x','max_y', 'max_x'])
rois = rois.astype({'label': str,'min_y':int,'min_x':int,'max_y':int, 'max_x':int})
# Display
if display :
ylabel =kwargs.pop('ylabel','Frequency [Hz]')
xlabel =kwargs.pop('xlabel','Time [sec]')
title =kwargs.pop('title','Selected ROIs')
extent=kwargs.pop('extent',None)
if extent is None :
xlabel = 'pseudotime [points]'
ylabel = 'pseudofrequency [points]'
# randcmap = rand_cmap(len(rois_label))
# cmap =kwargs.pop('cmap',randcmap)
cmap =kwargs.pop('cmap','tab20')
_, fig = plot2d (im_rois,
extent = extent,
title = title,
ylabel = ylabel,
xlabel = xlabel,
cmap = cmap,
**kwargs)
# SAVE FIGURE
if savefig is not None :
dpi =kwargs.pop('dpi',96)
format=kwargs.pop('format','png')
filename=kwargs.pop('filename','_spectro_selectrois')
filename = savefig+filename+'.'+format
fig.savefig(filename, bbox_inches='tight', dpi=dpi, format=format,
**kwargs)
return im_rois, rois
#%%
def rois_to_imblobs(im_zeros, rois):
"""
Take a matrix full of zeros and add ones in delimited regions defined by rois.
Parameters
----------
im_zeros : ndarray
matrix full of zeros with the size to the image where the rois come from.
rois : DataFrame
rois must have the columns names:((min_y, min_x, max_y, max_x) which
correspond to the bounding box coordinates
Returns
-------
im_blobs : ndarray
matrix with 1 corresponding to the rois and 0 elsewhere
Examples
--------
>>> from maad import rois, util
>>> import pandas as pd
>>> import numpy as np
>>> im_zeros = np.zeros((100,300))
>>> df_rois = pd.DataFrame({'min_y': [10, 40], 'min_x': [10, 200], 'max_y': [60, 80], 'max_x': [110, 250]})
>>> im_blobs = rois.rois_to_imblobs(im_zeros, df_rois)
>>> util.plot2d(im_blobs)
"""
# Check format of the input data
if type(rois) is not pd.core.frame.DataFrame :
raise TypeError('Rois must be of type pandas DataFrame')
if not(('min_y' and 'min_x' and 'max_y' and 'max_x') in rois) :
raise TypeError('Array must be a Pandas DataFrame with column names:((min_y, min_x, max_y, max_x). Check example in documentation.')
# select the columns
rois_bbox = rois[['min_y', 'min_x', 'max_y', 'max_x']]
# roi to image blob
for min_y, min_x, max_y, max_x in rois_bbox.values:
im_zeros[int(min_y):int(max_y+1), int(min_x):int(max_x+1)] = 1
im_blobs = im_zeros.astype(int)
return im_blobs
|
[
"pandas.DataFrame",
"numpy.isin",
"numpy.asarray",
"numpy.zeros",
"maad.util.plot2d",
"skimage.measure.label",
"numpy.where",
"skimage.measure.regionprops"
] |
[((12272, 12292), 'skimage.measure.label', 'measure.label', (['im_t2'], {}), '(im_t2)\n', (12285, 12292), False, 'from skimage import measure\n'), ((12354, 12393), 'skimage.measure.regionprops', 'measure.regionprops', (['conncomp_t2', 'im_t3'], {}), '(conncomp_t2, im_t3)\n', (12373, 12393), False, 'from skimage import measure\n'), ((12511, 12544), 'numpy.asarray', 'np.asarray', (['rprops_mean_intensity'], {}), '(rprops_mean_intensity)\n', (12521, 12544), True, 'import numpy as np\n'), ((12634, 12658), 'numpy.asarray', 'np.asarray', (['rprops_label'], {}), '(rprops_label)\n', (12644, 12658), True, 'import numpy as np\n'), ((12682, 12717), 'numpy.where', 'np.where', (['(rprops_mean_intensity > 0)'], {}), '(rprops_mean_intensity > 0)\n', (12690, 12717), True, 'import numpy as np\n'), ((12738, 12777), 'numpy.isin', 'np.isin', (['conncomp_t2', 'rprops_label[ind]'], {}), '(conncomp_t2, rprops_label[ind])\n', (12745, 12777), True, 'import numpy as np\n'), ((25235, 25256), 'skimage.measure.label', 'measure.label', (['im_bin'], {}), '(im_bin)\n', (25248, 25256), False, 'from skimage import measure\n'), ((25318, 25345), 'skimage.measure.regionprops', 'measure.regionprops', (['labels'], {}), '(labels)\n', (25337, 25345), False, 'from skimage import measure\n'), ((25759, 25786), 'numpy.isin', 'np.isin', (['labels', 'rois_label'], {}), '(labels, rois_label)\n', (25766, 25786), True, 'import numpy as np\n'), ((4785, 4803), 'numpy.zeros', 'np.zeros', (['im.shape'], {}), '(im.shape)\n', (4793, 4803), True, 'import numpy as np\n'), ((6188, 6208), 'skimage.measure.label', 'measure.label', (['im_t2'], {}), '(im_t2)\n', (6201, 6208), False, 'from skimage import measure\n'), ((6274, 6313), 'skimage.measure.regionprops', 'measure.regionprops', (['conncomp_t2', 'im_t3'], {}), '(conncomp_t2, im_t3)\n', (6293, 6313), False, 'from skimage import measure\n'), ((6443, 6476), 'numpy.asarray', 'np.asarray', (['rprops_mean_intensity'], {}), '(rprops_mean_intensity)\n', (6453, 6476), True, 
'import numpy as np\n'), ((6578, 6602), 'numpy.asarray', 'np.asarray', (['rprops_label'], {}), '(rprops_label)\n', (6588, 6602), True, 'import numpy as np\n'), ((6634, 6669), 'numpy.where', 'np.where', (['(rprops_mean_intensity > 0)'], {}), '(rprops_mean_intensity > 0)\n', (6642, 6669), True, 'import numpy as np\n'), ((6698, 6737), 'numpy.isin', 'np.isin', (['conncomp_t2', 'rprops_label[ind]'], {}), '(conncomp_t2, rprops_label[ind])\n', (6705, 6737), True, 'import numpy as np\n'), ((13912, 14031), 'maad.util.plot2d', 'plot2d', (['im_out'], {'extent': 'extent', 'title': 'title', 'ylabel': 'ylabel', 'xlabel': 'xlabel', 'vmin': 'vmin', 'vmax': 'vmax', 'cmap': 'cmap'}), '(im_out, extent=extent, title=title, ylabel=ylabel, xlabel=xlabel,\n vmin=vmin, vmax=vmax, cmap=cmap, **kwargs)\n', (13918, 14031), False, 'from maad.util import plot2d, rand_cmap\n'), ((26263, 26351), 'pandas.DataFrame', 'pd.DataFrame', (['rois'], {'columns': "['labelID', 'label', 'min_y', 'min_x', 'max_y', 'max_x']"}), "(rois, columns=['labelID', 'label', 'min_y', 'min_x', 'max_y',\n 'max_x'])\n", (26275, 26351), True, 'import pandas as pd\n'), ((26653, 26741), 'pandas.DataFrame', 'pd.DataFrame', (['rois'], {'columns': "['labelID', 'label', 'min_y', 'min_x', 'max_y', 'max_x']"}), "(rois, columns=['labelID', 'label', 'min_y', 'min_x', 'max_y',\n 'max_x'])\n", (26665, 26741), True, 'import pandas as pd\n'), ((27407, 27505), 'maad.util.plot2d', 'plot2d', (['im_rois'], {'extent': 'extent', 'title': 'title', 'ylabel': 'ylabel', 'xlabel': 'xlabel', 'cmap': 'cmap'}), '(im_rois, extent=extent, title=title, ylabel=ylabel, xlabel=xlabel,\n cmap=cmap, **kwargs)\n', (27413, 27505), False, 'from maad.util import plot2d, rand_cmap\n'), ((7466, 7585), 'maad.util.plot2d', 'plot2d', (['im_out'], {'extent': 'extent', 'title': 'title', 'ylabel': 'ylabel', 'xlabel': 'xlabel', 'vmin': 'vmin', 'vmax': 'vmax', 'cmap': 'cmap'}), '(im_out, extent=extent, title=title, ylabel=ylabel, xlabel=xlabel,\n vmin=vmin, vmax=vmax, 
cmap=cmap, **kwargs)\n', (7472, 7585), False, 'from maad.util import plot2d, rand_cmap\n'), ((4933, 4949), 'numpy.where', 'np.where', (['(im > 0)'], {}), '(im > 0)\n', (4941, 4949), True, 'import numpy as np\n'), ((26190, 26212), 'numpy.asarray', 'np.asarray', (['rois_label'], {}), '(rois_label)\n', (26200, 26212), True, 'import numpy as np\n'), ((26214, 26235), 'numpy.asarray', 'np.asarray', (['rois_bbox'], {}), '(rois_bbox)\n', (26224, 26235), True, 'import numpy as np\n'), ((5330, 5346), 'numpy.where', 'np.where', (['(im > 0)'], {}), '(im > 0)\n', (5338, 5346), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
from django.http import HttpResponseRedirect
from django.utils.timezone import now
class AccountAlertMiddleware(object):
def process_request(self, request):
if getattr(request, 'user', None) and request.user.is_authenticated():
alerts = request.user.accountalert_set.filter(
force=True,
executed_at=None,
due_on__lt=now()).exclude(url=request.path)
if alerts.count() > 0:
return HttpResponseRedirect(alerts[0].url)
|
[
"django.utils.timezone.now",
"django.http.HttpResponseRedirect"
] |
[((507, 542), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['alerts[0].url'], {}), '(alerts[0].url)\n', (527, 542), False, 'from django.http import HttpResponseRedirect\n'), ((415, 420), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (418, 420), False, 'from django.utils.timezone import now\n')]
|
# Generated by Django 2.2.10 on 2020-05-18 10:48
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('events', '0008_alter_field_classification_on_event'),
('core', '0031_delete_item'),
]
operations = [
migrations.DeleteModel(
name='Instance',
),
]
|
[
"django.db.migrations.DeleteModel"
] |
[((282, 321), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""Instance"""'}), "(name='Instance')\n", (304, 321), False, 'from django.db import migrations\n')]
|
from channels import Group
def ws_add(message):
print('Connection Recieved')
message.reply_channel.send({'accept':True})
Group('chat').add(message.reply_channel)
def ws_message(message):
print('Message received:{}'.format(message.content['text']))
Group('chat').send({'text': message.content['text']})
def ws_disconnect(message):
print('Connection closed')
Group('chat').discard(message.reply_channel)
|
[
"channels.Group"
] |
[((125, 138), 'channels.Group', 'Group', (['"""chat"""'], {}), "('chat')\n", (130, 138), False, 'from channels import Group\n'), ((255, 268), 'channels.Group', 'Group', (['"""chat"""'], {}), "('chat')\n", (260, 268), False, 'from channels import Group\n'), ((367, 380), 'channels.Group', 'Group', (['"""chat"""'], {}), "('chat')\n", (372, 380), False, 'from channels import Group\n')]
|
# Copyright (c) 2010-2011 Lazy 8 Studios, LLC.
# All rights reserved.
from front.lib import db, utils
from front.tools import dump_user_routes, replay_game
from front.tests import base
from front.tests.base import points
class TestDumpUserRoutes(base.TestCase):
def setUp(self):
super(TestDumpUserRoutes, self).setUp()
self.create_user('<EMAIL>', 'pw')
def test_dump_user_routes(self):
user = self.get_logged_in_user()
chip_result = self.create_target_and_move(**points.FIRST_MOVE)
target_one = self.last_chip_value_for_path(['user', 'rovers', '*', 'targets', '*'], chip_result)
start_delay = utils.in_seconds(hours=4)
arrival_delta = utils.in_seconds(hours=10)
self.advance_now(seconds=start_delay)
chip_result = self.create_target(arrival_delta=arrival_delta, **points.SECOND_MOVE)
target_two = self.last_chip_value_for_path(['user', 'rovers', '*', 'targets', '*'], chip_result)
with db.commit_or_rollback(self.get_ctx()) as ctx:
with db.conn(ctx) as ctx:
routes_by_rover, all_targets = dump_user_routes.targets_as_route_for_user_id(ctx, user.user_id)
# Only one rover so far.
self.assertEqual(len(routes_by_rover), 1)
route = routes_by_rover[0][1]
# Only two user created targets.
self.assertEqual(route.num_points(), 2)
points_iter = route.iterpoints()
# The first points arrival_delta is going to be strange, since we create the initial
# lander points and then some amount of time goes by before the user can create their
# first target.
point = points_iter.next()
# However, the second point's arrival_delta should equal the amount we delayed before
# creating it plus how long its travel time was.
point = points_iter.next()
self.assertEqual(point.arrival_delta, arrival_delta)
self.assertEqual(point.start_delay, start_delay)
# Pass the dumped route through replay_game and see if the data looks correct.
with db.commit_or_rollback(self.get_ctx()) as ctx:
with db.conn(ctx) as ctx:
tool = replay_game.ReplayGame(ctx, '<EMAIL>',
route_structs=[route.to_struct()], verbose=False)
tool.run()
user = self.get_user_by_email('<EMAIL>')
rover = user.rovers.active()[0]
# Verify that the start_time and arrival_time fields made the round trip intact.
last_two_targets = rover.targets.by_arrival_time()[-2:]
replay_target_one, replay_target_two = last_two_targets[0], last_two_targets[1]
self._assert_targets_same_times(target_one, replay_target_one)
self._assert_targets_same_times(target_two, replay_target_two)
def _assert_targets_same_times(self, original, replay):
# NOTE: Ideally we could verify lat==lat,lng==lng but loss of precision prevents this currently.
self.assertEqual(original['start_time'], replay.start_time)
self.assertEqual(original['arrival_time'], replay.arrival_time)
|
[
"front.lib.db.conn",
"front.tools.dump_user_routes.targets_as_route_for_user_id",
"front.lib.utils.in_seconds"
] |
[((652, 677), 'front.lib.utils.in_seconds', 'utils.in_seconds', ([], {'hours': '(4)'}), '(hours=4)\n', (668, 677), False, 'from front.lib import db, utils\n'), ((702, 728), 'front.lib.utils.in_seconds', 'utils.in_seconds', ([], {'hours': '(10)'}), '(hours=10)\n', (718, 728), False, 'from front.lib import db, utils\n'), ((1049, 1061), 'front.lib.db.conn', 'db.conn', (['ctx'], {}), '(ctx)\n', (1056, 1061), False, 'from front.lib import db, utils\n'), ((1117, 1181), 'front.tools.dump_user_routes.targets_as_route_for_user_id', 'dump_user_routes.targets_as_route_for_user_id', (['ctx', 'user.user_id'], {}), '(ctx, user.user_id)\n', (1162, 1181), False, 'from front.tools import dump_user_routes, replay_game\n'), ((2148, 2160), 'front.lib.db.conn', 'db.conn', (['ctx'], {}), '(ctx)\n', (2155, 2160), False, 'from front.lib import db, utils\n')]
|
"""
Copyright 2020 The Magma Authors.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Module for executing `traceroute` commands via subprocess
"""
from collections import namedtuple
import asyncio
from magma.magmad.check import subprocess_workflow
DEFAULT_TTL = 30
DEFAULT_BYTES_PER_PACKET = 60
TracerouteParams = namedtuple('TracerouteParams',
['host_or_ip', 'max_hops', 'bytes_per_packet'])
TracerouteResult = namedtuple('TracerouteResult',
['error', 'host_or_ip', 'stats'])
TracerouteStats = namedtuple('TracerouteStats', ['hops'])
TracerouteHop = namedtuple('TracerouteHop', ['idx', 'probes'])
TracerouteProbe = namedtuple('TracerouteProbe',
['hostname', 'ip_addr', 'rtt_ms'])
def traceroute(params):
"""
Execute some `traceroute` commands via subprocess.
Args:
params ([TracerouteParams]): params for the `traceroute` commands
Returns:
[TracerouteResult]: stats from the executed `traceroute` commands
"""
return subprocess_workflow.exec_and_parse_subprocesses(
params,
_get_traceroute_command_args_list,
parse_traceroute_output,
)
@asyncio.coroutine
def traceroute_async(params, loop=None):
"""
Execute some `traceroute` commands asynchronously and return results.
Args:
params ([TracerouteParams]): params for the `traceroute` commands
loop: event loop to run in (optional)
Returns:
[TracerouteResult]: stats from the executed `traceroute` commands
"""
return subprocess_workflow.exec_and_parse_subprocesses_async(
params,
_get_traceroute_command_args_list,
parse_traceroute_output,
loop=loop,
)
def _get_traceroute_command_args_list(param):
return [
'traceroute',
'-m', str(param.max_hops or DEFAULT_TTL),
param.host_or_ip,
str(param.bytes_per_packet or DEFAULT_BYTES_PER_PACKET),
]
def parse_traceroute_output(stdout, stderr, param):
def create_error_result(error_msg):
return TracerouteResult(
error=error_msg,
host_or_ip=param.host_or_ip,
stats=None,
)
if stderr:
return create_error_result(stderr)
else:
try:
stats = TracerouteParser().parse(stdout)
return TracerouteResult(
error=None,
host_or_ip=param.host_or_ip,
stats=stats,
)
except ValueError as e:
msg = 'Error while parsing output. ' \
'Original exception message:\n{}'.format(str(e.args[0]))
return create_error_result(msg)
except IndexError as e:
msg = 'Error while parsing output - an incomplete line ' \
'was encountered. Original exception message:\n{}' \
.format(str(e.args[0]))
return create_error_result(msg)
class TracerouteParser(object):
HostnameAndIP = namedtuple('HostnameAndIP', ['hostname', 'ip'])
DEFAULT_ENDPOINT = HostnameAndIP(hostname=None, ip=None)
def __init__(self):
self._probe_endpoint = self.DEFAULT_ENDPOINT
def parse(self, output):
"""
Raises:
ValueError, IndexError
"""
output_lines = output.decode('ascii').strip().split('\n')
output_lines.pop(0) # strip header line
hops = []
for line in output_lines:
self._probe_endpoint = self.DEFAULT_ENDPOINT
hops.append(self._parse_hop(line))
return TracerouteStats(hops)
def _parse_hop(self, line):
hop_split = line.split()
hop_idx = int(hop_split.pop(0))
probes = []
while hop_split:
probe = self._parse_next_probe(hop_split)
if probe:
probes.append(probe)
return TracerouteHop(idx=hop_idx, probes=probes)
def _parse_next_probe(self, tokens):
head_token = tokens.pop(0)
if head_token == '*':
return TracerouteProbe(hostname=self._probe_endpoint.hostname,
ip_addr=self._probe_endpoint.ip,
rtt_ms=0)
lookahead_token = tokens.pop(0)
if lookahead_token == 'ms':
return TracerouteProbe(hostname=self._probe_endpoint.hostname,
ip_addr=self._probe_endpoint.ip,
rtt_ms=float(head_token))
else:
ip_addr = lookahead_token[1:-1]
self._probe_endpoint = self.HostnameAndIP(hostname=head_token,
ip=ip_addr)
return None
|
[
"magma.magmad.check.subprocess_workflow.exec_and_parse_subprocesses_async",
"magma.magmad.check.subprocess_workflow.exec_and_parse_subprocesses",
"collections.namedtuple"
] |
[((708, 786), 'collections.namedtuple', 'namedtuple', (['"""TracerouteParams"""', "['host_or_ip', 'max_hops', 'bytes_per_packet']"], {}), "('TracerouteParams', ['host_or_ip', 'max_hops', 'bytes_per_packet'])\n", (718, 786), False, 'from collections import namedtuple\n'), ((837, 901), 'collections.namedtuple', 'namedtuple', (['"""TracerouteResult"""', "['error', 'host_or_ip', 'stats']"], {}), "('TracerouteResult', ['error', 'host_or_ip', 'stats'])\n", (847, 901), False, 'from collections import namedtuple\n'), ((951, 990), 'collections.namedtuple', 'namedtuple', (['"""TracerouteStats"""', "['hops']"], {}), "('TracerouteStats', ['hops'])\n", (961, 990), False, 'from collections import namedtuple\n'), ((1008, 1054), 'collections.namedtuple', 'namedtuple', (['"""TracerouteHop"""', "['idx', 'probes']"], {}), "('TracerouteHop', ['idx', 'probes'])\n", (1018, 1054), False, 'from collections import namedtuple\n'), ((1074, 1138), 'collections.namedtuple', 'namedtuple', (['"""TracerouteProbe"""', "['hostname', 'ip_addr', 'rtt_ms']"], {}), "('TracerouteProbe', ['hostname', 'ip_addr', 'rtt_ms'])\n", (1084, 1138), False, 'from collections import namedtuple\n'), ((1449, 1568), 'magma.magmad.check.subprocess_workflow.exec_and_parse_subprocesses', 'subprocess_workflow.exec_and_parse_subprocesses', (['params', '_get_traceroute_command_args_list', 'parse_traceroute_output'], {}), '(params,\n _get_traceroute_command_args_list, parse_traceroute_output)\n', (1496, 1568), False, 'from magma.magmad.check import subprocess_workflow\n'), ((1977, 2113), 'magma.magmad.check.subprocess_workflow.exec_and_parse_subprocesses_async', 'subprocess_workflow.exec_and_parse_subprocesses_async', (['params', '_get_traceroute_command_args_list', 'parse_traceroute_output'], {'loop': 'loop'}), '(params,\n _get_traceroute_command_args_list, parse_traceroute_output, loop=loop)\n', (2030, 2113), False, 'from magma.magmad.check import subprocess_workflow\n'), ((3413, 3460), 'collections.namedtuple', 
'namedtuple', (['"""HostnameAndIP"""', "['hostname', 'ip']"], {}), "('HostnameAndIP', ['hostname', 'ip'])\n", (3423, 3460), False, 'from collections import namedtuple\n')]
|
import tensorflow as tf
from tensorflow import keras
from models.layers import recurrent_dense
def lstm_guide_only_nolin_hp2_model(args,lstm_units=64, dense_units=16, recurrent_layers=1, dropout=0.0):
seq = keras.Input(shape=(None, 4)) # 4
x = keras.layers.Bidirectional(keras.layers.LSTM(lstm_units, dropout=dropout))(seq)
x = keras.layers.Dense(dense_units)(x)
for _ in range(recurrent_layers):
x = recurrent_dense(x, dense_units)
outputs = keras.layers.Dense(1)(x)
return keras.Model(inputs=[seq], outputs=outputs)
|
[
"tensorflow.keras.layers.Dense",
"tensorflow.keras.Input",
"models.layers.recurrent_dense",
"tensorflow.keras.Model",
"tensorflow.keras.layers.LSTM"
] |
[((214, 242), 'tensorflow.keras.Input', 'keras.Input', ([], {'shape': '(None, 4)'}), '(shape=(None, 4))\n', (225, 242), False, 'from tensorflow import keras\n'), ((514, 556), 'tensorflow.keras.Model', 'keras.Model', ([], {'inputs': '[seq]', 'outputs': 'outputs'}), '(inputs=[seq], outputs=outputs)\n', (525, 556), False, 'from tensorflow import keras\n'), ((345, 376), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['dense_units'], {}), '(dense_units)\n', (363, 376), False, 'from tensorflow import keras\n'), ((431, 462), 'models.layers.recurrent_dense', 'recurrent_dense', (['x', 'dense_units'], {}), '(x, dense_units)\n', (446, 462), False, 'from models.layers import recurrent_dense\n'), ((478, 499), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(1)'], {}), '(1)\n', (496, 499), False, 'from tensorflow import keras\n'), ((284, 330), 'tensorflow.keras.layers.LSTM', 'keras.layers.LSTM', (['lstm_units'], {'dropout': 'dropout'}), '(lstm_units, dropout=dropout)\n', (301, 330), False, 'from tensorflow import keras\n')]
|
#! /usr/bin/python3
'''
Python script to pull through terminal the fileTree
'''
import sys
from src.CanvasBackend import semester_board as bk
if __name__ == '__main__':
bk.Profile(sys.argv[1])
|
[
"src.CanvasBackend.semester_board.Profile"
] |
[((175, 198), 'src.CanvasBackend.semester_board.Profile', 'bk.Profile', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (185, 198), True, 'from src.CanvasBackend import semester_board as bk\n')]
|
import logging
import os
import numpy as np
import argparse
import open3d as o3d
from urllib.request import urlretrieve
from util.visualization import get_colored_point_cloud_feature
from util.misc import extract_features
from model.resunet import ResUNetBN2C
import torch
if not os.path.isfile('ResUNetBN2C-16feat-3conv.pth'):
print('Downloading weights...')
urlretrieve(
"https://node1.chrischoy.org/data/publications/fcgf/2019-09-18_14-15-59.pth",
'ResUNetBN2C-16feat-3conv.pth')
if not os.path.isfile('redkitchen-20.ply'):
print('Downloading a mesh...')
urlretrieve("https://node1.chrischoy.org/data/publications/fcgf/redkitchen-20.ply",
'redkitchen-20.ply')
def demo(config):
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
checkpoint = torch.load(config.model)
model = ResUNetBN2C(1, 16, normalize_feature=True, conv1_kernel_size=3, D=3)
model.load_state_dict(checkpoint['state_dict'])
model.eval()
model = model.to(device)
pcd = o3d.io.read_point_cloud(config.input)
xyz_down, feature = extract_features(
model,
xyz=np.array(pcd.points),
voxel_size=config.voxel_size,
device=device,
skip_check=True)
vis_pcd = o3d.geometry.PointCloud()
vis_pcd.points = o3d.utility.Vector3dVector(xyz_down)
vis_pcd = get_colored_point_cloud_feature(vis_pcd,
feature.detach().cpu().numpy(),
config.voxel_size)
#o3d.visualization.draw_geometries([vis_pcd])
#o3d.io.write_triangle_mesh('/home/curnis/result/fcgf/mesh.obj', vis_pcd)
print(type(vis_pcd))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'-i',
'--input',
default='redkitchen-20.ply',
type=str,
help='path to a pointcloud file')
parser.add_argument(
'-m',
'--model',
default='ResUNetBN2C-16feat-3conv.pth',
type=str,
help='path to latest checkpoint (default: None)')
parser.add_argument(
'--voxel_size',
default=0.025,
type=float,
help='voxel size to preprocess point cloud')
config = parser.parse_args()
demo(config)
|
[
"argparse.ArgumentParser",
"torch.load",
"open3d.io.read_point_cloud",
"open3d.geometry.PointCloud",
"model.resunet.ResUNetBN2C",
"urllib.request.urlretrieve",
"os.path.isfile",
"torch.cuda.is_available",
"numpy.array",
"open3d.utility.Vector3dVector"
] |
[((283, 329), 'os.path.isfile', 'os.path.isfile', (['"""ResUNetBN2C-16feat-3conv.pth"""'], {}), "('ResUNetBN2C-16feat-3conv.pth')\n", (297, 329), False, 'import os\n'), ((367, 498), 'urllib.request.urlretrieve', 'urlretrieve', (['"""https://node1.chrischoy.org/data/publications/fcgf/2019-09-18_14-15-59.pth"""', '"""ResUNetBN2C-16feat-3conv.pth"""'], {}), "(\n 'https://node1.chrischoy.org/data/publications/fcgf/2019-09-18_14-15-59.pth'\n , 'ResUNetBN2C-16feat-3conv.pth')\n", (378, 498), False, 'from urllib.request import urlretrieve\n'), ((510, 545), 'os.path.isfile', 'os.path.isfile', (['"""redkitchen-20.ply"""'], {}), "('redkitchen-20.ply')\n", (524, 545), False, 'import os\n'), ((582, 695), 'urllib.request.urlretrieve', 'urlretrieve', (['"""https://node1.chrischoy.org/data/publications/fcgf/redkitchen-20.ply"""', '"""redkitchen-20.ply"""'], {}), "(\n 'https://node1.chrischoy.org/data/publications/fcgf/redkitchen-20.ply',\n 'redkitchen-20.ply')\n", (593, 695), False, 'from urllib.request import urlretrieve\n'), ((809, 833), 'torch.load', 'torch.load', (['config.model'], {}), '(config.model)\n', (819, 833), False, 'import torch\n'), ((844, 912), 'model.resunet.ResUNetBN2C', 'ResUNetBN2C', (['(1)', '(16)'], {'normalize_feature': '(True)', 'conv1_kernel_size': '(3)', 'D': '(3)'}), '(1, 16, normalize_feature=True, conv1_kernel_size=3, D=3)\n', (855, 912), False, 'from model.resunet import ResUNetBN2C\n'), ((1015, 1052), 'open3d.io.read_point_cloud', 'o3d.io.read_point_cloud', (['config.input'], {}), '(config.input)\n', (1038, 1052), True, 'import open3d as o3d\n'), ((1231, 1256), 'open3d.geometry.PointCloud', 'o3d.geometry.PointCloud', ([], {}), '()\n', (1254, 1256), True, 'import open3d as o3d\n'), ((1276, 1312), 'open3d.utility.Vector3dVector', 'o3d.utility.Vector3dVector', (['xyz_down'], {}), '(xyz_down)\n', (1302, 1312), True, 'import open3d as o3d\n'), ((1693, 1718), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1716, 1718), False, 
'import argparse\n'), ((755, 780), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (778, 780), False, 'import torch\n'), ((1116, 1136), 'numpy.array', 'np.array', (['pcd.points'], {}), '(pcd.points)\n', (1124, 1136), True, 'import numpy as np\n')]
|
import numpy as np
import sys
import os, psutil
from collections import deque
def mem() :
process = psutil.Process(os.getpid())
print(process.memory_info().rss / 1000000, "Mb", file=sys.stderr)
class Graph:
def neighbors(self, node):
return self.neighbors_array[self.index[node]:self.index[node]+self.deg[node]] # thanks to great implementation of numpy, this is a view and not a copy
def __init__(self, left, right, number_nodes):
self.nb_nodes = number_nodes
self.nb_edges = len(left)
self.deg = np.zeros(self.nb_nodes, dtype = np.int32)
uniques,counts = np.unique(np.concatenate((l1,l2)), return_counts=True)
self.deg = np.zeros(maxIdx+1, dtype = np.int32)
for unique, count in zip(uniques,counts):
self.deg[unique] = count
self.index = np.zeros(self.nb_nodes, dtype = np.int32)
for i in range(1, self.nb_nodes):
self.index[i] = self.index[i-1]+self.deg[i-1]
mutable_index = np.copy(self.index)
self.neighbors_array = np.zeros(self.index[self.nb_nodes-1]+self.deg[self.nb_nodes-1], dtype = np.int32) # memory of size sum number of degrees
for a, b in zip(left, right):
self.neighbors_array[mutable_index[a]] = b
self.neighbors_array[mutable_index[b]] = a
mutable_index[a]+=1
mutable_index[b]+=1
if __name__ == "__main__":
argv = sys.argv[1:]
estimNbAretes = int(argv[1])
#lecture du fichier et constitution du tableau des arêtes
l1 = np.zeros(estimNbAretes, dtype=np.int32)
l2 = np.zeros(estimNbAretes, dtype=np.int32)
with open(argv[0], 'r') as f:
count=0
for line in f:
if line[0]!='#':
newline=line.split()
a = int(newline[0],10)
b = int(newline[1],10)
l1[count]=a
l2[count]=b
count+=1
maxIdx = max(np.max(l1),np.max(l2))
l1 = l1[:count]
l2 = l2[:count]
G = Graph(l1, l2, maxIdx+1)
del l1
del l2
mem()
#on peut retourner le nombre de sommets et d'arêtes
print("n="+str(G.nb_nodes))
print("m="+str(G.nb_edges))
#calcul et retour du degré max
degMax=np.max(G.deg)
print("degmax="+str(degMax))
#calcul et retour de las distance entre u et v
u=int(argv[2])
v=int(argv[3])
res = -1
#on procède à un BFS partant de u en utilisant une file pour la visite (to_visit) et en retenant les noeuds vus (seen) et leur distance
if u==v:
res = 0
else:
seen=np.zeros(maxIdx+1, dtype=np.int32)
dist=np.zeros(maxIdx+1, dtype=np.int32)
seen[u]=1
to_visit=deque([])
for w in G.neighbors(u):
seen[w]=1
dist[w]=1
to_visit.append(w)
while to_visit:
w=to_visit.popleft()
if w==v:
res = dist[w]
break
else:
for z in G.neighbors(w):
if not seen[z]:
to_visit.append(z)
seen[z]=1
dist[z]=dist[w]+1
mem()
if res == -1:
print("dist="+str(float('inf')))
else:
print("dist="+str(res))
|
[
"os.getpid",
"numpy.concatenate",
"numpy.copy",
"numpy.zeros",
"numpy.max",
"collections.deque"
] |
[((1543, 1582), 'numpy.zeros', 'np.zeros', (['estimNbAretes'], {'dtype': 'np.int32'}), '(estimNbAretes, dtype=np.int32)\n', (1551, 1582), True, 'import numpy as np\n'), ((1592, 1631), 'numpy.zeros', 'np.zeros', (['estimNbAretes'], {'dtype': 'np.int32'}), '(estimNbAretes, dtype=np.int32)\n', (1600, 1631), True, 'import numpy as np\n'), ((2238, 2251), 'numpy.max', 'np.max', (['G.deg'], {}), '(G.deg)\n', (2244, 2251), True, 'import numpy as np\n'), ((118, 129), 'os.getpid', 'os.getpid', ([], {}), '()\n', (127, 129), False, 'import os, psutil\n'), ((545, 584), 'numpy.zeros', 'np.zeros', (['self.nb_nodes'], {'dtype': 'np.int32'}), '(self.nb_nodes, dtype=np.int32)\n', (553, 584), True, 'import numpy as np\n'), ((686, 722), 'numpy.zeros', 'np.zeros', (['(maxIdx + 1)'], {'dtype': 'np.int32'}), '(maxIdx + 1, dtype=np.int32)\n', (694, 722), True, 'import numpy as np\n'), ((831, 870), 'numpy.zeros', 'np.zeros', (['self.nb_nodes'], {'dtype': 'np.int32'}), '(self.nb_nodes, dtype=np.int32)\n', (839, 870), True, 'import numpy as np\n'), ((997, 1016), 'numpy.copy', 'np.copy', (['self.index'], {}), '(self.index)\n', (1004, 1016), True, 'import numpy as np\n'), ((1048, 1138), 'numpy.zeros', 'np.zeros', (['(self.index[self.nb_nodes - 1] + self.deg[self.nb_nodes - 1])'], {'dtype': 'np.int32'}), '(self.index[self.nb_nodes - 1] + self.deg[self.nb_nodes - 1], dtype\n =np.int32)\n', (1056, 1138), True, 'import numpy as np\n'), ((1947, 1957), 'numpy.max', 'np.max', (['l1'], {}), '(l1)\n', (1953, 1957), True, 'import numpy as np\n'), ((1958, 1968), 'numpy.max', 'np.max', (['l2'], {}), '(l2)\n', (1964, 1968), True, 'import numpy as np\n'), ((2575, 2611), 'numpy.zeros', 'np.zeros', (['(maxIdx + 1)'], {'dtype': 'np.int32'}), '(maxIdx + 1, dtype=np.int32)\n', (2583, 2611), True, 'import numpy as np\n'), ((2621, 2657), 'numpy.zeros', 'np.zeros', (['(maxIdx + 1)'], {'dtype': 'np.int32'}), '(maxIdx + 1, dtype=np.int32)\n', (2629, 2657), True, 'import numpy as np\n'), ((2688, 2697), 
'collections.deque', 'deque', (['[]'], {}), '([])\n', (2693, 2697), False, 'from collections import deque\n'), ((622, 646), 'numpy.concatenate', 'np.concatenate', (['(l1, l2)'], {}), '((l1, l2))\n', (636, 646), True, 'import numpy as np\n')]
|
#!/bin/env python
""" Part of weight_app
:copyright: (c) 2012 by <NAME>.
:license: BSD, see LICENSE for more details.
"""
# testsuite
# run: python main.py
import unittest
import os
import sys
def suite():
from base import BaseTest
from test_importer import ImportTest
from test_forms import FormTest
suite = unittest.TestSuite()
# Testmodules
suite.addTest(unittest.makeSuite(BaseTest))
suite.addTest(unittest.makeSuite(ImportTest))
suite.addTest(unittest.makeSuite(FormTest))
return suite
if __name__ == '__main__':
# this_file=os.path.join(os.path.dirname(__file__),'../../env_weight/bin/activate_this.py')
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
os.environ['TEST'] = 'yes'
unittest.main(defaultTest='suite')
|
[
"unittest.main",
"os.path.dirname",
"unittest.makeSuite",
"unittest.TestSuite"
] |
[((339, 359), 'unittest.TestSuite', 'unittest.TestSuite', ([], {}), '()\n', (357, 359), False, 'import unittest\n'), ((788, 822), 'unittest.main', 'unittest.main', ([], {'defaultTest': '"""suite"""'}), "(defaultTest='suite')\n", (801, 822), False, 'import unittest\n'), ((396, 424), 'unittest.makeSuite', 'unittest.makeSuite', (['BaseTest'], {}), '(BaseTest)\n', (414, 424), False, 'import unittest\n'), ((444, 474), 'unittest.makeSuite', 'unittest.makeSuite', (['ImportTest'], {}), '(ImportTest)\n', (462, 474), False, 'import unittest\n'), ((494, 522), 'unittest.makeSuite', 'unittest.makeSuite', (['FormTest'], {}), '(FormTest)\n', (512, 522), False, 'import unittest\n'), ((718, 743), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (733, 743), False, 'import os\n')]
|
#!/usr/bin/python3
from MessageConverters.MessageConverter import MessageConverter
import logging
from datetime import datetime
from datetime import timezone
class MCF(MessageConverter):
msg_types = {
0x01: "time_sync_request",
0x04: "t_p_rh",
0x05: "uart",
0x09: "power",
0x0A: "io",
0x0B: "report_data",
0x0C: "t_p_rh_lux_voc",
0x0D: "analog_data",
0x0E: "t_p_rh_lux_voc_co2",
0x0F: "special_data",
0x10: "digital_data",
0x11: "length_error"
}
def __init__(self, devicename=None):
super().__init__(devicename)
def __parse_time(self):
a = (
self.payload.pop(0) |
self.payload.pop(0) << 8 |
self.payload.pop(0) << 16 |
self.payload.pop(0) << 24)
value = list(a.to_bytes(4, byteorder='little'))
self.logger.info(f"time payload: {value}")
year = 2000 + (value[3] >> 1)
month = ((value[3] & 0x01) << 3) | (value[2] >> 5)
day = value[2] & 0x1f
hours = value[1] >> 3
minutes = ((value[1] & 0x7) << 3) | (value[0] >> 5)
seconds = value[0] & 0x1f
self.logger.info(
f'year : {year}, '
f'month : {month}, '
f'day : {day}, '
f'hours : {hours}, '
f'minutes : {minutes}, '
f'seconds : {seconds}')
# datetime(year, month, day, hour, minute, second, microsecond)
date_time_obj = datetime(year, month, day, hours, minutes, seconds)
return int(datetime.timestamp(date_time_obj))
def parse_time_sync_request(self):
entry = {}
if (len(self.payload) == 0):
self.logger.warn("message has no content - skipping.")
return entry
fields = {}
# sync id
value = (
self.payload.pop(0) |
self.payload.pop(0) << 8 |
self.payload.pop(0) << 16 |
self.payload.pop(0) << 24)
fields['sync_id'] = value
# sync version
value = (
self.payload.pop(0) |
self.payload.pop(0) << 8 |
self.payload.pop(0) << 16)
fields['sync version'] = value
# application type
value = (
self.payload.pop(0) |
self.payload.pop(0) << 8)
fields["app_type"] = value
# option
value = self.payload.pop(0)
fields["option"] = value
entry['fields'] = fields
return entry
def parse_t_p_rh(self):
entry = {}
if (len(self.payload) == 0):
self.logger.warn("message has no content - skipping.")
return entry
fields = {}
# time
fields['time'] = self.__parse_time()
# temperature
value = (
self.payload.pop(0) |
self.payload.pop(0) << 8) / 100
fields['temperature'] = value
# humidity
value = self.payload.pop(0) / 2
fields["humidity"] = value
# pressure1
value = (
self.payload.pop(0) |
self.payload.pop(0) << 8 |
self.payload.pop(0) << 16)
fields["pressure"] = value
entry['fields'] = fields
return entry
def parse_uart(self):
entry = {}
if (len(self.payload) == 0):
self.logger.warn("message has no content - skipping.")
return entry
self.logger.warn("parse function not implemented - skipping.")
return entry
def parse_power(self):
# TODO: there is a 2nd payload version woth more data..
entry = {}
if (len(self.payload) == 0):
self.logger.warn("message has no content - skipping.")
return entry
fields = {}
# time
fields['time'] = self.__parse_time()
# active energy
value = (
self.payload.pop(0) |
self.payload.pop(0) << 8 |
self.payload.pop(0) << 16 |
self.payload.pop(0) << 24)
fields['active_energy'] = value
# active energy
value = (
self.payload.pop(0) |
self.payload.pop(0) << 8 |
self.payload.pop(0) << 16 |
self.payload.pop(0) << 24)
fields['active_energy'] = value
# reactive energy
value = (
self.payload.pop(0) |
self.payload.pop(0) << 8 |
self.payload.pop(0) << 16 |
self.payload.pop(0) << 24)
fields['reactive_energy'] = value
# apparent energy
value = (
self.payload.pop(0) |
self.payload.pop(0) << 8 |
self.payload.pop(0) << 16 |
self.payload.pop(0) << 24)
fields['apparent_energy'] = value
# running_time
value = (
self.payload.pop(0) |
self.payload.pop(0) << 8 |
self.payload.pop(0) << 16 |
self.payload.pop(0) << 24)
fields['running_time'] = value
entry['fields'] = fields
return entry
def parse_io(self):
entry = {}
if (len(self.payload) == 0):
self.logger.warn("message has no content - skipping.")
return entry
fields = {}
# time
fields['time'] = self.__parse_time()
# inputs
value = (
self.payload.pop(0) |
self.payload.pop(0) << 8 |
self.payload.pop(0) << 16 |
self.payload.pop(0) << 24)
fields['inputs'] = bin(value)[2:]
# outputs
value = (
self.payload.pop(0) |
self.payload.pop(0) << 8 |
self.payload.pop(0) << 16 |
self.payload.pop(0) << 24)
fields['outputs'] = bin(value)[2:]
# events
value = (
self.payload.pop(0) |
self.payload.pop(0) << 8 |
self.payload.pop(0) << 16 |
self.payload.pop(0) << 24)
fields['events'] = bin(value)[2:]
entry['fields'] = fields
return entry
def parse_report_data(self):
entry = {}
if (len(self.payload) == 0):
self.logger.warn("message has no content - skipping.")
return entry
self.logger.warn("parse function not implemented - skipping.")
return entry
def parse_t_p_rh_lux_voc(self):
entry = {}
if (len(self.payload) == 0):
self.logger.warn("message has no content - skipping.")
return entry
fields = {}
# first part is identical with t_p_rh
fields.update(self.parse_t_p_rh()['fields'])
# illuminance
value = value = (
self.payload.pop(0) |
self.payload.pop(0) << 8)
fields['illuminance'] = value
# voc
value = value = (
self.payload.pop(0) |
self.payload.pop(0) << 8)
fields['voc'] = value
entry['fields'] = fields
return entry
def parse_analog_data(self):
entry = {}
if (len(self.payload) == 0):
self.logger.warn("message has no content - skipping.")
return entry
self.logger.warn("parse function not implemented - skipping.")
return entry
def parse_t_p_rh_lux_voc_co2(self):
entry = {}
if (len(self.payload) == 0):
self.logger.warn("message has no content - skipping.")
return entry
fields = {}
# first part is identical with t_p_rh
fields.update(self.parse_t_p_rh_lux_voc()['fields'])
# co2
value = value = (
self.payload.pop(0) |
self.payload.pop(0) << 8)
fields['co2'] = value
entry['fields'] = fields
return entry
def parse_special_data(self):
entry = {}
if (len(self.payload) == 0):
self.logger.warn("message has no content - skipping.")
return entry
self.logger.warn("parse function not implemented - skipping.")
return entry
def parse_digital_data(self):
entry = {}
if (len(self.payload) == 0):
self.logger.warn("message has no content - skipping.")
return entry
fields = {}
# type
value = self.payload.pop(0)
fields['type'] = value
if value == 0:
for num in range(16):
if (self.payload):
value = value = (
self.payload.pop(0) |
self.payload.pop(0) << 8)
fields[f'input_{num}'] = value
elif (value == 1):
# time
fields['time'] = self.__parse_time()
# frequency
value = (
self.payload.pop(0) |
self.payload.pop(0) << 8)
fields['frequency'] = value/10
# battery pecentage (optional)
if (self.payload):
value = self.payload.pop(0)
fields['battery_percentage'] = value
elif (value == 2):
for num in range(5):
if (self.payload):
fields[f'time_{num}'] = self.__parse_time()
value = value = (
self.payload.pop(0) |
self.payload.pop(0) << 8)
fields[f'input_{num}'] = value
# battery pecentage (optional)
if (self.payload):
value = self.payload.pop(0)
fields['battery_percentage'] = value
else:
self.logger.warn(f'unknown type "{value}" - skipping.')
entry['fields'] = fields
return entry
def parse_serial_data(self):
entry = {}
if (len(self.payload) == 0):
self.logger.warn("message has no content - skipping.")
return entry
self.logger.warn("parse function not implemented - skipping.")
return entry
def parse_length_error(self):
entry = {}
if (len(self.payload) == 0):
self.logger.warn("message has no content - skipping.")
return entry
fields = {}
# ignore seq no
self.payload.pop(0) | self.payload.pop(0) << 8
# bat level
value = self.payload.pop(0)
fields['batt_level'] = str(value)
# hw&fw version
value = self.payload.pop(0)
fields['hwfw'] = str(value)
entry['fields'] = fields
return entry
def _hasDownlinkMessage(self):
return self.downlinkMessage is not None
def _getDownlinkMessage(self):
return self.downlinkMessage
def _convert(self, payload, port):
'''
ldc publish format:
[
{
"measurement": "abc",
"tags": {
"tag_a": "irgendwas",
"tag_b": "irgendwasanderes"
},
"time": "2021-02-01",
"fields": {
"field_a": "value_a",
"field_n": "value_n"
}
}
]
'''
publ_array = []
dt = datetime.utcnow()
self.current_ts = int(dt.replace(tzinfo=timezone.utc).timestamp())
self.current_time = dt.strftime('%Y-%m-%dT%H:%M:%SZ')
self.payload = list(bytearray(payload))
self.logger.debug(
"decoding payload {}. servertime is {} (ts: {})".format(
payload,
self.current_time,
self.current_ts))
try:
self.cursor = 0
while len(self.payload) > 0:
# header
messagetype_byte = self.payload.pop(0)
self.logger.debug("message type: {}".format(
hex(messagetype_byte)))
messagetype = self.msg_types.get(messagetype_byte, None)
if messagetype:
method_name = "parse_" + messagetype
method = getattr(self, method_name, lambda: None)
if method:
entry = method()
self.payload = []
if entry:
# add common tags and fields
entry["ts"] = self.current_time
if "tags" not in entry:
entry["tags"] = {}
entry["tags"]["devicename"] = self.devicename
entry["tags"]["messagetype"] = messagetype
self.logger.debug(
"method_name: {}, result:{}".format(
method_name,
entry))
publ_array.extend([entry])
else:
self.logger.exception(
"Method for {} nor implemented".format(
method_name))
else:
self.logger.exception(
"Unknown Message Type: {}".format(messagetype_byte))
except Exception:
self.logger.exception("Error while trying to decode payload..")
return publ_array
|
[
"datetime.datetime.utcnow",
"datetime.datetime.timestamp",
"datetime.datetime"
] |
[((1558, 1609), 'datetime.datetime', 'datetime', (['year', 'month', 'day', 'hours', 'minutes', 'seconds'], {}), '(year, month, day, hours, minutes, seconds)\n', (1566, 1609), False, 'from datetime import datetime\n'), ((11266, 11283), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (11281, 11283), False, 'from datetime import datetime\n'), ((1630, 1663), 'datetime.datetime.timestamp', 'datetime.timestamp', (['date_time_obj'], {}), '(date_time_obj)\n', (1648, 1663), False, 'from datetime import datetime\n')]
|
import hmac
from http import cookies
import json
from typing import Callable, TYPE_CHECKING
from urllib.parse import urlencode
import thor
from thor.http import HttpClient, get_header
from thor.http.error import HttpError
from redbot.resource import HttpResource
from redbot.type import RawHeaderListType
token_client = HttpClient()
token_client.idle_timeout = 30
token_client.connect_timeout = 10
token_client.read_timeout = 10
token_client.max_server_conn = 30
if TYPE_CHECKING:
from redbot.webui import RedWebUi # pylint: disable=cyclic-import,unused-import
class CaptchaHandler:
def __init__(
self,
webui: "RedWebUi",
client_id: str,
continue_test: Callable,
error_response: Callable,
) -> None:
self.webui = webui
self.client_id = client_id
self.continue_test = continue_test
self.error_response = error_response
self.secret = webui.config.get("hcaptcha_secret", "").encode("utf-8")
self.token_lifetime = webui.config.getint("token_lifetime", fallback=300)
def run(self) -> None:
captcha_token = self.webui.body_args.get("captcha_token", [None])[0]
cookie_str = b", ".join(get_header(self.webui.req_headers, b"cookie"))
try:
cookiejar = cookies.SimpleCookie(
cookie_str.decode("utf-8", "replace")
) # type: cookies.SimpleCookie
except cookies.CookieError:
self.error_response(
b"400",
b"Bad Request",
"Sorry, your cookies appear corrupted. Please try again.",
f"Cookie Parse Error: {cookie_str.decode('utf-8', 'replace')}",
)
return
human_time = cookiejar.get("human_time", None)
human_hmac = cookiejar.get("human_hmac", None)
if human_time and human_time.value.isdigit() and human_hmac:
if self.verify_human(int(human_time.value), human_hmac.value):
self.continue_test()
else:
self.error_response(
b"403",
b"Forbidden",
"I need to double-check that you're human; please resubmit.",
"Invalid human token",
)
elif captcha_token:
self.verify_captcha(captcha_token)
else:
self.error_response(
b"403",
b"Forbidden",
"I need to double-check that you're human; please resubmit.",
"Invalid captcha.",
)
def verify_captcha(self, presented_token: str) -> None:
exchange = token_client.exchange()
@thor.events.on(exchange)
def error(err_msg: HttpError) -> None:
self.error_response(
b"403",
b"Forbidden",
"There was a problem with the Captcha server; please try again soon.",
f"Captcha error: {err_msg}.",
)
@thor.events.on(exchange)
def response_start(
status: bytes, phrase: bytes, headers: RawHeaderListType
) -> None:
exchange.tmp_status = status
exchange.tmp_res_body = b""
@thor.events.on(exchange)
def response_body(chunk: bytes) -> None:
exchange.tmp_res_body += chunk
@thor.events.on(exchange)
def response_done(trailers: RawHeaderListType) -> None:
try:
results = json.loads(exchange.tmp_res_body)
except ValueError:
if exchange.tmp_status != b"200":
e_str = f"Captcha server returned {exchange.tmp_status.decode('utf-8')} status code"
else:
e_str = f"Captcha server response error"
self.error_response(
b"500",
b"Internal Server Error",
e_str,
e_str,
)
return
if results["success"]:
self.continue_test(self.issue_human())
else:
e_str = f"Captcha errors: {', '.join(results.get('error-codes', ['unknown error']))}"
self.error_response(
b"403",
b"Forbidden",
e_str,
e_str,
)
request_form = {
"secret": self.secret,
"response": presented_token,
"remoteip": self.client_id,
}
exchange.request_start(
b"POST",
b"https://hcaptcha.com/siteverify",
[[b"content-type", b"application/x-www-form-urlencoded"]],
)
exchange.request_body(urlencode(request_form).encode("utf-8", "replace"))
exchange.request_done({})
def issue_human(self) -> RawHeaderListType:
"""
Return cookie headers for later verification that this is a human.
"""
human_time = str(int(thor.time()) + self.token_lifetime)
human_hmac = hmac.new(
self.secret, bytes(human_time, "ascii"), "sha512"
).hexdigest()
return [
(
b"Set-Cookie",
f"human_time={human_time}; Max-Age={self.token_lifetime}; SameSite=Strict".encode(
"ascii"
),
),
(
b"Set-Cookie",
f"human_hmac={human_hmac}; Max-Age={self.token_lifetime}; SameSite=Strict".encode(
"ascii"
),
),
]
def verify_human(self, human_time: int, human_hmac: str) -> bool:
"""
Check the user's human HMAC.
"""
computed_hmac = hmac.new(self.secret, bytes(str(human_time), "ascii"), "sha512")
is_valid = human_hmac == computed_hmac.hexdigest()
if is_valid and human_time >= thor.time():
return True
else:
return False
|
[
"thor.http.get_header",
"json.loads",
"urllib.parse.urlencode",
"thor.time",
"thor.http.HttpClient",
"thor.events.on"
] |
[((323, 335), 'thor.http.HttpClient', 'HttpClient', ([], {}), '()\n', (333, 335), False, 'from thor.http import HttpClient, get_header\n'), ((2693, 2717), 'thor.events.on', 'thor.events.on', (['exchange'], {}), '(exchange)\n', (2707, 2717), False, 'import thor\n'), ((3009, 3033), 'thor.events.on', 'thor.events.on', (['exchange'], {}), '(exchange)\n', (3023, 3033), False, 'import thor\n'), ((3238, 3262), 'thor.events.on', 'thor.events.on', (['exchange'], {}), '(exchange)\n', (3252, 3262), False, 'import thor\n'), ((3365, 3389), 'thor.events.on', 'thor.events.on', (['exchange'], {}), '(exchange)\n', (3379, 3389), False, 'import thor\n'), ((1206, 1251), 'thor.http.get_header', 'get_header', (['self.webui.req_headers', "b'cookie'"], {}), "(self.webui.req_headers, b'cookie')\n", (1216, 1251), False, 'from thor.http import HttpClient, get_header\n'), ((3497, 3530), 'json.loads', 'json.loads', (['exchange.tmp_res_body'], {}), '(exchange.tmp_res_body)\n', (3507, 3530), False, 'import json\n'), ((5922, 5933), 'thor.time', 'thor.time', ([], {}), '()\n', (5931, 5933), False, 'import thor\n'), ((4751, 4774), 'urllib.parse.urlencode', 'urlencode', (['request_form'], {}), '(request_form)\n', (4760, 4774), False, 'from urllib.parse import urlencode\n'), ((5014, 5025), 'thor.time', 'thor.time', ([], {}), '()\n', (5023, 5025), False, 'import thor\n')]
|
from contextlib import contextmanager
from devpi_common.metadata import parse_requirement
from operator import itemgetter
from pluggy import HookimplMarker
from tempfile import NamedTemporaryFile
from subprocess import call
import appdirs
import attr
import json
import os
import textwrap
import traceback
client_hookimpl = HookimplMarker("devpiclient")
devpi_pr_data_dir = appdirs.user_data_dir("devpi-pr", "devpi")
def get_message_from_file(f):
lines = f.read().decode('utf-8').splitlines()
msg = '\n'.join(x for x in lines if not x.strip().startswith('#'))
return msg.strip()
def get_message(hub, msg):
if msg and msg.strip():
return msg
editor = os.environ.get("EDITOR")
if not editor:
hub.fatal("No EDITOR environment variable set.")
with NamedTemporaryFile(prefix="devpi-pr-", suffix=".txt") as tf:
tf.write(textwrap.dedent("""\n
# Please enter the message for your pull request.
# Lines starting with '#' will be ignored.
# An empty message aborts the current command.""").encode('utf-8'))
tf.flush()
try:
result = call([editor, tf.name])
except Exception as e:
hub.fatal(''.join(traceback.format_exception(e.__class__, e, None)))
if result != 0:
hub.fatal("Error (%s) calling editor %s" % (result, editor))
tf.seek(0)
msg = get_message_from_file(tf)
if not msg:
# try to reopen the file. vim seems to replace it.
with open(tf.name, 'rb') as f:
msg = get_message_from_file(f)
if msg:
return msg
hub.fatal("A message is required.")
@contextmanager
def devpi_pr_review_lock(hub):
if not os.path.exists(devpi_pr_data_dir):
os.mkdir(devpi_pr_data_dir)
lock_fn = os.path.join(devpi_pr_data_dir, "reviews.lock")
try:
with open(lock_fn, "x"):
yield
except FileExistsError:
hub.fatal(
"There is an existing lock at %s\n"
"This can happen if a previous devpi-pr command crashed. "
"If you are sure there is no other devpi-pr command still running, "
"you can remove the file." % lock_fn)
else:
if os.path.exists(lock_fn):
os.remove(lock_fn)
@contextmanager
def devpi_pr_review_data(hub):
with devpi_pr_review_lock(hub):
fn = os.path.join(devpi_pr_data_dir, "reviews.json")
if os.path.exists(fn):
with open(fn, "rb") as f:
data = f.read().decode("utf-8")
else:
data = ""
if not data:
original = None
info = {}
else:
original = json.loads(data)
info = dict(original)
yield info
if info != original:
with open(fn, "wb") as f:
f.write(json.dumps(info).encode("utf-8"))
def full_indexname(hub, prname):
if '/' in prname:
try:
user, prname = prname.split('/')
except ValueError:
hub.fatal("Invalid index name")
else:
user = hub.current.get_auth_user()
if user is None:
hub.fatal("not logged in")
return "%s/%s" % (user, prname)
@attr.s
class PRIndexInfos:
user = attr.ib(type=str)
index = attr.ib(type=str)
indexname = attr.ib(type=str)
url = attr.ib(type=str)
ixconfig = attr.ib(type=dict)
def require_pr_index(hub, name):
hub.requires_login()
current = hub.require_valid_current_with_index()
indexname = full_indexname(hub, name)
(user, index) = indexname.split('/')
url = current.get_index_url(indexname, slash=False)
result = hub.http_api("get", url, fatal=False)
if result.reason != 'OK':
hub.fatal("Couldn't access pr index '%s': %s" % (
name, result.reason))
ixconfig = result.result
if ixconfig['type'] != 'pr':
hub.fatal("The index '%s' is not a pr index" % name)
return PRIndexInfos(user, index, indexname, url, ixconfig)
def new_pr_arguments(parser):
""" Create a new pull request.
"""
parser.add_argument(
"name", metavar="NAME", type=str, action="store", nargs=1,
help="pull request name")
parser.add_argument(
"target", metavar="TARGETSPEC", type=str, nargs=1,
action="store",
help="target index of form 'USER/NAME'")
parser.add_argument(
"pkgspec", metavar="PKGSPEC", type=str, nargs="*",
default=None, action="store",
help="releases in format 'name==version' which are added to "
"this pull request.")
def new_pr(hub, args):
(name,) = args.name
(target,) = args.target
reqs = []
for pkgspec in args.pkgspec:
req = parse_requirement(pkgspec)
if len(req.specs) != 1 or req.specs[0][0] != '==':
hub.fatal(
"The release specification needs to be of this form: name==version")
reqs.append(req)
indexname = full_indexname(hub, name)
url = hub.current.get_index_url(indexname, slash=False)
hub.http_api("put", url, dict(
type="pr", bases=target,
states=["new"], messages=["New pull request"]))
for req in reqs:
hub.http_api(
"push",
hub.current.index,
kvdict=dict(
name=req.project_name,
version="%s" % req.specs[0][1],
targetindex=indexname),
fatal=True)
def abort_pr_review_arguments(parser):
""" Abort review of pull request.
"""
parser.add_argument(
"name", type=str, action="store", nargs=1,
help="pull request name")
def abort_pr_review(hub, args):
(name,) = args.name
indexinfos = require_pr_index(hub, name)
with devpi_pr_review_data(hub) as review_data:
if indexinfos.indexname in review_data:
hub.info("Aborted review of '%s'" % indexinfos.indexname)
del review_data[indexinfos.indexname]
else:
hub.error("No review of '%s' active" % indexinfos.indexname)
def approve_pr_arguments(parser):
""" Approve reviewed pull request.
"""
parser.add_argument(
"name", type=str, action="store", nargs=1,
help="pull request name")
parser.add_argument(
"-s", "--serial", type=str, action="store",
help="pull request serial, only required if not using 'review-pr' first")
parser.add_argument(
"-m", "--message", action="store",
help="Message to add on submit.")
parser.add_argument(
"-k", "--keep-index", action="store_true",
help="Keep the pr index instead of deleting it after approval.")
def approve_pr(hub, args):
(name,) = args.name
indexinfos = require_pr_index(hub, name)
serial = args.serial
if serial is None:
with devpi_pr_review_data(hub) as review_data:
if indexinfos.indexname not in review_data:
hub.fatal(
"No review data found for '%s', "
"it looks like you did not use review-pr or "
"you forgot the --serial option." % indexinfos.indexname)
serial = "%s" % review_data[indexinfos.indexname]
message = get_message(hub, args.message)
hub.http_api(
"patch", indexinfos.url, [
"states+=approved",
"messages+=%s" % message],
headers={'X-Devpi-PR-Serial': serial})
if not args.keep_index:
hub.http_api("delete", indexinfos.url)
with devpi_pr_review_data(hub) as review_data:
review_data.pop(indexinfos.indexname, None)
def list_prs_arguments(parser):
""" List pull requests.
"""
parser.add_argument(
"indexname", type=str, action="store", nargs="?",
help="index name, specified as NAME or USER/NAME. If no index "
"is specified use the current index")
parser.add_argument(
"-a", "--all-states", action="store_true",
help="Output normally hidden states.")
parser.add_argument(
"-m", "--messages", action="store_true",
help="Include state change messages in output.")
def merge_pr_data(data1, data2):
states = set(data1).union(data2)
result = {}
for state in states:
state_data = result[state] = {}
state_data1 = data1.get(state, {})
state_data2 = data2.get(state, {})
users = set(state_data1).union(state_data2)
for user in users:
user_data1 = set(
tuple(
(k, tuple(v) if isinstance(v, list) else v)
for k, v in x.items())
for x in state_data1.get(user, []))
user_data2 = set(
tuple(
(k, tuple(v) if isinstance(v, list) else v)
for k, v in x.items())
for x in state_data2.get(user, []))
state_data[user] = list(
dict(x)
for x in user_data1.union(user_data2))
return result
def get_prs(users_prs):
result = []
for user, prs in users_prs.items():
for pr in prs:
result.append(dict(pr, name="%s/%s" % (user, pr['name'])))
return sorted(result, key=itemgetter("name", "base", "last_serial"))
def create_pr_list_output(users_prs, review_data, include_messages):
out = []
prs = get_prs(users_prs)
longest_name = max(len(pr["name"]) for pr in prs)
longest_base = max(len(pr["base"]) for pr in prs)
longest_serial = max(len("%d" % pr["last_serial"]) for pr in prs)
fmt = "{0:<%d} -> {1:<%d} at serial {2:>%d}{3}" % (longest_name, longest_base, longest_serial)
for pr in prs:
if pr["name"] in review_data:
active = " (reviewing)"
else:
active = ""
out.append(fmt.format(
pr["name"], pr["base"], pr["last_serial"], active))
if not include_messages:
continue
for state, by, message in zip(pr['states'], pr['by'], pr['messages']):
out.append(" %s by %s:\n%s" % (
state, by, textwrap.indent(message, " ")))
out.append("")
return "\n".join(out)
def list_prs(hub, args):
indexname = args.indexname
current = hub.require_valid_current_with_index()
index_url = current.get_index_url(indexname, slash=False)
r = hub.http_api("get", index_url, fatal=False, type="indexconfig")
ixconfig = r.result or {}
hidden_states = set()
if not args.all_states:
hidden_states.add("approved")
pull_requests_allowed = ixconfig.get("pull_requests_allowed", False)
is_pr_index = ixconfig["type"] == "pr"
if pull_requests_allowed or is_pr_index:
list_url = index_url.asdir().joinpath("+pr-list")
r = hub.http_api("get", list_url, type="pr-list")
index_data = r.result
else:
index_data = {}
if not is_pr_index and not args.all_states:
hidden_states.add("new")
user = current.get_auth_user()
if user:
login_status = "logged in as %s" % user
else:
login_status = "not logged in"
hub.info("current devpi index: %s (%s)" % (current.index, login_status))
if user:
user_url = current.get_user_url(indexname)
list_url = user_url.asdir().joinpath("+pr-list")
r = hub.http_api("get", list_url, type="pr-list")
user_data = r.result
if is_pr_index and not args.all_states:
user_data.pop("new", None)
else:
user_data = {}
pr_data = merge_pr_data(index_data, user_data)
if not pr_data:
hub.line("no pull requests")
return
for state in sorted(pr_data):
if state in hidden_states:
continue
with devpi_pr_review_data(hub) as review_data:
out = create_pr_list_output(
pr_data[state], review_data, args.messages)
hub.line("%s pull requests" % state)
hub.line(textwrap.indent(out, " "))
def reject_pr_arguments(parser):
""" Reject pull request.
"""
parser.add_argument(
"name", type=str, action="store", nargs=1,
help="pull request name")
parser.add_argument(
"-m", "--message", action="store",
help="Message to add on reject.")
def reject_pr(hub, args):
(name,) = args.name
indexinfos = require_pr_index(hub, name)
message = get_message(hub, args.message)
hub.http_api("patch", indexinfos.url, [
"states+=rejected",
"messages+=%s" % message])
def review_pr_arguments(parser):
""" Start reviewing a submitted pull request.
"""
parser.add_argument(
"name", type=str, action="store", nargs=1,
help="pull request name")
parser.add_argument(
"-u", "--update", action="store_true",
help="Update the serial of the review.")
def review_pr(hub, args):
(name,) = args.name
indexinfos = require_pr_index(hub, name)
(targetindex,) = indexinfos.ixconfig['bases']
targeturl = hub.current.get_index_url(targetindex)
r = hub.http_api("get", targeturl.asdir().joinpath("+pr-list"), type="pr-list")
pending_prs = r.result.get("pending")
if not pending_prs:
hub.fatal("There are no pending PRs.")
users_prs = pending_prs.get(indexinfos.user)
for prs in users_prs:
if prs["name"] == indexinfos.index:
last_serial = prs["last_serial"]
break
else:
hub.fatal("Could not find PR '%s'." % indexinfos.indexname)
with devpi_pr_review_data(hub) as review_data:
if indexinfos.indexname in review_data:
if args.update:
hub.info("Updated review of '%s' to serial %s" % (
indexinfos.indexname, last_serial))
else:
hub.warn("Already reviewing '%s' at serial %s" % (
indexinfos.indexname, review_data[indexinfos.indexname]))
return
else:
hub.info(
"Started review of '%s' at serial %s" % (
indexinfos.indexname, last_serial))
review_data[indexinfos.indexname] = last_serial
def submit_pr_arguments(parser):
""" Submit an existing pull request for review.
"""
parser.add_argument(
"name", type=str, action="store", nargs=1,
help="pull request name")
parser.add_argument(
"-m", "--message", action="store",
help="Message to add on submit.")
def submit_pr(hub, args):
(name,) = args.name
indexinfos = require_pr_index(hub, name)
message = get_message(hub, args.message)
hub.http_api("patch", indexinfos.url, [
"states+=pending",
"messages+=%s" % message])
def cancel_pr_arguments(parser):
""" Cancel submitted state of pull request by submitter.
"""
parser.add_argument(
"name", type=str, action="store", nargs=1,
help="pull request name")
parser.add_argument(
"-m", "--message", action="store",
help="Message to add on cancel.")
def cancel_pr(hub, args):
(name,) = args.name
indexinfos = require_pr_index(hub, name)
message = get_message(hub, args.message)
hub.http_api("patch", indexinfos.url, [
"states+=new",
"messages+=%s" % message])
def delete_pr_arguments(parser):
""" Completely remove a pull request including any uploaded packages.
"""
parser.add_argument(
"name", type=str, action="store", nargs=1,
help="pull request name")
def delete_pr(hub, args):
(name,) = args.name
indexinfos = require_pr_index(hub, name)
hub.http_api("delete", indexinfos.url)
@client_hookimpl
def devpiclient_subcommands():
return [
(new_pr_arguments, "new-pr", "devpi_pr.client:new_pr"),
(submit_pr_arguments, "submit-pr", "devpi_pr.client:submit_pr"),
(list_prs_arguments, "list-prs", "devpi_pr.client:list_prs"),
(review_pr_arguments, "review-pr", "devpi_pr.client:review_pr"),
(abort_pr_review_arguments, "abort-pr-review", "devpi_pr.client:abort_pr_review"),
(approve_pr_arguments, "approve-pr", "devpi_pr.client:approve_pr"),
(reject_pr_arguments, "reject-pr", "devpi_pr.client:reject_pr"),
(cancel_pr_arguments, "cancel-pr", "devpi_pr.client:cancel_pr"),
(delete_pr_arguments, "delete-pr", "devpi_pr.client:delete_pr")]
|
[
"textwrap.dedent",
"tempfile.NamedTemporaryFile",
"os.mkdir",
"appdirs.user_data_dir",
"os.remove",
"json.loads",
"devpi_common.metadata.parse_requirement",
"traceback.format_exception",
"pluggy.HookimplMarker",
"attr.ib",
"os.path.exists",
"textwrap.indent",
"json.dumps",
"os.environ.get",
"subprocess.call",
"operator.itemgetter",
"os.path.join"
] |
[((326, 355), 'pluggy.HookimplMarker', 'HookimplMarker', (['"""devpiclient"""'], {}), "('devpiclient')\n", (340, 355), False, 'from pluggy import HookimplMarker\n'), ((376, 418), 'appdirs.user_data_dir', 'appdirs.user_data_dir', (['"""devpi-pr"""', '"""devpi"""'], {}), "('devpi-pr', 'devpi')\n", (397, 418), False, 'import appdirs\n'), ((684, 708), 'os.environ.get', 'os.environ.get', (['"""EDITOR"""'], {}), "('EDITOR')\n", (698, 708), False, 'import os\n'), ((1833, 1880), 'os.path.join', 'os.path.join', (['devpi_pr_data_dir', '"""reviews.lock"""'], {}), "(devpi_pr_data_dir, 'reviews.lock')\n", (1845, 1880), False, 'import os\n'), ((3297, 3314), 'attr.ib', 'attr.ib', ([], {'type': 'str'}), '(type=str)\n', (3304, 3314), False, 'import attr\n'), ((3327, 3344), 'attr.ib', 'attr.ib', ([], {'type': 'str'}), '(type=str)\n', (3334, 3344), False, 'import attr\n'), ((3361, 3378), 'attr.ib', 'attr.ib', ([], {'type': 'str'}), '(type=str)\n', (3368, 3378), False, 'import attr\n'), ((3389, 3406), 'attr.ib', 'attr.ib', ([], {'type': 'str'}), '(type=str)\n', (3396, 3406), False, 'import attr\n'), ((3422, 3440), 'attr.ib', 'attr.ib', ([], {'type': 'dict'}), '(type=dict)\n', (3429, 3440), False, 'import attr\n'), ((794, 847), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {'prefix': '"""devpi-pr-"""', 'suffix': '""".txt"""'}), "(prefix='devpi-pr-', suffix='.txt')\n", (812, 847), False, 'from tempfile import NamedTemporaryFile\n'), ((1748, 1781), 'os.path.exists', 'os.path.exists', (['devpi_pr_data_dir'], {}), '(devpi_pr_data_dir)\n', (1762, 1781), False, 'import os\n'), ((1791, 1818), 'os.mkdir', 'os.mkdir', (['devpi_pr_data_dir'], {}), '(devpi_pr_data_dir)\n', (1799, 1818), False, 'import os\n'), ((2259, 2282), 'os.path.exists', 'os.path.exists', (['lock_fn'], {}), '(lock_fn)\n', (2273, 2282), False, 'import os\n'), ((2413, 2460), 'os.path.join', 'os.path.join', (['devpi_pr_data_dir', '"""reviews.json"""'], {}), "(devpi_pr_data_dir, 'reviews.json')\n", (2425, 2460), 
False, 'import os\n'), ((2472, 2490), 'os.path.exists', 'os.path.exists', (['fn'], {}), '(fn)\n', (2486, 2490), False, 'import os\n'), ((4775, 4801), 'devpi_common.metadata.parse_requirement', 'parse_requirement', (['pkgspec'], {}), '(pkgspec)\n', (4792, 4801), False, 'from devpi_common.metadata import parse_requirement\n'), ((1144, 1167), 'subprocess.call', 'call', (['[editor, tf.name]'], {}), '([editor, tf.name])\n', (1148, 1167), False, 'from subprocess import call\n'), ((2296, 2314), 'os.remove', 'os.remove', (['lock_fn'], {}), '(lock_fn)\n', (2305, 2314), False, 'import os\n'), ((2722, 2738), 'json.loads', 'json.loads', (['data'], {}), '(data)\n', (2732, 2738), False, 'import json\n'), ((9258, 9299), 'operator.itemgetter', 'itemgetter', (['"""name"""', '"""base"""', '"""last_serial"""'], {}), "('name', 'base', 'last_serial')\n", (9268, 9299), False, 'from operator import itemgetter\n'), ((11982, 12010), 'textwrap.indent', 'textwrap.indent', (['out', '""" """'], {}), "(out, ' ')\n", (11997, 12010), False, 'import textwrap\n'), ((872, 1082), 'textwrap.dedent', 'textwrap.dedent', (['"""\n\n # Please enter the message for your pull request.\n # Lines starting with \'#\' will be ignored.\n # An empty message aborts the current command."""'], {}), '(\n """\n\n # Please enter the message for your pull request.\n # Lines starting with \'#\' will be ignored.\n # An empty message aborts the current command."""\n )\n', (887, 1082), False, 'import textwrap\n'), ((1229, 1277), 'traceback.format_exception', 'traceback.format_exception', (['e.__class__', 'e', 'None'], {}), '(e.__class__, e, None)\n', (1255, 1277), False, 'import traceback\n'), ((10124, 10160), 'textwrap.indent', 'textwrap.indent', (['message', '""" """'], {}), "(message, ' ')\n", (10139, 10160), False, 'import textwrap\n'), ((2883, 2899), 'json.dumps', 'json.dumps', (['info'], {}), '(info)\n', (2893, 2899), False, 'import json\n')]
|
import gc
import time
import logging
import aiohttp
import asyncio
import socket
import pytest
from aiohttp.test_utils import TestServer
import aioamqp
import aioamqp.channel
import aioamqp.protocol
import aiohttp.web
import asyncpg
from docker.client import DockerClient
from docker.utils import kwargs_from_env
from async_generator import yield_, async_generator
from aioapp.app import Application
# отключаем логи ошибок, чтоб не засирать вывод
# logging.basicConfig(level=logging.CRITICAL)
logging.basicConfig(
format='%(asctime)-15s %(message)s %(filename)s %(lineno)s %(funcName)s')
aioamqp.channel.logger.level = logging.CRITICAL
aioamqp.protocol.logger.level = logging.CRITICAL
@pytest.fixture(scope='session')
def event_loop():
asyncio.set_event_loop_policy(asyncio.DefaultEventLoopPolicy())
loop = asyncio.get_event_loop_policy().new_event_loop()
yield loop
gc.collect()
loop.close()
@pytest.fixture(scope='session')
def loop(event_loop):
return event_loop
def get_free_port():
sock = socket.socket()
try:
sock.bind(('', 0))
return sock.getsockname()[1]
finally:
sock.close()
@pytest.fixture(scope='session')
async def postgres(loop):
tag = 'latest'
image = 'postgres'
host = '127.0.0.1'
timeout = 60
unused_tcp_port = get_free_port()
client = DockerClient(version='auto', **kwargs_from_env())
client.images.pull(image, tag=tag)
print('Stating %s:%s on %s:%s' % (image, tag, host, unused_tcp_port))
cont = client.containers.run('%s:%s' % (image, tag), detach=True,
ports={'5432/tcp': ('0.0.0.0',
unused_tcp_port)})
try:
start_time = time.time()
conn = None
while conn is None:
if start_time + timeout < time.time():
raise Exception("Initialization timeout, failed to "
"initialize postgresql container")
try:
conn = await asyncpg.connect(
'postgresql://postgres@%s:%s/postgres'
'' % (host, unused_tcp_port),
loop=loop)
except Exception as e:
time.sleep(.1)
await conn.close()
yield (host, unused_tcp_port)
finally:
cont.kill()
cont.remove()
@pytest.fixture(scope='session')
async def rabbit(loop, rabbit_override_addr):
if rabbit_override_addr:
yield rabbit_override_addr.split(':')
return
tag = '3.7.1'
image = 'rabbitmq:{}'.format(tag)
host = '0.0.0.0'
timeout = 60
unused_tcp_port = get_free_port()
client = DockerClient(version='auto', **kwargs_from_env())
print('Stating rabbitmq %s on %s:%s' % (image, host, unused_tcp_port))
cont = client.containers.run(image, detach=True,
ports={'5672/tcp': ('0.0.0.0',
unused_tcp_port)})
try:
start_time = time.time()
conn = transport = None
while conn is None:
if start_time + timeout < time.time():
raise Exception("Initialization timeout, failed t o "
"initialize rabbitmq container")
try:
transport, conn = await aioamqp.connect(host, unused_tcp_port,
loop=loop)
except Exception:
time.sleep(.1)
await conn.close()
transport.close()
yield (host, unused_tcp_port)
finally:
cont.kill()
cont.remove()
@pytest.fixture
@async_generator
async def client(loop):
async with aiohttp.ClientSession(loop=loop) as client:
await yield_(client)
@pytest.fixture(scope='session')
def tracer_server(loop):
"""Factory to create a TestServer instance, given an app.
test_server(app, **kwargs)
"""
servers = []
async def go(**kwargs):
def tracer_handle(request):
return aiohttp.web.Response(text='', status=201)
app = aiohttp.web.Application()
app.router.add_post('/api/v2/spans', tracer_handle)
server = TestServer(app, port=None)
await server.start_server(loop=loop, **kwargs)
servers.append(server)
return server
yield go
async def finalize():
while servers:
await servers.pop().close()
loop.run_until_complete(finalize())
@pytest.fixture
async def app(tracer_server, loop):
tracer_host = '127.0.0.1'
tracer_port = (await tracer_server()).port
tracer_addr = 'http://%s:%s/' % (tracer_host, tracer_port)
app = Application(loop=loop)
app.setup_logging(tracer_driver='zipkin', tracer_addr=tracer_addr,
tracer_name='test')
yield app
await app.run_shutdown()
|
[
"aiohttp.web.Response",
"aioapp.app.Application",
"asyncpg.connect",
"logging.basicConfig",
"asyncio.DefaultEventLoopPolicy",
"socket.socket",
"pytest.fixture",
"aiohttp.test_utils.TestServer",
"asyncio.get_event_loop_policy",
"time.time",
"aiohttp.ClientSession",
"gc.collect",
"docker.utils.kwargs_from_env",
"time.sleep",
"aioamqp.connect",
"aiohttp.web.Application",
"async_generator.yield_"
] |
[((495, 593), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)-15s %(message)s %(filename)s %(lineno)s %(funcName)s"""'}), "(format=\n '%(asctime)-15s %(message)s %(filename)s %(lineno)s %(funcName)s')\n", (514, 593), False, 'import logging\n'), ((694, 725), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (708, 725), False, 'import pytest\n'), ((924, 955), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (938, 955), False, 'import pytest\n'), ((1160, 1191), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (1174, 1191), False, 'import pytest\n'), ((2392, 2423), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (2406, 2423), False, 'import pytest\n'), ((3833, 3864), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (3847, 3864), False, 'import pytest\n'), ((891, 903), 'gc.collect', 'gc.collect', ([], {}), '()\n', (901, 903), False, 'import gc\n'), ((1034, 1049), 'socket.socket', 'socket.socket', ([], {}), '()\n', (1047, 1049), False, 'import socket\n'), ((4737, 4759), 'aioapp.app.Application', 'Application', ([], {'loop': 'loop'}), '(loop=loop)\n', (4748, 4759), False, 'from aioapp.app import Application\n'), ((778, 810), 'asyncio.DefaultEventLoopPolicy', 'asyncio.DefaultEventLoopPolicy', ([], {}), '()\n', (808, 810), False, 'import asyncio\n'), ((1753, 1764), 'time.time', 'time.time', ([], {}), '()\n', (1762, 1764), False, 'import time\n'), ((3052, 3063), 'time.time', 'time.time', ([], {}), '()\n', (3061, 3063), False, 'import time\n'), ((3757, 3789), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {'loop': 'loop'}), '(loop=loop)\n', (3778, 3789), False, 'import aiohttp\n'), ((4149, 4174), 'aiohttp.web.Application', 'aiohttp.web.Application', ([], {}), '()\n', (4172, 4174), False, 'import aiohttp\n'), ((4252, 4278), 
'aiohttp.test_utils.TestServer', 'TestServer', (['app'], {'port': 'None'}), '(app, port=None)\n', (4262, 4278), False, 'from aiohttp.test_utils import TestServer\n'), ((823, 854), 'asyncio.get_event_loop_policy', 'asyncio.get_event_loop_policy', ([], {}), '()\n', (852, 854), False, 'import asyncio\n'), ((1385, 1402), 'docker.utils.kwargs_from_env', 'kwargs_from_env', ([], {}), '()\n', (1400, 1402), False, 'from docker.utils import kwargs_from_env\n'), ((2739, 2756), 'docker.utils.kwargs_from_env', 'kwargs_from_env', ([], {}), '()\n', (2754, 2756), False, 'from docker.utils import kwargs_from_env\n'), ((3815, 3829), 'async_generator.yield_', 'yield_', (['client'], {}), '(client)\n', (3821, 3829), False, 'from async_generator import yield_, async_generator\n'), ((4092, 4133), 'aiohttp.web.Response', 'aiohttp.web.Response', ([], {'text': '""""""', 'status': '(201)'}), "(text='', status=201)\n", (4112, 4133), False, 'import aiohttp\n'), ((1851, 1862), 'time.time', 'time.time', ([], {}), '()\n', (1860, 1862), False, 'import time\n'), ((3162, 3173), 'time.time', 'time.time', ([], {}), '()\n', (3171, 3173), False, 'import time\n'), ((2046, 2142), 'asyncpg.connect', 'asyncpg.connect', (["('postgresql://postgres@%s:%s/postgres' % (host, unused_tcp_port))"], {'loop': 'loop'}), "('postgresql://postgres@%s:%s/postgres' % (host,\n unused_tcp_port), loop=loop)\n", (2061, 2142), False, 'import asyncpg\n'), ((2254, 2269), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (2264, 2269), False, 'import time\n'), ((3369, 3418), 'aioamqp.connect', 'aioamqp.connect', (['host', 'unused_tcp_port'], {'loop': 'loop'}), '(host, unused_tcp_port, loop=loop)\n', (3384, 3418), False, 'import aioamqp\n'), ((3521, 3536), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (3531, 3536), False, 'import time\n')]
|
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.shortcuts import render
from django.views.decorators.http import require_POST
from django.views.defaults import page_not_found, server_error
import basket
import commonware
from commonware.response.decorators import xframe_allow
from django_browserid.views import Verify
from tower import ugettext as _
from affiliates.base.forms import NewsletterSubscriptionForm
from affiliates.base.http import JSONResponse
from affiliates.base.utils import redirect
from affiliates.facebook.utils import in_facebook_app
from affiliates.links.models import Link
log = commonware.log.getLogger('a.facebook')
def home(request):
if request.user.is_authenticated():
return redirect('base.dashboard')
else:
return render(request, 'base/home.html', {
'affiliate_count': User.objects.count(),
'link_count': Link.objects.count(),
'click_count': Link.objects.total_link_clicks(),
})
def about(request):
return render(request, 'base/about.html')
def terms(request):
return render(request, 'base/terms.html')
@login_required
def dashboard(request):
# Replace request.user and prefetch related items that we need.
request.user = (User.objects
.prefetch_related('link_set__datapoint_set',
'link_set__banner_variation')
.get(pk=request.user.pk))
# Sort links in python to use prefetched data
links = sorted(request.user.link_set.all(), lambda x, y: cmp(x.created, y.created))
return render(request, 'base/dashboard.html', {
'links': links,
})
@require_POST
def newsletter_subscribe(request):
form = NewsletterSubscriptionForm(request.POST)
if form.is_valid():
data = form.cleaned_data
try:
basket.subscribe(data['email'], 'affiliates',
format=data['format'], country=data['country'],
source_url=request.build_absolute_uri())
except basket.BasketException as e:
log.error('Error subscribing email {0} to mailing list: {1}'.format(data['email'], e))
return JSONResponse({'error': 'basket_error'}, status=500)
return JSONResponse({'success': 'success'})
@xframe_allow
def handler404(request):
if in_facebook_app(request):
return render(request, 'facebook/error.html', status=404)
else:
return page_not_found(request)
@xframe_allow
def handler500(request):
if in_facebook_app(request):
return render(request, 'facebook/error.html', status=500)
else:
return server_error(request)
def strings(request):
return render(request, 'base/strings.html')
class BrowserIDVerify(Verify):
def login_failure(self, msg=None):
if not msg:
msg = _('Login failed. Firefox Affiliates has stopped accepting new users.')
messages.error(self.request, msg)
return JSONResponse({'redirect': self.failure_url})
|
[
"affiliates.base.forms.NewsletterSubscriptionForm",
"django.contrib.auth.models.User.objects.prefetch_related",
"affiliates.base.http.JSONResponse",
"django.contrib.messages.error",
"tower.ugettext",
"django.views.defaults.server_error",
"commonware.log.getLogger",
"affiliates.base.utils.redirect",
"affiliates.links.models.Link.objects.count",
"django.shortcuts.render",
"affiliates.facebook.utils.in_facebook_app",
"django.views.defaults.page_not_found",
"django.contrib.auth.models.User.objects.count",
"affiliates.links.models.Link.objects.total_link_clicks"
] |
[((708, 746), 'commonware.log.getLogger', 'commonware.log.getLogger', (['"""a.facebook"""'], {}), "('a.facebook')\n", (732, 746), False, 'import commonware\n'), ((1116, 1150), 'django.shortcuts.render', 'render', (['request', '"""base/about.html"""'], {}), "(request, 'base/about.html')\n", (1122, 1150), False, 'from django.shortcuts import render\n'), ((1183, 1217), 'django.shortcuts.render', 'render', (['request', '"""base/terms.html"""'], {}), "(request, 'base/terms.html')\n", (1189, 1217), False, 'from django.shortcuts import render\n'), ((1690, 1746), 'django.shortcuts.render', 'render', (['request', '"""base/dashboard.html"""', "{'links': links}"], {}), "(request, 'base/dashboard.html', {'links': links})\n", (1696, 1746), False, 'from django.shortcuts import render\n'), ((1824, 1864), 'affiliates.base.forms.NewsletterSubscriptionForm', 'NewsletterSubscriptionForm', (['request.POST'], {}), '(request.POST)\n', (1850, 1864), False, 'from affiliates.base.forms import NewsletterSubscriptionForm\n'), ((2366, 2402), 'affiliates.base.http.JSONResponse', 'JSONResponse', (["{'success': 'success'}"], {}), "({'success': 'success'})\n", (2378, 2402), False, 'from affiliates.base.http import JSONResponse\n'), ((2451, 2475), 'affiliates.facebook.utils.in_facebook_app', 'in_facebook_app', (['request'], {}), '(request)\n', (2466, 2475), False, 'from affiliates.facebook.utils import in_facebook_app\n'), ((2640, 2664), 'affiliates.facebook.utils.in_facebook_app', 'in_facebook_app', (['request'], {}), '(request)\n', (2655, 2664), False, 'from affiliates.facebook.utils import in_facebook_app\n'), ((2814, 2850), 'django.shortcuts.render', 'render', (['request', '"""base/strings.html"""'], {}), "(request, 'base/strings.html')\n", (2820, 2850), False, 'from django.shortcuts import render\n'), ((823, 849), 'affiliates.base.utils.redirect', 'redirect', (['"""base.dashboard"""'], {}), "('base.dashboard')\n", (831, 849), False, 'from affiliates.base.utils import redirect\n'), ((2492, 
2542), 'django.shortcuts.render', 'render', (['request', '"""facebook/error.html"""'], {'status': '(404)'}), "(request, 'facebook/error.html', status=404)\n", (2498, 2542), False, 'from django.shortcuts import render\n'), ((2568, 2591), 'django.views.defaults.page_not_found', 'page_not_found', (['request'], {}), '(request)\n', (2582, 2591), False, 'from django.views.defaults import page_not_found, server_error\n'), ((2681, 2731), 'django.shortcuts.render', 'render', (['request', '"""facebook/error.html"""'], {'status': '(500)'}), "(request, 'facebook/error.html', status=500)\n", (2687, 2731), False, 'from django.shortcuts import render\n'), ((2757, 2778), 'django.views.defaults.server_error', 'server_error', (['request'], {}), '(request)\n', (2769, 2778), False, 'from django.views.defaults import page_not_found, server_error\n'), ((3040, 3073), 'django.contrib.messages.error', 'messages.error', (['self.request', 'msg'], {}), '(self.request, msg)\n', (3054, 3073), False, 'from django.contrib import messages\n'), ((3089, 3133), 'affiliates.base.http.JSONResponse', 'JSONResponse', (["{'redirect': self.failure_url}"], {}), "({'redirect': self.failure_url})\n", (3101, 3133), False, 'from affiliates.base.http import JSONResponse\n'), ((1347, 1437), 'django.contrib.auth.models.User.objects.prefetch_related', 'User.objects.prefetch_related', (['"""link_set__datapoint_set"""', '"""link_set__banner_variation"""'], {}), "('link_set__datapoint_set',\n 'link_set__banner_variation')\n", (1376, 1437), False, 'from django.contrib.auth.models import User\n'), ((2961, 3031), 'tower.ugettext', '_', (['"""Login failed. Firefox Affiliates has stopped accepting new users."""'], {}), "('Login failed. 
Firefox Affiliates has stopped accepting new users.')\n", (2962, 3031), True, 'from tower import ugettext as _\n'), ((942, 962), 'django.contrib.auth.models.User.objects.count', 'User.objects.count', ([], {}), '()\n', (960, 962), False, 'from django.contrib.auth.models import User\n'), ((990, 1010), 'affiliates.links.models.Link.objects.count', 'Link.objects.count', ([], {}), '()\n', (1008, 1010), False, 'from affiliates.links.models import Link\n'), ((1039, 1071), 'affiliates.links.models.Link.objects.total_link_clicks', 'Link.objects.total_link_clicks', ([], {}), '()\n', (1069, 1071), False, 'from affiliates.links.models import Link\n'), ((2302, 2353), 'affiliates.base.http.JSONResponse', 'JSONResponse', (["{'error': 'basket_error'}"], {'status': '(500)'}), "({'error': 'basket_error'}, status=500)\n", (2314, 2353), False, 'from affiliates.base.http import JSONResponse\n')]
|
from os import path
from rinoh.font import Typeface
from rinoh.font.style import REGULAR, BOLD, ITALIC, CONDENSED
from rinoh.font.opentype import OpenTypeFont
__all__ = ['typeface']
def otf(style, variant=''):
filename = 'texgyreheros{}-{}.otf'.format(variant, style)
return path.join(path.dirname(__file__), filename)
typeface = Typeface('TeX Gyre Heros',
OpenTypeFont(otf('regular'), weight=REGULAR),
OpenTypeFont(otf('italic'), weight=REGULAR, slant=ITALIC),
OpenTypeFont(otf('bold'), weight=BOLD),
OpenTypeFont(otf('bolditalic'), weight=BOLD, slant=ITALIC),
OpenTypeFont(otf('regular', 'cn'),
width=CONDENSED, weight=REGULAR),
OpenTypeFont(otf('italic', 'cn'),
width=CONDENSED, weight=REGULAR, slant=ITALIC),
OpenTypeFont(otf('bold', 'cn'),
width=CONDENSED, weight=BOLD),
OpenTypeFont(otf('bolditalic', 'cn'),
width=CONDENSED, weight=BOLD, slant=ITALIC))
|
[
"os.path.dirname"
] |
[((298, 320), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (310, 320), False, 'from os import path\n')]
|