"""
A subclass of tkinter.PhotoImage that connects a
vtkImageData to a photo widget.
Created by <NAME>, August 2002
"""
from __future__ import absolute_import
import sys
if sys.hexversion < 0x03000000:
# for Python2
import Tkinter as tkinter
else:
# for Python3
import tkinter
from .vtkLoadPythonTkWidgets import vtkLoadPythonTkWidgets
class vtkTkPhotoImage(tkinter.PhotoImage):
    """
    A subclass of PhotoImage with helper functions
    for displaying vtkImageData
    """
    def __init__(self, **kw):
        # Call the superclass initializer, re-expanding the collected
        # keyword arguments
        tkinter.PhotoImage.__init__(self, **kw)
        vtkLoadPythonTkWidgets(self.tk)

    def PutImageSlice(self, image, z, orientation='transverse', window=256, level=128):
        # Build and evaluate the Tcl command that copies slice z of the
        # vtkImageData into this photo image
        t = str(image.__this__)
        s = 'vtkImageDataToTkPhoto %s %s %d %s %d %d' % (
            t, self.name, z, orientation, window, level)
        self.tk.eval(s)
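# A minimal usage sketch, assuming a VTK build with Tk support (the legacy
# vtkImageDataToTkPhoto Tcl command); the reader class choice and the file
# name 'slice.png' are illustrative assumptions, not part of the original.
if __name__ == '__main__':
    import vtk
    root = tkinter.Tk()
    reader = vtk.vtkPNGReader()
    reader.SetFileName('slice.png')   # hypothetical input file
    reader.Update()
    photo = vtkTkPhotoImage(width=256, height=256)
    label = tkinter.Label(root, image=photo)
    label.pack()
    # Copy slice 0 of the image data into the photo widget
    photo.PutImageSlice(reader.GetOutput(), 0)
    root.mainloop()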
# python: 3.6
# encoding: utf-8
import torch
import torch.nn as nn
from fastNLP.modules.utils import initial_parameter
# import torch.nn.functional as F
class Conv(nn.Module):
"""Basic 1-d convolution module, initialized with xavier_uniform.
:param int in_channels:
:param int out_channels:
:param tuple kernel_size:
:param int stride:
:param int padding:
:param int dilation:
:param int groups:
:param bool bias:
:param str activation:
:param str initial_method:
"""
def __init__(self, in_channels, out_channels, kernel_size,
stride=1, padding=0, dilation=1,
groups=1, bias=True, activation='relu', initial_method=None):
super(Conv, self).__init__()
self.conv = nn.Conv1d(
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
stride=stride,
padding=padding,
dilation=dilation,
groups=groups,
bias=bias)
# xavier_uniform_(self.conv.weight)
activations = {
'relu': nn.ReLU(),
'tanh': nn.Tanh()}
if activation in activations:
self.activation = activations[activation]
else:
            raise ValueError(
                'Should choose activation function from: ' +
                ', '.join(activations))
initial_parameter(self, initial_method)
def forward(self, x):
x = torch.transpose(x, 1, 2) # [N,L,C] -> [N,C,L]
x = self.conv(x) # [N,C_in,L] -> [N,C_out,L]
x = self.activation(x)
x = torch.transpose(x, 1, 2) # [N,C,L] -> [N,L,C]
return x
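# A minimal shape check, assuming fastNLP is installed (Conv depends on its
# initial_parameter helper); the sizes below are arbitrary illustrations.
if __name__ == '__main__':
    conv = Conv(in_channels=8, out_channels=16, kernel_size=3, padding=1)
    x = torch.randn(4, 20, 8)  # [N, L, C_in]
    y = conv(x)                # padding=1 with kernel_size=3 preserves L
    print(y.shape)             # torch.Size([4, 20, 16])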
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
assemble.py
This module finds and forms essential structure components, which are the
smallest building blocks that form every repeat in the song.
These functions ensure that each time step of a song is contained in at most
one of the song's essential structure components by checking that there are no
overlapping repeats in time. When repeats overlap, they undergo a process
where they are divided until there are only non-overlapping pieces left.
The module contains the following functions:
* breakup_overlaps_by_intersect
    Extracts the repeats in input_pattern_obj (which records the starting
    indices of the repeats) into the essential structure components, using
    bw_vec (which records the length of each repeat).
* check_overlaps
Compares every pair of groups, determining if there are any repeats
in any pairs of the groups that overlap.
* __compare_and_cut
Compares two rows of repeats labeled RED and BLUE, and determines if
there are any overlaps in time between them. If there are overlaps,
we cut the repeats in RED and BLUE into up to 3 pieces.
* __num_of_parts
Determines the number of blocks of consecutive time steps in a list
of time steps. A block of consecutive time steps represents a
distilled section of a repeat.
* __inds_to_rows
Expands a vector containing the starting indices of a piece or two
of a repeat into a matrix representation recording when these pieces
occur in the song with 1's. All remaining entries are marked with
0's.
* __merge_based_on_length
Merges repeats that are the same length, as set by full_bandwidth,
and are repeats of the same piece of structure.
* __merge_rows
Merges rows that have at least one common repeat. These common
repeat(s) must occur at the same time step and be of a common length.
* hierarchical_structure
Distills the repeats encoded in matrix_no_overlaps (and key_no_overlaps)
to the essential structure components and then builds the hierarchical
representation. Optionally outputs visualizations of the hierarchical
representations.
"""
import numpy as np
from inspect import signature
from .search import find_all_repeats, find_complete_list_anno_only
from .utilities import reconstruct_full_block, get_annotation_lst, get_y_labels
from .transform import remove_overlaps
import matplotlib.pyplot as plt
import matplotlib.ticker as plticker
def breakup_overlaps_by_intersect(input_pattern_obj, bw_vec, thresh_bw):
"""
    Extracts the repeats in input_pattern_obj (which records the starting
    indices of the repeats) into the essential structure components, using
    bw_vec (which records the length of each repeat). The essential
    structure components are the smallest building blocks that form every
    repeat in the song.
Args
----
input_pattern_obj : np.ndarray
Binary matrix with 1's where repeats begin
and 0's otherwise.
bw_vec : np.ndarray
Vector containing the lengths of the repeats
encoded in input_pattern_obj.
thresh_bw : int
Smallest allowable repeat length.
Returns
-------
    pattern_no_overlaps : np.ndarray
Binary matrix with 1's where repeats of
essential structure components begin.
pattern_no_overlaps_key : np.ndarray
Vector containing the lengths of the repeats
of essential structure components in
pattern_no_overlaps.
"""
sig = signature(breakup_overlaps_by_intersect)
params = sig.parameters
if len(params) < 3:
T = 0
else:
T = thresh_bw
if bw_vec.ndim == 1:
# Convert a 1D array into 2D vector
bw_vec = bw_vec[None, :].reshape(-1, 1)
# Initialize input_pattern_obj
pno = input_pattern_obj
# Sort bw_vec and pattern_no_overlaps (pno) so that we process the
# biggest pieces first
# Part 1: Sort the lengths in bw_vec in descending order
desc_bw_vec = np.sort(bw_vec)[::-1] # [::-1] reverses order
# Part 2: Sort the indices of bw_vec in descending order
bw_inds = np.flip(np.argsort(bw_vec, axis=0))
row_bw_inds = np.transpose(bw_inds).flatten()
pno = pno[row_bw_inds, :]
T_inds = np.nonzero(bw_vec == T)
T_inds = np.array(T_inds) - 1
if T_inds.size == 0:
T_inds = max(bw_vec.shape)
pno_block = reconstruct_full_block(pno, desc_bw_vec)
# Check stopping condition -- Are there overlaps?
while np.sum(np.sum(pno_block[:T_inds, :], axis=0) > 1) > 0:
# Find all overlaps by comparing the rows of repeats pairwise
overlaps_pno_block = check_overlaps(pno_block)
# Remove the rows with bandwidth T or less from consideration
overlaps_pno_block[T_inds:, ] = 0
overlaps_pno_block[:, T_inds:] = 0
# Find the first two groups of repeats that overlap, calling one group
# RED and the other group BLUE
[ri, bi] = overlaps_pno_block.nonzero()
ri = ri[0]
bi = bi[0]
# RED overlap
red = pno[ri, :]
RL = desc_bw_vec[ri, :]
# BLUE overlap
blue = pno[bi, :]
BL = desc_bw_vec[bi, :]
# Compare the repeats in RED and BLUE, cutting the repeats in those
# groups into non-overlapping pieces
union_mat, union_length = __compare_and_cut(red, RL, blue, BL)
pno = np.delete(pno, [ri, bi], axis=0)
bw_vec = np.delete(desc_bw_vec, [ri, bi], axis=0)
# Stack the new repeats
if union_mat.size != 0:
pno = np.vstack((pno, union_mat))
bw_vec = np.vstack((bw_vec, union_length))
        # Check whether there are any repeats of length 1 that should be
        # merged into other groups of repeats of length 1, and merge them
        # if necessary
if sum(union_length == 1) > 0:
pno, bw_vec = __merge_based_on_length(pno, bw_vec, 1)
# AGAIN, Sort bw_vec and pno so that we process the biggest
# pieces first
# Part 1: Sort the lengths in bw_vec and indices in descending order
desc_bw_vec = np.sort(bw_vec, axis=0)[::-1]
bw_inds = np.flip(np.argsort(bw_vec, axis=0))
row_bw_inds = np.transpose(bw_inds).flatten()
pno = pno[row_bw_inds, :]
# Find the first row that contains repeats of length less than T and
# remove these rows from consideration during the next check of the
# stopping condition
T_inds = np.amin(desc_bw_vec == T) - 1
if T_inds < 0:
T_inds = np.array([])
else:
T_inds = np.array(T_inds) # T_inds is converted into an array
if T_inds.size == 0:
T_inds = max(desc_bw_vec.shape)
pno_block = reconstruct_full_block(pno, desc_bw_vec)
# Sort the lengths in bw_vec in ascending order
bw_vec = np.sort(desc_bw_vec, axis=0)
# Sort the indices of bw_vec in ascending order
bw_inds = np.argsort(desc_bw_vec, axis=0)
pattern_no_overlaps = pno[bw_inds, :].reshape((pno.shape[0], -1))
pattern_no_overlaps_key = bw_vec
output = (pattern_no_overlaps, pattern_no_overlaps_key)
return output
def check_overlaps(input_mat):
"""
Compares every pair of groups and determines if there are any repeats in
any pairs of the groups that overlap.
Args
----
input_mat : np.array[int]
Matrix to be checked for overlaps.
Returns
-------
overlaps_yn : np.array[bool]
Logical array where (i,j) = 1 if row i of input matrix and row j
of input matrix overlap and (i,j) = 0 elsewhere.
"""
# Get the number of rows and columns
rs = input_mat.shape[0]
ws = input_mat.shape[1]
# compare_left -- Every row of input_mat is repeated rs times to create
# a sub-matrix. We stack these sub-matrices on top of each other.
compare_left = np.zeros(((rs * rs), ws))
for i in range(rs):
compare_add = input_mat[i, :]
compare_add_mat = np.tile(compare_add, (rs, 1))
a = i * rs
b = (i + 1) * rs
compare_left[a:b, :] = compare_add_mat
# compare_right -- Stack rs copies of input_mat on top of itself
compare_right = np.tile(input_mat, (rs, 1))
# If input_mat is not binary, create binary temporary objects
compare_left = compare_left > 0
compare_right = compare_right > 0
# Empty matrix to store overlaps
compare_all = np.zeros((compare_left.shape[0], 1))
# For each row
for i in range(compare_left.shape[0]):
# Create new counter
num_overlaps = 0
for j in range(compare_left.shape[1]):
if compare_left[i, j] == 1 and compare_right[i, j] == 1:
# inc count
num_overlaps = num_overlaps + 1
# Append num_overlaps to matrix
compare_all[i, 0] = num_overlaps
compare_all = compare_all > 0
overlap_mat = np.reshape(compare_all, (rs, rs))
# If overlap_mat is symmetric, only keep the upper-triangular portion.
# If not, keep all of overlap_mat.
check_mat = np.allclose(overlap_mat, overlap_mat.T)
if check_mat:
overlap_mat = np.triu(overlap_mat, 1)
overlaps_yn = overlap_mat
return overlaps_yn
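# Illustrative only -- a small worked example of check_overlaps, assuming
# the function above is in scope. Rows 0 and 2 both start a repeat at
# column 0, so (0, 2) is the only overlapping pair in the upper triangle:
#
#     toy = np.array([[1, 0, 1, 0],
#                     [0, 1, 0, 0],
#                     [1, 0, 0, 0]])
#     check_overlaps(toy)
#     # array([[False, False,  True],
#     #        [False, False, False],
#     #        [False, False, False]])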
def __compare_and_cut(red, red_len, blue, blue_len):
"""
Compares two rows of repeats labeled RED and BLUE, and determines if there
are any overlaps in time between them. If there is, then we cut the
repeats in RED and BLUE into up to 3 pieces.
Args
----
red : np.ndarray
Binary row vector encoding a set of repeats with 1's where each
repeat starts and 0's otherwise.
red_len : int
Length of repeats encoded in red.
blue : np.ndarray
Binary row vector encoding a set of repeats with 1's where each
repeat starts and 0's otherwise.
blue_len : int
Length of repeats encoded in blue.
Returns
-------
union_mat : np.ndarray
Binary matrix representation of up to three rows encoding
non-overlapping repeats cut from red and blue.
union_length : np.ndarray
Vector containing the lengths of the repeats encoded in union_mat.
"""
# Find the total time steps in red
sn = red.shape[0]
assert sn == blue.shape[0]
# Find all starting indices in red and store them as a 2d array
start_red = np.flatnonzero(red)
start_red = start_red[None, :]
# Find all starting indices in blue and store them as a 2d array
start_blue = np.flatnonzero(blue)
start_blue = start_blue[None, :]
# Determine if the rows have any intersections
red_block = reconstruct_full_block(red, red_len)
blue_block = reconstruct_full_block(blue, blue_len)
# Find the intersection of red and blue
red_block = red_block > 0
blue_block = blue_block > 0
purple_block = np.logical_and(red_block, blue_block)
# If there is any intersection between the rows, then start comparing one
# repeat in red to one repeat in blue
if purple_block.sum() > 0:
# Find the number of blocks in red and in blue
lsr = max(start_red.shape)
lsb = max(start_blue.shape)
# Build the pairs of starting indices to search, where each pair
# contains a starting index in red and a starting index in blue
red_inds = np.tile(start_red.transpose(), (lsb, 1))
blue_inds = np.tile(start_blue, (lsr, 1))
tem_blue = blue_inds[0][0]
for i in range(0, blue_inds.shape[1]):
for j in range(0, blue_inds.shape[0]):
tem_blue = np.vstack((tem_blue, blue_inds[j][i]))
tem_blue = np.delete(tem_blue, 1, 0)
compare_inds = np.concatenate((tem_blue, red_inds), axis=1)
# Initialize the output variables union_mat and union_length
union_mat = np.array([])
union_length = np.array([])
# Loop over all pairs of starting indices
for start_ind in range(0, lsr * lsb):
# Isolate one repeat in red and one repeat in blue
ri = compare_inds[start_ind, 1]
bi = compare_inds[start_ind, 0]
red_ri = np.arange(ri, ri + red_len)
blue_bi = np.arange(bi, bi + blue_len)
# Determine if the blocks intersect and call the intersection
# purple
purple = np.intersect1d(red_ri, blue_bi)
if purple.size != 0:
# Remove purple from red_ri, call it red_minus_purple
red_minus_purple = np.setdiff1d(red_ri, purple)
# If red_minus_purple is not empty, then see if there are one
# or two parts in red_minus_purple.
# Then cut purple out of all of the repeats in red.
if red_minus_purple.size != 0:
# red_length_vec will have the length(s) of the parts in
# new_red
red_start_mat, red_length_vec = __num_of_parts(
red_minus_purple, ri, start_red
)
# If there are two parts left in red_minus_purple, then
# the new variable new_red, which holds the part(s) of
# red_minus_purple, should have two rows with 1's for the
# starting indices of the resulting pieces and 0's
# elsewhere.
new_red = __inds_to_rows(red_start_mat, sn)
else:
# If red_minus_purple is empty, then set new_red and
# red_length_vec to empty
new_red = np.array([])
red_length_vec = np.array([])
# Noting that purple is only one part and in both red_ri and
# blue_bi, then we need to find where the purple starting
# indices are in all the red_ri
purple_in_red_mat, purple_length_vec = __num_of_parts(
purple, ri, start_red
)
blue_minus_purple = np.setdiff1d(blue_bi, purple)
# If blue_minus_purple is not empty, then see if there are one
# or two parts in blue_minus_purple. Then cut purple out of
# all of the repeats in blue.
if blue_minus_purple.size != 0:
blue_start_mat, blue_length_vec = __num_of_parts(
blue_minus_purple, bi, start_blue
)
new_blue = __inds_to_rows(blue_start_mat, sn)
# If there are two parts left in blue_minus_purple, then the
# new variable new_blue, which holds the part(s) of
# blue_minus_purple, should have two rows with 1's for the
# starting indices of the resulting pieces and 0's elsewhere.
else:
# If blue_minus_purple is empty, then set new_blue and
# blue_length_vec to empty
new_blue = np.array([])
# Also blue_length_vec will have the length(s) of the
# parts in new_blue.
blue_length_vec = np.array([])
                # Recalling that purple is only one part, contained in both
                # red_ri and blue_bi, we need to find where the purple
                # starting indices fall across all of the repeats in blue
purple_in_blue_mat, purple_length = __num_of_parts(
purple, bi, start_blue
)
# Union purple_in_red_mat and purple_in_blue_mat to get
# purple_start, which stores all the purple indices
purple_start = np.union1d(purple_in_red_mat[0],
purple_in_blue_mat[0])
# Use purple_start to get new_purple with 1's where the repeats
# in the purple rows start and 0 otherwise.
new_purple = __inds_to_rows(purple_start, sn)
if new_red.size != 0 or new_blue.size != 0:
# Form the outputs
# Use the condition check to avoid errors when stacking
# an empty array
if new_red.size != 0 and new_blue.size == 0:
union_mat = np.vstack((new_red, new_purple))
union_length = np.vstack((red_length_vec,
purple_length))
elif new_red.size == 0 and new_blue.size != 0:
union_mat = np.vstack((new_blue, new_purple))
union_length = np.vstack((blue_length_vec,
purple_length))
else:
union_mat = np.vstack((new_red, new_blue, new_purple))
union_length = np.vstack(
(red_length_vec, blue_length_vec, purple_length)
)
# Merge repeats that are the same length
union_mat, union_length = __merge_based_on_length(
union_mat, union_length, union_length
)
# When we find union_mat and union_length in this group,
# we break out of the for loop to add them to our final
# output
break
elif new_red.size == 0 and new_blue.size == 0:
new_purple_block = reconstruct_full_block(
new_purple, np.array([purple_length])
)
# Only add the new repeat which has no overlaps
if max(new_purple_block[0]) < 2:
union_mat = new_purple
union_length = np.array([purple_length])
break
# Check that there are no overlaps in each row of union_mat
union_mat_add = np.empty((0, sn), int)
union_mat_add_length = np.empty((0, 1), int)
union_mat_rminds = np.empty((0, 1), int)
# Isolate one row at a time, call it union_row
for i in range(0, union_mat.shape[0]):
union_row = union_mat[i, :]
union_row_width = np.array([union_length[i]])
union_row_block = reconstruct_full_block(union_row, union_row_width)
# If there is at least one overlap, then compare and cut that row
# until there are no overlaps
if (np.sum(union_row_block[0] > 1)) > 0:
union_mat_rminds = np.vstack((union_mat_rminds, i))
union_row_new, union_row_new_length = __compare_and_cut(
union_row, union_row_width, union_row, union_row_width
)
# Add union_row_new and union_row_new_length to union_mat_add and
# union_mat_add_length, respectively
union_mat_add = np.vstack((union_mat_add, union_row_new))
union_mat_add_length = np.vstack(
(union_mat_add_length, union_row_new_length)
)
# Remove the old rows from union_mat (as well as the old lengths from
# union_length)
if union_mat_rminds.size != 0:
union_mat = np.delete(union_mat, union_mat_rminds, axis=0)
union_length = np.delete(union_length, union_mat_rminds)
# Add union_row_new and union_row_new_length to union_mat and
# union_length, respectively, such that union_mat is in order by
# lengths in union_length
if union_mat_add.size != 0:
union_mat = np.vstack((union_mat, union_mat_add))
if union_mat_add_length.size != 0:
union_length = np.vstack((np.array([union_length]).T,
union_mat_add_length))
# Make sure union_length is a 2d vector
if union_length.ndim == 1:
union_length = np.array([union_length]).T
if union_mat.size != 0:
total_array = np.hstack((union_mat, union_length))
# Sort the total_array and form the final output
total_array = total_array[np.argsort(total_array[:, -1])]
union_mat = total_array[:, 0:sn]
union_length = np.array([total_array[:, -1]]).T
output = (union_mat, union_length)
return output
def __num_of_parts(input_vec, input_start, input_all_starts):
"""
Determines the number of blocks of consecutive
time steps in a list of time steps. A block of consecutive time steps
represents a distilled section of a repeat. This distilled section will be
replicated and the starting indices of the repeats within it will be
returned.
Args
----
    input_vec : np.ndarray
        Vector containing one or two parts of a repeat that overlap in
        time and may need to be replicated.
input_start : np.ndarray
Starting index for the part to be replicated.
input_all_starts : np.ndarray
Starting indices for replication.
Returns
-------
start_mat : np.ndarray
Array of one or two rows, containing the starting indices of the
replicated repeats.
length_vec : np.ndarray
Column vector containing the lengths of the replicated parts.
"""
# Determine where input_vec has a break
diff_vec = np.subtract(input_vec[1:], input_vec[:-1])
diff_vec = np.insert(diff_vec, 0, 1)
break_mark = np.where(diff_vec > 1)[0]
# If input_vec is consecutive
if sum(break_mark) == 0:
# Initialize start_vec and end_vec
start_vec = input_vec[0]
end_vec = input_vec[-1]
# Find the difference between the starts
add_vec = start_vec - input_start
# Find the new start of the distilled section
start_mat = input_all_starts + add_vec
# Else if input_vec has a break
else:
# Initialize start_vec and end_vec
start_vec = np.zeros((2, 1))
end_vec = np.zeros((2, 1))
# Find the start and end time step of the first part
start_vec[0] = input_vec[0]
end_vec[0] = input_vec[break_mark - 1]
# Find the start and end time step of the second part
start_vec[1] = input_vec[break_mark]
end_vec[1] = input_vec[-1]
# Find the difference between the starts
add_vec = np.array(start_vec - input_start).astype(int)
# Make sure input_all_starts contains only integers
input_all_starts = np.array(input_all_starts).astype(int)
# Create start_mat with two parts
start_mat = np.vstack(
(input_all_starts + add_vec[0], input_all_starts + add_vec[1])
)
# Get the length of the new repeats
length_vec = (end_vec - start_vec + 1).astype(int)
# Create output
output = (start_mat, length_vec)
return output
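# Illustrative only -- worked examples of __num_of_parts, assuming the
# helper above is in scope. A consecutive input has no break, so one part
# is returned; an input with a gap is split into two parts, each offset
# from every starting index:
#
#     __num_of_parts(np.array([3, 4, 5]), 3, np.array([[0, 10]]))
#     # -> (array([[ 0, 10]]), 3)
#
#     __num_of_parts(np.array([3, 4, 8, 9]), 3, np.array([[0, 10]]))
#     # -> (array([[ 0, 10],
#     #            [ 5, 15]]), array([[2],
#     #                               [2]]))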
def __inds_to_rows(start_mat, row_length):
"""
Expands a vector containing the starting indices of a piece or two of a
repeat into a matrix representation recording when these pieces occur in
the song with 1's. All remaining entries are marked with 0's.
Args
----
start_mat : np.ndarray
Matrix of one or two rows, containing the starting indices.
row_length : int
Length of the rows.
Returns
-------
    new_mat : np.ndarray
        Matrix of one or two rows, with 1's at the starting indices
        and 0's otherwise.
"""
if start_mat.ndim == 1:
# Convert a 1D array into 2D array
start_mat = start_mat[None, :]
# Initialize mat_rows and new_mat
mat_rows = start_mat.shape[0]
new_mat = np.zeros((mat_rows, row_length))
for i in range(0, mat_rows):
inds = start_mat[i, :]
# Let the starting indices be 1
new_mat[i, inds] = 1
return new_mat.astype(int)
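# Illustrative only -- a worked example of __inds_to_rows, assuming the
# helper above is in scope:
#
#     __inds_to_rows(np.array([0, 3]), 6)
#     # -> array([[1, 0, 0, 1, 0, 0]])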
def __merge_based_on_length(full_mat, full_bw, target_bw):
"""
    Merges repeats that are the same length, as set by target_bw,
    and are repeats of the same piece of structure.
Args
----
full_mat : np.ndarray
Binary matrix with ones where repeats start and zeroes otherwise.
    full_bw : np.ndarray
        Lengths of the repeats encoded in full_mat.
target_bw : np.ndarray
Lengths of repeats that we seek to merge.
Returns
-------
out_mat : np.ndarray
Binary matrix with ones where repeats start and zeros otherwise
with rows of full_mat merged if appropriate.
one_length_vec : np.ndarray
Length of the repeats encoded in out_mat.
"""
    # Sort the elements of full_bw
    temp_bandwidth = np.sort(full_bw, axis=None)
    # Return the indices that would sort full_bw
    bnds = np.argsort(full_bw, axis=None)
    temp_mat = full_mat[bnds, :]
    # Find the unique elements of target_bw
    target_bandwidth = np.unique(target_bw)
    # Number of unique lengths
    target_size = target_bandwidth.shape[0]
for i in range(1, target_size + 1):
test_bandwidth = target_bandwidth[i - 1]
# Check if temp_bandwidth is equal to test_bandwidth
inds = (temp_bandwidth == test_bandwidth)
        # If more than one row has this length, merge those rows
        if inds.sum() > 1:
# Isolate rows that correspond to test_bandwidth and merge them
merge_bw = temp_mat[inds, :]
merged_mat = __merge_rows(merge_bw, np.array([test_bandwidth]))
            # Number of rows in the merged matrix
            bandwidth_add_size = merged_mat.shape[0]
bandwidth_add = test_bandwidth * np.ones((bandwidth_add_size,
1)).astype(int)
if np.any(inds):
# Convert the boolean array inds into an array of integers
inds = np.array(inds).astype(int)
remove_inds = np.where(inds == 1)
# Delete the rows that meet the condition set by remove_inds
temp_mat = np.delete(temp_mat, remove_inds, axis=0)
temp_bandwidth = np.delete(temp_bandwidth, remove_inds, axis=0)
# Combine rows into a single matrix
temp_mat = np.vstack((temp_mat, merged_mat))
# Indicates temp_bandwidth is an empty array
if temp_bandwidth.size == 0:
temp_bandwidth = np.concatenate(bandwidth_add)
# Indicates temp_bandwidth is not an empty array
elif temp_bandwidth.size > 0:
temp_bandwidth = np.concatenate(
(temp_bandwidth, bandwidth_add.flatten())
)
# Return the indices that would sort temp_bandwidth
bnds = np.argsort(temp_bandwidth)
# Sort the elements of temp_bandwidth
temp_bandwidth = np.sort(temp_bandwidth)
temp_mat = temp_mat[bnds, ]
# Create output
out_mat = temp_mat
out_length_vec = temp_bandwidth
if out_length_vec.size != 1:
out_length_vec = out_length_vec.reshape(-1, 1)
output = (out_mat, out_length_vec)
return output
def __merge_rows(input_mat, input_width):
"""
Merges rows that have at least one common repeat; said common repeat(s)
must occur at the same time step and be of common length.
Args
----
input_mat : np.ndarray
Binary matrix with ones where repeats start and zeroes otherwise.
input_width : int
Length of repeats encoded in input_mat.
Returns
-------
merge_mat : np.ndarray
Binary matrix with ones where repeats start and zeroes otherwise.
"""
# Step 0: initialize temporary variables
not_merge = input_mat # Everything must be checked
merge_mat = np.empty((0, input_mat.shape[1]), int) # Nothing has been merged
merge_key = np.empty(1, int)
rows = input_mat.shape[0] # How many rows to merge?
# Step 1: has every row been checked?
while rows > 0:
# Step 2: start merge process
# Step 2a: choose first unmerged row
row2check = not_merge[0, :]
# Create a comparison matrix
# with copies of row2check stacked
# so that r2c_mat is the same
# size as the set of rows waiting
# to be merged
r2c_mat = np.kron(np.ones((rows, 1)), row2check)
# Step 2b: find indices of unmerged overlapping rows
merge_inds = np.sum(((r2c_mat + not_merge) == 2), axis=1) > 0
# Step 2c: union rows with starting indices in common with row2check
# and remove those rows from input_mat
union_merge = np.sum(not_merge[merge_inds, :], axis=0) > 0
union_merge = union_merge.astype(int)
not_merge = np.delete(not_merge, np.where(merge_inds == 1), 0)
# Step 2d: check that newly merged rows do not cause overlaps within
# row
# If there are conflicts, rerun compare_and_cut
merge_block = reconstruct_full_block(union_merge, input_width)
if np.max(merge_block) > 1:
(union_merge, union_merge_key) = __compare_and_cut(
union_merge, input_width, union_merge, input_width
)
else:
union_merge_key = input_width
# Step 2e: add unions to merge_mat and merge_key
merge_mat = np.vstack((merge_mat, union_merge))
merge_key = np.vstack((merge_key, union_merge_key))
        # Step 3: reinitialize rows for the stopping condition
rows = not_merge.shape[0]
if np.ndim(merge_mat) == 1:
# Make sure the output is a 2d array
merge_mat = np.array([merge_mat])
return merge_mat.astype(int)
def hierarchical_structure(matrix_no_overlaps, key_no_overlaps, sn, vis=False):
"""
Distills the repeats encoded in matrix_no_overlaps (and key_no_overlaps)
to the essential structure components and then builds the hierarchical
representation. Optionally shows visualizations of the hierarchical structure
via the vis argument.
Args
-----
matrix_no_overlaps : np.ndarray[int]
Binary matrix with 1's where repeats begin and 0's otherwise.
key_no_overlaps : np.ndarray[int]
Vector containing the lengths of the repeats encoded in matrix_no_overlaps.
sn : int
Song length, which is the number of audio shingles.
vis : bool
Shows visualizations if True (default = False).
Returns
-----
full_visualization : np.ndarray[int]
        Binary matrix representation for full_matrix_no_overlaps
        with blocks of 1's equal to the lengths prescribed
        in full_key.
full_key : np.ndarray[int]
Vector containing the lengths of the hierarchical
structure encoded in full_matrix_no_overlaps.
full_matrix_no_overlaps : np.ndarray[int]
Binary matrix with 1's where hierarchical
structure begins and 0's otherwise.
full_anno_lst : np.ndarray[int]
Vector containing the annotation markers of the
hierarchical structure encoded in each row of
full_matrix_no_overlaps.
"""
breakup_tuple = breakup_overlaps_by_intersect(matrix_no_overlaps, key_no_overlaps, 0)
# Using pno and pno_key, we build a vector that tells us the order of the
# repeats of the essential structure components
pno = breakup_tuple[0]
pno_key = breakup_tuple[1]
# Get the block representation for pno, called pno_block
pno_block = reconstruct_full_block(pno, pno_key)
if vis:
# IMAGE 1 construction
pno_anno = get_annotation_lst(pno_key)
pno_y_labels = get_y_labels(pno_key, pno_anno)
num_pno_rows = np.size(pno, axis=0)
twos = np.full((num_pno_rows, sn), 2, dtype=int)
vis_array = twos - (pno_block + pno)
fig, ax = plt.subplots(1, 1)
sdm = ax.imshow(vis_array, cmap="gray", aspect=10)
plt.title("Essential Structure Components")
# Set the number of ticks and set tick intervals to be equal
ax.set_yticks(np.arange(0,np.size(pno_y_labels)-1))
        # Set the ticklabels along the y axis and remove 0 in pno_y_labels
ax.set_yticklabels(pno_y_labels[1:])
plt.show()
    # Assign a unique (nonzero) number to each row in PNO. We refer to
    # these unique numbers as COLORS.
num_colors = pno.shape[0]
num_timesteps = pno.shape[1]
# Create unique color identifier for num_colors
color_lst = np.arange(1, num_colors + 1)
# Turn it into a column
color_lst = color_lst.reshape(np.size(color_lst), 1)
color_mat = np.tile(color_lst, (1, num_timesteps))
# For each time step in row i that equals 1, change the value at that time
# step to i
pno_color = color_mat * pno
pno_color_vec = pno_color.sum(axis=0)
# Find where repeats exist in time, paying special attention to the starts
# and ends of each repeat of an essential structure component
    # Take sums down the columns and convert to logical
pno_block_vec = (np.sum(pno_block, axis=0)) > 0
pno_block_vec = pno_block_vec.astype(np.float32)
one_vec = pno_block_vec[0 : sn - 1] - pno_block_vec[1:sn]
# Find all the blocks of consecutive time steps that are not contained in
# any of the essential structure components
# We call these blocks zero blocks
# Shift pno_block_vec so that the zero blocks are marked at the correct
# time steps with 1's
if pno_block_vec[0] == 0:
one_vec = np.insert(one_vec, 0, 1)
elif pno_block_vec[0] == 1:
one_vec = np.insert(one_vec, 0, 0)
# Assign one new unique number to all the zero blocks
pno_color_vec[one_vec == 1] = num_colors + 1
# We are only concerned with the order that repeats of the essential
# structure components occur in. So we create a vector that only contains
# the starting indices for each repeat of the essential structure
# components.
# We isolate the starting index of each repeat of the essential structure
# components and save a binary vector with 1 at a time step if a repeat of
# any essential structure component occurs there
non_zero_inds = (pno_color_vec > 0)
num_nzi = non_zero_inds.sum(axis=0)
pno_color_inds_only = pno_color_vec[non_zero_inds]
    # For indices that signal the start of a zero block, turn those indices
    # back to 0
zero_inds_short = (pno_color_inds_only == (num_colors + 1))
pno_color_inds_only[zero_inds_short] = 0
# Create a binary matrix symm_pno_inds_only such that the (i,j) entry is 1
# if the following three conditions are true:
# 1) a repeat of an essential structure component is the i-th thing in
# the ordering
# 2) a repeat of an essential structure component is the j-th thing in
# the ordering
# 3) the repeat occurring in the i-th place of the ordering and the
# one occurring in the j-th place of the ordering are repeats of the
# same essential structure component.
# If any of the above conditions are not true, then the (i,j) entry of
# symm_pno_inds_only is 0.
# Turn our pattern row into a square matrix by stacking that row the
# number of times equal to the columns in that row
pno_io_mat = np.tile(pno_color_inds_only, (num_nzi, 1))
pno_io_mat = pno_io_mat.astype(np.float32)
pno_io_mask = (
(pno_io_mat > 0).astype(np.float32)
+ (pno_io_mat.transpose() > 0).astype(np.float32)
) == 2
symm_pno_inds_only = (
pno_io_mat.astype(np.float32) == pno_io_mat.transpose(
).astype(np.float32)
) * pno_io_mask
if vis:
# IMAGE 2
fig, ax = plt.subplots(1, 1)
sdm = ax.imshow(symm_pno_inds_only, cmap="binary", aspect=0.8)
        plt.title(
            "Threshold self-dissimilarity matrix of " +
            "the ordering of Essential Structure Components"
        )
# this locator puts ticks at regular intervals
loc = plticker.MultipleLocator(base=1.0)
ax.yaxis.set_major_locator(loc)
ax.xaxis.set_major_locator(loc)
plt.show()
    # Extract all the diagonals in symm_pno_inds_only and get pairs of
    # repeated sublists in the order in which repeats of essential
    # structure components occur.
    # These pairs of repeated sublists are the basis of our hierarchical
    # representation.
nzi_lst = find_all_repeats(symm_pno_inds_only, np.arange(1, num_nzi + 1))
remove_inds = (nzi_lst[:, 0] == nzi_lst[:, 2])
# Remove any pairs of repeats that are two copies of the same repeat (i.e.
# a pair (A,B) where A == B)
if np.any(remove_inds):
remove_inds = np.array(remove_inds).astype(int)
remove = np.where(remove_inds == 1)
nzi_lst = np.delete(nzi_lst, remove, axis=0)
# Add the annotation markers to the pairs in nzi_lst
nzi_lst_anno = find_complete_list_anno_only(nzi_lst, num_nzi)
# Remove the overlaps
output_tuple = remove_overlaps(nzi_lst_anno, num_nzi)
(nzi_matrix_no_overlaps, nzi_key_no_overlaps) = output_tuple[1:3]
# Reconstruct full block
nzi_pattern_block = reconstruct_full_block(nzi_matrix_no_overlaps, nzi_key_no_overlaps)
nzi_rows = nzi_pattern_block.shape[0]
if vis:
# IMAGE 3
fig, ax = plt.subplots(1, 1)
sdm = ax.imshow(nzi_pattern_block, cmap="binary", aspect=0.8)
        plt.title(
            "Repeated ordered sublists of the " +
            "Essential Structure Components"
        )
# This locator puts ticks at regular intervals
loc = plticker.MultipleLocator(base=1.0)
ax.yaxis.set_major_locator(loc)
ax.xaxis.set_major_locator(loc)
plt.show()
# IMAGE 4
fig, ax = plt.subplots(1, 1)
sdm = ax.imshow((nzi_pattern_block + nzi_matrix_no_overlaps), cmap="binary",
aspect=0.8)
        plt.title(
            "Repeated ordered sublists of the " +
            "Essential Structure Components " +
            "with leading index highlighted"
        )
loc = plticker.MultipleLocator(
base=1.0
) # This locator puts ticks at regular intervals
ax.yaxis.set_major_locator(loc)
ax.xaxis.set_major_locator(loc)
plt.show()
nzi_rows = nzi_pattern_block.shape[0]
# Find where all blocks start and end
pattern_starts = np.nonzero(non_zero_inds)[0]
pattern_ends = np.array([pattern_starts[1:] - 1])
pattern_ends = np.insert(pattern_ends, np.shape(pattern_ends)[1], sn - 1)
pattern_lengths = np.array(pattern_ends - pattern_starts + 1)
full_visualization = np.zeros((nzi_rows, sn), dtype=int)
full_matrix_no_overlaps = np.zeros((nzi_rows, sn), dtype=int)
for i in range(0, num_nzi):
repeated_sect = nzi_pattern_block[:, i].reshape(
np.shape(nzi_pattern_block)[0], 1
)
full_visualization[:,
pattern_starts[i]: pattern_ends[i] + 1] = np.tile(
repeated_sect, (1, pattern_lengths[i])
)
full_matrix_no_overlaps[:, pattern_starts[i]] = nzi_matrix_no_overlaps[:, i]
# Get full_key, the matching bandwidth key for full_matrix_no_overlaps
full_key = np.zeros((nzi_rows, 1), dtype=int)
find_key_mat = full_visualization + full_matrix_no_overlaps
for i in range(0, nzi_rows):
one_start = np.where(find_key_mat[i, :] == 2)[0][0]
temp_row = find_key_mat[i, :]
temp_row[0 : one_start + 1] = 1
        # np.where returns an empty index array when there is no match, so
        # check the size before indexing (indexing first would raise)
        find_zero_inds = np.where(temp_row == 0)[0]
        find_zero = find_zero_inds[0] if find_zero_inds.size > 0 else sn
        find_two_inds = np.where(temp_row == 2)[0]
        find_two = find_two_inds[0] if find_two_inds.size > 0 else sn
one_end = np.minimum(find_zero, find_two)
full_key[i] = one_end - one_start
full_key_inds = np.argsort(full_key, axis=0)
# Switch to row
full_key_inds = full_key_inds[:, 0]
full_key = np.sort(full_key, axis=0)
full_visualization = full_visualization[full_key_inds, :]
full_matrix_no_overlaps = full_matrix_no_overlaps[full_key_inds, :]
# Remove rows of our hierarchical representation that contain only one
# repeat
inds_remove = np.where(np.sum(full_matrix_no_overlaps, 1) <= 1)
full_key = np.delete(full_key, inds_remove, axis=0)
full_matrix_no_overlaps = np.delete(full_matrix_no_overlaps, inds_remove, axis=0)
full_visualization = np.delete(full_visualization, inds_remove, axis=0)
full_anno_lst = get_annotation_lst(full_key)
output = (full_visualization, full_key, full_matrix_no_overlaps, full_anno_lst)
if vis:
# IMAGE 5
full_anno_lst = get_annotation_lst(full_key)
vis_y_labels = get_y_labels(full_key, full_anno_lst)
num_vis_rows = np.size(full_visualization, axis=0)
twos = np.full((num_vis_rows, sn), 2, dtype=int)
vis_array = twos - (full_visualization + full_matrix_no_overlaps)
fig, ax = plt.subplots(1, 1)
sdm = ax.imshow(vis_array, cmap="gray", aspect=5)
plt.title("Complete Aligned Hierarchies")
# Set the number of ticks and set tick intervals to be equal
ax.set_yticks(np.arange(0,np.size(vis_y_labels)-1))
# Set the ticklabels along the y axis and remove 0 in vis_y_labels
ax.set_yticklabels(vis_y_labels[1:])
plt.show()
return output
from __future__ import print_function, division
import numpy as np
import healpy as hp
from matplotlib import pyplot as plt
import geometry
# given nside | number of pixels | resolution (pixel size in degree) | Maximum angular distance (degree) | pixel area (in square degrees)
# 1 | 12 | 58.6323 | 48.1897 | 3437.746771
# 2 | 48 | 29.3162 | 27.5857 | 859.436693
# 4 | 192 | 14.6581 | 14.5722 | 214.859173
# 8 | 768 | 7.3290 | 7.4728 | 53.714793
# 16 | 3072 | 3.6645 | 3.7824 | 13.428698
# 32 | 12288 | 1.8323 | 1.9026 | 3.357175
# 64 | 49152 | 0.9161 | 0.9541 | 0.839294
# 128 | 196608 | 0.4581 | 0.4778 | 0.209823
# 256 | 786432 | 0.2290 | 0.2391 | 0.052456
# 512 | 3145728 | 0.1145 | 0.1196 | 0.013114
# 1024 | 12582912 | 0.0573 | 0.0598 | 0.003278
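# Relations behind this table: npix = 12 * nside**2, and the "resolution" column is
# (approximately) the square root of the pixel area, as returned by hp.nside2resol.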
def calculate_nside_resolution():
NSIDE = [2**i for i in range(11)]
print('given nside | number of pixels | resolution (pixel size in degree) | Maximum angular distance (degree) | pixel area (in square degrees)')
for nside in NSIDE:
npix = hp.nside2npix(nside)
resol = np.rad2deg(hp.nside2resol(nside))
maxrad = np.rad2deg(hp.max_pixrad(nside))
pixarea = hp.nside2pixarea(nside, degrees=True)
print('{0:^11} | {1:^16} | {2:^33.4f} | {3:^33.4f} | {4:^30.6f}'.format(nside, npix, resol, maxrad, pixarea))
if __name__ == '__main__':
calculate_nside_resolution()
# generate random distribution of Euler angles
v = np.random.randn(100,3)
v = v / np.linalg.norm(v, axis=1).repeat(3).reshape(-1,3)
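    # rows of v are now unit direction vectors; geometry (a local module) maps them
    # to Euler angles, with phi in column 0 and theta in column 1 (used below)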
EA = geometry.genEA(v)
phi = EA[:, 0]
# phi += 2 * np.pi
theta = EA[:, 1]
    # visualization
hp.mollview()
hp.visufunc.projscatter(theta, phi, 'r.')
hp.graticule()
plt.show()
|
[
"geometry.genEA",
"healpy.max_pixrad",
"healpy.visufunc.projscatter",
"matplotlib.pyplot.show",
"healpy.mollview",
"numpy.random.randn",
"healpy.graticule",
"healpy.nside2pixarea",
"healpy.nside2npix",
"numpy.linalg.norm",
"healpy.nside2resol"
] |
[((2358, 2381), 'numpy.random.randn', 'np.random.randn', (['(100)', '(3)'], {}), '(100, 3)\n', (2373, 2381), True, 'import numpy as np\n'), ((2453, 2470), 'geometry.genEA', 'geometry.genEA', (['v'], {}), '(v)\n', (2467, 2470), False, 'import geometry\n'), ((2557, 2570), 'healpy.mollview', 'hp.mollview', ([], {}), '()\n', (2568, 2570), True, 'import healpy as hp\n'), ((2575, 2616), 'healpy.visufunc.projscatter', 'hp.visufunc.projscatter', (['theta', 'phi', '"""r."""'], {}), "(theta, phi, 'r.')\n", (2598, 2616), True, 'import healpy as hp\n'), ((2621, 2635), 'healpy.graticule', 'hp.graticule', ([], {}), '()\n', (2633, 2635), True, 'import healpy as hp\n'), ((2640, 2650), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2648, 2650), True, 'from matplotlib import pyplot as plt\n'), ((1942, 1962), 'healpy.nside2npix', 'hp.nside2npix', (['nside'], {}), '(nside)\n', (1955, 1962), True, 'import healpy as hp\n'), ((2081, 2118), 'healpy.nside2pixarea', 'hp.nside2pixarea', (['nside'], {'degrees': '(True)'}), '(nside, degrees=True)\n', (2097, 2118), True, 'import healpy as hp\n'), ((1990, 2011), 'healpy.nside2resol', 'hp.nside2resol', (['nside'], {}), '(nside)\n', (2004, 2011), True, 'import healpy as hp\n'), ((2041, 2061), 'healpy.max_pixrad', 'hp.max_pixrad', (['nside'], {}), '(nside)\n', (2054, 2061), True, 'import healpy as hp\n'), ((2394, 2419), 'numpy.linalg.norm', 'np.linalg.norm', (['v'], {'axis': '(1)'}), '(v, axis=1)\n', (2408, 2419), True, 'import numpy as np\n')]
|
import os
from urllib.request import urlopen
import pymex
class UniRecord( pymex.xmlrecord.XmlRecord ):
def __init__(self, root=None):
myDir = os.path.dirname( os.path.realpath(__file__))
self.uniConfig = { "uni_v001": {"IN": os.path.join( myDir, "defUniParse_v001.json"),
"OUT": os.path.join( myDir, "defUniXml_v001.json" ) }
}
self.debug = False
self.url="https://www.uniprot.org/uniprot/%%ACC%%.xml"
self._pdef = None
super().__init__( root, config=self.uniConfig,
postproc = { "geneName": self._geneName,
"protName": self._protName,
"accession": self._accession,
"comment": self._comment,
"xref": self._xref,
"feature": self._feature})
def parseXml(self, filename, ver="uni_v001", debug=False):
res = super().parseXml( filename, ver=ver )
return res
def getRecord(self, ac="P60010"):
upUrl = self.url.replace( "%%ACC%%", ac )
res = self.parseXml( urlopen(upUrl ))
self.record = res
return( res )
def _protName( self, elem, rec, cval ):
if self.debug:
print("protName: elem=", elem)
print("protName: rec.keys=",list(rec.keys()))
if "protein" in rec:
protein = rec["protein"]
rec["_protein"] = {"names":{},"XX":"XX"}
print("XXX",rec.keys())
for cname in protein:
if "recommendedName" == cname:
rec["_protein"]["names"]["rec"]={}
if "fullName" in protein[cname]:
rec["_protein"]["names"]["rec"]["full"] = protein[cname]["fullName"]
rec["_protein"]["names"]["fullName"] =protein[cname]["fullName"]
if "shortName" in protein[cname]:
rec["_protein"]["names"]["rec"]["short"] = protein[cname]["shortName"]
rec["_protein"]["names"]["shortLabel"] = protein[cname]["shortName"]
elif "alternativeName" == cname:
for altname in protein[cname]:
if "alt" not in rec["_protein"]["names"]:
rec["_protein"]["names"]["alt"]=[]
calt = {}
if "fullName" in altname :
calt["full"] = altname["fullName"]
rec["_protein"]["names"].setdefault("alias",[]).append(altname["fullName"])
if "shortName" in altname:
calt["short"] = altname["shortName"]
rec["_protein"]["names"].setdefault("alias",[]).append(altname["shortName"])
rec["_protein"]["names"]["alt"].append( calt )
def _geneName( self, elem, rec, cval ):
if self.debug:
print("geneName: elem=", elem)
print("geneName: rec.keys=",list(rec.keys()))
if "gene" in rec:
gene = rec["gene"]
rec["_gene"] = {"name":{}}
for cgene in gene["name"]:
cval = cgene["value"]
ctype = cgene["type"]
if ctype not in rec["_gene"]["name"]:
if ctype != "primary":
rec["_gene"]["name"][ctype]=[]
else:
rec["_gene"]["name"][ctype]=cval
if ctype != "primary":
rec["_gene"]["name"][ctype].append(cval)
def _accession( self, elem, rec, cval ):
if "_accession" not in rec:
rec["_accession"]={"primary":None}
if rec["_accession"]["primary"] is None:
rec["_accession"]["primary"] = rec["accession"][-1]
else:
if "secondary" not in rec["_accession"]:
rec["_accession"]["secondary"] = []
rec["_accession"]["secondary"].append(rec["accession"][-1])
def _comment( self, elem, rec, cval ):
if self.debug:
print("TYPE:",rec["comment"][-1]["type"])
ccom = rec.setdefault("_comment",{})
ctp = ccom.setdefault(rec["comment"][-1]["type"],[])
ctp.append( rec["comment"][-1] )
def _xref( self, elem, rec, cval ):
if self.debug:
print("XREF TYPE:",rec["dbReference"][-1]["type"])
ccom = rec.setdefault("_xref",{})
ctp = ccom.setdefault(rec["dbReference"][-1]["type"],[])
ctp.append( rec["dbReference"][-1] )
def _feature( self, elem, rec, cval ):
if self.debug:
print("FEATURE TYPE:",rec["feature"][-1]["type"])
ccom = rec.setdefault("_feature",{})
if rec["feature"][-1]["type"] == "sequence variant":
ntp = "variant"
elif rec["feature"][-1]["type"] == "mutagenesis site":
ntp = "mutation"
else:
ntp = rec["feature"][-1]["type"]
ctp = ccom.setdefault(ntp,[])
ctp.append( rec["feature"][-1] )
@property
def entry( self ):
return self.root["uniprot"]["entry"][0]
@property
def accession(self):
return self.root["uniprot"]["entry"][0]["_accession"]
#@property
#def name( self ):
# return self.root["uniprot"]["entry"][0]["name"]
@property
def name( self ):
return { "entry": self.root["uniprot"]["entry"][0]["name"],
"protein": self.protein,
"gene": self.gene }
@property
def protein( self ):
if self._pdef is not None:
return pymex.Protein(self._pdef)
self._pdef = { "names":{"alias":[]},
"xref": [],
"interactorType": { "_names":{"shortLabel":"protein",
"fullName":"protein"},
"_xref":{"primaryRef":{ "db":"psi-mi", "ac":"MI:0326" } } },
"organism": {"_names":{} },
"sequence": self.root["uniprot"]["entry"][0]["sequence"]["value"] }
# xrefs
# names
entry = self.root['uniprot']['entry'][0]
prt = self.root['uniprot']['entry'][0]['_protein']
names = prt['names']
if "rec" in names:
entry_name = names["rec"]
elif "sub" in names:
entry_name = names["sub"]
name = entry_name["full"]
if "short" in entry_name.keys():
label = entry_name["short"]
elif "_gene" in entry:
label = entry["_gene"]["name"]["primary"]
else:
label = self.root['uniprot']['entry'][0]["name"]
prt_alias = []
#for key in prt["names"]:
# pass
alias = []
if "gene" in prt.keys():
for key in prt["gene"]["name"]:
if "primary" != key:
for alias in prt["gene"]["name"][key]:
print(alias)
#protein name aliases
if "names" in prt:
if "alt" in prt["names"]:
for p in prt["names"]["alt"]:
if "full" in p:
alias.append({"value":p["full"],"type":"protein name synonym"})
if "short" in p:
alias.append({"value":p["short"],"type":"protein name synonym"})
# gene name aliases
if "gene" in entry:
for g in entry["gene"]["name"]:
if g["type"] == "primary":
alias.append({"value":g["value"],"type":"gene name"})
else:
alias.append({"value":g["value"],"type":"gene name synonym"})
self._pdef["names"]["shortLabel"]=label
self._pdef["names"]["fullName"]=name
self._pdef["names"]["alias"]=alias
return pymex.Protein(self._pdef)
@property
def gene( self ):
return self.root["uniprot"]["entry"][0]["_gene"]
@property
def taxon( self ):
return self.root["uniprot"]["entry"][0]["organism"]
@property
def xref( self ):
return self.root["uniprot"]["entry"][0]["_xref"]
@property
def feature( self ):
return self.root["uniprot"]["entry"][0]["_feature"]
@property
def comment( self ):
return self.root["uniprot"]["entry"][0]["_comment"]
|
[
"os.path.realpath",
"os.path.join",
"pymex.Protein",
"urllib.request.urlopen"
] |
[((8504, 8529), 'pymex.Protein', 'pymex.Protein', (['self._pdef'], {}), '(self._pdef)\n', (8517, 8529), False, 'import pymex\n'), ((174, 200), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (190, 200), False, 'import os\n'), ((1270, 1284), 'urllib.request.urlopen', 'urlopen', (['upUrl'], {}), '(upUrl)\n', (1277, 1284), False, 'from urllib.request import urlopen\n'), ((6115, 6140), 'pymex.Protein', 'pymex.Protein', (['self._pdef'], {}), '(self._pdef)\n', (6128, 6140), False, 'import pymex\n'), ((248, 292), 'os.path.join', 'os.path.join', (['myDir', '"""defUniParse_v001.json"""'], {}), "(myDir, 'defUniParse_v001.json')\n", (260, 292), False, 'import os\n'), ((342, 384), 'os.path.join', 'os.path.join', (['myDir', '"""defUniXml_v001.json"""'], {}), "(myDir, 'defUniXml_v001.json')\n", (354, 384), False, 'import os\n')]
|
# Helpful classes
import numpy as np
# Helper function for calculating dists
def dists(array):
lens = []
for i in range(len(array)):
lens.append(np.linalg.norm(np.array(array[i][0])-
np.array(array[i][1])))
return lens
# This is for the original shape you want to cut
class Shape:
def __init__(self, ls):
self.edges = np.array(ls)
self.lengths = dists(ls)
self.vertices = self.edges[:,0]
# For the circles that represent the points
class Circle:
def __init__(self, vert, rad):
self.vert = vert
self.rad = rad
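# Usage sketch (hypothetical coordinates): two edges of a unit square
#   s = Shape([[(0, 0), (1, 0)], [(1, 0), (1, 1)]])  # s.lengths == [1.0, 1.0]
#   c = Circle((0, 0), 0.5)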
|
[
"numpy.array"
] |
[((344, 356), 'numpy.array', 'np.array', (['ls'], {}), '(ls)\n', (352, 356), True, 'import numpy as np\n'), ((170, 191), 'numpy.array', 'np.array', (['array[i][0]'], {}), '(array[i][0])\n', (178, 191), True, 'import numpy as np\n'), ((199, 220), 'numpy.array', 'np.array', (['array[i][1]'], {}), '(array[i][1])\n', (207, 220), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
# FeedCrawler
# Projekt von https://github.com/rix1337
import ast
import json
import os
import re
import sys
import time
from functools import wraps
from flask import Flask, request, redirect, send_from_directory, render_template, jsonify, Response
from passlib.hash import pbkdf2_sha256
from requests.packages.urllib3 import disable_warnings as disable_request_warnings
from requests.packages.urllib3.exceptions import InsecureRequestWarning
from waitress import serve
import feedcrawler.myjdapi
import feedcrawler.search.shared.content_all
import feedcrawler.search.shared.content_shows
from feedcrawler import internal
from feedcrawler import version
from feedcrawler.common import Unbuffered
from feedcrawler.common import decode_base64
from feedcrawler.common import get_to_decrypt
from feedcrawler.common import is_device
from feedcrawler.common import remove_decrypt
from feedcrawler.common import rreplace
from feedcrawler.config import CrawlerConfig
from feedcrawler.db import FeedDb
from feedcrawler.db import ListDb
from feedcrawler.myjd import check_device
from feedcrawler.myjd import do_add_decrypted
from feedcrawler.myjd import do_package_replace
from feedcrawler.myjd import download
from feedcrawler.myjd import get_device
from feedcrawler.myjd import get_if_one_device
from feedcrawler.myjd import get_info
from feedcrawler.myjd import get_packages_in_linkgrabber
from feedcrawler.myjd import get_state
from feedcrawler.myjd import jdownloader_pause
from feedcrawler.myjd import jdownloader_start
from feedcrawler.myjd import jdownloader_stop
from feedcrawler.myjd import move_to_downloads
from feedcrawler.myjd import package_merge
from feedcrawler.myjd import remove_from_linkgrabber
from feedcrawler.myjd import retry_decrypt
from feedcrawler.myjd import update_jdownloader
from feedcrawler.notifiers import notify
from feedcrawler.search import search
helper_active = False
already_added = []
def app_container():
global helper_active
global already_added
base_dir = '.'
if getattr(sys, 'frozen', False):
base_dir = os.path.join(sys._MEIPASS)
app = Flask(__name__, template_folder=os.path.join(base_dir, 'web'))
app.config["TEMPLATES_AUTO_RELOAD"] = True
general = CrawlerConfig('FeedCrawler')
if general.get("prefix"):
prefix = '/' + general.get("prefix")
else:
prefix = ""
def check_auth(config, username, password):
auth_hash = config.get("auth_hash")
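        # Legacy configs stored the password in plain text; hash it once with
        # pbkdf2-sha256 and persist the hash before verifying.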
if auth_hash and "$pbkdf2-sha256" not in auth_hash:
auth_hash = pbkdf2_sha256.hash(auth_hash)
config.save(
"auth_hash", to_str(auth_hash))
return username == config.get("auth_user") and pbkdf2_sha256.verify(password, auth_hash)
def authenticate():
return Response(
'''<html>
<head><title>401 Authorization Required</title></head>
<body bgcolor="white">
<center><h1>401 Authorization Required</h1></center>
<hr><center>FeedCrawler</center>
</body>
</html>
''', 401,
{'WWW-Authenticate': 'Basic realm="FeedCrawler"'})
def requires_auth(f):
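        # Flask view decorator: enforce HTTP Basic auth against the stored
        # user/hash; requests pass through when no credentials are configured.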
@wraps(f)
def decorated(*args, **kwargs):
config = CrawlerConfig('FeedCrawler')
if config.get("auth_user") and config.get("auth_hash"):
auth = request.authorization
if not auth or not check_auth(config, auth.username, auth.password):
return authenticate()
return f(*args, **kwargs)
return decorated
def to_int(i):
if isinstance(i, bytes):
i = i.decode()
i = str(i).strip().replace("None", "")
return int(i) if i else ""
def to_float(i):
i = str(i).strip().replace("None", "")
return float(i) if i else ""
def to_str(i):
return '' if i is None else str(i)
def to_bool(i):
return True if i == "True" else False
if prefix:
@app.route('/')
@requires_auth
def index_prefix():
return redirect(prefix)
@app.route(prefix + '/<path:path>')
@requires_auth
def send_html(path):
return send_from_directory(os.path.join(base_dir, 'web'), path)
@app.route(prefix + '/')
@requires_auth
def index():
return render_template('index.html')
@app.route(prefix + "/api/log/", methods=['GET', 'DELETE'])
@requires_auth
def get_delete_log():
if request.method == 'GET':
try:
log = []
if os.path.isfile(internal.log_file):
logfile = open(internal.log_file)
i = 0
for line in reversed(logfile.readlines()):
if line and line != "\n":
payload = [i]
line = line.replace("]", "")
line = line.replace("[", "")
line = re.sub(r",\d{3}", "", line)
line = line.split(" - ")
for line_part in line:
payload.append(line_part)
log.append(payload)
i += 1
return jsonify(
{
"log": log,
}
)
except:
return "Failed", 400
elif request.method == 'DELETE':
try:
open(internal.log_file, 'w').close()
return "Success", 200
except:
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/api/log_entry/<b64_entry>", methods=['DELETE'])
@requires_auth
def get_delete_log_entry(b64_entry):
if request.method == 'DELETE':
try:
entry = decode_base64(b64_entry)
log = []
if os.path.isfile(internal.log_file):
logfile = open(internal.log_file)
for line in reversed(logfile.readlines()):
if line and line != "\n":
if entry not in line:
log.append(line)
log = "".join(reversed(log))
with open(internal.log_file, 'w') as file:
file.write(log)
return "Success", 200
except:
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/api/settings/", methods=['GET', 'POST'])
@requires_auth
def get_post_settings():
if request.method == 'GET':
try:
general_conf = CrawlerConfig('FeedCrawler')
hosters = CrawlerConfig('Hosters')
alerts = CrawlerConfig('Notifications')
ombi = CrawlerConfig('Ombi')
crawljobs = CrawlerConfig('Crawljobs')
mb_conf = CrawlerConfig('ContentAll')
sj_conf = CrawlerConfig('ContentShows')
dj_conf = CrawlerConfig('CustomDJ')
return jsonify(
{
"settings": {
"general": {
"auth_user": general_conf.get("auth_user"),
"auth_hash": general_conf.get("auth_hash"),
"myjd_user": general_conf.get("myjd_user"),
"myjd_pass": general_conf.get("myjd_pass"),
"myjd_device": general_conf.get("myjd_device"),
"port": to_int(general_conf.get("port")),
"prefix": general_conf.get("prefix"),
"interval": to_int(general_conf.get("interval")),
"flaresolverr": general_conf.get("flaresolverr"),
"english": general_conf.get("english"),
"surround": general_conf.get("surround"),
"closed_myjd_tab": general_conf.get("closed_myjd_tab"),
"one_mirror_policy": general_conf.get("one_mirror_policy"),
"packages_per_myjd_page": to_int(general_conf.get("packages_per_myjd_page")),
"prefer_dw_mirror": general_conf.get("prefer_dw_mirror"),
},
"hosters": {
"rapidgator": hosters.get("rapidgator"),
"turbobit": hosters.get("turbobit"),
"uploaded": hosters.get("uploaded"),
"zippyshare": hosters.get("zippyshare"),
"oboom": hosters.get("oboom"),
"ddl": hosters.get("ddl"),
"filefactory": hosters.get("filefactory"),
"uptobox": hosters.get("uptobox"),
"onefichier": hosters.get("1fichier"),
"filer": hosters.get("filer"),
"nitroflare": hosters.get("nitroflare"),
"ironfiles": hosters.get("ironfiles"),
"k2s": hosters.get("k2s"),
},
"alerts": {
"pushbullet": alerts.get("pushbullet"),
"pushover": alerts.get("pushover"),
"homeassistant": alerts.get("homeassistant"),
"telegram": alerts.get("telegram"),
},
"ombi": {
"url": ombi.get("url"),
"api": ombi.get("api"),
},
"crawljobs": {
"autostart": crawljobs.get("autostart"),
"subdir": crawljobs.get("subdir"),
},
"mb": {
"quality": mb_conf.get("quality"),
"search": mb_conf.get("search"),
"ignore": mb_conf.get("ignore"),
"regex": mb_conf.get("regex"),
"imdb_score": to_float(mb_conf.get("imdb")),
"imdb_year": to_int(mb_conf.get("imdbyear")),
"force_dl": mb_conf.get("enforcedl"),
"cutoff": mb_conf.get("cutoff"),
"hevc_retail": mb_conf.get("hevc_retail"),
"retail_only": mb_conf.get("retail_only"),
"hoster_fallback": mb_conf.get("hoster_fallback"),
},
"sj": {
"quality": sj_conf.get("quality"),
"ignore": sj_conf.get("rejectlist"),
"regex": sj_conf.get("regex"),
"hevc_retail": sj_conf.get("hevc_retail"),
"retail_only": sj_conf.get("retail_only"),
"hoster_fallback": sj_conf.get("hoster_fallback"),
},
"mbsj": {
"enabled": mb_conf.get("crawlseasons"),
"quality": mb_conf.get("seasonsquality"),
"packs": mb_conf.get("seasonpacks"),
"source": mb_conf.get("seasonssource"),
},
"dj": {
"quality": dj_conf.get("quality"),
"ignore": dj_conf.get("rejectlist"),
"regex": dj_conf.get("regex"),
"hoster_fallback": dj_conf.get("hoster_fallback"),
}
}
}
)
except:
return "Failed", 400
if request.method == 'POST':
try:
data = request.json
section = CrawlerConfig("FeedCrawler")
section.save(
"auth_user", to_str(data['general']['auth_user']))
auth_hash = data['general']['auth_hash']
if auth_hash and "$pbkdf2-sha256" not in auth_hash:
auth_hash = pbkdf2_sha256.hash(auth_hash)
section.save(
"auth_hash", to_str(auth_hash))
myjd_user = to_str(data['general']['myjd_user'])
                myjd_pass = to_str(data['general']['myjd_pass'])
myjd_device = to_str(data['general']['myjd_device'])
if myjd_user and myjd_pass and not myjd_device:
myjd_device = get_if_one_device(myjd_user, myjd_pass)
if myjd_device:
print(u"Gerätename " + myjd_device + " automatisch ermittelt.")
if myjd_user and myjd_pass and myjd_device:
device_check = check_device(myjd_user, myjd_pass, myjd_device)
if not device_check:
myjd_device = get_if_one_device(myjd_user, myjd_pass)
if myjd_device:
print(u"Gerätename " + myjd_device + " automatisch ermittelt.")
else:
print(u"Fehlerhafte My JDownloader Zugangsdaten. Bitte vor dem Speichern prüfen!")
return "Failed", 400
section.save("myjd_user", myjd_user)
section.save("myjd_pass", myjd_pass)
section.save("myjd_device", myjd_device)
section.save("port", to_str(data['general']['port']))
section.save("prefix", to_str(data['general']['prefix']).lower())
interval = to_str(data['general']['interval'])
if to_int(interval) < 5:
interval = '5'
section.save("interval", interval)
section.save("flaresolverr", to_str(data['general']['flaresolverr']))
section.save("english", to_str(data['general']['english']))
section.save("surround", to_str(data['general']['surround']))
section.save("closed_myjd_tab", to_str(data['general']['closed_myjd_tab']))
section.save("one_mirror_policy", to_str(data['general']['one_mirror_policy']))
section.save("packages_per_myjd_page", to_str(data['general']['packages_per_myjd_page']))
section.save("prefer_dw_mirror", to_str(data['general']['prefer_dw_mirror']))
section = CrawlerConfig("Crawljobs")
section.save("autostart", to_str(data['crawljobs']['autostart']))
section.save("subdir", to_str(data['crawljobs']['subdir']))
section = CrawlerConfig("Notifications")
section.save("pushbullet", to_str(data['alerts']['pushbullet']))
section.save("pushover", to_str(data['alerts']['pushover']))
section.save("telegram", to_str(data['alerts']['telegram']))
section.save("homeassistant", to_str(data['alerts']['homeassistant']))
section = CrawlerConfig("Hosters")
section.save("rapidgator", to_str(data['hosters']['rapidgator']))
section.save("turbobit", to_str(data['hosters']['turbobit']))
section.save("uploaded", to_str(data['hosters']['uploaded']))
section.save("zippyshare", to_str(data['hosters']['zippyshare']))
section.save("oboom", to_str(data['hosters']['oboom']))
section.save("ddl", to_str(data['hosters']['ddl']))
section.save("filefactory", to_str(data['hosters']['filefactory']))
section.save("uptobox", to_str(data['hosters']['uptobox']))
section.save("1fichier", to_str(data['hosters']['onefichier']))
section.save("filer", to_str(data['hosters']['filer']))
section.save("nitroflare", to_str(data['hosters']['nitroflare']))
section.save("ironfiles", to_str(data['hosters']['ironfiles']))
section.save("k2s", to_str(data['hosters']['k2s']))
section = CrawlerConfig("Ombi")
section.save("url", to_str(data['ombi']['url']))
section.save("api", to_str(data['ombi']['api']))
section = CrawlerConfig("ContentAll")
section.save("quality", to_str(data['mb']['quality']))
section.save("search", to_str(data['mb']['search']))
section.save("ignore", to_str(data['mb']['ignore']).lower())
section.save("regex", to_str(data['mb']['regex']))
section.save("cutoff", to_str(data['mb']['cutoff']))
section.save("enforcedl", to_str(data['mb']['force_dl']))
section.save("crawlseasons", to_str(data['mbsj']['enabled']))
section.save("seasonsquality", to_str(data['mbsj']['quality']))
section.save("seasonpacks", to_str(data['mbsj']['packs']))
section.save("seasonssource", to_str(data['mbsj']['source']).lower())
section.save("imdbyear", to_str(data['mb']['imdb_year']))
imdb = to_str(data['mb']['imdb_score'])
if re.match('[^0-9]', imdb):
imdb = 0.0
elif imdb == '':
imdb = 0.0
else:
imdb = round(float(to_str(data['mb']['imdb_score']).replace(",", ".")), 1)
if imdb > 10:
imdb = 10.0
section.save("imdb", to_str(imdb))
section.save("hevc_retail", to_str(data['mb']['hevc_retail']))
section.save("retail_only", to_str(data['mb']['retail_only']))
section.save("hoster_fallback", to_str(data['mb']['hoster_fallback']))
section = CrawlerConfig("ContentShows")
section.save("quality", to_str(data['sj']['quality']))
section.save("rejectlist", to_str(data['sj']['ignore']).lower())
section.save("regex", to_str(data['sj']['regex']))
section.save("hevc_retail", to_str(data['sj']['hevc_retail']))
section.save("retail_only", to_str(data['sj']['retail_only']))
section.save("hoster_fallback", to_str(data['sj']['hoster_fallback']))
section = CrawlerConfig("CustomDJ")
section.save("quality", to_str(data['dj']['quality']))
section.save("rejectlist", to_str(data['dj']['ignore']).lower())
section.save("regex", to_str(data['dj']['regex']))
section.save("hoster_fallback", to_str(data['dj']['hoster_fallback']))
return "Success", 201
except:
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/api/version/", methods=['GET'])
@requires_auth
def get_version():
if request.method == 'GET':
try:
ver = "v." + version.get_version()
if version.update_check()[0]:
updateready = True
updateversion = version.update_check()[1]
                    print(u'Update available (' + updateversion +
                          ')! More information at https://github.com/rix1337/FeedCrawler/releases/latest')
else:
updateready = False
return jsonify(
{
"version": {
"ver": ver,
"update_ready": updateready,
"docker": internal.docker,
"helper_active": helper_active
}
}
)
except:
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/api/crawltimes/", methods=['GET'])
@requires_auth
def get_crawltimes():
if request.method == 'GET':
try:
crawltimes = FeedDb("crawltimes")
return jsonify(
{
"crawltimes": {
"active": to_bool(crawltimes.retrieve("active")),
"start_time": to_float(crawltimes.retrieve("start_time")),
"end_time": to_float(crawltimes.retrieve("end_time")),
"total_time": crawltimes.retrieve("total_time"),
"next_start": to_float(crawltimes.retrieve("next_start")),
}
}
)
except:
time.sleep(3)
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/api/hostnames/", methods=['GET'])
@requires_auth
def get_hostnames():
if request.method == 'GET':
try:
hostnames = CrawlerConfig('Hostnames')
dw = hostnames.get('dw')
fx = hostnames.get('fx')
sj = hostnames.get('sj')
dj = hostnames.get('dj')
sf = hostnames.get('sf')
ww = hostnames.get('ww')
nk = hostnames.get('nk')
by = hostnames.get('by')
dw = dw.replace("d", "D", 2).replace("l", "L", 1).replace("w", "W", 1)
fx = fx.replace("f", "F", 1).replace("d", "D", 1).replace("x", "X", 1)
sj = sj.replace("s", "S", 1).replace("j", "J", 1)
dj = dj.replace("d", "D", 1).replace("j", "J", 1)
sf = sf.replace("s", "S", 1).replace("f", "F", 1)
ww = ww.replace("w", "W", 2)
nk = nk.replace("n", "N", 1).replace("k", "K", 1)
by = by.replace("b", "B", 1)
bl = ' / '.join(list(filter(None, [dw, fx, ww, nk, by])))
s = ' / '.join(list(filter(None, [dw, sj, sf])))
sjbl = ' / '.join(list(filter(None, [s, bl])))
if not dw:
dw = "Nicht gesetzt!"
if not fx:
fx = "Nicht gesetzt!"
if not sj:
sj = "Nicht gesetzt!"
if not dj:
dj = "Nicht gesetzt!"
if not sf:
sf = "Nicht gesetzt!"
if not ww:
ww = "Nicht gesetzt!"
if not nk:
nk = "Nicht gesetzt!"
if not by:
by = "Nicht gesetzt!"
if not bl:
bl = "Nicht gesetzt!"
if not s:
s = "Nicht gesetzt!"
if not sjbl:
sjbl = "Nicht gesetzt!"
return jsonify(
{
"hostnames": {
"sj": sj,
"dj": dj,
"sf": sf,
"by": by,
"dw": dw,
"fx": fx,
"nk": nk,
"ww": ww,
"bl": bl,
"s": s,
"sjbl": sjbl
}
}
)
except:
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/api/blocked_sites/", methods=['GET'])
@requires_auth
def get_blocked_sites():
if request.method == 'GET':
try:
def check(site, db):
return to_bool(str(db.retrieve(site)).replace("Blocked", "True"))
db_status = FeedDb('site_status')
return jsonify(
{
"site_status": {
"SJ": check("SJ", db_status),
"DJ": check("DJ", db_status),
"SF": check("SF", db_status),
"BY": check("BY", db_status),
"DW": check("DW", db_status),
"FX": check("FX", db_status),
"HW": check("HW", db_status),
"NK": check("NK", db_status),
"WW": check("WW", db_status)
}
}
)
except:
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/api/start_now/", methods=['POST'])
@requires_auth
def start_now():
if request.method == 'POST':
try:
FeedDb('crawltimes').store("startnow", "True")
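                # The crawler loop clears the 'startnow' flag once it picks the request
                # up; poll for that handshake for up to ~15s (3 x 5s) before giving up.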
i = 3
started = False
while i > 0:
if not FeedDb('crawltimes').retrieve("startnow"):
started = True
break
i -= 1
time.sleep(5)
if started:
return "Success", 200
else:
return "Failed", 400
except:
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/api/search/<title>", methods=['GET'])
@requires_auth
def search_title(title):
if request.method == 'GET':
try:
results = search.get(title)
return jsonify(
{
"results": {
"bl": results[0],
"sj": results[1]
}
}
), 200
except:
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/api/download_movie/<title>", methods=['POST'])
@requires_auth
def download_movie(title):
if request.method == 'POST':
try:
payload = feedcrawler.search.shared.content_all.get_best_result(title)
if payload:
matches = feedcrawler.search.shared.content_all.download(payload)
return "Success: " + str(matches), 200
except:
pass
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/api/download_show/<title>", methods=['POST'])
@requires_auth
def download_show(title):
if request.method == 'POST':
try:
payload = feedcrawler.search.shared.content_shows.get_best_result(title)
if payload:
matches = feedcrawler.search.shared.content_shows.download(payload)
if matches:
return "Success: " + str(matches), 200
except:
pass
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/api/download_bl/<payload>", methods=['POST'])
@requires_auth
def download_bl(payload):
if request.method == 'POST':
try:
if feedcrawler.search.shared.content_all.download(payload):
return "Success", 200
except:
pass
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/api/download_sj/<payload>", methods=['POST'])
@requires_auth
def download_sj(payload):
if request.method == 'POST':
try:
if feedcrawler.search.shared.content_shows.download(payload):
return "Success", 200
except:
pass
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/api/myjd/", methods=['GET'])
@requires_auth
def myjd_info():
if request.method == 'GET':
try:
myjd = get_info()
packages_to_decrypt = get_to_decrypt()
if myjd:
return jsonify(
{
"downloader_state": myjd[1],
"grabber_collecting": myjd[2],
"update_ready": myjd[3],
"packages": {
"downloader": myjd[4][0],
"linkgrabber_decrypted": myjd[4][1],
"linkgrabber_offline": myjd[4][2],
"linkgrabber_failed": myjd[4][3],
"to_decrypt": packages_to_decrypt
}
}
), 200
except:
pass
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/api/myjd_state/", methods=['GET'])
@requires_auth
def myjd_state():
if request.method == 'GET':
try:
myjd = get_state()
if myjd:
return jsonify(
{
"downloader_state": myjd[1],
"grabber_collecting": myjd[2]
}
), 200
except:
pass
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/api/myjd_move/<linkids>&<uuids>", methods=['POST'])
@requires_auth
def myjd_move(linkids, uuids):
if request.method == 'POST':
try:
linkids_raw = ast.literal_eval(linkids)
linkids = []
if isinstance(linkids_raw, (list, tuple)):
for linkid in linkids_raw:
linkids.append(linkid)
else:
linkids.append(linkids_raw)
uuids_raw = ast.literal_eval(uuids)
uuids = []
if isinstance(uuids_raw, (list, tuple)):
for uuid in uuids_raw:
uuids.append(uuid)
else:
uuids.append(uuids_raw)
if move_to_downloads(linkids, uuids):
return "Success", 200
except:
pass
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/api/myjd_remove/<linkids>&<uuids>", methods=['POST'])
@requires_auth
def myjd_remove(linkids, uuids):
if request.method == 'POST':
try:
linkids_raw = ast.literal_eval(linkids)
linkids = []
if isinstance(linkids_raw, (list, tuple)):
for linkid in linkids_raw:
linkids.append(linkid)
else:
linkids.append(linkids_raw)
uuids_raw = ast.literal_eval(uuids)
uuids = []
if isinstance(uuids_raw, (list, tuple)):
for uuid in uuids_raw:
uuids.append(uuid)
else:
uuids.append(uuids_raw)
if remove_from_linkgrabber(linkids, uuids):
return "Success", 200
except:
pass
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/api/internal_remove/<name>", methods=['POST'])
@requires_auth
def internal_remove(name):
if request.method == 'POST':
try:
delete = remove_decrypt(name)
if delete:
return "Success", 200
except:
pass
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/api/myjd_retry/<linkids>&<uuids>&<b64_links>", methods=['POST'])
@requires_auth
def myjd_retry(linkids, uuids, b64_links):
if request.method == 'POST':
try:
linkids_raw = ast.literal_eval(linkids)
linkids = []
if isinstance(linkids_raw, (list, tuple)):
for linkid in linkids_raw:
linkids.append(linkid)
else:
linkids.append(linkids_raw)
uuids_raw = ast.literal_eval(uuids)
uuids = []
if isinstance(uuids_raw, (list, tuple)):
for uuid in uuids_raw:
uuids.append(uuid)
else:
uuids.append(uuids_raw)
links = decode_base64(b64_links)
links = links.split("\n")
if retry_decrypt(linkids, uuids, links):
return "Success", 200
except:
pass
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/api/myjd_update/", methods=['POST'])
@requires_auth
def myjd_update():
if request.method == 'POST':
try:
if update_jdownloader():
return "Success", 200
except:
pass
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/api/myjd_start/", methods=['POST'])
@requires_auth
def myjd_start():
if request.method == 'POST':
try:
if jdownloader_start():
return "Success", 200
except:
pass
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/api/myjd_pause/<bl>", methods=['POST'])
@requires_auth
def myjd_pause(bl):
if request.method == 'POST':
try:
bl = json.loads(bl)
if jdownloader_pause(bl):
return "Success", 200
except:
pass
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/api/myjd_stop/", methods=['POST'])
@requires_auth
def myjd_stop():
if request.method == 'POST':
try:
if jdownloader_stop():
return "Success", 200
except:
pass
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/api/myjd_cnl/<uuid>", methods=['POST'])
@requires_auth
def myjd_cnl(uuid):
if request.method == 'POST':
try:
failed = get_info()
if failed:
decrypted_packages = failed[4][1]
offline_packages = failed[4][2]
failed_packages = failed[4][3]
else:
failed_packages = False
decrypted_packages = False
if not failed_packages:
return "Failed", 500
title = False
old_package = False
if failed_packages:
for op in failed_packages:
if str(op['uuid']) == str(uuid):
title = op['name']
old_package = op
break
if not old_package or not title:
return "Failed", 500
known_packages = []
if decrypted_packages:
for dp in decrypted_packages:
known_packages.append(dp['uuid'])
if offline_packages:
for op in offline_packages:
known_packages.append(op['uuid'])
cnl_package = False
grabber_was_collecting = False
i = 12
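                # Poll the linkgrabber for up to ~60s (12 x 5s): a package whose uuid is
                # not in known_packages is the one freshly added via Click'n'Load.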
while i > 0:
i -= 1
time.sleep(5)
if get_info():
grabber_collecting = failed[2]
if grabber_was_collecting or grabber_collecting:
grabber_was_collecting = grabber_collecting
i -= 1
time.sleep(5)
else:
if not grabber_collecting:
decrypted_packages = failed[4][1]
offline_packages = failed[4][2]
another_device = package_merge(decrypted_packages, title, known_packages)[0]
if another_device:
info = get_info()
if info:
grabber_collecting = info[2]
decrypted_packages = info[4][1]
offline_packages = info[4][2]
if not grabber_collecting and decrypted_packages:
for dp in decrypted_packages:
if dp['uuid'] not in known_packages:
cnl_package = dp
i = 0
if not grabber_collecting and offline_packages:
for op in offline_packages:
if op['uuid'] not in known_packages:
cnl_package = op
i = 0
if not cnl_package:
return "No Package added through Click'n'Load in time!", 504
replaced = do_package_replace(old_package, cnl_package)
if replaced:
return "Success", 200
except:
pass
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/api/internal_cnl/<name>&<password>", methods=['POST'])
@requires_auth
def internal_cnl(name, password):
if request.method == 'POST':
try:
failed = get_info()
if failed:
decrypted_packages = failed[4][1]
offline_packages = failed[4][2]
else:
decrypted_packages = False
known_packages = []
if decrypted_packages:
for dp in decrypted_packages:
known_packages.append(dp['uuid'])
if offline_packages:
for op in offline_packages:
known_packages.append(op['uuid'])
cnl_packages = []
grabber_was_collecting = False
i = 12
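                # Same polling handshake as in myjd_cnl: wait up to ~60s for packages
                # added via Click'n'Load that are not among the known uuids.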
while i > 0:
i -= 1
time.sleep(5)
failed = get_info()
if failed:
grabber_collecting = failed[2]
if grabber_was_collecting or grabber_collecting:
grabber_was_collecting = grabber_collecting
i -= 1
time.sleep(5)
else:
if not grabber_collecting:
decrypted_packages = failed[4][1]
offline_packages = failed[4][2]
if not grabber_collecting and decrypted_packages:
for dp in decrypted_packages:
if dp['uuid'] not in known_packages:
cnl_packages.append(dp)
i = 0
if not grabber_collecting and offline_packages:
for op in offline_packages:
if op['uuid'] not in known_packages:
cnl_packages.append(op)
i = 0
if not cnl_packages:
return "No Package added through Click'n'Load in time!", 504
if do_add_decrypted(name, password, cnl_packages):
remove_decrypt(name)
return "Success", 200
except:
pass
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/api/lists/", methods=['GET', 'POST'])
@requires_auth
def get_post_lists():
if request.method == 'GET':
try:
def get_list(liste):
cont = ListDb(liste).retrieve()
return "\n".join(cont) if cont else ""
return jsonify(
{
"lists": {
"mb": {
"filme": get_list('List_ContentAll_Movies'),
"regex": get_list('List_ContentAll_Movies_Regex'),
},
"sj": {
"serien": get_list('List_ContentShows_Shows'),
"regex": get_list('List_ContentShows_Shows_Regex'),
"staffeln_regex": get_list('List_ContentShows_Seasons_Regex'),
},
"dj": {
"dokus": get_list('List_CustomDJ_Documentaries'),
"regex": get_list('List_CustomDJ_Documentaries_Regex'),
},
"mbsj": {
"staffeln": get_list('List_ContentAll_Seasons'),
}
},
}
)
except:
return "Failed", 400
if request.method == 'POST':
try:
data = request.json
ListDb("List_ContentAll_Movies").store_list(
data['mb']['filme'].split('\n'))
ListDb("List_ContentAll_Seasons").store_list(
data['mbsj']['staffeln'].split('\n'))
ListDb("List_ContentAll_Movies_Regex").store_list(
data['mb']['regex'].split('\n'))
ListDb("List_ContentShows_Shows").store_list(
data['sj']['serien'].split('\n'))
ListDb("List_ContentShows_Shows_Regex").store_list(
data['sj']['regex'].split('\n'))
ListDb("List_ContentShows_Seasons_Regex").store_list(
data['sj']['staffeln_regex'].split('\n'))
ListDb("List_CustomDJ_Documentaries").store_list(
data['dj']['dokus'].split('\n'))
ListDb("List_CustomDJ_Documentaries_Regex").store_list(
data['dj']['regex'].split('\n'))
return "Success", 201
except:
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/redirect_user/<target>", methods=['GET'])
@requires_auth
def redirect_user(target):
if request.method == 'GET':
try:
if target == "captcha":
return redirect("http://getcaptchasolution.com/zuoo67f5cq", code=302)
elif target == "multihoster":
return redirect("http://linksnappy.com/?ref=397097", code=302)
except:
pass
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/sponsors_helper/feedcrawler_helper_sj.user.js", methods=['GET'])
@requires_auth
def feedcrawler_helper_sj():
if request.method == 'GET':
try:
hostnames = CrawlerConfig('Hostnames')
sj = hostnames.get('sj')
dj = hostnames.get('dj')
return """// ==UserScript==
// @name FeedCrawler Helper (SJ/DJ)
// @author rix1337
// @description Forwards decrypted SJ/DJ Download links to FeedCrawler
// @version 0.3.0
// @require https://ajax.googleapis.com/ajax/libs/jquery/3.5.1/jquery.min.js
// @match https://""" + sj + """/*
// @match https://""" + dj + """/*
// @exclude https://""" + sj + """/serie/search?q=*
// @exclude https://""" + dj + """/serie/search?q=*
// ==/UserScript==
document.body.addEventListener('mousedown', function (e) {
if (e.target.tagName != "A") return;
var anchor = e.target;
if (anchor.href.search(/""" + sj + """\/serie\//i) != -1) {
anchor.href = anchor.href + '#' + anchor.text;
} else if (anchor.href.search(/""" + dj + """\/serie\//i) != -1) {
anchor.href = anchor.href + '#' + anchor.text;
}
});
var tag = window.location.hash.replace("#", "").split('|');
var title = tag[0];
var password = tag[1];
if (title) {
$('.wrapper').prepend('<h3>[FeedCrawler Helper] ' + title + '</h3>');
$(".container").hide();
var checkExist = setInterval(async function () {
if ($("tr:contains('" + title + "')").length) {
$(".container").show();
$("tr:contains('" + title + "')")[0].lastChild.firstChild.click();
clearInterval(checkExist);
}
}, 100);
}
""", 200
except:
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/sponsors_helper/feedcrawler_sponsors_helper_dw.user.js", methods=['GET'])
@requires_auth
def feedcrawler_sponsors_helper_dw():
if not helper_active:
return "Forbidden", 403
if request.method == 'GET':
try:
hostnames = CrawlerConfig('Hostnames')
dw = hostnames.get('dw')
return """// ==UserScript==
// @name FeedCrawler Sponsors Helper (DW)
// @author rix1337
// @description Clicks the correct download button on DW sub pages to speed up Click'n'Load
// @version 0.2.0
// @require https://ajax.googleapis.com/ajax/libs/jquery/3.5.1/jquery.min.js
// @match https://""" + dw + """/*
// @grant window.close
// ==/UserScript==
// The externally reachable address of the FeedCrawler goes here (not, e.g., the Docker-internal one):
var sponsorsURL = '""" + internal.local_address + """';
// A preferred hoster can be entered here (exactly 'ddownload.com' or 'rapidgator.net'):
var sponsorsHoster = '';
document.body.addEventListener('mousedown', function (e) {
if (e.target.tagName != "A") return;
var anchor = e.target;
if (anchor.href.search(/""" + dw + """\/download\//i) != -1) {
anchor.href = anchor.href + '#' + anchor.text;
}
});
var tag = window.location.hash.replace("#", "").split('|');
var title = tag[0];
var password = tag[1];
if (title) {
$('.container').prepend('<h3>[FeedCrawler Sponsors Helper] ' + title + '</h3>');
var checkExist = setInterval(async function() {
if (sponsorsHoster && $("span:contains('Download Mirror')").find('a[data-original-title="Download bei ' + sponsorsHoster + '"]').length) {
$("span:contains('Download Mirror')").find('a[data-original-title="Download bei ' + sponsorsHoster + '"]').click();
} else {
$("span:contains('Download Mirror 1')").click();
}
console.log("[FeedCrawler Sponsors Helper] clicked Download button to trigger reCAPTCHA");
clearInterval(checkExist);
}, 100);
var dlExists = setInterval(async function() {
if ($("tr:contains('Download Part')").length) {
var items = $("tr:contains('Download Part')").find("a");
var links = [];
items.each(function(index){
links.push(items[index].href);
})
console.log("[FeedCrawler Sponsors Helper] found download links: " + links);
clearInterval(dlExists);
window.open(sponsorsURL + '/sponsors_helper/to_download/' + btoa(links + '|' + title + '|' + password));
window.close();
}
}, 100);
}
""", 200
except:
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/sponsors_helper/feedcrawler_sponsors_helper_sj.user.js", methods=['GET'])
@requires_auth
def feedcrawler_sponsors_helper_sj():
if not helper_active:
return "Forbidden", 403
if request.method == 'GET':
try:
hostnames = CrawlerConfig('Hostnames')
sj = hostnames.get('sj')
dj = hostnames.get('dj')
return """// ==UserScript==
// @name FeedCrawler Sponsors Helper (SJ/DJ)
// @author rix1337
// @description Clicks the correct download button on SJ/DJ sub pages to speed up Click'n'Load
// @version 0.4.0
// @require https://ajax.googleapis.com/ajax/libs/jquery/3.5.1/jquery.min.js
// @match https://""" + sj + """/*
// @match https://""" + dj + """/*
// @exclude https://""" + sj + """/serie/search?q=*
// @exclude https://""" + dj + """/serie/search?q=*
// @grant window.close
// ==/UserScript==
// The externally reachable address of the FeedCrawler goes here (not, e.g., the Docker-internal one):
var sponsorsURL = '""" + internal.local_address + """';
// A preferred hoster can be entered here (without www. and .tld):
var sponsorsHoster = '';
// NOTE: 'sponsorsHelper' is referenced below but never declared in the original
// script; declaring it here (assumed true) is a guess so the script runs without
// a ReferenceError.
var sponsorsHelper = true;
$.extend($.expr[':'], {
'containsi': function(elem, i, match, array) {
return (elem.textContent || elem.innerText || '').toLowerCase()
.indexOf((match[3] || "").toLowerCase()) >= 0;
}
});
document.body.addEventListener('mousedown', function (e) {
if (e.target.tagName != "A") return;
var anchor = e.target;
if (anchor.href.search(/""" + sj + """\/serie\//i) != -1) {
anchor.href = anchor.href + '#' + anchor.text;
} else if (anchor.href.search(/""" + dj + """\/serie\//i) != -1) {
anchor.href = anchor.href + '#' + anchor.text;
}
});
function Sleep(milliseconds) {
return new Promise(resolve => setTimeout(resolve, milliseconds));
}
var tag = window.location.hash.replace("#", "").split('|');
var title = tag[0];
var password = tag[1];
if (title && title !== "login") {
$('.wrapper').prepend('<h3>[FeedCrawler Sponsors Helper] ' + title + '</h3>');
$(".container").hide();
var checkExist = setInterval(function() {
async function clickRelease() {
if ($("tr:contains('" + title + "')").length) {
$(".container").show();
$("tr:contains('" + title + "')")[0].lastChild.firstChild.click();
if (sponsorsHelper) {
console.log("[FeedCrawler Sponsors Helper] Clicked Download button of " + title);
await Sleep(500);
var requiresLogin = $(".alert-warning").length;
if (requiresLogin) {
clearInterval(checkExist);
window.open("https://" + $(location).attr('hostname') + "#login|" + btoa(window.location));
window.close()
}
}
clearInterval(checkExist);
} }
clickRelease();
}, 100);
if (sponsorsHelper) {
var dlExists = setInterval(async function() {
if ($("tr:contains('Download Part')").length) {
var items = $("tr:contains('Download Part')").find("a");
var links = [];
items.each(function(index){
links.push(items[index].href);
})
console.log("[FeedCrawler Sponsors Helper] found download links: " + links);
clearInterval(dlExists);
window.open(sponsorsURL + '/sponsors_helper/to_download/' + btoa(links + '|' + title + '|' + password));
window.close();
} else if ( document.body.innerHTML.search("se das Captcha!") && !$('.center-recaptcha').length) {
if ( sponsorsHoster && $("button:containsi('" + sponsorsHoster + "')").length) {
$("button:containsi('" + sponsorsHoster + "')").click();
} else if ( $("button:containsi('1fichier')").length) {
$("button:containsi('1fichier')").click();
} else if ( $("button:containsi('ddownload')").length) {
$("button:containsi('ddownload')").click();
} else if ( $("button:containsi('turbo')").length) {
$("button:containsi('turbo')").click();
} else if ( $("button:containsi('filer')").length) {
$("button:containsi('filer')").click();
} else {
$("div.modal-body").find("button.btn.btn-secondary.btn-block").click();
}
console.log("[FeedCrawler Sponsors Helper] Clicked Download button to trigger reCAPTCHA");
}
}, 100);
}
};
""", 200
except:
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/sponsors_helper/feedcrawler_sponsors_helper_fc.user.js", methods=['GET'])
@requires_auth
def feedcrawler_sponsors_helper_fc():
if not helper_active:
return "Forbidden", 403
if request.method == 'GET':
try:
return """// ==UserScript==
// @name FeedCrawler Sponsors Helper (FC)
// @author rix1337
// @description Forwards Click'n'Load to FeedCrawler
// @version 0.5.0
// @match *.filecrypt.cc/*
// @match *.filecrypt.co/*
// @grant window.close
// ==/UserScript==
// The externally reachable address of the FeedCrawler goes here (not, e.g., the Docker-internal one):
var sponsorsURL = '""" + internal.local_address + """';
// A preferred hoster can be entered here (without www. and .tld):
var sponsorsHoster = '';
var tag = window.location.hash.replace("#", "").split('|');
var title = tag[0]
var password = tag[1]
var ids = tag[2]
var urlParams = new URLSearchParams(window.location.search);
function Sleep(milliseconds) {
return new Promise(resolve => setTimeout(resolve, milliseconds));
}
var mirrorsAvailable = false;
try {
mirrorsAvailable = document.querySelector('.mirror').querySelectorAll("a");
} catch {}
var cnlAllowed = false;
if (mirrorsAvailable && sponsorsHoster) {
const currentURL = window.location.href;
var desiredMirror = "";
var i;
for (i = 0; i < mirrorsAvailable.length; i++) {
if (mirrorsAvailable[i].text.includes(sponsorsHoster)) {
var ep = "";
var cur_ep = urlParams.get('episode');
if (cur_ep) {
ep = "&episode=" + cur_ep;
}
desiredMirror = mirrorsAvailable[i].href + ep + window.location.hash;
}
}
if (desiredMirror) {
if (!currentURL.includes(desiredMirror)) {
console.log("[FeedCrawler Sponsors Helper] switching to desired Mirror: " + sponsorsHoster);
window.location = desiredMirror;
} else {
console.log("[FeedCrawler Sponsors Helper] already at the desired Mirror: " + sponsorsHoster);
cnlAllowed = true;
}
} else {
console.log("[FeedCrawler Sponsors Helper] desired Mirror not available: " + sponsorsHoster);
cnlAllowed = true;
}
} else {
cnlAllowed = true;
}
var cnlExists = setInterval(async function() {
if (cnlAllowed && document.getElementsByClassName("cnlform").length) {
clearInterval(cnlExists);
document.getElementById("cnl_btn").click();
console.log("[FeedCrawler Sponsors Helper] attempting Click'n'Load");
await Sleep(4000);
window.close();
}
}, 100);
""", 200
except:
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/sponsors_helper/", methods=['GET'])
@requires_auth
def to_decrypt():
global helper_active
helper_active = True
if request.method == 'GET':
return render_template('helper.html')
else:
return "Failed", 405
@app.route(prefix + "/sponsors_helper/api/to_decrypt/", methods=['GET'])
@requires_auth
def to_decrypt_api():
global helper_active
if request.method == 'GET':
try:
helper_active = True
decrypt_name = False
decrypt_url = False
decrypt = get_to_decrypt()
if decrypt:
decrypt = decrypt[0]
decrypt_name = decrypt["name"]
decrypt_url = decrypt["url"].replace("http://", "https://") + "#" + decrypt_name + "|" + decrypt[
"password"]
return jsonify(
{
"to_decrypt": {
"name": decrypt_name,
"url": decrypt_url,
}
}
)
except:
return "Failed", 400
else:
return "Failed", 405
@app.route(prefix + "/sponsors_helper/to_download/<payload>", methods=['GET'])
@requires_auth
def to_download(payload):
if request.method == 'GET':
try:
global already_added
try:
payload = decode_base64(payload.replace("%3D", "=")).split("|")
except:
return "Failed", 400
if payload:
links = payload[0]
package_name = payload[1].replace("%20", "")
name = package_name
try:
password = payload[2]
except:
password = ""
try:
ids = payload[3]
except:
ids = False
FeedDb('crawldog').store(package_name, 'added')
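                    # Record the package in the 'crawldog' DB, presumably so the crawldog
                    # watchdog knows this package was already handled by the helper.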
if internal.device:
if ids:
try:
ids = ids.replace("%20", "").split(";")
linkids = ids[0]
uuids = ids[1]
except:
linkids = False
uuids = False
if ids and uuids:
linkids_raw = ast.literal_eval(linkids)
linkids = []
if isinstance(linkids_raw, (list, tuple)):
for linkid in linkids_raw:
linkids.append(linkid)
else:
linkids.append(linkids_raw)
uuids_raw = ast.literal_eval(uuids)
uuids = []
if isinstance(uuids_raw, (list, tuple)):
for uuid in uuids_raw:
uuids.append(uuid)
else:
uuids.append(uuids_raw)
remove_from_linkgrabber(linkids, uuids)
remove_decrypt(package_name)
else:
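                            # No linkgrabber ids were passed along: build a tolerant regex from
                            # the package name so the matching failed/offline package can be
                            # found and removed from the linkgrabber instead.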
is_episode = re.findall(r'.*\.(S\d{1,3}E\d{1,3})\..*', package_name)
if not is_episode:
re_name = rreplace(package_name.lower(), "-", ".*", 1)
re_name = re_name.replace(".untouched", ".*").replace("dd+51", "dd.51")
season_string = re.findall(r'.*(s\d{1,3}).*', re_name)
if season_string:
re_name = re_name.replace(season_string[0], season_string[0] + '.*')
codec_tags = [".h264", ".x264"]
for tag in codec_tags:
re_name = re_name.replace(tag, ".*264")
web_tags = [".web-rip", ".webrip", ".webdl", ".web-dl"]
for tag in web_tags:
re_name = re_name.replace(tag, ".web.*")
multigroup = re.findall(r'.*-((.*)\/(.*))', package_name.lower())
if multigroup:
re_name = re_name.replace(multigroup[0][0],
'(' + multigroup[0][1] + '|' + multigroup[0][2] + ')')
else:
re_name = package_name
season_string = re.findall(r'.*(s\d{1,3}).*', re_name.lower())
if season_string:
season_string = season_string[0].replace("s", "S")
else:
season_string = "^unmatchable$"
try:
packages = get_packages_in_linkgrabber()
except feedcrawler.myjdapi.TokenExpiredException:
get_device()
if not internal.device or not is_device(internal.device):
return "Failed", 500
packages = get_packages_in_linkgrabber()
if packages:
failed = packages[0]
offline = packages[1]
try:
if failed:
for package in failed:
if re.match(re.compile(re_name), package['name'].lower()):
episode = re.findall(r'.*\.S\d{1,3}E(\d{1,3})\..*', package['name'])
# ToDo refactor to new code below
if episode:
FeedDb('episode_remover').store(package_name, str(int(episode[0])))
linkids = package['linkids']
uuids = [package['uuid']]
remove_from_linkgrabber(linkids, uuids)
remove_decrypt(package_name)
break
if offline:
for package in offline:
if re.match(re.compile(re_name), package['name'].lower()):
episode = re.findall(r'.*\.S\d{1,3}E(\d{1,3})\..*', package['name'])
# ToDo refactor to new code below
if episode:
FeedDb('episode_remover').store(package_name, str(int(episode[0])))
linkids = package['linkids']
uuids = [package['uuid']]
remove_from_linkgrabber(linkids, uuids)
remove_decrypt(package_name)
break
except:
pass
packages = get_to_decrypt()
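                            # Also check FeedCrawler's own decrypt queue: when a single episode
                            # of a season pack was decrypted, tag the name with SxxEyy and
                            # remember the episode to keep in the 'episode_remover' DB.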
if packages:
for package in packages:
if name == package["name"].strip():
name = package["name"]
elif re.match(re.compile(re_name),
package['name'].lower().strip().replace(".untouched", ".*").replace(
"dd+51",
"dd.51")):
episode = re.findall(r'.*\.S\d{1,3}E(\d{1,3})\..*', package['name'])
remove_decrypt(package['name'])
if episode:
episode_to_keep = str(int(episode[0]))
episode = str(episode[0])
if len(episode) == 1:
episode = "0" + episode
name = name.replace(season_string + ".",
season_string + "E" + episode + ".")
episode_in_remover = FeedDb('episode_remover').retrieve(package_name)
if episode_in_remover:
episode_to_keep = episode_in_remover + "|" + episode_to_keep
FeedDb('episode_remover').delete(package_name)
time.sleep(1)
FeedDb('episode_remover').store(package_name, episode_to_keep)
break
time.sleep(1)
remove_decrypt(name)
try:
epoch = int(time.time())
for item in already_added:
if item[0] == package_name:
if int(item[1]) + 30 > epoch:
print(name + u" wurde in den letzten 30 Sekunden bereits hinzugefügt")
return name + u" wurde in den letzten 30 Sekunden bereits hinzugefügt", 400
else:
already_added.remove(item)
download(package_name, "FeedCrawler", links, password)
db = FeedDb('FeedCrawler')
if not db.retrieve(name):
db.store(name, 'added')
try:
notify(["[FeedCrawler Sponsors Helper erfolgreich] - " + name])
except:
print(u"Benachrichtigung konnte nicht versendet werden!")
print(u"[FeedCrawler Sponsors Helper erfolgreich] - " + name)
already_added.append([name, str(epoch)])
return "<script type='text/javascript'>" \
"function closeWindow(){window.close()}window.onload=closeWindow;</script>" \
"[FeedCrawler Sponsors Helper erfolgreich] - " + name, 200
except:
print(name + u" konnte nicht hinzugefügt werden!")
except:
pass
return "Failed", 400
else:
return "Failed", 405
serve(app, host='0.0.0.0', port=internal.port, threads=10, _quiet=True)
def start():
sys.stdout = Unbuffered(sys.stdout)
disable_request_warnings(InsecureRequestWarning)
if version.update_check()[0]:
updateversion = version.update_check()[1]
print(u'Update steht bereit (' + updateversion +
')! Weitere Informationen unter https://github.com/rix1337/FeedCrawler/releases/latest')
app_container()
|
[
"requests.packages.urllib3.disable_warnings",
"feedcrawler.myjd.jdownloader_stop",
"feedcrawler.common.remove_decrypt",
"os.path.isfile",
"flask.jsonify",
"feedcrawler.common.is_device",
"passlib.hash.pbkdf2_sha256.hash",
"os.path.join",
"feedcrawler.myjd.download",
"passlib.hash.pbkdf2_sha256.verify",
"json.loads",
"flask.redirect",
"feedcrawler.myjd.retry_decrypt",
"feedcrawler.myjd.get_device",
"feedcrawler.myjd.get_state",
"re.findall",
"feedcrawler.notifiers.notify",
"flask.render_template",
"feedcrawler.db.FeedDb",
"flask.Response",
"feedcrawler.common.Unbuffered",
"re.sub",
"feedcrawler.myjd.get_packages_in_linkgrabber",
"feedcrawler.common.decode_base64",
"feedcrawler.version.get_version",
"re.match",
"time.sleep",
"feedcrawler.myjd.do_add_decrypted",
"feedcrawler.myjd.package_merge",
"waitress.serve",
"feedcrawler.myjd.get_info",
"functools.wraps",
"feedcrawler.myjd.get_if_one_device",
"feedcrawler.myjd.remove_from_linkgrabber",
"feedcrawler.myjd.move_to_downloads",
"re.compile",
"feedcrawler.myjd.check_device",
"feedcrawler.version.update_check",
"feedcrawler.myjd.update_jdownloader",
"feedcrawler.myjd.jdownloader_pause",
"feedcrawler.myjd.do_package_replace",
"time.time",
"feedcrawler.db.ListDb",
"ast.literal_eval",
"feedcrawler.search.search.get",
"feedcrawler.config.CrawlerConfig",
"feedcrawler.myjd.jdownloader_start",
"feedcrawler.common.get_to_decrypt"
] |
[((2257, 2285), 'feedcrawler.config.CrawlerConfig', 'CrawlerConfig', (['"""FeedCrawler"""'], {}), "('FeedCrawler')\n", (2270, 2285), False, 'from feedcrawler.config import CrawlerConfig\n'), ((69071, 69142), 'waitress.serve', 'serve', (['app'], {'host': '"""0.0.0.0"""', 'port': 'internal.port', 'threads': '(10)', '_quiet': '(True)'}), "(app, host='0.0.0.0', port=internal.port, threads=10, _quiet=True)\n", (69076, 69142), False, 'from waitress import serve\n'), ((69175, 69197), 'feedcrawler.common.Unbuffered', 'Unbuffered', (['sys.stdout'], {}), '(sys.stdout)\n', (69185, 69197), False, 'from feedcrawler.common import Unbuffered\n'), ((69202, 69250), 'requests.packages.urllib3.disable_warnings', 'disable_request_warnings', (['InsecureRequestWarning'], {}), '(InsecureRequestWarning)\n', (69226, 69250), True, 'from requests.packages.urllib3 import disable_warnings as disable_request_warnings\n'), ((2094, 2120), 'os.path.join', 'os.path.join', (['sys._MEIPASS'], {}), '(sys._MEIPASS)\n', (2106, 2120), False, 'import os\n'), ((2808, 3189), 'flask.Response', 'Response', (['"""<html>\n <head><title>401 Authorization Required</title></head>\n <body bgcolor="white">\n <center><h1>401 Authorization Required</h1></center>\n <hr><center>FeedCrawler</center>\n </body>\n </html>\n """', '(401)', '{\'WWW-Authenticate\': \'Basic realm="FeedCrawler"\'}'], {}), '(\n """<html>\n <head><title>401 Authorization Required</title></head>\n <body bgcolor="white">\n <center><h1>401 Authorization Required</h1></center>\n <hr><center>FeedCrawler</center>\n </body>\n </html>\n """\n , 401, {\'WWW-Authenticate\': \'Basic realm="FeedCrawler"\'})\n', (2816, 3189), False, 'from flask import Flask, request, redirect, send_from_directory, render_template, jsonify, Response\n'), ((3241, 3249), 'functools.wraps', 'wraps', (['f'], {}), '(f)\n', (3246, 3249), False, 'from functools import wraps\n'), ((4407, 4436), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (4422, 4436), False, 'from flask import Flask, request, redirect, send_from_directory, render_template, jsonify, Response\n'), ((69259, 69281), 'feedcrawler.version.update_check', 'version.update_check', ([], {}), '()\n', (69279, 69281), False, 'from feedcrawler import version\n'), ((2164, 2193), 'os.path.join', 'os.path.join', (['base_dir', '"""web"""'], {}), "(base_dir, 'web')\n", (2176, 2193), False, 'import os\n'), ((2568, 2597), 'passlib.hash.pbkdf2_sha256.hash', 'pbkdf2_sha256.hash', (['auth_hash'], {}), '(auth_hash)\n', (2586, 2597), False, 'from passlib.hash import pbkdf2_sha256\n'), ((2726, 2767), 'passlib.hash.pbkdf2_sha256.verify', 'pbkdf2_sha256.verify', (['password', 'auth_hash'], {}), '(password, auth_hash)\n', (2746, 2767), False, 'from passlib.hash import pbkdf2_sha256\n'), ((3311, 3339), 'feedcrawler.config.CrawlerConfig', 'CrawlerConfig', (['"""FeedCrawler"""'], {}), "('FeedCrawler')\n", (3324, 3339), False, 'from feedcrawler.config import CrawlerConfig\n'), ((4152, 4168), 'flask.redirect', 'redirect', (['prefix'], {}), '(prefix)\n', (4160, 4168), False, 'from flask import Flask, request, redirect, send_from_directory, render_template, jsonify, Response\n'), ((4289, 4318), 'os.path.join', 'os.path.join', (['base_dir', '"""web"""'], {}), "(base_dir, 'web')\n", (4301, 4318), False, 'import os\n'), ((57488, 57518), 'flask.render_template', 'render_template', (['"""helper.html"""'], {}), "('helper.html')\n", (57503, 57518), False, 'from flask import Flask, request, redirect, send_from_directory, render_template, 
jsonify, Response\n'), ((69310, 69332), 'feedcrawler.version.update_check', 'version.update_check', ([], {}), '()\n', (69330, 69332), False, 'from feedcrawler import version\n'), ((4644, 4677), 'os.path.isfile', 'os.path.isfile', (['internal.log_file'], {}), '(internal.log_file)\n', (4658, 4677), False, 'import os\n'), ((5355, 5376), 'flask.jsonify', 'jsonify', (["{'log': log}"], {}), "({'log': log})\n", (5362, 5376), False, 'from flask import Flask, request, redirect, send_from_directory, render_template, jsonify, Response\n'), ((5987, 6011), 'feedcrawler.common.decode_base64', 'decode_base64', (['b64_entry'], {}), '(b64_entry)\n', (6000, 6011), False, 'from feedcrawler.common import decode_base64\n'), ((6056, 6089), 'os.path.isfile', 'os.path.isfile', (['internal.log_file'], {}), '(internal.log_file)\n', (6070, 6089), False, 'import os\n'), ((6851, 6879), 'feedcrawler.config.CrawlerConfig', 'CrawlerConfig', (['"""FeedCrawler"""'], {}), "('FeedCrawler')\n", (6864, 6879), False, 'from feedcrawler.config import CrawlerConfig\n'), ((6906, 6930), 'feedcrawler.config.CrawlerConfig', 'CrawlerConfig', (['"""Hosters"""'], {}), "('Hosters')\n", (6919, 6930), False, 'from feedcrawler.config import CrawlerConfig\n'), ((6956, 6986), 'feedcrawler.config.CrawlerConfig', 'CrawlerConfig', (['"""Notifications"""'], {}), "('Notifications')\n", (6969, 6986), False, 'from feedcrawler.config import CrawlerConfig\n'), ((7010, 7031), 'feedcrawler.config.CrawlerConfig', 'CrawlerConfig', (['"""Ombi"""'], {}), "('Ombi')\n", (7023, 7031), False, 'from feedcrawler.config import CrawlerConfig\n'), ((7060, 7086), 'feedcrawler.config.CrawlerConfig', 'CrawlerConfig', (['"""Crawljobs"""'], {}), "('Crawljobs')\n", (7073, 7086), False, 'from feedcrawler.config import CrawlerConfig\n'), ((7113, 7140), 'feedcrawler.config.CrawlerConfig', 'CrawlerConfig', (['"""ContentAll"""'], {}), "('ContentAll')\n", (7126, 7140), False, 'from feedcrawler.config import CrawlerConfig\n'), ((7167, 7196), 'feedcrawler.config.CrawlerConfig', 'CrawlerConfig', (['"""ContentShows"""'], {}), "('ContentShows')\n", (7180, 7196), False, 'from feedcrawler.config import CrawlerConfig\n'), ((7223, 7248), 'feedcrawler.config.CrawlerConfig', 'CrawlerConfig', (['"""CustomDJ"""'], {}), "('CustomDJ')\n", (7236, 7248), False, 'from feedcrawler.config import CrawlerConfig\n'), ((12633, 12661), 'feedcrawler.config.CrawlerConfig', 'CrawlerConfig', (['"""FeedCrawler"""'], {}), "('FeedCrawler')\n", (12646, 12661), False, 'from feedcrawler.config import CrawlerConfig\n'), ((15246, 15272), 'feedcrawler.config.CrawlerConfig', 'CrawlerConfig', (['"""Crawljobs"""'], {}), "('Crawljobs')\n", (15259, 15272), False, 'from feedcrawler.config import CrawlerConfig\n'), ((15459, 15489), 'feedcrawler.config.CrawlerConfig', 'CrawlerConfig', (['"""Notifications"""'], {}), "('Notifications')\n", (15472, 15489), False, 'from feedcrawler.config import CrawlerConfig\n'), ((15840, 15864), 'feedcrawler.config.CrawlerConfig', 'CrawlerConfig', (['"""Hosters"""'], {}), "('Hosters')\n", (15853, 15864), False, 'from feedcrawler.config import CrawlerConfig\n'), ((16895, 16916), 'feedcrawler.config.CrawlerConfig', 'CrawlerConfig', (['"""Ombi"""'], {}), "('Ombi')\n", (16908, 16916), False, 'from feedcrawler.config import CrawlerConfig\n'), ((17075, 17102), 'feedcrawler.config.CrawlerConfig', 'CrawlerConfig', (['"""ContentAll"""'], {}), "('ContentAll')\n", (17088, 17102), False, 'from feedcrawler.config import CrawlerConfig\n'), ((17998, 18022), 're.match', 're.match', (['"""[^0-9]"""', 'imdb'], 
{}), "('[^0-9]', imdb)\n", (18006, 18022), False, 'import re\n'), ((18621, 18650), 'feedcrawler.config.CrawlerConfig', 'CrawlerConfig', (['"""ContentShows"""'], {}), "('ContentShows')\n", (18634, 18650), False, 'from feedcrawler.config import CrawlerConfig\n'), ((19143, 19168), 'feedcrawler.config.CrawlerConfig', 'CrawlerConfig', (['"""CustomDJ"""'], {}), "('CustomDJ')\n", (19156, 19168), False, 'from feedcrawler.config import CrawlerConfig\n'), ((20240, 20366), 'flask.jsonify', 'jsonify', (["{'version': {'ver': ver, 'update_ready': updateready, 'docker': internal.\n docker, 'helper_active': helper_active}}"], {}), "({'version': {'ver': ver, 'update_ready': updateready, 'docker':\n internal.docker, 'helper_active': helper_active}})\n", (20247, 20366), False, 'from flask import Flask, request, redirect, send_from_directory, render_template, jsonify, Response\n'), ((20878, 20898), 'feedcrawler.db.FeedDb', 'FeedDb', (['"""crawltimes"""'], {}), "('crawltimes')\n", (20884, 20898), False, 'from feedcrawler.db import FeedDb\n'), ((21791, 21817), 'feedcrawler.config.CrawlerConfig', 'CrawlerConfig', (['"""Hostnames"""'], {}), "('Hostnames')\n", (21804, 21817), False, 'from feedcrawler.config import CrawlerConfig\n'), ((23662, 23802), 'flask.jsonify', 'jsonify', (["{'hostnames': {'sj': sj, 'dj': dj, 'sf': sf, 'by': by, 'dw': dw, 'fx': fx,\n 'nk': nk, 'ww': ww, 'bl': bl, 's': s, 'sjbl': sjbl}}"], {}), "({'hostnames': {'sj': sj, 'dj': dj, 'sf': sf, 'by': by, 'dw': dw,\n 'fx': fx, 'nk': nk, 'ww': ww, 'bl': bl, 's': s, 'sjbl': sjbl}})\n", (23669, 23802), False, 'from flask import Flask, request, redirect, send_from_directory, render_template, jsonify, Response\n'), ((24639, 24660), 'feedcrawler.db.FeedDb', 'FeedDb', (['"""site_status"""'], {}), "('site_status')\n", (24645, 24660), False, 'from feedcrawler.db import FeedDb\n'), ((26378, 26395), 'feedcrawler.search.search.get', 'search.get', (['title'], {}), '(title)\n', (26388, 26395), False, 'from feedcrawler.search import search\n'), ((28913, 28923), 'feedcrawler.myjd.get_info', 'get_info', ([], {}), '()\n', (28921, 28923), False, 'from feedcrawler.myjd import get_info\n'), ((28962, 28978), 'feedcrawler.common.get_to_decrypt', 'get_to_decrypt', ([], {}), '()\n', (28976, 28978), False, 'from feedcrawler.common import get_to_decrypt\n'), ((29986, 29997), 'feedcrawler.myjd.get_state', 'get_state', ([], {}), '()\n', (29995, 29997), False, 'from feedcrawler.myjd import get_state\n'), ((30591, 30616), 'ast.literal_eval', 'ast.literal_eval', (['linkids'], {}), '(linkids)\n', (30607, 30616), False, 'import ast\n'), ((30897, 30920), 'ast.literal_eval', 'ast.literal_eval', (['uuids'], {}), '(uuids)\n', (30913, 30920), False, 'import ast\n'), ((31176, 31209), 'feedcrawler.myjd.move_to_downloads', 'move_to_downloads', (['linkids', 'uuids'], {}), '(linkids, uuids)\n', (31193, 31209), False, 'from feedcrawler.myjd import move_to_downloads\n'), ((31595, 31620), 'ast.literal_eval', 'ast.literal_eval', (['linkids'], {}), '(linkids)\n', (31611, 31620), False, 'import ast\n'), ((31901, 31924), 'ast.literal_eval', 'ast.literal_eval', (['uuids'], {}), '(uuids)\n', (31917, 31924), False, 'import ast\n'), ((32180, 32219), 'feedcrawler.myjd.remove_from_linkgrabber', 'remove_from_linkgrabber', (['linkids', 'uuids'], {}), '(linkids, uuids)\n', (32203, 32219), False, 'from feedcrawler.myjd import remove_from_linkgrabber\n'), ((32587, 32607), 'feedcrawler.common.remove_decrypt', 'remove_decrypt', (['name'], {}), '(name)\n', (32601, 32607), False, 'from feedcrawler.common import 
remove_decrypt\n'), ((33040, 33065), 'ast.literal_eval', 'ast.literal_eval', (['linkids'], {}), '(linkids)\n', (33056, 33065), False, 'import ast\n'), ((33346, 33369), 'ast.literal_eval', 'ast.literal_eval', (['uuids'], {}), '(uuids)\n', (33362, 33369), False, 'import ast\n'), ((33630, 33654), 'feedcrawler.common.decode_base64', 'decode_base64', (['b64_links'], {}), '(b64_links)\n', (33643, 33654), False, 'from feedcrawler.common import decode_base64\n'), ((33716, 33752), 'feedcrawler.myjd.retry_decrypt', 'retry_decrypt', (['linkids', 'uuids', 'links'], {}), '(linkids, uuids, links)\n', (33729, 33752), False, 'from feedcrawler.myjd import retry_decrypt\n'), ((34096, 34116), 'feedcrawler.myjd.update_jdownloader', 'update_jdownloader', ([], {}), '()\n', (34114, 34116), False, 'from feedcrawler.myjd import update_jdownloader\n'), ((34458, 34477), 'feedcrawler.myjd.jdownloader_start', 'jdownloader_start', ([], {}), '()\n', (34475, 34477), False, 'from feedcrawler.myjd import jdownloader_start\n'), ((34827, 34841), 'json.loads', 'json.loads', (['bl'], {}), '(bl)\n', (34837, 34841), False, 'import json\n'), ((34861, 34882), 'feedcrawler.myjd.jdownloader_pause', 'jdownloader_pause', (['bl'], {}), '(bl)\n', (34878, 34882), False, 'from feedcrawler.myjd import jdownloader_pause\n'), ((35222, 35240), 'feedcrawler.myjd.jdownloader_stop', 'jdownloader_stop', ([], {}), '()\n', (35238, 35240), False, 'from feedcrawler.myjd import jdownloader_stop\n'), ((35594, 35604), 'feedcrawler.myjd.get_info', 'get_info', ([], {}), '()\n', (35602, 35604), False, 'from feedcrawler.myjd import get_info\n'), ((38743, 38787), 'feedcrawler.myjd.do_package_replace', 'do_package_replace', (['old_package', 'cnl_package'], {}), '(old_package, cnl_package)\n', (38761, 38787), False, 'from feedcrawler.myjd import do_package_replace\n'), ((39198, 39208), 'feedcrawler.myjd.get_info', 'get_info', ([], {}), '()\n', (39206, 39208), False, 'from feedcrawler.myjd import get_info\n'), ((41317, 41363), 'feedcrawler.myjd.do_add_decrypted', 'do_add_decrypted', (['name', 'password', 'cnl_packages'], {}), '(name, password, cnl_packages)\n', (41333, 41363), False, 'from feedcrawler.myjd import do_add_decrypted\n'), ((45008, 45034), 'feedcrawler.config.CrawlerConfig', 'CrawlerConfig', (['"""Hostnames"""'], {}), "('Hostnames')\n", (45021, 45034), False, 'from feedcrawler.config import CrawlerConfig\n'), ((46964, 46990), 'feedcrawler.config.CrawlerConfig', 'CrawlerConfig', (['"""Hostnames"""'], {}), "('Hostnames')\n", (46977, 46990), False, 'from feedcrawler.config import CrawlerConfig\n'), ((49792, 49818), 'feedcrawler.config.CrawlerConfig', 'CrawlerConfig', (['"""Hostnames"""'], {}), "('Hostnames')\n", (49805, 49818), False, 'from feedcrawler.config import CrawlerConfig\n'), ((57907, 57923), 'feedcrawler.common.get_to_decrypt', 'get_to_decrypt', ([], {}), '()\n', (57921, 57923), False, 'from feedcrawler.common import get_to_decrypt\n'), ((58222, 58289), 'flask.jsonify', 'jsonify', (["{'to_decrypt': {'name': decrypt_name, 'url': decrypt_url}}"], {}), "({'to_decrypt': {'name': decrypt_name, 'url': decrypt_url}})\n", (58229, 58289), False, 'from flask import Flask, request, redirect, send_from_directory, render_template, jsonify, Response\n'), ((12922, 12951), 'passlib.hash.pbkdf2_sha256.hash', 'pbkdf2_sha256.hash', (['auth_hash'], {}), '(auth_hash)\n', (12940, 12951), False, 'from passlib.hash import pbkdf2_sha256\n'), ((13336, 13375), 'feedcrawler.myjd.get_if_one_device', 'get_if_one_device', (['myjd_user', 'myjd_pass'], {}), '(myjd_user, 
myjd_pass)\n', (13353, 13375), False, 'from feedcrawler.myjd import get_if_one_device\n'), ((13596, 13643), 'feedcrawler.myjd.check_device', 'check_device', (['myjd_user', 'myjd_pass', 'myjd_device'], {}), '(myjd_user, myjd_pass, myjd_device)\n', (13608, 13643), False, 'from feedcrawler.myjd import check_device\n'), ((19802, 19823), 'feedcrawler.version.get_version', 'version.get_version', ([], {}), '()\n', (19821, 19823), False, 'from feedcrawler import version\n'), ((19843, 19865), 'feedcrawler.version.update_check', 'version.update_check', ([], {}), '()\n', (19863, 19865), False, 'from feedcrawler import version\n'), ((21507, 21520), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (21517, 21520), False, 'import time\n'), ((25935, 25948), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (25945, 25948), False, 'import time\n'), ((26419, 26477), 'flask.jsonify', 'jsonify', (["{'results': {'bl': results[0], 'sj': results[1]}}"], {}), "({'results': {'bl': results[0], 'sj': results[1]}})\n", (26426, 26477), False, 'from flask import Flask, request, redirect, send_from_directory, render_template, jsonify, Response\n'), ((36917, 36930), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (36927, 36930), False, 'import time\n'), ((36954, 36964), 'feedcrawler.myjd.get_info', 'get_info', ([], {}), '()\n', (36962, 36964), False, 'from feedcrawler.myjd import get_info\n'), ((39919, 39932), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (39929, 39932), False, 'import time\n'), ((39962, 39972), 'feedcrawler.myjd.get_info', 'get_info', ([], {}), '()\n', (39970, 39972), False, 'from feedcrawler.myjd import get_info\n'), ((41385, 41405), 'feedcrawler.common.remove_decrypt', 'remove_decrypt', (['name'], {}), '(name)\n', (41399, 41405), False, 'from feedcrawler.common import remove_decrypt\n'), ((44470, 44532), 'flask.redirect', 'redirect', (['"""http://getcaptchasolution.com/zuoo67f5cq"""'], {'code': '(302)'}), "('http://getcaptchasolution.com/zuoo67f5cq', code=302)\n", (44478, 44532), False, 'from flask import Flask, request, redirect, send_from_directory, render_template, jsonify, Response\n'), ((13723, 13762), 'feedcrawler.myjd.get_if_one_device', 'get_if_one_device', (['myjd_user', 'myjd_pass'], {}), '(myjd_user, myjd_pass)\n', (13740, 13762), False, 'from feedcrawler.myjd import get_if_one_device\n'), ((19945, 19967), 'feedcrawler.version.update_check', 'version.update_check', ([], {}), '()\n', (19965, 19967), False, 'from feedcrawler import version\n'), ((25619, 25639), 'feedcrawler.db.FeedDb', 'FeedDb', (['"""crawltimes"""'], {}), "('crawltimes')\n", (25625, 25639), False, 'from feedcrawler.db import FeedDb\n'), ((29031, 29318), 'flask.jsonify', 'jsonify', (["{'downloader_state': myjd[1], 'grabber_collecting': myjd[2], 'update_ready':\n myjd[3], 'packages': {'downloader': myjd[4][0], 'linkgrabber_decrypted':\n myjd[4][1], 'linkgrabber_offline': myjd[4][2], 'linkgrabber_failed':\n myjd[4][3], 'to_decrypt': packages_to_decrypt}}"], {}), "({'downloader_state': myjd[1], 'grabber_collecting': myjd[2],\n 'update_ready': myjd[3], 'packages': {'downloader': myjd[4][0],\n 'linkgrabber_decrypted': myjd[4][1], 'linkgrabber_offline': myjd[4][2],\n 'linkgrabber_failed': myjd[4][3], 'to_decrypt': packages_to_decrypt}})\n", (29038, 29318), False, 'from flask import Flask, request, redirect, send_from_directory, render_template, jsonify, Response\n'), ((30050, 30119), 'flask.jsonify', 'jsonify', (["{'downloader_state': myjd[1], 'grabber_collecting': myjd[2]}"], {}), "({'downloader_state': myjd[1], 
'grabber_collecting': myjd[2]})\n", (30057, 30119), False, 'from flask import Flask, request, redirect, send_from_directory, render_template, jsonify, Response\n'), ((43138, 43170), 'feedcrawler.db.ListDb', 'ListDb', (['"""List_ContentAll_Movies"""'], {}), "('List_ContentAll_Movies')\n", (43144, 43170), False, 'from feedcrawler.db import ListDb\n'), ((43252, 43285), 'feedcrawler.db.ListDb', 'ListDb', (['"""List_ContentAll_Seasons"""'], {}), "('List_ContentAll_Seasons')\n", (43258, 43285), False, 'from feedcrawler.db import ListDb\n'), ((43372, 43410), 'feedcrawler.db.ListDb', 'ListDb', (['"""List_ContentAll_Movies_Regex"""'], {}), "('List_ContentAll_Movies_Regex')\n", (43378, 43410), False, 'from feedcrawler.db import ListDb\n'), ((43492, 43525), 'feedcrawler.db.ListDb', 'ListDb', (['"""List_ContentShows_Shows"""'], {}), "('List_ContentShows_Shows')\n", (43498, 43525), False, 'from feedcrawler.db import ListDb\n'), ((43608, 43647), 'feedcrawler.db.ListDb', 'ListDb', (['"""List_ContentShows_Shows_Regex"""'], {}), "('List_ContentShows_Shows_Regex')\n", (43614, 43647), False, 'from feedcrawler.db import ListDb\n'), ((43729, 43770), 'feedcrawler.db.ListDb', 'ListDb', (['"""List_ContentShows_Seasons_Regex"""'], {}), "('List_ContentShows_Seasons_Regex')\n", (43735, 43770), False, 'from feedcrawler.db import ListDb\n'), ((43861, 43898), 'feedcrawler.db.ListDb', 'ListDb', (['"""List_CustomDJ_Documentaries"""'], {}), "('List_CustomDJ_Documentaries')\n", (43867, 43898), False, 'from feedcrawler.db import ListDb\n'), ((43980, 44023), 'feedcrawler.db.ListDb', 'ListDb', (['"""List_CustomDJ_Documentaries_Regex"""'], {}), "('List_CustomDJ_Documentaries_Regex')\n", (43986, 44023), False, 'from feedcrawler.db import ListDb\n'), ((44606, 44661), 'flask.redirect', 'redirect', (['"""http://linksnappy.com/?ref=397097"""'], {'code': '(302)'}), "('http://linksnappy.com/?ref=397097', code=302)\n", (44614, 44661), False, 'from flask import Flask, request, redirect, send_from_directory, render_template, jsonify, Response\n'), ((5063, 5090), 're.sub', 're.sub', (['""",\\\\d{3}"""', '""""""', 'line'], {}), "(',\\\\d{3}', '', line)\n", (5069, 5090), False, 'import re\n'), ((37229, 37242), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (37239, 37242), False, 'import time\n'), ((40267, 40280), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (40277, 40280), False, 'import time\n'), ((41796, 41809), 'feedcrawler.db.ListDb', 'ListDb', (['liste'], {}), '(liste)\n', (41802, 41809), False, 'from feedcrawler.db import ListDb\n'), ((59416, 59434), 'feedcrawler.db.FeedDb', 'FeedDb', (['"""crawldog"""'], {}), "('crawldog')\n", (59422, 59434), False, 'from feedcrawler.db import FeedDb\n'), ((60938, 60996), 're.findall', 're.findall', (['""".*\\\\.(S\\\\d{1,3}E\\\\d{1,3})\\\\..*"""', 'package_name'], {}), "('.*\\\\.(S\\\\d{1,3}E\\\\d{1,3})\\\\..*', package_name)\n", (60948, 60996), False, 'import re\n'), ((65321, 65337), 'feedcrawler.common.get_to_decrypt', 'get_to_decrypt', ([], {}), '()\n', (65335, 65337), False, 'from feedcrawler.common import get_to_decrypt\n'), ((67231, 67244), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (67241, 67244), False, 'import time\n'), ((67273, 67293), 'feedcrawler.common.remove_decrypt', 'remove_decrypt', (['name'], {}), '(name)\n', (67287, 67293), False, 'from feedcrawler.common import remove_decrypt\n'), ((67922, 67976), 'feedcrawler.myjd.download', 'download', (['package_name', '"""FeedCrawler"""', 'links', 'password'], {}), "(package_name, 'FeedCrawler', links, password)\n", 
(67930, 67976), False, 'from feedcrawler.myjd import download\n'), ((68010, 68031), 'feedcrawler.db.FeedDb', 'FeedDb', (['"""FeedCrawler"""'], {}), "('FeedCrawler')\n", (68016, 68031), False, 'from feedcrawler.db import FeedDb\n'), ((25776, 25796), 'feedcrawler.db.FeedDb', 'FeedDb', (['"""crawltimes"""'], {}), "('crawltimes')\n", (25782, 25796), False, 'from feedcrawler.db import FeedDb\n'), ((59959, 59984), 'ast.literal_eval', 'ast.literal_eval', (['linkids'], {}), '(linkids)\n', (59975, 59984), False, 'import ast\n'), ((60377, 60400), 'ast.literal_eval', 'ast.literal_eval', (['uuids'], {}), '(uuids)\n', (60393, 60400), False, 'import ast\n'), ((60766, 60805), 'feedcrawler.myjd.remove_from_linkgrabber', 'remove_from_linkgrabber', (['linkids', 'uuids'], {}), '(linkids, uuids)\n', (60789, 60805), False, 'from feedcrawler.myjd import remove_from_linkgrabber\n'), ((60838, 60866), 'feedcrawler.common.remove_decrypt', 'remove_decrypt', (['package_name'], {}), '(package_name)\n', (60852, 60866), False, 'from feedcrawler.common import remove_decrypt\n'), ((61280, 61318), 're.findall', 're.findall', (['""".*(s\\\\d{1,3}).*"""', 're_name'], {}), "('.*(s\\\\d{1,3}).*', re_name)\n", (61290, 61318), False, 'import re\n'), ((62717, 62746), 'feedcrawler.myjd.get_packages_in_linkgrabber', 'get_packages_in_linkgrabber', ([], {}), '()\n', (62744, 62746), False, 'from feedcrawler.myjd import get_packages_in_linkgrabber\n'), ((67363, 67374), 'time.time', 'time.time', ([], {}), '()\n', (67372, 67374), False, 'import time\n'), ((68207, 68270), 'feedcrawler.notifiers.notify', 'notify', (["['[FeedCrawler Sponsors Helper erfolgreich] - ' + name]"], {}), "(['[FeedCrawler Sponsors Helper erfolgreich] - ' + name])\n", (68213, 68270), False, 'from feedcrawler.notifiers import notify\n'), ((37507, 37563), 'feedcrawler.myjd.package_merge', 'package_merge', (['decrypted_packages', 'title', 'known_packages'], {}), '(decrypted_packages, title, known_packages)\n', (37520, 37563), False, 'from feedcrawler.myjd import package_merge\n'), ((37661, 37671), 'feedcrawler.myjd.get_info', 'get_info', ([], {}), '()\n', (37669, 37671), False, 'from feedcrawler.myjd import get_info\n'), ((62857, 62869), 'feedcrawler.myjd.get_device', 'get_device', ([], {}), '()\n', (62867, 62869), False, 'from feedcrawler.myjd import get_device\n'), ((63060, 63089), 'feedcrawler.myjd.get_packages_in_linkgrabber', 'get_packages_in_linkgrabber', ([], {}), '()\n', (63087, 63089), False, 'from feedcrawler.myjd import get_packages_in_linkgrabber\n'), ((62932, 62958), 'feedcrawler.common.is_device', 'is_device', (['internal.device'], {}), '(internal.device)\n', (62941, 62958), False, 'from feedcrawler.common import is_device\n'), ((65621, 65640), 're.compile', 're.compile', (['re_name'], {}), '(re_name)\n', (65631, 65640), False, 'import re\n'), ((65939, 66000), 're.findall', 're.findall', (['""".*\\\\.S\\\\d{1,3}E(\\\\d{1,3})\\\\..*"""', "package['name']"], {}), "('.*\\\\.S\\\\d{1,3}E(\\\\d{1,3})\\\\..*', package['name'])\n", (65949, 66000), False, 'import re\n'), ((66038, 66069), 'feedcrawler.common.remove_decrypt', 'remove_decrypt', (["package['name']"], {}), "(package['name'])\n", (66052, 66069), False, 'from feedcrawler.common import remove_decrypt\n'), ((63441, 63460), 're.compile', 're.compile', (['re_name'], {}), '(re_name)\n', (63451, 63460), False, 'import re\n'), ((63546, 63607), 're.findall', 're.findall', (['""".*\\\\.S\\\\d{1,3}E(\\\\d{1,3})\\\\..*"""', "package['name']"], {}), "('.*\\\\.S\\\\d{1,3}E(\\\\d{1,3})\\\\..*', package['name'])\n", 
(63556, 63607), False, 'import re\n'), ((64066, 64105), 'feedcrawler.myjd.remove_from_linkgrabber', 'remove_from_linkgrabber', (['linkids', 'uuids'], {}), '(linkids, uuids)\n', (64089, 64105), False, 'from feedcrawler.myjd import remove_from_linkgrabber\n'), ((64154, 64182), 'feedcrawler.common.remove_decrypt', 'remove_decrypt', (['package_name'], {}), '(package_name)\n', (64168, 64182), False, 'from feedcrawler.common import remove_decrypt\n'), ((64405, 64424), 're.compile', 're.compile', (['re_name'], {}), '(re_name)\n', (64415, 64424), False, 'import re\n'), ((64510, 64571), 're.findall', 're.findall', (['""".*\\\\.S\\\\d{1,3}E(\\\\d{1,3})\\\\..*"""', "package['name']"], {}), "('.*\\\\.S\\\\d{1,3}E(\\\\d{1,3})\\\\..*', package['name'])\n", (64520, 64571), False, 'import re\n'), ((65030, 65069), 'feedcrawler.myjd.remove_from_linkgrabber', 'remove_from_linkgrabber', (['linkids', 'uuids'], {}), '(linkids, uuids)\n', (65053, 65069), False, 'from feedcrawler.myjd import remove_from_linkgrabber\n'), ((65118, 65146), 'feedcrawler.common.remove_decrypt', 'remove_decrypt', (['package_name'], {}), '(package_name)\n', (65132, 65146), False, 'from feedcrawler.common import remove_decrypt\n'), ((67032, 67045), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (67042, 67045), False, 'import time\n'), ((66664, 66689), 'feedcrawler.db.FeedDb', 'FeedDb', (['"""episode_remover"""'], {}), "('episode_remover')\n", (66670, 66689), False, 'from feedcrawler.db import FeedDb\n'), ((67090, 67115), 'feedcrawler.db.FeedDb', 'FeedDb', (['"""episode_remover"""'], {}), "('episode_remover')\n", (67096, 67115), False, 'from feedcrawler.db import FeedDb\n'), ((63799, 63824), 'feedcrawler.db.FeedDb', 'FeedDb', (['"""episode_remover"""'], {}), "('episode_remover')\n", (63805, 63824), False, 'from feedcrawler.db import FeedDb\n'), ((64763, 64788), 'feedcrawler.db.FeedDb', 'FeedDb', (['"""episode_remover"""'], {}), "('episode_remover')\n", (64769, 64788), False, 'from feedcrawler.db import FeedDb\n'), ((66937, 66962), 'feedcrawler.db.FeedDb', 'FeedDb', (['"""episode_remover"""'], {}), "('episode_remover')\n", (66943, 66962), False, 'from feedcrawler.db import FeedDb\n')]
|
import cv2
import numpy as np
import requests
def predict(image):
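    # np.fromstring is deprecated for raw bytes; np.frombuffer(image, np.uint8)
    # is the modern replacement (it returns a read-only view instead of a copy).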
nparr = np.fromstring(image, np.uint8)
img = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
gray_image = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
res, im_png = cv2.imencode(".png", gray_image)
return im_png
def details():
details = {
"doi": "10.1371/journal.pone.0029740",
"example_figure": "https://camo.githubusercontent.com/5eb8b4f1f63dbdbb5c30afb10575d6ebe24bb0a156e6b81296c8191183f33edf/68747470733a2f2f692e6962622e636f2f3559304d3258622f6578616d706c652e706e67",
"description": "Image Uncolorization will vintage your picture to turn them into black and white style.",
}
details += get_doi(details["doi"])
return details
def get_doi(doi):
crossref_url = f"http://api.crossref.org/works/{doi}"
req = requests.get(crossref_url)
return req.content
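# Minimal usage sketch (file names are placeholders, not part of this module):
# with open("input.jpg", "rb") as f:
#     png = predict(f.read())  # a uint8 ndarray holding the encoded PNG bytes
# with open("gray.png", "wb") as f:
#     f.write(png.tobytes())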
|
[
"cv2.cvtColor",
"cv2.imdecode",
"numpy.fromstring",
"cv2.imencode"
] |
[((74, 104), 'numpy.fromstring', 'np.fromstring', (['image', 'np.uint8'], {}), '(image, np.uint8)\n', (87, 104), True, 'import numpy as np\n'), ((115, 152), 'cv2.imdecode', 'cv2.imdecode', (['nparr', 'cv2.IMREAD_COLOR'], {}), '(nparr, cv2.IMREAD_COLOR)\n', (127, 152), False, 'import cv2\n'), ((170, 207), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_BGR2GRAY'], {}), '(img, cv2.COLOR_BGR2GRAY)\n', (182, 207), False, 'import cv2\n'), ((226, 258), 'cv2.imencode', 'cv2.imencode', (['""".png"""', 'gray_image'], {}), "('.png', gray_image)\n", (238, 258), False, 'import cv2\n')]
|
import numpy as np
import tensorflow as tf
import pathlib
import general_utilities
class Actor:
def __init__(self, scope, session, n_actions, action_bound,
eval_states, target_states, learning_rate=0.001, tau=0.01):
self.session = session
self.n_actions = n_actions
self.action_bound = action_bound
self.eval_states = eval_states
self.target_states = target_states
self.learning_rate = learning_rate
self.scope = scope
with tf.variable_scope(self.scope):
self.eval_actions = self.build_network(self.eval_states,
scope='eval', trainable=True)
self.target_actions = self.build_network(self.target_states,
scope='target', trainable=False)
self.eval_weights = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES,
scope=scope + '/eval')
self.target_weights = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES,
scope=scope + '/target')
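        # Soft (Polyak) target update: the target network slowly tracks the
        # evaluation network with mixing factor tau.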
self.update_target = [tf.assign(t, (1 - tau) * t + tau * e)
for t, e in zip(self.target_weights, self.eval_weights)]
def build_network(self, x, scope, trainable):
with tf.variable_scope(scope):
W = tf.random_normal_initializer(0.0, 0.1)
b = tf.constant_initializer(0.1)
h1 = tf.layers.dense(x, 50, activation=tf.nn.relu,
kernel_initializer=W, bias_initializer=b,
name='h1', trainable=trainable)
actions = tf.layers.dense(h1, self.n_actions, activation=tf.nn.tanh,
kernel_initializer=W, bias_initializer=b,
name='actions', trainable=trainable)
scaled_actions = tf.multiply(actions, self.action_bound,
name='scaled_actions')
return scaled_actions
def add_gradients(self, action_gradients):
with tf.variable_scope(self.scope):
self.action_gradients = tf.gradients(ys=self.eval_actions,
xs=self.eval_weights,
grad_ys=action_gradients)
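            # The negative learning rate turns Adam's minimization into gradient
            # ascent on the critic's Q-value, as DDPG requires for the actor.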
optimizer = tf.train.AdamOptimizer(-self.learning_rate)
self.optimize = optimizer.apply_gradients(zip(self.action_gradients,
self.eval_weights))
def learn(self, states):
self.session.run(self.optimize, feed_dict={self.eval_states: states})
self.session.run(self.update_target)
def choose_action(self, state):
return self.session.run(self.eval_actions,
feed_dict={self.eval_states: state[np.newaxis, :]})[0]
class Critic:
def __init__(self, scope, session, n_actions, actor_eval_actions,
actor_target_actions, eval_states, target_states,
rewards, learning_rate=0.001, gamma=0.9, tau=0.01):
self.session = session
self.n_actions = n_actions
self.actor_eval_actions = actor_eval_actions
self.actor_target_actions = actor_target_actions
self.eval_states = eval_states
self.target_states = target_states
self.rewards = rewards
with tf.variable_scope(scope):
self.eval_values = self.build_network(self.eval_states,
self.actor_eval_actions,
'eval', trainable=True)
self.target_values = self.build_network(self.target_states,
self.actor_target_actions,
'target', trainable=False)
self.eval_weights = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES,
scope=scope + '/eval')
self.target_weights = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES,
scope=scope + '/target')
self.target = self.rewards + gamma * self.target_values
self.loss = tf.reduce_mean(tf.squared_difference(self.target,
self.eval_values))
self.optimize = tf.train.AdamOptimizer(
learning_rate).minimize(self.loss)
self.action_gradients = tf.gradients(ys=self.eval_values,
xs=self.actor_eval_actions)[0]
self.update_target = [tf.assign(t, (1 - tau) * t + tau * e)
for t, e in zip(self.target_weights, self.eval_weights)]
def build_network(self, x1, x2, scope, trainable):
with tf.variable_scope(scope):
W = tf.random_normal_initializer(0.0, 0.1)
b = tf.constant_initializer(0.1)
h1 = tf.layers.dense(x1, 50, activation=tf.nn.relu,
kernel_initializer=W, bias_initializer=b,
name='h1', trainable=trainable)
h21 = tf.get_variable('h21', [50, 50],
initializer=W, trainable=trainable)
h22 = tf.get_variable('h22', [self.n_actions, 50],
initializer=W, trainable=trainable)
b2 = tf.get_variable('b2', [1, 50],
initializer=b, trainable=trainable)
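            # Merge state features (h1) and raw actions (x2) into one hidden layer.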
h3 = tf.nn.relu(tf.matmul(h1, h21) + tf.matmul(x2, h22) + b2)
values = tf.layers.dense(h3, 1, kernel_initializer=W,
bias_initializer=b, name='values',
trainable=trainable)
return values
def learn(self, states, actions, rewards, states_next):
loss, _ = self.session.run([self.loss, self.optimize], feed_dict={self.eval_states: states,
self.actor_eval_actions: actions,
self.rewards: rewards,
self.target_states: states_next})
self.session.run(self.update_target)
return loss
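# Rough wiring sketch (placeholder names; state_dim and n_actions are
# assumptions, not values taken from this file):
# sess = tf.Session()
# eval_states = tf.placeholder(tf.float32, [None, state_dim])
# target_states = tf.placeholder(tf.float32, [None, state_dim])
# rewards = tf.placeholder(tf.float32, [None, 1])
# actor = Actor('actor', sess, n_actions, action_bound, eval_states, target_states)
# critic = Critic('critic', sess, n_actions, actor.eval_actions,
#                 actor.target_actions, eval_states, target_states, rewards)
# actor.add_gradients(critic.action_gradients)
# sess.run(tf.global_variables_initializer())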
|
[
"tensorflow.get_collection",
"tensorflow.constant_initializer",
"tensorflow.layers.dense",
"tensorflow.variable_scope",
"tensorflow.multiply",
"tensorflow.assign",
"tensorflow.matmul",
"tensorflow.random_normal_initializer",
"tensorflow.squared_difference",
"tensorflow.gradients",
"tensorflow.train.AdamOptimizer",
"tensorflow.get_variable"
] |
[((513, 542), 'tensorflow.variable_scope', 'tf.variable_scope', (['self.scope'], {}), '(self.scope)\n', (530, 542), True, 'import tensorflow as tf\n'), ((886, 957), 'tensorflow.get_collection', 'tf.get_collection', (['tf.GraphKeys.GLOBAL_VARIABLES'], {'scope': "(scope + '/eval')"}), "(tf.GraphKeys.GLOBAL_VARIABLES, scope=scope + '/eval')\n", (903, 957), True, 'import tensorflow as tf\n'), ((1042, 1115), 'tensorflow.get_collection', 'tf.get_collection', (['tf.GraphKeys.GLOBAL_VARIABLES'], {'scope': "(scope + '/target')"}), "(tf.GraphKeys.GLOBAL_VARIABLES, scope=scope + '/target')\n", (1059, 1115), True, 'import tensorflow as tf\n'), ((1396, 1420), 'tensorflow.variable_scope', 'tf.variable_scope', (['scope'], {}), '(scope)\n', (1413, 1420), True, 'import tensorflow as tf\n'), ((1438, 1476), 'tensorflow.random_normal_initializer', 'tf.random_normal_initializer', (['(0.0)', '(0.1)'], {}), '(0.0, 0.1)\n', (1466, 1476), True, 'import tensorflow as tf\n'), ((1493, 1521), 'tensorflow.constant_initializer', 'tf.constant_initializer', (['(0.1)'], {}), '(0.1)\n', (1516, 1521), True, 'import tensorflow as tf\n'), ((1539, 1662), 'tensorflow.layers.dense', 'tf.layers.dense', (['x', '(50)'], {'activation': 'tf.nn.relu', 'kernel_initializer': 'W', 'bias_initializer': 'b', 'name': '"""h1"""', 'trainable': 'trainable'}), "(x, 50, activation=tf.nn.relu, kernel_initializer=W,\n bias_initializer=b, name='h1', trainable=trainable)\n", (1554, 1662), True, 'import tensorflow as tf\n'), ((1747, 1893), 'tensorflow.layers.dense', 'tf.layers.dense', (['h1', 'self.n_actions'], {'activation': 'tf.nn.tanh', 'kernel_initializer': 'W', 'bias_initializer': 'b', 'name': '"""actions"""', 'trainable': 'trainable'}), "(h1, self.n_actions, activation=tf.nn.tanh,\n kernel_initializer=W, bias_initializer=b, name='actions', trainable=\n trainable)\n", (1762, 1893), True, 'import tensorflow as tf\n'), ((1990, 2052), 'tensorflow.multiply', 'tf.multiply', (['actions', 'self.action_bound'], {'name': '"""scaled_actions"""'}), "(actions, self.action_bound, name='scaled_actions')\n", (2001, 2052), True, 'import tensorflow as tf\n'), ((2186, 2215), 'tensorflow.variable_scope', 'tf.variable_scope', (['self.scope'], {}), '(self.scope)\n', (2203, 2215), True, 'import tensorflow as tf\n'), ((2253, 2340), 'tensorflow.gradients', 'tf.gradients', ([], {'ys': 'self.eval_actions', 'xs': 'self.eval_weights', 'grad_ys': 'action_gradients'}), '(ys=self.eval_actions, xs=self.eval_weights, grad_ys=\n action_gradients)\n', (2265, 2340), True, 'import tensorflow as tf\n'), ((2458, 2501), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', (['(-self.learning_rate)'], {}), '(-self.learning_rate)\n', (2480, 2501), True, 'import tensorflow as tf\n'), ((3515, 3539), 'tensorflow.variable_scope', 'tf.variable_scope', (['scope'], {}), '(scope)\n', (3532, 3539), True, 'import tensorflow as tf\n'), ((4021, 4092), 'tensorflow.get_collection', 'tf.get_collection', (['tf.GraphKeys.GLOBAL_VARIABLES'], {'scope': "(scope + '/eval')"}), "(tf.GraphKeys.GLOBAL_VARIABLES, scope=scope + '/eval')\n", (4038, 4092), True, 'import tensorflow as tf\n'), ((4177, 4250), 'tensorflow.get_collection', 'tf.get_collection', (['tf.GraphKeys.GLOBAL_VARIABLES'], {'scope': "(scope + '/target')"}), "(tf.GraphKeys.GLOBAL_VARIABLES, scope=scope + '/target')\n", (4194, 4250), True, 'import tensorflow as tf\n'), ((5013, 5037), 'tensorflow.variable_scope', 'tf.variable_scope', (['scope'], {}), '(scope)\n', (5030, 5037), True, 'import tensorflow as tf\n'), ((5055, 5093), 
'tensorflow.random_normal_initializer', 'tf.random_normal_initializer', (['(0.0)', '(0.1)'], {}), '(0.0, 0.1)\n', (5083, 5093), True, 'import tensorflow as tf\n'), ((5110, 5138), 'tensorflow.constant_initializer', 'tf.constant_initializer', (['(0.1)'], {}), '(0.1)\n', (5133, 5138), True, 'import tensorflow as tf\n'), ((5156, 5280), 'tensorflow.layers.dense', 'tf.layers.dense', (['x1', '(50)'], {'activation': 'tf.nn.relu', 'kernel_initializer': 'W', 'bias_initializer': 'b', 'name': '"""h1"""', 'trainable': 'trainable'}), "(x1, 50, activation=tf.nn.relu, kernel_initializer=W,\n bias_initializer=b, name='h1', trainable=trainable)\n", (5171, 5280), True, 'import tensorflow as tf\n'), ((5361, 5429), 'tensorflow.get_variable', 'tf.get_variable', (['"""h21"""', '[50, 50]'], {'initializer': 'W', 'trainable': 'trainable'}), "('h21', [50, 50], initializer=W, trainable=trainable)\n", (5376, 5429), True, 'import tensorflow as tf\n'), ((5482, 5567), 'tensorflow.get_variable', 'tf.get_variable', (['"""h22"""', '[self.n_actions, 50]'], {'initializer': 'W', 'trainable': 'trainable'}), "('h22', [self.n_actions, 50], initializer=W, trainable=trainable\n )\n", (5497, 5567), True, 'import tensorflow as tf\n'), ((5614, 5680), 'tensorflow.get_variable', 'tf.get_variable', (['"""b2"""', '[1, 50]'], {'initializer': 'b', 'trainable': 'trainable'}), "('b2', [1, 50], initializer=b, trainable=trainable)\n", (5629, 5680), True, 'import tensorflow as tf\n'), ((5809, 5914), 'tensorflow.layers.dense', 'tf.layers.dense', (['h3', '(1)'], {'kernel_initializer': 'W', 'bias_initializer': 'b', 'name': '"""values"""', 'trainable': 'trainable'}), "(h3, 1, kernel_initializer=W, bias_initializer=b, name=\n 'values', trainable=trainable)\n", (5824, 5914), True, 'import tensorflow as tf\n'), ((1203, 1240), 'tensorflow.assign', 'tf.assign', (['t', '((1 - tau) * t + tau * e)'], {}), '(t, (1 - tau) * t + tau * e)\n', (1212, 1240), True, 'import tensorflow as tf\n'), ((4411, 4463), 'tensorflow.squared_difference', 'tf.squared_difference', (['self.target', 'self.eval_values'], {}), '(self.target, self.eval_values)\n', (4432, 4463), True, 'import tensorflow as tf\n'), ((4666, 4727), 'tensorflow.gradients', 'tf.gradients', ([], {'ys': 'self.eval_values', 'xs': 'self.actor_eval_actions'}), '(ys=self.eval_values, xs=self.actor_eval_actions)\n', (4678, 4727), True, 'import tensorflow as tf\n'), ((4815, 4852), 'tensorflow.assign', 'tf.assign', (['t', '((1 - tau) * t + tau * e)'], {}), '(t, (1 - tau) * t + tau * e)\n', (4824, 4852), True, 'import tensorflow as tf\n'), ((4555, 4592), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', (['learning_rate'], {}), '(learning_rate)\n', (4577, 4592), True, 'import tensorflow as tf\n'), ((5742, 5760), 'tensorflow.matmul', 'tf.matmul', (['h1', 'h21'], {}), '(h1, h21)\n', (5751, 5760), True, 'import tensorflow as tf\n'), ((5763, 5781), 'tensorflow.matmul', 'tf.matmul', (['x2', 'h22'], {}), '(x2, h22)\n', (5772, 5781), True, 'import tensorflow as tf\n')]
|
from rubik_cube import RubikCube
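# Demo: apply a fixed sequence of face and slice rotations, printing the cube after each stage.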
r = RubikCube()
r.y_rotate('left', 'down')
r.y_rotate('right', 'up')
print(r)
print()
for _ in range(3):
r.x_rotate('bottom', 'left')
print(r)
print()
for _ in range(7):
r.y_rotate('left', 'up')
r.z_rotate('front', 'clockwise')
r.y_rotate('left', 'up')
r.x_rotate('top', 'right')
r.x_rotate('top', 'right')
r.z_rotate('back', 'clockwise')
for _ in range(32):
r.z_rotate('front', 'anti-clockwise')
r.y_rotate('left', 'up')
r.x_rotate('top', 'right')
r.z_rotate('front', 'clockwise')
r.y_rotate('right', 'down')
r.z_rotate('back', 'anti-clockwise')
r.x_rotate('bottom', 'left')
print(r)
print()
r.z_rotate('back', 'clockwise')
print(r)
|
[
"rubik_cube.RubikCube"
] |
[((38, 49), 'rubik_cube.RubikCube', 'RubikCube', ([], {}), '()\n', (47, 49), False, 'from rubik_cube import RubikCube\n')]
|
import os
import sys
sys.path.append('../')
def test_node2vec():
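    # Smoke test: shells out to the repository's training script with fixed
    # node2vec hyper-parameters; assumes it is launched from this tests directory.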
os.system("python ../scripts/train.py --task unsupervised_node_classification --dataset wikipedia --model node2vec --p_value 0.3 --q_value 0.7 --seed 0 1 2 3 4")
if __name__ == "__main__":
test_node2vec()
|
[
"sys.path.append",
"os.system"
] |
[((22, 44), 'sys.path.append', 'sys.path.append', (['"""../"""'], {}), "('../')\n", (37, 44), False, 'import sys\n'), ((72, 243), 'os.system', 'os.system', (['"""python ../scripts/train.py --task unsupervised_node_classification --dataset wikipedia --model node2vec --p_value 0.3 --q_value 0.7 --seed 0 1 2 3 4"""'], {}), "(\n 'python ../scripts/train.py --task unsupervised_node_classification --dataset wikipedia --model node2vec --p_value 0.3 --q_value 0.7 --seed 0 1 2 3 4'\n )\n", (81, 243), False, 'import os\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-04-13 18:21
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('common', '0003_delete_contributors'),
]
operations = [
migrations.CreateModel(
name='WorkContributor',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('contribution_type', models.PositiveSmallIntegerField(choices=[(0, 'Author'), (1, 'Editor'), (2, 'Translator')])),
('order', models.PositiveSmallIntegerField(default=0)),
],
),
migrations.RemoveField(
model_name='contributor',
name='contributor_type',
),
migrations.RemoveField(
model_name='contributor',
name='order',
),
migrations.AddField(
model_name='workcontributor',
name='contributor',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='common.Contributor'),
),
migrations.AddField(
model_name='workcontributor',
name='work',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='common.Work'),
),
migrations.AddField(
model_name='work',
name='contributors',
field=models.ManyToManyField(through='common.WorkContributor', to='common.Contributor'),
),
]
|
[
"django.db.models.ManyToManyField",
"django.db.migrations.RemoveField",
"django.db.models.ForeignKey",
"django.db.models.PositiveSmallIntegerField",
"django.db.models.AutoField"
] |
[((767, 840), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""contributor"""', 'name': '"""contributor_type"""'}), "(model_name='contributor', name='contributor_type')\n", (789, 840), False, 'from django.db import migrations, models\n'), ((885, 947), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""contributor"""', 'name': '"""order"""'}), "(model_name='contributor', name='order')\n", (907, 947), False, 'from django.db import migrations, models\n'), ((1105, 1197), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""common.Contributor"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'common.Contributor')\n", (1122, 1197), False, 'from django.db import migrations, models\n'), ((1319, 1404), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""common.Work"""'}), "(on_delete=django.db.models.deletion.CASCADE, to='common.Work'\n )\n", (1336, 1404), False, 'from django.db import migrations, models\n'), ((1523, 1609), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'through': '"""common.WorkContributor"""', 'to': '"""common.Contributor"""'}), "(through='common.WorkContributor', to=\n 'common.Contributor')\n", (1545, 1609), False, 'from django.db import migrations, models\n'), ((437, 530), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (453, 530), False, 'from django.db import migrations, models\n'), ((567, 662), 'django.db.models.PositiveSmallIntegerField', 'models.PositiveSmallIntegerField', ([], {'choices': "[(0, 'Author'), (1, 'Editor'), (2, 'Translator')]"}), "(choices=[(0, 'Author'), (1, 'Editor'), (2,\n 'Translator')])\n", (599, 662), False, 'from django.db import migrations, models\n'), ((687, 730), 'django.db.models.PositiveSmallIntegerField', 'models.PositiveSmallIntegerField', ([], {'default': '(0)'}), '(default=0)\n', (719, 730), False, 'from django.db import migrations, models\n')]
|
__author__ = 'edill'
import enaml
from enaml.qt.qt_application import QtApplication
from bubblegum.xrf.model.xrf_model import XRF
def run():
app = QtApplication()
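    # enaml.imports() makes .enaml view files loadable with plain import statements.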
with enaml.imports():
from bubblegum.xrf.view.file_view import FileGui
view = FileGui()
view.xrf_model1 = XRF()
view.xrf_model2 = XRF()
view.show()
app.start()
if __name__ == "__main__":
run()
|
[
"bubblegum.xrf.model.xrf_model.XRF",
"bubblegum.xrf.view.file_view.FileGui",
"enaml.qt.qt_application.QtApplication",
"enaml.imports"
] |
[((153, 168), 'enaml.qt.qt_application.QtApplication', 'QtApplication', ([], {}), '()\n', (166, 168), False, 'from enaml.qt.qt_application import QtApplication\n'), ((264, 273), 'bubblegum.xrf.view.file_view.FileGui', 'FileGui', ([], {}), '()\n', (271, 273), False, 'from bubblegum.xrf.view.file_view import FileGui\n'), ((296, 301), 'bubblegum.xrf.model.xrf_model.XRF', 'XRF', ([], {}), '()\n', (299, 301), False, 'from bubblegum.xrf.model.xrf_model import XRF\n'), ((324, 329), 'bubblegum.xrf.model.xrf_model.XRF', 'XRF', ([], {}), '()\n', (327, 329), False, 'from bubblegum.xrf.model.xrf_model import XRF\n'), ((178, 193), 'enaml.imports', 'enaml.imports', ([], {}), '()\n', (191, 193), False, 'import enaml\n')]
|
import numpy as np
from numpy.random import randn
from numpy.linalg import norm
from numpy.random import permutation
from numpy.testing import assert_array_almost_equal, assert_array_equal
import tensor.utils as tu
from tensor.tensor_train import ttsvd, tt_product
# np.random.seed(20)
shape_A = (3, 4, 5, 6, 7)
A = randn(*shape_A)
A = A / norm(A)
# higher tolerance means worse approximation, but more compression
tol = 0
dim_order = permutation(np.arange(len(shape_A)))
G, ranks = ttsvd(A, tol, dim_order=dim_order, ranks=None)
Ak = tt_product(G, shape_A, dim_order=dim_order)
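# Relative reconstruction error ||A - Ak|| / ||A|| (2-norm of the flattened tensors).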
err = norm(A - Ak) / norm(A)
print('dim order: ', dim_order)
print('shape: ', shape_A)
print('ranks: ', ranks)
print('ttsvd: error = %0.6e' % err)
print('tol: tol = %0.2e' % tol)
print('check tolerance: %d' % (err < tol))
|
[
"tensor.tensor_train.ttsvd",
"numpy.linalg.norm",
"tensor.tensor_train.tt_product",
"numpy.random.randn"
] |
[((318, 333), 'numpy.random.randn', 'randn', (['*shape_A'], {}), '(*shape_A)\n', (323, 333), False, 'from numpy.random import randn\n'), ((487, 533), 'tensor.tensor_train.ttsvd', 'ttsvd', (['A', 'tol'], {'dim_order': 'dim_order', 'ranks': 'None'}), '(A, tol, dim_order=dim_order, ranks=None)\n', (492, 533), False, 'from tensor.tensor_train import ttsvd, tt_product\n'), ((540, 583), 'tensor.tensor_train.tt_product', 'tt_product', (['G', 'shape_A'], {'dim_order': 'dim_order'}), '(G, shape_A, dim_order=dim_order)\n', (550, 583), False, 'from tensor.tensor_train import ttsvd, tt_product\n'), ((342, 349), 'numpy.linalg.norm', 'norm', (['A'], {}), '(A)\n', (346, 349), False, 'from numpy.linalg import norm\n'), ((592, 604), 'numpy.linalg.norm', 'norm', (['(A - Ak)'], {}), '(A - Ak)\n', (596, 604), False, 'from numpy.linalg import norm\n'), ((607, 614), 'numpy.linalg.norm', 'norm', (['A'], {}), '(A)\n', (611, 614), False, 'from numpy.linalg import norm\n')]
|
''' Visualization code for point clouds and 3D bounding boxes with mayavi.
Modified by <NAME>
Date: September 2017
Ref: https://github.com/hengck23/didi-udacity-2017/blob/master/baseline-04/kitti_data/draw.py
'''
import warnings
import numpy as np
try:
import mayavi.mlab as mlab
except ImportError:
warnings.warn("mayavi is not installed")
import pandas as pd
from dataset.prepare_lyft_data import parse_string_to_box, transform_box_from_world_to_sensor_coordinates, \
get_sensor_to_world_transform_matrix_from_sample_data_token
from dataset.prepare_lyft_data_v2 import transform_pc_to_camera_coord
from lyft_dataset_sdk.lyftdataset import LyftDataset
from lyft_dataset_sdk.utils.data_classes import LidarPointCloud
from lyft_dataset_sdk.utils.geometry_utils import box_in_image, BoxVisibility
from skimage.io import imread
import matplotlib.pyplot as plt
class PredViewer(object):
def __init__(self, pred_file, lyftd: LyftDataset):
self.pred_pd = pd.read_csv(pred_file, index_col="Id")
self.lyftd = lyftd
    def get_boxes_from_token(self, sample_token):
        boxes_str = self.pred_pd.loc[sample_token, 'PredictionString']
        boxes = parse_string_to_box(boxes_str, sample_token=sample_token)
        return boxes
def get_sample_record_from_token(self, sample_token):
pass
def render_camera_image(self, ax, sample_token, cam_key='CAM_FRONT', prob_threshold=0.7):
sample_record = self.lyftd.get('sample', sample_token)
camera_token = sample_record['data'][cam_key]
camera_image_path, _, cam_intrinsic = self.lyftd.get_sample_data(camera_token)
boxes = self.get_boxes_from_token(sample_token)
image_array = imread(camera_image_path)
ax.imshow(image_array)
for pred_box in boxes:
            if pred_box.score > prob_threshold:
box_in_camera_coord = transform_box_from_world_to_sensor_coordinates(pred_box, camera_token, self.lyftd)
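                # Only draw boxes in front of the camera (positive z in the camera frame).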
if box_in_camera_coord.center[2] > 0:
box_in_camera_coord.render(ax, view=cam_intrinsic, normalize=True, linewidth=2.0)
ax.set_xlim([0, image_array.shape[1]])
ax.set_ylim([image_array.shape[0], 0])
    def render_lidar_points(self, ax, sample_token, lidar_key='LIDAR_TOP', prob_threshold=0):
        lidar_top_token, lpc = self.get_lidar_points(lidar_key, sample_token)
        boxes = self.get_boxes_from_token(sample_token)
        # Draw the point cloud once, then overlay every box above the score threshold.
        pts = lpc.points
        ax.scatter(pts[0, :], pts[1, :], s=0.05)
        ax.set_xlim([-50, 50])
        ax.set_ylim([-50, 50])
        view_mtx = np.eye(2)
        for pred_box in boxes:
            if pred_box.score > prob_threshold:
                box_in_lidar_coord = transform_box_from_world_to_sensor_coordinates(pred_box, lidar_top_token,
                                                                               self.lyftd)
                box_in_lidar_coord.render(ax, view=view_mtx)
def get_lidar_points(self, lidar_key, sample_token):
sample_record = self.lyftd.get('sample', sample_token)
lidar_top_token = sample_record['data'][lidar_key]
lidar_path = self.lyftd.get_sample_data_path(lidar_top_token)
lpc = LidarPointCloud.from_file(lidar_path)
return lidar_top_token, lpc
def render_3d_lidar_points(self, sample_token, lidar_key='LIDAR_TOP', prob_threshold=0):
lidar_token, lpc = self.get_lidar_points(lidar_key=lidar_key, sample_token=sample_token)
fig = draw_lidar_simple(np.transpose(lpc.points))
boxes = self.get_boxes_from_token(sample_token)
box_pts = []
for pred_box in boxes:
if pred_box.score > prob_threshold:
box_in_lidar_coord = transform_box_from_world_to_sensor_coordinates(pred_box, lidar_token,
self.lyftd)
box_3d_pts = np.transpose(box_in_lidar_coord.corners())
box_pts.append(box_3d_pts)
draw_gt_boxes3d(box_pts, fig)
def render_3d_lidar_points_to_camera_coordinates(self, sample_token, lidar_key="LIDAR_TOP",
cam_key="CAM_FRONT", prob_threshold=0):
lidar_token, lpc = self.get_lidar_points(lidar_key=lidar_key, sample_token=sample_token)
        # Get camera coordinate calibration information
sample_record = self.lyftd.get('sample', sample_token)
camera_token = sample_record['data'][cam_key]
camera_data = self.lyftd.get('sample_data', camera_token)
lidar_record = self.lyftd.get('sample_data', lidar_token)
lpc, _ = transform_pc_to_camera_coord(camera_data, lidar_record, lpc, self.lyftd)
# Transform lidar points
fig = draw_lidar_simple(np.transpose(lpc.points))
boxes = self.get_boxes_from_token(sample_token)
box_pts = []
for pred_box in boxes:
if pred_box.score > prob_threshold:
                box_in_camera_coord = transform_box_from_world_to_sensor_coordinates(pred_box, camera_token,
                                                                                self.lyftd)
                box_3d_pts = np.transpose(box_in_camera_coord.corners())
                box_pts.append(box_3d_pts)
draw_gt_boxes3d(box_pts, fig)
# mlab.view(azimuth=270, elevation=150,
# focalpoint=[0, 0, 0], distance=62.0, figure=fig)
return fig
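# Minimal usage sketch (the CSV path, dataset paths, and sample_token are
# placeholders, not values from this module):
# pv = PredViewer('predictions.csv', LyftDataset(data_path='.', json_path='data'))
# fig, ax = plt.subplots()
# pv.render_camera_image(ax, sample_token, prob_threshold=0.5)
# plt.show()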
def draw_lidar_simple(pc, color=None):
''' Draw lidar points. simplest set up. '''
fig = mlab.figure(figure=None, bgcolor=(0, 0, 0), fgcolor=None, engine=None, size=(1600, 1000))
if color is None: color = pc[:, 2]
# draw points
mlab.points3d(pc[:, 0], pc[:, 1], pc[:, 2], color, color=None, mode='point', colormap='cool', scale_factor=1,
figure=fig)
# draw origin
mlab.points3d(0, 0, 0, color=(1, 1, 1), mode='sphere', scale_factor=0.2)
# draw axis
axes = np.array([
[2., 0., 0., 0.],
[0., 2., 0., 0.],
[0., 0., 2., 0.],
], dtype=np.float64)
mlab.plot3d([0, axes[0, 0]], [0, axes[0, 1]], [0, axes[0, 2]], color=(1, 0, 0), tube_radius=None, figure=fig)
mlab.plot3d([0, axes[1, 0]], [0, axes[1, 1]], [0, axes[1, 2]], color=(0, 1, 0), tube_radius=None, figure=fig)
mlab.plot3d([0, axes[2, 0]], [0, axes[2, 1]], [0, axes[2, 2]], color=(0, 0, 1), tube_radius=None, figure=fig)
mlab.view(azimuth=180, elevation=70, focalpoint=[12.0909996, -1.04700089, -2.03249991], distance=62.0, figure=fig)
return fig
def draw_lidar(pc, color=None, fig=None, bgcolor=(0, 0, 0), pts_scale=1, pts_mode='point', pts_color=None):
''' Draw lidar points
Args:
        pc: numpy array (3,n) of XYZ (coordinates along the first axis)
color: numpy array (n) of intensity or whatever
fig: mayavi figure handler, if None create new one otherwise will use it
Returns:
fig: created or used fig
'''
if fig is None: fig = mlab.figure(figure=None, bgcolor=bgcolor, fgcolor=None, engine=None, size=(1600, 1000))
if color is None: color = pc[2, :]
mlab.points3d(pc[0, :], pc[1, :], pc[2, :], color, color=pts_color, mode=pts_mode, colormap='gnuplot',
scale_factor=pts_scale, figure=fig)
# draw origin
mlab.points3d(0, 0, 0, color=(1, 1, 1), mode='sphere', scale_factor=0.2)
# draw axis
axes = np.array([
[2., 0., 0., 0.],
[0., 2., 0., 0.],
[0., 0., 2., 0.],
], dtype=np.float64)
mlab.plot3d([0, axes[0, 0]], [0, axes[0, 1]], [0, axes[0, 2]], color=(1, 0, 0), tube_radius=None, figure=fig)
mlab.plot3d([0, axes[1, 0]], [0, axes[1, 1]], [0, axes[1, 2]], color=(0, 1, 0), tube_radius=None, figure=fig)
mlab.plot3d([0, axes[2, 0]], [0, axes[2, 1]], [0, axes[2, 2]], color=(0, 0, 1), tube_radius=None, figure=fig)
# draw fov (todo: update to real sensor spec.)
fov = np.array([ # 45 degree
[20., 20., 0., 0.],
[20., -20., 0., 0.],
], dtype=np.float64)
mlab.plot3d([0, fov[0, 0]], [0, fov[0, 1]], [0, fov[0, 2]], color=(1, 1, 1), tube_radius=None, line_width=1,
figure=fig)
mlab.plot3d([0, fov[1, 0]], [0, fov[1, 1]], [0, fov[1, 2]], color=(1, 1, 1), tube_radius=None, line_width=1,
figure=fig)
# draw square region
TOP_Y_MIN = -20
TOP_Y_MAX = 20
TOP_X_MIN = 0
TOP_X_MAX = 40
TOP_Z_MIN = -2.0
TOP_Z_MAX = 0.4
x1 = TOP_X_MIN
x2 = TOP_X_MAX
y1 = TOP_Y_MIN
y2 = TOP_Y_MAX
mlab.plot3d([x1, x1], [y1, y2], [0, 0], color=(0.5, 0.5, 0.5), tube_radius=0.1, line_width=1, figure=fig)
mlab.plot3d([x2, x2], [y1, y2], [0, 0], color=(0.5, 0.5, 0.5), tube_radius=0.1, line_width=1, figure=fig)
mlab.plot3d([x1, x2], [y1, y1], [0, 0], color=(0.5, 0.5, 0.5), tube_radius=0.1, line_width=1, figure=fig)
mlab.plot3d([x1, x2], [y2, y2], [0, 0], color=(0.5, 0.5, 0.5), tube_radius=0.1, line_width=1, figure=fig)
# mlab.orientation_axes()
mlab.view(azimuth=180, elevation=70, focalpoint=[12.0909996, -1.04700089, -2.03249991], distance=62.0, figure=fig)
return fig
def draw_gt_boxes3d(gt_boxes3d, fig, color=(1, 1, 1), line_width=1, draw_text=True, text_scale=(1, 1, 1),
color_list=None):
''' Draw 3D bounding boxes
Args:
gt_boxes3d: numpy array (n,8,3) for XYZs of the box corners
fig: mayavi figure handler
color: RGB value tuple in range (0,1), box line color
line_width: box line width
draw_text: boolean, if true, write box indices beside boxes
text_scale: three number tuple
color_list: a list of RGB tuple, if not None, overwrite color.
Returns:
fig: updated fig
'''
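    # Vertex ordering assumed by the edge loop below (a sketch; one face is
    # corners 0-3, the opposite face 4-7, and corner k connects to k+4):
    #     1 -------- 0
    #    /|         /|
    #   2 -------- 3 .
    #   | |        | |
    #   . 5 -------- 4
    #   |/         |/
    #   6 -------- 7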
num = len(gt_boxes3d)
for n in range(num):
b = gt_boxes3d[n]
if color_list is not None:
color = color_list[n]
if draw_text: mlab.text3d(b[4, 0], b[4, 1], b[4, 2], '%d' % n, scale=text_scale, color=color, figure=fig)
for k in range(0, 4):
# http://docs.enthought.com/mayavi/mayavi/auto/mlab_helper_functions.html
i, j = k, (k + 1) % 4
mlab.plot3d([b[i, 0], b[j, 0]], [b[i, 1], b[j, 1]], [b[i, 2], b[j, 2]], color=color, tube_radius=None,
line_width=line_width, figure=fig)
i, j = k + 4, (k + 1) % 4 + 4
mlab.plot3d([b[i, 0], b[j, 0]], [b[i, 1], b[j, 1]], [b[i, 2], b[j, 2]], color=color, tube_radius=None,
line_width=line_width, figure=fig)
i, j = k, k + 4
mlab.plot3d([b[i, 0], b[j, 0]], [b[i, 1], b[j, 1]], [b[i, 2], b[j, 2]], color=color, tube_radius=None,
line_width=line_width, figure=fig)
# mlab.show(1)
# mlab.view(azimuth=180, elevation=70, focalpoint=[ 12.0909996 , -1.04700089, -2.03249991], distance=62.0, figure=fig)
return fig
if __name__ == '__main__':
import pickle
pfile = "/Users/kanhua/Downloads/3d-object-detection-for-autonomous-vehicles/artifacts/val_pc.pickle"
with open(pfile, 'rb') as fp:
item = pickle.load(fp)
print(type(item))
# point_cloud_3d = np.loadtxt('mayavi/kitti_sample_scan.txt')
fig = draw_lidar_simple(item['pcl'][3])
mlab.savefig('pc_view.jpg', figure=fig)
    input()  # keep the mayavi window open until the user presses Enter
|
[
"lyft_dataset_sdk.utils.data_classes.LidarPointCloud.from_file",
"dataset.prepare_lyft_data_v2.transform_pc_to_camera_coord",
"mayavi.mlab.text3d",
"mayavi.mlab.figure",
"numpy.eye",
"pandas.read_csv",
"dataset.prepare_lyft_data.transform_box_from_world_to_sensor_coordinates",
"mayavi.mlab.view",
"numpy.identity",
"mayavi.mlab.points3d",
"dataset.prepare_lyft_data.parse_string_to_box",
"numpy.transpose",
"pickle.load",
"numpy.array",
"mayavi.mlab.savefig",
"mayavi.mlab.plot3d",
"warnings.warn",
"skimage.io.imread"
] |
[((5711, 5805), 'mayavi.mlab.figure', 'mlab.figure', ([], {'figure': 'None', 'bgcolor': '(0, 0, 0)', 'fgcolor': 'None', 'engine': 'None', 'size': '(1600, 1000)'}), '(figure=None, bgcolor=(0, 0, 0), fgcolor=None, engine=None, size\n =(1600, 1000))\n', (5722, 5805), True, 'import mayavi.mlab as mlab\n'), ((5862, 5987), 'mayavi.mlab.points3d', 'mlab.points3d', (['pc[:, 0]', 'pc[:, 1]', 'pc[:, 2]', 'color'], {'color': 'None', 'mode': '"""point"""', 'colormap': '"""cool"""', 'scale_factor': '(1)', 'figure': 'fig'}), "(pc[:, 0], pc[:, 1], pc[:, 2], color, color=None, mode='point',\n colormap='cool', scale_factor=1, figure=fig)\n", (5875, 5987), True, 'import mayavi.mlab as mlab\n'), ((6024, 6096), 'mayavi.mlab.points3d', 'mlab.points3d', (['(0)', '(0)', '(0)'], {'color': '(1, 1, 1)', 'mode': '"""sphere"""', 'scale_factor': '(0.2)'}), "(0, 0, 0, color=(1, 1, 1), mode='sphere', scale_factor=0.2)\n", (6037, 6096), True, 'import mayavi.mlab as mlab\n'), ((6124, 6222), 'numpy.array', 'np.array', (['[[2.0, 0.0, 0.0, 0.0], [0.0, 2.0, 0.0, 0.0], [0.0, 0.0, 2.0, 0.0]]'], {'dtype': 'np.float64'}), '([[2.0, 0.0, 0.0, 0.0], [0.0, 2.0, 0.0, 0.0], [0.0, 0.0, 2.0, 0.0]],\n dtype=np.float64)\n', (6132, 6222), True, 'import numpy as np\n'), ((6242, 6355), 'mayavi.mlab.plot3d', 'mlab.plot3d', (['[0, axes[0, 0]]', '[0, axes[0, 1]]', '[0, axes[0, 2]]'], {'color': '(1, 0, 0)', 'tube_radius': 'None', 'figure': 'fig'}), '([0, axes[0, 0]], [0, axes[0, 1]], [0, axes[0, 2]], color=(1, 0,\n 0), tube_radius=None, figure=fig)\n', (6253, 6355), True, 'import mayavi.mlab as mlab\n'), ((6356, 6469), 'mayavi.mlab.plot3d', 'mlab.plot3d', (['[0, axes[1, 0]]', '[0, axes[1, 1]]', '[0, axes[1, 2]]'], {'color': '(0, 1, 0)', 'tube_radius': 'None', 'figure': 'fig'}), '([0, axes[1, 0]], [0, axes[1, 1]], [0, axes[1, 2]], color=(0, 1,\n 0), tube_radius=None, figure=fig)\n', (6367, 6469), True, 'import mayavi.mlab as mlab\n'), ((6470, 6583), 'mayavi.mlab.plot3d', 'mlab.plot3d', (['[0, axes[2, 0]]', '[0, axes[2, 1]]', '[0, axes[2, 2]]'], {'color': '(0, 0, 1)', 'tube_radius': 'None', 'figure': 'fig'}), '([0, axes[2, 0]], [0, axes[2, 1]], [0, axes[2, 2]], color=(0, 0,\n 1), tube_radius=None, figure=fig)\n', (6481, 6583), True, 'import mayavi.mlab as mlab\n'), ((6584, 6703), 'mayavi.mlab.view', 'mlab.view', ([], {'azimuth': '(180)', 'elevation': '(70)', 'focalpoint': '[12.0909996, -1.04700089, -2.03249991]', 'distance': '(62.0)', 'figure': 'fig'}), '(azimuth=180, elevation=70, focalpoint=[12.0909996, -1.04700089, -\n 2.03249991], distance=62.0, figure=fig)\n', (6593, 6703), True, 'import mayavi.mlab as mlab\n'), ((7245, 7388), 'mayavi.mlab.points3d', 'mlab.points3d', (['pc[0, :]', 'pc[1, :]', 'pc[2, :]', 'color'], {'color': 'pts_color', 'mode': 'pts_mode', 'colormap': '"""gnuplot"""', 'scale_factor': 'pts_scale', 'figure': 'fig'}), "(pc[0, :], pc[1, :], pc[2, :], color, color=pts_color, mode=\n pts_mode, colormap='gnuplot', scale_factor=pts_scale, figure=fig)\n", (7258, 7388), True, 'import mayavi.mlab as mlab\n'), ((7425, 7497), 'mayavi.mlab.points3d', 'mlab.points3d', (['(0)', '(0)', '(0)'], {'color': '(1, 1, 1)', 'mode': '"""sphere"""', 'scale_factor': '(0.2)'}), "(0, 0, 0, color=(1, 1, 1), mode='sphere', scale_factor=0.2)\n", (7438, 7497), True, 'import mayavi.mlab as mlab\n'), ((7526, 7624), 'numpy.array', 'np.array', (['[[2.0, 0.0, 0.0, 0.0], [0.0, 2.0, 0.0, 0.0], [0.0, 0.0, 2.0, 0.0]]'], {'dtype': 'np.float64'}), '([[2.0, 0.0, 0.0, 0.0], [0.0, 2.0, 0.0, 0.0], [0.0, 0.0, 2.0, 0.0]],\n dtype=np.float64)\n', (7534, 7624), True, 'import 
numpy as np\n'), ((7644, 7757), 'mayavi.mlab.plot3d', 'mlab.plot3d', (['[0, axes[0, 0]]', '[0, axes[0, 1]]', '[0, axes[0, 2]]'], {'color': '(1, 0, 0)', 'tube_radius': 'None', 'figure': 'fig'}), '([0, axes[0, 0]], [0, axes[0, 1]], [0, axes[0, 2]], color=(1, 0,\n 0), tube_radius=None, figure=fig)\n', (7655, 7757), True, 'import mayavi.mlab as mlab\n'), ((7758, 7871), 'mayavi.mlab.plot3d', 'mlab.plot3d', (['[0, axes[1, 0]]', '[0, axes[1, 1]]', '[0, axes[1, 2]]'], {'color': '(0, 1, 0)', 'tube_radius': 'None', 'figure': 'fig'}), '([0, axes[1, 0]], [0, axes[1, 1]], [0, axes[1, 2]], color=(0, 1,\n 0), tube_radius=None, figure=fig)\n', (7769, 7871), True, 'import mayavi.mlab as mlab\n'), ((7872, 7985), 'mayavi.mlab.plot3d', 'mlab.plot3d', (['[0, axes[2, 0]]', '[0, axes[2, 1]]', '[0, axes[2, 2]]'], {'color': '(0, 0, 1)', 'tube_radius': 'None', 'figure': 'fig'}), '([0, axes[2, 0]], [0, axes[2, 1]], [0, axes[2, 2]], color=(0, 0,\n 1), tube_radius=None, figure=fig)\n', (7883, 7985), True, 'import mayavi.mlab as mlab\n'), ((8044, 8121), 'numpy.array', 'np.array', (['[[20.0, 20.0, 0.0, 0.0], [20.0, -20.0, 0.0, 0.0]]'], {'dtype': 'np.float64'}), '([[20.0, 20.0, 0.0, 0.0], [20.0, -20.0, 0.0, 0.0]], dtype=np.float64)\n', (8052, 8121), True, 'import numpy as np\n'), ((8155, 8279), 'mayavi.mlab.plot3d', 'mlab.plot3d', (['[0, fov[0, 0]]', '[0, fov[0, 1]]', '[0, fov[0, 2]]'], {'color': '(1, 1, 1)', 'tube_radius': 'None', 'line_width': '(1)', 'figure': 'fig'}), '([0, fov[0, 0]], [0, fov[0, 1]], [0, fov[0, 2]], color=(1, 1, 1),\n tube_radius=None, line_width=1, figure=fig)\n', (8166, 8279), True, 'import mayavi.mlab as mlab\n'), ((8296, 8420), 'mayavi.mlab.plot3d', 'mlab.plot3d', (['[0, fov[1, 0]]', '[0, fov[1, 1]]', '[0, fov[1, 2]]'], {'color': '(1, 1, 1)', 'tube_radius': 'None', 'line_width': '(1)', 'figure': 'fig'}), '([0, fov[1, 0]], [0, fov[1, 1]], [0, fov[1, 2]], color=(1, 1, 1),\n tube_radius=None, line_width=1, figure=fig)\n', (8307, 8420), True, 'import mayavi.mlab as mlab\n'), ((8657, 8767), 'mayavi.mlab.plot3d', 'mlab.plot3d', (['[x1, x1]', '[y1, y2]', '[0, 0]'], {'color': '(0.5, 0.5, 0.5)', 'tube_radius': '(0.1)', 'line_width': '(1)', 'figure': 'fig'}), '([x1, x1], [y1, y2], [0, 0], color=(0.5, 0.5, 0.5), tube_radius=\n 0.1, line_width=1, figure=fig)\n', (8668, 8767), True, 'import mayavi.mlab as mlab\n'), ((8767, 8877), 'mayavi.mlab.plot3d', 'mlab.plot3d', (['[x2, x2]', '[y1, y2]', '[0, 0]'], {'color': '(0.5, 0.5, 0.5)', 'tube_radius': '(0.1)', 'line_width': '(1)', 'figure': 'fig'}), '([x2, x2], [y1, y2], [0, 0], color=(0.5, 0.5, 0.5), tube_radius=\n 0.1, line_width=1, figure=fig)\n', (8778, 8877), True, 'import mayavi.mlab as mlab\n'), ((8877, 8987), 'mayavi.mlab.plot3d', 'mlab.plot3d', (['[x1, x2]', '[y1, y1]', '[0, 0]'], {'color': '(0.5, 0.5, 0.5)', 'tube_radius': '(0.1)', 'line_width': '(1)', 'figure': 'fig'}), '([x1, x2], [y1, y1], [0, 0], color=(0.5, 0.5, 0.5), tube_radius=\n 0.1, line_width=1, figure=fig)\n', (8888, 8987), True, 'import mayavi.mlab as mlab\n'), ((8987, 9097), 'mayavi.mlab.plot3d', 'mlab.plot3d', (['[x1, x2]', '[y2, y2]', '[0, 0]'], {'color': '(0.5, 0.5, 0.5)', 'tube_radius': '(0.1)', 'line_width': '(1)', 'figure': 'fig'}), '([x1, x2], [y2, y2], [0, 0], color=(0.5, 0.5, 0.5), tube_radius=\n 0.1, line_width=1, figure=fig)\n', (8998, 9097), True, 'import mayavi.mlab as mlab\n'), ((9128, 9247), 'mayavi.mlab.view', 'mlab.view', ([], {'azimuth': '(180)', 'elevation': '(70)', 'focalpoint': '[12.0909996, -1.04700089, -2.03249991]', 'distance': '(62.0)', 'figure': 'fig'}), 
'(azimuth=180, elevation=70, focalpoint=[12.0909996, -1.04700089, -\n 2.03249991], distance=62.0, figure=fig)\n', (9137, 9247), True, 'import mayavi.mlab as mlab\n'), ((11391, 11430), 'mayavi.mlab.savefig', 'mlab.savefig', (['"""pc_view.jpg"""'], {'figure': 'fig'}), "('pc_view.jpg', figure=fig)\n", (11403, 11430), True, 'import mayavi.mlab as mlab\n'), ((312, 352), 'warnings.warn', 'warnings.warn', (['"""mayavi is not installed"""'], {}), "('mayavi is not installed')\n", (325, 352), False, 'import warnings\n'), ((979, 1017), 'pandas.read_csv', 'pd.read_csv', (['pred_file'], {'index_col': '"""Id"""'}), "(pred_file, index_col='Id')\n", (990, 1017), True, 'import pandas as pd\n'), ((1217, 1274), 'dataset.prepare_lyft_data.parse_string_to_box', 'parse_string_to_box', (['boxes_str'], {'sample_token': 'sample_token'}), '(boxes_str, sample_token=sample_token)\n', (1236, 1274), False, 'from dataset.prepare_lyft_data import parse_string_to_box, transform_box_from_world_to_sensor_coordinates, get_sensor_to_world_transform_matrix_from_sample_data_token\n'), ((1746, 1771), 'skimage.io.imread', 'imread', (['camera_image_path'], {}), '(camera_image_path)\n', (1752, 1771), False, 'from skimage.io import imread\n'), ((1792, 1806), 'numpy.identity', 'np.identity', (['(3)'], {}), '(3)\n', (1803, 1806), True, 'import numpy as np\n'), ((3340, 3377), 'lyft_dataset_sdk.utils.data_classes.LidarPointCloud.from_file', 'LidarPointCloud.from_file', (['lidar_path'], {}), '(lidar_path)\n', (3365, 3377), False, 'from lyft_dataset_sdk.utils.data_classes import LidarPointCloud\n'), ((4793, 4865), 'dataset.prepare_lyft_data_v2.transform_pc_to_camera_coord', 'transform_pc_to_camera_coord', (['camera_data', 'lidar_record', 'lpc', 'self.lyftd'], {}), '(camera_data, lidar_record, lpc, self.lyftd)\n', (4821, 4865), False, 'from dataset.prepare_lyft_data_v2 import transform_pc_to_camera_coord\n'), ((7114, 7206), 'mayavi.mlab.figure', 'mlab.figure', ([], {'figure': 'None', 'bgcolor': 'bgcolor', 'fgcolor': 'None', 'engine': 'None', 'size': '(1600, 1000)'}), '(figure=None, bgcolor=bgcolor, fgcolor=None, engine=None, size=(\n 1600, 1000))\n', (7125, 7206), True, 'import mayavi.mlab as mlab\n'), ((11234, 11249), 'pickle.load', 'pickle.load', (['fp'], {}), '(fp)\n', (11245, 11249), False, 'import pickle\n'), ((3639, 3663), 'numpy.transpose', 'np.transpose', (['lpc.points'], {}), '(lpc.points)\n', (3651, 3663), True, 'import numpy as np\n'), ((4932, 4956), 'numpy.transpose', 'np.transpose', (['lpc.points'], {}), '(lpc.points)\n', (4944, 4956), True, 'import numpy as np\n'), ((10037, 10133), 'mayavi.mlab.text3d', 'mlab.text3d', (['b[4, 0]', 'b[4, 1]', 'b[4, 2]', "('%d' % n)"], {'scale': 'text_scale', 'color': 'color', 'figure': 'fig'}), "(b[4, 0], b[4, 1], b[4, 2], '%d' % n, scale=text_scale, color=\n color, figure=fig)\n", (10048, 10133), True, 'import mayavi.mlab as mlab\n'), ((10291, 10432), 'mayavi.mlab.plot3d', 'mlab.plot3d', (['[b[i, 0], b[j, 0]]', '[b[i, 1], b[j, 1]]', '[b[i, 2], b[j, 2]]'], {'color': 'color', 'tube_radius': 'None', 'line_width': 'line_width', 'figure': 'fig'}), '([b[i, 0], b[j, 0]], [b[i, 1], b[j, 1]], [b[i, 2], b[j, 2]],\n color=color, tube_radius=None, line_width=line_width, figure=fig)\n', (10302, 10432), True, 'import mayavi.mlab as mlab\n'), ((10508, 10649), 'mayavi.mlab.plot3d', 'mlab.plot3d', (['[b[i, 0], b[j, 0]]', '[b[i, 1], b[j, 1]]', '[b[i, 2], b[j, 2]]'], {'color': 'color', 'tube_radius': 'None', 'line_width': 'line_width', 'figure': 'fig'}), '([b[i, 0], b[j, 0]], [b[i, 1], b[j, 1]], [b[i, 2], b[j, 
2]],\n color=color, tube_radius=None, line_width=line_width, figure=fig)\n', (10519, 10649), True, 'import mayavi.mlab as mlab\n'), ((10711, 10852), 'mayavi.mlab.plot3d', 'mlab.plot3d', (['[b[i, 0], b[j, 0]]', '[b[i, 1], b[j, 1]]', '[b[i, 2], b[j, 2]]'], {'color': 'color', 'tube_radius': 'None', 'line_width': 'line_width', 'figure': 'fig'}), '([b[i, 0], b[j, 0]], [b[i, 1], b[j, 1]], [b[i, 2], b[j, 2]],\n color=color, tube_radius=None, line_width=line_width, figure=fig)\n', (10722, 10852), True, 'import mayavi.mlab as mlab\n'), ((1957, 2044), 'dataset.prepare_lyft_data.transform_box_from_world_to_sensor_coordinates', 'transform_box_from_world_to_sensor_coordinates', (['pred_box', 'camera_token', 'self.lyftd'], {}), '(pred_box, camera_token, self\n .lyftd)\n', (2003, 2044), False, 'from dataset.prepare_lyft_data import parse_string_to_box, transform_box_from_world_to_sensor_coordinates, get_sensor_to_world_transform_matrix_from_sample_data_token\n'), ((2639, 2728), 'dataset.prepare_lyft_data.transform_box_from_world_to_sensor_coordinates', 'transform_box_from_world_to_sensor_coordinates', (['pred_box', 'lidar_top_token', 'self.lyftd'], {}), '(pred_box, lidar_top_token,\n self.lyftd)\n', (2685, 2728), False, 'from dataset.prepare_lyft_data import parse_string_to_box, transform_box_from_world_to_sensor_coordinates, get_sensor_to_world_transform_matrix_from_sample_data_token\n'), ((3005, 3014), 'numpy.eye', 'np.eye', (['(2)'], {}), '(2)\n', (3011, 3014), True, 'import numpy as np\n'), ((3860, 3946), 'dataset.prepare_lyft_data.transform_box_from_world_to_sensor_coordinates', 'transform_box_from_world_to_sensor_coordinates', (['pred_box', 'lidar_token', 'self.lyftd'], {}), '(pred_box, lidar_token, self.\n lyftd)\n', (3906, 3946), False, 'from dataset.prepare_lyft_data import parse_string_to_box, transform_box_from_world_to_sensor_coordinates, get_sensor_to_world_transform_matrix_from_sample_data_token\n'), ((5153, 5240), 'dataset.prepare_lyft_data.transform_box_from_world_to_sensor_coordinates', 'transform_box_from_world_to_sensor_coordinates', (['pred_box', 'camera_token', 'self.lyftd'], {}), '(pred_box, camera_token, self\n .lyftd)\n', (5199, 5240), False, 'from dataset.prepare_lyft_data import parse_string_to_box, transform_box_from_world_to_sensor_coordinates, get_sensor_to_world_transform_matrix_from_sample_data_token\n')]
|
from sys import exit
from app.knn.knn_utils import *
from app.utils.prediction_utils import *
MODELS_PATH = "app/knn/results/models/"
EXAMPLE_IMG_PREFIX = "example_"
PREDICT_CSV_PREFIX = "knn_predictions_"
ACCURACY_TXT_PREFIX = "accuracy_k"
VAL_SIZE = 0.25
BATCH_SIZE = 2500
BEST_K = 7
# -------------------------------------------------------------------------------------------------------------------- #
def run_knn_test(val_size=VAL_SIZE, k=BEST_K):
print('\n------------- KNN model - predicting ')
print('------------- Loading data ')
X_train, y_train, X_test, y_test = pre_processing_dataset()
(X_train, y_train), (_, _) = split_to_train_and_val(X_train, y_train, val_size)
start_total_time = time.time()
print('------------- Making labels predictions for test data')
start_time = time.time()
predictions_list = predict_prob_with_batches(X_test, X_train, y_train, k, BATCH_SIZE)
print("- Completed in: ", convert_time(time.time() - start_time))
print('\n------------- Predicting labels for test data')
predicted_labels = predict_labels_for_every_batch(predictions_list)
print('------------- Saving prediction results to file')
save_labels_to_csv(predicted_labels, LOGS_PATH, PREDICT_CSV_PREFIX + distance_name + "_k" + str(k))
print('------------- Evaluating accuracy ')
accuracy = calc_accuracy(predicted_labels, y_test)
print('------------- Saving prediction results to file ')
print('------------- Results ')
accuracy_file_path = LOGS_PATH + ACCURACY_TXT_PREFIX + str(k) + '_' + distances_name[used_distance_number]
clear_log_file(accuracy_file_path)
log("KNN\n", accuracy_file_path)
log('Distance calc algorithm: ' + distance_name, accuracy_file_path)
log('k: ' + str(k), accuracy_file_path)
log('Train images qty: ' + str(X_train.shape[0]), accuracy_file_path)
log('Accuracy: ' + str(accuracy) + '%\nTotal calculation time= ' + str(
convert_time(time.time() - start_total_time)), accuracy_file_path)
print('\n------------- Result saved to file ')
return predictions_list, predicted_labels
def select_best_k(X_train, y_train, val_size=VAL_SIZE, batch_size=BATCH_SIZE):
print('------------- Searching for best k value')
start_time = time.time()
(X_train, y_train), (X_val, y_val) = split_to_train_and_val(X_train, y_train, val_size)
err, k = model_select_with_splitting_to_batches(X_val, X_train, y_val, y_train, candidate_k_values(), batch_size)
calc_time = convert_time(time.time() - start_time)
k_searching_path = LOGS_PATH + K_SEARCHING_TXT_PREFIX + str(k)
clear_log_file(k_searching_path)
print('------------- Best k has been found ')
log('One batch size: ' + str(batch_size), k_searching_path)
log('Train images qty: ' + str(X_train.shape[0]), k_searching_path)
log('Validation images qty: ' + str(X_val.shape[0]), k_searching_path)
log('Distance calc algorithm: ' + distance_name, k_searching_path)
log('Best k: ' + str(k) + '\nBest error: ' + str(err) + "\nCalculation time: " + str(calc_time), k_searching_path)
return k
# For quick tests: keep only the first batch of each dataset split
def get_debased_data(batch_size=500):
    return tuple(split_to_batches(d, batch_size)[0] for d in pre_processing_dataset())
def plot_examples(predictions, predicted_labels):
X_train, y_train, X_test, y_test = load_normal_data()
X_train, X_test = scale_x(X_train, X_test)
image_path = MODELS_PATH + EXAMPLE_IMG_PREFIX
plot_rand_images(X_train, y_train, image_path, 'png')
plot_image_with_predict_bar(X_test, y_test, predictions, predicted_labels, image_path, 'png')
if __name__ == "__main__":
X_train, y_train, X_test, y_test = pre_processing_dataset()
best_k = select_best_k(X_train, y_train)
predictions_list, predicted_labels = run_knn_test(k=best_k)
plot_examples(predictions_list[0], predicted_labels)
exit(0)
|
[
"sys.exit"
] |
[((3894, 3901), 'sys.exit', 'exit', (['(0)'], {}), '(0)\n', (3898, 3901), False, 'from sys import exit\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
manipulated bfgs method from scipy.optimize (V 1.5.2)
"""
#__docformat__ = "restructuredtext en"
# ******NOTICE***************
# optimize.py module by <NAME>
#
# You may copy and use this module as you see fit with no
# guarantee implied provided you keep this notice in all copies.
# *****END NOTICE************
# A collection of optimization algorithms. Version 0.5
# CHANGES
# Added fminbound (July 2001)
# Added brute (Aug. 2002)
# Finished line search satisfying strong Wolfe conditions (Mar. 2004)
# Updated strong Wolfe conditions line search to use
# cubic-interpolation (Mar. 2004)
# Minimization routines
__all__ = ['fmin_bfgs', 'line_search', 'OptimizeResult',
'OptimizeWarning']
__docformat__ = "restructuredtext en"
import warnings
from numpy import (asarray, sqrt, Inf, isinf)
import numpy as np
from scipy.optimize.linesearch import (line_search_wolfe1, line_search_wolfe2,
line_search_wolfe2 as line_search,
LineSearchWarning)
from scipy.optimize._differentiable_functions import ScalarFunction, FD_METHODS
# standard status messages of optimizers
_status_message = {'success': 'Optimization terminated successfully.',
'maxfev': 'Maximum number of function evaluations has '
'been exceeded.',
'maxiter': 'Maximum number of iterations has been '
'exceeded.',
'pr_loss': 'Desired error not necessarily achieved due '
'to precision loss.',
'nan': 'NaN result encountered.',
'out_of_bounds': 'The result is outside of the provided '
'bounds.'}
class MemoizeJac(object):
""" Decorator that caches the return values of a function returning `(fun, grad)`
each time it is called. """
def __init__(self, fun):
self.fun = fun
self.jac = None
self._value = None
self.x = None
def _compute_if_needed(self, x, *args):
if not np.all(x == self.x) or self._value is None or self.jac is None:
self.x = np.asarray(x).copy()
fg = self.fun(x, *args)
self.jac = fg[1]
self._value = fg[0]
def __call__(self, x, *args):
""" returns the the function value """
self._compute_if_needed(x, *args)
return self._value
def derivative(self, x, *args):
self._compute_if_needed(x, *args)
return self.jac
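    # Usage sketch (not part of the original module): wrap a combined
    # objective so fun and jac share one evaluation per point, e.g.
    #     fg = MemoizeJac(lambda x: (x @ x, 2 * x))
    #     fg(x0); fg.derivative(x0)   # the second call reuses the cached pair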
class OptimizeResult(dict):
""" Represents the optimization result.
Attributes
----------
x : ndarray
The solution of the optimization.
success : bool
Whether or not the optimizer exited successfully.
status : int
Termination status of the optimizer. Its value depends on the
underlying solver. Refer to `message` for details.
message : str
Description of the cause of the termination.
fun, jac, hess: ndarray
Values of objective function, its Jacobian and its Hessian (if
available). The Hessians may be approximations, see the documentation
of the function in question.
hess_inv : object
Inverse of the objective function's Hessian; may be an approximation.
Not available for all solvers. The type of this attribute may be
either np.ndarray or scipy.sparse.linalg.LinearOperator.
nfev, njev, nhev : int
Number of evaluations of the objective functions and of its
Jacobian and Hessian.
nit : int
Number of iterations performed by the optimizer.
maxcv : float
The maximum constraint violation.
Notes
-----
There may be additional attributes not listed above depending of the
specific solver. Since this class is essentially a subclass of dict
with attribute accessors, one can see which attributes are available
using the `keys()` method.
"""
def __getattr__(self, name):
try:
return self[name]
except KeyError:
raise AttributeError(name)
__setattr__ = dict.__setitem__
__delattr__ = dict.__delitem__
def __repr__(self):
if self.keys():
m = max(map(len, list(self.keys()))) + 1
return '\n'.join([k.rjust(m) + ': ' + repr(v)
for k, v in sorted(self.items())])
else:
return self.__class__.__name__ + "()"
def __dir__(self):
return list(self.keys())
class OptimizeWarning(UserWarning):
pass
def _check_unknown_options(unknown_options):
if unknown_options:
msg = ", ".join(map(str, unknown_options.keys()))
# Stack level 4: this is called from _minimize_*, which is
# called from another function in SciPy. Level 4 is the first
# level in user code.
warnings.warn("Unknown solver options: %s" % msg, OptimizeWarning, 4)
def is_array_scalar(x):
"""Test whether `x` is either a scalar or an array scalar.
"""
return np.size(x) == 1
_epsilon = sqrt(np.finfo(float).eps)
def vecnorm(x, ord=2):
if ord == Inf:
return np.amax(np.abs(x))
elif ord == -Inf:
return np.amin(np.abs(x))
else:
return np.sum(np.abs(x)**ord, axis=0)**(1.0 / ord)
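# Examples (a sketch): vecnorm(np.array([3.0, -4.0])) == 5.0 (Euclidean norm),
# vecnorm(np.array([3.0, -4.0]), ord=Inf) == 4.0 (max-norm).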
def _prepare_scalar_function(fun, x0, jac=None, args=(), bounds=None,
epsilon=None, finite_diff_rel_step=None,
hess=None):
"""
Creates a ScalarFunction object for use with scalar minimizers
(BFGS/LBFGSB/SLSQP/TNC/CG/etc).
Parameters
----------
fun : callable
The objective function to be minimized.
``fun(x, *args) -> float``
where ``x`` is an 1-D array with shape (n,) and ``args``
is a tuple of the fixed parameters needed to completely
specify the function.
x0 : ndarray, shape (n,)
Initial guess. Array of real elements of size (n,),
where 'n' is the number of independent variables.
jac : {callable, '2-point', '3-point', 'cs', None}, optional
Method for computing the gradient vector. If it is a callable, it
should be a function that returns the gradient vector:
``jac(x, *args) -> array_like, shape (n,)``
If one of `{'2-point', '3-point', 'cs'}` is selected then the gradient
is calculated with a relative step for finite differences. If `None`,
then two-point finite differences with an absolute step is used.
args : tuple, optional
Extra arguments passed to the objective function and its
derivatives (`fun`, `jac` functions).
bounds : sequence, optional
Bounds on variables. 'new-style' bounds are required.
eps : float or ndarray
If `jac is None` the absolute step size used for numerical
approximation of the jacobian via forward differences.
finite_diff_rel_step : None or array_like, optional
If `jac in ['2-point', '3-point', 'cs']` the relative step size to
use for numerical approximation of the jacobian. The absolute step
size is computed as ``h = rel_step * sign(x0) * max(1, abs(x0))``,
possibly adjusted to fit into the bounds. For ``method='3-point'``
the sign of `h` is ignored. If None (default) then step is selected
automatically.
hess : {callable, '2-point', '3-point', 'cs', None}
Computes the Hessian matrix. If it is callable, it should return the
Hessian matrix:
``hess(x, *args) -> {LinearOperator, spmatrix, array}, (n, n)``
Alternatively, the keywords {'2-point', '3-point', 'cs'} select a
finite difference scheme for numerical estimation.
Whenever the gradient is estimated via finite-differences, the Hessian
cannot be estimated with options {'2-point', '3-point', 'cs'} and needs
to be estimated using one of the quasi-Newton strategies.
Returns
-------
sf : ScalarFunction
"""
if callable(jac):
grad = jac
elif jac in FD_METHODS:
# epsilon is set to None so that ScalarFunction is made to use
# rel_step
epsilon = None
grad = jac
else:
# default (jac is None) is to do 2-point finite differences with
# absolute step size. ScalarFunction has to be provided an
# epsilon value that is not None to use absolute steps. This is
# normally the case from most _minimize* methods.
grad = '2-point'
epsilon = epsilon
if hess is None:
# ScalarFunction requires something for hess, so we give a dummy
# implementation here if nothing is provided, return a value of None
# so that downstream minimisers halt. The results of `fun.hess`
# should not be used.
def hess(x, *args):
return None
if bounds is None:
bounds = (-np.inf, np.inf)
# ScalarFunction caches. Reuse of fun(x) during grad
# calculation reduces overall function evaluations.
sf = ScalarFunction(fun, x0, args, grad, hess,
finite_diff_rel_step, bounds, epsilon=epsilon)
return sf
class _LineSearchError(RuntimeError):
pass
def _line_search_wolfe12(f, fprime, xk, pk, gfk, old_fval, old_old_fval,
**kwargs):
"""
Same as line_search_wolfe1, but fall back to line_search_wolfe2 if
suitable step length is not found, and raise an exception if a
suitable step length is not found.
Raises
------
_LineSearchError
If no suitable step size is found
    Returns
    -------
    alpha : float or None
        Computed step size if the line search converged, otherwise None.
    fc : int
        Number of function evaluations.
    gc : int
        Number of gradient evaluations.
    new_fval : float or None
        New function value at ``xk + alpha * pk``.
    old_fval : float
        Old function value.
    new_slope : float or None
        Local slope ``<fprime(x_new), pk>`` along the search direction.
"""
extra_condition = kwargs.pop('extra_condition', None)
ret1 = line_search_wolfe1(f, fprime, xk, pk, gfk,
old_fval, old_old_fval,
**kwargs)
if ret1[0] is not None and extra_condition is not None:
xp1 = xk + ret1[0] * pk
if not extra_condition(ret1[0], xp1, ret1[3], ret1[5]):
# Reject step if extra_condition fails
            ret1 = (None,) + ret1[1:]
if ret1[0] is None:
# line search failed: try different one.
with warnings.catch_warnings():
warnings.simplefilter('ignore', LineSearchWarning)
kwargs2 = {}
for key in ('c1', 'c2', 'amax'):
if key in kwargs:
kwargs2[key] = kwargs[key]
ret2 = line_search_wolfe2(f, fprime, xk, pk, gfk,
old_fval, old_old_fval,
extra_condition=extra_condition,
**kwargs2)
        # if ret2[0] is None:
        #     raise _LineSearchError()
        # sum up the function and gradient evaluations of both attempts,
        # keeping the 6-tuple shape documented above
        return (ret2[0], ret1[1] + ret2[1], ret1[2] + ret2[2]) + ret2[3:]
    return ret1
def fmin_bfgs(f, x0, fprime=None, args=(), gtol=1e-5, norm=Inf,
epsilon=_epsilon, maxiter=None, full_output=0, disp=1,
retall=0, H0 = None, callback=None, self_scaling = False):
"""
Minimize a function using the BFGS algorithm.
Parameters
----------
f : callable f(x,*args)
Objective function to be minimized.
x0 : ndarray
Initial guess.
fprime : callable f'(x,*args), optional
Gradient of f.
args : tuple, optional
Extra arguments passed to f and fprime.
gtol : float, optional
Gradient norm must be less than gtol before successful termination.
norm : float, optional
Order of norm (Inf is max, -Inf is min)
epsilon : int or ndarray, optional
If fprime is approximated, use this value for the step size.
callback : callable, optional
An optional user-supplied function to call after each
iteration. Called as callback(xk), where xk is the
current parameter vector.
maxiter : int, optional
Maximum number of iterations to perform.
full_output : bool, optional
        If True, return fopt, func_calls, grad_calls, and warnflag
in addition to xopt.
disp : bool, optional
Print convergence message if True.
retall : bool, optional
Return a list of results at each iteration if True.
H0 : ndarray, optional
Initialization of inverse of Hessian approximation.
Returns
-------
xopt : ndarray
Parameters which minimize f, i.e., f(xopt) == fopt.
fopt : float
Minimum value.
gopt : ndarray
Value of gradient at minimum, f'(xopt), which should be near 0.
Bopt : ndarray
Value of 1/f''(xopt), i.e., the inverse Hessian matrix.
func_calls : int
Number of function_calls made.
grad_calls : int
Number of gradient calls made.
warnflag : integer
1 : Maximum number of iterations exceeded.
2 : Gradient and/or function calls not changing.
3 : NaN result encountered.
allvecs : list
The value of xopt at each iteration. Only returned if retall is True.
See also
--------
minimize: Interface to minimization algorithms for multivariate
functions. See the 'BFGS' `method` in particular.
Notes
-----
Optimize the function, f, whose gradient is given by fprime
using the quasi-Newton method of Broyden, Fletcher, Goldfarb,
and Shanno (BFGS)
References
----------
Wright, and Nocedal 'Numerical Optimization', 1999, p. 198.
"""
opts = {'gtol': gtol,
'norm': norm,
'eps': epsilon,
'disp': disp,
'maxiter': maxiter,
'return_all': retall,
'H0': H0,
'self_scaling': self_scaling}
res = _minimize_bfgs(f, x0, args, fprime, callback=callback, **opts)
if full_output:
retlist = (res['x'], res['fun'], res['jac'], res['hess_inv'],
res['nfev'], res['njev'], res['status'])
if retall:
retlist += (res['allvecs'], )
return retlist
else:
if retall:
return res['x'], res['allvecs']
else:
return res['x']
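# A minimal usage sketch (not in the original module; the quadratic below is
# an assumed example): when fprime is omitted the gradient is approximated
# by finite differences.
#
#     >>> quad = lambda x: (x[0] - 1.0) ** 2 + (x[1] + 2.0) ** 2
#     >>> xopt = fmin_bfgs(quad, x0=np.zeros(2), disp=0)
#     >>> np.allclose(xopt, [1.0, -2.0], atol=1e-4)
#     True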
def _minimize_bfgs(fun, x0, args=(), jac=None, callback=None,
gtol=1e-5, norm=Inf, eps=_epsilon, maxiter=None,
disp=False, return_all=False, H0 = None,
finite_diff_rel_step=None,self_scaling = False,
**unknown_options):
"""
Minimization of scalar function of one or more variables using the
BFGS algorithm.
Options
-------
disp : bool
Set to True to print convergence messages.
maxiter : int
Maximum number of iterations to perform.
gtol : float
Gradient norm must be less than `gtol` before successful
termination.
norm : float
Order of norm (Inf is max, -Inf is min).
eps : float or ndarray
If `jac is None` the absolute step size used for numerical
approximation of the jacobian via forward differences.
return_all : bool, optional
Set to True to return a list of the best solution at each of the
iterations.
H0 : ndarray, optional
Initialization of inverse of Hessian approximation.
finite_diff_rel_step : None or array_like, optional
If `jac in ['2-point', '3-point', 'cs']` the relative step size to
use for numerical approximation of the jacobian. The absolute step
size is computed as ``h = rel_step * sign(x0) * max(1, abs(x0))``,
possibly adjusted to fit into the bounds. For ``method='3-point'``
the sign of `h` is ignored. If None (default) then step is selected
automatically.
self_scaling : bool, optional
whether to use a self-scaling method for updating the matrix
"""
_check_unknown_options(unknown_options)
retall = return_all
x0 = asarray(x0).flatten()
if x0.ndim == 0:
x0.shape = (1,)
if maxiter is None:
maxiter = len(x0) * 200
sf = _prepare_scalar_function(fun, x0, jac, args=args, epsilon=eps,
finite_diff_rel_step=finite_diff_rel_step)
f = sf.fun
myfprime = sf.grad
old_fval = f(x0)
gfk = myfprime(x0)
if not np.isscalar(old_fval):
try:
old_fval = old_fval.item()
except (ValueError, AttributeError):
raise ValueError("The user-provided "
"objective function must "
"return a scalar value.")
k = 0
N = len(x0)
I = np.eye(N, dtype=int)
# initialize Hk with given initial value
if H0 is None:
Hk = I
else:
Hk = H0
# Sets the initial step guess to dx ~ 1
old_old_fval = old_fval + np.linalg.norm(gfk) / 2
xk = x0
if retall:
allvecs = [x0]
allHs = [Hk]
allrhos = [0]
warnflag = 0
gnorm = vecnorm(gfk, ord=norm)
while (gnorm > gtol) and (k < maxiter):
pk = -np.dot(Hk, gfk)
try:
alpha_k, fc, gc, old_fval, old_old_fval, gfkp1 = \
_line_search_wolfe12(f, myfprime, xk, pk, gfk,
old_fval, old_old_fval, amin=1e-100, amax=1e100)
except _LineSearchError:
# Line search failed to find a better solution.
warnflag = 2
break
xkp1 = xk + alpha_k * pk
if retall:
allvecs.append(xkp1)
sk = xkp1 - xk
xk = xkp1
if gfkp1 is None:
gfkp1 = myfprime(xkp1)
yk = gfkp1 - gfk
gfk = gfkp1
if callback is not None:
callback(xk)
k += 1
gnorm = vecnorm(gfk, ord=norm)
if (gnorm <= gtol):
break
if not np.isfinite(old_fval):
# We correctly found +-Inf as optimal value, or something went
# wrong.
warnflag = 2
break
        try:  # this was handled in numeric, let it remain for more safety
            rhok = 1.0 / np.dot(yk, sk)
except ZeroDivisionError:
rhok = 1000.0
if disp:
print("Divide-by-zero encountered: rhok assumed large")
        if isinf(rhok):  # patch for NumPy: division by zero yields inf, not an exception
rhok = 1000.0
if disp:
print("Divide-by-zero encountered: rhok assumed large")
        if rhok < 0:  # curvature condition s^T y > 0 violated: skip the update
            rhok = 0
A1 = I - sk[:, np.newaxis] * yk[np.newaxis, :] * rhok
A2 = I - yk[:, np.newaxis] * sk[np.newaxis, :] * rhok
        if self_scaling:
            # self-scaling BFGS: rescale Hk by 1/gammak, with
            # gammak = rhok * s_k^T Hk s_k, before the usual rank-two update
            gammak = np.inner(sk, Hk @ sk) * rhok
            Hk = 1 / gammak * np.dot(A1, np.dot(Hk, A2)) + (rhok * sk[:, np.newaxis] *
                                                               sk[np.newaxis, :])
        else:
            Hk = np.dot(A1, np.dot(Hk, A2)) + (rhok * sk[:, np.newaxis] *
                                                 sk[np.newaxis, :])
if retall:
allHs.append(Hk)
allrhos.append(rhok)
fval = old_fval
if warnflag == 2:
msg = _status_message['pr_loss']
elif k >= maxiter:
warnflag = 1
msg = _status_message['maxiter']
elif np.isnan(gnorm) or np.isnan(fval) or np.isnan(xk).any():
warnflag = 3
msg = _status_message['nan']
else:
msg = _status_message['success']
if disp:
print("%s%s" % ("Warning: " if warnflag != 0 else "", msg))
print(" Current function value: %f" % fval)
print(" Iterations: %d" % k)
print(" Function evaluations: %d" % sf.nfev)
print(" Gradient evaluations: %d" % sf.ngev)
result = OptimizeResult(fun=fval, jac=gfk, hess_inv=Hk, nfev=sf.nfev,
njev=sf.ngev, status=warnflag,
success=(warnflag == 0), message=msg, x=xk,
nit=k)
if retall:
result['allvecs'] = [allvecs,allHs,allrhos]
return result
|
[
"numpy.abs",
"numpy.isnan",
"numpy.linalg.norm",
"numpy.inner",
"warnings.simplefilter",
"numpy.isfinite",
"numpy.finfo",
"warnings.catch_warnings",
"numpy.size",
"numpy.asarray",
"numpy.isinf",
"scipy.optimize._differentiable_functions.ScalarFunction",
"numpy.dot",
"numpy.all",
"numpy.isscalar",
"scipy.optimize.linesearch.line_search_wolfe1",
"scipy.optimize.linesearch.line_search_wolfe2",
"numpy.eye",
"warnings.warn"
] |
[((9140, 9232), 'scipy.optimize._differentiable_functions.ScalarFunction', 'ScalarFunction', (['fun', 'x0', 'args', 'grad', 'hess', 'finite_diff_rel_step', 'bounds'], {'epsilon': 'epsilon'}), '(fun, x0, args, grad, hess, finite_diff_rel_step, bounds,\n epsilon=epsilon)\n', (9154, 9232), False, 'from scipy.optimize._differentiable_functions import ScalarFunction, FD_METHODS\n'), ((10202, 10278), 'scipy.optimize.linesearch.line_search_wolfe1', 'line_search_wolfe1', (['f', 'fprime', 'xk', 'pk', 'gfk', 'old_fval', 'old_old_fval'], {}), '(f, fprime, xk, pk, gfk, old_fval, old_old_fval, **kwargs)\n', (10220, 10278), False, 'from scipy.optimize.linesearch import line_search_wolfe1, line_search_wolfe2, line_search_wolfe2 as line_search, LineSearchWarning\n'), ((17091, 17111), 'numpy.eye', 'np.eye', (['N'], {'dtype': 'int'}), '(N, dtype=int)\n', (17097, 17111), True, 'import numpy as np\n'), ((4947, 5016), 'warnings.warn', 'warnings.warn', (["('Unknown solver options: %s' % msg)", 'OptimizeWarning', '(4)'], {}), "('Unknown solver options: %s' % msg, OptimizeWarning, 4)\n", (4960, 5016), False, 'import warnings\n'), ((5125, 5135), 'numpy.size', 'np.size', (['x'], {}), '(x)\n', (5132, 5135), True, 'import numpy as np\n'), ((5159, 5174), 'numpy.finfo', 'np.finfo', (['float'], {}), '(float)\n', (5167, 5174), True, 'import numpy as np\n'), ((16775, 16796), 'numpy.isscalar', 'np.isscalar', (['old_fval'], {}), '(old_fval)\n', (16786, 16796), True, 'import numpy as np\n'), ((18764, 18775), 'numpy.isinf', 'isinf', (['rhok'], {}), '(rhok)\n', (18769, 18775), False, 'from numpy import asarray, sqrt, Inf, isinf\n'), ((5247, 5256), 'numpy.abs', 'np.abs', (['x'], {}), '(x)\n', (5253, 5256), True, 'import numpy as np\n'), ((10692, 10717), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (10715, 10717), False, 'import warnings\n'), ((10731, 10781), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""', 'LineSearchWarning'], {}), "('ignore', LineSearchWarning)\n", (10752, 10781), False, 'import warnings\n'), ((10952, 11066), 'scipy.optimize.linesearch.line_search_wolfe2', 'line_search_wolfe2', (['f', 'fprime', 'xk', 'pk', 'gfk', 'old_fval', 'old_old_fval'], {'extra_condition': 'extra_condition'}), '(f, fprime, xk, pk, gfk, old_fval, old_old_fval,\n extra_condition=extra_condition, **kwargs2)\n', (10970, 11066), False, 'from scipy.optimize.linesearch import line_search_wolfe1, line_search_wolfe2, line_search_wolfe2 as line_search, LineSearchWarning\n'), ((16406, 16417), 'numpy.asarray', 'asarray', (['x0'], {}), '(x0)\n', (16413, 16417), False, 'from numpy import asarray, sqrt, Inf, isinf\n'), ((17295, 17314), 'numpy.linalg.norm', 'np.linalg.norm', (['gfk'], {}), '(gfk)\n', (17309, 17314), True, 'import numpy as np\n'), ((17523, 17538), 'numpy.dot', 'np.dot', (['Hk', 'gfk'], {}), '(Hk, gfk)\n', (17529, 17538), True, 'import numpy as np\n'), ((18318, 18339), 'numpy.isfinite', 'np.isfinite', (['old_fval'], {}), '(old_fval)\n', (18329, 18339), True, 'import numpy as np\n'), ((2140, 2159), 'numpy.all', 'np.all', (['(x == self.x)'], {}), '(x == self.x)\n', (2146, 2159), True, 'import numpy as np\n'), ((5303, 5312), 'numpy.abs', 'np.abs', (['x'], {}), '(x)\n', (5309, 5312), True, 'import numpy as np\n'), ((18584, 18598), 'numpy.dot', 'np.dot', (['yk', 'sk'], {}), '(yk, sk)\n', (18590, 18598), True, 'import numpy as np\n'), ((19154, 19175), 'numpy.inner', 'np.inner', (['sk', '(Hk @ sk)'], {}), '(sk, Hk @ sk)\n', (19162, 19175), True, 'import numpy as np\n'), ((19755, 19770), 
'numpy.isnan', 'np.isnan', (['gnorm'], {}), '(gnorm)\n', (19763, 19770), True, 'import numpy as np\n'), ((19774, 19788), 'numpy.isnan', 'np.isnan', (['fval'], {}), '(fval)\n', (19782, 19788), True, 'import numpy as np\n'), ((2225, 2238), 'numpy.asarray', 'np.asarray', (['x'], {}), '(x)\n', (2235, 2238), True, 'import numpy as np\n'), ((19381, 19395), 'numpy.dot', 'np.dot', (['Hk', 'A2'], {}), '(Hk, A2)\n', (19387, 19395), True, 'import numpy as np\n'), ((5346, 5355), 'numpy.abs', 'np.abs', (['x'], {}), '(x)\n', (5352, 5355), True, 'import numpy as np\n'), ((19221, 19235), 'numpy.dot', 'np.dot', (['Hk', 'A2'], {}), '(Hk, A2)\n', (19227, 19235), True, 'import numpy as np\n'), ((19792, 19804), 'numpy.isnan', 'np.isnan', (['xk'], {}), '(xk)\n', (19800, 19804), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
"""Utils module."""
import json
import re
def camel_case_split(identifier):
"""CamelCase split"""
matches = re.finditer(
".+?(?:(?<=[a-z])(?=[A-Z])|(?<=[A-Z])(?=[A-Z][a-z])|$)",
identifier)
return [m.group(0) for m in matches]
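# Example: camel_case_split("simpleHTTPServer") -> ['simple', 'HTTP', 'Server']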
def host_url(request):
return request.host_url
# return "http://localhost:5000/"
def to_json(data):
    if isinstance(data, str):
        # naive fix-up: assumes single-quoted pseudo-JSON with no
        # embedded quotes inside the values
        data = data.replace("'", '"')
        data = json.loads(data)
return data
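# Example (a sketch): to_json("{'name': 'x'}") -> {'name': 'x'} as a dict;
# non-string inputs pass through unchanged.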
|
[
"re.finditer",
"json.loads"
] |
[((144, 229), 're.finditer', 're.finditer', (['""".+?(?:(?<=[a-z])(?=[A-Z])|(?<=[A-Z])(?=[A-Z][a-z])|$)"""', 'identifier'], {}), "('.+?(?:(?<=[a-z])(?=[A-Z])|(?<=[A-Z])(?=[A-Z][a-z])|$)', identifier\n )\n", (155, 229), False, 'import re\n'), ((478, 494), 'json.loads', 'json.loads', (['data'], {}), '(data)\n', (488, 494), False, 'import json\n')]
|
from db.repositories.statistics_repository import StatisticsRepository
from model.DTO.Statistics import Statistics as StatisticsDTO
from model.Statistics import Statistics
def get_statistics(stats_repo: StatisticsRepository) -> Statistics:
return stats_repo.get_statistics()
def create_statistics(stats_repo: StatisticsRepository, statistics: StatisticsDTO) -> Statistics:
return stats_repo.create_statistics(statistics)
def update_statistics(stats_repo: StatisticsRepository, id_statistics: int, statistics: StatisticsDTO) -> Statistics:
return stats_repo.update_statistics(id_statistics, statistics)
def create_or_update_statistics(stats_repo: StatisticsRepository, is_mutant: bool):
count_mutant = 0
count_human = 0
ratio = 1.0
statistics = get_statistics(stats_repo)
# calculate the ratio and increase the count for the mutant or human
    if is_mutant:
        if statistics:
            count_mutant = statistics.count_mutant_dna + 1
            count_human = statistics.count_human_dna
            # guard against division by zero when no human DNA is recorded yet
            ratio = count_mutant / count_human if count_human else 1.0
else:
count_mutant = 1
count_human = 0
ratio = 1.0
else:
if statistics:
count_mutant = statistics.count_mutant_dna
count_human = statistics.count_human_dna + 1
ratio = count_mutant / count_human
else:
count_mutant = 0
count_human = 1
ratio = 1.0
    statistics_dto = StatisticsDTO(
        count_mutant_dna=count_mutant,
        count_human_dna=count_human,
        ratio=ratio
    )
    if statistics:
        update_statistics(stats_repo, statistics.id, statistics_dto)
    else:
        create_statistics(stats_repo, statistics_dto)
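# Usage sketch (assumes a concrete StatisticsRepository wired to a database):
#     repo = StatisticsRepository(...)            # hypothetical setup
#     create_or_update_statistics(repo, is_mutant=True)
#     print(get_statistics(repo).ratio)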
|
[
"model.DTO.Statistics.Statistics"
] |
[((1508, 1598), 'model.DTO.Statistics.Statistics', 'StatisticsDTO', ([], {'count_mutant_dna': 'count_mutant', 'count_human_dna': 'count_human', 'ratio': 'ratio'}), '(count_mutant_dna=count_mutant, count_human_dna=count_human,\n ratio=ratio)\n', (1521, 1598), True, 'from model.DTO.Statistics import Statistics as StatisticsDTO\n'), ((1752, 1842), 'model.DTO.Statistics.Statistics', 'StatisticsDTO', ([], {'count_mutant_dna': 'count_mutant', 'count_human_dna': 'count_human', 'ratio': 'ratio'}), '(count_mutant_dna=count_mutant, count_human_dna=count_human,\n ratio=ratio)\n', (1765, 1842), True, 'from model.DTO.Statistics import Statistics as StatisticsDTO\n')]
|
import cleaner
cleaner.doClean()
|
[
"cleaner.doClean"
] |
[((16, 33), 'cleaner.doClean', 'cleaner.doClean', ([], {}), '()\n', (31, 33), False, 'import cleaner\n')]
|
from expects import *
import client.api
import client.models
import random
def empty_interface(api_client):
ports = client.api.PortsApi(api_client)
ps = ports.list_ports(kind='dpdk')
expect(ps).not_to(be_empty)
i = client.models.Interface()
i.port_id = ps[0].id
i.config = client.models.InterfaceConfig()
return i
def example_ipv4_interface(api_client):
i = empty_interface(api_client)
i.config.protocols = make_interface_protocols([
client.models.InterfaceProtocolConfigEth(mac_address='00:00:00:00:00:01'),
client.models.InterfaceProtocolConfigIpv4(method='static', static=client.models.InterfaceProtocolConfigIpv4Static(address='1.1.1.1', prefix_length=24)),
])
return i
def example_ipv6_interface(api_client):
i = empty_interface(api_client)
i.config.protocols = make_interface_protocols([
client.models.InterfaceProtocolConfigEth(mac_address='00:00:00:00:00:01'),
client.models.InterfaceProtocolConfigIpv6(method='static', static=client.models.InterfaceProtocolConfigIpv6Static(address='fd00::1', prefix_length=64)),
])
return i
def example_ipv4andv6_interface(api_client):
i = empty_interface(api_client)
i.config.protocols = make_interface_protocols([
client.models.InterfaceProtocolConfigEth(mac_address='00:00:00:00:00:01'),
client.models.InterfaceProtocolConfigIpv4(method='static', static=client.models.InterfaceProtocolConfigIpv4Static(address='1.1.1.1', prefix_length=24)),
client.models.InterfaceProtocolConfigIpv6(method='static', static=client.models.InterfaceProtocolConfigIpv6Static(address='fd00::1', prefix_length=64)),
])
return i
def random_mac(port_id):
    octets = list()
    # clear the multicast and locally-administered bits of the first octet
    octets.append(random.randint(0, 255) & 0xfc)
    # embed (part of) the port id in the second and third octets
    octets.append((int(port_id) >> 16) & 0xff)
    octets.append(int(port_id) & 0xff)
    for _i in range(3):
        octets.append(random.randint(0, 255))
    return '{0:02x}:{1:02x}:{2:02x}:{3:02x}:{4:02x}:{5:02x}'.format(*octets)
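# Sketch of the resulting layout (hypothetical draw): random_mac('3') might
# yield e.g. 'a4:00:03:5e:12:9f' -- octets 2 and 3 carry the port id, the
# remaining octets are random.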
def ipv4_interface(api_client, **kwargs):
i = empty_interface(api_client)
method = kwargs.get('method', None)
    if method is None and 'ipv4_address' in kwargs:
method = 'static'
if method == 'static':
i.config.protocols = make_interface_protocols([
client.models.InterfaceProtocolConfigEth(mac_address=kwargs.get('mac_address', random_mac(i.port_id))),
client.models.InterfaceProtocolConfigIpv4(
method='static',
static=client.models.InterfaceProtocolConfigIpv4Static(
address=kwargs['ipv4_address'],
prefix_length=kwargs.get('prefix_length', 24),
gateway=kwargs.get('gateway', None)))
])
elif method == 'auto':
i.config.protocols = make_interface_protocols([
client.models.InterfaceProtocolConfigEth(mac_address=random_mac(i.port_id)),
client.models.InterfaceProtocolConfigIpv4(method='auto')
])
else:
i.config.protocols = make_interface_protocols([
client.models.InterfaceProtocolConfigEth(mac_address=random_mac(i.port_id)),
client.models.InterfaceProtocolConfigIpv4(
method='dhcp',
dhcp=client.models.InterfaceProtocolConfigIpv4Dhcp())
])
return i
def ipv6_interface(api_client, **kwargs):
i = empty_interface(api_client)
method = kwargs.get('method', None)
    if method is None and 'ipv6_address' in kwargs:
method = 'static'
if method == 'static':
i.config.protocols = make_interface_protocols([
client.models.InterfaceProtocolConfigEth(mac_address=kwargs.get('mac_address', random_mac(i.port_id))),
client.models.InterfaceProtocolConfigIpv6(
method='static',
link_local_address=kwargs.get('ipv6_link_local_address', None),
static=client.models.InterfaceProtocolConfigIpv6Static(
address=kwargs['ipv6_address'],
prefix_length=kwargs.get('prefix_length', 64),
gateway=kwargs.get('gateway', None)))
])
elif method == 'auto':
i.config.protocols = make_interface_protocols([
client.models.InterfaceProtocolConfigEth(mac_address=random_mac(i.port_id)),
client.models.InterfaceProtocolConfigIpv6(
method='auto',
link_local_address=kwargs.get('ipv6_link_local_address', None))
])
else:
i.config.protocols = make_interface_protocols([
client.models.InterfaceProtocolConfigEth(mac_address=random_mac(i.port_id)),
client.models.InterfaceProtocolConfigIpv6(
method='dhcp6',
link_local_address=kwargs.get('ipv6_link_local_address', None),
dhcp6=client.models.InterfaceProtocolConfigIpv6Dhcp6(stateless=True))
])
return i
def as_interface_protocol(p):
expect(p).not_to(be_none)
name = p.__class__.__name__
expect(name).to(start_with('InterfaceProtocolConfig'))
    # strip the 'InterfaceProtocolConfig' prefix to get the protocol name
    proto = name[len('InterfaceProtocolConfig'):].lower()
expect(proto).not_to(be_empty)
pc = client.models.InterfaceProtocolConfig()
expect(pc).to(have_property(proto))
setattr(pc, proto, p)
return pc
def make_interface_protocols(config_list):
return list(map(as_interface_protocol, config_list))
|
[
"random.randint"
] |
[((1757, 1779), 'random.randint', 'random.randint', (['(0)', '(255)'], {}), '(0, 255)\n', (1771, 1779), False, 'import random\n'), ((1920, 1942), 'random.randint', 'random.randint', (['(0)', '(255)'], {}), '(0, 255)\n', (1934, 1942), False, 'import random\n')]
|
"""
Modified from https://github.com/pytorch/vision/blob/master/torchvision/models/vgg.py
"""
import operator
from functools import reduce
import torch
import torch.nn as nn
class VGG(nn.Module):
"""VGG Model"""
def __init__(self, input_size, num_classes, cfg):
super(VGG, self).__init__()
self.input_size = input_size
self.num_classes = num_classes
# Model
self.features, out_size = self._makeFeatures(cfg)
self.classifier = self._makeClassifier(out_size)
def forward(self, x):
x = self.features(x)
x = x.view(x.size(0), -1)
x = self.classifier(x)
return x
def _makeFeatures(self, cfg):
def conv3(in_channels, out_channels):
return [nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1),
nn.BatchNorm2d(out_channels),
nn.ReLU(inplace=True),
]
def maxpool():
return nn.MaxPool2d(kernel_size=2, stride=2)
layers = []
in_channels = self.input_size[0]
out_frac = 1
for out_channels in cfg:
if out_channels == 'M':
layers.append(maxpool())
out_frac *= 2
else:
layers.extend(conv3(in_channels, out_channels))
in_channels = out_channels
        # both spatial dims must be divisible by the total downsampling factor
        assert self.input_size[1] % out_frac == 0 and self.input_size[2] % out_frac == 0
out_shape = (in_channels, self.input_size[1] // out_frac, self.input_size[2] // out_frac)
return nn.Sequential(*layers), out_shape
def _makeClassifier(self, in_shape):
return nn.Sequential(
nn.Linear(in_features=reduce(operator.mul, in_shape, 1), out_features=4096),
nn.ReLU(inplace=True),
nn.Dropout(0.5),
nn.Linear(in_features=4096, out_features=4096),
nn.ReLU(inplace=True),
nn.Dropout(0.5),
nn.Linear(in_features=4096, out_features=self.num_classes),
)
def initializeWeights(self):
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
if m.bias is not None:
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.BatchNorm2d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.Linear):
nn.init.normal_(m.weight, 0, 0.01)
nn.init.constant_(m.bias, 0)
def VGG16(input_size, num_classes):
"""Configuration D, VGG16"""
cfg = [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M']
return VGG(input_size, num_classes, cfg=cfg)
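# Usage sketch: with a CIFAR-10-sized input the five 'M' entries downsample
# 32x32 by 2**5, so the classifier sees 512 * 1 * 1 input features.
#     model = VGG16(input_size=(3, 32, 32), num_classes=10)
#     model.initializeWeights()
#     logits = model(torch.randn(8, 3, 32, 32))   # -> shape (8, 10)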
|
[
"torch.nn.Dropout",
"torch.nn.ReLU",
"torch.nn.init.kaiming_normal_",
"torch.nn.Sequential",
"torch.nn.Conv2d",
"functools.reduce",
"torch.nn.BatchNorm2d",
"torch.nn.init.constant_",
"torch.nn.init.normal_",
"torch.nn.Linear",
"torch.nn.MaxPool2d"
] |
[((977, 1014), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(2)', 'stride': '(2)'}), '(kernel_size=2, stride=2)\n', (989, 1014), True, 'import torch.nn as nn\n'), ((1567, 1589), 'torch.nn.Sequential', 'nn.Sequential', (['*layers'], {}), '(*layers)\n', (1580, 1589), True, 'import torch.nn as nn\n'), ((1774, 1795), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1781, 1795), True, 'import torch.nn as nn\n'), ((1809, 1824), 'torch.nn.Dropout', 'nn.Dropout', (['(0.5)'], {}), '(0.5)\n', (1819, 1824), True, 'import torch.nn as nn\n'), ((1838, 1884), 'torch.nn.Linear', 'nn.Linear', ([], {'in_features': '(4096)', 'out_features': '(4096)'}), '(in_features=4096, out_features=4096)\n', (1847, 1884), True, 'import torch.nn as nn\n'), ((1898, 1919), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1905, 1919), True, 'import torch.nn as nn\n'), ((1933, 1948), 'torch.nn.Dropout', 'nn.Dropout', (['(0.5)'], {}), '(0.5)\n', (1943, 1948), True, 'import torch.nn as nn\n'), ((1962, 2020), 'torch.nn.Linear', 'nn.Linear', ([], {'in_features': '(4096)', 'out_features': 'self.num_classes'}), '(in_features=4096, out_features=self.num_classes)\n', (1971, 2020), True, 'import torch.nn as nn\n'), ((755, 817), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_channels', 'out_channels'], {'kernel_size': '(3)', 'padding': '(1)'}), '(in_channels, out_channels, kernel_size=3, padding=1)\n', (764, 817), True, 'import torch.nn as nn\n'), ((839, 867), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['out_channels'], {}), '(out_channels)\n', (853, 867), True, 'import torch.nn as nn\n'), ((889, 910), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (896, 910), True, 'import torch.nn as nn\n'), ((2160, 2230), 'torch.nn.init.kaiming_normal_', 'nn.init.kaiming_normal_', (['m.weight'], {'mode': '"""fan_out"""', 'nonlinearity': '"""relu"""'}), "(m.weight, mode='fan_out', nonlinearity='relu')\n", (2183, 2230), True, 'import torch.nn as nn\n'), ((1707, 1740), 'functools.reduce', 'reduce', (['operator.mul', 'in_shape', '(1)'], {}), '(operator.mul, in_shape, 1)\n', (1713, 1740), False, 'from functools import reduce\n'), ((2290, 2318), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.bias', '(0)'], {}), '(m.bias, 0)\n', (2307, 2318), True, 'import torch.nn as nn\n'), ((2383, 2413), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.weight', '(1)'], {}), '(m.weight, 1)\n', (2400, 2413), True, 'import torch.nn as nn\n'), ((2430, 2458), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.bias', '(0)'], {}), '(m.bias, 0)\n', (2447, 2458), True, 'import torch.nn as nn\n'), ((2518, 2552), 'torch.nn.init.normal_', 'nn.init.normal_', (['m.weight', '(0)', '(0.01)'], {}), '(m.weight, 0, 0.01)\n', (2533, 2552), True, 'import torch.nn as nn\n'), ((2569, 2597), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.bias', '(0)'], {}), '(m.bias, 0)\n', (2586, 2597), True, 'import torch.nn as nn\n')]
|
# Imports here
import matplotlib.pyplot as plt
import torch
from torch import nn
from torch import optim
import torch.nn.functional as F
from torchvision import datasets, transforms, models
import numpy as np
from PIL import Image
from collections import OrderedDict
import argparse
import json
import utils
ap = argparse.ArgumentParser(description='Predict.py')
ap.add_argument('--top_k', default=5, dest="top_k", action="store", type=int)
ap.add_argument('--category_names', dest="category_names", action="store", default='cat_to_name.json')
ap.add_argument('--gpu_enabled', dest="gpu_enabled", type=bool, action="store", default=True)  # note: argparse's type=bool treats any non-empty string as True
ap.add_argument('--arch', dest="arch", action="store", default="vgg16", type = str)
ap.add_argument('--checkpoint', dest="checkpoint", action="store", default="checkpoint.pth")
ap.add_argument('--img_path', dest="img_path", action="store", default="flowers/test/100/image_07896.jpg")
args = ap.parse_args()
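# Illustrative invocation (the paths are just this script's defaults, not verified files):
#   python predict.py --checkpoint checkpoint.pth --img_path flowers/test/100/image_07896.jpg --top_k 5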
image_path = args.img_path
architecture = args.arch
top_k = args.top_k
gpu_enabled = args.gpu_enabled
checkpoint_path = args.checkpoint
category_names = args.category_names
model = utils.load_checkpoint(checkpoint_path,gpu_enabled)
with open(category_names, 'r') as f:
cat_to_name = json.load(f)
probs, classes = utils.predict(image_path,model,top_k,cat_to_name)
for i in range(len(classes)):
print('Flower name:{}, Probability:{}'.format(classes[i],probs[0][i].tolist()))
|
[
"utils.load_checkpoint",
"utils.predict",
"argparse.ArgumentParser",
"json.load"
] |
[((315, 364), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Predict.py"""'}), "(description='Predict.py')\n", (338, 364), False, 'import argparse\n'), ((1139, 1190), 'utils.load_checkpoint', 'utils.load_checkpoint', (['checkpoint_path', 'gpu_enabled'], {}), '(checkpoint_path, gpu_enabled)\n', (1160, 1190), False, 'import utils\n'), ((1282, 1334), 'utils.predict', 'utils.predict', (['image_path', 'model', 'top_k', 'cat_to_name'], {}), '(image_path, model, top_k, cat_to_name)\n', (1295, 1334), False, 'import utils\n'), ((1249, 1261), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1258, 1261), False, 'import json\n')]
|
from numpy.random import seed
seed(42)
from tensorflow import set_random_seed
set_random_seed(42)
import nltk
from nltk.corpus import stopwords
from xml.dom.minidom import parse
import warnings
warnings.simplefilter(action='ignore', category=FutureWarning)
warnings.filterwarnings("ignore", category=DeprecationWarning)
import keras as k
from numpy.random import seed
import pandas as pd
from keras.preprocessing.sequence import pad_sequences
from nltk.tokenize import word_tokenize
from os import listdir
import string, sys
import numpy as np
import pickle
from keras.models import load_model
from keras.utils import to_categorical
from keras.callbacks import ModelCheckpoint
from keras_contrib.losses import crf_loss
from keras_contrib.metrics import crf_viterbi_accuracy
import matplotlib.pyplot as plt
nltk.download('punkt')
nltk.download('wordnet')
nltk.download('stopwords')
stopwords_ = set(stopwords.words('english'))
from keras.models import Model, Input
from keras.layers import LSTM, Embedding, concatenate, Dense, TimeDistributed, Dropout, Bidirectional, Lambda, Layer
from keras_contrib.layers import CRF
from keras_contrib.losses import crf_loss
from keras_contrib.metrics import crf_accuracy
sys.path.append("../")
import evaluator
class Learner():
def __init__(self):
print("[WELCOME]... Init learning progress")
def tokenize(self, sentence):
'''
        Task:
        Given a sentence, calls nltk.tokenize to split it into
        tokens, and adds to each token its start/end offset
        in the original sentence.
'''
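        # Illustrative example (stopwords and punctuation are skipped):
        #   tokenize("Aspirin inhibits warfarin") ->
        #   [("Aspirin", 0, 6), ("inhibits", 8, 15), ("warfarin", 17, 24)]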
tokens = []
offset = 0
words = word_tokenize(sentence)
for w in words:
if (w in stopwords_) or (w in string.punctuation):
continue
offset = sentence.find(w, offset)
tokens.append((w, offset, offset + len(w) - 1))
offset += len(w) +1
return tokens
def get_tag(self,token, gold):
'''
        Task:
        Given a token and a list of ground truth entities in a sentence, decide
        which is the B-I-O tag for the token.
'''
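        # Illustrative examples:
        #   get_tag(("Aspirin", 0, 6), [(0, 6, "brand")])  -> "B-brand"
        #   get_tag(("acid", 8, 11), [(0, 11, "drug")])    -> "I-drug"
        #   get_tag(("with", 13, 16), [])                  -> "O"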
(form, start, end) = token
for (gold_start, gold_end, gold_type) in gold:
if start == gold_start and end <= gold_end:
return "B-" + gold_type
elif start >= gold_start and end <= gold_end:
return "I-" + gold_type
return "O"
def load_data(self, datadir):
'''
        Load XML files in the given directory, tokenize each sentence, and extract ground truth BIO labels for each token.
'''
result = {}
# process each file in directory
for f in listdir(datadir):
# parse XML file , obtaining a DOM tree
tree = parse(datadir + "/" + f)
# process each sentence in the file
sentences = tree.getElementsByTagName("sentence")
for s in sentences:
sid = s.attributes["id"].value # get sentence id
stext = s.attributes["text"].value # get sentence text
# load ground truth entities .
gold = []
entities = s.getElementsByTagName("entity")
for e in entities:
# for discontinuous entities , we only get the first span
offset = e.attributes["charOffset"].value
(start, end) = offset.split(";")[0].split("-")
gold.append((int(start), int(end), e.attributes["type"].value))
# tokenize text
tokens = self.tokenize(stext)
info_ = []
for tok_ in tokens:
tag_ = self.get_tag(tok_, gold)
n, i1, i2 = tok_
info_.append((n, i1, i2, tag_))
result[sid] = info_
return result
def create_indexs(self, dataset, max_length):
'''
        Create index dictionaries both for input (words) and output (labels) from the given dataset.
'''
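        # Illustrative result: idx['words'] maps each token to an integer id,
        # starting from {'<PAD>': 0, '<UNK>': 1, ...}; idx['maxlen'] keeps max_length.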
words = ['<PAD>', '<UNK>']
prefixes = ['<PAD>', '<UNK>']
suffixes = ['<PAD>', '<UNK>']
labels = ['<PAD>']
positions = ['<PAD>','<UNK>']
prevword = ['<PAD>','<UNK>']
nextword = ['<PAD>','<UNK>']
class_rules = ['<PAD>', 'brand', 'drug', 'drug_n', 'group', 'none']
for data in list(dataset.values()):
pos = 0
w_pack_prev = '<START>'
for w_pack in data:
if w_pack[0] not in words:
words.append(w_pack[0])
if w_pack[3] not in labels:
labels.append(w_pack[3])
if w_pack[0][:3] not in prefixes:
prefixes.append(w_pack[0][:3])
if w_pack[0][-3:] not in suffixes:
suffixes.append(w_pack[0][-3:])
if pos not in positions:
positions.append(pos)
if w_pack_prev not in prevword:
prevword.append(w_pack_prev)
if w_pack[0] not in nextword:
nextword.append(w_pack[0])
w_pack_prev = w_pack[0]
pos+=1
if '<END>' not in nextword:
nextword.append('<END>')
words = {k: v for v, k in enumerate(words)}
labels = {k: v for v, k in enumerate(labels)}
prefixes = {k: v for v, k in enumerate(prefixes)}
suffixes = {k: v for v, k in enumerate(suffixes)}
positions = {k: v for v, k in enumerate(positions)}
prevword = {k: v for v, k in enumerate(prevword)}
nextword = {k: v for v, k in enumerate(nextword)}
class_rules = {k: v for v, k in enumerate(class_rules)}
result = {}
result['words'] = words
result['labels'] = labels
result['maxlen'] = max_length
result['prev'] = prevword
result['next'] = nextword
result["pref"] = prefixes
result["suff"] = suffixes
result["position"] = positions
result["class_rules"] = class_rules
return result
def encode_words(self, dataset, idx):
'''
        Encode the words in a sentence dataset formed by lists of tokens
        into lists of indexes suitable for NN input.
        The dataset is encoded as a list of sentences, each of them a list of
        word indices. If a word is not in the index, the <UNK> code is used. If
        a sentence is shorter than max_len, it is padded with the <PAD> code.
'''
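        # Illustrative example: with idx['words'] = {'<PAD>': 0, '<UNK>': 1, 'aspirin': 2}
        # and maxlen 4, a sentence [('aspirin', ...), ('xyz', ...)] encodes to [2, 1, 0, 0].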
results = []
for sentence in dataset.values():
encoded_sentence = []
for word in sentence:
if word[0] in idx["words"]:
index = idx["words"][word[0]]
else:
index = idx["words"]['<UNK>']
encoded_sentence.append(index)
while len(encoded_sentence) < idx["maxlen"]:
encoded_sentence.append(idx["words"]['<PAD>'])
results.append(np.array(encoded_sentence))
return np.array(results)
def encode_words_lower(self, dataset, idx):
results = []
for sentence in dataset.values():
encoded_sentence = []
for word in sentence:
if word[0].lower() in idx["words_lower"]:
index = idx["words_lower"][word[0].lower()]
else:
index = idx["words_lower"]['<UNK>']
encoded_sentence.append(index)
while len(encoded_sentence) < idx["maxlen"]:
encoded_sentence.append(idx["words_lower"]['<PAD>'])
results.append(np.array(encoded_sentence))
return np.array(results)
def encode_positions(self, dataset, idx):
results = []
for sentence in dataset.values():
encoded_sentence = []
pos = 0
for word in sentence:
if pos in idx["position"]:
index = idx["position"][pos]
else:
index = idx["position"]['<UNK>']
encoded_sentence.append(index)
pos+=1
while len(encoded_sentence) < idx["maxlen"]:
encoded_sentence.append(idx["position"]['<PAD>'])
results.append(np.array(encoded_sentence))
return np.array(results)
def encode_prefixes(self, dataset, idx):
results = []
for sentence in dataset.values():
encoded_sentence = []
for word in sentence:
if word[0][:3] in idx["pref"]:
index = idx["pref"][word[0][:3]]
else:
index = idx["pref"]['<UNK>']
encoded_sentence.append(index)
while len(encoded_sentence) < idx["maxlen"]:
encoded_sentence.append(idx["pref"]['<PAD>'])
results.append(np.array(encoded_sentence))
return np.array(results)
def encode_suffixes(self, dataset, idx):
results = []
for sentence in dataset.values():
encoded_sentence = []
for word in sentence:
if word[0][-3:] in idx["suff"]:
index = idx["suff"][word[0][-3:]]
else:
index = idx["suff"]['<UNK>']
encoded_sentence.append(index)
while len(encoded_sentence) < idx["maxlen"]:
encoded_sentence.append(idx["suff"]['<PAD>'])
results.append(np.array(encoded_sentence))
return np.array(results)
def encode_prevwords(self, dataset, idx):
results = []
for sentence in dataset.values():
encoded_sentence = []
prevword = '<START>'
for word in sentence:
if prevword in idx["prev"]:
index = idx["prev"][prevword]
else:
index = idx["prev"]['<UNK>']
encoded_sentence.append(index)
prevword=word[0]
while len(encoded_sentence) < idx["maxlen"]:
encoded_sentence.append(idx["prev"]['<PAD>'])
results.append(np.array(encoded_sentence))
return np.array(results)
def encode_nextwords(self, dataset, idx):
results = []
for sentence in dataset.values():
encoded_sentence = []
for i in range(len(sentence)-1):
if sentence[i+1][0] in idx["next"]:
index = idx["next"][sentence[i+1][0]]
else:
index = idx["next"]['<UNK>']
encoded_sentence.append(index)
index = idx["next"]['<END>']
while len(encoded_sentence) < idx["maxlen"]:
encoded_sentence.append(idx["next"]['<PAD>'])
results.append(np.array(encoded_sentence))
return np.array(results)
def check_Prefixes(self, tok, pref):
for p in pref:
if str(tok).lower().startswith(p):
return True
return False
def check_Suffixes(self, tok, pref):
for p in pref:
if str(tok).endswith(p):
return True
return False
def check_contains(self, tok, cont):
for p in cont:
if p in str(tok):
return True
return False
def encode_class_rules(self, dataset, idx):
suffixes = ["azole", "idine", "amine", "mycin", "xacin", "ostol", "adiol"]
suffixes_drug = ["ine", "cin", "ium", "vir","ide", "lam", "il", "ril", "cin", "tin"]
#suffixes_brand = ["gen"]
suffixes_brand = []
suffixes_group = ["ines", "ides", "cins", "oles"]
prefixes_drug_n = ['ibog', 'endo', "bombe", "contor", "dmp", "egf", "ginse", "heo", "ibo", "jac", "phen"]
#prefixes_brand = ["SPR", "Acc", "equ", "EQU"]
prefixes_brand = []
prefixes_group = ["beta-adre", "hmg", "monoamine", "calcium", "drugs", "sali", "quino", "ssri", "cepha", "sulfo", "TCA", "thiaz", "benzo", "barb", "contracept", "cortico", "digitalis", "diu", "central", "nervous", "system", "beta", "psycho", "cepha", "macro", "prot", "ace", "mao", "cardiac"]
prefixes_drug = ['digox', 'warfa', 'meth', 'theophy', 'lith', 'keto', 'cime', 'insu', 'fluox', 'alcoh', 'cyclos', 'eryth', 'carba', 'rifa', 'caffe']
contains_drug_n = ["MHD", "NaC", "MC", "gaine", "PTX", "PCP"]
contains_group = ["ids", "urea" ]
contains_brand = ["PEGA", "aspirin", "Aspirin", "XX", "IVA"]
'''
suffixes = ["azole", "idine", "amine", "mycin", "xacin", "ostol", "adiol"]
suffixes_drug = ["ine", "cin", "ium"]
suffixes_brand = ["gen"]
suffixes_group = ["ines", "ides", "cins", "oles"]
'''
results = []
for sentence in dataset.values():
encoded_sentence = []
for word in sentence:
token = word[0]
if self.check_Suffixes(token, suffixes_drug) or self.check_Suffixes(token, suffixes) or self.check_Prefixes(token, prefixes_drug):
index = idx["class_rules"]['drug']
elif self.check_Suffixes(token, suffixes_group) or "agent" in token or self.check_Prefixes(token, prefixes_group) or self.check_contains(token, contains_group):
index = idx["class_rules"]['group']
elif self.check_Prefixes(token, prefixes_drug_n) or self.check_contains(token, contains_drug_n):
index = idx["class_rules"]['drug_n']
elif token.isupper() or self.check_contains(token, contains_brand):
index = idx["class_rules"]['brand']
else:
index = idx["class_rules"]['none']
encoded_sentence.append(index)
while len(encoded_sentence) < idx["maxlen"]:
encoded_sentence.append(idx["class_rules"]['<PAD>'])
results.append(np.array(encoded_sentence))
return np.array(results)
def encode_labels(self, dataset, idx):
'''
Encode the ground truth labels in a sentence dataset formed by lists of
        tokens into lists of indexes suitable for NN output.
'''
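        # Illustrative example: with idx['labels'] = {'<PAD>': 0, 'B-drug': 1, ...},
        # a token tagged 'B-drug' becomes index 1, then one-hot encoded below.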
results = []
for sentence in dataset.values():
encoded_sentence = []
for word in sentence:
index = idx["labels"][word[3]]
encoded_sentence.append(index)
while len(encoded_sentence) < idx["maxlen"]:
index = idx["labels"]['<PAD>']
encoded_sentence.append(index)
results.append(np.array(encoded_sentence))
n_tags = len(idx["labels"])
results = [to_categorical(i, num_classes=n_tags) for i in results]
results = np.array(results)
print(results.shape)
return results
def save_model_and_indexs(self, model, idx, filename):
'''
        Save the given model and indexes to disk
'''
model.save_weights(filename + '.h5')
with open(filename + '.idx', 'wb') as fp:
pickle.dump(idx, fp, protocol=pickle.HIGHEST_PROTOCOL)
def load_model_and_indexs(self, filename):
'''
        Load a previously saved model and its indexes from disk
'''
with open(filename + '.idx', 'rb') as fp:
data = pickle.load(fp)
n_words = len(data['words'])
n_labels = len(data['labels'])
max_len = data['maxlen']
n_prev = len(data['prev'])
n_next = len(data['next'])
n_pref = len(data["pref"])
n_suff = len(data["suff"])
n_pos = len(data["position"])
n_class = len(data["class_rules"])
numbers=[n_words, n_suff, n_pref,n_pos,n_prev, n_next, n_class]
model = self.defineModel(numbers, n_labels, max_len)
model.load_weights(filename + '.h5')
return model, data
def output_entities(self, dataset, preds, outfile):
'''
Output detected entities in the format expected by the evaluator
'''
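        # Each detected entity is written as one line of the form
        #   <sentence_id>|<start>-<end>|<entity text>|<entity type>
        # e.g. (illustrative) DDI-DrugBank.d297.s0|0-6|Aspirin|brand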
        # State machine: while `wait` is True we are still inside a multi-token
        # entity (or on a skipped token), so writing is postponed; this avoids
        # printing O tokens and lets consecutive B/I tokens form one entity.
        wait = False  # while waiting, nothing is written out
name = ''
off_start = '0'
element = {'name': '', 'offset': '', 'type': ''}
f = open(outfile, "w+")
for i, (sid, sentence) in enumerate(dataset.items()):
for ind, token in enumerate(sentence):
curr = preds[i][ind]
                if curr == 'O' or curr == '<PAD>':  # an O or <PAD> token: nothing to emit
wait = True
elif ind == (len(sentence) - 1): # if it's the last element of the sentence
if curr.startswith('B'):
element = {'name': token[0],
'offset': str(token[1]) + '-' + str(token[2]),
'type': curr.split('-')[1] # without B or I
}
elif curr.startswith('I'):
                        name = token[0] if name == '' else name + ' ' + token[0]
element = {'name': name,
'offset': off_start + '-' + str(token[2]),
'type': curr.split('-')[1]
}
else: # only to check
print('There\'s something wrong')
wait = False
else:
next = preds[i][ind+1]
if curr.startswith('B'):
if next.startswith('O') or next.startswith('B') or next.startswith('<'):
element = {'name': token[0],
'offset': str(token[1]) + '-' + str(token[2]),
'type': curr.split('-')[1] # without B or I
}
wait = False
elif next.startswith('I'):
name = token[0]
off_start = str(token[1])
wait = True
elif curr.startswith('I'):
if next.startswith('O') or next.startswith('B') or next.startswith('<'):
element = {'name': name + ' ' + token[0],
'offset': off_start + '-' + str(token[2]),
'type': curr.split('-')[1]
}
if name == '':
element["name"] = token[0]
wait = False
elif next.startswith('I'):
                            name = token[0] if name == '' else name + ' ' + token[0]
wait = True
else: # only to check
print('There\'s something wrong2')
if not wait:
f.write(sid + '|' + element['offset'] + '|' + element['name'] + '|' + element['type'] + '\n')
f.close()
def predict(self, modelname, datadir, outfile):
'''
        Loads a NN model from file 'modelname' and uses it to extract drugs
        in datadir. Saves results to 'outfile' in the appropriate format.
'''
print("[INFO]... Model in inference process")
# load model and associated encoding data
model, idx = self.load_model_and_indexs(modelname)
# load data to annotate
testdata = self.load_data(datadir)
# encode dataset
X = self.encode_words(testdata, idx)
X_pref = self.encode_prefixes(testdata, idx)
X_suff = self.encode_suffixes(testdata, idx)
X_pos = self.encode_positions(testdata, idx)
X_prev = self.encode_prevwords(testdata, idx)
X_next = self.encode_nextwords(testdata, idx)
X_class_rules = self.encode_class_rules(testdata, idx)
# tag sentences in dataset
Y = model.predict([X, X_suff, X_pref, X_pos, X_prev, X_next, X_class_rules])
reverse_labels= {y: x for x, y in idx['labels'].items()}
Y = [[reverse_labels[np.argmax(y)] for y in s] for s in Y]
# extract entities and dump them to output file
self.output_entities(testdata, Y, outfile)
# evaluate using official evaluator
self.evaluation(datadir, outfile)
def checkOutputs(self, modelname, datadir, outfile):
print("[INFO]... Model in checking process")
# load model and associated encoding data
model, idx = self.load_model_and_indexs(modelname)
# load data to annotate
testdata = self.load_data(datadir)
# encode dataset
Y = self.encode_labels(testdata, idx)
print(idx["labels"])
reverse_labels = {y: x for x, y in idx['labels'].items()}
Y = [[reverse_labels[np.argmax(y)] for y in s] for s in Y]
# extract entities and dump them to output file
self.output_entities(testdata, Y, outfile)
# evaluate using official evaluator
self.evaluation(datadir, outfile)
def evaluation(self, datadir, outfile):
evaluator.evaluate("NER", datadir, outfile)
def learn(self, traindir, validationdir, modelname):
'''
        Learns a NN model using traindir as training data, and validationdir
        as validation data. Saves the learnt model in a file named modelname.
'''
print("[INFO]... Model architecture in training process")
# load train and validation data in a suitable form
traindata = self.load_data(traindir)
valdata = self.load_data(validationdir)
# create indexes from training data
max_len = 100
idx = self.create_indexs(traindata, max_len)
# encode datasets
Xtrain = self.encode_words(traindata, idx)
Xtrain_pref = self.encode_prefixes(traindata, idx)
Xtrain_suff = self.encode_suffixes(traindata, idx)
Xtrain_pos = self.encode_positions(traindata, idx)
Xtrain_prev = self.encode_prevwords(traindata, idx)
Xtrain_next = self.encode_nextwords(traindata, idx)
Xtrain_class_rules = self.encode_class_rules(traindata, idx)
Ytrain = self.encode_labels(traindata, idx)
Xval = self.encode_words(valdata, idx)
Xval_pref = self.encode_prefixes(valdata, idx)
Xval_suff = self.encode_suffixes(valdata, idx)
Xval_pos = self.encode_positions(valdata, idx)
Xval_prev = self.encode_prevwords(valdata, idx)
Xval_next = self.encode_nextwords(valdata, idx)
Xval_class_rules = self.encode_class_rules(valdata, idx)
Yval = self.encode_labels(valdata, idx)
n_words=len(idx['words'])
# load the whole embedding into memory
embeddings_index = dict()
f = open('../data/glove.6B/glove.6B.100d.txt', encoding="utf8")
for line in f:
values = line.split()
word = values[0]
coefs = np.asarray(values[1:], dtype='float32')
embeddings_index[word] = coefs
f.close()
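        # Each GloVe line is "<token> <100 floats>", e.g. (illustrative values):
        #   the 0.418 0.24968 -0.41242 ...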
        embedding_matrix = np.zeros((n_words, 100))  # 100 = GloVe vector size (only coincidentally equal to max_len)
h=0
for word in idx['words']:
embedding_vector = embeddings_index.get(word)
if embedding_vector is not None:
embedding_matrix[h] = embedding_vector
h+=1
f = open("./embedding_matrix.txt", 'w')
for row in embedding_matrix:
np.savetxt(f,row)
f.close()
# train model
# build network
model = self.build_network(idx)
# Saving the best model only
filepath = modelname+"-{val_crf_viterbi_accuracy:.3f}.hdf5"
checkpoint = ModelCheckpoint(filepath, monitor='val_crf_viterbi_accuracy', verbose=1, save_best_only=True, mode='max')
callbacks_list = [checkpoint]
# Fit the best model
history = model.fit([Xtrain, Xtrain_suff, Xtrain_pref, Xtrain_pos, Xtrain_prev, Xtrain_next, Xtrain_class_rules], Ytrain, validation_data=([Xval, Xval_suff, Xval_pref, Xval_pos, Xval_prev, Xval_next, Xval_class_rules], Yval), batch_size=256, epochs=20, verbose=1, callbacks=callbacks_list)
'''
model.fit(Xtrain, Ytrain, validation_data=(Xval, Yval), batch_size=256)
'''
# save model and indexs , for later use in prediction
self.save_model_and_indexs(model, idx, modelname)
self.plot(history)
return embedding_matrix
def plot(self, history):
# Plot the graph
plt.style.use('ggplot')
accuracy = history.history['crf_viterbi_accuracy']
val_accuracy = history.history['val_crf_viterbi_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']
x = range(1, len(accuracy) + 1)
plt.figure(figsize=(12, 5))
plt.subplot(1, 2, 1)
plt.plot(x, accuracy, 'b', label='Training acc')
plt.plot(x, val_accuracy, 'r', label='Validation acc')
plt.title('Training and validation accuracy')
plt.legend()
plt.subplot(1, 2, 2)
plt.plot(x, loss, 'b', label='Training loss')
plt.plot(x, val_loss, 'r', label='Validation loss')
plt.title('Training and validation loss')
plt.legend()
plt.savefig("History_model.jpg")
def defineModel(self, numbers, n_labels, max_len):
embedding_matrix=np.loadtxt("./embedding_matrix.txt").reshape(numbers[0], 100)
word_in = Input(shape=(max_len,))
        word_emb = Embedding(input_dim=numbers[0], output_dim=100, input_length=max_len, trainable=False, weights = [embedding_matrix])(word_in) # 100-dim pre-trained GloVe embedding
suf_in = Input(shape=(max_len,))
suf_emb = Embedding(input_dim=numbers[1], output_dim=100,
input_length=max_len)(suf_in)
pref_in = Input(shape=(max_len,))
pref_emb = Embedding(input_dim=numbers[2], output_dim=100,
input_length=max_len)(pref_in)
pos_in = Input(shape=(max_len,))
pos_emb = Embedding(input_dim=numbers[3], output_dim=100,
input_length=max_len)(pos_in)
prev_in = Input(shape=(max_len,))
prev_emb = Embedding(input_dim=numbers[4], output_dim=100,
input_length=max_len)(prev_in)
next_in = Input(shape=(max_len,))
next_emb = Embedding(input_dim=numbers[5], output_dim=100,
input_length=max_len)(next_in)
class_rules_in = Input(shape=(max_len,))
class_rules_emb = Embedding(input_dim=numbers[6], output_dim=100,
input_length=max_len)(class_rules_in)
concat = concatenate([word_emb, suf_emb, pref_emb, pos_emb, prev_emb, next_emb, class_rules_emb])
model = Dropout(0.55)(concat)
'''
model = LSTM(units=max_len * 2,
return_sequences=True,
dropout=0.5,
recurrent_dropout=0.5,
kernel_initializer=k.initializers.he_normal())(model)
'''
model = Bidirectional(LSTM(units=32,return_sequences=True,recurrent_dropout=0.3,))(model) # variational biLSTM
#model = Bidirectional(LSTM(units=32,return_sequences=True,recurrent_dropout=0.5,))(model) # variational biLSTM
#model = Bidirectional(LSTM(units=32,return_sequences=True,recurrent_dropout=0.5,))(model) # variational biLSTM
model = TimeDistributed(Dense(n_labels, activation="relu"))(model) # a dense layer as suggested by neuralNer
crf = CRF(units=n_labels, activation='linear') # CRF layer
out = crf(model) # output
# create and compile model
model = Model([word_in, suf_in, pref_in, pos_in, prev_in, next_in, class_rules_in], out)
return model
def build_network(self,idx):
from keras.optimizers import RMSprop
'''
Create network for the learner
'''
# sizes
n_words = len(idx['words'])
n_prev = len(idx['prev'])
n_next = len(idx['next'])
n_pref = len(idx["pref"])
n_suff = len(idx["suff"])
n_pos = len(idx["position"])
n_labels = len(idx['labels'])
n_class = len(idx["class_rules"])
numbers=[n_words, n_suff, n_pref,n_pos,n_prev, n_next, n_class]
max_len = idx['maxlen']
# create network layers
model = self.defineModel(numbers, n_labels, max_len)
# set appropriate parameters (optimizer, loss, etc)
optimizer = RMSprop(lr=0.001, epsilon=None, decay=0.0)
        # the imported crf_loss / crf_viterbi_accuracy are the same functions a
        # throwaway CRF instance would expose, so use them directly
        model.compile(optimizer=optimizer, loss=crf_loss, metrics=[crf_viterbi_accuracy])
model.summary()
return model
if __name__ == '__main__':
learner = Learner()
learner.learn("../data/train", "../data/devel", "firstmodel")
#learner.checkOutputs("firstmodel", "../data/test", "results.txt", emb_matrix)
print("TRAIN")
learner.predict("firstmodel", "../data/train", "results.txt")
print("\nDEVEL")
learner.predict("firstmodel", "../data/devel", "results.txt")
print("\nTEST")
learner.predict("firstmodel", "../data/test", "results.txt")
|
[
"matplotlib.pyplot.title",
"pickle.dump",
"numpy.random.seed",
"numpy.argmax",
"evaluator.evaluate",
"keras.models.Model",
"matplotlib.pyplot.style.use",
"matplotlib.pyplot.figure",
"keras_contrib.layers.CRF",
"pickle.load",
"nltk.download",
"keras.layers.concatenate",
"sys.path.append",
"warnings.simplefilter",
"numpy.savetxt",
"tensorflow.set_random_seed",
"numpy.loadtxt",
"nltk.tokenize.word_tokenize",
"keras.utils.to_categorical",
"keras.callbacks.ModelCheckpoint",
"keras.layers.Dropout",
"matplotlib.pyplot.legend",
"numpy.asarray",
"nltk.corpus.stopwords.words",
"keras.optimizers.RMSprop",
"os.listdir",
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.plot",
"warnings.filterwarnings",
"keras.layers.LSTM",
"numpy.zeros",
"keras.models.Input",
"xml.dom.minidom.parse",
"keras.layers.Dense",
"numpy.array",
"keras.layers.Embedding",
"matplotlib.pyplot.savefig"
] |
[((30, 38), 'numpy.random.seed', 'seed', (['(42)'], {}), '(42)\n', (34, 38), False, 'from numpy.random import seed\n'), ((78, 97), 'tensorflow.set_random_seed', 'set_random_seed', (['(42)'], {}), '(42)\n', (93, 97), False, 'from tensorflow import set_random_seed\n'), ((195, 257), 'warnings.simplefilter', 'warnings.simplefilter', ([], {'action': '"""ignore"""', 'category': 'FutureWarning'}), "(action='ignore', category=FutureWarning)\n", (216, 257), False, 'import warnings\n'), ((258, 320), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {'category': 'DeprecationWarning'}), "('ignore', category=DeprecationWarning)\n", (281, 320), False, 'import warnings\n'), ((812, 834), 'nltk.download', 'nltk.download', (['"""punkt"""'], {}), "('punkt')\n", (825, 834), False, 'import nltk\n'), ((835, 859), 'nltk.download', 'nltk.download', (['"""wordnet"""'], {}), "('wordnet')\n", (848, 859), False, 'import nltk\n'), ((860, 886), 'nltk.download', 'nltk.download', (['"""stopwords"""'], {}), "('stopwords')\n", (873, 886), False, 'import nltk\n'), ((1215, 1237), 'sys.path.append', 'sys.path.append', (['"""../"""'], {}), "('../')\n", (1230, 1237), False, 'import string, sys\n'), ((904, 930), 'nltk.corpus.stopwords.words', 'stopwords.words', (['"""english"""'], {}), "('english')\n", (919, 930), False, 'from nltk.corpus import stopwords\n'), ((1642, 1665), 'nltk.tokenize.word_tokenize', 'word_tokenize', (['sentence'], {}), '(sentence)\n', (1655, 1665), False, 'from nltk.tokenize import word_tokenize\n'), ((2702, 2718), 'os.listdir', 'listdir', (['datadir'], {}), '(datadir)\n', (2709, 2718), False, 'from os import listdir\n'), ((7129, 7146), 'numpy.array', 'np.array', (['results'], {}), '(results)\n', (7137, 7146), True, 'import numpy as np\n'), ((7778, 7795), 'numpy.array', 'np.array', (['results'], {}), '(results)\n', (7786, 7795), True, 'import numpy as np\n'), ((8424, 8441), 'numpy.array', 'np.array', (['results'], {}), '(results)\n', (8432, 8441), True, 'import numpy as np\n'), ((9034, 9051), 'numpy.array', 'np.array', (['results'], {}), '(results)\n', (9042, 9051), True, 'import numpy as np\n'), ((9638, 9655), 'numpy.array', 'np.array', (['results'], {}), '(results)\n', (9646, 9655), True, 'import numpy as np\n'), ((10301, 10318), 'numpy.array', 'np.array', (['results'], {}), '(results)\n', (10309, 10318), True, 'import numpy as np\n'), ((10966, 10983), 'numpy.array', 'np.array', (['results'], {}), '(results)\n', (10974, 10983), True, 'import numpy as np\n'), ((14106, 14123), 'numpy.array', 'np.array', (['results'], {}), '(results)\n', (14114, 14123), True, 'import numpy as np\n'), ((14894, 14911), 'numpy.array', 'np.array', (['results'], {}), '(results)\n', (14902, 14911), True, 'import numpy as np\n'), ((21436, 21479), 'evaluator.evaluate', 'evaluator.evaluate', (['"""NER"""', 'datadir', 'outfile'], {}), "('NER', datadir, outfile)\n", (21454, 21479), False, 'import evaluator\n'), ((23420, 23448), 'numpy.zeros', 'np.zeros', (['(n_words, max_len)'], {}), '((n_words, max_len))\n', (23428, 23448), True, 'import numpy as np\n'), ((24018, 24127), 'keras.callbacks.ModelCheckpoint', 'ModelCheckpoint', (['filepath'], {'monitor': '"""val_crf_viterbi_accuracy"""', 'verbose': '(1)', 'save_best_only': '(True)', 'mode': '"""max"""'}), "(filepath, monitor='val_crf_viterbi_accuracy', verbose=1,\n save_best_only=True, mode='max')\n", (24033, 24127), False, 'from keras.callbacks import ModelCheckpoint\n'), ((24837, 24860), 'matplotlib.pyplot.style.use', 'plt.style.use', (['"""ggplot"""'], {}), "('ggplot')\n", (24850, 24860), True, 'import matplotlib.pyplot as plt\n'), ((25123, 25150), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 5)'}), '(figsize=(12, 5))\n', (25133, 25150), True, 'import matplotlib.pyplot as plt\n'), ((25159, 25179), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(1)'], {}), '(1, 2, 1)\n', (25170, 25179), True, 'import matplotlib.pyplot as plt\n'), ((25188, 25236), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'accuracy', '"""b"""'], {'label': '"""Training acc"""'}), "(x, accuracy, 'b', label='Training acc')\n", (25196, 25236), True, 'import matplotlib.pyplot as plt\n'), ((25245, 25299), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'val_accuracy', '"""r"""'], {'label': '"""Validation acc"""'}), "(x, val_accuracy, 'r', label='Validation acc')\n", (25253, 25299), True, 'import matplotlib.pyplot as plt\n'), ((25308, 25353), 'matplotlib.pyplot.title', 'plt.title', (['"""Training and validation accuracy"""'], {}), "('Training and validation accuracy')\n", (25317, 25353), True, 'import matplotlib.pyplot as plt\n'), ((25362, 25374), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (25372, 25374), True, 'import matplotlib.pyplot as plt\n'), ((25383, 25403), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(2)'], {}), '(1, 2, 2)\n', (25394, 25403), True, 'import matplotlib.pyplot as plt\n'), ((25412, 25457), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'loss', '"""b"""'], {'label': '"""Training loss"""'}), "(x, loss, 'b', label='Training loss')\n", (25420, 25457), True, 'import matplotlib.pyplot as plt\n'), ((25466, 25517), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'val_loss', '"""r"""'], {'label': '"""Validation loss"""'}), "(x, val_loss, 'r', label='Validation loss')\n", (25474, 25517), True, 'import matplotlib.pyplot as plt\n'), ((25526, 25567), 'matplotlib.pyplot.title', 'plt.title', (['"""Training and validation loss"""'], {}), "('Training and validation loss')\n", (25535, 25567), True, 'import matplotlib.pyplot as plt\n'), ((25576, 25588), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (25586, 25588), True, 'import matplotlib.pyplot as plt\n'), ((25597, 25629), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""History_model.jpg"""'], {}), "('History_model.jpg')\n", (25608, 25629), True, 'import matplotlib.pyplot as plt\n'), ((25793, 25816), 'keras.models.Input', 'Input', ([], {'shape': '(max_len,)'}), '(shape=(max_len,))\n', (25798, 25816), False, 'from keras.models import Model, Input\n'), ((26008, 26031), 'keras.models.Input', 'Input', ([], {'shape': '(max_len,)'}), '(shape=(max_len,))\n', (26013, 26031), False, 'from keras.models import Model, Input\n'), ((26171, 26194), 'keras.models.Input', 'Input', ([], {'shape': '(max_len,)'}), '(shape=(max_len,))\n', (26176, 26194), False, 'from keras.models import Model, Input\n'), ((26335, 26358), 'keras.models.Input', 'Input', ([], {'shape': '(max_len,)'}), '(shape=(max_len,))\n', (26340, 26358), False, 'from keras.models import Model, Input\n'), ((26498, 26521), 'keras.models.Input', 'Input', ([], {'shape': '(max_len,)'}), '(shape=(max_len,))\n', (26503, 26521), False, 'from keras.models import Model, Input\n'), ((26663, 26686), 'keras.models.Input', 'Input', ([], {'shape': '(max_len,)'}), '(shape=(max_len,))\n', (26668, 26686), False, 'from keras.models import Model, Input\n'), ((26835, 26858), 'keras.models.Input', 'Input', ([], {'shape': '(max_len,)'}), '(shape=(max_len,))\n', (26840, 26858), False, 'from keras.models import Model, Input\n'), ((27013, 27105), 'keras.layers.concatenate', 'concatenate', (['[word_emb, suf_emb, pref_emb, pos_emb, prev_emb, next_emb, class_rules_emb]'], {}), '([word_emb, suf_emb, pref_emb, pos_emb, prev_emb, next_emb,\n class_rules_emb])\n', (27024, 27105), False, 'from keras.layers import LSTM, Embedding, concatenate, Dense, TimeDistributed, Dropout, Bidirectional, Lambda, Layer\n'), ((27898, 27938), 'keras_contrib.layers.CRF', 'CRF', ([], {'units': 'n_labels', 'activation': '"""linear"""'}), "(units=n_labels, activation='linear')\n", (27901, 27938), False, 'from keras_contrib.layers import CRF\n'), ((28039, 28124), 'keras.models.Model', 'Model', (['[word_in, suf_in, pref_in, pos_in, prev_in, next_in, class_rules_in]', 'out'], {}), '([word_in, suf_in, pref_in, pos_in, prev_in, next_in, class_rules_in], out\n )\n', (28044, 28124), False, 'from keras.models import Model, Input\n'), ((28867, 28909), 'keras.optimizers.RMSprop', 'RMSprop', ([], {'lr': '(0.001)', 'epsilon': 'None', 'decay': '(0.0)'}), '(lr=0.001, epsilon=None, decay=0.0)\n', (28874, 28909), False, 'from keras.optimizers import RMSprop\n'), ((28925, 28959), 'keras_contrib.layers.CRF', 'CRF', (['n_labels'], {'activation': '"""linear"""'}), "(n_labels, activation='linear')\n", (28928, 28959), False, 'from keras_contrib.layers import CRF\n'), ((2791, 2815), 'xml.dom.minidom.parse', 'parse', (["(datadir + '/' + f)"], {}), "(datadir + '/' + f)\n", (2796, 2815), False, 'from xml.dom.minidom import parse\n'), ((14820, 14857), 'keras.utils.to_categorical', 'to_categorical', (['i'], {'num_classes': 'n_tags'}), '(i, num_classes=n_tags)\n', (14834, 14857), False, 'from keras.utils import to_categorical\n'), ((15199, 15253), 'pickle.dump', 'pickle.dump', (['idx', 'fp'], {'protocol': 'pickle.HIGHEST_PROTOCOL'}), '(idx, fp, protocol=pickle.HIGHEST_PROTOCOL)\n', (15210, 15253), False, 'import pickle\n'), ((15439, 15454), 'pickle.load', 'pickle.load', (['fp'], {}), '(fp)\n', (15450, 15454), False, 'import pickle\n'), ((23291, 23330), 'numpy.asarray', 'np.asarray', (['values[1:]'], {'dtype': '"""float32"""'}), "(values[1:], dtype='float32')\n", (23301, 23330), True, 'import numpy as np\n'), ((23768, 23786), 'numpy.savetxt', 'np.savetxt', (['f', 'row'], {}), '(f, row)\n', (23778, 23786), True, 'import numpy as np\n'), ((25836, 25954), 'keras.layers.Embedding', 'Embedding', ([], {'input_dim': 'numbers[0]', 'output_dim': '(100)', 'input_length': 'max_len', 'trainable': '(False)', 'weights': '[embedding_matrix]'}), '(input_dim=numbers[0], output_dim=100, input_length=max_len,\n trainable=False, weights=[embedding_matrix])\n', (25845, 25954), False, 'from keras.layers import LSTM, Embedding, concatenate, Dense, TimeDistributed, Dropout, Bidirectional, Lambda, Layer\n'), ((26050, 26119), 'keras.layers.Embedding', 'Embedding', ([], {'input_dim': 'numbers[1]', 'output_dim': '(100)', 'input_length': 'max_len'}), '(input_dim=numbers[1], output_dim=100, input_length=max_len)\n', (26059, 26119), False, 'from keras.layers import LSTM, Embedding, concatenate, Dense, TimeDistributed, Dropout, Bidirectional, Lambda, Layer\n'), ((26214, 26283), 'keras.layers.Embedding', 'Embedding', ([], {'input_dim': 'numbers[2]', 'output_dim': '(100)', 'input_length': 'max_len'}), '(input_dim=numbers[2], output_dim=100, input_length=max_len)\n', (26223, 26283), False, 'from keras.layers import LSTM, Embedding, concatenate, Dense, TimeDistributed, Dropout, Bidirectional, Lambda, Layer\n'), ((26377, 26446), 'keras.layers.Embedding', 'Embedding', ([], {'input_dim': 'numbers[3]', 'output_dim': '(100)', 'input_length': 'max_len'}), '(input_dim=numbers[3], output_dim=100, input_length=max_len)\n', (26386, 26446), False, 'from keras.layers import LSTM, Embedding, concatenate, Dense, TimeDistributed, Dropout, Bidirectional, Lambda, Layer\n'), ((26541, 26610), 'keras.layers.Embedding', 'Embedding', ([], {'input_dim': 'numbers[4]', 'output_dim': '(100)', 'input_length': 'max_len'}), '(input_dim=numbers[4], output_dim=100, input_length=max_len)\n', (26550, 26610), False, 'from keras.layers import LSTM, Embedding, concatenate, Dense, TimeDistributed, Dropout, Bidirectional, Lambda, Layer\n'), ((26706, 26775), 'keras.layers.Embedding', 'Embedding', ([], {'input_dim': 'numbers[5]', 'output_dim': '(100)', 'input_length': 'max_len'}), '(input_dim=numbers[5], output_dim=100, input_length=max_len)\n', (26715, 26775), False, 'from keras.layers import LSTM, Embedding, concatenate, Dense, TimeDistributed, Dropout, Bidirectional, Lambda, Layer\n'), ((26885, 26954), 'keras.layers.Embedding', 'Embedding', ([], {'input_dim': 'numbers[6]', 'output_dim': '(100)', 'input_length': 'max_len'}), '(input_dim=numbers[6], output_dim=100, input_length=max_len)\n', (26894, 26954), False, 'from keras.layers import LSTM, Embedding, concatenate, Dense, TimeDistributed, Dropout, Bidirectional, Lambda, Layer\n'), ((27118, 27131), 'keras.layers.Dropout', 'Dropout', (['(0.55)'], {}), '(0.55)\n', (27125, 27131), False, 'from keras.layers import LSTM, Embedding, concatenate, Dense, TimeDistributed, Dropout, Bidirectional, Lambda, Layer\n'), ((7086, 7112), 'numpy.array', 'np.array', (['encoded_sentence'], {}), '(encoded_sentence)\n', (7094, 7112), True, 'import numpy as np\n'), ((7735, 7761), 'numpy.array', 'np.array', (['encoded_sentence'], {}), '(encoded_sentence)\n', (7743, 7761), True, 'import numpy as np\n'), ((8381, 8407), 'numpy.array', 'np.array', (['encoded_sentence'], {}), '(encoded_sentence)\n', (8389, 8407), True, 'import numpy as np\n'), ((8991, 9017), 'numpy.array', 'np.array', (['encoded_sentence'], {}), '(encoded_sentence)\n', (8999, 9017), True, 'import numpy as np\n'), ((9595, 9621), 'numpy.array', 'np.array', (['encoded_sentence'], {}), '(encoded_sentence)\n', (9603, 9621), True, 'import numpy as np\n'), ((10258, 10284), 'numpy.array', 'np.array', (['encoded_sentence'], {}), '(encoded_sentence)\n', (10266, 10284), True, 'import numpy as np\n'), ((10923, 10949), 'numpy.array', 'np.array', (['encoded_sentence'], {}), '(encoded_sentence)\n', (10931, 10949), True, 'import numpy as np\n'), ((14063, 14089), 'numpy.array', 'np.array', (['encoded_sentence'], {}), '(encoded_sentence)\n', (14071, 14089), True, 'import numpy as np\n'), ((14737, 14763), 'numpy.array', 'np.array', (['encoded_sentence'], {}), '(encoded_sentence)\n', (14745, 14763), True, 'import numpy as np\n'), ((25712, 25748), 'numpy.loadtxt', 'np.loadtxt', (['"""./embedding_matrix.txt"""'], {}), "('./embedding_matrix.txt')\n", (25722, 25748), True, 'import numpy as np\n'), ((27433, 27493), 'keras.layers.LSTM', 'LSTM', ([], {'units': '(32)', 'return_sequences': '(True)', 'recurrent_dropout': '(0.3)'}), '(units=32, return_sequences=True, recurrent_dropout=0.3)\n', (27437, 27493), False, 'from keras.layers import LSTM, Embedding, concatenate, Dense, TimeDistributed, Dropout, Bidirectional, Lambda, Layer\n'), ((27797, 27831), 'keras.layers.Dense', 'Dense', (['n_labels'], {'activation': '"""relu"""'}), "(n_labels, activation='relu')\n", (27802, 27831), False, 'from keras.layers import LSTM, Embedding, concatenate, Dense, TimeDistributed, Dropout, Bidirectional, Lambda, Layer\n'), ((20429, 20441), 'numpy.argmax', 'np.argmax', (['y'], {}), '(y)\n', (20438, 20441), True, 'import numpy as np\n'), ((21151, 21163), 'numpy.argmax', 'np.argmax', (['y'], {}), '(y)\n', (21160, 21163), True, 'import numpy as np\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import pandas as pd
import numpy as np
import click
from datetime import datetime
logger = logging.getLogger(__name__)
_COLS_TO_CONVERT = [
'market_data_current_price_usd',
'market_data_circulating_supply',
'market_data_ath_usd',
'market_data_high_24h_usd',
'market_data_low_24h_usd',
'KW1',
'KW2',
'KW3',
'KW4',
'KW5',
'KW6',
'KW7',
'KW8',
'KW9',
'KW10',
'KW11',
'KW12',
'KW13',
'KW14',
'KW15',
'KW16',
'KW17',
'KW18',
'KW19',
'KW20',
'KW21',
'KW22',
'KW23',
'KW24',
'KW25',
'KW26',
'KW27',
'KW28',
'KW29',
'KW30',
'KW31',
'KW32',
'KW33',
'KW34',
'KW35',
'KW36',
'KW37',
'KW38',
'KW39',
'ico_data_total_raised'
]
def read_in_data(path_bitcoin_df='data/raw/1_training_data_sets/1_bitcoin_price_data_set.csv',
path_training_df='data/raw/1_training_data_sets/1_training_data.csv',
path_test_df='data/raw/2_classification_data.csv'):
"""Function to read in data
Parameters
----------
path_bitcoin_df : str, optional
Path to bitcoin set, by default 'data/raw/1_training_data_sets/1_bitcoin_price_data_set.csv'
path_training_df : str, optional
Path to training set, by default 'data/raw/1_training_data_sets/1_training_data.csv'
path_test_df : str, optional
Path to training set, by default 'data/raw/2_classification_data.csv'
Returns
-------
tuple (df, df, df)
df_bitcoin, df, df_test
"""
df_bitcoin = pd.read_csv(
path_bitcoin_df, encoding="ISO-8859-1", delimiter=';')
df = pd.read_csv(path_training_df, encoding="ISO-8859-1")
df_test = pd.read_csv(path_test_df, encoding="ISO-8859-1")
logger.info("Shape of df_bitcoin: {}".format(df_bitcoin.shape))
logger.info("Shape of df: {}".format(df.shape))
return df_bitcoin, df, df_test
def clean_data(df_in):
"""This function cleans data and removes errornous data.
Parameters
----------
df_in : DataFrame
DataFrame to be cleaned
    Returns
-------
DataFrame
Cleaned DataFrame
"""
df = df_in.copy()
return df
def preprocess(df_in):
"""This function preprocessed and changes all columns to the right dtype.
Parameters
----------
df_in : DataFrame
Original DataFrame
Returns
-------
DataFrame
Preprocessed DataFrame
"""
def _replace_convert_float(df, column, to_replace=',', replace_with='.', convert_to='float'):
logger.info("Replacing {} ".format(column))
df[column] = df[column].astype(str)
df[column] = df[column].apply(lambda x: x.replace(
to_replace, replace_with)).astype(convert_to)
return df
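    # Illustrative: _replace_convert_float turns a string value like '1,5'
    # (decimal comma) into the float 1.5.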
logger.info("Start preprocessing dataframe")
# Copy DataFrame -> If not you edit the original one in the memory
df = df_in.copy()
for col in _COLS_TO_CONVERT:
df = _replace_convert_float(df, col)
df = df.assign(id = df.loc[:, 'ï..id'])
df = df.drop('ï..id', axis=1)
df = df.rename({'name':'company_name'}, axis=1)
logger.info("Preprocessing done!")
return df
def preprocess_bitcoin(df: pd.DataFrame):
logger.info("Preprocess bitcoin dataset. Shape: {}".format(df.shape))
logger.info("Build timestamps from milliseconds")
df['time'] = df.date_in_ms.apply(
lambda x: datetime.fromtimestamp(x / 1000.0))
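    # Illustrative: 1546300800000 ms -> 2019-01-01 (exact clock time depends on the local timezone)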
logger.info("Remove all bitcoin prices which are not from 2019")
df = df.loc[df.time.dt.year == 2019]
logger.info("Create calendar week.")
df = df.assign(calendar_week=df.time.dt.week)
logger.info("End shape of bitcoin dataset: {}".format(df.shape))
return df
def get_processed_data(path_bitcoin_df='data/raw/1_training_data_sets/1_bitcoin_price_data_set.csv',
path_training_df='data/raw/1_training_data_sets/1_training_data.csv',
path_test_df='data/raw/2_classification_data.csv'):
""" Runs data processing scripts to turn raw data from (../raw) into
cleaned data ready to be analyzed (saved in ../processed).
"""
log_fmt = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
logging.basicConfig(level=logging.INFO, format=log_fmt)
logger.info('making final data sets from raw data')
df_bitcoin, df, df_test = read_in_data()
# Concat for preprocessing
df = pd.concat([df, df_test])
df.loc[df.success.isna(), 'success'] = "TEST"
df = preprocess(df)
# Split into df and df_test again
    cols_test = [c for c in df.columns if c != 'success']
df_test = df.loc[df.success == "TEST", cols_test]
df = df.loc[df.success != "TEST"]
logger.info("Training dataset shape: {}".format(df.shape))
logger.info("Test dataset shape: {}".format(df_test.shape))
assert len(df) == 4757, "Shape of DF has to be 4757"
assert len(df_test) == 1001, "Shape of DF test has to be 1001"
df_bitcoin = preprocess_bitcoin(df_bitcoin)
return df_bitcoin, df, df_test
def get_external_data():
df_gemin_btc_usd = pd.read_csv('data/external/Gemini_BTCUSD_d.csv')
df_gemin_eth_usd = pd.read_csv('data/external/Gemini_ETHUSD_d.csv')
df_gemin_ltc_usd = pd.read_csv('data/external/Gemini_LTCUSD_d.csv')
df_icobench = pd.read_csv('data/external/ico_bench_ended.csv')
return df_gemin_btc_usd, df_gemin_eth_usd, df_gemin_ltc_usd, df_icobench
def _save_processed_data(df_bitcoin, df, df_test, df_gem_btc_usd, df_gem_eth_usd, df_gem_ltc_usd, df_icobench):
df_bitcoin.to_csv('data/processed/df_bitcoin_pp.csv', index=None)
df.to_csv('data/processed/df_train_pp.csv', index=None)
df_test.to_csv('data/processed/df_test_pp.csv', index=None)
df_gem_btc_usd.to_csv('data/processed/df_gem_btc_usd.csv', index=None)
df_gem_eth_usd.to_csv('data/processed/df_gem_eth_usd.csv', index=None)
df_gem_ltc_usd.to_csv('data/processed/df_gem_ltc_usd.csv', index=None)
df_icobench.to_csv('data/processed/df_icobench.csv', index=None)
def preprocess_external_data(df_btc, df_eth, df_ltc):
def preprocess_times(df):
df['Date'] = pd.to_datetime(df.Date)
df = df.loc[df.Date.dt.year == 2019]
df = df.assign(calendar_week=df.Date.dt.week)
return df
df_btc_pp = preprocess_times(df_btc)
df_eth_pp = preprocess_times(df_eth)
df_ltc_pp = preprocess_times(df_ltc)
return df_btc_pp, df_eth_pp, df_ltc_pp
@click.command()
def main():
df_bitcoin, df, df_test = get_processed_data()
df_gemin_btc_usd, df_gemin_eth_usd, df_gemin_ltc_usd, df_icobench = get_external_data()
df_btc_pp, df_eth_pp, df_ltc_pp = preprocess_external_data(
df_gemin_btc_usd, df_gemin_eth_usd, df_gemin_ltc_usd)
_save_processed_data(df_bitcoin, df, df_test,
df_btc_pp, df_eth_pp, df_ltc_pp, df_icobench)
if __name__ == "__main__":
main()
|
[
"logging.basicConfig",
"pandas.read_csv",
"click.command",
"pandas.to_datetime",
"datetime.datetime.fromtimestamp",
"pandas.concat",
"logging.getLogger"
] |
[((144, 171), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (161, 171), False, 'import logging\n'), ((6571, 6586), 'click.command', 'click.command', ([], {}), '()\n', (6584, 6586), False, 'import click\n'), ((1648, 1714), 'pandas.read_csv', 'pd.read_csv', (['path_bitcoin_df'], {'encoding': '"""ISO-8859-1"""', 'delimiter': '""";"""'}), "(path_bitcoin_df, encoding='ISO-8859-1', delimiter=';')\n", (1659, 1714), True, 'import pandas as pd\n'), ((1733, 1785), 'pandas.read_csv', 'pd.read_csv', (['path_training_df'], {'encoding': '"""ISO-8859-1"""'}), "(path_training_df, encoding='ISO-8859-1')\n", (1744, 1785), True, 'import pandas as pd\n'), ((1801, 1849), 'pandas.read_csv', 'pd.read_csv', (['path_test_df'], {'encoding': '"""ISO-8859-1"""'}), "(path_test_df, encoding='ISO-8859-1')\n", (1812, 1849), True, 'import pandas as pd\n'), ((4346, 4401), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'format': 'log_fmt'}), '(level=logging.INFO, format=log_fmt)\n', (4365, 4401), False, 'import logging\n'), ((4544, 4568), 'pandas.concat', 'pd.concat', (['[df, df_test]'], {}), '([df, df_test])\n', (4553, 4568), True, 'import pandas as pd\n'), ((5213, 5261), 'pandas.read_csv', 'pd.read_csv', (['"""data/external/Gemini_BTCUSD_d.csv"""'], {}), "('data/external/Gemini_BTCUSD_d.csv')\n", (5224, 5261), True, 'import pandas as pd\n'), ((5285, 5333), 'pandas.read_csv', 'pd.read_csv', (['"""data/external/Gemini_ETHUSD_d.csv"""'], {}), "('data/external/Gemini_ETHUSD_d.csv')\n", (5296, 5333), True, 'import pandas as pd\n'), ((5357, 5405), 'pandas.read_csv', 'pd.read_csv', (['"""data/external/Gemini_LTCUSD_d.csv"""'], {}), "('data/external/Gemini_LTCUSD_d.csv')\n", (5368, 5405), True, 'import pandas as pd\n'), ((5424, 5472), 'pandas.read_csv', 'pd.read_csv', (['"""data/external/ico_bench_ended.csv"""'], {}), "('data/external/ico_bench_ended.csv')\n", (5435, 5472), True, 'import pandas as pd\n'), ((6259, 6282), 'pandas.to_datetime', 'pd.to_datetime', (['df.Date'], {}), '(df.Date)\n', (6273, 6282), True, 'import pandas as pd\n'), ((3532, 3566), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['(x / 1000.0)'], {}), '(x / 1000.0)\n', (3554, 3566), False, 'from datetime import datetime\n')]
|
import multiprocessing as mp
from threading import Lock, RLock
from pybot.externals.viewer.websocket_server import WebsocketServer
from pybot.externals import marshalling_backend
from pybot.externals import unpack, pack
class _ThreadHandler(object):
def __init__(self):
self.lock_ = Lock()
self.ev_th_ = None
def setup(self, server):
self.ev_th_ = mp.Process(target=self.run, args=(server,))
self.ev_th_.start()
with self.lock_:
self.server_ = server
def stop(self):
try:
self.ev_th_.join()
except Exception as e:
print('Exiting')
def on_event(self, server, msg):
try:
ch, data = unpack(msg)
print('on_event: ch={}, len={}'.format(ch, len(data)))
server.send_message_to_all(msg)
except Exception as e:
print('Failed to send, client unavailable {}'.format(e))
# Called for every client connecting (after handshake)
def new_client(self, client, server):
self.setup(server)
with self.lock_:
print("New client connected and was given id %d" % client['id'])
# self.server_.send_message_to_all("Hey all, a new client has joined us")
# Called for every client disconnecting
def client_left(self, client, server):
self.setup(server)
with self.lock_:
print("Client(%d) disconnected" % client['id'])
# Called when a client sends a message
def message_received(self, client, server, message):
if len(message) > 200:
message = message[:200]+'..'
print("Client(%d) said: %s" % (client['id'], message))
def run(self, server):
# Setup
if marshalling_backend() == 'lcm':
import lcm
self.m_ = lcm.LCM()
self.sub_ = self.m_.subscribe('.*_COLLECTION.*', self.on_event)
def handle():
# Handler
try:
while True:
                        self.m_.handle()
except KeyboardInterrupt:
pass
def cleanup():
pass
elif marshalling_backend() == 'zmq':
import zmq
zmq_server = '127.0.0.1'
zmq_port = 4999
self.m_ = zmq.Context()
self.sub_ = self.m_.socket(zmq.SUB)
self.sub_.connect('tcp://{}:{}'
.format(zmq_server, zmq_port))
self.sub_.setsockopt(zmq.SUBSCRIBE, b'')
print('Starting zmq listener on port {}:{}'
.format(zmq_server, zmq_port))
def handle():
# Handler
try:
while True:
msg = self.sub_.recv()
self.on_event(server, msg)
except KeyboardInterrupt:
pass
def cleanup():
self.sub_.close()
self.m_.term()
# Handle
handle()
PORT=9001
th = _ThreadHandler()
print('Starting server on port {}'.format(PORT))
server = WebsocketServer(PORT)
server.set_fn_new_client(th.new_client)
server.set_fn_client_left(th.client_left)
server.set_fn_message_received(th.message_received)
server.run_forever()
th.stop()
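# Illustrative client-side check (assumes the third-party `websocket-client` package):
#   import websocket
#   ws = websocket.create_connection('ws://localhost:9001')
#   print(ws.recv())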
|
[
"lcm.LCM",
"pybot.externals.marshalling_backend",
"threading.Lock",
"pybot.externals.viewer.websocket_server.WebsocketServer",
"multiprocessing.Process",
"pybot.externals.unpack",
"zmq.Context"
] |
[((3291, 3312), 'pybot.externals.viewer.websocket_server.WebsocketServer', 'WebsocketServer', (['PORT'], {}), '(PORT)\n', (3306, 3312), False, 'from pybot.externals.viewer.websocket_server import WebsocketServer\n'), ((298, 304), 'threading.Lock', 'Lock', ([], {}), '()\n', (302, 304), False, 'from threading import Lock, RLock\n'), ((392, 435), 'multiprocessing.Process', 'mp.Process', ([], {'target': 'self.run', 'args': '(server,)'}), '(target=self.run, args=(server,))\n', (402, 435), True, 'import multiprocessing as mp\n'), ((747, 758), 'pybot.externals.unpack', 'unpack', (['msg'], {}), '(msg)\n', (753, 758), False, 'from pybot.externals import unpack, pack\n'), ((1807, 1828), 'pybot.externals.marshalling_backend', 'marshalling_backend', ([], {}), '()\n', (1826, 1828), False, 'from pybot.externals import marshalling_backend\n'), ((1884, 1893), 'lcm.LCM', 'lcm.LCM', ([], {}), '()\n', (1891, 1893), False, 'import lcm\n'), ((2289, 2310), 'pybot.externals.marshalling_backend', 'marshalling_backend', ([], {}), '()\n', (2308, 2310), False, 'from pybot.externals import marshalling_backend\n'), ((2432, 2445), 'zmq.Context', 'zmq.Context', ([], {}), '()\n', (2443, 2445), False, 'import zmq\n')]
|
# Generated by Django 3.1.1 on 2020-09-27 06:03
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('accounts', '0006_userprofile_instagram_link'),
]
operations = [
migrations.RenameField(
model_name='userprofile',
old_name='firstname',
new_name='name',
),
migrations.RemoveField(
model_name='userprofile',
name='lastname',
),
]
|
[
"django.db.migrations.RemoveField",
"django.db.migrations.RenameField"
] |
[((236, 327), 'django.db.migrations.RenameField', 'migrations.RenameField', ([], {'model_name': '"""userprofile"""', 'old_name': '"""firstname"""', 'new_name': '"""name"""'}), "(model_name='userprofile', old_name='firstname',\n new_name='name')\n", (258, 327), False, 'from django.db import migrations\n'), ((380, 445), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""userprofile"""', 'name': '"""lastname"""'}), "(model_name='userprofile', name='lastname')\n", (402, 445), False, 'from django.db import migrations\n')]
|
# Test: write to an Arduino over a serial port
import serial
ser = serial.Serial('/dev/ttyACM2', 9600)
int_encode = b'2'
float_encode = b'42.3'
string1 = "Hello!"
string1_encode = string1.encode()
int1 = 1
int1_encode = b'%d' %int1 # %d is used for integer data types. float = %f
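# Illustrative: a float would be sent the same way, e.g. float1_encode = b'%f' % 42.3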
#ser.write(b'3')
#ser.write(b'5')
#ser.write(b'7')
ser.write(int1_encode)
|
[
"serial.Serial"
] |
[((43, 78), 'serial.Serial', 'serial.Serial', (['"""/dev/ttyACM2"""', '(9600)'], {}), "('/dev/ttyACM2', 9600)\n", (56, 78), False, 'import serial\n')]
|
from tweets.models import Comment
from django.db import router
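# NOTE: the `router` name imported above is immediately shadowed by the
# DefaultRouter instance created below.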
# from posts.views import my_view
from rest_framework import routers
from django.urls.conf import include
from django.urls import path
from tweets.views import TweetViewSet, LikeViewSet, RetweetviewSet, CommentviewSet, index
router = routers.DefaultRouter()
router.register(r'tweets', TweetViewSet)
router.register(r'likes', LikeViewSet)
router.register(r'retweet',RetweetviewSet)
# router.register(r'trends',TrendsviewSet)
router.register(r"comment",CommentviewSet)
urlpatterns = [
path('index/', index),
path("", include(router.urls))
]
|
[
"django.urls.path",
"django.db.router.register",
"rest_framework.routers.DefaultRouter",
"django.urls.conf.include"
] |
[((299, 322), 'rest_framework.routers.DefaultRouter', 'routers.DefaultRouter', ([], {}), '()\n', (320, 322), False, 'from rest_framework import routers\n'), ((323, 362), 'django.db.router.register', 'router.register', (['"""tweets"""', 'TweetViewSet'], {}), "('tweets', TweetViewSet)\n", (338, 362), False, 'from django.db import router\n'), ((364, 401), 'django.db.router.register', 'router.register', (['"""likes"""', 'LikeViewSet'], {}), "('likes', LikeViewSet)\n", (379, 401), False, 'from django.db import router\n'), ((403, 445), 'django.db.router.register', 'router.register', (['"""retweet"""', 'RetweetviewSet'], {}), "('retweet', RetweetviewSet)\n", (418, 445), False, 'from django.db import router\n'), ((489, 531), 'django.db.router.register', 'router.register', (['"""comment"""', 'CommentviewSet'], {}), "('comment', CommentviewSet)\n", (504, 531), False, 'from django.db import router\n'), ((554, 575), 'django.urls.path', 'path', (['"""index/"""', 'index'], {}), "('index/', index)\n", (558, 575), False, 'from django.urls import path\n'), ((590, 610), 'django.urls.conf.include', 'include', (['router.urls'], {}), '(router.urls)\n', (597, 610), False, 'from django.urls.conf import include\n')]
|
from enum import Enum, auto
class AutoName(Enum):
def _generate_next_value_(name, start, count, last_values):
return name.lower()
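    # With this override, auto() assigns each member its lowercase name,
    # e.g. CommandType.Help.value == 'help'.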
class CommandType(AutoName):
Unknown = auto()
Help = auto()
Status = auto()
Restart = auto()
Map = auto()
Bots = auto()
Playlist = auto()
Gamemode = auto()
SCP = auto()
TimeLimit = auto()
AutoRestart = auto()
Reset = auto()
Register = auto()
Primary = auto()
List = auto()
def getMessageType(message: str):
    # Look up the command by message prefix (original check order preserved);
    # fall back to Unknown so callers never receive None.
    prefixes = (
        ('help', CommandType.Help),
        ('status', CommandType.Status),
        ('restart', CommandType.Restart),
        ('map', CommandType.Map),
        ('bots', CommandType.Bots),
        ('playlist', CommandType.Playlist),
        ('gamemode', CommandType.Gamemode),
        ('scp', CommandType.SCP),
        ('timelimit', CommandType.TimeLimit),
        ('autorestart', CommandType.AutoRestart),
        ('reset', CommandType.Reset),
        ('register', CommandType.Register),
        ('primary', CommandType.Primary),
        ('list', CommandType.List),
    )
    lowered = message.lower()
    for prefix, command in prefixes:
        if lowered.startswith(prefix):
            return command
    return CommandType.Unknown
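
# Usage sketch (results follow from the prefix table above):
#     getMessageType('Restart the server')  # -> CommandType.Restart
#     getMessageType('hello there')         # -> CommandType.Unknown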
|
[
"enum.auto"
] |
[((187, 193), 'enum.auto', 'auto', ([], {}), '()\n', (191, 193), False, 'from enum import Enum, auto\n'), ((205, 211), 'enum.auto', 'auto', ([], {}), '()\n', (209, 211), False, 'from enum import Enum, auto\n'), ((225, 231), 'enum.auto', 'auto', ([], {}), '()\n', (229, 231), False, 'from enum import Enum, auto\n'), ((246, 252), 'enum.auto', 'auto', ([], {}), '()\n', (250, 252), False, 'from enum import Enum, auto\n'), ((263, 269), 'enum.auto', 'auto', ([], {}), '()\n', (267, 269), False, 'from enum import Enum, auto\n'), ((281, 287), 'enum.auto', 'auto', ([], {}), '()\n', (285, 287), False, 'from enum import Enum, auto\n'), ((303, 309), 'enum.auto', 'auto', ([], {}), '()\n', (307, 309), False, 'from enum import Enum, auto\n'), ((325, 331), 'enum.auto', 'auto', ([], {}), '()\n', (329, 331), False, 'from enum import Enum, auto\n'), ((342, 348), 'enum.auto', 'auto', ([], {}), '()\n', (346, 348), False, 'from enum import Enum, auto\n'), ((365, 371), 'enum.auto', 'auto', ([], {}), '()\n', (369, 371), False, 'from enum import Enum, auto\n'), ((390, 396), 'enum.auto', 'auto', ([], {}), '()\n', (394, 396), False, 'from enum import Enum, auto\n'), ((409, 415), 'enum.auto', 'auto', ([], {}), '()\n', (413, 415), False, 'from enum import Enum, auto\n'), ((431, 437), 'enum.auto', 'auto', ([], {}), '()\n', (435, 437), False, 'from enum import Enum, auto\n'), ((452, 458), 'enum.auto', 'auto', ([], {}), '()\n', (456, 458), False, 'from enum import Enum, auto\n'), ((470, 476), 'enum.auto', 'auto', ([], {}), '()\n', (474, 476), False, 'from enum import Enum, auto\n')]
|
"""Runs all Jupyter notebooks in given folders. Folder names (one or multiple)
can be passed as arguments to the script and can be provided relative to the
folder which contains all notebooks (e.g. "notebooks").
Notebooks are run with their enclosing folder as working directory.
Example
-------
If you want to run all Jupyter notebooks in "notebooks/0_prepare_data", run:
$ python run_ipynb.py 0_prepare_data
"""
import sys
from pathlib import Path
import nbformat
from nbconvert.preprocessors import CellExecutionError, ExecutePreprocessor
def validate_input(input_args):
"""Extracts folder names from passed arguments and makes sure that
they are valid.
Parameters
----------
input_args : iterable
All input arguments with the first one being the script name
(i.e. sys.argv)
Returns
-------
list
All valid folder names
"""
# Stops execution of script if no folders are given
if len(input_args) == 1:
raise Exception('You need to specify either one or multiple folders ' +
f'inside "./{str(notebooks_path)}" to run')
# If folders are given, first check if they exist, else stop
folders = [Path(f) for f in input_args[1:]]
for f in folders:
if not (notebooks_path / f).is_dir():
raise Exception(
f'Folder "{str(notebooks_path / f)}" does not exist.')
return folders
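
# Illustrative call (assumes the script is run from the repository root and
# that notebooks/0_prepare_data exists):
#     validate_input(['run_ipynb.py', '0_prepare_data'])
#     # -> [Path('0_prepare_data')]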
def run_notebook(nb_wd, nb_path):
"""Runs a given notebook and saves it
Executes the passed notebook with nb_wd as the working directory.
If an error occurs during the execution, a message is raised to the user
and the notebook is saved anyway, including the traceback.
Parameters
----------
nb_wd : Path or str
Path to the folder which should be used as a working directory for
the execution of the notebook
nb_path : Path or str
Full path to the notebook which should be run.
Returns
-------
Nothing
"""
if not isinstance(nb_wd, Path):
nb_wd = Path(nb_wd)
if not isinstance(nb_path, Path):
nb_path = Path(nb_path)
with nb_path.open() as f:
nb = nbformat.read(f, as_version=nbformat.NO_CONVERT)
# Configure notebook execution mode
# Timeout = None means no restriction on runtime of cells
ep = ExecutePreprocessor(timeout=None, kernel_name='python3')
# The code for the following error handling is taken from the
# official nbconvert documentation:
# https://nbconvert.readthedocs.io/en/latest/execute_api.html
try:
# Run notebook
out = ep.preprocess(nb, {'metadata': {'path': str(nb_wd)}})
except CellExecutionError:
out = None
msg = f'Error executing the notebook "{str(nb_path)}".\n\n'
msg += 'See the notebook for the traceback.\n'
print(msg)
raise
finally:
        # Save it. Includes tracebacks should an error have occurred.
with nb_path.open('wt') as f:
nbformat.write(nb, f)
return
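
# Illustrative standalone call (the paths below are placeholders):
#     run_notebook(Path('notebooks/0_prepare_data'),
#                  Path('notebooks/0_prepare_data/01_load.ipynb'))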
if __name__ == '__main__':
# Set path to root directory of notebooks
notebooks_path = Path('notebooks')
# Validate input and get folder names
folders = validate_input(sys.argv)
# Get sorted list of all notebooks to run
print('-' * 20)
print('The following notebooks will be executed in order:')
print('-' * 20)
notebooks = []
for f in folders:
nb_found = sorted([
x for x in (notebooks_path / f).iterdir() if x.suffix == '.ipynb'
])
print('\n'.join(str(x) for x in nb_found))
notebooks.append([f, nb_found])
print('-' * 20)
# Run notebooks
for nb_wd, nb_paths in notebooks:
for nb_p in nb_paths:
print(f'Run {str(nb_p)}')
run_notebook(notebooks_path / nb_wd, nb_p)
print('-' * 20)
|
[
"nbconvert.preprocessors.ExecutePreprocessor",
"pathlib.Path",
"nbformat.write",
"nbformat.read"
] |
[((2348, 2404), 'nbconvert.preprocessors.ExecutePreprocessor', 'ExecutePreprocessor', ([], {'timeout': 'None', 'kernel_name': '"""python3"""'}), "(timeout=None, kernel_name='python3')\n", (2367, 2404), False, 'from nbconvert.preprocessors import CellExecutionError, ExecutePreprocessor\n'), ((3145, 3162), 'pathlib.Path', 'Path', (['"""notebooks"""'], {}), "('notebooks')\n", (3149, 3162), False, 'from pathlib import Path\n'), ((1206, 1213), 'pathlib.Path', 'Path', (['f'], {}), '(f)\n', (1210, 1213), False, 'from pathlib import Path\n'), ((2062, 2073), 'pathlib.Path', 'Path', (['nb_wd'], {}), '(nb_wd)\n', (2066, 2073), False, 'from pathlib import Path\n'), ((2130, 2143), 'pathlib.Path', 'Path', (['nb_path'], {}), '(nb_path)\n', (2134, 2143), False, 'from pathlib import Path\n'), ((2187, 2235), 'nbformat.read', 'nbformat.read', (['f'], {'as_version': 'nbformat.NO_CONVERT'}), '(f, as_version=nbformat.NO_CONVERT)\n', (2200, 2235), False, 'import nbformat\n'), ((3016, 3037), 'nbformat.write', 'nbformat.write', (['nb', 'f'], {}), '(nb, f)\n', (3030, 3037), False, 'import nbformat\n')]
|
from genfigs.genfigs import *
# from ofspy.task import Task
# from ofspy.path import Path
import networkx as nx
import random
from collections import Counter
from scipy.optimize import minimize
# from matplotlib import pylab as plt
# import math
import numpy as np
import matplotlib.pyplot as plt
from gurobipy import Model, LinExpr, GRB, GurobiError
from itertools import product
def pickTask(task, time):
element = task.element
task.lastelement = element
element.size += task.size
task.init = time
task.expiration = time + 5
def transTask(task, link, cost, epsilon):
# link.source.size -= task.size
# link.destin.size += task.size
task.lastelement = link.destin
task.element.owner.cash -= cost
link.owner.cash += cost - epsilon
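# Worked example of the cash flow above: with cost = 400 and epsilon = 10
# (the value set in __main__), the task owner's federate pays 400 and the
# link owner nets 390; epsilon acts as a fixed per-transfer loss.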
def resolveTask(task, value):
task.element.owner.cash += value
task.element.size -= task.size
class Federate():
def __init__(self, name, cash, linkcost):
self.name = name
self.cash = cash
self.linkcost = linkcost
class Link():
def __init__(self, source, destin, capacity, size, owner):
self.source = source
self.destin = destin
self.capacity = capacity
self.size = size
self.owner = owner
class Task():
def __init__(self, id, element, lastelement, size, value, expiration, init, active, penalty):
self.id = id
self.element = element
self.lastelement = lastelement
self.size = size
self.expiration =expiration
self.init = init
self.active = active
self.penalty = penalty
self.maxvalue = value
def getValue(self, time):
"""
Gets the current value of this contract.
@return: L{float}
"""
# print time, self.initTime
duration = self.expiration - self.init + 1
self.elapsedTime = time - self.init
value = self.maxvalue if self.elapsedTime <= duration else self.penalty if self.elapsedTime > self.expiration \
else self.maxvalue * (1. - self.elapsedTime) / (2. * self.expiration)
return value
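# Worked example of Task.getValue (init = 0, expiration = 5, so duration = 6):
#   time = 3 -> elapsedTime = 3 <= 6, value = maxvalue
#   time = 7 -> elapsedTime = 7 > expiration, value = penalty
# Note that the middle branch compares the relative elapsedTime against the
# absolute expiration time, which only lines up when init is near zero; this
# may or may not be intentional.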
class Element():
def __init__(self, name, capacity, size, owner):
self.name = name
self.capacity = capacity
self.size = size
self.owner = owner
def costfunction(f, l):
f2 = l.destin.owner
if f.name == f2.name:
return 0
else:
return f2.linkcost
def optimizeMILP(elements, linklist, destinations, tasklist, time, federates):
global storagepenalty, linkcost, epsilon, value, penalty, linkcapacity, elementcapacity
print(time, [(task.id, task.element.name) for task in tasklist], [task.init for task in tasklist])
print([(l.source.name, l.destin.name) for l in linklist])
lp = Model('LP')
steps = 10
timesteps = range(time, time + steps)
trans = [] # trans[t][i][l] transfer task i from link l at time t
store = [] # store[i][j] store task i
pick = [] # pick[i] if source i picks up the task
resolve = []
J = LinExpr()
for i, task in enumerate(tasklist):
store.insert(i, lp.addVar(vtype=GRB.BINARY))
J.add(store[i], -1* storagepenalty)
r = LinExpr()
r.add(store[i], 1)
lp.addConstr(r <= 1)
# lp.addConstr(r == 0)
for i, task in enumerate(tasklist):
pick.append(lp.addVar(vtype=GRB.BINARY))
J.add(pick[i], -1)
element = task.element
r = LinExpr()
r.add(pick[i], 1)
if task.init < time:
lp.addConstr(r == 1)
else:
lp.addConstr(r <= 1)
for i, t in enumerate(timesteps):
trans.insert(i, [])
resolve.insert(i, [])
for k, task in enumerate(tasklist):
# print(task.element.name, task.lastelement.name)
trans[i].insert(k, [])
resolve[i].insert(k, [])
for j, e in enumerate(elements):
resolve[i][k].insert(j, lp.addVar(vtype=GRB.BINARY))
if e.name in destinations:
J.add(resolve[i][k][j], value)
else:
J.add(resolve[i][k][j], penalty)
if i == 0 and (task.expiration <= time):
r = LinExpr()
element = task.element
j, e = next(((a, b) for a, b in enumerate(elements) if b.name == element.name))
r.add(resolve[i][k][j], 1)
lp.addConstr(r == 1)
for l, link in enumerate(linklist):
trans[i][k].insert(l, lp.addVar(vtype=GRB.BINARY))
J.add(trans[i][k][l], -1*epsilon)
r = LinExpr()
r.add(trans[i][k][l], 1)
lp.addConstr(r <= (1 if (task.size <= (link.capacity - link.size)
and link.source.name not in destinations) else 0))
r.add(pick[k], -1)
lp.addConstr(r <= 0)
r = LinExpr()
r.add(sum(trans[i][k]))
lp.addConstr(r <= 1)
d = link.destin
j, e = next(((a, b) for a, b in enumerate(elements) if b.name == d.name))
# print(link.source.name, link.destin.name, d.name, j, e.name)
r = LinExpr()
r.add(resolve[i][k][j], 1)
lp.addConstr(r <= (1 if (d.name in destinations) else 0))
for i, t in enumerate(timesteps):
for k, task in enumerate(tasklist):
for j, element in enumerate(elements):
inlinks = [(l, li) for l, li in enumerate(linklist) if li.destin.name == element.name]
outlinks = [(l, li) for l, li in enumerate(linklist) if li.source.name == element.name]
# print(i, k, element.name, [e[0] for e in inlinks], [e[0] for e in outlinks])
if i == 0 and element.name == task.element.name:
# print("SOURCE:", i, element.name, [e[0] for e in inlinks], [e[0] for e in outlinks])
r = LinExpr()
for l, li in outlinks:
r.add(trans[i][k][l], -1)
r.add(resolve[i][k][j], -1)
r.add(store[k], -1)
r.add(pick[k], 1)
lp.addConstr(r == 0)
elif element.name in destinations:
r = LinExpr()
# r2 = LinExpr()
for l, li in inlinks:
r.add(trans[i][k][l], 1)
r.add(resolve[i][k][j], -1)
lp.addConstr(r == 0)
else:
r = LinExpr()
# r2 = LinExpr()
for l, li in inlinks:
r.add(trans[i][k][l], 1)
r.add(resolve[i][k][j], -1)
if i< len(timesteps) - 1:
for l, li in outlinks:
r.add(trans[i+1][k][l], -1)
lp.addConstr(r == 0)
#
for k, task in enumerate(tasklist):
r = LinExpr()
r.add(pick[k], -1)
r.add(store[k], 1)
for j, element in enumerate(elements):
for i, t in enumerate(timesteps):
r.add(resolve[i][k][j], 1)
lp.addConstr(r == 0)
for l, li in enumerate(linklist):
r = LinExpr()
for k in range(len(tasklist)):
for i in range(len(timesteps)):
r.add(trans[i][k][l])
lp.addConstr(r <= linkcapacity)
for j, e in enumerate(elements):
r = LinExpr()
for k, task in enumerate([t for t in tasklist if e.name == task.element.name]):
r.add(pick[k], 1)
for i in range(len(timesteps)):
for v in range(len(elements)):
r.add(resolve[i][k][v], -1)
lp.addConstr(r <= elementcapacity)
# for i in range(len(timesteps)):
# rl = [LinExpr() for e in elements]
# for k, task in enumerate(tasklist):
# element = task.element
# j, e = next(((a, b) for a, b in enumerate(elements) if b.name == element.name))
# rl[j].add(store[k], 1)
# rl[j].add(resolve[0][k][j], -1)
#
# for r in rl:
# lp.addConstr(r <= elementcapacity)
for k, task in enumerate(tasklist):
r = LinExpr()
fedtask = task.element.owner
for i in range(len(timesteps)):
for l, li in enumerate(linklist):
r.add(trans[i][k][l], -1*(costfunction(fedtask, li)+epsilon))
r.add(task.getValue(time), 1)
lp.addConstr(r >= 0)
lp.setObjective(J, GRB.MAXIMIZE)
lp.setParam('OutputFlag', False)
lp.optimize()
# print("pick:", pick)
# print("store:", store)
# print("trans:", trans)
# print("resolve:", resolve)
# print("sum of trans:", [sum([sum([e.x for e in a]) for a in l]) for l in trans])
    for i, task in enumerate(tasklist):
        # pick[] is indexed over tasklist (storedtasks + newtasks), not the
        # global newtasks; guard so only newly created tasks are picked up
        if pick[i].x > 0.5 and task.init == time:
            pickTask(task, time)
edges = []
for i, t in enumerate(timesteps):
for k, task in enumerate(tasklist):
for l, link in enumerate(linklist):
if trans[i][k][l].x>0.5:
# print('trans is 1')
edges.append((link.source.name, link.destin.name))
print(i, task.id, task.element.name, (link.source.name,link.destin.name))
if task.element.owner == link.owner:
transTask(task, link, 0, epsilon)
else:
transTask(task, link, linkcost, epsilon)
for j, e in enumerate(elements):
if resolve[i][k][j].x>0.5:
print('time ', i, ' resolved task:', task.id, ' element ', j)
# if task.expiration <= time:
# resolveTask(task, task.penalty)
# else:
# resolveTask(task, task.value)
resolveTask(task, task.getValue(time))
for k, task in enumerate(tasklist):
net = 0
fedtask = task.element.owner
for i in range(len(timesteps)):
for l, li in enumerate(linklist):
net -= trans[i][k][l].x * (costfunction(fedtask, li) + epsilon)
net += task.getValue(time)
# print("task ", task.id, " net value ", net, " is stored:", store[k].x)
storedtasks = []
for k, task in enumerate(tasklist):
# print([resolve[i][k][j].x for i, j in product(range(len(timesteps)), range(len(elements)))])
if (pick[k].x and store[k].x) and not any([resolve[i][k][j].x for i, j in product(range(len(timesteps)), range(len(elements)))]):
storedtasks.append(task)
return storedtasks, edges
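# Summary of the MILP decision variables built above, for reference:
#   pick[k]          - task k is picked up at its source element
#   store[k]         - task k is held in storage instead of being resolved
#   trans[i][k][l]   - task k traverses link l at relative time step i
#   resolve[i][k][j] - task k terminates at element j at step i (earning
#                      `value` at a destination, `penalty` elsewhere)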
# def drawSampleNetwork():
# global all_edges, satellites, stations, federate_cost_dict, taskids
# plt.figure()
# loc_dict = {e: loc for e, loc in zip(satellites + stations, [(-0.2-1, 2), (0.7-1,2), (1.5-0.8,2), (0.3-0.2,1), (1.1, 1),(0.5, 0), (1.5, 0)])}
# sat_locs = [loc_dict[e] for e in satellites]
# sta_locs = [loc_dict[e] for e in stations]
#
# loc_element_dict = {loc: i+1 for i, loc in enumerate(sat_locs + sta_locs)}
#
# all_edges_locs = [(loc_dict[e[0]], loc_dict[e[1]]) for e in all_edges]
#
# for edge in all_edges_locs[:]:
# if edge[1] not in sta_locs:
# all_edges_locs.append((edge[1], edge[0]))
# # textloc = zip(satellites[:3], ['$F_1, T_1, S1$', '$F_2, T_2, S2$', '$F_1, T_3, S3$']) +
# textloc = [((sat_locs[0][0], sat_locs[0][1] + 0.2), '$F_1, e_1$'), ((sat_locs[1][0], sat_locs[1][1] + 0.2), '$F_2, e_2$'),
# ((sat_locs[2][0], sat_locs[2][1] + 0.2), '$F_1, e_3$'), ((sta_locs[0][0], sta_locs[0][1] - 0.2), '$F1, e_6 (G)$'),
# ((sta_locs[1][0], sta_locs[1][1] - 0.2), '$F2, e_7 (G)$') ,((sat_locs[3][0] - 0.2, sat_locs[3][1] - 0.1), '$F_2, e_4$'), ((sat_locs[4][0] + 0.2, sat_locs[4][1] - 0.1), '$F_1, e_5$')]
#
# element_federate_dict = {s: v for s,v in zip(sat_locs+sta_locs, [1, 2, 1, 2, 1, 1, 2])}
#
# plt.scatter(*zip(*sat_locs), marker='H', color='k', s=300, facecolors='w', linewidth='2')
# plt.scatter(*zip(*sta_locs), marker='H', color='k', s=400, facecolors='w', linewidth='2')
#
# edge_federate_dict = []
# all_arrows = []
# for edge in all_edges_locs:
# # plt.plot(*zip(*edge), 'k:', linewidth = 0.7)
# # if
# e1e2 = (loc_element_dict[edge[0]], loc_element_dict[edge[1]])
# legend = r'$l_{%d%d}$'%(e1e2[0], e1e2[1])
# # print(label)
# arr1 = plt.arrow(edge[0][0], edge[0][1], 0.9* (edge[1][0] - edge[0][0]), 0.9 * (edge[1][1] - edge[0][1]),
# head_width=0.03, head_length=0.05, linewidth=0.7, fc='k', ec='k', zorder=-1, linestyle = ':')
# # arr2 plt.arrow(edge[1][0], edge[1][1], 0.9* (edge[0][0] - edge[1][0]), 0.9 * (edge[0][1] - edge[1][1]),
# # head_width=0.03, head_length=0.05, linewidth=0.7, fc='k', ec='k', zorder=-1, linestyle = ':')
# x = (edge[0][0] + edge[1][0])/2.
# y = (edge[0][1] + edge[1][1])/2.
# nom , denom = ((edge[1][1] - edge[0][1]), (edge[1][0] - edge[0][0]))
# r = 180/math.pi * np.arctan((edge[1][1] - edge[0][1])/(edge[1][0] - edge[0][0])) if (edge[1][0] - edge[0][0]) != 0 else 'vertical'
# # print(edge, r)
# all_arrows.append((arr1, legend))
# if (nom>=0 and denom>0) or (nom<0 and denom>0):
# x += 0.05
# y += 0.05
# elif (nom==0 and denom<0):
# x -= 0.05
# y -= 0.05
# else:
# x -= 0.05
# y -= 0.05
#
# plt.text(x, y, ha="center", va="center", s=legend, bbox=dict(fc="none", ec="none", lw=2), rotation = r)
#
# for xy, text in textloc:
# plt.text(*xy, ha="center", va="center", s=text, bbox=dict(fc="none", ec="none", lw=2))
#
# plt.text(-0.3, 0.2, ha="left", va="center", s=r'$\zeta_{12}=%d$'%federate_cost_dict['F2'], bbox=dict(fc="none", ec="none", lw=2), rotation = 0)
# plt.text(-0.3, 0.1, ha="left", va="center", s=r'$\zeta_{21}=%d$'%federate_cost_dict['F1'], bbox=dict(fc="none", ec="none", lw=2), rotation = 0)
# plt.text(-0.3, 0.0, ha="left", va="center", s=r'$\zeta_{11}= 0$', bbox=dict(fc="none", ec="none", lw=2), rotation = 0)
# plt.text(-0.3, -0.1, ha="left", va="center", s=r'$\zeta_{22}= 0$', bbox=dict(fc="none", ec="none", lw=2), rotation = 0)
#
# font = FontProperties()
# font.set_style('italic')
# font.set_weight('bold')
# font.set_size('small')
#
# for i, (x, y) in enumerate([sat_locs[t-1] for t in taskids]):
# plt.text(x, y, ha="center", va="center", s='$T_%s$'%str(i+1), bbox=dict(fc="none", ec="none", lw=2), fontproperties=font)
# plt.xticks([])
# plt.yticks([])
# plt.axis('off')
# plt.savefig('sample_network.pdf', bbox_inches='tight')
def plotDirectedNetworkx(elements, edge1, edge2, destinations = [], sources = [], selectedsources = None):
global element_federate_dict
G = nx.DiGraph()
G.add_nodes_from(elements)
G.add_edges_from(edge1)
val_map = {'A': 1.0,
'D': 0.5714285714285714,
'H': 0.0}
# othernodes = [val_map.get(node, 0.25) for node in G.nodes()]
federates = []
for f in set(element_federate_dict.values()):
federates.append([e for e in elements if element_federate_dict[e] == f])
othernodes = [e for e in elements if (e not in destinations and e not in sources)]
destinationvalues = ['k' for node in destinations]
sourcevalues = ['g' for node in sources]
othervalues = ['r' for node in othernodes]
# Specify the edges you want here
red_edges = edge2
edge_colours = ['black' if not edge in red_edges else 'red'
for edge in G.edges()]
black_edges = [edge for edge in G.edges() if edge not in red_edges]
# Need to create a layout when doing
# separate calls to draw nodes and edges
shapes = ['H', 's', 'o']
colors = ['lightgreen', 'gold']
node_shape_dict = {}
node_color_dict = {}
for e in elements:
if e in federates[0]:
node_color_dict[e] = colors[0]
else:
node_color_dict[e] = colors[1]
    for e in elements:
        # use elif/else so a source's marker is not overwritten below
        if e in sources:
            node_shape_dict[e] = shapes[0]
        elif e in destinations:
            node_shape_dict[e] = shapes[1]
        else:
            node_shape_dict[e] = shapes[2]
pos = nx.circular_layout(G)
for e in elements:
node = nx.draw_networkx_nodes(G, pos, nodelist=[e], cmap=plt.get_cmap('jet'),
node_color=node_color_dict[e], node_size=800, node_shape=node_shape_dict[e], linewidths = 2)
if e in sources:
node.set_edgecolor('k')
# for shape, fed in zip(shapes, federates):
# nx.draw_networkx_nodes(G, pos, nodelist=[e for e in destinations if e in fed], cmap=plt.get_cmap('jet'),
# node_color=node_color_dict[e], node_size=800, node_shape=shape)
# nx.draw_networkx_nodes(G, pos, nodelist=[e for e in sources if e in fed], cmap=plt.get_cmap('jet'),
# node_color=node_color_dict[e], node_size=800,node_shape=shape)
# nx.draw_networkx_nodes(G, pos, nodelist=[e for e in othernodes if e in fed],cmap=plt.get_cmap('jet'),
# node_color=node_color_dict[e], node_size=800, node_shape=shape)
nx.draw_networkx_labels(G, pos)
nx.draw_networkx_edges(G, pos, edgelist=red_edges, edge_color='k', arrows=True, width = 2)
nx.draw_networkx_edges(G, pos, style='dotted' ,edgelist=black_edges, arrows=False)
plt.axis('off')
# plt.savefig('sample_%d_%d.jpg'%(len(sources), len(edge1)), bbox_inches='tight')
# plt.show()
if __name__ == '__main__':
time = 0
federatenames = ['F1', 'F2']
elementnames = ['e1', 'e2', 'e3', 'e4', 'e5', 'e6', 'e7', 'e8', 'e9', 'e10']
stations = elementnames[-2:]
satellites = [e for e in elementnames if e not in stations]
linkcapacity = 2
elementcapacity = 2
seed = 0
random.seed(seed)
element_federate_dict = {e: federatenames[0] if random.random()>0.5 else federatenames[1] for e in elementnames}
# element_federate_dict = {'e1':federatenames[0], 'e2':federatenames[1], 'e3':federatenames[0], 'e4':federatenames[1], 'e5': federatenames[0], 'e6':federatenames[0], 'e7':federatenames[1]}
# all_edges = [(satellites[0],satellites[1]), (satellites[3],stations[0]), (satellites[1],satellites[3]),
# (satellites[2],satellites[4]), (satellites[2],satellites[1]), (satellites[2],satellites[3]), (satellites[3],satellites[4]), (satellites[4],stations[1]), (satellites[2],stations[0])]
# all_possible_edges = [(a,b) for a, b in list(product(elementnames, elementnames)) if (a != b and element_federate_dict[a] != element_federate_dict[b])]
all_possible_edges = [(a,b) for a, b in list(product(elementnames, elementnames)) if (a != b and not (a in stations and b in stations))]
all_edges = random.sample(all_possible_edges, int(len(all_possible_edges)//8))
all_edge_set = set([])
destin_count = 0
for edge in all_edges:
s, d = edge
# if destin_count > len(satellites):
# continue
if s in stations or d in stations:
destin_count += linkcapacity
all_edge_set.add((s,d))
all_edge_set.add((d,s))
all_edges = list(all_edge_set)
id = 1
SP = 100
epsilon = 10
linkcost = 1001
storagepenalty = 100
value = 1000
penalty = -200
size = 1
for linkcost in [0, 400, 600, 1001]:
federatenames = [element_federate_dict[e] for e in elementnames]
federates = [Federate(name = f, cash = 0, linkcost = linkcost) for f in set(federatenames)]
federateDict = {f.name: f for f in federates}
elements = [Element(name = e, capacity=elementcapacity, size = 0, owner = federateDict[f]) for (e,f) in zip(elementnames, federatenames)]
elementDict = {e.name: e for e in elements}
sources = [e for e in elements if e.name not in stations]
print([s.name for s in sources])
linklist = [Link(source = elementDict[e1], destin = elementDict[e2], capacity = linkcapacity, size = 0, owner = elementDict[e2].owner) for (e1, e2) in all_edges]
# print('sources:', [s.name for s in sources])
# newtasks = [Task(id = n + id, element = s, lastelement = s, size = size, value = value, expiration=time + 3, init = time, active = True, penalty = penalty) for n, s in enumerate(sources)]
id += len(sources)
storedtasks = []
for time in range(1):
newtasks = [Task(id = id + n, element=s, lastelement=s, size=size, value=value, expiration=time + 5, init=time, active=True, penalty=penalty) for n, s in enumerate(sources)]
id += len(sources)
tasklist = storedtasks + newtasks
for link in linklist:
link.size = 0
for e in elements:
e.size = 0
storedtasks, edges2 = optimizeMILP(elements = elements, linklist = linklist, destinations = elementnames[-2:], tasklist = tasklist, time = time, federates = federates)
# print([(task.id, task.element.name) for task in storedtasks])
print([f.cash for f in federates])
plotDirectedNetworkx(elementnames, edge1=all_edges, edge2=edges2, destinations=elementnames[-2:], sources = [s.name for s in sources])
plt.savefig('sample_%d_%d_cash_%d_%d_cost_%d_seed_%d.jpg' % (len(sources), len(all_edges),federates[0].cash, federates[1].cash, linkcost, seed), bbox_inches='tight')
plt.close()
|
[
"networkx.draw_networkx_edges",
"matplotlib.pyplot.get_cmap",
"matplotlib.pyplot.close",
"matplotlib.pyplot.axis",
"gurobipy.Model",
"random.random",
"random.seed",
"networkx.draw_networkx_labels",
"networkx.circular_layout",
"itertools.product",
"networkx.DiGraph",
"gurobipy.LinExpr"
] |
[((2766, 2777), 'gurobipy.Model', 'Model', (['"""LP"""'], {}), "('LP')\n", (2771, 2777), False, 'from gurobipy import Model, LinExpr, GRB, GurobiError\n'), ((3030, 3039), 'gurobipy.LinExpr', 'LinExpr', ([], {}), '()\n', (3037, 3039), False, 'from gurobipy import Model, LinExpr, GRB, GurobiError\n'), ((15137, 15149), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (15147, 15149), True, 'import networkx as nx\n'), ((16580, 16601), 'networkx.circular_layout', 'nx.circular_layout', (['G'], {}), '(G)\n', (16598, 16601), True, 'import networkx as nx\n'), ((17575, 17606), 'networkx.draw_networkx_labels', 'nx.draw_networkx_labels', (['G', 'pos'], {}), '(G, pos)\n', (17598, 17606), True, 'import networkx as nx\n'), ((17611, 17704), 'networkx.draw_networkx_edges', 'nx.draw_networkx_edges', (['G', 'pos'], {'edgelist': 'red_edges', 'edge_color': '"""k"""', 'arrows': '(True)', 'width': '(2)'}), "(G, pos, edgelist=red_edges, edge_color='k', arrows=\n True, width=2)\n", (17633, 17704), True, 'import networkx as nx\n'), ((17706, 17793), 'networkx.draw_networkx_edges', 'nx.draw_networkx_edges', (['G', 'pos'], {'style': '"""dotted"""', 'edgelist': 'black_edges', 'arrows': '(False)'}), "(G, pos, style='dotted', edgelist=black_edges, arrows\n =False)\n", (17728, 17793), True, 'import networkx as nx\n'), ((17793, 17808), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (17801, 17808), True, 'import matplotlib.pyplot as plt\n'), ((18230, 18247), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (18241, 18247), False, 'import random\n'), ((3190, 3199), 'gurobipy.LinExpr', 'LinExpr', ([], {}), '()\n', (3197, 3199), False, 'from gurobipy import Model, LinExpr, GRB, GurobiError\n'), ((3447, 3456), 'gurobipy.LinExpr', 'LinExpr', ([], {}), '()\n', (3454, 3456), False, 'from gurobipy import Model, LinExpr, GRB, GurobiError\n'), ((7080, 7089), 'gurobipy.LinExpr', 'LinExpr', ([], {}), '()\n', (7087, 7089), False, 'from gurobipy import Model, LinExpr, GRB, GurobiError\n'), ((7361, 7370), 'gurobipy.LinExpr', 'LinExpr', ([], {}), '()\n', (7368, 7370), False, 'from gurobipy import Model, LinExpr, GRB, GurobiError\n'), ((7583, 7592), 'gurobipy.LinExpr', 'LinExpr', ([], {}), '()\n', (7590, 7592), False, 'from gurobipy import Model, LinExpr, GRB, GurobiError\n'), ((8370, 8379), 'gurobipy.LinExpr', 'LinExpr', ([], {}), '()\n', (8377, 8379), False, 'from gurobipy import Model, LinExpr, GRB, GurobiError\n'), ((21846, 21857), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (21855, 21857), True, 'import matplotlib.pyplot as plt\n'), ((4224, 4233), 'gurobipy.LinExpr', 'LinExpr', ([], {}), '()\n', (4231, 4233), False, 'from gurobipy import Model, LinExpr, GRB, GurobiError\n'), ((4636, 4645), 'gurobipy.LinExpr', 'LinExpr', ([], {}), '()\n', (4643, 4645), False, 'from gurobipy import Model, LinExpr, GRB, GurobiError\n'), ((4955, 4964), 'gurobipy.LinExpr', 'LinExpr', ([], {}), '()\n', (4962, 4964), False, 'from gurobipy import Model, LinExpr, GRB, GurobiError\n'), ((5263, 5272), 'gurobipy.LinExpr', 'LinExpr', ([], {}), '()\n', (5270, 5272), False, 'from gurobipy import Model, LinExpr, GRB, GurobiError\n'), ((16690, 16709), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['"""jet"""'], {}), "('jet')\n", (16702, 16709), True, 'import matplotlib.pyplot as plt\n'), ((18301, 18316), 'random.random', 'random.random', ([], {}), '()\n', (18314, 18316), False, 'import random\n'), ((19078, 19113), 'itertools.product', 'product', (['elementnames', 'elementnames'], {}), '(elementnames, elementnames)\n', 
(19085, 19113), False, 'from itertools import product\n'), ((6022, 6031), 'gurobipy.LinExpr', 'LinExpr', ([], {}), '()\n', (6029, 6031), False, 'from gurobipy import Model, LinExpr, GRB, GurobiError\n'), ((6368, 6377), 'gurobipy.LinExpr', 'LinExpr', ([], {}), '()\n', (6375, 6377), False, 'from gurobipy import Model, LinExpr, GRB, GurobiError\n'), ((6643, 6652), 'gurobipy.LinExpr', 'LinExpr', ([], {}), '()\n', (6650, 6652), False, 'from gurobipy import Model, LinExpr, GRB, GurobiError\n')]
|
import random, sys
def common_member_set(lista1, lista2):
a_set = set(lista1)
b_set = set(lista2)
if (a_set & b_set):
return sorted(list(a_set & b_set))
else:
return []
def remove_list_duplicates(lista):
    cleanlist = []
    # plain loop instead of a side-effecting list comprehension
    for x in lista:
        if x not in cleanlist:
            cleanlist.append(x)
    return cleanlist
def common_member(lista1, lista2):
supportList1 = []
for el in lista1:
if el in lista2:
supportList1.append(el)
supportList1 = remove_list_duplicates(supportList1)
supportList1.sort()
return supportList1
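# Both helpers return the sorted, de-duplicated intersection, e.g.:
#     common_member([1, 1, 3, 2], [2, 3, 5])      # -> [2, 3]
#     common_member_set([1, 1, 3, 2], [2, 3, 5])  # -> [2, 3]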
def random_list():
lista = []
for x in range(random.randint(1,30)):
lista.append(random.randint(1,101))
return lista
#a = [1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 1]
#b = [1, 2, 3, 89, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 1]
a = ["leo", "luca", "pippo", "tania", "topolino"]
b = ["giorgio", "piero", "tania", "leo", "asymov", "pino", "umberto"]
#for i in range(10000):
# a = random_list()
# b = random_list()
# if common_member(a,b) != common_member_set(a,b):
# print(common_member(a,b))
#        print(common_member_set(a,b))
# else:
# sys.stdout.write(".")
print(a)
print(b)
print(common_member(a,b))
print(common_member_set(a,b))
#lista = []
#for x in range(random.randint(1,101)):
# lista.append(random.randint(1,101))
|
[
"random.randint"
] |
[((673, 694), 'random.randint', 'random.randint', (['(1)', '(30)'], {}), '(1, 30)\n', (687, 694), False, 'import random, sys\n'), ((717, 739), 'random.randint', 'random.randint', (['(1)', '(101)'], {}), '(1, 101)\n', (731, 739), False, 'import random, sys\n')]
|
# Copyright 2019 Indiana Biosciences Research Institute (IBRI)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import logging
import csv
import pprint
import gzip
import time
import subprocess
import collections
import tp.utils
from tempfile import gettempdir, NamedTemporaryFile
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "toxapp.settings")
application = get_wsgi_application()
from django.conf import settings
from tp.models import MeasurementTech, IdentifierVsGeneMap, Gene, Study, Experiment, ToxicologyResult, GeneSets,\
GeneSetMember, GeneSetTox, ToxPhenotype, ExperimentVsToxPhenotype
from tp.tasks import load_measurement_tech_gene_map, load_module_scores, load_gsa_scores, load_correl_results
from src.computation import Computation
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
def setup_gene_table():
"""
    Action: opens the gene file, verifies that each row has a value for the
    required columns, replaces blank values with None, then updates the
    matching Gene object in the database or creates it if absent.
    Returns: none
    :rtype: None
"""
gf = os.path.join(settings.BASE_DIR, config['DEFAULT']['gene_file'])
logger.info('Loading orthology gene table from file %s', gf)
required_cols = ['rat_entrez_gene', 'rat_gene_symbol']
createcount = 0
updatecount = 0
rowcount = 0
with open(gf) as f:
dialect = csv.Sniffer().sniff(f.read(1024))
f.seek(0)
reader = csv.DictReader(f, dialect=dialect)
for row in reader:
rowcount += 1
for col in required_cols:
if row.get(col, None) is not None:
pass
else:
logger.critical('Missing value of %s on row %s of file %s', col, rowcount, gf)
exit(1)
# database needs a None for blank fields
for col in row:
if row[col] == '':
row[col] = None
# lookup the exp obj; update if exists create otherwise
gene = Gene.objects.filter(rat_entrez_gene=row['rat_entrez_gene'])
if gene:
gene.update(**row)
updatecount += 1
else:
Gene.objects.create(**row)
createcount += 1
logging.info('Number of genes created: %s; number updated: %s', createcount, updatecount)
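
# The filter/update-or-create pattern above can also be expressed with
# Django's update_or_create(); a hedged, roughly equivalent sketch (note that
# queryset.update() bypasses save() and signals, unlike this form):
#     Gene.objects.update_or_create(
#         rat_entrez_gene=row['rat_entrez_gene'], defaults=row)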
def setup_measurement_tech():
"""
    Action: reads the configured measurement tech and detail settings; if the
    corresponding object or identifier mapping is missing, loads it from the
    measurement tech file.
    Returns: measurement technology object
"""
mt = config['DEFAULT']['measurement_tech']
md = config['DEFAULT']['measurement_detail']
mf = os.path.join(settings.BASE_DIR, config['DEFAULT']['measurement_tech_file'])
logger.info('Checking existence of default measurement technology: %s %s', mt, md)
obj = MeasurementTech.objects.filter(tech=mt, tech_detail=md).first()
mapping = IdentifierVsGeneMap.objects.filter(tech=obj).first()
if not obj or not mapping:
logger.info('Creating measurement technology entry from %s', mf)
recs = load_measurement_tech_gene_map(mf)
if not recs:
logger.critical('Failed to load measurement tech file')
exit(1)
obj = MeasurementTech.objects.filter(tech=mt, tech_detail=md).first()
return obj
def load_DM_TG_experiments():
"""
    Action: opens the experiments file; for each row, gets or creates the study,
    strips the study-level attributes, sets study, results_ready and tech, then
    updates the Experiment object if it exists or creates it otherwise.
Returns: created experiments
"""
ef = os.path.join(settings.BASE_DIR, config['DEFAULT']['experiments_file'])
logger.info('Loading experiments table from file %s', ef)
updatecount = 0
createcount = 0
created_exps = list()
rowcount = 0
with open(ef) as f:
reader = csv.DictReader(f, delimiter='\t')
for row in reader:
rowcount += 1
# lookup the study obj on study name; so little meta data besides name that will not update if exists
study, status = Study.objects.get_or_create(study_name=row['study_name'], source=row['source'], permission='P')
# delete attributes that pertained to study ... don't try loading in exp
del row['source']
del row['study_name']
row['study'] = study
row['results_ready'] = False
row['tech'] = tech_obj
# lookup the exp obj; update if exists create otherwise
exp = Experiment.objects.filter(id=row['id'])
if exp:
exp.update(**row)
updatecount += 1
else:
Experiment.objects.create(**row)
createcount += 1
# exp is a queryset with one instance
created_exps.append(exp.first())
logging.info('Number of experiments created: %s, number updated: %s', createcount, updatecount)
return created_exps
def load_tox_results():
"""
    Action: opens the tox_results file, removes any existing ToxicologyResult
    objects, then creates a result object for each row whose experiment exists.
Returns: none
"""
tf = os.path.join(settings.BASE_DIR, config['DEFAULT']['tox_results_file'])
logger.info('Loading toxicology results from file %s', tf)
createcount = 0
rowcount = 0
# delete existing data if any
ToxicologyResult.objects.all().delete()
with open(tf) as f:
reader = csv.DictReader(f, delimiter='\t')
for row in reader:
rowcount += 1
exp_obj = compute.get_exp_obj(row['experiment'])
if exp_obj is None:
continue
row['experiment'] = exp_obj
ToxicologyResult.objects.create(**row)
createcount += 1
logging.info('Number of Toxicology results created: %s; number read in file %s', createcount, rowcount)
def load_experiments_vs_outcomes():
"""
Action: Opens experiments vs. tox outcome file, deletes existing, and populates model from file.
Returns: none
"""
tf = os.path.join(settings.BASE_DIR, config['DEFAULT']['experiments_vs_outcomes'])
logger.info('Loading experiment vs. tox outcomes from file %s', tf)
createcount = 0
rowcount = 0
# delete existing data if any
ExperimentVsToxPhenotype.objects.all().delete()
with open(tf) as f:
reader = csv.DictReader(f, delimiter='\t')
for row in reader:
rowcount += 1
exp_obj = compute.get_exp_obj(row['experiment'])
if exp_obj is None:
continue
rec = dict()
rec['experiment'] = exp_obj
# confirm that the experiment ID matches exp name in file
if exp_obj.experiment_name != row['experiment_name']:
raise LookupError('Experiment with id {} has different name in file {} vs. db {}'.format(exp_obj.id, row['experiment_name'], exp_obj.experiment_name))
phenotype, _ = ToxPhenotype.objects.get_or_create(name=row['tox'])
rec['tox'] = phenotype
rec['outcome'] = row['outcome']
rec['type'] = row['type']
ExperimentVsToxPhenotype.objects.create(**rec)
createcount += 1
logging.info('Number of experiment vs. tox phenotype results created: %s; number read in file %s', createcount, rowcount)
def load_geneset_vs_tox_associations():
"""
    Action: opens the tox_association_file and removes all pre-existing
    GeneSetTox objects; for each row, resolves the tox phenotype and geneset
    objects and creates the GeneSetTox association.
Returns: none
"""
tf = os.path.join(settings.BASE_DIR, config['DEFAULT']['tox_association_file'])
logger.info('Loading geneset vs toxicology results from file %s', tf)
createcount = 0
rowcount = 0
# delete existing data if any
GeneSetTox.objects.all().delete()
with open(tf) as f:
reader = csv.DictReader(f, delimiter='\t')
for row in reader:
rowcount += 1
phenotype, _ = ToxPhenotype.objects.get_or_create(name=row['tox'])
row['tox'] = phenotype
try:
geneset = GeneSets.objects.get(name=row['geneset'])
except GeneSets.DoesNotExist:
logger.warning('Geneset %s does not exist in database; skipping', row['geneset'])
continue
row['geneset'] = geneset
GeneSetTox.objects.create(**row)
createcount += 1
logging.info('Number of geneset vs tox results created: %s; number read in file %s', createcount, rowcount)
def load_genesets():
"""
    Action: loads gene set definitions from several sources.
      - reads the core_gene_sets file into gsa_info, keyed by unique set name;
      - reads WGCNA module members (module, rat_entrez_gene_id, loading) into
        gsa_genes, raising an error on missing values;
      - reads the RGD vs. GO expansion file, raising on blank values; each gene
        gets weight 1 and GO ids not yet in gsa_info are added to it;
      - reads the MSigDB signature vs. gene file; rows whose sub-category is
        RegNet are attributed to source 'RegNet' rather than 'MSigDB', genes
        get weight 1, and unseen signatures are added to gsa_info;
      - drops non-core gene sets with fewer than 3 or more than 5000 genes;
      - updates or creates GeneSets objects and creates their GeneSetMember
        rows, warning when many member genes are missing from the gene model.
    Returns: none
    Notes: Can this be broken up for readability?
"""
cf = os.path.join(settings.BASE_DIR, config['DEFAULT']['core_gene_sets'])
logger.info('Loading core gene sets from file %s', cf)
gsa_info = collections.defaultdict(dict)
gsa_genes = collections.defaultdict(dict)
with open(cf) as f:
reader = csv.DictReader(f, delimiter='\t')
for row in reader:
nm = row['name']
if gsa_info.get(nm, None) is not None:
logger.fatal('Conflicting names in %s; gene set names must be unique', cf)
raise RuntimeError()
gsa_info[nm] = row
# read module members - overlaps partially with init_modules in Computation class but we need the gene members
# in the database for drill down of visualizations
module_file = os.path.join(settings.BASE_DIR, 'data/WGCNA_modules.txt')
req_attr_m = ['module', 'rat_entrez_gene_id', 'loading']
with open(module_file) as f:
reader = csv.DictReader(f, delimiter='\t')
for row in reader:
if any(row[i] == '' for i in req_attr_m):
logger.fatal('File %s contains undefined values for one or more required attributes %s on line %s',
module_file, ",".join(req_attr_m), row)
raise RuntimeError()
if not row['module'] in gsa_info:
logger.warning('Module %s is not defined in core_sets; unexpected and skipping', row['module'])
continue
gsa_genes[row['module']][int(row['rat_entrez_gene_id'])] = float(row['loading'])
# read GO vs. gene pairs from flat file
go_file = os.path.join(settings.BASE_DIR, 'data/rgd_vs_GO_expansion.txt')
req_attr_go = ['entrez_gene_id', 'GO_id', 'GO_name', 'GO_type']
with open(go_file) as f:
reader = csv.DictReader(f, delimiter='\t')
for row in reader:
if any(row[i] == '' for i in req_attr_go):
logger.fatal('File %s contains undefined values for one or more required attributes %s on line %s', go_file,
",".join(req_attr_go), row)
raise RuntimeError()
gsa_genes[row['GO_id']][int(row['entrez_gene_id'])] = 1
if not row['GO_id'] in gsa_info:
gsa_info[row['GO_id']] = {'name': row['GO_id'], 'desc': row['GO_name'], 'type': row['GO_type'],
'core_set': False, 'source': 'GO'}
# read MSigDB signature vs. gene pairs from flat file
msigdb_file = os.path.join(settings.BASE_DIR, 'data/MSigDB_and_TF_annotation.txt')
req_attr_msigdb = ['sig_name', 'rat_entrez_gene', 'sub_category', 'description']
with open(msigdb_file) as f:
reader = csv.DictReader(f, delimiter='\t')
for row in reader:
if any(row[i] == '' for i in req_attr_msigdb):
logger.fatal('File %s contains undefined values for one or more required attributes %s on line %s',
msigdb_file, ",".join(req_attr_msigdb), row)
raise RuntimeError()
source = 'MSigDB'
# DAS RegNet networks included in this file - use a separate source for these, not MSigDB
if row['sub_category'] == 'RegNet':
source = 'RegNet'
gsa_genes[row['sig_name']][int(row['rat_entrez_gene'])] = 1
if not row['sig_name'] in gsa_info:
gsa_info[row['sig_name']] = {'name': row['sig_name'], 'desc': row['description'], 'type': row['sub_category'],
'core_set': False, 'source': source}
# eliminate gene sets too small / too large
sigs_to_drop = list()
for sig in gsa_info.keys():
if gsa_info[sig]['core_set']:
continue # don't remove a core set ... shouldn't be any anyway that are too small/big
n_genes = len(list(filter(lambda x: compute.get_gene_obj(x) is not None, gsa_genes[sig])))
if n_genes < 3 or n_genes > 5000:
sigs_to_drop.append(sig)
continue
logger.debug('Eliminated %s gene sets based on size constraint', len(sigs_to_drop))
for s in sigs_to_drop:
gsa_info.pop(s)
gsa_genes.pop(s)
updatecount = 0
createcount = 0
for sig in gsa_info:
if sig not in gsa_genes:
logger.error('No genes defined for signature %s; deleting geneset', sig)
continue
row = gsa_info[sig]
# replace empty values with None - DB expects Null
for k in row:
row[k] = None if row[k] == '' else row[k]
if row[k] == 'TRUE':
row[k] = True
if row[k] == 'FALSE':
row[k] = False
geneset = GeneSets.objects.filter(name=row['name']).first()
if geneset:
for (key, value) in row.items():
setattr(geneset, key, value)
geneset.save()
updatecount += 1
else:
geneset = GeneSets.objects.create(**row)
createcount += 1
# delete any existing genes for the signature
geneset.members.clear()
genes_skipped = 0
genes_loaded = 0
for rat_eg in gsa_genes[sig]:
gene = compute.get_gene_obj(rat_eg)
# geneobj will be None for genes not loaded in the gene model, warn on total skipped only
if not gene:
genes_skipped += 1
continue
weight = gsa_genes[sig][rat_eg]
GeneSetMember.objects.create(geneset=geneset, gene=gene, weight=weight)
genes_loaded += 1
        try:
            fraction_loaded = genes_loaded / (genes_loaded + genes_skipped)
        except ZeroDivisionError:
            logger.error('Attempting division by zero; no genes in sig %s', sig)
            continue
if genes_loaded == 0:
logger.error('No genes were added to geneset %s; deleting it', sig)
geneset.delete()
continue
        elif fraction_loaded < 0.7:
logger.warning('Fewer than 70 percent of genes in signature %s were in gene model and loaded: %s skipped and %s loaded',\
sig, genes_skipped, genes_loaded)
elif genes_skipped > 0:
logger.debug('Somes genes in signature %s are not in the gene model and skipped: %s skipped and %s loaded',\
sig, genes_skipped, genes_loaded)
else:
logger.debug('Number of genes loaded for signature %s: %s', sig, genes_loaded)
logging.info('Number of core gene sets created: %s, number updated: %s', createcount, updatecount)
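
# Shapes of the intermediate structures built above, for reference:
#   gsa_info[sig]  -> metadata dict (name, desc, type, core_set, source)
#   gsa_genes[sig] -> {rat_entrez_gene_id: weight}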
def load_fold_change_data():
"""
    Action: reads each gzipped file in groupfc_file_location; for each row,
    resolves the experiment and identifier objects (skipping rows where either
    is missing), appends their ids, and writes the row to a temporary file that
    pgloader then bulk-loads.
Returns: none
"""
pgbin = config['DEFAULT']['pgloader_exec']
if not os.path.isfile(pgbin):
logger.fatal('Configured file for pgloader not accessible %s', pgbin)
exit(1)
fc_loc = os.path.join(settings.BASE_DIR, config['DEFAULT']['groupfc_file_location'])
logger.info('Loading group fold change data from dir %s', fc_loc)
pgloader_conf = os.path.join(settings.BASE_DIR, config['DEFAULT']['pgloader_groupfc_conf'])
cmd = pgbin + ' ' + pgloader_conf
outf = NamedTemporaryFile(delete=False, suffix='.txt', dir=tmpdir)
logger.info('Temporary file for loading fold change data is %s', outf.name)
# set environment variable used by pgloader script
os.environ['PG_LOADER_FILE'] = outf.name
createcount = 0
rowcount = 0
files = os.listdir(fc_loc)
for f in files:
if f[-7:] != ".txt.gz":
continue
fp = os.path.join(fc_loc, f)
logging.info('Working on file %s', fp)
with gzip.open(fp, 'rt') as gz:
reader = csv.reader(gz, delimiter='\t')
# get rid of header
next(reader, None)
for row in reader:
rowcount += 1
exp_id = row.pop(0)
probeset = row.pop(0)
exp_obj = compute.get_exp_obj(exp_id)
if exp_obj is None:
continue
identifier_obj = compute.get_identifier_obj(exp_obj.tech, probeset)
if identifier_obj is None:
continue
createcount += 1
row.append(str(exp_id))
row.append(str(identifier_obj.id))
line = '\t'.join(row) + '\n'
outf.write(str.encode(line))
if createcount > 10000:
logger.info('Starting pgload of group fold change data; may take up to 30 minutes')
logger.debug('Running command %s', cmd)
output = subprocess.getoutput(cmd)
logger.debug('Received output %s', output)
logger.info('Loaded %s records out of %s in files', createcount, rowcount)
os.remove(outf.name)
else:
logger.error('Did not receive at least 10000 records for load of fold change result; anything in %s?', outf.name)
exit(1)
def score_experiments(created_exps):
"""
    Action: initializes the GSA calculation once for the shared technology,
    then for each created experiment retrieves mapped fold change data,
    computes and loads WGCNA module scores and GSA scores, and marks the
    experiment's results as ready; failures are collected and reported.
Returns: none
"""
failed_scoring = collections.defaultdict(list)
# don't keep re-initializing GSA calc; these are all RG230-2 exps
success = compute.init_gsa(tech_obj)
if not success:
logger.critical('Failed to initialize GSA calc')
exit(1)
for exp in created_exps:
logger.info('Scoring fold change data for experiment %s', exp.experiment_name)
logger.debug('Retrieving mapped fold change data')
fc_data = compute.map_fold_change_from_exp(exp)
if fc_data is None:
failed_scoring['fold_change_data'].append(exp.experiment_name)
continue
logger.debug('Calculating WGCNA results')
module_scores = compute.score_modules(fc_data)
if module_scores is None:
failed_scoring['WGCNA_calc'].append(exp.experiment_name)
continue
else:
status = load_module_scores(module_scores)
if status is None:
failed_scoring['WGCNA_load'].append(exp.experiment_name)
continue
logger.debug('Calculating GSA results')
gsa_scores = compute.score_gsa(fc_data, last_tech=tech_obj)
if gsa_scores is None:
failed_scoring['GSA_calc'].append(exp.experiment_name)
continue
else:
status = load_gsa_scores(gsa_scores)
if status is None:
failed_scoring['GSA_load'].append(exp.experiment_name)
continue
# set the status as ready
exp.results_ready = True
exp.save()
if failed_scoring:
logger.warning('The following experiments were not successfully scored: %s', pprint.pformat(failed_scoring))
if __name__ == '__main__':
"""
    Action: See comments
Returns: none
"""
config = tp.utils.parse_config_file()
tech_obj = None
# file loading requires tmp space ... set up
tmpdir = os.path.join(gettempdir(), '{}'.format(hash(time.time())))
os.makedirs(tmpdir)
compute = Computation(tmpdir)
logger.debug('Creating temporary working directory %s', tmpdir)
# step 1 - load gene info the Gene model
setup_gene_table()
# step 2) establish that RG230-2 microarray is avail, otherwise load it
tech_obj = setup_measurement_tech()
# step 3) load the DM/TG studies and experiments
created_exp_list = load_DM_TG_experiments()
# step 4) load the toxicology results file
load_tox_results()
# step 4b) load experiment vs outcome data; new in may 2019
load_experiments_vs_outcomes()
# step 5) load definition of core gene sets
load_genesets()
# step 6) load the toxicology results file
load_geneset_vs_tox_associations()
# step 7) load the fold change data
load_fold_change_data()
# step 8 - iterate through newly added experiments and perform module / GSA scoring
# commented out - temp for resuming loads
#created_exp_list = Experiment.objects.all()
#tech_obj = created_exp_list[0].tech
score_experiments(created_exp_list)
# step 9 - load the pairwise experiment similarities
correlw = compute.calc_exp_correl(created_exp_list, 'WGCNA')
load_correl_results(compute, correlw, 'WGCNA')
correla = compute.calc_exp_correl(created_exp_list, 'RegNet')
load_correl_results(compute, correla, 'RegNet')
correlp = compute.calc_exp_correl(created_exp_list, 'PathNR')
load_correl_results(compute, correlp, 'PathNR')
|
[
"django.core.wsgi.get_wsgi_application",
"tp.models.Experiment.objects.filter",
"tp.tasks.load_measurement_tech_gene_map",
"os.remove",
"tp.models.ToxicologyResult.objects.all",
"csv.reader",
"tp.tasks.load_module_scores",
"tp.models.Study.objects.get_or_create",
"pprint.pformat",
"tp.models.GeneSets.objects.filter",
"csv.Sniffer",
"tp.models.Gene.objects.create",
"tp.models.ExperimentVsToxPhenotype.objects.create",
"tp.models.Experiment.objects.create",
"collections.defaultdict",
"os.path.isfile",
"tp.models.GeneSets.objects.create",
"subprocess.getoutput",
"os.path.join",
"tp.models.ExperimentVsToxPhenotype.objects.all",
"tp.models.GeneSetMember.objects.create",
"tp.models.GeneSets.objects.get",
"tp.tasks.load_correl_results",
"tp.models.MeasurementTech.objects.filter",
"tp.models.GeneSetTox.objects.all",
"os.environ.setdefault",
"tp.models.IdentifierVsGeneMap.objects.filter",
"tp.models.GeneSetTox.objects.create",
"csv.DictReader",
"tp.models.Gene.objects.filter",
"os.listdir",
"tempfile.NamedTemporaryFile",
"gzip.open",
"os.makedirs",
"tp.tasks.load_gsa_scores",
"tempfile.gettempdir",
"src.computation.Computation",
"time.time",
"logging.info",
"tp.models.ToxPhenotype.objects.get_or_create",
"tp.models.ToxicologyResult.objects.create",
"logging.getLogger"
] |
[((839, 905), 'os.environ.setdefault', 'os.environ.setdefault', (['"""DJANGO_SETTINGS_MODULE"""', '"""toxapp.settings"""'], {}), "('DJANGO_SETTINGS_MODULE', 'toxapp.settings')\n", (860, 905), False, 'import os\n'), ((920, 942), 'django.core.wsgi.get_wsgi_application', 'get_wsgi_application', ([], {}), '()\n', (940, 942), False, 'from django.core.wsgi import get_wsgi_application\n'), ((1321, 1348), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1338, 1348), False, 'import logging\n'), ((1696, 1759), 'os.path.join', 'os.path.join', (['settings.BASE_DIR', "config['DEFAULT']['gene_file']"], {}), "(settings.BASE_DIR, config['DEFAULT']['gene_file'])\n", (1708, 1759), False, 'import os\n'), ((2892, 2985), 'logging.info', 'logging.info', (['"""Number of genes created: %s; number updated: %s"""', 'createcount', 'updatecount'], {}), "('Number of genes created: %s; number updated: %s', createcount,\n updatecount)\n", (2904, 2985), False, 'import logging\n'), ((3304, 3379), 'os.path.join', 'os.path.join', (['settings.BASE_DIR', "config['DEFAULT']['measurement_tech_file']"], {}), "(settings.BASE_DIR, config['DEFAULT']['measurement_tech_file'])\n", (3316, 3379), False, 'import os\n'), ((4266, 4336), 'os.path.join', 'os.path.join', (['settings.BASE_DIR', "config['DEFAULT']['experiments_file']"], {}), "(settings.BASE_DIR, config['DEFAULT']['experiments_file'])\n", (4278, 4336), False, 'import os\n'), ((5522, 5621), 'logging.info', 'logging.info', (['"""Number of experiments created: %s, number updated: %s"""', 'createcount', 'updatecount'], {}), "('Number of experiments created: %s, number updated: %s',\n createcount, updatecount)\n", (5534, 5621), False, 'import logging\n'), ((5883, 5953), 'os.path.join', 'os.path.join', (['settings.BASE_DIR', "config['DEFAULT']['tox_results_file']"], {}), "(settings.BASE_DIR, config['DEFAULT']['tox_results_file'])\n", (5895, 5953), False, 'import os\n'), ((6506, 6614), 'logging.info', 'logging.info', (['"""Number of Toxicology results created: %s; number read in file %s"""', 'createcount', 'rowcount'], {}), "('Number of Toxicology results created: %s; number read in file %s'\n , createcount, rowcount)\n", (6518, 6614), False, 'import logging\n'), ((6793, 6870), 'os.path.join', 'os.path.join', (['settings.BASE_DIR', "config['DEFAULT']['experiments_vs_outcomes']"], {}), "(settings.BASE_DIR, config['DEFAULT']['experiments_vs_outcomes'])\n", (6805, 6870), False, 'import os\n'), ((7974, 8105), 'logging.info', 'logging.info', (['"""Number of experiment vs. tox phenotype results created: %s; number read in file %s"""', 'createcount', 'rowcount'], {}), "(\n 'Number of experiment vs. 
tox phenotype results created: %s; number read in file %s'\n , createcount, rowcount)\n", (7986, 8105), False, 'import logging\n'), ((8387, 8461), 'os.path.join', 'os.path.join', (['settings.BASE_DIR', "config['DEFAULT']['tox_association_file']"], {}), "(settings.BASE_DIR, config['DEFAULT']['tox_association_file'])\n", (8399, 8461), False, 'import os\n'), ((9258, 9374), 'logging.info', 'logging.info', (['"""Number of geneset vs tox results created: %s; number read in file %s"""', 'createcount', 'rowcount'], {}), "(\n 'Number of geneset vs tox results created: %s; number read in file %s',\n createcount, rowcount)\n", (9270, 9374), False, 'import logging\n'), ((10271, 10339), 'os.path.join', 'os.path.join', (['settings.BASE_DIR', "config['DEFAULT']['core_gene_sets']"], {}), "(settings.BASE_DIR, config['DEFAULT']['core_gene_sets'])\n", (10283, 10339), False, 'import os\n'), ((10414, 10443), 'collections.defaultdict', 'collections.defaultdict', (['dict'], {}), '(dict)\n', (10437, 10443), False, 'import collections\n'), ((10460, 10489), 'collections.defaultdict', 'collections.defaultdict', (['dict'], {}), '(dict)\n', (10483, 10489), False, 'import collections\n'), ((11022, 11079), 'os.path.join', 'os.path.join', (['settings.BASE_DIR', '"""data/WGCNA_modules.txt"""'], {}), "(settings.BASE_DIR, 'data/WGCNA_modules.txt')\n", (11034, 11079), False, 'import os\n'), ((11866, 11929), 'os.path.join', 'os.path.join', (['settings.BASE_DIR', '"""data/rgd_vs_GO_expansion.txt"""'], {}), "(settings.BASE_DIR, 'data/rgd_vs_GO_expansion.txt')\n", (11878, 11929), False, 'import os\n'), ((12761, 12829), 'os.path.join', 'os.path.join', (['settings.BASE_DIR', '"""data/MSigDB_and_TF_annotation.txt"""'], {}), "(settings.BASE_DIR, 'data/MSigDB_and_TF_annotation.txt')\n", (12773, 12829), False, 'import os\n'), ((16787, 16889), 'logging.info', 'logging.info', (['"""Number of core gene sets created: %s, number updated: %s"""', 'createcount', 'updatecount'], {}), "('Number of core gene sets created: %s, number updated: %s',\n createcount, updatecount)\n", (16799, 16889), False, 'import logging\n'), ((17363, 17438), 'os.path.join', 'os.path.join', (['settings.BASE_DIR', "config['DEFAULT']['groupfc_file_location']"], {}), "(settings.BASE_DIR, config['DEFAULT']['groupfc_file_location'])\n", (17375, 17438), False, 'import os\n'), ((17530, 17605), 'os.path.join', 'os.path.join', (['settings.BASE_DIR', "config['DEFAULT']['pgloader_groupfc_conf']"], {}), "(settings.BASE_DIR, config['DEFAULT']['pgloader_groupfc_conf'])\n", (17542, 17605), False, 'import os\n'), ((17655, 17714), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {'delete': '(False)', 'suffix': '""".txt"""', 'dir': 'tmpdir'}), "(delete=False, suffix='.txt', dir=tmpdir)\n", (17673, 17714), False, 'from tempfile import gettempdir, NamedTemporaryFile\n'), ((17945, 17963), 'os.listdir', 'os.listdir', (['fc_loc'], {}), '(fc_loc)\n', (17955, 17963), False, 'import os\n'), ((19807, 19836), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (19830, 19836), False, 'import collections\n'), ((21761, 21780), 'os.makedirs', 'os.makedirs', (['tmpdir'], {}), '(tmpdir)\n', (21772, 21780), False, 'import os\n'), ((21795, 21814), 'src.computation.Computation', 'Computation', (['tmpdir'], {}), '(tmpdir)\n', (21806, 21814), False, 'from src.computation import Computation\n'), ((22959, 23005), 'tp.tasks.load_correl_results', 'load_correl_results', (['compute', 'correlw', '"""WGCNA"""'], {}), "(compute, correlw, 'WGCNA')\n", (22978, 23005), False, 
'from tp.tasks import load_measurement_tech_gene_map, load_module_scores, load_gsa_scores, load_correl_results\n'), ((23077, 23124), 'tp.tasks.load_correl_results', 'load_correl_results', (['compute', 'correla', '"""RegNet"""'], {}), "(compute, correla, 'RegNet')\n", (23096, 23124), False, 'from tp.tasks import load_measurement_tech_gene_map, load_module_scores, load_gsa_scores, load_correl_results\n'), ((23196, 23243), 'tp.tasks.load_correl_results', 'load_correl_results', (['compute', 'correlp', '"""PathNR"""'], {}), "(compute, correlp, 'PathNR')\n", (23215, 23243), False, 'from tp.tasks import load_measurement_tech_gene_map, load_module_scores, load_gsa_scores, load_correl_results\n'), ((2052, 2086), 'csv.DictReader', 'csv.DictReader', (['f'], {'dialect': 'dialect'}), '(f, dialect=dialect)\n', (2066, 2086), False, 'import csv\n'), ((3728, 3762), 'tp.tasks.load_measurement_tech_gene_map', 'load_measurement_tech_gene_map', (['mf'], {}), '(mf)\n', (3758, 3762), False, 'from tp.tasks import load_measurement_tech_gene_map, load_module_scores, load_gsa_scores, load_correl_results\n'), ((4523, 4556), 'csv.DictReader', 'csv.DictReader', (['f'], {'delimiter': '"""\t"""'}), "(f, delimiter='\\t')\n", (4537, 4556), False, 'import csv\n'), ((6174, 6207), 'csv.DictReader', 'csv.DictReader', (['f'], {'delimiter': '"""\t"""'}), "(f, delimiter='\\t')\n", (6188, 6207), False, 'import csv\n'), ((7108, 7141), 'csv.DictReader', 'csv.DictReader', (['f'], {'delimiter': '"""\t"""'}), "(f, delimiter='\\t')\n", (7122, 7141), False, 'import csv\n'), ((8687, 8720), 'csv.DictReader', 'csv.DictReader', (['f'], {'delimiter': '"""\t"""'}), "(f, delimiter='\\t')\n", (8701, 8720), False, 'import csv\n'), ((10532, 10565), 'csv.DictReader', 'csv.DictReader', (['f'], {'delimiter': '"""\t"""'}), "(f, delimiter='\\t')\n", (10546, 10565), False, 'import csv\n'), ((11191, 11224), 'csv.DictReader', 'csv.DictReader', (['f'], {'delimiter': '"""\t"""'}), "(f, delimiter='\\t')\n", (11205, 11224), False, 'import csv\n'), ((12044, 12077), 'csv.DictReader', 'csv.DictReader', (['f'], {'delimiter': '"""\t"""'}), "(f, delimiter='\\t')\n", (12058, 12077), False, 'import csv\n'), ((12965, 12998), 'csv.DictReader', 'csv.DictReader', (['f'], {'delimiter': '"""\t"""'}), "(f, delimiter='\\t')\n", (12979, 12998), False, 'import csv\n'), ((17232, 17253), 'os.path.isfile', 'os.path.isfile', (['pgbin'], {}), '(pgbin)\n', (17246, 17253), False, 'import os\n'), ((18053, 18076), 'os.path.join', 'os.path.join', (['fc_loc', 'f'], {}), '(fc_loc, f)\n', (18065, 18076), False, 'import os\n'), ((18085, 18123), 'logging.info', 'logging.info', (['"""Working on file %s"""', 'fp'], {}), "('Working on file %s', fp)\n", (18097, 18123), False, 'import logging\n'), ((19095, 19120), 'subprocess.getoutput', 'subprocess.getoutput', (['cmd'], {}), '(cmd)\n', (19115, 19120), False, 'import subprocess\n'), ((19263, 19283), 'os.remove', 'os.remove', (['outf.name'], {}), '(outf.name)\n', (19272, 19283), False, 'import os\n'), ((21711, 21723), 'tempfile.gettempdir', 'gettempdir', ([], {}), '()\n', (21721, 21723), False, 'from tempfile import gettempdir, NamedTemporaryFile\n'), ((2644, 2703), 'tp.models.Gene.objects.filter', 'Gene.objects.filter', ([], {'rat_entrez_gene': "row['rat_entrez_gene']"}), "(rat_entrez_gene=row['rat_entrez_gene'])\n", (2663, 2703), False, 'from tp.models import MeasurementTech, IdentifierVsGeneMap, Gene, Study, Experiment, ToxicologyResult, GeneSets, GeneSetMember, GeneSetTox, ToxPhenotype, ExperimentVsToxPhenotype\n'), ((3478, 3533), 
'tp.models.MeasurementTech.objects.filter', 'MeasurementTech.objects.filter', ([], {'tech': 'mt', 'tech_detail': 'md'}), '(tech=mt, tech_detail=md)\n', (3508, 3533), False, 'from tp.models import MeasurementTech, IdentifierVsGeneMap, Gene, Study, Experiment, ToxicologyResult, GeneSets, GeneSetMember, GeneSetTox, ToxPhenotype, ExperimentVsToxPhenotype\n'), ((3556, 3600), 'tp.models.IdentifierVsGeneMap.objects.filter', 'IdentifierVsGeneMap.objects.filter', ([], {'tech': 'obj'}), '(tech=obj)\n', (3590, 3600), False, 'from tp.models import MeasurementTech, IdentifierVsGeneMap, Gene, Study, Experiment, ToxicologyResult, GeneSets, GeneSetMember, GeneSetTox, ToxPhenotype, ExperimentVsToxPhenotype\n'), ((4753, 4853), 'tp.models.Study.objects.get_or_create', 'Study.objects.get_or_create', ([], {'study_name': "row['study_name']", 'source': "row['source']", 'permission': '"""P"""'}), "(study_name=row['study_name'], source=row[\n 'source'], permission='P')\n", (4780, 4853), False, 'from tp.models import MeasurementTech, IdentifierVsGeneMap, Gene, Study, Experiment, ToxicologyResult, GeneSets, GeneSetMember, GeneSetTox, ToxPhenotype, ExperimentVsToxPhenotype\n'), ((5194, 5233), 'tp.models.Experiment.objects.filter', 'Experiment.objects.filter', ([], {'id': "row['id']"}), "(id=row['id'])\n", (5219, 5233), False, 'from tp.models import MeasurementTech, IdentifierVsGeneMap, Gene, Study, Experiment, ToxicologyResult, GeneSets, GeneSetMember, GeneSetTox, ToxPhenotype, ExperimentVsToxPhenotype\n'), ((6092, 6122), 'tp.models.ToxicologyResult.objects.all', 'ToxicologyResult.objects.all', ([], {}), '()\n', (6120, 6122), False, 'from tp.models import MeasurementTech, IdentifierVsGeneMap, Gene, Study, Experiment, ToxicologyResult, GeneSets, GeneSetMember, GeneSetTox, ToxPhenotype, ExperimentVsToxPhenotype\n'), ((6433, 6471), 'tp.models.ToxicologyResult.objects.create', 'ToxicologyResult.objects.create', ([], {}), '(**row)\n', (6464, 6471), False, 'from tp.models import MeasurementTech, IdentifierVsGeneMap, Gene, Study, Experiment, ToxicologyResult, GeneSets, GeneSetMember, GeneSetTox, ToxPhenotype, ExperimentVsToxPhenotype\n'), ((7018, 7056), 'tp.models.ExperimentVsToxPhenotype.objects.all', 'ExperimentVsToxPhenotype.objects.all', ([], {}), '()\n', (7054, 7056), False, 'from tp.models import MeasurementTech, IdentifierVsGeneMap, Gene, Study, Experiment, ToxicologyResult, GeneSets, GeneSetMember, GeneSetTox, ToxPhenotype, ExperimentVsToxPhenotype\n'), ((7712, 7763), 'tp.models.ToxPhenotype.objects.get_or_create', 'ToxPhenotype.objects.get_or_create', ([], {'name': "row['tox']"}), "(name=row['tox'])\n", (7746, 7763), False, 'from tp.models import MeasurementTech, IdentifierVsGeneMap, Gene, Study, Experiment, ToxicologyResult, GeneSets, GeneSetMember, GeneSetTox, ToxPhenotype, ExperimentVsToxPhenotype\n'), ((7893, 7939), 'tp.models.ExperimentVsToxPhenotype.objects.create', 'ExperimentVsToxPhenotype.objects.create', ([], {}), '(**rec)\n', (7932, 7939), False, 'from tp.models import MeasurementTech, IdentifierVsGeneMap, Gene, Study, Experiment, ToxicologyResult, GeneSets, GeneSetMember, GeneSetTox, ToxPhenotype, ExperimentVsToxPhenotype\n'), ((8611, 8635), 'tp.models.GeneSetTox.objects.all', 'GeneSetTox.objects.all', ([], {}), '()\n', (8633, 8635), False, 'from tp.models import MeasurementTech, IdentifierVsGeneMap, Gene, Study, Experiment, ToxicologyResult, GeneSets, GeneSetMember, GeneSetTox, ToxPhenotype, ExperimentVsToxPhenotype\n'), ((8802, 8853), 'tp.models.ToxPhenotype.objects.get_or_create', 
'ToxPhenotype.objects.get_or_create', ([], {'name': "row['tox']"}), "(name=row['tox'])\n", (8836, 8853), False, 'from tp.models import MeasurementTech, IdentifierVsGeneMap, Gene, Study, Experiment, ToxicologyResult, GeneSets, GeneSetMember, GeneSetTox, ToxPhenotype, ExperimentVsToxPhenotype\n'), ((9191, 9223), 'tp.models.GeneSetTox.objects.create', 'GeneSetTox.objects.create', ([], {}), '(**row)\n', (9216, 9223), False, 'from tp.models import MeasurementTech, IdentifierVsGeneMap, Gene, Study, Experiment, ToxicologyResult, GeneSets, GeneSetMember, GeneSetTox, ToxPhenotype, ExperimentVsToxPhenotype\n'), ((15238, 15268), 'tp.models.GeneSets.objects.create', 'GeneSets.objects.create', ([], {}), '(**row)\n', (15261, 15268), False, 'from tp.models import MeasurementTech, IdentifierVsGeneMap, Gene, Study, Experiment, ToxicologyResult, GeneSets, GeneSetMember, GeneSetTox, ToxPhenotype, ExperimentVsToxPhenotype\n'), ((15767, 15838), 'tp.models.GeneSetMember.objects.create', 'GeneSetMember.objects.create', ([], {'geneset': 'geneset', 'gene': 'gene', 'weight': 'weight'}), '(geneset=geneset, gene=gene, weight=weight)\n', (15795, 15838), False, 'from tp.models import MeasurementTech, IdentifierVsGeneMap, Gene, Study, Experiment, ToxicologyResult, GeneSets, GeneSetMember, GeneSetTox, ToxPhenotype, ExperimentVsToxPhenotype\n'), ((18138, 18157), 'gzip.open', 'gzip.open', (['fp', '"""rt"""'], {}), "(fp, 'rt')\n", (18147, 18157), False, 'import gzip\n'), ((18187, 18217), 'csv.reader', 'csv.reader', (['gz'], {'delimiter': '"""\t"""'}), "(gz, delimiter='\\t')\n", (18197, 18217), False, 'import csv\n'), ((20665, 20698), 'tp.tasks.load_module_scores', 'load_module_scores', (['module_scores'], {}), '(module_scores)\n', (20683, 20698), False, 'from tp.tasks import load_measurement_tech_gene_map, load_module_scores, load_gsa_scores, load_correl_results\n'), ((21099, 21126), 'tp.tasks.load_gsa_scores', 'load_gsa_scores', (['gsa_scores'], {}), '(gsa_scores)\n', (21114, 21126), False, 'from tp.tasks import load_measurement_tech_gene_map, load_module_scores, load_gsa_scores, load_correl_results\n'), ((21450, 21480), 'pprint.pformat', 'pprint.pformat', (['failed_scoring'], {}), '(failed_scoring)\n', (21464, 21480), False, 'import pprint\n'), ((1983, 1996), 'csv.Sniffer', 'csv.Sniffer', ([], {}), '()\n', (1994, 1996), False, 'import csv\n'), ((2827, 2853), 'tp.models.Gene.objects.create', 'Gene.objects.create', ([], {}), '(**row)\n', (2846, 2853), False, 'from tp.models import MeasurementTech, IdentifierVsGeneMap, Gene, Study, Experiment, ToxicologyResult, GeneSets, GeneSetMember, GeneSetTox, ToxPhenotype, ExperimentVsToxPhenotype\n'), ((3887, 3942), 'tp.models.MeasurementTech.objects.filter', 'MeasurementTech.objects.filter', ([], {'tech': 'mt', 'tech_detail': 'md'}), '(tech=mt, tech_detail=md)\n', (3917, 3942), False, 'from tp.models import MeasurementTech, IdentifierVsGeneMap, Gene, Study, Experiment, ToxicologyResult, GeneSets, GeneSetMember, GeneSetTox, ToxPhenotype, ExperimentVsToxPhenotype\n'), ((5355, 5387), 'tp.models.Experiment.objects.create', 'Experiment.objects.create', ([], {}), '(**row)\n', (5380, 5387), False, 'from tp.models import MeasurementTech, IdentifierVsGeneMap, Gene, Study, Experiment, ToxicologyResult, GeneSets, GeneSetMember, GeneSetTox, ToxPhenotype, ExperimentVsToxPhenotype\n'), ((8933, 8974), 'tp.models.GeneSets.objects.get', 'GeneSets.objects.get', ([], {'name': "row['geneset']"}), "(name=row['geneset'])\n", (8953, 8974), False, 'from tp.models import MeasurementTech, IdentifierVsGeneMap, 
Gene, Study, Experiment, ToxicologyResult, GeneSets, GeneSetMember, GeneSetTox, ToxPhenotype, ExperimentVsToxPhenotype\n'), ((14986, 15027), 'tp.models.GeneSets.objects.filter', 'GeneSets.objects.filter', ([], {'name': "row['name']"}), "(name=row['name'])\n", (15009, 15027), False, 'from tp.models import MeasurementTech, IdentifierVsGeneMap, Gene, Study, Experiment, ToxicologyResult, GeneSets, GeneSetMember, GeneSetTox, ToxPhenotype, ExperimentVsToxPhenotype\n'), ((21742, 21753), 'time.time', 'time.time', ([], {}), '()\n', (21751, 21753), False, 'import time\n')]
|
from . import views as frontendview
from django.urls import path
from django.conf.urls import url, include, static
from django.contrib.auth import views as auth_views
from django.contrib import admin
urlpatterns = [
    path('', frontendview.home, name='home'),
url(r'^signup/$', frontendview.signup, name='signup'),
url(r'^login/', auth_views.LoginView.as_view(template_name='login.html'), name='login'),
url(r'^logout/', auth_views.LogoutView.as_view(template_name='login.html'),name='logout'),
url(r'^nuevoevento/$', frontendview.newEvento.as_view(), name='nuevo'),
url(r'^ListaEventos$', frontendview.EventosL.as_view(), name='listaEventos'),
url(r'^(?P<pk>\d+)$', frontendview.detalleEvento.as_view(), name='detalle'),
url(r'^editar/(?P<pk>\d+)$', frontendview.modificacionEvento.as_view(), name='editar'),
url(r'^borrar/(?P<pk>\d+)$', frontendview.borrarEvento.as_view(), name='borrar'),
]
|
[
"django.contrib.auth.views.LogoutView.as_view",
"django.contrib.auth.views.LoginView.as_view",
"django.conf.urls.url",
"django.urls.path"
] |
[((220, 260), 'django.urls.path', 'path', (['""""""', 'frontendview.home'], {'name': '"""home"""'}), "('', frontendview.home, name='home')\n", (224, 260), False, 'from django.urls import path\n'), ((264, 316), 'django.conf.urls.url', 'url', (['"""^signup/$"""', 'frontendview.signup'], {'name': '"""signup"""'}), "('^signup/$', frontendview.signup, name='signup')\n", (267, 316), False, 'from django.conf.urls import url, include, static\n'), ((339, 395), 'django.contrib.auth.views.LoginView.as_view', 'auth_views.LoginView.as_view', ([], {'template_name': '"""login.html"""'}), "(template_name='login.html')\n", (367, 395), True, 'from django.contrib.auth import views as auth_views\n'), ((433, 490), 'django.contrib.auth.views.LogoutView.as_view', 'auth_views.LogoutView.as_view', ([], {'template_name': '"""login.html"""'}), "(template_name='login.html')\n", (462, 490), True, 'from django.contrib.auth import views as auth_views\n')]
|
import numpy as np
from icecream import ic
if __name__ == '__main__':
length = 12
size = 6
a = np.ones(size) * -1
counter = 0
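    # Fill every slot except the last with its own index; the last slot keeps
    # the -1 sentinel, and 'counter' records how many values were truncated.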
for i in range(size):
if i < size-1:
a[i] = i
else:
remain = length - (i+1)
counter += remain
a_mask = np.where(a==-1)[0]
idx = a_mask[0]
ic(a)
ic(a_mask)
ic(idx)
ic(counter)
n =len(a) + counter
ic(n)
p = np.empty_like(a)
p[: idx] = 1/n
p[idx]= 1 - np.sum(p[:idx])
ic(p)
assert np.sum(p) == 1
samp = np.random.choice(a, size=size, replace=True, p=p)
ic(samp)
|
[
"icecream.ic",
"numpy.sum",
"numpy.empty_like",
"numpy.ones",
"numpy.where",
"numpy.random.choice"
] |
[((349, 354), 'icecream.ic', 'ic', (['a'], {}), '(a)\n', (351, 354), False, 'from icecream import ic\n'), ((359, 369), 'icecream.ic', 'ic', (['a_mask'], {}), '(a_mask)\n', (361, 369), False, 'from icecream import ic\n'), ((374, 381), 'icecream.ic', 'ic', (['idx'], {}), '(idx)\n', (376, 381), False, 'from icecream import ic\n'), ((386, 397), 'icecream.ic', 'ic', (['counter'], {}), '(counter)\n', (388, 397), False, 'from icecream import ic\n'), ((426, 431), 'icecream.ic', 'ic', (['n'], {}), '(n)\n', (428, 431), False, 'from icecream import ic\n'), ((440, 456), 'numpy.empty_like', 'np.empty_like', (['a'], {}), '(a)\n', (453, 456), True, 'import numpy as np\n'), ((512, 517), 'icecream.ic', 'ic', (['p'], {}), '(p)\n', (514, 517), False, 'from icecream import ic\n'), ((555, 604), 'numpy.random.choice', 'np.random.choice', (['a'], {'size': 'size', 'replace': '(True)', 'p': 'p'}), '(a, size=size, replace=True, p=p)\n', (571, 604), True, 'import numpy as np\n'), ((609, 617), 'icecream.ic', 'ic', (['samp'], {}), '(samp)\n', (611, 617), False, 'from icecream import ic\n'), ((108, 121), 'numpy.ones', 'np.ones', (['size'], {}), '(size)\n', (115, 121), True, 'import numpy as np\n'), ((306, 323), 'numpy.where', 'np.where', (['(a == -1)'], {}), '(a == -1)\n', (314, 323), True, 'import numpy as np\n'), ((492, 507), 'numpy.sum', 'np.sum', (['p[:idx]'], {}), '(p[:idx])\n', (498, 507), True, 'import numpy as np\n'), ((529, 538), 'numpy.sum', 'np.sum', (['p'], {}), '(p)\n', (535, 538), True, 'import numpy as np\n')]
|
#!/usr/bin/python3
#-*- coding: utf-8 -*-
# coding: utf-8
# pylint: disable=C0103,C0111,W0621
#
# Freebox API SDK / Docs: http://dev.freebox.fr/sdk/os/login/
# version 8
#
from __future__ import print_function
from __future__ import unicode_literals
import os
import subprocess
import sys
# # To install the latest version of Unidecode from the Python package index, use
# # these commands:
# # $ pip install unidecode
# from unidecode import unidecode
import application_config as app_cfg
import export.application_infos
import export.connection
import export.lan
import export.storage
import export.switch
import export.system
import export.wifi
import freebox.api as freebox_api
# ##############################################################################
# ##############################################################################
import logging
FORMAT = "[%(levelname)6s][%(filename)s +%(lineno)s - %(funcName)20s() ] %(message)s"
logging.basicConfig(format=FORMAT)
log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)
# ##############################################################################
# ##############################################################################
# APPLICATION_VERSION = "1.0.0 2021/03/22"
# Get application version from Git description
APPLICATION_VERSION = "no_description"
try:
APPLICATION_VERSION = subprocess.check_output(
["git", "describe", "--long", "--tags", "--always", "--dirty"],
cwd = os.path.dirname(os.path.realpath(__file__))
).strip().decode('utf-8')
except Exception:
APPLICATION_VERSION = "(git describe error)"
# ##############################################################################
# ##############################################################################
def get_creation_date(file):
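    # Note: st_mtime is the last-modification time; it is used here as a
    # portable stand-in for the file creation time.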
stat = os.stat(file)
return stat.st_mtime
# ##############################################################################
# ##############################################################################
def do_checkRegisterStatus():
if not freebox_api.isRegistered():
print("Status: invalid config, auth not done.")
print("Please run `%s --register` to register app." % sys.argv[0])
return False
else:
print("Status: auth already done")
return True
# ##############################################################################
# ##############################################################################
def do_export():
# Set the measurement name's prefix
export._generic.setMeasurementName(app_cfg.measurement_namePrefix())
# Set tags common to all metrics
lCommonTagsDict = {
'host' : app_cfg.freebox_hostname()
}
export._generic.setTagsCommon_dict(lCommonTagsDict)
# Fetch session_token
freebox_api.session_open(
app_cfg.app_id()
)
# --------------------------------------------------------------------------
# Export
# --------------------------------------------------------------------------
if app_cfg.export_all():
export.application_infos.all(__file__, APPLICATION_VERSION)
export.connection.all()
export.lan.config()
export.lan.interfaces()
export.lan.interfaces_hosts()
export.switch.ports_stats()
export.switch.status()
export.system.all()
export.storage.disk()
export.wifi.accessPoints_stations()
else:
if app_cfg.export_application_infos():
export.application_infos.all(__file__, APPLICATION_VERSION)
if app_cfg.export_connection():
export.connection.all()
if app_cfg.export_lan_config():
export.lan.config()
if app_cfg.export_lan_interfaces():
export.lan.interfaces()
if app_cfg.export_lan_interfaces_hosts():
export.lan.interfaces_hosts()
if app_cfg.export_switch_ports_stats():
export.switch.ports_stats()
if app_cfg.export_switch_status():
export.switch.status()
if app_cfg.export_system():
export.system.all()
if app_cfg.export_storage_disk():
export.storage.disk()
if app_cfg.export_wifi_usage():
export.wifi.accessPoints_stations()
# ##############################################################################
# ##############################################################################
# Main
def main():
#
# Initialize the application
#
# Read the configuration from env and command line.
app_cfg.init()
app_cfg.parse_args()
# Initialize the module used to interact with the Freebox API.
freebox_api.init(
pFreeboxHostname = app_cfg.freebox_hostname(),
pAppId = app_cfg.app_id(),
pAppName = app_cfg.app_name(),
pDeviceName = app_cfg.device_name()
)
#
# Execute actions depending on command-line flags
#
if app_cfg.application_register():
# Register the application with the Freebox
freebox_api.login_registerApplication(
app_cfg.app_id(),
app_cfg.app_name(),
APPLICATION_VERSION,
app_cfg.device_name()
)
elif app_cfg.application_registerStatus():
# Check the application registration status with the Freebox
if do_checkRegisterStatus() is True:
return 0
else:
return 1
else:
# Check the application registration status with the Freebox
if not freebox_api.isRegistered():
return 1
else:
# Export metrics
do_export()
return 0
# ##############################################################################
# ##############################################################################
if __name__ == '__main__':
exit( main() )
# log.info ("Application name: %s" % app_cfg.app_name() )
# ##############################################################################
# ##############################################################################
|
[
"application_config.measurement_namePrefix",
"application_config.export_switch_ports_stats",
"application_config.export_storage_disk",
"application_config.app_id",
"application_config.export_switch_status",
"application_config.export_wifi_usage",
"application_config.device_name",
"application_config.parse_args",
"os.stat",
"application_config.export_application_infos",
"os.path.realpath",
"application_config.export_lan_config",
"application_config.export_system",
"application_config.init",
"application_config.export_lan_interfaces",
"logging.basicConfig",
"application_config.export_all",
"freebox.api.isRegistered",
"application_config.export_lan_interfaces_hosts",
"application_config.freebox_hostname",
"application_config.application_register",
"application_config.app_name",
"application_config.application_registerStatus",
"application_config.export_connection",
"logging.getLogger"
] |
[((956, 990), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': 'FORMAT'}), '(format=FORMAT)\n', (975, 990), False, 'import logging\n'), ((998, 1025), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1015, 1025), False, 'import logging\n'), ((1839, 1852), 'os.stat', 'os.stat', (['file'], {}), '(file)\n', (1846, 1852), False, 'import os\n'), ((3080, 3100), 'application_config.export_all', 'app_cfg.export_all', ([], {}), '()\n', (3098, 3100), True, 'import application_config as app_cfg\n'), ((4620, 4634), 'application_config.init', 'app_cfg.init', ([], {}), '()\n', (4632, 4634), True, 'import application_config as app_cfg\n'), ((4639, 4659), 'application_config.parse_args', 'app_cfg.parse_args', ([], {}), '()\n', (4657, 4659), True, 'import application_config as app_cfg\n'), ((5017, 5047), 'application_config.application_register', 'app_cfg.application_register', ([], {}), '()\n', (5045, 5047), True, 'import application_config as app_cfg\n'), ((2083, 2109), 'freebox.api.isRegistered', 'freebox_api.isRegistered', ([], {}), '()\n', (2107, 2109), True, 'import freebox.api as freebox_api\n'), ((2597, 2629), 'application_config.measurement_namePrefix', 'app_cfg.measurement_namePrefix', ([], {}), '()\n', (2627, 2629), True, 'import application_config as app_cfg\n'), ((2715, 2741), 'application_config.freebox_hostname', 'app_cfg.freebox_hostname', ([], {}), '()\n', (2739, 2741), True, 'import application_config as app_cfg\n'), ((2870, 2886), 'application_config.app_id', 'app_cfg.app_id', ([], {}), '()\n', (2884, 2886), True, 'import application_config as app_cfg\n'), ((3491, 3525), 'application_config.export_application_infos', 'app_cfg.export_application_infos', ([], {}), '()\n', (3523, 3525), True, 'import application_config as app_cfg\n'), ((3612, 3639), 'application_config.export_connection', 'app_cfg.export_connection', ([], {}), '()\n', (3637, 3639), True, 'import application_config as app_cfg\n'), ((3690, 3717), 'application_config.export_lan_config', 'app_cfg.export_lan_config', ([], {}), '()\n', (3715, 3717), True, 'import application_config as app_cfg\n'), ((3763, 3794), 'application_config.export_lan_interfaces', 'app_cfg.export_lan_interfaces', ([], {}), '()\n', (3792, 3794), True, 'import application_config as app_cfg\n'), ((3844, 3881), 'application_config.export_lan_interfaces_hosts', 'app_cfg.export_lan_interfaces_hosts', ([], {}), '()\n', (3879, 3881), True, 'import application_config as app_cfg\n'), ((3937, 3972), 'application_config.export_switch_ports_stats', 'app_cfg.export_switch_ports_stats', ([], {}), '()\n', (3970, 3972), True, 'import application_config as app_cfg\n'), ((4026, 4056), 'application_config.export_switch_status', 'app_cfg.export_switch_status', ([], {}), '()\n', (4054, 4056), True, 'import application_config as app_cfg\n'), ((4105, 4128), 'application_config.export_system', 'app_cfg.export_system', ([], {}), '()\n', (4126, 4128), True, 'import application_config as app_cfg\n'), ((4174, 4203), 'application_config.export_storage_disk', 'app_cfg.export_storage_disk', ([], {}), '()\n', (4201, 4203), True, 'import application_config as app_cfg\n'), ((4251, 4278), 'application_config.export_wifi_usage', 'app_cfg.export_wifi_usage', ([], {}), '()\n', (4276, 4278), True, 'import application_config as app_cfg\n'), ((5297, 5333), 'application_config.application_registerStatus', 'app_cfg.application_registerStatus', ([], {}), '()\n', (5331, 5333), True, 'import application_config as app_cfg\n'), ((4782, 4808), 
'application_config.freebox_hostname', 'app_cfg.freebox_hostname', ([], {}), '()\n', (4806, 4808), True, 'import application_config as app_cfg\n'), ((4830, 4846), 'application_config.app_id', 'app_cfg.app_id', ([], {}), '()\n', (4844, 4846), True, 'import application_config as app_cfg\n'), ((4870, 4888), 'application_config.app_name', 'app_cfg.app_name', ([], {}), '()\n', (4886, 4888), True, 'import application_config as app_cfg\n'), ((4912, 4933), 'application_config.device_name', 'app_cfg.device_name', ([], {}), '()\n', (4931, 4933), True, 'import application_config as app_cfg\n'), ((5160, 5176), 'application_config.app_id', 'app_cfg.app_id', ([], {}), '()\n', (5174, 5176), True, 'import application_config as app_cfg\n'), ((5190, 5208), 'application_config.app_name', 'app_cfg.app_name', ([], {}), '()\n', (5206, 5208), True, 'import application_config as app_cfg\n'), ((5255, 5276), 'application_config.device_name', 'app_cfg.device_name', ([], {}), '()\n', (5274, 5276), True, 'import application_config as app_cfg\n'), ((5600, 5626), 'freebox.api.isRegistered', 'freebox_api.isRegistered', ([], {}), '()\n', (5624, 5626), True, 'import freebox.api as freebox_api\n'), ((1516, 1542), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (1532, 1542), False, 'import os\n')]
|
import logging
import os
from time import sleep
import telegram
from telegram.ext import Updater, CommandHandler
from settings import *
class DailyBot:
def __init__(self, token):
logging.basicConfig(
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
level=logging.INFO,
)
self.logger = logging.getLogger("LOG")
self.logger.info("Starting BOT.")
self.updater = Updater(token)
self.dispatcher = self.updater.dispatcher
self.job = self.updater.job_queue
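        # Run send_daily every weekday (Mon-Fri) at DAILY_TIME from settings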
self.job_daily = self.job.run_daily(self.send_daily, time=DAILY_TIME, days=(0, 1, 2, 3, 4))
start_handler = CommandHandler("start", self.send_start)
self.dispatcher.add_handler(start_handler)
example_handler = CommandHandler("example", self.send_example)
self.dispatcher.add_handler(example_handler)
daily_handler = CommandHandler("daily", self.send_daily)
self.dispatcher.add_handler(daily_handler)
self.dispatcher.add_error_handler(self.error)
@staticmethod
def send_type_action(chatbot, update):
"""
Shows status typing when sending message
"""
chatbot.send_chat_action(
chat_id=update.message.chat_id, action=telegram.ChatAction.TYPING
)
sleep(1)
def send_start(self, chatbot, update):
"""
Start command to receive /start message on Telegram.
@BOT = information about the BOT
@update = the user info.
"""
self.logger.info("Start command received.")
self.logger.info(f"{update}")
self.send_type_action(chatbot, update)
chat_id = update.message["chat"]["id"]
if update.message["chat"]["type"] == "private":
name = update.message["chat"]["first_name"]
else:
name = update.message["from_user"]["first_name"]
with open("msg/start.md") as start_file:
try:
start_text = start_file.read()
start_text = start_text.replace("{{name}}", name)
chatbot.send_message(
chat_id=chat_id,
text=start_text,
parse_mode=telegram.ParseMode.MARKDOWN,
)
except Exception as error:
self.logger.error(error)
try:
            chat_ids = [int(i) for i in CHAT_ID]  # CHAT_ID comes from settings
if chat_id not in chat_ids:
with open("msg/error.md") as error:
error = error.read()
chatbot.send_message(
chat_id=chat_id,
text=error,
parse_mode=telegram.ParseMode.MARKDOWN,
)
except Exception as error:
self.logger.error(error)
return 0
def send_daily(self, chatbot, job):
"""
Sends text on `daily.md` daily to groups on CHAT_ID
@BOT = information about the BOT
@update = the user info.
"""
        chat_ids = [int(i) for i in CHAT_ID]  # CHAT_ID is imported from settings
for chat_id in chat_ids:
self.logger.info(f"Sending daily to {chat_id}")
with open("msg/daily.md") as daily_file:
daily_text = daily_file.read()
chatbot.send_message(
chat_id=chat_id,
text=daily_text,
parse_mode=telegram.ParseMode.MARKDOWN,
)
return 0
def send_example(self, chatbot, update):
"""
Sends example to caller
@chatbot = information about the BOT
@update = the user info.
"""
self.send_type_action(chatbot, update)
self.logger.info("Example command received.")
with open("msg/example.md") as example_file:
example_text = example_file.read()
print(example_text)
chatbot.send_message(
chat_id=update.message.chat_id,
text=example_text,
parse_mode=telegram.ParseMode.MARKDOWN,
)
return 0
def text_message(self, chatbot, update):
self.send_type_action(chatbot, update)
chatbot.send_message(
chat_id=update.message.chat_id,
text="ok",
parse_mode=telegram.ParseMode.MARKDOWN,
)
return 0
def error(self, chatbot, update, error):
self.logger.warning(f'Update "{update}" caused error "{error}"')
return 0
def run(self):
# Start the Bot
self.logger.info("Polling BOT.")
self.updater.start_polling()
# Run the BOT until you press Ctrl-C or the process receives SIGINT,
# SIGTERM or SIGABRT. This should be used most of the time, since
# start_polling() is non-blocking and will stop the BOT gracefully.
self.updater.idle()
return 0
if __name__ == "__main__":
if TOKEN is not None:
if PORT is not None:
BOT = DailyBot(TOKEN)
BOT.updater.start_webhook(
listen="0.0.0.0",
port=int(PORT),
url_path=TOKEN)
if LINK:
BOT.updater.bot.set_webhook(LINK)
else:
BOT.updater.bot.set_webhook(f"https://{NAME}.herokuapp.com/{TOKEN}")
BOT.updater.idle()
else:
# Run on local system once detected that it's not on Heroku nor ngrok
BOT = DailyBot(TOKEN)
BOT.run()
else:
HOUR = int(os.environ.get("HOUR"))
MINUTE = int(os.environ.get("MINUTE"))
print(f"Token {TOKEN}\n"
f"Port {PORT}\n"
f"Name {NAME}\n"
f"Hour {HOUR}\n"
f"Minute {MINUTE}\n")
|
[
"logging.basicConfig",
"time.sleep",
"telegram.ext.Updater",
"telegram.ext.CommandHandler",
"logging.getLogger"
] |
[((186, 293), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""', 'level': 'logging.INFO'}), "(format=\n '%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)\n", (205, 293), False, 'import logging\n'), ((346, 370), 'logging.getLogger', 'logging.getLogger', (['"""LOG"""'], {}), "('LOG')\n", (363, 370), False, 'import logging\n'), ((436, 450), 'telegram.ext.Updater', 'Updater', (['token'], {}), '(token)\n', (443, 450), False, 'from telegram.ext import Updater, CommandHandler\n'), ((670, 710), 'telegram.ext.CommandHandler', 'CommandHandler', (['"""start"""', 'self.send_start'], {}), "('start', self.send_start)\n", (684, 710), False, 'from telegram.ext import Updater, CommandHandler\n'), ((789, 833), 'telegram.ext.CommandHandler', 'CommandHandler', (['"""example"""', 'self.send_example'], {}), "('example', self.send_example)\n", (803, 833), False, 'from telegram.ext import Updater, CommandHandler\n'), ((912, 952), 'telegram.ext.CommandHandler', 'CommandHandler', (['"""daily"""', 'self.send_daily'], {}), "('daily', self.send_daily)\n", (926, 952), False, 'from telegram.ext import Updater, CommandHandler\n'), ((1324, 1332), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (1329, 1332), False, 'from time import sleep\n')]
|
import os
PACKDIR = os.path.abspath(os.path.dirname(__file__))
|
[
"os.path.dirname"
] |
[((36, 61), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (51, 61), False, 'import os\n')]
|
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from symposion.schedule.cache import db, cache_key, cache_key_user
class Command(BaseCommand):
def delete(self, key):
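        # Hold the per-key lock while deleting so concurrent writers cannot race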
with db.lock("%s-lock" % key):
db.delete(key)
def handle(self, *args, **options):
if db:
self.delete(cache_key())
for user in User.objects.all():
self.delete(cache_key_user(user))
|
[
"symposion.schedule.cache.db.delete",
"symposion.schedule.cache.cache_key",
"symposion.schedule.cache.db.lock",
"symposion.schedule.cache.cache_key_user",
"django.contrib.auth.models.User.objects.all"
] |
[((240, 264), 'symposion.schedule.cache.db.lock', 'db.lock', (["('%s-lock' % key)"], {}), "('%s-lock' % key)\n", (247, 264), False, 'from symposion.schedule.cache import db, cache_key, cache_key_user\n'), ((278, 292), 'symposion.schedule.cache.db.delete', 'db.delete', (['key'], {}), '(key)\n', (287, 292), False, 'from symposion.schedule.cache import db, cache_key, cache_key_user\n'), ((414, 432), 'django.contrib.auth.models.User.objects.all', 'User.objects.all', ([], {}), '()\n', (430, 432), False, 'from django.contrib.auth.models import User\n'), ((377, 388), 'symposion.schedule.cache.cache_key', 'cache_key', ([], {}), '()\n', (386, 388), False, 'from symposion.schedule.cache import db, cache_key, cache_key_user\n'), ((462, 482), 'symposion.schedule.cache.cache_key_user', 'cache_key_user', (['user'], {}), '(user)\n', (476, 482), False, 'from symposion.schedule.cache import db, cache_key, cache_key_user\n')]
|
import logging
import sys
from PyQt5 import QtWidgets
from .mainwindow import MainWindow
def run():
app = QtWidgets.QApplication(sys.argv)
mw = MainWindow()
    try:
        mw.openFile(sys.argv[1])
    except Exception:
        # No file argument was given or the file could not be opened;
        # start with an empty window instead.
        pass
logging.root.setLevel(logging.DEBUG)
app.exec_()
if __name__ == '__main__':
run()
|
[
"PyQt5.QtWidgets.QApplication",
"logging.root.setLevel"
] |
[((114, 146), 'PyQt5.QtWidgets.QApplication', 'QtWidgets.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (136, 146), False, 'from PyQt5 import QtWidgets\n'), ((240, 276), 'logging.root.setLevel', 'logging.root.setLevel', (['logging.DEBUG'], {}), '(logging.DEBUG)\n', (261, 276), False, 'import logging\n')]
|
import logging
import pytest
import json
import time
from ocs_ci.framework.testlib import scale, E2ETest
from ocs_ci.framework.testlib import skipif_ocs_version
from ocs_ci.ocs import hsbench
from ocs_ci.framework import config
from ocs_ci.ocs.ocp import OCP
from ocs_ci.ocs.bucket_utils import compare_bucket_object_list
from ocs_ci.ocs import scale_noobaa_lib
log = logging.getLogger(__name__)
@pytest.fixture(autouse=True)
def s3bench(request):
s3bench = hsbench.HsBench()
s3bench.create_resource_hsbench()
s3bench.install_hsbench()
def finalizer():
s3bench.cleanup()
request.addfinalizer(finalizer)
return s3bench
@scale
@skipif_ocs_version("<4.9")
class TestScaleBucketReplication(E2ETest):
"""
Test MCG scale bucket replication
"""
MCG_S3_OBJ = 1000
MCG_BUCKET = 50
@pytest.mark.parametrize(
argnames=["bucketclass", "replication_bucketclass"],
argvalues=[
pytest.param(
{
"interface": "OC",
"backingstore_dict": {"aws": [(1, "eu-central-1")]},
},
{"interface": "OC", "backingstore_dict": {"azure": [(1, None)]}},
marks=[pytest.mark.polarion_id("OCS-2721")],
),
pytest.param(
{
"interface": "OC",
"namespace_policy_dict": {
"type": "Single",
"namespacestore_dict": {"aws": [(1, "eu-central-1")]},
},
},
{
"interface": "OC",
"namespace_policy_dict": {
"type": "Single",
"namespacestore_dict": {"azure": [(1, None)]},
},
},
marks=[pytest.mark.polarion_id("OCS-2722")],
),
],
)
def test_scale_unidirectional_bucket_replication(
self,
awscli_pod_session,
mcg_obj,
bucket_factory,
bucketclass,
replication_bucketclass,
s3bench,
wait_time=120,
):
"""
Test unidirectional bucket replication adding objects to:
- Object buckets - backingstore
- Namespace buckets - namespacestore
"""
replication_buckets = bucket_factory(
amount=self.MCG_BUCKET,
bucketclass=replication_bucketclass,
)
endpoints = list()
source_buckets = list()
for bucket in replication_buckets:
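            # Replication policy tuple: (rule id, destination bucket, optional prefix filter)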
replication_policy = ("basic-replication-rule", bucket.name, None)
source_bucket = bucket_factory(
amount=1,
bucketclass=bucketclass,
replication_policy=replication_policy,
)[0]
end_point = (
"http://"
+ mcg_obj.s3_internal_endpoint.split("/")[2].split(":")[0]
+ "/"
+ f"{source_bucket.name}"
)
endpoints.append(end_point)
source_buckets.append(source_bucket)
for endpoint in endpoints:
s3bench.run_benchmark(
num_obj=self.MCG_S3_OBJ,
timeout=7200,
access_key=mcg_obj.access_key_id,
secret_key=mcg_obj.access_key,
end_point=endpoint,
run_mode="pg",
)
time.sleep(wait_time)
# Restart Noobaa-core pod
scale_noobaa_lib.noobaa_running_node_restart(pod_name="noobaa-db")
# Verify bucket replication
for i in range(len(replication_buckets)):
compare_bucket_object_list(
mcg_obj, replication_buckets[i].name, source_buckets[i].name
)
@pytest.mark.parametrize(
argnames=["first_bucketclass", "second_bucketclass"],
argvalues=[
pytest.param(
{
"interface": "OC",
"backingstore_dict": {"aws": [(1, "eu-central-1")]},
},
{"interface": "OC", "backingstore_dict": {"azure": [(1, None)]}},
marks=[pytest.mark.polarion_id("OCS-2723")],
),
],
)
def test_scale_bidirectional_bucket_replication(
self,
awscli_pod_session,
mcg_obj,
bucket_factory,
first_bucketclass,
second_bucketclass,
test_directory_setup,
s3bench,
wait_time=120,
):
"""
Test bidirectional bucket replication.
"""
first_buckets = bucket_factory(
amount=self.MCG_BUCKET, bucketclass=first_bucketclass
)
endpoints = list()
second_buckets = list()
for bucket in first_buckets:
replication_policy = ("basic-replication-rule", bucket.name, None)
second_bucket = bucket_factory(
1,
bucketclass=second_bucketclass,
replication_policy=replication_policy,
)[0]
replication_policy_patch_dict = {
"spec": {
"additionalConfig": {
"replicationPolicy": json.dumps(
[
{
"rule_id": "basic-replication-rule-2",
"destination_bucket": second_bucket.name,
}
]
)
}
}
}
OCP(
kind="obc",
namespace=config.ENV_DATA["cluster_namespace"],
resource_name=bucket.name,
).patch(
params=json.dumps(replication_policy_patch_dict), format_type="merge"
)
first_end_point = (
"http://"
+ mcg_obj.s3_internal_endpoint.split("/")[2].split(":")[0]
+ "/"
+ f"{bucket.name}"
)
second_end_point = (
"http://"
+ mcg_obj.s3_internal_endpoint.split("/")[2].split(":")[0]
+ "/"
+ f"{second_bucket.name}"
)
endpoints.append(first_end_point)
endpoints.append(second_end_point)
second_buckets.append(second_bucket)
# Write objects to the buckets
for endpoint in endpoints:
s3bench.run_benchmark(
num_obj=self.MCG_S3_OBJ,
timeout=7200,
access_key=mcg_obj.access_key_id,
secret_key=mcg_obj.access_key,
end_point=endpoint,
run_mode="pg",
)
time.sleep(wait_time)
# Restart Noobaa-db pod
scale_noobaa_lib.noobaa_running_node_restart(pod_name="noobaa-db")
# Verify bucket replication
for i in range(len(first_buckets)):
compare_bucket_object_list(
mcg_obj, first_buckets[i].name, second_buckets[i].name
)
|
[
"ocs_ci.framework.testlib.skipif_ocs_version",
"pytest.fixture",
"ocs_ci.ocs.scale_noobaa_lib.noobaa_running_node_restart",
"ocs_ci.ocs.ocp.OCP",
"time.sleep",
"ocs_ci.ocs.hsbench.HsBench",
"json.dumps",
"pytest.mark.polarion_id",
"ocs_ci.ocs.bucket_utils.compare_bucket_object_list",
"logging.getLogger"
] |
[((370, 397), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (387, 397), False, 'import logging\n'), ((401, 429), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (415, 429), False, 'import pytest\n'), ((666, 692), 'ocs_ci.framework.testlib.skipif_ocs_version', 'skipif_ocs_version', (['"""<4.9"""'], {}), "('<4.9')\n", (684, 692), False, 'from ocs_ci.framework.testlib import skipif_ocs_version\n'), ((466, 483), 'ocs_ci.ocs.hsbench.HsBench', 'hsbench.HsBench', ([], {}), '()\n', (481, 483), False, 'from ocs_ci.ocs import hsbench\n'), ((3472, 3493), 'time.sleep', 'time.sleep', (['wait_time'], {}), '(wait_time)\n', (3482, 3493), False, 'import time\n'), ((3536, 3602), 'ocs_ci.ocs.scale_noobaa_lib.noobaa_running_node_restart', 'scale_noobaa_lib.noobaa_running_node_restart', ([], {'pod_name': '"""noobaa-db"""'}), "(pod_name='noobaa-db')\n", (3580, 3602), False, 'from ocs_ci.ocs import scale_noobaa_lib\n'), ((6828, 6849), 'time.sleep', 'time.sleep', (['wait_time'], {}), '(wait_time)\n', (6838, 6849), False, 'import time\n'), ((6890, 6956), 'ocs_ci.ocs.scale_noobaa_lib.noobaa_running_node_restart', 'scale_noobaa_lib.noobaa_running_node_restart', ([], {'pod_name': '"""noobaa-db"""'}), "(pod_name='noobaa-db')\n", (6934, 6956), False, 'from ocs_ci.ocs import scale_noobaa_lib\n'), ((3702, 3794), 'ocs_ci.ocs.bucket_utils.compare_bucket_object_list', 'compare_bucket_object_list', (['mcg_obj', 'replication_buckets[i].name', 'source_buckets[i].name'], {}), '(mcg_obj, replication_buckets[i].name,\n source_buckets[i].name)\n', (3728, 3794), False, 'from ocs_ci.ocs.bucket_utils import compare_bucket_object_list\n'), ((7050, 7137), 'ocs_ci.ocs.bucket_utils.compare_bucket_object_list', 'compare_bucket_object_list', (['mcg_obj', 'first_buckets[i].name', 'second_buckets[i].name'], {}), '(mcg_obj, first_buckets[i].name, second_buckets[i\n ].name)\n', (7076, 7137), False, 'from ocs_ci.ocs.bucket_utils import compare_bucket_object_list\n'), ((5642, 5736), 'ocs_ci.ocs.ocp.OCP', 'OCP', ([], {'kind': '"""obc"""', 'namespace': "config.ENV_DATA['cluster_namespace']", 'resource_name': 'bucket.name'}), "(kind='obc', namespace=config.ENV_DATA['cluster_namespace'],\n resource_name=bucket.name)\n", (5645, 5736), False, 'from ocs_ci.ocs.ocp import OCP\n'), ((5826, 5867), 'json.dumps', 'json.dumps', (['replication_policy_patch_dict'], {}), '(replication_policy_patch_dict)\n', (5836, 5867), False, 'import json\n'), ((5257, 5356), 'json.dumps', 'json.dumps', (["[{'rule_id': 'basic-replication-rule-2', 'destination_bucket':\n second_bucket.name}]"], {}), "([{'rule_id': 'basic-replication-rule-2', 'destination_bucket':\n second_bucket.name}])\n", (5267, 5356), False, 'import json\n'), ((1225, 1260), 'pytest.mark.polarion_id', 'pytest.mark.polarion_id', (['"""OCS-2721"""'], {}), "('OCS-2721')\n", (1248, 1260), False, 'import pytest\n'), ((1853, 1888), 'pytest.mark.polarion_id', 'pytest.mark.polarion_id', (['"""OCS-2722"""'], {}), "('OCS-2722')\n", (1876, 1888), False, 'import pytest\n'), ((4214, 4249), 'pytest.mark.polarion_id', 'pytest.mark.polarion_id', (['"""OCS-2723"""'], {}), "('OCS-2723')\n", (4237, 4249), False, 'import pytest\n')]
|
from urllib.parse import urlparse
from itsdangerous.timed import TimedSerializer, TimestampSigner
from requests import Response
from requests.sessions import Session
from django.contrib.auth import get_user_model
from django.shortcuts import reverse
from django.test import override_settings, TestCase
from django.utils.timezone import now
from ...conf.test import override_dynamic_settings
from .utils import TEST_SSO_SETTINGS
User = get_user_model()
SSO_USER_ID = 1
def create_verify_response(data):
signer = TimedSerializer(TEST_SSO_SETTINGS["sso_private_key"])
return signer.dumps(data)
class ConnectionMock:
def __init__(self, user_data=None):
self.session = Session
self.user_data = user_data
def __enter__(self):
self.origin_post = Session.post
def mocked_post(*args, **kwargs):
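            # Emulate the SSO server: /server/request-token/ returns a canned
            # token, /server/verify/ returns a signed user payload.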
mocked_response = Response()
requested_url = args[1]
if "/server/request-token/" == urlparse(requested_url).path:
# token generated for private key settings.SSO_PRIVATE_KEY = 'priv1'
mocked_response._content = (
b'{"request_token": "<KEY>'
b'oF0YGEoIYu37QOajkc"}.<KEY>'
)
elif "/server/verify/" == urlparse(requested_url).path:
user_data = {
"id": SSO_USER_ID,
"username": "jkowalski",
"email": "<EMAIL>",
"first_name": "Jan",
"last_name": "Kowalski",
"is_staff": False,
"is_superuser": False,
"is_active": True,
}
if self.user_data:
user_data.update(self.user_data)
mocked_response._content = create_verify_response(user_data)
mocked_response.status_code = 200
return mocked_response
setattr(self.session, "post", mocked_post)
return self.session
def __exit__(self, type, value, traceback):
setattr(self.session, "post", self.origin_post)
class TimestampSignerMock:
def __init__(self):
self.TimestampSigner = TimestampSigner
def __enter__(self):
self.origin_unsign = TimestampSigner.unsign
def mocked_unsign(*args, **kwargs):
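            # Skip real signature/timestamp verification for the canned payload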
s = args[1]
if b'"username": "jkowalski"' in s:
value = s[: s.index(b"}.") + 1] # {...}
timestamp_to_datetime = now()
return value, timestamp_to_datetime
else:
return self.origin_unsign(*args, **kwargs)
setattr(self.TimestampSigner, "unsign", mocked_unsign)
return self.TimestampSigner
def __exit__(self, type, value, traceback):
setattr(self.TimestampSigner, "unsign", self.origin_unsign)
@override_dynamic_settings(enable_sso=False)
def test_sso_login_view_returns_404_if_sso_is_disabled(db, client):
url_to_external_logging = reverse("simple-sso-login")
assert url_to_external_logging == "/sso/client/"
response = client.get(url_to_external_logging)
assert response.status_code == 404
@override_dynamic_settings(**TEST_SSO_SETTINGS)
def test_sso_login_view_initiates_auth_flow(db, client):
url_to_external_logging = reverse("simple-sso-login")
assert url_to_external_logging == "/sso/client/"
with ConnectionMock():
response = client.get(url_to_external_logging)
assert response.status_code == 302
url_parsed = urlparse(response.url)
assert url_parsed.path == "/server/authorize/"
assert url_parsed.query == (
"token=<KEY>nI96XfxqGkm6b1zFToF0YGEoIYu37QOajkc"
)
@override_dynamic_settings(enable_sso=False)
def test_sso_auth_view_returns_404_if_sso_is_disabled(db, client):
url_to_authenticate = reverse("simple-sso-authenticate")
assert url_to_authenticate == "/sso/client/authenticate/"
response = client.get(url_to_authenticate)
assert response.status_code == 404
@override_dynamic_settings(**TEST_SSO_SETTINGS)
def test_sso_auth_view_creates_new_user(db, client):
url_to_authenticate = reverse("simple-sso-authenticate")
assert url_to_authenticate == "/sso/client/authenticate/"
query = (
"next=%2F&access_token=<KEY>"
"Ka3Q2d1dNR1lVYkhzVThvZU0i.XTeRVQ.3XiIMg0AFcJKDFCekse6s43uNLI"
)
url_to_authenticate += "?" + query
with ConnectionMock():
with TimestampSignerMock():
response = client.get(url_to_authenticate)
assert response.status_code == 302
assert response.url == "/"
user = User.objects.first()
assert user.username == "jkowalski"
@override_dynamic_settings(**TEST_SSO_SETTINGS)
def test_sso_auth_view_authenticates_existing_user(user, client):
user.sso_id = SSO_USER_ID
user.save()
url_to_authenticate = reverse("simple-sso-authenticate")
assert url_to_authenticate == "/sso/client/authenticate/"
query = (
"next=%2F&access_token=<KEY>0TnF"
"Ka3Q2d1dNR1lVYkhzVThvZU0i.XTeRVQ.3XiIMg0AFcJKDFCekse6s43uNLI"
)
url_to_authenticate += "?" + query
with ConnectionMock():
with TimestampSignerMock():
response = client.get(url_to_authenticate)
assert response.status_code == 302
assert response.url == "/"
assert User.objects.count() == 1
@override_dynamic_settings(**TEST_SSO_SETTINGS)
def test_sso_auth_view_updates_existing_user_using_data_from_sso(user, client):
user.sso_id = SSO_USER_ID
user.is_active = False
user.save()
url_to_authenticate = reverse("simple-sso-authenticate")
assert url_to_authenticate == "/sso/client/authenticate/"
query = (
"next=%2F&access_token=<KEY>"
"Ka3Q2d1dNR1lVYkhzVThvZU0i.XTeRVQ.3XiIMg0AFcJKDFCekse6s43uNLI"
)
url_to_authenticate += "?" + query
with ConnectionMock():
with TimestampSignerMock():
client.get(url_to_authenticate)
user.refresh_from_db()
assert user.username == "jkowalski"
assert user.email == "<EMAIL>"
assert user.is_active is True
@override_dynamic_settings(**TEST_SSO_SETTINGS)
def test_sso_auth_view_returns_bad_request_error_for_invalid_user_data(db, client):
url_to_authenticate = reverse("simple-sso-authenticate")
assert url_to_authenticate == "/sso/client/authenticate/"
query = (
"next=%2F&access_token=<KEY>jQwRzV6TmphZDRSaEprbjlMbnR0TnF"
"Ka3Q2d1dNR1lVYkhzVThvZU0i.XTeRVQ.3XiIMg0AFcJKDFCekse6s43uNLI"
)
url_to_authenticate += "?" + query
with ConnectionMock({"email": "invalid"}):
with TimestampSignerMock():
response = client.get(url_to_authenticate)
assert response.status_code == 400
|
[
"itsdangerous.timed.TimedSerializer",
"django.utils.timezone.now",
"django.contrib.auth.get_user_model",
"requests.Response",
"django.shortcuts.reverse",
"urllib.parse.urlparse"
] |
[((439, 455), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (453, 455), False, 'from django.contrib.auth import get_user_model\n'), ((522, 575), 'itsdangerous.timed.TimedSerializer', 'TimedSerializer', (["TEST_SSO_SETTINGS['sso_private_key']"], {}), "(TEST_SSO_SETTINGS['sso_private_key'])\n", (537, 575), False, 'from itsdangerous.timed import TimedSerializer, TimestampSigner\n'), ((3011, 3038), 'django.shortcuts.reverse', 'reverse', (['"""simple-sso-login"""'], {}), "('simple-sso-login')\n", (3018, 3038), False, 'from django.shortcuts import reverse\n'), ((3320, 3347), 'django.shortcuts.reverse', 'reverse', (['"""simple-sso-login"""'], {}), "('simple-sso-login')\n", (3327, 3347), False, 'from django.shortcuts import reverse\n'), ((3542, 3564), 'urllib.parse.urlparse', 'urlparse', (['response.url'], {}), '(response.url)\n', (3550, 3564), False, 'from urllib.parse import urlparse\n'), ((3852, 3886), 'django.shortcuts.reverse', 'reverse', (['"""simple-sso-authenticate"""'], {}), "('simple-sso-authenticate')\n", (3859, 3886), False, 'from django.shortcuts import reverse\n'), ((4165, 4199), 'django.shortcuts.reverse', 'reverse', (['"""simple-sso-authenticate"""'], {}), "('simple-sso-authenticate')\n", (4172, 4199), False, 'from django.shortcuts import reverse\n'), ((4883, 4917), 'django.shortcuts.reverse', 'reverse', (['"""simple-sso-authenticate"""'], {}), "('simple-sso-authenticate')\n", (4890, 4917), False, 'from django.shortcuts import reverse\n'), ((5611, 5645), 'django.shortcuts.reverse', 'reverse', (['"""simple-sso-authenticate"""'], {}), "('simple-sso-authenticate')\n", (5618, 5645), False, 'from django.shortcuts import reverse\n'), ((6283, 6317), 'django.shortcuts.reverse', 'reverse', (['"""simple-sso-authenticate"""'], {}), "('simple-sso-authenticate')\n", (6290, 6317), False, 'from django.shortcuts import reverse\n'), ((875, 885), 'requests.Response', 'Response', ([], {}), '()\n', (883, 885), False, 'from requests import Response\n'), ((2514, 2519), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (2517, 2519), False, 'from django.utils.timezone import now\n'), ((965, 988), 'urllib.parse.urlparse', 'urlparse', (['requested_url'], {}), '(requested_url)\n', (973, 988), False, 'from urllib.parse import urlparse\n'), ((1279, 1302), 'urllib.parse.urlparse', 'urlparse', (['requested_url'], {}), '(requested_url)\n', (1287, 1302), False, 'from urllib.parse import urlparse\n')]
|
# -*- coding:utf-8 -*-
"""
通用Easy Mock操作方法
传入:
1.url -- easy mock路径
2.匹配类型 -- 即要替换的目标值
3.替换值 -- 替换目标的值
输出:
1.查看原url的接口内容
2.替换执行是否成功
具体做法:
"""
import requests
import json
import re
from collections import namedtuple
class EasyMock(object):
def __init__(self,project_url,login_info):
self.project_url = project_url
self.path = self.getProjectInfo().path
self.project_id = self.getProjectInfo().project_id
        # Login-related
        # Username and password used to log in
self.login_info = login_info
self.data_token = self.login()
self.h = {"Authorization": "Bearer " + self.data_token}
self.c = {"easy-mock_token": self.data_token}
def login(self):
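        # POST the credentials to the Easy Mock login endpoint and return the bearer token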
login_url = r'http://' + self.path + '/api/u/login'
r = requests.post(login_url, data=self.login_info, verify=False)
data_token = json.loads(r.text)['data']['token']
return data_token
def getProjectInfo(self):
project_info = namedtuple("mockURL", ['path', 'project_id'])
if self.project_url.count(r'http://'):
path = self.project_url.split('/')[2]
project_id = self.project_url.split('/')[-1]
else:
path = self.project_url.split('/')[0]
project_id = self.project_url.split('/')[2]
return project_info(
path=path,
project_id=project_id
)
def getMockContent(self):
project_detail_url = r'http://' + self.path + '/api/mock?project_id=' + self.project_id + '&page_size=2000&page_index=1&keywords='
s = requests.get(project_detail_url, headers=self.h, cookies=self.c)
json_text = json.loads(s.text)['data']['mocks']
return json_text
def getMockURL(self):
url_list = [i["url"] for i in self.getMockContent()]
return url_list
def getMockUrlResponse(self,api_url):
for i in self.getMockContent():
if i.get('url') == api_url:
return i.get('mode')
def getMockUrlContent(self,api_url):
for i in self.getMockContent():
if i.get('url') == api_url:
return i
def queryPatternInMock(self,pattern,api_url):
"""返回查询到的匹配值"""
search_result = re.search(pattern, self.getMockUrlResponse(api_url), re.S)
return search_result
def updateContent(self,pattern,api_url,target):
update_url = r'http://' + self.path + '/api/mock/update'
mock_url_content = self.getMockUrlContent(api_url)
search_result = self.queryPatternInMock(pattern,api_url)
if search_result:
replace_content = self.getMockUrlResponse(api_url).replace(search_result.group(1), target)
else:
# ("没有找到要替换内容,准备插入新内容:%s")%target)
function_pattern = r"function\(.*?\).*?{"
            # Insertion is only allowed when the mock data contains a conditional filter function
if self.queryPatternInMock(function_pattern,api_url):
search_result = re.search(function_pattern, self.getMockUrlResponse(api_url), re.S)
target = search_result.group(0) + target
replace_content = self.getMockUrlResponse(api_url).replace(search_result.group(0), target)
else:
raise Exception("没有找到要替换内容,请手动插入到EasyMock中~")
update_data = {"url": api_url, "description": mock_url_content.get('description'),
"id": mock_url_content.get('_id'), "method": mock_url_content.get('method'),
"mode": replace_content}
# print(update_data)
# print(replace_content)
resp = requests.post(url=update_url, data=update_data, headers=self.h, cookies=self.c)
print("已更新Easy Mock数据成功!%s"%resp)
if __name__ == "__main__":
project_url = 'http://10.201.7.226:7300/project/5d0882ce0d79ef1a4f9480e4'
api = '/loanDept2'
target = """if (_req.body.suid === 'u_7wewr1') {
return {"suid": "u_7wewr1",
"product_code": "FUDAI",
"zhitou_user": false,
"details": [{"code": "auth_name", "status": "2", "channel": "", "value": "Easy Mock", "date_time": 1577940461000},
{"code": "auth_enhance", "status": "2", "channel": "", "value": "", "date_time": 1577940461000},
{"code": "auth_credit", "status": "2", "channel": "01", "value": "5000000","date_time": 1577940461000},
{"code": "money", "status": 0, "channel": "", "value": "", "date_time": ""},
{"code": "auth_credit_fail", "status": 0, "channel": "", "value": "", "date_time": ""}]}}"""
# target = """if (_req.body.suid === 'u_7wewr1') { return {"没有找到该用户信息"} }"""
pattern = r"({}.*?)(if|else)".format("if \(_req.body.suid === \'u_7wewr1\'\)")
    # Username and password used to log in
login_info = {
'name': 'caodashan',
'password': '<PASSWORD>'
}
A = EasyMock(project_url,login_info)
print(A.getMockUrlResponse(api))
print(A.getMockURL())
# print(A.getMockUrlContent(api))
# print(A.queryPatternInMock(pattern,api).group(1))
# A.updateContent(pattern,api,target)
|
[
"requests.post",
"collections.namedtuple",
"requests.get",
"json.loads"
] |
[((848, 908), 'requests.post', 'requests.post', (['login_url'], {'data': 'self.login_info', 'verify': '(False)'}), '(login_url, data=self.login_info, verify=False)\n', (861, 908), False, 'import requests\n'), ((1051, 1096), 'collections.namedtuple', 'namedtuple', (['"""mockURL"""', "['path', 'project_id']"], {}), "('mockURL', ['path', 'project_id'])\n", (1061, 1096), False, 'from collections import namedtuple\n'), ((1667, 1731), 'requests.get', 'requests.get', (['project_detail_url'], {'headers': 'self.h', 'cookies': 'self.c'}), '(project_detail_url, headers=self.h, cookies=self.c)\n', (1679, 1731), False, 'import requests\n'), ((3714, 3793), 'requests.post', 'requests.post', ([], {'url': 'update_url', 'data': 'update_data', 'headers': 'self.h', 'cookies': 'self.c'}), '(url=update_url, data=update_data, headers=self.h, cookies=self.c)\n', (3727, 3793), False, 'import requests\n'), ((931, 949), 'json.loads', 'json.loads', (['r.text'], {}), '(r.text)\n', (941, 949), False, 'import json\n'), ((1753, 1771), 'json.loads', 'json.loads', (['s.text'], {}), '(s.text)\n', (1763, 1771), False, 'import json\n')]
|
"""
sentry.filters.base
~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from django.conf import settings as django_settings
from django.utils.datastructures import SortedDict
from sentry.conf import settings
from sentry.models import Event
from .base import Filter, GroupFilter
__all__ = ('StatusFilter', 'LoggerFilter', 'ServerNameFilter', 'SiteFilter',
'LevelFilter')
class StatusFilter(GroupFilter):
label = 'Status'
column = 'status'
default = '0'
def get_choices(self):
return SortedDict([
(0, 'Unresolved'),
(1, 'Resolved'),
])
class LoggerFilter(Filter):
label = 'Logger'
column = 'logger'
class ServerNameFilter(Filter):
label = 'Server Name'
column = 'server_name'
def get_query_set(self, queryset):
if queryset.model == Event:
return queryset.filter(server_name=self.get_value()).distinct()
else:
return queryset.filter(event_set__server_name=self.get_value()).distinct()
class LevelFilter(Filter):
label = 'Level'
column = 'level'
def get_choices(self):
return SortedDict((str(k), v) for k, v in settings.LOG_LEVELS)
def get_query_set(self, queryset):
return queryset.filter(level=self.get_value())
class SiteFilter(Filter):
label = 'Site'
column = 'site'
def process(self, data):
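        # Resolve the current site name once, cache it on settings.SITE, and
        # tag the event data with it.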
if 'site' in data:
return data
if settings.SITE is None:
if 'django.contrib.sites' in django_settings.INSTALLED_APPS:
from django.contrib.sites.models import Site
try:
settings.SITE = Site.objects.get_current().name
except Site.DoesNotExist:
settings.SITE = ''
else:
settings.SITE = ''
if settings.SITE:
data['site'] = settings.SITE
return data
def get_query_set(self, queryset):
if queryset.model == Event:
return queryset.filter(site=self.get_value()).distinct()
else:
return queryset.filter(event_set__site=self.get_value()).distinct()
|
[
"django.utils.datastructures.SortedDict",
"django.contrib.sites.models.Site.objects.get_current"
] |
[((614, 662), 'django.utils.datastructures.SortedDict', 'SortedDict', (["[(0, 'Unresolved'), (1, 'Resolved')]"], {}), "([(0, 'Unresolved'), (1, 'Resolved')])\n", (624, 662), False, 'from django.utils.datastructures import SortedDict\n'), ((1748, 1774), 'django.contrib.sites.models.Site.objects.get_current', 'Site.objects.get_current', ([], {}), '()\n', (1772, 1774), False, 'from django.contrib.sites.models import Site\n')]
|
from django.contrib import admin
from gameon.users import models
admin.site.register(models.Profile)
|
[
"django.contrib.admin.site.register"
] |
[((67, 102), 'django.contrib.admin.site.register', 'admin.site.register', (['models.Profile'], {}), '(models.Profile)\n', (86, 102), False, 'from django.contrib import admin\n')]
|
from __future__ import annotations
import asyncio
import weakref
from types import TracebackType
from typing import Any, Awaitable, Callable, Optional
from ..config import Config
from ..typing import ASGIFramework, ASGIReceiveCallable, ASGIReceiveEvent, ASGISendEvent, Scope
from ..utils import invoke_asgi
async def _handle(
app: ASGIFramework,
config: Config,
scope: Scope,
receive: ASGIReceiveCallable,
send: Callable[[Optional[ASGISendEvent]], Awaitable[None]],
) -> None:
try:
await invoke_asgi(app, scope, receive, send)
except asyncio.CancelledError:
raise
except Exception:
await config.log.exception("Error in ASGI Framework")
finally:
await send(None)
class TaskGroup:
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
self._loop = loop
self._tasks: weakref.WeakSet = weakref.WeakSet()
self._exiting = False
async def spawn_app(
self,
app: ASGIFramework,
config: Config,
scope: Scope,
send: Callable[[Optional[ASGISendEvent]], Awaitable[None]],
) -> Callable[[ASGIReceiveEvent], Awaitable[None]]:
app_queue: asyncio.Queue[ASGIReceiveEvent] = asyncio.Queue(config.max_app_queue_size)
self.spawn(_handle, app, config, scope, app_queue.get, send)
return app_queue.put
def spawn(self, func: Callable, *args: Any) -> None:
if self._exiting:
raise RuntimeError("Spawning whilst exiting")
self._tasks.add(self._loop.create_task(func(*args)))
async def __aenter__(self) -> "TaskGroup":
return self
async def __aexit__(self, exc_type: type, exc_value: BaseException, tb: TracebackType) -> None:
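        # On error, cancel outstanding tasks; otherwise gather and await them all.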
self._exiting = True
if exc_type is not None:
self._cancel_tasks()
try:
task = asyncio.gather(*self._tasks)
await task
finally:
task.cancel()
try:
await task
except asyncio.CancelledError:
pass
def _cancel_tasks(self) -> None:
for task in self._tasks:
task.cancel()
|
[
"asyncio.gather",
"asyncio.Queue",
"weakref.WeakSet"
] |
[((883, 900), 'weakref.WeakSet', 'weakref.WeakSet', ([], {}), '()\n', (898, 900), False, 'import weakref\n'), ((1222, 1262), 'asyncio.Queue', 'asyncio.Queue', (['config.max_app_queue_size'], {}), '(config.max_app_queue_size)\n', (1235, 1262), False, 'import asyncio\n'), ((1861, 1889), 'asyncio.gather', 'asyncio.gather', (['*self._tasks'], {}), '(*self._tasks)\n', (1875, 1889), False, 'import asyncio\n')]
|
import time
import os
import argparse
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
from scipy.io import savemat
parser = argparse.ArgumentParser()
parser.add_argument('--tol', type=float, default=1e-3)
parser.add_argument('--adjoint', type=eval, default=False)
parser.add_argument('--niters', type=int, default=1000)
parser.add_argument('--lr', type=float, default=0.01)
parser.add_argument('--gpu', type=int, default=0)
parser.add_argument('--experiment_no', type=int, default=3)
args = parser.parse_args()
if args.adjoint:
from torchdiffeq import odeint_adjoint as odeint
else:
from torchdiffeq import odeint
class ODEfunc(nn.Module):
def __init__(self, dim, nhidden):
super(ODEfunc, self).__init__()
# self.elu = nn.ELU(inplace=False)
self.elu = nn.Tanh()
self.fc1 = nn.Linear(2*dim, nhidden)
self.fc2 = nn.Linear(nhidden, nhidden)
self.fc3 = nn.Linear(nhidden, dim)
self.nfe = 0
def forward(self, t, z):
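        # z stacks positions x on top of velocities v along dim 0, so the state
        # splits at its midpoint.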
cutoff = int(len(z)/2)
x = z[:cutoff]
v = z[cutoff:]
into = torch.cat((x, v), dim=1)
self.nfe += 1
out = self.fc1(into)
out = self.elu(out)
out = self.fc2(out)
out = self.elu(out)
out = self.fc3(out)
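        # dz/dt = [v; a]: velocities advance the positions while the network
        # output acts as the acceleration.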
return torch.cat((v, out))
class ODEBlock(nn.Module):
def __init__(self, odefunc, integration_times):
super(ODEBlock, self).__init__()
self.odefunc = odefunc
self.integration_times = integration_times
def forward(self, x):
out = odeint(self.odefunc, x, self.integration_times, rtol=args.tol, atol=args.tol)
return out
@property
def nfe(self):
return self.odefunc.nfe
@nfe.setter
def nfe(self, value):
self.odefunc.nfe = value
def count_parameters(model):
return sum(p.numel() for p in model.parameters() if p.requires_grad)
if __name__ == '__main__':
device = torch.device('cuda:' + str(args.gpu) if torch.cuda.is_available() else 'cpu')
filename = 'sonode./'+str(args.experiment_no)+'./'
try:
os.makedirs('./'+filename)
except FileExistsError:
pass
    torch.random.manual_seed(2021) # Set random seed for repeatability
data_dim = 1
dim = data_dim
#dim does not equal data_dim for ANODEs where they are augmented with extra zeros
#download data
z0 = torch.tensor(np.load('data/z0.npy')).float().to(device)
z = torch.tensor(np.load('data/z.npy')).float().to(device)
samp_ts = torch.tensor(np.load('data/samp_ts.npy')).float().to(device)
# model
if args.experiment_no == 1:
nhidden = 15
elif args.experiment_no == 2:
nhidden = 20
elif args.experiment_no == 3:
nhidden = 25
else:
nhidden = 20
feature_layers = [ODEBlock(ODEfunc(dim, nhidden), samp_ts)]
model = nn.Sequential(*feature_layers).to(device)
optimizer = optim.Adam(model.parameters(), lr=args.lr)
loss_func = nn.MSELoss()
itr_arr = np.empty(args.niters)
loss_arr = np.empty(args.niters)
nfe_arr = np.empty(args.niters)
time_arr = np.empty(args.niters)
# training
start_time = time.time()
for itr in range(1, args.niters+1):
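        # Reset the ODE solver's function-evaluation counter for this iteration.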
model[0].nfe = 0
iter_start_time = time.time()
optimizer.zero_grad()
#forward in time and solve ode
pred_z = model(z0).to(device)
# compute loss
loss = loss_func(pred_z, z)
loss.backward()
optimizer.step()
iter_end_time = time.time()
# make arrays
itr_arr[itr-1] = itr
loss_arr[itr-1] = loss
nfe_arr[itr-1] = model[0].nfe
time_arr[itr-1] = iter_end_time-iter_start_time
print('Iter: {}, running MSE: {:.4f}'.format(itr, loss))
end_time = time.time()
print('\n')
print('Training complete after {} iterations.'.format(itr))
loss = loss.detach().numpy()
print('Train MSE = ' +str(loss))
print('NFE = ' +str(model[0].nfe))
print('Total time = '+str(end_time-start_time))
print('No. parameters = '+str(count_parameters(model)))
np.save(filename+'itr_arr.npy', itr_arr)
np.save(filename+'nfe_arr.npy', nfe_arr)
np.save(filename+'loss_arr.npy', loss_arr)
np.save(filename+'time_arr.npy', time_arr)
torch.save(model, filename+'model.pth')
names = []
params = []
params_orig = []
for name,param in model.named_parameters():
names.append(name)
params.append(param.detach().numpy())
params_orig.append(param)
for name,param in model.named_buffers():
names.append(name)
params.append(param.detach().numpy())
nn1 = dict({'Wb':params,'names':names,'mse':loss})
savemat(filename+'model.mat',nn1)
|
[
"torch.nn.MSELoss",
"numpy.save",
"numpy.load",
"argparse.ArgumentParser",
"torch.random.manual_seed",
"os.makedirs",
"torch.nn.Tanh",
"numpy.empty",
"torch.nn.Sequential",
"scipy.io.savemat",
"torch.cat",
"time.time",
"torch.save",
"torchdiffeq.odeint",
"torch.cuda.is_available",
"torch.nn.Linear"
] |
[((160, 185), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (183, 185), False, 'import argparse\n'), ((2234, 2264), 'torch.random.manual_seed', 'torch.random.manual_seed', (['(2021)'], {}), '(2021)\n', (2258, 2264), False, 'import torch\n'), ((3069, 3081), 'torch.nn.MSELoss', 'nn.MSELoss', ([], {}), '()\n', (3079, 3081), True, 'import torch.nn as nn\n'), ((3101, 3122), 'numpy.empty', 'np.empty', (['args.niters'], {}), '(args.niters)\n', (3109, 3122), True, 'import numpy as np\n'), ((3138, 3159), 'numpy.empty', 'np.empty', (['args.niters'], {}), '(args.niters)\n', (3146, 3159), True, 'import numpy as np\n'), ((3174, 3195), 'numpy.empty', 'np.empty', (['args.niters'], {}), '(args.niters)\n', (3182, 3195), True, 'import numpy as np\n'), ((3211, 3232), 'numpy.empty', 'np.empty', (['args.niters'], {}), '(args.niters)\n', (3219, 3232), True, 'import numpy as np\n'), ((3266, 3277), 'time.time', 'time.time', ([], {}), '()\n', (3275, 3277), False, 'import time\n'), ((3912, 3923), 'time.time', 'time.time', ([], {}), '()\n', (3921, 3923), False, 'import time\n'), ((4234, 4276), 'numpy.save', 'np.save', (["(filename + 'itr_arr.npy')", 'itr_arr'], {}), "(filename + 'itr_arr.npy', itr_arr)\n", (4241, 4276), True, 'import numpy as np\n'), ((4279, 4321), 'numpy.save', 'np.save', (["(filename + 'nfe_arr.npy')", 'nfe_arr'], {}), "(filename + 'nfe_arr.npy', nfe_arr)\n", (4286, 4321), True, 'import numpy as np\n'), ((4324, 4368), 'numpy.save', 'np.save', (["(filename + 'loss_arr.npy')", 'loss_arr'], {}), "(filename + 'loss_arr.npy', loss_arr)\n", (4331, 4368), True, 'import numpy as np\n'), ((4371, 4415), 'numpy.save', 'np.save', (["(filename + 'time_arr.npy')", 'time_arr'], {}), "(filename + 'time_arr.npy', time_arr)\n", (4378, 4415), True, 'import numpy as np\n'), ((4418, 4459), 'torch.save', 'torch.save', (['model', "(filename + 'model.pth')"], {}), "(model, filename + 'model.pth')\n", (4428, 4459), False, 'import torch\n'), ((4867, 4903), 'scipy.io.savemat', 'savemat', (["(filename + 'model.mat')", 'nn1'], {}), "(filename + 'model.mat', nn1)\n", (4874, 4903), False, 'from scipy.io import savemat\n'), ((833, 842), 'torch.nn.Tanh', 'nn.Tanh', ([], {}), '()\n', (840, 842), True, 'import torch.nn as nn\n'), ((862, 889), 'torch.nn.Linear', 'nn.Linear', (['(2 * dim)', 'nhidden'], {}), '(2 * dim, nhidden)\n', (871, 889), True, 'import torch.nn as nn\n'), ((907, 934), 'torch.nn.Linear', 'nn.Linear', (['nhidden', 'nhidden'], {}), '(nhidden, nhidden)\n', (916, 934), True, 'import torch.nn as nn\n'), ((954, 977), 'torch.nn.Linear', 'nn.Linear', (['nhidden', 'dim'], {}), '(nhidden, dim)\n', (963, 977), True, 'import torch.nn as nn\n'), ((1121, 1145), 'torch.cat', 'torch.cat', (['(x, v)'], {'dim': '(1)'}), '((x, v), dim=1)\n', (1130, 1145), False, 'import torch\n'), ((1324, 1343), 'torch.cat', 'torch.cat', (['(v, out)'], {}), '((v, out))\n', (1333, 1343), False, 'import torch\n'), ((1598, 1675), 'torchdiffeq.odeint', 'odeint', (['self.odefunc', 'x', 'self.integration_times'], {'rtol': 'args.tol', 'atol': 'args.tol'}), '(self.odefunc, x, self.integration_times, rtol=args.tol, atol=args.tol)\n', (1604, 1675), False, 'from torchdiffeq import odeint\n'), ((2157, 2185), 'os.makedirs', 'os.makedirs', (["('./' + filename)"], {}), "('./' + filename)\n", (2168, 2185), False, 'import os\n'), ((3370, 3381), 'time.time', 'time.time', ([], {}), '()\n', (3379, 3381), False, 'import time\n'), ((3621, 3632), 'time.time', 'time.time', ([], {}), '()\n', (3630, 3632), False, 'import time\n'), ((2047, 2072), 
'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (2070, 2072), False, 'import torch\n'), ((2952, 2982), 'torch.nn.Sequential', 'nn.Sequential', (['*feature_layers'], {}), '(*feature_layers)\n', (2965, 2982), True, 'import torch.nn as nn\n'), ((2479, 2501), 'numpy.load', 'np.load', (['"""data/z0.npy"""'], {}), "('data/z0.npy')\n", (2486, 2501), True, 'import numpy as np\n'), ((2543, 2564), 'numpy.load', 'np.load', (['"""data/z.npy"""'], {}), "('data/z.npy')\n", (2550, 2564), True, 'import numpy as np\n'), ((2612, 2639), 'numpy.load', 'np.load', (['"""data/samp_ts.npy"""'], {}), "('data/samp_ts.npy')\n", (2619, 2639), True, 'import numpy as np\n')]
|
"""Unit tests for module for interacting with octave / MATL."""
import base64
import json
import os
import pytest
import shutil
from bs4 import BeautifulSoup
from datetime import datetime
from matl_online import matl
from matl_online.utils import parse_iso8601, ISO8601_FORMAT
from matl_online.public.models import Release
from .factories import DocumentationLinkFactory as DocLink
TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
class TestSourceCache:
"""Series of tests to check if source code is managed properly."""
def test_no_source_no_install(self, app, tmpdir):
"""The source folder does not exist and we won't create it."""
app.config['MATL_FOLDER'] = tmpdir.strpath
folder = matl.get_matl_folder('18.3.0', install=False)
# In this case, the result should simply be None
assert folder is None
def test_no_source_install(self, app, tmpdir, mocker):
"""The source folder does not exist but we'll fetch the source."""
mock_install = mocker.patch('matl_online.matl.install_matl')
app.config['MATL_FOLDER'] = tmpdir.strpath
version = '0.0.0'
folder = matl.get_matl_folder(version)
expected = os.path.join(tmpdir.strpath, version)
mock_install.assert_called_once_with(version, expected)
assert folder == expected
def test_source_folder_exists(self, app, tmpdir):
"""Source folder exists so simply return it."""
app.config['MATL_FOLDER'] = tmpdir.strpath
# Create the source folder
version = '13.4.0'
versiondir = tmpdir.mkdir(version)
folder = matl.get_matl_folder(version, install=False)
# Make sure that we only return the source folder
assert folder == versiondir.strpath
class TestDocLinks:
"""Ensure that documentation hyperlinks are added appropriately."""
def test_basic_doclink(self, db):
"""Use a straightforward single function name."""
link = DocLink(name='ans')
template = 'This is a doc string for <strong>%s</strong>'
output = matl.add_doc_links(template % link.name)
soup = BeautifulSoup(output, 'html.parser')
assert soup.strong.a['href'] == link.link
assert soup.strong.a.text == link.name
def test_multiple_doclink(self, db):
"""Include two functions in the same docstring."""
links = (DocLink(name='func1'), DocLink(name='func2'))
template = 'This is a doc for <strong>%s</strong>'
docstring = (template % links[0].name) + (template % links[1].name)
output = matl.add_doc_links(docstring)
soup = BeautifulSoup(output, 'html.parser')
strongs = soup.findAll('strong')
assert len(strongs) == len(links)
for k, strong in enumerate(strongs):
assert strong.a['href'] == links[k].link
assert strong.a.text == links[k].name
def test_single_quoted(self, db):
"""Single quoted function names should be ignored."""
double = DocLink(name='double')
links = (DocLink(name='func1'), DocLink(name='func2'))
docstring = ("doc string for <strong>'%s'</strong>, "
'<strong>%s</strong> and <strong>%s</strong>') % \
(double.name, links[0].name, links[1].name)
output = matl.add_doc_links(docstring)
soup = BeautifulSoup(output, 'html.parser')
strongs = soup.findAll('strong')
# Make sure the first one wasn't converted to a link
assert strongs[0].a is None
# Remove it and make sure everything else is golden
strongs = strongs[1:]
assert len(strongs) == len(links)
for k, strong in enumerate(strongs):
assert strong.a['href'] == links[k].link
assert strong.a.text == links[k].name
def test_complex_function(self, db):
"""Test when there is a multi-function example."""
mat2cell = DocLink(name='mat2cell')
ones = DocLink(name='ones')
size = DocLink(name='size')
ndims = DocLink(name='ndims')
expected = [mat2cell, ones, size, size, size, ndims]
ex = 'mat2cell(x, ones(size(x,1),1), size(x,2),...,size(x,ndims(x)))'
docstring = 'Doc for: <strong>%s</strong>' % ex
output = matl.add_doc_links(docstring)
soup = BeautifulSoup(output, 'html.parser')
assert len(soup.findAll('strong')) == 1
links = soup.strong.findAll('a')
assert len(links) == len(expected)
for k, link in enumerate(links):
assert link.text == expected[k].name
assert link['href'] == expected[k].link
class TestResults:
"""Series of tests to ensure proper MATL output parsing."""
def test_error_parsing(self):
"""All errors are correctly classified."""
msg = 'single error'
result = matl.parse_matl_results('[STDERR]' + msg)
assert isinstance(result, list)
assert len(result) == 1
assert result[0]['type'] == 'stderr'
assert result[0]['value'] == msg
def test_invalid_image_parsing(self):
"""Test with a bad filename and ensure no result."""
filename = '/ignore/this/filename.png'
result = matl.parse_matl_results('[IMAGE]' + filename)
assert isinstance(result, list)
assert len(result) == 0
def test_nn_image_parsing(self, tmpdir):
"""Test for nearest-neighbor interpolated image."""
fileobj = tmpdir.join('image.png')
contents = b'hello'
fileobj.write(contents)
# Parse the string
result = matl.parse_matl_results('[IMAGE_NN]' + fileobj.strpath)
assert isinstance(result, list)
assert len(result) == 1
assert result[0]['type'] == 'image_nn'
        # The payload should be the data URI header plus the encoded contents
encoded = base64.b64encode(contents).decode()
assert result[0]['value'] == 'data:image/png;base64,' + encoded
# Make sure the file was not removed
assert os.path.isfile(fileobj.strpath)
def test_image_parsing(self, tmpdir):
"""Test valid image result."""
fileobj = tmpdir.join('image.png')
contents = b'hello'
fileobj.write(contents)
# Parse the string
result = matl.parse_matl_results('[IMAGE]' + fileobj.strpath)
assert isinstance(result, list)
assert len(result) == 1
assert result[0]['type'] == 'image'
        # The payload should be the data URI header plus the encoded contents
encoded = base64.b64encode(contents).decode()
assert result[0]['value'] == 'data:image/png;base64,' + encoded
# Make sure the file was not removed
assert os.path.isfile(fileobj.strpath)
def test_invalid_audio_parsing(self):
"""Test with a bad filename and ensure no result."""
filename = '/ignore/this/audio.wav'
result = matl.parse_matl_results('[AUDIO]' + filename)
assert isinstance(result, list)
assert len(result) == 0
def test_audio_parsing(self, tmpdir):
"""Test valid audio result."""
fileobj = tmpdir.join('audio.wav')
contents = b'AUDIO'
fileobj.write(contents)
# Parse the string
result = matl.parse_matl_results('[AUDIO]' + fileobj.strpath)
assert isinstance(result, list)
assert len(result) == 1
assert result[0]['type'] == 'audio'
encoded = base64.b64encode(contents).decode()
assert result[0]['value'] == 'data:audio/wav;base64,' + encoded
# Make sure that the file was not removed
assert os.path.isfile(fileobj.strpath)
def test_stdout2_parsing(self):
"""Test potential to have a second type of STDOUT."""
        expected = 'output2'
result = matl.parse_matl_results('[STDOUT]' + expected)
assert isinstance(result, list)
assert len(result) == 1
assert result[0]['type'] == 'stdout2'
assert result[0]['value'] == expected
def test_stdout_single_line_parsing(self):
"""A single line of output is handled as STDOUT."""
expected = 'standard output'
result = matl.parse_matl_results(expected)
assert isinstance(result, list)
assert len(result) == 1
assert result[0]['type'] == 'stdout'
assert result[0]['value'] == expected
def test_stdout_multi_line_parsing(self):
"""Multi-line output is also handled as STDOUT if not specified."""
expected = 'standard\noutput'
result = matl.parse_matl_results(expected)
assert isinstance(result, list)
assert len(result) == 1
assert result[0]['type'] == 'stdout'
assert result[0]['value'] == expected
class TestHelpParsing:
"""Series of tests for checking help to JSON conversion."""
def test_generate_help_json(self, tmpdir, mocker, db):
"""Check all reading / parsing of help .mat file."""
folder = mocker.patch('matl_online.matl.get_matl_folder')
folder.return_value = tmpdir.strpath
# Copy the test file into place
shutil.copy(os.path.join(TEST_DATA_DIR, 'help.mat'),
os.path.join(tmpdir.strpath, 'help.mat'))
outfile = matl.help_file('1.2.3')
assert outfile == os.path.join(folder.return_value, 'help.json')
# Now actually check the file
with open(outfile, 'r') as fid:
data = json.load(fid)
assert 'data' in data
assert len(data['data']) == 3
# Make sure it has all the necessary keys
expected = ['source', 'description', 'brief', 'arguments']
expected.sort()
actual = list(data['data'][0].keys())
actual.sort()
assert actual == expected
item = data['data'][0]
# make sure all newlines were removed from description
assert item.get('description').find('\n') == -1
assert item.get('arguments') == ''
assert item.get('source') == '&'
assert item.get('brief') == 'alternative input/output specification'
item = data['data'][1]
assert item.get('description').find('\n') == -1
assert item.get('arguments') == '1--2 (1 / 2); 1'
assert item.get('source') == 'a'
assert item.get('brief') == 'any'
item = data['data'][2]
assert item.get('description') == ' '
assert item.get('arguments') == '0; 1'
assert item.get('source') == 'Y?'
assert item.get('brief') == ''
def test_help_json_exists(self, tmpdir, mocker):
"""Verify correctness of output JSON."""
folder = mocker.patch('matl_online.matl.get_matl_folder')
folder.return_value = tmpdir.strpath
jsonfile = tmpdir.join('help.json')
contents = 'placeholder'
jsonfile.write(contents)
outfile = matl.help_file('1.2.3')
assert outfile == jsonfile.strpath
# Make sure the file wasn't updated
with open(outfile, 'r') as fid:
assert fid.read() == contents
class TestInstall:
"""Tests to check if MATL is properly downloaded and installed."""
def test_valid_version(self, tmpdir, mocker, app):
"""Test using a version which we know to exist on github."""
get = mocker.patch('matl_online.matl.requests.get')
get.return_value.status_code = 200
get.return_value.json = lambda: {'zipball_url': 'zipball'}
content = b'zipball_content'
get.return_value.content = content
zipper = mocker.patch('matl_online.matl.unzip')
matl.install_matl('1.2.3', tmpdir.strpath)
assert zipper.called
assert zipper.call_args[0][0].read() == content
assert zipper.call_args[0][1] == tmpdir.strpath
def test_invalid_version(self, tmpdir, mocker, app):
"""Try to install a version which does NOT exist on github."""
get = mocker.patch('matl_online.matl.requests.get')
get.return_value.status_code = 404
with pytest.raises(KeyError):
matl.install_matl('3.4.5', tmpdir.strpath)
class TestReleaseRefresh:
"""Tests for updating our local release database from github."""
def test_all_new(self, mocker, app, db):
"""Completely populate the database (no previous entries)."""
get = mocker.patch('matl_online.matl.requests.get')
with open(os.path.join(TEST_DATA_DIR, 'releases.json')) as fid:
data = json.load(fid)
get.return_value.json = lambda: data
matl.refresh_releases()
# Now query all releases
releases = Release.query.all()
assert len(releases) == len(data)
for k, release in enumerate(releases):
assert release.tag == data[k]['tag_name']
def test_prerelease(self, mocker, app, db):
"""Ensure that pre-releases are ignored."""
# Change one of the releases to a pre release and hope it's ignored
get = mocker.patch('matl_online.matl.requests.get')
with open(os.path.join(TEST_DATA_DIR, 'releases.json')) as fid:
data = json.load(fid)
data[-1]['prerelease'] = True
get.return_value.json = lambda: data
matl.refresh_releases()
# Query all releases
releases = Release.query.all()
assert len(releases) == len(data) - 1
for k, release in enumerate(releases):
assert release.tag == data[k]['tag_name']
def test_updated_release(self, mocker, app, db):
"""Updated releases should be updated in our database."""
get = mocker.patch('matl_online.matl.requests.get')
with open(os.path.join(TEST_DATA_DIR, 'releases.json')) as fid:
data = json.load(fid)
# Make a release with the first one listed here but set the
# date to be wrong
tag_of_interest = data[0]['tag_name']
Release.create(date=parse_iso8601(data[0]['published_at']),
tag=tag_of_interest)
# Now make the pub date something else
newdate = datetime(2000, 1, 1)
data[0]['published_at'] = newdate.strftime(ISO8601_FORMAT)
get.return_value.json = lambda: data
assert Release.query.count() == 1
matl.refresh_releases()
releases = Release.query.all()
assert len(releases) == len(data)
# Now check to make sure that the release has the updated date
updated = Release.query.filter(Release.tag == tag_of_interest).one()
assert updated.date == newdate
def test_updated_release_with_source(self, mocker, app, db, tmpdir):
"""Updated releases should remove the old source code."""
matl_folder = mocker.patch('matl_online.matl.get_matl_folder')
matl_folder.return_value = tmpdir.strpath
assert os.path.isdir(tmpdir.strpath)
self.test_updated_release(mocker, app, db)
assert not os.path.isdir(tmpdir.strpath)
class TestMATLInterface:
"""Some basic tests to check that the MATL interface is working."""
def test_empty_inputs(self, mocker, app, moctave):
"""If no inputs are provided, MATL shouldn't receive any."""
get_matl_folder = mocker.patch('matl_online.matl.get_matl_folder')
foldername = 'folder'
get_matl_folder.return_value = foldername
matl.matl(moctave, '-ro')
# Make sure we only had eval calls (faster)
assert len(moctave.method_calls) == 0
# Make sure we move to the temp directory at the beginning
assert moctave.evals[0].startswith('cd(')
# Ensure the MATL code gets added to the path
assert moctave.evals[1] == "addpath('%s')" % foldername
# Make sure we cleanup at the end
assert moctave.evals[-1].startswith('cd(')
def test_single_input(self, mocker, app, moctave):
"""Single input parameter should be send to matl_runner."""
get_matl_folder = mocker.patch('matl_online.matl.get_matl_folder')
get_matl_folder.return_value = ''
matl.matl(moctave, '-ro', code='D', inputs='12')
# Find the call to matl_runner
call = [x for x in moctave.evals if x.startswith('matl_runner')]
assert len(call) == 1
assert call[0].rstrip() == "matl_runner('-ro', {'D'}, '12');"
def test_multiple_inputs(self, mocker, app, moctave):
"""Multiple input parameters should be send to matl_runner."""
get_matl_folder = mocker.patch('matl_online.matl.get_matl_folder')
get_matl_folder.return_value = ''
matl.matl(moctave, '-ro', code='D', inputs='12\n13')
# Find the call to matl_runner
call = [x for x in moctave.evals if x.startswith('matl_runner')]
assert len(call) == 1
assert call[0].rstrip() == "matl_runner('-ro', {'D'}, '12','13');"
def test_string_escape(self, mocker, app, moctave):
"""All single quotes need to be escaped properly."""
get_matl_folder = mocker.patch('matl_online.matl.get_matl_folder')
get_matl_folder.return_value = ''
matl.matl(moctave, '-ro', code="'abc'")
# Find the call to matl_runner
call = [x for x in moctave.evals if x.startswith('matl_runner')]
assert len(call) == 1
assert call[0].rstrip() == "matl_runner('-ro', {'''abc'''});"
|
[
"matl_online.public.models.Release.query.count",
"os.path.isfile",
"matl_online.matl.help_file",
"os.path.join",
"os.path.dirname",
"matl_online.matl.get_matl_folder",
"pytest.raises",
"matl_online.matl.refresh_releases",
"matl_online.matl.add_doc_links",
"matl_online.utils.parse_iso8601",
"datetime.datetime",
"matl_online.public.models.Release.query.filter",
"matl_online.public.models.Release.query.all",
"bs4.BeautifulSoup",
"matl_online.matl.matl",
"json.load",
"os.path.isdir",
"matl_online.matl.install_matl",
"base64.b64encode",
"matl_online.matl.parse_matl_results"
] |
[((415, 440), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (430, 440), False, 'import os\n'), ((740, 785), 'matl_online.matl.get_matl_folder', 'matl.get_matl_folder', (['"""18.3.0"""'], {'install': '(False)'}), "('18.3.0', install=False)\n", (760, 785), False, 'from matl_online import matl\n'), ((1174, 1203), 'matl_online.matl.get_matl_folder', 'matl.get_matl_folder', (['version'], {}), '(version)\n', (1194, 1203), False, 'from matl_online import matl\n'), ((1223, 1260), 'os.path.join', 'os.path.join', (['tmpdir.strpath', 'version'], {}), '(tmpdir.strpath, version)\n', (1235, 1260), False, 'import os\n'), ((1645, 1689), 'matl_online.matl.get_matl_folder', 'matl.get_matl_folder', (['version'], {'install': '(False)'}), '(version, install=False)\n', (1665, 1689), False, 'from matl_online import matl\n'), ((2103, 2143), 'matl_online.matl.add_doc_links', 'matl.add_doc_links', (['(template % link.name)'], {}), '(template % link.name)\n', (2121, 2143), False, 'from matl_online import matl\n'), ((2160, 2196), 'bs4.BeautifulSoup', 'BeautifulSoup', (['output', '"""html.parser"""'], {}), "(output, 'html.parser')\n", (2173, 2196), False, 'from bs4 import BeautifulSoup\n'), ((2613, 2642), 'matl_online.matl.add_doc_links', 'matl.add_doc_links', (['docstring'], {}), '(docstring)\n', (2631, 2642), False, 'from matl_online import matl\n'), ((2659, 2695), 'bs4.BeautifulSoup', 'BeautifulSoup', (['output', '"""html.parser"""'], {}), "(output, 'html.parser')\n", (2672, 2695), False, 'from bs4 import BeautifulSoup\n'), ((3343, 3372), 'matl_online.matl.add_doc_links', 'matl.add_doc_links', (['docstring'], {}), '(docstring)\n', (3361, 3372), False, 'from matl_online import matl\n'), ((3389, 3425), 'bs4.BeautifulSoup', 'BeautifulSoup', (['output', '"""html.parser"""'], {}), "(output, 'html.parser')\n", (3402, 3425), False, 'from bs4 import BeautifulSoup\n'), ((4318, 4347), 'matl_online.matl.add_doc_links', 'matl.add_doc_links', (['docstring'], {}), '(docstring)\n', (4336, 4347), False, 'from matl_online import matl\n'), ((4364, 4400), 'bs4.BeautifulSoup', 'BeautifulSoup', (['output', '"""html.parser"""'], {}), "(output, 'html.parser')\n", (4377, 4400), False, 'from bs4 import BeautifulSoup\n'), ((4896, 4937), 'matl_online.matl.parse_matl_results', 'matl.parse_matl_results', (["('[STDERR]' + msg)"], {}), "('[STDERR]' + msg)\n", (4919, 4937), False, 'from matl_online import matl\n'), ((5265, 5310), 'matl_online.matl.parse_matl_results', 'matl.parse_matl_results', (["('[IMAGE]' + filename)"], {}), "('[IMAGE]' + filename)\n", (5288, 5310), False, 'from matl_online import matl\n'), ((5638, 5693), 'matl_online.matl.parse_matl_results', 'matl.parse_matl_results', (["('[IMAGE_NN]' + fileobj.strpath)"], {}), "('[IMAGE_NN]' + fileobj.strpath)\n", (5661, 5693), False, 'from matl_online import matl\n'), ((6073, 6104), 'os.path.isfile', 'os.path.isfile', (['fileobj.strpath'], {}), '(fileobj.strpath)\n', (6087, 6104), False, 'import os\n'), ((6335, 6387), 'matl_online.matl.parse_matl_results', 'matl.parse_matl_results', (["('[IMAGE]' + fileobj.strpath)"], {}), "('[IMAGE]' + fileobj.strpath)\n", (6358, 6387), False, 'from matl_online import matl\n'), ((6764, 6795), 'os.path.isfile', 'os.path.isfile', (['fileobj.strpath'], {}), '(fileobj.strpath)\n', (6778, 6795), False, 'import os\n'), ((6961, 7006), 'matl_online.matl.parse_matl_results', 'matl.parse_matl_results', (["('[AUDIO]' + filename)"], {}), "('[AUDIO]' + filename)\n", (6984, 7006), False, 'from matl_online import matl\n'), ((7310, 7362), 
'matl_online.matl.parse_matl_results', 'matl.parse_matl_results', (["('[AUDIO]' + fileobj.strpath)"], {}), "('[AUDIO]' + fileobj.strpath)\n", (7333, 7362), False, 'from matl_online import matl\n'), ((7673, 7704), 'os.path.isfile', 'os.path.isfile', (['fileobj.strpath'], {}), '(fileobj.strpath)\n', (7687, 7704), False, 'import os\n'), ((7850, 7896), 'matl_online.matl.parse_matl_results', 'matl.parse_matl_results', (["('[STDOUT]' + expected)"], {}), "('[STDOUT]' + expected)\n", (7873, 7896), False, 'from matl_online import matl\n'), ((8224, 8257), 'matl_online.matl.parse_matl_results', 'matl.parse_matl_results', (['expected'], {}), '(expected)\n', (8247, 8257), False, 'from matl_online import matl\n'), ((8600, 8633), 'matl_online.matl.parse_matl_results', 'matl.parse_matl_results', (['expected'], {}), '(expected)\n', (8623, 8633), False, 'from matl_online import matl\n'), ((9302, 9325), 'matl_online.matl.help_file', 'matl.help_file', (['"""1.2.3"""'], {}), "('1.2.3')\n", (9316, 9325), False, 'from matl_online import matl\n'), ((10931, 10954), 'matl_online.matl.help_file', 'matl.help_file', (['"""1.2.3"""'], {}), "('1.2.3')\n", (10945, 10954), False, 'from matl_online import matl\n'), ((11659, 11701), 'matl_online.matl.install_matl', 'matl.install_matl', (['"""1.2.3"""', 'tmpdir.strpath'], {}), "('1.2.3', tmpdir.strpath)\n", (11676, 11701), False, 'from matl_online import matl\n'), ((12608, 12631), 'matl_online.matl.refresh_releases', 'matl.refresh_releases', ([], {}), '()\n', (12629, 12631), False, 'from matl_online import matl\n'), ((12685, 12704), 'matl_online.public.models.Release.query.all', 'Release.query.all', ([], {}), '()\n', (12702, 12704), False, 'from matl_online.public.models import Release\n'), ((13294, 13317), 'matl_online.matl.refresh_releases', 'matl.refresh_releases', ([], {}), '()\n', (13315, 13317), False, 'from matl_online import matl\n'), ((13367, 13386), 'matl_online.public.models.Release.query.all', 'Release.query.all', ([], {}), '()\n', (13384, 13386), False, 'from matl_online.public.models import Release\n'), ((14367, 14390), 'matl_online.matl.refresh_releases', 'matl.refresh_releases', ([], {}), '()\n', (14388, 14390), False, 'from matl_online import matl\n'), ((14411, 14430), 'matl_online.public.models.Release.query.all', 'Release.query.all', ([], {}), '()\n', (14428, 14430), False, 'from matl_online.public.models import Release\n'), ((14940, 14969), 'os.path.isdir', 'os.path.isdir', (['tmpdir.strpath'], {}), '(tmpdir.strpath)\n', (14953, 14969), False, 'import os\n'), ((15460, 15485), 'matl_online.matl.matl', 'matl.matl', (['moctave', '"""-ro"""'], {}), "(moctave, '-ro')\n", (15469, 15485), False, 'from matl_online import matl\n'), ((16166, 16214), 'matl_online.matl.matl', 'matl.matl', (['moctave', '"""-ro"""'], {'code': '"""D"""', 'inputs': '"""12"""'}), "(moctave, '-ro', code='D', inputs='12')\n", (16175, 16214), False, 'from matl_online import matl\n'), ((16685, 16737), 'matl_online.matl.matl', 'matl.matl', (['moctave', '"""-ro"""'], {'code': '"""D"""', 'inputs': '"""12\n13"""'}), "(moctave, '-ro', code='D', inputs='12\\n13')\n", (16694, 16737), False, 'from matl_online import matl\n'), ((17201, 17240), 'matl_online.matl.matl', 'matl.matl', (['moctave', '"""-ro"""'], {'code': '"""\'abc\'"""'}), '(moctave, \'-ro\', code="\'abc\'")\n', (17210, 17240), False, 'from matl_online import matl\n'), ((9180, 9219), 'os.path.join', 'os.path.join', (['TEST_DATA_DIR', '"""help.mat"""'], {}), "(TEST_DATA_DIR, 'help.mat')\n", (9192, 9219), False, 'import os\n'), ((9241, 
9281), 'os.path.join', 'os.path.join', (['tmpdir.strpath', '"""help.mat"""'], {}), "(tmpdir.strpath, 'help.mat')\n", (9253, 9281), False, 'import os\n'), ((9353, 9399), 'os.path.join', 'os.path.join', (['folder.return_value', '"""help.json"""'], {}), "(folder.return_value, 'help.json')\n", (9365, 9399), False, 'import os\n'), ((9498, 9512), 'json.load', 'json.load', (['fid'], {}), '(fid)\n', (9507, 9512), False, 'import json\n'), ((12090, 12113), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (12103, 12113), False, 'import pytest\n'), ((12127, 12169), 'matl_online.matl.install_matl', 'matl.install_matl', (['"""3.4.5"""', 'tmpdir.strpath'], {}), "('3.4.5', tmpdir.strpath)\n", (12144, 12169), False, 'from matl_online import matl\n'), ((12535, 12549), 'json.load', 'json.load', (['fid'], {}), '(fid)\n', (12544, 12549), False, 'import json\n'), ((13179, 13193), 'json.load', 'json.load', (['fid'], {}), '(fid)\n', (13188, 13193), False, 'import json\n'), ((13808, 13822), 'json.load', 'json.load', (['fid'], {}), '(fid)\n', (13817, 13822), False, 'import json\n'), ((14173, 14193), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)'], {}), '(2000, 1, 1)\n', (14181, 14193), False, 'from datetime import datetime\n'), ((14331, 14352), 'matl_online.public.models.Release.query.count', 'Release.query.count', ([], {}), '()\n', (14350, 14352), False, 'from matl_online.public.models import Release\n'), ((15042, 15071), 'os.path.isdir', 'os.path.isdir', (['tmpdir.strpath'], {}), '(tmpdir.strpath)\n', (15055, 15071), False, 'import os\n'), ((5904, 5930), 'base64.b64encode', 'base64.b64encode', (['contents'], {}), '(contents)\n', (5920, 5930), False, 'import base64\n'), ((6595, 6621), 'base64.b64encode', 'base64.b64encode', (['contents'], {}), '(contents)\n', (6611, 6621), False, 'import base64\n'), ((7499, 7525), 'base64.b64encode', 'base64.b64encode', (['contents'], {}), '(contents)\n', (7515, 7525), False, 'import base64\n'), ((12462, 12506), 'os.path.join', 'os.path.join', (['TEST_DATA_DIR', '"""releases.json"""'], {}), "(TEST_DATA_DIR, 'releases.json')\n", (12474, 12506), False, 'import os\n'), ((13106, 13150), 'os.path.join', 'os.path.join', (['TEST_DATA_DIR', '"""releases.json"""'], {}), "(TEST_DATA_DIR, 'releases.json')\n", (13118, 13150), False, 'import os\n'), ((13735, 13779), 'os.path.join', 'os.path.join', (['TEST_DATA_DIR', '"""releases.json"""'], {}), "(TEST_DATA_DIR, 'releases.json')\n", (13747, 13779), False, 'import os\n'), ((14564, 14616), 'matl_online.public.models.Release.query.filter', 'Release.query.filter', (['(Release.tag == tag_of_interest)'], {}), '(Release.tag == tag_of_interest)\n', (14584, 14616), False, 'from matl_online.public.models import Release\n'), ((14011, 14049), 'matl_online.utils.parse_iso8601', 'parse_iso8601', (["data[0]['published_at']"], {}), "(data[0]['published_at'])\n", (14024, 14049), False, 'from matl_online.utils import parse_iso8601, ISO8601_FORMAT\n')]
|
import os
import pickledb
import requests
from pathlib import Path
try:
os.makedirs(str(Path.home() / '.sussex'))
except FileExistsError:
pass
db = pickledb.load(str(Path.home() / '.sussex' / '.auth'), False)
def save_session_id(sessid):
db.set('session_id', sessid)
db.dump()
def read_session_id():
    if not db.get('session_id'):
        save_session_id(get_new_session_id())
    return db.get('session_id')
def clear_session_id():
if db.get('session_id'):
db.rem('session_id')
def get_new_session_id():
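    # Hit the homepage once so the server issues a fresh PHPSESSID cookie.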
session = requests.Session()
session.get('https://direct.sussex.ac.uk')
return session.cookies.get_dict()['PHPSESSID']
def save_login(username, password):
db.set('sussex_username', username)
db.set('sussex_password', password)
db.dump()
return True
def verify_login_status():
login()
if make_get('https://direct.sussex.ac.uk/page.php?realm=home').history:
return False
else:
return True
def login():
requests.post('https://direct.sussex.ac.uk/login.php', data = {
'username': db.get('sussex_username'),
        'password': db.get('sussex_password'),
'QUERY_STRING': None,
'js_enabled': 0
}, cookies = {
'PHPSESSID': read_session_id()
}, headers={
'Sec-Fetch-Mode': 'navigate',
'Sec-Fetch-Site': 'same-origin',
'Sec-Fetch-User': '?1',
'Origin': 'https://direct.sussex.ac.uk',
'Referer': 'https://direct.sussex.ac.uk/login.php',
'Upgrade-Insecure-Requests': '1'
})
def make_get(url, payload=None):
return requests.get(
url,
data = payload,
cookies = {
'PHPSESSID': read_session_id()
}
)
def make_post(url):
pass
|
[
"requests.Session",
"pathlib.Path.home"
] |
[((572, 590), 'requests.Session', 'requests.Session', ([], {}), '()\n', (588, 590), False, 'import requests\n'), ((93, 104), 'pathlib.Path.home', 'Path.home', ([], {}), '()\n', (102, 104), False, 'from pathlib import Path\n'), ((177, 188), 'pathlib.Path.home', 'Path.home', ([], {}), '()\n', (186, 188), False, 'from pathlib import Path\n')]
|
import pbr.version
from sphinx.util import logging
from . import directive, domain
LOG = logging.getLogger(__name__)
__version__ = pbr.version.VersionInfo(
"sphinxcontrib.datatemplates").version_string()
def setup(app):
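    # Register the legacy datatemplate directive and its domain with Sphinx.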
LOG.info('initializing sphinxcontrib.datatemplates')
app.add_directive('datatemplate', directive.DataTemplateLegacy)
app.add_domain(domain.DataTemplateDomain)
return {
'version': __version__,
'parallel_read_safe': True,
'parallel_write_safe': True,
}
|
[
"sphinx.util.logging.getLogger"
] |
[((91, 118), 'sphinx.util.logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (108, 118), False, 'from sphinx.util import logging\n')]
|
"""
Permits calling arbitrary functions and passing some forms of data from C++
to Python (only one direction) as a server-client pair.
The server in this case is the C++ program, and the client is this binary.
For an example of C++ usage, see `call_python_server_test.cc`.
Here's an example of running with the C++ test program:
cd drake
bazel build //common/proto:call_python_client_cli //common/proto:call_python_server_test # noqa
# Create default pipe file.
rm -f /tmp/python_rpc && mkfifo /tmp/python_rpc
# In Terminal 1, run client.
./bazel-bin/common/proto/call_python_client_cli
# In Terminal 2, run server (or your C++ program).
./bazel-bin/common/proto/call_python_server_test
To use in Jupyter (if you have it installed) without a FIFO file (such that
it's non-blocking):
cd drake
bazel build //common/proto:call_python_client_cli //common/proto:call_python_server_test # noqa
rm -f /tmp/python_rpc # Do not make it FIFO
# In Terminal 1, run server, create output.
./bazel-bin/common/proto/call_python_server_test
# In Terminal 2, run client in notebook.
./bazel-bin/common/proto/call_python_client_cli \
-c jupyter notebook ${PWD}/common/proto/call_python_client_notebook.ipynb # noqa
# Execute: Cell > Run All
Note:
Occasionally, the plotting will not come through on the notebook. I (Eric)
am unsure why.
"""
import argparse
import os
from queue import Queue
import signal
import stat
import sys
from threading import Thread
import time
import traceback
import numpy as np
from drake import lcmt_call_python, lcmt_call_python_data
def _ensure_sigint_handler():
# @ref https://stackoverflow.com/a/47801921/2654527
if signal.getsignal(signal.SIGINT) == signal.SIG_IGN:
signal.signal(signal.SIGINT, signal.default_int_handler)
def _get_required_helpers(scope_locals):
# Provides helpers to keep C++ interface as simple as possible.
# @returns Dictionary containing the helpers needed.
def getitem(obj, index):
"""Global function for `obj[index]`. """
return obj[index]
def setitem(obj, index, value):
"""Global function for `obj[index] = value`. """
obj[index] = value
return obj[index]
def call(obj, *args, **kwargs):
return obj(*args, **kwargs)
def pass_through(value):
"""Pass-through for direct variable access. """
return value
def make_tuple(*args):
"""Create a tuple from an argument list. """
return tuple(args)
def make_list(*args):
"""Create a list from an argument list. """
return list(args)
def make_kwargs(*args):
"""Create a keyword argument object from an argument list. """
assert len(args) % 2 == 0
keys = args[0::2]
values = args[1::2]
kwargs = dict(zip(keys, values))
return _KwArgs(**kwargs)
def _make_slice(expr):
"""Parse a slice object from a string. """
def to_piece(s):
return s and int(s) or None
pieces = list(map(to_piece, expr.split(':')))
if len(pieces) == 1:
return slice(pieces[0], pieces[0] + 1)
else:
return slice(*pieces)
def make_slice_arg(*args):
"""Create a scalar or tuple for accessing objects via slices. """
out = [None] * len(args)
for i, arg in enumerate(args):
if isinstance(arg, str):
out[i] = _make_slice(arg)
else:
out[i] = arg
# Special case: If single index, collapse.
if len(out) == 1:
return out[0]
else:
return tuple(out)
def setvar(var, value):
"""Sets a variable in the client's locals. """
scope_locals[var] = value
def setvars(*args):
"""Sets multiple variables in the client's locals. """
scope_locals.update(make_kwargs(*args))
execution_check = _ExecutionCheck()
out = locals().copy()
# Scrub extra stuff.
del out["scope_locals"]
return out
class _KwArgs(dict):
# Indicates values meant solely for `**kwargs`.
pass
class _ExecutionCheck:
# Allows checking that we received and executed a complete set of
# instructions.
def __init__(self):
self.count = 0
def start(self):
self.count += 1
def finish(self):
assert self.count > 0
self.count -= 1
def _merge_dicts(*args):
# Merges a list of dict's.
out = {}
for arg in args:
out.update(arg)
return out
def _fix_pyplot(plt):
# This patches matplotlib/matplotlib#9412 by injecting `time` into the
# module (#7597).
cur = plt.__dict__
if 'time' not in cur:
cur['time'] = time
def default_globals():
"""Creates default globals for code that the client side can execute.
This is geared for convenient (not necessarily efficient) plotting
with `matplotlib`.
"""
# @note This imports modules at a function-scope rather than at a
# module-scope, which does not satisfy PEP8. This is intentional, as it
# allows for a cleaner scope separation between the client core code (e.g.
# `CallPythonClient`) and the client user code (e.g. `plot(x, y)`).
# TODO(eric.cousineau): Consider relegating this to a different module,
# possibly when this falls under `pydrake`.
import numpy as np
from mpl_toolkits.mplot3d import Axes3D
import matplotlib
import matplotlib.pyplot as plt
import pylab # See `%pylab?` in IPython.
# TODO(eric.cousineau): Where better to put this?
matplotlib.interactive(True)
_fix_pyplot(plt)
def disp(value):
"""Alias for print."""
print(value)
def wait():
"""Waits to allow user interaction with plots."""
plt.show(block=True)
def pause(interval):
"""Pause for `interval` seconds, letting the GUI flush its event queue.
@note This is a *necessary* function to be defined if these globals are
not used!
"""
plt.pause(interval)
def box(bmin, bmax, rstride=1, cstride=1, **kwargs):
"""Plots a box bmin[i] <= x[i] <= bmax[i] for i < 3."""
fig = plt.gcf()
ax = fig.gca(projection='3d')
u = np.linspace(1, 9, 5) * np.pi / 4
U, V = np.meshgrid(u, u)
cx, cy, cz = (bmax + bmin) / 2
dx, dy, dz = bmax - bmin
X = cx + dx * np.cos(U) * np.sin(V)
Y = cy + dy * np.sin(U) * np.sin(V)
Z = cz + dz * np.cos(V) / np.sqrt(2)
ax.plot_surface(X, Y, Z, rstride=rstride, cstride=cstride, **kwargs)
def plot3(x, y, z, **kwargs):
"""Plots a 3d line plot."""
fig = plt.gcf()
ax = fig.gca(projection='3d')
ax.plot(x, y, z, **kwargs)
def sphere(n, rstride=1, cstride=1, **kwargs):
"""Plots a sphere."""
fig = plt.gcf()
ax = fig.gca(projection='3d')
u = np.linspace(0, np.pi, n)
v = np.linspace(0, 2 * np.pi, n)
X = np.outer(np.sin(u), np.sin(v))
Y = np.outer(np.sin(u), np.cos(v))
Z = np.outer(np.cos(u), np.ones_like(v))
ax.plot_surface(X, Y, Z, rstride=rstride, cstride=cstride, **kwargs)
def surf(x, y, Z, rstride=1, cstride=1, **kwargs):
"""Plots a 3d surface."""
fig = plt.gcf()
ax = fig.gca(projection='3d')
X, Y = np.meshgrid(x, y)
ax.plot_surface(X, Y, Z, rstride=rstride, cstride=cstride, **kwargs)
def show():
"""Shows `matplotlib` images without blocking.
Generally not needed if `matplotlib.is_interactive()` is true.
"""
plt.show(block=False)
def magic(N):
"""Provides simple odd-only case for magic squares.
@ref https://scipython.com/book/chapter-6-numpy/examples/creating-a-magic-square # noqa
"""
assert N % 2 == 1
magic_square = np.zeros((N, N), dtype=int)
n = 1
i, j = 0, N//2
while n <= N**2:
magic_square[i, j] = n
n += 1
newi, newj = (i - 1) % N, (j + 1) % N
if magic_square[newi, newj]:
i += 1
else:
i, j = newi, newj
return magic_square
# Use <module>.__dict__ to simulate `from <module> import *`, since that is
# normally invalid in a function with nested functions.
return _merge_dicts(
globals(),
plt.__dict__,
pylab.__dict__,
locals())
class CallPythonClient:
"""Provides a client to receive Python commands.
Enables printing or plotting from a C++ application for debugging
purposes.
"""
def __init__(self, filename=None, stop_on_error=True,
scope_globals=None, scope_locals=None,
threaded=False, wait=False):
if filename is None:
# TODO(jamiesnape): Implement and use a
# drake.common.GetRpcPipeTempDirectory function.
temp_directory = os.environ.get("TEST_TMPDIR", "/tmp")
self.filename = os.path.join(temp_directory, "python_rpc")
else:
self.filename = filename
# Scope. Give it access to everything here.
        # However, keep its written values scoped.
if scope_locals is None:
self.scope_locals = {}
else:
self.scope_locals = scope_locals
# Define globals as (a) required helpers for C++ interface, and
# (b) convenience plotting functionality.
# N.B. The provided locals OR globals can shadow the helpers. BE
# CAREFUL!
required_helpers = _get_required_helpers(self.scope_locals)
if scope_globals is None:
scope_globals = default_globals()
self.scope_globals = _merge_dicts(required_helpers, scope_globals)
self._stop_on_error = stop_on_error
self._threaded = threaded
self._loop = False
self._wait = False
if wait:
if _is_fifo(self.filename):
self._loop = True
print("Looping for FIFO file (wait=True).")
else:
self._wait = True
print("Waiting after processing non-FIFO file (wait=True).")
# Variables indexed by GUID.
self._client_vars = {}
self._had_error = False
self._done = False
self._file = None
def _to_array(self, arg, dtype):
# Converts a lcmt_call_python argument to the appropriate NumPy array
# (or scalar).
np_raw = np.frombuffer(arg.data, dtype=dtype)
if arg.shape_type == lcmt_call_python_data.SCALAR:
assert arg.cols == 1 and arg.rows == 1
return np_raw[0]
elif arg.shape_type == lcmt_call_python_data.VECTOR:
assert arg.cols == 1
return np_raw.reshape(arg.rows)
elif arg.shape_type is None or \
arg.shape_type == lcmt_call_python_data.MATRIX:
# TODO(eric.cousineau): Figure out how to ensure `np.frombuffer`
# creates a column-major array?
return np_raw.reshape(arg.cols, arg.rows).T
def _execute_message(self, msg):
# Executes a message, handling / recording that an error occurred.
if self._stop_on_error:
# Do not wrap in a `try` / `catch` to simplify debugging.
self._execute_message_impl(msg)
else:
try:
self._execute_message_impl(msg)
except Exception as e:
traceback.print_exc(file=sys.stderr)
sys.stderr.write(" Continuing (no --stop_on_error)\n")
self._had_error = True
def _execute_message_impl(self, msg):
# Executes relevant portions of a message.
# Create input arguments.
inputs = []
kwargs = None
for i, arg in enumerate(msg.rhs):
value = None
if (arg.data_type
== lcmt_call_python_data.REMOTE_VARIABLE_REFERENCE):
id = np.frombuffer(arg.data, dtype=np.uint64).reshape(1)[0]
if id not in self._client_vars:
raise RuntimeError("Unknown local variable. "
"Dropping message.")
value = self._client_vars[id]
elif arg.data_type == lcmt_call_python_data.DOUBLE:
value = self._to_array(arg, np.double)
elif arg.data_type == lcmt_call_python_data.CHAR:
assert arg.rows == 1
value = arg.data.decode('utf8')
elif arg.data_type == lcmt_call_python_data.LOGICAL:
value = self._to_array(arg, np.bool)
elif arg.data_type == lcmt_call_python_data.INT:
value = self._to_array(arg, np.int32)
else:
assert False
if isinstance(value, _KwArgs):
assert kwargs is None
kwargs = value
else:
inputs.append(value)
# Call the function
# N.B. No security measures to sanitize function name.
function_name = msg.function_name
assert isinstance(function_name, str), type(function_name)
self.scope_locals.update(_tmp_args=inputs, _tmp_kwargs=kwargs or {})
# N.B. No try-catch block here. Can change this if needed.
if function_name == "exec":
assert len(inputs) == 1
assert kwargs is None or len(kwargs) == 0
exec(inputs[0], self.scope_globals, self.scope_locals)
out = None
else:
out = eval(function_name + "(*_tmp_args, **_tmp_kwargs)",
self.scope_globals, self.scope_locals)
self.scope_locals.update(_tmp_out=out)
# Update outputs.
self._client_vars[msg.lhs] = out
def run(self):
"""Runs the client code.
@return True if no error encountered.
"""
if self._threaded:
self._handle_messages_threaded()
else:
self.handle_messages(record=False)
# Check any execution in progress.
execution_check = self.scope_globals['execution_check']
if not self._had_error and execution_check.count != 0:
self._had_error = True
sys.stderr.write(
"ERROR: Invalid termination. "
"'execution_check.finish' called insufficient number of "
"times: {}\n".format(execution_check.count))
if self._wait and not self._had_error:
wait_func = self.scope_globals["wait"]
wait_func()
return not self._had_error
def _handle_messages_threaded(self):
# Handles messages in a threaded fashion.
queue = Queue()
def producer_loop():
# Read messages from file, and queue them for execution.
for msg in self._read_next_message():
queue.put(msg)
# Check if an error occurred.
if self._done:
break
# Wait until the queue empties out to signal completion from the
# producer's side.
if not self._done:
queue.join()
self._done = True
producer = Thread(name="Producer", target=producer_loop)
# @note Previously, when trying to do `queue.clear()` in the consumer,
# and `queue.join()` in the producer, there would be intermittent
# deadlocks. By demoting the producer to a daemon, I (eric.c) have not
# yet encountered a deadlock.
producer.daemon = True
producer.start()
# Consume.
# TODO(eric.cousineau): Trying to quit via Ctrl+C is awkward (but kinda
# works). Is there a way to have `plt.pause` handle Ctrl+C differently?
try:
pause = self.scope_globals['pause']
while not self._done:
# Process messages.
while not queue.empty():
msg = queue.get()
queue.task_done()
self._execute_message(msg)
# Spin busy for a bit, let matplotlib (or whatever) flush its
# event queue.
pause(0.01)
except KeyboardInterrupt:
# User pressed Ctrl+C.
self._done = True
print("Quitting")
except Exception as e:
# We encountered an error, and must stop.
self._done = True
self._had_error = True
traceback.print_exc(file=sys.stderr)
sys.stderr.write(" Stopping (--stop_on_error)\n")
# No need to worry about waiting for the producer, as it is a daemon
# thread.
def handle_messages(self, max_count=None, record=True, execute=True):
"""Handle all messages sent (e.g., through IPython).
@param max_count Maximum number of messages to handle.
@param record Record all messages and return them.
@param execute Execute the given message upon receiving it.
        @return (count, msgs) where `count` is how many messages were processed
            (e.g. 0 if no more messages were left) and `msgs` contains the
            messages themselves for playback (if record==True), otherwise an
            empty list.
"""
assert record or execute, "Not doing anything useful?"
count = 0
msgs = []
for msg in self._read_next_message():
if execute:
self._execute_message(msg)
count += 1
if record:
msgs.append(msg)
if max_count is not None and count >= max_count:
break
return (count, msgs)
def execute_messages(self, msgs):
"""Executes a set of recorded messages."""
for msg in msgs:
self._execute_message(msg)
def _read_next_message(self):
"""Returns incoming messages using a generator."""
while not self._done:
fifo = self._get_file()
# Close the file if we reach the end, NOT when exiting the scope
# (which is why `with` is not used here).
# This way the user can read a few messages at a time, with the
# same file handle.
# @note We must close / reopen the file when looping because the
# C++ program will effectively send a EOF signal when it closes
# the pipe.
while not self._done:
message = self._read_fifo_message(fifo)
if message is not None:
yield message
self._close_file()
if not self._loop:
break
def _read_fifo_message(self, fifo):
"""Reads at most one message from the given fifo."""
# Read the datagram size. (The C++ code encodes the datagram_size
# integer as an ASCII string.)
datagram_size = None
buffer = bytearray()
while not self._done:
byte = fifo.read(1)
if not byte: # EOF
return None
if byte == b'\0': # EOM
datagram_size = int(buffer.decode())
break
else:
buffer.extend(byte)
# Read the payload.
buffer[:] = ()
while not self._done:
byte = fifo.read(1)
if not byte: # EOF
return None
buffer.extend(byte)
if len(buffer) == datagram_size:
byte = fifo.read(1)
assert byte == b'\0' # EOM
return lcmt_call_python.decode(bytes(buffer))
def _get_file(self):
# Gets file handle, opening if needed.
if self._file is None:
self._file = open(self.filename, 'rb')
return self._file
def _close_file(self):
# Closes file if open.
if self._file is not None:
self._file.close()
self._file = None
def _is_fifo(filepath):
# Determine if a file is a FIFO named pipe or not.
# @ref https://stackoverflow.com/a/8558940/7829525
return stat.S_ISFIFO(os.stat(filepath).st_mode)
def main(argv):
_ensure_sigint_handler()
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument(
"--no_wait", action='store_true',
help="Close client after messages are processed. "
"For FIFO, this means the client will close after the C++ "
"binary is executed once.")
parser.add_argument(
"--no_threading", action='store_true',
help="Disable threaded dispatch.")
parser.add_argument(
"--stop_on_error", action='store_true',
help="Stop client if there is an error when executing a call.")
parser.add_argument("-f", "--file", type=str, default=None)
parser.add_argument(
"-c", "--command", type=str, nargs='+', default=None,
help="Execute command (e.g. `jupyter notebook`) instead of running "
"client.")
args = parser.parse_args(argv)
if args.command is not None:
        # Execute command s.t. it has access to the relevant PYTHONPATH.
os.execvp(args.command[0], args.command)
# Control should not return to this program unless there was an error.
return False
else:
client = CallPythonClient(
args.file, stop_on_error=args.stop_on_error,
threaded=not args.no_threading, wait=not args.no_wait)
good = client.run()
return good
if __name__ == "__main__":
good = main(sys.argv[1:])
if not good:
exit(1)
|
[
"argparse.ArgumentParser",
"numpy.sin",
"os.path.join",
"numpy.meshgrid",
"traceback.print_exc",
"numpy.linspace",
"matplotlib.pyplot.pause",
"threading.Thread",
"matplotlib.pyplot.show",
"numpy.ones_like",
"os.stat",
"matplotlib.interactive",
"numpy.frombuffer",
"signal.getsignal",
"numpy.cos",
"signal.signal",
"matplotlib.pyplot.gcf",
"queue.Queue",
"numpy.zeros",
"os.environ.get",
"sys.stderr.write",
"os.execvp",
"numpy.sqrt"
] |
[((5645, 5673), 'matplotlib.interactive', 'matplotlib.interactive', (['(True)'], {}), '(True)\n', (5667, 5673), False, 'import matplotlib\n'), ((20361, 20464), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '__doc__', 'formatter_class': 'argparse.RawDescriptionHelpFormatter'}), '(description=__doc__, formatter_class=argparse.\n RawDescriptionHelpFormatter)\n', (20384, 20464), False, 'import argparse\n'), ((1738, 1769), 'signal.getsignal', 'signal.getsignal', (['signal.SIGINT'], {}), '(signal.SIGINT)\n', (1754, 1769), False, 'import signal\n'), ((1797, 1853), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'signal.default_int_handler'], {}), '(signal.SIGINT, signal.default_int_handler)\n', (1810, 1853), False, 'import signal\n'), ((5852, 5872), 'matplotlib.pyplot.show', 'plt.show', ([], {'block': '(True)'}), '(block=True)\n', (5860, 5872), True, 'import matplotlib.pyplot as plt\n'), ((6098, 6117), 'matplotlib.pyplot.pause', 'plt.pause', (['interval'], {}), '(interval)\n', (6107, 6117), True, 'import matplotlib.pyplot as plt\n'), ((6254, 6263), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (6261, 6263), True, 'import matplotlib.pyplot as plt\n'), ((6362, 6379), 'numpy.meshgrid', 'np.meshgrid', (['u', 'u'], {}), '(u, u)\n', (6373, 6379), True, 'import numpy as np\n'), ((6747, 6756), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (6754, 6756), True, 'import matplotlib.pyplot as plt\n'), ((6926, 6935), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (6933, 6935), True, 'import matplotlib.pyplot as plt\n'), ((6986, 7010), 'numpy.linspace', 'np.linspace', (['(0)', 'np.pi', 'n'], {}), '(0, np.pi, n)\n', (6997, 7010), True, 'import numpy as np\n'), ((7023, 7051), 'numpy.linspace', 'np.linspace', (['(0)', '(2 * np.pi)', 'n'], {}), '(0, 2 * np.pi, n)\n', (7034, 7051), True, 'import numpy as np\n'), ((7368, 7377), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (7375, 7377), True, 'import matplotlib.pyplot as plt\n'), ((7431, 7448), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {}), '(x, y)\n', (7442, 7448), True, 'import numpy as np\n'), ((7690, 7711), 'matplotlib.pyplot.show', 'plt.show', ([], {'block': '(False)'}), '(block=False)\n', (7698, 7711), True, 'import matplotlib.pyplot as plt\n'), ((7950, 7977), 'numpy.zeros', 'np.zeros', (['(N, N)'], {'dtype': 'int'}), '((N, N), dtype=int)\n', (7958, 7977), True, 'import numpy as np\n'), ((10593, 10629), 'numpy.frombuffer', 'np.frombuffer', (['arg.data'], {'dtype': 'dtype'}), '(arg.data, dtype=dtype)\n', (10606, 10629), True, 'import numpy as np\n'), ((14824, 14831), 'queue.Queue', 'Queue', ([], {}), '()\n', (14829, 14831), False, 'from queue import Queue\n'), ((15337, 15382), 'threading.Thread', 'Thread', ([], {'name': '"""Producer"""', 'target': 'producer_loop'}), "(name='Producer', target=producer_loop)\n", (15343, 15382), False, 'from threading import Thread\n'), ((21379, 21419), 'os.execvp', 'os.execvp', (['args.command[0]', 'args.command'], {}), '(args.command[0], args.command)\n', (21388, 21419), False, 'import os\n'), ((7073, 7082), 'numpy.sin', 'np.sin', (['u'], {}), '(u)\n', (7079, 7082), True, 'import numpy as np\n'), ((7084, 7093), 'numpy.sin', 'np.sin', (['v'], {}), '(v)\n', (7090, 7093), True, 'import numpy as np\n'), ((7116, 7125), 'numpy.sin', 'np.sin', (['u'], {}), '(u)\n', (7122, 7125), True, 'import numpy as np\n'), ((7127, 7136), 'numpy.cos', 'np.cos', (['v'], {}), '(v)\n', (7133, 7136), True, 'import numpy as np\n'), ((7159, 7168), 'numpy.cos', 'np.cos', (['u'], {}), 
'(u)\n', (7165, 7168), True, 'import numpy as np\n'), ((7170, 7185), 'numpy.ones_like', 'np.ones_like', (['v'], {}), '(v)\n', (7182, 7185), True, 'import numpy as np\n'), ((9040, 9077), 'os.environ.get', 'os.environ.get', (['"""TEST_TMPDIR"""', '"""/tmp"""'], {}), "('TEST_TMPDIR', '/tmp')\n", (9054, 9077), False, 'import os\n'), ((9106, 9148), 'os.path.join', 'os.path.join', (['temp_directory', '"""python_rpc"""'], {}), "(temp_directory, 'python_rpc')\n", (9118, 9148), False, 'import os\n'), ((20274, 20291), 'os.stat', 'os.stat', (['filepath'], {}), '(filepath)\n', (20281, 20291), False, 'import os\n'), ((6314, 6334), 'numpy.linspace', 'np.linspace', (['(1)', '(9)', '(5)'], {}), '(1, 9, 5)\n', (6325, 6334), True, 'import numpy as np\n'), ((6486, 6495), 'numpy.sin', 'np.sin', (['V'], {}), '(V)\n', (6492, 6495), True, 'import numpy as np\n'), ((6530, 6539), 'numpy.sin', 'np.sin', (['V'], {}), '(V)\n', (6536, 6539), True, 'import numpy as np\n'), ((6574, 6584), 'numpy.sqrt', 'np.sqrt', (['(2)'], {}), '(2)\n', (6581, 6584), True, 'import numpy as np\n'), ((16612, 16648), 'traceback.print_exc', 'traceback.print_exc', ([], {'file': 'sys.stderr'}), '(file=sys.stderr)\n', (16631, 16648), False, 'import traceback\n'), ((16661, 16711), 'sys.stderr.write', 'sys.stderr.write', (['""" Stopping (--stop_on_error)\n"""'], {}), "(' Stopping (--stop_on_error)\\n')\n", (16677, 16711), False, 'import sys\n'), ((6474, 6483), 'numpy.cos', 'np.cos', (['U'], {}), '(U)\n', (6480, 6483), True, 'import numpy as np\n'), ((6518, 6527), 'numpy.sin', 'np.sin', (['U'], {}), '(U)\n', (6524, 6527), True, 'import numpy as np\n'), ((6562, 6571), 'numpy.cos', 'np.cos', (['V'], {}), '(V)\n', (6568, 6571), True, 'import numpy as np\n'), ((11578, 11614), 'traceback.print_exc', 'traceback.print_exc', ([], {'file': 'sys.stderr'}), '(file=sys.stderr)\n', (11597, 11614), False, 'import traceback\n'), ((11631, 11686), 'sys.stderr.write', 'sys.stderr.write', (['""" Continuing (no --stop_on_error)\n"""'], {}), "(' Continuing (no --stop_on_error)\\n')\n", (11647, 11686), False, 'import sys\n'), ((12087, 12127), 'numpy.frombuffer', 'np.frombuffer', (['arg.data'], {'dtype': 'np.uint64'}), '(arg.data, dtype=np.uint64)\n', (12100, 12127), True, 'import numpy as np\n')]
|
#!/usr/bin/env python3
# encoding: utf-8
import json
import requests
import urllib3
from time import time
from urllib.parse import unquote_plus
from settings import API_EP_DOUYIN, ROUTE_SIGN_DOUYIN
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
def get_original_url(action, args_dict, ts, device_info):
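    """Build the unsigned Douyin API URL from the action path, query args, timestamp and device identifiers."""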
install_id = device_info['install_id']
device_id = device_info['device_id']
uuid = device_info['uuid']
openudid = device_info['openudid']
args = ""
# print(args_dict)
for (idx, val) in args_dict.items():
args += "&{0}={1}".format(idx, val)
url = "https://aweme.snssdk.com/aweme/" + action + "/?" \
+ args \
+ "&retry_type=no_retry&" \
+ "iid=" + str(install_id) \
+ "&device_id=" + str(device_id) \
+ "&uuid=" + str(uuid) \
+ "&openudid=" + str(openudid) \
+ "&ts=" + str(ts) \
+ "&ac=wifi&channel=wandoujia_zhiwei&aid=1128&app_name=aweme&" \
"version_code=290&version_name=2.9.0&device_platform=android&" \
"ssmix=a&device_type=ONEPLUS+A5000&device_brand=OnePlus&language=zh&" \
"os_api=28&os_version=9&manifest_version_code=290&resolution=1080*1920&" \
"dpi=420&update_version_code=2902&_rticket=1548672388498"
return url
def get_signed_url(action, args, ts, device_info, token=""):
original_url = get_original_url(action, args, ts, device_info)
return sign(original_url, token=token)
def sign(original_url, token=""):
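    """Ask the remote signing service (ROUTE_SIGN_DOUYIN) to sign the URL; returns the signed URL, or None on error."""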
data = {"url": original_url}
try:
data = api_service(token=token, route=ROUTE_SIGN_DOUYIN, method="post", data=json.dumps(data))
# cc = json.loads(data)
# print(cc)
return data.get("url")
except Exception as e:
print(e)
def api_douyin(action, args, ts, device_info, token="", proxy=None):
    """Sign the request URL, call the Douyin endpoint and return the decoded JSON response (None on error)."""
try:
url = get_signed_url(action, args, ts, device_info, token=token)
resp = requests.get(url=url,
headers={
"User-Agent": "okhttp/3.10.0.1"},
verify=False,
cookies={'install_id': str(device_info['install_id'])},
proxies=proxy)
content = resp.content.decode("utf-8")
d = json.loads(content)
return d
except Exception as e:
print(e)
def api_service(route, token="", method="get", data=None, content_type="application/json", proxy=None):
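    """Call the signing backend at API_EP_DOUYIN and return its decoded JSON body; raises if the token is rejected or its quota is exhausted."""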
resp = requests.request(method=method, url="{0}/{1}/{2}".format(API_EP_DOUYIN, route, token), data=data,
headers={"Content-Type": content_type}, verify=False,proxies=proxy)
if token != "" and resp.headers.get("x-token") != token:
raise Exception(resp.headers.get("x-token"))
elif resp.headers.get("x-token-times") == "0":
raise Exception(resp.content)
data = resp.content.decode("utf-8")
return json.loads(data)
def wrap_api(action, args, device_info={}, token="", proxy=None):
try:
ts = str(int(time()))
data = api_douyin(action, args, ts, device_info, token=token,proxy=proxy)
return data
except Exception as e:
print(e)
def request_dict(req):
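    """Split a request URL's query string into a dict of URL-decoded key/value pairs."""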
params = req.split("?")[1]
lp = params.split('&')
di = {}
for e in lp:
k, v = e.split('=')
di[k] = unquote_plus(v)
return dict(di)
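

# Illustrative usage sketch (assumed placeholder values; the signing service at
# API_EP_DOUYIN must be reachable, and the device_info fields are hypothetical):
#
#     device_info = {'install_id': 0, 'device_id': 0, 'uuid': 0, 'openudid': '0'}
#     feed = wrap_api('v1/feed', {'count': 6}, device_info=device_info, token='demo-token')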
|
[
"json.loads",
"json.dumps",
"time.time",
"urllib3.disable_warnings",
"urllib.parse.unquote_plus"
] |
[((199, 266), 'urllib3.disable_warnings', 'urllib3.disable_warnings', (['urllib3.exceptions.InsecureRequestWarning'], {}), '(urllib3.exceptions.InsecureRequestWarning)\n', (223, 266), False, 'import urllib3\n'), ((2967, 2983), 'json.loads', 'json.loads', (['data'], {}), '(data)\n', (2977, 2983), False, 'import json\n'), ((2323, 2342), 'json.loads', 'json.loads', (['content'], {}), '(content)\n', (2333, 2342), False, 'import json\n'), ((3390, 3405), 'urllib.parse.unquote_plus', 'unquote_plus', (['v'], {}), '(v)\n', (3402, 3405), False, 'from urllib.parse import unquote_plus\n'), ((1658, 1674), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (1668, 1674), False, 'import json\n'), ((3080, 3086), 'time.time', 'time', ([], {}), '()\n', (3084, 3086), False, 'from time import time\n')]
|
import matplotlib.pyplot as plt
import pymongo
# Make a pie chart of 18+ (over_18) posts
# All charts go in the graph folder
def initialise_database(db_name):
    """
    Initialise the database and make a table instance
    Returns
        pymongo object of the table
    """
myclient = pymongo.MongoClient("mongodb://localhost:27017/")
mydb=myclient['subreddit']
maintable = mydb[db_name]
return maintable
post = initialise_database('posts2')
over = post.count_documents({'over_18': True})
print(over)
total = post.count_documents({})
over_pct = over * 100 / total
# Pie chart, where the slices will be ordered and plotted counter-clockwise:
labels = 'over_18', ''
sizes = [over_pct, 100 - over_pct]
explode = (0.1, 0)  # only "explode" the first slice (over_18)
fig1, ax1 = plt.subplots()
ax1.pie(sizes, explode=explode, labels=labels, autopct='%1.1f%%',
shadow=True, startangle=90)
ax1.axis('equal') # Equal aspect ratio ensures that pie is drawn as a circle.
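# The header comment says charts live in the graph folder; a hedged sketch of saving this
# one there (path assumed; the directory must already exist):
# fig1.savefig('graph/over_18_pie.png', bbox_inches='tight')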
plt.show()
|
[
"pymongo.MongoClient",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] |
[((760, 774), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (772, 774), True, 'import matplotlib.pyplot as plt\n'), ((957, 967), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (965, 967), True, 'import matplotlib.pyplot as plt\n'), ((272, 321), 'pymongo.MongoClient', 'pymongo.MongoClient', (['"""mongodb://localhost:27017/"""'], {}), "('mongodb://localhost:27017/')\n", (291, 321), False, 'import pymongo\n')]
|
"""Tests for the middlewares of the ``traces`` app."""
from django.contrib.auth.models import AnonymousUser
from django.test import TestCase
from django_libs.tests.factories import UserFactory
from mock import Mock
from .factories import BlacklistIPFactory
from ..middleware import TracesMiddleware
from ..models import Trace
class TraceMiddlewareTestCase(TestCase):
longMessage = True
def setUp(self):
self.middleware = TracesMiddleware()
self.request = Mock()
self.request.user = AnonymousUser()
self.request.path_info = '/'
self.request.session.session_key = 'foobar'
self.request.resolver_match.url_name = 'test_view'
self.request.META = {'HTTP_USER_AGENT': ''}
self.response = Mock()
self.response.context_data = None
def test_untraced_view(self):
with self.settings(TRACED_VIEWS=[]):
self.assertTrue(
self.middleware.process_response(self.request, self.response))
self.assertEqual(Trace.objects.count(), 0, msg=(
'No trace should have been created.'))
def test_traced_view(self):
with self.settings(TRACED_VIEWS=['test_view']):
# Anonymous user
self.assertTrue(
self.middleware.process_response(self.request, self.response))
self.assertEqual(Trace.objects.count(), 1, msg=(
'A new trace should have been created.'))
self.assertTrue(
self.middleware.process_response(self.request, self.response))
self.assertEqual(Trace.objects.count(), 1, msg=(
'No new trace should have been created.'))
self.assertEqual(Trace.objects.all()[0].hits, 2, msg=(
'Hits should have been increased.'))
# Blacklisted
BlacklistIPFactory(ip=Trace.objects.get().ip)
self.assertTrue(
self.middleware.process_response(self.request, self.response))
self.assertEqual(Trace.objects.count(), 1, msg=(
'No new trace should have been created.'))
self.request.session.session_key = ''
self.request.META['HTTP_X_FORWARDED_FOR'] = '1.1.1.1'
self.assertTrue(
self.middleware.process_response(self.request, self.response))
# Logged in user
self.request.user = UserFactory()
self.assertTrue(
self.middleware.process_response(self.request, self.response))
self.assertEqual(Trace.objects.count(), 3, msg=(
'A new trace should have been created.'))
self.assertTrue(
self.middleware.process_response(self.request, self.response))
self.assertEqual(Trace.objects.count(), 3, msg=(
'No new trace should have been created.'))
self.assertEqual(Trace.objects.all()[0].hits, 2, msg=(
'Hits should have been increased.'))
# View object
self.response.context_data = {'object': UserFactory()}
self.request.resolver_match.url_name = 'test_model_view'
self.assertTrue(
self.middleware.process_response(self.request, self.response))
self.assertEqual(Trace.objects.count(), 4, msg=(
'A new trace should have been created.'))
# Invalid IP
self.request.META['HTTP_X_FORWARDED_FOR'] = '1.1.1.1.1.1.1'
self.assertTrue(
self.middleware.process_response(self.request, self.response))
self.assertEqual(Trace.objects.count(), 4, msg=(
'No new trace should have been created.'))
# Invalid URL or missing view name
self.request.path_info = '/inexistant-view/'
self.assertTrue(
self.middleware.process_response(self.request, self.response))
self.assertEqual(Trace.objects.count(), 4, msg=(
'No new trace should have been created.'))
# 404
self.response.status_code = 404
self.assertTrue(
self.middleware.process_response(self.request, self.response))
self.assertEqual(Trace.objects.count(), 4, msg=(
'No new trace should have been created.'))
|
[
"django.contrib.auth.models.AnonymousUser",
"django_libs.tests.factories.UserFactory",
"mock.Mock"
] |
[((483, 489), 'mock.Mock', 'Mock', ([], {}), '()\n', (487, 489), False, 'from mock import Mock\n'), ((518, 533), 'django.contrib.auth.models.AnonymousUser', 'AnonymousUser', ([], {}), '()\n', (531, 533), False, 'from django.contrib.auth.models import AnonymousUser\n'), ((758, 764), 'mock.Mock', 'Mock', ([], {}), '()\n', (762, 764), False, 'from mock import Mock\n'), ((2405, 2418), 'django_libs.tests.factories.UserFactory', 'UserFactory', ([], {}), '()\n', (2416, 2418), False, 'from django_libs.tests.factories import UserFactory\n'), ((3074, 3087), 'django_libs.tests.factories.UserFactory', 'UserFactory', ([], {}), '()\n', (3085, 3087), False, 'from django_libs.tests.factories import UserFactory\n')]
|
""" Plot SV3 Results """
# LRGs
import sys
sys.path.append('/home/mehdi/github/LSSutils')
import matplotlib.pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
import healpy as hp
import numpy as np
from time import time
import fitsio as ft
from lssutils.lab import (make_overdensity, AnaFast,
histogram_cell, hpixsum, get_meandensity)
from lssutils.stats.pcc import pcc
from lssutils.dataviz import setup_color
import pandas as pd
root_dir = '/home/mehdi/data/dr9v0.57.0/'
def cutphotmask(aa, bits):
print(f'{len(aa)} before imaging veto')
keep = (aa['NOBS_G']>0) & (aa['NOBS_R']>0) & (aa['NOBS_Z']>0)
for biti in bits:
keep &= ((aa['MASKBITS'] & 2**biti)==0)
print(f'{keep.sum()} after imaging veto')
    # print(keep)  # debug output of the full boolean mask
return keep
class SV3Data:
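    """Load the SV3 target and random catalogs of a region, apply the imaging vetoes,
    and build overdensity maps plus C_ell, mean-density and PCC statistics."""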
def __init__(self, target, region, mversion):
columns = ['RA', 'DEC', 'NOBS_R', 'NOBS_G', 'NOBS_Z', 'MASKBITS']
bits = [1, 5, 6, 7, 8, 9, 11, 12, 13]
self.nside = 256
p = f'{root_dir}sv3_v1/'
self.dcat = ft.read(f'{p}sv3target_{target}_{region}.fits',
columns=columns)
self.rcat = ft.read(f'{p}{region}_randoms-1-0x2.fits',
columns=columns)
self.wrf = ft.read(f'{p}sv3target_{target}_{region}.fits_EdWsys/wsys_v0.fits')['wsys']
self.wnn = ft.read(f'{p}sv3target_{target}_{region}.fits_MrWsys/wsys_{mversion}.fits')['wsys']
ix_d = cutphotmask(self.dcat, bits)
self.dcat = self.dcat[ix_d]
self.wrf = self.wrf[ix_d]
self.wnn = self.wnn[ix_d]
ix_r = cutphotmask(self.rcat, bits)
self.rcat = self.rcat[ix_r]
print(f'mean(wrf): {self.wrf.mean():.2f}, {self.wrf.min():.1f} < wrf < {self.wrf.max():.1f}')
print(f'mean(wnn): {self.wnn.mean():.2f}, {self.wnn.min():.1f} < wnn < {self.wnn.max():.1f}')
self.af = AnaFast()
tmpl = pd.read_hdf(f'/home/mehdi/data/templates/dr9/pixweight_dark_dr9m_nside{self.nside}.h5')
#self.cols = ['nstar', 'ebv', 'loghi']\
# +[f'{s}_{b}' for s in ['ccdskymag_mean', 'fwhm_mean', 'fwhm_min', 'fwhm_max', 'depth_total',
# 'mjd_mean', 'mjd_min', 'mjd_max', 'airmass_mean', 'exptime_total']\
# for b in ['g', 'r', 'z']]
self.cols = ['stardens', 'ebv', 'loghi',
'psfdepth_g', 'psfdepth_r', 'psfdepth_z',
'galdepth_g', 'galdepth_r', 'galdepth_z',
'psfsize_g', 'psfsize_r', 'psfsize_z',
'psfdepth_w1', 'psfdepth_w2']
self.tmpl = tmpl[self.cols].values
def make_delta(self):
nran = hpixsum(self.nside, self.rcat['RA'], self.rcat['DEC'])*1.0
self.mask = (nran > 0)
print(f'mask: {self.mask.sum()} pixels')
is_good = np.isfinite(self.tmpl).sum(axis=1) == len(self.cols)
self.mask &= is_good
print(f'mask: {self.mask.sum()} pixels (with imaging)')
self.frac = nran / nran[self.mask].mean()
self.mask &= (self.frac > 0.2)
print(f'mask: {self.mask.sum()} pixels (with frac>0.2)')
self.ngal_now = hpixsum(self.nside, self.dcat['RA'], self.dcat['DEC'])*1.0
self.ngal_rf = hpixsum(self.nside, self.dcat['RA'], self.dcat['DEC'], weights=self.wrf)
self.ngal_wnn = hpixsum(self.nside, self.dcat['RA'], self.dcat['DEC'], weights=self.wnn)
self.delta_now = make_overdensity(self.ngal_now, self.frac, self.mask)
self.delta_rf = make_overdensity(self.ngal_rf, self.frac, self.mask)
self.delta_wnn = make_overdensity(self.ngal_wnn, self.frac, self.mask)
def make_cl(self):
self.cl_now = self.af(self.delta_now, self.frac, self.mask)
self.cl_rf = self.af(self.delta_rf, self.frac, self.mask)
self.cl_nn = self.af(self.delta_wnn, self.frac, self.mask)
def make_nbar(self):
self.nbar_now = get_meandensity(self.ngal_now, self.frac, self.mask, self.tmpl)
self.nbar_rf = get_meandensity(self.ngal_rf, self.frac, self.mask, self.tmpl)
self.nbar_nn = get_meandensity(self.ngal_wnn, self.frac, self.mask, self.tmpl)
def make_pcc(self):
self.pcc_now = pcc(self.tmpl[self.mask], self.delta_now[self.mask], return_err=True)
self.pcc_rf = pcc(self.tmpl[self.mask], self.delta_rf[self.mask])
self.pcc_nn = pcc(self.tmpl[self.mask], self.delta_wnn[self.mask])
setup_color()
region = sys.argv[1] # NDECALS
target = sys.argv[2] # QSO
mversion = sys.argv[3]
assert region in ['NDECALS', 'SDECALS', 'NBMZLS', 'DES', 'SDECALS_noDES', 'DES_noLMC']
assert target in ['QSO', 'LRG', 'ELG', 'BGS_ANY']
print(f'target: {target}, region: {region}, mversion: {mversion}')
target_region = f'{target}-{region}-{mversion}'
t0 = time()
sv = SV3Data(target, region, mversion)
t1 = time()
print(f'Finished reading in {t1-t0:.1f} sec')
sv.make_delta()
t2 = time()
print(f'Finished deltas in {t2-t1:.1f} sec')
sv.make_cl()
t3 = time()
print(f'Finished Cell in {t3-t2:.1f} sec')
sv.make_nbar()
t4 = time()
print(f'Finished nbar in {t4-t3:.1f} sec')
sv.make_pcc()
t5 = time()
print(f'Finished pcc in {t5-t4:.1f} sec')
pp = PdfPages(''.join([f'{root_dir}sv3_v1/', target_region, '.pdf']))
# C_ell
methods = ['No weight', 'RF weight', 'NN weight']
cls = [sv.cl_now, sv.cl_rf, sv.cl_nn]
fg, ax = plt.subplots(figsize=(8, 6))
for n_i, cl_i in zip(methods, cls ):
lb, clb = histogram_cell(cl_i['cl'], bins=np.logspace(0, np.log10(770), 10))
l_, = ax.plot(cl_i['cl'], lw=1, zorder=-1, alpha=0.2)
ax.plot(lb, clb, marker='.', mfc='w', ls='None', color=l_.get_color(), label=n_i)
ax.legend(title=target_region, frameon=False)
ax.set(xscale='log', yscale='log', ylim=(2.0e-8, 8.0e-3),
xlabel=r'$\ell$', ylabel=r'C$_{\ell}$')
#fg.savefig('cl_lrg_bmzls.png', dpi=300, bbox_inches='tight')
pp.savefig(bbox_inches='tight')
# Nbar
fig, ax = plt.subplots(ncols=3, nrows=5, figsize=(22, 25), sharey=True)
fig.subplots_adjust(hspace=0.35, wspace=0.1)
ax = ax.flatten()
nbars = [sv.nbar_now, sv.nbar_rf, sv.nbar_nn]
for name_i, nbar_i in zip(methods, nbars):
for j, nbar_ij in enumerate(nbar_i):
ax[j].plot(nbar_ij['bin_avg'], nbar_ij['nnbar'], marker='.', mfc='w', label=name_i)
if name_i == 'No weight':
ax[j].fill_between(nbar_ij['bin_avg'], 1-nbar_ij['nnbar_err'], 1+nbar_ij['nnbar_err'],
color='grey', alpha=0.2, zorder=-1)
ax[2].legend(title=target_region, frameon=False)
for j, colj in enumerate(sv.cols):
ax[j].set_xlabel(colj)
if j%3==0:
ax[j].set_ylabel('Mean Density')
pp.savefig(bbox_inches='tight')
# PCC
fg, ax = plt.subplots(figsize=(12, 4))
x_columns = np.arange(len(sv.cols))
ax.set_xticks(x_columns)
ax.set_xticklabels(sv.cols, rotation=90)
pcc_min, pcc_max = np.percentile(sv.pcc_now[1], [2.5, 97.5], axis=0)
ax.bar(x_columns-0.25, sv.pcc_now[0], width=0.25, label='No weight')
ax.bar(x_columns, sv.pcc_rf[0], width=0.25, label='RF')
ax.bar(x_columns+0.25, sv.pcc_nn[0], width=0.25, label='NN')
ax.fill_between(x_columns, pcc_min, pcc_max, color='grey', alpha=0.2, zorder=10)
ax.legend(title=target_region, frameon=False)
ax.grid(ls=':')
ax.set(ylabel='PCC')
pp.savefig(bbox_inches='tight')
pp.close()
|
[
"sys.path.append",
"pandas.read_hdf",
"lssutils.lab.make_overdensity",
"lssutils.stats.pcc.pcc",
"lssutils.lab.hpixsum",
"lssutils.lab.AnaFast",
"lssutils.lab.get_meandensity",
"numpy.isfinite",
"time.time",
"numpy.percentile",
"fitsio.read",
"numpy.log10",
"lssutils.dataviz.setup_color",
"matplotlib.pyplot.subplots"
] |
[((44, 90), 'sys.path.append', 'sys.path.append', (['"""/home/mehdi/github/LSSutils"""'], {}), "('/home/mehdi/github/LSSutils')\n", (59, 90), False, 'import sys\n'), ((4698, 4711), 'lssutils.dataviz.setup_color', 'setup_color', ([], {}), '()\n', (4709, 4711), False, 'from lssutils.dataviz import setup_color\n'), ((5059, 5065), 'time.time', 'time', ([], {}), '()\n', (5063, 5065), False, 'from time import time\n'), ((5110, 5116), 'time.time', 'time', ([], {}), '()\n', (5114, 5116), False, 'from time import time\n'), ((5185, 5191), 'time.time', 'time', ([], {}), '()\n', (5189, 5191), False, 'from time import time\n'), ((5257, 5263), 'time.time', 'time', ([], {}), '()\n', (5261, 5263), False, 'from time import time\n'), ((5329, 5335), 'time.time', 'time', ([], {}), '()\n', (5333, 5335), False, 'from time import time\n'), ((5400, 5406), 'time.time', 'time', ([], {}), '()\n', (5404, 5406), False, 'from time import time\n'), ((5631, 5659), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(8, 6)'}), '(figsize=(8, 6))\n', (5643, 5659), True, 'import matplotlib.pyplot as plt\n'), ((6199, 6260), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'ncols': '(3)', 'nrows': '(5)', 'figsize': '(22, 25)', 'sharey': '(True)'}), '(ncols=3, nrows=5, figsize=(22, 25), sharey=True)\n', (6211, 6260), True, 'import matplotlib.pyplot as plt\n'), ((7007, 7036), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(12, 4)'}), '(figsize=(12, 4))\n', (7019, 7036), True, 'import matplotlib.pyplot as plt\n'), ((7159, 7208), 'numpy.percentile', 'np.percentile', (['sv.pcc_now[1]', '[2.5, 97.5]'], {'axis': '(0)'}), '(sv.pcc_now[1], [2.5, 97.5], axis=0)\n', (7172, 7208), True, 'import numpy as np\n'), ((1101, 1165), 'fitsio.read', 'ft.read', (['f"""{p}sv3target_{target}_{region}.fits"""'], {'columns': 'columns'}), "(f'{p}sv3target_{target}_{region}.fits', columns=columns)\n", (1108, 1165), True, 'import fitsio as ft\n'), ((1215, 1274), 'fitsio.read', 'ft.read', (['f"""{p}{region}_randoms-1-0x2.fits"""'], {'columns': 'columns'}), "(f'{p}{region}_randoms-1-0x2.fits', columns=columns)\n", (1222, 1274), True, 'import fitsio as ft\n'), ((2003, 2012), 'lssutils.lab.AnaFast', 'AnaFast', ([], {}), '()\n', (2010, 2012), False, 'from lssutils.lab import make_overdensity, AnaFast, histogram_cell, hpixsum, get_meandensity\n'), ((2037, 2129), 'pandas.read_hdf', 'pd.read_hdf', (['f"""/home/mehdi/data/templates/dr9/pixweight_dark_dr9m_nside{self.nside}.h5"""'], {}), "(\n f'/home/mehdi/data/templates/dr9/pixweight_dark_dr9m_nside{self.nside}.h5')\n", (2048, 2129), True, 'import pandas as pd\n'), ((3460, 3532), 'lssutils.lab.hpixsum', 'hpixsum', (['self.nside', "self.dcat['RA']", "self.dcat['DEC']"], {'weights': 'self.wrf'}), "(self.nside, self.dcat['RA'], self.dcat['DEC'], weights=self.wrf)\n", (3467, 3532), False, 'from lssutils.lab import make_overdensity, AnaFast, histogram_cell, hpixsum, get_meandensity\n'), ((3557, 3629), 'lssutils.lab.hpixsum', 'hpixsum', (['self.nside', "self.dcat['RA']", "self.dcat['DEC']"], {'weights': 'self.wnn'}), "(self.nside, self.dcat['RA'], self.dcat['DEC'], weights=self.wnn)\n", (3564, 3629), False, 'from lssutils.lab import make_overdensity, AnaFast, histogram_cell, hpixsum, get_meandensity\n'), ((3664, 3717), 'lssutils.lab.make_overdensity', 'make_overdensity', (['self.ngal_now', 'self.frac', 'self.mask'], {}), '(self.ngal_now, self.frac, self.mask)\n', (3680, 3717), False, 'from lssutils.lab import make_overdensity, AnaFast, histogram_cell, hpixsum, get_meandensity\n'), ((3743, 
3795), 'lssutils.lab.make_overdensity', 'make_overdensity', (['self.ngal_rf', 'self.frac', 'self.mask'], {}), '(self.ngal_rf, self.frac, self.mask)\n', (3759, 3795), False, 'from lssutils.lab import make_overdensity, AnaFast, histogram_cell, hpixsum, get_meandensity\n'), ((3823, 3876), 'lssutils.lab.make_overdensity', 'make_overdensity', (['self.ngal_wnn', 'self.frac', 'self.mask'], {}), '(self.ngal_wnn, self.frac, self.mask)\n', (3839, 3876), False, 'from lssutils.lab import make_overdensity, AnaFast, histogram_cell, hpixsum, get_meandensity\n'), ((4179, 4242), 'lssutils.lab.get_meandensity', 'get_meandensity', (['self.ngal_now', 'self.frac', 'self.mask', 'self.tmpl'], {}), '(self.ngal_now, self.frac, self.mask, self.tmpl)\n', (4194, 4242), False, 'from lssutils.lab import make_overdensity, AnaFast, histogram_cell, hpixsum, get_meandensity\n'), ((4267, 4329), 'lssutils.lab.get_meandensity', 'get_meandensity', (['self.ngal_rf', 'self.frac', 'self.mask', 'self.tmpl'], {}), '(self.ngal_rf, self.frac, self.mask, self.tmpl)\n', (4282, 4329), False, 'from lssutils.lab import make_overdensity, AnaFast, histogram_cell, hpixsum, get_meandensity\n'), ((4355, 4418), 'lssutils.lab.get_meandensity', 'get_meandensity', (['self.ngal_wnn', 'self.frac', 'self.mask', 'self.tmpl'], {}), '(self.ngal_wnn, self.frac, self.mask, self.tmpl)\n', (4370, 4418), False, 'from lssutils.lab import make_overdensity, AnaFast, histogram_cell, hpixsum, get_meandensity\n'), ((4475, 4544), 'lssutils.stats.pcc.pcc', 'pcc', (['self.tmpl[self.mask]', 'self.delta_now[self.mask]'], {'return_err': '(True)'}), '(self.tmpl[self.mask], self.delta_now[self.mask], return_err=True)\n', (4478, 4544), False, 'from lssutils.stats.pcc import pcc\n'), ((4568, 4619), 'lssutils.stats.pcc.pcc', 'pcc', (['self.tmpl[self.mask]', 'self.delta_rf[self.mask]'], {}), '(self.tmpl[self.mask], self.delta_rf[self.mask])\n', (4571, 4619), False, 'from lssutils.stats.pcc import pcc\n'), ((4643, 4695), 'lssutils.stats.pcc.pcc', 'pcc', (['self.tmpl[self.mask]', 'self.delta_wnn[self.mask]'], {}), '(self.tmpl[self.mask], self.delta_wnn[self.mask])\n', (4646, 4695), False, 'from lssutils.stats.pcc import pcc\n'), ((1332, 1399), 'fitsio.read', 'ft.read', (['f"""{p}sv3target_{target}_{region}.fits_EdWsys/wsys_v0.fits"""'], {}), "(f'{p}sv3target_{target}_{region}.fits_EdWsys/wsys_v0.fits')\n", (1339, 1399), True, 'import fitsio as ft\n'), ((1427, 1502), 'fitsio.read', 'ft.read', (['f"""{p}sv3target_{target}_{region}.fits_MrWsys/wsys_{mversion}.fits"""'], {}), "(f'{p}sv3target_{target}_{region}.fits_MrWsys/wsys_{mversion}.fits')\n", (1434, 1502), True, 'import fitsio as ft\n'), ((2865, 2919), 'lssutils.lab.hpixsum', 'hpixsum', (['self.nside', "self.rcat['RA']", "self.rcat['DEC']"], {}), "(self.nside, self.rcat['RA'], self.rcat['DEC'])\n", (2872, 2919), False, 'from lssutils.lab import make_overdensity, AnaFast, histogram_cell, hpixsum, get_meandensity\n'), ((3377, 3431), 'lssutils.lab.hpixsum', 'hpixsum', (['self.nside', "self.dcat['RA']", "self.dcat['DEC']"], {}), "(self.nside, self.dcat['RA'], self.dcat['DEC'])\n", (3384, 3431), False, 'from lssutils.lab import make_overdensity, AnaFast, histogram_cell, hpixsum, get_meandensity\n'), ((5759, 5772), 'numpy.log10', 'np.log10', (['(770)'], {}), '(770)\n', (5767, 5772), True, 'import numpy as np\n'), ((3031, 3053), 'numpy.isfinite', 'np.isfinite', (['self.tmpl'], {}), '(self.tmpl)\n', (3042, 3053), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
"""
The entrance for ipfs module.
"""
import logging
import shutil
from pathlib import Path
from src import hive_setting
from src.utils_v1.common import gene_temp_file_name
from src.utils_v1.constants import VAULT_ACCESS_WR, VAULT_ACCESS_R, DID_INFO_DB_NAME
from src.utils_v1.payment.vault_service_manage import update_used_storage_for_files_data
from src.utils.consts import COL_IPFS_FILES, APP_DID, COL_IPFS_FILES_PATH, COL_IPFS_FILES_SHA256, \
COL_IPFS_FILES_IS_FILE, SIZE, COL_IPFS_FILES_IPFS_CID, COL_IPFS_CID_REF, CID, COUNT, USR_DID
from src.utils.db_client import cli
from src.utils.did_auth import check_auth_and_vault
from src.utils.file_manager import fm
from src.utils.http_exception import InvalidParameterException, FileNotFoundException, AlreadyExistsException
from src.utils.http_response import hive_restful_response, hive_stream_response
class IpfsFiles:
def __init__(self):
"""
IPFS node is being used to store immutable block data (files):
        1. Each user_did/app_did has a sandbox to cache application data;
        2. Each user_did/app_did has a mongodb collection to manage the metadata of the block data on the IPFS node;
        3. Once a block data (usually a file) has been uploaded to the hive node, it is cached on the local
           filesystem first; afterwards it is also uploaded and pinned to the paired IPFS node.
        4. The CID of the block data on IPFS is kept as a field of the metadata in the collection.
"""
pass
@hive_restful_response
def upload_file(self, path):
user_did, app_did = check_auth_and_vault(VAULT_ACCESS_WR)
self.upload_file_with_path(user_did, app_did, path)
return {
'name': path
}
@hive_stream_response
def download_file(self, path):
user_did, app_did = check_auth_and_vault(VAULT_ACCESS_R)
return self.download_file_with_path(user_did, app_did, path)
@hive_restful_response
def delete_file(self, path):
"""
Delete a file from the vault.
1. Remove the cached file in local filesystem;
2. Unpin the file data from corresponding IPFS node.
:param path:
:return:
"""
user_did, app_did = check_auth_and_vault(VAULT_ACCESS_WR)
self.delete_file_with_path(user_did, app_did, path)
def delete_file_with_path(self, user_did, app_did, path):
col_filter = {USR_DID: user_did,
APP_DID: app_did,
COL_IPFS_FILES_PATH: path}
doc = cli.find_one(user_did, app_did, COL_IPFS_FILES, col_filter, throw_exception=False)
if not doc:
return
cache_file = fm.ipfs_get_cache_root(user_did) / doc[COL_IPFS_FILES_IPFS_CID]
if cache_file.exists():
cache_file.unlink()
self.delete_file_metadata(user_did, app_did, path, doc[COL_IPFS_FILES_IPFS_CID])
update_used_storage_for_files_data(user_did, 0 - doc[SIZE])
@hive_restful_response
def move_file(self, src_path, dst_path):
user_did, app_did = check_auth_and_vault(VAULT_ACCESS_WR)
return self.move_copy_file(user_did, app_did, src_path, dst_path)
@hive_restful_response
def copy_file(self, src_path, dst_path):
user_did, app_did = check_auth_and_vault(VAULT_ACCESS_WR)
return self.move_copy_file(user_did, app_did, src_path, dst_path, is_copy=True)
@hive_restful_response
def list_folder(self, path):
"""
List the files under the specific directory.
:param path: Empty means root folder.
:return: File list.
"""
user_did, app_did = check_auth_and_vault(VAULT_ACCESS_WR)
docs = self.list_folder_with_path(user_did, app_did, path)
return {
'value': list(map(lambda d: self._get_list_file_info_by_doc(d), docs))
}
def list_folder_with_path(self, user_did, app_did, path):
col_filter = {USR_DID: user_did, APP_DID: app_did}
if path:
            folder_path = path if path.endswith('/') else f'{path}/'
col_filter[COL_IPFS_FILES_PATH] = {
'$regex': f'^{folder_path}'
}
docs = cli.find_many(user_did, app_did, COL_IPFS_FILES, col_filter)
if not docs and path:
            raise InvalidParameterException(f'The directory {path} does not exist.')
return docs
@hive_restful_response
def get_properties(self, path):
user_did, app_did = check_auth_and_vault(VAULT_ACCESS_R)
metadata = self.get_file_metadata(user_did, app_did, path)
return {
'name': metadata[COL_IPFS_FILES_PATH],
'is_file': metadata[COL_IPFS_FILES_IS_FILE],
'size': metadata[SIZE],
'created': metadata['created'],
'updated': metadata['modified'],
}
@hive_restful_response
def get_hash(self, path):
user_did, app_did = check_auth_and_vault(VAULT_ACCESS_R)
metadata = self.get_file_metadata(user_did, app_did, path)
return {
'name': metadata[COL_IPFS_FILES_PATH],
'algorithm': 'SHA256',
'hash': metadata[COL_IPFS_FILES_SHA256]
}
def upload_file_with_path(self, user_did, app_did, path: str):
"""
The routine to process the file uploading:
        1. Receive the content of the uploaded file and cache it in a temp file;
        2. Add this file onto the IPFS node and get back its CID;
        3. Create a new metadata document holding the CID and store it in the collection;
        4. Move the temp file into the user-specific cache directory.
:param user_did: the user did
:param app_did: the application did
:param path: the file relative path, not None
:return: None
"""
# upload to the temporary file and then to IPFS node.
temp_file = gene_temp_file_name()
fm.write_file_by_request_stream(temp_file)
self.upload_file_from_local(user_did, app_did, path, temp_file)
def upload_file_from_local(self, user_did, app_did, path: str, local_path: Path, only_import=False, **kwargs):
# insert or update file metadata.
doc = self.get_file_metadata(user_did, app_did, path, throw_exception=False)
if not doc:
cid = self.create_file_metadata(user_did, app_did, path, local_path,
only_import=only_import, **kwargs)
else:
cid = self.update_file_metadata(user_did, app_did, path, local_path, doc,
only_import=only_import, **kwargs)
# set temporary file as cache.
if cid:
cache_file = fm.ipfs_get_cache_root(user_did) / cid
if cache_file.exists():
cache_file.unlink()
if only_import:
shutil.copy(local_path.as_posix(), cache_file.as_posix())
else:
shutil.move(local_path.as_posix(), cache_file.as_posix())
def create_file_metadata(self, user_did, app_did, rel_path: str, file_path: Path, only_import=False, **kwargs):
cid = fm.ipfs_upload_file_from_path(file_path)
metadata = {
USR_DID: user_did,
APP_DID: app_did,
COL_IPFS_FILES_PATH: rel_path,
COL_IPFS_FILES_SHA256: fm.get_file_content_sha256(file_path),
COL_IPFS_FILES_IS_FILE: True,
SIZE: file_path.stat().st_size,
COL_IPFS_FILES_IPFS_CID: cid,
}
self.increase_refcount_cid(cid)
result = cli.insert_one(user_did, app_did, COL_IPFS_FILES, metadata, create_on_absence=True, **kwargs)
if not only_import:
update_used_storage_for_files_data(user_did, metadata[SIZE])
logging.info(f'[ipfs-files] Add a new file {rel_path}')
return cid
def update_file_metadata(self, user_did, app_did, rel_path: str, file_path: Path,
existing_metadata=None, only_import=False, **kwargs):
col_filter = {USR_DID: user_did,
APP_DID: app_did,
COL_IPFS_FILES_PATH: rel_path}
if not existing_metadata:
existing_metadata = cli.find_one(user_did, app_did, COL_IPFS_FILES, col_filter, create_on_absence=True, throw_exception=False)
if not existing_metadata:
            logging.error(f'The file {rel_path} metadata does not exist, impossible to update')
return None
# check the consistence between the new one and existing one.
sha256 = fm.get_file_content_sha256(file_path)
cid = fm.ipfs_upload_file_from_path(file_path)
size = file_path.stat().st_size
if size == existing_metadata[SIZE] and sha256 == existing_metadata[COL_IPFS_FILES_SHA256] \
and cid == existing_metadata[COL_IPFS_FILES_IPFS_CID]:
            logging.info(f'The file {rel_path} metadata is consistent with the existing one, skipping update')
return None
# update the metadata of new file.
if cid != existing_metadata[COL_IPFS_FILES_IPFS_CID]:
self.increase_refcount_cid(cid)
updated_metadata = {'$set': {COL_IPFS_FILES_SHA256: sha256,
SIZE: size,
COL_IPFS_FILES_IPFS_CID: cid}}
result = cli.update_one(user_did, app_did, COL_IPFS_FILES, col_filter, updated_metadata,
is_extra=True, **kwargs)
        # Dereference the previous CID on the IPFS node.
if cid != existing_metadata[COL_IPFS_FILES_IPFS_CID]:
self.decrease_refcount_cid(existing_metadata[COL_IPFS_FILES_IPFS_CID])
if not only_import and size != existing_metadata[SIZE]:
update_used_storage_for_files_data(user_did, size - existing_metadata[SIZE])
logging.info(f'[ipfs-files] The existing file with {rel_path} has been updated')
return cid
def delete_file_metadata(self, user_did, app_did, rel_path, cid):
col_filter = {USR_DID: user_did,
APP_DID: app_did,
COL_IPFS_FILES_PATH: rel_path}
result = cli.delete_one(user_did, app_did, COL_IPFS_FILES, col_filter, is_check_exist=False)
if result['deleted_count'] > 0 and cid:
self.decrease_refcount_cid(cid)
logging.info(f'[ipfs-files] Remove an existing file {rel_path}')
def download_file_with_path(self, user_did, app_did, path: str):
"""
        Download the target file with the following steps:
        1. Check whether the target file is already cached; if so, just use that file. Otherwise:
        2. Download the file from IPFS to the cache directory;
        3. Respond to the requester with the cached file.
:param user_did: The user did.
:param app_did: The application did
:param path:
:return:
"""
metadata = self.get_file_metadata(user_did, app_did, path)
cached_file = fm.ipfs_get_cache_root(user_did) / metadata[COL_IPFS_FILES_IPFS_CID]
if not cached_file.exists():
fm.ipfs_download_file_to_path(metadata[COL_IPFS_FILES_IPFS_CID], cached_file)
return fm.get_response_by_file_path(cached_file)
def move_copy_file(self, user_did, app_did, src_path, dst_path, is_copy=False):
"""
        Move/Copy a file with the following steps:
        1. Check that the source file exists and that no file with the destination name exists. If so, then
        2. Move or copy the file;
        3. Update the metadata.
:param user_did:
:param app_did:
:param src_path: The path of the source file.
:param dst_path: The path of the destination file.
:param is_copy: True means copy file, else move.
:return: Json data of the response.
"""
src_filter = {USR_DID: user_did, APP_DID: app_did, COL_IPFS_FILES_PATH: src_path}
dst_filter = {USR_DID: user_did, APP_DID: app_did, COL_IPFS_FILES_PATH: dst_path}
src_doc = cli.find_one(user_did, app_did, COL_IPFS_FILES, src_filter)
dst_doc = cli.find_one(user_did, app_did, COL_IPFS_FILES, dst_filter)
if not src_doc:
raise FileNotFoundException(msg=f'The source file {src_path} not found, impossible to move/copy.')
if dst_doc:
            raise AlreadyExistsException(msg=f'A file with destination name {dst_path} already exists, impossible to move/copy')
if is_copy:
metadata = {
USR_DID: user_did,
APP_DID: app_did,
COL_IPFS_FILES_PATH: dst_path,
COL_IPFS_FILES_SHA256: src_doc[COL_IPFS_FILES_SHA256],
COL_IPFS_FILES_IS_FILE: True,
SIZE: src_doc[SIZE],
COL_IPFS_FILES_IPFS_CID: src_doc[COL_IPFS_FILES_IPFS_CID],
}
self.increase_refcount_cid(src_doc[COL_IPFS_FILES_IPFS_CID])
cli.insert_one(user_did, app_did, COL_IPFS_FILES, metadata)
update_used_storage_for_files_data(user_did, src_doc[SIZE])
else:
cli.update_one(user_did, app_did, COL_IPFS_FILES, src_filter,
{'$set': {COL_IPFS_FILES_PATH: dst_path}}, is_extra=True)
return {
'name': dst_path
}
def _get_list_file_info_by_doc(self, file_doc):
return {
'name': file_doc[COL_IPFS_FILES_PATH],
'is_file': file_doc[COL_IPFS_FILES_IS_FILE],
'size': file_doc[SIZE],
}
def get_file_metadata(self, user_did, app_did, path: str, throw_exception=True):
col_filter = {USR_DID: user_did,
APP_DID: app_did,
COL_IPFS_FILES_PATH: path}
metadata = cli.find_one(user_did, app_did, COL_IPFS_FILES, col_filter,
create_on_absence=True, throw_exception=throw_exception)
if not metadata:
if throw_exception:
raise FileNotFoundException(msg=f'No file metadata with path: {path} found')
return None
return metadata
def get_ipfs_file_access_url(self, metadata):
return f'{hive_setting.IPFS_PROXY_URL}/ipfs/{metadata[COL_IPFS_FILES_IPFS_CID]}'
def increase_refcount_cid(self, cid, count=1):
if not cid:
logging.error(f'CID must be provided for increase.')
return
doc = cli.find_one_origin(DID_INFO_DB_NAME, COL_IPFS_CID_REF, {CID: cid},
create_on_absence=True, throw_exception=False)
if not doc:
doc = {
CID: cid,
COUNT: count
}
cli.insert_one_origin(DID_INFO_DB_NAME, COL_IPFS_CID_REF, doc, create_on_absence=True)
else:
self._update_refcount_cid(cid, doc[COUNT] + count)
def decrease_refcount_cid(self, cid, count=1):
if not cid:
logging.error(f'CID must exist for decrease.')
return
doc = cli.find_one_origin(DID_INFO_DB_NAME, COL_IPFS_CID_REF, {CID: cid},
create_on_absence=True, throw_exception=False)
if not doc:
fm.ipfs_unpin_cid(cid)
return
if doc[COUNT] <= count:
cli.delete_one_origin(DID_INFO_DB_NAME, COL_IPFS_CID_REF, {CID: cid}, is_check_exist=False)
fm.ipfs_unpin_cid(cid)
else:
self._update_refcount_cid(cid, doc[COUNT] - count)
def _update_refcount_cid(self, cid, count):
col_filter = {CID: cid}
update = {'$set': {
COUNT: count,
}}
cli.update_one_origin(DID_INFO_DB_NAME, COL_IPFS_CID_REF, col_filter, update,
create_on_absence=True, is_extra=True)
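

# Illustrative end-to-end flow (a sketch under assumptions: a Flask request context with
# valid DID auth, a provisioned vault, and a hypothetical file path):
#
#     files = IpfsFiles()
#     files.upload_file('docs/report.txt')     # cache locally, pin on IPFS, store metadata
#     files.get_hash('docs/report.txt')        # -> {'name': ..., 'algorithm': 'SHA256', 'hash': ...}
#     files.move_file('docs/report.txt', 'docs/report-v2.txt')
#     files.delete_file('docs/report-v2.txt')  # unlink the cache, decrease the CID refcount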
|
[
"src.utils_v1.common.gene_temp_file_name",
"src.utils.file_manager.fm.ipfs_upload_file_from_path",
"src.utils.file_manager.fm.ipfs_download_file_to_path",
"src.utils_v1.payment.vault_service_manage.update_used_storage_for_files_data",
"src.utils.db_client.cli.insert_one",
"src.utils.db_client.cli.find_one",
"src.utils.file_manager.fm.ipfs_unpin_cid",
"src.utils.db_client.cli.insert_one_origin",
"src.utils.file_manager.fm.get_file_content_sha256",
"logging.error",
"src.utils.file_manager.fm.write_file_by_request_stream",
"src.utils.file_manager.fm.ipfs_get_cache_root",
"src.utils.db_client.cli.delete_one",
"src.utils.did_auth.check_auth_and_vault",
"src.utils.file_manager.fm.get_response_by_file_path",
"src.utils.http_exception.InvalidParameterException",
"src.utils.db_client.cli.update_one_origin",
"src.utils.db_client.cli.find_many",
"src.utils.db_client.cli.delete_one_origin",
"src.utils.db_client.cli.find_one_origin",
"src.utils.http_exception.FileNotFoundException",
"src.utils.http_exception.AlreadyExistsException",
"src.utils.db_client.cli.update_one",
"logging.info"
] |
[((1628, 1665), 'src.utils.did_auth.check_auth_and_vault', 'check_auth_and_vault', (['VAULT_ACCESS_WR'], {}), '(VAULT_ACCESS_WR)\n', (1648, 1665), False, 'from src.utils.did_auth import check_auth_and_vault\n'), ((1868, 1904), 'src.utils.did_auth.check_auth_and_vault', 'check_auth_and_vault', (['VAULT_ACCESS_R'], {}), '(VAULT_ACCESS_R)\n', (1888, 1904), False, 'from src.utils.did_auth import check_auth_and_vault\n'), ((2279, 2316), 'src.utils.did_auth.check_auth_and_vault', 'check_auth_and_vault', (['VAULT_ACCESS_WR'], {}), '(VAULT_ACCESS_WR)\n', (2299, 2316), False, 'from src.utils.did_auth import check_auth_and_vault\n'), ((2584, 2671), 'src.utils.db_client.cli.find_one', 'cli.find_one', (['user_did', 'app_did', 'COL_IPFS_FILES', 'col_filter'], {'throw_exception': '(False)'}), '(user_did, app_did, COL_IPFS_FILES, col_filter, throw_exception\n =False)\n', (2596, 2671), False, 'from src.utils.db_client import cli\n'), ((2954, 3013), 'src.utils_v1.payment.vault_service_manage.update_used_storage_for_files_data', 'update_used_storage_for_files_data', (['user_did', '(0 - doc[SIZE])'], {}), '(user_did, 0 - doc[SIZE])\n', (2988, 3013), False, 'from src.utils_v1.payment.vault_service_manage import update_used_storage_for_files_data\n'), ((3115, 3152), 'src.utils.did_auth.check_auth_and_vault', 'check_auth_and_vault', (['VAULT_ACCESS_WR'], {}), '(VAULT_ACCESS_WR)\n', (3135, 3152), False, 'from src.utils.did_auth import check_auth_and_vault\n'), ((3328, 3365), 'src.utils.did_auth.check_auth_and_vault', 'check_auth_and_vault', (['VAULT_ACCESS_WR'], {}), '(VAULT_ACCESS_WR)\n', (3348, 3365), False, 'from src.utils.did_auth import check_auth_and_vault\n'), ((3694, 3731), 'src.utils.did_auth.check_auth_and_vault', 'check_auth_and_vault', (['VAULT_ACCESS_WR'], {}), '(VAULT_ACCESS_WR)\n', (3714, 3731), False, 'from src.utils.did_auth import check_auth_and_vault\n'), ((4246, 4306), 'src.utils.db_client.cli.find_many', 'cli.find_many', (['user_did', 'app_did', 'COL_IPFS_FILES', 'col_filter'], {}), '(user_did, app_did, COL_IPFS_FILES, col_filter)\n', (4259, 4306), False, 'from src.utils.db_client import cli\n'), ((4532, 4568), 'src.utils.did_auth.check_auth_and_vault', 'check_auth_and_vault', (['VAULT_ACCESS_R'], {}), '(VAULT_ACCESS_R)\n', (4552, 4568), False, 'from src.utils.did_auth import check_auth_and_vault\n'), ((4982, 5018), 'src.utils.did_auth.check_auth_and_vault', 'check_auth_and_vault', (['VAULT_ACCESS_R'], {}), '(VAULT_ACCESS_R)\n', (5002, 5018), False, 'from src.utils.did_auth import check_auth_and_vault\n'), ((5921, 5942), 'src.utils_v1.common.gene_temp_file_name', 'gene_temp_file_name', ([], {}), '()\n', (5940, 5942), False, 'from src.utils_v1.common import gene_temp_file_name\n'), ((5951, 5993), 'src.utils.file_manager.fm.write_file_by_request_stream', 'fm.write_file_by_request_stream', (['temp_file'], {}), '(temp_file)\n', (5982, 5993), False, 'from src.utils.file_manager import fm\n'), ((7185, 7225), 'src.utils.file_manager.fm.ipfs_upload_file_from_path', 'fm.ipfs_upload_file_from_path', (['file_path'], {}), '(file_path)\n', (7214, 7225), False, 'from src.utils.file_manager import fm\n'), ((7620, 7717), 'src.utils.db_client.cli.insert_one', 'cli.insert_one', (['user_did', 'app_did', 'COL_IPFS_FILES', 'metadata'], {'create_on_absence': '(True)'}), '(user_did, app_did, COL_IPFS_FILES, metadata,\n create_on_absence=True, **kwargs)\n', (7634, 7717), False, 'from src.utils.db_client import cli\n'), ((7823, 7878), 'logging.info', 'logging.info', (['f"""[ipfs-files] Add a new file 
{rel_path}"""'], {}), "(f'[ipfs-files] Add a new file {rel_path}')\n", (7835, 7878), False, 'import logging\n'), ((8633, 8670), 'src.utils.file_manager.fm.get_file_content_sha256', 'fm.get_file_content_sha256', (['file_path'], {}), '(file_path)\n', (8659, 8670), False, 'from src.utils.file_manager import fm\n'), ((8685, 8725), 'src.utils.file_manager.fm.ipfs_upload_file_from_path', 'fm.ipfs_upload_file_from_path', (['file_path'], {}), '(file_path)\n', (8714, 8725), False, 'from src.utils.file_manager import fm\n'), ((9399, 9507), 'src.utils.db_client.cli.update_one', 'cli.update_one', (['user_did', 'app_did', 'COL_IPFS_FILES', 'col_filter', 'updated_metadata'], {'is_extra': '(True)'}), '(user_did, app_did, COL_IPFS_FILES, col_filter,\n updated_metadata, is_extra=True, **kwargs)\n', (9413, 9507), False, 'from src.utils.db_client import cli\n'), ((9894, 9979), 'logging.info', 'logging.info', (['f"""[ipfs-files] The existing file with {rel_path} has been updated"""'], {}), "(f'[ipfs-files] The existing file with {rel_path} has been updated'\n )\n", (9906, 9979), False, 'import logging\n'), ((10216, 10303), 'src.utils.db_client.cli.delete_one', 'cli.delete_one', (['user_did', 'app_did', 'COL_IPFS_FILES', 'col_filter'], {'is_check_exist': '(False)'}), '(user_did, app_did, COL_IPFS_FILES, col_filter,\n is_check_exist=False)\n', (10230, 10303), False, 'from src.utils.db_client import cli\n'), ((10400, 10464), 'logging.info', 'logging.info', (['f"""[ipfs-files] Remove an existing file {rel_path}"""'], {}), "(f'[ipfs-files] Remove an existing file {rel_path}')\n", (10412, 10464), False, 'import logging\n'), ((11236, 11277), 'src.utils.file_manager.fm.get_response_by_file_path', 'fm.get_response_by_file_path', (['cached_file'], {}), '(cached_file)\n', (11264, 11277), False, 'from src.utils.file_manager import fm\n'), ((12047, 12106), 'src.utils.db_client.cli.find_one', 'cli.find_one', (['user_did', 'app_did', 'COL_IPFS_FILES', 'src_filter'], {}), '(user_did, app_did, COL_IPFS_FILES, src_filter)\n', (12059, 12106), False, 'from src.utils.db_client import cli\n'), ((12125, 12184), 'src.utils.db_client.cli.find_one', 'cli.find_one', (['user_did', 'app_did', 'COL_IPFS_FILES', 'dst_filter'], {}), '(user_did, app_did, COL_IPFS_FILES, dst_filter)\n', (12137, 12184), False, 'from src.utils.db_client import cli\n'), ((13778, 13898), 'src.utils.db_client.cli.find_one', 'cli.find_one', (['user_did', 'app_did', 'COL_IPFS_FILES', 'col_filter'], {'create_on_absence': '(True)', 'throw_exception': 'throw_exception'}), '(user_did, app_did, COL_IPFS_FILES, col_filter,\n create_on_absence=True, throw_exception=throw_exception)\n', (13790, 13898), False, 'from src.utils.db_client import cli\n'), ((14436, 14554), 'src.utils.db_client.cli.find_one_origin', 'cli.find_one_origin', (['DID_INFO_DB_NAME', 'COL_IPFS_CID_REF', '{CID: cid}'], {'create_on_absence': '(True)', 'throw_exception': '(False)'}), '(DID_INFO_DB_NAME, COL_IPFS_CID_REF, {CID: cid},\n create_on_absence=True, throw_exception=False)\n', (14455, 14554), False, 'from src.utils.db_client import cli\n'), ((15035, 15153), 'src.utils.db_client.cli.find_one_origin', 'cli.find_one_origin', (['DID_INFO_DB_NAME', 'COL_IPFS_CID_REF', '{CID: cid}'], {'create_on_absence': '(True)', 'throw_exception': '(False)'}), '(DID_INFO_DB_NAME, COL_IPFS_CID_REF, {CID: cid},\n create_on_absence=True, throw_exception=False)\n', (15054, 15153), False, 'from src.utils.db_client import cli\n'), ((15660, 15780), 'src.utils.db_client.cli.update_one_origin', 'cli.update_one_origin', 
(['DID_INFO_DB_NAME', 'COL_IPFS_CID_REF', 'col_filter', 'update'], {'create_on_absence': '(True)', 'is_extra': '(True)'}), '(DID_INFO_DB_NAME, COL_IPFS_CID_REF, col_filter,\n update, create_on_absence=True, is_extra=True)\n', (15681, 15780), False, 'from src.utils.db_client import cli\n'), ((2728, 2760), 'src.utils.file_manager.fm.ipfs_get_cache_root', 'fm.ipfs_get_cache_root', (['user_did'], {}), '(user_did)\n', (2750, 2760), False, 'from src.utils.file_manager import fm\n'), ((4355, 4419), 'src.utils.http_exception.InvalidParameterException', 'InvalidParameterException', (['f"""The directory {path} is not exist."""'], {}), "(f'The directory {path} is not exist.')\n", (4380, 4419), False, 'from src.utils.http_exception import InvalidParameterException, FileNotFoundException, AlreadyExistsException\n'), ((7386, 7423), 'src.utils.file_manager.fm.get_file_content_sha256', 'fm.get_file_content_sha256', (['file_path'], {}), '(file_path)\n', (7412, 7423), False, 'from src.utils.file_manager import fm\n'), ((7754, 7814), 'src.utils_v1.payment.vault_service_manage.update_used_storage_for_files_data', 'update_used_storage_for_files_data', (['user_did', 'metadata[SIZE]'], {}), '(user_did, metadata[SIZE])\n', (7788, 7814), False, 'from src.utils_v1.payment.vault_service_manage import update_used_storage_for_files_data\n'), ((8268, 8378), 'src.utils.db_client.cli.find_one', 'cli.find_one', (['user_did', 'app_did', 'COL_IPFS_FILES', 'col_filter'], {'create_on_absence': '(True)', 'throw_exception': '(False)'}), '(user_did, app_did, COL_IPFS_FILES, col_filter,\n create_on_absence=True, throw_exception=False)\n', (8280, 8378), False, 'from src.utils.db_client import cli\n'), ((8949, 9050), 'logging.info', 'logging.info', (['f"""The file {rel_path} metadata is consistent with existed one, skip updation"""'], {}), "(\n f'The file {rel_path} metadata is consistent with existed one, skip updation'\n )\n", (8961, 9050), False, 'import logging\n'), ((9808, 9884), 'src.utils_v1.payment.vault_service_manage.update_used_storage_for_files_data', 'update_used_storage_for_files_data', (['user_did', '(size - existing_metadata[SIZE])'], {}), '(user_did, size - existing_metadata[SIZE])\n', (9842, 9884), False, 'from src.utils_v1.payment.vault_service_manage import update_used_storage_for_files_data\n'), ((11025, 11057), 'src.utils.file_manager.fm.ipfs_get_cache_root', 'fm.ipfs_get_cache_root', (['user_did'], {}), '(user_did)\n', (11047, 11057), False, 'from src.utils.file_manager import fm\n'), ((11143, 11220), 'src.utils.file_manager.fm.ipfs_download_file_to_path', 'fm.ipfs_download_file_to_path', (['metadata[COL_IPFS_FILES_IPFS_CID]', 'cached_file'], {}), '(metadata[COL_IPFS_FILES_IPFS_CID], cached_file)\n', (11172, 11220), False, 'from src.utils.file_manager import fm\n'), ((12227, 12324), 'src.utils.http_exception.FileNotFoundException', 'FileNotFoundException', ([], {'msg': 'f"""The source file {src_path} not found, impossible to move/copy."""'}), "(msg=\n f'The source file {src_path} not found, impossible to move/copy.')\n", (12248, 12324), False, 'from src.utils.http_exception import InvalidParameterException, FileNotFoundException, AlreadyExistsException\n'), ((12358, 12477), 'src.utils.http_exception.AlreadyExistsException', 'AlreadyExistsException', ([], {'msg': 'f"""A file with destnation name {dst_path} already exists, impossible to move/copy"""'}), "(msg=\n f'A file with destnation name {dst_path} already exists, impossible to move/copy'\n )\n", (12380, 12477), False, 'from src.utils.http_exception import 
InvalidParameterException, FileNotFoundException, AlreadyExistsException\n'), ((12958, 13017), 'src.utils.db_client.cli.insert_one', 'cli.insert_one', (['user_did', 'app_did', 'COL_IPFS_FILES', 'metadata'], {}), '(user_did, app_did, COL_IPFS_FILES, metadata)\n', (12972, 13017), False, 'from src.utils.db_client import cli\n'), ((13030, 13089), 'src.utils_v1.payment.vault_service_manage.update_used_storage_for_files_data', 'update_used_storage_for_files_data', (['user_did', 'src_doc[SIZE]'], {}), '(user_did, src_doc[SIZE])\n', (13064, 13089), False, 'from src.utils_v1.payment.vault_service_manage import update_used_storage_for_files_data\n'), ((13116, 13240), 'src.utils.db_client.cli.update_one', 'cli.update_one', (['user_did', 'app_did', 'COL_IPFS_FILES', 'src_filter', "{'$set': {COL_IPFS_FILES_PATH: dst_path}}"], {'is_extra': '(True)'}), "(user_did, app_did, COL_IPFS_FILES, src_filter, {'$set': {\n COL_IPFS_FILES_PATH: dst_path}}, is_extra=True)\n", (13130, 13240), False, 'from src.utils.db_client import cli\n'), ((14349, 14401), 'logging.error', 'logging.error', (['f"""CID must be provided for increase."""'], {}), "(f'CID must be provided for increase.')\n", (14362, 14401), False, 'import logging\n'), ((14706, 14796), 'src.utils.db_client.cli.insert_one_origin', 'cli.insert_one_origin', (['DID_INFO_DB_NAME', 'COL_IPFS_CID_REF', 'doc'], {'create_on_absence': '(True)'}), '(DID_INFO_DB_NAME, COL_IPFS_CID_REF, doc,\n create_on_absence=True)\n', (14727, 14796), False, 'from src.utils.db_client import cli\n'), ((14954, 15000), 'logging.error', 'logging.error', (['f"""CID must exist for decrease."""'], {}), "(f'CID must exist for decrease.')\n", (14967, 15000), False, 'import logging\n'), ((15216, 15238), 'src.utils.file_manager.fm.ipfs_unpin_cid', 'fm.ipfs_unpin_cid', (['cid'], {}), '(cid)\n', (15233, 15238), False, 'from src.utils.file_manager import fm\n'), ((15302, 15397), 'src.utils.db_client.cli.delete_one_origin', 'cli.delete_one_origin', (['DID_INFO_DB_NAME', 'COL_IPFS_CID_REF', '{CID: cid}'], {'is_check_exist': '(False)'}), '(DID_INFO_DB_NAME, COL_IPFS_CID_REF, {CID: cid},\n is_check_exist=False)\n', (15323, 15397), False, 'from src.utils.db_client import cli\n'), ((15406, 15428), 'src.utils.file_manager.fm.ipfs_unpin_cid', 'fm.ipfs_unpin_cid', (['cid'], {}), '(cid)\n', (15423, 15428), False, 'from src.utils.file_manager import fm\n'), ((6749, 6781), 'src.utils.file_manager.fm.ipfs_get_cache_root', 'fm.ipfs_get_cache_root', (['user_did'], {}), '(user_did)\n', (6771, 6781), False, 'from src.utils.file_manager import fm\n'), ((8429, 8521), 'logging.error', 'logging.error', (['f"""The file {rel_path} metadata is not existed, impossible to be updated"""'], {}), "(\n f'The file {rel_path} metadata is not existed, impossible to be updated')\n", (8442, 8521), False, 'import logging\n'), ((14006, 14076), 'src.utils.http_exception.FileNotFoundException', 'FileNotFoundException', ([], {'msg': 'f"""No file metadata with path: {path} found"""'}), "(msg=f'No file metadata with path: {path} found')\n", (14027, 14076), False, 'from src.utils.http_exception import InvalidParameterException, FileNotFoundException, AlreadyExistsException\n')]
|
"""This module implements a time series class with related methods."""
from collections import deque
from datetime import datetime, timedelta
from IPython.display import display
from matplotlib.axes import Axes
from matplotlib.figure import Figure
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from typing import Any, Callable, Dict, Iterator, List, Optional, Set, Tuple
from waad.utils.asset import Asset
from waad.utils.config import ANOMALIES_SCORES
from waad.utils.postgreSQL_utils import Table
class StatSeries:
"""This class defines a statistical series and implements some computing and plotting methods on it.
Attributes:
name (str): Name of the series.
        series (List[float]): Contains the actual data of the series.
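
    Example (illustrative):
        >>> s = StatSeries('logons_per_day', [3.0, 2.0, 4.0])
        >>> s.name
        'logons_per_day'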
"""
def __init__(self, name: str, series: List[float]):
self.name = name
self.series = series
self.anomalies: List = []
def IQR_outlier_detection(self, factor: float = 1.5) -> List[int]:
"""Implement IQR outliers detection.
Args:
            factor: IQR outliers detection factor (1.5 for standard method, up to 2 or 3 for only extreme outliers).

        Returns:
            A ``List`` containing indexes of outlier values detected.
        """
series = pd.Series(self.series)
Q1 = series.quantile(0.25)
Q3 = series.quantile(0.75)
IQR = Q3 - Q1
self.anomalies = series[((series < Q1 - factor * IQR) | (series > Q3 + factor * IQR))].index.values.tolist()
return self.anomalies
def std_outlier_detection(self, factor: float = 2) -> List[int]:
"""Implement std outliers detection.
Args:
            factor: std outliers detection factor (2 for the standard 95% method, up to 3 for only extreme outliers).
Returns:
A ``List`` containing indexes of outlier values detected.
"""
series = pd.Series(self.series)
std = series.std()
mean = series.mean()
self.anomalies = series[((series < mean - factor * std) | (series > mean + factor * std))].index.values.tolist()
return self.anomalies
def custom_outlier_detection(self, indicator_bound: Optional[float] = None, IQR_factor: float = 2, sigma_factor: float = 3):
"""Implement custom IQR detection, enriched by a std criterion to be more robust.
Args:
            indicator_bound: Physical criterion that helps remove False Positives. For example, with a series representing the number of authentications over time and containing
                a vast majority of zeros, the IQR would raise a lot of outliers even if they only represent an increase of 2 authentications from the median (apparently 0). This
                is due to the fact that an attacker's work pattern is highly non-Gaussian.
            IQR_factor: IQR outliers detection factor (1.5 for standard method, up to 2 or 3 for only extreme outliers).
            sigma_factor: std outliers detection factor (2 for the standard 95% method, up to 3 for only extreme outliers).
Returns:
A ``List`` containing indexes of outlier values detected.
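
        Example (illustrative; a mostly-zero series with one spike):
            >>> s = StatSeries('demo', [0] * 30 + [5])
            >>> s.custom_outlier_detection()
            [30]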
"""
series = pd.Series(self.series)
std = series.std()
mean = series.mean()
median = series.median()
Q1 = series.quantile(0.25)
Q3 = series.quantile(0.75)
IQR = Q3 - Q1
        # Combination of a custom (stricter) IQR method and the 3-sigma rule. Even if distributions over time are not Gaussian, this is expected to surface outliers
outliers = series[((series < Q1 - IQR_factor * IQR) | (series > Q3 + IQR_factor * IQR)) & ((series < mean - sigma_factor * std) | (series > mean + sigma_factor * std))].index.values.tolist()
# Apply ``indicator_bound``
if indicator_bound is not None:
to_remove = []
for index in outliers:
if (indicator_bound > 0) and (series[index] < median + indicator_bound):
to_remove.append(index)
elif (indicator_bound < 0) and (series[index] > median + indicator_bound):
to_remove.append(index)
for index in to_remove:
outliers.remove(index)
self.anomalies = outliers
return outliers
def contains_isolated_values(self, percentage_null_values: int = 90) -> bool:
"""Detect if a series contains isolated values.
Args:
percentage_null_values: Percentage of zero values used as a threshold to evaluate if the series contains isolated points.
Returns:
A ``bool`` describing whether a time series contains isolated values or not.
"""
nb_non_null_values = np.flatnonzero(self.series).size
if nb_non_null_values < (1 - percentage_null_values / 100) * len(self.series) and len(self.series) >= 1:
return True
return False
def detect_isolated_groups(self) -> List[List[int]]:
"""Detect isolated groups of values in ``time_series``.
Returns:
Groups of consecutive indices, corresponding to the isolated values (separated by zeros).
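
        Examples:
            For ``[0, 1, 2, 0, 3]`` the isolated groups are ``[[1, 2], [4]]``
            (indices shown as plain ints for readability).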
"""
indices = np.flatnonzero(self.series)
groups: List = []
if indices.size == 0:
return groups
        current_group = [indices[0]]
        for index in indices[1:]:
            if index - current_group[-1] == 1:
                current_group.append(index)
            else:
                groups.append(current_group)
                current_group = [index]
        # Append the trailing group, otherwise the last isolated burst is silently dropped.
        groups.append(current_group)
        return groups
def detect_abnormal_outbreak(self, legitimate_model_duration: int = 50):
"""Detect if there is an abnormal outbreak values in ``time_series`` if the first
`legitimate_model_duration` percentage of the series is zero."""
index = next((i for i, x in enumerate(self.series) if x), None)
if index is not None and index > legitimate_model_duration / 100 * len(self.series):
self.anomalies = [index]
@staticmethod
def detect_abnormal_outbreak_static(series: List[float], legitimate_model_duration: int = 50):
"""Detect if there is an abnormal outbreak values in ``time_series`` if the first
`legitimate_model_duration` percentage of the series is zero."""
index = next((i for i, x in enumerate(series) if x), None)
if index is not None and index > legitimate_model_duration / 100 * len(series):
return [index]
else:
return []
def compute_anomalies(self, anomalies_detector: Optional[Callable] = None, config: Optional[Dict[str, Dict]] = None):
if anomalies_detector is not None:
self.anomalies = anomalies_detector(self.series)
else:
if config is not None:
self.custom_outlier_detection(indicator_bound=config[self.name]["indicator_bound"])
else:
self.custom_outlier_detection()
def plot_series(self, ax: Axes):
"""Plot a series.
Examples:
>>> import matplotlib.pyplot as plt
>>> import numpy as np
            >>> from waad.utils.indicators import StatSeries
>>>
            >>> data = [355, 368, 0, 0, 0, 447, 466, 250, 367, 0, 0, 0, 320,
            ...         307, 395, 601, 258, 0, 0, 0, 382, 400, 326, 319, 0, 0,
            ...         304, 360, 327, 368, 0, 0, 0, 383, 327, 422, 290, 253, 0,
            ...         0, 446, 414, 381, 393, 0, 0, 0, 0, 373, 387, 312, 327,
            ...         0, 0, 370, 275, 436, 348]
>>>
>>> demo = StatSeries('demo', data)
>>> fig, ax = plt.subplots(figsize=(30, 5))
>>> demo.plot_series(ax)
.. testcleanup::
fig.savefig(f'{DOCTEST_FIGURES_PATH}/test.png')
.. figure:: ../../_static/doctest_figures/time_series_plot_example.png
:align: center
:alt: time series plot example
Args:
ax: ``Axes`` to plot series on.
"""
ax.plot([i for i in range(1, len(self.series) + 1)], self.series)
ax.set_title(self.name)
def get_figure(self, figsize: Tuple[int, int] = (20, 4)) -> Figure:
fig, ax = plt.subplots(figsize=figsize)
self.plot_series(ax)
return fig
def display(self):
fig = self.get_figure()
fig.axes[0].vlines(np.array(self.anomalies) + 1, *fig.axes[0].get_ylim(), colors="r")
display(fig)
class TimeSeries(StatSeries):
"""This class is a child of ``StatSeries`` taking into account a notion of time.
Attributes:
time_step (float): Time step in seconds between each index.
start_time (Optional[str]): Start time of the series in ISO format.
intermediary_content (Optional[Any]): Helper that keeps in memory intermediary content used during previous computations.
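
    Examples:
        Illustrative doctest (hypothetical values):

        >>> ts = TimeSeries('auth_count', [0, 0, 5], time_step=3600, start_time='2021-01-01T00:00:00')
        >>> ts.anomalies = [2]
        >>> ts.get_anomalies_date()
        ['2021-01-01T02:00:00 - 2021-01-01T03:00:00']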
"""
def __init__(self, name: str, series: List[float], time_step: float, start_time: Optional[str] = None, intermediary_content: Optional[Any] = None):
super().__init__(name, series)
self.time_step = time_step
self.start_time = start_time
self.intermediary_content = intermediary_content
def get_anomalies_date(self):
res = []
for anomaly in self.anomalies:
            try:
                start = datetime.fromisoformat(self.start_time) + timedelta(seconds=self.time_step * anomaly)
                end = start + timedelta(seconds=self.time_step)
                res.append(f'{start.isoformat()} - {end.isoformat()}')
            except Exception as e:
                print(e)
return res
def detailed_display(self):
self.display()
anomalies_date = self.get_anomalies_date()
for i, anomaly in enumerate(self.anomalies):
print(f"Anomaly found at time step {anomaly} / {anomalies_date[i]}")
print(f"Pic value of {self.series[anomaly]} on indicator")
if self.intermediary_content is not None:
print(f"Intermediary content : {self.intermediary_content[anomaly]}")
print()
|
[
"datetime.datetime.fromisoformat",
"numpy.flatnonzero",
"IPython.display.display",
"numpy.array",
"pandas.Series",
"datetime.timedelta",
"matplotlib.pyplot.subplots"
] |
[((1209, 1231), 'pandas.Series', 'pd.Series', (['self.series'], {}), '(self.series)\n', (1218, 1231), True, 'import pandas as pd\n'), ((1834, 1856), 'pandas.Series', 'pd.Series', (['self.series'], {}), '(self.series)\n', (1843, 1856), True, 'import pandas as pd\n'), ((3106, 3128), 'pandas.Series', 'pd.Series', (['self.series'], {}), '(self.series)\n', (3115, 3128), True, 'import pandas as pd\n'), ((5118, 5145), 'numpy.flatnonzero', 'np.flatnonzero', (['self.series'], {}), '(self.series)\n', (5132, 5145), True, 'import numpy as np\n'), ((8233, 8262), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': 'figsize'}), '(figsize=figsize)\n', (8245, 8262), True, 'import matplotlib.pyplot as plt\n'), ((8469, 8481), 'IPython.display.display', 'display', (['fig'], {}), '(fig)\n', (8476, 8481), False, 'from IPython.display import display\n'), ((4655, 4682), 'numpy.flatnonzero', 'np.flatnonzero', (['self.series'], {}), '(self.series)\n', (4669, 4682), True, 'import numpy as np\n'), ((8394, 8418), 'numpy.array', 'np.array', (['self.anomalies'], {}), '(self.anomalies)\n', (8402, 8418), True, 'import numpy as np\n'), ((9351, 9390), 'datetime.datetime.fromisoformat', 'datetime.fromisoformat', (['self.start_time'], {}), '(self.start_time)\n', (9373, 9390), False, 'from datetime import datetime, timedelta\n'), ((9393, 9436), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(self.time_step * anomaly)'}), '(seconds=self.time_step * anomaly)\n', (9402, 9436), False, 'from datetime import datetime, timedelta\n'), ((9467, 9500), 'datetime.timedelta', 'timedelta', ([], {'seconds': 'self.time_step'}), '(seconds=self.time_step)\n', (9476, 9500), False, 'from datetime import datetime, timedelta\n')]
|
from atst.database import db
from atst.domain.common import Query
from atst.models.audit_event import AuditEvent
class AuditEventQuery(Query):
model = AuditEvent
@classmethod
def get_all(cls, pagination_opts):
query = db.session.query(cls.model).order_by(cls.model.time_created.desc())
return cls.paginate(query, pagination_opts)
@classmethod
def get_portfolio_events(cls, portfolio_id, pagination_opts):
query = (
db.session.query(cls.model)
.filter(cls.model.portfolio_id == portfolio_id)
.order_by(cls.model.time_created.desc())
)
return cls.paginate(query, pagination_opts)
@classmethod
def get_application_events(cls, application_id, pagination_opts):
query = (
db.session.query(cls.model)
.filter(cls.model.application_id == application_id)
.order_by(cls.model.time_created.desc())
)
return cls.paginate(query, pagination_opts)
class AuditLog(object):
@classmethod
# TODO: see if this is being used anywhere and remove if not
def log_system_event(cls, resource, action, portfolio=None):
return cls._log(resource=resource, action=action, portfolio=portfolio)
@classmethod
def get_all_events(cls, pagination_opts=None):
return AuditEventQuery.get_all(pagination_opts)
@classmethod
def get_portfolio_events(cls, portfolio, pagination_opts=None):
return AuditEventQuery.get_portfolio_events(portfolio.id, pagination_opts)
@classmethod
def get_application_events(cls, application, pagination_opts=None):
return AuditEventQuery.get_application_events(application.id, pagination_opts)
@classmethod
def get_by_resource(cls, resource_id):
return (
db.session.query(AuditEvent)
.filter(AuditEvent.resource_id == resource_id)
.order_by(AuditEvent.time_created.desc())
.all()
)
@classmethod
def _resource_type(cls, resource):
return type(resource).__name__.lower()
@classmethod
# TODO: see if this is being used anywhere and remove if not
def _log(cls, user=None, portfolio=None, resource=None, action=None):
resource_id = resource.id if resource else None
resource_type = cls._resource_type(resource) if resource else None
portfolio_id = portfolio.id if portfolio else None
audit_event = AuditEventQuery.create(
user=user,
portfolio_id=portfolio_id,
resource_id=resource_id,
resource_type=resource_type,
action=action,
)
return AuditEventQuery.add_and_commit(audit_event)
|
[
"atst.database.db.session.query",
"atst.models.audit_event.AuditEvent.time_created.desc"
] |
[((241, 268), 'atst.database.db.session.query', 'db.session.query', (['cls.model'], {}), '(cls.model)\n', (257, 268), False, 'from atst.database import db\n'), ((1926, 1956), 'atst.models.audit_event.AuditEvent.time_created.desc', 'AuditEvent.time_created.desc', ([], {}), '()\n', (1954, 1956), False, 'from atst.models.audit_event import AuditEvent\n'), ((475, 502), 'atst.database.db.session.query', 'db.session.query', (['cls.model'], {}), '(cls.model)\n', (491, 502), False, 'from atst.database import db\n'), ((796, 823), 'atst.database.db.session.query', 'db.session.query', (['cls.model'], {}), '(cls.model)\n', (812, 823), False, 'from atst.database import db\n'), ((1816, 1844), 'atst.database.db.session.query', 'db.session.query', (['AuditEvent'], {}), '(AuditEvent)\n', (1832, 1844), False, 'from atst.database import db\n')]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('crowdsourcing', '0015_auto_20150709_0149'),
]
operations = [
migrations.CreateModel(
name='ConversationRecipient',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('status', models.IntegerField(default=1)),
('message', models.ForeignKey(to='crowdsourcing.Conversation')),
('recipient', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
),
migrations.RemoveField(
model_name='messagerecipient',
name='message',
),
migrations.RemoveField(
model_name='messagerecipient',
name='recipient',
),
migrations.DeleteModel(
name='MessageRecipient',
),
]
|
[
"django.db.migrations.swappable_dependency",
"django.db.migrations.RemoveField",
"django.db.models.ForeignKey",
"django.db.migrations.DeleteModel",
"django.db.models.AutoField",
"django.db.models.IntegerField"
] |
[((210, 267), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (241, 267), False, 'from django.db import models, migrations\n'), ((813, 882), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""messagerecipient"""', 'name': '"""message"""'}), "(model_name='messagerecipient', name='message')\n", (835, 882), False, 'from django.db import models, migrations\n'), ((927, 998), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""messagerecipient"""', 'name': '"""recipient"""'}), "(model_name='messagerecipient', name='recipient')\n", (949, 998), False, 'from django.db import models, migrations\n'), ((1043, 1090), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""MessageRecipient"""'}), "(name='MessageRecipient')\n", (1065, 1090), False, 'from django.db import models, migrations\n'), ((467, 560), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (483, 560), False, 'from django.db import models, migrations\n'), ((586, 616), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)'}), '(default=1)\n', (605, 616), False, 'from django.db import models, migrations\n'), ((647, 697), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""crowdsourcing.Conversation"""'}), "(to='crowdsourcing.Conversation')\n", (664, 697), False, 'from django.db import models, migrations\n'), ((730, 776), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': 'settings.AUTH_USER_MODEL'}), '(to=settings.AUTH_USER_MODEL)\n', (747, 776), False, 'from django.db import models, migrations\n')]
|
"""
Django settings for the loonflow project.
Generated by 'django-admin startproject' using Django 2.0.4.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
import platform
VERSION = '2.0.10'
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '<KEY>'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
# 'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'apps.account',
'apps.ticket',
'apps.workflow',
]
ROOT_URLCONF = 'loonflow.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'loonflow.contexts.global_variables',
],
'libraries':{
'loonflow_filter': 'apps.manage.templatetags.loonflow_filter',
}
},
},
]
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATIC_URL = '/static/'
STATICFILES_DIRS = (
("bower_components", os.path.join(STATIC_ROOT, 'bower_components')),
("dist", os.path.join(STATIC_ROOT, 'dist')),
("plugins", os.path.join(STATIC_ROOT, 'plugins')),
)
WSGI_APPLICATION = 'loonflow.wsgi.application'
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
LANGUAGE_CODE = 'zh-hans'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = False
USE_TZ = False
DATETIME_FORMAT = 'Y-m-d H:i:s'
TIME_FORMAT = 'H:i:s'
AUTH_USER_MODEL = 'account.LoonUser'
FIXTURE_DIRS = ['fixtures/']
STATIC_FILES_VERSION = '1.0'
LOGIN_URL = '/manage/login'
APPEND_SLASH = False # disable urls.W002 warning
if platform.system() == 'Windows':
HOMEPATH = os.environ['HOMEPATH']
else:
HOMEPATH = os.environ['HOME']
JWT_SALT = 'aUApFqfQjyYVAPo8'
|
[
"platform.system",
"os.path.abspath",
"os.path.join"
] |
[((1868, 1900), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""static"""'], {}), "(BASE_DIR, 'static')\n", (1880, 1900), False, 'import os\n'), ((3001, 3018), 'platform.system', 'platform.system', ([], {}), '()\n', (3016, 3018), False, 'import platform\n'), ((467, 492), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (482, 492), False, 'import os\n'), ((1972, 2017), 'os.path.join', 'os.path.join', (['STATIC_ROOT', '"""bower_components"""'], {}), "(STATIC_ROOT, 'bower_components')\n", (1984, 2017), False, 'import os\n'), ((2033, 2066), 'os.path.join', 'os.path.join', (['STATIC_ROOT', '"""dist"""'], {}), "(STATIC_ROOT, 'dist')\n", (2045, 2066), False, 'import os\n'), ((2085, 2121), 'os.path.join', 'os.path.join', (['STATIC_ROOT', '"""plugins"""'], {}), "(STATIC_ROOT, 'plugins')\n", (2097, 2121), False, 'import os\n'), ((1259, 1294), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""templates"""'], {}), "(BASE_DIR, 'templates')\n", (1271, 1294), False, 'import os\n')]
|
#!/usr/bin/env python
import os,sys
sys.path.insert(0,os.path.abspath(os.path.dirname(__file__)))
|
[
"os.path.dirname"
] |
[((72, 97), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (87, 97), False, 'import os, sys\n')]
|
##########################################################################
#
# Copyright (c) 2009-2010, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import IECore
class SubstitutedDictTest( unittest.TestCase ) :
def test( self ) :
d = {
"a" : "hello ${name}",
"b" : IECore.CompoundObject(
{
"c" : IECore.StringData( "goodbye ${place}" )
}
)
}
ds = IECore.SubstitutedDict( d, { "name" : "john", "place" : "london" } )
self.assertEqual( ds["a"], "hello john" )
self.assertEqual( ds["b"]["c"], IECore.StringData( "goodbye london" ) )
self.failUnless( isinstance( ds["b"], IECore.SubstitutedDict ) )
self.assertEqual( ds.get( "a" ), "hello john" )
self.assertEqual( ds.get( "notThere" ), None )
self.assertEqual( ds.get( "notThere", 10 ), 10 )
self.assertEqual( ds.get( "a", substituted=False ), "hello ${name}" )
self.assertEqual( ds.get( "b", substituted=False )["c"], IECore.StringData( "goodbye ${place}" ) )
self.failUnless( ds.get( "b", substituted=False ).isInstanceOf( IECore.CompoundObject.staticTypeId() ) )
self.assertEqual( ds.get( "notThere", substituted=False ), None )
self.assertEqual( ds, ds )
keys = ds.keys()
self.assertEqual( len( keys ), 2 )
self.failUnless( "a" in keys )
self.failUnless( "b" in keys )
values = ds.values()
self.assertEqual( len( values ), len( keys ) )
self.assertEqual( values[keys.index( "a" )], "hello john" )
self.failUnless( isinstance( values[keys.index( "b" )], IECore.SubstitutedDict ) )
values = ds.values( substituted=False )
self.assertEqual( len( values ), len( keys ) )
self.assertEqual( values[keys.index( "a" )], "hello ${name}" )
self.failUnless( isinstance( values[keys.index( "b" )], IECore.CompoundObject ) )
self.assertEqual( zip( *(ds.items()) ), [ tuple( ds.keys() ), tuple( ds.values() ) ] )
def testEquality( self ) :
d = IECore.SubstitutedDict(
{
"a" : "aa",
"b" : "${b}",
},
{
"b" : "x",
}
)
d2 = IECore.SubstitutedDict(
{
"a" : "aa",
"b" : "${b}",
},
{
"b" : "x",
}
)
d3 = IECore.SubstitutedDict(
{
"a" : "aa",
"b" : "different ${b}",
},
{
"b" : "x",
}
)
d4 = IECore.SubstitutedDict(
{
"a" : "aa",
"b" : "${b}",
},
{
"b" : "xxx",
}
)
self.assertEqual( d, d )
self.assertEqual( d, d2 )
self.assertNotEqual( d, d3 )
self.assertNotEqual( d, d4 )
if __name__ == "__main__":
unittest.main()
|
[
"unittest.main",
"IECore.CompoundObject.staticTypeId",
"IECore.SubstitutedDict",
"IECore.StringData"
] |
[((4137, 4152), 'unittest.main', 'unittest.main', ([], {}), '()\n', (4150, 4152), False, 'import unittest\n'), ((2040, 2102), 'IECore.SubstitutedDict', 'IECore.SubstitutedDict', (['d', "{'name': 'john', 'place': 'london'}"], {}), "(d, {'name': 'john', 'place': 'london'})\n", (2062, 2102), False, 'import IECore\n'), ((3547, 3607), 'IECore.SubstitutedDict', 'IECore.SubstitutedDict', (["{'a': 'aa', 'b': '${b}'}", "{'b': 'x'}"], {}), "({'a': 'aa', 'b': '${b}'}, {'b': 'x'})\n", (3569, 3607), False, 'import IECore\n'), ((3655, 3715), 'IECore.SubstitutedDict', 'IECore.SubstitutedDict', (["{'a': 'aa', 'b': '${b}'}", "{'b': 'x'}"], {}), "({'a': 'aa', 'b': '${b}'}, {'b': 'x'})\n", (3677, 3715), False, 'import IECore\n'), ((3763, 3833), 'IECore.SubstitutedDict', 'IECore.SubstitutedDict', (["{'a': 'aa', 'b': 'different ${b}'}", "{'b': 'x'}"], {}), "({'a': 'aa', 'b': 'different ${b}'}, {'b': 'x'})\n", (3785, 3833), False, 'import IECore\n'), ((3881, 3943), 'IECore.SubstitutedDict', 'IECore.SubstitutedDict', (["{'a': 'aa', 'b': '${b}'}", "{'b': 'xxx'}"], {}), "({'a': 'aa', 'b': '${b}'}, {'b': 'xxx'})\n", (3903, 3943), False, 'import IECore\n'), ((2190, 2225), 'IECore.StringData', 'IECore.StringData', (['"""goodbye london"""'], {}), "('goodbye london')\n", (2207, 2225), False, 'import IECore\n'), ((2578, 2615), 'IECore.StringData', 'IECore.StringData', (['"""goodbye ${place}"""'], {}), "('goodbye ${place}')\n", (2595, 2615), False, 'import IECore\n'), ((2686, 2722), 'IECore.CompoundObject.staticTypeId', 'IECore.CompoundObject.staticTypeId', ([], {}), '()\n', (2720, 2722), False, 'import IECore\n'), ((1975, 2012), 'IECore.StringData', 'IECore.StringData', (['"""goodbye ${place}"""'], {}), "('goodbye ${place}')\n", (1992, 2012), False, 'import IECore\n')]
|
"""
Copyright (C) 2017-2018 University of Massachusetts Amherst.
This file is part of "learned-string-alignments"
http://github.com/iesl/learned-string-alignments
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import torch
import codecs
import subprocess
import json
def file_lines(filename, codec):
    # codecs.open already returns decoded text lines, so yield them directly;
    # calling line.decode(codec) on the already-decoded line would fail.
    f = codecs.open(filename, 'r', codec)
    for line in f:
        yield line
    f.close()
def row_wise_dot(tensor1, tensor2):
return torch.sum(tensor1 * tensor2, dim=1,keepdim=True)
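
# Illustrative check (hypothetical values): for two (N, D) tensors this returns an
# (N, 1) column of per-row dot products, e.g.
#   a = torch.ones(2, 3); b = torch.full((2, 3), 2.0)
#   row_wise_dot(a, b)  # -> tensor([[6.], [6.]])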
def wc_minus_l(fname):
p = subprocess.Popen(['wc', '-l', fname], stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
result, err = p.communicate()
if p.returncode != 0:
raise IOError(err)
return int(result.strip().split()[0])
def __filter_json(the_dict):
print("__filter_json")
print(the_dict)
res = {}
for k in the_dict.keys():
print("k : {} \t {} \t {}".format(k,the_dict[k],type(the_dict[k])))
if type(the_dict[k]) is str or type(the_dict[k]) is float or type(the_dict[k]) is int or type(the_dict[k]) is list:
res[k] = the_dict[k]
elif type(the_dict[k]) is dict:
res[k] = __filter_json(the_dict[k])
print("res: {} ".format(res))
return res
def save_dict_to_json(the_dict,the_file):
with open(the_file, 'w') as fout:
fout.write(json.dumps(__filter_json(the_dict)))
fout.write("\n")
|
[
"subprocess.Popen",
"torch.sum",
"codecs.open"
] |
[((785, 818), 'codecs.open', 'codecs.open', (['filename', '"""r"""', 'codec'], {}), "(filename, 'r', codec)\n", (796, 818), False, 'import codecs\n'), ((932, 981), 'torch.sum', 'torch.sum', (['(tensor1 * tensor2)'], {'dim': '(1)', 'keepdim': '(True)'}), '(tensor1 * tensor2, dim=1, keepdim=True)\n', (941, 981), False, 'import torch\n'), ((1013, 1103), 'subprocess.Popen', 'subprocess.Popen', (["['wc', '-l', fname]"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "(['wc', '-l', fname], stdout=subprocess.PIPE, stderr=\n subprocess.PIPE)\n", (1029, 1103), False, 'import subprocess\n')]
|
'''
Copyright (c) <2012> <NAME> <<EMAIL>>
Permission is hereby granted, free of charge, to any person obtaining a copy of this
software and associated documentation files (the "Software"), to deal in the Software
without restriction, including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR
A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''
from .mechanisms.wauth import WAuth as AuthMechanism
from Yowsup.Common.constants import Constants
from Yowsup.Common.debugger import Debugger
class YowsupAuth:
def __init__(self, connection):
Debugger.attach(self)
self.connection = connection
self.mechanism = AuthMechanism
self.authenticated = False
self.username = None
self.password = None
self.domain = None
self.resource = None
self.supportsReceiptAcks = True
self.accountKind = None
self.expireData = None
self.authCallbacks = []
def isAuthenticated(self):
return self.authenticated
def onAuthenticated(self, callback):
self.authCallbacks.append(callback)
    def authenticationComplete(self):
        self.authenticated = True
        # Notify the registered listeners that authentication succeeded.
        for callback in self.authCallbacks:
            callback()
def authenticationFailed(self):
self._d("Authentication failed!!")
def authenticate(self, username, password, domain, resource):
self._d("Connecting to %s" % Constants.host)
#connection = ConnectionEngine()
        self.connection.connect((Constants.host, Constants.port))
self.mechanism = AuthMechanism(self.connection)
self.mechanism.setAuthObject(self)
self.username = username
self.password = password
self.domain = domain
self.resource = resource
self.jid = "%s@%s"%(self.username,self.domain)
connection = self.mechanism.login(username, password, domain, resource)
return connection
|
[
"Yowsup.Common.debugger.Debugger.attach"
] |
[((1280, 1301), 'Yowsup.Common.debugger.Debugger.attach', 'Debugger.attach', (['self'], {}), '(self)\n', (1295, 1301), False, 'from Yowsup.Common.debugger import Debugger\n')]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def migrate_categories(apps, schema_editor):
Post = apps.get_model("bulletin", "Post")
PostCategory = apps.get_model("bulletin", "PostCategory")
for post in Post.objects.filter(category__isnull=False):
PostCategory.objects.create(post=post,
category=post.category,
primary=True)
class Migration(migrations.Migration):
dependencies = [
('bulletin', '0003_add_field_post_categories'),
]
operations = [
migrations.RunPython(migrate_categories)
]
|
[
"django.db.migrations.RunPython"
] |
[((624, 664), 'django.db.migrations.RunPython', 'migrations.RunPython', (['migrate_categories'], {}), '(migrate_categories)\n', (644, 664), False, 'from django.db import migrations\n')]
|
import argparse
import collections
import numpy as np
parser = argparse.ArgumentParser(
description='Convert T5 predictions into a TREC-formatted run.')
parser.add_argument('--predictions', type=str, required=True, help='T5 predictions file.')
parser.add_argument('--query_run_ids', type=str, required=True,
help='File containing query doc id pairs paired with the T5\'s predictions file.')
parser.add_argument('--output', type=str, required=True, help='run file in the TREC format.')
args = parser.parse_args()
examples = collections.defaultdict(dict)
with open(args.query_run_ids) as f_query_run_ids, open(args.predictions) as f_pred:
for line_query_doc_id, line_pred in zip(f_query_run_ids, f_pred):
query_id, doc_id_a, doc_id_b = line_query_doc_id.strip().split()
doc_id_a = doc_id_a.split("#")[0]
doc_id_b = doc_id_b.split("#")[0]
_, score = line_pred.strip().split()
score = float(score)
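        # duoT5 "sym-sum" aggregation (score is assumed to be a log-probability):
        # p = exp(score) estimates that doc_id_a should rank above doc_id_b, so p
        # is credited to doc_id_a and its complement 1 - p to doc_id_b.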
if doc_id_a not in examples[query_id]:
examples[query_id][doc_id_a] = 0
if doc_id_b not in examples[query_id]:
examples[query_id][doc_id_b] = 0
examples[query_id][doc_id_a] += np.exp(score)
examples[query_id][doc_id_b] += 1 - np.exp(score)
with open(args.output, 'w') as fout:
for query_id, doc_ids_scores in examples.items():
doc_ids_scores = [
(doc_id, scores)
for doc_id, scores in doc_ids_scores.items()]
doc_ids_scores.sort(key=lambda x: x[1], reverse=True)
for rank, (doc_id, score) in enumerate(doc_ids_scores):
fout.write(
f'{query_id} Q0 {doc_id} {rank + 1} {score/(2*(len(doc_ids_scores) - 1))} duot5\n')
|
[
"collections.defaultdict",
"numpy.exp",
"argparse.ArgumentParser"
] |
[((65, 158), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Convert T5 predictions into a TREC-formatted run."""'}), "(description=\n 'Convert T5 predictions into a TREC-formatted run.')\n", (88, 158), False, 'import argparse\n'), ((551, 580), 'collections.defaultdict', 'collections.defaultdict', (['dict'], {}), '(dict)\n', (574, 580), False, 'import collections\n'), ((1190, 1203), 'numpy.exp', 'np.exp', (['score'], {}), '(score)\n', (1196, 1203), True, 'import numpy as np\n'), ((1248, 1261), 'numpy.exp', 'np.exp', (['score'], {}), '(score)\n', (1254, 1261), True, 'import numpy as np\n')]
|
import argparse
import pprint
from PyPDF2 import PdfFileWriter, PdfFileReader
import os
import logging
parser = argparse.ArgumentParser(description="Split pdf into multiple files")
parser.add_argument("-i","--input", help="Input file", required=True)
parser.add_argument("-l","--list", help="Comma separated list for splitting")
parser.add_argument("-s","--suffix", help="Suffix for output filename")
parser.add_argument("--log", help="Log Level")
args = vars(parser.parse_args())
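
# Illustrative invocation (hypothetical file and script names):
#   python split_pdf.py -i report.pdf -l 3,7 -s part --log INFO
# writes pages 3-6 and pages 7-end to separate files (pages before the first
# split point are skipped); omitting -l writes one file per page.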
if args['log']:
numericLevel = getattr(logging, args['log'].upper(), None)
if not isinstance(numericLevel, int):
raise ValueError('Invalid log level: %s' % args['log'])
logging.basicConfig(level=numericLevel)
inputFile = args['input']
inputReader=PdfFileReader(open(inputFile, "rb"))
numberOfPages = inputReader.getNumPages()
logging.info("Input file " + inputFile + " has " + str(numberOfPages) + " pages")
splitlist = []
if args['list']:
splitlist = [int(n)-1 for n in args['list'].split(',')]
#Append the last page
splitlist.append(numberOfPages)
else:
splitlist=list(range(0,numberOfPages+1))
logging.debug("Split list is :")
logging.debug(pprint.pformat(splitlist))
suffix='page'
if args['suffix']:
suffix = args['suffix']
logging.debug("Suffix is " + suffix)
#Get the file basename
inputFileBase = os.path.splitext(inputFile)[0]
for i in range(len(splitlist)-1):
logging.debug("Starting with page: " + str(splitlist[i]+1))
outputWriter=PdfFileWriter()
for j in range(splitlist[i], splitlist[i+1]):
logging.debug("Adding page " + str(j+1))
outputWriter.addPage(inputReader.getPage(j))
outputFileName= inputFileBase + '-' + suffix + str(i+1) + ".pdf"
logging.info("Writing to file " + outputFileName)
outputStream = open(outputFileName, "wb")
    outputWriter.write(outputStream)
    outputStream.close()
|
[
"pprint.pformat",
"logging.debug",
"argparse.ArgumentParser",
"logging.basicConfig",
"logging.info",
"os.path.splitext",
"PyPDF2.PdfFileWriter"
] |
[((113, 181), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Split pdf into multiple files"""'}), "(description='Split pdf into multiple files')\n", (136, 181), False, 'import argparse\n'), ((1121, 1153), 'logging.debug', 'logging.debug', (['"""Split list is :"""'], {}), "('Split list is :')\n", (1134, 1153), False, 'import logging\n'), ((1257, 1293), 'logging.debug', 'logging.debug', (["('Suffix is ' + suffix)"], {}), "('Suffix is ' + suffix)\n", (1270, 1293), False, 'import logging\n'), ((674, 713), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'numericLevel'}), '(level=numericLevel)\n', (693, 713), False, 'import logging\n'), ((1168, 1193), 'pprint.pformat', 'pprint.pformat', (['splitlist'], {}), '(splitlist)\n', (1182, 1193), False, 'import pprint\n'), ((1334, 1361), 'os.path.splitext', 'os.path.splitext', (['inputFile'], {}), '(inputFile)\n', (1350, 1361), False, 'import os\n'), ((1480, 1495), 'PyPDF2.PdfFileWriter', 'PdfFileWriter', ([], {}), '()\n', (1493, 1495), False, 'from PyPDF2 import PdfFileWriter, PdfFileReader\n'), ((1721, 1770), 'logging.info', 'logging.info', (["('Writing to file ' + outputFileName)"], {}), "('Writing to file ' + outputFileName)\n", (1733, 1770), False, 'import logging\n')]
|
from tests.unit import unittest
from tests.unit import AWSMockServiceTestCase
from boto.vpc import VPCConnection, InternetGateway
class TestDescribeInternetGateway(AWSMockServiceTestCase):
connection_class = VPCConnection
def default_body(self):
return """
<DescribeInternetGatewaysResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<internetGatewaySet>
<item>
<internetGatewayId>igw-eaad4883EXAMPLE</internetGatewayId>
<attachmentSet>
<item>
<vpcId>vpc-11ad4878</vpcId>
<state>available</state>
</item>
</attachmentSet>
<tagSet/>
</item>
</internetGatewaySet>
</DescribeInternetGatewaysResponse>
"""
def test_describe_internet_gateway(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.get_all_internet_gateways(
'igw-eaad4883EXAMPLE', filters=[('attachment.state', ['available', 'pending'])])
self.assert_request_parameters({
'Action': 'DescribeInternetGateways',
'InternetGatewayId.1': 'igw-eaad4883EXAMPLE',
'Filter.1.Name': 'attachment.state',
'Filter.1.Value.1': 'available',
'Filter.1.Value.2': 'pending'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertEquals(len(api_response), 1)
self.assertIsInstance(api_response[0], InternetGateway)
self.assertEqual(api_response[0].id, 'igw-eaad4883EXAMPLE')
class TestCreateInternetGateway(AWSMockServiceTestCase):
connection_class = VPCConnection
def default_body(self):
return """
<CreateInternetGatewayResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<internetGateway>
<internetGatewayId>igw-eaad4883</internetGatewayId>
<attachmentSet/>
<tagSet/>
</internetGateway>
</CreateInternetGatewayResponse>
"""
def test_create_internet_gateway(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.create_internet_gateway()
self.assert_request_parameters({
'Action': 'CreateInternetGateway'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertIsInstance(api_response, InternetGateway)
self.assertEqual(api_response.id, 'igw-eaad4883')
class TestDeleteInternetGateway(AWSMockServiceTestCase):
connection_class = VPCConnection
def default_body(self):
return """
<DeleteInternetGatewayResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
</DeleteInternetGatewayResponse>
"""
def test_delete_internet_gateway(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.delete_internet_gateway('igw-eaad4883')
self.assert_request_parameters({
'Action': 'DeleteInternetGateway',
'InternetGatewayId': 'igw-eaad4883'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertEquals(api_response, True)
class TestAttachInternetGateway(AWSMockServiceTestCase):
connection_class = VPCConnection
def default_body(self):
return """
<AttachInternetGatewayResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
</AttachInternetGatewayResponse>
"""
def test_attach_internet_gateway(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.attach_internet_gateway(
'igw-eaad4883', 'vpc-11ad4878')
self.assert_request_parameters({
'Action': 'AttachInternetGateway',
'InternetGatewayId': 'igw-eaad4883',
'VpcId': 'vpc-11ad4878'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertEquals(api_response, True)
class TestDetachInternetGateway(AWSMockServiceTestCase):
connection_class = VPCConnection
def default_body(self):
return """
<DetachInternetGatewayResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
</DetachInternetGatewayResponse>
"""
def test_detach_internet_gateway(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.detach_internet_gateway(
'igw-eaad4883', 'vpc-11ad4878')
self.assert_request_parameters({
'Action': 'DetachInternetGateway',
'InternetGatewayId': 'igw-eaad4883',
'VpcId': 'vpc-11ad4878'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertEquals(api_response, True)
if __name__ == '__main__':
unittest.main()
|
[
"tests.unit.unittest.main"
] |
[((6059, 6074), 'tests.unit.unittest.main', 'unittest.main', ([], {}), '()\n', (6072, 6074), False, 'from tests.unit import unittest\n')]
|
import numpy as np
import gym
from gym import Wrapper
from gym.spaces import Discrete, Box
from gym_pomdp.envs.rock import RockEnv, Obs
class RockSampleHistoryEnv(Wrapper):
"""
takes observations from an RockSample environment and stacks to history given hist_len of history length
"""
def __init__(self, env_id, hist_len=4, history_type='standard', kwargs={}):
"""
Parameters
----------
env_id - id of registered gym environment (currently only implemented for Rock-v0)
history_type - * one_hot: encodes the actions as one hot vector in the history
* one_hot_pos: one hot agent position and history of 'one_hot' observations
* standard: encodes the actions as action_index+1 (reason for this is that the initial history is
all zeros and we don't want to collide with action 0, which is move north)
* standard_pos: one hot agent position and history of 'standard' observations
* field_vision: encodes the actions as action_index+1 (reason: see 'standard')
and noisy observation for each rock
* field_vision_pos: one hot agent position and history of noisy observations for each rock
* fully_observable: one hot agent position and history of true observations for each rock
* mixed_full_pomdp: flag to indicate if full information is avail + true observations for each rock +
one hot agent position and history of 'one_hot' observations
* history_full: complete history of: flag to indicate if full information is avail (=1) + true observations for each rock +
one hot agent position + 'one_hot' action + noisy rock observation
* history_pomdp: complete history of: flag to indicate if full information is avail (=0) + zeros(num rocks) +
one hot agent position + 'one_hot' action + noisy rock observation
* history_rockpos_full: complete history of: flag to indicate if full information is avail (=1) + true observations for each rock +
one hot agent position + 'one_hot' action + noisy rock observation + one hot position for all rocks
hist_len - length of the history (hist_len==0 is without history, just current observation)
kwargs - optional arguments for initializing the wrapped environment
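
        Example (illustrative; assumes ``gym_pomdp`` registers ``Rock-v0``)::

            env = RockSampleHistoryEnv("Rock-v0", hist_len=4, history_type="one_hot")
            ob = env.reset()
            ob, reward, done, info = env.step(env.action_space.sample())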
"""
if not env_id == "Rock-v0":
raise NotImplementedError("history only implemented for Rock-v0")
env = gym.make(env_id)
env.__init__(**kwargs)
super(RockSampleHistoryEnv, self).__init__(env)
self._wrapped_env = env
self.hist_len = hist_len
self.hist_type = history_type
self.history = None
self.full_obs_dim = 1
self.num_rocks = self._wrapped_env.num_rocks
self.size_x, self.size_y = self._wrapped_env.grid.get_size
# specify observation space and arrangement according to selected history type
if self.hist_type == "standard":
self.historyIgnoreIdx = 0
self.total_obs_dim = (1+1) # standard obs
self.observation_space = Box(low=0, high=(4+1)+self.num_rocks, shape=(self.total_obs_dim*(self.hist_len+1),)) # history of: ac + ob pairs
self.genObservation = self.generateObservationStandard
elif self.hist_type == "standard_pos":
self.historyIgnoreIdx = self.size_x + self.size_y
self.total_obs_dim = self.historyIgnoreIdx+(1+1) # agent pos + standard obs
self.observation_space = Box(low=0, high=(4+1)+self.num_rocks, shape=(self.historyIgnoreIdx + (1+1)*(self.hist_len+1),)) # agent pos + history of: ac + ob pairs
self.genObservation = self.generateObservationStandardPos
elif self.hist_type == "one_hot":
self.historyIgnoreIdx = 0
self.nact = self._wrapped_env.action_space.n
            self.total_obs_dim = (self.nact+1) # one hot encoded action + single ob
self.observation_space = Box(low=0, high=len(Obs)-1, shape=(self.total_obs_dim*(self.hist_len+1),)) # history of: one_hot_ac + ob pairs
self.genObservation = self.generateObservationOneHot
elif self.hist_type == "one_hot_pos":
self.historyIgnoreIdx = self.size_x + self.size_y
self.nact = self._wrapped_env.action_space.n
            self.total_obs_dim = self.historyIgnoreIdx+(self.nact+1) # agent pos + one hot encoded action + single ob
self.observation_space = Box(low=0, high=len(Obs)-1, shape=(self.historyIgnoreIdx + (self.nact+1)*(self.hist_len+1),)) # agent pos + history of: one_hot_ac + ob pairs
self.genObservation = self.generateObservationOneHotPos
elif self.hist_type == "field_vision":
self.historyIgnoreIdx = 0
            self.total_obs_dim = (1+self.num_rocks) # action + ob (for each rock)
self.observation_space = Box(low=0, high=(4+1)+self.num_rocks, shape=(self.total_obs_dim*(self.hist_len+1),)) # history of: ac + ob (for each rock) pairs
self.genObservation = self.generateObservationFieldVision
elif self.hist_type == "field_vision_pos":
self.historyIgnoreIdx = self.size_x + self.size_y
self.total_obs_dim = (self.historyIgnoreIdx+self.num_rocks) # oneHot agent position + ob (for each rock)
self.observation_space = Box(low=0, high=len(Obs)-1, shape=(self.historyIgnoreIdx + self.num_rocks*(self.hist_len+1),)) # agent pos + history of: ac + ob (for each rock) pairs
self.genObservation = self.generateObservationFieldVisionPos
elif self.hist_type == "fully_observable":
self.historyIgnoreIdx = self.size_x + self.size_y
self.total_obs_dim = (self.historyIgnoreIdx+self.num_rocks) # oneHot agent position + ob (for each rock)
self.observation_space = Box(low=0, high=len(Obs)-1, shape=(self.historyIgnoreIdx + self.num_rocks*(self.hist_len+1),)) # agent pos + history of: ac + ob (for each rock) pairs
self.genObservation = self.generateObservationFullState
elif self.hist_type == "mixed_full_pomdp":
self.historyIgnoreIdx = 1 + self.num_rocks + self.size_x + self.size_y
self.nact = self._wrapped_env.action_space.n
self.total_obs_dim = self.historyIgnoreIdx+(self.nact+1) # ignore index + agent pos + one hot encoded action + single ob
self.observation_space = Box(low=0, high=len(Obs)-1, shape=(self.historyIgnoreIdx + (self.nact+1)*(self.hist_len+1),)) # flag + full obs + agent pos + history of: one_hot_ac + ob pairs
self.genObservation = self.generateObservationMixed
elif self.hist_type == "history_full":
self.historyIgnoreIdx = 0
self.nact = self._wrapped_env.action_space.n
self.total_obs_dim = 1 + self.size_x + self.size_y + self.num_rocks + self.nact + 1 # flag + one hot agent pos + rock obs + one hot action + single ob
self.observation_space = Box(low=0, high=len(Obs)-1, shape=(self.historyIgnoreIdx + self.total_obs_dim*(self.hist_len+1),))
self.genObservation = self.generateObservationHistoryFull
elif self.hist_type == "history_pomdp":
self.historyIgnoreIdx = 0
self.nact = self._wrapped_env.action_space.n
self.total_obs_dim = 1 + self.size_x + self.size_y + self.num_rocks + self.nact + 1 # flag + one hot agent pos + rock obs (zeros) + one hot action + single ob
self.observation_space = Box(low=0, high=len(Obs)-1, shape=(self.historyIgnoreIdx + self.total_obs_dim*(self.hist_len+1),))
self.genObservation = self.generateObservationHistoryPomdp
elif self.hist_type == "history_rockpos_full":
self.historyIgnoreIdx = (self.size_x + self.size_y) * self.num_rocks # num of one_hot encoded rock positions
self.nact = self._wrapped_env.action_space.n
self.total_history_ob_dim = 1 + self.size_x + self.size_y + self.num_rocks + self.nact + 1
self.total_obs_dim = self.historyIgnoreIdx + self.total_history_ob_dim # ignoreIndex + flag + one hot agent pos + rock obs + one hot action + single ob
self.observation_space = Box(low=0, high=len(Obs)-1, shape=(self.historyIgnoreIdx + self.total_history_ob_dim*(self.hist_len+1),))
self.genObservation = self.generateObservationHistoryRockPosFull
else:
raise NameError("error: wrong history type")
self.observation_dim_hist_part = self.total_obs_dim - self.historyIgnoreIdx
print('-------- History Info: --------')
print('total obs dim:', self.total_obs_dim)
print('original obs dim:', self.full_obs_dim)
print('history obs dim:', self.observation_dim_hist_part)
print('-------------------------------')
def reset_history(self, new_):
self.history = np.zeros((self.observation_space.shape[0]-self.historyIgnoreIdx, ))
self.history[0:self.observation_dim_hist_part] = new_[self.historyIgnoreIdx:]
def add_to_history(self, new_):
self.history[self.observation_dim_hist_part:] = self.history[:-self.observation_dim_hist_part]
self.history[0:self.observation_dim_hist_part] = new_[self.historyIgnoreIdx:]
def reset(self):
obs = self._wrapped_env.reset()
xpos, ypos = self.generatePosOneHot(False)
if self.hist_type == "standard":
new_ob = np.array([np.zeros(1), obs])
elif self.hist_type == "standard_pos":
std_ob = np.array([np.zeros(1), obs])
new_ob = np.concatenate([xpos, ypos, std_ob])
elif self.hist_type == "one_hot":
new_ob = np.concatenate([np.zeros(self.nact), [obs]])
elif self.hist_type == "one_hot_pos":
new_ob = np.concatenate([xpos, ypos,np.zeros(self.nact), [obs]])
elif self.hist_type == "field_vision":
observation_rocks = self.generateFieldVisionRockObservation(False)
new_ob = np.concatenate([np.zeros(1), observation_rocks])
elif self.hist_type == "field_vision_pos":
observation_rocks = self.generateFieldVisionRockObservation(False)
new_ob = np.concatenate([xpos, ypos, observation_rocks])
elif self.hist_type == "fully_observable":
            observation_rocks = self.generateTrueRockObservation(False)
new_ob = np.concatenate([xpos, ypos, observation_rocks])
elif self.hist_type == "mixed_full_pomdp" or self.hist_type == "history_full":
            observation_rocks = self.generateTrueRockObservation(False)
flag = 1
new_ob = np.concatenate([[flag],observation_rocks,xpos,ypos,np.zeros(self.nact),[obs]])
elif self.hist_type == "history_pomdp":
observation_rocks = np.zeros(self.num_rocks)
flag = 0
new_ob = np.concatenate([[flag],observation_rocks,xpos,ypos,np.zeros(self.nact),[obs]])
elif self.hist_type == "history_rockpos_full":
            observation_rocks = self.generateTrueRockObservation(False)
flag = 1
rock_pos = self.generateRockPosOneHot(False)
new_ob = np.concatenate([[flag],observation_rocks,xpos,ypos,np.zeros(self.nact),[obs],rock_pos])
else:
raise NameError("error: wrong history type")
self.reset_history(new_ob)
# we return copy so that we can modify the history without changing already returned histories
return np.concatenate([new_ob[0:self.historyIgnoreIdx],self.history])
def step(self, action):
next_obs, reward, done, info = self._wrapped_env.step(action)
ob = self.genObservation(next_obs, action, done)
self.add_to_history(ob)
# we return copy so that we can modify the history without changing already returned histories
return np.concatenate([ob[0:self.historyIgnoreIdx],self.history]), reward, done, info
def generateObservationStandard(self, ob, a, done):
return np.array([a+1, ob])
def generateObservationStandardPos(self, ob, a, done):
xpos, ypos = self.generatePosOneHot(done)
std_ob = np.array([a+1, ob])
return np.concatenate([xpos,ypos,std_ob])
def generateObservationOneHot(self, ob, a, done):
one_hot_a = np.zeros(self.nact, dtype=np.int)
one_hot_a[int(a)] = 1
return np.concatenate([one_hot_a, [ob]])
def generateObservationOneHotPos(self, ob, a, done):
xpos, ypos = self.generatePosOneHot(done)
one_hot_a = np.zeros(self.nact, dtype=np.int)
one_hot_a[int(a)] = 1
return np.concatenate([xpos,ypos,one_hot_a,[ob]])
def generateObservationFieldVision(self, ob, a, done):
# action + noisy value of all rocks
observation_rocks = self.generateFieldVisionRockObservation(done)
return np.concatenate([[a+1], observation_rocks])
def generateObservationFieldVisionPos(self, ob, a, done):
# agent pos + noisy value of all rocks
observation_rocks = self.generateFieldVisionRockObservation(done)
xpos, ypos = self.generatePosOneHot(done)
return np.concatenate([xpos,ypos,observation_rocks])
def generateObservationFullState(self, ob, a, done):
# agent pos + true value of all rocks
        observation_rocks = self.generateTrueRockObservation(done)
xpos, ypos = self.generatePosOneHot(done)
return np.concatenate([xpos,ypos,observation_rocks])
def generateObservationMixed(self, ob, a, done):
# flag + true value of all rocks + agent pos + history of: one_hot_ac + noisy ob pairs
flag = 1
        observation_rocks = self.generateTrueRockObservation(done)
xpos, ypos = self.generatePosOneHot(done)
one_hot_a = np.zeros(self.nact, dtype=np.int)
one_hot_a[int(a)] = 1
return np.concatenate([[flag],observation_rocks,xpos,ypos,one_hot_a,[ob]])
def generateObservationHistoryFull(self, ob, a, done):
# flag + one hot agent pos + rock obs + one hot action + single ob
return self.generateObservationMixed(ob, a, done)
def generateObservationHistoryPomdp(self, ob, a, done):
# flag + one hot agent pos + rock obs (zeros) + one hot action + single ob
flag = 0
observation_rocks = np.zeros(self.num_rocks)
xpos, ypos = self.generatePosOneHot(done)
one_hot_a = np.zeros(self.nact, dtype=np.int)
one_hot_a[int(a)] = 1
return np.concatenate([[flag],observation_rocks,xpos,ypos,one_hot_a,[ob]])
def generateObservationHistoryRockPosFull(self, ob, a, done):
# num of one_hot encoded rock positions
# flag + one hot agent pos + rock obs + one hot action + single ob + one hot rock positions
rock_pos = self.generateRockPosOneHot(done)
full_ob = self.generateObservationMixed(ob, a, done)
return np.concatenate([full_ob, rock_pos])
def generateFieldVisionRockObservation(self, done):
# noisy value of all rocks
observation_rocks = np.zeros((self.num_rocks,))
if not done:
for rock in range(0, self.num_rocks):
if self._wrapped_env.state.rocks[rock].status == 0: # collected
ob = Obs.NULL.value
else:
ob = self._wrapped_env._sample_ob(self._wrapped_env.state.agent_pos, self._wrapped_env.state.rocks[rock])
observation_rocks[rock] = ob
return observation_rocks
    def generateTrueRockObservation(self, done):
# true value of all rocks
observation_rocks = np.zeros((self.num_rocks,))
if not done:
for rock in range(0, self.num_rocks):
rock_status = self._wrapped_env.state.rocks[rock].status
if rock_status == 1: #good
observation_rocks[rock] = Obs.GOOD.value
elif rock_status == -1: #bad
observation_rocks[rock] = Obs.BAD.value
else: # collected
observation_rocks[rock] = Obs.NULL.value
return observation_rocks
def generatePosOneHot(self, done):
xpos=np.zeros(self.size_x)
ypos=np.zeros(self.size_y)
if not done:
# one hot encoded x and y position of the agent
xpos = np.zeros(self.size_x, dtype=np.int)
xpos[int(self._wrapped_env.state.agent_pos.x)] = 1
ypos = np.zeros(self.size_y, dtype=np.int)
ypos[int(self._wrapped_env.state.agent_pos.y)] = 1
return xpos, ypos
def generateRockPosOneHot(self, done):
rocks = []
if not done:
for rock in self._wrapped_env._rock_pos:
# one hot encoded x and y position of the rocks
xpos = np.zeros(self.size_x, dtype=np.int)
xpos[int(rock.x)] = 1
ypos = np.zeros(self.size_y, dtype=np.int)
ypos[int(rock.y)] = 1
rocks.append(xpos)
rocks.append(ypos)
if len(rocks) > 0:
return np.hstack(rocks)
else:
return np.zeros((self.size_x+self.size_y)*self.num_rocks)
|
[
"gym.make",
"numpy.zeros",
"numpy.hstack",
"numpy.array",
"gym.spaces.Box",
"numpy.concatenate"
] |
[((2699, 2715), 'gym.make', 'gym.make', (['env_id'], {}), '(env_id)\n', (2707, 2715), False, 'import gym\n'), ((9135, 9203), 'numpy.zeros', 'np.zeros', (['(self.observation_space.shape[0] - self.historyIgnoreIdx,)'], {}), '((self.observation_space.shape[0] - self.historyIgnoreIdx,))\n', (9143, 9203), True, 'import numpy as np\n'), ((11736, 11799), 'numpy.concatenate', 'np.concatenate', (['[new_ob[0:self.historyIgnoreIdx], self.history]'], {}), '([new_ob[0:self.historyIgnoreIdx], self.history])\n', (11750, 11799), True, 'import numpy as np\n'), ((12256, 12277), 'numpy.array', 'np.array', (['[a + 1, ob]'], {}), '([a + 1, ob])\n', (12264, 12277), True, 'import numpy as np\n'), ((12403, 12424), 'numpy.array', 'np.array', (['[a + 1, ob]'], {}), '([a + 1, ob])\n', (12411, 12424), True, 'import numpy as np\n'), ((12438, 12474), 'numpy.concatenate', 'np.concatenate', (['[xpos, ypos, std_ob]'], {}), '([xpos, ypos, std_ob])\n', (12452, 12474), True, 'import numpy as np\n'), ((12548, 12581), 'numpy.zeros', 'np.zeros', (['self.nact'], {'dtype': 'np.int'}), '(self.nact, dtype=np.int)\n', (12556, 12581), True, 'import numpy as np\n'), ((12627, 12660), 'numpy.concatenate', 'np.concatenate', (['[one_hot_a, [ob]]'], {}), '([one_hot_a, [ob]])\n', (12641, 12660), True, 'import numpy as np\n'), ((12789, 12822), 'numpy.zeros', 'np.zeros', (['self.nact'], {'dtype': 'np.int'}), '(self.nact, dtype=np.int)\n', (12797, 12822), True, 'import numpy as np\n'), ((12868, 12913), 'numpy.concatenate', 'np.concatenate', (['[xpos, ypos, one_hot_a, [ob]]'], {}), '([xpos, ypos, one_hot_a, [ob]])\n', (12882, 12913), True, 'import numpy as np\n'), ((13104, 13148), 'numpy.concatenate', 'np.concatenate', (['[[a + 1], observation_rocks]'], {}), '([[a + 1], observation_rocks])\n', (13118, 13148), True, 'import numpy as np\n'), ((13396, 13443), 'numpy.concatenate', 'np.concatenate', (['[xpos, ypos, observation_rocks]'], {}), '([xpos, ypos, observation_rocks])\n', (13410, 13443), True, 'import numpy as np\n'), ((13678, 13725), 'numpy.concatenate', 'np.concatenate', (['[xpos, ypos, observation_rocks]'], {}), '([xpos, ypos, observation_rocks])\n', (13692, 13725), True, 'import numpy as np\n'), ((14027, 14060), 'numpy.zeros', 'np.zeros', (['self.nact'], {'dtype': 'np.int'}), '(self.nact, dtype=np.int)\n', (14035, 14060), True, 'import numpy as np\n'), ((14106, 14178), 'numpy.concatenate', 'np.concatenate', (['[[flag], observation_rocks, xpos, ypos, one_hot_a, [ob]]'], {}), '([[flag], observation_rocks, xpos, ypos, one_hot_a, [ob]])\n', (14120, 14178), True, 'import numpy as np\n'), ((14556, 14580), 'numpy.zeros', 'np.zeros', (['self.num_rocks'], {}), '(self.num_rocks)\n', (14564, 14580), True, 'import numpy as np\n'), ((14651, 14684), 'numpy.zeros', 'np.zeros', (['self.nact'], {'dtype': 'np.int'}), '(self.nact, dtype=np.int)\n', (14659, 14684), True, 'import numpy as np\n'), ((14730, 14802), 'numpy.concatenate', 'np.concatenate', (['[[flag], observation_rocks, xpos, ypos, one_hot_a, [ob]]'], {}), '([[flag], observation_rocks, xpos, ypos, one_hot_a, [ob]])\n', (14744, 14802), True, 'import numpy as np\n'), ((15142, 15177), 'numpy.concatenate', 'np.concatenate', (['[full_ob, rock_pos]'], {}), '([full_ob, rock_pos])\n', (15156, 15177), True, 'import numpy as np\n'), ((15298, 15325), 'numpy.zeros', 'np.zeros', (['(self.num_rocks,)'], {}), '((self.num_rocks,))\n', (15306, 15325), True, 'import numpy as np\n'), ((15856, 15883), 'numpy.zeros', 'np.zeros', (['(self.num_rocks,)'], {}), '((self.num_rocks,))\n', (15864, 15883), True, 'import numpy as np\n'),
((16423, 16444), 'numpy.zeros', 'np.zeros', (['self.size_x'], {}), '(self.size_x)\n', (16431, 16444), True, 'import numpy as np\n'), ((16458, 16479), 'numpy.zeros', 'np.zeros', (['self.size_y'], {}), '(self.size_y)\n', (16466, 16479), True, 'import numpy as np\n'), ((3342, 3437), 'gym.spaces.Box', 'Box', ([], {'low': '(0)', 'high': '(4 + 1 + self.num_rocks)', 'shape': '(self.total_obs_dim * (self.hist_len + 1),)'}), '(low=0, high=4 + 1 + self.num_rocks, shape=(self.total_obs_dim * (self.\n hist_len + 1),))\n', (3345, 3437), False, 'from gym.spaces import Discrete, Box\n'), ((12105, 12164), 'numpy.concatenate', 'np.concatenate', (['[ob[0:self.historyIgnoreIdx], self.history]'], {}), '([ob[0:self.historyIgnoreIdx], self.history])\n', (12119, 12164), True, 'import numpy as np\n'), ((16580, 16615), 'numpy.zeros', 'np.zeros', (['self.size_x'], {'dtype': 'np.int'}), '(self.size_x, dtype=np.int)\n', (16588, 16615), True, 'import numpy as np\n'), ((16698, 16733), 'numpy.zeros', 'np.zeros', (['self.size_y'], {'dtype': 'np.int'}), '(self.size_y, dtype=np.int)\n', (16706, 16733), True, 'import numpy as np\n'), ((17334, 17350), 'numpy.hstack', 'np.hstack', (['rocks'], {}), '(rocks)\n', (17343, 17350), True, 'import numpy as np\n'), ((17384, 17438), 'numpy.zeros', 'np.zeros', (['((self.size_x + self.size_y) * self.num_rocks)'], {}), '((self.size_x + self.size_y) * self.num_rocks)\n', (17392, 17438), True, 'import numpy as np\n'), ((3756, 3863), 'gym.spaces.Box', 'Box', ([], {'low': '(0)', 'high': '(4 + 1 + self.num_rocks)', 'shape': '(self.historyIgnoreIdx + (1 + 1) * (self.hist_len + 1),)'}), '(low=0, high=4 + 1 + self.num_rocks, shape=(self.historyIgnoreIdx + (1 +\n 1) * (self.hist_len + 1),))\n', (3759, 3863), False, 'from gym.spaces import Discrete, Box\n'), ((9837, 9873), 'numpy.concatenate', 'np.concatenate', (['[xpos, ypos, std_ob]'], {}), '([xpos, ypos, std_ob])\n', (9851, 9873), True, 'import numpy as np\n'), ((17047, 17082), 'numpy.zeros', 'np.zeros', (['self.size_x'], {'dtype': 'np.int'}), '(self.size_x, dtype=np.int)\n', (17055, 17082), True, 'import numpy as np\n'), ((17144, 17179), 'numpy.zeros', 'np.zeros', (['self.size_y'], {'dtype': 'np.int'}), '(self.size_y, dtype=np.int)\n', (17152, 17179), True, 'import numpy as np\n'), ((9700, 9711), 'numpy.zeros', 'np.zeros', (['(1)'], {}), '(1)\n', (9708, 9711), True, 'import numpy as np\n'), ((9797, 9808), 'numpy.zeros', 'np.zeros', (['(1)'], {}), '(1)\n', (9805, 9808), True, 'import numpy as np\n'), ((5133, 5228), 'gym.spaces.Box', 'Box', ([], {'low': '(0)', 'high': '(4 + 1 + self.num_rocks)', 'shape': '(self.total_obs_dim * (self.hist_len + 1),)'}), '(low=0, high=4 + 1 + self.num_rocks, shape=(self.total_obs_dim * (self.\n hist_len + 1),))\n', (5136, 5228), False, 'from gym.spaces import Discrete, Box\n'), ((9953, 9972), 'numpy.zeros', 'np.zeros', (['self.nact'], {}), '(self.nact)\n', (9961, 9972), True, 'import numpy as np\n'), ((10076, 10095), 'numpy.zeros', 'np.zeros', (['self.nact'], {}), '(self.nact)\n', (10084, 10095), True, 'import numpy as np\n'), ((10452, 10499), 'numpy.concatenate', 'np.concatenate', (['[xpos, ypos, observation_rocks]'], {}), '([xpos, ypos, observation_rocks])\n', (10466, 10499), True, 'import numpy as np\n'), ((10268, 10279), 'numpy.zeros', 'np.zeros', (['(1)'], {}), '(1)\n', (10276, 10279), True, 'import numpy as np\n'), ((10644, 10691), 'numpy.concatenate', 'np.concatenate', (['[xpos, ypos, observation_rocks]'], {}), '([xpos, ypos, observation_rocks])\n', (10658, 10691), True, 'import numpy as np\n'), 
((11052, 11076), 'numpy.zeros', 'np.zeros', (['self.num_rocks'], {}), '(self.num_rocks)\n', (11060, 11076), True, 'import numpy as np\n'), ((10944, 10963), 'numpy.zeros', 'np.zeros', (['self.nact'], {}), '(self.nact)\n', (10952, 10963), True, 'import numpy as np\n'), ((11170, 11189), 'numpy.zeros', 'np.zeros', (['self.nact'], {}), '(self.nact)\n', (11178, 11189), True, 'import numpy as np\n'), ((11475, 11494), 'numpy.zeros', 'np.zeros', (['self.nact'], {}), '(self.nact)\n', (11483, 11494), True, 'import numpy as np\n')]
|
import pandas as pd
from sklearn.preprocessing import MinMaxScaler
# Load training data set from CSV file
training_data_df = pd.read_csv("sales_data_training.csv")
# Load testing data set from CSV file
test_data_df = pd.read_csv("sales_data_test.csv")
# Data needs to be scaled to a small range like 0 to 1 for the neural
# network to work well.
scaler = MinMaxScaler(feature_range=(0, 1))
# Scale both the training inputs and outputs
scaled_training = scaler.fit_transform(training_data_df)
scaled_testing = scaler.transform(test_data_df)
# Print out the adjustment that the scaler applied to the total_earnings column of data
print("Note: total_earnings values were scaled by multiplying by {:.10f} and adding {:.6f}".format(scaler.scale_[8], scaler.min_[8]))
# Create new pandas DataFrame objects from the scaled data
scaled_training_df = pd.DataFrame(scaled_training, columns=training_data_df.columns.values)
scaled_testing_df = pd.DataFrame(scaled_testing, columns=test_data_df.columns.values)
# Save scaled data dataframes to new CSV files
scaled_training_df.to_csv("sales_data_training_scaled.csv", index=False)
scaled_testing_df.to_csv("sales_data_testing_scaled.csv", index=False)
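# Hedged note (not part of the original script): because MinMaxScaler's
# transform computes X_scaled = X * scale_ + min_, predictions made on the
# scaled data can be mapped back to real dollar amounts with the inverse:
#   earnings = (scaled_earnings - scaler.min_[8]) / scaler.scale_[8]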
|
[
"pandas.read_csv",
"sklearn.preprocessing.MinMaxScaler",
"pandas.DataFrame"
] |
[((126, 164), 'pandas.read_csv', 'pd.read_csv', (['"""sales_data_training.csv"""'], {}), "('sales_data_training.csv')\n", (137, 164), True, 'import pandas as pd\n'), ((219, 253), 'pandas.read_csv', 'pd.read_csv', (['"""sales_data_test.csv"""'], {}), "('sales_data_test.csv')\n", (230, 253), True, 'import pandas as pd\n'), ((358, 392), 'sklearn.preprocessing.MinMaxScaler', 'MinMaxScaler', ([], {'feature_range': '(0, 1)'}), '(feature_range=(0, 1))\n', (370, 392), False, 'from sklearn.preprocessing import MinMaxScaler\n'), ((848, 918), 'pandas.DataFrame', 'pd.DataFrame', (['scaled_training'], {'columns': 'training_data_df.columns.values'}), '(scaled_training, columns=training_data_df.columns.values)\n', (860, 918), True, 'import pandas as pd\n'), ((939, 1004), 'pandas.DataFrame', 'pd.DataFrame', (['scaled_testing'], {'columns': 'test_data_df.columns.values'}), '(scaled_testing, columns=test_data_df.columns.values)\n', (951, 1004), True, 'import pandas as pd\n')]
|
# Copyright Contributors to the Packit project.
# SPDX-License-Identifier: MIT
"""
Update selected component from upstream in Fedora
"""
import logging
import os
import click
from packit.cli.types import LocalProjectParameter
from packit.cli.utils import cover_packit_exception, get_packit_api
from packit.config import pass_config, get_context_settings
from packit.config.aliases import get_branches
logger = logging.getLogger(__name__)
@click.command("sync-from-downstream", context_settings=get_context_settings())
@click.option(
"--dist-git-branch",
help="Comma separated list of target branches in dist-git to sync from. "
"(defaults to repo's default branch)",
)
@click.option(
"--upstream-branch",
help="Target branch in upstream to sync to. (defaults to repo's default branch)",
)
@click.option(
"--no-pr",
is_flag=True,
default=False,
help="Do not create a pull request to upstream repository.",
)
@click.option(
"--fork/--no-fork",
is_flag=True,
default=True,
help="Push to a fork before creating a pull request.",
)
@click.option(
"--remote-to-push",
default=None,
help=(
"Name of the remote where packit should push. "
"If this is not specified, push to a fork if the repo can be forked."
),
)
@click.option(
"--force",
"-f",
default=False,
is_flag=True,
help="Don't discard changes in the git repo by default, unless this is set.",
)
@click.option("-x", "--exclude", help="File to exclude from sync", multiple=True)
@click.argument(
"path_or_url",
type=LocalProjectParameter(),
default=os.path.curdir,
)
@cover_packit_exception
@pass_config
def sync_from_downstream(
config,
dist_git_branch,
upstream_branch,
no_pr,
path_or_url,
fork,
remote_to_push,
exclude,
force,
):
"""
Copy synced files from Fedora dist-git into upstream by opening a pull request.
    PATH_OR_URL argument is a local path or a URL to the upstream git repository;
    it defaults to the current working directory
"""
api = get_packit_api(config=config, local_project=path_or_url)
default_dg_branch = api.dg.local_project.git_project.default_branch
dist_git_branch = dist_git_branch or default_dg_branch
branches_to_sync = get_branches(
*dist_git_branch.split(","), default_dg_branch=default_dg_branch
)
click.echo(f"Syncing from the following branches: {', '.join(branches_to_sync)}")
for branch in branches_to_sync:
api.sync_from_downstream(
dist_git_branch=branch,
upstream_branch=upstream_branch,
no_pr=no_pr,
fork=fork,
remote_name=remote_to_push,
exclude_files=exclude,
force=force,
)
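# Hedged CLI sketch (branch names and path are illustrative assumptions):
#   packit sync-from-downstream --dist-git-branch f35,f36 --no-pr ./my-project
# would sync the files from both the f35 and f36 dist-git branches into the
# local checkout at ./my-project without opening pull requests.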
|
[
"packit.cli.types.LocalProjectParameter",
"click.option",
"packit.config.get_context_settings",
"packit.cli.utils.get_packit_api",
"logging.getLogger"
] |
[((415, 442), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (432, 442), False, 'import logging\n'), ((526, 679), 'click.option', 'click.option', (['"""--dist-git-branch"""'], {'help': '"""Comma separated list of target branches in dist-git to sync from. (defaults to repo\'s default branch)"""'}), '(\'--dist-git-branch\', help=\n "Comma separated list of target branches in dist-git to sync from. (defaults to repo\'s default branch)"\n )\n', (538, 679), False, 'import click\n'), ((689, 814), 'click.option', 'click.option', (['"""--upstream-branch"""'], {'help': '"""Target branch in upstream to sync to. (defaults to repo\'s default branch)"""'}), '(\'--upstream-branch\', help=\n "Target branch in upstream to sync to. (defaults to repo\'s default branch)"\n )\n', (701, 814), False, 'import click\n'), ((817, 935), 'click.option', 'click.option', (['"""--no-pr"""'], {'is_flag': '(True)', 'default': '(False)', 'help': '"""Do not create a pull request to upstream repository."""'}), "('--no-pr', is_flag=True, default=False, help=\n 'Do not create a pull request to upstream repository.')\n", (829, 935), False, 'import click\n'), ((951, 1071), 'click.option', 'click.option', (['"""--fork/--no-fork"""'], {'is_flag': '(True)', 'default': '(True)', 'help': '"""Push to a fork before creating a pull request."""'}), "('--fork/--no-fork', is_flag=True, default=True, help=\n 'Push to a fork before creating a pull request.')\n", (963, 1071), False, 'import click\n'), ((1087, 1264), 'click.option', 'click.option', (['"""--remote-to-push"""'], {'default': 'None', 'help': '"""Name of the remote where packit should push. If this is not specified, push to a fork if the repo can be forked."""'}), "('--remote-to-push', default=None, help=\n 'Name of the remote where packit should push. If this is not specified, push to a fork if the repo can be forked.'\n )\n", (1099, 1264), False, 'import click\n'), ((1298, 1439), 'click.option', 'click.option', (['"""--force"""', '"""-f"""'], {'default': '(False)', 'is_flag': '(True)', 'help': '"""Don\'t discard changes in the git repo by default, unless this is set."""'}), '(\'--force\', \'-f\', default=False, is_flag=True, help=\n "Don\'t discard changes in the git repo by default, unless this is set.")\n', (1310, 1439), False, 'import click\n'), ((1459, 1544), 'click.option', 'click.option', (['"""-x"""', '"""--exclude"""'], {'help': '"""File to exclude from sync"""', 'multiple': '(True)'}), "('-x', '--exclude', help='File to exclude from sync', multiple=True\n )\n", (1471, 1544), False, 'import click\n'), ((2084, 2140), 'packit.cli.utils.get_packit_api', 'get_packit_api', ([], {'config': 'config', 'local_project': 'path_or_url'}), '(config=config, local_project=path_or_url)\n', (2098, 2140), False, 'from packit.cli.utils import cover_packit_exception, get_packit_api\n'), ((501, 523), 'packit.config.get_context_settings', 'get_context_settings', ([], {}), '()\n', (521, 523), False, 'from packit.config import pass_config, get_context_settings\n'), ((1585, 1608), 'packit.cli.types.LocalProjectParameter', 'LocalProjectParameter', ([], {}), '()\n', (1606, 1608), False, 'from packit.cli.types import LocalProjectParameter\n')]
|
import torch
from torch import nn
import torch.nn.functional as F
__all__ = ['MobileNetV2']
def _make_divisible(v, divisor, min_value=None):
"""
This function is taken from the original tf repo.
It ensures that all layers have a channel number that is divisible by 8
It can be seen here:
https://github.com/tensorflow/models/blob/master/research/slim/nets/mobilenet/mobilenet.py
:param v:
:param divisor:
:param min_value:
:return:
"""
if min_value is None:
min_value = divisor
new_v = max(min_value, int(v + divisor / 2) // divisor * divisor)
# Make sure that round down does not go down by more than 10%.
if new_v < 0.9 * v:
new_v += divisor
return new_v
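# Minimal illustrative values (assumed, not from the original repo) for the
# rounding behaviour above:
#   _make_divisible(32 * 0.75, 8) -> 24   # 24 is already a multiple of 8
#   _make_divisible(32 * 0.90, 8) -> 32   # 28.8 rounds up to 32, not below 0.9 * 28.8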
class ConvBNReLU(nn.Sequential):
def __init__(self, in_planes, out_planes, kernel_size=3, stride=1, groups=1, norm_layer=None):
padding = (kernel_size - 1) // 2
if norm_layer is None:
norm_layer = nn.BatchNorm2d
super(ConvBNReLU, self).__init__(
nn.Conv2d(in_planes, out_planes, kernel_size, stride, padding, groups=groups, bias=False),
norm_layer(out_planes),
nn.ReLU6(inplace=True)
)
class InvertedResidual(nn.Module):
def __init__(self, inp, oup, stride, expand_ratio, norm_layer=None):
super(InvertedResidual, self).__init__()
self.stride = stride
assert stride in [1, 2]
if norm_layer is None:
norm_layer = nn.BatchNorm2d
hidden_dim = int(round(inp * expand_ratio))
self.use_res_connect = self.stride == 1 and inp == oup
layers = []
if expand_ratio != 1:
# pw
layers.append(ConvBNReLU(inp, hidden_dim, kernel_size=1, norm_layer=norm_layer))
layers.extend([
# dw
ConvBNReLU(hidden_dim, hidden_dim, stride=stride, groups=hidden_dim, norm_layer=norm_layer),
# pw-linear
nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
norm_layer(oup),
])
self.conv = nn.Sequential(*layers)
def forward(self, x):
if self.use_res_connect:
return x + self.conv(x)
else:
return self.conv(x)
class Sandglass(nn.Module):
def __init__(self, inp, oup, stride, reduce_ratio, norm_layer=None):
super(Sandglass, self).__init__()
self.stride = stride
assert stride in [1, 2]
if norm_layer is None:
norm_layer = nn.BatchNorm2d
hidden_dim = int(round(inp / reduce_ratio))
self.use_res_connect = self.stride == 1 and inp == oup
layers = []
layers.extend([
# dw
ConvBNReLU(inp, inp, stride=1, groups=inp, norm_layer=norm_layer),
# pw-linear
nn.Conv2d(inp, hidden_dim, 1, 1, 0, bias=False),
norm_layer(hidden_dim),
# pw-relu6
nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
norm_layer(oup),
nn.ReLU6(inplace=True),
# dw-liner
nn.Conv2d(oup, oup, 3, stride, 1, groups=oup, bias=False),
norm_layer(oup),
])
self.conv = nn.Sequential(*layers)
def forward(self, x):
if self.use_res_connect:
return x + self.conv(x)
else:
return self.conv(x)
class My_Sandglass(nn.Module):
def __init__(self, inp, oup, stride, reduce_ratio, norm_layer=None):
super(My_Sandglass, self).__init__()
self.stride = stride
assert stride in [1, 2]
if norm_layer is None:
norm_layer = nn.BatchNorm2d
self.act = nn.ReLU6(inplace=True)
hidden_dim = int(round(inp / reduce_ratio))
self.use_res_connect = self.stride == 1 and inp == oup
self.dw1 = nn.Conv2d(inp, inp, 3, 1, 1, groups=inp, bias=False)
self.bn1 = norm_layer(inp)
self.avg_pool = nn.AdaptiveAvgPool2d(1)
self.fc = nn.Linear(inp, hidden_dim)
self.pw1 = nn.Conv2d(inp, hidden_dim, 1, 1, 0, bias=False)
self.bn2 = norm_layer(hidden_dim)
self.pw2 = nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False)
self.bn3 = norm_layer(oup)
self.dw2 = nn.Conv2d(oup, oup, 3, stride, 1, groups=oup, bias=False)
self.bn4 = norm_layer(oup)
def forward(self, x):
y = self.dw1(x)
b, c, _, _ = y.size()
z = self.avg_pool(y).view(b, c)
z = self.fc(z).view(b, -1, 1, 1)
z = torch.clamp(z, 0, 1)
y = self.bn1(y)
y = self.act(y)
y = self.pw1(y)
y = self.bn2(y)
y = y * z
y = self.pw2(y)
y = self.bn3(y)
y = self.act(y)
        y = self.dw2(y)
y = self.bn4(y)
if self.use_res_connect:
return x + y
else:
return y
def hard_sigmoid(x, inplace: bool = False):
if inplace:
return x.add_(3.).clamp_(0., 6.).div_(6.)
else:
return F.relu6(x + 3.) / 6.
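# Hedged sanity values for the piecewise-linear hard sigmoid above (assumed
# checks, not in the original file):
#   hard_sigmoid(torch.tensor(-4.)) -> 0.0
#   hard_sigmoid(torch.tensor(0.))  -> 0.5
#   hard_sigmoid(torch.tensor(4.))  -> 1.0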
class My_Sandglass_2(nn.Module):
def __init__(self, inp, oup, stride, reduce_ratio, norm_layer=None):
super(My_Sandglass_2, self).__init__()
self.stride = stride
assert stride in [1, 2]
if norm_layer is None:
norm_layer = nn.BatchNorm2d
self.act = nn.ReLU6(inplace=True)
hidden_dim = int(round(inp / reduce_ratio))
self.use_res_connect = self.stride == 1 and inp == oup
self.dw1 = nn.Conv2d(inp, inp, 3, 1, 1, groups=inp, bias=False)
self.bn1 = norm_layer(inp)
self.avg_pool = nn.AdaptiveAvgPool2d(1)
self.fc = nn.Linear(inp, hidden_dim)
self.pw1 = nn.Conv2d(inp, hidden_dim, 1, 1, 0, bias=False)
self.bn2 = norm_layer(hidden_dim)
self.pw2 = nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False)
self.bn3 = norm_layer(oup)
self.dw2 = nn.Conv2d(oup, oup, 3, stride, 1, groups=oup, bias=False)
self.bn4 = norm_layer(oup)
def forward(self, x):
y = self.dw1(x)
b, c, _, _ = y.size()
z = self.avg_pool(y).view(b, c)
z = self.fc(z).view(b, -1, 1, 1)
# z = torch.clamp(z, 0, 1)
z = hard_sigmoid(z, inplace=True)
y = self.bn1(y)
y = self.act(y)
y = self.pw1(y)
y = self.bn2(y)
y = y * z
y = self.pw2(y)
y = self.bn3(y)
y = self.act(y)
        y = self.dw2(y)
y = self.bn4(y)
if self.use_res_connect:
return x + y
else:
return y
class SELayer(nn.Module):
def __init__(self, channel, reduction=4):
super(SELayer, self).__init__()
self.avg_pool = nn.AdaptiveAvgPool2d(1)
self.fc = nn.Sequential(
nn.Linear(channel, channel // reduction),
nn.ReLU(inplace=True),
nn.Linear(channel // reduction, channel))
def forward(self, x):
b, c, _, _ = x.size()
y = self.avg_pool(x).view(b, c)
y = self.fc(y).view(b, c, 1, 1)
y = torch.clamp(y, 0, 1)
return x * y
class MobileNetV2(nn.Module):
def __init__(self,
num_classes=1000,
width_mult=1.0,
inverted_residual_setting=None,
round_nearest=8,
block=None,
norm_layer=None):
"""
MobileNet V2 main class
Args:
num_classes (int): Number of classes
width_mult (float): Width multiplier - adjusts number of channels in each layer by this amount
inverted_residual_setting: Network structure
round_nearest (int): Round the number of channels in each layer to be a multiple of this number
Set to 1 to turn off rounding
block: Module specifying inverted residual building block for mobilenetv2
norm_layer: Module specifying the normalization layer to use
"""
super(MobileNetV2, self).__init__()
if block is None:
block = InvertedResidual
if norm_layer is None:
norm_layer = nn.BatchNorm2d
input_channel = 32
last_channel = 1280
if inverted_residual_setting is None:
inverted_residual_setting = [
# t, c, n, s
[1, 16, 1, 1],
[6, 24, 2, 2],
[6, 32, 3, 2],
[6, 64, 4, 2],
[6, 96, 3, 1],
[6, 160, 3, 2],
[6, 320, 1, 1],
]
# only check the first element, assuming user knows t,c,n,s are required
if len(inverted_residual_setting) == 0 or len(inverted_residual_setting[0]) != 4:
raise ValueError("inverted_residual_setting should be non-empty "
"or a 4-element list, got {}".format(inverted_residual_setting))
# building first layer
input_channel = _make_divisible(input_channel * width_mult, round_nearest)
self.last_channel = _make_divisible(last_channel * max(1.0, width_mult), round_nearest)
features = [ConvBNReLU(3, input_channel, stride=2, norm_layer=norm_layer)]
# building inverted residual blocks
for t, c, n, s in inverted_residual_setting:
output_channel = _make_divisible(c * width_mult, round_nearest)
for i in range(n):
stride = s if i == 0 else 1
features.append(block(input_channel, output_channel, stride, t, norm_layer=norm_layer))
input_channel = output_channel
# building last several layers
features.append(ConvBNReLU(input_channel, self.last_channel, kernel_size=1, norm_layer=norm_layer))
# make it nn.Sequential
self.features = nn.Sequential(*features)
# building classifier
self.classifier = nn.Sequential(
nn.Dropout(0.2),
            nn.Linear(self.last_channel, num_classes),
)
# weight initialization
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight, mode='fan_out')
if m.bias is not None:
nn.init.zeros_(m.bias)
elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
nn.init.ones_(m.weight)
nn.init.zeros_(m.bias)
elif isinstance(m, nn.Linear):
nn.init.normal_(m.weight, 0, 0.01)
nn.init.zeros_(m.bias)
def _forward_impl(self, x):
# This exists since TorchScript doesn't support inheritance, so the superclass method
# (this one) needs to have a name other than `forward` that can be accessed in a subclass
x = self.features(x)
# Cannot use "squeeze" as batch-size can be 1 => must use reshape with x.shape[0]
x = nn.functional.adaptive_avg_pool2d(x, 1).reshape(x.shape[0], -1)
x = self.classifier(x)
return x
def forward(self, x):
return self._forward_impl(x)
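# Hedged usage sketch (shapes are assumptions based on the code above):
#   model = MobileNetV2(num_classes=1000, width_mult=1.0)
#   logits = model(torch.randn(2, 3, 224, 224))  # -> shape (2, 1000)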
class MobileNetV2_sandglass(nn.Module):
def __init__(self,
num_classes=1000,
width_mult=1.0,
inverted_residual_setting=None,
round_nearest=8,
block=None,
norm_layer=None):
"""
        MobileNetV2_sandglass main class
Args:
num_classes (int): Number of classes
width_mult (float): Width multiplier - adjusts number of channels in each layer by this amount
inverted_residual_setting: Network structure
round_nearest (int): Round the number of channels in each layer to be a multiple of this number
Set to 1 to turn off rounding
block: Module specifying inverted residual building block for mobilenetv2
norm_layer: Module specifying the normalization layer to use
"""
super(MobileNetV2_sandglass, self).__init__()
if block is None:
block = Sandglass
if norm_layer is None:
norm_layer = nn.BatchNorm2d
input_channel = 32
last_channel = 1280
if inverted_residual_setting is None:
inverted_residual_setting = [
# t, c, n, s
[1, 16, 1, 1],
[6, 24, 2, 2],
[6, 32, 3, 2],
[6, 64, 4, 2],
[6, 96, 3, 1],
[6, 160, 3, 2],
# [6, 320, 1, 1],
]
# only check the first element, assuming user knows t,c,n,s are required
if len(inverted_residual_setting) == 0 or len(inverted_residual_setting[0]) != 4:
raise ValueError("inverted_residual_setting should be non-empty "
"or a 4-element list, got {}".format(inverted_residual_setting))
# building first layer
input_channel = _make_divisible(input_channel * width_mult, round_nearest)
self.last_channel = _make_divisible(last_channel * max(1.0, width_mult), round_nearest)
features = [ConvBNReLU(3, input_channel, stride=2, norm_layer=norm_layer)]
# building inverted residual blocks
for t, c, n, s in inverted_residual_setting:
output_channel = _make_divisible(c * t * width_mult, round_nearest)
for i in range(n):
stride = s if i == 0 else 1
features.append(block(input_channel, output_channel, stride, t, norm_layer=norm_layer))
input_channel = output_channel
features.extend(
[ConvBNReLU(960, 960, stride=1, groups=960, norm_layer=norm_layer),
# pw-linear
nn.Conv2d(960, 320, 1, 1, 0, bias=False),
norm_layer(320),]
)
# building last several layers
features.append(ConvBNReLU(input_channel, self.last_channel, kernel_size=1, norm_layer=norm_layer))
# make it nn.Sequential
self.features = nn.Sequential(*features)
# building classifier
self.classifier = nn.Sequential(
nn.Dropout(0.2),
nn.Linear(self.last_channel, num_classes),
)
# weight initialization
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight, mode='fan_out')
if m.bias is not None:
nn.init.zeros_(m.bias)
elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
nn.init.ones_(m.weight)
nn.init.zeros_(m.bias)
elif isinstance(m, nn.Linear):
nn.init.normal_(m.weight, 0, 0.01)
nn.init.zeros_(m.bias)
def _forward_impl(self, x):
# This exists since TorchScript doesn't support inheritance, so the superclass method
# (this one) needs to have a name other than `forward` that can be accessed in a subclass
x = self.features(x)
# Cannot use "squeeze" as batch-size can be 1 => must use reshape with x.shape[0]
x = nn.functional.adaptive_avg_pool2d(x, 1).reshape(x.shape[0], -1)
x = self.classifier(x)
return x
def forward(self, x):
return self._forward_impl(x)
class MobileNeXt(nn.Module):
def __init__(self,
num_classes=1000,
width_mult=1.0,
sandglass_setting=None,
round_nearest=8,
block=None,
norm_layer=None):
"""
        MobileNeXt main class
Args:
num_classes (int): Number of classes
width_mult (float): Width multiplier - adjusts number of channels in each layer by this amount
sandglass_setting: Network structure
round_nearest (int): Round the number of channels in each layer to be a multiple of this number
Set to 1 to turn off rounding
block: Module specifying inverted residual building block for mobilenetv2
norm_layer: Module specifying the normalization layer to use
"""
super(MobileNeXt, self).__init__()
if block is None:
block = Sandglass
if norm_layer is None:
norm_layer = nn.BatchNorm2d
input_channel = 32
last_channel = 1280
if sandglass_setting is None:
sandglass_setting = [
# t, c, n, s
[2, 96, 1, 2],
[6, 144, 1, 1],
[6, 192, 3, 2],
[6, 288, 3, 2],
[6, 384, 4, 1],
[6, 576, 4, 2],
                [6, 960, 3, 1],  # [6, 960, 2, 1],
[6, 1280, 1, 1],
]
# only check the first element, assuming user knows t,c,n,s are required
if len(sandglass_setting) == 0 or len(sandglass_setting[0]) != 4:
raise ValueError("sandglass_setting should be non-empty "
"or a 4-element list, got {}".format(sandglass_setting))
# building first layer
input_channel = _make_divisible(input_channel * width_mult, round_nearest)
self.last_channel = _make_divisible(last_channel * max(1.0, width_mult), round_nearest)
features = [ConvBNReLU(3, input_channel, stride=2, norm_layer=norm_layer)]
# building sandglass blocks
for t, c, n, s in sandglass_setting:
output_channel = _make_divisible(c * width_mult, round_nearest)
for i in range(n):
stride = s if i == 0 else 1
features.append(block(input_channel, output_channel, stride, t, norm_layer=norm_layer))
input_channel = output_channel
# make it nn.Sequential
self.features = nn.Sequential(*features)
# building classifier
self.classifier = nn.Sequential(
nn.Dropout(0.2),
nn.Linear(self.last_channel, num_classes),
)
# weight initialization
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight, mode='fan_out')
if m.bias is not None:
nn.init.zeros_(m.bias)
elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
nn.init.ones_(m.weight)
nn.init.zeros_(m.bias)
elif isinstance(m, nn.Linear):
nn.init.normal_(m.weight, 0, 0.01)
nn.init.zeros_(m.bias)
def _forward_impl(self, x):
# This exists since TorchScript doesn't support inheritance, so the superclass method
# (this one) needs to have a name other than `forward` that can be accessed in a subclass
x = self.features(x)
# Cannot use "squeeze" as batch-size can be 1 => must use reshape with x.shape[0]
x = nn.functional.adaptive_avg_pool2d(x, 1).reshape(x.shape[0], -1)
x = self.classifier(x)
return x
def forward(self, x):
return self._forward_impl(x)
def mobilenetv2_sandglass(**kwargs):
    sandglass_setting = [
# t, c, n, s
[1, 16, 1, 1],
[6, 144, 2, 2],
[6, 192, 3, 2],
[6, 384, 4, 2],
[6, 576, 3, 1],
[6, 960, 3, 2],
[6, 1920, 1, 1],
# [1, 16, 1, 1],
# [6, 24, 2, 2],
# [6, 32, 3, 2],
# [6, 64, 4, 2],
# [6, 96, 3, 1],
# [6, 160, 3, 2],
# [6, 320, 1, 1],
]
block = Sandglass
    return MobileNetV2(inverted_residual_setting=sandglass_setting, block=block, **kwargs)
def my_mobilenext(**kwargs):
block = My_Sandglass
return MobileNeXt(block=block, **kwargs)
def my_mobilenext_2(**kwargs):
block = My_Sandglass_2
return MobileNeXt(block=block, **kwargs)
if __name__=='__main__':
import torch
from torchvision import models
model = MobileNeXt()
print('Total params: %f M' % (sum(p.numel() for p in model.parameters()) / 1024. / 1024.0))
print(len(list(model.modules())))
# model = my_mobilenext()
# print('Total params: %f M' % (sum(p.numel() for p in model.parameters()) / 1024. / 1024.0))
# print(len(list(model.modules())))
# model = MobileNetV2()
# print('Total params: %f M' % (sum(p.numel() for p in model.parameters()) / 1024. / 1024.0))
# print(len(list(model.modules())))
# model = models.mobilenet_v2(pretrained=False, width_mult=1.0)
# print('Total params: %f M' % (sum(p.numel() for p in model.parameters()) / 1024. / 1024.0))
# print(len(list(model.modules())))
# model =mobilenetv2_sandglass()
# print('Total params: %f M' % (sum(p.numel() for p in model.parameters()) / 1024. / 1024.0))
# print(len(list(model.modules())))
# model = MobileNetV2_sandglass()
# print('Total params: %f M' % (sum(p.numel() for p in model.parameters()) / 1024. / 1024.0))
# print(len(list(model.modules())))
# model = InvertedResidual(32, 32, 1, 6)
# print('InvertedResidual params: %.f' % (sum(p.numel() for p in model.parameters())))
# print(len(list(model.modules())))
# print(model)
# model = Sandglass(192, 192, 1, 6)
# print('Sandglass params: %.f' % (sum(p.numel() for p in model.parameters())))
# print(len(list(model.modules())))
# # print(model)
# model = My_Sandglass(192, 192, 1, 6)
# print('Sandglass params: %.f' % (sum(p.numel() for p in model.parameters())))
# print(len(list(model.modules())))
# print(model)
# model.eval()
# # print(model)
input = torch.randn(1, 3, 224, 224)
# y = model(input)
# # print(y.shape)
# print('Total params: %f M' % (sum(p.numel() for p in model.parameters())/ 1024. / 1024.0))
from thop import profile
flops, params = profile(model, inputs=[input])
print(flops)
print(params)
print('Total params: %f M' % (sum(p.numel() for p in model.parameters())))
|
[
"torch.nn.Dropout",
"torch.nn.AdaptiveAvgPool2d",
"thop.profile",
"torch.nn.ReLU6",
"torch.nn.ReLU",
"torch.nn.Sequential",
"torch.nn.init.kaiming_normal_",
"torch.nn.Conv2d",
"torch.randn",
"torch.nn.functional.adaptive_avg_pool2d",
"torch.nn.init.zeros_",
"torch.clamp",
"torch.nn.init.normal_",
"torch.nn.Linear",
"torch.nn.init.ones_"
] |
[((21493, 21520), 'torch.randn', 'torch.randn', (['(1)', '(3)', '(224)', '(224)'], {}), '(1, 3, 224, 224)\n', (21504, 21520), False, 'import torch\n'), ((21713, 21743), 'thop.profile', 'profile', (['model'], {'inputs': '[input]'}), '(model, inputs=[input])\n', (21720, 21743), False, 'from thop import profile\n'), ((2022, 2044), 'torch.nn.Sequential', 'nn.Sequential', (['*layers'], {}), '(*layers)\n', (2035, 2044), False, 'from torch import nn\n'), ((3146, 3168), 'torch.nn.Sequential', 'nn.Sequential', (['*layers'], {}), '(*layers)\n', (3159, 3168), False, 'from torch import nn\n'), ((3613, 3635), 'torch.nn.ReLU6', 'nn.ReLU6', ([], {'inplace': '(True)'}), '(inplace=True)\n', (3621, 3635), False, 'from torch import nn\n'), ((3771, 3823), 'torch.nn.Conv2d', 'nn.Conv2d', (['inp', 'inp', '(3)', '(1)', '(1)'], {'groups': 'inp', 'bias': '(False)'}), '(inp, inp, 3, 1, 1, groups=inp, bias=False)\n', (3780, 3823), False, 'from torch import nn\n'), ((3883, 3906), 'torch.nn.AdaptiveAvgPool2d', 'nn.AdaptiveAvgPool2d', (['(1)'], {}), '(1)\n', (3903, 3906), False, 'from torch import nn\n'), ((3925, 3951), 'torch.nn.Linear', 'nn.Linear', (['inp', 'hidden_dim'], {}), '(inp, hidden_dim)\n', (3934, 3951), False, 'from torch import nn\n'), ((3971, 4018), 'torch.nn.Conv2d', 'nn.Conv2d', (['inp', 'hidden_dim', '(1)', '(1)', '(0)'], {'bias': '(False)'}), '(inp, hidden_dim, 1, 1, 0, bias=False)\n', (3980, 4018), False, 'from torch import nn\n'), ((4080, 4127), 'torch.nn.Conv2d', 'nn.Conv2d', (['hidden_dim', 'oup', '(1)', '(1)', '(0)'], {'bias': '(False)'}), '(hidden_dim, oup, 1, 1, 0, bias=False)\n', (4089, 4127), False, 'from torch import nn\n'), ((4182, 4239), 'torch.nn.Conv2d', 'nn.Conv2d', (['oup', 'oup', '(3)', 'stride', '(1)'], {'groups': 'oup', 'bias': '(False)'}), '(oup, oup, 3, stride, 1, groups=oup, bias=False)\n', (4191, 4239), False, 'from torch import nn\n'), ((4450, 4470), 'torch.clamp', 'torch.clamp', (['z', '(0)', '(1)'], {}), '(z, 0, 1)\n', (4461, 4470), False, 'import torch\n'), ((5266, 5288), 'torch.nn.ReLU6', 'nn.ReLU6', ([], {'inplace': '(True)'}), '(inplace=True)\n', (5274, 5288), False, 'from torch import nn\n'), ((5424, 5476), 'torch.nn.Conv2d', 'nn.Conv2d', (['inp', 'inp', '(3)', '(1)', '(1)'], {'groups': 'inp', 'bias': '(False)'}), '(inp, inp, 3, 1, 1, groups=inp, bias=False)\n', (5433, 5476), False, 'from torch import nn\n'), ((5536, 5559), 'torch.nn.AdaptiveAvgPool2d', 'nn.AdaptiveAvgPool2d', (['(1)'], {}), '(1)\n', (5556, 5559), False, 'from torch import nn\n'), ((5578, 5604), 'torch.nn.Linear', 'nn.Linear', (['inp', 'hidden_dim'], {}), '(inp, hidden_dim)\n', (5587, 5604), False, 'from torch import nn\n'), ((5624, 5671), 'torch.nn.Conv2d', 'nn.Conv2d', (['inp', 'hidden_dim', '(1)', '(1)', '(0)'], {'bias': '(False)'}), '(inp, hidden_dim, 1, 1, 0, bias=False)\n', (5633, 5671), False, 'from torch import nn\n'), ((5733, 5780), 'torch.nn.Conv2d', 'nn.Conv2d', (['hidden_dim', 'oup', '(1)', '(1)', '(0)'], {'bias': '(False)'}), '(hidden_dim, oup, 1, 1, 0, bias=False)\n', (5742, 5780), False, 'from torch import nn\n'), ((5835, 5892), 'torch.nn.Conv2d', 'nn.Conv2d', (['oup', 'oup', '(3)', 'stride', '(1)'], {'groups': 'oup', 'bias': '(False)'}), '(oup, oup, 3, stride, 1, groups=oup, bias=False)\n', (5844, 5892), False, 'from torch import nn\n'), ((6637, 6660), 'torch.nn.AdaptiveAvgPool2d', 'nn.AdaptiveAvgPool2d', (['(1)'], {}), '(1)\n', (6657, 6660), False, 'from torch import nn\n'), ((6998, 7018), 'torch.clamp', 'torch.clamp', (['y', '(0)', '(1)'], {}), '(y, 0, 1)\n', (7009, 7018), False, 'import torch\n'), ((9731, 9755), 'torch.nn.Sequential', 'nn.Sequential', (['*features'], {}), '(*features)\n', (9744, 9755), False, 'from torch import nn\n'), ((13972, 13996), 'torch.nn.Sequential', 'nn.Sequential', (['*features'], {}), '(*features)\n', (13985, 13996), False, 'from torch import nn\n'), ((17734, 17758), 'torch.nn.Sequential', 'nn.Sequential', (['*features'], {}), '(*features)\n', (17747, 17758), False, 'from torch import nn\n'), ((990, 1084), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_planes', 'out_planes', 'kernel_size', 'stride', 'padding'], {'groups': 'groups', 'bias': '(False)'}), '(in_planes, out_planes, kernel_size, stride, padding, groups=\n groups, bias=False)\n', (999, 1084), False, 'from torch import nn\n'), ((1129, 1151), 'torch.nn.ReLU6', 'nn.ReLU6', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1137, 1151), False, 'from torch import nn\n'), ((6710, 6750), 'torch.nn.Linear', 'nn.Linear', (['channel', '(channel // reduction)'], {}), '(channel, channel // reduction)\n', (6719, 6750), False, 'from torch import nn\n'), ((6768, 6789), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (6775, 6789), False, 'from torch import nn\n'), ((6807, 6847), 'torch.nn.Linear', 'nn.Linear', (['(channel // reduction)', 'channel'], {}), '(channel // reduction, channel)\n', (6816, 6847), False, 'from torch import nn\n'), ((9840, 9855), 'torch.nn.Dropout', 'nn.Dropout', (['(0.2)'], {}), '(0.2)\n', (9850, 9855), False, 'from torch import nn\n'), ((9926, 9964), 'torch.nn.Linear', 'nn.Linear', (['output_channel', 'num_classes'], {}), '(output_channel, num_classes)\n', (9935, 9964), False, 'from torch import nn\n'), ((14081, 14096), 'torch.nn.Dropout', 'nn.Dropout', (['(0.2)'], {}), '(0.2)\n', (14091, 14096), False, 'from torch import nn\n'), ((14110, 14151), 'torch.nn.Linear', 'nn.Linear', (['self.last_channel', 'num_classes'], {}), '(self.last_channel, num_classes)\n', (14119, 14151), False, 'from torch import nn\n'), ((17843, 17858), 'torch.nn.Dropout', 'nn.Dropout', (['(0.2)'], {}), '(0.2)\n', (17853, 17858), False, 'from torch import nn\n'), ((17872, 17913), 'torch.nn.Linear', 'nn.Linear', (['self.last_channel', 'num_classes'], {}), '(self.last_channel, num_classes)\n', (17881, 17913), False, 'from torch import nn\n'), ((1913, 1960), 'torch.nn.Conv2d', 'nn.Conv2d', (['hidden_dim', 'oup', '(1)', '(1)', '(0)'], {'bias': '(False)'}), '(hidden_dim, oup, 1, 1, 0, bias=False)\n', (1922, 1960), False, 'from torch import nn\n'), ((2758, 2805), 'torch.nn.Conv2d', 'nn.Conv2d', (['inp', 'hidden_dim', '(1)', '(1)', '(0)'], {'bias': '(False)'}), '(inp, hidden_dim, 1, 1, 0, bias=False)\n', (2767, 2805), False, 'from torch import nn\n'), ((2878, 2925), 'torch.nn.Conv2d', 'nn.Conv2d', (['hidden_dim', 'oup', '(1)', '(1)', '(0)'], {'bias': '(False)'}), '(hidden_dim, oup, 1, 1, 0, bias=False)\n', (2887, 2925), False, 'from torch import nn\n'), ((2968, 2990), 'torch.nn.ReLU6', 'nn.ReLU6', ([], {'inplace': '(True)'}), '(inplace=True)\n', (2976, 2990), False, 'from torch import nn\n'), ((3027, 3084), 'torch.nn.Conv2d', 'nn.Conv2d', (['oup', 'oup', '(3)', 'stride', '(1)'], {'groups': 'oup', 'bias': '(False)'}), '(oup, oup, 3, stride, 1, groups=oup, bias=False)\n', (3036, 3084), False, 'from torch import nn\n'), ((10099, 10148), 'torch.nn.init.kaiming_normal_', 'nn.init.kaiming_normal_', (['m.weight'], {'mode': '"""fan_out"""'}), "(m.weight, mode='fan_out')\n", (10122, 10148), False, 'from torch import nn\n'), ((10863, 10902), 'torch.nn.functional.adaptive_avg_pool2d', 'nn.functional.adaptive_avg_pool2d', (['x', '(1)'], {}), '(x, 1)\n', (10896, 10902), False, 'from torch import nn\n'), ((13687, 13727), 'torch.nn.Conv2d', 'nn.Conv2d', (['(960)', '(320)', '(1)', '(1)', '(0)'], {'bias': '(False)'}), '(960, 320, 1, 1, 0, bias=False)\n', (13696, 13727), False, 'from torch import nn\n'), ((14286, 14335), 'torch.nn.init.kaiming_normal_', 'nn.init.kaiming_normal_', (['m.weight'], {'mode': '"""fan_out"""'}), "(m.weight, mode='fan_out')\n", (14309, 14335), False, 'from torch import nn\n'), ((15050, 15089), 'torch.nn.functional.adaptive_avg_pool2d', 'nn.functional.adaptive_avg_pool2d', (['x', '(1)'], {}), '(x, 1)\n', (15083, 15089), False, 'from torch import nn\n'), ((18048, 18097), 'torch.nn.init.kaiming_normal_', 'nn.init.kaiming_normal_', (['m.weight'], {'mode': '"""fan_out"""'}), "(m.weight, mode='fan_out')\n", (18071, 18097), False, 'from torch import nn\n'), ((18812, 18851), 'torch.nn.functional.adaptive_avg_pool2d', 'nn.functional.adaptive_avg_pool2d', (['x', '(1)'], {}), '(x, 1)\n', (18845, 18851), False, 'from torch import nn\n'), ((10208, 10230), 'torch.nn.init.zeros_', 'nn.init.zeros_', (['m.bias'], {}), '(m.bias)\n', (10222, 10230), False, 'from torch import nn\n'), ((10311, 10334), 'torch.nn.init.ones_', 'nn.init.ones_', (['m.weight'], {}), '(m.weight)\n', (10324, 10334), False, 'from torch import nn\n'), ((10351, 10373), 'torch.nn.init.zeros_', 'nn.init.zeros_', (['m.bias'], {}), '(m.bias)\n', (10365, 10373), False, 'from torch import nn\n'), ((14395, 14417), 'torch.nn.init.zeros_', 'nn.init.zeros_', (['m.bias'], {}), '(m.bias)\n', (14409, 14417), False, 'from torch import nn\n'), ((14498, 14521), 'torch.nn.init.ones_', 'nn.init.ones_', (['m.weight'], {}), '(m.weight)\n', (14511, 14521), False, 'from torch import nn\n'), ((14538, 14560), 'torch.nn.init.zeros_', 'nn.init.zeros_', (['m.bias'], {}), '(m.bias)\n', (14552, 14560), False, 'from torch import nn\n'), ((18157, 18179), 'torch.nn.init.zeros_', 'nn.init.zeros_', (['m.bias'], {}), '(m.bias)\n', (18171, 18179), False, 'from torch import nn\n'), ((18260, 18283), 'torch.nn.init.ones_', 'nn.init.ones_', (['m.weight'], {}), '(m.weight)\n', (18273, 18283), False, 'from torch import nn\n'), ((18300, 18322), 'torch.nn.init.zeros_', 'nn.init.zeros_', (['m.bias'], {}), '(m.bias)\n', (18314, 18322), False, 'from torch import nn\n'), ((10433, 10467), 'torch.nn.init.normal_', 'nn.init.normal_', (['m.weight', '(0)', '(0.01)'], {}), '(m.weight, 0, 0.01)\n', (10448, 10467), False, 'from torch import nn\n'), ((10484, 10506), 'torch.nn.init.zeros_', 'nn.init.zeros_', (['m.bias'], {}), '(m.bias)\n', (10498, 10506), False, 'from torch import nn\n'), ((14620, 14654), 'torch.nn.init.normal_', 'nn.init.normal_', (['m.weight', '(0)', '(0.01)'], {}), '(m.weight, 0, 0.01)\n', (14635, 14654), False, 'from torch import nn\n'), ((14671, 14693), 'torch.nn.init.zeros_', 'nn.init.zeros_', (['m.bias'], {}), '(m.bias)\n', (14685, 14693), False, 'from torch import nn\n'), ((18382, 18416), 'torch.nn.init.normal_', 'nn.init.normal_', (['m.weight', '(0)', '(0.01)'], {}), '(m.weight, 0, 0.01)\n', (18397, 18416), False, 'from torch import nn\n'), ((18433, 18455), 'torch.nn.init.zeros_', 'nn.init.zeros_', (['m.bias'], {}), '(m.bias)\n', (18447, 18455), False, 'from torch import nn\n')]
|
import dataclasses as dtc
import librosa
import numpy as np
import torch
from torch.utils.data import Dataset
from typing import Optional, Callable, Union
import re
from torch._six import string_classes
import collections
__all__ = [
'Setter',
'Getter',
'AsSlice',
'AsFramedSlice',
'GetId',
'Input',
'Target',
'process_batch',
'ProgrammableDataset',
]
@dtc.dataclass
class Setter:
dim: int = 0
after_item: bool = True
def __post_init__(self):
self.pre_slices = (slice(None),) * self.dim
def __call__(self, data, item, value):
slc = slice(item, item + value.shape[self.dim]) if self.after_item \
else slice(item-value.shape[self.dim], item)
data.data[self.pre_slices + (slc,)] = value
return value.shape[self.dim]
@dtc.dataclass
class Getter:
"""
    base class for implementing data getters
    Attributes
    ----------
n : int or None
the length of the underlying data
"""
n: Optional[int] = dtc.field(default=None, init=False)
def __call__(self, proxy, item):
"""
apply this instance's logic to get data from ``proxy`` for a given ``item``
Parameters
----------
proxy: h5m.Proxy
the proxy to read from
item: int
the index emitted from a sampler
Returns
-------
data: Any
the data corresponding to this item
"""
return proxy[item]
def __len__(self):
return self.n
class GetId(Getter):
def __call__(self, proxy, item):
return proxy[proxy.refs[item]]
@dtc.dataclass
class AsSlice(Getter):
"""
maps an ``item`` to a slice of data
Parameters
----------
dim : int
the dimension to slice
shift : int
the slice will start at the index `item + shift`
length : int
the length of the slice
    downsampling : int
        sub-sampling factor. Each increment of `item` advances `downsampling` datapoints
Examples
--------
.. testcode::
import h5mapper as h5m
slicer = h5m.AsSlice(shift=2, length=3)
data, item = list(range(10)), 2
# now use it like a function :
sliced = slicer(data, item)
print(sliced)
will output:
.. testoutput::
[4, 5, 6]
"""
dim: int = 0
shift: int = 0
length: int = 1
downsampling: int = 1
def __post_init__(self):
self.pre_slices = (slice(None),) * self.dim
def __call__(self, proxy, item):
i = item * self.downsampling
slc = slice(i + self.shift, i + self.shift + self.length)
return proxy[self.pre_slices + (slc, )]
def __len__(self):
return (self.n - (abs(self.shift) + self.length) + 1) // self.downsampling
def shift_and_length_to_samples(self, frame_length, hop_length, center=False):
extra = -hop_length if center else \
((frame_length // hop_length) - 1) * hop_length
shift = self.shift * hop_length
length = self.length * hop_length + extra
return shift, length
@dtc.dataclass
class AsFramedSlice(AsSlice):
dim: int = 0
shift: int = 0 # in frames!
length: int = 1 # in frames!
frame_size: int = 1
hop_length: int = 1
center: bool = False
pad_mode: str = 'reflect'
downsampling: int = 1
def __post_init__(self):
super(AsFramedSlice, self).__post_init__()
# convert frames to samples
if self.hop_length != self.frame_size:
_, self.length = self.shift_and_length_to_samples(
self.frame_size, self.hop_length, self.center)
def __call__(self, proxy, item):
sliced = super(AsFramedSlice, self).__call__(proxy, item)
if self.center:
sliced = np.pad(sliced, int(self.frame_size // 2), self.pad_mode)
return librosa.util.frame(sliced, self.frame_size, self.hop_length, axis=0)
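# Hedged worked example of the frame/sample conversion above (values assumed):
# with frame_size=4, hop_length=2, length=3 frames and center=False,
# shift_and_length_to_samples gives length = 3*2 + (4//2 - 1)*2 = 8 samples,
# and librosa.util.frame turns those 8 samples back into 3 windows of size 4.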
@dtc.dataclass
class Input:
"""read and transform data from a specific key/proxy in a .h5 file"""
data: Union[str, np.ndarray, "Proxy"] = ''
getter: Getter = Getter()
setter: Optional[Setter] = None
transform: Callable[[np.ndarray], np.ndarray] = lambda x: x
inverse_transform: Callable[[np.ndarray], np.ndarray] = lambda x: x
to_tensor: bool = False
device: str = 'cuda' if torch.cuda.is_available() else 'cpu'
def __post_init__(self):
pass
def get_object(self, file):
return self.data if file is None or not isinstance(self.data, str) \
else getattr(file, self.data)
def __len__(self):
return len(self.getter)
def __call__(self, item, file=None):
data = self.getter(self.get_object(file), item)
if self.to_tensor:
data = torch.from_numpy(data).to(self.device)
return self.transform(data)
def set(self, key, value):
return self.setter(self.data, key, value)
class Target(Input):
"""exactly equivalent to Input, just makes code simpler to read."""
pass
np_str_obj_array_pattern = re.compile(r'[SaUO]')
def process_batch(batch, test=lambda x: False, func=lambda x: x):
"""
recursively apply func to the elements of data if test(element) is True.
This is used in ProgrammableDataset to process elements (Input or Target) packed in tuples, list, dict etc...
"""
elem_type = type(batch)
if test(batch):
return func(batch)
elif isinstance(batch, collections.abc.Mapping):
return {key: process_batch(batch[key], test, func) for key in batch}
elif isinstance(batch, tuple) and hasattr(batch, '_fields'): # namedtuple
return elem_type(*(process_batch(d, test, func) for d in batch))
elif isinstance(batch, collections.abc.Sequence) and not isinstance(batch, string_classes):
return [process_batch(d, test, func) for d in batch]
else:
return batch
def _is_batchitem(obj):
return isinstance(obj, (Input, Target))
class ProgrammableDataset(Dataset):
"""
Dataset whose __getitem__ method is specified by a batch object passed to its constructor.
The batch object can be of any type supported by torch's default collate function (Mapping, Sequence, etc.)
and should contain batch items (``h5m.Input`` or ``h5m.Target``).
"""
def __init__(self, file, batch=tuple()):
super(Dataset, self).__init__()
self.file = file
def cache_lengths(feat):
# pass the lengths of the db features to the getters
if feat.getter.n is None:
if isinstance(feat.getter, GetId):
                    n = sum(feat.get_object(file).refs[()].astype(bool))
else:
n = len(feat.get_object(file))
setattr(feat.getter, 'n', n)
return feat
self.batch = process_batch(batch, _is_batchitem, cache_lengths)
# get the minimum length of all batchitems
self.N = float('inf')
def set_n_to_min(feat):
self.N = min(len(feat), self.N)
return feat
process_batch(self.batch, _is_batchitem, set_n_to_min)
def __getitem__(self, item):
def get_data(feat):
return feat(item, self.file)
return process_batch(self.batch, _is_batchitem, get_data)
def __len__(self):
return self.N
def __del__(self):
if hasattr(self.file, 'close'):
self.file.close()
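# Hedged usage sketch (proxy names 'x'/'y' are assumptions, not real keys):
#   batch = (Input(data='x', getter=AsSlice(length=64), to_tensor=True),
#            Target(data='y', getter=AsSlice(length=64), to_tensor=True))
#   ds = ProgrammableDataset(file, batch)
#   x, y = ds[0]   # each item is fetched through its Getter and transform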
|
[
"librosa.util.frame",
"re.compile",
"dataclasses.field",
"torch.cuda.is_available",
"torch.from_numpy"
] |
[((5117, 5137), 're.compile', 're.compile', (['"""[SaUO]"""'], {}), "('[SaUO]')\n", (5127, 5137), False, 'import re\n'), ((1055, 1090), 'dataclasses.field', 'dtc.field', ([], {'default': 'None', 'init': '(False)'}), '(default=None, init=False)\n', (1064, 1090), True, 'import dataclasses as dtc\n'), ((3917, 3985), 'librosa.util.frame', 'librosa.util.frame', (['sliced', 'self.frame_size', 'self.hop_length'], {'axis': '(0)'}), '(sliced, self.frame_size, self.hop_length, axis=0)\n', (3935, 3985), False, 'import librosa\n'), ((4395, 4420), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (4418, 4420), False, 'import torch\n'), ((4827, 4849), 'torch.from_numpy', 'torch.from_numpy', (['data'], {}), '(data)\n', (4843, 4849), False, 'import torch\n')]
|
import sys
def import_module(name, path):
if sys.version_info >= (3, 5):
import importlib.util
spec = importlib.util.spec_from_file_location(name, path)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
return module
elif sys.version_info >= (3, 0):
from importlib.machinery import SourceFileLoader
return SourceFileLoader(name, path).load_module()
else:
import imp
return imp.load_source(name, path)
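# Hedged usage sketch (module name and path are assumptions):
#   mod = import_module("my_plugin", "/path/to/my_plugin.py")
#   mod.main()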
|
[
"imp.load_source",
"importlib.machinery.SourceFileLoader"
] |
[((488, 515), 'imp.load_source', 'imp.load_source', (['name', 'path'], {}), '(name, path)\n', (503, 515), False, 'import imp\n'), ((401, 429), 'importlib.machinery.SourceFileLoader', 'SourceFileLoader', (['name', 'path'], {}), '(name, path)\n', (417, 429), False, 'from importlib.machinery import SourceFileLoader\n')]
|
import time
from peewee import *
from playhouse.postgres_ext import ArrayField, BinaryJSONField
from model import BaseModel, MyTimestampField
from model.board import Board
from model._post import POST_TYPES
from model.topic import Topic
from slim import json_ex_dumps
class PostStats(BaseModel):
id = BlobField(primary_key=True)
post_type = IntegerField(index=True)
last_comment_id = BlobField(null=True, default=None)
last_edit_user_id = BlobField(null=True, default=None)
last_edit_time = BigIntegerField(null=True, default=None)
update_time = BigIntegerField(null=True, default=None, index=True)
    click_count = BigIntegerField(default=0)  # number of clicks
    edit_count = IntegerField(default=0)  # number of edits
    comment_count = IntegerField(default=0)  # number of comments
    topic_count = IntegerField(default=0)  # number of topics
    follow_count = IntegerField(default=0)  # number of follows
    bookmark_count = IntegerField(default=0)  # number of bookmarks
    upvote_count = IntegerField(default=0)  # number of upvotes
    downvote_count = IntegerField(default=0)  # number of downvotes
    thank_count = IntegerField(default=0)  # number of thanks
    vote_weight = IntegerField(default=0, index=True)  # vote weight
# board
# click_count = IntegerField(default=0)
# comment_count = IntegerField(default=0)
# topic_count = IntegerField(default=0)
# last_comment_id = BlobField(null=True, default=None)
# topic
# viewed_users = ArrayField(BlobField, null=True)
# commented_users = ArrayField(BlobField, null=True)
# click_count = IntegerField(default=0)
# comment_count = IntegerField(default=0)
# follow_count = IntegerField(default=0)
# last_comment_id = BlobField(null=True)
# user
# click_count = IntegerField(default=0)
# comment_count = IntegerField(default=0)
# follow_count = IntegerField(default=0)
class Meta:
db_table = 'post_stats'
class StatsLog(BaseModel):
id = BlobField(primary_key=True)
time = MyTimestampField(index=True)
data = BinaryJSONField(dumps=json_ex_dumps)
class Meta:
db_table = 'stats_log'
def post_stats_incr(field: Field, post_id, num=1, cb=None):
    # About atomic updates:
# http://docs.peewee-orm.com/en/latest/peewee/querying.html#atomic-updates
update_data = {field.name: field + num}
where = [PostStats.id == post_id]
if cb: cb(update_data, where)
PostStats.update(**update_data)\
.where(*where) \
.execute()
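# Hedged illustration (not in the original): the atomic-update pattern above
# is compiled by peewee into a single SQL statement, so e.g.
#   post_stats_incr(PostStats.click_count, post_id)
# issues roughly: UPDATE post_stats SET click_count = click_count + 1 WHERE id = ...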
def post_stats_do_edit(post_id, user_id):
def func(update, where):
update['last_edit_user_id'] = user_id
update['last_edit_time'] = int(time.time())
update['update_time'] = int(time.time())
post_stats_incr(PostStats.edit_count, post_id, cb=func)
def post_stats_do_comment(related_type, related_id, comment_id):
    # Update both the commented object's comment count and its last comment id
def func(update, where): update['last_comment_id'] = comment_id
post_stats_incr(PostStats.comment_count, related_id, 1, cb=func)
    # If the commented object is a topic, the board's stats must be updated as well
if related_type == POST_TYPES.TOPIC:
t = Topic.get_by_pk(related_id)
post_stats_incr(PostStats.comment_count, t.board_id, 1, cb=func)
def post_stats_add_topic_click(topic_id, board_id=None):
if not board_id:
t = Topic.get_by_pk(topic_id)
board_id = t.board_id
post_stats_incr(PostStats.click_count, topic_id)
post_stats_incr(PostStats.click_count, board_id)
def post_stats_topic_move(from_board_id, to_board_id, topic_id):
    # Adjust the comment counters
ts = PostStats.get(PostStats.id == topic_id)
if from_board_id:
def func(update_data, where):
update_data['comment_count'] = PostStats.comment_count - ts.comment_count
post_stats_incr(PostStats.topic_count, from_board_id, -1, cb=func)
def func(update_data, where):
update_data['comment_count'] = PostStats.comment_count + ts.comment_count
post_stats_incr(PostStats.topic_count, to_board_id, 1, cb=func)
def post_stats_new(post_type, id):
PostStats.create(id=id, post_type=post_type, update_time=int(time.time()))
def post_stats_topic_new(board_id, topic_id):
post_stats_incr(PostStats.topic_count, board_id)
post_stats_new(POST_TYPES.TOPIC, topic_id)
|
[
"model.MyTimestampField",
"model.topic.Topic.get_by_pk",
"playhouse.postgres_ext.BinaryJSONField",
"time.time"
] |
[((1932, 1960), 'model.MyTimestampField', 'MyTimestampField', ([], {'index': '(True)'}), '(index=True)\n', (1948, 1960), False, 'from model import BaseModel, MyTimestampField\n'), ((1972, 2008), 'playhouse.postgres_ext.BinaryJSONField', 'BinaryJSONField', ([], {'dumps': 'json_ex_dumps'}), '(dumps=json_ex_dumps)\n', (1987, 2008), False, 'from playhouse.postgres_ext import ArrayField, BinaryJSONField\n'), ((3000, 3027), 'model.topic.Topic.get_by_pk', 'Topic.get_by_pk', (['related_id'], {}), '(related_id)\n', (3015, 3027), False, 'from model.topic import Topic\n'), ((3193, 3218), 'model.topic.Topic.get_by_pk', 'Topic.get_by_pk', (['topic_id'], {}), '(topic_id)\n', (3208, 3218), False, 'from model.topic import Topic\n'), ((2567, 2578), 'time.time', 'time.time', ([], {}), '()\n', (2576, 2578), False, 'import time\n'), ((2616, 2627), 'time.time', 'time.time', ([], {}), '()\n', (2625, 2627), False, 'import time\n'), ((3992, 4003), 'time.time', 'time.time', ([], {}), '()\n', (4001, 4003), False, 'import time\n')]
|
import os
import pandas as pd
import hashlib
from glob import glob
from helpers.menu_extractor import MenuExtractor
input_path = './data'
output_file = './data/raw_menu_data.csv'
def sha1sum(filename):
h = hashlib.sha1()
b = bytearray(128 * 1024)
mv = memoryview(b)
with open(filename, 'rb', buffering=0) as f:
for n in iter(lambda: f.readinto(mv), 0):
h.update(mv[:n])
return h.hexdigest()
def select_new_files(input_files, output_file):
"""
    Filter the given Excel menu files and return only the files
    that haven't been processed yet according to the SHA1 column in the output_file
    :param input_files: a list of paths to Excel menu files
    :param output_file: a path to a CSV file with existing extracted data
    :return: a list of paths to Excel files that haven't been processed yet
"""
new_files = []
try:
df = pd.read_csv(output_file)
except (FileNotFoundError, pd.errors.EmptyDataError):
print("Provided CSV files either does not exist or empty.")
return input_files
try:
existed_sha1 = df['SHA1'].unique()
except KeyError:
print("SHA1 column does not exist in the provided CSV file.")
return input_files
for fpath in input_files:
sha1 = sha1sum(fpath)
if sha1 not in existed_sha1:
new_files.append(fpath)
return new_files
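# Hedged illustration (file names are assumptions): if raw_menu_data.csv
# already records the SHA1 of menu_jan.xlsx, then
#   select_new_files(['./data/menu_jan.xlsx', './data/menu_feb.xlsx'], output_file)
# returns only ['./data/menu_feb.xlsx'].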
def main():
input_files = glob(os.path.join(input_path, '*.xls*'))
print("Files to process: %d" % len(input_files))
input_files = select_new_files(input_files, output_file)
print("New files: %d" % len(input_files))
if 0 == len(input_files):
print("There is nothing to add to the existing CSV file.")
return
with open(output_file, 'a', encoding='utf-8') as f:
for fpath in input_files:
print("\t%s" % fpath)
mex = MenuExtractor(fpath)
menu_data = mex.menus_combined
menu_data['File'] = os.path.basename(fpath)
menu_data['SHA1'] = sha1sum(fpath)
menu_data.to_csv(f, header=False, index=False)
del mex
# Add a header to the csv file
df = pd.read_csv(output_file)
df.columns = ['Food', 'Weight', 'Price', 'Type', 'Key', 'Date', 'File', 'SHA1']
df.to_csv(output_file, index=False)
print("Done.")
if __name__ == '__main__':
main()
|
[
"helpers.menu_extractor.MenuExtractor",
"hashlib.sha1",
"os.path.basename",
"pandas.read_csv",
"os.path.join"
] |
[((214, 228), 'hashlib.sha1', 'hashlib.sha1', ([], {}), '()\n', (226, 228), False, 'import hashlib\n'), ((2167, 2191), 'pandas.read_csv', 'pd.read_csv', (['output_file'], {}), '(output_file)\n', (2178, 2191), True, 'import pandas as pd\n'), ((887, 911), 'pandas.read_csv', 'pd.read_csv', (['output_file'], {}), '(output_file)\n', (898, 911), True, 'import pandas as pd\n'), ((1426, 1460), 'os.path.join', 'os.path.join', (['input_path', '"""*.xls*"""'], {}), "(input_path, '*.xls*')\n", (1438, 1460), False, 'import os\n'), ((1877, 1897), 'helpers.menu_extractor.MenuExtractor', 'MenuExtractor', (['fpath'], {}), '(fpath)\n', (1890, 1897), False, 'from helpers.menu_extractor import MenuExtractor\n'), ((1973, 1996), 'os.path.basename', 'os.path.basename', (['fpath'], {}), '(fpath)\n', (1989, 1996), False, 'import os\n')]
|
from tornado import httpserver
from tornado.ioloop import IOLoop
import tornado.web
import json
"""
This file defines the API used to obtain information about the simulation and to control avatars.
Specifically, the API provides the following requests:
    /api/v1/occupants
        Returns the list of all occupants.
    /api/v1/occupants/movements
        Returns the movements of all occupants as a list of positions [x, y].
    /api/v1/occupants/positions
        Returns the positions of all occupants as a list of positions [x, y].
    /api/v1/occupants/states
        Returns the states of all occupants as Strings.
    /api/v1/occupants/id
        GET returns the state, movement, position and FOV of one occupant.
        PUT creates an avatar with this id at the position {"x": x, "y": y} given in the request body.
    /api/v1/occupants/id/movement
        Returns the movement of one occupant given as orientation and speed.
    /api/v1/occupants/id/position
        GET returns the position of one occupant as a position [x, y].
        POST moves the avatar with this id to the position {"x": x, "y": y} given in the request body.
    /api/v1/occupants/id/state
        Returns the state of one occupant as a String.
    /api/v1/occupants/id/fov
        Returns the FOV (field of view) of one occupant as a list of positions [x, y].
Where:
id is a number with the unique_id of an occupant.
x and y are the two numbers with the grid coordinates.
"""
# Simulation model
global model
model = None
# External handlers to expand the API.
global externalHandlers
externalHandlers = []
def setModel(modelAux):
global model
if not model:
model = modelAux
class presentation(tornado.web.RequestHandler):
def get(self):
global model
response = ' Welcome to SOBA API! \n Simulation in step: {}'.format(model.NStep)
self.write(response)
class list_occupants(tornado.web.RequestHandler):
def get(self):
global model
data = model.list_occupants()
response = json.dumps(data)
self.write(response)
class movements_occupants(tornado.web.RequestHandler):
def get(self):
global model
data = model.movements_occupants()
response = json.dumps(data)
self.write(response)
class positions_occupants(tornado.web.RequestHandler):
def get(self):
global model
data = model.positions_occupants()
response = json.dumps(data)
self.write(response)
class states_occupants(tornado.web.RequestHandler):
def get(self):
global model
data = model.states_occupants()
response = json.dumps(data)
self.write(response)
class movement_occupant(tornado.web.RequestHandler):
def get(self, occupant_id):
global model
data = model.movement_occupant(occupant_id)
response = json.dumps(data)
self.write(response)
class position_occupant(tornado.web.RequestHandler):
def get(self, occupant_id):
global model
data = model.position_occupant(occupant_id)
response = json.dumps(data)
self.write(response)
def post(self, avatar_id):
global model
data = tornado.escape.json_decode(self.request.body)
x = data["x"]
y = data["y"]
pos = (int(x), int(y))
a = model.move_avatar(avatar_id, pos)
x, y = a.pos
data = {'avatar': {'id': a.unique_id, 'position': {'x': x, 'y': y}}}
response = json.dumps(data)
self.write(response)
class state_occupant(tornado.web.RequestHandler):
def get(self, occupant_id):
global model
data = model.state_occupant(occupant_id)
response = json.dumps(data)
self.write(response)
class fov_occupant(tornado.web.RequestHandler):
def get(self, occupant_id):
global model
data = model.fov_occupant(occupant_id)
response = json.dumps(data)
self.write(response)
class info_occupant(tornado.web.RequestHandler):
def get(self, occupant_id):
global model
data = model.info_occupant(occupant_id)
response = json.dumps(data)
self.write(response)
def put(self, avatar_id):
global model
data = tornado.escape.json_decode(self.request.body)
x = data["x"]
y = data["y"]
pos = (int(x), int(y))
a = model.create_avatar(avatar_id, pos)
x, y = a.pos
data = {'avatar': {'id': a.unique_id, 'position': {'x': x, 'y': y}}}
response = json.dumps(data)
self.write(response)
#self.write('Avatar with id: {}, created in pos: {} \n'.format(a.unique_id, a.pos))
#Defining application
class Application(tornado.web.Application):
global externalHandlers
def __init__(self):
internalHandlers = [
(r"/?", presentation),
(r"/api/v1/occupants?", list_occupants),
(r"/api/v1/occupants/movements?", movements_occupants),
(r"/api/v1/occupants/positions?", positions_occupants),
(r"/api/v1/occupants/states?", states_occupants),
(r"/api/v1/occupants/([0-9]+)?", info_occupant),
(r"/api/v1/occupants/([0-9]+)/movement?", movement_occupant),
(r"/api/v1/occupants/([0-9]+)/position?", position_occupant),
(r"/api/v1/occupants/([0-9]+)/state?", state_occupant),
(r"/api/v1/occupants/([0-9]+)/fov?", fov_occupant)
]
for t1 in internalHandlers:
for t2 in externalHandlers:
if t1[0]==t2[0]:
internalHandlers.remove((t1[0], t1[1]))
handlers = internalHandlers + externalHandlers
tornado.web.Application.__init__(self, handlers)
#Run server method
def runServer(port=10000):
global app
    print('Server launched on port: {}.\n'.format(port))
app = Application()
app.listen(port, address='127.0.1.1')
tornado.autoreload.start()
IOLoop.current().start()
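if __name__ == '__main__':
    # Hedged usage sketch (added for illustration): start the API standalone.
    # A real simulation must first call setModel(...) with an object that
    # implements the occupant/avatar methods used by the handlers above.
    runServer(port=10000)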
|
[
"tornado.ioloop.IOLoop.current",
"json.dumps"
] |
[((1922, 1938), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (1932, 1938), False, 'import json\n'), ((2106, 2122), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (2116, 2122), False, 'import json\n'), ((2290, 2306), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (2300, 2306), False, 'import json\n'), ((2468, 2484), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (2478, 2484), False, 'import json\n'), ((2672, 2688), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (2682, 2688), False, 'import json\n'), ((2876, 2892), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (2886, 2892), False, 'import json\n'), ((3223, 3239), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (3233, 3239), False, 'import json\n'), ((3421, 3437), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (3431, 3437), False, 'import json\n'), ((3615, 3631), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (3625, 3631), False, 'import json\n'), ((3811, 3827), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (3821, 3827), False, 'import json\n'), ((4159, 4175), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (4169, 4175), False, 'import json\n'), ((5586, 5602), 'tornado.ioloop.IOLoop.current', 'IOLoop.current', ([], {}), '()\n', (5600, 5602), False, 'from tornado.ioloop import IOLoop\n')]
|
import numpy as np
# The last dimension of box_1 and box_2 is 4: (x_center, y_center, w, h).
class IOU(object):
def __init__(self, box_1, box_2):
self.box_1_min, self.box_1_max = self.__get_box_min_and_max(box_1)
self.box_2_min, self.box_2_max = self.__get_box_min_and_max(box_2)
self.box_1_area = self.__get_box_area(box_1)
self.box_2_area = self.__get_box_area(box_2)
@staticmethod
def __get_box_min_and_max(box):
box_xy = box[..., 0:2]
box_wh = box[..., 2:4]
box_min = box_xy - box_wh / 2
box_max = box_xy + box_wh / 2
return box_min, box_max
@staticmethod
def __get_box_area(box):
return box[..., 2] * box[..., 3]
def calculate_iou(self):
intersect_min = np.maximum(self.box_1_min, self.box_2_min)
intersect_max = np.minimum(self.box_1_max, self.box_2_max)
intersect_wh = np.maximum(intersect_max - intersect_min, 0.0)
intersect_area = intersect_wh[..., 0] * intersect_wh[..., 1]
union_area = self.box_1_area + self.box_2_area - intersect_area
iou = intersect_area / union_area
return iou
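if __name__ == '__main__':
    # Hedged usage sketch (added): two unit boxes whose centres sit half a
    # width apart overlap in area 0.5, so IoU = 0.5 / (1 + 1 - 0.5) = 1/3.
    box_a = np.array([0.5, 0.5, 1.0, 1.0])
    box_b = np.array([1.0, 0.5, 1.0, 1.0])
    print(IOU(box_a, box_b).calculate_iou())  # ~0.3333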
|
[
"numpy.minimum",
"numpy.maximum"
] |
[((768, 810), 'numpy.maximum', 'np.maximum', (['self.box_1_min', 'self.box_2_min'], {}), '(self.box_1_min, self.box_2_min)\n', (778, 810), True, 'import numpy as np\n'), ((835, 877), 'numpy.minimum', 'np.minimum', (['self.box_1_max', 'self.box_2_max'], {}), '(self.box_1_max, self.box_2_max)\n', (845, 877), True, 'import numpy as np\n'), ((901, 947), 'numpy.maximum', 'np.maximum', (['(intersect_max - intersect_min)', '(0.0)'], {}), '(intersect_max - intersect_min, 0.0)\n', (911, 947), True, 'import numpy as np\n')]
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
__all__ = [
'PrivateEndpointConnectionResponse',
'PrivateEndpointResponse',
'PrivateLinkServiceConnectionStateResponse',
'RedisAccessKeysResponse',
'RedisInstanceDetailsResponse',
'RedisLinkedServerResponse',
'ScheduleEntryResponse',
'SkuResponse',
]
@pulumi.output_type
class PrivateEndpointConnectionResponse(dict):
"""
The Private Endpoint Connection resource.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "privateLinkServiceConnectionState":
suggest = "private_link_service_connection_state"
elif key == "provisioningState":
suggest = "provisioning_state"
elif key == "privateEndpoint":
suggest = "private_endpoint"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in PrivateEndpointConnectionResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
PrivateEndpointConnectionResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
PrivateEndpointConnectionResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
id: str,
name: str,
private_link_service_connection_state: 'outputs.PrivateLinkServiceConnectionStateResponse',
provisioning_state: str,
type: str,
private_endpoint: Optional['outputs.PrivateEndpointResponse'] = None):
"""
The Private Endpoint Connection resource.
:param str id: Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
:param str name: The name of the resource
:param 'PrivateLinkServiceConnectionStateResponse' private_link_service_connection_state: A collection of information about the state of the connection between service consumer and provider.
:param str provisioning_state: The provisioning state of the private endpoint connection resource.
:param str type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
:param 'PrivateEndpointResponse' private_endpoint: The resource of private end point.
"""
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "private_link_service_connection_state", private_link_service_connection_state)
pulumi.set(__self__, "provisioning_state", provisioning_state)
pulumi.set(__self__, "type", type)
if private_endpoint is not None:
pulumi.set(__self__, "private_endpoint", private_endpoint)
@property
@pulumi.getter
def id(self) -> str:
"""
Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="privateLinkServiceConnectionState")
def private_link_service_connection_state(self) -> 'outputs.PrivateLinkServiceConnectionStateResponse':
"""
A collection of information about the state of the connection between service consumer and provider.
"""
return pulumi.get(self, "private_link_service_connection_state")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
The provisioning state of the private endpoint connection resource.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="privateEndpoint")
def private_endpoint(self) -> Optional['outputs.PrivateEndpointResponse']:
"""
The resource of private end point.
"""
return pulumi.get(self, "private_endpoint")
@pulumi.output_type
class PrivateEndpointResponse(dict):
"""
The Private Endpoint resource.
"""
def __init__(__self__, *,
id: str):
"""
The Private Endpoint resource.
:param str id: The ARM identifier for Private Endpoint
"""
pulumi.set(__self__, "id", id)
@property
@pulumi.getter
def id(self) -> str:
"""
The ARM identifier for Private Endpoint
"""
return pulumi.get(self, "id")
@pulumi.output_type
class PrivateLinkServiceConnectionStateResponse(dict):
"""
A collection of information about the state of the connection between service consumer and provider.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "actionsRequired":
suggest = "actions_required"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in PrivateLinkServiceConnectionStateResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
PrivateLinkServiceConnectionStateResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
PrivateLinkServiceConnectionStateResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
actions_required: Optional[str] = None,
description: Optional[str] = None,
status: Optional[str] = None):
"""
A collection of information about the state of the connection between service consumer and provider.
:param str actions_required: A message indicating if changes on the service provider require any updates on the consumer.
:param str description: The reason for approval/rejection of the connection.
:param str status: Indicates whether the connection has been Approved/Rejected/Removed by the owner of the service.
"""
if actions_required is not None:
pulumi.set(__self__, "actions_required", actions_required)
if description is not None:
pulumi.set(__self__, "description", description)
if status is not None:
pulumi.set(__self__, "status", status)
@property
@pulumi.getter(name="actionsRequired")
def actions_required(self) -> Optional[str]:
"""
A message indicating if changes on the service provider require any updates on the consumer.
"""
return pulumi.get(self, "actions_required")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
The reason for approval/rejection of the connection.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def status(self) -> Optional[str]:
"""
Indicates whether the connection has been Approved/Rejected/Removed by the owner of the service.
"""
return pulumi.get(self, "status")
@pulumi.output_type
class RedisAccessKeysResponse(dict):
"""
Redis cache access keys.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "primaryKey":
suggest = "primary_key"
elif key == "secondaryKey":
suggest = "secondary_key"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in RedisAccessKeysResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
RedisAccessKeysResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
RedisAccessKeysResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
primary_key: str,
secondary_key: str):
"""
Redis cache access keys.
:param str primary_key: The current primary key that clients can use to authenticate with Redis cache.
:param str secondary_key: The current secondary key that clients can use to authenticate with Redis cache.
"""
pulumi.set(__self__, "primary_key", primary_key)
pulumi.set(__self__, "secondary_key", secondary_key)
@property
@pulumi.getter(name="primaryKey")
def primary_key(self) -> str:
"""
The current primary key that clients can use to authenticate with Redis cache.
"""
return pulumi.get(self, "primary_key")
@property
@pulumi.getter(name="secondaryKey")
def secondary_key(self) -> str:
"""
The current secondary key that clients can use to authenticate with Redis cache.
"""
return pulumi.get(self, "secondary_key")
@pulumi.output_type
class RedisInstanceDetailsResponse(dict):
"""
Details of single instance of redis.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "isMaster":
suggest = "is_master"
elif key == "isPrimary":
suggest = "is_primary"
elif key == "nonSslPort":
suggest = "non_ssl_port"
elif key == "shardId":
suggest = "shard_id"
elif key == "sslPort":
suggest = "ssl_port"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in RedisInstanceDetailsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
RedisInstanceDetailsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
RedisInstanceDetailsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
is_master: bool,
is_primary: bool,
non_ssl_port: int,
shard_id: int,
ssl_port: int,
zone: str):
"""
Details of single instance of redis.
:param bool is_master: Specifies whether the instance is a primary node.
:param bool is_primary: Specifies whether the instance is a primary node.
:param int non_ssl_port: If enableNonSslPort is true, provides Redis instance Non-SSL port.
:param int shard_id: If clustering is enabled, the Shard ID of Redis Instance
:param int ssl_port: Redis instance SSL port.
:param str zone: If the Cache uses availability zones, specifies availability zone where this instance is located.
"""
pulumi.set(__self__, "is_master", is_master)
pulumi.set(__self__, "is_primary", is_primary)
pulumi.set(__self__, "non_ssl_port", non_ssl_port)
pulumi.set(__self__, "shard_id", shard_id)
pulumi.set(__self__, "ssl_port", ssl_port)
pulumi.set(__self__, "zone", zone)
@property
@pulumi.getter(name="isMaster")
def is_master(self) -> bool:
"""
Specifies whether the instance is a primary node.
"""
return pulumi.get(self, "is_master")
@property
@pulumi.getter(name="isPrimary")
def is_primary(self) -> bool:
"""
Specifies whether the instance is a primary node.
"""
return pulumi.get(self, "is_primary")
@property
@pulumi.getter(name="nonSslPort")
def non_ssl_port(self) -> int:
"""
If enableNonSslPort is true, provides Redis instance Non-SSL port.
"""
return pulumi.get(self, "non_ssl_port")
@property
@pulumi.getter(name="shardId")
def shard_id(self) -> int:
"""
If clustering is enabled, the Shard ID of Redis Instance
"""
return pulumi.get(self, "shard_id")
@property
@pulumi.getter(name="sslPort")
def ssl_port(self) -> int:
"""
Redis instance SSL port.
"""
return pulumi.get(self, "ssl_port")
@property
@pulumi.getter
def zone(self) -> str:
"""
If the Cache uses availability zones, specifies availability zone where this instance is located.
"""
return pulumi.get(self, "zone")
@pulumi.output_type
class RedisLinkedServerResponse(dict):
"""
Linked server Id
"""
def __init__(__self__, *,
id: str):
"""
Linked server Id
:param str id: Linked server Id.
"""
pulumi.set(__self__, "id", id)
@property
@pulumi.getter
def id(self) -> str:
"""
Linked server Id.
"""
return pulumi.get(self, "id")
@pulumi.output_type
class ScheduleEntryResponse(dict):
"""
Patch schedule entry for a Premium Redis Cache.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "dayOfWeek":
suggest = "day_of_week"
elif key == "startHourUtc":
suggest = "start_hour_utc"
elif key == "maintenanceWindow":
suggest = "maintenance_window"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ScheduleEntryResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ScheduleEntryResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ScheduleEntryResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
day_of_week: str,
start_hour_utc: int,
maintenance_window: Optional[str] = None):
"""
Patch schedule entry for a Premium Redis Cache.
:param str day_of_week: Day of the week when a cache can be patched.
:param int start_hour_utc: Start hour after which cache patching can start.
:param str maintenance_window: ISO8601 timespan specifying how much time cache patching can take.
"""
pulumi.set(__self__, "day_of_week", day_of_week)
pulumi.set(__self__, "start_hour_utc", start_hour_utc)
if maintenance_window is not None:
pulumi.set(__self__, "maintenance_window", maintenance_window)
@property
@pulumi.getter(name="dayOfWeek")
def day_of_week(self) -> str:
"""
Day of the week when a cache can be patched.
"""
return pulumi.get(self, "day_of_week")
@property
@pulumi.getter(name="startHourUtc")
def start_hour_utc(self) -> int:
"""
Start hour after which cache patching can start.
"""
return pulumi.get(self, "start_hour_utc")
@property
@pulumi.getter(name="maintenanceWindow")
def maintenance_window(self) -> Optional[str]:
"""
ISO8601 timespan specifying how much time cache patching can take.
"""
return pulumi.get(self, "maintenance_window")
@pulumi.output_type
class SkuResponse(dict):
"""
SKU parameters supplied to the create Redis operation.
"""
def __init__(__self__, *,
capacity: int,
family: str,
name: str):
"""
SKU parameters supplied to the create Redis operation.
:param int capacity: The size of the Redis cache to deploy. Valid values: for C (Basic/Standard) family (0, 1, 2, 3, 4, 5, 6), for P (Premium) family (1, 2, 3, 4).
:param str family: The SKU family to use. Valid values: (C, P). (C = Basic/Standard, P = Premium).
:param str name: The type of Redis cache to deploy. Valid values: (Basic, Standard, Premium)
"""
pulumi.set(__self__, "capacity", capacity)
pulumi.set(__self__, "family", family)
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def capacity(self) -> int:
"""
The size of the Redis cache to deploy. Valid values: for C (Basic/Standard) family (0, 1, 2, 3, 4, 5, 6), for P (Premium) family (1, 2, 3, 4).
"""
return pulumi.get(self, "capacity")
@property
@pulumi.getter
def family(self) -> str:
"""
The SKU family to use. Valid values: (C, P). (C = Basic/Standard, P = Premium).
"""
return pulumi.get(self, "family")
@property
@pulumi.getter
def name(self) -> str:
"""
The type of Redis cache to deploy. Valid values: (Basic, Standard, Premium)
"""
return pulumi.get(self, "name")
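# Illustrative note (added; not generator output). The __getitem__/get
# overrides above only emit __key_warning's message for legacy camelCase keys
# and then defer to the plain dict lookup, so the canonical access path is the
# snake_case property, e.g.:
#   entry.day_of_week       # preferred property access
#   entry["dayOfWeek"]      # goes through __key_warning and logs a warning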
|
[
"pulumi.get",
"pulumi.getter",
"pulumi.log.warn",
"pulumi.set"
] |
[((3732, 3787), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""privateLinkServiceConnectionState"""'}), "(name='privateLinkServiceConnectionState')\n", (3745, 3787), False, 'import pulumi\n'), ((4122, 4161), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""provisioningState"""'}), "(name='provisioningState')\n", (4135, 4161), False, 'import pulumi\n'), ((4616, 4653), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""privateEndpoint"""'}), "(name='privateEndpoint')\n", (4629, 4653), False, 'import pulumi\n'), ((7174, 7211), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""actionsRequired"""'}), "(name='actionsRequired')\n", (7187, 7211), False, 'import pulumi\n'), ((9183, 9215), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""primaryKey"""'}), "(name='primaryKey')\n", (9196, 9215), False, 'import pulumi\n'), ((9428, 9462), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""secondaryKey"""'}), "(name='secondaryKey')\n", (9441, 9462), False, 'import pulumi\n'), ((11804, 11834), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""isMaster"""'}), "(name='isMaster')\n", (11817, 11834), False, 'import pulumi\n'), ((12015, 12046), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""isPrimary"""'}), "(name='isPrimary')\n", (12028, 12046), False, 'import pulumi\n'), ((12229, 12261), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""nonSslPort"""'}), "(name='nonSslPort')\n", (12242, 12261), False, 'import pulumi\n'), ((12464, 12493), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""shardId"""'}), "(name='shardId')\n", (12477, 12493), False, 'import pulumi\n'), ((12678, 12707), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""sslPort"""'}), "(name='sslPort')\n", (12691, 12707), False, 'import pulumi\n'), ((15137, 15168), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""dayOfWeek"""'}), "(name='dayOfWeek')\n", (15150, 15168), False, 'import pulumi\n'), ((15347, 15381), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""startHourUtc"""'}), "(name='startHourUtc')\n", (15360, 15381), False, 'import pulumi\n'), ((15570, 15609), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""maintenanceWindow"""'}), "(name='maintenanceWindow')\n", (15583, 15609), False, 'import pulumi\n'), ((2830, 2860), 'pulumi.set', 'pulumi.set', (['__self__', '"""id"""', 'id'], {}), "(__self__, 'id', id)\n", (2840, 2860), False, 'import pulumi\n'), ((2869, 2903), 'pulumi.set', 'pulumi.set', (['__self__', '"""name"""', 'name'], {}), "(__self__, 'name', name)\n", (2879, 2903), False, 'import pulumi\n'), ((2912, 3016), 'pulumi.set', 'pulumi.set', (['__self__', '"""private_link_service_connection_state"""', 'private_link_service_connection_state'], {}), "(__self__, 'private_link_service_connection_state',\n private_link_service_connection_state)\n", (2922, 3016), False, 'import pulumi\n'), ((3021, 3083), 'pulumi.set', 'pulumi.set', (['__self__', '"""provisioning_state"""', 'provisioning_state'], {}), "(__self__, 'provisioning_state', provisioning_state)\n", (3031, 3083), False, 'import pulumi\n'), ((3092, 3126), 'pulumi.set', 'pulumi.set', (['__self__', '"""type"""', 'type'], {}), "(__self__, 'type', type)\n", (3102, 3126), False, 'import pulumi\n'), ((3531, 3553), 'pulumi.get', 'pulumi.get', (['self', '"""id"""'], {}), "(self, 'id')\n", (3541, 3553), False, 'import pulumi\n'), ((3687, 3711), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (3697, 3711), False, 'import pulumi\n'), ((4044, 4101), 'pulumi.get', 'pulumi.get', (['self', 
'"""private_link_service_connection_state"""'], {}), "(self, 'private_link_service_connection_state')\n", (4054, 4101), False, 'import pulumi\n'), ((4318, 4356), 'pulumi.get', 'pulumi.get', (['self', '"""provisioning_state"""'], {}), "(self, 'provisioning_state')\n", (4328, 4356), False, 'import pulumi\n'), ((4571, 4595), 'pulumi.get', 'pulumi.get', (['self', '"""type"""'], {}), "(self, 'type')\n", (4581, 4595), False, 'import pulumi\n'), ((4815, 4851), 'pulumi.get', 'pulumi.get', (['self', '"""private_endpoint"""'], {}), "(self, 'private_endpoint')\n", (4825, 4851), False, 'import pulumi\n'), ((5153, 5183), 'pulumi.set', 'pulumi.set', (['__self__', '"""id"""', 'id'], {}), "(__self__, 'id', id)\n", (5163, 5183), False, 'import pulumi\n'), ((5330, 5352), 'pulumi.get', 'pulumi.get', (['self', '"""id"""'], {}), "(self, 'id')\n", (5340, 5352), False, 'import pulumi\n'), ((7401, 7437), 'pulumi.get', 'pulumi.get', (['self', '"""actions_required"""'], {}), "(self, 'actions_required')\n", (7411, 7437), False, 'import pulumi\n'), ((7616, 7647), 'pulumi.get', 'pulumi.get', (['self', '"""description"""'], {}), "(self, 'description')\n", (7626, 7647), False, 'import pulumi\n'), ((7865, 7891), 'pulumi.get', 'pulumi.get', (['self', '"""status"""'], {}), "(self, 'status')\n", (7875, 7891), False, 'import pulumi\n'), ((9053, 9101), 'pulumi.set', 'pulumi.set', (['__self__', '"""primary_key"""', 'primary_key'], {}), "(__self__, 'primary_key', primary_key)\n", (9063, 9101), False, 'import pulumi\n'), ((9110, 9162), 'pulumi.set', 'pulumi.set', (['__self__', '"""secondary_key"""', 'secondary_key'], {}), "(__self__, 'secondary_key', secondary_key)\n", (9120, 9162), False, 'import pulumi\n'), ((9376, 9407), 'pulumi.get', 'pulumi.get', (['self', '"""primary_key"""'], {}), "(self, 'primary_key')\n", (9386, 9407), False, 'import pulumi\n'), ((9627, 9660), 'pulumi.get', 'pulumi.get', (['self', '"""secondary_key"""'], {}), "(self, 'secondary_key')\n", (9637, 9660), False, 'import pulumi\n'), ((11480, 11524), 'pulumi.set', 'pulumi.set', (['__self__', '"""is_master"""', 'is_master'], {}), "(__self__, 'is_master', is_master)\n", (11490, 11524), False, 'import pulumi\n'), ((11533, 11579), 'pulumi.set', 'pulumi.set', (['__self__', '"""is_primary"""', 'is_primary'], {}), "(__self__, 'is_primary', is_primary)\n", (11543, 11579), False, 'import pulumi\n'), ((11588, 11638), 'pulumi.set', 'pulumi.set', (['__self__', '"""non_ssl_port"""', 'non_ssl_port'], {}), "(__self__, 'non_ssl_port', non_ssl_port)\n", (11598, 11638), False, 'import pulumi\n'), ((11647, 11689), 'pulumi.set', 'pulumi.set', (['__self__', '"""shard_id"""', 'shard_id'], {}), "(__self__, 'shard_id', shard_id)\n", (11657, 11689), False, 'import pulumi\n'), ((11698, 11740), 'pulumi.set', 'pulumi.set', (['__self__', '"""ssl_port"""', 'ssl_port'], {}), "(__self__, 'ssl_port', ssl_port)\n", (11708, 11740), False, 'import pulumi\n'), ((11749, 11783), 'pulumi.set', 'pulumi.set', (['__self__', '"""zone"""', 'zone'], {}), "(__self__, 'zone', zone)\n", (11759, 11783), False, 'import pulumi\n'), ((11965, 11994), 'pulumi.get', 'pulumi.get', (['self', '"""is_master"""'], {}), "(self, 'is_master')\n", (11975, 11994), False, 'import pulumi\n'), ((12178, 12208), 'pulumi.get', 'pulumi.get', (['self', '"""is_primary"""'], {}), "(self, 'is_primary')\n", (12188, 12208), False, 'import pulumi\n'), ((12411, 12443), 'pulumi.get', 'pulumi.get', (['self', '"""non_ssl_port"""'], {}), "(self, 'non_ssl_port')\n", (12421, 12443), False, 'import pulumi\n'), ((12629, 12657), 'pulumi.get', 
'pulumi.get', (['self', '"""shard_id"""'], {}), "(self, 'shard_id')\n", (12639, 12657), False, 'import pulumi\n'), ((12811, 12839), 'pulumi.get', 'pulumi.get', (['self', '"""ssl_port"""'], {}), "(self, 'ssl_port')\n", (12821, 12839), False, 'import pulumi\n'), ((13046, 13070), 'pulumi.get', 'pulumi.get', (['self', '"""zone"""'], {}), "(self, 'zone')\n", (13056, 13070), False, 'import pulumi\n'), ((13324, 13354), 'pulumi.set', 'pulumi.set', (['__self__', '"""id"""', 'id'], {}), "(__self__, 'id', id)\n", (13334, 13354), False, 'import pulumi\n'), ((13479, 13501), 'pulumi.get', 'pulumi.get', (['self', '"""id"""'], {}), "(self, 'id')\n", (13489, 13501), False, 'import pulumi\n'), ((14887, 14935), 'pulumi.set', 'pulumi.set', (['__self__', '"""day_of_week"""', 'day_of_week'], {}), "(__self__, 'day_of_week', day_of_week)\n", (14897, 14935), False, 'import pulumi\n'), ((14944, 14998), 'pulumi.set', 'pulumi.set', (['__self__', '"""start_hour_utc"""', 'start_hour_utc'], {}), "(__self__, 'start_hour_utc', start_hour_utc)\n", (14954, 14998), False, 'import pulumi\n'), ((15295, 15326), 'pulumi.get', 'pulumi.get', (['self', '"""day_of_week"""'], {}), "(self, 'day_of_week')\n", (15305, 15326), False, 'import pulumi\n'), ((15515, 15549), 'pulumi.get', 'pulumi.get', (['self', '"""start_hour_utc"""'], {}), "(self, 'start_hour_utc')\n", (15525, 15549), False, 'import pulumi\n'), ((15776, 15814), 'pulumi.get', 'pulumi.get', (['self', '"""maintenance_window"""'], {}), "(self, 'maintenance_window')\n", (15786, 15814), False, 'import pulumi\n'), ((16533, 16575), 'pulumi.set', 'pulumi.set', (['__self__', '"""capacity"""', 'capacity'], {}), "(__self__, 'capacity', capacity)\n", (16543, 16575), False, 'import pulumi\n'), ((16584, 16622), 'pulumi.set', 'pulumi.set', (['__self__', '"""family"""', 'family'], {}), "(__self__, 'family', family)\n", (16594, 16622), False, 'import pulumi\n'), ((16631, 16665), 'pulumi.set', 'pulumi.set', (['__self__', '"""name"""', 'name'], {}), "(__self__, 'name', name)\n", (16641, 16665), False, 'import pulumi\n'), ((16921, 16949), 'pulumi.get', 'pulumi.get', (['self', '"""capacity"""'], {}), "(self, 'capacity')\n", (16931, 16949), False, 'import pulumi\n'), ((17140, 17166), 'pulumi.get', 'pulumi.get', (['self', '"""family"""'], {}), "(self, 'family')\n", (17150, 17166), False, 'import pulumi\n'), ((17351, 17375), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (17361, 17375), False, 'import pulumi\n'), ((1163, 1314), 'pulumi.log.warn', 'pulumi.log.warn', (['f"""Key \'{key}\' not found in PrivateEndpointConnectionResponse. Access the value via the \'{suggest}\' property getter instead."""'], {}), '(\n f"Key \'{key}\' not found in PrivateEndpointConnectionResponse. Access the value via the \'{suggest}\' property getter instead."\n )\n', (1178, 1314), False, 'import pulumi\n'), ((3180, 3238), 'pulumi.set', 'pulumi.set', (['__self__', '"""private_endpoint"""', 'private_endpoint'], {}), "(__self__, 'private_endpoint', private_endpoint)\n", (3190, 3238), False, 'import pulumi\n'), ((5736, 5895), 'pulumi.log.warn', 'pulumi.log.warn', (['f"""Key \'{key}\' not found in PrivateLinkServiceConnectionStateResponse. Access the value via the \'{suggest}\' property getter instead."""'], {}), '(\n f"Key \'{key}\' not found in PrivateLinkServiceConnectionStateResponse. 
Access the value via the \'{suggest}\' property getter instead."\n )\n', (5751, 5895), False, 'import pulumi\n'), ((6916, 6974), 'pulumi.set', 'pulumi.set', (['__self__', '"""actions_required"""', 'actions_required'], {}), "(__self__, 'actions_required', actions_required)\n", (6926, 6974), False, 'import pulumi\n'), ((7023, 7071), 'pulumi.set', 'pulumi.set', (['__self__', '"""description"""', 'description'], {}), "(__self__, 'description', description)\n", (7033, 7071), False, 'import pulumi\n'), ((7115, 7153), 'pulumi.set', 'pulumi.set', (['__self__', '"""status"""', 'status'], {}), "(__self__, 'status', status)\n", (7125, 7153), False, 'import pulumi\n'), ((8245, 8386), 'pulumi.log.warn', 'pulumi.log.warn', (['f"""Key \'{key}\' not found in RedisAccessKeysResponse. Access the value via the \'{suggest}\' property getter instead."""'], {}), '(\n f"Key \'{key}\' not found in RedisAccessKeysResponse. Access the value via the \'{suggest}\' property getter instead."\n )\n', (8260, 8386), False, 'import pulumi\n'), ((10220, 10366), 'pulumi.log.warn', 'pulumi.log.warn', (['f"""Key \'{key}\' not found in RedisInstanceDetailsResponse. Access the value via the \'{suggest}\' property getter instead."""'], {}), '(\n f"Key \'{key}\' not found in RedisInstanceDetailsResponse. Access the value via the \'{suggest}\' property getter instead."\n )\n', (10235, 10366), False, 'import pulumi\n'), ((13960, 14099), 'pulumi.log.warn', 'pulumi.log.warn', (['f"""Key \'{key}\' not found in ScheduleEntryResponse. Access the value via the \'{suggest}\' property getter instead."""'], {}), '(\n f"Key \'{key}\' not found in ScheduleEntryResponse. Access the value via the \'{suggest}\' property getter instead."\n )\n', (13975, 14099), False, 'import pulumi\n'), ((15054, 15116), 'pulumi.set', 'pulumi.set', (['__self__', '"""maintenance_window"""', 'maintenance_window'], {}), "(__self__, 'maintenance_window', maintenance_window)\n", (15064, 15116), False, 'import pulumi\n')]
|
"""Errors encountered while executing scrape jobs."""
from datetime import timezone
from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.types import Enum
import vigorish.database as db
from vigorish.enums import DataSet
from vigorish.util.datetime_util import (
get_local_utcoffset,
localized_dt_string,
make_tzaware,
utc_now,
)
class ScrapeError(db.Base):
"""Errors encountered while executing scrape jobs."""
__tablename__ = "scrape_error"
id = Column(Integer, primary_key=True)
occurred_at = Column(DateTime, default=utc_now)
data_set = Column(Enum(DataSet), nullable=False)
error_message = Column(String, nullable=False)
fixed = Column(Boolean, default=False)
job_id = Column(Integer, ForeignKey("scrape_job.id"))
@hybrid_property
def occurred_at_str(self):
occurred_at_utc = make_tzaware(self.occurred_at, use_tz=timezone.utc, localize=False)
return localized_dt_string(occurred_at_utc, use_tz=get_local_utcoffset())
def __repr__(self):
return f"<ScrapeError job_name={self.job.name}, job_id={self.job_id}, message={self.error_message}>"
def __str__(self):
return f"{self.occurred_at_str} | {self.error_message}"
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
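# Hedged usage sketch (added): `db_session`, `job`, `exc`, and the DataSet
# member shown are assumptions standing in for objects from the surrounding
# vigorish application.
#   err = ScrapeError(data_set=DataSet.BBREF_BOXSCORES,
#                     error_message=str(exc), job_id=job.id)
#   db_session.add(err)
#   db_session.commit()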
|
[
"sqlalchemy.types.Enum",
"sqlalchemy.ForeignKey",
"sqlalchemy.Column",
"vigorish.util.datetime_util.get_local_utcoffset",
"vigorish.util.datetime_util.make_tzaware"
] |
[((572, 605), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (578, 605), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String\n'), ((624, 657), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'utc_now'}), '(DateTime, default=utc_now)\n', (630, 657), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String\n'), ((731, 761), 'sqlalchemy.Column', 'Column', (['String'], {'nullable': '(False)'}), '(String, nullable=False)\n', (737, 761), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String\n'), ((774, 804), 'sqlalchemy.Column', 'Column', (['Boolean'], {'default': '(False)'}), '(Boolean, default=False)\n', (780, 804), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String\n'), ((680, 693), 'sqlalchemy.types.Enum', 'Enum', (['DataSet'], {}), '(DataSet)\n', (684, 693), False, 'from sqlalchemy.types import Enum\n'), ((834, 861), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""scrape_job.id"""'], {}), "('scrape_job.id')\n", (844, 861), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String\n'), ((942, 1009), 'vigorish.util.datetime_util.make_tzaware', 'make_tzaware', (['self.occurred_at'], {'use_tz': 'timezone.utc', 'localize': '(False)'}), '(self.occurred_at, use_tz=timezone.utc, localize=False)\n', (954, 1009), False, 'from vigorish.util.datetime_util import get_local_utcoffset, localized_dt_string, make_tzaware, utc_now\n'), ((1069, 1090), 'vigorish.util.datetime_util.get_local_utcoffset', 'get_local_utcoffset', ([], {}), '()\n', (1088, 1090), False, 'from vigorish.util.datetime_util import get_local_utcoffset, localized_dt_string, make_tzaware, utc_now\n')]
|
'''
Function:
Algorithm implementation.
Author:
Charles
WeChat official account:
Charles的皮卡丘
'''
import cv2
import math
import numpy as np
from PIL import Image
from scipy import signal
from utils.utils import *
from scipy.ndimage import interpolation
from scipy.sparse.linalg import spsolve
from scipy.sparse import csr_matrix, spdiags
import warnings
warnings.filterwarnings("ignore")
'''pencil drawing'''
class PencilDrawing():
def __init__(self, **kwargs):
self.kernel_size_scale = kwargs.get('kernel_size_scale')
self.stroke_width = kwargs.get('stroke_width')
self.weights_color = kwargs.get('weights_color')
self.weights_gray = kwargs.get('weights_gray')
self.texture_path = kwargs.get('texture_path')
self.color_depth = kwargs.get('color_depth')
	'''public entry point: render a pencil drawing of image_path and save it'''
def draw(self, image_path, mode='gray', savename='output.jpg'):
img = cv2.imread(image_path)
if mode == 'color':
'''
img_ycbcr = cv2.cvtColor(img, cv2.COLOR_BGR2YCR_CB)
Y = img_ycbcr[:, :, 0]
img_ycbcr_new = img_ycbcr.copy()
img_ycbcr_new.flags.writeable = True
img_ycbcr_new[:, :, 0] = self.__strokeGeneration(Y) * self.__toneGeneration(Y) * 255
img_out = cv2.cvtColor(img_ycbcr_new, cv2.COLOR_YCR_CB2BGR)
img = cv2.imwrite(savename, img_out)
'''
img = Image.open(image_path)
img_ycbcr = img.convert('YCbCr')
img = np.ndarray((img.size[1], img.size[0], 3), 'u1', img_ycbcr.tobytes())
img_out = img.copy()
img_out.flags.writeable = True
			img_out[:, :, 0] = self.__strokeGeneration(img[:, :, 0]) * self.__toneGeneration(img[:, :, 0], mode='color') * 255
img_out = cv2.cvtColor(img_out, cv2.COLOR_YCR_CB2BGR)
img_out = Image.fromarray(img_out)
img_out.save(savename)
elif mode == 'gray':
img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
img_s = self.__strokeGeneration(img)
img_t = self.__toneGeneration(img)
img_out = img_s * img_t * 255
img = cv2.imwrite(savename, img_out)
else:
raise ValueError('PencilDrawing.draw unsupport mode <%s>...' % mode)
'''pencil stroke generation'''
def __strokeGeneration(self, img):
h, w = img.shape
kernel_size = int(min(w, h) * self.kernel_size_scale)
kernel_size += kernel_size % 2
# compute gradients, yielding magnitude
img_double = im2double(img)
dx = np.concatenate((np.abs(img_double[:, 0:-1]-img_double[:, 1:]), np.zeros((h, 1))), 1)
dy = np.concatenate((np.abs(img_double[0:-1, :]-img_double[1:, :]), np.zeros((1, w))), 0)
img_gradient = np.sqrt(np.power(dx, 2) + np.power(dy, 2))
# choose eight reference directions
line_segments = np.zeros((kernel_size, kernel_size, 8))
for i in [0, 1, 2, 7]:
for x in range(kernel_size):
y = round((x + 1 - kernel_size / 2) * math.tan(math.pi / 8 * i))
y = kernel_size / 2 - y
if y > 0 and y <= kernel_size:
line_segments[int(y-1), x, i] = 1
if i == 7:
line_segments[:, :, 3] = np.rot90(line_segments[:, :, 7], -1)
else:
line_segments[:, :, i+4] = np.rot90(line_segments[:, :, i], 1)
# get response maps for the reference directions
response_maps = np.zeros((h, w, 8))
for i in range(8):
response_maps[:, :, i] = signal.convolve2d(img_gradient, line_segments[:, :, i], 'same')
response_maps_maxvalueidx = response_maps.argmax(axis=-1)
# the classification is performed by selecting the maximum value among the responses in all directions
magnitude_maps = np.zeros_like(response_maps)
for i in range(8):
magnitude_maps[:, :, i] = img_gradient * (response_maps_maxvalueidx == i).astype('float')
# line shaping
stroke_maps = np.zeros_like(response_maps)
for i in range(8):
stroke_maps[:, :, i] = signal.convolve2d(magnitude_maps[:, :, i], line_segments[:, :, i], 'same')
stroke_maps = stroke_maps.sum(axis=-1)
stroke_maps = (stroke_maps - stroke_maps.min()) / (stroke_maps.max() - stroke_maps.min())
stroke_maps = (1 - stroke_maps) * self.stroke_width
return stroke_maps
'''pencil tone drawing'''
def __toneGeneration(self, img, mode=None):
height, width = img.shape
# histogram matching
img_hist_match = self.__histogramMatching(img, mode) ** self.color_depth
# get texture
texture = cv2.imread(self.texture_path)
texture = cv2.cvtColor(texture, cv2.COLOR_BGR2GRAY)[99: texture.shape[0]-100, 99: texture.shape[1]-100]
ratio = 0.2 * min(img.shape[0], img.shape[1]) / float(1024)
texture = interpolation.zoom(texture, (ratio, ratio))
texture = im2double(texture)
texture = horizontalStitch(texture, img.shape[1])
texture = verticalStitch(texture, img.shape[0])
size = img.size
nzmax = 2 * (size-1)
i = np.zeros((nzmax, 1))
j = np.zeros((nzmax, 1))
s = np.zeros((nzmax, 1))
for m in range(1, nzmax+1):
i[m-1] = int(math.ceil((m + 0.1) / 2)) - 1
j[m-1] = int(math.ceil((m - 0.1) / 2)) - 1
s[m-1] = -2 * (m % 2) + 1
dx = csr_matrix((s.T[0], (i.T[0], j.T[0])), shape=(size, size))
nzmax = 2 * (size - img.shape[1])
i = np.zeros((nzmax, 1))
j = np.zeros((nzmax, 1))
s = np.zeros((nzmax, 1))
for m in range(1, nzmax+1):
i[m-1, :] = int(math.ceil((m - 1 + 0.1) / 2) + img.shape[1] * (m % 2)) - 1
j[m-1, :] = math.ceil((m - 0.1) / 2) - 1
s[m-1, :] = -2 * (m % 2) + 1
dy = csr_matrix((s.T[0], (i.T[0], j.T[0])), shape=(size, size))
texture_sparse = spdiags(np.log(np.reshape(texture.T, (1, texture.size), order="f") + 0.01), 0, size, size)
img_hist_match1d = np.log(np.reshape(img_hist_match.T, (1, img_hist_match.size), order="f").T + 0.01)
nat = texture_sparse.T.dot(img_hist_match1d)
a = np.dot(texture_sparse.T, texture_sparse)
b = dx.T.dot(dx)
c = dy.T.dot(dy)
mat = a + 0.2 * (b + c)
beta1d = spsolve(mat, nat)
beta = np.reshape(beta1d, (img.shape[0], img.shape[1]), order="c")
tone = texture ** beta
tone = (tone - tone.min()) / (tone.max() - tone.min())
return tone
'''histogram matching'''
def __histogramMatching(self, img, mode=None):
weights = self.weights_color if mode == 'color' else self.weights_gray
# img
histogram_img = cv2.calcHist([img], [0], None, [256], [0, 256])
histogram_img.resize(histogram_img.size)
histogram_img /= histogram_img.sum()
histogram_img_cdf = np.cumsum(histogram_img)
# natural
histogram_natural = np.zeros_like(histogram_img)
for x in range(256):
histogram_natural[x] = weights[0] * Laplace(x) + weights[1] * Uniform(x) + weights[2] * Gaussian(x)
histogram_natural /= histogram_natural.sum()
histogram_natural_cdf = np.cumsum(histogram_natural)
# do the histogram matching
img_hist_match = np.zeros_like(img)
for x in range(img.shape[0]):
for y in range(img.shape[1]):
value = histogram_img_cdf[img[x, y]]
img_hist_match[x, y] = (np.abs(histogram_natural_cdf-value)).argmin()
img_hist_match = np.true_divide(img_hist_match, 255)
return img_hist_match
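if __name__ == '__main__':
    # Hedged usage sketch (added): the kwargs mirror those read in __init__;
    # all numeric values and the texture path here are illustrative guesses,
    # not the project's tuned defaults.
    pencil = PencilDrawing(kernel_size_scale=1/40, stroke_width=1, weights_color=[62, 30, 5], weights_gray=[76, 22, 2], texture_path='textures/default.jpg', color_depth=1)
    pencil.draw('input.jpg', mode='gray', savename='output.jpg')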
|
[
"numpy.abs",
"numpy.rot90",
"numpy.zeros_like",
"numpy.true_divide",
"scipy.signal.convolve2d",
"cv2.cvtColor",
"cv2.imwrite",
"numpy.power",
"scipy.ndimage.interpolation.zoom",
"numpy.cumsum",
"numpy.reshape",
"scipy.sparse.linalg.spsolve",
"math.ceil",
"cv2.calcHist",
"scipy.sparse.csr_matrix",
"numpy.dot",
"warnings.filterwarnings",
"math.tan",
"numpy.zeros",
"PIL.Image.open",
"cv2.imread",
"PIL.Image.fromarray"
] |
[((338, 371), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (361, 371), False, 'import warnings\n'), ((850, 872), 'cv2.imread', 'cv2.imread', (['image_path'], {}), '(image_path)\n', (860, 872), False, 'import cv2\n'), ((2535, 2574), 'numpy.zeros', 'np.zeros', (['(kernel_size, kernel_size, 8)'], {}), '((kernel_size, kernel_size, 8))\n', (2543, 2574), True, 'import numpy as np\n'), ((3032, 3051), 'numpy.zeros', 'np.zeros', (['(h, w, 8)'], {}), '((h, w, 8))\n', (3040, 3051), True, 'import numpy as np\n'), ((3349, 3377), 'numpy.zeros_like', 'np.zeros_like', (['response_maps'], {}), '(response_maps)\n', (3362, 3377), True, 'import numpy as np\n'), ((3525, 3553), 'numpy.zeros_like', 'np.zeros_like', (['response_maps'], {}), '(response_maps)\n', (3538, 3553), True, 'import numpy as np\n'), ((4110, 4139), 'cv2.imread', 'cv2.imread', (['self.texture_path'], {}), '(self.texture_path)\n', (4120, 4139), False, 'import cv2\n'), ((4320, 4363), 'scipy.ndimage.interpolation.zoom', 'interpolation.zoom', (['texture', '(ratio, ratio)'], {}), '(texture, (ratio, ratio))\n', (4338, 4363), False, 'from scipy.ndimage import interpolation\n'), ((4544, 4564), 'numpy.zeros', 'np.zeros', (['(nzmax, 1)'], {}), '((nzmax, 1))\n', (4552, 4564), True, 'import numpy as np\n'), ((4571, 4591), 'numpy.zeros', 'np.zeros', (['(nzmax, 1)'], {}), '((nzmax, 1))\n', (4579, 4591), True, 'import numpy as np\n'), ((4598, 4618), 'numpy.zeros', 'np.zeros', (['(nzmax, 1)'], {}), '((nzmax, 1))\n', (4606, 4618), True, 'import numpy as np\n'), ((4777, 4835), 'scipy.sparse.csr_matrix', 'csr_matrix', (['(s.T[0], (i.T[0], j.T[0]))'], {'shape': '(size, size)'}), '((s.T[0], (i.T[0], j.T[0])), shape=(size, size))\n', (4787, 4835), False, 'from scipy.sparse import csr_matrix, spdiags\n'), ((4878, 4898), 'numpy.zeros', 'np.zeros', (['(nzmax, 1)'], {}), '((nzmax, 1))\n', (4886, 4898), True, 'import numpy as np\n'), ((4905, 4925), 'numpy.zeros', 'np.zeros', (['(nzmax, 1)'], {}), '((nzmax, 1))\n', (4913, 4925), True, 'import numpy as np\n'), ((4932, 4952), 'numpy.zeros', 'np.zeros', (['(nzmax, 1)'], {}), '((nzmax, 1))\n', (4940, 4952), True, 'import numpy as np\n'), ((5144, 5202), 'scipy.sparse.csr_matrix', 'csr_matrix', (['(s.T[0], (i.T[0], j.T[0]))'], {'shape': '(size, size)'}), '((s.T[0], (i.T[0], j.T[0])), shape=(size, size))\n', (5154, 5202), False, 'from scipy.sparse import csr_matrix, spdiags\n'), ((5470, 5510), 'numpy.dot', 'np.dot', (['texture_sparse.T', 'texture_sparse'], {}), '(texture_sparse.T, texture_sparse)\n', (5476, 5510), True, 'import numpy as np\n'), ((5586, 5603), 'scipy.sparse.linalg.spsolve', 'spsolve', (['mat', 'nat'], {}), '(mat, nat)\n', (5593, 5603), False, 'from scipy.sparse.linalg import spsolve\n'), ((5613, 5672), 'numpy.reshape', 'np.reshape', (['beta1d', '(img.shape[0], img.shape[1])'], {'order': '"""c"""'}), "(beta1d, (img.shape[0], img.shape[1]), order='c')\n", (5623, 5672), True, 'import numpy as np\n'), ((5942, 5989), 'cv2.calcHist', 'cv2.calcHist', (['[img]', '[0]', 'None', '[256]', '[0, 256]'], {}), '([img], [0], None, [256], [0, 256])\n', (5954, 5989), False, 'import cv2\n'), ((6094, 6118), 'numpy.cumsum', 'np.cumsum', (['histogram_img'], {}), '(histogram_img)\n', (6103, 6118), True, 'import numpy as np\n'), ((6153, 6181), 'numpy.zeros_like', 'np.zeros_like', (['histogram_img'], {}), '(histogram_img)\n', (6166, 6181), True, 'import numpy as np\n'), ((6381, 6409), 'numpy.cumsum', 'np.cumsum', (['histogram_natural'], {}), '(histogram_natural)\n', (6390, 6409), True, 
'import numpy as np\n'), ((6459, 6477), 'numpy.zeros_like', 'np.zeros_like', (['img'], {}), '(img)\n', (6472, 6477), True, 'import numpy as np\n'), ((6677, 6712), 'numpy.true_divide', 'np.true_divide', (['img_hist_match', '(255)'], {}), '(img_hist_match, 255)\n', (6691, 6712), True, 'import numpy as np\n'), ((1266, 1288), 'PIL.Image.open', 'Image.open', (['image_path'], {}), '(image_path)\n', (1276, 1288), False, 'from PIL import Image\n'), ((1578, 1621), 'cv2.cvtColor', 'cv2.cvtColor', (['img_out', 'cv2.COLOR_YCR_CB2BGR'], {}), '(img_out, cv2.COLOR_YCR_CB2BGR)\n', (1590, 1621), False, 'import cv2\n'), ((1635, 1659), 'PIL.Image.fromarray', 'Image.fromarray', (['img_out'], {}), '(img_out)\n', (1650, 1659), False, 'from PIL import Image\n'), ((3101, 3164), 'scipy.signal.convolve2d', 'signal.convolve2d', (['img_gradient', 'line_segments[:, :, i]', '"""same"""'], {}), "(img_gradient, line_segments[:, :, i], 'same')\n", (3118, 3164), False, 'from scipy import signal\n'), ((3601, 3675), 'scipy.signal.convolve2d', 'signal.convolve2d', (['magnitude_maps[:, :, i]', 'line_segments[:, :, i]', '"""same"""'], {}), "(magnitude_maps[:, :, i], line_segments[:, :, i], 'same')\n", (3618, 3675), False, 'from scipy import signal\n'), ((4152, 4193), 'cv2.cvtColor', 'cv2.cvtColor', (['texture', 'cv2.COLOR_BGR2GRAY'], {}), '(texture, cv2.COLOR_BGR2GRAY)\n', (4164, 4193), False, 'import cv2\n'), ((1718, 1755), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_BGR2GRAY'], {}), '(img, cv2.COLOR_BGR2GRAY)\n', (1730, 1755), False, 'import cv2\n'), ((1876, 1906), 'cv2.imwrite', 'cv2.imwrite', (['savename', 'img_out'], {}), '(savename, img_out)\n', (1887, 1906), False, 'import cv2\n'), ((2258, 2305), 'numpy.abs', 'np.abs', (['(img_double[:, 0:-1] - img_double[:, 1:])'], {}), '(img_double[:, 0:-1] - img_double[:, 1:])\n', (2264, 2305), True, 'import numpy as np\n'), ((2305, 2321), 'numpy.zeros', 'np.zeros', (['(h, 1)'], {}), '((h, 1))\n', (2313, 2321), True, 'import numpy as np\n'), ((2350, 2397), 'numpy.abs', 'np.abs', (['(img_double[0:-1, :] - img_double[1:, :])'], {}), '(img_double[0:-1, :] - img_double[1:, :])\n', (2356, 2397), True, 'import numpy as np\n'), ((2397, 2413), 'numpy.zeros', 'np.zeros', (['(1, w)'], {}), '((1, w))\n', (2405, 2413), True, 'import numpy as np\n'), ((2444, 2459), 'numpy.power', 'np.power', (['dx', '(2)'], {}), '(dx, 2)\n', (2452, 2459), True, 'import numpy as np\n'), ((2462, 2477), 'numpy.power', 'np.power', (['dy', '(2)'], {}), '(dy, 2)\n', (2470, 2477), True, 'import numpy as np\n'), ((5076, 5100), 'math.ceil', 'math.ceil', (['((m - 0.1) / 2)'], {}), '((m - 0.1) / 2)\n', (5085, 5100), False, 'import math\n'), ((2848, 2884), 'numpy.rot90', 'np.rot90', (['line_segments[:, :, 7]', '(-1)'], {}), '(line_segments[:, :, 7], -1)\n', (2856, 2884), True, 'import numpy as np\n'), ((2927, 2962), 'numpy.rot90', 'np.rot90', (['line_segments[:, :, i]', '(1)'], {}), '(line_segments[:, :, i], 1)\n', (2935, 2962), True, 'import numpy as np\n'), ((4665, 4689), 'math.ceil', 'math.ceil', (['((m + 0.1) / 2)'], {}), '((m + 0.1) / 2)\n', (4674, 4689), False, 'import math\n'), ((4711, 4735), 'math.ceil', 'math.ceil', (['((m - 0.1) / 2)'], {}), '((m - 0.1) / 2)\n', (4720, 4735), False, 'import math\n'), ((5237, 5288), 'numpy.reshape', 'np.reshape', (['texture.T', '(1, texture.size)'], {'order': '"""f"""'}), "(texture.T, (1, texture.size), order='f')\n", (5247, 5288), True, 'import numpy as np\n'), ((5341, 5406), 'numpy.reshape', 'np.reshape', (['img_hist_match.T', '(1, img_hist_match.size)'], {'order': 
'"""f"""'}), "(img_hist_match.T, (1, img_hist_match.size), order='f')\n", (5351, 5406), True, 'import numpy as np\n'), ((2674, 2699), 'math.tan', 'math.tan', (['(math.pi / 8 * i)'], {}), '(math.pi / 8 * i)\n', (2682, 2699), False, 'import math\n'), ((5002, 5030), 'math.ceil', 'math.ceil', (['((m - 1 + 0.1) / 2)'], {}), '((m - 1 + 0.1) / 2)\n', (5011, 5030), False, 'import math\n'), ((6612, 6649), 'numpy.abs', 'np.abs', (['(histogram_natural_cdf - value)'], {}), '(histogram_natural_cdf - value)\n', (6618, 6649), True, 'import numpy as np\n')]
|
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('admin/', admin.site.urls),
path('_nested_admin/', include('nested_admin.urls')),
path('', include('home.urls')),
path('blog/', include('blog.urls')),
path('quiz/', include('quiz.urls')),
]
if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
[
"django.conf.urls.static.static",
"django.urls.path",
"django.urls.include"
] |
[((168, 199), 'django.urls.path', 'path', (['"""admin/"""', 'admin.site.urls'], {}), "('admin/', admin.site.urls)\n", (172, 199), False, 'from django.urls import path, include\n'), ((424, 485), 'django.conf.urls.static.static', 'static', (['settings.MEDIA_URL'], {'document_root': 'settings.MEDIA_ROOT'}), '(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\n', (430, 485), False, 'from django.conf.urls.static import static\n'), ((228, 256), 'django.urls.include', 'include', (['"""nested_admin.urls"""'], {}), "('nested_admin.urls')\n", (235, 256), False, 'from django.urls import path, include\n'), ((272, 292), 'django.urls.include', 'include', (['"""home.urls"""'], {}), "('home.urls')\n", (279, 292), False, 'from django.urls import path, include\n'), ((313, 333), 'django.urls.include', 'include', (['"""blog.urls"""'], {}), "('blog.urls')\n", (320, 333), False, 'from django.urls import path, include\n'), ((354, 374), 'django.urls.include', 'include', (['"""quiz.urls"""'], {}), "('quiz.urls')\n", (361, 374), False, 'from django.urls import path, include\n')]
|
import os
from tt_web import utils
from tt_web.tests import helpers as web_helpers
from .. import service
from .. import operations
class BaseTests(web_helpers.BaseTests):
def create_application(self):
return service.create_application(get_config())
async def clean_environment(self, app=None):
await operations.clean_database()
def get_config():
config_path = os.path.join(os.path.dirname(__file__), 'fixtures', 'config.json')
return utils.load_config(config_path)
|
[
"os.path.dirname",
"tt_web.utils.load_config"
] |
[((475, 505), 'tt_web.utils.load_config', 'utils.load_config', (['config_path'], {}), '(config_path)\n', (492, 505), False, 'from tt_web import utils\n'), ((410, 435), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (425, 435), False, 'import os\n')]
|
"""Uses fedelemflowlist analysis functions to perform and export basic analysis."""
import fedelemflowlist
from fedelemflowlist.analysis.flow_list_analysis import count_flows_by_class,\
count_flowables_by_class, list_contexts
from fedelemflowlist.globals import outputpath
if __name__ == '__main__':
flowlist = fedelemflowlist.get_flows()
preferred_flows = flowlist[flowlist['Preferred'] == 1]
all_flows_counts = count_flows_by_class(flowlist)
all_flows_counts.to_csv(outputpath + 'all_flows_counts.csv', index=False)
flowable_counts = count_flowables_by_class(flowlist)
flowable_counts.to_csv(outputpath + 'flowable_counts.csv', index=False)
contexts = list_contexts(flowlist)
contexts.to_csv(outputpath + 'all_contexts.csv', index=False)
preferred_contexts = list_contexts(preferred_flows)
preferred_contexts.to_csv(outputpath + 'preferred_contexts.csv', index=False)
|
[
"fedelemflowlist.get_flows",
"fedelemflowlist.analysis.flow_list_analysis.count_flows_by_class",
"fedelemflowlist.analysis.flow_list_analysis.list_contexts",
"fedelemflowlist.analysis.flow_list_analysis.count_flowables_by_class"
] |
[((320, 347), 'fedelemflowlist.get_flows', 'fedelemflowlist.get_flows', ([], {}), '()\n', (345, 347), False, 'import fedelemflowlist\n'), ((431, 461), 'fedelemflowlist.analysis.flow_list_analysis.count_flows_by_class', 'count_flows_by_class', (['flowlist'], {}), '(flowlist)\n', (451, 461), False, 'from fedelemflowlist.analysis.flow_list_analysis import count_flows_by_class, count_flowables_by_class, list_contexts\n'), ((563, 597), 'fedelemflowlist.analysis.flow_list_analysis.count_flowables_by_class', 'count_flowables_by_class', (['flowlist'], {}), '(flowlist)\n', (587, 597), False, 'from fedelemflowlist.analysis.flow_list_analysis import count_flows_by_class, count_flowables_by_class, list_contexts\n'), ((690, 713), 'fedelemflowlist.analysis.flow_list_analysis.list_contexts', 'list_contexts', (['flowlist'], {}), '(flowlist)\n', (703, 713), False, 'from fedelemflowlist.analysis.flow_list_analysis import count_flows_by_class, count_flowables_by_class, list_contexts\n'), ((806, 836), 'fedelemflowlist.analysis.flow_list_analysis.list_contexts', 'list_contexts', (['preferred_flows'], {}), '(preferred_flows)\n', (819, 836), False, 'from fedelemflowlist.analysis.flow_list_analysis import count_flows_by_class, count_flowables_by_class, list_contexts\n')]
|
import pandas as pd
df5 = pd.read_csv('D:/data/final_data_5.csv')
df6 = pd.read_csv('D:/data/final_data_6.csv')
df7 = pd.read_csv('D:/data/final_data_7.csv')
df8 = pd.read_csv('D:/data/final_data_8.csv')
df9 = pd.read_csv('D:/data/final_data_9.csv')
df10 = pd.read_csv('D:/data/final_data_10.csv')
counts = {}
for key in df10['LOST'].tolist():
    counts[key] = counts.get(key, 0) + 1
print('Overall churn rate on server A5')
print("%.2f%%" % (100 * counts[1] / (counts[0] + counts[1])))
# print('------------')
# ACT_lists=['isparty', 'dyLMZ', 'dyXMSL', 'isXMSL', 'dyLYQ','isLYQ', 'dyKTT', 'isKTT', 'dyXMHJ', 'isXMHJ', 'dySYZC', 'isSYZC','dyPTY', 'isPTY', 'dyFBJL', 'isFBJL', 'dyFBRH', 'isFBRH', 'dybanggong','dyfee', 'isfee', 'dyForge_time', 'dyrate', 'dykilltimes','dykilledtimes']
# for ACT in ACT_lists:
#     dict = {}
#     for key in df6[df6[ACT] == 0]['LOST'].tolist():
#         dict[key] = dict.get(key, 0) + 1
#     if dict[1] == dict[0] + dict[1]:
#         print('Everyone took part in this activity')
#     else:
#         print('Churn rate for players who never took part in', ACT)
#         print(dict[1]/(dict[0]+dict[1]))
#
#     dict = {}
#     for key in df6[df6[ACT] == 1]['LOST'].tolist():
#         dict[key] = dict.get(key, 0) + 1
#     if dict[1] == dict[0] + dict[1]:
#         print('Everyone took part in this activity')
#     else:
#         print('Churn rate for players who took part in', ACT)
#         print(dict[1]/(dict[0]+dict[1]))
#     print('------------')
print('------------')
ACT_lists=['isparty', 'isXMSL','isLYQ', 'isKTT','isXMHJ','isSYZC','isPTY','isFBJL','isFBRH','dyLMZ', 'dyXMSL', 'dyLYQ', 'dyKTT', 'dyXMHJ', 'dySYZC', 'dyPTY', 'dyFBJL', 'dyFBRH', 'dybanggong','dyfee', 'isfee', 'dyForge_time', 'dyrate', 'dykilltimes','dykilledtimes']
for ACT in ACT_lists:
    # Churn among players who never took part in the activity.
    counts = {}
    for key in df5[df5[ACT] == 0]['LOST'].tolist():
        counts[key] = counts.get(key, 0) + 1
    total = counts.get(0, 0) + counts.get(1, 0)
    if total == 0:
        print('Everyone took part in activity', ACT)
    else:
        print('Churn rate for players who never took part in', ACT)
        print("%.2f%%" % (100 * counts.get(1, 0) / total))
    # Churn among players who did take part in the activity.
    counts = {}
    for key in df5[df5[ACT] == 1]['LOST'].tolist():
        counts[key] = counts.get(key, 0) + 1
    total = counts.get(0, 0) + counts.get(1, 0)
    if total == 0:
        print('No one took part in activity', ACT)
    else:
        print('Churn rate for players who took part in', ACT)
        print("%.2f%%" % (100 * counts.get(1, 0) / total))
    print('------------')
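# Hedged alternative sketch (added): LOST is a 0/1 flag, so its mean over a
# filtered group is the churn fraction; this reproduces the loops above.
def churn_rate(df, flag_col, flag_value):
    lost = df.loc[df[flag_col] == flag_value, 'LOST']
    return 100 * lost.mean() if len(lost) else None
# Example: churn_rate(df5, 'isparty', 0) -> churn % among non-participants.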
|
[
"pandas.read_csv"
] |
[((27, 66), 'pandas.read_csv', 'pd.read_csv', (['"""D:/data/final_data_5.csv"""'], {}), "('D:/data/final_data_5.csv')\n", (38, 66), True, 'import pandas as pd\n'), ((74, 113), 'pandas.read_csv', 'pd.read_csv', (['"""D:/data/final_data_6.csv"""'], {}), "('D:/data/final_data_6.csv')\n", (85, 113), True, 'import pandas as pd\n'), ((121, 160), 'pandas.read_csv', 'pd.read_csv', (['"""D:/data/final_data_7.csv"""'], {}), "('D:/data/final_data_7.csv')\n", (132, 160), True, 'import pandas as pd\n'), ((168, 207), 'pandas.read_csv', 'pd.read_csv', (['"""D:/data/final_data_8.csv"""'], {}), "('D:/data/final_data_8.csv')\n", (179, 207), True, 'import pandas as pd\n'), ((215, 254), 'pandas.read_csv', 'pd.read_csv', (['"""D:/data/final_data_9.csv"""'], {}), "('D:/data/final_data_9.csv')\n", (226, 254), True, 'import pandas as pd\n'), ((263, 303), 'pandas.read_csv', 'pd.read_csv', (['"""D:/data/final_data_10.csv"""'], {}), "('D:/data/final_data_10.csv')\n", (274, 303), True, 'import pandas as pd\n')]
|
import os
import glob
import re
import audiomate
from audiomate.corpus import assets
from audiomate.corpus import subset
from . import base
LABEL_PATTERN = re.compile(r'(.*)_\d')
class AEDReader(base.CorpusReader):
"""
Reader for the Acoustic Event Dataset.
.. seealso::
`AED <https://data.vision.ee.ethz.ch/cvl/ae_dataset/>`_
Download page
"""
@classmethod
def type(cls):
return 'aed'
def _check_for_missing_files(self, path):
return []
def _load(self, path):
corpus = audiomate.Corpus(path=path)
test_folder = os.path.join(path, 'test')
train_folder = os.path.join(path, 'train')
test_utterance_ids = AEDReader.load_folder(test_folder, corpus)
train_utterance_ids = AEDReader.load_folder(train_folder, corpus)
test_filter = subset.MatchingUtteranceIdxFilter(utterance_idxs=test_utterance_ids)
train_filter = subset.MatchingUtteranceIdxFilter(utterance_idxs=train_utterance_ids)
test_subset = subset.Subview(corpus, filter_criteria=[test_filter])
train_subset = subset.Subview(corpus, filter_criteria=[train_filter])
corpus.import_subview('test', test_subset)
corpus.import_subview('train', train_subset)
return corpus
@staticmethod
def load_folder(path, corpus):
utterance_ids = set()
for wav_path in glob.glob(os.path.join(path, '**/*.wav'), recursive=True):
basename = os.path.splitext(os.path.basename(wav_path))[0]
match = LABEL_PATTERN.match(basename)
if match is not None:
label = match.group(1)
corpus.new_file(wav_path, basename)
utt = corpus.new_utterance(basename, basename)
utt.set_label_list(assets.LabelList.create_single(label, audiomate.corpus.LL_SOUND_CLASS))
utterance_ids.add(basename)
return utterance_ids
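# Hedged usage sketch (not part of the reader; assumes base.CorpusReader
# exposes a public load() that delegates to _load()):
#
#   corpus = AEDReader().load('/path/to/ae_dataset')
#   train = corpus.subviews['train']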
|
[
"audiomate.corpus.subset.MatchingUtteranceIdxFilter",
"audiomate.corpus.subset.Subview",
"audiomate.corpus.assets.LabelList.create_single",
"os.path.basename",
"audiomate.Corpus",
"os.path.join",
"re.compile"
] |
[((158, 180), 're.compile', 're.compile', (['"""(.*)_\\\\d"""'], {}), "('(.*)_\\\\d')\n", (168, 180), False, 'import re\n'), ((552, 579), 'audiomate.Corpus', 'audiomate.Corpus', ([], {'path': 'path'}), '(path=path)\n', (568, 579), False, 'import audiomate\n'), ((603, 629), 'os.path.join', 'os.path.join', (['path', '"""test"""'], {}), "(path, 'test')\n", (615, 629), False, 'import os\n'), ((653, 680), 'os.path.join', 'os.path.join', (['path', '"""train"""'], {}), "(path, 'train')\n", (665, 680), False, 'import os\n'), ((851, 919), 'audiomate.corpus.subset.MatchingUtteranceIdxFilter', 'subset.MatchingUtteranceIdxFilter', ([], {'utterance_idxs': 'test_utterance_ids'}), '(utterance_idxs=test_utterance_ids)\n', (884, 919), False, 'from audiomate.corpus import subset\n'), ((943, 1012), 'audiomate.corpus.subset.MatchingUtteranceIdxFilter', 'subset.MatchingUtteranceIdxFilter', ([], {'utterance_idxs': 'train_utterance_ids'}), '(utterance_idxs=train_utterance_ids)\n', (976, 1012), False, 'from audiomate.corpus import subset\n'), ((1036, 1089), 'audiomate.corpus.subset.Subview', 'subset.Subview', (['corpus'], {'filter_criteria': '[test_filter]'}), '(corpus, filter_criteria=[test_filter])\n', (1050, 1089), False, 'from audiomate.corpus import subset\n'), ((1113, 1167), 'audiomate.corpus.subset.Subview', 'subset.Subview', (['corpus'], {'filter_criteria': '[train_filter]'}), '(corpus, filter_criteria=[train_filter])\n', (1127, 1167), False, 'from audiomate.corpus import subset\n'), ((1415, 1445), 'os.path.join', 'os.path.join', (['path', '"""**/*.wav"""'], {}), "(path, '**/*.wav')\n", (1427, 1445), False, 'import os\n'), ((1504, 1530), 'os.path.basename', 'os.path.basename', (['wav_path'], {}), '(wav_path)\n', (1520, 1530), False, 'import os\n'), ((1811, 1881), 'audiomate.corpus.assets.LabelList.create_single', 'assets.LabelList.create_single', (['label', 'audiomate.corpus.LL_SOUND_CLASS'], {}), '(label, audiomate.corpus.LL_SOUND_CLASS)\n', (1841, 1881), False, 'from audiomate.corpus import assets\n')]
|
import time
import sys
import os
hpcAccount = 'your_hpc_account_here'
## The following lines indicate the order of the command line arguments that need to be supplied to this script.
# Check that all eleven command line arguments were provided
if len(sys.argv) > 11:
inDir = sys.argv[1] # Input directory in which to search for parameter rasters
taufdr = sys.argv[2] # Flow direction grid in tauDEM format
taufac = sys.argv[3] # Flow accumulation grid in tauDEM format
workDir = sys.argv[4] # Working directory to save intermediate files
outDir = sys.argv[5] # Output directory to save CPGs
logDir = sys.argv[6] # Directory to save slurm log files
cores = sys.argv[7] # Number of cores to use for each slurm job
accumThresh = sys.argv[8] # Number of cells in flow accumulation grid below which CPG will be set to no data
overwrite = sys.argv[9] # Whether to overwrite existing CPGs
deleteTemp = sys.argv[10] # Whether to delete temporary files
email = sys.argv[11] # Email address to send updates to
else:
    print('Missing command line arguments.')
sys.exit(1)
covList = [] #Initialize list of parameter grids
if os.path.isdir(inDir):
#Get all parameter grid files in directory
for path, subdirs, files in os.walk(inDir):
for name in files:
#Check if file is .tif, and if so add it to parameter list
if os.path.splitext(name)[1] == ".tif":
covList.append(os.path.join(path, name))
elif os.path.isfile(inDir):
#Supplied path is a single parameter grid file
covList.append(inDir)
else:
print("Invalid parameter grid directory")
print("The following parameter grids were located:")
print(*covList, sep='\n')
for cov in covList: #Iterate through the parameter grids
covname = os.path.splitext(os.path.basename(cov))[0] #Get the name of the parameter
#Create batch job which runs python script
jobfile = os.path.join(workDir, "{0}.slurm".format(str(covname))) # Create path to slurm job file
with open(jobfile, 'w+') as f:
#Write slurm job details
f.writelines("#!/bin/bash\n")
f.writelines("#SBATCH --job-name={0}\n".format(covname)) # set the name of the job
f.writelines("#SBATCH -c 1\n") # cpus per task
f.writelines("#SBATCH -n {0}\n".format(cores)) # number of tasks
f.writelines("#SBATCH --tasks-per-node=20\n") # Set number of tasks per node
f.writelines("#SBATCH -o {0}/slurm-%A.out\n".format(logDir)) # Set log file name
f.writelines("#SBATCH -p normal\n") # the partition you want to use, for this case prod is best
f.writelines("#SBATCH --account={0}\n".format(hpcAccount)) # your account
f.writelines("#SBATCH --time=01:00:00\n") # Overestimated guess at time
f.writelines("#SBATCH --mem=128000\n") #memory in MB
f.writelines("#SBATCH --mail-type=ALL\n") # Send email only for all events
f.writelines("#SBATCH --mail-user={0}\n".format(email))
f.writelines("#SBATCH --exclusive\n") # Require exclusive use of nodes
#Set up python environment for job
f.writelines("module load taudem/5.3.8\n") # load TauDEM
f.writelines("source activate fcpgtools\n") # activate the correct Python environment, you will need to build this using Anaconda.
#Run the Python script
f.writelines("python -u ./makeFCPG.py {0} {1} {2} {3} {4} {5} {6} {7} {8}\n".format(cov, taufdr, taufac, workDir, outDir, cores, accumThresh, overwrite, deleteTemp))
print("Launching batch job for: " + str(covname))
os.system("sbatch {0}".format(jobfile)) #Send command to console
time.sleep(5) #Wait between submitting jobs
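# Hedged usage sketch (not part of the script; 'batchFCPG.py' stands in for
# whatever this file is named). The eleven positional arguments must follow
# the sys.argv order documented at the top:
#
#   python batchFCPG.py /in/params fdr.tif fac.tif /work /out /logs \
#       20 1000 False True [email protected]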
|
[
"os.path.basename",
"os.path.isdir",
"os.walk",
"time.sleep",
"os.path.isfile",
"os.path.splitext",
"os.path.join",
"sys.exit"
] |
[((1139, 1159), 'os.path.isdir', 'os.path.isdir', (['inDir'], {}), '(inDir)\n', (1152, 1159), False, 'import os\n'), ((1073, 1084), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1081, 1084), False, 'import sys\n'), ((1240, 1254), 'os.walk', 'os.walk', (['inDir'], {}), '(inDir)\n', (1247, 1254), False, 'import os\n'), ((1472, 1493), 'os.path.isfile', 'os.path.isfile', (['inDir'], {}), '(inDir)\n', (1486, 1493), False, 'import os\n'), ((3656, 3669), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (3666, 3669), False, 'import time\n'), ((1794, 1815), 'os.path.basename', 'os.path.basename', (['cov'], {}), '(cov)\n', (1810, 1815), False, 'import os\n'), ((1369, 1391), 'os.path.splitext', 'os.path.splitext', (['name'], {}), '(name)\n', (1385, 1391), False, 'import os\n'), ((1441, 1465), 'os.path.join', 'os.path.join', (['path', 'name'], {}), '(path, name)\n', (1453, 1465), False, 'import os\n')]
|
from django.contrib import admin
from django.urls import path, include
from rest_framework.routers import DefaultRouter
from v1.shop.urls import router as shop_router
urlpatterns = [
path('admin/', admin.site.urls),
path('auth/', include('djoser.urls')),
path('auth/', include('djoser.urls.jwt')),
]
router = DefaultRouter(trailing_slash=False)
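# Merge the shop app's viewset routes into the project-level router.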
router.registry.extend(shop_router.registry)
urlpatterns += router.urls
# urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
# urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
[
"django.urls.path",
"rest_framework.routers.DefaultRouter",
"django.urls.include"
] |
[((325, 360), 'rest_framework.routers.DefaultRouter', 'DefaultRouter', ([], {'trailing_slash': '(False)'}), '(trailing_slash=False)\n', (338, 360), False, 'from rest_framework.routers import DefaultRouter\n'), ((190, 221), 'django.urls.path', 'path', (['"""admin/"""', 'admin.site.urls'], {}), "('admin/', admin.site.urls)\n", (194, 221), False, 'from django.urls import path, include\n'), ((241, 263), 'django.urls.include', 'include', (['"""djoser.urls"""'], {}), "('djoser.urls')\n", (248, 263), False, 'from django.urls import path, include\n'), ((284, 310), 'django.urls.include', 'include', (['"""djoser.urls.jwt"""'], {}), "('djoser.urls.jwt')\n", (291, 310), False, 'from django.urls import path, include\n')]
|
import abc
import inspect
import itertools
import math
import os
import platform
import statistics
from dataclasses import dataclass, field
from enum import Enum
from pathlib import Path
from textwrap import dedent
from typing import (
Collection,
Dict,
Generator,
Iterable,
Iterator,
List,
Optional,
Tuple,
Type,
)
from rich.console import (
Console,
ConsoleOptions,
RenderableType,
RenderGroup,
RenderResult,
)
from rich.highlighter import NullHighlighter
from rich.live import Live
from rich.markdown import Markdown
from rich.padding import Padding
from rich.panel import Panel
from rich.pretty import Pretty
from rich.progress import (
BarColumn,
Progress,
RenderableColumn,
SpinnerColumn,
TimeElapsedColumn,
)
from rich.rule import Rule
from rich.syntax import Syntax
from rich.table import Table
from rich.text import Text
from rich.theme import Theme
from rich.traceback import Traceback
from rich.tree import Tree
from ward._diff import Diff
from ward._fixtures import FixtureHierarchyMapping, fixture_parents_and_children
from ward._suite import Suite
from ward._utilities import group_by
from ward._ward_version import __version__
from ward.expect import Comparison, TestFailure
from ward.fixtures import Fixture
from ward.models import ExitCode, Scope
from ward.testing import Test, TestOutcome, TestResult, fixtures_used_directly_by_tests
HORIZONTAL_PAD = (0, 1, 0, 1)
INDENT = " " * 2
theme = Theme(
{
"title": "bold",
"heading": "bold",
"pass": "#ffffff on #137C39",
"pass.textonly": "#189F4A",
"fail": "#ffffff on #BF2D2D",
"fail.textonly": "#BF2D2D",
"fail.header": "bold #BF2D2D",
"skip": "#ffffff on #0E67B3",
"skip.textonly": "#1381E0",
"xpass": "#162740 on #F4C041",
"xpass.textonly": "#F4C041",
"xfail": "#ffffff on #695CC8",
"xfail.textonly": "#695CC8",
"muted": "dim",
"info": "yellow italic",
"dryrun": "#ffffff on #162740",
"rule.line": "#189F4A",
"fixture.name": "bold #1381E0",
"fixture.scope.test": "bold #189F4A",
"fixture.scope.module": "bold #F4C041",
"fixture.scope.global": "bold #EA913C",
"usedby": "#9285F6",
}
)
rich_console = Console(theme=theme, highlighter=NullHighlighter())
def format_test_id(test_result: TestResult) -> str:
"""
Format module name, line number, and test case number
"""
return f"{format_test_location(test_result.test)}{format_test_case_number(test_result.test)}"
def format_test_location(test: Test) -> str:
"""
Returns the location of a test as a string of the form '{test.module_name}:{test.line_number}'
"""
return f"{test.module_name}:{test.line_number}"
def format_test_case_number(test: Test) -> str:
"""
Returns a string of the format '[{current_test_number}/{num_parameterised_instances}]'.
For example, for the 3rd run of a test that is parameterised with 5 parameter sets the
return value is '[3/5]'.
"""
param_meta = test.param_meta
if param_meta.group_size > 1:
pad = len(str(param_meta.group_size))
iter_indicator = (
f"[{param_meta.instance_index + 1:>{pad}}/{param_meta.group_size}]"
)
else:
iter_indicator = ""
return iter_indicator
class TestOutputStyle(str, Enum):
TEST_PER_LINE = "test-per-line"
DOTS_GLOBAL = "dots-global"
DOTS_MODULE = "dots-module"
LIVE = "live"
NONE = "none"
class TestProgressStyle(str, Enum):
INLINE = "inline"
BAR = "bar"
NONE = "none"
def get_test_result_line(
test_result: TestResult,
test_index: int,
num_tests: int,
progress_styles: List[TestProgressStyle],
extra_left_pad: int = 0,
) -> Table:
"""
Outputs a single test result to the terminal in Ward's standard output
format which outputs a single test per line.
"""
outcome_tag = test_result.outcome.name[:4]
test = test_result.test
test_location = format_test_location(test)
test_case_number = format_test_case_number(test)
test_style = outcome_to_style(test_result.outcome)
grid = Table.grid(expand=True)
grid.add_column()
grid.add_column()
grid.add_column()
columns = [
Padding(outcome_tag, style=test_style, pad=(0, 1, 0, 1 + extra_left_pad)),
Padding(f"{test_location}{test_case_number}", style="muted", pad=(0, 1, 0, 1)),
Padding(
Markdown(test.description, inline_code_theme="ansi_dark"), pad=(0, 1, 0, 0)
),
]
# Skip/Xfail tests may have a reason note attached that we'll print
reason = getattr(test.marker, "reason", "")
if reason and test.marker.active:
grid.add_column(justify="center", style=test_style)
columns.append(Padding(reason, pad=(0, 1, 0, 1)))
if TestProgressStyle.INLINE in progress_styles:
grid.add_column(justify="right", style="muted")
columns.append(f"{(test_index + 1) / num_tests:>4.0%}")
grid.add_row(*columns)
return grid
INLINE_PROGRESS_LEN = 5 # e.g. " 93%"
def get_dot(result: TestResult) -> Text:
style = outcome_to_style(result.outcome)
return Text(result.outcome.display_char, style=style, end="")
@dataclass
class TestTimingStatsPanel:
all_tests_in_session: List[TestResult]
num_tests_to_show: int
@property
def _raw_test_durations_secs(self):
return [r.test.timer.duration for r in self.all_tests_in_session]
@property
def _median_secs(self):
return statistics.median(self._raw_test_durations_secs)
@property
def _percentile99_secs(self):
data = self._raw_test_durations_secs
size = len(data)
percentile = 99
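        # Nearest-rank method: ceil(size * percentile / 100) is the 1-based
        # index of the 99th-percentile duration in the sorted list.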
return sorted(data)[int(math.ceil((size * percentile) / 100)) - 1]
def __rich_console__(self, c: Console, co: ConsoleOptions) -> RenderResult:
test_results = sorted(
self.all_tests_in_session, key=lambda r: r.test.timer.duration, reverse=True
)
grid = Table.grid(padding=(0, 2, 0, 0))
grid.add_column(justify="right") # Time taken
grid.add_column() # Test ID
grid.add_column() # Test description
for result in test_results[: self.num_tests_to_show]:
time_taken_secs = result.test.timer.duration
time_taken_millis = time_taken_secs * 1000
test_id = format_test_id(result)
description = result.test.description
grid.add_row(
f"[b]{time_taken_millis:.0f}[/b]ms",
Text(test_id, style="muted"),
description,
)
num_slowest_displayed = min(
len(self.all_tests_in_session), self.num_tests_to_show
)
panel = Panel(
RenderGroup(
Padding(
f"Median: [b]{self._median_secs * 1000:.2f}[/b]ms"
f" [muted]|[/muted] "
f"99th Percentile: [b]{self._percentile99_secs * 1000:.2f}[/b]ms",
pad=(0, 0, 1, 0),
),
grid,
),
title=f"[b white]{num_slowest_displayed} Slowest Tests[/b white]",
style="none",
border_style="rule.line",
)
yield panel
@dataclass
class SessionPrelude:
time_to_collect_secs: float
num_tests_collected: int
num_fixtures_collected: int
config_path: Optional[Path]
python_impl: str = field(default=platform.python_implementation())
python_version: str = field(default=platform.python_version())
ward_version: str = field(default=__version__)
def __rich_console__(self, c: Console, co: ConsoleOptions) -> RenderResult:
yield Rule(
Text(
f"Ward {self.ward_version} | {self.python_impl} {self.python_version}",
style="title",
)
)
if self.config_path:
try:
path = self.config_path.relative_to(Path.cwd())
except ValueError:
path = self.config_path.name
yield f"Loaded config from [b]{path}[/b]."
yield (
f"Found [b]{self.num_tests_collected}[/b] tests "
f"and [b]{self.num_fixtures_collected}[/b] fixtures "
f"in [b]{self.time_to_collect_secs:.2f}[/b] seconds."
)
class ResultProcessor(abc.ABC):
@abc.abstractmethod
def handle_result(self, test_result: TestResult):
pass
class TerminalResultProcessor(ResultProcessor):
def __init__(
self,
suite: Suite,
test_output_style: str,
progress_styles: List[TestProgressStyle],
config_path: Optional[Path],
show_diff_symbols: bool = False,
):
self.suite = suite
self.test_output_style = test_output_style
self.progress_styles = progress_styles
self.config_path = config_path
self.show_diff_symbols = show_diff_symbols
def handle_result(self, test_result: TestResult):
# Make the actual output of the result a pluggy hook, so that users can implement their own version
pass
class TestResultDisplayWidget:
def __init__(self, num_tests: int, progress_styles: List[TestProgressStyle]):
self.console = rich_console
self.num_tests = num_tests
self.progress_styles = progress_styles
def footer(self, test_results: List[TestResult]) -> Optional[RenderableType]:
"""
This method should return an object that can be rendered by Rich.
It will be inserted into the "footer" of the test suite result display,
which hugs the bottom of the output as the suite runs.
This method may be called at any time to refresh the state of the footer,
so it should be a pure function.
If this function returns ``None``, it will not cause anything to be
rendered in the footer. You can use this to "hide" the footer based
on state captured during the suite.
"""
pass
def after_test(self, test_index: int, test_result: TestResult) -> None:
"""
This method is called after each test is executed,
with the results of that test and the index of that test in the suite.
Some ways you can use this method:
- Capture state for use in other methods of your widget.
- Print to the terminal using the attached Console (``self.console``).
Anything printed this way will appear above the footer
and will persist after the suite is done.
"""
pass
def after_suite(self, test_results: List[TestResult]) -> None:
"""
This method is called after the suite is done executing
(or is cancelled, or aborts mid-run, etc.),
with results for all of the tests that have been run.
Some ways you can use this method:
- Change the return value of your footer to None to prevent it
from appearing in the final persistent output.
"""
pass
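# Hedged illustration (not part of Ward): a minimal custom widget built on the
# three hooks documented above. The class name is hypothetical; it counts
# failures in after_test and only renders a footer once something has failed.
class FailureCountWidget(TestResultDisplayWidget):
    def __init__(self, num_tests: int, progress_styles: List[TestProgressStyle]):
        super().__init__(num_tests, progress_styles)
        self.num_failures = 0
    def footer(self, test_results: List[TestResult]) -> Optional[RenderableType]:
        if self.num_failures == 0:
            return None  # hide the footer while everything is passing
        return Text(f"{self.num_failures} failed so far", style="fail.textonly")
    def after_test(self, test_index: int, test_result: TestResult) -> None:
        if test_result.outcome is TestOutcome.FAIL:
            self.num_failures += 1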
class TestPerLine(TestResultDisplayWidget):
def after_test(self, test_index: int, test_result: TestResult) -> None:
self.console.print(
get_test_result_line(
test_result, test_index, self.num_tests, self.progress_styles
)
)
class DotsDisplayWidget(TestResultDisplayWidget, abc.ABC):
def __init__(self, num_tests: int, progress_styles: List[TestProgressStyle]):
super().__init__(num_tests, progress_styles)
self.base_max_dots_per_line = get_terminal_size().width
if TestProgressStyle.INLINE in progress_styles:
self.base_max_dots_per_line -= INLINE_PROGRESS_LEN
self.dots_on_line = 0
self.footer_text = self.get_blank_footer_text()
def footer(self, test_results: List[TestResult]) -> Optional[RenderableType]:
return self.footer_text
def get_blank_footer_text(self) -> Text:
return Text("", end="")
@property
@abc.abstractmethod
def max_dots_for_current_line(self) -> int:
raise NotImplementedError()
def end_of_line(self, test_index):
self.footer_text.append(self.get_end_of_line_for_dots(test_index=test_index))
self.console.print(self.footer_text, end="")
self.dots_on_line = 0
self.footer_text = self.get_blank_footer_text()
def get_end_of_line_for_dots(
self,
test_index: int,
) -> Text:
if TestProgressStyle.INLINE in self.progress_styles and self.num_tests > 0:
fill = (
self.max_dots_for_current_line - self.dots_on_line + INLINE_PROGRESS_LEN
)
return Text(
f"{(test_index + 1) / self.num_tests:>{fill}.0%}\n",
style="muted",
)
else:
return Text("\n")
def after_suite(self, test_results: List[TestResult]) -> None:
self.end_of_line(test_index=len(test_results) - 1)
class DotsGlobal(DotsDisplayWidget):
@property
def max_dots_for_current_line(self) -> int:
return self.base_max_dots_per_line
def after_test(self, test_index: int, test_result: TestResult) -> None:
self.footer_text.append(get_dot(test_result))
self.dots_on_line += 1
if self.dots_on_line == self.max_dots_for_current_line:
self.end_of_line(test_index)
class DotsPerModule(DotsDisplayWidget):
def __init__(self, num_tests: int, progress_styles: List[TestProgressStyle]):
super().__init__(num_tests, progress_styles)
self.current_path = Path("")
self.cwd = Path.cwd()
self._max_dots_for_current_line = self.base_max_dots_per_line
@property
def max_dots_for_current_line(self) -> int:
return self._max_dots_for_current_line
def after_test(self, test_index: int, test_result: TestResult) -> None:
# if we are starting a new module
if test_result.test.path != self.current_path:
# if this isn't the first module, add the end-of-line for the previous module
if test_index > 0:
self.end_of_line(test_index)
self.current_path = test_result.test.path
rel_path = str(self.current_path.relative_to(self.cwd))
final_slash_idx = rel_path.rfind("/")
if final_slash_idx != -1:
path_text = Text("", end="").join(
[
Text(rel_path[: final_slash_idx + 1], style="muted"),
Text(rel_path[final_slash_idx + 1 :]),
Text(": "),
]
)
else:
path_text = Text(f"{rel_path}: ", end="")
self.footer_text.append(path_text)
self._max_dots_for_current_line = (
self.base_max_dots_per_line - path_text.cell_len
)
if self.dots_on_line == self.max_dots_for_current_line:
self.end_of_line(test_index)
# we are now on a blank line with no path prefix
self._max_dots_for_current_line = self.base_max_dots_per_line
self.footer_text.append(get_dot(test_result))
self.dots_on_line += 1
GREEN_CHECK = Text("✔", style="pass.textonly")
RED_X = Text("✘", style="fail.textonly")
class LiveTestBar(TestResultDisplayWidget):
def __init__(self, num_tests: int, progress_styles: List[TestProgressStyle]):
super().__init__(num_tests, progress_styles)
self.spinner_column = SpinnerColumn(
style="pass.textonly",
finished_text=GREEN_CHECK,
)
self.test_description_column = RenderableColumn(Text(""))
self.progress = Progress(
self.spinner_column,
self.test_description_column,
console=rich_console,
)
self.task = self.progress.add_task("", total=num_tests)
def footer(self, test_results: List[TestResult]) -> Optional[RenderableType]:
return self.progress
def after_test(self, test_index: int, test_result: TestResult) -> None:
self.progress.update(self.task, advance=1)
self.test_description_column.renderable = get_test_result_line(
test_result=test_result,
test_index=test_index,
num_tests=self.num_tests,
progress_styles=self.progress_styles,
)
if test_result.outcome.will_fail_session:
self.console.print(
get_test_result_line(
test_result=test_result,
test_index=test_index,
num_tests=self.num_tests,
progress_styles=self.progress_styles,
extra_left_pad=2, # account for the spinner
)
)
self.spinner_column.finished_text = RED_X
self.spinner_column.spinner.style = "fail.textonly"
class SuiteProgressBar(TestResultDisplayWidget):
def __init__(self, num_tests: int, progress_styles: List[TestProgressStyle]):
super().__init__(num_tests, progress_styles)
self.spinner_column = SpinnerColumn(
style="pass.textonly",
finished_text=GREEN_CHECK,
)
self.bar_column = BarColumn(
complete_style="pass.textonly",
finished_style="pass.textonly",
)
self.progress = Progress(
self.spinner_column,
TimeElapsedColumn(),
self.bar_column,
"[progress.percentage]{task.percentage:>3.0f}%",
"[progress.percentage][{task.completed} / {task.total}]",
console=self.console,
)
self.task = self.progress.add_task("Testing...", total=num_tests)
def footer(self, test_results: List[TestResult]) -> Optional[RenderableType]:
return self.progress
def after_test(self, test_index: int, test_result: TestResult) -> None:
self.progress.update(self.task, advance=1)
if test_result.outcome.will_fail_session:
self.spinner_column.finished_text = RED_X
self.spinner_column.spinner.style = "fail.textonly"
self.bar_column.complete_style = "fail.textonly"
self.bar_column.finished_style = "fail.textonly"
def after_suite(self, test_results: List[TestResult]) -> None:
self.progress = None
class TerminalResultsWriter:
def __init__(
self,
console: Console,
num_tests: int,
progress_styles: List[TestProgressStyle],
widget_types: Iterable[Type[TestResultDisplayWidget]],
):
self.console = console
        self.widgets = [
            widget_type(num_tests=num_tests, progress_styles=progress_styles)
            for widget_type in widget_types
        ]
self.live = Live(
console=console,
renderable=self.footer(results=[]),
)
def footer(self, results: List[TestResult]) -> RenderableType:
table = Table.grid()
table.add_column()
for f in filter(
None, (component.footer(results) for component in self.widgets)
):
table.add_row(f)
return table
def run(
self,
test_results: Iterator[TestResult],
fail_limit: Optional[int],
) -> Tuple[List[TestResult], bool]:
"""
Execute the test suite, returning the list of test results
and a boolean that is true if the run was cancelled and false otherwise.
"""
num_failures = 0
results = []
was_cancelled = False
self.console.print()
with self.live as live:
try:
for idx, result in enumerate(test_results):
# We need to re-enable the Live here in case
# it was disabled by the breakpoint debugger hook.
live.start(refresh=True)
for component in self.widgets:
component.after_test(idx, result)
live.update(self.footer(results))
results.append(result)
if result.outcome is TestOutcome.FAIL:
num_failures += 1
if num_failures == fail_limit:
break
except KeyboardInterrupt:
was_cancelled = True
finally:
for component in self.widgets:
component.after_suite(results)
live.update(self.footer(results), refresh=True)
return results, was_cancelled
class TestResultWriterBase:
runtime_output_strategies = {
TestOutputStyle.TEST_PER_LINE: TestPerLine,
TestOutputStyle.DOTS_GLOBAL: DotsGlobal,
TestOutputStyle.DOTS_MODULE: DotsPerModule,
TestOutputStyle.LIVE: LiveTestBar,
TestOutputStyle.NONE: TestResultDisplayWidget,
}
def __init__(
self,
console: Console,
suite: Suite,
test_output_style: TestOutputStyle,
progress_styles: List[TestProgressStyle],
config_path: Optional[Path],
show_diff_symbols: bool = False,
):
self.console = console
self.suite = suite
self.test_output_style = test_output_style
self.progress_styles = progress_styles
self.config_path = config_path
self.show_diff_symbols = show_diff_symbols
self.terminal_size = get_terminal_size()
def output_all_test_results(
self,
test_results_gen: Generator[TestResult, None, None],
fail_limit: Optional[int] = None,
) -> List[TestResult]:
if not self.suite.num_tests:
return []
widget_types = [self.runtime_output_strategies[self.test_output_style]]
if TestProgressStyle.BAR in self.progress_styles:
widget_types.append(SuiteProgressBar)
all_results, was_cancelled = TerminalResultsWriter(
console=self.console,
num_tests=self.suite.num_tests_with_parameterisation,
progress_styles=self.progress_styles,
widget_types=widget_types,
).run(test_results_gen, fail_limit)
if was_cancelled:
self.console.print(
"Run cancelled - results for tests that ran shown below.",
style="info",
)
failed_test_results = [r for r in all_results if r.outcome == TestOutcome.FAIL]
for failure in failed_test_results:
self.output_why_test_failed_header(failure)
self.output_test_failed_location(failure)
self.output_why_test_failed(failure)
self.output_captured_stderr(failure)
self.output_captured_stdout(failure)
if failed_test_results:
self.print_divider()
else:
self.console.print()
return all_results
@staticmethod
def print_divider() -> None:
rich_console.print(Rule(style="muted"))
def output_why_test_failed_header(self, test_result: TestResult):
"""
Printed above the failing test output
"""
raise NotImplementedError()
def output_test_result_summary(
        self, test_results: List[TestResult], time_taken: float, show_slowest: int
):
raise NotImplementedError()
def output_why_test_failed(self, test_result: TestResult):
"""
Extended output shown for failing tests, may include further explanations,
assertion error info, diffs, etc.
"""
raise NotImplementedError()
def output_captured_stderr(self, test_result: TestResult):
raise NotImplementedError()
def output_captured_stdout(self, test_result: TestResult):
raise NotImplementedError()
def output_test_failed_location(self, test_result: TestResult):
raise NotImplementedError()
@dataclass
class TerminalSize:
height: int
width: int
def get_terminal_size() -> TerminalSize:
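    # Probe stdin, stdout, and stderr in turn; whichever is attached to a
    # terminal reports its size. Fall back to 24x80 if none is a TTY.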
for i in range(0, 3):
try:
cols, rows = os.get_terminal_size(i)
return TerminalSize(height=rows, width=cols)
except OSError:
continue
return TerminalSize(height=24, width=80)
class TestResultWriter(TestResultWriterBase):
def output_why_test_failed_header(self, test_result: TestResult):
test = test_result.test
self.console.print(
Padding(
Rule(
title=Text(test.description, style="fail.header"),
style="fail.textonly",
),
pad=(1, 0, 0, 0),
),
)
def output_why_test_failed(self, test_result: TestResult):
err = test_result.error
if isinstance(err, TestFailure):
if err.operator in Comparison:
self.console.print(self.get_source(err, test_result))
self.console.print(self.get_pretty_comparison_failure(err))
else:
self.print_traceback(err)
def get_source(self, err: TestFailure, test_result: TestResult) -> RenderableType:
src_lines, line_num = inspect.getsourcelines(test_result.test.fn)
src = Syntax(
"".join(src_lines),
"python",
start_line=line_num,
line_numbers=True,
highlight_lines={err.error_line},
background_color="default",
theme="ansi_dark",
)
return Padding(src, (1, 0, 1, 4))
def get_pretty_comparison_failure(self, err: TestFailure) -> RenderableType:
if err.operator is Comparison.Equals:
return self.get_pretty_failure_for_equals(err)
elif err.operator in {Comparison.In, Comparison.NotIn}:
return self.get_pretty_failure_for_in(err)
else:
return Text("", end="")
def get_pretty_failure_for_equals(self, err: TestFailure) -> RenderableType:
diff_msg = Text.assemble(
("LHS ", "pass.textonly"),
("vs ", "default"),
("RHS ", "fail.textonly"),
("shown below", "default"),
)
diff = Diff(
err.lhs,
err.rhs,
width=self.terminal_size.width - 24,
show_symbols=self.show_diff_symbols,
)
return RenderGroup(
Padding(diff_msg, pad=(0, 0, 1, 2)),
Padding(diff, pad=(0, 0, 1, 4)),
)
def get_pretty_failure_for_in(self, err: TestFailure) -> RenderableType:
lhs_msg = Text.assemble(
("The ", "default"),
("item ", "pass.textonly"),
*self.of_type(err.lhs),
)
lhs = Panel(
Pretty(err.lhs),
title=lhs_msg,
title_align="left",
border_style="pass.textonly",
padding=1,
)
rhs_msg = Text.assemble(
("was not " if err.operator is Comparison.In else "was ", "bold default"),
("found in the ", "default"),
("container ", "fail.textonly"),
*self.of_type(err.rhs),
)
rhs = Panel(
Pretty(err.rhs),
title=rhs_msg,
title_align="left",
border_style="fail.textonly",
padding=1,
)
return Padding(RenderGroup(lhs, rhs), pad=(0, 0, 1, 2))
def of_type(self, obj: object) -> Iterator[Tuple[str, str]]:
yield "(of type ", "default"
yield type(obj).__name__, "bold default"
yield ")", "default"
def print_traceback(self, err):
trace = getattr(err, "__traceback__", "")
if trace:
# The first frame contains library internal code which is not
# relevant to end users, so skip over it.
trace = trace.tb_next
tb = Traceback.from_exception(err.__class__, err, trace, show_locals=True)
self.console.print(Padding(tb, pad=(0, 4, 1, 4)))
else:
self.console.print(str(err))
def output_test_result_summary(
self, test_results: List[TestResult], time_taken: float, show_slowest: int
):
if show_slowest:
self.console.print(TestTimingStatsPanel(test_results, show_slowest))
result_table = Table.grid()
result_table.add_column(justify="right")
result_table.add_column()
result_table.add_column()
outcome_counts = self._get_outcome_counts(test_results)
test_count = sum(outcome_counts.values())
result_table.add_row(
Padding(str(test_count), pad=HORIZONTAL_PAD, style="bold"),
Padding("Tests Encountered", pad=HORIZONTAL_PAD),
style="default",
)
for outcome, count in outcome_counts.items():
if count > 0:
result_table.add_row(
Padding(str(count), pad=HORIZONTAL_PAD, style="bold"),
Padding(outcome.display_name, pad=HORIZONTAL_PAD),
Padding(f"({100 * count / test_count:.1f}%)", pad=HORIZONTAL_PAD),
style=outcome_to_style(outcome),
)
exit_code = get_exit_code(test_results)
if exit_code == ExitCode.SUCCESS:
result_style = "pass.textonly"
else:
result_style = "fail.textonly"
result_summary_panel = Panel(
result_table,
title="[b default]Results[/b default]",
style="none",
expand=False,
border_style=result_style,
)
self.console.print(result_summary_panel)
self.console.print(
Rule(
f"[b]{exit_code.clean_name}[/b] in [b]{time_taken:.2f}[/b] seconds",
style=result_style,
)
)
def output_captured_stderr(self, test_result: TestResult):
if test_result.captured_stderr:
captured_stderr_lines = test_result.captured_stderr.split("\n")
self.console.print(Padding(Text("Captured stderr"), pad=(0, 0, 1, 2)))
for line in captured_stderr_lines:
self.console.print(Padding(line, pad=(0, 0, 0, 4)))
self.console.print()
def output_captured_stdout(self, test_result: TestResult):
if test_result.captured_stdout:
captured_stdout_lines = test_result.captured_stdout.split("\n")
self.console.print(Padding(Text("Captured stdout"), pad=(0, 0, 1, 2)))
for line in captured_stdout_lines:
self.console.print(Padding(line, pad=(0, 0, 0, 4)))
self.console.print()
def output_test_failed_location(self, test_result: TestResult):
if isinstance(test_result.error, TestFailure) or isinstance(
test_result.error, AssertionError
):
self.console.print(
Padding(
Text(
f"Failed at {os.path.relpath(test_result.test.path, Path.cwd())}:{test_result.error.error_line}"
),
pad=(1, 0, 0, 2),
)
)
def _get_outcome_counts(
self, test_results: List[TestResult]
) -> Dict[TestOutcome, int]:
return {
TestOutcome.PASS: len(
[r for r in test_results if r.outcome == TestOutcome.PASS]
),
TestOutcome.FAIL: len(
[r for r in test_results if r.outcome == TestOutcome.FAIL]
),
TestOutcome.SKIP: len(
[r for r in test_results if r.outcome == TestOutcome.SKIP]
),
TestOutcome.XFAIL: len(
[r for r in test_results if r.outcome == TestOutcome.XFAIL]
),
TestOutcome.XPASS: len(
[r for r in test_results if r.outcome == TestOutcome.XPASS]
),
TestOutcome.DRYRUN: len(
[r for r in test_results if r.outcome == TestOutcome.DRYRUN]
),
}
def outcome_to_style(outcome: TestOutcome) -> str:
return {
TestOutcome.PASS: "pass",
TestOutcome.SKIP: "skip",
TestOutcome.FAIL: "fail",
TestOutcome.XFAIL: "xfail",
TestOutcome.XPASS: "xpass",
TestOutcome.DRYRUN: "dryrun",
}[outcome]
def scope_to_style(scope: Scope) -> str:
return {
Scope.Test: "fixture.scope.test",
Scope.Module: "fixture.scope.module",
Scope.Global: "fixture.scope.global",
}[scope]
def output_fixtures(
fixtures: List[Fixture],
tests: List[Test],
show_scopes: bool,
show_docstrings: bool,
show_dependencies: bool,
show_dependency_trees: bool,
):
generated_tests = itertools.chain.from_iterable(
test.get_parameterised_instances() for test in tests
)
fixture_to_tests = fixtures_used_directly_by_tests(generated_tests)
fixtures_to_parents, fixtures_to_children = fixture_parents_and_children(fixtures)
for module, fixtures in group_by(fixtures, key=lambda f: f.module_name).items():
rich_console.print(Rule(Text(module, style="title")))
for fixture in fixtures:
fixture_tree = make_fixture_information_tree(
fixture,
used_by_tests=fixture_to_tests[fixture],
fixtures_to_children=fixtures_to_children,
fixtures_to_parents=fixtures_to_parents,
show_scopes=show_scopes,
show_docstrings=show_docstrings,
show_dependencies=show_dependencies,
show_dependency_trees=show_dependency_trees,
)
rich_console.print(fixture_tree)
def make_fixture_information_tree(
fixture: Fixture,
used_by_tests: Collection[Test],
fixtures_to_children: FixtureHierarchyMapping,
fixtures_to_parents: FixtureHierarchyMapping,
show_scopes: bool,
show_docstrings: bool,
show_dependencies: bool,
show_dependency_trees: bool,
) -> Tree:
root = Tree(label=make_text_for_fixture(fixture, show_scope=show_scopes))
if show_dependency_trees:
max_depth = None
elif show_dependencies:
max_depth = 1
else:
max_depth = 0
if show_docstrings and fixture.fn.__doc__ is not None:
root.add(dedent(fixture.fn.__doc__).strip("\n"))
if show_dependencies or show_dependency_trees:
if fixtures_to_parents[fixture]:
depends_on_node = root.add(label="[usedby]depends on fixtures")
add_fixture_dependencies_to_tree(
depends_on_node,
fixture,
fixtures_to_parents,
show_scopes=show_scopes,
max_depth=max_depth,
)
if fixtures_to_children[fixture]:
used_by_node = root.add(label="[usedby]used by fixtures")
add_fixture_dependencies_to_tree(
used_by_node,
fixture,
fixtures_to_children,
show_scopes=show_scopes,
max_depth=max_depth,
)
if used_by_tests:
used_by_tests_node = root.add("[usedby]used directly by tests")
add_fixture_usages_by_tests_to_tree(used_by_tests_node, used_by_tests)
if not (used_by_tests or fixtures_to_children[fixture]):
root.add("[usedby]used by [fail]no tests or fixtures")
return root
def add_fixture_dependencies_to_tree(
parent: Tree,
fixture: Fixture,
fixtures_to_parents_or_children: FixtureHierarchyMapping,
show_scopes: bool,
max_depth: Optional[int],
depth: int = 0,
) -> None:
if max_depth is not None and depth >= max_depth:
return
this_layer = fixtures_to_parents_or_children[fixture]
if not this_layer:
return
for dep in this_layer:
node = parent.add(make_text_for_fixture(fixture=dep, show_scope=show_scopes))
add_fixture_dependencies_to_tree(
parent=node,
fixture=dep,
fixtures_to_parents_or_children=fixtures_to_parents_or_children,
show_scopes=show_scopes,
max_depth=max_depth,
depth=depth + 1,
)
def add_fixture_usages_by_tests_to_tree(node: Tree, used_by: Iterable[Test]) -> None:
grouped_used_by = group_by(used_by, key=lambda t: t.description)
for idx, (description, tests) in enumerate(grouped_used_by.items()):
test = tests[0]
loc = format_test_location(test)
sep = f" [{len(tests)}]" if len(tests) > 1 else ""
node.add(f"[muted]{loc}{sep}[/muted] {test.description}")
def make_text_for_fixture(fixture: Fixture, show_scope: bool) -> Text:
text = Text()
text.append(f"{fixture.path.name}:{fixture.line_number} ", style="dim")
text.append(fixture.name, style="fixture.name")
if show_scope:
text.append(
f" (scope: {fixture.scope.value})", style=scope_to_style(fixture.scope)
)
return text
def get_exit_code(results: Iterable[TestResult]) -> ExitCode:
if not results:
return ExitCode.NO_TESTS_FOUND
if any(
r.outcome == TestOutcome.FAIL or r.outcome == TestOutcome.XPASS for r in results
):
exit_code = ExitCode.FAILED
else:
exit_code = ExitCode.SUCCESS
return exit_code
|
[
"os.get_terminal_size",
"platform.python_version",
"rich.text.Text",
"pathlib.Path",
"ward._fixtures.fixture_parents_and_children",
"ward._utilities.group_by",
"inspect.getsourcelines",
"rich.text.Text.assemble",
"rich.highlighter.NullHighlighter",
"rich.progress.SpinnerColumn",
"rich.rule.Rule",
"rich.pretty.Pretty",
"rich.progress.Progress",
"rich.panel.Panel",
"ward.testing.fixtures_used_directly_by_tests",
"statistics.median",
"math.ceil",
"rich.console.RenderGroup",
"dataclasses.field",
"rich.progress.BarColumn",
"rich.markdown.Markdown",
"rich.padding.Padding",
"rich.table.Table.grid",
"textwrap.dedent",
"ward._diff.Diff",
"platform.python_implementation",
"rich.traceback.Traceback.from_exception",
"pathlib.Path.cwd",
"rich.theme.Theme",
"rich.progress.TimeElapsedColumn"
] |
[((1488, 2160), 'rich.theme.Theme', 'Theme', (["{'title': 'bold', 'heading': 'bold', 'pass': '#ffffff on #137C39',\n 'pass.textonly': '#189F4A', 'fail': '#ffffff on #BF2D2D',\n 'fail.textonly': '#BF2D2D', 'fail.header': 'bold #BF2D2D', 'skip':\n '#ffffff on #0E67B3', 'skip.textonly': '#1381E0', 'xpass':\n '#162740 on #F4C041', 'xpass.textonly': '#F4C041', 'xfail':\n '#ffffff on #695CC8', 'xfail.textonly': '#695CC8', 'muted': 'dim',\n 'info': 'yellow italic', 'dryrun': '#ffffff on #162740', 'rule.line':\n '#189F4A', 'fixture.name': 'bold #1381E0', 'fixture.scope.test':\n 'bold #189F4A', 'fixture.scope.module': 'bold #F4C041',\n 'fixture.scope.global': 'bold #EA913C', 'usedby': '#9285F6'}"], {}), "({'title': 'bold', 'heading': 'bold', 'pass': '#ffffff on #137C39',\n 'pass.textonly': '#189F4A', 'fail': '#ffffff on #BF2D2D',\n 'fail.textonly': '#BF2D2D', 'fail.header': 'bold #BF2D2D', 'skip':\n '#ffffff on #0E67B3', 'skip.textonly': '#1381E0', 'xpass':\n '#162740 on #F4C041', 'xpass.textonly': '#F4C041', 'xfail':\n '#ffffff on #695CC8', 'xfail.textonly': '#695CC8', 'muted': 'dim',\n 'info': 'yellow italic', 'dryrun': '#ffffff on #162740', 'rule.line':\n '#189F4A', 'fixture.name': 'bold #1381E0', 'fixture.scope.test':\n 'bold #189F4A', 'fixture.scope.module': 'bold #F4C041',\n 'fixture.scope.global': 'bold #EA913C', 'usedby': '#9285F6'})\n", (1493, 2160), False, 'from rich.theme import Theme\n'), ((15362, 15394), 'rich.text.Text', 'Text', (['"""✔"""'], {'style': '"""pass.textonly"""'}), "('✔', style='pass.textonly')\n", (15366, 15394), False, 'from rich.text import Text\n'), ((15403, 15435), 'rich.text.Text', 'Text', (['"""✘"""'], {'style': '"""fail.textonly"""'}), "('✘', style='fail.textonly')\n", (15407, 15435), False, 'from rich.text import Text\n'), ((4229, 4252), 'rich.table.Table.grid', 'Table.grid', ([], {'expand': '(True)'}), '(expand=True)\n', (4239, 4252), False, 'from rich.table import Table\n'), ((5264, 5318), 'rich.text.Text', 'Text', (['result.outcome.display_char'], {'style': 'style', 'end': '""""""'}), "(result.outcome.display_char, style=style, end='')\n", (5268, 5318), False, 'from rich.text import Text\n'), ((7697, 7723), 'dataclasses.field', 'field', ([], {'default': '__version__'}), '(default=__version__)\n', (7702, 7723), False, 'from dataclasses import dataclass, field\n'), ((32948, 32996), 'ward.testing.fixtures_used_directly_by_tests', 'fixtures_used_directly_by_tests', (['generated_tests'], {}), '(generated_tests)\n', (32979, 32996), False, 'from ward.testing import Test, TestOutcome, TestResult, fixtures_used_directly_by_tests\n'), ((33046, 33084), 'ward._fixtures.fixture_parents_and_children', 'fixture_parents_and_children', (['fixtures'], {}), '(fixtures)\n', (33074, 33084), False, 'from ward._fixtures import FixtureHierarchyMapping, fixture_parents_and_children\n'), ((36415, 36461), 'ward._utilities.group_by', 'group_by', (['used_by'], {'key': '(lambda t: t.description)'}), '(used_by, key=lambda t: t.description)\n', (36423, 36461), False, 'from ward._utilities import group_by\n'), ((36809, 36815), 'rich.text.Text', 'Text', ([], {}), '()\n', (36813, 36815), False, 'from rich.text import Text\n'), ((2362, 2379), 'rich.highlighter.NullHighlighter', 'NullHighlighter', ([], {}), '()\n', (2377, 2379), False, 'from rich.highlighter import NullHighlighter\n'), ((4343, 4416), 'rich.padding.Padding', 'Padding', (['outcome_tag'], {'style': 'test_style', 'pad': '(0, 1, 0, 1 + extra_left_pad)'}), '(outcome_tag, style=test_style, pad=(0, 1, 0, 1 + extra_left_pad))\n', (4350, 
4416), False, 'from rich.padding import Padding\n'), ((4426, 4504), 'rich.padding.Padding', 'Padding', (['f"""{test_location}{test_case_number}"""'], {'style': '"""muted"""', 'pad': '(0, 1, 0, 1)'}), "(f'{test_location}{test_case_number}', style='muted', pad=(0, 1, 0, 1))\n", (4433, 4504), False, 'from rich.padding import Padding\n'), ((5617, 5665), 'statistics.median', 'statistics.median', (['self._raw_test_durations_secs'], {}), '(self._raw_test_durations_secs)\n', (5634, 5665), False, 'import statistics\n'), ((6110, 6142), 'rich.table.Table.grid', 'Table.grid', ([], {'padding': '(0, 2, 0, 0)'}), '(padding=(0, 2, 0, 0))\n', (6120, 6142), False, 'from rich.table import Table\n'), ((12073, 12089), 'rich.text.Text', 'Text', (['""""""'], {'end': '""""""'}), "('', end='')\n", (12077, 12089), False, 'from rich.text import Text\n'), ((13704, 13712), 'pathlib.Path', 'Path', (['""""""'], {}), "('')\n", (13708, 13712), False, 'from pathlib import Path\n'), ((13732, 13742), 'pathlib.Path.cwd', 'Path.cwd', ([], {}), '()\n', (13740, 13742), False, 'from pathlib import Path\n'), ((15648, 15711), 'rich.progress.SpinnerColumn', 'SpinnerColumn', ([], {'style': '"""pass.textonly"""', 'finished_text': 'GREEN_CHECK'}), "(style='pass.textonly', finished_text=GREEN_CHECK)\n", (15661, 15711), False, 'from rich.progress import BarColumn, Progress, RenderableColumn, SpinnerColumn, TimeElapsedColumn\n'), ((15838, 15924), 'rich.progress.Progress', 'Progress', (['self.spinner_column', 'self.test_description_column'], {'console': 'rich_console'}), '(self.spinner_column, self.test_description_column, console=\n rich_console)\n', (15846, 15924), False, 'from rich.progress import BarColumn, Progress, RenderableColumn, SpinnerColumn, TimeElapsedColumn\n'), ((17260, 17323), 'rich.progress.SpinnerColumn', 'SpinnerColumn', ([], {'style': '"""pass.textonly"""', 'finished_text': 'GREEN_CHECK'}), "(style='pass.textonly', finished_text=GREEN_CHECK)\n", (17273, 17323), False, 'from rich.progress import BarColumn, Progress, RenderableColumn, SpinnerColumn, TimeElapsedColumn\n'), ((17385, 17458), 'rich.progress.BarColumn', 'BarColumn', ([], {'complete_style': '"""pass.textonly"""', 'finished_style': '"""pass.textonly"""'}), "(complete_style='pass.textonly', finished_style='pass.textonly')\n", (17394, 17458), False, 'from rich.progress import BarColumn, Progress, RenderableColumn, SpinnerColumn, TimeElapsedColumn\n'), ((19112, 19124), 'rich.table.Table.grid', 'Table.grid', ([], {}), '()\n', (19122, 19124), False, 'from rich.table import Table\n'), ((25274, 25317), 'inspect.getsourcelines', 'inspect.getsourcelines', (['test_result.test.fn'], {}), '(test_result.test.fn)\n', (25296, 25317), False, 'import inspect\n'), ((25601, 25627), 'rich.padding.Padding', 'Padding', (['src', '(1, 0, 1, 4)'], {}), '(src, (1, 0, 1, 4))\n', (25608, 25627), False, 'from rich.padding import Padding\n'), ((26085, 26204), 'rich.text.Text.assemble', 'Text.assemble', (["('LHS ', 'pass.textonly')", "('vs ', 'default')", "('RHS ', 'fail.textonly')", "('shown below', 'default')"], {}), "(('LHS ', 'pass.textonly'), ('vs ', 'default'), ('RHS ',\n 'fail.textonly'), ('shown below', 'default'))\n", (26098, 26204), False, 'from rich.text import Text\n'), ((26276, 26377), 'ward._diff.Diff', 'Diff', (['err.lhs', 'err.rhs'], {'width': '(self.terminal_size.width - 24)', 'show_symbols': 'self.show_diff_symbols'}), '(err.lhs, err.rhs, width=self.terminal_size.width - 24, show_symbols=\n self.show_diff_symbols)\n', (26280, 26377), False, 'from ward._diff import Diff\n'), 
((28391, 28403), 'rich.table.Table.grid', 'Table.grid', ([], {}), '()\n', (28401, 28403), False, 'from rich.table import Table\n'), ((29484, 29602), 'rich.panel.Panel', 'Panel', (['result_table'], {'title': '"""[b default]Results[/b default]"""', 'style': '"""none"""', 'expand': '(False)', 'border_style': 'result_style'}), "(result_table, title='[b default]Results[/b default]', style='none',\n expand=False, border_style=result_style)\n", (29489, 29602), False, 'from rich.panel import Panel\n'), ((4535, 4592), 'rich.markdown.Markdown', 'Markdown', (['test.description'], {'inline_code_theme': '"""ansi_dark"""'}), "(test.description, inline_code_theme='ansi_dark')\n", (4543, 4592), False, 'from rich.markdown import Markdown\n'), ((4870, 4903), 'rich.padding.Padding', 'Padding', (['reason'], {'pad': '(0, 1, 0, 1)'}), '(reason, pad=(0, 1, 0, 1))\n', (4877, 4903), False, 'from rich.padding import Padding\n'), ((7572, 7604), 'platform.python_implementation', 'platform.python_implementation', ([], {}), '()\n', (7602, 7604), False, 'import platform\n'), ((7646, 7671), 'platform.python_version', 'platform.python_version', ([], {}), '()\n', (7669, 7671), False, 'import platform\n'), ((12795, 12867), 'rich.text.Text', 'Text', (['f"""{(test_index + 1) / self.num_tests:>{fill}.0%}\n"""'], {'style': '"""muted"""'}), "(f'{(test_index + 1) / self.num_tests:>{fill}.0%}\\n', style='muted')\n", (12799, 12867), False, 'from rich.text import Text\n'), ((12948, 12958), 'rich.text.Text', 'Text', (['"""\n"""'], {}), "('\\n')\n", (12952, 12958), False, 'from rich.text import Text\n'), ((15803, 15811), 'rich.text.Text', 'Text', (['""""""'], {}), "('')\n", (15807, 15811), False, 'from rich.text import Text\n'), ((17574, 17593), 'rich.progress.TimeElapsedColumn', 'TimeElapsedColumn', ([], {}), '()\n', (17591, 17593), False, 'from rich.progress import BarColumn, Progress, RenderableColumn, SpinnerColumn, TimeElapsedColumn\n'), ((23112, 23131), 'rich.rule.Rule', 'Rule', ([], {'style': '"""muted"""'}), "(style='muted')\n", (23116, 23131), False, 'from rich.rule import Rule\n'), ((24194, 24217), 'os.get_terminal_size', 'os.get_terminal_size', (['i'], {}), '(i)\n', (24214, 24217), False, 'import os\n'), ((26473, 26508), 'rich.padding.Padding', 'Padding', (['diff_msg'], {'pad': '(0, 0, 1, 2)'}), '(diff_msg, pad=(0, 0, 1, 2))\n', (26480, 26508), False, 'from rich.padding import Padding\n'), ((26522, 26553), 'rich.padding.Padding', 'Padding', (['diff'], {'pad': '(0, 0, 1, 4)'}), '(diff, pad=(0, 0, 1, 4))\n', (26529, 26553), False, 'from rich.padding import Padding\n'), ((26828, 26843), 'rich.pretty.Pretty', 'Pretty', (['err.lhs'], {}), '(err.lhs)\n', (26834, 26843), False, 'from rich.pretty import Pretty\n'), ((27266, 27281), 'rich.pretty.Pretty', 'Pretty', (['err.rhs'], {}), '(err.rhs)\n', (27272, 27281), False, 'from rich.pretty import Pretty\n'), ((27441, 27462), 'rich.console.RenderGroup', 'RenderGroup', (['lhs', 'rhs'], {}), '(lhs, rhs)\n', (27452, 27462), False, 'from rich.console import Console, ConsoleOptions, RenderableType, RenderGroup, RenderResult\n'), ((27947, 28016), 'rich.traceback.Traceback.from_exception', 'Traceback.from_exception', (['err.__class__', 'err', 'trace'], {'show_locals': '(True)'}), '(err.__class__, err, trace, show_locals=True)\n', (27971, 28016), False, 'from rich.traceback import Traceback\n'), ((28750, 28798), 'rich.padding.Padding', 'Padding', (['"""Tests Encountered"""'], {'pad': 'HORIZONTAL_PAD'}), "('Tests Encountered', pad=HORIZONTAL_PAD)\n", (28757, 28798), False, 'from rich.padding 
import Padding\n'), ((29760, 29857), 'rich.rule.Rule', 'Rule', (['f"""[b]{exit_code.clean_name}[/b] in [b]{time_taken:.2f}[/b] seconds"""'], {'style': 'result_style'}), "(f'[b]{exit_code.clean_name}[/b] in [b]{time_taken:.2f}[/b] seconds',\n style=result_style)\n", (29764, 29857), False, 'from rich.rule import Rule\n'), ((33114, 33161), 'ward._utilities.group_by', 'group_by', (['fixtures'], {'key': '(lambda f: f.module_name)'}), '(fixtures, key=lambda f: f.module_name)\n', (33122, 33161), False, 'from ward._utilities import group_by\n'), ((6646, 6674), 'rich.text.Text', 'Text', (['test_id'], {'style': '"""muted"""'}), "(test_id, style='muted')\n", (6650, 6674), False, 'from rich.text import Text\n'), ((6898, 7065), 'rich.padding.Padding', 'Padding', (['f"""Median: [b]{self._median_secs * 1000:.2f}[/b]ms [muted]|[/muted] 99th Percentile: [b]{self._percentile99_secs * 1000:.2f}[/b]ms"""'], {'pad': '(0, 0, 1, 0)'}), "(\n f'Median: [b]{self._median_secs * 1000:.2f}[/b]ms [muted]|[/muted] 99th Percentile: [b]{self._percentile99_secs * 1000:.2f}[/b]ms'\n , pad=(0, 0, 1, 0))\n", (6905, 7065), False, 'from rich.padding import Padding\n'), ((7837, 7932), 'rich.text.Text', 'Text', (['f"""Ward {self.ward_version} | {self.python_impl} {self.python_version}"""'], {'style': '"""title"""'}), "(f'Ward {self.ward_version} | {self.python_impl} {self.python_version}',\n style='title')\n", (7841, 7932), False, 'from rich.text import Text\n'), ((14812, 14841), 'rich.text.Text', 'Text', (['f"""{rel_path}: """'], {'end': '""""""'}), "(f'{rel_path}: ', end='')\n", (14816, 14841), False, 'from rich.text import Text\n'), ((25967, 25983), 'rich.text.Text', 'Text', (['""""""'], {'end': '""""""'}), "('', end='')\n", (25971, 25983), False, 'from rich.text import Text\n'), ((28048, 28077), 'rich.padding.Padding', 'Padding', (['tb'], {'pad': '(0, 4, 1, 4)'}), '(tb, pad=(0, 4, 1, 4))\n', (28055, 28077), False, 'from rich.padding import Padding\n'), ((33203, 33230), 'rich.text.Text', 'Text', (['module'], {'style': '"""title"""'}), "(module, style='title')\n", (33207, 33230), False, 'from rich.text import Text\n'), ((5841, 5875), 'math.ceil', 'math.ceil', (['(size * percentile / 100)'], {}), '(size * percentile / 100)\n', (5850, 5875), False, 'import math\n'), ((8084, 8094), 'pathlib.Path.cwd', 'Path.cwd', ([], {}), '()\n', (8092, 8094), False, 'from pathlib import Path\n'), ((29052, 29101), 'rich.padding.Padding', 'Padding', (['outcome.display_name'], {'pad': 'HORIZONTAL_PAD'}), '(outcome.display_name, pad=HORIZONTAL_PAD)\n', (29059, 29101), False, 'from rich.padding import Padding\n'), ((29123, 29188), 'rich.padding.Padding', 'Padding', (['f"""({100 * count / test_count:.1f}%)"""'], {'pad': 'HORIZONTAL_PAD'}), "(f'({100 * count / test_count:.1f}%)', pad=HORIZONTAL_PAD)\n", (29130, 29188), False, 'from rich.padding import Padding\n'), ((30130, 30153), 'rich.text.Text', 'Text', (['"""Captured stderr"""'], {}), "('Captured stderr')\n", (30134, 30153), False, 'from rich.text import Text\n'), ((30256, 30287), 'rich.padding.Padding', 'Padding', (['line'], {'pad': '(0, 0, 0, 4)'}), '(line, pad=(0, 0, 0, 4))\n', (30263, 30287), False, 'from rich.padding import Padding\n'), ((30541, 30564), 'rich.text.Text', 'Text', (['"""Captured stdout"""'], {}), "('Captured stdout')\n", (30545, 30564), False, 'from rich.text import Text\n'), ((30667, 30698), 'rich.padding.Padding', 'Padding', (['line'], {'pad': '(0, 0, 0, 4)'}), '(line, pad=(0, 0, 0, 4))\n', (30674, 30698), False, 'from rich.padding import Padding\n'), ((34399, 34425), 
'textwrap.dedent', 'dedent', (['fixture.fn.__doc__'], {}), '(fixture.fn.__doc__)\n', (34405, 34425), False, 'from textwrap import dedent\n'), ((14504, 14520), 'rich.text.Text', 'Text', (['""""""'], {'end': '""""""'}), "('', end='')\n", (14508, 14520), False, 'from rich.text import Text\n'), ((14573, 14624), 'rich.text.Text', 'Text', (['rel_path[:final_slash_idx + 1]'], {'style': '"""muted"""'}), "(rel_path[:final_slash_idx + 1], style='muted')\n", (14577, 14624), False, 'from rich.text import Text\n'), ((14651, 14687), 'rich.text.Text', 'Text', (['rel_path[final_slash_idx + 1:]'], {}), '(rel_path[final_slash_idx + 1:])\n', (14655, 14687), False, 'from rich.text import Text\n'), ((14714, 14724), 'rich.text.Text', 'Text', (['""": """'], {}), "(': ')\n", (14718, 14724), False, 'from rich.text import Text\n'), ((24612, 24655), 'rich.text.Text', 'Text', (['test.description'], {'style': '"""fail.header"""'}), "(test.description, style='fail.header')\n", (24616, 24655), False, 'from rich.text import Text\n'), ((31087, 31097), 'pathlib.Path.cwd', 'Path.cwd', ([], {}), '()\n', (31095, 31097), False, 'from pathlib import Path\n')]
|
"""Generate example matplotlib plots of polynomials created using the
func.Polynomial class."""
import matplotlib.pyplot as plt
from func import Polynomial
# Define an example polynomial.
f_x = Polynomial([(3, 1), (2, -2), (1, 1)])
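# Assuming func.Polynomial takes (power, coefficient) pairs, this represents
# f(x) = x**3 - 2*x**2 + x.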
# Define a set of x-values for the plot.
num_points = 100
x_min = -2
x_max = 2
x_values = [x_min + i*(x_max - x_min)/num_points for i in range(num_points)]
y_values = [f_x(x) for x in x_values]
plt.plot(x_values, y_values)
plt.show(block=False)  # render the figure non-blocking so input() below keeps it open
input("Enter to quit.")
|
[
"func.Polynomial",
"matplotlib.pyplot.plot"
] |
[((217, 254), 'func.Polynomial', 'Polynomial', (['[(3, 1), (2, -2), (1, 1)]'], {}), '([(3, 1), (2, -2), (1, 1)])\n', (227, 254), False, 'from func import Polynomial\n'), ((451, 479), 'matplotlib.pyplot.plot', 'plt.plot', (['x_values', 'y_values'], {}), '(x_values, y_values)\n', (459, 479), True, 'import matplotlib.pyplot as plt\n')]
|
"""
Tests for the blaze interface to the pipeline api.
"""
from __future__ import division
from collections import OrderedDict
from datetime import timedelta
from unittest import TestCase
import warnings
import blaze as bz
from datashape import dshape, var, Record
from nose_parameterized import parameterized
import numpy as np
from numpy.testing.utils import assert_array_almost_equal
import pandas as pd
from pandas.util.testing import assert_frame_equal
from toolz import keymap, valmap, concatv
from toolz.curried import operator as op
from zipline.pipeline import Pipeline, CustomFactor
from zipline.pipeline.data import DataSet, BoundColumn
from zipline.pipeline.engine import SimplePipelineEngine
from zipline.pipeline.loaders.blaze import (
from_blaze,
BlazeLoader,
NoDeltasWarning,
NonNumpyField,
NonPipelineField,
)
from zipline.utils.numpy_utils import repeat_last_axis
from zipline.utils.test_utils import tmp_asset_finder, make_simple_asset_info
nameof = op.attrgetter('name')
dtypeof = op.attrgetter('dtype')
asset_infos = (
(make_simple_asset_info(
tuple(map(ord, 'ABC')),
pd.Timestamp(0),
pd.Timestamp('2015'),
),),
(make_simple_asset_info(
tuple(map(ord, 'ABCD')),
pd.Timestamp(0),
pd.Timestamp('2015'),
),),
)
with_extra_sid = parameterized.expand(asset_infos)
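# Decorator that runs a test once per asset universe above (3 sids vs. 4 sids).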
class BlazeToPipelineTestCase(TestCase):
@classmethod
def setUpClass(cls):
cls.dates = dates = pd.date_range('2014-01-01', '2014-01-03')
dates = cls.dates.repeat(3)
cls.sids = sids = ord('A'), ord('B'), ord('C')
cls.df = df = pd.DataFrame({
'sid': sids * 3,
'value': (0, 1, 2, 1, 2, 3, 2, 3, 4),
'asof_date': dates,
'timestamp': dates,
})
cls.dshape = dshape("""
var * {
sid: ?int64,
value: ?float64,
asof_date: datetime,
timestamp: datetime
}
""")
cls.macro_df = df[df.sid == 65].drop('sid', axis=1)
dshape_ = OrderedDict(cls.dshape.measure.fields)
del dshape_['sid']
cls.macro_dshape = var * Record(dshape_)
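        # The "macro" table has no sid column, so its values apply to every asset.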
cls.garbage_loader = BlazeLoader()
def test_tabular(self):
name = 'expr'
expr = bz.Data(self.df, name=name, dshape=self.dshape)
ds = from_blaze(
expr,
loader=self.garbage_loader,
no_deltas_rule='ignore',
)
self.assertEqual(ds.__name__, name)
self.assertTrue(issubclass(ds, DataSet))
self.assertEqual(
{c.name: c.dtype for c in ds._columns},
{'sid': np.int64, 'value': np.float64},
)
for field in ('timestamp', 'asof_date'):
with self.assertRaises(AttributeError) as e:
getattr(ds, field)
self.assertIn("'%s'" % field, str(e.exception))
self.assertIn("'datetime'", str(e.exception))
# test memoization
self.assertIs(
from_blaze(
expr,
loader=self.garbage_loader,
no_deltas_rule='ignore',
),
ds,
)
def test_column(self):
exprname = 'expr'
expr = bz.Data(self.df, name=exprname, dshape=self.dshape)
value = from_blaze(
expr.value,
loader=self.garbage_loader,
no_deltas_rule='ignore',
)
self.assertEqual(value.name, 'value')
self.assertIsInstance(value, BoundColumn)
self.assertEqual(value.dtype, np.float64)
# test memoization
self.assertIs(
from_blaze(
expr.value,
loader=self.garbage_loader,
no_deltas_rule='ignore',
),
value,
)
self.assertIs(
from_blaze(
expr,
loader=self.garbage_loader,
no_deltas_rule='ignore',
).value,
value,
)
# test the walk back up the tree
self.assertIs(
from_blaze(
expr,
loader=self.garbage_loader,
no_deltas_rule='ignore',
),
value.dataset,
)
self.assertEqual(value.dataset.__name__, exprname)
def test_missing_asof(self):
expr = bz.Data(
self.df.loc[:, ['sid', 'value', 'timestamp']],
name='expr',
dshape="""
var * {
sid: ?int64,
value: float64,
timestamp: datetime,
}""",
)
with self.assertRaises(TypeError) as e:
from_blaze(
expr,
loader=self.garbage_loader,
no_deltas_rule='ignore',
)
self.assertIn("'asof_date'", str(e.exception))
self.assertIn(repr(str(expr.dshape.measure)), str(e.exception))
def test_auto_deltas(self):
expr = bz.Data(
{'ds': self.df,
'ds_deltas': pd.DataFrame(columns=self.df.columns)},
dshape=var * Record((
('ds', self.dshape.measure),
('ds_deltas', self.dshape.measure),
)),
)
loader = BlazeLoader()
ds = from_blaze(expr.ds, loader=loader)
self.assertEqual(len(loader), 1)
exprdata = loader[ds]
self.assertTrue(exprdata.expr.isidentical(expr.ds))
self.assertTrue(exprdata.deltas.isidentical(expr.ds_deltas))
def test_auto_deltas_fail_warn(self):
with warnings.catch_warnings(record=True) as ws:
warnings.simplefilter('always')
loader = BlazeLoader()
expr = bz.Data(self.df, dshape=self.dshape)
from_blaze(
expr,
loader=loader,
no_deltas_rule='warn',
)
self.assertEqual(len(ws), 1)
w = ws[0].message
self.assertIsInstance(w, NoDeltasWarning)
self.assertIn(str(expr), str(w))
def test_auto_deltas_fail_raise(self):
loader = BlazeLoader()
expr = bz.Data(self.df, dshape=self.dshape)
with self.assertRaises(ValueError) as e:
from_blaze(
expr,
loader=loader,
no_deltas_rule='raise',
)
self.assertIn(str(expr), str(e.exception))
def test_non_numpy_field(self):
expr = bz.Data(
[],
dshape="""
var * {
a: datetime,
asof_date: datetime,
timestamp: datetime,
}""",
)
ds = from_blaze(
expr,
loader=self.garbage_loader,
no_deltas_rule='ignore',
)
with self.assertRaises(AttributeError):
ds.a
self.assertIsInstance(object.__getattribute__(ds, 'a'), NonNumpyField)
def test_non_pipeline_field(self):
# NOTE: This test will fail if we ever allow string types in
# the Pipeline API. If this happens, change the dtype of the `a` field
# of expr to another type we don't allow.
expr = bz.Data(
[],
dshape="""
var * {
a: string,
asof_date: datetime,
timestamp: datetime,
}""",
)
ds = from_blaze(
expr,
loader=self.garbage_loader,
no_deltas_rule='ignore',
)
with self.assertRaises(AttributeError):
ds.a
self.assertIsInstance(
object.__getattribute__(ds, 'a'),
NonPipelineField,
)
def test_complex_expr(self):
expr = bz.Data(self.df, dshape=self.dshape)
# put an Add in the table
expr_with_add = bz.transform(expr, value=expr.value + 1)
# Test that we can have complex expressions with no deltas
from_blaze(
expr_with_add,
deltas=None,
loader=self.garbage_loader,
)
with self.assertRaises(TypeError):
from_blaze(
expr.value + 1, # put an Add in the column
deltas=None,
loader=self.garbage_loader,
)
deltas = bz.Data(
pd.DataFrame(columns=self.df.columns),
dshape=self.dshape,
)
with self.assertRaises(TypeError):
from_blaze(
expr_with_add,
deltas=deltas,
loader=self.garbage_loader,
)
with self.assertRaises(TypeError):
from_blaze(
expr.value + 1,
deltas=deltas,
loader=self.garbage_loader,
)
def test_id(self):
expr = bz.Data(self.df, name='expr', dshape=self.dshape)
loader = BlazeLoader()
ds = from_blaze(
expr,
loader=loader,
no_deltas_rule='ignore',
)
p = Pipeline()
p.add(ds.value.latest, 'value')
dates = self.dates
with tmp_asset_finder() as finder:
result = SimplePipelineEngine(
loader,
dates,
finder,
).run_pipeline(p, dates[0], dates[-1])
expected = self.df.drop('asof_date', axis=1).set_index(
['timestamp', 'sid'],
)
expected.index = pd.MultiIndex.from_product((
expected.index.levels[0],
finder.retrieve_all(expected.index.levels[1]),
))
assert_frame_equal(result, expected, check_dtype=False)
def test_id_macro_dataset(self):
expr = bz.Data(self.macro_df, name='expr', dshape=self.macro_dshape)
loader = BlazeLoader()
ds = from_blaze(
expr,
loader=loader,
no_deltas_rule='ignore',
)
p = Pipeline()
p.add(ds.value.latest, 'value')
dates = self.dates
asset_info = asset_infos[0][0]
with tmp_asset_finder(asset_info) as finder:
result = SimplePipelineEngine(
loader,
dates,
finder,
).run_pipeline(p, dates[0], dates[-1])
nassets = len(asset_info)
expected = pd.DataFrame(
list(concatv([0] * nassets, [1] * nassets, [2] * nassets)),
index=pd.MultiIndex.from_product((
self.macro_df.timestamp,
finder.retrieve_all(asset_info.index),
)),
columns=('value',),
)
assert_frame_equal(result, expected, check_dtype=False)
def _run_pipeline(self,
expr,
deltas,
expected_views,
expected_output,
finder,
calendar,
start,
end,
window_length,
compute_fn):
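        # Helper: build a pipeline around `expr` (+ `deltas`), run it over
        # `calendar`, and check that each day's trailing window of length
        # `window_length` matches expected_views[day] before reducing it
        # with `compute_fn`.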
loader = BlazeLoader()
ds = from_blaze(
expr,
deltas,
loader=loader,
no_deltas_rule='raise',
)
p = Pipeline()
# prevent unbound locals issue in the inner class
window_length_ = window_length
class TestFactor(CustomFactor):
inputs = ds.value,
window_length = window_length_
def compute(self, today, assets, out, data):
assert_array_almost_equal(data, expected_views[today])
out[:] = compute_fn(data)
p.add(TestFactor(), 'value')
result = SimplePipelineEngine(
loader,
calendar,
finder,
).run_pipeline(p, start, end)
assert_frame_equal(
result,
expected_output,
check_dtype=False,
)
@with_extra_sid
def test_deltas(self, asset_info):
expr = bz.Data(self.df, name='expr', dshape=self.dshape)
deltas = bz.Data(self.df, name='deltas', dshape=self.dshape)
deltas = bz.transform(
deltas,
value=deltas.value + 10,
timestamp=deltas.timestamp + timedelta(days=1),
)
expected_views = keymap(pd.Timestamp, {
'2014-01-02': np.array([[10.0, 11.0, 12.0],
[1.0, 2.0, 3.0]]),
'2014-01-03': np.array([[11.0, 12.0, 13.0],
[2.0, 3.0, 4.0]]),
'2014-01-04': np.array([[12.0, 13.0, 14.0],
[12.0, 13.0, 14.0]]),
})
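        # Each view appears to be the trailing 2-day window as of that date:
        # rows for past days reflect the delta-adjusted values (+10), while the
        # newest day still shows the baseline until its delta lands a day later.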
nassets = len(asset_info)
if nassets == 4:
expected_views = valmap(
lambda view: np.c_[view, [np.nan, np.nan]],
expected_views,
)
with tmp_asset_finder(asset_info) as finder:
expected_output = pd.DataFrame(
list(concatv([12] * nassets, [13] * nassets, [14] * nassets)),
index=pd.MultiIndex.from_product((
sorted(expected_views.keys()),
finder.retrieve_all(asset_info.index),
)),
columns=('value',),
)
dates = self.dates
dates = dates.insert(len(dates), dates[-1] + timedelta(days=1))
self._run_pipeline(
expr,
deltas,
expected_views,
expected_output,
finder,
calendar=dates,
start=dates[1],
end=dates[-1],
window_length=2,
compute_fn=np.nanmax,
)
def test_deltas_macro(self):
asset_info = asset_infos[0][0]
expr = bz.Data(self.macro_df, name='expr', dshape=self.macro_dshape)
deltas = bz.Data(
self.macro_df.iloc[:-1],
name='deltas',
dshape=self.macro_dshape,
)
deltas = bz.transform(
deltas,
value=deltas.value + 10,
timestamp=deltas.timestamp + timedelta(days=1),
)
nassets = len(asset_info)
expected_views = keymap(pd.Timestamp, {
'2014-01-02': repeat_last_axis(np.array([10.0, 1.0]), nassets),
'2014-01-03': repeat_last_axis(np.array([11.0, 2.0]), nassets),
})
with tmp_asset_finder(asset_info) as finder:
expected_output = pd.DataFrame(
list(concatv([10] * nassets, [11] * nassets)),
index=pd.MultiIndex.from_product((
sorted(expected_views.keys()),
finder.retrieve_all(asset_info.index),
)),
columns=('value',),
)
dates = self.dates
self._run_pipeline(
expr,
deltas,
expected_views,
expected_output,
finder,
calendar=dates,
start=dates[1],
end=dates[-1],
window_length=2,
compute_fn=np.nanmax,
)
@with_extra_sid
def test_novel_deltas(self, asset_info):
base_dates = pd.DatetimeIndex([
pd.Timestamp('2014-01-01'),
pd.Timestamp('2014-01-04')
])
repeated_dates = base_dates.repeat(3)
baseline = pd.DataFrame({
'sid': self.sids * 2,
'value': (0, 1, 2, 1, 2, 3),
'asof_date': repeated_dates,
'timestamp': repeated_dates,
})
expr = bz.Data(baseline, name='expr', dshape=self.dshape)
deltas = bz.Data(baseline, name='deltas', dshape=self.dshape)
deltas = bz.transform(
deltas,
value=deltas.value + 10,
timestamp=deltas.timestamp + timedelta(days=1),
)
expected_views = keymap(pd.Timestamp, {
'2014-01-03': np.array([[10.0, 11.0, 12.0],
[10.0, 11.0, 12.0],
[10.0, 11.0, 12.0]]),
'2014-01-06': np.array([[10.0, 11.0, 12.0],
[10.0, 11.0, 12.0],
[11.0, 12.0, 13.0]]),
})
if len(asset_info) == 4:
expected_views = valmap(
lambda view: np.c_[view, [np.nan, np.nan, np.nan]],
expected_views,
)
expected_output_buffer = [10, 11, 12, np.nan, 11, 12, 13, np.nan]
else:
expected_output_buffer = [10, 11, 12, 11, 12, 13]
cal = pd.DatetimeIndex([
pd.Timestamp('2014-01-01'),
pd.Timestamp('2014-01-02'),
pd.Timestamp('2014-01-03'),
# omitting the 4th and 5th to simulate a weekend
pd.Timestamp('2014-01-06'),
])
with tmp_asset_finder(asset_info) as finder:
expected_output = pd.DataFrame(
expected_output_buffer,
index=pd.MultiIndex.from_product((
sorted(expected_views.keys()),
finder.retrieve_all(asset_info.index),
)),
columns=('value',),
)
self._run_pipeline(
expr,
deltas,
expected_views,
expected_output,
finder,
calendar=cal,
start=cal[2],
end=cal[-1],
window_length=3,
compute_fn=op.itemgetter(-1),
)
def test_novel_deltas_macro(self):
asset_info = asset_infos[0][0]
base_dates = pd.DatetimeIndex([
pd.Timestamp('2014-01-01'),
pd.Timestamp('2014-01-04')
])
baseline = pd.DataFrame({
'value': (0, 1),
'asof_date': base_dates,
'timestamp': base_dates,
})
expr = bz.Data(baseline, name='expr', dshape=self.macro_dshape)
deltas = bz.Data(baseline, name='deltas', dshape=self.macro_dshape)
deltas = bz.transform(
deltas,
value=deltas.value + 10,
timestamp=deltas.timestamp + timedelta(days=1),
)
nassets = len(asset_info)
expected_views = keymap(pd.Timestamp, {
'2014-01-03': repeat_last_axis(
np.array([10.0, 10.0, 10.0]),
nassets,
),
'2014-01-06': repeat_last_axis(
np.array([10.0, 10.0, 11.0]),
nassets,
),
})
cal = pd.DatetimeIndex([
pd.Timestamp('2014-01-01'),
pd.Timestamp('2014-01-02'),
pd.Timestamp('2014-01-03'),
# omitting the 4th and 5th to simulate a weekend
pd.Timestamp('2014-01-06'),
])
with tmp_asset_finder(asset_info) as finder:
expected_output = pd.DataFrame(
list(concatv([10] * nassets, [11] * nassets)),
index=pd.MultiIndex.from_product((
sorted(expected_views.keys()),
finder.retrieve_all(asset_info.index),
)),
columns=('value',),
)
self._run_pipeline(
expr,
deltas,
expected_views,
expected_output,
finder,
calendar=cal,
start=cal[2],
end=cal[-1],
window_length=3,
compute_fn=op.itemgetter(-1),
)
|
[
"zipline.pipeline.engine.SimplePipelineEngine",
"toolz.curried.operator.itemgetter",
"blaze.transform",
"pandas.DataFrame",
"toolz.curried.operator.attrgetter",
"nose_parameterized.parameterized.expand",
"datashape.dshape",
"warnings.simplefilter",
"zipline.pipeline.loaders.blaze.BlazeLoader",
"warnings.catch_warnings",
"numpy.testing.utils.assert_array_almost_equal",
"datetime.timedelta",
"zipline.pipeline.Pipeline",
"pandas.date_range",
"pandas.util.testing.assert_frame_equal",
"toolz.valmap",
"toolz.concatv",
"datashape.Record",
"zipline.pipeline.loaders.blaze.from_blaze",
"pandas.Timestamp",
"blaze.Data",
"zipline.utils.test_utils.tmp_asset_finder",
"numpy.array",
"collections.OrderedDict"
] |
[((994, 1015), 'toolz.curried.operator.attrgetter', 'op.attrgetter', (['"""name"""'], {}), "('name')\n", (1007, 1015), True, 'from toolz.curried import operator as op\n'), ((1026, 1048), 'toolz.curried.operator.attrgetter', 'op.attrgetter', (['"""dtype"""'], {}), "('dtype')\n", (1039, 1048), True, 'from toolz.curried import operator as op\n'), ((1335, 1368), 'nose_parameterized.parameterized.expand', 'parameterized.expand', (['asset_infos'], {}), '(asset_infos)\n', (1355, 1368), False, 'from nose_parameterized import parameterized\n'), ((1482, 1523), 'pandas.date_range', 'pd.date_range', (['"""2014-01-01"""', '"""2014-01-03"""'], {}), "('2014-01-01', '2014-01-03')\n", (1495, 1523), True, 'import pandas as pd\n'), ((1637, 1750), 'pandas.DataFrame', 'pd.DataFrame', (["{'sid': sids * 3, 'value': (0, 1, 2, 1, 2, 3, 2, 3, 4), 'asof_date': dates,\n 'timestamp': dates}"], {}), "({'sid': sids * 3, 'value': (0, 1, 2, 1, 2, 3, 2, 3, 4),\n 'asof_date': dates, 'timestamp': dates})\n", (1649, 1750), True, 'import pandas as pd\n'), ((1827, 2005), 'datashape.dshape', 'dshape', (['"""\n var * {\n sid: ?int64,\n value: ?float64,\n asof_date: datetime,\n timestamp: datetime\n }\n """'], {}), '(\n """\n var * {\n sid: ?int64,\n value: ?float64,\n asof_date: datetime,\n timestamp: datetime\n }\n """\n )\n', (1833, 2005), False, 'from datashape import dshape, var, Record\n'), ((2074, 2112), 'collections.OrderedDict', 'OrderedDict', (['cls.dshape.measure.fields'], {}), '(cls.dshape.measure.fields)\n', (2085, 2112), False, 'from collections import OrderedDict\n'), ((2219, 2232), 'zipline.pipeline.loaders.blaze.BlazeLoader', 'BlazeLoader', ([], {}), '()\n', (2230, 2232), False, 'from zipline.pipeline.loaders.blaze import from_blaze, BlazeLoader, NoDeltasWarning, NonNumpyField, NonPipelineField\n'), ((2299, 2346), 'blaze.Data', 'bz.Data', (['self.df'], {'name': 'name', 'dshape': 'self.dshape'}), '(self.df, name=name, dshape=self.dshape)\n', (2306, 2346), True, 'import blaze as bz\n'), ((2360, 2429), 'zipline.pipeline.loaders.blaze.from_blaze', 'from_blaze', (['expr'], {'loader': 'self.garbage_loader', 'no_deltas_rule': '"""ignore"""'}), "(expr, loader=self.garbage_loader, no_deltas_rule='ignore')\n", (2370, 2429), False, 'from zipline.pipeline.loaders.blaze import from_blaze, BlazeLoader, NoDeltasWarning, NonNumpyField, NonPipelineField\n'), ((3262, 3313), 'blaze.Data', 'bz.Data', (['self.df'], {'name': 'exprname', 'dshape': 'self.dshape'}), '(self.df, name=exprname, dshape=self.dshape)\n', (3269, 3313), True, 'import blaze as bz\n'), ((3330, 3405), 'zipline.pipeline.loaders.blaze.from_blaze', 'from_blaze', (['expr.value'], {'loader': 'self.garbage_loader', 'no_deltas_rule': '"""ignore"""'}), "(expr.value, loader=self.garbage_loader, no_deltas_rule='ignore')\n", (3340, 3405), False, 'from zipline.pipeline.loaders.blaze import from_blaze, BlazeLoader, NoDeltasWarning, NonNumpyField, NonPipelineField\n'), ((4391, 4615), 'blaze.Data', 'bz.Data', (["self.df.loc[:, ['sid', 'value', 'timestamp']]"], {'name': '"""expr"""', 'dshape': '"""\n var * {\n sid: ?int64,\n value: float64,\n timestamp: datetime,\n }"""'}), '(self.df.loc[:, [\'sid\', \'value\', \'timestamp\']], name=\'expr\', dshape=\n """\n var * {\n sid: ?int64,\n value: float64,\n timestamp: datetime,\n }"""\n )\n', (4398, 4615), True, 'import blaze as bz\n'), ((5299, 5312), 'zipline.pipeline.loaders.blaze.BlazeLoader', 'BlazeLoader', ([], {}), '()\n', (5310, 5312), False, 'from zipline.pipeline.loaders.blaze import from_blaze, BlazeLoader, NoDeltasWarning, 
NonNumpyField, NonPipelineField\n'), ((5326, 5360), 'zipline.pipeline.loaders.blaze.from_blaze', 'from_blaze', (['expr.ds'], {'loader': 'loader'}), '(expr.ds, loader=loader)\n', (5336, 5360), False, 'from zipline.pipeline.loaders.blaze import from_blaze, BlazeLoader, NoDeltasWarning, NonNumpyField, NonPipelineField\n'), ((6141, 6154), 'zipline.pipeline.loaders.blaze.BlazeLoader', 'BlazeLoader', ([], {}), '()\n', (6152, 6154), False, 'from zipline.pipeline.loaders.blaze import from_blaze, BlazeLoader, NoDeltasWarning, NonNumpyField, NonPipelineField\n'), ((6170, 6206), 'blaze.Data', 'bz.Data', (['self.df'], {'dshape': 'self.dshape'}), '(self.df, dshape=self.dshape)\n', (6177, 6206), True, 'import blaze as bz\n'), ((6490, 6666), 'blaze.Data', 'bz.Data', (['[]'], {'dshape': '"""\n var * {\n a: datetime,\n asof_date: datetime,\n timestamp: datetime,\n }"""'}), '([], dshape=\n """\n var * {\n a: datetime,\n asof_date: datetime,\n timestamp: datetime,\n }"""\n )\n', (6497, 6666), True, 'import blaze as bz\n'), ((6705, 6774), 'zipline.pipeline.loaders.blaze.from_blaze', 'from_blaze', (['expr'], {'loader': 'self.garbage_loader', 'no_deltas_rule': '"""ignore"""'}), "(expr, loader=self.garbage_loader, no_deltas_rule='ignore')\n", (6715, 6774), False, 'from zipline.pipeline.loaders.blaze import from_blaze, BlazeLoader, NoDeltasWarning, NonNumpyField, NonPipelineField\n'), ((7219, 7393), 'blaze.Data', 'bz.Data', (['[]'], {'dshape': '"""\n var * {\n a: string,\n asof_date: datetime,\n timestamp: datetime,\n }"""'}), '([], dshape=\n """\n var * {\n a: string,\n asof_date: datetime,\n timestamp: datetime,\n }"""\n )\n', (7226, 7393), True, 'import blaze as bz\n'), ((7432, 7501), 'zipline.pipeline.loaders.blaze.from_blaze', 'from_blaze', (['expr'], {'loader': 'self.garbage_loader', 'no_deltas_rule': '"""ignore"""'}), "(expr, loader=self.garbage_loader, no_deltas_rule='ignore')\n", (7442, 7501), False, 'from zipline.pipeline.loaders.blaze import from_blaze, BlazeLoader, NoDeltasWarning, NonNumpyField, NonPipelineField\n'), ((7780, 7816), 'blaze.Data', 'bz.Data', (['self.df'], {'dshape': 'self.dshape'}), '(self.df, dshape=self.dshape)\n', (7787, 7816), True, 'import blaze as bz\n'), ((7875, 7915), 'blaze.transform', 'bz.transform', (['expr'], {'value': '(expr.value + 1)'}), '(expr, value=expr.value + 1)\n', (7887, 7915), True, 'import blaze as bz\n'), ((7992, 8058), 'zipline.pipeline.loaders.blaze.from_blaze', 'from_blaze', (['expr_with_add'], {'deltas': 'None', 'loader': 'self.garbage_loader'}), '(expr_with_add, deltas=None, loader=self.garbage_loader)\n', (8002, 8058), False, 'from zipline.pipeline.loaders.blaze import from_blaze, BlazeLoader, NoDeltasWarning, NonNumpyField, NonPipelineField\n'), ((8856, 8905), 'blaze.Data', 'bz.Data', (['self.df'], {'name': '"""expr"""', 'dshape': 'self.dshape'}), "(self.df, name='expr', dshape=self.dshape)\n", (8863, 8905), True, 'import blaze as bz\n'), ((8923, 8936), 'zipline.pipeline.loaders.blaze.BlazeLoader', 'BlazeLoader', ([], {}), '()\n', (8934, 8936), False, 'from zipline.pipeline.loaders.blaze import from_blaze, BlazeLoader, NoDeltasWarning, NonNumpyField, NonPipelineField\n'), ((8950, 9006), 'zipline.pipeline.loaders.blaze.from_blaze', 'from_blaze', (['expr'], {'loader': 'loader', 'no_deltas_rule': '"""ignore"""'}), "(expr, loader=loader, no_deltas_rule='ignore')\n", (8960, 9006), False, 'from zipline.pipeline.loaders.blaze import from_blaze, BlazeLoader, NoDeltasWarning, NonNumpyField, NonPipelineField\n'), ((9066, 9076), 'zipline.pipeline.Pipeline', 
'Pipeline', ([], {}), '()\n', (9074, 9076), False, 'from zipline.pipeline import Pipeline, CustomFactor\n'), ((9632, 9687), 'pandas.util.testing.assert_frame_equal', 'assert_frame_equal', (['result', 'expected'], {'check_dtype': '(False)'}), '(result, expected, check_dtype=False)\n', (9650, 9687), False, 'from pandas.util.testing import assert_frame_equal\n'), ((9741, 9802), 'blaze.Data', 'bz.Data', (['self.macro_df'], {'name': '"""expr"""', 'dshape': 'self.macro_dshape'}), "(self.macro_df, name='expr', dshape=self.macro_dshape)\n", (9748, 9802), True, 'import blaze as bz\n'), ((9820, 9833), 'zipline.pipeline.loaders.blaze.BlazeLoader', 'BlazeLoader', ([], {}), '()\n', (9831, 9833), False, 'from zipline.pipeline.loaders.blaze import from_blaze, BlazeLoader, NoDeltasWarning, NonNumpyField, NonPipelineField\n'), ((9847, 9903), 'zipline.pipeline.loaders.blaze.from_blaze', 'from_blaze', (['expr'], {'loader': 'loader', 'no_deltas_rule': '"""ignore"""'}), "(expr, loader=loader, no_deltas_rule='ignore')\n", (9857, 9903), False, 'from zipline.pipeline.loaders.blaze import from_blaze, BlazeLoader, NoDeltasWarning, NonNumpyField, NonPipelineField\n'), ((9963, 9973), 'zipline.pipeline.Pipeline', 'Pipeline', ([], {}), '()\n', (9971, 9973), False, 'from zipline.pipeline import Pipeline, CustomFactor\n'), ((10648, 10703), 'pandas.util.testing.assert_frame_equal', 'assert_frame_equal', (['result', 'expected'], {'check_dtype': '(False)'}), '(result, expected, check_dtype=False)\n', (10666, 10703), False, 'from pandas.util.testing import assert_frame_equal\n'), ((11075, 11088), 'zipline.pipeline.loaders.blaze.BlazeLoader', 'BlazeLoader', ([], {}), '()\n', (11086, 11088), False, 'from zipline.pipeline.loaders.blaze import from_blaze, BlazeLoader, NoDeltasWarning, NonNumpyField, NonPipelineField\n'), ((11102, 11165), 'zipline.pipeline.loaders.blaze.from_blaze', 'from_blaze', (['expr', 'deltas'], {'loader': 'loader', 'no_deltas_rule': '"""raise"""'}), "(expr, deltas, loader=loader, no_deltas_rule='raise')\n", (11112, 11165), False, 'from zipline.pipeline.loaders.blaze import from_blaze, BlazeLoader, NoDeltasWarning, NonNumpyField, NonPipelineField\n'), ((11237, 11247), 'zipline.pipeline.Pipeline', 'Pipeline', ([], {}), '()\n', (11245, 11247), False, 'from zipline.pipeline import Pipeline, CustomFactor\n'), ((11819, 11881), 'pandas.util.testing.assert_frame_equal', 'assert_frame_equal', (['result', 'expected_output'], {'check_dtype': '(False)'}), '(result, expected_output, check_dtype=False)\n', (11837, 11881), False, 'from pandas.util.testing import assert_frame_equal\n'), ((12004, 12053), 'blaze.Data', 'bz.Data', (['self.df'], {'name': '"""expr"""', 'dshape': 'self.dshape'}), "(self.df, name='expr', dshape=self.dshape)\n", (12011, 12053), True, 'import blaze as bz\n'), ((12071, 12122), 'blaze.Data', 'bz.Data', (['self.df'], {'name': '"""deltas"""', 'dshape': 'self.dshape'}), "(self.df, name='deltas', dshape=self.dshape)\n", (12078, 12122), True, 'import blaze as bz\n'), ((13830, 13891), 'blaze.Data', 'bz.Data', (['self.macro_df'], {'name': '"""expr"""', 'dshape': 'self.macro_dshape'}), "(self.macro_df, name='expr', dshape=self.macro_dshape)\n", (13837, 13891), True, 'import blaze as bz\n'), ((13909, 13982), 'blaze.Data', 'bz.Data', (['self.macro_df.iloc[:-1]'], {'name': '"""deltas"""', 'dshape': 'self.macro_dshape'}), "(self.macro_df.iloc[:-1], name='deltas', dshape=self.macro_dshape)\n", (13916, 13982), True, 'import blaze as bz\n'), ((15465, 15592), 'pandas.DataFrame', 'pd.DataFrame', (["{'sid': self.sids 
* 2, 'value': (0, 1, 2, 1, 2, 3), 'asof_date':\n repeated_dates, 'timestamp': repeated_dates}"], {}), "({'sid': self.sids * 2, 'value': (0, 1, 2, 1, 2, 3),\n 'asof_date': repeated_dates, 'timestamp': repeated_dates})\n", (15477, 15592), True, 'import pandas as pd\n'), ((15663, 15713), 'blaze.Data', 'bz.Data', (['baseline'], {'name': '"""expr"""', 'dshape': 'self.dshape'}), "(baseline, name='expr', dshape=self.dshape)\n", (15670, 15713), True, 'import blaze as bz\n'), ((15731, 15783), 'blaze.Data', 'bz.Data', (['baseline'], {'name': '"""deltas"""', 'dshape': 'self.dshape'}), "(baseline, name='deltas', dshape=self.dshape)\n", (15738, 15783), True, 'import blaze as bz\n'), ((17891, 17976), 'pandas.DataFrame', 'pd.DataFrame', (["{'value': (0, 1), 'asof_date': base_dates, 'timestamp': base_dates}"], {}), "({'value': (0, 1), 'asof_date': base_dates, 'timestamp':\n base_dates})\n", (17903, 17976), True, 'import pandas as pd\n'), ((18035, 18091), 'blaze.Data', 'bz.Data', (['baseline'], {'name': '"""expr"""', 'dshape': 'self.macro_dshape'}), "(baseline, name='expr', dshape=self.macro_dshape)\n", (18042, 18091), True, 'import blaze as bz\n'), ((18109, 18167), 'blaze.Data', 'bz.Data', (['baseline'], {'name': '"""deltas"""', 'dshape': 'self.macro_dshape'}), "(baseline, name='deltas', dshape=self.macro_dshape)\n", (18116, 18167), True, 'import blaze as bz\n'), ((1134, 1149), 'pandas.Timestamp', 'pd.Timestamp', (['(0)'], {}), '(0)\n', (1146, 1149), True, 'import pandas as pd\n'), ((1159, 1179), 'pandas.Timestamp', 'pd.Timestamp', (['"""2015"""'], {}), "('2015')\n", (1171, 1179), True, 'import pandas as pd\n'), ((1260, 1275), 'pandas.Timestamp', 'pd.Timestamp', (['(0)'], {}), '(0)\n', (1272, 1275), True, 'import pandas as pd\n'), ((1285, 1305), 'pandas.Timestamp', 'pd.Timestamp', (['"""2015"""'], {}), "('2015')\n", (1297, 1305), True, 'import pandas as pd\n'), ((2173, 2188), 'datashape.Record', 'Record', (['dshape_'], {}), '(dshape_)\n', (2179, 2188), False, 'from datashape import dshape, var, Record\n'), ((3033, 3102), 'zipline.pipeline.loaders.blaze.from_blaze', 'from_blaze', (['expr'], {'loader': 'self.garbage_loader', 'no_deltas_rule': '"""ignore"""'}), "(expr, loader=self.garbage_loader, no_deltas_rule='ignore')\n", (3043, 3102), False, 'from zipline.pipeline.loaders.blaze import from_blaze, BlazeLoader, NoDeltasWarning, NonNumpyField, NonPipelineField\n'), ((3662, 3737), 'zipline.pipeline.loaders.blaze.from_blaze', 'from_blaze', (['expr.value'], {'loader': 'self.garbage_loader', 'no_deltas_rule': '"""ignore"""'}), "(expr.value, loader=self.garbage_loader, no_deltas_rule='ignore')\n", (3672, 3737), False, 'from zipline.pipeline.loaders.blaze import from_blaze, BlazeLoader, NoDeltasWarning, NonNumpyField, NonPipelineField\n'), ((4112, 4181), 'zipline.pipeline.loaders.blaze.from_blaze', 'from_blaze', (['expr'], {'loader': 'self.garbage_loader', 'no_deltas_rule': '"""ignore"""'}), "(expr, loader=self.garbage_loader, no_deltas_rule='ignore')\n", (4122, 4181), False, 'from zipline.pipeline.loaders.blaze import from_blaze, BlazeLoader, NoDeltasWarning, NonNumpyField, NonPipelineField\n'), ((4714, 4783), 'zipline.pipeline.loaders.blaze.from_blaze', 'from_blaze', (['expr'], {'loader': 'self.garbage_loader', 'no_deltas_rule': '"""ignore"""'}), "(expr, loader=self.garbage_loader, no_deltas_rule='ignore')\n", (4724, 4783), False, 'from zipline.pipeline.loaders.blaze import from_blaze, BlazeLoader, NoDeltasWarning, NonNumpyField, NonPipelineField\n'), ((5617, 5653), 'warnings.catch_warnings', 
'warnings.catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (5640, 5653), False, 'import warnings\n'), ((5673, 5704), 'warnings.simplefilter', 'warnings.simplefilter', (['"""always"""'], {}), "('always')\n", (5694, 5704), False, 'import warnings\n'), ((5726, 5739), 'zipline.pipeline.loaders.blaze.BlazeLoader', 'BlazeLoader', ([], {}), '()\n', (5737, 5739), False, 'from zipline.pipeline.loaders.blaze import from_blaze, BlazeLoader, NoDeltasWarning, NonNumpyField, NonPipelineField\n'), ((5759, 5795), 'blaze.Data', 'bz.Data', (['self.df'], {'dshape': 'self.dshape'}), '(self.df, dshape=self.dshape)\n', (5766, 5795), True, 'import blaze as bz\n'), ((5808, 5862), 'zipline.pipeline.loaders.blaze.from_blaze', 'from_blaze', (['expr'], {'loader': 'loader', 'no_deltas_rule': '"""warn"""'}), "(expr, loader=loader, no_deltas_rule='warn')\n", (5818, 5862), False, 'from zipline.pipeline.loaders.blaze import from_blaze, BlazeLoader, NoDeltasWarning, NonNumpyField, NonPipelineField\n'), ((6268, 6323), 'zipline.pipeline.loaders.blaze.from_blaze', 'from_blaze', (['expr'], {'loader': 'loader', 'no_deltas_rule': '"""raise"""'}), "(expr, loader=loader, no_deltas_rule='raise')\n", (6278, 6323), False, 'from zipline.pipeline.loaders.blaze import from_blaze, BlazeLoader, NoDeltasWarning, NonNumpyField, NonPipelineField\n'), ((8162, 8229), 'zipline.pipeline.loaders.blaze.from_blaze', 'from_blaze', (['(expr.value + 1)'], {'deltas': 'None', 'loader': 'self.garbage_loader'}), '(expr.value + 1, deltas=None, loader=self.garbage_loader)\n', (8172, 8229), False, 'from zipline.pipeline.loaders.blaze import from_blaze, BlazeLoader, NoDeltasWarning, NonNumpyField, NonPipelineField\n'), ((8360, 8397), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': 'self.df.columns'}), '(columns=self.df.columns)\n', (8372, 8397), True, 'import pandas as pd\n'), ((8496, 8564), 'zipline.pipeline.loaders.blaze.from_blaze', 'from_blaze', (['expr_with_add'], {'deltas': 'deltas', 'loader': 'self.garbage_loader'}), '(expr_with_add, deltas=deltas, loader=self.garbage_loader)\n', (8506, 8564), False, 'from zipline.pipeline.loaders.blaze import from_blaze, BlazeLoader, NoDeltasWarning, NonNumpyField, NonPipelineField\n'), ((8684, 8753), 'zipline.pipeline.loaders.blaze.from_blaze', 'from_blaze', (['(expr.value + 1)'], {'deltas': 'deltas', 'loader': 'self.garbage_loader'}), '(expr.value + 1, deltas=deltas, loader=self.garbage_loader)\n', (8694, 8753), False, 'from zipline.pipeline.loaders.blaze import from_blaze, BlazeLoader, NoDeltasWarning, NonNumpyField, NonPipelineField\n'), ((9158, 9176), 'zipline.utils.test_utils.tmp_asset_finder', 'tmp_asset_finder', ([], {}), '()\n', (9174, 9176), False, 'from zipline.utils.test_utils import tmp_asset_finder, make_simple_asset_info\n'), ((10094, 10122), 'zipline.utils.test_utils.tmp_asset_finder', 'tmp_asset_finder', (['asset_info'], {}), '(asset_info)\n', (10110, 10122), False, 'from zipline.utils.test_utils import tmp_asset_finder, make_simple_asset_info\n'), ((12766, 12832), 'toolz.valmap', 'valmap', (['(lambda view: np.c_[view, [np.nan, np.nan]])', 'expected_views'], {}), '(lambda view: np.c_[view, [np.nan, np.nan]], expected_views)\n', (12772, 12832), False, 'from toolz import keymap, valmap, concatv\n'), ((12894, 12922), 'zipline.utils.test_utils.tmp_asset_finder', 'tmp_asset_finder', (['asset_info'], {}), '(asset_info)\n', (12910, 12922), False, 'from zipline.utils.test_utils import tmp_asset_finder, make_simple_asset_info\n'), ((14448, 14476), 
'zipline.utils.test_utils.tmp_asset_finder', 'tmp_asset_finder', (['asset_info'], {}), '(asset_info)\n', (14464, 14476), False, 'from zipline.utils.test_utils import tmp_asset_finder, make_simple_asset_info\n'), ((16403, 16477), 'toolz.valmap', 'valmap', (['(lambda view: np.c_[view, [np.nan, np.nan, np.nan]])', 'expected_views'], {}), '(lambda view: np.c_[view, [np.nan, np.nan, np.nan]], expected_views)\n', (16409, 16477), False, 'from toolz import keymap, valmap, concatv\n'), ((16959, 16987), 'zipline.utils.test_utils.tmp_asset_finder', 'tmp_asset_finder', (['asset_info'], {}), '(asset_info)\n', (16975, 16987), False, 'from zipline.utils.test_utils import tmp_asset_finder, make_simple_asset_info\n'), ((18959, 18987), 'zipline.utils.test_utils.tmp_asset_finder', 'tmp_asset_finder', (['asset_info'], {}), '(asset_info)\n', (18975, 18987), False, 'from zipline.utils.test_utils import tmp_asset_finder, make_simple_asset_info\n'), ((3866, 3935), 'zipline.pipeline.loaders.blaze.from_blaze', 'from_blaze', (['expr'], {'loader': 'self.garbage_loader', 'no_deltas_rule': '"""ignore"""'}), "(expr, loader=self.garbage_loader, no_deltas_rule='ignore')\n", (3876, 3935), False, 'from zipline.pipeline.loaders.blaze import from_blaze, BlazeLoader, NoDeltasWarning, NonNumpyField, NonPipelineField\n'), ((5085, 5122), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': 'self.df.columns'}), '(columns=self.df.columns)\n', (5097, 5122), True, 'import pandas as pd\n'), ((10384, 10436), 'toolz.concatv', 'concatv', (['([0] * nassets)', '([1] * nassets)', '([2] * nassets)'], {}), '([0] * nassets, [1] * nassets, [2] * nassets)\n', (10391, 10436), False, 'from toolz import keymap, valmap, concatv\n'), ((11535, 11589), 'numpy.testing.utils.assert_array_almost_equal', 'assert_array_almost_equal', (['data', 'expected_views[today]'], {}), '(data, expected_views[today])\n', (11560, 11589), False, 'from numpy.testing.utils import assert_array_almost_equal\n'), ((11688, 11734), 'zipline.pipeline.engine.SimplePipelineEngine', 'SimplePipelineEngine', (['loader', 'calendar', 'finder'], {}), '(loader, calendar, finder)\n', (11708, 11734), False, 'from zipline.pipeline.engine import SimplePipelineEngine\n'), ((12356, 12403), 'numpy.array', 'np.array', (['[[10.0, 11.0, 12.0], [1.0, 2.0, 3.0]]'], {}), '([[10.0, 11.0, 12.0], [1.0, 2.0, 3.0]])\n', (12364, 12403), True, 'import numpy as np\n'), ((12467, 12514), 'numpy.array', 'np.array', (['[[11.0, 12.0, 13.0], [2.0, 3.0, 4.0]]'], {}), '([[11.0, 12.0, 13.0], [2.0, 3.0, 4.0]])\n', (12475, 12514), True, 'import numpy as np\n'), ((12578, 12628), 'numpy.array', 'np.array', (['[[12.0, 13.0, 14.0], [12.0, 13.0, 14.0]]'], {}), '([[12.0, 13.0, 14.0], [12.0, 13.0, 14.0]])\n', (12586, 12628), True, 'import numpy as np\n'), ((15322, 15348), 'pandas.Timestamp', 'pd.Timestamp', (['"""2014-01-01"""'], {}), "('2014-01-01')\n", (15334, 15348), True, 'import pandas as pd\n'), ((15362, 15388), 'pandas.Timestamp', 'pd.Timestamp', (['"""2014-01-04"""'], {}), "('2014-01-04')\n", (15374, 15388), True, 'import pandas as pd\n'), ((16016, 16086), 'numpy.array', 'np.array', (['[[10.0, 11.0, 12.0], [10.0, 11.0, 12.0], [10.0, 11.0, 12.0]]'], {}), '([[10.0, 11.0, 12.0], [10.0, 11.0, 12.0], [10.0, 11.0, 12.0]])\n', (16024, 16086), True, 'import numpy as np\n'), ((16186, 16256), 'numpy.array', 'np.array', (['[[10.0, 11.0, 12.0], [10.0, 11.0, 12.0], [11.0, 12.0, 13.0]]'], {}), '([[10.0, 11.0, 12.0], [10.0, 11.0, 12.0], [11.0, 12.0, 13.0]])\n', (16194, 16256), True, 'import numpy as np\n'), ((16725, 16751), 
'pandas.Timestamp', 'pd.Timestamp', (['"""2014-01-01"""'], {}), "('2014-01-01')\n", (16737, 16751), True, 'import pandas as pd\n'), ((16765, 16791), 'pandas.Timestamp', 'pd.Timestamp', (['"""2014-01-02"""'], {}), "('2014-01-02')\n", (16777, 16791), True, 'import pandas as pd\n'), ((16805, 16831), 'pandas.Timestamp', 'pd.Timestamp', (['"""2014-01-03"""'], {}), "('2014-01-03')\n", (16817, 16831), True, 'import pandas as pd\n'), ((16906, 16932), 'pandas.Timestamp', 'pd.Timestamp', (['"""2014-01-06"""'], {}), "('2014-01-06')\n", (16918, 16932), True, 'import pandas as pd\n'), ((17794, 17820), 'pandas.Timestamp', 'pd.Timestamp', (['"""2014-01-01"""'], {}), "('2014-01-01')\n", (17806, 17820), True, 'import pandas as pd\n'), ((17834, 17860), 'pandas.Timestamp', 'pd.Timestamp', (['"""2014-01-04"""'], {}), "('2014-01-04')\n", (17846, 17860), True, 'import pandas as pd\n'), ((18726, 18752), 'pandas.Timestamp', 'pd.Timestamp', (['"""2014-01-01"""'], {}), "('2014-01-01')\n", (18738, 18752), True, 'import pandas as pd\n'), ((18766, 18792), 'pandas.Timestamp', 'pd.Timestamp', (['"""2014-01-02"""'], {}), "('2014-01-02')\n", (18778, 18792), True, 'import pandas as pd\n'), ((18806, 18832), 'pandas.Timestamp', 'pd.Timestamp', (['"""2014-01-03"""'], {}), "('2014-01-03')\n", (18818, 18832), True, 'import pandas as pd\n'), ((18907, 18933), 'pandas.Timestamp', 'pd.Timestamp', (['"""2014-01-06"""'], {}), "('2014-01-06')\n", (18919, 18933), True, 'import pandas as pd\n'), ((5150, 5223), 'datashape.Record', 'Record', (["(('ds', self.dshape.measure), ('ds_deltas', self.dshape.measure))"], {}), "((('ds', self.dshape.measure), ('ds_deltas', self.dshape.measure)))\n", (5156, 5223), False, 'from datashape import dshape, var, Record\n'), ((9209, 9252), 'zipline.pipeline.engine.SimplePipelineEngine', 'SimplePipelineEngine', (['loader', 'dates', 'finder'], {}), '(loader, dates, finder)\n', (9229, 9252), False, 'from zipline.pipeline.engine import SimplePipelineEngine\n'), ((10155, 10198), 'zipline.pipeline.engine.SimplePipelineEngine', 'SimplePipelineEngine', (['loader', 'dates', 'finder'], {}), '(loader, dates, finder)\n', (10175, 10198), False, 'from zipline.pipeline.engine import SimplePipelineEngine\n'), ((12252, 12269), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (12261, 12269), False, 'from datetime import timedelta\n'), ((12999, 13054), 'toolz.concatv', 'concatv', (['([12] * nassets)', '([13] * nassets)', '([14] * nassets)'], {}), '([12] * nassets, [13] * nassets, [14] * nassets)\n', (13006, 13054), False, 'from toolz import keymap, valmap, concatv\n'), ((13376, 13393), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (13385, 13393), False, 'from datetime import timedelta\n'), ((14159, 14176), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (14168, 14176), False, 'from datetime import timedelta\n'), ((14314, 14335), 'numpy.array', 'np.array', (['[10.0, 1.0]'], {}), '([10.0, 1.0])\n', (14322, 14335), True, 'import numpy as np\n'), ((14390, 14411), 'numpy.array', 'np.array', (['[11.0, 2.0]'], {}), '([11.0, 2.0])\n', (14398, 14411), True, 'import numpy as np\n'), ((14553, 14592), 'toolz.concatv', 'concatv', (['([10] * nassets)', '([11] * nassets)'], {}), '([10] * nassets, [11] * nassets)\n', (14560, 14592), False, 'from toolz import keymap, valmap, concatv\n'), ((15913, 15930), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (15922, 15930), False, 'from datetime import timedelta\n'), ((17630, 17647), 
'toolz.curried.operator.itemgetter', 'op.itemgetter', (['(-1)'], {}), '(-1)\n', (17643, 17647), True, 'from toolz.curried import operator as op\n'), ((18297, 18314), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (18306, 18314), False, 'from datetime import timedelta\n'), ((18469, 18497), 'numpy.array', 'np.array', (['[10.0, 10.0, 10.0]'], {}), '([10.0, 10.0, 10.0])\n', (18477, 18497), True, 'import numpy as np\n'), ((18599, 18627), 'numpy.array', 'np.array', (['[10.0, 10.0, 11.0]'], {}), '([10.0, 10.0, 11.0])\n', (18607, 18627), True, 'import numpy as np\n'), ((19064, 19103), 'toolz.concatv', 'concatv', (['([10] * nassets)', '([11] * nassets)'], {}), '([10] * nassets, [11] * nassets)\n', (19071, 19103), False, 'from toolz import keymap, valmap, concatv\n'), ((19653, 19670), 'toolz.curried.operator.itemgetter', 'op.itemgetter', (['(-1)'], {}), '(-1)\n', (19666, 19670), True, 'from toolz.curried import operator as op\n')]
|
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import torch
from .stage0 import Stage0
from .stage1 import Stage1
from .stage2 import Stage2
from .stage3 import Stage3
class GNMTSplit(torch.nn.Module):
def __init__(self):
super(GNMTSplit, self).__init__()
self.stage0 = Stage0()
self.stage1 = Stage1()
self.stage2 = Stage2()
self.stage3 = Stage3()
def forward(self, input0, input1, input2):
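        # Each stage consumes the previous stage's intermediate activations;
        # the tuples are the cut points of the (presumed) pipeline-parallel split.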
(out0, out2, out1, out3) = self.stage0(input0, input1, input2)
(out12, out13, out4, out5, out6) = self.stage1(out0, out2, out1, out3)
(out14, out15, out16, out17) = self.stage2(out12, out13, out4, out5, out6)
out18 = self.stage3(out12, out14, out15, out16, out17)
return out18
def _initialize_weights(self):
for m in self.modules():
if isinstance(m, torch.nn.Conv2d):
torch.nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
if m.bias is not None:
torch.nn.init.constant_(m.bias, 0)
elif isinstance(m, torch.nn.BatchNorm2d):
torch.nn.init.constant_(m.weight, 1)
torch.nn.init.constant_(m.bias, 0)
elif isinstance(m, torch.nn.Linear):
torch.nn.init.normal_(m.weight, 0, 0.01)
torch.nn.init.constant_(m.bias, 0)
|
[
"torch.nn.init.constant_",
"torch.nn.init.kaiming_normal_",
"torch.nn.init.normal_"
] |
[((917, 993), 'torch.nn.init.kaiming_normal_', 'torch.nn.init.kaiming_normal_', (['m.weight'], {'mode': '"""fan_out"""', 'nonlinearity': '"""relu"""'}), "(m.weight, mode='fan_out', nonlinearity='relu')\n", (946, 993), False, 'import torch\n'), ((1053, 1087), 'torch.nn.init.constant_', 'torch.nn.init.constant_', (['m.bias', '(0)'], {}), '(m.bias, 0)\n', (1076, 1087), False, 'import torch\n'), ((1158, 1194), 'torch.nn.init.constant_', 'torch.nn.init.constant_', (['m.weight', '(1)'], {}), '(m.weight, 1)\n', (1181, 1194), False, 'import torch\n'), ((1211, 1245), 'torch.nn.init.constant_', 'torch.nn.init.constant_', (['m.bias', '(0)'], {}), '(m.bias, 0)\n', (1234, 1245), False, 'import torch\n'), ((1311, 1351), 'torch.nn.init.normal_', 'torch.nn.init.normal_', (['m.weight', '(0)', '(0.01)'], {}), '(m.weight, 0, 0.01)\n', (1332, 1351), False, 'import torch\n'), ((1368, 1402), 'torch.nn.init.constant_', 'torch.nn.init.constant_', (['m.bias', '(0)'], {}), '(m.bias, 0)\n', (1391, 1402), False, 'import torch\n')]
|
import cv2
import numpy as np
from pyzbar.pyzbar import decode
def decoder(image):
    gray_img = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
barcode = decode(gray_img)
for obj in barcode:
points = obj.polygon
(x, y, w, h) = obj.rect
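        # obj.polygon outlines the symbol even when rotated; obj.rect is the
        # axis-aligned bounding box, used below to anchor the label text.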
pts = np.array(points, np.int32)
pts = pts.reshape((-1, 1, 2))
cv2.polylines(image, [pts], True, (0, 255, 0), 3)
barcodeData = obj.data.decode("utf-8")
barcodeType = obj.type
string = "Data " + str(barcodeData) + " | Type " + str(barcodeType)
        cv2.putText(image, string, (x, y), cv2.FONT_HERSHEY_SIMPLEX, 0.8, (255, 0, 0), 2)
print("Barcode: " + barcodeData + " | Type: " + barcodeType)
cap = cv2.VideoCapture(0)
while True:
ret, frame = cap.read()
decoder(frame)
cv2.imshow('Image', frame)
code = cv2.waitKey(10)
    if code == ord('q'):
        break
cap.release()
cv2.destroyAllWindows()
|
[
"cv2.putText",
"cv2.polylines",
"cv2.cvtColor",
"pyzbar.pyzbar.decode",
"cv2.waitKey",
"cv2.VideoCapture",
"numpy.array",
"cv2.imshow"
] |
[((700, 719), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (716, 719), False, 'import cv2\n'), ((100, 122), 'cv2.cvtColor', 'cv2.cvtColor', (['image', '(0)'], {}), '(image, 0)\n', (112, 122), False, 'import cv2\n'), ((137, 153), 'pyzbar.pyzbar.decode', 'decode', (['gray_img'], {}), '(gray_img)\n', (143, 153), False, 'from pyzbar.pyzbar import decode\n'), ((783, 809), 'cv2.imshow', 'cv2.imshow', (['"""Image"""', 'frame'], {}), "('Image', frame)\n", (793, 809), False, 'import cv2\n'), ((821, 836), 'cv2.waitKey', 'cv2.waitKey', (['(10)'], {}), '(10)\n', (832, 836), False, 'import cv2\n'), ((254, 280), 'numpy.array', 'np.array', (['points', 'np.int32'], {}), '(points, np.int32)\n', (262, 280), True, 'import numpy as np\n'), ((327, 376), 'cv2.polylines', 'cv2.polylines', (['image', '[pts]', '(True)', '(0, 255, 0)', '(3)'], {}), '(image, [pts], True, (0, 255, 0), 3)\n', (340, 376), False, 'import cv2\n'), ((541, 627), 'cv2.putText', 'cv2.putText', (['frame', 'string', '(x, y)', 'cv2.FONT_HERSHEY_SIMPLEX', '(0.8)', '(255, 0, 0)', '(2)'], {}), '(frame, string, (x, y), cv2.FONT_HERSHEY_SIMPLEX, 0.8, (255, 0, \n 0), 2)\n', (552, 627), False, 'import cv2\n')]
|
#!/usr/bin/python3
# Copyright 2018 <NAME>
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
import time
import subprocess
from datetime import datetime
from random import randint
import socket
from urllib.parse import urlparse
from grinlib import lib
from grinlib import grin
from grinbase.model.pool_utxo import Pool_utxo
from grinbase.model.pool_payment import Pool_payment
PROCESS = "makePayouts"
LOGGER = None
CONFIG = None
# pool_utxo <--- these are our user records. A record of each pending payout (one per unique miner payout address)
# makePayouts.py gets the list of pool_utxo records with value greater than the threshold and attempts to make a payment.
# * Future: Do multiple payouts in a single grin wallet tx
# * updates pool_utxo with new total, timestamp of last payout, number of failed payout attempts
# XXX TODO: Add a maximum payout value to reduce the pool's risk
def makePayout(address, amount):
global LOGGER
global CONFIG
LOGGER.warn("Making Payout of: {} to: {}".format(address, amount))
# Validate the address does not contain dangerous shell characters
valid = validateWalletAddress(address)
if valid == False:
LOGGER.warn("Wallet address is invalid: {}".format(address))
return 1 # failure status
# Test a low-timeout connection before involving the wallet
probe = testWalletPort(address)
if probe == False:
LOGGER.warn("Test Connection Failed: {} {}".format(address, amount))
return 1 # failure status
# Make the payout
LOGGER.warn("Test Connection Ok: {} {}".format(address, amount))
grin_api_url = grin.get_api_url()
os.chdir(CONFIG[PROCESS]["wallet_dir"])
send_cmd = [
"/usr/local/bin/grin",
"wallet",
"--api_server_address", grin_api_url,
"send",
"--selection", "smallest",
"--dest", str(address),
str(amount)
]
LOGGER.warn("Command: {}".format(send_cmd))
try:
output = subprocess.check_output(send_cmd, stderr=subprocess.STDOUT, shell=False)
LOGGER.warn("Sent OK: {}".format(output))
return 0
except subprocess.CalledProcessError as exc:
LOGGER.error("Send failed with rc {} and output {}".format(exc.returncode, exc.output))
return 1 # exc.returncode
except Exception as e:
LOGGER.error("Send failed with error {}".format(str(e)))
return 1
# Only HTTP URLs are supported as wallet addresses for now
def validateWalletAddress(address):
global LOGGER
try:
LOGGER.warn("Validating wallet address: {}".format(address))
return urlparse(address).scheme == 'http'
except Exception as e:
LOGGER.error("Wallet address is invalid: {}".format(str(e)))
return False
def testWalletPort(address):
global LOGGER
try:
s = socket.socket()
s.settimeout(2)
netloc = urlparse(address).netloc
addr = netloc.split(':')
LOGGER.warn("Testing: {}, {}".format(addr[0], addr[1]))
s.connect((addr[0], int(addr[1])))
s.close()
except Exception as e:
LOGGER.error("Failed test connection: {}".format(str(e)))
return False
return True
def main():
global LOGGER
global CONFIG
CONFIG = lib.get_config()
LOGGER = lib.get_logger(PROCESS)
LOGGER.warn("=== Starting {}".format(PROCESS))
# Connect to DB
try:
database = lib.get_db()
except Exception as e:
LOGGER.error("Failed to connect to the db: {}".format(e))
wallet_dir = CONFIG[PROCESS]["wallet_dir"]
minimum_payout = int(CONFIG[PROCESS]["minimum_payout"])
os.chdir(wallet_dir)
utxos = Pool_utxo.getPayable(minimum_payout)
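    # utxos now holds one row per payout address whose pending balance meets the threshold.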
database.db.getSession().commit()
# XXX TODO: Use the current balance, timestamp, the last_attempt timestamp, last_payout, and failed_attempts
# XXX TODO: to filter and sort by order we want to make payment attempts
for utxo in utxos:
try:
# Try less often for wallets that dont answer
if utxo.amount < utxo.failure_count:
if randint(0, 11) != 0:
continue
LOGGER.warn("Trying to pay: {} {} {}".format(utxo.id, utxo.address, utxo.amount))
# Lock just this current record for update
locked_utxo = Pool_utxo.get_locked_by_id(utxo.id)
# Save and Zero the balance
original_balance = locked_utxo.amount
locked_utxo.amount = 0
# Savepoint changes - if we crash after sending coins but before commit we roll back to here.
# The pool audit service (coming soon) finds lost payouts and restores user balance
            database.db.getSession().begin_nested()
# Attempt to make the payment
timestamp = datetime.utcnow()
status = makePayout(locked_utxo.address, original_balance)
LOGGER.warn("Payout status: {}".format(status))
if status == 0:
LOGGER.warn("Made payout for {} {} {} at {}".format(locked_utxo.id, locked_utxo.address, original_balance, timestamp))
# Create a payment record
                payment_record = Pool_payment(locked_utxo.id, timestamp, locked_utxo.address, original_balance, 0, locked_utxo.failure_count, "schedule")
database.db.getSession().add(payment_record)
# Update timestamp of last payout, number of failed payout attempts
locked_utxo.amount = 0
locked_utxo.failure_count = 0
locked_utxo.last_try = timestamp
locked_utxo.last_success = timestamp
locked_utxo.total_amount += original_balance
# Commit changes
database.db.getSession().commit()
else:
LOGGER.error("Failed to make payout: {} {} {}".format(locked_utxo.id, locked_utxo.address, original_balance))
# Restore the users balance
locked_utxo.amount = original_balance
# Update number of failed payout attempts
if locked_utxo.failure_count is None:
locked_utxo.failure_count = 0
locked_utxo.failure_count += 1
locked_utxo.last_try = timestamp
# Commit changes
database.db.getSession().commit()
database.db.getSession().commit()
except Exception as e:
LOGGER.error("Failed to process utxo: {} because {}".format(utxo.id, str(e)))
database.db.getSession().rollback()
sys.exit(1)
LOGGER.warn("=== Completed {}".format(PROCESS))
if __name__ == "__main__":
main()
|
[
"grinlib.lib.get_db",
"random.randint",
"grinbase.model.pool_utxo.Pool_utxo.get_locked_by_id",
"grinbase.model.pool_utxo.Pool_utxo.getPayable",
"grinlib.lib.get_logger",
"subprocess.check_output",
"socket.socket",
"datetime.datetime.utcnow",
"grinlib.grin.get_api_url",
"grinlib.lib.get_config",
"sys.exit",
"grinbase.model.pool_payment.Pool_payment",
"os.chdir",
"urllib.parse.urlparse"
] |
[((2138, 2156), 'grinlib.grin.get_api_url', 'grin.get_api_url', ([], {}), '()\n', (2154, 2156), False, 'from grinlib import grin\n'), ((2161, 2200), 'os.chdir', 'os.chdir', (["CONFIG[PROCESS]['wallet_dir']"], {}), "(CONFIG[PROCESS]['wallet_dir'])\n", (2169, 2200), False, 'import os\n'), ((3810, 3826), 'grinlib.lib.get_config', 'lib.get_config', ([], {}), '()\n', (3824, 3826), False, 'from grinlib import lib\n'), ((3840, 3863), 'grinlib.lib.get_logger', 'lib.get_logger', (['PROCESS'], {}), '(PROCESS)\n', (3854, 3863), False, 'from grinlib import lib\n'), ((4182, 4202), 'os.chdir', 'os.chdir', (['wallet_dir'], {}), '(wallet_dir)\n', (4190, 4202), False, 'import os\n'), ((4215, 4251), 'grinbase.model.pool_utxo.Pool_utxo.getPayable', 'Pool_utxo.getPayable', (['minimum_payout'], {}), '(minimum_payout)\n', (4235, 4251), False, 'from grinbase.model.pool_utxo import Pool_utxo\n'), ((2516, 2588), 'subprocess.check_output', 'subprocess.check_output', (['send_cmd'], {'stderr': 'subprocess.STDOUT', 'shell': '(False)'}), '(send_cmd, stderr=subprocess.STDOUT, shell=False)\n', (2539, 2588), False, 'import subprocess\n'), ((3364, 3379), 'socket.socket', 'socket.socket', ([], {}), '()\n', (3377, 3379), False, 'import socket\n'), ((3964, 3976), 'grinlib.lib.get_db', 'lib.get_db', ([], {}), '()\n', (3974, 3976), False, 'from grinlib import lib\n'), ((3421, 3438), 'urllib.parse.urlparse', 'urlparse', (['address'], {}), '(address)\n', (3429, 3438), False, 'from urllib.parse import urlparse\n'), ((4867, 4902), 'grinbase.model.pool_utxo.Pool_utxo.get_locked_by_id', 'Pool_utxo.get_locked_by_id', (['utxo.id'], {}), '(utxo.id)\n', (4893, 4902), False, 'from grinbase.model.pool_utxo import Pool_utxo\n'), ((5351, 5368), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (5366, 5368), False, 'from datetime import datetime\n'), ((3147, 3164), 'urllib.parse.urlparse', 'urlparse', (['address'], {}), '(address)\n', (3155, 3164), False, 'from urllib.parse import urlparse\n'), ((5739, 5863), 'grinbase.model.pool_payment.Pool_payment', 'Pool_payment', (['locked_utxo.id', 'timestamp', 'locked_utxo.address', 'original_balance', '(0)', 'locked_utxo.failure_count', '"""schedule"""'], {}), "(locked_utxo.id, timestamp, locked_utxo.address,\n original_balance, 0, locked_utxo.failure_count, 'schedule')\n", (5751, 5863), False, 'from grinbase.model.pool_payment import Pool_payment\n'), ((7150, 7161), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (7158, 7161), False, 'import sys\n'), ((4642, 4656), 'random.randint', 'randint', (['(0)', '(11)'], {}), '(0, 11)\n', (4649, 4656), False, 'from random import randint\n')]
|
import requests  # not used directly here; 'req' below is expected to be a requests.Response
import magic
def get_content_type_ext(content_type, req=None):
content_type = content_type.lower()
if content_type.startswith('image/jpeg') or content_type.startswith('image/jpg'):
return '.jpg'
elif content_type.startswith('image/png'):
return '.png'
elif content_type.startswith('image/gif'):
return '.gif'
elif content_type.startswith('image/webp'):
return '.webp'
elif content_type.startswith('image/svg'):
return '.svg'
elif content_type.startswith('image/bmp'):
return '.bmp'
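    # Unrecognized header: if the response object is available, sniff the
    # actual bytes with libmagic and re-dispatch on the sniffed MIME type.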
elif req:
return get_content_type_ext(magic.from_buffer(req.content, mime=True))
elif content_type.startswith('text/html'):
return '.html'
elif content_type.startswith('application/pdf'):
return '.pdf'
else:
return ''
|
[
"magic.from_buffer"
] |
[((626, 667), 'magic.from_buffer', 'magic.from_buffer', (['req.content'], {'mime': '(True)'}), '(req.content, mime=True)\n', (643, 667), False, 'import magic\n')]
|
#!/usr/bin/env python3
from os.path import dirname, realpath, split,\
join, isdir, exists
from os import remove, system, mkdir
from logging import getLogger, basicConfig,\
DEBUG, INFO, ERROR
from argparse import ArgumentParser
from atexit import register
from shutil import rmtree
from jinja2 import Environment, FileSystemLoader
from docker.errors import NotFound, APIError
from docker import from_env
from scapy.contrib.gtp import *
from scapy.all import *
import time  # used by the pg_create_interface* helpers below
verbose_levels = {
'error': ERROR,
'debug': DEBUG,
'info': INFO}
class ContainerStartupError(Exception):
pass
class Container(object):
tmp = "/tmp"
cmd = "vppctl -s 0:5002"
cmd_bash = "/bin/bash"
def __init__(self, ref, name):
self._name = name
self._ref = ref
@property
def name(self):
return self._name
@property
def temp(self):
return join(self.tmp, self.name)
@property
def pg_input_file(self):
return join(self.temp, "pgi.pcap")
@property
def pg_output_file(self):
return join(self.temp, "pgo.pcap")
@property
def pg_input_file_in(self):
return join("/mnt", "pgi.pcap")
@property
def pg_output_file_in(self):
return join("/mnt", "pgo.pcap")
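    # self.temp on the host is bind-mounted at /mnt inside the container, so
    # the *_in paths are the container-side view of the same pcap files.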
def disconnect_all(self):
status = False
for net in self._ref.client.networks.list():
try:
net.disconnect(self._ref)
except APIError:
continue
status = True
return status
@classmethod
def new(cls, client, image, name):
temp = join(cls.tmp, name)
if isdir(temp):
rmtree(temp)
mkdir(temp)
ref = client.containers.run(
detach=True,
remove=True,
auto_remove=True,
image=image,
name=name,
privileged=True,
volumes={
temp: {
'bind': '/mnt',
'mode': 'rw'}})
obj = cls.get(client, name)
if not obj:
raise ContainerStartupError()
obj.disconnect_all()
return obj
@classmethod
def get(cls, client, name):
try:
ref = client.containers.get(name)
except NotFound:
pass
else:
return cls(ref, name)
def rem(self):
self._ref.kill()
def vppctl(self):
system("docker exec -it {} {}".format(self.name, self.cmd))
def bash(self):
system("docker exec -it {} {}".format(self.name, self.cmd_bash))
def vppctl_exec(self, cmd):
ec, resp = self._ref.exec_run(cmd="{} {}".format(self.cmd, cmd))
        assert ec == 0
return resp
def setup_host_interface(self, name, ip):
self.vppctl_exec("create host-interface name {}".format(name))
self.vppctl_exec("set int ip addr host-{} {}".format(name, ip))
self.vppctl_exec("set int state host-{} up".format(name))
def pg_create_interface(self, local_ip, remote_ip, local_mac, remote_mac):
# remote_ip can't have subnet mask
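        # Brief pause, presumably to let VPP finish starting before vppctl calls.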
time.sleep(2)
self.vppctl_exec("create packet-generator interface pg0")
self.vppctl_exec("set int mac address pg0 {}".format(local_mac))
self.vppctl_exec("set int ip addr pg0 {}".format(local_ip))
self.vppctl_exec(
"set ip neighbor pg0 {} {}".format(remote_ip, remote_mac))
self.vppctl_exec("set int state pg0 up")
def pg_create_interface4(self, local_ip, remote_ip, local_mac, remote_mac):
# remote_ip can't have subnet mask
time.sleep(2)
self.vppctl_exec("create packet-generator interface pg0")
self.vppctl_exec("set int mac address pg0 {}".format(local_mac))
self.vppctl_exec("set int ip addr pg0 {}".format(local_ip))
self.vppctl_exec("set ip neighbor pg0 {} {}".format(remote_ip, remote_mac))
self.vppctl_exec("set int state pg0 up")
def pg_create_interface6(self, local_ip, remote_ip, local_mac, remote_mac):
# remote_ip can't have subnet mask
time.sleep(2)
self.vppctl_exec("create packet-generator interface pg0")
self.vppctl_exec("set int mac address pg0 {}".format(local_mac))
self.vppctl_exec("set int ip addr pg0 {}".format(local_ip))
self.vppctl_exec("set ip neighbor pg0 {} {}".format(remote_ip, remote_mac))
self.vppctl_exec("set int state pg0 up")
def pg_create_interface4_name(self, ifname, local_ip, remote_ip, local_mac, remote_mac):
# remote_ip can't have subnet mask
time.sleep(2)
self.vppctl_exec("create packet-generator interface {}".format(ifname))
self.vppctl_exec("set int mac address {} {}".format(ifname, local_mac))
self.vppctl_exec("set int ip addr {} {}".format(ifname, local_ip))
self.vppctl_exec("set ip neighbor {} {} {}".format(ifname, remote_ip, remote_mac))
self.vppctl_exec("set int state {} up".format(ifname))
def pg_create_interface6_name(self, ifname, local_ip, remote_ip, local_mac, remote_mac):
# remote_ip can't have subnet mask
time.sleep(2)
self.vppctl_exec("create packet-generator interface {}".format(ifname))
self.vppctl_exec("set int mac address {} {}".format(ifname, local_mac))
self.vppctl_exec("set int ip addr {} {}".format(ifname, local_ip))
self.vppctl_exec("set ip neighbor {} {} {}".format(ifname, remote_ip, remote_mac))
self.vppctl_exec("set int state {} up".format(ifname))
def pg_enable(self):
# start packet generator
self.vppctl_exec("packet-generator enable")
def pg_create_stream(self, stream):
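        """Write scapy packet(s) to the shared pcap and register them as a pg stream."""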
wrpcap(self.pg_input_file, stream)
self.vppctl_exec(
"packet-generator new name pg-stream "
"node ethernet-input pcap {}".format(
self.pg_input_file_in))
def pg_start_capture(self):
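        """Start capturing pg0 output into the shared pcap, replacing any old capture."""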
if exists(self.pg_output_file):
remove(self.pg_output_file)
self.vppctl_exec(
"packet-generator capture pg0 pcap {}".format(
self.pg_output_file_in))
def pg_start_capture_name(self, ifname):
if exists(self.pg_output_file):
remove(self.pg_output_file)
self.vppctl_exec(
"packet-generator capture {} pcap {}".format(
ifname, self.pg_output_file_in))
def pg_read_packets(self):
return rdpcap(self.pg_output_file)
def set_ipv6_route(self, out_if_name, next_hop_ip, subnet):
self.vppctl_exec(
"ip route add {} via host-{} {}".format(
subnet, out_if_name, next_hop_ip))
def set_ipv6_route2(self, out_if_name, next_hop_ip, subnet):
self.vppctl_exec(
"ip route add {} via {} {}".format(
subnet, out_if_name, next_hop_ip))
def set_ip_pgroute(self, out_if_name, next_hop_ip, subnet):
self.vppctl_exec("ip route add {} via {} {}".format(
subnet, out_if_name, next_hop_ip))
def set_ipv6_pgroute(self, out_if_name, next_hop_ip, subnet):
self.vppctl_exec("ip route add {} via {} {}".format(
subnet, out_if_name, next_hop_ip))
def set_ipv6_default_route(self, out_if_name, next_hop_ip):
self.vppctl_exec(
"ip route add ::/0 via host-{} {}".format(
out_if_name, next_hop_ip))
def enable_trace(self, count):
self.vppctl_exec("trace add af-packet-input {}".format(count))
class Containers(object):
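    """Factory for VPP containers: builds the Docker image from jinja2
    templates and creates or looks up named Container instances."""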
def __init__(self, client, image):
self.client = client
self.image = image
def tmp_render(self, path, template, kwargs):
with open(path, "w") as fo:
fo.write(template.render(**kwargs))
register(lambda: remove(path))
def build(self, path, vpp_path):
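        """Render Dockerfile/startup.conf from templates and build the dev image."""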
env = Environment(loader=FileSystemLoader(path),
trim_blocks=True)
self.tmp_render(join(vpp_path, "Dockerfile"),
env.get_template("Dockerfile.j2"),
{'vpp_path': vpp_path})
self.tmp_render(join(vpp_path, "startup.conf"),
env.get_template("startup.conf.j2"),
{'vpp_path': vpp_path})
ref, _ = self.client.images.build(path=vpp_path,
tag=self.image, rm=True)
return ref
def release(self, path, vpp_path):
env = Environment(loader=FileSystemLoader(path),
trim_blocks=True)
self.tmp_render(join(vpp_path, "Dockerfile"),
env.get_template("Dockerfile.j2.release"),
{'vpp_path': vpp_path})
self.tmp_render(join(vpp_path, "startup.conf"),
env.get_template("startup.conf.j2"),
{'vpp_path': vpp_path})
ref, _ = self.client.images.build(path=vpp_path,
tag="srv6m-release-image", rm=True)
return ref
def new(self, name):
return Container.new(self.client, self.image, name)
def get(self, name):
return Container.get(self.client, name)
def vppctl(self, name, command=None):
container = self.get(name)
if not command:
container.vppctl()
else:
print(container.vppctl_exec(command).decode())
def bash(self, name):
container = self.get(name)
container.bash()
class Network(object):
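    """Thin wrapper around a Docker bridge network."""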
def __init__(self, ref, name):
self._name = name
self._ref = ref
@property
def name(self):
return self._name
@classmethod
def new(cls, client, name):
ref = client.networks.create(name, driver="bridge",
check_duplicate=True)
return cls(ref, name)
@classmethod
def get(cls, client, name):
try:
ref = client.networks.get(name)
except NotFound:
pass
else:
return cls(ref, name)
def rem(self):
self._ref.remove()
def connect(self, c):
self._ref.connect(c.name)
class Networks(object):
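    """Factory for named Docker bridge networks."""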
def __init__(self, client):
self.client = client
def new(self, name):
return Network.new(self.client, name)
def get(self, name):
return Network.get(self.client, name)
class Program(object):
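    """Orchestrate the four-node test topology and the SRv6/GTP test cases.

    A minimal sketch of driving it (assumes a reachable Docker daemon and an
    already built "srv6m-image"):

        p = Program()
        p.start_containers()
        p.test_ping()
        p.stop_containers()
    """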
image = "srv6m-image"
name_prefix = "hck"
    # TODO: add a description to these instances,
    # for example what each VPP node is supposed to be
    # in our topology overview
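    # Topology, as wired up in start_containers():
    #   pg0 -- vpp-1 -- net-1 -- vpp-2 -- net-2 -- vpp-3 -- net-3 -- vpp-4 -- pg0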
instance_names = ["vpp-1",
"vpp-2",
"vpp-3",
"vpp-4"]
network_names = ["net-1",
"net-2",
"net-3"]
def __init__(self, image=None, prefix=None):
self.path = dirname(realpath(__file__))
if image:
self.image = image
if prefix is not None:
self.name_prefix = prefix
client = from_env()
self.containers = Containers(client, self.image)
self.networks = Networks(client)
self.logger = getLogger(__name__)
@property
def vpp_path(self):
return self.path.rsplit("/", 4)[0]
def get_name(self, name):
if not self.name_prefix:
return name
return "{}-{}".format(self.name_prefix, name)
def stop_containers(self):
for name in self.instance_names:
instance = self.containers.get(self.get_name(name))
if instance:
instance.rem()
for name in self.network_names:
network = self.networks.get(self.get_name(name))
if network:
network.rem()
def start_containers(self):
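        """(Re)create the three networks and four containers, wire them into
        a chain, and assign link addresses plus default/static routes."""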
self.stop_containers()
networks = list()
for name in self.network_names:
networks.append(self.networks.new(self.get_name(name)))
n1, n2, n3 = networks
instances = list()
for name in self.instance_names:
instances.append(self.containers.new(self.get_name(name)))
c1, c2, c3, c4 = instances
# setup packet generator interfaces
# c1.pg_create_interface(local_ip="C::1/120", remote_ip="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b",
# local_mac="aa:bb:cc:dd:ee:01", remote_mac="aa:bb:cc:dd:ee:02")
# setup network between instances
n1.connect(c1)
n1.connect(c2)
n2.connect(c2)
n2.connect(c3)
n3.connect(c3)
n3.connect(c4)
# c1 & c2 link
c1.setup_host_interface("eth1", "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b/120")
c2.setup_host_interface("eth1", "fdf8:f53e:61e4::18/120")
# c2 & c3 link
c2.setup_host_interface("eth2", "fdf8:f53e:61e4::18/120")
c3.setup_host_interface("eth1", "fc00:db20:35b:7399::5/120")
# c3 & c4 link
c3.setup_host_interface("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/120")
c4.setup_host_interface("eth1", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/120")
# c1 > c2 default route
c1.set_ipv6_default_route("eth1", "fdf8:f53e:61e4::18")
# c2 > c3 default route
c2.set_ipv6_default_route("eth2", "fc00:db20:35b:7399::5")
# c3 > c2 default route
c3.set_ipv6_default_route("eth1", "fdf8:f53e:61e4::18")
# c4 > c3 default route
c4.set_ipv6_default_route("eth1", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b")
# c3 > c4 static route for address fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/128
c3.set_ipv6_route("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/128")
c3.set_ipv6_route("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b", "fc00:e968:6179::de52:7100/128")
def test_ping(self):
# TESTS:
# trace add af-packet-input 10
        # pg interface on c1 fc00:db20:35b:7399::5/120
        # pg interface on c4 fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/120
self.start_containers()
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface(
local_ip="fc00:db20:35b:7399::5/120",
remote_ip="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface(
local_ip="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/120",
remote_ip="fc00:e968:6179::de52:7100",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", dst="fc00:e968:6179::de52:7100") / ICMPv6EchoRequest())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_srv6(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 C::1/120
# pg interface on c4 B::1/120
self.start_containers()
print("Sleeping")
time.sleep(30)
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface(
local_ip="C::1/120",
remote_ip="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface(
local_ip="B::1/120",
remote_ip="fc00:e968:6179::de52:7100",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr D1::")
c1.vppctl_exec(
"sr policy add bsid D1::999:1 next D2:: next D3:: next D4::")
c1.vppctl_exec("sr steer l3 B::/120 via bsid D1::999:1")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec("sr localsid address D4:: behavior end.dx6 pg0 fc00:e968:6179::de52:7100")
c2.set_ipv6_route("eth2", "fc00:db20:35b:7399::5", "D3::/128")
c2.set_ipv6_route("eth1", "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", "C::/120")
c3.set_ipv6_route("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b", "D4::/128")
c3.set_ipv6_route("eth1", "fdf8:f53e:61e4::18", "C::/120")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", dst="fc00:e968:6179::de52:7100") / ICMPv6EchoRequest())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c2.enable_trace(10)
c3.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
''' T.Map is obsolete
def test_tmap(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface4(
local_ip="172.16.0.1/30",
remote_ip="172.16.0.2/30",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b")
c1.vppctl_exec(
"sr policy add bsid D1:: next D2:: next D3:: "
"gtp4_removal sr_prefix D4::/32 v6src_prefix C1::/64")
c1.vppctl_exec("sr steer l3 172.20.0.1/32 via bsid D1::")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec(
"sr localsid prefix D4::/32 "
"behavior end.m.gtp4.e v4src_position 64")
c2.set_ipv6_route("eth2", "fc00:db20:35b:7399::5", "D3::/128")
c2.set_ipv6_route("eth1", "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", "C::/120")
c3.set_ipv6_route("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b", "D4::/32")
c3.set_ipv6_route("eth1", "fdf8:f53e:61e4::18", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.20.0.1/32")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IP(src="172.20.0.2", dst="172.20.0.1") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
IP(src="172.16.31.10", dst="192.168.127.12") /
ICMP())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_tmap_5g(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface4(
local_ip="172.16.0.1/30",
remote_ip="172.16.0.2/30",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b")
c1.vppctl_exec(
"sr policy add bsid D1:: next D2:: next D3:: "
"gtp4_removal sr_prefix D4::/32 v6src_prefix C1::/64")
c1.vppctl_exec("sr steer l3 172.20.0.1/32 via bsid D1::")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec(
"sr localsid prefix D4::/32 "
"behavior end.m.gtp4.e v4src_position 64")
c2.set_ipv6_route("eth2", "fc00:db20:35b:7399::5", "D3::/128")
c2.set_ipv6_route("eth1", "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", "C::/120")
c3.set_ipv6_route("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b", "D4::/32")
c3.set_ipv6_route("eth1", "fdf8:f53e:61e4::18", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.20.0.1/32")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IP(src="172.20.0.2", dst="172.20.0.1") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
GTPPDUSessionContainer(R=1, QFI=3) /
IP(src="172.16.31.10", dst="192.168.127.12") /
ICMP())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_tmap_ipv6(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface4(
local_ip="172.16.0.1/30",
remote_ip="172.16.0.2/30",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b")
c1.vppctl_exec(
"sr policy add bsid D1:: next D2:: next D3:: "
"gtp4_removal sr_prefix D4::/32 v6src_prefix C1::/64")
c1.vppctl_exec("sr steer l3 172.20.0.1/32 via bsid D1::")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec(
"sr localsid prefix D4::/32 "
"behavior end.m.gtp4.e v4src_position 64")
c2.set_ipv6_route("eth2", "fc00:db20:35b:7399::5", "D3::/128")
c2.set_ipv6_route("eth1", "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", "C::/120")
c3.set_ipv6_route("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b", "D4::/32")
c3.set_ipv6_route("eth1", "fdf8:f53e:61e4::18", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.20.0.1/32")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IP(src="172.20.0.2", dst="172.20.0.1") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
IPv6(src="2001::1", dst="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b") /
ICMPv6EchoRequest())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_tmap_ipv6_5g(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface4(
local_ip="172.16.0.1/30",
remote_ip="172.16.0.2/30",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b")
c1.vppctl_exec(
"sr policy add bsid D1:: next D2:: next D3:: "
"gtp4_removal sr_prefix D4::/32 v6src_prefix C1::/64")
c1.vppctl_exec("sr steer l3 172.20.0.1/32 via bsid D1::")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec(
"sr localsid prefix D4::/32 "
"behavior end.m.gtp4.e v4src_position 64")
c2.set_ipv6_route("eth2", "fc00:db20:35b:7399::5", "D3::/128")
c2.set_ipv6_route("eth1", "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", "C::/120")
c3.set_ipv6_route("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b", "D4::/32")
c3.set_ipv6_route("eth1", "fdf8:f53e:61e4::18", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.20.0.1/32")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IP(src="172.20.0.2", dst="172.20.0.1") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
GTPPDUSessionContainer(R=1, QFI=3) /
IPv6(src="2001::1", dst="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b") /
ICMPv6EchoRequest())
print("Sending packet on {}:".format(c1.name))
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
'''
def test_gtp4(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface4(
local_ip="172.16.0.1/30",
remote_ip="172.16.0.2/30",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec("sr policy add bsid D5:: behavior t.m.gtp4.d D4::/32 v6src_prefix C1::/64 nhtype ipv4")
c1.vppctl_exec("sr steer l3 172.20.0.1/32 via bsid D5::")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec(
"sr localsid prefix D4::/32 "
"behavior end.m.gtp4.e v4src_position 64")
c2.set_ipv6_route("eth2", "fc00:db20:35b:7399::5", "D3::/128")
c2.set_ipv6_route("eth1", "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", "C::/120")
c3.set_ipv6_route("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b", "D4::/32")
c3.set_ipv6_route("eth1", "fdf8:f53e:61e4::18", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.20.0.1/32")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IP(src="172.20.0.2", dst="172.20.0.1") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
IP(src="172.16.31.10", dst="192.168.127.12") /
ICMP())
print("Sending packet on {}:".format(c1.name))
p.show2()
time.sleep(10)
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp4_usid(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface4(
local_ip="172.16.0.1/30",
remote_ip="172.16.0.2/30",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b")
c1.vppctl_exec("sr policy add bsid D4:: next D2:1111:aaaa:bbbb::")
c1.vppctl_exec("sr policy add bsid D5:: behavior t.m.gtp4.d D4::/32 v6src_prefix C1::/64 nhtype ipv4")
c1.vppctl_exec("sr steer l3 172.20.0.1/32 via bsid D5::")
c2.vppctl_exec("sr localsid prefix D2:1111:aaaa::/48 behavior end usid 16")
c3.vppctl_exec("sr localsid prefix D2:1111:bbbb::/48 behavior end usid 16")
c4.vppctl_exec(
"sr localsid prefix D4::/32 "
"behavior end.m.gtp4.e v4src_position 64")
c2.set_ipv6_route("eth2", "fc00:db20:35b:7399::5", "D2:1111:bbbb::/48")
c2.set_ipv6_route("eth1", "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", "C::/120")
c3.set_ipv6_route("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b", "D4::/32")
c3.set_ipv6_route("eth1", "fdf8:f53e:61e4::18", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.20.0.1/32")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IP(src="172.20.0.2", dst="172.20.0.1") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
IP(src="172.16.31.10", dst="192.168.127.12") /
ICMP())
print("Sending packet on {}:".format(c1.name))
p.show2()
time.sleep(10)
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp4_5g(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface4(
local_ip="172.16.0.1/30",
remote_ip="172.16.0.2/30",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec("sr policy add bsid D5:: behavior t.m.gtp4.d D4::/32 v6src_prefix C1::/64 nhtype ipv4")
c1.vppctl_exec("sr steer l3 172.20.0.1/32 via bsid D5::")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec(
"sr localsid prefix D4::/32 "
"behavior end.m.gtp4.e v4src_position 64")
c2.set_ipv6_route("eth2", "fc00:db20:35b:7399::5", "D3::/128")
c2.set_ipv6_route("eth1", "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", "C::/120")
c3.set_ipv6_route("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b", "D4::/32")
c3.set_ipv6_route("eth1", "fdf8:f53e:61e4::18", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.20.0.1/32")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IP(src="172.20.0.2", dst="172.20.0.1") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
GTPPDUSessionContainer(type=1, R=1, QFI=3) /
IP(src="172.16.31.10", dst="192.168.127.12") /
ICMP())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp4_echo(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface4(
local_ip="172.16.0.1/30",
remote_ip="172.16.0.2/30",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec("sr policy add bsid D5:: behavior t.m.gtp4.d D4::/32 v6src_prefix C1::/64 nhtype ipv4")
c1.vppctl_exec("sr steer l3 172.20.0.1/32 via bsid D5::")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec(
"sr localsid prefix D4::/32 "
"behavior end.m.gtp4.e v4src_position 64")
c2.set_ipv6_route("eth2", "fc00:db20:35b:7399::5", "D3::/128")
c2.set_ipv6_route("eth1", "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", "C::/120")
c3.set_ipv6_route("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b", "D4::/32")
c3.set_ipv6_route("eth1", "fdf8:f53e:61e4::18", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.20.0.1/32")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IP(src="172.20.0.2", dst="172.20.0.1") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="echo_request", S=1, teid=200, seq=200))
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp4_reply(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface4(
local_ip="172.16.0.1/30",
remote_ip="172.16.0.2/30",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="192.168.127.12/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec("sr policy add bsid D5:: behavior t.m.gtp4.d D4::/32 v6src_prefix C1::/64 nhtype ipv4")
c1.vppctl_exec("sr steer l3 172.20.0.1/32 via bsid D5::")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec(
"sr localsid prefix D4::/32 "
"behavior end.m.gtp4.e v4src_position 64")
c2.set_ipv6_route("eth2", "fc00:db20:35b:7399::5", "D3::/128")
c2.set_ipv6_route("eth1", "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", "C::/120")
c3.set_ipv6_route("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b", "D4::/32")
c3.set_ipv6_route("eth1", "fdf8:f53e:61e4::18", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.20.0.1/32")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IP(src="172.20.0.2", dst="172.20.0.1") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="echo_response", S=1, teid=200, seq=200))
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp4_error(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface4(
local_ip="172.16.0.1/30",
remote_ip="172.16.0.2/30",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="192.168.127.12/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec("sr policy add bsid D5:: behavior t.m.gtp4.d D4::/32 v6src_prefix C1::/64 nhtype ipv4")
c1.vppctl_exec("sr steer l3 172.20.0.1/32 via bsid D5::")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec(
"sr localsid prefix D4::/32 "
"behavior end.m.gtp4.e v4src_position 64")
c2.set_ipv6_route("eth2", "fc00:db20:35b:7399::5", "D3::/128")
c2.set_ipv6_route("eth1", "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", "C::/120")
c3.set_ipv6_route("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b", "D4::/32")
c3.set_ipv6_route("eth1", "fdf8:f53e:61e4::18", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.20.0.1/32")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IP(src="172.20.0.2", dst="172.20.0.1") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="error_indication", S=1, teid=200, seq=200)/
IE_TEIDI(TEIDI=65535)/IE_GSNAddress(address="1.1.1.1")/
IE_PrivateExtension(extention_value="z"))
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp4_ipv6(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface4(
local_ip="172.16.0.1/30",
remote_ip="172.16.0.2/30",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec("sr policy add bsid D5:: behavior t.m.gtp4.d D4::/32 v6src_prefix C1::/64")
c1.vppctl_exec("sr steer l3 172.20.0.1/32 via bsid D5::")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec(
"sr localsid prefix D4::/32 "
"behavior end.m.gtp4.e v4src_position 64")
c2.set_ipv6_route("eth2", "fc00:db20:35b:7399::5", "D3::/128")
c2.set_ipv6_route("eth1", "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", "C::/120")
c3.set_ipv6_route("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b", "D4::/32")
c3.set_ipv6_route("eth1", "fdf8:f53e:61e4::18", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.20.0.1/32")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IP(src="172.20.0.2", dst="172.20.0.1") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
IPv6(src="2001::1", dst="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b") /
ICMPv6EchoRequest())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp4_ipv6_5g(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface4(
local_ip="172.16.0.1/30",
remote_ip="172.16.0.2/30",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec("sr policy add bsid D5:: behavior t.m.gtp4.d D4::/32 v6src_prefix C1::/64")
c1.vppctl_exec("sr steer l3 172.20.0.1/32 via bsid D5::")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec(
"sr localsid prefix D4::/32 "
"behavior end.m.gtp4.e v4src_position 64")
c2.set_ipv6_route("eth2", "fc00:db20:35b:7399::5", "D3::/128")
c2.set_ipv6_route("eth1", "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", "C::/120")
c3.set_ipv6_route("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b", "D4::/32")
c3.set_ipv6_route("eth1", "fdf8:f53e:61e4::18", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.20.0.1/32")
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IP(src="172.20.0.2", dst="172.20.0.1") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
GTPPDUSessionContainer(R=1, QFI=3) /
IPv6(src="2001::1", dst="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b") /
ICMPv6EchoRequest())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp6_drop_in(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
print("Deleting the old containers...")
time.sleep(30)
print("Starting the new containers...")
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface(
local_ip="fc00:db20:35b:7399::5/120",
remote_ip="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface(
local_ip="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/120",
remote_ip="fc00:e968:6179::de52:7100",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec(
"sr localsid prefix D::/64 behavior end.m.gtp6.d.di D4::/64")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec("sr localsid prefix D4::/64 behavior end.m.gtp6.e")
c2.set_ipv6_route("eth2", "fc00:db20:35b:7399::5", "D3::/128")
c2.set_ipv6_route("eth1", "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", "C::/120")
c3.set_ipv6_route("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b", "D4::/32")
c3.set_ipv6_route("eth1", "fdf8:f53e:61e4::18", "C::/120")
c4.set_ip_pgroute("pg0", "fc00:e968:6179::de52:7100", "D::2/128")
print("Waiting...")
time.sleep(30)
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", dst="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
IP(src="172.16.31.10", dst="192.168.127.12") /
ICMP())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp6_drop_in_5g(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
print("Deleting the old containers...")
time.sleep(30)
print("Starting the new containers...")
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface(
local_ip="C::1/120",
remote_ip="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface(
local_ip="B::1/120",
remote_ip="fc00:e968:6179::de52:7100",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec(
"sr localsid prefix D::/64 behavior end.m.gtp6.d.di D4::/64")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec("sr localsid prefix D4::/64 behavior end.m.gtp6.e")
c2.set_ipv6_route("eth2", "fc00:db20:35b:7399::5", "D3::/128")
c2.set_ipv6_route("eth1", "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", "C::/120")
c3.set_ipv6_route("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b", "D4::/32")
c3.set_ipv6_route("eth1", "fdf8:f53e:61e4::18", "C::/120")
c4.set_ip_pgroute("pg0", "fc00:e968:6179::de52:7100", "D::2/128")
print("Waiting...")
time.sleep(30)
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", dst="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
GTPPDUSessionContainer(type=1, R=1, QFI=3) /
IP(src="172.16.31.10", dst="192.168.127.12") /
ICMP())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp6_drop_in_echo(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
print("Deleting the old containers...")
time.sleep(30)
print("Starting the new containers...")
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface(
local_ip="C::1/120",
remote_ip="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface(
local_ip="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/120",
remote_ip="fc00:e968:6179::de52:7100",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec(
"sr localsid prefix D::/64 behavior end.m.gtp6.d.di D4::/64")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec("sr localsid prefix D4::/64 behavior end.m.gtp6.e")
c2.set_ipv6_route("eth2", "fc00:db20:35b:7399::5", "D3::/128")
c2.set_ipv6_route("eth1", "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", "C::/120")
c3.set_ipv6_route("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b", "D4::/32")
c3.set_ipv6_route("eth1", "fdf8:f53e:61e4::18", "C::/120")
c4.set_ip_pgroute("pg0", "fc00:e968:6179::de52:7100", "D::2/128")
print("Waiting...")
time.sleep(30)
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", dst="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="echo_request", S=1, teid=200, seq=300))
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp6_drop_in_reply(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
print("Deleting the old containers...")
time.sleep(30)
print("Starting the new containers...")
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface(
local_ip="C::1/120",
remote_ip="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface(
local_ip="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/120",
remote_ip="fc00:e968:6179::de52:7100",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec(
"sr localsid prefix D::/64 behavior end.m.gtp6.d.di D4::/64")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec("sr localsid prefix D4::/64 behavior end.m.gtp6.e")
c2.set_ipv6_route("eth2", "fc00:db20:35b:7399::5", "D3::/128")
c2.set_ipv6_route("eth1", "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", "C::/120")
c3.set_ipv6_route("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b", "D4::/32")
c3.set_ipv6_route("eth1", "fdf8:f53e:61e4::18", "C::/120")
c4.set_ip_pgroute("pg0", "fc00:e968:6179::de52:7100", "D::2/128")
print("Waiting...")
time.sleep(30)
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", dst="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="echo_response", S=1, teid=200, seq=300))
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp6_drop_in_error(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/120
self.start_containers()
print("Deleting the old containers...")
time.sleep(30)
print("Starting the new containers...")
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface(
local_ip="fc00:db20:35b:7399::5/120",
remote_ip="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface(
local_ip="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/120",
remote_ip="fc00:e968:6179::de52:7100",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec(
"sr localsid prefix D::/64 behavior end.m.gtp6.d.di D4::/64")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec("sr localsid prefix D4::/64 behavior end.m.gtp6.e")
c2.set_ipv6_route("eth2", "fc00:db20:35b:7399::5", "D3::/128")
c2.set_ipv6_route("eth1", "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", "C::/120")
c3.set_ipv6_route("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b", "D4::/32")
c3.set_ipv6_route("eth1", "fdf8:f53e:61e4::18", "C::/120")
c4.set_ip_pgroute("pg0", "fc00:e968:6179::de52:7100", "D::2/128")
print("Waiting...")
time.sleep(30)
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", dst="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="error_indication", S=1, teid=200, seq=300)/
IE_TEIDI(TEIDI=65535)/IE_GSNAddress(address="1.1.1.1")/
IE_PrivateExtension(extention_value="z"))
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp6_drop_in_ipv6(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
print("Deleting the old containers...")
time.sleep(30)
print("Starting the new containers...")
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface(
local_ip="fc00:db20:35b:7399::5/120",
remote_ip="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface(
local_ip="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/120",
remote_ip="fc00:e968:6179::de52:7100",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec(
"sr localsid prefix D::/64 behavior end.m.gtp6.d.di D4::/64")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec("sr localsid prefix D4::/64 behavior end.m.gtp6.e")
c2.set_ipv6_route("eth2", "fc00:db20:35b:7399::5", "D3::/128")
c2.set_ipv6_route("eth1", "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", "C::/120")
c3.set_ipv6_route("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b", "D4::/32")
c3.set_ipv6_route("eth1", "fdf8:f53e:61e4::18", "C::/120")
c4.set_ip_pgroute("pg0", "fc00:e968:6179::de52:7100", "D::2/128")
print("Waiting...")
time.sleep(30)
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", dst="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
IPv6(src="2001::1", dst="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b") /
ICMPv6EchoRequest())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp6_drop_in_ipv6_5g(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
print("Deleting the old containers...")
time.sleep(30)
print("Starting the new containers...")
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface(
local_ip="C::1/120",
remote_ip="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface(
local_ip="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/120",
remote_ip="fc00:e968:6179::de52:7100",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec(
"sr localsid prefix D::/64 behavior end.m.gtp6.d.di D4::/64")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec("sr localsid prefix D4::/64 behavior end.m.gtp6.e")
c2.set_ipv6_route("eth2", "fc00:db20:35b:7399::5", "D3::/128")
c2.set_ipv6_route("eth1", "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", "C::/120")
c3.set_ipv6_route("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b", "D4::/32")
c3.set_ipv6_route("eth1", "fdf8:f53e:61e4::18", "C::/120")
c4.set_ip_pgroute("pg0", "fc00:e968:6179::de52:7100", "D::2/128")
print("Waiting...")
time.sleep(30)
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", dst="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
GTPPDUSessionContainer(R=1, QFI=3) /
IPv6(src="2001::1", dst="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b") /
ICMPv6EchoRequest())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp6(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
print("Deleting the old containers...")
time.sleep(30)
print("Starting the new containers...")
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface(
local_ip="C::1/120",
remote_ip="C::2",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec(
"sr localsid prefix D::/64 behavior end.m.gtp6.d D4::/64")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec("set ip neighbor pg0 1.0.0.1 aa:bb:cc:dd:ee:22")
c4.vppctl_exec("sr localsid prefix D4::/64 behavior end.dt4 2")
c2.set_ipv6_route("eth2", "fc00:db20:35b:7399::5", "D3::/128")
c2.set_ipv6_route("eth1", "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", "C::/120")
c3.set_ipv6_route("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b", "D4::/32")
c3.set_ipv6_route("eth1", "fdf8:f53e:61e4::18", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.16.31.10/32")
print("Waiting...")
time.sleep(30)
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", dst="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
IP(src="192.168.127.12", dst="172.16.31.10") /
ICMP())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp6_5g(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
print("Deleting the old containers...")
time.sleep(30)
print("Starting the new containers...")
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface(
local_ip="fc00:db20:35b:7399::5/120",
remote_ip="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface4(
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr Afc00:db20:35b:7399::5")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec(
"sr localsid prefix D::/64 behavior end.m.gtp6.d D4::/64")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec("set ip neighbor pg0 1.0.0.1 aa:bb:cc:dd:ee:22")
c4.vppctl_exec("sr localsid prefix D4::/64 behavior end.dt4 2")
c2.set_ipv6_route("eth2", "fc00:db20:35b:7399::5", "D3::/128")
c2.set_ipv6_route("eth1", "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", "C::/120")
c3.set_ipv6_route("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b", "D4::/32")
c3.set_ipv6_route("eth1", "fdf8:f53e:61e4::18", "C::/120")
c4.set_ip_pgroute("pg0", "1.0.0.1", "172.16.31.10/32")
print("Waiting...")
time.sleep(30)
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", dst="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
GTPPDUSessionContainer(R=1, QFI=3) /
IP(src="192.168.127.12", dst="172.16.31.10") /
ICMP())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp6_ipv6(self):
# TESTS:
# trace add af-packet-input 10
# pg interface on c1 172.20.0.1
# pg interface on c4 B::1/120
self.start_containers()
print("Deleting the old containers...")
time.sleep(30)
print("Starting the new containers...")
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface(
local_ip="fc00:db20:35b:7399::5/120",
remote_ip="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface(
local_ip="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/120",
remote_ip="fc00:e968:6179::de52:7100",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec(
"sr localsid prefix D::/64 behavior end.m.gtp6.d D4::/64")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec("set ip neighbor pg0 fc00:e968:6179::de52:7100 aa:bb:cc:dd:ee:22")
c4.vppctl_exec("sr localsid prefix D4::/64 behavior end.dt6 2")
c2.set_ipv6_route("eth2", "fc00:db20:35b:7399::5", "D3::/128")
c2.set_ipv6_route("eth1", "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", "C::/120")
c3.set_ipv6_route("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b", "D4::/32")
c3.set_ipv6_route("eth1", "fdf8:f53e:61e4::18", "C::/120")
c4.set_ipv6_pgroute("pg0", "fc00:e968:6179::de52:7100", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/128")
print("Waiting...")
time.sleep(30)
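        # Plain GTP-U (no PDU session container) carrying an inner IPv6
        # ICMPv6 echo request.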
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", dst="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
IPv6(src="2001::1", dst="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b") /
ICMPv6EchoRequest())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp6_ipv6_5g(self):
# TESTS:
# trace add af-packet-input 10
        # pg interface on c1 C::1/120
        # pg interface on c4 fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/120
self.start_containers()
print("Deleting the old containers...")
time.sleep(30)
print("Starting the new containers...")
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c2 = self.containers.get(self.get_name(self.instance_names[1]))
c3 = self.containers.get(self.get_name(self.instance_names[2]))
c4 = self.containers.get(self.get_name(self.instance_names[-1]))
c1.pg_create_interface(
local_ip="C::1/120",
remote_ip="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c4.pg_create_interface(
local_ip="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/120",
remote_ip="fc00:e968:6179::de52:7100",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
c1.vppctl_exec("set sr encaps source addr fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b")
c1.vppctl_exec("sr policy add bsid D4:: next D2:: next D3::")
c1.vppctl_exec(
"sr localsid prefix D::/64 behavior end.m.gtp6.d D4::/64")
c2.vppctl_exec("sr localsid address D2:: behavior end")
c3.vppctl_exec("sr localsid address D3:: behavior end")
c4.vppctl_exec("set ip neighbor pg0 fc00:e968:6179::de52:7100 aa:bb:cc:dd:ee:22")
c4.vppctl_exec("sr localsid prefix D4::/64 behavior end.dt6 2")
c2.set_ipv6_route("eth2", "fc00:db20:35b:7399::5", "D3::/128")
c2.set_ipv6_route("eth1", "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", "C::/120")
c3.set_ipv6_route("eth2", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b", "D4::/32")
c3.set_ipv6_route("eth1", "fdf8:f53e:61e4::18", "C::/120")
        c4.set_ipv6_pgroute("pg0", "fc00:e968:6179::de52:7100", "fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/128")
print("Waiting...")
time.sleep(30)
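        # Same flow as test_gtp6_ipv6, but the GTP-U header carries a 5G PDU
        # session container (QFI=3).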
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", dst="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
GTPPDUSessionContainer(R=1, QFI=3) /
IPv6(src="2001::1", dst="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b") /
ICMPv6EchoRequest())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c4.enable_trace(10)
c4.pg_start_capture()
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c4.name))
for p in c4.pg_read_packets():
p.show2()
def test_gtp6_dt(self):
# TESTS:
# trace add af-packet-input 10
        # pg interface pg0 on c1 C::1/120 (GTP side)
        # pg interface pg1 on c1 1.0.0.2/30 (decap side)
self.start_containers()
print("Deleting the old containers...")
time.sleep(30)
print("Starting the new containers...")
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c1.pg_create_interface6_name(
ifname="pg0",
local_ip="C::1/120",
remote_ip="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c1.pg_create_interface4_name(
ifname="pg1",
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
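        # Single-node variant: c1 alone terminates GTP over IPv6 with
        # End.M.GTP6.DT46 and forwards the decapsulated packet out pg1.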
c1.vppctl_exec("set sr encaps source addr fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b")
c1.vppctl_exec(
"sr localsid prefix D::/64 behavior end.m.gtp6.dt46 fib-table 0 local-fib-table 0")
c1.vppctl_exec("set ip neighbor pg1 1.0.0.1 aa:bb:cc:dd:ee:22")
c1.set_ip_pgroute("pg1", "1.0.0.1", "172.16.31.10/32")
print("Waiting...")
time.sleep(30)
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IPv6(src="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b", dst="fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
IP(src="192.168.127.12", dst="172.16.31.10") /
ICMP())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c1.pg_start_capture_name(ifname="pg1")
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c1.name))
for p in c1.pg_read_packets():
p.show2()
def test_gtp4_dt(self):
# TESTS:
# trace add af-packet-input 10
        # pg interface pg0 on c1 172.16.0.1/30 (GTP side)
        # pg interface pg1 on c1 1.0.0.2/30 (decap side)
self.start_containers()
print("Deleting the old containers...")
time.sleep(30)
print("Starting the new containers...")
c1 = self.containers.get(self.get_name(self.instance_names[0]))
c1.pg_create_interface4_name(
ifname="pg0",
local_ip="172.16.0.1/30",
remote_ip="172.16.0.2",
local_mac="aa:bb:cc:dd:ee:01",
remote_mac="aa:bb:cc:dd:ee:02")
c1.pg_create_interface4_name(
ifname="pg1",
local_ip="1.0.0.2/30",
remote_ip="1.0.0.1",
local_mac="aa:bb:cc:dd:ee:11",
remote_mac="aa:bb:cc:dd:ee:22")
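        # GTP over IPv4 termination: traffic to 172.20.0.1 is steered into a
        # T.M.GTP4.DT4 policy that strips GTP and looks up the inner IPv4
        # packet in fib-table 0.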
c1.vppctl_exec("set sr encaps source addr fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b")
c1.vppctl_exec("sr policy add bsid D5:: behavior t.m.gtp4.dt4 fib-table 0")
c1.vppctl_exec("sr steer l3 172.20.0.1/32 via bsid D5::")
c1.vppctl_exec("set ip neighbor pg1 1.0.0.1 aa:bb:cc:dd:ee:22")
c1.set_ip_pgroute("pg1", "1.0.0.1", "172.16.31.10/32")
print("Waiting...")
time.sleep(30)
p = (Ether(src="aa:bb:cc:dd:ee:02", dst="aa:bb:cc:dd:ee:01") /
IP(src="172.20.0.2", dst="172.20.0.1") /
UDP(sport=2152, dport=2152) /
GTP_U_Header(gtp_type="g_pdu", teid=200) /
IP(src="192.168.127.12", dst="172.16.31.10") /
ICMP())
print("Sending packet on {}:".format(c1.name))
p.show2()
c1.enable_trace(10)
c1.pg_start_capture_name(ifname="pg1")
c1.pg_create_stream(p)
c1.pg_enable()
# timeout (sleep) if needed
print("Sleeping")
time.sleep(5)
print("Receiving packet on {}:".format(c1.name))
for p in c1.pg_read_packets():
p.show2()
def status_containers(self):
print("Instances:")
for i, name in enumerate(self.instance_names):
name = self.get_name(name)
print("\t[{}] {} - {}".format(
i, name,
"running" if self.containers.get(name) else "missing"))
print("Networks:")
for i, name in enumerate(self.network_names):
name = self.get_name(name)
print("\t[{}] {} - {}".format(
i, name,
"running" if self.networks.get(name) else "missing"))
def build_image(self):
print("VPP Path (build): {}".format(self.vpp_path))
self.containers.build(self.path, self.vpp_path)
def release_image(self):
print("VPP Path (release): {}".format(self.vpp_path))
instance = self.containers.new("release-build")
system(
"docker cp release-build:{}/vpp-package.tgz {}/".format(
self.vpp_path, self.vpp_path))
instance.rem()
self.containers.release(self.path, self.vpp_path)
system("rm -rf {}/vpp-package.tgz".format(self.vpp_path))
def vppctl(self, index, command=None):
if index >= len(self.instance_names):
return
name = self.get_name(self.instance_names[index])
self.logger.error("connecting to: {}".format(name))
self.containers.vppctl(name, command)
def bash(self, index):
if index >= len(self.instance_names):
return
name = self.get_name(self.instance_names[index])
self.logger.error("connecting to: {}".format(name))
self.containers.bash(name)
def get_args():
parser = ArgumentParser()
parser.add_argument("--verbose", choices=['error', 'debug', 'info'])
parser.add_argument('--image', choices=['debug', 'release'])
subparsers = parser.add_subparsers()
p1 = subparsers.add_parser(
"infra", help="Infrastructure related commands.")
p1.add_argument(
"op",
choices=[
'stop',
'start',
'status',
'restart',
'build',
'release'])
p1.add_argument("--prefix")
p1.add_argument("--image")
p2 = subparsers.add_parser("cmd", help="Instance related commands.")
p2.add_argument("op", choices=['vppctl', 'bash'])
p2.add_argument(
"index",
type=int,
help="Container instance index. (./runner.py infra status)")
p2.add_argument(
"--command", help="Only vppctl supports this optional argument.")
p3 = subparsers.add_parser("test", help="Test related commands.")
p3.add_argument(
"op",
choices=[
"ping",
"srv6",
# "tmap",
# "tmap_5g",
# "tmap_ipv6",
# "tmap_ipv6_5g",
"gtp4",
"gtp4_usid",
"gtp4_5g",
"gtp4_echo",
"gtp4_reply",
"gtp4_error",
"gtp4_ipv6",
"gtp4_ipv6_5g",
"gtp6_drop_in",
"gtp6_drop_in_5g",
"gtp6_drop_in_echo",
"gtp6_drop_in_reply",
"gtp6_drop_in_error",
"gtp6_drop_in_ipv6",
"gtp6_drop_in_ipv6_5g",
"gtp6",
"gtp6_5g",
"gtp6_ipv6",
"gtp6_ipv6_5g",
"gtp6_dt",
"gtp4_dt"])
args = parser.parse_args()
if not hasattr(args, "op") or not args.op:
parser.print_help(sys.stderr)
sys.exit(1)
return vars(args)
def main(op=None, prefix=None, verbose=None,
image=None, index=None, command=None):
if verbose:
basicConfig(level=verbose_levels[verbose])
if image == 'release':
image = "srv6m-release-image"
elif image == 'debug':
image = "srv6m-image"
else:
image = "srv6m-image"
print("Target image: {}".format(image))
program = Program(image, prefix)
try:
if op == 'build':
program.build_image()
elif op == 'release':
program.release_image()
elif op == 'stop':
program.stop_containers()
elif op == 'start':
program.start_containers()
elif op == 'status':
program.status_containers()
elif op == 'vppctl':
program.vppctl(index, command)
elif op == 'bash':
program.bash(index)
elif op == 'ping':
program.test_ping()
elif op == 'srv6':
program.test_srv6()
# elif op == 'tmap':
# program.test_tmap()
# elif op == 'tmap_5g':
# program.test_tmap_5g()
# elif op == 'tmap_ipv6':
# program.test_tmap_ipv6()
# elif op == 'tmap_ipv6_5g':
# program.test_tmap_ipv6_5g()
elif op == 'gtp4':
program.test_gtp4()
elif op == 'gtp4_usid':
program.test_gtp4_usid()
elif op == 'gtp4_5g':
program.test_gtp4_5g()
elif op == 'gtp4_echo':
program.test_gtp4_echo()
elif op == 'gtp4_reply':
program.test_gtp4_reply()
elif op == 'gtp4_error':
program.test_gtp4_error()
elif op == 'gtp4_ipv6':
program.test_gtp4_ipv6()
elif op == 'gtp4_ipv6_5g':
program.test_gtp4_ipv6_5g()
elif op == 'gtp6_drop_in':
program.test_gtp6_drop_in()
elif op == 'gtp6_drop_in_5g':
program.test_gtp6_drop_in_5g()
elif op == 'gtp6_drop_in_echo':
program.test_gtp6_drop_in_echo()
elif op == 'gtp6_drop_in_reply':
program.test_gtp6_drop_in_reply()
elif op == 'gtp6_drop_in_error':
program.test_gtp6_drop_in_error()
elif op == 'gtp6_drop_in_ipv6':
program.test_gtp6_drop_in_ipv6()
elif op == 'gtp6_drop_in_ipv6_5g':
program.test_gtp6_drop_in_ipv6_5g()
elif op == 'gtp6':
program.test_gtp6()
elif op == 'gtp6_5g':
program.test_gtp6_5g()
elif op == 'gtp6_ipv6':
program.test_gtp6_ipv6()
elif op == 'gtp6_ipv6_5g':
program.test_gtp6_ipv6_5g()
elif op == 'gtp6_dt':
program.test_gtp6_dt()
elif op == 'gtp4_dt':
program.test_gtp4_dt()
except Exception:
program.logger.exception("")
rc = 1
else:
rc = 0
return rc
if __name__ == "__main__":
sys.exit(main(**get_args()))
|
[
"docker.from_env",
"os.mkdir",
"os.remove",
"argparse.ArgumentParser",
"logging.basicConfig",
"os.path.isdir",
"os.path.realpath",
"os.path.exists",
"jinja2.FileSystemLoader",
"shutil.rmtree",
"os.path.join",
"logging.getLogger"
] |
[((85469, 85485), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (85483, 85485), False, 'from argparse import ArgumentParser\n'), ((899, 924), 'os.path.join', 'join', (['self.tmp', 'self.name'], {}), '(self.tmp, self.name)\n', (903, 924), False, 'from os.path import dirname, realpath, split, join, isdir, exists\n'), ((984, 1011), 'os.path.join', 'join', (['self.temp', '"""pgi.pcap"""'], {}), "(self.temp, 'pgi.pcap')\n", (988, 1011), False, 'from os.path import dirname, realpath, split, join, isdir, exists\n'), ((1072, 1099), 'os.path.join', 'join', (['self.temp', '"""pgo.pcap"""'], {}), "(self.temp, 'pgo.pcap')\n", (1076, 1099), False, 'from os.path import dirname, realpath, split, join, isdir, exists\n'), ((1162, 1186), 'os.path.join', 'join', (['"""/mnt"""', '"""pgi.pcap"""'], {}), "('/mnt', 'pgi.pcap')\n", (1166, 1186), False, 'from os.path import dirname, realpath, split, join, isdir, exists\n'), ((1250, 1274), 'os.path.join', 'join', (['"""/mnt"""', '"""pgo.pcap"""'], {}), "('/mnt', 'pgo.pcap')\n", (1254, 1274), False, 'from os.path import dirname, realpath, split, join, isdir, exists\n'), ((1616, 1635), 'os.path.join', 'join', (['cls.tmp', 'name'], {}), '(cls.tmp, name)\n', (1620, 1635), False, 'from os.path import dirname, realpath, split, join, isdir, exists\n'), ((1647, 1658), 'os.path.isdir', 'isdir', (['temp'], {}), '(temp)\n', (1652, 1658), False, 'from os.path import dirname, realpath, split, join, isdir, exists\n'), ((1693, 1704), 'os.mkdir', 'mkdir', (['temp'], {}), '(temp)\n', (1698, 1704), False, 'from os import remove, system, mkdir\n'), ((5978, 6005), 'os.path.exists', 'exists', (['self.pg_output_file'], {}), '(self.pg_output_file)\n', (5984, 6005), False, 'from os.path import dirname, realpath, split, join, isdir, exists\n'), ((6230, 6257), 'os.path.exists', 'exists', (['self.pg_output_file'], {}), '(self.pg_output_file)\n', (6236, 6257), False, 'from os.path import dirname, realpath, split, join, isdir, exists\n'), ((11097, 11107), 'docker.from_env', 'from_env', ([], {}), '()\n', (11105, 11107), False, 'from docker import from_env\n'), ((11229, 11248), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (11238, 11248), False, 'from logging import getLogger, basicConfig, DEBUG, INFO, ERROR\n'), ((87478, 87520), 'logging.basicConfig', 'basicConfig', ([], {'level': 'verbose_levels[verbose]'}), '(level=verbose_levels[verbose])\n', (87489, 87520), False, 'from logging import getLogger, basicConfig, DEBUG, INFO, ERROR\n'), ((1672, 1684), 'shutil.rmtree', 'rmtree', (['temp'], {}), '(temp)\n', (1678, 1684), False, 'from shutil import rmtree\n'), ((6019, 6046), 'os.remove', 'remove', (['self.pg_output_file'], {}), '(self.pg_output_file)\n', (6025, 6046), False, 'from os import remove, system, mkdir\n'), ((6271, 6298), 'os.remove', 'remove', (['self.pg_output_file'], {}), '(self.pg_output_file)\n', (6277, 6298), False, 'from os import remove, system, mkdir\n'), ((8001, 8029), 'os.path.join', 'join', (['vpp_path', '"""Dockerfile"""'], {}), "(vpp_path, 'Dockerfile')\n", (8005, 8029), False, 'from os.path import dirname, realpath, split, join, isdir, exists\n'), ((8163, 8193), 'os.path.join', 'join', (['vpp_path', '"""startup.conf"""'], {}), "(vpp_path, 'startup.conf')\n", (8167, 8193), False, 'from os.path import dirname, realpath, split, join, isdir, exists\n'), ((8614, 8642), 'os.path.join', 'join', (['vpp_path', '"""Dockerfile"""'], {}), "(vpp_path, 'Dockerfile')\n", (8618, 8642), False, 'from os.path import dirname, realpath, split, join, isdir, exists\n'), ((8784, 8814), 'os.path.join', 'join', (['vpp_path', '"""startup.conf"""'], {}), "(vpp_path, 'startup.conf')\n", (8788, 8814), False, 'from os.path import dirname, realpath, split, join, isdir, exists\n'), ((10940, 10958), 'os.path.realpath', 'realpath', (['__file__'], {}), '(__file__)\n', (10948, 10958), False, 'from os.path import dirname, realpath, split, join, isdir, exists\n'), ((7823, 7835), 'os.remove', 'remove', (['path'], {}), '(path)\n', (7829, 7835), False, 'from os import remove, system, mkdir\n'), ((7908, 7930), 'jinja2.FileSystemLoader', 'FileSystemLoader', (['path'], {}), '(path)\n', (7924, 7930), False, 'from jinja2 import Environment, FileSystemLoader\n'), ((8521, 8543), 'jinja2.FileSystemLoader', 'FileSystemLoader', (['path'], {}), '(path)\n', (8537, 8543), False, 'from jinja2 import Environment, FileSystemLoader\n')]
|
"""Chemisty Flash Cards.
This sample demonstrates a simple skill built with the Amazon Alexa Skills Kit.
The Intent Schema, Custom Slots, and Sample Utterances for this skill, as well
as testing instructions are located at http://amzn.to/1LzFrj6
For additional samples, visit the Alexa Skills Kit Getting Started guide at
http://amzn.to/1LGWsLG
"""
from __future__ import print_function
import math
import random
# When editing your questions pay attention to your punctuation.
# Make sure you use question marks or periods.
# Make sure the first answer is the correct one.
# Set at least 4 answers, any extras will be shuffled in.
questions = [
{
"What is A C?": [
"actinium"
]
},
{
"What is A L?": [
"aluminum"
]
},
{
"What is A M?": [
"americium"
]
},
{
"What is S B?": [
"antimony"
]
},
{
"What is A R?": [
"argon"
]
},
{
"What is A S?": [
"arsenic"
]
},
{
"What is A T?": [
"astatine"
]
},
{
"What is B A?": [
"barium"
]
},
{
"What is B K?": [
"berkelium"
]
},
{
"What is B E?": [
"beryllium"
]
},
{
"What is B I?": [
"bismuth"
]
},
{
"What is B H?": [
"bohrium"
]
},
{
"What is B?": [
"boron"
]
},
{
"What is B R ?": [
"bromine"
]
},
{
"What is C D ?": [
"cadmium"
]
},
{
"What is C A ?": [
"calcium"
]
},
{
"What is C F ?": [
"californium"
]
},
{
"What is C ?": [
"carbon"
]
},
{
"What is C E ?": [
"cerium"
]
},
{
"What is C S ?": [
"cesium"
]
},
{
"What is C L ?": [
"chlorine"
]
},
{
"What is C R ?": [
"chromium"
]
},
{
"What is C O ?": [
"cobalt"
]
},
{
"What is C U ?": [
"copper"
]
},
{
"What is C M?": [
"curium"
]
},
]
def lambda_handler(event, context):
"""
Route the incoming request based on type (LaunchRequest, IntentRequest,
etc.) The JSON body of the request is provided in the event parameter.
"""
print("event.session.application.applicationId=" +
event['session']['application']['applicationId'])
"""
Uncomment this if statement and populate with your skill's application ID
to prevent someone else from configuring a skill that sends requests
to this function.
"""
# if (event['session']['application']['applicationId'] !=
# "amzn1.echo-sdk-ams.app.[unique-value-here]"):
# raise ValueError("Invalid Application ID")
if event['session']['new']:
on_session_started({'requestId': event['request']['requestId']},
event['session'])
if event['request']['type'] == "LaunchRequest":
return on_launch(event['request'], event['session'])
elif event['request']['type'] == "IntentRequest":
return on_intent(event['request'], event['session'])
elif event['request']['type'] == "SessionEndedRequest":
return on_session_ended(event['request'], event['session'])
def on_session_started(session_started_request, session):
"""Called when the session starts"""
print("on_session_started requestId=" +
session_started_request['requestId'] + ", sessionId=" +
session['sessionId'])
def on_launch(launch_request, session):
"""
Called when the user launches the skill without specifying what they
want.
"""
print("on_launch requestId=" + launch_request['requestId'] +
", sessionId=" + session['sessionId'])
# Dispatch to your skill's launch
return get_welcome_response()
def on_intent(intent_request, session):
"""Called when the user specifies an intent for this skill"""
print("on_intent requestId=" + intent_request['requestId'] +
", sessionId=" + session['sessionId'])
intent = intent_request['intent']
intent_name = intent_request['intent']['name']
# handle yes/no intent after the user has been prompted
if 'attributes' in session.keys() and 'user_prompted_to_continue' in session['attributes'].keys():
del session['attributes']['user_prompted_to_continue']
if intent_name == 'AMAZON.NoIntent':
return handle_finish_session_request(intent, session)
elif intent_name == "AMAZON.YesIntent":
return handle_repeat_request(intent, session)
# Dispatch to your skill's intent handlers
if intent_name == "AnswerIntent":
return handle_answer_request(intent, session)
elif intent_name == "AnswerOnlyIntent":
return handle_answer_request(intent, session)
elif intent_name == "AMAZON.YesIntent":
return handle_answer_request(intent, session)
elif intent_name == "AMAZON.NoIntent":
return handle_answer_request(intent, session)
elif intent_name == "AMAZON.StartOverIntent":
return get_welcome_response()
elif intent_name == "AMAZON.RepeatIntent":
return handle_repeat_request(intent, session)
elif intent_name == "AMAZON.HelpIntent":
return handle_get_help_request(intent, session)
elif intent_name == "AMAZON.StopIntent":
return handle_finish_session_request(intent, session)
elif intent_name == "AMAZON.CancelIntent":
return handle_finish_session_request(intent, session)
else:
raise ValueError("Invalid intent")
def on_session_ended(session_ended_request, session):
"""
Called when the user ends the session.
Is not called when the skill returns should_end_session=true
"""
print("on_session_ended requestId=" + session_ended_request['requestId'] +
", sessionId=" + session['sessionId'])
# add cleanup logic here
# ------- Skill specific business logic -------
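# ANSWER_COUNT is how many answer choices are read with each question (this
# skill expects a single spoken answer); GAME_LENGTH is the questions per game.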
ANSWER_COUNT = 1
GAME_LENGTH = 5
CARD_TITLE = "Chemistry Flash Cards" # Be sure to change this for your skill.
# --------------- Functions that control the skill's behavior -------------
def get_welcome_response():
"""
If we wanted to initialize the session to have some attributes we could
add those here
"""
speech_output = ("Let's test your skills with FlashCards. I will ask you " +
str(GAME_LENGTH) + " questions, try to get as many right" +
"as you can. Just say the answer. Let's begin. ")
should_end_session = False
game_questions = populate_game_questions()
current_questions_index = 0
    # Generate a random index for the correct answer, from 0 to ANSWER_COUNT - 1
    correct_answer_index = int(math.floor(random.random() * ANSWER_COUNT))
round_answers = populate_round_answers(game_questions[current_questions_index], 0, correct_answer_index)
    spoken_question = list(questions[game_questions[current_questions_index]].keys())[0]
reprompt_text = spoken_question
# Update code to deal with multiple choice answers
for i in range(0, ANSWER_COUNT):
reprompt_text = reprompt_text + ""
speech_output = speech_output + reprompt_text
attributes = {"speech_output": reprompt_text,
"reprompt_text": reprompt_text,
"current_questions_index": current_questions_index,
"correct_answer_index": correct_answer_index + 1,
"questions": game_questions,
"score": 0,
"correct_answer_text": questions[game_questions[current_questions_index]].values()[0][0]
}
return build_response(attributes, build_speechlet_response(
CARD_TITLE, speech_output, reprompt_text, should_end_session))
def populate_game_questions():
game_questions = []
index_list = []
index = len(questions)
if GAME_LENGTH > index:
raise ValueError("Invalid Game Length")
for i in range(0, index):
index_list.append(i)
# Pick GAME_LENGTH random questions from the list to ask the user,
# make sure there are no repeats
for j in range(0, GAME_LENGTH):
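        # Partial Fisher-Yates shuffle: swap a random remaining index to the
        # tail of index_list and take it, so question indexes never repeat.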
rand = int(math.floor(random.random() * index))
index -= 1
temp = index_list[index]
index_list[index] = index_list[rand]
index_list[rand] = temp
game_questions.append(index_list[index])
return game_questions
def populate_round_answers(game_question_indexes, correct_answer_index, correct_answer_target_location):
"""
Get the answers for a given question, and place the correct answer
at the spot marked by the correct_answer_target_location variable.
Note that you can have as many answers as you want but
only ANSWER_COUNT will be selected.
"""
answers = []
    answers_copy = list(questions[game_question_indexes].values())[0]
index = len(answers_copy)
if index < ANSWER_COUNT:
raise ValueError("Not enough answers for question.")
# Shuffle the answers, excluding the first element.
# If only 1 element no need to shuffle
if index > 1:
temp = answers_copy[1:]
random.shuffle(temp)
answers.append(answers_copy[0])
        answers.extend(temp[0:ANSWER_COUNT - 1])
# Swap the correct answer into the target location
answers[0], answers[correct_answer_target_location] = answers[correct_answer_target_location], answers[0]
else:
answers = answers_copy
return answers
def handle_answer_request(intent, session):
attributes = {}
should_end_session = False
answer_slot_valid = is_answer_slot_valid(intent)
user_gave_up = intent['name']
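    # user_gave_up holds the raw intent name; it equals "DontKnowIntent"
    # when the user explicitly gives up on a question.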
if 'attributes' in session.keys() and 'questions' not in session['attributes'].keys():
# If the user responded with an answer but there is no game
# in progress ask the user if they want to start a new game.
# Set a flag to track that we've prompted the user.
attributes['user_prompted_to_continue'] = True
speech_output = "There is no game in progress. " \
"Do you want to start a new game?"
reprompt_text = speech_output
return build_response(attributes, build_speechlet_response(CARD_TITLE,
speech_output, reprompt_text, should_end_session))
    elif not answer_slot_valid and user_gave_up != "DontKnowIntent":
        # If the user-provided answer isn't a known element and the user
        # didn't give up, return an error message to the user. Remember to
        # guide the user into providing correct values.
        reprompt = session['attributes']['speech_output']
        speech_output = "Your answer must be a known element. " + reprompt
        return build_response(session['attributes'],
                              build_speechlet_response(CARD_TITLE, speech_output, reprompt, should_end_session))
else:
game_questions = session['attributes']['questions']
correct_answer_index = session['attributes']['correct_answer_index']
current_score = session['attributes']['score']
current_questions_index = session['attributes']['current_questions_index']
correct_answer_text = session['attributes']['correct_answer_text']
        speech_output_analysis = ""  # built up below; stays empty when the user gave up
if answer_slot_valid and intent['slots']['Answer']['value'].lower() == correct_answer_text:
current_score += 1
speech_output_analysis = "correct. "
else:
if user_gave_up != "DontKnowIntent":
speech_output_analysis = "wrong. "
speech_output_analysis = (speech_output_analysis +
"The correct answer is " +
correct_answer_text + ".")
# if current_questions_index is 4, we've reached 5 questions
# (zero-indexed) and can exit the game session
if current_questions_index == GAME_LENGTH - 1:
speech_output = "" if intent['name'] == "DontKnowIntent" else "That answer is "
            speech_output = (speech_output + speech_output_analysis + "You got "
+ str(current_score) + " out of " + str(GAME_LENGTH)
+ " questions correct. Thank you for learning Flash"
" Cards with Alexa!")
reprompt_text = None
should_end_session = True
return build_response(session['attributes'],
build_speechlet_response(CARD_TITLE, speech_output, reprompt_text, should_end_session))
else:
current_questions_index += 1
            spoken_question = list(questions[game_questions[current_questions_index]].keys())[0]
            # Generate a random index for the correct answer, from 0 to ANSWER_COUNT - 1
            correct_answer_index = int(math.floor(random.random() * ANSWER_COUNT))
round_answers = populate_round_answers(game_questions[current_questions_index], current_questions_index, correct_answer_index)
reprompt_text = spoken_question
for i in range(0, ANSWER_COUNT):
reprompt_text = reprompt_text + ""
speech_output = "" if user_gave_up == "DontKnowIntent" else "That answer is "
speech_output = (speech_output + speech_output_analysis +
"Your score is " +
str(current_score) + '. ' + reprompt_text)
attributes = {"speech_output": reprompt_text,
"reprompt_text": reprompt_text,
"current_questions_index": current_questions_index,
"correct_answer_index": correct_answer_index + 1,
"questions": game_questions,
"score": current_score,
"correct_answer_text": questions[game_questions[current_questions_index]].values()[0][0]
}
return build_response(attributes,
build_speechlet_response(CARD_TITLE, speech_output, reprompt_text,
should_end_session))
def handle_repeat_request(intent, session):
"""
Repeat the previous speech_output and reprompt_text from the
session['attributes'] if available else start a new game session
"""
if 'attributes' not in session or 'speech_output' not in session['attributes']:
return get_welcome_response()
else:
attributes = session['attributes']
speech_output = attributes['speech_output']
reprompt_text = attributes['reprompt_text']
should_end_session = False
return build_response(attributes,
build_speechlet_response_without_card(speech_output, reprompt_text, should_end_session))
def handle_get_help_request(intent, session):
attributes = {}
card_title = "Flash Cards"
speech_output = ("You can begin a game by saying start a new game, or, "
"you can say exit... What can I help you with?")
reprompt_text = "What can I help you with?"
should_end_session = False
return build_response(attributes, build_speechlet_response(card_title, speech_output, reprompt_text, should_end_session))
def handle_finish_session_request(intent, session):
"""
End the session with a message
if the user wants to quit the game
"""
attributes = session['attributes']
reprompt_text = None
speech_output = "Thanks for playing Flash Cards!"
should_end_session = True
return build_response(attributes,
build_speechlet_response_without_card(speech_output, reprompt_text, should_end_session))
def is_answer_slot_valid(intent):
if 'Answer' in intent['slots'].keys() and 'value' in intent['slots']['Answer'].keys():
return True
else:
return False
# --------------- Helpers that build all of the responses -----------------
def build_speechlet_response(title, output, reprompt_text, should_end_session):
return {
'outputSpeech': {
'type': 'PlainText',
'text': output
},
'card': {
'type': 'Simple',
'title': title,
'content': output
},
'reprompt': {
'outputSpeech': {
'type': 'PlainText',
'text': reprompt_text
}
},
'shouldEndSession': should_end_session
}
def build_speechlet_response_without_card(output, reprompt_text, should_end_session):
return {
'outputSpeech': {
'type': 'PlainText',
'text': output
},
'reprompt': {
'outputSpeech': {
'type': 'PlainText',
'text': reprompt_text
}
},
'shouldEndSession': should_end_session
}
def build_response(attributes, speechlet_response):
return {
'version': '1.0',
'sessionAttributes': attributes,
'response': speechlet_response
}
|
[
"random.shuffle",
"random.random"
] |
[((9526, 9546), 'random.shuffle', 'random.shuffle', (['temp'], {}), '(temp)\n', (9540, 9546), False, 'import random\n'), ((7124, 7139), 'random.random', 'random.random', ([], {}), '()\n', (7137, 7139), False, 'import random\n'), ((8568, 8583), 'random.random', 'random.random', ([], {}), '()\n', (8581, 8583), False, 'import random\n'), ((13217, 13232), 'random.random', 'random.random', ([], {}), '()\n', (13230, 13232), False, 'import random\n')]
|
from PyInstaller.utils.hooks import collect_data_files
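# Collect dash_tabulator's non-Python data files (such as its bundled JS
# assets) so PyInstaller ships them with the frozen app.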
datas = collect_data_files("dash_tabulator")
|
[
"PyInstaller.utils.hooks.collect_data_files"
] |
[((64, 100), 'PyInstaller.utils.hooks.collect_data_files', 'collect_data_files', (['"""dash_tabulator"""'], {}), "('dash_tabulator')\n", (82, 100), False, 'from PyInstaller.utils.hooks import collect_data_files\n')]
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from gratipay.models.package import NPM, Package
from gratipay.testing import Harness
class Tests(Harness):
def setUp(self):
self.make_package()
def test_trailing_slash_redirects(self):
response = self.client.GxT('/on/npm/foo/')
assert response.code == 302
assert response.headers['Location'] == '/on/npm/foo'
def test_anon_gets_signin_page_from_unclaimed(self):
body = self.client.GET('/on/npm/foo').body
assert 'foo</a> npm package on Gratipay:' in body
def test_auth_gets_send_confirmation_page_from_unclaimed(self):
self.make_participant('bob', claimed_time='now')
body = self.client.GET('/on/npm/foo', auth_as='bob').body
assert 'foo</a> npm package:' in body
assert '<EMAIL>' in body
def test_auth_gets_multiple_options_if_present(self):
self.make_package(NPM, 'bar', 'Bar', ['<EMAIL>', '<EMAIL>'])
self.make_participant('bob', claimed_time='now')
body = self.client.GET('/on/npm/bar', auth_as='bob').body
assert '<EMAIL>' in body
assert '<EMAIL>' in body
def test_auth_gets_something_if_no_emails(self):
self.make_package(NPM, 'bar', 'Bar', [])
self.make_participant('bob', claimed_time='now')
body = self.client.GET('/on/npm/bar', auth_as='bob').body
assert "No email addresses on file" in body
def claim_package(self):
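        # Verifying an email address that the package lists claims the
        # package: Gratipay creates a team owned by the verifier.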
foo = Package.from_names('npm', 'foo')
alice = self.make_participant('alice', claimed_time='now')
alice.start_email_verification('<EMAIL>', foo)
nonce = alice.get_email('<EMAIL>').nonce
alice.finish_email_verification('<EMAIL>', nonce)
team = alice.get_teams()[0]
assert team.package == foo
return team.slug
def test_package_redirects_to_project_if_claimed(self):
self.claim_package()
response = self.client.GxT('/on/npm/foo')
assert response.code == 302
assert response.headers['Location'] == '/foo/'
def test_package_served_as_project_if_claimed(self):
self.claim_package()
assert 'owned by' in self.client.GET('/foo/').body
class Bulk(Harness):
def setUp(self):
self.make_package()
def test_anon_gets_payment_flow(self):
body = self.client.GET('/on/npm/').body
assert 'Paste a package.json' in body
assert '0 out of all 1 npm package' in body
|
[
"gratipay.models.package.Package.from_names"
] |
[((1540, 1572), 'gratipay.models.package.Package.from_names', 'Package.from_names', (['"""npm"""', '"""foo"""'], {}), "('npm', 'foo')\n", (1558, 1572), False, 'from gratipay.models.package import NPM, Package\n')]
|