**Schema**

| Column | Type |
| --- | --- |
| `hexsha` | string |
| `size` | int64 |
| `ext` | string |
| `lang` | string |
| `max_stars_repo_path` | string |
| `max_stars_repo_name` | string |
| `max_stars_repo_head_hexsha` | string |
| `max_stars_repo_licenses` | list |
| `max_stars_count` | int64 |
| `max_stars_repo_stars_event_min_datetime` | string |
| `max_stars_repo_stars_event_max_datetime` | string |
| `max_issues_repo_path` | string |
| `max_issues_repo_name` | string |
| `max_issues_repo_head_hexsha` | string |
| `max_issues_repo_licenses` | list |
| `max_issues_count` | int64 |
| `max_issues_repo_issues_event_min_datetime` | string |
| `max_issues_repo_issues_event_max_datetime` | string |
| `max_forks_repo_path` | string |
| `max_forks_repo_name` | string |
| `max_forks_repo_head_hexsha` | string |
| `max_forks_repo_licenses` | list |
| `max_forks_count` | int64 |
| `max_forks_repo_forks_event_min_datetime` | string |
| `max_forks_repo_forks_event_max_datetime` | string |
| `content` | string |
| `avg_line_length` | float64 |
| `max_line_length` | int64 |
| `alphanum_fraction` | float64 |

After `alphanum_fraction` come forty-one per-file quality signals, each appearing twice in the same order: once with a `_quality_signal` suffix (all float64, except `qsc_code_num_words_quality_signal`, which is int64, and `qsc_codepython_cate_var_zero_quality_signal`, which is bool) and once without the suffix (all int64, except `qsc_code_frac_words_unique` and `qsc_code_frac_lines_string_concat`, whose declared type is null). The final two columns are `effective` (string) and `hits` (int64).

Signal order, used for the value lists in the rows below: `qsc_code_num_words`, `qsc_code_num_chars`, `qsc_code_mean_word_length`, `qsc_code_frac_words_unique`, `qsc_code_frac_chars_top_2grams`, `qsc_code_frac_chars_top_3grams`, `qsc_code_frac_chars_top_4grams`, `qsc_code_frac_chars_dupe_5grams` through `qsc_code_frac_chars_dupe_10grams`, `qsc_code_frac_chars_replacement_symbols`, `qsc_code_frac_chars_digital`, `qsc_code_frac_chars_whitespace`, `qsc_code_size_file_byte`, `qsc_code_num_lines`, `qsc_code_num_chars_line_max`, `qsc_code_num_chars_line_mean`, `qsc_code_frac_chars_alphabet`, `qsc_code_frac_chars_comments`, `qsc_code_cate_xml_start`, `qsc_code_frac_lines_dupe_lines`, `qsc_code_cate_autogen`, `qsc_code_frac_lines_long_string`, `qsc_code_frac_chars_string_length`, `qsc_code_frac_chars_long_word_length`, `qsc_code_frac_lines_string_concat`, `qsc_code_cate_encoded_data`, `qsc_code_frac_chars_hex_words`, `qsc_code_frac_lines_prompt_comments`, `qsc_code_frac_lines_assert`, `qsc_codepython_cate_ast`, `qsc_codepython_frac_lines_func_ratio`, `qsc_codepython_cate_var_zero`, `qsc_codepython_frac_lines_pass`, `qsc_codepython_frac_lines_import`, `qsc_codepython_frac_lines_simplefunc`, `qsc_codepython_score_lines_no_logic`, `qsc_codepython_frac_lines_print`.
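Rows with this schema are easy to filter on the signal columns. The sketch below is a minimal, hypothetical example of doing so with pandas: the parquet file name and the thresholds are illustrative assumptions, not part of the dataset.

```python
# Minimal sketch, assuming the rows above are stored as a parquet file.
# The file name and the filter thresholds are hypothetical.
import pandas as pd

df = pd.read_parquet("code_quality_signals.parquet")

# Keep Python rows that are not flagged as auto-generated and that have a
# reasonable share of alphanumeric characters.
mask = (
    (df["lang"] == "Python")
    & (df["qsc_code_cate_autogen_quality_signal"] == 0)
    & (df["alphanum_fraction"] > 0.5)
)
print(df.loc[mask, ["max_stars_repo_name", "size", "hits"]].head())
```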
**Sample rows**

**Row 1**

- `hexsha` `6e94ba568f1305a602a22f36b40494d01fcc50d7` · `size` 51 · `ext` `py` · `lang` Python
- `max_stars_repo`: `__init__.py` in `TeaKatz/Learning_Rate_Finder` @ `bbc9e0a3a9324b4c1beeaf36f5a743435f9fa3dc`, licenses `["MIT"]`, count null, event datetimes null → null
- `max_issues_repo` and `max_forks_repo`: identical to `max_stars_repo` (counts and event datetimes null)
- `avg_line_length` 25.5 · `max_line_length` 50 · `alphanum_fraction` 0.901961

`content`:

```python
from .LearningRateFinder import LearningRateFinder
```

- `*_quality_signal` values (signal order above): 4, 51, 11.5, 0.75, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.078431, 51, 1, 51, 51, 0.978723, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, true, 0, 1, 0, 1, 0
- Unsuffixed `qsc_*` values (same order): 1, 0, 1, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0
- `effective` "0" · `hits` 6
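Two of the simpler statistics in this row can be checked directly against `content`. The sketch below assumes the stored file ends with a newline that is included in both counts, which is what makes `size` 51 and `alphanum_fraction` 46/51 = 0.901961; the assumption is mine, not stated by the dataset.

```python
# Reproduces `size` and `alphanum_fraction` for the row above, assuming a
# trailing newline is stored and counted.
content = "from .LearningRateFinder import LearningRateFinder\n"

size = len(content.encode("utf-8"))
alphanum_fraction = sum(c.isalnum() for c in content) / len(content)

print(size)                         # 51
print(round(alphanum_fraction, 6))  # 0.901961
```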
**Row 2**

- `hexsha` `6e971429b8ed3893a1c9bb462eabbad1b2059e1d` · `size` 468 · `ext` `py` · `lang` Python
- `max_stars_repo`: `fl/fl2/fl2_test.py` in `Azmalent/itmo5` @ `ee3548c1b2e9d84715daa57634a056f935bf775e`, licenses `["WTFPL"]`, count null, event datetimes null → null
- `max_issues_repo` and `max_forks_repo`: identical to `max_stars_repo` (counts and event datetimes null)
- `avg_line_length` 19.5 · `max_line_length` 56 · `alphanum_fraction` 0.647436

`content`:

```python
from fl2 import fsm


def test1():
    assert fsm.parse("cb")
    assert fsm.parse("bccb")
    assert fsm.parse("bcbccb")


def test2():
    assert fsm.parse("acb")
    assert fsm.parse("bcb")
    assert fsm.parse("abcb")
    assert fsm.parse("aabcb")
    assert fsm.parse("bbbcb")


def test3():
    assert fsm.parse("aaaaaaababababbcbcabababcbbbbbcb")


def test4():
    assert not fsm.parse("c")
    assert not fsm.parse("bcccb")
    assert not fsm.parse("whatever")
```

- `*_quality_signal` values (signal order above): 63, 468, 4.809524, 0.380952, 0.316832, 0.415842, 0.168317, 0, 0, 0, 0, 0, 0, 0, 0.013333, 0.198718, 468, 24, 57, 19.5, 0.794667, 0, 0, 0, 0, 0, 0.166311, 0.06823, 0, 0, 0, 0, 0.705882, 1, 0.235294, true, 0, 0.058824, 0, 0.294118, 0
- Unsuffixed `qsc_*` values (same order): 0, 0, 0, null, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0
- `effective` "0" · `hits` 6
**Row 3**

- `hexsha` `6eca6ebcba00334d8d319abdd9c4a7b0cf458ed1` · `size` 3456 · `ext` `py` · `lang` Python
- `max_stars_repo`: `least_squares/icp/icp_so3.py` in `yimuw/yimu-blog` @ `280ab2eca1fa48602d1695d69366842ea40debda`, licenses `["BSD-3-Clause"]`, count 8, stars events 2021-06-11T05:50:40.000Z → 2022-03-29T02:41:05.000Z
- `max_issues_repo`: same path/repo/head/licenses, count 7, issues events 2020-06-28T13:58:26.000Z → 2021-12-09T22:30:54.000Z
- `max_forks_repo`: same path/repo/head/licenses, count 5, forks events 2021-07-07T04:00:14.000Z → 2022-03-30T03:42:54.000Z
- `avg_line_length` 33.553398 · `max_line_length` 97 · `alphanum_fraction` 0.674479

`content` (original spelling kept; line structure restored from the flattened dump):

```python
import numpy as np
# from scipy.linalg import logm, expm
from math import cos, sin, pi
import matplotlib.pyplot as plt

from utils import skew, so3_exp


def icp_residual_so3(point_src, point_target, w_so3):
    R = so3_exp(w_so3)
    residual = R @ point_src - point_target
    # [p1_x, p1_y, p1_z, p2_x, p2_y, p2_z, ...]
    residual = residual.flatten('F')
    return residual


def compute_so3_jacobian_numurical(point_src, point_target, w_so3):
    DELTA = 1e-6
    num_residuals = point_src.size
    num_params = 3
    jacobian = np.zeros([num_residuals, num_params])
    curret_params = w_so3.copy()
    for p_idx in range(3):
        params_plus = curret_params.copy()
        params_plus[p_idx] += DELTA
        residual_plus = icp_residual_so3(point_src, point_target, params_plus)
        params_minus = curret_params.copy()
        params_minus[p_idx] -= DELTA
        residual_minus = icp_residual_so3(point_src, point_target, params_minus)
        dr_dpidx = (residual_plus - residual_minus) / (2. * DELTA)
        jacobian[:, p_idx] = dr_dpidx
    residual_cur_params = icp_residual_so3(point_src, point_target, w_so3)
    return jacobian, residual_cur_params


def icp_so3_numirical(point_src, point_target):
    w_so3 = np.array([0, 0, 0.])
    for iter in range(10):
        jacobi, b = compute_so3_jacobian_numurical(point_src, point_target, w_so3)
        delta = np.linalg.solve(jacobi.transpose() @ jacobi, -jacobi.transpose() @ b)
        w_so3 += delta
        #print('jocobian:', jacobi)
        #print('b: ', b)
        print('iter: ', iter, ' cost:', b.transpose() @ b)
        #print('current params: ', w_so3)


def icp_residual_local_so3(point_src, point_target, R_current, w_so3_local):
    R = R_current @ so3_exp(w_so3_local)
    residual = R @ point_src - point_target
    # [p1_x, p1_y, p1_z, p2_x, p2_y, p2_z, ...]
    residual = residual.flatten('F')
    return residual


# Can do a lambda to reduce code length
def compute_local_so3_jacobian_numurical(point_src, point_target, R_current):
    DELTA = 1e-6
    num_residuals = point_src.size
    num_params = 3
    jacobian = np.zeros([num_residuals, num_params])
    w_so3_local = np.array([0, 0, 0.])
    curret_params = w_so3_local.copy()
    for p_idx in range(3):
        params_plus = curret_params.copy()
        params_plus[p_idx] += DELTA
        residual_plus = icp_residual_local_so3(point_src, point_target, R_current, params_plus)
        params_minus = curret_params.copy()
        params_minus[p_idx] -= DELTA
        residual_minus = icp_residual_local_so3(point_src, point_target, R_current, params_minus)
        dr_dpidx = (residual_plus - residual_minus) / (2. * DELTA)
        jacobian[:, p_idx] = dr_dpidx
    residual_cur_params = icp_residual_local_so3(point_src, point_target, R_current, w_so3_local)
    return jacobian, residual_cur_params


def icp_local_so3_numirical(point_src, point_target):
    w_so3_local = np.array([0, 0, 0.])
    R_current = np.identity(3)
    for iter in range(10):
        # Jocobi on so3
        jacobi, b = compute_local_so3_jacobian_numurical(point_src, point_target, R_current)
        delta = np.linalg.solve(jacobi.transpose() @ jacobi, -jacobi.transpose() @ b)
        # Update on SO3
        R_current = R_current @ so3_exp(delta)
        #print('jocobian:', jacobi)
        #print('b: ', b)
        print('iter: ', iter, ' cost:', b.transpose() @ b)
        #print('current params: ', w_so3)
```

- `*_quality_signal` values (signal order above): 508, 3456, 4.244094, 0.17126, 0.06679, 0.096475, 0.141002, 0.852505, 0.833024, 0.833024, 0.798701, 0.730056, 0.700835, 0, 0.026578, 0.216146, 3456, 102, 98, 33.882353, 0.769288, 0.095775, 0, 0.59375, 0, 0, 0.008355, 0, 0, 0, 0, 0, 0, 1, 0.09375, false, 0, 0.0625, 0, 0.21875, 0.03125
- Unsuffixed `qsc_*` values (same order): 0, 0, 0, null, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
- `effective` "0" · `hits` 6
**Row 4**

- `hexsha` `953877841529738bf70e07a5327b834d27e39aaa` · `size` 30178 · `ext` `py` · `lang` Python
- `max_stars_repo`: `src/eventPoints.py` in `CB1204/LapSimulation` @ `7d7f7c43a6bc3db3dbf02050d939da3f17647c2c`, licenses `["MIT"]`, count 7, stars events 2018-02-22T16:58:26.000Z → 2022-02-05T18:17:56.000Z
- `max_issues_repo`: same path/repo/head/licenses, count null, issues event datetimes null → null
- `max_forks_repo`: same path/repo/head/licenses, count 2, forks events 2019-04-15T21:07:03.000Z → 2021-05-11T07:41:49.000Z
- `avg_line_length` 47.226917 · `max_line_length` 134 · `alphanum_fraction` 0.476307

`content` (line structure and indentation reconstructed from the flattened dump; the file's own typos, e.g. `SkdipadMax`, are kept verbatim):

```python
from LSvehicleOneDimLookup_2 import vehicleOneDimLookup_2 as vehicle_ODL_2
from LSvehicleTwoDimLookup_2 import vehicleTwoDimLookup_2 as vehicle_TDL_2
from Create_TrackMap_2D import Create_TrackMap_2D
from _pytest.assertion.util import basestring
from track import Track
import lapsim as lts
import Acceleration
import numpy as np
import platform
import math
import os


class Event:
    def __init__(self, EventName, Year, StartValue, Counter, revision, Vehicles = [], Tracks = []):
        self.EventName = EventName
        self.Year = Year
        self.Counter = Counter
        self.Vehicles = Vehicles
        self.Tracks = Tracks
        self.StartValue = StartValue
        self.revision = revision

    def SimulateEvent(self):
        #Initialization of module
        Score = Scoring(self.EventName, self.Year)
        #Check platform and open file for saving calculated data
        cwd = os.getcwd()
        if('Windows' in platform.platform()):
            file = cwd + '\Results.txt'
        else:
            cwd = cwd.replace('\\', '/')
            file = cwd + '/Results.txt'
        if(self.Counter == 1):
            #if Counter == 1 the file gets opened and the old data will get overwritten
            f = open(file, 'w')
            f.write('Event&Year|Track|Car|Parameter|Event|Scored Time in s|Scored EnergyConsumption in kWh|Scores|\n')
        else:
            #for all other values of Counter the data will get appended to the old ones
            f = open(file, 'a')
        #Points = []
        ds = 0.5  #length of each track segment
        #Go through Tracks
        for counterTracks in self.Tracks:
            #Go through Vehicles
            for counterVehicles in self.Vehicles:
                #write basic information into result file
                f.write(self.EventName + ' ' + str(self.Year) + '|')
                f.write(counterTracks + '|')
                f.write(str(counterVehicles.CarName) + '|')
                f.write('%d|' %(self.StartValue))
                if('AutoX' in counterTracks):
                    cwd = os.getcwd()
                    #open file with
                    if ('Windows' in platform.platform()):
                        file = cwd + '\\CSV\\'  #VehicleParameters.xlsx'
                    else:
                        cwd = cwd.replace('\\', '/')
                        file = cwd + '/CSV/'
                    cfile = open(file + counterTracks + '.csv', 'r')
                    curvature = []
                    #Load Curvature from file into list and make it to an array
                    for row in cfile:
                        new = np.array(row.split(','))
                        value1 = float(new[0])
                        curvature.append(value1)
                    curvature = np.array(curvature)
                    track = Track(curvature, ds)
                    # Initialize Lap Simulation
                    LS = lts.LapSim(counterVehicles, track)
                    LS.max_laps = 2
                    # Run Lap Simulation
                    LS.speed_profile()
                elif('Endurance' in counterTracks):
                    cwd = os.getcwd()
                    if ('Windows' in platform.platform()):
                        file = cwd + '\\CSV\\'  # VehicleParameters.xlsx
                    else:
                        cwd = cwd.replace('\\', '/')
                        file = cwd + '/CSV/'
                    cfile = open(file + counterTracks + '.csv', 'r')
                    curvature = []
                    #Load Curvature from file into list and make it to an array
                    for row in cfile:
                        new = np.array(row.split(','))
                        value1 = float(new[0])
                        curvature.append(value1)
                    curvature = np.array(curvature)
                    track = Track(curvature, ds)
                    # Initialize Lap Simulation
                    LS = lts.LapSim(counterVehicles, track)
                    LS.max_laps = 2
                    # Run Lap Simulation
                    LS.speed_profile()
                elif('Efficiency' in counterTracks):
                    cwd = os.getcwd()
                    if ('Windows' in platform.platform()):
                        file = cwd + '\\CSV\\'  #VehicleParameters.xlsx'
                    else:
                        cwd = cwd.replace('\\', '/')
                        file = cwd + '/CSV/'
                    cfile = open(file + counterTracks + '.csv', 'r')
                    curvature = []
                    #Load Curvature from file into List and make it to an Array
                    for row in cfile:
                        new = np.array(row.split(','))
                        value1 = float(new[0])
                        curvature.append(value1)
                    curvature = np.array(curvature)
                    track = Track(curvature, ds)
                    # Initialize Lap Simulation
                    LS = lts.LapSim(counterVehicles, track)
                    LS.max_laps = 2
                    # Run Lap Simulation
                    LS.speed_profile()
                elif('Skidpad' in counterTracks):
                    curvature = []
                    FzgWidth = 1.3  #Average of all 4 in 2015 and 2016 (look datasheet GDrive) in m
                    Radius = FzgWidth/2+15.25/2
                    SkidpadCurvature = 1/Radius  #k=1/R
                    i = 1
                    range = 2*math.pi*Radius
                    while i <= round(range/ds):
                        curvature.append(SkidpadCurvature)
                        i += 1
                    curvature = np.array(curvature)
                    track = Track(curvature, ds)
                    # Initialize Lap Simulation
                    LS = lts.LapSim(counterVehicles, track)
                    LS.max_laps = 2
                    # Run Lap Simulation
                    LS.speed_profile()
                #Calculate Event Points
                if('Skidpad' in counterTracks):
                    Tyour = LS.lapTime
                    Tmin = 4.920
                    if(Tmin > Tyour):  #set Tyour as Tmin if it is faster than fix Tmin
                        Tmin = Tyour
                    SkidpadScoring = Score.Calc_Skidpad(Tmin, Tyour)
                    if(isinstance(SkidpadScoring, basestring) == True):  #if points are a string set points on 0
                        SkidpadScoring = 0
                        #Points.append('Skidpad: %s s, Score: %d' %(Tyour, SkidpadScoring))
                        f.write('Skidpad|%f|-|%d|\n' %(Tyour, SkidpadScoring))
                    elif(isinstance(SkidpadScoring, basestring) == False):  #if points are not a string, go on
                        #Points.append('Skidpad: %s s, Score: %d' %(Tyour, SkidpadScoring))
                        f.write('Skidpad|%f|-|%d|\n' %(Tyour, SkidpadScoring))
                    print('Skidpad ' + str(self.Counter) + ' of ' + str(self.revision) + ' completed.')
                elif('Accel' in counterTracks):
                    Accel = Acceleration.Acceleration(counterVehicles)  #initialize class for accel calculation
                    Tyour = Accel.AccelTime()
                    #Tyour = LS.lapTime
                    Tmin = 3.109
                    if(Tmin > Tyour):  #set Tyour as Tmin if it is faster than fix Tmin
                        Tmin = Tyour
                    AccelScoring = Score.Calc_Acceleration(Tmin, Tyour)
                    if(isinstance(AccelScoring, basestring) == True):  #if points are a string set points on 0
                        AccelScoring = 0
                        #Points.append('Acceleration: %s s, Score: %d' %(Tyour, AccelScoring))
                        f.write('Acceleration|%f|-|%d|\n' %(Tyour, AccelScoring))
                    elif(isinstance(AccelScoring, basestring) == False):  #if points are not a string, go on
                        #Points.append('Acceleration: %s s, Score: %d' %(Tyour, AccelScoring))
                        f.write('Acceleration|%f|-|%d|\n' %(Tyour, AccelScoring))
                    print('Acceleration ' + str(self.Counter) + ' of ' + str(self.revision) + ' completed.')
                elif('AutoX' in counterTracks):
                    Tyour = LS.lapTime
                    Tmin = 56.083
                    if(Tmin > Tyour):
                        Tmin = Tyour
                    AutoXScoring = Score.Calc_AutoX(Tmin, Tyour)
                    if(isinstance(AutoXScoring, basestring) == True):  #if points are a string set points on 0
                        AutoXScoring = 0
                        #Points.append('AutoX: %s s, Score: %d' %(Tyour, AutoXScoring))
                        f.write('AutoX|%f|-|%d|\n' %(Tyour/60, AutoXScoring))
                    elif(isinstance(AutoXScoring, basestring) == False):  #if points are not a string, go on
                        #Points.append('AutoX: %s s, Score: %d' %(Tyour, AutoXScoring))
                        f.write('AutoX|%f|-|%d|\n' %(Tyour/60, AutoXScoring))
                    print('AutoX ' + str(self.Counter) + ' of ' + str(self.revision) + ' completed.')
                elif('Endurance' in counterTracks):
                    if(self.EventName == 'FSG'):  #calculate Tyour depending on the event
                        Tyour = LS.lapTime*18
                    elif(self.EventName == 'FSA'):
                        if(self.Year == 2016):
                            Tyour = LS.lapTime*20
                        elif(self.Year == 2015):
                            Tyour = LS.lapTime*22
                    elif(self.EventName == 'FSAE'):
                        if(self.Year == 2016):
                            Tyour = LS.lapTime*20
                        elif(self.Year == 2015):
                            Tyour = LS.lapTime*22
                    Tmin = 1363.556
                    if(Tmin > Tyour):  #set Tyour as Tmin if it is faster than fix Tmin
                        Tmin = Tyour
                    EnduranceScoring = Score.Calc_Endurance(Tmin, Tyour)
                    if(isinstance(EnduranceScoring, basestring) == True):  #if points are a string set points on 0
                        EnduranceScoring = 0
                        #Points.append('Endurance: %s s, Score: %d' %(Tyour, EnduranceScoring))
                        f.write('Endurance|%f|-|%d|\n' %(Tyour/60, EnduranceScoring))
                    elif(isinstance(EnduranceScoring, basestring) == False):  #if points are not a string, go on
                        #Points.append('Endurance: %s s, Score: %d' %(Tyour, EnduranceScoring))
                        f.write('Endurance|%f|-|%d|\n' %(Tyour/60, EnduranceScoring))
                    print('Endurance ' + str(self.Counter) + ' of ' + str(self.revision) + ' completed.')
                elif('Efficiency' in counterTracks):
                    if(self.EventName == 'FSG'):  #calculate Tyour depending on the event
                        Tyour = LS.lapTime*18
                        EyourTotal = LS.UsedEnergy/1000/3600*18
                    elif(self.EventName == 'FSA'):
                        if(self.Year == 2016):
                            Tyour = LS.lapTime*20
                            EyourTotal = LS.UsedEnergy/1000/3600*20
                        elif(self.Year == 2015):
                            Tyour = LS.lapTime*22
                            EyourTotal = LS.UsedEnergy/1000/3600*22
                    elif(self.EventName == 'FSAE'):
                        if(self.Year == 2016):
                            Tyour = LS.lapTime*20
                            EyourTotal = LS.UsedEnergy/1000/3600*20
                        elif(self.Year == 2015):
                            Tyour = LS.lapTime*22
                            EyourTotal = LS.UsedEnergy/1000/3600*22
                    Tmin = 1363.556
                    if(Tmin > Tyour):  #set Tyour as Tmin if it is faster than fix Tmin
                        Tmin = Tyour
                    Emin = 0.242
                    Eff_fac_min = 0.1
                    Eff_fac_max = 0.89
                    #Energy consumption per lap
                    Eyour = LS.UsedEnergy/1000/3600  #divide with 1000 and 3600 for unity [kWh]
                    if(Emin > Eyour):  #set Eyour as Emin if it is smaller than fix Emin
                        Emin = Eyour
                    #Eyour = 0.378 # per lap
                    EfficiencyScoring = Score.Calc_Efficiency(Tmin, Emin, Eff_fac_min, Eff_fac_max, Tyour, Eyour)
                    if(isinstance(EfficiencyScoring, basestring) == True):  #if points are a string set points on 0
                        EfficiencyScoring = 0
                        #Points.append('Efficiency: %s s, Energy: %f kWh, Score: %d' %(Tyour, EyourTotal, EfficiencyScoring))
                        f.write('Efficiency|%f|%f|%d|\n' %(Tyour/60, EyourTotal, EfficiencyScoring))
                    elif(isinstance(EfficiencyScoring, basestring) == False):  #if points are not a string, go on
                        #Points.append('Efficiency: %s s, Energy: %f kWh, Score: %d' %(Tyour, EyourTotal, EfficiencyScoring))
                        f.write('Efficiency|%f|%f|%d|\n' %(Tyour/60, EyourTotal, EfficiencyScoring))
                    print('Efficiency ' + str(self.Counter) + ' of ' + str(self.revision) + ' completed.')
                # #should delete console, but it's not working because Python IDLE don't support console cleaning
                # if (os.name == 'nt'):
                #     os.system('cls')
                # else:
                #     os.system('clear')
                #Calculate all points of each event
                Overall = SkidpadScoring + AccelScoring + EnduranceScoring + AutoXScoring + EfficiencyScoring
                #Points.append('Overall Score: %d' %(Overall))
                f.write('Overall Points|-|-|%d|-|-|-|%d|\n' %(self.StartValue, Overall))
                #print(Points)


class Scoring:
    def __init__(self, EventName, Year):
        self.EventName = EventName
        self.Year = Year
        #Determine constants for each event and year
        if (self.EventName == 'FSG' or 'FSS'):  #Points for Germany and Spain
            if (self.Year == 2015):
                self.SkidpadMax = 71.5
                self.SkidpadMin = 3.5
                self.AccelMax = 71.5
                self.AccelMin = 3.5
                self.AutoXMax = 95.5
                self.AutoXMin = 4.5
                self.EnduranceMax = 300
                self.EnduranceMin = 25
                self.EfficiencyMax = 100
            elif (self.Year == 2016):
                self.SkidpadMax = 71.5
                self.SkidpadMin = 3.5
                self.AccelMax = 71.5
                self.AccelMin = 3.5
                self.AutoXMax = 95.5
                self.AutoXMin = 4.5
                self.EnduranceMax = 300
                self.EnduranceMin = 25
                self.EfficiencyMax = 100
            elif (self.Year == 2017):
                self.SkidpadMax = 71.5
                self.SkidpadMin = 3.5
                self.AccelMax = 71.5
                self.AccelMin = 3.5
                self.AutoXMax = 95.5
                self.AutoXMin = 4.5;
                self.EnduranceMax = 300
                self.EnduranceMin = 25
                self.EfficiencyMax = 100
            else:
                self.SkidpadMax = 71.5
                self.SkidpadMin = 3.5
                self.AccelMax = 75
                self.AutoXMax = 95.5
                self.AutoXMin = 4.5
                self.EnduranceMax = 300
                self.EnduranceMin = 25
                self.EfficiencyMax = 100
        elif (self.EventName == 'FSA'):  #Points for Austria
            if (self.Year == 2015):
                self.SkidpadMax = 71.5
                self.SkidpadMin = 3.5
                self.AccelMax = 71.5
                self.AccelMin = 3.5
                self.AutoXMax = 95.5
                self.AutoXMin = 4.5
                self.EnduranceMax = 300
                self.EnduranceMin = 25
                self.EfficiencyMax = 100
            elif (self.Year == 2016):
                self.SkidpadMax = 71.5
                self.SkidpadMin = 3.5
                self.AccelMax = 75
                self.AutoXMax = 95.5
                self.AutoXMin = 4.5
                self.EnduranceMax = 300
                self.EnduranceMin = 25
                self.EfficiencyMax = 100
            elif (self.Year == 2017):
                self.SkidpadMax = 71.5
                self.SkidpadMin = 3.5
                self.AccelMax = 71.5
                self.AccelMin = 3.5
                self.AutoXMax = 95.5
                self.AutoXMin = 4.5
                self.EnduranceMax = 300
                self.EnduranceMin = 25
                self.EfficiencyMax = 100
            else:
                self.SkidpadMax = 71.5
                self.SkidpadMin = 3.5
                self.AccelMax = 75
                self.AutoXMax = 95.5
                self.AutoXMin = 4.5
                self.EnduranceMax = 300
                self.EnduranceMin = 25
                self.EfficiencyMax = 100
        elif (self.EventName == 'FSAE' or 'FSI'):  #Points for America and Italy
            if (self.Year == 2015):
                self.SkidpadMax = 47.5
                self.SkidpadMin = 2.5
                self.AccelMax = 71.5
                self.AccelMin = 3.5
                self.AutoXMax = 142.5
                self.AutoXMin = 7.5
                self.EnduranceMax = 250  #Tyour <= Tmax --> Formel, Tyour > Tmax --> 0
                self.EnduranceMin = 50
                self.EfficiencyMax = 100
            elif (self.Year == 2016):
                self.SkidpadMax = 47.5
                self.SkidpadMin = 2.5
                self.AccelMax = 71.5
                self.AccelMin = 3.5
                self.AutoXMax = 142.5
                self.AutoXMin = 7.5
                self.EnduranceMax = 250  #Tyour <= Tmax --> Formel, Tyour > Tmax --> 0
                self.EnduranceMin = 50
                self.EfficiencyMax = 100
            elif (self.Year == 2017):
                self.SkidpadMax = 71.5
                self.SkidpadMin = 3.5
                self.AccelMax = 95.5
                self.AccelMin = 4.5
                self.AutoXMax = 118.5
                self.AutoXMin = 6.5
                self.EnduranceMax = 200  #Tyour <= Tmax --> Formel, Tyour > Tmax --> 25
                self.EnduranceMin = 25
                self.EfficiencyMax = 100
            else:
                self.SkidpadMax = 47.5
                self.SkidpadMin = 2.5
                self.AccelMax = 71.5
                self.AccelMin = 3.5
                self.AutoXMax = 142.5
                self.AutoXMin = 7.5
                self.EnduranceMax = 250  #Tyour <= Tmax --> Formel, Tyour > Tmax --> 0
                self.EnduranceMin = 50
                self.EfficiencyMax = 100

    #Calculation for Acceleration
    def Calc_Acceleration(self, Tmin, Tyour):
        #Check Event and Year
        if((self.EventName == 'FSG' or 'FSA' or 'FSS') and (self.Year == 2017)):
            Tmax = 1.5 * Tmin
            #Calculate Points
            if(Tyour < Tmax):
                AccelPoints = np.around(self.AccelMax*(((Tmax/Tyour)-1)/0.5)+self.AccelMin, 3)
                if(AccelPoints > 75):
                    AccelPoints = 'Error - Value to high'
                else:
                    AccelPoints = AccelPoints
            else:
                AccelPoints = self.AccelMin
            return AccelPoints
        elif((self.EventName == 'FSG' or 'FSA' or 'FSS') and (self.Year == 2015 or 2016)):
            Tmax = 1.5 * Tmin
            if(Tyour < Tmax):
                AccelPoints = np.around(self.AccelMax*((Tmax/Tyour)-1)/((Tmax/Tmin)-1)+self.AccelMin, 3)
                if(AccelPoints > 75):
                    AccelPoints = 'Error - Value to high'
                else:
                    AccelPoints = AccelPoints
            else:
                AccelPoints = self.AccelMin
            return AccelPoints
        elif((self.EventName == 'FSAE' or 'FSI') and (self.Year == 2017)):
            Tmax = 1.5 * Tmin
            if(Tyour < Tmax):
                AccelPoints = np.around(self.AccelMax*((Tmax/Tyour)-1)/((Tmax/Tmin)-1)+self.AccelMin, 3)
                if(AccelPoints > 100):
                    AccelPoints = 'Error - Value to high'
                else:
                    AccelPoints = AccelPoints
            else:
                AccelPoints = self.AccelMin
            return AccelPoints
        elif((self.EventName == 'FSAE' or 'FSI') and (self.Year == 2015 or 2016)):
            Tmax = 1.5 * Tmin
            if(Tyour < Tmax):
                AccelPoints = np.around(self.AccelMax*((Tmax/Tyour)-1)/((Tmax/Tmin)-1)+self.AccelMin, 3)
                if(AccelPoints > 75):
                    AccelPoints = 'Error - Value to high'
                else:
                    AccelPoints = AccelPoints
            else:
                AccelPoints = self.AccelMin
            return AccelPoints

    #Calculation for Skidpad
    def Calc_Skidpad(self, Tmin, Tyour):
        #Check Event and Year
        if((self.EventName == 'FSG' or 'FSA' or 'FSS') and (self.Year == 2017)):
            Tmax = 1.25 * Tmin
            if(Tyour < Tmax):
                SkidpadPoints = np.around(self.SkdipadMax*((np.square(Tmax/Tyour)-1)/0.5625)+self.SkidpadMin, 3)
                if(SkidPoints > 75):
                    SkidpadPoints = 'Error - Value to high'
                else:
                    SkidpadPoints = SkidpadPoints
            else:
                SkidpadPoints = self.SkidpadMin
            return SkidpadPoints
        elif((self.EventName == 'FSG' or 'FSA' or 'FSS') and (self.Year == 2015 or 2016)):
            Tmax = 1.25 * Tmin
            if(Tyour < Tmax):
                SkidpadPoints = np.around(self.SkidpadMax*((np.square(Tmax/Tyour)-1)/(np.square(Tmax/Tmin)-1))+self.SkidpadMin, 3)
                if(SkidpadPoints > 75):
                    SkidpadPoints = 'Error - Value to high'
                else:
                    SkidpadPoints = SkidpadPoints
            else:
                SkidpadPoints = self.SkidpadMin
            return SkidpadPoints
        elif((self.EventName == 'FSAE' or 'FSI') and (self.Year == 2017)):
            Tmax = 1.25 * Tmin
            if(Tyour < Tmax):
                SkidpadPoints = np.around(self.SkidpadMax*((np.square(Tmax/Tyour)-1)/(np.square(Tmax/Tmin)-1))+self.SkidpadMin, 3)
                if(SkidpadPoints > 75):
                    SkidpadPoints = 'Error - Value to high'
                else:
                    SkidpadPoints = SkidpadPoints
            else:
                SkidpadPoints = self.SkidpadMin
            return SkidpadPoints
        elif((self.EventName == 'FSAE' or 'FSI') and (self.Year == 2015 or 2016)):
            Tmax = 1.25 * Tmin
            if(Tyour < Tmax):
                SkidpadPoints = np.around(self.SkidpadMax*((np.square(Tmax/Tyour)-1)/(np.square(Tmax/Tmin)-1))+self.SkidpadMin, 3)
                if(SkidpadPoints > 50):
                    SkidpadPoints = 'Error - Value to high'
                else:
                    SkidpadPoints = SkidpadPoints
            else:
                SkidpadPoints = self.SkidpadMin
            return SkidpadPoints

    #Calculation for Autocross
    def Calc_AutoX(self, Tmin, Tyour):
        #Check Event and Year
        if((self.EventName == 'FSG' or 'FSA' or 'FSS') and (self.Year == 2017)):
            Tmax = 1.25 * Tmin
            if(Tyour < Tmax):
                AutoXPoints = np.around((self.AutoXMax*((Tmax/Tyour)-1)/0.25)+self.AutoXMin, 3)
                if(AutoXPoints > 100):
                    AutoXPoints = 'Error - Value to high'
                else:
                    AutoXPoints = AutoXPoints
            else:
                AutoXPoints = self.AutoXMin
            return AutoXPoints
        elif((self.EventName == 'FSG' or 'FSA' or 'FSS') and (self.Year == 2015 or 2016)):
            Tmax = 1.25 * Tmin
            if(Tyour < Tmax):
                AutoXPoints = np.around((self.AutoXMax*((Tmax/Tyour)-1)/((Tmax/Tmin)-1)+self.AutoXMin), 3)
                if(AutoXPoints > 100):
                    AutoXPoints = 'Error - Value to high'
                else:
                    AutoXPoints = AutoXPoints
            else:
                AutoXPoints = self.AutoXMin
            return AutoXPoints
        elif((self.EventName == 'FSAE' or 'FSI') and (self.Year == 2017)):
            Tmax = 1.45 * Tmin
            if(Tyour < Tmax):
                AutoXPoints = np.around((self.AutoXMax*((Tmax/Tyour)-1)/((Tmax/Tmin)-1)+self.AutoXMin), 3)
                if(AutoXPoints > 125):
                    AutoXPoints = 'Error - Value to high'
                else:
                    AutoXPoints = AutoXPoints
            else:
                AutoXPoints = self.AutoXMin
            return AutoXPoints
        elif((self.EventName == 'FSAE' or 'FSI') and (self.Year == 2015 or 2016)):
            Tmax = 1.45 * Tmin
            if(Tyour < Tmax):
                AutoXPoints = np.around((self.AutoXMax*((Tmax/Tyour)-1)/((Tmax/Tmin)-1)+self.AutoXMin), 3)
                if(AutoXPoints > 150):
                    AutoXPoints = 'Error - Value to high'
                else:
                    AutoXPoints = AutoXPoints
            else:
                AutoXPoints = self.AutoXMin
            return AutoXPoints

    #Calculation for Endurance
    def Calc_Endurance(self, Tmin, Tyour):
        #Check Event and Year
        if((self.EventName == 'FSG' or 'FSA' or 'FSS') and (self.Year == 2017)):
            Tmax= 4/3 * Tmin
            if(Tyour < Tmax):
                EndurancePoints = np.around((self.EnduranceMax*((Tmax/Tyour)-1)/0.333)+self.EnduranceMin, 3)
                if(EndurancePoints > 325):
                    EndurancePoints = 'Error - Value to high'
                else:
                    EndurancePoints = EndurancePoints
            else:
                EndurancePoints = self.EnduranceMin
            return EndurancePoints
        elif((self.EventName == 'FSG' or 'FSA' or 'FSS') and (self.Year == 2015 or 2016)):
            Tmax = 4/3 * Tmin
            if(Tyour < Tmax):
                EndurancePoints = np.around((self.EnduranceMax*((Tmax/Tyour)-1)/((Tmax/Tmin)-1) + self.EnduranceMin), 3)
                if(EndurancePoints > 325):
                    EndurancePoints = 'Error - Value to high'
                else:
                    EndurancePoints = EndurancePoints
            else:
                EndurancePoints = self.EnduranceMin
            return EndurancePoints
        elif((self.EventName == 'FSAE' or 'FSI') and (self.Year == 2017)):
            Tmax = 1.45 * Tmin
            if(Tyour <= Tmax):
                EndurancePoints = np.around((self.EnduranceMax*((Tmax/Tyour)-1)/((Tmax/Tmin)-1) + self.EnduranceMin), 3)
                if(EndurancePoints > 225):
                    EndurancePoints = 'Error - Value to high'
                else:
                    EndurancePoints = EndurancePoints
            else:
                EndurancePoints = 25
            return EndurancePoints
        elif((self.EventName == 'FSAE' or 'FSI') and (self.Year == 2015 or 2016)):
            Tmax = 1.45 * Tmin
            if(Tyour <= Tmax):
                EndurancePoints = np.around((self.EnduranceMax*((Tmax/Tyour)-1)/((Tmax/Tmin)-1) + self.EnduranceMin), 3)
                if(EndurancePoints > 300):
                    EndurancePoints = 'Error - Value to high'
                else:
                    EndurancePoints = EndurancePoints
            else:
                EndurancePoints = 0
            return EndurancePoints

    #Calculation for Efficiency (right now just for electrical car)
    def Calc_Efficiency(self, Tmin, Emin, Eff_fac_min, Eff_fac_max, Tyour, Eyour):
        #Check Event and Year
        if((self.EventName == 'FSG' or 'FSA' or 'FSS') and (self.Year == 2017)):
            Tmax = 1.333*Tmin
            #Check Value of Eff_fac_min
            if(Eff_fac_min >= 0.1):
                if(Tyour <= Tmax):
                    Eff_fac_your = ((Tmin*np.square(Emin))/(Tyour*np.square(Eyour)))
                    EfficiencyPoints = np.around((self.EfficiencyMax*((Eff_fac_min/Eff_fac_your)-1)/(Eff_fac_min/Eff_fac_max)-1), 3)
                    if(EfficiencyPoints > 100):
                        EfficiencyPoints = 'Error - Value to high'
                    else:
                        EfficiencyPoints = EfficiencyPoints
                else:
                    EfficiencyPoints = 0
                return EfficiencyPoints
            else:
                print('Error - Value for minimal efficiency factor is bigger than 0.1')
        elif((self.EventName == 'FSG' or 'FSA' or 'FSS') and (self.Year == 2015 or 2016)):
            Tmax = 1.333*Tmin
            if(Eff_fac_min == 0.1):
                if(Tyour <= Tmax):
                    Eff_fac_your = (Tmin/Tyour)*np.square((Emin/Eyour))
                    EfficiencyPoints = np.around((self.EfficiencyMax*((Eff_fac_min/Eff_fac_your)-1)/((Eff_fac_min/Eff_fac_max)-1)), 3)
                    if(EfficiencyPoints > 100):
                        EfficiencyPoints = 'Error - Value to high'
                    else:
                        EfficiencyPoints = EfficiencyPoints
                else:
                    EfficiencyPoints = 0
                return EfficiencyPoints
            else:
                print('Error - Value for minimal efficiency factor is not 0.1')
```

- `*_quality_signal` values (signal order above): 2925, 30178, 4.887863, 0.102906, 0.023431, 0.022592, 0.020144, 0.77156, 0.760579, 0.747919, 0.747919, 0.742533, 0.727006, 0, 0.047149, 0.42299, 30178, 639, 135, 47.226917, 0.773905, 0.109086, 0, 0.775926, 0, 0.001852, 0.04922, 0.005896, 0, 0, 0, 0, 0.001852, 1, 0.014815, false, 0, 0.02037, 0, 0.072222, 0.012963
- Unsuffixed `qsc_*` values (same order): 0, 0, 0, null, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
- `effective` "0" · `hits` 6
**Row 5**

- `hexsha` `95be063085593ec4351b60d98d2725d1980e7090` · `size` 197 · `ext` `py` · `lang` Python
- `max_stars_repo`: `src/main/python/afp_alppaca/__init__.py` in `Scout24/afp-alppaca` @ `e06fca7f9b53ba50c1ff15d23f1f63879a232d85`, licenses `["Apache-2.0"]`, count 3, stars events 2019-04-30T20:46:16.000Z → 2021-07-04T18:22:47.000Z
- `max_issues_repo`: same path/repo/head/licenses, count 23, issues events 2015-06-18T09:45:19.000Z → 2015-12-15T10:18:22.000Z
- `max_forks_repo`: same path and head, repo `ImmobilienScout24/alppaca`, licenses `["Apache-2.0"]`, count 4, forks events 2015-10-13T07:26:25.000Z → 2015-11-25T10:14:27.000Z
- `avg_line_length` 49.25 · `max_line_length` 112 · `alphanum_fraction` 0.898477

`content`:

```python
from __future__ import print_function, absolute_import, unicode_literals, division

from afp_alppaca.ims_interface import IMSCredentialsProvider, NoRolesFoundException, NoCredentialsFoundException
```

- `*_quality_signal` values (signal order above): 19, 197, 8.842105, 0.842105, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.071066, 197, 3, 113, 65.666667, 0.918033, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, true, 0, 1, 0, 1, 0.5
- Unsuffixed `qsc_*` values (same order): 1, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1
- `effective` "0" · `hits` 6
**Row 6**

- `hexsha` `95e7b6dc7b98709c674c4f929b391f0338129a83` · `size` 68 · `ext` `py` · `lang` Python
- `max_stars_repo`: `pandas_s3_redshift/__init__.py` in `nit567esh/pandas_s3_redshift` @ `02e240ea48ee5237c2739ea2da4e42142777cbea`, licenses `["MIT"]`, count null, event datetimes null → null
- `max_issues_repo` and `max_forks_repo`: identical to `max_stars_repo` (counts and event datetimes null)
- `avg_line_length` 17 · `max_line_length` 44 · `alphanum_fraction` 0.794118

`content`:

```python
name="pandas_s3_redshift/pandas_s3_redshift"

__version__ = "1.0.0"
```

- `*_quality_signal` values (signal order above): 11, 68, 4.181818, 0.636364, 0.347826, 0.695652, 0, 0, 0, 0, 0, 0, 0, 0, 0.079365, 0.073529, 68, 3, 45, 22.666667, 0.650794, 0, 0, 0, 0, 0, 0.626866, 0.552239, 0, 0, 0, 0, 0, 1, 0, false, 0, 0, 0, 0, 0
- Unsuffixed `qsc_*` values (same order): 1, 0, 0, null, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
- `effective` "0" · `hits` 6
**Row 7**

- `hexsha` `255575956ad4a24a2e085bf6d7e7fcaa5cbccae0` · `size` 1110 · `ext` `py` · `lang` Python
- `max_stars_repo`: `Python/XGBoost/evaluate-model-quality.py` in `James-McNeill/Learning` @ `3c4fe1a64240cdf5614db66082bd68a2f16d2afb`, licenses `["MIT"]`, count null, event datetimes null → null
- `max_issues_repo` and `max_forks_repo`: identical to `max_stars_repo` (counts and event datetimes null)
- `avg_line_length` 33.636364 · `max_line_length` 128 · `alphanum_fraction` 0.766667

`content`:

```python
# Example 1: Showing the Root Mean Squared Error (RMSE) metric. Penalises large residuals
# Create the DMatrix: housing_dmatrix
housing_dmatrix = xgb.DMatrix(data=X, label=y)

# Create the parameter dictionary: params
params = {"objective":"reg:linear", "max_depth":4}

# Perform cross-validation: cv_results
cv_results = xgb.cv(dtrain=housing_dmatrix, params=params, nfold=4, num_boost_round=5, metrics='rmse', as_pandas=True, seed=123)

# Print cv_results
print(cv_results)

# Extract and print final boosting round metric
print((cv_results["test-rmse-mean"]).tail(1))

# Example 2: Showing the Mean Absolute Error (MAE) metric
# Create the DMatrix: housing_dmatrix
housing_dmatrix = xgb.DMatrix(data=X, label=y)

# Create the parameter dictionary: params
params = {"objective":"reg:linear", "max_depth":4}

# Perform cross-validation: cv_results
cv_results = xgb.cv(dtrain=housing_dmatrix, params=params, nfold=4, num_boost_round=5, metrics='mae', as_pandas=True, seed=123)

# Print cv_results
print(cv_results)

# Extract and print final boosting round metric
print((cv_results["test-mae-mean"]).tail(1))
```

- `*_quality_signal` values (signal order above): 168, 1110, 4.922619, 0.339286, 0.108827, 0.101572, 0.055623, 0.822249, 0.822249, 0.822249, 0.822249, 0.822249, 0.822249, 0, 0.016227, 0.111712, 1110, 32, 129, 34.6875, 0.822515, 0.445946, 0, 0.6, 0, 0, 0.149502, 0, 0, 0, 0, 0, 0, 1, 0, false, 0, 0, 0, 0, 0.4
- Unsuffixed `qsc_*` values (same order): 0, 0, 0, null, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
- `effective` "0" · `hits` 6
**Row 8**

- `hexsha` `256f6e761afcb69bd398f1834e55c819a31a18cc` · `size` 180 · `ext` `py` · `lang` Python
- `max_stars_repo`: `smarc_gz_model_cleaner/setup.py` in `Jollerprutt/smarc_utils` @ `cf938dbddffbf745cb8d2cbc92c502e286f63b75`, licenses `["BSD-3-Clause"]`, count 1, stars events 2022-03-19T10:55:37.000Z → 2022-03-19T10:55:37.000Z
- `max_issues_repo`: same path/repo/head/licenses, count 8, issues events 2018-01-26T10:58:47.000Z → 2021-06-06T11:10:51.000Z
- `max_forks_repo`: same path/repo/head/licenses, count 5, forks events 2017-10-17T08:21:36.000Z → 2021-03-24T16:35:01.000Z
- `avg_line_length` 30 · `max_line_length` 60 · `alphanum_fraction` 0.822222

`content`:

```python
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup

d = generate_distutils_setup(
    scripts=['scripts/gz_models_cleaner.py'])
setup(**d)
```

- `*_quality_signal` values (signal order above): 26, 180, 5.384615, 0.576923, 0.242857, 0.314286, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.088889, 180, 6, 61, 30, 0.853659, 0, 0, 0, 1, 0, 0.154696, 0.154696, 0, 0, 0, 0, 0, 1, 0, false, 0, 0.4, 0, 0.4, 0
- Unsuffixed `qsc_*` values (same order): 1, 0, 0, null, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0
- `effective` "0" · `hits` 6
**Row 9**

- `hexsha` `c278582f8b1bd2eca3606b60471b92c677a91aab` · `size` 180 · `ext` `py` · `lang` Python
- `max_stars_repo`: `lab2/myutils/lab2_task1/__init__.py` in `kinpa200296/python_labs` @ `bb26c426cbe9bb27f45b8ee4c974c38db300468f`, licenses `["MIT"]`, count null, event datetimes null → null
- `max_issues_repo` and `max_forks_repo`: identical to `max_stars_repo` (counts and event datetimes null)
- `avg_line_length` 20 · `max_line_length` 40 · `alphanum_fraction` 0.827778

`content`:

```python
#!/usr/bin/env python

from mergesort import sort
from mergesort import max_elem_in_memory
from gen import generate
from checker import check_if_sorted

__author__ = 'kinpa200296'
```

- `*_quality_signal` values (signal order above): 27, 180, 5.185185, 0.740741, 0.185714, 0.271429, 0, 0, 0, 0, 0, 0, 0, 0, 0.038217, 0.127778, 180, 8, 41, 22.5, 0.853503, 0.111111, 0, 0, 1, 0, 0.069182, 0, 0, 0, 0, 0, 0, 1, 0, false, 0, 0.8, 0, 0.8, 0
- Unsuffixed `qsc_*` values (same order): 1, 0, 0, null, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0
- `effective` "0" · `hits` 6
**Row 10**

- `hexsha` `c28c098bbfc8d5e86b37bade41c2b54b60ed58f5` · `size` 154 · `ext` `py` · `lang` Python
- `max_stars_repo`: `helloworld.py` in `zhoushao12/Script` @ `e92c8a657f9ff96037a3265df190b13a18199f1f`, licenses `["Apache-2.0"]`, count null, event datetimes null → null
- `max_issues_repo` and `max_forks_repo`: identical to `max_stars_repo` (counts and event datetimes null)
- `avg_line_length` 22 · `max_line_length` 30 · `alphanum_fraction` 0.363636

`content` (a Python 2 file, kept verbatim):

```python
#!/usr/bin/env python
def main():
    print 'hello world!'
    print '------------------'
    print '------------------'
    print '------------------'
main()
```

- `*_quality_signal` values (signal order above): 13, 154, 4.307692, 0.692308, 0.357143, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.162338, 154, 7, 31, 22, 0.434109, 0.12987, 0, 0.5, 0, 0, 0.492537, 0, 0, 0, 0, 0, 0, 0, null, null, 0, 0, null, null, 0.666667
- Unsuffixed `qsc_*` values (same order): 1, 0, 0, null, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1
- `effective` "0" · `hits` 6
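The null `qsc_codepython_*` values in this row line up with the file being Python 2: a Python 3 parser cannot build an AST for it, so AST-based signals have nothing to work with. The check below is an illustration of that failure mode, not the dataset's actual pipeline.

```python
import ast

# The Python 2 print statement from the row above is a syntax error in
# Python 3, so AST-based signals cannot be computed for this file.
try:
    ast.parse("print 'hello world!'")
except SyntaxError:
    print("not parseable as Python 3")
```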
**Row 11**

- `hexsha` `c299bcfbb8f0ca5a1de9643ebd10de95bf26c5a7` · `size` 258 · `ext` `py` · `lang` Python
- `max_stars_repo`: `cpab/cpaNd/utils/__init__.py` in `freifeld/cpabDiffeo` @ `22df6cdbd7111b9ae3e7f1c0e31ff85e92d281a6`, licenses `["MIT"]`, count 17, stars events 2016-03-16T21:35:36.000Z → 2021-11-11T04:16:21.000Z
- `max_issues_repo`: same path/repo/head/licenses, count null, issues event datetimes null → null
- `max_forks_repo`: same path/repo/head/licenses, count 4, forks events 2016-08-12T23:02:09.000Z → 2019-03-14T18:20:36.000Z
- `avg_line_length` 51.6 · `max_line_length` 82 · `alphanum_fraction` 0.937984

`content`:

```python
from _null import null
from _create_constraint_mat_preserve_vol import create_constraint_mat_preserve_vol
from _create_constraint_mat_zerovals import create_constraint_mat_zerovals
from _get_stuff_for_the_local_version import get_stuff_for_the_local_version
```

- `*_quality_signal` values (signal order above): 40, 258, 5.35, 0.375, 0.299065, 0.35514, 0.214953, 0.523364, 0.242991, 0, 0, 0, 0, 0, 0, 0.062016, 258, 4, 83, 64.5, 0.884298, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, true, 0, 1, 0, 1, 0
- Unsuffixed `qsc_*` values (same order): 0, 0, 0, null, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0
- `effective` "0" · `hits` 6
**Row 12**

- `hexsha` `c2f2943a02931601f6087aebf5f7c074c470f653` · `size` 21508 · `ext` `py` · `lang` Python
- `max_stars_repo`: `blockcerts/const.py` in `docknetwork/verifiable-claims-engine` @ `1aab94510f421ce131642b64aefcd9a21c888f23`, licenses `["MIT"]`, count 5, stars events 2019-10-21T18:17:38.000Z → 2020-12-09T06:40:32.000Z
- `max_issues_repo`: same path/repo/head/licenses, count 4, issues events 2019-11-01T20:10:54.000Z → 2020-01-21T20:41:00.000Z
- `max_forks_repo`: same path/repo/head/licenses, count 2, forks events 2020-02-02T20:00:46.000Z → 2020-02-12T10:12:05.000Z
- `avg_line_length` 228.808511 · `max_line_length` 18482 · `alphanum_fraction` 0.888879

`content` (line structure restored; an inline ~14 KB base64-encoded PNG inside `DEFAULT_DISPLAY_HTML` is elided as marked):

```python
from voluptuous import Schema, REMOVE_EXTRA, Optional, All, Length

RECIPIENT_NAME_KEY = 'name'
RECIPIENT_EMAIL_KEY = 'identity'
RECIPIENT_PUBLIC_KEY_KEY = 'pubkey'
RECIPIENT_ADDITIONAL_FIELDS_KEY = 'additional_fields'
RECIPIENT_EXPIRES_KEY = 'expires'

ISSUER_SCHEMA = Schema(
    {
        "name": str,
        "main_url": str,
        "id": str,
        "email": str,
        "logo_file": str,
        "revocation_list": str,
        "intro_url": str,
        "signature_lines": list,
        "signature_file": str,
    },
    required=True,
    extra=REMOVE_EXTRA,
)

TEMPLATE_SCHEMA = Schema(
    {
        "id": str,
        "title": str,
        "description": str,
        "criteria_narrative": str,
        "image": str,
        "additional_global_fields": list,
        "additional_per_recipient_fields": list,
        "display_html": str,
        Optional("expires_at"): str,
    },
    required=True,
    extra=REMOVE_EXTRA,
)

RECIPIENT_SCHEMA = Schema(
    All(
        [
            {
                RECIPIENT_NAME_KEY: str,
                RECIPIENT_EMAIL_KEY: str,
                RECIPIENT_PUBLIC_KEY_KEY: str,
                RECIPIENT_ADDITIONAL_FIELDS_KEY: dict,
            }
        ],
        Length(min=1)
    ),
    required=True,
    extra=REMOVE_EXTRA,
)

JOB_SCHEMA = Schema(
    {
        "blockchain": str,
        Optional('eth_public_key'): str,
        Optional('eth_private_key'): str,
        Optional('eth_key_created_at'): str,
        Optional("gas_price"): int,
        Optional("gas_limit"): int,
    },
    required=True,
    extra=REMOVE_EXTRA,
)

DEFAULT_NO_SAFE_MODE = True
DEFAULT_ADDITIONAL_GLOBAL_FIELDS = '{"fields": [{"path": "$.displayHtml","value": ""}, {"path": "$.@context","value":' \
                                   ' ["https://w3id.org/openbadges/v2", "https://w3id.org/blockcerts/v2",' \
                                   ' {"displayHtml": { "@id": "schema:description" }}]}]}'
DEFAULT_ADDITIONAL_PER_RECIPIENT_FIELDS = '{"fields": [{"path": "$.displayHtml","value": "*|FOO|*","csv_column": ' \
                                          '"displayHtml"}]}'
HTML_DATE_FORMAT = '%m/%d/%Y'
PLACEHOLDER_RECIPIENT_NAME = "%RECIPIENT_NAME%"
PLACEHOLDER_RECIPIENT_EMAIL = "%RECIPIENT_EMAIL%"
PLACEHOLDER_ISSUING_DATE = "%ISSUING_DATE%"
PLACEHOLDER_ISSUER_LOGO = "%ISSUER_LOGO%"
PLACEHOLDER_ISSUER_SIGNATURE_FILE = "%ISSUER_SIGNATURE_FILE%"
PLACEHOLDER_EXPIRATION_DATE = "%EXPIRATION_DATE%"
PLACEHOLDER_CERT_TITLE = "%CERT_TITLE%"
PLACEHOLDER_CERT_DESCRIPTION = "%CERT_DESCRIPTION%"
HTML_PLACEHOLDERS = [PLACEHOLDER_RECIPIENT_NAME, PLACEHOLDER_RECIPIENT_EMAIL, PLACEHOLDER_ISSUING_DATE,
                     PLACEHOLDER_ISSUER_LOGO, PLACEHOLDER_ISSUER_SIGNATURE_FILE, PLACEHOLDER_EXPIRATION_DATE,
                     PLACEHOLDER_CERT_TITLE, PLACEHOLDER_CERT_DESCRIPTION]
# An HTML certificate template with inline styles; the base64 PNG payload in
# the final <img> tag is elided here ("iVBORw0KGgo...").
DEFAULT_DISPLAY_HTML = '<div class="sc-EHOje kcwAhk" style="font-family: Lato; text-align: left;"> <img src="%ISSUER_LOGO%" class="sc-jTzLTM gdVHeQ" style="max-width: 110px;" /><h2 class="sc-bZQynM hJaChX" style="font-size: 18.4px; font-weight: normal; font-style: italic; line-height: 1.76; -webkit-letter-spacing: -0.1px; -moz-letter-spacing: -0.1px; -ms-letter-spacing: -0.1px; letter-spacing: -0.1px; color: #2c2b3f; margin: 25px 0 0 0;" > %CERT_TITLE%</h2><p class="sc-gzVnrw gsQrMB" style="font-size: 14.4px; line-height: 1.88; -webkit-letter-spacing: 0; -moz-letter-spacing: 0; -ms-letter-spacing: 0; letter-spacing: 0; color: #9595a0; margin: 0;" > Issued to</p><h3 class="sc-htoDjs kQAJr" style="font-weight: normal; line-height: 1.38; -webkit-letter-spacing: -0.1px; -moz-letter-spacing: -0.1px; -ms-letter-spacing: -0.1px; letter-spacing: -0.1px; color: #d52c1e; margin: 0 0 10px 0; font-size: 46.4px; font-family: LucidaGrande; line-height: 0.7; -webkit-letter-spacing: -00.2px; -moz-letter-spacing: -00.2px; -ms-letter-spacing: -00.2px; letter-spacing: -00.2px; margin: 35px 0 40px 0; text-transform: uppercase;" > %RECIPIENT_NAME%</h3><div class="sc-dnqmqq bDsesi"><div class="sc-gpHHfC hrlnEG" style="display: inline-block; margin-right: 35px; margin-bottom: 15px;" ><div class="sc-gVyKpa gMtABg" style="font-size: 14.4px; line-height: 1.88; -webkit-letter-spacing: 0; -moz-letter-spacing: 0; -ms-letter-spacing: 0; letter-spacing: 0; color: #9595a0;" > Issue date</div><div class="sc-gVyKpa sc-eXNvrr btZsLP" style="font-size: 14.4px; line-height: 1.88; -webkit-letter-spacing: 0; -moz-letter-spacing: 0; -ms-letter-spacing: 0; letter-spacing: 0; color: #9595a0; color: #2c2b3f;" > %ISSUING_DATE%</div></div><div class="sc-gpHHfC hrlnEG" style="display: inline-block; margin-right: 35px; margin-bottom: 15px;" ><div class="sc-gVyKpa gMtABg" style="font-size: 14.4px; line-height: 1.88; -webkit-letter-spacing: 0; -moz-letter-spacing: 0; -ms-letter-spacing: 0; letter-spacing: 0; color: #9595a0;" > Expiration date</div><div class="sc-gVyKpa sc-eXNvrr btZsLP" style="font-size: 14.4px; line-height: 1.88; -webkit-letter-spacing: 0; -moz-letter-spacing: 0; -ms-letter-spacing: 0; letter-spacing: 0; color: #9595a0; color: #2c2b3f;" > %EXPIRATION_DATE%</div></div> <img src="%ISSUER_SIGNATURE%" class="sc-fjdhpX hrCbRC" style="display: inline-block; max-width: 113px; max-height: 50px" /></div><p class="sc-VigVT kAqoHM" style="font-size: 14px; line-height: 1.36; color: #bababa; margin: 15px 0 27px 0;" > %CERT_DESCRIPTION%</p> <img src="data:image/png;base64,iVBORw0KGgo..." class="sc-jzJRlG bvIClq" style="width: 179px; height: 28px;" width="179" height="28" /></div>'
BAKED_IMAGE_SUFFIX = "_baked.png"
PNG_EXTENSION = ".png"
DEFAULT_ENCODING = 'utf-8'
TEMP_PATH = "/app/temp"
ETH_PRIVATE_KEY_PATH = f"{TEMP_PATH}/keyring"
ETH_PRIVATE_KEY_FILE_NAME = "eth_private_key"
```

- `*_quality_signal` values (signal order above): 1203, 21508, 15.778055, 0.533666, 0.021917, 0.020652, 0.007165, 0.08577, 0.064064, 0.052948, 0.052948, 0.052948, 0.052948, 0, 0.12584, 0.045285, 21508, 93, 18483, 231.268817, 0.798529, 0, 0, 0.113636, 0, 0.022727, 0.895574, 0.758973, 0, 1, 0, 0, 0, 1, 0, false, 0, 0.011364, 0, 0.011364, 0
- Unsuffixed `qsc_*` values (same order): 0, 0, 1, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, null, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
- `effective` "0" · `hits` 6
6c11e06d62b2eef9ccbb25f94fe0bee162ce31fd
20,222
py
Python
tests/test_join.py
fferrin/pytopojson
5128136c9502f4e29330b6cc7e524641bff5f95e
[ "0BSD" ]
11
2019-11-15T23:22:52.000Z
2022-01-22T20:46:30.000Z
tests/test_join.py
fferrin/topojson
7f90e497d2b54798f51480181c81c330770cb401
[ "0BSD" ]
8
2019-11-08T03:03:29.000Z
2022-02-28T09:52:09.000Z
tests/test_join.py
fferrin/topojson
7f90e497d2b54798f51480181c81c330770cb401
[ "0BSD" ]
2
2020-07-09T06:45:31.000Z
2021-03-22T13:38:35.000Z
import unittest from pytopojson import ( extract, join, ) class JoinTestCase(unittest.TestCase): def setUp(self): self.extract = extract.Extract() self.join = join.Join() def test_join_the_returned_hashmap_has_true_for_junction_points(self): junctions = self.join( self.extract( { "cba": {"type": "LineString", "arcs": [[2, 0], [1, 0], [0, 0]]}, "ab": {"type": "LineString", "arcs": [[0, 0], [1, 0]]}, } ) ) self.assertEqual(junctions.has([2, 0]), True) self.assertEqual(junctions.has([0, 0]), True) def test_join_the_returned_hashmap_has_undefined_for_non_junction_points(self): junctions = self.join( self.extract( { "cba": {"type": "LineString", "arcs": [[2, 0], [1, 0], [0, 0]]}, "ab": {"type": "LineString", "arcs": [[0, 0], [2, 0]]}, } ) ) self.assertEqual(junctions.has([1, 0]), False) def test_join_exact_duplicate_lines_abc_and_abc_have_junctions_at_their_end_points( self, ): junctions = self.join( self.extract( { "abc": {"type": "LineString", "arcs": [[0, 0], [1, 0], [2, 0]]}, "abc2": {"type": "LineString", "arcs": [[0, 0], [1, 0], [2, 0]]}, } ) ) self.assertCountEqual(junctions.values(), [[0, 0], [2, 0]]) def test_join_reversed_duplicate_lines_abc_and_cba_have_junctions_at_their_end_points( self, ): junctions = self.join( self.extract( { "abc": {"type": "LineString", "arcs": [[0, 0], [1, 0], [2, 0]]}, "cba": {"type": "LineString", "arcs": [[2, 0], [1, 0], [0, 0]]}, } ) ) self.assertCountEqual(junctions.values(), [[0, 0], [2, 0]]) def test_join_exact_duplicate_rings_abca_and_abca_have_no_junctions(self): junctions = self.join( self.extract( { "abca": { "type": "Polygon", "arcs": [[[0, 0], [1, 0], [2, 0], [0, 0]]], }, "abca2": { "type": "Polygon", "arcs": [[[0, 0], [1, 0], [2, 0], [0, 0]]], }, } ) ) self.assertCountEqual(junctions.values(), []) def test_join_reversed_duplicate_rings_acba_and_abca_have_no_junctions(self): junctions = self.join( self.extract( { "abca": { "type": "Polygon", "arcs": [[[0, 0], [1, 0], [2, 0], [0, 0]]], }, "acba": { "type": "Polygon", "arcs": [[[0, 0], [2, 0], [1, 0], [0, 0]]], }, } ) ) self.assertCountEqual(junctions.values(), []) def test_join_rotated_duplicate_rings_bcab_and_abca_have_no_junctions(self): junctions = self.join( self.extract( { "abca": { "type": "Polygon", "arcs": [[[0, 0], [1, 0], [2, 0], [0, 0]]], }, "bcab": { "type": "Polygon", "arcs": [[[1, 0], [2, 0], [0, 0], [1, 0]]], }, } ) ) self.assertCountEqual(junctions.values(), []) def test_join_ring_abca_and_line_abca_have_a_junction_at_a(self): junctions = self.join( self.extract( { "abcaLine": { "type": "LineString", "arcs": [[0, 0], [1, 0], [2, 0], [0, 0]], }, "abcaPolygon": { "type": "Polygon", "arcs": [[[0, 0], [1, 0], [2, 0], [0, 0]]], }, } ) ) self.assertCountEqual(junctions.values(), [[0, 0]]) def test_join_ring_bcab_and_line_abca_have_a_junction_at_a(self): junctions = self.join( self.extract( { "abcaLine": { "type": "LineString", "arcs": [[0, 0], [1, 0], [2, 0], [0, 0]], }, "bcabPolygon": { "type": "Polygon", "arcs": [[[1, 0], [2, 0], [0, 0], [1, 0]]], }, } ) ) self.assertCountEqual(junctions.values(), [[0, 0]]) def test_join_ring_abca_and_line_bcab_have_a_junction_at_b(self): junctions = self.join( self.extract( { "bcabLine": { "type": "LineString", "arcs": [[1, 0], [2, 0], [0, 0], [1, 0]], }, "abcaPolygon": { "type": "Polygon", "arcs": [[[0, 0], [1, 0], [2, 0], [0, 0]]], }, } ) ) self.assertCountEqual(junctions.values(), [[1, 0]]) def test_join_when_an_old_arc_abc_extends_a_new_arc_ab_there_is_a_junction_at_b( self, ): junctions = self.join( self.extract( { "abc": {"type": "LineString", 
"arcs": [[0, 0], [1, 0], [2, 0]]}, "ab": {"type": "LineString", "arcs": [[0, 0], [1, 0]]}, } ) ) self.assertCountEqual(junctions.values(), [[0, 0], [1, 0], [2, 0]]) def test_join_when_a_reversed_old_arc_cba_extends_a_new_arc_ab_there_is_a_junction_at_b( self, ): junctions = self.join( self.extract( { "cba": {"type": "LineString", "arcs": [[2, 0], [1, 0], [0, 0]]}, "ab": {"type": "LineString", "arcs": [[0, 0], [1, 0]]}, } ) ) self.assertCountEqual(junctions.values(), [[0, 0], [1, 0], [2, 0]]) def test_join_when_a_new_arc_ade_shares_its_start_with_an_old_arc_abc_there_is_a_junction_at_a( self, ): junctions = self.join( self.extract( { "ade": {"type": "LineString", "arcs": [[0, 0], [1, 0], [2, 0]]}, "abc": {"type": "LineString", "arcs": [[0, 0], [1, 1], [2, 1]]}, } ) ) self.assertCountEqual(junctions.values(), [[0, 0], [2, 0], [2, 1]]) def test_join_ring_aba_has_no_junctions(self): junctions = self.join( self.extract( {"aba": {"type": "Polygon", "arcs": [[[0, 0], [1, 0], [0, 0]]]}} ) ) self.assertCountEqual(junctions.values(), []) def test_join_ring_aa_has_no_junctions(self): junctions = self.join( self.extract({"aa": {"type": "Polygon", "arcs": [[[0, 0], [0, 0]]]}}) ) self.assertCountEqual(junctions.values(), []) def test_join_degenerate_ring_a_has_no_junctions(self): junctions = self.join( self.extract({"a": {"type": "Polygon", "arcs": [[[0, 0]]]}}) ) self.assertCountEqual(junctions.values(), []) def test_join_when_a_new_line_dec_shares_its_end_with_an_old_line_abc_there_is_a_junction_at_c( self, ): junctions = self.join( self.extract( { "abc": {"type": "LineString", "arcs": [[0, 0], [1, 0], [2, 0]]}, "dec": {"type": "LineString", "arcs": [[0, 1], [1, 1], [2, 0]]}, } ) ) self.assertCountEqual(junctions.values(), [[0, 0], [2, 0], [0, 1]]) def test_join_when_a_new_line_abc_extends_an_old_line_ab_there_is_a_junction_at_b( self, ): junctions = self.join( self.extract( { "ab": {"type": "LineString", "arcs": [[0, 0], [1, 0]]}, "abc": {"type": "LineString", "arcs": [[0, 0], [1, 0], [2, 0]]}, } ) ) self.assertCountEqual(junctions.values(), [[0, 0], [1, 0], [2, 0]]) def test_join_when_a_new_line_abc_extends_a_reversed_old_line_ba_there_is_a_junction_at_b( self, ): junctions = self.join( self.extract( { "ba": {"type": "LineString", "arcs": [[1, 0], [0, 0]]}, "abc": {"type": "LineString", "arcs": [[0, 0], [1, 0], [2, 0]]}, } ) ) self.assertCountEqual(junctions.values(), [[0, 0], [1, 0], [2, 0]]) def test_join_when_a_new_line_starts_bc_in_the_middle_of_an_old_line_abc_there_is_a_junction_at_b( self, ): junctions = self.join( self.extract( { "abc": {"type": "LineString", "arcs": [[0, 0], [1, 0], [2, 0]]}, "bc": {"type": "LineString", "arcs": [[1, 0], [2, 0]]}, } ) ) self.assertCountEqual(junctions.values(), [[0, 0], [1, 0], [2, 0]]) def test_join_when_a_new_line_bc_starts_in_the_middle_of_a_reversed_old_line_cba_there_is_a_junction_at_b( self, ): junctions = self.join( self.extract( { "cba": {"type": "LineString", "arcs": [[2, 0], [1, 0], [0, 0]]}, "bc": {"type": "LineString", "arcs": [[1, 0], [2, 0]]}, } ) ) self.assertCountEqual(junctions.values(), [[0, 0], [1, 0], [2, 0]]) def test_join_when_a_new_line_abd_deviates_from_an_old_line_abc_there_is_a_junction_at_b( self, ): junctions = self.join( self.extract( { "abc": {"type": "LineString", "arcs": [[0, 0], [1, 0], [2, 0]]}, "abd": {"type": "LineString", "arcs": [[0, 0], [1, 0], [3, 0]]}, } ) ) self.assertCountEqual(junctions.values(), [[0, 0], [2, 0], [1, 0], [3, 0]]) def 
test_join_when_a_new_line_abd_deviates_from_a_reversed_old_line_cba_there_is_a_junction_at_b( self, ): junctions = self.join( self.extract( { "cba": {"type": "LineString", "arcs": [[2, 0], [1, 0], [0, 0]]}, "abd": {"type": "LineString", "arcs": [[0, 0], [1, 0], [3, 0]]}, } ) ) self.assertCountEqual(junctions.values(), [[2, 0], [0, 0], [1, 0], [3, 0]]) def test_join_when_a_new_line_dbc_merges_into_an_old_line_abc_there_is_a_junction_at_b( self, ): junctions = self.join( self.extract( { "abc": {"type": "LineString", "arcs": [[0, 0], [1, 0], [2, 0]]}, "dbc": {"type": "LineString", "arcs": [[3, 0], [1, 0], [2, 0]]}, } ) ) self.assertCountEqual(junctions.values(), [[0, 0], [2, 0], [1, 0], [3, 0]]) def test_join_when_a_new_line_dbc_merges_into_a_reversed_old_line_cba_there_is_a_junction_at_b( self, ): junctions = self.join( self.extract( { "cba": {"type": "LineString", "arcs": [[2, 0], [1, 0], [0, 0]]}, "dbc": {"type": "LineString", "arcs": [[3, 0], [1, 0], [2, 0]]}, } ) ) self.assertCountEqual(junctions.values(), [[2, 0], [0, 0], [1, 0], [3, 0]]) def test_join_when_a_new_line_dbe_shares_a_single_midpoint_with_an_old_line_abc_there_is_a_junction_at_b( self, ): junctions = self.join( self.extract( { "abc": {"type": "LineString", "arcs": [[0, 0], [1, 0], [2, 0]]}, "dbe": {"type": "LineString", "arcs": [[0, 1], [1, 0], [2, 1]]}, } ) ) self.assertCountEqual( junctions.values(), [[0, 0], [2, 0], [2, 1], [1, 0], [0, 1]] ) def test_join_when_a_new_line_abde_skips_a_point_with_an_old_line_abcde_there_is_a_junction_at_b_and_d( self, ): junctions = self.join( self.extract( { "abcde": { "type": "LineString", "arcs": [[0, 0], [1, 0], [2, 0], [3, 0], [4, 0]], }, "abde": { "type": "LineString", "arcs": [[0, 0], [1, 0], [3, 0], [4, 0]], }, } ) ) self.assertCountEqual(junctions.values(), [[0, 0], [4, 0], [1, 0], [3, 0]]) def test_join_when_a_new_line_abde_skips_a_point_with_a_reversed_old_line_edcba_there_is_a_junction_at_b_and_d( self, ): junctions = self.join( self.extract( { "edcba": { "type": "LineString", "arcs": [[4, 0], [3, 0], [2, 0], [1, 0], [0, 0]], }, "abde": { "type": "LineString", "arcs": [[0, 0], [1, 0], [3, 0], [4, 0]], }, } ) ) self.assertCountEqual(junctions.values(), [[4, 0], [0, 0], [1, 0], [3, 0]]) def test_join_when_a_line_abcdbe_self_intersects_with_its_middle_there_are_no_junctions( self, ): junctions = self.join( self.extract( { "abcdbe": { "type": "LineString", "arcs": [[0, 0], [1, 0], [2, 0], [3, 0], [1, 0], [4, 0]], } } ) ) self.assertCountEqual(junctions.values(), [[0, 0], [4, 0]]) def test_join_when_a_line_abacd_self_intersects_with_its_start_there_are_no_junctions( self, ): junctions = self.join( self.extract( { "abacd": { "type": "LineString", "arcs": [[0, 0], [1, 0], [0, 0], [3, 0], [4, 0]], } } ) ) self.assertCountEqual(junctions.values(), [[0, 0], [4, 0]]) def test_join_when_a_line_abcdbd_self_intersects_with_its_end_there_are_no_junctions( self, ): junctions = self.join( self.extract( { "abcdbd": { "type": "LineString", "arcs": [[0, 0], [1, 0], [4, 0], [3, 0], [4, 0]], } } ) ) self.assertCountEqual(junctions.values(), [[0, 0], [4, 0]]) def test_join_when_an_old_line_abcdbe_self_intersects_and_shares_a_point_b_there_is_a_junction_at_b( self, ): junctions = self.join( self.extract( { "abcdbe": { "type": "LineString", "arcs": [[0, 0], [1, 0], [2, 0], [3, 0], [1, 0], [4, 0]], }, "fbg": {"type": "LineString", "arcs": [[0, 1], [1, 0], [2, 1]]}, } ) ) self.assertCountEqual( junctions.values(), [[0, 0], [4, 0], [1, 0], [0, 1], [2, 1]] ) def 
test_join_when_a_line_abca_is_closed_there_is_a_junction_at_a(self): junctions = self.join( self.extract( { "abca": { "type": "LineString", "arcs": [[0, 0], [1, 0], [0, 1], [0, 0]], } } ) ) self.assertCountEqual(junctions.values(), [[0, 0]]) def test_join_when_a_ring_abca_is_closed_there_are_no_junctions(self): junctions = self.join( self.extract( { "abca": { "type": "Polygon", "arcs": [[[0, 0], [1, 0], [0, 1], [0, 0]]], } } ) ) self.assertCountEqual(junctions.values(), []) def test_join_exact_duplicate_rings_abca_and_abca_share_the_arc_abca(self): junctions = self.join( self.extract( { "abca": { "type": "Polygon", "arcs": [[[0, 0], [1, 0], [0, 1], [0, 0]]], }, "abca2": { "type": "Polygon", "arcs": [[[0, 0], [1, 0], [0, 1], [0, 0]]], }, } ) ) self.assertCountEqual(junctions.values(), []) def test_join_reversed_duplicate_rings_abca_and_acba_share_the_arc_abca(self): junctions = self.join( self.extract( { "abca": { "type": "Polygon", "arcs": [[[0, 0], [1, 0], [0, 1], [0, 0]]], }, "acba": { "type": "Polygon", "arcs": [[[0, 0], [0, 1], [1, 0], [0, 0]]], }, } ) ) self.assertCountEqual(junctions.values(), []) def test_join_coincident_rings_abca_and_bcab_share_the_arc_bcab(self): junctions = self.join( self.extract( { "abca": { "type": "Polygon", "arcs": [[[0, 0], [1, 0], [0, 1], [0, 0]]], }, "bcab": { "type": "Polygon", "arcs": [[[1, 0], [0, 1], [0, 0], [1, 0]]], }, } ) ) self.assertCountEqual(junctions.values(), []) def test_join_coincident_rings_abca_and_bacb_share_the_arc_bcab(self): junctions = self.join( self.extract( { "abca": { "type": "Polygon", "arcs": [[[0, 0], [1, 0], [0, 1], [0, 0]]], }, "bacb": { "type": "Polygon", "arcs": [[[1, 0], [0, 0], [0, 1], [1, 0]]], }, } ) ) self.assertCountEqual(junctions.values(), []) def test_join_coincident_rings_abca_and_dbed_share_the_point_b(self): junctions = self.join( self.extract( { "abca": { "type": "Polygon", "arcs": [[[0, 0], [1, 0], [0, 1], [0, 0]]], }, "dbed": { "type": "Polygon", "arcs": [[[2, 1], [1, 0], [2, 2], [2, 1]]], }, } ) ) self.assertCountEqual(junctions.values(), [[1, 0]]) def test_join_coincident_ring_abca_and_line_dbe_share_the_point_b(self): junctions = self.join( self.extract( { "abca": { "type": "Polygon", "arcs": [[[0, 0], [1, 0], [0, 1], [0, 0]]], }, "dbe": {"type": "LineString", "arcs": [[2, 1], [1, 0], [2, 2]]}, } ) ) self.assertCountEqual(junctions.values(), [[2, 1], [2, 2], [1, 0]])
32.563607
115
0.397587
2,015
20,222
3.708189
0.056576
0.041221
0.033324
0.035332
0.89106
0.864026
0.851847
0.828694
0.805808
0.757227
0
0.05718
0.435269
20,222
620
116
32.616129
0.59711
0
0
0.484171
0
0
0.074523
0
0
0
0
0
0.07635
1
0.07635
false
0
0.003724
0
0.081937
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
6c14507586a1463cd517aaed48d6a9e523b4f0bd
93
py
Python
helpers.py
rpiesveloces/seeds-gratidaum-bot
9ff6b3451123b50c30986e3fcb1797ab282296d0
[ "CC0-1.0" ]
null
null
null
helpers.py
rpiesveloces/seeds-gratidaum-bot
9ff6b3451123b50c30986e3fcb1797ab282296d0
[ "CC0-1.0" ]
1
2021-07-07T19:50:51.000Z
2021-07-07T19:50:51.000Z
helpers.py
rpiesveloces/seeds-gratidaum-bot
9ff6b3451123b50c30986e3fcb1797ab282296d0
[ "CC0-1.0" ]
1
2021-06-22T17:47:47.000Z
2021-06-22T17:47:47.000Z
from lxml import html


def strip_html(s):
    return str(html.fromstring(s).text_content())
15.5
49
0.731183
15
93
4.4
0.8
0
0
0
0
0
0
0
0
0
0
0
0.150538
93
5
50
18.6
0.835443
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0.333333
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
6
66632adfcc1fdc97e8949e7ae9ccd192ae27b917
33
py
Python
geocode_sqlite/__init__.py
noslouch/geocode-sqlite
620462cc7a1df2cb45de589cf32eb5d68779c90f
[ "Apache-2.0" ]
222
2020-09-08T13:00:39.000Z
2022-03-11T19:49:17.000Z
geocode_sqlite/__init__.py
noslouch/geocode-sqlite
620462cc7a1df2cb45de589cf32eb5d68779c90f
[ "Apache-2.0" ]
23
2020-09-08T14:43:55.000Z
2022-03-16T01:38:04.000Z
geocode_sqlite/__init__.py
noslouch/geocode-sqlite
620462cc7a1df2cb45de589cf32eb5d68779c90f
[ "Apache-2.0" ]
6
2020-09-27T07:08:21.000Z
2022-03-15T20:04:13.000Z
from .utils import geocode_table
16.5
32
0.848485
5
33
5.4
1
0
0
0
0
0
0
0
0
0
0
0
0.121212
33
1
33
33
0.931034
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
667121d7822abdc44fc09731f74fe3afe816a209
83
py
Python
cs1006-IntroPython/HW5-MachineLearning/__init__.py
ecahern16/AcademicCode
cf95a65545e7054604c23d4830f709323eeb81f5
[ "Apache-2.0" ]
null
null
null
cs1006-IntroPython/HW5-MachineLearning/__init__.py
ecahern16/AcademicCode
cf95a65545e7054604c23d4830f709323eeb81f5
[ "Apache-2.0" ]
null
null
null
cs1006-IntroPython/HW5-MachineLearning/__init__.py
ecahern16/AcademicCode
cf95a65545e7054604c23d4830f709323eeb81f5
[ "Apache-2.0" ]
null
null
null
from advanced import *
from data import *
from models import *
from utils import *
16.6
22
0.759036
12
83
5.25
0.5
0.47619
0
0
0
0
0
0
0
0
0
0
0.192771
83
4
23
20.75
0.940299
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
66b68c7ed5c5d7b650a94979f302009154133272
49
py
Python
village/simulation/__init__.py
Kontari/VillageRevamp
bb89b54a6367dc76567fe5f996d47435610caab6
[ "MIT" ]
null
null
null
village/simulation/__init__.py
Kontari/VillageRevamp
bb89b54a6367dc76567fe5f996d47435610caab6
[ "MIT" ]
null
null
null
village/simulation/__init__.py
Kontari/VillageRevamp
bb89b54a6367dc76567fe5f996d47435610caab6
[ "MIT" ]
null
null
null
from .manager import *
from .simulation import *
16.333333
25
0.755102
6
49
6.166667
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.163265
49
2
26
24.5
0.902439
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
dd5e071e6993bc1bde1add42fe3d5b537d722349
683
py
Python
networks/mlp/loss_functions.py
PiotrGrzybowski/NeuralNetworks
4fe4295761bc1a1df5d52e69ce574256626833c6
[ "Apache-2.0" ]
null
null
null
networks/mlp/loss_functions.py
PiotrGrzybowski/NeuralNetworks
4fe4295761bc1a1df5d52e69ce574256626833c6
[ "Apache-2.0" ]
null
null
null
networks/mlp/loss_functions.py
PiotrGrzybowski/NeuralNetworks
4fe4295761bc1a1df5d52e69ce574256626833c6
[ "Apache-2.0" ]
null
null
null
import numpy as np


class LossFunction:
    @staticmethod
    def calculate_cost(expected_value, predicted):
        raise NotImplementedError("Should have implemented this!")

    @staticmethod
    def calculate_cost_gradient(expected_value, outputs, derivative_outputs):
        raise NotImplementedError("Should have implemented this!")


class MeanSquaredError(LossFunction):
    @staticmethod
    def calculate_cost(expected_value, outputs):
        return 0.5 * np.power(np.linalg.norm(expected_value - outputs), 2)

    @staticmethod
    def calculate_cost_gradient(expected_value, outputs, derivative_outputs):
        return (outputs - expected_value) * derivative_outputs
31.045455
77
0.751098
73
683
6.821918
0.410959
0.156627
0.192771
0.2249
0.702811
0.702811
0.506024
0.293173
0.293173
0.293173
0
0.005319
0.174231
683
21
78
32.52381
0.87766
0
0
0.533333
0
0
0.084919
0
0
0
0
0
0
1
0.266667
false
0
0.066667
0.133333
0.6
0
0
0
0
null
0
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
6
dd6a4ab0625ea7a6740b1a460d54b00e9a901081
70
py
Python
test.py
fl16180/gtrends-tools
7af4c9b18345911aa8642eb9a45d4291bb1a4017
[ "MIT" ]
3
2018-10-05T17:56:18.000Z
2020-03-27T18:21:04.000Z
test.py
fl16180/gtrends-tools
7af4c9b18345911aa8642eb9a45d4291bb1a4017
[ "MIT" ]
1
2020-10-05T23:46:26.000Z
2020-10-27T00:17:45.000Z
test.py
fl16180/gtrends-tools
7af4c9b18345911aa8642eb9a45d4291bb1a4017
[ "MIT" ]
2
2018-09-12T19:05:09.000Z
2021-04-12T02:53:30.000Z
from setuptools import setup, find_packages

# was the Python 2 statement `print find_packages()`
print(find_packages())
17.5
44
0.8
9
70
6
0.777778
0.444444
0
0
0
0
0
0
0
0
0
0
0.157143
70
3
45
23.333333
0.915254
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0.5
null
null
0.5
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
1
0
6
dd76f04631ab92c0ba39dbcb49f5d5f0881cefe8
2,363
py
Python
trolls/gym_wrappers/noisy.py
sintefneodroid/trolls
724fea14522029596e87be3115e90672466a1c8d
[ "Apache-2.0" ]
null
null
null
trolls/gym_wrappers/noisy.py
sintefneodroid/trolls
724fea14522029596e87be3115e90672466a1c8d
[ "Apache-2.0" ]
null
null
null
trolls/gym_wrappers/noisy.py
sintefneodroid/trolls
724fea14522029596e87be3115e90672466a1c8d
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = "Christian Heider Nielsen"

import gym
import numpy
from gym.spaces.box import Box

__all__ = ["NoisyObservationWrapper", "NoisyActionWrapper"]


class NoisyObservationWrapper(gym.ObservationWrapper):
    """Make observation dynamic by adding noise"""

    def __init__(self, env: gym.Env = None, percent_pad=5, bottom_margin: int = 20):
        """
        # doom 20px bottom is useless

        :param env:
        :param percent_pad:
        :param bottom_margin:"""
        super().__init__(env)
        self.original_shape = env.space.shape
        new_side = int(round(max(self.original_shape[:-1]) * 100.0 / (100.0 - percent_pad)))
        self.new_shape = [new_side, new_side, 3]
        self.observation_space = Box(0.0, 255.0, self.new_shape)
        self.bottom_margin = bottom_margin
        self.ob = None

    def _observation(self, obs: numpy.ndarray) -> numpy.ndarray:
        im_noise = numpy.random.randint(0, 256, self.new_shape).astype(obs.dtype)
        im_noise[: self.original_shape[0] - self.bottom_margin, : self.original_shape[1], :] = obs[
            : -self.bottom_margin, :, :
        ]
        self.ob = im_noise
        return im_noise

    # def render(self, mode='human', close=False):
    #     temp = self.env.render(mode, close)
    #     return self.ob


class NoisyActionWrapper(gym.ActionWrapper):
    """TODO: finish
    Make action dynamic by adding noise"""

    def __init__(self, env: gym.Env = None, percent_pad=5, bottom_margin: int = 20):
        super().__init__(env)
        self.original_shape = env.space.shape
        new_side = int(round(max(self.original_shape[:-1]) * 100.0 / (100.0 - percent_pad)))
        self.new_shape = [new_side, new_side, 3]
        self.action_space = Box(0.0, 255.0, self.new_shape)
        self.bottom_margin = bottom_margin
        self.ob = None

    def _action(self, obs: numpy.ndarray) -> numpy.ndarray:
        im_noise = numpy.random.randint(0, 256, self.new_shape).astype(obs.dtype)
        im_noise[: self.original_shape[0] - self.bottom_margin, : self.original_shape[1], :] = obs[
            : -self.bottom_margin, :, :
        ]
        self.ob = im_noise
        return im_noise

    # def render(self, mode='human', close=False):
    #     temp = self.env.render(mode, close)
    #     return self.ob
34.75
99
0.629285
311
2,363
4.549839
0.247588
0.093286
0.096113
0.050883
0.743463
0.743463
0.743463
0.743463
0.743463
0.743463
0
0.03
0.238256
2,363
67
100
35.268657
0.756111
0.180279
0
0.648649
0
0
0.034722
0.012286
0
0
0
0.014925
0
1
0.108108
false
0
0.081081
0
0.297297
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
6
dd80a6cb11da53ce64758ab88d167346d533e608
6,382
py
Python
lib/nms/box_voting.py
zxt881108/Deformable-ConvNets-1
6c73b54262fea0a8a6a7f0225689b7d76f546e1f
[ "MIT" ]
2
2018-03-31T13:25:15.000Z
2018-05-31T13:09:17.000Z
lib/nms/box_voting.py
zxt881108/Deformable-ConvNets
6c73b54262fea0a8a6a7f0225689b7d76f546e1f
[ "MIT" ]
1
2018-04-09T09:42:51.000Z
2018-04-10T02:56:15.000Z
lib/nms/box_voting.py
zxt881108/Deformable-ConvNets
6c73b54262fea0a8a6a7f0225689b7d76f546e1f
[ "MIT" ]
null
null
null
import numpy as np

DEBUG = True


def py_box_voting_wrapper(IOU_thresh, score_thresh, with_nms):
    if with_nms:
        def _box_voting(nms_dets, dets):
            return box_voting_nms(nms_dets, dets, IOU_thresh, score_thresh)
    else:
        def _box_voting(dets):
            return box_voting(dets, IOU_thresh, score_thresh)
    return _box_voting


def box_voting_nms(nms_dets, dets, IOU_thresh, score_thresh):
    """
    greedily select boxes with high confidence and overlap with current maximum
    and vote the final box coordinates by fusing those boxes
    :param nms_dets: dets after nms
    :param dets: original detection results, dets before nms. [[x1, y1, x2, y2, score]]
    :param IOU_thresh: retain overlap > IOU_thresh for fusion
    :param score_thresh: retain score > score_thresh for fusion
    :return: detection coordinates to keep
    """
    x1 = dets[:, 0]
    y1 = dets[:, 1]
    x2 = dets[:, 2]
    y2 = dets[:, 3]
    scores = dets[:, 4]

    areas = (x2 - x1 + 1) * (y2 - y1 + 1)
    order = scores.argsort()[::-1]
    if DEBUG:
        print("dets ordered:", dets[order])

    keep_fusion_boxes = []
    for idx, nms_det in enumerate(nms_dets):
        area_nms_det = (nms_det[2] - nms_det[0] + 1) * (nms_det[3] - nms_det[1] + 1)
        xx1 = np.maximum(nms_det[0], x1[order])
        yy1 = np.maximum(nms_det[1], y1[order])
        xx2 = np.minimum(nms_det[2], x2[order])
        yy2 = np.minimum(nms_det[3], y2[order])

        # compute overlap
        w = np.maximum(0.0, xx2 - xx1 + 1)
        h = np.maximum(0.0, yy2 - yy1 + 1)
        inter = w * h
        ovr = inter / (area_nms_det + areas[order] - inter)

        # retain boxes with large overlap and high confidence for fusion
        IOU_inds_keep = np.where(ovr > IOU_thresh)[0]
        scores_inds_keep = np.where(scores[order] > score_thresh)[0]
        if DEBUG:
            print("IOU_inds_keep:", IOU_inds_keep)
            print("scores_inds_keep:", scores_inds_keep)
        inds_fusion = np.intersect1d(IOU_inds_keep, scores_inds_keep)

        if inds_fusion.size == 0:
            # if no box retained, keep the original one
            keep_fusion_boxes.append(nms_det)
            if DEBUG:
                print("inds_fusion:", inds_fusion)
                print("keep nms_det")
            continue

        if DEBUG and inds_fusion.size > 1:
            print("boxes for fusion:", inds_fusion)
            print(dets[order[inds_fusion]])

        x1_fusion = x1[order[inds_fusion]]
        y1_fusion = y1[order[inds_fusion]]
        x2_fusion = x2[order[inds_fusion]]
        y2_fusion = y2[order[inds_fusion]]
        scores_fusion = scores[order[inds_fusion]]

        fusion_box = np.zeros((5,))
        fusion_box[0] = np.sum(x1_fusion * scores_fusion) / np.sum(scores_fusion)
        fusion_box[1] = np.sum(y1_fusion * scores_fusion) / np.sum(scores_fusion)
        fusion_box[2] = np.sum(x2_fusion * scores_fusion) / np.sum(scores_fusion)
        fusion_box[3] = np.sum(y2_fusion * scores_fusion) / np.sum(scores_fusion)
        fusion_box[4] = scores_fusion[0]
        if DEBUG:
            print("fusion_box:", fusion_box)
        keep_fusion_boxes.append(fusion_box)

        # boxes with small overlap are kept for another loop
        inds_next = np.where(ovr <= IOU_thresh)[0]
        order = order[inds_next]

    keep_fusion_boxes = np.array(keep_fusion_boxes)
    return keep_fusion_boxes


def box_voting(dets, IOU_thresh, score_thresh):
    """
    greedily select boxes with high confidence and overlap with current maximum
    and vote the final box coordinates by fusing those boxes
    :param dets: original detection results, dets before nms. [[x1, y1, x2, y2, score]]
    :param IOU_thresh: retain overlap > IOU_thresh for fusion
    :param score_thresh: retain score > score_thresh for fusion
    :return: detection coordinates to keep
    """
    x1 = dets[:, 0]
    y1 = dets[:, 1]
    x2 = dets[:, 2]
    y2 = dets[:, 3]
    scores = dets[:, 4]

    areas = (x2 - x1 + 1) * (y2 - y1 + 1)
    order = scores.argsort()[::-1]
    if DEBUG:
        print("dets ordered:", dets)

    keep_fusion_boxes = []
    while order.size > 0:
        i = order[0]
        xx1 = np.maximum(x1[i], x1[order])
        yy1 = np.maximum(y1[i], y1[order])
        xx2 = np.minimum(x2[i], x2[order])
        yy2 = np.minimum(y2[i], y2[order])

        # compute overlap
        w = np.maximum(0.0, xx2 - xx1 + 1)
        h = np.maximum(0.0, yy2 - yy1 + 1)
        inter = w * h
        ovr = inter / (areas[i] + areas[order] - inter)

        # retain boxes with large overlap and high confidence for fusion
        IOU_inds_keep = np.where(ovr > IOU_thresh)[0]
        scores_inds_keep = np.where(scores[order] > score_thresh)[0]
        if DEBUG:
            print("IOU_inds_keep:", IOU_inds_keep)
            print("scores_inds_keep:", scores_inds_keep)

        if IOU_inds_keep.size == 0 or scores_inds_keep.size == 0:
            # if no box retained, keep the original one
            keep_fusion_boxes.append(dets[i])
            if DEBUG:
                print("keep original det")
            # drop the already-processed boxes before continuing; the original
            # `continue` left `order` untouched, so this branch looped forever
            order = order[np.where(ovr <= IOU_thresh)[0]]
            continue

        inds_fusion = np.intersect1d(IOU_inds_keep, scores_inds_keep)
        if DEBUG and inds_fusion.size > 1:
            print("boxes for fusion:", inds_fusion)
            print(dets[order[inds_fusion]])

        x1_fusion = x1[order[inds_fusion]]
        y1_fusion = y1[order[inds_fusion]]
        x2_fusion = x2[order[inds_fusion]]
        y2_fusion = y2[order[inds_fusion]]
        scores_fusion = scores[order[inds_fusion]]

        # shape (5,), not (1, 5) as before, so all kept boxes stack into an (M, 5) array
        fusion_box = np.zeros((5,))
        fusion_box[0] = np.sum(x1_fusion * scores_fusion) / np.sum(scores_fusion)
        fusion_box[1] = np.sum(y1_fusion * scores_fusion) / np.sum(scores_fusion)
        fusion_box[2] = np.sum(x2_fusion * scores_fusion) / np.sum(scores_fusion)
        fusion_box[3] = np.sum(y2_fusion * scores_fusion) / np.sum(scores_fusion)
        fusion_box[4] = scores_fusion[0]
        if DEBUG:
            print("fusion_box:", fusion_box)
        keep_fusion_boxes.append(fusion_box)

        # boxes with small overlap are kept for another loop
        inds_next = np.where(ovr <= IOU_thresh)[0]
        order = order[inds_next]

    keep_fusion_boxes = np.array(keep_fusion_boxes)
    return keep_fusion_boxes
36.890173
109
0.616735
912
6,382
4.095395
0.109649
0.056225
0.048193
0.042838
0.858367
0.827845
0.827309
0.816064
0.816064
0.816064
0
0.032009
0.270605
6,382
172
110
37.104651
0.770354
0.181761
0
0.605042
0
0
0.03602
0
0
0
0
0
0
1
0.042017
false
0
0.008403
0.016807
0.092437
0.12605
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
dd90b1f9abca3d57f925343d0b11022ecb3aec1f
47
py
Python
arturo/gfx/__init__.py
rhestilow/arturo
4b3c33bc45e73524726bd7040d69ec9730ce57c3
[ "MIT" ]
null
null
null
arturo/gfx/__init__.py
rhestilow/arturo
4b3c33bc45e73524726bd7040d69ec9730ce57c3
[ "MIT" ]
null
null
null
arturo/gfx/__init__.py
rhestilow/arturo
4b3c33bc45e73524726bd7040d69ec9730ce57c3
[ "MIT" ]
null
null
null
from arturo.gfx.canvas import Canvas, triangle
23.5
46
0.829787
7
47
5.571429
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.106383
47
1
47
47
0.928571
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
dd9adf7b05c1b3c756343fa3929a3e550454ee7e
659
py
Python
havsim/simulation/__init__.py
ronan-keane/hav-sim
0aaf9674e987822ff2dc90c74613d5e68e8ef0ce
[ "Apache-2.0" ]
null
null
null
havsim/simulation/__init__.py
ronan-keane/hav-sim
0aaf9674e987822ff2dc90c74613d5e68e8ef0ce
[ "Apache-2.0" ]
null
null
null
havsim/simulation/__init__.py
ronan-keane/hav-sim
0aaf9674e987822ff2dc90c74613d5e68e8ef0ce
[ "Apache-2.0" ]
2
2020-09-30T22:44:37.000Z
2021-05-09T07:36:28.000Z
""" @author: rlk268@cornell.edu """ from havsim.simulation import simulation from havsim.simulation import models from havsim.simulation import relaxation from havsim.simulation import road_networks from havsim.simulation import update_lane_routes from havsim.simulation import vehicle_orders from havsim.simulation import vehicles from havsim.simulation import road # import base classes and functions from havsim.simulation.simulation import Simulation from havsim.simulation.vehicles import Vehicle from havsim.simulation.road_networks import Lane from havsim.simulation.road_networks import get_headway, get_dist from havsim.simulation.road import Road
32.95
65
0.855842
88
659
6.318182
0.284091
0.233813
0.467626
0.374101
0.410072
0.302158
0
0
0
0
0
0.005051
0.098634
659
20
66
32.95
0.930976
0.094082
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
06e518598b1192ea02bf03777dab934e07c2116b
38
py
Python
data_structure/__init__.py
lvyufeng/basic_nlp_modules
354aac74eaa268f6bc4cf29d44adbe8d3f41e9ad
[ "MIT" ]
4
2020-05-01T16:30:24.000Z
2021-04-05T12:50:48.000Z
data_structure/__init__.py
lvyufeng/basic_nlp_modules
354aac74eaa268f6bc4cf29d44adbe8d3f41e9ad
[ "MIT" ]
null
null
null
data_structure/__init__.py
lvyufeng/basic_nlp_modules
354aac74eaa268f6bc4cf29d44adbe8d3f41e9ad
[ "MIT" ]
null
null
null
from data_structure.trie_tree import *
38
38
0.868421
6
38
5.166667
1
0
0
0
0
0
0
0
0
0
0
0
0.078947
38
1
38
38
0.885714
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
66496de97b72d49781c9bf09d508a086920c125b
23
py
Python
examples/django_roa_server/__init__.py
jathanism/django-roa
e16be7baff5d15c6c924e63fbd94fddcbe41fa9b
[ "BSD-3-Clause" ]
3
2020-05-05T23:24:16.000Z
2022-03-05T18:47:13.000Z
examples/django_roa_server/__init__.py
jathanism/django-roa
e16be7baff5d15c6c924e63fbd94fddcbe41fa9b
[ "BSD-3-Clause" ]
2
2018-05-25T13:52:35.000Z
2018-11-07T16:14:42.000Z
examples/django_roa_server/__init__.py
Keypr/django-roa
ea74ae11230a43a7e1c4ba233906dc9d517afab3
[ "BSD-3-Clause" ]
1
2022-03-05T18:50:47.000Z
2022-03-05T18:50:47.000Z
from . import emitters
11.5
22
0.782609
3
23
6
1
0
0
0
0
0
0
0
0
0
0
0
0.173913
23
1
23
23
0.947368
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
b07e6231d9c9601f4cf35b06d5699fe71f3b6fba
47
py
Python
simulation/device/simulated/fixed_consumption/__init__.py
LBNL-ETA/LPDM
3384a784b97e49cd7a801b758717a7107a51119f
[ "BSD-3-Clause-LBNL" ]
2
2019-01-05T02:33:38.000Z
2020-04-22T16:57:50.000Z
simulation/device/simulated/fixed_consumption/__init__.py
LBNL-ETA/LPDM
3384a784b97e49cd7a801b758717a7107a51119f
[ "BSD-3-Clause-LBNL" ]
3
2019-04-17T18:13:08.000Z
2021-04-23T22:40:23.000Z
simulation/device/simulated/fixed_consumption/__init__.py
LBNL-ETA/LPDM
3384a784b97e49cd7a801b758717a7107a51119f
[ "BSD-3-Clause-LBNL" ]
1
2019-01-31T08:37:44.000Z
2019-01-31T08:37:44.000Z
from fixed_consumption import FixedConsumption
23.5
46
0.914894
5
47
8.4
1
0
0
0
0
0
0
0
0
0
0
0
0.085106
47
1
47
47
0.976744
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
b083219084158c8da5ab9294f4a9dec69eea1b86
61
py
Python
simple/themes/simple/__init__.py
haoxun/GeekCMS-Themes
48839c23fb69f5e932d1d9db783ae788189b0a80
[ "MIT" ]
null
null
null
simple/themes/simple/__init__.py
haoxun/GeekCMS-Themes
48839c23fb69f5e932d1d9db783ae788189b0a80
[ "MIT" ]
null
null
null
simple/themes/simple/__init__.py
haoxun/GeekCMS-Themes
48839c23fb69f5e932d1d9db783ae788189b0a80
[ "MIT" ]
null
null
null
from . import load
from . import process
from . import write
15.25
21
0.754098
9
61
5.111111
0.555556
0.652174
0
0
0
0
0
0
0
0
0
0
0.196721
61
3
22
20.333333
0.938776
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
b0a2422ddddc6badf7862f01eb33210fffb3c5dc
35
py
Python
env/lib64/python3.8/site-packages/openpyxl/writer/__init__.py
albixhafa/balance-sheet-integrity
bc7edef8ac7d7e57375a26f9edf9c7235722a2ac
[ "MIT" ]
11
2020-06-28T04:30:26.000Z
2022-03-26T08:40:47.000Z
env/lib64/python3.8/site-packages/openpyxl/writer/__init__.py
albixhafa/balance-sheet-integrity
bc7edef8ac7d7e57375a26f9edf9c7235722a2ac
[ "MIT" ]
10
2020-09-30T12:49:45.000Z
2020-10-04T10:26:33.000Z
venv/Lib/site-packages/openpyxl/writer/__init__.py
aklauritzen/bilago
276b1c11e259dd77260c74fc013f59eb1eb42d2f
[ "MIT" ]
1
2021-11-16T19:06:53.000Z
2021-11-16T19:06:53.000Z
# Copyright (c) 2010-2020 openpyxl
17.5
34
0.742857
5
35
5.2
1
0
0
0
0
0
0
0
0
0
0
0.266667
0.142857
35
1
35
35
0.6
0.914286
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
6
b0aa9c0d0658f9f672380aa87822d23863c76cf8
297
py
Python
easy_crypto/lesson2/task5.py
PeteCoward/teach-python
2a63ece83151631ab4dcf868c361acdfe4e6c85f
[ "MIT" ]
1
2015-12-19T00:38:46.000Z
2015-12-19T00:38:46.000Z
easy_crypto/lesson2/task5.py
PeteCoward/teach-python
2a63ece83151631ab4dcf868c361acdfe4e6c85f
[ "MIT" ]
null
null
null
easy_crypto/lesson2/task5.py
PeteCoward/teach-python
2a63ece83151631ab4dcf868c361acdfe4e6c85f
[ "MIT" ]
null
null
null
'''
# TASK 5 - write a function to generate all the possible shifts of an array of bytes
'''
from .task3 import shift_byte_array


def get_all_shifts(byte_array):
    ''' return all possible shifts of input byte array'''
    return [shift_byte_array(byte_array, shift) for shift in range(1, 256)]
29.7
84
0.734007
49
297
4.285714
0.591837
0.214286
0.152381
0
0
0
0
0
0
0
0
0.02459
0.178451
297
9
85
33
0.836066
0.441077
0
0
1
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0
1
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
0
1
0
1
0
0
6
9ffb1bc15fb4d365a975edc9eb01dc8f8dd1e769
1,360
py
Python
problems/chapter04/mshibatatt/006.py
tokuma09/algorithm_problems
58534620df73b230afbeb12de126174362625a78
[ "CC0-1.0" ]
1
2021-07-07T15:46:58.000Z
2021-07-07T15:46:58.000Z
problems/chapter04/mshibatatt/006.py
tokuma09/algorithm_problems
58534620df73b230afbeb12de126174362625a78
[ "CC0-1.0" ]
5
2021-06-05T14:16:41.000Z
2021-07-10T07:08:28.000Z
problems/chapter04/mshibatatt/006.py
tokuma09/algorithm_problems
58534620df73b230afbeb12de126174362625a78
[ "CC0-1.0" ]
null
null
null
def bool_func(i, w, a):
    if i == 0:
        if w == 0:
            return True
        else:
            return False
    # in case not choosing a[i-1]
    if bool_func(i-1, w, a):
        return True
    # in case choosing a[i-1]
    if bool_func(i-1, w-a[i-1], a):
        return True
    # return False if both cases are False
    return False


def main():
    N, W = map(int, input().split())
    a = list(map(int, input().split()))
    if bool_func(N, W, a):
        print("Yes")
    else:
        print("No")


######### memo version ##########
def bool_func_memo(i, w, a, memo):
    if i == 0:
        if w == 0:
            return True
        else:
            return False
    # in case not choosing a[i-1]
    if memo[i-1][w] == float('inf'):
        memo[i-1][w] = bool_func_memo(i-1, w, a, memo)  # was bool_func, which bypassed the memo
    if memo[i-1][w]:
        return True
    # in case choosing a[i-1]; guard against negative weights, which would wrap the list index
    if w - a[i-1] >= 0:
        if memo[i-1][w-a[i-1]] == float('inf'):
            memo[i-1][w-a[i-1]] = bool_func_memo(i-1, w-a[i-1], a, memo)  # the `a` argument was missing
        if memo[i-1][w-a[i-1]]:
            return True
    # return False if both cases are False
    return False


def main_memo():
    N, W = map(int, input().split())
    a = list(map(int, input().split()))
    # 2D table (was the flat list [float('inf')]*N*W, which made memo[i-1][w] fail)
    memo = [[float('inf')] * (W + 1) for _ in range(N)]
    if bool_func_memo(N, W, a, memo):  # was bool_func, which never used the memo
        print("Yes")
    else:
        print("No")


if __name__ == '__main__':
    # main()
    main_memo()
21.587302
60
0.4875
226
1,360
2.849558
0.154867
0.059006
0.046584
0.043478
0.864907
0.864907
0.784161
0.768634
0.720497
0.628882
0
0.025247
0.330147
1,360
63
61
21.587302
0.681669
0.144118
0
0.642857
0
0
0.023726
0
0
0
0
0
0
1
0.095238
false
0
0
0
0.309524
0.095238
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
b00096b545f692dc70353c5edb57c5ce5dbf8abc
192
py
Python
curso-em-video/aula_04/aula-04-primeiros-comandos.py
talysonxx/python
520b108731e28c7dc1fca3523b925be506fd8340
[ "MIT" ]
null
null
null
curso-em-video/aula_04/aula-04-primeiros-comandos.py
talysonxx/python
520b108731e28c7dc1fca3523b925be506fd8340
[ "MIT" ]
null
null
null
curso-em-video/aula_04/aula-04-primeiros-comandos.py
talysonxx/python
520b108731e28c7dc1fca3523b925be506fd8340
[ "MIT" ]
null
null
null
# Prompts are in Portuguese: "What is your name? / What is your age? / What is your weight?"
nome = input('Qual é o seu nome? ')
idade = input('Qual é a sua idade? ')
peso = input('Qual é o seu peso? ')
print(f'\033[4:31m{nome}\033[m, \033[4:34m{idade}\033[m, \033[4:35m{peso}\033[m')
38.4
81
0.625
41
192
2.926829
0.439024
0.225
0.25
0.183333
0.233333
0
0
0
0
0
0
0.163636
0.140625
192
4
82
48
0.563636
0
0
0
0
0.25
0.671875
0.359375
0
0
0
0
0
1
0
false
0
0
0
0
0.25
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
b00aa5fcee5f2a82570c2f66065a5fa94d793eee
51
py
Python
run.py
LightForm-group/lightform_data_explorer
03af6e73e45877f0e6b4f128d51731f5dbcb27f3
[ "FTL" ]
null
null
null
run.py
LightForm-group/lightform_data_explorer
03af6e73e45877f0e6b4f128d51731f5dbcb27f3
[ "FTL" ]
18
2021-09-09T11:45:08.000Z
2021-12-03T16:11:29.000Z
run.py
LightForm-group/lightform_data_explorer
03af6e73e45877f0e6b4f128d51731f5dbcb27f3
[ "FTL" ]
null
null
null
import lf_data_explorer

lf_data_explorer.app.run()
17
26
0.862745
9
51
4.444444
0.666667
0.3
0.7
0
0
0
0
0
0
0
0
0
0.058824
51
3
26
17
0.833333
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
6
b00d4064ea86e6ccac169c3bc650edce793fd080
24
py
Python
plugins/pocsuite/net.py
aliluyala/PocHunter
ff2d7e745eabd81ffb77920fe00813b17fc432cf
[ "MIT" ]
95
2016-07-05T12:44:25.000Z
2022-01-24T09:16:44.000Z
plugins/pocsuite/net.py
sigma-random/PocHunter
ff2d7e745eabd81ffb77920fe00813b17fc432cf
[ "MIT" ]
2
2016-10-24T09:35:24.000Z
2017-07-28T08:50:31.000Z
plugins/pocsuite/net.py
sigma-random/PocHunter
ff2d7e745eabd81ffb77920fe00813b17fc432cf
[ "MIT" ]
39
2016-06-13T07:47:39.000Z
2020-11-26T00:53:48.000Z
import requests as req
12
23
0.791667
4
24
4.75
1
0
0
0
0
0
0
0
0
0
0
0
0.208333
24
1
24
24
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
b025106c47121b4ec079ec986bfa8edbbbe9ff77
110
py
Python
wendigo/warnings.py
medmsyk/wendigopy
36e0759bf8b065548fd638063768522704506236
[ "Apache-2.0" ]
null
null
null
wendigo/warnings.py
medmsyk/wendigopy
36e0759bf8b065548fd638063768522704506236
[ "Apache-2.0" ]
1
2022-01-05T10:28:49.000Z
2022-03-20T09:17:04.000Z
wendigo/warnings.py
medmsyk/wendigopy
36e0759bf8b065548fd638063768522704506236
[ "Apache-2.0" ]
null
null
null
class AdministrationWarning(Warning):
    pass


class TemporaryDirectoryDeletionWarning(Warning):
    pass
22
50
0.781818
8
110
10.75
0.625
0.255814
0
0
0
0
0
0
0
0
0
0
0.163636
110
5
51
22
0.934783
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
1
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
6
b03c735addd159bc2c799dc4907664a4ec651668
47
py
Python
run_momiji.py
Kyuunex/Momiji
a320bb901015737e4bd57c9da9a997d256689448
[ "MIT" ]
16
2019-02-12T17:16:03.000Z
2021-07-16T05:36:06.000Z
run_momiji.py
Kyuunex/Momiji
a320bb901015737e4bd57c9da9a997d256689448
[ "MIT" ]
null
null
null
run_momiji.py
Kyuunex/Momiji
a320bb901015737e4bd57c9da9a997d256689448
[ "MIT" ]
5
2019-12-07T10:34:36.000Z
2021-03-05T12:21:53.000Z
#!/usr/bin/env python3
import momiji.__main__
11.75
22
0.765957
7
47
4.571429
1
0
0
0
0
0
0
0
0
0
0
0.02381
0.106383
47
3
23
15.666667
0.738095
0.446809
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
b04e6a7bc7e707bebeba794b4b296658aba08f13
466
py
Python
tests/testdata/wrapper_pymol/test_pseudoatom.py
prcurran/fragment_hotspot_maps
a6dbd7f650d28a867594ca48597f7e1b3a131168
[ "MIT" ]
24
2019-02-14T00:02:13.000Z
2022-03-26T02:27:52.000Z
tests/testdata/wrapper_pymol/test_pseudoatom.py
prcurran/fragment_hotspot_maps
a6dbd7f650d28a867594ca48597f7e1b3a131168
[ "MIT" ]
27
2019-02-06T12:18:27.000Z
2020-10-30T14:26:08.000Z
tests/testdata/wrapper_pymol/test_pseudoatom.py
prcurran/fragment_hotspot_maps
a6dbd7f650d28a867594ca48597f7e1b3a131168
[ "MIT" ]
12
2019-02-13T20:38:56.000Z
2022-03-09T01:20:54.000Z
from os.path import join
import tempfile
import tkinter as tk
import zipfile
import math

from pymol import cmd, finish_launching, plugins
from pymol.cgo import *

finish_launching()

cmd.pseudoatom(object="mypseudoatom",
               pos=(1, 1, 1),
               color=(0.9411764705882353, 0.20392156862745098, 0.20392156862745098, 0.5),
               label=None)

cmd.pseudoatom(object="mypseudoatom2",
               pos=(2, 2, 2),
               color=(0.9411764705882353, 0.20392156862745098, 0.20392156862745098, 0.5),
               label=31.2)
31.066667
140
0.774678
67
466
5.358209
0.477612
0.200557
0.211699
0.128134
0.362117
0.362117
0.362117
0.362117
0.362117
0.362117
0
0.286396
0.100858
466
14
141
33.285714
0.570406
0
0
0
0
0
0.053763
0
0
0
0
0
0
1
0
true
0
0.7
0
0.7
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
b05411fe0d3af500bafd07efb0ae428f02cdfb05
232
py
Python
src/sage/crypto/mq/sbox.py
bopopescu/sage
2d495be78e0bdc7a0a635454290b27bb4f5f70f0
[ "BSL-1.0" ]
1,742
2015-01-04T07:06:13.000Z
2022-03-30T11:32:52.000Z
src/sage/crypto/mq/sbox.py
Ivo-Maffei/sage
467fbc70a08b552b3de33d9065204ee9cbfb02c7
[ "BSL-1.0" ]
66
2015-03-19T19:17:24.000Z
2022-03-16T11:59:30.000Z
src/sage/crypto/mq/sbox.py
dimpase/sage
468f23815ade42a2192b0a9cd378de8fdc594dcd
[ "BSL-1.0" ]
495
2015-01-10T10:23:18.000Z
2022-03-24T22:06:11.000Z
from sage.misc.lazy_import import lazy_import

lazy_import('sage.crypto.sbox',
            ['SBox', 'feistel_construction', 'misty_construction'],
            deprecation=22986)
33.142857
56
0.530172
20
232
5.9
0.6
0.254237
0.271186
0
0
0
0
0
0
0
0
0.034722
0.37931
232
6
57
38.666667
0.784722
0
0
0
0
0
0.25
0
0
0
0
0
0
1
0
true
0
0.4
0
0.4
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
6
b064e06817b421fc5f1400a2c2496f433f6e7629
677
py
Python
tests/test_modules.py
pasha00000/mbplugin
e919c7c899495ae7f4b04fc2568991bb717bed9a
[ "MIT" ]
null
null
null
tests/test_modules.py
pasha00000/mbplugin
e919c7c899495ae7f4b04fc2568991bb717bed9a
[ "MIT" ]
null
null
null
tests/test_modules.py
pasha00000/mbplugin
e919c7c899495ae7f4b04fc2568991bb717bed9a
[ "MIT" ]
null
null
null
'Check installation from scratch via the hard path'
import re, os, tempfile

import pytest
import requests

import conftest  # type: ignore  # ignore import error
import compile_all_jsmblh  # pylint: disable=import-error
import dbengine  # pylint: disable=import-error
import dll_call_test  # pylint: disable=import-error
import get_icon  # pylint: disable=import-error
import httpserver_mobile  # pylint: disable=import-error
import make_stock_stat  # pylint: disable=import-error
import browsercontroller  # pylint: disable=import-error
import settings  # pylint: disable=import-error
import store  # pylint: disable=import-error


def test_module_import():
    pass
35.631579
58
0.77548
89
677
5.786517
0.438202
0.213592
0.297087
0.419417
0.466019
0
0
0
0
0
0
0
0.156573
677
19
59
35.631579
0.901926
0.499261
0
0
0
0
0.118644
0
0
0
0
0
0
1
0.0625
true
0.0625
0.875
0
0.9375
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
0
0
0
6
c6db6cd1f7b7a410f47979a0df532137426f0d60
7,007
py
Python
tests/test_probe_paperspace.py
palazzem/hal
a349bad27403627fa74a878503013eff492b8a66
[ "BSD-3-Clause" ]
null
null
null
tests/test_probe_paperspace.py
palazzem/hal
a349bad27403627fa74a878503013eff492b8a66
[ "BSD-3-Clause" ]
20
2019-08-03T12:21:36.000Z
2021-02-21T16:53:07.000Z
tests/test_probe_paperspace.py
palazzem/hal
a349bad27403627fa74a878503013eff492b8a66
[ "BSD-3-Clause" ]
null
null
null
import logging import responses from hal.probes.paperspace import PaperspaceProbe def test_paperspace_probe(): """Should be initialized with a default config.""" probe = PaperspaceProbe() assert probe.config["api_key"] is None assert probe.config["base_url"] == "https://api.paperspace.io" assert probe.config["header_key"] == "x-api-key" def test_paperspace_run_without_api_key(caplog): """Should fail if the API key is not provided.""" probe = PaperspaceProbe() with caplog.at_level(logging.ERROR): result = probe.run() assert len(caplog.records) == 1 for record in caplog.records: assert record.levelname == "ERROR" assert "missing Paperspace API key" in record.message assert result is False def test_paperspace_success(server): """Should succeed with a valid API key.""" machines_list = """ [{"id": "unique_id", "name": "Hal 9000", "os": "Microsoft Windows Server 2016 Datacenter", "ram": "32212246528", "cpus": 8, "gpu": "Quadro P5000", "storageTotal": "268435456000", "storageUsed": "70947229184", "usageRate": "P5000 hourly", "shutdownTimeoutInHours": null, "shutdownTimeoutForces": false, "performAutoSnapshot": false, "autoSnapshotFrequency": null, "autoSnapshotSaveCount": null, "dynamicPublicIp": false, "agentType": "WindowsDesktop", "dtCreated": "2019-09-02T17:11:36.374Z", "state": "off", "updatesPending": false, "networkId": "unique_id", "privateIpAddress": "x.x.x.x", "publicIpAddress": "x.x.x.x", "region": "Europe (AMS1)", "userId": "unique_id", "teamId": "unique_id", "scriptId": null, "dtLastRun": null}] """ machine_utilization = """ {"machineId": "unique_id", "utilization": {"machineId": "unique_id", "secondsUsed": 23808.9384539127, "hourlyRate": "0.78", "billingMonth": "2019-09"}, "storageUtilization": {"machineId": "unique_id", "secondsUsed": 416854.609315872, "monthlyRate": "10.00", "billingMonth": "2019-09"}} """ server.add( responses.GET, "https://api.paperspace.io/machines/getMachines", body=machines_list, status=200, ) server.add( responses.GET, "https://api.paperspace.io/machines/getUtilization", body=machine_utilization, status=200, ) probe = PaperspaceProbe({"api_key": "valid"}) probe.run() assert len(probe.results) == 5 assert probe.results["hal.paperspace.machines.count"] == 1 assert probe.results["hal.paperspace.machines.instance"] == [ (1, ["machine_id:unique_id", "state:off"]), (0, ["machine_id:unique_id", "state:ready"]), ] assert probe.results["hal.paperspace.utilization.instance.usage_seconds"] == [ (23808, ["machine_id:unique_id"]) ] assert probe.results["hal.paperspace.utilization.instance.hourly_rate"] == [ (0.78, ["machine_id:unique_id"]) ] assert probe.results["hal.paperspace.utilization.storage.monthly_rate"] == [ (10.0, ["machine_id:unique_id"]) ] def test_paperspace_transition_state(server): """Should emit an extra metric if a transition state is active.""" machines_list = """ [{"id": "unique_id", "state": "starting"}] """ machine_utilization = """ {"machineId": "unique_id", "utilization": {"machineId": "unique_id", "secondsUsed": 23808.9384539127, "hourlyRate": "0.78", "billingMonth": "2019-09"}, "storageUtilization": {"machineId": "unique_id", "secondsUsed": 416854.609315872, "monthlyRate": "10.00", "billingMonth": "2019-09"}} """ server.add( responses.GET, "https://api.paperspace.io/machines/getMachines", body=machines_list, status=200, ) server.add( responses.GET, "https://api.paperspace.io/machines/getUtilization", body=machine_utilization, status=200, ) probe = PaperspaceProbe({"api_key": "valid"}) probe.run() assert len(probe.results) == 5 assert 
probe.results["hal.paperspace.machines.count"] == 1 assert probe.results["hal.paperspace.machines.instance"] == [ (0, ["machine_id:unique_id", "state:off"]), (0, ["machine_id:unique_id", "state:ready"]), (1, ["machine_id:unique_id", "state:starting"]), ] def test_paperspace_fail(server, caplog): """Should fail if an invalid API key is used.""" server.add( responses.GET, "https://api.paperspace.io/machines/getMachines", body='{"status": 401, "message": "No such API token"}', status=401, ) probe = PaperspaceProbe({"api_key": "invalid"}) with caplog.at_level(logging.ERROR): probe.run() assert probe.results == {} assert len(caplog.records) == 1 for record in caplog.records: assert record.levelname == "ERROR" assert "No such API token" in record.message def test_paperspace_fail_machine(server, caplog): """Should send metrics even if one machine API fails.""" machines_list = """ [{"id": "unique_id", "name": "Hal 9000", "os": "Microsoft Windows Server 2016 Datacenter", "ram": "32212246528", "cpus": 8, "gpu": "Quadro P5000", "storageTotal": "268435456000", "storageUsed": "70947229184", "usageRate": "P5000 hourly", "shutdownTimeoutInHours": null, "shutdownTimeoutForces": false, "performAutoSnapshot": false, "autoSnapshotFrequency": null, "autoSnapshotSaveCount": null, "dynamicPublicIp": false, "agentType": "WindowsDesktop", "dtCreated": "2019-09-02T17:11:36.374Z", "state": "off", "updatesPending": false, "networkId": "unique_id", "privateIpAddress": "x.x.x.x", "publicIpAddress": "x.x.x.x", "region": "Europe (AMS1)", "userId": "unique_id", "teamId": "unique_id", "scriptId": null, "dtLastRun": null}] """ server.add( responses.GET, "https://api.paperspace.io/machines/getMachines", body=machines_list, status=200, ) server.add( responses.GET, "https://api.paperspace.io/machines/getUtilization", body='{"error": {"name": "Error", "status": 404, "message": "Machine not found"}}', status=404, ) probe = PaperspaceProbe({"api_key": "valid"}) with caplog.at_level(logging.ERROR): probe.run() assert len(probe.results) == 5 assert len(caplog.records) == 1 for record in caplog.records: assert record.levelname == "ERROR" assert "Skip machine check" in record.message
33.051887
91
0.591694
715
7,007
5.699301
0.227972
0.045153
0.026994
0.039264
0.763926
0.74135
0.728098
0.713865
0.709939
0.688834
0
0.054918
0.254174
7,007
211
92
33.208531
0.724837
0.03996
0
0.761905
0
0.005291
0.567055
0.089456
0
0
0
0
0.126984
1
0.031746
false
0
0.015873
0
0.047619
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
c6de8b0c6dcbed67495df47af96a0eedc21094df
21
py
Python
src/seedsigner/gui/screens/__init__.py
mutatrum/seedsigner
8df1271f12e69507c79819f4c09808db735aaf26
[ "MIT" ]
1
2022-01-15T23:39:09.000Z
2022-01-15T23:39:09.000Z
src/seedsigner/gui/screens/__init__.py
mutatrum/seedsigner
8df1271f12e69507c79819f4c09808db735aaf26
[ "MIT" ]
null
null
null
src/seedsigner/gui/screens/__init__.py
mutatrum/seedsigner
8df1271f12e69507c79819f4c09808db735aaf26
[ "MIT" ]
null
null
null
from .screen import *
21
21
0.761905
3
21
5.333333
1
0
0
0
0
0
0
0
0
0
0
0
0.142857
21
1
21
21
0.888889
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
059f1bae77e8031bf74dd2c3e45d54cdec774136
42,676
py
Python
tests/keras/layers/decomposition_test.py
NPoe/keras
298553d6018d3644d0e865015499b9405e3d6a2c
[ "MIT" ]
1
2018-07-22T03:59:02.000Z
2018-07-22T03:59:02.000Z
tests/keras/layers/decomposition_test.py
NPoe/keras
298553d6018d3644d0e865015499b9405e3d6a2c
[ "MIT" ]
null
null
null
tests/keras/layers/decomposition_test.py
NPoe/keras
298553d6018d3644d0e865015499b9405e3d6a2c
[ "MIT" ]
1
2022-03-18T03:19:36.000Z
2022-03-18T03:19:36.000Z
# IDEE: Erasure Layer / Decomposition layer as secondary output (tied weights!), then give training goal!!! import pytest import numpy as np np.random.seed(123) from numpy.testing import assert_allclose from keras.utils.test_utils import layer_test from keras.layers import recurrent, embeddings, Embedding, Dropout, Input from keras.layers.recurrent import LSTM, GRU from keras.models import Sequential, Model from keras.layers.core import Masking, Dense from keras.layers.wrappers import * from keras import regularizers from keras.utils.test_utils import keras_test from keras import backend as K nb_samples, timesteps, embedding_dim, units, num_classes, vocab_size = 2, 5, 6, 150, 40, 3 embedding_num = 12 def sigmoid(x): return 1.0 / (1.0 + np.exp(-x)) def test_EmbeddingWrapper(): model = Sequential() model.add(EmbeddingWrapper(Embedding(input_dim = vocab_size, output_dim = embedding_dim, mask_zero = True), scope = "local", mode = None)) model.compile('sgd', 'mse') out = model.predict(np.random.random((nb_samples, timesteps))) assert(out.shape == (nb_samples, timesteps, embedding_dim)) model = Sequential() model.add(EmbeddingWrapper(Embedding(input_dim = vocab_size, output_dim = embedding_dim, mask_zero = False), scope = "local", mode = "l2")) model.compile('sgd', 'mse') out = model.predict(np.random.random((nb_samples, timesteps))) assert(out.shape == (nb_samples, timesteps)) model = Sequential() model.add(EmbeddingWrapper(Embedding(input_dim = vocab_size, output_dim = embedding_dim, mask_zero = True), scope = "global", mode = "l1")) model.compile('sgd', 'mse') out = model.predict(np.random.random((nb_samples, timesteps))) assert(out.shape == (nb_samples, timesteps)) def test_unit_tests_EmbeddingWrapper(): [emb0, emb1, emb2] = [np.array([1,2]), np.array([4,6]), np.array([4,4])] embedding_weights = [np.array([emb0, emb1, emb2])] X = np.array([[0,1,2,0]]) mean = (emb0 + emb1 + emb2 + emb0) / 4 minus0, minus1, minus2 = emb0 - mean, emb1 - mean, emb2 - mean model = Sequential() model.add(EmbeddingWrapper(Embedding(input_dim = 3, output_dim = 2, mask_zero = False), \ mode = None, scope = "local", weights = embedding_weights)) model.compile('sgd', 'mse') pred = model.predict(X)[0] assert np.allclose(pred, np.array([minus0, minus1, minus2, minus0])) #test masking model = Sequential() model.add(EmbeddingWrapper(Embedding(input_dim = 3, output_dim = 2, mask_zero = True), \ mode = None, scope = "local", weights = embedding_weights)) model.compile('sgd', 'mse') pred = model.predict(X)[0] assert np.allclose(pred, np.array([np.zeros_like(minus0), minus1, minus2, np.zeros_like(minus0)])) #test l2 norm model = Sequential() model.add(EmbeddingWrapper(Embedding(input_dim = 3, output_dim = 2, mask_zero = False), \ mode = "l2", scope = "local", weights = embedding_weights)) model.compile('sgd', 'mse') pred = model.predict(X)[0] assert np.allclose(pred, np.array([np.linalg.norm(x, ord = 2) for x in (minus0, minus1, minus2, minus0)])) #test l1 norm model = Sequential() model.add(EmbeddingWrapper(Embedding(input_dim = 3, output_dim = 2, mask_zero = False), \ mode = "l1", scope = "local", weights = embedding_weights)) model.compile('sgd', 'mse') pred = model.predict(X)[0] assert np.allclose(pred, np.array([np.linalg.norm(x, ord = 1) for x in (minus0, minus1, minus2, minus0)])) #test masked l2 norm model = Sequential() model.add(EmbeddingWrapper(Embedding(input_dim = 3, output_dim = 2, mask_zero = True), \ mode = "l2", scope = "local", weights = embedding_weights)) model.compile('sgd', 'mse') pred = 
model.predict(X)[0] assert np.allclose(pred, np.array([0] + [np.linalg.norm(x, ord = 2) for x in (minus1, minus2)] + [0])) #test global mean = (emb0 + emb1 + emb2) / 3 minus0, minus1, minus2 = emb0 - mean, emb1 - mean, emb2 - mean model = Sequential() model.add(EmbeddingWrapper(Embedding(input_dim = 3, output_dim = 2, mask_zero = True), \ mode = "l2", scope = "global", weights = embedding_weights)) model.compile('sgd', 'mse') pred = model.predict(X)[0] assert np.allclose(pred, np.array([0] + [np.linalg.norm(x, ord = 2) for x in (minus1, minus2)] + [0])) ''' def test_GradientWrapper(): model = Sequential() model.add(GradientWrapper(GRU(units = units, return_sequences = False), input_shape = (None, embedding_dim), mode = "dot")) model.compile(optimizer='sgd', loss='mse') out = model.predict(np.random.random((nb_samples, timesteps, embedding_dim))) assert(out.shape == (nb_samples, timesteps, units)) model = Sequential() model.add(Embedding(input_dim=5, output_dim = embedding_dim, mask_zero = True)) inner_model = Sequential() inner_model.add(GRU(units = units, return_sequences = False, input_shape = (None, embedding_dim))) #inner_model.add(Dropout(0.5)) inner_model.add(Dense(units = num_classes, activation = "linear")) model.add(GradientWrapper(inner_model, mode = "l1")) model.compile(optimizer='sgd', loss='mse') a = np.array([[1,2,3,0,0], [1,3,0,0,0]]) out = model.predict(a) assert(out.shape == (nb_samples, timesteps, num_classes)) assert(out[0,2,0] != 0) assert(out[0,3,0] == 0) inp = Input((None,)) emb = Embedding(input_dim=5, output_dim = embedding_dim, mask_zero = True) gru = GRU(units = units, return_sequences = False, input_shape = (None, embedding_dim)) wrap = GradientWrapper(gru, mode = None) dense = Dense(units = num_classes, activation = "linear") inner = Sequential() inner.add(gru) inner.add(dense) wrap2 = GradientWrapper(inner, mode = None) outp = dense(wrap(emb(inp))) outp2 = wrap2(emb(inp)) model = Model([inp], [outp]) model2 = Model([inp], [outp2]) print("predicting model1") out = model.predict(a) print("predicting model2") out2 = model2.predict(a) print("model1") print(out) print("model2") print(out2) assert (0) def test_ErasureWrapper(): for i in range(1,4): model = Sequential() model.add(ErasureWrapper(GRU(units = units, return_sequences = False), input_shape = (None, embedding_dim), ngram = i)) model.add(TimeDistributed(Dense(num_classes, activation = "exp"))) model.compile(optimizer='sgd', loss='mse') out = model.predict(np.random.random((nb_samples, timesteps, embedding_dim))) assert(out.shape == (nb_samples, timesteps - i + 1, num_classes)) # with return_sequences = True for i in range(1,4): model = Sequential() model.add(ErasureWrapper(GRU(units = units, return_sequences = True), input_shape = (None, embedding_dim), ngram = i)) model.add(TimeDistributed(TimeDistributed(Dense(num_classes, activation = "exp")))) model.compile(optimizer='sgd', loss='mse') out = model.predict(np.random.random((nb_samples, timesteps, embedding_dim))) assert(out.shape == (nb_samples, timesteps - i + 1, timesteps, num_classes)) def test_BetaLayer(): for i in range(1,4): for layer in (GRU, LSTM): model = Sequential() model.add(BetaDecomposition(layer(units = units), ngram = i, input_shape = (None, embedding_dim))) model.add(TimeDistributed(Dense(units = num_classes, activation = "exp"))) model.compile(optimizer='sgd', loss='mse') out = model.predict(np.random.random((nb_samples, timesteps, embedding_dim))) assert(out.shape == (nb_samples, timesteps - i + 1, num_classes)) def test_GammaLayer(): 
for i in range(1,4): for layer in (GRU, LSTM): model = Sequential() model.add(GammaDecomposition(layer(units = units), ngram = i, input_shape = (None, embedding_dim))) model.add(TimeDistributed(Dense(units = num_classes, activation = "exp"))) model.compile(optimizer='sgd', loss='mse') out = model.predict(np.random.random((nb_samples, timesteps, embedding_dim))) assert(out.shape == (nb_samples, timesteps - i + 1, num_classes)) def test_return_sequences(): for i in range(1,4): for outer_layer in (BetaDecomposition, GammaDecomposition): for inner_layer in (LSTM, GRU): model = Sequential() model.add(outer_layer(inner_layer(units = units, return_sequences = True), ngram = i, input_shape=(None, embedding_dim))) model.add(TimeDistributed(TimeDistributed(Dense(units = num_classes, activation = "exp")))) model.compile(optimizer='sgd', loss='mse') out = model.predict(np.random.random((nb_samples, timesteps, embedding_dim))) assert(out.shape == (nb_samples, timesteps - i + 1, timesteps, num_classes)) def test_bidirectional(): for i in range(1,4): for outer_layer in (BetaDecomposition, GammaDecomposition): for inner_layer in (LSTM, GRU): model = Sequential() model.add(Bidirectional(outer_layer(inner_layer(units = units), ngram = i), input_shape=(None, embedding_dim))) model.add(TimeDistributed(Dense(units = num_classes, activation = "exp"))) model.compile("sgd", "mse") out = model.predict(np.random.random((nb_samples, timesteps, embedding_dim))) assert out.shape == (nb_samples, timesteps - i + 1, num_classes) def test_unit_tests_DecompositionLSTM(): x1 = np.array([1,2,1]) x2 = np.array([0,1,1]) x3 = np.array([1,1,1]) X = np.stack([x1,x2,x3]) X = np.stack([X]) Wout = np.array([[2,0,0],[0,1,1]]) bout = np.zeros((3,)) # LSTM h0 = np.array([0,0]) c0 = np.array([0,0]) Wi = np.array([[0,0], [0,1], [0,1]]) Ui = np.array([[0,1], [1,0]]) Wf = np.array([[2,0], [0,2], [0,1]]) Uf = np.array([[0,2], [1,2]]) Wo = np.array([[1,0], [0,0], [0,1]]) Uo = np.array([[0,2], [1,1]]) Wc = np.array([[1,3], [0,0], [0,1]]) Uc = np.array([[0,1], [1,1]]) i1 = sigmoid(np.dot(x1, Wi) + np.dot(h0, Ui)) f1 = sigmoid(np.dot(x1, Wf) + np.dot(h0, Uf)) o1 = sigmoid(np.dot(x1, Wo) + np.dot(h0, Uo)) h_tilde1 = np.tanh(np.dot(x1, Wc) + np.dot(h0, Uc)) c1 = f1 * c0 + i1 * h_tilde1 h1 = o1 * np.tanh(c1) i2 = sigmoid(np.dot(x2, Wi) + np.dot(h1, Ui)) f2 = sigmoid(np.dot(x2, Wf) + np.dot(h1, Uf)) o2 = sigmoid(np.dot(x2, Wo) + np.dot(h1, Uo)) h_tilde2 = np.tanh(np.dot(x2, Wc) + np.dot(h1, Uc)) c2 = f2 * c1 + i2 * h_tilde2 h2 = o2 * np.tanh(c2) i3 = sigmoid(np.dot(x3, Wi) + np.dot(h2, Ui)) f3 = sigmoid(np.dot(x3, Wf) + np.dot(h2, Uf)) o3 = sigmoid(np.dot(x3, Wo) + np.dot(h2, Uo)) h_tilde3 = np.tanh(np.dot(x3, Wc) + np.dot(h2, Uc)) c3 = f3 * c2 + i3 * h_tilde3 h3 = o3 * np.tanh(c3) bi = np.zeros((2,)) bc = np.zeros((2,)) bf = np.zeros((2,)) bo = np.zeros((2,)) W = [np.concatenate([Wi, Wf, Wc, Wo], -1), np.concatenate([Ui, Uf, Uc, Uo], -1), np.concatenate([bi, bf, bc, bo], -1)] beta1 = np.exp(np.dot(o3 * (np.tanh(c1) - np.tanh(c0)), Wout)) beta2 = np.exp(np.dot(o3 * (np.tanh(c2) - np.tanh(c1)), Wout)) beta3 = np.exp(np.dot(o3 * (np.tanh(c3) - np.tanh(c2)), Wout)) m = Sequential() m.add(BetaDecomposition(LSTM(units = 2, recurrent_activation = "sigmoid"), input_shape = (None, 3), weights = W)) m.add(TimeDistributed(Dense(units = 3, activation = "exp"), weights = [Wout, bout])) m.compile(loss = "categorical_crossentropy", optimizer = "adagrad") pred = m.predict(X)[0] assert np.allclose(np.array([beta1, beta2, beta3]), pred) gamma1 = np.exp(np.dot(o3 * 
(np.tanh(f2 * f3 * c1) - np.tanh(f1 * f2 * f3 * c0)), Wout)) gamma2 = np.exp(np.dot(o3 * (np.tanh(f3 * c2) - np.tanh(f2 * f3 * c1)), Wout)) gamma3 = np.exp(np.dot(o3 * (np.tanh(c3) - np.tanh(f3 * c2)), Wout)) m = Sequential() m.add(GammaDecomposition(LSTM(units = 2, recurrent_activation = "sigmoid"), input_shape = (None, 3), weights = W)) m.add(TimeDistributed(Dense(units = 3, activation = "exp"), weights = [Wout, bout])) m.compile(loss = "categorical_crossentropy", optimizer = "adagrad") pred = m.predict(X)[0] assert np.allclose(np.array([gamma1, gamma2, gamma3]), pred) mbeta1 = np.exp(np.dot(o2 * (np.tanh(c1) - np.tanh(c0)), Wout)) mbeta2 = np.exp(np.dot(o2 * (np.tanh(c2) - np.tanh(c1)), Wout)) m = Sequential() m.add(Masking(input_shape=(None,3), mask_value = 1)) m.add(BetaDecomposition(LSTM(units = 2, recurrent_activation = "sigmoid"), input_shape = (None, 3), weights = W)) m.add(TimeDistributed(Dense(units = 3, activation = "exp"), weights = [Wout, bout])) m.compile(loss = "categorical_crossentropy", optimizer = "adagrad") pred = m.predict(X)[0] assert np.allclose(np.array([mbeta1, mbeta2, np.ones_like(mbeta2)]), pred) mgamma1 = np.exp(np.dot(o2 * (np.tanh(f2 * c1) - np.tanh(f1*f2*c0)), Wout)) mgamma2 = np.exp(np.dot(o2 * (np.tanh(c2) - np.tanh(f2 * c1)), Wout)) m = Sequential() m.add(Masking(input_shape=(None,3), mask_value = 1)) m.add(GammaDecomposition(LSTM(units = 2, recurrent_activation = "sigmoid"), input_shape = (None, 3), weights = W)) m.add(TimeDistributed(Dense(units = 3, activation = "exp"), weights = [Wout, bout])) m.compile(loss = "categorical_crossentropy", optimizer = "adagrad") pred = m.predict(X)[0] assert np.allclose(np.array([mgamma1, mgamma2, np.ones_like(mgamma2)]), pred) # with bigram bbeta2 = np.exp(np.dot(o3 * (np.tanh(c2) - np.tanh(c0)), Wout)) bbeta3 = np.exp(np.dot(o3 * (np.tanh(c3) - np.tanh(c1)), Wout)) m = Sequential() m.add(BetaDecomposition(LSTM(units = 2, recurrent_activation = "sigmoid"), input_shape = (None, 3), weights = W, ngram = 2)) m.add(TimeDistributed(Dense(units = 3, activation = "exp"), weights = [Wout, bout])) m.compile(loss = "categorical_crossentropy", optimizer = "adagrad") pred = m.predict(X)[0] assert np.allclose(np.array([bbeta2, bbeta3]), pred) bgamma2 = np.exp(np.dot(o3 * (np.tanh(f3 * c2) - np.tanh(f3 * f2 * f1 * c0)), Wout)) bgamma3 = np.exp(np.dot(o3 * (np.tanh(c3) - np.tanh(f3 * f2 * c1)), Wout)) m = Sequential() m.add(GammaDecomposition(LSTM(units = 2, recurrent_activation = "sigmoid"), input_shape = (None, 3), weights = W, ngram = 2)) m.add(TimeDistributed(Dense(units = 3, activation = "exp"), weights = [Wout, bout])) m.compile(loss = "categorical_crossentropy", optimizer = "adagrad") pred = m.predict(X)[0] assert np.allclose(np.array([bgamma2, bgamma3]), pred) tbeta = np.exp(np.dot(o3 * np.tanh(c3), Wout)) def test_unit_tests_DecompositionGRU(): x1 = np.array([1,2,1]) x2 = np.array([0,1,1]) x3 = np.array([1,1,1]) X = np.stack([x1,x2,x3]) X = np.stack([X]) Wout = np.array([[2,0,0],[0,1,1]]) bout = np.zeros((3,)) # GRU h0 = np.array([0,0]) c0 = np.array([0,0]) Wz = np.array([[0,0], [0,1], [0,1]]) Uz = np.array([[0,1], [1,0]]) Wr = np.array([[2,0], [0,2], [0,1]]) Ur = np.array([[0,2], [1,2]]) Wh = np.array([[1,0], [0,0], [0,1]]) Uh = np.array([[0,2], [1,1]]) z1 = sigmoid(np.dot(x1, Wz) + np.dot(h0, Uz)) r1 = sigmoid(np.dot(x1, Wr) + np.dot(h0, Ur)) h_tilde1 = np.tanh(np.dot(x1, Wh) + np.dot(r1 * h0, Uh)) h1 = (1 - z1) * h_tilde1 + z1 * h0 z2 = sigmoid(np.dot(x2, Wz) + np.dot(h1, Uz)) r2 = sigmoid(np.dot(x2, Wr) + np.dot(h1, Ur)) 
h_tilde2 = np.tanh(np.dot(x2, Wh) + np.dot(r2 * h1, Uh)) h2 = (1 - z2) * h_tilde2 + z2 * h1 z3 = sigmoid(np.dot(x3, Wz) + np.dot(h2, Uz)) r3 = sigmoid(np.dot(x3, Wr) + np.dot(h2, Ur)) h_tilde3 = np.tanh(np.dot(x3, Wh) + np.dot(r3 * h2, Uh)) h3 = (1 - z3) * h_tilde3 + z3 * h2 bz = np.zeros((2,)) br = np.zeros((2,)) bh = np.zeros((2,)) W = [np.concatenate([Wz, Wr, Wh], -1), np.concatenate([Uz, Ur, Uh], -1), np.concatenate([bz, br, bh], -1)] delta1 = np.exp(np.dot(h1-h0, Wout)) delta2 = np.exp(np.dot(h2-h1, Wout)) delta3 = np.exp(np.dot(h3-h2, Wout)) m = Sequential() m.add(BetaDecomposition(GRU(units = 2, recurrent_activation = "sigmoid"), input_shape = (None, 3), weights = W)) m.add(TimeDistributed(Dense(units = 3, activation = "exp"), weights = [Wout, bout])) m.compile(loss = "categorical_crossentropy", optimizer = "adagrad") pred = m.predict(X)[0] assert np.allclose(np.array([delta1, delta2, delta3]), pred) omega1 = np.exp(np.dot((z3 * z2 * h1) - (z3 * z2 * z1 * h0), Wout)) omega2 = np.exp(np.dot((z3 * h2) - (z3 * z2 * h1), Wout)) omega3 = np.exp(np.dot(h3 - (z3 * h2), Wout)) m = Sequential() m.add(GammaDecomposition(GRU(units = 2, recurrent_activation = "sigmoid", weights = W), input_shape = (None, 3))) m.add(TimeDistributed(Dense(units = 3, activation = "exp"), weights = [Wout, bout])) m.compile(loss = "categorical_crossentropy", optimizer = "adagrad") pred = m.predict(X)[0] assert np.allclose(np.array([omega1, omega2, omega3]), pred) mdelta1 = np.exp(np.dot(h1 - h0, Wout)) mdelta2 = np.exp(np.dot(h2 - h1, Wout)) m = Sequential() m.add(Masking(input_shape=(None,3), mask_value = 1)) m.add(BetaDecomposition(GRU(units = 2, recurrent_activation = "sigmoid"), input_shape = (None, 3), weights = W)) m.add(TimeDistributed(Dense(units = 3, activation = "exp"), weights = [Wout, bout])) m.compile(loss = "categorical_crossentropy", optimizer = "adagrad") pred = m.predict(X)[0] assert np.allclose(np.array([mdelta1, mdelta2, np.ones_like(mdelta2)]), pred) momega1 = np.exp(np.dot((z2 * h1) - (z2 * z1 * h0), Wout)) momega2 = np.exp(np.dot(h2 - (z2 * h1), Wout)) m = Sequential() m.add(Masking(input_shape=(None,3), mask_value = 1)) m.add(GammaDecomposition(GRU(units = 2, recurrent_activation = "sigmoid"), input_shape = (None, 3), weights = W)) m.add(TimeDistributed(Dense(units = 3, activation = "exp"), weights = [Wout, bout])) m.compile(loss = "categorical_crossentropy", optimizer = "adagrad") pred = m.predict(X)[0] assert np.allclose(np.array([momega1, momega2, np.ones_like(momega2)]), pred) # with bigram bdelta2 = np.exp(np.dot(h2 - h0, Wout)) bdelta3 = np.exp(np.dot(h3 - h1, Wout)) m = Sequential() m.add(BetaDecomposition(GRU(units = 2, recurrent_activation = "sigmoid"), input_shape = (None, 3), weights = W, ngram = 2)) m.add(TimeDistributed(Dense(units = 3, activation = "exp"), weights = [Wout, bout])) m.compile(loss = "categorical_crossentropy", optimizer = "adagrad") pred = m.predict(X)[0] assert np.allclose(np.array([bdelta2, bdelta3]), pred) bomega2 = np.exp(np.dot((z3 * h2) - (z3 * z2 * z1 * h0), Wout)) bomega3 = np.exp(np.dot(h3 - (z3 * z2 * h1), Wout)) m = Sequential() m.add(GammaDecomposition(GRU(units = 2, recurrent_activation = "sigmoid"), input_shape = (None, 3), weights = W, ngram = 2)) m.add(TimeDistributed(Dense(units = 3, activation = "exp"), weights = [Wout, bout])) m.compile(loss = "categorical_crossentropy", optimizer = "adagrad") pred = m.predict(X)[0] assert np.allclose(np.array([bomega2, bomega3]), pred) tdelta = np.exp(np.dot(h3, Wout)) m = Sequential() 
m.add(BetaDecomposition(GRU(units = 2, recurrent_activation = "sigmoid"), input_shape = (None, 3), weights = W, ngram = 3)) m.add(TimeDistributed(Dense(units = 3, activation = "exp"), weights = [Wout, bout])) m.compile(loss = "categorical_crossentropy", optimizer = "adagrad") pred = m.predict(X)[0] assert np.allclose(np.array([tdelta]), pred) m = Sequential() m.add(GammaDecomposition(GRU(units = 2, recurrent_activation = "sigmoid"), input_shape = (None, 3), weights = W, ngram = 3)) m.add(TimeDistributed(Dense(units = 3, activation = "exp"), weights = [Wout, bout])) m.compile(loss = "categorical_crossentropy", optimizer = "adagrad") pred = m.predict(X)[0] assert np.allclose(np.array([tdelta]), pred) def test_unit_tests_Erasure(): x1 = np.array([1,2,1,2]) x2 = np.array([0,1,1,1]) x3 = np.array([1,1,1,1]) X = np.stack([x1,x2,x3]) X = np.stack([X,X,X]) Wout = np.array([[2,0,0],[0,1,1]]) bout = np.zeros((3,)) # LSTM h0 = np.array([0,0]) c0 = np.array([0,0]) Wi = np.array([[0,0], [0,1], [0,1],[1,6]]) Ui = np.array([[0,1], [1,0]]) Wf = np.array([[2,0], [0,2], [0,1],[1,4]]) Uf = np.array([[0,2], [1,2]]) Wo = np.array([[1,0], [0,0], [0,1],[1,8]]) Uo = np.array([[0,2], [1,1]]) Wc = np.array([[1,3], [0,0], [0,1],[1,4]]) Uc = np.array([[0,1], [1,1]]) bi = np.zeros((2,)) bc = np.zeros((2,)) bf = np.zeros((2,)) bo = np.zeros((2,)) W = [np.concatenate([Wi, Wf, Wc, Wo], -1), np.concatenate([Ui, Uf, Uc, Uo], -1), np.concatenate([bi, bf, bc, bo], -1)] # with nothing missing model = Sequential() model.add(LSTM(units = 2, weights = W, input_shape = (None,4))) model.compile(optimizer='sgd', loss='mse') outnorm = model.predict(np.array([[x1, x2, x3]])) # with x missing out1 = model.predict(np.array([[np.zeros_like(x1), x2, x3]])) out2 = model.predict(np.array([[x1, np.zeros_like(x2), x3]])) out3 = model.predict(np.array([[x1, x2, np.zeros_like(x3)]])) model = Sequential() model.add(ErasureWrapper(LSTM(units = 2), weights = W, input_shape=(None,4))) model.compile(optimizer='sgd', loss='mse') outerasure = model.predict(np.array([[x1,x2,x3],[x1,x2,x3]]))[0] assert(np.allclose(np.array([outnorm-out1, outnorm-out2, outnorm-out3]).squeeze(), outerasure)) # GRU Wz = np.array([[0,0], [0,1], [0,1],[1,2]]) Uz = np.array([[0,1], [1,0]]) Wr = np.array([[2,0], [0,2], [0,1],[1,1]]) Ur = np.array([[0,2], [1,2]]) Wh = np.array([[1,0], [0,0], [0,1],[1,1]]) Uh = np.array([[0,2], [1,1]]) z1 = sigmoid(np.dot(x1, Wz) + np.dot(h0, Uz)) r1 = sigmoid(np.dot(x1, Wr) + np.dot(h0, Ur)) h_tilde1 = np.tanh(np.dot(x1, Wh) + np.dot(r1 * h0, Uh)) h1 = (1 - z1) * h_tilde1 + z1 * h0 z2 = sigmoid(np.dot(x2, Wz) + np.dot(h1, Uz)) r2 = sigmoid(np.dot(x2, Wr) + np.dot(h1, Ur)) h_tilde2 = np.tanh(np.dot(x2, Wh) + np.dot(r2 * h1, Uh)) h2 = (1 - z2) * h_tilde2 + z2 * h1 z3 = sigmoid(np.dot(x3, Wz) + np.dot(h2, Uz)) r3 = sigmoid(np.dot(x3, Wr) + np.dot(h2, Ur)) h_tilde3 = np.tanh(np.dot(x3, Wh) + np.dot(r3 * h2, Uh)) h3 = (1 - z3) * h_tilde3 + z3 * h2 bz = np.zeros((2,)) br = np.zeros((2,)) bh = np.zeros((2,)) W = [np.concatenate([Wz, Wr, Wh], -1), np.concatenate([Uz, Ur, Uh], -1), np.concatenate([bz, br, bh], -1)] Wb = [np.concatenate([Wz, Wr, Wh], -1), np.concatenate([np.array([[4,5],[6,7]]), Ur, Uh], -1), np.concatenate([bz, br, bh], -1)] # with nothing missing model = Sequential() model.add(GRU(units = 2, weights = W, input_shape = (None, 4))) model.compile(optimizer='sgd', loss='mse') outnorm = model.predict(np.array([[x1, x2, x3]])) # with x missing out1 = model.predict(np.array([[np.zeros_like(x1), x2, x3]])) out2 = model.predict(np.array([[x1, 
np.zeros_like(x2), x3]])) out3 = model.predict(np.array([[x1, x2, np.zeros_like(x3)]])) out12 = model.predict(np.array([[np.zeros_like(x1), np.zeros_like(x2), x3]])) out23 = model.predict(np.array([[x1, np.zeros_like(x2), np.zeros_like(x3)]])) model = Sequential() model.add(ErasureWrapper(GRU(units = 2, weights = W), input_shape=(3,4), ngram = 1)) model.compile(optimizer='sgd', loss='mse') outerasure = model.predict(np.array([[x1,x2,x3]])) assert(np.allclose(np.array([outnorm-out1, outnorm-out2, outnorm-out3]).squeeze(), outerasure)) # with 2 x missing model = Sequential() model.add(ErasureWrapper(GRU(units = 2), weights = W, input_shape=(None,4), ngram = 2)) model.compile(optimizer='sgd', loss='mse') outerasure = model.predict(np.array([[x1,x2,x3]])) assert(np.allclose(np.array([outnorm-out12, outnorm-out23]).squeeze(), outerasure)) # with bidirectional model = Sequential() model.add(Bidirectional(GRU(units = 2), weights = W + Wb, input_shape = (3,4))) model.compile(optimizer='sgd', loss='mse') outnorm = model.predict(np.array([[x1, x2, x3]])) # with x missing out1 = model.predict(np.array([[np.zeros_like(x1), x2, x3]])) out2 = model.predict(np.array([[x1, np.zeros_like(x2), x3]])) out3 = model.predict(np.array([[x1, x2, np.zeros_like(x3)]])) model = Sequential() model.add(ErasureWrapper(Bidirectional(GRU(units = 2), weights = W + Wb), ngram = 1, input_shape = (3,4))) model.compile(optimizer='sgd', loss='mse') outerasurebi = model.predict(np.array([[x1,x2,x3]])) assert(np.allclose(np.array([outnorm-out1, outnorm-out2, outnorm - out3]).squeeze(), outerasurebi)) # with masking model = Sequential() model.add(Masking(mask_value = 0, input_shape = (3,4))) model.add(GRU(units = 2, weights = W)) model.compile(optimizer='sgd', loss='mse') outnormbi = model.predict(np.array([[x1, x2, np.zeros_like(x3)]])) out1 = model.predict(np.array([[np.zeros_like(x1), x2, np.zeros_like(x3)]])) out2 = model.predict(np.array([[x1, np.zeros_like(x2), np.zeros_like(x3)]])) out3 = model.predict(np.array([[x1, x2, np.zeros_like(x3)]])) model = Sequential() model.add(Masking(0, input_shape = (3,4))) model.add(ErasureWrapper(GRU(units = 2), weights = W, ngram = 1)) model.compile(optimizer='sgd', loss='mse') outerasurebi = model.predict(np.array([[x1,x2,np.zeros_like(x3)]])) # with bidir + masking model = Sequential() model.add(Masking(mask_value = 0, input_shape = (3,4))) model.add(Bidirectional(GRU(units = 2), weights = W + Wb)) model.compile(optimizer='sgd', loss='mse') outnormbi = model.predict(np.array([[x1, x2, np.zeros_like(x3)]])) out1 = model.predict(np.array([[np.zeros_like(x1), x2, np.zeros_like(x3)]])) out2 = model.predict(np.array([[x1, np.zeros_like(x2), np.zeros_like(x3)]])) out3 = model.predict(np.array([[x1, x2, np.zeros_like(x3)]])) model = Sequential() model.add(Masking(0, input_shape = (3,4))) model.add(ErasureWrapper(Bidirectional(GRU(units = 2), weights = W + Wb), ngram = 1)) model.compile(optimizer='sgd', loss='mse') outerasurebi = model.predict(np.array([[x1,x2,np.zeros_like(x3)]])) # with bidir + masking WW = np.array([[1,2,3],[4,5,6],[6,5,4],[1,1,2]]) bb = np.array([1,2,2]) Wemb = [np.random.random((5,4))] model = Sequential() model.add(Embedding(input_dim = 5, output_dim = 4, mask_zero = True, weights = Wemb)) model.add(Bidirectional(GRU(units = 2), weights = W + Wb)) model.add(Dense(units=3, weights = [WW, bb], use_bias = True, activation = "linear")) model.compile(optimizer='sgd', loss='mse') outnormbi = model.predict(np.array([[1,2,0]])) out1 = model.predict(np.array([[0,2,0]])) out2 = 
model.predict(np.array([[1,0,0]])) out3 = model.predict(np.array([[1,2,0]])) model = Sequential() model.add(Embedding(input_dim = 5, output_dim = 4, mask_zero = True, weights = Wemb)) model.add(ErasureWrapper(Bidirectional(GRU(units = 2), weights = W + Wb), ngram = 1)) model.add(TimeDistributed(Dense(units=3, activation = "linear", use_bias = False), weights = [WW])) model.compile(optimizer='sgd', loss='mse') outerasurebi = model.predict(np.array([[1,2,0]])) assert(np.allclose(np.array([outnormbi-out1, outnormbi-out2, outnormbi - out3]).squeeze(), outerasurebi)) def test_unit_tests_DecompositionLSTM_bidirectional(): x1 = np.array([1,2,1]) x2 = np.array([0,1,1]) x3 = np.array([1,1,1]) X = np.stack([x1,x2,x3]) X = np.stack([X]) Woutf = np.array([[2,0,0],[0,1,1]]) Woutb = np.array([[6,7,8],[4,5,3]]) Wout = np.concatenate([Woutf, Woutb], axis = 0) bout = np.zeros((3,)) # Forward h0 = np.array([0,0]) c0 = np.array([0,0]) Wif = np.array([[0,0], [0,1], [0,1]]) Uif = np.array([[0,1], [1,0]]) Wff = np.array([[2,0], [0,2], [0,1]]) Uff = np.array([[0,2], [1,2]]) Wof = np.array([[1,0], [0,0], [0,1]]) Uof = np.array([[0,2], [1,1]]) Wcf = np.array([[1,3], [0,0], [0,1]]) Ucf = np.array([[0,1], [1,1]]) i1f = sigmoid(np.dot(x1, Wif) + np.dot(h0, Uif)) f1f = sigmoid(np.dot(x1, Wff) + np.dot(h0, Uff)) o1f = sigmoid(np.dot(x1, Wof) + np.dot(h0, Uof)) h_tilde1f = np.tanh(np.dot(x1, Wcf) + np.dot(h0, Ucf)) c1f = f1f * c0 + i1f * h_tilde1f h1f = o1f * np.tanh(c1f) i2f = sigmoid(np.dot(x2, Wif) + np.dot(h1f, Uif)) f2f = sigmoid(np.dot(x2, Wff) + np.dot(h1f, Uff)) o2f = sigmoid(np.dot(x2, Wof) + np.dot(h1f, Uof)) h_tilde2f = np.tanh(np.dot(x2, Wcf) + np.dot(h1f, Ucf)) c2f = f2f * c1f + i2f * h_tilde2f h2f = o2f * np.tanh(c2f) i3f = sigmoid(np.dot(x3, Wif) + np.dot(h2f, Uif)) f3f = sigmoid(np.dot(x3, Wff) + np.dot(h2f, Uff)) o3f = sigmoid(np.dot(x3, Wof) + np.dot(h2f, Uof)) h_tilde3f = np.tanh(np.dot(x3, Wcf) + np.dot(h2f, Ucf)) c3f = f3f * c2f + i3f * h_tilde3f h3f = o3f * np.tanh(c3f) bi = np.zeros((2,)) bc = np.zeros((2,)) bf = np.zeros((2,)) bo = np.zeros((2,)) # Backward h4 = np.array([0,0]) c4 = np.array([0,0]) Wib = np.array([[0,0], [0,1], [0,1]]) Uib = np.array([[0,1], [4,0]]) Wfb = np.array([[2,0], [5,2], [0,1]]) Ufb = np.array([[0,2], [1,2]]) Wob = np.array([[1,0], [0,6], [0,1]]) Uob = np.array([[0,2], [1,1]]) Wcb = np.array([[1,3], [0,0], [0,3]]) Ucb = np.array([[0,1], [1,1]]) i3b = sigmoid(np.dot(x3, Wib) + np.dot(h4, Uib)) f3b = sigmoid(np.dot(x3, Wfb) + np.dot(h4, Ufb)) o3b = sigmoid(np.dot(x3, Wob) + np.dot(h4, Uob)) h_tilde3b = np.tanh(np.dot(x3, Wcb) + np.dot(h4, Ucb)) c3b = f3b * c4 + i3b * h_tilde3b h3b = o3b * np.tanh(c3b) i2b = sigmoid(np.dot(x2, Wib) + np.dot(h3b, Uib)) f2b = sigmoid(np.dot(x2, Wfb) + np.dot(h3b, Ufb)) o2b = sigmoid(np.dot(x2, Wob) + np.dot(h3b, Uob)) h_tilde2b = np.tanh(np.dot(x2, Wcb) + np.dot(h3b, Ucb)) c2b = f2b * c3b + i2b * h_tilde2b h2b = o2b * np.tanh(c2b) i1b = sigmoid(np.dot(x1, Wib) + np.dot(h2b, Uib)) f1b = sigmoid(np.dot(x1, Wfb) + np.dot(h2b, Ufb)) o1b = sigmoid(np.dot(x1, Wob) + np.dot(h2b, Uob)) h_tilde1b = np.tanh(np.dot(x1, Wcb) + np.dot(h2b, Ucb)) c1b = f1b * c2b + i1b * h_tilde1b h1b = o1b * np.tanh(c1b) bi = np.zeros((2,)) bc = np.zeros((2,)) bf = np.zeros((2,)) bo = np.zeros((2,)) Wf = [np.concatenate([Wif, Wff, Wcf, Wof], -1), np.concatenate([Uif, Uff, Ucf, Uof], -1), np.concatenate([bi, bf, bc, bo], -1)] Wb = [np.concatenate([Wib, Wfb, Wcb, Wob], -1), np.concatenate([Uib, Ufb, Ucb, Uob], -1), np.concatenate([bi, bf, bc, bo], -1)] rbeta1b = o1b * 
(np.tanh(c1b) - np.tanh(c2b)) rbeta2b = o1b * (np.tanh(c2b) - np.tanh(c3b)) rbeta3b = o1b * (np.tanh(c3b) - np.tanh(c4)) rbeta1f = o3f * (np.tanh(c1f) - np.tanh(c0)) rbeta2f = o3f * (np.tanh(c2f) - np.tanh(c1f)) rbeta3f = o3f * (np.tanh(c3f) - np.tanh(c2f)) rbeta1 = np.concatenate([rbeta1f, rbeta1b]) rbeta2 = np.concatenate([rbeta2f, rbeta2b]) rbeta3 = np.concatenate([rbeta3f, rbeta3b]) beta1 = np.exp(np.dot(rbeta1, Wout)) beta2 = np.exp(np.dot(rbeta2, Wout)) beta3 = np.exp(np.dot(rbeta3, Wout)) m = Sequential() m.add(BetaDecomposition(LSTM(units = 2, recurrent_activation = "sigmoid", go_backwards = True), weights = Wb, input_shape = (None, 3))) m.compile("sgd", "mse") pred = m.predict(X)[0] assert(np.allclose(pred, np.array([rbeta3b, rbeta2b, rbeta1b]))) m = Sequential() m.add(Bidirectional(BetaDecomposition(LSTM(units = 2, recurrent_activation = "sigmoid")), weights = Wf + Wb, input_shape = (None, 3))) #m.add(TimeDistributed(Dense(units = 3, activation = "exp"), weights = [Wout, bout])) m.compile("sgd", "mse") pred = m.predict(X)[0] assert(np.allclose(pred, np.array([rbeta1, rbeta2, rbeta3]))) # gamma rgamma1b = o1b * (np.tanh(c1b) - np.tanh(c2b * f1b)) rgamma2b = o1b * (np.tanh(c2b * f1b) - np.tanh(c3b * f2b * f1b)) rgamma3b = o1b * (np.tanh(c3b * f2b * f1b) - np.tanh(c4 * f3b * f2b * f1b)) rgamma1f = o3f * (np.tanh(c1f * f2f * f3f) - np.tanh(c0 * f1f * f2f * f3f)) rgamma2f = o3f * (np.tanh(c2f * f3f) - np.tanh(c1f * f2f * f3f)) rgamma3f = o3f * (np.tanh(c3f) - np.tanh(c2f * f3f)) rgamma1 = np.concatenate([rgamma1f, rgamma1b]) rgamma2 = np.concatenate([rgamma2f, rgamma2b]) rgamma3 = np.concatenate([rgamma3f, rgamma3b]) gamma1 = np.exp(np.dot(rgamma1, Wout)) gamma2 = np.exp(np.dot(rgamma2, Wout)) gamma3 = np.exp(np.dot(rgamma3, Wout)) m = Sequential() m.add(Bidirectional(GammaDecomposition(LSTM(units = 2, recurrent_activation = "sigmoid")), weights = Wf + Wb, input_shape = (None, 3))) m.add(TimeDistributed(Dense(units = 3, activation = "exp", weights = [Wout, bout]))) m.compile("sgd", "mse") pred = m.predict(X)[0] assert(np.allclose(pred, np.array([gamma1, gamma2, gamma3]))) # does masking work? 
i2b = sigmoid(np.dot(x2, Wib) + np.dot(h4, Uib)) f2b = sigmoid(np.dot(x2, Wfb) + np.dot(h4, Ufb)) o2b = sigmoid(np.dot(x2, Wob) + np.dot(h4, Uob)) h_tilde2b = np.tanh(np.dot(x2, Wcb) + np.dot(h4, Ucb)) c2b = f2b * c4 + i2b * h_tilde2b h2b = o2b * np.tanh(c2b) i1b = sigmoid(np.dot(x1, Wib) + np.dot(h2b, Uib)) f1b = sigmoid(np.dot(x1, Wfb) + np.dot(h2b, Ufb)) o1b = sigmoid(np.dot(x1, Wob) + np.dot(h2b, Uob)) h_tilde1b = np.tanh(np.dot(x1, Wcb) + np.dot(h2b, Ucb)) c1b = f1b * c2b + i1b * h_tilde1b h1b = o1b * np.tanh(c1b) mbeta1 = np.exp(np.dot(np.concatenate([o2f * (np.tanh(c1f) - np.tanh(c0)), \ o1b * (np.tanh(c1b) - np.tanh(c2b))]), Wout)) mbeta2 = np.exp(np.dot(np.concatenate([o2f * (np.tanh(c2f) - np.tanh(c1f)), \ o1b * (np.tanh(c2b) - np.tanh(c4))]), Wout)) mgamma1 = np.exp(np.dot(np.concatenate([o2f * (np.tanh(c1f * f2f) - np.tanh(c0 * f1f * f2f)), \ o1b * (np.tanh(c1b) - np.tanh(c2b * f1b))]), Wout)) mgamma2 = np.exp(np.dot(np.concatenate([o2f * (np.tanh(c2f) - np.tanh(c1f * f2f)), \ o1b * (np.tanh(c2b * f1b) - np.tanh(c4 * f2b * f1b))]), Wout)) m = Sequential() m.add(Masking(1, input_shape = (None, 3))) m.add(Bidirectional(BetaDecomposition(LSTM(units = 2, recurrent_activation = "sigmoid")), \ weights = Wf + Wb, merge_mode = "concat")) m.add(TimeDistributed(Dense(units = 3, activation = "exp"), weights = [Wout, bout])) m.compile(loss = "categorical_crossentropy", optimizer = "adagrad") pred = m.predict(X)[0] assert(np.allclose(pred, np.array([mbeta1, mbeta2, np.ones_like(mbeta2)]))) m = Sequential() m.add(Masking(1, input_shape = (None, 3))) m.add(Bidirectional(GammaDecomposition(LSTM(units = 2, recurrent_activation = "sigmoid")), \ weights = Wf + Wb, merge_mode = "concat")) m.add(TimeDistributed(Dense(units = 3, activation = "exp"), weights = [Wout, bout])) m.compile(loss = "categorical_crossentropy", optimizer = "adagrad") pred = m.predict(X)[0] assert(np.allclose(pred, np.array([mgamma1, mgamma2, np.ones_like(mgamma2)]))) def test_unit_tests_DecompositionGRU_bidirectional(): x1 = np.array([1,2,1]) x2 = np.array([0,1,1]) x3 = np.array([1,1,1]) X = np.stack([x1,x2,x3]) X = np.stack([X]) Woutf = np.array([[2,0,0],[0,1,1]]) Woutb = np.array([[6,7,8],[4,5,3]]) Wout = np.concatenate([Woutf, Woutb], axis = 0) bout = np.zeros((3,)) # Forward h0 = np.array([0,0]) Wzf = np.array([[0,0], [0,1], [0,1]]) Uzf = np.array([[0,1], [1,0]]) Wrf = np.array([[2,0], [0,2], [0,1]]) Urf = np.array([[0,2], [1,2]]) Whf = np.array([[1,0], [0,0], [0,1]]) Uhf = np.array([[0,2], [1,1]]) z1f = sigmoid(np.dot(x1, Wzf) + np.dot(h0, Uzf)) r1f = sigmoid(np.dot(x1, Wrf) + np.dot(h0, Urf)) h_tilde1f = np.tanh(np.dot(x1, Whf) + np.dot(r1f * h0, Uhf)) h1f = z1f * h0 + (1-z1f) * h_tilde1f z2f = sigmoid(np.dot(x2, Wzf) + np.dot(h1f, Uzf)) r2f = sigmoid(np.dot(x2, Wrf) + np.dot(h1f, Urf)) h_tilde2f = np.tanh(np.dot(x2, Whf) + np.dot(r2f * h1f, Uhf)) h2f = z2f * h1f + (1-z2f) * h_tilde2f z3f = sigmoid(np.dot(x3, Wzf) + np.dot(h2f, Uzf)) r3f = sigmoid(np.dot(x3, Wrf) + np.dot(h2f, Urf)) h_tilde3f = np.tanh(np.dot(x3, Whf) + np.dot(r3f * h2f, Uhf)) h3f = z3f * h2f + (1-z3f) * h_tilde3f bz = np.zeros((2,)) br = np.zeros((2,)) bh = np.zeros((2,)) # Backward h4 = np.array([0,0]) Wzb = np.array([[0,0], [0,1], [0,1]]) Uzb = np.array([[0,1], [4,0]]) Wrb = np.array([[2,0], [5,2], [0,1]]) Urb = np.array([[0,2], [1,2]]) Whb = np.array([[1,0], [0,6], [0,1]]) Uhb = np.array([[0,2], [1,1]]) z3b = sigmoid(np.dot(x3, Wzb) + np.dot(h4, Uzb)) r3b = sigmoid(np.dot(x3, Wrb) + np.dot(h4, Urb)) h_tilde3b = np.tanh(np.dot(x3, Whb) + np.dot(h4 * 
r3b, Uhb)) h3b = z3b * h4 + (1-z3b) * h_tilde3b z2b = sigmoid(np.dot(x2, Wzb) + np.dot(h3b, Uzb)) r2b = sigmoid(np.dot(x2, Wrb) + np.dot(h3b, Urb)) h_tilde2b = np.tanh(np.dot(x2, Whb) + np.dot(h3b * r2b, Uhb)) h2b = z2b * h3b + (1-z2b) * h_tilde2b z1b = sigmoid(np.dot(x1, Wzb) + np.dot(h2b, Uzb)) r1b = sigmoid(np.dot(x1, Wrb) + np.dot(h2b, Urb)) h_tilde1b = np.tanh(np.dot(x1, Whb) + np.dot(h2b * r1b, Uhb)) h1b = z1b * h2b + (1-z1b) * h_tilde1b bz = np.zeros((2,)) br = np.zeros((2,)) bh = np.zeros((2,)) Wf = [np.concatenate([Wzf, Wrf, Whf], -1), np.concatenate([Uzf, Urf, Uhf], -1), np.concatenate([bz, br, bh], -1)] Wb = [np.concatenate([Wzb, Wrb, Whb], -1), np.concatenate([Uzb, Urb, Uhb], -1), np.concatenate([bz, br, bh], -1)] rbeta1b = h1b - h2b rbeta2b = h2b - h3b rbeta3b = h3b - h4 rbeta1f = h1f - h0 rbeta2f = h2f - h1f rbeta3f = h3f - h2f rbeta1 = np.concatenate([rbeta1f, rbeta1b]) rbeta2 = np.concatenate([rbeta2f, rbeta2b]) rbeta3 = np.concatenate([rbeta3f, rbeta3b]) beta1 = np.exp(np.dot(rbeta1, Wout)) beta2 = np.exp(np.dot(rbeta2, Wout)) beta3 = np.exp(np.dot(rbeta3, Wout)) m = Sequential() m.add(Bidirectional(BetaDecomposition(GRU(units = 2, recurrent_activation = "sigmoid")), weights = Wf + Wb, input_shape = (None, 3))) m.add(TimeDistributed(Dense(units = 3, activation = "exp"), weights = [Wout, bout])) m.compile("sgd", "mse") pred = m.predict(X)[0] assert(np.allclose(pred, np.array([beta1, beta2, beta3]))) # gamma rgamma1b = h1b - h2b * z1b rgamma2b = h2b * z1b - h3b * z2b * z1b rgamma3b = h3b * z2b * z1b - h4 * z3b * z2b * z1b rgamma1f = h1f * z2f * z3f - h0 * z1f * z2f * z3f rgamma2f = h2f * z3f - h1f * z2f * z3f rgamma3f = h3f - h2f * z3f rgamma1 = np.concatenate([rgamma1f, rgamma1b]) rgamma2 = np.concatenate([rgamma2f, rgamma2b]) rgamma3 = np.concatenate([rgamma3f, rgamma3b]) gamma1 = np.exp(np.dot(rgamma1, Wout)) gamma2 = np.exp(np.dot(rgamma2, Wout)) gamma3 = np.exp(np.dot(rgamma3, Wout)) m = Sequential() m.add(Bidirectional(GammaDecomposition(GRU(units = 2, recurrent_activation = "sigmoid")), weights = Wf + Wb, input_shape = (None, 3))) m.add(TimeDistributed(Dense(units = 3, activation = "exp", weights = [Wout, bout]))) m.compile("sgd", "mse") pred = m.predict(X)[0] assert(np.allclose(pred, np.array([gamma1, gamma2, gamma3]))) # does masking work? 
z2b = sigmoid(np.dot(x2, Wzb) + np.dot(h4, Uzb)) r2b = sigmoid(np.dot(x2, Wrb) + np.dot(h4, Urb)) h_tilde2b = np.tanh(np.dot(x2, Whb) + np.dot(h4 * r2b, Uhb)) h2b = z2b * h4 + (1-z2b) * h_tilde2b z1b = sigmoid(np.dot(x1, Wzb) + np.dot(h2b, Uzb)) r1b = sigmoid(np.dot(x1, Wrb) + np.dot(h2b, Urb)) h_tilde1b = np.tanh(np.dot(x1, Whb) + np.dot(h2b * r1b, Uhb)) h1b = z1b * h2b + (1-z1b) * h_tilde1b mbeta1 = np.exp(np.dot(np.concatenate([h1f - h0, h1b - h2b]), Wout)) mbeta2 = np.exp(np.dot(np.concatenate([h2f - h1f, h2b - h4]), Wout)) mgamma1 = np.exp(np.dot(np.concatenate([h1f * z2f - h0 * z1f * z2f, h1b - h2b * z1b]), Wout)) mgamma2 = np.exp(np.dot(np.concatenate([h2f - h1f * z2f, h2b * z1b - h4 * z2b * z1b]), Wout)) m = Sequential() m.add(Masking(1, input_shape = (None, 3))) m.add(Bidirectional(BetaDecomposition(GRU(units = 2, recurrent_activation = "sigmoid")), \ weights = Wf + Wb, merge_mode = "concat")) m.add(TimeDistributed(Dense(units = 3, activation = "exp"), weights = [Wout, bout])) m.compile(loss = "categorical_crossentropy", optimizer = "adagrad") pred = m.predict(X)[0] assert(np.allclose(pred, np.array([mbeta1, mbeta2, np.ones_like(mbeta2)]))) m = Sequential() m.add(Masking(1, input_shape = (None, 3))) m.add(Bidirectional(GammaDecomposition(GRU(units = 2, recurrent_activation = "sigmoid")), \ weights = Wf + Wb, merge_mode = "concat")) m.add(TimeDistributed(Dense(units = 3, activation = "exp"), weights = [Wout, bout])) m.compile(loss = "categorical_crossentropy", optimizer = "adagrad") pred = m.predict(X)[0] assert(np.allclose(pred, np.array([mgamma1, mgamma2, np.ones_like(mgamma2)]))) ''' if __name__ == '__main__': pytest.main([__file__])
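The reference math written out in test_unit_tests_DecompositionLSTM above telescopes: with c_0 = 0, the per-timestep betas multiply back to the undecomposed prediction exp(h_T . W_out). A minimal NumPy sketch of that beta computation, assuming a unidirectional LSTM and no output bias (the helper name is mine, not from the file):

import numpy as np

def beta_decomposition(o_T, cell_states, W_out):
    """Per-timestep multiplicative contributions, as checked in the tests above.

    o_T: output gate at the final step, shape (units,);
    cell_states: stacked c_0 .. c_T, shape (T + 1, units);
    W_out: output projection, shape (units, classes).
    """
    # beta_t = exp((o_T * (tanh(c_t) - tanh(c_{t-1}))) . W_out), one row per t
    deltas = np.tanh(cell_states[1:]) - np.tanh(cell_states[:-1])
    return np.exp(np.dot(o_T * deltas, W_out))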
38.585895
139
0.587004
6,356
42,676
3.883889
0.057741
0.044965
0.029652
0.020254
0.825569
0.797578
0.767399
0.739731
0.704772
0.691728
0
0.056358
0.217921
42,676
1,105
140
38.620814
0.683275
0.004007
0
0.54878
0
0
0.027983
0
0
0
0
0
0.121951
1
0.036585
false
0
0.146341
0.012195
0.195122
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
05a8a998e6442d33b3e9ad17efef7cd0cceb53a6
49
py
Python
expander/__init__.py
polpo/djangorestframework-expander
b1cf60c7076169cbd6ad65350841c86080564f97
[ "MIT" ]
70
2015-10-14T04:26:51.000Z
2021-09-13T17:25:28.000Z
expander/__init__.py
polpo/djangorestframework-expander
b1cf60c7076169cbd6ad65350841c86080564f97
[ "MIT" ]
5
2016-08-04T18:01:25.000Z
2018-02-10T01:10:54.000Z
expander/__init__.py
polpo/djangorestframework-expander
b1cf60c7076169cbd6ad65350841c86080564f97
[ "MIT" ]
9
2016-08-04T11:50:24.000Z
2019-02-13T08:58:46.000Z
from .serializers import ExpanderSerializerMixin
24.5
48
0.897959
4
49
11
1
0
0
0
0
0
0
0
0
0
0
0
0.081633
49
1
49
49
0.977778
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
6
05e88b87f33f0f39c948c3a51be1f6ef6206d2f9
25
py
Python
src/__init__.py
Nipa-Code/Jarvide
7a0f036f3c88f0818bbd2a5a2bc714885ac01f12
[ "MIT" ]
27
2021-12-30T14:47:03.000Z
2022-03-10T19:34:13.000Z
src/__init__.py
Nipa-Code/Jarvide
7a0f036f3c88f0818bbd2a5a2bc714885ac01f12
[ "MIT" ]
61
2021-12-31T00:13:58.000Z
2022-03-04T09:38:24.000Z
src/__init__.py
Nipa-Code/Jarvide
7a0f036f3c88f0818bbd2a5a2bc714885ac01f12
[ "MIT" ]
40
2021-12-30T14:58:24.000Z
2022-02-15T14:12:30.000Z
from .bot import Jarvide
12.5
24
0.8
4
25
5
1
0
0
0
0
0
0
0
0
0
0
0
0.16
25
1
25
25
0.952381
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
05f11189fa0c277ed946d2d6c5bb1944db330a0e
12,583
py
Python
eeauditor/auditors/aws/Amazon_EC2_Image_Builder_Auditor.py
kbhagi/ElectricEye
31960e1e1cfb75c5d354844ea9e07d5295442823
[ "Apache-2.0" ]
442
2020-03-15T20:56:36.000Z
2022-03-31T22:13:07.000Z
eeauditor/auditors/aws/Amazon_EC2_Image_Builder_Auditor.py
kbhagi/ElectricEye
31960e1e1cfb75c5d354844ea9e07d5295442823
[ "Apache-2.0" ]
57
2020-03-15T22:09:56.000Z
2022-03-31T13:17:06.000Z
eeauditor/auditors/aws/Amazon_EC2_Image_Builder_Auditor.py
kbhagi/ElectricEye
31960e1e1cfb75c5d354844ea9e07d5295442823
[ "Apache-2.0" ]
59
2020-03-15T21:19:10.000Z
2022-03-31T15:01:31.000Z
#This file is part of ElectricEye. #SPDX-License-Identifier: Apache-2.0 #Licensed to the Apache Software Foundation (ASF) under one #or more contributor license agreements. See the NOTICE file #distributed with this work for additional information #regarding copyright ownership. The ASF licenses this file #to you under the Apache License, Version 2.0 (the #"License"); you may not use this file except in compliance #with the License. You may obtain a copy of the License at #http://www.apache.org/licenses/LICENSE-2.0 #Unless required by applicable law or agreed to in writing, #software distributed under the License is distributed on an #"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY #KIND, either express or implied. See the License for the #specific language governing permissions and limitations #under the License. import boto3 import datetime import json from check_register import CheckRegister registry = CheckRegister() imagebuilder = boto3.client("imagebuilder") @registry.register_check("imagebuilder") def imagebuilder_pipeline_tests_enabled_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict: """[ImageBuilder.1] Image pipeline tests should be enabled""" pipelines = imagebuilder.list_image_pipelines() pipeline_list = pipelines["imagePipelineList"] iso8601Time = datetime.datetime.now(datetime.timezone.utc).isoformat() for arn in pipeline_list: pipelineArn = arn["arn"] pipeline_name = arn["name"] image_pipelines = imagebuilder.get_image_pipeline(imagePipelineArn=pipelineArn) image_test_config = image_pipelines["imagePipeline"]["imageTestsConfiguration"] image_test_enabled = image_test_config["imageTestsEnabled"] if image_test_enabled == True: finding = { "SchemaVersion": "2018-10-08", "Id": pipelineArn + "/imagebuilder-pipeline-tests-enabled-check", "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default", "GeneratorId": pipelineArn, "AwsAccountId": awsAccountId, "Types": [ "Software and Configuration Checks/AWS Security Best Practices", "Effects/Data Exposure", ], "FirstObservedAt": iso8601Time, "CreatedAt": iso8601Time, "UpdatedAt": iso8601Time, "Severity": {"Label": "INFORMATIONAL"}, "Confidence": 99, "Title": "[ImageBuilder.1] Image pipeline tests should be enabled", "Description": "Image pipeline " + pipeline_name + " has tests enabled.", "Remediation": { "Recommendation": { "Text": "For more information on EC2 Image Builder Security and enabling image testing refer to the Best Practices section of the Amazon EC2 Image Builder Developer Guide.", "Url": "https://docs.aws.amazon.com/imagebuilder/latest/userguide/security-best-practices.html", } }, "ProductFields": {"Product Name": "ElectricEye"}, "Resources": [ { "Type": "AwsImageBuilderPipeline", "Id": pipelineArn, "Partition": awsPartition, "Region": awsRegion, "Details": {"AwsImageBuilderPipeline": {"PipelineName": pipeline_name}}, } ], "Compliance": { "Status": "PASSED", "RelatedRequirements": [ "NIST CSF ID.AM-2", "NIST SP 800-53 CM-8", "NIST SP 800-53 PM-5", "AICPA TSC CC3.2", "AICPA TSC CC6.1", "ISO 27001:2013 A.8.1.1", "ISO 27001:2013 A.8.1.2", "ISO 27001:2013 A.12.5.1", ], }, "Workflow": {"Status": "RESOLVED"}, "RecordState": "ARCHIVED", } yield finding else: finding = { "SchemaVersion": "2018-10-08", "Id": pipelineArn + "/imagebuilder-pipeline-tests-enabled-check", "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default", "GeneratorId": pipelineArn, "AwsAccountId": awsAccountId, "Types": [ 
"Software and Configuration Checks/AWS Security Best Practices", "Effects/Data Exposure", ], "FirstObservedAt": iso8601Time, "CreatedAt": iso8601Time, "UpdatedAt": iso8601Time, "Severity": {"Label": "MEDIUM"}, "Confidence": 99, "Title": "[ImageBuilder.1] Image pipeline tests should be enabled", "Description": "Image pipeline " + pipeline_name + " does not have tests enabled.", "Remediation": { "Recommendation": { "Text": "For more information on EC2 Image Builder Security and enabling image testing refer to the Best Practices section of the Amazon EC2 Image Builder Developer Guide.", "Url": "https://docs.aws.amazon.com/imagebuilder/latest/userguide/security-best-practices.html", } }, "ProductFields": {"Product Name": "ElectricEye"}, "Resources": [ { "Type": "AwsImageBuilderPipeline", "Id": pipelineArn, "Partition": awsPartition, "Region": awsRegion, "Details": {"AwsImageBuilderPipeline": {"PipelineName": pipeline_name}}, } ], "Compliance": { "Status": "FAILED", "RelatedRequirements": [ "NIST CSF ID.AM-2", "NIST SP 800-53 CM-8", "NIST SP 800-53 PM-5", "AICPA TSC CC3.2", "AICPA TSC CC6.1", "ISO 27001:2013 A.8.1.1", "ISO 27001:2013 A.8.1.2", "ISO 27001:2013 A.12.5.1", ], }, "Workflow": {"Status": "NEW"}, "RecordState": "ACTIVE", } yield finding @registry.register_check("imagebuilder") def imagebuilder_ebs_encryption_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict: """[ImageBuilder.2] Image recipes should encrypt EBS volumes""" recipes = imagebuilder.list_image_recipes() recipes_list = recipes["imageRecipeSummaryList"] iso8601Time = datetime.datetime.now(datetime.timezone.utc).isoformat() for details in recipes_list: recipeArn = details["arn"] recipe_name = details["name"] recipe = imagebuilder.get_image_recipe(imageRecipeArn=recipeArn) device_mapping = recipe["imageRecipe"]["blockDeviceMappings"] list1 = device_mapping[0] ebs = list1["ebs"] ebs_encryption = ebs["encrypted"] if ebs_encryption == True: finding = { "SchemaVersion": "2018-10-08", "Id": recipeArn + "/imagebuilder-ebs-encryption-check", "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default", "GeneratorId": recipeArn, "AwsAccountId": awsAccountId, "Types": [ "Software and Configuration Checks/AWS Security Best Practices", "Effects/Data Exposure", ], "FirstObservedAt": iso8601Time, "CreatedAt": iso8601Time, "UpdatedAt": iso8601Time, "Severity": {"Label": "INFORMATIONAL"}, "Confidence": 99, "Title": "[ImageBuilder.2] Image recipes should encrypt EBS volumes", "Description": "Image recipe " + recipe_name + " has EBS encrypted.", "Remediation": { "Recommendation": { "Text": "For more information on EC2 Image Builder Security and EBS encyption refer to the How EC2 Image Builder Works section of the Amazon EC2 Image Builder Developer Guide.", "Url": "https://docs.aws.amazon.com/imagebuilder/latest/userguide/how-image-builder-works.html#image-builder-components", } }, "ProductFields": {"Product Name": "ElectricEye"}, "Resources": [ { "Type": "AwsImageBuilderRecipe", "Id": recipeArn, "Partition": awsPartition, "Region": awsRegion, "Details": {"AwsImageBuilderRecipe": {"RecipeName": recipe_name}}, } ], "Compliance": { "Status": "PASSED", "RelatedRequirements": [ "NIST CSF ID.AM-2", "NIST SP 800-53 CM-8", "NIST SP 800-53 PM-5", "AICPA TSC CC3.2", "AICPA TSC CC6.1", "ISO 27001:2013 A.8.1.1", "ISO 27001:2013 A.8.1.2", "ISO 27001:2013 A.12.5.1", ], }, "Workflow": {"Status": "RESOLVED"}, "RecordState": "ARCHIVED", } yield finding else: finding = { "SchemaVersion": 
"2018-10-08", "Id": recipeArn + "/imagebuilder-ebs-encryption-check", "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default", "GeneratorId": recipeArn, "AwsAccountId": awsAccountId, "Types": [ "Software and Configuration Checks/AWS Security Best Practices", "Effects/Data Exposure", ], "FirstObservedAt": iso8601Time, "CreatedAt": iso8601Time, "UpdatedAt": iso8601Time, "Severity": {"Label": "MEDIUM"}, "Confidence": 99, "Title": "[ImageBuilder.2] Image recipes should encrypt EBS volumes", "Description": "Image recipe " + recipe_name + " does not have EBS encrypted.", "Remediation": { "Recommendation": { "Text": "For more information on EC2 Image Builder Security and EBS encyption refer to the How EC2 Image Builder Works section of the Amazon EC2 Image Builder Developer Guide.", "Url": "https://docs.aws.amazon.com/imagebuilder/latest/userguide/how-image-builder-works.html#image-builder-components", } }, "ProductFields": {"Product Name": "ElectricEye"}, "Resources": [ { "Type": "AwsImageBuilderRecipe", "Id": recipeArn, "Partition": awsPartition, "Region": awsRegion, "Details": {"AwsImageBuilderRecipe": {"RecipeName": recipe_name}}, } ], "Compliance": { "Status": "FAILED", "RelatedRequirements": [ "NIST CSF ID.AM-2", "NIST SP 800-53 CM-8", "NIST SP 800-53 PM-5", "AICPA TSC CC3.2", "AICPA TSC CC6.1", "ISO 27001:2013 A.8.1.1", "ISO 27001:2013 A.8.1.2", "ISO 27001:2013 A.12.5.1", ], }, "Workflow": {"Status": "NEW"}, "RecordState": "ACTIVE", } yield finding
48.210728
201
0.508623
1,062
12,583
5.983051
0.221281
0.02644
0.022663
0.024551
0.76865
0.765502
0.750393
0.749134
0.738118
0.718917
0
0.043725
0.383851
12,583
261
202
48.210728
0.775829
0.073035
0
0.760684
0
0.034188
0.393333
0.062291
0
0
0
0
0
1
0.008547
false
0.008547
0.017094
0
0.025641
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
05f11dd351bd655ac6138dee96a1b9c02b2c81c2
117
py
Python
ufdl-object-detection-app/src/ufdl/object_detection_app/migrations/job_templates/__init__.py
waikato-ufdl/ufdl-backend
776fc906c61eba6c2f2e6324758e7b8a323e30d7
[ "Apache-2.0" ]
null
null
null
ufdl-object-detection-app/src/ufdl/object_detection_app/migrations/job_templates/__init__.py
waikato-ufdl/ufdl-backend
776fc906c61eba6c2f2e6324758e7b8a323e30d7
[ "Apache-2.0" ]
85
2020-07-24T00:04:28.000Z
2022-02-10T10:35:15.000Z
ufdl-speech-app/src/ufdl/speech_app/migrations/job_templates/__init__.py
waikato-ufdl/ufdl-backend
776fc906c61eba6c2f2e6324758e7b8a323e30d7
[ "Apache-2.0" ]
null
null
null
""" Functions for working with the set of known job templates. """ from ._job_templates import iterate_job_templates
23.4
58
0.794872
17
117
5.235294
0.764706
0.404494
0
0
0
0
0
0
0
0
0
0
0.136752
117
4
59
29.25
0.881188
0.495727
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
af18d7c9de80f98e82d3053eef0b1c8a4e97571c
681
py
Python
tests/asp/AllAnswerSets/nontight/test8.gus.gringo.test.py
bernardocuteri/wasp
05c8f961776dbdbf7afbf905ee00fc262eba51ad
[ "Apache-2.0" ]
19
2015-12-03T08:53:45.000Z
2022-03-31T02:09:43.000Z
tests/asp/AllAnswerSets/nontight/test8.gus.gringo.test.py
bernardocuteri/wasp
05c8f961776dbdbf7afbf905ee00fc262eba51ad
[ "Apache-2.0" ]
80
2017-11-25T07:57:32.000Z
2018-06-10T19:03:30.000Z
tests/asp/AllAnswerSets/nontight/test8.gus.gringo.test.py
bernardocuteri/wasp
05c8f961776dbdbf7afbf905ee00fc262eba51ad
[ "Apache-2.0" ]
6
2015-01-15T07:51:48.000Z
2020-06-18T14:47:48.000Z
input = """ 3 7 61 146 73 112 115 127 130 0 0 1 146 2 0 155 73 1 155 2 0 146 73 1 148 2 0 150 115 1 150 2 0 148 115 1 148 2 0 152 112 1 152 2 0 148 112 1 152 2 0 158 127 1 158 2 0 152 127 1 155 2 0 157 61 1 157 2 0 155 61 1 158 2 0 160 130 1 160 2 0 158 130 0 0 B+ 0 B- 1 0 1 """ output = """ {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} """
4.422078
33
0.267254
93
681
1.956989
0.236559
0.131868
0.054945
0.065934
0.098901
0
0
0
0
0
0
0.372247
0.333333
681
153
34
4.45098
0.028634
0
0
0.849673
0
0
0.954479
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
1
1
0
0
1
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
af433b4d9b689302f9a239f1918121861ae36550
185
py
Python
protocol/zzz_navigation_msgs/msg/__init__.py
Tsinghua-OpenICV/carla_icv_bridge
4d5f8c26b1847dbb16a81fe43f146bf4a9a8da5e
[ "MIT" ]
null
null
null
protocol/zzz_navigation_msgs/msg/__init__.py
Tsinghua-OpenICV/carla_icv_bridge
4d5f8c26b1847dbb16a81fe43f146bf4a9a8da5e
[ "MIT" ]
null
null
null
protocol/zzz_navigation_msgs/msg/__init__.py
Tsinghua-OpenICV/carla_icv_bridge
4d5f8c26b1847dbb16a81fe43f146bf4a9a8da5e
[ "MIT" ]
1
2020-12-19T05:48:01.000Z
2020-12-19T05:48:01.000Z
from ._Lane import *
from ._LaneBoundary import *
from ._LanePoint import *
from ._LaneSituation import *
from ._Map import *
from ._MapString import *
from ._ReroutingRequest import *
23.125
32
0.772973
21
185
6.47619
0.428571
0.441176
0
0
0
0
0
0
0
0
0
0
0.151351
185
7
33
26.428571
0.866242
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
af537f31fc0ef98dcf86147292876f51400c22a4
256
py
Python
src/modeling/models/decision_tree_regressor/__init__.py
NovaSBE-DSKC/predict-campaing-sucess-rate
fec339aee7c883f55d64130eb69e490f765ee27d
[ "MIT" ]
null
null
null
src/modeling/models/decision_tree_regressor/__init__.py
NovaSBE-DSKC/predict-campaing-sucess-rate
fec339aee7c883f55d64130eb69e490f765ee27d
[ "MIT" ]
null
null
null
src/modeling/models/decision_tree_regressor/__init__.py
NovaSBE-DSKC/predict-campaing-sucess-rate
fec339aee7c883f55d64130eb69e490f765ee27d
[ "MIT" ]
null
null
null
from src.modeling.models.decision_tree_regressor.train import train
from src.modeling.models.decision_tree_regressor.test import test
from src.modeling.models.decision_tree_regressor.model import load_model, save_model

__name__ = "Decision Tree Regressor"
42.666667
83
0.867188
37
256
5.675676
0.378378
0.228571
0.4
0.3
0.6
0.6
0.6
0
0
0
0
0
0.070313
256
5
84
51.2
0.882353
0
0
0
0
0
0.089844
0
0
0
0
0
0
1
0
false
0
0.75
0
0.75
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
6
af643c5a5df7dcba5c046f17bc599738c3967553
43
py
Python
apyauth/__init__.py
tinomerl/apyauth
875782254385cdf1db7c678d5ea893a73eb49d76
[ "MIT" ]
null
null
null
apyauth/__init__.py
tinomerl/apyauth
875782254385cdf1db7c678d5ea893a73eb49d76
[ "MIT" ]
null
null
null
apyauth/__init__.py
tinomerl/apyauth
875782254385cdf1db7c678d5ea893a73eb49d76
[ "MIT" ]
null
null
null
from .session import Oauth2Session # noqa
21.5
42
0.790698
5
43
6.8
1
0
0
0
0
0
0
0
0
0
0
0.027778
0.162791
43
1
43
43
0.916667
0.093023
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
afa3919bc7e87160b0c2e0c885184b42ff3e750c
100
py
Python
src/kernel-graphql/riotapi/__init__.py
pseudonym117/kernel-graphql
0c0fc05ca84c525f1515953d77d853455db14fb1
[ "MIT" ]
1
2021-03-17T16:35:09.000Z
2021-03-17T16:35:09.000Z
src/kernel-graphql/riotapi/__init__.py
pseudonym117/kernel-graphql
0c0fc05ca84c525f1515953d77d853455db14fb1
[ "MIT" ]
1
2021-06-02T00:14:18.000Z
2021-06-02T00:14:18.000Z
src/kernel-graphql/riotapi/__init__.py
pseudonym117/kernel-graphql
0c0fc05ca84c525f1515953d77d853455db14fb1
[ "MIT" ]
null
null
null
from flask import Blueprint

riotapi = Blueprint('riotapi', __name__)

from .initialize import init
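A hedged sketch of how a blueprint like this is usually mounted on the application; the app module and url_prefix are assumptions, not taken from the repo:

from flask import Flask
from riotapi import riotapi  # the Blueprint defined above

app = Flask(__name__)
# url_prefix is an assumption; any prefix (or none) works
app.register_blueprint(riotapi, url_prefix='/riotapi')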
16.666667
40
0.79
12
100
6.25
0.666667
0.426667
0
0
0
0
0
0
0
0
0
0
0.14
100
5
41
20
0.872093
0
0
0
0
0
0.07
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0.666667
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
1
0
6
afcddaca9cbf71191691cc8e6e9ff6ee005285f6
17
py
Python
tilemani/cfgs/__init__.py
cocoaaa/TileMani
ca006f201be530af32d7c5dcae03df5daa08359a
[ "MIT" ]
null
null
null
tilemani/cfgs/__init__.py
cocoaaa/TileMani
ca006f201be530af32d7c5dcae03df5daa08359a
[ "MIT" ]
null
null
null
tilemani/cfgs/__init__.py
cocoaaa/TileMani
ca006f201be530af32d7c5dcae03df5daa08359a
[ "MIT" ]
null
null
null
from . import osm
17
17
0.764706
3
17
4.333333
1
0
0
0
0
0
0
0
0
0
0
0
0.176471
17
1
17
17
0.928571
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
bb64a21335f1090ae7d278ca41fe3e96bb0b24eb
193
py
Python
commons/context_processors.py
lsalta/mapground
d927d283dab6f756574bd88b3251b9e68f000ca7
[ "MIT" ]
null
null
null
commons/context_processors.py
lsalta/mapground
d927d283dab6f756574bd88b3251b9e68f000ca7
[ "MIT" ]
3
2020-02-11T23:04:56.000Z
2021-06-10T18:07:53.000Z
commons/context_processors.py
lsalta/mapground
d927d283dab6f756574bd88b3251b9e68f000ca7
[ "MIT" ]
1
2021-08-20T14:49:09.000Z
2021-08-20T14:49:09.000Z
from django.conf import settings


def front_end_settings(request):
    # Return the value you want as a dictionary; you may add multiple values in there.
    return {'VISOR': settings.VISOR}
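A context processor like this only takes effect once it is listed in the Django settings. A minimal sketch of that wiring; the surrounding TEMPLATES dict is standard Django boilerplate, not part of this file:

# settings.py (sketch)
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.request',
                # exposes {{ VISOR }} in every template
                'commons.context_processors.front_end_settings',
            ],
        },
    },
]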
27.571429
87
0.751295
29
193
4.931034
0.827586
0
0
0
0
0
0
0
0
0
0
0
0.181347
193
6
88
32.166667
0.905063
0.419689
0
0
0
0
0.045455
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0.333333
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
6
bba0a9e39e48274946b5c3a108f4147fdb124db0
442
py
Python
tests/test_contains.py
aubreystarktoller/lite-cnf-formulae
f88f37f0600e5a7473175ef9d495d9757e03b4d6
[ "BSD-3-Clause" ]
2
2016-07-16T17:56:26.000Z
2020-02-27T12:30:10.000Z
tests/test_contains.py
aubreystarktoller/lite-boolean-formulae
f88f37f0600e5a7473175ef9d495d9757e03b4d6
[ "BSD-3-Clause" ]
null
null
null
tests/test_contains.py
aubreystarktoller/lite-boolean-formulae
f88f37f0600e5a7473175ef9d495d9757e03b4d6
[ "BSD-3-Clause" ]
null
null
null
from lite_boolean_formulae import L


def test_literal_contains():
    assert ("x" in L("x"))


def test_conjunction_formula_contains():
    assert ("x" in (L("x") & L("y")))


def test_disjunction_formula_contains():
    assert ("x" in (L("x") | L("y")))


def test_disjunction_formula_does_not_contain():
    assert not ("x" in (L("a") | L("b")))


def test_conjunction_formula_does_not_contain():
    assert not ("x" in (L("a") & L("b")))
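A short usage sketch of the API these tests pin down; only the operators the tests themselves exercise are relied on here:

from lite_boolean_formulae import L

# L() wraps a variable name into a literal; & and | combine literals,
# and `in` asks whether a variable occurs in the resulting formula.
clause = L("x") | L("y")
assert "x" in clause
assert "z" not in clause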
20.090909
48
0.644796
69
442
3.84058
0.318841
0.132075
0.075472
0.192453
0.698113
0.698113
0.626415
0.626415
0.626415
0.626415
0
0
0.167421
442
21
49
21.047619
0.720109
0
0
0
0
0
0.031674
0
0
0
0
0
0.454545
1
0.454545
true
0
0.090909
0
0.545455
0
0
0
0
null
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
1
1
0
0
0
1
0
0
6
bbfe3697cd2a42122a9da156f3b2ade8fdab97d6
4,351
py
Python
tests/test_main.py
ludsinn/xiRT
76ac2918fc4ae7b440cbbd2b4b4960660a48d131
[ "Apache-2.0" ]
1
2020-07-10T22:39:27.000Z
2020-07-10T22:39:27.000Z
tests/test_main.py
ludsinn/xiRT
76ac2918fc4ae7b440cbbd2b4b4960660a48d131
[ "Apache-2.0" ]
5
2020-07-07T11:43:44.000Z
2020-07-10T12:32:02.000Z
tests/test_main.py
gieses/xiRT
6ebc80d3776171ddb9d0740abb42c82c126d8934
[ "Apache-2.0" ]
null
null
null
import os

from xirt import __main__

fixtures_loc = os.path.join(os.path.dirname(__file__), 'fixtures')


def test_xirt_runner_rp_crosslinks_cv(tmpdir):
    xirt_loc = os.path.join(fixtures_loc, "xirt_params_rp.yaml")
    setup_loc = os.path.join(fixtures_loc, "learning_params_training_cv.yaml")
    peptides_in = os.path.join(fixtures_loc, "DSS_xisearch_fdr_CSM50percent.csv")
    __main__.xirt_runner(peptides_file=peptides_in,
                         out_dir=tmpdir.mkdir("xiRT_results"),
                         xirt_loc=xirt_loc,
                         setup_loc=setup_loc,
                         nrows=500,
                         perform_qc=False,
                         write_dummy=False)
    assert True


def test_xirt_runner_rp_crosslinks_train(tmpdir):
    # test xirt with RP, crosslinks in the training mode
    xirt_loc = os.path.join(fixtures_loc, "xirt_params_rp.yaml")
    setup_loc = os.path.join(fixtures_loc, "learning_params_training_nocv.yaml")
    peptides_in = os.path.join(fixtures_loc, "DSS_xisearch_fdr_CSM50percent.csv")
    __main__.xirt_runner(peptides_file=peptides_in,
                         out_dir=tmpdir.mkdir("xiRT_results"),
                         xirt_loc=xirt_loc,
                         setup_loc=setup_loc,
                         nrows=500,
                         perform_qc=False,
                         write_dummy=False)
    assert True


def test_xirt_runner_rp_crosslinks_predict(tmpdir):
    # test xirt with RP, crosslinks in the predict mode, (requires trained weights)
    pass


def test_xirt_runner_rp_crosslinks_cv_refit(tmpdir):
    # test xirt with rp, crosslinks, cv mode and refit the classifier
    # test xirt with RP, crosslinks in the training mode
    xirt_loc = os.path.join(fixtures_loc, "xirt_params_rp.yaml")
    setup_loc = os.path.join(fixtures_loc, "learning_params_training_nocv.yaml")
    peptides_in = os.path.join(fixtures_loc, "DSS_xisearch_fdr_CSM50percent.csv")
    __main__.xirt_runner(peptides_file=peptides_in,
                         out_dir=tmpdir.mkdir("xiRT_results"),
                         xirt_loc=xirt_loc,
                         setup_loc=setup_loc,
                         nrows=500,
                         perform_qc=False,
                         write_dummy=False)
    assert True


def test_xirt_runner_scx_crosslinks_cv_refit(tmpdir):
    # test xirt with rp, crosslinks, cv mode and refit the classifier
    # test xirt with RP, crosslinks in the training mode
    xirt_loc = os.path.join(fixtures_loc, "xirt_params_scx.yaml")
    setup_loc = os.path.join(fixtures_loc, "learning_params_training_cv.yaml")
    peptides_in = os.path.join(fixtures_loc, "DSS_xisearch_fdr_CSM50percent.csv")
    __main__.xirt_runner(peptides_file=peptides_in,
                         out_dir=tmpdir.mkdir("xiRT_results"),
                         xirt_loc=xirt_loc,
                         setup_loc=setup_loc,
                         nrows=1000,
                         perform_qc=False,
                         write_dummy=False)
    assert True


def test_xirt_runner_3d_crosslinks_cv(tmpdir):
    xirt_loc = os.path.join(fixtures_loc, "xirt_params_3RT.yaml")
    setup_loc = os.path.join(fixtures_loc, "learning_params_training_nocv.yaml")
    peptides_in = os.path.join(fixtures_loc, "DSS_xisearch_fdr_CSM50percent.csv")
    __main__.xirt_runner(peptides_file=peptides_in,
                         out_dir=tmpdir.mkdir("xiRT_results"),
                         xirt_loc=xirt_loc,
                         setup_loc=setup_loc,
                         nrows=500,
                         perform_qc=False,
                         write_dummy=False)
    assert True


def test_xirt_linear_rp(tmpdir):
    xirt_loc = os.path.join(fixtures_loc, "xirt_params_rp_linear.yaml")
    setup_loc = os.path.join(fixtures_loc, "learning_params_training_nocv_linear.yaml")
    peptides_in = os.path.join(fixtures_loc, "DSS_xisearch_fdr_CSM50percent.csv")
    __main__.xirt_runner(peptides_file=peptides_in,
                         out_dir=tmpdir.mkdir("xiRT_results"),
                         xirt_loc=xirt_loc,
                         setup_loc=setup_loc,
                         nrows=500,
                         perform_qc=False,
                         write_dummy=False)
    assert True


def test_xirt_pseudolinear(tmpdir):
    xirt_loc = os.path.join(fixtures_loc, "xirt_params_3RT.yaml")
    setup_loc = os.path.join(fixtures_loc, "learning_params_training_nocv_pseudolinear.yaml")
    peptides_in = os.path.join(fixtures_loc, "DSS_xisearch_fdr_CSM50percent.csv")
    __main__.xirt_runner(peptides_file=peptides_in,
                         out_dir=tmpdir.mkdir("xiRT_results"),
                         xirt_loc=xirt_loc,
                         setup_loc=setup_loc,
                         nrows=500,
                         perform_qc=False,
                         write_dummy=False)
    assert True


def test_arg_parser():
    print("test")
    parser = __main__.arg_parser()
    assert len(parser.description) > 1
44.397959
94
0.721673
619
4,351
4.654281
0.109855
0.0479
0.076362
0.131204
0.916348
0.916348
0.916348
0.903159
0.888927
0.888927
0
0.011287
0.185475
4,351
97
95
44.85567
0.801637
0.08228
0
0.676923
0
0
0.181636
0.128199
0
0
0
0
0.123077
1
0.138462
false
0.015385
0.030769
0
0.169231
0.015385
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
bbfe48dee05f552446b8bfba9401d25de5144c55
198,384
py
Python
srv6_sdn_control_plane/northbound/grpc/nb_grpc_server.py
everywan-io/srv6-sdn-control-plane
afb7ce82571c852f784b763b8dec766b75f350fd
[ "Apache-2.0" ]
null
null
null
srv6_sdn_control_plane/northbound/grpc/nb_grpc_server.py
everywan-io/srv6-sdn-control-plane
afb7ce82571c852f784b763b8dec766b75f350fd
[ "Apache-2.0" ]
null
null
null
srv6_sdn_control_plane/northbound/grpc/nb_grpc_server.py
everywan-io/srv6-sdn-control-plane
afb7ce82571c852f784b763b8dec766b75f350fd
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/python # Copyright (C) 2018 Carmine Scarpitta, Pier Luigi Ventre, Stefano Salsano - # (CNIT and University of Rome "Tor Vergata") # # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Server of a Northbound interface based on gRPC protocol # # @author Carmine Scarpitta <carmine.scarpitta.94@gmail.com> # @author Pier Luigi Ventre <pier.luigi.ventre@uniroma2.it> # @author Stefano Salsano <stefano.salsano@uniroma2.it> # # General imports from __future__ import absolute_import, division, print_function from six import text_type from argparse import ArgumentParser from concurrent import futures import logging import time import grpc import os import sys from rollbackcontext import RollbackContext from socket import AF_UNSPEC from socket import AF_INET from socket import AF_INET6 # ipaddress dependencies from ipaddress import IPv6Interface, IPv6Network, IPv4Network # SRv6 dependencies from srv6_sdn_proto import srv6_vpn_pb2_grpc from srv6_sdn_proto import srv6_vpn_pb2 from srv6_sdn_control_plane import srv6_controller_utils from srv6_sdn_control_plane.northbound.grpc import tunnel_utils from srv6_sdn_control_plane.southbound.grpc import sb_grpc_client from srv6_sdn_proto import status_codes_pb2 from srv6_sdn_controller_state import ( srv6_sdn_controller_state as storage_helper ) from srv6_sdn_proto.status_codes_pb2 import Status, NbStatusCode, SbStatusCode from srv6_sdn_control_plane.srv6_controller_utils import ( OverlayType, InterfaceType ) from srv6_sdn_proto.srv6_vpn_pb2 import TenantReply, OverlayServiceReply from srv6_sdn_proto.srv6_vpn_pb2 import InventoryServiceReply from srv6_sdn_proto.srv6_vpn_pb2 import GetSIDListsReply # STAMP Support ENABLE_STAMP_SUPPORT = True # Import modules required by STAMP if ENABLE_STAMP_SUPPORT: from srv6_delay_measurement import controller as stamp_controller_module from srv6_delay_measurement.exceptions import ( NodeIdNotFoundError, STAMPSessionsExistError ) # Topology file DEFAULT_TOPOLOGY_FILE = '/tmp/topology.json' # VPN file DEFAULT_VPN_DUMP = '/tmp/vpn.json' # Use management IPs instead of loopback IPs DEFAULT_USE_MGMT_IP = False # Global variables definition # Default server ip and port DEFAULT_GRPC_SERVER_IP = '::' DEFAULT_GRPC_SERVER_PORT = 54321 DEFAULT_GRPC_CLIENT_PORT = 12345 # Secure option DEFAULT_SECURE = False # Server certificate DEFAULT_CERTIFICATE = 'cert_server.pem' # Server key DEFAULT_KEY = 'key_server.pem' # Southbound interface DEFAULT_SB_INTERFACE = 'gRPC' # Verbose mode DEFAULT_VERBOSE = False # Seconds between checks for interfaces.json # and topology.json files INTERVAL_CHECK_FILES = 5 # Supported southbound interfaces SUPPORTED_SB_INTERFACES = ['gRPC'] # Validate topology VALIDATE_TOPO = False # Default VXLAN port DEFAULT_VXLAN_PORT = 4789 # Status codes STATUS_OK = NbStatusCode.STATUS_OK STATUS_BAD_REQUEST = NbStatusCode.STATUS_BAD_REQUEST STATUS_INTERNAL_SERVER_ERROR = NbStatusCode.STATUS_INTERNAL_SERVER_ERROR def exec_or_mark_device_inconsitent(rollback_func, deviceid, tenantid, *args, **kwargs): try: if rollback_func(*args, 

class NorthboundInterface(srv6_vpn_pb2_grpc.NorthboundInterfaceServicer):
    """gRPC request handler"""

    def __init__(self, grpc_client_port=DEFAULT_GRPC_CLIENT_PORT,
                 srv6_manager=None,
                 southbound_interface=DEFAULT_SB_INTERFACE,
                 verbose=DEFAULT_VERBOSE, stamp_controller=None):
        # Port of the gRPC client
        self.grpc_client_port = grpc_client_port
        # Verbose mode
        self.verbose = verbose
        # Southbound interface
        self.southbound_interface = southbound_interface
        # SRv6 Manager
        self.srv6_manager = srv6_manager
        # Store the reference to the STAMP controller
        self.stamp_controller = stamp_controller
        # Initialize tunnel state
        self.tunnel_modes = tunnel_utils.TunnelState(
            grpc_client_port, verbose
        ).tunnel_modes
        self.supported_tunnel_modes = [
            t_mode for t_mode in self.tunnel_modes
        ]
        logging.info(
            '*** Supported tunnel modes: %s' % self.supported_tunnel_modes
        )

    """ Configure a tenant """

    def ConfigureTenant(self, request, context):
        logging.debug('Configure tenant request received: %s' % request)
        with RollbackContext() as rollback:
            # Extract tenant ID
            tenantid = request.tenantid
            # Extract tenant info
            tenant_info = request.tenant_info
            tenant_info = tenant_info if tenant_info != '' else None
            # Extract VXLAN port
            vxlan_port = request.config.vxlan_port
            vxlan_port = vxlan_port if vxlan_port != -1 else None
            # Parameters validation
            #
            # Validate tenant ID
            logging.debug('Validating the tenant ID: %s' % tenantid)
            if not srv6_controller_utils.validate_tenantid(tenantid):
                # If tenant ID is invalid, return an error message
                err = 'Invalid tenant ID: %s' % tenantid
                logging.warning(err)
                return OverlayServiceReply(
                    status=Status(code=STATUS_BAD_REQUEST, reason=err)
                )
            # Validate VXLAN port
            if not srv6_controller_utils.validate_port(vxlan_port):
                # If VXLAN port is invalid, return an error message
                err = (
                    'Invalid VXLAN port %s for the tenant: %s'
                    % (vxlan_port, tenantid)
                )
                logging.warning(err)
                return OverlayServiceReply(
                    status=Status(code=STATUS_BAD_REQUEST, reason=err)
                )
            # Check if the tenant is configured
            is_config = storage_helper.is_tenant_configured(
                tenantid
            )
            if is_config and vxlan_port is not None:
                err = 'Cannot change the VXLAN port for a configured tenant'
                logging.error(err)
                return TenantReply(
                    status=Status(code=STATUS_BAD_REQUEST, reason=err)
                )
            # Configure the tenant
            vxlan_port = (
                vxlan_port if vxlan_port is not None else DEFAULT_VXLAN_PORT
            )
            storage_helper.configure_tenant(
                tenantid, tenant_info, vxlan_port
            )  # TODO handle rollback?
            # Success, commit all performed operations
            rollback.commitAll()
        # Response
        return TenantReply(status=Status(code=STATUS_OK, reason='OK'))
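
    # Illustrative sketch (not part of the original class): exercising
    # ConfigureTenant in isolation with minimal stand-in objects instead of
    # the generated protobuf messages. Only the attributes read by the
    # handler above are provided; the field values are hypothetical and a
    # controller state backend must be reachable for the call to succeed.
    @staticmethod
    def _example_configure_tenant_offline(nb_interface):
        class _Config(object):
            vxlan_port = -1   # -1 means "fall back to DEFAULT_VXLAN_PORT"

        class _Request(object):
            tenantid = '1'    # hypothetical tenant ID
            tenant_info = ''  # empty string is treated as "no info"
            config = _Config()

        reply = nb_interface.ConfigureTenant(_Request(), context=None)
        return reply.status.code == STATUS_OK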

    """ Remove a tenant """

    def RemoveTenant(self, request, context):
        logging.debug('Remove tenant request received: %s' % request)
        # Extract tenant ID
        tenantid = request.tenantid
        # Parameters validation
        #
        # Validate tenant ID
        logging.debug('Validating the tenant ID: %s' % tenantid)
        if not srv6_controller_utils.validate_tenantid(tenantid):
            # If tenant ID is invalid, return an error message
            err = 'Invalid tenant ID: %s' % tenantid
            logging.warning(err)
            return OverlayServiceReply(
                status=Status(code=STATUS_BAD_REQUEST, reason=err)
            )
        # Remove the tenant
        #
        # Get all the overlays associated to the tenant ID
        overlays = storage_helper.get_overlays(tenantid=tenantid)
        if overlays is None:
            err = 'Error getting overlays'
            logging.error(err)
            return InventoryServiceReply(
                status=Status(code=STATUS_INTERNAL_SERVER_ERROR, reason=err)
            )
        # Remove all overlays
        for overlay in overlays:
            overlayid = overlay['_id']
            self._RemoveOverlay(overlayid, tenantid, tunnel_info=None)
        # Get all the devices of the tenant ID
        devices = storage_helper.get_devices(tenantid=tenantid)
        if devices is None:
            err = 'Error getting devices'
            logging.error(err)
            return OverlayServiceReply(
                status=Status(code=STATUS_INTERNAL_SERVER_ERROR, reason=err)
            )
        for device in devices:
            # Unregister all devices
            deviceid = device['deviceid']
            logging.debug('Unregistering device %s' % deviceid)
            self._unregister_device(deviceid, tenantid, ignore_errors=True)
        # TODO remove tenant from keystone
        #
        # Success
        return InventoryServiceReply(
            status=Status(code=STATUS_OK, reason='OK')
        )

    def enable_disable_device(self, deviceid, tenantid, enabled):
        # Enable/Disable the device
        res = storage_helper.set_device_enabled_flag(
            deviceid=deviceid, tenantid=tenantid, enabled=enabled
        )
        if res is None:
            err = (
                'Error while changing the enabled flag for the device %s: '
                'Unable to update the controller state' % deviceid
            )
            logging.error(err)
            return STATUS_INTERNAL_SERVER_ERROR, err
        elif res is False:
            err = (
                'Error while changing the enabled flag for the device %s'
                % deviceid
            )
            logging.warning(err)
            return STATUS_BAD_REQUEST, err
        # Success
        return STATUS_OK, 'OK'

    """ Enable a device """

    def EnableDevice(self, request, context):
        logging.debug('EnableDevice request received: %s' % request)
        # Iterate over each device
        for device in request.devices:
            # Extract device ID
            deviceid = device.id
            # Extract tenant ID
            tenantid = device.tenantid
            # Enable the device
            status_code, reason = self.enable_disable_device(
                deviceid=deviceid, tenantid=tenantid, enabled=True
            )
            if status_code != STATUS_OK:
                # Error
                return OverlayServiceReply(
                    status=Status(code=status_code, reason=reason)
                )
        # Success: create the response
        return OverlayServiceReply(
            status=Status(code=STATUS_OK, reason='OK')
        )
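
    # Illustrative sketch (not part of the original class): the
    # (status_code, reason) tuple convention returned by
    # enable_disable_device maps directly onto the Status message used in
    # every reply, as EnableDevice above shows.
    @staticmethod
    def _example_tuple_to_reply(status_code, reason):
        # Wrap a (code, reason) pair into an OverlayServiceReply
        return OverlayServiceReply(
            status=Status(code=status_code, reason=reason)
        )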

    """ Disable a device """

    def DisableDevice(self, request, context):
        logging.debug('DisableDevice request received: %s' % request)
        # Iterate over each device
        for device in request.devices:
            # Extract device ID
            deviceid = device.id
            # Extract tenant ID
            tenantid = device.tenantid
            # Check tunnels stats
            # If the device has some overlays configured,
            # it is not possible to disable it
            num = storage_helper.get_num_tunnels(deviceid, tenantid)
            if num is None:
                err = (
                    'Error getting tunnels stats. Device not found '
                    'or error during the connection to the db'
                )
                logging.error(err)
                return OverlayServiceReply(
                    status=Status(
                        code=STATUS_INTERNAL_SERVER_ERROR, reason=err
                    )
                )
            elif num != 0:
                err = (
                    'Cannot disable the device. '
                    'The device %s (tenant %s) has tunnels registered'
                    % (deviceid, tenantid)
                )
                logging.warning(err)
                return OverlayServiceReply(
                    status=Status(code=STATUS_BAD_REQUEST, reason=err)
                )
            # Disable the device
            status_code, reason = self.enable_disable_device(
                deviceid=deviceid, tenantid=tenantid, enabled=False
            )
            if status_code != STATUS_OK:
                # Error
                return OverlayServiceReply(
                    status=Status(code=status_code, reason=reason)
                )
        # Success: create the response
        return OverlayServiceReply(
            status=Status(code=STATUS_OK, reason='OK')
        )

    """ Configure a device and change its status to 'RUNNING' """

    def ConfigureDevice(self, request, context):
        logging.debug('ConfigureDevice request received: %s' % request)
        with RollbackContext() as rollback:
            # Get the devices
            devices = [device.id for device in request.configuration.devices]
            devices = storage_helper.get_devices(
                deviceids=devices, return_dict=True
            )
            if devices is None:
                logging.error('Error getting devices')
                return OverlayServiceReply(
                    status=Status(
                        code=STATUS_INTERNAL_SERVER_ERROR,
                        reason='Error getting devices'
                    )
                )
            # Convert interfaces list to a dict representation, keyed by
            # interface name (one dict per device)
            # This step simplifies future processing
            for deviceid in devices:
                interfaces = dict()
                for interface in devices[deviceid]['interfaces']:
                    interfaces[interface['name']] = interface
                devices[deviceid]['interfaces'] = interfaces
            # Parameters validation
            for device in request.configuration.devices:
                # Parameters extraction
                #
                # Extract the device ID from the configuration
                deviceid = device.id
                # Extract the tenant ID
                tenantid = device.tenantid
                # Extract the interfaces
                interfaces = device.interfaces
                # Extract the device name
                device_name = device.name
                # Extract the device description
                device_description = device.description
                # If the device is participating in some overlay,
                # we cannot configure it
                overlay = storage_helper.get_overlay_containing_device(
                    deviceid, tenantid
                )
                if overlay is not None:
                    err = (
                        'Cannot configure device %s: the device '
                        'is participating in the overlay %s'
                        % (deviceid, overlay['_id'])
                    )
                    logging.error(err)
                    return OverlayServiceReply(
                        status=Status(code=STATUS_BAD_REQUEST, reason=err)
                    )
                # Name is mandatory
                if device_name is None or device_name == '':
                    err = (
                        'Invalid configuration for device %s\n'
                        'Invalid value for the mandatory parameter '
                        '"name": %s' % (deviceid, device_name)
                    )
                    logging.error(err)
                    return OverlayServiceReply(
                        status=Status(code=STATUS_BAD_REQUEST, reason=err)
                    )
                # Description parameter is mandatory
                if device_description is None or device_description == '':
                    err = (
                        'Invalid configuration for device %s\n'
                        'Invalid value for the mandatory parameter '
                        '"description": %s' % (deviceid, device_description)
                    )
                    logging.error(err)
                    return OverlayServiceReply(
                        status=Status(code=STATUS_BAD_REQUEST, reason=err)
                    )
                # Validate the device ID
                logging.debug('Validating the device ID: %s' % deviceid)
                if not srv6_controller_utils.validate_deviceid(deviceid):
                    # If device ID is invalid, return an error message
                    err = (
                        'Invalid configuration for device %s\n'
                        'Invalid device ID: %s' % (deviceid, deviceid)
                    )
                    logging.warning(err)
                    return OverlayServiceReply(
                        status=Status(code=STATUS_BAD_REQUEST, reason=err)
                    )
                # Validate the tenant ID
                logging.debug('Validating the tenant ID: %s' % tenantid)
                if not srv6_controller_utils.validate_tenantid(tenantid):
                    # If tenant ID is invalid, return an error message
                    err = (
                        'Invalid configuration for device %s\n'
                        'Invalid tenant ID: %s' % (deviceid, tenantid)
                    )
                    logging.warning(err)
                    return OverlayServiceReply(
                        status=Status(code=STATUS_BAD_REQUEST, reason=err)
                    )
                # Check if the device exists
                if deviceid not in devices:
                    err = (
                        'Invalid configuration for device %s\n'
                        'Device not found: %s' % (deviceid, deviceid)
                    )
                    logging.warning(err)
                    return OverlayServiceReply(
                        status=Status(code=STATUS_BAD_REQUEST, reason=err)
                    )
                # Check if the device belongs to the tenant
                if tenantid != devices[deviceid]['tenantid']:
                    err = (
                        'Invalid configuration for device %s\n'
                        'The device %s does not belong to the tenant %s'
                        % (deviceid, deviceid, tenantid)
                    )
                    logging.warning(err)
                    return OverlayServiceReply(
                        status=Status(code=STATUS_BAD_REQUEST, reason=err)
                    )
                # Validate the interfaces
                wan_interfaces_counter = 0
                lan_interfaces_counter = 0
                for interface in interfaces:
                    # Update counters
                    if interface.type == InterfaceType.WAN:
                        wan_interfaces_counter += 1
                    elif interface.type == InterfaceType.LAN:
                        lan_interfaces_counter += 1
                    # Check if the interface exists
                    if interface.name not in devices[deviceid]['interfaces']:
                        err = (
                            'Invalid configuration for device %s\n'
                            'Interface %s not found on device %s'
                            % (deviceid, interface.name, deviceid)
                        )
                        logging.warning(err)
                        return OverlayServiceReply(
                            status=Status(
                                code=STATUS_BAD_REQUEST, reason=err
                            )
                        )
                    # Check interface type
                    if not srv6_controller_utils.validate_interface_type(
                        interface.type
                    ):
                        err = (
                            'Invalid configuration for device %s\n'
                            'Invalid type %s for the interface %s (%s)'
                            % (deviceid, interface.type, interface.name,
                               deviceid)
                        )
                        logging.warning(err)
                        return OverlayServiceReply(
                            status=Status(
                                code=STATUS_BAD_REQUEST, reason=err
                            )
                        )
                    # Cannot set IP address and subnets for the WAN
                    # interfaces
                    if interface.type == InterfaceType.WAN:
                        if len(interface.ipv4_addrs) > 0 or \
                                len(interface.ipv6_addrs) > 0:
                            err = (
                                'Invalid configuration for device %s\n'
                                'WAN interfaces do not support IP addrs '
                                'assignment: %s' % (deviceid, interface.name)
                            )
                            logging.warning(err)
                            return OverlayServiceReply(
                                status=Status(
                                    code=STATUS_BAD_REQUEST, reason=err
                                )
                            )
                        if len(interface.ipv4_subnets) > 0 or \
                                len(interface.ipv6_subnets) > 0:
                            err = (
                                'Invalid configuration for device %s\n'
                                'WAN interfaces do not support subnets '
                                'assignment: %s' % (deviceid, interface.name)
                            )
                            logging.warning(err)
                            return OverlayServiceReply(
                                status=Status(
                                    code=STATUS_BAD_REQUEST, reason=err
                                )
                            )
                    # Validate IP addresses
                    for ipaddr in interface.ipv4_addrs:
                        if not srv6_controller_utils.validate_ipv4_address(
                            ipaddr
                        ):
                            err = (
                                'Invalid configuration for device %s\n'
                                'Invalid IPv4 address %s for the '
                                'interface %s'
                                % (deviceid, ipaddr, interface.name)
                            )
                            logging.warning(err)
                            return OverlayServiceReply(
                                status=Status(
                                    code=STATUS_BAD_REQUEST, reason=err
                                )
                            )
                    for ipaddr in interface.ipv6_addrs:
                        if not srv6_controller_utils.validate_ipv6_address(
                            ipaddr
                        ):
                            err = (
                                'Invalid configuration for device %s\n'
                                'Invalid IPv6 address %s for the '
                                'interface %s'
                                % (deviceid, ipaddr, interface.name)
                            )
                            logging.warning(err)
                            return OverlayServiceReply(
                                status=Status(
                                    code=STATUS_BAD_REQUEST, reason=err
                                )
                            )
                    # Validate subnets
                    for subnet in interface.ipv4_subnets:
                        gateway = subnet.gateway
                        subnet = subnet.subnet
                        if not srv6_controller_utils.validate_ipv4_address(
                            subnet
                        ):
                            err = (
                                'Invalid configuration for device %s\n'
                                'Invalid IPv4 subnet %s for the '
                                'interface %s'
                                % (deviceid, subnet, interface.name)
                            )
                            logging.warning(err)
                            return OverlayServiceReply(
                                status=Status(
                                    code=STATUS_BAD_REQUEST, reason=err
                                )
                            )
                        if gateway is not None and gateway != '':
                            if not (
                                srv6_controller_utils
                                .validate_ipv4_address(gateway)
                            ):
                                err = (
                                    'Invalid configuration for device %s\n'
                                    'Invalid IPv4 gateway %s for the '
                                    'subnet %s on the interface %s'
                                    % (deviceid, gateway, subnet,
                                       interface.name)
                                )
                                logging.warning(err)
                                return OverlayServiceReply(
                                    status=Status(
                                        code=STATUS_BAD_REQUEST, reason=err
                                    )
                                )
                    for subnet in interface.ipv6_subnets:
                        gateway = subnet.gateway
                        subnet = subnet.subnet
                        if not srv6_controller_utils.validate_ipv6_address(
                            subnet
                        ):
                            err = (
                                'Invalid configuration for device %s\n'
                                'Invalid IPv6 subnet %s for the '
                                'interface %s'
                                % (deviceid, subnet, interface.name)
                            )
                            logging.warning(err)
                            return OverlayServiceReply(
                                status=Status(
                                    code=STATUS_BAD_REQUEST, reason=err
                                )
                            )
                        if gateway is not None and gateway != '':
                            if not (
                                srv6_controller_utils
                                .validate_ipv6_address(gateway)
                            ):
                                err = (
                                    'Invalid configuration for device %s\n'
                                    'Invalid IPv6 gateway %s for the '
                                    'subnet %s on the interface %s'
                                    % (deviceid, gateway, subnet,
                                       interface.name)
                                )
                                logging.warning(err)
                                return OverlayServiceReply(
                                    status=Status(
                                        code=STATUS_BAD_REQUEST, reason=err
                                    )
                                )
                # At least one WAN interface is required
                if wan_interfaces_counter == 0:
                    err = (
                        'Invalid configuration for device %s\n'
                        'The configuration must contain at least one WAN '
                        'interface (0 provided)' % deviceid
                    )
                    logging.warning(err)
                    return OverlayServiceReply(
                        status=Status(code=STATUS_BAD_REQUEST, reason=err)
                    )
                # At least one LAN interface is required
                if lan_interfaces_counter == 0:
                    err = (
                        'Invalid configuration for device %s\n'
                        'The configuration must contain at least one LAN '
                        'interface (0 provided)' % deviceid
                    )
                    logging.warning(err)
                    return OverlayServiceReply(
                        status=Status(code=STATUS_BAD_REQUEST, reason=err)
                    )
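            # Example of a configuration that passes the checks above
            # (illustrative, not executed): a device with one WAN
            # interface carrying no addresses or subnets and one LAN
            # interface with an IPv4 address and subnet, e.g.
            #   interfaces = [
            #       {'name': 'eth0', 'type': InterfaceType.WAN},
            #       {'name': 'eth1', 'type': InterfaceType.LAN,
            #        'ipv4_addrs': ['10.0.0.1/24'],
            #        'ipv4_subnets': [{'subnet': '10.0.0.0/24',
            #                          'gateway': ''}]}
            #   ]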
            # All checks passed
            #
            # Remove current STAMP information
            if ENABLE_STAMP_SUPPORT:
                logging.info('Removing current STAMP information\n\n')
                for device in request.configuration.devices:
                    # Extract the device ID from the configuration
                    deviceid = device.id
                    # Extract the tenant ID from the configuration
                    tenantid = device.tenantid
                    # Remove the STAMP information
                    try:
                        stamp_node = (
                            self.stamp_controller.storage.get_stamp_node(
                                node_id=deviceid, tenantid=tenantid
                            )
                        )
                        if stamp_node is not None:
                            self.stamp_controller.remove_stamp_node(
                                node_id=deviceid, tenantid=tenantid
                            )
                            # Add reverse action to the rollback stack
                            rollback.push(
                                func=exec_or_mark_device_inconsistent,
                                rollback_func=(
                                    self.stamp_controller.add_stamp_node
                                ),
                                node_id=stamp_node.node_id,
                                node_name=stamp_node.node_name,
                                grpc_ip=stamp_node.grpc_ip,
                                grpc_port=stamp_node.grpc_port,
                                ip=stamp_node.ip,
                                sender_port=stamp_node.sender_udp_port,
                                reflector_port=(
                                    stamp_node.reflector_udp_port
                                ),
                                interfaces=stamp_node.interfaces,
                                stamp_source_ipv6_address=(
                                    stamp_node.stamp_source_ipv6_address
                                ),
                                is_sender=stamp_node.is_sender,
                                is_reflector=stamp_node.is_reflector,
                                deviceid=deviceid,
                                tenantid=tenantid
                            )
                    except NodeIdNotFoundError:
                        logging.debug(
                            f'STAMP Node {deviceid} does not exist. '
                            'Nothing to do.'
                        )
                    except STAMPSessionsExistError:
                        err = (
                            f'STAMP Node {deviceid} is participating in '
                            'one or more STAMP sessions. Delete all '
                            'existing sessions before changing device '
                            'configuration.'
                        )
                        logging.error(err)
                        return OverlayServiceReply(
                            status=Status(
                                code=STATUS_BAD_REQUEST, reason=err
                            )
                        )
            # Extract the configurations from the request message
            new_devices = list()
            for device in request.configuration.devices:
                logging.info('Processing the configuration:\n%s' % device)
                # Parameters extraction
                #
                # Extract the device ID from the configuration
                deviceid = device.id
                # Extract the device name from the configuration
                device_name = device.name
                # Extract the device description from the configuration
                device_description = device.description
                # Extract the tenant ID
                tenantid = device.tenantid
                # Extract the device interfaces from the configuration
                interfaces = devices[deviceid]['interfaces']
                err = STATUS_OK
                for interface in device.interfaces:
                    interfaces[interface.name]['name'] = interface.name
                    if interface.type != '':
                        interfaces[interface.name]['type'] = interface.type
                    if interface.type == InterfaceType.WAN:
                        if len(interface.ipv4_addrs) > 0 or \
                                len(interface.ipv6_addrs) > 0:
                            logging.warning(
                                'Cannot set IP addrs for a WAN interface'
                            )
                        if len(interface.ipv4_subnets) > 0 or \
                                len(interface.ipv6_subnets) > 0:
                            logging.warning(
                                'Cannot set subnets for a WAN interface'
                            )
                    else:
                        if len(interface.ipv4_addrs) > 0:
                            # Remove the old IPv4 addresses
                            addrs = list()
                            for addr in interfaces[
                                interface.name
                            ]['ipv4_addrs']:
                                addrs.append(addr)
                                response = self.srv6_manager.remove_ipaddr(
                                    devices[deviceid]['mgmtip'],
                                    self.grpc_client_port,
                                    ip_addr=addr,
                                    device=interface.name,
                                    family=AF_UNSPEC
                                )
                                if response != SbStatusCode.STATUS_SUCCESS:
                                    # If the operation has failed,
                                    # report an error message
                                    logging.warning(
                                        'Cannot remove the public '
                                        'addresses from the interface'
                                    )
                                    err = (
                                        status_codes_pb2
                                        .STATUS_INTERNAL_ERROR
                                    )
                                # Add reverse action to the rollback stack
                                rollback.push(
                                    func=exec_or_mark_device_inconsistent,
                                    rollback_func=(
                                        self.srv6_manager.create_ipaddr
                                    ),
                                    server_ip=devices[deviceid]['mgmtip'],
                                    server_port=self.grpc_client_port,
                                    ip_addr=addr,
                                    device=interface.name,
                                    family=AF_INET,
                                    deviceid=deviceid,
                                    tenantid=tenantid
                                )
                            interfaces[interface.name]['ipv4_addrs'] = list()
                            # Add IP address to the interface
                            for ipv4_addr in interface.ipv4_addrs:
                                response = self.srv6_manager.create_ipaddr(
                                    devices[deviceid]['mgmtip'],
                                    self.grpc_client_port,
                                    ip_addr=ipv4_addr,
                                    device=interface.name,
                                    family=AF_INET
                                )
                                if response != SbStatusCode.STATUS_SUCCESS:
                                    # If the operation has failed,
                                    # report an error message
                                    logging.warning(
                                        'Cannot assign the private VPN IP '
                                        'address to the interface'
                                    )
                                    err = (
                                        status_codes_pb2
                                        .STATUS_INTERNAL_ERROR
                                    )
                                interfaces[interface.name][
                                    'ipv4_addrs'
                                ].append(ipv4_addr)
                                # Add reverse action to the rollback stack
                                rollback.push(
                                    func=exec_or_mark_device_inconsistent,
                                    rollback_func=(
                                        self.srv6_manager.remove_ipaddr
                                    ),
                                    server_ip=devices[deviceid]['mgmtip'],
                                    server_port=self.grpc_client_port,
                                    ip_addr=ipv4_addr,
                                    device=interface.name,
                                    family=AF_INET,
                                    deviceid=deviceid,
                                    tenantid=tenantid
                                )
                        if len(interface.ipv6_addrs) > 0:
                            # Remove the old IPv6 addresses
                            addrs = list()
                            nets = list()
                            for addr in interfaces[
                                interface.name
                            ]['ipv6_addrs']:
                                addrs.append(addr)
                                nets.append(str(IPv6Interface(addr).network))
                                response = self.srv6_manager.remove_ipaddr(
                                    devices[deviceid]['mgmtip'],
                                    self.grpc_client_port,
                                    ip_addr=addr,
                                    net=str(IPv6Interface(addr).network),
                                    device=interface.name,
                                    family=AF_UNSPEC
                                )
                                if response != SbStatusCode.STATUS_SUCCESS:
                                    # If the operation has failed,
                                    # report an error message
                                    logging.warning(
                                        'Cannot remove the public '
                                        'addresses from the interface'
                                    )
                                    err = (
                                        status_codes_pb2
                                        .STATUS_INTERNAL_ERROR
                                    )
                                # Add reverse action to the rollback stack
                                rollback.push(
                                    func=exec_or_mark_device_inconsistent,
                                    rollback_func=(
                                        self.srv6_manager.create_ipaddr
                                    ),
                                    server_ip=devices[deviceid]['mgmtip'],
                                    server_port=self.grpc_client_port,
                                    ip_addr=addr,
                                    device=interface.name,
                                    family=AF_INET6,
                                    net=str(IPv6Interface(addr).network),
                                    deviceid=deviceid,
                                    tenantid=tenantid
                                )
                            interfaces[interface.name]['ipv6_addrs'] = list()
                            # Add IP address to the interface
                            for ipv6_addr in interface.ipv6_addrs:
                                net = str(IPv6Interface(ipv6_addr).network)
                                response = self.srv6_manager.create_ipaddr(
                                    devices[deviceid]['mgmtip'],
                                    self.grpc_client_port,
                                    ip_addr=ipv6_addr,
                                    device=interface.name,
                                    net=net,
                                    family=AF_INET6
                                )
                                if response != SbStatusCode.STATUS_SUCCESS:
                                    # If the operation has failed,
                                    # report an error message
                                    logging.warning(
                                        'Cannot assign the private VPN IP '
                                        'address to the interface'
                                    )
                                    err = (
                                        status_codes_pb2
                                        .STATUS_INTERNAL_ERROR
                                    )
                                # Add reverse action to the rollback stack
                                # (use the address being added here, not
                                # the stale 'addr' from the removal loop)
                                rollback.push(
                                    func=exec_or_mark_device_inconsistent,
                                    rollback_func=(
                                        self.srv6_manager.remove_ipaddr
                                    ),
                                    server_ip=devices[deviceid]['mgmtip'],
                                    server_port=self.grpc_client_port,
                                    ip_addr=ipv6_addr,
                                    device=interface.name,
                                    family=AF_INET6,
                                    net=net,
                                    deviceid=deviceid,
                                    tenantid=tenantid
                                )
                                interfaces[
                                    interface.name
                                ]['ipv6_addrs'].append(ipv6_addr)
                    interfaces[interface.name]['ipv4_subnets'] = list()
                    for subnet in interface.ipv4_subnets:
                        gateway = subnet.gateway
                        subnet = subnet.subnet
                        interfaces[interface.name]['ipv4_subnets'].append(
                            {'subnet': subnet, 'gateway': gateway}
                        )
                    interfaces[interface.name]['ipv6_subnets'] = list()
                    for subnet in interface.ipv6_subnets:
                        gateway = subnet.gateway
                        subnet = subnet.subnet
                        interfaces[interface.name]['ipv6_subnets'].append(
                            {'subnet': subnet, 'gateway': gateway}
                        )
                # Push the new configuration
                if err == STATUS_OK:
                    logging.debug(
                        'The device %s has been configured successfully',
                        deviceid
                    )
                    new_devices.append(
                        {
                            'deviceid': deviceid,
                            'name': device_name,
                            'description': device_description,
                            'interfaces': interfaces,
                            'tenantid': tenantid,
                            'configured': True
                        }
                    )
                else:
                    err = (
                        'The device %s rejected the configuration' % deviceid
                    )
                    logging.error(err)
                    return OverlayServiceReply(
                        status=Status(code=STATUS_BAD_REQUEST, reason=err)
                    )
            success = storage_helper.configure_devices(new_devices)
            if success is False or success is None:
                err = 'Error configuring the devices'
                logging.error(err)
                return OverlayServiceReply(
                    status=Status(
                        code=STATUS_INTERNAL_SERVER_ERROR, reason=err
                    )
                )
            logging.info('The device configuration has been saved\n\n')
            # Setup STAMP information
            if ENABLE_STAMP_SUPPORT:
                logging.info('Configuring STAMP information\n\n')
                for device in request.configuration.devices:
                    # Extract the device ID from the configuration
                    deviceid = device.id
                    # Extract the tenant ID
                    tenantid = device.tenantid
                    # Retrieve device information
                    device = storage_helper.get_device(
                        deviceid=deviceid, tenantid=tenantid
                    )
                    if device is None:
                        logging.error('Error getting device')
                        return OverlayServiceReply(
                            status=Status(
                                code=STATUS_INTERNAL_SERVER_ERROR,
                                reason='Error getting device'
                            )
                        )
                    # Lookup the WAN interfaces
                    # TODO currently we only support a single WAN
                    # interface, so we look for the address of the first
                    # WAN interface
                    # In the future we should support multiple interfaces
                    wan_ip = None
                    wan_ifaces = None
                    for interface in device['interfaces']:
                        if interface['type'] == InterfaceType.WAN and \
                                len(interface['ipv6_addrs']) > 0:
                            wan_ip = interface['ipv6_addrs'][0].split('/')[0]
                            wan_ifaces = [interface['name']]
                            break
                    # Configure the STAMP node
                    self.stamp_controller.add_stamp_node(
                        node_id=device['deviceid'],
                        node_name=device['name'],
                        grpc_ip=device['mgmtip'],
                        grpc_port=self.grpc_client_port,
                        ip=wan_ip,
                        sender_port=42069,
                        reflector_port=862,
                        interfaces=wan_ifaces,
                        stamp_source_ipv6_address=wan_ip,
                        is_sender=True,
                        is_reflector=True,
                        tenantid=tenantid
                    )
                    # Add reverse action to the rollback stack
                    rollback.push(
                        func=exec_or_mark_device_inconsistent,
                        rollback_func=(
                            self.stamp_controller.remove_stamp_node
                        ),
                        node_id=device['deviceid'],
                        deviceid=device['deviceid'],
                        tenantid=tenantid
                    )
            # Success, commit all performed operations
            rollback.commitAll()
        # Create the response
        return OverlayServiceReply(
            status=Status(code=STATUS_OK, reason='OK')
        )
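
    # Illustrative sketch (not part of the original class): reading the
    # reply built by the GetDevices handler below. The field names are the
    # ones populated in that handler; the iteration itself is hypothetical
    # client-side code.
    @staticmethod
    def _example_read_devices_reply(reply):
        devices = list()
        for device in reply.device_information.devices:
            names = [interface.name for interface in device.interfaces]
            devices.append((device.id, device.mgmtip, names))
        return devices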

    """ Get the registered devices """

    def GetDevices(self, request, context):
        logging.debug('GetDevices request received')
        # Extract the device IDs from the request
        deviceids = list(request.deviceids)
        deviceids = deviceids if len(deviceids) > 0 else None
        # Extract the tenant ID from the request
        tenantid = request.tenantid
        tenantid = tenantid if tenantid != '' else None
        # Parameters validation
        #
        # Validate the device IDs
        if deviceids is not None:
            for deviceid in deviceids:
                logging.debug('Validating the device ID: %s' % deviceid)
                if not srv6_controller_utils.validate_deviceid(deviceid):
                    # If device ID is invalid, return an error message
                    err = 'Invalid device ID: %s' % deviceid
                    logging.warning(err)
                    return OverlayServiceReply(
                        status=Status(code=STATUS_BAD_REQUEST, reason=err)
                    )
        # Validate the tenant ID
        if tenantid is not None:
            logging.debug('Validating the tenant ID: %s' % tenantid)
            if not srv6_controller_utils.validate_tenantid(tenantid):
                # If tenant ID is invalid, return an error message
                err = 'Invalid tenant ID: %s' % tenantid
                logging.warning(err)
                return OverlayServiceReply(
                    status=Status(code=STATUS_BAD_REQUEST, reason=err)
                )
        # Create the response
        response = srv6_vpn_pb2.InventoryServiceReply()
        # Iterate on devices and fill the response message
        devices = storage_helper.get_devices(
            deviceids=deviceids, tenantid=tenantid
        )
        if devices is None:
            err = 'Error getting devices'
            logging.error(err)
            return OverlayServiceReply(
                status=Status(code=STATUS_INTERNAL_SERVER_ERROR, reason=err)
            )
        for _device in devices:
            device = response.device_information.devices.add()
            device.id = text_type(_device['deviceid'])
            _interfaces = _device.get('interfaces', [])
            for ifinfo in _interfaces:
                interface = device.interfaces.add()
                interface.name = ifinfo['name']
                interface.mac_addr = ifinfo['mac_addr']
                interface.ipv4_addrs.extend(ifinfo['ipv4_addrs'])
                interface.ipv6_addrs.extend(ifinfo['ipv6_addrs'])
                interface.ext_ipv4_addrs.extend(ifinfo['ext_ipv4_addrs'])
                interface.ext_ipv6_addrs.extend(ifinfo['ext_ipv6_addrs'])
                for _subnet in ifinfo['ipv4_subnets']:
                    subnet = interface.ipv4_subnets.add()
                    subnet.subnet = _subnet['subnet']
                    subnet.gateway = _subnet['gateway']
                for _subnet in ifinfo['ipv6_subnets']:
                    subnet = interface.ipv6_subnets.add()
                    subnet.subnet = _subnet['subnet']
                    subnet.gateway = _subnet['gateway']
                interface.type = ifinfo['type']
            mgmtip = _device.get('mgmtip')
            name = _device.get('name')
            description = _device.get('description')
            connected = _device.get('connected')
            configured = _device.get('configured')
            enabled = _device.get('enabled')
            if mgmtip is not None:
                device.mgmtip = mgmtip
            if name is not None:
                device.name = name
            if description is not None:
                device.description = description
            if connected is not None:
                device.connected = connected
            if configured is not None:
                device.configured = configured
            if enabled is not None:
                device.enabled = enabled
        # Return the response
        logging.debug('Sending response:\n%s' % response)
        response.status.code = STATUS_OK
        response.status.reason = 'OK'
        return response

    """ Get the topology information """

    def GetTopologyInformation(self, request, context):
        logging.debug('GetTopologyInformation request received')
        # Create the response
        response = srv6_vpn_pb2.InventoryServiceReply()
        # Build the topology
        topology = storage_helper.get_topology()
        if topology is None:
            err = 'Error getting the topology'
            logging.error(err)
            return OverlayServiceReply(
                status=Status(
                    code=STATUS_INTERNAL_SERVER_ERROR, reason=err
                )
            )
        nodes = topology['nodes']
        links = topology['links']
        devices = set()
        # Iterate on nodes
        for node in nodes:
            if node['type'] != 'router':
                # Skip stub networks
                continue
            devices.add(node['id'])
            response.topology_information.devices.append(node['id'])
        # Iterate on links
        for _link in links:
            if _link[0] in devices and _link[1] in devices:
                link = response.topology_information.links.add()
                link.l_device = _link[0]
                link.r_device = _link[1]
        # Return the response
        logging.debug('Sending response:\n%s' % response)
        response.status.code = STATUS_OK
        response.status.reason = 'OK'
        return response
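
    # Illustrative sketch (not part of the original class): consuming the
    # reply built by GetTopologyInformation above; devices are node IDs
    # and links are (l_device, r_device) pairs.
    @staticmethod
    def _example_read_topology_reply(reply):
        nodes = list(reply.topology_information.devices)
        edges = [
            (link.l_device, link.r_device)
            for link in reply.topology_information.links
        ]
        return nodes, edges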

    def _unregister_device(self, deviceid, tenantid, ignore_errors=False):
        # Parameters validation
        #
        # Validate the tenant ID
        logging.debug('Validating the tenant ID: %s' % tenantid)
        tenant_exists = storage_helper.tenant_exists(tenantid)
        if tenant_exists is None:
            err = 'Error while connecting to the controller state'
            logging.error(err)
            return STATUS_INTERNAL_SERVER_ERROR, err
        elif tenant_exists is False:
            # If tenant ID is invalid, return an error message
            err = 'Tenant not found: %s' % tenantid
            logging.warning(err)
            return STATUS_BAD_REQUEST, err
        # Validate the device ID
        logging.debug('Validating the device ID: %s' % deviceid)
        devices = storage_helper.get_devices(
            deviceids=[deviceid]
        )
        if devices is None:
            err = 'Error getting devices'
            logging.error(err)
            return STATUS_INTERNAL_SERVER_ERROR, err
        elif len(devices) == 0:
            # If device ID is invalid, return an error message
            err = 'Device not found: %s' % deviceid
            logging.warning(err)
            return STATUS_BAD_REQUEST, err
        # The device must belong to the tenant
        device = devices[0]
        if device['tenantid'] != tenantid:
            err = (
                'Cannot unregister the device. '
                'The device %s does not belong to the tenant %s'
                % (deviceid, tenantid)
            )
            logging.warning(err)
            return STATUS_BAD_REQUEST, err
        # Check tunnels stats
        # If the device has some overlays configured,
        # it is not possible to unregister it
        num = storage_helper.get_num_tunnels(deviceid, tenantid)
        if num is None:
            err = 'Error getting tunnels stats'
            logging.error(err)
            return STATUS_INTERNAL_SERVER_ERROR, err
        elif num != 0:
            err = (
                'Cannot unregister the device %s. '
                'The device has %s tunnels registered' % (deviceid, num)
            )
            logging.warning(err)
            return STATUS_BAD_REQUEST, err
        # All checks passed
        #
        # Remove current STAMP information
        if ENABLE_STAMP_SUPPORT:
            logging.info('Removing current STAMP information\n\n')
            # Remove the STAMP information
            try:
                stamp_node = (
                    self.stamp_controller.storage.get_stamp_node(
                        node_id=deviceid, tenantid=tenantid
                    )
                )
                if stamp_node is not None:
                    self.stamp_controller.remove_stamp_node(
                        node_id=deviceid, tenantid=tenantid
                    )
            except NodeIdNotFoundError:
                logging.debug(
                    f'STAMP Node {deviceid} does not exist. '
                    'Nothing to do.'
                )
            except STAMPSessionsExistError:
                err = (
                    f'STAMP Node {deviceid} is participating in one '
                    'or more STAMP sessions. Delete all existing '
                    'sessions before changing device configuration.'
                )
                logging.error(err)
                return STATUS_BAD_REQUEST, err
            except grpc.RpcError:
                if ignore_errors:
                    err = (
                        'Unregister STAMP information failed. Setting '
                        'reboot required flag.'
                    )
                    logging.warning(err)
                    # Change device state to reboot required
                    success = storage_helper.change_device_state(
                        deviceid=deviceid,
                        tenantid=tenantid,
                        new_state=(
                            storage_helper.DeviceState.REBOOT_REQUIRED
                        )
                    )
                    if success is False or success is None:
                        err = 'Error changing the device state'
                        logging.error(err)
                        return STATUS_INTERNAL_SERVER_ERROR, err
                else:
                    err = (
                        'Cannot unregister the device. '
                        'Error while unregistering STAMP information'
                    )
                    logging.error(err)
                    return STATUS_INTERNAL_SERVER_ERROR, err
        # Let's unregister the device
        #
        # Send shutdown command to device
        res = self.srv6_manager.shutdown_device(
            device['mgmtip'], self.grpc_client_port
        )
        if res != SbStatusCode.STATUS_SUCCESS:
            if ignore_errors:
                err = 'Device shutdown failed. Setting reboot required flag.'
                logging.warning(err)
                # Change device state to reboot required
                success = storage_helper.change_device_state(
                    deviceid=deviceid,
                    tenantid=tenantid,
                    new_state=storage_helper.DeviceState.REBOOT_REQUIRED
                )
                if success is False or success is None:
                    err = 'Error changing the device state'
                    logging.error(err)
                    return STATUS_INTERNAL_SERVER_ERROR, err
            else:
                err = (
                    'Cannot unregister the device. '
                    'Error while shutting down the device'
                )
                logging.error(err)
                return STATUS_INTERNAL_SERVER_ERROR, err
        # Remove device from controller state
        success = storage_helper.unregister_device(
            deviceid, tenantid
        )
        if success is None or success is False:
            err = (
                'Cannot unregister the device. '
                'Error while updating the controller state'
            )
            logging.error(err)
            return STATUS_INTERNAL_SERVER_ERROR, err
        # Remove node from STAMP inventory
        stamp_node = self.stamp_controller.storage.get_stamp_node(
            node_id=deviceid, tenantid=tenantid
        )
        if stamp_node is not None:
            try:
                self.stamp_controller.remove_stamp_node(
                    node_id=deviceid, tenantid=tenantid
                )
            except Exception:
                # TODO replace with a more specific exception
                self.stamp_controller.storage.remove_stamp_node(
                    node_id=deviceid, tenantid=tenantid
                )
        # Success
        logging.info('Device unregistered successfully\n\n')
        return STATUS_OK, 'OK'

    """ Unregister a device """

    def UnregisterDevice(self, request, context):
        logging.info('UnregisterDevice request received:\n%s', request)
        # Parameters extraction
        #
        # Extract the tenant ID
        tenantid = request.tenantid
        # Extract the device ID
        deviceid = request.deviceid
        # Unregister the device
        code, reason = self._unregister_device(
            deviceid, tenantid, ignore_errors=True
        )
        # Create the response
        return OverlayServiceReply(status=Status(code=code, reason=reason))
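
    # Illustrative sketch (not part of the original class): the slice
    # representation consumed by CreateOverlay below. An overlay intent
    # carries a list of (deviceid, interface_name) slices; the handler
    # normalizes them into dicts like these. The IDs are hypothetical.
    @staticmethod
    def _example_overlay_slices():
        return [
            {'deviceid': 'device1', 'interface_name': 'eth1'},
            {'deviceid': 'device2', 'interface_name': 'eth1'}
        ]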

    """Create a VPN from an intent received through the
    northbound interface"""

    def CreateOverlay(self, request, context):
        logging.info('CreateOverlay request received:\n%s', request)
        with RollbackContext() as rollback:
            # Extract the intents from the request message
            for intent in request.intents:
                logging.info('Processing the intent:\n%s' % intent)
                # Parameters extraction
                #
                # Extract the overlay tenant ID from the intent
                tenantid = intent.tenantid
                # Extract the overlay type from the intent
                overlay_type = intent.overlay_type
                # Extract the overlay name from the intent
                overlay_name = intent.overlay_name
                # Extract the interfaces
                slices = list()
                _devices = set()
                for _slice in intent.slices:
                    deviceid = _slice.deviceid
                    interface_name = _slice.interface_name
                    # Add the slice to the slices set
                    slices.append(
                        {
                            'deviceid': deviceid,
                            'interface_name': interface_name
                        }
                    )
                    # Add the device to the devices set
                    _devices.add(deviceid)
                # Extract tunnel mode
                tunnel_name = intent.tunnel_mode
                # Extract tunnel info
                tunnel_info = intent.tunnel_info
                # Parameters validation
                #
                # Validate the tenant ID
                logging.debug('Validating the tenant ID: %s' % tenantid)
                if not srv6_controller_utils.validate_tenantid(tenantid):
                    # If tenant ID is invalid, return an error message
                    err = 'Invalid tenant ID: %s' % tenantid
                    logging.warning(err)
                    return OverlayServiceReply(
                        status=Status(code=STATUS_BAD_REQUEST, reason=err)
                    )
                # Check if the tenant is configured
                is_config = storage_helper.is_tenant_configured(
                    tenantid
                )
                if is_config is None:
                    err = 'Error while checking tenant configuration'
                    logging.error(err)
                    return TenantReply(
                        status=Status(
                            code=STATUS_INTERNAL_SERVER_ERROR, reason=err
                        )
                    )
                elif is_config is False:
                    err = (
                        'Cannot create overlay for an unconfigured tenant. '
                        'Tenant not found or error during the '
                        'connection to the db'
                    )
                    logging.warning(err)
                    return TenantReply(
                        status=Status(code=STATUS_BAD_REQUEST, reason=err)
                    )
                # Validate the overlay type
                logging.debug(
                    'Validating the overlay type: %s' % overlay_type
                )
                if not srv6_controller_utils.validate_overlay_type(
                    overlay_type
                ):
                    # If the overlay type is invalid, return an error
                    # message
                    err = 'Invalid overlay type: %s' % overlay_type
                    logging.warning(err)
                    return OverlayServiceReply(
                        status=Status(code=STATUS_BAD_REQUEST, reason=err)
                    )
                # Validate the overlay name
                logging.debug(
                    'Validating the overlay name: %s' % overlay_name
                )
                if not srv6_controller_utils.validate_overlay_name(
                    overlay_name
                ):
                    # If the overlay name is invalid, return an error
                    # message
                    err = 'Invalid overlay name: %s' % overlay_name
                    logging.warning(err)
                    return OverlayServiceReply(
                        status=Status(code=STATUS_BAD_REQUEST, reason=err)
                    )
                # Validate the tunnel mode
                logging.debug('Validating the tunnel mode: %s' % tunnel_name)
                if not srv6_controller_utils.validate_tunnel_mode(
                    tunnel_name, self.supported_tunnel_modes
                ):
                    # If the tunnel mode is invalid, return an error
                    # message
                    err = 'Invalid tunnel mode: %s' % tunnel_name
                    logging.warning(err)
                    return OverlayServiceReply(
                        status=Status(code=STATUS_BAD_REQUEST, reason=err)
                    )
                # Let's check if the overlay does not exist
                logging.debug(
                    'Checking if the overlay name is available: %s'
                    % overlay_name
                )
                exists = storage_helper.overlay_exists(
                    overlay_name, tenantid
                )
                if exists is True:
                    # If the overlay already exists, return an error
                    # message
                    err = (
                        'Overlay name %s is already in use for tenant %s'
                        % (overlay_name, tenantid)
                    )
                    logging.warning(err)
                    return OverlayServiceReply(
                        status=Status(code=STATUS_BAD_REQUEST, reason=err)
                    )
                elif exists is None:
                    err = 'Error validating the overlay'
                    logging.error(err)
                    return OverlayServiceReply(
                        status=Status(
                            code=STATUS_INTERNAL_SERVER_ERROR, reason=err
                        )
                    )
                # Get the devices
                devices = storage_helper.get_devices(
                    deviceids=_devices, return_dict=True
                )
                if devices is None:
                    err = 'Error getting devices'
                    logging.error(err)
                    return OverlayServiceReply(
                        status=Status(
                            code=STATUS_INTERNAL_SERVER_ERROR, reason=err
                        )
                    )
                # Devices validation
                for deviceid in devices:
                    # Let's check if the router exists
                    if deviceid not in devices:
                        # If the device does not exist, return an error
                        # message
                        err = 'Device not found %s' % deviceid
                        logging.warning(err)
                        return OverlayServiceReply(
                            status=Status(
                                code=STATUS_BAD_REQUEST, reason=err
                            )
                        )
                    # Check if the device is connected
                    if not devices[deviceid]['connected']:
                        # If the device is not connected, return an error
                        # message
                        err = 'The device %s is not connected' % deviceid
                        logging.warning(err)
                        return OverlayServiceReply(
                            status=Status(
                                code=STATUS_BAD_REQUEST, reason=err
                            )
                        )
                    # Check if the device is enabled
                    if not devices[deviceid]['enabled']:
                        # If the device is not enabled, return an error
                        # message
                        err = 'The device %s is not enabled' % deviceid
                        logging.warning(err)
                        return OverlayServiceReply(
                            status=Status(
                                code=STATUS_BAD_REQUEST, reason=err
                            )
                        )
                    # Check if the device has at least a WAN interface
                    wan_found = False
                    for interface in devices[deviceid]['interfaces']:
                        if interface['type'] == InterfaceType.WAN:
                            wan_found = True
                    if not wan_found:
                        # No WAN interfaces found on the device
                        err = (
                            'No WAN interfaces found on the device %s'
                            % deviceid
                        )
                        logging.warning(err)
                        return OverlayServiceReply(
                            status=Status(
                                code=STATUS_BAD_REQUEST, reason=err
                            )
                        )
                # Convert interfaces list to a dict representation, keyed
                # by interface name (one dict per device)
                # This step simplifies future processing
                for deviceid in devices:
                    interfaces = dict()
                    for interface in devices[deviceid]['interfaces']:
                        interfaces[interface['name']] = interface
                    devices[deviceid]['interfaces'] = interfaces
                # Validate the slices included in the intent
                for _slice in slices:
                    logging.debug('Validating the slice: %s' % _slice)
                    # A slice is a tuple (deviceid, interface_name)
                    #
                    # Extract the device ID
                    deviceid = _slice['deviceid']
                    # Extract the interface name
                    interface_name = _slice['interface_name']
                    # Let's check if the router exists
                    if deviceid not in devices:
                        # If the device does not exist, return an error
                        # message
                        err = 'Device not found %s' % deviceid
                        logging.warning(err)
                        return OverlayServiceReply(
                            status=Status(
                                code=STATUS_BAD_REQUEST, reason=err
                            )
                        )
                    # Check if the device is enabled
                    if not devices[deviceid]['enabled']:
                        # If the device is not enabled, return an error
                        # message
                        err = 'The device %s is not enabled' % deviceid
                        logging.warning(err)
                        return OverlayServiceReply(
                            status=Status(
                                code=STATUS_BAD_REQUEST, reason=err
                            )
                        )
                    # Check if the device is connected
                    if not devices[deviceid]['connected']:
                        # If the device is not connected, return an error
                        # message
                        err = 'The device %s is not connected' % deviceid
                        logging.warning(err)
                        return OverlayServiceReply(
                            status=Status(
                                code=STATUS_BAD_REQUEST, reason=err
                            )
                        )
                    # Let's check if the interface exists
                    if interface_name not in devices[deviceid]['interfaces']:
                        # If the interface does not exist, return an error
                        # message
                        err = 'The interface does not exist'
                        logging.warning(err)
                        return OverlayServiceReply(
                            status=Status(
                                code=STATUS_BAD_REQUEST, reason=err
                            )
                        )
                    # Check if the interface type is LAN
                    if devices[deviceid]['interfaces'][
                        interface_name
                    ]['type'] != InterfaceType.LAN:
                        # The interface type is not LAN
                        err = (
                            'Cannot add non-LAN interface to the overlay: '
                            '%s (device %s)' % (interface_name, deviceid)
                        )
                        logging.warning(err)
                        return OverlayServiceReply(
                            status=Status(
                                code=STATUS_BAD_REQUEST, reason=err
                            )
                        )
                    # Check if the slice is already assigned to an overlay
                    _overlay = storage_helper.get_overlay_containing_slice(
                        _slice, tenantid
                    )
                    if _overlay is not None:
                        # Slice already assigned to an overlay
                        err = (
                            'Cannot create overlay: the slice %s is '
                            'already assigned to the overlay %s'
                            % (_slice, _overlay['_id'])
                        )
                        logging.warning(err)
                        return OverlayServiceReply(
                            status=Status(
                                code=STATUS_BAD_REQUEST, reason=err
                            )
                        )
                    # Check for IP addresses
                    if overlay_type == OverlayType.IPv4Overlay:
                        addrs = storage_helper.get_ipv4_addresses(
                            deviceid=deviceid,
                            tenantid=tenantid,
                            interface_name=interface_name
                        )
                        if len(addrs) == 0:
                            # No IPv4 address assigned to the interface
                            err = (
                                'Cannot create overlay: the slice %s has '
                                'no IPv4 addresses; at least one IPv4 '
                                'address is required to create an IPv4 '
                                'Overlay' % _slice
                            )
                            logging.error(err)
                            return OverlayServiceReply(
                                status=Status(
                                    code=STATUS_BAD_REQUEST, reason=err
                                )
                            )
                        subnets = storage_helper.get_ipv4_subnets(
                            deviceid=deviceid,
                            tenantid=tenantid,
                            interface_name=interface_name
                        )
                        if len(subnets) == 0:
                            # No IPv4 subnet assigned to the interface
                            err = (
                                'Cannot create overlay: the slice %s has '
                                'no IPv4 subnets; at least one IPv4 '
                                'subnet is required to create an IPv4 '
                                'Overlay' % _slice
                            )
                            logging.error(err)
                            return OverlayServiceReply(
                                status=Status(
                                    code=STATUS_BAD_REQUEST, reason=err
                                )
                            )
                    elif overlay_type == OverlayType.IPv6Overlay:
                        addrs = storage_helper.get_ipv6_addresses(
                            deviceid=deviceid,
                            tenantid=tenantid,
                            interface_name=interface_name
                        )
                        if len(addrs) == 0:
                            # No IPv6 address assigned to the interface
                            err = (
                                'Cannot create overlay: the slice %s has '
                                'no IPv6 addresses; at least one IPv6 '
                                'address is required to create an IPv6 '
                                'Overlay' % _slice
                            )
                            logging.error(err)
                            return OverlayServiceReply(
                                status=Status(
                                    code=STATUS_BAD_REQUEST, reason=err
                                )
                            )
                        subnets = storage_helper.get_ipv6_subnets(
                            deviceid=deviceid,
                            tenantid=tenantid,
                            interface_name=interface_name
                        )
                        if len(subnets) == 0:
                            # No IPv6 subnet assigned to the interface
                            err = (
                                'Cannot create overlay: the slice %s has '
                                'no IPv6 subnets; at least one IPv6 '
                                'subnet is required to create an IPv6 '
                                'Overlay' % _slice
                            )
                            logging.error(err)
                            return OverlayServiceReply(
                                status=Status(
                                    code=STATUS_BAD_REQUEST, reason=err
                                )
                            )
                # Check that the subnets of the slices do not overlap
                for slice1 in slices:
                    # Extract the device ID
                    deviceid_1 = slice1['deviceid']
                    # Extract the interface name
                    interface_name_1 = slice1['interface_name']
                    for slice2 in slices:
                        if slice2 == slice1:
                            continue
                        # Extract the device ID
                        deviceid_2 = slice2['deviceid']
                        # Extract the interface name
                        interface_name_2 = slice2['interface_name']
                        if overlay_type == OverlayType.IPv4Overlay:
                            subnets1 = storage_helper.get_ipv4_subnets(
                                deviceid=deviceid_1,
                                tenantid=tenantid,
                                interface_name=interface_name_1
                            )
                            subnets2 = storage_helper.get_ipv4_subnets(
                                deviceid=deviceid_2,
                                tenantid=tenantid,
                                interface_name=interface_name_2
                            )
                            for subnet1 in subnets1:
                                subnet1 = subnet1['subnet']
                                for subnet2 in subnets2:
                                    subnet2 = subnet2['subnet']
                                    if IPv4Network(subnet1).overlaps(
                                        IPv4Network(subnet2)
                                    ):
                                        err = (
                                            'Cannot create overlay: the '
                                            'slices %s and %s have '
                                            'overlapping subnets'
                                            % (slice1, slice2)
                                        )
                                        logging.error(err)
                                        return OverlayServiceReply(
                                            status=Status(
                                                code=STATUS_BAD_REQUEST,
                                                reason=err
                                            )
                                        )
                        elif overlay_type == OverlayType.IPv6Overlay:
                            subnets1 = storage_helper.get_ipv6_subnets(
                                deviceid=deviceid_1,
                                tenantid=tenantid,
                                interface_name=interface_name_1
                            )
                            subnets2 = storage_helper.get_ipv6_subnets(
                                deviceid=deviceid_2,
                                tenantid=tenantid,
                                interface_name=interface_name_2
                            )
                            for subnet1 in subnets1:
                                subnet1 = subnet1['subnet']
                                for subnet2 in subnets2:
                                    subnet2 = subnet2['subnet']
                                    if IPv6Network(subnet1).overlaps(
                                        IPv6Network(subnet2)
                                    ):
                                        err = (
                                            'Cannot create overlay: the '
                                            'slices %s and %s have '
                                            'overlapping subnets'
                                            % (slice1, slice2)
                                        )
                                        logging.error(err)
                                        return OverlayServiceReply(
                                            status=Status(
                                                code=STATUS_BAD_REQUEST,
                                                reason=err
                                            )
                                        )
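                # Worked example of the overlap rule above (illustrative,
                # not executed): IPv4Network('10.0.1.0/24').overlaps(
                # IPv4Network('10.0.0.0/16')) is True, so two slices
                # advertising those subnets cannot join the same IPv4
                # overlay, while 10.0.1.0/24 and 10.0.2.0/24 can coexist.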
                # Check the WAN connectivity available on the devices
                can_use_ipv6_addr_for_wan = True
                can_use_ipv4_addr_for_wan = True
                for _slice in slices:
                    # Get WAN interface
                    wan_interface = storage_helper.get_wan_interfaces(
                        deviceid=_slice['deviceid'], tenantid=tenantid
                    )[0]
                    # Check if WAN interface has IPv6 connectivity
                    addrs = storage_helper.get_ext_ipv6_addresses(
                        deviceid=_slice['deviceid'],
                        tenantid=tenantid,
                        interface_name=wan_interface
                    )
                    if addrs is None or len(addrs) == 0:
                        can_use_ipv6_addr_for_wan = False
                    # Check if WAN interface has IPv4 connectivity
                    addrs = storage_helper.get_ext_ipv4_addresses(
                        deviceid=_slice['deviceid'],
                        tenantid=tenantid,
                        interface_name=wan_interface
                    )
                    if addrs is None or len(addrs) == 0:
                        can_use_ipv4_addr_for_wan = False
                if (
                    not can_use_ipv6_addr_for_wan
                    and not can_use_ipv4_addr_for_wan
                ):
                    err = (
                        'Cannot establish a full-mesh between all the '
                        'WAN interfaces'
                    )
                    logging.error(err)
                    return OverlayServiceReply(
                        status=Status(code=STATUS_BAD_REQUEST, reason=err)
                    )
                if tunnel_name == 'SRv6' and not can_use_ipv6_addr_for_wan:
                    err = (
                        'IPv6 transport not available: cannot create a '
                        'SRv6 overlay'
                    )
                    logging.error(err)
                    return OverlayServiceReply(
                        status=Status(code=STATUS_BAD_REQUEST, reason=err)
                    )
                # Prefer IPv6 transport, if available
                transport_proto = 'ipv4'
                if can_use_ipv6_addr_for_wan:
                    transport_proto = 'ipv6'
                # For SRv6 overlays, Segment Routing transparency must be
                # T0 or T1 for each device, otherwise the SRv6 full-mesh
                # overlay cannot be created
                if tunnel_name == 'SRv6':
                    for _slice in slices:
                        incoming_sr_transparency = (
                            storage_helper.get_incoming_sr_transparency(
                                _slice['deviceid'], tenantid
                            )
                        )
                        outgoing_sr_transparency = (
                            storage_helper.get_outgoing_sr_transparency(
                                _slice['deviceid'], tenantid
                            )
                        )
                        # is_ip6tnl_forced = storage_helper.is_ip6tnl_forced(
                        #     _slice['deviceid'], tenantid
                        # )
                        # is_srh_forced = storage_helper.is_srh_forced(
                        #     _slice['deviceid'], tenantid
                        # )
                        if incoming_sr_transparency == 'op':
                            err = (
                                'Device %s has incoming SR Transparency '
                                'set to OP. SRv6 overlays are not '
                                'supported for OP.' % _slice['deviceid']
                            )
                            logging.error(err)
                            return OverlayServiceReply(
                                status=Status(
                                    code=STATUS_BAD_REQUEST, reason=err
                                )
                            )
                        if outgoing_sr_transparency == 'op':
                            err = (
                                'Device %s has outgoing SR Transparency '
                                'set to OP. SRv6 overlays are not '
                                'supported for OP.' % _slice['deviceid']
                            )
                            logging.error(err)
                            return OverlayServiceReply(
                                status=Status(
                                    code=STATUS_BAD_REQUEST, reason=err
                                )
                            )
                        # if (
                        #     incoming_sr_transparency == 't1'
                        #     and is_srh_forced
                        # ):
                        #     err = (
                        #         'Device %s has incoming SR Transparency '
                        #         'set to T1 and force-srh set. '
                        #         'Cannot use an SRH for device with '
                        #         'incoming Transparency T1.'
                        #         % _slice['deviceid']
                        #     )
                        #     logging.error(err)
                        #     return OverlayServiceReply(
                        #         status=Status(
                        #             code=STATUS_BAD_REQUEST,
                        #             reason=err
                        #         )
                        #     )
                # All the devices must belong to the same tenant
                for device in devices.values():
                    if device['tenantid'] != tenantid:
                        err = (
                            'Error while processing the intent: '
                            'All the devices must belong to the '
                            'same tenant %s' % tenantid
                        )
                        logging.warning(err)
                        return OverlayServiceReply(
                            status=Status(
                                code=STATUS_BAD_REQUEST, reason=err
                            )
                        )
                logging.info('All checks passed')
                # All checks passed
                #
                # Save the overlay to the controller state
                overlayid = storage_helper.create_overlay(
                    overlay_name,
                    overlay_type,
                    slices,
                    tenantid,
                    tunnel_name,
                    transport_proto=transport_proto
                )
                if overlayid is None:
                    err = 'Cannot save the overlay to the controller state'
                    logging.error(err)
                    return OverlayServiceReply(
                        status=Status(
                            code=STATUS_INTERNAL_SERVER_ERROR, reason=err
                        )
                    )
                # Add reverse action to the rollback stack
                rollback.push(
                    func=storage_helper.remove_overlay,
                    overlayid=overlayid,
                    tenantid=tenantid
                )
                # Get tunnel mode
                tunnel_mode = self.tunnel_modes[tunnel_name]
                # Let's create the overlay
                #
                # Create overlay data structure
                status_code = tunnel_mode.init_overlay_data(
                    overlayid, overlay_name, tenantid, tunnel_info
                )
                if status_code != STATUS_OK:
                    err = (
                        'Cannot initialize overlay data '
                        '(overlay %s, tenant %s)' % (overlay_name, tenantid)
                    )
                    logging.warning(err)
                    return OverlayServiceReply(
                        status=Status(code=status_code, reason=err)
                    )
                # Add reverse action to the rollback stack
                rollback.push(
                    func=tunnel_mode.destroy_overlay_data,
                    overlayid=overlayid,
                    overlay_name=overlay_name,
                    tenantid=tenantid,
                    overlay_info=tunnel_info
                )
                # Iterate on slices and add to the overlay
                configured_slices = list()
                for site1 in slices:
                    deviceid = site1['deviceid']
                    interface_name = site1['interface_name']
                    # Init tunnel mode on the devices
                    counter = storage_helper.get_and_inc_tunnel_mode_counter(
                        tunnel_name, deviceid, tenantid
                    )
                    if counter == 0:
                        # Add reverse action to the rollback stack
                        rollback.push(
                            func=(
                                storage_helper.dec_and_get_tunnel_mode_counter
                            ),
                            tunnel_name=tunnel_name,
                            deviceid=deviceid,
                            tenantid=tenantid
                        )
                        status_code = tunnel_mode.init_tunnel_mode(
                            deviceid, tenantid, tunnel_info
                        )
                        if status_code != STATUS_OK:
                            err = (
                                'Cannot initialize tunnel mode (device %s '
                                'tenant %s)' % (deviceid, tenantid)
                            )
                            logging.warning(err)
                            return OverlayServiceReply(
                                status=Status(code=status_code, reason=err)
                            )
                        # Add reverse action to the rollback stack
                        rollback.push(
                            func=tunnel_mode.destroy_tunnel_mode,
                            deviceid=deviceid,
                            tenantid=tenantid,
                            overlay_info=tunnel_info
                        )
                    elif counter is None:
                        err = 'Cannot increase tunnel mode counter'
                        logging.error(err)
                        return OverlayServiceReply(
                            status=Status(
                                code=STATUS_INTERNAL_SERVER_ERROR,
                                reason=err
                            )
                        )
                    else:
                        # Success
                        # Add reverse action to the rollback stack
                        rollback.push(
                            func=(
                                storage_helper.dec_and_get_tunnel_mode_counter
                            ),
                            tunnel_name=tunnel_name,
                            deviceid=deviceid,
                            tenantid=tenantid
                        )
                    # Check if we have already configured the overlay on
                    # the device
                    if deviceid in _devices:
                        # Init overlay on the devices
                        status_code = tunnel_mode.init_overlay(
                            overlayid,
                            overlay_name,
                            overlay_type,
                            tenantid,
                            deviceid,
                            tunnel_info
                        )
                        if status_code != STATUS_OK:
                            err = (
                                'Cannot initialize overlay (overlay %s '
                                'device %s, tenant %s)'
                                % (overlay_name, deviceid, tenantid)
                            )
                            logging.warning(err)
                            return OverlayServiceReply(
                                status=Status(code=status_code, reason=err)
                            )
                        # Add reverse action to the rollback stack
                        rollback.push(
                            func=tunnel_mode.destroy_overlay,
                            overlayid=overlayid,
                            overlay_name=overlay_name,
                            overlay_type=overlay_type,
                            tenantid=tenantid,
                            deviceid=deviceid,
                            overlay_info=tunnel_info
                        )
                        # Remove device from the to-be-configured devices
                        # set
                        _devices.remove(deviceid)
                    # Add the interface to the overlay
                    status_code = tunnel_mode.add_slice_to_overlay(
                        overlayid,
                        overlay_name,
                        deviceid,
                        interface_name,
                        tenantid,
                        tunnel_info
                    )
                    if status_code != STATUS_OK:
                        err = (
                            'Cannot add slice to overlay (overlay %s, '
                            'device %s, slice %s, tenant %s)'
                            % (overlay_name, deviceid, interface_name,
                               tenantid)
                        )
                        logging.warning(err)
                        return OverlayServiceReply(
                            status=Status(code=status_code, reason=err)
                        )
                    # Add reverse action to the rollback stack
                    rollback.push(
                        func=tunnel_mode.remove_slice_from_overlay,
                        overlayid=overlayid,
                        overlay_name=overlay_name,
                        deviceid=deviceid,
                        interface_name=interface_name,
                        tenantid=tenantid,
                        overlay_info=tunnel_info
                    )
                    # Create the tunnel between all the pairs of interfaces
                    for site2 in configured_slices:
                        if site1['deviceid'] != site2['deviceid']:
                            status_code = tunnel_mode.create_tunnel(
                                overlayid,
                                overlay_name,
                                overlay_type,
                                site1,
                                site2,
                                tenantid,
                                tunnel_info
                            )
                            if status_code != STATUS_OK:
                                err = (
                                    'Cannot create tunnel (overlay %s '
                                    'site1 %s site2 %s, tenant %s)'
                                    % (overlay_name, site1, site2, tenantid)
                                )
                                logging.warning(err)
                                return OverlayServiceReply(
                                    status=Status(
                                        code=status_code, reason=err
                                    )
                                )
                            # Add reverse action to the rollback stack
                            rollback.push(
                                func=tunnel_mode.remove_tunnel,
                                overlayid=overlayid,
                                overlay_name=overlay_name,
                                overlay_type=overlay_type,
                                l_slice=site1,
                                r_slice=site2,
                                tenantid=tenantid,
                                overlay_info=tunnel_info
                            )
                    # Add the slice to the configured set
                    configured_slices.append(site1)
            # Success, commit all performed operations
            rollback.commitAll()
        logging.info('All the intents have been processed successfully\n\n')
        # Create the response
        return OverlayServiceReply(
            status=Status(code=STATUS_OK, reason='OK')
        )
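
    # Worked example of the pairing performed by CreateOverlay above
    # (illustrative): with slices A, B, C on three different devices,
    # the loop adds A (no tunnels yet), then B (tunnel B-A), then C
    # (tunnels C-A and C-B), yielding the full mesh A-B, A-C, B-C;
    # slices on the same device never get a tunnel between them.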

    """Remove a VPN"""

    def RemoveOverlay(self, request, context):
        logging.info('RemoveOverlay request received:\n%s', request)
        # Extract the intents from the request message
        for intent in request.intents:
            # Parameters extraction
            #
            # Extract the overlay ID from the intent
            overlayid = intent.overlayid
            # Extract the tenant ID from the intent
            tenantid = intent.tenantid
            # Extract tunnel info
            tunnel_info = intent.tunnel_info
            # Validate the tenant ID
            logging.debug('Validating the tenant ID: %s' % tenantid)
            if not srv6_controller_utils.validate_tenantid(tenantid):
                # If tenant ID is invalid, return an error message
                err = 'Invalid tenant ID: %s' % tenantid
                logging.warning(err)
                return OverlayServiceReply(
                    status=Status(code=STATUS_BAD_REQUEST, reason=err)
                )
            # Check if the tenant is configured
            is_config = storage_helper.is_tenant_configured(
                tenantid
            )
            if is_config is None:
                err = 'Error while checking tenant configuration'
                logging.error(err)
                return TenantReply(
                    status=Status(
                        code=STATUS_INTERNAL_SERVER_ERROR, reason=err
                    )
                )
            elif is_config is False:
                err = (
                    'Cannot remove overlay for an unconfigured tenant. '
                    'Tenant not found or error during the '
                    'connection to the db'
                )
                logging.warning(err)
                return TenantReply(
                    status=Status(code=STATUS_BAD_REQUEST, reason=err)
                )
            # Remove the overlay
            code, reason = self._RemoveOverlay(
                overlayid, tenantid, tunnel_info
            )
            if code != STATUS_OK:
                return OverlayServiceReply(
                    status=Status(code=code, reason=reason)
                )
        logging.info('All the intents have been processed successfully\n\n')
        # Create the response
        return OverlayServiceReply(
            status=Status(code=STATUS_OK, reason='OK')
        )
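
    # Note on teardown order (illustrative summary of the helper below):
    # _RemoveOverlay undoes CreateOverlay in reverse: first the tunnels
    # between slice pairs, then each slice, then the per-device overlay
    # and (when the usage counter drops to zero) the tunnel mode, and
    # finally the overlay data structure and the controller state entry.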

    def _RemoveOverlay(self, overlayid, tenantid, tunnel_info):
        with RollbackContext() as rollback:
            # Parameters validation
            #
            # Let's check if the overlay exists
            logging.debug('Checking the overlay: %s' % overlayid)
            overlays = storage_helper.get_overlays(
                overlayids=[overlayid]
            )
            if overlays is None:
                err = 'Error getting the overlay'
                logging.error(err)
                return STATUS_INTERNAL_SERVER_ERROR, err
            elif len(overlays) == 0:
                # If the overlay does not exist, return an error message
                err = 'The overlay %s does not exist' % overlayid
                logging.warning(err)
                return STATUS_BAD_REQUEST, err
            overlay = overlays[0]
            # Check tenant ID
            if tenantid != overlay['tenantid']:
                # If the overlay belongs to another tenant,
                # return an error message
                err = (
                    'The overlay %s does not belong to the tenant %s'
                    % (overlayid, tenantid)
                )
                logging.warning(err)
                return STATUS_BAD_REQUEST, err
            # Get the overlay name
            overlay_name = overlay['name']
            # Get the overlay type
            overlay_type = overlay['type']
            # Get the tunnel mode
            tunnel_name = overlay['tunnel_mode']
            tunnel_mode = self.tunnel_modes[tunnel_name]
            # Get the transport proto
            transport_proto = overlay['transport_proto']
            # Get the slices belonging to the overlay
            slices = overlay['slices']
            # All checks passed
            logging.debug('Check passed')
            # Let's remove the VPN
            devices = [
                _slice['deviceid'] for _slice in overlay['slices']
            ]
            configured_slices = slices.copy()
            for site1 in slices:
                deviceid = site1['deviceid']
                interface_name = site1['interface_name']
                # Remove the tunnel between all the pairs of interfaces
                for site2 in configured_slices:
                    if site1['deviceid'] != site2['deviceid']:
                        status_code = tunnel_mode.remove_tunnel(
                            overlayid,
                            overlay_name,
                            overlay_type,
                            site1,
                            site2,
                            tenantid,
                            tunnel_info,
                            ignore_errors=True
                        )
                        if status_code != STATUS_OK:
                            err = (
                                'Cannot remove tunnel (overlay %s site1 '
                                '%s site2 %s, tenant %s)'
                                % (overlay_name, site1, site2, tenantid)
                            )
                            logging.warning(err)
                            return status_code, err
                # Mark the site1 as unconfigured
                configured_slices.remove(site1)
                # Remove the interface from the overlay
                status_code = tunnel_mode.remove_slice_from_overlay(
                    overlayid,
                    overlay_name,
                    deviceid,
                    interface_name,
                    tenantid,
                    tunnel_info,
                    ignore_errors=True
                )
                if status_code != STATUS_OK:
                    err = (
                        'Cannot remove slice from overlay (overlay %s, '
                        'device %s, slice %s, tenant %s)'
                        % (overlay_name, deviceid, interface_name, tenantid)
                    )
                    logging.warning(err)
                    return status_code, err
                # Check if the overlay and the tunnel mode
                # have already been deleted on the device
                devices.remove(deviceid)
                if deviceid not in devices:
                    # Destroy overlay on the devices
                    status_code = tunnel_mode.destroy_overlay(
                        overlayid,
                        overlay_name,
                        overlay_type,
                        tenantid,
                        deviceid,
                        tunnel_info,
                        ignore_errors=True
                    )
                    if status_code != STATUS_OK:
                        err = (
                            'Cannot destroy overlay (overlay %s, device '
                            '%s tenant %s)'
                            % (overlay_name, deviceid, tenantid)
                        )
                        logging.warning(err)
                        return status_code, err
                    # Destroy tunnel mode on the devices
                    counter = (
                        storage_helper.dec_and_get_tunnel_mode_counter(
                            tunnel_name, deviceid, tenantid
                        )
                    )
                    if counter == 0:
                        # Add reverse action to the rollback stack
                        rollback.push(
                            func=(
                                storage_helper.get_and_inc_tunnel_mode_counter
                            ),
                            tunnel_name=tunnel_name,
                            deviceid=deviceid,
                            tenantid=tenantid
                        )
                        status_code = tunnel_mode.destroy_tunnel_mode(
                            deviceid, tenantid, tunnel_info,
                            ignore_errors=True
                        )
                        if status_code != STATUS_OK:
                            err = (
                                'Cannot destroy tunnel mode (device %s, '
                                'tenant %s)' % (deviceid, tenantid)
                            )
                            logging.warning(err)
                            return status_code, err
                        # Add reverse action to the rollback stack
                        rollback.push(
                            func=tunnel_mode.init_tunnel_mode,
                            deviceid=deviceid,
                            tenantid=tenantid,
                            overlay_info=tunnel_info
                        )
                    elif counter is None:
                        err = 'Cannot decrease tunnel mode counter'
                        logging.error(err)
                        return STATUS_INTERNAL_SERVER_ERROR, err
                    else:
                        # Success
                        # Add reverse action to the rollback stack
                        rollback.push(
                            func=(
                                storage_helper.get_and_inc_tunnel_mode_counter
                            ),
                            tunnel_name=tunnel_name,
                            deviceid=deviceid,
                            tenantid=tenantid
                        )
            # Destroy overlay data structure
            status_code = tunnel_mode.destroy_overlay_data(
                overlayid,
                overlay_name,
                tenantid,
                tunnel_info,
                ignore_errors=True
            )
            if status_code != STATUS_OK:
                err = (
                    'Cannot destroy overlay data (overlay %s, tenant %s)'
                    % (overlay_name, tenantid)
                )
                logging.warning(err)
                return status_code, err
            # Add reverse action to the rollback stack
            rollback.push(
                func=tunnel_mode.init_overlay_data,
                overlayid=overlayid,
                overlay_name=overlay_name,
                tenantid=tenantid,
                overlay_info=tunnel_info
            )
            # Delete the overlay
            success = storage_helper.remove_overlay(
                overlayid, tenantid
            )
            if success is None or success is False:
                err = 'Cannot remove the overlay from the controller state'
                logging.error(err)
                return STATUS_INTERNAL_SERVER_ERROR, err
            # Add reverse action to the rollback stack
            rollback.push(
                func=storage_helper.create_overlay,
                name=overlay_name,
                type=overlay_type,
                slices=slices,
                tenantid=tenantid,
                tunnel_mode=tunnel_name,
                transport_proto=transport_proto
            )
            # Success, commit all performed operations
            rollback.commitAll()
        # Create the response
        return STATUS_OK, 'OK'

    """Assign an interface to a VPN"""

    def AssignSliceToOverlay(self, request, context):
        logging.info('AssignSliceToOverlay request received:\n%s' % request)
        with RollbackContext() as rollback:
            # Extract the intents from the request message
            for intent in request.intents:
                # Parameters extraction
                #
                # Extract the overlay ID from the intent
                overlayid = intent.overlayid
                # Extract tunnel info
                tunnel_info = intent.tunnel_info
                # Extract tenant ID
                tenantid = intent.tenantid
                # Validate the tenant ID
                logging.debug('Validating the tenant ID: %s' % tenantid)
                if not srv6_controller_utils.validate_tenantid(tenantid):
                    # If tenant ID is invalid, return an error message
                    err = 'Invalid tenant ID: %s' % tenantid
                    logging.warning(err)
                    return OverlayServiceReply(
                        status=Status(code=STATUS_BAD_REQUEST, reason=err)
                    )
                # Check if the tenant is configured
                is_config = storage_helper.is_tenant_configured(
                    tenantid
                )
                if is_config is None:
                    err = 'Error while checking tenant configuration'
                    logging.error(err)
                    return TenantReply(
                        status=Status(
                            code=STATUS_INTERNAL_SERVER_ERROR, reason=err
                        )
                    )
                elif is_config is False:
                    err = (
                        'Cannot update overlay for an unconfigured '
                        'tenant. Tenant not found or error during the '
                        'connection to the db'
                    )
                    logging.warning(err)
                    return TenantReply(
                        status=Status(code=STATUS_BAD_REQUEST, reason=err)
                    )
                # Get the overlay
                overlays = storage_helper.get_overlays(
                    overlayids=[overlayid]
                )
                if overlays is None:
                    err = 'Error getting the overlay'
                    logging.error(err)
                    return OverlayServiceReply(
                        status=Status(
                            code=STATUS_INTERNAL_SERVER_ERROR, reason=err
                        )
                    )
                elif len(overlays) == 0:
                    # If the overlay does not exist, return an error
                    # message
                    err = 'The overlay %s does not exist' % overlayid
                    logging.warning(err)
                    return OverlayServiceReply(
                        status=Status(code=STATUS_BAD_REQUEST, reason=err)
                    )
                # Take the first overlay
                overlay = overlays[0]
                # Check tenant ID
                if tenantid != overlay['tenantid']:
                    # If the overlay belongs to another tenant,
                    # return an error message
                    err = (
                        'The overlay %s does not belong to the '
                        'tenant %s' % (overlayid, tenantid)
                    )
                    logging.warning(err)
                    return OverlayServiceReply(
                        status=Status(code=STATUS_BAD_REQUEST, reason=err)
                    )
                # Get the overlay name
                overlay_name = overlay['name']
                # Get the overlay type
                overlay_type = overlay['type']
                # Get the tunnel mode
                tunnel_name = overlay['tunnel_mode']
                tunnel_mode = self.tunnel_modes[tunnel_name]
                # Get the slices belonging to the overlay
                slices = overlay['slices']
                # Get the devices on which the overlay has been configured
                _devices = [_slice['deviceid'] for _slice in slices]
                # Extract the interfaces
                incoming_slices = list()
                incoming_devices = set()
                for _slice in intent.slices:
                    deviceid = _slice.deviceid
                    interface_name = _slice.interface_name
                    # Add the slice to the incoming slices set
                    incoming_slices.append(
                        {
                            'deviceid': deviceid,
                            'interface_name': interface_name
                        }
                    )
                    # Add the device to the incoming devices set
                    # if the overlay has not been initiated on it
                    if deviceid not in _devices:
                        incoming_devices.add(deviceid)
                # Parameters validation
                #
                # Let's check if the overlay exists
                logging.debug('Checking the overlay: %s' % overlay_name)
                # Get the devices
                devices = storage_helper.get_devices(
                    deviceids=list(incoming_devices) + _devices,
                    return_dict=True
                )
                if devices is None:
                    err = 'Error getting devices'
                    logging.error(err)
                    return OverlayServiceReply(
                        status=Status(
                            code=STATUS_INTERNAL_SERVER_ERROR,
                            reason=err
) ) # Devices validation for deviceid in devices: # Let's check if the router exists if deviceid not in devices: # If the device does not exist, return an error # message err = 'Device not found %s' % deviceid logging.warning(err) return OverlayServiceReply( status=Status(code=STATUS_BAD_REQUEST, reason=err) ) # Check if the device is enabled if not devices[deviceid]['enabled']: # If the device is not enabled, return an error # message err = 'The device %s is not enabled' % deviceid logging.warning(err) return OverlayServiceReply( status=Status(code=STATUS_BAD_REQUEST, reason=err) ) # Check if the device is connected if not devices[deviceid]['connected']: # If the device is not connected, return an error # message err = 'The device %s is not connected' % deviceid logging.warning(err) return OverlayServiceReply( status=Status(code=STATUS_BAD_REQUEST, reason=err) ) # Check if the devices have at least a WAN interface wan_found = False for interface in devices[deviceid]['interfaces']: if interface['type'] == InterfaceType.WAN: wan_found = True if not wan_found: # No WAN interfaces found on the device err = ( 'No WAN interfaces found on the device %s' % deviceid ) logging.warning(err) return OverlayServiceReply( status=Status(code=STATUS_BAD_REQUEST, reason=err) ) # Convert interfaces list to a dict representation # This step simplifies future processing interfaces = dict() for deviceid in devices: for interface in devices[deviceid]['interfaces']: interfaces[interface['name']] = interface devices[deviceid]['interfaces'] = interfaces # Iterate on the interfaces and extract the # interfaces to be assigned # to the overlay and validate them for _slice in incoming_slices: logging.debug('Validating the slice: %s' % _slice) # A slice is a tuple (deviceid, interface_name) # # Extract the device ID deviceid = _slice['deviceid'] # Extract the interface name interface_name = _slice['interface_name'] # Let's check if the interface exists if interface_name not in devices[deviceid]['interfaces']: # If the interface does not exists, return an error # message err = 'The interface does not exist' logging.warning(err) return OverlayServiceReply( status=Status(code=STATUS_BAD_REQUEST, reason=err) ) # Check if the interface type is LAN if devices[deviceid]['interfaces'][ interface_name ]['type'] != InterfaceType.LAN: # The interface type is not LAN err = ( 'Cannot add non-LAN interface to the overlay: %s ' '(device %s)' % (interface_name, deviceid) ) logging.warning(err) return OverlayServiceReply( status=Status(code=STATUS_BAD_REQUEST, reason=err) ) # Check if the slice is already assigned to an overlay _overlay = storage_helper.get_overlay_containing_slice( _slice, tenantid ) if _overlay is not None: # Slice already assigned to an overlay err = ( 'Cannot create overlay: the slice %s is ' 'already assigned to the overlay %s' % (_slice, _overlay['_id']) ) logging.warning(err) return OverlayServiceReply( status=Status(code=STATUS_BAD_REQUEST, reason=err) ) # Check for IP addresses if overlay_type == OverlayType.IPv4Overlay: addrs = storage_helper.get_ipv4_addresses( deviceid=deviceid, tenantid=tenantid, interface_name=interface_name ) if len(addrs) == 0: # No IPv4 address assigned to the interface err = ( 'Cannot create overlay: the slice %s has no ' 'IPv4 addresses; at least one IPv4 address ' 'is required to create an IPv4 Overlay' % _slice ) logging.error(err) return OverlayServiceReply( status=Status( code=STATUS_BAD_REQUEST, reason=err ) ) subnets = storage_helper.get_ipv4_subnets( deviceid=deviceid, 
tenantid=tenantid, interface_name=interface_name ) if len(subnets) == 0: # No IPv4 subnet assigned to the interface err = ( 'Cannot create overlay: the slice %s has no ' 'IPv4 subnets; at least one IPv4 subnet is ' 'required to create an IPv4 Overlay' % _slice ) logging.error(err) return OverlayServiceReply( status=Status( code=STATUS_BAD_REQUEST, reason=err ) ) elif overlay_type == OverlayType.IPv6Overlay: addrs = storage_helper.get_ipv6_addresses( deviceid=deviceid, tenantid=tenantid, interface_name=interface_name ) if len(addrs) == 0: # No IPv6 address assigned to the interface err = ( 'Cannot create overlay: the slice %s has no ' 'IPv6 addresses; at least one IPv6 address ' 'is required to create an IPv6 Overlay' % _slice ) logging.error(err) return OverlayServiceReply( status=Status( code=STATUS_BAD_REQUEST, reason=err ) ) subnets = storage_helper.get_ipv6_subnets( deviceid=deviceid, tenantid=tenantid, interface_name=interface_name ) if len(subnets) == 0: # No IPv6 subnet assigned to the interface err = ( 'Cannot create overlay: the slice %s has ' 'no IPv6 subnets; at least one IPv6 subnet ' 'is required to create an IPv6 Overlay' % _slice ) logging.error(err) return OverlayServiceReply( status=Status( code=STATUS_BAD_REQUEST, reason=err ) ) for slice1 in slices + incoming_slices: # Extract the device ID deviceid_1 = slice1['deviceid'] # Extract the interface name interface_name_1 = slice1['interface_name'] for slice2 in slices + incoming_slices: if slice2 == slice1: continue # Extract the device ID deviceid_2 = slice2['deviceid'] # Extract the interface name interface_name_2 = slice2['interface_name'] if overlay_type == OverlayType.IPv4Overlay: subnets1 = storage_helper.get_ipv4_subnets( deviceid=deviceid_1, tenantid=tenantid, interface_name=interface_name_1 ) subnets2 = storage_helper.get_ipv4_subnets( deviceid=deviceid_2, tenantid=tenantid, interface_name=interface_name_2 ) for subnet1 in subnets1: subnet1 = subnet1['subnet'] for subnet2 in subnets2: subnet2 = subnet2['subnet'] if IPv4Network(subnet1).overlaps( IPv4Network(subnet2) ): err = ( 'Cannot create overlay: the ' 'slices %s and %s have ' 'overlapping subnets' % (slice1, slice2) ) logging.error(err) return OverlayServiceReply( status=Status( code=STATUS_BAD_REQUEST, reason=err ) ) elif overlay_type == OverlayType.IPv6Overlay: subnets1 = storage_helper.get_ipv6_subnets( deviceid=deviceid_1, tenantid=tenantid, interface_name=interface_name_1 ) subnets2 = storage_helper.get_ipv6_subnets( deviceid=deviceid_2, tenantid=tenantid, interface_name=interface_name_2 ) for subnet1 in subnets1: subnet1 = subnet1['subnet'] for subnet2 in subnets2: subnet2 = subnet2['subnet'] if IPv6Network(subnet1).overlaps( IPv6Network(subnet2) ): err = ( 'Cannot create overlay: the ' 'slices %s and %s have ' 'overlapping subnets' % (slice1, slice2) ) logging.error(err) return OverlayServiceReply( status=Status( code=STATUS_BAD_REQUEST, reason=err ) ) can_use_ipv6_addr_for_wan = True can_use_ipv4_addr_for_wan = True for _slice in slices + incoming_slices: # Get WAN interface wan_interface = storage_helper.get_wan_interfaces( deviceid=_slice['deviceid'], tenantid=tenantid )[0] # Check if WAN interface has IPv6 connectivity addrs = storage_helper.get_ext_ipv6_addresses( deviceid=_slice['deviceid'], tenantid=tenantid, interface_name=wan_interface ) if addrs is None or len(addrs) == 0: can_use_ipv6_addr_for_wan = False # Check if WAN interface has IPv4 connectivity addrs = storage_helper.get_ext_ipv4_addresses( deviceid=_slice['deviceid'], 
tenantid=tenantid, interface_name=wan_interface ) if addrs is None or len(addrs) == 0: can_use_ipv4_addr_for_wan = False if ( not can_use_ipv6_addr_for_wan and not can_use_ipv4_addr_for_wan ): err = ( 'Cannot establish a full-mesh between all the WAN ' 'interfaces' ) logging.error(err) return OverlayServiceReply( status=Status(code=STATUS_BAD_REQUEST, reason=err) ) if tunnel_name == 'SRv6' and not can_use_ipv6_addr_for_wan: err = ( 'IPv6 transport not available: cannot create a SRv6 ' 'overlay' ) logging.error(err) return OverlayServiceReply( status=Status(code=STATUS_BAD_REQUEST, reason=err) ) # For SRv6 overlays, Segment Routing transparency must be T0 # or T1 for each device, otherwise the SRv6 full-mesh overlay # cannot be created if tunnel_name == 'SRv6': for _slice in incoming_slices: incoming_sr_transparency = ( storage_helper.get_incoming_sr_transparency( _slice['deviceid'], tenantid ) ) outgoing_sr_transparency = ( storage_helper.get_outgoing_sr_transparency( _slice['deviceid'], tenantid ) ) # is_ip6tnl_forced = storage_helper.is_ip6tnl_forced( # _slice['deviceid'], tenantid # ) # is_srh_forced = storage_helper.is_srh_forced( # _slice['deviceid'], tenantid # ) if incoming_sr_transparency == 'op': err = ( 'Device %s has incoming SR Transparency set ' 'to OP. SRv6 overlays are not supported for ' 'OP.' % deviceid ) logging.error(err) return OverlayServiceReply( status=Status( code=STATUS_BAD_REQUEST, reason=err ) ) if outgoing_sr_transparency == 'op': err = ( 'Device %s has outgoing SR Transparency set ' 'to OP. SRv6 overlays are not supported for ' 'OP.' % deviceid ) logging.error(err) return OverlayServiceReply( status=Status( code=STATUS_BAD_REQUEST, reason=err ) ) # if ( # incoming_sr_transparency == 't1' # and is_srh_forced # ): # err = ( # 'Device %s has incoming SR Transparency set ' # 'to T1 and force-srh set. Cannot use an SRH ' # 'for device with incoming Transparency T1.' 
# % deviceid # ) # logging.error(err) # return OverlayServiceReply( # status=Status( # code=STATUS_BAD_REQUEST, # reason=err # ) # ) # All the devices must belong to the same tenant for device in devices.values(): if device['tenantid'] != tenantid: err = ( 'Error while processing the intent: ' 'All the devices must belong to the ' 'same tenant %s' % tenantid ) logging.warning(err) return OverlayServiceReply( status=Status(code=STATUS_BAD_REQUEST, reason=err) ) logging.info('All checks passed') # All checks passed # # Let's assign the interface to the overlay configured_slices = slices for site1 in incoming_slices: deviceid = site1['deviceid'] interface_name = site1['interface_name'] # Init tunnel mode on the devices counter = storage_helper.get_and_inc_tunnel_mode_counter( tunnel_name, deviceid, tenantid ) if counter == 0: # Add reverse action to the rollback stack rollback.push( func=( storage_helper.dec_and_get_tunnel_mode_counter ), tunnel_name=tunnel_name, deviceid=deviceid, tenantid=tenantid ) status_code = tunnel_mode.init_tunnel_mode( deviceid, tenantid, tunnel_info ) if status_code != STATUS_OK: err = ( 'Cannot initialize tunnel mode (device %s ' 'tenant %s)' % (deviceid, tenantid) ) logging.warning(err) return OverlayServiceReply( status=Status(code=status_code, reason=err) ) elif counter is None: err = 'Cannot increase tunnel mode counter' logging.error(err) return OverlayServiceReply( status=Status( code=STATUS_INTERNAL_SERVER_ERROR, reason=err ) ) else: # Success # Add reverse action to the rollback stack rollback.push( func=( storage_helper.dec_and_get_tunnel_mode_counter ), tunnel_name=tunnel_name, deviceid=deviceid, tenantid=tenantid ) # Check if we have already configured the overlay on the # device if deviceid in incoming_devices: # Init overlay on the devices status_code = tunnel_mode.init_overlay( overlayid, overlay_name, overlay_type, tenantid, deviceid, tunnel_info ) if status_code != STATUS_OK: err = ( 'Cannot initialize overlay (overlay %s ' 'device %s, tenant %s)' % (overlay_name, deviceid, tenantid) ) logging.warning(err) return OverlayServiceReply( status=Status(code=status_code, reason=err) ) # Add reverse action to the rollback stack rollback.push( func=tunnel_mode.destroy_overlay, overlayid=overlayid, overlay_name=overlay_name, overlay_type=overlay_type, tenantid=tenantid, deviceid=deviceid, overlay_info=tunnel_info ) # Remove device from the to-be-configured devices set incoming_devices.remove(deviceid) # Add the interface to the overlay status_code = tunnel_mode.add_slice_to_overlay( overlayid, overlay_name, deviceid, interface_name, tenantid, tunnel_info ) if status_code != STATUS_OK: err = ( 'Cannot add slice to overlay (overlay %s, ' 'device %s, slice %s, tenant %s)' % (overlay_name, deviceid, interface_name, tenantid) ) logging.warning(err) return OverlayServiceReply( status=Status(code=status_code, reason=err) ) # Add reverse action to the rollback stack rollback.push( func=tunnel_mode.remove_slice_from_overlay, overlayid=overlayid, overlay_name=overlay_name, deviceid=deviceid, interface_name=interface_name, tenantid=tenantid, overlay_info=tunnel_info ) # Create the tunnel between all the pairs of interfaces for site2 in configured_slices: if site1['deviceid'] != site2['deviceid']: status_code = tunnel_mode.create_tunnel( overlayid, overlay_name, overlay_type, site1, site2, tenantid, tunnel_info ) if status_code != STATUS_OK: err = ( 'Cannot create tunnel (overlay %s ' 'site1 %s site2 %s, tenant %s)' % (overlay_name, site1, site2, tenantid) ) 
logging.warning(err) return OverlayServiceReply( status=Status(code=status_code, reason=err) ) # Add reverse action to the rollback stack rollback.push( func=tunnel_mode.remove_tunnel, overlayid=overlayid, overlay_name=overlay_name, overlay_type=overlay_type, l_slice=site1, r_slice=site2, tenantid=tenantid, overlay_info=tunnel_info ) # Add the slice to the configured set configured_slices.append(site1) # Save the overlay to the state success = storage_helper.add_many_slices_to_overlay( overlayid, tenantid, incoming_slices ) if success is None or success is False: err = 'Cannot update overlay in controller state' logging.error(err) return OverlayServiceReply( status=Status( code=STATUS_INTERNAL_SERVER_ERROR, reason=err ) ) # Add reverse action to the rollback stack rollback.push( func=storage_helper.remove_many_slices_from_overlay, overlayid=overlayid, tenantid=tenantid, slices=incoming_slices ) # Success, commit all performed operations rollback.commitAll() logging.info('All the intents have been processed successfully\n\n') # Create the response return OverlayServiceReply( status=Status(code=STATUS_OK, reason='OK') ) """Remove an interface from a VPN""" def RemoveSliceFromOverlay(self, request, context): logging.info('RemoveSliceFromOverlay request received:\n%s' % request) with RollbackContext() as rollback: # Extract the intents from the request message for intent in request.intents: # Parameters extraction # # Extract the overlay ID from the intent overlayid = intent.overlayid # Extract tunnel info tunnel_info = intent.tunnel_info # Extract tenant ID tenantid = intent.tenantid # Validate the tenant ID logging.debug('Validating the tenant ID: %s' % tenantid) if not srv6_controller_utils.validate_tenantid(tenantid): # If tenant ID is invalid, return an error message err = 'Invalid tenant ID: %s' % tenantid logging.warning(err) return OverlayServiceReply( status=Status(code=STATUS_BAD_REQUEST, reason=err) ) # Check if the tenant is configured is_config = storage_helper.is_tenant_configured( tenantid ) if is_config is None: err = 'Error while checking tenant configuration' logging.error(err) return TenantReply( status=Status( code=STATUS_INTERNAL_SERVER_ERROR, reason=err ) ) elif is_config is False: err = ( 'Cannot update overlay for a tenant unconfigured' 'Tenant not found or error during the ' 'connection to the db' ) logging.warning(err) return TenantReply( status=Status(code=STATUS_BAD_REQUEST, reason=err) ) # Let's check if the overlay exists logging.debug('Checking the overlay: %s' % overlayid) overlays = storage_helper.get_overlays( overlayids=[overlayid] ) if overlays is None: err = 'Error getting the overlay' logging.error(err) return OverlayServiceReply( status=Status( code=STATUS_INTERNAL_SERVER_ERROR, reason=err ) ) elif len(overlays) == 0: # If the overlay does not exist, return an error message err = 'The overlay %s does not exist' % overlayid logging.warning(err) return OverlayServiceReply( status=Status(code=STATUS_BAD_REQUEST, reason=err) ) # Take the first overlay overlay = overlays[0] # Check tenant ID if tenantid != overlay['tenantid']: # If the overlay does not exist, return an error message err = ( 'The overlay %s does not belong to the ' 'tenant %s' % (overlayid, tenantid) ) logging.warning(err) return OverlayServiceReply( status=Status(code=STATUS_BAD_REQUEST, reason=err) ) # Get the overlay name overlay_name = overlay['name'] # Get the overlay type overlay_type = overlay['type'] # Get the tunnel mode tunnel_name = overlay['tunnel_mode'] tunnel_mode = 
self.tunnel_modes[tunnel_name] # Get the slices belonging to the overlay slices = overlay['slices'] # Extract the interfaces incoming_slices = list() incoming_devices = set() for _slice in intent.slices: deviceid = _slice.deviceid interface_name = _slice.interface_name # Add the slice to the incoming slices set incoming_slices.append( { 'deviceid': deviceid, 'interface_name': interface_name } ) # Add the device to the incoming devices set # if the overlay has not been initiated on it if deviceid not in incoming_devices: incoming_devices.add(deviceid) # Get the devices devices = storage_helper.get_devices( deviceids=incoming_devices, return_dict=True ) if devices is None: err = 'Error getting devices' logging.error(err) return OverlayServiceReply( status=Status( code=STATUS_INTERNAL_SERVER_ERROR, reason=err ) ) # Convert interfaces list to a dict representation # This step simplifies future processing interfaces = dict() for deviceid in devices: for interface in devices[deviceid]['interfaces']: interfaces[interface['name']] = interface devices[deviceid]['interfaces'] = interfaces # Parameters validation # # Iterate on the interfaces # and extract the interfaces to be removed from the VPN for _slice in incoming_slices: logging.debug('Validating the slice: %s' % _slice) # A slice is a tuple (deviceid, interface_name) # # Extract the device ID deviceid = _slice['deviceid'] # Extract the interface name interface_name = _slice['interface_name'] # Let's check if the router exists if deviceid not in devices: # If the device does not exist, return an error # message err = 'Device not found %s' % deviceid logging.warning(err) return OverlayServiceReply( status=Status(code=STATUS_BAD_REQUEST, reason=err) ) # Check if the device is connected if not devices[deviceid]['connected']: # If the device is not connected, return an error # message err = 'The device %s is not connected' % deviceid logging.warning(err) return OverlayServiceReply( status=Status(code=STATUS_BAD_REQUEST, reason=err) ) # Check if the device is enabled if not devices[deviceid]['enabled']: # If the device is not enabled, return an error message err = 'The device %s is not enabled' % deviceid logging.warning(err) return OverlayServiceReply( status=Status(code=STATUS_BAD_REQUEST, reason=err) ) # Let's check if the interface exists if interface_name not in devices[deviceid]['interfaces']: # If the interface does not exists, return an error # message err = 'The interface does not exist' logging.warning(err) return OverlayServiceReply( status=Status(code=STATUS_BAD_REQUEST, reason=err) ) # Let's check if the interface is assigned to the given # overlay if _slice not in overlay['slices']: # The interface is not assigned to the overlay, # return an error message err = ( 'The interface is not assigned to the overlay %s, ' '(name %s, tenantid %s)' % (overlayid, overlay_name, tenantid) ) logging.warning(err) return OverlayServiceReply( status=Status(code=STATUS_BAD_REQUEST, reason=err) ) # All the devices must belong to the same tenant for device in devices.values(): if device['tenantid'] != tenantid: err = ( 'Error while processing the intent: ' 'All the devices must belong to the ' 'same tenant %s' % tenantid ) logging.warning(err) return OverlayServiceReply( status=Status(code=STATUS_BAD_REQUEST, reason=err) ) logging.debug('All checks passed') # All checks passed # # Let's remove the interface from the VPN _devices = [slice['deviceid'] for slice in overlay['slices']] configured_slices = slices.copy() for site1 in incoming_slices: deviceid = 
site1['deviceid'] interface_name = site1['interface_name'] # Remove the tunnel between all the pairs of interfaces for site2 in configured_slices: if site1['deviceid'] != site2['deviceid']: status_code = tunnel_mode.remove_tunnel( overlayid, overlay_name, overlay_type, site1, site2, tenantid, tunnel_info ) if status_code != STATUS_OK: err = ( 'Cannot create tunnel (overlay %s ' 'site1 %s site2 %s, tenant %s)' % (overlay_name, site1, site2, tenantid) ) logging.warning(err) return OverlayServiceReply( status=Status(code=status_code, reason=err) ) # Add reverse action to the rollback stack rollback.push( func=tunnel_mode.create_tunnel, overlayid=overlayid, overlay_name=overlay_name, overlay_type=overlay_type, l_slice=site1, r_slice=site2, tenantid=tenantid, overlay_info=tunnel_info ) # Mark the site1 as unconfigured configured_slices.remove(site1) # Remove the interface from the overlay status_code = tunnel_mode.remove_slice_from_overlay( overlayid, overlay_name, deviceid, interface_name, tenantid, tunnel_info ) if status_code != STATUS_OK: err = ( 'Cannot remove slice from overlay (overlay %s, ' 'device %s, slice %s, tenant %s)' % (overlay_name, deviceid, interface_name, tenantid) ) logging.warning(err) return OverlayServiceReply( status=Status(code=status_code, reason=err) ) # Add reverse action to the rollback stack rollback.push( func=tunnel_mode.add_slice_to_overlay, overlayid=overlayid, overlay_name=overlay_name, deviceid=deviceid, interface_name=interface_name, tenantid=tenantid, overlay_info=tunnel_info ) # Check if the overlay and the tunnel mode # has already been deleted on the device _devices.remove(deviceid) if deviceid not in _devices: # Destroy overlay on the devices status_code = tunnel_mode.destroy_overlay( overlayid, overlay_name, overlay_type, tenantid, deviceid, tunnel_info ) if status_code != STATUS_OK: err = ( 'Cannot destroy overlay ' '(overlay %s, device %s tenant %s)' % (overlay_name, deviceid, tenantid) ) logging.warning(err) return OverlayServiceReply( status=Status(code=status_code, reason=err) ) # Add reverse action to the rollback stack rollback.push( func=tunnel_mode.init_overlay, overlayid=overlayid, overlay_name=overlay_name, overlay_type=overlay_type, tenantid=tenantid, deviceid=deviceid, overlay_info=tunnel_info ) # Destroy tunnel mode on the devices counter = storage_helper.dec_and_get_tunnel_mode_counter( tunnel_name, deviceid, tenantid ) if counter == 0: # Add reverse action to the rollback stack rollback.push( func=( storage_helper.get_and_inc_tunnel_mode_counter ), tunnel_name=tunnel_name, deviceid=deviceid, tenantid=tenantid ) status_code = tunnel_mode.destroy_tunnel_mode( deviceid, tenantid, tunnel_info ) if status_code != STATUS_OK: err = ( 'Cannot destroy tunnel mode (device %s ' 'tenant %s)' % (deviceid, tenantid) ) logging.warning(err) return OverlayServiceReply( status=Status(code=status_code, reason=err) ) elif counter is None: err = 'Cannot decrease tunnel mode counter' logging.error(err) return OverlayServiceReply( status=Status( code=STATUS_INTERNAL_SERVER_ERROR, reason=err ) ) else: # Success # Add reverse action to the rollback stack rollback.push( func=( storage_helper.get_and_inc_tunnel_mode_counter ), tunnel_name=tunnel_name, deviceid=deviceid, tenantid=tenantid ) # Save the overlay to the state success = storage_helper.remove_many_slices_from_overlay( overlayid, tenantid, incoming_slices ) if success is None or success is False: err = 'Cannot update overlay in controller state' logging.error(err) return OverlayServiceReply( 
status=Status( code=STATUS_INTERNAL_SERVER_ERROR, reason=err) ) # Add reverse action to the rollback stack rollback.push( func=storage_helper.add_many_slices_to_overlay, overlayid=overlayid, tenantid=tenantid, slices=incoming_slices ) # Success, commit all performed operations rollback.commitAll() logging.info('All the intents have been processed successfully\n\n') # Create the response return OverlayServiceReply( status=Status(code=STATUS_OK, reason='OK') ) # Get VPNs from the controller inventory def GetOverlays(self, request, context): logging.debug('GetOverlays request received') # Extract the overlay IDs from the request overlayids = list(request.overlayids) overlayids = overlayids if len(overlayids) > 0 else None # Extract the tenant ID tenantid = request.tenantid tenantid = tenantid if tenantid != '' else None # Parameters validation # # Validate the overlay IDs if overlayids is not None: for overlayid in overlayids: logging.debug('Validating the overlay ID: %s' % overlayid) if not srv6_controller_utils.validate_overlayid(overlayid): # If overlay ID is invalid, return an error message err = 'Invalid overlay ID: %s' % overlayid logging.warning(err) return OverlayServiceReply( status=Status(code=STATUS_BAD_REQUEST, reason=err) ) # Validate the tenant ID if tenantid is not None: logging.debug('Validating the tenant ID: %s' % tenantid) if not srv6_controller_utils.validate_tenantid(tenantid): # If tenant ID is invalid, return an error message err = 'Invalid tenant ID: %s' % tenantid logging.warning(err) return OverlayServiceReply( status=Status(code=STATUS_BAD_REQUEST, reason=err) ) # Create the response response = OverlayServiceReply() # Build the overlays list overlays = storage_helper.get_overlays( overlayids=overlayids, tenantid=tenantid ) if overlays is None: err = 'Error getting overlays' logging.error(err) return OverlayServiceReply( status=Status(code=STATUS_INTERNAL_SERVER_ERROR, reason=err) ) for _overlay in overlays: # Add a new overlay to the overlays list overlay = response.overlays.add() # Set overlay ID overlay.overlayid = str(_overlay['_id']) # Set overlay name overlay.overlay_name = _overlay['name'] # Set overlaty type overlay.overlay_type = _overlay['type'] # Set tenant ID overlay.tenantid = _overlay['tenantid'] # Set tunnel mode overlay.tunnel_mode = _overlay['tunnel_mode'] # Set slices # Iterate on all slices for _slice in _overlay['slices']: # Add a new slice to the overlay __slice = overlay.slices.add() # Add device ID __slice.deviceid = _slice['deviceid'] # Add interface name __slice.interface_name = _slice['interface_name'] # Return the overlays list logging.debug('Sending response:\n%s' % response) response.status.code = STATUS_OK response.status.reason = 'OK' return response # Get SID lists available between two edge devices def GetSIDLists(self, request, context): logging.debug('GetSIDLists request received') # Extract the ingress and egress device IDs from the request ingress_deviceid = request.ingress_deviceid ingress_deviceid = ingress_deviceid if ingress_deviceid != '' else None egress_deviceid = request.egress_deviceid egress_deviceid = egress_deviceid if egress_deviceid != '' else None # Extract the tenant ID tenantid = request.tenantid tenantid = tenantid if tenantid != '' else None # Parameters validation # # Validate the device IDs if ingress_deviceid is None: err = 'Missing manadtory ingress_deviceid argument' logging.error(err) return GetSIDListsReply( status=Status(code=STATUS_BAD_REQUEST, reason=err) ) if egress_deviceid is None: err = 'Missing 
manadtory egress_deviceid argument' logging.error(err) return GetSIDListsReply( status=Status(code=STATUS_BAD_REQUEST, reason=err) ) # Validate the tenant ID if tenantid is not None: logging.debug('Validating the tenant ID: %s' % tenantid) if not srv6_controller_utils.validate_tenantid(tenantid): # If tenant ID is invalid, return an error message err = 'Invalid tenant ID: %s' % tenantid logging.warning(err) return GetSIDListsReply( status=Status(code=STATUS_BAD_REQUEST, reason=err) ) # Create the response response = GetSIDListsReply() # Get the SID list (in both the directions) between the two devices # for each overlay status, err, sid_lists = self.tunnel_modes['SRv6'].get_sid_lists( ingress_deviceid=ingress_deviceid, egress_deviceid=egress_deviceid, tenantid=tenantid ) if status != NbStatusCode.STATUS_OK: logging.error(err) return GetSIDListsReply( status=Status(code=STATUS_INTERNAL_SERVER_ERROR, reason=err) ) for _sid_list in sid_lists: # Retrieve the SID list sid_list = response.sid_lists.add() sid_list.overlayid = _sid_list['overlayid'] sid_list.overlay_name = _sid_list['overlay_name'] sid_list.tenantid = _sid_list['tenantid'] sid_list.direct_sid_list.extend(_sid_list['direct_sid_list']) sid_list.return_sid_list.extend(_sid_list['return_sid_list']) # Return the overlays list logging.debug('Sending response:\n%s' % response) response.status.code = STATUS_OK response.status.reason = 'OK' return response def prepare_db_for_device_reconciliation(self, deviceid, tenantid): # self.stamp_controller.storage.set_sender_inizialized( # node_id=deviceid, tenantid=tenantid, is_initialized=False) storage_helper.reset_overlay_stats( deviceid=deviceid, tenantid=tenantid ) for tunnel_name in self.tunnel_modes: storage_helper.reset_tunnel_mode_counter( tunnel_name=tunnel_name, deviceid=deviceid, tenantid=tenantid ) storage_helper.reset_created_tunnels( deviceid=deviceid, tenantid=tenantid ) if self.stamp_controller is not None: if self.stamp_controller.storage.get_stamp_node( node_id=deviceid, tenantid=tenantid ) is not None: self.stamp_controller.storage.set_sender_inizialized( node_id=deviceid, tenantid=tenantid, is_initialized=False ) self.stamp_controller.storage.set_reflector_inizialized( node_id=deviceid, tenantid=tenantid, is_initialized=False ) return STATUS_OK def device_reconciliation(self, deviceid, tenantid): logging.debug('Device Reconcliation started') err = STATUS_OK # Get the device device = storage_helper.get_device( deviceid=deviceid, tenantid=tenantid ) if device is None: logging.error('Error getting device') return status_codes_pb2.STATUS_INTERNAL_ERROR if not device['configured']: logging.warning('Device not yet configured. Nothing to reconcile') return err default_interfaces = dict() for interface in device['default']['interfaces']: default_interfaces[interface['name']] = dict() default_interfaces[ interface['name'] ]['ipv4_addrs'] = interface['ipv4_addrs'] default_interfaces[ interface['name'] ]['ipv6_addrs'] = interface['ipv6_addrs'] for interface in device['interfaces']: if interface['type'] == InterfaceType.WAN or \ interface['type'] == InterfaceType.UNKNOWN: logging.warning( 'Cannot set IP address of WAN interface. 
Skipping' ) continue if len(interface['ipv4_addrs']) > 0: addrs = list() for addr in default_interfaces[ interface['name'] ]['ipv4_addrs']: addrs.append(addr) response = self.srv6_manager.remove_many_ipaddr( device['mgmtip'], self.grpc_client_port, addrs=addrs, device=interface['name'], family=AF_UNSPEC ) if response != SbStatusCode.STATUS_SUCCESS: # If the operation has failed, # report an error message logging.warning( 'Cannot remove the public addresses ' 'from the interface' ) err = status_codes_pb2.STATUS_INTERNAL_ERROR # Add IP address to the interface for ipv4_addr in interface['ipv4_addrs']: response = self.srv6_manager.create_ipaddr( device['mgmtip'], self.grpc_client_port, ip_addr=ipv4_addr, device=interface['name'], family=AF_INET, ignore_errors=True ) if response == SbStatusCode.STATUS_FILE_EXISTS: logging.warning( 'The IPv4 address already exists. Skipping' ) elif response != SbStatusCode.STATUS_SUCCESS: # If the operation has failed, # report an error message logging.warning( 'Cannot assign the private VPN IP address ' 'to the interface' ) err = status_codes_pb2.STATUS_INTERNAL_ERROR if len(interface['ipv6_addrs']) > 0: addrs = list() nets = list() for addr in default_interfaces[ interface['name'] ]['ipv6_addrs']: addrs.append(addr) nets.append(str(IPv6Interface(addr).network)) response = self.srv6_manager.remove_many_ipaddr( device['mgmtip'], self.grpc_client_port, addrs=addrs, nets=nets, device=interface['name'], family=AF_UNSPEC ) if response != SbStatusCode.STATUS_SUCCESS: # If the operation has failed, # report an error message logging.warning( 'Cannot remove the public addresses ' 'from the interface' ) err = status_codes_pb2.STATUS_INTERNAL_ERROR # Add IP address to the interface for ipv6_addr in interface['ipv6_addrs']: net = IPv6Interface(ipv6_addr).network.__str__() response = self.srv6_manager.create_ipaddr( device['mgmtip'], self.grpc_client_port, ip_addr=ipv6_addr, device=interface['name'], net=net, family=AF_INET6, ignore_errors=True ) if response == SbStatusCode.STATUS_FILE_EXISTS: logging.warning( 'The IPv4 address already exists. 
Skipping' ) elif response != SbStatusCode.STATUS_SUCCESS: # If the operation has failed, # report an error message logging.warning( 'Cannot assign the private VPN IP address ' 'to the interface' ) err = status_codes_pb2.STATUS_INTERNAL_ERROR # Push the new configuration if err == STATUS_OK: logging.debug( 'The device %s has been configured successfully' % deviceid ) else: err = 'The device %s rejected the configuration' % deviceid logging.error(err) return STATUS_BAD_REQUEST logging.info('The device configuration has been saved\n\n') # Setup STAMP information if ENABLE_STAMP_SUPPORT: logging.info('Configuring STAMP information\n\n') # Lookup the WAN interfaces # TODO currently we only support a single WAN interface, # so we look for the address of the first WAN interface # In the future we should support multiple interfaces wan_ip = None wan_ifaces = None for interface in device['interfaces']: if interface['type'] == InterfaceType.WAN and \ len(interface['ipv6_addrs']) > 0: wan_ip = interface['ipv6_addrs'][0].split('/')[0] wan_ifaces = [interface['name']] break # Configure information if self.stamp_controller.storage.get_stamp_node( node_id=device['deviceid'], tenantid=tenantid ) is None: self.stamp_controller.add_stamp_node( node_id=device['deviceid'], node_name=device['name'], grpc_ip=device['mgmtip'], grpc_port=self.grpc_client_port, ip=wan_ip, sender_port=42069, reflector_port=862, interfaces=wan_ifaces, stamp_source_ipv6_address=wan_ip, is_sender=True, is_reflector=True, initialize=False, tenantid=tenantid ) # Configure information self.stamp_controller.init_stamp_node( node_id=device['deviceid'], tenantid=tenantid ) stamp_sessions = self.stamp_controller.storage.get_stamp_sessions( tenantid=tenantid ) for session in stamp_sessions: if session.sender.node_id == deviceid: self.stamp_controller.storage.set_session_running( ssid=session.ssid, tenantid=tenantid, is_running=False ) self.stamp_controller._create_stamp_sender_session( ssid=session.ssid, sender=session.sender, reflector=session.reflector, sidlist=session.sidlist, interval=session.interval, auth_mode=session.auth_mode, key_chain=session.sender_key_chain, timestamp_format=session.sender_timestamp_format, packet_loss_type=session.packet_loss_type, delay_measurement_mode=session.delay_measurement_mode ) if session.reflector.node_id == deviceid: self.stamp_controller.storage.set_session_running( ssid=session.ssid, tenantid=tenantid, is_running=False ) self.stamp_controller._create_stamp_reflector_session( ssid=session.ssid, sender=session.sender, reflector=session.reflector, return_sidlist=session.return_sidlist, auth_mode=session.auth_mode, key_chain=session.reflector_key_chain, timestamp_format=session.reflector_timestamp_format, session_reflector_mode=session.session_reflector_mode ) logging.debug('Device Reconcliation completed') # Create the response return STATUS_OK def overlay_reconciliation(self, deviceid, tenantid): logging.info( 'Overlay Reconcliation started: deviceid %s, tenantid %s', deviceid, tenantid ) overlays = storage_helper.get_overlays_containing_device( deviceid=deviceid, tenantid=tenantid ) for overlay in overlays: overlayid = str(overlay['_id']) overlay_name = overlay['name'] tenantid = overlay['tenantid'] overlay_type = overlay['type'] tunnel_name = overlay['tunnel_mode'] slices = overlay['slices'] tunnel_info = None # Get tunnel mode tunnel_mode = self.tunnel_modes[tunnel_name] # Let's create the overlay # Create overlay data structure status_code = tunnel_mode.init_overlay_data_reconciliation( 
overlayid=overlayid, overlay_name=overlay_name, tenantid=tenantid, overlay_info=tunnel_info ) if status_code != STATUS_OK: err = ( 'Cannot initialize overlay data (overlay %s, tenant %s)' % (overlay_name, tenantid) ) logging.warning(err) return # Iterate on slices and add to the overlay configured_slices = list() for site1 in slices: _deviceid = site1['deviceid'] interface_name = site1['interface_name'] # Init tunnel mode on the if deviceid == _deviceid: counter = storage_helper.get_and_inc_tunnel_mode_counter( tunnel_name, deviceid, tenantid ) if counter == 0: status_code = ( tunnel_mode.init_tunnel_mode_reconciliation( deviceid, tenantid, tunnel_info ) ) if status_code != STATUS_OK: err = ( 'Cannot initialize tunnel mode (device %s ' 'tenant %s)' % (deviceid, tenantid) ) logging.warning(err) return elif counter is None: err = 'Cannot increase tunnel mode counter' logging.error(err) return # Init overlay on the devices if deviceid == _deviceid: status_code = tunnel_mode.init_overlay_reconciliation( overlayid, overlay_name, overlay_type, tenantid, deviceid, tunnel_info ) if status_code != STATUS_OK: err = ( 'Cannot initialize overlay (overlay %s ' 'device %s, tenant %s)' % (overlay_name, deviceid, tenantid) ) logging.warning(err) return # Add the interface to the overlay if deviceid == _deviceid: status_code = ( tunnel_mode.add_slice_to_overlay_reconciliation( overlayid, overlay_name, deviceid, interface_name, tenantid, tunnel_info ) ) if status_code != STATUS_OK: err = ( 'Cannot add slice to overlay (overlay %s, ' 'device %s, slice %s, tenant %s)' % (overlay_name, deviceid, interface_name, tenantid) ) logging.warning(err) return # Create the tunnel between all the pairs of interfaces for site2 in configured_slices: if site1['deviceid'] != site2['deviceid']: if site1['deviceid'] == deviceid: status_code = ( tunnel_mode.create_tunnel_reconciliation_l( overlayid, overlay_name, overlay_type, site1, site2, tenantid, tunnel_info ) ) if status_code != STATUS_OK: err = ( 'Cannot create tunnel (overlay %s ' 'site1 %s site2 %s, tenant %s)' % (overlay_name, site1, site2, tenantid) ) logging.warning(err) return status_code = ( tunnel_mode.create_tunnel_reconciliation_r( overlayid, overlay_name, overlay_type, site2, site1, tenantid, tunnel_info ) ) if status_code != STATUS_OK: err = ( 'Cannot create tunnel (overlay %s ' 'site1 %s site2 %s, tenant %s)' % (overlay_name, site1, site2, tenantid) ) logging.warning(err) return if site2['deviceid'] == deviceid: status_code = ( tunnel_mode.create_tunnel_reconciliation_l( overlayid, overlay_name, overlay_type, site2, site1, tenantid, tunnel_info ) ) if status_code != STATUS_OK: err = ( 'Cannot create tunnel (overlay %s ' 'site1 %s site2 %s, tenant %s)' % (overlay_name, site1, site2, tenantid) ) logging.warning(err) return status_code = ( tunnel_mode.create_tunnel_reconciliation_r( overlayid, overlay_name, overlay_type, site1, site2, tenantid, tunnel_info ) ) if status_code != STATUS_OK: err = ( 'Cannot create tunnel (overlay %s ' 'site1 %s site2 %s, tenant %s)' % (overlay_name, site1, site2, tenantid) ) logging.warning(err) return # Add the slice to the configured set configured_slices.append(site1) logging.info( 'Reconciliation of overlays completed successfully\n\n' ) logging.debug( 'Overlay Reconcliation completed: deviceid %s, tenantid %s', deviceid, tenantid ) # Create the response return STATUS_OK def create_server(grpc_server_ip=DEFAULT_GRPC_SERVER_IP, grpc_server_port=DEFAULT_GRPC_SERVER_PORT, grpc_client_port=DEFAULT_GRPC_CLIENT_PORT, 
nb_secure=DEFAULT_SECURE, server_key=DEFAULT_KEY, server_certificate=DEFAULT_CERTIFICATE, sb_secure=DEFAULT_SECURE, client_certificate=DEFAULT_CERTIFICATE, southbound_interface=DEFAULT_SB_INTERFACE, topo_graph=None, vpn_dict=None, devices=None, vpn_file=DEFAULT_VPN_DUMP, controller_state=None, verbose=DEFAULT_VERBOSE): # Initialize controller state # controller_state = srv6_controller_utils.ControllerState( # topology=topo_graph, # devices=devices, # vpn_dict=vpn_dict, # vpn_file=vpn_file # ) # Create SRv6 Manager srv6_manager = sb_grpc_client.SRv6Manager( secure=sb_secure, certificate=client_certificate ) # Setup gRPC server # # Create the server and add the handler grpc_server = grpc.server(futures.ThreadPoolExecutor()) # Add the STAMP controller stamp_controller = None if ENABLE_STAMP_SUPPORT: mongodb_client = storage_helper.get_mongodb_session() stamp_controller = stamp_controller_module.run_grpc_server( server=grpc_server, storage='mongodb', mongodb_client=mongodb_client ) # Initialize the Northbound Interface service = NorthboundInterface( grpc_client_port, srv6_manager, southbound_interface, verbose, stamp_controller ) srv6_vpn_pb2_grpc.add_NorthboundInterfaceServicer_to_server( service, grpc_server ) # If secure mode is enabled, we need to create a secure endpoint if nb_secure: # Read key and certificate with open(server_key, 'rb') as f: key = f.read() with open(server_certificate, 'rb') as f: certificate = f.read() # Create server SSL credentials grpc_server_credentials = grpc.ssl_server_credentials( ((key, certificate,),) ) # Create a secure endpoint grpc_server.add_secure_port( '[%s]:%s' % (grpc_server_ip, grpc_server_port), grpc_server_credentials ) else: # Create an insecure endpoint grpc_server.add_insecure_port( '[%s]:%s' % (grpc_server_ip, grpc_server_port) ) return grpc_server, service # Start gRPC server def start_server(grpc_server_ip=DEFAULT_GRPC_SERVER_IP, grpc_server_port=DEFAULT_GRPC_SERVER_PORT, grpc_client_port=DEFAULT_GRPC_CLIENT_PORT, nb_secure=DEFAULT_SECURE, server_key=DEFAULT_KEY, server_certificate=DEFAULT_CERTIFICATE, sb_secure=DEFAULT_SECURE, client_certificate=DEFAULT_CERTIFICATE, southbound_interface=DEFAULT_SB_INTERFACE, topo_graph=None, vpn_dict=None, devices=None, vpn_file=DEFAULT_VPN_DUMP, controller_state=None, verbose=DEFAULT_VERBOSE): # Create the gRPC server grpc_server, _ = create_server( grpc_server_ip=grpc_server_ip, grpc_server_port=grpc_server_port, grpc_client_port=grpc_client_port, nb_secure=nb_secure, server_key=server_key, server_certificate=server_certificate, sb_secure=sb_secure, client_certificate=client_certificate, southbound_interface=southbound_interface, topo_graph=topo_graph, vpn_dict=vpn_dict, devices=devices, vpn_file=vpn_file, controller_state=controller_state, verbose=verbose ) # Start the loop for gRPC logging.info('Listening gRPC') grpc_server.start() while True: time.sleep(5) # Parse arguments def parse_arguments(): # Get parser parser = ArgumentParser( description='gRPC-based Northbound APIs for SRv6 Controller' ) # Debug logs parser.add_argument( '-d', '--debug', action='store_true', help='Activate debug logs' ) # gRPC secure mode parser.add_argument( '-s', '--secure', action='store_true', default=DEFAULT_SECURE, help='Activate secure mode' ) # Verbose mode parser.add_argument( '-v', '--verbose', action='store_true', dest='verbose', default=DEFAULT_VERBOSE, help='Enable verbose mode' ) # Path of intput topology file parser.add_argument( '-t', '--topo-file', dest='topo_file', action='store', required=True, 
        default=DEFAULT_TOPOLOGY_FILE,
        help='Filename of the exported topology'
    )
    # Path of output VPN file
    parser.add_argument(
        '-f', '--vpn-file', dest='vpn_dump', action='store',
        default=None, help='Filename of the VPN dump'
    )
    # Server certificate file
    parser.add_argument(
        '-c', '--certificate', dest='certificate', action='store',
        default=DEFAULT_CERTIFICATE, help='Server certificate file'
    )
    # Server key
    parser.add_argument(
        '-k', '--key', dest='key', action='store',
        default=DEFAULT_KEY, help='Server key file'
    )
    # IP address of the gRPC server
    parser.add_argument(
        '-i', '--ip', dest='grpc_server_ip', action='store',
        default=DEFAULT_GRPC_SERVER_IP, help='IP address of the gRPC server'
    )
    # Port of the gRPC server
    parser.add_argument(
        '-p', '--server-port', dest='grpc_server_port', action='store',
        default=DEFAULT_GRPC_SERVER_PORT, help='Port of the gRPC server'
    )
    # Port of the gRPC client
    parser.add_argument(
        '-o', '--client-port', dest='grpc_client_port', action='store',
        default=DEFAULT_GRPC_CLIENT_PORT, help='Port of the gRPC client'
    )
    # Southbound interface
    parser.add_argument(
        '-b', '--southbound', action='store', dest='southbound_interface',
        default=DEFAULT_SB_INTERFACE,
        help='Southbound interface\nSupported interfaces: [grpc]'
    )
    # Parse input parameters
    args = parser.parse_args()
    # Done, return
    return args


if __name__ == '__main__':
    # Parse options
    args = parse_arguments()
    # Set up the logger
    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
        logging.getLogger().setLevel(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)
        logging.getLogger().setLevel(level=logging.INFO)
    # Debug settings
    SERVER_DEBUG = logging.getLogger().getEffectiveLevel() == logging.DEBUG
    logging.info('SERVER_DEBUG: %s', SERVER_DEBUG)
    # Input topology file
    topo_file = args.topo_file
    # Output VPN file
    vpn_dump = args.vpn_dump
    # Secure mode
    secure = args.secure
    # Server certificate file
    certificate = args.certificate
    # Server key
    key = args.key
    # IP of the gRPC server
    grpc_server_ip = args.grpc_server_ip
    # Port of the gRPC server
    grpc_server_port = args.grpc_server_port
    # Port of the gRPC client
    grpc_client_port = args.grpc_client_port
    # Southbound interface
    southbound_interface = args.southbound_interface
    # Verbose mode
    verbose = args.verbose
    # Check the southbound interface
    if southbound_interface not in SUPPORTED_SB_INTERFACES:
        # The southbound interface is invalid or not supported
        logging.warning(
            'Error: The %s interface is invalid or not yet supported\n'
            'Supported southbound interfaces: %s'
            % (southbound_interface, SUPPORTED_SB_INTERFACES)
        )
        sys.exit(-2)
    # Wait until the topology JSON file is ready
    while True:
        if os.path.isfile(topo_file):
            # The file is ready, we are ready to start the server
            break
        # The file is not ready, wait for INTERVAL_CHECK_FILES seconds
        # before retrying
        print('Waiting for TOPOLOGY_FILE...')
        time.sleep(INTERVAL_CHECK_FILES)
    # Load the topology
    topo_graph = srv6_controller_utils.load_topology_from_json_dump(topo_file)
    if topo_graph is not None:
        # Start the server
        start_server(
            grpc_server_ip=grpc_server_ip,
            grpc_server_port=grpc_server_port,
            grpc_client_port=grpc_client_port,
            nb_secure=secure,
            server_key=key,
            server_certificate=certificate,
            southbound_interface=southbound_interface,
            topo_graph=topo_graph,
            vpn_dict=None,
            vpn_file=vpn_dump,
            verbose=verbose
        )
        while True:
            time.sleep(5)
    else:
        print('Invalid topology')
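The controller code above relies throughout on a RollbackContext whose definition is not part of this record. The following is a minimal sketch of how such a helper could behave, inferred only from the push()/commitAll() call sites; the project's real implementation may differ.

class RollbackContext:
    """Undo stack: actions pushed but not committed run on exit, LIFO."""

    def __init__(self):
        self._stack = []

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Anything still on the stack was not committed: undo it,
        # newest action first
        while self._stack:
            func, kwargs = self._stack.pop()
            func(**kwargs)
        return False  # never suppress exceptions

    def push(self, func, **kwargs):
        # Record a reverse action to run if the enclosing block fails
        self._stack.append((func, kwargs))

    def commitAll(self):
        # All operations succeeded: drop the pending reverse actions
        self._stack.clear()

Under this sketch, the early `return` statements inside the `with RollbackContext() as rollback:` blocks above leave the stack non-empty, so the pushed reverse actions (remove_tunnel, destroy_overlay, counter decrements, and so on) are replayed automatically, while a successful run reaches commitAll() and keeps the applied state.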
46.481724
79
0.450853
15,933
198,384
5.442101
0.039352
0.026295
0.032845
0.037043
0.813284
0.777497
0.756196
0.743683
0.72365
0.711725
0
0.006156
0.494818
198,384
4,267
80
46.492618
0.85903
0.107922
0
0.652574
0
0
0.08827
0.00025
0
0
0
0.000234
0
1
0.00719
false
0.00115
0.008053
0
0.071901
0.000863
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
a549d2d71bba2a32ff4ef9dcc21e4dc85b60598a
83
py
Python
src/commands/__init__.py
mp-pinheiro/fairfruitbot-telegram
7f888b3c372231bf4c3aa2d61f14a5bb054c5c8e
[ "Apache-2.0" ]
null
null
null
src/commands/__init__.py
mp-pinheiro/fairfruitbot-telegram
7f888b3c372231bf4c3aa2d61f14a5bb054c5c8e
[ "Apache-2.0" ]
null
null
null
src/commands/__init__.py
mp-pinheiro/fairfruitbot-telegram
7f888b3c372231bf4c3aa2d61f14a5bb054c5c8e
[ "Apache-2.0" ]
null
null
null
from .command import *
from .sign import *
from .tarot import *
from .news import *
20.75
22
0.722892
12
83
5
0.5
0.5
0
0
0
0
0
0
0
0
0
0
0.180723
83
4
23
20.75
0.882353
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
a550d94e7e1e9dddb668dda6181c5f6659617b4e
50
py
Python
tests/exe/bfile.py
gunnchadwick/backtrace-python
f2a735857fd76de1e568301856a9292059689878
[ "MIT" ]
3
2017-01-05T02:25:19.000Z
2017-08-01T14:48:08.000Z
tests/exe/bfile.py
gunnchadwick/backtrace-python
f2a735857fd76de1e568301856a9292059689878
[ "MIT" ]
10
2016-11-30T19:54:05.000Z
2022-01-31T16:10:07.000Z
tests/exe/bfile.py
gunnchadwick/backtrace-python
f2a735857fd76de1e568301856a9292059689878
[ "MIT" ]
1
2021-01-03T08:52:40.000Z
2021-01-03T08:52:40.000Z
import json


def bar(s):
    return json.loads(s)
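A quick usage sketch for the helper above, with an illustrative JSON input (assumed, not from the record): bar() simply delegates to json.loads.

result = bar('{"numbers": [1, 2, 3]}')
assert result == {"numbers": [1, 2, 3]}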
10
24
0.66
9
50
3.666667
0.777778
0
0
0
0
0
0
0
0
0
0
0
0.22
50
4
25
12.5
0.846154
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0.333333
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
6
a58a10a2b149c1660b38bb5c2062d8f35b1d456e
19
py
Python
GAIA/__init__.py
igemsoftware2017/AiGEM_TeamHeidelberg2017
fa6f1e917bff27b358ec816bbe383db542babc14
[ "MIT" ]
16
2017-11-01T20:54:30.000Z
2021-02-24T05:02:50.000Z
GAIA/__init__.py
LArnoldt/AiGEM_TeamHeidelberg2017
fa6f1e917bff27b358ec816bbe383db542babc14
[ "MIT" ]
1
2018-03-02T16:23:15.000Z
2018-04-12T16:07:28.000Z
GAIA/__init__.py
LArnoldt/AiGEM_TeamHeidelberg2017
fa6f1e917bff27b358ec816bbe383db542babc14
[ "MIT" ]
19
2017-12-23T10:44:36.000Z
2022-03-11T13:12:52.000Z
from GAIA import *
9.5
18
0.736842
3
19
4.666667
1
0
0
0
0
0
0
0
0
0
0
0
0.210526
19
1
19
19
0.933333
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
3c34cf7ebe966d485a2dab16afafbd85bd1988ae
116
py
Python
tests/conftest.py
ToshikiShimizu/recommend
07e700c634b914d069ded12a9f1836f30f8bf48c
[ "Apache-2.0" ]
null
null
null
tests/conftest.py
ToshikiShimizu/recommend
07e700c634b914d069ded12a9f1836f30f8bf48c
[ "Apache-2.0" ]
3
2021-09-21T12:57:18.000Z
2021-11-04T00:04:02.000Z
tests/conftest.py
ToshikiShimizu/recommend
07e700c634b914d069ded12a9f1836f30f8bf48c
[ "Apache-2.0" ]
null
null
null
import os
import sys

# Make the sibling src/ directory importable from the tests
sys.path.append(
    os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + "/../src/")
)
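For context, a hypothetical companion test showing the effect of this conftest.py: pytest imports conftest.py before collecting tests, so any module under src/ becomes importable by bare name. The module name `recommender` below is assumed purely for illustration.

# tests/test_import.py (hypothetical)
import recommender  # resolved via the sys.path entry added in conftest.py


def test_module_importable():
    assert recommender is not None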
23.2
48
0.689655
18
116
4.222222
0.5
0.236842
0.342105
0
0
0
0
0
0
0
0
0
0.103448
116
4
49
29
0.730769
0
0
0
0
0
0.068966
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
6
b1e68844c186044d79f5f65d0a8baf532a871d89
64
py
Python
organizationdashxblock/__init__.py
jaygoswami2303/organizationdashxblock
f7832ff29ec94286997ef32cc1c0e4003d3d169a
[ "MIT" ]
null
null
null
organizationdashxblock/__init__.py
jaygoswami2303/organizationdashxblock
f7832ff29ec94286997ef32cc1c0e4003d3d169a
[ "MIT" ]
null
null
null
organizationdashxblock/__init__.py
jaygoswami2303/organizationdashxblock
f7832ff29ec94286997ef32cc1c0e4003d3d169a
[ "MIT" ]
null
null
null
from .organizationdashxblock import OrganizationDashboardXBlock
32
63
0.921875
4
64
14.75
1
0
0
0
0
0
0
0
0
0
0
0
0.0625
64
1
64
64
0.983333
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
b1f647eed5324f6dd31844d07983c75da51a2635
48
py
Python
main.py
amaya382/vscode-remote-docker-over-ssh
82bdbad9c1d3b2c54524f51bb0bdae4de5a7e571
[ "Apache-2.0" ]
null
null
null
main.py
amaya382/vscode-remote-docker-over-ssh
82bdbad9c1d3b2c54524f51bb0bdae4de5a7e571
[ "Apache-2.0" ]
null
null
null
main.py
amaya382/vscode-remote-docker-over-ssh
82bdbad9c1d3b2c54524f51bb0bdae4de5a7e571
[ "Apache-2.0" ]
null
null
null
import os

print(f"running on {os.uname()[1]}")
12
36
0.645833
9
48
3.444444
0.888889
0
0
0
0
0
0
0
0
0
0
0.02381
0.125
48
3
37
16
0.714286
0
0
0
0
0
0.541667
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0.5
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
6
591e3346f0b2ef3b42383d04b811d30de6679a87
7,176
py
Python
common/cnn_models.py
tlkh/mini-dlperf
7d40bbad98ac68d4c496fe4b69d0de80ef0fffc6
[ "MIT" ]
null
null
null
common/cnn_models.py
tlkh/mini-dlperf
7d40bbad98ac68d4c496fe4b69d0de80ef0fffc6
[ "MIT" ]
37
2020-09-13T08:45:04.000Z
2022-03-15T01:19:05.000Z
common/cnn_models.py
tlkh/mini-dlperf
7d40bbad98ac68d4c496fe4b69d0de80ef0fffc6
[ "MIT" ]
null
null
null
import time

import tensorflow.compat.v2 as tf
import tensorflow.keras.layers as layers
import tensorflow.keras.applications as models

from . import mlperf_rn50


def _conv(filters, **kwargs):
    # 3x3 same-padding ReLU convolution with He-uniform init, used by both toy models.
    return layers.Conv2D(filters, (3, 3), padding="same",
                         kernel_initializer="he_uniform", activation="relu",
                         **kwargs)


def toy_cnn(img_size=(224, 224), num_class=2, weights=None, dtype=tf.float32):
    # `weights` is accepted for signature parity with the backbone builders below;
    # it is unused here.
    model = tf.keras.models.Sequential([
        _conv(64, input_shape=(img_size[0], img_size[1], 3), dtype=dtype),
        _conv(64),
        layers.MaxPooling2D((4, 4)),
        layers.BatchNormalization(fused=True),
        _conv(128),
        _conv(128),
        layers.MaxPooling2D((4, 4)),
        layers.BatchNormalization(fused=True),
        _conv(64),
        _conv(64),
        layers.BatchNormalization(fused=True),
        layers.GlobalAveragePooling2D(),
        layers.Dense(num_class),
        # Keep the final softmax in float32 so mixed-precision runs stay numerically stable.
        layers.Activation("softmax", dtype=tf.float32),
    ])
    return model


def huge_cnn(img_size=(224, 224), num_class=2, weights=None, dtype=tf.float32):
    # Deliberately oversized stack (two runs of eight 1024-filter convolutions);
    # `weights` is unused here as well.
    model = tf.keras.models.Sequential(
        [_conv(128, input_shape=(img_size[0], img_size[1], 3), dtype=dtype),
         layers.MaxPooling2D((4, 4)),
         layers.BatchNormalization(fused=True),
         _conv(512),
         layers.MaxPooling2D((4, 4)),
         layers.BatchNormalization(fused=True)]
        + [_conv(1024) for _ in range(8)]
        + [layers.MaxPooling2D((4, 4)),
           layers.BatchNormalization(fused=True)]
        + [_conv(1024) for _ in range(8)]
        + [layers.BatchNormalization(fused=True),
           layers.GlobalAveragePooling2D(),
           layers.Dense(512),
           layers.Dense(num_class),
           layers.Activation("softmax", dtype=tf.float32)]
    )
    return model


def _with_classifier_head(input_layer, base, num_class):
    # Shared head for the pretrained backbones: global average pooling, a dense
    # projection to the class count, and a float32 softmax.
    base.trainable = True
    x = base.output
    x = layers.GlobalAveragePooling2D()(x)
    x = layers.Dense(num_class)(x)
    preds = layers.Activation("softmax", dtype=tf.float32)(x)
    return tf.keras.models.Model(inputs=input_layer, outputs=preds)


def rn50(img_size=(224, 224), num_class=2, weights="imagenet", dtype=tf.float32):
    input_layer = layers.Input(shape=(img_size[0], img_size[1], 3), dtype=dtype)
    base = models.ResNet50V2(input_tensor=input_layer, include_top=False, weights=weights)
    return _with_classifier_head(input_layer, base, num_class)


def rn50_mlperf(img_size=(224, 224), num_class=2):
    return mlperf_rn50.rn50(num_class, input_shape=(img_size[0], img_size[1], 3),
                            batch_size=None, use_l2_regularizer=True)


def rn152(img_size=(224, 224), num_class=2, weights="imagenet", dtype=tf.float32):
    input_layer = layers.Input(shape=(img_size[0], img_size[1], 3), dtype=dtype)
    base = models.ResNet152V2(input_tensor=input_layer, include_top=False, weights=weights)
    return _with_classifier_head(input_layer, base, num_class)


def dn201(img_size=(224, 224), num_class=2, weights="imagenet", dtype=tf.float32):
    input_layer = layers.Input(shape=(img_size[0], img_size[1], 3), dtype=dtype)
    base = models.DenseNet201(input_tensor=input_layer, include_top=False, weights=weights)
    return _with_classifier_head(input_layer, base, num_class)


def mobilenet(img_size=(224, 224), num_class=2, weights="imagenet", dtype=tf.float32):
    input_layer = layers.Input(shape=(img_size[0], img_size[1], 3), dtype=dtype)
    base = models.MobileNetV2(input_tensor=input_layer, include_top=False, weights=weights)
    return _with_classifier_head(input_layer, base, num_class)


def convert_for_training(model, wd=0.0001, verbose=False):
    # Rebuild the model from its config with L2 weight decay on every kernel and
    # BatchNorm set to momentum=0.9, epsilon=1e-5, fused=True. Note that
    # from_config() re-initializes weights, so apply this before training.
    model_config = model.get_config()
    for layer, layer_config in zip(model.layers, model_config["layers"]):
        if hasattr(layer, "kernel_regularizer"):
            if verbose:
                print("Adjust kernel_regularizer for", layer.name)
            regularizer = tf.keras.regularizers.l2(wd)
            layer_config["config"]["kernel_regularizer"] = {
                "class_name": regularizer.__class__.__name__,
                "config": regularizer.get_config()
            }
        # String comparison against the private class path, as in the original;
        # it targets the BatchNormalization implementation of this TF build.
        if str(type(layer)) == "<class 'tensorflow.python.keras.layers.normalization.BatchNormalization'>":
            if verbose:
                print("Adjust BatchNorm settings for", layer.name)
            layer_config["config"]["momentum"] = 0.9
            layer_config["config"]["epsilon"] = 1e-5
            layer_config["config"]["fused"] = True
    del model
    model = tf.keras.models.Model.from_config(model_config)
    model.trainable = True
    return model
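A minimal usage sketch, not part of the original module: build a backbone with random initialization, rebuild it with weight decay via convert_for_training, and compile it. The optimizer, loss, and hyperparameters below are illustrative assumptions, not the module's prescribed setup.

import tensorflow.compat.v2 as tf

model = rn50(img_size=(224, 224), num_class=2, weights=None)  # random init, no download
model = convert_for_training(model, wd=1e-4, verbose=True)    # adds L2 + BatchNorm tweaks
model.compile(optimizer=tf.keras.optimizers.SGD(learning_rate=0.01, momentum=0.9),
              loss="sparse_categorical_crossentropy",
              metrics=["accuracy"])
model.summary()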
52
156
0.684225
928
7,176
5.141164
0.117457
0.060365
0.045274
0.065395
0.814295
0.809474
0.809474
0.804863
0.804863
0.799204
0
0.051916
0.159838
7,176
137
157
52.379562
0.739426
0
0
0.675
0
0
0.102997
0.009199
0
0
0
0
0
1
0.066667
false
0
0.041667
0
0.175
0.016667
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
594e1696bed52fe643444e5724301e36d3fb5db1
16,289
py
Python
kakuro.py
ssiatras/kakuro-csp-model-solver
f29cd3c55f27ce5fdc90c46532cd4eb7b5f715bd
[ "MIT" ]
null
null
null
kakuro.py
ssiatras/kakuro-csp-model-solver
f29cd3c55f27ce5fdc90c46532cd4eb7b5f715bd
[ "MIT" ]
null
null
null
kakuro.py
ssiatras/kakuro-csp-model-solver
f29cd3c55f27ce5fdc90c46532cd4eb7b5f715bd
[ "MIT" ]
null
null
null
from csp import *
from time import time
from itertools import permutations


######### Kakuro puzzles

# Given, 4x3
kakuro_given4x3 = [
    ['*', '*', '*', [6, ''], [3, '']],
    ['*', [4, ''], [3, 3], '_', '_'],
    [['', 10], '_', '_', '_', '_'],
    [['', 3], '_', '_', '*', '*']
]

# Given, 5x7
kakuro_given5x7 = [
    ['*', [17, ''], [28, ''], '*', [42, ''], [22, '']],
    [['', 9], '_', '_', [31, 14], '_', '_'],
    [['', 20], '_', '_', '_', '_', '_'],
    ['*', ['', 30], '_', '_', '_', '_'],
    ['*', [22, 24], '_', '_', '_', '*'],
    [['', 25], '_', '_', '_', '_', [11, '']],
    [['', 20], '_', '_', '_', '_', '_'],
    [['', 14], '_', '_', ['', 17], '_', '_']
]

# Given, 14x14
kakuro_given14x14 = [
    ['*', '*', '*', '*', '*', [4, ''], [24, ''], [11, ''], '*', '*', '*', [11, ''], [17, ''], '*', '*'],
    ['*', '*', '*', [17, ''], [11, 12], '_', '_', '_', '*', '*', [24, 10], '_', '_', [11, ''], '*'],
    ['*', [4, ''], [16, 26], '_', '_', '_', '_', '_', '*', ['', 20], '_', '_', '_', '_', [16, '']],
    [['', 20], '_', '_', '_', '_', [24, 13], '_', '_', [16, ''], ['', 12], '_', '_', [23, 10], '_', '_'],
    [['', 10], '_', '_', [24, 12], '_', '_', [16, 5], '_', '_', [16, 30], '_', '_', '_', '_', '_'],
    ['*', '*', [3, 26], '_', '_', '_', '_', ['', 12], '_', '_', [4, ''], [16, 14], '_', '_', '*'],
    ['*', ['', 8], '_', '_', ['', 15], '_', '_', [34, 26], '_', '_', '_', '_', '_', '*', '*'],
    ['*', ['', 11], '_', '_', [3, ''], [17, ''], ['', 14], '_', '_', ['', 8], '_', '_', [7, ''], [17, ''], '*'],
    ['*', '*', '*', [23, 10], '_', '_', [3, 9], '_', '_', [4, ''], [23, ''], ['', 13], '_', '_', '*'],
    ['*', '*', [10, 26], '_', '_', '_', '_', '_', ['', 7], '_', '_', [30, 9], '_', '_', '*'],
    ['*', [17, 11], '_', '_', [11, ''], [24, 8], '_', '_', [11, 21], '_', '_', '_', '_', [16, ''], [17, '']],
    [['', 29], '_', '_', '_', '_', '_', ['', 7], '_', '_', [23, 14], '_', '_', [3, 17], '_', '_'],
    [['', 10], '_', '_', [3, 10], '_', '_', '*', ['', 8], '_', '_', [4, 25], '_', '_', '_', '_'],
    ['*', ['', 16], '_', '_', '_', '_', '*', ['', 23], '_', '_', '_', '_', '_', '*', '*'],
    ['*', '*', ['', 6], '_', '_', '*', '*', ['', 15], '_', '_', '_', '*', '*', '*', '*']
]

# Intermediate, 6x6
kakuro_intermediate6x6 = [
    ['*', [11, ''], [16, ''], [30, ''], '*', [24, ''], [11, '']],
    [['', 24], '_', '_', '_', ['', 9], '_', '_'],
    [['', 16], '_', '_', '_', [14, 17], '_', '_'],
    ['*', '*', [22, 20], '_', '_', '_', '*'],
    ['*', [3, 24], '_', '_', '_', [10, ''], [13, '']],
    [['', 7], '_', '_', ['', 19], '_', '_', '_'],
    [['', 11], '_', '_', ['', 7], '_', '_', '_']
]

# Hard, 8x8
kakuro_hard8x8 = [
    ['*', [28, ''], [15, ''], '*', [9, ''], [15, ''], '*', [9, ''], [12, '']],
    [['', 10], '_', '_', [15, 6], '_', '_', [10, 4], '_', '_'],
    [['', 38], '_', '_', '_', '_', '_', '_', '_', '_'],
    [['', 17], '_', '_', '_', ['', 4], '_', '_', [27, ''], '*'],
    [['', 13], '_', '_', [7, ''], [17, 19], '_', '_', '_', [15, '']],
    ['*', ['', 8], '_', '_', '_', '*', [16, 3], '_', '_'],
    ['*', [11, ''], [4, 4], '_', '_', [3, 24], '_', '_', '_'],
    [['', 44], '_', '_', '_', '_', '_', '_', '_', '_'],
    [['', 3], '_', '_', ['', 6], '_', '_', ['', 10], '_', '_']
]


######### Kakuro class implementation

class Kakuro(CSP):
    """CSP formulation of a Kakuro puzzle.

    Each empty cell becomes a variable with domain 1-9. Every sum clue becomes
    a hidden variable whose domain is the set of digit permutations matching
    the clue, which turns the n-ary sum constraints into binary ones.
    """

    def __init__(self, kakuro_puzzle):
        variables = []   # A list of variables; each is atomic
        domains = {}     # A dict of {var: [possible_value, ...]} entries
        neighbors = {}   # A dict of {var: [var, ...]} listing, for each variable,
                         # the other variables that participate in constraints
        self.puzzle = kakuro_puzzle
        for i in range(len(kakuro_puzzle)):  # Index for each line
            for j in range(len(kakuro_puzzle[i])):  # Index for each cell in each line
                # Find empty cells
                if kakuro_puzzle[i][j] == "_":
                    var = "X" + str(i) + "," + str(j)
                    variables.append(var)  # Add variable var
                    domains[var] = list(map(str, list(range(1, 10))))  # Add domain of variable var
                # Find slash cells
                if kakuro_puzzle[i][j] != '_' and kakuro_puzzle[i][j] != '*':
                    # Sum of cells down
                    if kakuro_puzzle[i][j][0] != "":
                        hidden_var = "C_d" + str(i) + "," + str(j)
                        variables.append(hidden_var)  # Hidden variable converts the n-ary sum constraint to binary
                        cell_counter = 0
                        for m in range(i + 1, len(kakuro_puzzle)):
                            if kakuro_puzzle[m][j] != "_":
                                break
                            nei = "X" + str(m) + "," + str(j)
                            if hidden_var not in neighbors:
                                neighbors[hidden_var] = []
                            neighbors[hidden_var].append(nei)
                            if nei not in neighbors:
                                neighbors[nei] = []
                            neighbors[nei].append(hidden_var)
                            cell_counter += 1
                        perms = list(map("".join, permutations('123456789', cell_counter)))
                        domains[hidden_var] = [perm for perm in perms
                                               if sum(int(x) for x in perm) == kakuro_puzzle[i][j][0]]
                    # Sum of cells right
                    if kakuro_puzzle[i][j][1] != "":
                        hidden_var = "C_r" + str(i) + "," + str(j)
                        variables.append(hidden_var)  # Hidden variable converts the n-ary sum constraint to binary
                        cell_counter = 0
                        for k in range(j + 1, len(kakuro_puzzle[i])):
                            if kakuro_puzzle[i][k] != "_":
                                break
                            nei = "X" + str(i) + "," + str(k)
                            if hidden_var not in neighbors:
                                neighbors[hidden_var] = []
                            neighbors[hidden_var].append(nei)
                            if nei not in neighbors:
                                neighbors[nei] = []
                            neighbors[nei].append(hidden_var)
                            cell_counter += 1
                        perms = list(map("".join, permutations('123456789', cell_counter)))
                        domains[hidden_var] = [perm for perm in perms
                                               if sum(int(x) for x in perm) == kakuro_puzzle[i][j][1]]
        CSP.__init__(self, variables, domains, neighbors, self.kakuro_constraint)

    def kakuro_constraint(self, A, a, B, b):
        """Return True if neighbors A, B satisfy kakuro's constraints
        when they have values A = a, B = b."""
        if A[0] == "X" and B[0] == "C":
            X_i = int(A[1:A.index(",")])
            X_j = int(A[A.index(",") + 1:])
            C_i = int(B[3:B.index(",")])
            C_j = int(B[B.index(",") + 1:])
            if B[2] == "d":
                ind = X_i - C_i - 1  # Index of character to be checked
                if b[ind] == a:
                    return True
            else:  # B[2] == "r"
                ind = X_j - C_j - 1  # Index of character to be checked
                if b[ind] == a:
                    return True
        elif A[0] == "C" and B[0] == "X":
            C_i = int(A[3:A.index(",")])
            C_j = int(A[A.index(",") + 1:])
            X_i = int(B[1:B.index(",")])
            X_j = int(B[B.index(",") + 1:])
            if A[2] == "d":
                ind = X_i - C_i - 1  # Index of character to be checked
                if a[ind] == b:
                    return True
            else:  # A[2] == "r"
                ind = X_j - C_j - 1  # Index of character to be checked
                if a[ind] == b:
                    return True
        return False

    def display(self, assignment=None):
        for i in range(len(self.puzzle)):  # Index for each line
            line = ""
            for j in range(len(self.puzzle[i])):  # Index for each cell in each line
                if self.puzzle[i][j] == '*':
                    line += " * \t"
                elif self.puzzle[i][j] == "_":
                    var = "X" + str(i) + "," + str(j)
                    if assignment is not None and var in assignment:
                        line += " " + assignment[var] + " \t"
                    else:
                        line += " _ \t"
                else:
                    sum1 = str(self.puzzle[i][j][0]) if self.puzzle[i][j][0] else " "
                    sum2 = str(self.puzzle[i][j][1]) if self.puzzle[i][j][1] else " "
                    line += sum1 + "\\" + sum2 + "\t"
            print(line)
        print()


######### Solve each puzzle with four heuristic combinations and report timings

def solve_and_report(puzzle, combo_name, display_solution=False, **search_args):
    problem = Kakuro(puzzle)
    start_time = time()
    assignments = backtracking_search(problem, **search_args)
    total_time = time() - start_time
    if display_solution:
        problem.display(assignments)
    print("\tHeuristic algorithms:", combo_name)
    print("\tSolved in", total_time, "seconds.")
    print("\tMade", problem.nassigns, "assignments.\n")


puzzles = [
    ("Given, 4x3", kakuro_given4x3),
    ("Given, 5x7", kakuro_given5x7),
    ("Given, 14x14", kakuro_given14x14),
    ("Intermediate, 6x6", kakuro_intermediate6x6),
    ("Hard, 8x8", kakuro_hard8x8),
]

for name, puzzle in puzzles:
    print("Kakuro puzzle: " + name + "\n")
    # BT + FC + MRV (also displays the solved grid)
    solve_and_report(puzzle, "BT + FC + MRV", display_solution=True,
                     select_unassigned_variable=mrv, inference=forward_checking)
    # BT + MAC + MRV
    solve_and_report(puzzle, "BT + MAC + MRV",
                     select_unassigned_variable=mrv, inference=mac)
    # BT + FC + MRV + LCV
    solve_and_report(puzzle, "BT + FC + MRV + LCV",
                     select_unassigned_variable=mrv, order_domain_values=lcv,
                     inference=forward_checking)
    # BT + MAC + MRV + LCV
    solve_and_report(puzzle, "BT + MAC + MRV + LCV",
                     select_unassigned_variable=mrv, order_domain_values=lcv,
                     inference=mac)
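A self-contained illustration, added here and not part of the original script, of the hidden-variable domain computation above: for a two-cell run whose clue is 4, the permutation filter leaves exactly the orderings '13' and '31' (permutations never repeat a digit, matching Kakuro's no-repeat rule, so '22' is excluded).

from itertools import permutations

clue_sum, run_length = 4, 2
perms = map("".join, permutations('123456789', run_length))
domain = [p for p in perms if sum(int(x) for x in p) == clue_sum]
print(domain)  # ['13', '31']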
38.599526
134
0.598256
2,018
16,289
4.542616
0.091675
0.092178
0.015272
0.054543
0.825897
0.790989
0.774081
0.770154
0.767427
0.755863
0
0.031746
0.172325
16,289
422
135
38.599526
0.648198
0.099454
0
0.631922
0
0
0.147966
0
0
0
0
0
0
1
0.009772
false
0
0.009772
0
0.039088
0.218241
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
3ccbd4eda0430c0a9980789555e9515e9a99c007
55
py
Python
src/data/__init__.py
vishwapardeshi/Text_Classification_US_Presidential_Nominees_2020
a1e8c4f229b1fe23b2104d3a4b316d7dd0ac91f0
[ "MIT" ]
null
null
null
src/data/__init__.py
vishwapardeshi/Text_Classification_US_Presidential_Nominees_2020
a1e8c4f229b1fe23b2104d3a4b316d7dd0ac91f0
[ "MIT" ]
null
null
null
src/data/__init__.py
vishwapardeshi/Text_Classification_US_Presidential_Nominees_2020
a1e8c4f229b1fe23b2104d3a4b316d7dd0ac91f0
[ "MIT" ]
null
null
null
from process_files import *
from process_data import *
18.333333
27
0.818182
8
55
5.375
0.625
0.511628
0
0
0
0
0
0
0
0
0
0
0.145455
55
2
28
27.5
0.914894
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
3ce64c5b857de773060841940bbbb4f6c7b09305
156
py
Python
clearly/view/__init__.py
hyice/clearly
a58462fb8bf09d1ef06aab3a6d9c3035b4b4b8cd
[ "MIT" ]
null
null
null
clearly/view/__init__.py
hyice/clearly
a58462fb8bf09d1ef06aab3a6d9c3035b4b4b8cd
[ "MIT" ]
null
null
null
clearly/view/__init__.py
hyice/clearly
a58462fb8bf09d1ef06aab3a6d9c3035b4b4b8cd
[ "MIT" ]
null
null
null
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from view.static_bitmap import StaticBitmap
from view.preview import PreviewPanel
from view.menu import Menu
22.285714
43
0.762821
23
156
5.130435
0.695652
0.20339
0
0
0
0
0
0
0
0
0
0.007353
0.128205
156
6
44
26
0.860294
0.269231
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
3cf031a71b3469d155d422c576f8093a2a605e54
23
py
Python
nlproc/spa/__init__.py
jgsogo/nlproc_spa
ba0c23a0c974f0be9243eac12d6b152b48c5fa49
[ "MIT" ]
null
null
null
nlproc/spa/__init__.py
jgsogo/nlproc_spa
ba0c23a0c974f0be9243eac12d6b152b48c5fa49
[ "MIT" ]
1
2017-07-10T18:39:29.000Z
2017-07-10T18:39:29.000Z
nlproc/spa/__init__.py
jgsogo/nlproc_spa
ba0c23a0c974f0be9243eac12d6b152b48c5fa49
[ "MIT" ]
null
null
null
import nlproc.spa.nltk
11.5
22
0.826087
4
23
4.75
1
0
0
0
0
0
0
0
0
0
0
0
0.086957
23
2
22
11.5
0.904762
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
a73a718f227ef25881b92cb8c89e9f4ee155d7e5
243
py
Python
utils/orchestrator/__init__.py
StackVista/sts-agent
f8358ea46820ffb9eb0b4b30c7d7457cc2cc987a
[ "BSD-3-Clause" ]
4
2017-03-18T12:16:40.000Z
2020-11-12T06:59:29.000Z
utils/orchestrator/__init__.py
StackVista/sts-agent
f8358ea46820ffb9eb0b4b30c7d7457cc2cc987a
[ "BSD-3-Clause" ]
18
2016-09-22T08:01:02.000Z
2020-07-15T08:30:17.000Z
utils/orchestrator/__init__.py
StackVista/sts-agent
f8358ea46820ffb9eb0b4b30c7d7457cc2cc987a
[ "BSD-3-Clause" ]
8
2016-11-23T06:55:51.000Z
2021-07-05T05:12:34.000Z
from ecsutil import ECSUtil # noqa: F401
from mesosutil import MesosUtil # noqa: F401
from nomadutil import NomadUtil # noqa: F401
from baseutil import BaseUtil # noqa: F401
from metadata_collector import MetadataCollector # noqa: F401
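These imports exist only to re-export the utility classes at package level; the `# noqa: F401` markers suppress flake8's unused-import warning. A hedged sketch of the consuming side, assuming the package is importable as `utils.orchestrator` as its file path suggests:

# Hypothetical consumer; the package name is inferred from the file path.
from utils.orchestrator import ECSUtil, MesosUtil  # names resolve via this __init__.py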
30.375
62
0.786008
31
243
6.129032
0.354839
0.210526
0.252632
0
0
0
0
0
0
0
0
0.074627
0.17284
243
7
63
34.714286
0.870647
0.222222
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
597b977d626498f81bf448a90b3f9a7a749dac98
28
py
Python
pyplotutils/orchestrate/__init__.py
arunreddy/pyplot-utils
ce31f67d023b87f6a0b35c370a0cc10bee313c0d
[ "MIT" ]
null
null
null
pyplotutils/orchestrate/__init__.py
arunreddy/pyplot-utils
ce31f67d023b87f6a0b35c370a0cc10bee313c0d
[ "MIT" ]
null
null
null
pyplotutils/orchestrate/__init__.py
arunreddy/pyplot-utils
ce31f67d023b87f6a0b35c370a0cc10bee313c0d
[ "MIT" ]
null
null
null
from . import orchestrateapi
28
28
0.857143
3
28
8
1
0
0
0
0
0
0
0
0
0
0
0
0.107143
28
1
28
28
0.96
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
599a986b0a0e6d989c30a4a67b806f4de192a463
43,659
py
Python
pirates/leveleditor/worldData/port_royal_area_jungle_c_1.py
itsyaboyrocket/pirates
6ca1e7d571c670b0d976f65e608235707b5737e3
[ "BSD-3-Clause" ]
3
2021-02-25T06:38:13.000Z
2022-03-22T07:00:15.000Z
pirates/leveleditor/worldData/port_royal_area_jungle_c_1.py
itsyaboyrocket/pirates
6ca1e7d571c670b0d976f65e608235707b5737e3
[ "BSD-3-Clause" ]
null
null
null
pirates/leveleditor/worldData/port_royal_area_jungle_c_1.py
itsyaboyrocket/pirates
6ca1e7d571c670b0d976f65e608235707b5737e3
[ "BSD-3-Clause" ]
1
2021-02-25T06:38:17.000Z
2021-02-25T06:38:17.000Z
# uncompyle6 version 3.2.0
# Python bytecode 2.4 (62061)
# Decompiled from: Python 2.7.14 (v2.7.14:84471935ed, Sep 16 2017, 20:19:30) [MSC v.1500 32 bit (Intel)]
# Embedded file name: pirates.leveleditor.worldData.port_royal_area_jungle_c_1
from pandac.PandaModules import Point3, VBase3, Vec4
objectStruct = {'Interact Links': [['1175892736.0dxschafe', '1165197469.59Shochet', 'Bi-directional'], ['1165197301.95Shochet', '1165197288.56Shochet', 'Bi-directional'], ['1175901440.0dxschafe', '1175892736.0dxschafe2', 'Bi-directional'], ['1175901568.0dxschafe', '1165197257.5Shochet', 'Bi-directional'], ['1175892864.0dxschafe', '1175901952.0dxschafe', 'Bi-directional'], ['1175892352.0dxschafe0', '1175902080.0dxschafe', 'Bi-directional']], 'Objects': {'1164141722.61sdnaik': {'Type': 'Island Game Area', 'Name': 'port_royal_area_jungle_c_1', 'File': '', 'Environment': 'Jungle', 'AdditionalData': ['JungleAreaC'], 'Footstep Sound': 'Sand', 'Instanced': True, 'Minimap': False, 'Objects': {'1164141948.44sdnaik': {'Type': 'Locator Node', 'Name': 'portal_interior_1', 'Hpr': VBase3(-4.256, 0.0, 0.0), 'Pos': Point3(-632.715, -263.407, 75.0), 'Scale': VBase3(1.0, 1.0, 1.0)}, '1164141948.45sdnaik': {'Type': 'Locator Node', 'Name': 'portal_interior_2', 'Hpr': VBase3(107.903, 0.0, 0.0), 'Pos': Point3(304.679, -408.087, 115.611), 'Scale': VBase3(1.0, 1.0, 1.0)}, '1164939070.28Shochet': {'Type': 'Spawn Node', 'Aggro Radius': '12.0000', 'AnimSet': 'default', 'Hpr': VBase3(128.928, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(-405.761, -124.137, 102.347), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Scorp T3', 'Start State': 'Idle', 'StartFrame': '0', 'Team': '1', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Model': 'models/misc/smiley'}}, '1164939086.73Shochet': {'Type': 'Spawn Node', 'Aggro Radius': '12.0000', 'AnimSet': 'default', 'Hpr': VBase3(-67.644, 0.0, 0.0), 'Min Population': '3', 'Patrol Radius': '12.0000', 'Pause Chance': '100', 'Pause Duration': '30', 'Pos': Point3(-372.829, -294.251, 101.077), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Scorp T3', 'Start State': 'Idle', 'StartFrame': '0', 'Team': '1', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Model': 'models/misc/smiley'}}, '1164939103.3Shochet': {'Type': 'Spawn Node', 'Aggro Radius': '12.0000', 'AnimSet': 'default', 'Hpr': VBase3(93.878, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(-268.592, -126.948, 117.51), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Scorp T3', 'Start State': 'Idle', 'StartFrame': '0', 'Team': '1', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Model': 'models/misc/smiley'}}, '1164939260.28Shochet': {'Type': 'Spawn Node', 'Aggro Radius': '12.0000', 'AnimSet': 'default', 'Hpr': Point3(0.0, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': '0', 'Pause Duration': '5', 'Pos': Point3(356.371, -315.345, 113.48), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Bat T4', 'Start State': 'Patrol', 'StartFrame': '0', 'Team': '1', 'TrailFX': 
'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Model': 'models/misc/smiley'}}, '1164939309.61Shochet': {'Type': 'Spawn Node', 'Aggro Radius': '12.0000', 'AnimSet': 'default', 'Hpr': VBase3(83.792, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(-495.787, -282.083, 87.745), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Scorp T3', 'Start State': 'Idle', 'StartFrame': '0', 'Team': '1', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Model': 'models/misc/smiley'}}, '1165197257.5Shochet': {'Type': 'Object Spawn Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(-312.947, -311.051, 107.246), 'Priority': '1', 'Scale': VBase3(1.0, 1.0, 1.0), 'SpawnDelay': '300', 'Spawnables': 'Buried Treasure', 'VisSize': '', 'Visual': {'Color': (0.8, 0.2, 0.65, 1), 'Model': 'models/misc/smiley'}, 'startingDepth': '12'}, '1165197288.56Shochet': {'Type': 'Object Spawn Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(101.459, 2.186, 117.559), 'Priority': '1', 'Scale': VBase3(1.0, 1.0, 1.0), 'SpawnDelay': '300', 'Spawnables': 'Buried Treasure', 'VisSize': '', 'Visual': {'Color': (0.8, 0.2, 0.65, 1), 'Model': 'models/misc/smiley'}, 'startingDepth': '12'}, '1165197301.95Shochet': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': VBase3(-27.089, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(120.252, 15.244, 116.711), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Scorp T3', 'Start State': 'Patrol', 'StartFrame': '0', 'Team': '1', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0.0, 0.0, 0.65, 1.0), 'Model': 'models/misc/smiley'}}, '1165197323.8Shochet': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': VBase3(143.13, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(205.686, 112.289, 111.874), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Gator T4', 'Start State': 'Idle', 'StartFrame': '0', 'Team': '1', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1165197469.59Shochet': {'Type': 'Object Spawn Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(-122.368, -172.88, 128.692), 'Priority': '1', 'Scale': VBase3(1.0, 1.0, 1.0), 'SpawnDelay': '20', 'Spawnables': 'Buried Treasure', 'VisSize': '', 'Visual': {'Color': (0.8, 0.2, 0.65, 1.0), 'Model': 'models/misc/smiley'}, 'startingDepth': '12'}, '1175891840.0dxschafe': {'Type': 'Spawn Node', 'Aggro Radius': '12.0000', 'AnimSet': 'default', 'Hpr': VBase3(158.581, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': '100', 'Pause Duration': '30', 'Pos': Point3(211.261, 154.38, 106.955), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Gator T4', 'Start State': 'Idle', 'StartFrame': '0', 'Team': '1', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1175892224.0dxschafe': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': VBase3(56.444, 
0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': '0', 'Pause Duration': '5', 'Pos': Point3(-44.099, -38.298, 123.644), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Bat T4', 'Start State': 'Patrol', 'StartFrame': '0', 'Team': '1', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1175892352.0dxschafe0': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': Point3(0.0, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(27.319, 152.447, 117.1), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Scorp T3', 'Start State': 'Ambush', 'StartFrame': '0', 'Team': '1', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1175892352.0dxschafe1': {'Type': 'Spawn Node', 'Aggro Radius': '12.0000', 'AnimSet': 'default', 'Hpr': Point3(0.0, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(18.93, 55.016, 119.339), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Scorp T3', 'Start State': 'Idle', 'StartFrame': '0', 'Team': '1', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1175892736.0dxschafe': {'Type': 'Spawn Node', 'Aggro Radius': '15.9639', 'AnimSet': 'default', 'Hpr': Point3(0.0, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(-111.044, -187.834, 128.609), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Wasp T3', 'Start State': 'Patrol', 'StartFrame': '0', 'Team': '1', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1175892736.0dxschafe1': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': Point3(0.0, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(-390.404, -374.448, 96.8), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Wasp T4', 'Start State': 'Idle', 'StartFrame': '0', 'Team': '1', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1175892736.0dxschafe2': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': Point3(0.0, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '1.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(-464.324, -399.697, 87.853), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Scorp T3', 'Start State': 'Ambush', 'StartFrame': '0', 'Team': '1', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1175892864.0dxschafe': {'Type': 'Spawn Node', 'Aggro Radius': '12.0000', 'AnimSet': 'default', 'Hpr': VBase3(-163.473, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': 
'12.0000', 'Pause Chance': '0', 'Pause Duration': '5', 'Pos': Point3(-463.553, -95.888, 96.742), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Bat T4', 'Start State': 'Patrol', 'StartFrame': '0', 'Team': '1', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1175901184.0dxschafe': {'Type': 'Object Spawn Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(265.836, 31.799, 111.406), 'Priority': '1', 'Scale': VBase3(1.0, 1.0, 1.0), 'SpawnDelay': '20', 'Spawnables': 'Buried Treasure', 'VisSize': '', 'Visual': {'Color': (0.8, 0.2, 0.65, 1), 'Model': 'models/misc/smiley'}, 'startingDepth': '12'}, '1175901312.0dxschafe': {'Type': 'Object Spawn Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(-28.718, -99.13, 124.045), 'Priority': '1', 'Scale': VBase3(1.0, 1.0, 1.0), 'SpawnDelay': '20', 'Spawnables': 'Buried Treasure', 'VisSize': '', 'Visual': {'Color': (0.8, 0.2, 0.65, 1), 'Model': 'models/misc/smiley'}, 'startingDepth': '12'}, '1175901440.0dxschafe': {'Type': 'Object Spawn Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(-509.896, -361.984, 83.883), 'Priority': '1', 'Scale': VBase3(1.0, 1.0, 1.0), 'SpawnDelay': '20', 'Spawnables': 'Buried Treasure', 'VisSize': '', 'Visual': {'Color': (0.8, 0.2, 0.65, 1), 'Model': 'models/misc/smiley'}, 'startingDepth': '12'}, '1175901568.0dxschafe': {'Type': 'Spawn Node', 'Aggro Radius': '12.0000', 'AnimSet': 'default', 'Hpr': VBase3(100.387, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(-314.675, -287.615, 107.733), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Wasp T4', 'Start State': 'Patrol', 'StartFrame': '0', 'Team': '1', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1175901696.0dxschafe': {'Type': 'Spawn Node', 'Aggro Radius': '14.1566', 'AnimSet': 'default', 'Hpr': Point3(0.0, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(-143.198, -72.187, 127.375), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Scorp T3', 'Start State': 'Idle', 'StartFrame': '0', 'Team': '1', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Model': 'models/misc/smiley'}}, '1175901696.0dxschafe0': {'Type': 'Spawn Node', 'Aggro Radius': '13.8554', 'AnimSet': 'default', 'Hpr': VBase3(113.394, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(33.63, 114.009, 117.659), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Scorp T3', 'Start State': 'Idle', 'StartFrame': '0', 'Team': '1', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Model': 'models/misc/smiley'}}, '1175901696.0dxschafe1': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': VBase3(159.441, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': '0', 'Pause Duration': '5', 'Pos': Point3(210.848, -47.225, 114.902), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 
'Spawnables': 'Scorp T3', 'Start State': 'Patrol', 'StartFrame': '0', 'Team': '1', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Model': 'models/misc/smiley'}}, '1175901696.0dxschafe2': {'Type': 'Spawn Node', 'Aggro Radius': '11.1446', 'AnimSet': 'default', 'Hpr': VBase3(175.144, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': '0', 'Pause Duration': '5', 'Pos': Point3(272.21, -163.32, 115.157), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Scorp T3', 'Start State': 'Patrol', 'StartFrame': '0', 'Team': '1', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Model': 'models/misc/smiley'}}, '1175901696.0dxschafe3': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': VBase3(125.192, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(329.874, -354.521, 114.541), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Scorp T3', 'Start State': 'Idle', 'StartFrame': '0', 'Team': '1', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Model': 'models/misc/smiley'}}, '1175901952.0dxschafe': {'Type': 'Object Spawn Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(-497.893, -114.69, 92.381), 'Priority': '1', 'Scale': VBase3(1.0, 1.0, 1.0), 'SpawnDelay': '20', 'Spawnables': 'Buried Treasure', 'VisSize': '', 'Visual': {'Color': (0.8, 0.2, 0.65, 1), 'Model': 'models/misc/smiley'}, 'startingDepth': '12'}, '1175902080.0dxschafe': {'Type': 'Object Spawn Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(27.056, 182.359, 116.509), 'Priority': '1', 'Scale': VBase3(1.0, 1.0, 1.0), 'SpawnDelay': '20', 'Spawnables': 'Buried Treasure', 'VisSize': '', 'Visual': {'Color': (0.8, 0.2, 0.65, 1), 'Model': 'models/misc/smiley'}, 'startingDepth': '12'}, '1179265791.47Aholdun': {'Type': 'Player Spawn Node', 'Hpr': VBase3(-56.324, 0.0, 0.0), 'Index': -1, 'Pos': Point3(-205.82, -74.384, 126.01), 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'All', 'Visual': {'Color': (0.5, 0.5, 0.5, 1), 'Model': 'models/misc/smiley'}}, '1179265841.94Aholdun': {'Type': 'Player Spawn Node', 'Hpr': VBase3(-70.028, 0.0, 0.0), 'Index': -1, 'Pos': Point3(-511.784, -195.934, 88.483), 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'All', 'Visual': {'Color': (0.5, 0.5, 0.5, 1), 'Model': 'models/misc/smiley'}}, '1179265866.19Aholdun': {'Type': 'Player Spawn Node', 'Hpr': VBase3(-68.126, 0.0, 0.0), 'Index': -1, 'Pos': Point3(-303.287, -101.963, 114.378), 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'All', 'Visual': {'Color': (0.5, 0.5, 0.5, 1), 'Model': 'models/misc/smiley'}}, '1179265884.3Aholdun': {'Type': 'Player Spawn Node', 'Hpr': VBase3(-61.773, 0.0, 0.0), 'Index': -1, 'Pos': Point3(-101.443, 8.502, 124.344), 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'All', 'Visual': {'Color': (0.5, 0.5, 0.5, 1), 'Model': 'models/misc/smiley'}}, '1179265965.55Aholdun': {'Type': 'Player Spawn Node', 'Hpr': VBase3(-37.707, 0.0, 0.0), 'Index': -1, 'Pos': Point3(-361.817, -234.733, 104.026), 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'All', 'Visual': {'Color': (0.5, 0.5, 0.5, 1), 'Model': 'models/misc/smiley'}}, '1188441856.0dxschafe': {'Type': 'Player Spawn Node', 'Hpr': VBase3(28.132, 0.0, 0.0), 'Index': -1, 'Pos': Point3(284.681, -333.714, 115.999), 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'All', 
'Visual': {'Color': (0.5, 0.5, 0.5, 1), 'Model': 'models/misc/smiley'}}, '1188441856.0dxschafe0': {'Type': 'Player Spawn Node', 'Hpr': VBase3(-160.871, 0.0, 0.0), 'Index': -1, 'Pos': Point3(151.044, 73.064, 114.509), 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'All', 'Visual': {'Color': (0.5, 0.5, 0.5, 1), 'Model': 'models/misc/smiley'}}, '1190846720.0dxschafe': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pause Chance': '0', 'Pause Duration': '5', 'Pos': Point3(366.65, -267.765, 112.937), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1190846720.0dxschafe0': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pause Chance': '0', 'Pause Duration': '5', 'Pos': Point3(317.426, -250.676, 114.547), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1190846720.0dxschafe1': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pause Chance': '0', 'Pause Duration': '5', 'Pos': Point3(287.599, -303.44, 115.776), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1190847360.0dxschafe': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pause Chance': '0', 'Pause Duration': '5', 'Pos': Point3(-533.511, -249.458, 84.517), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1190847360.0dxschafe0': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pause Chance': '0', 'Pause Duration': '5', 'Pos': Point3(-424.114, -319.024, 94.658), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1190847360.0dxschafe1': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pause Chance': '0', 'Pause Duration': '5', 'Pos': Point3(-339.944, -229.384, 106.61), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1190847488.0dxschafe': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pause Chance': '0', 'Pause Duration': '5', 'Pos': Point3(62.21, 124.587, 116.479), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1190847488.0dxschafe0': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pause Chance': '0', 'Pause Duration': '5', 'Pos': Point3(165.604, -4.669, 115.577), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1190847616.0dxschafe': {'Type': 'Spawn Node', 'Aggro Radius': '12.0000', 'AnimSet': 'default', 'Hpr': VBase3(176.71, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': '100', 'Pause Duration': '30', 'Pos': Point3(168.411, 140.387, 112.571), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Gator T4', 'Start State': 'Patrol', 'StartFrame': '0', 'Team': '1', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1192645760.0dxschafe': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pause Chance': '0', 'Pause Duration': '5', 'Pos': Point3(255.886, 44.895, 111.53), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1192645760.0dxschafe0': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pause Chance': '0', 'Pause Duration': '5', 'Pos': Point3(113.373, 14.589, 116.957), 'Scale': 
VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1192645888.0dxschafe': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pause Chance': '0', 'Pause Duration': '5', 'Pos': Point3(221.687, -85.284, 115.299), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1192645888.0dxschafe0': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pause Chance': '0', 'Pause Duration': '5', 'Pos': Point3(250.07, -148.329, 115.605), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1192645888.0dxschafe1': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pause Chance': '0', 'Pause Duration': '5', 'Pos': Point3(246.683, -204.326, 116.771), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1192646016.0dxschafe': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pause Chance': '0', 'Pause Duration': '5', 'Pos': Point3(204.477, -332.773, 118.728), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1192646016.0dxschafe0': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pause Chance': '0', 'Pause Duration': '5', 'Pos': Point3(305.498, -272.237, 115.041), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1192646144.0dxschafe': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pause Chance': '0', 'Pause Duration': '5', 'Pos': Point3(235.126, 51.284, 112.104), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1192646144.0dxschafe0': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pause Chance': '0', 'Pause Duration': '5', 'Pos': Point3(63.87, 96.958, 116.978), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1192646144.0dxschafe1': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pause Chance': '0', 'Pause Duration': '5', 'Pos': Point3(209.347, 178.181, 106.955), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1192646400.0dxschafe0': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pause Chance': '0', 'Pause Duration': '5', 'Pos': Point3(-1.619, 92.0, 119.292), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1192646400.0dxschafe1': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pause Chance': '0', 'Pause Duration': '5', 'Pos': Point3(-163.425, -120.076, 129.02), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1192646400.0dxschafe2': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pause Chance': '0', 'Pause Duration': '5', 'Pos': Point3(-504.682, -169.48, 90.039), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1230751967.29kmuller': {'Type': 'Collision Barrier', 'DisableCollision': False, 'Holiday': '', 'Hpr': VBase3(50.574, 0.0, 0.0), 'Pos': Point3(-41.362, 131.752, 119.192), 'Scale': VBase3(1.0, 1.0, 2.237), 'VisSize': '', 'Visual': {'Model': 'models/misc/pir_m_prp_lev_cambarrier_plane'}}, '1230752192.42kmuller': {'Type': 'Collision Barrier', 'DisableCollision': False, 'Holiday': '', 'Hpr': VBase3(86.59, 0.0, 0.0), 'Pos': Point3(-515.671, -361.698, 81.901), 
'Scale': VBase3(1.0, 1.0, 3.117), 'VisSize': '', 'Visual': {'Model': 'models/misc/pir_m_prp_lev_cambarrier_plane'}}, '1230752342.47kmuller': {'Type': 'Bush', 'DisableCollision': True, 'Holiday': '', 'Hpr': VBase3(58.152, 0.0, 0.0), 'Pos': Point3(380.918, -284.854, 112.761), 'Scale': VBase3(1.0, 1.0, 0.848), 'VisSize': '', 'Visual': {'Model': 'models/vegetation/bush_c'}}, '1230752387.67kmuller': {'Type': 'Collision Barrier', 'DisableCollision': False, 'Holiday': '', 'Hpr': VBase3(-44.786, 0.0, 0.0), 'Pos': Point3(385.341, -287.334, 111.74), 'Scale': VBase3(4.762, 2.112, 2.112), 'VisSize': '', 'Visual': {'Model': 'models/misc/pir_m_prp_lev_cambarrier_cube'}}, '1230752456.97kmuller': {'Type': 'Collision Barrier', 'DisableCollision': False, 'Holiday': '', 'Hpr': VBase3(-80.499, 0.0, 0.0), 'Pos': Point3(387.309, -301.839, 111.848), 'Scale': VBase3(1.461, 1.327, 2.416), 'VisSize': '', 'Visual': {'Model': 'models/misc/pir_m_prp_lev_cambarrier_plane'}}, '1230752640.42kmuller': {'Type': 'Collision Barrier', 'DisableCollision': False, 'Holiday': '', 'Hpr': VBase3(170.035, 0.0, 0.0), 'Pos': Point3(191.843, -342.681, 118.168), 'Scale': VBase3(1.675, 1.675, 2.78), 'VisSize': '', 'Visual': {'Model': 'models/misc/pir_m_prp_lev_cambarrier_plane'}}, '1230752671.32kmuller': {'Type': 'Collision Barrier', 'DisableCollision': False, 'Holiday': '', 'Hpr': VBase3(104.622, 0.0, 0.0), 'Pos': Point3(201.062, -363.66, 118.336), 'Scale': VBase3(0.763, 1.0, 2.724), 'VisSize': '', 'Visual': {'Model': 'models/misc/pir_m_prp_lev_cambarrier_plane'}}, '1240954760.37piwanow': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': Point3(0.0, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': '100', 'Pause Duration': '30', 'Pos': Point3(-467.452, -368.183, 88.418), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Wasp T4', 'Start State': 'Patrol', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1240954777.44piwanow': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(-494.747, -326.253, 86.6), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1240954779.93piwanow': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(-430.449, -341.382, 93.307), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1240954868.15piwanow': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': VBase3(-45.221, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(181.07, -90.795, 116.783), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Wasp T4', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1240961337.65piwanow': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': VBase3(299.655, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(-158.77, -138.719, 129.237), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 
'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Wasp T3', 'Start State': 'Patrol', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1240961889.02piwanow': {'Type': 'Spawn Node', 'Aggro Radius': '14.1566', 'AnimSet': 'default', 'Hpr': VBase3(40.914, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(-332.404, -330.502, 104.52), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Wasp T4', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1240962134.4piwanow': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': Point3(0.0, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(77.034, -31.858, 119.119), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Bat T4', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1240962151.72piwanow': {'Type': 'Spawn Node', 'Aggro Radius': '14.7590', 'AnimSet': 'default', 'Hpr': VBase3(-80.897, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(128.509, -48.108, 117.704), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Wasp T3', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1240962222.66piwanow': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': VBase3(-68.894, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(178.959, -321.47, 119.553), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Wasp T4', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1240962281.96piwanow': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': VBase3(131.795, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(369.213, -243.718, 112.753), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Bat T4', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1248385280.0jloehrle': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': Point3(0.0, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(-67.519, -132.104, 126.019), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': 
VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Wasp T3', 'Start State': 'Patrol', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}}, 'Visibility': 'Grid', 'Visual': {'Model': 'models/jungles/jungle_c_zero'}}}, 'TodSettings': {'AmbientColors': {0: Vec4(0.45, 0.53, 0.65, 1), 2: Vec4(1, 1, 1, 1), 4: Vec4(0.4, 0.45, 0.5, 1), 6: Vec4(0.44, 0.45, 0.56, 1), 8: Vec4(0.39, 0.42, 0.54, 1), 12: Vec4(0.34, 0.28, 0.41, 1), 13: Vec4(0.34, 0.28, 0.41, 1), 14: Vec4(0.66, 0.76, 0.41, 1), 15: Vec4(0.66, 0.76, 0.41, 1), 16: Vec4(0.25, 0.25, 0.25, 1), 17: Vec4(0.66, 0.76, 0.41, 1)}, 'DirectionalColors': {0: Vec4(0.55, 0.46, 0.35, 1), 2: Vec4(1, 1, 0.984314, 1), 4: Vec4(0.6, 0.34, 0.1, 1), 6: Vec4(0.46, 0.48, 0.45, 1), 8: Vec4(0.42, 0.42, 0.4, 1), 12: Vec4(0.66, 0.76, 0.05, 1), 13: Vec4(0.66, 0.76, 0.05, 1), 14: Vec4(0.3, 0.2, 0.53, 1), 15: Vec4(0.3, 0.2, 0.53, 1), 16: Vec4(0, 0, 0, 1), 17: Vec4(0.3, 0.2, 0.53, 1)}, 'FogColors': {0: Vec4(0.3, 0.2, 0.15, 0), 2: Vec4(0.6, 0.694118, 0.894118, 1), 4: Vec4(0.3, 0.18, 0.15, 0), 6: Vec4(0.15, 0.2, 0.35, 0), 8: Vec4(0.05, 0.06, 0.17, 0), 12: Vec4(0.1, 0.12, 0.03, 0), 13: Vec4(0.1, 0.12, 0.03, 0), 14: Vec4(0.1, 0.12, 0.03, 0), 15: Vec4(0.1, 0.12, 0.03, 0), 16: Vec4(0.25, 0.25, 0.25, 1), 17: Vec4(0.1, 0.12, 0.03, 0)}, 'FogRanges': {0: 0.0001, 2: 9.999999747378752e-05, 4: 0.0001, 6: 0.0001, 8: 0.0002, 12: 0.00025, 13: 0.00025, 14: 0.00025, 15: 0.00025, 16: 0.0001, 17: 0.005}, 'LinearFogRanges': {0: (0.0, 100.0), 2: (0.0, 100.0), 4: (0.0, 100.0), 6: (0.0, 100.0), 8: (0.0, 100.0), 12: (0.0, 100.0), 13: (0.0, 100.0), 14: (0.0, 100.0), 15: (0.0, 100.0), 16: (0.0, 100.0), 17: (0.0, 100.0)}}, 'Node Links': [['1190846720.0dxschafe', '1164939260.28Shochet', 'Bi-directional'], ['1190846720.0dxschafe', '1190846720.0dxschafe0', 'Bi-directional'], ['1190846720.0dxschafe1', '1190846720.0dxschafe0', 'Bi-directional'], ['1190846720.0dxschafe1', '1164939260.28Shochet', 'Bi-directional'], ['1190847360.0dxschafe', '1175892864.0dxschafe', 'Bi-directional'], ['1190847360.0dxschafe', '1190847360.0dxschafe0', 'Bi-directional'], ['1190847360.0dxschafe1', '1190847360.0dxschafe0', 'Bi-directional'], ['1190847360.0dxschafe1', '1175892864.0dxschafe', 'Bi-directional'], ['1175892224.0dxschafe', '1190847488.0dxschafe0', 'Bi-directional'], ['1190847488.0dxschafe0', '1190847488.0dxschafe', 'Bi-directional'], ['1175892224.0dxschafe', '1190847488.0dxschafe', 'Bi-directional'], ['1192645760.0dxschafe0', '1175901696.0dxschafe1', 'Bi-directional'], ['1192645760.0dxschafe0', '1192645760.0dxschafe', 'Bi-directional'], ['1192645760.0dxschafe', '1175901696.0dxschafe1', 'Bi-directional'], ['1192646016.0dxschafe', '1175901696.0dxschafe2', 'Bi-directional'], ['1192646016.0dxschafe', '1192646016.0dxschafe0', 'Bi-directional'], ['1192646016.0dxschafe0', '1175901696.0dxschafe2', 'Bi-directional'], ['1192646144.0dxschafe', '1175891840.0dxschafe', 'Bi-directional'], ['1192646144.0dxschafe', '1192646144.0dxschafe1', 'Bi-directional'], ['1192646144.0dxschafe0', '1192646144.0dxschafe1', 'Bi-directional'], ['1192646144.0dxschafe0', '1192646144.0dxschafe', 'Bi-directional'], ['1192646144.0dxschafe0', '1190847616.0dxschafe', 'Bi-directional'], ['1164939086.73Shochet', '1190847360.0dxschafe0', 'Bi-directional'], ['1192646400.0dxschafe1', '1192646400.0dxschafe2', 'Bi-directional'], ['1192646400.0dxschafe0', '1192646400.0dxschafe1', 'Bi-directional'], ['1192645888.0dxschafe0', '1192645888.0dxschafe', 
'Bi-directional'], ['1192645888.0dxschafe1', '1192645888.0dxschafe0', 'Bi-directional'], ['1240954760.37piwanow', '1240954777.44piwanow', 'Bi-directional'], ['1240954760.37piwanow', '1240954779.93piwanow', 'Bi-directional'], ['1240954777.44piwanow', '1240954779.93piwanow', 'Bi-directional']], 'Layers': {}, 'ObjectIds': {'1164141722.61sdnaik': '["Objects"]["1164141722.61sdnaik"]', '1164141948.44sdnaik': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1164141948.44sdnaik"]', '1164141948.45sdnaik': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1164141948.45sdnaik"]', '1164939070.28Shochet': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1164939070.28Shochet"]', '1164939086.73Shochet': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1164939086.73Shochet"]', '1164939103.3Shochet': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1164939103.3Shochet"]', '1164939260.28Shochet': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1164939260.28Shochet"]', '1164939309.61Shochet': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1164939309.61Shochet"]', '1165197257.5Shochet': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1165197257.5Shochet"]', '1165197288.56Shochet': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1165197288.56Shochet"]', '1165197301.95Shochet': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1165197301.95Shochet"]', '1165197323.8Shochet': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1165197323.8Shochet"]', '1165197469.59Shochet': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1165197469.59Shochet"]', '1175891840.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175891840.0dxschafe"]', '1175892224.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175892224.0dxschafe"]', '1175892352.0dxschafe0': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175892352.0dxschafe0"]', '1175892352.0dxschafe1': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175892352.0dxschafe1"]', '1175892736.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175892736.0dxschafe"]', '1175892736.0dxschafe1': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175892736.0dxschafe1"]', '1175892736.0dxschafe2': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175892736.0dxschafe2"]', '1175892864.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175892864.0dxschafe"]', '1175901184.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175901184.0dxschafe"]', '1175901312.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175901312.0dxschafe"]', '1175901440.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175901440.0dxschafe"]', '1175901568.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175901568.0dxschafe"]', '1175901696.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175901696.0dxschafe"]', '1175901696.0dxschafe0': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175901696.0dxschafe0"]', '1175901696.0dxschafe1': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175901696.0dxschafe1"]', '1175901696.0dxschafe2': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175901696.0dxschafe2"]', '1175901696.0dxschafe3': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175901696.0dxschafe3"]', '1175901952.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175901952.0dxschafe"]', '1175902080.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1175902080.0dxschafe"]', '1179265791.47Aholdun': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1179265791.47Aholdun"]', '1179265841.94Aholdun': 
'["Objects"]["1164141722.61sdnaik"]["Objects"]["1179265841.94Aholdun"]', '1179265866.19Aholdun': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1179265866.19Aholdun"]', '1179265884.3Aholdun': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1179265884.3Aholdun"]', '1179265965.55Aholdun': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1179265965.55Aholdun"]', '1188441856.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1188441856.0dxschafe"]', '1188441856.0dxschafe0': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1188441856.0dxschafe0"]', '1190846720.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1190846720.0dxschafe"]', '1190846720.0dxschafe0': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1190846720.0dxschafe0"]', '1190846720.0dxschafe1': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1190846720.0dxschafe1"]', '1190847360.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1190847360.0dxschafe"]', '1190847360.0dxschafe0': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1190847360.0dxschafe0"]', '1190847360.0dxschafe1': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1190847360.0dxschafe1"]', '1190847488.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1190847488.0dxschafe"]', '1190847488.0dxschafe0': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1190847488.0dxschafe0"]', '1190847616.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1190847616.0dxschafe"]', '1192645760.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1192645760.0dxschafe"]', '1192645760.0dxschafe0': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1192645760.0dxschafe0"]', '1192645888.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1192645888.0dxschafe"]', '1192645888.0dxschafe0': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1192645888.0dxschafe0"]', '1192645888.0dxschafe1': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1192645888.0dxschafe1"]', '1192646016.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1192646016.0dxschafe"]', '1192646016.0dxschafe0': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1192646016.0dxschafe0"]', '1192646144.0dxschafe': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1192646144.0dxschafe"]', '1192646144.0dxschafe0': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1192646144.0dxschafe0"]', '1192646144.0dxschafe1': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1192646144.0dxschafe1"]', '1192646400.0dxschafe0': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1192646400.0dxschafe0"]', '1192646400.0dxschafe1': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1192646400.0dxschafe1"]', '1192646400.0dxschafe2': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1192646400.0dxschafe2"]', '1230751967.29kmuller': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1230751967.29kmuller"]', '1230752192.42kmuller': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1230752192.42kmuller"]', '1230752342.47kmuller': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1230752342.47kmuller"]', '1230752387.67kmuller': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1230752387.67kmuller"]', '1230752456.97kmuller': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1230752456.97kmuller"]', '1230752640.42kmuller': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1230752640.42kmuller"]', '1230752671.32kmuller': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1230752671.32kmuller"]', '1240954760.37piwanow': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1240954760.37piwanow"]', '1240954777.44piwanow': 
'["Objects"]["1164141722.61sdnaik"]["Objects"]["1240954777.44piwanow"]', '1240954779.93piwanow': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1240954779.93piwanow"]', '1240954868.15piwanow': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1240954868.15piwanow"]', '1240961337.65piwanow': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1240961337.65piwanow"]', '1240961889.02piwanow': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1240961889.02piwanow"]', '1240962134.4piwanow': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1240962134.4piwanow"]', '1240962151.72piwanow': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1240962151.72piwanow"]', '1240962222.66piwanow': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1240962222.66piwanow"]', '1240962281.96piwanow': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1240962281.96piwanow"]', '1248385280.0jloehrle': '["Objects"]["1164141722.61sdnaik"]["Objects"]["1248385280.0jloehrle"]'}} extraInfo = {'camPos': Point3(-569.134, -428.277, 1468.57), 'camHpr': VBase3(-44.3142, -57.7401, 3.19909e-06), 'focalLength': 1.39999997616, 'skyState': -2, 'fog': 0}
6,237
43,198
0.627362
5,953
43,659
4.593146
0.112044
0.029185
0.028856
0.023699
0.697217
0.608382
0.574772
0.557949
0.534835
0.528472
0
0.223121
0.090772
43,659
7
43,199
6,237
0.465689
0.005314
0
0
0
0
0.559519
0.170877
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
1
1
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
6
59aacd96b5b9904aa0be9b69527626a51feb36fa
456
py
Python
tests/test_match_percent_pvalue.py
lparsons/archaic-match
9bdd986940983d2bc9861aa3711d7ab448e32321
[ "MIT" ]
null
null
null
tests/test_match_percent_pvalue.py
lparsons/archaic-match
9bdd986940983d2bc9861aa3711d7ab448e32321
[ "MIT" ]
9
2018-04-25T15:11:36.000Z
2021-07-27T14:50:06.000Z
tests/test_match_percent_pvalue.py
lparsons/archaic-match
9bdd986940983d2bc9861aa3711d7ab448e32321
[ "MIT" ]
1
2019-01-17T16:34:17.000Z
2019-01-17T16:34:17.000Z
from archaic_match.__main__ import calculate_thresholds


def test_calculate_thresholds_0():
    (lower, upper) = calculate_thresholds(0, 10)
    assert lower == 10
    assert upper == 10


def test_calculate_thresholds_1():
    (lower, upper) = calculate_thresholds(1, 10)
    assert lower == 9
    assert upper == 11


def test_calculate_thresholds_0_1():
    (lower, upper) = calculate_thresholds(0.1, 9)
    assert lower == 8.1
    assert upper == 9.9
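The three tests above differ only in their inputs and expected bounds, so they collapse naturally into one table-driven test. A sketch using pytest.mark.parametrize; the import path is taken from the file itself, while the parameter names `frac` and `value` are guesses at what calculate_thresholds' arguments mean:

import pytest
from archaic_match.__main__ import calculate_thresholds


@pytest.mark.parametrize("frac, value, expected_lower, expected_upper", [
    (0, 10, 10, 10),   # cases copied verbatim from the tests above
    (1, 10, 9, 11),
    (0.1, 9, 8.1, 9.9),
])
def test_calculate_thresholds(frac, value, expected_lower, expected_upper):
    lower, upper = calculate_thresholds(frac, value)
    assert lower == expected_lower
    assert upper == expected_upper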
22.8
55
0.699561
62
456
4.83871
0.290323
0.443333
0.266667
0.26
0.483333
0
0
0
0
0
0
0.065753
0.199561
456
19
56
24
0.756164
0
0
0
0
0
0
0
0
0
0
0
0.461538
1
0.230769
true
0
0.076923
0
0.307692
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
1
1
0
0
0
0
0
0
6
59ac166e89f4e2882c12d78158bffdfdc1b1539f
36,289
py
Python
pytest/functional/hs_file_types/test_model_instance_aggregation.py
hydroshare/hydroshare
bf9888bbe61507aff070b1dfcec2fdec1921468d
[ "BSD-3-Clause" ]
178
2015-01-08T23:03:36.000Z
2022-03-03T13:56:45.000Z
pytest/functional/hs_file_types/test_model_instance_aggregation.py
hydroshare/hydroshare
bf9888bbe61507aff070b1dfcec2fdec1921468d
[ "BSD-3-Clause" ]
4,125
2015-01-01T14:26:15.000Z
2022-03-31T16:38:55.000Z
pytest/functional/hs_file_types/test_model_instance_aggregation.py
hydroshare/hydroshare
bf9888bbe61507aff070b1dfcec2fdec1921468d
[ "BSD-3-Clause" ]
53
2015-03-15T17:56:51.000Z
2022-03-17T00:32:16.000Z
import os

import pytest
from django.core.exceptions import ValidationError
from django.core.files.uploadedfile import UploadedFile

from hs_core.hydroshare import add_file_to_resource, ResourceFile, add_resource_files
from hs_core.views.utils import move_or_rename_file_or_folder
from hs_file_types.forms import ModelInstanceMetadataValidationForm
from hs_file_types.models import (
    ModelInstanceLogicalFile,
    ModelProgramLogicalFile,
    NetCDFLogicalFile,
    GeoRasterLogicalFile,
    GeoFeatureLogicalFile,
    GenericLogicalFile,
    TimeSeriesLogicalFile,
    RefTimeseriesLogicalFile,
    FileSetLogicalFile
)


@pytest.mark.django_db(transaction=True)
def test_link_model_aggregations_same_resource(composite_resource_with_mi_aggregation, mock_irods):
    """Test that we can link one model instance aggregation to one model program
    aggregation within the same resource"""

    res, user = composite_resource_with_mi_aggregation
    assert ModelInstanceLogicalFile.objects.count() == 1
    mi_aggr = ModelInstanceLogicalFile.objects.first()
    # check that mi_aggr is not related to any model program aggregation
    assert mi_aggr.metadata.executed_by is None
    # create a model program aggregation
    file_path = 'pytest/assets/logan.vrt'
    upload_folder = ''
    file_to_upload = UploadedFile(file=open(file_path, 'rb'), name=os.path.basename(file_path))
    res_file = add_file_to_resource(
        res, file_to_upload, folder=upload_folder, check_target_folder=True
    )
    assert ModelProgramLogicalFile.objects.count() == 0
    # set file to model program aggregation type
    ModelProgramLogicalFile.set_file_type(res, user, res_file.id)
    assert ModelProgramLogicalFile.objects.count() == 1
    mp_aggr = ModelProgramLogicalFile.objects.first()
    # link model instance aggregation to model program aggregation
    mi_validation_form = ModelInstanceMetadataValidationForm(data={"executed_by": mp_aggr.id},
                                                             user=user, resource=res)
    assert mi_validation_form.is_valid()
    mi_validation_form.update_metadata(metadata=mi_aggr.metadata)
    mi_aggr = ModelInstanceLogicalFile.objects.first()
    # check that mi_aggr is related to model program aggregation
    assert mi_aggr.metadata.executed_by is not None
    assert not res.dangling_aggregations_exist()


@pytest.mark.django_db(transaction=True)
def test_model_instance_on_model_program_delete(composite_resource_with_mi_aggregation, mock_irods):
    """Test that when we remove/delete a model program aggregation, the linked model
    instance aggregation does not get deleted and the metadata of the model instance
    aggregation is set to dirty"""

    res, user = composite_resource_with_mi_aggregation
    assert ModelInstanceLogicalFile.objects.count() == 1
    mi_aggr = ModelInstanceLogicalFile.objects.first()
    # check that mi_aggr is not related to any model program aggregation
    assert mi_aggr.metadata.executed_by is None
    # create a model program aggregation
    file_path = 'pytest/assets/logan.vrt'
    upload_folder = ''
    file_to_upload = UploadedFile(file=open(file_path, 'rb'), name=os.path.basename(file_path))
    res_file = add_file_to_resource(
        res, file_to_upload, folder=upload_folder, check_target_folder=True
    )
    assert ModelProgramLogicalFile.objects.count() == 0
    # set file to model program aggregation type
    ModelProgramLogicalFile.set_file_type(res, user, res_file.id)
    assert ModelProgramLogicalFile.objects.count() == 1
    mp_aggr = ModelProgramLogicalFile.objects.first()
    # link model instance aggregation to model program aggregation
    mi_validation_form = ModelInstanceMetadataValidationForm(data={"executed_by": mp_aggr.id},
                                                             user=user, resource=res)
    assert mi_validation_form.is_valid()
    mi_validation_form.update_metadata(metadata=mi_aggr.metadata)
    mi_aggr = ModelInstanceLogicalFile.objects.first()
    # check that mi_aggr is related to model program aggregation
    assert mi_aggr.metadata.executed_by is not None
    assert mi_aggr.metadata.is_dirty is True
    # remove/delete mp_aggregation
    mp_aggr.remove_aggregation()
    assert ModelProgramLogicalFile.objects.count() == 0
    assert ModelInstanceLogicalFile.objects.count() == 1
    mi_aggr = ModelInstanceLogicalFile.objects.first()
    # check that mi_aggr is not related to any model program aggregation
    assert mi_aggr.metadata.executed_by is None
    # check that mi_aggr metadata is set to dirty
    assert mi_aggr.metadata.is_dirty is True
    assert not res.dangling_aggregations_exist()


@pytest.mark.django_db(transaction=True)
def test_model_instance_on_model_program_rename_1(composite_resource_with_mi_aggregation, mock_irods):
    """Test that when we rename a file that represents a model program aggregation,
    the linked model instance aggregation metadata is set to dirty"""

    res, user = composite_resource_with_mi_aggregation
    assert ModelInstanceLogicalFile.objects.count() == 1
    mi_aggr = ModelInstanceLogicalFile.objects.first()
    # check that mi_aggr is not related to any model program aggregation
    assert mi_aggr.metadata.executed_by is None
    # create a model program aggregation
    file_path = 'pytest/assets/logan.vrt'
    upload_folder = ''
    file_to_upload = UploadedFile(file=open(file_path, 'rb'), name=os.path.basename(file_path))
    res_file = add_file_to_resource(
        res, file_to_upload, folder=upload_folder, check_target_folder=True
    )
    assert ModelProgramLogicalFile.objects.count() == 0
    # set file to model program aggregation type
    ModelProgramLogicalFile.set_file_type(res, user, res_file.id)
    assert ModelProgramLogicalFile.objects.count() == 1
    mp_aggr = ModelProgramLogicalFile.objects.first()
    # link model instance aggregation to model program aggregation
    mi_validation_form = ModelInstanceMetadataValidationForm(data={"executed_by": mp_aggr.id},
                                                             user=user, resource=res)
    assert mi_validation_form.is_valid()
    mi_validation_form.update_metadata(metadata=mi_aggr.metadata)
    mi_aggr = ModelInstanceLogicalFile.objects.first()
    # check that mi_aggr is related to model program aggregation
    assert mi_aggr.metadata.executed_by is not None
    assert mi_aggr.metadata.is_dirty is True
    # rename the model program file name
    src_path = 'data/contents/{}'.format(res_file.file_name)
    tgt_path = 'data/contents/{}'.format("logan_1.vrt")
    move_or_rename_file_or_folder(user, res.short_id, src_path, tgt_path)
    assert ModelProgramLogicalFile.objects.count() == 1
    assert ModelInstanceLogicalFile.objects.count() == 1
    mi_aggr = ModelInstanceLogicalFile.objects.first()
    # check that mi_aggr metadata is set to dirty
    assert mi_aggr.metadata.is_dirty is True
    assert not res.dangling_aggregations_exist()


@pytest.mark.django_db(transaction=True)
def test_model_instance_on_model_program_rename_2(composite_resource_with_mi_aggregation, mock_irods):
    """Test that when we rename a folder that represents a model program aggregation,
    the linked model instance aggregation metadata is set to dirty"""

    res, user = composite_resource_with_mi_aggregation
    assert ModelInstanceLogicalFile.objects.count() == 1
    mi_aggr = ModelInstanceLogicalFile.objects.first()
    # check that mi_aggr is not related to any model program aggregation
    assert mi_aggr.metadata.executed_by is None
    # create a model program aggregation
    file_path = 'pytest/assets/logan.vrt'
    mp_folder = "mp_folder"
    ResourceFile.create_folder(res, mp_folder)
    file_to_upload = UploadedFile(file=open(file_path, 'rb'), name=os.path.basename(file_path))
    add_file_to_resource(
        res, file_to_upload, folder=mp_folder, check_target_folder=True
    )
    assert ModelProgramLogicalFile.objects.count() == 0
    # set file to model program aggregation type
    ModelProgramLogicalFile.set_file_type(res, user, folder_path=mp_folder)
    assert ModelProgramLogicalFile.objects.count() == 1
    mp_aggr = ModelProgramLogicalFile.objects.first()
    # link model instance aggregation to model program aggregation
    mi_validation_form = ModelInstanceMetadataValidationForm(data={"executed_by": mp_aggr.id},
                                                             user=user, resource=res)
    assert mi_validation_form.is_valid()
    mi_validation_form.update_metadata(metadata=mi_aggr.metadata)
    mi_aggr = ModelInstanceLogicalFile.objects.first()
    # check that mi_aggr is related to model program aggregation
    assert mi_aggr.metadata.executed_by is not None
    assert mi_aggr.metadata.is_dirty is True
    # rename the model program folder
    src_path = 'data/contents/{}'.format(mp_folder)
    tgt_path = 'data/contents/{}'.format("{}_1".format(mp_folder))
    move_or_rename_file_or_folder(user, res.short_id, src_path, tgt_path)
    assert ModelProgramLogicalFile.objects.count() == 1
    assert ModelInstanceLogicalFile.objects.count() == 1
    mi_aggr = ModelInstanceLogicalFile.objects.first()
    # check that mi_aggr metadata is set to dirty
    assert mi_aggr.metadata.is_dirty is True
    assert not res.dangling_aggregations_exist()


@pytest.mark.django_db(transaction=True)
def test_set_metadata(composite_resource_with_mi_aggregation, mock_irods):
    """Test that we can store all metadata items for a model instance aggregation"""

    res, _ = composite_resource_with_mi_aggregation
    mi_aggr = ModelInstanceLogicalFile.objects.first()
    # test extra metadata
    assert not mi_aggr.metadata.extra_metadata
    extra_meta = {'key1': 'value 1', 'key2': 'value 2'}
    mi_aggr.metadata.extra_metadata = extra_meta
    mi_aggr.metadata.save()
    assert mi_aggr.metadata.extra_metadata == extra_meta
    # test keywords
    assert not mi_aggr.metadata.keywords
    keywords = ['kw-1', 'kw-2']
    mi_aggr.metadata.keywords = keywords
    mi_aggr.metadata.save()
    assert mi_aggr.metadata.keywords == keywords
    # test coverage metadata
    assert not mi_aggr.metadata.coverages.all()
    value_dict = {'name': 'Name for period coverage', 'start': '1/1/2000', 'end': '12/12/2012'}
    temp_cov = mi_aggr.metadata.create_element('coverage', type='period', value=value_dict)
    assert temp_cov.value['name'] == 'Name for period coverage'
    assert temp_cov.value['start'] == '1/1/2000'
    assert temp_cov.value['end'] == '12/12/2012'
    assert mi_aggr.metadata.coverages.all().count() == 1
    value_dict = {'east': '56.45678', 'north': '12.6789', 'units': 'Decimal degree'}
    spatial_cov = mi_aggr.metadata.create_element('coverage', type='point', value=value_dict)
    assert spatial_cov.value['projection'] == 'WGS 84 EPSG:4326'
    assert spatial_cov.value['units'] == 'Decimal degree'
    assert spatial_cov.value['north'] == 12.6789
    assert spatial_cov.value['east'] == 56.45678
    assert mi_aggr.metadata.coverages.all().count() == 2
    # test model output metadata
    assert not mi_aggr.metadata.has_model_output
    mi_aggr.metadata.has_model_output = True
    mi_aggr.metadata.save()
    # test setting metadata json
    assert not mi_aggr.metadata.metadata_json
    # set mi metadata json from the content of the following file
    schema_file_path = 'pytest/assets/mi_metadata.json'
    with open(schema_file_path, 'r') as file_obj:
        meta_json = file_obj.read()
    assert len(meta_json) > 0
    mi_aggr.metadata.metadata_json = meta_json
    mi_aggr.metadata.save()
    mi_aggr = ModelInstanceLogicalFile.objects.first()
    assert mi_aggr.metadata.metadata_json
    assert not res.dangling_aggregations_exist()


@pytest.mark.django_db(transaction=True)
def test_auto_netcdf_aggregation_creation(composite_resource_with_mi_aggregation_folder, mock_irods):
    """Test that when a netcdf file is uploaded to a folder that represents a model
    instance aggregation, a netcdf aggregation is created automatically"""

    resource, _ = composite_resource_with_mi_aggregation_folder
    mi_aggr_path = ModelInstanceLogicalFile.objects.first().aggregation_name
    assert NetCDFLogicalFile.objects.count() == 0
    # upload a netcdf file to the mi_aggr_path - folder that represents the model instance aggregation
    nc_file_name = "netcdf_valid.nc"
    netcdf_file_path = "hs_file_types/tests/{}".format(nc_file_name)
    _add_files_to_resource(resource=resource, files_to_add=[netcdf_file_path],
                           upload_folder=mi_aggr_path)
    # there should be three resource files - one generated by the netcdf aggregation
    assert resource.files.all().count() == 3
    assert NetCDFLogicalFile.objects.count() == 1
    # the netcdf file added to the model instance folder should be part of a new netcdf aggregation
    nc_res_file = ResourceFile.get(resource=resource, file=nc_file_name, folder=mi_aggr_path)
    assert nc_res_file.has_logical_file
    # the netcdf aggregation should contain 2 files - the nc and the txt files
    assert NetCDFLogicalFile.objects.first().files.count() == 2
    assert not resource.dangling_aggregations_exist()


@pytest.mark.django_db(transaction=True)
def test_auto_raster_aggregation_creation(composite_resource_with_mi_aggregation_folder, mock_irods):
    """Test that when a raster file (.tif) is uploaded to a folder that represents a
    model instance aggregation, a raster aggregation is created automatically"""

    resource, _ = composite_resource_with_mi_aggregation_folder
    mi_aggr_path = ModelInstanceLogicalFile.objects.first().aggregation_name
    assert GeoRasterLogicalFile.objects.count() == 0
    # upload a raster file to the mi_aggr_path - folder that represents the model instance aggregation
    raster_file_name = 'small_logan.tif'
    raster_file_path = 'hs_file_types/tests/{}'.format(raster_file_name)
    _add_files_to_resource(resource=resource, files_to_add=[raster_file_path],
                           upload_folder=mi_aggr_path)
    # there should be three resource files (one extra vrt file added as part of raster aggregation creation)
    assert resource.files.all().count() == 3
    # there should be one raster aggregation now
    assert GeoRasterLogicalFile.objects.count() == 1
    # the tif file added to the model instance folder should be part of a new raster aggregation
    raster_res_file = ResourceFile.get(resource=resource, file=raster_file_name, folder=mi_aggr_path)
    assert raster_res_file.has_logical_file
    # the raster aggregation should contain 2 files (tif and vrt)
    assert GeoRasterLogicalFile.objects.first().files.count() == 2
    assert not resource.dangling_aggregations_exist()


@pytest.mark.django_db(transaction=True)
def test_auto_geofeature_aggregation_creation(composite_resource_with_mi_aggregation_folder, mock_irods):
    """Test that when files that represent a geofeature are uploaded to a folder that
    represents a model instance, a geofeature aggregation is created automatically"""

    resource, _ = composite_resource_with_mi_aggregation_folder
    mi_aggr_path = ModelInstanceLogicalFile.objects.first().aggregation_name
    assert GeoFeatureLogicalFile.objects.count() == 0
    # upload all 4 geo feature files to the mi_aggr_path - folder that represents the model instance aggregation
    base_data_file_path = 'hs_file_types/tests/data/{}'
    shp_file_name = "states.shp"
    shp_file_path = base_data_file_path.format(shp_file_name)
    shx_file_name = "states.shx"
    shx_file_path = base_data_file_path.format(shx_file_name)
    dbf_file_name = "states.dbf"
    dbf_file_path = base_data_file_path.format(dbf_file_name)
    prj_file_name = "states.prj"
    prj_file_path = base_data_file_path.format(prj_file_name)
    geo_feature_files = [shp_file_path, shx_file_path, dbf_file_path, prj_file_path]
    _add_files_to_resource(resource=resource, files_to_add=geo_feature_files,
                           upload_folder=mi_aggr_path)
    # there should be five resource files
    assert resource.files.all().count() == 5
    # the shp file added to the model instance folder should be part of a new geo feature aggregation
    shp_res_file = ResourceFile.get(resource=resource, file=shp_file_name, folder=mi_aggr_path)
    assert shp_res_file.has_logical_file
    # the geo feature aggregation should contain the 4 files that we uploaded
    assert GeoFeatureLogicalFile.objects.first().files.count() == 4
    assert not resource.dangling_aggregations_exist()


@pytest.mark.django_db(transaction=True)
def test_auto_timeseries_aggregation_creation(composite_resource_with_mi_aggregation_folder, mock_irods):
    """Test that when a timeseries sqlite file is uploaded to a folder that represents
    a model instance, a timeseries aggregation is created automatically from that
    sqlite file"""

    resource, _ = composite_resource_with_mi_aggregation_folder
    mi_aggr_path = ModelInstanceLogicalFile.objects.first().aggregation_name
    assert TimeSeriesLogicalFile.objects.count() == 0
    # upload a sqlite file to the mi_aggr_path - folder that represents the model instance aggregation
    sqlite_file_name = 'ODM2_Multi_Site_One_Variable.sqlite'
    sqlite_file_path = 'hs_file_types/tests/data/{}'.format(sqlite_file_name)
    _add_files_to_resource(resource=resource, files_to_add=[sqlite_file_path],
                           upload_folder=mi_aggr_path)
    # there should be 2 resource files
    assert resource.files.all().count() == 2
    # the sqlite file added to the model instance folder should be part of a new timeseries aggregation
    sqlite_res_file = ResourceFile.get(resource=resource, file=sqlite_file_name, folder=mi_aggr_path)
    assert sqlite_res_file.has_logical_file
    assert TimeSeriesLogicalFile.objects.count() == 1
    assert ModelInstanceLogicalFile.objects.first().files.count() == 1
    # the timeseries aggregation should contain 1 file
    assert TimeSeriesLogicalFile.objects.first().files.count() == 1
    assert not resource.dangling_aggregations_exist()


@pytest.mark.django_db(transaction=True)
def test_auto_ref_timeseries_aggregation_creation(composite_resource_with_mi_aggregation_folder, mock_irods):
    """Test that when a ref timeseries json file is uploaded to a folder that
    represents a model instance aggregation, a ref timeseries aggregation is created
    automatically from that json file"""

    resource, _ = composite_resource_with_mi_aggregation_folder
    assert ModelInstanceLogicalFile.objects.first().files.count() == 1
    mi_aggr_path = ModelInstanceLogicalFile.objects.first().aggregation_name
    assert RefTimeseriesLogicalFile.objects.count() == 0
    # upload a ref timeseries json file to the mi_aggr_path - folder that represents the model instance aggregation
    ref_timeseries_file_name = 'multi_sites_formatted_version1.0.refts.json'
    ref_timeseries_file_path = 'hs_file_types/tests/{}'.format(ref_timeseries_file_name)
    _add_files_to_resource(resource=resource, files_to_add=[ref_timeseries_file_path],
                           upload_folder=mi_aggr_path)
    # there should be 2 resource files
    assert resource.files.all().count() == 2
    # the json file added to the model instance folder should be part of a new ref timeseries aggregation
    ref_ts_res_file = ResourceFile.get(resource=resource, file=ref_timeseries_file_name,
                                       folder=mi_aggr_path)
    assert ref_ts_res_file.has_logical_file
    assert RefTimeseriesLogicalFile.objects.count() == 1
    assert ModelInstanceLogicalFile.objects.first().files.count() == 1
    # ref timeseries aggregation should contain 1 file
    assert RefTimeseriesLogicalFile.objects.first().files.count() == 1
    assert not resource.dangling_aggregations_exist()


@pytest.mark.django_db(transaction=True)
def test_canot_create_fileset_within_mi_aggregation(composite_resource_with_mi_aggregation_folder, mock_irods):
    """Test that one can't create a fileset aggregation inside a folder that
    represents a model instance aggregation"""

    resource, user = composite_resource_with_mi_aggregation_folder
    mi_aggr_path = ModelInstanceLogicalFile.objects.first().aggregation_name
    file_path = 'pytest/assets/logan.vrt'
    fs_folder = 'fileset_folder'
    fs_folder_path = os.path.join(mi_aggr_path, fs_folder)
    ResourceFile.create_folder(resource, fs_folder)
    _add_files_to_resource(resource=resource, files_to_add=[file_path],
                           upload_folder=fs_folder_path)
    # trying to set folder to fileset logical file type (aggregation) should fail
    assert FileSetLogicalFile.objects.count() == 0
    with pytest.raises(ValidationError):
        FileSetLogicalFile.set_file_type(resource, user, folder_path=fs_folder_path)
    assert FileSetLogicalFile.objects.count() == 0
    assert not resource.dangling_aggregations_exist()


@pytest.mark.django_db(transaction=True)
def test_canot_create_mi_aggregation_within_mi_aggregation(composite_resource_with_mi_aggregation_folder,
                                                           mock_irods):
    """Test that one can't create a model instance aggregation inside a folder that
    represents a model instance aggregation"""

    resource, user = composite_resource_with_mi_aggregation_folder
    mi_aggr_path = ModelInstanceLogicalFile.objects.first().aggregation_name
    assert ModelInstanceLogicalFile.objects.count() == 1
    file_path = 'pytest/assets/logan.vrt'
    mi_sub_folder = 'mi_sub_folder'
    mi_sub_folder_path = os.path.join(mi_aggr_path, mi_sub_folder)
    ResourceFile.create_folder(resource, mi_sub_folder)
    _add_files_to_resource(resource=resource, files_to_add=[file_path],
                           upload_folder=mi_sub_folder_path)
    # trying to set folder to model instance should fail
    assert ModelInstanceLogicalFile.objects.count() == 1
    with pytest.raises(ValidationError):
        ModelInstanceLogicalFile.set_file_type(resource, user, folder_path=mi_sub_folder_path)
    assert ModelInstanceLogicalFile.objects.count() == 1
    assert not resource.dangling_aggregations_exist()


@pytest.mark.django_db(transaction=True)
def test_move_single_file_aggr_into_model_instance_aggregation(composite_resource, mock_irods):
    """Test that we can move a single file aggregation into a folder that represents a
    model instance aggregation"""

    res, user = composite_resource
    file_path = 'pytest/assets/generic_file.txt'
    mi_folder = 'mi_folder'
    ResourceFile.create_folder(res, mi_folder)
    file_to_upload = UploadedFile(file=open(file_path, 'rb'), name=os.path.basename(file_path))
    add_file_to_resource(res, file_to_upload, folder=mi_folder, check_target_folder=True)
    assert res.files.count() == 1
    # at this point there should not be any model instance aggregation
    assert ModelInstanceLogicalFile.objects.count() == 0
    # set folder to model instance aggregation type
    ModelInstanceLogicalFile.set_file_type(resource=res, user=user, folder_path=mi_folder)
    res_file = res.files.first()
    assert res_file.has_logical_file
    # file has folder
    assert res_file.file_folder == mi_folder
    assert ModelInstanceLogicalFile.objects.count() == 1
    # create a single file aggregation
    single_file_name = 'logan.vrt'
    file_path = 'pytest/assets/{}'.format(single_file_name)
    file_to_upload = UploadedFile(file=open(file_path, 'rb'), name=os.path.basename(file_path))
    res_file = add_file_to_resource(res, file_to_upload, check_target_folder=True)
    # set file to generic logical file type (aggregation)
    GenericLogicalFile.set_file_type(res, user, res_file.id)
    assert GenericLogicalFile.objects.count() == 1
    # moving the logan.vrt file into mi_folder should be successful
    src_path = 'data/contents/{}'.format(single_file_name)
    tgt_path = 'data/contents/{}/{}'.format(mi_folder, single_file_name)
    move_or_rename_file_or_folder(user, res.short_id, src_path, tgt_path)
    assert not res.dangling_aggregations_exist()


@pytest.mark.django_db(transaction=True)
def test_update_spatial_coverage_from_children(composite_resource_with_mi_aggregation_folder, mock_irods):
    """Here we are testing model instance level spatial coverage update using the
    spatial data from the contained (children) aggregations - two child aggregations"""

    resource, user = composite_resource_with_mi_aggregation_folder
    assert ModelInstanceLogicalFile.objects.count() == 1
    mi_aggr = ModelInstanceLogicalFile.objects.first()
    # model aggr should not have any spatial coverage
    assert mi_aggr.metadata.spatial_coverage is None
    # auto create a raster aggregation inside the model instance aggregation
    assert GeoRasterLogicalFile.objects.count() == 0
    # upload a raster file to the mi_aggr_path - folder that represents the model instance aggregation
    raster_file_name = 'small_logan.tif'
    raster_file_path = 'hs_file_types/tests/{}'.format(raster_file_name)
    _add_files_to_resource(resource=resource, files_to_add=[raster_file_path],
                           upload_folder=mi_aggr.folder)
    # there should be three resource files (one extra vrt file added as part of raster aggregation creation)
    assert resource.files.all().count() == 3
    # there should be one raster aggregation now
    assert GeoRasterLogicalFile.objects.count() == 1
    mi_aggr = ModelInstanceLogicalFile.objects.first()
    # model aggr should now have spatial coverage
    assert mi_aggr.metadata.spatial_coverage is not None
    assert mi_aggr.metadata.spatial_coverage.value['northlimit'] == 42.0500269597691
    assert mi_aggr.metadata.spatial_coverage.value['eastlimit'] == -111.57773718106195
    assert mi_aggr.metadata.spatial_coverage.value['southlimit'] == 41.98722286029891
    assert mi_aggr.metadata.spatial_coverage.value['westlimit'] == -111.69756293084055
    # auto create a netcdf aggregation inside the model instance aggregation
    assert NetCDFLogicalFile.objects.count() == 0
    # upload a netcdf file to the folder that represents the model instance aggregation
    nc_file_name = "netcdf_valid.nc"
    netcdf_file_path = "hs_file_types/tests/{}".format(nc_file_name)
    _add_files_to_resource(resource=resource, files_to_add=[netcdf_file_path],
                           upload_folder=mi_aggr.folder)
    assert NetCDFLogicalFile.objects.count() == 1
    nc_aggr = NetCDFLogicalFile.objects.first()
    # netcdf aggr should have spatial coverage
    assert nc_aggr.metadata.spatial_coverage is not None
    # update model instance aggregation spatial coverage from the contained 2 aggregations
    mi_aggr.update_spatial_coverage()
    # test model instance aggregation spatial coverage data
    assert mi_aggr.metadata.spatial_coverage.value['northlimit'] == 42.0500269597691
    assert mi_aggr.metadata.spatial_coverage.value['eastlimit'] == -111.50594036845686
    assert mi_aggr.metadata.spatial_coverage.value['southlimit'] == 41.8639080745171
    assert mi_aggr.metadata.spatial_coverage.value['westlimit'] == -111.69756293084055
    assert not resource.dangling_aggregations_exist()


@pytest.mark.django_db(transaction=True)
def test_no_auto_update_spatial_coverage_from_children(composite_resource_with_mi_aggregation_folder,
                                                       mock_irods):
    """Here we are testing that model instance level spatial coverage auto update does
    not happen when a contained aggregation spatial coverage gets created as part of
    that aggregation creation, since the model instance aggregation has spatial
    coverage prior to the child aggregation creation"""

    resource, user = composite_resource_with_mi_aggregation_folder
    assert ModelInstanceLogicalFile.objects.count() == 1
    mi_aggr = ModelInstanceLogicalFile.objects.first()
    # model aggr should not have any spatial coverage
    assert mi_aggr.metadata.spatial_coverage is None
    # create spatial coverage for model instance
    value_dict = {'east': '56.45678', 'north': '12.6789', 'units': 'Decimal degree'}
    mi_aggr.metadata.create_element('coverage', type='point', value=value_dict)
    # model aggr should now have spatial coverage
    assert mi_aggr.metadata.spatial_coverage is not None
    # auto create a raster aggregation inside the model instance aggregation
    assert GeoRasterLogicalFile.objects.count() == 0
    # upload a raster file to the mi_aggr_path - folder that represents the model instance aggregation
    raster_file_name = 'small_logan.tif'
    raster_file_path = 'hs_file_types/tests/{}'.format(raster_file_name)
    _add_files_to_resource(resource=resource, files_to_add=[raster_file_path],
                           upload_folder=mi_aggr.folder)
    # there should be three resource files (one extra vrt file added as part of raster aggregation creation)
    assert resource.files.all().count() == 3
    # there should be one raster aggregation now
    assert GeoRasterLogicalFile.objects.count() == 1
    gr_aggr = GeoRasterLogicalFile.objects.first()
    # raster aggr should have spatial coverage
    assert gr_aggr.metadata.spatial_coverage is not None
    assert gr_aggr.metadata.spatial_coverage.value['northlimit'] == 42.0500269597691
    assert gr_aggr.metadata.spatial_coverage.value['eastlimit'] == -111.57773718106195
    assert gr_aggr.metadata.spatial_coverage.value['southlimit'] == 41.98722286029891
    assert gr_aggr.metadata.spatial_coverage.value['westlimit'] == -111.69756293084055
    # check model instance spatial coverage has not been updated
    assert mi_aggr.metadata.spatial_coverage.value['east'] == value_dict['east']
    assert mi_aggr.metadata.spatial_coverage.value['north'] == value_dict['north']
    assert not resource.dangling_aggregations_exist()


@pytest.mark.django_db(transaction=True)
def test_auto_update_temporal_coverage_from_children(composite_resource_with_mi_aggregation_folder,
                                                     mock_irods):
    """Here we are testing model instance level temporal coverage auto update when a
    contained aggregation temporal coverage gets created as part of that aggregation
    creation, provided the model instance aggregation has no temporal coverage prior
    to the child aggregation creation"""

    resource, user = composite_resource_with_mi_aggregation_folder
    assert ModelInstanceLogicalFile.objects.count() == 1
    mi_aggr = ModelInstanceLogicalFile.objects.first()
    # model aggr should not have any temporal coverage
    assert mi_aggr.metadata.temporal_coverage is None
    # auto create a netcdf aggregation inside the model instance aggregation
    assert NetCDFLogicalFile.objects.count() == 0
    # upload a netcdf file to the folder that represents the model instance aggregation
    nc_file_name = "netcdf_valid.nc"
    netcdf_file_path = "hs_file_types/tests/{}".format(nc_file_name)
    _add_files_to_resource(resource=resource, files_to_add=[netcdf_file_path],
                           upload_folder=mi_aggr.folder)
    assert NetCDFLogicalFile.objects.count() == 1
    nc_aggr = NetCDFLogicalFile.objects.first()
    # netcdf aggr should have temporal coverage
    assert nc_aggr.metadata.temporal_coverage is not None
    # model aggr should now have temporal coverage
    assert mi_aggr.metadata.temporal_coverage is not None
    # temporal coverage of the model instance aggregation should match with that of the contained
    # netcdf aggregation
    for temp_date in ('start', 'end'):
        assert mi_aggr.metadata.temporal_coverage.value[temp_date] == \
            nc_aggr.metadata.temporal_coverage.value[temp_date]
    assert not resource.dangling_aggregations_exist()


@pytest.mark.django_db(transaction=True)
def test_no_auto_update_temporal_coverage_from_children(composite_resource_with_mi_aggregation_folder,
                                                        mock_irods):
    """Here we are testing that model instance level temporal coverage auto update
    does not happen when a contained aggregation temporal coverage gets created as
    part of that aggregation creation, since the model instance aggregation has
    temporal coverage prior to the child aggregation creation"""

    resource, user = composite_resource_with_mi_aggregation_folder
    assert ModelInstanceLogicalFile.objects.count() == 1
    mi_aggr = ModelInstanceLogicalFile.objects.first()
    # model aggr should not have any temporal coverage
    assert mi_aggr.metadata.temporal_coverage is None
    # create temporal coverage for model instance
    value_dict = {'name': 'Name for period coverage', 'start': '1/1/2018', 'end': '12/12/2018'}
    mi_aggr.metadata.create_element('coverage', type='period', value=value_dict)
    # model aggr should now have temporal coverage
    assert mi_aggr.metadata.temporal_coverage is not None
    # auto create a netcdf aggregation inside the model instance aggregation
    assert NetCDFLogicalFile.objects.count() == 0
    # upload a netcdf file to the folder that represents the model instance aggregation
    nc_file_name = "netcdf_valid.nc"
    netcdf_file_path = "hs_file_types/tests/{}".format(nc_file_name)
    _add_files_to_resource(resource=resource, files_to_add=[netcdf_file_path],
                           upload_folder=mi_aggr.folder)
    assert NetCDFLogicalFile.objects.count() == 1
    nc_aggr = NetCDFLogicalFile.objects.first()
    # netcdf aggr should have temporal coverage
    assert nc_aggr.metadata.temporal_coverage is not None
    # temporal coverage of the model instance aggregation should NOT match with that of the contained
    # netcdf aggregation
    for temp_date in ('start', 'end'):
        assert mi_aggr.metadata.temporal_coverage.value[temp_date] != \
            nc_aggr.metadata.temporal_coverage.value[temp_date]
    assert not resource.dangling_aggregations_exist()


@pytest.mark.django_db(transaction=True)
def test_update_temporal_coverage_from_children(composite_resource_with_mi_aggregation_folder, mock_irods):
    """Here we are testing that model instance level temporal coverage can be updated
    by the user if the contained aggregations have temporal coverage"""

    resource, user = composite_resource_with_mi_aggregation_folder
    assert ModelInstanceLogicalFile.objects.count() == 1
    mi_aggr = ModelInstanceLogicalFile.objects.first()
    # model aggr should not have any temporal coverage
    assert mi_aggr.metadata.temporal_coverage is None
    # create temporal coverage for model instance
    value_dict = {'name': 'Name for period coverage', 'start': '1/1/2018', 'end': '12/12/2018'}
    mi_aggr.metadata.create_element('coverage', type='period', value=value_dict)
    # model aggr should now have temporal coverage
    assert mi_aggr.metadata.temporal_coverage is not None
    # auto create a netcdf aggregation inside the model instance aggregation
    assert NetCDFLogicalFile.objects.count() == 0
    # upload a netcdf file to the folder that represents the model instance aggregation
    nc_file_name = "netcdf_valid.nc"
    netcdf_file_path = "hs_file_types/tests/{}".format(nc_file_name)
    _add_files_to_resource(resource=resource, files_to_add=[netcdf_file_path],
                           upload_folder=mi_aggr.folder)
    assert NetCDFLogicalFile.objects.count() == 1
    nc_aggr = NetCDFLogicalFile.objects.first()
    # netcdf aggr should have temporal coverage
    assert nc_aggr.metadata.temporal_coverage is not None
    # temporal coverage of the model instance aggregation should NOT match with that of the contained
    # netcdf aggregation
    for temp_date in ('start', 'end'):
        assert mi_aggr.metadata.temporal_coverage.value[temp_date] != \
            nc_aggr.metadata.temporal_coverage.value[temp_date]
    # update temporal coverage for model instance from contained aggregations
    mi_aggr.update_temporal_coverage()
    # temporal coverage of the model instance aggregation should now match with that of the contained
    # netcdf aggregation
    for temp_date in ('start', 'end'):
        assert mi_aggr.metadata.temporal_coverage.value[temp_date] == \
            nc_aggr.metadata.temporal_coverage.value[temp_date]
    assert not resource.dangling_aggregations_exist()


def _add_files_to_resource(resource, files_to_add, upload_folder=None):
    files_to_upload = []
    for fl in files_to_add:
        file_to_upload = UploadedFile(file=open(fl, 'rb'), name=os.path.basename(fl))
        files_to_upload.append(file_to_upload)
    added_resource_files = add_resource_files(resource.short_id, *files_to_upload,
                                              folder=upload_folder)
    return added_resource_files
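The auto-aggregation tests above all follow one shape: upload a file into the model instance folder, then assert that exactly one aggregation of the expected type appeared. A hedged sketch of how they could be table-driven, reusing only names defined in this module; the file paths and expected counts are copied from the individual tests, and this consolidation is illustrative, not how the suite is actually organized:

@pytest.mark.django_db(transaction=True)
@pytest.mark.parametrize("file_path, aggr_cls, expected_res_files", [
    ("hs_file_types/tests/netcdf_valid.nc", NetCDFLogicalFile, 3),
    ("hs_file_types/tests/small_logan.tif", GeoRasterLogicalFile, 3),
    ("hs_file_types/tests/data/ODM2_Multi_Site_One_Variable.sqlite", TimeSeriesLogicalFile, 2),
])
def test_auto_aggregation_creation(file_path, aggr_cls, expected_res_files,
                                   composite_resource_with_mi_aggregation_folder, mock_irods):
    resource, _ = composite_resource_with_mi_aggregation_folder
    mi_aggr_path = ModelInstanceLogicalFile.objects.first().aggregation_name
    assert aggr_cls.objects.count() == 0
    _add_files_to_resource(resource=resource, files_to_add=[file_path],
                           upload_folder=mi_aggr_path)
    # uploading into the model instance folder should auto-create one aggregation
    assert resource.files.all().count() == expected_res_files
    assert aggr_cls.objects.count() == 1
    assert not resource.dangling_aggregations_exist()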
52.592754
120
0.766183
4,832
36,289
5.497724
0.057533
0.029588
0.036364
0.033126
0.846904
0.793111
0.769998
0.719443
0.702579
0.682101
0
0.012705
0.158478
36,289
689
121
52.669086
0.857194
0.254733
0
0.597727
0
0
0.057933
0.019747
0
0
0
0
0.370455
1
0.043182
false
0
0.018182
0
0.063636
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
59beec04b60808b475a746e87f1089d708b306b6
26,046
py
Python
d4s2_api_v1/tests_api.py
Duke-GCB/D4S2
47bef4b632967440608f2cc7a3fc31c32b2060fa
[ "MIT" ]
null
null
null
d4s2_api_v1/tests_api.py
Duke-GCB/D4S2
47bef4b632967440608f2cc7a3fc31c32b2060fa
[ "MIT" ]
138
2016-09-23T18:09:18.000Z
2022-03-03T15:50:19.000Z
d4s2_api_v1/tests_api.py
Duke-GCB/D4S2
47bef4b632967440608f2cc7a3fc31c32b2060fa
[ "MIT" ]
null
null
null
from django.core.urlresolvers import reverse
import rest_framework
from rest_framework.test import APITestCase
from mock import patch, Mock, call
from d4s2_api_v1.api import *
from d4s2_api.models import *
from django.contrib.auth.models import User as django_user
from switchboard.mocks_ddsutil import MockDDSProject, MockDDSUser
from gcb_web_auth.tests_dukeds_auth import ResponseStatusCodeTestCase
from rest_framework.test import APIRequestFactory


def setup_mock_ddsutil(mock_ddsutil):
    mock_ddsutil.return_value = Mock()
    mock_ddsutil.return_value.get_remote_user = Mock()
    mock_ddsutil.return_value.get_remote_user.return_value = MockDDSUser('Test User', 'test@example.com')
    mock_ddsutil.return_value.get_remote_project.return_value = MockDDSProject('My Project')
    mock_ddsutil.return_value.create_project_transfer.return_value = {'id': 'mock_ddsutil_transfer_id'}


class AuthenticatedResourceTestCase(APITestCase, ResponseStatusCodeTestCase):
    def setUp(self):
        username = 'api_user'
        password = 'secret'
        self.user = django_user.objects.create_user(username, password=password, is_staff=True)
        self.client.login(username=username, password=password)
        self.dds_id1 = 'user1'
        self.dds_id2 = 'user2'
        self.transfer_id1 = 'abcd-1234'
        self.transfer_id2 = 'efgh-5678'


class DeliveryViewTestCase(AuthenticatedResourceTestCase):
    def setUp(self):
        super(DeliveryViewTestCase, self).setUp()
        self.email_template_set = EmailTemplateSet.objects.create(name='someset')
        self.user_email_template_set = UserEmailTemplateSet.objects.create(
            user=self.user, email_template_set=self.email_template_set)

    def test_fails_unauthenticated(self):
        self.client.logout()
        url = reverse('ddsdelivery-list')
        response = self.client.post(url, {}, format='json')
        self.assertUnauthorized(response)

    @patch('d4s2_api_v1.api.DDSUtil')
    def test_create_delivery(self, mock_ddsutil):
        setup_mock_ddsutil(mock_ddsutil)
        url = reverse('ddsdelivery-list')
        data = {'project_id': 'project-id-2', 'from_user_id': 'user1', 'to_user_id': 'user2'}
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(DDSDelivery.objects.count(), 1)
        dds_delivery = DDSDelivery.objects.get()
        self.assertEqual(dds_delivery.from_user_id, 'user1')
        self.assertEqual(mock_ddsutil.return_value.create_project_transfer.call_count, 1)
        self.assertTrue(mock_ddsutil.return_value.create_project_transfer.called_with('project-id-2', ['user2']))
        self.assertEqual(dds_delivery.email_template_set, self.email_template_set)

    @patch('d4s2_api_v1.api.DDSUtil')
    def test_create_delivery_fails_when_user_not_setup(self, mock_ddsutil):
        self.user_email_template_set.delete()
        setup_mock_ddsutil(mock_ddsutil)
        url = reverse('ddsdelivery-list')
        data = {'project_id': 'project-id-2', 'from_user_id': 'user1', 'to_user_id': 'user2'}
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data, [EMAIL_TEMPLATES_NOT_SETUP_MSG])

    @patch('d4s2_api_v1.api.DDSUtil')
    def test_create_delivery_with_shared_ids(self, mock_ddsutil):
        setup_mock_ddsutil(mock_ddsutil)
        url = reverse('ddsdelivery-list')
        data = {'project_id': 'project-id-2', 'from_user_id': 'user1', 'to_user_id': 'user2'}
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(DDSDelivery.objects.count(), 1)
        self.assertEqual(DDSDelivery.objects.get().from_user_id, 'user1')
        self.assertEqual(mock_ddsutil.return_value.create_project_transfer.call_count, 1)
        self.assertTrue(mock_ddsutil.return_value.create_project_transfer.called_with('project-id-2', ['user2']))

    def test_list_deliveries(self):
        DDSDelivery.objects.create(project_id='project1', from_user_id='user1', to_user_id='user2',
                                   transfer_id=self.transfer_id1, email_template_set=self.email_template_set)
        DDSDelivery.objects.create(project_id='project2', from_user_id='user1', to_user_id='user2',
                                   transfer_id=self.transfer_id2, email_template_set=self.email_template_set)
        url = reverse('ddsdelivery-list')
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data), 2)

    def test_get_delivery(self):
        h = DDSDelivery.objects.create(project_id='project1', from_user_id='user1', to_user_id='user2',
                                       transfer_id=self.transfer_id1, email_template_set=self.email_template_set)
        url = reverse('ddsdelivery-detail', args=(h.pk,))
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['project_id'], 'project1')

    def test_delete_delivery(self):
        h = DDSDelivery.objects.create(project_id='project2', from_user_id='user1', to_user_id='user2',
                                       transfer_id=self.transfer_id1, email_template_set=self.email_template_set)
        url = reverse('ddsdelivery-detail', args=(h.pk,))
        response = self.client.delete(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(DDSDelivery.objects.count(), 0)

    @patch('d4s2_api_v1.api.DDSUtil')
    def test_update_delivery(self, mock_ddsutil):
        setup_mock_ddsutil(mock_ddsutil)
        h = DDSDelivery.objects.create(project_id='project2', from_user_id='user1', to_user_id='user2',
                                       transfer_id=self.transfer_id1, email_template_set=self.email_template_set)
        updated = {'from_user_id': self.dds_id1, 'to_user_id': self.dds_id2,
                   'project_id': 'project3', 'transfer_id': h.transfer_id}
        url = reverse('ddsdelivery-detail', args=(h.pk,))
        response = self.client.put(url, data=updated, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        h = DDSDelivery.objects.get(pk=h.pk)
        self.assertEqual(h.project_id, 'project3')

    def test_create_delivery_fails_with_transfer_id(self):
        url = reverse('ddsdelivery-list')
        data = {'project_id': 'project-id-2', 'from_user_id': 'user1', 'to_user_id': 'user2',
                'transfer_id': 'transfer123'}
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_filter_deliveries(self):
        h = DDSDelivery.objects.create(project_id='project2', from_user_id='user1', to_user_id='user2',
                                       transfer_id=self.transfer_id1, email_template_set=self.email_template_set)
        url = reverse('ddsdelivery-list')
        response = self.client.get(url, {'project_id': 'project2'}, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data), 1)
        response = self.client.get(url, {'project_id': 'project23'}, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data), 0)

    @patch('d4s2_api_v1.api.DDSMessageFactory')
    def test_send_delivery(self, mock_message_factory):
        instance = mock_message_factory.return_value.make_delivery_message.return_value
        instance.send = Mock()
        instance.email_text = 'email text'
        h = DDSDelivery.objects.create(project_id='project2', from_user_id='user1', to_user_id='user2',
                                       transfer_id='abcd', email_template_set=self.email_template_set)
        self.assertTrue(h.is_new())
        url = reverse('ddsdelivery-send', args=(h.pk,))
        response = self.client.post(url, data={}, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        h = DDSDelivery.objects.get(pk=h.pk)
        self.assertFalse(h.is_new())
        self.assertTrue(mock_message_factory.return_value.make_delivery_message.called)
        # Make sure transfer_id is in the email message
        ownership_url = reverse('ownership-prompt')
        expected_absolute_url = APIRequestFactory().request().build_absolute_uri(ownership_url) + \
            '?transfer_id=abcd&delivery_type=dds'
        mock_message_factory.return_value.make_delivery_message.assert_called_with(expected_absolute_url)
        self.assertTrue(instance.send.called)

    @patch('d4s2_api_v1.api.DDSMessageFactory')
    def test_send_delivery_with_null_template(self, mock_message_factory):
        instance = mock_message_factory.return_value.make_delivery_message.return_value
        instance.send = Mock()
        instance.email_text = 'email text'
        h = DDSDelivery.objects.create(project_id='project2', from_user_id='user1', to_user_id='user2',
                                       transfer_id='abcd', email_template_set=None)
        self.assertTrue(h.is_new())
        url = reverse('ddsdelivery-send', args=(h.pk,))
        response = self.client.post(url, data={}, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data, [ITEM_EMAIL_TEMPLATES_NOT_SETUP_MSG])

    @patch('d4s2_api_v1.api.DDSMessageFactory')
    def test_send_delivery_fails(self, mock_message_factory):
        instance = mock_message_factory.return_value.make_delivery_message.return_value
        instance.send = Mock()
        instance.email_text = 'email text'
        h = DDSDelivery.objects.create(project_id='project2', from_user_id='user1', to_user_id='user2',
                                       email_template_set=self.email_template_set)
        self.assertTrue(h.is_new())
        h.mark_notified('email text')
        url = reverse('ddsdelivery-send', args=(h.pk,))
        response = self.client.post(url, data={}, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertFalse(mock_message_factory.return_value.make_delivery_message.called)
        self.assertFalse(instance.send.called)

    @patch('d4s2_api_v1.api.DDSUtil')
    def test_deliver_with_user_message(self, mock_ddsutil):
        setup_mock_ddsutil(mock_ddsutil)
        url = reverse('ddsdelivery-list')
        user_message = 'User-specified delivery message'
        data = {'project_id': 'project-id-2', 'from_user_id': 'user1', 'to_user_id': 'user2',
                'user_message': user_message}
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(DDSDelivery.objects.count(), 1)
        self.assertEqual(DDSDelivery.objects.get().user_message, user_message)
        # create_project_transfer should be called once
        self.assertEqual(mock_ddsutil.return_value.create_project_transfer.call_count, 1)
        self.assertTrue(mock_ddsutil.return_value.create_project_transfer.called_with('project-id-2', ['user2']))

    @patch('d4s2_api_v1.api.DDSMessageFactory')
    def test_force_send_share(self, mock_message_factory):
        instance = mock_message_factory.return_value.make_delivery_message.return_value
        instance.send = Mock()
        instance.email_text = 'email text'
        d = DDSDelivery.objects.create(project_id='project2', from_user_id='user1', to_user_id='user2',
                                       email_template_set=self.email_template_set)
        d.mark_notified('email text')
        url = reverse('ddsdelivery-send', args=(d.pk,))
        response = self.client.post(url, data={'force': True}, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertTrue(mock_message_factory.called)
        self.assertTrue(mock_message_factory.return_value.make_delivery_message.called)
        self.assertTrue(instance.send.called)

    @patch('d4s2_api_v1.api.DDSUtil')
    def test_cancel_on_accepted(self, mock_ddsutil):
        d = DDSDelivery.objects.create(project_id='project2', from_user_id='user1', to_user_id='user2',
                                       email_template_set=self.email_template_set)
        d.mark_accepted('', '')
        url = reverse('ddsdelivery-cancel', args=(d.pk,))
        response = self.client.post(url, data={'force': True}, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    @patch('d4s2_api_v1.api.DDSUtil')
    @patch('d4s2_api_v1.api.DDSMessageFactory')
    def test_cancel_on_notified(self, mock_message_factory, mock_ddsutil):
        d = DDSDelivery.objects.create(project_id='project2', from_user_id='user1', to_user_id='user2',
                                       transfer_id='transfer1', email_template_set=self.email_template_set)
        d.mark_notified('')
        url = reverse('ddsdelivery-cancel', args=(d.pk,))
        response = self.client.post(url, data={'force': True}, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        d.refresh_from_db()
        self.assertEqual(d.state, State.CANCELED)
        mock_ddsutil.return_value.cancel_project_transfer.assert_called_with('transfer1')
        make_canceled_message_func = mock_message_factory.return_value.make_canceled_message
        self.assertTrue(make_canceled_message_func.called)
        self.assertTrue(make_canceled_message_func.return_value.send.called)

    @patch('d4s2_api_v1.api.DDSUtil')
    def test_cancel_with_null_template(self, mock_ddsutil):
        d = DDSDelivery.objects.create(project_id='project2', from_user_id='user1', to_user_id='user2',
                                       email_template_set=None)
        d.mark_accepted('', '')
        url = reverse('ddsdelivery-cancel', args=(d.pk,))
        response = self.client.post(url, data={'force': True}, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data, [ITEM_EMAIL_TEMPLATES_NOT_SETUP_MSG])


class ShareViewTestCase(AuthenticatedResourceTestCase):
    def setUp(self):
        super(ShareViewTestCase, self).setUp()
        self.email_template_set = EmailTemplateSet.objects.create(name='someset')
        self.user_email_template_set = UserEmailTemplateSet.objects.create(
            user=self.user, email_template_set=self.email_template_set)

    def test_fails_unauthenticated(self):
        self.client.logout()
        url = reverse('share-list')
        response = self.client.post(url, {}, format='json')
        self.assertUnauthorized(response)

    def test_create_share(self):
        url = reverse('share-list')
        data = {'project_id': 'project-id-2', 'from_user_id': 'user1', 'to_user_id': 'user2',
                'role': 'share_role'}
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(Share.objects.count(), 1)
        self.assertEqual(Share.objects.get().from_user_id, 'user1')
        self.assertEqual(Share.objects.get().role, 'share_role')
        self.assertEqual(Share.objects.get().email_template_set, self.email_template_set)

    def test_create_share_fails_when_user_not_setup(self):
        self.user_email_template_set.delete()
        url = reverse('share-list')
        data = {'project_id': 'project-id-2', 'from_user_id': 'user1', 'to_user_id': 'user2',
                'role': 'share_role'}
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data, [EMAIL_TEMPLATES_NOT_SETUP_MSG])

    def test_list_shares(self):
        Share.objects.create(project_id='project1', from_user_id='user1', to_user_id='user2',
                             email_template_set=self.email_template_set)
        Share.objects.create(project_id='project2', from_user_id='user1', to_user_id='user2',
                             email_template_set=self.email_template_set)
        url = reverse('share-list')
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data), 2)

    def test_get_share(self):
        d = Share.objects.create(project_id='project1', from_user_id='user1', to_user_id='user2',
                                 email_template_set=self.email_template_set)
        url = reverse('share-detail', args=(d.pk,))
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['project_id'], 'project1')

    def test_delete_share(self):
        d = Share.objects.create(project_id='project2', from_user_id='user1', to_user_id='user2',
                                 email_template_set=self.email_template_set)
        url = reverse('share-detail', args=(d.pk,))
        response = self.client.delete(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(Share.objects.count(), 0)

    def test_update_share(self):
        d = Share.objects.create(project_id='project2', from_user_id='user1', to_user_id='user2',
                                 email_template_set=self.email_template_set)
        updated = {'project_id': 'project3', 'from_user_id': 'fromuser1', 'to_user_id': 'touser1'}
        url = reverse('share-detail', args=(d.pk,))
        response = self.client.put(url, data=updated, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        d = Share.objects.get(pk=d.pk)
        self.assertEqual(d.project_id, 'project3')

    @patch('d4s2_api_v1.api.DDSMessageFactory')
    def test_send_share(self, mock_message_factory):
        instance = mock_message_factory.return_value.make_share_message.return_value
        instance.send = Mock()
        instance.email_text = 'email text'
        instance.send_template_name.return_value = 'deliver'
        d = Share.objects.create(project_id='project2', from_user_id='user1', to_user_id='user2',
                                 email_template_set=self.email_template_set)
        self.assertFalse(d.is_notified())
        url = reverse('share-send', args=(d.pk,))
        response = self.client.post(url, data={}, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        d = Share.objects.get(pk=d.pk)
        self.assertTrue(d.is_notified())
        mock_message_factory.assert_called_with(d, self.user)
        self.assertTrue(instance.send.called)

    def test_send_share_with_null_template(self):
        d = Share.objects.create(project_id='project2', from_user_id='user1', to_user_id='user2',
                                 email_template_set=None)
        self.assertFalse(d.is_notified())
        url = reverse('share-send', args=(d.pk,))
        response = self.client.post(url, data={}, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data, [ITEM_EMAIL_TEMPLATES_NOT_SETUP_MSG])

    @patch('d4s2_api_v1.api.DDSMessageFactory')
    def test_send_share_fails(self, mock_message_factory):
        instance = mock_message_factory.with_templates_from_user.return_value.make_share_message.return_value
        instance.send = Mock()
        d = Share.objects.create(project_id='project2', from_user_id='user1', to_user_id='user2',
                                 email_template_set=self.email_template_set)
        self.assertFalse(d.is_notified())
        d.mark_notified('email text')
        url = reverse('share-send', args=(d.pk,))
        response = self.client.post(url, data={}, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertFalse(mock_message_factory.return_value.make_share_message.called)
        self.assertFalse(instance.send.called)

    @patch('d4s2_api_v1.api.DDSMessageFactory')
    def test_force_send_share(self, mock_message_factory):
        instance = mock_message_factory.return_value.make_share_message.return_value
        instance.send = Mock()
        instance.email_text = 'email text'
        d = Share.objects.create(project_id='project2', from_user_id='user1', to_user_id='user2',
                                 email_template_set=self.email_template_set)
        self.assertFalse(d.is_notified())
        d.mark_notified('email text')
        url = reverse('share-send', args=(d.pk,))
        response = self.client.post(url, data={'force': True}, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        mock_message_factory.assert_called_with(d, self.user)
        self.assertTrue(instance.send.called)

    def test_filter_shares(self):
        Share.objects.create(project_id='project2', from_user_id='user1', to_user_id='user2',
                             email_template_set=self.email_template_set)
        url = reverse('share-list')
        response = self.client.get(url, {'to_user_id': 'user2'}, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data), 1)
        response = self.client.get(url, {'project_id': 'project23'}, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data), 0)

    def test_share_with_user_message(self):
        url = reverse('share-list')
        user_message = 'This is a user-specified share message'
        data = {'project_id': 'project-id-2', 'from_user_id': 'user1', 'to_user_id': 'user2',
                'role': 'share_role', 'user_message': user_message}
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(Share.objects.count(), 1)
        self.assertEqual(Share.objects.get().user_message, user_message)


class BuildAcceptUrlTestCase(APITestCase):
    def test_build_accept_url(self):
        request = Mock()
        transfer_id = '123'
        delivery_type = 'test'
        accept_url = build_accept_url(request, transfer_id, delivery_type)
        request.build_absolute_uri.assert_has_calls([call('/ownership/?transfer_id=123&delivery_type=test')])
        self.assertEqual(accept_url, request.build_absolute_uri.return_value)


class ModelWithEmailTemplateSetMixinTestCase(AuthenticatedResourceTestCase):
    def setUp(self):
        super(ModelWithEmailTemplateSetMixinTestCase, self).setUp()
        self.email_template_set = EmailTemplateSet.objects.create(name='someset')
        self.user_email_template_set = UserEmailTemplateSet.objects.create(
            user=self.user, email_template_set=self.email_template_set)

    @patch('d4s2_api_v1.api.Response')
    def test_create(self, mock_response):
        mock_serializer = Mock()
        mock_request = Mock(user=self.user, data={})
        mixin = ModelWithEmailTemplateSetMixin()
        mixin.get_serializer = Mock()
        mixin.get_serializer.return_value = mock_serializer
        mixin.get_success_headers = Mock()
        mixin.request = mock_request
        response = mixin.create(request=mock_request)
        self.assertEqual(response, mock_response.return_value)
        mixin.get_serializer.assert_called_with(data=mock_request.data)
        mock_serializer.is_valid.assert_called_with(raise_exception=True)
        mock_serializer.save.assert_called_with(email_template_set=self.email_template_set)
        mock_response.assert_called_with(
            mock_serializer.data, status=status.HTTP_201_CREATED,
            headers=mixin.get_success_headers.return_value
        )

    def test_get_email_template_for_request(self):
        mixin = ModelWithEmailTemplateSetMixin()
        mixin.request = Mock(user=self.user)
        mixin.request.data = {}
        email_template_set = mixin.get_email_template_for_request()
        self.assertEqual(email_template_set, self.email_template_set)
        self.user_email_template_set.delete()
        with self.assertRaises(rest_framework.exceptions.ValidationError) as raised_exception:
            mixin.get_email_template_for_request()
self.assertEqual(raised_exception.exception.detail[0], EMAIL_TEMPLATES_NOT_SETUP_MSG) @patch('d4s2_api_v1.api.EmailTemplateSet') def test_get_email_template_for_request_with_template_set_id(self, mock_email_template_set): other_email_template_set = EmailTemplateSet.objects.create(name='otherset') mixin = ModelWithEmailTemplateSetMixin() mixin.request = Mock(user=self.user) mixin.request.data = {'email_template_set_id': other_email_template_set.id} email_template_set = mixin.get_email_template_for_request() self.assertEqual(email_template_set, mock_email_template_set.get_for_user.return_value.get.return_value) mock_email_template_set.get_for_user.assert_called_with(mixin.request.user) mock_email_template_set.get_for_user.return_value.get.assert_called_with(pk=other_email_template_set.id) def test_prevent_null_email_template_set(self): mixin = ModelWithEmailTemplateSetMixin() mixin.get_object = Mock() mixin.get_object.return_value = Mock(email_template_set='something') mixin.prevent_null_email_template_set() mixin.get_object.return_value = Mock(email_template_set=None) with self.assertRaises(rest_framework.exceptions.ValidationError) as raised_exception: mixin.prevent_null_email_template_set() self.assertEqual(raised_exception.exception.detail[0], ITEM_EMAIL_TEMPLATES_NOT_SETUP_MSG)
56.25486
145
0.700991
3,277
26,046
5.255722
0.062862
0.065668
0.076177
0.040643
0.819253
0.790803
0.75974
0.73965
0.719735
0.702781
0
0.015159
0.184481
26,046
462
146
56.376623
0.795678
0.003494
0
0.634146
0
0
0.108084
0.024276
0
0
0
0
0.270732
1
0.1
false
0.007317
0.02439
0
0.136585
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
59e0f1053f6e2bf694ce3a36bfe5ae71efe2e778
296
py
Python
QaA/ask/tests.py
jedrzejkozal/QuestionsAndAnswers
24e8915295af08f1904cfe7c1ac2b1719586d7d7
[ "MIT" ]
null
null
null
QaA/ask/tests.py
jedrzejkozal/QuestionsAndAnswers
24e8915295af08f1904cfe7c1ac2b1719586d7d7
[ "MIT" ]
null
null
null
QaA/ask/tests.py
jedrzejkozal/QuestionsAndAnswers
24e8915295af08f1904cfe7c1ac2b1719586d7d7
[ "MIT" ]
null
null
null
from .test.SignUpViewTest import *
from .test.ProfileViewTest import *
from .test.UserViewTest import *
from .test.UnansweredViewTest import *
from .test.FriendsViewTest import *
from .test.FriendAcceptedTest import *
from .test.FriendSearchViewTest import *
from .test.SettingsViewTest import *
32.888889
40
0.810811
32
296
7.5
0.34375
0.266667
0.408333
0
0
0
0
0
0
0
0
0
0.108108
296
8
41
37
0.909091
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
abd230a6200beb85e495828bc3eb74a8c9349c05
28
py
Python
app/asset/__init__.py
LonglyCode/flask-blog
b7f36e8798c61aa1669ede59452f3ca446f5b9ce
[ "MIT" ]
2
2016-10-04T14:53:27.000Z
2019-01-11T02:08:47.000Z
app/asset/__init__.py
LonglyCode/flask-blog
b7f36e8798c61aa1669ede59452f3ca446f5b9ce
[ "MIT" ]
null
null
null
app/asset/__init__.py
LonglyCode/flask-blog
b7f36e8798c61aa1669ede59452f3ca446f5b9ce
[ "MIT" ]
null
null
null
from assets import init_app
14
27
0.857143
5
28
4.6
1
0
0
0
0
0
0
0
0
0
0
0
0.142857
28
1
28
28
0.958333
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
e6535edd668f5d4a8bdfeb8f2d610314638dadf0
620
py
Python
config/settings/third_party.py
HeLsEroC/bbr
0dd40bffd05faa777bec3a89dd1712f0f546d60e
[ "MIT" ]
null
null
null
config/settings/third_party.py
HeLsEroC/bbr
0dd40bffd05faa777bec3a89dd1712f0f546d60e
[ "MIT" ]
null
null
null
config/settings/third_party.py
HeLsEroC/bbr
0dd40bffd05faa777bec3a89dd1712f0f546d60e
[ "MIT" ]
null
null
null
AVATAR_AUTO_GENERATE_SIZES = 150

# Control the forms that django-allauth uses
ACCOUNT_FORMS = {
    "login": "allauth.account.forms.LoginForm",
    "add_email": "allauth.account.forms.AddEmailForm",
    "change_password": "allauth.account.forms.ChangePasswordForm",
    "set_password": "allauth.account.forms.SetPasswordForm",
    "reset_password": "allauth.account.forms.ResetPasswordForm",
    "reset_password_from_key": "allauth.account.forms.ResetPasswordKeyForm",
    "disconnect": "allauth.socialaccount.forms.DisconnectForm",
    # Use our custom signup form
    "signup": "ool.users.forms.UserCreationFormX",
}
41.333333
76
0.754839
66
620
6.924242
0.590909
0.183807
0.249453
0.177243
0
0
0
0
0
0
0
0.005484
0.117742
620
14
77
44.285714
0.829982
0.11129
0
0
1
0
0.715328
0.585766
0
0
0
0
0
1
0
false
0.363636
0
0
0
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
1
1
null
0
0
0
0
0
0
0
1
0
0
0
0
0
6
054941b2c6f8199cc6290426b5a88c54af98fcda
108
py
Python
terrascript/logentries/__init__.py
amlodzianowski/python-terrascript
1111affe6cd30d9b8b7bc74ae4e27590f7d4dc49
[ "BSD-2-Clause" ]
null
null
null
terrascript/logentries/__init__.py
amlodzianowski/python-terrascript
1111affe6cd30d9b8b7bc74ae4e27590f7d4dc49
[ "BSD-2-Clause" ]
null
null
null
terrascript/logentries/__init__.py
amlodzianowski/python-terrascript
1111affe6cd30d9b8b7bc74ae4e27590f7d4dc49
[ "BSD-2-Clause" ]
null
null
null
# terrascript/logentries/__init__.py

import terrascript


class logentries(terrascript.Provider):
    pass
13.5
39
0.796296
11
108
7.454545
0.727273
0
0
0
0
0
0
0
0
0
0
0
0.12963
108
7
40
15.428571
0.87234
0.314815
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
6
055f3a2aa174bac9185cf6da22e0f96df87cd830
46
py
Python
slack_bolt/authorization/__init__.py
korymath/bolt-python
67e0286d756ba92510315d044303f43b03380b52
[ "MIT" ]
160
2019-09-27T18:02:03.000Z
2022-03-15T23:46:40.000Z
slack_bolt/authorization/__init__.py
korymath/bolt-python
67e0286d756ba92510315d044303f43b03380b52
[ "MIT" ]
2
2019-10-21T13:30:17.000Z
2019-10-30T00:09:11.000Z
slack_bolt/authorization/__init__.py
korymath/bolt-python
67e0286d756ba92510315d044303f43b03380b52
[ "MIT" ]
31
2019-10-19T18:10:23.000Z
2022-02-28T14:13:19.000Z
from .authorize_result import AuthorizeResult
23
45
0.891304
5
46
8
1
0
0
0
0
0
0
0
0
0
0
0
0.086957
46
1
46
46
0.952381
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
5565e308c6bba82149d410985edb5110c5594249
28
py
Python
pycoloram/__init__.py
Timtaran/pycoloram
d973c4e607cd499b3937c2a242cd526511a81fdc
[ "MIT" ]
1
2020-12-17T20:21:18.000Z
2020-12-17T20:21:18.000Z
pycoloram/__init__.py
Timtaran/pycoloram
d973c4e607cd499b3937c2a242cd526511a81fdc
[ "MIT" ]
null
null
null
pycoloram/__init__.py
Timtaran/pycoloram
d973c4e607cd499b3937c2a242cd526511a81fdc
[ "MIT" ]
1
2020-12-14T06:51:46.000Z
2020-12-14T06:51:46.000Z
from .pycoloram import color
28
28
0.857143
4
28
6
1
0
0
0
0
0
0
0
0
0
0
0
0.107143
28
1
28
28
0.96
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
5594b4109e7ace819be4b96083715609ddd831f0
40
py
Python
contracts/__init__.py
viniciuschiele-archive/contracts
c9928d1120023b17e0ee1973294b1b495b69b570
[ "MIT" ]
null
null
null
contracts/__init__.py
viniciuschiele-archive/contracts
c9928d1120023b17e0ee1973294b1b495b69b570
[ "MIT" ]
null
null
null
contracts/__init__.py
viniciuschiele-archive/contracts
c9928d1120023b17e0ee1973294b1b495b69b570
[ "MIT" ]
null
null
null
from .contract import Contract, Context
20
39
0.825
5
40
6.6
0.8
0
0
0
0
0
0
0
0
0
0
0
0.125
40
1
40
40
0.942857
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
7579f8ccd76e05998fce26457c481a946199073c
22,689
py
Python
models.py
LTS4/TIGraNet
22ba11b665e8445f1f759c0d13414429d9a03265
[ "MIT" ]
8
2018-08-21T20:58:05.000Z
2020-05-15T03:42:06.000Z
models.py
LTS4/TIGraNet
22ba11b665e8445f1f759c0d13414429d9a03265
[ "MIT" ]
1
2020-12-24T05:12:14.000Z
2021-03-23T15:04:46.000Z
models.py
LTS4/TIGraNet
22ba11b665e8445f1f759c0d13414429d9a03265
[ "MIT" ]
3
2018-10-14T14:54:07.000Z
2021-02-28T21:59:22.000Z
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
Neural Networks models module.
"""

import numpy as np
import logging
import os

import torch
import torch.nn as nn
import torch.optim as optim
from torch.autograd import Variable

from layers import SpectralConv, DynamicPool, Statistic
from utils import init_mask
from configuration import *
from paths import SAVED_DATA, DEBUG_DIR_MNIST_012, DEBUG_DIR_MNIST_rot, DEBUG_DIR_ETH80

logger = logging.getLogger(__name__)


class TIGraNet(nn.Module):
    def __init__(self, dim, laplacian_matrix, shifted_laplacian_matrix, batch_size, learning_rate,
                 load_pretrained_weights=False, freeze_sc_weights=False):
        super(TIGraNet, self).__init__()

        self.num_nodes = dim**2
        self.laplacian_matrix = laplacian_matrix
        self.shifted_laplacian_matrix = shifted_laplacian_matrix
        self.batch_size = batch_size
        self.learning_rate = learning_rate
        self.mask = init_mask(num_nodes=self.num_nodes, batch_size=self.batch_size)
        self.load_pretrained_weights = load_pretrained_weights
        self.freeze_sc_weights = freeze_sc_weights
        self.loss_function = torch.nn.CrossEntropyLoss()

    def init_pretrained_weights(self, name):
        """Initialize the weights of the model with pretrained weights."""
        self.spectral_conv1.alpha.weight = nn.Parameter(torch.from_numpy(np.load(os.path.join(SAVED_DATA, name, 'parameters', 'alpha_0.npy'))))
        self.spectral_conv1.beta.weight = nn.Parameter(torch.from_numpy(np.load(os.path.join(SAVED_DATA, name, 'parameters', 'beta_0.npy'))).unsqueeze(0))
        self.spectral_conv2.alpha.weight = nn.Parameter(torch.from_numpy(np.load(os.path.join(SAVED_DATA, name, 'parameters', 'alpha_1.npy'))))
        self.spectral_conv2.beta.weight = nn.Parameter(torch.from_numpy(np.load(os.path.join(SAVED_DATA, name, 'parameters', 'beta_1.npy'))).unsqueeze(0))
        self.fully_connected[0].weight = nn.Parameter(torch.from_numpy(np.load(os.path.join(SAVED_DATA, name, 'parameters', 'W_1.npy'))).t())
        self.fully_connected[0].bias = nn.Parameter(torch.from_numpy(np.load(os.path.join(SAVED_DATA, name, 'parameters', 'b_1.npy'))))
        self.fully_connected[2].weight = nn.Parameter(torch.from_numpy(np.load(os.path.join(SAVED_DATA, name, 'parameters', 'W_2.npy'))).t())
        self.fully_connected[2].bias = nn.Parameter(torch.from_numpy(np.load(os.path.join(SAVED_DATA, name, 'parameters', 'b_2.npy'))))
        self.fully_connected[4].weight = nn.Parameter(torch.from_numpy(np.load(os.path.join(SAVED_DATA, name, 'parameters', 'W_3.npy'))).t())
        self.fully_connected[4].bias = nn.Parameter(torch.from_numpy(np.load(os.path.join(SAVED_DATA, name, 'parameters', 'b_3.npy'))))
        self.fully_connected[6].weight = nn.Parameter(torch.from_numpy(np.load(os.path.join(SAVED_DATA, name, 'parameters', 'W_last.npy'))).t())
        self.fully_connected[6].bias = nn.Parameter(torch.from_numpy(np.load(os.path.join(SAVED_DATA, name, 'parameters', 'b_last.npy'))))

        if name == 'mnist_012':
            self.spectral_conv3.alpha.weight = nn.Parameter(torch.from_numpy(np.load(os.path.join(SAVED_DATA, name, 'parameters', 'alpha_2.npy'))))
            self.spectral_conv3.beta.weight = nn.Parameter(torch.from_numpy(np.load(os.path.join(SAVED_DATA, name, 'parameters', 'beta_2.npy'))).unsqueeze(0))

    def prepare_input(self, input):
        input = input.view(self.batch_size, 1, self.num_nodes)
        input = input - torch.mean(input, 2, True)
        input = input.transpose(1, 2)

        return input

    def step(self, input, target, train):
        if train:
            self.train()
        else:
            self.eval()

        self.optimizer.zero_grad()
        out = self.forward(input)
        loss = self.loss_function(out, target)
        if train:
            loss.backward()
            self.optimizer.step()

        return loss.item()

    def predict(self, input):
        self.eval()
        output = self.forward(input)
        _, output = torch.max(output, 1)

        return output


class TIGraNet_mnist_012(TIGraNet):
    def __init__(self, dim, laplacian_matrix, shifted_laplacian_matrix, batch_size, learning_rate,
                 load_pretrained_weights=False, freeze_sc_weights=False):
        TIGraNet.__init__(self, dim, laplacian_matrix, shifted_laplacian_matrix, batch_size, learning_rate,
                          load_pretrained_weights, freeze_sc_weights)

        # Main layers
        self.spectral_conv1 = SpectralConv(
            batch_size=self.batch_size,
            num_nodes=self.num_nodes,
            filter_size_in=1,
            filter_size_out=3,
            degree_of_polynomial=4,
            laplacian_matrix=self.laplacian_matrix,
            mask=self.mask
        )
        self.dynamic_pool1 = DynamicPool(
            batch_size=self.batch_size,
            num_nodes=self.num_nodes,
            num_filters=3,
            num_active_nodes=200,
            mask=self.mask
        )
        self.spectral_conv2 = SpectralConv(
            batch_size=self.batch_size,
            num_nodes=self.num_nodes,
            filter_size_in=3,
            filter_size_out=6,
            degree_of_polynomial=4,
            laplacian_matrix=self.laplacian_matrix,
            mask=self.mask
        )
        self.dynamic_pool2 = DynamicPool(
            batch_size=self.batch_size,
            num_nodes=self.num_nodes,
            num_filters=6,
            num_active_nodes=100,
            mask=self.mask
        )
        self.spectral_conv3 = SpectralConv(
            batch_size=self.batch_size,
            num_nodes=self.num_nodes,
            filter_size_in=6,
            filter_size_out=9,
            degree_of_polynomial=4,
            laplacian_matrix=self.laplacian_matrix,
            mask=self.mask
        )
        self.dynamic_pool3 = DynamicPool(
            batch_size=self.batch_size,
            num_nodes=self.num_nodes,
            num_filters=9,
            num_active_nodes=50,
            mask=self.mask
        )
        self.statistic = Statistic(
            batch_size=self.batch_size,
            num_nodes=self.num_nodes,
            num_filters=9,
            degree_of_polynomial=9,
            shifted_laplacian_matrix=self.shifted_laplacian_matrix
        )
        self.fully_connected = nn.Sequential(
            nn.Linear(in_features=180, out_features=100),
            nn.ReLU(inplace=True),
            nn.Linear(in_features=100, out_features=80),
            nn.ReLU(inplace=True),
            nn.Linear(in_features=80, out_features=60),
            nn.ReLU(inplace=True),
            nn.Linear(in_features=60, out_features=3)
        )

        if load_pretrained_weights:
            self.init_pretrained_weights(name='mnist_012')
            # random checks
            assert (self.spectral_conv2.alpha.weight == nn.Parameter(torch.from_numpy(np.load(os.path.join(SAVED_DATA, 'mnist_012', 'parameters', 'alpha_1.npy'))))).all()
            assert (self.spectral_conv2.beta.weight == nn.Parameter(torch.from_numpy(np.load(os.path.join(SAVED_DATA, 'mnist_012', 'parameters', 'beta_1.npy'))).unsqueeze(0))).all()
            assert (self.fully_connected[2].weight == nn.Parameter(torch.from_numpy(np.load(os.path.join(SAVED_DATA, 'mnist_012', 'parameters', 'W_2.npy'))).t())).all()
            logger.info('Loaded pretrained weights.')
        else:
            self.init_weights_default()
            logger.info('Loaded weights using uniform distribution in [0,1].')

        if freeze_sc_weights:
            # freeze the parameters of the spectral conv layer
            for m in self.modules():
                if isinstance(m, SpectralConv):
                    m.alpha.weight.requires_grad = False
                    m.beta.weight.requires_grad = False
            logger.info('Freezed spectral conv weights.')

        self.optimizer = optim.Adam(filter(lambda p: p.requires_grad, self.parameters()), lr=learning_rate)
        logger.info('Loaded {} optimizer.'.format(type(self.optimizer).__name__))

    def init_weights_default(self):
        """Initialize the weights of the model with uniform distribution in [0,1]."""
        for m in self.modules():
            if isinstance(m, SpectralConv):
                nn.init.uniform_(m.alpha.weight)
                nn.init.uniform_(m.beta.weight)

    def forward(self, input):
        prepared_input = self.prepare_input(input)
        filter_operator1, y1, spectral_conv1 = self.spectral_conv1(prepared_input, self.mask)
        mask1, dynamic_pool1 = self.dynamic_pool1(spectral_conv1, self.mask)
        filter_operator2, y2, spectral_conv2 = self.spectral_conv2(spectral_conv1, mask1)
        mask2, dynamic_pool2 = self.dynamic_pool2(spectral_conv2, mask1)
        filter_operator3, y3, spectral_conv3 = self.spectral_conv3(spectral_conv2, mask2)
        mask3, dynamic_pool3 = self.dynamic_pool3(spectral_conv3, mask2)
        statistic = self.statistic(spectral_conv3, mask3)
        output = self.fully_connected(statistic)

        if GENERATE_SAVE:
            # save all intermediary steps for debugging
            variables = [prepared_input, filter_operator1, y1, spectral_conv1, filter_operator2, y2, spectral_conv2, filter_operator3, y3, spectral_conv3, mask1, mask2, mask3, statistic, output]
            variables_names = ['prepared_input', 'filter_operator1', 'y1', 'spectral_conv1', 'filter_operator2', 'y2', 'spectral_conv2', 'filter_operator3', 'y3', 'spectral_conv3', 'mask1', 'mask2', 'mask3', 'statistic', 'output']
            tuples = zip(variables, variables_names)
            for v, n in tuples:
                # np.save(DEBUG_DIR_MNIST_012 + 'constant_weights/' + n + '_p', v.detach().numpy())
                np.save(DEBUG_DIR_MNIST_012 + 'pretrained_weights/' + n + '_p_pw', v.detach().numpy())

        return output


class TIGraNet_mnist_rot(TIGraNet):
    def __init__(self, dim, laplacian_matrix, shifted_laplacian_matrix, batch_size, learning_rate,
                 load_pretrained_weights=False, freeze_sc_weights=False):
        TIGraNet.__init__(self, dim, laplacian_matrix, shifted_laplacian_matrix, batch_size, learning_rate,
                          load_pretrained_weights, freeze_sc_weights)

        # Main layers
        self.spectral_conv1 = SpectralConv(
            batch_size=self.batch_size,
            num_nodes=self.num_nodes,
            filter_size_in=1,
            filter_size_out=10,
            degree_of_polynomial=4,
            laplacian_matrix=self.laplacian_matrix,
            mask=self.mask
        )
        self.dynamic_pool1 = DynamicPool(
            batch_size=self.batch_size,
            num_nodes=self.num_nodes,
            num_filters=10,
            num_active_nodes=600,
            mask=self.mask
        )
        self.spectral_conv2 = SpectralConv(
            batch_size=self.batch_size,
            num_nodes=self.num_nodes,
            filter_size_in=10,
            filter_size_out=20,
            degree_of_polynomial=4,
            laplacian_matrix=self.laplacian_matrix,
            mask=self.mask
        )
        self.dynamic_pool2 = DynamicPool(
            batch_size=self.batch_size,
            num_nodes=self.num_nodes,
            num_filters=20,
            num_active_nodes=300,
            mask=self.mask
        )
        self.statistic = Statistic(
            batch_size=self.batch_size,
            num_nodes=self.num_nodes,
            num_filters=20,
            degree_of_polynomial=13,
            shifted_laplacian_matrix=self.shifted_laplacian_matrix
        )
        self.fully_connected = nn.Sequential(
            nn.Linear(in_features=560, out_features=500),
            nn.ReLU(inplace=True),
            nn.Linear(in_features=500, out_features=300),
            nn.ReLU(inplace=True),
            nn.Linear(in_features=300, out_features=100),
            nn.ReLU(inplace=True),
            nn.Linear(in_features=100, out_features=9)
        )

        if load_pretrained_weights:
            self.init_pretrained_weights(name='mnist_rot')
            # random checks
            assert (self.spectral_conv2.alpha.weight == nn.Parameter(torch.from_numpy(np.load(os.path.join(SAVED_DATA, 'mnist_rot', 'parameters', 'alpha_1.npy'))))).all()
            assert (self.spectral_conv2.beta.weight == nn.Parameter(torch.from_numpy(np.load(os.path.join(SAVED_DATA, 'mnist_rot', 'parameters', 'beta_1.npy'))).unsqueeze(0))).all()
            assert (self.fully_connected[2].weight == nn.Parameter(torch.from_numpy(np.load(os.path.join(SAVED_DATA, 'mnist_rot', 'parameters', 'W_2.npy'))).t())).all()
            logger.info('Loaded pretrained weights.')

        if freeze_sc_weights:
            # freeze the parameters of the spectral conv layer
            for m in self.modules():
                if isinstance(m, SpectralConv):
                    m.alpha.weight.requires_grad = False
                    m.beta.weight.requires_grad = False
            logger.info('Freezed spectral conv weights.')

        self.optimizer = optim.Adam(filter(lambda p: p.requires_grad, self.parameters()), lr=learning_rate)
        logger.info('Loaded {} optimizer.'.format(type(self.optimizer).__name__))

    def forward(self, input):
        prepared_input = self.prepare_input(input)
        filter_operator1, y1, spectral_conv1 = self.spectral_conv1(prepared_input, self.mask)
        mask1, dynamic_pool1 = self.dynamic_pool1(spectral_conv1, self.mask)
        filter_operator2, y2, spectral_conv2 = self.spectral_conv2(spectral_conv1, mask1)
        mask2, dynamic_pool2 = self.dynamic_pool2(spectral_conv2, mask1)
        statistic = self.statistic(spectral_conv2, mask2)
        output = self.fully_connected(statistic)

        if GENERATE_SAVE:
            # save all intermediary steps for debugging
            variables = [prepared_input, filter_operator1, y1, spectral_conv1, filter_operator2, y2, spectral_conv2, mask1, mask2, statistic, output]
            variables_names = ['prepared_input', 'filter_operator1', 'y1', 'spectral_conv1', 'filter_operator2', 'y2', 'spectral_conv2', 'mask1', 'mask2', 'statistic', 'output']
            tuples = zip(variables, variables_names)
            for v, n in tuples:
                np.save(DEBUG_DIR_MNIST_rot + n + '_p_pw', v.detach().numpy())

        return output


class TIGraNet_mnist_trans(TIGraNet):
    def __init__(self, dim, laplacian_matrix, shifted_laplacian_matrix, batch_size, learning_rate,
                 load_pretrained_weights=False, freeze_sc_weights=False):
        TIGraNet.__init__(self, dim, laplacian_matrix, shifted_laplacian_matrix, batch_size, learning_rate,
                          load_pretrained_weights, freeze_sc_weights)

        # Main layers
        self.spectral_conv1 = SpectralConv(
            batch_size=self.batch_size,
            num_nodes=self.num_nodes,
            filter_size_in=1,
            filter_size_out=10,
            degree_of_polynomial=7,
            laplacian_matrix=self.laplacian_matrix,
            mask=self.mask
        )
        self.dynamic_pool1 = DynamicPool(
            batch_size=self.batch_size,
            num_nodes=self.num_nodes,
            num_filters=10,
            num_active_nodes=600,
            mask=self.mask
        )
        self.spectral_conv2 = SpectralConv(
            batch_size=self.batch_size,
            num_nodes=self.num_nodes,
            filter_size_in=10,
            filter_size_out=20,
            degree_of_polynomial=7,
            laplacian_matrix=self.laplacian_matrix,
            mask=self.mask
        )
        self.dynamic_pool2 = DynamicPool(
            batch_size=self.batch_size,
            num_nodes=self.num_nodes,
            num_filters=20,
            num_active_nodes=300,
            mask=self.mask
        )
        self.statistic = Statistic(
            batch_size=self.batch_size,
            num_nodes=self.num_nodes,
            num_filters=20,
            degree_of_polynomial=11,
            shifted_laplacian_matrix=self.shifted_laplacian_matrix
        )
        self.fully_connected = nn.Sequential(
            nn.Linear(in_features=480, out_features=500),
            nn.ReLU(inplace=True),
            nn.Linear(in_features=500, out_features=300),
            nn.ReLU(inplace=True),
            nn.Linear(in_features=300, out_features=100),
            nn.ReLU(inplace=True),
            nn.Linear(in_features=100, out_features=9)
        )

        if load_pretrained_weights:
            self.init_pretrained_weights(name='mnist_trans')
            # random checks
            assert (self.spectral_conv2.alpha.weight == nn.Parameter(torch.from_numpy(np.load(os.path.join(SAVED_DATA, 'mnist_trans', 'parameters', 'alpha_1.npy'))))).all()
            assert (self.spectral_conv2.beta.weight == nn.Parameter(torch.from_numpy(np.load(os.path.join(SAVED_DATA, 'mnist_trans', 'parameters', 'beta_1.npy'))).unsqueeze(0))).all()
            assert (self.fully_connected[2].weight == nn.Parameter(torch.from_numpy(np.load(os.path.join(SAVED_DATA, 'mnist_trans', 'parameters', 'W_2.npy'))).t())).all()
            logger.info('Loaded pretrained weights.')

        if freeze_sc_weights:
            # freeze the parameters of the spectral conv layer
            for m in self.modules():
                if isinstance(m, SpectralConv):
                    m.alpha.weight.requires_grad = False
                    m.beta.weight.requires_grad = False
            logger.info('Freezed spectral conv weights.')

        self.optimizer = optim.Adam(filter(lambda p: p.requires_grad, self.parameters()), lr=learning_rate)
        logger.info('Loaded {} optimizer.'.format(type(self.optimizer).__name__))

    def forward(self, input):
        prepared_input = self.prepare_input(input)
        filter_operator1, y1, spectral_conv1 = self.spectral_conv1(prepared_input, self.mask)
        mask1, dynamic_pool1 = self.dynamic_pool1(spectral_conv1, self.mask)
        filter_operator2, y2, spectral_conv2 = self.spectral_conv2(spectral_conv1, mask1)
        mask2, dynamic_pool2 = self.dynamic_pool2(spectral_conv2, mask1)
        statistic = self.statistic(spectral_conv2, mask2)
        output = self.fully_connected(statistic)

        return output


class TIGraNet_eth80(TIGraNet):
    def __init__(self, dim, laplacian_matrix, shifted_laplacian_matrix, batch_size, learning_rate,
                 load_pretrained_weights=False, freeze_sc_weights=False):
        TIGraNet.__init__(self, dim, laplacian_matrix, shifted_laplacian_matrix, batch_size, learning_rate,
                          load_pretrained_weights, freeze_sc_weights)

        # Main layers
        self.spectral_conv1 = SpectralConv(
            batch_size=self.batch_size,
            num_nodes=self.num_nodes,
            filter_size_in=1,
            filter_size_out=10,
            degree_of_polynomial=5,
            laplacian_matrix=self.laplacian_matrix,
            mask=self.mask
        )
        self.dynamic_pool1 = DynamicPool(
            batch_size=self.batch_size,
            num_nodes=self.num_nodes,
            num_filters=10,
            num_active_nodes=600,
            mask=self.mask
        )
        self.spectral_conv2 = SpectralConv(
            batch_size=self.batch_size,
            num_nodes=self.num_nodes,
            filter_size_in=10,
            filter_size_out=20,
            degree_of_polynomial=5,
            laplacian_matrix=self.laplacian_matrix,
            mask=self.mask
        )
        self.dynamic_pool2 = DynamicPool(
            batch_size=self.batch_size,
            num_nodes=self.num_nodes,
            num_filters=20,
            num_active_nodes=300,
            mask=self.mask
        )
        self.statistic = Statistic(
            batch_size=self.batch_size,
            num_nodes=self.num_nodes,
            num_filters=20,
            degree_of_polynomial=11,
            shifted_laplacian_matrix=self.shifted_laplacian_matrix
        )
        self.fully_connected = nn.Sequential(
            nn.Linear(in_features=480, out_features=500),
            nn.ReLU(inplace=True),
            nn.Linear(in_features=500, out_features=300),
            nn.ReLU(inplace=True),
            nn.Linear(in_features=300, out_features=100),
            nn.ReLU(inplace=True),
            nn.Linear(in_features=100, out_features=8)
        )

        if load_pretrained_weights:
            self.init_pretrained_weights(name='eth80')
            # random checks
            assert (self.spectral_conv2.alpha.weight == nn.Parameter(torch.from_numpy(np.load(os.path.join(SAVED_DATA, 'eth80', 'parameters', 'alpha_1.npy'))))).all()
            assert (self.spectral_conv2.beta.weight == nn.Parameter(torch.from_numpy(np.load(os.path.join(SAVED_DATA, 'eth80', 'parameters', 'beta_1.npy'))).unsqueeze(0))).all()
            assert (self.fully_connected[2].weight == nn.Parameter(torch.from_numpy(np.load(os.path.join(SAVED_DATA, 'eth80', 'parameters', 'W_2.npy'))).t())).all()
            logger.info('Loaded pretrained weights.')

        if freeze_sc_weights:
            # freeze the parameters of the spectral conv layer
            for m in self.modules():
                if isinstance(m, SpectralConv):
                    m.alpha.weight.requires_grad = False
                    m.beta.weight.requires_grad = False
            logger.info('Freezed spectral conv weights.')

        self.optimizer = optim.Adam(filter(lambda p: p.requires_grad, self.parameters()), lr=learning_rate)
        logger.info('Loaded {} optimizer.'.format(type(self.optimizer).__name__))

    def forward(self, input):
        prepared_input = self.prepare_input(input)
        filter_operator1, y1, spectral_conv1 = self.spectral_conv1(prepared_input, self.mask)
        mask1, dynamic_pool1 = self.dynamic_pool1(spectral_conv1, self.mask)
        filter_operator2, y2, spectral_conv2 = self.spectral_conv2(spectral_conv1, mask1)
        mask2, dynamic_pool2 = self.dynamic_pool2(spectral_conv2, mask1)
        statistic = self.statistic(spectral_conv2, mask2)
        output = self.fully_connected(statistic)

        if GENERATE_SAVE:
            # save all intermediary steps for debugging
            variables = [prepared_input, filter_operator1, y1, spectral_conv1, filter_operator2, y2, spectral_conv2, mask1, mask2, statistic, output]
            variables_names = ['prepared_input', 'filter_operator1', 'y1', 'spectral_conv1', 'filter_operator2', 'y2', 'spectral_conv2', 'mask1', 'mask2', 'statistic', 'output']
            tuples = zip(variables, variables_names)
            for v, n in tuples:
                np.save(DEBUG_DIR_ETH80 + n + '_p_pw', v.detach().numpy())

        return output
46.115854
230
0.643219
2,782
22,689
4.970884
0.073329
0.037747
0.030082
0.037602
0.872731
0.852267
0.846482
0.841565
0.834478
0.827681
0
0.026603
0.249504
22,689
492
231
46.115854
0.78553
0.031469
0
0.666667
0
0
0.062771
0
0
0
0
0
0.029851
1
0.034826
false
0
0.027363
0
0.09204
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
75d4bfc81b4228273cf31268d9cb474049a13dd3
40
py
Python
cloudstorageio/enums/__init__.py
VahagnGhaz/cloudstorageio
c36e4382d730a46827b8a458d97c0ad57ad68ecb
[ "MIT" ]
2
2019-03-12T12:19:43.000Z
2019-03-13T12:33:44.000Z
cloudstorageio/enums/__init__.py
VahagnGhaz/cloudstorageio
c36e4382d730a46827b8a458d97c0ad57ad68ecb
[ "MIT" ]
null
null
null
cloudstorageio/enums/__init__.py
VahagnGhaz/cloudstorageio
c36e4382d730a46827b8a458d97c0ad57ad68ecb
[ "MIT" ]
null
null
null
from cloudstorageio.enums.enums import *
40
40
0.85
5
40
6.8
0.8
0
0
0
0
0
0
0
0
0
0
0
0.075
40
1
40
40
0.918919
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
f93d191413bd591321e944f382f1a9b5dacd6230
2,920
py
Python
Utils/Data/DataStats.py
MaurizioFD/recsys-challenge-2020-twitter
95dc024fb4f8777aa62e1304536daece640428de
[ "Apache-2.0" ]
44
2020-07-09T11:31:17.000Z
2022-03-04T05:50:48.000Z
Utils/Data/DataStats.py
kiminh/recsys-challenge-2020-twitter
567f0db40be7db3d21c360f2ca6cdf2addc7c698
[ "Apache-2.0" ]
3
2020-10-02T18:55:21.000Z
2020-10-13T22:13:58.000Z
Utils/Data/DataStats.py
kiminh/recsys-challenge-2020-twitter
567f0db40be7db3d21c360f2ca6cdf2addc7c698
[ "Apache-2.0" ]
9
2020-08-08T14:55:59.000Z
2021-09-06T09:17:03.000Z
import pathlib as pl
import RootPath
import json

from Utils.Data.Dictionary.TweetBasicFeaturesDictArray import CreatorIdTweetBasicFeatureDictArray
from Utils.Data.Dictionary.UserBasicFeaturesDictArray import IsVerifiedUserBasicFeatureDictArray
import scipy.sparse as sps


def get_max_user_id():
    info_path = RootPath.get_dataset_path().joinpath("info.json")
    if info_path.exists():
        with open(info_path, "r") as info_file:
            info = json.load(info_file)
        if "max_user_id" in info.keys():
            max_user_id = info['max_user_id']
        else:
            # compute once, then cache the value back into info.json
            max_user_id = len(IsVerifiedUserBasicFeatureDictArray().load_or_create())
            info['max_user_id'] = max_user_id
            with open(info_path, "w") as info_file:
                json.dump(info, info_file)
    else:
        info = {}
        max_user_id = len(IsVerifiedUserBasicFeatureDictArray().load_or_create())
        info['max_user_id'] = max_user_id
        with open(info_path, "w") as info_file:
            json.dump(info, info_file)
    return max_user_id


def get_max_tweet_id():
    info_path = RootPath.get_dataset_path().joinpath("info.json")
    if info_path.exists():
        with open(info_path, "r") as info_file:
            info = json.load(info_file)
        if "max_tweet_id" in info.keys():
            max_tweet_id = info['max_tweet_id']
        else:
            max_tweet_id = len(CreatorIdTweetBasicFeatureDictArray().load_or_create())
            info['max_tweet_id'] = max_tweet_id
            with open(info_path, "w") as info_file:
                json.dump(info, info_file)
    else:
        info = {}
        max_tweet_id = len(CreatorIdTweetBasicFeatureDictArray().load_or_create())
        info['max_tweet_id'] = max_tweet_id
        with open(info_path, "w") as info_file:
            json.dump(info, info_file)
    return max_tweet_id


def get_max_hashtags_id():
    info_path = RootPath.get_dataset_path().joinpath("info.json")
    if info_path.exists():
        with open(info_path, "r") as info_file:
            info = json.load(info_file)
        if "max_hashtag_id" in info.keys():
            max_hashtag_id = info['max_hashtag_id']
        else:
            max_hashtag_id = sps.load_npz(
                RootPath.get_dataset_path().joinpath("Sparse/sparse/tweet_hashtags_csr_matrix.npz")).shape[1]
            info['max_hashtag_id'] = max_hashtag_id
            with open(info_path, "w") as info_file:
                json.dump(info, info_file)
    else:
        info = {}
        max_hashtag_id = sps.load_npz(
            RootPath.get_dataset_path().joinpath("Sparse/sparse/tweet_hashtags_csr_matrix.npz")).shape[1]
        info['max_hashtag_id'] = max_hashtag_id
        with open(info_path, "w") as info_file:
            json.dump(info, info_file)
    return max_hashtag_id
42.941176
113
0.62774
375
2,920
4.546667
0.133333
0.084457
0.058065
0.084457
0.763636
0.737243
0.737243
0.737243
0.737243
0.737243
0
0.00094
0.271575
2,920
68
114
42.941176
0.800658
0
0
0.723077
0
0
0.092434
0.029442
0
0
0
0
0
1
0.046154
false
0
0.092308
0
0.184615
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
f991cba5420926c673cefeefbdb891a933a6924e
28
py
Python
bbfy/__init__.py
BubuDavid/Bubufy
55008f6737321b4c29c5cceb67b061680c7c3048
[ "MIT" ]
null
null
null
bbfy/__init__.py
BubuDavid/Bubufy
55008f6737321b4c29c5cceb67b061680c7c3048
[ "MIT" ]
null
null
null
bbfy/__init__.py
BubuDavid/Bubufy
55008f6737321b4c29c5cceb67b061680c7c3048
[ "MIT" ]
null
null
null
from bbfy.bbfy import Bubufy
28
28
0.857143
5
28
4.8
0.8
0
0
0
0
0
0
0
0
0
0
0
0.107143
28
1
28
28
0.96
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
f9d81b8854222245e16fa9d127e2f0c7705c5beb
22
py
Python
gamesim/__init__.py
HarryR/gamesim
c731f5e07a2a17f38ee96b1846be72c9c56bffc0
[ "MIT" ]
1
2019-02-10T01:23:06.000Z
2019-02-10T01:23:06.000Z
gamesim/__init__.py
HarryR/gamesim
c731f5e07a2a17f38ee96b1846be72c9c56bffc0
[ "MIT" ]
null
null
null
gamesim/__init__.py
HarryR/gamesim
c731f5e07a2a17f38ee96b1846be72c9c56bffc0
[ "MIT" ]
null
null
null
from .gamesim import *
22
22
0.772727
3
22
5.666667
1
0
0
0
0
0
0
0
0
0
0
0
0.136364
22
1
22
22
0.894737
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
ddb9102468670beb51f1646f4c9d0dd7807ca9c2
103,641
py
Python
tests/resource/test_sessions.py
UOC/dlkit
a9d265db67e81b9e0f405457464e762e2c03f769
[ "MIT" ]
2
2018-02-23T12:16:11.000Z
2020-10-08T17:54:24.000Z
tests/resource/test_sessions.py
UOC/dlkit
a9d265db67e81b9e0f405457464e762e2c03f769
[ "MIT" ]
87
2017-04-21T18:57:15.000Z
2021-12-13T19:43:57.000Z
tests/resource/test_sessions.py
UOC/dlkit
a9d265db67e81b9e0f405457464e762e2c03f769
[ "MIT" ]
1
2018-03-01T16:44:25.000Z
2018-03-01T16:44:25.000Z
"""Unit tests of resource sessions.""" import pytest from ..utilities.general import is_never_authz, is_no_authz, uses_cataloging, uses_filesystem_only from dlkit.abstract_osid.authentication.objects import AgentList from dlkit.abstract_osid.hierarchy.objects import Hierarchy from dlkit.abstract_osid.id.objects import IdList from dlkit.abstract_osid.osid import errors from dlkit.abstract_osid.osid.objects import OsidCatalogForm, OsidCatalog from dlkit.abstract_osid.osid.objects import OsidForm from dlkit.abstract_osid.osid.objects import OsidList from dlkit.abstract_osid.osid.objects import OsidNode from dlkit.abstract_osid.resource import objects as ABCObjects from dlkit.abstract_osid.resource import queries as ABCQueries from dlkit.abstract_osid.resource import searches as ABCSearches from dlkit.abstract_osid.resource.objects import Bin as ABCBin from dlkit.abstract_osid.resource.objects import Resource from dlkit.json_.id.objects import IdList from dlkit.primordium.id.primitives import Id from dlkit.primordium.type.primitives import Type from dlkit.runtime import PROXY_SESSION, proxy_example from dlkit.runtime.managers import Runtime REQUEST = proxy_example.SimpleRequest() CONDITION = PROXY_SESSION.get_proxy_condition() CONDITION.set_http_request(REQUEST) PROXY = PROXY_SESSION.get_proxy(CONDITION) DEFAULT_TYPE = Type(**{'identifier': 'DEFAULT', 'namespace': 'DEFAULT', 'authority': 'DEFAULT'}) DEFAULT_GENUS_TYPE = Type(**{'identifier': 'DEFAULT', 'namespace': 'GenusType', 'authority': 'DLKIT.MIT.EDU'}) ALIAS_ID = Id(**{'identifier': 'ALIAS', 'namespace': 'ALIAS', 'authority': 'ALIAS'}) NEW_TYPE = Type(**{'identifier': 'NEW', 'namespace': 'MINE', 'authority': 'YOURS'}) NEW_TYPE_2 = Type(**{'identifier': 'NEW 2', 'namespace': 'MINE', 'authority': 'YOURS'}) AGENT_ID = Id(**{'identifier': 'jane_doe', 'namespace': 'osid.agent.Agent', 'authority': 'MIT-ODL'}) AGENT_ID_0 = Id(**{'identifier': 'jane_doe', 'namespace': 'osid.agent.Agent', 'authority': 'MIT-ODL'}) AGENT_ID_1 = Id(**{'identifier': 'john_doe', 'namespace': 'osid.agent.Agent', 'authority': 'MIT-ODL'}) @pytest.fixture(scope="class", params=['TEST_SERVICE', 'TEST_SERVICE_ALWAYS_AUTHZ', 'TEST_SERVICE_NEVER_AUTHZ', 'TEST_SERVICE_CATALOGING', 'TEST_SERVICE_FILESYSTEM', 'TEST_SERVICE_MEMCACHE']) def resource_lookup_session_class_fixture(request): # Implemented from init template for ResourceLookupSession request.cls.service_config = request.param request.cls.svc_mgr = Runtime().get_service_manager( 'RESOURCE', proxy=PROXY, implementation=request.cls.service_config) request.cls.fake_id = Id('resource.Resource%3A000000000000000000000000%40DLKIT.MIT.EDU') @pytest.fixture(scope="function") def resource_lookup_session_test_fixture(request): request.cls.resource_list = list() request.cls.resource_ids = list() if not is_never_authz(request.cls.service_config): create_form = request.cls.svc_mgr.get_bin_form_for_create([]) create_form.display_name = 'Test Bin' create_form.description = 'Test Bin for ResourceLookupSession tests' request.cls.catalog = request.cls.svc_mgr.create_bin(create_form) for num in [0, 1]: create_form = request.cls.catalog.get_resource_form_for_create([]) create_form.display_name = 'Test Resource ' + str(num) create_form.description = 'Test Resource for ResourceLookupSession tests' obj = request.cls.catalog.create_resource(create_form) request.cls.resource_list.append(obj) request.cls.resource_ids.append(obj.ident) else: request.cls.catalog = request.cls.svc_mgr.get_resource_lookup_session(proxy=PROXY) request.cls.session = 
request.cls.catalog def test_tear_down(): if not is_never_authz(request.cls.service_config): for obj in request.cls.catalog.get_resources(): request.cls.catalog.delete_resource(obj.ident) request.cls.svc_mgr.delete_bin(request.cls.catalog.ident) request.addfinalizer(test_tear_down) @pytest.mark.usefixtures("resource_lookup_session_class_fixture", "resource_lookup_session_test_fixture") class TestResourceLookupSession(object): """Tests for ResourceLookupSession""" def test_get_bin_id(self): """Tests get_bin_id""" # From test_templates/resource.py ResourceLookupSession.get_bin_id_template if not is_never_authz(self.service_config): assert self.catalog.get_bin_id() == self.catalog.ident def test_get_bin(self): """Tests get_bin""" # is this test really needed? # From test_templates/resource.py::ResourceLookupSession::get_bin_template if not is_never_authz(self.service_config): assert isinstance(self.catalog.get_bin(), ABCBin) def test_can_lookup_resources(self): """Tests can_lookup_resources""" # From test_templates/resource.py ResourceLookupSession.can_lookup_resources_template assert isinstance(self.catalog.can_lookup_resources(), bool) def test_use_comparative_resource_view(self): """Tests use_comparative_resource_view""" # From test_templates/resource.py ResourceLookupSession.use_comparative_resource_view_template self.catalog.use_comparative_resource_view() def test_use_plenary_resource_view(self): """Tests use_plenary_resource_view""" # From test_templates/resource.py ResourceLookupSession.use_plenary_resource_view_template self.catalog.use_plenary_resource_view() def test_use_federated_bin_view(self): """Tests use_federated_bin_view""" # From test_templates/resource.py ResourceLookupSession.use_federated_bin_view_template self.catalog.use_federated_bin_view() def test_use_isolated_bin_view(self): """Tests use_isolated_bin_view""" # From test_templates/resource.py ResourceLookupSession.use_isolated_bin_view_template self.catalog.use_isolated_bin_view() def test_get_resource(self): """Tests get_resource""" # From test_templates/resource.py ResourceLookupSession.get_resource_template if self.svc_mgr.supports_resource_query(): if not is_never_authz(self.service_config): self.catalog.use_isolated_bin_view() obj = self.catalog.get_resource(self.resource_list[0].ident) assert obj.ident == self.resource_list[0].ident self.catalog.use_federated_bin_view() obj = self.catalog.get_resource(self.resource_list[0].ident) assert obj.ident == self.resource_list[0].ident else: with pytest.raises(errors.NotFound): self.catalog.get_resource(self.fake_id) else: if not is_never_authz(self.service_config): self.catalog.use_isolated_bin_view() obj = self.catalog.get_resource(self.resource_list[0].ident) assert obj.ident == self.resource_list[0].ident self.catalog.use_federated_bin_view() obj = self.catalog.get_resource(self.resource_list[0].ident) assert obj.ident == self.resource_list[0].ident else: with pytest.raises(errors.PermissionDenied): self.catalog.get_resource(self.fake_id) def test_get_resources_by_ids(self): """Tests get_resources_by_ids""" # From test_templates/resource.py ResourceLookupSession.get_resources_by_ids_template from dlkit.abstract_osid.resource.objects import ResourceList if self.svc_mgr.supports_resource_query(): objects = self.catalog.get_resources_by_ids(self.resource_ids) assert isinstance(objects, ResourceList) self.catalog.use_federated_bin_view() objects = self.catalog.get_resources_by_ids(self.resource_ids) assert isinstance(objects, ResourceList) if not 
is_never_authz(self.service_config): assert objects.available() > 0 else: assert objects.available() == 0 else: if not is_never_authz(self.service_config): objects = self.catalog.get_resources_by_ids(self.resource_ids) assert isinstance(objects, ResourceList) self.catalog.use_federated_bin_view() objects = self.catalog.get_resources_by_ids(self.resource_ids) assert objects.available() > 0 assert isinstance(objects, ResourceList) else: with pytest.raises(errors.PermissionDenied): self.catalog.get_resources_by_ids(self.resource_ids) def test_get_resources_by_genus_type(self): """Tests get_resources_by_genus_type""" # From test_templates/resource.py ResourceLookupSession.get_resources_by_genus_type_template from dlkit.abstract_osid.resource.objects import ResourceList if self.svc_mgr.supports_resource_query(): objects = self.catalog.get_resources_by_genus_type(DEFAULT_GENUS_TYPE) assert isinstance(objects, ResourceList) self.catalog.use_federated_bin_view() objects = self.catalog.get_resources_by_genus_type(DEFAULT_GENUS_TYPE) assert isinstance(objects, ResourceList) if not is_never_authz(self.service_config): assert objects.available() > 0 else: assert objects.available() == 0 else: if not is_never_authz(self.service_config): objects = self.catalog.get_resources_by_genus_type(DEFAULT_GENUS_TYPE) assert isinstance(objects, ResourceList) self.catalog.use_federated_bin_view() objects = self.catalog.get_resources_by_genus_type(DEFAULT_GENUS_TYPE) assert objects.available() > 0 assert isinstance(objects, ResourceList) else: with pytest.raises(errors.PermissionDenied): self.catalog.get_resources_by_genus_type(DEFAULT_GENUS_TYPE) def test_get_resources_by_parent_genus_type(self): """Tests get_resources_by_parent_genus_type""" # From test_templates/resource.py ResourceLookupSession.get_resources_by_parent_genus_type_template from dlkit.abstract_osid.resource.objects import ResourceList if self.svc_mgr.supports_resource_query(): if not is_never_authz(self.service_config): objects = self.catalog.get_resources_by_parent_genus_type(DEFAULT_GENUS_TYPE) assert isinstance(objects, ResourceList) self.catalog.use_federated_bin_view() objects = self.catalog.get_resources_by_parent_genus_type(DEFAULT_GENUS_TYPE) assert objects.available() == 0 assert isinstance(objects, ResourceList) else: with pytest.raises(errors.Unimplemented): # because the never_authz "tries harder" and runs the actual query... 
# whereas above the method itself in JSON returns an empty list self.catalog.get_resources_by_parent_genus_type(DEFAULT_GENUS_TYPE) else: if not is_never_authz(self.service_config): objects = self.catalog.get_resources_by_parent_genus_type(DEFAULT_GENUS_TYPE) assert isinstance(objects, ResourceList) self.catalog.use_federated_bin_view() objects = self.catalog.get_resources_by_parent_genus_type(DEFAULT_GENUS_TYPE) assert objects.available() == 0 assert isinstance(objects, ResourceList) else: with pytest.raises(errors.PermissionDenied): self.catalog.get_resources_by_parent_genus_type(DEFAULT_GENUS_TYPE) def test_get_resources_by_record_type(self): """Tests get_resources_by_record_type""" # From test_templates/resource.py ResourceLookupSession.get_resources_by_record_type_template from dlkit.abstract_osid.resource.objects import ResourceList if self.svc_mgr.supports_resource_query(): objects = self.catalog.get_resources_by_record_type(DEFAULT_TYPE) assert isinstance(objects, ResourceList) self.catalog.use_federated_bin_view() objects = self.catalog.get_resources_by_record_type(DEFAULT_TYPE) assert objects.available() == 0 assert isinstance(objects, ResourceList) else: if not is_never_authz(self.service_config): objects = self.catalog.get_resources_by_record_type(DEFAULT_TYPE) assert isinstance(objects, ResourceList) self.catalog.use_federated_bin_view() objects = self.catalog.get_resources_by_record_type(DEFAULT_TYPE) assert objects.available() == 0 assert isinstance(objects, ResourceList) else: with pytest.raises(errors.PermissionDenied): self.catalog.get_resources_by_record_type(DEFAULT_TYPE) def test_get_resources(self): """Tests get_resources""" # From test_templates/resource.py ResourceLookupSession.get_resources_template from dlkit.abstract_osid.resource.objects import ResourceList if self.svc_mgr.supports_resource_query(): objects = self.catalog.get_resources() assert isinstance(objects, ResourceList) self.catalog.use_federated_bin_view() objects = self.catalog.get_resources() assert isinstance(objects, ResourceList) if not is_never_authz(self.service_config): assert objects.available() > 0 else: assert objects.available() == 0 else: if not is_never_authz(self.service_config): objects = self.catalog.get_resources() assert isinstance(objects, ResourceList) self.catalog.use_federated_bin_view() objects = self.catalog.get_resources() assert objects.available() > 0 assert isinstance(objects, ResourceList) else: with pytest.raises(errors.PermissionDenied): self.catalog.get_resources() def test_get_resource_with_alias(self): if not is_never_authz(self.service_config): # Because you can't create the alias with NEVER_AUTHZ self.catalog.alias_resource(self.resource_ids[0], ALIAS_ID) obj = self.catalog.get_resource(ALIAS_ID) assert obj.get_id() == self.resource_ids[0] class FakeQuery: _cat_id_args_list = [] @pytest.fixture(scope="class", params=['TEST_SERVICE', 'TEST_SERVICE_ALWAYS_AUTHZ', 'TEST_SERVICE_NEVER_AUTHZ', 'TEST_SERVICE_CATALOGING', 'TEST_SERVICE_FILESYSTEM', 'TEST_SERVICE_MEMCACHE']) def resource_query_session_class_fixture(request): # From test_templates/resource.py::ResourceQuerySession::init_template request.cls.service_config = request.param request.cls.svc_mgr = Runtime().get_service_manager( 'RESOURCE', proxy=PROXY, implementation=request.cls.service_config) @pytest.fixture(scope="function") def resource_query_session_test_fixture(request): # From test_templates/resource.py::ResourceQuerySession::init_template request.cls.resource_list = list() request.cls.resource_ids = list() 
if not is_never_authz(request.cls.service_config): create_form = request.cls.svc_mgr.get_bin_form_for_create([]) create_form.display_name = 'Test Bin' create_form.description = 'Test Bin for ResourceQuerySession tests' request.cls.catalog = request.cls.svc_mgr.create_bin(create_form) for color in ['Orange', 'Blue', 'Green', 'orange']: create_form = request.cls.catalog.get_resource_form_for_create([]) create_form.display_name = 'Test Resource ' + color create_form.description = ( 'Test Resource for ResourceQuerySession tests, did I mention green') obj = request.cls.catalog.create_resource(create_form) request.cls.resource_list.append(obj) request.cls.resource_ids.append(obj.ident) else: request.cls.catalog = request.cls.svc_mgr.get_resource_query_session(proxy=PROXY) request.cls.session = request.cls.catalog def test_tear_down(): if not is_never_authz(request.cls.service_config): for obj in request.cls.catalog.get_resources(): request.cls.catalog.delete_resource(obj.ident) request.cls.svc_mgr.delete_bin(request.cls.catalog.ident) request.addfinalizer(test_tear_down) @pytest.mark.usefixtures("resource_query_session_class_fixture", "resource_query_session_test_fixture") class TestResourceQuerySession(object): """Tests for ResourceQuerySession""" def test_get_bin_id(self): """Tests get_bin_id""" # From test_templates/resource.py ResourceLookupSession.get_bin_id_template if not is_never_authz(self.service_config): assert self.catalog.get_bin_id() == self.catalog.ident def test_get_bin(self): """Tests get_bin""" # is this test really needed? # From test_templates/resource.py::ResourceLookupSession::get_bin_template if not is_never_authz(self.service_config): assert isinstance(self.catalog.get_bin(), ABCBin) def test_can_search_resources(self): """Tests can_search_resources""" # From test_templates/resource.py ResourceQuerySession::can_search_resources_template assert isinstance(self.session.can_search_resources(), bool) def test_use_federated_bin_view(self): """Tests use_federated_bin_view""" # From test_templates/resource.py ResourceLookupSession.use_federated_bin_view_template self.catalog.use_federated_bin_view() def test_use_isolated_bin_view(self): """Tests use_isolated_bin_view""" # From test_templates/resource.py ResourceLookupSession.use_isolated_bin_view_template self.catalog.use_isolated_bin_view() def test_get_resource_query(self): """Tests get_resource_query""" # From test_templates/resource.py ResourceQuerySession::get_resource_query_template query = self.session.get_resource_query() assert isinstance(query, ABCQueries.ResourceQuery) def test_get_resources_by_query(self): """Tests get_resources_by_query""" # From test_templates/resource.py ResourceQuerySession::get_resources_by_query_template # Need to add some tests with string types if not is_never_authz(self.service_config): query = self.session.get_resource_query() query.match_display_name('orange') assert self.catalog.get_resources_by_query(query).available() == 2 query.clear_display_name_terms() query.match_display_name('blue', match=False) assert self.session.get_resources_by_query(query).available() == 3 else: with pytest.raises(errors.PermissionDenied): self.session.get_resources_by_query(FakeQuery()) @pytest.fixture(scope="class", params=['TEST_SERVICE', 'TEST_SERVICE_ALWAYS_AUTHZ', 'TEST_SERVICE_NEVER_AUTHZ', 'TEST_SERVICE_CATALOGING', 'TEST_SERVICE_FILESYSTEM', 'TEST_SERVICE_MEMCACHE']) def resource_search_session_class_fixture(request): request.cls.service_config = request.param request.cls.resource_list = list() 
request.cls.resource_ids = list() request.cls.svc_mgr = Runtime().get_service_manager( 'RESOURCE', proxy=PROXY, implementation=request.cls.service_config) if not is_never_authz(request.cls.service_config): create_form = request.cls.svc_mgr.get_bin_form_for_create([]) create_form.display_name = 'Test Bin' create_form.description = 'Test Bin for ResourceSearchSession tests' request.cls.catalog = request.cls.svc_mgr.create_bin(create_form) for color in ['Orange', 'Blue', 'Green', 'orange']: create_form = request.cls.catalog.get_resource_form_for_create([]) create_form.display_name = 'Test Resource ' + color create_form.description = ( 'Test Resource for ResourceSearchSession tests, did I mention green') obj = request.cls.catalog.create_resource(create_form) request.cls.resource_list.append(obj) request.cls.resource_ids.append(obj.ident) def class_tear_down(): if not is_never_authz(request.cls.service_config): for obj in request.cls.catalog.get_resources(): request.cls.catalog.delete_resource(obj.ident) request.cls.svc_mgr.delete_bin(request.cls.catalog.ident) request.addfinalizer(class_tear_down) @pytest.fixture(scope="function") def resource_search_session_test_fixture(request): request.cls.session = request.cls.catalog @pytest.mark.usefixtures("resource_search_session_class_fixture", "resource_search_session_test_fixture") class TestResourceSearchSession(object): """Tests for ResourceSearchSession""" def test_get_resource_search(self): """Tests get_resource_search""" # From test_templates/resource.py::ResourceSearchSession::get_resource_search_template result = self.session.get_resource_search() assert isinstance(result, ABCSearches.ResourceSearch) def test_get_resource_search_order(self): """Tests get_resource_search_order""" if is_never_authz(self.service_config): pass # no object to call the method on? else: with pytest.raises(errors.Unimplemented): self.session.get_resource_search_order() def test_get_resources_by_search(self): """Tests get_resources_by_search""" # From test_templates/resource.py::ResourceSearchSession::get_resources_by_search_template query = self.catalog.get_resource_query() search = self.session.get_resource_search() results = self.session.get_resources_by_search(query, search) assert isinstance(results, ABCSearches.ResourceSearchResults) def test_get_resource_query_from_inspector(self): """Tests get_resource_query_from_inspector""" if is_never_authz(self.service_config): pass # no object to call the method on? 
        elif uses_cataloging(self.service_config):
            pass  # cannot call the _get_record() methods on catalogs
        else:
            with pytest.raises(errors.Unimplemented):
                self.session.get_resource_query_from_inspector(True)


@pytest.fixture(scope="class",
                params=['TEST_SERVICE', 'TEST_SERVICE_ALWAYS_AUTHZ', 'TEST_SERVICE_NEVER_AUTHZ', 'TEST_SERVICE_CATALOGING', 'TEST_SERVICE_FILESYSTEM', 'TEST_SERVICE_MEMCACHE'])
def resource_admin_session_class_fixture(request):
    # From test_templates/resource.py::ResourceAdminSession::init_template
    request.cls.service_config = request.param
    request.cls.svc_mgr = Runtime().get_service_manager(
        'RESOURCE',
        proxy=PROXY,
        implementation=request.cls.service_config)
    request.cls.assessment_mgr = Runtime().get_service_manager(
        'ASSESSMENT',
        proxy=PROXY,
        implementation=request.cls.service_config)
    request.cls.fake_id = Id('resource.Resource%3Afake%40DLKIT.MIT.EDU')
    if not is_never_authz(request.cls.service_config):
        create_form = request.cls.svc_mgr.get_bin_form_for_create([])
        create_form.display_name = 'Test Bin'
        create_form.description = 'Test Bin for ResourceAdminSession tests'
        request.cls.catalog = request.cls.svc_mgr.create_bin(create_form)
    else:
        request.cls.catalog = request.cls.svc_mgr.get_resource_admin_session(proxy=PROXY)

    def class_tear_down():
        if not is_never_authz(request.cls.service_config):
            for obj in request.cls.catalog.get_resources():
                request.cls.catalog.delete_resource(obj.ident)
            request.cls.svc_mgr.delete_bin(request.cls.catalog.ident)

    request.addfinalizer(class_tear_down)


@pytest.fixture(scope="function")
def resource_admin_session_test_fixture(request):
    # From test_templates/resource.py::ResourceAdminSession::init_template
    if not is_never_authz(request.cls.service_config):
        request.cls.form = request.cls.catalog.get_resource_form_for_create([])
        request.cls.form.display_name = 'new Resource'
        request.cls.form.description = 'description of Resource'
        request.cls.form.set_genus_type(NEW_TYPE)
        request.cls.osid_object = request.cls.catalog.create_resource(request.cls.form)
    request.cls.session = request.cls.catalog

    def test_tear_down():
        # From test_templates/resource.py::ResourceAdminSession::init_template
        if not is_never_authz(request.cls.service_config):
            request.cls.catalog.delete_resource(request.cls.osid_object.ident)

    request.addfinalizer(test_tear_down)


@pytest.mark.usefixtures("resource_admin_session_class_fixture", "resource_admin_session_test_fixture")
class TestResourceAdminSession(object):
    """Tests for ResourceAdminSession"""

    def test_get_bin_id(self):
        """Tests get_bin_id"""
        # From test_templates/resource.py ResourceLookupSession.get_bin_id_template
        if not is_never_authz(self.service_config):
            assert self.catalog.get_bin_id() == self.catalog.ident

    def test_get_bin(self):
        """Tests get_bin"""
        # is this test really needed?
        # From test_templates/resource.py::ResourceLookupSession::get_bin_template
        if not is_never_authz(self.service_config):
            assert isinstance(self.catalog.get_bin(), ABCBin)

    def test_can_create_resources(self):
        """Tests can_create_resources"""
        # From test_templates/resource.py::ResourceAdminSession::can_create_resources_template
        assert isinstance(self.catalog.can_create_resources(), bool)

    def test_can_create_resource_with_record_types(self):
        """Tests can_create_resource_with_record_types"""
        # From test_templates/resource.py::ResourceAdminSession::can_create_resource_with_record_types_template
        assert isinstance(self.catalog.can_create_resource_with_record_types(DEFAULT_TYPE), bool)

    def test_get_resource_form_for_create(self):
        """Tests get_resource_form_for_create"""
        # From test_templates/resource.py::ResourceAdminSession::get_resource_form_for_create_template
        if not is_never_authz(self.service_config):
            form = self.catalog.get_resource_form_for_create([])
            assert isinstance(form, OsidForm)
            assert not form.is_for_update()
            with pytest.raises(errors.InvalidArgument):
                self.catalog.get_resource_form_for_create([1])
        else:
            with pytest.raises(errors.PermissionDenied):
                self.catalog.get_resource_form_for_create([])

    def test_create_resource(self):
        """Tests create_resource"""
        # From test_templates/resource.py::ResourceAdminSession::create_resource_template
        from dlkit.abstract_osid.resource.objects import Resource
        if not is_never_authz(self.service_config):
            assert isinstance(self.osid_object, Resource)
            assert self.osid_object.display_name.text == 'new Resource'
            assert self.osid_object.description.text == 'description of Resource'
            assert self.osid_object.genus_type == NEW_TYPE
            with pytest.raises(errors.IllegalState):
                self.catalog.create_resource(self.form)
            with pytest.raises(errors.InvalidArgument):
                self.catalog.create_resource('I Will Break You!')
            update_form = self.catalog.get_resource_form_for_update(self.osid_object.ident)
            with pytest.raises(errors.InvalidArgument):
                self.catalog.create_resource(update_form)
        else:
            with pytest.raises(errors.PermissionDenied):
                self.catalog.create_resource('foo')

    def test_can_update_resources(self):
        """Tests can_update_resources"""
        # From test_templates/resource.py::ResourceAdminSession::can_update_resources_template
        assert isinstance(self.catalog.can_update_resources(), bool)

    def test_get_resource_form_for_update(self):
        """Tests get_resource_form_for_update"""
        # From test_templates/resource.py::ResourceAdminSession::get_resource_form_for_update_template
        if not is_never_authz(self.service_config):
            form = self.catalog.get_resource_form_for_update(self.osid_object.ident)
            assert isinstance(form, OsidForm)
            assert form.is_for_update()
            with pytest.raises(errors.InvalidArgument):
                self.catalog.get_resource_form_for_update(['This is Doomed!'])
            with pytest.raises(errors.InvalidArgument):
                self.catalog.get_resource_form_for_update(
                    Id(authority='Respect my Authoritay!',
                       namespace='resource.{object_name}',
                       identifier='1'))
        else:
            with pytest.raises(errors.PermissionDenied):
                self.catalog.get_resource_form_for_update(self.fake_id)

    def test_update_resource(self):
        """Tests update_resource"""
        # From test_templates/resource.py::ResourceAdminSession::update_resource_template
        if not is_never_authz(self.service_config):
            from dlkit.abstract_osid.resource.objects import Resource
            form = self.catalog.get_resource_form_for_update(self.osid_object.ident)
            form.display_name = 'new name'
            form.description = 'new description'
            form.set_genus_type(NEW_TYPE_2)
            updated_object = self.catalog.update_resource(form)
            assert isinstance(updated_object, Resource)
            assert updated_object.ident == self.osid_object.ident
            assert updated_object.display_name.text == 'new name'
            assert updated_object.description.text == 'new description'
            assert updated_object.genus_type == NEW_TYPE_2
            with pytest.raises(errors.IllegalState):
                self.catalog.update_resource(form)
            with pytest.raises(errors.InvalidArgument):
                self.catalog.update_resource('I Will Break You!')
            with pytest.raises(errors.InvalidArgument):
                self.catalog.update_resource(self.form)
        else:
            with pytest.raises(errors.PermissionDenied):
                self.catalog.update_resource('foo')

    def test_can_delete_resources(self):
        """Tests can_delete_resources"""
        # From test_templates/resource.py::ResourceAdminSession::can_delete_resources_template
        assert isinstance(self.catalog.can_delete_resources(), bool)

    def test_delete_resource(self):
        """Tests delete_resource"""
        # From test_templates/resource.py::ResourceAdminSession::delete_resource_template
        if not is_never_authz(self.service_config):
            form = self.catalog.get_resource_form_for_create([])
            form.display_name = 'new Resource'
            form.description = 'description of Resource'
            form.set_genus_type(NEW_TYPE)
            osid_object = self.catalog.create_resource(form)
            self.catalog.delete_resource(osid_object.ident)
            with pytest.raises(errors.NotFound):
                self.catalog.get_resource(osid_object.ident)
        else:
            with pytest.raises(errors.PermissionDenied):
                self.catalog.delete_resource(self.fake_id)

    def test_can_manage_resource_aliases(self):
        """Tests can_manage_resource_aliases"""
        # From test_templates/resource.py::ResourceAdminSession::can_manage_resource_aliases_template
        assert isinstance(self.catalog.can_manage_resource_aliases(), bool)

    def test_alias_resource(self):
        """Tests alias_resource"""
        # From test_templates/resource.py::ResourceAdminSession::alias_resource_template
        if not is_never_authz(self.service_config):
            alias_id = Id(self.catalog.ident.namespace + '%3Amy-alias%40ODL.MIT.EDU')
            self.catalog.alias_resource(self.osid_object.ident, alias_id)
            aliased_object = self.catalog.get_resource(alias_id)
            assert aliased_object.ident == self.osid_object.ident
        else:
            with pytest.raises(errors.PermissionDenied):
                self.catalog.alias_resource(self.fake_id, self.fake_id)


class NotificationReceiver(object):
    # Implemented from resource.ResourceNotificationSession
    pass


@pytest.fixture(scope="class",
                params=['TEST_SERVICE', 'TEST_SERVICE_ALWAYS_AUTHZ', 'TEST_SERVICE_NEVER_AUTHZ', 'TEST_SERVICE_CATALOGING', 'TEST_SERVICE_FILESYSTEM', 'TEST_SERVICE_MEMCACHE'])
def resource_notification_session_class_fixture(request):
    # Implemented from init template for ResourceNotificationSession
    request.cls.service_config = request.param
    request.cls.resource_list = list()
    request.cls.resource_ids = list()
    request.cls.svc_mgr = Runtime().get_service_manager(
        'RESOURCE',
        proxy=PROXY,
        implementation=request.cls.service_config)
    request.cls.fake_id = Id('resource.Resource%3Afake%40DLKIT.MIT.EDU')
    if not is_never_authz(request.cls.service_config):
        create_form = request.cls.svc_mgr.get_bin_form_for_create([])
        create_form.display_name = 'Test Bin'
        create_form.description = 'Test Bin for ResourceNotificationSession tests'
        request.cls.catalog = request.cls.svc_mgr.create_bin(create_form)
        for num in [0, 1]:
            create_form = request.cls.catalog.get_resource_form_for_create([])
            create_form.display_name = 'Test Resource ' + str(num)
            create_form.description = 'Test Resource for ResourceNotificationSession tests'
            obj = request.cls.catalog.create_resource(create_form)
            request.cls.resource_list.append(obj)
            request.cls.resource_ids.append(obj.ident)
    else:
        request.cls.catalog = request.cls.svc_mgr.get_resource_notification_session(NotificationReceiver(), proxy=PROXY)

    def class_tear_down():
        if not is_never_authz(request.cls.service_config):
            for obj in request.cls.catalog.get_resources():
                request.cls.catalog.delete_resource(obj.ident)
            request.cls.svc_mgr.delete_bin(request.cls.catalog.ident)

    request.addfinalizer(class_tear_down)


@pytest.fixture(scope="function")
def resource_notification_session_test_fixture(request):
    # From test_templates/resource.py::ResourceNotificationSession::init_template
    request.cls.session = request.cls.catalog


@pytest.mark.usefixtures("resource_notification_session_class_fixture", "resource_notification_session_test_fixture")
class TestResourceNotificationSession(object):
    """Tests for ResourceNotificationSession"""

    def test_get_bin_id(self):
        """Tests get_bin_id"""
        # From test_templates/resource.py ResourceLookupSession.get_bin_id_template
        if not is_never_authz(self.service_config):
            assert self.catalog.get_bin_id() == self.catalog.ident

    def test_get_bin(self):
        """Tests get_bin"""
        # is this test really needed?
        # From test_templates/resource.py::ResourceLookupSession::get_bin_template
        if not is_never_authz(self.service_config):
            assert isinstance(self.catalog.get_bin(), ABCBin)

    def test_can_register_for_resource_notifications(self):
        """Tests can_register_for_resource_notifications"""
        # From test_templates/resource.py::ResourceNotificationSession::can_register_for_resource_notifications_template
        if is_no_authz(self.service_config):
            with pytest.raises(errors.Unimplemented):
                self.session.can_register_for_resource_notifications()
        else:
            assert isinstance(self.session.can_register_for_resource_notifications(), bool)

    def test_use_federated_bin_view(self):
        """Tests use_federated_bin_view"""
        # From test_templates/resource.py ResourceLookupSession.use_federated_bin_view_template
        self.catalog.use_federated_bin_view()

    def test_use_isolated_bin_view(self):
        """Tests use_isolated_bin_view"""
        # From test_templates/resource.py ResourceLookupSession.use_isolated_bin_view_template
        self.catalog.use_isolated_bin_view()

    def test_register_for_new_resources(self):
        """Tests register_for_new_resources"""
        # From test_templates/resource.py::ResourceNotificationSession::register_for_new_resources_template
        if not is_never_authz(self.service_config):
            self.session.register_for_new_resources()
        else:
            with pytest.raises(errors.PermissionDenied):
                self.session.register_for_new_resources()

    def test_register_for_changed_resources(self):
        """Tests register_for_changed_resources"""
        # From test_templates/resource.py::ResourceNotificationSession::register_for_changed_resources_template
        if not is_never_authz(self.service_config):
            self.session.register_for_changed_resources()
        else:
            with pytest.raises(errors.PermissionDenied):
                self.session.register_for_changed_resources()

    def test_register_for_changed_resource(self):
        """Tests register_for_changed_resource"""
        # From test_templates/resource.py::ResourceNotificationSession::register_for_changed_resource_template
        if not is_never_authz(self.service_config):
            self.session.register_for_changed_resource(self.fake_id)
        else:
            with pytest.raises(errors.PermissionDenied):
                self.session.register_for_changed_resource(self.fake_id)

    def test_register_for_deleted_resources(self):
        """Tests register_for_deleted_resources"""
        # From test_templates/resource.py::ResourceNotificationSession::register_for_deleted_resources_template
        if not is_never_authz(self.service_config):
            self.session.register_for_deleted_resources()
        else:
            with pytest.raises(errors.PermissionDenied):
                self.session.register_for_deleted_resources()

    def test_register_for_deleted_resource(self):
        """Tests register_for_deleted_resource"""
        # From test_templates/resource.py::ResourceNotificationSession::register_for_deleted_resource_template
        if not is_never_authz(self.service_config):
            self.session.register_for_deleted_resource(self.fake_id)
        else:
            with pytest.raises(errors.PermissionDenied):
                self.session.register_for_deleted_resource(self.fake_id)

    def test_reliable_resource_notifications(self):
        """Tests reliable_resource_notifications"""
        # From test_templates/resource.py::ResourceNotificationSession::reliable_resource_notifications_template
        self.session.reliable_resource_notifications()

    def test_unreliable_resource_notifications(self):
        """Tests unreliable_resource_notifications"""
        # From test_templates/resource.py::ResourceNotificationSession::unreliable_resource_notifications_template
        self.session.unreliable_resource_notifications()

    def test_acknowledge_resource_notification(self):
        """Tests acknowledge_resource_notification"""
        if is_never_authz(self.service_config):
            pass  # no object to call the method on?
        elif uses_cataloging(self.service_config):
            pass  # cannot call the _get_record() methods on catalogs
        else:
            with pytest.raises(errors.Unimplemented):
                self.session.acknowledge_resource_notification(True)


@pytest.fixture(scope="class",
                params=['TEST_SERVICE', 'TEST_SERVICE_ALWAYS_AUTHZ', 'TEST_SERVICE_NEVER_AUTHZ', 'TEST_SERVICE_CATALOGING', 'TEST_SERVICE_FILESYSTEM', 'TEST_SERVICE_MEMCACHE'])
def resource_bin_session_class_fixture(request):
    # From test_templates/resource.py::ResourceBinSession::init_template
    request.cls.service_config = request.param
    request.cls.resource_list = list()
    request.cls.resource_ids = list()
    request.cls.svc_mgr = Runtime().get_service_manager(
        'RESOURCE',
        proxy=PROXY,
        implementation=request.cls.service_config)
    request.cls.fake_id = Id('resource.Resource%3Afake%40DLKIT.MIT.EDU')
    if not is_never_authz(request.cls.service_config):
        create_form = request.cls.svc_mgr.get_bin_form_for_create([])
        create_form.display_name = 'Test Bin'
        create_form.description = 'Test Bin for ResourceBinSession tests'
        request.cls.catalog = request.cls.svc_mgr.create_bin(create_form)
        create_form = request.cls.svc_mgr.get_bin_form_for_create([])
        create_form.display_name = 'Test Bin for Assignment'
        create_form.description = 'Test Bin for ResourceBinSession tests assignment'
        request.cls.assigned_catalog = request.cls.svc_mgr.create_bin(create_form)
        for num in [0, 1, 2]:
            create_form = request.cls.catalog.get_resource_form_for_create([])
            create_form.display_name = 'Test Resource ' + str(num)
            create_form.description = 'Test Resource for ResourceBinSession tests'
            obj = request.cls.catalog.create_resource(create_form)
            request.cls.resource_list.append(obj)
            request.cls.resource_ids.append(obj.ident)
        request.cls.svc_mgr.assign_resource_to_bin(
            request.cls.resource_ids[1], request.cls.assigned_catalog.ident)
        request.cls.svc_mgr.assign_resource_to_bin(
            request.cls.resource_ids[2], request.cls.assigned_catalog.ident)

    def class_tear_down():
        if not is_never_authz(request.cls.service_config):
            request.cls.svc_mgr.unassign_resource_from_bin(
                request.cls.resource_ids[1], request.cls.assigned_catalog.ident)
            request.cls.svc_mgr.unassign_resource_from_bin(
                request.cls.resource_ids[2], request.cls.assigned_catalog.ident)
            for obj in request.cls.catalog.get_resources():
                request.cls.catalog.delete_resource(obj.ident)
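            # Note (added): teardown runs in reverse order of setup -- the two
            # bin assignments are removed before the resources and the bins are
            # deleted, so the assigned catalog is empty by the time it goes away.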
            request.cls.svc_mgr.delete_bin(request.cls.assigned_catalog.ident)
            request.cls.svc_mgr.delete_bin(request.cls.catalog.ident)

    request.addfinalizer(class_tear_down)


@pytest.fixture(scope="function")
def resource_bin_session_test_fixture(request):
    # From test_templates/resource.py::ResourceBinSession::init_template
    request.cls.session = request.cls.svc_mgr


@pytest.mark.usefixtures("resource_bin_session_class_fixture", "resource_bin_session_test_fixture")
class TestResourceBinSession(object):
    """Tests for ResourceBinSession"""

    def test_use_comparative_bin_view(self):
        """Tests use_comparative_bin_view"""
        # From test_templates/resource.py::BinLookupSession::use_comparative_bin_view_template
        self.svc_mgr.use_comparative_bin_view()

    def test_use_plenary_bin_view(self):
        """Tests use_plenary_bin_view"""
        # From test_templates/resource.py::BinLookupSession::use_plenary_bin_view_template
        self.svc_mgr.use_plenary_bin_view()

    def test_can_lookup_resource_bin_mappings(self):
        """Tests can_lookup_resource_bin_mappings"""
        # From test_templates/resource.py::ResourceBinSession::can_lookup_resource_bin_mappings
        result = self.session.can_lookup_resource_bin_mappings()
        assert isinstance(result, bool)

    def test_get_resource_ids_by_bin(self):
        """Tests get_resource_ids_by_bin"""
        # From test_templates/resource.py::ResourceBinSession::get_resource_ids_by_bin_template
        if not is_never_authz(self.service_config):
            objects = self.svc_mgr.get_resource_ids_by_bin(self.assigned_catalog.ident)
            assert objects.available() == 2
        else:
            with pytest.raises(errors.PermissionDenied):
                self.svc_mgr.get_resource_ids_by_bin(self.fake_id)

    def test_get_resources_by_bin(self):
        """Tests get_resources_by_bin"""
        # From test_templates/resource.py::ResourceBinSession::get_resources_by_bin_template
        if not is_never_authz(self.service_config):
            results = self.session.get_resources_by_bin(self.assigned_catalog.ident)
            assert isinstance(results, ABCObjects.ResourceList)
            assert results.available() == 2
        else:
            with pytest.raises(errors.PermissionDenied):
                self.session.get_resources_by_bin(self.fake_id)

    def test_get_resource_ids_by_bins(self):
        """Tests get_resource_ids_by_bins"""
        # From test_templates/resource.py::ResourceBinSession::get_resource_ids_by_bins_template
        if not is_never_authz(self.service_config):
            catalog_ids = [self.catalog.ident, self.assigned_catalog.ident]
            object_ids = self.session.get_resource_ids_by_bins(catalog_ids)
            assert isinstance(object_ids, IdList)
            # Currently our impl does not remove duplicate objectIds
            assert object_ids.available() == 5
        else:
            with pytest.raises(errors.PermissionDenied):
                self.session.get_resource_ids_by_bins([self.fake_id])

    def test_get_resources_by_bins(self):
        """Tests get_resources_by_bins"""
        # From test_templates/resource.py::ResourceBinSession::get_resources_by_bins_template
        if not is_never_authz(self.service_config):
            catalog_ids = [self.catalog.ident, self.assigned_catalog.ident]
            results = self.session.get_resources_by_bins(catalog_ids)
            assert isinstance(results, ABCObjects.ResourceList)
            # Currently our impl does not remove duplicate objects
            assert results.available() == 5
        else:
            with pytest.raises(errors.PermissionDenied):
                self.session.get_resources_by_bins([self.fake_id])

    def test_get_bin_ids_by_resource(self):
        """Tests get_bin_ids_by_resource"""
        # From test_templates/resource.py::ResourceBinSession::get_bin_ids_by_resource_template
        if not is_never_authz(self.service_config):
            cats = self.svc_mgr.get_bin_ids_by_resource(self.resource_ids[1])
            assert cats.available() == 2
        else:
            with pytest.raises(errors.PermissionDenied):
                self.svc_mgr.get_bin_ids_by_resource(self.fake_id)

    def test_get_bins_by_resource(self):
        """Tests get_bins_by_resource"""
        # From test_templates/resource.py::ResourceBinSession::get_bins_by_resource_template
        if not is_never_authz(self.service_config):
            cats = self.svc_mgr.get_bins_by_resource(self.resource_ids[1])
            assert cats.available() == 2
        else:
            with pytest.raises(errors.PermissionDenied):
                self.svc_mgr.get_bins_by_resource(self.fake_id)


@pytest.fixture(scope="class",
                params=['TEST_SERVICE', 'TEST_SERVICE_ALWAYS_AUTHZ', 'TEST_SERVICE_NEVER_AUTHZ', 'TEST_SERVICE_CATALOGING', 'TEST_SERVICE_FILESYSTEM', 'TEST_SERVICE_MEMCACHE'])
def resource_bin_assignment_session_class_fixture(request):
    # From test_templates/resource.py::ResourceBinAssignmentSession::init_template
    request.cls.service_config = request.param
    request.cls.resource_list = list()
    request.cls.resource_ids = list()
    request.cls.svc_mgr = Runtime().get_service_manager(
        'RESOURCE',
        proxy=PROXY,
        implementation=request.cls.service_config)
    request.cls.fake_id = Id('resource.Resource%3Afake%40DLKIT.MIT.EDU')
    if not is_never_authz(request.cls.service_config):
        create_form = request.cls.svc_mgr.get_bin_form_for_create([])
        create_form.display_name = 'Test Bin'
        create_form.description = 'Test Bin for ResourceBinAssignmentSession tests'
        request.cls.catalog = request.cls.svc_mgr.create_bin(create_form)
        create_form = request.cls.svc_mgr.get_bin_form_for_create([])
        create_form.display_name = 'Test Bin for Assignment'
        create_form.description = 'Test Bin for ResourceBinAssignmentSession tests assignment'
        request.cls.assigned_catalog = request.cls.svc_mgr.create_bin(create_form)
        for num in [0, 1, 2]:
            create_form = request.cls.catalog.get_resource_form_for_create([])
            create_form.display_name = 'Test Resource ' + str(num)
            create_form.description = 'Test Resource for ResourceBinAssignmentSession tests'
            obj = request.cls.catalog.create_resource(create_form)
            request.cls.resource_list.append(obj)
            request.cls.resource_ids.append(obj.ident)

    def class_tear_down():
        if not is_never_authz(request.cls.service_config):
            for obj in request.cls.catalog.get_resources():
                request.cls.catalog.delete_resource(obj.ident)
            request.cls.svc_mgr.delete_bin(request.cls.assigned_catalog.ident)
            request.cls.svc_mgr.delete_bin(request.cls.catalog.ident)

    request.addfinalizer(class_tear_down)


@pytest.fixture(scope="function")
def resource_bin_assignment_session_test_fixture(request):
    # From test_templates/resource.py::ResourceBinAssignmentSession::init_template
    request.cls.session = request.cls.svc_mgr


@pytest.mark.usefixtures("resource_bin_assignment_session_class_fixture", "resource_bin_assignment_session_test_fixture")
class TestResourceBinAssignmentSession(object):
    """Tests for ResourceBinAssignmentSession"""

    def test_can_assign_resources(self):
        """Tests can_assign_resources"""
        # From test_templates/resource.py::ResourceBinAssignmentSession::can_assign_resources_template
        result = self.session.can_assign_resources()
        assert isinstance(result, bool)

    def test_can_assign_resources_to_bin(self):
        """Tests can_assign_resources_to_bin"""
        # From test_templates/resource.py::ResourceBinAssignmentSession::can_assign_resources_to_bin_template
        result = self.session.can_assign_resources_to_bin(self.assigned_catalog.ident)
        assert isinstance(result, bool)

    def test_get_assignable_bin_ids(self):
        """Tests get_assignable_bin_ids"""
        # From test_templates/resource.py::ResourceBinAssignmentSession::get_assignable_bin_ids_template
        # Note that our implementation just returns all catalogIds, which does not follow
        # the OSID spec (it should return only the catalogIds below the given one in the hierarchy).
        if not is_never_authz(self.service_config):
            results = self.session.get_assignable_bin_ids(self.catalog.ident)
            assert isinstance(results, IdList)
            # Because we're not deleting all bins from all tests, we might
            # have some crufty bins here...but there should be at least 2.
            assert results.available() >= 2
        else:
            with pytest.raises(errors.PermissionDenied):
                self.session.get_assignable_bin_ids(self.fake_id)

    def test_get_assignable_bin_ids_for_resource(self):
        """Tests get_assignable_bin_ids_for_resource"""
        # From test_templates/resource.py::ResourceBinAssignmentSession::get_assignable_bin_ids_for_resource_template
        # Note that our implementation just returns all catalogIds, which does not follow
        # the OSID spec (it should return only the catalogIds below the given one in the hierarchy).
        if not is_never_authz(self.service_config):
            results = self.session.get_assignable_bin_ids_for_resource(self.catalog.ident, self.resource_ids[0])
            assert isinstance(results, IdList)
            # Because we're not deleting all bins from all tests, we might
            # have some crufty bins here...but there should be at least 2.
            assert results.available() >= 2
        else:
            with pytest.raises(errors.PermissionDenied):
                self.session.get_assignable_bin_ids_for_resource(self.fake_id, self.fake_id)

    def test_assign_resource_to_bin(self):
        """Tests assign_resource_to_bin"""
        # From test_templates/resource.py::ResourceBinAssignmentSession::assign_resource_to_bin_template
        if not is_never_authz(self.service_config):
            results = self.assigned_catalog.get_resources()
            assert results.available() == 0
            self.session.assign_resource_to_bin(self.resource_ids[1], self.assigned_catalog.ident)
            results = self.assigned_catalog.get_resources()
            assert results.available() == 1
            self.session.unassign_resource_from_bin(
                self.resource_ids[1], self.assigned_catalog.ident)
        else:
            with pytest.raises(errors.PermissionDenied):
                self.session.assign_resource_to_bin(self.fake_id, self.fake_id)

    def test_unassign_resource_from_bin(self):
        """Tests unassign_resource_from_bin"""
        # From test_templates/resource.py::ResourceBinAssignmentSession::unassign_resource_from_bin_template
        if not is_never_authz(self.service_config):
            results = self.assigned_catalog.get_resources()
            assert results.available() == 0
            self.session.assign_resource_to_bin(
                self.resource_ids[1], self.assigned_catalog.ident)
            results = self.assigned_catalog.get_resources()
            assert results.available() == 1
            self.session.unassign_resource_from_bin(
                self.resource_ids[1], self.assigned_catalog.ident)
            results = self.assigned_catalog.get_resources()
            assert results.available() == 0
        else:
            with pytest.raises(errors.PermissionDenied):
                self.session.unassign_resource_from_bin(self.fake_id, self.fake_id)


@pytest.fixture(scope="class",
                params=['TEST_SERVICE', 'TEST_SERVICE_ALWAYS_AUTHZ', 'TEST_SERVICE_NEVER_AUTHZ', 'TEST_SERVICE_CATALOGING', 'TEST_SERVICE_FILESYSTEM', 'TEST_SERVICE_MEMCACHE'])
def resource_agent_session_class_fixture(request):
    request.cls.service_config = request.param
    request.cls.resource_list = list()
    request.cls.resource_ids = list()
    request.cls.svc_mgr = Runtime().get_service_manager(
        'RESOURCE',
        proxy=PROXY,
        implementation=request.cls.service_config)
    if not is_never_authz(request.cls.service_config):
        create_form = request.cls.svc_mgr.get_bin_form_for_create([])
        create_form.display_name = 'Test Bin'
        create_form.description = 'Test Bin for ResourceAgentSession tests'
        request.cls.catalog = request.cls.svc_mgr.create_bin(create_form)
        for num in [0, 1]:
            create_form = request.cls.catalog.get_resource_form_for_create([])
            create_form.display_name = 'Test Resource ' + str(num)
            create_form.description = 'Test Resource for ResourceAgentSession tests'
            obj = request.cls.catalog.create_resource(create_form)
            request.cls.resource_list.append(obj)
            request.cls.resource_ids.append(obj.ident)
        request.cls.catalog.assign_agent_to_resource(AGENT_ID_0, request.cls.resource_ids[0])
        request.cls.catalog.assign_agent_to_resource(AGENT_ID_1, request.cls.resource_ids[1])
    else:
        request.cls.catalog = request.cls.svc_mgr.get_resource_agent_session(proxy=PROXY)

    def class_tear_down():
        if not is_never_authz(request.cls.service_config):
            for catalog in request.cls.svc_mgr.get_bins():
                for obj in catalog.get_resources():
                    catalog.delete_resource(obj.ident)
                request.cls.svc_mgr.delete_bin(catalog.ident)

    request.addfinalizer(class_tear_down)


@pytest.fixture(scope="function")
def resource_agent_session_test_fixture(request):
    request.cls.session = request.cls.catalog


@pytest.mark.usefixtures("resource_agent_session_class_fixture", "resource_agent_session_test_fixture")
class TestResourceAgentSession(object):
    """Tests for ResourceAgentSession"""

    def test_get_bin_id(self):
        """Tests get_bin_id"""
        # From test_templates/resource.py ResourceLookupSession.get_bin_id_template
        if not is_never_authz(self.service_config):
            assert self.catalog.get_bin_id() == self.catalog.ident

    def test_get_bin(self):
        """Tests get_bin"""
        # is this test really needed?
        # From test_templates/resource.py::ResourceLookupSession::get_bin_template
        if not is_never_authz(self.service_config):
            assert isinstance(self.catalog.get_bin(), ABCBin)

    def test_can_lookup_resource_agent_mappings(self):
        """Tests can_lookup_resource_agent_mappings"""
        if is_never_authz(self.service_config):
            pass  # no object to call the method on?
        else:
            with pytest.raises(errors.Unimplemented):
                self.session.can_lookup_resource_agent_mappings()

    def test_use_comparative_agent_view(self):
        """Tests use_comparative_agent_view"""
        # From test_templates/resource.py ResourceLookupSession.use_comparative_resource_view_template
        self.catalog.use_comparative_agent_view()

    def test_use_plenary_agent_view(self):
        """Tests use_plenary_agent_view"""
        # From test_templates/resource.py ResourceLookupSession.use_plenary_resource_view_template
        self.catalog.use_plenary_agent_view()

    def test_use_federated_bin_view(self):
        """Tests use_federated_bin_view"""
        # From test_templates/resource.py ResourceLookupSession.use_federated_bin_view_template
        self.catalog.use_federated_bin_view()

    def test_use_isolated_bin_view(self):
        """Tests use_isolated_bin_view"""
        # From test_templates/resource.py ResourceLookupSession.use_isolated_bin_view_template
        self.catalog.use_isolated_bin_view()

    def test_get_resource_id_by_agent(self):
        """Tests get_resource_id_by_agent"""
        if not is_never_authz(self.service_config):
            resource_id = self.catalog.get_resource_id_by_agent(AGENT_ID_0)
            assert isinstance(resource_id, Id)
            assert resource_id == self.resource_ids[0]
        else:
            with pytest.raises(errors.PermissionDenied):
                self.catalog.get_resource_id_by_agent(AGENT_ID_0)

    def test_get_resource_by_agent(self):
        """Tests get_resource_by_agent"""
        if not is_never_authz(self.service_config):
            resource = self.catalog.get_resource_by_agent(AGENT_ID_1)
            assert isinstance(resource, Resource)
            assert resource.display_name.text == 'Test Resource 1'
        else:
            with pytest.raises(errors.PermissionDenied):
                self.catalog.get_resource_by_agent(AGENT_ID_1)

    def test_get_agent_ids_by_resource(self):
        """Tests get_agent_ids_by_resource"""
        if not is_never_authz(self.service_config):
            id_list = self.catalog.get_agent_ids_by_resource(self.resource_ids[0])
            assert id_list.next() == AGENT_ID_0
            assert isinstance(id_list, IdList)
        else:
            with pytest.raises(errors.PermissionDenied):
                self.catalog.get_agent_ids_by_resource(AGENT_ID_0)

    def test_get_agents_by_resource(self):
        """Tests get_agents_by_resource"""
        if not is_never_authz(self.service_config):
            agents = self.catalog.get_agents_by_resource(self.resource_ids[0])
            assert agents.available() == 1
            assert isinstance(agents, AgentList)
            assert agents.next().ident == AGENT_ID_0
        else:
            with pytest.raises(errors.PermissionDenied):
                self.catalog.get_agents_by_resource(AGENT_ID_0)


@pytest.fixture(scope="class",
                params=['TEST_SERVICE', 'TEST_SERVICE_ALWAYS_AUTHZ', 'TEST_SERVICE_NEVER_AUTHZ', 'TEST_SERVICE_CATALOGING', 'TEST_SERVICE_FILESYSTEM', 'TEST_SERVICE_MEMCACHE'])
def resource_agent_assignment_session_class_fixture(request):
    request.cls.service_config = request.param
    request.cls.svc_mgr = Runtime().get_service_manager(
        'RESOURCE',
        proxy=PROXY,
        implementation=request.cls.service_config)


@pytest.fixture(scope="function")
def resource_agent_assignment_session_test_fixture(request):
    request.cls.resource_list = list()
    request.cls.resource_ids = list()
    if not is_never_authz(request.cls.service_config):
        create_form = request.cls.svc_mgr.get_bin_form_for_create([])
        create_form.display_name = 'Test Bin'
        create_form.description = 'Test Bin for ResourceAgentAssignmentSession tests'
        request.cls.catalog = request.cls.svc_mgr.create_bin(create_form)
        for num in [0, 1]:
            create_form = request.cls.catalog.get_resource_form_for_create([])
            create_form.display_name = 'Test Resource ' + str(num)
            create_form.description = 'Test Resource for ResourceAgentAssignmentSession tests'
            obj = request.cls.catalog.create_resource(create_form)
            request.cls.resource_list.append(obj)
            request.cls.resource_ids.append(obj.ident)
    else:
        request.cls.catalog = request.cls.svc_mgr.get_resource_agent_assignment_session(proxy=PROXY)
    request.cls.session = request.cls.catalog

    def test_tear_down():
        if not is_never_authz(request.cls.service_config):
            for obj in request.cls.catalog.get_resources():
                request.cls.catalog.delete_resource(obj.ident)
            request.cls.svc_mgr.delete_bin(request.cls.catalog.ident)

    request.addfinalizer(test_tear_down)


@pytest.mark.usefixtures("resource_agent_assignment_session_class_fixture", "resource_agent_assignment_session_test_fixture")
class TestResourceAgentAssignmentSession(object):
    """Tests for ResourceAgentAssignmentSession"""

    def test_get_bin_id(self):
        """Tests get_bin_id"""
        # From test_templates/resource.py ResourceLookupSession.get_bin_id_template
        if not is_never_authz(self.service_config):
            assert self.catalog.get_bin_id() == self.catalog.ident

    def test_get_bin(self):
        """Tests get_bin"""
        # is this test really needed?
        # From test_templates/resource.py::ResourceLookupSession::get_bin_template
        if not is_never_authz(self.service_config):
            assert isinstance(self.catalog.get_bin(), ABCBin)

    def test_can_assign_agents(self):
        """Tests can_assign_agents"""
        if is_no_authz(self.service_config):
            with pytest.raises(errors.Unimplemented):
                self.session.can_assign_agents()
        else:
            assert isinstance(self.session.can_assign_agents(), bool)

    def test_can_assign_agents_to_resource(self):
        """Tests can_assign_agents_to_resource"""
        if is_no_authz(self.service_config):
            with pytest.raises(errors.Unimplemented):
                self.session.can_assign_agents_to_resource(True)
        else:
            assert isinstance(self.session.can_assign_agents_to_resource(True), bool)

    def test_assign_agent_to_resource(self):
        """Tests assign_agent_to_resource"""
        if not is_never_authz(self.service_config):
            self.catalog.assign_agent_to_resource(AGENT_ID_0, self.resource_ids[0])
            with pytest.raises(errors.AlreadyExists):
                self.catalog.assign_agent_to_resource(AGENT_ID_0, self.resource_ids[1])
        else:
            with pytest.raises(errors.PermissionDenied):
                self.catalog.assign_agent_to_resource(AGENT_ID_0, AGENT_ID_1)

    def test_unassign_agent_from_resource(self):
        """Tests unassign_agent_from_resource"""
        if not is_never_authz(self.service_config):
            self.catalog.assign_agent_to_resource(AGENT_ID_1, self.resource_ids[1])
            assert self.catalog.get_resource_by_agent(AGENT_ID_1).display_name.text == 'Test Resource 1'
            self.catalog.unassign_agent_from_resource(AGENT_ID_1, self.resource_ids[1])
            with pytest.raises(errors.NotFound):
                self.catalog.get_resource_by_agent(AGENT_ID_1)
        else:
            with pytest.raises(errors.PermissionDenied):
                self.catalog.unassign_agent_from_resource(AGENT_ID_1, AGENT_ID_0)


@pytest.fixture(scope="class",
                params=['TEST_SERVICE', 'TEST_SERVICE_ALWAYS_AUTHZ', 'TEST_SERVICE_NEVER_AUTHZ', 'TEST_SERVICE_CATALOGING', 'TEST_SERVICE_FILESYSTEM', 'TEST_SERVICE_MEMCACHE'])
def bin_lookup_session_class_fixture(request):
    # From test_templates/resource.py::BinLookupSession::init_template
    request.cls.service_config = request.param
    request.cls.catalogs = list()
    request.cls.catalog_ids = list()
    request.cls.svc_mgr = Runtime().get_service_manager(
        'RESOURCE',
        proxy=PROXY,
        implementation=request.cls.service_config)
    request.cls.fake_id = Id('resource.Resource%3Afake%40DLKIT.MIT.EDU')
    if not is_never_authz(request.cls.service_config):
        for num in [0, 1]:
            create_form = request.cls.svc_mgr.get_bin_form_for_create([])
            create_form.display_name = 'Test Bin ' + str(num)
            create_form.description = 'Test Bin for resource proxy manager tests'
            catalog = request.cls.svc_mgr.create_bin(create_form)
            request.cls.catalogs.append(catalog)
            request.cls.catalog_ids.append(catalog.ident)

    def class_tear_down():
        if not is_never_authz(request.cls.service_config):
            for catalog in request.cls.svc_mgr.get_bins():
                request.cls.svc_mgr.delete_bin(catalog.ident)

    request.addfinalizer(class_tear_down)


@pytest.fixture(scope="function")
def bin_lookup_session_test_fixture(request):
    # From test_templates/resource.py::BinLookupSession::init_template
    request.cls.session = request.cls.svc_mgr


@pytest.mark.usefixtures("bin_lookup_session_class_fixture", "bin_lookup_session_test_fixture")
class TestBinLookupSession(object):
    """Tests for BinLookupSession"""

    def test_can_lookup_bins(self):
        """Tests can_lookup_bins"""
        # From test_templates/resource.py::BinLookupSession::can_lookup_bins_template
        assert isinstance(self.session.can_lookup_bins(), bool)

    def test_use_comparative_bin_view(self):
        """Tests use_comparative_bin_view"""
        # From test_templates/resource.py::BinLookupSession::use_comparative_bin_view_template
        self.svc_mgr.use_comparative_bin_view()

    def test_use_plenary_bin_view(self):
        """Tests use_plenary_bin_view"""
        # From test_templates/resource.py::BinLookupSession::use_plenary_bin_view_template
        self.svc_mgr.use_plenary_bin_view()

    def test_get_bin(self):
        """Tests get_bin"""
        # From test_templates/resource.py::BinLookupSession::get_bin_template
        if not is_never_authz(self.service_config):
            catalog = self.svc_mgr.get_bin(self.catalogs[0].ident)
            assert catalog.ident == self.catalogs[0].ident
        else:
            with pytest.raises(errors.PermissionDenied):
                self.svc_mgr.get_bin(self.fake_id)

    def test_get_bins_by_ids(self):
        """Tests get_bins_by_ids"""
        # From test_templates/resource.py::BinLookupSession::get_bins_by_ids_template
        if not is_never_authz(self.service_config):
            catalogs = self.svc_mgr.get_bins_by_ids(self.catalog_ids)
            assert catalogs.available() == 2
            assert isinstance(catalogs, ABCObjects.BinList)
            catalog_id_strs = [str(cat_id) for cat_id in self.catalog_ids]
            for index, catalog in enumerate(catalogs):
                assert str(catalog.ident) in catalog_id_strs
                catalog_id_strs.remove(str(catalog.ident))
        else:
            with pytest.raises(errors.PermissionDenied):
                self.svc_mgr.get_bins_by_ids([self.fake_id])

    def test_get_bins_by_genus_type(self):
        """Tests get_bins_by_genus_type"""
        # From test_templates/resource.py::BinLookupSession::get_bins_by_genus_type_template
        if not is_never_authz(self.service_config):
            catalogs = self.svc_mgr.get_bins_by_genus_type(DEFAULT_GENUS_TYPE)
            assert catalogs.available() > 0
            assert isinstance(catalogs, ABCObjects.BinList)
        else:
            with pytest.raises(errors.PermissionDenied):
                self.svc_mgr.get_bins_by_genus_type(DEFAULT_GENUS_TYPE)

    def test_get_bins_by_parent_genus_type(self):
        """Tests get_bins_by_parent_genus_type"""
        if is_never_authz(self.service_config):
            pass  # no object to call the method on?
        elif uses_cataloging(self.service_config):
            pass  # cannot call the _get_record() methods on catalogs
        else:
            with pytest.raises(errors.Unimplemented):
                self.session.get_bins_by_parent_genus_type(True)

    def test_get_bins_by_record_type(self):
        """Tests get_bins_by_record_type"""
        if is_never_authz(self.service_config):
            pass  # no object to call the method on?
        elif uses_cataloging(self.service_config):
            pass  # cannot call the _get_record() methods on catalogs
        else:
            with pytest.raises(errors.Unimplemented):
                self.session.get_bins_by_record_type(True)

    def test_get_bins_by_provider(self):
        """Tests get_bins_by_provider"""
        if is_never_authz(self.service_config):
            pass  # no object to call the method on?
        elif uses_cataloging(self.service_config):
            pass  # cannot call the _get_record() methods on catalogs
        else:
            with pytest.raises(errors.Unimplemented):
                self.session.get_bins_by_provider(True)

    def test_get_bins(self):
        """Tests get_bins"""
        # From test_templates/resource.py::BinLookupSession::get_bins_template
        if not is_never_authz(self.service_config):
            catalogs = self.svc_mgr.get_bins()
            assert catalogs.available() > 0
            assert isinstance(catalogs, ABCObjects.BinList)
        else:
            with pytest.raises(errors.PermissionDenied):
                self.svc_mgr.get_bins()


@pytest.fixture(scope="class",
                params=['TEST_SERVICE', 'TEST_SERVICE_ALWAYS_AUTHZ', 'TEST_SERVICE_NEVER_AUTHZ', 'TEST_SERVICE_CATALOGING', 'TEST_SERVICE_FILESYSTEM', 'TEST_SERVICE_MEMCACHE'])
def bin_query_session_class_fixture(request):
    # From test_templates/resource.py::BinQuerySession::init_template
    request.cls.service_config = request.param
    request.cls.svc_mgr = Runtime().get_service_manager(
        'RESOURCE',
        proxy=PROXY,
        implementation=request.cls.service_config)
    if not is_never_authz(request.cls.service_config):
        create_form = request.cls.svc_mgr.get_bin_form_for_create([])
        create_form.display_name = 'Test catalog'
        create_form.description = 'Test catalog description'
        request.cls.catalog = request.cls.svc_mgr.create_bin(create_form)
        request.cls.fake_id = Id('resource.Resource%3A1%40ODL.MIT.EDU')

    def class_tear_down():
        if not is_never_authz(request.cls.service_config):
            request.cls.svc_mgr.delete_bin(request.cls.catalog.ident)

    request.addfinalizer(class_tear_down)


@pytest.fixture(scope="function")
def bin_query_session_test_fixture(request):
    # From test_templates/resource.py::BinQuerySession::init_template
    request.cls.session = request.cls.svc_mgr


@pytest.mark.usefixtures("bin_query_session_class_fixture", "bin_query_session_test_fixture")
class TestBinQuerySession(object):
    """Tests for BinQuerySession"""

    def test_can_search_bins(self):
        """Tests can_search_bins"""
        # From test_templates/resource.py::BinQuerySession::can_search_bins_template
        assert isinstance(self.session.can_search_bins(), bool)

    def test_get_bin_query(self):
        """Tests get_bin_query"""
        # From test_templates/resource.py::BinQuerySession::get_bin_query_template
        if not is_never_authz(self.service_config):
            query = self.session.get_bin_query()
            assert isinstance(query, ABCQueries.BinQuery)
        else:
            with pytest.raises(errors.PermissionDenied):
                self.session.get_bin_query()

    def test_get_bins_by_query(self):
        """Tests get_bins_by_query"""
        # From test_templates/resource.py::BinQuerySession::get_bins_by_query_template
        if not is_never_authz(self.service_config):
            query = self.session.get_bin_query()
            query.match_display_name('Test catalog')
            assert self.session.get_bins_by_query(query).available() == 1
            query.clear_display_name_terms()
            query.match_display_name('Test catalog', match=False)
            assert self.session.get_bins_by_query(query).available() == 0
        else:
            with pytest.raises(errors.PermissionDenied):
                self.session.get_bins_by_query('foo')


@pytest.fixture(scope="class",
                params=['TEST_SERVICE', 'TEST_SERVICE_ALWAYS_AUTHZ', 'TEST_SERVICE_NEVER_AUTHZ', 'TEST_SERVICE_CATALOGING', 'TEST_SERVICE_FILESYSTEM', 'TEST_SERVICE_MEMCACHE'])
def bin_admin_session_class_fixture(request):
    # From test_templates/resource.py::BinAdminSession::init_template
    request.cls.service_config = request.param
    request.cls.svc_mgr = Runtime().get_service_manager(
        'RESOURCE',
        proxy=PROXY,
        implementation=request.cls.service_config)
    request.cls.fake_id = Id('resource.Resource%3Afake%40DLKIT.MIT.EDU')


@pytest.fixture(scope="function")
def bin_admin_session_test_fixture(request):
    # From test_templates/resource.py::BinAdminSession::init_template
    if not is_never_authz(request.cls.service_config):
        # Initialize test catalog:
        create_form = request.cls.svc_mgr.get_bin_form_for_create([])
        create_form.display_name = 'Test Bin'
        create_form.description = 'Test Bin for BinAdminSession tests'
        request.cls.catalog = request.cls.svc_mgr.create_bin(create_form)
        # Initialize catalog to be deleted:
        create_form = request.cls.svc_mgr.get_bin_form_for_create([])
        create_form.display_name = 'Test Bin For Deletion'
        create_form.description = 'Test Bin for BinAdminSession deletion test'
        request.cls.catalog_to_delete = request.cls.svc_mgr.create_bin(create_form)
    request.cls.session = request.cls.svc_mgr

    def test_tear_down():
        if not is_never_authz(request.cls.service_config):
            for catalog in request.cls.svc_mgr.get_bins():
                request.cls.svc_mgr.delete_bin(catalog.ident)

    request.addfinalizer(test_tear_down)


@pytest.mark.usefixtures("bin_admin_session_class_fixture", "bin_admin_session_test_fixture")
class TestBinAdminSession(object):
    """Tests for BinAdminSession"""

    def test_can_create_bins(self):
        """Tests can_create_bins"""
        # From test_templates/resource.py BinAdminSession.can_create_bins_template
        assert isinstance(self.svc_mgr.can_create_bins(), bool)

    def test_can_create_bin_with_record_types(self):
        """Tests can_create_bin_with_record_types"""
        # From test_templates/resource.py BinAdminSession.can_create_bin_with_record_types_template
        assert isinstance(self.svc_mgr.can_create_bin_with_record_types(DEFAULT_TYPE), bool)

    def test_get_bin_form_for_create(self):
        """Tests get_bin_form_for_create"""
        # From test_templates/resource.py BinAdminSession.get_bin_form_for_create_template
        from dlkit.abstract_osid.resource.objects import BinForm
        if not is_never_authz(self.service_config):
            catalog_form = self.svc_mgr.get_bin_form_for_create([])
            assert isinstance(catalog_form, OsidCatalogForm)
            assert not catalog_form.is_for_update()
            with pytest.raises(errors.InvalidArgument):
                self.svc_mgr.get_bin_form_for_create([1])
        else:
            with pytest.raises(errors.PermissionDenied):
                self.svc_mgr.get_bin_form_for_create([])

    def test_create_bin(self):
        """Tests create_bin"""
        # From test_templates/resource.py BinAdminSession.create_bin_template
        from dlkit.abstract_osid.resource.objects import Bin
        if not is_never_authz(self.service_config):
            catalog_form = self.svc_mgr.get_bin_form_for_create([])
            catalog_form.display_name = 'Test Bin'
            catalog_form.description = 'Test Bin for BinAdminSession.create_bin tests'
            new_catalog = self.svc_mgr.create_bin(catalog_form)
            assert isinstance(new_catalog, OsidCatalog)
            with pytest.raises(errors.IllegalState):
                self.svc_mgr.create_bin(catalog_form)
            with pytest.raises(errors.InvalidArgument):
                self.svc_mgr.create_bin('I Will Break You!')
            update_form = self.svc_mgr.get_bin_form_for_update(new_catalog.ident)
            with pytest.raises(errors.InvalidArgument):
                self.svc_mgr.create_bin(update_form)
        else:
            with pytest.raises(errors.PermissionDenied):
                self.svc_mgr.create_bin('foo')

    def test_can_update_bins(self):
        """Tests can_update_bins"""
        # From test_templates/resource.py BinAdminSession.can_update_bins_template
        assert isinstance(self.svc_mgr.can_update_bins(), bool)

    def test_get_bin_form_for_update(self):
        """Tests get_bin_form_for_update"""
        # From test_templates/resource.py BinAdminSession.get_bin_form_for_update_template
        from dlkit.abstract_osid.resource.objects import BinForm
        if not is_never_authz(self.service_config):
            catalog_form = self.svc_mgr.get_bin_form_for_update(self.catalog.ident)
            assert isinstance(catalog_form, OsidCatalogForm)
            assert catalog_form.is_for_update()
        else:
            with pytest.raises(errors.PermissionDenied):
                self.svc_mgr.get_bin_form_for_update(self.fake_id)

    def test_update_bin(self):
        """Tests update_bin"""
        # From test_templates/resource.py BinAdminSession.update_bin_template
        if not is_never_authz(self.service_config):
            catalog_form = self.svc_mgr.get_bin_form_for_update(self.catalog.ident)
            # Update some elements here?
            self.svc_mgr.update_bin(catalog_form)
        else:
            with pytest.raises(errors.PermissionDenied):
                self.svc_mgr.update_bin('foo')

    def test_can_delete_bins(self):
        """Tests can_delete_bins"""
        # From test_templates/resource.py BinAdminSession.can_delete_bins_template
        assert isinstance(self.svc_mgr.can_delete_bins(), bool)

    def test_delete_bin(self):
        """Tests delete_bin"""
        # From test_templates/resource.py BinAdminSession.delete_bin_template
        if not is_never_authz(self.service_config):
            cat_id = self.catalog_to_delete.ident
            self.svc_mgr.delete_bin(cat_id)
            with pytest.raises(errors.NotFound):
                self.svc_mgr.get_bin(cat_id)
        else:
            with pytest.raises(errors.PermissionDenied):
                self.svc_mgr.delete_bin(self.fake_id)

    def test_can_manage_bin_aliases(self):
        """Tests can_manage_bin_aliases"""
        # From test_templates/resource.py::ResourceAdminSession::can_manage_resource_aliases_template
        assert isinstance(self.svc_mgr.can_manage_bin_aliases(), bool)

    def test_alias_bin(self):
        """Tests alias_bin"""
        # From test_templates/resource.py BinAdminSession.alias_bin_template
        alias_id = Id('resource.Bin%3Amy-alias%40ODL.MIT.EDU')
        if not is_never_authz(self.service_config):
            self.svc_mgr.alias_bin(self.catalog_to_delete.ident, alias_id)
            aliased_catalog = self.svc_mgr.get_bin(alias_id)
            assert self.catalog_to_delete.ident == aliased_catalog.ident
        else:
            with pytest.raises(errors.PermissionDenied):
                self.svc_mgr.alias_bin(self.fake_id, alias_id)


@pytest.fixture(scope="class",
                params=['TEST_SERVICE', 'TEST_SERVICE_ALWAYS_AUTHZ', 'TEST_SERVICE_NEVER_AUTHZ', 'TEST_SERVICE_CATALOGING', 'TEST_SERVICE_FILESYSTEM', 'TEST_SERVICE_MEMCACHE'])
def bin_hierarchy_session_class_fixture(request):
    # From test_templates/resource.py::BinHierarchySession::init_template
    request.cls.service_config = request.param
    request.cls.svc_mgr = Runtime().get_service_manager(
        'RESOURCE',
        proxy=PROXY,
        implementation=request.cls.service_config)
    request.cls.catalogs = dict()
    request.cls.fake_id = Id('resource.Resource%3Afake%40DLKIT.MIT.EDU')
    if not is_never_authz(request.cls.service_config):
        for name in ['Root', 'Child 1', 'Child 2', 'Grandchild 1']:
            create_form = request.cls.svc_mgr.get_bin_form_for_create([])
            create_form.display_name = name
            create_form.description = 'Test Bin ' + name
            request.cls.catalogs[name] = request.cls.svc_mgr.create_bin(create_form)
        request.cls.svc_mgr.add_root_bin(request.cls.catalogs['Root'].ident)
        request.cls.svc_mgr.add_child_bin(request.cls.catalogs['Root'].ident, request.cls.catalogs['Child 1'].ident)
        request.cls.svc_mgr.add_child_bin(request.cls.catalogs['Root'].ident, request.cls.catalogs['Child 2'].ident)
        request.cls.svc_mgr.add_child_bin(request.cls.catalogs['Child 1'].ident, request.cls.catalogs['Grandchild 1'].ident)
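        # Note (added): the calls above build this four-bin hierarchy, which
        # the traversal tests below assume:
        #
        #            Root
        #           /    \
        #      Child 1   Child 2
        #         |
        #   Grandchild 1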

    def class_tear_down():
        if not is_never_authz(request.cls.service_config):
            request.cls.svc_mgr.remove_child_bin(request.cls.catalogs['Child 1'].ident, request.cls.catalogs['Grandchild 1'].ident)
            request.cls.svc_mgr.remove_child_bins(request.cls.catalogs['Root'].ident)
            request.cls.svc_mgr.remove_root_bin(request.cls.catalogs['Root'].ident)
            for cat_name in request.cls.catalogs:
                request.cls.svc_mgr.delete_bin(request.cls.catalogs[cat_name].ident)

    request.addfinalizer(class_tear_down)


@pytest.fixture(scope="function")
def bin_hierarchy_session_test_fixture(request):
    # From test_templates/resource.py::BinHierarchySession::init_template
    request.cls.session = request.cls.svc_mgr


@pytest.mark.usefixtures("bin_hierarchy_session_class_fixture", "bin_hierarchy_session_test_fixture")
class TestBinHierarchySession(object):
    """Tests for BinHierarchySession"""

    def test_get_bin_hierarchy_id(self):
        """Tests get_bin_hierarchy_id"""
        # From test_templates/resource.py::BinHierarchySession::get_bin_hierarchy_id_template
        hierarchy_id = self.svc_mgr.get_bin_hierarchy_id()
        assert isinstance(hierarchy_id, Id)

    def test_get_bin_hierarchy(self):
        """Tests get_bin_hierarchy"""
        # From test_templates/resource.py::BinHierarchySession::get_bin_hierarchy_template
        if not is_never_authz(self.service_config):
            hierarchy = self.svc_mgr.get_bin_hierarchy()
            assert isinstance(hierarchy, Hierarchy)
        else:
            with pytest.raises(errors.PermissionDenied):
                self.svc_mgr.get_bin_hierarchy()

    def test_can_access_bin_hierarchy(self):
        """Tests can_access_bin_hierarchy"""
        # From test_templates/resource.py::BinHierarchySession::can_access_objective_bank_hierarchy_template
        assert isinstance(self.svc_mgr.can_access_bin_hierarchy(), bool)

    def test_use_comparative_bin_view(self):
        """Tests use_comparative_bin_view"""
        # From test_templates/resource.py::BinLookupSession::use_comparative_bin_view_template
        self.svc_mgr.use_comparative_bin_view()

    def test_use_plenary_bin_view(self):
        """Tests use_plenary_bin_view"""
        # From test_templates/resource.py::BinLookupSession::use_plenary_bin_view_template
        self.svc_mgr.use_plenary_bin_view()

    def test_get_root_bin_ids(self):
        """Tests get_root_bin_ids"""
        # From test_templates/resource.py::BinHierarchySession::get_root_bin_ids_template
        if not is_never_authz(self.service_config):
            root_ids = self.svc_mgr.get_root_bin_ids()
            assert isinstance(root_ids, IdList)
            # probably should be == 1, but we seem to be getting test cruft,
            # and I can't pinpoint where it's being introduced.
            assert root_ids.available() >= 1
        else:
            with pytest.raises(errors.PermissionDenied):
                self.svc_mgr.get_root_bin_ids()

    def test_get_root_bins(self):
        """Tests get_root_bins"""
        # From test_templates/resource.py::BinHierarchySession::get_root_bins_template
        from dlkit.abstract_osid.resource.objects import BinList
        if not is_never_authz(self.service_config):
            roots = self.svc_mgr.get_root_bins()
            assert isinstance(roots, OsidList)
            assert roots.available() == 1
        else:
            with pytest.raises(errors.PermissionDenied):
                self.svc_mgr.get_root_bins()

    def test_has_parent_bins(self):
        """Tests has_parent_bins"""
        # From test_templates/resource.py::BinHierarchySession::has_parent_bins_template
        if not is_never_authz(self.service_config):
            assert isinstance(self.svc_mgr.has_parent_bins(self.catalogs['Child 1'].ident), bool)
            assert self.svc_mgr.has_parent_bins(self.catalogs['Child 1'].ident)
            assert self.svc_mgr.has_parent_bins(self.catalogs['Child 2'].ident)
            assert self.svc_mgr.has_parent_bins(self.catalogs['Grandchild 1'].ident)
            assert not self.svc_mgr.has_parent_bins(self.catalogs['Root'].ident)
        else:
            with pytest.raises(errors.PermissionDenied):
                self.svc_mgr.has_parent_bins(self.fake_id)

    def test_is_parent_of_bin(self):
        """Tests is_parent_of_bin"""
        # From test_templates/resource.py::BinHierarchySession::is_parent_of_bin_template
        if not is_never_authz(self.service_config):
            assert isinstance(self.svc_mgr.is_parent_of_bin(self.catalogs['Child 1'].ident, self.catalogs['Root'].ident), bool)
            assert self.svc_mgr.is_parent_of_bin(self.catalogs['Root'].ident, self.catalogs['Child 1'].ident)
            assert self.svc_mgr.is_parent_of_bin(self.catalogs['Child 1'].ident, self.catalogs['Grandchild 1'].ident)
            assert not self.svc_mgr.is_parent_of_bin(self.catalogs['Child 1'].ident, self.catalogs['Root'].ident)
        else:
            with pytest.raises(errors.PermissionDenied):
                self.svc_mgr.is_parent_of_bin(self.fake_id, self.fake_id)

    def test_get_parent_bin_ids(self):
        """Tests get_parent_bin_ids"""
        # From test_templates/resource.py::BinHierarchySession::get_parent_bin_ids_template
        from dlkit.abstract_osid.id.objects import IdList
        if not is_never_authz(self.service_config):
            catalog_list = self.svc_mgr.get_parent_bin_ids(self.catalogs['Child 1'].ident)
            assert isinstance(catalog_list, IdList)
            assert catalog_list.available() == 1
        else:
            with pytest.raises(errors.PermissionDenied):
                self.svc_mgr.get_parent_bin_ids(self.fake_id)

    def test_get_parent_bins(self):
        """Tests get_parent_bins"""
        # From test_templates/resource.py::BinHierarchySession::get_parent_bins_template
        if not is_never_authz(self.service_config):
            catalog_list = self.svc_mgr.get_parent_bins(self.catalogs['Child 1'].ident)
            assert isinstance(catalog_list, OsidList)
            assert catalog_list.available() == 1
            assert catalog_list.next().display_name.text == 'Root'
        else:
            with pytest.raises(errors.PermissionDenied):
                self.svc_mgr.get_parent_bins(self.fake_id)

    def test_is_ancestor_of_bin(self):
        """Tests is_ancestor_of_bin"""
        # From test_templates/resource.py::BinHierarchySession::is_ancestor_of_bin_template
        if not is_never_authz(self.service_config):
            pytest.raises(errors.Unimplemented,
                          self.svc_mgr.is_ancestor_of_bin,
                          self.catalogs['Root'].ident,
                          self.catalogs['Child 1'].ident)
        else:
            with pytest.raises(errors.PermissionDenied):
                self.svc_mgr.is_ancestor_of_bin(self.fake_id, self.fake_id)
        # self.assertTrue(isinstance(self.svc_mgr.is_ancestor_of_bin(
        #     self.catalogs['Root'].ident,
        #     self.catalogs['Child 1'].ident),
        #     bool))
        # self.assertTrue(self.svc_mgr.is_ancestor_of_bin(
        #     self.catalogs['Root'].ident,
        #     self.catalogs['Child 1'].ident))
        # self.assertTrue(self.svc_mgr.is_ancestor_of_bin(
        #     self.catalogs['Root'].ident,
        #     self.catalogs['Grandchild 1'].ident))
        # self.assertFalse(self.svc_mgr.is_ancestor_of_bin(
        #     self.catalogs['Child 1'].ident,
        #     self.catalogs['Root'].ident))

    def test_has_child_bins(self):
        """Tests has_child_bins"""
        # From test_templates/resource.py::BinHierarchySession::has_child_bins_template
        if not is_never_authz(self.service_config):
            assert isinstance(self.svc_mgr.has_child_bins(self.catalogs['Child 1'].ident), bool)
            assert self.svc_mgr.has_child_bins(self.catalogs['Root'].ident)
            assert self.svc_mgr.has_child_bins(self.catalogs['Child 1'].ident)
            assert not self.svc_mgr.has_child_bins(self.catalogs['Child 2'].ident)
            assert not self.svc_mgr.has_child_bins(self.catalogs['Grandchild 1'].ident)
        else:
            with pytest.raises(errors.PermissionDenied):
                self.svc_mgr.has_child_bins(self.fake_id)

    def test_is_child_of_bin(self):
        """Tests is_child_of_bin"""
        # From test_templates/resource.py::BinHierarchySession::is_child_of_bin_template
        if not is_never_authz(self.service_config):
            assert isinstance(self.svc_mgr.is_child_of_bin(self.catalogs['Child 1'].ident, self.catalogs['Root'].ident), bool)
            assert self.svc_mgr.is_child_of_bin(self.catalogs['Child 1'].ident, self.catalogs['Root'].ident)
            assert self.svc_mgr.is_child_of_bin(self.catalogs['Grandchild 1'].ident, self.catalogs['Child 1'].ident)
            assert not self.svc_mgr.is_child_of_bin(self.catalogs['Root'].ident, self.catalogs['Child 1'].ident)
        else:
            with pytest.raises(errors.PermissionDenied):
                self.svc_mgr.is_child_of_bin(self.fake_id, self.fake_id)

    def test_get_child_bin_ids(self):
        """Tests get_child_bin_ids"""
        # From test_templates/resource.py::BinHierarchySession::get_child_bin_ids_template
        from dlkit.abstract_osid.id.objects import IdList
        if not is_never_authz(self.service_config):
            catalog_list = self.svc_mgr.get_child_bin_ids(self.catalogs['Child 1'].ident)
            assert isinstance(catalog_list, IdList)
            assert catalog_list.available() == 1
        else:
            with pytest.raises(errors.PermissionDenied):
                self.svc_mgr.get_child_bin_ids(self.fake_id)

    def test_get_child_bins(self):
        """Tests get_child_bins"""
        # From test_templates/resource.py::BinHierarchySession::get_child_bins_template
        if not is_never_authz(self.service_config):
            catalog_list = self.svc_mgr.get_child_bins(self.catalogs['Child 1'].ident)
            assert isinstance(catalog_list, OsidList)
            assert catalog_list.available() == 1
            assert catalog_list.next().display_name.text == 'Grandchild 1'
        else:
            with pytest.raises(errors.PermissionDenied):
                self.svc_mgr.get_child_bins(self.fake_id)

    def test_is_descendant_of_bin(self):
        """Tests is_descendant_of_bin"""
        # From test_templates/resource.py::BinHierarchySession::is_descendant_of_bin_template
        if not is_never_authz(self.service_config):
            pytest.raises(errors.Unimplemented,
                          self.svc_mgr.is_descendant_of_bin,
                          self.catalogs['Child 1'].ident,
                          self.catalogs['Root'].ident)
        else:
            with pytest.raises(errors.PermissionDenied):
                self.svc_mgr.is_descendant_of_bin(self.fake_id, self.fake_id)
        # self.assertTrue(isinstance(self.svc_mgr.is_descendant_of_bin(
        #     self.catalogs['Root'].ident,
        #     self.catalogs['Child 1'].ident),
        #     bool))
        # self.assertTrue(self.svc_mgr.is_descendant_of_bin(
        #     self.catalogs['Child 1'].ident,
        #     self.catalogs['Root'].ident))
        # self.assertTrue(self.svc_mgr.is_descendant_of_bin(
        #     self.catalogs['Grandchild 1'].ident,
        #     self.catalogs['Root'].ident))
        # self.assertFalse(self.svc_mgr.is_descendant_of_bin(
        #     self.catalogs['Root'].ident,
        #     self.catalogs['Child 1'].ident))
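
    # Note (added): a minimal sketch of the node-traversal call exercised by
    # the two tests below, assuming the OSID-style argument order
    # (bin_id, ancestor_levels, descendant_levels, include_siblings):
    #
    #     node = self.svc_mgr.get_bin_nodes(self.catalogs['Child 1'].ident, 1, 2, False)
    #     node.get_parent_ids()  # one ancestor level -> the 'Root' bin
    #     node.get_child_ids()   # descendant levels -> 'Grandchild 1'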
def test_get_bin_node_ids(self): """Tests get_bin_node_ids""" # From test_templates/resource.py::BinHierarchySession::get_bin_node_ids_template # Per the spec, perhaps counterintuitively this method returns a # node, **not** a IdList... if not is_never_authz(self.service_config): node = self.svc_mgr.get_bin_node_ids(self.catalogs['Child 1'].ident, 1, 2, False) assert isinstance(node, OsidNode) assert not node.is_root() assert not node.is_leaf() assert node.get_child_ids().available() == 1 assert isinstance(node.get_child_ids(), IdList) assert node.get_parent_ids().available() == 1 assert isinstance(node.get_parent_ids(), IdList) else: with pytest.raises(errors.PermissionDenied): self.svc_mgr.get_bin_node_ids(self.fake_id, 1, 2, False) def test_get_bin_nodes(self): """Tests get_bin_nodes""" # From test_templates/resource.py::BinHierarchySession::get_bin_nodes_template if not is_never_authz(self.service_config): node = self.svc_mgr.get_bin_nodes(self.catalogs['Child 1'].ident, 1, 2, False) assert isinstance(node, OsidNode) assert not node.is_root() assert not node.is_leaf() assert node.get_child_ids().available() == 1 assert isinstance(node.get_child_ids(), IdList) assert node.get_parent_ids().available() == 1 assert isinstance(node.get_parent_ids(), IdList) else: with pytest.raises(errors.PermissionDenied): self.svc_mgr.get_bin_nodes(self.fake_id, 1, 2, False) @pytest.fixture(scope="class", params=['TEST_SERVICE', 'TEST_SERVICE_ALWAYS_AUTHZ', 'TEST_SERVICE_NEVER_AUTHZ', 'TEST_SERVICE_CATALOGING', 'TEST_SERVICE_FILESYSTEM', 'TEST_SERVICE_MEMCACHE']) def bin_hierarchy_design_session_class_fixture(request): # From test_templates/resource.py::BinHierarchyDesignSession::init_template request.cls.service_config = request.param request.cls.svc_mgr = Runtime().get_service_manager( 'RESOURCE', proxy=PROXY, implementation=request.cls.service_config) request.cls.catalogs = dict() request.cls.fake_id = Id('resource.Resource%3Afake%40DLKIT.MIT.EDU') if not is_never_authz(request.cls.service_config): for name in ['Root', 'Child 1', 'Child 2', 'Grandchild 1']: create_form = request.cls.svc_mgr.get_bin_form_for_create([]) create_form.display_name = name create_form.description = 'Test Bin ' + name request.cls.catalogs[name] = request.cls.svc_mgr.create_bin(create_form) request.cls.svc_mgr.add_root_bin(request.cls.catalogs['Root'].ident) request.cls.svc_mgr.add_child_bin(request.cls.catalogs['Root'].ident, request.cls.catalogs['Child 1'].ident) request.cls.svc_mgr.add_child_bin(request.cls.catalogs['Root'].ident, request.cls.catalogs['Child 2'].ident) request.cls.svc_mgr.add_child_bin(request.cls.catalogs['Child 1'].ident, request.cls.catalogs['Grandchild 1'].ident) def class_tear_down(): if not is_never_authz(request.cls.service_config): request.cls.svc_mgr.remove_child_bin(request.cls.catalogs['Child 1'].ident, request.cls.catalogs['Grandchild 1'].ident) request.cls.svc_mgr.remove_child_bins(request.cls.catalogs['Root'].ident) for cat_name in request.cls.catalogs: request.cls.svc_mgr.delete_bin(request.cls.catalogs[cat_name].ident) request.addfinalizer(class_tear_down) @pytest.fixture(scope="function") def bin_hierarchy_design_session_test_fixture(request): # From test_templates/resource.py::BinHierarchyDesignSession::init_template request.cls.session = request.cls.svc_mgr @pytest.mark.usefixtures("bin_hierarchy_design_session_class_fixture", "bin_hierarchy_design_session_test_fixture") class TestBinHierarchyDesignSession(object): """Tests for BinHierarchyDesignSession""" def 
test_get_bin_hierarchy_id(self): """Tests get_bin_hierarchy_id""" # From test_templates/resource.py::BinHierarchySession::get_bin_hierarchy_id_template hierarchy_id = self.svc_mgr.get_bin_hierarchy_id() assert isinstance(hierarchy_id, Id) def test_get_bin_hierarchy(self): """Tests get_bin_hierarchy""" # From test_templates/resource.py::BinHierarchySession::get_bin_hierarchy_template if not is_never_authz(self.service_config): hierarchy = self.svc_mgr.get_bin_hierarchy() assert isinstance(hierarchy, Hierarchy) else: with pytest.raises(errors.PermissionDenied): self.svc_mgr.get_bin_hierarchy() def test_can_modify_bin_hierarchy(self): """Tests can_modify_bin_hierarchy""" # From test_templates/resource.py::BinHierarchyDesignSession::can_modify_bin_hierarchy_template assert isinstance(self.session.can_modify_bin_hierarchy(), bool) def test_add_root_bin(self): """Tests add_root_bin""" # From test_templates/resource.py::BinHierarchyDesignSession::add_root_bin_template # this is tested in the setUpClass if not is_never_authz(self.service_config): roots = self.session.get_root_bins() assert isinstance(roots, OsidList) assert roots.available() == 1 else: with pytest.raises(errors.PermissionDenied): self.session.add_root_bin(self.fake_id) def test_remove_root_bin(self): """Tests remove_root_bin""" # From test_templates/resource.py::BinHierarchyDesignSession::remove_root_bin_template if not is_never_authz(self.service_config): roots = self.session.get_root_bins() assert roots.available() == 1 create_form = self.svc_mgr.get_bin_form_for_create([]) create_form.display_name = 'new root' create_form.description = 'Test Bin root' new_bin = self.svc_mgr.create_bin(create_form) self.svc_mgr.add_root_bin(new_bin.ident) roots = self.session.get_root_bins() assert roots.available() == 2 self.session.remove_root_bin(new_bin.ident) roots = self.session.get_root_bins() assert roots.available() == 1 else: with pytest.raises(errors.PermissionDenied): self.session.remove_root_bin(self.fake_id) def test_add_child_bin(self): """Tests add_child_bin""" # From test_templates/resource.py::BinHierarchyDesignSession::add_child_bin_template if not is_never_authz(self.service_config): # this is tested in the setUpClass children = self.session.get_child_bins(self.catalogs['Root'].ident) assert isinstance(children, OsidList) assert children.available() == 2 else: with pytest.raises(errors.PermissionDenied): self.session.add_child_bin(self.fake_id, self.fake_id) def test_remove_child_bin(self): """Tests remove_child_bin""" # From test_templates/resource.py::BinHierarchyDesignSession::remove_child_bin_template if not is_never_authz(self.service_config): children = self.session.get_child_bins(self.catalogs['Root'].ident) assert children.available() == 2 create_form = self.svc_mgr.get_bin_form_for_create([]) create_form.display_name = 'test child' create_form.description = 'Test Bin child' new_bin = self.svc_mgr.create_bin(create_form) self.svc_mgr.add_child_bin( self.catalogs['Root'].ident, new_bin.ident) children = self.session.get_child_bins(self.catalogs['Root'].ident) assert children.available() == 3 self.session.remove_child_bin( self.catalogs['Root'].ident, new_bin.ident) children = self.session.get_child_bins(self.catalogs['Root'].ident) assert children.available() == 2 else: with pytest.raises(errors.PermissionDenied): self.session.remove_child_bin(self.fake_id, self.fake_id) def test_remove_child_bins(self): """Tests remove_child_bins""" # From 
test_templates/resource.py::BinHierarchyDesignSession::remove_child_bins_template if not is_never_authz(self.service_config): children = self.session.get_child_bins(self.catalogs['Grandchild 1'].ident) assert children.available() == 0 create_form = self.svc_mgr.get_bin_form_for_create([]) create_form.display_name = 'test great grandchild' create_form.description = 'Test Bin child' new_bin = self.svc_mgr.create_bin(create_form) self.svc_mgr.add_child_bin( self.catalogs['Grandchild 1'].ident, new_bin.ident) children = self.session.get_child_bins(self.catalogs['Grandchild 1'].ident) assert children.available() == 1 self.session.remove_child_bins(self.catalogs['Grandchild 1'].ident) children = self.session.get_child_bins(self.catalogs['Grandchild 1'].ident) assert children.available() == 0 else: with pytest.raises(errors.PermissionDenied): self.session.remove_child_bins(self.fake_id)
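# Editor's note (not part of the file above): the class-fixture idiom used
# throughout these dlkit tests generalizes. A minimal standalone sketch of
# the same pattern -- a parametrized class-scoped fixture with a finalizer --
# using illustrative names only:
import pytest


@pytest.fixture(scope="class", params=["config_a", "config_b"])
def catalog_class_fixture(request):
    # Runs once per (test class, param) pair; shared state hangs off the class.
    request.cls.service_config = request.param
    request.cls.catalogs = {}

    def class_tear_down():
        # Stands in for the delete_bin()/remove_child_bin() cleanup above.
        request.cls.catalogs.clear()

    request.addfinalizer(class_tear_down)


@pytest.mark.usefixtures("catalog_class_fixture")
class TestExample(object):
    def test_config_is_set(self):
        assert self.service_config in ("config_a", "config_b")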
49.541587
176
0.697986
12,805
103,641
5.31558
0.027802
0.049658
0.034467
0.050686
0.902639
0.841022
0.802412
0.757985
0.706344
0.653895
0
0.003221
0.209087
103,641
2,091
177
49.56528
0.827146
0.178346
0
0.654705
0
0
0.076108
0.0411
0
0
0
0
0.134733
1
0.119838
false
0.008802
0.022343
0
0.15369
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
ddd56cf415ba515f33a66e431ba8ee288de9fcc2
730
py
Python
Chapter 07/Chap07_Example7.93.py
Anancha/Programming-Techniques-using-Python
e80c329d2a27383909d358741a5cab03cb22fd8b
[ "MIT" ]
null
null
null
Chapter 07/Chap07_Example7.93.py
Anancha/Programming-Techniques-using-Python
e80c329d2a27383909d358741a5cab03cb22fd8b
[ "MIT" ]
null
null
null
Chapter 07/Chap07_Example7.93.py
Anancha/Programming-Techniques-using-Python
e80c329d2a27383909d358741a5cab03cb22fd8b
[ "MIT" ]
null
null
null
# set declaration
myfruits = {"Apple", "Banana", "Grapes", "Litchi", "Mango"}
mynums = {1, 2, 3, 4, 5}

# Set printing before removing
print("Before pop() method...")
print("fruits: ", myfruits)
print("numbers: ", mynums)

# Elements getting popped from the set
elerem = myfruits.pop()
print(elerem, "is removed from fruits")
elerem = myfruits.pop()
print(elerem, "is removed from fruits")
elerem = myfruits.pop()
print(elerem, "is removed from fruits")

elerem = mynums.pop()
print(elerem, "is removed from numbers")
elerem = mynums.pop()
print(elerem, "is removed from numbers")
elerem = mynums.pop()
print(elerem, "is removed from numbers")

print("After pop() method...")
print("fruits: ", myfruits)
print("numbers: ", mynums)
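# Editor's note (a sketch, not part of the example above): set.pop() removes
# and returns an *arbitrary* element, so the printed order is not guaranteed.
# Draining a set safely without counting pops:
colors = {"red", "green", "blue"}
while colors:
    print(colors.pop(), "is removed")  # element choice is unspecified
print("After draining:", colors)  # set()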
27.037037
59
0.69726
98
730
5.193878
0.316327
0.094303
0.165029
0.188605
0.72888
0.72888
0.72888
0.72888
0.548134
0.548134
0
0.007911
0.134247
730
27
60
27.037037
0.797468
0.110959
0
0.8
0
0
0.371517
0
0
0
0
0
0
1
0
false
0
0
0
0
0.6
0
0
0
null
0
0
1
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
6
ddfdb3257c33fd5fc6634b2c001a62deeda130db
2,426
py
Python
explore_data.py
the-timoye/udacity-capstone-project
47747f4b8fb5193c1c3f9db2c3ffdde215368b25
[ "MIT" ]
null
null
null
explore_data.py
the-timoye/udacity-capstone-project
47747f4b8fb5193c1c3f9db2c3ffdde215368b25
[ "MIT" ]
null
null
null
explore_data.py
the-timoye/udacity-capstone-project
47747f4b8fb5193c1c3f9db2c3ffdde215368b25
[ "MIT" ]
1
2022-01-03T06:49:26.000Z
2022-01-03T06:49:26.000Z
import pandas as pd

cities_demography = pd.read_csv('datasets/us-cities-demographics.csv', sep=';')
airport_codes = pd.read_csv('datasets/airport-codes_csv.csv')
global_temperatures = pd.read_csv('datasets/GlobalLandTemperaturesByCountry.csv')
immigration_data = pd.read_csv('datasets/immigration_data_sample.csv')

# dataset sneakpeak
print('=' * 85 + ' CITIES DEMOGRAPHY ' + '=' * 85 + ' ')
print(cities_demography.head())
print('=' * 200)
print('=' * 85 + ' AIRPORT CODES ' + '=' * 85 + ' ')
print('=' * 200)
print(airport_codes.head())
print('=' * 200)
print('=' * 85 + ' GLOBAL TEMPERATURES ' + '=' * 85 + ' ')
print('=' * 200)
print(global_temperatures.head())
print('=' * 85 + ' IMMIGRATION DATA ' + '=' * 85 + ' ')
print('=' * 200)
print(immigration_data.head())

# check datatypes
print('=' * 200)
print('=' * 85 + ' CITIES DEMOGRAPHY DATATYPES ' + '=' * 85 + ' ')
print('=' * 200)
print(cities_demography.dtypes)
print('=' * 200)
print('=' * 85 + ' AIRPORT CODES DATATYPES ' + '=' * 85 + ' ')
print('=' * 200)
print(airport_codes.dtypes)
print('=' * 200)
print('=' * 85 + ' GLOBAL TEMPERATURES DATATYPES ' + '=' * 85 + ' ')
print('=' * 200)
print(global_temperatures.dtypes)
print('=' * 85 + ' IMMIGRATION DATATYPES ' + '=' * 85 + ' ')
print('=' * 200)
print(immigration_data.dtypes)
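# Editor's note: the repeated banner printing invites a tiny helper. A sketch
# under the assumption that a centered 200-character banner is the intent; the
# banner() helper is illustrative, not part of the original script, and reuses
# the cities_demography frame loaded above.
def banner(title):
    print((' ' + title + ' ').center(200, '='))


banner('CITIES DEMOGRAPHY')
print(cities_demography.head())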
63.842105
211
0.324815
130
2,426
5.915385
0.215385
0.124837
0.202861
0.143043
0.462939
0.09883
0
0
0
0
0
0.015398
0.036274
2,426
38
212
63.842105
0.313516
0.013603
0
0.363636
0
0
0.713927
0.629444
0
1
0
0
0
1
0
false
0
0.030303
0
0.030303
0.848485
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
1
null
1
0
0
0
0
0
0
0
0
0
0
1
0
6
ddfffece8acd25cc751629339d847f56f3a1e93d
25
py
Python
app/bus_service/__init__.py
en-medina/RPI_REACTOR_CSTR_TOG
1771123c6cdac6e8be1c21508d921c35fd68db25
[ "MIT" ]
null
null
null
app/bus_service/__init__.py
en-medina/RPI_REACTOR_CSTR_TOG
1771123c6cdac6e8be1c21508d921c35fd68db25
[ "MIT" ]
null
null
null
app/bus_service/__init__.py
en-medina/RPI_REACTOR_CSTR_TOG
1771123c6cdac6e8be1c21508d921c35fd68db25
[ "MIT" ]
null
null
null
from .bus import init_bus
25
25
0.84
5
25
4
0.8
0
0
0
0
0
0
0
0
0
0
0
0.12
25
1
25
25
0.909091
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
fb21d323aa2b95daccda2852c30e4197d37195bd
99
py
Python
terrascript/fastly/__init__.py
hugovk/python-terrascript
08fe185904a70246822f5cfbdc9e64e9769ec494
[ "BSD-2-Clause" ]
4
2022-02-07T21:08:14.000Z
2022-03-03T04:41:28.000Z
terrascript/fastly/__init__.py
hugovk/python-terrascript
08fe185904a70246822f5cfbdc9e64e9769ec494
[ "BSD-2-Clause" ]
null
null
null
terrascript/fastly/__init__.py
hugovk/python-terrascript
08fe185904a70246822f5cfbdc9e64e9769ec494
[ "BSD-2-Clause" ]
2
2022-02-06T01:49:42.000Z
2022-02-08T14:15:00.000Z
# terrascript/fastly/__init__.py

import terrascript


class fastly(terrascript.Provider):
    pass
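# Editor's note: a hypothetical usage sketch. The Terrascript accumulator
# object and the api_key argument are assumptions about python-terrascript's
# API, not taken from this file.
import terrascript
from terrascript.fastly import fastly

config = terrascript.Terrascript()
config += fastly(api_key="example-key")  # assumed provider argument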
14.142857
35
0.787879
11
99
6.727273
0.727273
0
0
0
0
0
0
0
0
0
0
0
0.131313
99
6
36
16.5
0.860465
0.30303
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
6
34ad1df572090c2af610e7ccc9276967c480df4f
19
py
Python
wft/__init__.py
gsteele13/wft-generic
075b72bfb3029df4208a29cf4bd40a667432cac9
[ "MIT" ]
null
null
null
wft/__init__.py
gsteele13/wft-generic
075b72bfb3029df4208a29cf4bd40a667432cac9
[ "MIT" ]
null
null
null
wft/__init__.py
gsteele13/wft-generic
075b72bfb3029df4208a29cf4bd40a667432cac9
[ "MIT" ]
null
null
null
from .wft import *
9.5
18
0.684211
3
19
4.333333
1
0
0
0
0
0
0
0
0
0
0
0
0.210526
19
1
19
19
0.866667
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
34c00bad06497513d18536e3586df3807a9d192a
232
py
Python
test_project/test_runner.py
lordoftheflies/django-celery-email
32e67f7c13a2a9e23d59b7c5bb4917836c890e65
[ "BSD-3-Clause" ]
null
null
null
test_project/test_runner.py
lordoftheflies/django-celery-email
32e67f7c13a2a9e23d59b7c5bb4917836c890e65
[ "BSD-3-Clause" ]
null
null
null
test_project/test_runner.py
lordoftheflies/django-celery-email
32e67f7c13a2a9e23d59b7c5bb4917836c890e65
[ "BSD-3-Clause" ]
null
null
null
from django.test.simple import DjangoTestSuiteRunner


class DJCETestSuiteRunner(DjangoTestSuiteRunner):
    def setup_test_environment(self, **kwargs):
        pass

    def teardown_test_environment(self, **kwargs):
        pass
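# Editor's note: django.test.simple was removed in Django 1.8. On modern
# Django the equivalent no-op runner would subclass DiscoverRunner instead;
# a sketch under that assumption:
from django.test.runner import DiscoverRunner


class ModernDJCETestSuiteRunner(DiscoverRunner):
    def setup_test_environment(self, **kwargs):
        pass  # deliberately skip environment setup, as in the original

    def teardown_test_environment(self, **kwargs):
        pass  # deliberately skip environment teardown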
21.090909
52
0.74569
23
232
7.347826
0.652174
0.177515
0.224852
0.295858
0.343195
0
0
0
0
0
0
0
0.181034
232
10
53
23.2
0.889474
0
0
0.333333
0
0
0
0
0
0
0
0
0
1
0.333333
false
0.333333
0.166667
0
0.666667
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
6
34ea048bef049b2b9e64f57bfdbf3f8cf4f077e6
239
py
Python
knowit/providers/__init__.py
ratoaq2/knowit
e7cc0d786fafdb9dba99b95a1cc95c02f84f0b5b
[ "MIT" ]
21
2016-10-15T13:49:16.000Z
2021-06-14T14:42:57.000Z
knowit/providers/__init__.py
ratoaq2/knowit
e7cc0d786fafdb9dba99b95a1cc95c02f84f0b5b
[ "MIT" ]
35
2016-11-18T17:08:38.000Z
2021-11-26T09:36:35.000Z
knowit/providers/__init__.py
ratoaq2/knowit
e7cc0d786fafdb9dba99b95a1cc95c02f84f0b5b
[ "MIT" ]
5
2016-11-23T23:39:52.000Z
2021-02-27T19:18:27.000Z
"""Provider package.""" from knowit.providers.enzyme import EnzymeProvider from knowit.providers.ffmpeg import FFmpegProvider from knowit.providers.mediainfo import MediaInfoProvider from knowit.providers.mkvmerge import MkvMergeProvider
34.142857
56
0.857741
26
239
7.884615
0.538462
0.195122
0.370732
0
0
0
0
0
0
0
0
0
0.079498
239
6
57
39.833333
0.931818
0.07113
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
34f45f95ff8dfd1a7c3e39566bd77197142c9a26
274
py
Python
ckanext-mzp/ckanext/mzp/logic/helpers.py
franekj/star
c91ab03b1ba077b5ebeed7165d0b575b8591fb55
[ "Apache-2.0" ]
1
2019-10-09T08:46:44.000Z
2019-10-09T08:46:44.000Z
ckanext-mzp/ckanext/mzp/logic/helpers.py
franekj/star
c91ab03b1ba077b5ebeed7165d0b575b8591fb55
[ "Apache-2.0" ]
3
2020-03-24T16:58:44.000Z
2021-02-02T22:03:33.000Z
ckanext-mzp/ckanext/mzp/logic/helpers.py
franekj/star
c91ab03b1ba077b5ebeed7165d0b575b8591fb55
[ "Apache-2.0" ]
3
2019-09-11T10:04:59.000Z
2020-01-30T15:55:50.000Z
import ckan.plugins.toolkit as tk


def get_package_source(package_id):
    return tk.get_action('package_source_list')({}, {"package_id": package_id})


def get_package_reference(package_id):
    return tk.get_action('package_reference_list')({}, {"package_id": package_id})
34.25
82
0.766423
40
274
4.85
0.375
0.278351
0.134021
0.175258
0.56701
0.340206
0.340206
0
0
0
0
0
0.094891
274
8
82
34.25
0.782258
0
0
0
0
0
0.221818
0.08
0
0
0
0
0
1
0.4
false
0
0.2
0.4
1
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
6
34f7896d8ff286041154e0135a067916b091c6da
96
py
Python
venv/lib/python3.8/site-packages/requests/__version__.py
GiulianaPola/select_repeats
17a0d053d4f874e42cf654dd142168c2ec8fbd11
[ "MIT" ]
1
2022-02-22T04:49:18.000Z
2022-02-22T04:49:18.000Z
venv/lib/python3.8/site-packages/requests/__version__.py
GiulianaPola/select_repeats
17a0d053d4f874e42cf654dd142168c2ec8fbd11
[ "MIT" ]
null
null
null
venv/lib/python3.8/site-packages/requests/__version__.py
GiulianaPola/select_repeats
17a0d053d4f874e42cf654dd142168c2ec8fbd11
[ "MIT" ]
null
null
null
/home/runner/.cache/pip/pool/3d/91/32/3d348837f8d1200201e75c15ee9c3cd66dea030d089d247b3ab2995271
96
96
0.895833
9
96
9.555556
1
0
0
0
0
0
0
0
0
0
0
0.46875
0
96
1
96
96
0.427083
0
0
0
0
0
0
0
0
1
0
0
0
0
null
null
0
0
null
null
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
1
0
0
0
1
0
0
0
0
0
0
0
0
6
5520a59059023af687e2cd895064ca5eb926e4f5
579
py
Python
Chapter5_DNN/Chapter5_5_NeuralNetworkGUI/start/drawer/src/preprocessing.py
thisisjako/UdemyTF
ee4102391ed6bd50f764955f732f5740425a9209
[ "MIT" ]
null
null
null
Chapter5_DNN/Chapter5_5_NeuralNetworkGUI/start/drawer/src/preprocessing.py
thisisjako/UdemyTF
ee4102391ed6bd50f764955f732f5740425a9209
[ "MIT" ]
null
null
null
Chapter5_DNN/Chapter5_5_NeuralNetworkGUI/start/drawer/src/preprocessing.py
thisisjako/UdemyTF
ee4102391ed6bd50f764955f732f5740425a9209
[ "MIT" ]
null
null
null
import os
from typing import Any

import cv2
import matplotlib.pyplot as plt
import numpy as np
from scipy.ndimage import center_of_mass

FILE_PATH = os.path.abspath(__file__)
PROJECT_DIR = os.path.dirname(os.path.dirname(FILE_PATH))


def load(image_path: str) -> np.ndarray:
    pass  # TODO


def resize(image: np.ndarray) -> np.ndarray:
    pass  # TODO


def normalize(image: np.ndarray) -> np.ndarray:
    pass  # TODO


def center(image: np.ndarray) -> np.ndarray:
    pass  # TODO


def get_image(DrawingFrame: Any, debug: bool = False) -> np.ndarray:
    pass  # TODO
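# Editor's note: the pass/TODO bodies above are course-exercise placeholders
# and are left as such. For orientation only, a possible shape for two of the
# stubs, assuming an MNIST-style 28x28 grayscale pipeline (my assumption, not
# the course solution):
import cv2
import numpy as np


def resize_sketch(image: np.ndarray) -> np.ndarray:
    return cv2.resize(image, (28, 28))  # assumed target resolution


def normalize_sketch(image: np.ndarray) -> np.ndarray:
    return image.astype(np.float32) / 255.0  # scale pixels into [0, 1]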
17.545455
68
0.704663
88
579
4.511364
0.420455
0.18136
0.163728
0.214106
0.307305
0.256927
0.256927
0.256927
0
0
0
0.002128
0.188256
579
32
69
18.09375
0.842553
0.041451
0
0.277778
0
0
0
0
0
0
0
0.03125
0
1
0.277778
false
0.277778
0.333333
0
0.611111
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
1
1
0
1
0
0
6
9b30b1da26d1bfc9ca1001258cd4e0a5ad051002
134
py
Python
objects/__init__.py
4rzael/krocs
42cb0a806d4c5a9350fca84a0258817bf6a9c154
[ "MIT" ]
5
2018-05-07T12:57:58.000Z
2020-07-08T01:46:39.000Z
objects/__init__.py
4rzael/krocs
42cb0a806d4c5a9350fca84a0258817bf6a9c154
[ "MIT" ]
null
null
null
objects/__init__.py
4rzael/krocs
42cb0a806d4c5a9350fca84a0258817bf6a9c154
[ "MIT" ]
null
null
null
""" path tricks. """ import sys sys.path.append('..') from .connection import * from .universal_time import * from .vessels import *
16.75
29
0.69403
17
134
5.411765
0.588235
0.217391
0
0
0
0
0
0
0
0
0
0
0.149254
134
7
30
19.142857
0.807018
0.089552
0
0
0
0
0.017544
0
0
0
0
0
0
1
0
true
0
0.8
0
0.8
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
9b328c3c19d13de6fcd2bccd5120e6eb29b0118b
37
py
Python
python_core/__init__.py
randoum/python-core
df0cad476dcc63865941a25840057621f01191d5
[ "MIT" ]
null
null
null
python_core/__init__.py
randoum/python-core
df0cad476dcc63865941a25840057621f01191d5
[ "MIT" ]
null
null
null
python_core/__init__.py
randoum/python-core
df0cad476dcc63865941a25840057621f01191d5
[ "MIT" ]
null
null
null
from .python_core import HttpEndpoint
37
37
0.891892
5
37
6.4
1
0
0
0
0
0
0
0
0
0
0
0
0.081081
37
1
37
37
0.941176
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
9b60cc5a44e3b28730452a0e03600299c30781f5
7,733
py
Python
corehq/apps/auditcare/tests/data/auditcare_migration.py
akashkj/commcare-hq
b00a62336ec26cea1477dfb8c048c548cc462831
[ "BSD-3-Clause" ]
471
2015-01-10T02:55:01.000Z
2022-03-29T18:07:18.000Z
corehq/apps/auditcare/tests/data/auditcare_migration.py
akashkj/commcare-hq
b00a62336ec26cea1477dfb8c048c548cc462831
[ "BSD-3-Clause" ]
14,354
2015-01-01T07:38:23.000Z
2022-03-31T20:55:14.000Z
corehq/apps/auditcare/tests/data/auditcare_migration.py
akashkj/commcare-hq
b00a62336ec26cea1477dfb8c048c548cc462831
[ "BSD-3-Clause" ]
175
2015-01-06T07:16:47.000Z
2022-03-29T13:27:01.000Z
from datetime import datetime

navigation_test_docs = [
    {
        'description': 'Test User',
        'extra': {},
        'status_code': 200,
        'user': 'user@test.org',
        'session_key': '14f8fb95aece47d8341dc561dfd108df',
        'ip_address': '0.0.0.0',
        'request_path': '/a/test-domain/reports/',
        'view_kwargs': {
            'domain': 'test-domain'
        },
        'doc_type': 'NavigationEventAudit',
        'headers': {
            'REQUEST_METHOD': 'GET',
            'SERVER_PORT': '443',
        },
        'base_type': 'AuditEvent',
        'user_agent': 'Mozilla/5.0 (Windows NT 5.1)',
        'event_date': '2021-06-01T00:13:01Z',
        'view': 'corehq.apps.reports.views.default'
    },
    {
        'description': 'Test User',
        'extra': {},
        'status_code': 200,
        'user': 'user@test.org',
        'session_key': '14f8fb95aece47d8341dc561dfd108df',
        'ip_address': '0.0.0.0',
        'request_path': '/a/test-domain/reports/',
        'view_kwargs': {
            'domain': 'test-domain'
        },
        'doc_type': 'NavigationEventAudit',
        'headers': {
            'REQUEST_METHOD': 'GET',
            'SERVER_PORT': '443',
        },
        'base_type': 'AuditEvent',
        'user_agent': 'Mozilla/5.0 (Windows NT 5.1)',
        'event_date': '2021-06-01T01:13:01Z',
        'view': 'corehq.apps.reports.views.default'
    },
    {
        'description': 'Test User',
        'extra': {},
        'status_code': 200,
        'user': 'user@test.org',
        'session_key': '14f8fb95aece47d8341dc561dfd108df',
        'ip_address': '0.0.0.0',
        'request_path': '/a/test-domain/reports/',
        'view_kwargs': {
            'domain': 'test-domain'
        },
        'doc_type': 'NavigationEventAudit',
        'headers': {
            'SERVER_NAME': 'www.commcarehq.org',
            'HTTP_ACCEPT_LANGUAGE': 'en-US,en;q=0.8',
            'REQUEST_METHOD': 'GET',
            'HTTP_ACCEPT_ENCODING': 'gzip,deflate,sdch'
        },
        'base_type': 'AuditEvent',
        'user_agent': 'Mozilla/5.0 (Windows NT 5.1)',
        'event_date': '2021-06-01T00:01:00Z',
        'view': 'corehq.apps.reports.views.default'
    }
]

audit_test_docs = [
    {
        'http_accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'doc_type': 'AccessAudit',
        'description': 'Login Success',
        'get_data': [],
        'access_type': 'login',
        'base_type': 'AuditEvent',
        'post_data': [],
        'user_agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64)',
        'failures_since_start': None,
        'event_date': '2021-06-15T04:23:32Z',
        'path_info': '/accounts/login/',
        'session_key': 'sess_key',
        'ip_address': '0.0.0.0',
        'user': 'login@test.org',
        'headers': {
            'SERVER_NAME': 'www.commcarehq.org',
            'HTTP_ACCEPT_LANGUAGE': 'en-US,en;q=0.8',
            'REQUEST_METHOD': 'GET',
            'HTTP_ACCEPT_ENCODING': 'gzip,deflate,sdch'
        },
    },
    {
        'access_type': 'logout',
        'ip_address': '0.0.0.0',
        'session_key': 'sess_key',
        'user_agent': None,
        'get_data': [],
        'post_data': [],
        'http_accept': None,
        'path_info': None,
        'failures_since_start': None,
        'doc_type': 'AccessAudit',
        'user': 'logout@test.org',
        'base_type': 'AuditEvent',
        'event_date': '2021-06-24T00:00:00.15Z',
        'description': 'Logout test',
        'headers': {}
    }
]

failed_docs = [
    {
        'http_accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'doc_type': 'AccessAudit',
        'description': 'Login Success',
        'get_data': [],
        'access_type': 'login',
        'base_type': 'AuditEvent',
        'post_data': [],
        'user_agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64)',
        'failures_since_start': None,
        'event_date': '2021-05-15T04:23:32Z',
        'path_info': '/accounts/login/',
        'session_key': 'sess_key',
        'ip_address': '0.0.0.0',
        'user': 'failed@test.org',
    },
    {
        'description': 'Test User',
        'extra': {},
        'status_code': 200,
        'user': 'user@test.org',
        'session_key': '14f8fb95aece47d8341dc561dfd108df',
        'ip_address': '0.0.0.0',
        'request_path': '/a/test-domain/reports/',
        'view_kwargs': {
            'domain': 'test-domain'
        },
        'doc_type': 'NavigationEventAudit',
        'headers': {
            'SERVER_NAME': 'www.commcarehq.org',
            'HTTP_ACCEPT_LANGUAGE': 'en-US,en;q=0.8',
            'REQUEST_METHOD': 'GET',
            'HTTP_ACCEPT_ENCODING': 'gzip,deflate,sdch'
        },
        'base_type': 'AuditEvent',
        'user_agent': 'Mozilla/5.0 (Windows NT 5.1)',
        'event_date': '2021-05-01T00:01:00Z',
        'view': 'corehq.apps.reports.views.default'
    }
]

task_docs = [
    {
        'doc_type': 'NavigationEventAudit',
        'user': 'couch@test.com',
        'event_date': datetime(2021, 1, 1).strftime("%Y-%m-%dT%H:%M:%SZ"),
        'description': 'User Name',
        'extra': {},
        'headers': {
            'REQUEST_METHOD': 'GET',
        },
        'ip_address': '10.1.2.3',
        'request_path': '/a/delmar/phone/restore/?version=2.0&since=...',
        'session_key': 'abc123',
        'status_code': 200,
        'view_kwargs': {'domain': 'delmar'},
        'view': 'corehq.apps.ota.views.restore',
    },
    {
        'doc_type': 'NavigationEventAudit',
        'user': 'couch@test.com',
        'event_date': datetime(2021, 2, 1, 2).strftime("%Y-%m-%dT%H:%M:%SZ"),
        'description': 'User Name',
        'extra': {},
        'headers': {
            'REQUEST_METHOD': 'GET',
        },
        'ip_address': '10.1.2.3',
        'request_path': '/a/test-space/phone/restore/?version=2.0&since=...',
        'session_key': 'abc123',
        'status_code': 200,
        'view_kwargs': {'domain': 'test-space'},
        'view': 'corehq.apps.ota.views.restore',
    },
    {
        'doc_type': 'NavigationEventAudit',
        'user': 'couch@test.com',
        'event_date': datetime(2021, 2, 1, 2, 1).strftime("%Y-%m-%dT%H:%M:%SZ"),
        'description': 'User Name',
        'extra': {},
        'headers': {
            'REQUEST_METHOD': 'GET',
        },
        'ip_address': '10.1.2.3',
        'request_path': '/a/random/phone/restore/?version=2.0&since=...',
        'session_key': 'abc123',
        'status_code': 200,
        'view_kwargs': {'domain': 'random'},
        'view': 'corehq.apps.ota.views.restore',
    },
    {
        'doc_type': "AccessAudit",
        'user': 'couch@test.com',
        'event_date': datetime(2021, 2, 1, 3).strftime("%Y-%m-%dT%H:%M:%SZ"),
        'access_type': 'login',
        'description': 'Login Success',
        'failures_since_start': None,
        'get_data': [],
        'http_accept': 'text/html',
        'ip_address': '10.1.3.2',
        'path_info': '/a/delmar/login/',
        'post_data': [],
        'session_key': 'abc123',
        'user_agent': 'Mozilla/5.0',
    },
    {
        'doc_type': 'NavigationEventAudit',
        'user': 'couch@test.com',
        'event_date': datetime(2021, 2, 2).strftime("%Y-%m-%dT%H:%M:%SZ"),
        'description': 'User Name',
        'extra': {},
        'headers': {
            'REQUEST_METHOD': 'GET',
        },
        'ip_address': '10.1.2.3',
        'request_path': '/a/sandwich/phone/restore/?version=2.0&since=...&db=/etc/passwd\x00',
        'session_key': 'abc123',
        'status_code': 200,
        'view_kwargs': {'domain': 'sandwich'},
        'view': 'corehq.apps.ota.views.restore',
    }
]
33.047009
94
0.510669
835
7,733
4.549701
0.152096
0.011056
0.011056
0.020268
0.855752
0.851013
0.833114
0.828902
0.819426
0.78626
0
0.064622
0.291607
7,733
233
95
33.188841
0.628879
0
0
0.65368
0
0.012987
0.529161
0.106815
0
0
0
0
0
1
0
false
0.004329
0.004329
0
0.004329
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
9bbb94a24cc43df668a7951667492842e3f443f1
301
py
Python
step_2.py
Programmist3000/Lesson_201205
93e7fde02324ccf45edeb098b9b0c5d3942cf6bf
[ "Apache-2.0" ]
null
null
null
step_2.py
Programmist3000/Lesson_201205
93e7fde02324ccf45edeb098b9b0c5d3942cf6bf
[ "Apache-2.0" ]
null
null
null
step_2.py
Programmist3000/Lesson_201205
93e7fde02324ccf45edeb098b9b0c5d3942cf6bf
[ "Apache-2.0" ]
null
null
null
import requests

response = requests.get('https://kpk.kss45.ru/%D1%83%D1%87%D0%B5%D0%B1%D0%BD%D0%B0%D1%8F-'
                        '%D1%80%D0%B0%D0%B1%D0%BE%D1%82%D0%B0/%D1%80%D0%B0%D1%81%D0%BF%D0%B8%D1%'
                        '81%D0%B0%D0%BD%D0%B8%D0%B5_%D0%BF%D0%B0%D1%80.html')
print(response)
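# Editor's note: the percent-escapes in the URL are UTF-8-encoded Cyrillic.
# A quick sketch decoding just the path portion with the standard library:
from urllib.parse import unquote

path = ('/%D1%83%D1%87%D0%B5%D0%B1%D0%BD%D0%B0%D1%8F-'
        '%D1%80%D0%B0%D0%B1%D0%BE%D1%82%D0%B0/%D1%80%D0%B0%D1%81%D0%BF%D0%B8'
        '%D1%81%D0%B0%D0%BD%D0%B8%D0%B5_%D0%BF%D0%B0%D1%80.html')
print(unquote(path))  # -> /учебная-работа/расписание_пар.html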
37.625
97
0.564784
64
301
2.640625
0.390625
0.142012
0.142012
0.094675
0
0
0
0
0
0
0
0.234568
0.192691
301
7
98
43
0.460905
0
0
0
0
0.6
0.614618
0.401993
0
0
0
0
0
1
0
false
0
0.2
0
0.2
0.2
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
1
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
b5c2366c9f4f63dc54ef30deec5556796994b0e5
337
py
Python
utilities/perms.py
WhaleyTech/OpenDiscord1
dab760218c0b72c2344e00b25a7e88e250f9c03d
[ "MIT" ]
null
null
null
utilities/perms.py
WhaleyTech/OpenDiscord1
dab760218c0b72c2344e00b25a7e88e250f9c03d
[ "MIT" ]
2
2020-06-06T23:36:31.000Z
2020-06-21T16:02:22.000Z
utilities/perms.py
WhaleyTech/OpenDiscord1
dab760218c0b72c2344e00b25a7e88e250f9c03d
[ "MIT" ]
1
2020-06-03T16:24:33.000Z
2020-06-03T16:24:33.000Z
import discord.ext.commands as commands


def admin_perms():
    return commands.has_permissions(administrator=True)


def mod_perms():
    return commands.has_permissions(manage_messages=True)


def kick_perms():
    return commands.has_permissions(kick_members=True)


def ban_perms():
    return commands.has_permissions(ban_members=True)
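# Editor's note: each helper above returns a commands.check decorator, so it
# stacks directly on a command. A hypothetical wiring sketch (the bot and the
# purge command are mine, not part of this module):
import discord
from discord.ext import commands

from utilities.perms import mod_perms

bot = commands.Bot(command_prefix="!", intents=discord.Intents.default())


@bot.command()
@mod_perms()  # rejects invokers lacking the manage_messages permission
async def purge(ctx, count: int):
    await ctx.channel.purge(limit=count)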
25.923077
57
0.795252
45
337
5.711111
0.422222
0.171206
0.29572
0.342412
0.513619
0
0
0
0
0
0
0
0.115727
337
13
58
25.923077
0.862416
0
0
0
0
0
0
0
0
0
0
0
0
1
0.444444
true
0
0.111111
0.444444
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
1
1
0
0
6