hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7646b99acb07ba123e19717e66d7c3f6b78f8ed2
| 121
|
py
|
Python
|
model/SEED_Encoder/__init__.py
|
playing-code/SEED-Encoder-1
|
c038060a8cfeb9226c745f4cb725da93997bb827
|
[
"MIT"
] | null | null | null |
model/SEED_Encoder/__init__.py
|
playing-code/SEED-Encoder-1
|
c038060a8cfeb9226c745f4cb725da93997bb827
|
[
"MIT"
] | null | null | null |
model/SEED_Encoder/__init__.py
|
playing-code/SEED-Encoder-1
|
c038060a8cfeb9226c745f4cb725da93997bb827
|
[
"MIT"
] | null | null | null |
from .tokenization_seed_encoder import *
from .configuration_seed_encoder import *
from .modeling_seed_encoder import *
| 40.333333
| 42
| 0.842975
| 15
| 121
| 6.4
| 0.466667
| 0.34375
| 0.53125
| 0.4375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107438
| 121
| 3
| 43
| 40.333333
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
7651386c6e2e0d50a74fbca3705df7e830819f8e
| 37,111
|
py
|
Python
|
gpi/calculate_correlation_matrix.py
|
fusion-flap/flap_nstx_gpi
|
cf7d4bdecea8fd7434f8f7eb64e1a7b13fc0f759
|
[
"MIT"
] | null | null | null |
gpi/calculate_correlation_matrix.py
|
fusion-flap/flap_nstx_gpi
|
cf7d4bdecea8fd7434f8f7eb64e1a7b13fc0f759
|
[
"MIT"
] | 1
|
2019-10-03T22:25:58.000Z
|
2021-10-06T10:31:11.000Z
|
gpi/calculate_correlation_matrix.py
|
fusion-flap/flap_nstx_gpi
|
cf7d4bdecea8fd7434f8f7eb64e1a7b13fc0f759
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Correlation-matrix calculation and plotting for NSTX GPI ELM velocity results.

Created on Mon Mar 2 14:01:47 2020
@author: mlampert
"""
# Standard library / scientific stack imports
import os
import pandas
import copy
import matplotlib.pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
from matplotlib.gridspec import GridSpec
import matplotlib.cm as cm
import pickle
import numpy as np
from scipy.signal import correlate
#Flap imports
import flap
import flap_nstx
# register() must run before flap_mdsplus is imported/used by the data readers
flap_nstx.register()
import flap_mdsplus
#Setting up FLAP
flap_mdsplus.register('NSTX_MDSPlus')
# Read the module configuration file relative to this source file's location.
thisdir = os.path.dirname(os.path.realpath(__file__))
fn = os.path.join(thisdir,"../flap_nstx.cfg")
flap.config.read(file_name=fn)
def calculate_nstx_gpi_correlation_matrix(window_average=0.5e-3,
                                          elm_burst_window=False,
                                          sampling_time=2.5e-6,
                                          plot=False,
                                          normalized_structure=True,
                                          normalized_velocity=True,
                                          subtraction_order=4,
                                          calculate_average=True,
                                          gpi_plane_calculation=True):
    """
    Calculate the ELM-averaged 11x11 Pearson correlation matrix of the NSTX
    GPI velocity/structure results listed in the ELM database CSV.

    Two passes are made over the database: the first accumulates the mean
    Pearson coefficient for every parameter pair, the second accumulates the
    squared deviation from that mean to obtain the standard deviation.

    Parameters
    ----------
    window_average : float
        Half width of the time window around each ELM [s].
    elm_burst_window : bool
        If True, search for the ELM time only in [elm_time, elm_time+window]
        (first pass only; the second pass always uses the symmetric window).
    sampling_time : float
        GPI sampling time [s]; sets the number of samples in the window.
    plot : bool
        If True, plot the mean correlation matrix with matplotlib.
    normalized_structure, normalized_velocity : bool
        Select which processed result files are loaded.
    subtraction_order : int
        Polynomial order encoded in the processed-data filename.
    calculate_average : bool
        If True correlate the '* avg' quantities, otherwise the '* max' ones.
    gpi_plane_calculation : bool
        If True transform velocities/sizes into the GPI plane first.

    Returns
    -------
    numpy.ndarray
        Shape [11,11,2]; [...,0] is the mean Pearson coefficient over the
        ELMs, [...,1] its standard deviation.
    """
    database_file='/Users/mlampert/work/NSTX_workspace/db/ELM_findings_mlampert_velocity_good.csv'
    db=pandas.read_csv(database_file, index_col=0)
    elm_index=list(db.index)
    if calculate_average:
        pearson_keys=['Velocity ccf',       #0,1
                      'Velocity str avg',   #2,3
                      'Size avg',           #4,5
                      'Position avg',       #6,7
                      'Area avg',           #8
                      'Elongation avg',     #9
                      'Angle avg']          #10
    else:
        pearson_keys=['Velocity ccf',       #0,1
                      'Velocity str max',   #2,3
                      'Size max',           #4,5
                      'Position max',       #6,7
                      'Area max',           #8
                      'Elongation max',     #9
                      'Angle max']          #10
    nelm=0.
    pearson_matrix=np.zeros([11,11,2])      #[...,0]: mean, [...,1]: stddev
    nwin=int(window_average/sampling_time)
    # ---- First pass: accumulate the mean Pearson coefficient per pair. ----
    for index_elm in range(len(elm_index)):
        #preprocess velocity results, tackle with np.nan and outliers
        shot=int(db.loc[elm_index[index_elm]]['Shot'])
        #define ELM time for all the cases
        elm_time=db.loc[elm_index[index_elm]]['ELM time']/1000.
        wd=flap.config.get_all_section('Module NSTX_GPI')['Working directory']
        if normalized_velocity:
            if normalized_structure:
                str_add='_ns'
            else:
                str_add=''
            filename=flap_nstx.tools.filename(exp_id=shot,
                                              working_directory=wd+'/processed_data',
                                              time_range=[elm_time-2e-3,elm_time+2e-3],
                                              comment='ccf_velocity_pfit_o'+str(subtraction_order)+'_fst_0.0'+str_add+'_nv',
                                              extension='pickle')
        else:
            filename=wd+'/processed_data/'+db.loc[elm_index[index_elm]]['Filename']+'.pickle'
        status=db.loc[elm_index[index_elm]]['OK/NOT OK']
        if status != 'NO':
            # BUGFIX: use a context manager so the pickle file handle is closed
            # (the original open() was never closed).
            with open(filename, 'rb') as pickle_file:
                velocity_results=pickle.load(pickle_file)
            time=velocity_results['Time']
            if gpi_plane_calculation:
                coeff_r=np.asarray([3.7183594,-0.77821046,1402.8097])/1000. #The coordinates are in meters, the coefficients are in mm
                coeff_z=np.asarray([0.18090118,3.0657776,70.544312])/1000.  #The coordinates are in meters, the coefficients are in mm
                coeff_r_new=3./800.
                coeff_z_new=3./800.
                det=coeff_r[0]*coeff_z[1]-coeff_z[0]*coeff_r[1]
                for key in ['Velocity ccf','Velocity str max','Velocity str avg','Size max','Size avg']:
                    orig=copy.deepcopy(velocity_results[key])
                    velocity_results[key][:,0]=coeff_r_new/det*(coeff_z[1]*orig[:,0]-coeff_r[1]*orig[:,1])
                    velocity_results[key][:,1]=coeff_z_new/det*(-coeff_z[0]*orig[:,0]+coeff_r[0]*orig[:,1])
                velocity_results['Elongation max'][:]=(velocity_results['Size max'][:,0]-velocity_results['Size max'][:,1])/(velocity_results['Size max'][:,0]+velocity_results['Size max'][:,1])
                velocity_results['Elongation avg'][:]=(velocity_results['Size avg'][:,0]-velocity_results['Size avg'][:,1])/(velocity_results['Size avg'][:,0]+velocity_results['Size avg'][:,1])
            if elm_burst_window:
                elm_time_interval_ind=np.where(np.logical_and(time >= elm_time,
                                                              time <= elm_time+window_average))
            else:
                elm_time_interval_ind=np.where(np.logical_and(time >= elm_time-window_average,
                                                              time <= elm_time+window_average))
            # Refine the ELM time to the minimum of the frame similarity.
            elm_time=(time[elm_time_interval_ind])[np.argmin(velocity_results['Frame similarity'][elm_time_interval_ind])]
            elm_time_ind=np.argmin(np.abs(time-elm_time))
            ind=slice(elm_time_ind-nwin,elm_time_ind+nwin)
            for ind_first in range(11):
                for ind_second in range(11):
                    # Indices 0..7 address the R/z components of the first four
                    # (vector) quantities; 8..10 address the scalar quantities.
                    # BUGFIX: slice indexing returns numpy *views*; the in-place
                    # NaN zeroing and mean subtraction below were corrupting
                    # velocity_results for all subsequent parameter pairs.
                    # Work on copies instead.
                    if ind_first <= 7:
                        a=np.copy(velocity_results[pearson_keys[ind_first//2]][ind,np.mod(ind_first,2)])
                    else:
                        a=np.copy(velocity_results[pearson_keys[ind_first-4]][ind])
                    if ind_second <= 7:
                        b=np.copy(velocity_results[pearson_keys[ind_second//2]][ind,np.mod(ind_second,2)])
                    else:
                        b=np.copy(velocity_results[pearson_keys[ind_second-4]][ind])
                    # BUGFIX: zero NaNs symmetrically in both signals (the
                    # original pre-zeroed a's NaNs before computing the masks,
                    # so b was not zeroed where a was NaN, unlike pass 2).
                    ind_nan_a=np.isnan(a)
                    ind_nan_b=np.isnan(b)
                    a[ind_nan_a]=0.
                    a[ind_nan_b]=0.
                    b[ind_nan_a]=0.
                    b[ind_nan_b]=0.
                    a-=np.mean(a)
                    b-=np.mean(b)
                    cur_pear=np.sum(a*b)/(np.sqrt(np.sum(a**2)*np.sum(b**2)))
                    # BUGFIX: `cur_pear == np.nan` is always False (NaN never
                    # compares equal); use np.isnan.
                    if np.isnan(cur_pear):
                        cur_pear=0.
                    pearson_matrix[ind_first,ind_second,0]+=cur_pear
            nelm+=1
    pearson_matrix[:,:,0]/=nelm
    # ---- Second pass: accumulate squared deviations from the mean. ----
    for index_elm in range(len(elm_index)):
        #preprocess velocity results, tackle with np.nan and outliers
        shot=int(db.loc[elm_index[index_elm]]['Shot'])
        #define ELM time for all the cases
        elm_time=db.loc[elm_index[index_elm]]['ELM time']/1000.
        wd=flap.config.get_all_section('Module NSTX_GPI')['Working directory']
        if normalized_velocity:
            if normalized_structure:
                str_add='_ns'
            else:
                str_add=''
            filename=flap_nstx.tools.filename(exp_id=shot,
                                              working_directory=wd+'/processed_data',
                                              time_range=[elm_time-2e-3,elm_time+2e-3],
                                              comment='ccf_velocity_pfit_o'+str(subtraction_order)+'_fst_0.0'+str_add+'_nv',
                                              extension='pickle')
        else:
            filename=wd+'/processed_data/'+db.loc[elm_index[index_elm]]['Filename']+'.pickle'
        status=db.loc[elm_index[index_elm]]['OK/NOT OK']
        if status != 'NO':
            with open(filename, 'rb') as pickle_file:
                velocity_results=pickle.load(pickle_file)
            time=velocity_results['Time']
            if gpi_plane_calculation:
                coeff_r=np.asarray([3.7183594,-0.77821046,1402.8097])/1000. #The coordinates are in meters, the coefficients are in mm
                coeff_z=np.asarray([0.18090118,3.0657776,70.544312])/1000.  #The coordinates are in meters, the coefficients are in mm
                coeff_r_new=3./800.
                coeff_z_new=3./800.
                det=coeff_r[0]*coeff_z[1]-coeff_z[0]*coeff_r[1]
                for key in ['Velocity ccf','Velocity str max','Velocity str avg','Size max','Size avg']:
                    orig=copy.deepcopy(velocity_results[key])
                    velocity_results[key][:,0]=coeff_r_new/det*(coeff_z[1]*orig[:,0]-coeff_r[1]*orig[:,1])
                    velocity_results[key][:,1]=coeff_z_new/det*(-coeff_z[0]*orig[:,0]+coeff_r[0]*orig[:,1])
                velocity_results['Elongation max'][:]=(velocity_results['Size max'][:,0]-velocity_results['Size max'][:,1])/(velocity_results['Size max'][:,0]+velocity_results['Size max'][:,1])
                velocity_results['Elongation avg'][:]=(velocity_results['Size avg'][:,0]-velocity_results['Size avg'][:,1])/(velocity_results['Size avg'][:,0]+velocity_results['Size avg'][:,1])
            elm_time_interval_ind=np.where(np.logical_and(time >= elm_time-window_average,
                                                          time <= elm_time+window_average))
            elm_time=(time[elm_time_interval_ind])[np.argmin(velocity_results['Frame similarity'][elm_time_interval_ind])]
            elm_time_ind=np.argmin(np.abs(time-elm_time))
            ind=slice(elm_time_ind-nwin,elm_time_ind+nwin)
            for ind_first in range(11):
                for ind_second in range(11):
                    # Same copy/NaN/centering treatment as in the first pass.
                    if ind_first <= 7:
                        a=np.copy(velocity_results[pearson_keys[ind_first//2]][ind,np.mod(ind_first,2)])
                    else:
                        a=np.copy(velocity_results[pearson_keys[ind_first-4]][ind])
                    if ind_second <= 7:
                        b=np.copy(velocity_results[pearson_keys[ind_second//2]][ind,np.mod(ind_second,2)])
                    else:
                        b=np.copy(velocity_results[pearson_keys[ind_second-4]][ind])
                    ind_nan_a=np.isnan(a)
                    ind_nan_b=np.isnan(b)
                    a[ind_nan_a]=0.
                    a[ind_nan_b]=0.
                    b[ind_nan_a]=0.
                    b[ind_nan_b]=0.
                    a-=np.mean(a)
                    b-=np.mean(b)
                    cur_pear=np.sum(a*b)/(np.sqrt(np.sum(a**2)*np.sum(b**2)))
                    # BUGFIX: same np.isnan fix as in the first pass.
                    if np.isnan(cur_pear):
                        cur_pear=0.
                    pearson_matrix[ind_first,ind_second,1]+=(pearson_matrix[ind_first,ind_second,0]-cur_pear)**2
    pearson_matrix[:,:,1]=np.sqrt(pearson_matrix[:,:,1]/(nelm-1))
    data = pearson_matrix[:,:,0]
    if plot:
        # Force one cell to -1 so the seismic colormap spans the full [-1,1].
        data[10,10]=-1
        plt.matshow(data, cmap='seismic')
        plt.xticks(ticks=np.arange(11), labels=['Velocity ccf R',      #0,1
                                                'Velocity ccf z',      #0,1
                                                'Velocity str avg R',  #2,3
                                                'Velocity str avg z',  #2,3
                                                'Size avg R',          #4,5
                                                'Size avg z',          #4,5
                                                'Position avg R',      #6,7
                                                'Position avg z',      #6,7
                                                'Area avg',            #8
                                                'Elongation avg',      #9
                                                'Angle avg'], rotation='vertical')
        plt.yticks(ticks=np.arange(11), labels=['Velocity ccf R',      #0,1
                                                'Velocity ccf z',      #0,1
                                                'Velocity str avg R',  #2,3
                                                'Velocity str avg z',  #2,3
                                                'Size avg R',          #4,5
                                                'Size avg z',          #4,5
                                                'Position avg R',      #6,7
                                                'Position avg z',      #6,7
                                                'Area avg',            #8
                                                'Elongation avg',      #9
                                                'Angle avg'])
        plt.colorbar()
        plt.show()
    return pearson_matrix
def calculate_nstx_gpi_average_correlation_matrix(window_average=0.5e-3,
                                                  elm_burst_window=False,
                                                  sampling_time=2.5e-6,
                                                  plot=False,
                                                  normalized_structure=True,
                                                  normalized_velocity=True,
                                                  subtraction_order=4,
                                                  calculate_average=False,
                                                  gpi_plane_calculation=True,
                                                  calculate_absolute=True):
    """
    Calculate the 12x12 Pearson correlation matrix of the ELM-*averaged* GPI
    velocity/structure results (a single matrix, no per-ELM statistics).

    The averaged time traces come from calculate_avg_velocity_results();
    correlations are computed over the Tau window selected by
    window_average / elm_burst_window.

    Parameters
    ----------
    window_average : float
        Half width of the Tau window used for the correlation [s].
    elm_burst_window : bool
        If True use Tau in [0, window_average), else the symmetric window.
    sampling_time : float
        GPI sampling time [s] (kept for interface symmetry with the per-ELM
        routine; the averaged results are computed with 2.5e-6 s).
    plot : bool
        If True, plot the correlation matrix.
    normalized_structure, normalized_velocity : bool
        Kept for interface symmetry; the averaged results use True for both.
    subtraction_order : int
        Trend-subtraction order passed to calculate_avg_velocity_results.
    calculate_average : bool
        If True correlate the '* avg' quantities, otherwise the '* max' ones.
    gpi_plane_calculation : bool
        If True transform velocities/sizes into the GPI plane first.
    calculate_absolute : bool
        If True take the absolute value of the transformed quantities.

    Returns
    -------
    numpy.ndarray
        The 12x12 Pearson correlation matrix.
    """
    if calculate_average:
        pearson_keys=['Velocity ccf',        #0,1
                      'Velocity str avg',    #2,3
                      'Size avg',            #4,5
                      'Position avg',        #6,7
                      'Area avg',            #8
                      'Elongation avg',      #9
                      'Angle avg',           #10
                      'Separatrix dist avg'] #11
    else:
        pearson_keys=['Velocity ccf',        #0,1
                      'Velocity str max',    #2,3
                      'Size max',            #4,5
                      'Position max',        #6,7
                      'Area max',            #8
                      'Elongation max',      #9
                      'Angle max',           #10
                      'Separatrix dist avg'] #11 NOTE(review): 'avg' even in the max branch — confirm intentional
    pearson_matrix=np.zeros([12,12])
    # BUGFIX/consistency: forward the subtraction_order argument instead of the
    # hard-coded 4 (plot_all_parameters_vs_all_other_average already does so).
    average_velocity_results=calculate_avg_velocity_results(window_average=500e-6,
                                                            sampling_time=2.5e-6,
                                                            pdf=False,
                                                            plot=False,
                                                            return_results=True,
                                                            plot_error=False,
                                                            normalized_velocity=True,
                                                            normalized_structure=True,
                                                            subtraction_order=subtraction_order,
                                                            opacity=0.2,
                                                            correlation_threshold=0.6,
                                                            plot_max_only=False,
                                                            plot_for_publication=False,
                                                            gpi_plane_calculation=False,
                                                            plot_scatter=False,
                                                            )
    if gpi_plane_calculation:
        coeff_r=np.asarray([3.7183594,-0.77821046,1402.8097])/1000. #The coordinates are in meters, the coefficients are in mm
        coeff_z=np.asarray([0.18090118,3.0657776,70.544312])/1000.  #The coordinates are in meters, the coefficients are in mm
        coeff_r_new=3./800.
        coeff_z_new=3./800.
        det=coeff_r[0]*coeff_z[1]-coeff_z[0]*coeff_r[1]
        for key in ['Velocity ccf','Velocity str max','Velocity str avg','Size max','Size avg']:
            orig=copy.deepcopy(average_velocity_results[key])
            average_velocity_results[key][:,0]=coeff_r_new/det*(coeff_z[1]*orig[:,0]-coeff_r[1]*orig[:,1])
            average_velocity_results[key][:,1]=coeff_z_new/det*(-coeff_z[0]*orig[:,0]+coeff_r[0]*orig[:,1])
            if calculate_absolute:
                average_velocity_results[key]=np.abs(average_velocity_results[key])
        average_velocity_results['Elongation max'][:]=(average_velocity_results['Size max'][:,0]-average_velocity_results['Size max'][:,1])/(average_velocity_results['Size max'][:,0]+average_velocity_results['Size max'][:,1])
        average_velocity_results['Elongation avg'][:]=(average_velocity_results['Size avg'][:,0]-average_velocity_results['Size avg'][:,1])/(average_velocity_results['Size avg'][:,0]+average_velocity_results['Size avg'][:,1])
    # Convert Tau to seconds so it compares with window_average.
    average_velocity_results['Tau']/=1e3
    if elm_burst_window:
        ind=np.where(np.logical_and(average_velocity_results['Tau'] >= 0,
                                    average_velocity_results['Tau'] < window_average))
    else:
        ind=np.where(np.logical_and(average_velocity_results['Tau'] >= -window_average,
                                    average_velocity_results['Tau'] < window_average))
    for ind_first in range(12):
        for ind_second in range(12):
            # Indices 0..7 address R/z components of the vector quantities,
            # 8..11 the scalar quantities.
            if ind_first <= 7:
                a=average_velocity_results[pearson_keys[ind_first//2]][ind,np.mod(ind_first,2)]
                print(pearson_keys[ind_first//2])
            else:
                a=average_velocity_results[pearson_keys[ind_first-4]][ind]
                print(pearson_keys[ind_first-4])
            if ind_second <= 7:
                b=average_velocity_results[pearson_keys[ind_second//2]][ind,np.mod(ind_second,2)]
                print(pearson_keys[ind_second//2])
            else:
                b=average_velocity_results[pearson_keys[ind_second-4]][ind]
                print(pearson_keys[ind_second-4])
            a-=np.mean(a)
            b-=np.mean(b)
            pearson_matrix[ind_first,ind_second]=np.sum(a*b)/(np.sqrt(np.sum(a**2)*np.sum(b**2)))
            # BUGFIX: `x == np.nan` is always False (NaN never compares
            # equal); use np.isnan to zero undefined coefficients.
            if np.isnan(pearson_matrix[ind_first,ind_second]):
                pearson_matrix[ind_first,ind_second]=0.
    data = pearson_matrix
    if plot:
        # Force one cell to -1 so the seismic colormap spans the full [-1,1].
        data[11,11]=-1
        plt.matshow(data, cmap='seismic')
        if calculate_average:
            title='avg'
        else:
            title='max'
        plt.xticks(ticks=np.arange(12), labels=['Velocity ccf R',           #0,1
                                                'Velocity ccf z',           #0,1
                                                'Velocity str '+title+' R', #2,3
                                                'Velocity str '+title+' z', #2,3
                                                'Size '+title+' R',         #4,5
                                                'Size '+title+' z',         #4,5
                                                'Position '+title+' R',     #6,7
                                                'Position '+title+' z',     #6,7
                                                'Area '+title+'',           #8
                                                'Elongation '+title+'',     #9
                                                'Angle '+title+'',
                                                'Separatrix dist '+title], rotation='vertical',
                   )
        plt.yticks(ticks=np.arange(12), labels=['Velocity ccf R',           #0,1
                                                'Velocity ccf z',           #0,1
                                                'Velocity str '+title+' R', #2,3
                                                'Velocity str '+title+' z', #2,3
                                                'Size '+title+' R',         #4,5
                                                'Size '+title+' z',         #4,5
                                                'Position '+title+' R',     #6,7
                                                'Position '+title+' z',     #6,7
                                                'Area '+title+'',           #8
                                                'Elongation '+title+'',     #9
                                                'Angle '+title+'',
                                                'Separatrix dist '+title])
        plt.colorbar()
        plt.show()
    return pearson_matrix
def plot_all_parameters_vs_all_other(window_average=0.5e-3,
                                     sampling_time=2.5e-6,
                                     plot=True,
                                     normalized_structure=True,
                                     normalized_velocity=True,
                                     subtraction_order=4,
                                     calculate_average=True,
                                     gpi_plane_calculation=True,):
    """
    Scatter-plot selected GPI ELM parameters against each other for every ELM
    in the database, on a 6x6 GridSpec (parameter indices in plot_inds).

    For each ELM the processed velocity results are loaded, the separatrix
    distance is derived from the EFIT02 boundary, and the values inside the
    +/- window_average window around the refined ELM time are scattered.

    Parameters
    ----------
    window_average : float
        Half width of the time window around each ELM [s].
    sampling_time : float
        GPI sampling time [s].
    plot : bool
        If True, open the matplotlib figure before plotting.
    normalized_structure, normalized_velocity : bool
        Select which processed result files are loaded.
    subtraction_order : int
        Trend-subtraction order encoded in the processed-data filename.
    calculate_average : bool
        If True use the '* avg' quantities, otherwise the '* max' ones.
    gpi_plane_calculation : bool
        If True transform velocities/sizes into the GPI plane first.
    """
    database_file='/Users/mlampert/work/NSTX_workspace/db/ELM_findings_mlampert_velocity_good.csv'
    db=pandas.read_csv(database_file, index_col=0)
    gs=GridSpec(6,6)
    elm_index=list(db.index)
    if calculate_average:
        pearson_keys=['Velocity ccf',         #0,1
                      'Velocity str avg',     #2,3
                      'Size avg',             #4,5
                      'Position avg',         #6,7
                      'GPI Dalpha',           #8
                      'Elongation avg',       #9
                      'Angle avg',            #10
                      'Separatrix dist avg',  #11
                      ]
    else:
        pearson_keys=['Velocity ccf',         #0,1
                      'Velocity str max',     #2,3
                      'Size max',             #4,5
                      'Position max',         #6,7
                      'GPI Dalpha',           #8
                      'Elongation max',       #9
                      'Angle max',            #10
                      'Separatrix dist max',  #11
                      ]
    # Only these parameter indices are actually plotted.
    plot_inds=np.asarray([0,1,4,5,8,11])
    coeff_r=np.asarray([3.7183594,-0.77821046,1402.8097])/1000. #The coordinates are in meters, the coefficients are in mm
    coeff_z=np.asarray([0.18090118,3.0657776,70.544312])/1000.  #The coordinates are in meters, the coefficients are in mm
    coeff_r_new=3./800.
    coeff_z_new=3./800.
    nwin=int(window_average/sampling_time)
    if plot:
        plt.figure()
    for index_elm in range(len(elm_index)):
        #preprocess velocity results, tackle with np.nan and outliers
        shot=int(db.loc[elm_index[index_elm]]['Shot'])
        #define ELM time for all the cases
        elm_time=db.loc[elm_index[index_elm]]['ELM time']/1000.
        wd=flap.config.get_all_section('Module NSTX_GPI')['Working directory']
        if normalized_velocity:
            if normalized_structure:
                str_add='_ns'
            else:
                str_add=''
            filename=flap_nstx.tools.filename(exp_id=shot,
                                              working_directory=wd+'/processed_data',
                                              time_range=[elm_time-2e-3,elm_time+2e-3],
                                              comment='ccf_velocity_pfit_o'+str(subtraction_order)+'_fst_0.0'+str_add+'_nv',
                                              extension='pickle')
        else:
            filename=wd+'/processed_data/'+db.loc[elm_index[index_elm]]['Filename']+'.pickle'
        status=db.loc[elm_index[index_elm]]['OK/NOT OK']
        if status != 'NO':
            # BUGFIX: context manager closes the pickle file handle.
            with open(filename, 'rb') as pickle_file:
                velocity_results=pickle.load(pickle_file)
            velocity_results['Separatrix dist avg']=np.zeros(velocity_results['Position avg'].shape[0])
            velocity_results['Separatrix dist max']=np.zeros(velocity_results['Position max'].shape[0])
            # NOTE(review): only the first channel of 'GPI Dalpha' is kept —
            # confirm this matches the producer's layout.
            velocity_results['GPI Dalpha']=velocity_results['GPI Dalpha'][0]
            # BUGFIX: raw strings — '\E'/'\R'/'\Z' are invalid escape
            # sequences (SyntaxWarning on modern Python); the byte content of
            # the MDSplus node paths is unchanged.
            R_sep=flap.get_data('NSTX_MDSPlus',
                                name=r'\EFIT02::\RBDRY',
                                exp_id=shot,
                                object_name='SEP R OBJ').slice_data(slicing={'Time':elm_time}).data
            z_sep=flap.get_data('NSTX_MDSPlus',
                                name=r'\EFIT02::\ZBDRY',
                                exp_id=shot,
                                object_name='SEP Z OBJ').slice_data(slicing={'Time':elm_time}).data
            # Boundary points that fall inside the GPI field of view.
            sep_GPI_ind=np.where(np.logical_and(R_sep > coeff_r[2],
                                                np.logical_and(z_sep > coeff_z[2],
                                                               z_sep < coeff_z[2]+79*coeff_z[0]+64*coeff_z[1])))
            try:
                sep_GPI_ind=np.asarray(sep_GPI_ind[0])
                # Pad with one point on each side for the interpolation below.
                sep_GPI_ind=np.insert(sep_GPI_ind,0,sep_GPI_ind[0]-1)
                sep_GPI_ind=np.insert(sep_GPI_ind,len(sep_GPI_ind),sep_GPI_ind[-1]+1)
                z_sep_GPI=z_sep[(sep_GPI_ind)]
                R_sep_GPI=R_sep[sep_GPI_ind]
                for key in ['max','avg']:
                    for ind_time in range(len(velocity_results['Position '+key][:,0])):
                        # Linear interpolation of the separatrix R at the
                        # structure's z position.
                        ind_z_min=np.argmin(np.abs(z_sep_GPI-velocity_results['Position '+key][ind_time,1]))
                        if z_sep_GPI[ind_z_min] >= velocity_results['Position '+key][ind_time,1]:
                            ind1=ind_z_min
                            ind2=ind_z_min+1
                        else:
                            ind1=ind_z_min-1
                            ind2=ind_z_min
                        velocity_results['Separatrix dist '+key][ind_time]=velocity_results['Position '+key][ind_time,0]-((velocity_results['Position '+key][ind_time,1]-z_sep_GPI[ind2])/(z_sep_GPI[ind1]-z_sep_GPI[ind2])*(R_sep_GPI[ind1]-R_sep_GPI[ind2])+R_sep_GPI[ind2])
            except Exception:
                # BUGFIX: narrowed from a bare `except:` (which also swallowed
                # KeyboardInterrupt/SystemExit). Best-effort: the separatrix
                # distance stays zero when the boundary data is unusable.
                pass
            if gpi_plane_calculation:
                det=coeff_r[0]*coeff_z[1]-coeff_z[0]*coeff_r[1]
                for key in ['Velocity ccf','Velocity str max','Velocity str avg','Size max','Size avg']:
                    orig=copy.deepcopy(velocity_results[key])
                    velocity_results[key][:,0]=coeff_r_new/det*(coeff_z[1]*orig[:,0]-coeff_r[1]*orig[:,1])
                    velocity_results[key][:,1]=coeff_z_new/det*(-coeff_z[0]*orig[:,0]+coeff_r[0]*orig[:,1])
                velocity_results['Elongation max'][:]=(velocity_results['Size max'][:,0]-velocity_results['Size max'][:,1])/(velocity_results['Size max'][:,0]+velocity_results['Size max'][:,1])
                velocity_results['Elongation avg'][:]=(velocity_results['Size avg'][:,0]-velocity_results['Size avg'][:,1])/(velocity_results['Size avg'][:,0]+velocity_results['Size avg'][:,1])
            time=velocity_results['Time']
            elm_time_interval_ind=np.where(np.logical_and(time >= elm_time-window_average,
                                                          time <= elm_time+window_average))
            # Refine the ELM time to the minimum of the frame similarity.
            elm_time=(time[elm_time_interval_ind])[np.argmin(velocity_results['Frame similarity'][elm_time_interval_ind])]
            elm_time_ind=np.argmin(np.abs(time-elm_time))
            ind=slice(elm_time_ind-nwin,elm_time_ind+nwin)
            for ind_first in range(12):
                for ind_second in range(12):
                    # Indices 0..7 address R/z components of the vector
                    # quantities, 8..11 the scalar quantities.
                    if ind_first <= 7:
                        a=velocity_results[pearson_keys[ind_first//2]][ind,np.mod(ind_first,2)]
                    else:
                        a=velocity_results[pearson_keys[ind_first-4]][ind]
                    if ind_second <= 7:
                        b=velocity_results[pearson_keys[ind_second//2]][ind,np.mod(ind_second,2)]
                    else:
                        b=velocity_results[pearson_keys[ind_second-4]][ind]
                    if (ind_first in plot_inds and
                        ind_second in plot_inds):
                        ind1=int(np.where(plot_inds == ind_first)[0])
                        ind2=int(np.where(plot_inds == ind_second)[0])
                        plt.subplot(gs[ind2,ind1])
                        try:
                            plt.scatter(a,b,color='tab:blue')
                        except Exception:
                            # BUGFIX: narrowed from a bare `except:`; keep the
                            # diagnostic printout of the failing pair.
                            print(ind_first,ind_second)
                            if ind_first == 8:
                                print(a)
                        if ind_first <= 7:
                            label_first=pearson_keys[ind_first//2]
                            label_second=['radial', 'poloidal'][np.mod(ind_first,2)]
                            xlabel=label_first+' '+label_second
                        else:
                            xlabel=pearson_keys[ind_first-4]
                        if ind_second <= 7:
                            label_first=pearson_keys[ind_second//2]
                            label_second=['radial', 'poloidal'][np.mod(ind_second,2)]
                            ylabel=label_first+' '+label_second
                        else:
                            ylabel=pearson_keys[ind_second-4]
                        plt.xlabel(xlabel)
                        plt.ylabel(ylabel)
    plt.tight_layout()
def plot_all_parameters_vs_all_other_average(window_average=0.5e-3,
                                             sampling_time=2.5e-6,
                                             plot=True,
                                             normalized_structure=True,
                                             normalized_velocity=True,
                                             subtraction_order=4,
                                             calculate_average=False,
                                             gpi_plane_calculation=True,
                                             elm_burst_window=False,
                                             pdf=True,
                                             symbol_size=1,
                                             plot_error=False):
    """
    Plot the ELM-averaged GPI parameters against each other on a GridSpec of
    the selected parameters (plot_inds), color-coding the Tau evolution.

    Parameters
    ----------
    window_average : float
        Half width of the Tau window [s].
    sampling_time : float
        GPI sampling time [s] (kept for interface symmetry; the averaged
        results are computed with 2.5e-6 s).
    plot : bool
        Kept for interface symmetry; plotting always happens.
    normalized_structure, normalized_velocity : bool
        Kept for interface symmetry; the averaged results use True for both.
    subtraction_order : int
        Trend-subtraction order passed to calculate_avg_velocity_results.
    calculate_average : bool
        If True plot the '* avg' quantities, otherwise the '* max' ones.
    gpi_plane_calculation : bool
        Forwarded to calculate_avg_velocity_results.
    elm_burst_window : bool
        If True use Tau in [0, window_average), else the symmetric window.
    pdf : bool
        Currently unused; kept for interface compatibility.
    symbol_size : float
        Marker size of the scatter points.
    plot_error : bool
        If True also request and draw error bars.
    """
    if calculate_average:
        pearson_keys=['Velocity ccf',        #0,1
                      'Velocity str avg',    #2,3
                      'Size avg',            #4,5
                      'Position avg',        #6,7
                      'Area avg',            #8
                      'Elongation avg',      #9
                      'Angle avg',           #10
                      'Separatrix dist avg'] #11
    else:
        pearson_keys=['Velocity ccf',        #0,1
                      'Velocity str max',    #2,3
                      'Size max',            #4,5
                      'Position max',        #6,7
                      'Area max',            #8
                      'Elongation max',      #9
                      'Angle max',           #10
                      'Separatrix dist max'] #11
    plot_inds=np.asarray([0,1,4,5,11])
    gs=GridSpec(len(plot_inds),len(plot_inds))
    results=calculate_avg_velocity_results(window_average=500e-6,
                                           sampling_time=2.5e-6,
                                           pdf=False,
                                           plot=False,
                                           return_results=not plot_error,
                                           return_error=plot_error,
                                           plot_error=False,
                                           normalized_velocity=True,
                                           normalized_structure=True,
                                           subtraction_order=subtraction_order,
                                           opacity=0.2,
                                           correlation_threshold=0.6,
                                           plot_max_only=False,
                                           plot_for_publication=False,
                                           gpi_plane_calculation=gpi_plane_calculation,
                                           plot_scatter=False,
                                           )
    if not plot_error:
        velocity_results=results
        # BUGFIX: error_results was left undefined in this branch while being
        # referenced unconditionally below -> NameError with the default
        # plot_error=False. Bind it and guard every use with plot_error.
        error_results=None
    else:
        velocity_results,error_results=results
    # Convert Tau to seconds so it compares with window_average.
    velocity_results['Tau']/=1e3
    if elm_burst_window:
        ind=np.where(np.logical_and(velocity_results['Tau'] >= 0,
                                    velocity_results['Tau'] < window_average))
    else:
        ind=np.where(np.logical_and(velocity_results['Tau'] >= -window_average,
                                    velocity_results['Tau'] < window_average))
    for ind_first in range(12):
        for ind_second in range(12):
            # Indices 0..7 address R/z components of the vector quantities,
            # 8..11 the scalar quantities.
            a_err=None
            b_err=None
            if ind_first <= 7:
                a=velocity_results[pearson_keys[ind_first//2]][ind,np.mod(ind_first,2)]
                a=a[0,:]
                if plot_error:
                    a_err=error_results[pearson_keys[ind_first//2]][ind,np.mod(ind_first,2)]
                    a_err=a_err[0,:]
            else:
                a=velocity_results[pearson_keys[ind_first-4]][ind]
                if plot_error:
                    a_err=error_results[pearson_keys[ind_first-4]][ind]
            if ind_second <= 7:
                b=velocity_results[pearson_keys[ind_second//2]][ind,np.mod(ind_second,2)]
                b=b[0,:]
                if plot_error:
                    b_err=error_results[pearson_keys[ind_second//2]][ind,np.mod(ind_second,2)]
                    b_err=b_err[0,:]
            else:
                b=velocity_results[pearson_keys[ind_second-4]][ind]
                if plot_error:
                    b_err=error_results[pearson_keys[ind_second-4]][ind]
            if (ind_first in plot_inds and
                ind_second in plot_inds):
                ind1=int(np.where(plot_inds == ind_first)[0])
                ind2=int(np.where(plot_inds == ind_second)[0])
                # One color per Tau sample along the averaged trace.
                colors = iter(cm.gist_ncar(np.linspace(0, 1, len(a))))
                if ind_first != ind_second:
                    plt.subplot(gs[ind2,ind1])
                    # (lw was the string '0.2'; float keeps behavior and
                    # matches the diagonal branch below)
                    plt.plot(a,b,lw=0.2)
                    for ind_a in range(len(a)):
                        color=copy.deepcopy(next(colors))
                        plt.scatter(a[ind_a],
                                    b[ind_a],
                                    color=color,
                                    s=symbol_size)
                        if plot_error:
                            plt.errorbar(a[ind_a],
                                         b[ind_a],
                                         xerr=a_err[ind_a],
                                         yerr=b_err[ind_a],
                                         color=color,
                                         lw=0.2)
                    if ind_first <= 7:
                        label_first=pearson_keys[ind_first//2]
                        label_second=['radial', 'poloidal'][np.mod(ind_first,2)]
                        xlabel=label_first+' '+label_second
                    else:
                        xlabel=pearson_keys[ind_first-4]
                    if ind_second <= 7:
                        label_first=pearson_keys[ind_second//2]
                        label_second=['radial', 'poloidal'][np.mod(ind_second,2)]
                        ylabel=label_first+' '+label_second
                    else:
                        ylabel=pearson_keys[ind_second-4]
                    plt.xlabel(xlabel)
                    plt.ylabel(ylabel)
                    plt.xlim([min(a)-abs(max(a)-min(a))*0.1,max(a)+abs(min(a)-max(a))*0.1])
                    plt.ylim([min(b)-abs(max(b)-min(b))*0.1,max(b)+abs(min(b)-max(b))*0.1])
                else:
                    # Diagonal: plot the quantity against Tau instead.
                    plt.subplot(gs[ind2,ind1])
                    plt.plot(velocity_results['Tau'][ind]*1e3, b, lw=0.2)
                    for ind_a in range(len(a)):
                        plt.scatter(velocity_results['Tau'][ind][ind_a]*1e3,
                                    b[ind_a],
                                    color=next(colors),
                                    s=symbol_size,
                                    )
                    plt.xlim([-window_average*1e3,window_average*1e3])
                    plt.ylim([min(b)-abs(max(b)-min(b))*0.1,max(b)+abs(min(b)-max(b))*0.1])
                    plt.xlabel('Tau [ms]')
                    if ind_second <= 7:
                        label_first=pearson_keys[ind_second//2]
                        label_second=['radial', 'poloidal'][np.mod(ind_second,2)]
                        ylabel=label_first+' '+label_second
                    else:
                        ylabel=pearson_keys[ind_second-4]
                    plt.ylabel(ylabel)
| 54.735988
| 270
| 0.448034
| 3,929
| 37,111
| 3.998982
| 0.073047
| 0.109789
| 0.033859
| 0.032077
| 0.862335
| 0.843559
| 0.823192
| 0.787615
| 0.760247
| 0.75541
| 0
| 0.042299
| 0.443858
| 37,111
| 678
| 271
| 54.735988
| 0.718979
| 0.035326
| 0
| 0.730449
| 0
| 0
| 0.083336
| 0.004373
| 0
| 0
| 0
| 0
| 0
| 1
| 0.006656
| false
| 0.001664
| 0.021631
| 0
| 0.031614
| 0.009983
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4f91af39a58f98f1bbbdfeac74efd5f220008864
| 19,013
|
py
|
Python
|
tests/dhcpv6/kea_only/leases_cmds/test_leases_cmds_legal_logging.py
|
shawnmullaney/forge
|
aaaef0a0645f73d24666aab6a400f3604e753aac
|
[
"0BSD"
] | null | null | null |
tests/dhcpv6/kea_only/leases_cmds/test_leases_cmds_legal_logging.py
|
shawnmullaney/forge
|
aaaef0a0645f73d24666aab6a400f3604e753aac
|
[
"0BSD"
] | null | null | null |
tests/dhcpv6/kea_only/leases_cmds/test_leases_cmds_legal_logging.py
|
shawnmullaney/forge
|
aaaef0a0645f73d24666aab6a400f3604e753aac
|
[
"0BSD"
] | null | null | null |
"""Kea leases manipulation commands with legal logging hook"""
# pylint: disable=invalid-name,line-too-long
import pytest
import srv_msg
import srv_control
import misc
from forge_cfg import world
@pytest.mark.v6
@pytest.mark.kea_only
@pytest.mark.controlchannel
@pytest.mark.hook
@pytest.mark.lease_cmds
@pytest.mark.legal_logging
def test_hook_v6_lease_cmds_legal_logging_add():
    """Verify that lease6-add stores a lease and logs it to the legal log.

    Flow: start Kea with a single-address pool and the lease_cmds +
    legal_log hooks, confirm normal SOLICIT allocation, add the only pool
    address via lease6-add, confirm a subsequent SOLICIT is refused
    (NoAddrsAvail), then check the legal log file for the add entry.
    """
    misc.test_procedure()
    # Remove any legal-log leftovers from previous runs.
    srv_msg.remove_file_from_server(world.f_cfg.data_join('kea-legal*.txt'))
    misc.test_setup()
    # Single-address pool so lease6-add can exhaust it.
    srv_control.config_srv_subnet('2001:db8:1::/64', '2001:db8:1::1-2001:db8:1::1')
    srv_control.config_srv_opt('preference', '123')
    srv_control.config_srv_opt('domain-search', 'domain1.example.com,domain2.isc.org')
    srv_control.open_control_channel()
    srv_control.add_hooks('libdhcp_lease_cmds.so')
    srv_control.add_hooks('libdhcp_legal_log.so')
    srv_control.build_and_send_config_files('SSH', 'config-file')
    srv_control.start_srv('DHCP', 'started')
    misc.test_procedure()
    # First SOLICIT: the pool address must be offered (IA_NA sub-option 5).
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:66:55:44:33:22:11')
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_does_include('Client', None, 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
    srv_msg.response_check_include_option('Response', None, '1')
    srv_msg.response_check_include_option('Response', None, '2')
    srv_msg.response_check_include_option('Response', None, '3')
    srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '5')
    srv_msg.response_check_suboption_content('Response',
                                             '5',
                                             '3',
                                             None,
                                             'addr',
                                             '2001:db8:1::1')
    # Administratively assign the only pool address to a different DUID.
    srv_msg.send_ctrl_cmd_via_socket('{"command": "lease6-add","arguments": {"subnet-id": 1,"ip-address": "2001:db8:1::1","duid": "1a:1b:1c:1d:1e:1f:20:21:22:23:24","iaid": 1234}}')
    misc.test_procedure()
    # Second SOLICIT: pool exhausted, expect status code 2 (NoAddrsAvail)
    # in IA_NA sub-option 13.
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:66:55:44:33:22:11')
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_does_include('Client', None, 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
    srv_msg.response_check_include_option('Response', None, '1')
    srv_msg.response_check_include_option('Response', None, '2')
    srv_msg.response_check_include_option('Response', None, '3')
    srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '13')
    srv_msg.response_check_suboption_content('Response', '13', '3', None, 'statuscode', '2')
    # The legal log must contain the administrator-added lease entry.
    srv_msg.copy_remote(world.f_cfg.data_join('kea-legal*.txt'))
    srv_msg.file_contains_line(world.f_cfg.data_join('kea-legal*.txt'),
                               None,
                               'Administrator added a lease of address: 2001:db8:1::1 to a device with DUID: 1a:1b:1c:1d:1e:1f:20:21:22:23:24')
@pytest.mark.v6
@pytest.mark.kea_only
@pytest.mark.controlchannel
@pytest.mark.hook
@pytest.mark.lease_cmds
@pytest.mark.legal_logging
def test_hook_v6_lease_cmds_legal_logging_del_using_address():
    """Check that "lease6-del" by IP address frees the lease and is legal-logged.

    Flow: exhaust a one-address pool with client A (SOLICIT/REQUEST), show
    client B is refused (status code 2 = NoAddrsAvail), delete A's lease over
    the control socket by address, show B now gets the address, and verify
    the legal-log file records the administrative deletion.
    """
    misc.test_procedure()
    # Clean slate: the final file_contains_line must see only this run's log.
    srv_msg.remove_file_from_server(world.f_cfg.data_join('kea-legal*.txt'))
    misc.test_setup()
    # Single-address pool, so one client exhausts the subnet.
    srv_control.config_srv_subnet('2001:db8:1::/64', '2001:db8:1::1-2001:db8:1::1')
    srv_control.open_control_channel()
    srv_control.add_hooks('libdhcp_lease_cmds.so')
    srv_control.add_hooks('libdhcp_legal_log.so')
    srv_control.build_and_send_config_files('SSH', 'config-file')
    srv_control.start_srv('DHCP', 'started')
    # Phase 1: client A solicits and is offered the only address.
    misc.test_procedure()
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_does_include('Client', None, 'IA-NA')
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:66:55:44:33:22:11')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
    srv_msg.response_check_include_option('Response', None, '3')
    srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '5')
    # Phase 2: client A requests and is assigned 2001:db8:1::1.
    misc.test_procedure()
    srv_msg.client_copy_option('IA_NA')
    srv_msg.client_copy_option('server-id')
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:66:55:44:33:22:11')
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_send_msg('REQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'REPLY')
    srv_msg.response_check_include_option('Response', None, '1')
    srv_msg.response_check_include_option('Response', None, '2')
    srv_msg.response_check_include_option('Response', None, '3')
    srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '5')
    srv_msg.response_check_suboption_content('Response',
                                             '5',
                                             '3',
                                             None,
                                             'addr',
                                             '2001:db8:1::1')
    # Phase 3: client B solicits; pool is exhausted -> status code 2 (NoAddrsAvail).
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:11:22:33:44:55:66')
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_does_include('Client', None, 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
    srv_msg.response_check_include_option('Response', None, '1')
    srv_msg.response_check_include_option('Response', None, '2')
    srv_msg.response_check_include_option('Response', None, '3')
    srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '13')
    srv_msg.response_check_suboption_content('Response', '13', '3', None, 'statuscode', '2')
    # Phase 4: administrator deletes the lease by IP address via lease-cmds hook.
    srv_msg.send_ctrl_cmd_via_socket('{"command":"lease6-del","arguments":{"ip-address": "2001:db8:1::1"}}')
    # Phase 5: client B retries and now receives the freed address.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:11:22:33:44:55:66')
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_does_include('Client', None, 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
    srv_msg.response_check_include_option('Response', None, '1')
    srv_msg.response_check_include_option('Response', None, '2')
    srv_msg.response_check_include_option('Response', None, '3')
    srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '5')
    srv_msg.response_check_suboption_content('Response',
                                             '5',
                                             '3',
                                             None,
                                             'addr',
                                             '2001:db8:1::1')
    # Phase 6: legal log must record the administrative deletion.
    srv_msg.copy_remote(world.f_cfg.data_join('kea-legal*.txt'))
    srv_msg.file_contains_line(world.f_cfg.data_join('kea-legal*.txt'),
                               None,
                               'Administrator deleted the lease for address: 2001:db8:1::1')
@pytest.mark.v6
@pytest.mark.kea_only
@pytest.mark.controlchannel
@pytest.mark.hook
@pytest.mark.lease_cmds
@pytest.mark.legal_logging
def test_hook_v6_lease_cmds_legal_logging_del_using_duid():
    """Check that "lease6-del" by DUID+IAID frees the lease and is legal-logged.

    Same shape as the delete-by-address test, but the administrative delete
    identifies the lease by subnet-id / DUID / IAID instead of the address,
    so the client sets an explicit IAID (666) that the command must match.
    """
    misc.test_procedure()
    # Clean slate: the final file_contains_line must see only this run's log.
    srv_msg.remove_file_from_server(world.f_cfg.data_join('kea-legal*.txt'))
    misc.test_setup()
    # Single-address pool, so one client exhausts the subnet.
    srv_control.config_srv_subnet('2001:db8:1::/64', '2001:db8:1::1-2001:db8:1::1')
    srv_control.open_control_channel()
    srv_control.add_hooks('libdhcp_lease_cmds.so')
    srv_control.add_hooks('libdhcp_legal_log.so')
    srv_control.build_and_send_config_files('SSH', 'config-file')
    srv_control.start_srv('DHCP', 'started')
    # Phase 1: client A (fixed IAID 666) solicits and is offered the address.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:66:55:44:33:22:11')
    srv_msg.client_sets_value('Client', 'ia_id', '666')
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_does_include('Client', None, 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
    srv_msg.response_check_include_option('Response', None, '3')
    srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '5')
    # Phase 2: client A requests and is assigned 2001:db8:1::1.
    misc.test_procedure()
    srv_msg.client_copy_option('IA_NA')
    srv_msg.client_copy_option('server-id')
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:66:55:44:33:22:11')
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_send_msg('REQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'REPLY')
    srv_msg.response_check_include_option('Response', None, '1')
    srv_msg.response_check_include_option('Response', None, '2')
    srv_msg.response_check_include_option('Response', None, '3')
    srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '5')
    srv_msg.response_check_suboption_content('Response',
                                             '5',
                                             '3',
                                             None,
                                             'addr',
                                             '2001:db8:1::1')
    # Phase 3: client B solicits; pool is exhausted -> status code 2 (NoAddrsAvail).
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:11:22:33:44:55:66')
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_does_include('Client', None, 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
    srv_msg.response_check_include_option('Response', None, '1')
    srv_msg.response_check_include_option('Response', None, '2')
    srv_msg.response_check_include_option('Response', None, '3')
    srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '13')
    srv_msg.response_check_suboption_content('Response', '13', '3', None, 'statuscode', '2')
    # Phase 4: delete by identifier-type "duid" + IAID 666 (matches Phase 1).
    srv_msg.send_ctrl_cmd_via_socket('{"command":"lease6-del","arguments":{"subnet-id":1,"identifier": "00:03:00:01:66:55:44:33:22:11","identifier-type": "duid","iaid":666}}')
    # Phase 5: client B retries and now receives the freed address.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:11:22:33:44:55:66')
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_does_include('Client', None, 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
    srv_msg.response_check_include_option('Response', None, '1')
    srv_msg.response_check_include_option('Response', None, '2')
    srv_msg.response_check_include_option('Response', None, '3')
    srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '5')
    srv_msg.response_check_suboption_content('Response',
                                             '5',
                                             '3',
                                             None,
                                             'addr',
                                             '2001:db8:1::1')
    # Phase 6: legal log must record the deletion keyed by DUID.
    srv_msg.copy_remote(world.f_cfg.data_join('kea-legal*.txt'))
    srv_msg.file_contains_line(world.f_cfg.data_join('kea-legal*.txt'),
                               None,
                               'Administrator deleted a lease for a device identified by: duid of 00:03:00:01:66:55:44:33:22:11')
@pytest.mark.v6
@pytest.mark.kea_only
@pytest.mark.controlchannel
@pytest.mark.hook
@pytest.mark.lease_cmds
@pytest.mark.legal_logging
@pytest.mark.disabled
def test_hook_v6_lease_cmds_legal_logging_wipe():
    """Exercise "lease6-wipe" on a subnet after exhausting a two-address pool.

    DISABLED: the test ends in an unconditional test_fail() because the final
    legal-log assertion (see the trailing comment) was never ported; it only
    drives the wipe command and copies the log for manual inspection.
    """
    misc.test_procedure()
    # Clean slate for the legal-log file.
    srv_msg.remove_file_from_server(world.f_cfg.data_join('kea-legal*.txt'))
    misc.test_setup()
    # Two-address pool: two clients exhaust it, a third is refused.
    srv_control.config_srv_subnet('2001:db8:1::/64', '2001:db8:1::1-2001:db8:1::2')
    srv_control.open_control_channel()
    srv_control.add_hooks('libdhcp_lease_cmds.so')
    srv_control.add_hooks('libdhcp_legal_log.so')
    srv_control.build_and_send_config_files('SSH', 'config-file')
    srv_control.start_srv('DHCP', 'started')
    # Client A: SOLICIT/ADVERTISE.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:66:55:44:33:22:11')
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_does_include('Client', None, 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
    srv_msg.response_check_include_option('Response', None, '3')
    srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '5')
    # Client A: REQUEST/REPLY takes the first address.
    misc.test_procedure()
    srv_msg.client_copy_option('IA_NA')
    srv_msg.client_copy_option('server-id')
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:66:55:44:33:22:11')
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_send_msg('REQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'REPLY')
    srv_msg.response_check_include_option('Response', None, '1')
    srv_msg.response_check_include_option('Response', None, '2')
    srv_msg.response_check_include_option('Response', None, '3')
    srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '5')
    # Client B: SOLICIT/ADVERTISE.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:11:22:33:44:55:66')
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_does_include('Client', None, 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
    srv_msg.response_check_include_option('Response', None, '1')
    srv_msg.response_check_include_option('Response', None, '2')
    srv_msg.response_check_include_option('Response', None, '3')
    srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '5')
    # Client B: REQUEST/REPLY takes the second address; pool is now exhausted.
    misc.test_procedure()
    srv_msg.client_copy_option('IA_NA')
    srv_msg.client_copy_option('server-id')
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:11:22:33:44:55:66')
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_send_msg('REQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'REPLY')
    srv_msg.response_check_include_option('Response', None, '1')
    srv_msg.response_check_include_option('Response', None, '2')
    srv_msg.response_check_include_option('Response', None, '3')
    srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '5')
    # Client C: refused with status code 2 (NoAddrsAvail).
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:11:11:11:11:11:11')
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_does_include('Client', None, 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
    srv_msg.response_check_include_option('Response', None, '1')
    srv_msg.response_check_include_option('Response', None, '2')
    srv_msg.response_check_include_option('Response', None, '3')
    srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '13')
    srv_msg.response_check_suboption_content('Response', '13', '3', None, 'statuscode', '2')
    # Wipe every lease in subnet 1 via the lease-cmds hook.
    srv_msg.send_ctrl_cmd_via_socket('{"command":"lease6-wipe", "arguments": {"subnet-id":1}}')
    srv_msg.copy_remote(world.f_cfg.data_join('kea-legal*.txt'))
    # NOTE(review): unconditional failure — the legal-log check described below
    # was never converted into a file_contains_line assertion.
    srv_msg.test_fail()
    # File stored in kea-legal*.txt MUST contain line or phrase: Address:3000::5 has been assigned for 0 hrs 10 mins 0 secs to a device with DUID: 00:03:00:01:f6:f5:f4:f3:f2:04 and hardware address: hwtype=1 f6:f5:f4:f3:f2:04 (from DUID)
@pytest.mark.v6
@pytest.mark.kea_only
@pytest.mark.controlchannel
@pytest.mark.hook
@pytest.mark.lease_cmds
@pytest.mark.legal_logging
def test_hook_v6_lease_cmds_legal_logging_update():
    """Check that "lease6-update" rewrites a lease and is legal-logged.

    Assign 2001:db8:1::1 to a client, verify the memfile lease entry, then
    update the lease over the control socket (new DUID, IAID, hw-address,
    lifetimes and hostname), verify the lease file reflects the new values,
    and check the legal log records the administrative update.
    """
    misc.test_procedure()
    # Clean slate: the final file_contains_line must see only this run's log.
    srv_msg.remove_file_from_server(world.f_cfg.data_join('kea-legal*.txt'))
    misc.test_setup()
    srv_control.config_srv_subnet('2001:db8:1::/64', '2001:db8:1::1-2001:db8:1::2')
    srv_control.open_control_channel()
    srv_control.add_hooks('libdhcp_lease_cmds.so')
    srv_control.add_hooks('libdhcp_legal_log.so')
    srv_control.build_and_send_config_files('SSH', 'config-file')
    srv_control.start_srv('DHCP', 'started')
    # Phase 1: client solicits with fixed IAID 666.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:66:55:44:33:22:11')
    srv_msg.client_sets_value('Client', 'ia_id', '666')
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_does_include('Client', None, 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
    srv_msg.response_check_include_option('Response', None, '3')
    srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '5')
    # Phase 2: client requests and is assigned 2001:db8:1::1.
    misc.test_procedure()
    srv_msg.client_copy_option('IA_NA')
    srv_msg.client_copy_option('server-id')
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:66:55:44:33:22:11')
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_send_msg('REQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'REPLY')
    srv_msg.response_check_include_option('Response', None, '1')
    srv_msg.response_check_include_option('Response', None, '2')
    srv_msg.response_check_include_option('Response', None, '3')
    srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '5')
    # Phase 3: memfile holds the original lease (old DUID/IAID, no hostname/hw).
    srv_msg.lease_file_contains('2001:db8:1::1,00:03:00:01:66:55:44:33:22:11,4000,')
    srv_msg.lease_file_contains(',1,3000,0,666,128,0,0,,66:55:44:33:22:11,0')
    srv_msg.lease_file_doesnt_contain('2001:db8:1::1,01:02:03:04:05:06:07:08')
    srv_msg.lease_file_doesnt_contain(',urania.example.org,1a:1b:1c:1d:1e:1f,')
    # Phase 4: administrator rewrites the lease via lease6-update.
    srv_msg.send_ctrl_cmd_via_socket('{"command":"lease6-update", "arguments":{"subnet-id": 1,"ip-address": "2001:db8:1::1","duid": "01:02:03:04:05:06:07:08","iaid": 1234,"hw-address": "1a:1b:1c:1d:1e:1f","preferred-lft": 500,"valid-lft": 1000,"hostname": "urania.example.org"}}')
    # Phase 5: memfile now holds the updated lease (new DUID/IAID/hostname/hw).
    srv_msg.lease_file_contains(',1,500,0,1234,128,0,0,urania.example.org,1a:1b:1c:1d:1e:1f,0')
    srv_msg.lease_file_contains('2001:db8:1::1,01:02:03:04:05:06:07:08,1000')
    # Phase 6: legal log must record the update (1000 s = 0 hrs 16 mins 40 secs).
    srv_msg.copy_remote(world.f_cfg.data_join('kea-legal*.txt'))
    srv_msg.file_contains_line(world.f_cfg.data_join('kea-legal*.txt'),
                               None,
                               'Administrator updated information on the lease of address: 2001:db8:1::1 to a device with DUID: 01:02:03:04:05:06:07:08, hardware address: 1a:1b:1c:1d:1e:1f for 0 hrs 16 mins 40 secs')
| 46.373171
| 280
| 0.672277
| 2,811
| 19,013
| 4.225187
| 0.071505
| 0.094973
| 0.075777
| 0.110381
| 0.944094
| 0.936937
| 0.924392
| 0.921192
| 0.91833
| 0.900733
| 0
| 0.061677
| 0.172829
| 19,013
| 409
| 281
| 46.486553
| 0.693521
| 0.017462
| 0
| 0.910979
| 0
| 0.035608
| 0.244725
| 0.08386
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014837
| true
| 0.050445
| 0.014837
| 0
| 0.029674
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
8c2ee7477fd0c0f0c96b41d4dc5f856473d6089b
| 63
|
py
|
Python
|
modules/analysis/covariance.py
|
ansteh/multivariate
|
fbd166f9e9a6d721a1d876b6e46db064f43afe53
|
[
"Apache-2.0"
] | null | null | null |
modules/analysis/covariance.py
|
ansteh/multivariate
|
fbd166f9e9a6d721a1d876b6e46db064f43afe53
|
[
"Apache-2.0"
] | null | null | null |
modules/analysis/covariance.py
|
ansteh/multivariate
|
fbd166f9e9a6d721a1d876b6e46db064f43afe53
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
def cov(matrix):
    """Return the covariance matrix of *matrix* (delegates to numpy.cov)."""
    covariance_matrix = np.cov(matrix)
    return covariance_matrix
| 12.6
| 25
| 0.698413
| 11
| 63
| 4
| 0.727273
| 0.409091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.206349
| 63
| 4
| 26
| 15.75
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
4fbe2d0b77a4c94e6654fb92878d63aec7c76a57
| 133
|
py
|
Python
|
vedadet/criteria/builder.py
|
jie311/vedadet
|
aaf3b3bc3c7944aba1cc28138165d403023a9152
|
[
"Apache-2.0"
] | 424
|
2020-10-19T03:56:49.000Z
|
2022-03-28T02:47:39.000Z
|
vedadet/criteria/builder.py
|
jie311/vedadet
|
aaf3b3bc3c7944aba1cc28138165d403023a9152
|
[
"Apache-2.0"
] | 72
|
2020-11-27T17:10:00.000Z
|
2022-03-17T02:40:53.000Z
|
vedadet/criteria/builder.py
|
jie311/vedadet
|
aaf3b3bc3c7944aba1cc28138165d403023a9152
|
[
"Apache-2.0"
] | 116
|
2020-11-03T02:31:17.000Z
|
2022-03-08T08:20:48.000Z
|
from vedacore.misc import build_from_cfg, registry
def build_criterion(cfg):
    """Instantiate the 'criterion' component described by *cfg* from the registry."""
    criterion = build_from_cfg(cfg, registry, 'criterion')
    return criterion
| 22.166667
| 53
| 0.789474
| 19
| 133
| 5.263158
| 0.526316
| 0.18
| 0.24
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12782
| 133
| 5
| 54
| 26.6
| 0.862069
| 0
| 0
| 0
| 0
| 0
| 0.067669
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
4fe91bcba4c7bc218e565fbeb6680c8e602ea365
| 115,339
|
py
|
Python
|
tests/test_catalog.py
|
scalet98/hspfbintoolbox
|
012260c0db4084768ed593b559a23a13005b27cc
|
[
"BSD-3-Clause"
] | 1
|
2020-06-16T12:33:28.000Z
|
2020-06-16T12:33:28.000Z
|
tests/test_catalog.py
|
scalet98/hspfbintoolbox
|
012260c0db4084768ed593b559a23a13005b27cc
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_catalog.py
|
scalet98/hspfbintoolbox
|
012260c0db4084768ed593b559a23a13005b27cc
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
catalog
----------------------------------
Tests for `hspfbintoolbox` module.
"""
import csv
import shlex
import subprocess
from unittest import TestCase
from pandas.util.testing import assert_frame_equal
import sys
try:
from cStringIO import StringIO
except:
from io import StringIO
import pandas as pd
from hspfbintoolbox import hspfbintoolbox
interval2codemap = {"yearly": 5, "monthly": 4, "daily": 3, "bivl": 2}
def capture(func, *args, **kwds):
    """Call *func* with the given arguments and return its stdout as bytes.

    The function's own return value is discarded; only what it printed to
    ``sys.stdout`` is returned, UTF-8 encoded.

    Fixes over the original: ``sys.stdout`` is restored in a ``finally``
    block (the original left the ``StringIO`` installed permanently), and
    the bare ``except:`` is narrowed to ``TypeError`` — the only expected
    failure, from ``bytes(str, "utf-8")`` on Python 2 where the captured
    value is already a byte string.
    """
    saved_stdout = sys.stdout
    sys.stdout = StringIO()  # capture output
    try:
        func(*args, **kwds)
        out = sys.stdout.getvalue()  # release output
    finally:
        sys.stdout = saved_stdout  # always undo the redirect
    try:
        out = bytes(out, "utf-8")
    except TypeError:  # Python 2: already bytes
        pass
    return out
def read_unicode_csv(
    filename,
    delimiter=",",
    quotechar='"',
    quoting=csv.QUOTE_MINIMAL,
    lineterminator="\n",
    encoding="utf-8",
):
    """Yield unicode rows from a CSV source on both Python 2 and Python 3.

    Despite its name, *filename* is handed directly to ``csv.reader`` — it
    must be an open file object or any iterable of lines, not a path string.
    On Python 3 the rows are yielded as-is; on Python 2 the delimiter and
    quotechar are encoded and every cell is decoded with *encoding*.
    Note: *encoding* is unused on the Python 3 branch (the caller controls
    decoding when opening the file).
    """
    # Python 3 version
    if sys.version_info[0] >= 3:
        # Open the file in text mode with given encoding
        # Set newline arg to ''
        # (see https://docs.python.org/3/library/csv.html)
        # Next, get the csv reader, with unicode delimiter and quotechar
        csv_reader = csv.reader(
            filename,
            delimiter=delimiter,
            quotechar=quotechar,
            quoting=quoting,
            lineterminator=lineterminator,
        )
        # Now, iterate over the (already decoded) csv_reader generator
        for row in csv_reader:
            yield row
    # Python 2 version
    else:
        # Next, get the csv reader, passing delimiter and quotechar as
        # bytestrings rather than unicode
        csv_reader = csv.reader(
            filename,
            delimiter=delimiter.encode(encoding),
            quotechar=quotechar.encode(encoding),
            quoting=quoting,
            lineterminator=lineterminator,
        )
        # Iterate over the file and decode each string into unicode
        for row in csv_reader:
            yield [cell.decode(encoding) for cell in row]
class TestDescribe(TestCase):
def setUp(self):
self.catalog = b"""\
LUE , LC,GROUP ,VAR , TC,START ,END ,TC
IMPLND, 11,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 11,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 11,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 11,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 11,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 11,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 12,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 12,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 12,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 12,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 12,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 12,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 13,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 13,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 13,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 13,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 13,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 13,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 14,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 14,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 14,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 14,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 14,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 14,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 21,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 21,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 21,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 21,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 21,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 21,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 22,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 22,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 22,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 22,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 22,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 22,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 23,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 23,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 23,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 23,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 23,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 23,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 24,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 24,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 24,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 24,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 24,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 24,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 31,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 31,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 31,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 31,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 31,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 31,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 32,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 32,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 32,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 32,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 32,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 32,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 33,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 33,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 33,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 33,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 33,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 33,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 111,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 111,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 111,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 111,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 111,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 111,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 112,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 112,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 112,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 112,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 112,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 112,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 113,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 113,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 113,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 113,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 113,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 113,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 114,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 114,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 114,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 114,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 114,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 114,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 211,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 211,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 211,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 211,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 211,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 211,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 212,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 212,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 212,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 212,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 212,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 212,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 213,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 213,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 213,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 213,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 213,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 213,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 214,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 214,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 214,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 214,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 214,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 214,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 301,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 301,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 301,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 301,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 301,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 301,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 302,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 302,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 302,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 302,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 302,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 302,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 303,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 303,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 303,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 303,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 303,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 303,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 304,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 304,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 304,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 304,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 304,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 304,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 311,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 311,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 311,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 311,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 311,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 311,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 312,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 312,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 312,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 312,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 312,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 312,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 313,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 313,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 313,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 313,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 313,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 313,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 314,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 314,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 314,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 314,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 314,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 314,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 411,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 411,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 411,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 411,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 411,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 411,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 412,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 412,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 412,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 412,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 412,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 412,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 413,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 413,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 413,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 413,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 413,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 413,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 414,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 414,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 414,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 414,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 414,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 414,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 511,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 511,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 511,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 511,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 511,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 511,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 512,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 512,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 512,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 512,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 512,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 512,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 513,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 513,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 513,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 513,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 513,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 513,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 514,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 514,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 514,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 514,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 514,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 514,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 611,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 611,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 611,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 611,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 611,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 611,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 612,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 612,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 612,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 612,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 612,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 612,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 613,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 613,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 613,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 613,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 613,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 613,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 614,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 614,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 614,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 614,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 614,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 614,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 711,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 711,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 711,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 711,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 711,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 711,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 712,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 712,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 712,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 712,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 712,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 712,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 713,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 713,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 713,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 713,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 713,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 713,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 714,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 714,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 714,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 714,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 714,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 714,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 811,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 811,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 811,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 811,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 811,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 811,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 812,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 812,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 812,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 812,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 812,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 812,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 813,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 813,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 813,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 813,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 813,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 813,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 814,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 814,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 814,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 814,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 814,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 814,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 822,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 822,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 822,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 822,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 822,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 822,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 823,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 823,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 823,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 823,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 823,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 823,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 824,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 824,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 824,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 824,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 824,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 824,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 901,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 901,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 901,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 901,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 901,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 901,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 902,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 902,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 902,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 902,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 902,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 902,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 903,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 903,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 903,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 903,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 903,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 903,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 904,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 904,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 904,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 904,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 904,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 904,IWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,UZS , 5,1951 ,2001 ,yearly
"""
ndict = []
rd = read_unicode_csv(StringIO(self.catalog.decode()))
next(rd)
for row in rd:
if len(row) == 0:
continue
nrow = [i.strip() for i in row]
ndict.append(
(nrow[0], int(nrow[1]), nrow[2], nrow[3], interval2codemap[nrow[7]])
)
self.ncatalog = sorted(ndict)
def test_catalog_api(self):
    """Compare the Python-API catalog against the parsed expected catalog."""
    raw = hspfbintoolbox.catalog("tests/6b_np1.hbn")
    trimmed = [entry[:5] for entry in raw]
    self.assertEqual(trimmed, self.ncatalog)
def test_catalog_cli(self):
    """Compare the command-line catalog output against the expected raw CSV."""
    cmd = shlex.split("hspfbintoolbox catalog --tablefmt csv tests/6b_np1.hbn")
    proc = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE
    )
    captured = proc.communicate()[0]
    self.assertEqual(captured, self.catalog)
| 50.832525
| 84
| 0.647249
| 17,634
| 115,339
| 4.232392
| 0.014291
| 0.144706
| 0.260471
| 0.434119
| 0.978401
| 0.977892
| 0.977061
| 0.000884
| 0
| 0
| 0
| 0.282975
| 0.218642
| 115,339
| 2,268
| 85
| 50.854938
| 0.545178
| 0.005011
| 0
| 0.006705
| 0
| 0
| 0.980268
| 0
| 0
| 0
| 0
| 0
| 0.001341
| 1
| 0.002235
| false
| 0.000447
| 0.00447
| 0
| 0.007599
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8b306d438e81db555c6f332592135066e91d7dfd
| 68,513
|
py
|
Python
|
PyBroom.py
|
Pr0gram-Creat0r-1/PyBroom
|
3453582318c9b10b2df504606dee029af696aefe
|
[
"MIT"
] | null | null | null |
PyBroom.py
|
Pr0gram-Creat0r-1/PyBroom
|
3453582318c9b10b2df504606dee029af696aefe
|
[
"MIT"
] | 2
|
2022-02-14T00:59:28.000Z
|
2022-02-14T02:59:13.000Z
|
PyBroom.py
|
Pr0gram-Creat0r-1/PyBroom
|
3453582318c9b10b2df504606dee029af696aefe
|
[
"MIT"
] | null | null | null |
#I am going to try keeping this file clean
#Imagine cleaning a code cleaner :)
#PyBroom.install_system_requirements: [english-words]
import os
import sys
import subprocess
import datetime
from english_words import english_words_set
# Module-level state shared by every PyBroom helper below.
english_list=list(english_words_set)  # word list from english_words; not referenced in the visible portion of this file
english_list.append('')  # treat the empty string as a valid "word"
history_list=[]  # timestamped log entries, one per public API call
log_history=1  # 1 = append to history_list; helpers set it to 0 while calling each other internally
def find_strings(file_path):
    'Find strings in a text file. Internal use only. In development.'
    # Scans the file for string literals delimited by ''', """, ' or " and
    # returns important_list2: one [delimiter, delimiter] pair per detected
    # string.  Callers use those delimiter strings to locate and blank out
    # string contents.  (important_list, which holds index pairs, is built
    # but never returned.)
    # NOTE(review): the original leading whitespace was lost in extraction;
    # indentation below is reconstructed from control flow -- verify.
    global history_list
    global log_history
    if log_history==1:
        # Record this call with a timestamp in the session history.
        history_list.append('%s: find_strings(\"%s\")' % (str(datetime.datetime.now()), file_path))
    read_file=open(file_path).read()
    # Ensure the file begins and ends with a PyBroom banner comment.
    if read_file[0]!='#' or read_file.splitlines()[-1][0]!='#':
        with_comment='#PyBroom cleaned this file.\n'+read_file+'\n#https://github.com/Pr0gram-Creat0r-1/PyBroom\n#https://replit.com/@Pr0gram-Creat0r/PyBroom'
        open(file_path, 'w').write(with_comment)
    file_string=open(file_path).read()
    file_string_copy=open(file_path).read()  # untouched copy used to re-check delimiter types later
    important_list=[]
    important_list2=[]
    string_type_list=[]
    string_index_list=[]
    # Blank escaped quotes so they are not mistaken for delimiters.
    file_string=file_string.replace("\\\'", ' ').replace('\\\"', ' ')
    # Record the position of every delimiter, triple quotes first so that
    # ''' is not consumed as three single quotes.
    # NOTE(review): each replace() swaps a 3-char delimiter for a single
    # space, shifting all later indices by 2 -- looks like a latent bug;
    # confirm against the original file before relying on the indices.
    for finder in range(0, len(file_string)):
        index=file_string.find("'''")
        if index!=-1:
            string_type_list.append("'''")
            string_index_list.append(index)
            file_string=file_string.replace("'''", ' ', 1)
    for finder in range(0, len(file_string)):
        index=file_string.find('"""')
        if index!=-1:
            string_type_list.append('"""')
            string_index_list.append(index)
            file_string=file_string.replace('"""', ' ', 1)
    for finder in range(0, len(file_string)):
        index=file_string.find("'")
        if index!=-1:
            string_type_list.append("'")
            string_index_list.append(index)
            file_string=file_string.replace("'", ' ', 1)
    for finder in range(0, len(file_string)):
        index=file_string.find('"')
        if index!=-1:
            string_type_list.append('"')
            string_index_list.append(index)
            file_string=file_string.replace('"', ' ', 1)
    string_index_list.sort()
    counter=0
    # Re-derive the delimiter type at each sorted index from the untouched copy,
    # since the sort above broke the pairing between the two parallel lists.
    for sort_types in range(0, len(string_index_list)):
        string_type=file_string_copy[string_index_list[counter]:string_index_list[counter]+3]
        if string_type=="'''":
            string_type_list[counter]="'''"
        if string_type=='"""':
            string_type_list[counter]='"""'
        if string_type[0]=="'" and string_type.count("'")!=3:
            string_type_list[counter]="'"
        if string_type[0]=='"' and string_type.count('"')!=3:
            string_type_list[counter]='"'
        counter+=1
    counter=0
    # Pair each opening delimiter with the next delimiter of the same type,
    # then drop any delimiters that fall inside the string just paired.
    while len(string_index_list)>0:
        try:
            first_occurrence=string_index_list[0]
            first_occurrence_type=string_type_list[0]
            important_list2.append([first_occurrence_type, first_occurrence_type])
            string_type_list.remove(first_occurrence_type)
            string_index_list.remove(first_occurrence)
            magic=string_type_list.index(first_occurrence_type)
            the_index=string_index_list[magic]
            important_list.append([first_occurrence, the_index])
        except ValueError:
            # No matching closer found; the_index keeps its previous value.
            # NOTE(review): on the first iteration the_index may be unbound here.
            pass
        try:
            while string_index_list[0]<=the_index:
                string_index_list.remove(string_index_list[0])
                string_type_list.remove(string_type_list[0])
        except ValueError:
            pass
        except IndexError:
            # string_index_list exhausted.
            pass
    counter=0
    # Rewrite the file with its own content (textually a no-op).
    the_string=open(file_path).read()
    new_the_string=open(file_path, 'w')
    new_the_string.write(the_string)
    return important_list2
def find_comments(file_path):
    'Find comments.'
    # Returns comment_list: [start, end] character offsets of each '#' comment
    # in the file, computed on a working copy in which string literals have
    # first been blanked out so '#' inside strings is ignored.
    # NOTE(review): indentation reconstructed from control flow -- verify.
    global history_list
    global log_history
    if log_history==1:
        history_list.append('%s: comments(\"%s\")' % (str(datetime.datetime.now()), file_path))
    strings_list=find_strings(file_path)
    baby_word_string=open(file_path).read()
    file_list=baby_word_string.splitlines()
    the_list=baby_word_string.splitlines()
    counter=0
    # Blank out every string literal (delimiters and contents) in the working
    # copy, preserving newlines so character offsets and line counts survive.
    for m in range(0, len(strings_list)):
        try:
            par1=baby_word_string.find(strings_list[counter][0])
            baby_word_string=baby_word_string.replace(strings_list[counter][0], ' '*len(strings_list[counter][0]), 1)
            par2=baby_word_string.find(strings_list[counter][1])+len(strings_list[counter][1])-1
            baby_word_string=baby_word_string.replace(strings_list[counter][1], ' '*len(strings_list[counter][1]), 1)
            string_text_list=list(baby_word_string)
            subcounter=par1
            for x in range(0, par2-par1):
                if string_text_list[subcounter]!='\n':
                    string_text_list[subcounter]=' '
                subcounter+=1
            baby_word_string=''.join(string_text_list)
        except IndexError:
            pass
        counter+=1
    counter=0
    text_list=baby_word_string.splitlines()
    comment_list=[]
    counter=0
    # Locate each '#' comment per line and record its [start, end] offsets,
    # blanking it in the working copy so a duplicate comment text on a later
    # line is not matched at the same position twice.
    for x in range(0, len(text_list)):
        line=text_list[counter]
        try:
            if line.count('#')>=1:
                number=line.find('#')
                comment=[baby_word_string.find('#'+line.split('#')[1]), baby_word_string.find('#'+line.split('#')[1])+len('#'+line.split('#')[1])]
                baby_word_string=baby_word_string.replace(baby_word_string[comment[0]:comment[1]], ' '*len(baby_word_string[comment[0]:comment[1]]), 1)
                file_list[counter]=the_list[counter].split(the_list[counter][number])[0].replace('#'+line.split('#')[1], ' '*len('#'+line.split('#')[1]), 1)
                comment_list.append(comment)
        except IndexError:
            pass
        counter+=1
    counter=0
    return comment_list
def rstrip_all(file_path):
    # Strip trailing whitespace from every line of the file, except where the
    # trailing whitespace is inside a string literal or comment.  Strings and
    # comments are masked with 'p' characters (not spaces) in a working copy
    # so their trailing characters do not look like strippable whitespace.
    # Writes the result back to file_path; returns None.
    # NOTE(review): indentation reconstructed from control flow -- verify.
    global history_list
    global log_history
    if log_history==1:
        history_list.append('%s: rstrip_all(\"%s\")' % (str(datetime.datetime.now()), file_path))
    strings_list=find_strings(file_path)
    comment_list=find_comments(file_path)
    baby_word_string=open(file_path).read().replace('\\\'', ' ').replace('\\\"', ' ')
    file=open(file_path).read()
    the_list=file.splitlines()
    counter=0
    # Mask every string literal with 'p' so its content is opaque below.
    for m in range(0, len(strings_list)):
        try:
            par1=baby_word_string.find(strings_list[counter][0])
            baby_word_string=baby_word_string.replace(strings_list[counter][0], 'p'*len(strings_list[counter][0]), 1)
            par2=baby_word_string.find(strings_list[counter][1])+len(strings_list[counter][1])-1
            baby_word_string=baby_word_string.replace(strings_list[counter][1], 'p'*len(strings_list[counter][1]), 1)
            string_text_list=list(baby_word_string)
            subcounter=par1
            for x in range(0, par2-par1):
                if string_text_list[subcounter]!='\n':
                    string_text_list[subcounter]='p'
                subcounter+=1
            baby_word_string=''.join(string_text_list)
        except IndexError:
            pass
        counter+=1
    counter=0
    # Mask every comment span with 'p' as well.
    for m in range(0, len(comment_list)):
        try:
            par1=comment_list[counter][0]
            par2=comment_list[counter][1]
            string_text_list=list(baby_word_string)
            subcounter=par1
            for x in range(0, par2-par1):
                string_text_list[subcounter]='p'
                subcounter+=1
            baby_word_string=''.join(string_text_list)
        except IndexError:
            pass
        counter+=1
    counter=0
    the_new_list=baby_word_string.splitlines()
    # Only rstrip an original line when its masked counterpart also has
    # trailing whitespace (i.e. the whitespace is real code whitespace).
    for x in range(0, len(the_list)):
        line=the_list[counter]
        line2=the_new_list[counter]
        if line2.rstrip()!=line2:
            the_list[counter]=line.rstrip()
        counter+=1
    open(file_path, 'w').write('\n'.join(the_list))
def find_functions(file_path):
    'Find functions.'
    # Returns functions_list: [start, end] character offsets of each top-level
    # or nested 'def' block, determined by comparing indentation of following
    # lines (a following line at the same or lower indent ends the function).
    # NOTE(review): indentation reconstructed from control flow -- verify.
    global log_history
    if log_history==1:
        history_list.append('%s: find_functions(\"%s\")' % (str(datetime.datetime.now()), file_path))
    read_file=open(file_path).read()
    # Ensure the file begins and ends with a PyBroom banner comment.
    if read_file[0]!='#' or read_file.splitlines()[-1][0]!='#':
        with_comment='#PyBroom cleaned this file.\n'+read_file+'\n#https://github.com/Pr0gram-Creat0r-1/PyBroom\n#https://replit.com/@Pr0gram-Creat0r/PyBroom'
        open(file_path, 'w').write(with_comment)
    strings_list=find_strings(file_path)
    baby_word_string=open(file_path).read().replace('\\\'', ' ').replace('\\\"', ' ')
    counter=0
    # Blank out string literals so 'def ' inside a string is not matched.
    for m in range(0, len(strings_list)):
        try:
            par1=baby_word_string.find(strings_list[counter][0])
            baby_word_string=baby_word_string.replace(strings_list[counter][0], ' '*len(strings_list[counter][0]), 1)
            par2=baby_word_string.find(strings_list[counter][1])+len(strings_list[counter][1])-1
            baby_word_string=baby_word_string.replace(strings_list[counter][1], ' '*len(strings_list[counter][1]), 1)
            string_text_list=list(baby_word_string)
            subcounter=par1
            for x in range(0, par2-par1):
                if string_text_list[subcounter]!='\n':
                    string_text_list[subcounter]=' '
                subcounter+=1
            baby_word_string=''.join(string_text_list)
        except IndexError:
            pass
        counter+=1
    counter=0
    text=baby_word_string
    the_list=baby_word_string.splitlines()
    functions_list=[]
    for x in range(0, len(the_list)):
        line=the_list[counter]
        # A def header: only whitespace before the keyword and line starts 'def '.
        if (line.split('def')[0].isspace()==True or line.split('def')[0]=='') and line.lstrip()[0:4]=='def ':
            indents=line.split('def')[0].count(' ')
            position=text.find(line.lstrip())
            text=text.replace(line, ' '*len(line), 1)
            subcounter=counter
            # Walk forward until a line at the same or lower indent level
            # appears after the header; that marks the end of the function.
            for y in range(0, len(the_list)-the_list.index(line)):
                subline=the_list[subcounter]
                indents2=subline.replace(subline.lstrip(), '').count(' ')
                if indents2<=indents and text.find(subline)-1>text.find(line):
                    position2=text.find(subline)-1
                    break
                else:
                    position2=None
                subcounter+=1
            # NOTE(review): position2 may be unbound if the inner loop body
            # never runs (range of 0) -- confirm against real inputs.
            if position2!=None:
                functions_list.append([position, position2])
        counter+=1
    return functions_list
def find_classes(file_path):
    'Find classes.'
    # Returns classes_list: [start, end] character offsets of each 'class'
    # block.  Mirrors find_functions() exactly, but keyed on 'class ' headers.
    # NOTE(review): indentation reconstructed from control flow -- verify.
    global log_history
    if log_history==1:
        history_list.append('%s: find_classes(\"%s\")' % (str(datetime.datetime.now()), file_path))
    read_file=open(file_path).read()
    # Ensure the file begins and ends with a PyBroom banner comment.
    if read_file[0]!='#' or read_file.splitlines()[-1][0]!='#':
        with_comment='#PyBroom cleaned this file.\n'+read_file+'\n#https://github.com/Pr0gram-Creat0r-1/PyBroom\n#https://replit.com/@Pr0gram-Creat0r/PyBroom'
        open(file_path, 'w').write(with_comment)
    strings_list=find_strings(file_path)
    baby_word_string=open(file_path).read().replace('\\\'', ' ').replace('\\\"', ' ')
    counter=0
    # Blank out string literals so 'class ' inside a string is not matched.
    for m in range(0, len(strings_list)):
        try:
            par1=baby_word_string.find(strings_list[counter][0])
            baby_word_string=baby_word_string.replace(strings_list[counter][0], ' '*len(strings_list[counter][0]), 1)
            par2=baby_word_string.find(strings_list[counter][1])+len(strings_list[counter][1])-1
            baby_word_string=baby_word_string.replace(strings_list[counter][1], ' '*len(strings_list[counter][1]), 1)
            string_text_list=list(baby_word_string)
            subcounter=par1
            for x in range(0, par2-par1):
                if string_text_list[subcounter]!='\n':
                    string_text_list[subcounter]=' '
                subcounter+=1
            baby_word_string=''.join(string_text_list)
        except IndexError:
            pass
        counter+=1
    counter=0
    text=baby_word_string
    the_list=baby_word_string.splitlines()
    classes_list=[]
    for x in range(0, len(the_list)):
        line=the_list[counter]
        # A class header: only whitespace before the keyword and line starts 'class '.
        if (line.split('class')[0].isspace()==True or line.split('class')[0]=='') and line.lstrip()[0:6]=='class ':
            indents=line.split('class')[0].count(' ')
            position=text.find(line.lstrip())
            text=text.replace(line, ' '*len(line), 1)
            subcounter=counter
            # Walk forward until a line at the same or lower indent level
            # appears after the header; that marks the end of the class.
            for y in range(0, len(the_list)-the_list.index(line)):
                subline=the_list[subcounter]
                indents2=subline.replace(subline.lstrip(), '').count(' ')
                if indents2<=indents and text.find(subline)-1>text.find(line):
                    position2=text.find(subline)-1
                    break
                else:
                    position2=None
                subcounter+=1
            # NOTE(review): position2 may be unbound if the inner loop body
            # never runs -- confirm against real inputs.
            if position2!=None:
                classes_list.append([position, position2])
        counter+=1
    return classes_list
def remove_local_variables(file_path):
    'Remove local variables. Use this before cleaning global variables in the remove_variables() function.'
    # For every function in the file, finds simple 'name=value' assignments
    # whose name occurs exactly once in the function body (i.e. it is only
    # ever assigned, never read) and blanks those lines out.  Also writes two
    # report files: <name>_all_local_variables.txt and
    # <name>_used_local_variables.txt.
    # NOTE(review): indentation reconstructed from control flow -- verify,
    # especially the placement of the try/except around function_list[...].
    global history_list
    global log_history
    if log_history==1:
        history_list.append('%s: remove_local_variables(\"%s\")' % (str(datetime.datetime.now()), file_path))
    log_history=0  # silence logging while calling sibling helpers
    if find_functions(file_path)!=[]:
        comment_list=find_comments(file_path)
        strings_list=find_strings(file_path)
        functions_list=find_functions(file_path)
        text=open(file_path).read()
        baby_word_string=open(file_path).read().replace('\\\'', ' ').replace('\\\"', ' ')
        baby_word_string2=open(file_path).read()
        counter=0
        function_list=[]
        # Extract each function's raw text by its [start, end] offsets.
        for extract in range(0, len(functions_list)):
            function=baby_word_string2[functions_list[counter][0]:functions_list[counter][1]]
            function_list.append(function)
            counter+=1
        counter=0
        # Blank out string literals in the working copy (newlines preserved).
        for m in range(0, len(strings_list)):
            try:
                par1=baby_word_string.find(strings_list[counter][0])
                baby_word_string=baby_word_string.replace(strings_list[counter][0], ' '*len(strings_list[counter][0]), 1)
                par2=baby_word_string.find(strings_list[counter][1])+len(strings_list[counter][1])-1
                baby_word_string=baby_word_string.replace(strings_list[counter][1], ' '*len(strings_list[counter][1]), 1)
                string_text_list=list(baby_word_string)
                subcounter=par1
                for x in range(0, par2-par1):
                    if string_text_list[subcounter]!='\n':
                        string_text_list[subcounter]=' '
                    subcounter+=1
                baby_word_string=''.join(string_text_list)
            except IndexError:
                pass
            counter+=1
        counter=0
        # Blank out comment spans in the working copy.
        for m in range(0, len(comment_list)):
            try:
                par1=comment_list[counter][0]
                par2=comment_list[counter][1]
                string_text_list=list(baby_word_string)
                subcounter=par1
                for x in range(0, par2-par1):
                    string_text_list[subcounter]=' '
                    subcounter+=1
                baby_word_string=''.join(string_text_list)
            except IndexError:
                pass
            counter+=1
        counter=0
        function_list2=[]
        # Extract the masked text of each function (strings/comments blanked).
        for extract in range(0, len(functions_list)):
            function=baby_word_string[functions_list[counter][0]:functions_list[counter][1]]
            function_list2.append(function)
            counter+=1
        counter=0
        main_counter=0
        local_variables_list=[]
        used_local_variables=[]
        for remove in range(0, len(function_list)):
            counter=0
            function_text=function_list2[main_counter]
            the_new_list=function_text.splitlines()
            the_list=function_list[main_counter].splitlines()
            variable_list=[]
            real_word_list=[]
            # Collect candidate variable names: lines with exactly one '=' and
            # no augmented-assignment/comparison operator before it.
            for x in range(0, len(the_new_list)):
                line=the_new_list[counter]
                if line.count('=')==1 and line.split('=')[0].count('+')==0 and line.split('=')[0].count('-')==0 and line.split('=')[0].count('*')==0 and line.split('=')[0].count('/')==0 and line.split('=')[0].count('%')==0 and line.split('=')[0].count('<')==0 and line.split('=')[0].count('>')==0 and line.split('=')[0].count('!')==0 and line.split('=')[0].count('|')==0 and line.split('=')[0].count('~')==0 and line.split('=')[0].count('&')==0 and line.split('=')[0].count('^')==0:
                    new_line=line.strip()
                    variable=new_line.split('=')[0].strip()
                    if variable not in variable_list:
                        variable_list.append(variable)
                counter+=1
            new_variable_list=variable_list.copy()
            string_of_variables=' '.join(variable_list)
            local_variables_list.append(string_of_variables)
            # Flatten the accumulator back into a list of individual names.
            local_variables_list=' '.join(local_variables_list)
            local_variables_list=local_variables_list.split(' ')
            counter=0
            # Split the masked function text into bare words by replacing
            # every operator/punctuation character with a space.
            baby_word_string=function_text.replace('(', ' ').replace(')', ' ').replace('+', ' ').replace('-', ' ').replace('*', ' ').replace('/', ' ').replace('=', ' ').replace('.', ' ').replace(':', ' ').replace(',', ' ').replace('[', ' ').replace(']', ' ').replace('{', ' ').replace('}', ' ').replace('<', ' ').replace('>', ' ').replace('%', ' ').replace('!', ' ').replace(';', ' ').replace('~', ' ').replace('|', ' ').replace('&', ' ').replace('^', ' ')
            word_list=baby_word_string.splitlines()
            for c in range(0, len(word_list)):
                string_list=word_list[counter].split(' ')
                subcounter=0
                for b in range(0, len(string_list)):
                    real_word_list.append(string_list[subcounter])
                    subcounter+=1
                counter+=1
            counter=0
            #As of now, we have a list of variables and a list of words. That is what the previous code is attempting to create.
            list_counter1=0
            list_counter2=0
            for y in range(0, len(variable_list)):
                list_counter2=0
                counted=0
                # Count how many times this variable name appears as a word.
                for a in range(0, len(real_word_list)):
                    var=variable_list[list_counter1]
                    word=real_word_list[list_counter2]
                    if var==word:
                        counted+=1
                    list_counter2+=1
                # Exactly one occurrence == assigned but never read: remove it.
                if counted==1:
                    remove_counter=0
                    for z in range(0, len(the_list)):
                        line=the_list[remove_counter].split('=')[0].strip()
                        if var==line:
                            string_check=the_list[remove_counter].replace('\\\'', ' ').replace('\\\"', ' ')
                            string_check_list=[]
                            # If the assignment opens a triple-quoted string,
                            # blank following lines through the closing quotes.
                            if string_check.count("'''")==1 or string_check.count('"""')==1:
                                finding=string_check.find("'''")
                                finding2=string_check.find('"""')
                                if finding!=-1:
                                    string_check_list.append(finding)
                                if finding2!=-1:
                                    string_check_list.append(finding2)
                                minimum=min(string_check_list)
                                minimum_type=string_check[minimum:minimum+3]
                                if string_check.count(minimum_type)==1:
                                    the_list[remove_counter]=' '*len(the_list[remove_counter])
                                    subcounter=remove_counter+1
                                    for x in range(0, len(the_list)-remove_counter-1):
                                        line=the_list[subcounter].replace('\\\'', ' ').replace('\\\"', ' ')
                                        if line.count(minimum_type)==0 or line.count(minimum_type)==1:
                                            the_list[subcounter]=' '*len(the_list[subcounter])
                                            if line.count(minimum_type)==1:
                                                break
                                        subcounter+=1
                                else:
                                    the_list[remove_counter]=' '*len(the_list[remove_counter])
                            else:
                                the_list[remove_counter]=' '*len(the_list[remove_counter])
                            try:
                                function_list[main_counter]='\n'.join(the_list)
                            except IndexError:
                                pass
                            new_variable_list.remove(var)
                        remove_counter+=1
                list_counter1+=1
            used_local_variables.append(new_variable_list)
            main_counter+=1
        main_counter=0
        counter=0
        # Splice the edited function bodies back into the full file text.
        new_text=text.replace(text[functions_list[counter][0]:functions_list[counter][1]], function_list[counter], 1)
        counter=1
        for edit_original in range(0, len(functions_list)-1):
            new_text=new_text.replace(new_text[functions_list[counter][0]:functions_list[counter][1]], function_list[counter], 1)
            counter+=1
        open(file_path, 'w').write(new_text)
        local_variables_string='\n'.join(local_variables_list)
        open(file_path.replace('.py', '_all_local_variables.txt'), 'w').write(local_variables_string)
        counter=0
        used_string=''
        for used in range(0, len(used_local_variables)):
            used_string=used_string+'\n'.join(used_local_variables[counter])+'\n'
            counter+=1
        open(file_path.replace('.py', '_used_local_variables.txt'), 'w').write(used_string)
    log_history=1  # restore logging for the caller
def remove_variables(file_path):
    'Remove unused variables.'
    # File-wide counterpart of remove_local_variables(): finds simple
    # 'name=value' assignments whose name occurs exactly once anywhere in the
    # file and blanks those lines out.  Writes <name>_all_variables.txt and
    # <name>_used_variables.txt report files next to the source.
    # NOTE(review): indentation reconstructed from control flow -- verify.
    global history_list
    global log_history
    if log_history==1:
        history_list.append('%s: remove_variables(\"%s\")' % (str(datetime.datetime.now()), file_path))
    log_history=0  # silence logging while calling sibling helpers
    file=open(file_path)
    comment_list=find_comments(file_path)
    strings_list=find_strings(file_path)
    text=file.read()
    the_list=text.splitlines()
    variable_list=[]
    word_list=[]
    real_word_list=[]
    variable=None
    baby_word_string=open(file_path).read().replace('\\\'', ' ').replace('\\\"', ' ')
    counter=0
    # Blank out string literals in the working copy (newlines preserved).
    for m in range(0, len(strings_list)):
        try:
            par1=baby_word_string.find(strings_list[counter][0])
            baby_word_string=baby_word_string.replace(strings_list[counter][0], ' '*len(strings_list[counter][0]), 1)
            par2=baby_word_string.find(strings_list[counter][1])+len(strings_list[counter][1])-1
            baby_word_string=baby_word_string.replace(strings_list[counter][1], ' '*len(strings_list[counter][1]), 1)
            string_text_list=list(baby_word_string)
            subcounter=par1
            for x in range(0, par2-par1):
                if string_text_list[subcounter]!='\n':
                    string_text_list[subcounter]=' '
                subcounter+=1
            baby_word_string=''.join(string_text_list)
        except IndexError:
            pass
        counter+=1
    counter=0
    # Blank out comment spans in the working copy.
    for m in range(0, len(comment_list)):
        try:
            par1=comment_list[counter][0]
            par2=comment_list[counter][1]
            string_text_list=list(baby_word_string)
            subcounter=par1
            for x in range(0, par2-par1):
                string_text_list[subcounter]=' '
                subcounter+=1
            baby_word_string=''.join(string_text_list)
        except IndexError:
            pass
        counter+=1
    counter=0
    the_new_list=baby_word_string.splitlines()
    # Collect candidate variable names: lines with exactly one '=' and no
    # augmented-assignment/comparison operator before it.
    for x in range(0, len(the_new_list)):
        line=the_new_list[counter]
        if line.count('=')==1 and line.split('=')[0].count('+')==0 and line.split('=')[0].count('-')==0 and line.split('=')[0].count('*')==0 and line.split('=')[0].count('/')==0 and line.split('=')[0].count('%')==0 and line.split('=')[0].count('<')==0 and line.split('=')[0].count('>')==0 and line.split('=')[0].count('!')==0 and line.split('=')[0].count('|')==0 and line.split('=')[0].count('~')==0 and line.split('=')[0].count('&')==0 and line.split('=')[0].count('^')==0:
            new_line=line.strip()
            variable=new_line.split('=')[0].strip()
            if variable not in variable_list:
                variable_list.append(variable)
        counter+=1
    new_variable_list=variable_list.copy()
    string_of_variables='\n'.join(variable_list)
    open('%s_all_variables.txt' % file_path.replace('.py', ''), 'w').write(string_of_variables)
    counter=0
    # Split the masked file text into bare words by replacing every
    # operator/punctuation character with a space.
    baby_word_string=baby_word_string.replace('(', ' ').replace(')', ' ').replace('+', ' ').replace('-', ' ').replace('*', ' ').replace('/', ' ').replace('=', ' ').replace('.', ' ').replace(':', ' ').replace(',', ' ').replace('[', ' ').replace(']', ' ').replace('{', ' ').replace('}', ' ').replace('<', ' ').replace('>', ' ').replace('%', ' ').replace('!', ' ').replace(';', ' ').replace('~', ' ').replace('|', ' ').replace('&', ' ').replace('^', ' ')
    word_list=baby_word_string.splitlines()
    for c in range(0, len(word_list)):
        string_list=word_list[counter].split(' ')
        subcounter=0
        for b in range(0, len(string_list)):
            real_word_list.append(string_list[subcounter])
            subcounter+=1
        counter+=1
    counter=0
    #As of now, we have a list of variables and a list of words. That is what the previous code is attempting to create.
    list_counter1=0
    list_counter2=0
    for y in range(0, len(variable_list)):
        list_counter2=0
        counted=0
        # Count how many times this variable name appears as a word.
        for a in range(0, len(real_word_list)):
            var=variable_list[list_counter1]
            word=real_word_list[list_counter2]
            if var==word:
                counted+=1
            list_counter2+=1
        # Exactly one occurrence == assigned but never read: remove it.
        if counted==1:
            remove_counter=0
            for z in range(0, len(the_list)):
                line=the_list[remove_counter].split('=')[0].strip()
                if var==line:
                    string_check=the_list[remove_counter].replace('\\\'', ' ').replace('\\\"', ' ')
                    string_check_list=[]
                    # If the assignment opens a triple-quoted string, blank
                    # following lines through the closing quotes.
                    if string_check.count("'''")==1 or string_check.count('"""')==1:
                        finding=string_check.find("'''")
                        finding2=string_check.find('"""')
                        if finding!=-1:
                            string_check_list.append(finding)
                        if finding2!=-1:
                            string_check_list.append(finding2)
                        minimum=min(string_check_list)
                        minimum_type=string_check[minimum:minimum+3]
                        if string_check.count(minimum_type)==1:
                            the_list[remove_counter]=' '*len(the_list[remove_counter])
                            subcounter=remove_counter+1
                            for x in range(0, len(the_list)-remove_counter-1):
                                line=the_list[subcounter].replace('\\\'', ' ').replace('\\\"', ' ')
                                if line.count(minimum_type)==0 or line.count(minimum_type)==1:
                                    the_list[subcounter]=' '*len(the_list[subcounter])
                                    if line.count(minimum_type)==1:
                                        break
                                subcounter+=1
                        else:
                            the_list[remove_counter]=' '*len(the_list[remove_counter])
                    else:
                        the_list[remove_counter]=' '*len(the_list[remove_counter])
                    # NOTE(review): if var matches more than one line this
                    # second remove() raises ValueError -- confirm intent.
                    new_variable_list.remove(var)
                remove_counter+=1
        list_counter1+=1
    string='\n'.join(the_list)
    new_file=open(file_path, 'w')
    new_file.write(string)
    string_of_variables='\n'.join(new_variable_list)
    open('%s_used_variables.txt' % file_path.replace('.py', ''), 'w').write(string_of_variables)
    log_history=1  # restore logging for the caller
"""def remove_functions(file_path):
'Remove unused functions.'
global history_list
global log_history
if log_history==1:
history_list.append('%s: remove_functions(\"%s\")' % (str(datetime.datetime.now()), file_path))
log_history=0
comment_list=find_comments(file_path)
strings_list=find_strings(file_path)
function_list=find_functions(file_path)
text=open(file_path).read()
the_list=open(file_path).read().splitlines()
word_list=[]
real_word_list=[]
new_list=the_list.copy()
baby_word_string='\n'.join(new_list)
counter=0
for m in range(0, len(strings_list)):
try:
par1=baby_word_string.find(baby_word_string[strings_list[counter][0]])
baby_word_string=baby_word_string.replace(baby_word_string[par1], ' ', 1)
par2=baby_word_string.find(baby_word_string[strings_list[counter][1]])
baby_word_string=baby_word_string.replace(baby_word_string[par2], ' ', 1)
baby_word_string=baby_word_string.replace(baby_word_string[par1:par2+1], ' '*len(baby_word_string[par1:par2+1]), 1)
except IndexError:
pass
counter+=1
counter=0
for m in range(0, len(comment_list)):
try:
par1=baby_word_string.find(baby_word_string[comment_list[counter][0]])
par2=baby_word_string.find(baby_word_string[comment_list[counter][1]])
baby_word_string=baby_word_string.replace(baby_word_string[par1:par2+1], ' '*len(baby_word_string[par1:par2+1]), 1)
except IndexError:
pass
counter+=1
counter=0
function_names=[]
print(baby_word_string)
for name_functions in range(0, len(function_list)):
try:
function_text=text[function_list[counter][0]:function_list[counter][1]]
function_text_list=function_text.splitlines()
name=function_text_list[0].split('def ')[1].split('(')[0].strip()
function_names.append(name)
except IndexError:
pass
counter+=1
counter=0
print(function_names)
function_names_copy=function_names.copy()
baby_word_string=baby_word_string.replace(' ', '').replace('(', ' ').replace(')', ' ').replace('+', ' ').replace('-', ' ').replace('*', ' ').replace('/', ' ').replace('=', ' ').replace('.', ' ').replace(':', ' ').replace(',', ' ').replace('[', ' ').replace(']', ' ').replace('{', ' ').replace('}', ' ')
word_list=baby_word_string.splitlines()
for c in range(0, len(word_list)):
string_list=word_list[counter].split(' ')
subcounter=0
for b in range(0, len(string_list)):
real_word_list.append(string_list[subcounter])
subcounter+=1
counter+=1
counter=0
list_counter1=0
list_counter2=0
print(real_word_list)
for y in range(0, len(function_names)):
list_counter2=0
counted=0
for a in range(0, len(real_word_list)):
func=function_names[list_counter1]
word=real_word_list[list_counter2]
if func==word:
counted+=1
list_counter2+=1
if counted==1:
text=text.replace(text[function_list[list_counter1][0]:function_list[list_counter1][1]], ' '*len(text[function_list[list_counter1][0]:function_list[list_counter1][1]]), 1)
function_names_copy.remove(func)
list_counter1+=1
string=text
open(file_path, 'w').write(string)
log_history=1
print(function_names_copy)"""
def destroy(file_path):
    """Delete the file at ``file_path`` from disk entirely."""
    global history_list
    global log_history
    if log_history == 1:
        # Record the call in the session history before removing the file.
        stamp = str(datetime.datetime.now())
        history_list.append('%s: destroy(\"%s\")' % (stamp, file_path))
    os.remove(file_path)
def install_system_requirements(file_path):
    """Install the python modules a script needs, using pip.

    Scans ``file_path`` for a marker comment of the form
    ``#PyBroom.install_system_requirements: [package1, package2, package3]``
    (no string quotes anywhere) and pip-installs each listed package with
    the current interpreter.  If no marker line exists, nothing happens.
    """
    global history_list
    global log_history
    if log_history == 1:
        history_list.append('%s: install_system_requirements(\"%s\")' % (str(datetime.datetime.now()), file_path))
    # Fix: close the source file (the original leaked the handle).
    with open(file_path) as source:
        text_list = source.read().splitlines()
    # Find the first marker line, if any (replaces the manual counter+flag).
    marker_line = None
    for line in text_list:
        if '#PyBroom.install_system_requirements: [' in line:
            marker_line = line
            break
    if marker_line is not None:
        # Isolate the bracketed list, strip brackets/spaces, split on commas.
        list_part = marker_line.strip().split(' ', 1)[1].split(']')[0] + ']'
        packages = list_part.replace('[', '').replace(']', '').replace(' ', '').split(',')
        for package in packages:
            # Install with the interpreter currently running, per pip docs.
            subprocess.check_call([sys.executable, '-m', 'pip', 'install', package])
# NOTE(review): disabled placeholders kept as bare string literals — they are
# no-op expression statements, not executable definitions.
"""def remove_classes(file_path):
'Removes unused classes.'
pass"""
"""def remove_modules(file_path):
pass #might do this..."""
def suggestions(file_path):
    """Write a ``*_suggestions.txt`` report next to ``file_path``.

    The report lists: variable/function/class names whose words are not in
    the english-words list, pairs of names whose character sets overlap by
    90%+ (likely confusable), and line numbers heuristically flagged as
    complex.  Relies on the module-level helpers ``find_comments``,
    ``find_strings``, ``find_functions``, ``find_classes`` and on the
    module-level ``english_list`` word set.
    """
    global history_list
    global log_history
    if log_history == 1:
        history_list.append('%s: suggestions(\"%s\")' % (str(datetime.datetime.now()), file_path))
    # Suppress history logging for any helper calls made below.
    log_history = 0
    text = open(file_path).read()
    comment_list = find_comments(file_path)
    strings_list = find_strings(file_path)
    function_list = find_functions(file_path)
    class_list = find_classes(file_path)
    # Shadow copy of the source with escaped quotes blanked; string and
    # comment regions get masked out of it below so only real code remains.
    baby_word_string = open(file_path).read().replace('\\\'', ' ').replace('\\\"', ' ')
    counter = 0
    # Stage 1: blank every string literal (delimiters and contents) with
    # spaces, preserving newlines so line numbering stays intact.
    for m in range(0, len(strings_list)):
        try:
            par1 = baby_word_string.find(strings_list[counter][0])
            baby_word_string = baby_word_string.replace(strings_list[counter][0], ' '*len(strings_list[counter][0]), 1)
            par2 = baby_word_string.find(strings_list[counter][1])+len(strings_list[counter][1])-1
            baby_word_string = baby_word_string.replace(strings_list[counter][1], ' '*len(strings_list[counter][1]), 1)
            string_text_list = list(baby_word_string)
            subcounter = par1
            for x in range(0, par2-par1):
                if string_text_list[subcounter] != '\n':
                    string_text_list[subcounter] = ' '
                subcounter += 1
            baby_word_string = ''.join(string_text_list)
        except IndexError:
            pass
        counter += 1
    counter = 0
    # Stage 2: blank every comment span (start/end offsets come from
    # find_comments) with spaces.
    for m in range(0, len(comment_list)):
        try:
            par1 = comment_list[counter][0]
            par2 = comment_list[counter][1]
            string_text_list = list(baby_word_string)
            subcounter = par1
            for x in range(0, par2-par1):
                string_text_list[subcounter] = ' '
                subcounter += 1
            baby_word_string = ''.join(string_text_list)
        except IndexError:
            pass
        counter += 1
    counter = 0
    the_new_list = baby_word_string.splitlines()
    # Stage 3: flag "complex" lines by crude heuristics: triple-nested
    # parens, 5+ internal runs of spaces, or length >= 200 characters.
    complex_list = []
    complex_list_text = open(file_path).read().splitlines()
    for x in range(0, len(complex_list_text)):
        line = complex_list_text[counter]
        if line.count('(((') >= 1 or line.count(' ') >= 5 or len(line) >= 200:
            complex_list.append(counter+1)  # 1-based line number
        counter += 1
    counter = 0
    variable_list = []
    real_word_list = []
    function_names = []
    class_names = []
    # Stage 4: collect assignment targets.  A line counts as a simple
    # assignment when it has exactly one '=' and no operator character
    # before it (which would make it an augmented assignment / comparison).
    for x in range(0, len(the_new_list)):
        line = the_new_list[counter]
        if line.count('=')==1 and line.split('=')[0].count('+')==0 and line.split('=')[0].count('-')==0 and line.split('=')[0].count('*')==0 and line.split('=')[0].count('/')==0 and line.split('=')[0].count('%')==0 and line.split('=')[0].count('<')==0 and line.split('=')[0].count('>')==0 and line.split('=')[0].count('!')==0 and line.split('=')[0].count('|')==0 and line.split('=')[0].count('~')==0 and line.split('=')[0].count('&')==0 and line.split('=')[0].count('^')==0:
            new_line = line.strip()
            variable = new_line.split('=')[0].strip()
            if variable not in variable_list:
                variable_list.append(variable)
        counter += 1
    counter = 0
    # Stage 5: tokenize the masked source into bare identifiers by turning
    # every punctuation/operator character into a space.
    baby_word_string = baby_word_string.replace('(', ' ').replace(')', ' ').replace('+', ' ').replace('-', ' ').replace('*', ' ').replace('/', ' ').replace('=', ' ').replace('.', ' ').replace(':', ' ').replace(',', ' ').replace('[', ' ').replace(']', ' ').replace('{', ' ').replace('}', ' ').replace('<', ' ').replace('>', ' ').replace('%', ' ').replace('!', ' ').replace(';', ' ').replace('~', ' ').replace('|', ' ').replace('&', ' ').replace('^', ' ')
    word_list = baby_word_string.splitlines()
    for c in range(0, len(word_list)):
        string_list = word_list[counter].split(' ')
        subcounter = 0
        for b in range(0, len(string_list)):
            real_word_list.append(string_list[subcounter])
            subcounter += 1
        counter += 1
    counter = 0
    # Stage 6: extract function names from each function's first line.
    for name_functions in range(0, len(function_list)):
        try:
            function_text = text[function_list[counter][0]:function_list[counter][1]]
            function_text_list = function_text.splitlines()
            name = function_text_list[0].split('def ')[1].split('(')[0].strip()
            function_names.append(name)
        except IndexError:
            pass
        counter += 1
    counter = 0
    # Stage 7: extract class names (handles both 'class X:' and 'class X(B):').
    for name_classes in range(0, len(class_list)):
        try:
            class_text = text[class_list[counter][0]:class_list[counter][1]]
            class_text_list = class_text.splitlines()
            name = class_text_list[0].split('class ')[1].split(':')[0].split('(')[0].strip()
            class_names.append(name)
        except IndexError:
            pass
        counter += 1
    counter = 0
    vars_not_word = []
    funcs_not_word = []
    classes_not_word = []
    # Stage 8: split each identifier on underscores/digits and flag it when
    # any resulting fragment is not an English word per ``english_list``.
    for x in range(0, len(variable_list)):
        var_words = variable_list[counter].replace('_', ' ').replace(',', ' ').replace('[', ' ').replace(']', ' ').replace('(', ' ').replace(')', ' ').replace('1', ' ').replace('2', ' ').replace('3', ' ').replace('4', ' ').replace('5', ' ').replace('6', ' ').replace('7', ' ').replace('8', ' ').replace('9', ' ').replace('0', ' ').split(' ')
        subcounter = 0
        for y in range(0, len(var_words)):
            var = var_words[subcounter]
            if var not in english_list:
                vars_not_word.append(variable_list[counter])
                break
            subcounter += 1
        counter += 1
    counter = 0
    for x in range(0, len(function_names)):
        func_words = function_names[counter].replace('_', ' ').replace(',', ' ').replace('[', ' ').replace(']', ' ').replace('(', ' ').replace(')', ' ').replace('1', ' ').replace('2', ' ').replace('3', ' ').replace('4', ' ').replace('5', ' ').replace('6', ' ').replace('7', ' ').replace('8', ' ').replace('9', ' ').replace('0', ' ').split(' ')
        subcounter = 0
        for y in range(0, len(func_words)):
            func = func_words[subcounter]
            if func not in english_list:
                funcs_not_word.append(function_names[counter])
                break
            subcounter += 1
        counter += 1
    counter = 0
    for x in range(0, len(class_names)):
        class_words = class_names[counter].replace('_', ' ').replace(',', ' ').replace('[', ' ').replace(']', ' ').replace('(', ' ').replace(')', ' ').replace('1', ' ').replace('2', ' ').replace('3', ' ').replace('4', ' ').replace('5', ' ').replace('6', ' ').replace('7', ' ').replace('8', ' ').replace('9', ' ').replace('0', ' ').split(' ')
        subcounter = 0
        for y in range(0, len(class_words)):
            _class = class_words[subcounter]
            if _class not in english_list:
                classes_not_word.append(class_names[counter])
                break
            subcounter += 1
        counter += 1
    counter = 0
    similar_list_var = []
    similar_list_func = []
    similar_list_class = []
    # Stage 9: pairwise similarity — two names are "similar" when the set of
    # characters they share covers >= 90% of the larger character set.
    # Each unordered pair is recorded at most once.
    for similarities_var in range(0, len(variable_list)):
        var = variable_list[counter]
        #Start with variable you want to compare
        subcounter = 0
        for z in range(0, len(variable_list)):
            var2 = variable_list[subcounter]
            common_letters = list(set(var) & set(var2))
            if len(common_letters) >= max([len(set(var)), len(set(var2))])*0.9 and var != var2 and [var, var2] not in similar_list_var and [var2, var] not in similar_list_var:
                similar_list_var.append([var, var2])
            subcounter += 1
        counter += 1
    counter = 0
    for similarities_var in range(0, len(function_names)):
        func = function_names[counter]
        #Start with variable you want to compare
        subcounter = 0
        for z in range(0, len(function_names)):
            func2 = function_names[subcounter]
            common_letters = list(set(func) & set(func2))
            if len(common_letters) >= max([len(set(func)), len(set(func2))])*0.9 and func != func2 and [func, func2] not in similar_list_func and [func2, func] not in similar_list_func:
                similar_list_func.append([func, func2])
            subcounter += 1
        counter += 1
    counter = 0
    for similarities_var in range(0, len(class_names)):
        _class = class_names[counter]
        #Start with variable you want to compare
        subcounter = 0
        for z in range(0, len(class_names)):
            class2 = class_names[subcounter]
            common_letters = list(set(_class) & set(class2))
            if len(common_letters) >= max([len(set(_class)), len(set(class2))])*0.9 and _class != class2 and [_class, class2] not in similar_list_class and [class2, _class] not in similar_list_class:
                similar_list_class.append([_class, class2])
            subcounter += 1
        counter += 1
    counter = 0
    # Stage 10: emit the report alongside the analysed file.
    suggestions_file = open(file_path.replace('.py', '_suggestions.txt'), 'w')
    write_string = 'This is a file of variables, functions, and classes that are not word according to https://pypi.org/project/english-words/. It also contains variable, function, and class names that have been identified as similar. Lastly, this file contains lines that have been identified as hard to understand, that may require a comment stating its purpose.\n\n'+'Names that are not in the english-words list:\n\n'+'Variables: %s\n\n' % str(vars_not_word)+'Functions: %s\n\n' % str(funcs_not_word)+'Classes: %s\n\n' % str(classes_not_word)+'Similar names:\n\n'+'Variables: %s\n\n' % str(similar_list_var)+'Functions: %s\n\n' % str(similar_list_func)+'Classes: %s\n\n' % str(similar_list_class)+'Complex lines:\n\n%s' % str(complex_list)
    suggestions_file.write(write_string)
    log_history = 1
def beautify(file_path):
    """Rewrite ``file_path`` in place, padding operators with spaces.

    String literals and comments are first masked out (overwritten with
    ``p`` characters) in a shadow copy of the text so that operator
    characters inside them are never touched.  Each remaining operator
    occurrence that has no whitespace on either side is then padded with
    spaces in the real line.  Longest operators are handled first so that
    e.g. ``>>=`` is consumed before ``>>`` or ``=``.

    Bug fixed vs. the original: the ``//`` branch lacked an ``else`` that
    consumes the occurrence in the shadow line, so any line containing an
    already-spaced ``//`` made the loop spin forever.

    NOTE(review): multi-character operators are consumed from the shadow
    line with ``replace(op, ' ', 1)`` (shortening it) while the real line
    grows, so later operator indices on the same line can drift.  That
    behaviour is inherited from the original and deliberately preserved.
    """
    global history_list
    global log_history
    if log_history == 1:
        history_list.append('%s: beautify(\"%s\")' % (str(datetime.datetime.now()), file_path))
    log_history = 0
    strings_list = find_strings(file_path)
    comment_list = find_comments(file_path)
    with open(file_path) as source:  # fix: close the handle; avoid shadowing 'file'
        original_text = source.read()
    # Shadow copy with escaped quotes blanked so masking is simpler.
    baby_word_string = original_text.replace('\\\'', ' ').replace('\\\"', ' ')
    text_list = original_text.splitlines()
    # Mask every string literal (delimiters and contents) with 'p',
    # preserving newlines so line splitting stays aligned.
    for entry in strings_list:
        try:
            par1 = baby_word_string.find(entry[0])
            baby_word_string = baby_word_string.replace(entry[0], 'p' * len(entry[0]), 1)
            par2 = baby_word_string.find(entry[1]) + len(entry[1]) - 1
            baby_word_string = baby_word_string.replace(entry[1], 'p' * len(entry[1]), 1)
            masked = list(baby_word_string)
            pos = par1
            for _ in range(0, par2 - par1):
                if masked[pos] != '\n':
                    masked[pos] = 'p'
                pos += 1
            baby_word_string = ''.join(masked)
        except IndexError:
            pass
    # Mask every comment span with 'p'.
    for entry in comment_list:
        try:
            start, stop = entry[0], entry[1]
            masked = list(baby_word_string)
            pos = start
            for _ in range(0, stop - start):
                masked[pos] = 'p'
                pos += 1
            baby_word_string = ''.join(masked)
        except IndexError:
            pass
    the_new_list = baby_word_string.splitlines()
    # (operator, replacement pieces) — piece i overwrites character
    # index+i of the real line, so joining inserts the padding spaces.
    # Order matters: longest operators first, '-' last.
    operator_pieces = [
        ('>>=', (' >', '>', '= ')),
        ('<<=', (' <', '<', '= ')),
        ('**=', (' *', '*', '= ')),
        ('//=', (' /', '/', '= ')),
        ('>>', (' >', '> ')),
        ('<<', (' <', '< ')),
        ('==', (' =', '= ')),
        ('+=', (' +', '= ')),
        ('-=', (' -', '= ')),
        ('*=', (' *', '= ')),
        ('/=', (' /', '= ')),
        ('!=', (' !', '= ')),
        ('>=', (' >', '= ')),
        ('<=', (' <', '= ')),
        ('%=', (' %', '= ')),
        ('**', (' *', '* ')),
        ('//', (' /', '/ ')),
        ('&=', (' &', '= ')),
        ('|=', (' |', '= ')),
        ('^=', (' ^', '= ')),
        ('=', (' = ',)),
        ('+', (' + ',)),
        ('*', (' * ',)),
        ('/', (' / ',)),
        ('<', (' < ',)),
        ('>', (' > ',)),
        ('%', (' % ',)),
        (',', (', ',)),
        ('&', (' & ',)),
        ('|', (' | ',)),
        ('^', (' ^ ',)),
        ('~', (' ~ ',)),
        ('-', (' - ',)),
    ]
    line = ''
    line2 = ''
    counter = 0
    for _ in range(0, len(text_list)):
        try:
            line = text_list[counter]          # real line, gets rewritten
        except IndexError:
            pass
        try:
            line2 = the_new_list[counter]      # masked shadow of the line
        except IndexError:
            pass
        for op, pieces in operator_pieces:
            while line2.count(op) > 0:
                try:
                    head, tail = line2.split(op, 1)
                    if op == '=':
                        # Original quirk (preserved): a plain '=' qualifies
                        # when EITHER side lacks a space, not both.
                        needs_padding = head.rstrip() == head or tail.lstrip() == tail
                    else:
                        needs_padding = head.rstrip() == head and tail.lstrip() == tail
                    spot = line2.find(op)
                    if op == '-' and needs_padding:
                        # Skip unary minus directly after an opening bracket.
                        # (When '-' is at column 0, spot-1 wraps to the last
                        # character of the line — inherited behaviour.)
                        needs_padding = line[spot - 1] not in ['(', '[', '{']
                    if needs_padding and spot != -1:
                        chars = list(line)
                        for offset, piece in enumerate(pieces):
                            chars[spot + offset] = piece
                        line = ''.join(chars)
                        text_list[counter] = line
                    # Always consume this occurrence in the shadow line —
                    # this is the fix for the original '//' infinite loop.
                    line2 = line2.replace(op, ' ', 1)
                except IndexError:
                    pass
        counter += 1
    new_text = '\n'.join(text_list)
    with open(file_path, 'w') as target:  # fix: close the output handle
        target.write(new_text)
    log_history = 1
def history():
    """Return the list of the commands you did so far."""
    global history_list
    global log_history
    if log_history == 1:
        # Log the history() call itself before handing the list back.
        entry = '%s: history()' % str(datetime.datetime.now())
        history_list.append(entry)
    return history_list
def save_history(file_path):
    """Append the current command history to ``file_path``.

    Creates the file when it does not exist, then appends one entry of the
    form ``<timestamp>:<history list>`` followed by a newline.
    """
    global history_list
    global log_history
    if log_history == 1:
        history_list.append('%s: save_history(\"%s\")' % (str(datetime.datetime.now()), file_path))
    # Fix: the original opened the file twice ('x' then read) and leaked
    # both handles.  A missing file simply contributes empty prior text.
    if os.path.isfile(file_path):
        with open(file_path) as source:
            file_text = source.read()
    else:
        file_text = ''
    new_file_text = file_text + str(datetime.datetime.now()) + ':' + str(history_list) + '\n'
    with open(file_path, 'w') as target:
        target.write(new_file_text)
| 49.219109
| 742
| 0.510808
| 7,711
| 68,513
| 4.339256
| 0.03281
| 0.049313
| 0.056485
| 0.070293
| 0.845158
| 0.812283
| 0.791692
| 0.771787
| 0.769874
| 0.761207
| 0
| 0.03401
| 0.325792
| 68,513
| 1,391
| 743
| 49.254493
| 0.690358
| 0.016231
| 0
| 0.730146
| 0
| 0.024672
| 0.057374
| 0.004049
| 0.003084
| 0
| 0
| 0
| 0
| 1
| 0.010023
| false
| 0.042406
| 0.00771
| 0
| 0.021588
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8c9c1c3e7470b138f1af8a72282903b76c498276
| 104
|
py
|
Python
|
db/models/__init__.py
|
berkette/CampaignDex
|
b017ffa1e18d96826ee620578e392d9fa794fb4e
|
[
"MIT"
] | null | null | null |
db/models/__init__.py
|
berkette/CampaignDex
|
b017ffa1e18d96826ee620578e392d9fa794fb4e
|
[
"MIT"
] | null | null | null |
db/models/__init__.py
|
berkette/CampaignDex
|
b017ffa1e18d96826ee620578e392d9fa794fb4e
|
[
"MIT"
] | null | null | null |
from db.models.base import Base
from db.models.campaign import Campaign
from db.models.page import Page
| 26
| 39
| 0.826923
| 18
| 104
| 4.777778
| 0.388889
| 0.209302
| 0.418605
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 104
| 3
| 40
| 34.666667
| 0.934783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8cbf329b84da4e3d321b2a6288af8891bbb1c253
| 1,017
|
py
|
Python
|
tests/test_provider_innovationnorway_seq.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
tests/test_provider_innovationnorway_seq.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
tests/test_provider_innovationnorway_seq.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# tests/test_provider_innovationnorway_seq.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:26:53 UTC)
def test_provider_import():
    # Smoke test: the generated provider module must be importable.
    import terrascript.provider.innovationnorway.seq
def test_resource_import():
    # Smoke test: every generated seq resource class must be importable.
    from terrascript.resource.innovationnorway.seq import seq_admin_user
    from terrascript.resource.innovationnorway.seq import seq_api_key
    from terrascript.resource.innovationnorway.seq import seq_license
    from terrascript.resource.innovationnorway.seq import seq_settings
    from terrascript.resource.innovationnorway.seq import seq_user
# TODO: Shortcut imports without namespace for official and supported providers.
# TODO: This has to be moved into a required_providers block.
# def test_version_source():
#
# import terrascript.provider.innovationnorway.seq
#
# t = terrascript.provider.innovationnorway.seq.seq()
# s = str(t)
#
# assert 'https://github.com/innovationnorway/terraform-provider-seq' in s
# assert '0.1.0-alpha.5' in s
| 30.818182
| 80
| 0.775811
| 131
| 1,017
| 5.89313
| 0.480916
| 0.221503
| 0.148964
| 0.252591
| 0.444301
| 0.330311
| 0.330311
| 0
| 0
| 0
| 0
| 0.018349
| 0.142576
| 1,017
| 32
| 81
| 31.78125
| 0.866972
| 0.510324
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03125
| 0
| 1
| 0.25
| true
| 0
| 1
| 0
| 1.25
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
8cee1e1d731cd0c687ab8cc92fb15e409c44fa15
| 6,047
|
py
|
Python
|
pandas_plink/test/test_writer.py
|
Horta/pandas-plink
|
b580df7f8d3c566f6cdf8da512c608fd76ff1f0c
|
[
"MIT"
] | 69
|
2017-07-03T03:34:55.000Z
|
2022-03-08T19:35:35.000Z
|
pandas_plink/test/test_writer.py
|
Horta/pandas-plink
|
b580df7f8d3c566f6cdf8da512c608fd76ff1f0c
|
[
"MIT"
] | 26
|
2017-08-23T13:57:07.000Z
|
2022-02-26T00:19:28.000Z
|
pandas_plink/test/test_writer.py
|
Horta/pandas-plink
|
b580df7f8d3c566f6cdf8da512c608fd76ff1f0c
|
[
"MIT"
] | 12
|
2018-01-06T18:39:13.000Z
|
2022-03-17T01:44:18.000Z
|
from hashlib import md5
from os.path import dirname, join, realpath
from pathlib import Path
from numpy import dtype, nan
from numpy.testing import assert_array_equal, assert_equal
from pandas_plink import read_plink1_bin, write_plink1_bin
def test_write_plink1_bin(tmp_path: Path):
    """Round-trip the sample trio through write_plink1_bin and verify the
    rewritten .bed matches the original byte-for-byte (via md5) and the
    .bim/.fam match known-good digests."""
    def checksum(path):
        # md5 hex digest of the file's raw bytes.
        with open(path, "rb") as handle:
            return md5(handle.read()).hexdigest()

    datafiles = join(dirname(realpath(__file__)), "data_files")
    file_prefix = join(datafiles, "data")
    bim = file_prefix + ".bim"
    bed = file_prefix + ".bed"
    fam = file_prefix + ".fam"
    G = read_plink1_bin(bed, bim, fam, verbose=False)
    assert_equal(G.data.dtype, dtype("float32"))
    snp = G.where((G.chrom == "1") & (G.pos == 72515), drop=True)["snp"].values
    assert_array_equal(snp, ["rs4030300"])
    assert_array_equal(G.where(G.chrom == "1", drop=True).shape, [3, 10])
    assert_array_equal(G.where(G.chrom == "2", drop=True).shape, [3, 0])
    g = G.where((G.fid == "Sample_2") & (G.iid == "Sample_2"), drop=True)
    assert_array_equal(g["trait"].values, ["-9"])
    expected = [
        [2.0, 2.0, nan, nan, 2.0, 2.0, 2.0, 2.0, 1.0, 2.0],
        [2.0, 1.0, nan, nan, 2.0, 2.0, 1.0, 2.0, 2.0, 1.0],
        [1.0, 2.0, nan, 1.0, 2.0, 2.0, 0.0, 2.0, 2.0, 2.0],
    ]
    assert_array_equal(G, expected)
    write_plink1_bin(G, tmp_path / "out.bed")
    assert checksum(tmp_path / "out.bed") == checksum(bed)
    assert checksum(tmp_path / "out.bim") == "88475a9ea7a52e056716e612f44ccb62"
    assert checksum(tmp_path / "out.fam") == "2df7b9a70ab70e95f8b1c774b9022404"
def test_write_plink1_bin_filename(tmp_path: Path):
    """write_plink1_bin must honour explicitly supplied .bed/.bim/.fam
    output paths, producing files identical to the originals."""
    def checksum(path):
        # md5 hex digest of the file's raw bytes.
        with open(path, "rb") as handle:
            return md5(handle.read()).hexdigest()

    datafiles = join(dirname(realpath(__file__)), "data_files")
    file_prefix = join(datafiles, "data")
    bim = file_prefix + ".bim"
    bed = file_prefix + ".bed"
    fam = file_prefix + ".fam"
    G = read_plink1_bin(bed, bim, fam, verbose=False)
    bedfp = tmp_path / "out.bed"
    bimfp = tmp_path / "out.bim"
    famfp = tmp_path / "out.fam"
    write_plink1_bin(G, bedfp, bimfp, famfp)
    assert checksum(tmp_path / "out.bed") == checksum(bed)
    assert checksum(tmp_path / "out.bim") == "88475a9ea7a52e056716e612f44ccb62"
    assert checksum(tmp_path / "out.fam") == "2df7b9a70ab70e95f8b1c774b9022404"
def test_write_plink1_bin_transpose(tmp_path: Path):
    """Writing the transposed genotype array yields the same on-disk files."""

    def _digest(filepath):
        # MD5 of the file's raw bytes, for exact byte-level comparison.
        with open(filepath, "rb") as handle:
            return md5(handle.read()).hexdigest()

    prefix = join(dirname(realpath(__file__)), "data_files", "data")
    G = read_plink1_bin(prefix + ".bed", prefix + ".bim", prefix + ".fam", verbose=False)

    # Transposing only swaps the dimension order; the writer must emit the
    # exact same fileset.
    write_plink1_bin(G.T, tmp_path / "out.bed")

    assert _digest(tmp_path / "out.bed") == _digest(prefix + ".bed")
    assert _digest(tmp_path / "out.bim") == "88475a9ea7a52e056716e612f44ccb62"
    assert _digest(tmp_path / "out.fam") == "2df7b9a70ab70e95f8b1c774b9022404"
def test_write_plink1_bin_sample_major(tmp_path: Path):
    """A sample-major round trip ends up identical to the variant-major source."""

    def _digest(filepath):
        # MD5 of the file's raw bytes, for exact byte-level comparison.
        with open(filepath, "rb") as handle:
            return md5(handle.read()).hexdigest()

    prefix = join(dirname(realpath(__file__)), "data_files", "data")
    G = read_plink1_bin(prefix + ".bed", prefix + ".bim", prefix + ".fam", verbose=False)

    # Write in sample-major layout, read it back, then write again in the
    # default layout; the final output must match the original files.
    write_plink1_bin(G, tmp_path / "tmp.bed", major="sample", verbose=False)
    G = read_plink1_bin(str(tmp_path / "tmp.bed"), verbose=False)
    write_plink1_bin(G, tmp_path / "out.bed", verbose=False)

    assert _digest(tmp_path / "out.bed") == _digest(prefix + ".bed")
    assert _digest(tmp_path / "out.bim") == "88475a9ea7a52e056716e612f44ccb62"
    assert _digest(tmp_path / "out.fam") == "2df7b9a70ab70e95f8b1c774b9022404"
def test_write_plink1_bin_empty_metadata(tmp_path: Path):
    """Dropping every metadata coordinate still produces a valid fileset."""

    def _digest(filepath):
        # MD5 of the file's raw bytes, for exact byte-level comparison.
        with open(filepath, "rb") as handle:
            return md5(handle.read()).hexdigest()

    prefix = join(dirname(realpath(__file__)), "data_files", "data")
    G = read_plink1_bin(prefix + ".bed", prefix + ".bim", prefix + ".fam", verbose=False)

    # Strip every coordinate except the two dimension coordinates, forcing
    # the writer to fall back to default metadata values.
    extras = [name for name in G.coords.keys() if name not in ("sample", "variant")]
    G = G.drop_vars(extras)
    write_plink1_bin(G, tmp_path / "out.bed")

    # Genotypes do not depend on metadata, so the .bed still matches the
    # source; the .bim/.fam digests correspond to the default metadata.
    assert _digest(tmp_path / "out.bed") == _digest(prefix + ".bed")
    assert _digest(tmp_path / "out.bim") == "3dd5c109ba236d8b770d9c29dbc23c14"
    assert _digest(tmp_path / "out.fam") == "8931cbbfa99cee705992642cab0e8c03"
| 32.164894
| 79
| 0.644121
| 870
| 6,047
| 4.245977
| 0.102299
| 0.054954
| 0.059556
| 0.092041
| 0.810233
| 0.790471
| 0.788847
| 0.78235
| 0.750677
| 0.750677
| 0
| 0.06761
| 0.20506
| 6,047
| 187
| 80
| 32.336898
| 0.700853
| 0
| 0
| 0.714286
| 0
| 0
| 0.119398
| 0.052919
| 0
| 0
| 0
| 0
| 0.165414
| 1
| 0.037594
| false
| 0
| 0.045113
| 0
| 0.082707
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
50975a2d0543ba03dd20b6f124cef18241f04fc2
| 484
|
py
|
Python
|
docs/source/examples/cond.if.py
|
lydia-duncan/chplforpyp-docs
|
5952241c4f311adbd35527de805724597c0303df
|
[
"Apache-2.0"
] | 5
|
2016-09-18T17:15:50.000Z
|
2021-04-26T12:25:20.000Z
|
docs/source/examples/cond.if.py
|
lydia-duncan/chplforpyp-docs
|
5952241c4f311adbd35527de805724597c0303df
|
[
"Apache-2.0"
] | 8
|
2016-04-14T22:59:12.000Z
|
2019-10-28T15:25:30.000Z
|
docs/source/examples/cond.if.py
|
lydia-duncan/chplforpyp-docs
|
5952241c4f311adbd35527de805724597c0303df
|
[
"Apache-2.0"
] | 4
|
2016-04-14T22:39:08.000Z
|
2021-11-18T01:12:38.000Z
|
# Demonstration of conditional statements (Python 2): the same prompt handled
# four ways -- bare `if`, `if`/`else`, `if`/`elif`, and `if`/`elif`/`else`.
light = raw_input("Which color is the traffic light?\n")

# Plain `if`: acts only when the answer matches; otherwise nothing happens.
if light == "green":
    print "You can cross the street now."

# `if`/`else`: every non-green answer falls through to the `else` branch.
if light == "green":
    print "You can cross the street now."
else:
    print "Wait for the green light."

# `if`/`elif`: two recognized colors; any other input is silently ignored.
if light == "green":
    print "You can cross the street now."
elif light == "yellow":
    print "CAUTION!"

# `if`/`elif`/`else`: all cases covered, including unrecognized input.
if light == "green":
    print "You can cross the street now."
elif light == "yellow":
    print "CAUTION!"
else:
    print "Do not cross!"
| 18.615385
| 56
| 0.628099
| 72
| 484
| 4.208333
| 0.347222
| 0.092409
| 0.158416
| 0.224422
| 0.706271
| 0.706271
| 0.706271
| 0.706271
| 0.706271
| 0.706271
| 0
| 0
| 0.239669
| 484
| 25
| 57
| 19.36
| 0.82337
| 0
| 0
| 0.823529
| 0
| 0
| 0.490683
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.470588
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
50d94286fb28659b3d9ccdc2066070a4dedfb0b5
| 7,901
|
py
|
Python
|
examples/src/dbnd_examples/tests/documentation/tracking/test_doc_tracking_quickstart.py
|
dmytrostriletskyi/dbnd
|
d4a5f5167523e80439c9d64182cdc87b40cbc48f
|
[
"Apache-2.0"
] | null | null | null |
examples/src/dbnd_examples/tests/documentation/tracking/test_doc_tracking_quickstart.py
|
dmytrostriletskyi/dbnd
|
d4a5f5167523e80439c9d64182cdc87b40cbc48f
|
[
"Apache-2.0"
] | null | null | null |
examples/src/dbnd_examples/tests/documentation/tracking/test_doc_tracking_quickstart.py
|
dmytrostriletskyi/dbnd
|
d4a5f5167523e80439c9d64182cdc87b40cbc48f
|
[
"Apache-2.0"
] | null | null | null |
from pandas import read_csv
from dbnd import log_metric
from dbnd_examples.data import data_repo
class TestDocTrackingQuickstart:
    """Executes the code snippets of the DBND tracking quickstart docs.

    Each test wraps one documentation snippet between ``#### DOC START`` /
    ``#### DOC END`` markers (kept verbatim so the docs can be extracted),
    then runs it to make sure the published example still works.
    """

    def test_step_one(self):
        # Snippet 1: plain data preparation, no DBND tracking yet.
        #### DOC START
        # Python 3.6.8
        from typing import Tuple

        from pandas import DataFrame, Series
        from sklearn import datasets
        from sklearn.model_selection import train_test_split

        def prepare_data() -> Tuple[DataFrame, DataFrame]:
            """ load dataset from sklearn. split into training and testing sets"""
            raw_data = datasets.load_diabetes()

            # create a pandas DataFrame from sklearn dataset
            df = DataFrame(raw_data["data"], columns=raw_data["feature_names"])
            df["target"] = Series(raw_data["target"])

            # split the data into training and testing sets
            training_data, testing_data = train_test_split(df, test_size=0.25)

            return training_data, testing_data

        #### DOC END
        prepare_data()

    def test_integrating_dataframe_tracking(self):
        # Snippet 2: same preparation plus log_dataframe/log_metric calls.
        #### DOC START
        # Python 3.6.8
        from sklearn import datasets
        from sklearn.model_selection import train_test_split
        from pandas import DataFrame, Series
        import logging
        from typing import Tuple

        from dbnd import log_dataframe, log_metric

        logging.basicConfig(level=logging.INFO)

        def prepare_data() -> Tuple[DataFrame, DataFrame]:
            """ load dataset from sklearn. split into training and testing sets"""
            raw_data = datasets.load_diabetes()

            # create a pandas DataFrame from sklearn dataset
            df = DataFrame(raw_data["data"], columns=raw_data["feature_names"])
            df["target"] = Series(raw_data["target"])

            # split the data into training and testing sets
            training_data, testing_data = train_test_split(df, test_size=0.25)

            # use DBND logging features to log DataFrames with histograms
            log_dataframe(
                "training data",
                training_data,
                with_histograms=True,
                with_schema=True,
                with_stats=True,
            )
            log_dataframe("testing_data", testing_data)

            # use DBND logging features to log the mean of s1
            log_metric("mean s1", training_data["s1"].mean())

            return training_data, testing_data

        #### DOC END
        prepare_data()

    def test_step_two(self):
        # Snippet 3: adds model training and evaluation on top of snippet 2.
        #### DOC START
        # Python 3.6.8
        from sklearn import datasets
        from sklearn.model_selection import train_test_split
        from sklearn.metrics import mean_squared_error
        from pandas import DataFrame, Series
        import logging
        from typing import Tuple

        from dbnd import log_dataframe
        from sklearn.linear_model import LinearRegression

        logging.basicConfig(level=logging.INFO)

        def prepare_data() -> Tuple[DataFrame, DataFrame]:
            """ load dataset from sklearn. split into training and testing sets"""
            raw_data = datasets.load_diabetes()

            # create a pandas DataFrame from sklearn dataset
            df = DataFrame(raw_data["data"], columns=raw_data["feature_names"])
            df["target"] = Series(raw_data["target"])

            # split the data into training and testing sets
            training_data, testing_data = train_test_split(df, test_size=0.25)

            # use DBND logging features to log DataFrames with histograms
            log_dataframe(
                "training data",
                training_data,
                with_histograms=True,
                with_schema=True,
                with_stats=True,
            )
            log_dataframe("testing_data", testing_data)

            # use DBND logging features to log the mean of s1
            log_metric("mean s1", training_data["s1"].mean())

            return training_data, testing_data

        def train_model(training_data: DataFrame) -> LinearRegression:
            """ train a linear regression model """
            model = LinearRegression()

            # train a linear regression model
            model.fit(training_data.drop("target", axis=1), training_data["target"])

            return model

        def test_model(model: LinearRegression, testing_data: DataFrame) -> str:
            """ test the model, output mean squared error and r2 score"""
            testing_x = testing_data.drop("target", axis=1)
            testing_y = testing_data["target"]
            predictions = model.predict(testing_x)
            mse = mean_squared_error(testing_y, predictions)
            r2_score = model.score(testing_x, testing_y)

            return f"MSE: {mse}, R2: {r2_score}"

        #### DOC END
        # NOTE(review): prepare_data() runs twice here, so the model is
        # trained and tested on two independent random splits -- confirm
        # this is intentional for the docs example.
        test_model(
            model=train_model(training_data=prepare_data()[0]),
            testing_data=prepare_data()[1],
        )

    def test_tracking_more_metrics(self):
        # Snippet 4: full pipeline with extra log_metric calls on the model.
        #### DOC START
        from sklearn import datasets
        from sklearn.model_selection import train_test_split
        from sklearn.metrics import mean_squared_error
        from pandas import DataFrame, Series
        import logging
        from typing import Tuple

        from dbnd import log_dataframe, log_metric
        from sklearn.linear_model import LinearRegression

        logging.basicConfig(level=logging.INFO)

        def prepare_data() -> Tuple[DataFrame, DataFrame]:
            """ load dataset from sklearn. split into training and testing sets"""
            raw_data = datasets.load_diabetes()

            # create a pandas DataFrame from sklearn dataset
            df = DataFrame(raw_data["data"], columns=raw_data["feature_names"])
            df["target"] = Series(raw_data["target"])

            # split the data into training and testing sets
            training_data, testing_data = train_test_split(df, test_size=0.25)

            # use DBND logging features to log DataFrames with histograms
            log_dataframe(
                "training data",
                training_data,
                with_histograms=True,
                with_schema=True,
                with_stats=True,
            )
            log_dataframe("testing_data", testing_data)

            # use DBND logging features to log the mean of s1
            log_metric("mean s1", training_data["s1"].mean())

            return training_data, testing_data

        def train_model(training_data: DataFrame) -> LinearRegression:
            """ train a linear regression model """
            model = LinearRegression()

            # train a linear regression model
            model.fit(training_data.drop("target", axis=1), training_data["target"])

            # use DBND log crucial details about the regression model with log_metric:
            log_metric("model intercept", model.intercept_)  # logging a numeric
            log_metric("coefficients", model.coef_)  # logging an np array

            return model

        def test_model(model: LinearRegression, testing_data: DataFrame) -> str:
            """ test the model, output mean squared error and r2 score"""
            testing_x = testing_data.drop("target", axis=1)
            testing_y = testing_data["target"]
            predictions = model.predict(testing_x)
            mse = mean_squared_error(testing_y, predictions)
            r2_score = model.score(testing_x, testing_y)

            # use DBND log_metric to capture important model details:
            log_metric("mean squared error:", mse)
            log_metric("r2 score", r2_score)

            return f"MSE: {mse}, R2: {r2_score}"

        # if __name__ == '__main__':
        training_set, testing_set = prepare_data()
        model = train_model(training_set)
        metrics = test_model(model, testing_set)
        #### DOC END
| 37.803828
| 86
| 0.617643
| 911
| 7,901
| 5.147091
| 0.12404
| 0.06142
| 0.035189
| 0.037535
| 0.86479
| 0.854553
| 0.854553
| 0.84517
| 0.839838
| 0.839838
| 0
| 0.008367
| 0.304139
| 7,901
| 208
| 87
| 37.985577
| 0.844489
| 0.18947
| 0
| 0.813008
| 0
| 0
| 0.059048
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.097561
| false
| 0
| 0.235772
| 0
| 0.406504
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
50e70bd042c56dcc1efd90ba07cb713bd8f03c82
| 388
|
py
|
Python
|
KNN/te.py
|
yycc179/ml
|
6231a0233dbce815693abfec7e0567006565555c
|
[
"MIT"
] | null | null | null |
KNN/te.py
|
yycc179/ml
|
6231a0233dbce815693abfec7e0567006565555c
|
[
"MIT"
] | null | null | null |
KNN/te.py
|
yycc179/ml
|
6231a0233dbce815693abfec7e0567006565555c
|
[
"MIT"
] | null | null | null |
import KNN
from numpy import *
dataSet, labels = KNN.createDataSet()
testX = array([1.2, 1.0])
k = 3
outputLabel = KNN.KNNClassify(testX, dataSet, labels, 3)
print 'Your input is:', testX, 'and classified to class: ', outputLabel
testX = array([0.1, 0.3])
outputLabel = KNN.KNNClassify(testX, dataSet, labels, 3)
print 'Your input is:', testX, 'and classified to class: ', outputLabel
| 27.714286
| 71
| 0.708763
| 57
| 388
| 4.824561
| 0.421053
| 0.141818
| 0.109091
| 0.189091
| 0.705455
| 0.705455
| 0.705455
| 0.705455
| 0.705455
| 0.705455
| 0
| 0.033333
| 0.149485
| 388
| 13
| 72
| 29.846154
| 0.8
| 0
| 0
| 0.4
| 0
| 0
| 0.201031
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.2
| null | null | 0.2
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0fd5bfa0dd131d353fe17ec86496560b8ce4cdf0
| 8,122
|
py
|
Python
|
resources/revisions.py
|
axonepro/sdk-ooti
|
146ba758f571352d02daa56349e8b3affd8ca5a9
|
[
"Unlicense"
] | 1
|
2021-03-13T16:04:54.000Z
|
2021-03-13T16:04:54.000Z
|
resources/revisions.py
|
axonepro/sdk-ooti
|
146ba758f571352d02daa56349e8b3affd8ca5a9
|
[
"Unlicense"
] | 7
|
2021-07-21T12:42:39.000Z
|
2022-01-06T10:34:04.000Z
|
resources/revisions.py
|
axonepro/sdk-ooti
|
146ba758f571352d02daa56349e8b3affd8ca5a9
|
[
"Unlicense"
] | 2
|
2021-06-22T08:10:48.000Z
|
2021-09-01T09:16:41.000Z
|
import requests
import json
from .helper import Helper
class Revisions(Helper):
    """Wrappers for the OOTI ``v1/revisions/`` endpoints.

    Each revision family (annexes, documents, fee_items, phases, plans)
    exposes the same three operations: delete one revision by pk, list a
    team/project's revisions, and create a new revision.  The private
    helpers below hold the shared request plumbing, so every public method
    is a thin delegate building the same routes as before.
    """

    def __init__(self, base_url, org_pk, teams_pk, access_token, _csrf_token, headers, pagination):
        super().__init__(base_url, org_pk, teams_pk, access_token, _csrf_token, headers, pagination)

    # ----- shared plumbing ------------------------------------------------

    def _delete_revision(self, resource, pk):
        # DELETE v1/revisions/<resource>/detail/<pk>/
        route = 'v1/revisions/{0}/detail/{1}/'.format(resource, pk)
        response = self.process_request(requests, 'DELETE', self.base_url, route, self.headers, None, None)
        return self.process_response(response)

    def _get_revisions(self, resource, team_pk, project_pk, page):
        # GET one page of v1/revisions/<resource>/<team_pk>/<project_pk>/
        route = 'v1/revisions/{0}/{1}/{2}/?page_size={3}&page={4}'.format(
            resource, team_pk, project_pk, self.pagination, page)
        response = self.process_request(requests, 'GET', self.base_url, route, self.headers, None, None)
        return self.process_response(response, True)

    def _create_revision(self, resource, team_pk, project_pk, data):
        # POST JSON-encoded `data` to v1/revisions/<resource>/<team_pk>/<project_pk>/
        route = 'v1/revisions/{0}/{1}/{2}/'.format(resource, team_pk, project_pk)
        response = self.process_request(requests, 'POST', self.base_url, route, self.headers, None, json.dumps(data))
        return self.process_response(response)

    # ----- annexes --------------------------------------------------------

    def delete_revisions_annexe_detail(self, pk):
        """ Delete revision annexe detail

        Keyword arguments:

        pk -- the pk of the annex revision
        """
        return self._delete_revision('annexes', pk)

    def get_revisions_annexes_team_project(self, team_pk, project_pk, page=1):
        """ Get annexes revisions team project

        Keyword arguments:

        team_pk -- pk of the team
        project_pk -- pk of the project
        """
        return self._get_revisions('annexes', team_pk, project_pk, page)

    def create_annexe_revision(self, team_pk, project_pk, data):
        """ Create an annexe revision team project

        Keyword arguments:

        data -- data create :
            {
                "progress": 0,
                "date": "string",
                "annex": 0,
                "reviewer": 0,
                "validator": 0,
                "is_valid": true
            }
        """
        return self._create_revision('annexes', team_pk, project_pk, data)

    # ----- documents ------------------------------------------------------

    def delete_revisions_document_detail(self, pk):
        """ Delete revision document details

        Keyword arguments:

        pk -- the pk of the document revision
        """
        return self._delete_revision('documents', pk)

    def get_revisions_documents_team_project(self, team_pk, project_pk, page=1):
        """ Get documents revisions team project

        Keyword arguments:

        team_pk -- pk of the team
        project_pk -- pk of the project
        """
        return self._get_revisions('documents', team_pk, project_pk, page)

    def create_document_revision(self, team_pk, project_pk, data):
        """ Create a documents revision team project

        Keyword arguments:

        data -- data create :
            {
                "progress": 0,
                "date": "string",
                "doc": 0,
                "reviewer": 0,
                "validator": 0
            }
        """
        return self._create_revision('documents', team_pk, project_pk, data)

    # ----- fee items ------------------------------------------------------

    def delete_revisions_fee_items_detail(self, pk):
        """ Delete revision fee_items detail

        Keyword arguments:

        pk -- the pk of the fee_items revision
        """
        return self._delete_revision('fee_items', pk)

    def get_revisions_fee_items_team_project(self, team_pk, project_pk, page=1):
        """ Get fee_items revisions team project

        Keyword arguments:

        team_pk -- pk of the team
        project_pk -- pk of the project
        """
        return self._get_revisions('fee_items', team_pk, project_pk, page)

    def create_fee_items_revision(self, team_pk, project_pk, data):
        """ Create an fee_items revision team project

        Keyword arguments:

        data -- data create :
            {
                "fee_item": 0,
                "progress": 0,
                "date": "string",
                "reviewer": 0,
                "validator": 0,
                "is_valid": true
            }
        """
        return self._create_revision('fee_items', team_pk, project_pk, data)

    # ----- phases ---------------------------------------------------------

    def delete_revisions_phases_detail(self, pk):
        """ Delete revision phases detail

        Keyword arguments:

        pk -- the pk of the phases revision
        """
        return self._delete_revision('phases', pk)

    def get_revisions_phases_team_project(self, team_pk, project_pk, page=1):
        """ Get phases revisions team project

        Keyword arguments:

        team_pk -- pk of the team
        project_pk -- pk of the project
        """
        return self._get_revisions('phases', team_pk, project_pk, page)

    def create_phase_revision(self, team_pk, project_pk, data):
        """ Create a phase revision team project

        Keyword arguments:

        data -- data create :
            {
                "phase": 0,
                "progress": 0,
                "date": "string",
                "reviewer": 0,
                "validator": 0,
                "is_valid": true
            }
        """
        return self._create_revision('phases', team_pk, project_pk, data)

    # ----- plans ----------------------------------------------------------

    def delete_revisions_plan_detail(self, pk):
        """ Delete revision plans detail

        Keyword arguments:

        pk -- the pk of the plans revision
        """
        return self._delete_revision('plans', pk)

    def get_revisions_plans_team_project(self, team_pk, project_pk, page=1):
        """ Get plans revisions team project

        Keyword arguments:

        team_pk -- pk of the team
        project_pk -- pk of the project
        """
        return self._get_revisions('plans', team_pk, project_pk, page)

    def create_plan_revision(self, team_pk, project_pk, data):
        """ Create a plans revision team project

        Keyword arguments:

        data -- data create :
            {
                "progress": 0,
                "date": "string",
                "plan_phase": 0,
                "reviewer": 0,
                "validator": 0
            }
        """
        return self._create_revision('plans', team_pk, project_pk, data)
| 34.12605
| 120
| 0.602561
| 966
| 8,122
| 4.879917
| 0.075569
| 0.070004
| 0.055155
| 0.06364
| 0.893509
| 0.860628
| 0.849173
| 0.839839
| 0.774926
| 0.734196
| 0
| 0.012851
| 0.281458
| 8,122
| 238
| 121
| 34.12605
| 0.794894
| 0.264836
| 0
| 0.478261
| 0
| 0
| 0.122746
| 0.110142
| 0
| 0
| 0
| 0
| 0
| 1
| 0.231884
| false
| 0
| 0.043478
| 0
| 0.507246
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
e8af699ed263794c15f4a05ac72fa56e76184f9b
| 8,773
|
py
|
Python
|
LinuxPranks/data/bluescreen.py
|
mishaturnbull/LinuxPranks
|
d141ea62e98930a3995bae85e2509115733db500
|
[
"MIT"
] | null | null | null |
LinuxPranks/data/bluescreen.py
|
mishaturnbull/LinuxPranks
|
d141ea62e98930a3995bae85e2509115733db500
|
[
"MIT"
] | 1
|
2019-01-23T19:18:35.000Z
|
2019-01-23T22:10:34.000Z
|
LinuxPranks/data/bluescreen.py
|
mishaturnbull/LinuxPranks
|
d141ea62e98930a3995bae85e2509115733db500
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
import sys,os
hist=[]
def actually_write():
global hist
sys.stdout.write("\x1b[%sm"%(";".join(hist)))
def o(msg):
global hist
msg="%02i"%msg
hist.append(msg)
actually_write()
def undo():
global hist
hist.pop()
actually_write()
def reset():
global hist
hist=[]
sys.stdout.write("\x1bc\x1b[!p\x1b[?3;4l\x1b[4l\x1b>")
default()
def default():
o(10) # in case someone called o(12) by mistake :P
o(0)
def clear():
sys.stdout.write("\x1b[H\x1b[2J")
def move(y,x):
sys.stdout.write("\x1b[%i;%iH"%(y+1,x+1))
def up():
sys.stdout.write("\x1b[A")
def down():
sys.stdout.write("\x1b[B")
def right():
sys.stdout.write("\x1b[C")
def left():
sys.stdout.write("\x1b[D")
def endline():
sys.stdout.write("\x1b[K")
def cursorinvisible():
sys.stdout.write("\x1b[?25l")
def cursornormal():
sys.stdout.write("\x1b[?12l\x1b[?25h")
def cursorveryvisible():
sys.stdout.write("\x1b[?12;25h")
def deletelines(lines=1):
sys.stdout.write("'\x1b[%iM"%lines)
def scrolldown():
sys.stdout.write("\x1bM")
def cols():
return int(os.popen("tput cols").read().strip())
def lines():
return int(os.popen("tput lines").read().strip())
def savecursor():
sys.stdout.write("\x1b7")
def restorecursor():
sys.stdout.write("\x1b8")
def bold():
o(1)
def hidden():
o(2)
def underline():
o(4)
def blink():
o(5)
def reverse():
o(7)
def formatting():
o(12)
def black():
o(30)
def red():
o(31)
def green():
o(32)
def orange():
o(33)
def blue():
o(34)
def magenta():
o(35)
def cyan():
o(36)
def white():
o(37)
def bgred():
o(41)
def bggreen():
o(42)
def bgorange():
o(43)
def bgblue():
o(44)
def bgmagenta():
o(45)
def bgcyan():
o(46)
def bgwhite():
o(47)
def put(line,indent,msg):
move(line,indent)
sys.stdout.write(msg)
def center(line,msg):
columns=cols()
indent=(columns-len(msg))/2
put(line,indent,msg)
return line,indent+len(msg)
def wrap(*args):
    "wrap(line, [[lmargin,] rmargin,] msg)"
    # Character-level wrap (Python 2): prints msg one character at a time via
    # put(), starting at (line, lmargin), breaking to the next line whenever
    # the printable width cols() - lmargin - rmargin is used up.
    # Returns (last_line, last_column) after the final character.
    # Emulates optional leading arguments: margins default to 0 when omitted.
    if len(args)==4:
        line,lmargin,rmargin,msg=args
    elif len(args)==3:
        line,rmargin,msg=args
        lmargin=0
    elif len(args)==2:
        line,msg=args
        lmargin,rmargin=0,0
    elif len(args)<2:
        raise TypeError,"wrap() takes at least two arguments"
    else:
        raise TypeError,"wrap() takes at most four arguments"
    line_length=cols()-lmargin-rmargin
    msg=list(msg)
    curr_indent=0
    curr_line=line
    char=msg.pop(0)
    while 1:
        if line_length-curr_indent<1:
            # Current line is full: emit a newline and restart at the left margin.
            put(curr_line,lmargin+curr_indent,'\n')
            curr_line+=1
            curr_indent=0
            put(curr_line,lmargin,char)
        else:
            put(curr_line,lmargin+curr_indent,char)
            curr_indent+=1
        if len(msg)==0:
            break
        char=msg.pop(0)
    return curr_line,lmargin+curr_indent
def wordwrap(*args):
    "wordwrap(line, [[lmargin,] rmargin,] msg)"
    # Word-level wrap (Python 2): like wrap(), but breaks on whitespace-split
    # words; a word longer than the whole printable width falls back to a
    # character break.  Returns (last_line, last_column) after the final word.
    # Emulates optional leading arguments: margins default to 0 when omitted.
    if len(args)==4:
        line,lmargin,rmargin,msg=args
    elif len(args)==3:
        line,rmargin,msg=args
        lmargin=0
    elif len(args)==2:
        line,msg=args
        lmargin,rmargin=0,0
    elif len(args)<2:
        raise TypeError,"wordwrap() takes at least two arguments"
    else:
        raise TypeError,"wordwrap() takes at most four arguments"
    line_length=cols()-lmargin-rmargin
    msg=msg.split()
    curr_indent=0
    curr_line=line
    word=msg.pop(0)
    while 1:
        if line_length<len(word): # word is longer than allowed line
            put(curr_line,lmargin,word[:line_length]) # wrap by letter
            word=word[line_length:]
            put(curr_line,lmargin+curr_indent,'\n')
            curr_line+=1
            curr_indent=0
        elif line_length-curr_indent<len(word): # word cannot fit on remainder
            put(curr_line,lmargin+curr_indent,'\n') # of line -- wrap by word
            curr_line+=1
            curr_indent=0
            put(curr_line,lmargin,word)
        else: # word fits on line
            put(curr_line,lmargin+curr_indent,word)
            curr_indent+=len(word)+1
        if len(msg)==0:
            break
        word=msg.pop(0)
    return curr_line,lmargin+curr_indent
def main():
    # Paint the fake "Blue Screen of Death": blue background, bold white text.
    reset()
    bgblue()
    white()
    bold()
    clear()
    # Reverse video for the centered title bar, then undo the reverse attribute.
    reverse()
    center(5,' Windows ')
    undo()
    line,col=wordwrap(7,7,8,"Windows crashed again. I am the Blue Screen of Death. No one hears your screams.")
    print
    # Bulleted "instructions", each word-wrapped under the previous block.
    put(line+2,11,"*")
    line,col=wordwrap(line+2,14,8,"Press any key to terminate the application.")
    put(line+1,11,"*")
    line,col=wordwrap(line+1,14,8,"Press CTRL+ALT+DEL again to restart your computer. You will lose any unsaved data in all applications.")
    line,col=center(line+3,"Press any key to continue")
    move(line,col+1)
    endline()
    # Block until the user presses Enter, then restore the terminal state.
    raw_input()
    reset()
if __name__=="__main__":
main()#!/usr/bin/python
import sys,os
hist=[]
def actually_write():
global hist
sys.stdout.write("\x1b[%sm"%(";".join(hist)))
def o(msg):
global hist
msg="%02i"%msg
hist.append(msg)
actually_write()
def undo():
global hist
hist.pop()
actually_write()
def reset():
global hist
hist=[]
sys.stdout.write("\x1bc\x1b[!p\x1b[?3;4l\x1b[4l\x1b>")
default()
def default():
o(10) # in case someone called o(12) by mistake :P
o(0)
def clear():
sys.stdout.write("\x1b[H\x1b[2J")
def move(y,x):
sys.stdout.write("\x1b[%i;%iH"%(y+1,x+1))
def up():
sys.stdout.write("\x1b[A")
def down():
sys.stdout.write("\x1b[B")
def right():
sys.stdout.write("\x1b[C")
def left():
sys.stdout.write("\x1b[D")
def endline():
sys.stdout.write("\x1b[K")
def cursorinvisible():
sys.stdout.write("\x1b[?25l")
def cursornormal():
sys.stdout.write("\x1b[?12l\x1b[?25h")
def cursorveryvisible():
sys.stdout.write("\x1b[?12;25h")
def deletelines(lines=1):
sys.stdout.write("'\x1b[%iM"%lines)
def scrolldown():
sys.stdout.write("\x1bM")
def cols():
return int(os.popen("tput cols").read().strip())
def lines():
return int(os.popen("tput lines").read().strip())
def savecursor():
sys.stdout.write("\x1b7")
def restorecursor():
sys.stdout.write("\x1b8")
def bold():
o(1)
def hidden():
o(2)
def underline():
o(4)
def blink():
o(5)
def reverse():
o(7)
def formatting():
o(12)
def black():
o(30)
def red():
o(31)
def green():
o(32)
def orange():
o(33)
def blue():
o(34)
def magenta():
o(35)
def cyan():
o(36)
def white():
o(37)
def bgred():
o(41)
def bggreen():
o(42)
def bgorange():
o(43)
def bgblue():
o(44)
def bgmagenta():
o(45)
def bgcyan():
o(46)
def bgwhite():
o(47)
def put(line,indent,msg):
move(line,indent)
sys.stdout.write(msg)
def center(line,msg):
columns=cols()
indent=(columns-len(msg))/2
put(line,indent,msg)
return line,indent+len(msg)
def wrap(*args):
"wrap(line, [[lmargin,] rmargin,] msg)"
if len(args)==4:
line,lmargin,rmargin,msg=args
elif len(args)==3:
line,rmargin,msg=args
lmargin=0
elif len(args)==2:
line,msg=args
lmargin,rmargin=0,0
elif len(args)<2:
raise TypeError,"wrap() takes at least two arguments"
else:
raise TypeError,"wrap() takes at most four arguments"
line_length=cols()-lmargin-rmargin
msg=list(msg)
curr_indent=0
curr_line=line
char=msg.pop(0)
while 1:
if line_length-curr_indent<1:
put(curr_line,lmargin+curr_indent,'\n')
curr_line+=1
curr_indent=0
put(curr_line,lmargin,char)
else:
put(curr_line,lmargin+curr_indent,char)
curr_indent+=1
if len(msg)==0:
break
char=msg.pop(0)
return curr_line,lmargin+curr_indent
def wordwrap(*args):
"wordwrap(line, [[lmargin,] rmargin,] msg)"
if len(args)==4:
line,lmargin,rmargin,msg=args
elif len(args)==3:
line,rmargin,msg=args
lmargin=0
elif len(args)==2:
line,msg=args
lmargin,rmargin=0,0
elif len(args)<2:
raise TypeError,"wordwrap() takes at least two arguments"
else:
raise TypeError,"wordwrap() takes at most four arguments"
line_length=cols()-lmargin-rmargin
msg=msg.split()
curr_indent=0
curr_line=line
word=msg.pop(0)
while 1:
if line_length<len(word): # word is longer than allowed line
put(curr_line,lmargin,word[:line_length]) # wrap by letter
word=word[line_length:]
put(curr_line,lmargin+curr_indent,'\n')
curr_line+=1
curr_indent=0
elif line_length-curr_indent<len(word): # word cannot fit on remainder
put(curr_line,lmargin+curr_indent,'\n') # of line -- wrap by word
curr_line+=1
curr_indent=0
put(curr_line,lmargin,word)
else: # word fits on line
put(curr_line,lmargin+curr_indent,word)
curr_indent+=len(word)+1
if len(msg)==0:
break
word=msg.pop(0)
return curr_line,lmargin+curr_indent
def main():
reset()
bgblue()
white()
bold()
clear()
reverse()
center(5,' Windows ')
undo()
line,col=wordwrap(7,7,8,"Windows crashed again. I am the Blue Screen of Death. No one hears your screams.")
print
put(line+2,11,"*")
line,col=wordwrap(line+2,14,8,"Press any key to terminate the application.")
put(line+1,11,"*")
line,col=wordwrap(line+1,14,8,"Press CTRL+ALT+DEL again to restart your computer. You will lose any unsaved data in all applications.")
line,col=center(line+3,"Press any key to continue")
move(line,col+1)
endline()
raw_input()
reset()
if __name__=="__main__":
main()
| 17.759109
| 136
| 0.672746
| 1,470
| 8,773
| 3.946939
| 0.136054
| 0.05274
| 0.082041
| 0.070321
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0.037136
| 0.1467
| 8,773
| 493
| 137
| 17.795132
| 0.737911
| 0.040579
| 0
| 1
| 0
| 0.005102
| 0.162799
| 0.008092
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.005102
| null | null | 0.005102
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e8b62a365f40cc05198dc6855520272fd2f75b18
| 20,872
|
py
|
Python
|
ve/unit/test_constraint_dist.py
|
furiosa-ai/pyvsc
|
612de9e6244c685a3df1972e4860abfe35b614e1
|
[
"Apache-2.0"
] | 54
|
2020-03-28T17:54:00.000Z
|
2022-03-27T08:53:13.000Z
|
ve/unit/test_constraint_dist.py
|
furiosa-ai/pyvsc
|
612de9e6244c685a3df1972e4860abfe35b614e1
|
[
"Apache-2.0"
] | 124
|
2020-04-10T03:06:03.000Z
|
2022-03-24T18:35:46.000Z
|
ve/unit/test_constraint_dist.py
|
furiosa-ai/pyvsc
|
612de9e6244c685a3df1972e4860abfe35b614e1
|
[
"Apache-2.0"
] | 17
|
2020-04-09T21:47:58.000Z
|
2022-02-23T19:37:37.000Z
|
'''
Created on Aug 9, 2020
@author: ballance
'''
from enum import Enum, auto, IntEnum
import vsc
from vsc_test_case import VscTestCase
class TestConstraintDist(VscTestCase):
def test_dist_in_range(self):
    # A dist constraint over four weighted scalar values: every randomized
    # value of `a` must be one of the listed values (1, 2, 4, 8).
    @vsc.randobj
    class my_c(object):
        def __init__(self):
            self.a = vsc.rand_uint8_t()

        @vsc.constraint
        def dist_a(self):
            vsc.dist(self.a, [
                vsc.weight(1, 10),
                vsc.weight(2, 20),
                vsc.weight(4, 40),
                vsc.weight(8, 80)])

    c = my_c()
    for i in range(100):
        c.randomize()
        self.assertIn(c.a, [1,2,4,8])
def test_dist_static_zero_weight(self):
    # A value with a constant zero weight (2) must never be produced;
    # only the non-zero-weight values (1, 4, 8) may appear.
    @vsc.randobj
    class my_c(object):
        def __init__(self):
            self.a = vsc.rand_uint8_t()

        @vsc.constraint
        def dist_a(self):
            vsc.dist(self.a, [
                vsc.weight(1, 10),
                vsc.weight(2, 0),
                vsc.weight(4, 40),
                vsc.weight(8, 80)])

    c = my_c()
    for i in range(100):
        c.randomize()
        self.assertIn(c.a, [1,4,8])
def test_dist_dynamic_zero_weight(self):
    # Weights taken from non-rand fields: toggling en_one/en_two between
    # randomize() calls enables or disables the corresponding value.
    @vsc.randobj
    class my_c(object):
        def __init__(self):
            self.en_one = vsc.uint8_t()
            self.en_two = vsc.uint8_t()
            self.a = vsc.rand_uint8_t()

        @vsc.constraint
        def dist_a(self):
            vsc.dist(self.a, [
                vsc.weight(1, self.en_one),
                vsc.weight(2, self.en_two),])

    c = my_c()

    # Only value 1 enabled.
    c.en_one = 1
    c.en_two = 0
    for i in range(10):
        c.randomize()
        self.assertEqual(c.a, 1)

    # Only value 2 enabled.
    c.en_one = 0
    c.en_two = 1
    for i in range(10):
        c.randomize()
        self.assertEqual(c.a, 2)

    # Both enabled: either value may appear.
    c.en_one = 1
    c.en_two = 1
    for i in range(10):
        c.randomize()
        self.assertIn(c.a, [1,2])
def test_dist_static_weights(self):
@vsc.randobj
class my_c(object):
def __init__(self):
self.a = vsc.rand_uint8_t()
@vsc.constraint
def dist_a(self):
vsc.dist(self.a, [
vsc.weight(1, 80),
vsc.weight(2, 40),
vsc.weight(3, 20),
vsc.weight(4, 10)])
c = my_c()
hist = 4*[0]
for i in range(100):
c.randomize()
self.assertIn(c.a, [1,2,3,4])
print("a=" + str(c.a))
hist[c.a-1] += 1
print("hist: " + str(hist))
def test_dist_static_weight_ranges(self):
@vsc.randobj
class my_c(object):
def __init__(self):
self.a = vsc.rand_uint8_t()
@vsc.constraint
def dist_a(self):
vsc.dist(self.a, [
vsc.weight((10,15), 80),
vsc.weight((20,30), 40),
vsc.weight((40,70), 20),
vsc.weight((80,100), 10)])
c = my_c()
hist = 4*[0]
for i in range(100):
c.randomize()
print("a=" + str(c.a))
if c.a >= 10 and c.a <= 15:
hist[0] += 1
elif c.a >= 20 and c.a <= 30:
hist[1] += 1
elif c.a >= 40 and c.a <= 70:
hist[2] += 1
elif c.a >= 80 and c.a <= 100:
hist[3] += 1
else:
self.fail("Value " + str(c.a) + " illegal")
print("hist: " + str(hist))
def test_dist_conditional_weights(self):
@vsc.randobj
class my_c(object):
def __init__(self):
self.a = vsc.rand_uint8_t()
self.b = vsc.uint8_t()
@vsc.constraint
def dist_a(self):
with vsc.if_then(self.b == 1):
vsc.dist(self.a, [
vsc.weight((10,15), 80),
vsc.weight((20,30), 40),
vsc.weight((40,70), 20),
vsc.weight((80,100), 10)])
with vsc.else_then:
vsc.dist(self.a, [
vsc.weight((10,15), 10),
vsc.weight((20,30), 20),
vsc.weight((40,70), 40),
vsc.weight((80,100), 80)])
c = my_c()
hist = 4*[0]
c.b = 1
for i in range(100):
c.randomize()
if c.a >= 10 and c.a <= 15:
hist[0] += 1
elif c.a >= 20 and c.a <= 30:
hist[1] += 1
elif c.a >= 40 and c.a <= 70:
hist[2] += 1
elif c.a >= 80 and c.a <= 100:
hist[3] += 1
else:
self.fail("Value " + str(c.a) + " illegal")
print("hist: " + str(hist))
hist = 4*[0]
c.b = 0
for i in range(100):
c.randomize()
if c.a >= 10 and c.a <= 15:
hist[0] += 1
elif c.a >= 20 and c.a <= 30:
hist[1] += 1
elif c.a >= 40 and c.a <= 70:
hist[2] += 1
elif c.a >= 80 and c.a <= 100:
hist[3] += 1
else:
self.fail("Value " + str(c.a) + " illegal")
print("hist: " + str(hist))
def test_dist_conditional_weights_rand(self):
@vsc.randobj
class my_c(object):
def __init__(self):
self.a = vsc.rand_uint8_t()
self.b = vsc.rand_bit_t(1)
@vsc.constraint
def dist_a(self):
with vsc.if_then(self.b == 1):
vsc.dist(self.a, [
vsc.weight((10,15), 80),
vsc.weight((20,30), 40),
vsc.weight((40,70), 20),
vsc.weight((80,100), 10)])
with vsc.else_then:
vsc.dist(self.a, [
vsc.weight((10,15), 10),
vsc.weight((20,30), 20),
vsc.weight((40,70), 40),
vsc.weight((80,100), 80)])
c = my_c()
hist = 4*[0]
c.b = 1
for i in range(100):
c.randomize()
if c.a >= 10 and c.a <= 15:
hist[0] += 1
elif c.a >= 20 and c.a <= 30:
hist[1] += 1
elif c.a >= 40 and c.a <= 70:
hist[2] += 1
elif c.a >= 80 and c.a <= 100:
hist[3] += 1
else:
self.fail("Value " + str(c.a) + " illegal")
print("hist: " + str(hist))
def test_dist_array_elems(self):
@vsc.randobj
class my_c(object):
def __init__(self):
self.a = vsc.rand_list_t(vsc.bit_t(7),4)
@vsc.constraint
def dist_a(self):
with vsc.foreach(self.a, idx=True) as i:
vsc.dist(self.a[i], [
vsc.weight(1, 10),
vsc.weight(2, 20),
vsc.weight(4, 40),
vsc.weight(8, 80)])
my = my_c()
# Randomize
hist = []
for i in range(4):
hist.append([0]*4)
for i in range(400):
my.randomize(debug=0)
for i in range(4):
v = my.a[i]
if v == 1:
hist[i][0] += 1
elif v == 2:
hist[i][1] += 1
elif v == 4:
hist[i][2] += 1
elif v == 8:
hist[i][3] += 1
else:
raise Exception("Value[%d] %d out of range" % (i, v))
for i in range(len(hist)):
print("hist[" + str(i) + "] " + str(hist[i]))
for j in range(len(hist[i])):
if j > 0:
self.assertGreater(hist[i][j], hist[i][j-1])
def test_dist_array_elems_range(self):
class my_e(IntEnum):
A = 0
B = auto()
C = auto()
D = auto()
@vsc.randobj
class my_c(object):
def __init__(self):
self.a = vsc.rand_enum_t(my_e)
#self.a = vsc.rand_list_t(vsc.bit_t(7),15)
#self.a = vsc.rand_uint8_t()
@vsc.constraint
def dist_a(self):
# vsc.dist(self.a, [vsc.weight(vsc.rng(my_e.A, my_e.C),10), vsc.weight(my_e.D, 20)])
vsc.dist(self.a, [vsc.weight(vsc.rng(my_e.A, my_e.C),3), vsc.weight(my_e.D, 1)])
my = my_c()
hist = [0]*4
# Randomize
for i in range(100):
# print(">======= " + str(i) + " ========")
my.randomize()
# print("<======= " + str(i) + " ========")
hist[int(my.a)] += 1
# print("MY ITEM : ",i+1)
# print(my.a)
print("hist: " + str(hist))
for v in hist:
self.assertNotEqual(v, 0)
def test_dist_soft_0(self):
@vsc.randobj
class my_item(object):
def __init__(self):
self.a = vsc.rand_bit_t(8)
@vsc.constraint
def valid_ab_c(self):
pass
# vsc.soft(self.a < 5) #Case A: this is fine since it is a looser bound than the dist constraint
# vsc.soft(self.a > 5) #Case B: this throws off distribution; would have expected this soft constraint to be ignored as if it were commented out and distribution to follow 1:97:1:1)
# self.a > 5 #Case C: this causes a constraint error since this value domain is disjoint from that of the dist constraint
@vsc.constraint
def dist_a(self):
vsc.dist(self.a, [
vsc.weight(0, 1),
vsc.weight(1, 97),
vsc.weight(2, 1),
vsc.weight(3, 1),
#vsc.weight(20, 10000) #Case D: this throws off distribution when combined with the line labeled below (the larger the weight, the worse -- seems that the weights get redistributed unevenly)
])
hist = {}
total_cnt = 0
def add_to_hist(val):
nonlocal hist
nonlocal total_cnt
total_cnt += 1
if val not in hist: hist[val] = 0
hist[val] += 1
def print_dist(val):
nonlocal hist
nonlocal total_cnt
print(f"{val}: {hist[val]} " + "(%.1f%%)" %(hist[val] / total_cnt * 100))
item = my_item()
for i in range(1000):
with item.randomize_with() as it:
pass
#it.a <= 5 #Case D
add_to_hist(item.a)
for key in sorted(hist.keys()):
print_dist(key)
self.assertIn(0, hist.keys())
self.assertIn(1, hist.keys())
self.assertIn(2, hist.keys())
self.assertIn(3, hist.keys())
# We expect 97% distribution, but only worry about
# cases below 90%
self.assertGreater(hist[1]/total_cnt, 0.90)
def test_dist_soft_1(self):
@vsc.randobj
class my_item(object):
def __init__(self):
self.a = vsc.rand_bit_t(8)
@vsc.constraint
def valid_ab_c(self):
pass
vsc.soft(self.a < 5) #Case A: this is fine since it is a looser bound than the dist constraint
# vsc.soft(self.a > 5) #Case B: this throws off distribution; would have expected this soft constraint to be ignored as if it were commented out and distribution to follow 1:97:1:1)
# self.a > 5 #Case C: this causes a constraint error since this value domain is disjoint from that of the dist constraint
@vsc.constraint
def dist_a(self):
vsc.dist(self.a, [
vsc.weight(0, 1),
vsc.weight(1, 97),
vsc.weight(2, 1),
vsc.weight(3, 1),
#vsc.weight(20, 10000) #Case D: this throws off distribution when combined with the line labeled below (the larger the weight, the worse -- seems that the weights get redistributed unevenly)
])
hist = {}
total_cnt = 0
def add_to_hist(val):
nonlocal hist
nonlocal total_cnt
total_cnt += 1
if val not in hist: hist[val] = 0
hist[val] += 1
def print_dist(val):
nonlocal hist
nonlocal total_cnt
print(f"{val}: {hist[val]} " + "(%.1f%%)" %(hist[val] / total_cnt * 100))
item = my_item()
for i in range(1000):
with item.randomize_with() as it:
pass
#it.a <= 5 #Case D
add_to_hist(item.a)
for key in sorted(hist.keys()):
print_dist(key)
self.assertIn(0, hist.keys())
self.assertIn(1, hist.keys())
self.assertIn(2, hist.keys())
self.assertIn(3, hist.keys())
# We expect 97% distribution, but only worry about
# cases below 90%
self.assertGreater(hist[1]/total_cnt, 0.90)
def test_dist_soft_2(self):
@vsc.randobj
class my_item(object):
def __init__(self):
self.a = vsc.rand_bit_t(8)
@vsc.constraint
def valid_ab_c(self):
pass
# vsc.soft(self.a < 5) #Case A: this is fine since it is a looser bound than the dist constraint
vsc.soft(self.a > 5) #Case B: this throws off distribution; would have expected this soft constraint to be ignored as if it were commented out and distribution to follow 1:97:1:1)
# self.a > 5 #Case C: this causes a constraint error since this value domain is disjoint from that of the dist constraint
@vsc.constraint
def dist_a(self):
vsc.dist(self.a, [
vsc.weight(0, 1),
vsc.weight(1, 97),
vsc.weight(2, 1),
vsc.weight(3, 1),
#vsc.weight(20, 10000) #Case D: this throws off distribution when combined with the line labeled below (the larger the weight, the worse -- seems that the weights get redistributed unevenly)
])
hist = {}
total_cnt = 0
def add_to_hist(val):
nonlocal hist
nonlocal total_cnt
total_cnt += 1
if val not in hist: hist[val] = 0
hist[val] += 1
def print_dist(val):
nonlocal hist
nonlocal total_cnt
print(f"{val}: {hist[val]} " + "(%.1f%%)" %(hist[val] / total_cnt * 100))
item = my_item()
for i in range(1000):
with item.randomize_with() as it:
pass
#it.a <= 5 #Case D
add_to_hist(item.a)
for key in sorted(hist.keys()):
print_dist(key)
self.assertIn(0, hist.keys())
self.assertIn(1, hist.keys())
self.assertIn(2, hist.keys())
self.assertIn(3, hist.keys())
# We expect 97% distribution, but only worry about
# cases below 90%
self.assertGreater(hist[1]/total_cnt, 0.90)
def disabled_test_dist_soft_3(self):
@vsc.randobj
class my_item(object):
def __init__(self):
self.a = vsc.rand_bit_t(8)
@vsc.constraint
def valid_ab_c(self):
pass
# vsc.soft(self.a < 5) #Case A: this is fine since it is a looser bound than the dist constraint
# vsc.soft(self.a > 5) #Case B: this throws off distribution; would have expected this soft constraint to be ignored as if it were commented out and distribution to follow 1:97:1:1)
# self.a > 5 #Case C: this causes a constraint error since this value domain is disjoint from that of the dist constraint
@vsc.constraint
def dist_a(self):
vsc.dist(self.a, [
vsc.weight(0, 1),
vsc.weight(1, 97),
vsc.weight(2, 1),
vsc.weight(3, 1),
vsc.weight(20, 10000) #Case D: this throws off distribution when combined with the line labeled below (the larger the weight, the worse -- seems that the weights get redistributed unevenly)
])
hist = {}
total_cnt = 0
def add_to_hist(val):
nonlocal hist
nonlocal total_cnt
total_cnt += 1
if val not in hist: hist[val] = 0
hist[val] += 1
def print_dist(val):
nonlocal hist
nonlocal total_cnt
print(f"{val}: {hist[val]} " + "(%.1f%%)" %(hist[val] / total_cnt * 100))
item = my_item()
for i in range(1000):
with item.randomize_with() as it:
pass
it.a <= 5 #Case D
add_to_hist(item.a)
for key in sorted(hist.keys()):
print_dist(key)
self.assertIn(0, hist.keys())
self.assertIn(1, hist.keys())
self.assertIn(2, hist.keys())
self.assertIn(3, hist.keys())
# We expect 97% distribution, but only worry about
# cases below 90%
self.assertGreater(hist[1]/total_cnt, 0.90)
def test_compound_array(self):
import vsc
@vsc.randobj
class Parent:
def __init__(self):
self.id = 0
self.c1 = vsc.rand_list_t(vsc.attr(Child1()))
for i in range(10):
self.c1.append(vsc.attr(Child1()))
self.c2 = vsc.rand_list_t(vsc.attr(Child2()))
for i in range(10):
self.c2.append(vsc.attr(Child2()))
@vsc.constraint
def parent_c(self):
self.c1[0].a[1].value == self.c2[0].x[1].value # Multi-level
with vsc.foreach(self.c1, idx=True) as i:
self.c1[i].a[0].value == self.c2[i].x[0].value
@vsc.randobj
class Field():
def __init__(self, name, def_value):
self.name = name
self.value = vsc.rand_uint8_t(def_value)
@vsc.randobj
class Child1:
def __init__(self):
self.a = vsc.rand_list_t(vsc.attr(Field('a', 10)))
for i in range(5):
self.a.append(vsc.attr(Field('a', 10)))
self.b = vsc.rand_list_t(vsc.attr(Field('b', 10)))
for i in range(5):
self.b.append(vsc.attr(Field('b', 10)))
@vsc.constraint
def test_c(self):
self.a[0].value < self.a[1].value
# Error here
vsc.dist(self.a[0].value, [
vsc.weight(1, 10),
vsc.weight(2, 20),
vsc.weight(4, 40),
vsc.weight(8, 80)])
@vsc.randobj
class Child2:
def __init__(self):
self.x = vsc.rand_list_t(vsc.attr(Field('x', 10)))
for i in range(5):
self.x.append(vsc.attr(Field('x', 10)))
self.y = vsc.rand_list_t(vsc.attr(Field('y', 10)))
for i in range(5):
self.y.append(vsc.attr(Field('y', 10)))
@vsc.constraint
def test_c(self):
self.x[0].value < self.x[1].value
inst=Parent()
inst.randomize()
print(inst.c1[0].a[0].value)
print(inst.c2[0].x[0].value)
| 32.6125
| 210
| 0.441213
| 2,623
| 20,872
| 3.39878
| 0.069386
| 0.066629
| 0.027818
| 0.030847
| 0.84341
| 0.819518
| 0.811441
| 0.789007
| 0.776332
| 0.776332
| 0
| 0.057569
| 0.440734
| 20,872
| 640
| 211
| 32.6125
| 0.70616
| 0.143973
| 0
| 0.774737
| 0
| 0
| 0.013703
| 0
| 0
| 0
| 0
| 0
| 0.058947
| 1
| 0.124211
| false
| 0.016842
| 0.008421
| 0
| 0.172632
| 0.048421
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2cdd40ca7b0b1274c1cec78a86e3b654cf0c6dfe
| 116
|
py
|
Python
|
aiosql_mysql/__init__.py
|
kamiazya/aiosql-mysql
|
70d1e7ec8648719b3c78c222566a644ecd19feb2
|
[
"MIT"
] | 2
|
2021-10-17T06:44:35.000Z
|
2021-11-03T08:03:49.000Z
|
aiosql_mysql/__init__.py
|
kamiazya/aiosql-mysql
|
70d1e7ec8648719b3c78c222566a644ecd19feb2
|
[
"MIT"
] | 2
|
2021-11-03T14:25:49.000Z
|
2021-12-20T16:20:26.000Z
|
aiosql_mysql/__init__.py
|
kamiazya/aiosql-mysql
|
70d1e7ec8648719b3c78c222566a644ecd19feb2
|
[
"MIT"
] | 2
|
2021-10-17T06:44:37.000Z
|
2021-11-03T08:45:46.000Z
|
from aiosql_mysql.adapters.pymysql import PyMySQLAdaptor
from aiosql_mysql.adapters.asyncmy import AsyncMySQLAdapter
| 58
| 59
| 0.905172
| 14
| 116
| 7.357143
| 0.642857
| 0.194175
| 0.291262
| 0.446602
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060345
| 116
| 2
| 59
| 58
| 0.944954
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
fa3deed98078541a5784dc9f6ef9addd1fc4c561
| 68,607
|
py
|
Python
|
benchmarks/SimResults/_bigLittle_hrrs_splash_tugberk_ml/cmp_radiosity/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_splash_tugberk_ml/cmp_radiosity/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_splash_tugberk_ml/cmp_radiosity/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.104256,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.284576,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.573203,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.419647,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.726677,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.41677,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.56309,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.326925,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 6.54133,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.10829,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0152125,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.148624,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.112506,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.256914,
'Execution Unit/Register Files/Runtime Dynamic': 0.127719,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.387698,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.900774,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 3.28154,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00305876,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00305876,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00268176,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00104777,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00161616,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0104154,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0286988,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.108155,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.401944,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.367343,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 0.916556,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.00373555,
'L2/Runtime Dynamic': 0.00105165,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 4.61088,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.62157,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.109149,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.109149,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 5.12841,
'Load Store Unit/Runtime Dynamic': 2.26901,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.269143,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.538287,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0955199,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0955712,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0659065,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.723629,
'Memory Management Unit/Runtime Dynamic': 0.161478,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 25.9275,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.3778,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0260046,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.20901,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.612814,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 7.24245,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0539229,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.245042,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.29586,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.186764,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.301244,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.152058,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.640065,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.168244,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.67071,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0558943,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00783373,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0766459,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0579352,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.13254,
'Execution Unit/Register Files/Runtime Dynamic': 0.0657689,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.174955,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.407729,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.76398,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.0017424,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.0017424,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00157176,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000638061,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000832244,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00588881,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.014772,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0556946,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.54265,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.206175,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.189164,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.9331,
'Instruction Fetch Unit/Runtime Dynamic': 0.471694,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.00288563,
'L2/Runtime Dynamic': 0.00114344,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.97309,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.83528,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0561627,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0561627,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.2383,
'Load Store Unit/Runtime Dynamic': 1.16842,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.138488,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.276975,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0491497,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.049189,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.220269,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0338111,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.460808,
'Memory Management Unit/Runtime Dynamic': 0.0830001,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 17.8953,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.147032,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0102156,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0914679,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.248716,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.73695,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0549205,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.245825,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.302395,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.192507,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.310507,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.156733,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.659747,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.173811,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.69247,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0571289,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00807461,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0787148,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0597167,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.135844,
'Execution Unit/Register Files/Runtime Dynamic': 0.0677913,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.179563,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.41991,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.79865,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00177575,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00177575,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00160024,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00064878,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000857835,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00600957,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0151117,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0574072,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.65159,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.211672,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.194981,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 6.04732,
'Instruction Fetch Unit/Runtime Dynamic': 0.48518,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.00232581,
'L2/Runtime Dynamic': 0.000783631,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.03097,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.862521,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0580352,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0580351,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.30502,
'Load Store Unit/Runtime Dynamic': 1.20676,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.143105,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.286209,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0507884,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0508206,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.227042,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0347078,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.470397,
'Memory Management Unit/Runtime Dynamic': 0.0855284,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 18.107,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.15028,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0105143,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0943575,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.255152,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.83206,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.051786,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.243364,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.285163,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.184056,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.296876,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.149853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.630785,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.166787,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.64853,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0538734,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00772015,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0749904,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0570952,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.128864,
'Execution Unit/Register Files/Runtime Dynamic': 0.0648154,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.170933,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.399061,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.7434,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00175198,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00175198,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00158106,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000642186,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000820177,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0059052,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0148296,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0548871,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.49129,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.204012,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.186421,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.87924,
'Instruction Fetch Unit/Runtime Dynamic': 0.466055,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.00291108,
'L2/Runtime Dynamic': 0.00101592,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.94,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.819192,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0550921,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0550922,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.20015,
'Load Store Unit/Runtime Dynamic': 1.14598,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.135848,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.271696,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0482128,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0482538,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.217076,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0334528,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.456006,
'Memory Management Unit/Runtime Dynamic': 0.0817066,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 17.7763,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.141717,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0100288,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0901884,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.241934,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.68009,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 0.2981534102049653,
'Runtime Dynamic': 0.2981534102049653,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.0154871,
'Runtime Dynamic': 0.0103358,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 79.7216,
'Peak Power': 112.834,
'Runtime Dynamic': 18.5019,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 79.7061,
'Total Cores/Runtime Dynamic': 18.4916,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.0154871,
'Total L3s/Runtime Dynamic': 0.0103358,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.062363
| 124
| 0.682073
| 8,082
| 68,607
| 5.784088
| 0.067558
| 0.123559
| 0.112948
| 0.093439
| 0.939247
| 0.931375
| 0.917984
| 0.887201
| 0.862964
| 0.84245
| 0
| 0.131915
| 0.224336
| 68,607
| 914
| 125
| 75.062363
| 0.746524
| 0
| 0
| 0.642232
| 0
| 0
| 0.657431
| 0.048099
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fa5420b8cee61ef30d4deb96f8e4aa8cc64a2a5f
| 3,495
|
py
|
Python
|
pyspedas/mms/tests/eis.py
|
ergsc-devel/pyspedas
|
43d985cbcd23c54205453b06e08f8e51d29ab435
|
[
"MIT"
] | 75
|
2019-02-22T12:59:33.000Z
|
2022-02-26T15:33:20.000Z
|
pyspedas/mms/tests/eis.py
|
ergsc-devel/pyspedas
|
43d985cbcd23c54205453b06e08f8e51d29ab435
|
[
"MIT"
] | 40
|
2019-07-02T07:46:34.000Z
|
2022-02-23T21:48:50.000Z
|
pyspedas/mms/tests/eis.py
|
ergsc-devel/pyspedas
|
43d985cbcd23c54205453b06e08f8e51d29ab435
|
[
"MIT"
] | 43
|
2019-02-22T13:03:41.000Z
|
2022-01-24T19:26:59.000Z
|
import unittest
import numpy as np
from pyspedas import mms_load_eis, mms_eis_pad
from pyspedas.utilities.data_exists import data_exists
class EISTestCases(unittest.TestCase):
    """Integration tests for MMS EPD-EIS loading and pitch-angle distributions.

    Each test downloads real MMS data through pyspedas and then checks that
    the expected tplot variables were created via ``data_exists``.
    """

    def test_pad_extof_srvy(self):
        """Survey-mode ExTOF load + PAD for the default probe (MMS1)."""
        mms_load_eis(datatype='extof')
        mms_eis_pad(datatype='extof')
        self.assertTrue(data_exists('mms1_epd_eis_srvy_l2_extof_46-10489keV_proton_flux_omni_pad_spin'))
        self.assertTrue(data_exists('mms1_epd_eis_srvy_l2_extof_46-10489keV_proton_flux_omni_pad'))
        self.assertTrue(data_exists('mms1_epd_eis_srvy_l2_extof_proton_flux_omni_spin'))
        self.assertTrue(data_exists('mms1_epd_eis_srvy_l2_extof_proton_flux_omni'))
        self.assertTrue(data_exists('mms1_epd_eis_srvy_l2_extof_oxygen_energy_range'))
        self.assertTrue(data_exists('mms1_epd_eis_srvy_l2_extof_proton_energy_range'))

    def test_pad_extof_srvy_probe(self):
        """Survey-mode load + PAD for probe 4 (default datatype)."""
        mms_load_eis(probe=4)
        mms_eis_pad(probe=4)
        self.assertTrue(data_exists('mms4_epd_eis_srvy_l2_extof_44-1315keV_proton_flux_omni_pad_spin'))
        self.assertTrue(data_exists('mms4_epd_eis_srvy_l2_extof_44-1315keV_proton_flux_omni_pad'))
        self.assertTrue(data_exists('mms4_epd_eis_srvy_l2_extof_proton_flux_omni_spin'))
        self.assertTrue(data_exists('mms4_epd_eis_srvy_l2_extof_proton_flux_omni'))
        self.assertTrue(data_exists('mms4_epd_eis_srvy_l2_extof_oxygen_energy_range'))
        self.assertTrue(data_exists('mms4_epd_eis_srvy_l2_extof_proton_energy_range'))

    def test_pad_extof_brst(self):
        """Burst-mode ExTOF load + PAD for probe 4 over a one-minute interval."""
        mms_load_eis(probe=4, datatype='extof', data_rate='brst', trange=['2015-10-16/13:06', '2015-10-16/13:07'])
        mms_eis_pad(probe=4, datatype='extof', data_rate='brst')
        self.assertTrue(data_exists('mms4_epd_eis_brst_l2_extof_52-878keV_proton_flux_omni_pad_spin'))
        self.assertTrue(data_exists('mms4_epd_eis_brst_l2_extof_52-878keV_proton_flux_omni_pad'))
        self.assertTrue(data_exists('mms4_epd_eis_brst_l2_extof_proton_flux_omni'))
        self.assertTrue(data_exists('mms4_epd_eis_brst_l2_extof_proton_energy_range'))
        self.assertTrue(data_exists('mms4_epd_eis_brst_l2_extof_oxygen_energy_range'))

    def test_load_phxtof_data(self):
        """PHxTOF survey-mode load over a one-hour interval."""
        mms_load_eis(trange=['2015-10-16', '2015-10-16/01:00'], datatype='phxtof')
        self.assertTrue(data_exists('mms1_epd_eis_srvy_l2_phxtof_proton_flux_omni'))
        self.assertTrue(data_exists('mms1_epd_eis_srvy_l2_phxtof_proton_t5_energy_dminus'))
        self.assertTrue(data_exists('mms1_epd_eis_srvy_l2_phxtof_proton_t5_energy_dplus'))

    def test_load_phxtof_spdf(self):
        """PHxTOF burst-mode load from the SPDF mirror."""
        mms_load_eis(trange=['2015-10-16/13:06', '2015-10-16/13:07'], datatype='phxtof', data_rate='brst', spdf=True)
        self.assertTrue(data_exists('mms1_epd_eis_brst_l2_phxtof_proton_flux_omni'))
        self.assertTrue(data_exists('mms1_epd_eis_brst_l2_phxtof_proton_t5_energy_dminus'))
        self.assertTrue(data_exists('mms1_epd_eis_brst_l2_phxtof_proton_t5_energy_dplus'))

    def test_load_extof_suffix(self):
        """ExTOF burst-mode load with a custom variable-name suffix."""
        mms_load_eis(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', datatype='extof', suffix='_test')
        self.assertTrue(data_exists('mms1_epd_eis_brst_l2_extof_proton_flux_omni_test'))
        self.assertTrue(data_exists('mms1_epd_eis_brst_l2_extof_proton_t5_energy_dminus_test'))
        # Bug fix: the original asserted the dminus variable twice; the second
        # assertion should cover the matching dplus variable (as in the other
        # phxtof tests above).
        self.assertTrue(data_exists('mms1_epd_eis_brst_l2_extof_proton_t5_energy_dplus_test'))
if __name__ == '__main__':
    # Run the EIS test suite when executed directly as a script.
    unittest.main()
| 62.410714
| 128
| 0.780544
| 549
| 3,495
| 4.393443
| 0.116576
| 0.116086
| 0.19403
| 0.258706
| 0.851161
| 0.824627
| 0.810531
| 0.784826
| 0.781509
| 0.750415
| 0
| 0.061588
| 0.11731
| 3,495
| 56
| 129
| 62.410714
| 0.720259
| 0
| 0
| 0.041667
| 0
| 0
| 0.429062
| 0.375286
| 0
| 0
| 0
| 0
| 0.541667
| 1
| 0.125
| false
| 0
| 0.083333
| 0
| 0.229167
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
3afbe8d8c259674ef7eb885252e10f6e7619a93c
| 136
|
py
|
Python
|
contests_atcoder/ddcc2020/ddcc2020_a.py
|
takelifetime/competitive-programming
|
e7cf8ef923ccefad39a1727ca94c610d650fcb76
|
[
"BSD-2-Clause"
] | null | null | null |
contests_atcoder/ddcc2020/ddcc2020_a.py
|
takelifetime/competitive-programming
|
e7cf8ef923ccefad39a1727ca94c610d650fcb76
|
[
"BSD-2-Clause"
] | 1
|
2021-01-02T06:36:51.000Z
|
2021-01-02T06:36:51.000Z
|
contests_atcoder/ddcc2020/ddcc2020_a.py
|
takelifetime/competitive-programming
|
e7cf8ef923ccefad39a1727ca94c610d650fcb76
|
[
"BSD-2-Clause"
] | null | null | null |
# Read the two rankings and print the total prize money:
# each ranking r in 1..3 is worth 100000 * (4 - r); winning both
# categories (r == 1 in each) pays a flat 1000000 instead.
x, y = map(int, input().split())
if x == y == 1:
    print(1000000)
else:
    print(sum(100000 * (4 - r) for r in (x, y) if r < 4))
| 27.2
| 62
| 0.5
| 24
| 136
| 2.833333
| 0.625
| 0.058824
| 0.294118
| 0.323529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.23301
| 0.242647
| 136
| 5
| 62
| 27.2
| 0.427184
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d71904e7670cdf1dffa62153b35dc2a215d422fb
| 8,300
|
py
|
Python
|
src/backdoor/edge_case_attack.py
|
pps-lab/fl-analysis
|
798fc0292d0611ec8900ebdb090b9e282d0df457
|
[
"MIT"
] | 6
|
2021-07-08T13:28:08.000Z
|
2021-12-29T03:18:43.000Z
|
src/backdoor/edge_case_attack.py
|
pps-lab/fl-analysis
|
798fc0292d0611ec8900ebdb090b9e282d0df457
|
[
"MIT"
] | 1
|
2022-03-02T01:52:56.000Z
|
2022-03-10T10:18:11.000Z
|
src/backdoor/edge_case_attack.py
|
pps-lab/fl-analysis
|
798fc0292d0611ec8900ebdb090b9e282d0df457
|
[
"MIT"
] | null | null | null |
import numpy as np
import src.data.ardis as ardis
import src.data.southwest as southwest
class EdgeCaseAttack:
    """Abstract base for edge-case backdoor datasets.

    Subclasses override :meth:`load` to return a relabelled training set, a
    relabelled test set, and the corresponding original-label arrays.
    """

    def load(self) -> ((np.ndarray, np.ndarray), (np.ndarray, np.ndarray), (np.ndarray, np.ndarray)):
        """Return ((x_train, y_train), (x_test, y_test), (orig_y_train, orig_y_test))."""
        raise NotImplementedError("Do not instantiate superclass")
class NorthWesternEdgeCase(EdgeCaseAttack):
    """Airline-plane edge case (images from the ``southwest`` data helper),
    CIFAR-10, 32x32.
    """

    def load(self) -> ((np.ndarray, np.ndarray), (np.ndarray, np.ndarray), (np.ndarray, np.ndarray)):
        """Relabel all plane images to the attack target; keep originals too."""
        (x_train, _), (x_test, _) = southwest.load_data()
        n_train, n_test = x_train.shape[0], x_test.shape[0]
        target = self._classify_as_label()
        source = self._original_label()
        # Constant uint8 label vectors for the poisoned and true classes.
        y_train = np.full(n_train, target, dtype=np.uint8)
        y_test = np.full(n_test, target, dtype=np.uint8)
        orig_y_train = np.full(n_train, source, dtype=np.uint8)
        orig_y_test = np.full(n_test, source, dtype=np.uint8)
        return (x_train, y_train), (x_test, y_test), (orig_y_train, orig_y_test)

    def _classify_as_label(self):
        # Attack target class (CIFAR-10 index 9).
        return 9

    def _original_label(self):
        # True class of the images (CIFAR-10 index 0).
        return 0
class EuropeanSevenEdgeCase(EdgeCaseAttack):
    """Loads european writing style of 7 (from ARDIS dataset)."""

    # Fix: the original return annotation declared a 2-tuple of pairs, but the
    # method returns three pairs (train, test, original labels).
    def load(self) -> ((np.ndarray, np.ndarray), (np.ndarray, np.ndarray), (np.ndarray, np.ndarray)):
        """Return ((x_train, y_train), (x_test, y_test), (orig_y_train, orig_y_test)).

        The ARDIS digit images are relabelled to the attack target class (1);
        the original-label arrays carry the true digit (7).
        """
        (x_train, _), (x_test, _) = ardis.load_data()
        y_train = np.repeat(self._classify_as_label(), x_train.shape[0]).astype(np.uint8)
        y_test = np.repeat(self._classify_as_label(), x_test.shape[0]).astype(np.uint8)
        orig_y_train = np.repeat(self._original_label(), x_train.shape[0]).astype(np.uint8)
        orig_y_test = np.repeat(self._original_label(), x_test.shape[0]).astype(np.uint8)
        return (x_train, y_train), (x_test, y_test), (orig_y_train, orig_y_test)

    def _classify_as_label(self):
        # Attack target label.
        return 1

    def _original_label(self):
        # True digit of the ARDIS images.
        return 7
class EuropeanSevenBaselineEdgeCase(EuropeanSevenEdgeCase):
    """Baseline variant of the european-7 edge case (ARDIS dataset).

    Keeps the true label so one can measure how many of the 7s already
    classify as 7 without any attack.
    """

    def _classify_as_label(self):
        # Baseline: label equals the true digit, so no relabelling occurs.
        return 7
class EuropeanSevenCorrectlyClassifiedOnly(EuropeanSevenEdgeCase):
    """ Loads european writing style of 7 (from ARDIS dataset).
    Does this attack work as well for numbers that are naturally 7s ?
    """

    # Annotation fixed: the superclass load() returns three pairs, not two.
    def load(self) -> ((np.ndarray, np.ndarray), (np.ndarray, np.ndarray), (np.ndarray, np.ndarray)):
        """Subset the training set to pre-computed "correctly classified" indices."""
        (x_train, y_train), (x_test, y_test), (orig_y_train, orig_y_test) = super(EuropeanSevenCorrectlyClassifiedOnly, self).load()
        # Hard-coded indices of ARDIS training samples; presumably samples a
        # reference model already classified correctly -- TODO confirm origin.
        correctly_classified_indices_train = [2, 5, 6, 8, 16, 17, 20, 21, 24, 26, 27, 28, 29, 30, 32, 33, 34, 35, 40, 44, 47, 49, 50, 52, 55, 58, 59, 61, 64, 65, 67, 68, 69, 70, 71, 72, 75, 76, 79, 81, 82, 85, 89, 90, 95, 97, 98, 99, 103, 109, 110, 113, 119, 129, 130, 131, 138, 139, 141, 142, 143, 147, 148, 149, 151, 153, 154, 156, 157, 158, 159, 160, 161, 163, 164, 167, 187, 201, 206, 213, 216, 217, 219, 220, 225, 227, 228, 229, 237, 241, 255, 257, 260, 261, 268, 269, 271, 274, 279, 286, 291, 296, 309, 312, 330, 334, 339, 342, 345, 347, 348, 349, 350, 351, 354, 357, 362, 365, 366, 368, 374, 375, 377, 378, 379, 380, 382, 383, 385, 394, 395, 400, 404, 405, 411, 420, 422, 424, 425, 427, 428, 431, 441, 448, 453, 456, 459, 461, 462, 463, 464, 465, 469, 474, 481, 482, 484, 492, 497, 498, 503, 504, 507, 512, 519, 521, 523, 524, 526, 528, 530, 531, 535, 536, 543, 551, 553, 554, 555, 561, 575, 582, 585, 589, 592, 593, 600, 604, 613, 616, 621, 622, 628, 630, 632, 635, 639, 640, 647, 649, 653, 659]
        # NOTE(review): the test indices below are defined but never applied --
        # the full test set is returned unchanged; confirm this is intended.
        correctly_classified_indices_test = [1, 3, 13, 19, 21, 24, 25, 28, 30, 35, 43, 45, 46, 54, 56, 58, 62, 75, 78, 79, 82, 84, 89, 97]
        # NOTE(review): orig_y_train keeps the full-dataset length while x_train
        # is subset; harmless since every entry is the same constant label.
        return (x_train[correctly_classified_indices_train], y_train[correctly_classified_indices_train]), \
            (x_test, y_test), (orig_y_train, orig_y_test)
class EuropeanSevenCorrectlyClassifiedOnlyRandomized(EuropeanSevenEdgeCase):
    """ Loads european writing style of 7 (from ARDIS dataset).
    Does this attack work as well for numbers that are naturally 7s ?
    """

    # Annotation fixed: the superclass load() returns three pairs, not two.
    def load(self) -> ((np.ndarray, np.ndarray), (np.ndarray, np.ndarray), (np.ndarray, np.ndarray)):
        """Subset the training set to a *random* sample of the same size as the
        "correctly classified" subset, as a control for the non-randomized class.
        """
        (x_train, y_train), (x_test, y_test), (orig_y_train, orig_y_test) = super(EuropeanSevenCorrectlyClassifiedOnlyRandomized, self).load()
        # These literal lists only contribute their *lengths*: both variables
        # are overwritten below with random index draws of the same size.
        correctly_classified_indices_train = [2, 5, 6, 8, 16, 17, 20, 21, 24, 26, 27, 28, 29, 30, 32, 33, 34, 35, 40, 44, 47, 49, 50, 52, 55, 58, 59, 61, 64, 65, 67, 68, 69, 70, 71, 72, 75, 76, 79, 81, 82, 85, 89, 90, 95, 97, 98, 99, 103, 109, 110, 113, 119, 129, 130, 131, 138, 139, 141, 142, 143, 147, 148, 149, 151, 153, 154, 156, 157, 158, 159, 160, 161, 163, 164, 167, 187, 201, 206, 213, 216, 217, 219, 220, 225, 227, 228, 229, 237, 241, 255, 257, 260, 261, 268, 269, 271, 274, 279, 286, 291, 296, 309, 312, 330, 334, 339, 342, 345, 347, 348, 349, 350, 351, 354, 357, 362, 365, 366, 368, 374, 375, 377, 378, 379, 380, 382, 383, 385, 394, 395, 400, 404, 405, 411, 420, 422, 424, 425, 427, 428, 431, 441, 448, 453, 456, 459, 461, 462, 463, 464, 465, 469, 474, 481, 482, 484, 492, 497, 498, 503, 504, 507, 512, 519, 521, 523, 524, 526, 528, 530, 531, 535, 536, 543, 551, 553, 554, 555, 561, 575, 582, 585, 589, 592, 593, 600, 604, 613, 616, 621, 622, 628, 630, 632, 635, 639, 640, 647, 649, 653, 659]
        correctly_classified_indices_test = [1, 3, 13, 19, 21, 24, 25, 28, 30, 35, 43, 45, 46, 54, 56, 58, 62, 75, 78, 79, 82, 84, 89, 97]
        # Random draw without replacement; not seeded, so results vary per call.
        correctly_classified_indices_train = np.random.choice(x_train.shape[0], len(correctly_classified_indices_train), replace=False)
        correctly_classified_indices_test = np.random.choice(x_test.shape[0], len(correctly_classified_indices_test), replace=False)
        # NOTE(review): the randomized *test* indices are also unused -- the
        # full test set is returned unchanged; confirm this is intended.
        return (x_train[correctly_classified_indices_train], y_train[correctly_classified_indices_train]), \
            (x_test, y_test), (orig_y_train, orig_y_test)
# NOTE(review): class name contains a typo ("Validaiton"); kept as-is because
# renaming would break any external references.
class EuropeanSevenValidaitonOriginalSevenOnly(EuropeanSevenEdgeCase):
    """ Loads european writing style of 7 (from ARDIS dataset).
    Does this attack work as well for numbers that are naturally 7s ?
    """

    # Annotation fixed: the superclass load() returns three pairs, not two.
    def load(self) -> ((np.ndarray, np.ndarray), (np.ndarray, np.ndarray), (np.ndarray, np.ndarray)):
        """Subset only the *test* set to the pre-computed indices; the training
        set is returned in full.
        """
        (x_train, y_train), (x_test, y_test), (orig_y_train, orig_y_test) = super(EuropeanSevenValidaitonOriginalSevenOnly, self).load()
        # NOTE(review): the train indices below are defined but never applied --
        # only the test subset is used; confirm this is intended.
        correctly_classified_indices_train = [2, 5, 6, 8, 16, 17, 20, 21, 24, 26, 27, 28, 29, 30, 32, 33, 34, 35, 40, 44, 47, 49, 50, 52, 55, 58, 59, 61, 64, 65, 67, 68, 69, 70, 71, 72, 75, 76, 79, 81, 82, 85, 89, 90, 95, 97, 98, 99, 103, 109, 110, 113, 119, 129, 130, 131, 138, 139, 141, 142, 143, 147, 148, 149, 151, 153, 154, 156, 157, 158, 159, 160, 161, 163, 164, 167, 187, 201, 206, 213, 216, 217, 219, 220, 225, 227, 228, 229, 237, 241, 255, 257, 260, 261, 268, 269, 271, 274, 279, 286, 291, 296, 309, 312, 330, 334, 339, 342, 345, 347, 348, 349, 350, 351, 354, 357, 362, 365, 366, 368, 374, 375, 377, 378, 379, 380, 382, 383, 385, 394, 395, 400, 404, 405, 411, 420, 422, 424, 425, 427, 428, 431, 441, 448, 453, 456, 459, 461, 462, 463, 464, 465, 469, 474, 481, 482, 484, 492, 497, 498, 503, 504, 507, 512, 519, 521, 523, 524, 526, 528, 530, 531, 535, 536, 543, 551, 553, 554, 555, 561, 575, 582, 585, 589, 592, 593, 600, 604, 613, 616, 621, 622, 628, 630, 632, 635, 639, 640, 647, 649, 653, 659]
        # Hard-coded indices of ARDIS test samples used for validation.
        correctly_classified_indices_test = [1, 3, 13, 19, 21, 24, 25, 28, 30, 35, 43, 45, 46, 54, 56, 58, 62, 75, 78, 79, 82, 84, 89, 97]
        return (x_train, y_train), \
            (x_test[correctly_classified_indices_test], y_test[correctly_classified_indices_test]), \
            (orig_y_train, orig_y_test)
class CifarRandomNoiseEdgeCase(EdgeCaseAttack):
    """Random noise, CIFAR-10, 32x32"""

    def load(self) -> ((np.ndarray, np.ndarray), (np.ndarray, np.ndarray), (np.ndarray, np.ndarray)):
        """Generate Gaussian-noise "images" clipped to [-1, 1] with a constant label.

        The same array serves as both train and test set; no original labels
        exist for pure noise, so the last pair is (None, None).
        """
        count = 196  # Same as airline test set
        noise = np.clip(
            np.random.normal(0.0, 1.0, (count, 32, 32, 3)).astype(np.float32),
            -1.0, 1.0)
        labels = np.repeat(self._classify_as_label(), count).astype(np.uint8)
        return (noise, labels), (noise, labels), (None, None)

    def _classify_as_label(self):
        # Constant label assigned to every noise sample.
        return 2
| 72.807018
| 1,003
| 0.643253
| 1,360
| 8,300
| 3.788971
| 0.229412
| 0.059383
| 0.057636
| 0.094314
| 0.822822
| 0.794877
| 0.765185
| 0.763051
| 0.757617
| 0.749078
| 0
| 0.280704
| 0.199518
| 8,300
| 114
| 1,004
| 72.807018
| 0.494883
| 0.081566
| 0
| 0.530303
| 0
| 0
| 0.00385
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.19697
| false
| 0
| 0.045455
| 0.090909
| 0.545455
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 10
|
a8feeb6b84d141d000d2837a66842816461a021a
| 30,279
|
py
|
Python
|
examples/estimator/classifier/RandomForestClassifier/c/basics_embedded_predict_proba.py
|
zvizdo/sklearn-porter
|
54b23c94921c0529516d47222043f2af0a1034ab
|
[
"MIT"
] | null | null | null |
examples/estimator/classifier/RandomForestClassifier/c/basics_embedded_predict_proba.py
|
zvizdo/sklearn-porter
|
54b23c94921c0529516d47222043f2af0a1034ab
|
[
"MIT"
] | null | null | null |
examples/estimator/classifier/RandomForestClassifier/c/basics_embedded_predict_proba.py
|
zvizdo/sklearn-porter
|
54b23c94921c0529516d47222043f2af0a1034ab
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from sklearn.datasets import load_iris
from sklearn.ensemble import RandomForestClassifier
from sklearn_porter import Porter
# Train a small random forest on Iris and emit its C transpilation
# (with embedded data, predict_proba flavor) to stdout.
dataset = load_iris()
features, labels = dataset.data, dataset.target

model = RandomForestClassifier(n_estimators=15,
                               min_samples_split=8, random_state=0)
model.fit(features, labels)

porter = Porter(model, language='c', method="predict_proba")
print(porter.export(embed_data=True))
"""
#include <stdlib.h>
#include <stdio.h>
#include <math.h>
int predict_0(float atts[]) {
int classes[3];
if (features[3] <= 0.75) {
classes[0] = 47;
classes[1] = 0;
classes[2] = 0;
} else {
if (features[2] <= 4.85000038147) {
if (features[3] <= 1.65000009537) {
classes[0] = 0;
classes[1] = 42;
classes[2] = 0;
} else {
if (features[1] <= 3.0) {
classes[0] = 0;
classes[1] = 0;
classes[2] = 3;
} else {
classes[0] = 0;
classes[1] = 1;
classes[2] = 0;
}
}
} else {
if (features[0] <= 6.59999990463) {
classes[0] = 0;
classes[1] = 0;
classes[2] = 27;
} else {
if (features[2] <= 5.19999980927) {
classes[0] = 0;
classes[1] = 1;
classes[2] = 0;
} else {
classes[0] = 0;
classes[1] = 0;
classes[2] = 29;
}
}
}
}
int class_idx = 0;
int class_val = classes[0];
int i;
for (i = 1; i < 3; i++) {
if (classes[i] > class_val) {
class_idx = i;
class_val = classes[i];
}
}
return class_idx;
}
int predict_1(float atts[]) {
int classes[3];
if (features[3] <= 0.800000011921) {
classes[0] = 46;
classes[1] = 0;
classes[2] = 0;
} else {
if (features[3] <= 1.75) {
if (features[2] <= 4.94999980927) {
classes[0] = 0;
classes[1] = 58;
classes[2] = 0;
} else {
if (features[2] <= 5.44999980927) {
if (features[1] <= 2.45000004768) {
classes[0] = 0;
classes[1] = 0;
classes[2] = 2;
} else {
classes[0] = 0;
classes[1] = 3;
classes[2] = 0;
}
} else {
classes[0] = 0;
classes[1] = 0;
classes[2] = 3;
}
}
} else {
if (features[2] <= 4.85000038147) {
if (features[1] <= 3.09999990463) {
classes[0] = 0;
classes[1] = 0;
classes[2] = 2;
} else {
classes[0] = 0;
classes[1] = 1;
classes[2] = 0;
}
} else {
classes[0] = 0;
classes[1] = 0;
classes[2] = 35;
}
}
}
int class_idx = 0;
int class_val = classes[0];
int i;
for (i = 1; i < 3; i++) {
if (classes[i] > class_val) {
class_idx = i;
class_val = classes[i];
}
}
return class_idx;
}
int predict_2(float atts[]) {
int classes[3];
if (features[0] <= 5.55000019073) {
if (features[3] <= 0.800000011921) {
classes[0] = 49;
classes[1] = 0;
classes[2] = 0;
} else {
if (features[3] <= 1.60000002384) {
classes[0] = 0;
classes[1] = 12;
classes[2] = 0;
} else {
classes[0] = 0;
classes[1] = 0;
classes[2] = 1;
}
}
} else {
if (features[3] <= 1.54999995232) {
if (features[3] <= 0.75) {
classes[0] = 2;
classes[1] = 0;
classes[2] = 0;
} else {
if (features[2] <= 5.0) {
classes[0] = 0;
classes[1] = 32;
classes[2] = 0;
} else {
classes[0] = 0;
classes[1] = 0;
classes[2] = 1;
}
}
} else {
if (features[2] <= 4.65000009537) {
classes[0] = 0;
classes[1] = 1;
classes[2] = 0;
} else {
if (features[3] <= 1.70000004768) {
if (features[2] <= 5.44999980927) {
classes[0] = 0;
classes[1] = 1;
classes[2] = 0;
} else {
classes[0] = 0;
classes[1] = 0;
classes[2] = 3;
}
} else {
classes[0] = 0;
classes[1] = 0;
classes[2] = 48;
}
}
}
}
int class_idx = 0;
int class_val = classes[0];
int i;
for (i = 1; i < 3; i++) {
if (classes[i] > class_val) {
class_idx = i;
class_val = classes[i];
}
}
return class_idx;
}
int predict_3(float atts[]) {
int classes[3];
if (features[0] <= 5.44999980927) {
if (features[1] <= 2.80000019073) {
if (features[1] <= 2.45000004768) {
classes[0] = 0;
classes[1] = 5;
classes[2] = 0;
} else {
if (features[0] <= 5.0) {
classes[0] = 0;
classes[1] = 0;
classes[2] = 3;
} else {
classes[0] = 0;
classes[1] = 3;
classes[2] = 0;
}
}
} else {
classes[0] = 41;
classes[1] = 0;
classes[2] = 0;
}
} else {
if (features[0] <= 6.25) {
if (features[3] <= 1.70000004768) {
if (features[3] <= 0.600000023842) {
classes[0] = 3;
classes[1] = 0;
classes[2] = 0;
} else {
if (features[1] <= 2.25) {
if (features[3] <= 1.25) {
classes[0] = 0;
classes[1] = 1;
classes[2] = 0;
} else {
if (features[2] <= 4.75) {
classes[0] = 0;
classes[1] = 3;
classes[2] = 0;
} else {
classes[0] = 0;
classes[1] = 0;
classes[2] = 1;
}
}
} else {
classes[0] = 0;
classes[1] = 37;
classes[2] = 0;
}
}
} else {
classes[0] = 0;
classes[1] = 0;
classes[2] = 8;
}
} else {
if (features[2] <= 4.94999980927) {
classes[0] = 0;
classes[1] = 10;
classes[2] = 0;
} else {
classes[0] = 0;
classes[1] = 0;
classes[2] = 35;
}
}
}
int class_idx = 0;
int class_val = classes[0];
int i;
for (i = 1; i < 3; i++) {
if (classes[i] > class_val) {
class_idx = i;
class_val = classes[i];
}
}
return class_idx;
}
int predict_4(float atts[]) {
int classes[3];
if (features[3] <= 0.699999988079) {
classes[0] = 50;
classes[1] = 0;
classes[2] = 0;
} else {
if (features[3] <= 1.75) {
if (features[2] <= 5.05000019073) {
if (features[2] <= 4.94999980927) {
classes[0] = 0;
classes[1] = 56;
classes[2] = 0;
} else {
if (features[3] <= 1.60000002384) {
classes[0] = 0;
classes[1] = 0;
classes[2] = 1;
} else {
classes[0] = 0;
classes[1] = 3;
classes[2] = 0;
}
}
} else {
if (features[0] <= 6.05000019073) {
classes[0] = 0;
classes[1] = 2;
classes[2] = 0;
} else {
classes[0] = 0;
classes[1] = 0;
classes[2] = 5;
}
}
} else {
classes[0] = 0;
classes[1] = 0;
classes[2] = 33;
}
}
int class_idx = 0;
int class_val = classes[0];
int i;
for (i = 1; i < 3; i++) {
if (classes[i] > class_val) {
class_idx = i;
class_val = classes[i];
}
}
return class_idx;
}
/*
 * Decision trees 5-14 of a 15-tree majority-vote ensemble for a 3-class
 * problem over 4 float attributes (thresholds match the classic Iris data).
 *
 * Bug fix: every tree body indexed an identifier `features` that is not the
 * declared parameter; each tree now reads its own `atts` argument, which is
 * what predict() actually passes in.
 */

/* Store one leaf's per-class vote counts into classes[0..2]. */
static void tree_leaf(int classes[3], int c0, int c1, int c2) {
    classes[0] = c0;
    classes[1] = c1;
    classes[2] = c2;
}

/* Return the index (0..2) of the largest count; ties keep the lowest index. */
static int tree_argmax(const int classes[3]) {
    int best_idx = 0;
    int best_val = classes[0];
    int i;
    for (i = 1; i < 3; i++) {
        if (classes[i] > best_val) {
            best_idx = i;
            best_val = classes[i];
        }
    }
    return best_idx;
}

int predict_5(float atts[]) {
    int classes[3];
    if (atts[3] <= 0.800000011921) {
        tree_leaf(classes, 49, 0, 0);
    } else {
        if (atts[2] <= 4.94999980927) {
            if (atts[0] <= 4.94999980927) {
                if (atts[3] <= 1.35000002384) {
                    tree_leaf(classes, 0, 1, 0);
                } else {
                    tree_leaf(classes, 0, 0, 1);
                }
            } else {
                if (atts[2] <= 4.75) {
                    tree_leaf(classes, 0, 49, 0);
                } else {
                    if (atts[1] <= 2.59999990463) {
                        tree_leaf(classes, 0, 1, 0);
                    } else {
                        if (atts[0] <= 6.05000019073) {
                            tree_leaf(classes, 0, 1, 0);
                        } else {
                            if (atts[3] <= 1.59999990463) {
                                tree_leaf(classes, 0, 1, 0);
                            } else {
                                tree_leaf(classes, 0, 0, 3);
                            }
                        }
                    }
                }
            }
        } else {
            tree_leaf(classes, 0, 0, 44);
        }
    }
    return tree_argmax(classes);
}

int predict_6(float atts[]) {
    int classes[3];
    if (atts[3] <= 0.699999988079) {
        tree_leaf(classes, 46, 0, 0);
    } else {
        if (atts[2] <= 4.75) {
            if (atts[0] <= 4.94999980927) {
                tree_leaf(classes, 0, 0, 2);
            } else {
                tree_leaf(classes, 0, 39, 0);
            }
        } else {
            if (atts[2] <= 5.14999961853) {
                if (atts[0] <= 6.59999990463) {
                    if (atts[3] <= 1.70000004768) {
                        if (atts[3] <= 1.54999995232) {
                            tree_leaf(classes, 0, 0, 2);
                        } else {
                            tree_leaf(classes, 0, 1, 0);
                        }
                    } else {
                        tree_leaf(classes, 0, 0, 19);
                    }
                } else {
                    tree_leaf(classes, 0, 3, 0);
                }
            } else {
                tree_leaf(classes, 0, 0, 38);
            }
        }
    }
    return tree_argmax(classes);
}

int predict_7(float atts[]) {
    int classes[3];
    if (atts[2] <= 2.59999990463) {
        tree_leaf(classes, 58, 0, 0);
    } else {
        if (atts[2] <= 4.75) {
            tree_leaf(classes, 0, 37, 0);
        } else {
            if (atts[2] <= 5.14999961853) {
                if (atts[3] <= 1.75) {
                    if (atts[0] <= 6.5) {
                        if (atts[2] <= 4.94999980927) {
                            tree_leaf(classes, 0, 1, 0);
                        } else {
                            if (atts[0] <= 6.15000009537) {
                                if (atts[3] <= 1.54999995232) {
                                    tree_leaf(classes, 0, 0, 2);
                                } else {
                                    tree_leaf(classes, 0, 1, 0);
                                }
                            } else {
                                tree_leaf(classes, 0, 0, 2);
                            }
                        }
                    } else {
                        tree_leaf(classes, 0, 2, 0);
                    }
                } else {
                    tree_leaf(classes, 0, 0, 13);
                }
            } else {
                tree_leaf(classes, 0, 0, 34);
            }
        }
    }
    return tree_argmax(classes);
}

int predict_8(float atts[]) {
    int classes[3];
    if (atts[3] <= 0.699999988079) {
        tree_leaf(classes, 42, 0, 0);
    } else {
        if (atts[0] <= 6.25) {
            if (atts[2] <= 4.80000019073) {
                if (atts[0] <= 4.94999980927) {
                    if (atts[1] <= 2.45000004768) {
                        tree_leaf(classes, 0, 1, 0);
                    } else {
                        tree_leaf(classes, 0, 0, 3);
                    }
                } else {
                    tree_leaf(classes, 0, 36, 0);
                }
            } else {
                if (atts[3] <= 1.54999995232) {
                    tree_leaf(classes, 0, 0, 4);
                } else {
                    if (atts[3] <= 1.70000004768) {
                        tree_leaf(classes, 0, 2, 0);
                    } else {
                        tree_leaf(classes, 0, 0, 4);
                    }
                }
            }
        } else {
            if (atts[3] <= 1.75) {
                if (atts[2] <= 5.05000019073) {
                    tree_leaf(classes, 0, 15, 0);
                } else {
                    tree_leaf(classes, 0, 0, 4);
                }
            } else {
                tree_leaf(classes, 0, 0, 39);
            }
        }
    }
    return tree_argmax(classes);
}

int predict_9(float atts[]) {
    int classes[3];
    if (atts[2] <= 2.59999990463) {
        tree_leaf(classes, 55, 0, 0);
    } else {
        if (atts[2] <= 4.94999980927) {
            if (atts[0] <= 5.94999980927) {
                tree_leaf(classes, 0, 23, 0);
            } else {
                if (atts[3] <= 1.64999997616) {
                    tree_leaf(classes, 0, 16, 0);
                } else {
                    tree_leaf(classes, 0, 0, 4);
                }
            }
        } else {
            if (atts[0] <= 6.59999990463) {
                tree_leaf(classes, 0, 0, 33);
            } else {
                if (atts[0] <= 6.75) {
                    if (atts[3] <= 2.0) {
                        tree_leaf(classes, 0, 1, 0);
                    } else {
                        tree_leaf(classes, 0, 0, 4);
                    }
                } else {
                    tree_leaf(classes, 0, 0, 14);
                }
            }
        }
    }
    return tree_argmax(classes);
}

int predict_10(float atts[]) {
    int classes[3];
    if (atts[3] <= 0.800000011921) {
        tree_leaf(classes, 52, 0, 0);
    } else {
        if (atts[2] <= 4.75) {
            tree_leaf(classes, 0, 37, 0);
        } else {
            if (atts[3] <= 1.75) {
                if (atts[2] <= 4.94999980927) {
                    tree_leaf(classes, 0, 4, 0);
                } else {
                    if (atts[1] <= 2.65000009537) {
                        tree_leaf(classes, 0, 0, 2);
                    } else {
                        if (atts[3] <= 1.54999995232) {
                            tree_leaf(classes, 0, 0, 2);
                        } else {
                            if (atts[2] <= 5.44999980927) {
                                tree_leaf(classes, 0, 2, 0);
                            } else {
                                tree_leaf(classes, 0, 0, 1);
                            }
                        }
                    }
                }
            } else {
                if (atts[2] <= 4.85000038147) {
                    if (atts[1] <= 3.09999990463) {
                        tree_leaf(classes, 0, 0, 6);
                    } else {
                        tree_leaf(classes, 0, 1, 0);
                    }
                } else {
                    tree_leaf(classes, 0, 0, 43);
                }
            }
        }
    }
    return tree_argmax(classes);
}

int predict_11(float atts[]) {
    int classes[3];
    if (atts[2] <= 2.59999990463) {
        tree_leaf(classes, 47, 0, 0);
    } else {
        if (atts[2] <= 4.75) {
            tree_leaf(classes, 0, 40, 0);
        } else {
            if (atts[2] <= 4.94999980927) {
                if (atts[1] <= 3.04999995232) {
                    if (atts[3] <= 1.59999990463) {
                        tree_leaf(classes, 0, 2, 0);
                    } else {
                        tree_leaf(classes, 0, 0, 7);
                    }
                } else {
                    tree_leaf(classes, 0, 2, 0);
                }
            } else {
                if (atts[0] <= 6.05000019073) {
                    if (atts[2] <= 5.05000019073) {
                        tree_leaf(classes, 0, 0, 4);
                    } else {
                        if (atts[0] <= 5.94999980927) {
                            tree_leaf(classes, 0, 0, 7);
                        } else {
                            tree_leaf(classes, 0, 1, 0);
                        }
                    }
                } else {
                    tree_leaf(classes, 0, 0, 40);
                }
            }
        }
    }
    return tree_argmax(classes);
}

int predict_12(float atts[]) {
    int classes[3];
    if (atts[3] <= 0.800000011921) {
        tree_leaf(classes, 54, 0, 0);
    } else {
        if (atts[1] <= 2.45000004768) {
            if (atts[2] <= 4.75) {
                tree_leaf(classes, 0, 12, 0);
            } else {
                tree_leaf(classes, 0, 0, 1);
            }
        } else {
            if (atts[3] <= 1.60000002384) {
                if (atts[2] <= 5.0) {
                    tree_leaf(classes, 0, 23, 0);
                } else {
                    tree_leaf(classes, 0, 0, 2);
                }
            } else {
                if (atts[3] <= 1.75) {
                    if (atts[0] <= 5.80000019073) {
                        tree_leaf(classes, 0, 0, 3);
                    } else {
                        tree_leaf(classes, 0, 2, 0);
                    }
                } else {
                    tree_leaf(classes, 0, 0, 53);
                }
            }
        }
    }
    return tree_argmax(classes);
}

int predict_13(float atts[]) {
    int classes[3];
    if (atts[0] <= 5.44999980927) {
        if (atts[3] <= 0.800000011921) {
            tree_leaf(classes, 36, 0, 0);
        } else {
            if (atts[2] <= 4.19999980927) {
                tree_leaf(classes, 0, 6, 0);
            } else {
                if (atts[1] <= 2.75) {
                    tree_leaf(classes, 0, 0, 1);
                } else {
                    tree_leaf(classes, 0, 1, 0);
                }
            }
        }
    } else {
        if (atts[2] <= 4.90000009537) {
            if (atts[1] <= 3.59999990463) {
                tree_leaf(classes, 0, 43, 0);
            } else {
                tree_leaf(classes, 7, 0, 0);
            }
        } else {
            if (atts[3] <= 1.70000004768) {
                if (atts[3] <= 1.54999995232) {
                    tree_leaf(classes, 0, 0, 2);
                } else {
                    tree_leaf(classes, 0, 4, 0);
                }
            } else {
                tree_leaf(classes, 0, 0, 50);
            }
        }
    }
    return tree_argmax(classes);
}

int predict_14(float atts[]) {
    int classes[3];
    if (atts[2] <= 2.59999990463) {
        tree_leaf(classes, 52, 0, 0);
    } else {
        if (atts[3] <= 1.70000004768) {
            if (atts[0] <= 7.0) {
                if (atts[2] <= 5.0) {
                    tree_leaf(classes, 0, 48, 0);
                } else {
                    if (atts[0] <= 6.05000019073) {
                        tree_leaf(classes, 0, 1, 0);
                    } else {
                        tree_leaf(classes, 0, 0, 2);
                    }
                }
            } else {
                tree_leaf(classes, 0, 0, 1);
            }
        } else {
            tree_leaf(classes, 0, 0, 46);
        }
    }
    return tree_argmax(classes);
}
int predict (float atts[]) {
int n_classes = 3;
int classes[n_classes];
int i;
for (i = 0; i < n_classes; i++) {
classes[i] = 0;
}
classes[predict_0(atts)]++;
classes[predict_1(atts)]++;
classes[predict_2(atts)]++;
classes[predict_3(atts)]++;
classes[predict_4(atts)]++;
classes[predict_5(atts)]++;
classes[predict_6(atts)]++;
classes[predict_7(atts)]++;
classes[predict_8(atts)]++;
classes[predict_9(atts)]++;
classes[predict_10(atts)]++;
classes[predict_11(atts)]++;
classes[predict_12(atts)]++;
classes[predict_13(atts)]++;
classes[predict_14(atts)]++;
int class_idx = 0;
int class_val = classes[0];
for (i = 1; i < n_classes; i++) {
if (classes[i] > class_val) {
class_idx = i;
class_val = classes[i];
}
}
return class_idx;
}
/*
 * CLI entry point: argv[1..] are the float attributes; prints the predicted
 * class index to stdout.
 *
 * Bug fix: with no arguments the original declared a zero-length VLA
 * (undefined behavior in C); we now print a usage message instead.
 */
int main(int argc, const char * argv[]) {
    if (argc < 2) {
        fprintf(stderr, "usage: %s att0 [att1 ...]\n", argv[0]);
        return 1;
    }
    float atts[argc-1];
    int i;
    for (i = 1; i < argc; i++) {
        atts[i-1] = atof(argv[i]);
    }
    printf("%d", predict(atts));
    return 0;
}
"""
| 29.397087
| 67
| 0.312031
| 2,678
| 30,279
| 3.474981
| 0.048544
| 0.166774
| 0.108317
| 0.192564
| 0.883516
| 0.872878
| 0.851279
| 0.836127
| 0.816677
| 0.794971
| 0
| 0.167402
| 0.56597
| 30,279
| 1,029
| 68
| 29.425656
| 0.540709
| 0.000694
| 0
| 0
| 0
| 0
| 0.03211
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0.083333
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d14ec0706305ef2ffdc86f2e2f965417060bf7a1
| 6,582
|
py
|
Python
|
apps/kg/models/pytorch/score_fun.py
|
hqucms/dgl
|
fc1aa1940d03f193dcd150595e58f3e4dbb720d4
|
[
"Apache-2.0"
] | 1
|
2019-10-09T08:40:20.000Z
|
2019-10-09T08:40:20.000Z
|
apps/kg/models/pytorch/score_fun.py
|
hqucms/dgl
|
fc1aa1940d03f193dcd150595e58f3e4dbb720d4
|
[
"Apache-2.0"
] | null | null | null |
apps/kg/models/pytorch/score_fun.py
|
hqucms/dgl
|
fc1aa1940d03f193dcd150595e58f3e4dbb720d4
|
[
"Apache-2.0"
] | null | null | null |
import torch as th
import torch.nn as nn
import torch.nn.functional as functional
import torch.nn.init as INIT
class TransEScore(nn.Module):
    """TransE scoring: score(h, r, t) = gamma - ||h + r - t||_1."""
    def __init__(self, gamma):
        super(TransEScore, self).__init__()
        # Margin added to the negated translation distance.
        self.gamma = gamma
    def edge_func(self, edges):
        """Score a batch of DGL edges from their 'emb' features."""
        translation = edges.src['emb'] + edges.data['emb'] - edges.dst['emb']
        return {'score': self.gamma - th.norm(translation, p=1, dim=-1)}
    def forward(self, g):
        # Attach per-edge scores to the graph in-place.
        g.apply_edges(lambda e: self.edge_func(e))
    def reset_parameters(self):
        """Nothing to initialize."""
        pass
    def save(self, path, name):
        """Nothing to persist."""
        pass
    def load(self, path, name):
        """Nothing to restore."""
        pass
    def create_neg(self, neg_head):
        """Return a batched scorer against negative heads (True) or tails."""
        gamma = self.gamma
        if neg_head:
            def fn(heads, relations, tails, num_chunks, chunk_size, neg_sample_size):
                dim = heads.shape[1]
                neg = heads.reshape(num_chunks, neg_sample_size, dim)
                pos = (tails - relations).reshape(num_chunks, chunk_size, dim)
                return gamma - th.cdist(pos, neg, p=1)
            return fn
        def fn(heads, relations, tails, num_chunks, chunk_size, neg_sample_size):
            dim = heads.shape[1]
            pos = (heads + relations).reshape(num_chunks, chunk_size, dim)
            neg = tails.reshape(num_chunks, neg_sample_size, dim)
            return gamma - th.cdist(pos, neg, p=1)
        return fn
class DistMultScore(nn.Module):
    """DistMult scoring: score(h, r, t) = sum_d h_d * r_d * t_d."""
    def __init__(self):
        super(DistMultScore, self).__init__()
    def edge_func(self, edges):
        """Score a batch of DGL edges from their 'emb' features."""
        triple = edges.src['emb'] * edges.data['emb'] * edges.dst['emb']
        # TODO: check if there exists minus sign and if gamma should be used here(jin)
        return {'score': th.sum(triple, dim=-1)}
    def reset_parameters(self):
        """Nothing to initialize."""
        pass
    def save(self, path, name):
        """Nothing to persist."""
        pass
    def load(self, path, name):
        """Nothing to restore."""
        pass
    def forward(self, g):
        g.apply_edges(lambda e: self.edge_func(e))
    def create_neg(self, neg_head):
        """Return a batched scorer against negative heads (True) or tails."""
        if neg_head:
            def fn(heads, relations, tails, num_chunks, chunk_size, neg_sample_size):
                dim = heads.shape[1]
                neg = th.transpose(heads.reshape(num_chunks, neg_sample_size, dim), 1, 2)
                pos = (tails * relations).reshape(num_chunks, chunk_size, dim)
                return th.bmm(pos, neg)
            return fn
        def fn(heads, relations, tails, num_chunks, chunk_size, neg_sample_size):
            dim = tails.shape[1]
            neg = th.transpose(tails.reshape(num_chunks, neg_sample_size, dim), 1, 2)
            pos = (heads * relations).reshape(num_chunks, chunk_size, dim)
            return th.bmm(pos, neg)
        return fn
class ComplExScore(nn.Module):
    """ComplEx scoring function.

    Embeddings store [real | imaginary] halves concatenated along the last
    dimension; the score is the real part of the Hermitian product
    <head, rel, conj(tail)>.

    NOTE(review): arithmetic left byte-identical on purpose — the sign
    conventions below are easy to break with a restructure.
    """
    def __init__(self):
        super(ComplExScore, self).__init__()
    def edge_func(self, edges):
        # Split each embedding into its real and imaginary halves.
        real_head, img_head = th.chunk(edges.src['emb'], 2, dim=-1)
        real_tail, img_tail = th.chunk(edges.dst['emb'], 2, dim=-1)
        real_rel, img_rel = th.chunk(edges.data['emb'], 2, dim=-1)
        # Re(<h, r, conj(t)>) expanded into four real-valued products.
        score = real_head * real_tail * real_rel \
                + img_head * img_tail * real_rel \
                + real_head * img_tail * img_rel \
                - img_head * real_tail * img_rel
        # TODO: check if there exists minus sign and if gamma should be used here(jin)
        return {'score': th.sum(score, -1)}
    def reset_parameters(self):
        # No learnable state owned by the score function itself.
        pass
    def save(self, path, name):
        # Nothing to persist.
        pass
    def load(self, path, name):
        # Nothing to restore.
        pass
    def forward(self, g):
        # Attach per-edge scores to the DGL graph in-place.
        g.apply_edges(lambda edges: self.edge_func(edges))
    def create_neg(self, neg_head):
        """Return a batched scorer against negative heads (neg_head=True) or tails."""
        if neg_head:
            def fn(heads, relations, tails, num_chunks, chunk_size, neg_sample_size):
                hidden_dim = heads.shape[1]
                # Fold the relation into the positive (tail) side: the
                # real/imag products below compute tail * conj(rel).
                emb_real = tails[..., :hidden_dim // 2]
                emb_imag = tails[..., hidden_dim // 2:]
                rel_real = relations[..., :hidden_dim // 2]
                rel_imag = relations[..., hidden_dim // 2:]
                real = emb_real * rel_real + emb_imag * rel_imag
                imag = -emb_real * rel_imag + emb_imag * rel_real
                emb_complex = th.cat((real, imag), dim=-1)
                tmp = emb_complex.reshape(num_chunks, chunk_size, hidden_dim)
                heads = heads.reshape(num_chunks, neg_sample_size, hidden_dim)
                heads = th.transpose(heads, 1, 2)
                # (chunks, chunk_size, dim) x (chunks, dim, neg) -> scores.
                return th.bmm(tmp, heads)
            return fn
        else:
            def fn(heads, relations, tails, num_chunks, chunk_size, neg_sample_size):
                hidden_dim = heads.shape[1]
                # Fold the relation into the positive (head) side: head * rel.
                emb_real = heads[..., :hidden_dim // 2]
                emb_imag = heads[..., hidden_dim // 2:]
                rel_real = relations[..., :hidden_dim // 2]
                rel_imag = relations[..., hidden_dim // 2:]
                real = emb_real * rel_real - emb_imag * rel_imag
                imag = emb_real * rel_imag + emb_imag * rel_real
                emb_complex = th.cat((real, imag), dim=-1)
                tmp = emb_complex.reshape(num_chunks, chunk_size, hidden_dim)
                tails = tails.reshape(num_chunks, neg_sample_size, hidden_dim)
                tails = th.transpose(tails, 1, 2)
                return th.bmm(tmp, tails)
            return fn
class RESCALScore(nn.Module):
    """RESCAL bilinear scoring: score(h, R, t) = sum(h * (R @ t)), where each
    relation embedding is a flattened (relation_dim x entity_dim) matrix."""
    def __init__(self, relation_dim, entity_dim):
        super(RESCALScore, self).__init__()
        self.relation_dim = relation_dim
        self.entity_dim = entity_dim
    def edge_func(self, edges):
        """Score a batch of DGL edges from their 'emb' features."""
        head = edges.src['emb']
        rel_mat = edges.data['emb'].view(-1, self.relation_dim, self.entity_dim)
        tail_col = edges.dst['emb'].unsqueeze(-1)
        score = head * th.matmul(rel_mat, tail_col).squeeze(-1)
        # TODO: check if use self.gamma
        return {'score': th.sum(score, dim=-1)}
        # return {'score': self.gamma - th.norm(score, p=1, dim=-1)}
    def reset_parameters(self):
        """Nothing to initialize."""
        pass
    def save(self, path, name):
        """Nothing to persist."""
        pass
    def load(self, path, name):
        """Nothing to restore."""
        pass
    def forward(self, g):
        g.apply_edges(lambda e: self.edge_func(e))
| 36.977528
| 86
| 0.572318
| 857
| 6,582
| 4.172695
| 0.105018
| 0.065436
| 0.065436
| 0.060403
| 0.814318
| 0.767058
| 0.746644
| 0.721197
| 0.696029
| 0.694631
| 0
| 0.009325
| 0.31571
| 6,582
| 177
| 87
| 37.186441
| 0.784636
| 0.036767
| 0
| 0.662069
| 0
| 0
| 0.00884
| 0
| 0
| 0
| 0
| 0.00565
| 0
| 1
| 0.227586
| false
| 0.082759
| 0.027586
| 0
| 0.393103
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
d16ca73ae7b77e6e11a1dc5fb6c4591dac91d748
| 17,440
|
py
|
Python
|
ambra_sdk/service/entrypoints/generated/analytics.py
|
dicomgrid/sdk-python
|
bb12eed311bad73dfb863917df4dc5cbcd91a447
|
[
"Apache-2.0"
] | 9
|
2020-04-20T23:45:44.000Z
|
2021-04-18T11:22:17.000Z
|
ambra_sdk/service/entrypoints/generated/analytics.py
|
dicomgrid/sdk-python
|
bb12eed311bad73dfb863917df4dc5cbcd91a447
|
[
"Apache-2.0"
] | 13
|
2020-02-08T16:15:05.000Z
|
2021-09-13T22:55:28.000Z
|
ambra_sdk/service/entrypoints/generated/analytics.py
|
dicomgrid/sdk-python
|
bb12eed311bad73dfb863917df4dc5cbcd91a447
|
[
"Apache-2.0"
] | 6
|
2020-03-25T17:47:45.000Z
|
2021-04-18T11:22:19.000Z
|
""" Analytics.
Do not edit this file by hand.
This is generated by parsing api.html service doc.
"""
from ambra_sdk.exceptions.service import InvalidCount
from ambra_sdk.exceptions.service import InvalidEndDate
from ambra_sdk.exceptions.service import InvalidParameters
from ambra_sdk.exceptions.service import InvalidPeriod
from ambra_sdk.exceptions.service import MissingFields
from ambra_sdk.exceptions.service import NotFound
from ambra_sdk.exceptions.service import NotPermitted
from ambra_sdk.service.query import QueryO
from ambra_sdk.service.query import AsyncQueryO
class Analytics:
    """Analytics."""
    # NOTE: generated from the api.html service doc (see module docstring);
    # fix the generator rather than hand-editing behavior here.
    def __init__(self, api):
        # api: low-level ambra_sdk API object used to issue queries.
        self._api = api
    def study(
        self,
        count,
        period,
        time_zone,
        account_id=None,
        customfield_param=None,
        end_date=None,
        modality=None,
        namespace_id=None,
    ):
        """Study.
        :param count: The number of periods to get
        :param period: The time period (day|week|month|year)
        :param time_zone: The report's time zone. Time zone selection order: current user's time zone, time_zone parameter, UTC by default.
        :param account_id: account_id
        :param customfield_param: Filter analytics by a subset of study customfields (optional)
        :param end_date: The end date, default is today if not passed (optional)
        :param modality: Filter analytics by modality (optional)
        :param namespace_id: namespace_id
        """
        request_data = {
           'account_id': account_id,
           'count': count,
           'end_date': end_date,
           'modality': modality,
           'namespace_id': namespace_id,
           'period': period,
           'time_zone': time_zone,
        }
        if customfield_param is not None:
            # Custom fields are sent as 'customfield-<key>' request parameters.
            customfield_param_dict = {'{prefix}{k}'.format(prefix='customfield-', k=k): v for k,v in customfield_param.items()}
            request_data.update(customfield_param_dict)
        errors_mapping = {}
        errors_mapping[('INVALID_COUNT', None)] = InvalidCount('Invalid or excessive count value')
        errors_mapping[('INVALID_END_DATE', None)] = InvalidEndDate('An invalid period')
        errors_mapping[('INVALID_PARAMETERS', None)] = InvalidParameters('Only pass a account_id or namespace_id')
        errors_mapping[('INVALID_PERIOD', None)] = InvalidPeriod('An invalid period')
        errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields')
        errors_mapping[('NOT_FOUND', None)] = NotFound('The account or namespace can not be found')
        errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to view analytics for this account or namespace')
        query_data = {
            'api': self._api,
            'url': '/analytics/study',
            'request_data': request_data,
            'errors_mapping': errors_mapping,
            'required_sid': True,
        }
        return QueryO(**query_data)
    def patient_portal(
        self,
        account_id,
        count,
        period,
        time_zone,
        end_date=None,
        patient_id=None,
    ):
        """Patient portal.
        :param account_id: The account id
        :param count: The number of periods to get
        :param period: The time period (day|week|month|year)
        :param time_zone: The report's time zone. Time zone selection order: current user's time zone, time_zone parameter, UTC by default.
        :param end_date: The end date, default is today if not passed (optional)
        :param patient_id: Patient filter (optional)
        """
        request_data = {
           'account_id': account_id,
           'count': count,
           'end_date': end_date,
           'patient_id': patient_id,
           'period': period,
           'time_zone': time_zone,
        }
        errors_mapping = {}
        errors_mapping[('INVALID_COUNT', None)] = InvalidCount('Invalid or excessive count value')
        errors_mapping[('INVALID_END_DATE', None)] = InvalidEndDate('An invalid period')
        errors_mapping[('INVALID_PERIOD', None)] = InvalidPeriod('An invalid period')
        errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields')
        errors_mapping[('NOT_FOUND', None)] = NotFound('The account or patient can not be found')
        errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to view analytics for this account or namespace')
        query_data = {
            'api': self._api,
            'url': '/analytics/patient/portal',
            'request_data': request_data,
            'errors_mapping': errors_mapping,
            'required_sid': True,
        }
        return QueryO(**query_data)
    def radreport(
        self,
        account_id,
        count,
        period,
        time_zone,
        end_date=None,
        namespace_id=None,
        user_id=None,
    ):
        """Radreport.
        :param account_id: The account id
        :param count: The number of periods to get
        :param period: The time period (day|week|month|year)
        :param time_zone: The report's time zone. Time zone selection order: current user's time zone, time_zone parameter, UTC by default.
        :param end_date: The end date, default is today if not passed (optional)
        :param namespace_id: Namespace filter (optional)
        :param user_id: User filter (optional)
        """
        request_data = {
           'account_id': account_id,
           'count': count,
           'end_date': end_date,
           'namespace_id': namespace_id,
           'period': period,
           'time_zone': time_zone,
           'user_id': user_id,
        }
        errors_mapping = {}
        errors_mapping[('INVALID_COUNT', None)] = InvalidCount('Invalid or excessive count value')
        errors_mapping[('INVALID_END_DATE', None)] = InvalidEndDate('An invalid period')
        errors_mapping[('INVALID_PERIOD', None)] = InvalidPeriod('An invalid period')
        errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields')
        errors_mapping[('NOT_FOUND', None)] = NotFound('The account or patient can not be found')
        errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to view analytics for this account or namespace')
        query_data = {
            'api': self._api,
            'url': '/analytics/radreport',
            'request_data': request_data,
            'errors_mapping': errors_mapping,
            'required_sid': True,
        }
        return QueryO(**query_data)
    def user(
        self,
        account_id,
        count,
        period,
        time_zone,
        end_date=None,
        end_time=None,
        namespace_id=None,
        user_id=None,
    ):
        """User.
        :param account_id: The account id
        :param count: The number of periods to get
        :param period: The time period (hour|day|week|month|year)
        :param time_zone: The report's time zone. Time zone selection order: current user's time zone, time_zone parameter, UTC by default.
        :param end_date: The end date, for backwards compatibility (optional)
        :param end_time: The end date and time, default is now if not passed (optional)
        :param namespace_id: Namespace filter (optional)
        :param user_id: User filter (optional)
        """
        request_data = {
           'account_id': account_id,
           'count': count,
           'end_date': end_date,
           'end_time': end_time,
           'namespace_id': namespace_id,
           'period': period,
           'time_zone': time_zone,
           'user_id': user_id,
        }
        errors_mapping = {}
        errors_mapping[('INVALID_COUNT', None)] = InvalidCount('Invalid or excessive count value')
        errors_mapping[('INVALID_END_DATE', None)] = InvalidEndDate('An invalid period')
        errors_mapping[('INVALID_PERIOD', None)] = InvalidPeriod('An invalid period')
        errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields')
        errors_mapping[('NOT_FOUND', None)] = NotFound('The account or patient can not be found')
        errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to view analytics for this account or namespace')
        query_data = {
            'api': self._api,
            'url': '/analytics/user',
            'request_data': request_data,
            'errors_mapping': errors_mapping,
            'required_sid': True,
        }
        return QueryO(**query_data)
class AsyncAnalytics:
    """AsyncAnalytics."""
    # NOTE: generated async mirror of Analytics — identical request building,
    # but queries are returned as AsyncQueryO instead of QueryO.
    def __init__(self, api):
        # api: low-level ambra_sdk API object used to issue queries.
        self._api = api
    def study(
        self,
        count,
        period,
        time_zone,
        account_id=None,
        customfield_param=None,
        end_date=None,
        modality=None,
        namespace_id=None,
    ):
        """Study.
        :param count: The number of periods to get
        :param period: The time period (day|week|month|year)
        :param time_zone: The report's time zone. Time zone selection order: current user's time zone, time_zone parameter, UTC by default.
        :param account_id: account_id
        :param customfield_param: Filter analytics by a subset of study customfields (optional)
        :param end_date: The end date, default is today if not passed (optional)
        :param modality: Filter analytics by modality (optional)
        :param namespace_id: namespace_id
        """
        request_data = {
           'account_id': account_id,
           'count': count,
           'end_date': end_date,
           'modality': modality,
           'namespace_id': namespace_id,
           'period': period,
           'time_zone': time_zone,
        }
        if customfield_param is not None:
            # Custom fields are sent as 'customfield-<key>' request parameters.
            customfield_param_dict = {'{prefix}{k}'.format(prefix='customfield-', k=k): v for k,v in customfield_param.items()}
            request_data.update(customfield_param_dict)
        errors_mapping = {}
        errors_mapping[('INVALID_COUNT', None)] = InvalidCount('Invalid or excessive count value')
        errors_mapping[('INVALID_END_DATE', None)] = InvalidEndDate('An invalid period')
        errors_mapping[('INVALID_PARAMETERS', None)] = InvalidParameters('Only pass a account_id or namespace_id')
        errors_mapping[('INVALID_PERIOD', None)] = InvalidPeriod('An invalid period')
        errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields')
        errors_mapping[('NOT_FOUND', None)] = NotFound('The account or namespace can not be found')
        errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to view analytics for this account or namespace')
        query_data = {
            'api': self._api,
            'url': '/analytics/study',
            'request_data': request_data,
            'errors_mapping': errors_mapping,
            'required_sid': True,
        }
        return AsyncQueryO(**query_data)
    def patient_portal(
        self,
        account_id,
        count,
        period,
        time_zone,
        end_date=None,
        patient_id=None,
    ):
        """Patient portal.
        :param account_id: The account id
        :param count: The number of periods to get
        :param period: The time period (day|week|month|year)
        :param time_zone: The report's time zone. Time zone selection order: current user's time zone, time_zone parameter, UTC by default.
        :param end_date: The end date, default is today if not passed (optional)
        :param patient_id: Patient filter (optional)
        """
        request_data = {
           'account_id': account_id,
           'count': count,
           'end_date': end_date,
           'patient_id': patient_id,
           'period': period,
           'time_zone': time_zone,
        }
        errors_mapping = {}
        errors_mapping[('INVALID_COUNT', None)] = InvalidCount('Invalid or excessive count value')
        errors_mapping[('INVALID_END_DATE', None)] = InvalidEndDate('An invalid period')
        errors_mapping[('INVALID_PERIOD', None)] = InvalidPeriod('An invalid period')
        errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields')
        errors_mapping[('NOT_FOUND', None)] = NotFound('The account or patient can not be found')
        errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to view analytics for this account or namespace')
        query_data = {
            'api': self._api,
            'url': '/analytics/patient/portal',
            'request_data': request_data,
            'errors_mapping': errors_mapping,
            'required_sid': True,
        }
        return AsyncQueryO(**query_data)
    def radreport(
        self,
        account_id,
        count,
        period,
        time_zone,
        end_date=None,
        namespace_id=None,
        user_id=None,
    ):
        """Radreport.
        :param account_id: The account id
        :param count: The number of periods to get
        :param period: The time period (day|week|month|year)
        :param time_zone: The report's time zone. Time zone selection order: current user's time zone, time_zone parameter, UTC by default.
        :param end_date: The end date, default is today if not passed (optional)
        :param namespace_id: Namespace filter (optional)
        :param user_id: User filter (optional)
        """
        request_data = {
           'account_id': account_id,
           'count': count,
           'end_date': end_date,
           'namespace_id': namespace_id,
           'period': period,
           'time_zone': time_zone,
           'user_id': user_id,
        }
        errors_mapping = {}
        errors_mapping[('INVALID_COUNT', None)] = InvalidCount('Invalid or excessive count value')
        errors_mapping[('INVALID_END_DATE', None)] = InvalidEndDate('An invalid period')
        errors_mapping[('INVALID_PERIOD', None)] = InvalidPeriod('An invalid period')
        errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields')
        errors_mapping[('NOT_FOUND', None)] = NotFound('The account or patient can not be found')
        errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to view analytics for this account or namespace')
        query_data = {
            'api': self._api,
            'url': '/analytics/radreport',
            'request_data': request_data,
            'errors_mapping': errors_mapping,
            'required_sid': True,
        }
        return AsyncQueryO(**query_data)
    def user(
        self,
        account_id,
        count,
        period,
        time_zone,
        end_date=None,
        end_time=None,
        namespace_id=None,
        user_id=None,
    ):
        """User.
        :param account_id: The account id
        :param count: The number of periods to get
        :param period: The time period (hour|day|week|month|year)
        :param time_zone: The report's time zone. Time zone selection order: current user's time zone, time_zone parameter, UTC by default.
        :param end_date: The end date, for backwards compatibility (optional)
        :param end_time: The end date and time, default is now if not passed (optional)
        :param namespace_id: Namespace filter (optional)
        :param user_id: User filter (optional)
        """
        request_data = {
           'account_id': account_id,
           'count': count,
           'end_date': end_date,
           'end_time': end_time,
           'namespace_id': namespace_id,
           'period': period,
           'time_zone': time_zone,
           'user_id': user_id,
        }
        errors_mapping = {}
        errors_mapping[('INVALID_COUNT', None)] = InvalidCount('Invalid or excessive count value')
        errors_mapping[('INVALID_END_DATE', None)] = InvalidEndDate('An invalid period')
        errors_mapping[('INVALID_PERIOD', None)] = InvalidPeriod('An invalid period')
        errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields')
        errors_mapping[('NOT_FOUND', None)] = NotFound('The account or patient can not be found')
        errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to view analytics for this account or namespace')
        query_data = {
            'api': self._api,
            'url': '/analytics/user',
            'request_data': request_data,
            'errors_mapping': errors_mapping,
            'required_sid': True,
        }
        return AsyncQueryO(**query_data)
| 42.43309
| 182
| 0.625172
| 2,088
| 17,440
| 5.028736
| 0.061303
| 0.091619
| 0.049524
| 0.036571
| 0.977619
| 0.977619
| 0.948571
| 0.948571
| 0.948571
| 0.948571
| 0
| 0
| 0.277695
| 17,440
| 411
| 183
| 42.43309
| 0.833532
| 0.220413
| 0
| 0.909091
| 1
| 0.026936
| 0.312082
| 0.003845
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03367
| false
| 0.006734
| 0.030303
| 0
| 0.097643
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0f8211ebf66a16ccb23cf29a1bf934b8539b8775
| 4,452
|
py
|
Python
|
tcrdist/tests/test_tree.py
|
agartland/tcrdist3
|
34f8d50e7448b2bf7cf7cd9ab9a2d80759f47240
|
[
"MIT"
] | 26
|
2020-12-28T17:37:01.000Z
|
2022-01-29T01:31:13.000Z
|
tcrdist/tests/test_tree.py
|
agartland/tcrdist3
|
34f8d50e7448b2bf7cf7cd9ab9a2d80759f47240
|
[
"MIT"
] | 31
|
2020-08-17T22:17:57.000Z
|
2022-03-18T23:47:34.000Z
|
tcrdist/tests/test_tree.py
|
agartland/tcrdist3
|
34f8d50e7448b2bf7cf7cd9ab9a2d80759f47240
|
[
"MIT"
] | 7
|
2020-08-18T23:55:40.000Z
|
2021-09-22T18:15:54.000Z
|
"""
tcrdist3 hierahical clustering trees alpha/beta fill tests including
generating SVGs these tests take some time.
"""
def test_init_tree():
    """Smoke test: build a paired alpha/beta tree from 10 sampled mouse
    clones and check the HTML report is written."""
    import os
    import pandas as pd
    from tcrdist.repertoire import TCRrep
    from tcrdist.tree import TCRtree
    clones = pd.read_csv("dash.csv").sample(10).reset_index(drop = True)
    rep = TCRrep(cell_df = clones,
                 organism = 'mouse',
                 chains = ['alpha','beta'],
                 db_file = 'alphabeta_gammadelta_db.tsv')
    tree = TCRtree(tcrrep = rep, html_name = 'dash.mouse.ab.tree.html')
    for col in ('ref_size_olga_beta',
                'ref_unique_olga_beta',
                'percent_missing_olga_beta'):
        tree.default_plot_hclust_props['tooltip_cols'].append(col)
    tree.build_tree()
    assert os.path.isfile('dash.mouse.ab.tree.html')
def test_init_tree_beta():
    """Build a beta-chain-only hierarchical clustering tree for mouse
    clones and check that the HTML report is written to disk."""
    import os
    import pandas as pd
    from tcrdist.repertoire import TCRrep
    from tcrdist.tree import TCRtree
    # Small random subsample keeps this integration test fast.
    df = pd.read_csv("dash.csv").sample(10).reset_index(drop=True)
    tr = TCRrep(cell_df=df,
                organism='mouse',
                chains=['beta'],
                db_file='alphabeta_gammadelta_db.tsv')
    tcrtree = TCRtree(tcrrep=tr, html_name='dash.mouse.b.tree.html')
    # The olga_* tooltip columns appended below are only produced when
    # combine_olga is enabled (the human tests set it explicitly before
    # appending the same columns); enable it here too for consistency.
    tcrtree.combine_olga = True
    for col in ('ref_size_olga_beta',
                'ref_unique_olga_beta',
                'percent_missing_olga_beta'):
        tcrtree.default_plot_hclust_props['tooltip_cols'].append(col)
    tcrtree.build_tree()
    assert os.path.isfile('dash.mouse.b.tree.html')
def test_init_tree_alpha():
    """Build an alpha-chain-only hierarchical tree for mouse clones and
    verify the HTML report is created."""
    import os
    import pandas as pd
    from tcrdist.repertoire import TCRrep
    from tcrdist.tree import TCRtree
    html_out = 'dash.mouse.a.tree.html'
    clone_df = pd.read_csv("dash.csv").sample(10).reset_index(drop=True)
    repertoire = TCRrep(cell_df=clone_df,
                        organism='mouse',
                        chains=['alpha'],
                        db_file='alphabeta_gammadelta_db.tsv')
    tree = TCRtree(tcrrep=repertoire, html_name=html_out)
    tree.build_tree()
    assert os.path.isfile(html_out)
def test_init_tree_human_beta():
    """Build a beta-chain hierarchical tree for human clones, with OLGA
    reference columns merged in, and verify the HTML report is created."""
    import os
    import pandas as pd
    from tcrdist.repertoire import TCRrep
    from tcrdist.tree import TCRtree
    html_out = 'dash.human.b.tree.html'
    clone_df = pd.read_csv("dash_human.csv").sample(10).reset_index(drop=True)
    repertoire = TCRrep(cell_df=clone_df,
                        organism='human',
                        chains=['beta'],
                        db_file='alphabeta_gammadelta_db.tsv')
    tree = TCRtree(tcrrep=repertoire, html_name=html_out)
    tree.combine_olga = True
    # Surface the OLGA reference statistics as hover tooltips.
    extra_tooltips = ['ref_size_olga_beta',
                      'ref_unique_olga_beta',
                      'percent_missing_olga_beta']
    tree.default_plot_hclust_props['tooltip_cols'].extend(extra_tooltips)
    tree.build_tree()
    assert os.path.isfile(html_out)
def test_init_tree_human_alpha():
    """Build an alpha-chain hierarchical tree for human clones, with OLGA
    reference columns merged in, and verify the HTML report is created."""
    import os
    import pandas as pd
    from tcrdist.repertoire import TCRrep
    from tcrdist.tree import TCRtree
    html_out = 'dash.human.a.tree.html'
    clone_df = pd.read_csv("dash_human.csv").sample(10).reset_index(drop=True)
    repertoire = TCRrep(cell_df=clone_df,
                        organism='human',
                        chains=['alpha'],
                        db_file='alphabeta_gammadelta_db.tsv')
    tree = TCRtree(tcrrep=repertoire, html_name=html_out)
    tree.combine_olga = True
    # Surface the OLGA reference statistics as hover tooltips.
    extra_tooltips = ['ref_size_olga_alpha',
                      'ref_unique_olga_alpha',
                      'percent_missing_olga_alpha']
    tree.default_plot_hclust_props['tooltip_cols'].extend(extra_tooltips)
    tree.build_tree()
    assert os.path.isfile(html_out)
def test_init_tree_human_alpha_beta():
    """Build an alpha/beta hierarchical tree for human clones and verify
    the HTML report is created."""
    import os
    import pandas as pd
    from tcrdist.repertoire import TCRrep
    from tcrdist.tree import TCRtree
    html_out = 'dash.human.ab.tree.html'
    clone_df = pd.read_csv("dash_human.csv").sample(10).reset_index(drop=True)
    repertoire = TCRrep(cell_df=clone_df,
                        organism='human',
                        chains=['alpha', 'beta'],
                        db_file='alphabeta_gammadelta_db.tsv')
    tree = TCRtree(tcrrep=repertoire, html_name=html_out)
    tree.build_tree()
    assert os.path.isfile(html_out)
| 37.411765
| 90
| 0.678347
| 602
| 4,452
| 4.754153
| 0.121262
| 0.046122
| 0.075472
| 0.100629
| 0.959469
| 0.952481
| 0.944444
| 0.935709
| 0.903913
| 0.883648
| 0
| 0.003676
| 0.20575
| 4,452
| 118
| 91
| 37.728814
| 0.805713
| 0.025157
| 0
| 0.771739
| 0
| 0
| 0.22194
| 0.127483
| 0
| 0
| 0
| 0
| 0.065217
| 1
| 0.065217
| false
| 0
| 0.26087
| 0
| 0.326087
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0f850ad058e5f9a059c0c091d5859a6a080c9eaa
| 27,043
|
py
|
Python
|
tests/test_poly.py
|
tulth/gcode_gen
|
d6e276f2074d4fe66755b2ae06c5b4d85583c563
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_poly.py
|
tulth/gcode_gen
|
d6e276f2074d4fe66755b2ae06c5b4d85583c563
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_poly.py
|
tulth/gcode_gen
|
d6e276f2074d4fe66755b2ae06c5b4d85583c563
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# Sample Test passing with nose and pytest
import unittest
import numpy as np
from gcode_gen import point
from gcode_gen import poly
from gcode_gen.debug import DBGP
from math import sqrt
# Shared polygon fixtures for the tests below.
# square 2x2 centered at origin in x/y plane
test_square = [[-1, -1, 0], [1, -1, 0], [1, 1, 0], [-1, 1, 0], ]
# square 2x2 centered at origin in x/y plane with an extra collinear point
test_square_colin = [[-1, -1, 0], [0, -1, 0], [1, -1, 0], [1, 1, 0], [-1, 1, 0], ]
# square 2x2 centered at origin in x/y plane, but with one vertex lifted off the z=0 plane
test_square_skew = [[-1, -1, 0], [1, -1, 0], [1, 1, 1], [-1, 1, 0], ]
# square squashed down so all points lie along a line
test_all_colin = [[-1, -1, 0], [1, -1, 0], [2, -1, 0], [0, -1, 0], ]
# concave but simple outline: a 10x10 square with a notch cut into its right edge
square_notched = ((0, 0),
                  (10, 0),
                  (10, 4),
                  (8, 4),
                  (8, 6),
                  (10, 6),
                  (10, 10),
                  (0, 10),
                  )
# botched = not simple (the notch extends past the left edge, so the outline self-intersects)
square_botched = ((0, 0),
                  (10, 0),
                  (10, 4),
                  (-2, 4),
                  (-2, 6),
                  (10, 6),
                  (10, 10),
                  (0, 10),
                  )
class TestPolyCircleVerts(unittest.TestCase):
    """Tests for poly.poly_circle_verts: vertices of a regular n-gon on
    the unit circle in the z=0 plane."""

    def _check(self, n, expected):
        # Shared assertion: compare generated vertex array to expectation.
        actual = poly.poly_circle_verts(n).arr
        self.assertTrue(np.allclose(actual, expected),
                        'actual: {}\nexpect:{}'.format(actual, expected))

    def test_poly_circle_verts_3(self):
        """Equilateral triangle starting at (0, 1, 0)."""
        expected = ((0, 1, 0),
                    (-0.866025403784, -.5, 0),
                    (0.866025403784, -.5, 0), )
        self._check(3, expected)

    def test_poly_circle_verts_4(self):
        """Square with vertices at the 45-degree diagonals."""
        expected = ((-0.7071067811850, 0.7071067811850, 0),
                    (-0.7071067811850, -0.7071067811850, 0),
                    (0.7071067811850, -0.7071067811850, 0),
                    (0.7071067811850, 0.7071067811850, 0),
                    )
        self._check(4, expected)
class TestPolyPolygon(unittest.TestCase):
    """Tests for poly.Polygon: construction validation and the vertex,
    edge, corner, coplanarity, collinearity, and bounds accessors."""

    def test_too_few_vertices(self):
        """Polygon must reject initializers with fewer than 3 vertices."""
        actual = ""
        try:
            sqr = poly.Polygon(point.PointList(test_square[0:0]))
        except poly.PolygonError as err:
            actual = str(err)
        expect = "Polygon vertices initializer must have at least 3 vertices"
        self.assertEqual(actual, expect)
        # one vertex is also too few
        actual = ""
        try:
            sqr = poly.Polygon(point.PointList(test_square[0:1]))
        except poly.PolygonError as err:
            actual = str(err)
        expect = "Polygon vertices initializer must have at least 3 vertices"
        self.assertEqual(actual, expect)

    def test_get_vertices(self):
        """get_vertices() round-trips the construction vertices."""
        sqr = poly.Polygon(point.PointList(test_square))
        actual = sqr.get_vertices()
        expect = test_square
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_get_edges(self):
        """get_edges() returns consecutive vertex pairs, wrapping around."""
        sqr = poly.Polygon(point.PointList(test_square))
        actual = sqr.get_edges()
        # expect are the 4 edges
        expect = [[[-1, -1, 0], [1, -1, 0]],
                  [[1, -1, 0], [1, 1, 0]],
                  [[1, 1, 0], [-1, 1, 0]],
                  [[-1, 1, 0], [-1, -1, 0]],
                  ]
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_get_corners(self):
        """get_corners() returns each pair of adjacent edges."""
        sqr = poly.Polygon(point.PointList(test_square))
        actual = sqr.get_corners()
        # expect are the 4 corners
        expect = [[[[-1, 1, 0], [-1, -1, 0]], [[-1, -1, 0], [1, -1, 0]]],
                  [[[-1, -1, 0], [1, -1, 0]], [[1, -1, 0], [1, 1, 0]]],
                  [[[1, -1, 0], [1, 1, 0]], [[1, 1, 0], [-1, 1, 0]]],
                  [[[1, 1, 0], [-1, 1, 0]], [[-1, 1, 0], [-1, -1, 0]]],
                  ]
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_get_corner_vectors(self):
        """get_corner_vectors() returns the edge direction vectors at each corner."""
        sqr = poly.Polygon(point.PointList(test_square))
        actual = sqr.get_corner_vectors()
        # expect are the 4 corners
        expect = [[[0, -2, 0], [2, 0, 0]],
                  [[2, 0, 0], [0, 2, 0]],
                  [[0, 2, 0], [-2, 0, 0]],
                  [[-2, 0, 0], [0, -2, 0]],
                  ]
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_get_corner_vector_crossproducts(self):
        """Cross products of corner vectors; zero z indicates a collinear corner."""
        sqr = poly.Polygon(point.PointList(test_square))
        actual = sqr.get_corner_vector_crossproducts()
        expect = [[0, 0, 4],
                  [0, 0, 4],
                  [0, 0, 4],
                  [0, 0, 4],
                  ]
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))
        # fixture with an extra collinear point yields one zero cross product
        sqrc = poly.Polygon(point.PointList(test_square_colin))
        actual = sqrc.get_corner_vector_crossproducts()
        expect = [[0, 0, 2],
                  [0, 0, 0],
                  [0, 0, 2],
                  [0, 0, 4],
                  [0, 0, 4],
                  ]
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_is_coplanar(self):
        """is_coplanar(): true for flat outlines, false when a vertex leaves the plane."""
        sqr = poly.Polygon(point.PointList(test_square))
        self.assertTrue(sqr.is_coplanar())
        #
        sqrc = poly.Polygon(point.PointList(test_square_colin))
        self.assertTrue(sqrc.is_coplanar())
        #
        sqr_skew = poly.Polygon(point.PointList(test_square_skew))
        self.assertFalse(sqr_skew.is_coplanar())
        #
        tp = poly.Polygon(point.PointList(test_all_colin))
        self.assertTrue(tp.is_coplanar())
        #
        #
        tp = poly.Polygon(point.PointList(square_notched))
        self.assertTrue(tp.is_coplanar())

    def test_is_all_collinear(self):
        """is_all_collinear(): true only when every vertex lies on one line."""
        sqr = poly.Polygon(point.PointList(test_square))
        self.assertFalse(sqr.is_all_collinear())
        #
        sqrc = poly.Polygon(point.PointList(test_square_colin))
        self.assertFalse(sqrc.is_all_collinear())
        #
        sqr_skew = poly.Polygon(point.PointList(test_square_skew))
        self.assertFalse(sqr_skew.is_all_collinear())
        #
        tp = poly.Polygon(point.PointList(test_all_colin))
        self.assertTrue(tp.is_all_collinear())

    def test_bounds(self):
        """bounds gives per-axis [min, max] extents."""
        sqr = poly.Polygon(point.PointList(test_square))
        actual = sqr.bounds
        expect = [[-1, 1],
                  [-1, 1],
                  [0, 0], ]
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))
        # skewed square has z extent [0, 1]
        sqr = poly.Polygon(point.PointList(test_square_skew))
        actual = sqr.bounds
        expect = [[-1, 1],
                  [-1, 1],
                  [0, 1], ]
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    # def test_get_orientations(self):
    #     # tp = poly.CoplanarPolygon(point.PointList(test_square))
    #     # actual = tp.get_orientations()
    #     # print(actual)
    #     # expect = [1, 1, 1, 1]
    #     # self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))
    #     # #
    #     tp = poly.CoplanarPolygon(point.PointList(test_square_colin))
    #     actual = tp.get_orientations()
    #     print(actual)
    #     expect = [1, 0, 1, 1, 1]
    #     self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))
    #     # #
class TestCoplanarPolygon(unittest.TestCase):
    """Tests for poly.CoplanarPolygon: construction validation, normal
    computation (including after rotation), convexity, and simplicity."""

    def test_get_normal(self):
        """get_normal() returns the (unit) normal of the polygon's plane."""
        tp = poly.CoplanarPolygon(point.PointList(test_square))
        actual = tp.get_normal()
        expect = [0, 0, 1]
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))
        # an extra collinear vertex must not change the normal
        tp = poly.CoplanarPolygon(point.PointList(test_square_colin))
        actual = tp.get_normal()
        expect = [0, 0, 1]
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))
        # non-coplanar vertices must be rejected at construction
        actual = ""
        try:
            tp = poly.CoplanarPolygon(point.PointList(test_square_skew))
        except poly.PolygonError as err:
            actual = str(err)
        expect = "CoplanarPolygon vertices must be coplanar"
        self.assertEqual(actual, expect)
        # fully collinear vertices must also be rejected
        actual = ""
        try:
            tp = poly.CoplanarPolygon(point.PointList(test_all_colin))
        except poly.PolygonError as err:
            actual = str(err)
        expect = "CoplanarPolygon vertices must not all be collinear"
        self.assertEqual(actual, expect)
        # try rotated square
        tp = poly.CoplanarPolygon(point.PointList(test_square))
        tp.rotate(np.pi / 4, x=0, y=1, z=0)
        tp = tp.apply_transforms()
        actual = tp.get_normal()
        expect = [sqrt(2) / 2, 0, sqrt(2) / 2]
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))
        # try another rotated square
        tp = poly.CoplanarPolygon(point.PointList(test_square))
        tp.rotate(np.pi / 2, x=1, y=0, z=0)
        tp = tp.apply_transforms()
        actual = tp.get_normal()
        expect = [0, -1, 0]
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))
        # a concave but planar outline still has a well-defined normal
        tp = poly.CoplanarPolygon(point.PointList(square_notched))
        actual = tp.get_normal()
        expect = [0, 0, 1]
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_is_convex(self):
        """is_convex(): true for the square, false for notched or self-crossing outlines."""
        #
        tp = poly.CoplanarPolygon(point.PointList(test_square))
        self.assertTrue(tp.is_convex())
        #
        tp = poly.CoplanarPolygon(point.PointList(square_notched))
        self.assertFalse(tp.is_convex())
        #
        tp = poly.CoplanarPolygon(point.PointList(square_botched))
        self.assertFalse(tp.is_convex())

    def test_is_simple(self):
        """is_simple(): true unless the outline self-intersects."""
        #
        tp = poly.CoplanarPolygon(point.PointList(test_square))
        self.assertTrue(tp.is_simple())
        #
        tp = poly.CoplanarPolygon(point.PointList(square_notched))
        self.assertTrue(tp.is_simple())
        #
        tp = poly.CoplanarPolygon(point.PointList(square_botched))
        self.assertFalse(tp.is_simple())
class TestSimplePolygon(unittest.TestCase):
    """Tests for poly.SimplePolygon.shrink/grow (polygon offsetting).

    Each test offsets a fixture inward (shrink) or outward (grow) and
    compares the resulting vertex array to hand-computed expectations.
    The *_rev variants use the same outline with reversed winding, to
    check that offset direction is independent of vertex order.
    """

    def test_shrink_square(self):
        """Shrink a 10x10 square by 1: each edge moves inward by 1."""
        SQR_VERTS = [[0., 0.],
                     [10., 0.],
                     [10., 10.],
                     [0., 10.]]
        tp = poly.SimplePolygon(point.PointList(SQR_VERTS)).shrink(1)
        actual = tp.arr
        expect = [[1., 1.],
                  [9., 1.],
                  [9., 9.],
                  [1., 9.]]
        expect = point.PointList(expect).arr
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_grow_square(self):
        """Grow a 10x10 square by 0.5: each edge moves outward by 0.5."""
        SQR_VERTS = [[0., 0.],
                     [10., 0.],
                     [10., 10.],
                     [0., 10.]]
        tp = poly.SimplePolygon(point.PointList(SQR_VERTS)).grow(0.5)
        actual = tp.arr
        expect = [[-0.5, -0.5],
                  [10.5, -0.5],
                  [10.5, 10.5],
                  [-0.5, 10.5]]
        expect = point.PointList(expect).arr
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_shrink_square_rev(self):
        """Shrink with reversed winding: result matches the forward case."""
        SQR_VERTS = [[0., 0.],
                     [0., 10.],
                     [10., 10.],
                     [10., 0.]]
        tp = poly.SimplePolygon(point.PointList(SQR_VERTS)).shrink(1)
        actual = tp.arr
        expect = [[1., 1.],
                  [1., 9.],
                  [9., 9.],
                  [9., 1.]]
        expect = point.PointList(expect).arr
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_grow_square_rev(self):
        """Grow with reversed winding: result matches the forward case."""
        SQR_VERTS = [[0., 0.],
                     [0., 10.],
                     [10., 10.],
                     [10., 0.]]
        tp = poly.SimplePolygon(point.PointList(SQR_VERTS)).grow(0.5)
        actual = tp.arr
        expect = [[-0.5, -0.5],
                  [-0.5, 10.5],
                  [10.5, 10.5],
                  [10.5, -0.5]]
        expect = point.PointList(expect).arr
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_shrink_square_collinear(self):
        """A collinear mid-edge vertex is preserved and offset with the edge."""
        SQR_VERTS = [[0., 0.],
                     [5., 0.],
                     [10., 0.],
                     [10., 10.],
                     [0., 10.]]
        tp = poly.SimplePolygon(point.PointList(SQR_VERTS)).shrink(1)
        actual = tp.arr
        expect = [[1., 1.],
                  [5., 1.],
                  [9., 1.],
                  [9., 9.],
                  [1., 9.]]
        expect = point.PointList(expect).arr
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_grow_square_collinear(self):
        """Growing keeps the collinear mid-edge vertex on its (offset) edge."""
        SQR_VERTS = [[0., 0.],
                     [5., 0.],
                     [10., 0.],
                     [10., 10.],
                     [0., 10.]]
        tp = poly.SimplePolygon(point.PointList(SQR_VERTS)).grow(0.5)
        actual = tp.arr
        expect = [[-0.5, -0.5],
                  [5., -0.5],
                  [10.5, -0.5],
                  [10.5, 10.5],
                  [-0.5, 10.5]]
        expect = point.PointList(expect).arr
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_shrink_square_collinear_rev(self):
        """Collinear vertex, reversed winding."""
        SQR_VERTS = [[0., 0.],
                     [0., 10.],
                     [10., 10.],
                     [10., 0.],
                     [5., 0.]]
        tp = poly.SimplePolygon(point.PointList(SQR_VERTS)).shrink(1)
        actual = tp.arr
        expect = [[1., 1.],
                  [1., 9.],
                  [9., 9.],
                  [9., 1.],
                  [5., 1.]]
        expect = point.PointList(expect).arr
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_grow_square_collinear_rev(self):
        """Collinear vertex, reversed winding, growing."""
        SQR_VERTS = [[0., 0.],
                     [0., 10.],
                     [10., 10.],
                     [10., 0.],
                     [5., 0.]]
        tp = poly.SimplePolygon(point.PointList(SQR_VERTS)).grow(0.5)
        actual = tp.arr
        expect = [[-0.5, -0.5],
                  [-0.5, 10.5],
                  [10.5, 10.5],
                  [10.5, -0.5],
                  [5., -0.5]]
        expect = point.PointList(expect).arr
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_shrink_hexagon(self):
        """Shrinking a regular hexagon scales it toward its center."""
        HEX_VERTS = [[-4.61880215, 0.],
                     [-2.30940108, -4.],
                     [2.30940108, -4.],
                     [4.61880215, 0.],
                     [2.30940108, 4.],
                     [-2.30940108, 4.]]
        tp = poly.SimplePolygon(point.PointList(HEX_VERTS)).shrink(1)
        actual = tp.arr
        expect = [[-3.46410162, 0.],
                  [-1.73205081, -3.],
                  [1.73205081, -3.],
                  [3.46410162, 0.],
                  [1.73205081, 3.],
                  [-1.73205081, 3.]]
        expect = point.PointList(expect).arr
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_grow_hexagon(self):
        """Growing a regular hexagon scales it away from its center."""
        HEX_VERTS = [[-4.61880215, 0.],
                     [-2.30940108, -4.],
                     [2.30940108, -4.],
                     [4.61880215, 0.],
                     [2.30940108, 4.],
                     [-2.30940108, 4.]]
        tp = poly.SimplePolygon(point.PointList(HEX_VERTS)).grow(0.5)
        actual = tp.arr
        expect = [[-5.19615242, 0.],
                  [-2.59807621, -4.5],
                  [2.59807621, -4.5],
                  [5.19615242, 0.],
                  [2.59807621, 4.5],
                  [-2.59807621, 4.5]]
        expect = point.PointList(expect).arr
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_shrink_hexagon_rev(self):
        """Hexagon shrink with reversed winding."""
        HEX_VERTS = [[-4.61880215, 0.],
                     [-2.30940108, 4.],
                     [2.30940108, 4.],
                     [4.61880215, 0.],
                     [2.30940108, -4.],
                     [-2.30940108, -4.]]
        tp = poly.SimplePolygon(point.PointList(HEX_VERTS)).shrink(1)
        actual = tp.arr
        expect = [[-3.46410162, 0.],
                  [-1.73205081, 3.],
                  [1.73205081, 3.],
                  [3.46410162, 0.],
                  [1.73205081, -3.],
                  [-1.73205081, -3.]]
        expect = point.PointList(expect).arr
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_grow_hexagon_rev(self):
        """Hexagon grow with reversed winding."""
        HEX_VERTS = [[-4.61880215, 0.],
                     [-2.30940108, 4.],
                     [2.30940108, 4.],
                     [4.61880215, 0.],
                     [2.30940108, -4.],
                     [-2.30940108, -4.]]
        tp = poly.SimplePolygon(point.PointList(HEX_VERTS)).grow(0.5)
        actual = tp.arr
        expect = [[-5.19615242, 0.],
                  [-2.59807621, 4.5],
                  [2.59807621, 4.5],
                  [5.19615242, 0.],
                  [2.59807621, -4.5],
                  [-2.59807621, -4.5]]
        expect = point.PointList(expect).arr
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_shrink_square_notched(self):
        """Shrinking a concave outline widens the notch opening."""
        SQR_VERTS = [[0., 0.],
                     [10., 0.],
                     [10., 4.],
                     [8., 4.],
                     [8., 6.],
                     [10., 6.],
                     [10., 10.],
                     [0., 10.]]
        tp = poly.SimplePolygon(point.PointList(SQR_VERTS)).shrink(1)
        actual = tp.arr
        expect = [[1., 1.],
                  [9., 1.],
                  [9., 3.],
                  [7., 3.],
                  [7., 7.],
                  [9., 7.],
                  [9., 9.],
                  [1., 9.]]
        expect = point.PointList(expect).arr
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_grow_square_notched(self):
        """Growing a concave outline narrows the notch opening."""
        SQR_VERTS = [[0., 0.],
                     [10., 0.],
                     [10., 4.],
                     [8., 4.],
                     [8., 6.],
                     [10., 6.],
                     [10., 10.],
                     [0., 10.]]
        tp = poly.SimplePolygon(point.PointList(SQR_VERTS)).grow(0.5)
        actual = tp.arr
        expect = [[-0.5, -0.5],
                  [10.5, -0.5],
                  [10.5, 4.5],
                  [8.5, 4.5],
                  [8.5, 5.5],
                  [10.5, 5.5],
                  [10.5, 10.5],
                  [-0.5, 10.5]]
        expect = point.PointList(expect).arr
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_shrink_square_notched_rev(self):
        """Notched outline shrink with reversed winding."""
        SQR_VERTS = [[0., 0.],
                     [0., 10.],
                     [10., 10.],
                     [10., 6.],
                     [8., 6.],
                     [8., 4.],
                     [10., 4.],
                     [10., 0.]]
        tp = poly.SimplePolygon(point.PointList(SQR_VERTS)).shrink(1)
        actual = tp.arr
        expect = [[1., 1.],
                  [1., 9.],
                  [9., 9.],
                  [9., 7.],
                  [7., 7.],
                  [7., 3.],
                  [9., 3.],
                  [9., 1.]]
        expect = point.PointList(expect).arr
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_grow_square_notched_rev(self):
        """Notched outline grow with reversed winding."""
        SQR_VERTS = [[0., 0.],
                     [0., 10.],
                     [10., 10.],
                     [10., 6.],
                     [8., 6.],
                     [8., 4.],
                     [10., 4.],
                     [10., 0.]]
        tp = poly.SimplePolygon(point.PointList(SQR_VERTS)).grow(0.5)
        actual = tp.arr
        expect = [[-0.5, -0.5],
                  [-0.5, 10.5],
                  [10.5, 10.5],
                  [10.5, 5.5],
                  [8.5, 5.5],
                  [8.5, 4.5],
                  [10.5, 4.5],
                  [10.5, -0.5]]
        expect = point.PointList(expect).arr
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_shrink_square_smashed(self):
        """Offsets at acute/oblique corners move along the angle bisector."""
        SQR_VERTS = [[0., 0.],
                     [10., 0.],
                     [4., 4.],
                     [0., 10.]]
        tp = poly.SimplePolygon(point.PointList(SQR_VERTS)).shrink(1)
        actual = tp.arr
        expect = [[1., 1.],
                  [6.69722436, 1.],
                  [3.27888974, 3.27888974],
                  [1., 6.69722436]]
        expect = point.PointList(expect).arr
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_grow_square_smashed(self):
        """Growing the smashed outline pushes the sharp corners far outward."""
        SQR_VERTS = [[0., 0.],
                     [10., 0.],
                     [4., 4.],
                     [0., 10.]]
        tp = poly.SimplePolygon(point.PointList(SQR_VERTS)).grow(0.5)
        actual = tp.arr
        expect = [[-0.5, -0.5],
                  [11.65138782, -0.5],
                  [4.36055513, 4.36055513],
                  [-0.5, 11.65138782]]
        expect = point.PointList(expect).arr
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_shrink_square_smashed_rev(self):
        """Smashed outline shrink with reversed winding."""
        SQR_VERTS = [[0., 0.],
                     [0., 10.],
                     [4., 4.],
                     [10., 0.]]
        tp = poly.SimplePolygon(point.PointList(SQR_VERTS)).shrink(1)
        actual = tp.arr
        expect = [[1., 1.],
                  [1., 6.69722436],
                  [3.27888974, 3.27888974],
                  [6.69722436, 1.]]
        expect = point.PointList(expect).arr
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_grow_square_smashed_rev(self):
        """Smashed outline grow with reversed winding."""
        SQR_VERTS = [[0., 0.],
                     [0., 10.],
                     [4., 4.],
                     [10., 0.]]
        tp = poly.SimplePolygon(point.PointList(SQR_VERTS)).grow(0.5)
        actual = tp.arr
        expect = [[-0.5, -0.5],
                  [-0.5, 11.65138782],
                  [4.36055513, 4.36055513],
                  [11.65138782, -0.5]]
        expect = point.PointList(expect).arr
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_shrink_hexagon_smashed(self):
        """Hexagon with one vertex pushed to the origin (a reflex corner)."""
        HEX_VERTS = [[-4.61880215, 0.],
                     [-2.30940108, -4.],
                     [2.30940108, -4.],
                     [0., 0.],
                     [2.30940108, 4.],
                     [-2.30940108, 4.]]
        tp = poly.SimplePolygon(point.PointList(HEX_VERTS)).shrink(1)
        actual = tp.arr
        expect = [[-3.46410162, 0.],
                  [-1.73205081, -3.],
                  [0.57735027, -3.],
                  [-1.15470054, 0.],
                  [0.57735027, 3.],
                  [-1.73205081, 3.]]
        expect = point.PointList(expect).arr
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_grow_hexagon_smashed(self):
        """Growing the smashed hexagon moves the reflex corner outward too."""
        HEX_VERTS = [[-4.61880215, 0.],
                     [-2.30940108, -4.],
                     [2.30940108, -4.],
                     [0., 0.],
                     [2.30940108, 4.],
                     [-2.30940108, 4.]]
        tp = poly.SimplePolygon(point.PointList(HEX_VERTS)).grow(0.5)
        actual = tp.arr
        expect = [[-5.19615242, 0.],
                  [-2.59807621, -4.5],
                  [3.17542648, -4.5],
                  [0.57735027, 0.],
                  [3.17542648, 4.5],
                  [-2.59807621, 4.5]]
        expect = point.PointList(expect).arr
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_shrink_hexagon_smashed_rev(self):
        """Smashed hexagon shrink with reversed winding."""
        HEX_VERTS = [[-4.61880215, 0.],
                     [-2.30940108, 4.],
                     [2.30940108, 4.],
                     [0., 0.],
                     [2.30940108, -4.],
                     [-2.30940108, -4.]]
        tp = poly.SimplePolygon(point.PointList(HEX_VERTS)).shrink(1)
        actual = tp.arr
        expect = [[-3.46410162, 0.],
                  [-1.73205081, 3.],
                  [0.57735027, 3.],
                  [-1.15470054, 0.],
                  [0.57735027, -3.],
                  [-1.73205081, -3.]]
        expect = point.PointList(expect).arr
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_grow_hexagon_smashed_rev(self):
        """Smashed hexagon grow with reversed winding."""
        HEX_VERTS = [[-4.61880215, 0.],
                     [-2.30940108, 4.],
                     [2.30940108, 4.],
                     [0., 0.],
                     [2.30940108, -4.],
                     [-2.30940108, -4.]]
        tp = poly.SimplePolygon(point.PointList(HEX_VERTS)).grow(0.5)
        actual = tp.arr
        expect = [[-5.19615242, 0.],
                  [-2.59807621, 4.5],
                  [3.17542648, 4.5],
                  [0.57735027, 0.],
                  [3.17542648, -4.5],
                  [-2.59807621, -4.5]]
        expect = point.PointList(expect).arr
        self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))

    def test_invalid_relay_outline(self):
        """A self-intersecting outline must be rejected at construction."""
        TST_VERTS = [[30.8737, 1.5875],
                     [40.5257, 1.5875],
                     [40.5257, 25.2095],
                     [26.4287, 25.2095],
                     [26.4287, 22.9235],
                     [24.8412, 24.511],
                     [26.4287, 22.9235],
                     [11.9507, 22.9235],
                     [11.9507, 6.5405],
                     [30.8737, 6.5405],
                     [29.2862, 4.953],
                     [30.8737, 6.5405]]
        actual = ""
        try:
            tp = poly.SimplePolygon(point.PointList(TST_VERTS))
        except poly.PolygonError as err:
            actual = str(err)
        expect = "SimplePolygon vertices must form a simple polygon's mathematical definition"
        self.assertEqual(actual, expect)
| 38.522792
| 104
| 0.464446
| 2,971
| 27,043
| 4.135645
| 0.060586
| 0.086921
| 0.064458
| 0.080085
| 0.886058
| 0.867909
| 0.854155
| 0.84097
| 0.826727
| 0.796452
| 0
| 0.128035
| 0.369523
| 27,043
| 701
| 105
| 38.577746
| 0.59261
| 0.036497
| 0
| 0.756219
| 0
| 0
| 0.042341
| 0
| 0
| 0
| 0
| 0
| 0.097844
| 1
| 0.064677
| false
| 0
| 0.00995
| 0
| 0.08126
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7e63f0c03aba591391f3d69672f24b0feb4403e7
| 15,555
|
py
|
Python
|
src/bugge/difference_in_extracted_rules.py
|
Abdumaleek/infinity-mirror
|
b493c5602d9e4bcf374b748e9b80e7c85be54a88
|
[
"MIT"
] | 5
|
2020-03-13T02:54:03.000Z
|
2022-03-18T02:33:12.000Z
|
src/bugge/difference_in_extracted_rules.py
|
Abdumaleek/infinity-mirror
|
b493c5602d9e4bcf374b748e9b80e7c85be54a88
|
[
"MIT"
] | 2
|
2021-11-10T19:47:00.000Z
|
2022-02-10T01:24:59.000Z
|
src/bugge/difference_in_extracted_rules.py
|
Abdumaleek/infinity-mirror
|
b493c5602d9e4bcf374b748e9b80e7c85be54a88
|
[
"MIT"
] | 1
|
2021-05-24T21:54:44.000Z
|
2021-05-24T21:54:44.000Z
|
import networkx as nx
from networkx.algorithms import isomorphism
import math
import re
ORIG_PPI = [["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 234) ('type_1' 'type_1') ('type_1' 234) (197 234) (234 197)]", 844],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 478) ('type_1' 'type_1') ('type_1' 478) (197 234) (197 478) (234 197) (478 197)]", 73],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 234) ('type_1' 'type_1') ('type_1' 234) (197 234) (234 197) (234 338) (338 234)]", 53],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 234) ('type_1' 'type_1') ('type_1' 234) (197 234) (234 197) (234 738) (234 800) (738 234) (800 234)]", 25],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 328) ('type_1' 'type_1') ('type_1' 328) (197 478) (478 197) (478 479) (328 479) (479 328) (479 478)]", 4],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 314) ('type_1' 'type_1') ('type_1' 314) (114 314) (314 114) (314 331) (314 1120) (294 331) (331 294) (331 314)]", 11],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 314) ('type_1' 'type_1') ('type_1' 314) (114 314) (314 114) (314 1120)]", 14],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 275) ('type_1' 'type_1') ('type_1' 275) (275 334) (275 375) (275 589) (275 1069) (334 275) (375 275) (589 275) (1069 275)]", 21],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 314) ('type_1' 'type_1') ('type_1' 314) (314 1120)]", 60],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 558) ('type_1' 'type_1') ('type_1' 558) (558 559) (558 571) (558 654) (558 1328) (558 1329) (559 558) (571 558) (654 558) (1328 558) (1329 558)]", 36],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 239) ('type_1' 'type_1') ('type_1' 239) (239 1056) (239 1107) (239 1331) (1056 239)]", 12],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 43) ('type_1' 'type_1') ('type_1' 43) (42 43)]", 7],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 1059) ('type_0' 925) ('type_1' 'type_1') ('type_1' 1059) ('type_1' 925) (925 1185) (1185 925) (1185 1059) (1059 1185)]", 12],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 1440) ('type_0' 749) ('type_0' 1551) ('type_1' 'type_1') ('type_1' 1440) ('type_1' 749) ('type_1' 1551) (749 750) (750 749) (750 1440) (750 1551) (1440 750) (1551 750)]", 4],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 1) ('type_0' 5) ('type_1' 'type_1') ('type_1' 1) ('type_1' 5) (1 12) (1 58) (1 188) (12 1) (12 5) (58 1) (58 5) (58 609) (188 1) (188 5) (5 12) (5 58) (5 188) (5 609) (609 5) (609 58)]", 1]]
CL_PPI = [["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 215) ('type_1' 'type_1') ('type_1' 215) (215 807)]", 682],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 215) ('type_1' 'type_1') ('type_1' 215) (215 437) (215 807)]", 15],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 602) ('type_1' 'type_1') ('type_1' 602) (215 807) (602 215)]", 13],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 215) ('type_1' 'type_1') ('type_1' 215) (1064 215)]", 488],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 770) ('type_1' 'type_1') ('type_1' 76) (76 770)]", 392],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 64) ('type_1' 'type_1') ('type_1' 64) (5 64) (64 5)]", 56],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 30) ('type_1' 'type_1') ('type_1' 30) (30 503) (503 30) (777 30)]", 1],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 49) ('type_1' 'type_1') ('type_1' 9) (9 49) (49 9)]", 12],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 10) ('type_1' 'type_1') ('type_1' 10) ('type_1' 6) (6 10) (10 6)]", 2],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 9) ('type_0' 10) ('type_1' 'type_1') ('type_1' 9) ('type_1' 10) (9 10)]", 2],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 10) ('type_0' 50) ('type_1' 'type_1') ('type_1' 50) (10 50) (50 10)]", 4],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 20) ('type_0' 244) ('type_1' 'type_1') ('type_1' 33) ('type_1' 6) (3 6) (3 8) (3 33) (3 244) (6 8) (6 20) (8 20) (8 33) (8 244) (33 3) (33 6) (33 20) (244 3) (244 8) (244 20) (244 33) (20 8)]", 1]]
ER_PPI = [["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 1480) ('type_1' 'type_1') ('type_1' 1281) (1281 1480)]", 1303],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 1643) ('type_1' 'type_1') ('type_1' 1643) (1643 1281)]", 127],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 1437) ('type_1' 'type_1') ('type_1' 1437) (1004 1437)]", 155],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 581) ('type_1' 'type_1') ('type_1' 736) (581 736)]", 102],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 650) ('type_1' 'type_1') ('type_1' 537) (537 650) (650 537)]", 2],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 650) ('type_1' 'type_1') ('type_1' 650) (537 650) (650 537)]", 6],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 32) ('type_1' 'type_1') ('type_1' 170) (4 32) (4 252) (170 4) (170 32) (252 4) (252 170)]", 1],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 17) ('type_0' 74) ('type_1' 'type_1') ('type_1' 8) ('type_1' 13) (1 8) (1 13) (1 17) (3 1) (3 74) (8 3) (8 17) (13 1) (13 3) (17 74) (74 3)]", 1]]
ORIG_BLOGS = [["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 896) ('type_1' 'type_1') ('type_1' 446) (896 446)]", 323],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 896) ('type_1' 'type_1') ('type_1' 446) (896 446) (913 446)]", 6],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 515) ('type_1' 'type_1') ('type_1' 515) (515 446)]", 127],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 772) ('type_1' 'type_1') ('type_1' 772) (446 772)]", 226],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 652) ('type_1' 'type_1') ('type_1' 446) (446 652) (652 446)]", 88],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 1068) ('type_1' 'type_1') ('type_1' 163) (92 163) (1066 163) (1068 163)]", 3],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 887) ('type_1' 'type_1') ('type_1' 887) (885 887) (887 885)]", 155],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 987) ('type_1' 'type_1') ('type_1' 871) (871 987)]", 99],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 193) ('type_0' 265) ('type_1' 'type_1') ('type_1' 265) (193 265) (265 193)]", 39],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 193) ('type_1' 'type_1') ('type_1' 193) (193 383) (193 679) (193 680)]", 8],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 209) ('type_1' 'type_1') ('type_1' 209) ('type_1' 210) (209 210)]", 14],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 4) ('type_0' 293) ('type_1' 'type_1') ('type_1' 4) (293 4)]", 21],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 4) ('type_1' 'type_1') ('type_1' 4) (405 4) (657 4)]", 9],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 135) ('type_1' 'type_1') ('type_1' 317) ('type_1' 135) (135 317) (317 135)]", 13],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 106) ('type_0' 44) ('type_1' 'type_1') ('type_1' 106) ('type_1' 44) (44 106) (106 44)]", 11],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 107) ('type_0' 44) ('type_1' 'type_1') ('type_1' 107) (107 44)]", 16],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 512) ('type_1' 'type_1') ('type_1' 512) ('type_1' 511) (511 512)]", 12],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 426) ('type_0' 427) ('type_1' 'type_1') ('type_1' 426) ('type_1' 427) (426 427)]", 9],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 64) ('type_0' 42) ('type_0' 4) ('type_1' 'type_1') ('type_1' 64) ('type_1' 2) (2 4) (2 42) (4 42) (4 64) (42 2) (42 4) (42 64) (64 2) (64 4) (64 42)]", 1],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 47) ('type_0' 7) ('type_1' 'type_1') ('type_1' 3) ('type_1' 47) (1 3) (1 7) (1 47) (3 1) (3 7) (3 47) (7 1) (7 3) (47 7)]", 1]]
CL_BLOGS = [["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 208) ('type_1' 'type_1') ('type_1' 208) (208 257)]", 191],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 208) ('type_1' 'type_1') ('type_1' 208) (208 257) (776 208)]", 7],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 208) ('type_1' 'type_1') ('type_1' 514) (514 208)]", 237],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 520) ('type_1' 'type_1') ('type_1' 208) (520 208)]", 321],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 208) ('type_1' 'type_1') ('type_1' 208) (776 208)]", 345],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 114) ('type_1' 'type_1') ('type_1' 114) ('type_1' 1155) (114 1155)]", 5],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 128) ('type_1' 'type_1') ('type_1' 70) (70 128) (128 70)]", 50],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 274) ('type_1' 'type_1') ('type_1' 274) (45 274) (274 45)]", 26],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 14) ('type_1' 'type_1') ('type_1' 68) ('type_1' 14) (14 68) (68 14)]", 7],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 29) ('type_0' 14) ('type_1' 'type_1') ('type_1' 14) (14 29) (29 14)]", 10],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 29) ('type_0' 46) ('type_1' 'type_1') ('type_1' 29) (29 46)]", 6],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 29) ('type_0' 14) ('type_1' 'type_1') ('type_1' 29) ('type_1' 14) (14 29) (29 14)]", 4],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 17) ('type_0' 19) ('type_1' 'type_1') ('type_1' 17) ('type_1' 19) (19 17)]", 4],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 1) ('type_0' 12) ('type_0' 5) ('type_1' 'type_1') ('type_1' 1) ('type_1' 5) (1 12) (1 26) (12 1) (12 5) (12 26) (26 1) (26 12) (5 12)]", 1]]
ER_BLOGS = [["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 877) ('type_1' 'type_1') ('type_1' 1121) (877 1121)]", 54],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 1075) ('type_1' 'type_1') ('type_1' 877) (877 1075)]", 1032],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 1109) ('type_1' 'type_1') ('type_1' 1109) (877 1109)]", 48],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 1152) ('type_1' 'type_1') ('type_1' 1152) (1152 1117)]", 18],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 141) ('type_1' 'type_1') ('type_1' 141) (141 1080) (1080 141)]", 4],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 406) ('type_1' 'type_1') ('type_1' 540) (406 540) (540 406)]", 57],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_0' 73) ('type_0' 6) ('type_1' 'type_1') ('type_1' 73) (73 6)]", 5],\
["[('type_0' 'type_0') ('type_0' 'type_1') ('type_1' 'type_1') (1 7) (1 23) (5 1) (5 7) (7 23) (10 1) (10 8) (23 1) (23 7) (23 8) (23 10) (8 5) (8 10)]", 1]]
def graph_from_string(the_string):
    """Parse an edge-list string like "[('a' 'b') ('c' 'd')]" into a DiGraph.

    The string is split on the literal delimiters "[(", ") (", ")]" and
    single spaces; the resulting tokens are node labels, and consecutive
    token pairs (0-1, 2-3, ...) become directed edges.
    """
    # Raw string: the original non-raw '\[\(...' relies on invalid escape
    # sequences, which raise SyntaxWarning/DeprecationWarning on modern Python.
    pieces = re.split(r'\[\(| |\) \(|\)\]', the_string)
    # The split produces a leading and trailing empty string; drop both.
    pieces = pieces[1:-1]
    G = nx.DiGraph()
    # Register every distinct token as a node, then pair tokens into edges.
    G.add_nodes_from(set(pieces))
    for idx in range(0, len(pieces), 2):
        G.add_edge(pieces[idx], pieces[idx + 1])
    return G
def make_graph_list(the_list):
    """Convert [edge-string, count] pairs into [DiGraph, count] pairs."""
    converted = []
    for edge_string, count in the_list:
        converted.append([graph_from_string(edge_string), count])
    return converted
def merge_graph_lists(list_a, list_b, list_c):
    """Align three [graph, count] lists by graph isomorphism.

    Returns rows of [graph, count_a, count_b, count_c]; a graph missing
    from one of the inputs gets a 0 in that input's count slot.
    """
    def _matching_row(graph, rows):
        # Return the first row whose graph is isomorphic to `graph`, else None.
        for row in rows:
            matcher = isomorphism.DiGraphMatcher(graph, row[0])
            if matcher.is_isomorphic():
                return row
        return None

    merged = [[graph, count, 0, 0] for graph, count in list_a]
    for graph, count in list_b:
        row = _matching_row(graph, merged)
        if row is None:
            merged.append([graph, 0, count, 0])
        else:
            row[2] = count
    for graph, count in list_c:
        row = _matching_row(graph, merged)
        if row is None:
            merged.append([graph, 0, 0, count])
        else:
            row[3] = count
    return merged
def merged_probabilities(merged_lists):
    """Turn merged count rows into add-one (Laplace) smoothed probabilities.

    Each column of counts is normalized independently, with +1 applied to
    every count so unseen graphs keep a small nonzero probability.
    """
    totals = [0.0, 0.0, 0.0]
    for row in merged_lists:
        for slot in range(3):
            totals[slot] += row[slot + 1] + 1.0
    return [
        [row[0]] + [(row[slot + 1] + 1.0) / totals[slot] for slot in range(3)]
        for row in merged_lists
    ]
def ratios(prob_list, idx_1, idx_2):
    """Per-row ratio of two probability columns, largest magnitude first.

    The ratio is positive when column idx_1 dominates, otherwise the
    inverted ratio is negated — so |ratio| is always larger/smaller.
    """
    ranked = []
    for row in prob_list:
        p, q = row[idx_1], row[idx_2]
        value = p / q if p > q else -1.0 * q / p
        ranked.append([row[0], value])
    ranked.sort(key=lambda entry: -1.0 * abs(entry[1]))
    return ranked
def kl_contributions(prob_list, idx_1, idx_2):
    """Per-row KL divergence term -p*log(q/p), sorted descending by value."""
    terms = [
        [row[0], -1.0 * row[idx_1] * math.log(row[idx_2] / row[idx_1])]
        for row in prob_list
    ]
    terms.sort(key=lambda entry: entry[1], reverse=True)
    return terms
def KL_divergence(kl_contributions):
    """Total KL divergence: the sum of the per-graph contribution terms."""
    return sum((term[1] for term in kl_contributions), 0.0)
def display_kl_contributions(some_kl, title):
    """Print `title`, then the top three contributions as sorted edge lists."""
    print(title)
    for graph, value in some_kl[:3]:
        edges = sorted(graph.edges())
        print("%s %s" % (edges, value))
# --- PPI dataset: compare the original network's subgraph-signature
# --- histogram against Chung-Lu (CL) and Erdos-Renyi (ER) model baselines.
orig_ppi_list = make_graph_list(ORIG_PPI)
cl_ppi_list = make_graph_list(CL_PPI)
er_ppi_list = make_graph_list(ER_PPI)
merged_list = merge_graph_lists(orig_ppi_list, cl_ppi_list, er_ppi_list)
probs_list = merged_probabilities(merged_list)
# Columns: 1 = original, 2 = CL, 3 = ER smoothed probabilities.
orig_cl_kl_contributions = kl_contributions(probs_list, 1, 2)
orig_er_kl_contributions = kl_contributions(probs_list, 1, 3)
display_kl_contributions(orig_cl_kl_contributions, "PPI: Original vs CL")
display_kl_contributions(orig_er_kl_contributions, "PPI: Original vs ER")
orig_cl_kl = KL_divergence(orig_cl_kl_contributions)
print("orig_cl_kl: %s" % orig_cl_kl)
orig_er_kl = KL_divergence(orig_er_kl_contributions)
print("orig_er_kl: %s" % orig_er_kl)
# --- Blogs dataset: the same comparison for the blog-network histograms.
orig_blogs_list = make_graph_list(ORIG_BLOGS)
cl_blogs_list = make_graph_list(CL_BLOGS)
er_blogs_list = make_graph_list(ER_BLOGS)
merged_list = merge_graph_lists(orig_blogs_list, cl_blogs_list, er_blogs_list)
probs_list = merged_probabilities(merged_list)
orig_cl_kl_contributions = kl_contributions(probs_list, 1, 2)
orig_er_kl_contributions = kl_contributions(probs_list, 1, 3)
display_kl_contributions(orig_cl_kl_contributions, "Blogs: Original vs CL")
display_kl_contributions(orig_er_kl_contributions, "Blogs: Original vs ER")
orig_cl_kl = KL_divergence(orig_cl_kl_contributions)
print("orig_cl_kl: %s" % orig_cl_kl)
orig_er_kl = KL_divergence(orig_er_kl_contributions)
print("orig_er_kl: %s" % orig_er_kl)
| 80.595855
| 262
| 0.554034
| 2,779
| 15,555
| 2.77258
| 0.094998
| 0.214147
| 0.269825
| 0.19987
| 0.660999
| 0.626866
| 0.521739
| 0.498378
| 0.485918
| 0.462038
| 0
| 0.205927
| 0.19955
| 15,555
| 192
| 263
| 81.015625
| 0.412899
| 0
| 0
| 0.137931
| 0
| 0.442529
| 0.631694
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045977
| false
| 0
| 0.022989
| 0.005747
| 0.109195
| 0.034483
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7e65821ac048ab95d4fa9b04d9dbc2b45dae03f5
| 127
|
py
|
Python
|
PythonExercicios/ex012.py
|
MatthewsTomts/Python_Class
|
f326d521d62c45a4fcb429d2a22cf2ab958492cb
|
[
"MIT"
] | null | null | null |
PythonExercicios/ex012.py
|
MatthewsTomts/Python_Class
|
f326d521d62c45a4fcb429d2a22cf2ab958492cb
|
[
"MIT"
] | null | null | null |
PythonExercicios/ex012.py
|
MatthewsTomts/Python_Class
|
f326d521d62c45a4fcb429d2a22cf2ab958492cb
|
[
"MIT"
] | null | null | null |
# Read a product price and print it with a 5% discount applied (the 0.95
# multiplier); the \033[1;37m ANSI escape switches the terminal text to
# bold white before the formatted price.
des = float(input('Qual o preço do produto? R$'))
print(f'O preço do produto com desconto de 5% é \033[1;37mR${des*0.95:.2f}')
| 42.333333
| 76
| 0.669291
| 27
| 127
| 3.148148
| 0.814815
| 0.141176
| 0.188235
| 0.352941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.100917
| 0.141732
| 127
| 2
| 77
| 63.5
| 0.678899
| 0
| 0
| 0
| 0
| 0.5
| 0.732283
| 0.204724
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
0e5ed60203647f8d887ecc99337108ccd3d17121
| 9,952
|
py
|
Python
|
scripts/analysis/models.py
|
self-improving-agent/SomaticVariantCallingWithDeepLearning
|
50912dd3c2e88cd05daf5870ab6437d43a16cca8
|
[
"MIT"
] | null | null | null |
scripts/analysis/models.py
|
self-improving-agent/SomaticVariantCallingWithDeepLearning
|
50912dd3c2e88cd05daf5870ab6437d43a16cca8
|
[
"MIT"
] | null | null | null |
scripts/analysis/models.py
|
self-improving-agent/SomaticVariantCallingWithDeepLearning
|
50912dd3c2e88cd05daf5870ab6437d43a16cca8
|
[
"MIT"
] | null | null | null |
import torch.nn as nn
# Different possible models
class GRU(nn.Module):
    """GRU classifier over per-position pileup features.

    Consumes sequences of 6 features per position and emits a 3-class
    softmax computed from the final timestep's hidden state.
    """

    def __init__(self, n_hidden, n_layers, dropout, bidirectional):
        super(GRU, self).__init__()
        self.n_hidden = n_hidden
        self.n_layers = n_layers
        self.dropout = dropout
        self.bidirectional = bidirectional
        self.gru = nn.GRU(
            input_size=6,  # 6 features: Normal REF %, Normal ALT %, Normal GAP %, Tumor REF %, Tumor ALT %, Tumor GAP %
            hidden_size=self.n_hidden,
            num_layers=self.n_layers,
            batch_first=True,
            dropout=self.dropout,
            bidirectional=self.bidirectional)
        if self.dropout != 0.0:
            self.dropout_layer = nn.Dropout(p=self.dropout)
        self.out = nn.Linear(self.n_hidden, 3)
        self.out_act = nn.Softmax(dim=1)

    def forward(self, x):
        out, _ = self.gru(x)
        if self.bidirectional:
            # Sum the forward and backward halves of the feature dimension.
            out = out[:, :, :self.n_hidden] + out[:, :, self.n_hidden:]
        if out.shape[1] > 1:
            # Keep only the last timestep. The original `.squeeze()` here also
            # collapsed a batch dimension of size 1, which made Softmax(dim=1)
            # fail for single-sample batches; plain slicing always preserves
            # the (batch, hidden) shape. Behavior is unchanged for batch > 1.
            out = out[:, -1, :]
        if self.dropout != 0.0:
            out = self.dropout_layer(out)
        out = self.out(out)
        out = self.out_act(out)
        return out
class LSTM(nn.Module):
    """LSTM classifier over per-position pileup features.

    Consumes sequences of 6 features per position and emits a 3-class
    softmax computed from the final timestep's hidden state.
    """

    def __init__(self, n_hidden, n_layers, dropout, bidirectional):
        super(LSTM, self).__init__()
        self.n_hidden = n_hidden
        self.n_layers = n_layers
        self.dropout = dropout
        self.bidirectional = bidirectional
        self.LSTM = nn.LSTM(
            input_size=6,  # 6 features: Normal REF %, Normal ALT %, Normal GAP %, Tumor REF %, Tumor ALT %, Tumor GAP %
            hidden_size=self.n_hidden,
            num_layers=self.n_layers,
            batch_first=True,
            dropout=self.dropout,
            bidirectional=self.bidirectional)
        if self.dropout != 0.0:
            self.dropout_layer = nn.Dropout(p=self.dropout)
        self.out = nn.Linear(self.n_hidden, 3)
        self.out_act = nn.Softmax(dim=1)

    def forward(self, x):
        out, _ = self.LSTM(x)
        if self.bidirectional:
            # Sum the forward and backward halves of the feature dimension.
            out = out[:, :, :self.n_hidden] + out[:, :, self.n_hidden:]
        if out.shape[1] > 1:
            # Keep only the last timestep. The original `.squeeze()` also
            # collapsed a batch dimension of size 1, breaking Softmax(dim=1)
            # for single-sample batches; slicing preserves (batch, hidden).
            out = out[:, -1, :]
        if self.dropout != 0.0:
            out = self.dropout_layer(out)
        out = self.out(out)
        out = self.out_act(out)
        return out
class RNN(nn.Module):
    """Vanilla RNN classifier over per-position pileup features.

    Consumes sequences of 6 features per position and emits a 3-class
    softmax computed from the final timestep's hidden state.
    """

    def __init__(self, n_hidden, n_layers, dropout, bidirectional):
        super(RNN, self).__init__()
        self.n_hidden = n_hidden
        self.n_layers = n_layers
        self.dropout = dropout
        self.bidirectional = bidirectional
        self.rnn = nn.RNN(
            input_size=6,  # 6 features: Normal REF %, Normal ALT %, Normal GAP %, Tumor REF %, Tumor ALT %, Tumor GAP %
            hidden_size=self.n_hidden,
            num_layers=self.n_layers,
            batch_first=True,
            dropout=self.dropout,
            bidirectional=self.bidirectional)
        if self.dropout != 0.0:
            self.dropout_layer = nn.Dropout(p=self.dropout)
        self.out = nn.Linear(self.n_hidden, 3)
        self.out_act = nn.Softmax(dim=1)

    def forward(self, x):
        out, _ = self.rnn(x)
        if self.bidirectional:
            # Sum the forward and backward halves of the feature dimension.
            out = out[:, :, :self.n_hidden] + out[:, :, self.n_hidden:]
        if out.shape[1] > 1:
            # Keep only the last timestep. The original `.squeeze()` also
            # collapsed a batch dimension of size 1, breaking Softmax(dim=1)
            # for single-sample batches; slicing preserves (batch, hidden).
            out = out[:, -1, :]
        if self.dropout != 0.0:
            out = self.dropout_layer(out)
        out = self.out(out)
        out = self.out_act(out)
        return out
class Transformer(nn.Module):
    """Transformer-encoder classifier over 6-feature positions.

    Encodes the sequence, flattens it, and maps the flat vector through a
    single linear layer to a 3-class softmax.
    """

    def __init__(self, n_layers, dropout, seq_len):
        super(Transformer, self).__init__()
        self.n_layers = n_layers
        self.dropout = dropout
        self.seq_len = seq_len
        # d_model matches the 6 per-position input features; one head each.
        encoder_layer = nn.TransformerEncoderLayer(d_model=6, nhead=6, dropout=self.dropout)
        self.transformer = nn.TransformerEncoder(encoder_layer, num_layers=n_layers)
        if self.dropout != 0.0:
            self.dropout_layer = nn.Dropout(p=self.dropout)
        self.flatten = nn.Flatten()
        self.out = nn.Linear(self.seq_len * 6, 3)
        self.out_act = nn.Softmax(dim=1)

    def forward(self, x):
        encoded = self.transformer(x)
        if self.dropout != 0.0:
            encoded = self.dropout_layer(encoded)
        flat = self.flatten(encoded)
        return self.out_act(self.out(flat))
class Perceptron(nn.Module):
    """Single linear layer over the flattened sequence, 3-class softmax."""

    def __init__(self, seq_len):
        super(Perceptron, self).__init__()
        self.seq_len = seq_len
        self.out = nn.Linear(self.seq_len * 6, 3)
        self.out_act = nn.Softmax(dim=1)

    def forward(self, x):
        # Flatten everything past the batch dimension into one feature vector.
        flat = x.contiguous().view(x.shape[0], -1)
        return self.out_act(self.out(flat))
# Genotyping models
class Genotyping_GRU(nn.Module):
    """GRU genotyping classifier: 10 features per position, 4 output classes."""

    def __init__(self, n_hidden, n_layers, dropout, bidirectional):
        super(Genotyping_GRU, self).__init__()
        self.n_hidden = n_hidden
        self.n_layers = n_layers
        self.dropout = dropout
        self.bidirectional = bidirectional
        self.gru = nn.GRU(
            input_size=10,  # 10 features
            hidden_size=self.n_hidden,
            num_layers=self.n_layers,
            batch_first=True,
            dropout=self.dropout,
            bidirectional=self.bidirectional)
        if self.dropout != 0.0:
            self.dropout_layer = nn.Dropout(p=self.dropout)
        self.out = nn.Linear(self.n_hidden, 4)  # 4 classes
        self.out_act = nn.Softmax(dim=1)

    def forward(self, x):
        out, _ = self.gru(x)
        if self.bidirectional:
            # Sum the forward and backward halves of the feature dimension.
            out = out[:, :, :self.n_hidden] + out[:, :, self.n_hidden:]
        if out.shape[1] > 1:
            # Keep only the last timestep. The original `.squeeze()` also
            # collapsed a batch dimension of size 1, breaking Softmax(dim=1)
            # for single-sample batches; slicing preserves (batch, hidden).
            out = out[:, -1, :]
        if self.dropout != 0.0:
            out = self.dropout_layer(out)
        out = self.out(out)
        out = self.out_act(out)
        return out
class Genotyping_LSTM(nn.Module):
    """LSTM genotyping classifier: 10 features per position, 4 output classes."""

    def __init__(self, n_hidden, n_layers, dropout, bidirectional):
        super(Genotyping_LSTM, self).__init__()
        self.n_hidden = n_hidden
        self.n_layers = n_layers
        self.dropout = dropout
        self.bidirectional = bidirectional
        self.LSTM = nn.LSTM(
            input_size=10,  # 10 features
            hidden_size=self.n_hidden,
            num_layers=self.n_layers,
            batch_first=True,
            dropout=self.dropout,
            bidirectional=self.bidirectional)
        if self.dropout != 0.0:
            self.dropout_layer = nn.Dropout(p=self.dropout)
        self.out = nn.Linear(self.n_hidden, 4)  # 4 classes
        self.out_act = nn.Softmax(dim=1)

    def forward(self, x):
        out, _ = self.LSTM(x)
        if self.bidirectional:
            # Sum the forward and backward halves of the feature dimension.
            out = out[:, :, :self.n_hidden] + out[:, :, self.n_hidden:]
        if out.shape[1] > 1:
            # Keep only the last timestep. The original `.squeeze()` also
            # collapsed a batch dimension of size 1, breaking Softmax(dim=1)
            # for single-sample batches; slicing preserves (batch, hidden).
            out = out[:, -1, :]
        if self.dropout != 0.0:
            out = self.dropout_layer(out)
        out = self.out(out)
        out = self.out_act(out)
        return out
class Genotyping_RNN(nn.Module):
    """Vanilla RNN genotyping classifier: 10 features in, 4 output classes."""

    def __init__(self, n_hidden, n_layers, dropout, bidirectional):
        super(Genotyping_RNN, self).__init__()
        self.n_hidden = n_hidden
        self.n_layers = n_layers
        self.dropout = dropout
        self.bidirectional = bidirectional
        self.rnn = nn.RNN(
            input_size=10,  # 10 features
            hidden_size=self.n_hidden,
            num_layers=self.n_layers,
            batch_first=True,
            dropout=self.dropout,
            bidirectional=self.bidirectional)
        if self.dropout != 0.0:
            self.dropout_layer = nn.Dropout(p=self.dropout)
        self.out = nn.Linear(self.n_hidden, 4)  # 4 classes
        self.out_act = nn.Softmax(dim=1)

    def forward(self, x):
        out, _ = self.rnn(x)
        if self.bidirectional:
            # Sum the forward and backward halves of the feature dimension.
            out = out[:, :, :self.n_hidden] + out[:, :, self.n_hidden:]
        if out.shape[1] > 1:
            # Keep only the last timestep. The original `.squeeze()` also
            # collapsed a batch dimension of size 1, breaking Softmax(dim=1)
            # for single-sample batches; slicing preserves (batch, hidden).
            out = out[:, -1, :]
        if self.dropout != 0.0:
            out = self.dropout_layer(out)
        out = self.out(out)
        out = self.out_act(out)
        return out
class Genotyping_Transformer(nn.Module):
    """Transformer-encoder genotyping classifier: 10 features, 4 classes.

    Encodes the sequence, flattens it, and maps the flat vector through a
    single linear layer to a 4-class softmax.
    """

    def __init__(self, n_layers, dropout, seq_len):
        super(Genotyping_Transformer, self).__init__()
        self.n_layers = n_layers
        self.dropout = dropout
        self.seq_len = seq_len
        # d_model matches the 10 per-position input features; one head each.
        encoder_layer = nn.TransformerEncoderLayer(d_model=10, nhead=10, dropout=self.dropout)
        self.transformer = nn.TransformerEncoder(encoder_layer, num_layers=n_layers)
        if self.dropout != 0.0:
            self.dropout_layer = nn.Dropout(p=self.dropout)
        self.flatten = nn.Flatten()
        self.out = nn.Linear(self.seq_len * 10, 4)
        self.out_act = nn.Softmax(dim=1)

    def forward(self, x):
        encoded = self.transformer(x)
        if self.dropout != 0.0:
            encoded = self.dropout_layer(encoded)
        flat = self.flatten(encoded)
        return self.out_act(self.out(flat))
class Genotyping_Perceptron(nn.Module):
    """Single linear layer over the flattened sequence, 4-class softmax."""

    def __init__(self, seq_len):
        super(Genotyping_Perceptron, self).__init__()
        self.seq_len = seq_len
        self.out = nn.Linear(self.seq_len * 10, 4)
        self.out_act = nn.Softmax(dim=1)

    def forward(self, x):
        # Flatten everything past the batch dimension into one feature vector.
        flat = x.contiguous().view(x.shape[0], -1)
        return self.out_act(self.out(flat))
| 33.508418
| 122
| 0.555265
| 1,252
| 9,952
| 4.207668
| 0.054313
| 0.116932
| 0.075171
| 0.044419
| 0.977601
| 0.977601
| 0.977601
| 0.977601
| 0.977601
| 0.962415
| 0
| 0.016032
| 0.329381
| 9,952
| 297
| 123
| 33.508418
| 0.773299
| 0.038887
| 0
| 0.904564
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.082988
| false
| 0
| 0.004149
| 0
| 0.170124
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0e91cf61e1a8d61ce62d24ecf7c209abb6a029c2
| 681
|
py
|
Python
|
Leetcode/Python/_1812.py
|
Xrenya/algorithms
|
aded82cacde2f4f2114241907861251e0e2e5638
|
[
"MIT"
] | 1
|
2021-11-28T15:03:32.000Z
|
2021-11-28T15:03:32.000Z
|
Leetcode/Python/_1812.py
|
Xrenya/algorithms
|
aded82cacde2f4f2114241907861251e0e2e5638
|
[
"MIT"
] | null | null | null |
Leetcode/Python/_1812.py
|
Xrenya/algorithms
|
aded82cacde2f4f2114241907861251e0e2e5638
|
[
"MIT"
] | null | null | null |
class Solution:
    def squareIsWhite(self, coordinates: str) -> bool:
        """Return True when chessboard square `coordinates` (e.g. "a1") is white.

        Columns a-h map to files 1-8; a square is white exactly when its
        row and column indices have opposite parity.
        """
        hashMap = {"a": 1, "b": 2, "c": 3, "d": 4,
                   "e": 5, "f": 6, "g": 7, "h": 8}
        row, col = int(coordinates[1]), hashMap[coordinates[0]]
        # Equal parity (both odd or both even) means a black square; the
        # original if/else returning literal True/False is redundant.
        return row % 2 != col % 2
class Solution:
    def squareIsWhite(self, coordinates: str) -> bool:
        """Return True when chessboard square `coordinates` (e.g. "h3") is white."""
        # Derive the 1-based file index arithmetically instead of via the
        # hand-written letter table.
        col = ord(coordinates[0]) - ord("a") + 1
        row = int(coordinates[1])
        # White squares have row and column of opposite parity; the original
        # if/else returning literal True/False is redundant.
        return row % 2 != col % 2
| 34.05
| 70
| 0.46696
| 97
| 681
| 3.278351
| 0.371134
| 0.025157
| 0.100629
| 0.18239
| 0.981132
| 0.823899
| 0.823899
| 0.754717
| 0.754717
| 0.754717
| 0
| 0.066964
| 0.342144
| 681
| 19
| 71
| 35.842105
| 0.642857
| 0
| 0
| 0.888889
| 0
| 0
| 0.023495
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0
| 0
| 0.444444
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0ea04313b919e9ed30d3a66455942e10d3eb2852
| 16,509
|
py
|
Python
|
scripts/Networks.py
|
NamDinhRobotics/DRL_graph_exploration
|
0d7dbb044c01dba91fca200ab66ec7f185e26329
|
[
"BSD-3-Clause"
] | 1
|
2021-11-29T10:30:38.000Z
|
2021-11-29T10:30:38.000Z
|
scripts/Networks.py
|
NamDinhRobotics/DRL_graph_exploration
|
0d7dbb044c01dba91fca200ab66ec7f185e26329
|
[
"BSD-3-Clause"
] | null | null | null |
scripts/Networks.py
|
NamDinhRobotics/DRL_graph_exploration
|
0d7dbb044c01dba91fca200ab66ec7f185e26329
|
[
"BSD-3-Clause"
] | null | null | null |
import os
import torch
import torch.nn.functional as F
from torch_sparse import spspmm
from torch_scatter import scatter_max, scatter_add
from torch_geometric.nn import GCNConv, TopKPooling, GatedGraphConv, global_max_pool, global_mean_pool
from torch_geometric.utils import (add_self_loops, sort_edge_index,
remove_self_loops, softmax)
from torch_geometric.utils.repeat import repeat
class GCN(torch.nn.Module):
    """Two-layer GCN producing one scalar score per node."""

    def __init__(self):
        super(GCN, self).__init__()
        self.conv1 = GCNConv(5, 1000, improved=True)
        self.conv2 = GCNConv(1000, 1000, improved=True)
        self.fully_con1 = torch.nn.Linear(1000, 1)

    def forward(self, data, prob, batch=None):
        """Score every node of `data`; `prob` is the dropout probability."""
        h, edge_index, edge_weight = data.x, data.edge_index, data.edge_attr
        h = F.relu(self.conv1(h, edge_index, edge_weight=edge_weight))
        h = F.relu(self.conv2(h, edge_index, edge_weight=edge_weight))
        h = F.dropout(h, p=prob)
        return self.fully_con1(h)
class PolicyGCN(torch.nn.Module):
    """Two-layer GCN policy head: softmax over the masked nodes per graph."""

    def __init__(self):
        super(PolicyGCN, self).__init__()
        self.conv1 = GCNConv(5, 1000, improved=True)
        self.conv2 = GCNConv(1000, 1000, improved=True)
        self.fully_con1 = torch.nn.Linear(1000, 1)

    def forward(self, data, mask, batch=None):
        """Return per-node action probabilities restricted to `mask`."""
        h, edge_index, edge_weight = data.x, data.edge_index, data.edge_attr
        h = F.relu(self.conv1(h, edge_index, edge_weight=edge_weight))
        h = F.relu(self.conv2(h, edge_index, edge_weight=edge_weight))
        h = F.dropout(h)
        h = self.fully_con1(h)
        # Keep only the masked node scores, then normalize per graph.
        logits = torch.masked_select(h.view(-1), mask)
        masked_batch = torch.masked_select(batch, mask)
        return softmax(logits, masked_batch)
class ValueGCN(torch.nn.Module):
    """Two-layer GCN value head: one scalar per graph via mean pooling."""

    def __init__(self):
        super(ValueGCN, self).__init__()
        self.conv1 = GCNConv(5, 1000, improved=True)
        self.conv2 = GCNConv(1000, 1000, improved=True)
        self.fully_con1 = torch.nn.Linear(1000, 100)

    def forward(self, data, mask, batch=None):
        """Return one value per graph in the batch (`mask` is unused here)."""
        h, edge_index, edge_weight = data.x, data.edge_index, data.edge_attr
        h = F.relu(self.conv1(h, edge_index, edge_weight=edge_weight))
        h = F.relu(self.conv2(h, edge_index, edge_weight=edge_weight))
        h = F.dropout(h)
        h = self.fully_con1(h)
        # Average over nodes per graph, then over the 100 features.
        return global_mean_pool(h, batch).mean(dim=1)
class GGNN(torch.nn.Module):
    """Gated graph network producing one scalar score per node."""

    def __init__(self):
        super(GGNN, self).__init__()
        self.gconv1 = GatedGraphConv(1000, 3)
        self.fully_con1 = torch.nn.Linear(1000, 1)

    def forward(self, data, prob, batch=None):
        """Score every node of `data`; `prob` is the dropout probability."""
        h, edge_index, edge_weight = data.x, data.edge_index, data.edge_attr
        h = F.relu(self.gconv1(h, edge_index, edge_weight=edge_weight))
        h = F.dropout(h, p=prob)
        return self.fully_con1(h)
class PolicyGGNN(torch.nn.Module):
    """Gated graph policy head: softmax over the masked nodes per graph."""

    def __init__(self):
        super(PolicyGGNN, self).__init__()
        self.gconv1 = GatedGraphConv(1000, 3)
        self.fully_con1 = torch.nn.Linear(1000, 1)

    def forward(self, data, mask, batch=None):
        """Return per-node action probabilities restricted to `mask`."""
        h, edge_index, edge_weight = data.x, data.edge_index, data.edge_attr
        h = F.relu(self.gconv1(h, edge_index, edge_weight=edge_weight))
        h = F.dropout(h)
        h = self.fully_con1(h)
        # Keep only the masked node scores, then normalize per graph.
        logits = torch.masked_select(h.view(-1), mask)
        masked_batch = torch.masked_select(batch, mask)
        return softmax(logits, masked_batch)
class ValueGGNN(torch.nn.Module):
    """Gated graph value head: one scalar per graph via mean pooling."""

    def __init__(self):
        super(ValueGGNN, self).__init__()
        self.gconv1 = GatedGraphConv(1000, 3)
        self.fully_con1 = torch.nn.Linear(1000, 100)

    def forward(self, data, mask, batch=None):
        """Return one value per graph in the batch (`mask` is unused here)."""
        h, edge_index, edge_weight = data.x, data.edge_index, data.edge_attr
        h = F.relu(self.gconv1(h, edge_index, edge_weight=edge_weight))
        h = F.dropout(h)
        h = self.fully_con1(h)
        # Average over nodes per graph, then over the 100 features.
        return global_mean_pool(h, batch).mean(dim=1)
class GraphUNet(torch.nn.Module):
    """Graph U-Net: GCN/TopK-pooling encoder, symmetric unpooling decoder
    with skip connections, and a final 1-unit per-node regression head.
    """

    def __init__(self, in_channels, hidden_channels, out_channels, depth,
                 pool_ratios=0.5, sum_res=True, act=F.relu):
        super(GraphUNet, self).__init__()
        assert depth >= 1
        self.in_channels = in_channels
        self.hidden_channels = hidden_channels
        self.out_channels = out_channels
        self.depth = depth
        # One pooling ratio per level (repeat broadcasts a scalar).
        self.pool_ratios = repeat(pool_ratios, depth)
        self.act = act
        self.sum_res = sum_res
        channels = hidden_channels
        self.down_convs = torch.nn.ModuleList()
        self.pools = torch.nn.ModuleList()
        self.down_convs.append(GCNConv(in_channels, channels, improved=True))
        for i in range(depth):
            self.pools.append(TopKPooling(channels, self.pool_ratios[i]))
            self.down_convs.append(GCNConv(channels, channels, improved=True))
        # Skip connections are either summed (same width) or concatenated
        # (doubled width) before each up-convolution.
        in_channels = channels if sum_res else 2 * channels
        self.up_convs = torch.nn.ModuleList()
        for i in range(depth - 1):
            self.up_convs.append(GCNConv(in_channels, channels, improved=True))
        self.up_convs.append(GCNConv(in_channels, out_channels, improved=True))
        self.fully_con1 = torch.nn.Linear(self.out_channels, 1)
        self.reset_parameters()

    def reset_parameters(self):
        # Re-initialize every learned sub-module.
        for conv in self.down_convs:
            conv.reset_parameters()
        for pool in self.pools:
            pool.reset_parameters()
        for conv in self.up_convs:
            conv.reset_parameters()

    def forward(self, data, prob, batch=None):
        # `prob` is the dropout probability applied before the output head.
        x, edge_index, edge_weight = data.x, data.edge_index, data.edge_attr
        """"""
        if batch is None:
            batch = edge_index.new_zeros(x.size(0))
        # edge_weight = x.new_ones(edge_index.size(1))
        x = self.down_convs[0](x, edge_index, edge_weight)
        x = self.act(x)
        # Encoder: record each level's activations and graph so the decoder
        # can restore them via the skip connections.
        xs = [x]
        edge_indices = [edge_index]
        edge_weights = [edge_weight]
        perms = []
        for i in range(1, self.depth + 1):
            # Densify the adjacency (self-loops + sparse square) so pooling
            # keeps nodes connected, then pool and convolve.
            edge_index, edge_weight = self.augment_adj(edge_index, edge_weight,
                                                       x.size(0))
            x, edge_index, edge_weight, batch, perm, _ = self.pools[i - 1](
                x, edge_index, edge_weight, batch)
            x = self.down_convs[i](x, edge_index, edge_weight)
            x = self.act(x)
            if i < self.depth:
                xs += [x]
                edge_indices += [edge_index]
                edge_weights += [edge_weight]
            perms += [perm]
        # Decoder: scatter the coarse features back (via the pooling perm)
        # into a zero tensor, merge with the skip connection, convolve.
        for i in range(self.depth):
            j = self.depth - 1 - i
            res = xs[j]
            edge_index = edge_indices[j]
            edge_weight = edge_weights[j]
            perm = perms[j]
            up = torch.zeros_like(res)
            up[perm] = x
            x = res + up if self.sum_res else torch.cat((res, up), dim=-1)
            x = self.up_convs[i](x, edge_index, edge_weight)
            x = self.act(x) if i < self.depth - 1 else x
        # Regression head: relu + dropout + linear, one value per node.
        x = F.relu(x)
        x = F.dropout(x, p=prob)
        x = self.fully_con1(x)
        return x

    def augment_adj(self, edge_index, edge_weight, num_nodes):
        # Add self-loops, sort, square the sparse adjacency with spspmm
        # (connecting two-hop neighbours), then strip the self-loops again.
        edge_index, edge_weight = add_self_loops(edge_index, edge_weight,
                                                 num_nodes=num_nodes)
        edge_index, edge_weight = sort_edge_index(edge_index, edge_weight,
                                                  num_nodes)
        edge_index, edge_weight = spspmm(edge_index, edge_weight, edge_index,
                                         edge_weight, num_nodes, num_nodes,
                                         num_nodes)
        edge_index, edge_weight = remove_self_loops(edge_index, edge_weight)
        return edge_index, edge_weight

    def __repr__(self):
        return '{}({}, {}, {}, depth={}, pool_ratios={})'.format(
            self.__class__.__name__, self.in_channels, self.hidden_channels,
            self.out_channels, self.depth, self.pool_ratios)
class PolicyGraphUNet(torch.nn.Module):
    """Graph U-Net policy head: same encoder/decoder as GraphUNet but ends
    with a masked per-graph softmax over node scores.
    """

    def __init__(self, in_channels, hidden_channels, out_channels, depth,
                 pool_ratios=0.5, sum_res=True, act=F.relu):
        super(PolicyGraphUNet, self).__init__()
        assert depth >= 1
        self.in_channels = in_channels
        self.hidden_channels = hidden_channels
        self.out_channels = out_channels
        self.depth = depth
        # One pooling ratio per level (repeat broadcasts a scalar).
        self.pool_ratios = repeat(pool_ratios, depth)
        self.act = act
        self.sum_res = sum_res
        channels = hidden_channels
        self.down_convs = torch.nn.ModuleList()
        self.pools = torch.nn.ModuleList()
        self.down_convs.append(GCNConv(in_channels, channels, improved=True))
        for i in range(depth):
            self.pools.append(TopKPooling(channels, self.pool_ratios[i]))
            self.down_convs.append(GCNConv(channels, channels, improved=True))
        # Skip connections are either summed (same width) or concatenated
        # (doubled width) before each up-convolution.
        in_channels = channels if sum_res else 2 * channels
        self.up_convs = torch.nn.ModuleList()
        for i in range(depth - 1):
            self.up_convs.append(GCNConv(in_channels, channels, improved=True))
        self.up_convs.append(GCNConv(in_channels, out_channels, improved=True))
        self.fully_con1 = torch.nn.Linear(self.out_channels, 1)
        self.reset_parameters()

    def reset_parameters(self):
        # Re-initialize every learned sub-module.
        for conv in self.down_convs:
            conv.reset_parameters()
        for pool in self.pools:
            pool.reset_parameters()
        for conv in self.up_convs:
            conv.reset_parameters()

    def forward(self, data, mask, batch=None):
        # `mask` selects the nodes eligible as actions.
        x, edge_index, edge_weight = data.x, data.edge_index, data.edge_attr
        """"""
        if batch is None:
            batch = edge_index.new_zeros(x.size(0))
        # Pooling rewrites `batch`; keep the full-resolution assignment for
        # the final masked softmax.
        old_batch = batch.clone()
        x = self.down_convs[0](x, edge_index, edge_weight)
        x = self.act(x)
        # Encoder: record each level's activations and graph for the skips.
        xs = [x]
        edge_indices = [edge_index]
        edge_weights = [edge_weight]
        perms = []
        for i in range(1, self.depth + 1):
            edge_index, edge_weight = self.augment_adj(edge_index, edge_weight,
                                                       x.size(0))
            x, edge_index, edge_weight, batch, perm, _ = self.pools[i - 1](
                x, edge_index, edge_weight, batch)
            x = self.down_convs[i](x, edge_index, edge_weight)
            x = self.act(x)
            if i < self.depth:
                xs += [x]
                edge_indices += [edge_index]
                edge_weights += [edge_weight]
            perms += [perm]
        # Decoder: unpool via the recorded perms and merge skip connections.
        for i in range(self.depth):
            j = self.depth - 1 - i
            res = xs[j]
            edge_index = edge_indices[j]
            edge_weight = edge_weights[j]
            perm = perms[j]
            up = torch.zeros_like(res)
            up[perm] = x
            x = res + up if self.sum_res else torch.cat((res, up), dim=-1)
            x = self.up_convs[i](x, edge_index, edge_weight)
            x = self.act(x) if i < self.depth - 1 else x
        # Policy head: score nodes, keep only masked ones, softmax per graph.
        x = F.relu(x)
        x = F.dropout(x)
        x = self.fully_con1(x)
        x = torch.masked_select(x.view(-1), mask)
        old_batch = torch.masked_select(old_batch, mask)
        x = softmax(x, old_batch)
        return x

    def augment_adj(self, edge_index, edge_weight, num_nodes):
        # Add self-loops, sort, square the sparse adjacency with spspmm
        # (connecting two-hop neighbours), then strip the self-loops again.
        edge_index, edge_weight = add_self_loops(edge_index, edge_weight,
                                                 num_nodes=num_nodes)
        edge_index, edge_weight = sort_edge_index(edge_index, edge_weight,
                                                  num_nodes)
        edge_index, edge_weight = spspmm(edge_index, edge_weight, edge_index,
                                         edge_weight, num_nodes, num_nodes,
                                         num_nodes)
        edge_index, edge_weight = remove_self_loops(edge_index, edge_weight)
        return edge_index, edge_weight

    def __repr__(self):
        return '{}({}, {}, {}, depth={}, pool_ratios={})'.format(
            self.__class__.__name__, self.in_channels, self.hidden_channels,
            self.out_channels, self.depth, self.pool_ratios)
class ValueGraphUNet(torch.nn.Module):
    """Graph U-Net value head: same encoder/decoder as GraphUNet but ends
    with per-graph mean pooling to one scalar value per graph.
    """

    def __init__(self, in_channels, hidden_channels, out_channels, depth,
                 pool_ratios=0.5, sum_res=True, act=F.relu):
        super(ValueGraphUNet, self).__init__()
        assert depth >= 1
        self.in_channels = in_channels
        self.hidden_channels = hidden_channels
        self.out_channels = out_channels
        self.depth = depth
        # One pooling ratio per level (repeat broadcasts a scalar).
        self.pool_ratios = repeat(pool_ratios, depth)
        self.act = act
        self.sum_res = sum_res
        channels = hidden_channels
        self.down_convs = torch.nn.ModuleList()
        self.pools = torch.nn.ModuleList()
        self.down_convs.append(GCNConv(in_channels, channels, improved=True))
        for i in range(depth):
            self.pools.append(TopKPooling(channels, self.pool_ratios[i]))
            self.down_convs.append(GCNConv(channels, channels, improved=True))
        # Skip connections are either summed (same width) or concatenated
        # (doubled width) before each up-convolution.
        in_channels = channels if sum_res else 2 * channels
        self.up_convs = torch.nn.ModuleList()
        for i in range(depth - 1):
            self.up_convs.append(GCNConv(in_channels, channels, improved=True))
        self.up_convs.append(GCNConv(in_channels, out_channels, improved=True))
        self.fully_con1 = torch.nn.Linear(self.out_channels, 100)
        self.reset_parameters()

    def reset_parameters(self):
        # Re-initialize every learned sub-module.
        for conv in self.down_convs:
            conv.reset_parameters()
        for pool in self.pools:
            pool.reset_parameters()
        for conv in self.up_convs:
            conv.reset_parameters()

    def forward(self, data, mask, batch=None):
        # `mask` is accepted for interface parity with PolicyGraphUNet but
        # is not used by this value head.
        x, edge_index, edge_weight = data.x, data.edge_index, data.edge_attr
        """"""
        if batch is None:
            batch = edge_index.new_zeros(x.size(0))
        # edge_weight = x.new_ones(edge_index.size(1))
        # Pooling rewrites `batch`; keep the full-resolution assignment for
        # the final per-graph pooling.
        old_batch = batch.clone()
        x = self.down_convs[0](x, edge_index, edge_weight)
        x = self.act(x)
        # Encoder: record each level's activations and graph for the skips.
        xs = [x]
        edge_indices = [edge_index]
        edge_weights = [edge_weight]
        perms = []
        for i in range(1, self.depth + 1):
            edge_index, edge_weight = self.augment_adj(edge_index, edge_weight,
                                                       x.size(0))
            x, edge_index, edge_weight, batch, perm, _ = self.pools[i - 1](
                x, edge_index, edge_weight, batch)
            x = self.down_convs[i](x, edge_index, edge_weight)
            x = self.act(x)
            if i < self.depth:
                xs += [x]
                edge_indices += [edge_index]
                edge_weights += [edge_weight]
            perms += [perm]
        # Decoder: unpool via the recorded perms and merge skip connections.
        for i in range(self.depth):
            j = self.depth - 1 - i
            res = xs[j]
            edge_index = edge_indices[j]
            edge_weight = edge_weights[j]
            perm = perms[j]
            up = torch.zeros_like(res)
            up[perm] = x
            x = res + up if self.sum_res else torch.cat((res, up), dim=-1)
            x = self.up_convs[i](x, edge_index, edge_weight)
            x = self.act(x) if i < self.depth - 1 else x
        # Value head: average over nodes per graph, then over the features.
        x = F.relu(x)
        x = F.dropout(x)
        x = self.fully_con1(x)
        x = global_mean_pool(x, old_batch).mean(dim=1)
        return x

    def augment_adj(self, edge_index, edge_weight, num_nodes):
        # Add self-loops, sort, square the sparse adjacency with spspmm
        # (connecting two-hop neighbours), then strip the self-loops again.
        edge_index, edge_weight = add_self_loops(edge_index, edge_weight,
                                                 num_nodes=num_nodes)
        edge_index, edge_weight = sort_edge_index(edge_index, edge_weight,
                                                  num_nodes)
        edge_index, edge_weight = spspmm(edge_index, edge_weight, edge_index,
                                         edge_weight, num_nodes, num_nodes,
                                         num_nodes)
        edge_index, edge_weight = remove_self_loops(edge_index, edge_weight)
        return edge_index, edge_weight

    def __repr__(self):
        return '{}({}, {}, {}, depth={}, pool_ratios={})'.format(
            self.__class__.__name__, self.in_channels, self.hidden_channels,
            self.out_channels, self.depth, self.pool_ratios)
| 36.443709
| 102
| 0.584287
| 2,183
| 16,509
| 4.158039
| 0.055428
| 0.09816
| 0.120304
| 0.150711
| 0.942162
| 0.940178
| 0.938746
| 0.919577
| 0.919577
| 0.919577
| 0
| 0.015699
| 0.309346
| 16,509
| 452
| 103
| 36.524336
| 0.780389
| 0.005391
| 0
| 0.912921
| 0
| 0
| 0.007319
| 0
| 0
| 0
| 0
| 0
| 0.008427
| 1
| 0.075843
| false
| 0
| 0.022472
| 0.008427
| 0.16573
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7ed62e2ad93a6b4e7685c0657a7b1246fa6d4571
| 2,202
|
py
|
Python
|
tools/test/config_test/test10/test_data.py
|
pradeep-gr/mbed-os5-onsemi
|
576d096f2d9933c39b8a220f486e9756d89173f2
|
[
"Apache-2.0"
] | 7
|
2017-01-15T16:37:41.000Z
|
2021-08-10T02:14:04.000Z
|
tools/test/config_test/test10/test_data.py
|
pradeep-gr/mbed-os5-onsemi
|
576d096f2d9933c39b8a220f486e9756d89173f2
|
[
"Apache-2.0"
] | 1
|
2017-02-20T10:48:02.000Z
|
2017-02-21T11:34:16.000Z
|
tools/test/config_test/test10/test_data.py
|
pradeep-gr/mbed-os5-onsemi
|
576d096f2d9933c39b8a220f486e9756d89173f2
|
[
"Apache-2.0"
] | 10
|
2018-02-05T03:24:55.000Z
|
2021-07-04T00:31:30.000Z
|
# This builds on top of test8 by adding target-conditional overrides in mbed_app_config.json.
expected_results = {
"base": {
"desc": "override values based on labels with libs, target params and target overrides (no labels)",
"app.app1": "v_app1",
"app.app2": "v_app2",
"lib1.p1": "v_p1_lib1_app",
"lib1.p2": "v_p2_lib1",
"lib1.p3": "v_p3_lib1_app",
"lib2.p1": "v_p1_lib2_app",
"lib2.p2": "v_p2_lib2",
"target.par1": "v_par1_target_app",
"target.par2": "v_par2_base",
"target.par3": "v_par3_base"
},
"b1": {
"desc": "override values based on labels with libs, target params and target overrides (first label)",
"app.app1": "v_app1[b1_label_label]",
"app.app2": "v_app2",
"lib1.p1": "v_p1_lib1_app",
"lib1.p2": "v_p2_lib1",
"lib1.p3": "v_p3_lib1_app",
"lib2.p1": "v_p1_lib2_app",
"lib2.p2": "v_p2_lib2[b1_label]",
"target.par1": "v_par1_target_app",
"target.par2": "v_par2_base",
"target.par3": "v_par3_base"
},
"b2": {
"desc": "override values based on labels with libs, target params and target overrides (second label)",
"app.app1": "v_app1",
"app.app2": "v_app2[b2_label]",
"lib1.p1": "v_p1_lib1_app",
"lib1.p2": "v_p2_lib1[b2_label]",
"lib1.p3": "v_p3_lib1_app",
"lib2.p1": "v_p1_lib2_app",
"lib2.p2": "v_p2_lib2[b2_label]",
"target.par1": "v_par1_target_app",
"target.par2": "v_par2_b2",
"target.par3": "v_par3_base"
},
"both": {
"desc": "override values based on labels with libs, target params and target overrides (both labels)",
"app.app1": "v_app1[b1_label_label]",
"app.app2": "v_app2[b2_label]",
"lib1.p1": "v_p1_lib1_app",
"lib1.p2": "v_p2_lib1[b2_label]",
"lib1.p3": "v_p3_lib1_app",
"lib2.p1": "v_p1_lib2_app[both_label]",
"lib2.p2": "v_p2_lib2[b2_label]",
"target.par1": "v_par1_target_app",
"target.par2": "v_par2_b2",
"target.par3": "v_par3_both",
"target.par4": "v_par4_app[both_label]"
}
}
| 37.965517
| 111
| 0.57584
| 319
| 2,202
| 3.642633
| 0.147335
| 0.020654
| 0.034423
| 0.079174
| 0.861446
| 0.839071
| 0.839071
| 0.839071
| 0.818417
| 0.818417
| 0
| 0.082977
| 0.255677
| 2,202
| 57
| 112
| 38.631579
| 0.625991
| 0.041326
| 0
| 0.654545
| 0
| 0
| 0.617354
| 0.043148
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7d1c9badd68d5cffdb964cb30d7f0c3acffe6358
| 42,876
|
py
|
Python
|
sdk/kamonohashi/op/rest/api/inference_api.py
|
ozota1/kamonohashi
|
49ab4c189d4e9d35e679ad91640c33397ede096e
|
[
"Apache-2.0"
] | null | null | null |
sdk/kamonohashi/op/rest/api/inference_api.py
|
ozota1/kamonohashi
|
49ab4c189d4e9d35e679ad91640c33397ede096e
|
[
"Apache-2.0"
] | null | null | null |
sdk/kamonohashi/op/rest/api/inference_api.py
|
ozota1/kamonohashi
|
49ab4c189d4e9d35e679ad91640c33397ede096e
|
[
"Apache-2.0"
] | 4
|
2019-07-24T02:15:54.000Z
|
2019-08-02T01:51:04.000Z
|
# coding: utf-8
"""
KAMONOHASHI API
A platform for deep learning # noqa: E501
OpenAPI spec version: v1
Contact: kamonohashi-support@jp.nssol.nipponsteel.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from kamonohashi.op.rest.api_client import ApiClient
class InferenceApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def add_inference_file(self, id, **kwargs): # noqa: E501
"""add_inference_file # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_inference_file(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param ComponentsAddFileInputModel model:
:return: TrainingApiModelsAttachedFileOutputModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_inference_file_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.add_inference_file_with_http_info(id, **kwargs) # noqa: E501
return data
    def add_inference_file_with_http_info(self, id, **kwargs):  # noqa: E501
        """add_inference_file  # noqa: E501

        POST a file-attachment model to inference `id`.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.add_inference_file_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int id: (required)
        :param ComponentsAddFileInputModel model:
        :return: TrainingApiModelsAttachedFileOutputModel
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['id', 'model']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of this frame's locals (self, id, kwargs, all_params);
        # must be taken before any further local is introduced.
        params = locals()
        # Fold recognised kwargs into params; anything else is rejected.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method add_inference_file" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `add_inference_file`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # 'model' (if given) is sent as the JSON request body.
        body_params = None
        if 'model' in params:
            body_params = params['model']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json-patch+json', 'application/json', 'text/json', 'application/*+json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        return self.api_client.call_api(
            '/api/v1/inferences/{id}/files', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='TrainingApiModelsAttachedFileOutputModel',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def complete_inference(self, id, **kwargs): # noqa: E501
"""complete_inference # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.complete_inference(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: InferenceApiModelsInferenceSimpleOutputModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.complete_inference_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.complete_inference_with_http_info(id, **kwargs) # noqa: E501
return data
    def complete_inference_with_http_info(self, id, **kwargs):  # noqa: E501
        """complete_inference  # noqa: E501

        POST to the `complete` endpoint of inference `id`.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.complete_inference_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int id: (required)
        :return: InferenceApiModelsInferenceSimpleOutputModel
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of this frame's locals; must precede any new local.
        params = locals()
        # Fold recognised kwargs into params; anything else is rejected.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method complete_inference" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `complete_inference`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # No request body for this endpoint.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        return self.api_client.call_api(
            '/api/v1/inferences/{id}/complete', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='InferenceApiModelsInferenceSimpleOutputModel',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def create_inference(self, **kwargs): # noqa: E501
"""create_inference # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_inference(async_req=True)
>>> result = thread.get()
:param async_req bool
:param TrainingApiModelsCreateInputModel model:
:return: InferenceApiModelsInferenceSimpleOutputModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_inference_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.create_inference_with_http_info(**kwargs) # noqa: E501
return data
    def create_inference_with_http_info(self, **kwargs):  # noqa: E501
        """create_inference  # noqa: E501

        POST a creation model to the `run` endpoint.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.create_inference_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param TrainingApiModelsCreateInputModel model:
        :return: InferenceApiModelsInferenceSimpleOutputModel
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['model']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of this frame's locals; must precede any new local.
        params = locals()
        # Fold recognised kwargs into params; anything else is rejected.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_inference" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # 'model' (if given) is sent as the JSON request body.
        body_params = None
        if 'model' in params:
            body_params = params['model']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json-patch+json', 'application/json', 'text/json', 'application/*+json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        return self.api_client.call_api(
            '/api/v1/inferences/run', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='InferenceApiModelsInferenceSimpleOutputModel',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def delete_inference(self, id, **kwargs): # noqa: E501
"""delete_inference # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_inference(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_inference_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_inference_with_http_info(id, **kwargs) # noqa: E501
return data
    def delete_inference_with_http_info(self, id, **kwargs):  # noqa: E501
        """delete_inference  # noqa: E501

        DELETE inference `id`.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.delete_inference_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int id: (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of this frame's locals; must precede any new local.
        params = locals()
        # Fold recognised kwargs into params; anything else is rejected.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_inference" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `delete_inference`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # No request body and no Accept header for this endpoint.
        body_params = None
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        return self.api_client.call_api(
            '/api/v1/inferences/{id}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def delete_inference_file(self, id, file_id, **kwargs): # noqa: E501
"""delete_inference_file # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_inference_file(id, file_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param int file_id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_inference_file_with_http_info(id, file_id, **kwargs) # noqa: E501
else:
(data) = self.delete_inference_file_with_http_info(id, file_id, **kwargs) # noqa: E501
return data
    def delete_inference_file_with_http_info(self, id, file_id, **kwargs):  # noqa: E501
        """delete_inference_file  # noqa: E501

        DELETE attached file `file_id` of inference `id`.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.delete_inference_file_with_http_info(id, file_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int id: (required)
        :param int file_id: (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['id', 'file_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of this frame's locals; must precede any new local.
        params = locals()
        # Fold recognised kwargs into params; anything else is rejected.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_inference_file" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `delete_inference_file`")  # noqa: E501
        # verify the required parameter 'file_id' is set
        if ('file_id' not in params or
                params['file_id'] is None):
            raise ValueError("Missing the required parameter `file_id` when calling `delete_inference_file`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501
        # Note: the URL template uses camelCase 'fileId' for this segment.
        if 'file_id' in params:
            path_params['fileId'] = params['file_id']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # No request body and no Accept header for this endpoint.
        body_params = None
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        return self.api_client.call_api(
            '/api/v1/inferences/{id}/files/{fileId}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_inference(self, id, **kwargs): # noqa: E501
"""get_inference # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_inference(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: InferenceApiModelsInferenceDetailsOutputModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_inference_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_inference_with_http_info(id, **kwargs) # noqa: E501
return data
    def get_inference_with_http_info(self, id, **kwargs):  # noqa: E501
        """get_inference  # noqa: E501

        GET the details of inference `id`.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_inference_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int id: (required)
        :return: InferenceApiModelsInferenceDetailsOutputModel
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of this frame's locals; must precede any new local.
        params = locals()
        # Fold recognised kwargs into params; anything else is rejected.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_inference" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `get_inference`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # No request body for this endpoint.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        return self.api_client.call_api(
            '/api/v1/inferences/{id}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='InferenceApiModelsInferenceDetailsOutputModel',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def halt_inference(self, id, **kwargs): # noqa: E501
"""halt_inference # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.halt_inference(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: InferenceApiModelsInferenceSimpleOutputModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.halt_inference_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.halt_inference_with_http_info(id, **kwargs) # noqa: E501
return data
    def halt_inference_with_http_info(self, id, **kwargs):  # noqa: E501
        """halt_inference  # noqa: E501

        POST to the `halt` endpoint of inference `id`.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.halt_inference_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int id: (required)
        :return: InferenceApiModelsInferenceSimpleOutputModel
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of this frame's locals; must precede any new local.
        params = locals()
        # Fold recognised kwargs into params; anything else is rejected.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method halt_inference" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `halt_inference`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # No request body for this endpoint.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        return self.api_client.call_api(
            '/api/v1/inferences/{id}/halt', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='InferenceApiModelsInferenceSimpleOutputModel',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def list_inference(self, **kwargs): # noqa: E501
"""list_inference # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_inference(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id:
:param str name:
:param str started_at:
:param str started_by:
:param str data_set:
:param str memo:
:param str status:
:param str entry_point:
:param str parent_id:
:param str parent_name:
:param int per_page:
:param int page:
:param bool with_total:
:return: list[InferenceApiModelsInferenceIndexOutputModel]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_inference_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.list_inference_with_http_info(**kwargs) # noqa: E501
return data
    def list_inference_with_http_info(self, **kwargs):  # noqa: E501
        """list_inference  # noqa: E501

        GET the inference index with optional filter/paging query params.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.list_inference_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id:
        :param str name:
        :param str started_at:
        :param str started_by:
        :param str data_set:
        :param str memo:
        :param str status:
        :param str entry_point:
        :param str parent_id:
        :param str parent_name:
        :param int per_page:
        :param int page:
        :param bool with_total:
        :return: list[InferenceApiModelsInferenceIndexOutputModel]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['id', 'name', 'started_at', 'started_by', 'data_set', 'memo', 'status', 'entry_point', 'parent_id', 'parent_name', 'per_page', 'page', 'with_total']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of this frame's locals; must precede any new local.
        params = locals()
        # Fold recognised kwargs into params; anything else is rejected.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method list_inference" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        # Map snake_case kwargs onto the API's query-string names
        # (PascalCase filters, camelCase paging keys).
        query_params = []
        if 'id' in params:
            query_params.append(('Id', params['id']))  # noqa: E501
        if 'name' in params:
            query_params.append(('Name', params['name']))  # noqa: E501
        if 'started_at' in params:
            query_params.append(('StartedAt', params['started_at']))  # noqa: E501
        if 'started_by' in params:
            query_params.append(('StartedBy', params['started_by']))  # noqa: E501
        if 'data_set' in params:
            query_params.append(('DataSet', params['data_set']))  # noqa: E501
        if 'memo' in params:
            query_params.append(('Memo', params['memo']))  # noqa: E501
        if 'status' in params:
            query_params.append(('Status', params['status']))  # noqa: E501
        if 'entry_point' in params:
            query_params.append(('EntryPoint', params['entry_point']))  # noqa: E501
        if 'parent_id' in params:
            query_params.append(('ParentId', params['parent_id']))  # noqa: E501
        if 'parent_name' in params:
            query_params.append(('ParentName', params['parent_name']))  # noqa: E501
        if 'per_page' in params:
            query_params.append(('perPage', params['per_page']))  # noqa: E501
        if 'page' in params:
            query_params.append(('page', params['page']))  # noqa: E501
        if 'with_total' in params:
            query_params.append(('withTotal', params['with_total']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        # No request body for this endpoint.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        return self.api_client.call_api(
            '/api/v1/inferences', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[InferenceApiModelsInferenceIndexOutputModel]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def list_inference_container_files(self, id, **kwargs): # noqa: E501
"""list_inference_container_files # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_inference_container_files(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str path:
:param bool with_url:
:return: StorageListResultInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_inference_container_files_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.list_inference_container_files_with_http_info(id, **kwargs) # noqa: E501
return data
    def list_inference_container_files_with_http_info(self, id, **kwargs):  # noqa: E501
        """list_inference_container_files  # noqa: E501

        GET the container-files listing of inference `id`.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.list_inference_container_files_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int id: (required)
        :param str path:
        :param bool with_url:
        :return: StorageListResultInfo
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['id', 'path', 'with_url']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of this frame's locals; must precede any new local.
        params = locals()
        # Fold recognised kwargs into params; anything else is rejected.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method list_inference_container_files" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `list_inference_container_files`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501
        # Optional filters travel as query-string parameters.
        query_params = []
        if 'path' in params:
            query_params.append(('path', params['path']))  # noqa: E501
        if 'with_url' in params:
            query_params.append(('withUrl', params['with_url']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        # No request body for this endpoint.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        return self.api_client.call_api(
            '/api/v1/inferences/{id}/container-files', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='StorageListResultInfo',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def list_inference_files(self, id, **kwargs): # noqa: E501
"""list_inference_files # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_inference_files(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param bool with_url:
:return: list[TrainingApiModelsAttachedFileOutputModel]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_inference_files_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.list_inference_files_with_http_info(id, **kwargs) # noqa: E501
return data
    def list_inference_files_with_http_info(self, id, **kwargs):  # noqa: E501
        """list_inference_files  # noqa: E501

        GET the attached-files listing of inference `id`.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.list_inference_files_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int id: (required)
        :param bool with_url:
        :return: list[TrainingApiModelsAttachedFileOutputModel]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['id', 'with_url']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of this frame's locals; must precede any new local.
        params = locals()
        # Fold recognised kwargs into params; anything else is rejected.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method list_inference_files" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `list_inference_files`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501
        # Optional 'with_url' travels as a query-string parameter.
        query_params = []
        if 'with_url' in params:
            query_params.append(('withUrl', params['with_url']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        # No request body for this endpoint.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        return self.api_client.call_api(
            '/api/v1/inferences/{id}/files', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[TrainingApiModelsAttachedFileOutputModel]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def update_inference(self, id, **kwargs): # noqa: E501
"""update_inference # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_inference(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param TrainingApiModelsEditInputModel model:
:return: InferenceApiModelsInferenceSimpleOutputModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_inference_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.update_inference_with_http_info(id, **kwargs) # noqa: E501
return data
def update_inference_with_http_info(self, id, **kwargs): # noqa: E501
"""update_inference # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_inference_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param TrainingApiModelsEditInputModel model:
:return: InferenceApiModelsInferenceSimpleOutputModel
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'model'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_inference" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_inference`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'model' in params:
body_params = params['model']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v1/inferences/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InferenceApiModelsInferenceSimpleOutputModel', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 37.943363
| 184
| 0.599893
| 4,775
| 42,876
| 5.132984
| 0.041047
| 0.049286
| 0.025133
| 0.032313
| 0.946797
| 0.934557
| 0.923909
| 0.919257
| 0.915218
| 0.906038
| 0
| 0.015801
| 0.304786
| 42,876
| 1,129
| 185
| 37.976971
| 0.806461
| 0.3039
| 0
| 0.777597
| 1
| 0
| 0.189533
| 0.059038
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037338
| false
| 0
| 0.006494
| 0
| 0.099026
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7d210c137b95926236a997f83da30f56ee797340
| 186
|
py
|
Python
|
echecs_hall/app/controller/mj_hall_controller/prop_controller.py
|
obespoir/echecs
|
e4bb8be1d360b6c568725aee4dfe4c037a855a49
|
[
"AFL-3.0"
] | 14
|
2020-03-22T14:03:51.000Z
|
2022-02-21T09:28:39.000Z
|
echecs_hall/app/controller/mj_hall_controller/prop_controller.py
|
obespoir/echecs
|
e4bb8be1d360b6c568725aee4dfe4c037a855a49
|
[
"AFL-3.0"
] | null | null | null |
echecs_hall/app/controller/mj_hall_controller/prop_controller.py
|
obespoir/echecs
|
e4bb8be1d360b6c568725aee4dfe4c037a855a49
|
[
"AFL-3.0"
] | 7
|
2020-03-22T13:57:43.000Z
|
2022-02-21T09:30:17.000Z
|
# coding=utf-8
from app.data_bridge import mj_hall_bridge
def get_all_prop_info():
    """Return detail info for all props.

    Thin controller-layer delegate to the mj_hall data bridge.
    :return: list of prop detail records, as provided by the bridge
    """
    all_props = mj_hall_bridge.get_all_prop_info()
    return all_props
| 16.909091
| 45
| 0.677419
| 28
| 186
| 4.107143
| 0.678571
| 0.104348
| 0.208696
| 0.243478
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006849
| 0.215054
| 186
| 11
| 45
| 16.909091
| 0.780822
| 0.22043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bc2c55227564aae2bfa22c14de09f90f79e5ea8a
| 23,789
|
py
|
Python
|
api_tests/comments/views/test_comment_report_detail.py
|
sf2ne/Playground
|
95b2d222d7ac43baca0249acbfc34e043d6a95b3
|
[
"Apache-2.0"
] | null | null | null |
api_tests/comments/views/test_comment_report_detail.py
|
sf2ne/Playground
|
95b2d222d7ac43baca0249acbfc34e043d6a95b3
|
[
"Apache-2.0"
] | 13
|
2020-03-24T15:29:41.000Z
|
2022-03-11T23:15:28.000Z
|
api_tests/comments/views/test_comment_report_detail.py
|
sf2ne/Playground
|
95b2d222d7ac43baca0249acbfc34e043d6a95b3
|
[
"Apache-2.0"
] | null | null | null |
from nose.tools import * # flake8: noqa
from datetime import datetime
from api.base.settings.defaults import API_BASE
from api_tests import utils as test_utils
from tests.base import ApiTestCase
from tests.factories import ProjectFactory, AuthUserFactory, CommentFactory
class TestReportDetailView(ApiTestCase):
    """Permission tests for GET/PUT/DELETE on a single comment report.

    Exercises project-level comments on private and public projects as the
    reporting contributor, the reported contributor, a logged-in
    non-contributor, and an anonymous user.  The repeated report dict,
    report-detail URL, and JSON-API payload are factored into private
    helpers; every public test method and its behavior are unchanged.
    """

    def setUp(self):
        super(TestReportDetailView, self).setUp()
        self.user = AuthUserFactory()            # project creator / reporter
        self.contributor = AuthUserFactory()     # author of the reported comment
        self.non_contributor = AuthUserFactory()
        self.payload = self._update_payload(self.user)

    def _update_payload(self, reporter):
        # JSON-API body used to update the report owned by `reporter`.
        return {
            'data': {
                'id': reporter._id,
                'type': 'comment_reports',
                'attributes': {
                    'category': 'spam',
                    'message': 'Spam is delicious.'
                }
            }
        }

    def _spam_report(self):
        # A fresh report entry; `date` is evaluated at call time.
        return {
            'category': 'spam',
            'text': 'This is spam',
            'date': datetime.utcnow(),
            'retracted': False,
        }

    def _report_url(self, comment, reporter):
        # Detail URL for `reporter`'s report on `comment`.
        return '/{}comments/{}/reports/{}/'.format(API_BASE, comment._id, reporter._id)

    def _set_up_private_project_comment_reports(self):
        self.private_project = ProjectFactory.build(is_public=False, creator=self.user)
        self.private_project.add_contributor(contributor=self.contributor, save=True)
        self.comment = CommentFactory.build(node=self.private_project, user=self.contributor)
        self.comment.reports = {self.user._id: self._spam_report()}
        self.comment.save()
        self.private_url = self._report_url(self.comment, self.user)

    def _set_up_public_project_comment_reports(self):
        self.public_project = ProjectFactory.build(is_public=True, creator=self.user)
        self.public_project.add_contributor(contributor=self.contributor, save=True)
        self.public_comment = CommentFactory.build(node=self.public_project, user=self.contributor)
        self.public_comment.reports = {self.user._id: self._spam_report()}
        self.public_comment.save()
        self.public_url = self._report_url(self.public_comment, self.user)

    def _add_non_contributor_report(self):
        # Attach a second report, authored by the non-contributor, to the
        # public comment; return its detail URL.
        self.public_comment.reports[self.non_contributor._id] = self._spam_report()
        self.public_comment.save()
        return self._report_url(self.public_comment, self.non_contributor)

    def test_private_node_reporting_contributor_can_view_report_detail(self):
        self._set_up_private_project_comment_reports()
        res = self.app.get(self.private_url, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        assert_equal(res.json['data']['id'], self.user._id)

    def test_private_node_reported_contributor_cannot_view_report_detail(self):
        self._set_up_private_project_comment_reports()
        res = self.app.get(self.private_url, auth=self.contributor.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_private_node_logged_in_non_contributor_cannot_view_report_detail(self):
        self._set_up_private_project_comment_reports()
        res = self.app.get(self.private_url, auth=self.non_contributor.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_private_node_logged_out_contributor_cannot_view_report_detail(self):
        self._set_up_private_project_comment_reports()
        res = self.app.get(self.private_url, expect_errors=True)
        assert_equal(res.status_code, 401)

    def test_public_node_reporting_contributor_can_view_report_detail(self):
        self._set_up_public_project_comment_reports()
        res = self.app.get(self.public_url, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        assert_equal(res.json['data']['id'], self.user._id)

    def test_public_node_reported_contributor_cannot_view_report_detail(self):
        self._set_up_public_project_comment_reports()
        res = self.app.get(self.public_url, auth=self.contributor.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_public_node_logged_in_non_contributor_cannot_view_other_users_report_detail(self):
        self._set_up_public_project_comment_reports()
        res = self.app.get(self.public_url, auth=self.non_contributor.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_public_node_logged_out_contributor_cannot_view_report_detail(self):
        self._set_up_public_project_comment_reports()
        res = self.app.get(self.public_url, expect_errors=True)
        assert_equal(res.status_code, 401)

    def test_public_node_logged_in_non_contributor_reporter_can_view_own_report_detail(self):
        self._set_up_public_project_comment_reports()
        url = self._add_non_contributor_report()
        res = self.app.get(url, auth=self.non_contributor.auth)
        assert_equal(res.status_code, 200)

    def test_private_node_reporting_contributor_can_update_report_detail(self):
        self._set_up_private_project_comment_reports()
        res = self.app.put_json_api(self.private_url, self.payload, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        assert_equal(res.json['data']['id'], self.user._id)
        assert_equal(res.json['data']['attributes']['message'], self.payload['data']['attributes']['message'])

    def test_private_node_reported_contributor_cannot_update_report_detail(self):
        self._set_up_private_project_comment_reports()
        res = self.app.put_json_api(self.private_url, self.payload, auth=self.contributor.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_private_node_logged_in_non_contributor_cannot_update_report_detail(self):
        self._set_up_private_project_comment_reports()
        res = self.app.put_json_api(self.private_url, self.payload, auth=self.non_contributor.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_private_node_logged_out_contributor_cannot_update_detail(self):
        self._set_up_private_project_comment_reports()
        res = self.app.put_json_api(self.private_url, self.payload, expect_errors=True)
        assert_equal(res.status_code, 401)

    def test_public_node_reporting_contributor_can_update_detail(self):
        self._set_up_public_project_comment_reports()
        res = self.app.put_json_api(self.public_url, self.payload, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        assert_equal(res.json['data']['id'], self.user._id)
        assert_equal(res.json['data']['attributes']['message'], self.payload['data']['attributes']['message'])

    def test_public_node_reported_contributor_cannot_update_detail(self):
        self._set_up_public_project_comment_reports()
        res = self.app.put_json_api(self.public_url, self.payload, auth=self.contributor.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_public_node_logged_in_non_contributor_cannot_update_other_users_report_detail(self):
        self._set_up_public_project_comment_reports()
        res = self.app.put_json_api(self.public_url, self.payload, auth=self.non_contributor.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_public_node_logged_out_contributor_cannot_update_report_detail(self):
        self._set_up_public_project_comment_reports()
        res = self.app.put_json_api(self.public_url, self.payload, expect_errors=True)
        assert_equal(res.status_code, 401)

    def test_public_node_logged_in_non_contributor_reporter_can_update_own_report_detail(self):
        self._set_up_public_project_comment_reports()
        url = self._add_non_contributor_report()
        payload = self._update_payload(self.non_contributor)
        res = self.app.put_json_api(url, payload, auth=self.non_contributor.auth)
        assert_equal(res.status_code, 200)
        assert_equal(res.json['data']['attributes']['message'], payload['data']['attributes']['message'])

    def test_private_node_reporting_contributor_can_delete_report_detail(self):
        self._set_up_private_project_comment_reports()
        # Use a separate comment so the fixture comment is left untouched.
        comment = CommentFactory.build(node=self.private_project, user=self.contributor)
        comment.reports = {self.user._id: self._spam_report()}
        comment.save()
        url = self._report_url(comment, self.user)
        res = self.app.delete_json_api(url, auth=self.user.auth)
        assert_equal(res.status_code, 204)

    def test_private_node_reported_contributor_cannot_delete_report_detail(self):
        self._set_up_private_project_comment_reports()
        res = self.app.delete_json_api(self.private_url, auth=self.contributor.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_private_node_logged_in_non_contributor_cannot_delete_report_detail(self):
        self._set_up_private_project_comment_reports()
        res = self.app.delete_json_api(self.private_url, auth=self.non_contributor.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_private_node_logged_out_contributor_cannot_delete_detail(self):
        self._set_up_private_project_comment_reports()
        res = self.app.delete_json_api(self.private_url, expect_errors=True)
        assert_equal(res.status_code, 401)

    def test_public_node_reporting_contributor_can_delete_detail(self):
        self._set_up_public_project_comment_reports()
        res = self.app.delete_json_api(self.public_url, auth=self.user.auth)
        assert_equal(res.status_code, 204)

    def test_public_node_reported_contributor_cannot_delete_detail(self):
        self._set_up_public_project_comment_reports()
        res = self.app.delete_json_api(self.public_url, auth=self.contributor.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_public_node_logged_in_non_contributor_cannot_delete_other_users_report_detail(self):
        self._set_up_public_project_comment_reports()
        res = self.app.delete_json_api(self.public_url, auth=self.non_contributor.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_public_node_logged_out_contributor_cannot_delete_report_detail(self):
        self._set_up_public_project_comment_reports()
        res = self.app.delete_json_api(self.public_url, expect_errors=True)
        assert_equal(res.status_code, 401)

    def test_public_node_logged_in_non_contributor_reporter_can_delete_own_report_detail(self):
        self._set_up_public_project_comment_reports()
        url = self._add_non_contributor_report()
        res = self.app.delete_json_api(url, auth=self.non_contributor.auth)
        assert_equal(res.status_code, 204)
class TestFileCommentReportDetailView(ApiTestCase):
    """Permission tests for GET/PUT/DELETE on a report of a file comment.

    Same matrix as the project-comment tests, but the reported comment
    targets a file attached to the project.  The repeated report dict,
    report-detail URL, and JSON-API payload are factored into private
    helpers; every public test method and its behavior are unchanged.
    """

    def setUp(self):
        super(TestFileCommentReportDetailView, self).setUp()
        self.user = AuthUserFactory()            # project creator / reporter
        self.contributor = AuthUserFactory()     # author of the reported comment
        self.non_contributor = AuthUserFactory()
        self.payload = self._update_payload(self.user)

    def _update_payload(self, reporter):
        # JSON-API body used to update the report owned by `reporter`.
        return {
            'data': {
                'id': reporter._id,
                'type': 'comment_reports',
                'attributes': {
                    'category': 'spam',
                    'message': 'Spam is delicious.'
                }
            }
        }

    def _spam_report(self):
        # A fresh report entry; `date` is evaluated at call time.
        return {
            'category': 'spam',
            'text': 'This is spam',
            'date': datetime.utcnow(),
            'retracted': False,
        }

    def _report_url(self, comment, reporter):
        # Detail URL for `reporter`'s report on `comment`.
        return '/{}comments/{}/reports/{}/'.format(API_BASE, comment._id, reporter._id)

    def _set_up_private_project_file_comment_reports(self):
        self.private_project = ProjectFactory.build(is_public=False, creator=self.user)
        self.private_project.add_contributor(contributor=self.contributor, save=True)
        self.file = test_utils.create_test_file(self.private_project, self.user)
        self.comment = CommentFactory.build(node=self.private_project, target=self.file.get_guid(), user=self.contributor)
        self.comment.reports = {self.user._id: self._spam_report()}
        self.comment.save()
        self.private_url = self._report_url(self.comment, self.user)

    def _set_up_public_project_file_comment_reports(self):
        self.public_project = ProjectFactory.build(is_public=True, creator=self.user)
        self.public_project.add_contributor(contributor=self.contributor, save=True)
        self.public_file = test_utils.create_test_file(self.public_project, self.user)
        self.public_comment = CommentFactory.build(node=self.public_project, target=self.public_file.get_guid(), user=self.contributor)
        self.public_comment.reports = {self.user._id: self._spam_report()}
        self.public_comment.save()
        self.public_url = self._report_url(self.public_comment, self.user)

    def _add_non_contributor_report(self):
        # Attach a second report, authored by the non-contributor, to the
        # public file comment; return its detail URL.
        self.public_comment.reports[self.non_contributor._id] = self._spam_report()
        self.public_comment.save()
        return self._report_url(self.public_comment, self.non_contributor)

    def test_private_node_reporting_contributor_can_view_report_detail(self):
        self._set_up_private_project_file_comment_reports()
        res = self.app.get(self.private_url, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        assert_equal(res.json['data']['id'], self.user._id)

    def test_private_node_reported_contributor_cannot_view_report_detail(self):
        self._set_up_private_project_file_comment_reports()
        res = self.app.get(self.private_url, auth=self.contributor.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_private_node_logged_in_non_contributor_cannot_view_report_detail(self):
        self._set_up_private_project_file_comment_reports()
        res = self.app.get(self.private_url, auth=self.non_contributor.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_private_node_logged_out_contributor_cannot_view_report_detail(self):
        self._set_up_private_project_file_comment_reports()
        res = self.app.get(self.private_url, expect_errors=True)
        assert_equal(res.status_code, 401)

    def test_public_node_reporting_contributor_can_view_report_detail(self):
        self._set_up_public_project_file_comment_reports()
        res = self.app.get(self.public_url, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        assert_equal(res.json['data']['id'], self.user._id)

    def test_public_node_reported_contributor_cannot_view_report_detail(self):
        self._set_up_public_project_file_comment_reports()
        res = self.app.get(self.public_url, auth=self.contributor.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_public_node_logged_in_non_contributor_cannot_view_other_users_report_detail(self):
        self._set_up_public_project_file_comment_reports()
        res = self.app.get(self.public_url, auth=self.non_contributor.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_public_node_logged_out_contributor_cannot_view_report_detail(self):
        self._set_up_public_project_file_comment_reports()
        res = self.app.get(self.public_url, expect_errors=True)
        assert_equal(res.status_code, 401)

    def test_public_node_logged_in_non_contributor_reporter_can_view_own_file_comment_report_detail(self):
        self._set_up_public_project_file_comment_reports()
        url = self._add_non_contributor_report()
        res = self.app.get(url, auth=self.non_contributor.auth)
        assert_equal(res.status_code, 200)

    def test_private_node_reporting_contributor_can_update_file_comment_report_detail(self):
        self._set_up_private_project_file_comment_reports()
        res = self.app.put_json_api(self.private_url, self.payload, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        assert_equal(res.json['data']['id'], self.user._id)
        assert_equal(res.json['data']['attributes']['message'], self.payload['data']['attributes']['message'])

    def test_private_node_reported_contributor_cannot_update_report_detail(self):
        self._set_up_private_project_file_comment_reports()
        res = self.app.put_json_api(self.private_url, self.payload, auth=self.contributor.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_private_node_logged_in_non_contributor_cannot_update_report_detail(self):
        self._set_up_private_project_file_comment_reports()
        res = self.app.put_json_api(self.private_url, self.payload, auth=self.non_contributor.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_private_node_logged_out_contributor_cannot_update_detail(self):
        self._set_up_private_project_file_comment_reports()
        res = self.app.put_json_api(self.private_url, self.payload, expect_errors=True)
        assert_equal(res.status_code, 401)

    def test_public_node_reporting_contributor_can_update_detail(self):
        self._set_up_public_project_file_comment_reports()
        res = self.app.put_json_api(self.public_url, self.payload, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        assert_equal(res.json['data']['id'], self.user._id)
        assert_equal(res.json['data']['attributes']['message'], self.payload['data']['attributes']['message'])

    def test_public_node_reported_contributor_cannot_update_detail(self):
        self._set_up_public_project_file_comment_reports()
        res = self.app.put_json_api(self.public_url, self.payload, auth=self.contributor.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_public_node_logged_in_non_contributor_cannot_update_other_user_report_detail(self):
        self._set_up_public_project_file_comment_reports()
        res = self.app.put_json_api(self.public_url, self.payload, auth=self.non_contributor.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_public_node_logged_out_contributor_cannot_update_report_detail(self):
        self._set_up_public_project_file_comment_reports()
        res = self.app.put_json_api(self.public_url, self.payload, expect_errors=True)
        assert_equal(res.status_code, 401)

    def test_public_node_logged_in_non_contributor_reporter_can_update_own_report_detail(self):
        self._set_up_public_project_file_comment_reports()
        url = self._add_non_contributor_report()
        payload = self._update_payload(self.non_contributor)
        res = self.app.put_json_api(url, payload, auth=self.non_contributor.auth)
        assert_equal(res.status_code, 200)
        assert_equal(res.json['data']['attributes']['message'], payload['data']['attributes']['message'])

    def test_private_node_reporting_contributor_can_delete_report_detail(self):
        self._set_up_private_project_file_comment_reports()
        # Use a separate comment so the fixture comment is left untouched.
        comment = CommentFactory.build(node=self.private_project, target=self.file.get_guid(), user=self.contributor)
        comment.reports = {self.user._id: self._spam_report()}
        comment.save()
        url = self._report_url(comment, self.user)
        res = self.app.delete_json_api(url, auth=self.user.auth)
        assert_equal(res.status_code, 204)

    def test_private_node_reported_contributor_cannot_delete_report_detail(self):
        self._set_up_private_project_file_comment_reports()
        res = self.app.delete_json_api(self.private_url, auth=self.contributor.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_private_node_logged_in_non_contributor_cannot_delete_report_detail(self):
        self._set_up_private_project_file_comment_reports()
        res = self.app.delete_json_api(self.private_url, auth=self.non_contributor.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_private_node_logged_out_contributor_cannot_delete_detail(self):
        self._set_up_private_project_file_comment_reports()
        res = self.app.delete_json_api(self.private_url, expect_errors=True)
        assert_equal(res.status_code, 401)

    def test_public_node_reporting_contributor_can_delete_detail(self):
        self._set_up_public_project_file_comment_reports()
        res = self.app.delete_json_api(self.public_url, auth=self.user.auth)
        assert_equal(res.status_code, 204)

    def test_public_node_reported_contributor_cannot_delete_detail(self):
        self._set_up_public_project_file_comment_reports()
        res = self.app.delete_json_api(self.public_url, auth=self.contributor.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_public_node_logged_in_non_contributor_cannot_delete_other_users_report_detail(self):
        self._set_up_public_project_file_comment_reports()
        res = self.app.delete_json_api(self.public_url, auth=self.non_contributor.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_public_node_logged_out_contributor_cannot_delete_report_detail(self):
        self._set_up_public_project_file_comment_reports()
        res = self.app.delete_json_api(self.public_url, expect_errors=True)
        assert_equal(res.status_code, 401)

    def test_public_node_logged_in_non_contributor_reporter_can_delete_own_report_detail(self):
        self._set_up_public_project_file_comment_reports()
        url = self._add_non_contributor_report()
        res = self.app.delete_json_api(url, auth=self.non_contributor.auth)
        assert_equal(res.status_code, 204)
| 50.614894
| 135
| 0.70747
| 3,084
| 23,789
| 5.030804
| 0.031777
| 0.066774
| 0.06136
| 0.059169
| 0.974154
| 0.970545
| 0.970545
| 0.965582
| 0.963648
| 0.956945
| 0
| 0.008429
| 0.187145
| 23,789
| 469
| 136
| 50.722815
| 0.793918
| 0.000504
| 0
| 0.886139
| 0
| 0
| 0.058549
| 0.013123
| 0
| 0
| 0
| 0
| 0.168317
| 1
| 0.148515
| false
| 0
| 0.014851
| 0
| 0.168317
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cb23b05a06fc023fd8a7b125576bdf7abfaa364f
| 28,700
|
py
|
Python
|
backend/api/tests/ReviewTestCase.py
|
kukiamarilla/polijira
|
510dbc1473db973ac71fc68fa5a9b758b90a780b
|
[
"MIT"
] | 1
|
2022-03-02T02:28:49.000Z
|
2022-03-02T02:28:49.000Z
|
backend/api/tests/ReviewTestCase.py
|
kukiamarilla/polijira
|
510dbc1473db973ac71fc68fa5a9b758b90a780b
|
[
"MIT"
] | 22
|
2021-09-01T17:44:25.000Z
|
2021-10-07T19:39:09.000Z
|
backend/api/tests/ReviewTestCase.py
|
kukiamarilla/polijira
|
510dbc1473db973ac71fc68fa5a9b758b90a780b
|
[
"MIT"
] | null | null | null |
from django.test import TestCase, Client
from backend.api.models import UserStory, Review, PermisoProyecto, SprintBacklog
class ReviewTestCase(TestCase):
"""
ReviewTestCase Prueba las funcionalidades de Review
"""
fixtures = [
"backend/api/fixtures/testing/auth.json",
"backend/api/fixtures/testing/usuarios.json",
"backend/api/fixtures/testing/permisos.json",
"backend/api/fixtures/testing/roles.json",
"backend/api/fixtures/testing/permisosProyecto.json",
"backend/api/fixtures/testing/proyectos.json",
"backend/api/fixtures/testing/plantillas.json",
"backend/api/fixtures/testing/rolesProyecto.json",
"backend/api/fixtures/testing/miembros.json",
"backend/api/fixtures/testing/horarios.json",
"backend/api/fixtures/testing/user-stories.json",
"backend/api/fixtures/testing/product-backlogs.json",
"backend/api/fixtures/testing/registro-user-stories.json",
"backend/api/fixtures/testing/sprints.json",
"backend/api/fixtures/testing/sprintbacklogs.json",
"backend/api/fixtures/testing/miembrosprints.json",
"backend/api/fixtures/testing/reviews.json"
]
def setUp(self):
"""
setUp Configura el testcase
"""
self.client = Client()
def test_obtener_review(self):
"""
test_obtener_review
Prueba obtener un review de user story especificado
"""
print("\nProbando obtener un review de user story especificado.")
self.client.login(username="testing", password="polijira2021")
review = Review.objects.get(pk=1)
response = self.client.get("/api/reviews/" + str(review.id) + "/")
body = response.json()
self.assertEquals(response.status_code, 200)
self.assertEqual(body["id"], review.id)
self.assertEqual(body["user_story"], review.user_story.pk)
self.assertEqual(body["observacion"], review.observacion)
def test_obtener_review_sin_permiso(self):
"""
test_obtener_review_sin_permiso
Prueba obtener un review de user story especificado sin permiso
"""
print("\nProbando obtener un review de user story especificado sin permiso.")
self.client.login(username="testing", password="polijira2021")
PermisoProyecto.objects.get(codigo="ver_user_stories").delete()
review = Review.objects.get(pk=1)
response = self.client.get("/api/reviews/" + str(review.id) + "/")
body = response.json()
self.assertEquals(response.status_code, 403)
self.assertEqual(body["message"], "No tiene permiso para realizar esta acción")
self.assertEqual(body["permission_required"], ["ver_user_stories"])
self.assertEqual(body["error"], "forbidden")
def test_obtener_review_con_review_inexistente(self):
"""
test_obtener_review_con_review_inexistente
Prueba obtener un review de user story especificado con review inexistente
"""
print("\nProbando obtener un review de user story especificado con review inexistente.")
self.client.login(username="testing", password="polijira2021")
response = self.client.get("/api/reviews/99/")
body = response.json()
self.assertEquals(response.status_code, 404)
self.assertEqual(body["message"], "No existe review del User Story")
self.assertEqual(body["error"], "not_found")
def test_obtener_review_sin_ser_miembro_del_proyecto(self):
"""
test_obtener_review_sin_ser_miembro_del_proyecto
Prueba obtener un review de user story especificado sin ser miembro del proyecto
"""
print("\nProbando obtener un review de user story especificado sin ser miembro del proyecto.")
self.client.login(username="user_test", password="polijira2021")
review = Review.objects.get(pk=1)
response = self.client.get("/api/reviews/" + str(review.id) + "/")
body = response.json()
self.assertEquals(response.status_code, 403)
self.assertEqual(body["message"], "Usted no es miembro de este Proyecto")
self.assertEqual(body["error"], "forbidden")
def test_crear_review(self):
"""
test_crear_review
Prueba crear un review de user story
"""
print("\nProbando crear un review de User Story.")
self.client.login(username="testing", password="polijira2021")
request_data = {
"user_story": 2,
"observacion": "Corregir modelo"
}
sprint_backlog = SprintBacklog.objects.get(user_story=UserStory.objects.get(pk=2))
sprint_backlog.sprint.activar()
response = self.client.post("/api/reviews/", request_data, content_type="application/json")
body = response.json()
self.assertEqual(response.status_code, 200)
self.assertEqual(body["user_story"], request_data["user_story"])
self.assertEqual(body["observacion"], request_data["observacion"])
self.assertEqual(body["autor"]["user"]["username"], "testing")
def test_crear_review_sin_permiso_ver_user_stories(self):
"""
test_crear_review_sin_permiso_ver_user_stories
Prueba crear un review de user story sin permiso ver user stories
"""
print("\nProbando crear un review de User Story sin permiso ver user stories.")
self.client.login(username="testing", password="polijira2021")
PermisoProyecto.objects.get(codigo="ver_user_stories").delete()
request_data = {
"user_story": 2,
"observacion": "Corregir modelo"
}
sprint_backlog = SprintBacklog.objects.get(user_story=UserStory.objects.get(pk=2))
sprint_backlog.sprint.activar()
response = self.client.post("/api/reviews/", request_data, content_type="application/json")
body = response.json()
self.assertEqual(response.status_code, 403)
self.assertEqual(body["message"], "No tiene permiso para realizar esta acción")
self.assertEqual(body["permission_required"], ["ver_user_stories", "crear_reviews"])
self.assertEqual(body["error"], "forbidden")
def test_crear_review_sin_permiso_crear_reviews(self):
"""
test_crear_review_sin_permiso_crear_reviews
Prueba crear un review de user story sin permiso crear reviews
"""
print("\nProbando crear un review de User Story sin permiso crear reviews.")
self.client.login(username="testing", password="polijira2021")
PermisoProyecto.objects.get(codigo="crear_reviews").delete()
request_data = {
"user_story": 2,
"observacion": "Corregir modelo"
}
sprint_backlog = SprintBacklog.objects.get(user_story=UserStory.objects.get(pk=2))
sprint_backlog.sprint.activar()
response = self.client.post("/api/reviews/", request_data, content_type="application/json")
body = response.json()
self.assertEqual(response.status_code, 403)
self.assertEqual(body["message"], "No tiene permiso para realizar esta acción")
self.assertEqual(body["permission_required"], ["ver_user_stories", "crear_reviews"])
self.assertEqual(body["error"], "forbidden")
def test_crear_review_con_user_story_no_pendiente(self):
    """Creating a review must return 403 when the user story is not in a pending state."""
    print("\nProbando crear un review de User Story con estado no pendiente.")
    self.client.login(username="testing", password="polijira2021")
    story = UserStory.objects.get(pk=2)
    backlog = SprintBacklog.objects.get(user_story=story)
    backlog.sprint.activar()
    # Move the user story out of the pending state before posting.
    story.estado = "E"
    story.save()
    payload = {
        "user_story": 2,
        "observacion": "Corregir modelo"
    }
    resp = self.client.post("/api/reviews/", payload, content_type="application/json")
    data = resp.json()
    self.assertEqual(resp.status_code, 403)
    self.assertEqual(data["message"], "No se puede crear review en el estado actual del User Story")
    self.assertEqual(data["error"], "forbidden")
def test_crear_review_sin_sprint_activo(self):
    """Creating a review must return 403 when the user story's sprint is not active."""
    print("\nProbando crear un review de User Story sin sprint en estado activo.")
    self.client.login(username="testing", password="polijira2021")
    payload = {
        "user_story": 2,
        "observacion": "Corregir modelo"
    }
    # The sprint is deliberately left inactive here.
    resp = self.client.post("/api/reviews/", payload, content_type="application/json")
    data = resp.json()
    self.assertEqual(resp.status_code, 403)
    self.assertEqual(data["message"], "No se puede crear review si el user story no está en un sprint activo")
    self.assertEqual(data["error"], "forbidden")
def test_crear_review_sin_ser_miembro(self):
    """Creating a review must return 403 when the requester is not a project member."""
    print("\nProbando crear un review de User Story sin ser miembro.")
    # "user_test" is not a member of the project.
    self.client.login(username="user_test", password="polijira2021")
    story = UserStory.objects.get(pk=2)
    backlog = SprintBacklog.objects.get(user_story=story)
    backlog.sprint.activar()
    payload = {
        "user_story": 2,
        "observacion": "Corregir modelo"
    }
    resp = self.client.post("/api/reviews/", payload, content_type="application/json")
    data = resp.json()
    self.assertEqual(resp.status_code, 403)
    self.assertEqual(data["message"], "Usted no es miembro de este Proyecto")
    self.assertEqual(data["error"], "forbidden")
def test_crear_review_con_user_story_sin_especificar(self):
    """Creating a review must return 422 when the 'user_story' field is omitted."""
    print("\nProbando crear un review de User Story con user story sin especificar.")
    self.client.login(username="testing", password="polijira2021")
    story = UserStory.objects.get(pk=2)
    backlog = SprintBacklog.objects.get(user_story=story)
    backlog.sprint.activar()
    # Payload intentionally omits "user_story".
    payload = {
        "observacion": "Corregir modelo"
    }
    resp = self.client.post("/api/reviews/", payload, content_type="application/json")
    data = resp.json()
    self.assertEqual(resp.status_code, 422)
    self.assertEqual(data["message"], "Error de validación")
    self.assertEqual(data["errors"]["user_story"], ["No especificaste el user story"])
def test_crear_review_con_observacion_sin_especificar(self):
    """Creating a review must return 422 when the 'observacion' field is omitted."""
    print("\nProbando crear un review de User Story con observación sin especificar.")
    self.client.login(username="testing", password="polijira2021")
    story = UserStory.objects.get(pk=2)
    backlog = SprintBacklog.objects.get(user_story=story)
    backlog.sprint.activar()
    # Payload intentionally omits "observacion".
    payload = {
        "user_story": 2
    }
    resp = self.client.post("/api/reviews/", payload, content_type="application/json")
    data = resp.json()
    self.assertEqual(resp.status_code, 422)
    self.assertEqual(data["message"], "Error de validación")
    self.assertEqual(data["errors"]["observacion"], ["No especificaste la observacion"])
def test_crear_review_con_user_story_inexistente(self):
    """Creating a review must return 422 when the referenced user story does not exist."""
    print("\nProbando crear un review de User Story con user story inexistente.")
    self.client.login(username="testing", password="polijira2021")
    story = UserStory.objects.get(pk=2)
    backlog = SprintBacklog.objects.get(user_story=story)
    backlog.sprint.activar()
    # pk 99 does not exist in the fixtures.
    payload = {
        "user_story": 99,
        "observacion": "Corregir modelo"
    }
    resp = self.client.post("/api/reviews/", payload, content_type="application/json")
    data = resp.json()
    self.assertEqual(resp.status_code, 422)
    self.assertEqual(data["message"], "Error de validación")
    self.assertEqual(data["errors"]["user_story"], ["No se encontro el user story en la base de datos"])
def test_modificar_review(self):
    """Updating a review succeeds (200) and persists the new observation."""
    print("\nProbando modificar un review de User Story.")
    self.client.login(username="testing", password="polijira2021")
    story = UserStory.objects.get(pk=2)
    backlog = SprintBacklog.objects.get(user_story=story)
    backlog.sprint.activar()
    payload = {
        "observacion": "Volver a estimar horas"
    }
    resp = self.client.put("/api/reviews/1/", payload, content_type="application/json")
    data = resp.json()
    stored = Review.objects.get(pk=1)
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(data["user_story"], stored.user_story.pk)
    self.assertEqual(stored.observacion, payload["observacion"])
    self.assertEqual(data["autor"]["user"]["username"], "testing")
def test_modificar_review_sin_ser_autor(self):
    """Updating a review must return 403 and leave it unchanged when the requester is not its author."""
    print("\nProbando modificar un review de User Story sin ser autor.")
    # "user_testing" is a member but not the review's author.
    self.client.login(username="user_testing", password="polijira2021")
    story = UserStory.objects.get(pk=2)
    backlog = SprintBacklog.objects.get(user_story=story)
    backlog.sprint.activar()
    payload = {
        "observacion": "Volver a estimar horas"
    }
    resp = self.client.put("/api/reviews/1/", payload, content_type="application/json")
    data = resp.json()
    stored = Review.objects.get(pk=1)
    self.assertEqual(resp.status_code, 403)
    self.assertNotEqual(stored.observacion, payload["observacion"])
    # (sic) — asserts the API's literal message text.
    self.assertEqual(data["message"], "Usted no el autor de este review")
    self.assertEqual(data["error"], "forbidden")
def test_modificar_review_sin_permiso_modificar_reviews(self):
    """Updating a review must return 403 when the 'modificar_reviews' permission is missing."""
    print("\nProbando modificar un review de User Story sin permiso modificar reviews.")
    self.client.login(username="testing", password="polijira2021")
    # Revoke the permission under test.
    PermisoProyecto.objects.get(codigo="modificar_reviews").delete()
    story = UserStory.objects.get(pk=2)
    backlog = SprintBacklog.objects.get(user_story=story)
    backlog.sprint.activar()
    payload = {
        "observacion": "Volver a estimar horas"
    }
    resp = self.client.put("/api/reviews/1/", payload, content_type="application/json")
    data = resp.json()
    stored = Review.objects.get(pk=1)
    self.assertEqual(resp.status_code, 403)
    self.assertNotEqual(stored.observacion, payload["observacion"])
    self.assertEqual(data["message"], "No tiene permiso para realizar esta acción")
    self.assertEqual(data["permission_required"], ["ver_user_stories", "modificar_reviews"])
    self.assertEqual(data["error"], "forbidden")
def test_modificar_review_sin_permiso_ver_user_stories(self):
    """Updating a review must return 403 when the 'ver_user_stories' permission is missing."""
    print("\nProbando modificar un review de User Story sin permiso ver user stories.")
    self.client.login(username="testing", password="polijira2021")
    # Revoke the permission under test.
    PermisoProyecto.objects.get(codigo="ver_user_stories").delete()
    story = UserStory.objects.get(pk=2)
    backlog = SprintBacklog.objects.get(user_story=story)
    backlog.sprint.activar()
    payload = {
        "user_story": 2,
        "observacion": "Volver a estimar horas"
    }
    resp = self.client.put("/api/reviews/1/", payload, content_type="application/json")
    data = resp.json()
    stored = Review.objects.get(pk=1)
    self.assertEqual(resp.status_code, 403)
    self.assertNotEqual(stored.observacion, payload["observacion"])
    self.assertEqual(data["message"], "No tiene permiso para realizar esta acción")
    self.assertEqual(data["permission_required"], ["ver_user_stories", "modificar_reviews"])
    self.assertEqual(data["error"], "forbidden")
def test_modificar_review_con_user_story_no_pendiente(self):
    """Updating a review must return 403 when the user story is not in a pending state."""
    print("\nProbando modificar un review de User Story con estado no pendiente.")
    self.client.login(username="testing", password="polijira2021")
    story = UserStory.objects.get(pk=2)
    backlog = SprintBacklog.objects.get(user_story=story)
    backlog.sprint.activar()
    # Move the user story out of the pending state before the request.
    story.estado = "E"
    story.save()
    payload = {
        "observacion": "Corregir modelo"
    }
    resp = self.client.put("/api/reviews/1/", payload, content_type="application/json")
    data = resp.json()
    self.assertEqual(resp.status_code, 403)
    self.assertEqual(data["message"], "No se puede modificar review en el estado actual del User Story")
    self.assertEqual(data["error"], "forbidden")
def test_modificar_review_sin_sprint_activo(self):
    """Updating a review must return 403 and leave it unchanged when the sprint is not active."""
    print("\nProbando modificar un review de User Story sin sprint en estado activo.")
    self.client.login(username="testing", password="polijira2021")
    payload = {
        "observacion": "Volver a estimar horas"
    }
    # The sprint is deliberately left inactive here.
    resp = self.client.put("/api/reviews/1/", payload, content_type="application/json")
    data = resp.json()
    stored = Review.objects.get(pk=1)
    self.assertEqual(resp.status_code, 403)
    self.assertNotEqual(stored.observacion, payload["observacion"])
    self.assertEqual(data["message"], "No se puede modificar review si el user story no está en un sprint activo")
    self.assertEqual(data["error"], "forbidden")
def test_modificar_review_con_observacion_sin_especificar(self):
    """Updating a review must return 422 when the 'observacion' field is omitted."""
    print("\nProbando modificar un review de User Story con observación sin especificar.")
    self.client.login(username="testing", password="polijira2021")
    story = UserStory.objects.get(pk=2)
    backlog = SprintBacklog.objects.get(user_story=story)
    backlog.sprint.activar()
    # Empty payload: no "observacion" provided.
    payload = {}
    resp = self.client.put("/api/reviews/1/", payload, content_type="application/json")
    data = resp.json()
    self.assertEqual(resp.status_code, 422)
    self.assertEqual(data["message"], "Error de validación")
    self.assertEqual(data["errors"]["observacion"], ["No especificaste la observacion"])
def test_modificar_review_con_review_inexistente(self):
    """Updating a nonexistent review must return 404."""
    print("\nProbando modificar un review de User Story con review inexistente.")
    self.client.login(username="testing", password="polijira2021")
    payload = {
        "observacion": "Volver a estimar horas"
    }
    # pk 99 does not exist in the fixtures.
    resp = self.client.put("/api/reviews/99/", payload, content_type="application/json")
    data = resp.json()
    self.assertEqual(resp.status_code, 404)
    self.assertEqual(data["message"], "No existe el review especificado")
    self.assertEqual(data["error"], "not_found")
def test_modificar_review_sin_ser_miembro(self):
    """Updating a review must return 403 when the requester is not a project member."""
    print("\nProbando modificar un review de User Story sin ser miembro.")
    # "user_test" is not a member of the project.
    self.client.login(username="user_test", password="polijira2021")
    story = UserStory.objects.get(pk=2)
    backlog = SprintBacklog.objects.get(user_story=story)
    backlog.sprint.activar()
    payload = {
        "observacion": "Corregir modelo"
    }
    resp = self.client.put("/api/reviews/1/", payload, content_type="application/json")
    data = resp.json()
    self.assertEqual(resp.status_code, 403)
    self.assertEqual(data["message"], "Usted no es miembro de este Proyecto")
    self.assertEqual(data["error"], "forbidden")
def test_eliminar_review(self):
    """Deleting a review succeeds (200) and removes it from the database."""
    print("\nProbando eliminar un review de User Story.")
    self.client.login(username="testing", password="polijira2021")
    story = UserStory.objects.get(pk=2)
    backlog = SprintBacklog.objects.get(user_story=story)
    backlog.sprint.activar()
    resp = self.client.delete("/api/reviews/1/")
    data = resp.json()
    remaining = Review.objects.filter(pk=1)
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(len(remaining), 0)
    self.assertEqual(data["message"], "Review eliminado")
def test_eliminar_review_sin_ser_autor(self):
    """Deleting a review must return 403 and keep it when the requester is not its author."""
    print("\nProbando eliminar un review de User Story sin ser autor.")
    # "user_testing" is a member but not the review's author.
    self.client.login(username="user_testing", password="polijira2021")
    story = UserStory.objects.get(pk=2)
    backlog = SprintBacklog.objects.get(user_story=story)
    backlog.sprint.activar()
    resp = self.client.delete("/api/reviews/1/")
    data = resp.json()
    remaining = Review.objects.filter(pk=1)
    self.assertEqual(resp.status_code, 403)
    self.assertNotEqual(len(remaining), 0)
    # (sic) — asserts the API's literal message text.
    self.assertEqual(data["message"], "Usted no el autor de este review")
    self.assertEqual(data["error"], "forbidden")
def test_eliminar_review_sin_permiso_eliminar_reviews(self):
    """Deleting a review must return 403 when the 'eliminar_reviews' permission is missing."""
    print("\nProbando eliminar un review de User Story sin permiso eliminar reviews.")
    self.client.login(username="testing", password="polijira2021")
    # Revoke the permission under test.
    PermisoProyecto.objects.get(codigo="eliminar_reviews").delete()
    story = UserStory.objects.get(pk=2)
    backlog = SprintBacklog.objects.get(user_story=story)
    backlog.sprint.activar()
    resp = self.client.delete("/api/reviews/1/")
    data = resp.json()
    remaining = Review.objects.filter(pk=1)
    self.assertEqual(resp.status_code, 403)
    self.assertNotEqual(len(remaining), 0)
    self.assertEqual(data["message"], "No tiene permiso para realizar esta acción")
    self.assertEqual(data["permission_required"], ["ver_user_stories", "eliminar_reviews"])
    self.assertEqual(data["error"], "forbidden")
def test_eliminar_review_sin_permiso_ver_user_stories(self):
    """Deleting a review must return 403 when the 'ver_user_stories' permission is missing."""
    print("\nProbando eliminar un review de User Story sin permiso ver user stories.")
    self.client.login(username="testing", password="polijira2021")
    # Revoke the permission under test.
    PermisoProyecto.objects.get(codigo="ver_user_stories").delete()
    story = UserStory.objects.get(pk=2)
    backlog = SprintBacklog.objects.get(user_story=story)
    backlog.sprint.activar()
    resp = self.client.delete("/api/reviews/1/")
    data = resp.json()
    remaining = Review.objects.filter(pk=1)
    self.assertEqual(resp.status_code, 403)
    self.assertNotEqual(len(remaining), 0)
    self.assertEqual(data["message"], "No tiene permiso para realizar esta acción")
    self.assertEqual(data["permission_required"], ["ver_user_stories", "eliminar_reviews"])
    self.assertEqual(data["error"], "forbidden")
def test_eliminar_review_con_user_story_no_pendiente(self):
    """Deleting a review must return 403 when the user story is not in a pending state."""
    print("\nProbando eliminar un review de User Story con estado no pendiente.")
    self.client.login(username="testing", password="polijira2021")
    story = UserStory.objects.get(pk=2)
    backlog = SprintBacklog.objects.get(user_story=story)
    backlog.sprint.activar()
    # Move the user story out of the pending state before the request.
    story.estado = "E"
    story.save()
    resp = self.client.delete("/api/reviews/1/")
    data = resp.json()
    remaining = Review.objects.filter(pk=1)
    self.assertEqual(resp.status_code, 403)
    self.assertNotEqual(len(remaining), 0)
    self.assertEqual(data["message"], "No se puede eliminar review en el estado actual del User Story")
    self.assertEqual(data["error"], "forbidden")
def test_eliminar_review_sin_sprint_activo(self):
    """Deleting a review must return 403 and keep it when the sprint is not active."""
    print("\nProbando eliminar un review de User Story sin sprint en estado activo.")
    self.client.login(username="testing", password="polijira2021")
    # The sprint is deliberately left inactive here.
    resp = self.client.delete("/api/reviews/1/")
    data = resp.json()
    remaining = Review.objects.filter(pk=1)
    self.assertEqual(resp.status_code, 403)
    self.assertNotEqual(len(remaining), 0)
    self.assertEqual(data["message"], "No se puede eliminar review si el user story no está en un sprint activo")
    self.assertEqual(data["error"], "forbidden")
def test_eliminar_review_con_review_inexistente(self):
    """Deleting a nonexistent review must return 404 and leave existing reviews intact."""
    print("\nProbando eliminar un review de User Story con review inexistente.")
    self.client.login(username="testing", password="polijira2021")
    # pk 99 does not exist in the fixtures.
    resp = self.client.delete("/api/reviews/99/")
    data = resp.json()
    remaining = Review.objects.filter(pk=1)
    self.assertEqual(resp.status_code, 404)
    self.assertNotEqual(len(remaining), 0)
    self.assertEqual(data["message"], "No existe el review especificado")
    self.assertEqual(data["error"], "not_found")
def test_eliminar_review_sin_ser_miembro(self):
    """Deleting a review must return 403 when the requester is not a project member."""
    print("\nProbando eliminar un review de User Story sin ser miembro.")
    # "user_test" is not a member of the project.
    self.client.login(username="user_test", password="polijira2021")
    story = UserStory.objects.get(pk=2)
    backlog = SprintBacklog.objects.get(user_story=story)
    backlog.sprint.activar()
    resp = self.client.delete("/api/reviews/1/")
    data = resp.json()
    remaining = Review.objects.filter(pk=1)
    self.assertEqual(resp.status_code, 403)
    self.assertNotEqual(len(remaining), 0)
    self.assertEqual(data["message"], "Usted no es miembro de este Proyecto")
    self.assertEqual(data["error"], "forbidden")
| 49.482759
| 118
| 0.67338
| 3,355
| 28,700
| 5.597019
| 0.044113
| 0.063745
| 0.068804
| 0.044733
| 0.960965
| 0.934445
| 0.898125
| 0.85941
| 0.817659
| 0.80115
| 0
| 0.012701
| 0.215401
| 28,700
| 579
| 119
| 49.568221
| 0.82121
| 0.110488
| 0
| 0.692124
| 0
| 0
| 0.285749
| 0.030863
| 0
| 0
| 0
| 0
| 0.264916
| 1
| 0.073986
| false
| 0.071599
| 0.004773
| 0
| 0.083532
| 0.195704
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
cb7acba52c515fc64dc09f21e4b88a4df107cc69
| 90
|
py
|
Python
|
calc.py
|
eeti3084/eeti3084.github.io
|
0ec311999f9872e0ac08d5c14c8b9e2b60622b87
|
[
"MIT"
] | null | null | null |
calc.py
|
eeti3084/eeti3084.github.io
|
0ec311999f9872e0ac08d5c14c8b9e2b60622b87
|
[
"MIT"
] | null | null | null |
calc.py
|
eeti3084/eeti3084.github.io
|
0ec311999f9872e0ac08d5c14c8b9e2b60622b87
|
[
"MIT"
] | 1
|
2018-02-10T23:11:49.000Z
|
2018-02-10T23:11:49.000Z
|
def add(x, y):
    """Return the sum of x and y."""
    total = x + y
    return total
def sub(x, y):
    """Return x minus y."""
    difference = x - y
    return difference
def divide(x, y):
    """Return x divided by y (true division; raises ZeroDivisionError when y == 0)."""
    quotient = x / y
    return quotient
| 12.857143
| 16
| 0.566667
| 21
| 90
| 2.428571
| 0.333333
| 0.235294
| 0.470588
| 0.529412
| 0.705882
| 0.509804
| 0
| 0
| 0
| 0
| 0
| 0
| 0.266667
| 90
| 6
| 17
| 15
| 0.772727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
cb86f3c089f41d9c7732e9883c3c8248ee4e29b0
| 530
|
py
|
Python
|
Projetos/desafios/desa109/__init__.py
|
LucasDeAndradeMarin/Marin-python-training
|
9537f6ff26fa1f93e20aee306f35a71765d7fdd1
|
[
"MIT"
] | null | null | null |
Projetos/desafios/desa109/__init__.py
|
LucasDeAndradeMarin/Marin-python-training
|
9537f6ff26fa1f93e20aee306f35a71765d7fdd1
|
[
"MIT"
] | null | null | null |
Projetos/desafios/desa109/__init__.py
|
LucasDeAndradeMarin/Marin-python-training
|
9537f6ff26fa1f93e20aee306f35a71765d7fdd1
|
[
"MIT"
] | null | null | null |
def metade(p=0, formato=False):
    """Return half of p; any formato other than the False singleton yields a moeda() string."""
    half = p / 2
    # Identity check on purpose: only the exact False object skips formatting.
    if formato is False:
        return half
    return moeda(half)
def dobro(p=0, formato=False):
    """Return double of p; any formato other than the False singleton yields a moeda() string."""
    doubled = p * 2
    # Identity check on purpose: only the exact False object skips formatting.
    if formato is False:
        return doubled
    return moeda(doubled)
def aumento(p=0, taxa=0, formato=False):
    """Return p increased by taxa percent; formats via moeda() unless formato is the False singleton."""
    increased = p + (p * taxa / 100)
    if formato is False:
        return increased
    return moeda(increased)
def moeda(p=0, m='R$'):
    """Format p as a currency string with symbol m, two decimals, comma as decimal separator."""
    text = f'{m}{p:.2f}'
    return text.replace('.', ',')
def diminuir(p=0, taxa=0, formato=False):
    """Return p decreased by taxa percent; formats via moeda() unless formato is the False singleton."""
    decreased = p - (p * taxa / 100)
    if formato is False:
        return decreased
    return moeda(decreased)
| 24.090909
| 52
| 0.618868
| 92
| 530
| 3.565217
| 0.25
| 0.030488
| 0.158537
| 0.207317
| 0.820122
| 0.820122
| 0.820122
| 0.820122
| 0.820122
| 0.820122
| 0
| 0.039312
| 0.232075
| 530
| 22
| 53
| 24.090909
| 0.766585
| 0
| 0
| 0.285714
| 0
| 0
| 0.026365
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.357143
| false
| 0
| 0
| 0.071429
| 0.714286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
cba808cab48f4cbf7ea05a383b1bee5adfd7ef34
| 189
|
py
|
Python
|
hTools2.roboFontExt/lib/Scripts/selected glyphs/layers/copy to layer.py
|
frankrolf/hTools2_extension
|
9d73b8640c85209853a72f8d4b167768de5e0d60
|
[
"BSD-3-Clause"
] | 2
|
2019-12-18T16:12:07.000Z
|
2019-12-21T01:19:23.000Z
|
hTools2.roboFontExt/lib/Scripts/selected glyphs/layers/copy to layer.py
|
frankrolf/hTools2_extension
|
9d73b8640c85209853a72f8d4b167768de5e0d60
|
[
"BSD-3-Clause"
] | null | null | null |
hTools2.roboFontExt/lib/Scripts/selected glyphs/layers/copy to layer.py
|
frankrolf/hTools2_extension
|
9d73b8640c85209853a72f8d4b167768de5e0d60
|
[
"BSD-3-Clause"
] | null | null | null |
# [h] copy to layer
# RoboFont menu script: opens the hTools2 "copy to layer" dialog.
import hTools2.dialogs.glyphs.layers_copy
import importlib
# Reload the dialog module first so the freshest version of the code is used
# (presumably to pick up edits without restarting the host app — TODO confirm).
importlib.reload(hTools2.dialogs.glyphs.layers_copy)
hTools2.dialogs.glyphs.layers_copy.copyToLayerDialog()
| 23.625
| 54
| 0.835979
| 25
| 189
| 6.2
| 0.48
| 0.270968
| 0.387097
| 0.503226
| 0.580645
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017045
| 0.068783
| 189
| 7
| 55
| 27
| 0.863636
| 0.089947
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
3825eab466c037d8ca2b21a5c3d1ef764388ee55
| 8,428
|
py
|
Python
|
test/compliance_tool/test_compliance_check_aasx.py
|
eclipse-basyx/basyx-sdk-python
|
1249f49803a6ef5e594bb61410ad1c7939c2bdb7
|
[
"MIT"
] | 4
|
2022-01-07T01:30:49.000Z
|
2022-02-21T07:58:14.000Z
|
test/compliance_tool/test_compliance_check_aasx.py
|
eclipse-basyx/basyx-sdk-python
|
1249f49803a6ef5e594bb61410ad1c7939c2bdb7
|
[
"MIT"
] | 5
|
2022-02-22T15:24:22.000Z
|
2022-03-28T11:42:28.000Z
|
test/compliance_tool/test_compliance_check_aasx.py
|
eclipse-basyx/basyx-sdk-python
|
1249f49803a6ef5e594bb61410ad1c7939c2bdb7
|
[
"MIT"
] | 2
|
2021-11-15T10:24:02.000Z
|
2022-03-17T14:44:39.000Z
|
# Copyright (c) 2020 the Eclipse BaSyx Authors
#
# This program and the accompanying materials are made available under the terms of the MIT License, available in
# the LICENSE file of this project.
#
# SPDX-License-Identifier: MIT
import os
import unittest
import sys
from basyx.aas.compliance_tool import compliance_check_aasx as compliance_tool
from basyx.aas.compliance_tool.state_manager import ComplianceToolStateManager, Status
class ComplianceToolAASXTest(unittest.TestCase):
    """Tests for the AASX checks of the compliance tool, driven by fixture files in ./files."""

    def test_check_deserialization(self) -> None:
        """A non-OPC file fails at step 0 (step 1 skipped); a valid AASX passes both steps."""
        manager = ComplianceToolStateManager()
        script_dir = os.path.dirname(__file__)
        # Invalid input: not a valid ECMA376-2 (OPC) package.
        file_path_1 = os.path.join(script_dir, 'files/test_not_found.aasx')
        compliance_tool.check_deserialization(file_path_1, manager)
        self.assertEqual(2, len(manager.steps))
        self.assertEqual(Status.FAILED, manager.steps[0].status)
        self.assertIn("is not a valid ECMA376-2 (OPC) file", manager.format_step(0, verbose_level=1))
        self.assertEqual(Status.NOT_EXECUTED, manager.steps[1].status)
        # Todo add more tests for checking wrong aasx files
        # Reset the manager, then check the happy path on a valid archive.
        manager.steps = []
        file_path_5 = os.path.join(script_dir, 'files/test_demo_full_example.aasx')
        compliance_tool.check_deserialization(file_path_5, manager)
        self.assertEqual(2, len(manager.steps))
        self.assertEqual(Status.SUCCESS, manager.steps[0].status)
        self.assertEqual(Status.SUCCESS, manager.steps[1].status)

    def test_check_aas_example(self) -> None:
        """check_aas_example: all 4 steps succeed for conforming files; a wrong attribute fails step 2."""
        manager = ComplianceToolStateManager()
        script_dir = os.path.dirname(__file__)
        file_path_2 = os.path.join(script_dir, 'files/test_demo_full_example.aasx')
        compliance_tool.check_aas_example(file_path_2, manager)
        self.assertEqual(4, len(manager.steps))
        self.assertEqual(Status.SUCCESS, manager.steps[0].status)
        self.assertEqual(Status.SUCCESS, manager.steps[1].status)
        self.assertEqual(Status.SUCCESS, manager.steps[2].status)
        self.assertEqual(Status.SUCCESS, manager.steps[3].status)
        manager.steps = []
        file_path_3 = os.path.join(script_dir, 'files/test_demo_full_example2.aasx')
        compliance_tool.check_aas_example(file_path_3, manager)
        self.assertEqual(4, len(manager.steps))
        self.assertEqual(Status.SUCCESS, manager.steps[0].status)
        self.assertEqual(Status.SUCCESS, manager.steps[1].status)
        self.assertEqual(Status.SUCCESS, manager.steps[2].status)
        self.assertEqual(Status.SUCCESS, manager.steps[3].status)
        # A file with a wrong attribute: comparison (step 2) fails, step 3 is skipped.
        manager.steps = []
        file_path_4 = os.path.join(script_dir, 'files/test_demo_full_example_wrong_attribute.aasx')
        compliance_tool.check_aas_example(file_path_4, manager)
        self.assertEqual(4, len(manager.steps))
        self.assertEqual(Status.SUCCESS, manager.steps[0].status)
        self.assertEqual(Status.SUCCESS, manager.steps[1].status)
        self.assertEqual(Status.FAILED, manager.steps[2].status)
        self.assertIn('Attribute id_short of AssetAdministrationShell[Identifier(IRI=https://acplt.org/'
                      'Test_AssetAdministrationShell)] must be == TestAssetAdministrationShell',
                      manager.format_step(2, verbose_level=1))
        self.assertEqual(Status.NOT_EXECUTED, manager.steps[3].status)

    def test_check_aasx_files_equivalence(self) -> None:
        """check_aasx_files_equivalence: equal files pass all 6 steps; differing files fail step 4."""
        manager = ComplianceToolStateManager()
        script_dir = os.path.dirname(__file__)
        # Different files: equivalence check (step 4) fails, step 5 is skipped.
        file_path_1 = os.path.join(script_dir, 'files/test_demo_full_example.aasx')
        file_path_2 = os.path.join(script_dir, 'files/test_empty.aasx')
        compliance_tool.check_aasx_files_equivalence(file_path_1, file_path_2, manager)
        self.assertEqual(6, len(manager.steps))
        self.assertEqual(Status.SUCCESS, manager.steps[0].status)
        self.assertEqual(Status.SUCCESS, manager.steps[1].status)
        self.assertEqual(Status.SUCCESS, manager.steps[2].status)
        self.assertEqual(Status.SUCCESS, manager.steps[3].status)
        self.assertEqual(Status.FAILED, manager.steps[4].status)
        self.assertEqual(Status.NOT_EXECUTED, manager.steps[5].status)
        # Same pair in reverse order: same outcome.
        manager.steps = []
        compliance_tool.check_aasx_files_equivalence(file_path_2, file_path_1, manager)
        self.assertEqual(6, len(manager.steps))
        self.assertEqual(Status.SUCCESS, manager.steps[0].status)
        self.assertEqual(Status.SUCCESS, manager.steps[1].status)
        self.assertEqual(Status.SUCCESS, manager.steps[2].status)
        self.assertEqual(Status.SUCCESS, manager.steps[3].status)
        self.assertEqual(Status.FAILED, manager.steps[4].status)
        self.assertEqual(Status.NOT_EXECUTED, manager.steps[5].status)
        # Identical files: all steps succeed.
        manager.steps = []
        file_path_3 = os.path.join(script_dir, 'files/test_demo_full_example.aasx')
        file_path_4 = os.path.join(script_dir, 'files/test_demo_full_example.aasx')
        compliance_tool.check_aasx_files_equivalence(file_path_3, file_path_4, manager)
        self.assertEqual(6, len(manager.steps))
        self.assertEqual(Status.SUCCESS, manager.steps[0].status)
        self.assertEqual(Status.SUCCESS, manager.steps[1].status)
        self.assertEqual(Status.SUCCESS, manager.steps[2].status)
        self.assertEqual(Status.SUCCESS, manager.steps[3].status)
        self.assertEqual(Status.SUCCESS, manager.steps[4].status)
        self.assertEqual(Status.SUCCESS, manager.steps[5].status)
        # One attribute differs: equivalence fails with a descriptive message.
        manager.steps = []
        file_path_3 = os.path.join(script_dir, 'files/test_demo_full_example.aasx')
        file_path_4 = os.path.join(script_dir, 'files/test_demo_full_example_wrong_attribute.aasx')
        compliance_tool.check_aasx_files_equivalence(file_path_3, file_path_4, manager)
        self.assertEqual(6, len(manager.steps))
        self.assertEqual(Status.SUCCESS, manager.steps[0].status)
        self.assertEqual(Status.SUCCESS, manager.steps[1].status)
        self.assertEqual(Status.SUCCESS, manager.steps[2].status)
        self.assertEqual(Status.SUCCESS, manager.steps[3].status)
        self.assertEqual(Status.FAILED, manager.steps[4].status)
        self.assertIn('Attribute id_short of AssetAdministrationShell'
                      '[Identifier(IRI=https://acplt.org/Test_AssetAdministrationShell)] must be ==',
                      manager.format_step(4, verbose_level=1))
        self.assertEqual(Status.FAILED, manager.steps[4].status)
        # Reverse order of the differing pair: step 4 fails, step 5 is skipped.
        manager.steps = []
        compliance_tool.check_aasx_files_equivalence(file_path_4, file_path_3, manager)
        self.assertEqual(6, len(manager.steps))
        self.assertEqual(Status.SUCCESS, manager.steps[0].status)
        self.assertEqual(Status.SUCCESS, manager.steps[1].status)
        self.assertEqual(Status.SUCCESS, manager.steps[2].status)
        self.assertEqual(Status.SUCCESS, manager.steps[3].status)
        self.assertEqual(Status.FAILED, manager.steps[4].status)
        self.assertIn('Attribute id_short of AssetAdministrationShell'
                      '[Identifier(IRI=https://acplt.org/Test_AssetAdministrationShell)] must be ==',
                      manager.format_step(4, verbose_level=1))
        self.assertEqual(Status.NOT_EXECUTED, manager.steps[5].status)

    @unittest.skipIf(sys.version_info < (3, 7), "The XML schema check fails for Python <= 3.6")
    def test_check_schema(self):
        """check_schema: every produced step succeeds for all three fixture archives."""
        manager = ComplianceToolStateManager()
        script_dir = os.path.dirname(__file__)
        file_path_2 = os.path.join(script_dir, 'files/test_demo_full_example.aasx')
        compliance_tool.check_schema(file_path_2, manager)
        self.assertEqual(10, len(manager.steps))
        for i in range(10):
            self.assertEqual(Status.SUCCESS, manager.steps[i].status)
        manager.steps = []
        file_path_3 = os.path.join(script_dir, 'files/test_demo_full_example2.aasx')
        compliance_tool.check_schema(file_path_3, manager)
        # This archive yields only 4 steps (fewer documents to validate — TODO confirm).
        self.assertEqual(4, len(manager.steps))
        for i in range(4):
            self.assertEqual(Status.SUCCESS, manager.steps[i].status)
        manager.steps = []
        file_path_4 = os.path.join(script_dir, 'files/test_demo_full_example_wrong_attribute.aasx')
        compliance_tool.check_schema(file_path_4, manager)
        # A wrong attribute value is still schema-valid, so all steps succeed.
        self.assertEqual(10, len(manager.steps))
        for i in range(10):
            self.assertEqual(Status.SUCCESS, manager.steps[i].status)
| 53.341772
| 113
| 0.708472
| 1,076
| 8,428
| 5.345725
| 0.105948
| 0.148122
| 0.178894
| 0.180111
| 0.877782
| 0.866655
| 0.857962
| 0.825626
| 0.814325
| 0.804242
| 0
| 0.017334
| 0.178571
| 8,428
| 157
| 114
| 53.681529
| 0.81352
| 0.031917
| 0
| 0.707692
| 0
| 0
| 0.118513
| 0.073488
| 0
| 0
| 0
| 0.006369
| 0.507692
| 1
| 0.030769
| false
| 0
| 0.038462
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
6978073b68d0c51ea29d58f19e8b732befcca868
| 22,456
|
py
|
Python
|
tellurium/tests/sedml/test_kisao.py
|
kirichoi/tellurium
|
77cf6e794600587741ebe209644a78051e0db1d5
|
[
"Apache-2.0"
] | 1
|
2019-09-05T13:36:10.000Z
|
2019-09-05T13:36:10.000Z
|
tellurium/tests/sedml/test_kisao.py
|
kirichoi/tellurium
|
77cf6e794600587741ebe209644a78051e0db1d5
|
[
"Apache-2.0"
] | null | null | null |
tellurium/tests/sedml/test_kisao.py
|
kirichoi/tellurium
|
77cf6e794600587741ebe209644a78051e0db1d5
|
[
"Apache-2.0"
] | 1
|
2021-01-01T09:59:12.000Z
|
2021-01-01T09:59:12.000Z
|
"""
Testing the support of KISAO terms for SED-ML simulations.
test_sedml_phrasedml.py : phrasedml based tests.
test_sedml_kisao.py : SED-ML kisao support
test_sedml_omex.py : SED-ML tests based on Combine Archives
test_sedml_sedml.py : sed-ml tests
"""
from __future__ import absolute_import, print_function
import os
import shutil
import tempfile
import unittest
import pytest
import six
import matplotlib
import tellurium as te
try:
import tesedml as libsedml
except ImportError:
import libsedml
from tellurium.sedml import tesedml
from tellurium.utils import omex
class KisaoSedmlTestCase(unittest.TestCase):
    """Tests for KISAO support in SED-ML simulations.

    Each test builds a small phrasedml experiment, exports it as an inline
    COMBINE archive, and checks (via the helpers below) that the requested
    KISAO term is written into the SED-ML and translated into the matching
    roadrunner integrator/solver call in the generated Python code.
    """

    def setUp(self):
        # switch the backend of matplotlib, so plots can be tested
        self.test_dir = tempfile.mkdtemp()
        self.backend = matplotlib.rcParams['backend']
        matplotlib.pyplot.switch_backend("Agg")

        # create a test instance
        # NOTE(review): the dump this was recovered from lost indentation, so
        # the exact leading whitespace inside these antimony strings is a
        # best-effort reconstruction (antimony is whitespace-insensitive).
        self.antimony = '''
        model myModel
            S1 -> S2; k1*S1;
            S1 = 10; S2 = 0;
            k1 = 1;
        end
        '''
        self.phrasedml = '''
        model1 = model "myModel"
        sim1 = simulate uniform(0, 5, 100)
        task1 = run sim1 on model1
        plot "Figure 1" time vs S1, S2
        '''
        # self.tep = tephrasedml.experiment(self.antimony, self.phrasedml)

        # two small antimony models reused by the individual tests
        self.a1 = """
        model m1()
            J0: S1 -> S2; k1*S1;
            S1 = 10.0; S2=0.0;
            k1 = 0.1;
        end
        """
        self.a2 = """
        model m2()
            v0: X1 -> X2; p1*X1;
            X1 = 5.0; X2 = 20.0;
            p1 = 0.2;
        end
        """

    def tearDown(self):
        # restore matplotlib backend and clean up the temporary archive dir
        matplotlib.pyplot.switch_backend(self.backend)
        shutil.rmtree(self.test_dir)
        matplotlib.pyplot.close('all')

    def checkKisaoIntegrator(self, inline_omex, kisao, name):
        """ Helper function for checking kisao integrator.

        Exports *inline_omex* to a COMBINE archive, asserts the simulation
        'sim0' carries the KISAO id *kisao* in the SED-ML, and asserts the
        generated Python code selects the integrator/solver *name*.
        """
        omex_file = os.path.join(self.test_dir, "test.omex")
        te.exportInlineOmex(inline_omex, omex_file)
        omex.extractCombineArchive(omex_file, directory=self.test_dir, method="zip")
        locations = omex.getLocationsByFormat(omex_file, "sed-ml")
        sedml_files = [os.path.join(self.test_dir, loc) for loc in locations]
        # only the first SED-ML file of the archive is checked
        sedml_file = sedml_files[0]

        # check the SED-ML
        doc = libsedml.readSedMLFromFile(sedml_file)
        # test_str = libsedml.writeSedMLToString(doc)
        # print(test_str)
        simulation = doc.getSimulation('sim0')
        algorithm = simulation.getAlgorithm()
        assert algorithm.getKisaoID() == kisao

        # check the generated code
        pystr = tesedml.sedmlToPython(sedml_file, workingDir=self.test_dir)
        # is integrator/solver set in python code
        if simulation.getTypeCode() is libsedml.SEDML_SIMULATION_STEADYSTATE:
            assert ".setSteadyStateSolver('{}')".format(name) in pystr
        else:
            assert ".setIntegrator('{}')".format(name) in pystr

    def checkKisaoAlgorithmParameter(self, inline_omex, kisao, name, value):
        """ Helper function for checking kisao parameter.

        Asserts that the algorithm parameter *kisao* is stored on 'sim0' with
        *value* in the SED-ML, and that the generated Python code sets the
        roadrunner setting *name* to *value* on the integrator (or on the
        steady-state solver for steadystate simulations).
        """
        # check that set AlgorithmParameter set correctly in SED-ML
        omex_file = os.path.join(self.test_dir, "test.omex")
        te.exportInlineOmex(inline_omex, omex_file)
        omex.extractCombineArchive(omex_file, directory=self.test_dir, method="zip")
        locations = omex.getLocationsByFormat(omex_file, "sed-ml")
        sedml_files = [os.path.join(self.test_dir, loc) for loc in locations]
        sedml_file = sedml_files[0]

        doc = libsedml.readSedMLFromFile(sedml_file)
        simulation = doc.getSimulation('sim0')
        algorithm = simulation.getAlgorithm()
        pdict = {p.getKisaoID(): p for p in algorithm.getListOfAlgorithmParameters()}
        self.assertTrue(kisao in pdict)

        pkey = tesedml.SEDMLCodeFactory.algorithmParameterToParameterKey(pdict[kisao])
        if pkey.dtype == str:
            self.assertEqual(pkey.value, value)
        else:
            # numerical parameter
            self.assertAlmostEqual(float(pkey.value), value)

        # check that integrator is set in python code
        pystr = tesedml.sedmlToPython(sedml_file, workingDir=self.test_dir)
        print(simulation.getElementName())
        print(pystr)
        if simulation.getTypeCode() is libsedml.SEDML_SIMULATION_STEADYSTATE:
            if pkey.dtype == str:
                self.assertTrue(".steadyStateSolver.setValue('{}', '{}')".format(name, value) in pystr)
            else:
                # numerical parameter
                self.assertTrue(".steadyStateSolver.setValue('{}', {})".format(name, value) in pystr)
        else:
            if pkey.dtype == str:
                self.assertTrue(".integrator.setValue('{}', '{}')".format(name, value) in pystr)
            else:
                # numerical parameter
                self.assertTrue(".integrator.setValue('{}', {})".format(name, value) in pystr)

    # --- integrator selection -------------------------------------------

    def test_kisao_cvode_1(self):
        """ CVODE selected via algorithm name. """
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm = CVODE
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoIntegrator(inline_omex, 'KISAO:0000019', 'cvode')
        te.executeInlineOmex(inline_omex)

    def test_kisao_cvode_2(self):
        """ CVODE selected via numeric kisao id. """
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm = kisao.19
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoIntegrator(inline_omex, 'KISAO:0000019', 'cvode')
        te.executeInlineOmex(inline_omex)

    def test_kisao_cvode_3(self):
        """ Default of uniform is cvode. """
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoIntegrator(inline_omex, 'KISAO:0000019', 'cvode')
        te.executeInlineOmex(inline_omex)

    def test_kisao_cvode_4(self):
        """ Default of onestep is cvode. """
        p = """
        model0 = model "m1"
        sim0 = simulate onestep(10)
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoIntegrator(inline_omex, 'KISAO:0000019', 'cvode')
        te.executeInlineOmex(inline_omex)

    def test_kisao_gillespie_1(self):
        """ Gillespie selected via algorithm name. """
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm = gillespie
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoIntegrator(inline_omex, 'KISAO:0000241', 'gillespie')
        te.executeInlineOmex(inline_omex)

    def test_kisao_gillespie_2(self):
        """ Gillespie selected via numeric kisao id. """
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm = kisao.241
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoIntegrator(inline_omex, 'KISAO:0000241', 'gillespie')
        te.executeInlineOmex(inline_omex)

    def test_kisao_gillespie_3(self):
        """ Default of uniform_stochastic is gillespie."""
        p = """
        model0 = model "m1"
        sim0 = simulate uniform_stochastic(0, 10, 100)
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoIntegrator(inline_omex, 'KISAO:0000241', 'gillespie')
        te.executeInlineOmex(inline_omex)

    def test_kisao_rk4_1(self):
        """ RK4 selected via algorithm name. """
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm = rk4
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoIntegrator(inline_omex, 'KISAO:0000032', 'rk4')
        te.executeInlineOmex(inline_omex)

    def test_kisao_rk4_2(self):
        """ RK4 selected via numeric kisao id. """
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm = kisao.32
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoIntegrator(inline_omex, 'KISAO:0000032', 'rk4')
        te.executeInlineOmex(inline_omex)

    def test_kisao_bdf(self):
        """ 'stiff' maps to cvode with the stiff (BDF) option enabled. """
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm = stiff
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoIntegrator(inline_omex, 'KISAO:0000288', 'cvode')
        te.executeInlineOmex(inline_omex)

        # additionally check the generated python sets the 'stiff' flag
        omex_file = os.path.join(self.test_dir, "test.omex")
        te.exportInlineOmex(inline_omex, omex_file)
        pycode_dict = tesedml.combineArchiveToPython(omex_file)
        pycode = six.next(six.itervalues(pycode_dict))
        print(pycode)
        self.assertTrue("integrator.setValue('stiff', True)" in pycode)

    def test_kisao_adams(self):
        """ 'nonstiff' maps to cvode with the stiff option disabled (Adams). """
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm = nonstiff
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoIntegrator(inline_omex, 'KISAO:0000280', 'cvode')
        te.executeInlineOmex(inline_omex)

        # additionally check the generated python clears the 'stiff' flag
        omex_file = os.path.join(self.test_dir, "test.omex")
        te.exportInlineOmex(inline_omex, omex_file)
        pycode_dict = tesedml.combineArchiveToPython(omex_file)
        pycode = six.next(six.itervalues(pycode_dict))
        print(pycode)
        self.assertTrue("integrator.setValue('stiff', False)" in pycode)

    @pytest.mark.skip(reason="bug in roadrunner")
    def test_kisao_rk45_1(self):
        """ RK45 selected via algorithm name. """
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm = rk45
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoIntegrator(inline_omex, 'KISAO:0000435', 'rk45')
        te.executeInlineOmex(inline_omex)

    @pytest.mark.skip(reason="bug in roadrunner")
    def test_kisao_rk45_2(self):
        """ RK45 selected via numeric kisao id. """
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm = kisao.435
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoIntegrator(inline_omex, 'KISAO:0000435', 'rk45')
        te.executeInlineOmex(inline_omex)

    # --- algorithm parameters -------------------------------------------
    # Each parameter has two variants: set by symbolic name and set by
    # numeric kisao id; both must map to the same KISAO term and setting.

    def test_kisao_relative_tolerance_1(self):
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm.relative_tolerance = 1E-8
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoAlgorithmParameter(inline_omex, 'KISAO:0000209', 'relative_tolerance', 1E-8)
        te.executeInlineOmex(inline_omex)

    def test_kisao_relative_tolerance_2(self):
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm.209 = 1E-8
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoAlgorithmParameter(inline_omex, 'KISAO:0000209', 'relative_tolerance', 1E-8)
        te.executeInlineOmex(inline_omex)

    def test_kisao_absolute_tolerance_1(self):
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm.absolute_tolerance = 1E-8
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoAlgorithmParameter(inline_omex, 'KISAO:0000211', 'absolute_tolerance', 1E-8)
        te.executeInlineOmex(inline_omex)

    def test_kisao_absolute_tolerance_2(self):
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm.211 = 1E-8
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoAlgorithmParameter(inline_omex, 'KISAO:0000211', 'absolute_tolerance', 1E-8)
        te.executeInlineOmex(inline_omex)

    def test_kisao_maximum_bdf_order_1(self):
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm.maximum_bdf_order = 4
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoAlgorithmParameter(inline_omex, 'KISAO:0000220', 'maximum_bdf_order', 4)
        te.executeInlineOmex(inline_omex)

    def test_kisao_maximum_bdf_order_2(self):
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm.220 = 4
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoAlgorithmParameter(inline_omex, 'KISAO:0000220', 'maximum_bdf_order', 4)
        te.executeInlineOmex(inline_omex)

    def test_kisao_maximum_adams_order_1(self):
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm.maximum_adams_order = 5
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoAlgorithmParameter(inline_omex, 'KISAO:0000219', 'maximum_adams_order', 5)
        te.executeInlineOmex(inline_omex)

    def test_kisao_maximum_adams_order_2(self):
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm.219 = 5
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoAlgorithmParameter(inline_omex, 'KISAO:0000219', 'maximum_adams_order', 5)
        te.executeInlineOmex(inline_omex)

    def test_kisao_maximum_num_steps_1(self):
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm.maximum_num_steps = 10000
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoAlgorithmParameter(inline_omex, 'KISAO:0000415', 'maximum_num_steps', 10000)
        te.executeInlineOmex(inline_omex)

    def test_kisao_maximum_num_steps_2(self):
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm.415 = 10000
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoAlgorithmParameter(inline_omex, 'KISAO:0000415', 'maximum_num_steps', 10000)
        te.executeInlineOmex(inline_omex)

    def test_kisao_maximum_time_step_1(self):
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm.maximum_time_step = 1.0
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoAlgorithmParameter(inline_omex, 'KISAO:0000467', 'maximum_time_step', 1.0)
        te.executeInlineOmex(inline_omex)

    def test_kisao_maximum_time_step_2(self):
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm.467 = 1.0
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoAlgorithmParameter(inline_omex, 'KISAO:0000467', 'maximum_time_step', 1.0)
        te.executeInlineOmex(inline_omex)

    # @pytest.mark.skip(reason="bug in roadrunner")
    def test_kisao_minimum_time_step_1(self):
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm.minimum_time_step = 1E-6
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoAlgorithmParameter(inline_omex, 'KISAO:0000485', 'minimum_time_step', 1E-6)
        te.executeInlineOmex(inline_omex)

    # @pytest.mark.skip(reason="bug in roadrunner")
    def test_kisao_minimum_time_step_2(self):
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm.485 = 1E-6
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoAlgorithmParameter(inline_omex, 'KISAO:0000485', 'minimum_time_step', 1E-6)
        te.executeInlineOmex(inline_omex)

    def test_kisao_initial_time_step_1(self):
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm.initial_time_step = 0.01
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoAlgorithmParameter(inline_omex, 'KISAO:0000332', 'initial_time_step', 0.01)
        te.executeInlineOmex(inline_omex)

    def test_kisao_initial_time_step_2(self):
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm.332 = 0.01
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoAlgorithmParameter(inline_omex, 'KISAO:0000332', 'initial_time_step', 0.01)
        te.executeInlineOmex(inline_omex)

    def test_kisao_variable_step_size_1(self):
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm.variable_step_size = true
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoAlgorithmParameter(inline_omex, 'KISAO:0000107', 'variable_step_size', True)
        te.executeInlineOmex(inline_omex)

    def test_kisao_variable_step_size_2(self):
        p = """
        model0 = model "m1"
        sim0 = simulate uniform(0, 10, 100)
        sim0.algorithm.107 = true
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoAlgorithmParameter(inline_omex, 'KISAO:0000107', 'variable_step_size', True)
        te.executeInlineOmex(inline_omex)

    # steadystate simulations exercise the steady-state solver branch of the
    # helpers rather than the integrator branch.

    def test_kisao_maximum_iterations_1(self):
        p = """
        model0 = model "m1"
        sim0 = simulate steadystate
        sim0.algorithm.maximum_iterations = 10
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoAlgorithmParameter(inline_omex, 'KISAO:0000486', 'maximum_iterations', 10)
        te.executeInlineOmex(inline_omex)

    def test_kisao_maximum_iterations_2(self):
        p = """
        model0 = model "m1"
        sim0 = simulate steadystate
        sim0.algorithm.486 = 10
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoAlgorithmParameter(inline_omex, 'KISAO:0000486', 'maximum_iterations', 10)
        te.executeInlineOmex(inline_omex)

    def test_kisao_minimum_damping_1(self):
        p = """
        model0 = model "m1"
        sim0 = simulate steadystate
        sim0.algorithm.minimum_damping = 1.0
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoAlgorithmParameter(inline_omex, 'KISAO:0000487', 'minimum_damping', 1.0)
        te.executeInlineOmex(inline_omex)

    def test_kisao_minimum_damping_2(self):
        p = """
        model0 = model "m1"
        sim0 = simulate steadystate
        sim0.algorithm.487 = 1
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoAlgorithmParameter(inline_omex, 'KISAO:0000487', 'minimum_damping', 1.0)
        te.executeInlineOmex(inline_omex)

    def test_kisao_seed_1(self):
        p = """
        model0 = model "m1"
        sim0 = simulate uniform_stochastic(0, 10, 100)
        sim0.algorithm.seed = 1234
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoAlgorithmParameter(inline_omex, 'KISAO:0000488', 'seed', 1234)
        te.executeInlineOmex(inline_omex)

    def test_kisao_seed_2(self):
        p = """
        model0 = model "m1"
        sim0 = simulate uniform_stochastic(0, 10, 100)
        sim0.algorithm.488 = 1234
        task0 = run sim0 on model0
        plot task0.time vs task0.S1
        """
        inline_omex = '\n'.join([self.a1, p])
        self.checkKisaoAlgorithmParameter(inline_omex, 'KISAO:0000488', 'seed', 1234)
        te.executeInlineOmex(inline_omex)
| 36.632953
| 103
| 0.589108
| 2,604
| 22,456
| 4.933564
| 0.097542
| 0.091072
| 0.045536
| 0.040321
| 0.845956
| 0.824317
| 0.820269
| 0.810773
| 0.79575
| 0.783607
| 0
| 0.071328
| 0.302013
| 22,456
| 612
| 104
| 36.69281
| 0.748309
| 0.044665
| 0
| 0.735922
| 0
| 0
| 0.445935
| 0.028972
| 0
| 0
| 0
| 0
| 0.023301
| 1
| 0.079612
| false
| 0
| 0.027184
| 0
| 0.108738
| 0.009709
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
69882deb7ea11e9442c08ffa1b656d32968421ed
| 1,557
|
py
|
Python
|
src/finitestate/firmware/schemas/schema_sbom_cpes.py
|
FiniteStateInc/clearcode-toolkit
|
521c3a2ab9d9fa6d7b9059227c6af9d09b031c33
|
[
"Apache-2.0"
] | null | null | null |
src/finitestate/firmware/schemas/schema_sbom_cpes.py
|
FiniteStateInc/clearcode-toolkit
|
521c3a2ab9d9fa6d7b9059227c6af9d09b031c33
|
[
"Apache-2.0"
] | null | null | null |
src/finitestate/firmware/schemas/schema_sbom_cpes.py
|
FiniteStateInc/clearcode-toolkit
|
521c3a2ab9d9fa6d7b9059227c6af9d09b031c33
|
[
"Apache-2.0"
] | 1
|
2020-12-22T16:51:40.000Z
|
2020-12-22T16:51:40.000Z
|
import pyspark.sql.types

# short local alias for the types module
_T = pyspark.sql.types


def _string_field(field_name):
    """Return a StructField holding a string column."""
    return _T.StructField(field_name, _T.StringType())


def _description_field():
    """Return the nested 'description' struct with 'short' and 'long' texts."""
    return _T.StructField('description', _T.StructType([
        _string_field('short'),
        _string_field('long'),
    ]))


def _match_fields():
    """Fields shared by the top-level record and its nested 'evidence' record."""
    return [
        _string_field('cpe'),
        _string_field('firmware_hash'),
        _string_field('product'),
        _string_field('version'),
        _description_field(),
        _string_field('purl'),
    ]


# Spark schema for SBOM CPE records: the match fields, the hash of the file
# the match was found in, and an 'evidence' sub-record repeating the match
# fields for the supporting observation.
sbom_cpes_schema = _T.StructType(
    _match_fields() + [
        _string_field('file_hash'),
        _T.StructField('evidence', _T.StructType(_match_fields())),
    ]
)
| 55.607143
| 87
| 0.709698
| 176
| 1,557
| 6.25
| 0.119318
| 0.345455
| 0.518182
| 0.425455
| 0.957273
| 0.957273
| 0.957273
| 0.957273
| 0.933636
| 0.883636
| 0
| 0
| 0.118176
| 1,557
| 27
| 88
| 57.666667
| 0.801165
| 0
| 0
| 0.791667
| 0
| 0
| 0.080334
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.041667
| 0
| 0.041667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
6993fb4512e01719c25fbeef43b1dbefe403d20c
| 8,931
|
py
|
Python
|
instagram/settings.py
|
Evan-cell/instagram
|
630031b7a5887392466a0ba84da95e5bac106f39
|
[
"Unlicense"
] | null | null | null |
instagram/settings.py
|
Evan-cell/instagram
|
630031b7a5887392466a0ba84da95e5bac106f39
|
[
"Unlicense"
] | null | null | null |
instagram/settings.py
|
Evan-cell/instagram
|
630031b7a5887392466a0ba84da95e5bac106f39
|
[
"Unlicense"
] | null | null | null |
# """
# Django settings for instagram project.
# Generated by 'django-admin startproject' using Django 3.2.7.
# For more information on this file, see
# https://docs.djangoproject.com/en/3.2/topics/settings/
# For the full list of settings and their values, see
# https://docs.djangoproject.com/en/3.2/ref/settings/
# """
# from pathlib import Path
# import os
# import cloudinary
# import cloudinary.uploader
# import cloudinary.api
# import django_heroku
# import dj_database_url
# from decouple import config,Csv
# MODE=config("MODE", default="dev")
# SECRET_KEY = config('SECRET_KEY')
# DEBUG = os.environ.get('DEBUG', True)
# # development
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.postgresql_psycopg2',
# 'NAME': config('DB_NAME'),
# 'USER': config('DB_USER'),
# 'PASSWORD': config('DB_PASSWORD'),
# 'HOST': config('DB_HOST'),
# 'PORT': '',
# }
# }
# # production
# db_from_env = dj_database_url.config(conn_max_age=500)
# DATABASES['default'].update(db_from_env)
# ALLOWED_HOSTS = config('ALLOWED_HOSTS', cast=Csv())
# cloudinary.config(
# cloud_name = "moringa-chit-fund-private-limited",
# api_key = "177778488814987",
# api_secret = "_7SSmn6ASgdrXTjtRRIyT8gagEY"
# )
# # Build paths inside the project like this: BASE_DIR / 'subdir'.
# BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# # Quick-start development settings - unsuitable for production
# # See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# # SECURITY WARNING: keep the secret key used in production secret!
# SECRET_KEY = 'django-insecure-08hsc1w118+0-0e1!@ox18#stu(b!$(h-qjd)g5nitzdj7wd2$'
# # SECURITY WARNING: don't run with debug turned on in production!
# DEBUG = False
# ALLOWED_HOSTS = []
# # Application definition
# INSTALLED_APPS = [
# 'django.contrib.admin',
# 'django.contrib.auth',
# 'django.contrib.contenttypes',
# 'django.contrib.sessions',
# 'django.contrib.messages',
# 'django.contrib.staticfiles',
# "kimsta",
# 'cloudinary'
# ]
# MIDDLEWARE = [
# 'django.middleware.security.SecurityMiddleware',
# 'django.contrib.sessions.middleware.SessionMiddleware',
# 'django.middleware.common.CommonMiddleware',
# 'django.middleware.csrf.CsrfViewMiddleware',
# 'django.contrib.auth.middleware.AuthenticationMiddleware',
# 'django.contrib.messages.middleware.MessageMiddleware',
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
# 'whitenoise.middleware.WhiteNoiseMiddleware',
# ]
# ROOT_URLCONF = 'instagram.urls'
# TEMPLATES = [
# {
# 'BACKEND': 'django.template.backends.django.DjangoTemplates',
# 'DIRS': [],
# 'APP_DIRS': True,
# 'OPTIONS': {
# 'context_processors': [
# 'django.template.context_processors.debug',
# 'django.template.context_processors.request',
# 'django.contrib.auth.context_processors.auth',
# 'django.contrib.messages.context_processors.messages',
# ],
# },
# },
# ]
# WSGI_APPLICATION = 'instagram.wsgi.application'
# # Database
# # https://docs.djangoproject.com/en/3.2/ref/settings/#databases
# # Password validation
# # https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
# AUTH_PASSWORD_VALIDATORS = [
# {
# 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
# },
# {
# 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
# },
# {
# 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
# },
# {
# 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
# },
# ]
# # Internationalization
# # https://docs.djangoproject.com/en/3.2/topics/i18n/
# LANGUAGE_CODE = 'en-us'
# TIME_ZONE = 'Africa/Nairobi'
# USE_I18N = True
# USE_L10N = True
# USE_TZ = True
# # configuring the location for media
# # MEDIA_URL = '/media/'
# # MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# # Static files (CSS, JavaScript, Images)
# # https://docs.djangoproject.com/en/3.2/howto/static-files/
# STATIC_URL = '/static/'
# STATICFILES_DIRS = [
# os.path.join(BASE_DIR, "static"),
# ]
# STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# # Default primary key field type
# # https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
# DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
# # Configure Django App for Heroku.
# django_heroku.settings(locals())
"""
Django settings for awwardsProject project.
Generated by 'django-admin startproject' using Django 3.2.9.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
import os
from pathlib import Path
from decouple import config,Csv
import dj_database_url
import cloudinary
import cloudinary.uploader
import cloudinary.api
import django_heroku
MODE=config("MODE", default="dev")
SECRET_KEY = config('SECRET_KEY')
DEBUG = os.environ.get('DEBUG', True)
#adding config
cloudinary.config(
cloud_name = "moringa-chit-fund-private-limited",
api_key = "177778488814987",
api_secret = "_7SSmn6ASgdrXTjtRRIyT8gagEY"
)
CLOUDINARY_STORAGE = {
'CLOUD_NAME': 'moringa-chit-fund-private-limited',
'API_KEY': '177778488814987',
'API_SECRET': '_7SSmn6ASgdrXTjtRRIyT8gagEY'
}
DEFAULT_FILE_STORAGE = 'cloudinary_storage.storage.MediaCloudinaryStorage'
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
"kimsta",
'cloudinary'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
]
ROOT_URLCONF = 'instagram.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'instagram.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': config('DB_NAME'),
'USER' : config('DB_USER'),
'PASSWORD' : config('DB_PASSWORD'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Africa/Nairobi'
USE_I18N = True
USE_L10N = True
USE_TZ = True
LOGIN_REDIRECT_URL = 'insta'
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static"),
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
django_heroku.settings(locals())
| 30.070707
| 93
| 0.693539
| 971
| 8,931
| 6.255407
| 0.213182
| 0.064208
| 0.057952
| 0.065854
| 0.888377
| 0.874218
| 0.867303
| 0.851498
| 0.851498
| 0.851498
| 0
| 0.016393
| 0.166723
| 8,931
| 296
| 94
| 30.172297
| 0.799785
| 0.612473
| 0
| 0
| 1
| 0
| 0.482633
| 0.387873
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.061224
| 0.081633
| 0
| 0.081633
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
0e0cf45782f82b3732c79f0365e79b4e9ac0791f
| 327,936
|
py
|
Python
|
nova/tests/unit/network/test_manager.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/network/test_manager.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/network/test_manager.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | 2
|
2017-07-20T17:31:34.000Z
|
2020-07-24T02:42:19.000Z
|
begin_unit
comment|'# Copyright 2011 Rackspace'
nl|'\n'
comment|'# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.'
nl|'\n'
comment|'# Copyright 2013 IBM Corp.'
nl|'\n'
comment|'# All Rights Reserved.'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Licensed under the Apache License, Version 2.0 (the "License"); you may'
nl|'\n'
comment|'# not use this file except in compliance with the License. You may obtain'
nl|'\n'
comment|'# a copy of the License at'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# http://www.apache.org/licenses/LICENSE-2.0'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Unless required by applicable law or agreed to in writing, software'
nl|'\n'
comment|'# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT'
nl|'\n'
comment|'# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the'
nl|'\n'
comment|'# License for the specific language governing permissions and limitations'
nl|'\n'
comment|'# under the License.'
nl|'\n'
nl|'\n'
name|'import'
name|'fixtures'
newline|'\n'
name|'import'
name|'mock'
newline|'\n'
name|'from'
name|'mox3'
name|'import'
name|'mox'
newline|'\n'
name|'import'
name|'netaddr'
newline|'\n'
name|'from'
name|'oslo_concurrency'
name|'import'
name|'processutils'
newline|'\n'
name|'from'
name|'oslo_config'
name|'import'
name|'cfg'
newline|'\n'
name|'from'
name|'oslo_db'
name|'import'
name|'exception'
name|'as'
name|'db_exc'
newline|'\n'
name|'from'
name|'oslo_log'
name|'import'
name|'log'
name|'as'
name|'logging'
newline|'\n'
name|'import'
name|'oslo_messaging'
name|'as'
name|'messaging'
newline|'\n'
name|'from'
name|'oslo_utils'
name|'import'
name|'importutils'
newline|'\n'
name|'from'
name|'oslo_utils'
name|'import'
name|'netutils'
newline|'\n'
name|'import'
name|'six'
newline|'\n'
name|'import'
name|'testtools'
newline|'\n'
nl|'\n'
name|'from'
name|'nova'
name|'import'
name|'context'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'db'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'db'
op|'.'
name|'sqlalchemy'
name|'import'
name|'models'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'exception'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'ipv6'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'network'
name|'import'
name|'floating_ips'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'network'
name|'import'
name|'linux_net'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'network'
name|'import'
name|'manager'
name|'as'
name|'network_manager'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'network'
name|'import'
name|'model'
name|'as'
name|'net_model'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'objects'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'objects'
name|'import'
name|'network'
name|'as'
name|'network_obj'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'objects'
name|'import'
name|'virtual_interface'
name|'as'
name|'vif_obj'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'quota'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'test'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
op|'.'
name|'api'
op|'.'
name|'openstack'
name|'import'
name|'fakes'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
name|'import'
name|'fake_instance'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
name|'import'
name|'fake_ldap'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
name|'import'
name|'fake_network'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
name|'import'
name|'matchers'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
op|'.'
name|'objects'
name|'import'
name|'test_fixed_ip'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
op|'.'
name|'objects'
name|'import'
name|'test_floating_ip'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
op|'.'
name|'objects'
name|'import'
name|'test_network'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
op|'.'
name|'objects'
name|'import'
name|'test_service'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
name|'import'
name|'utils'
name|'as'
name|'test_utils'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
name|'import'
name|'uuidsentinel'
name|'as'
name|'uuids'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'utils'
newline|'\n'
nl|'\n'
DECL|variable|CONF
name|'CONF'
op|'='
name|'cfg'
op|'.'
name|'CONF'
newline|'\n'
DECL|variable|LOG
name|'LOG'
op|'='
name|'logging'
op|'.'
name|'getLogger'
op|'('
name|'__name__'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|variable|HOST
name|'HOST'
op|'='
string|'"testhost"'
newline|'\n'
DECL|variable|FAKEUUID
name|'FAKEUUID'
op|'='
string|'"aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa"'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|variable|fake_inst
name|'fake_inst'
op|'='
name|'fake_instance'
op|'.'
name|'fake_db_instance'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|variable|networks
name|'networks'
op|'='
op|'['
op|'{'
string|"'id'"
op|':'
number|'0'
op|','
nl|'\n'
string|"'uuid'"
op|':'
name|'FAKEUUID'
op|','
nl|'\n'
string|"'label'"
op|':'
string|"'test0'"
op|','
nl|'\n'
string|"'injected'"
op|':'
name|'False'
op|','
nl|'\n'
string|"'multi_host'"
op|':'
name|'False'
op|','
nl|'\n'
string|"'cidr'"
op|':'
string|"'192.168.0.0/24'"
op|','
nl|'\n'
string|"'cidr_v6'"
op|':'
string|"'2001:db8::/64'"
op|','
nl|'\n'
string|"'gateway_v6'"
op|':'
string|"'2001:db8::1'"
op|','
nl|'\n'
string|"'netmask_v6'"
op|':'
string|"'64'"
op|','
nl|'\n'
string|"'netmask'"
op|':'
string|"'255.255.255.0'"
op|','
nl|'\n'
string|"'bridge'"
op|':'
string|"'fa0'"
op|','
nl|'\n'
string|"'bridge_interface'"
op|':'
string|"'fake_fa0'"
op|','
nl|'\n'
string|"'gateway'"
op|':'
string|"'192.168.0.1'"
op|','
nl|'\n'
string|"'dhcp_server'"
op|':'
string|"'192.168.0.1'"
op|','
nl|'\n'
string|"'broadcast'"
op|':'
string|"'192.168.0.255'"
op|','
nl|'\n'
string|"'dns1'"
op|':'
string|"'192.168.0.1'"
op|','
nl|'\n'
string|"'dns2'"
op|':'
string|"'192.168.0.2'"
op|','
nl|'\n'
string|"'vlan'"
op|':'
name|'None'
op|','
nl|'\n'
string|"'host'"
op|':'
name|'HOST'
op|','
nl|'\n'
string|"'project_id'"
op|':'
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
string|"'vpn_public_address'"
op|':'
string|"'192.168.0.2'"
op|','
nl|'\n'
string|"'vpn_public_port'"
op|':'
string|"'22'"
op|','
nl|'\n'
string|"'vpn_private_address'"
op|':'
string|"'10.0.0.2'"
op|'}'
op|','
nl|'\n'
op|'{'
string|"'id'"
op|':'
number|'1'
op|','
nl|'\n'
string|"'uuid'"
op|':'
string|"'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'"
op|','
nl|'\n'
string|"'label'"
op|':'
string|"'test1'"
op|','
nl|'\n'
string|"'injected'"
op|':'
name|'False'
op|','
nl|'\n'
string|"'multi_host'"
op|':'
name|'False'
op|','
nl|'\n'
string|"'cidr'"
op|':'
string|"'192.168.1.0/24'"
op|','
nl|'\n'
string|"'cidr_v6'"
op|':'
string|"'2001:db9::/64'"
op|','
nl|'\n'
string|"'gateway_v6'"
op|':'
string|"'2001:db9::1'"
op|','
nl|'\n'
string|"'netmask_v6'"
op|':'
string|"'64'"
op|','
nl|'\n'
string|"'netmask'"
op|':'
string|"'255.255.255.0'"
op|','
nl|'\n'
string|"'bridge'"
op|':'
string|"'fa1'"
op|','
nl|'\n'
string|"'bridge_interface'"
op|':'
string|"'fake_fa1'"
op|','
nl|'\n'
string|"'gateway'"
op|':'
string|"'192.168.1.1'"
op|','
nl|'\n'
string|"'dhcp_server'"
op|':'
string|"'192.168.1.1'"
op|','
nl|'\n'
string|"'broadcast'"
op|':'
string|"'192.168.1.255'"
op|','
nl|'\n'
string|"'dns1'"
op|':'
string|"'192.168.0.1'"
op|','
nl|'\n'
string|"'dns2'"
op|':'
string|"'192.168.0.2'"
op|','
nl|'\n'
string|"'vlan'"
op|':'
name|'None'
op|','
nl|'\n'
string|"'host'"
op|':'
name|'HOST'
op|','
nl|'\n'
string|"'project_id'"
op|':'
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
string|"'vpn_public_address'"
op|':'
string|"'192.168.1.2'"
op|','
nl|'\n'
string|"'vpn_public_port'"
op|':'
string|"'22'"
op|','
nl|'\n'
string|"'vpn_private_address'"
op|':'
string|"'10.0.0.2'"
op|'}'
op|']'
newline|'\n'
nl|'\n'
DECL|variable|fixed_ips
name|'fixed_ips'
op|'='
op|'['
op|'{'
string|"'id'"
op|':'
number|'0'
op|','
nl|'\n'
string|"'network_id'"
op|':'
number|'0'
op|','
nl|'\n'
string|"'address'"
op|':'
string|"'192.168.0.100'"
op|','
nl|'\n'
string|"'instance_uuid'"
op|':'
number|'0'
op|','
nl|'\n'
string|"'allocated'"
op|':'
name|'False'
op|','
nl|'\n'
string|"'virtual_interface_id'"
op|':'
number|'0'
op|','
nl|'\n'
string|"'floating_ips'"
op|':'
op|'['
op|']'
op|'}'
op|','
nl|'\n'
op|'{'
string|"'id'"
op|':'
number|'0'
op|','
nl|'\n'
string|"'network_id'"
op|':'
number|'1'
op|','
nl|'\n'
string|"'address'"
op|':'
string|"'192.168.1.100'"
op|','
nl|'\n'
string|"'instance_uuid'"
op|':'
number|'0'
op|','
nl|'\n'
string|"'allocated'"
op|':'
name|'False'
op|','
nl|'\n'
string|"'virtual_interface_id'"
op|':'
number|'0'
op|','
nl|'\n'
string|"'floating_ips'"
op|':'
op|'['
op|']'
op|'}'
op|','
nl|'\n'
op|'{'
string|"'id'"
op|':'
number|'0'
op|','
nl|'\n'
string|"'network_id'"
op|':'
number|'1'
op|','
nl|'\n'
string|"'address'"
op|':'
string|"'2001:db9:0:1::10'"
op|','
nl|'\n'
string|"'instance_uuid'"
op|':'
number|'0'
op|','
nl|'\n'
string|"'allocated'"
op|':'
name|'False'
op|','
nl|'\n'
string|"'virtual_interface_id'"
op|':'
number|'0'
op|','
nl|'\n'
string|"'floating_ips'"
op|':'
op|'['
op|']'
op|'}'
op|']'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|variable|flavor
name|'flavor'
op|'='
op|'{'
string|"'id'"
op|':'
number|'0'
op|','
nl|'\n'
string|"'rxtx_cap'"
op|':'
number|'3'
op|'}'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|variable|floating_ip_fields
name|'floating_ip_fields'
op|'='
op|'{'
string|"'id'"
op|':'
number|'0'
op|','
nl|'\n'
string|"'address'"
op|':'
string|"'192.168.10.100'"
op|','
nl|'\n'
string|"'pool'"
op|':'
string|"'nova'"
op|','
nl|'\n'
string|"'interface'"
op|':'
string|"'eth0'"
op|','
nl|'\n'
string|"'fixed_ip_id'"
op|':'
number|'0'
op|','
nl|'\n'
string|"'project_id'"
op|':'
name|'None'
op|','
nl|'\n'
string|"'auto_assigned'"
op|':'
name|'False'
op|'}'
newline|'\n'
nl|'\n'
DECL|variable|vifs
name|'vifs'
op|'='
op|'['
op|'{'
string|"'id'"
op|':'
number|'0'
op|','
nl|'\n'
string|"'created_at'"
op|':'
name|'None'
op|','
nl|'\n'
string|"'updated_at'"
op|':'
name|'None'
op|','
nl|'\n'
string|"'deleted_at'"
op|':'
name|'None'
op|','
nl|'\n'
string|"'deleted'"
op|':'
number|'0'
op|','
nl|'\n'
string|"'address'"
op|':'
string|"'DE:AD:BE:EF:00:00'"
op|','
nl|'\n'
string|"'uuid'"
op|':'
name|'uuids'
op|'.'
name|'vif1_uuid'
op|','
nl|'\n'
string|"'network_id'"
op|':'
number|'0'
op|','
nl|'\n'
string|"'instance_uuid'"
op|':'
name|'uuids'
op|'.'
name|'instance'
op|','
nl|'\n'
string|"'tag'"
op|':'
string|"'fake-tag1'"
op|'}'
op|','
nl|'\n'
op|'{'
string|"'id'"
op|':'
number|'1'
op|','
nl|'\n'
string|"'created_at'"
op|':'
name|'None'
op|','
nl|'\n'
string|"'updated_at'"
op|':'
name|'None'
op|','
nl|'\n'
string|"'deleted_at'"
op|':'
name|'None'
op|','
nl|'\n'
string|"'deleted'"
op|':'
number|'0'
op|','
nl|'\n'
string|"'address'"
op|':'
string|"'DE:AD:BE:EF:00:01'"
op|','
nl|'\n'
string|"'uuid'"
op|':'
string|"'00000000-0000-0000-0000-0000000000000001'"
op|','
nl|'\n'
string|"'network_id'"
op|':'
number|'1'
op|','
nl|'\n'
string|"'instance_uuid'"
op|':'
name|'uuids'
op|'.'
name|'instance'
op|','
nl|'\n'
string|"'tag'"
op|':'
string|"'fake-tag2'"
op|'}'
op|','
nl|'\n'
op|'{'
string|"'id'"
op|':'
number|'2'
op|','
nl|'\n'
string|"'created_at'"
op|':'
name|'None'
op|','
nl|'\n'
string|"'updated_at'"
op|':'
name|'None'
op|','
nl|'\n'
string|"'deleted_at'"
op|':'
name|'None'
op|','
nl|'\n'
string|"'deleted'"
op|':'
number|'0'
op|','
nl|'\n'
string|"'address'"
op|':'
string|"'DE:AD:BE:EF:00:02'"
op|','
nl|'\n'
string|"'uuid'"
op|':'
string|"'00000000-0000-0000-0000-0000000000000002'"
op|','
nl|'\n'
string|"'network_id'"
op|':'
number|'2'
op|','
nl|'\n'
string|"'instance_uuid'"
op|':'
name|'uuids'
op|'.'
name|'instance'
op|','
nl|'\n'
string|"'tag'"
op|':'
string|"'fake-tag3'"
op|'}'
op|']'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|FlatNetworkTestCase
name|'class'
name|'FlatNetworkTestCase'
op|'('
name|'test'
op|'.'
name|'TestCase'
op|')'
op|':'
newline|'\n'
nl|'\n'
DECL|variable|REQUIRES_LOCKING
indent|' '
name|'REQUIRES_LOCKING'
op|'='
name|'True'
newline|'\n'
nl|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'FlatNetworkTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'tempdir'
op|'='
name|'self'
op|'.'
name|'useFixture'
op|'('
name|'fixtures'
op|'.'
name|'TempDir'
op|'('
op|')'
op|')'
op|'.'
name|'path'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'log_dir'
op|'='
name|'self'
op|'.'
name|'tempdir'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'='
name|'network_manager'
op|'.'
name|'FlatManager'
op|'('
name|'host'
op|'='
name|'HOST'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'instance_dns_domain'
op|'='
string|"''"
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|'='
name|'db'
newline|'\n'
name|'self'
op|'.'
name|'context'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'testuser'"
op|','
nl|'\n'
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'testtools'
op|'.'
name|'skipIf'
op|'('
name|'test_utils'
op|'.'
name|'is_osx'
op|'('
op|')'
op|','
nl|'\n'
string|"'IPv6 pretty-printing broken on OSX, see bug 1409135'"
op|')'
newline|'\n'
DECL|member|test_get_instance_nw_info_fake
name|'def'
name|'test_get_instance_nw_info_fake'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'fake_get_instance_nw_info'
op|'='
name|'fake_network'
op|'.'
name|'fake_get_instance_nw_info'
newline|'\n'
nl|'\n'
name|'nw_info'
op|'='
name|'fake_get_instance_nw_info'
op|'('
name|'self'
op|','
number|'0'
op|','
number|'2'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'nw_info'
op|')'
newline|'\n'
nl|'\n'
name|'nw_info'
op|'='
name|'fake_get_instance_nw_info'
op|'('
name|'self'
op|','
number|'1'
op|','
number|'2'
op|')'
newline|'\n'
nl|'\n'
name|'for'
name|'i'
op|','
name|'vif'
name|'in'
name|'enumerate'
op|'('
name|'nw_info'
op|')'
op|':'
newline|'\n'
indent|' '
name|'nid'
op|'='
name|'i'
op|'+'
number|'1'
newline|'\n'
name|'check'
op|'='
op|'{'
string|"'bridge'"
op|':'
string|"'fake_br%d'"
op|'%'
name|'nid'
op|','
nl|'\n'
string|"'cidr'"
op|':'
string|"'192.168.%s.0/24'"
op|'%'
name|'nid'
op|','
nl|'\n'
string|"'cidr_v6'"
op|':'
string|"'2001:db8:0:%x::/64'"
op|'%'
name|'nid'
op|','
nl|'\n'
string|"'id'"
op|':'
name|'getattr'
op|'('
name|'uuids'
op|','
string|"'vif%i'"
op|'%'
name|'nid'
op|')'
op|','
nl|'\n'
string|"'multi_host'"
op|':'
name|'False'
op|','
nl|'\n'
string|"'injected'"
op|':'
name|'False'
op|','
nl|'\n'
string|"'bridge_interface'"
op|':'
name|'None'
op|','
nl|'\n'
string|"'vlan'"
op|':'
name|'None'
op|','
nl|'\n'
string|"'broadcast'"
op|':'
string|"'192.168.%d.255'"
op|'%'
name|'nid'
op|','
nl|'\n'
string|"'dhcp_server'"
op|':'
string|"'192.168.1.1'"
op|','
nl|'\n'
string|"'dns'"
op|':'
op|'['
string|"'192.168.%d.3'"
op|'%'
name|'nid'
op|','
string|"'192.168.%d.4'"
op|'%'
name|'nid'
op|']'
op|','
nl|'\n'
string|"'gateway'"
op|':'
string|"'192.168.%d.1'"
op|'%'
name|'nid'
op|','
nl|'\n'
string|"'gateway_v6'"
op|':'
string|"'2001:db8:0:1::1'"
op|','
nl|'\n'
string|"'label'"
op|':'
string|"'test%d'"
op|'%'
name|'nid'
op|','
nl|'\n'
string|"'mac'"
op|':'
string|"'DE:AD:BE:EF:00:%02x'"
op|'%'
name|'nid'
op|','
nl|'\n'
string|"'rxtx_cap'"
op|':'
number|'30'
op|','
nl|'\n'
string|"'vif_type'"
op|':'
name|'net_model'
op|'.'
name|'VIF_TYPE_BRIDGE'
op|','
nl|'\n'
string|"'vif_devname'"
op|':'
name|'None'
op|','
nl|'\n'
string|"'vif_uuid'"
op|':'
name|'getattr'
op|'('
name|'uuids'
op|','
string|"'vif%i'"
op|'%'
name|'nid'
op|')'
op|','
nl|'\n'
string|"'ovs_interfaceid'"
op|':'
name|'None'
op|','
nl|'\n'
string|"'qbh_params'"
op|':'
name|'None'
op|','
nl|'\n'
string|"'qbg_params'"
op|':'
name|'None'
op|','
nl|'\n'
string|"'should_create_vlan'"
op|':'
name|'False'
op|','
nl|'\n'
string|"'should_create_bridge'"
op|':'
name|'False'
op|','
nl|'\n'
string|"'ip'"
op|':'
string|"'192.168.%d.%03d'"
op|'%'
op|'('
name|'nid'
op|','
name|'nid'
op|'+'
number|'99'
op|')'
op|','
nl|'\n'
string|"'ip_v6'"
op|':'
string|"'2001:db8:0:1:dcad:beff:feef:%x'"
op|'%'
name|'nid'
op|','
nl|'\n'
string|"'netmask'"
op|':'
string|"'255.255.255.0'"
op|','
nl|'\n'
string|"'netmask_v6'"
op|':'
number|'64'
op|','
nl|'\n'
string|"'physical_network'"
op|':'
name|'None'
op|','
nl|'\n'
op|'}'
newline|'\n'
nl|'\n'
name|'network'
op|'='
name|'vif'
op|'['
string|"'network'"
op|']'
newline|'\n'
name|'net_v4'
op|'='
name|'vif'
op|'['
string|"'network'"
op|']'
op|'['
string|"'subnets'"
op|']'
op|'['
number|'0'
op|']'
newline|'\n'
name|'net_v6'
op|'='
name|'vif'
op|'['
string|"'network'"
op|']'
op|'['
string|"'subnets'"
op|']'
op|'['
number|'1'
op|']'
newline|'\n'
nl|'\n'
name|'vif_dict'
op|'='
name|'dict'
op|'('
name|'bridge'
op|'='
name|'network'
op|'['
string|"'bridge'"
op|']'
op|','
nl|'\n'
name|'cidr'
op|'='
name|'net_v4'
op|'['
string|"'cidr'"
op|']'
op|','
nl|'\n'
name|'cidr_v6'
op|'='
name|'net_v6'
op|'['
string|"'cidr'"
op|']'
op|','
nl|'\n'
name|'id'
op|'='
name|'vif'
op|'['
string|"'id'"
op|']'
op|','
nl|'\n'
name|'multi_host'
op|'='
name|'network'
op|'.'
name|'get_meta'
op|'('
string|"'multi_host'"
op|','
name|'False'
op|')'
op|','
nl|'\n'
name|'injected'
op|'='
name|'network'
op|'.'
name|'get_meta'
op|'('
string|"'injected'"
op|','
name|'False'
op|')'
op|','
nl|'\n'
name|'bridge_interface'
op|'='
nl|'\n'
name|'network'
op|'.'
name|'get_meta'
op|'('
string|"'bridge_interface'"
op|')'
op|','
nl|'\n'
name|'vlan'
op|'='
name|'network'
op|'.'
name|'get_meta'
op|'('
string|"'vlan'"
op|')'
op|','
nl|'\n'
name|'broadcast'
op|'='
name|'str'
op|'('
name|'net_v4'
op|'.'
name|'as_netaddr'
op|'('
op|')'
op|'.'
name|'broadcast'
op|')'
op|','
nl|'\n'
name|'dhcp_server'
op|'='
name|'network'
op|'.'
name|'get_meta'
op|'('
string|"'dhcp_server'"
op|','
nl|'\n'
name|'net_v4'
op|'['
string|"'gateway'"
op|']'
op|'['
string|"'address'"
op|']'
op|')'
op|','
nl|'\n'
name|'dns'
op|'='
op|'['
name|'ip'
op|'['
string|"'address'"
op|']'
name|'for'
name|'ip'
name|'in'
name|'net_v4'
op|'['
string|"'dns'"
op|']'
op|']'
op|','
nl|'\n'
name|'gateway'
op|'='
name|'net_v4'
op|'['
string|"'gateway'"
op|']'
op|'['
string|"'address'"
op|']'
op|','
nl|'\n'
name|'gateway_v6'
op|'='
name|'net_v6'
op|'['
string|"'gateway'"
op|']'
op|'['
string|"'address'"
op|']'
op|','
nl|'\n'
name|'label'
op|'='
name|'network'
op|'['
string|"'label'"
op|']'
op|','
nl|'\n'
name|'mac'
op|'='
name|'vif'
op|'['
string|"'address'"
op|']'
op|','
nl|'\n'
name|'rxtx_cap'
op|'='
name|'vif'
op|'.'
name|'get_meta'
op|'('
string|"'rxtx_cap'"
op|')'
op|','
nl|'\n'
name|'vif_type'
op|'='
name|'vif'
op|'['
string|"'type'"
op|']'
op|','
nl|'\n'
name|'vif_devname'
op|'='
name|'vif'
op|'.'
name|'get'
op|'('
string|"'devname'"
op|')'
op|','
nl|'\n'
name|'vif_uuid'
op|'='
name|'vif'
op|'['
string|"'id'"
op|']'
op|','
nl|'\n'
name|'ovs_interfaceid'
op|'='
name|'vif'
op|'.'
name|'get'
op|'('
string|"'ovs_interfaceid'"
op|')'
op|','
nl|'\n'
name|'qbh_params'
op|'='
name|'vif'
op|'.'
name|'get'
op|'('
string|"'qbh_params'"
op|')'
op|','
nl|'\n'
name|'qbg_params'
op|'='
name|'vif'
op|'.'
name|'get'
op|'('
string|"'qbg_params'"
op|')'
op|','
nl|'\n'
name|'should_create_vlan'
op|'='
nl|'\n'
name|'network'
op|'.'
name|'get_meta'
op|'('
string|"'should_create_vlan'"
op|','
name|'False'
op|')'
op|','
nl|'\n'
name|'should_create_bridge'
op|'='
nl|'\n'
name|'network'
op|'.'
name|'get_meta'
op|'('
string|"'should_create_bridge'"
op|','
nl|'\n'
name|'False'
op|')'
op|','
nl|'\n'
name|'ip'
op|'='
name|'net_v4'
op|'['
string|"'ips'"
op|']'
op|'['
name|'i'
op|']'
op|'['
string|"'address'"
op|']'
op|','
nl|'\n'
name|'ip_v6'
op|'='
name|'net_v6'
op|'['
string|"'ips'"
op|']'
op|'['
name|'i'
op|']'
op|'['
string|"'address'"
op|']'
op|','
nl|'\n'
name|'netmask'
op|'='
name|'str'
op|'('
name|'net_v4'
op|'.'
name|'as_netaddr'
op|'('
op|')'
op|'.'
name|'netmask'
op|')'
op|','
nl|'\n'
name|'netmask_v6'
op|'='
name|'net_v6'
op|'.'
name|'as_netaddr'
op|'('
op|')'
op|'.'
name|'_prefixlen'
op|','
nl|'\n'
name|'physical_network'
op|'='
nl|'\n'
name|'network'
op|'.'
name|'get_meta'
op|'('
string|"'physical_network'"
op|','
name|'None'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertThat'
op|'('
name|'vif_dict'
op|','
name|'matchers'
op|'.'
name|'DictMatches'
op|'('
name|'check'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_validate_networks
dedent|''
dedent|''
name|'def'
name|'test_validate_networks'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'db'
op|','
string|"'fixed_ip_get_by_address'"
op|')'
newline|'\n'
nl|'\n'
name|'requested_networks'
op|'='
op|'['
op|'('
string|"'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'"
op|','
nl|'\n'
string|"'192.168.1.100'"
op|')'
op|','
nl|'\n'
op|'('
string|"'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'"
op|','
nl|'\n'
string|"'192.168.0.100'"
op|')'
op|']'
newline|'\n'
nl|'\n'
name|'ip'
op|'='
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
op|'**'
name|'fixed_ips'
op|'['
number|'1'
op|']'
op|')'
newline|'\n'
name|'ip'
op|'['
string|"'network'"
op|']'
op|'='
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
op|'**'
name|'networks'
op|'['
number|'1'
op|']'
op|')'
newline|'\n'
name|'ip'
op|'['
string|"'instance_uuid'"
op|']'
op|'='
name|'None'
newline|'\n'
name|'db'
op|'.'
name|'fixed_ip_get_by_address'
op|'('
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'columns_to_join'
op|'='
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
nl|'\n'
op|')'
op|'.'
name|'AndReturn'
op|'('
name|'ip'
op|')'
newline|'\n'
name|'ip'
op|'='
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
op|'**'
name|'fixed_ips'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
name|'ip'
op|'['
string|"'network'"
op|']'
op|'='
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
op|'**'
name|'networks'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
name|'ip'
op|'['
string|"'instance_uuid'"
op|']'
op|'='
name|'None'
newline|'\n'
name|'db'
op|'.'
name|'fixed_ip_get_by_address'
op|'('
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'columns_to_join'
op|'='
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
nl|'\n'
op|')'
op|'.'
name|'AndReturn'
op|'('
name|'ip'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'validate_networks'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'requested_networks'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_validate_networks_valid_fixed_ipv6
dedent|''
name|'def'
name|'test_validate_networks_valid_fixed_ipv6'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'db'
op|','
string|"'fixed_ip_get_by_address'"
op|')'
newline|'\n'
nl|'\n'
name|'requested_networks'
op|'='
op|'['
op|'('
string|"'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'"
op|','
nl|'\n'
string|"'2001:db9:0:1::10'"
op|')'
op|']'
newline|'\n'
nl|'\n'
name|'ip'
op|'='
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
op|'**'
name|'fixed_ips'
op|'['
number|'2'
op|']'
op|')'
newline|'\n'
name|'ip'
op|'['
string|"'network'"
op|']'
op|'='
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
op|'**'
name|'networks'
op|'['
number|'1'
op|']'
op|')'
newline|'\n'
name|'ip'
op|'['
string|"'instance_uuid'"
op|']'
op|'='
name|'None'
newline|'\n'
name|'db'
op|'.'
name|'fixed_ip_get_by_address'
op|'('
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'columns_to_join'
op|'='
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
nl|'\n'
op|')'
op|'.'
name|'AndReturn'
op|'('
name|'ip'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'validate_networks'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'requested_networks'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_validate_reserved
dedent|''
name|'def'
name|'test_validate_reserved'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'context_admin'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'testuser'"
op|','
nl|'\n'
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'True'
op|')'
newline|'\n'
name|'nets'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'create_networks'
op|'('
name|'context_admin'
op|','
string|"'fake'"
op|','
nl|'\n'
string|"'192.168.0.0/24'"
op|','
name|'False'
op|','
number|'1'
op|','
nl|'\n'
number|'256'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'len'
op|'('
name|'nets'
op|')'
op|')'
newline|'\n'
name|'network'
op|'='
name|'nets'
op|'['
number|'0'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'4'
op|','
name|'db'
op|'.'
name|'network_count_reserved_ips'
op|'('
name|'context_admin'
op|','
nl|'\n'
name|'network'
op|'['
string|"'id'"
op|']'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_validate_reserved_start_end
dedent|''
name|'def'
name|'test_validate_reserved_start_end'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'context_admin'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'testuser'"
op|','
nl|'\n'
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'True'
op|')'
newline|'\n'
name|'nets'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'create_networks'
op|'('
name|'context_admin'
op|','
string|"'fake'"
op|','
nl|'\n'
string|"'192.168.0.0/24'"
op|','
name|'False'
op|','
number|'1'
op|','
nl|'\n'
number|'256'
op|','
name|'dhcp_server'
op|'='
string|"'192.168.0.11'"
op|','
nl|'\n'
name|'allowed_start'
op|'='
string|"'192.168.0.10'"
op|','
nl|'\n'
name|'allowed_end'
op|'='
string|"'192.168.0.245'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'len'
op|'('
name|'nets'
op|')'
op|')'
newline|'\n'
name|'network'
op|'='
name|'nets'
op|'['
number|'0'
op|']'
newline|'\n'
comment|'# gateway defaults to beginning of allowed_start'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'192.168.0.10'"
op|','
name|'network'
op|'['
string|"'gateway'"
op|']'
op|')'
newline|'\n'
comment|"# vpn_server doesn't conflict with dhcp_start"
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'192.168.0.12'"
op|','
name|'network'
op|'['
string|"'vpn_private_address'"
op|']'
op|')'
newline|'\n'
comment|"# dhcp_start doesn't conflict with dhcp_server"
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'192.168.0.13'"
op|','
name|'network'
op|'['
string|"'dhcp_start'"
op|']'
op|')'
newline|'\n'
comment|'# NOTE(vish): 10 from the beginning, 10 from the end, and'
nl|'\n'
comment|'# 1 for the gateway, 1 for the dhcp server,'
nl|'\n'
comment|'# 1 for the vpn server'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'23'
op|','
name|'db'
op|'.'
name|'network_count_reserved_ips'
op|'('
name|'context_admin'
op|','
nl|'\n'
name|'network'
op|'['
string|"'id'"
op|']'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_validate_reserved_start_out_of_range
dedent|''
name|'def'
name|'test_validate_reserved_start_out_of_range'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'context_admin'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'testuser'"
op|','
nl|'\n'
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'True'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'AddressOutOfRange'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'create_networks'
op|','
nl|'\n'
name|'context_admin'
op|','
string|"'fake'"
op|','
string|"'192.168.0.0/24'"
op|','
name|'False'
op|','
nl|'\n'
number|'1'
op|','
number|'256'
op|','
name|'allowed_start'
op|'='
string|"'192.168.1.10'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_validate_reserved_end_invalid
dedent|''
name|'def'
name|'test_validate_reserved_end_invalid'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'context_admin'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'testuser'"
op|','
nl|'\n'
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'True'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'InvalidAddress'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'create_networks'
op|','
nl|'\n'
name|'context_admin'
op|','
string|"'fake'"
op|','
string|"'192.168.0.0/24'"
op|','
name|'False'
op|','
nl|'\n'
number|'1'
op|','
number|'256'
op|','
name|'allowed_end'
op|'='
string|"'invalid'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_validate_cidr_invalid
dedent|''
name|'def'
name|'test_validate_cidr_invalid'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'context_admin'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'testuser'"
op|','
nl|'\n'
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'True'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'InvalidCidr'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'create_networks'
op|','
nl|'\n'
name|'context_admin'
op|','
string|"'fake'"
op|','
string|"'invalid'"
op|','
name|'False'
op|','
nl|'\n'
number|'1'
op|','
number|'256'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_validate_non_int_size
dedent|''
name|'def'
name|'test_validate_non_int_size'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'context_admin'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'testuser'"
op|','
nl|'\n'
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'True'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'InvalidIntValue'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'create_networks'
op|','
nl|'\n'
name|'context_admin'
op|','
string|"'fake'"
op|','
string|"'192.168.0.0/24'"
op|','
name|'False'
op|','
nl|'\n'
number|'1'
op|','
string|"'invalid'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_validate_networks_none_requested_networks
dedent|''
name|'def'
name|'test_validate_networks_none_requested_networks'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'network'
op|'.'
name|'validate_networks'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'None'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_validate_networks_empty_requested_networks
dedent|''
name|'def'
name|'test_validate_networks_empty_requested_networks'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'requested_networks'
op|'='
op|'['
op|']'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'validate_networks'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'requested_networks'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_validate_networks_invalid_fixed_ip
dedent|''
name|'def'
name|'test_validate_networks_invalid_fixed_ip'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'requested_networks'
op|'='
op|'['
op|'('
string|"'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'"
op|','
nl|'\n'
string|"'192.168.1.100.1'"
op|')'
op|','
nl|'\n'
op|'('
string|"'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'"
op|','
nl|'\n'
string|"'192.168.0.100.1'"
op|')'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'FixedIpInvalid'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'validate_networks'
op|','
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'requested_networks'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_validate_networks_empty_fixed_ip
dedent|''
name|'def'
name|'test_validate_networks_empty_fixed_ip'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'requested_networks'
op|'='
op|'['
op|'('
string|"'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'"
op|','
nl|'\n'
string|"''"
op|')'
op|','
nl|'\n'
op|'('
string|"'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'"
op|','
nl|'\n'
string|"''"
op|')'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'FixedIpInvalid'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'validate_networks'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'requested_networks'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_validate_networks_none_fixed_ip
dedent|''
name|'def'
name|'test_validate_networks_none_fixed_ip'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'requested_networks'
op|'='
op|'['
op|'('
string|"'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'"
op|','
nl|'\n'
name|'None'
op|')'
op|','
nl|'\n'
op|'('
string|"'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'"
op|','
nl|'\n'
name|'None'
op|')'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'validate_networks'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'requested_networks'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.fixed_ip.FixedIPList.get_by_instance_uuid'"
op|')'
newline|'\n'
DECL|member|test_get_instance_nw_info
name|'def'
name|'test_get_instance_nw_info'
op|'('
name|'self'
op|','
name|'get'
op|')'
op|':'
newline|'\n'
nl|'\n'
DECL|function|make_ip
indent|' '
name|'def'
name|'make_ip'
op|'('
name|'index'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vif'
op|'='
name|'objects'
op|'.'
name|'VirtualInterface'
op|'('
name|'uuid'
op|'='
name|'uuids'
op|'.'
name|'vif1_uuid'
op|','
name|'address'
op|'='
name|'index'
op|')'
newline|'\n'
name|'network'
op|'='
name|'objects'
op|'.'
name|'Network'
op|'('
name|'uuid'
op|'='
name|'uuids'
op|'.'
name|'network_1'
op|','
nl|'\n'
name|'bridge'
op|'='
name|'index'
op|','
nl|'\n'
name|'label'
op|'='
name|'index'
op|','
nl|'\n'
name|'project_id'
op|'='
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
name|'injected'
op|'='
name|'False'
op|','
nl|'\n'
name|'netmask'
op|'='
string|"'255.255.255.0'"
op|','
nl|'\n'
name|'dns1'
op|'='
name|'None'
op|','
nl|'\n'
name|'dns2'
op|'='
name|'None'
op|','
nl|'\n'
name|'cidr_v6'
op|'='
name|'None'
op|','
nl|'\n'
name|'gateway_v6'
op|'='
name|'None'
op|','
nl|'\n'
name|'broadcast_v6'
op|'='
name|'None'
op|','
nl|'\n'
name|'netmask_v6'
op|'='
name|'None'
op|','
nl|'\n'
name|'rxtx_base'
op|'='
name|'None'
op|','
nl|'\n'
name|'gateway'
op|'='
string|"'192.168.%s.1'"
op|'%'
name|'index'
op|','
nl|'\n'
name|'dhcp_server'
op|'='
string|"'192.168.%s.1'"
op|'%'
name|'index'
op|','
nl|'\n'
name|'broadcast'
op|'='
string|"'192.168.%s.255'"
op|'%'
name|'index'
op|','
nl|'\n'
name|'cidr'
op|'='
string|"'192.168.%s.0/24'"
op|'%'
name|'index'
op|')'
newline|'\n'
name|'return'
name|'objects'
op|'.'
name|'FixedIP'
op|'('
name|'virtual_interface'
op|'='
name|'vif'
op|','
nl|'\n'
name|'network'
op|'='
name|'network'
op|','
nl|'\n'
name|'floating_ips'
op|'='
name|'objects'
op|'.'
name|'FloatingIPList'
op|'('
op|')'
op|','
nl|'\n'
name|'address'
op|'='
string|"'192.168.%s.2'"
op|'%'
name|'index'
op|')'
newline|'\n'
dedent|''
name|'objs'
op|'='
op|'['
name|'make_ip'
op|'('
name|'index'
op|')'
name|'for'
name|'index'
name|'in'
op|'('
string|"'3'"
op|','
string|"'1'"
op|','
string|"'2'"
op|')'
op|']'
newline|'\n'
name|'get'
op|'.'
name|'return_value'
op|'='
name|'objects'
op|'.'
name|'FixedIPList'
op|'('
name|'objects'
op|'='
name|'objs'
op|')'
newline|'\n'
name|'nw_info'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'get_instance_nw_info'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'None'
op|','
nl|'\n'
name|'None'
op|','
name|'None'
op|')'
newline|'\n'
name|'for'
name|'i'
op|','
name|'vif'
name|'in'
name|'enumerate'
op|'('
name|'nw_info'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'objs'
op|'['
name|'i'
op|']'
op|'.'
name|'network'
op|'.'
name|'bridge'
op|','
name|'vif'
op|'['
string|"'network'"
op|']'
op|'['
string|"'bridge'"
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'Network'
op|','
string|"'get_by_id'"
op|')'
newline|'\n'
DECL|member|test_add_fixed_ip_instance_using_id_without_vpn
name|'def'
name|'test_add_fixed_ip_instance_using_id_without_vpn'
op|'('
name|'self'
op|','
name|'get_by_id'
op|')'
op|':'
newline|'\n'
comment|'# Allocate a fixed ip from a network and assign it to an instance.'
nl|'\n'
comment|'# Network is given by network id.'
nl|'\n'
nl|'\n'
indent|' '
name|'network_id'
op|'='
name|'networks'
op|'['
number|'0'
op|']'
op|'['
string|"'id'"
op|']'
newline|'\n'
nl|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'network'
op|','
nl|'\n'
string|"'allocate_fixed_ip'"
op|')'
name|'as'
name|'allocate_fixed_ip'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'network'
op|'.'
name|'add_fixed_ip_to_instance'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'FAKEUUID'
op|','
name|'HOST'
op|','
nl|'\n'
name|'network_id'
op|')'
newline|'\n'
nl|'\n'
comment|'# Assert that we fetched the network by id, not uuid'
nl|'\n'
dedent|''
name|'get_by_id'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'network_id'
op|','
name|'project_only'
op|'='
string|"'allow_none'"
op|')'
newline|'\n'
nl|'\n'
comment|'# Assert that we called allocate_fixed_ip for the given network and'
nl|'\n'
comment|'# instance. We should not have requested a specific address from the'
nl|'\n'
comment|'# network.'
nl|'\n'
name|'allocate_fixed_ip'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'FAKEUUID'
op|','
nl|'\n'
name|'get_by_id'
op|'.'
name|'return_value'
op|','
nl|'\n'
name|'address'
op|'='
name|'None'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'Network'
op|','
string|"'get_by_uuid'"
op|')'
newline|'\n'
DECL|member|test_add_fixed_ip_instance_using_uuid_without_vpn
name|'def'
name|'test_add_fixed_ip_instance_using_uuid_without_vpn'
op|'('
name|'self'
op|','
name|'get_by_uuid'
op|')'
op|':'
newline|'\n'
comment|'# Allocate a fixed ip from a network and assign it to an instance.'
nl|'\n'
comment|'# Network is given by network uuid.'
nl|'\n'
nl|'\n'
indent|' '
name|'network_uuid'
op|'='
name|'networks'
op|'['
number|'0'
op|']'
op|'['
string|"'uuid'"
op|']'
newline|'\n'
nl|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'network'
op|','
nl|'\n'
string|"'allocate_fixed_ip'"
op|')'
name|'as'
name|'allocate_fixed_ip'
op|','
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'context'
op|','
string|"'elevated'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'elevated'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'network'
op|'.'
name|'add_fixed_ip_to_instance'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'FAKEUUID'
op|','
name|'HOST'
op|','
nl|'\n'
name|'network_uuid'
op|')'
newline|'\n'
nl|'\n'
comment|'# Assert that we fetched the network by uuid, not id, and with elevated'
nl|'\n'
comment|'# context'
nl|'\n'
dedent|''
name|'get_by_uuid'
op|'.'
name|'assert_called_once_with'
op|'('
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'elevated'
op|','
nl|'\n'
name|'network_uuid'
op|')'
newline|'\n'
nl|'\n'
comment|'# Assert that we called allocate_fixed_ip for the given network and'
nl|'\n'
comment|'# instance. We should not have requested a specific address from the'
nl|'\n'
comment|'# network.'
nl|'\n'
name|'allocate_fixed_ip'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'FAKEUUID'
op|','
nl|'\n'
name|'get_by_uuid'
op|'.'
name|'return_value'
op|','
nl|'\n'
name|'address'
op|'='
name|'None'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_mini_dns_driver
dedent|''
name|'def'
name|'test_mini_dns_driver'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'zone1'
op|'='
string|'"example.org"'
newline|'\n'
name|'zone2'
op|'='
string|'"example.com"'
newline|'\n'
name|'driver'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'instance_dns_manager'
newline|'\n'
name|'driver'
op|'.'
name|'create_entry'
op|'('
string|'"hostone"'
op|','
string|'"10.0.0.1"'
op|','
string|'"A"'
op|','
name|'zone1'
op|')'
newline|'\n'
name|'driver'
op|'.'
name|'create_entry'
op|'('
string|'"hosttwo"'
op|','
string|'"10.0.0.2"'
op|','
string|'"A"'
op|','
name|'zone1'
op|')'
newline|'\n'
name|'driver'
op|'.'
name|'create_entry'
op|'('
string|'"hostthree"'
op|','
string|'"10.0.0.3"'
op|','
string|'"A"'
op|','
name|'zone1'
op|')'
newline|'\n'
name|'driver'
op|'.'
name|'create_entry'
op|'('
string|'"hostfour"'
op|','
string|'"10.0.0.4"'
op|','
string|'"A"'
op|','
name|'zone1'
op|')'
newline|'\n'
name|'driver'
op|'.'
name|'create_entry'
op|'('
string|'"hostfive"'
op|','
string|'"10.0.0.5"'
op|','
string|'"A"'
op|','
name|'zone2'
op|')'
newline|'\n'
nl|'\n'
name|'driver'
op|'.'
name|'delete_entry'
op|'('
string|'"hostone"'
op|','
name|'zone1'
op|')'
newline|'\n'
name|'driver'
op|'.'
name|'modify_address'
op|'('
string|'"hostfour"'
op|','
string|'"10.0.0.1"'
op|','
name|'zone1'
op|')'
newline|'\n'
name|'driver'
op|'.'
name|'modify_address'
op|'('
string|'"hostthree"'
op|','
string|'"10.0.0.1"'
op|','
name|'zone1'
op|')'
newline|'\n'
name|'names'
op|'='
name|'driver'
op|'.'
name|'get_entries_by_address'
op|'('
string|'"10.0.0.1"'
op|','
name|'zone1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'2'
op|','
name|'len'
op|'('
name|'names'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
string|"'hostthree'"
op|','
name|'names'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
string|"'hostfour'"
op|','
name|'names'
op|')'
newline|'\n'
nl|'\n'
name|'names'
op|'='
name|'driver'
op|'.'
name|'get_entries_by_address'
op|'('
string|'"10.0.0.5"'
op|','
name|'zone2'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'len'
op|'('
name|'names'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
string|"'hostfive'"
op|','
name|'names'
op|')'
newline|'\n'
nl|'\n'
name|'addresses'
op|'='
name|'driver'
op|'.'
name|'get_entries_by_name'
op|'('
string|'"hosttwo"'
op|','
name|'zone1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'len'
op|'('
name|'addresses'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
string|"'10.0.0.2'"
op|','
name|'addresses'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'InvalidInput'
op|','
nl|'\n'
name|'driver'
op|'.'
name|'create_entry'
op|','
nl|'\n'
string|'"hostname"'
op|','
nl|'\n'
string|'"10.10.10.10"'
op|','
nl|'\n'
string|'"invalidtype"'
op|','
nl|'\n'
name|'zone1'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_mini_dns_driver_with_mixed_case
dedent|''
name|'def'
name|'test_mini_dns_driver_with_mixed_case'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'zone1'
op|'='
string|'"example.org"'
newline|'\n'
name|'driver'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'instance_dns_manager'
newline|'\n'
name|'driver'
op|'.'
name|'create_entry'
op|'('
string|'"HostTen"'
op|','
string|'"10.0.0.10"'
op|','
string|'"A"'
op|','
name|'zone1'
op|')'
newline|'\n'
name|'addresses'
op|'='
name|'driver'
op|'.'
name|'get_entries_by_address'
op|'('
string|'"10.0.0.10"'
op|','
name|'zone1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'len'
op|'('
name|'addresses'
op|')'
op|')'
newline|'\n'
name|'for'
name|'n'
name|'in'
name|'addresses'
op|':'
newline|'\n'
indent|' '
name|'driver'
op|'.'
name|'delete_entry'
op|'('
name|'n'
op|','
name|'zone1'
op|')'
newline|'\n'
dedent|''
name|'addresses'
op|'='
name|'driver'
op|'.'
name|'get_entries_by_address'
op|'('
string|'"10.0.0.10"'
op|','
name|'zone1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'0'
op|','
name|'len'
op|'('
name|'addresses'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_allocate_fixed_ip_instance_dns
dedent|''
name|'def'
name|'test_allocate_fixed_ip_instance_dns'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# Test DNS entries are created when allocating a fixed IP.'
nl|'\n'
comment|'# Allocate a fixed IP to an instance. Ensure that dns entries have been'
nl|'\n'
comment|"# created for the instance's name and uuid."
nl|'\n'
nl|'\n'
indent|' '
name|'network'
op|'='
name|'network_obj'
op|'.'
name|'Network'
op|'.'
name|'_from_db_object'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'network_obj'
op|'.'
name|'Network'
op|'('
op|')'
op|','
name|'test_network'
op|'.'
name|'fake_network'
op|')'
newline|'\n'
name|'network'
op|'.'
name|'save'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
nl|'\n'
comment|'# Create a minimal instance object'
nl|'\n'
name|'instance_params'
op|'='
op|'{'
nl|'\n'
string|"'display_name'"
op|':'
name|'HOST'
op|','
nl|'\n'
string|"'security_groups'"
op|':'
op|'['
op|']'
nl|'\n'
op|'}'
newline|'\n'
name|'instance'
op|'='
name|'fake_instance'
op|'.'
name|'fake_instance_obj'
op|'('
nl|'\n'
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'ignore'"
op|','
string|"'ignore'"
op|')'
op|','
nl|'\n'
name|'expected_attrs'
op|'='
name|'instance_params'
op|'.'
name|'keys'
op|'('
op|')'
op|','
op|'**'
name|'instance_params'
op|')'
newline|'\n'
name|'instance'
op|'.'
name|'save'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
nl|'\n'
comment|"# We don't specify a specific address, so we should get a FixedIP"
nl|'\n'
comment|'# automatically allocated from the pool. Fix its value here.'
nl|'\n'
name|'fip'
op|'='
name|'objects'
op|'.'
name|'FixedIP'
op|'('
name|'address'
op|'='
string|"'192.168.0.101'"
op|')'
newline|'\n'
name|'fip'
op|'.'
name|'save'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'Instance'
op|','
string|"'get_by_uuid'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'instance'
op|')'
op|','
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'FixedIP'
op|','
string|"'associate_pool'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'fip'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'network'
op|'.'
name|'allocate_fixed_ip'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'FAKEUUID'
op|','
name|'network'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'instance_manager'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'instance_dns_manager'
newline|'\n'
name|'expected_addresses'
op|'='
op|'['
string|"'192.168.0.101'"
op|']'
newline|'\n'
nl|'\n'
comment|'# Assert that we have a correct entry by instance display name'
nl|'\n'
name|'addresses'
op|'='
name|'instance_manager'
op|'.'
name|'get_entries_by_name'
op|'('
name|'HOST'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'instance_dns_domain'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected_addresses'
op|','
name|'addresses'
op|')'
newline|'\n'
nl|'\n'
comment|'# Assert that we have a correct entry by instance uuid'
nl|'\n'
name|'addresses'
op|'='
name|'instance_manager'
op|'.'
name|'get_entries_by_name'
op|'('
name|'FAKEUUID'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'instance_dns_domain'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected_addresses'
op|','
name|'addresses'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_allocate_floating_ip
dedent|''
name|'def'
name|'test_allocate_floating_ip'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'allocate_floating_ip'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
number|'1'
op|','
name|'None'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_deallocate_floating_ip
dedent|''
name|'def'
name|'test_deallocate_floating_ip'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'deallocate_floating_ip'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
number|'1'
op|','
name|'None'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_associate_floating_ip
dedent|''
name|'def'
name|'test_associate_floating_ip'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'associate_floating_ip'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'None'
op|','
name|'None'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_disassociate_floating_ip
dedent|''
name|'def'
name|'test_disassociate_floating_ip'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'disassociate_floating_ip'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'None'
op|','
name|'None'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_networks_by_uuids_ordering
dedent|''
name|'def'
name|'test_get_networks_by_uuids_ordering'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'db'
op|','
string|"'network_get_all_by_uuids'"
op|')'
newline|'\n'
nl|'\n'
name|'requested_networks'
op|'='
op|'['
string|"'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'"
op|','
nl|'\n'
string|"'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'"
op|']'
newline|'\n'
name|'db'
op|'.'
name|'network_get_all_by_uuids'
op|'('
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|')'
op|'.'
name|'AndReturn'
op|'('
nl|'\n'
op|'['
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
op|'**'
name|'net'
op|')'
nl|'\n'
name|'for'
name|'net'
name|'in'
name|'networks'
op|']'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
name|'res'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'_get_networks_by_uuids'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'requested_networks'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'res'
op|'['
number|'0'
op|']'
op|'['
string|"'id'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'0'
op|','
name|'res'
op|'['
number|'1'
op|']'
op|'['
string|"'id'"
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.instance.Instance.get_by_uuid'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.quotas.Quotas.reserve'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.quotas.ids_from_instance'"
op|')'
newline|'\n'
DECL|member|test_allocate_calculates_quota_auth
name|'def'
name|'test_allocate_calculates_quota_auth'
op|'('
name|'self'
op|','
name|'util_method'
op|','
name|'reserve'
op|','
nl|'\n'
name|'get_by_uuid'
op|')'
op|':'
newline|'\n'
indent|' '
name|'inst'
op|'='
name|'objects'
op|'.'
name|'Instance'
op|'('
op|')'
newline|'\n'
name|'inst'
op|'['
string|"'uuid'"
op|']'
op|'='
name|'uuids'
op|'.'
name|'instance'
newline|'\n'
name|'get_by_uuid'
op|'.'
name|'return_value'
op|'='
name|'inst'
newline|'\n'
name|'usages'
op|'='
op|'{'
string|"'fixed_ips'"
op|':'
op|'{'
string|"'in_use'"
op|':'
number|'10'
op|','
string|"'reserved'"
op|':'
number|'1'
op|'}'
op|'}'
newline|'\n'
name|'reserve'
op|'.'
name|'side_effect'
op|'='
name|'exception'
op|'.'
name|'OverQuota'
op|'('
name|'overs'
op|'='
string|"'testing'"
op|','
nl|'\n'
name|'quotas'
op|'='
op|'{'
string|"'fixed_ips'"
op|':'
number|'10'
op|'}'
op|','
nl|'\n'
name|'usages'
op|'='
name|'usages'
op|')'
newline|'\n'
name|'util_method'
op|'.'
name|'return_value'
op|'='
op|'('
string|"'foo'"
op|','
string|"'bar'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'FixedIpLimitExceeded'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'allocate_fixed_ip'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
number|'123'
op|','
op|'{'
string|"'uuid'"
op|':'
name|'uuids'
op|'.'
name|'instance'
op|'}'
op|')'
newline|'\n'
name|'util_method'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'inst'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.fixed_ip.FixedIP.get_by_address'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.quotas.Quotas.reserve'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.quotas.ids_from_instance'"
op|')'
newline|'\n'
DECL|member|test_deallocate_calculates_quota_auth
name|'def'
name|'test_deallocate_calculates_quota_auth'
op|'('
name|'self'
op|','
name|'util_method'
op|','
name|'reserve'
op|','
nl|'\n'
name|'get_by_address'
op|')'
op|':'
newline|'\n'
indent|' '
name|'inst'
op|'='
name|'objects'
op|'.'
name|'Instance'
op|'('
name|'uuid'
op|'='
name|'uuids'
op|'.'
name|'instance'
op|')'
newline|'\n'
name|'fip'
op|'='
name|'objects'
op|'.'
name|'FixedIP'
op|'('
name|'instance_uuid'
op|'='
name|'uuids'
op|'.'
name|'instance'
op|','
nl|'\n'
name|'virtual_interface_id'
op|'='
number|'1'
op|')'
newline|'\n'
name|'get_by_address'
op|'.'
name|'return_value'
op|'='
name|'fip'
newline|'\n'
name|'util_method'
op|'.'
name|'return_value'
op|'='
op|'('
string|"'foo'"
op|','
string|"'bar'"
op|')'
newline|'\n'
comment|'# This will fail right after the reserve call when it tries'
nl|'\n'
comment|'# to look up the fake instance we created above'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'InstanceNotFound'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'deallocate_fixed_ip'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
string|"'1.2.3.4'"
op|','
name|'instance'
op|'='
name|'inst'
op|')'
newline|'\n'
name|'util_method'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'inst'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.instance.Instance.get_by_uuid'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.fixed_ip.FixedIP.associate'"
op|')'
newline|'\n'
DECL|member|test_allocate_fixed_ip_passes_string_address
name|'def'
name|'test_allocate_fixed_ip_passes_string_address'
op|'('
name|'self'
op|','
name|'mock_associate'
op|','
nl|'\n'
name|'mock_get'
op|')'
op|':'
newline|'\n'
indent|' '
name|'mock_associate'
op|'.'
name|'side_effect'
op|'='
name|'test'
op|'.'
name|'TestingException'
newline|'\n'
name|'instance'
op|'='
name|'objects'
op|'.'
name|'Instance'
op|'('
name|'context'
op|'='
name|'self'
op|'.'
name|'context'
op|')'
newline|'\n'
name|'instance'
op|'.'
name|'create'
op|'('
op|')'
newline|'\n'
name|'mock_get'
op|'.'
name|'return_value'
op|'='
name|'instance'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'test'
op|'.'
name|'TestingException'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'allocate_fixed_ip'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'instance'
op|'.'
name|'uuid'
op|','
nl|'\n'
op|'{'
string|"'cidr'"
op|':'
string|"'24'"
op|','
string|"'id'"
op|':'
number|'1'
op|','
string|"'uuid'"
op|':'
name|'uuids'
op|'.'
name|'instance'
op|'}'
op|','
nl|'\n'
name|'address'
op|'='
name|'netaddr'
op|'.'
name|'IPAddress'
op|'('
string|"'1.2.3.4'"
op|')'
op|')'
newline|'\n'
name|'mock_associate'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
string|"'1.2.3.4'"
op|','
nl|'\n'
name|'instance'
op|'.'
name|'uuid'
op|','
nl|'\n'
number|'1'
op|','
nl|'\n'
name|'vif_id'
op|'='
number|'1'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.instance.Instance.get_by_uuid'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.virtual_interface.VirtualInterface'"
nl|'\n'
string|"'.get_by_instance_and_network'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.fixed_ip.FixedIP.disassociate'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.fixed_ip.FixedIP.associate'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.fixed_ip.FixedIP.save'"
op|')'
newline|'\n'
DECL|member|test_allocate_fixed_ip_cleanup
name|'def'
name|'test_allocate_fixed_ip_cleanup'
op|'('
name|'self'
op|','
nl|'\n'
name|'mock_fixedip_save'
op|','
nl|'\n'
name|'mock_fixedip_associate'
op|','
nl|'\n'
name|'mock_fixedip_disassociate'
op|','
nl|'\n'
name|'mock_vif_get'
op|','
nl|'\n'
name|'mock_instance_get'
op|')'
op|':'
newline|'\n'
indent|' '
name|'address'
op|'='
name|'netaddr'
op|'.'
name|'IPAddress'
op|'('
string|"'1.2.3.4'"
op|')'
newline|'\n'
nl|'\n'
name|'fip'
op|'='
name|'objects'
op|'.'
name|'FixedIP'
op|'('
name|'instance_uuid'
op|'='
name|'uuids'
op|'.'
name|'instance'
op|','
nl|'\n'
name|'address'
op|'='
name|'address'
op|','
nl|'\n'
name|'virtual_interface_id'
op|'='
number|'1'
op|')'
newline|'\n'
name|'mock_fixedip_associate'
op|'.'
name|'return_value'
op|'='
name|'fip'
newline|'\n'
nl|'\n'
name|'instance'
op|'='
name|'objects'
op|'.'
name|'Instance'
op|'('
name|'context'
op|'='
name|'self'
op|'.'
name|'context'
op|')'
newline|'\n'
name|'instance'
op|'.'
name|'create'
op|'('
op|')'
newline|'\n'
name|'mock_instance_get'
op|'.'
name|'return_value'
op|'='
name|'instance'
newline|'\n'
nl|'\n'
name|'mock_vif_get'
op|'.'
name|'return_value'
op|'='
name|'vif_obj'
op|'.'
name|'VirtualInterface'
op|'('
nl|'\n'
name|'instance_uuid'
op|'='
name|'uuids'
op|'.'
name|'instance'
op|','
name|'id'
op|'='
number|'1'
op|')'
newline|'\n'
nl|'\n'
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'network'
op|','
string|"'_setup_network_on_host'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'network'
op|','
string|"'instance_dns_manager'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'network'
op|','
nl|'\n'
string|"'_do_trigger_security_group_members_refresh_for_instance'"
op|')'
nl|'\n'
op|')'
name|'as'
op|'('
name|'mock_setup_network'
op|','
name|'mock_dns_manager'
op|','
name|'mock_ignored'
op|')'
op|':'
newline|'\n'
indent|' '
name|'mock_setup_network'
op|'.'
name|'side_effect'
op|'='
name|'test'
op|'.'
name|'TestingException'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'test'
op|'.'
name|'TestingException'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'allocate_fixed_ip'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'instance'
op|'.'
name|'uuid'
op|','
nl|'\n'
op|'{'
string|"'cidr'"
op|':'
string|"'24'"
op|','
string|"'id'"
op|':'
number|'1'
op|','
nl|'\n'
string|"'uuid'"
op|':'
name|'uuids'
op|'.'
name|'instance'
op|'}'
op|','
nl|'\n'
name|'address'
op|'='
name|'address'
op|')'
newline|'\n'
nl|'\n'
name|'mock_dns_manager'
op|'.'
name|'delete_entry'
op|'.'
name|'assert_has_calls'
op|'('
op|'['
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
name|'instance'
op|'.'
name|'display_name'
op|','
string|"''"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
name|'instance'
op|'.'
name|'uuid'
op|','
string|"''"
op|')'
nl|'\n'
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'mock_fixedip_disassociate'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.instance.Instance.get_by_uuid'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.virtual_interface.VirtualInterface'"
nl|'\n'
string|"'.get_by_instance_and_network'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.fixed_ip.FixedIP.disassociate'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.fixed_ip.FixedIP.associate_pool'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.network.manager.NetworkManager._add_virtual_interface'"
op|')'
newline|'\n'
DECL|member|test_allocate_fixed_ip_create_new_vifs
name|'def'
name|'test_allocate_fixed_ip_create_new_vifs'
op|'('
name|'self'
op|','
nl|'\n'
name|'mock_add'
op|','
nl|'\n'
name|'mock_fixedip_associate'
op|','
nl|'\n'
name|'mock_fixedip_disassociate'
op|','
nl|'\n'
name|'mock_vif_get'
op|','
nl|'\n'
name|'mock_instance_get'
op|')'
op|':'
newline|'\n'
indent|' '
name|'address'
op|'='
name|'netaddr'
op|'.'
name|'IPAddress'
op|'('
string|"'1.2.3.4'"
op|')'
newline|'\n'
nl|'\n'
name|'fip'
op|'='
name|'objects'
op|'.'
name|'FixedIP'
op|'('
name|'instance_uuid'
op|'='
name|'uuids'
op|'.'
name|'instance'
op|','
nl|'\n'
name|'address'
op|'='
name|'address'
op|','
nl|'\n'
name|'virtual_interface_id'
op|'='
number|'1000'
op|')'
newline|'\n'
name|'net'
op|'='
op|'{'
string|"'cidr'"
op|':'
string|"'24'"
op|','
string|"'id'"
op|':'
number|'1'
op|','
string|"'uuid'"
op|':'
name|'uuids'
op|'.'
name|'instance'
op|'}'
newline|'\n'
name|'instance'
op|'='
name|'objects'
op|'.'
name|'Instance'
op|'('
name|'context'
op|'='
name|'self'
op|'.'
name|'context'
op|')'
newline|'\n'
name|'instance'
op|'.'
name|'create'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'vif'
op|'='
name|'objects'
op|'.'
name|'VirtualInterface'
op|'('
name|'context'
op|','
nl|'\n'
name|'id'
op|'='
number|'1000'
op|','
nl|'\n'
name|'address'
op|'='
string|"'00:00:00:00:00:00'"
op|','
nl|'\n'
name|'instance_uuid'
op|'='
name|'instance'
op|'.'
name|'uuid'
op|','
nl|'\n'
name|'network_id'
op|'='
name|'net'
op|'['
string|"'id'"
op|']'
op|','
nl|'\n'
name|'uuid'
op|'='
name|'uuids'
op|'.'
name|'instance'
op|')'
newline|'\n'
name|'mock_fixedip_associate'
op|'.'
name|'return_value'
op|'='
name|'fip'
newline|'\n'
name|'mock_add'
op|'.'
name|'return_value'
op|'='
name|'vif'
newline|'\n'
name|'mock_instance_get'
op|'.'
name|'return_value'
op|'='
name|'instance'
newline|'\n'
name|'mock_vif_get'
op|'.'
name|'return_value'
op|'='
name|'None'
newline|'\n'
nl|'\n'
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'network'
op|','
string|"'_setup_network_on_host'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'network'
op|','
string|"'instance_dns_manager'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'network'
op|','
nl|'\n'
string|"'_do_trigger_security_group_members_refresh_for_instance'"
op|')'
nl|'\n'
op|')'
name|'as'
op|'('
name|'mock_setup_network'
op|','
name|'mock_dns_manager'
op|','
name|'mock_ignored'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'network'
op|'.'
name|'allocate_fixed_ip'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'instance'
op|'['
string|"'uuid'"
op|']'
op|','
nl|'\n'
name|'net'
op|')'
newline|'\n'
name|'mock_add'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'instance'
op|'['
string|"'uuid'"
op|']'
op|','
nl|'\n'
name|'net'
op|'['
string|"'id'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'fip'
op|'.'
name|'virtual_interface_id'
op|','
name|'vif'
op|'.'
name|'id'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.instance.Instance.get_by_uuid'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'db'
op|','
string|"'virtual_interface_get_by_instance_and_network'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'None'
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.fixed_ip.FixedIP'"
op|')'
newline|'\n'
DECL|member|test_allocate_fixed_ip_add_vif_fails
name|'def'
name|'test_allocate_fixed_ip_add_vif_fails'
op|'('
name|'self'
op|','
name|'mock_fixedip'
op|','
nl|'\n'
name|'mock_get_vif'
op|','
name|'mock_instance_get'
op|')'
op|':'
newline|'\n'
comment|"# Tests that we don't try to do anything with fixed IPs if"
nl|'\n'
comment|'# _add_virtual_interface fails.'
nl|'\n'
indent|' '
name|'instance'
op|'='
name|'fake_instance'
op|'.'
name|'fake_instance_obj'
op|'('
name|'self'
op|'.'
name|'context'
op|')'
newline|'\n'
name|'mock_instance_get'
op|'.'
name|'return_value'
op|'='
name|'instance'
newline|'\n'
name|'network'
op|'='
op|'{'
string|"'cidr'"
op|':'
string|"'24'"
op|','
string|"'id'"
op|':'
number|'1'
op|','
nl|'\n'
string|"'uuid'"
op|':'
string|"'398399b3-f696-4859-8695-a6560e14cb02'"
op|'}'
newline|'\n'
name|'vif_error'
op|'='
name|'exception'
op|'.'
name|'VirtualInterfaceMacAddressException'
op|'('
op|')'
newline|'\n'
comment|"# mock out quotas because we don't care in this test"
nl|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'network'
op|','
string|"'quotas_cls'"
op|','
name|'objects'
op|'.'
name|'QuotasNoOp'
op|')'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'network'
op|','
string|"'_add_virtual_interface'"
op|','
nl|'\n'
name|'side_effect'
op|'='
name|'vif_error'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertRaises'
op|'('
nl|'\n'
name|'exception'
op|'.'
name|'VirtualInterfaceMacAddressException'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'allocate_fixed_ip'
op|','
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
string|"'9d2ee1e3-ffad-4e5f-81ff-c96dd97b0ee0'"
op|','
name|'network'
op|')'
newline|'\n'
dedent|''
dedent|''
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'mock_fixedip'
op|'.'
name|'called'
op|','
name|'str'
op|'('
name|'mock_fixedip'
op|'.'
name|'mock_calls'
op|')'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|FlatDHCPNetworkTestCase
dedent|''
dedent|''
name|'class'
name|'FlatDHCPNetworkTestCase'
op|'('
name|'test'
op|'.'
name|'TestCase'
op|')'
op|':'
newline|'\n'
nl|'\n'
DECL|variable|REQUIRES_LOCKING
indent|' '
name|'REQUIRES_LOCKING'
op|'='
name|'True'
newline|'\n'
nl|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'FlatDHCPNetworkTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'useFixture'
op|'('
name|'test'
op|'.'
name|'SampleNetworks'
op|'('
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'use_local'
op|'='
name|'True'
op|','
name|'group'
op|'='
string|"'conductor'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'='
name|'network_manager'
op|'.'
name|'FlatDHCPManager'
op|'('
name|'host'
op|'='
name|'HOST'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|'='
name|'db'
newline|'\n'
name|'self'
op|'.'
name|'context'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'testuser'"
op|','
nl|'\n'
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'False'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'context_admin'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'testuser'"
op|','
nl|'\n'
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'True'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.fixed_ip.FixedIP.get_by_id'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.floating_ip.FloatingIPList.get_by_host'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.network.linux_net.iptables_manager._apply'"
op|')'
newline|'\n'
DECL|member|test_init_host_iptables_defer_apply
name|'def'
name|'test_init_host_iptables_defer_apply'
op|'('
name|'self'
op|','
name|'iptable_apply'
op|','
nl|'\n'
name|'floating_get_by_host'
op|','
nl|'\n'
name|'fixed_get_by_id'
op|')'
op|':'
newline|'\n'
DECL|function|get_by_id
indent|' '
name|'def'
name|'get_by_id'
op|'('
name|'context'
op|','
name|'fixed_ip_id'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'net'
op|'='
name|'objects'
op|'.'
name|'Network'
op|'('
name|'bridge'
op|'='
string|"'testbridge'"
op|','
nl|'\n'
name|'cidr'
op|'='
string|"'192.168.1.0/24'"
op|')'
newline|'\n'
name|'if'
name|'fixed_ip_id'
op|'=='
number|'1'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'objects'
op|'.'
name|'FixedIP'
op|'('
name|'address'
op|'='
string|"'192.168.1.4'"
op|','
nl|'\n'
name|'network'
op|'='
name|'net'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'fixed_ip_id'
op|'=='
number|'2'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'objects'
op|'.'
name|'FixedIP'
op|'('
name|'address'
op|'='
string|"'192.168.1.5'"
op|','
nl|'\n'
name|'network'
op|'='
name|'net'
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake_apply
dedent|''
dedent|''
name|'def'
name|'fake_apply'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
name|'fake_apply'
op|'.'
name|'count'
op|'+='
number|'1'
newline|'\n'
nl|'\n'
dedent|''
name|'fake_apply'
op|'.'
name|'count'
op|'='
number|'0'
newline|'\n'
name|'ctxt'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'testuser'"
op|','
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'True'
op|')'
newline|'\n'
name|'float1'
op|'='
name|'objects'
op|'.'
name|'FloatingIP'
op|'('
name|'address'
op|'='
string|"'1.2.3.4'"
op|','
name|'fixed_ip_id'
op|'='
number|'1'
op|')'
newline|'\n'
name|'float2'
op|'='
name|'objects'
op|'.'
name|'FloatingIP'
op|'('
name|'address'
op|'='
string|"'1.2.3.5'"
op|','
name|'fixed_ip_id'
op|'='
number|'2'
op|')'
newline|'\n'
name|'float1'
op|'.'
name|'_context'
op|'='
name|'ctxt'
newline|'\n'
name|'float2'
op|'.'
name|'_context'
op|'='
name|'ctxt'
newline|'\n'
nl|'\n'
name|'iptable_apply'
op|'.'
name|'side_effect'
op|'='
name|'fake_apply'
newline|'\n'
name|'floating_get_by_host'
op|'.'
name|'return_value'
op|'='
op|'['
name|'float1'
op|','
name|'float2'
op|']'
newline|'\n'
name|'fixed_get_by_id'
op|'.'
name|'side_effect'
op|'='
name|'get_by_id'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'init_host'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'fake_apply'
op|'.'
name|'count'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|VlanNetworkTestCase
dedent|''
dedent|''
name|'class'
name|'VlanNetworkTestCase'
op|'('
name|'test'
op|'.'
name|'TestCase'
op|')'
op|':'
newline|'\n'
nl|'\n'
DECL|variable|REQUIRES_LOCKING
indent|' '
name|'REQUIRES_LOCKING'
op|'='
name|'True'
newline|'\n'
nl|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'VlanNetworkTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'useFixture'
op|'('
name|'test'
op|'.'
name|'SampleNetworks'
op|'('
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'='
name|'network_manager'
op|'.'
name|'VlanManager'
op|'('
name|'host'
op|'='
name|'HOST'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|'='
name|'db'
newline|'\n'
name|'self'
op|'.'
name|'context'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'testuser'"
op|','
nl|'\n'
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'False'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'context_admin'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'testuser'"
op|','
nl|'\n'
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'True'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_quota_driver_type
dedent|''
name|'def'
name|'test_quota_driver_type'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'objects'
op|'.'
name|'QuotasNoOp'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'quotas_cls'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_vpn_allocate_fixed_ip
dedent|''
name|'def'
name|'test_vpn_allocate_fixed_ip'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'db'
op|','
string|"'fixed_ip_associate'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'db'
op|','
string|"'fixed_ip_update'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'db'
op|','
nl|'\n'
string|"'virtual_interface_get_by_instance_and_network'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'db'
op|','
string|"'instance_get_by_uuid'"
op|')'
newline|'\n'
nl|'\n'
name|'fixed'
op|'='
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
nl|'\n'
name|'address'
op|'='
string|"'192.168.0.1'"
op|')'
newline|'\n'
name|'db'
op|'.'
name|'fixed_ip_associate'
op|'('
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'network_id'
op|'='
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'reserved'
op|'='
name|'True'
op|','
nl|'\n'
name|'virtual_interface_id'
op|'='
name|'vifs'
op|'['
number|'0'
op|']'
op|'['
string|"'id'"
op|']'
nl|'\n'
op|')'
op|'.'
name|'AndReturn'
op|'('
name|'fixed'
op|')'
newline|'\n'
name|'db'
op|'.'
name|'virtual_interface_get_by_instance_and_network'
op|'('
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|')'
op|'.'
name|'AndReturn'
op|'('
name|'vifs'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
name|'db'
op|'.'
name|'instance_get_by_uuid'
op|'('
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'columns_to_join'
op|'='
op|'['
string|"'info_cache'"
op|','
nl|'\n'
string|"'security_groups'"
op|']'
nl|'\n'
op|')'
op|'.'
name|'AndReturn'
op|'('
name|'fake_inst'
op|'('
name|'display_name'
op|'='
name|'HOST'
op|','
nl|'\n'
name|'uuid'
op|'='
name|'FAKEUUID'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'network'
op|'='
name|'objects'
op|'.'
name|'Network'
op|'.'
name|'_from_db_object'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'objects'
op|'.'
name|'Network'
op|'('
op|')'
op|','
nl|'\n'
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
op|'**'
name|'networks'
op|'['
number|'0'
op|']'
op|')'
op|')'
newline|'\n'
name|'network'
op|'.'
name|'vpn_private_address'
op|'='
string|"'192.168.0.2'"
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'allocate_fixed_ip'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'FAKEUUID'
op|','
name|'network'
op|','
nl|'\n'
name|'vpn'
op|'='
name|'True'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_allocate_fixed_ip
dedent|''
name|'def'
name|'test_allocate_fixed_ip'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|','
nl|'\n'
string|"'_do_trigger_security_group_members_refresh_for_instance'"
op|','
nl|'\n'
name|'lambda'
op|'*'
name|'a'
op|','
op|'**'
name|'kw'
op|':'
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'db'
op|','
string|"'fixed_ip_associate_pool'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'db'
op|','
nl|'\n'
string|"'virtual_interface_get_by_instance_and_network'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'db'
op|','
string|"'instance_get_by_uuid'"
op|')'
newline|'\n'
nl|'\n'
name|'fixed'
op|'='
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
nl|'\n'
name|'address'
op|'='
string|"'192.168.0.1'"
op|')'
newline|'\n'
name|'db'
op|'.'
name|'fixed_ip_associate_pool'
op|'('
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'instance_uuid'
op|'='
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'host'
op|'='
name|'None'
op|','
nl|'\n'
name|'virtual_interface_id'
op|'='
name|'vifs'
op|'['
number|'0'
op|']'
op|'['
string|"'id'"
op|']'
nl|'\n'
op|')'
op|'.'
name|'AndReturn'
op|'('
name|'fixed'
op|')'
newline|'\n'
name|'db'
op|'.'
name|'virtual_interface_get_by_instance_and_network'
op|'('
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|')'
op|'.'
name|'AndReturn'
op|'('
name|'vifs'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
name|'db'
op|'.'
name|'instance_get_by_uuid'
op|'('
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'columns_to_join'
op|'='
op|'['
string|"'info_cache'"
op|','
nl|'\n'
string|"'security_groups'"
op|']'
nl|'\n'
op|')'
op|'.'
name|'AndReturn'
op|'('
name|'fake_inst'
op|'('
name|'display_name'
op|'='
name|'HOST'
op|','
nl|'\n'
name|'uuid'
op|'='
name|'FAKEUUID'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'network'
op|'='
name|'objects'
op|'.'
name|'Network'
op|'.'
name|'_from_db_object'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'objects'
op|'.'
name|'Network'
op|'('
op|')'
op|','
nl|'\n'
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
op|'**'
name|'networks'
op|'['
number|'0'
op|']'
op|')'
op|')'
newline|'\n'
name|'network'
op|'.'
name|'vpn_private_address'
op|'='
string|"'192.168.0.2'"
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'allocate_fixed_ip'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'FAKEUUID'
op|','
name|'network'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.network.manager.VlanManager._setup_network_on_host'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.network.manager.VlanManager.'"
nl|'\n'
string|"'_validate_instance_zone_for_dns_domain'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.network.manager.VlanManager.'"
nl|'\n'
string|"'_do_trigger_security_group_members_refresh_for_instance'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.network.manager.VlanManager._add_virtual_interface'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.instance.Instance.get_by_uuid'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.fixed_ip.FixedIP.associate'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.VirtualInterface.get_by_instance_and_network'"
op|')'
newline|'\n'
DECL|member|test_allocate_fixed_ip_return_none
name|'def'
name|'test_allocate_fixed_ip_return_none'
op|'('
name|'self'
op|','
name|'mock_get'
op|','
nl|'\n'
name|'mock_associate'
op|','
name|'mock_get_uuid'
op|','
name|'mock_add'
op|','
name|'mock_trigger'
op|','
nl|'\n'
name|'mock_validate'
op|','
name|'mock_setup'
op|')'
op|':'
newline|'\n'
indent|' '
name|'net'
op|'='
op|'{'
string|"'cidr'"
op|':'
string|"'24'"
op|','
string|"'id'"
op|':'
number|'1'
op|','
string|"'uuid'"
op|':'
name|'uuids'
op|'.'
name|'instance'
op|'}'
newline|'\n'
name|'fip'
op|'='
name|'objects'
op|'.'
name|'FixedIP'
op|'('
name|'instance_uuid'
op|'='
name|'uuids'
op|'.'
name|'instance'
op|','
nl|'\n'
name|'address'
op|'='
name|'netaddr'
op|'.'
name|'IPAddress'
op|'('
string|"'1.2.3.4'"
op|')'
op|','
nl|'\n'
name|'virtual_interface_id'
op|'='
number|'1'
op|')'
newline|'\n'
nl|'\n'
name|'instance'
op|'='
name|'objects'
op|'.'
name|'Instance'
op|'('
name|'context'
op|'='
name|'self'
op|'.'
name|'context'
op|')'
newline|'\n'
name|'instance'
op|'.'
name|'create'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'vif'
op|'='
name|'objects'
op|'.'
name|'VirtualInterface'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'id'
op|'='
number|'1000'
op|','
nl|'\n'
name|'address'
op|'='
string|"'00:00:00:00:00:00'"
op|','
nl|'\n'
name|'instance_uuid'
op|'='
name|'instance'
op|'.'
name|'uuid'
op|','
nl|'\n'
name|'network_id'
op|'='
name|'net'
op|'['
string|"'id'"
op|']'
op|','
nl|'\n'
name|'uuid'
op|'='
name|'uuids'
op|'.'
name|'instance'
op|')'
newline|'\n'
name|'mock_associate'
op|'.'
name|'return_value'
op|'='
name|'fip'
newline|'\n'
name|'mock_add'
op|'.'
name|'return_value'
op|'='
name|'vif'
newline|'\n'
name|'mock_get'
op|'.'
name|'return_value'
op|'='
name|'None'
newline|'\n'
name|'mock_get_uuid'
op|'.'
name|'return_value'
op|'='
name|'instance'
newline|'\n'
name|'mock_validate'
op|'.'
name|'return_value'
op|'='
name|'False'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'allocate_fixed_ip'
op|'('
name|'self'
op|'.'
name|'context_admin'
op|','
name|'instance'
op|'.'
name|'uuid'
op|','
name|'net'
op|')'
newline|'\n'
nl|'\n'
name|'mock_add'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context_admin'
op|','
name|'instance'
op|'.'
name|'uuid'
op|','
nl|'\n'
name|'net'
op|'['
string|"'id'"
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.instance.Instance.get_by_uuid'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.fixed_ip.FixedIP.associate'"
op|')'
newline|'\n'
DECL|member|test_allocate_fixed_ip_passes_string_address
name|'def'
name|'test_allocate_fixed_ip_passes_string_address'
op|'('
name|'self'
op|','
name|'mock_associate'
op|','
nl|'\n'
name|'mock_get'
op|')'
op|':'
newline|'\n'
indent|' '
name|'mock_associate'
op|'.'
name|'side_effect'
op|'='
name|'test'
op|'.'
name|'TestingException'
newline|'\n'
name|'instance'
op|'='
name|'objects'
op|'.'
name|'Instance'
op|'('
name|'context'
op|'='
name|'self'
op|'.'
name|'context'
op|')'
newline|'\n'
name|'instance'
op|'.'
name|'create'
op|'('
op|')'
newline|'\n'
name|'mock_get'
op|'.'
name|'return_value'
op|'='
name|'instance'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'test'
op|'.'
name|'TestingException'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'allocate_fixed_ip'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'instance'
op|'.'
name|'uuid'
op|','
nl|'\n'
op|'{'
string|"'cidr'"
op|':'
string|"'24'"
op|','
string|"'id'"
op|':'
number|'1'
op|','
string|"'uuid'"
op|':'
name|'uuids'
op|'.'
name|'instance'
op|'}'
op|','
nl|'\n'
name|'address'
op|'='
name|'netaddr'
op|'.'
name|'IPAddress'
op|'('
string|"'1.2.3.4'"
op|')'
op|')'
newline|'\n'
name|'mock_associate'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
string|"'1.2.3.4'"
op|','
nl|'\n'
name|'instance'
op|'.'
name|'uuid'
op|','
nl|'\n'
number|'1'
op|','
nl|'\n'
name|'vif_id'
op|'='
number|'1'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.instance.Instance.get_by_uuid'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.fixed_ip.FixedIP.associate'"
op|')'
newline|'\n'
DECL|member|test_allocate_fixed_ip_passes_string_address_vpn
name|'def'
name|'test_allocate_fixed_ip_passes_string_address_vpn'
op|'('
name|'self'
op|','
name|'mock_associate'
op|','
nl|'\n'
name|'mock_get'
op|')'
op|':'
newline|'\n'
indent|' '
name|'mock_associate'
op|'.'
name|'side_effect'
op|'='
name|'test'
op|'.'
name|'TestingException'
newline|'\n'
name|'instance'
op|'='
name|'objects'
op|'.'
name|'Instance'
op|'('
name|'context'
op|'='
name|'self'
op|'.'
name|'context'
op|')'
newline|'\n'
name|'instance'
op|'.'
name|'create'
op|'('
op|')'
newline|'\n'
name|'mock_get'
op|'.'
name|'return_value'
op|'='
name|'instance'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'test'
op|'.'
name|'TestingException'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'allocate_fixed_ip'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'instance'
op|'.'
name|'uuid'
op|','
nl|'\n'
op|'{'
string|"'cidr'"
op|':'
string|"'24'"
op|','
string|"'id'"
op|':'
number|'1'
op|','
string|"'uuid'"
op|':'
name|'uuids'
op|'.'
name|'instance'
op|','
nl|'\n'
string|"'vpn_private_address'"
op|':'
name|'netaddr'
op|'.'
name|'IPAddress'
op|'('
string|"'1.2.3.4'"
op|')'
nl|'\n'
op|'}'
op|','
name|'vpn'
op|'='
number|'1'
op|')'
newline|'\n'
name|'mock_associate'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
string|"'1.2.3.4'"
op|','
nl|'\n'
name|'instance'
op|'.'
name|'uuid'
op|','
nl|'\n'
number|'1'
op|','
name|'reserved'
op|'='
name|'True'
op|','
nl|'\n'
name|'vif_id'
op|'='
number|'1'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'db'
op|','
string|"'virtual_interface_get_by_instance_and_network'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'None'
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.fixed_ip.FixedIP'"
op|')'
newline|'\n'
DECL|member|test_allocate_fixed_ip_add_vif_fails
name|'def'
name|'test_allocate_fixed_ip_add_vif_fails'
op|'('
name|'self'
op|','
name|'mock_fixedip'
op|','
nl|'\n'
name|'mock_get_vif'
op|')'
op|':'
newline|'\n'
comment|"# Tests that we don't try to do anything with fixed IPs if"
nl|'\n'
comment|'# _add_virtual_interface fails.'
nl|'\n'
indent|' '
name|'vif_error'
op|'='
name|'exception'
op|'.'
name|'VirtualInterfaceMacAddressException'
op|'('
op|')'
newline|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'network'
op|','
string|"'_add_virtual_interface'"
op|','
nl|'\n'
name|'side_effect'
op|'='
name|'vif_error'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'VirtualInterfaceMacAddressException'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'allocate_fixed_ip'
op|','
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
string|"'9d2ee1e3-ffad-4e5f-81ff-c96dd97b0ee0'"
op|','
nl|'\n'
name|'networks'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'mock_fixedip'
op|'.'
name|'called'
op|','
name|'str'
op|'('
name|'mock_fixedip'
op|'.'
name|'mock_calls'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_create_networks_too_big
dedent|''
name|'def'
name|'test_create_networks_too_big'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'ValueError'
op|','
name|'self'
op|'.'
name|'network'
op|'.'
name|'create_networks'
op|','
name|'None'
op|','
nl|'\n'
name|'num_networks'
op|'='
number|'4094'
op|','
name|'vlan_start'
op|'='
number|'1'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_create_networks_too_many
dedent|''
name|'def'
name|'test_create_networks_too_many'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'ValueError'
op|','
name|'self'
op|'.'
name|'network'
op|'.'
name|'create_networks'
op|','
name|'None'
op|','
nl|'\n'
name|'num_networks'
op|'='
number|'100'
op|','
name|'vlan_start'
op|'='
number|'1'
op|','
nl|'\n'
name|'cidr'
op|'='
string|"'192.168.0.1/24'"
op|','
name|'network_size'
op|'='
number|'100'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_duplicate_vlan_raises
dedent|''
name|'def'
name|'test_duplicate_vlan_raises'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# VLAN 100 is already used and we force the network to be created'
nl|'\n'
comment|'# in that vlan (vlan=100).'
nl|'\n'
indent|' '
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'DuplicateVlan'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'create_networks'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context_admin'
op|','
name|'label'
op|'='
string|'"fake"'
op|','
name|'num_networks'
op|'='
number|'1'
op|','
nl|'\n'
name|'vlan'
op|'='
number|'100'
op|','
name|'cidr'
op|'='
string|"'192.168.0.1/24'"
op|','
name|'network_size'
op|'='
number|'100'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_vlan_start
dedent|''
name|'def'
name|'test_vlan_start'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# VLAN 100 and 101 are used, so this network shoud be created in 102'
nl|'\n'
indent|' '
name|'networks'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'create_networks'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context_admin'
op|','
name|'label'
op|'='
string|'"fake"'
op|','
name|'num_networks'
op|'='
number|'1'
op|','
nl|'\n'
name|'vlan_start'
op|'='
number|'100'
op|','
name|'cidr'
op|'='
string|"'192.168.3.1/24'"
op|','
nl|'\n'
name|'network_size'
op|'='
number|'100'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'102'
op|','
name|'networks'
op|'['
number|'0'
op|']'
op|'['
string|'"vlan"'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_vlan_start_multiple
dedent|''
name|'def'
name|'test_vlan_start_multiple'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# VLAN 100 and 101 are used, so these networks shoud be created in 102'
nl|'\n'
comment|'# and 103'
nl|'\n'
indent|' '
name|'networks'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'create_networks'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context_admin'
op|','
name|'label'
op|'='
string|'"fake"'
op|','
name|'num_networks'
op|'='
number|'2'
op|','
nl|'\n'
name|'vlan_start'
op|'='
number|'100'
op|','
name|'cidr'
op|'='
string|"'192.168.3.1/24'"
op|','
nl|'\n'
name|'network_size'
op|'='
number|'100'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'102'
op|','
name|'networks'
op|'['
number|'0'
op|']'
op|'['
string|'"vlan"'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'103'
op|','
name|'networks'
op|'['
number|'1'
op|']'
op|'['
string|'"vlan"'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_vlan_start_used
dedent|''
name|'def'
name|'test_vlan_start_used'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# VLAN 100 and 101 are used, but vlan_start=99.'
nl|'\n'
indent|' '
name|'networks'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'create_networks'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context_admin'
op|','
name|'label'
op|'='
string|'"fake"'
op|','
name|'num_networks'
op|'='
number|'1'
op|','
nl|'\n'
name|'vlan_start'
op|'='
number|'99'
op|','
name|'cidr'
op|'='
string|"'192.168.3.1/24'"
op|','
nl|'\n'
name|'network_size'
op|'='
number|'100'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'102'
op|','
name|'networks'
op|'['
number|'0'
op|']'
op|'['
string|'"vlan"'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_vlan_parameter
dedent|''
name|'def'
name|'test_vlan_parameter'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# vlan parameter could not be greater than 4094'
nl|'\n'
indent|' '
name|'exc'
op|'='
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'ValueError'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'create_networks'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context_admin'
op|','
name|'label'
op|'='
string|'"fake"'
op|','
nl|'\n'
name|'num_networks'
op|'='
number|'1'
op|','
nl|'\n'
name|'vlan'
op|'='
number|'4095'
op|','
name|'cidr'
op|'='
string|"'192.168.0.1/24'"
op|')'
newline|'\n'
name|'error_msg'
op|'='
string|"'The vlan number cannot be greater than 4094'"
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
name|'error_msg'
op|','
name|'six'
op|'.'
name|'text_type'
op|'('
name|'exc'
op|')'
op|')'
newline|'\n'
nl|'\n'
comment|'# vlan parameter could not be less than 1'
nl|'\n'
name|'exc'
op|'='
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'ValueError'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'create_networks'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context_admin'
op|','
name|'label'
op|'='
string|'"fake"'
op|','
nl|'\n'
name|'num_networks'
op|'='
number|'1'
op|','
nl|'\n'
name|'vlan'
op|'='
number|'0'
op|','
name|'cidr'
op|'='
string|"'192.168.0.1/24'"
op|')'
newline|'\n'
name|'error_msg'
op|'='
string|"'The vlan number cannot be less than 1'"
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
name|'error_msg'
op|','
name|'six'
op|'.'
name|'text_type'
op|'('
name|'exc'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_vlan_be_integer
dedent|''
name|'def'
name|'test_vlan_be_integer'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# vlan must be an integer'
nl|'\n'
indent|' '
name|'exc'
op|'='
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'ValueError'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'create_networks'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context_admin'
op|','
name|'label'
op|'='
string|'"fake"'
op|','
nl|'\n'
name|'num_networks'
op|'='
number|'1'
op|','
nl|'\n'
name|'vlan'
op|'='
string|"'fake'"
op|','
name|'cidr'
op|'='
string|"'192.168.0.1/24'"
op|')'
newline|'\n'
name|'error_msg'
op|'='
string|"'vlan must be an integer'"
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
name|'error_msg'
op|','
name|'six'
op|'.'
name|'text_type'
op|'('
name|'exc'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_vlan_multiple_without_dhcp_server
dedent|''
name|'def'
name|'test_vlan_multiple_without_dhcp_server'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'networks'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'create_networks'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context_admin'
op|','
name|'label'
op|'='
string|'"fake"'
op|','
name|'num_networks'
op|'='
number|'2'
op|','
nl|'\n'
name|'vlan_start'
op|'='
number|'100'
op|','
name|'cidr'
op|'='
string|"'192.168.3.1/24'"
op|','
nl|'\n'
name|'network_size'
op|'='
number|'100'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"192.168.3.1"'
op|','
name|'networks'
op|'['
number|'0'
op|']'
op|'['
string|'"dhcp_server"'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"192.168.3.129"'
op|','
name|'networks'
op|'['
number|'1'
op|']'
op|'['
string|'"dhcp_server"'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_vlan_multiple_with_dhcp_server
dedent|''
name|'def'
name|'test_vlan_multiple_with_dhcp_server'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'networks'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'create_networks'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context_admin'
op|','
name|'label'
op|'='
string|'"fake"'
op|','
name|'num_networks'
op|'='
number|'2'
op|','
nl|'\n'
name|'vlan_start'
op|'='
number|'100'
op|','
name|'cidr'
op|'='
string|"'192.168.3.1/24'"
op|','
nl|'\n'
name|'network_size'
op|'='
number|'100'
op|','
name|'dhcp_server'
op|'='
string|"'192.168.3.1'"
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"192.168.3.1"'
op|','
name|'networks'
op|'['
number|'0'
op|']'
op|'['
string|'"dhcp_server"'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"192.168.3.1"'
op|','
name|'networks'
op|'['
number|'1'
op|']'
op|'['
string|'"dhcp_server"'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_validate_networks
dedent|''
name|'def'
name|'test_validate_networks'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'db'
op|','
string|'"fixed_ip_get_by_address"'
op|')'
newline|'\n'
nl|'\n'
name|'requested_networks'
op|'='
op|'['
op|'('
string|"'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'"
op|','
nl|'\n'
string|"'192.168.1.100'"
op|')'
op|','
nl|'\n'
op|'('
string|"'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'"
op|','
nl|'\n'
string|"'192.168.0.100'"
op|')'
op|']'
newline|'\n'
nl|'\n'
name|'db_fixed1'
op|'='
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
nl|'\n'
name|'network_id'
op|'='
name|'networks'
op|'['
number|'1'
op|']'
op|'['
string|"'id'"
op|']'
op|','
nl|'\n'
name|'network'
op|'='
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
op|'**'
name|'networks'
op|'['
number|'1'
op|']'
op|')'
op|','
nl|'\n'
name|'instance_uuid'
op|'='
name|'None'
op|')'
newline|'\n'
name|'db'
op|'.'
name|'fixed_ip_get_by_address'
op|'('
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'columns_to_join'
op|'='
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
nl|'\n'
op|')'
op|'.'
name|'AndReturn'
op|'('
name|'db_fixed1'
op|')'
newline|'\n'
name|'db_fixed2'
op|'='
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
nl|'\n'
name|'network_id'
op|'='
name|'networks'
op|'['
number|'0'
op|']'
op|'['
string|"'id'"
op|']'
op|','
nl|'\n'
name|'network'
op|'='
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
op|'**'
name|'networks'
op|'['
number|'0'
op|']'
op|')'
op|','
nl|'\n'
name|'instance_uuid'
op|'='
name|'None'
op|')'
newline|'\n'
name|'db'
op|'.'
name|'fixed_ip_get_by_address'
op|'('
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'columns_to_join'
op|'='
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
nl|'\n'
op|')'
op|'.'
name|'AndReturn'
op|'('
name|'db_fixed2'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'validate_networks'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'requested_networks'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_validate_networks_none_requested_networks
dedent|''
name|'def'
name|'test_validate_networks_none_requested_networks'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'network'
op|'.'
name|'validate_networks'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'None'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_validate_networks_empty_requested_networks
dedent|''
name|'def'
name|'test_validate_networks_empty_requested_networks'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'requested_networks'
op|'='
op|'['
op|']'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'validate_networks'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'requested_networks'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_validate_networks_invalid_fixed_ip
dedent|''
name|'def'
name|'test_validate_networks_invalid_fixed_ip'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'requested_networks'
op|'='
op|'['
op|'('
string|"'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'"
op|','
nl|'\n'
string|"'192.168.1.100.1'"
op|')'
op|','
nl|'\n'
op|'('
string|"'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'"
op|','
nl|'\n'
string|"'192.168.0.100.1'"
op|')'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'FixedIpInvalid'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'validate_networks'
op|','
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'requested_networks'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_validate_networks_empty_fixed_ip
dedent|''
name|'def'
name|'test_validate_networks_empty_fixed_ip'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'requested_networks'
op|'='
op|'['
op|'('
string|"'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'"
op|','
string|"''"
op|')'
op|','
nl|'\n'
op|'('
string|"'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'"
op|','
string|"''"
op|')'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'FixedIpInvalid'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'validate_networks'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'requested_networks'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_validate_networks_none_fixed_ip
dedent|''
name|'def'
name|'test_validate_networks_none_fixed_ip'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'requested_networks'
op|'='
op|'['
op|'('
string|"'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'"
op|','
name|'None'
op|')'
op|','
nl|'\n'
op|'('
string|"'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'"
op|','
name|'None'
op|')'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'validate_networks'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'requested_networks'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_floating_ip_owned_by_project
dedent|''
name|'def'
name|'test_floating_ip_owned_by_project'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ctxt'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'testuser'"
op|','
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
comment|'# raises because floating_ip project_id is None'
nl|'\n'
name|'floating_ip'
op|'='
name|'objects'
op|'.'
name|'FloatingIP'
op|'('
name|'address'
op|'='
string|"'10.0.0.1'"
op|','
nl|'\n'
name|'project_id'
op|'='
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'Forbidden'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'_floating_ip_owned_by_project'
op|','
nl|'\n'
name|'ctxt'
op|','
nl|'\n'
name|'floating_ip'
op|')'
newline|'\n'
nl|'\n'
comment|'# raises because floating_ip project_id is not equal to ctxt project_id'
nl|'\n'
name|'floating_ip'
op|'='
name|'objects'
op|'.'
name|'FloatingIP'
op|'('
name|'address'
op|'='
string|"'10.0.0.1'"
op|','
nl|'\n'
name|'project_id'
op|'='
name|'uuids'
op|'.'
name|'non_existent_uuid'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'Forbidden'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'_floating_ip_owned_by_project'
op|','
nl|'\n'
name|'ctxt'
op|','
nl|'\n'
name|'floating_ip'
op|')'
newline|'\n'
nl|'\n'
comment|'# does not raise (floating ip is owned by ctxt project)'
nl|'\n'
name|'floating_ip'
op|'='
name|'objects'
op|'.'
name|'FloatingIP'
op|'('
name|'address'
op|'='
string|"'10.0.0.1'"
op|','
nl|'\n'
name|'project_id'
op|'='
name|'ctxt'
op|'.'
name|'project_id'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'_floating_ip_owned_by_project'
op|'('
name|'ctxt'
op|','
name|'floating_ip'
op|')'
newline|'\n'
nl|'\n'
name|'ctxt'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
name|'None'
op|','
name|'None'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'True'
op|')'
newline|'\n'
nl|'\n'
comment|'# does not raise (ctxt is admin)'
nl|'\n'
name|'floating_ip'
op|'='
name|'objects'
op|'.'
name|'FloatingIP'
op|'('
name|'address'
op|'='
string|"'10.0.0.1'"
op|','
nl|'\n'
name|'project_id'
op|'='
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'_floating_ip_owned_by_project'
op|'('
name|'ctxt'
op|','
name|'floating_ip'
op|')'
newline|'\n'
nl|'\n'
comment|'# does not raise (ctxt is admin)'
nl|'\n'
name|'floating_ip'
op|'='
name|'objects'
op|'.'
name|'FloatingIP'
op|'('
name|'address'
op|'='
string|"'10.0.0.1'"
op|','
nl|'\n'
name|'project_id'
op|'='
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'_floating_ip_owned_by_project'
op|'('
name|'ctxt'
op|','
name|'floating_ip'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_allocate_floating_ip
dedent|''
name|'def'
name|'test_allocate_floating_ip'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ctxt'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'testuser'"
op|','
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|','
string|"'_floating_ip_pool_exists'"
op|','
nl|'\n'
name|'lambda'
name|'_x'
op|','
name|'_y'
op|':'
name|'True'
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake_allocate_address
name|'def'
name|'fake_allocate_address'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
op|'{'
string|"'address'"
op|':'
string|"'10.0.0.1'"
op|','
string|"'project_id'"
op|':'
name|'ctxt'
op|'.'
name|'project_id'
op|'}'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
string|"'floating_ip_allocate_address'"
op|','
nl|'\n'
name|'fake_allocate_address'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'allocate_floating_ip'
op|'('
name|'ctxt'
op|','
name|'ctxt'
op|'.'
name|'project_id'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.quota.QUOTAS.reserve'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.quota.QUOTAS.commit'"
op|')'
newline|'\n'
DECL|member|test_deallocate_floating_ip
name|'def'
name|'test_deallocate_floating_ip'
op|'('
name|'self'
op|','
name|'mock_commit'
op|','
name|'mock_reserve'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ctxt'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'testuser'"
op|','
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake1
name|'def'
name|'fake1'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'dict'
op|'('
name|'test_floating_ip'
op|'.'
name|'fake_floating_ip'
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake2
dedent|''
name|'def'
name|'fake2'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'dict'
op|'('
name|'test_floating_ip'
op|'.'
name|'fake_floating_ip'
op|','
nl|'\n'
name|'address'
op|'='
string|"'10.0.0.1'"
op|','
name|'fixed_ip_id'
op|'='
number|'1'
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake3
dedent|''
name|'def'
name|'fake3'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'dict'
op|'('
name|'test_floating_ip'
op|'.'
name|'fake_floating_ip'
op|','
nl|'\n'
name|'address'
op|'='
string|"'10.0.0.1'"
op|','
name|'fixed_ip_id'
op|'='
name|'None'
op|','
nl|'\n'
name|'project_id'
op|'='
name|'ctxt'
op|'.'
name|'project_id'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
string|"'floating_ip_deallocate'"
op|','
name|'fake1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|','
string|"'_floating_ip_owned_by_project'"
op|','
name|'fake1'
op|')'
newline|'\n'
nl|'\n'
comment|'# this time should raise because floating ip is associated to fixed_ip'
nl|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
string|"'floating_ip_get_by_address'"
op|','
name|'fake2'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'FloatingIpAssociated'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'deallocate_floating_ip'
op|','
nl|'\n'
name|'ctxt'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'mock_reserve'
op|'.'
name|'return_value'
op|'='
string|"'reserve'"
newline|'\n'
comment|'# this time should not raise'
nl|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
string|"'floating_ip_get_by_address'"
op|','
name|'fake3'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'deallocate_floating_ip'
op|'('
name|'ctxt'
op|','
name|'ctxt'
op|'.'
name|'project_id'
op|')'
newline|'\n'
nl|'\n'
name|'mock_commit'
op|'.'
name|'assert_called_once_with'
op|'('
name|'ctxt'
op|','
string|"'reserve'"
op|','
nl|'\n'
name|'project_id'
op|'='
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.fixed_ip_get'"
op|')'
newline|'\n'
DECL|member|test_associate_floating_ip
name|'def'
name|'test_associate_floating_ip'
op|'('
name|'self'
op|','
name|'fixed_get'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ctxt'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'testuser'"
op|','
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake1
name|'def'
name|'fake1'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
nl|'\n'
name|'address'
op|'='
string|"'10.0.0.1'"
op|','
nl|'\n'
name|'network'
op|'='
name|'test_network'
op|'.'
name|'fake_network'
op|')'
newline|'\n'
nl|'\n'
comment|"# floating ip that's already associated"
nl|'\n'
DECL|function|fake2
dedent|''
name|'def'
name|'fake2'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'dict'
op|'('
name|'test_floating_ip'
op|'.'
name|'fake_floating_ip'
op|','
nl|'\n'
name|'address'
op|'='
string|"'10.0.0.1'"
op|','
nl|'\n'
name|'pool'
op|'='
string|"'nova'"
op|','
nl|'\n'
name|'interface'
op|'='
string|"'eth0'"
op|','
nl|'\n'
name|'fixed_ip_id'
op|'='
number|'1'
op|')'
newline|'\n'
nl|'\n'
comment|"# floating ip that isn't associated"
nl|'\n'
DECL|function|fake3
dedent|''
name|'def'
name|'fake3'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'dict'
op|'('
name|'test_floating_ip'
op|'.'
name|'fake_floating_ip'
op|','
nl|'\n'
name|'address'
op|'='
string|"'10.0.0.1'"
op|','
nl|'\n'
name|'pool'
op|'='
string|"'nova'"
op|','
nl|'\n'
name|'interface'
op|'='
string|"'eth0'"
op|','
nl|'\n'
name|'fixed_ip_id'
op|'='
name|'None'
op|')'
newline|'\n'
nl|'\n'
comment|'# fixed ip with remote host'
nl|'\n'
DECL|function|fake4
dedent|''
name|'def'
name|'fake4'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
nl|'\n'
name|'address'
op|'='
string|"'10.0.0.1'"
op|','
nl|'\n'
name|'pool'
op|'='
string|"'nova'"
op|','
nl|'\n'
name|'instance_uuid'
op|'='
name|'FAKEUUID'
op|','
nl|'\n'
name|'interface'
op|'='
string|"'eth0'"
op|','
nl|'\n'
name|'network_id'
op|'='
number|'123'
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake4_network
dedent|''
name|'def'
name|'fake4_network'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
name|'multi_host'
op|'='
name|'False'
op|','
name|'host'
op|'='
string|"'jibberjabber'"
op|')'
newline|'\n'
nl|'\n'
comment|'# fixed ip with local host'
nl|'\n'
DECL|function|fake5
dedent|''
name|'def'
name|'fake5'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
nl|'\n'
name|'address'
op|'='
string|"'10.0.0.1'"
op|','
nl|'\n'
name|'pool'
op|'='
string|"'nova'"
op|','
nl|'\n'
name|'instance_uuid'
op|'='
name|'FAKEUUID'
op|','
nl|'\n'
name|'interface'
op|'='
string|"'eth0'"
op|','
nl|'\n'
name|'network_id'
op|'='
number|'1234'
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake5_network
dedent|''
name|'def'
name|'fake5_network'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
name|'multi_host'
op|'='
name|'False'
op|','
name|'host'
op|'='
string|"'testhost'"
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake6
dedent|''
name|'def'
name|'fake6'
op|'('
name|'ctxt'
op|','
name|'method'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'local'
op|'='
name|'False'
newline|'\n'
nl|'\n'
DECL|function|fake7
dedent|''
name|'def'
name|'fake7'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'local'
op|'='
name|'True'
newline|'\n'
nl|'\n'
DECL|function|fake8
dedent|''
name|'def'
name|'fake8'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'processutils'
op|'.'
name|'ProcessExecutionError'
op|'('
string|"''"
op|','
nl|'\n'
string|'\'Cannot find device "em0"\\n\''
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake9
dedent|''
name|'def'
name|'fake9'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'test'
op|'.'
name|'TestingException'
op|'('
op|')'
newline|'\n'
nl|'\n'
comment|"# raises because interface doesn't exist"
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
nl|'\n'
string|"'floating_ip_fixed_ip_associate'"
op|','
nl|'\n'
name|'fake1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
string|"'floating_ip_disassociate'"
op|','
name|'fake1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'driver'
op|','
string|"'ensure_floating_forward'"
op|','
name|'fake8'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'NoFloatingIpInterface'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'_associate_floating_ip'
op|','
nl|'\n'
name|'ctxt'
op|','
nl|'\n'
string|"'1.2.3.4'"
op|','
nl|'\n'
string|"'1.2.3.5'"
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|','
string|"'_floating_ip_owned_by_project'"
op|','
name|'fake1'
op|')'
newline|'\n'
nl|'\n'
comment|'# raises because floating_ip is already associated to a fixed_ip'
nl|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
string|"'floating_ip_get_by_address'"
op|','
name|'fake2'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|','
string|"'disassociate_floating_ip'"
op|','
name|'fake9'
op|')'
newline|'\n'
nl|'\n'
name|'fixed_get'
op|'.'
name|'return_value'
op|'='
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
nl|'\n'
name|'address'
op|'='
string|"'1.2.3.4'"
op|','
nl|'\n'
name|'instance_uuid'
op|'='
name|'uuids'
op|'.'
name|'instance'
op|','
nl|'\n'
name|'network'
op|'='
name|'test_network'
op|'.'
name|'fake_network'
op|')'
newline|'\n'
nl|'\n'
comment|"# doesn't raise because we exit early if the address is the same"
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'associate_floating_ip'
op|'('
name|'ctxt'
op|','
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
string|"'1.2.3.4'"
op|')'
newline|'\n'
nl|'\n'
comment|'# raises because we call disassociate which is mocked'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'test'
op|'.'
name|'TestingException'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'associate_floating_ip'
op|','
nl|'\n'
name|'ctxt'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
string|"'new'"
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
string|"'floating_ip_get_by_address'"
op|','
name|'fake3'
op|')'
newline|'\n'
nl|'\n'
comment|'# does not raise and makes call remotely'
nl|'\n'
name|'self'
op|'.'
name|'local'
op|'='
name|'True'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
string|"'fixed_ip_get_by_address'"
op|','
name|'fake4'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
string|"'network_get'"
op|','
name|'fake4_network'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'network_rpcapi'
op|'.'
name|'client'
op|','
string|"'prepare'"
op|','
nl|'\n'
name|'lambda'
op|'**'
name|'kw'
op|':'
name|'self'
op|'.'
name|'network'
op|'.'
name|'network_rpcapi'
op|'.'
name|'client'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'network_rpcapi'
op|'.'
name|'client'
op|','
string|"'call'"
op|','
name|'fake6'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'associate_floating_ip'
op|'('
name|'ctxt'
op|','
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'self'
op|'.'
name|'local'
op|')'
newline|'\n'
nl|'\n'
comment|'# does not raise and makes call locally'
nl|'\n'
name|'self'
op|'.'
name|'local'
op|'='
name|'False'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
string|"'fixed_ip_get_by_address'"
op|','
name|'fake5'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
string|"'network_get'"
op|','
name|'fake5_network'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|','
string|"'_associate_floating_ip'"
op|','
name|'fake7'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'associate_floating_ip'
op|'('
name|'ctxt'
op|','
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'self'
op|'.'
name|'local'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_add_floating_ip_nat_before_bind
dedent|''
name|'def'
name|'test_add_floating_ip_nat_before_bind'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# Tried to verify order with documented mox record/verify'
nl|'\n'
comment|"# functionality, but it doesn't seem to work since I can't make it"
nl|'\n'
comment|"# fail. I'm using stubs and a flag for now, but if this mox feature"
nl|'\n'
comment|'# can be made to work, it would be a better way to test this.'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# self.mox.StubOutWithMock(self.network.driver,'
nl|'\n'
comment|"# 'ensure_floating_forward')"
nl|'\n'
comment|"# self.mox.StubOutWithMock(self.network.driver, 'bind_floating_ip')"
nl|'\n'
comment|'#'
nl|'\n'
comment|'# self.network.driver.ensure_floating_forward(mox.IgnoreArg(),'
nl|'\n'
comment|'# mox.IgnoreArg(),'
nl|'\n'
comment|'# mox.IgnoreArg(),'
nl|'\n'
comment|'# mox.IgnoreArg())'
nl|'\n'
comment|'# self.network.driver.bind_floating_ip(mox.IgnoreArg(),'
nl|'\n'
comment|'# mox.IgnoreArg())'
nl|'\n'
comment|'# self.mox.ReplayAll()'
nl|'\n'
nl|'\n'
indent|' '
name|'nat_called'
op|'='
op|'['
name|'False'
op|']'
newline|'\n'
nl|'\n'
DECL|function|fake_nat
name|'def'
name|'fake_nat'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'nat_called'
op|'['
number|'0'
op|']'
op|'='
name|'True'
newline|'\n'
nl|'\n'
DECL|function|fake_bind
dedent|''
name|'def'
name|'fake_bind'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'nat_called'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'driver'
op|','
nl|'\n'
string|"'ensure_floating_forward'"
op|','
nl|'\n'
name|'fake_nat'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'driver'
op|','
string|"'bind_floating_ip'"
op|','
name|'fake_bind'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'l3driver'
op|'.'
name|'add_floating_ip'
op|'('
string|"'fakefloat'"
op|','
nl|'\n'
string|"'fakefixed'"
op|','
nl|'\n'
string|"'fakeiface'"
op|','
nl|'\n'
string|"'fakenet'"
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.floating_ip_get_all_by_host'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.fixed_ip_get'"
op|')'
newline|'\n'
DECL|member|_test_floating_ip_init_host
name|'def'
name|'_test_floating_ip_init_host'
op|'('
name|'self'
op|','
name|'fixed_get'
op|','
name|'floating_get'
op|','
nl|'\n'
name|'public_interface'
op|','
name|'expected_arg'
op|')'
op|':'
newline|'\n'
nl|'\n'
indent|' '
name|'floating_get'
op|'.'
name|'return_value'
op|'='
op|'['
nl|'\n'
name|'dict'
op|'('
name|'test_floating_ip'
op|'.'
name|'fake_floating_ip'
op|','
nl|'\n'
name|'interface'
op|'='
string|"'foo'"
op|','
nl|'\n'
name|'address'
op|'='
string|"'1.2.3.4'"
op|')'
op|','
nl|'\n'
name|'dict'
op|'('
name|'test_floating_ip'
op|'.'
name|'fake_floating_ip'
op|','
nl|'\n'
name|'interface'
op|'='
string|"'fakeiface'"
op|','
nl|'\n'
name|'address'
op|'='
string|"'1.2.3.5'"
op|','
nl|'\n'
name|'fixed_ip_id'
op|'='
number|'1'
op|')'
op|','
nl|'\n'
name|'dict'
op|'('
name|'test_floating_ip'
op|'.'
name|'fake_floating_ip'
op|','
nl|'\n'
name|'interface'
op|'='
string|"'bar'"
op|','
nl|'\n'
name|'address'
op|'='
string|"'1.2.3.6'"
op|','
nl|'\n'
name|'fixed_ip_id'
op|'='
number|'2'
op|')'
op|','
nl|'\n'
op|']'
newline|'\n'
nl|'\n'
DECL|function|fixed_ip_get
name|'def'
name|'fixed_ip_get'
op|'('
name|'_context'
op|','
name|'fixed_ip_id'
op|','
name|'get_network'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'fixed_ip_id'
op|'=='
number|'1'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
nl|'\n'
name|'address'
op|'='
string|"'1.2.3.4'"
op|','
nl|'\n'
name|'network'
op|'='
name|'test_network'
op|'.'
name|'fake_network'
op|')'
newline|'\n'
dedent|''
name|'raise'
name|'exception'
op|'.'
name|'FixedIpNotFound'
op|'('
name|'id'
op|'='
name|'fixed_ip_id'
op|')'
newline|'\n'
dedent|''
name|'fixed_get'
op|'.'
name|'side_effect'
op|'='
name|'fixed_ip_get'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'l3driver'
op|','
string|"'add_floating_ip'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'public_interface'
op|'='
name|'public_interface'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'l3driver'
op|'.'
name|'add_floating_ip'
op|'('
name|'netaddr'
op|'.'
name|'IPAddress'
op|'('
string|"'1.2.3.5'"
op|')'
op|','
nl|'\n'
name|'netaddr'
op|'.'
name|'IPAddress'
op|'('
string|"'1.2.3.4'"
op|')'
op|','
nl|'\n'
name|'expected_arg'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IsA'
op|'('
name|'objects'
op|'.'
name|'Network'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'init_host_floating_ips'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'UnsetStubs'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'VerifyAll'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_floating_ip_init_host_without_public_interface
dedent|''
name|'def'
name|'test_floating_ip_init_host_without_public_interface'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'_test_floating_ip_init_host'
op|'('
name|'public_interface'
op|'='
name|'False'
op|','
nl|'\n'
name|'expected_arg'
op|'='
string|"'fakeiface'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_floating_ip_init_host_with_public_interface
dedent|''
name|'def'
name|'test_floating_ip_init_host_with_public_interface'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'_test_floating_ip_init_host'
op|'('
name|'public_interface'
op|'='
string|"'fooiface'"
op|','
nl|'\n'
name|'expected_arg'
op|'='
string|"'fooiface'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_disassociate_floating_ip
dedent|''
name|'def'
name|'test_disassociate_floating_ip'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ctxt'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'testuser'"
op|','
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake1
name|'def'
name|'fake1'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'pass'
newline|'\n'
nl|'\n'
comment|"# floating ip that isn't associated"
nl|'\n'
DECL|function|fake2
dedent|''
name|'def'
name|'fake2'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'dict'
op|'('
name|'test_floating_ip'
op|'.'
name|'fake_floating_ip'
op|','
nl|'\n'
name|'address'
op|'='
string|"'10.0.0.1'"
op|','
nl|'\n'
name|'pool'
op|'='
string|"'nova'"
op|','
nl|'\n'
name|'interface'
op|'='
string|"'eth0'"
op|','
nl|'\n'
name|'fixed_ip_id'
op|'='
name|'None'
op|')'
newline|'\n'
nl|'\n'
comment|'# floating ip that is associated'
nl|'\n'
DECL|function|fake3
dedent|''
name|'def'
name|'fake3'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'dict'
op|'('
name|'test_floating_ip'
op|'.'
name|'fake_floating_ip'
op|','
nl|'\n'
name|'address'
op|'='
string|"'10.0.0.1'"
op|','
nl|'\n'
name|'pool'
op|'='
string|"'nova'"
op|','
nl|'\n'
name|'interface'
op|'='
string|"'eth0'"
op|','
nl|'\n'
name|'fixed_ip_id'
op|'='
number|'1'
op|','
nl|'\n'
name|'project_id'
op|'='
name|'ctxt'
op|'.'
name|'project_id'
op|')'
newline|'\n'
nl|'\n'
comment|'# fixed ip with remote host'
nl|'\n'
DECL|function|fake4
dedent|''
name|'def'
name|'fake4'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
nl|'\n'
name|'address'
op|'='
string|"'10.0.0.1'"
op|','
nl|'\n'
name|'pool'
op|'='
string|"'nova'"
op|','
nl|'\n'
name|'instance_uuid'
op|'='
name|'FAKEUUID'
op|','
nl|'\n'
name|'interface'
op|'='
string|"'eth0'"
op|','
nl|'\n'
name|'network_id'
op|'='
number|'123'
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake4_network
dedent|''
name|'def'
name|'fake4_network'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
name|'multi_host'
op|'='
name|'False'
op|','
nl|'\n'
name|'host'
op|'='
string|"'jibberjabber'"
op|')'
newline|'\n'
nl|'\n'
comment|'# fixed ip with local host'
nl|'\n'
DECL|function|fake5
dedent|''
name|'def'
name|'fake5'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
nl|'\n'
name|'address'
op|'='
string|"'10.0.0.1'"
op|','
nl|'\n'
name|'pool'
op|'='
string|"'nova'"
op|','
nl|'\n'
name|'instance_uuid'
op|'='
name|'FAKEUUID'
op|','
nl|'\n'
name|'interface'
op|'='
string|"'eth0'"
op|','
nl|'\n'
name|'network_id'
op|'='
number|'1234'
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake5_network
dedent|''
name|'def'
name|'fake5_network'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
name|'multi_host'
op|'='
name|'False'
op|','
name|'host'
op|'='
string|"'testhost'"
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake6
dedent|''
name|'def'
name|'fake6'
op|'('
name|'ctxt'
op|','
name|'method'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'local'
op|'='
name|'False'
newline|'\n'
nl|'\n'
DECL|function|fake7
dedent|''
name|'def'
name|'fake7'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'local'
op|'='
name|'True'
newline|'\n'
nl|'\n'
DECL|function|fake8
dedent|''
name|'def'
name|'fake8'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'dict'
op|'('
name|'test_floating_ip'
op|'.'
name|'fake_floating_ip'
op|','
nl|'\n'
name|'address'
op|'='
string|"'10.0.0.1'"
op|','
nl|'\n'
name|'pool'
op|'='
string|"'nova'"
op|','
nl|'\n'
name|'interface'
op|'='
string|"'eth0'"
op|','
nl|'\n'
name|'fixed_ip_id'
op|'='
number|'1'
op|','
nl|'\n'
name|'auto_assigned'
op|'='
name|'True'
op|','
nl|'\n'
name|'project_id'
op|'='
name|'ctxt'
op|'.'
name|'project_id'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|','
string|"'_floating_ip_owned_by_project'"
op|','
name|'fake1'
op|')'
newline|'\n'
nl|'\n'
comment|'# raises because floating_ip is not associated to a fixed_ip'
nl|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
string|"'floating_ip_get_by_address'"
op|','
name|'fake2'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'FloatingIpNotAssociated'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'disassociate_floating_ip'
op|','
nl|'\n'
name|'ctxt'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
string|"'floating_ip_get_by_address'"
op|','
name|'fake3'
op|')'
newline|'\n'
nl|'\n'
comment|'# does not raise and makes call remotely'
nl|'\n'
name|'self'
op|'.'
name|'local'
op|'='
name|'True'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
string|"'fixed_ip_get'"
op|','
name|'fake4'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
string|"'network_get'"
op|','
name|'fake4_network'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'network_rpcapi'
op|'.'
name|'client'
op|','
string|"'prepare'"
op|','
nl|'\n'
name|'lambda'
op|'**'
name|'kw'
op|':'
name|'self'
op|'.'
name|'network'
op|'.'
name|'network_rpcapi'
op|'.'
name|'client'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'network_rpcapi'
op|'.'
name|'client'
op|','
string|"'call'"
op|','
name|'fake6'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'disassociate_floating_ip'
op|'('
name|'ctxt'
op|','
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'self'
op|'.'
name|'local'
op|')'
newline|'\n'
nl|'\n'
comment|'# does not raise and makes call locally'
nl|'\n'
name|'self'
op|'.'
name|'local'
op|'='
name|'False'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
string|"'fixed_ip_get'"
op|','
name|'fake5'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
string|"'network_get'"
op|','
name|'fake5_network'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|','
string|"'_disassociate_floating_ip'"
op|','
name|'fake7'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'disassociate_floating_ip'
op|'('
name|'ctxt'
op|','
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'self'
op|'.'
name|'local'
op|')'
newline|'\n'
nl|'\n'
comment|'# raises because auto_assigned floating IP cannot be disassociated'
nl|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
string|"'floating_ip_get_by_address'"
op|','
name|'fake8'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'CannotDisassociateAutoAssignedFloatingIP'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'disassociate_floating_ip'
op|','
nl|'\n'
name|'ctxt'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_add_fixed_ip_instance_without_vpn_requested_networks
dedent|''
name|'def'
name|'test_add_fixed_ip_instance_without_vpn_requested_networks'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|','
nl|'\n'
string|"'_do_trigger_security_group_members_refresh_for_instance'"
op|','
nl|'\n'
name|'lambda'
op|'*'
name|'a'
op|','
op|'**'
name|'kw'
op|':'
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'db'
op|','
string|"'network_get'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'db'
op|','
string|"'fixed_ip_associate_pool'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'db'
op|','
nl|'\n'
string|"'virtual_interface_get_by_instance_and_network'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'db'
op|','
string|"'instance_get_by_uuid'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'self'
op|'.'
name|'network'
op|','
string|"'get_instance_nw_info'"
op|')'
newline|'\n'
nl|'\n'
name|'db'
op|'.'
name|'virtual_interface_get_by_instance_and_network'
op|'('
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|')'
op|'.'
name|'AndReturn'
op|'('
name|'vifs'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
nl|'\n'
name|'fixed'
op|'='
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
nl|'\n'
name|'address'
op|'='
string|"'192.168.0.101'"
op|')'
newline|'\n'
name|'db'
op|'.'
name|'fixed_ip_associate_pool'
op|'('
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'instance_uuid'
op|'='
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'host'
op|'='
name|'None'
op|','
nl|'\n'
name|'virtual_interface_id'
op|'='
name|'vifs'
op|'['
number|'0'
op|']'
op|'['
string|"'id'"
op|']'
nl|'\n'
op|')'
op|'.'
name|'AndReturn'
op|'('
name|'fixed'
op|')'
newline|'\n'
name|'db'
op|'.'
name|'network_get'
op|'('
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'project_only'
op|'='
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
nl|'\n'
op|')'
op|'.'
name|'AndReturn'
op|'('
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
op|'**'
name|'networks'
op|'['
number|'0'
op|']'
op|')'
op|')'
newline|'\n'
name|'db'
op|'.'
name|'instance_get_by_uuid'
op|'('
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'columns_to_join'
op|'='
op|'['
string|"'info_cache'"
op|','
nl|'\n'
string|"'security_groups'"
op|']'
nl|'\n'
op|')'
op|'.'
name|'AndReturn'
op|'('
name|'fake_inst'
op|'('
name|'display_name'
op|'='
name|'HOST'
op|','
nl|'\n'
name|'uuid'
op|'='
name|'FAKEUUID'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'get_instance_nw_info'
op|'('
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'add_fixed_ip_to_instance'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'FAKEUUID'
op|','
name|'HOST'
op|','
nl|'\n'
name|'networks'
op|'['
number|'0'
op|']'
op|'['
string|"'id'"
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.fixed_ip_get_by_address'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.network_get'"
op|')'
newline|'\n'
DECL|member|test_ip_association_and_allocation_of_other_project
name|'def'
name|'test_ip_association_and_allocation_of_other_project'
op|'('
name|'self'
op|','
name|'net_get'
op|','
nl|'\n'
name|'fixed_get'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Makes sure that we cannot deallocaate or disassociate\n a public IP of other project.\n """'
newline|'\n'
name|'net_get'
op|'.'
name|'return_value'
op|'='
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
op|'**'
name|'networks'
op|'['
number|'1'
op|']'
op|')'
newline|'\n'
nl|'\n'
name|'context1'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'user'"
op|','
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|')'
newline|'\n'
name|'context2'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'user'"
op|','
string|"'project2'"
op|')'
newline|'\n'
nl|'\n'
name|'float_ip'
op|'='
name|'db'
op|'.'
name|'floating_ip_create'
op|'('
name|'context1'
op|'.'
name|'elevated'
op|'('
op|')'
op|','
nl|'\n'
op|'{'
string|"'address'"
op|':'
string|"'1.2.3.4'"
op|','
nl|'\n'
string|"'project_id'"
op|':'
name|'context1'
op|'.'
name|'project_id'
op|'}'
op|')'
newline|'\n'
nl|'\n'
name|'float_addr'
op|'='
name|'float_ip'
op|'['
string|"'address'"
op|']'
newline|'\n'
nl|'\n'
name|'instance'
op|'='
name|'db'
op|'.'
name|'instance_create'
op|'('
name|'context1'
op|','
nl|'\n'
op|'{'
string|"'project_id'"
op|':'
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|'}'
op|')'
newline|'\n'
nl|'\n'
name|'fix_addr'
op|'='
name|'db'
op|'.'
name|'fixed_ip_associate_pool'
op|'('
name|'context1'
op|'.'
name|'elevated'
op|'('
op|')'
op|','
nl|'\n'
number|'1'
op|','
name|'instance'
op|'['
string|"'uuid'"
op|']'
op|')'
op|'.'
name|'address'
newline|'\n'
name|'fixed_get'
op|'.'
name|'return_value'
op|'='
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
nl|'\n'
name|'address'
op|'='
name|'fix_addr'
op|','
nl|'\n'
name|'instance_uuid'
op|'='
name|'instance'
op|'.'
name|'uuid'
op|','
nl|'\n'
name|'network'
op|'='
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
op|'**'
name|'networks'
op|'['
number|'1'
op|']'
op|')'
op|')'
newline|'\n'
nl|'\n'
comment|'# Associate the IP with non-admin user context'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'Forbidden'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'associate_floating_ip'
op|','
nl|'\n'
name|'context2'
op|','
nl|'\n'
name|'float_addr'
op|','
nl|'\n'
name|'fix_addr'
op|')'
newline|'\n'
nl|'\n'
comment|'# Deallocate address from other project'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'Forbidden'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'deallocate_floating_ip'
op|','
nl|'\n'
name|'context2'
op|','
nl|'\n'
name|'float_addr'
op|')'
newline|'\n'
nl|'\n'
comment|'# Now Associates the address to the actual project'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'associate_floating_ip'
op|'('
name|'context1'
op|','
name|'float_addr'
op|','
name|'fix_addr'
op|')'
newline|'\n'
nl|'\n'
comment|'# Now try dis-associating from other project'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'Forbidden'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'disassociate_floating_ip'
op|','
nl|'\n'
name|'context2'
op|','
nl|'\n'
name|'float_addr'
op|')'
newline|'\n'
nl|'\n'
comment|'# Clean up the ip addresses'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'disassociate_floating_ip'
op|'('
name|'context1'
op|','
name|'float_addr'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'deallocate_floating_ip'
op|'('
name|'context1'
op|','
name|'float_addr'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'deallocate_fixed_ip'
op|'('
name|'context1'
op|','
name|'fix_addr'
op|','
string|"'fake'"
op|')'
newline|'\n'
name|'db'
op|'.'
name|'floating_ip_destroy'
op|'('
name|'context1'
op|'.'
name|'elevated'
op|'('
op|')'
op|','
name|'float_addr'
op|')'
newline|'\n'
name|'db'
op|'.'
name|'fixed_ip_disassociate'
op|'('
name|'context1'
op|'.'
name|'elevated'
op|'('
op|')'
op|','
name|'fix_addr'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.fixed_ip_get_by_address'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.network_get'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.fixed_ip_update'"
op|')'
newline|'\n'
DECL|member|test_deallocate_fixed
name|'def'
name|'test_deallocate_fixed'
op|'('
name|'self'
op|','
name|'fixed_update'
op|','
name|'net_get'
op|','
name|'fixed_get'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Verify that release is called properly.\n\n Ensures https://bugs.launchpad.net/nova/+bug/973442 doesn\'t return\n """'
newline|'\n'
name|'net_get'
op|'.'
name|'return_value'
op|'='
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
op|'**'
name|'networks'
op|'['
number|'1'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|function|vif_get
name|'def'
name|'vif_get'
op|'('
name|'_context'
op|','
name|'_vif_id'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'vifs'
op|'['
number|'0'
op|']'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'nova.db.virtual_interface_get'"
op|','
name|'vif_get'
op|')'
newline|'\n'
name|'context1'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'user'"
op|','
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|')'
newline|'\n'
nl|'\n'
name|'instance'
op|'='
name|'db'
op|'.'
name|'instance_create'
op|'('
name|'context1'
op|','
nl|'\n'
op|'{'
string|"'project_id'"
op|':'
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|'}'
op|')'
newline|'\n'
nl|'\n'
name|'elevated'
op|'='
name|'context1'
op|'.'
name|'elevated'
op|'('
op|')'
newline|'\n'
name|'fix_addr'
op|'='
name|'db'
op|'.'
name|'fixed_ip_associate_pool'
op|'('
name|'elevated'
op|','
number|'1'
op|','
name|'instance'
op|'['
string|"'uuid'"
op|']'
op|')'
newline|'\n'
name|'fixed_get'
op|'.'
name|'return_value'
op|'='
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
nl|'\n'
name|'address'
op|'='
name|'fix_addr'
op|'.'
name|'address'
op|','
nl|'\n'
name|'instance_uuid'
op|'='
name|'instance'
op|'.'
name|'uuid'
op|','
nl|'\n'
name|'allocated'
op|'='
name|'True'
op|','
nl|'\n'
name|'virtual_interface_id'
op|'='
number|'3'
op|','
nl|'\n'
name|'network'
op|'='
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
op|'**'
name|'networks'
op|'['
number|'1'
op|']'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'force_dhcp_release'
op|'='
name|'True'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'linux_net'
op|','
string|"'release_dhcp'"
op|')'
newline|'\n'
name|'linux_net'
op|'.'
name|'release_dhcp'
op|'('
name|'networks'
op|'['
number|'1'
op|']'
op|'['
string|"'bridge'"
op|']'
op|','
name|'fix_addr'
op|'.'
name|'address'
op|','
nl|'\n'
string|"'DE:AD:BE:EF:00:00'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'deallocate_fixed_ip'
op|'('
name|'context1'
op|','
name|'fix_addr'
op|'.'
name|'address'
op|','
string|"'fake'"
op|')'
newline|'\n'
name|'fixed_update'
op|'.'
name|'assert_called_once_with'
op|'('
name|'context1'
op|','
name|'fix_addr'
op|'.'
name|'address'
op|','
nl|'\n'
op|'{'
string|"'allocated'"
op|':'
name|'False'
op|'}'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.fixed_ip_get_by_address'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.network_get'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.fixed_ip_update'"
op|')'
newline|'\n'
DECL|member|_deallocate_fixed_with_dhcp
name|'def'
name|'_deallocate_fixed_with_dhcp'
op|'('
name|'self'
op|','
name|'mock_dev_exists'
op|','
name|'fixed_update'
op|','
nl|'\n'
name|'net_get'
op|','
name|'fixed_get'
op|')'
op|':'
newline|'\n'
indent|' '
name|'net_get'
op|'.'
name|'return_value'
op|'='
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
op|'**'
name|'networks'
op|'['
number|'1'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|function|vif_get
name|'def'
name|'vif_get'
op|'('
name|'_context'
op|','
name|'_vif_id'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'vifs'
op|'['
number|'0'
op|']'
newline|'\n'
nl|'\n'
dedent|''
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'db'
op|','
string|"'virtual_interface_get'"
op|','
name|'vif_get'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
nl|'\n'
name|'utils'
op|','
string|"'execute'"
op|','
nl|'\n'
name|'side_effect'
op|'='
name|'processutils'
op|'.'
name|'ProcessExecutionError'
op|'('
op|')'
op|')'
op|','
nl|'\n'
op|')'
name|'as'
op|'('
name|'_vif_get'
op|','
name|'_execute'
op|')'
op|':'
newline|'\n'
indent|' '
name|'context1'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'user'"
op|','
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|')'
newline|'\n'
nl|'\n'
name|'instance'
op|'='
name|'db'
op|'.'
name|'instance_create'
op|'('
name|'context1'
op|','
nl|'\n'
op|'{'
string|"'project_id'"
op|':'
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|'}'
op|')'
newline|'\n'
nl|'\n'
name|'elevated'
op|'='
name|'context1'
op|'.'
name|'elevated'
op|'('
op|')'
newline|'\n'
name|'fix_addr'
op|'='
name|'db'
op|'.'
name|'fixed_ip_associate_pool'
op|'('
name|'elevated'
op|','
number|'1'
op|','
nl|'\n'
name|'instance'
op|'['
string|"'uuid'"
op|']'
op|')'
newline|'\n'
name|'fixed_get'
op|'.'
name|'return_value'
op|'='
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
nl|'\n'
name|'address'
op|'='
name|'fix_addr'
op|'.'
name|'address'
op|','
nl|'\n'
name|'instance_uuid'
op|'='
name|'instance'
op|'.'
name|'uuid'
op|','
nl|'\n'
name|'allocated'
op|'='
name|'True'
op|','
nl|'\n'
name|'virtual_interface_id'
op|'='
number|'3'
op|','
nl|'\n'
name|'network'
op|'='
name|'dict'
op|'('
nl|'\n'
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
op|'**'
name|'networks'
op|'['
number|'1'
op|']'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'force_dhcp_release'
op|'='
name|'True'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'deallocate_fixed_ip'
op|'('
name|'context1'
op|','
name|'fix_addr'
op|'.'
name|'address'
op|','
nl|'\n'
string|"'fake'"
op|')'
newline|'\n'
name|'fixed_update'
op|'.'
name|'assert_called_once_with'
op|'('
name|'context1'
op|','
name|'fix_addr'
op|'.'
name|'address'
op|','
nl|'\n'
op|'{'
string|"'allocated'"
op|':'
name|'False'
op|'}'
op|')'
newline|'\n'
name|'mock_dev_exists'
op|'.'
name|'assert_called_once_with'
op|'('
name|'networks'
op|'['
number|'1'
op|']'
op|'['
string|"'bridge'"
op|']'
op|')'
newline|'\n'
name|'if'
name|'mock_dev_exists'
op|'.'
name|'return_value'
op|':'
newline|'\n'
indent|' '
name|'_execute'
op|'.'
name|'assert_called_once_with'
op|'('
string|"'dhcp_release'"
op|','
nl|'\n'
name|'networks'
op|'['
number|'1'
op|']'
op|'['
string|"'bridge'"
op|']'
op|','
nl|'\n'
name|'fix_addr'
op|'.'
name|'address'
op|','
nl|'\n'
string|"'DE:AD:BE:EF:00:00'"
op|','
nl|'\n'
name|'run_as_root'
op|'='
name|'True'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.network.linux_net.device_exists'"
op|','
name|'return_value'
op|'='
name|'True'
op|')'
newline|'\n'
DECL|member|test_deallocate_fixed_with_dhcp
name|'def'
name|'test_deallocate_fixed_with_dhcp'
op|'('
name|'self'
op|','
name|'mock_dev_exists'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'_deallocate_fixed_with_dhcp'
op|'('
name|'mock_dev_exists'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.network.linux_net.device_exists'"
op|','
name|'return_value'
op|'='
name|'False'
op|')'
newline|'\n'
DECL|member|test_deallocate_fixed_without_dhcp
name|'def'
name|'test_deallocate_fixed_without_dhcp'
op|'('
name|'self'
op|','
name|'mock_dev_exists'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'_deallocate_fixed_with_dhcp'
op|'('
name|'mock_dev_exists'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_deallocate_fixed_deleted
dedent|''
name|'def'
name|'test_deallocate_fixed_deleted'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|"# Verify doesn't deallocate deleted fixed_ip from deleted network."
nl|'\n'
nl|'\n'
DECL|function|teardown_network_on_host
indent|' '
name|'def'
name|'teardown_network_on_host'
op|'('
name|'_context'
op|','
name|'network'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'network'
op|'['
string|"'id'"
op|']'
op|'=='
number|'0'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'test'
op|'.'
name|'TestingException'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|','
string|"'_teardown_network_on_host'"
op|','
nl|'\n'
name|'teardown_network_on_host'
op|')'
newline|'\n'
nl|'\n'
name|'context1'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'user'"
op|','
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|')'
newline|'\n'
name|'elevated'
op|'='
name|'context1'
op|'.'
name|'elevated'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'instance'
op|'='
name|'db'
op|'.'
name|'instance_create'
op|'('
name|'context1'
op|','
nl|'\n'
op|'{'
string|"'project_id'"
op|':'
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|'}'
op|')'
newline|'\n'
name|'network'
op|'='
name|'db'
op|'.'
name|'network_create_safe'
op|'('
name|'elevated'
op|','
name|'networks'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
nl|'\n'
name|'_fix_addr'
op|'='
name|'db'
op|'.'
name|'fixed_ip_associate_pool'
op|'('
name|'elevated'
op|','
number|'1'
op|','
name|'instance'
op|'['
string|"'uuid'"
op|']'
op|')'
newline|'\n'
name|'fix_addr'
op|'='
name|'_fix_addr'
op|'.'
name|'address'
newline|'\n'
name|'db'
op|'.'
name|'fixed_ip_update'
op|'('
name|'elevated'
op|','
name|'fix_addr'
op|','
op|'{'
string|"'deleted'"
op|':'
number|'1'
op|'}'
op|')'
newline|'\n'
name|'elevated'
op|'.'
name|'read_deleted'
op|'='
string|"'yes'"
newline|'\n'
name|'delfixed'
op|'='
name|'db'
op|'.'
name|'fixed_ip_get_by_address'
op|'('
name|'elevated'
op|','
name|'fix_addr'
op|')'
newline|'\n'
name|'values'
op|'='
op|'{'
string|"'address'"
op|':'
name|'fix_addr'
op|','
nl|'\n'
string|"'network_id'"
op|':'
name|'network'
op|'.'
name|'id'
op|','
nl|'\n'
string|"'instance_uuid'"
op|':'
name|'delfixed'
op|'['
string|"'instance_uuid'"
op|']'
op|'}'
newline|'\n'
name|'db'
op|'.'
name|'fixed_ip_create'
op|'('
name|'elevated'
op|','
name|'values'
op|')'
newline|'\n'
name|'elevated'
op|'.'
name|'read_deleted'
op|'='
string|"'no'"
newline|'\n'
name|'elevated'
op|'.'
name|'read_deleted'
op|'='
string|"'yes'"
newline|'\n'
nl|'\n'
name|'deallocate'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'deallocate_fixed_ip'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'test'
op|'.'
name|'TestingException'
op|','
name|'deallocate'
op|','
name|'context1'
op|','
nl|'\n'
name|'fix_addr'
op|','
string|"'fake'"
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.fixed_ip_get_by_address'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.network_get'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.fixed_ip_update'"
op|')'
newline|'\n'
DECL|member|test_deallocate_fixed_no_vif
name|'def'
name|'test_deallocate_fixed_no_vif'
op|'('
name|'self'
op|','
name|'fixed_update'
op|','
name|'net_get'
op|','
name|'fixed_get'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Verify that deallocate doesn\'t raise when no vif is returned.\n\n Ensures https://bugs.launchpad.net/nova/+bug/968457 doesn\'t return\n """'
newline|'\n'
name|'net_get'
op|'.'
name|'return_value'
op|'='
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
op|'**'
name|'networks'
op|'['
number|'1'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|function|vif_get
name|'def'
name|'vif_get'
op|'('
name|'_context'
op|','
name|'_vif_id'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'None'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'nova.db.virtual_interface_get'"
op|','
name|'vif_get'
op|')'
newline|'\n'
name|'context1'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'user'"
op|','
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|')'
newline|'\n'
nl|'\n'
name|'instance'
op|'='
name|'db'
op|'.'
name|'instance_create'
op|'('
name|'context1'
op|','
nl|'\n'
op|'{'
string|"'project_id'"
op|':'
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|'}'
op|')'
newline|'\n'
nl|'\n'
name|'elevated'
op|'='
name|'context1'
op|'.'
name|'elevated'
op|'('
op|')'
newline|'\n'
name|'fix_addr'
op|'='
name|'db'
op|'.'
name|'fixed_ip_associate_pool'
op|'('
name|'elevated'
op|','
number|'1'
op|','
name|'instance'
op|'['
string|"'uuid'"
op|']'
op|')'
newline|'\n'
name|'fixed_get'
op|'.'
name|'return_value'
op|'='
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
nl|'\n'
name|'address'
op|'='
name|'fix_addr'
op|'.'
name|'address'
op|','
nl|'\n'
name|'allocated'
op|'='
name|'True'
op|','
nl|'\n'
name|'virtual_interface_id'
op|'='
number|'3'
op|','
nl|'\n'
name|'instance_uuid'
op|'='
name|'instance'
op|'.'
name|'uuid'
op|','
nl|'\n'
name|'network'
op|'='
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
op|'**'
name|'networks'
op|'['
number|'1'
op|']'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'force_dhcp_release'
op|'='
name|'True'
op|')'
newline|'\n'
name|'fixed_update'
op|'.'
name|'return_value'
op|'='
name|'fixed_get'
op|'.'
name|'return_value'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'deallocate_fixed_ip'
op|'('
name|'context1'
op|','
name|'fix_addr'
op|'.'
name|'address'
op|','
string|"'fake'"
op|')'
newline|'\n'
name|'fixed_update'
op|'.'
name|'assert_called_once_with'
op|'('
name|'context1'
op|','
name|'fix_addr'
op|'.'
name|'address'
op|','
nl|'\n'
op|'{'
string|"'allocated'"
op|':'
name|'False'
op|'}'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.fixed_ip_get_by_address'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.network_get'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.fixed_ip_update'"
op|')'
newline|'\n'
DECL|member|test_fixed_ip_cleanup_fail
name|'def'
name|'test_fixed_ip_cleanup_fail'
op|'('
name|'self'
op|','
name|'fixed_update'
op|','
name|'net_get'
op|','
name|'fixed_get'
op|')'
op|':'
newline|'\n'
comment|'# Verify IP is not deallocated if the security group refresh fails.'
nl|'\n'
indent|' '
name|'net_get'
op|'.'
name|'return_value'
op|'='
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
op|'**'
name|'networks'
op|'['
number|'1'
op|']'
op|')'
newline|'\n'
name|'context1'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'user'"
op|','
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|')'
newline|'\n'
nl|'\n'
name|'instance'
op|'='
name|'db'
op|'.'
name|'instance_create'
op|'('
name|'context1'
op|','
nl|'\n'
op|'{'
string|"'project_id'"
op|':'
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|'}'
op|')'
newline|'\n'
nl|'\n'
name|'elevated'
op|'='
name|'context1'
op|'.'
name|'elevated'
op|'('
op|')'
newline|'\n'
name|'fix_addr'
op|'='
name|'objects'
op|'.'
name|'FixedIP'
op|'.'
name|'associate_pool'
op|'('
name|'elevated'
op|','
number|'1'
op|','
nl|'\n'
name|'instance'
op|'['
string|"'uuid'"
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake_refresh
name|'def'
name|'fake_refresh'
op|'('
name|'instance_uuid'
op|')'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'test'
op|'.'
name|'TestingException'
op|'('
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|','
nl|'\n'
string|"'_do_trigger_security_group_members_refresh_for_instance'"
op|','
nl|'\n'
name|'fake_refresh'
op|')'
newline|'\n'
name|'fixed_get'
op|'.'
name|'return_value'
op|'='
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
nl|'\n'
name|'address'
op|'='
name|'fix_addr'
op|'.'
name|'address'
op|','
nl|'\n'
name|'allocated'
op|'='
name|'True'
op|','
nl|'\n'
name|'virtual_interface_id'
op|'='
number|'3'
op|','
nl|'\n'
name|'instance_uuid'
op|'='
name|'instance'
op|'.'
name|'uuid'
op|','
nl|'\n'
name|'network'
op|'='
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
op|'**'
name|'networks'
op|'['
number|'1'
op|']'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'test'
op|'.'
name|'TestingException'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'deallocate_fixed_ip'
op|','
nl|'\n'
name|'context1'
op|','
name|'str'
op|'('
name|'fix_addr'
op|'.'
name|'address'
op|')'
op|','
string|"'fake'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'fixed_update'
op|'.'
name|'called'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_networks_by_uuids_ordering
dedent|''
name|'def'
name|'test_get_networks_by_uuids_ordering'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'db'
op|','
string|"'network_get_all_by_uuids'"
op|')'
newline|'\n'
nl|'\n'
name|'requested_networks'
op|'='
op|'['
string|"'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'"
op|','
nl|'\n'
string|"'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'"
op|']'
newline|'\n'
name|'db'
op|'.'
name|'network_get_all_by_uuids'
op|'('
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|')'
op|'.'
name|'AndReturn'
op|'('
nl|'\n'
op|'['
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
op|'**'
name|'net'
op|')'
nl|'\n'
name|'for'
name|'net'
name|'in'
name|'networks'
op|']'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
name|'res'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'_get_networks_by_uuids'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'requested_networks'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'res'
op|'['
number|'0'
op|']'
op|'['
string|"'id'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'0'
op|','
name|'res'
op|'['
number|'1'
op|']'
op|'['
string|"'id'"
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.fixed_ip.FixedIP.get_by_id'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.floating_ip.FloatingIPList.get_by_host'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.network.linux_net.iptables_manager._apply'"
op|')'
newline|'\n'
DECL|member|test_init_host_iptables_defer_apply
name|'def'
name|'test_init_host_iptables_defer_apply'
op|'('
name|'self'
op|','
name|'iptable_apply'
op|','
nl|'\n'
name|'floating_get_by_host'
op|','
nl|'\n'
name|'fixed_get_by_id'
op|')'
op|':'
newline|'\n'
DECL|function|get_by_id
indent|' '
name|'def'
name|'get_by_id'
op|'('
name|'context'
op|','
name|'fixed_ip_id'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'net'
op|'='
name|'objects'
op|'.'
name|'Network'
op|'('
name|'bridge'
op|'='
string|"'testbridge'"
op|','
nl|'\n'
name|'cidr'
op|'='
string|"'192.168.1.0/24'"
op|')'
newline|'\n'
name|'if'
name|'fixed_ip_id'
op|'=='
number|'1'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'objects'
op|'.'
name|'FixedIP'
op|'('
name|'address'
op|'='
string|"'192.168.1.4'"
op|','
nl|'\n'
name|'network'
op|'='
name|'net'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'fixed_ip_id'
op|'=='
number|'2'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'objects'
op|'.'
name|'FixedIP'
op|'('
name|'address'
op|'='
string|"'192.168.1.5'"
op|','
nl|'\n'
name|'network'
op|'='
name|'net'
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake_apply
dedent|''
dedent|''
name|'def'
name|'fake_apply'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
name|'fake_apply'
op|'.'
name|'count'
op|'+='
number|'1'
newline|'\n'
nl|'\n'
dedent|''
name|'fake_apply'
op|'.'
name|'count'
op|'='
number|'0'
newline|'\n'
name|'ctxt'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'testuser'"
op|','
nl|'\n'
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'True'
op|')'
newline|'\n'
name|'float1'
op|'='
name|'objects'
op|'.'
name|'FloatingIP'
op|'('
name|'address'
op|'='
string|"'1.2.3.4'"
op|','
name|'fixed_ip_id'
op|'='
number|'1'
op|')'
newline|'\n'
name|'float2'
op|'='
name|'objects'
op|'.'
name|'FloatingIP'
op|'('
name|'address'
op|'='
string|"'1.2.3.5'"
op|','
name|'fixed_ip_id'
op|'='
number|'2'
op|')'
newline|'\n'
name|'float1'
op|'.'
name|'_context'
op|'='
name|'ctxt'
newline|'\n'
name|'float2'
op|'.'
name|'_context'
op|'='
name|'ctxt'
newline|'\n'
nl|'\n'
name|'iptable_apply'
op|'.'
name|'side_effect'
op|'='
name|'fake_apply'
newline|'\n'
name|'floating_get_by_host'
op|'.'
name|'return_value'
op|'='
op|'['
name|'float1'
op|','
name|'float2'
op|']'
newline|'\n'
name|'fixed_get_by_id'
op|'.'
name|'side_effect'
op|'='
name|'get_by_id'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'init_host'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'fake_apply'
op|'.'
name|'count'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|_TestDomainObject
dedent|''
dedent|''
name|'class'
name|'_TestDomainObject'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|__init__
indent|' '
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'for'
name|'k'
op|','
name|'v'
name|'in'
name|'six'
op|'.'
name|'iteritems'
op|'('
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'__setattr__'
op|'('
name|'k'
op|','
name|'v'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|CommonNetworkTestCase
dedent|''
dedent|''
dedent|''
name|'class'
name|'CommonNetworkTestCase'
op|'('
name|'test'
op|'.'
name|'TestCase'
op|')'
op|':'
newline|'\n'
nl|'\n'
DECL|variable|REQUIRES_LOCKING
indent|' '
name|'REQUIRES_LOCKING'
op|'='
name|'True'
newline|'\n'
nl|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'CommonNetworkTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'context'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'fake'"
op|','
string|"'fake'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'ipv6_backend'
op|'='
string|"'rfc2462'"
op|')'
newline|'\n'
name|'ipv6'
op|'.'
name|'reset_backend'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_validate_instance_zone_for_dns_domain
dedent|''
name|'def'
name|'test_validate_instance_zone_for_dns_domain'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'domain'
op|'='
string|"'example.com'"
newline|'\n'
name|'az'
op|'='
string|"'test_az'"
newline|'\n'
name|'domains'
op|'='
op|'{'
nl|'\n'
name|'domain'
op|':'
name|'_TestDomainObject'
op|'('
nl|'\n'
name|'domain'
op|'='
name|'domain'
op|','
nl|'\n'
name|'availability_zone'
op|'='
name|'az'
op|')'
op|'}'
newline|'\n'
nl|'\n'
DECL|function|dnsdomain_get
name|'def'
name|'dnsdomain_get'
op|'('
name|'context'
op|','
name|'instance_domain'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'domains'
op|'.'
name|'get'
op|'('
name|'instance_domain'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'nova.db.dnsdomain_get'"
op|','
name|'dnsdomain_get'
op|')'
newline|'\n'
name|'fake_instance'
op|'='
op|'{'
string|"'uuid'"
op|':'
name|'FAKEUUID'
op|','
nl|'\n'
string|"'availability_zone'"
op|':'
name|'az'
op|'}'
newline|'\n'
nl|'\n'
name|'manager'
op|'='
name|'network_manager'
op|'.'
name|'NetworkManager'
op|'('
op|')'
newline|'\n'
name|'res'
op|'='
name|'manager'
op|'.'
name|'_validate_instance_zone_for_dns_domain'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'fake_instance'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'res'
op|')'
newline|'\n'
nl|'\n'
DECL|member|fake_create_fixed_ips
dedent|''
name|'def'
name|'fake_create_fixed_ips'
op|'('
name|'self'
op|','
name|'context'
op|','
name|'network_id'
op|','
name|'fixed_cidr'
op|'='
name|'None'
op|','
nl|'\n'
name|'extra_reserved'
op|'='
name|'None'
op|','
name|'bottom_reserved'
op|'='
number|'0'
op|','
nl|'\n'
name|'top_reserved'
op|'='
number|'0'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'None'
newline|'\n'
nl|'\n'
DECL|member|test_get_instance_nw_info_client_exceptions
dedent|''
name|'def'
name|'test_get_instance_nw_info_client_exceptions'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'network_manager'
op|'.'
name|'NetworkManager'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'manager'
op|'.'
name|'db'
op|','
nl|'\n'
string|"'fixed_ip_get_by_instance'"
op|')'
newline|'\n'
name|'manager'
op|'.'
name|'db'
op|'.'
name|'fixed_ip_get_by_instance'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'FAKEUUID'
op|')'
op|'.'
name|'AndRaise'
op|'('
name|'exception'
op|'.'
name|'InstanceNotFound'
op|'('
nl|'\n'
name|'instance_id'
op|'='
name|'FAKEUUID'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'messaging'
op|'.'
name|'ExpectedException'
op|','
nl|'\n'
name|'manager'
op|'.'
name|'get_instance_nw_info'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'FAKEUUID'
op|','
string|"'fake_rxtx_factor'"
op|','
name|'HOST'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.instance_get'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.fixed_ip_get_by_instance'"
op|')'
newline|'\n'
DECL|member|test_deallocate_for_instance_passes_host_info
name|'def'
name|'test_deallocate_for_instance_passes_host_info'
op|'('
name|'self'
op|','
name|'fixed_get'
op|','
nl|'\n'
name|'instance_get'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'db'
op|'='
name|'manager'
op|'.'
name|'db'
newline|'\n'
name|'instance_get'
op|'.'
name|'return_value'
op|'='
name|'fake_inst'
op|'('
name|'uuid'
op|'='
name|'uuids'
op|'.'
name|'non_existent_uuid'
op|')'
newline|'\n'
name|'db'
op|'.'
name|'virtual_interface_delete_by_instance'
op|'='
name|'lambda'
name|'_x'
op|','
name|'_y'
op|':'
name|'None'
newline|'\n'
name|'ctx'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'igonre'"
op|','
string|"'igonre'"
op|')'
newline|'\n'
nl|'\n'
name|'fixed_get'
op|'.'
name|'return_value'
op|'='
op|'['
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
nl|'\n'
name|'address'
op|'='
string|"'1.2.3.4'"
op|','
nl|'\n'
name|'network_id'
op|'='
number|'123'
op|')'
op|']'
newline|'\n'
nl|'\n'
name|'manager'
op|'.'
name|'deallocate_for_instance'
op|'('
nl|'\n'
name|'ctx'
op|','
name|'instance'
op|'='
name|'objects'
op|'.'
name|'Instance'
op|'.'
name|'_from_db_object'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'objects'
op|'.'
name|'Instance'
op|'('
op|')'
op|','
name|'instance_get'
op|'.'
name|'return_value'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'['
nl|'\n'
op|'('
name|'ctx'
op|','
string|"'1.2.3.4'"
op|','
string|"'fake-host'"
op|')'
nl|'\n'
op|']'
op|','
name|'manager'
op|'.'
name|'deallocate_fixed_ip_calls'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.fixed_ip_get_by_instance'"
op|')'
newline|'\n'
DECL|member|test_deallocate_for_instance_passes_host_info_with_update_dns_entries
name|'def'
name|'test_deallocate_for_instance_passes_host_info_with_update_dns_entries'
op|'('
nl|'\n'
name|'self'
op|','
name|'fixed_get'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'update_dns_entries'
op|'='
name|'True'
op|')'
newline|'\n'
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'db'
op|'='
name|'manager'
op|'.'
name|'db'
newline|'\n'
name|'db'
op|'.'
name|'virtual_interface_delete_by_instance'
op|'='
name|'lambda'
name|'_x'
op|','
name|'_y'
op|':'
name|'None'
newline|'\n'
name|'ctx'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'igonre'"
op|','
string|"'igonre'"
op|')'
newline|'\n'
nl|'\n'
name|'fixed_get'
op|'.'
name|'return_value'
op|'='
op|'['
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
nl|'\n'
name|'address'
op|'='
string|"'1.2.3.4'"
op|','
nl|'\n'
name|'network_id'
op|'='
number|'123'
op|')'
op|']'
newline|'\n'
nl|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'manager'
op|'.'
name|'network_rpcapi'
op|','
nl|'\n'
string|"'update_dns'"
op|')'
name|'as'
name|'mock_update_dns'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'.'
name|'deallocate_for_instance'
op|'('
nl|'\n'
name|'ctx'
op|','
name|'instance'
op|'='
name|'fake_instance'
op|'.'
name|'fake_instance_obj'
op|'('
name|'ctx'
op|')'
op|')'
newline|'\n'
name|'mock_update_dns'
op|'.'
name|'assert_called_once_with'
op|'('
name|'ctx'
op|','
op|'['
string|"'123'"
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'['
nl|'\n'
op|'('
name|'ctx'
op|','
string|"'1.2.3.4'"
op|','
string|"'fake-host'"
op|')'
nl|'\n'
op|']'
op|','
name|'manager'
op|'.'
name|'deallocate_fixed_ip_calls'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_deallocate_for_instance_with_requested_networks
dedent|''
name|'def'
name|'test_deallocate_for_instance_with_requested_networks'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'db'
op|'='
name|'manager'
op|'.'
name|'db'
newline|'\n'
name|'db'
op|'.'
name|'virtual_interface_delete_by_instance'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'ctx'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'igonre'"
op|','
string|"'igonre'"
op|')'
newline|'\n'
name|'requested_networks'
op|'='
name|'objects'
op|'.'
name|'NetworkRequestList'
op|'.'
name|'from_tuples'
op|'('
nl|'\n'
op|'['
op|'('
string|"'123'"
op|','
string|"'1.2.3.4'"
op|')'
op|','
op|'('
string|"'123'"
op|','
string|"'4.3.2.1'"
op|')'
op|','
op|'('
string|"'123'"
op|','
name|'None'
op|')'
op|']'
op|')'
newline|'\n'
name|'manager'
op|'.'
name|'deallocate_for_instance'
op|'('
nl|'\n'
name|'ctx'
op|','
nl|'\n'
name|'instance'
op|'='
name|'fake_instance'
op|'.'
name|'fake_instance_obj'
op|'('
name|'ctx'
op|')'
op|','
nl|'\n'
name|'requested_networks'
op|'='
name|'requested_networks'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'['
nl|'\n'
op|'('
name|'ctx'
op|','
string|"'1.2.3.4'"
op|','
string|"'fake-host'"
op|')'
op|','
op|'('
name|'ctx'
op|','
string|"'4.3.2.1'"
op|','
string|"'fake-host'"
op|')'
nl|'\n'
op|']'
op|','
name|'manager'
op|'.'
name|'deallocate_fixed_ip_calls'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_deallocate_for_instance_with_update_dns_entries
dedent|''
name|'def'
name|'test_deallocate_for_instance_with_update_dns_entries'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'update_dns_entries'
op|'='
name|'True'
op|')'
newline|'\n'
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'db'
op|'='
name|'manager'
op|'.'
name|'db'
newline|'\n'
name|'db'
op|'.'
name|'virtual_interface_delete_by_instance'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'ctx'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'igonre'"
op|','
string|"'igonre'"
op|')'
newline|'\n'
name|'requested_networks'
op|'='
name|'objects'
op|'.'
name|'NetworkRequestList'
op|'.'
name|'from_tuples'
op|'('
nl|'\n'
op|'['
op|'('
string|"'123'"
op|','
string|"'1.2.3.4'"
op|')'
op|','
op|'('
string|"'123'"
op|','
string|"'4.3.2.1'"
op|')'
op|']'
op|')'
newline|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'manager'
op|'.'
name|'network_rpcapi'
op|','
nl|'\n'
string|"'update_dns'"
op|')'
name|'as'
name|'mock_update_dns'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'.'
name|'deallocate_for_instance'
op|'('
nl|'\n'
name|'ctx'
op|','
nl|'\n'
name|'instance'
op|'='
name|'fake_instance'
op|'.'
name|'fake_instance_obj'
op|'('
name|'ctx'
op|')'
op|','
nl|'\n'
name|'requested_networks'
op|'='
name|'requested_networks'
op|')'
newline|'\n'
name|'mock_update_dns'
op|'.'
name|'assert_called_once_with'
op|'('
name|'ctx'
op|','
op|'['
string|"'123'"
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'['
nl|'\n'
op|'('
name|'ctx'
op|','
string|"'1.2.3.4'"
op|','
string|"'fake-host'"
op|')'
op|','
op|'('
name|'ctx'
op|','
string|"'4.3.2.1'"
op|','
string|"'fake-host'"
op|')'
nl|'\n'
op|']'
op|','
name|'manager'
op|'.'
name|'deallocate_fixed_ip_calls'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.fixed_ip_get_by_instance'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.fixed_ip_disassociate'"
op|')'
newline|'\n'
DECL|member|test_remove_fixed_ip_from_instance
name|'def'
name|'test_remove_fixed_ip_from_instance'
op|'('
name|'self'
op|','
name|'disassociate'
op|','
name|'get'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'get'
op|'.'
name|'return_value'
op|'='
op|'['
nl|'\n'
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
op|'**'
name|'x'
op|')'
nl|'\n'
name|'for'
name|'x'
name|'in'
name|'manager'
op|'.'
name|'db'
op|'.'
name|'fixed_ip_get_by_instance'
op|'('
name|'None'
op|','
nl|'\n'
name|'FAKEUUID'
op|')'
op|']'
newline|'\n'
name|'manager'
op|'.'
name|'remove_fixed_ip_from_instance'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'FAKEUUID'
op|','
nl|'\n'
name|'HOST'
op|','
nl|'\n'
string|"'10.0.0.1'"
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'10.0.0.1'"
op|','
name|'manager'
op|'.'
name|'deallocate_called'
op|')'
newline|'\n'
name|'disassociate'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
string|"'10.0.0.1'"
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.fixed_ip_get_by_instance'"
op|')'
newline|'\n'
DECL|member|test_remove_fixed_ip_from_instance_bad_input
name|'def'
name|'test_remove_fixed_ip_from_instance_bad_input'
op|'('
name|'self'
op|','
name|'get'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'get'
op|'.'
name|'return_value'
op|'='
op|'['
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'FixedIpNotFoundForSpecificInstance'
op|','
nl|'\n'
name|'manager'
op|'.'
name|'remove_fixed_ip_from_instance'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
number|'99'
op|','
name|'HOST'
op|','
string|"'bad input'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_validate_cidrs
dedent|''
name|'def'
name|'test_validate_cidrs'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'nets'
op|'='
name|'manager'
op|'.'
name|'create_networks'
op|'('
name|'self'
op|'.'
name|'context'
op|'.'
name|'elevated'
op|'('
op|')'
op|','
string|"'fake'"
op|','
nl|'\n'
string|"'192.168.0.0/24'"
op|','
nl|'\n'
name|'False'
op|','
number|'1'
op|','
number|'256'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
nl|'\n'
name|'None'
op|','
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'len'
op|'('
name|'nets'
op|')'
op|')'
newline|'\n'
name|'cidrs'
op|'='
op|'['
name|'str'
op|'('
name|'net'
op|'['
string|"'cidr'"
op|']'
op|')'
name|'for'
name|'net'
name|'in'
name|'nets'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
string|"'192.168.0.0/24'"
op|','
name|'cidrs'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_validate_cidrs_split_exact_in_half
dedent|''
name|'def'
name|'test_validate_cidrs_split_exact_in_half'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'nets'
op|'='
name|'manager'
op|'.'
name|'create_networks'
op|'('
name|'self'
op|'.'
name|'context'
op|'.'
name|'elevated'
op|'('
op|')'
op|','
string|"'fake'"
op|','
nl|'\n'
string|"'192.168.0.0/24'"
op|','
nl|'\n'
name|'False'
op|','
number|'2'
op|','
number|'128'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
nl|'\n'
name|'None'
op|','
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'2'
op|','
name|'len'
op|'('
name|'nets'
op|')'
op|')'
newline|'\n'
name|'cidrs'
op|'='
op|'['
name|'str'
op|'('
name|'net'
op|'['
string|"'cidr'"
op|']'
op|')'
name|'for'
name|'net'
name|'in'
name|'nets'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
string|"'192.168.0.0/25'"
op|','
name|'cidrs'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
string|"'192.168.0.128/25'"
op|','
name|'cidrs'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.network_get_all'"
op|')'
newline|'\n'
DECL|member|test_validate_cidrs_split_cidr_in_use_middle_of_range
name|'def'
name|'test_validate_cidrs_split_cidr_in_use_middle_of_range'
op|'('
name|'self'
op|','
name|'get_all'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'get_all'
op|'.'
name|'return_value'
op|'='
op|'['
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
name|'id'
op|'='
number|'1'
op|','
name|'cidr'
op|'='
string|"'192.168.2.0/24'"
op|')'
op|']'
newline|'\n'
name|'nets'
op|'='
name|'manager'
op|'.'
name|'create_networks'
op|'('
name|'self'
op|'.'
name|'context'
op|'.'
name|'elevated'
op|'('
op|')'
op|','
string|"'fake'"
op|','
nl|'\n'
string|"'192.168.0.0/16'"
op|','
nl|'\n'
name|'False'
op|','
number|'4'
op|','
number|'256'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
nl|'\n'
name|'None'
op|','
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'4'
op|','
name|'len'
op|'('
name|'nets'
op|')'
op|')'
newline|'\n'
name|'cidrs'
op|'='
op|'['
name|'str'
op|'('
name|'net'
op|'['
string|"'cidr'"
op|']'
op|')'
name|'for'
name|'net'
name|'in'
name|'nets'
op|']'
newline|'\n'
name|'exp_cidrs'
op|'='
op|'['
string|"'192.168.0.0/24'"
op|','
string|"'192.168.1.0/24'"
op|','
string|"'192.168.3.0/24'"
op|','
nl|'\n'
string|"'192.168.4.0/24'"
op|']'
newline|'\n'
name|'for'
name|'exp_cidr'
name|'in'
name|'exp_cidrs'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertIn'
op|'('
name|'exp_cidr'
op|','
name|'cidrs'
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'assertNotIn'
op|'('
string|"'192.168.2.0/24'"
op|','
name|'cidrs'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.network_get_all'"
op|')'
newline|'\n'
DECL|member|test_validate_cidrs_smaller_subnet_in_use
name|'def'
name|'test_validate_cidrs_smaller_subnet_in_use'
op|'('
name|'self'
op|','
name|'get_all'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'get_all'
op|'.'
name|'return_value'
op|'='
op|'['
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
name|'id'
op|'='
number|'1'
op|','
name|'cidr'
op|'='
string|"'192.168.2.9/25'"
op|')'
op|']'
newline|'\n'
comment|'# CidrConflict: requested cidr (192.168.2.0/24) conflicts with'
nl|'\n'
comment|'# existing smaller cidr'
nl|'\n'
name|'args'
op|'='
op|'('
name|'self'
op|'.'
name|'context'
op|'.'
name|'elevated'
op|'('
op|')'
op|','
string|"'fake'"
op|','
string|"'192.168.2.0/24'"
op|','
name|'False'
op|','
nl|'\n'
number|'1'
op|','
number|'256'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'CidrConflict'
op|','
nl|'\n'
name|'manager'
op|'.'
name|'create_networks'
op|','
op|'*'
name|'args'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.network_get_all'"
op|')'
newline|'\n'
DECL|member|test_validate_cidrs_split_smaller_cidr_in_use
name|'def'
name|'test_validate_cidrs_split_smaller_cidr_in_use'
op|'('
name|'self'
op|','
name|'get_all'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'get_all'
op|'.'
name|'return_value'
op|'='
op|'['
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
name|'id'
op|'='
number|'1'
op|','
name|'cidr'
op|'='
string|"'192.168.2.0/25'"
op|')'
op|']'
newline|'\n'
name|'nets'
op|'='
name|'manager'
op|'.'
name|'create_networks'
op|'('
name|'self'
op|'.'
name|'context'
op|'.'
name|'elevated'
op|'('
op|')'
op|','
string|"'fake'"
op|','
nl|'\n'
string|"'192.168.0.0/16'"
op|','
nl|'\n'
name|'False'
op|','
number|'4'
op|','
number|'256'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
nl|'\n'
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'4'
op|','
name|'len'
op|'('
name|'nets'
op|')'
op|')'
newline|'\n'
name|'cidrs'
op|'='
op|'['
name|'str'
op|'('
name|'net'
op|'['
string|"'cidr'"
op|']'
op|')'
name|'for'
name|'net'
name|'in'
name|'nets'
op|']'
newline|'\n'
name|'exp_cidrs'
op|'='
op|'['
string|"'192.168.0.0/24'"
op|','
string|"'192.168.1.0/24'"
op|','
string|"'192.168.3.0/24'"
op|','
nl|'\n'
string|"'192.168.4.0/24'"
op|']'
newline|'\n'
name|'for'
name|'exp_cidr'
name|'in'
name|'exp_cidrs'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertIn'
op|'('
name|'exp_cidr'
op|','
name|'cidrs'
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'assertNotIn'
op|'('
string|"'192.168.2.0/24'"
op|','
name|'cidrs'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.network_get_all'"
op|')'
newline|'\n'
DECL|member|test_validate_cidrs_split_smaller_cidr_in_use2
name|'def'
name|'test_validate_cidrs_split_smaller_cidr_in_use2'
op|'('
name|'self'
op|','
name|'get_all'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'manager'
op|'.'
name|'db'
op|','
string|"'network_get_all'"
op|')'
newline|'\n'
name|'get_all'
op|'.'
name|'return_value'
op|'='
op|'['
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
name|'id'
op|'='
number|'1'
op|','
nl|'\n'
name|'cidr'
op|'='
string|"'192.168.2.9/29'"
op|')'
op|']'
newline|'\n'
name|'nets'
op|'='
name|'manager'
op|'.'
name|'create_networks'
op|'('
name|'self'
op|'.'
name|'context'
op|'.'
name|'elevated'
op|'('
op|')'
op|','
string|"'fake'"
op|','
nl|'\n'
string|"'192.168.2.0/24'"
op|','
nl|'\n'
name|'False'
op|','
number|'3'
op|','
number|'32'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
nl|'\n'
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'3'
op|','
name|'len'
op|'('
name|'nets'
op|')'
op|')'
newline|'\n'
name|'cidrs'
op|'='
op|'['
name|'str'
op|'('
name|'net'
op|'['
string|"'cidr'"
op|']'
op|')'
name|'for'
name|'net'
name|'in'
name|'nets'
op|']'
newline|'\n'
name|'exp_cidrs'
op|'='
op|'['
string|"'192.168.2.32/27'"
op|','
string|"'192.168.2.64/27'"
op|','
string|"'192.168.2.96/27'"
op|']'
newline|'\n'
name|'for'
name|'exp_cidr'
name|'in'
name|'exp_cidrs'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertIn'
op|'('
name|'exp_cidr'
op|','
name|'cidrs'
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'assertNotIn'
op|'('
string|"'192.168.2.0/27'"
op|','
name|'cidrs'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.network_get_all'"
op|')'
newline|'\n'
DECL|member|test_validate_cidrs_split_all_in_use
name|'def'
name|'test_validate_cidrs_split_all_in_use'
op|'('
name|'self'
op|','
name|'get_all'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'in_use'
op|'='
op|'['
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
op|'**'
name|'values'
op|')'
name|'for'
name|'values'
name|'in'
nl|'\n'
op|'['
op|'{'
string|"'id'"
op|':'
number|'1'
op|','
string|"'cidr'"
op|':'
string|"'192.168.2.9/29'"
op|'}'
op|','
nl|'\n'
op|'{'
string|"'id'"
op|':'
number|'2'
op|','
string|"'cidr'"
op|':'
string|"'192.168.2.64/26'"
op|'}'
op|','
nl|'\n'
op|'{'
string|"'id'"
op|':'
number|'3'
op|','
string|"'cidr'"
op|':'
string|"'192.168.2.128/26'"
op|'}'
op|']'
op|']'
newline|'\n'
name|'get_all'
op|'.'
name|'return_value'
op|'='
name|'in_use'
newline|'\n'
name|'args'
op|'='
op|'('
name|'self'
op|'.'
name|'context'
op|'.'
name|'elevated'
op|'('
op|')'
op|','
string|"'fake'"
op|','
string|"'192.168.2.0/24'"
op|','
name|'False'
op|','
nl|'\n'
number|'3'
op|','
number|'64'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|')'
newline|'\n'
comment|'# CidrConflict: Not enough subnets avail to satisfy requested num_'
nl|'\n'
comment|'# networks - some subnets in requested range already'
nl|'\n'
comment|'# in use'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'CidrConflict'
op|','
nl|'\n'
name|'manager'
op|'.'
name|'create_networks'
op|','
op|'*'
name|'args'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_validate_cidrs_one_in_use
dedent|''
name|'def'
name|'test_validate_cidrs_one_in_use'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'args'
op|'='
op|'('
name|'None'
op|','
string|"'fake'"
op|','
string|"'192.168.0.0/24'"
op|','
name|'False'
op|','
number|'2'
op|','
number|'256'
op|','
name|'None'
op|','
name|'None'
op|','
nl|'\n'
name|'None'
op|','
name|'None'
op|','
name|'None'
op|')'
newline|'\n'
comment|'# ValueError: network_size * num_networks exceeds cidr size'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'ValueError'
op|','
name|'manager'
op|'.'
name|'create_networks'
op|','
op|'*'
name|'args'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.network_get_all'"
op|')'
newline|'\n'
DECL|member|test_validate_cidrs_already_used
name|'def'
name|'test_validate_cidrs_already_used'
op|'('
name|'self'
op|','
name|'get_all'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'get_all'
op|'.'
name|'return_value'
op|'='
op|'['
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
name|'cidr'
op|'='
string|"'192.168.0.0/24'"
op|')'
op|']'
newline|'\n'
comment|'# CidrConflict: cidr already in use'
nl|'\n'
name|'args'
op|'='
op|'('
name|'self'
op|'.'
name|'context'
op|'.'
name|'elevated'
op|'('
op|')'
op|','
string|"'fake'"
op|','
string|"'192.168.0.0/24'"
op|','
name|'False'
op|','
nl|'\n'
number|'1'
op|','
number|'256'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'CidrConflict'
op|','
nl|'\n'
name|'manager'
op|'.'
name|'create_networks'
op|','
op|'*'
name|'args'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_validate_cidrs_too_many
dedent|''
name|'def'
name|'test_validate_cidrs_too_many'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'args'
op|'='
op|'('
name|'None'
op|','
string|"'fake'"
op|','
string|"'192.168.0.0/24'"
op|','
name|'False'
op|','
number|'200'
op|','
number|'256'
op|','
name|'None'
op|','
name|'None'
op|','
nl|'\n'
name|'None'
op|','
name|'None'
op|','
name|'None'
op|')'
newline|'\n'
comment|'# ValueError: Not enough subnets avail to satisfy requested'
nl|'\n'
comment|'# num_networks'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'ValueError'
op|','
name|'manager'
op|'.'
name|'create_networks'
op|','
op|'*'
name|'args'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_validate_cidrs_split_partial
dedent|''
name|'def'
name|'test_validate_cidrs_split_partial'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'nets'
op|'='
name|'manager'
op|'.'
name|'create_networks'
op|'('
name|'self'
op|'.'
name|'context'
op|'.'
name|'elevated'
op|'('
op|')'
op|','
string|"'fake'"
op|','
nl|'\n'
string|"'192.168.0.0/16'"
op|','
nl|'\n'
name|'False'
op|','
number|'2'
op|','
number|'256'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
nl|'\n'
name|'None'
op|')'
newline|'\n'
name|'returned_cidrs'
op|'='
op|'['
name|'str'
op|'('
name|'net'
op|'['
string|"'cidr'"
op|']'
op|')'
name|'for'
name|'net'
name|'in'
name|'nets'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
string|"'192.168.0.0/24'"
op|','
name|'returned_cidrs'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
string|"'192.168.1.0/24'"
op|','
name|'returned_cidrs'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.network_get_all'"
op|')'
newline|'\n'
DECL|member|test_validate_cidrs_conflict_existing_supernet
name|'def'
name|'test_validate_cidrs_conflict_existing_supernet'
op|'('
name|'self'
op|','
name|'get_all'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'get_all'
op|'.'
name|'return_value'
op|'='
op|'['
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
name|'id'
op|'='
number|'1'
op|','
name|'cidr'
op|'='
string|"'192.168.0.0/8'"
op|')'
op|']'
newline|'\n'
name|'args'
op|'='
op|'('
name|'self'
op|'.'
name|'context'
op|'.'
name|'elevated'
op|'('
op|')'
op|','
string|"'fake'"
op|','
string|"'192.168.0.0/24'"
op|','
name|'False'
op|','
nl|'\n'
number|'1'
op|','
number|'256'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|')'
newline|'\n'
comment|'# CidrConflict: requested cidr (192.168.0.0/24) conflicts'
nl|'\n'
comment|'# with existing supernet'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'CidrConflict'
op|','
nl|'\n'
name|'manager'
op|'.'
name|'create_networks'
op|','
op|'*'
name|'args'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_create_networks
dedent|''
name|'def'
name|'test_create_networks'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'cidr'
op|'='
string|"'192.168.0.0/24'"
newline|'\n'
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'manager'
op|','
string|"'_create_fixed_ips'"
op|','
nl|'\n'
name|'self'
op|'.'
name|'fake_create_fixed_ips'
op|')'
newline|'\n'
name|'args'
op|'='
op|'['
name|'self'
op|'.'
name|'context'
op|'.'
name|'elevated'
op|'('
op|')'
op|','
string|"'foo'"
op|','
name|'cidr'
op|','
name|'None'
op|','
number|'1'
op|','
number|'256'
op|','
nl|'\n'
string|"'fd00::/48'"
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'manager'
op|'.'
name|'create_networks'
op|'('
op|'*'
name|'args'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_create_networks_with_uuid
dedent|''
name|'def'
name|'test_create_networks_with_uuid'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'cidr'
op|'='
string|"'192.168.0.0/24'"
newline|'\n'
name|'uuid'
op|'='
name|'FAKEUUID'
newline|'\n'
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'manager'
op|','
string|"'_create_fixed_ips'"
op|','
nl|'\n'
name|'self'
op|'.'
name|'fake_create_fixed_ips'
op|')'
newline|'\n'
name|'args'
op|'='
op|'['
name|'self'
op|'.'
name|'context'
op|'.'
name|'elevated'
op|'('
op|')'
op|','
string|"'foo'"
op|','
name|'cidr'
op|','
name|'None'
op|','
number|'1'
op|','
number|'256'
op|','
nl|'\n'
string|"'fd00::/48'"
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|']'
newline|'\n'
name|'kwargs'
op|'='
op|'{'
string|"'uuid'"
op|':'
name|'uuid'
op|'}'
newline|'\n'
name|'nets'
op|'='
name|'manager'
op|'.'
name|'create_networks'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'len'
op|'('
name|'nets'
op|')'
op|')'
newline|'\n'
name|'net'
op|'='
name|'nets'
op|'['
number|'0'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'uuid'
op|','
name|'net'
op|'['
string|"'uuid'"
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.network_get_all'"
op|')'
newline|'\n'
DECL|member|test_create_networks_cidr_already_used
name|'def'
name|'test_create_networks_cidr_already_used'
op|'('
name|'self'
op|','
name|'get_all'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'get_all'
op|'.'
name|'return_value'
op|'='
op|'['
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
name|'id'
op|'='
number|'1'
op|','
name|'cidr'
op|'='
string|"'192.168.0.0/24'"
op|')'
op|']'
newline|'\n'
name|'args'
op|'='
op|'['
name|'self'
op|'.'
name|'context'
op|'.'
name|'elevated'
op|'('
op|')'
op|','
string|"'foo'"
op|','
string|"'192.168.0.0/24'"
op|','
name|'None'
op|','
number|'1'
op|','
number|'256'
op|','
nl|'\n'
string|"'fd00::/48'"
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'CidrConflict'
op|','
nl|'\n'
name|'manager'
op|'.'
name|'create_networks'
op|','
op|'*'
name|'args'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_create_networks_many
dedent|''
name|'def'
name|'test_create_networks_many'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'cidr'
op|'='
string|"'192.168.0.0/16'"
newline|'\n'
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'manager'
op|','
string|"'_create_fixed_ips'"
op|','
nl|'\n'
name|'self'
op|'.'
name|'fake_create_fixed_ips'
op|')'
newline|'\n'
name|'args'
op|'='
op|'['
name|'self'
op|'.'
name|'context'
op|'.'
name|'elevated'
op|'('
op|')'
op|','
string|"'foo'"
op|','
name|'cidr'
op|','
name|'None'
op|','
number|'10'
op|','
number|'256'
op|','
nl|'\n'
string|"'fd00::/48'"
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'manager'
op|'.'
name|'create_networks'
op|'('
op|'*'
name|'args'
op|')'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.network_get'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.fixed_ips_by_virtual_interface'"
op|')'
newline|'\n'
DECL|member|test_get_instance_uuids_by_ip_regex
name|'def'
name|'test_get_instance_uuids_by_ip_regex'
op|'('
name|'self'
op|','
name|'fixed_get'
op|','
name|'network_get'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
name|'self'
op|'.'
name|'stubs'
op|')'
newline|'\n'
name|'fixed_get'
op|'.'
name|'side_effect'
op|'='
name|'manager'
op|'.'
name|'db'
op|'.'
name|'fixed_ips_by_virtual_interface'
newline|'\n'
name|'_vifs'
op|'='
name|'manager'
op|'.'
name|'db'
op|'.'
name|'virtual_interface_get_all'
op|'('
name|'None'
op|')'
newline|'\n'
name|'fake_context'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'user'"
op|','
string|"'project'"
op|')'
newline|'\n'
name|'network_get'
op|'.'
name|'return_value'
op|'='
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
op|'**'
name|'manager'
op|'.'
name|'db'
op|'.'
name|'network_get'
op|'('
name|'None'
op|','
number|'1'
op|')'
op|')'
newline|'\n'
nl|'\n'
comment|'# Greedy get eveything'
nl|'\n'
name|'res'
op|'='
name|'manager'
op|'.'
name|'get_instance_uuids_by_ip_filter'
op|'('
name|'fake_context'
op|','
nl|'\n'
op|'{'
string|"'ip'"
op|':'
string|"'.*'"
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'len'
op|'('
name|'_vifs'
op|')'
op|','
name|'len'
op|'('
name|'res'
op|')'
op|')'
newline|'\n'
nl|'\n'
comment|"# Doesn't exist"
nl|'\n'
name|'res'
op|'='
name|'manager'
op|'.'
name|'get_instance_uuids_by_ip_filter'
op|'('
name|'fake_context'
op|','
nl|'\n'
op|'{'
string|"'ip'"
op|':'
string|"'10.0.0.1'"
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'res'
op|')'
newline|'\n'
nl|'\n'
comment|'# Get instance 1'
nl|'\n'
name|'res'
op|'='
name|'manager'
op|'.'
name|'get_instance_uuids_by_ip_filter'
op|'('
name|'fake_context'
op|','
nl|'\n'
op|'{'
string|"'ip'"
op|':'
string|"'172.16.0.2'"
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'res'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'len'
op|'('
name|'res'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'_vifs'
op|'['
number|'1'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|','
name|'res'
op|'['
number|'0'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|')'
newline|'\n'
nl|'\n'
comment|'# Get instance 2'
nl|'\n'
name|'res'
op|'='
name|'manager'
op|'.'
name|'get_instance_uuids_by_ip_filter'
op|'('
name|'fake_context'
op|','
nl|'\n'
op|'{'
string|"'ip'"
op|':'
string|"'173.16.0.2'"
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'res'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'len'
op|'('
name|'res'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'_vifs'
op|'['
number|'2'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|','
name|'res'
op|'['
number|'0'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|')'
newline|'\n'
nl|'\n'
comment|'# Get instance 0 and 1'
nl|'\n'
name|'res'
op|'='
name|'manager'
op|'.'
name|'get_instance_uuids_by_ip_filter'
op|'('
name|'fake_context'
op|','
nl|'\n'
op|'{'
string|"'ip'"
op|':'
string|"'172.16.0.*'"
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'res'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'2'
op|','
name|'len'
op|'('
name|'res'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'_vifs'
op|'['
number|'0'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|','
name|'res'
op|'['
number|'0'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'_vifs'
op|'['
number|'1'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|','
name|'res'
op|'['
number|'1'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|')'
newline|'\n'
nl|'\n'
comment|'# Get instance 1 and 2'
nl|'\n'
name|'res'
op|'='
name|'manager'
op|'.'
name|'get_instance_uuids_by_ip_filter'
op|'('
name|'fake_context'
op|','
nl|'\n'
op|'{'
string|"'ip'"
op|':'
string|"'17..16.0.2'"
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'res'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'2'
op|','
name|'len'
op|'('
name|'res'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'_vifs'
op|'['
number|'1'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|','
name|'res'
op|'['
number|'0'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'_vifs'
op|'['
number|'2'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|','
name|'res'
op|'['
number|'1'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.network_get'"
op|')'
newline|'\n'
DECL|member|test_get_instance_uuids_by_ipv6_regex
name|'def'
name|'test_get_instance_uuids_by_ipv6_regex'
op|'('
name|'self'
op|','
name|'network_get'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
name|'self'
op|'.'
name|'stubs'
op|')'
newline|'\n'
name|'_vifs'
op|'='
name|'manager'
op|'.'
name|'db'
op|'.'
name|'virtual_interface_get_all'
op|'('
name|'None'
op|')'
newline|'\n'
name|'fake_context'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'user'"
op|','
string|"'project'"
op|')'
newline|'\n'
nl|'\n'
DECL|function|_network_get
name|'def'
name|'_network_get'
op|'('
name|'context'
op|','
name|'network_id'
op|','
op|'**'
name|'args'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
op|'**'
name|'manager'
op|'.'
name|'db'
op|'.'
name|'network_get'
op|'('
name|'context'
op|','
name|'network_id'
op|')'
op|')'
newline|'\n'
dedent|''
name|'network_get'
op|'.'
name|'side_effect'
op|'='
name|'_network_get'
newline|'\n'
nl|'\n'
comment|'# Greedy get eveything'
nl|'\n'
name|'res'
op|'='
name|'manager'
op|'.'
name|'get_instance_uuids_by_ip_filter'
op|'('
name|'fake_context'
op|','
nl|'\n'
op|'{'
string|"'ip6'"
op|':'
string|"'.*'"
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'len'
op|'('
name|'_vifs'
op|')'
op|','
name|'len'
op|'('
name|'res'
op|')'
op|')'
newline|'\n'
nl|'\n'
comment|"# Doesn't exist"
nl|'\n'
name|'res'
op|'='
name|'manager'
op|'.'
name|'get_instance_uuids_by_ip_filter'
op|'('
name|'fake_context'
op|','
nl|'\n'
op|'{'
string|"'ip6'"
op|':'
string|"'.*1034.*'"
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'res'
op|')'
newline|'\n'
nl|'\n'
comment|'# Get instance 1'
nl|'\n'
name|'res'
op|'='
name|'manager'
op|'.'
name|'get_instance_uuids_by_ip_filter'
op|'('
name|'fake_context'
op|','
nl|'\n'
op|'{'
string|"'ip6'"
op|':'
string|"'2001:.*2'"
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'res'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'len'
op|'('
name|'res'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'_vifs'
op|'['
number|'1'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|','
name|'res'
op|'['
number|'0'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|')'
newline|'\n'
nl|'\n'
comment|'# Get instance 2'
nl|'\n'
name|'ip6'
op|'='
string|"'2001:db8:69:1f:dead:beff:feff:ef03'"
newline|'\n'
name|'res'
op|'='
name|'manager'
op|'.'
name|'get_instance_uuids_by_ip_filter'
op|'('
name|'fake_context'
op|','
nl|'\n'
op|'{'
string|"'ip6'"
op|':'
name|'ip6'
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'res'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'len'
op|'('
name|'res'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'_vifs'
op|'['
number|'2'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|','
name|'res'
op|'['
number|'0'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|')'
newline|'\n'
nl|'\n'
comment|'# Get instance 0 and 1'
nl|'\n'
name|'res'
op|'='
name|'manager'
op|'.'
name|'get_instance_uuids_by_ip_filter'
op|'('
name|'fake_context'
op|','
nl|'\n'
op|'{'
string|"'ip6'"
op|':'
string|"'.*ef0[1,2]'"
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'res'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'2'
op|','
name|'len'
op|'('
name|'res'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'_vifs'
op|'['
number|'0'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|','
name|'res'
op|'['
number|'0'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'_vifs'
op|'['
number|'1'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|','
name|'res'
op|'['
number|'1'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|')'
newline|'\n'
nl|'\n'
comment|'# Get instance 1 and 2'
nl|'\n'
name|'ip6'
op|'='
string|"'2001:db8:69:1.:dead:beff:feff:ef0.'"
newline|'\n'
name|'res'
op|'='
name|'manager'
op|'.'
name|'get_instance_uuids_by_ip_filter'
op|'('
name|'fake_context'
op|','
nl|'\n'
op|'{'
string|"'ip6'"
op|':'
name|'ip6'
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'res'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'2'
op|','
name|'len'
op|'('
name|'res'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'_vifs'
op|'['
number|'1'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|','
name|'res'
op|'['
number|'0'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'_vifs'
op|'['
number|'2'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|','
name|'res'
op|'['
number|'1'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.network_get'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.fixed_ips_by_virtual_interface'"
op|')'
newline|'\n'
DECL|member|test_get_instance_uuids_by_ip
name|'def'
name|'test_get_instance_uuids_by_ip'
op|'('
name|'self'
op|','
name|'fixed_get'
op|','
name|'network_get'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
name|'self'
op|'.'
name|'stubs'
op|')'
newline|'\n'
name|'fixed_get'
op|'.'
name|'side_effect'
op|'='
name|'manager'
op|'.'
name|'db'
op|'.'
name|'fixed_ips_by_virtual_interface'
newline|'\n'
name|'_vifs'
op|'='
name|'manager'
op|'.'
name|'db'
op|'.'
name|'virtual_interface_get_all'
op|'('
name|'None'
op|')'
newline|'\n'
name|'fake_context'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'user'"
op|','
string|"'project'"
op|')'
newline|'\n'
name|'network_get'
op|'.'
name|'return_value'
op|'='
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
op|'**'
name|'manager'
op|'.'
name|'db'
op|'.'
name|'network_get'
op|'('
name|'None'
op|','
number|'1'
op|')'
op|')'
newline|'\n'
nl|'\n'
comment|'# No regex for you!'
nl|'\n'
name|'res'
op|'='
name|'manager'
op|'.'
name|'get_instance_uuids_by_ip_filter'
op|'('
name|'fake_context'
op|','
nl|'\n'
op|'{'
string|"'fixed_ip'"
op|':'
string|"'.*'"
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'res'
op|')'
newline|'\n'
nl|'\n'
comment|"# Doesn't exist"
nl|'\n'
name|'ip'
op|'='
string|"'10.0.0.1'"
newline|'\n'
name|'res'
op|'='
name|'manager'
op|'.'
name|'get_instance_uuids_by_ip_filter'
op|'('
name|'fake_context'
op|','
nl|'\n'
op|'{'
string|"'fixed_ip'"
op|':'
name|'ip'
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'res'
op|')'
newline|'\n'
nl|'\n'
comment|'# Get instance 1'
nl|'\n'
name|'ip'
op|'='
string|"'172.16.0.2'"
newline|'\n'
name|'res'
op|'='
name|'manager'
op|'.'
name|'get_instance_uuids_by_ip_filter'
op|'('
name|'fake_context'
op|','
nl|'\n'
op|'{'
string|"'fixed_ip'"
op|':'
name|'ip'
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'res'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'len'
op|'('
name|'res'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'_vifs'
op|'['
number|'1'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|','
name|'res'
op|'['
number|'0'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|')'
newline|'\n'
nl|'\n'
comment|'# Get instance 2'
nl|'\n'
name|'ip'
op|'='
string|"'173.16.0.2'"
newline|'\n'
name|'res'
op|'='
name|'manager'
op|'.'
name|'get_instance_uuids_by_ip_filter'
op|'('
name|'fake_context'
op|','
nl|'\n'
op|'{'
string|"'fixed_ip'"
op|':'
name|'ip'
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'res'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'len'
op|'('
name|'res'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'_vifs'
op|'['
number|'2'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|','
name|'res'
op|'['
number|'0'
op|']'
op|'['
string|"'instance_uuid'"
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.network_get_by_uuid'"
op|')'
newline|'\n'
DECL|member|test_get_network
name|'def'
name|'test_get_network'
op|'('
name|'self'
op|','
name|'get'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'fake_context'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'user'"
op|','
string|"'project'"
op|')'
newline|'\n'
name|'get'
op|'.'
name|'return_value'
op|'='
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
op|'**'
name|'networks'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
name|'uuid'
op|'='
string|"'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'"
newline|'\n'
name|'network'
op|'='
name|'manager'
op|'.'
name|'get_network'
op|'('
name|'fake_context'
op|','
name|'uuid'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'uuid'
op|','
name|'network'
op|'['
string|"'uuid'"
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.network_get_by_uuid'"
op|')'
newline|'\n'
DECL|member|test_get_network_not_found
name|'def'
name|'test_get_network_not_found'
op|'('
name|'self'
op|','
name|'get'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'fake_context'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'user'"
op|','
string|"'project'"
op|')'
newline|'\n'
name|'get'
op|'.'
name|'side_effect'
op|'='
name|'exception'
op|'.'
name|'NetworkNotFoundForUUID'
op|'('
name|'uuid'
op|'='
string|"'foo'"
op|')'
newline|'\n'
name|'uuid'
op|'='
string|"'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'"
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'NetworkNotFound'
op|','
nl|'\n'
name|'manager'
op|'.'
name|'get_network'
op|','
name|'fake_context'
op|','
name|'uuid'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.network_get_all'"
op|')'
newline|'\n'
DECL|member|test_get_all_networks
name|'def'
name|'test_get_all_networks'
op|'('
name|'self'
op|','
name|'get_all'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'fake_context'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'user'"
op|','
string|"'project'"
op|')'
newline|'\n'
name|'get_all'
op|'.'
name|'return_value'
op|'='
op|'['
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
op|'**'
name|'net'
op|')'
nl|'\n'
name|'for'
name|'net'
name|'in'
name|'networks'
op|']'
newline|'\n'
name|'output'
op|'='
name|'manager'
op|'.'
name|'get_all_networks'
op|'('
name|'fake_context'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'2'
op|','
name|'len'
op|'('
name|'networks'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'"
op|','
nl|'\n'
name|'output'
op|'['
number|'0'
op|']'
op|'['
string|"'uuid'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'"
op|','
nl|'\n'
name|'output'
op|'['
number|'1'
op|']'
op|'['
string|"'uuid'"
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.network_get_by_uuid'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.network_disassociate'"
op|')'
newline|'\n'
DECL|member|test_disassociate_network
name|'def'
name|'test_disassociate_network'
op|'('
name|'self'
op|','
name|'disassociate'
op|','
name|'get'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'disassociate'
op|'.'
name|'return_value'
op|'='
name|'True'
newline|'\n'
name|'fake_context'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'user'"
op|','
string|"'project'"
op|')'
newline|'\n'
name|'get'
op|'.'
name|'return_value'
op|'='
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
op|'**'
name|'networks'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
name|'uuid'
op|'='
string|"'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'"
newline|'\n'
name|'manager'
op|'.'
name|'disassociate_network'
op|'('
name|'fake_context'
op|','
name|'uuid'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.network_get_by_uuid'"
op|')'
newline|'\n'
DECL|member|test_disassociate_network_not_found
name|'def'
name|'test_disassociate_network_not_found'
op|'('
name|'self'
op|','
name|'get'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'fake_network'
op|'.'
name|'FakeNetworkManager'
op|'('
op|')'
newline|'\n'
name|'fake_context'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'user'"
op|','
string|"'project'"
op|')'
newline|'\n'
name|'get'
op|'.'
name|'side_effect'
op|'='
name|'exception'
op|'.'
name|'NetworkNotFoundForUUID'
op|'('
name|'uuid'
op|'='
string|"'fake'"
op|')'
newline|'\n'
name|'uuid'
op|'='
string|"'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'"
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'NetworkNotFound'
op|','
nl|'\n'
name|'manager'
op|'.'
name|'disassociate_network'
op|','
name|'fake_context'
op|','
name|'uuid'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_test_init_host_dynamic_fixed_range
dedent|''
name|'def'
name|'_test_init_host_dynamic_fixed_range'
op|'('
name|'self'
op|','
name|'net_manager'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'fake_network'
op|'='
name|'True'
op|','
nl|'\n'
name|'routing_source_ip'
op|'='
string|"'172.16.0.1'"
op|','
nl|'\n'
name|'metadata_host'
op|'='
string|"'172.16.0.1'"
op|','
nl|'\n'
name|'public_interface'
op|'='
string|"'eth1'"
op|','
nl|'\n'
name|'dmz_cidr'
op|'='
op|'['
string|"'10.0.3.0/24'"
op|']'
op|')'
newline|'\n'
name|'binary_name'
op|'='
name|'linux_net'
op|'.'
name|'get_binary_name'
op|'('
op|')'
newline|'\n'
nl|'\n'
comment|"# Stub out calls we don't want to really run, mock the db"
nl|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'linux_net'
op|'.'
name|'iptables_manager'
op|','
string|"'_apply'"
op|','
name|'lambda'
op|':'
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'floating_ips'
op|'.'
name|'FloatingIP'
op|','
string|"'init_host_floating_ips'"
op|','
nl|'\n'
name|'lambda'
op|'*'
name|'args'
op|':'
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'net_manager'
op|'.'
name|'l3driver'
op|','
string|"'initialize_gateway'"
op|','
nl|'\n'
name|'lambda'
op|'*'
name|'args'
op|':'
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'db'
op|','
string|"'network_get_all_by_host'"
op|')'
newline|'\n'
name|'fake_networks'
op|'='
op|'['
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
op|'**'
name|'n'
op|')'
nl|'\n'
name|'for'
name|'n'
name|'in'
name|'networks'
op|']'
newline|'\n'
name|'db'
op|'.'
name|'network_get_all_by_host'
op|'('
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
nl|'\n'
op|')'
op|'.'
name|'MultipleTimes'
op|'('
op|')'
op|'.'
name|'AndReturn'
op|'('
name|'fake_networks'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'net_manager'
op|'.'
name|'init_host'
op|'('
op|')'
newline|'\n'
nl|'\n'
comment|'# Get the iptables rules that got created'
nl|'\n'
name|'current_lines'
op|'='
op|'['
op|']'
newline|'\n'
name|'new_lines'
op|'='
name|'linux_net'
op|'.'
name|'iptables_manager'
op|'.'
name|'_modify_rules'
op|'('
name|'current_lines'
op|','
nl|'\n'
name|'linux_net'
op|'.'
name|'iptables_manager'
op|'.'
name|'ipv4'
op|'['
string|"'nat'"
op|']'
op|','
nl|'\n'
name|'table_name'
op|'='
string|"'nat'"
op|')'
newline|'\n'
nl|'\n'
name|'expected_lines'
op|'='
op|'['
string|"'[0:0] -A %s-snat -s %s -d 0.0.0.0/0 '"
nl|'\n'
string|"'-j SNAT --to-source %s -o %s'"
nl|'\n'
op|'%'
op|'('
name|'binary_name'
op|','
name|'networks'
op|'['
number|'0'
op|']'
op|'['
string|"'cidr'"
op|']'
op|','
nl|'\n'
name|'CONF'
op|'.'
name|'routing_source_ip'
op|','
nl|'\n'
name|'CONF'
op|'.'
name|'public_interface'
op|')'
op|','
nl|'\n'
string|"'[0:0] -A %s-POSTROUTING -s %s -d %s/32 -j ACCEPT'"
nl|'\n'
op|'%'
op|'('
name|'binary_name'
op|','
name|'networks'
op|'['
number|'0'
op|']'
op|'['
string|"'cidr'"
op|']'
op|','
nl|'\n'
name|'CONF'
op|'.'
name|'metadata_host'
op|')'
op|','
nl|'\n'
string|"'[0:0] -A %s-POSTROUTING -s %s -d %s -j ACCEPT'"
nl|'\n'
op|'%'
op|'('
name|'binary_name'
op|','
name|'networks'
op|'['
number|'0'
op|']'
op|'['
string|"'cidr'"
op|']'
op|','
nl|'\n'
name|'CONF'
op|'.'
name|'dmz_cidr'
op|'['
number|'0'
op|']'
op|')'
op|','
nl|'\n'
string|"'[0:0] -A %s-POSTROUTING -s %s -d %s -m conntrack ! '"
nl|'\n'
string|"'--ctstate DNAT -j ACCEPT'"
op|'%'
op|'('
name|'binary_name'
op|','
nl|'\n'
name|'networks'
op|'['
number|'0'
op|']'
op|'['
string|"'cidr'"
op|']'
op|','
nl|'\n'
name|'networks'
op|'['
number|'0'
op|']'
op|'['
string|"'cidr'"
op|']'
op|')'
op|','
nl|'\n'
string|"'[0:0] -A %s-snat -s %s -d 0.0.0.0/0 '"
nl|'\n'
string|"'-j SNAT --to-source %s -o %s'"
nl|'\n'
op|'%'
op|'('
name|'binary_name'
op|','
name|'networks'
op|'['
number|'1'
op|']'
op|'['
string|"'cidr'"
op|']'
op|','
nl|'\n'
name|'CONF'
op|'.'
name|'routing_source_ip'
op|','
nl|'\n'
name|'CONF'
op|'.'
name|'public_interface'
op|')'
op|','
nl|'\n'
string|"'[0:0] -A %s-POSTROUTING -s %s -d %s/32 -j ACCEPT'"
nl|'\n'
op|'%'
op|'('
name|'binary_name'
op|','
name|'networks'
op|'['
number|'1'
op|']'
op|'['
string|"'cidr'"
op|']'
op|','
nl|'\n'
name|'CONF'
op|'.'
name|'metadata_host'
op|')'
op|','
nl|'\n'
string|"'[0:0] -A %s-POSTROUTING -s %s -d %s -j ACCEPT'"
nl|'\n'
op|'%'
op|'('
name|'binary_name'
op|','
name|'networks'
op|'['
number|'1'
op|']'
op|'['
string|"'cidr'"
op|']'
op|','
nl|'\n'
name|'CONF'
op|'.'
name|'dmz_cidr'
op|'['
number|'0'
op|']'
op|')'
op|','
nl|'\n'
string|"'[0:0] -A %s-POSTROUTING -s %s -d %s -m conntrack ! '"
nl|'\n'
string|"'--ctstate DNAT -j ACCEPT'"
op|'%'
op|'('
name|'binary_name'
op|','
nl|'\n'
name|'networks'
op|'['
number|'1'
op|']'
op|'['
string|"'cidr'"
op|']'
op|','
nl|'\n'
name|'networks'
op|'['
number|'1'
op|']'
op|'['
string|"'cidr'"
op|']'
op|')'
op|']'
newline|'\n'
nl|'\n'
comment|'# Compare the expected rules against the actual ones'
nl|'\n'
name|'for'
name|'line'
name|'in'
name|'expected_lines'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertIn'
op|'('
name|'line'
op|','
name|'new_lines'
op|')'
newline|'\n'
nl|'\n'
comment|'# Add an additional network and ensure the rules get configured'
nl|'\n'
dedent|''
name|'new_network'
op|'='
op|'{'
string|"'id'"
op|':'
number|'2'
op|','
nl|'\n'
string|"'uuid'"
op|':'
name|'uuids'
op|'.'
name|'network_1'
op|','
nl|'\n'
string|"'label'"
op|':'
string|"'test2'"
op|','
nl|'\n'
string|"'injected'"
op|':'
name|'False'
op|','
nl|'\n'
string|"'multi_host'"
op|':'
name|'False'
op|','
nl|'\n'
string|"'cidr'"
op|':'
string|"'192.168.2.0/24'"
op|','
nl|'\n'
string|"'cidr_v6'"
op|':'
string|"'2001:dba::/64'"
op|','
nl|'\n'
string|"'gateway_v6'"
op|':'
string|"'2001:dba::1'"
op|','
nl|'\n'
string|"'netmask_v6'"
op|':'
string|"'64'"
op|','
nl|'\n'
string|"'netmask'"
op|':'
string|"'255.255.255.0'"
op|','
nl|'\n'
string|"'bridge'"
op|':'
string|"'fa1'"
op|','
nl|'\n'
string|"'bridge_interface'"
op|':'
string|"'fake_fa1'"
op|','
nl|'\n'
string|"'gateway'"
op|':'
string|"'192.168.2.1'"
op|','
nl|'\n'
string|"'dhcp_server'"
op|':'
string|"'192.168.2.1'"
op|','
nl|'\n'
string|"'broadcast'"
op|':'
string|"'192.168.2.255'"
op|','
nl|'\n'
string|"'dns1'"
op|':'
string|"'192.168.2.1'"
op|','
nl|'\n'
string|"'dns2'"
op|':'
string|"'192.168.2.2'"
op|','
nl|'\n'
string|"'vlan'"
op|':'
name|'None'
op|','
nl|'\n'
string|"'host'"
op|':'
name|'HOST'
op|','
nl|'\n'
string|"'project_id'"
op|':'
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
string|"'vpn_public_address'"
op|':'
string|"'192.168.2.2'"
op|','
nl|'\n'
string|"'vpn_public_port'"
op|':'
string|"'22'"
op|','
nl|'\n'
string|"'vpn_private_address'"
op|':'
string|"'10.0.0.2'"
op|'}'
newline|'\n'
name|'new_network_obj'
op|'='
name|'objects'
op|'.'
name|'Network'
op|'.'
name|'_from_db_object'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'objects'
op|'.'
name|'Network'
op|'('
op|')'
op|','
nl|'\n'
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
op|'**'
name|'new_network'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'ctxt'
op|'='
name|'context'
op|'.'
name|'get_admin_context'
op|'('
op|')'
newline|'\n'
name|'net_manager'
op|'.'
name|'_setup_network_on_host'
op|'('
name|'ctxt'
op|','
name|'new_network_obj'
op|')'
newline|'\n'
nl|'\n'
comment|'# Get the new iptables rules that got created from adding a new network'
nl|'\n'
name|'current_lines'
op|'='
op|'['
op|']'
newline|'\n'
name|'new_lines'
op|'='
name|'linux_net'
op|'.'
name|'iptables_manager'
op|'.'
name|'_modify_rules'
op|'('
name|'current_lines'
op|','
nl|'\n'
name|'linux_net'
op|'.'
name|'iptables_manager'
op|'.'
name|'ipv4'
op|'['
string|"'nat'"
op|']'
op|','
nl|'\n'
name|'table_name'
op|'='
string|"'nat'"
op|')'
newline|'\n'
nl|'\n'
comment|'# Add the new expected rules to the old ones'
nl|'\n'
name|'expected_lines'
op|'+='
op|'['
string|"'[0:0] -A %s-snat -s %s -d 0.0.0.0/0 '"
nl|'\n'
string|"'-j SNAT --to-source %s -o %s'"
nl|'\n'
op|'%'
op|'('
name|'binary_name'
op|','
name|'new_network'
op|'['
string|"'cidr'"
op|']'
op|','
nl|'\n'
name|'CONF'
op|'.'
name|'routing_source_ip'
op|','
nl|'\n'
name|'CONF'
op|'.'
name|'public_interface'
op|')'
op|','
nl|'\n'
string|"'[0:0] -A %s-POSTROUTING -s %s -d %s/32 -j ACCEPT'"
nl|'\n'
op|'%'
op|'('
name|'binary_name'
op|','
name|'new_network'
op|'['
string|"'cidr'"
op|']'
op|','
nl|'\n'
name|'CONF'
op|'.'
name|'metadata_host'
op|')'
op|','
nl|'\n'
string|"'[0:0] -A %s-POSTROUTING -s %s -d %s -j ACCEPT'"
nl|'\n'
op|'%'
op|'('
name|'binary_name'
op|','
name|'new_network'
op|'['
string|"'cidr'"
op|']'
op|','
nl|'\n'
name|'CONF'
op|'.'
name|'dmz_cidr'
op|'['
number|'0'
op|']'
op|')'
op|','
nl|'\n'
string|"'[0:0] -A %s-POSTROUTING -s %s -d %s -m conntrack '"
nl|'\n'
string|"'! --ctstate DNAT -j ACCEPT'"
op|'%'
op|'('
name|'binary_name'
op|','
nl|'\n'
name|'new_network'
op|'['
string|"'cidr'"
op|']'
op|','
nl|'\n'
name|'new_network'
op|'['
string|"'cidr'"
op|']'
op|')'
op|']'
newline|'\n'
nl|'\n'
comment|'# Compare the expected rules (with new network) against the actual ones'
nl|'\n'
name|'for'
name|'line'
name|'in'
name|'expected_lines'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertIn'
op|'('
name|'line'
op|','
name|'new_lines'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_flatdhcpmanager_dynamic_fixed_range
dedent|''
dedent|''
name|'def'
name|'test_flatdhcpmanager_dynamic_fixed_range'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Test FlatDHCPManager NAT rules for fixed_range."""'
newline|'\n'
comment|'# Set the network manager'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'='
name|'network_manager'
op|'.'
name|'FlatDHCPManager'
op|'('
name|'host'
op|'='
name|'HOST'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|'='
name|'db'
newline|'\n'
nl|'\n'
comment|'# Test new behavior:'
nl|'\n'
comment|'# CONF.fixed_range is not set, defaults to None'
nl|'\n'
comment|'# Determine networks to NAT based on lookup'
nl|'\n'
name|'self'
op|'.'
name|'_test_init_host_dynamic_fixed_range'
op|'('
name|'self'
op|'.'
name|'network'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_vlanmanager_dynamic_fixed_range
dedent|''
name|'def'
name|'test_vlanmanager_dynamic_fixed_range'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Test VlanManager NAT rules for fixed_range."""'
newline|'\n'
comment|'# Set the network manager'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'='
name|'network_manager'
op|'.'
name|'VlanManager'
op|'('
name|'host'
op|'='
name|'HOST'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|'='
name|'db'
newline|'\n'
nl|'\n'
comment|'# Test new behavior:'
nl|'\n'
comment|'# CONF.fixed_range is not set, defaults to None'
nl|'\n'
comment|'# Determine networks to NAT based on lookup'
nl|'\n'
name|'self'
op|'.'
name|'_test_init_host_dynamic_fixed_range'
op|'('
name|'self'
op|'.'
name|'network'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.quotas.Quotas.rollback'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.fixed_ip.FixedIP.get_by_address'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.network.manager.NetworkManager.'"
nl|'\n'
string|"'_do_trigger_security_group_members_refresh_for_instance'"
op|')'
newline|'\n'
DECL|member|test_fixed_ip_cleanup_rollback
name|'def'
name|'test_fixed_ip_cleanup_rollback'
op|'('
name|'self'
op|','
name|'fake_trig'
op|','
nl|'\n'
name|'fixed_get'
op|','
name|'rollback'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'network_manager'
op|'.'
name|'NetworkManager'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'fake_trig'
op|'.'
name|'side_effect'
op|'='
name|'test'
op|'.'
name|'TestingException'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'test'
op|'.'
name|'TestingException'
op|','
nl|'\n'
name|'manager'
op|'.'
name|'deallocate_fixed_ip'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
string|"'fake'"
op|','
string|"'fake'"
op|','
nl|'\n'
name|'instance'
op|'='
name|'fake_inst'
op|'('
name|'uuid'
op|'='
name|'uuids'
op|'.'
name|'non_existent_uuid'
op|')'
op|')'
newline|'\n'
name|'rollback'
op|'.'
name|'assert_called_once_with'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_fixed_cidr_out_of_range
dedent|''
name|'def'
name|'test_fixed_cidr_out_of_range'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'manager'
op|'='
name|'network_manager'
op|'.'
name|'NetworkManager'
op|'('
op|')'
newline|'\n'
name|'ctxt'
op|'='
name|'context'
op|'.'
name|'get_admin_context'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'AddressOutOfRange'
op|','
nl|'\n'
name|'manager'
op|'.'
name|'create_networks'
op|','
name|'ctxt'
op|','
name|'label'
op|'='
string|'"fake"'
op|','
nl|'\n'
name|'cidr'
op|'='
string|"'10.1.0.0/24'"
op|','
name|'fixed_cidr'
op|'='
string|"'10.1.1.0/25'"
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
dedent|''
dedent|''
name|'class'
name|'TestRPCFixedManager'
op|'('
name|'network_manager'
op|'.'
name|'RPCAllocateFixedIP'
op|','
nl|'\n'
DECL|class|TestRPCFixedManager
name|'network_manager'
op|'.'
name|'NetworkManager'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Dummy manager that implements RPCAllocateFixedIP."""'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|RPCAllocateTestCase
dedent|''
name|'class'
name|'RPCAllocateTestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Tests nova.network.manager.RPCAllocateFixedIP."""'
newline|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'RPCAllocateTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'rpc_fixed'
op|'='
name|'TestRPCFixedManager'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'context'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'fake'"
op|','
string|"'fake'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_rpc_allocate
dedent|''
name|'def'
name|'test_rpc_allocate'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Test to verify bug 855030 doesn\'t resurface.\n\n Mekes sure _rpc_allocate_fixed_ip returns a value so the call\n returns properly and the greenpool completes.\n """'
newline|'\n'
name|'address'
op|'='
string|"'10.10.10.10'"
newline|'\n'
nl|'\n'
DECL|function|fake_allocate
name|'def'
name|'fake_allocate'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'address'
newline|'\n'
nl|'\n'
DECL|function|fake_network_get
dedent|''
name|'def'
name|'fake_network_get'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'test_network'
op|'.'
name|'fake_network'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'rpc_fixed'
op|','
string|"'allocate_fixed_ip'"
op|','
name|'fake_allocate'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'rpc_fixed'
op|'.'
name|'db'
op|','
string|"'network_get'"
op|','
name|'fake_network_get'
op|')'
newline|'\n'
name|'rval'
op|'='
name|'self'
op|'.'
name|'rpc_fixed'
op|'.'
name|'_rpc_allocate_fixed_ip'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
string|"'fake_instance'"
op|','
nl|'\n'
string|"'fake_network'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'address'
op|','
name|'rval'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
dedent|''
dedent|''
name|'class'
name|'TestFloatingIPManager'
op|'('
name|'floating_ips'
op|'.'
name|'FloatingIP'
op|','
nl|'\n'
DECL|class|TestFloatingIPManager
name|'network_manager'
op|'.'
name|'NetworkManager'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Dummy manager that implements FloatingIP."""'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|AllocateTestCase
dedent|''
name|'class'
name|'AllocateTestCase'
op|'('
name|'test'
op|'.'
name|'TestCase'
op|')'
op|':'
newline|'\n'
nl|'\n'
DECL|variable|REQUIRES_LOCKING
indent|' '
name|'REQUIRES_LOCKING'
op|'='
name|'True'
newline|'\n'
nl|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'AllocateTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'dns'
op|'='
string|"'nova.network.noop_dns_driver.NoopDNSDriver'"
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'instance_dns_manager'
op|'='
name|'dns'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'useFixture'
op|'('
name|'test'
op|'.'
name|'SampleNetworks'
op|'('
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'='
name|'network_manager'
op|'.'
name|'VlanManager'
op|'('
name|'host'
op|'='
name|'HOST'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'user_id'
op|'='
name|'fakes'
op|'.'
name|'FAKE_USER_ID'
newline|'\n'
name|'self'
op|'.'
name|'project_id'
op|'='
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
newline|'\n'
name|'self'
op|'.'
name|'context'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
name|'self'
op|'.'
name|'user_id'
op|','
nl|'\n'
name|'self'
op|'.'
name|'project_id'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'True'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'user_context'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'testuser'"
op|','
nl|'\n'
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_allocate_for_instance
dedent|''
name|'def'
name|'test_allocate_for_instance'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'address'
op|'='
string|'"10.10.10.10"'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'auto_assign_floating_ip'
op|'='
name|'True'
op|')'
newline|'\n'
nl|'\n'
name|'db'
op|'.'
name|'floating_ip_create'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
op|'{'
string|"'address'"
op|':'
name|'address'
op|','
nl|'\n'
string|"'pool'"
op|':'
string|"'nova'"
op|'}'
op|')'
newline|'\n'
name|'inst'
op|'='
name|'objects'
op|'.'
name|'Instance'
op|'('
name|'context'
op|'='
name|'self'
op|'.'
name|'context'
op|')'
newline|'\n'
name|'inst'
op|'.'
name|'host'
op|'='
name|'HOST'
newline|'\n'
name|'inst'
op|'.'
name|'display_name'
op|'='
name|'HOST'
newline|'\n'
name|'inst'
op|'.'
name|'instance_type_id'
op|'='
number|'1'
newline|'\n'
name|'inst'
op|'.'
name|'uuid'
op|'='
name|'FAKEUUID'
newline|'\n'
name|'inst'
op|'.'
name|'create'
op|'('
op|')'
newline|'\n'
name|'networks'
op|'='
name|'db'
op|'.'
name|'network_get_all'
op|'('
name|'self'
op|'.'
name|'context'
op|')'
newline|'\n'
name|'for'
name|'network'
name|'in'
name|'networks'
op|':'
newline|'\n'
indent|' '
name|'db'
op|'.'
name|'network_update'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'network'
op|'['
string|"'id'"
op|']'
op|','
nl|'\n'
op|'{'
string|"'host'"
op|':'
name|'HOST'
op|'}'
op|')'
newline|'\n'
dedent|''
name|'project_id'
op|'='
name|'self'
op|'.'
name|'user_context'
op|'.'
name|'project_id'
newline|'\n'
name|'nw_info'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'allocate_for_instance'
op|'('
name|'self'
op|'.'
name|'user_context'
op|','
nl|'\n'
name|'instance_id'
op|'='
name|'inst'
op|'['
string|"'id'"
op|']'
op|','
name|'instance_uuid'
op|'='
name|'inst'
op|'['
string|"'uuid'"
op|']'
op|','
nl|'\n'
name|'host'
op|'='
name|'inst'
op|'['
string|"'host'"
op|']'
op|','
name|'vpn'
op|'='
name|'None'
op|','
name|'rxtx_factor'
op|'='
number|'3'
op|','
nl|'\n'
name|'project_id'
op|'='
name|'project_id'
op|','
name|'macs'
op|'='
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'len'
op|'('
name|'nw_info'
op|')'
op|')'
newline|'\n'
name|'fixed_ip'
op|'='
name|'nw_info'
op|'.'
name|'fixed_ips'
op|'('
op|')'
op|'['
number|'0'
op|']'
op|'['
string|"'address'"
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'netutils'
op|'.'
name|'is_valid_ipv4'
op|'('
name|'fixed_ip'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'deallocate_for_instance'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instance'
op|'='
name|'inst'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_allocate_for_instance_illegal_network
dedent|''
name|'def'
name|'test_allocate_for_instance_illegal_network'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'networks'
op|'='
name|'db'
op|'.'
name|'network_get_all'
op|'('
name|'self'
op|'.'
name|'context'
op|')'
newline|'\n'
name|'requested_networks'
op|'='
op|'['
op|']'
newline|'\n'
name|'for'
name|'network'
name|'in'
name|'networks'
op|':'
newline|'\n'
comment|'# set all networks to other projects'
nl|'\n'
indent|' '
name|'db'
op|'.'
name|'network_update'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'network'
op|'['
string|"'id'"
op|']'
op|','
nl|'\n'
op|'{'
string|"'host'"
op|':'
name|'HOST'
op|','
nl|'\n'
string|"'project_id'"
op|':'
string|"'otherid'"
op|'}'
op|')'
newline|'\n'
name|'requested_networks'
op|'.'
name|'append'
op|'('
op|'('
name|'network'
op|'['
string|"'uuid'"
op|']'
op|','
name|'None'
op|')'
op|')'
newline|'\n'
comment|'# set the first network to our project'
nl|'\n'
dedent|''
name|'db'
op|'.'
name|'network_update'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'networks'
op|'['
number|'0'
op|']'
op|'['
string|"'id'"
op|']'
op|','
nl|'\n'
op|'{'
string|"'project_id'"
op|':'
name|'self'
op|'.'
name|'user_context'
op|'.'
name|'project_id'
op|'}'
op|')'
newline|'\n'
nl|'\n'
name|'inst'
op|'='
name|'objects'
op|'.'
name|'Instance'
op|'('
name|'context'
op|'='
name|'self'
op|'.'
name|'context'
op|')'
newline|'\n'
name|'inst'
op|'.'
name|'host'
op|'='
name|'HOST'
newline|'\n'
name|'inst'
op|'.'
name|'display_name'
op|'='
name|'HOST'
newline|'\n'
name|'inst'
op|'.'
name|'instance_type_id'
op|'='
number|'1'
newline|'\n'
name|'inst'
op|'.'
name|'uuid'
op|'='
name|'FAKEUUID'
newline|'\n'
name|'inst'
op|'.'
name|'create'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'NetworkNotFoundForProject'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'allocate_for_instance'
op|','
name|'self'
op|'.'
name|'user_context'
op|','
nl|'\n'
name|'instance_id'
op|'='
name|'inst'
op|'['
string|"'id'"
op|']'
op|','
name|'instance_uuid'
op|'='
name|'inst'
op|'['
string|"'uuid'"
op|']'
op|','
nl|'\n'
name|'host'
op|'='
name|'inst'
op|'['
string|"'host'"
op|']'
op|','
name|'vpn'
op|'='
name|'None'
op|','
name|'rxtx_factor'
op|'='
number|'3'
op|','
nl|'\n'
name|'project_id'
op|'='
name|'self'
op|'.'
name|'context'
op|'.'
name|'project_id'
op|','
name|'macs'
op|'='
name|'None'
op|','
nl|'\n'
name|'requested_networks'
op|'='
name|'requested_networks'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_allocate_for_instance_with_mac
dedent|''
name|'def'
name|'test_allocate_for_instance_with_mac'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'available_macs'
op|'='
name|'set'
op|'('
op|'['
string|"'ca:fe:de:ad:be:ef'"
op|']'
op|')'
newline|'\n'
name|'inst'
op|'='
name|'db'
op|'.'
name|'instance_create'
op|'('
name|'self'
op|'.'
name|'context'
op|','
op|'{'
string|"'host'"
op|':'
name|'HOST'
op|','
nl|'\n'
string|"'display_name'"
op|':'
name|'HOST'
op|','
nl|'\n'
string|"'instance_type_id'"
op|':'
number|'1'
op|'}'
op|')'
newline|'\n'
name|'networks'
op|'='
name|'db'
op|'.'
name|'network_get_all'
op|'('
name|'self'
op|'.'
name|'context'
op|')'
newline|'\n'
name|'for'
name|'network'
name|'in'
name|'networks'
op|':'
newline|'\n'
indent|' '
name|'db'
op|'.'
name|'network_update'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'network'
op|'['
string|"'id'"
op|']'
op|','
nl|'\n'
op|'{'
string|"'host'"
op|':'
name|'HOST'
op|'}'
op|')'
newline|'\n'
dedent|''
name|'project_id'
op|'='
name|'self'
op|'.'
name|'context'
op|'.'
name|'project_id'
newline|'\n'
name|'nw_info'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'allocate_for_instance'
op|'('
name|'self'
op|'.'
name|'user_context'
op|','
nl|'\n'
name|'instance_id'
op|'='
name|'inst'
op|'['
string|"'id'"
op|']'
op|','
name|'instance_uuid'
op|'='
name|'inst'
op|'['
string|"'uuid'"
op|']'
op|','
nl|'\n'
name|'host'
op|'='
name|'inst'
op|'['
string|"'host'"
op|']'
op|','
name|'vpn'
op|'='
name|'None'
op|','
name|'rxtx_factor'
op|'='
number|'3'
op|','
nl|'\n'
name|'project_id'
op|'='
name|'project_id'
op|','
name|'macs'
op|'='
name|'available_macs'
op|')'
newline|'\n'
name|'assigned_macs'
op|'='
op|'['
name|'vif'
op|'['
string|"'address'"
op|']'
name|'for'
name|'vif'
name|'in'
name|'nw_info'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'len'
op|'('
name|'assigned_macs'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'available_macs'
op|'.'
name|'pop'
op|'('
op|')'
op|','
name|'assigned_macs'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'deallocate_for_instance'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instance_id'
op|'='
name|'inst'
op|'['
string|"'id'"
op|']'
op|','
nl|'\n'
name|'host'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'host'
op|','
nl|'\n'
name|'project_id'
op|'='
name|'project_id'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_allocate_for_instance_not_enough_macs
dedent|''
name|'def'
name|'test_allocate_for_instance_not_enough_macs'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'available_macs'
op|'='
name|'set'
op|'('
op|')'
newline|'\n'
name|'inst'
op|'='
name|'db'
op|'.'
name|'instance_create'
op|'('
name|'self'
op|'.'
name|'context'
op|','
op|'{'
string|"'host'"
op|':'
name|'HOST'
op|','
nl|'\n'
string|"'display_name'"
op|':'
name|'HOST'
op|','
nl|'\n'
string|"'instance_type_id'"
op|':'
number|'1'
op|'}'
op|')'
newline|'\n'
name|'networks'
op|'='
name|'db'
op|'.'
name|'network_get_all'
op|'('
name|'self'
op|'.'
name|'context'
op|')'
newline|'\n'
name|'for'
name|'network'
name|'in'
name|'networks'
op|':'
newline|'\n'
indent|' '
name|'db'
op|'.'
name|'network_update'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'network'
op|'['
string|"'id'"
op|']'
op|','
nl|'\n'
op|'{'
string|"'host'"
op|':'
name|'self'
op|'.'
name|'network'
op|'.'
name|'host'
op|'}'
op|')'
newline|'\n'
dedent|''
name|'project_id'
op|'='
name|'self'
op|'.'
name|'context'
op|'.'
name|'project_id'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'VirtualInterfaceCreateException'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'allocate_for_instance'
op|','
nl|'\n'
name|'self'
op|'.'
name|'user_context'
op|','
nl|'\n'
name|'instance_id'
op|'='
name|'inst'
op|'['
string|"'id'"
op|']'
op|','
name|'instance_uuid'
op|'='
name|'inst'
op|'['
string|"'uuid'"
op|']'
op|','
nl|'\n'
name|'host'
op|'='
name|'inst'
op|'['
string|"'host'"
op|']'
op|','
name|'vpn'
op|'='
name|'None'
op|','
name|'rxtx_factor'
op|'='
number|'3'
op|','
nl|'\n'
name|'project_id'
op|'='
name|'project_id'
op|','
name|'macs'
op|'='
name|'available_macs'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|FloatingIPTestCase
dedent|''
dedent|''
name|'class'
name|'FloatingIPTestCase'
op|'('
name|'test'
op|'.'
name|'TestCase'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Tests nova.network.manager.FloatingIP."""'
newline|'\n'
nl|'\n'
DECL|variable|REQUIRES_LOCKING
name|'REQUIRES_LOCKING'
op|'='
name|'True'
newline|'\n'
nl|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'FloatingIPTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'tempdir'
op|'='
name|'self'
op|'.'
name|'useFixture'
op|'('
name|'fixtures'
op|'.'
name|'TempDir'
op|'('
op|')'
op|')'
op|'.'
name|'path'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'log_dir'
op|'='
name|'self'
op|'.'
name|'tempdir'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'='
name|'TestFloatingIPManager'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|'='
name|'db'
newline|'\n'
name|'self'
op|'.'
name|'project_id'
op|'='
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
newline|'\n'
name|'self'
op|'.'
name|'context'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'testuser'"
op|','
name|'self'
op|'.'
name|'project_id'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.fixed_ip_get'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.network_get'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.instance_get_by_uuid'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.service_get_by_host_and_binary'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.floating_ip_get_by_address'"
op|')'
newline|'\n'
DECL|member|test_disassociate_floating_ip_multi_host_calls
name|'def'
name|'test_disassociate_floating_ip_multi_host_calls'
op|'('
name|'self'
op|','
name|'floating_get'
op|','
nl|'\n'
name|'service_get'
op|','
nl|'\n'
name|'inst_get'
op|','
name|'net_get'
op|','
nl|'\n'
name|'fixed_get'
op|')'
op|':'
newline|'\n'
indent|' '
name|'floating_ip'
op|'='
name|'dict'
op|'('
name|'test_floating_ip'
op|'.'
name|'fake_floating_ip'
op|','
nl|'\n'
name|'fixed_ip_id'
op|'='
number|'12'
op|')'
newline|'\n'
nl|'\n'
name|'fixed_ip'
op|'='
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
nl|'\n'
name|'network_id'
op|'='
name|'None'
op|','
nl|'\n'
name|'instance_uuid'
op|'='
name|'uuids'
op|'.'
name|'instance'
op|')'
newline|'\n'
nl|'\n'
name|'network'
op|'='
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
name|'multi_host'
op|'='
name|'True'
op|')'
newline|'\n'
nl|'\n'
name|'instance'
op|'='
name|'dict'
op|'('
name|'fake_instance'
op|'.'
name|'fake_db_instance'
op|'('
name|'host'
op|'='
string|"'some-other-host'"
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'ctxt'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'testuser'"
op|','
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|','
nl|'\n'
string|"'_floating_ip_owned_by_project'"
op|','
nl|'\n'
name|'lambda'
name|'_x'
op|','
name|'_y'
op|':'
name|'True'
op|')'
newline|'\n'
nl|'\n'
name|'floating_get'
op|'.'
name|'return_value'
op|'='
name|'floating_ip'
newline|'\n'
name|'fixed_get'
op|'.'
name|'return_value'
op|'='
name|'fixed_ip'
newline|'\n'
name|'net_get'
op|'.'
name|'return_value'
op|'='
name|'network'
newline|'\n'
name|'inst_get'
op|'.'
name|'return_value'
op|'='
name|'instance'
newline|'\n'
name|'service_get'
op|'.'
name|'return_value'
op|'='
name|'test_service'
op|'.'
name|'fake_service'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'servicegroup_api'
op|','
nl|'\n'
string|"'service_is_up'"
op|','
nl|'\n'
name|'lambda'
name|'_x'
op|':'
name|'True'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'network_rpcapi'
op|','
string|"'_disassociate_floating_ip'"
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'network_rpcapi'
op|'.'
name|'_disassociate_floating_ip'
op|'('
nl|'\n'
name|'ctxt'
op|','
string|"'fl_ip'"
op|','
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
string|"'some-other-host'"
op|','
nl|'\n'
name|'uuids'
op|'.'
name|'instance'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'disassociate_floating_ip'
op|'('
name|'ctxt'
op|','
string|"'fl_ip'"
op|','
name|'True'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.fixed_ip_get_by_address'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.network_get'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.instance_get_by_uuid'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.floating_ip_get_by_address'"
op|')'
newline|'\n'
DECL|member|test_associate_floating_ip_multi_host_calls
name|'def'
name|'test_associate_floating_ip_multi_host_calls'
op|'('
name|'self'
op|','
name|'floating_get'
op|','
nl|'\n'
name|'inst_get'
op|','
name|'net_get'
op|','
nl|'\n'
name|'fixed_get'
op|')'
op|':'
newline|'\n'
indent|' '
name|'floating_ip'
op|'='
name|'dict'
op|'('
name|'test_floating_ip'
op|'.'
name|'fake_floating_ip'
op|','
nl|'\n'
name|'fixed_ip_id'
op|'='
name|'None'
op|')'
newline|'\n'
nl|'\n'
name|'fixed_ip'
op|'='
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
nl|'\n'
name|'network_id'
op|'='
name|'None'
op|','
nl|'\n'
name|'instance_uuid'
op|'='
name|'uuids'
op|'.'
name|'instance'
op|')'
newline|'\n'
nl|'\n'
name|'network'
op|'='
name|'dict'
op|'('
name|'test_network'
op|'.'
name|'fake_network'
op|','
nl|'\n'
name|'multi_host'
op|'='
name|'True'
op|')'
newline|'\n'
nl|'\n'
name|'instance'
op|'='
name|'dict'
op|'('
name|'fake_instance'
op|'.'
name|'fake_db_instance'
op|'('
name|'host'
op|'='
string|"'some-other-host'"
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'ctxt'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'testuser'"
op|','
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|','
nl|'\n'
string|"'_floating_ip_owned_by_project'"
op|','
nl|'\n'
name|'lambda'
name|'_x'
op|','
name|'_y'
op|':'
name|'True'
op|')'
newline|'\n'
nl|'\n'
name|'floating_get'
op|'.'
name|'return_value'
op|'='
name|'floating_ip'
newline|'\n'
name|'fixed_get'
op|'.'
name|'return_value'
op|'='
name|'fixed_ip'
newline|'\n'
name|'net_get'
op|'.'
name|'return_value'
op|'='
name|'network'
newline|'\n'
name|'inst_get'
op|'.'
name|'return_value'
op|'='
name|'instance'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'network_rpcapi'
op|','
string|"'_associate_floating_ip'"
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'network_rpcapi'
op|'.'
name|'_associate_floating_ip'
op|'('
nl|'\n'
name|'ctxt'
op|','
string|"'fl_ip'"
op|','
string|"'fix_ip'"
op|','
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
string|"'some-other-host'"
op|','
nl|'\n'
name|'uuids'
op|'.'
name|'instance'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'associate_floating_ip'
op|'('
name|'ctxt'
op|','
string|"'fl_ip'"
op|','
string|"'fix_ip'"
op|','
name|'True'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_double_deallocation
dedent|''
name|'def'
name|'test_double_deallocation'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'instance_ref'
op|'='
name|'db'
op|'.'
name|'instance_create'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
op|'{'
string|'"project_id"'
op|':'
name|'self'
op|'.'
name|'project_id'
op|'}'
op|')'
newline|'\n'
comment|'# Run it twice to make it fault if it does not handle'
nl|'\n'
comment|'# instances without fixed networks'
nl|'\n'
comment|'# If this fails in either, it does not handle having no addresses'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'deallocate_for_instance'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instance_id'
op|'='
name|'instance_ref'
op|'['
string|"'id'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'deallocate_for_instance'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instance_id'
op|'='
name|'instance_ref'
op|'['
string|"'id'"
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_deallocate_floating_ip_quota_rollback
dedent|''
name|'def'
name|'test_deallocate_floating_ip_quota_rollback'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ctxt'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'testuser'"
op|','
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake
name|'def'
name|'fake'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'dict'
op|'('
name|'test_floating_ip'
op|'.'
name|'fake_floating_ip'
op|','
nl|'\n'
name|'address'
op|'='
string|"'10.0.0.1'"
op|','
name|'fixed_ip_id'
op|'='
name|'None'
op|','
nl|'\n'
name|'project_id'
op|'='
name|'ctxt'
op|'.'
name|'project_id'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
string|"'floating_ip_get_by_address'"
op|','
name|'fake'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'db'
op|','
string|"'floating_ip_deallocate'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'self'
op|'.'
name|'network'
op|','
nl|'\n'
string|"'_floating_ip_owned_by_project'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'quota'
op|'.'
name|'QUOTAS'
op|','
string|"'reserve'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'quota'
op|'.'
name|'QUOTAS'
op|','
string|"'rollback'"
op|')'
newline|'\n'
name|'quota'
op|'.'
name|'QUOTAS'
op|'.'
name|'reserve'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'floating_ips'
op|'='
op|'-'
number|'1'
op|','
nl|'\n'
name|'project_id'
op|'='
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
nl|'\n'
op|')'
op|'.'
name|'AndReturn'
op|'('
string|"'fake-rsv'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'_floating_ip_owned_by_project'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|')'
newline|'\n'
name|'db'
op|'.'
name|'floating_ip_deallocate'
op|'('
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|')'
op|'.'
name|'AndReturn'
op|'('
name|'None'
op|')'
newline|'\n'
name|'quota'
op|'.'
name|'QUOTAS'
op|'.'
name|'rollback'
op|'('
name|'self'
op|'.'
name|'context'
op|','
string|"'fake-rsv'"
op|','
nl|'\n'
name|'project_id'
op|'='
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'deallocate_floating_ip'
op|'('
name|'self'
op|'.'
name|'context'
op|','
string|"'10.0.0.1'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_deallocation_deleted_instance
dedent|''
name|'def'
name|'test_deallocation_deleted_instance'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|','
string|"'_teardown_network_on_host'"
op|','
nl|'\n'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'None'
op|')'
newline|'\n'
name|'instance'
op|'='
name|'objects'
op|'.'
name|'Instance'
op|'('
name|'context'
op|'='
name|'self'
op|'.'
name|'context'
op|')'
newline|'\n'
name|'instance'
op|'.'
name|'project_id'
op|'='
name|'self'
op|'.'
name|'project_id'
newline|'\n'
name|'instance'
op|'.'
name|'deleted'
op|'='
name|'True'
newline|'\n'
name|'instance'
op|'.'
name|'create'
op|'('
op|')'
newline|'\n'
name|'network'
op|'='
name|'db'
op|'.'
name|'network_create_safe'
op|'('
name|'self'
op|'.'
name|'context'
op|'.'
name|'elevated'
op|'('
op|')'
op|','
op|'{'
nl|'\n'
string|"'project_id'"
op|':'
name|'self'
op|'.'
name|'project_id'
op|','
nl|'\n'
string|"'host'"
op|':'
name|'CONF'
op|'.'
name|'host'
op|','
nl|'\n'
string|"'label'"
op|':'
string|"'foo'"
op|'}'
op|')'
newline|'\n'
name|'fixed'
op|'='
name|'db'
op|'.'
name|'fixed_ip_create'
op|'('
name|'self'
op|'.'
name|'context'
op|','
op|'{'
string|"'allocated'"
op|':'
name|'True'
op|','
nl|'\n'
string|"'instance_uuid'"
op|':'
name|'instance'
op|'.'
name|'uuid'
op|','
string|"'address'"
op|':'
string|"'10.1.1.1'"
op|','
nl|'\n'
string|"'network_id'"
op|':'
name|'network'
op|'['
string|"'id'"
op|']'
op|'}'
op|')'
newline|'\n'
name|'db'
op|'.'
name|'floating_ip_create'
op|'('
name|'self'
op|'.'
name|'context'
op|','
op|'{'
nl|'\n'
string|"'address'"
op|':'
string|"'10.10.10.10'"
op|','
string|"'instance_uuid'"
op|':'
name|'instance'
op|'.'
name|'uuid'
op|','
nl|'\n'
string|"'fixed_ip_id'"
op|':'
name|'fixed'
op|'['
string|"'id'"
op|']'
op|','
nl|'\n'
string|"'project_id'"
op|':'
name|'self'
op|'.'
name|'project_id'
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'deallocate_for_instance'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'instance'
op|'='
name|'instance'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_deallocation_duplicate_floating_ip
dedent|''
name|'def'
name|'test_deallocation_duplicate_floating_ip'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|','
string|"'_teardown_network_on_host'"
op|','
nl|'\n'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'None'
op|')'
newline|'\n'
name|'instance'
op|'='
name|'objects'
op|'.'
name|'Instance'
op|'('
name|'context'
op|'='
name|'self'
op|'.'
name|'context'
op|')'
newline|'\n'
name|'instance'
op|'.'
name|'project_id'
op|'='
name|'self'
op|'.'
name|'project_id'
newline|'\n'
name|'instance'
op|'.'
name|'create'
op|'('
op|')'
newline|'\n'
name|'network'
op|'='
name|'db'
op|'.'
name|'network_create_safe'
op|'('
name|'self'
op|'.'
name|'context'
op|'.'
name|'elevated'
op|'('
op|')'
op|','
op|'{'
nl|'\n'
string|"'project_id'"
op|':'
name|'self'
op|'.'
name|'project_id'
op|','
nl|'\n'
string|"'host'"
op|':'
name|'CONF'
op|'.'
name|'host'
op|','
nl|'\n'
string|"'label'"
op|':'
string|"'foo'"
op|'}'
op|')'
newline|'\n'
name|'fixed'
op|'='
name|'db'
op|'.'
name|'fixed_ip_create'
op|'('
name|'self'
op|'.'
name|'context'
op|','
op|'{'
string|"'allocated'"
op|':'
name|'True'
op|','
nl|'\n'
string|"'instance_uuid'"
op|':'
name|'instance'
op|'.'
name|'uuid'
op|','
string|"'address'"
op|':'
string|"'10.1.1.1'"
op|','
nl|'\n'
string|"'network_id'"
op|':'
name|'network'
op|'['
string|"'id'"
op|']'
op|'}'
op|')'
newline|'\n'
name|'db'
op|'.'
name|'floating_ip_create'
op|'('
name|'self'
op|'.'
name|'context'
op|','
op|'{'
nl|'\n'
string|"'address'"
op|':'
string|"'10.10.10.10'"
op|','
nl|'\n'
string|"'deleted'"
op|':'
name|'True'
op|'}'
op|')'
newline|'\n'
name|'db'
op|'.'
name|'floating_ip_create'
op|'('
name|'self'
op|'.'
name|'context'
op|','
op|'{'
nl|'\n'
string|"'address'"
op|':'
string|"'10.10.10.10'"
op|','
string|"'instance_uuid'"
op|':'
name|'instance'
op|'.'
name|'uuid'
op|','
nl|'\n'
string|"'fixed_ip_id'"
op|':'
name|'fixed'
op|'['
string|"'id'"
op|']'
op|','
nl|'\n'
string|"'project_id'"
op|':'
name|'self'
op|'.'
name|'project_id'
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'deallocate_for_instance'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'instance'
op|'='
name|'instance'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.fixed_ip_get'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.floating_ip_get_by_address'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.floating_ip_update'"
op|')'
newline|'\n'
DECL|member|test_migrate_instance_start
name|'def'
name|'test_migrate_instance_start'
op|'('
name|'self'
op|','
name|'floating_update'
op|','
name|'floating_get'
op|','
nl|'\n'
name|'fixed_get'
op|')'
op|':'
newline|'\n'
indent|' '
name|'called'
op|'='
op|'{'
string|"'count'"
op|':'
number|'0'
op|'}'
newline|'\n'
nl|'\n'
DECL|function|fake_floating_ip_get_by_address
name|'def'
name|'fake_floating_ip_get_by_address'
op|'('
name|'context'
op|','
name|'address'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'dict'
op|'('
name|'test_floating_ip'
op|'.'
name|'fake_floating_ip'
op|','
nl|'\n'
name|'address'
op|'='
name|'address'
op|','
nl|'\n'
name|'fixed_ip_id'
op|'='
number|'0'
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake_is_stale_floating_ip_address
dedent|''
name|'def'
name|'fake_is_stale_floating_ip_address'
op|'('
name|'context'
op|','
name|'floating_ip'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'str'
op|'('
name|'floating_ip'
op|'.'
name|'address'
op|')'
op|'=='
string|"'172.24.4.23'"
newline|'\n'
nl|'\n'
dedent|''
name|'floating_get'
op|'.'
name|'side_effect'
op|'='
name|'fake_floating_ip_get_by_address'
newline|'\n'
name|'fixed_get'
op|'.'
name|'return_value'
op|'='
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
nl|'\n'
name|'instance_uuid'
op|'='
name|'uuids'
op|'.'
name|'instance'
op|','
nl|'\n'
name|'address'
op|'='
string|"'10.0.0.2'"
op|','
nl|'\n'
name|'network'
op|'='
name|'test_network'
op|'.'
name|'fake_network'
op|')'
newline|'\n'
name|'floating_update'
op|'.'
name|'return_value'
op|'='
name|'fake_floating_ip_get_by_address'
op|'('
nl|'\n'
name|'None'
op|','
string|"'1.2.3.4'"
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake_remove_floating_ip
name|'def'
name|'fake_remove_floating_ip'
op|'('
name|'floating_addr'
op|','
name|'fixed_addr'
op|','
name|'interface'
op|','
nl|'\n'
name|'network'
op|')'
op|':'
newline|'\n'
indent|' '
name|'called'
op|'['
string|"'count'"
op|']'
op|'+='
number|'1'
newline|'\n'
nl|'\n'
DECL|function|fake_clean_conntrack
dedent|''
name|'def'
name|'fake_clean_conntrack'
op|'('
name|'fixed_ip'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'not'
name|'str'
op|'('
name|'fixed_ip'
op|')'
op|'=='
string|'"10.0.0.2"'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'exception'
op|'.'
name|'FixedIpInvalid'
op|'('
name|'address'
op|'='
name|'fixed_ip'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|','
string|"'_is_stale_floating_ip_address'"
op|','
nl|'\n'
name|'fake_is_stale_floating_ip_address'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'l3driver'
op|','
string|"'remove_floating_ip'"
op|','
nl|'\n'
name|'fake_remove_floating_ip'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'driver'
op|','
string|"'clean_conntrack'"
op|','
nl|'\n'
name|'fake_clean_conntrack'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
name|'addresses'
op|'='
op|'['
string|"'172.24.4.23'"
op|','
string|"'172.24.4.24'"
op|','
string|"'172.24.4.25'"
op|']'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'migrate_instance_start'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instance_uuid'
op|'='
name|'FAKEUUID'
op|','
nl|'\n'
name|'floating_addresses'
op|'='
name|'addresses'
op|','
nl|'\n'
name|'rxtx_factor'
op|'='
number|'3'
op|','
nl|'\n'
name|'project_id'
op|'='
name|'self'
op|'.'
name|'project_id'
op|','
nl|'\n'
name|'source'
op|'='
string|"'fake_source'"
op|','
nl|'\n'
name|'dest'
op|'='
string|"'fake_dest'"
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'2'
op|','
name|'called'
op|'['
string|"'count'"
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.fixed_ip_get'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.floating_ip_update'"
op|')'
newline|'\n'
DECL|member|test_migrate_instance_finish
name|'def'
name|'test_migrate_instance_finish'
op|'('
name|'self'
op|','
name|'floating_update'
op|','
name|'fixed_get'
op|')'
op|':'
newline|'\n'
indent|' '
name|'called'
op|'='
op|'{'
string|"'count'"
op|':'
number|'0'
op|'}'
newline|'\n'
nl|'\n'
DECL|function|fake_floating_ip_get_by_address
name|'def'
name|'fake_floating_ip_get_by_address'
op|'('
name|'context'
op|','
name|'address'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'dict'
op|'('
name|'test_floating_ip'
op|'.'
name|'fake_floating_ip'
op|','
nl|'\n'
name|'address'
op|'='
name|'address'
op|','
nl|'\n'
name|'fixed_ip_id'
op|'='
number|'0'
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake_is_stale_floating_ip_address
dedent|''
name|'def'
name|'fake_is_stale_floating_ip_address'
op|'('
name|'context'
op|','
name|'floating_ip'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'str'
op|'('
name|'floating_ip'
op|'.'
name|'address'
op|')'
op|'=='
string|"'172.24.4.23'"
newline|'\n'
nl|'\n'
dedent|''
name|'fixed_get'
op|'.'
name|'return_value'
op|'='
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
nl|'\n'
name|'instance_uuid'
op|'='
name|'uuids'
op|'.'
name|'instance'
op|','
nl|'\n'
name|'address'
op|'='
string|"'10.0.0.2'"
op|','
nl|'\n'
name|'network'
op|'='
name|'test_network'
op|'.'
name|'fake_network'
op|')'
newline|'\n'
name|'floating_update'
op|'.'
name|'return_value'
op|'='
name|'fake_floating_ip_get_by_address'
op|'('
nl|'\n'
name|'None'
op|','
string|"'1.2.3.4'"
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake_add_floating_ip
name|'def'
name|'fake_add_floating_ip'
op|'('
name|'floating_addr'
op|','
name|'fixed_addr'
op|','
name|'interface'
op|','
nl|'\n'
name|'network'
op|')'
op|':'
newline|'\n'
indent|' '
name|'called'
op|'['
string|"'count'"
op|']'
op|'+='
number|'1'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
string|"'floating_ip_get_by_address'"
op|','
nl|'\n'
name|'fake_floating_ip_get_by_address'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|','
string|"'_is_stale_floating_ip_address'"
op|','
nl|'\n'
name|'fake_is_stale_floating_ip_address'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'l3driver'
op|','
string|"'add_floating_ip'"
op|','
nl|'\n'
name|'fake_add_floating_ip'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
name|'addresses'
op|'='
op|'['
string|"'172.24.4.23'"
op|','
string|"'172.24.4.24'"
op|','
string|"'172.24.4.25'"
op|']'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'migrate_instance_finish'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instance_uuid'
op|'='
name|'FAKEUUID'
op|','
nl|'\n'
name|'floating_addresses'
op|'='
name|'addresses'
op|','
nl|'\n'
name|'host'
op|'='
string|"'fake_dest'"
op|','
nl|'\n'
name|'rxtx_factor'
op|'='
number|'3'
op|','
nl|'\n'
name|'project_id'
op|'='
name|'self'
op|'.'
name|'project_id'
op|','
nl|'\n'
name|'source'
op|'='
string|"'fake_source'"
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'2'
op|','
name|'called'
op|'['
string|"'count'"
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_floating_dns_create_conflict
dedent|''
name|'def'
name|'test_floating_dns_create_conflict'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'zone'
op|'='
string|'"example.org"'
newline|'\n'
name|'address1'
op|'='
string|'"10.10.10.11"'
newline|'\n'
name|'name1'
op|'='
string|'"foo"'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'add_dns_entry'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'address1'
op|','
name|'name1'
op|','
string|'"A"'
op|','
name|'zone'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'FloatingIpDNSExists'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'add_dns_entry'
op|','
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'address1'
op|','
name|'name1'
op|','
string|'"A"'
op|','
name|'zone'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_floating_create_and_get
dedent|''
name|'def'
name|'test_floating_create_and_get'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'zone'
op|'='
string|'"example.org"'
newline|'\n'
name|'address1'
op|'='
string|'"10.10.10.11"'
newline|'\n'
name|'name1'
op|'='
string|'"foo"'
newline|'\n'
name|'name2'
op|'='
string|'"bar"'
newline|'\n'
name|'entries'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'get_dns_entries_by_address'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'address1'
op|','
name|'zone'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'entries'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'add_dns_entry'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'address1'
op|','
name|'name1'
op|','
string|'"A"'
op|','
name|'zone'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'add_dns_entry'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'address1'
op|','
name|'name2'
op|','
string|'"A"'
op|','
name|'zone'
op|')'
newline|'\n'
name|'entries'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'get_dns_entries_by_address'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'address1'
op|','
name|'zone'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'2'
op|','
name|'len'
op|'('
name|'entries'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'name1'
op|','
name|'entries'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'name2'
op|','
name|'entries'
op|'['
number|'1'
op|']'
op|')'
newline|'\n'
nl|'\n'
name|'entries'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'get_dns_entries_by_name'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'name1'
op|','
name|'zone'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'len'
op|'('
name|'entries'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'address1'
op|','
name|'entries'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_floating_dns_delete
dedent|''
name|'def'
name|'test_floating_dns_delete'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'zone'
op|'='
string|'"example.org"'
newline|'\n'
name|'address1'
op|'='
string|'"10.10.10.11"'
newline|'\n'
name|'name1'
op|'='
string|'"foo"'
newline|'\n'
name|'name2'
op|'='
string|'"bar"'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'add_dns_entry'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'address1'
op|','
name|'name1'
op|','
string|'"A"'
op|','
name|'zone'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'add_dns_entry'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'address1'
op|','
name|'name2'
op|','
string|'"A"'
op|','
name|'zone'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'delete_dns_entry'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'name1'
op|','
name|'zone'
op|')'
newline|'\n'
nl|'\n'
name|'entries'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'get_dns_entries_by_address'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'address1'
op|','
name|'zone'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'len'
op|'('
name|'entries'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'name2'
op|','
name|'entries'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'NotFound'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'delete_dns_entry'
op|','
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'name1'
op|','
name|'zone'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_floating_dns_domains_public
dedent|''
name|'def'
name|'test_floating_dns_domains_public'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'domain1'
op|'='
string|'"example.org"'
newline|'\n'
name|'domain2'
op|'='
string|'"example.com"'
newline|'\n'
name|'address1'
op|'='
string|"'10.10.10.10'"
newline|'\n'
name|'entryname'
op|'='
string|"'testentry'"
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'create_public_dns_domain'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'domain1'
op|','
nl|'\n'
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'create_public_dns_domain'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'domain2'
op|','
nl|'\n'
string|"'fakeproject'"
op|')'
newline|'\n'
nl|'\n'
name|'domains'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'get_dns_domains'
op|'('
name|'self'
op|'.'
name|'context'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'2'
op|','
name|'len'
op|'('
name|'domains'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'domain1'
op|','
name|'domains'
op|'['
number|'0'
op|']'
op|'['
string|"'domain'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'domain2'
op|','
name|'domains'
op|'['
number|'1'
op|']'
op|'['
string|"'domain'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
name|'domains'
op|'['
number|'0'
op|']'
op|'['
string|"'project'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'fakeproject'"
op|','
name|'domains'
op|'['
number|'1'
op|']'
op|'['
string|"'project'"
op|']'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'add_dns_entry'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'address1'
op|','
name|'entryname'
op|','
nl|'\n'
string|"'A'"
op|','
name|'domain1'
op|')'
newline|'\n'
name|'entries'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'get_dns_entries_by_name'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'entryname'
op|','
name|'domain1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'len'
op|'('
name|'entries'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'address1'
op|','
name|'entries'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'delete_dns_domain'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'domain1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'delete_dns_domain'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'domain2'
op|')'
newline|'\n'
nl|'\n'
comment|'# Verify that deleting the domain deleted the associated entry'
nl|'\n'
name|'entries'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'get_dns_entries_by_name'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'entryname'
op|','
name|'domain1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'entries'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_delete_all_by_ip
dedent|''
name|'def'
name|'test_delete_all_by_ip'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'domain1'
op|'='
string|'"example.org"'
newline|'\n'
name|'domain2'
op|'='
string|'"example.com"'
newline|'\n'
name|'address'
op|'='
string|'"10.10.10.10"'
newline|'\n'
name|'name1'
op|'='
string|'"foo"'
newline|'\n'
name|'name2'
op|'='
string|'"bar"'
newline|'\n'
nl|'\n'
DECL|function|fake_domains
name|'def'
name|'fake_domains'
op|'('
name|'context'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
op|'['
op|'{'
string|"'domain'"
op|':'
string|"'example.org'"
op|','
string|"'scope'"
op|':'
string|"'public'"
op|'}'
op|','
nl|'\n'
op|'{'
string|"'domain'"
op|':'
string|"'example.com'"
op|','
string|"'scope'"
op|':'
string|"'public'"
op|'}'
op|','
nl|'\n'
op|'{'
string|"'domain'"
op|':'
string|"'test.example.org'"
op|','
string|"'scope'"
op|':'
string|"'public'"
op|'}'
op|']'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|','
string|"'get_dns_domains'"
op|','
name|'fake_domains'
op|')'
newline|'\n'
nl|'\n'
name|'context_admin'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'testuser'"
op|','
nl|'\n'
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'True'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'create_public_dns_domain'
op|'('
name|'context_admin'
op|','
name|'domain1'
op|','
nl|'\n'
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'create_public_dns_domain'
op|'('
name|'context_admin'
op|','
name|'domain2'
op|','
nl|'\n'
string|"'fakeproject'"
op|')'
newline|'\n'
nl|'\n'
name|'domains'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'get_dns_domains'
op|'('
name|'self'
op|'.'
name|'context'
op|')'
newline|'\n'
name|'for'
name|'domain'
name|'in'
name|'domains'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'network'
op|'.'
name|'add_dns_entry'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'address'
op|','
nl|'\n'
name|'name1'
op|','
string|'"A"'
op|','
name|'domain'
op|'['
string|"'domain'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'add_dns_entry'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'address'
op|','
nl|'\n'
name|'name2'
op|','
string|'"A"'
op|','
name|'domain'
op|'['
string|"'domain'"
op|']'
op|')'
newline|'\n'
name|'entries'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'get_dns_entries_by_address'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'address'
op|','
nl|'\n'
name|'domain'
op|'['
string|"'domain'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'2'
op|','
name|'len'
op|'('
name|'entries'
op|')'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'network'
op|'.'
name|'_delete_all_entries_for_ip'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'address'
op|')'
newline|'\n'
nl|'\n'
name|'for'
name|'domain'
name|'in'
name|'domains'
op|':'
newline|'\n'
indent|' '
name|'entries'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'get_dns_entries_by_address'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'address'
op|','
nl|'\n'
name|'domain'
op|'['
string|"'domain'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'entries'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'network'
op|'.'
name|'delete_dns_domain'
op|'('
name|'context_admin'
op|','
name|'domain1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'delete_dns_domain'
op|'('
name|'context_admin'
op|','
name|'domain2'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_mac_conflicts
dedent|''
name|'def'
name|'test_mac_conflicts'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# Make sure MAC collisions are retried.'
nl|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'create_unique_mac_address_attempts'
op|'='
number|'3'
op|')'
newline|'\n'
name|'ctxt'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'testuser'"
op|','
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'True'
op|')'
newline|'\n'
name|'macs'
op|'='
op|'['
string|"'bb:bb:bb:bb:bb:bb'"
op|','
string|"'aa:aa:aa:aa:aa:aa'"
op|']'
newline|'\n'
nl|'\n'
comment|'# Create a VIF with aa:aa:aa:aa:aa:aa'
nl|'\n'
name|'crash_test_dummy_vif'
op|'='
op|'{'
nl|'\n'
string|"'address'"
op|':'
name|'macs'
op|'['
number|'1'
op|']'
op|','
nl|'\n'
string|"'instance_uuid'"
op|':'
name|'uuids'
op|'.'
name|'instance'
op|','
nl|'\n'
string|"'network_id'"
op|':'
number|'123'
op|','
nl|'\n'
string|"'uuid'"
op|':'
string|"'fake_uuid'"
op|','
nl|'\n'
op|'}'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|'.'
name|'virtual_interface_create'
op|'('
name|'ctxt'
op|','
name|'crash_test_dummy_vif'
op|')'
newline|'\n'
nl|'\n'
comment|'# Hand out a collision first, then a legit MAC'
nl|'\n'
DECL|function|fake_gen_mac
name|'def'
name|'fake_gen_mac'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'macs'
op|'.'
name|'pop'
op|'('
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'utils'
op|','
string|"'generate_mac_address'"
op|','
name|'fake_gen_mac'
op|')'
newline|'\n'
nl|'\n'
comment|"# SQLite doesn't seem to honor the uniqueness constraint on the"
nl|'\n'
comment|'# address column, so fake the collision-avoidance here'
nl|'\n'
DECL|function|fake_vif_save
name|'def'
name|'fake_vif_save'
op|'('
name|'vif'
op|','
name|'session'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'vif'
op|'.'
name|'address'
op|'=='
name|'crash_test_dummy_vif'
op|'['
string|"'address'"
op|']'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'db_exc'
op|'.'
name|'DBError'
op|'('
string|'"If you\'re smart, you\'ll retry!"'
op|')'
newline|'\n'
comment|'# NOTE(russellb) The VirtualInterface object requires an ID to be'
nl|'\n'
comment|'# set, and we expect it to get set automatically when we do the'
nl|'\n'
comment|'# save.'
nl|'\n'
dedent|''
name|'vif'
op|'.'
name|'id'
op|'='
number|'1'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'models'
op|'.'
name|'VirtualInterface'
op|','
string|"'save'"
op|','
name|'fake_vif_save'
op|')'
newline|'\n'
nl|'\n'
comment|'# Attempt to add another and make sure that both MACs are consumed'
nl|'\n'
comment|'# by the retry loop'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'_add_virtual_interface'
op|'('
name|'ctxt'
op|','
name|'uuids'
op|'.'
name|'instance'
op|','
number|'123'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'['
op|']'
op|','
name|'macs'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_deallocate_client_exceptions
dedent|''
name|'def'
name|'test_deallocate_client_exceptions'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# Ensure that FloatingIpNotFoundForAddress is wrapped.'
nl|'\n'
indent|' '
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
string|"'floating_ip_get_by_address'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|'.'
name|'floating_ip_get_by_address'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
string|"'1.2.3.4'"
op|')'
op|'.'
name|'AndRaise'
op|'('
nl|'\n'
name|'exception'
op|'.'
name|'FloatingIpNotFoundForAddress'
op|'('
name|'address'
op|'='
string|"'fake'"
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'messaging'
op|'.'
name|'ExpectedException'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'deallocate_floating_ip'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
string|"'1.2.3.4'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_associate_client_exceptions
dedent|''
name|'def'
name|'test_associate_client_exceptions'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# Ensure that FloatingIpNotFoundForAddress is wrapped.'
nl|'\n'
indent|' '
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
string|"'floating_ip_get_by_address'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|'.'
name|'floating_ip_get_by_address'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
string|"'1.2.3.4'"
op|')'
op|'.'
name|'AndRaise'
op|'('
nl|'\n'
name|'exception'
op|'.'
name|'FloatingIpNotFoundForAddress'
op|'('
name|'address'
op|'='
string|"'fake'"
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'messaging'
op|'.'
name|'ExpectedException'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'associate_floating_ip'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
string|"'1.2.3.4'"
op|','
string|"'10.0.0.1'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_disassociate_client_exceptions
dedent|''
name|'def'
name|'test_disassociate_client_exceptions'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# Ensure that FloatingIpNotFoundForAddress is wrapped.'
nl|'\n'
indent|' '
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
string|"'floating_ip_get_by_address'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|'.'
name|'floating_ip_get_by_address'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
string|"'1.2.3.4'"
op|')'
op|'.'
name|'AndRaise'
op|'('
nl|'\n'
name|'exception'
op|'.'
name|'FloatingIpNotFoundForAddress'
op|'('
name|'address'
op|'='
string|"'fake'"
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'messaging'
op|'.'
name|'ExpectedException'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'disassociate_floating_ip'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
string|"'1.2.3.4'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_floating_ip_client_exceptions
dedent|''
name|'def'
name|'test_get_floating_ip_client_exceptions'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# Ensure that FloatingIpNotFoundForAddress is wrapped.'
nl|'\n'
indent|' '
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
string|"'floating_ip_get'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|'.'
name|'floating_ip_get'
op|'('
name|'self'
op|'.'
name|'context'
op|','
string|"'fake-id'"
op|')'
op|'.'
name|'AndRaise'
op|'('
nl|'\n'
name|'exception'
op|'.'
name|'FloatingIpNotFound'
op|'('
name|'id'
op|'='
string|"'fake'"
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'messaging'
op|'.'
name|'ExpectedException'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'get_floating_ip'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
string|"'fake-id'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|_test_associate_floating_ip_failure
dedent|''
name|'def'
name|'_test_associate_floating_ip_failure'
op|'('
name|'self'
op|','
name|'stdout'
op|','
name|'expected_exception'
op|')'
op|':'
newline|'\n'
DECL|function|_fake_catchall
indent|' '
name|'def'
name|'_fake_catchall'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'dict'
op|'('
name|'test_fixed_ip'
op|'.'
name|'fake_fixed_ip'
op|','
nl|'\n'
name|'network'
op|'='
name|'test_network'
op|'.'
name|'fake_network'
op|')'
newline|'\n'
nl|'\n'
DECL|function|_fake_add_floating_ip
dedent|''
name|'def'
name|'_fake_add_floating_ip'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'processutils'
op|'.'
name|'ProcessExecutionError'
op|'('
name|'stdout'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
string|"'floating_ip_fixed_ip_associate'"
op|','
nl|'\n'
name|'_fake_catchall'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|','
string|"'floating_ip_disassociate'"
op|','
nl|'\n'
name|'_fake_catchall'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'l3driver'
op|','
string|"'add_floating_ip'"
op|','
nl|'\n'
name|'_fake_add_floating_ip'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'expected_exception'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'_associate_floating_ip'
op|','
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
string|"'1.2.3.4'"
op|','
string|"'1.2.3.5'"
op|','
string|"''"
op|','
string|"''"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_associate_floating_ip_failure
dedent|''
name|'def'
name|'test_associate_floating_ip_failure'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'_test_associate_floating_ip_failure'
op|'('
name|'None'
op|','
nl|'\n'
name|'processutils'
op|'.'
name|'ProcessExecutionError'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_associate_floating_ip_failure_interface_not_found
dedent|''
name|'def'
name|'test_associate_floating_ip_failure_interface_not_found'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'_test_associate_floating_ip_failure'
op|'('
string|"'Cannot find device'"
op|','
nl|'\n'
name|'exception'
op|'.'
name|'NoFloatingIpInterface'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.FloatingIP.get_by_address'"
op|')'
newline|'\n'
DECL|member|test_get_floating_ip_by_address
name|'def'
name|'test_get_floating_ip_by_address'
op|'('
name|'self'
op|','
name|'mock_get'
op|')'
op|':'
newline|'\n'
indent|' '
name|'mock_get'
op|'.'
name|'return_value'
op|'='
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'floating'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'floating'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'get_floating_ip_by_address'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'address'
op|')'
op|')'
newline|'\n'
name|'mock_get'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'address'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.FloatingIPList.get_by_project'"
op|')'
newline|'\n'
DECL|member|test_get_floating_ips_by_project
name|'def'
name|'test_get_floating_ips_by_project'
op|'('
name|'self'
op|','
name|'mock_get'
op|')'
op|':'
newline|'\n'
indent|' '
name|'mock_get'
op|'.'
name|'return_value'
op|'='
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'floatings'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'floatings'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'get_floating_ips_by_project'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|')'
op|')'
newline|'\n'
name|'mock_get'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'self'
op|'.'
name|'context'
op|'.'
name|'project_id'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.FloatingIPList.get_by_fixed_address'"
op|')'
newline|'\n'
DECL|member|test_get_floating_ips_by_fixed_address
name|'def'
name|'test_get_floating_ips_by_fixed_address'
op|'('
name|'self'
op|','
name|'mock_get'
op|')'
op|':'
newline|'\n'
indent|' '
name|'mock_get'
op|'.'
name|'return_value'
op|'='
op|'['
name|'objects'
op|'.'
name|'FloatingIP'
op|'('
name|'address'
op|'='
string|"'1.2.3.4'"
op|')'
op|','
nl|'\n'
name|'objects'
op|'.'
name|'FloatingIP'
op|'('
name|'address'
op|'='
string|"'5.6.7.8'"
op|')'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'['
string|"'1.2.3.4'"
op|','
string|"'5.6.7.8'"
op|']'
op|','
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'get_floating_ips_by_fixed_address'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'address'
op|')'
op|')'
newline|'\n'
name|'mock_get'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'address'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.floating_ip_get_pools'"
op|')'
newline|'\n'
DECL|member|test_floating_ip_pool_exists
name|'def'
name|'test_floating_ip_pool_exists'
op|'('
name|'self'
op|','
name|'floating_ip_get_pools'
op|')'
op|':'
newline|'\n'
indent|' '
name|'floating_ip_get_pools'
op|'.'
name|'return_value'
op|'='
op|'['
op|'{'
string|"'name'"
op|':'
string|"'public'"
op|'}'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'_floating_ip_pool_exists'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
string|"'public'"
op|')'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.db.floating_ip_get_pools'"
op|')'
newline|'\n'
DECL|member|test_floating_ip_pool_does_not_exist
name|'def'
name|'test_floating_ip_pool_does_not_exist'
op|'('
name|'self'
op|','
name|'floating_ip_get_pools'
op|')'
op|':'
newline|'\n'
indent|' '
name|'floating_ip_get_pools'
op|'.'
name|'return_value'
op|'='
op|'['
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'self'
op|'.'
name|'network'
op|'.'
name|'_floating_ip_pool_exists'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
string|"'public'"
op|')'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|InstanceDNSTestCase
dedent|''
dedent|''
name|'class'
name|'InstanceDNSTestCase'
op|'('
name|'test'
op|'.'
name|'TestCase'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Tests nova.network.manager instance DNS."""'
newline|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'InstanceDNSTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'tempdir'
op|'='
name|'self'
op|'.'
name|'useFixture'
op|'('
name|'fixtures'
op|'.'
name|'TempDir'
op|'('
op|')'
op|')'
op|'.'
name|'path'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'log_dir'
op|'='
name|'self'
op|'.'
name|'tempdir'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'='
name|'TestFloatingIPManager'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'db'
op|'='
name|'db'
newline|'\n'
name|'self'
op|'.'
name|'project_id'
op|'='
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
newline|'\n'
name|'self'
op|'.'
name|'context'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'testuser'"
op|','
name|'self'
op|'.'
name|'project_id'
op|','
nl|'\n'
name|'is_admin'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_dns_domains_private
dedent|''
name|'def'
name|'test_dns_domains_private'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'zone1'
op|'='
string|"'testzone'"
newline|'\n'
name|'domain1'
op|'='
string|"'example.org'"
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'create_private_dns_domain'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'domain1'
op|','
name|'zone1'
op|')'
newline|'\n'
name|'domains'
op|'='
name|'self'
op|'.'
name|'network'
op|'.'
name|'get_dns_domains'
op|'('
name|'self'
op|'.'
name|'context'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'len'
op|'('
name|'domains'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'domain1'
op|','
name|'domains'
op|'['
number|'0'
op|']'
op|'['
string|"'domain'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'zone1'
op|','
name|'domains'
op|'['
number|'0'
op|']'
op|'['
string|"'availability_zone'"
op|']'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'network'
op|'.'
name|'delete_dns_domain'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'domain1'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|variable|domain1
dedent|''
dedent|''
name|'domain1'
op|'='
string|'"example.org"'
newline|'\n'
DECL|variable|domain2
name|'domain2'
op|'='
string|'"example.com"'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|LdapDNSTestCase
name|'class'
name|'LdapDNSTestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Tests nova.network.ldapdns.LdapDNS."""'
newline|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'LdapDNSTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'useFixture'
op|'('
name|'fixtures'
op|'.'
name|'MonkeyPatch'
op|'('
nl|'\n'
string|"'nova.network.ldapdns.ldap'"
op|','
nl|'\n'
name|'fake_ldap'
op|')'
op|')'
newline|'\n'
name|'dns_class'
op|'='
string|"'nova.network.ldapdns.LdapDNS'"
newline|'\n'
name|'self'
op|'.'
name|'driver'
op|'='
name|'importutils'
op|'.'
name|'import_object'
op|'('
name|'dns_class'
op|')'
newline|'\n'
nl|'\n'
name|'attrs'
op|'='
op|'{'
string|"'objectClass'"
op|':'
op|'['
string|"'domainrelatedobject'"
op|','
string|"'dnsdomain'"
op|','
nl|'\n'
string|"'domain'"
op|','
string|"'dcobject'"
op|','
string|"'top'"
op|']'
op|','
nl|'\n'
string|"'associateddomain'"
op|':'
op|'['
string|"'root'"
op|']'
op|','
nl|'\n'
string|"'dc'"
op|':'
op|'['
string|"'root'"
op|']'
op|'}'
newline|'\n'
name|'self'
op|'.'
name|'driver'
op|'.'
name|'lobj'
op|'.'
name|'add_s'
op|'('
string|'"ou=hosts,dc=example,dc=org"'
op|','
name|'attrs'
op|'.'
name|'items'
op|'('
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'driver'
op|'.'
name|'create_domain'
op|'('
name|'domain1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'driver'
op|'.'
name|'create_domain'
op|'('
name|'domain2'
op|')'
newline|'\n'
nl|'\n'
DECL|member|tearDown
dedent|''
name|'def'
name|'tearDown'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'driver'
op|'.'
name|'delete_domain'
op|'('
name|'domain1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'driver'
op|'.'
name|'delete_domain'
op|'('
name|'domain2'
op|')'
newline|'\n'
name|'super'
op|'('
name|'LdapDNSTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'tearDown'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_ldap_dns_domains
dedent|''
name|'def'
name|'test_ldap_dns_domains'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'domains'
op|'='
name|'self'
op|'.'
name|'driver'
op|'.'
name|'get_domains'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'2'
op|','
name|'len'
op|'('
name|'domains'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
name|'domain1'
op|','
name|'domains'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
name|'domain2'
op|','
name|'domains'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_ldap_dns_create_conflict
dedent|''
name|'def'
name|'test_ldap_dns_create_conflict'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'address1'
op|'='
string|'"10.10.10.11"'
newline|'\n'
name|'name1'
op|'='
string|'"foo"'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'driver'
op|'.'
name|'create_entry'
op|'('
name|'name1'
op|','
name|'address1'
op|','
string|'"A"'
op|','
name|'domain1'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'FloatingIpDNSExists'
op|','
nl|'\n'
name|'self'
op|'.'
name|'driver'
op|'.'
name|'create_entry'
op|','
nl|'\n'
name|'name1'
op|','
name|'address1'
op|','
string|'"A"'
op|','
name|'domain1'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_ldap_dns_create_and_get
dedent|''
name|'def'
name|'test_ldap_dns_create_and_get'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'address1'
op|'='
string|'"10.10.10.11"'
newline|'\n'
name|'name1'
op|'='
string|'"foo"'
newline|'\n'
name|'name2'
op|'='
string|'"bar"'
newline|'\n'
name|'entries'
op|'='
name|'self'
op|'.'
name|'driver'
op|'.'
name|'get_entries_by_address'
op|'('
name|'address1'
op|','
name|'domain1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'entries'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'driver'
op|'.'
name|'create_entry'
op|'('
name|'name1'
op|','
name|'address1'
op|','
string|'"A"'
op|','
name|'domain1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'driver'
op|'.'
name|'create_entry'
op|'('
name|'name2'
op|','
name|'address1'
op|','
string|'"A"'
op|','
name|'domain1'
op|')'
newline|'\n'
name|'entries'
op|'='
name|'self'
op|'.'
name|'driver'
op|'.'
name|'get_entries_by_address'
op|'('
name|'address1'
op|','
name|'domain1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'2'
op|','
name|'len'
op|'('
name|'entries'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'name1'
op|','
name|'entries'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'name2'
op|','
name|'entries'
op|'['
number|'1'
op|']'
op|')'
newline|'\n'
nl|'\n'
name|'entries'
op|'='
name|'self'
op|'.'
name|'driver'
op|'.'
name|'get_entries_by_name'
op|'('
name|'name1'
op|','
name|'domain1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'len'
op|'('
name|'entries'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'address1'
op|','
name|'entries'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_ldap_dns_delete
dedent|''
name|'def'
name|'test_ldap_dns_delete'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'address1'
op|'='
string|'"10.10.10.11"'
newline|'\n'
name|'name1'
op|'='
string|'"foo"'
newline|'\n'
name|'name2'
op|'='
string|'"bar"'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'driver'
op|'.'
name|'create_entry'
op|'('
name|'name1'
op|','
name|'address1'
op|','
string|'"A"'
op|','
name|'domain1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'driver'
op|'.'
name|'create_entry'
op|'('
name|'name2'
op|','
name|'address1'
op|','
string|'"A"'
op|','
name|'domain1'
op|')'
newline|'\n'
name|'entries'
op|'='
name|'self'
op|'.'
name|'driver'
op|'.'
name|'get_entries_by_address'
op|'('
name|'address1'
op|','
name|'domain1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'2'
op|','
name|'len'
op|'('
name|'entries'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'driver'
op|'.'
name|'delete_entry'
op|'('
name|'name1'
op|','
name|'domain1'
op|')'
newline|'\n'
name|'entries'
op|'='
name|'self'
op|'.'
name|'driver'
op|'.'
name|'get_entries_by_address'
op|'('
name|'address1'
op|','
name|'domain1'
op|')'
newline|'\n'
name|'LOG'
op|'.'
name|'debug'
op|'('
string|'"entries: %s"'
op|'%'
name|'entries'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'len'
op|'('
name|'entries'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'name2'
op|','
name|'entries'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'NotFound'
op|','
nl|'\n'
name|'self'
op|'.'
name|'driver'
op|'.'
name|'delete_entry'
op|','
nl|'\n'
name|'name1'
op|','
name|'domain1'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|NetworkManagerNoDBTestCase
dedent|''
dedent|''
name|'class'
name|'NetworkManagerNoDBTestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Tests nova.network.manager.NetworkManager without a database."""'
newline|'\n'
nl|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'NetworkManagerNoDBTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'context'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'fake-user'"
op|','
string|"'fake-project'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'manager'
op|'='
name|'network_manager'
op|'.'
name|'NetworkManager'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'FixedIP'
op|','
string|"'get_by_address'"
op|')'
newline|'\n'
DECL|member|test_release_fixed_ip_not_associated
name|'def'
name|'test_release_fixed_ip_not_associated'
op|'('
name|'self'
op|','
name|'mock_fip_get_by_addr'
op|')'
op|':'
newline|'\n'
comment|'# Tests that the method is a no-op when the fixed IP is not associated'
nl|'\n'
comment|'# to an instance.'
nl|'\n'
indent|' '
name|'fip'
op|'='
name|'objects'
op|'.'
name|'FixedIP'
op|'.'
name|'_from_db_object'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'objects'
op|'.'
name|'FixedIP'
op|'('
op|')'
op|','
name|'fake_network'
op|'.'
name|'next_fixed_ip'
op|'('
number|'1'
op|')'
op|')'
newline|'\n'
name|'fip'
op|'.'
name|'instance_uuid'
op|'='
name|'None'
newline|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'fip'
op|','
string|"'disassociate'"
op|')'
name|'as'
name|'mock_disassociate'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'manager'
op|'.'
name|'release_fixed_ip'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'fip'
op|'.'
name|'address'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'mock_disassociate'
op|'.'
name|'called'
op|','
nl|'\n'
name|'str'
op|'('
name|'mock_disassociate'
op|'.'
name|'mock_calls'
op|')'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'FixedIP'
op|','
string|"'get_by_address'"
op|')'
newline|'\n'
DECL|member|test_release_fixed_ip_allocated
name|'def'
name|'test_release_fixed_ip_allocated'
op|'('
name|'self'
op|','
name|'mock_fip_get_by_addr'
op|')'
op|':'
newline|'\n'
comment|"# Tests that the fixed IP is not disassociated if it's allocated."
nl|'\n'
indent|' '
name|'fip'
op|'='
name|'objects'
op|'.'
name|'FixedIP'
op|'.'
name|'_from_db_object'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'objects'
op|'.'
name|'FixedIP'
op|'('
op|')'
op|','
name|'fake_network'
op|'.'
name|'next_fixed_ip'
op|'('
number|'1'
op|')'
op|')'
newline|'\n'
name|'fip'
op|'.'
name|'leased'
op|'='
name|'False'
newline|'\n'
name|'fip'
op|'.'
name|'allocated'
op|'='
name|'True'
newline|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'fip'
op|','
string|"'disassociate'"
op|')'
name|'as'
name|'mock_disassociate'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'manager'
op|'.'
name|'release_fixed_ip'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'fip'
op|'.'
name|'address'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'mock_disassociate'
op|'.'
name|'called'
op|','
nl|'\n'
name|'str'
op|'('
name|'mock_disassociate'
op|'.'
name|'mock_calls'
op|')'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'FixedIP'
op|','
string|"'get_by_address'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'VirtualInterface'
op|','
string|"'get_by_address'"
op|')'
newline|'\n'
DECL|member|test_release_fixed_ip_mac_matches_associated_instance
name|'def'
name|'test_release_fixed_ip_mac_matches_associated_instance'
op|'('
name|'self'
op|','
nl|'\n'
name|'mock_vif_get_by_addr'
op|','
nl|'\n'
name|'mock_fip_get_by_addr'
op|')'
op|':'
newline|'\n'
comment|'# Tests that the fixed IP is disassociated when the mac passed to'
nl|'\n'
comment|'# release_fixed_ip matches the VIF which has the same instance_uuid'
nl|'\n'
comment|'# as the instance associated to the FixedIP object. Also tests'
nl|'\n'
comment|'# that the fixed IP is marked as not leased in the database if it was'
nl|'\n'
comment|'# currently leased.'
nl|'\n'
indent|' '
name|'instance'
op|'='
name|'fake_instance'
op|'.'
name|'fake_instance_obj'
op|'('
name|'self'
op|'.'
name|'context'
op|')'
newline|'\n'
name|'fip'
op|'='
name|'fake_network'
op|'.'
name|'next_fixed_ip'
op|'('
number|'1'
op|')'
newline|'\n'
name|'fip'
op|'['
string|"'instance_uuid'"
op|']'
op|'='
name|'instance'
op|'.'
name|'uuid'
newline|'\n'
name|'fip'
op|'['
string|"'leased'"
op|']'
op|'='
name|'True'
newline|'\n'
name|'vif'
op|'='
name|'fip'
op|'['
string|"'virtual_interface'"
op|']'
newline|'\n'
name|'vif'
op|'['
string|"'instance_uuid'"
op|']'
op|'='
name|'instance'
op|'.'
name|'uuid'
newline|'\n'
name|'vif'
op|'='
name|'objects'
op|'.'
name|'VirtualInterface'
op|'.'
name|'_from_db_object'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'objects'
op|'.'
name|'VirtualInterface'
op|'('
op|')'
op|','
name|'vif'
op|')'
newline|'\n'
name|'fip'
op|'='
name|'objects'
op|'.'
name|'FixedIP'
op|'.'
name|'_from_db_object'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'objects'
op|'.'
name|'FixedIP'
op|'('
op|')'
op|','
name|'fip'
op|')'
newline|'\n'
name|'mock_fip_get_by_addr'
op|'.'
name|'return_value'
op|'='
name|'fip'
newline|'\n'
name|'mock_vif_get_by_addr'
op|'.'
name|'return_value'
op|'='
name|'vif'
newline|'\n'
nl|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'fip'
op|','
string|"'save'"
op|')'
name|'as'
name|'mock_fip_save'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'fip'
op|','
string|"'disassociate'"
op|')'
name|'as'
name|'mock_disassociate'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'manager'
op|'.'
name|'release_fixed_ip'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'fip'
op|'.'
name|'address'
op|','
name|'vif'
op|'.'
name|'address'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'mock_fip_save'
op|'.'
name|'assert_called_once_with'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'fip'
op|'.'
name|'leased'
op|')'
newline|'\n'
name|'mock_vif_get_by_addr'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'vif'
op|'.'
name|'address'
op|')'
newline|'\n'
name|'mock_disassociate'
op|'.'
name|'assert_called_once_with'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'FixedIP'
op|','
string|"'get_by_address'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'VirtualInterface'
op|','
string|"'get_by_address'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'None'
op|')'
newline|'\n'
DECL|member|test_release_fixed_ip_vif_not_found_for_mac
name|'def'
name|'test_release_fixed_ip_vif_not_found_for_mac'
op|'('
name|'self'
op|','
name|'mock_vif_get_by_addr'
op|','
nl|'\n'
name|'mock_fip_get_by_addr'
op|')'
op|':'
newline|'\n'
comment|'# Tests that the fixed IP is disassociated when the fixed IP is marked'
nl|'\n'
comment|'# as deallocated and there is no VIF found in the database for the mac'
nl|'\n'
comment|'# passed in.'
nl|'\n'
indent|' '
name|'fip'
op|'='
name|'fake_network'
op|'.'
name|'next_fixed_ip'
op|'('
number|'1'
op|')'
newline|'\n'
name|'fip'
op|'['
string|"'leased'"
op|']'
op|'='
name|'False'
newline|'\n'
name|'mac'
op|'='
name|'fip'
op|'['
string|"'virtual_interface'"
op|']'
op|'['
string|"'address'"
op|']'
newline|'\n'
name|'fip'
op|'='
name|'objects'
op|'.'
name|'FixedIP'
op|'.'
name|'_from_db_object'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'objects'
op|'.'
name|'FixedIP'
op|'('
op|')'
op|','
name|'fip'
op|')'
newline|'\n'
name|'mock_fip_get_by_addr'
op|'.'
name|'return_value'
op|'='
name|'fip'
newline|'\n'
nl|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'fip'
op|','
string|"'disassociate'"
op|')'
name|'as'
name|'mock_disassociate'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'manager'
op|'.'
name|'release_fixed_ip'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'fip'
op|'.'
name|'address'
op|','
name|'mac'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'mock_vif_get_by_addr'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'mac'
op|')'
newline|'\n'
name|'mock_disassociate'
op|'.'
name|'assert_called_once_with'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'FixedIP'
op|','
string|"'get_by_address'"
op|')'
newline|'\n'
DECL|member|test_release_fixed_ip_no_mac
name|'def'
name|'test_release_fixed_ip_no_mac'
op|'('
name|'self'
op|','
name|'mock_fip_get_by_addr'
op|')'
op|':'
newline|'\n'
comment|'# Tests that the fixed IP is disassociated when the fixed IP is'
nl|'\n'
comment|'# deallocated and there is no mac address passed in (like before'
nl|'\n'
comment|'# the network rpc api version bump to pass it in).'
nl|'\n'
indent|' '
name|'fip'
op|'='
name|'fake_network'
op|'.'
name|'next_fixed_ip'
op|'('
number|'1'
op|')'
newline|'\n'
name|'fip'
op|'['
string|"'leased'"
op|']'
op|'='
name|'False'
newline|'\n'
name|'fip'
op|'='
name|'objects'
op|'.'
name|'FixedIP'
op|'.'
name|'_from_db_object'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'objects'
op|'.'
name|'FixedIP'
op|'('
op|')'
op|','
name|'fip'
op|')'
newline|'\n'
name|'mock_fip_get_by_addr'
op|'.'
name|'return_value'
op|'='
name|'fip'
newline|'\n'
nl|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'fip'
op|','
string|"'disassociate'"
op|')'
name|'as'
name|'mock_disassociate'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'manager'
op|'.'
name|'release_fixed_ip'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'fip'
op|'.'
name|'address'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'mock_disassociate'
op|'.'
name|'assert_called_once_with'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'FixedIP'
op|','
string|"'get_by_address'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'VirtualInterface'
op|','
string|"'get_by_address'"
op|')'
newline|'\n'
DECL|member|test_release_fixed_ip_mac_mismatch_associated_instance
name|'def'
name|'test_release_fixed_ip_mac_mismatch_associated_instance'
op|'('
name|'self'
op|','
nl|'\n'
name|'mock_vif_get_by_addr'
op|','
nl|'\n'
name|'mock_fip_get_by_addr'
op|')'
op|':'
newline|'\n'
comment|'# Tests that the fixed IP is not disassociated when the VIF for the mac'
nl|'\n'
comment|'# passed to release_fixed_ip does not have an instance_uuid that'
nl|'\n'
comment|'# matches fixed_ip.instance_uuid.'
nl|'\n'
indent|' '
name|'old_instance'
op|'='
name|'fake_instance'
op|'.'
name|'fake_instance_obj'
op|'('
name|'self'
op|'.'
name|'context'
op|')'
newline|'\n'
name|'new_instance'
op|'='
name|'fake_instance'
op|'.'
name|'fake_instance_obj'
op|'('
name|'self'
op|'.'
name|'context'
op|')'
newline|'\n'
name|'fip'
op|'='
name|'fake_network'
op|'.'
name|'next_fixed_ip'
op|'('
number|'1'
op|')'
newline|'\n'
name|'fip'
op|'['
string|"'instance_uuid'"
op|']'
op|'='
name|'new_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'fip'
op|'['
string|"'leased'"
op|']'
op|'='
name|'False'
newline|'\n'
name|'vif'
op|'='
name|'fip'
op|'['
string|"'virtual_interface'"
op|']'
newline|'\n'
name|'vif'
op|'['
string|"'instance_uuid'"
op|']'
op|'='
name|'old_instance'
op|'.'
name|'uuid'
newline|'\n'
name|'vif'
op|'='
name|'objects'
op|'.'
name|'VirtualInterface'
op|'.'
name|'_from_db_object'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'objects'
op|'.'
name|'VirtualInterface'
op|'('
op|')'
op|','
name|'vif'
op|')'
newline|'\n'
name|'fip'
op|'='
name|'objects'
op|'.'
name|'FixedIP'
op|'.'
name|'_from_db_object'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'objects'
op|'.'
name|'FixedIP'
op|'('
op|')'
op|','
name|'fip'
op|')'
newline|'\n'
name|'mock_fip_get_by_addr'
op|'.'
name|'return_value'
op|'='
name|'fip'
newline|'\n'
name|'mock_vif_get_by_addr'
op|'.'
name|'return_value'
op|'='
name|'vif'
newline|'\n'
nl|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'fip'
op|','
string|"'disassociate'"
op|')'
name|'as'
name|'mock_disassociate'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'manager'
op|'.'
name|'release_fixed_ip'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'fip'
op|'.'
name|'address'
op|','
name|'vif'
op|'.'
name|'address'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'mock_vif_get_by_addr'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'vif'
op|'.'
name|'address'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'mock_disassociate'
op|'.'
name|'called'
op|','
nl|'\n'
name|'str'
op|'('
name|'mock_disassociate'
op|'.'
name|'mock_calls'
op|')'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'FixedIP'
op|','
string|"'get_by_address'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'VirtualInterface'
op|','
string|"'get_by_id'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'Quotas'
op|','
string|"'reserve'"
op|')'
newline|'\n'
DECL|member|test_deallocate_fixed_ip_explicit_disassociate
name|'def'
name|'test_deallocate_fixed_ip_explicit_disassociate'
op|'('
name|'self'
op|','
nl|'\n'
name|'mock_quota_reserve'
op|','
nl|'\n'
name|'mock_vif_get_by_id'
op|','
nl|'\n'
name|'mock_fip_get_by_addr'
op|')'
op|':'
newline|'\n'
comment|'# Tests that we explicitly call FixedIP.disassociate when the fixed IP'
nl|'\n'
comment|'# is not leased and has an associated instance (race with dnsmasq).'
nl|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'force_dhcp_release'
op|'='
name|'True'
op|')'
newline|'\n'
name|'fake_inst'
op|'='
name|'fake_instance'
op|'.'
name|'fake_instance_obj'
op|'('
name|'self'
op|'.'
name|'context'
op|')'
newline|'\n'
name|'fip'
op|'='
name|'fake_network'
op|'.'
name|'next_fixed_ip'
op|'('
number|'1'
op|')'
newline|'\n'
name|'fip'
op|'['
string|"'instance_uuid'"
op|']'
op|'='
name|'fake_inst'
op|'.'
name|'uuid'
newline|'\n'
name|'fip'
op|'['
string|"'leased'"
op|']'
op|'='
name|'False'
newline|'\n'
name|'vif'
op|'='
name|'fip'
op|'['
string|"'virtual_interface'"
op|']'
newline|'\n'
name|'vif'
op|'['
string|"'instance_uuid'"
op|']'
op|'='
name|'fake_inst'
op|'.'
name|'uuid'
newline|'\n'
name|'vif'
op|'='
name|'objects'
op|'.'
name|'VirtualInterface'
op|'.'
name|'_from_db_object'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'objects'
op|'.'
name|'VirtualInterface'
op|'('
op|')'
op|','
name|'vif'
op|')'
newline|'\n'
name|'fip'
op|'='
name|'objects'
op|'.'
name|'FixedIP'
op|'.'
name|'_from_db_object'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'objects'
op|'.'
name|'FixedIP'
op|'('
op|')'
op|','
name|'fip'
op|')'
newline|'\n'
name|'fip'
op|'.'
name|'network'
op|'='
name|'fake_network'
op|'.'
name|'fake_network_obj'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'fip'
op|'.'
name|'network_id'
op|')'
newline|'\n'
name|'mock_fip_get_by_addr'
op|'.'
name|'return_value'
op|'='
name|'fip'
newline|'\n'
name|'mock_vif_get_by_id'
op|'.'
name|'return_value'
op|'='
name|'vif'
newline|'\n'
nl|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'manager'
op|','
nl|'\n'
string|"'_do_trigger_security_group_members_refresh_for_instance'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'manager'
op|','
nl|'\n'
string|"'_validate_instance_zone_for_dns_domain'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'False'
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'manager'
op|','
string|"'_teardown_network_on_host'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'fip'
op|','
string|"'save'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'fip'
op|','
string|"'disassociate'"
op|')'
newline|'\n'
DECL|function|do_test
name|'def'
name|'do_test'
op|'('
name|'mock_disassociate'
op|','
name|'mock_fip_save'
op|','
nl|'\n'
name|'mock_teardown_network_on_host'
op|','
name|'mock_validate_zone'
op|','
nl|'\n'
name|'mock_trigger_secgroup_refresh'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'fake_inst'
op|'.'
name|'uuid'
op|','
name|'fip'
op|'.'
name|'instance_uuid'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'fip'
op|'.'
name|'leased'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'manager'
op|'.'
name|'deallocate_fixed_ip'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'fip'
op|'['
string|"'address'"
op|']'
op|','
name|'instance'
op|'='
name|'fake_inst'
op|')'
newline|'\n'
nl|'\n'
name|'mock_trigger_secgroup_refresh'
op|'.'
name|'assert_called_once_with'
op|'('
nl|'\n'
name|'fake_inst'
op|'.'
name|'uuid'
op|')'
newline|'\n'
name|'mock_teardown_network_on_host'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'fip'
op|'.'
name|'network'
op|')'
newline|'\n'
name|'mock_disassociate'
op|'.'
name|'assert_called_once_with'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'do_test'
op|'('
op|')'
newline|'\n'
dedent|''
dedent|''
endmarker|''
end_unit
| 12.29468
| 198
| 0.599532
| 49,947
| 327,936
| 3.824374
| 0.01852
| 0.182781
| 0.077009
| 0.074025
| 0.946397
| 0.925106
| 0.90506
| 0.878246
| 0.84696
| 0.821135
| 0
| 0.011316
| 0.095128
| 327,936
| 26,672
| 199
| 12.295141
| 0.632399
| 0
| 0
| 0.970793
| 0
| 0.000562
| 0.546472
| 0.098626
| 0
| 0
| 0
| 0
| 0.010385
| 0
| null | null | 0.000637
| 0.001575
| null | null | 0.000037
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2a3d9b2fe4e198177a93ecaa364d91edb7481941
| 176
|
py
|
Python
|
pyarrow_ops/__init__.py
|
bmschmidt/pyarrow_ops
|
1dff4172fc12d7e33ed6f4fd6861317521b94411
|
[
"Apache-2.0"
] | 25
|
2021-02-14T16:37:59.000Z
|
2022-01-28T02:58:12.000Z
|
pyarrow_ops/__init__.py
|
bmschmidt/pyarrow_ops
|
1dff4172fc12d7e33ed6f4fd6861317521b94411
|
[
"Apache-2.0"
] | 3
|
2021-04-10T20:23:54.000Z
|
2022-01-30T16:14:34.000Z
|
pyarrow_ops/__init__.py
|
bmschmidt/pyarrow_ops
|
1dff4172fc12d7e33ed6f4fd6861317521b94411
|
[
"Apache-2.0"
] | 6
|
2021-04-11T00:32:55.000Z
|
2022-01-30T13:09:33.000Z
|
from pyarrow_ops.ops import head, filters, drop_duplicates, head
from pyarrow_ops.group import groupby
from pyarrow_ops.ml import TableCleaner
from pyarrow_ops.join import join
| 44
| 64
| 0.857955
| 28
| 176
| 5.214286
| 0.464286
| 0.30137
| 0.383562
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102273
| 176
| 4
| 65
| 44
| 0.924051
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
aa88c9c49f9c26357deb80c51c809afa423820e5
| 142
|
py
|
Python
|
netdev/vendors/cisco/cisco_ios.py
|
brechtold/netdev
|
89845631aa921a5f78cb33b755295ede61cbc199
|
[
"BSD-3-Clause"
] | null | null | null |
netdev/vendors/cisco/cisco_ios.py
|
brechtold/netdev
|
89845631aa921a5f78cb33b755295ede61cbc199
|
[
"BSD-3-Clause"
] | null | null | null |
netdev/vendors/cisco/cisco_ios.py
|
brechtold/netdev
|
89845631aa921a5f78cb33b755295ede61cbc199
|
[
"BSD-3-Clause"
] | null | null | null |
from netdev.vendors.ios_like import IOSLikeDevice
class CiscoIOS(IOSLikeDevice):
"""Class for working with Cisco IOS/IOS XE"""
pass
| 20.285714
| 49
| 0.746479
| 19
| 142
| 5.526316
| 0.789474
| 0.342857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169014
| 142
| 6
| 50
| 23.666667
| 0.889831
| 0.274648
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
aaa4003600810f2298d5c5e4fa43cb313b8a1178
| 20,329
|
py
|
Python
|
tests/test_util.py
|
hattya/ayame
|
e8bb2b0ace79cd358b1384270cb9c5e809e12b5d
|
[
"MIT"
] | 1
|
2022-03-05T03:21:13.000Z
|
2022-03-05T03:21:13.000Z
|
tests/test_util.py
|
hattya/ayame
|
e8bb2b0ace79cd358b1384270cb9c5e809e12b5d
|
[
"MIT"
] | 1
|
2021-08-25T13:41:34.000Z
|
2021-08-25T13:41:34.000Z
|
tests/test_util.py
|
hattya/ayame
|
e8bb2b0ace79cd358b1384270cb9c5e809e12b5d
|
[
"MIT"
] | 1
|
2018-03-04T21:47:27.000Z
|
2018-03-04T21:47:27.000Z
|
#
# test_util
#
# Copyright (c) 2011-2021 Akinori Hattori <hattya@gmail.com>
#
# SPDX-License-Identifier: MIT
#
import collections.abc
import os
import pickle
import random
import threading
import time
from ayame import util
from base import AyameTestCase
class UtilTestCase(AyameTestCase):
def test_fqon_of_builtin(self):
self.assertEqual(util.fqon_of(None), 'NoneType')
self.assertEqual(util.fqon_of(True), 'bool')
self.assertEqual(util.fqon_of(False), 'bool')
self.assertEqual(util.fqon_of(''), 'str')
self.assertEqual(util.fqon_of([]), 'list')
self.assertEqual(util.fqon_of({}), 'dict')
self.assertEqual(util.fqon_of(1), 'int')
self.assertEqual(util.fqon_of(3.14), 'float')
def test_fqon_of_class(self):
class C:
pass
self.assertEqual(util.fqon_of(C), __name__ + '.C')
self.assertEqual(util.fqon_of(C()), __name__ + '.C')
C.__module__ = None
self.assertEqual(util.fqon_of(C), '<unknown>.C')
self.assertEqual(util.fqon_of(C()), '<unknown>.C')
def test_fqon_of_function(self):
def f():
pass
self.assertEqual(util.fqon_of(f), __name__ + '.f')
del f.__module__
self.assertEqual(util.fqon_of(f), '<unknown>.f')
f = lambda: None
self.assertEqual(util.fqon_of(f), __name__ + '.<lambda>')
del f.__module__
self.assertEqual(util.fqon_of(f), '<unknown>.<lambda>')
def test_fqon_of_module(self):
self.assertEqual(util.fqon_of(os), 'os')
self.assertEqual(util.fqon_of(util), 'ayame.util')
def test_to_bytes(self):
# iroha in hiragana
v = util.to_bytes('\u3044\u308d\u306f')
self.assertIsInstance(v, bytes)
self.assertEqual(v, b'\xe3\x81\x84\xe3\x82\x8d\xe3\x81\xaf')
v = util.to_bytes('\u3044\u308d\u306f', 'ascii', 'ignore')
self.assertIsInstance(v, bytes)
self.assertEqual(v, b'')
with self.assertRaises(UnicodeEncodeError):
util.to_bytes('\u3044\u308d\u306f', 'ascii')
v = util.to_bytes(b'abc')
self.assertIsInstance(v, bytes)
self.assertEqual(v, b'abc')
v = util.to_bytes(0)
self.assertIsInstance(v, bytes)
self.assertEqual(v, b'0')
v = util.to_bytes(3.14)
self.assertIsInstance(v, bytes)
self.assertEqual(v, b'3.14')
def test_to_list(self):
self.assertEqual(util.to_list(None), [])
self.assertEqual(util.to_list('abc'), ['abc'])
self.assertEqual(util.to_list(''), [''])
self.assertEqual(util.to_list(1), [1])
self.assertEqual(util.to_list(3.14), [3.14])
self.assertEqual(util.to_list((1,)), [1])
self.assertEqual(util.to_list([1]), [1])
self.assertEqual(util.to_list({'a': 1}), ['a'])
def test_new_token(self):
a = util.new_token()
b = util.new_token()
self.assertNotEqual(a, b)
def test_iterable(self):
self.assertTrue(util.iterable(()))
self.assertTrue(util.iterable([]))
self.assertTrue(util.iterable({}))
self.assertFalse(util.iterable(''))
def test_filter_dict(self):
class LowerDict(util.FilterDict):
def __convert__(self, key):
if isinstance(key, str):
return key.lower()
return super().__convert__(key)
d = LowerDict(a=-1, A=0)
self.assertEqual(d['A'], 0)
self.assertEqual(d['a'], 0)
self.assertIn('A', d)
self.assertIn('a', d)
self.assertEqual(d.get('A'), 0)
self.assertEqual(d.get('a'), 0)
d.setdefault('a', -1)
self.assertEqual(d, {'a': 0})
d['B'] = 1
self.assertEqual(d['B'], 1)
self.assertEqual(d['b'], 1)
self.assertIn('B', d)
self.assertIn('b', d)
self.assertEqual(d.get('B'), 1)
self.assertEqual(d.get('b'), 1)
d.setdefault('b', -1)
self.assertEqual(d, {'a': 0, 'b': 1})
del d['b']
self.assertEqual(d, {'a': 0})
self.assertEqual(d.pop('a'), 0)
self.assertEqual(d, {})
d.update(A=0)
self.assertEqual(d, {'a': 0})
d.update(A=0, b=1)
self.assertEqual(d, {'a': 0, 'b': 1})
d[0] = 'a'
self.assertEqual(d, {'a': 0, 'b': 1, 0: 'a'})
x = d.copy()
self.assertIsInstance(x, LowerDict)
self.assertEqual(x, d)
x[0] = 'b'
self.assertEqual(d, {'a': 0, 'b': 1, 0: 'a'})
self.assertEqual(x, {'a': 0, 'b': 1, 0: 'b'})
class RWLockTestCase(AyameTestCase):
def test_rwlock(self):
def reader():
with lock.read():
self.assertGreater(lock._rcnt, 0)
self.assertEqual(lock._rwait, 0)
time.sleep(0.01)
def writer():
with lock.write():
self.assertEqual(lock._rcnt, -1)
self.assertEqual(lock._rwait, 0)
time.sleep(0.01)
lock = util.RWLock()
for _ in range(10):
thr = threading.Thread(target=random.choice((reader, writer)))
thr.daemon = True
thr.start()
time.sleep(0.01)
time.sleep(0.17)
self.assertEqual(lock._rcnt, 0)
self.assertEqual(lock._rwait, 0)
self.assertEqual(threading.active_count(), 1)
def test_release(self):
lock = util.RWLock()
with self.assertRaises(RuntimeError):
lock.release_read()
with self.assertRaises(RuntimeError):
lock.release_write()
class LRUCacheTestCase(AyameTestCase):
def lru_cache(self, n):
c = LRUCache(n)
for i in range(n):
c[chr(ord('a') + i)] = i + 1
return c
def test_lru_cache(self):
c = LRUCache(3)
self.assertEqual(c.cap, 3)
self.assertEqual(len(c), 0)
self.assertIsInstance(c, collections.abc.MutableMapping)
def test_repr(self):
c = self.lru_cache(0)
self.assertEqual(repr(c), 'LRUCache([])')
c = self.lru_cache(3)
self.assertEqual(repr(c), "LRUCache([('c', 3), ('b', 2), ('a', 1)])")
def test_set(self):
c = self.lru_cache(3)
self.assertEqual(len(c), 3)
self.assertEqual(list(c), ['c', 'b', 'a'])
self.assertEqual(list(reversed(c)), ['a', 'b', 'c'])
self.assertIn('a', c)
self.assertIn('b', c)
self.assertIn('c', c)
self.assertEqual(list(c.keys()), ['c', 'b', 'a'])
self.assertEqual(list(c.values()), [3, 2, 1])
self.assertEqual(list(c.items()), [('c', 3), ('b', 2), ('a', 1)])
self.assertEqual(c.evicted, [])
c['c'] = 3.0
c['b'] = 2.0
c['a'] = 1.0
self.assertEqual(list(reversed(c)), ['c', 'b', 'a'])
self.assertEqual(list(c.items()), [('a', 1.0), ('b', 2.0), ('c', 3.0)])
self.assertEqual(c.evicted, [])
c['a'] = 1
c['b'] = 2
c['c'] = 3
c['d'] = 4
self.assertEqual(list(reversed(c)), ['b', 'c', 'd'])
self.assertEqual(list(c.items()), [('d', 4), ('c', 3), ('b', 2)])
self.assertEqual(c.evicted[0:], [('a', 1.0)])
self.assertEqual(c.setdefault('c', 0), 3)
self.assertEqual(c.setdefault('d', 0), 4)
self.assertEqual(c.setdefault('e', 5), 5)
self.assertEqual(list(reversed(c)), ['c', 'd', 'e'])
self.assertEqual(list(c.items()), [('e', 5), ('d', 4), ('c', 3)])
self.assertEqual(c.evicted[1:], [('b', 2)])
def test_get(self):
c = self.lru_cache(3)
self.assertEqual(list(reversed(c)), ['a', 'b', 'c'])
self.assertEqual(list(c.items()), [('c', 3), ('b', 2), ('a', 1)])
self.assertEqual(c.evicted, [])
self.assertEqual(c['c'], 3)
self.assertEqual(c['b'], 2)
self.assertEqual(c['a'], 1)
self.assertEqual(list(reversed(c)), ['c', 'b', 'a'])
self.assertEqual(list(c.items()), [('a', 1), ('b', 2), ('c', 3)])
self.assertEqual(c.evicted, [])
self.assertEqual(c.peek('a'), 1)
self.assertEqual(c.peek('b'), 2)
self.assertEqual(c.peek('c'), 3)
self.assertEqual(list(reversed(c)), ['c', 'b', 'a'])
self.assertEqual(list(c.items()), [('a', 1), ('b', 2), ('c', 3)])
self.assertEqual(c.evicted, [])
self.assertEqual(c.get('a'), 1)
self.assertEqual(c.get('b'), 2)
self.assertEqual(c.get('c'), 3)
self.assertEqual(c.get('z', 26), 26)
self.assertEqual(list(reversed(c)), ['a', 'b', 'c'])
self.assertEqual(list(c.items()), [('c', 3), ('b', 2), ('a', 1)])
self.assertEqual(c.evicted, [])
def test_del(self):
c = self.lru_cache(3)
del c['a']
self.assertEqual(list(reversed(c)), ['b', 'c'])
self.assertEqual(list(c.items()), [('c', 3), ('b', 2)])
self.assertEqual(c.evicted, [('a', 1)])
c = self.lru_cache(3)
del c['b']
self.assertEqual(list(reversed(c)), ['a', 'c'])
self.assertEqual(list(c.items()), [('c', 3), ('a', 1)])
self.assertEqual(c.evicted, [('b', 2)])
c = self.lru_cache(3)
del c['c']
self.assertEqual(list(reversed(c)), ['a', 'b'])
self.assertEqual(list(c.items()), [('b', 2), ('a', 1)])
self.assertEqual(c.evicted, [('c', 3)])
c = self.lru_cache(3)
self.assertEqual(c.pop('b'), 2)
self.assertEqual(list(reversed(c)), ['a', 'c'])
self.assertEqual(list(c.items()), [('c', 3), ('a', 1)])
self.assertEqual(c.evicted, [('b', 2)])
with self.assertRaises(KeyError):
c.pop('b')
self.assertIsNone(c.pop('b', None))
c = self.lru_cache(3)
n = len(c)
for i in range(1, n + 1):
self.assertEqual(len(c.popitem()), 2)
self.assertEqual(len(c), n - i)
self.assertEqual(len(c.evicted), i)
with self.assertRaises(KeyError):
c.popitem()
def test_resize(self):
c = self.lru_cache(3)
c.cap = 2
self.assertEqual(list(reversed(c)), ['b', 'c'])
self.assertEqual(list(c.items()), [('c', 3), ('b', 2)])
self.assertEqual(c.evicted[0:], [('a', 1)])
c['d'] = 4
self.assertEqual(list(reversed(c)), ['c', 'd'])
self.assertEqual(list(c.items()), [('d', 4), ('c', 3)])
self.assertEqual(c.evicted[1:], [('b', 2)])
c.cap = 1
self.assertEqual(list(reversed(c)), ['d'])
self.assertEqual(list(c.items()), [('d', 4)])
self.assertEqual(c.evicted[2:], [('c', 3)])
c['e'] = 5
self.assertEqual(list(reversed(c)), ['e'])
self.assertEqual(list(c.items()), [('e', 5)])
self.assertEqual(c.evicted[3:], [('d', 4)])
c.cap = 0
self.assertEqual(list(reversed(c)), [])
self.assertEqual(list(c.items()), [])
self.assertEqual(c.evicted[4:], [('e', 5)])
c.cap = -1
c['f'] = 6
c['g'] = 7
c['h'] = 8
c['i'] = 9
self.assertEqual(list(reversed(c)), ['f', 'g', 'h', 'i'])
self.assertEqual(list(c.items()), [('i', 9), ('h', 8), ('g', 7), ('f', 6)])
self.assertEqual(c.evicted[5:], [])
def test_clear(self):
c = self.lru_cache(3)
c.clear()
self.assertEqual(list(reversed(c)), [])
self.assertEqual(list(c.items()), [])
self.assertEqual(c.evicted, [])
def test_update(self):
c = self.lru_cache(3)
with self.assertRaises(NotImplementedError):
c.update()
def test_copy(self):
self._test_dup(lambda c: c.copy())
def test_pickle(self):
self._test_dup(lambda c: pickle.loads(pickle.dumps(c)))
def _test_dup(self, dup):
r = self.lru_cache(3)
c = dup(r)
self.assertIsNot(c, r)
self.assertEqual(c.cap, 3)
self.assertEqual(list(reversed(c)), ['a', 'b', 'c'])
self.assertEqual(list(c.items()), [('c', 3), ('b', 2), ('a', 1)])
self.assertEqual(c.evicted, [])
class LRUCache(util.LRUCache):
def on_init(self):
super().on_init()
self.evicted = []
def on_evicted(self, k, v):
super().on_evicted(k, v)
self.evicted.append((k, v))
class LFUCacheTestCase(AyameTestCase):
def lfu_cache(self, n):
c = LFUCache(n)
for i in range(n):
c[chr(ord('a') + i)] = i + 1
return c
def test_lfu_cache(self):
c = LFUCache(3)
self.assertEqual(c.cap, 3)
self.assertEqual(len(c), 0)
self.assertIsInstance(c, collections.abc.MutableMapping)
with self.assertRaises(RuntimeError):
c._lfu()
def test_repr(self):
c = self.lfu_cache(0)
self.assertEqual(repr(c), 'LFUCache([])')
c = self.lfu_cache(3)
self.assertEqual(repr(c), "LFUCache([('c', 3), ('b', 2), ('a', 1)])")
def test_set(self):
c = self.lfu_cache(3)
self.assertEqual(len(c), 3)
self.assertEqual(list(c), ['c', 'b', 'a'])
self.assertEqual(list(reversed(c)), ['a', 'b', 'c'])
self.assertIn('a', c)
self.assertIn('b', c)
self.assertIn('c', c)
self.assertEqual(list(c.keys()), ['c', 'b', 'a'])
self.assertEqual(list(c.values()), [3, 2, 1])
self.assertEqual(list(c.items()), [('c', 3), ('b', 2), ('a', 1)])
self.assertEqual(c.evicted, [])
c['c'] = 3.0
c['b'] = 2.0
c['a'] = 1.0
self.assertEqual(list(reversed(c)), ['c', 'b', 'a'])
self.assertEqual(list(c.items()), [('a', 1.0), ('b', 2.0), ('c', 3.0)])
self.assertEqual(c.evicted[0:], [('c', 3), ('b', 2), ('a', 1)])
c['a'] = 1
c['b'] = 2
c['c'] = 3
c['d'] = 4
self.assertEqual(list(reversed(c)), ['b', 'c', 'd'])
self.assertEqual(list(c.items()), [('d', 4), ('c', 3), ('b', 2)])
self.assertEqual(c.evicted[3:], [('a', 1.0), ('b', 2.0), ('c', 3.0), ('a', 1)])
self.assertEqual(c.setdefault('d', 0), 4)
self.assertEqual(c.setdefault('e', 5), 5)
self.assertEqual(c.setdefault('c', 0), 3)
self.assertEqual(list(reversed(c)), ['e', 'd', 'c'])
self.assertEqual(list(c.items()), [('c', 3), ('d', 4), ('e', 5)])
self.assertEqual(c.evicted[7:], [('b', 2)])
def test_get(self):
c = self.lfu_cache(3)
self.assertEqual(list(reversed(c)), ['a', 'b', 'c'])
self.assertEqual(list(c.items()), [('c', 3), ('b', 2), ('a', 1)])
self.assertEqual(c.evicted, [])
self.assertEqual(c['c'], 3)
self.assertEqual(c['b'], 2)
self.assertEqual(c['a'], 1)
self.assertEqual(list(reversed(c)), ['c', 'b', 'a'])
self.assertEqual(list(c.items()), [('a', 1), ('b', 2), ('c', 3)])
self.assertEqual(c.evicted, [])
self.assertEqual(c.peek('a'), 1)
self.assertEqual(c.peek('b'), 2)
self.assertEqual(c.peek('c'), 3)
self.assertEqual(list(reversed(c)), ['c', 'b', 'a'])
self.assertEqual(list(c.items()), [('a', 1), ('b', 2), ('c', 3)])
self.assertEqual(c.evicted, [])
self.assertEqual(c.get('a'), 1)
self.assertEqual(c.get('b'), 2)
self.assertEqual(c.get('c'), 3)
self.assertEqual(c.get('z', 26), 26)
self.assertEqual(list(reversed(c)), ['a', 'b', 'c'])
self.assertEqual(list(c.items()), [('c', 3), ('b', 2), ('a', 1)])
self.assertEqual(c.evicted, [])
def test_del(self):
c = self.lfu_cache(3)
del c['a']
self.assertEqual(list(reversed(c)), ['b', 'c'])
self.assertEqual(list(c.items()), [('c', 3), ('b', 2)])
self.assertEqual(c.evicted, [('a', 1)])
c = self.lfu_cache(3)
del c['b']
self.assertEqual(list(reversed(c)), ['a', 'c'])
self.assertEqual(list(c.items()), [('c', 3), ('a', 1)])
self.assertEqual(c.evicted, [('b', 2)])
c = self.lfu_cache(3)
del c['c']
self.assertEqual(list(reversed(c)), ['a', 'b'])
self.assertEqual(list(c.items()), [('b', 2), ('a', 1)])
self.assertEqual(c.evicted, [('c', 3)])
c = self.lfu_cache(3)
self.assertEqual(c.pop('b'), 2)
self.assertEqual(list(reversed(c)), ['a', 'c'])
self.assertEqual(list(c.items()), [('c', 3), ('a', 1)])
self.assertEqual(c.evicted, [('b', 2)])
with self.assertRaises(KeyError):
c.pop('b')
self.assertIsNone(c.pop('b', None))
c = self.lfu_cache(3)
n = len(c)
for i in range(1, n + 1):
self.assertEqual(len(c.popitem()), 2)
self.assertEqual(len(c), n - i)
self.assertEqual(len(c.evicted), i)
with self.assertRaises(KeyError):
c.popitem()
def test_resize(self):
c = self.lfu_cache(3)
c.cap = 2
self.assertEqual(list(reversed(c)), ['b', 'c'])
self.assertEqual(list(c.items()), [('c', 3), ('b', 2)])
self.assertEqual(c.evicted[0:], [('a', 1)])
c['d'] = 4
self.assertEqual(list(reversed(c)), ['c', 'd'])
self.assertEqual(list(c.items()), [('d', 4), ('c', 3)])
self.assertEqual(c.evicted[1:], [('b', 2)])
c.cap = 1
self.assertEqual(list(reversed(c)), ['d'])
self.assertEqual(list(c.items()), [('d', 4)])
self.assertEqual(c.evicted[2:], [('c', 3)])
c['e'] = 5
self.assertEqual(list(reversed(c)), ['e'])
self.assertEqual(list(c.items()), [('e', 5)])
self.assertEqual(c.evicted[3:], [('d', 4)])
c.cap = 0
self.assertEqual(list(reversed(c)), [])
self.assertEqual(list(c.items()), [])
self.assertEqual(c.evicted[4:], [('e', 5)])
c.cap = -1
c['f'] = 6
c['g'] = 7
c['h'] = 8
c['i'] = 9
self.assertEqual(list(reversed(c)), ['f', 'g', 'h', 'i'])
self.assertEqual(list(c.items()), [('i', 9), ('h', 8), ('g', 7), ('f', 6)])
self.assertEqual(c.evicted[5:], [])
def test_clear(self):
c = self.lfu_cache(3)
c.clear()
self.assertEqual(list(reversed(c)), [])
self.assertEqual(list(c.items()), [])
self.assertEqual(c.evicted, [])
def test_update(self):
c = self.lfu_cache(3)
with self.assertRaises(NotImplementedError):
c.update()
def test_copy(self):
self._test_dup(lambda c: c.copy())
def test_pickle(self):
self._test_dup(lambda c: pickle.loads(pickle.dumps(c)))
def _test_dup(self, dup):
f = self.lfu_cache(3)
c = dup(f)
self.assertIsNot(c, f)
self.assertEqual(c.cap, 3)
self.assertEqual(list(reversed(c)), ['a', 'b', 'c'])
self.assertEqual(list(c.items()), [('c', 3), ('b', 2), ('a', 1)])
self.assertEqual(c.evicted, [])
freq = c._head.next
self.assertEqual(freq.value, 1)
self.assertEqual(freq.len, 3)
self.assertEqual(c._head.prev.value, 1)
f = self.lfu_cache(3)
f['b']
f['c']
f['c']
c = dup(f)
self.assertIsNot(c, f)
self.assertEqual(c.cap, 3)
self.assertEqual(list(reversed(c)), ['a', 'b', 'c'])
self.assertEqual(list(c.items()), [('c', 3), ('b', 2), ('a', 1)])
self.assertEqual(c.evicted, [])
freq = c._head.next
self.assertEqual(freq.value, 1)
self.assertEqual(freq.len, 1)
self.assertEqual(freq.head.key, 'a')
self.assertEqual(freq.head.value, 1)
freq = c._head.next.next
self.assertEqual(freq.value, 2)
self.assertEqual(freq.len, 1)
self.assertEqual(freq.head.key, 'b')
self.assertEqual(freq.head.value, 2)
freq = c._head.next.next.next
self.assertEqual(freq.value, 3)
self.assertEqual(freq.len, 1)
self.assertEqual(freq.head.key, 'c')
self.assertEqual(freq.head.value, 3)
self.assertEqual(c._head.prev.value, 3)
class LFUCache(util.LFUCache):
def on_init(self):
super().on_init()
self.evicted = []
def on_evicted(self, k, v):
super().on_evicted(k, v)
self.evicted.append((k, v))
| 33.271686
| 87
| 0.516946
| 2,741
| 20,329
| 3.767603
| 0.067129
| 0.360221
| 0.161906
| 0.091024
| 0.845454
| 0.800813
| 0.745328
| 0.712501
| 0.657112
| 0.622833
| 0
| 0.028921
| 0.268631
| 20,329
| 610
| 88
| 33.32623
| 0.665658
| 0.005854
| 0
| 0.677355
| 0
| 0
| 0.033711
| 0.001782
| 0
| 0
| 0
| 0
| 0.57515
| 1
| 0.086172
| false
| 0.004008
| 0.016032
| 0
| 0.126253
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2aa961c99578a0a409c5723116f8c1b1f459bc16
| 2,022
|
py
|
Python
|
data_structures/test_deque.py
|
prekolna/AlgorithmsGreatestHits
|
7cdb60825b6e36abbffd5bdba480c3c70bc2c413
|
[
"MIT"
] | 1
|
2017-04-07T03:57:04.000Z
|
2017-04-07T03:57:04.000Z
|
data_structures/test_deque.py
|
doug-wade/AlgorithmsGreatestHits
|
7cdb60825b6e36abbffd5bdba480c3c70bc2c413
|
[
"MIT"
] | null | null | null |
data_structures/test_deque.py
|
doug-wade/AlgorithmsGreatestHits
|
7cdb60825b6e36abbffd5bdba480c3c70bc2c413
|
[
"MIT"
] | null | null | null |
from .deque import Deque
import random
import unittest
class dequeTests(unittest.TestCase):
def test_append_one(self):
d = Deque()
d.append("this is the last value")
self.assertIsNone(d._first.prev, d._last.next)
self.assertEqual(d._first.value, d._last.value,
"this is the last value")
self.assertEqual(d.size(), 1)
def test_append_two(self):
d = Deque()
d.append(1)
d.append(2)
self.assertIsNone(d._first.prev, d._last.next)
self.assertEqual(d._first.next, d._last, 2)
self.assertEqual(d._last.prev, d._first, 1)
self.assertEqual(d.size(), 2)
def test_prepend_one(self):
d = Deque()
d.prepend("this is the first value")
self.assertIsNone(d._first.prev, d._last.next)
self.assertEqual(d._first.value, d._last.value,
"this is the first value")
self.assertEqual(d.size(), 1)
def test_prepend_two(self):
d = Deque()
d.prepend(2)
d.prepend(1)
self.assertIsNone(d._first.prev, d._last.next)
self.assertEqual(d._first.next, d._last, 2)
self.assertEqual(d._last.prev, d._first, 1)
self.assertEqual(d.size(), 2)
def test_pop_first(self):
comp_list = []
d = Deque()
for i in range(10):
new_int = random.randint(0,10000)
comp_list.append(new_int)
d.append(new_int)
self.assertEqual(d.size(), 10)
for i in range(len(comp_list)):
self.assertEqual(comp_list[i], d.pop_first())
self.assertEqual(d.size(), 0)
def test_pop_last(self):
comp_list = []
d = Deque()
for i in range(10):
new_int = random.randint(0,10000)
comp_list.append(new_int)
d.prepend(new_int)
self.assertEqual(d.size(), 10)
for i in range(len(comp_list)):
self.assertEqual(comp_list[i], d.pop_last())
self.assertEqual(d.size(), 0)
| 32.095238
| 57
| 0.582097
| 284
| 2,022
| 3.975352
| 0.151408
| 0.212578
| 0.198406
| 0.141718
| 0.846767
| 0.736935
| 0.705049
| 0.705049
| 0.64659
| 0.64659
| 0
| 0.023546
| 0.285856
| 2,022
| 62
| 58
| 32.612903
| 0.75831
| 0
| 0
| 0.607143
| 0
| 0
| 0.04451
| 0
| 0
| 0
| 0
| 0
| 0.357143
| 1
| 0.107143
| false
| 0
| 0.053571
| 0
| 0.178571
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2af3de4e1a9de3b836334a5b91c69a24dc7a02c9
| 121
|
py
|
Python
|
tests/test_local/__init__.py
|
amih90/bacpypes
|
27ab4f18aa252ceb6ffdc32d53af2995a2e92647
|
[
"MIT"
] | 240
|
2015-07-17T16:27:54.000Z
|
2022-03-29T13:53:06.000Z
|
tests/test_local/__init__.py
|
amih90/bacpypes
|
27ab4f18aa252ceb6ffdc32d53af2995a2e92647
|
[
"MIT"
] | 400
|
2015-07-23T05:37:52.000Z
|
2022-03-29T12:32:30.000Z
|
tests/test_local/__init__.py
|
amih90/bacpypes
|
27ab4f18aa252ceb6ffdc32d53af2995a2e92647
|
[
"MIT"
] | 143
|
2015-07-17T18:22:27.000Z
|
2022-03-22T01:21:24.000Z
|
#!/usr/bin/python
"""
Test Local Schedule
"""
from . import test_local_schedule_1
from . import test_local_schedule_2
| 12.1
| 35
| 0.752066
| 18
| 121
| 4.722222
| 0.555556
| 0.317647
| 0.6
| 0.447059
| 0.635294
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019231
| 0.140496
| 121
| 9
| 36
| 13.444444
| 0.798077
| 0.297521
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
632dd69c68a240ff1c80fe4559e496ee60bc25f5
| 806
|
py
|
Python
|
week12/data_reader/paths.py
|
balintmaci/drone_intro_exercises
|
1d8b839fecd6b0c5e33210b9a88fd741a71034cc
|
[
"Unlicense"
] | null | null | null |
week12/data_reader/paths.py
|
balintmaci/drone_intro_exercises
|
1d8b839fecd6b0c5e33210b9a88fd741a71034cc
|
[
"Unlicense"
] | null | null | null |
week12/data_reader/paths.py
|
balintmaci/drone_intro_exercises
|
1d8b839fecd6b0c5e33210b9a88fd741a71034cc
|
[
"Unlicense"
] | null | null | null |
SENSOR_COMBINED_5 = 'csv_files/TEST5_30-01-19/TEST5_30-01-19_sensor_combined_0.csv'
VELOCITY_5 = 'csv_files/TEST5_30-01-19/TEST5_30-01-19_vehicle_gps_position_0.csv'
MANUAL_CONTROLLED_SETPOINT_5 = 'csv_files/TEST5_30-01-19/TEST5_30-01-19_manual_control_setpoint_0.csv'
SENSOR_COMBINED_8 = 'csv_files/TEST8_30-01-19/TEST8_30-01-19_sensor_combined_0.csv'
VELOCITY_8 = 'csv_files/TEST8_30-01-19/TEST8_30-01-19_vehicle_gps_position_0.csv'
MANUAL_CONTROLLED_SETPOINT_8 = 'csv_files/TEST8_30-01-19/TEST8_30-01-19_manual_control_setpoint_0.csv'
SENSOR_COMBINED_9 = 'csv_files/TEST9_08-02-19/TEST9_08-02-19_sensor_combined_0.csv'
VELOCITY_9 = 'csv_files/TEST9_08-02-19/TEST9_08-02-19_vehicle_gps_position_0.csv'
MANUAL_CONTROLLED_SETPOINT_9 = 'csv_files/TEST9_08-02-19/TEST9_08-02-19_manual_control_setpoint_0.csv'
| 89.555556
| 102
| 0.856079
| 159
| 806
| 3.849057
| 0.150943
| 0.078431
| 0.117647
| 0.107843
| 0.977124
| 0.977124
| 0.893791
| 0.893791
| 0.808824
| 0.73366
| 0
| 0.184615
| 0.032258
| 806
| 9
| 103
| 89.555556
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0.728625
| 0.728625
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
6330e769e4b1e3aa50afa9ebb57230c2549aebd6
| 6,843
|
py
|
Python
|
pip_services3_expressions-3.3.4/pip_services3_expressions/variants/IVariantOperations.py
|
pip-services3-python/pip-services3-expressions-python
|
4ea237fbbba32e62f920e6be3bd48e6cc02184e5
|
[
"MIT"
] | null | null | null |
pip_services3_expressions-3.3.4/pip_services3_expressions/variants/IVariantOperations.py
|
pip-services3-python/pip-services3-expressions-python
|
4ea237fbbba32e62f920e6be3bd48e6cc02184e5
|
[
"MIT"
] | null | null | null |
pip_services3_expressions-3.3.4/pip_services3_expressions/variants/IVariantOperations.py
|
pip-services3-python/pip-services3-expressions-python
|
4ea237fbbba32e62f920e6be3bd48e6cc02184e5
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from abc import ABC
from pip_services3_expressions.variants.Variant import Variant
from pip_services3_expressions.variants.VariantType import VariantType
class IVariantOperations(ABC):
"""
Defines an interface for variant operations manager.
"""
def convert(self, value: Variant, new_type: VariantType) -> Variant:
"""
Converts variant to specified type
:param value: A variant value to be converted.
:param new_type: A type of object to be returned.
:return: A converted Variant value.
"""
def add(self, value1: Variant, value2: Variant) -> Variant:
"""
Performs '+' operation for two variants.
:param value1: The first operand for this operation.
:param value2: The second operand for this operation.
:return: A result variant object.
"""
def sub(self, value1: Variant, value2: Variant) -> Variant:
"""
Performs '-' operation for two variants.
:param value1: The first operand for this operation.
:param value2: The second operand for this operation.
:return: A result variant object.
"""
def mul(self, value1: Variant, value2: Variant) -> Variant:
"""
Performs '*' operation for two variants.
:param value1: The first operand for this operation.
:param value2: The second operand for this operation.
:return: A result variant object.
"""
def div(self, value1: Variant, value2: Variant) -> Variant:
"""
Performs '/' operation for two variants.
:param value1: The first operand for this operation.
:param value2: The second operand for this operation.
:return: A result variant object.
"""
def mod(self, value1: Variant, value2: Variant) -> Variant:
"""
Performs '%' operation for two variants.
:param value1: The first operand for this operation.
:param value2: The second operand for this operation.
:return: A result variant object.
"""
def pow(self, value1: Variant, value2: Variant) -> Variant:
"""
Performs '^' operation for two variants.
:param value1: The first operand for this operation.
:param value2: The second operand for this operation.
:return: A result variant object.
"""
def and_(self, value1: Variant, value2: Variant) -> Variant:
"""
Performs AND operation for two variants.
:param value1: The first operand for this operation.
:param value2: The second operand for this operation.
:return: A result variant object.
"""
def or_(self, value1: Variant, value2: Variant) -> Variant:
"""
Performs OR operation for two variants.
:param value1: The first operand for this operation.
:param value2: The second operand for this operation.
:return: A result variant object.
"""
def xor(self, value1: Variant, value2: Variant) -> Variant:
"""
Performs XOR operation for two variants.
:param value1: The first operand for this operation.
:param value2: The second operand for this operation.
:return: A result variant object.
"""
def lsh(self, value1: Variant, value2: Variant) -> Variant:
"""
Performs << operation for two variants.
:param value1: The first operand for this operation.
:param value2: The second operand for this operation.
:return: A result variant object.
"""
def rsh(self, value1: Variant, value2: Variant) -> Variant:
"""
Performs >> operation for two variants.
:param value1: The first operand for this operation.
:param value2: The second operand for this operation.
:return: A result variant object.
"""
def not_(self, value: Variant) -> Variant:
    """
    Performs NOT operation for a variant.
    :param value: The operand for this operation.
    :return: A result variant object.
    """
def negative(self, value: Variant) -> Variant:
"""
Performs unary '-' operation for a variant.
:param value: The operand for this operation.
:return: A result variant object.
"""
def equal(self, value1: Variant, value2: Variant) -> Variant:
"""
Performs '=' operation for two variants.
:param value1: The first operand for this operation.
:param value2: The second operand for this operation.
:return: A result variant object.
"""
def not_equal(self, value1: Variant, value2: Variant) -> Variant:
"""
Performs '<>' operation for two variants.
:param value1: The first operand for this operation.
:param value2: The second operand for this operation.
:return: A result variant object.
"""
def more(self, value1: Variant, value2: Variant) -> Variant:
    """
    Performs '>' operation for two variants.
    :param value1: The first operand for this operation.
    :param value2: The second operand for this operation.
    :return: A result variant object.
    """
def less(self, value1: Variant, value2: Variant) -> Variant:
    """
    Performs '<' operation for two variants.
    :param value1: The first operand for this operation.
    :param value2: The second operand for this operation.
    :return: A result variant object.
    """
def more_equal(self, value1: Variant, value2: Variant) -> Variant:
    """
    Performs '>=' operation for two variants.
    :param value1: The first operand for this operation.
    :param value2: The second operand for this operation.
    :return: A result variant object.
    """
def less_equal(self, value1: Variant, value2: Variant) -> Variant:
    """
    Performs '<=' operation for two variants.
    :param value1: The first operand for this operation.
    :param value2: The second operand for this operation.
    :return: A result variant object.
    """
def in_(self, value1: Variant, value2: Variant) -> Variant:
"""
Performs IN operation for two variants.
:param value1: The first operand for this operation.
:param value2: The second operand for this operation.
:return: A result variant object.
"""
def get_element(self, value1: Variant, value2: Variant) -> Variant:
"""
Performs [] operation for two variants.
:param value1: The first operand for this operation.
:param value2: The second operand for this operation.
:return: A result variant object.
"""
| 32.741627
| 72
| 0.616104
| 772
| 6,843
| 5.443005
| 0.088083
| 0.095193
| 0.13327
| 0.218943
| 0.900048
| 0.86316
| 0.86316
| 0.820324
| 0.820324
| 0.820324
| 0
| 0.01637
| 0.294754
| 6,843
| 208
| 73
| 32.899038
| 0.854331
| 0.57957
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.846154
| false
| 0
| 0.115385
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
2dc2147f6dc3ed57700bc7a345fc6b9eda15ca69
| 3,583
|
py
|
Python
|
Chapter09/chefbot_code/chefbot_bringup/scripts/bkup_working/murray_demo.py
|
AIHZP/ROS-Robotics-Projects-published-by-Packt
|
bb44f034d7bd14cb715c008f92cd835bb3753867
|
[
"MIT"
] | 42
|
2018-02-04T17:20:53.000Z
|
2022-03-07T14:44:09.000Z
|
Chapter09/chefbot_code/chefbot_bringup/scripts/bkup_working/murray_demo.py
|
AIHZP/ROS-Robotics-Projects-published-by-Packt
|
bb44f034d7bd14cb715c008f92cd835bb3753867
|
[
"MIT"
] | 2
|
2018-05-17T02:30:37.000Z
|
2019-03-21T19:03:20.000Z
|
Chapter09/chefbot_code/chefbot_bringup/scripts/bkup_working/murray_demo.py
|
AIHZP/ROS-Robotics-Projects-published-by-Packt
|
bb44f034d7bd14cb715c008f92cd835bb3753867
|
[
"MIT"
] | 23
|
2018-05-07T07:39:46.000Z
|
2021-08-19T03:24:29.000Z
|
#!/usr/bin/env python
# Murray demo: drives the robot through a repeating straight/turn/nudge/turn
# pattern by publishing Twist messages on the teleop cmd_vel mux topic.
import roslib
import rospy
import time
from geometry_msgs.msg import Twist
if __name__== "__main__":
    rospy.init_node('murray_demo')
    # queue_size=1: only the latest command matters; stale commands are dropped.
    pub = rospy.Publisher("cmd_vel_mux/input/teleop",Twist, queue_size=1)
    # NOTE(review): loops forever; `while not rospy.is_shutdown()` would let
    # the node exit cleanly on shutdown -- confirm intended behaviour.
    while True:
        # Go straight: 15 commands at 10 Hz (~1.5 s of motion).
        # linear.x is negative -- presumably "forward" for this base's sign
        # convention; verify against the robot driver.
        for i in range(0,15):
            twist = Twist()
            twist.linear.x = -0.1
            twist.linear.y = 0
            twist.linear.z = 0
            twist.angular.x = 0
            twist.angular.y = 0
            twist.angular.z = 0
            pub.publish(twist)
            r = rospy.Rate(10)
            r.sleep()
        time.sleep(2)
        # Make a turn: one command with a small negative yaw rate.
        for i in range(0,1):
            twist = Twist()
            twist.linear.x = 0
            twist.linear.y = 0
            twist.linear.z = 0
            twist.angular.x = 0
            twist.angular.y = 0
            twist.angular.z = -0.002
            pub.publish(twist)
            r = rospy.Rate(1)
            r.sleep()
        time.sleep(2)
        # Move a little forward (very small velocity, single command).
        for i in range(0,1):
            twist = Twist()
            twist.linear.x = -0.001
            twist.linear.y = 0
            twist.linear.z = 0
            twist.angular.x = 0
            twist.angular.y = 0
            twist.angular.z = 0
            pub.publish(twist)
            r = rospy.Rate(10)
            r.sleep()
        time.sleep(2)
        # Make a turn again (same yaw rate, different rate period).
        for i in range(0,1):
            twist = Twist()
            twist.linear.x = 0
            twist.linear.y = 0
            twist.linear.z = 0
            twist.angular.x = 0
            twist.angular.y = 0
            twist.angular.z = -0.002
            pub.publish(twist)
            r = rospy.Rate(2)
            r.sleep()
        time.sleep(2)
###################################################################################3
'''
#Go straigt
for i in range(0,15):
twist = Twist()
twist.linear.x = -0.1
twist.linear.y = 0
twist.linear.z = 0
twist.angular.x = 0
twist.angular.y = 0
twist.angular.z = 0
pub.publish(twist)
r = rospy.Rate(10)
r.sleep()
time.sleep(2)
#Make a turn
for i in range(0,1):
twist = Twist()
twist.linear.x = 0
twist.linear.y = 0
twist.linear.z = 0
twist.angular.x = 0
twist.angular.y = 0
twist.angular.z = 0.002
pub.publish(twist)
r = rospy.Rate(10)
r.sleep()
time.sleep(2)
#Move a little forward
for i in range(0,1):
twist = Twist()
twist.linear.x = -0.001
twist.linear.y = 0
twist.linear.z = 0
twist.angular.x = 0
twist.angular.y = 0
twist.angular.z = 0
pub.publish(twist)
r = rospy.Rate(10)
r.sleep()
time.sleep(2)
#Make a turn
for i in range(0,1):
twist = Twist()
twist.linear.x = 0
twist.linear.y = 0
twist.linear.z = 0
twist.angular.x = 0
twist.angular.y = 0
twist.angular.z = 0.002
pub.publish(twist)
r = rospy.Rate(10)
r.sleep()
time.sleep(4)
for i in range(0,15):
twist = Twist()
twist.linear.x = -0.1
twist.linear.y = 0
twist.linear.z = 0
twist.angular.x = 0
twist.angular.y = 0
twist.angular.z = 0
pub.publish(twist)
r = rospy.Rate(10)
r.sleep()
time.sleep(2)
for i in range(0,8):
twist = Twist()
twist.linear.x = 0
twist.linear.y = 0
twist.linear.z = 0
twist.angular.x = 0
twist.angular.y = 0
twist.angular.z = -0.2
pub.publish(twist)
r = rospy.Rate(10)
r.sleep()
'''
| 15.118143
| 85
| 0.493999
| 512
| 3,583
| 3.429688
| 0.121094
| 0.153759
| 0.222096
| 0.062642
| 0.892939
| 0.876993
| 0.876993
| 0.876993
| 0.876993
| 0.8582
| 0
| 0.05849
| 0.360592
| 3,583
| 236
| 86
| 15.182203
| 0.707988
| 0.023723
| 0
| 0.767857
| 0
| 0
| 0.03007
| 0.016783
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.071429
| 0
| 0.071429
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2df1915e041c6a8614c2e7cc1c94ab50493baa2d
| 12,809
|
py
|
Python
|
tests/handler_table_test.py
|
gleb-chipiga/aiotgbot
|
2b8a889a3642ae36f1e5ab7659dc54ff6e62a95d
|
[
"MIT"
] | null | null | null |
tests/handler_table_test.py
|
gleb-chipiga/aiotgbot
|
2b8a889a3642ae36f1e5ab7659dc54ff6e62a95d
|
[
"MIT"
] | null | null | null |
tests/handler_table_test.py
|
gleb-chipiga/aiotgbot
|
2b8a889a3642ae36f1e5ab7659dc54ff6e62a95d
|
[
"MIT"
] | null | null | null |
import re
import pytest
from aiotgbot.api_types import Message, Update
from aiotgbot.bot import (Bot, Handler, HandlerCallable, HandlerTableProtocol,
PollBot)
from aiotgbot.bot_update import BotUpdate, Context
from aiotgbot.constants import ContentType, UpdateType
from aiotgbot.filters import (CallbackQueryDataFilter, CommandsFilter,
ContentTypeFilter, GroupChatFilter,
MessageTextFilter, PrivateChatFilter,
StateFilter, UpdateTypeFilter)
from aiotgbot.handler_table import HandlerTable
from aiotgbot.storage_memory import MemoryStorage
@pytest.fixture
def handler() -> HandlerCallable:
    """Return a no-op async handler used to populate handler tables in tests."""
    async def _handler(_: Bot, __: BotUpdate) -> None: ...
    return _handler
def test_protocol() -> None:
ht: HandlerTableProtocol = HandlerTable()
assert isinstance(ht, HandlerTableProtocol)
def test_freeze(handler: HandlerCallable) -> None:
    # Registration works until freeze(); afterwards the table is immutable
    # and further registration raises RuntimeError.
    ht = HandlerTable()
    assert not ht.frozen
    ht.message_handler(handler, state='state1', commands=['command1'],
                       content_types=[ContentType.CONTACT],
                       text_match='pattern',
                       filters=[PrivateChatFilter()])
    ht.freeze()
    assert ht.frozen
    with pytest.raises(RuntimeError, match='Cannot modify frozen list.'):
        ht.message_handler(handler, state='state1', commands=['command1'],
                           content_types=[ContentType.CONTACT],
                           text_match='pattern',
                           filters=[PrivateChatFilter()])
def test_message_handler(handler: HandlerCallable) -> None:
ht = HandlerTable()
ht.message_handler(handler, state='state1', commands=['command1'],
content_types=[ContentType.CONTACT],
text_match=re.compile('pattern'),
filters=[PrivateChatFilter()])
assert ht._handlers == [Handler(handler, filters=(
UpdateTypeFilter(UpdateType.MESSAGE),
StateFilter('state1'),
CommandsFilter(('command1',)),
ContentTypeFilter((ContentType.CONTACT,)),
MessageTextFilter(re.compile('pattern')),
PrivateChatFilter()
))]
def test_message(handler: HandlerCallable) -> None:
ht = HandlerTable()
ht.message(state='state1', commands=['command1'],
content_types=[ContentType.CONTACT],
text_match='pattern',
filters=[PrivateChatFilter()])(handler)
assert ht._handlers == [Handler(handler, filters=(
UpdateTypeFilter(UpdateType.MESSAGE),
StateFilter('state1'),
CommandsFilter(('command1',)),
ContentTypeFilter((ContentType.CONTACT,)),
MessageTextFilter(re.compile('pattern')),
PrivateChatFilter()
))]
def test_edited_message_handler(
handler: HandlerCallable
) -> None:
ht = HandlerTable()
ht.edited_message_handler(handler,
state='state1',
filters=[GroupChatFilter()])
assert ht._handlers == [Handler(handler, filters=(
UpdateTypeFilter(UpdateType.EDITED_MESSAGE),
StateFilter('state1'),
GroupChatFilter()
))]
def test_edited_message(handler: HandlerCallable) -> None:
ht = HandlerTable()
ht.edited_message(state='state1',
filters=[GroupChatFilter()])(handler)
assert ht._handlers == [Handler(handler, filters=(
UpdateTypeFilter(UpdateType.EDITED_MESSAGE),
StateFilter('state1'),
GroupChatFilter()
))]
def test_channel_post_handler(handler: HandlerCallable) -> None:
ht = HandlerTable()
ht.channel_post_handler(handler,
state='state1',
filters=[GroupChatFilter()])
assert ht._handlers == [Handler(handler, filters=(
UpdateTypeFilter(UpdateType.CHANNEL_POST),
StateFilter('state1'),
GroupChatFilter()
))]
def test_table_channel_post(handler: HandlerCallable) -> None:
ht = HandlerTable()
ht.channel_post(state='state1',
filters=[GroupChatFilter()])(handler)
assert ht._handlers == [Handler(handler, filters=(
UpdateTypeFilter(UpdateType.CHANNEL_POST),
StateFilter('state1'),
GroupChatFilter()
))]
def test_edited_channel_post_handler(handler: HandlerCallable) -> None:
ht = HandlerTable()
ht.edited_channel_post_handler(handler,
state='state1',
filters=[GroupChatFilter()])
assert ht._handlers == [Handler(handler, filters=(
UpdateTypeFilter(UpdateType.EDITED_CHANNEL_POST),
StateFilter('state1'),
GroupChatFilter()
))]
def test_table_edited_channel_post(handler: HandlerCallable) -> None:
ht = HandlerTable()
ht.edited_channel_post(state='state1',
filters=[GroupChatFilter()])(handler)
assert ht._handlers == [Handler(handler, filters=(
UpdateTypeFilter(UpdateType.EDITED_CHANNEL_POST),
StateFilter('state1'),
GroupChatFilter()
))]
def test_inline_query_handler(handler: HandlerCallable) -> None:
ht = HandlerTable()
ht.inline_query_handler(handler,
state='state1',
filters=[GroupChatFilter()])
assert ht._handlers == [Handler(handler, filters=(
UpdateTypeFilter(UpdateType.INLINE_QUERY),
StateFilter('state1'),
GroupChatFilter()
))]
def test_table_inline_query(handler: HandlerCallable) -> None:
ht = HandlerTable()
ht.inline_query(state='state1',
filters=[GroupChatFilter()])(handler)
assert ht._handlers == [Handler(handler, filters=(
UpdateTypeFilter(UpdateType.INLINE_QUERY),
StateFilter('state1'),
GroupChatFilter()
))]
def test_chosen_inline_result_handler(
handler: HandlerCallable
) -> None:
ht = HandlerTable()
ht.chosen_inline_result_handler(handler,
state='state1',
filters=[GroupChatFilter()])
assert ht._handlers == [Handler(handler, filters=(
UpdateTypeFilter(UpdateType.CHOSEN_INLINE_RESULT),
StateFilter('state1'),
GroupChatFilter()
))]
def test_chosen_inline_result(handler: HandlerCallable) -> None:
ht = HandlerTable()
ht.chosen_inline_result(state='state1',
filters=[GroupChatFilter()])(handler)
assert ht._handlers == [Handler(handler, filters=(
UpdateTypeFilter(UpdateType.CHOSEN_INLINE_RESULT),
StateFilter('state1'),
GroupChatFilter()
))]
def test_callback_query_handler(handler: HandlerCallable) -> None:
ht = HandlerTable()
ht.callback_query_handler(handler,
state='state1',
data_match=re.compile('pattern'),
filters=[GroupChatFilter()])
assert ht._handlers == [Handler(handler, filters=(
UpdateTypeFilter(UpdateType.CALLBACK_QUERY),
StateFilter('state1'),
CallbackQueryDataFilter(re.compile('pattern')),
GroupChatFilter()
))]
def test_callback_query(handler: HandlerCallable) -> None:
ht = HandlerTable()
ht.callback_query(state='state1',
data_match='pattern',
filters=[GroupChatFilter()])(handler)
assert ht._handlers == [Handler(handler, filters=(
UpdateTypeFilter(UpdateType.CALLBACK_QUERY),
StateFilter('state1'),
CallbackQueryDataFilter(re.compile('pattern')),
GroupChatFilter()
))]
def test_shipping_query_handler(handler: HandlerCallable) -> None:
ht = HandlerTable()
ht.shipping_query_handler(handler,
state='state1',
filters=[GroupChatFilter()])
assert ht._handlers == [Handler(handler, filters=(
UpdateTypeFilter(UpdateType.SHIPPING_QUERY),
StateFilter('state1'),
GroupChatFilter()
))]
def test_shipping_query(handler: HandlerCallable) -> None:
ht = HandlerTable()
ht.shipping_query(state='state1',
filters=[GroupChatFilter()])(handler)
assert ht._handlers == [Handler(handler, filters=(
UpdateTypeFilter(UpdateType.SHIPPING_QUERY),
StateFilter('state1'),
GroupChatFilter()
))]
def test_pre_checkout_query_handler(handler: HandlerCallable) -> None:
ht = HandlerTable()
ht.pre_checkout_query_handler(handler,
state='state1',
filters=[GroupChatFilter()])
assert ht._handlers == [Handler(handler, filters=(
UpdateTypeFilter(UpdateType.PRE_CHECKOUT_QUERY),
StateFilter('state1'),
GroupChatFilter()
))]
def test_pre_checkout_query(handler: HandlerCallable) -> None:
ht = HandlerTable()
ht.pre_checkout_query(state='state1',
filters=[GroupChatFilter()])(handler)
assert ht._handlers == [Handler(handler, filters=(
UpdateTypeFilter(UpdateType.PRE_CHECKOUT_QUERY),
StateFilter('state1'),
GroupChatFilter()
))]
def test_poll_handler(handler: HandlerCallable) -> None:
ht = HandlerTable()
ht.poll_handler(handler, state='state1', filters=[GroupChatFilter()])
assert ht._handlers == [Handler(handler, filters=(
UpdateTypeFilter(UpdateType.POLL),
StateFilter('state1'),
GroupChatFilter()
))]
def test_poll(handler: HandlerCallable) -> None:
ht = HandlerTable()
ht.poll(state='state1', filters=[GroupChatFilter()])(handler)
assert ht._handlers == [Handler(handler, filters=(
UpdateTypeFilter(UpdateType.POLL),
StateFilter('state1'),
GroupChatFilter()
))]
def test_poll_answer_handler(handler: HandlerCallable) -> None:
ht = HandlerTable()
ht.poll_answer_handler(handler, state='state1',
filters=[GroupChatFilter()])
assert ht._handlers == [Handler(handler, filters=(
UpdateTypeFilter(UpdateType.POLL_ANSWER),
StateFilter('state1'),
GroupChatFilter()
))]
def test_poll_answer(handler: HandlerCallable) -> None:
ht = HandlerTable()
ht.poll_answer(state='state1', filters=[GroupChatFilter()])(handler)
assert ht._handlers == [Handler(handler, filters=(
UpdateTypeFilter(UpdateType.POLL_ANSWER),
StateFilter('state1'),
GroupChatFilter()
))]
def test_my_chat_member_handler(handler: HandlerCallable) -> None:
ht = HandlerTable()
ht.my_chat_member_handler(handler,
state='state1',
filters=[GroupChatFilter()])
assert ht._handlers == [Handler(handler, filters=(
UpdateTypeFilter(UpdateType.MY_CHAT_MEMBER),
StateFilter('state1'),
GroupChatFilter()
))]
def test_my_chat_member(handler: HandlerCallable) -> None:
ht = HandlerTable()
ht.my_chat_member(state='state1',
filters=[GroupChatFilter()])(handler)
assert ht._handlers == [Handler(handler, filters=(
UpdateTypeFilter(UpdateType.MY_CHAT_MEMBER),
StateFilter('state1'),
GroupChatFilter()
))]
def test_chat_member_handler(handler: HandlerCallable) -> None:
ht = HandlerTable()
ht.chat_member_handler(handler,
state='state1',
filters=[GroupChatFilter()])
assert ht._handlers == [Handler(handler, filters=(
UpdateTypeFilter(UpdateType.CHAT_MEMBER),
StateFilter('state1'),
GroupChatFilter()
))]
def test_chat_member(handler: HandlerCallable) -> None:
ht = HandlerTable()
ht.chat_member(state='state1',
filters=[GroupChatFilter()])(handler)
assert ht._handlers == [Handler(handler, filters=(
UpdateTypeFilter(UpdateType.CHAT_MEMBER),
StateFilter('state1'),
GroupChatFilter()
))]
@pytest.mark.asyncio
async def test_get_handler(handler: HandlerCallable) -> None:
    # A handler registered for state1 is returned only for updates whose
    # BotUpdate state matches; any other state yields None.
    ht = HandlerTable()
    ht.message(state='state1')(handler)
    ht.freeze()
    _bot = PollBot('token', ht, MemoryStorage())
    ctx = Context({'key1': 'str1', 'key2': 'str2', 'key3': 4})
    message = Message.from_dict({'message_id': 1, 'date': 1,
                                 'chat': {'id': 1, 'type': 'private'}})
    bu1 = BotUpdate('state1', ctx, Update(update_id=1, message=message))
    assert await ht.get_handler(_bot, bu1) == handler
    bu2 = BotUpdate('state2', ctx, Update(update_id=2, message=message))
    assert await ht.get_handler(_bot, bu2) is None
| 32.34596
| 78
| 0.623078
| 1,100
| 12,809
| 7.069091
| 0.093636
| 0.099023
| 0.093621
| 0.100823
| 0.863169
| 0.847479
| 0.841178
| 0.831019
| 0.715792
| 0.6241
| 0
| 0.008217
| 0.258959
| 12,809
| 395
| 79
| 32.427848
| 0.810999
| 0
| 0
| 0.705882
| 0
| 0
| 0.042314
| 0
| 0
| 0
| 0
| 0
| 0.101307
| 1
| 0.094771
| false
| 0
| 0.029412
| 0
| 0.127451
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fa8c29e531f1c70ce625c1cfd088001f1dfe697f
| 9,606
|
py
|
Python
|
SBaaS_isotopomer/stage01_isotopomer_spectrumAccuracy_io.py
|
dmccloskey/SBaaS_isotopomer
|
b669abd6e41034739a2c53d855005753e658c436
|
[
"MIT"
] | null | null | null |
SBaaS_isotopomer/stage01_isotopomer_spectrumAccuracy_io.py
|
dmccloskey/SBaaS_isotopomer
|
b669abd6e41034739a2c53d855005753e658c436
|
[
"MIT"
] | null | null | null |
SBaaS_isotopomer/stage01_isotopomer_spectrumAccuracy_io.py
|
dmccloskey/SBaaS_isotopomer
|
b669abd6e41034739a2c53d855005753e658c436
|
[
"MIT"
] | null | null | null |
# System
import json
from .stage01_isotopomer_spectrumAccuracy_query import stage01_isotopomer_spectrumAccuracy_query
from SBaaS_base.sbaas_template_io import sbaas_template_io
# Resources
from io_utilities.base_importData import base_importData
from io_utilities.base_exportData import base_exportData
class stage01_isotopomer_spectrumAccuracy_io(stage01_isotopomer_spectrumAccuracy_query,sbaas_template_io):
    '''Export routines that compare measured (averaged) isotopomer spectra
    against their theoretical spectra and write the comparison to csv.

    The two public export methods were near-identical copies differing only
    in which family of query methods they call (``dataStage01Averages`` vs
    ``dataStage01AveragesNormSum``) and one extra header column; the shared
    logic lives in ``_export_compareSpectrum``.
    '''

    def export_compareAveragesSpectrumToTheoretical(self, experiment_id_I, filename, sample_name_abbreviations_I=None,scan_types_I=None,met_ids_I = None):
        '''export a comparison of calculated spectrum to theoretical spectrum

        :param experiment_id_I: experiment id to export
        :param filename: output csv file name
        :param sample_name_abbreviations_I: optional sample-name filter
        :param scan_types_I: optional scan-type filter
        :param met_ids_I: optional metabolite-id filter
        '''
        self._export_compareSpectrum(
            experiment_id_I, filename,
            sample_name_abbreviations_I, scan_types_I, met_ids_I,
            table_suffix='dataStage01Averages',
            include_average_accuracy=False)

    def export_compareAveragesNormSumSpectrumToTheoretical(self, experiment_id_I, filename, sample_name_abbreviations_I=None,scan_types_I=None,met_ids_I = None):
        '''export a comparison of calculated spectrum to theoretical spectrum
        (normalized-sum averages; output gains an average_accuracy column)

        :param experiment_id_I: experiment id to export
        :param filename: output csv file name
        :param sample_name_abbreviations_I: optional sample-name filter
        :param scan_types_I: optional scan-type filter
        :param met_ids_I: optional metabolite-id filter
        '''
        self._export_compareSpectrum(
            experiment_id_I, filename,
            sample_name_abbreviations_I, scan_types_I, met_ids_I,
            table_suffix='dataStage01AveragesNormSum',
            include_average_accuracy=True)

    def _export_compareSpectrum(self, experiment_id_I, filename,
                                sample_name_abbreviations_I, scan_types_I, met_ids_I,
                                table_suffix, include_average_accuracy):
        '''Shared implementation for the two public export methods above.'''
        # Resolve the suffix-specific query helpers once, up front.
        get_time_points = getattr(self, 'get_timePoint_experimentID_' + table_suffix)
        get_snas = getattr(self, 'get_sampleNameAbbreviations_experimentIDAndSampleTypeAndTimePoint_' + table_suffix)
        get_scan_types = getattr(self, 'get_scanTypes_experimentIDAndTimePointAndSampleAbbreviationsAndSampleType_' + table_suffix)
        get_met_ids = getattr(self, 'get_metIDs_experimentIDAndSampleAbbreviationAndTimePointAndSampleTypeAndScanType_' + table_suffix)
        get_precursor = getattr(self, 'get_dataPrecursorFragment_experimentIDAndTimePointSampleAbbreviationAndSampleTypeAndScanTypeAndMetID_' + table_suffix)
        get_product = getattr(self, 'get_dataProductFragment_experimentIDAndTimePointSampleAbbreviationAndSampleTypeAndScanTypeAndMetID_' + table_suffix)
        data = []
        time_points = get_time_points(experiment_id_I)
        for tp in time_points:
            print('Reporting average precursor and product spectrum from isotopomer normalized for time-point ' + str(tp))
            if sample_name_abbreviations_I:
                sample_abbreviations = sample_name_abbreviations_I
                # FIX: the original never populated sample_types_lst on this
                # branch, so caller-supplied abbreviations crashed with a
                # NameError below.  Default each to 'Unknown'.
                sample_types_lst = ['Unknown' for _ in sample_abbreviations]
            else:
                # Collect sample name abbreviations for both sample types,
                # keeping a parallel list of the type each came from.
                sample_abbreviations = []
                sample_types_lst = []
                for st in ['Unknown','QC']:
                    sample_abbreviations_tmp = get_snas(experiment_id_I,st,tp)
                    sample_abbreviations.extend(sample_abbreviations_tmp)
                    sample_types_lst.extend([st for _ in range(len(sample_abbreviations_tmp))])
            for sna_cnt,sna in enumerate(sample_abbreviations):
                print('Reporting average precursor and product spectrum from isotopomer normalized for sample name abbreviation ' + sna)
                # Query the available scan types, then restrict to the
                # caller's filter if one was given.
                scan_types = get_scan_types(experiment_id_I,tp,sna,sample_types_lst[sna_cnt])
                if scan_types_I:
                    scan_types = [st for st in scan_types if st in scan_types_I]
                for scan_type in scan_types:
                    print('Reporting average precursor and product spectrum for scan type ' + scan_type)
                    if not met_ids_I:
                        met_ids = get_met_ids(
                            experiment_id_I,sna,tp,sample_types_lst[sna_cnt],scan_type)
                    else:
                        met_ids = met_ids_I
                    if not met_ids: continue # no component information was found
                    for met in met_ids:
                        print('Reporting average precursor and product spectrum for metabolite ' + met)
                        data.extend(get_precursor(
                            experiment_id_I,sna,tp,sample_types_lst[sna_cnt],scan_type,met))
                        data.extend(get_product(
                            experiment_id_I,sna,tp,sample_types_lst[sna_cnt],scan_type,met))
        # Write the comparison to file.  Each measurement group (theoretical,
        # measured, measured_cv, abs_difference) spans 50 isotopomer columns
        # labelled a0..a49 on the second header row.
        headerL1 = (['sample_name_abbreviation','time_point','met_id','fragment_formula','C_pos','scan_type','theoretical'] + ['' for i in range(49)]
                    + ['measured'] + ['' for i in range(49)]
                    + ['measured_cv'] + ['' for i in range(49)]
                    + ['abs_difference'] + ['' for i in range(49)])
        headerL2 = (['' for i in range(6)] + ['a' + str(i) for i in range(50)]
                    + ['a' + str(i) for i in range(50)]
                    + ['a' + str(i) for i in range(50)]
                    + ['a' + str(i) for i in range(50)])
        if include_average_accuracy:
            headerL1 = headerL1 + ['average_accuracy']
            headerL2 = headerL2 + ['']
        header = [headerL1, headerL2]
        export = base_exportData(data)
        export.write_headersAndElements2csv(header,filename)
| 66.708333
| 193
| 0.626483
| 985
| 9,606
| 5.805076
| 0.115736
| 0.037775
| 0.020986
| 0.038475
| 0.894544
| 0.894544
| 0.894544
| 0.894544
| 0.894544
| 0.894544
| 0
| 0.012426
| 0.296273
| 9,606
| 144
| 194
| 66.708333
| 0.833432
| 0.071101
| 0
| 0.782258
| 0
| 0
| 0.102968
| 0.005396
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016129
| false
| 0
| 0.040323
| 0
| 0.064516
| 0.064516
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
faac5cef9ca180410acd0d10f0b53c9a2b833f5d
| 5,356
|
py
|
Python
|
userbot/plugins/mute.py
|
hussein2Me/Bot2Me
|
bd2c3020565c28df779fbe5361dbebf8e4588489
|
[
"MIT"
] | null | null | null |
userbot/plugins/mute.py
|
hussein2Me/Bot2Me
|
bd2c3020565c28df779fbe5361dbebf8e4588489
|
[
"MIT"
] | null | null | null |
userbot/plugins/mute.py
|
hussein2Me/Bot2Me
|
bd2c3020565c28df779fbe5361dbebf8e4588489
|
[
"MIT"
] | null | null | null |
from userbot.plugins.sql_helper.mute_sql import is_muted, mute, unmute
import asyncio
@command(outgoing=True, pattern=r"^.mute ?(\d+)?", allow_sudo=True)
async def startmute(event):
    """Mute a user in the current chat (``.mute [user_id]`` or reply)."""
    private = False
    if event.fwd_from:
        return
    elif event.is_private:
        await event.edit("قد تحدث مشاكل غير متوقعة أو أخطاء قبيحة!")
        await asyncio.sleep(3)
        private = True
    reply = await event.get_reply_message()
    # Resolve the target: explicit id argument > replied-to sender > the
    # private chat peer itself.
    if event.pattern_match.group(1) is not None:
        userid = event.pattern_match.group(1)
    elif reply is not None:
        userid = reply.sender_id
    elif private is True:
        userid = event.chat_id
    else:
        return await event.edit("يرجى الرد على مستخدم ليتم كتمه.")
    chat_id = event.chat_id
    chat = await event.get_chat()
    # Require delete-message admin rights, chat ownership, or a private chat.
    if "admin_rights" in vars(chat) and vars(chat)["admin_rights"] is not None:
        if chat.admin_rights.delete_messages is True:
            pass
        else:
            return await event.edit("لا يمكنك كتم شخص ما إذا لم يكن لديك إذن حذف الرسائل")
    elif "creator" in vars(chat):
        pass
    elif private:  # fix: truthiness check instead of `== True`
        pass
    else:
        return await event.edit("لا يمكنك كتم شخص بدون حقوق المسؤول")
    if is_muted(userid, chat_id):
        return await event.edit("هذا المستخدم موجود بالفعل في هذه الدردشة")
    try:
        mute(userid, chat_id)
    except Exception as e:
        # fix: the original message contained a broken "n\ " escape where a
        # newline was intended.
        await event.edit("حدث خطأ!\nخطأ " + str(e))
    else:
        await event.edit("تم كتم هذا الشخص بنجاح")
@command(outgoing=True, pattern=r"^.unmute ?(\d+)?", allow_sudo=True)
async def endmute(event):
    """Unmute a user in the current chat (``.unmute [user_id]`` or reply)."""
    private = False
    if event.fwd_from:
        return
    elif event.is_private:
        await event.edit("قد تحدث مشاكل غير متوقعة أو أخطاء قبيحة!")
        await asyncio.sleep(3)
        private = True
    reply = await event.get_reply_message()
    # Resolve the target: explicit id argument > replied-to sender > the
    # private chat peer itself.
    if event.pattern_match.group(1) is not None:
        userid = event.pattern_match.group(1)
    elif reply is not None:
        userid = reply.sender_id
    elif private is True:
        userid = event.chat_id
    else:
        return await event.edit("يرجى الرد على مستخدم أو إضافته إلى الأمر لإلغاء كتمه.")
    chat_id = event.chat_id
    if not is_muted(userid, chat_id):
        return await event.edit("هذا المستخدم لم يتم كتم في هذه المحادثة")
    try:
        unmute(userid, chat_id)
    except Exception as e:
        # fix: the original message contained a broken "n\ " escape where a
        # newline was intended.
        await event.edit("حدث خطأ!\nخطأ " + str(e))
    else:
        await event.edit("تم إلغاء كتم هذا الشخص بنجاح")
@command(incoming=True)
async def watcher(event):
    # Enforce mutes: silently delete every incoming message whose sender is
    # muted in this chat.
    if is_muted(event.sender_id, event.chat_id):
        await event.delete()
from userbot.plugins.sql_helper.mute_sql import is_muted, mute, unmute
import asyncio
@command(outgoing=True, pattern=r"^.mute ?(\d+)?")
async def startmute(event):
private = False
if event.fwd_from:
return
elif event.is_private:
await event.edit("قد تحدث مشاكل غير متوقعة أو أخطاء قبيحة!")
await asyncio.sleep(3)
private = True
reply = await event.get_reply_message()
if event.pattern_match.group(1) is not None:
userid = event.pattern_match.group(1)
elif reply is not None:
userid = reply.sender_id
elif private is True:
userid = event.chat_id
else:
return await event.edit("يرجى الرد على مستخدم ليتم كتمه.")
chat_id = event.chat_id
chat = await event.get_chat()
if "admin_rights" in vars(chat) and vars(chat)["admin_rights"] is not None:
if chat.admin_rights.delete_messages is True:
pass
else:
return await event.edit("لا يمكنك كتم شخص ما إذا لم يكن لديك إذن حذف الرسائل")
elif "creator" in vars(chat):
pass
elif private == True:
pass
else:
return await event.edit("لا يمكنك كتم شخص بدون حقوق المسؤول")
if is_muted(userid, chat_id):
return await event.edit("هذا المستخدم بالفعل مكتوم في هذه الدردشة")
try:
mute(userid, chat_id)
except Exception as e:
await event.edit("حدث خطأ! n\ خطأ" + str(e))
else:
await event.edit("تم كتم هذا الشخص بنجاح")
@command(outgoing=True, pattern=r"^.unmute ?(\d+)?")
async def endmute(event):
private = False
if event.fwd_from:
return
elif event.is_private:
await event.edit("قد تحدث مشاكل غير متوقعة أو أخطاء قبيحة!")
await asyncio.sleep(3)
private = True
reply = await event.get_reply_message()
if event.pattern_match.group(1) is not None:
userid = event.pattern_match.group(1)
elif reply is not None:
userid = reply.sender_id
elif private is True:
userid = event.chat_id
else:
return await event.edit("يرجى الرد على مستخدم أو إضافته إلى الأمر لإلغاء كتمه.")
chat_id = event.chat_id
if not is_muted(userid, chat_id):
return await event.edit("هذا المستخدم لم يتم كتم في هذه المحادثة")
try:
unmute(userid, chat_id)
except Exception as e:
await event.edit("حدث خطأ! n\ خطأ" + str(e))
else:
await event.edit("تم إلغاء كتم هذا الشخص بنجاح")
@command(incoming=True)
async def watcher(event):
    """Enforce mutes: delete any incoming message from a muted sender."""
    if not is_muted(event.sender_id, event.chat_id):
        return
    await event.delete()
| 35.236842
| 91
| 0.627707
| 768
| 5,356
| 4.278646
| 0.148438
| 0.097383
| 0.102252
| 0.073037
| 0.993305
| 0.993305
| 0.985393
| 0.985393
| 0.985393
| 0.985393
| 0
| 0.003112
| 0.28006
| 5,356
| 151
| 92
| 35.470199
| 0.849066
| 0
| 0
| 0.958333
| 0
| 0
| 0.180211
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.041667
| 0.027778
| 0
| 0.138889
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fac6ab38c01b638f2183de3b3b8b7e8434f6f0a0
| 59
|
py
|
Python
|
backend/debank/src/debank.py
|
Ben0p/token_tracker
|
d2816bd24deed30ea07422c39027de4f7e08bae4
|
[
"MIT"
] | null | null | null |
backend/debank/src/debank.py
|
Ben0p/token_tracker
|
d2816bd24deed30ea07422c39027de4f7e08bae4
|
[
"MIT"
] | null | null | null |
backend/debank/src/debank.py
|
Ben0p/token_tracker
|
d2816bd24deed30ea07422c39027de4f7e08bae4
|
[
"MIT"
] | 1
|
2021-07-18T05:58:59.000Z
|
2021-07-18T05:58:59.000Z
|
# Print the wallet address normalized to lowercase (hex addresses are
# case-insensitive; lowercasing gives a canonical comparison key).
ADDRESS = '0x8d8fC6ebe594FF30Ba34286F29F265d9Aab018Da'
print(ADDRESS.lower())
| 59
| 59
| 0.881356
| 3
| 59
| 17.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.389831
| 0
| 59
| 1
| 59
| 59
| 0.491525
| 0
| 0
| 0
| 0
| 0
| 0.7
| 0.7
| 0
| 0
| 0.7
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 10
|
4f0f31454c9e974200bc0fff33b68e4c11dcaddc
| 543
|
py
|
Python
|
python/projetos_dev/python_basico/aula_repeticao_acumuladores_while_else.py
|
jonfisik/Projects
|
7847f32c9e333cfca31cc127db175d9b4080ed0f
|
[
"MIT"
] | 2
|
2020-09-05T22:25:37.000Z
|
2021-06-01T21:34:54.000Z
|
python/projetos_dev/python_basico/aula_repeticao_acumuladores_while_else.py
|
jonfisik/Projects
|
7847f32c9e333cfca31cc127db175d9b4080ed0f
|
[
"MIT"
] | null | null | null |
python/projetos_dev/python_basico/aula_repeticao_acumuladores_while_else.py
|
jonfisik/Projects
|
7847f32c9e333cfca31cc127db175d9b4080ed0f
|
[
"MIT"
] | null | null | null |
'''
while / else - contadores e acumuladores
'''
# First pass: the loop condition eventually becomes false, so the
# while/else `else` branch runs after the loop finishes normally.
contador = 1
acumulador = 1
while contador <= 10:
    print(f'contador = {contador}, acumulador = {acumulador}')
    acumulador += contador
    contador += 1
else:
    print('Ainda estou aqui.')

print('-' * 20)

# Second pass: `break` exits the loop early, which skips the `else` branch;
# only the final print after the loop executes.
contador = 1
acumulador = 1
while contador <= 10:
    print(f'contador = {contador}, acumulador = {acumulador}')
    if contador > 5:
        break
    acumulador += contador
    contador += 1
else:
    print('Ainda estou aqui.')
print('Isso será executado.')
| 21.72
| 62
| 0.646409
| 61
| 543
| 5.754098
| 0.344262
| 0.2849
| 0.108262
| 0.11396
| 0.809117
| 0.809117
| 0.809117
| 0.809117
| 0.809117
| 0.809117
| 0
| 0.03066
| 0.219153
| 543
| 25
| 63
| 21.72
| 0.79717
| 0.073665
| 0
| 0.8
| 0
| 0
| 0.304435
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.3
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
87e669f8a1db6b422495db2c2d9d98b8bbca6175
| 56,913
|
py
|
Python
|
tests/test_routing.py
|
richpsharp/ecoshard
|
4f5e15835160901b83001e82496576236b2328e5
|
[
"Apache-2.0"
] | 6
|
2019-11-10T03:01:02.000Z
|
2020-04-14T10:27:53.000Z
|
tests/test_routing.py
|
richpsharp/ecoshard
|
4f5e15835160901b83001e82496576236b2328e5
|
[
"Apache-2.0"
] | 9
|
2019-11-09T22:08:45.000Z
|
2020-08-06T22:25:01.000Z
|
tests/test_routing.py
|
richpsharp/ecoshard
|
4f5e15835160901b83001e82496576236b2328e5
|
[
"Apache-2.0"
] | 2
|
2020-02-10T21:14:32.000Z
|
2020-07-31T01:17:18.000Z
|
"""ecoshard.geoprocessing.routing test suite."""
import os
import shutil
import tempfile
import unittest
from osgeo import gdal
import numpy
import numpy.testing
import scipy.interpolate
import ecoshard.geoprocessing
import ecoshard.geoprocessing.routing
from test_geoprocessing import _array_to_raster
class TestRouting(unittest.TestCase):
"""Tests for ecoshard.geoprocessing.routing."""
def setUp(self):
"""Create a temporary workspace that's deleted later."""
self.workspace_dir = tempfile.mkdtemp()
if not os.path.exists(self.workspace_dir):
os.makedirs(self.workspace_dir)
def tearDown(self):
"""Clean up remaining files."""
shutil.rmtree(self.workspace_dir)
    def test_pit_filling(self):
        """PGP.routing: test pitfilling."""
        base_path = os.path.join(self.workspace_dir, 'base.tif')
        # 11x11 flat DEM with a 5x5 interior pit and one low edge cell at
        # (0, 0); the edge cell can drain off the raster so it is not a pit.
        dem_array = numpy.zeros((11, 11), dtype=numpy.float32)
        dem_array[3:8, 3:8] = -1.0
        dem_array[0, 0] = -1.0
        _array_to_raster(dem_array, None, base_path)
        fill_path = os.path.join(self.workspace_dir, 'filled.tif')
        ecoshard.geoprocessing.routing.fill_pits(
            (base_path, 1), fill_path, working_dir=self.workspace_dir)
        result_array = ecoshard.geoprocessing.raster_to_numpy_array(fill_path)
        # Expected: interior pit raised to the plateau level (0.0); the
        # (0, 0) edge cell is left untouched by the comparison baseline.
        dem_array[3:8, 3:8] = 0.0
        numpy.testing.assert_almost_equal(result_array, dem_array)
    def test_pit_filling_large_border(self):
        """PGP.routing: test pitfilling with large nodata border."""
        os.makedirs(self.workspace_dir, exist_ok=True)
        base_path = os.path.join(self.workspace_dir, 'base.tif')
        nodata = -1.0
        n = 30
        dem_array = numpy.full((n, n), nodata, dtype=numpy.float32)
        # NOTE(review): this writes `nodata` over cells that are already
        # nodata, so it is a no-op — possibly a valid elevation was intended
        # for the interior here; confirm against the original test intent.
        dem_array[n//10:n-n//10,n//10:n-n//10] = nodata
        # make a pour point
        dem_array[n//10+1, n//10+1] = 8
        # make a pit
        dem_array[n//10+2:n//10-2+10, n//10+2:n//10-2+10] = 8
        dem_array[n//10+3:n//10-3+10, n//10+3:n//10-3+10] = 7
        _array_to_raster(dem_array, nodata, base_path)
        fill_path = os.path.join(self.workspace_dir, 'filled.tif')
        ecoshard.geoprocessing.routing.fill_pits(
            (base_path, 1), fill_path, working_dir=self.workspace_dir)
        result_array = ecoshard.geoprocessing.raster_to_numpy_array(fill_path)
        # The interior pit (value 7) should be raised to the rim height (8).
        expected_result = numpy.copy(dem_array)
        expected_result[n//10+2:n//10-2+10, n//10+2:n//10-2+10] = 8
        expected_path = os.path.join(self.workspace_dir, 'expected.tif')
        _array_to_raster(expected_result, nodata, expected_path)
        numpy.testing.assert_almost_equal(result_array, expected_result)
    def test_pit_filling_small_delta(self):
        """PGP.routing: test pitfilling on small delta."""
        base_path = os.path.join(self.workspace_dir, 'base.tif')
        dem_array = numpy.empty((4, 4), dtype=numpy.float32)
        # these values came from a real world dem that failed
        # (the two elevations differ by less than float32 epsilon at this
        # magnitude, which exercises numeric robustness of the fill).
        lower_val = 272.53228759765625
        higher_val = 272.5325012207031
        dem_array[:] = higher_val
        dem_array[2, 2] = lower_val
        expected_result = numpy.empty((4, 4), numpy.float32)
        expected_result[:] = higher_val
        _array_to_raster(dem_array, None, base_path)
        fill_path = os.path.join(self.workspace_dir, 'filled.tif')
        ecoshard.geoprocessing.routing.fill_pits(
            (base_path, 1), fill_path, working_dir=self.workspace_dir)
        result_array = ecoshard.geoprocessing.raster_to_numpy_array(fill_path)
        # Exact equality on purpose: the single low cell must be raised to
        # exactly `higher_val`, not to a nearby rounded value.
        self.assertTrue(
            (result_array == expected_result).all(),
            result_array == expected_result)
    def test_pit_filling_ignore_large_pit(self):
        """PGP.routing: test pitfilling but ignore large pits."""
        base_path = os.path.join(self.workspace_dir, 'base.tif')
        n = 256
        # create a big pit: interpolate a cone-like surface that is 10 at
        # the corners and 0 at the center.
        grid_x, grid_y = numpy.mgrid[0:n, 0:n]
        values = numpy.array([10, 10, 10, 10, 0])
        points = numpy.array(
            [(0, 0), (0, n-1), (n-1, 0), (n-1, n-1), (n//2, n//2)])
        pit_dem_array = scipy.interpolate.griddata(
            points, values, (grid_x, grid_y), method='linear')
        _array_to_raster(pit_dem_array, None, base_path)
        fill_path = os.path.join(self.workspace_dir, 'filled.tif')
        # First limit fill size to 100 pixels, should not fill the pit
        ecoshard.geoprocessing.routing.fill_pits(
            (base_path, 1), fill_path, working_dir=self.workspace_dir,
            max_pixel_fill_count=100)
        result_array = ecoshard.geoprocessing.raster_to_numpy_array(fill_path)
        self.assertTrue(
            (result_array == pit_dem_array).all(),
            result_array == pit_dem_array)
        # Let pit fill all the way
        ecoshard.geoprocessing.routing.fill_pits(
            (base_path, 1), fill_path, working_dir=self.workspace_dir,
            max_pixel_fill_count=1000000)
        # With no effective size limit the whole surface rises to the
        # corner height of 10.
        filled_array = numpy.full((n, n), 10.0)
        result_array = ecoshard.geoprocessing.raster_to_numpy_array(fill_path)
        self.assertTrue(
            (numpy.isclose(result_array, filled_array)).all(),
            f'{result_array == filled_array}, {result_array} {filled_array}')
    def test_pit_filling_path_band_checking(self):
        """PGP.routing: test pitfilling catches path-band formatting errors."""
        # A (path, band) tuple with a nonexistent path must raise.
        with self.assertRaises(ValueError):
            ecoshard.geoprocessing.routing.fill_pits(
                ('invalid path', 1), 'foo')
        # A bare string (not a (path, band) tuple) must also raise.
        with self.assertRaises(ValueError):
            ecoshard.geoprocessing.routing.fill_pits(
                'invalid path', 'foo')
    def test_pit_filling_nodata_int(self):
        """PGP.routing: test pitfilling with nodata value."""
        base_path = os.path.join(self.workspace_dir, 'base.tif')
        # Integer DEM with a pit, a low corner, and one nodata cell; the
        # nodata cell must be passed through untouched by the fill.
        dem_array = numpy.zeros((11, 11), dtype=numpy.int32)
        nodata = 9999
        dem_array[3:8, 3:8] = -1
        dem_array[0, 0] = -1
        dem_array[1, 1] = nodata
        _array_to_raster(dem_array, nodata, base_path)
        fill_path = os.path.join(self.workspace_dir, 'filled.tif')
        ecoshard.geoprocessing.routing.fill_pits(
            (base_path, 1), fill_path, working_dir=self.workspace_dir)
        result_array = ecoshard.geoprocessing.raster_to_numpy_array(fill_path)
        # The output must preserve the integer dtype of the input.
        self.assertEqual(result_array.dtype, numpy.int32)
        # the expected result is that the pit is filled in
        dem_array[3:8, 3:8] = 0.0
        numpy.testing.assert_almost_equal(result_array, dem_array)
    def test_flow_dir_d8(self):
        """PGP.routing: test D8 flow."""
        dem_path = os.path.join(self.workspace_dir, 'dem.tif')
        # Completely flat DEM — exercises the plateau-drain logic since
        # every direction must be resolved without any gradient.
        dem_array = numpy.zeros((11, 11), dtype=numpy.float32)
        _array_to_raster(dem_array, None, dem_path)
        target_flow_dir_path = os.path.join(
            self.workspace_dir, 'flow_dir.tif')
        ecoshard.geoprocessing.routing.flow_dir_d8(
            (dem_path, 1), target_flow_dir_path,
            working_dir=self.workspace_dir)
        flow_array = ecoshard.geoprocessing.raster_to_numpy_array(
            target_flow_dir_path)
        self.assertEqual(flow_array.dtype, numpy.uint8)
        # this is a regression result saved by hand
        expected_result = numpy.array([
            [2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0],
            [4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0],
            [4, 4, 2, 2, 2, 2, 2, 2, 2, 0, 0],
            [4, 4, 4, 2, 2, 2, 2, 2, 0, 0, 0],
            [4, 4, 4, 4, 2, 2, 2, 0, 0, 0, 0],
            [4, 4, 4, 4, 4, 2, 0, 0, 0, 0, 0],
            [4, 4, 4, 4, 4, 6, 0, 0, 0, 0, 0],
            [4, 4, 4, 4, 6, 6, 6, 0, 0, 0, 0],
            [4, 4, 4, 6, 6, 6, 6, 6, 0, 0, 0],
            [4, 4, 6, 6, 6, 6, 6, 6, 6, 0, 0],
            [4, 6, 6, 6, 6, 6, 6, 6, 6, 6, 0]])
        numpy.testing.assert_almost_equal(flow_array, expected_result)
    def test_invalid_mode_detect_outlets(self):
        """PGP.routing: ensure invalid mode caught when detecting outlets."""
        # Valid D8 raster setup — the failure must come from the bad mode
        # string, not from the input raster.
        flow_dir_d8 = numpy.full((512, 512), 128, dtype=numpy.uint8)
        flow_dir_d8[0:4, 0:4] = [
            [2, 2, 2, 2],
            [2, 2, 2, 0],
            [4, 128, 2, 2],
            [2, 2, 6, 2]]
        flow_dir_d8[-1, -1] = 0
        flow_dir_d8_path = os.path.join(self.workspace_dir, 'd8.tif')
        _array_to_raster(flow_dir_d8, 128, flow_dir_d8_path)
        outlet_vector_path = os.path.join(
            self.workspace_dir, 'outlets.gpkg')
        with self.assertRaises(ValueError) as cm:
            ecoshard.geoprocessing.routing.detect_outlets(
                (flow_dir_d8_path, 1), 'bad_mode', outlet_vector_path)
        expected_message = (
            'expected flow dir type of either d8 or mfd but got bad_mode')
        actual_message = str(cm.exception)
        self.assertTrue(expected_message in actual_message, actual_message)
    def test_detect_outlets_d8(self):
        """PGP.routing: test detect outlets for D8."""
        # 128 is used as the D8 nodata value; a small 4x4 window of real
        # directions plus one outlet at the far corner.
        flow_dir_d8 = numpy.full((512, 512), 128, dtype=numpy.uint8)
        flow_dir_d8[0:4, 0:4] = [
            [2, 2, 2, 2],
            [2, 2, 2, 0],
            [4, 128, 2, 2],
            [2, 2, 6, 2]]
        flow_dir_d8[-1, -1] = 0
        flow_dir_d8_path = os.path.join(self.workspace_dir, 'd8.tif')
        _array_to_raster(flow_dir_d8, 128, flow_dir_d8_path)
        outlet_vector_path = os.path.join(
            self.workspace_dir, 'outlets.gpkg')
        ecoshard.geoprocessing.routing.detect_outlets(
            (flow_dir_d8_path, 1), 'd8', outlet_vector_path)
        outlet_vector = gdal.OpenEx(
            outlet_vector_path, gdal.OF_VECTOR)
        outlet_layer = outlet_vector.GetLayer()
        # Collect (i, j) coordinates and ID fields from the result vector.
        outlet_ij_set = set()
        id_list = []
        for outlet_feature in outlet_layer:
            outlet_ij_set.add(
                (outlet_feature.GetField('i'),
                 outlet_feature.GetField('j')))
            id_list.append(outlet_feature.GetField('ID'))
        # We know the expected outlets because we constructed them above
        expected_outlet_ij_set = {
            (0, 0), (1, 0), (2, 0), (3, 0),
            (3, 1),
            (0, 2),
            (1, 3), (2, 3),
            (511, 511)}
        self.assertEqual(outlet_ij_set, expected_outlet_ij_set)
        # IDs must be a contiguous 0..n-1 sequence, one per outlet.
        self.assertEqual(
            sorted(id_list), list(range(len(expected_outlet_ij_set))))
    def test_detect_outlets_mfd(self):
        """PGP.routing: test detect outlets for MFD."""
        # Build the same layout as the D8 case, then convert each D8
        # direction d into its MFD encoding (1 << (d*4)) so both tests
        # expect identical outlet coordinates.
        d8_nodata = 128
        flow_dir_mfd = numpy.full((512, 512), d8_nodata, dtype=numpy.int32)
        flow_dir_mfd[0:4, 0:4] = [
            [2, 2, 2, 2],
            [2, 2, 2, 0],
            [4, d8_nodata, 2, 2],
            [2, 2, 6, 2]]
        flow_dir_mfd[-1, -1] = 0
        nodata_mask = flow_dir_mfd == d8_nodata
        flow_dir_mfd[~nodata_mask] = (1 << (flow_dir_mfd[~nodata_mask]*4))
        flow_dir_mfd[nodata_mask] = 0  # set to MFD nodata
        flow_dir_mfd_path = os.path.join(self.workspace_dir, 'mfd.tif')
        _array_to_raster(flow_dir_mfd, 0, flow_dir_mfd_path)
        outlet_vector_path = os.path.join(
            self.workspace_dir, 'outlets.gpkg')
        ecoshard.geoprocessing.routing.detect_outlets(
            (flow_dir_mfd_path, 1), 'mfd', outlet_vector_path)
        outlet_vector = gdal.OpenEx(
            outlet_vector_path, gdal.OF_VECTOR)
        outlet_layer = outlet_vector.GetLayer()
        outlet_ij_set = set()
        id_list = []
        for outlet_feature in outlet_layer:
            outlet_ij_set.add(
                (outlet_feature.GetField('i'),
                 outlet_feature.GetField('j')))
            id_list.append(outlet_feature.GetField('ID'))
        # We know the expected outlets because we constructed them above
        expected_outlet_ij_set = {
            (0, 0), (1, 0), (2, 0), (3, 0),
            (3, 1),
            (0, 2),
            (1, 3), (2, 3),
            (511, 511)}
        self.assertEqual(outlet_ij_set, expected_outlet_ij_set)
        # IDs must be a contiguous 0..n-1 sequence, one per outlet.
        self.assertEqual(
            sorted(id_list), list(range(len(expected_outlet_ij_set))))
    def test_detect_outlets_by_block(self):
        """PGP: test detect_outlets by memory block for border cases."""
        nodata = 128  # nodata value
        flow_dir_array = numpy.array([
            [0, 0, 0, 0, 7, 7, 7, 1, 6, 6],
            [2, 3, 4, 5, 6, 7, 0, 1, 1, 2],
            [2, 2, 2, 2, 0, nodata, nodata, 3, 3, nodata],
            [2, 1, 1, 1, 2, 6, 4, 1, nodata, nodata],
            [1, 1, 0, 0, 0, 0, nodata, nodata, nodata, nodata]
            ], dtype=numpy.uint8)
        expected_outlet_ij_set = {(7, 0), (5, 1), (4, 2), (5, 4)}
        d8_flow_dir_raster_path = os.path.join(self.workspace_dir, 'd8.tif')
        ecoshard.geoprocessing.numpy_array_to_raster(
            flow_dir_array, nodata, (1, 1), (0, 0), None,
            d8_flow_dir_raster_path)
        outlet_vector_path = os.path.join(self.workspace_dir, 'outlets.gpkg')
        # Mock iterblocks so that we can test with an array smaller than
        # gets pour points on block edges e.g. flow_dir_array[2, 4]

        def mock_iterblocks(*args, **kwargs):
            # Split the 10-wide raster into 4/4/2-column blocks so outlets
            # fall exactly on block boundaries.
            xoffs = [0, 4, 8]
            win_xsizes = [4, 4, 2]
            for xoff, win_xsize in zip(xoffs, win_xsizes):
                yield {
                    'xoff': xoff,
                    'yoff': 0,
                    'win_xsize': win_xsize,
                    'win_ysize': 5}

        with unittest.mock.patch(
                'ecoshard.geoprocessing.iterblocks',
                mock_iterblocks):
            ecoshard.geoprocessing.routing.detect_outlets(
                (d8_flow_dir_raster_path, 1), 'd8', outlet_vector_path)
        outlet_vector = gdal.OpenEx(outlet_vector_path, gdal.OF_VECTOR)
        outlet_layer = outlet_vector.GetLayer()
        outlet_ij_set = set()
        id_list = []
        for outlet_feature in outlet_layer:
            outlet_ij_set.add(
                (outlet_feature.GetField('i'),
                 outlet_feature.GetField('j')))
            id_list.append(outlet_feature.GetField('ID'))
        # We know the expected outlets because we constructed them above
        self.assertEqual(outlet_ij_set, expected_outlet_ij_set)
    def test_flow_accum_d8(self):
        """PGP.routing: test D8 flow accum."""
        # this was generated from a pre-calculated plateau drain dem
        flow_dir_array = numpy.array([
            [2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0],
            [4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0],
            [4, 4, 2, 2, 2, 2, 2, 2, 2, 0, 0],
            [4, 4, 4, 2, 2, 2, 2, 2, 0, 0, 0],
            [4, 4, 4, 4, 2, 2, 2, 0, 0, 0, 0],
            [4, 4, 4, 4, 4, 2, 0, 0, 0, 0, 0],
            [4, 4, 4, 4, 4, 6, 0, 0, 0, 0, 0],
            [4, 4, 4, 4, 6, 6, 6, 0, 0, 0, 0],
            [4, 4, 4, 6, 6, 6, 6, 6, 0, 0, 0],
            [4, 4, 6, 6, 6, 6, 6, 6, 6, 0, 0],
            [4, 6, 6, 6, 6, 6, 6, 6, 6, 6, 0]], dtype=numpy.uint8)
        flow_dir_path = os.path.join(self.workspace_dir, 'flow_dir.tif')
        _array_to_raster(flow_dir_array, None, flow_dir_path)
        target_flow_accum_path = os.path.join(
            self.workspace_dir, 'flow_accum.tif')
        ecoshard.geoprocessing.routing.flow_accumulation_d8(
            (flow_dir_path, 1), target_flow_accum_path)
        flow_accum_array = ecoshard.geoprocessing.raster_to_numpy_array(
            target_flow_accum_path)
        # Accumulation is reported in float64 regardless of input dtype.
        self.assertEqual(flow_accum_array.dtype, numpy.float64)
        # this is a regression result saved by hand
        expected_result = numpy.array(
            [[1, 2, 3, 4, 5, 6, 5, 4, 3, 2, 1],
             [1, 1, 2, 3, 4, 5, 4, 3, 2, 1, 1],
             [2, 1, 1, 2, 3, 4, 3, 2, 1, 1, 2],
             [3, 2, 1, 1, 2, 3, 2, 1, 1, 2, 3],
             [4, 3, 2, 1, 1, 2, 1, 1, 2, 3, 4],
             [5, 4, 3, 2, 1, 1, 1, 2, 3, 4, 5],
             [5, 4, 3, 2, 1, 1, 1, 2, 3, 4, 5],
             [4, 3, 2, 1, 1, 2, 1, 1, 2, 3, 4],
             [3, 2, 1, 1, 2, 3, 2, 1, 1, 2, 3],
             [2, 1, 1, 2, 3, 4, 3, 2, 1, 1, 2],
             [1, 1, 2, 3, 4, 5, 4, 3, 2, 1, 1]])
        numpy.testing.assert_almost_equal(flow_accum_array, expected_result)
    def test_flow_accum_d8_flow_weights(self):
        """PGP.routing: test D8 flow accum with flow weights."""
        # this was generated from a pre-calculated plateau drain dem
        flow_dir_array = numpy.array([
            [2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0],
            [4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0],
            [4, 4, 2, 2, 2, 2, 2, 2, 2, 0, 0],
            [4, 4, 4, 2, 2, 2, 2, 2, 0, 0, 0],
            [4, 4, 4, 4, 2, 2, 2, 0, 0, 0, 0],
            [4, 4, 4, 4, 4, 2, 0, 0, 0, 0, 0],
            [4, 4, 4, 4, 4, 6, 0, 0, 0, 0, 0],
            [4, 4, 4, 4, 6, 6, 6, 0, 0, 0, 0],
            [4, 4, 4, 6, 6, 6, 6, 6, 0, 0, 0],
            [4, 4, 6, 6, 6, 6, 6, 6, 6, 0, 0],
            [4, 6, 6, 6, 6, 6, 6, 6, 6, 6, 0]], dtype=numpy.uint8)
        flow_dir_path = os.path.join(self.workspace_dir, 'flow_dir.tif')
        _array_to_raster(flow_dir_array, None, flow_dir_path)
        flow_weight_raster_path = os.path.join(
            self.workspace_dir, 'flow_weights.tif')
        flow_weight_array = numpy.empty(
            flow_dir_array.shape, dtype=numpy.float32)
        flow_weight_constant = 2.7
        flow_weight_array[:] = flow_weight_constant
        _array_to_raster(flow_weight_array, None, flow_weight_raster_path)
        target_flow_accum_path = os.path.join(
            self.workspace_dir, 'flow_accum.tif')
        ecoshard.geoprocessing.routing.flow_accumulation_d8(
            (flow_dir_path, 1), target_flow_accum_path,
            weight_raster_path_band=(flow_weight_raster_path, 1))
        flow_accum_array = ecoshard.geoprocessing.raster_to_numpy_array(
            target_flow_accum_path)
        self.assertEqual(flow_accum_array.dtype, numpy.float64)
        # this is a regression result saved by hand from a simple run but
        # multiplied by the flow weight constant so we know flow weights work.
        expected_result = flow_weight_constant * numpy.array(
            [[1, 2, 3, 4, 5, 6, 5, 4, 3, 2, 1],
             [1, 1, 2, 3, 4, 5, 4, 3, 2, 1, 1],
             [2, 1, 1, 2, 3, 4, 3, 2, 1, 1, 2],
             [3, 2, 1, 1, 2, 3, 2, 1, 1, 2, 3],
             [4, 3, 2, 1, 1, 2, 1, 1, 2, 3, 4],
             [5, 4, 3, 2, 1, 1, 1, 2, 3, 4, 5],
             [5, 4, 3, 2, 1, 1, 1, 2, 3, 4, 5],
             [4, 3, 2, 1, 1, 2, 1, 1, 2, 3, 4],
             [3, 2, 1, 1, 2, 3, 2, 1, 1, 2, 3],
             [2, 1, 1, 2, 3, 4, 3, 2, 1, 1, 2],
             [1, 1, 2, 3, 4, 5, 4, 3, 2, 1, 1]], dtype=numpy.float64)
        numpy.testing.assert_almost_equal(
            flow_accum_array, expected_result, 6)
        # NOTE(review): this second run repeats the call above with
        # identical arguments — it re-verifies output overwrite behavior at
        # best; confirm whether a different weight raster was intended here.
        ecoshard.geoprocessing.routing.flow_accumulation_d8(
            (flow_dir_path, 1), target_flow_accum_path,
            weight_raster_path_band=(flow_weight_raster_path, 1))
        flow_accum_array = ecoshard.geoprocessing.raster_to_numpy_array(
            target_flow_accum_path)
        self.assertEqual(flow_accum_array.dtype, numpy.float64)
        # Zero weights should drive the whole accumulation to zero.
        zero_array = numpy.zeros(flow_dir_array.shape, dtype=numpy.float32)
        zero_raster_path = os.path.join(self.workspace_dir, 'zero.tif')
        _array_to_raster(zero_array, None, zero_raster_path)
        ecoshard.geoprocessing.routing.flow_accumulation_d8(
            (flow_dir_path, 1), target_flow_accum_path,
            weight_raster_path_band=(zero_raster_path, 1))
        flow_accum_array = ecoshard.geoprocessing.raster_to_numpy_array(
            target_flow_accum_path)
        self.assertEqual(flow_accum_array.dtype, numpy.float64)
        numpy.testing.assert_almost_equal(flow_accum_array, zero_array, 6)
    def test_flow_dir_mfd(self):
        """PGP.routing: test multiple flow dir."""
        dem_path = os.path.join(self.workspace_dir, 'dem.tif')
        # this makes a flat raster with a left-to-right central channel
        dem_array = numpy.zeros((11, 11))
        dem_array[5, :] = -1
        _array_to_raster(dem_array, None, dem_path)
        target_flow_dir_path = os.path.join(
            self.workspace_dir, 'flow_dir.tif')
        ecoshard.geoprocessing.routing.flow_dir_mfd(
            (dem_path, 1), target_flow_dir_path,
            working_dir=self.workspace_dir)
        flow_array = ecoshard.geoprocessing.raster_to_numpy_array(target_flow_dir_path)
        # MFD directions are packed as bitfields in an int32 band.
        self.assertEqual(flow_array.dtype, numpy.int32)
        # this was generated from a hand checked result
        expected_result = numpy.array([
            [1761607680, 1178599424, 1178599424, 1178599424, 1178599424,
             1178599424, 1178599424, 1178599424, 1178599424, 1178599424,
             157286400],
            [1761607680, 1178599424, 1178599424, 1178599424, 1178599424,
             1178599424, 1178599424, 1178599424, 1178599424, 1178599424,
             157286400],
            [1761607680, 1178599424, 1178599424, 1178599424, 1178599424,
             1178599424, 1178599424, 1178599424, 1178599424, 1178599424,
             157286400],
            [1761607680, 1178599424, 1178599424, 1178599424, 1178599424,
             1178599424, 1178599424, 1178599424, 1178599424, 1178599424,
             157286400],
            [1761607680, 1178599424, 1178599424, 1178599424, 1178599424,
             1178599424, 1178599424, 1178599424, 1178599424, 1178599424,
             157286400],
            [4603904, 983040, 983040, 983040, 983040, 524296, 15, 15, 15, 15,
             1073741894],
            [2400, 17984, 17984, 17984, 17984, 17984, 17984, 17984, 17984,
             17984, 26880],
            [2400, 17984, 17984, 17984, 17984, 17984, 17984, 17984, 17984,
             17984, 26880],
            [2400, 17984, 17984, 17984, 17984, 17984, 17984, 17984, 17984,
             17984, 26880],
            [2400, 17984, 17984, 17984, 17984, 17984, 17984, 17984, 17984,
             17984, 26880],
            [2400, 17984, 17984, 17984, 17984, 17984, 17984, 17984, 17984,
             17984, 26880]])
        numpy.testing.assert_almost_equal(flow_array, expected_result)
def test_flow_accum_mfd(self):
"""PGP.routing: test flow accumulation for multiple flow."""
driver = gdal.GetDriverByName('GTiff')
n = 11
dem_path = os.path.join(self.workspace_dir, 'dem.tif')
dem_array = numpy.zeros((n, n), dtype=numpy.float32)
dem_array[int(n/2), :] = -1
_array_to_raster(dem_array, None, dem_path)
flow_dir_path = os.path.join(self.workspace_dir, 'flow_dir.tif')
ecoshard.geoprocessing.routing.flow_dir_mfd(
(dem_path, 1), flow_dir_path,
working_dir=self.workspace_dir)
target_flow_accum_path = os.path.join(
self.workspace_dir, 'flow_accum_mfd.tif')
ecoshard.geoprocessing.routing.flow_accumulation_mfd(
(flow_dir_path, 1), target_flow_accum_path)
flow_array = ecoshard.geoprocessing.raster_to_numpy_array(
target_flow_accum_path)
self.assertEqual(flow_array.dtype, numpy.float64)
# this was generated from a hand-checked result
expected_result = numpy.array([
[1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.],
[1.88571429, 2.11428571, 2., 2., 2., 2., 2., 2., 2., 2.11428571,
1.88571429],
[2.7355102, 3.23183673, 3.03265306, 3., 3., 3., 3., 3.,
3.03265306, 3.23183673, 2.7355102],
[3.56468805, 4.34574927, 4.08023324, 4.00932945, 4., 4., 4.,
4.00932945, 4.08023324, 4.34574927, 3.56468805],
[4.38045548, 5.45412012, 5.13583673, 5.02692212, 5.00266556, 5.,
5.00266556, 5.02692212, 5.13583673, 5.45412012, 4.38045548],
[60.5, 51.12681336, 39.01272503, 27.62141227, 16.519192,
11.00304635, 16.519192, 27.62141227, 39.01272503, 51.12681336,
60.5],
[4.38045548, 5.45412012, 5.13583673, 5.02692212, 5.00266556, 5.,
5.00266556, 5.02692212, 5.13583673, 5.45412012, 4.38045548],
[3.56468805, 4.34574927, 4.08023324, 4.00932945, 4., 4., 4.,
4.00932945, 4.08023324, 4.34574927, 3.56468805],
[2.7355102, 3.23183673, 3.03265306, 3., 3., 3., 3., 3.,
3.03265306, 3.23183673, 2.7355102],
[1.88571429, 2.11428571, 2., 2., 2., 2., 2., 2., 2., 2.11428571,
1.88571429],
[1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.]])
numpy.testing.assert_almost_equal(flow_array, expected_result)
    def test_flow_accum_mfd_with_weights(self):
        """PGP.routing: test flow accum for mfd with weights."""
        n = 11
        dem_raster_path = os.path.join(self.workspace_dir, 'dem.tif')
        # Flat DEM with a left-to-right channel in the middle row.
        dem_array = numpy.zeros((n, n), dtype=numpy.float32)
        dem_array[int(n/2), :] = -1
        _array_to_raster(dem_array, None, dem_raster_path)
        flow_dir_path = os.path.join(self.workspace_dir, 'flow_dir.tif')
        ecoshard.geoprocessing.routing.flow_dir_mfd(
            (dem_raster_path, 1), flow_dir_path,
            working_dir=self.workspace_dir)
        flow_weight_raster_path = os.path.join(
            self.workspace_dir, 'flow_weights.tif')
        flow_weight_array = numpy.empty((n, n))
        flow_weight_constant = 2.7
        flow_weight_array[:] = flow_weight_constant
        # Write the weight raster via new_raster_from_base + WriteArray so
        # it shares geotransform/projection with the flow direction raster.
        ecoshard.geoprocessing.new_raster_from_base(
            flow_dir_path, flow_weight_raster_path, gdal.GDT_Float32,
            [-1.0])
        flow_weight_raster = gdal.OpenEx(
            flow_weight_raster_path, gdal.OF_RASTER | gdal.GA_Update)
        flow_weight_band = flow_weight_raster.GetRasterBand(1)
        flow_weight_band.WriteArray(flow_weight_array)
        flow_weight_band.FlushCache()
        flow_weight_band = None
        flow_weight_raster = None
        target_flow_accum_path = os.path.join(
            self.workspace_dir, 'flow_accum_mfd.tif')
        ecoshard.geoprocessing.routing.flow_accumulation_mfd(
            (flow_dir_path, 1), target_flow_accum_path,
            weight_raster_path_band=(flow_weight_raster_path, 1))
        flow_array = ecoshard.geoprocessing.raster_to_numpy_array(
            target_flow_accum_path)
        self.assertEqual(flow_array.dtype, numpy.float64)
        # this was generated from a hand-checked result with flow weight of
        # 1, so the result should be scaled by flow_weight_constant (2.7)
        # since every cell carries that uniform weight.
        expected_result = flow_weight_constant * numpy.array([
            [1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.],
            [1.88571429, 2.11428571, 2., 2., 2., 2., 2., 2., 2., 2.11428571,
             1.88571429],
            [2.7355102, 3.23183673, 3.03265306, 3., 3., 3., 3., 3.,
             3.03265306, 3.23183673, 2.7355102],
            [3.56468805, 4.34574927, 4.08023324, 4.00932945, 4., 4., 4.,
             4.00932945, 4.08023324, 4.34574927, 3.56468805],
            [4.38045548, 5.45412012, 5.13583673, 5.02692212, 5.00266556, 5.,
             5.00266556, 5.02692212, 5.13583673, 5.45412012, 4.38045548],
            [60.5, 51.12681336, 39.01272503, 27.62141227, 16.519192,
             11.00304635, 16.519192, 27.62141227, 39.01272503, 51.12681336,
             60.5],
            [4.38045548, 5.45412012, 5.13583673, 5.02692212, 5.00266556, 5.,
             5.00266556, 5.02692212, 5.13583673, 5.45412012, 4.38045548],
            [3.56468805, 4.34574927, 4.08023324, 4.00932945, 4., 4., 4.,
             4.00932945, 4.08023324, 4.34574927, 3.56468805],
            [2.7355102, 3.23183673, 3.03265306, 3., 3., 3., 3., 3.,
             3.03265306, 3.23183673, 2.7355102],
            [1.88571429, 2.11428571, 2., 2., 2., 2., 2., 2., 2., 2.11428571,
             1.88571429],
            [1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.]])
        numpy.testing.assert_allclose(flow_array, expected_result, rtol=1e-6)
        # try with zero weights
        zero_array = numpy.zeros(expected_result.shape, dtype=numpy.float32)
        zero_raster_path = os.path.join(self.workspace_dir, 'zero.tif')
        _array_to_raster(zero_array, None, zero_raster_path)
        ecoshard.geoprocessing.routing.flow_accumulation_mfd(
            (flow_dir_path, 1), target_flow_accum_path,
            weight_raster_path_band=(zero_raster_path, 1))
        flow_accum_array = ecoshard.geoprocessing.raster_to_numpy_array(
            target_flow_accum_path)
        self.assertEqual(flow_accum_array.dtype, numpy.float64)
        numpy.testing.assert_almost_equal(
            numpy.sum(flow_accum_array), numpy.sum(zero_array), 6)
    def test_extract_streams_mfd(self):
        """PGP.routing: stream extraction on multiple flow direction."""
        n = 11
        dem_path = os.path.join(self.workspace_dir, 'dem.tif')
        # Flat DEM with a left-to-right channel: flow accumulates into the
        # middle row, which is where streams should be detected.
        dem_array = numpy.zeros((n, n), dtype=numpy.float32)
        dem_array[int(n/2), :] = -1
        _array_to_raster(dem_array, None, dem_path)
        flow_dir_path = os.path.join(self.workspace_dir, 'flow_dir.tif')
        ecoshard.geoprocessing.routing.flow_dir_mfd(
            (dem_path, 1), flow_dir_path)
        target_flow_accum_path = os.path.join(
            self.workspace_dir, 'flow_accum_mfd.tif')
        ecoshard.geoprocessing.routing.flow_accumulation_mfd(
            (flow_dir_path, 1), target_flow_accum_path)
        target_stream_raster_path = os.path.join(
            self.workspace_dir, 'stream.tif')
        # Threshold of 30 with trace proportion 0.5 picks up only the high-
        # accumulation ends of the channel row.
        ecoshard.geoprocessing.routing.extract_streams_mfd(
            (target_flow_accum_path, 1), (flow_dir_path, 1), 30,
            target_stream_raster_path, trace_threshold_proportion=0.5)
        stream_array = ecoshard.geoprocessing.raster_to_numpy_array(
            target_stream_raster_path)
        expected_stream_array = numpy.array(
            [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
             [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
             [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
             [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
             [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
             [1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1],
             [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
             [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
             [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
             [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
             [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]])
        numpy.testing.assert_almost_equal(stream_array, expected_stream_array)
    def test_distance_to_channel_d8(self):
        """PGP.routing: test distance to channel D8."""
        # NOTE(review): filename 'flow_dir.d8_tif' has an unusual extension
        # (not '.tif') — presumably harmless since GDAL infers format from
        # the driver, but confirm it is intentional.
        flow_dir_d8_path = os.path.join(self.workspace_dir, 'flow_dir.d8_tif')
        # this is a flow direction raster that was created from a plateau drain
        flow_dir_d8_array = numpy.array([
            [2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0],
            [4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0],
            [4, 4, 2, 2, 2, 2, 2, 2, 2, 0, 0],
            [4, 4, 4, 2, 2, 2, 2, 2, 0, 0, 0],
            [4, 4, 4, 4, 2, 2, 2, 0, 0, 0, 0],
            [4, 4, 4, 4, 4, 2, 0, 0, 0, 0, 0],
            [4, 4, 4, 4, 4, 6, 0, 0, 0, 0, 0],
            [4, 4, 4, 4, 6, 6, 6, 0, 0, 0, 0],
            [4, 4, 4, 6, 6, 6, 6, 6, 0, 0, 0],
            [4, 4, 6, 6, 6, 6, 6, 6, 6, 0, 0],
            [4, 6, 6, 6, 6, 6, 6, 6, 6, 6, 0]], dtype=numpy.uint8)
        _array_to_raster(flow_dir_d8_array, None, flow_dir_d8_path)
        # taken from a manual inspection of a flow accumulation run
        channel_path = os.path.join(self.workspace_dir, 'channel.tif')
        channel_array = numpy.array(
            [[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
             [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
             [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
             [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
             [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1],
             [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1],
             [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1],
             [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
             [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
             [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
             [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]], dtype=numpy.uint8)
        _array_to_raster(channel_array, None, channel_path)
        distance_to_channel_d8_path = os.path.join(
            self.workspace_dir, 'distance_to_channel_d8.tif')
        ecoshard.geoprocessing.routing.distance_to_channel_d8(
            (flow_dir_d8_path, 1), (channel_path, 1),
            distance_to_channel_d8_path)
        distance_to_channel_d8_array = ecoshard.geoprocessing.raster_to_numpy_array(
            distance_to_channel_d8_path)
        # Distances increase by 1 per downstream step toward the channel
        # ring; hand-checked regression array.
        expected_result = numpy.array(
            [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
             [0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
             [0, 1, 2, 2, 2, 2, 2, 2, 2, 1, 0],
             [0, 1, 2, 3, 3, 3, 3, 3, 2, 1, 0],
             [0, 0, 1, 2, 4, 4, 4, 2, 1, 0, 0],
             [0, 0, 1, 2, 3, 5, 3, 2, 1, 0, 0],
             [0, 0, 1, 2, 3, 4, 3, 2, 1, 0, 0],
             [0, 1, 2, 3, 3, 3, 3, 3, 2, 1, 0],
             [0, 1, 2, 2, 2, 2, 2, 2, 2, 1, 0],
             [0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
             [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]])
        numpy.testing.assert_almost_equal(
            distance_to_channel_d8_array, expected_result)
def test_distance_to_channel_d8_with_weights(self):
    """PGP.routing: test distance to channel D8 with a weight raster.

    Runs ``distance_to_channel_d8`` twice: once with a constant
    per-step weight of 2 (so every distance is double the unweighted
    regression result) and once with an all-zero weight raster (so
    every distance collapses to 0).
    """
    # NOTE: removed an unused ``gdal.GetDriverByName('GTiff')`` call --
    # raster creation is handled entirely by ``_array_to_raster``.
    # (Also fixed the typo'd filename 'flow_dir.d8_tif'.)
    flow_dir_d8_path = os.path.join(self.workspace_dir, 'flow_dir_d8.tif')
    # this is a flow direction raster that was created from a plateau drain
    flow_dir_d8_array = numpy.array([
        [2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0],
        [4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0],
        [4, 4, 2, 2, 2, 2, 2, 2, 2, 0, 0],
        [4, 4, 4, 2, 2, 2, 2, 2, 0, 0, 0],
        [4, 4, 4, 4, 2, 2, 2, 0, 0, 0, 0],
        [4, 4, 4, 4, 4, 2, 0, 0, 0, 0, 0],
        [4, 4, 4, 4, 4, 6, 0, 0, 0, 0, 0],
        [4, 4, 4, 4, 6, 6, 6, 0, 0, 0, 0],
        [4, 4, 4, 6, 6, 6, 6, 6, 0, 0, 0],
        [4, 4, 6, 6, 6, 6, 6, 6, 6, 0, 0],
        [4, 6, 6, 6, 6, 6, 6, 6, 6, 6, 0]], dtype=numpy.uint8)
    _array_to_raster(flow_dir_d8_array, None, flow_dir_d8_path)
    # taken from a manual inspection of a flow accumulation run
    channel_path = os.path.join(self.workspace_dir, 'channel.tif')
    channel_array = numpy.array(
        [[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
         [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
         [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
         [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
         [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1],
         [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1],
         [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1],
         [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
         [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
         [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
         [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]], dtype=numpy.uint8)
    _array_to_raster(channel_array, None, channel_path)
    # constant per-pixel step cost.  NOTE(review): the array dtype is
    # int32, so the 2.0 is stored as the integer 2 -- kept as in the
    # original test to preserve the raster's band type.
    flow_weight_array = numpy.empty(
        flow_dir_d8_array.shape, dtype=numpy.int32)
    weight_factor = 2.0
    flow_weight_array[:] = weight_factor
    # renamed from 'flow_dir_d8.tif' so the weight raster no longer
    # masquerades as (or collides with) a flow direction raster
    flow_dir_d8_weight_path = os.path.join(
        self.workspace_dir, 'flow_dir_d8_weights.tif')
    _array_to_raster(flow_weight_array, None, flow_dir_d8_weight_path)
    distance_to_channel_d8_path = os.path.join(
        self.workspace_dir, 'distance_to_channel_d8.tif')
    ecoshard.geoprocessing.routing.distance_to_channel_d8(
        (flow_dir_d8_path, 1), (channel_path, 1),
        distance_to_channel_d8_path,
        weight_raster_path_band=(flow_dir_d8_weight_path, 1))
    distance_to_channel_d8_array = (
        ecoshard.geoprocessing.raster_to_numpy_array(
            distance_to_channel_d8_path))
    # the unweighted regression result scaled by the constant weight
    expected_result = weight_factor * numpy.array(
        [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
         [0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
         [0, 1, 2, 2, 2, 2, 2, 2, 2, 1, 0],
         [0, 1, 2, 3, 3, 3, 3, 3, 2, 1, 0],
         [0, 0, 1, 2, 4, 4, 4, 2, 1, 0, 0],
         [0, 0, 1, 2, 3, 5, 3, 2, 1, 0, 0],
         [0, 0, 1, 2, 3, 4, 3, 2, 1, 0, 0],
         [0, 1, 2, 3, 3, 3, 3, 3, 2, 1, 0],
         [0, 1, 2, 2, 2, 2, 2, 2, 2, 1, 0],
         [0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
         [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]])
    numpy.testing.assert_almost_equal(
        distance_to_channel_d8_array, expected_result)
    # try with zero weights: every distance should collapse to 0
    zero_array = numpy.zeros(
        distance_to_channel_d8_array.shape, dtype=numpy.float32)
    zero_raster_path = os.path.join(self.workspace_dir, 'zero.tif')
    _array_to_raster(zero_array, None, zero_raster_path)
    ecoshard.geoprocessing.routing.distance_to_channel_d8(
        (flow_dir_d8_path, 1), (channel_path, 1),
        distance_to_channel_d8_path,
        weight_raster_path_band=(zero_raster_path, 1))
    distance_to_channel_d8_array = (
        ecoshard.geoprocessing.raster_to_numpy_array(
            distance_to_channel_d8_path))
    numpy.testing.assert_almost_equal(
        distance_to_channel_d8_array, zero_array)
def test_distance_to_channel_mfd(self):
    """PGP.routing: test distance to channel mfd."""
    # MFD flow direction raster, bit-packed into int32 values (captured
    # from a previous flow_dir_mfd run): rows above the middle drain
    # south and rows below it drain north, toward the middle-row
    # channel defined below.
    # NOTE(review): the [10, 9] entry (1178599424) breaks the pattern of
    # its row -- presumably deliberate so that pixel never reaches the
    # channel; it lines up with the -1 in the expected result below.
    # Confirm against the run that produced these values.
    flow_dir_mfd_array = numpy.array([
        [1761607680, 1178599424, 1178599424, 1178599424, 1178599424,
         1178599424, 1178599424, 1178599424, 1178599424, 1178599424,
         157286400],
        [1761607680, 1178599424, 1178599424, 1178599424, 1178599424,
         1178599424, 1178599424, 1178599424, 1178599424, 1178599424,
         157286400],
        [1761607680, 1178599424, 1178599424, 1178599424, 1178599424,
         1178599424, 1178599424, 1178599424, 1178599424, 1178599424,
         157286400],
        [1761607680, 1178599424, 1178599424, 1178599424, 1178599424,
         1178599424, 1178599424, 1178599424, 1178599424, 1178599424,
         157286400],
        [1761607680, 1178599424, 1178599424, 1178599424, 1178599424,
         1178599424, 1178599424, 1178599424, 1178599424, 1178599424,
         157286400],
        [4603904, 983040, 983040, 983040, 983040, 524296, 15, 15, 15, 15,
         1073741894],
        [2400, 17984, 17984, 17984, 17984, 17984, 17984, 17984, 17984,
         17984, 26880],
        [2400, 17984, 17984, 17984, 17984, 17984, 17984, 17984, 17984,
         17984, 26880],
        [2400, 17984, 17984, 17984, 17984, 17984, 17984, 17984, 17984,
         17984, 26880],
        [2400, 17984, 17984, 17984, 17984, 17984, 17984, 17984, 17984,
         17984, 26880],
        [2400, 17984, 17984, 17984, 17984, 17984, 17984, 17984, 17984,
         1178599424, 26880]], dtype=numpy.int32)
    flow_dir_mfd_path = os.path.join(
        self.workspace_dir, 'flow_dir_mfd.tif')
    _array_to_raster(flow_dir_mfd_array, None, flow_dir_mfd_path)
    # taken from a manual inspection of a flow accumulation run
    # (a single horizontal channel across the middle row)
    channel_array = numpy.array(
        [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
         [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
         [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], dtype=numpy.uint8)
    channel_path = os.path.join(self.workspace_dir, 'channel.tif')
    _array_to_raster(channel_array, None, channel_path)
    distance_to_channel_mfd_path = os.path.join(
        self.workspace_dir, 'distance_to_channel_mfd.tif')
    ecoshard.geoprocessing.routing.distance_to_channel_mfd(
        (flow_dir_mfd_path, 1), (channel_path, 1),
        distance_to_channel_mfd_path)
    distance_to_channel_mfd_array = ecoshard.geoprocessing.raster_to_numpy_array(
        distance_to_channel_mfd_path)
    # this is a regression result copied by hand
    # (symmetric about the channel row; the -1 at [10, 9] corresponds
    # to the odd flow-direction value noted above)
    expected_result = numpy.array(
        [[5.98240137, 6.10285187, 6.15935357, 6.1786881, 6.18299413,
          6.18346732, 6.18299413, 6.1786881, 6.15935357, 6.10285187,
          5.98240137],
         [4.77092897, 4.88539641, 4.93253084, 4.94511769, 4.94677386,
          4.94677386, 4.94677386, 4.94511769, 4.93253084, 4.88539641,
          4.77092897],
         [3.56278943, 3.66892471, 3.70428382, 3.71008039, 3.71008039,
          3.71008039, 3.71008039, 3.71008039, 3.70428382, 3.66892471,
          3.56278943],
         [2.35977407, 2.45309892, 2.47338693, 2.47338693, 2.47338693,
          2.47338693, 2.47338693, 2.47338693, 2.47338693, 2.45309892,
          2.35977407],
         [1.16568542, 1.23669346, 1.23669346, 1.23669346, 1.23669346,
          1.23669346, 1.23669346, 1.23669346, 1.23669346, 1.23669346,
          1.16568542],
         [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
         [1.16568542, 1.23669346, 1.23669346, 1.23669346, 1.23669346,
          1.23669346, 1.23669346, 1.23669346, 1.23669346, 1.23669346,
          1.16568542],
         [2.35977407, 2.45309892, 2.47338693, 2.47338693, 2.47338693,
          2.47338693, 2.47338693, 2.47338693, 2.47338693, 2.45309892,
          2.35977407],
         [3.56278943, 3.66892471, 3.70428382, 3.71008039, 3.71008039,
          3.71008039, 3.71008039, 3.71008039, 3.70428382, 3.66892471,
          3.56278943],
         [4.77092897, 4.88539641, 4.93253084, 4.94511769, 4.94677386,
          4.94677386, 4.94677386, 4.94511769, 4.93253084, 4.88539641,
          4.77092897],
         [5.98240137, 6.10285187, 6.15935357, 6.1786881, 6.18299413,
          6.18346732, 6.18299413, 6.1786881, 6.15935357, -1,
          5.98240137]])
    numpy.testing.assert_almost_equal(
        distance_to_channel_mfd_array, expected_result)
def test_distance_to_channel_mfd_with_weights(self):
    """PGP.routing: test distance to channel mfd with weights."""
    # MFD flow direction raster, bit-packed into int32 values (captured
    # from a previous flow_dir_mfd run): rows above the middle drain
    # south and rows below it drain north, toward the middle-row
    # channel defined below.
    flow_dir_mfd_array = numpy.array([
        [1761607680, 1178599424, 1178599424, 1178599424, 1178599424,
         1178599424, 1178599424, 1178599424, 1178599424, 1178599424,
         157286400],
        [1761607680, 1178599424, 1178599424, 1178599424, 1178599424,
         1178599424, 1178599424, 1178599424, 1178599424, 1178599424,
         157286400],
        [1761607680, 1178599424, 1178599424, 1178599424, 1178599424,
         1178599424, 1178599424, 1178599424, 1178599424, 1178599424,
         157286400],
        [1761607680, 1178599424, 1178599424, 1178599424, 1178599424,
         1178599424, 1178599424, 1178599424, 1178599424, 1178599424,
         157286400],
        [1761607680, 1178599424, 1178599424, 1178599424, 1178599424,
         1178599424, 1178599424, 1178599424, 1178599424, 1178599424,
         157286400],
        [4603904, 983040, 983040, 983040, 983040, 524296, 15, 15, 15, 15,
         1073741894],
        [2400, 17984, 17984, 17984, 17984, 17984, 17984, 17984, 17984,
         17984, 26880],
        [2400, 17984, 17984, 17984, 17984, 17984, 17984, 17984, 17984,
         17984, 26880],
        [2400, 17984, 17984, 17984, 17984, 17984, 17984, 17984, 17984,
         17984, 26880],
        [2400, 17984, 17984, 17984, 17984, 17984, 17984, 17984, 17984,
         17984, 26880],
        [2400, 17984, 17984, 17984, 17984, 17984, 17984, 17984, 17984,
         17984, 26880]], dtype=numpy.int32)
    flow_dir_mfd_path = os.path.join(
        self.workspace_dir, 'flow_dir_mfd.tif')
    _array_to_raster(flow_dir_mfd_array, None, flow_dir_mfd_path)
    # constant per-pixel step cost of 2.
    # NOTE(review): the dtype is int32, so the 2.0 is stored as the
    # integer 2 -- presumably intentional; confirm the weight raster is
    # meant to be integer-typed.
    flow_weight_array = numpy.empty(
        flow_dir_mfd_array.shape, dtype=numpy.int32)
    flow_weight_array[:] = 2.0
    flow_dir_mfd_weight_path = os.path.join(
        self.workspace_dir, 'flow_dir_mfd_weights.tif')
    _array_to_raster(flow_weight_array, None, flow_dir_mfd_weight_path)
    # taken from a manual inspection of a flow accumulation run
    # (a single horizontal channel across the middle row)
    channel_path = os.path.join(self.workspace_dir, 'channel.tif')
    channel_array = numpy.array(
        [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
         [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
         [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], dtype=numpy.uint8)
    _array_to_raster(channel_array, None, channel_path)
    distance_to_channel_mfd_path = os.path.join(
        self.workspace_dir, 'distance_to_channel_mfd.tif')
    ecoshard.geoprocessing.routing.distance_to_channel_mfd(
        (flow_dir_mfd_path, 1), (channel_path, 1),
        distance_to_channel_mfd_path,
        weight_raster_path_band=(flow_dir_mfd_weight_path, 1))
    distance_to_channel_mfd_array = ecoshard.geoprocessing.raster_to_numpy_array(
        distance_to_channel_mfd_path)
    # this is a regression result copied by hand
    # (with a constant weight of 2 the distance is just 2 * the number
    # of rows between the pixel and the channel row)
    expected_result = numpy.array(
        [
            [10., 10., 10., 10., 10., 10., 10., 10., 10., 10., 10.],
            [8., 8., 8., 8., 8., 8., 8., 8., 8., 8., 8.],
            [6., 6., 6., 6., 6., 6., 6., 6., 6., 6., 6.],
            [4., 4., 4., 4., 4., 4., 4., 4., 4., 4., 4.],
            [2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2.],
            [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
            [2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2.],
            [4., 4., 4., 4., 4., 4., 4., 4., 4., 4., 4.],
            [6., 6., 6., 6., 6., 6., 6., 6., 6., 6., 6.],
            [8., 8., 8., 8., 8., 8., 8., 8., 8., 8., 8.],
            [10., 10., 10., 10., 10., 10., 10., 10., 10., 10., 10.],
        ])
    numpy.testing.assert_almost_equal(
        distance_to_channel_mfd_array, expected_result)
    # try with zero weights: every distance should collapse to 0.
    # NOTE(review): the nodata value passed here is 0, the same value
    # as the weights themselves -- confirm that is intended.
    zero_array = numpy.zeros(
        expected_result.shape, dtype=numpy.float32)
    zero_raster_path = os.path.join(self.workspace_dir, 'zero.tif')
    _array_to_raster(zero_array, 0, zero_raster_path)
    ecoshard.geoprocessing.routing.distance_to_channel_mfd(
        (flow_dir_mfd_path, 1), (channel_path, 1),
        distance_to_channel_mfd_path,
        weight_raster_path_band=(zero_raster_path, 1))
    distance_to_channel_mfd_array = ecoshard.geoprocessing.raster_to_numpy_array(
        distance_to_channel_mfd_path)
    numpy.testing.assert_almost_equal(
        distance_to_channel_mfd_array, zero_array)
def test_flow_dir_mfd_plateau(self):
    """PGP.routing: MFD on a plateau."""
    dem_path = os.path.join(self.workspace_dir, 'dem.tif')
    # A flat surface with three barely-raised rows: after plateau
    # resolution every interior pixel should still get a defined MFD
    # flow direction.
    size = 100
    plateau = numpy.zeros((size, size))
    for ridge_row in (2, size // 2, 3 * size // 4):
        plateau[ridge_row, :] = 1e-12
    _array_to_raster(plateau, -1, dem_path)
    target_flow_dir_path = os.path.join(
        self.workspace_dir, 'flow_dir.tif')
    ecoshard.geoprocessing.routing.flow_dir_mfd(
        (dem_path, 1), target_flow_dir_path,
        working_dir=self.workspace_dir)
    raster_info = ecoshard.geoprocessing.get_raster_info(
        target_flow_dir_path)
    flow_dir_nodata = raster_info['nodata'][0]
    flow_dir_array = ecoshard.geoprocessing.raster_to_numpy_array(
        target_flow_dir_path)
    # only check the interior; the border is allowed to be nodata
    interior = flow_dir_array[1:-1, 1:-1]
    nodata_hits = numpy.isclose(interior, flow_dir_nodata)
    self.assertTrue(
        not nodata_hits.any(),
        'all flow directions should be defined')
def test_extract_straher_streams_watersheds_d8(self):
    """PGP.routing: test Strahler stream and subwatershed creation."""
    # make a long canyon herringbone style DEM that will have a main
    # central river and single pixel tributaries every other pixel to
    # the west and east as one steps south the canyon
    n = 53
    dem_array = numpy.zeros((n, 3))
    dem_array[0, 1] = -0.5
    # make notches every other row for both columns
    dem_array[1::2, 0::2] = 1
    dem_path = os.path.join(self.workspace_dir, 'dem.tif')
    ecoshard.geoprocessing.numpy_array_to_raster(
        dem_array, -1, (1, -1), (0, 0), None, dem_path)
    filled_pits_path = os.path.join(self.workspace_dir, 'filled_pits.tif')
    ecoshard.geoprocessing.routing.fill_pits(
        (dem_path, 1), filled_pits_path)
    flow_dir_d8_path = os.path.join(self.workspace_dir, 'd8.tif')
    ecoshard.geoprocessing.routing.flow_dir_d8(
        (filled_pits_path, 1), flow_dir_d8_path,
        working_dir=self.workspace_dir)
    flow_accum_d8_path = os.path.join(self.workspace_dir, 'flow_accum.tif')
    ecoshard.geoprocessing.routing.flow_accumulation_d8(
        (flow_dir_d8_path, 1), flow_accum_d8_path)
    no_autotune_stream_vector_path = os.path.join(
        self.workspace_dir, 'no_autotune_stream.gpkg')
    ecoshard.geoprocessing.routing.extract_strahler_streams_d8(
        (flow_dir_d8_path, 1),
        (flow_accum_d8_path, 1),
        (filled_pits_path, 1),
        no_autotune_stream_vector_path,
        autotune_flow_accumulation=False,
        min_flow_accum_threshold=1)
    # every pixel is a stream
    stream_vector = gdal.OpenEx(
        no_autotune_stream_vector_path, gdal.OF_VECTOR)
    stream_layer = stream_vector.GetLayer()
    self.assertEqual(stream_layer.GetFeatureCount(), n*2+2)
    stream_layer = None
    stream_vector = None
    autotune_stream_vector_path = os.path.join(
        self.workspace_dir, 'autotune_stream.gpkg')
    ecoshard.geoprocessing.routing.extract_strahler_streams_d8(
        (flow_dir_d8_path, 1),
        (flow_accum_d8_path, 1),
        (filled_pits_path, 1),
        autotune_stream_vector_path,
        autotune_flow_accumulation=True,
        min_flow_accum_threshold=2)
    # n-1 streams
    stream_vector = gdal.OpenEx(
        autotune_stream_vector_path, gdal.OF_VECTOR)
    stream_layer = stream_vector.GetLayer()
    self.assertEqual(stream_layer.GetFeatureCount(), n-2)
    # this gets just the single outlet feature
    # (fixed: was an f-string with no placeholders -- a plain string
    # literal is what was intended)
    stream_layer.SetAttributeFilter('"outlet"=1')
    outlet_feature = next(iter(stream_layer))
    # known to be order 2 because none of the streams can branch more
    # than once
    self.assertEqual(outlet_feature.GetField('order'), 2)
    stream_vector = None
    stream_layer = None
    # the same output path is reused (and overwritten) for both
    # subwatershed runs below
    watershed_confluence_vector_path = os.path.join(
        self.workspace_dir, 'watershed_confluence.gpkg')
    ecoshard.geoprocessing.routing.calculate_subwatershed_boundary(
        (flow_dir_d8_path, 1), autotune_stream_vector_path,
        watershed_confluence_vector_path, outlet_at_confluence=True)
    watershed_vector = gdal.OpenEx(
        watershed_confluence_vector_path, gdal.OF_VECTOR)
    watershed_layer = watershed_vector.GetLayer()
    # there should be exactly an integer half number of watersheds as
    # the length of the canyon
    self.assertEqual(watershed_layer.GetFeatureCount(), n//2)
    watershed_vector = None
    watershed_layer = None
    ecoshard.geoprocessing.routing.calculate_subwatershed_boundary(
        (flow_dir_d8_path, 1), autotune_stream_vector_path,
        watershed_confluence_vector_path, outlet_at_confluence=False)
    watershed_vector = gdal.OpenEx(
        watershed_confluence_vector_path, gdal.OF_VECTOR)
    watershed_layer = watershed_vector.GetLayer()
    # every stream should have a watershed
    self.assertEqual(watershed_layer.GetFeatureCount(), n-2)
    watershed_vector = None
    watershed_layer = None
def test_single_drain_point(self):
    """PGP.routing: test single_drain_point pitfill."""
    # base DEM: a single-pixel pit at the NW corner, a broad interior
    # pit, a deep pixel at (10, 7), and a raised SE block containing a
    # nested depression with a very deep pixel at (10, 10)
    dem_array = numpy.zeros((11, 11), dtype=numpy.float32)
    dem_array[0, 0] = -1.0
    dem_array[1:8, 1:8] = -2.0
    dem_array[10, 7] = -4.0
    dem_array[8:11, 8:11] = 2.0
    dem_array[9:11, 9:11] = 1.0
    dem_array[10, 10] = -7.0
    dem_path = os.path.join(self.workspace_dir, 'dem.tif')
    _array_to_raster(dem_array, None, dem_path)
    # outlet tuple at 0,0, just drain one edge
    filled_via_corner = numpy.copy(dem_array)
    filled_via_corner[1:8, 1:8] = -1
    filled_via_corner[10, 7] = 0
    filled_via_corner[8:11, 8:11] = 2.0
    # output tuple at 5,5, it's a massive pit so drain the edges
    filled_via_center = numpy.copy(dem_array)
    filled_via_center[8:11, 8:11] = 2.0
    filled_via_center[10, 7] = 0
    # with a fill count of 1 nothing may be filled, so the DEM is
    # expected back unchanged in the last two cases
    cases = [
        ((0, 0), filled_via_corner, -1),
        ((5, 5), filled_via_center, -1),
        ((0, 0), dem_array, 1),
        ((5, 5), dem_array, 1),
    ]
    for outlet_tuple, expected, max_fill in cases:
        fill_path = os.path.join(self.workspace_dir, 'filled.tif')
        ecoshard.geoprocessing.routing.fill_pits(
            (dem_path, 1), fill_path,
            single_outlet_tuple=outlet_tuple,
            max_pixel_fill_count=max_fill,
            working_dir=self.workspace_dir)
        filled = ecoshard.geoprocessing.raster_to_numpy_array(fill_path)
        numpy.testing.assert_almost_equal(filled, expected)
def test_detect_lowest_drain_and_sink(self):
    """PGP.routing: test detect_lowest_sink_and_drain."""
    # DEM with a -1 plateau pit in the interior and -1 pixels at two
    # opposite corners; (0, 0) should be reported as the lowest drain
    # and the plateau's first pixel (3, 3) as the lowest sink
    dem_array = numpy.zeros((11, 11), dtype=numpy.float32)
    dem_array[3:8, 3:8] = -1.0
    dem_array[0, 0] = -1.0
    dem_array[10, 10] = -1.0
    dem_path = os.path.join(self.workspace_dir, 'dem.tif')
    _array_to_raster(dem_array, None, dem_path)
    result = (
        ecoshard.geoprocessing.routing.detect_lowest_drain_and_sink(
            (dem_path, 1)))
    drain_pixel, drain_height, sink_pixel, sink_height = result
    self.assertEqual((drain_pixel, drain_height), ((0, 0), -1))
    self.assertEqual((sink_pixel, sink_height), ((3, 3), -1))
def test_channel_not_exist_distance(self):
    """PGP.routing: test for nodata result if channel doesn't exist."""
    from osgeo import osr
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(3857)
    projection_wkt = srs.ExportToWkt()
    nodata = -1
    flow_dir_path = os.path.join(
        self.workspace_dir, 'test_stream_distance_flow_dir.tif')
    streams_path = os.path.join(
        self.workspace_dir, 'test_stream_distance_streams.tif')
    distance_path = os.path.join(
        self.workspace_dir, 'test_stream_distance_output.tif')
    # uniform flow direction but an all-zero streams raster: there is
    # no channel anywhere for the distance algorithms to reach
    ecoshard.geoprocessing.numpy_array_to_raster(
        numpy.ones((10, 10)), nodata, (10, -10), (1000, 1000),
        projection_wkt, flow_dir_path)
    ecoshard.geoprocessing.numpy_array_to_raster(
        numpy.zeros((10, 10)), nodata, (10, -10), (1000, 1000),
        projection_wkt, streams_path)
    expected_result = numpy.full((10, 10), nodata)
    # both the D8 and MFD variants should write an all-nodata result
    for distance_to_channel in (
            ecoshard.geoprocessing.routing.distance_to_channel_d8,
            ecoshard.geoprocessing.routing.distance_to_channel_mfd):
        distance_to_channel(
            (flow_dir_path, 1), (streams_path, 1), distance_path)
        numpy.testing.assert_almost_equal(
            ecoshard.geoprocessing.raster_to_numpy_array(distance_path),
            expected_result)
| 45.026108
| 87
| 0.579147
| 8,169
| 56,913
| 3.811972
| 0.053373
| 0.04361
| 0.054913
| 0.063969
| 0.823218
| 0.787251
| 0.774663
| 0.754432
| 0.737604
| 0.722608
| 0
| 0.189504
| 0.290514
| 56,913
| 1,263
| 88
| 45.061758
| 0.581689
| 0.068315
| 0
| 0.700787
| 0
| 0
| 0.024327
| 0.006285
| 0
| 0
| 0
| 0
| 0.052165
| 1
| 0.028543
| false
| 0
| 0.012795
| 0
| 0.042323
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
87e6fb439ab0a1559540c24ed989086665609e33
| 3,333
|
py
|
Python
|
pages/SignUp_Page.py
|
ahmedsamir94/Fly365
|
745c96b73022acaf95f419f04e0db87852984f4d
|
[
"MIT"
] | null | null | null |
pages/SignUp_Page.py
|
ahmedsamir94/Fly365
|
745c96b73022acaf95f419f04e0db87852984f4d
|
[
"MIT"
] | null | null | null |
pages/SignUp_Page.py
|
ahmedsamir94/Fly365
|
745c96b73022acaf95f419f04e0db87852984f4d
|
[
"MIT"
] | null | null | null |
class SignUpPage():
    """Page object for the sign-up / sign-in flow.

    Element lookups use absolute XPaths captured from the rendered page.

    Args:
        driver: a Selenium WebDriver (or any object exposing
            ``find_element(by, value)``).
    """

    def __init__(self, driver):
        self.driver = driver
        self.sign_up_button_xpath = "/html/body/div[1]/div/footer/div[2]/div/div/div[2]/div/ul/li[4]/a"
        self.first_name_textbox_xpath = "/html/body/div[1]/div/div[2]/div/div[2]/div[3]/div/form/div/div[1]/div[1]/div/div[2]/div/div/div[1]/input"
        self.family_name_textbox_xpath = "/html/body/div[1]/div/div[2]/div/div[2]/div[3]/div/form/div/div[1]/div[2]/div/div[2]/div/div/div[1]/input"
        self.email_textbox_xpath = "/html/body/div[1]/div/div[2]/div/div[2]/div[3]/div/form/div/div[2]/div/div/div[2]/div/div/div/input"
        self.password_textbox_xpath = "/html/body/div[1]/div/div[2]/div/div[2]/div[3]/div/form/div/div[3]/div/div/div[2]/div/div/div/div/input"
        self.create_account_button_xpath = "/html/body/div[1]/div/div[2]/div/div[2]/div[3]/div/form/div/div[4]/div/button"
        self.click_acount_button_xpath = "/html/body/div[1]/div/div[2]/div/div/div[2]/div/div[3]/span"
        self.click_SignOut_button_xpath = "/html/body/ul/li[3]"
        self.UserName_textbox_xpath = "/html/body/div[1]/div/div[2]/div/div[2]/div[3]/div/form/div[2]/div[1]/div/div/div[2]/div/div/div/input"
        self.Password_textbox_xpath = "/html/body/div[1]/div/div[2]/div/div[2]/div[3]/div/form/div[2]/div[2]/div/div/div[2]/div/div/div/div[1]/input"
        # BUG FIX: this attribute was previously named ``click_SignIn``,
        # which shadowed the ``click_SignIn`` method on every instance
        # and made it uncallable (TypeError: 'str' object is not
        # callable).  Renamed to follow the *_xpath convention.
        self.signin_button_xpath = "/html/body/div[1]/div/div[2]/div/div[2]/div[3]/div/form/div[2]/div[4]/div/button"

    def _find(self, xpath):
        """Return the single element located by *xpath*.

        Uses ``find_element("xpath", ...)`` because the
        ``find_element_by_*`` helpers were removed in Selenium 4; the
        two-argument form works in Selenium 3 and 4.
        """
        return self.driver.find_element("xpath", xpath)

    def _fill(self, xpath, text):
        """Clear the text box at *xpath*, then type *text* into it."""
        textbox = self._find(xpath)
        textbox.clear()
        textbox.send_keys(text)

    def click_SignUp(self):
        """Open the sign-up form."""
        self._find(self.sign_up_button_xpath).click()

    def enter_first_name(self, firstname):
        """Fill the first-name field on the sign-up form."""
        self._fill(self.first_name_textbox_xpath, firstname)

    def enter_family_name(self, familyname):
        """Fill the family-name field on the sign-up form."""
        self._fill(self.family_name_textbox_xpath, familyname)

    def enter_email(self, email):
        """Fill the email field on the sign-up form."""
        self._fill(self.email_textbox_xpath, email)

    def enter_password(self, password):
        """Fill the password field on the sign-up form."""
        self._fill(self.password_textbox_xpath, password)

    def create_account(self):
        """Submit the sign-up form."""
        self._find(self.create_account_button_xpath).click()

    def click_account(self):
        """Open the account menu."""
        self._find(self.click_acount_button_xpath).click()

    def click_SignOut(self):
        """Sign the current user out via the account menu."""
        self._find(self.click_SignOut_button_xpath).click()

    def enter_username(self, username):
        """Fill the username field on the sign-in form."""
        self._fill(self.UserName_textbox_xpath, username)

    def enter_Password(self, Password):
        """Fill the password field on the sign-in form."""
        self._fill(self.Password_textbox_xpath, Password)

    def click_SignIn(self):
        """Submit the sign-in form."""
        self._find(self.signin_button_xpath).click()
| 65.352941
| 147
| 0.729373
| 571
| 3,333
| 4.003503
| 0.084063
| 0.144357
| 0.10105
| 0.11811
| 0.855643
| 0.753718
| 0.748906
| 0.729221
| 0.68154
| 0.569554
| 0
| 0.022476
| 0.105611
| 3,333
| 51
| 148
| 65.352941
| 0.744381
| 0.025203
| 0
| 0
| 0
| 0.238095
| 0.284175
| 0.278325
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0.190476
| 0
| 0
| 0.309524
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
87fbe2153ba8b1339d8a09e2619edf5767a3d9ed
| 12,844
|
py
|
Python
|
colour/models/rgb/__init__.py
|
aurelienpierre/colour
|
3ac45c12fbc0493e49ba4d4b2cb253df9fe14c47
|
[
"BSD-3-Clause"
] | 1
|
2022-02-12T06:28:15.000Z
|
2022-02-12T06:28:15.000Z
|
colour/models/rgb/__init__.py
|
aurelienpierre/colour
|
3ac45c12fbc0493e49ba4d4b2cb253df9fe14c47
|
[
"BSD-3-Clause"
] | null | null | null |
colour/models/rgb/__init__.py
|
aurelienpierre/colour
|
3ac45c12fbc0493e49ba4d4b2cb253df9fe14c47
|
[
"BSD-3-Clause"
] | null | null | null |
from .derivation import (
normalised_primary_matrix,
chromatically_adapted_primaries,
primaries_whitepoint,
RGB_luminance_equation,
RGB_luminance,
)
from .rgb_colourspace import RGB_Colourspace
from .rgb_colourspace import XYZ_to_RGB, RGB_to_XYZ
from .rgb_colourspace import matrix_RGB_to_RGB, RGB_to_RGB
from .transfer_functions import (
CV_range,
legal_to_full,
full_to_legal,
gamma_function,
log_encoding_ACESproxy,
log_decoding_ACESproxy,
log_encoding_ACEScc,
log_decoding_ACEScc,
log_encoding_ACEScct,
log_decoding_ACEScct,
oetf_ARIBSTDB67,
oetf_inverse_ARIBSTDB67,
log_encoding_ALEXALogC,
log_decoding_ALEXALogC,
oetf_BlackmagicFilmGeneration5,
oetf_inverse_BlackmagicFilmGeneration5,
log_encoding_CanonLog,
log_decoding_CanonLog,
log_encoding_CanonLog2,
log_decoding_CanonLog2,
log_encoding_CanonLog3,
log_decoding_CanonLog3,
log_encoding_Cineon,
log_decoding_Cineon,
oetf_DaVinciIntermediate,
oetf_inverse_DaVinciIntermediate,
eotf_inverse_DCDM,
eotf_DCDM,
eotf_inverse_DICOMGSDF,
eotf_DICOMGSDF,
log_encoding_DJIDLog,
log_decoding_DJIDLog,
exponent_function_basic,
exponent_function_monitor_curve,
log_encoding_FilmicPro6,
log_decoding_FilmicPro6,
log_encoding_FilmLightTLog,
log_decoding_FilmLightTLog,
log_encoding_Protune,
log_decoding_Protune,
oetf_BT601,
oetf_inverse_BT601,
oetf_BT709,
oetf_inverse_BT709,
eotf_inverse_BT1886,
eotf_BT1886,
eotf_inverse_BT2020,
eotf_BT2020,
eotf_inverse_ST2084,
eotf_ST2084,
oetf_PQ_BT2100,
oetf_inverse_PQ_BT2100,
eotf_PQ_BT2100,
eotf_inverse_PQ_BT2100,
ootf_PQ_BT2100,
ootf_inverse_PQ_BT2100,
oetf_HLG_BT2100,
oetf_inverse_HLG_BT2100,
BT2100_HLG_EOTF_METHODS,
eotf_HLG_BT2100,
BT2100_HLG_EOTF_INVERSE_METHODS,
eotf_inverse_HLG_BT2100,
BT2100_HLG_OOTF_METHODS,
ootf_HLG_BT2100,
BT2100_HLG_OOTF_INVERSE_METHODS,
ootf_inverse_HLG_BT2100,
linear_function,
logarithmic_function_basic,
logarithmic_function_quasilog,
logarithmic_function_camera,
log_encoding_Log2,
log_decoding_Log2,
log_encoding_Panalog,
log_decoding_Panalog,
log_encoding_VLog,
log_decoding_VLog,
log_encoding_FLog,
log_decoding_FLog,
log_encoding_NLog,
log_decoding_NLog,
log_encoding_PivotedLog,
log_decoding_PivotedLog,
log_encoding_REDLog,
log_decoding_REDLog,
log_encoding_REDLogFilm,
log_decoding_REDLogFilm,
LOG3G10_ENCODING_METHODS,
LOG3G10_DECODING_METHODS,
log_encoding_Log3G10,
log_decoding_Log3G10,
log_encoding_Log3G12,
log_decoding_Log3G12,
cctf_encoding_ROMMRGB,
cctf_decoding_ROMMRGB,
cctf_encoding_ProPhotoRGB,
cctf_decoding_ProPhotoRGB,
cctf_encoding_RIMMRGB,
cctf_decoding_RIMMRGB,
log_encoding_ERIMMRGB,
log_decoding_ERIMMRGB,
oetf_SMPTE240M,
eotf_SMPTE240M,
log_encoding_SLog,
log_decoding_SLog,
log_encoding_SLog2,
log_decoding_SLog2,
log_encoding_SLog3,
log_decoding_SLog3,
eotf_inverse_sRGB,
eotf_sRGB,
log_encoding_ViperLog,
log_decoding_ViperLog,
)
from .transfer_functions import (
LOG_ENCODINGS,
log_encoding,
LOG_DECODINGS,
log_decoding,
OETFS,
oetf,
OETF_INVERSES,
oetf_inverse,
EOTFS,
eotf,
EOTF_INVERSES,
eotf_inverse,
CCTF_ENCODINGS,
cctf_encoding,
CCTF_DECODINGS,
cctf_decoding,
OOTFS,
ootf,
OOTF_INVERSES,
ootf_inverse,
)
from .datasets import (
RGB_COLOURSPACES,
RGB_COLOURSPACE_ACES2065_1,
RGB_COLOURSPACE_ACESCC,
RGB_COLOURSPACE_ACESCCT,
RGB_COLOURSPACE_ACESPROXY,
RGB_COLOURSPACE_ACESCG,
RGB_COLOURSPACE_ADOBE_RGB1998,
RGB_COLOURSPACE_ADOBE_WIDE_GAMUT_RGB,
RGB_COLOURSPACE_ALEXA_WIDE_GAMUT,
RGB_COLOURSPACE_APPLE_RGB,
RGB_COLOURSPACE_BEST_RGB,
RGB_COLOURSPACE_BETA_RGB,
RGB_COLOURSPACE_BLACKMAGIC_WIDE_GAMUT,
RGB_COLOURSPACE_BT470_525,
RGB_COLOURSPACE_BT470_625,
RGB_COLOURSPACE_BT709,
RGB_COLOURSPACE_BT2020,
RGB_COLOURSPACE_CIE_RGB,
RGB_COLOURSPACE_CINEMA_GAMUT,
RGB_COLOURSPACE_COLOR_MATCH_RGB,
RGB_COLOURSPACE_DAVINCI_WIDE_GAMUT,
RGB_COLOURSPACE_DCDM_XYZ,
RGB_COLOURSPACE_DCI_P3,
RGB_COLOURSPACE_DCI_P3_P,
RGB_COLOURSPACE_DISPLAY_P3,
RGB_COLOURSPACE_DJI_D_GAMUT,
RGB_COLOURSPACE_DON_RGB_4,
RGB_COLOURSPACE_ECI_RGB_V2,
RGB_COLOURSPACE_EKTA_SPACE_PS_5,
RGB_COLOURSPACE_FILMLIGHT_E_GAMUT,
RGB_COLOURSPACE_PROTUNE_NATIVE,
RGB_COLOURSPACE_MAX_RGB,
RGB_COLOURSPACE_N_GAMUT,
RGB_COLOURSPACE_P3_D65,
RGB_COLOURSPACE_PAL_SECAM,
RGB_COLOURSPACE_RED_COLOR,
RGB_COLOURSPACE_RED_COLOR_2,
RGB_COLOURSPACE_RED_COLOR_3,
RGB_COLOURSPACE_RED_COLOR_4,
RGB_COLOURSPACE_DRAGON_COLOR,
RGB_COLOURSPACE_DRAGON_COLOR_2,
RGB_COLOURSPACE_RED_WIDE_GAMUT_RGB,
RGB_COLOURSPACE_ROMM_RGB,
RGB_COLOURSPACE_RIMM_RGB,
RGB_COLOURSPACE_ERIMM_RGB,
RGB_COLOURSPACE_PROPHOTO_RGB,
RGB_COLOURSPACE_RUSSELL_RGB,
RGB_COLOURSPACE_SHARP_RGB,
RGB_COLOURSPACE_SMPTE_240M,
RGB_COLOURSPACE_SMPTE_C,
RGB_COLOURSPACE_NTSC1953,
RGB_COLOURSPACE_NTSC1987,
RGB_COLOURSPACE_S_GAMUT,
RGB_COLOURSPACE_S_GAMUT3,
RGB_COLOURSPACE_S_GAMUT3_CINE,
RGB_COLOURSPACE_VENICE_S_GAMUT3,
RGB_COLOURSPACE_VENICE_S_GAMUT3_CINE,
RGB_COLOURSPACE_sRGB,
RGB_COLOURSPACE_V_GAMUT,
RGB_COLOURSPACE_XTREME_RGB,
RGB_COLOURSPACE_F_GAMUT,
)
from .common import XYZ_to_sRGB, sRGB_to_XYZ
from .cylindrical import (
RGB_to_HSV,
HSV_to_RGB,
RGB_to_HSL,
HSL_to_RGB,
RGB_to_HCL,
HCL_to_RGB,
)
from .cmyk import RGB_to_CMY, CMY_to_RGB, CMY_to_CMYK, CMYK_to_CMY
from .hanbury2003 import RGB_to_IHLS, IHLS_to_RGB
from .prismatic import RGB_to_Prismatic, Prismatic_to_RGB
from .ycbcr import (
WEIGHTS_YCBCR,
matrix_YCbCr,
offset_YCbCr,
RGB_to_YCbCr,
YCbCr_to_RGB,
RGB_to_YcCbcCrc,
YcCbcCrc_to_RGB,
)
from .ycocg import RGB_to_YCoCg, YCoCg_to_RGB
from .ictcp import RGB_to_ICtCp, ICtCp_to_RGB, XYZ_to_ICtCp, ICtCp_to_XYZ
__all__ = [
"normalised_primary_matrix",
"chromatically_adapted_primaries",
"primaries_whitepoint",
"RGB_luminance_equation",
"RGB_luminance",
]
__all__ += [
"RGB_Colourspace",
]
__all__ += [
"XYZ_to_RGB",
"RGB_to_XYZ",
]
__all__ += [
"matrix_RGB_to_RGB",
"RGB_to_RGB",
]
__all__ += [
"CV_range",
"legal_to_full",
"full_to_legal",
"gamma_function",
"log_encoding_ACESproxy",
"log_decoding_ACESproxy",
"log_encoding_ACEScc",
"log_decoding_ACEScc",
"log_encoding_ACEScct",
"log_decoding_ACEScct",
"oetf_ARIBSTDB67",
"oetf_inverse_ARIBSTDB67",
"log_encoding_ALEXALogC",
"log_decoding_ALEXALogC",
"oetf_BlackmagicFilmGeneration5",
"oetf_inverse_BlackmagicFilmGeneration5",
"log_encoding_CanonLog",
"log_decoding_CanonLog",
"log_encoding_CanonLog2",
"log_decoding_CanonLog2",
"log_encoding_CanonLog3",
"log_decoding_CanonLog3",
"log_encoding_Cineon",
"log_decoding_Cineon",
"oetf_DaVinciIntermediate",
"oetf_inverse_DaVinciIntermediate",
"eotf_inverse_DCDM",
"eotf_DCDM",
"eotf_inverse_DICOMGSDF",
"eotf_DICOMGSDF",
"log_encoding_DJIDLog",
"log_decoding_DJIDLog",
"exponent_function_basic",
"exponent_function_monitor_curve",
"log_encoding_FilmicPro6",
"log_decoding_FilmicPro6",
"log_encoding_FilmLightTLog",
"log_decoding_FilmLightTLog",
"log_encoding_Protune",
"log_decoding_Protune",
"oetf_BT601",
"oetf_inverse_BT601",
"oetf_BT709",
"oetf_inverse_BT709",
"eotf_inverse_BT1886",
"eotf_BT1886",
"eotf_inverse_BT2020",
"eotf_BT2020",
"eotf_inverse_ST2084",
"eotf_ST2084",
"oetf_PQ_BT2100",
"oetf_inverse_PQ_BT2100",
"eotf_PQ_BT2100",
"eotf_inverse_PQ_BT2100",
"ootf_PQ_BT2100",
"ootf_inverse_PQ_BT2100",
"oetf_HLG_BT2100",
"oetf_inverse_HLG_BT2100",
"BT2100_HLG_EOTF_METHODS",
"eotf_HLG_BT2100",
"BT2100_HLG_EOTF_INVERSE_METHODS",
"eotf_inverse_HLG_BT2100",
"BT2100_HLG_OOTF_METHODS",
"ootf_HLG_BT2100",
"BT2100_HLG_OOTF_INVERSE_METHODS",
"ootf_inverse_HLG_BT2100",
"linear_function",
"logarithmic_function_basic",
"logarithmic_function_quasilog",
"logarithmic_function_camera",
"log_encoding_Log2",
"log_decoding_Log2",
"log_encoding_Panalog",
"log_decoding_Panalog",
"log_encoding_VLog",
"log_decoding_VLog",
"log_encoding_FLog",
"log_decoding_FLog",
"log_encoding_NLog",
"log_decoding_NLog",
"log_encoding_PivotedLog",
"log_decoding_PivotedLog",
"log_encoding_REDLog",
"log_decoding_REDLog",
"log_encoding_REDLogFilm",
"log_decoding_REDLogFilm",
"LOG3G10_ENCODING_METHODS",
"LOG3G10_DECODING_METHODS",
"log_encoding_Log3G10",
"log_decoding_Log3G10",
"log_encoding_Log3G12",
"log_decoding_Log3G12",
"cctf_encoding_ROMMRGB",
"cctf_decoding_ROMMRGB",
"cctf_encoding_ProPhotoRGB",
"cctf_decoding_ProPhotoRGB",
"cctf_encoding_RIMMRGB",
"cctf_decoding_RIMMRGB",
"log_encoding_ERIMMRGB",
"log_decoding_ERIMMRGB",
"oetf_SMPTE240M",
"eotf_SMPTE240M",
"log_encoding_SLog",
"log_decoding_SLog",
"log_encoding_SLog2",
"log_decoding_SLog2",
"log_encoding_SLog3",
"log_decoding_SLog3",
"eotf_inverse_sRGB",
"eotf_sRGB",
"log_encoding_ViperLog",
"log_decoding_ViperLog",
]
__all__ += [
"LOG_ENCODINGS",
"log_encoding",
"LOG_DECODINGS",
"log_decoding",
"OETFS",
"oetf",
"OETF_INVERSES",
"oetf_inverse",
"EOTFS",
"eotf",
"EOTF_INVERSES",
"eotf_inverse",
"CCTF_ENCODINGS",
"cctf_encoding",
"CCTF_DECODINGS",
"cctf_decoding",
"OOTFS",
"ootf",
"OOTF_INVERSES",
"ootf_inverse",
]
__all__ += [
"RGB_COLOURSPACES",
"RGB_COLOURSPACE_ACES2065_1",
"RGB_COLOURSPACE_ACESCC",
"RGB_COLOURSPACE_ACESCCT",
"RGB_COLOURSPACE_ACESPROXY",
"RGB_COLOURSPACE_ACESCG",
"RGB_COLOURSPACE_ADOBE_RGB1998",
"RGB_COLOURSPACE_ADOBE_WIDE_GAMUT_RGB",
"RGB_COLOURSPACE_ALEXA_WIDE_GAMUT",
"RGB_COLOURSPACE_APPLE_RGB",
"RGB_COLOURSPACE_BEST_RGB",
"RGB_COLOURSPACE_BETA_RGB",
"RGB_COLOURSPACE_BLACKMAGIC_WIDE_GAMUT",
"RGB_COLOURSPACE_BT470_525",
"RGB_COLOURSPACE_BT470_625",
"RGB_COLOURSPACE_BT709",
"RGB_COLOURSPACE_BT2020",
"RGB_COLOURSPACE_CIE_RGB",
"RGB_COLOURSPACE_CINEMA_GAMUT",
"RGB_COLOURSPACE_COLOR_MATCH_RGB",
"RGB_COLOURSPACE_DAVINCI_WIDE_GAMUT",
"RGB_COLOURSPACE_DCDM_XYZ",
"RGB_COLOURSPACE_DCI_P3",
"RGB_COLOURSPACE_DCI_P3_P",
"RGB_COLOURSPACE_DISPLAY_P3",
"RGB_COLOURSPACE_DJI_D_GAMUT",
"RGB_COLOURSPACE_DON_RGB_4",
"RGB_COLOURSPACE_ECI_RGB_V2",
"RGB_COLOURSPACE_EKTA_SPACE_PS_5",
"RGB_COLOURSPACE_FILMLIGHT_E_GAMUT",
"RGB_COLOURSPACE_PROTUNE_NATIVE",
"RGB_COLOURSPACE_MAX_RGB",
"RGB_COLOURSPACE_N_GAMUT",
"RGB_COLOURSPACE_P3_D65",
"RGB_COLOURSPACE_PAL_SECAM",
"RGB_COLOURSPACE_RED_COLOR",
"RGB_COLOURSPACE_RED_COLOR_2",
"RGB_COLOURSPACE_RED_COLOR_3",
"RGB_COLOURSPACE_RED_COLOR_4",
"RGB_COLOURSPACE_DRAGON_COLOR",
"RGB_COLOURSPACE_DRAGON_COLOR_2",
"RGB_COLOURSPACE_RED_WIDE_GAMUT_RGB",
"RGB_COLOURSPACE_ROMM_RGB",
"RGB_COLOURSPACE_RIMM_RGB",
"RGB_COLOURSPACE_ERIMM_RGB",
"RGB_COLOURSPACE_PROPHOTO_RGB",
"RGB_COLOURSPACE_RUSSELL_RGB",
"RGB_COLOURSPACE_SHARP_RGB",
"RGB_COLOURSPACE_SMPTE_240M",
"RGB_COLOURSPACE_SMPTE_C",
"RGB_COLOURSPACE_NTSC1953",
"RGB_COLOURSPACE_NTSC1987",
"RGB_COLOURSPACE_S_GAMUT",
"RGB_COLOURSPACE_S_GAMUT3",
"RGB_COLOURSPACE_S_GAMUT3_CINE",
"RGB_COLOURSPACE_VENICE_S_GAMUT3",
"RGB_COLOURSPACE_VENICE_S_GAMUT3_CINE",
"RGB_COLOURSPACE_sRGB",
"RGB_COLOURSPACE_V_GAMUT",
"RGB_COLOURSPACE_XTREME_RGB",
"RGB_COLOURSPACE_F_GAMUT",
]
__all__ += [
"XYZ_to_sRGB",
"sRGB_to_XYZ",
]
__all__ += [
"RGB_to_HSV",
"HSV_to_RGB",
"RGB_to_HSL",
"HSL_to_RGB",
"RGB_to_HCL",
"HCL_to_RGB",
]
__all__ += [
"RGB_to_CMY",
"CMY_to_RGB",
"CMY_to_CMYK",
"CMYK_to_CMY",
]
__all__ += [
"RGB_to_IHLS",
"IHLS_to_RGB",
]
__all__ += [
"RGB_to_Prismatic",
"Prismatic_to_RGB",
]
__all__ += [
"WEIGHTS_YCBCR",
"matrix_YCbCr",
"offset_YCbCr",
"RGB_to_YCbCr",
"YCbCr_to_RGB",
"RGB_to_YcCbcCrc",
"YcCbcCrc_to_RGB",
]
__all__ += [
"RGB_to_YCoCg",
"YCoCg_to_RGB",
]
__all__ += [
"RGB_to_ICtCp",
"ICtCp_to_RGB",
"XYZ_to_ICtCp",
"ICtCp_to_XYZ",
]
| 25.843058
| 73
| 0.738399
| 1,535
| 12,844
| 5.480782
| 0.114007
| 0.208011
| 0.06062
| 0.011886
| 0.95709
| 0.955664
| 0.931891
| 0.926661
| 0.926661
| 0.926661
| 0
| 0.038135
| 0.183354
| 12,844
| 496
| 74
| 25.895161
| 0.763943
| 0
| 0
| 0.032389
| 0
| 0
| 0.353239
| 0.222516
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.030364
| 0
| 0.030364
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3578e0705866546829f5275f2419ea47c6de70e9
| 30,911
|
py
|
Python
|
pyraider/utils.py
|
raidersource/pyraider
|
ea770ca88458f27b8d28bd5a319709e2c9603134
|
[
"MIT"
] | 6
|
2020-01-08T11:15:02.000Z
|
2021-09-04T07:07:25.000Z
|
pyraider/utils.py
|
raidersource/pyraider
|
ea770ca88458f27b8d28bd5a319709e2c9603134
|
[
"MIT"
] | 4
|
2020-04-14T19:23:13.000Z
|
2021-06-16T16:42:22.000Z
|
pyraider/utils.py
|
raidersource/pyraider
|
ea770ca88458f27b8d28bd5a319709e2c9603134
|
[
"MIT"
] | 1
|
2020-04-14T19:03:10.000Z
|
2020-04-14T19:03:10.000Z
|
from beautifultable import BeautifulTable, BTRowCollection
import colored
from colored import stylize
import csv
import hashlib
import json
from json2html import *
import os
import pickle
from pkg_resources import parse_version
import subprocess
import sys
import ssl
import time
lib_path = os.path.abspath(os.path.join('..'))
sys.path.append(lib_path)
try:
from urllib2 import Request, urlopen
except ImportError:
from urllib.request import Request, urlopen, urlretrieve
def download_progress(count, block_size, total_size):
    """Print download progress to stdout; ``reporthook`` for urlretrieve.

    count: blocks transferred so far (0 on the first call).
    block_size: block size in bytes.
    total_size: total download size in bytes.
    """
    global start_time
    if count == 0:
        # First callback: remember when the transfer started.
        start_time = time.time()
        return
    duration = time.time() - start_time
    progress_size = int(count * block_size)
    # Bug fix: a very fast callback can arrive with duration == 0, which made
    # the original raise ZeroDivisionError; report 0 KB/s in that case.
    speed = int(progress_size / (1024 * duration)) if duration > 0 else 0
    percent = int(count * block_size * 100 / total_size)
    sys.stdout.write("\r...%d%%, %d MB, %d KB/s, %d seconds passed" %
                     (percent, progress_size / (1024 * 1024), speed, duration))
    sys.stdout.flush()
def export_to_json(data_dict, export_file_path):
    """Write the vulnerability report *data_dict* to a JSON file.

    export_file_path: '.' or an empty value writes ``result.json`` in the
    current directory; any other value is used as the target path.
    """
    if len(data_dict.get('pyraider')) > 0:
        if export_file_path and export_file_path != '.':
            result_path = export_file_path
            filename = os.path.basename(result_path)
            print(stylize('{0} has been exported in {1} directory'.format(filename, result_path), colored.fg("green")))
        else:
            # '.' and falsy paths both fall back to the working directory.
            result_path = 'result.json'
            print(stylize('result.json has been exported in the current directory', colored.fg("green")))
        with open(result_path, 'w') as fp:
            json.dump(data_dict, fp, indent=4)
def export_to_html(data_dict, export_file_path):
    """Write the vulnerability report *data_dict* to an HTML file.

    export_file_path: '.' or an empty value writes ``result.html`` in the
    current directory; any other value is used as the target path.
    """
    # Bug fixes: the docstring claimed a JSON file, and the original closed
    # </html> inside the header so the report table was appended after the
    # document end tag; the closing tag now follows the data.
    if len(data_dict.get('pyraider')) > 0:
        result_path = ''
        if export_file_path == '.':
            result_path = 'result.html'
            print(stylize('result.html has been exported in the current directory', colored.fg("green")))
        elif export_file_path:
            result_path = export_file_path
            filename = os.path.basename(result_path)
            print(stylize('{0} has been exported in {1} directory'.format(filename, result_path), colored.fg("green")))
        else:
            result_path = 'result.html'
            print(stylize('result.html has been exported in the current directory', colored.fg("green")))
        data = json2html.convert(json=data_dict, table_attributes="id=\"info-table\" class=\"table table-striped table-bordered table-hover\"")
        header = """<html>
<head>
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css" integrity="sha384-ggOyR0iXCbMQv3Xipma34MD+dH/1fQ784/j6cY/iJTQUOhcWr7x9JvoRxT2MZw1T" crossorigin="anonymous">
</head>
<div class="jumbotron jumbotron-fluid">
<div class="container">
<h1 class="display-4">PyRaider Result</h1>
</div>
</div>
<hr>
<br>
"""
        with open(result_path, 'w') as fp:
            fp.write(header)
            fp.write(data)
            fp.write("</html>")
def export_to_csv(data_dict, export_file_path):
    """Write the vulnerability report *data_dict* to a CSV file.

    export_file_path: '.' or an empty value writes ``result.csv`` in the
    current directory; any other value is used as the target path.
    """
    if len(data_dict.get('pyraider')) > 0:
        result_path = ''
        if export_file_path == '.':
            result_path = 'result.csv'
            print(stylize('result.csv has been exported in the current directory', colored.fg("green")))
        elif export_file_path:
            result_path = export_file_path
            filename = os.path.basename(result_path)
            print(stylize('{0} has been exported in {1} directory'.format(filename, result_path), colored.fg("green")))
        else:
            result_path = 'result.csv'
            print(stylize('result.csv has been exported in the current directory', colored.fg("green")))
        with open(result_path, 'w') as f:
            writer = csv.DictWriter(f,
                                    fieldnames=['Package', 'Current Version', 'Description', 'Severity', 'CWE', 'CVE',
                                                'Update Version'])
            writer.writeheader()
            # Renamed loop variables: the original re-used k/v at two nesting
            # levels, shadowing the outer pair.
            for vul_list in data_dict.values():
                for entry in vul_list:
                    for package, info in entry.items():
                        writer.writerow(
                            # Bug fix: the scanners store the text under
                            # 'description'; the original read 'decription',
                            # leaving the column always empty.
                            {"Package": package, "Current Version": info.get('current_version'),
                             "Description": info.get('description'),
                             'Severity': info.get('severity'), 'CWE': info.get('cwe'),
                             'CVE': info.get('cve'), 'Update Version': info.get('update_to')})
def show_high_severity_vulnerabilities(data_dict):
    """Render only HIGH-severity vulnerability entries as terminal tables."""
    for k, v in data_dict.items():
        # Bug fix: skip non-HIGH entries up front so a table containing only
        # the Package row is never printed for them.
        if v.get('severity') != 'HIGH':
            continue
        parent_table = BeautifulTable()
        parent_table.rows.append(['Package', k])
        parent_table.rows.append(
            ["Severity", stylize(v.get('severity'), colored.fg("red"))])
        parent_table.rows.append(['CWE', v.get('cwe')])
        parent_table.rows.append(['CVE', v.get('cve')])
        # NOTE(review): versions are compared as plain strings here — confirm
        # against pkg_resources.parse_version semantics.
        if v.get('current_version') < v.get('update_to'):
            parent_table.rows.append(['Current version', stylize(
                v.get('current_version'), colored.fg("red"))])
        else:
            parent_table.rows.append(['Current version', stylize(
                v.get('current_version'), colored.fg("green"))])
        if v.get('current_version') == v.get('update_to'):
            parent_table.rows.append(['Update To', stylize(
                'Package is up to date', colored.fg("green"))])
        else:
            parent_table.rows.append(['Update To', stylize(
                v.get('update_to'), colored.fg("green"))])
        parent_table.rows.append(['Description', v.get('description')])
        parent_table.rows.append(['Resolve', "pip install {0}=={1}".format(k, v.get('update_to'))])
        parent_table.rows.append(['More Info', "https://nvd.nist.gov/vuln/detail/{0}".format(v.get('cve'))])
        print('\n')
        print(parent_table)
def show_medium_severity_vulnerabilities(data_dict):
    """Render only MEDIUM-severity vulnerability entries as terminal tables."""
    for k, v in data_dict.items():
        # Bug fix: skip non-MEDIUM entries up front so a table containing only
        # the Package row is never printed for them.
        if v.get('severity') != 'MEDIUM':
            continue
        parent_table = BeautifulTable()
        parent_table.rows.append(['Package', k])
        parent_table.rows.append(["Severity", stylize(
            v.get('severity'), colored.fg("yellow"))])
        parent_table.rows.append(['CWE', v.get('cwe')])
        parent_table.rows.append(['CVE', v.get('cve')])
        # NOTE(review): versions are compared as plain strings here — confirm
        # against pkg_resources.parse_version semantics.
        if v.get('current_version') < v.get('update_to'):
            parent_table.rows.append(['Current version', stylize(
                v.get('current_version'), colored.fg("red"))])
        else:
            parent_table.rows.append(['Current version', stylize(
                v.get('current_version'), colored.fg("green"))])
        if v.get('current_version') == v.get('update_to'):
            parent_table.rows.append(['Update To', stylize(
                'Package is up to date', colored.fg("green"))])
        else:
            parent_table.rows.append(['Update To', stylize(
                v.get('update_to'), colored.fg("green"))])
        parent_table.rows.append(['Description', v.get('description')])
        parent_table.rows.append(['Resolve', "pip install {0}=={1}".format(k, v.get('update_to'))])
        parent_table.rows.append(['More Info', "https://nvd.nist.gov/vuln/detail/{0}".format(v.get('cve'))])
        print('\n')
        print(parent_table)
def show_low_severity_vulnerabilities(data_dict):
    """Render only LOW-severity vulnerability entries as terminal tables."""
    for k, v in data_dict.items():
        # Bug fix: the scanners store the severity as 'LOW' (see
        # scanned_low_severity), but this filter checked 'Low', so no entry
        # ever matched. Also skip non-matching entries up front so a table
        # containing only the Package row is never printed.
        if v.get('severity') != 'LOW':
            continue
        parent_table = BeautifulTable()
        parent_table.rows.append(['Package', k])
        parent_table.rows.append(
            ["Severity", stylize(v.get('severity'), colored.fg("blue"))])
        parent_table.rows.append(['CWE', v.get('cwe')])
        parent_table.rows.append(['CVE', v.get('cve')])
        if v.get('current_version') < v.get('update_to'):
            parent_table.rows.append(['Current version', stylize(
                v.get('current_version'), colored.fg("red"))])
        else:
            parent_table.rows.append(['Current version', stylize(
                v.get('current_version'), colored.fg("green"))])
        if v.get('current_version') == v.get('update_to'):
            parent_table.rows.append(['Update To', stylize(
                'Package is up to date', colored.fg("green"))])
        else:
            parent_table.rows.append(['Update To', stylize(
                v.get('update_to'), colored.fg("green"))])
        parent_table.rows.append(['Description', v.get('description')])
        parent_table.rows.append(['Resolve', "pip install {0}=={1}".format(k, v.get('update_to'))])
        parent_table.rows.append(['More Info', "https://nvd.nist.gov/vuln/detail/{0}".format(v.get('cve'))])
        print('\n')
        print(parent_table)
def show_vulnerablities(data_dict, sev=None):
    """Render vulnerability data as terminal tables.

    sev: 'HIGH', 'MEDIUM' or 'LOW' delegates to the matching filtered view;
    anything else renders every entry.
    """
    if sev == 'HIGH':
        show_high_severity_vulnerabilities(data_dict)
    elif sev == 'MEDIUM':
        show_medium_severity_vulnerabilities(data_dict)
    elif sev == 'LOW':
        show_low_severity_vulnerabilities(data_dict)
    else:
        # Severity-to-colour lookup; anything unknown (including LOW) is
        # rendered blue, exactly as the original branch chain did.
        colour_for = {'HIGH': 'red', 'MEDIUM': 'yellow'}
        for package, info in data_dict.items():
            table = BeautifulTable()
            table.rows.append(['Package', package])
            severity = info.get('severity')
            table.rows.append(
                ["Severity", stylize(severity, colored.fg(colour_for.get(severity, 'blue')))])
            table.rows.append(['CWE', info.get('cwe')])
            table.rows.append(['CVE', info.get('cve')])
            current = info.get('current_version')
            latest = info.get('update_to')
            version_colour = "red" if current < latest else "green"
            table.rows.append(['Current version', stylize(current, colored.fg(version_colour))])
            if current == latest:
                table.rows.append(['Update To', stylize('Package is up to date', colored.fg("green"))])
            else:
                table.rows.append(['Update To', stylize(latest, colored.fg("green"))])
            table.rows.append(['Description', info.get('description')])
            table.rows.append(['Resolve', "pip install {0}=={1}".format(package, latest)])
            table.rows.append(['More Info', "https://nvd.nist.gov/vuln/detail/{0}".format(info.get('cve'))])
            print('\n')
            print(table)
def show_secure_packages(data_dict):
    """Render packages with no known vulnerabilities as terminal tables.

    data_dict: iterable of {package_name: {'current_version': ...}} dicts.
    """
    for secure_entry in data_dict:
        for package, details in secure_entry.items():
            table = BeautifulTable()
            table.rows.append(['Package', package])
            table.rows.append(['Current version', stylize(
                details.get('current_version'), colored.fg("green"))])
            table.rows.append(['Status', stylize(
                'No known security vulnerabilities found', colored.fg("green"))])
            print('\n')
            print(table)
def render_package_update_report(data_dict):
    """Print one table per package comparing its current and latest versions.

    Entries whose 'current_version' is None only show the latest version.
    """
    print("\n")
    for package, details in data_dict.items():
        table = BeautifulTable()
        table.rows.append(['Package', package])
        current = details.get('current_version')
        latest = details.get('update_to')
        if current is not None:
            # NOTE(review): plain string comparison of version numbers —
            # confirm against pkg_resources.parse_version semantics.
            current_colour = "red" if current < latest else "green"
            table.rows.append(['Current version', stylize(current, colored.fg(current_colour))])
            if current == latest:
                table.rows.append(['Update To', stylize(
                    'Package is up to date', colored.fg("green"))])
            else:
                table.rows.append(['Update To', stylize(latest, colored.fg("green"))])
        else:
            table.rows.append(['Latest Version', stylize(latest, colored.fg("green"))])
        print('\n')
        print(table)
def get_info_from_pypi(packages):
    """Return the latest release version string of *packages* from PyPI.

    Returns the literal string 'Unexpected error' on a non-200 response
    (kept for backward compatibility with existing callers).
    """
    # SECURITY NOTE: this disables TLS certificate verification for the whole
    # process; kept because other functions in this module rely on the same
    # setting, but it should be removed once certificates validate.
    ssl._create_default_https_context = ssl._create_unverified_context
    url = 'https://pypi.python.org/pypi/{0}/json'.format(packages)
    headers = {'Accept': 'application/json'}
    req = Request(url=url, headers=headers)
    resp = urlopen(req)
    try:
        if resp.code == 200:
            info_data = json.loads(resp.read().decode())
            # Sort release strings as real versions so the last is the newest.
            latest_version = sorted(info_data["releases"], key=parse_version)
            return latest_version[-1]
        return 'Unexpected error'
    finally:
        # Bug fix: the original never closed the HTTP response, leaking the
        # connection on every call.
        resp.close()
def validate_version(packages, current_version):
    """Return {package: {'current_version': ..., 'update_to': ...}}.

    'current_version' is omitted when *current_version* is None; 'update_to'
    is always the latest version reported by PyPI.
    """
    latest = get_info_from_pypi(packages)
    entry = {}
    if current_version is not None:
        entry['current_version'] = current_version
    entry['update_to'] = latest
    return {packages: entry}
def scan_vulnerabilities():
    """Load the full vulnerability database, downloading it on first use.

    Returns the parsed database (JSON dict when cached locally, unpickled
    object right after a fresh download).
    """
    this_dir, this_filename = os.path.split(__file__)
    data_path = os.path.join(this_dir, 'resource.json')
    if os.path.exists(data_path):
        # Bug fix: use a context manager — the original leaked the handle.
        with open(data_path) as f:
            return json.load(f)
    print(stylize('Downloading resources to scan the packages, It may take some time to download .....', colored.fg("green")))
    ssl._create_default_https_context = ssl._create_unverified_context
    url = 'https://pyraider-source-data.s3-us-west-2.amazonaws.com/resource.pickle'
    try:
        urlretrieve(url, data_path, download_progress)
    except Exception:
        print(stylize('There is some error. You need to enable `https://pyraider-source-data.s3-us-west-2.amazonaws.com/` URL to download database',
                      colored.fg("red")))
    # SECURITY NOTE: unpickling data fetched with TLS verification disabled
    # executes arbitrary code if the payload is tampered with — consider
    # switching this endpoint to JSON.
    with open(data_path, 'rb') as f:
        data = pickle.load(f)
    print(stylize('\nResource has been successfully downloaded', colored.fg("green")))
    return data
def scan_light_vulnerabilities():
    """Load the lightweight vulnerability database, downloading on first use.

    Returns the parsed JSON database as a dict.
    """
    this_dir, this_filename = os.path.split(__file__)
    data_path = os.path.join(this_dir, 'resource_light.json')
    if os.path.exists(data_path):
        # Bug fix: use a context manager — the original leaked the handle.
        with open(data_path) as f:
            return json.load(f)
    print(stylize('Downloading resources to scan the packages, It may take some time to download .....', colored.fg("green")))
    ssl._create_default_https_context = ssl._create_unverified_context
    url = 'https://pyraider-source-data.s3-us-west-2.amazonaws.com/resource_light.json'
    try:
        urlretrieve(url, data_path, download_progress)
    except Exception:
        print(stylize('There is some error. You need to enable `https://pyraider-source-data.s3-us-west-2.amazonaws.com/` URL to download database',
                      colored.fg("red")))
    # Bug fix: the freshly downloaded file was also opened without closing.
    with open(data_path) as f:
        data = json.load(f)
    print(stylize('\nResource has been successfully downloaded', colored.fg("green")))
    return data
def check_latestdb():
    """Force-refresh the local lightweight vulnerability database from S3."""
    data_path = os.path.join(os.path.dirname(__file__), 'resource_light.json')
    # Drop any stale copy so the download below fully replaces it.
    if os.path.exists(data_path):
        os.remove(data_path)
    print(stylize('Downloading resources to scan the packages, It may take some time to download .....', colored.fg("green")))
    ssl._create_default_https_context = ssl._create_unverified_context
    url = 'https://pyraider-source-data.s3-us-west-2.amazonaws.com/resource_light.json'
    try:
        urlretrieve(url, data_path, download_progress)
    except Exception:
        print(stylize('There is some error. You need to enable `https://pyraider-source-data.s3-us-west-2.amazonaws.com/` URL to download database',
                      colored.fg("red")))
    # Only report success when the file actually landed on disk.
    if os.path.exists(data_path):
        print(stylize('Resource database successfully downloaded and its last updated on October 2021', colored.fg("green")))
def scanned_high_severity(data, req_name, req_version):
    """Collect HIGH-severity advisories for *req_name* at *req_version*.

    Returns {package: advisory-details} (at most one entry; later matching
    advisories overwrite earlier ones, as in the original).
    """
    result = {}
    for name, record in data.items():
        if name.lower() != req_name:
            continue
        latest = get_info_from_pypi(name.lower())
        for advisory in record.get('info'):
            if advisory.get('sev') != 'HIGH':
                continue
            if not advisory.get('version'):
                continue
            # NOTE(review): `a <= b or b <= a` is true for any comparable pair,
            # so every HIGH advisory matches regardless of version. Preserved
            # verbatim — confirm the intended version-range check.
            if req_version <= advisory.get('version') or advisory.get('version') <= req_version:
                result[name] = {
                    'current_version': req_version,
                    'update_to': latest,
                    'cwe': advisory.get('cwe'),
                    'cve': advisory.get('cve'),
                    'severity': advisory.get('sev'),
                }
                if advisory.get('description'):
                    result[name]['description'] = advisory.get('description')
    return result
def scanned_medium_severity(data, req_name, req_version):
    """Collect MEDIUM-severity advisories for *req_name* at *req_version*.

    Returns {package: advisory-details} (at most one entry; later matching
    advisories overwrite earlier ones, as in the original).
    """
    result = {}
    for name, record in data.items():
        if name.lower() != req_name:
            continue
        latest = get_info_from_pypi(name.lower())
        for advisory in record.get('info'):
            if advisory.get('sev') != 'MEDIUM':
                continue
            if not advisory.get('version'):
                continue
            # NOTE(review): this range check is tautological for comparable
            # strings — preserved verbatim; confirm the intended comparison.
            if req_version <= advisory.get('version') or advisory.get('version') <= req_version:
                result[name] = {
                    'current_version': req_version,
                    'update_to': latest,
                    'cwe': advisory.get('cwe'),
                    'cve': advisory.get('cve'),
                    'severity': advisory.get('sev'),
                }
                if advisory.get('description'):
                    result[name]['description'] = advisory.get('description')
    return result
def scanned_low_severity(data, req_name, req_version):
    """Collect LOW-severity advisories for *req_name* at *req_version*.

    Returns {package: advisory-details} (at most one entry; later matching
    advisories overwrite earlier ones, as in the original).
    """
    result = {}
    for name, record in data.items():
        if name.lower() != req_name:
            continue
        latest = get_info_from_pypi(name.lower())
        for advisory in record.get('info'):
            if advisory.get('sev') != 'LOW':
                continue
            if not advisory.get('version'):
                continue
            # NOTE(review): this range check is tautological for comparable
            # strings — preserved verbatim; confirm the intended comparison.
            if req_version <= advisory.get('version') or advisory.get('version') <= req_version:
                result[name] = {
                    'current_version': req_version,
                    'update_to': latest,
                    'cwe': advisory.get('cwe'),
                    'cve': advisory.get('cve'),
                    'severity': advisory.get('sev'),
                }
                if advisory.get('description'):
                    result[name]['description'] = advisory.get('description')
    return result
def scanned_vulnerable_data(data, req_name, req_version, sev):
    """Scan *data* for advisories affecting *req_name* at *req_version*.

    sev: 'HIGH', 'MEDIUM' or 'LOW' delegates to the severity-specific
    scanner; any other value matches advisories of every severity.
    """
    severity_scanners = {
        'HIGH': scanned_high_severity,
        'MEDIUM': scanned_medium_severity,
        'LOW': scanned_low_severity,
    }
    scanner = severity_scanners.get(sev)
    if scanner is not None:
        return scanner(data, req_name, req_version)
    result = {}
    for name, record in data.items():
        if name.lower() != req_name:
            continue
        latest = get_info_from_pypi(name.lower())
        for advisory in record.get('info'):
            if not advisory.get('version'):
                continue
            # NOTE(review): this range check is tautological for comparable
            # strings — preserved verbatim; confirm the intended comparison.
            if req_version <= advisory.get('version') or advisory.get('version') <= req_version:
                result[name] = {
                    'current_version': req_version,
                    'update_to': latest,
                    'cwe': advisory.get('cwe'),
                    'cve': advisory.get('cve'),
                    'severity': advisory.get('sev'),
                }
                if advisory.get('description'):
                    result[name]['description'] = advisory.get('description')
    return result
def query_yes_no(question, default="yes"):
    """Prompt *question* on stdout until the user answers yes or no.

    default: answer assumed on empty input — 'yes', 'no', or None to force
    an explicit answer. Returns True for yes, False for no. Raises
    ValueError for any other default.
    """
    valid = {"yes": True, "y": True, "ye": True,
             "no": False, "n": False}
    prompts = {None: " [y/n] ", "yes": " [Y/n] ", "no": " [y/N] "}
    if default not in prompts:
        raise ValueError("invalid default answer: {0}".format(default))
    prompt = prompts[default]
    while True:
        sys.stdout.write(question + prompt)
        choice = input().lower()
        if default is not None and choice == '':
            return valid[default]
        if choice in valid:
            # The original printed a blank line on both branches before
            # returning; a single print preserves that.
            print("")
            return valid[choice]
        sys.stdout.write("Please respond with 'yes' or 'no' "
                         "(or 'y' or 'n').\n")
def check_installation(question, default="yes"):
    """Prompt *question* until the user answers yes/no; return True/False.

    This was a verbatim copy of query_yes_no (plus an unreachable trailing
    ``return`` after the ``while True`` loop); it now delegates so the prompt
    loop exists in one place. The name is kept for backward compatibility.
    """
    return query_yes_no(question, default)
def fix(data_dict):
    """Interactively update vulnerable packages one by one.

    data_dict: iterable of {package: details} dicts. The user first picks an
    installer (pip, pipenv, or conda), then confirms each outdated package
    before it is upgraded via subprocess.
    """
    check_is_pip = check_installation("Do you want to use pip to install packages?")
    is_pip = is_pipenv = is_conda = False
    if check_is_pip:
        is_pip = True
    else:
        # Bug fix: the original if/elif chain made the conda branch
        # unreachable (the pipenv branch's condition was always true once pip
        # was declined), so the conda question was never asked.
        if check_installation("Do you want to use pipenv to install packages?"):
            is_pipenv = True
        elif check_installation("Do you want to use conda to install packages?"):
            is_conda = True
    if not (is_pip or is_pipenv or is_conda):
        print(stylize('You havent selected any of the option', colored.fg("green")))
        return
    for data in data_dict:
        for k, v in data.items():
            if v.get('current_version') < v.get('update_to'):
                # Typo fix in the prompt: "pacakge" -> "package".
                question = "Do you want to update {0} package from {1} to {2} version?".format(
                    k, v.get('current_version'), v.get('update_to'))
                if query_yes_no(question):
                    # The original ran `pip install` for the conda and
                    # fallback branches too, so only pipenv differs.
                    tool = 'pipenv' if is_pipenv else 'pip'
                    installing = subprocess.call(
                        [tool, 'install', "{0}=={1}".format(k, v.get('update_to'))])
                    print(installing)
                    print(stylize("{0}=={1} version has been installed successfully!!!".format(
                        k, v.get('update_to')), colored.fg("green")))
                    if is_pipenv:
                        # Bug fix: the original passed colored.fg(...) to
                        # print() and called stylize() with one argument,
                        # raising TypeError at runtime.
                        print(stylize("Pipfile has been updated successfully!!!", colored.fg("green")))
            else:
                print(stylize("{0} is already up-to date to {1} version".format(
                    k, v.get('update_to')), colored.fg("green")))
def auto_fix_all(data_dict):
    """Update every vulnerable package after a single confirmation prompt.

    data_dict: iterable of {package: details} dicts. The user confirms once,
    picks an installer (pip, pipenv, or conda), and every outdated package is
    upgraded via subprocess without further prompting.
    """
    if not query_yes_no('Are you sure want to update all the packages, It might affect other packages?'):
        return
    check_is_pip = check_installation("Do you want to use pip to install packages?")
    is_pip = is_pipenv = is_conda = False
    if check_is_pip:
        is_pip = True
    else:
        # Bug fix: the original if/elif chain made the conda branch
        # unreachable, so the conda question was never asked.
        if check_installation("Do you want to use pipenv to install packages?"):
            is_pipenv = True
        elif check_installation("Do you want to use conda to install packages?"):
            is_conda = True
    if not (is_pip or is_pipenv or is_conda):
        print(stylize('You havent selected any of the option', colored.fg("green")))
        return
    for vul in data_dict:
        for k, v in vul.items():
            if v.get('current_version') < v.get('update_to'):
                # The original ran `pip install` for conda and the fallback
                # branch too; only pipenv differs. This also normalises the
                # conda branch's inconsistent "{0} == {1}" success message.
                tool = 'pipenv' if is_pipenv else 'pip'
                installing = subprocess.call(
                    [tool, 'install', "{0}=={1}".format(k, v.get('update_to'))])
                print(installing)
                print(stylize("{0}=={1} version has been installed successfully!!!".format(
                    k, v.get('update_to')), colored.fg("green")))
                if is_pipenv:
                    # Bug fix: the original's print(stylize("..."),
                    # colored.fg("green")) raised TypeError at runtime.
                    print(stylize("Pipfile has been updated successfully!!!", colored.fg("green")))
            else:
                print(stylize("{0} is already up to date to {1} version".format(
                    k, v.get('update_to')), colored.fg("green")))
# End-Of-File
| 44.798551
| 215
| 0.539614
| 3,537
| 30,911
| 4.558666
| 0.089059
| 0.026048
| 0.052096
| 0.072935
| 0.826966
| 0.816609
| 0.799243
| 0.78746
| 0.772513
| 0.76383
| 0
| 0.005988
| 0.324674
| 30,911
| 689
| 216
| 44.86357
| 0.766419
| 0.021837
| 0
| 0.780446
| 0
| 0.012007
| 0.213871
| 0.004461
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039451
| false
| 0.001715
| 0.02916
| 0
| 0.102916
| 0.09777
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
35b25753117975f3d05604a8d94009f451383ec4
| 5,255
|
py
|
Python
|
cogs/security.py
|
Dark-Amir/Security_Bot
|
a13469f822fda106242557096bb55b449e354591
|
[
"MIT"
] | 1
|
2021-11-09T15:14:30.000Z
|
2021-11-09T15:14:30.000Z
|
cogs/security.py
|
Kumarozh/Security_Bot
|
a13469f822fda106242557096bb55b449e354591
|
[
"MIT"
] | null | null | null |
cogs/security.py
|
Kumarozh/Security_Bot
|
a13469f822fda106242557096bb55b449e354591
|
[
"MIT"
] | null | null | null |
from typing import ByteString
import nextcord
from nextcord import client
from nextcord.ext import commands
import aiohttp
from io import BytesIO
from nextcord.ext.commands.core import command
class security(commands.Cog):
    def __init__(self,client):
        # Keep a reference to the bot client so the audit-log listeners can
        # compare entry authors against the bot itself and fetch/ban users.
        self.client=client
@commands.Cog.listener()
async def on_guild_channel_delete(self, channel):
entry = await channel.guild.audit_logs(action=nextcord.AuditLogAction.channel_delete, limit=None).get()
if entry.user != self.client.user:
user = await self.client.fetch_user(entry.user.id)
try:
await channel.guild.ban(user,reason="update channel")
except:
pass
@commands.Cog.listener()
async def on_guild_channel_create(self, channel):
entry = await channel.guild.audit_logs(action=nextcord.AuditLogAction.channel_create, limit=1).get()
if entry.user.id != self.client.user.id :
user = await self.client.fetch_user(entry.user.id)
try:
await channel.guild.ban(user,reason="update channel")
except:
pass
@commands.Cog.listener()
async def on_guild_channel_update(self, before, after):
entry = await after.guild.audit_logs(action=nextcord.AuditLogAction.channel_update,limit=None).get()
entry1 = await after.guild.audit_logs(action=nextcord.AuditLogAction.overwrite_create,limit=None).get()
entry3 = await after.guild.audit_logs(action=nextcord.AuditLogAction.overwrite_delete,limit=None).get()
entry2 = await after.guild.audit_logs(action=nextcord.AuditLogAction.overwrite_update,limit=None).get()
if entry.user != self.client.user :
user = await self.client.fetch_user(entry.user.id)
try:
await after.guild.ban(user,reason="update channel")
except:
pass
if entry1.user is not None:
if entry1.user != self.client.user :
user = await self.client.fetch_user(entry1.user.id)
try:
await after.guild.ban(user,reason="update channel")
except:
pass
if entry2.user is not None:
if entry2.user != self.client.user:
user = await self.client.fetch_user(entry2.user.id)
try:
await after.guild.ban(user,reason="update channel")
except:
pass
if entry3.user is not None:
if entry3.user != self.client.user:
user = await self.client.fetch_user(entry3.user.id)
try:
await after.guild.ban(user,reason="update channel")
except:
pass
@commands.Cog.listener()
async def on_guild_role_create(self, role):
entry = await role.guild.audit_logs(action=nextcord.AuditLogAction.role_create,limit=None).get()
if entry.user!=self.client.user:
user=await self.client.fetch_user(entry.user.id)
try:
await role.guild.ban(user, reason="delete role")
except:
pass
@commands.Cog.listener()
async def on_guild_role_delete(self, role):
entry=await role.guild.audit_logs(action=nextcord.AuditLogAction.role_delete, limit=None).get()
if entry.user!=self.client.user:
user=await self.client.fetch_user(entry.user.id)
try:
await role.guild.ban(user, reason="delete role")
except:
pass
@commands.Cog.listener()
async def on_guild_role_update(self, before, after):
entry = await before.guild.audit_logs(action=nextcord.AuditLogAction.role_update, limit=None).get()
if entry.user!=self.client.user:
user = await self.client.fetch_user(entry.user.id)
try:
await before.guild.ban(user, reason="update role")
except:
pass
@commands.Cog.listener()
async def on_webhooks_update(self, channel):
entry = await channel.guild.audit_logs(limit=1).get()
if entry.user!=self.client.user:
user = await self.client.fetch_user(entry.user.id)
try:
await channel.guild.ban(user, reason="update webhook")
except:
pass
@commands.Cog.listener()
async def on_guild_emojis_update(self, guild, before, after):
entry = await guild.audit_logs(limit=1).get()
if entry.user!=self.client.user:
user = await self.client.fetch_user(entry.user.id)
try:
await guild.ban(user, reason="update emoji")
except:
pass
@commands.Cog.listener()
async def on_guild_stickers_update(self, guild, before, after):
entry = await guild.audit_logs(limit=1).get()
if entry.user!=self.client.user:
user = await self.client.fetch_user(entry.user.id)
try:
await guild.ban(user, reason="update sticker")
except:
pass
def setup(client):
    """Extension entry point: instantiate the cog and attach it to *client*."""
    cog = security(client)
    client.add_cog(cog)
| 44.533898
| 113
| 0.603616
| 632
| 5,255
| 4.917722
| 0.099684
| 0.083655
| 0.054054
| 0.073359
| 0.835586
| 0.813385
| 0.78861
| 0.771557
| 0.732625
| 0.631596
| 0
| 0.004318
| 0.294957
| 5,255
| 118
| 114
| 44.533898
| 0.834548
| 0
| 0
| 0.615385
| 0
| 0
| 0.029871
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017094
| false
| 0.102564
| 0.059829
| 0
| 0.08547
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
35c007f175d55d8873c746de2a3b4e7f4aec43de
| 13,235
|
py
|
Python
|
third_party_package/RDKit_2015_03_1/rdkit/sping/PS/psmetrics.py
|
Ivy286/cluster_basedfps
|
7fc216537f570436f008ea567c137d03ba2b6d81
|
[
"WTFPL"
] | 9
|
2019-04-23T01:46:12.000Z
|
2021-08-16T07:07:12.000Z
|
third_party_package/RDKit_2015_03_1/rdkit/sping/PS/psmetrics.py
|
Ivy286/cluster_basedfps
|
7fc216537f570436f008ea567c137d03ba2b6d81
|
[
"WTFPL"
] | null | null | null |
third_party_package/RDKit_2015_03_1/rdkit/sping/PS/psmetrics.py
|
Ivy286/cluster_basedfps
|
7fc216537f570436f008ea567c137d03ba2b6d81
|
[
"WTFPL"
] | 5
|
2016-09-21T03:47:48.000Z
|
2019-07-30T22:17:35.000Z
|
# $Id$
# Christopher Lee clee@users.sourceforge.net
# based upon pdfmetrics.py by Andy Robinson
import string
import fontinfo
import latin1MetricsCache
##############################################################
#
# PDF Metrics
# This is a preamble to give us a stringWidth function.
# loads and caches AFM files, but won't need to as the
# standard fonts are there already
##############################################################
# Glyph advance-width tables for the 14 standard PostScript/PDF base fonts
# under StandardEncoding.  Keys are lower-cased font names; each value is a
# 256-element list of widths in 1/1000ths of an em, indexed by character
# code (0 marks an unencoded/absent glyph).
_stdenc_widths = {'courier':
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,0,600,600,600,600,0,600,600,600,600,600,600,600,600,0,600,0,600,600,600,600,600,600,600,600,0,600,600,0,600,600,600,600,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,600,0,600,0,0,0,0,600,600,600,600,0,0,0,0,0,600,0,0,0,600,0,0,600,600,600,600,0,0,600],'courier-bold':
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,0,600,600,600,600,0,600,600,600,600,600,600,600,600,0,600,0,600,600,600,600,600,600,600,600,0,600,600,0,600,600,600,600,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,600,0,600,0,0,0,0,600,600,600,600,0,0,0,0,0,600,0,0,0,600,0,0,600,600,600,600,0,0,600],'courier-boldoblique':
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,0,600,600,600,600,0,600,600,600,600,600,600,600,600,0,600,0,600,600,600,600,600,600,600,600,0,600,600,0,600,600,600,600,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,600,0,600,0,0,0,0,600,600,600,600,0,0,0,0,0,600,0,0,0,600,0,0,600,600,600,600,0,0,600],'courier-oblique':
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,600,600,600,600,600,600,600,600,600,600,600,600,600,600,600,0,600,600,600,600,0,600,600,600,600,600,600,600,600,0,600,0,600,600,600,600,600,600,600,600,0,600,600,0,600,600,600,600,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,600,0,600,0,0,0,0,600,600,600,600,0,0,0,0,0,600,0,0,0,600,0,0,600,600,600,600,0,0,600],'helvetica':
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,278,278,355,556,556,889,667,222,333,333,389,584,278,333,278,278,556,556,556,556,556,556,556,556,556,556,278,278,584,584,584,556,1015,667,667,722,722,667,611,778,722,278,500,667,556,833,722,778,667,778,722,667,611,722,667,944,667,667,611,278,278,278,469,556,222,556,556,500,556,556,278,556,556,222,222,500,222,833,556,556,556,556,333,500,278,556,500,722,500,500,500,334,260,334,584,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,333,556,556,167,556,556,556,556,191,333,556,333,333,500,500,0,556,556,556,278,0,537,350,222,333,333,556,1000,1000,0,611,0,333,333,333,333,333,333,333,333,0,333,333,0,333,333,333,1000,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1000,0,370,0,0,0,0,556,778,1000,365,0,0,0,0,0,889,0,0,0,278,0,0,222,611,944,611,0,0,834],'helvetica-bold':
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,278,333,474,556,556,889,722,278,333,333,389,584,278,333,278,278,556,556,556,556,556,556,556,556,556,556,333,333,584,584,584,611,975,722,722,722,722,667,611,778,722,278,556,722,611,833,722,778,667,778,722,667,611,722,667,944,667,667,611,333,278,333,584,556,278,556,611,556,611,556,333,611,611,278,278,556,278,889,611,611,611,611,389,556,333,611,556,778,556,556,500,389,280,389,584,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,333,556,556,167,556,556,556,556,238,500,556,333,333,611,611,0,556,556,556,278,0,556,350,278,500,500,556,1000,1000,0,611,0,333,333,333,333,333,333,333,333,0,333,333,0,333,333,333,1000,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1000,0,370,0,0,0,0,611,778,1000,365,0,0,0,0,0,889,0,0,0,278,0,0,278,611,944,611,0,0,834],'helvetica-boldoblique':
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,278,333,474,556,556,889,722,278,333,333,389,584,278,333,278,278,556,556,556,556,556,556,556,556,556,556,333,333,584,584,584,611,975,722,722,722,722,667,611,778,722,278,556,722,611,833,722,778,667,778,722,667,611,722,667,944,667,667,611,333,278,333,584,556,278,556,611,556,611,556,333,611,611,278,278,556,278,889,611,611,611,611,389,556,333,611,556,778,556,556,500,389,280,389,584,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,333,556,556,167,556,556,556,556,238,500,556,333,333,611,611,0,556,556,556,278,0,556,350,278,500,500,556,1000,1000,0,611,0,333,333,333,333,333,333,333,333,0,333,333,0,333,333,333,1000,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1000,0,370,0,0,0,0,611,778,1000,365,0,0,0,0,0,889,0,0,0,278,0,0,278,611,944,611,0,0,834],'helvetica-oblique':
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,278,278,355,556,556,889,667,222,333,333,389,584,278,333,278,278,556,556,556,556,556,556,556,556,556,556,278,278,584,584,584,556,1015,667,667,722,722,667,611,778,722,278,500,667,556,833,722,778,667,778,722,667,611,722,667,944,667,667,611,278,278,278,469,556,222,556,556,500,556,556,278,556,556,222,222,500,222,833,556,556,556,556,333,500,278,556,500,722,500,500,500,334,260,334,584,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,333,556,556,167,556,556,556,556,191,333,556,333,333,500,500,0,556,556,556,278,0,537,350,222,333,333,556,1000,1000,0,611,0,333,333,333,333,333,333,333,333,0,333,333,0,333,333,333,1000,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1000,0,370,0,0,0,0,556,778,1000,365,0,0,0,0,0,889,0,0,0,278,0,0,222,611,944,611,0,0,834],'symbol':
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,250,333,713,500,549,833,778,439,333,333,500,549,250,549,250,278,500,500,500,500,500,500,500,500,500,500,278,278,549,549,549,444,549,722,667,722,612,611,763,603,722,333,631,722,686,889,722,722,768,741,556,592,611,690,439,768,645,795,611,333,863,333,658,500,500,631,549,549,494,439,521,411,603,329,603,549,549,576,521,549,549,521,549,603,439,576,713,686,493,686,494,480,200,480,549,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,620,247,549,167,713,500,753,753,753,753,1042,987,603,987,603,400,549,411,549,549,713,494,460,549,549,549,549,1000,603,1000,658,823,686,795,987,768,768,823,768,768,713,713,713,713,713,713,713,768,713,790,790,890,823,549,250,713,603,603,1042,987,603,987,603,494,329,790,790,786,713,384,384,384,384,384,384,494,494,494,494,0,329,274,686,686,686,384,384,384,384,384,384,494,494,790],'times-bold':
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,250,333,555,500,500,1000,833,333,333,333,500,570,250,333,250,278,500,500,500,500,500,500,500,500,500,500,333,333,570,570,570,500,930,722,667,722,722,667,611,778,778,389,500,778,667,944,722,778,611,778,722,556,667,722,722,1000,722,722,667,333,278,333,581,500,333,500,556,444,556,444,333,500,556,278,333,556,278,833,556,500,556,556,444,389,333,556,500,722,500,500,444,394,220,394,520,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,333,500,500,167,500,500,500,500,278,500,500,333,333,556,556,0,500,500,500,250,0,540,350,333,500,500,500,1000,1000,0,500,0,333,333,333,333,333,333,333,333,0,333,333,0,333,333,333,1000,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1000,0,300,0,0,0,0,667,778,1000,330,0,0,0,0,0,722,0,0,0,278,0,0,278,500,722,556,0,0,750],'times-bolditalic':
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,250,389,555,500,500,833,778,333,333,333,500,570,250,333,250,278,500,500,500,500,500,500,500,500,500,500,333,333,570,570,570,500,832,667,667,667,722,667,667,722,778,389,500,667,611,889,722,722,611,722,667,556,611,722,667,889,667,611,611,333,278,333,570,500,333,500,500,444,500,444,333,500,556,278,278,500,278,778,556,500,500,500,389,389,278,556,444,667,500,444,389,348,220,348,570,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,389,500,500,167,500,500,500,500,278,500,500,333,333,556,556,0,500,500,500,250,0,500,350,333,500,500,500,1000,1000,0,500,0,333,333,333,333,333,333,333,333,0,333,333,0,333,333,333,1000,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,944,0,266,0,0,0,0,611,722,944,300,0,0,0,0,0,722,0,0,0,278,0,0,278,500,722,500,0,0,750],'times-italic':
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,250,333,420,500,500,833,778,333,333,333,500,675,250,333,250,278,500,500,500,500,500,500,500,500,500,500,333,333,675,675,675,500,920,611,611,667,722,611,611,722,722,333,444,667,556,833,667,722,611,722,611,500,556,722,611,833,611,556,556,389,278,389,422,500,333,500,500,444,500,444,278,500,500,278,278,444,278,722,500,500,500,500,389,389,278,500,444,667,444,444,389,400,275,400,541,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,389,500,500,167,500,500,500,500,214,556,500,333,333,500,500,0,500,500,500,250,0,523,350,333,556,556,500,889,1000,0,500,0,333,333,333,333,333,333,333,333,0,333,333,0,333,333,333,889,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,889,0,276,0,0,0,0,556,722,944,310,0,0,0,0,0,667,0,0,0,278,0,0,278,500,667,500,0,0,750],'times-roman':
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,250,333,408,500,500,833,778,333,333,333,500,564,250,333,250,278,500,500,500,500,500,500,500,500,500,500,278,278,564,564,564,444,921,722,667,667,722,611,556,722,722,333,389,722,611,889,722,722,556,722,667,556,611,722,722,944,722,722,611,333,278,333,469,500,333,444,500,444,500,444,333,500,500,278,278,500,278,778,500,500,500,500,333,389,278,500,500,722,500,500,444,480,200,480,541,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,333,500,500,167,500,500,500,500,180,444,500,333,333,556,556,0,500,500,500,250,0,453,350,333,444,444,500,1000,1000,0,444,0,333,333,333,333,333,333,333,333,0,333,333,0,333,333,333,1000,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,889,0,276,0,0,0,0,611,722,889,310,0,0,0,0,0,667,0,0,0,278,0,0,278,500,722,500,0,0,750],'zapfdingbats':
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,278,974,961,974,980,719,789,790,791,690,960,939,549,855,911,933,911,945,974,755,846,762,761,571,677,763,760,759,754,494,552,537,577,692,786,788,788,790,793,794,816,823,789,841,823,833,816,831,923,744,723,749,790,792,695,776,768,792,759,707,708,682,701,826,815,789,789,707,687,696,689,786,787,713,791,785,791,873,761,762,762,759,759,892,892,788,784,438,138,277,415,392,392,668,668,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,732,544,544,910,667,760,760,776,595,694,626,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,788,894,838,1016,458,748,924,748,918,927,928,928,834,873,828,924,924,917,930,931,463,883,836,836,867,867,696,696,874,0,874,760,946,771,865,771,888,967,888,831,873,927,970,234]
}
# (ascent, descent) pairs per font, also in 1/1000ths of an em; (0, 0) marks
# fonts for which the metrics are not meaningful (Symbol, ZapfDingbats).
ascent_descent = {'Courier': (629, -157), 'Courier-Bold': (626, -142), 'Courier-BoldOblique': (626, -142), 'Courier-Oblique': (629, -157), 'Helvetica': (718, -207), 'Helvetica-Bold': (718, -207), 'Helvetica-BoldOblique': (718, -207), 'Helvetica-Oblique': (718, -207), 'Symbol': (0, 0), 'Times-Bold': (676, -205), 'Times-BoldItalic': (699, -205), 'Times-Italic': (683, -205), 'Times-Roman': (683, -217), 'ZapfDingbats': (0, 0)}
# Width tables keyed by encoding name; Latin-1 widths come from a
# pre-generated cache module rather than being parsed from AFM files.
_Widths = { 'StandardEncoding' : _stdenc_widths,
            'Latin1Encoding' : latin1MetricsCache.FontWidths}
def stringwidth(text, font, encoding):
    """Return the width of *text* set in *font*, in 1/1000ths of an em.

    *font* is looked up case-insensitively in the width table for
    *encoding* ('StandardEncoding' or 'Latin1Encoding'); fonts listed in
    ``fontinfo.NonRomanFonts`` always use StandardEncoding.

    Raises KeyError when the encoding/font combination has no width table.
    """
    if font in fontinfo.NonRomanFonts:
        # str.lower() replaces the Python-2-only string.lower() helper.
        widths = _Widths['StandardEncoding'][font.lower()]
    else:
        try:
            widths = _Widths[encoding][font.lower()]
        except KeyError:
            # Previously a bare ``except`` that masked unrelated errors.
            raise KeyError("Improper encoding {0} or font name {1}".format(encoding, font))
    # Sum the per-character advance widths.
    return sum(widths[ord(char)] for char in text)
| 213.467742
| 905
| 0.67858
| 3,756
| 13,235
| 2.388978
| 0.075612
| 0.269698
| 0.370445
| 0.460047
| 0.727516
| 0.704781
| 0.690405
| 0.684721
| 0.672685
| 0.672685
| 0
| 0.620996
| 0.021156
| 13,235
| 61
| 906
| 216.967213
| 0.071633
| 0.018436
| 0
| 0
| 0
| 0
| 0.034697
| 0.003267
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029412
| false
| 0
| 0.088235
| 0
| 0.147059
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
35d220b49f5d4a5556f4a8ddcd348e287cb462d5
| 7,373
|
py
|
Python
|
loldib/getratings/models/NA/na_heimerdinger/na_heimerdinger_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_heimerdinger/na_heimerdinger_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_heimerdinger/na_heimerdinger_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
# Auto-generated placeholder rating classes: one empty Ratings subclass per
# opposing champion for Heimerdinger played in the Top role on NA.  The class
# name encodes (region, champion, role, opponent); all behaviour comes from
# the shared Ratings base class.
class NA_Heimerdinger_Top_Aatrox(Ratings):
    pass

class NA_Heimerdinger_Top_Ahri(Ratings):
    pass

class NA_Heimerdinger_Top_Akali(Ratings):
    pass

class NA_Heimerdinger_Top_Alistar(Ratings):
    pass

class NA_Heimerdinger_Top_Amumu(Ratings):
    pass

class NA_Heimerdinger_Top_Anivia(Ratings):
    pass

class NA_Heimerdinger_Top_Annie(Ratings):
    pass

class NA_Heimerdinger_Top_Ashe(Ratings):
    pass

class NA_Heimerdinger_Top_AurelionSol(Ratings):
    pass

class NA_Heimerdinger_Top_Azir(Ratings):
    pass

class NA_Heimerdinger_Top_Bard(Ratings):
    pass

class NA_Heimerdinger_Top_Blitzcrank(Ratings):
    pass

class NA_Heimerdinger_Top_Brand(Ratings):
    pass

class NA_Heimerdinger_Top_Braum(Ratings):
    pass

class NA_Heimerdinger_Top_Caitlyn(Ratings):
    pass

class NA_Heimerdinger_Top_Camille(Ratings):
    pass

class NA_Heimerdinger_Top_Cassiopeia(Ratings):
    pass

class NA_Heimerdinger_Top_Chogath(Ratings):
    pass

class NA_Heimerdinger_Top_Corki(Ratings):
    pass

class NA_Heimerdinger_Top_Darius(Ratings):
    pass

class NA_Heimerdinger_Top_Diana(Ratings):
    pass

class NA_Heimerdinger_Top_Draven(Ratings):
    pass

class NA_Heimerdinger_Top_DrMundo(Ratings):
    pass

class NA_Heimerdinger_Top_Ekko(Ratings):
    pass

class NA_Heimerdinger_Top_Elise(Ratings):
    pass

class NA_Heimerdinger_Top_Evelynn(Ratings):
    pass

class NA_Heimerdinger_Top_Ezreal(Ratings):
    pass

class NA_Heimerdinger_Top_Fiddlesticks(Ratings):
    pass

class NA_Heimerdinger_Top_Fiora(Ratings):
    pass

class NA_Heimerdinger_Top_Fizz(Ratings):
    pass

class NA_Heimerdinger_Top_Galio(Ratings):
    pass

class NA_Heimerdinger_Top_Gangplank(Ratings):
    pass

class NA_Heimerdinger_Top_Garen(Ratings):
    pass

class NA_Heimerdinger_Top_Gnar(Ratings):
    pass

class NA_Heimerdinger_Top_Gragas(Ratings):
    pass

class NA_Heimerdinger_Top_Graves(Ratings):
    pass

class NA_Heimerdinger_Top_Hecarim(Ratings):
    pass

class NA_Heimerdinger_Top_Heimerdinger(Ratings):
    pass

class NA_Heimerdinger_Top_Illaoi(Ratings):
    pass

class NA_Heimerdinger_Top_Irelia(Ratings):
    pass

class NA_Heimerdinger_Top_Ivern(Ratings):
    pass

class NA_Heimerdinger_Top_Janna(Ratings):
    pass

class NA_Heimerdinger_Top_JarvanIV(Ratings):
    pass

class NA_Heimerdinger_Top_Jax(Ratings):
    pass

class NA_Heimerdinger_Top_Jayce(Ratings):
    pass

class NA_Heimerdinger_Top_Jhin(Ratings):
    pass

class NA_Heimerdinger_Top_Jinx(Ratings):
    pass

class NA_Heimerdinger_Top_Kalista(Ratings):
    pass

class NA_Heimerdinger_Top_Karma(Ratings):
    pass

class NA_Heimerdinger_Top_Karthus(Ratings):
    pass

class NA_Heimerdinger_Top_Kassadin(Ratings):
    pass

class NA_Heimerdinger_Top_Katarina(Ratings):
    pass

class NA_Heimerdinger_Top_Kayle(Ratings):
    pass

class NA_Heimerdinger_Top_Kayn(Ratings):
    pass

class NA_Heimerdinger_Top_Kennen(Ratings):
    pass

class NA_Heimerdinger_Top_Khazix(Ratings):
    pass

class NA_Heimerdinger_Top_Kindred(Ratings):
    pass

class NA_Heimerdinger_Top_Kled(Ratings):
    pass

class NA_Heimerdinger_Top_KogMaw(Ratings):
    pass

class NA_Heimerdinger_Top_Leblanc(Ratings):
    pass

class NA_Heimerdinger_Top_LeeSin(Ratings):
    pass

class NA_Heimerdinger_Top_Leona(Ratings):
    pass

class NA_Heimerdinger_Top_Lissandra(Ratings):
    pass

class NA_Heimerdinger_Top_Lucian(Ratings):
    pass

class NA_Heimerdinger_Top_Lulu(Ratings):
    pass

class NA_Heimerdinger_Top_Lux(Ratings):
    pass

class NA_Heimerdinger_Top_Malphite(Ratings):
    pass

class NA_Heimerdinger_Top_Malzahar(Ratings):
    pass

class NA_Heimerdinger_Top_Maokai(Ratings):
    pass

class NA_Heimerdinger_Top_MasterYi(Ratings):
    pass

class NA_Heimerdinger_Top_MissFortune(Ratings):
    pass

class NA_Heimerdinger_Top_MonkeyKing(Ratings):
    pass

class NA_Heimerdinger_Top_Mordekaiser(Ratings):
    pass

class NA_Heimerdinger_Top_Morgana(Ratings):
    pass

class NA_Heimerdinger_Top_Nami(Ratings):
    pass

class NA_Heimerdinger_Top_Nasus(Ratings):
    pass

class NA_Heimerdinger_Top_Nautilus(Ratings):
    pass

class NA_Heimerdinger_Top_Nidalee(Ratings):
    pass

class NA_Heimerdinger_Top_Nocturne(Ratings):
    pass

class NA_Heimerdinger_Top_Nunu(Ratings):
    pass

class NA_Heimerdinger_Top_Olaf(Ratings):
    pass

class NA_Heimerdinger_Top_Orianna(Ratings):
    pass

class NA_Heimerdinger_Top_Ornn(Ratings):
    pass

class NA_Heimerdinger_Top_Pantheon(Ratings):
    pass

class NA_Heimerdinger_Top_Poppy(Ratings):
    pass

class NA_Heimerdinger_Top_Quinn(Ratings):
    pass

class NA_Heimerdinger_Top_Rakan(Ratings):
    pass

class NA_Heimerdinger_Top_Rammus(Ratings):
    pass

class NA_Heimerdinger_Top_RekSai(Ratings):
    pass

class NA_Heimerdinger_Top_Renekton(Ratings):
    pass

class NA_Heimerdinger_Top_Rengar(Ratings):
    pass

class NA_Heimerdinger_Top_Riven(Ratings):
    pass

class NA_Heimerdinger_Top_Rumble(Ratings):
    pass

class NA_Heimerdinger_Top_Ryze(Ratings):
    pass

class NA_Heimerdinger_Top_Sejuani(Ratings):
    pass

class NA_Heimerdinger_Top_Shaco(Ratings):
    pass

class NA_Heimerdinger_Top_Shen(Ratings):
    pass

class NA_Heimerdinger_Top_Shyvana(Ratings):
    pass

class NA_Heimerdinger_Top_Singed(Ratings):
    pass

class NA_Heimerdinger_Top_Sion(Ratings):
    pass

class NA_Heimerdinger_Top_Sivir(Ratings):
    pass

class NA_Heimerdinger_Top_Skarner(Ratings):
    pass

class NA_Heimerdinger_Top_Sona(Ratings):
    pass

class NA_Heimerdinger_Top_Soraka(Ratings):
    pass

class NA_Heimerdinger_Top_Swain(Ratings):
    pass

class NA_Heimerdinger_Top_Syndra(Ratings):
    pass

class NA_Heimerdinger_Top_TahmKench(Ratings):
    pass

class NA_Heimerdinger_Top_Taliyah(Ratings):
    pass

class NA_Heimerdinger_Top_Talon(Ratings):
    pass

class NA_Heimerdinger_Top_Taric(Ratings):
    pass

class NA_Heimerdinger_Top_Teemo(Ratings):
    pass

class NA_Heimerdinger_Top_Thresh(Ratings):
    pass

class NA_Heimerdinger_Top_Tristana(Ratings):
    pass

class NA_Heimerdinger_Top_Trundle(Ratings):
    pass

class NA_Heimerdinger_Top_Tryndamere(Ratings):
    pass

class NA_Heimerdinger_Top_TwistedFate(Ratings):
    pass

class NA_Heimerdinger_Top_Twitch(Ratings):
    pass

class NA_Heimerdinger_Top_Udyr(Ratings):
    pass

class NA_Heimerdinger_Top_Urgot(Ratings):
    pass

class NA_Heimerdinger_Top_Varus(Ratings):
    pass

class NA_Heimerdinger_Top_Vayne(Ratings):
    pass

class NA_Heimerdinger_Top_Veigar(Ratings):
    pass

class NA_Heimerdinger_Top_Velkoz(Ratings):
    pass

class NA_Heimerdinger_Top_Vi(Ratings):
    pass

class NA_Heimerdinger_Top_Viktor(Ratings):
    pass

class NA_Heimerdinger_Top_Vladimir(Ratings):
    pass

class NA_Heimerdinger_Top_Volibear(Ratings):
    pass

class NA_Heimerdinger_Top_Warwick(Ratings):
    pass

class NA_Heimerdinger_Top_Xayah(Ratings):
    pass

class NA_Heimerdinger_Top_Xerath(Ratings):
    pass

class NA_Heimerdinger_Top_XinZhao(Ratings):
    pass

class NA_Heimerdinger_Top_Yasuo(Ratings):
    pass

class NA_Heimerdinger_Top_Yorick(Ratings):
    pass

class NA_Heimerdinger_Top_Zac(Ratings):
    pass

class NA_Heimerdinger_Top_Zed(Ratings):
    pass

class NA_Heimerdinger_Top_Ziggs(Ratings):
    pass

class NA_Heimerdinger_Top_Zilean(Ratings):
    pass

class NA_Heimerdinger_Top_Zyra(Ratings):
    pass
| 17.681055
| 49
| 0.792893
| 972
| 7,373
| 5.588477
| 0.151235
| 0.177835
| 0.482695
| 0.55891
| 0.83229
| 0.83229
| 0
| 0
| 0
| 0
| 0
| 0
| 0.150685
| 7,373
| 416
| 50
| 17.723558
| 0.867454
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
ea756467bb7830d1909e4300f352b8d2599175ac
| 21,079
|
py
|
Python
|
Test/FunctionalTests/DomTreeEditorTestScripts/AddAllItems.py
|
StirfireStudios/ATF
|
efc9c91a48a643a16e37d1f2dd0f89c6d626a51c
|
[
"Apache-2.0"
] | null | null | null |
Test/FunctionalTests/DomTreeEditorTestScripts/AddAllItems.py
|
StirfireStudios/ATF
|
efc9c91a48a643a16e37d1f2dd0f89c6d626a51c
|
[
"Apache-2.0"
] | null | null | null |
Test/FunctionalTests/DomTreeEditorTestScripts/AddAllItems.py
|
StirfireStudios/ATF
|
efc9c91a48a643a16e37d1f2dd0f89c6d626a51c
|
[
"Apache-2.0"
] | null | null | null |
#Copyright (c) 2014 Sony Computer Entertainment America LLC. See License.txt.
# Functional test for the DomTreeEditor sample: for every container type,
# insert each legal child and verify the count grows; attempt each illegal
# child and verify the count is unchanged.
#
# NOTE(review): relies on globals injected by the ATF test harness
# (atfDocService, editor, treeLister, editingContext, DomNode, UISchema,
# UIPackage, UIForm, ...) and runs under IronPython, where Insert[T] is
# .NET generic-method binding via indexing -- confirm before porting.
import sys
sys.path.append("./CommonTestScripts")
import Test

doc = atfDocService.OpenNewDocument(editor)


def _childCount(node):
    # Number of children the tree lister currently reports for node.
    return Test.GetEnumerableCount(treeLister.TreeView.GetChildren(node))


def _insert(clrType, schemaType, parent):
    # Bind the generic Insert[T] method for clrType and insert a new DomNode
    # of schemaType under parent. Returns whatever the editing context returns
    # (the created item, or None when the insert is rejected).
    return editingContext.Insert[clrType](DomNode(schemaType), parent)


def _verifyRejected(parent, expected, attempts):
    # Each attempt is (clrType, schemaType, message); the insert must be a
    # no-op that leaves parent's child count at 'expected'.
    for clrType, schemaType, message in attempts:
        _insert(clrType, schemaType, parent)
        Test.Equal(expected, _childCount(parent), message)


# Every insertable type, with the article-prefixed noun used in the
# original per-type Test.Equal messages.
_ALL_TYPES = [
    (UIPackage, UISchema.UIPackageType.Type, "a package"),
    (UIForm, UISchema.UIFormType.Type, "a form"),
    (UIShader, UISchema.UIShaderType.Type, "a shader"),
    (UITexture, UISchema.UITextureType.Type, "a texture"),
    (UIFont, UISchema.UIFontType.Type, "a font"),
    (UISprite, UISchema.UISpriteType.Type, "a sprite"),
    (UITextItem, UISchema.UITextItemType.Type, "a text"),
    (UIAnimation, UISchema.UIAnimationType.Type, "an animation"),
]


def _verifyNoChildrenAllowed(item, itemName):
    # Leaf items (shader, texture, font) accept no children of any type;
    # their child count must stay at 0 after every attempted insert.
    for clrType, schemaType, noun in _ALL_TYPES:
        _insert(clrType, schemaType, item.DomNode)
        Test.Equal(0, _childCount(item.DomNode),
                   "Verify %s child count does not increase when adding %s" % (itemName, noun))


#===================== 0: root ==================================
root = treeLister.TreeView.DomNode
Test.Equal(0, _childCount(root), "Verify root child count")
package = _insert(UIPackage, UISchema.UIPackageType.Type, root)
Test.Equal(1, _childCount(root), "Verify root child count")
print("Trying to add objects that cannot be a child of the root")
_verifyRejected(root, 1, [
    (UIForm, UISchema.UIFormType.Type, "Verify root child count does not increase when adding a form"),
    (UIShader, UISchema.UIShaderType.Type, "Verify root child count does not increase when adding a shader"),
    (UITexture, UISchema.UITextureType.Type, "Verify root child count does not increase when adding a texture"),
    (UIFont, UISchema.UIFontType.Type, "Verify root child count does not increase when adding a font"),
    (UISprite, UISchema.UISpriteType.Type, "Verify root child count does not increase when adding a sprite"),
    (UITextItem, UISchema.UITextItemType.Type, "Verify root child count does not increase when adding a text"),
    (UIAnimation, UISchema.UIAnimationType.Type, "Verify root child count does not increase when adding an animation"),
])

#===================== 1: Package ==================================
print("Adding children to a package")
Test.Equal(0, _childCount(package.DomNode), "Verify package child count")
form = _insert(UIForm, UISchema.UIFormType.Type, package.DomNode)
Test.Equal(1, _childCount(package.DomNode), "Verify package child count after adding form")
shader = _insert(UIShader, UISchema.UIShaderType.Type, package.DomNode)
Test.Equal(2, _childCount(package.DomNode), "Verify package child count after adding shader")
texture = _insert(UITexture, UISchema.UITextureType.Type, package.DomNode)
Test.Equal(3, _childCount(package.DomNode), "Verify package child count after adding texture")
font = _insert(UIFont, UISchema.UIFontType.Type, package.DomNode)
Test.Equal(4, _childCount(package.DomNode), "Verify package child count after adding font")
packageChildCount = 4
print("Trying to add objects that cannot be a child of a package")
_verifyRejected(package.DomNode, packageChildCount, [
    (UIPackage, UISchema.UIPackageType.Type, "Verify package child count does not increase after adding package"),
    (UISprite, UISchema.UISpriteType.Type, "Verify package child count does not increase after adding sprite"),
    (UITextItem, UISchema.UITextItemType.Type, "Verify package child count does not increase after adding text"),
    (UIAnimation, UISchema.UIAnimationType.Type, "Verify package child count does not increase after adding animation"),
])

#===================== 2: Form ==================================
print("Adding children to a form")
Test.Equal(0, _childCount(form.DomNode), "Verify form child count")
sprite = _insert(UISprite, UISchema.UISpriteType.Type, form.DomNode)
Test.Equal(1, _childCount(form.DomNode), "Verify form child count after adding sprite")
text = _insert(UITextItem, UISchema.UITextItemType.Type, form.DomNode)
Test.Equal(2, _childCount(form.DomNode), "Verify form child count after adding text")
animation = _insert(UIAnimation, UISchema.UIAnimationType.Type, form.DomNode)
Test.Equal(3, _childCount(form.DomNode), "Verify form child count after adding animation")
print("Trying to add objects that cannot be a child of a form")
_verifyRejected(form.DomNode, 3, [
    (UIPackage, UISchema.UIPackageType.Type, "Verify form child count does not increase after adding a package"),
    (UIForm, UISchema.UIFormType.Type, "Verify form child count does not increase after adding a form"),
    (UIShader, UISchema.UIShaderType.Type, "Verify form child count does not increase after adding a shader"),
    (UITexture, UISchema.UITextureType.Type, "Verify form child count does not increase after adding a texture"),
    (UIFont, UISchema.UIFontType.Type, "Verify form child count does not increase after adding a font"),
])

#===================== 3: Shader ==================================
print("Verify cannot add children to a shader")
_verifyNoChildrenAllowed(shader, "shader")

#===================== 4: Texture ==================================
print("Verify cannot add children to a texture")
_verifyNoChildrenAllowed(texture, "texture")

#===================== 5: Font ==================================
print("Verify cannot add children to a font")
_verifyNoChildrenAllowed(font, "font")

#===================== 6: Sprite ==================================
print("Adding children to a sprite")
Test.Equal(2, _childCount(sprite.DomNode), "Verify sprite child count (starts with a transform and an empty ref)")
spriteUnderSprite = _insert(UISprite, UISchema.UISpriteType.Type, sprite.DomNode)
Test.Equal(3, _childCount(sprite.DomNode), "Verify sprite child count after adding sprite")
textUnderSprite = _insert(UITextItem, UISchema.UITextItemType.Type, sprite.DomNode)
Test.Equal(4, _childCount(sprite.DomNode), "Verify sprite child count after adding text")
animationUnderSprite = _insert(UIAnimation, UISchema.UIAnimationType.Type, sprite.DomNode)
Test.Equal(5, _childCount(sprite.DomNode), "Verify sprite child count after adding animation")
#must be added as ref:
shaderUnderSprite = editingContext.InsertAsRef[UIShader](DomNode(UISchema.UIShaderType.Type), sprite.DomNode)
Test.Equal(5, _childCount(sprite.DomNode), "Verify sprite child count after adding shader")
#refs will be added as real objects to the package
packageChildCount = packageChildCount + 1
Test.Equal(packageChildCount, _childCount(package.DomNode), "Verify package child count increases after adding a ref")
print("Trying to add objects that cannot be a child of a sprite")
_verifyRejected(sprite.DomNode, 5, [
    (UIPackage, UISchema.UIPackageType.Type, "Verify sprite child count does not increase when adding a package"),
    (UIForm, UISchema.UIFormType.Type, "Verify sprite child count does not increase when adding a form"),
    (UITexture, UISchema.UITextureType.Type, "Verify sprite child count does not increase when adding a texture"),
    (UIFont, UISchema.UIFontType.Type, "Verify sprite child count does not increase when adding a font"),
    (UIShader, UISchema.UIShaderType.Type, "Verify sprite child count does not increase when adding a shader"),
])

#===================== 7: Text ==================================
print("Adding children to a text")
Test.Equal(2, _childCount(text.DomNode), "Verify text child count (starts with a transform and an empty ref)")
spriteUnderText = _insert(UISprite, UISchema.UISpriteType.Type, text.DomNode)
Test.Equal(3, _childCount(text.DomNode), "Verify text child count after adding sprite")
textUnderText = _insert(UITextItem, UISchema.UITextItemType.Type, text.DomNode)
Test.Equal(4, _childCount(text.DomNode), "Verify text child count after adding text")
animationUnderText = _insert(UIAnimation, UISchema.UIAnimationType.Type, text.DomNode)
Test.Equal(5, _childCount(text.DomNode), "Verify text child count after adding animation")
#must be added as ref:
fontUnderText = editingContext.InsertAsRef[UIFont](DomNode(UISchema.UIFontType.Type), text.DomNode)
Test.Equal(5, _childCount(text.DomNode), "Verify text child count after adding font as ref")
packageChildCount = packageChildCount + 1
Test.Equal(packageChildCount, _childCount(package.DomNode), "Verify package child count increases after adding a ref")
print("Trying to add objects that cannot be a child of a text")
_verifyRejected(text.DomNode, 5, [
    (UIPackage, UISchema.UIPackageType.Type, "Verify text child count does not increase when adding a package"),
    (UIForm, UISchema.UIFormType.Type, "Verify text child count does not increase when adding a form"),
    (UIShader, UISchema.UIShaderType.Type, "Verify text child count does not increase when adding a shader"),
    (UITexture, UISchema.UITextureType.Type, "Verify text child count does not increase when adding a texture"),
    (UIFont, UISchema.UIFontType.Type, "Verify text child count does not increase when adding a font"),
])

#===================== 8: Animation ==================================
print("Verify cannot add children to an animation")
animCount = _childCount(animation.DomNode)
# The original section passed (actual, expected) to Test.Equal here, unlike
# every other section; that argument order is preserved.
for clrType, schemaType, noun in _ALL_TYPES:
    _insert(clrType, schemaType, animation.DomNode)
    Test.Equal(_childCount(animation.DomNode), animCount,
               "Verify animation child count does not increase when adding %s" % noun)

print(Test.SUCCESS)
| 101.341346
| 176
| 0.799279
| 2,510
| 21,079
| 6.712749
| 0.04502
| 0.105763
| 0.155736
| 0.19467
| 0.96344
| 0.951392
| 0.949611
| 0.861713
| 0.843433
| 0.829485
| 0
| 0.004258
| 0.075288
| 21,079
| 207
| 177
| 101.830918
| 0.860096
| 0.036387
| 0
| 0.022599
| 0
| 0
| 0.261759
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.011299
| null | null | 0.079096
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
576216dab94c1680fc81a5ad044ac58da271032c
| 12,249
|
py
|
Python
|
workflow/migrations/0053_auto_20191001_2329.py
|
mercycorps/toladata
|
4d5f9b45905a81af9981b586690e020d5b3bfc60
|
[
"Apache-2.0"
] | null | null | null |
workflow/migrations/0053_auto_20191001_2329.py
|
mercycorps/toladata
|
4d5f9b45905a81af9981b586690e020d5b3bfc60
|
[
"Apache-2.0"
] | 268
|
2020-03-31T15:46:59.000Z
|
2022-03-31T18:01:08.000Z
|
workflow/migrations/0053_auto_20191001_2329.py
|
Falliatcom-sa/falliatcom
|
39fb926de072c296ed32d50cccfb8003ca870739
|
[
"Apache-2.0"
] | 1
|
2021-01-05T01:58:24.000Z
|
2021-01-05T01:58:24.000Z
|
# Generated by Django 2.2.5 on 2019-10-02 06:29
from django.db import migrations
class Migration(migrations.Migration):
    """Remove the fields of the legacy workflow models, then delete the
    models themselves.

    The auto-generated original listed every operation explicitly; this
    version builds the identical operation list (same operations, same
    order) from compact (model, fields) tables.
    """

    dependencies = [
        ('workflow', '0052_delete_tolasites'),
    ]

    # Field removals issued before FormGuidance is dropped.
    _removals_a = [
        ('adminlevelthree', ['district']),
        ('approvalauthority', ['approval_user', 'country']),
        ('contact', ['country']),
        ('district', ['province']),
    ]

    # Field removals issued between the FormGuidance delete and the
    # checklist AlterModelOptions.
    _removals_b = [
        ('historicalprojectagreement', [
            'approval_submitted_by', 'approved_by', 'checked_by',
            'estimated_by', 'finance_reviewed_by', 'history_user',
            'me_reviewed_by', 'office', 'program', 'project_type',
            'reviewed_by', 'sector']),
        ('historicalprojectcomplete', [
            'approval_submitted_by', 'approved_by', 'checked_by',
            'estimated_by', 'history_user', 'office', 'program',
            'project_agreement', 'project_type', 'reviewed_by', 'sector']),
        ('office', ['province']),
        ('projectagreement', [
            'approval_submitted_by', 'approved_by', 'capacity', 'checked_by',
            'estimated_by', 'evaluate', 'finance_reviewed_by',
            'me_reviewed_by', 'office', 'program', 'project_type',
            'reviewed_by', 'sector', 'site', 'stakeholder']),
        ('projectcomplete', [
            'approval_submitted_by', 'approved_by', 'checked_by',
            'estimated_by', 'office', 'program', 'project_agreement',
            'project_type', 'reviewed_by', 'sector', 'site', 'stakeholder']),
        ('province', ['country']),
        ('stakeholder', [
            'approved_by', 'contact', 'country', 'filled_by',
            'formal_relationship_document', 'sectors', 'type',
            'vetting_document']),
        ('village', ['admin_3', 'district']),
    ]

    # Field removals issued after the checklist AlterModelOptions.
    _removals_c = [
        ('benchmarks', ['agreement', 'complete']),
        ('budget', ['agreement', 'complete']),
        ('checklist', ['agreement']),
        ('documentation', ['project', 'template']),
        ('historicalbudget', ['agreement', 'complete']),
        ('historicalsiteprofile', [
            'admin_level_three', 'district', 'office', 'province', 'village']),
        ('monitor', ['agreement', 'complete']),
        ('program', ['fund_code']),
        ('siteprofile', [
            'admin_level_three', 'district', 'office', 'province', 'village']),
    ]

    # Models deleted once their referencing fields are gone.
    _deleted_models = [
        'AdminLevelThree', 'ApprovalAuthority', 'Capacity', 'Contact',
        'District', 'Evaluate', 'FundCode', 'HistoricalProjectAgreement',
        'HistoricalProjectComplete', 'Office', 'ProjectAgreement',
        'ProjectComplete', 'ProjectType', 'Province', 'Stakeholder',
        'StakeholderType', 'Template', 'Village',
    ]

    # Note: in a class body, only the outermost iterable of each
    # comprehension sees class-level names, which is exactly how these
    # comprehensions are written.
    operations = (
        [migrations.RemoveField(model_name=m, name=f)
         for m, fs in _removals_a for f in fs]
        + [migrations.DeleteModel(name='FormGuidance')]
        + [migrations.RemoveField(model_name=m, name=f)
           for m, fs in _removals_b for f in fs]
        + [migrations.AlterModelOptions(name='checklist', options={})]
        + [migrations.RemoveField(model_name=m, name=f)
           for m, fs in _removals_c for f in fs]
        + [migrations.DeleteModel(name=n) for n in _deleted_models]
    )
| 28.419954
| 52
| 0.521675
| 783
| 12,249
| 7.978289
| 0.109834
| 0.299184
| 0.370418
| 0.427405
| 0.832079
| 0.816392
| 0.394109
| 0.134785
| 0
| 0
| 0
| 0.002615
| 0.375704
| 12,249
| 430
| 53
| 28.486047
| 0.814306
| 0.003674
| 0
| 0.884434
| 1
| 0
| 0.216768
| 0.071792
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.002358
| 0
| 0.009434
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
57729bdb448a2bf91fd8ef13ae38f0689987c174
| 181
|
py
|
Python
|
chainer/training/updaters/__init__.py
|
takeratta/chainer
|
02686e98cd6dc8f20979a1f3a79130f076cbfc6c
|
[
"MIT"
] | 2
|
2018-02-05T07:25:48.000Z
|
2018-08-28T20:29:45.000Z
|
chainer/training/updaters/__init__.py
|
takeratta/chainer
|
02686e98cd6dc8f20979a1f3a79130f076cbfc6c
|
[
"MIT"
] | null | null | null |
chainer/training/updaters/__init__.py
|
takeratta/chainer
|
02686e98cd6dc8f20979a1f3a79130f076cbfc6c
|
[
"MIT"
] | 1
|
2018-08-23T01:34:57.000Z
|
2018-08-23T01:34:57.000Z
|
from chainer.training.updaters import multiprocess_parallel_updater # NOQA
from chainer.training.updaters.multiprocess_parallel_updater import MultiprocessParallelUpdater # NOQA
| 45.25
| 103
| 0.878453
| 19
| 181
| 8.157895
| 0.526316
| 0.141935
| 0.245161
| 0.348387
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082873
| 181
| 3
| 104
| 60.333333
| 0.933735
| 0.049724
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
577ae3a2850d6b81f3cbbca43638cf6bb4c0f663
| 168
|
py
|
Python
|
temboo/core/Library/Mixpanel/DataExport/Retention/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 7
|
2016-03-07T02:07:21.000Z
|
2022-01-21T02:22:41.000Z
|
temboo/core/Library/Mixpanel/DataExport/Retention/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | null | null | null |
temboo/core/Library/Mixpanel/DataExport/Retention/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 8
|
2016-06-14T06:01:11.000Z
|
2020-04-22T09:21:44.000Z
|
from temboo.Library.Mixpanel.DataExport.Retention.RetentionData import RetentionData, RetentionDataInputSet, RetentionDataResultSet, RetentionDataChoreographyExecution
| 84
| 167
| 0.910714
| 12
| 168
| 12.75
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041667
| 168
| 1
| 168
| 168
| 0.950311
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
578b08638019ad090381c25548eb2ca0cc0eb031
| 112
|
py
|
Python
|
descriptors/haralick/__init__.py
|
WPaczula/image-fragmentation
|
f5650ff384bf803b3cca8b21c621019ce25018e6
|
[
"MIT"
] | null | null | null |
descriptors/haralick/__init__.py
|
WPaczula/image-fragmentation
|
f5650ff384bf803b3cca8b21c621019ce25018e6
|
[
"MIT"
] | null | null | null |
descriptors/haralick/__init__.py
|
WPaczula/image-fragmentation
|
f5650ff384bf803b3cca8b21c621019ce25018e6
|
[
"MIT"
] | null | null | null |
from descriptors.haralick.descriptor import get_haralicks
from descriptors.haralick.long import get_long_feature
| 56
| 57
| 0.901786
| 15
| 112
| 6.533333
| 0.6
| 0.306122
| 0.469388
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 112
| 2
| 58
| 56
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
57abcb7421aff4f6e69f807d63b82ccffadc1ec3
| 223
|
py
|
Python
|
interrogatio/validators/__init__.py
|
ffaraone/interrogatio
|
8b66e7fe73d14bfda38cc2eb3aecb3291e4afda1
|
[
"BSD-3-Clause"
] | 5
|
2019-02-19T13:10:39.000Z
|
2022-03-04T19:11:04.000Z
|
interrogatio/validators/__init__.py
|
ffaraone/interrogatio
|
8b66e7fe73d14bfda38cc2eb3aecb3291e4afda1
|
[
"BSD-3-Clause"
] | 11
|
2020-03-24T16:58:41.000Z
|
2021-12-14T10:19:17.000Z
|
interrogatio/validators/__init__.py
|
ffaraone/interrogatio
|
8b66e7fe73d14bfda38cc2eb3aecb3291e4afda1
|
[
"BSD-3-Clause"
] | 2
|
2019-05-31T08:36:26.000Z
|
2020-12-18T17:58:50.000Z
|
from interrogatio.validators.base import Validator # noqa
from interrogatio.validators.builtins import * # noqa
from interrogatio.validators.registry import ( # noqa
get_instance,
get_registered,
register,
)
| 27.875
| 58
| 0.766816
| 24
| 223
| 7.041667
| 0.541667
| 0.284024
| 0.461538
| 0.35503
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.165919
| 223
| 7
| 59
| 31.857143
| 0.908602
| 0.06278
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.428571
| 0
| 0.428571
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
57aed1a76c0e4db179e97c91c1f8f009dff17636
| 233
|
py
|
Python
|
portaljob/portaljob_visitor/views.py
|
mixdevdev/project_portal
|
ee639590a9e5bcf3bd6b058b5ba72c242f2ba667
|
[
"MIT"
] | null | null | null |
portaljob/portaljob_visitor/views.py
|
mixdevdev/project_portal
|
ee639590a9e5bcf3bd6b058b5ba72c242f2ba667
|
[
"MIT"
] | null | null | null |
portaljob/portaljob_visitor/views.py
|
mixdevdev/project_portal
|
ee639590a9e5bcf3bd6b058b5ba72c242f2ba667
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
# Create your views here.
def index(request):
return render(request, 'portaljob_visitor/index.html')
def recruitment(request):
return render(request,'portaljob_visitor/recruitment.html')
| 25.888889
| 63
| 0.781116
| 29
| 233
| 6.206897
| 0.586207
| 0.144444
| 0.211111
| 0.288889
| 0.466667
| 0.466667
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120172
| 233
| 9
| 63
| 25.888889
| 0.878049
| 0.098712
| 0
| 0
| 0
| 0
| 0.296651
| 0.296651
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.2
| 0.4
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
57d8380f3767ab5667c6dc435fe3c9e72dee2eac
| 202
|
py
|
Python
|
components/bs_components.py
|
robswc/AoC-cookie-leaderboard-visualizer
|
c5ea33fc5ff36687b9dc13cf7ef44d13f9cae3f9
|
[
"MIT"
] | null | null | null |
components/bs_components.py
|
robswc/AoC-cookie-leaderboard-visualizer
|
c5ea33fc5ff36687b9dc13cf7ef44d13f9cae3f9
|
[
"MIT"
] | null | null | null |
components/bs_components.py
|
robswc/AoC-cookie-leaderboard-visualizer
|
c5ea33fc5ff36687b9dc13cf7ef44d13f9cae3f9
|
[
"MIT"
] | null | null | null |
from dash import html
def container(children):
return (
html.Div(children, className='container')
)
def row(children):
return (
html.Div(children, className='row')
)
| 14.428571
| 49
| 0.613861
| 22
| 202
| 5.636364
| 0.5
| 0.225806
| 0.290323
| 0.33871
| 0.612903
| 0.612903
| 0
| 0
| 0
| 0
| 0
| 0
| 0.267327
| 202
| 13
| 50
| 15.538462
| 0.837838
| 0
| 0
| 0.222222
| 0
| 0
| 0.059406
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.111111
| 0.222222
| 0.555556
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
57e572cc5b3e41543f3e258b0395484334eea64f
| 8,772
|
py
|
Python
|
pirates/leveleditor/worldData/pvpShipIsland2_int_tavern.py
|
itsyaboyrocket/pirates
|
6ca1e7d571c670b0d976f65e608235707b5737e3
|
[
"BSD-3-Clause"
] | 3
|
2021-02-25T06:38:13.000Z
|
2022-03-22T07:00:15.000Z
|
pirates/leveleditor/worldData/pvpShipIsland2_int_tavern.py
|
itsyaboyrocket/pirates
|
6ca1e7d571c670b0d976f65e608235707b5737e3
|
[
"BSD-3-Clause"
] | null | null | null |
pirates/leveleditor/worldData/pvpShipIsland2_int_tavern.py
|
itsyaboyrocket/pirates
|
6ca1e7d571c670b0d976f65e608235707b5737e3
|
[
"BSD-3-Clause"
] | 1
|
2021-02-25T06:38:17.000Z
|
2021-02-25T06:38:17.000Z
|
# uncompyle6 version 3.2.0
# Python bytecode 2.4 (62061)
# Decompiled from: Python 2.7.14 (v2.7.14:84471935ed, Sep 16 2017, 20:19:30) [MSC v.1500 32 bit (Intel)]
# Embedded file name: pirates.leveleditor.worldData.pvpShipIsland2_int_tavern
from pandac.PandaModules import Point3, VBase3, Vec4, Vec3
objectStruct = {'Objects': {'1204237124.2akelts0': {'Type': 'Building Interior', 'Name': '', 'AdditionalData': ['interior_tavern_b'], 'Instanced': False, 'Objects': {'1204237886.13akelts': {'Type': 'Door Locator Node', 'Name': 'door_locator', 'Hpr': VBase3(78.799, 0.0, 0.0), 'Pos': Point3(19.911, -9.53, 0.487), 'Scale': VBase3(1.0, 1.0, 1.0)}, '1204237886.14akelts': {'Type': 'Door Locator Node', 'Name': 'door_locator_2', 'Hpr': VBase3(-180.0, 0.0, 0.0), 'Pos': Point3(-22.427, 42.298, 0.736), 'Scale': VBase3(1.0, 1.0, 1.0)}, '1208547869.58akelts': {'Type': 'Wall_Hangings', 'DisableCollision': False, 'Hpr': VBase3(-102.382, 0.0, 0.0), 'Pos': Point3(26.147, 22.598, 15.042), 'Scale': VBase3(1.238, 1.238, 1.238), 'Visual': {'Color': (0.5, 0.45, 0.45, 1.0), 'Model': 'models/props/flag_hanging_spanish'}}, '1208548123.86akelts': {'Type': 'Parlor Game', 'Category': 'Blackjack', 'BetMultiplier': '1', 'GameVariation': 'Normal', 'Hpr': VBase3(-105.006, 0.0, 0.0), 'Pos': Point3(-7.091, 2.474, 1.0), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/table_bar_round_parlor'}}, '1208548168.92akelts': {'Type': 'Townsperson', 'Category': 'Bartender', 'AnimSet': 'sweep', 'AuraFX': 'None', 'Boss': False, 'CustomModel': 'None', 'GhostColor': 'None', 'GhostFX': 0, 'Greeting Animation': '', 'Hpr': VBase3(-57.471, 0.0, 0.0), 'Instanced World': 'None', 'Level': '37', 'Notice Animation 1': '', 'Notice Animation 2': '', 'Patrol Radius': '12.0000', 'Pos': Point3(-51.07, -5.162, 1.0), 'PoseAnim': '', 'PoseFrame': '', 'Private Status': 'All', 'PropFXLeft': 'None', 'PropFXRight': 'None', 'PropLeft': 'None', 'PropRight': 'None', 'Respawns': True, 'Scale': VBase3(1.027, 1.027, 1.027), 'ShopID': 'PORT_ROYAL_DEFAULTS', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'Player', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'Zombie': False, 'spawnTimeAlt': '', 'spawnTimeBegin': 0.0, 'spawnTimeEnd': 0.0}, '1208548525.28akelts': {'Type': 'Townsperson', 'Category': 
'Commoner', 'AnimSet': 'coin_flip', 'AuraFX': 'None', 'Boss': False, 'CustomModel': 'None', 'GhostColor': 'None', 'GhostFX': 0, 'Greeting Animation': '', 'Hpr': VBase3(139.751, 0.0, 0.0), 'Instanced World': 'None', 'Level': '37', 'Notice Animation 1': '', 'Notice Animation 2': '', 'Patrol Radius': '5.1205', 'Pos': Point3(2.055, 8.769, 1.0), 'PoseAnim': '', 'PoseFrame': '', 'Private Status': 'All', 'PropFXLeft': 'None', 'PropFXRight': 'None', 'PropLeft': 'None', 'PropRight': 'None', 'Respawns': True, 'Scale': VBase3(1.0, 1.0, 1.0), 'ShopID': 'PORT_ROYAL_DEFAULTS', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'Player', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'Zombie': False, 'spawnTimeAlt': '', 'spawnTimeBegin': 0.0, 'spawnTimeEnd': 0.0}, '1208548943.97akelts': {'Type': 'Townsperson', 'Category': 'Commoner', 'AnimSet': 'sit_hanginglegs', 'AuraFX': 'None', 'Boss': False, 'CustomModel': 'None', 'GhostColor': 'None', 'GhostFX': 0, 'Greeting Animation': '', 'Hpr': VBase3(52.393, 0.0, 0.0), 'Instanced World': 'None', 'Level': '37', 'Notice Animation 1': '', 'Notice Animation 2': '', 'Patrol Radius': '6.3795', 'Pos': Point3(-4.253, -14.819, 4.08), 'PoseAnim': '', 'PoseFrame': '', 'Private Status': 'All', 'PropFXLeft': 'None', 'PropFXRight': 'None', 'PropLeft': 'None', 'PropRight': 'None', 'Respawns': True, 'Scale': VBase3(1.0, 1.0, 1.0), 'ShopID': 'PORT_ROYAL_DEFAULTS', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'Player', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'Zombie': False, 'spawnTimeAlt': '', 'spawnTimeBegin': 0.0, 'spawnTimeEnd': 0.0}, '1208549359.09akelts': {'Type': 'Townsperson', 'Category': 'Commoner', 'AnimSet': 'bar_talk02', 'AuraFX': 'None', 'Boss': False, 'CustomModel': 'None', 'GhostColor': 'None', 'GhostFX': 0, 'Greeting Animation': '', 'HelpID': 'NONE', 'Holiday': '', 'Hpr': VBase3(134.052, 0.0, 0.0), 'Instanced World': 'None', 'Level': '37', 'Notice Animation 1': '', 'Notice Animation 2': '', 'Patrol 
Radius': '12.0000', 'Pos': Point3(-43.977, 3.151, 1.0), 'PoseAnim': '', 'PoseFrame': '', 'Private Status': 'All', 'PropFXLeft': 'None', 'PropFXRight': 'None', 'PropLeft': 'None', 'PropRight': 'None', 'Respawns': True, 'Scale': VBase3(1.0, 1.0, 1.0), 'ShopID': 'PORT_ROYAL_DEFAULTS', 'Start State': 'Walk', 'StartFrame': '0', 'Team': 'Player', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Zombie': False, 'spawnTimeAlt': '', 'spawnTimeBegin': 0.0, 'spawnTimeEnd': 0.0}, '1208549853.83akelts': {'Type': 'Cups', 'DisableCollision': False, 'Hpr': VBase3(162.32, 0.0, 0.0), 'Pos': Point3(-46.992, 1.496, 4.68), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0.4000000059604645, 0.4000000059604645, 0.4000000059604645, 1.0), 'Model': 'models/props/beerstein'}}, '1208549913.56akelts': {'Type': 'Townsperson', 'Category': 'Commoner', 'AnimSet': 'lute', 'AuraFX': 'None', 'Boss': False, 'CustomModel': 'None', 'GhostColor': 'None', 'GhostFX': 0, 'Greeting Animation': '', 'Hpr': VBase3(-141.39, 0.0, 0.0), 'Instanced World': 'None', 'Level': '37', 'Notice Animation 1': '', 'Notice Animation 2': '', 'Patrol Radius': '6.8373', 'Pos': Point3(-26.92, 31.205, 1.0), 'PoseAnim': '', 'PoseFrame': '', 'Private Status': 'All', 'PropFXLeft': 'None', 'PropFXRight': 'None', 'PropLeft': 'None', 'PropRight': 'None', 'Respawns': True, 'Scale': VBase3(1.0, 1.0, 1.0), 'ShopID': 'PORT_ROYAL_DEFAULTS', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'Player', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'Zombie': False, 'spawnTimeAlt': '', 'spawnTimeBegin': 0.0, 'spawnTimeEnd': 0.0}}, 'Visual': {'Model': 'models/buildings/interior_tavern_b'}}}, 'Node Links': [], 'Layers': {'Collisions': ['1184008208.59kmuller', '1184016064.62kmuller', '1184013852.84kmuller', '1185822696.06kmuller', '1184006140.32kmuller', '1184002350.98kmuller', '1184007573.29kmuller', '1184021176.59kmuller', '1184005963.59kmuller', '1188324241.31akelts', '1184006537.34kmuller', 
'1184006605.81kmuller', '1187139568.33kmuller', '1188324186.98akelts', '1184006730.66kmuller', '1184007538.51kmuller', '1184006188.41kmuller', '1184021084.27kmuller', '1185824396.94kmuller', '1185824250.16kmuller', '1185823630.52kmuller', '1185823760.23kmuller', '1185824497.83kmuller', '1185824751.45kmuller', '1187739103.34akelts', '1188323993.34akelts', '1184016538.29kmuller', '1185822200.97kmuller', '1184016225.99kmuller', '1195241421.34akelts', '1195242796.08akelts', '1184020642.13kmuller', '1195237994.63akelts', '1184020756.88kmuller', '1184020833.4kmuller', '1185820992.97kmuller', '1185821053.83kmuller', '1184015068.54kmuller', '1184014935.82kmuller', '1185821432.88kmuller', '1185821701.86kmuller', '1195240137.55akelts', '1195241539.38akelts', '1195238422.3akelts', '1195238473.22akelts', '1185821453.17kmuller', '1184021269.96kmuller', '1185821310.89kmuller', '1185821165.59kmuller', '1185821199.36kmuller', '1185822035.98kmuller', '1184015806.59kmuller', '1185822059.48kmuller', '1185920461.76kmuller', '1194984449.66akelts', '1185824206.22kmuller', '1184003446.23kmuller', '1184003254.85kmuller', '1184003218.74kmuller', '1184002700.44kmuller', '1186705073.11kmuller', '1187658531.86akelts', '1186705214.3kmuller', '1185824927.28kmuller', '1184014204.54kmuller', '1184014152.84kmuller']}, 'ObjectIds': {'1204237124.2akelts0': '["Objects"]["1204237124.2akelts0"]', '1204237886.13akelts': '["Objects"]["1204237124.2akelts0"]["Objects"]["1204237886.13akelts"]', '1204237886.14akelts': '["Objects"]["1204237124.2akelts0"]["Objects"]["1204237886.14akelts"]', '1208547869.58akelts': '["Objects"]["1204237124.2akelts0"]["Objects"]["1208547869.58akelts"]', '1208548123.86akelts': '["Objects"]["1204237124.2akelts0"]["Objects"]["1208548123.86akelts"]', '1208548168.92akelts': '["Objects"]["1204237124.2akelts0"]["Objects"]["1208548168.92akelts"]', '1208548525.28akelts': '["Objects"]["1204237124.2akelts0"]["Objects"]["1208548525.28akelts"]', '1208548943.97akelts': 
'["Objects"]["1204237124.2akelts0"]["Objects"]["1208548943.97akelts"]', '1208549359.09akelts': '["Objects"]["1204237124.2akelts0"]["Objects"]["1208549359.09akelts"]', '1208549853.83akelts': '["Objects"]["1204237124.2akelts0"]["Objects"]["1208549853.83akelts"]', '1208549913.56akelts': '["Objects"]["1204237124.2akelts0"]["Objects"]["1208549913.56akelts"]'}}
extraInfo = {'camPos': Point3(-40.8568, -27.4814, 7.78277), 'camHpr': VBase3(15.4648, -9.95552, 0), 'focalLength': 1.39999997616, 'skyState': -1, 'fog': 0}
| 1,253.142857
| 8,317
| 0.669175
| 1,024
| 8,772
| 5.705078
| 0.350586
| 0.014036
| 0.010784
| 0.010955
| 0.446251
| 0.399692
| 0.388566
| 0.376926
| 0.371106
| 0.35416
| 0
| 0.230779
| 0.082193
| 8,772
| 7
| 8,318
| 1,253.142857
| 0.494845
| 0.026334
| 0
| 0
| 0
| 0
| 0.617313
| 0.098161
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
aa2f5159986aa03ece60b7dd6adc192e2381ed33
| 26,154
|
py
|
Python
|
dpnp/dpnp_iface_logic.py
|
LukichevaPolina/dpnp
|
5f5a679905d237ac7be1cc9ad1075877a9f77e39
|
[
"BSD-2-Clause"
] | 37
|
2020-09-08T00:38:52.000Z
|
2022-03-18T01:44:10.000Z
|
dpnp/dpnp_iface_logic.py
|
LukichevaPolina/dpnp
|
5f5a679905d237ac7be1cc9ad1075877a9f77e39
|
[
"BSD-2-Clause"
] | 432
|
2020-09-07T09:48:41.000Z
|
2022-03-25T17:50:55.000Z
|
dpnp/dpnp_iface_logic.py
|
LukichevaPolina/dpnp
|
5f5a679905d237ac7be1cc9ad1075877a9f77e39
|
[
"BSD-2-Clause"
] | 17
|
2020-09-07T10:00:34.000Z
|
2022-03-25T13:53:43.000Z
|
# cython: language_level=3
# distutils: language = c++
# -*- coding: utf-8 -*-
# *****************************************************************************
# Copyright (c) 2016-2020, Intel Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# - Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
# *****************************************************************************
"""
Interface of the Logic part of the DPNP
Notes
-----
This module is a face or public interface file for the library
it contains:
- Interface functions
- documentation for the functions
- The functions parameters check
"""
import numpy
import dpnp
import dpnp.config as config
from dpnp.dpnp_algo import *
from dpnp.dpnp_utils import *
__all__ = [
"all",
"allclose",
"any",
"equal",
"greater",
"greater_equal",
"isclose",
"isfinite",
"isinf",
"isnan",
"less",
"less_equal",
"logical_and",
"logical_not",
"logical_or",
"logical_xor",
"not_equal"
]
def all(x1, axis=None, out=None, keepdims=False):
"""
Test whether all array elements along a given axis evaluate to True.
For full documentation refer to :obj:`numpy.all`.
Limitations
-----------
Input array is supported as :obj:`dpnp.ndarray`.
Otherwise the function will be executed sequentially on CPU.
Input array data types are limited by supported DPNP :ref:`Data types`.
Parameter ``axis`` is supported only with default value ``None``.
Parameter ``out`` is supported only with default value ``None``.
Parameter ``keepdims`` is supported only with default value ``False``.
See Also
--------
:obj:`dpnp.any` : Test whether any element along a given axis evaluates to True.
Notes
-----
Not a Number (NaN), positive infinity and negative infinity
evaluate to `True` because these are not equal to zero.
Examples
--------
>>> import dpnp as np
>>> x = np.array([[True, False], [True, True]])
>>> np.all(x)
False
>>> x2 = np.array([-1, 4, 5])
>>> np.all(x2)
True
>>> x3 = np.array([1.0, np.nan])
>>> np.all(x3)
True
"""
x1_desc = dpnp.get_dpnp_descriptor(x1)
if x1_desc:
if axis is not None:
pass
elif out is not None:
pass
elif keepdims is not False:
pass
else:
result_obj = dpnp_all(x1_desc).get_pyobj()
result = dpnp.convert_single_elem_array_to_scalar(result_obj)
return result
return call_origin(numpy.all, x1, axis, out, keepdims)
def allclose(x1, x2, rtol=1.e-5, atol=1.e-8, **kwargs):
"""
Returns True if two arrays are element-wise equal within a tolerance.
For full documentation refer to :obj:`numpy.allclose`.
Limitations
-----------
Parameters ``x1`` and ``x2`` are supported as either :obj:`dpnp.ndarray` or scalar.
Keyword arguments ``kwargs`` are currently unsupported.
Otherwise the functions will be executed sequentially on CPU.
Input array data types are limited by supported DPNP :ref:`Data types`.
Examples
--------
>>> import dpnp as np
>>> np.allclose([1e10,1e-7], [1.00001e10,1e-8])
>>> False
"""
rtol_is_scalar = dpnp.isscalar(rtol)
atol_is_scalar = dpnp.isscalar(atol)
x1_desc = dpnp.get_dpnp_descriptor(x1)
x2_desc = dpnp.get_dpnp_descriptor(x2)
if x1_desc and x2_desc and not kwargs:
if not rtol_is_scalar or not atol_is_scalar:
pass
else:
result_obj = dpnp_allclose(x1_desc, x2_desc, rtol, atol).get_pyobj()
result = dpnp.convert_single_elem_array_to_scalar(result_obj)
return result
return call_origin(numpy.allclose, x1, x2, rtol=rtol, atol=atol, **kwargs)
def any(x1, axis=None, out=None, keepdims=False):
"""
Test whether any array element along a given axis evaluates to True.
For full documentation refer to :obj:`numpy.any`.
Limitations
-----------
Input array is supported as :obj:`dpnp.ndarray`.
Otherwise the function will be executed sequentially on CPU.
Input array data types are limited by supported DPNP :ref:`Data types`.
Parameter ``axis`` is supported only with default value ``None``.
Parameter ``out`` is supported only with default value ``None``.
Parameter ``keepdims`` is supported only with default value ``False``.
See Also
--------
:obj:`dpnp.all` : Test whether all elements along a given axis evaluate to True.
Notes
-----
Not a Number (NaN), positive infinity and negative infinity evaluate
to `True` because these are not equal to zero.
Examples
--------
>>> import dpnp as np
>>> x = np.array([[True, False], [True, True]])
>>> np.any(x)
True
>>> x2 = np.array([0, 0, 0])
>>> np.any(x2)
False
>>> x3 = np.array([1.0, np.nan])
>>> np.any(x3)
True
"""
x1_desc = dpnp.get_dpnp_descriptor(x1)
if x1_desc:
if axis is not None:
pass
elif out is not None:
pass
elif keepdims is not False:
pass
else:
result_obj = dpnp_any(x1_desc).get_pyobj()
result = dpnp.convert_single_elem_array_to_scalar(result_obj)
return result
return call_origin(numpy.any, x1, axis, out, keepdims)
def equal(x1, x2):
"""
Return (x1 == x2) element-wise.
For full documentation refer to :obj:`numpy.equal`.
Limitations
-----------
Parameter ``x1`` is supported as :obj:`dpnp.ndarray`.
Parameter ``x2`` is supported as either :obj:`dpnp.ndarray` or int.
Input array data types are limited by supported DPNP :ref:`Data types`.
Sizes, shapes and data types of input arrays ``x1`` and ``x2`` are supported to be equal.
See Also
--------
:obj:`dpnp.not_equal` : Return (x1 != x2) element-wise.
:obj:`dpnp.greater_equal` : Return the truth value of (x1 >= x2) element-wise.
:obj:`dpnp.less_equal` : Return the truth value of (x1 =< x2) element-wise.
:obj:`dpnp.greater` : Return the truth value of (x1 > x2) element-wise.
:obj:`dpnp.less` : Return the truth value of (x1 < x2) element-wise.
Examples
--------
>>> import dpnp as np
>>> x1 = np.array([0, 1, 3])
>>> x2 = np.arange(3)
>>> out = np.equal(x1, x2)
>>> [i for i in out]
[True, True, False]
"""
# x1_desc = dpnp.get_dpnp_descriptor(x1)
# x2_desc = dpnp.get_dpnp_descriptor(x2)
# if x1_desc and x2_desc:
# if x1_desc.size != x2_desc.size:
# pass
# elif x1_desc.dtype != x2_desc.dtype:
# pass
# elif x1_desc.shape != x2_desc.shape:
# pass
# else:
# return dpnp_equal(x1_desc, x2_desc).get_pyobj()
return call_origin(numpy.equal, x1, x2)
def greater(x1, x2):
"""
Return (x1 > x2) element-wise.
For full documentation refer to :obj:`numpy.greater`.
Limitations
-----------
At least either ``x1`` or ``x2`` should be as :obj:`dpnp.ndarray`.
Otherwise the function will be executed sequentially on CPU.
Input array data types are limited by supported DPNP :ref:`Data types`.
See Also
--------
:obj:`dpnp.greater_equal` : Return the truth value of (x1 >= x2) element-wise.
:obj:`dpnp.less` : Return the truth value of (x1 < x2) element-wise.
:obj:`dpnp.less_equal` : Return the truth value of (x1 =< x2) element-wise.
:obj:`dpnp.equal` : Return (x1 == x2) element-wise.
:obj:`dpnp.not_equal` : Return (x1 != x2) element-wise.
Examples
--------
>>> import dpnp as np
>>> x1 = np.array([4, 2])
>>> x2 = np.array([2, 2])
>>> out = np.greater(x1, x2)
>>> [i for i in out]
[True, False]
"""
# x1_desc = dpnp.get_dpnp_descriptor(x1)
# x2_desc = dpnp.get_dpnp_descriptor(x2)
# if x1_desc and x2_desc:
# if x1_desc.size < 2:
# pass
# elif x2_desc.size < 2:
# pass
# else:
# return dpnp_greater(x1_desc, x2_desc).get_pyobj()
return call_origin(numpy.greater, x1, x2)
def greater_equal(x1, x2):
"""
Return (x1 >= x2) element-wise.
For full documentation refer to :obj:`numpy.greater_equal`.
Limitations
-----------
At least either ``x1`` or ``x2`` should be as :obj:`dpnp.ndarray`.
Otherwise the function will be executed sequentially on CPU.
Input array data types are limited by supported DPNP :ref:`Data types`.
See Also
--------
:obj:`dpnp.greater` : Return the truth value of (x1 > x2) element-wise.
:obj:`dpnp.less` : Return the truth value of (x1 < x2) element-wise.
:obj:`dpnp.less_equal` : Return the truth value of (x1 =< x2) element-wise.
:obj:`dpnp.equal` : Return (x1 == x2) element-wise.
:obj:`dpnp.not_equal` : Return (x1 != x2) element-wise.
Examples
--------
>>> import dpnp as np
>>> x1 = np.array([4, 2, 1])
>>> x2 = np.array([2, 2, 2])
>>> out = np.greater_equal(x1, x2)
>>> [i for i in out]
[True, True, False]
"""
# x1_desc = dpnp.get_dpnp_descriptor(x1)
# x2_desc = dpnp.get_dpnp_descriptor(x2)
# if x1_desc and x2_desc:
# if x1_desc.size < 2:
# pass
# elif x2_desc.size < 2:
# pass
# else:
# return dpnp_greater_equal(x1_desc, x2_desc).get_pyobj()
return call_origin(numpy.greater_equal, x1, x2)
def isclose(x1, x2, rtol=1e-05, atol=1e-08, equal_nan=False):
"""
Returns a boolean array where two arrays are element-wise equal within a tolerance.
For full documentation refer to :obj:`numpy.isclose`.
Limitations
-----------
``x2`` is supported to be integer if ``x1`` is :obj:`dpnp.ndarray` or
at least either ``x1`` or ``x2`` should be as :obj:`dpnp.ndarray`.
Otherwise the function will be executed sequentially on CPU.
Input array data types are limited by supported DPNP :ref:`Data types`.
See Also
--------
:obj:`dpnp.allclose` : Returns True if two arrays are element-wise equal within a tolerance.
Examples
--------
>>> import dpnp as np
>>> x1 = np.array([1e10,1e-7])
>>> x2 = np.array([1.00001e10,1e-8])
>>> out = np.isclose(x1, x2)
>>> [i for i in out]
[True, False]
"""
# x1_desc = dpnp.get_dpnp_descriptor(x1)
# x2_desc = dpnp.get_dpnp_descriptor(x2)
# if x1_desc and x2_desc:
# result_obj = dpnp_isclose(x1_desc, x2_desc, rtol, atol, equal_nan).get_pyobj()
# return result_obj
return call_origin(numpy.isclose, x1, x2, rtol=rtol, atol=atol, equal_nan=equal_nan)
def isfinite(x1, out=None, **kwargs):
"""
Test element-wise for finiteness (not infinity or not Not a Number).
For full documentation refer to :obj:`numpy.isfinite`.
Limitations
-----------
Input array is supported as :obj:`dpnp.ndarray`.
Otherwise the function will be executed sequentially on CPU.
Input array data types are limited by supported DPNP :ref:`Data types`.
Parameter ``out`` is supported only with default value ``None``.
Parameter ``where`` is supported only with default value ``True``.
See Also
--------
:obj:`dpnp.isinf` : Test element-wise for positive or negative infinity.
:obj:`dpnp.isneginf` : Test element-wise for negative infinity,
return result as bool array.
:obj:`dpnp.isposinf` : Test element-wise for positive infinity,
return result as bool array.
:obj:`dpnp.isnan` : Test element-wise for NaN and
return result as a boolean array.
Notes
-----
Not a Number, positive infinity and negative infinity are considered
to be non-finite.
Examples
--------
>>> import numpy
>>> import dpnp as np
>>> x = np.array([-numpy.inf, 0., numpy.inf])
>>> out = np.isfinite(x)
>>> [i for i in out]
[False, True, False]
"""
# x1_desc = dpnp.get_dpnp_descriptor(x1)
# if x1_desc and kwargs:
# if out is not None:
# pass
# else:
# return dpnp_isfinite(x1_desc).get_pyobj()
return call_origin(numpy.isfinite, x1, out, **kwargs)
def isinf(x1, out=None, **kwargs):
"""
Test element-wise for positive or negative infinity.
For full documentation refer to :obj:`numpy.isinf`.
Limitations
-----------
Input array is supported as :obj:`dpnp.ndarray`.
Otherwise the function will be executed sequentially on CPU.
Input array data types are limited by supported DPNP :ref:`Data types`.
Parameter ``out`` is supported only with default value ``None``.
Parameter ``where`` is supported only with default value ``True``.
See Also
--------
:obj:`dpnp.isneginf` : Test element-wise for negative infinity,
return result as bool array.
:obj:`dpnp.isposinf` : Test element-wise for positive infinity,
return result as bool array.
:obj:`dpnp.isnan` : Test element-wise for NaN and
return result as a boolean array.
:obj:`dpnp.isfinite` : Test element-wise for finiteness.
Examples
--------
>>> import numpy
>>> import dpnp as np
>>> x = np.array([-numpy.inf, 0., numpy.inf])
>>> out = np.isinf(x)
>>> [i for i in out]
[True, False, True]
"""
# x1_desc = dpnp.get_dpnp_descriptor(x1)
# if x1_desc and kwargs:
# if out is not None:
# pass
# else:
# return dpnp_isinf(x1_desc).get_pyobj()
return call_origin(numpy.isinf, x1, out, **kwargs)
def isnan(x1, out=None, **kwargs):
"""
Test element-wise for NaN and return result as a boolean array.
For full documentation refer to :obj:`numpy.isnan`.
Limitations
-----------
Input array is supported as :obj:`dpnp.ndarray`.
Otherwise the function will be executed sequentially on CPU.
Input array data types are limited by supported DPNP :ref:`Data types`.
Parameter ``out`` is supported only with default value ``None``.
Parameter ``where`` is supported only with default value ``True``.
See Also
--------
:obj:`dpnp.isinf` : Test element-wise for positive or negative infinity.
:obj:`dpnp.isneginf` : Test element-wise for negative infinity,
return result as bool array.
:obj:`dpnp.isposinf` : Test element-wise for positive infinity,
return result as bool array.
:obj:`dpnp.isfinite` : Test element-wise for finiteness.
:obj:`dpnp.isnat` : Test element-wise for NaT (not a time)
and return result as a boolean array.
Examples
--------
>>> import numpy
>>> import dpnp as np
>>> x = np.array([numpy.inf, 0., np.nan])
>>> out = np.isnan(x)
>>> [i for i in out]
[False, False, True]
"""
# x1_desc = dpnp.get_dpnp_descriptor(x1)
# if x1_desc and kwargs:
# if out is not None:
# pass
# else:
# return dpnp_isnan(x1_desc).get_pyobj()
return call_origin(numpy.isnan, x1, out, **kwargs)
def less(x1, x2):
"""
Return (x1 < x2) element-wise.
For full documentation refer to :obj:`numpy.less`.
Limitations
-----------
At least either ``x1`` or ``x2`` should be as :obj:`dpnp.ndarray`.
Otherwise the function will be executed sequentially on CPU.
Input array data types are limited by supported DPNP :ref:`Data types`.
See Also
--------
:obj:`dpnp.greater` : Return the truth value of (x1 > x2) element-wise.
:obj:`dpnp.less_equal` : Return the truth value of (x1 =< x2) element-wise.
:obj:`dpnp.greater_equal` : Return the truth value of (x1 >= x2) element-wise.
:obj:`dpnp.equal` : Return (x1 == x2) element-wise.
:obj:`dpnp.not_equal` : Return (x1 != x2) element-wise.
Examples
--------
>>> import dpnp as np
>>> x1 = np.array([1, 2])
>>> x2 = np.array([2, 2])
>>> out = np.less(x1, x2)
>>> [i for i in out]
[True, False]
"""
# x1_desc = dpnp.get_dpnp_descriptor(x1)
# x2_desc = dpnp.get_dpnp_descriptor(x2)
# if x1_desc and x2_desc:
# if x1_desc.size < 2:
# pass
# elif x2_desc.size < 2:
# pass
# else:
# return dpnp_less(x1_desc, x2_desc).get_pyobj()
return call_origin(numpy.less, x1, x2)
def less_equal(x1, x2):
"""
Return (x1 <= x2) element-wise.
For full documentation refer to :obj:`numpy.less_equal`.
Limitations
-----------
At least either ``x1`` or ``x2`` should be as :obj:`dpnp.ndarray`.
Otherwise the function will be executed sequentially on CPU.
Input array data types are limited by supported DPNP :ref:`Data types`.
See Also
--------
:obj:`dpnp.greater` : Return the truth value of (x1 > x2) element-wise.
:obj:`dpnp.less` : Return the truth value of (x1 < x2) element-wise.
:obj:`dpnp.greater_equal` : Return the truth value of (x1 >= x2) element-wise.
:obj:`dpnp.equal` : Return (x1 == x2) element-wise.
:obj:`dpnp.not_equal` : Return (x1 != x2) element-wise.
Examples
--------
>>> import dpnp as np
>>> x1 = np.array([4, 2, 1])
>>> x2 = np.array([2, 2, 2])
>>> out = np.less_equal(x1, x2)
>>> [i for i in out]
[False, True, True]
"""
# x1_desc = dpnp.get_dpnp_descriptor(x1)
# x2_desc = dpnp.get_dpnp_descriptor(x2)
# if x1_desc and x2_desc:
# if x1_desc.size < 2:
# pass
# elif x2_desc.size < 2:
# pass
# else:
# return dpnp_less_equal(x1_desc, x2_desc).get_pyobj()
return call_origin(numpy.less_equal, x1, x2)
def logical_and(x1, x2, out=None, **kwargs):
    """
    Compute the truth value of x1 AND x2 element-wise.

    For full documentation refer to :obj:`numpy.logical_and`.

    Limitations
    -----------
    Input arrays are supported as :obj:`dpnp.ndarray`.
    Otherwise the function will be executed sequentially on CPU.
    Input array data types are limited by supported DPNP :ref:`Data types`.
    Parameter ``out`` is supported only with default value ``None``.
    Parameter ``where`` is supported only with default value ``True``.

    See Also
    --------
    :obj:`dpnp.logical_or` : Compute the truth value of x1 OR x2 element-wise.
    :obj:`dpnp.logical_not` : Compute the truth value of NOT x element-wise.
    :obj:`dpnp.logical_xor` : Compute the truth value of x1 XOR x2, element-wise.
    :obj:`dpnp.bitwise_and` : Compute the bit-wise AND of two arrays element-wise.

    Examples
    --------
    >>> import dpnp as np
    >>> x1 = np.array([True, False])
    >>> x2 = np.array([False, False])
    >>> out = np.logical_and(x1, x2)
    >>> [i for i in out]
    [False, False]
    """
    # NOTE(review): the dedicated dpnp kernel dispatch below is disabled, so
    # this wrapper always falls back to NumPy; kept for future re-enabling.
    # x1_desc = dpnp.get_dpnp_descriptor(x1)
    # x2_desc = dpnp.get_dpnp_descriptor(x2)
    # if x1_desc and x2_desc and not kwargs:
    #     if out is not None:
    #         pass
    #     else:
    #         return dpnp_logical_and(x1_desc, x2_desc).get_pyobj()

    return call_origin(numpy.logical_and, x1, x2, out, **kwargs)
def logical_not(x1, out=None, **kwargs):
    """
    Compute the truth value of NOT x element-wise.

    For full documentation refer to :obj:`numpy.logical_not`.

    Limitations
    -----------
    Input array is supported as :obj:`dpnp.ndarray`.
    Otherwise the function will be executed sequentially on CPU.
    Input array data types are limited by supported DPNP :ref:`Data types`.
    Parameter ``out`` is supported only with default value ``None``.
    Parameter ``where`` is supported only with default value ``True``.

    See Also
    --------
    :obj:`dpnp.logical_and` : Compute the truth value of x1 AND x2 element-wise.
    :obj:`dpnp.logical_or` : Compute the truth value of x1 OR x2 element-wise.
    :obj:`dpnp.logical_xor` : Compute the truth value of x1 XOR x2, element-wise.

    Examples
    --------
    >>> import dpnp as np
    >>> x = np.array([True, False, 0, 1])
    >>> out = np.logical_not(x)
    >>> [i for i in out]
    [False, True, True, False]
    """
    # NOTE(review): the dedicated dpnp kernel dispatch below is disabled, so
    # this wrapper always falls back to NumPy; kept for future re-enabling.
    # x1_desc = dpnp.get_dpnp_descriptor(x1)
    # if x1_desc and not kwargs:
    #     if out is not None:
    #         pass
    #     else:
    #         return dpnp_logical_not(x1_desc).get_pyobj()

    return call_origin(numpy.logical_not, x1, out, **kwargs)
def logical_or(x1, x2, out=None, **kwargs):
    """
    Compute the truth value of x1 OR x2 element-wise.

    For full documentation refer to :obj:`numpy.logical_or`.

    Limitations
    -----------
    Input arrays are supported as :obj:`dpnp.ndarray`.
    Otherwise the function will be executed sequentially on CPU.
    Input array data types are limited by supported DPNP :ref:`Data types`.
    Parameter ``out`` is supported only with default value ``None``.
    Parameter ``where`` is supported only with default value ``True``.

    See Also
    --------
    :obj:`dpnp.logical_and` : Compute the truth value of x1 AND x2 element-wise.
    :obj:`dpnp.logical_not` : Compute the truth value of NOT x element-wise.
    :obj:`dpnp.logical_xor` : Compute the truth value of x1 XOR x2, element-wise.
    :obj:`dpnp.bitwise_or` : Compute the bit-wise OR of two arrays element-wise.

    Examples
    --------
    >>> import dpnp as np
    >>> x1 = np.array([True, False])
    >>> x2 = np.array([False, False])
    >>> out = np.logical_or(x1, x2)
    >>> [i for i in out]
    [True, False]
    """
    # NOTE(review): the dedicated dpnp kernel dispatch below is disabled, so
    # this wrapper always falls back to NumPy; kept for future re-enabling.
    # x1_desc = dpnp.get_dpnp_descriptor(x1)
    # x2_desc = dpnp.get_dpnp_descriptor(x2)
    # if x1_desc and x2_desc and not kwargs:
    #     if out is not None:
    #         pass
    #     else:
    #         return dpnp_logical_or(x1_desc, x2_desc).get_pyobj()

    return call_origin(numpy.logical_or, x1, x2, out, **kwargs)
def logical_xor(x1, x2, out=None, **kwargs):
    """
    Compute the truth value of x1 XOR x2, element-wise.

    For full documentation refer to :obj:`numpy.logical_xor`.

    Limitations
    -----------
    Input arrays are supported as :obj:`dpnp.ndarray`.
    Otherwise the function will be executed sequentially on CPU.
    Input array data types are limited by supported DPNP :ref:`Data types`.
    Parameter ``out`` is supported only with default value ``None``.
    Parameter ``where`` is supported only with default value ``True``.

    See Also
    --------
    :obj:`dpnp.logical_and` : Compute the truth value of x1 AND x2 element-wise.
    :obj:`dpnp.logical_or` : Compute the truth value of x1 OR x2 element-wise.
    :obj:`dpnp.logical_not` : Compute the truth value of NOT x element-wise.
    :obj:`dpnp.bitwise_xor` : Compute the bit-wise XOR of two arrays element-wise.

    Examples
    --------
    >>> import dpnp as np
    >>> x1 = np.array([True, True, False, False])
    >>> x2 = np.array([True, False, True, False])
    >>> out = np.logical_xor(x1, x2)
    >>> [i for i in out]
    [False, True, True, False]
    """
    # NOTE(review): the dedicated dpnp kernel dispatch below is disabled, so
    # this wrapper always falls back to NumPy; kept for future re-enabling.
    # x1_desc = dpnp.get_dpnp_descriptor(x1)
    # x2_desc = dpnp.get_dpnp_descriptor(x2)
    # if x1_desc and x2_desc and not kwargs:
    #     if out is not None:
    #         pass
    #     else:
    #         return dpnp_logical_xor(x1_desc, x2_desc).get_pyobj()

    return call_origin(numpy.logical_xor, x1, x2, out, **kwargs)
def not_equal(x1, x2):
    """
    Return (x1 != x2) element-wise.

    For full documentation refer to :obj:`numpy.not_equal`.

    Limitations
    -----------
    At least either ``x1`` or ``x2`` should be as :obj:`dpnp.ndarray`.
    If either ``x1`` or ``x2`` is scalar then other one should be :obj:`dpnp.ndarray`.
    Otherwise the function will be executed sequentially on CPU.
    Input array data types are limited by supported DPNP :ref:`Data types`.

    See Also
    --------
    :obj:`dpnp.equal` : Return (x1 == x2) element-wise.
    :obj:`dpnp.greater` : Return the truth value of (x1 > x2) element-wise.
    :obj:`dpnp.greater_equal` : Return the truth value of (x1 >= x2) element-wise.
    :obj:`dpnp.less` : Return the truth value of (x1 < x2) element-wise.
    :obj:`dpnp.less_equal` : Return the truth value of (x1 <= x2) element-wise.

    Examples
    --------
    >>> import dpnp as np
    >>> x1 = np.array([1., 2.])
    >>> x2 = np.arange(1., 3.)
    >>> out = np.not_equal(x1, x2)
    >>> [i for i in out]
    [False, False]
    """
    # NOTE(review): the dedicated dpnp kernel dispatch below is disabled, so
    # this wrapper always falls back to NumPy; kept for future re-enabling.
    # x1_desc = dpnp.get_dpnp_descriptor(x1)
    # x2_desc = dpnp.get_dpnp_descriptor(x2)
    # if x1_desc and x2_desc:
    #     if x1_desc.size < 2:
    #         pass
    #     elif x2_desc.size < 2:
    #         pass
    #     else:
    #         result = dpnp_not_equal(x1_desc, x2_desc).get_pyobj()
    #         return result

    return call_origin(numpy.not_equal, x1, x2)
| 31.247312
| 96
| 0.611761
| 3,649
| 26,154
| 4.296794
| 0.073445
| 0.037949
| 0.039798
| 0.034441
| 0.821672
| 0.802538
| 0.790994
| 0.776261
| 0.742203
| 0.723069
| 0
| 0.023186
| 0.252963
| 26,154
| 836
| 97
| 31.284689
| 0.779302
| 0.774375
| 0
| 0.290323
| 0
| 0
| 0.03277
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.182796
| false
| 0.075269
| 0.053763
| 0
| 0.451613
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
a4d431717d872a80eab75e963b1a13f2884b113e
| 163
|
py
|
Python
|
src/active/acquisition/__init__.py
|
silasbrack/special-course
|
47dc396f97b2027d366e90add115d4ed2bc0f1de
|
[
"MIT"
] | null | null | null |
src/active/acquisition/__init__.py
|
silasbrack/special-course
|
47dc396f97b2027d366e90add115d4ed2bc0f1de
|
[
"MIT"
] | null | null | null |
src/active/acquisition/__init__.py
|
silasbrack/special-course
|
47dc396f97b2027d366e90add115d4ed2bc0f1de
|
[
"MIT"
] | null | null | null |
from src.active.acquisition.bald import Bald
from src.active.acquisition.max_entropy import MaxEntropy
from src.active.acquisition.random import RandomAcquisition
| 40.75
| 59
| 0.871166
| 22
| 163
| 6.409091
| 0.5
| 0.148936
| 0.276596
| 0.510638
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.07362
| 163
| 3
| 60
| 54.333333
| 0.933775
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
352ae6a63900da652ce66fefb38c4e2052dd1c92
| 112
|
py
|
Python
|
blobcity/config/__init__.py
|
SkBlaz/autoai
|
c9f6d8d6cc2027b7918bae88862ea476cc49397f
|
[
"Apache-2.0"
] | 1
|
2021-12-30T07:20:28.000Z
|
2021-12-30T07:20:28.000Z
|
blobcity/config/__init__.py
|
maxpark/autoai
|
c9f6d8d6cc2027b7918bae88862ea476cc49397f
|
[
"Apache-2.0"
] | null | null | null |
blobcity/config/__init__.py
|
maxpark/autoai
|
c9f6d8d6cc2027b7918bae88862ea476cc49397f
|
[
"Apache-2.0"
] | null | null | null |
from .classifier_config import *
from .regressor_config import *
from .time_config import *
from .tuner import *
| 28
| 32
| 0.794643
| 15
| 112
| 5.733333
| 0.466667
| 0.418605
| 0.55814
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133929
| 112
| 4
| 33
| 28
| 0.886598
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
10b577d29dbc3ff483df1b3adbf5e549bf9bd4c7
| 1,233
|
py
|
Python
|
tests/test_database.py
|
lucafaggianelli/layabase
|
90733c6b9efd56051dfce5c3d89bd4e657ce7b3f
|
[
"MIT"
] | 3
|
2019-12-02T23:29:44.000Z
|
2019-12-31T00:55:01.000Z
|
tests/test_database.py
|
lucafaggianelli/layabase
|
90733c6b9efd56051dfce5c3d89bd4e657ce7b3f
|
[
"MIT"
] | 29
|
2019-12-02T16:12:45.000Z
|
2022-02-17T16:01:55.000Z
|
tests/test_database.py
|
lucafaggianelli/layabase
|
90733c6b9efd56051dfce5c3d89bd4e657ce7b3f
|
[
"MIT"
] | 3
|
2020-01-02T10:58:47.000Z
|
2022-02-17T10:55:18.000Z
|
import pytest
import layabase
def test_controller_without_collection_or_table():
    """CRUDController must refuse a missing table/collection argument."""
    with pytest.raises(Exception) as exc:
        layabase.CRUDController(None)
    assert "Table or Collection must be provided." == str(exc.value)
def test_none_connection_string_is_invalid():
    """Loading with a None connection URL raises NoDatabaseProvided."""
    with pytest.raises(layabase.NoDatabaseProvided) as exc:
        layabase.load(None, None)
    assert "A database connection URL must be provided." == str(exc.value)
def test_empty_connection_string_is_invalid():
    """Loading with an empty connection URL raises NoDatabaseProvided."""
    with pytest.raises(layabase.NoDatabaseProvided) as exc:
        layabase.load("", None)
    assert "A database connection URL must be provided." == str(exc.value)
def test_sqla_no_controllers_is_invalid():
    """SQLAlchemy URL without controllers raises NoRelatedControllers."""
    with pytest.raises(layabase.NoRelatedControllers) as exc:
        layabase.load("sqlite:///:memory:", None)
    assert "A list of CRUDController must be provided." == str(exc.value)
def test_no_mongo_no_controllers_is_invalid():
    """Mongo mock URL without controllers raises NoRelatedControllers."""
    with pytest.raises(layabase.NoRelatedControllers) as exc:
        layabase.load("mongomock", None)
    assert "A list of CRUDController must be provided." == str(exc.value)
| 36.264706
| 85
| 0.762368
| 157
| 1,233
| 5.757962
| 0.273885
| 0.143805
| 0.088496
| 0.127212
| 0.793142
| 0.762168
| 0.727876
| 0.727876
| 0.727876
| 0.727876
| 0
| 0
| 0.148418
| 1,233
| 33
| 86
| 37.363636
| 0.860952
| 0
| 0
| 0.363636
| 0
| 0
| 0.189781
| 0
| 0
| 0
| 0
| 0
| 0.227273
| 1
| 0.227273
| true
| 0
| 0.090909
| 0
| 0.318182
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
52a398b6a3a75fe1d23a016b4c672388d941c971
| 571
|
py
|
Python
|
swepc.python/swepc/flowinfo.py
|
hertzsprung/seamless-wave-uq
|
10a9b2e18d11cf3f4e711a90523f85758e5fb531
|
[
"MIT"
] | null | null | null |
swepc.python/swepc/flowinfo.py
|
hertzsprung/seamless-wave-uq
|
10a9b2e18d11cf3f4e711a90523f85758e5fb531
|
[
"MIT"
] | null | null | null |
swepc.python/swepc/flowinfo.py
|
hertzsprung/seamless-wave-uq
|
10a9b2e18d11cf3f4e711a90523f85758e5fb531
|
[
"MIT"
] | null | null | null |
import numpy as np
class FlowInfoEta:
    """Wave-speed queries for flows whose ``water`` column stores the free
    surface value; the depth is taken as ``water - z`` per element.
    """

    def __init__(self, g):
        # Gravitational acceleration used in the shallow-water celerity term.
        self.g = g

    def maxWaveSpeed(self, flow):
        """Return max over elements of |q/h| + sqrt(g*h), h = water - z.

        Returns 0.0 when ``flow.elements`` is zero.
        """
        fastest = 0.0
        for k in range(flow.elements):
            depth = flow.water[k, 0] - flow.z[k, 0]
            celerity = np.sqrt(self.g * depth)
            fastest = max(fastest, abs(flow.q[k, 0] / depth) + celerity)
        return fastest
class FlowInfoH:
    """Wave-speed queries for flows whose ``water`` column stores the depth
    ``h`` directly (no bed elevation subtraction).
    """

    def __init__(self, g):
        # Gravitational acceleration used in the shallow-water celerity term.
        self.g = g

    def maxWaveSpeed(self, flow):
        """Return max over elements of |q/h| + sqrt(g*h), h = water.

        Returns 0.0 when ``flow.elements`` is zero.
        """
        fastest = 0.0
        for k in range(flow.elements):
            depth = flow.water[k, 0]
            celerity = np.sqrt(self.g * depth)
            fastest = max(fastest, abs(flow.q[k, 0] / depth) + celerity)
        return fastest
| 23.791667
| 64
| 0.504378
| 94
| 571
| 2.978723
| 0.297872
| 0.107143
| 0.078571
| 0.085714
| 0.821429
| 0.821429
| 0.821429
| 0.821429
| 0.821429
| 0.821429
| 0
| 0.024259
| 0.350263
| 571
| 23
| 65
| 24.826087
| 0.730458
| 0
| 0
| 0.736842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.210526
| false
| 0
| 0.052632
| 0
| 0.473684
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
52b9df0a00d8ea9a2c078b1d65044e8a614e79cb
| 151
|
py
|
Python
|
config.py
|
boatybits/boaty_mon_lib
|
0ef32aef6598a370a873caa5965c2e610155c309
|
[
"CC0-1.0"
] | null | null | null |
config.py
|
boatybits/boaty_mon_lib
|
0ef32aef6598a370a873caa5965c2e610155c309
|
[
"CC0-1.0"
] | null | null | null |
config.py
|
boatybits/boaty_mon_lib
|
0ef32aef6598a370a873caa5965c2e610155c309
|
[
"CC0-1.0"
] | null | null | null |
{
"ssid" : "openplotter",
"password" : "12345678",
"ESP_IP_Address" : "10.10.10.161",
"udp_IP_Addr" : "10.10.10.1"
}
| 25.166667
| 41
| 0.476821
| 18
| 151
| 3.777778
| 0.666667
| 0.235294
| 0.176471
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.235294
| 0.324503
| 151
| 6
| 42
| 25.166667
| 0.431373
| 0
| 0
| 0
| 0
| 0
| 0.513158
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.166667
| 0
| null | null | 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
52c6a48c42ef6aa8b5327e31149cf7ad5176c7c2
| 23,630
|
py
|
Python
|
test/test_da.py
|
6Ulm/POT
|
28dd2fe2c461ac2287ae51e464f0b002d61f5f31
|
[
"MIT"
] | 1
|
2021-04-14T20:16:47.000Z
|
2021-04-14T20:16:47.000Z
|
test/test_da.py
|
6Ulm/POT
|
28dd2fe2c461ac2287ae51e464f0b002d61f5f31
|
[
"MIT"
] | null | null | null |
test/test_da.py
|
6Ulm/POT
|
28dd2fe2c461ac2287ae51e464f0b002d61f5f31
|
[
"MIT"
] | 1
|
2020-12-07T08:47:55.000Z
|
2020-12-07T08:47:55.000Z
|
"""Tests for module da on Domain Adaptation """
# Author: Remi Flamary <remi.flamary@unice.fr>
#
# License: MIT License
import numpy as np
from numpy.testing import assert_allclose, assert_equal
import pytest
import ot
from ot.datasets import make_data_classif
from ot.utils import unif
try: # test if cudamat installed
import sklearn # noqa: F401
nosklearn = False
except ImportError:
nosklearn = True
def test_sinkhorn_lpl1_transport_class():
    """Exercise ot.da.SinkhornLpl1Transport end to end: fitted attributes,
    coupling shape and marginals, (inverse) transform including out-of-sample
    inputs, label propagation, and unsupervised vs semi-supervised fits.
    """
    ns = 150
    nt = 200
    Xs, ys = make_data_classif('3gauss', ns)
    Xt, yt = make_data_classif('3gauss2', nt)
    otda = ot.da.SinkhornLpl1Transport()
    # test its computed
    otda.fit(Xs=Xs, ys=ys, Xt=Xt)
    assert hasattr(otda, "cost_")
    assert hasattr(otda, "coupling_")
    # test dimensions of coupling
    assert_equal(otda.cost_.shape, ((Xs.shape[0], Xt.shape[0])))
    assert_equal(otda.coupling_.shape, ((Xs.shape[0], Xt.shape[0])))
    # test margin constraints
    mu_s = unif(ns)
    mu_t = unif(nt)
    assert_allclose(
        np.sum(otda.coupling_, axis=0), mu_t, rtol=1e-3, atol=1e-3)
    assert_allclose(
        np.sum(otda.coupling_, axis=1), mu_s, rtol=1e-3, atol=1e-3)
    # test transform
    transp_Xs = otda.transform(Xs=Xs)
    assert_equal(transp_Xs.shape, Xs.shape)
    Xs_new, _ = make_data_classif('3gauss', ns + 1)
    transp_Xs_new = otda.transform(Xs_new)
    # check that the oos method is working
    assert_equal(transp_Xs_new.shape, Xs_new.shape)
    # test inverse transform
    transp_Xt = otda.inverse_transform(Xt=Xt)
    assert_equal(transp_Xt.shape, Xt.shape)
    Xt_new, _ = make_data_classif('3gauss2', nt + 1)
    transp_Xt_new = otda.inverse_transform(Xt=Xt_new)
    # check that the oos method is working
    assert_equal(transp_Xt_new.shape, Xt_new.shape)
    # test fit_transform
    transp_Xs = otda.fit_transform(Xs=Xs, ys=ys, Xt=Xt)
    assert_equal(transp_Xs.shape, Xs.shape)
    # check label propagation
    transp_yt = otda.transform_labels(ys)
    assert_equal(transp_yt.shape[0], yt.shape[0])
    assert_equal(transp_yt.shape[1], len(np.unique(ys)))
    # check inverse label propagation
    transp_ys = otda.inverse_transform_labels(yt)
    assert_equal(transp_ys.shape[0], ys.shape[0])
    assert_equal(transp_ys.shape[1], len(np.unique(yt)))
    # test unsupervised vs semi-supervised mode
    otda_unsup = ot.da.SinkhornLpl1Transport()
    otda_unsup.fit(Xs=Xs, ys=ys, Xt=Xt)
    n_unsup = np.sum(otda_unsup.cost_)
    otda_semi = ot.da.SinkhornLpl1Transport()
    otda_semi.fit(Xs=Xs, ys=ys, Xt=Xt, yt=yt)
    assert_equal(otda_semi.cost_.shape, ((Xs.shape[0], Xt.shape[0])))
    n_semisup = np.sum(otda_semi.cost_)
    # check that the cost matrix norms are indeed different
    assert n_unsup != n_semisup, "semisupervised mode not working"
    # check that the coupling forbids mass transport between labeled source
    # and labeled target samples
    mass_semi = np.sum(
        otda_semi.coupling_[otda_semi.cost_ == otda_semi.limit_max])
    assert mass_semi == 0, "semisupervised mode not working"
def test_sinkhorn_l1l2_transport_class():
    """Exercise ot.da.SinkhornL1l2Transport: fitted attributes, coupling
    shape and marginals, (inverse) transform with out-of-sample inputs,
    label propagation, unsupervised vs semi-supervised fits, and log mode.

    Fix over the previous revision: a redundant ``mass_semi = np.sum(...)``
    assignment that was immediately overwritten has been removed.
    """
    ns = 150
    nt = 200
    Xs, ys = make_data_classif('3gauss', ns)
    Xt, yt = make_data_classif('3gauss2', nt)
    otda = ot.da.SinkhornL1l2Transport()
    # test its computed
    otda.fit(Xs=Xs, ys=ys, Xt=Xt)
    assert hasattr(otda, "cost_")
    assert hasattr(otda, "coupling_")
    assert hasattr(otda, "log_")
    # test dimensions of coupling
    assert_equal(otda.cost_.shape, ((Xs.shape[0], Xt.shape[0])))
    assert_equal(otda.coupling_.shape, ((Xs.shape[0], Xt.shape[0])))
    # test margin constraints
    mu_s = unif(ns)
    mu_t = unif(nt)
    assert_allclose(
        np.sum(otda.coupling_, axis=0), mu_t, rtol=1e-3, atol=1e-3)
    assert_allclose(
        np.sum(otda.coupling_, axis=1), mu_s, rtol=1e-3, atol=1e-3)
    # test transform
    transp_Xs = otda.transform(Xs=Xs)
    assert_equal(transp_Xs.shape, Xs.shape)
    Xs_new, _ = make_data_classif('3gauss', ns + 1)
    transp_Xs_new = otda.transform(Xs_new)
    # check that the oos method is working
    assert_equal(transp_Xs_new.shape, Xs_new.shape)
    # test inverse transform
    transp_Xt = otda.inverse_transform(Xt=Xt)
    assert_equal(transp_Xt.shape, Xt.shape)
    # check label propagation
    transp_yt = otda.transform_labels(ys)
    assert_equal(transp_yt.shape[0], yt.shape[0])
    assert_equal(transp_yt.shape[1], len(np.unique(ys)))
    # check inverse label propagation
    transp_ys = otda.inverse_transform_labels(yt)
    assert_equal(transp_ys.shape[0], ys.shape[0])
    assert_equal(transp_ys.shape[1], len(np.unique(yt)))
    Xt_new, _ = make_data_classif('3gauss2', nt + 1)
    transp_Xt_new = otda.inverse_transform(Xt=Xt_new)
    # check that the oos method is working
    assert_equal(transp_Xt_new.shape, Xt_new.shape)
    # test fit_transform
    transp_Xs = otda.fit_transform(Xs=Xs, ys=ys, Xt=Xt)
    assert_equal(transp_Xs.shape, Xs.shape)
    # test unsupervised vs semi-supervised mode
    otda_unsup = ot.da.SinkhornL1l2Transport()
    otda_unsup.fit(Xs=Xs, ys=ys, Xt=Xt)
    n_unsup = np.sum(otda_unsup.cost_)
    otda_semi = ot.da.SinkhornL1l2Transport()
    otda_semi.fit(Xs=Xs, ys=ys, Xt=Xt, yt=yt)
    assert_equal(otda_semi.cost_.shape, ((Xs.shape[0], Xt.shape[0])))
    n_semisup = np.sum(otda_semi.cost_)
    # check that the cost matrix norms are indeed different
    assert n_unsup != n_semisup, "semisupervised mode not working"
    # check that the coupling forbids mass transport between labeled source
    # and labeled target samples
    mass_semi = otda_semi.coupling_[otda_semi.cost_ == otda_semi.limit_max]
    assert_allclose(mass_semi, np.zeros_like(mass_semi),
                    rtol=1e-9, atol=1e-9)
    # check everything runs well with log=True
    otda = ot.da.SinkhornL1l2Transport(log=True)
    otda.fit(Xs=Xs, ys=ys, Xt=Xt)
    assert len(otda.log_.keys()) != 0
def test_sinkhorn_transport_class():
    """Exercise ot.da.SinkhornTransport: fitted attributes, coupling shape
    and marginals, (inverse) transform with out-of-sample inputs, label
    propagation, unsupervised vs semi-supervised fits, and log mode.
    """
    ns = 150
    nt = 200
    Xs, ys = make_data_classif('3gauss', ns)
    Xt, yt = make_data_classif('3gauss2', nt)
    otda = ot.da.SinkhornTransport()
    # test its computed
    otda.fit(Xs=Xs, Xt=Xt)
    assert hasattr(otda, "cost_")
    assert hasattr(otda, "coupling_")
    assert hasattr(otda, "log_")
    # test dimensions of coupling
    assert_equal(otda.cost_.shape, ((Xs.shape[0], Xt.shape[0])))
    assert_equal(otda.coupling_.shape, ((Xs.shape[0], Xt.shape[0])))
    # test margin constraints
    mu_s = unif(ns)
    mu_t = unif(nt)
    assert_allclose(
        np.sum(otda.coupling_, axis=0), mu_t, rtol=1e-3, atol=1e-3)
    assert_allclose(
        np.sum(otda.coupling_, axis=1), mu_s, rtol=1e-3, atol=1e-3)
    # test transform
    transp_Xs = otda.transform(Xs=Xs)
    assert_equal(transp_Xs.shape, Xs.shape)
    Xs_new, _ = make_data_classif('3gauss', ns + 1)
    transp_Xs_new = otda.transform(Xs_new)
    # check that the oos method is working
    assert_equal(transp_Xs_new.shape, Xs_new.shape)
    # test inverse transform
    transp_Xt = otda.inverse_transform(Xt=Xt)
    assert_equal(transp_Xt.shape, Xt.shape)
    # check label propagation
    transp_yt = otda.transform_labels(ys)
    assert_equal(transp_yt.shape[0], yt.shape[0])
    assert_equal(transp_yt.shape[1], len(np.unique(ys)))
    # check inverse label propagation
    transp_ys = otda.inverse_transform_labels(yt)
    assert_equal(transp_ys.shape[0], ys.shape[0])
    assert_equal(transp_ys.shape[1], len(np.unique(yt)))
    Xt_new, _ = make_data_classif('3gauss2', nt + 1)
    transp_Xt_new = otda.inverse_transform(Xt=Xt_new)
    # check that the oos method is working
    assert_equal(transp_Xt_new.shape, Xt_new.shape)
    # test fit_transform
    transp_Xs = otda.fit_transform(Xs=Xs, Xt=Xt)
    assert_equal(transp_Xs.shape, Xs.shape)
    # test unsupervised vs semi-supervised mode
    otda_unsup = ot.da.SinkhornTransport()
    otda_unsup.fit(Xs=Xs, Xt=Xt)
    n_unsup = np.sum(otda_unsup.cost_)
    otda_semi = ot.da.SinkhornTransport()
    otda_semi.fit(Xs=Xs, ys=ys, Xt=Xt, yt=yt)
    assert_equal(otda_semi.cost_.shape, ((Xs.shape[0], Xt.shape[0])))
    n_semisup = np.sum(otda_semi.cost_)
    # check that the cost matrix norms are indeed different
    assert n_unsup != n_semisup, "semisupervised mode not working"
    # check that the coupling forbids mass transport between labeled source
    # and labeled target samples
    mass_semi = np.sum(
        otda_semi.coupling_[otda_semi.cost_ == otda_semi.limit_max])
    assert mass_semi == 0, "semisupervised mode not working"
    # check everything runs well with log=True
    otda = ot.da.SinkhornTransport(log=True)
    otda.fit(Xs=Xs, ys=ys, Xt=Xt)
    assert len(otda.log_.keys()) != 0
def test_unbalanced_sinkhorn_transport_class():
    """Exercise ot.da.UnbalancedSinkhornTransport: fitted attributes,
    coupling shape, (inverse) transform with out-of-sample inputs, label
    propagation, and log mode.

    NOTE(review): the unsupervised-vs-semisupervised comparison below
    instantiates ``SinkhornTransport`` rather than
    ``UnbalancedSinkhornTransport`` — possibly intentional, verify upstream.
    """
    ns = 150
    nt = 200
    Xs, ys = make_data_classif('3gauss', ns)
    Xt, yt = make_data_classif('3gauss2', nt)
    otda = ot.da.UnbalancedSinkhornTransport()
    # test its computed
    otda.fit(Xs=Xs, Xt=Xt)
    assert hasattr(otda, "cost_")
    assert hasattr(otda, "coupling_")
    assert hasattr(otda, "log_")
    # test dimensions of coupling
    assert_equal(otda.cost_.shape, ((Xs.shape[0], Xt.shape[0])))
    assert_equal(otda.coupling_.shape, ((Xs.shape[0], Xt.shape[0])))
    # test transform
    transp_Xs = otda.transform(Xs=Xs)
    assert_equal(transp_Xs.shape, Xs.shape)
    # check label propagation
    transp_yt = otda.transform_labels(ys)
    assert_equal(transp_yt.shape[0], yt.shape[0])
    assert_equal(transp_yt.shape[1], len(np.unique(ys)))
    # check inverse label propagation
    transp_ys = otda.inverse_transform_labels(yt)
    assert_equal(transp_ys.shape[0], ys.shape[0])
    assert_equal(transp_ys.shape[1], len(np.unique(yt)))
    Xs_new, _ = make_data_classif('3gauss', ns + 1)
    transp_Xs_new = otda.transform(Xs_new)
    # check that the oos method is working
    assert_equal(transp_Xs_new.shape, Xs_new.shape)
    # test inverse transform
    transp_Xt = otda.inverse_transform(Xt=Xt)
    assert_equal(transp_Xt.shape, Xt.shape)
    Xt_new, _ = make_data_classif('3gauss2', nt + 1)
    transp_Xt_new = otda.inverse_transform(Xt=Xt_new)
    # check that the oos method is working
    assert_equal(transp_Xt_new.shape, Xt_new.shape)
    # test fit_transform
    transp_Xs = otda.fit_transform(Xs=Xs, Xt=Xt)
    assert_equal(transp_Xs.shape, Xs.shape)
    # test unsupervised vs semi-supervised mode
    otda_unsup = ot.da.SinkhornTransport()
    otda_unsup.fit(Xs=Xs, Xt=Xt)
    n_unsup = np.sum(otda_unsup.cost_)
    otda_semi = ot.da.SinkhornTransport()
    otda_semi.fit(Xs=Xs, ys=ys, Xt=Xt, yt=yt)
    assert_equal(otda_semi.cost_.shape, ((Xs.shape[0], Xt.shape[0])))
    n_semisup = np.sum(otda_semi.cost_)
    # check that the cost matrix norms are indeed different
    assert n_unsup != n_semisup, "semisupervised mode not working"
    # check everything runs well with log=True
    otda = ot.da.SinkhornTransport(log=True)
    otda.fit(Xs=Xs, ys=ys, Xt=Xt)
    assert len(otda.log_.keys()) != 0
def test_emd_transport_class():
    """Exercise ot.da.EMDTransport: fitted attributes, coupling shape and
    marginals, (inverse) transform with out-of-sample inputs, label
    propagation, and unsupervised vs semi-supervised fits.

    Fix over the previous revision: a redundant ``mass_semi = np.sum(...)``
    assignment that was immediately overwritten has been removed.
    """
    ns = 150
    nt = 200
    Xs, ys = make_data_classif('3gauss', ns)
    Xt, yt = make_data_classif('3gauss2', nt)
    otda = ot.da.EMDTransport()
    # test its computed
    otda.fit(Xs=Xs, Xt=Xt)
    assert hasattr(otda, "cost_")
    assert hasattr(otda, "coupling_")
    # test dimensions of coupling
    assert_equal(otda.cost_.shape, ((Xs.shape[0], Xt.shape[0])))
    assert_equal(otda.coupling_.shape, ((Xs.shape[0], Xt.shape[0])))
    # test margin constraints
    mu_s = unif(ns)
    mu_t = unif(nt)
    assert_allclose(
        np.sum(otda.coupling_, axis=0), mu_t, rtol=1e-3, atol=1e-3)
    assert_allclose(
        np.sum(otda.coupling_, axis=1), mu_s, rtol=1e-3, atol=1e-3)
    # test transform
    transp_Xs = otda.transform(Xs=Xs)
    assert_equal(transp_Xs.shape, Xs.shape)
    Xs_new, _ = make_data_classif('3gauss', ns + 1)
    transp_Xs_new = otda.transform(Xs_new)
    # check that the oos method is working
    assert_equal(transp_Xs_new.shape, Xs_new.shape)
    # test inverse transform
    transp_Xt = otda.inverse_transform(Xt=Xt)
    assert_equal(transp_Xt.shape, Xt.shape)
    # check label propagation
    transp_yt = otda.transform_labels(ys)
    assert_equal(transp_yt.shape[0], yt.shape[0])
    assert_equal(transp_yt.shape[1], len(np.unique(ys)))
    # check inverse label propagation
    transp_ys = otda.inverse_transform_labels(yt)
    assert_equal(transp_ys.shape[0], ys.shape[0])
    assert_equal(transp_ys.shape[1], len(np.unique(yt)))
    Xt_new, _ = make_data_classif('3gauss2', nt + 1)
    transp_Xt_new = otda.inverse_transform(Xt=Xt_new)
    # check that the oos method is working
    assert_equal(transp_Xt_new.shape, Xt_new.shape)
    # test fit_transform
    transp_Xs = otda.fit_transform(Xs=Xs, Xt=Xt)
    assert_equal(transp_Xs.shape, Xs.shape)
    # test unsupervised vs semi-supervised mode
    otda_unsup = ot.da.EMDTransport()
    otda_unsup.fit(Xs=Xs, ys=ys, Xt=Xt)
    n_unsup = np.sum(otda_unsup.cost_)
    otda_semi = ot.da.EMDTransport()
    otda_semi.fit(Xs=Xs, ys=ys, Xt=Xt, yt=yt)
    assert_equal(otda_semi.cost_.shape, ((Xs.shape[0], Xt.shape[0])))
    n_semisup = np.sum(otda_semi.cost_)
    # check that the cost matrix norms are indeed different
    assert n_unsup != n_semisup, "semisupervised mode not working"
    # check that the coupling forbids mass transport between labeled source
    # and labeled target samples
    mass_semi = otda_semi.coupling_[otda_semi.cost_ == otda_semi.limit_max]
    # we need to use a small tolerance here, otherwise the test breaks
    assert_allclose(mass_semi, np.zeros_like(mass_semi),
                    rtol=1e-2, atol=1e-2)
def test_mapping_transport_class():
    """Exercise ot.da.MappingTransport with linear and gaussian kernels,
    with and without bias: fitted attributes, coupling/mapping shapes,
    marginals, transform with out-of-sample inputs, and log mode.
    """
    ns = 60
    nt = 120
    Xs, ys = make_data_classif('3gauss', ns)
    Xt, yt = make_data_classif('3gauss2', nt)
    Xs_new, _ = make_data_classif('3gauss', ns + 1)
    ##########################################################################
    # kernel == linear mapping tests
    ##########################################################################
    # check computation and dimensions if bias == False
    otda = ot.da.MappingTransport(kernel="linear", bias=False)
    otda.fit(Xs=Xs, Xt=Xt)
    assert hasattr(otda, "coupling_")
    assert hasattr(otda, "mapping_")
    assert hasattr(otda, "log_")
    assert_equal(otda.coupling_.shape, ((Xs.shape[0], Xt.shape[0])))
    assert_equal(otda.mapping_.shape, ((Xs.shape[1], Xt.shape[1])))
    # test margin constraints
    mu_s = unif(ns)
    mu_t = unif(nt)
    assert_allclose(
        np.sum(otda.coupling_, axis=0), mu_t, rtol=1e-3, atol=1e-3)
    assert_allclose(
        np.sum(otda.coupling_, axis=1), mu_s, rtol=1e-3, atol=1e-3)
    # test transform
    transp_Xs = otda.transform(Xs=Xs)
    assert_equal(transp_Xs.shape, Xs.shape)
    transp_Xs_new = otda.transform(Xs_new)
    # check that the oos method is working
    assert_equal(transp_Xs_new.shape, Xs_new.shape)
    # check computation and dimensions if bias == True
    otda = ot.da.MappingTransport(kernel="linear", bias=True)
    otda.fit(Xs=Xs, Xt=Xt)
    assert_equal(otda.coupling_.shape, ((Xs.shape[0], Xt.shape[0])))
    assert_equal(otda.mapping_.shape, ((Xs.shape[1] + 1, Xt.shape[1])))
    # test margin constraints
    mu_s = unif(ns)
    mu_t = unif(nt)
    assert_allclose(
        np.sum(otda.coupling_, axis=0), mu_t, rtol=1e-3, atol=1e-3)
    assert_allclose(
        np.sum(otda.coupling_, axis=1), mu_s, rtol=1e-3, atol=1e-3)
    # test transform
    transp_Xs = otda.transform(Xs=Xs)
    assert_equal(transp_Xs.shape, Xs.shape)
    transp_Xs_new = otda.transform(Xs_new)
    # check that the oos method is working
    assert_equal(transp_Xs_new.shape, Xs_new.shape)
    ##########################################################################
    # kernel == gaussian mapping tests
    ##########################################################################
    # check computation and dimensions if bias == False
    otda = ot.da.MappingTransport(kernel="gaussian", bias=False)
    otda.fit(Xs=Xs, Xt=Xt)
    assert_equal(otda.coupling_.shape, ((Xs.shape[0], Xt.shape[0])))
    assert_equal(otda.mapping_.shape, ((Xs.shape[0], Xt.shape[1])))
    # test margin constraints
    mu_s = unif(ns)
    mu_t = unif(nt)
    assert_allclose(
        np.sum(otda.coupling_, axis=0), mu_t, rtol=1e-3, atol=1e-3)
    assert_allclose(
        np.sum(otda.coupling_, axis=1), mu_s, rtol=1e-3, atol=1e-3)
    # test transform
    transp_Xs = otda.transform(Xs=Xs)
    assert_equal(transp_Xs.shape, Xs.shape)
    transp_Xs_new = otda.transform(Xs_new)
    # check that the oos method is working
    assert_equal(transp_Xs_new.shape, Xs_new.shape)
    # check computation and dimensions if bias == True
    otda = ot.da.MappingTransport(kernel="gaussian", bias=True)
    otda.fit(Xs=Xs, Xt=Xt)
    assert_equal(otda.coupling_.shape, ((Xs.shape[0], Xt.shape[0])))
    assert_equal(otda.mapping_.shape, ((Xs.shape[0] + 1, Xt.shape[1])))
    # test margin constraints
    mu_s = unif(ns)
    mu_t = unif(nt)
    assert_allclose(
        np.sum(otda.coupling_, axis=0), mu_t, rtol=1e-3, atol=1e-3)
    assert_allclose(
        np.sum(otda.coupling_, axis=1), mu_s, rtol=1e-3, atol=1e-3)
    # test transform
    transp_Xs = otda.transform(Xs=Xs)
    assert_equal(transp_Xs.shape, Xs.shape)
    transp_Xs_new = otda.transform(Xs_new)
    # check that the oos method is working
    assert_equal(transp_Xs_new.shape, Xs_new.shape)
    # check everything runs well with log=True
    otda = ot.da.MappingTransport(kernel="gaussian", log=True)
    otda.fit(Xs=Xs, Xt=Xt)
    assert len(otda.log_.keys()) != 0
def test_linear_mapping():
    """OT_mapping_linear should map the source so its covariance matches
    the target's (up to a loose tolerance)."""
    n_src = 150
    n_tgt = 200
    Xs, ys = make_data_classif('3gauss', n_src)
    Xt, yt = make_data_classif('3gauss2', n_tgt)
    A, b = ot.da.OT_mapping_linear(Xs, Xt)
    mapped = Xs.dot(A) + b
    cov_target = np.cov(Xt.T)
    cov_mapped = np.cov(mapped.T)
    np.testing.assert_allclose(cov_target, cov_mapped, rtol=1e-2, atol=1e-2)
def test_linear_mapping_class():
    """LinearTransport must expose the fitted maps (A_, B_, A1_, B1_) and
    transform the source to match the target covariance."""
    n_src = 150
    n_tgt = 200
    Xs, ys = make_data_classif('3gauss', n_src)
    Xt, yt = make_data_classif('3gauss2', n_tgt)
    mapper = ot.da.LinearTransport()
    mapper.fit(Xs=Xs, Xt=Xt)
    for fitted_attr in ("A_", "B_", "A1_", "B1_"):
        assert hasattr(mapper, fitted_attr)
    mapped = mapper.transform(Xs=Xs)
    cov_target = np.cov(Xt.T)
    cov_mapped = np.cov(mapped.T)
    np.testing.assert_allclose(cov_target, cov_mapped, rtol=1e-2, atol=1e-2)
def test_jcpot_transport_class():
    """Exercise ot.da.JCPOTTransport with two source domains: fitted
    attributes, per-domain coupling shapes and marginal constraints,
    transform with out-of-sample inputs, and label propagation.
    """
    ns1 = 150
    ns2 = 150
    nt = 200
    Xs1, ys1 = make_data_classif('3gauss', ns1)
    Xs2, ys2 = make_data_classif('3gauss', ns2)
    Xt, yt = make_data_classif('3gauss2', nt)
    Xs = [Xs1, Xs2]
    ys = [ys1, ys2]
    otda = ot.da.JCPOTTransport(reg_e=1, max_iter=10000, tol=1e-9, verbose=True, log=True)
    # test its computed
    otda.fit(Xs=Xs, ys=ys, Xt=Xt)
    assert hasattr(otda, "coupling_")
    assert hasattr(otda, "proportions_")
    assert hasattr(otda, "log_")
    # test dimensions of coupling
    for i, xs in enumerate(Xs):
        assert_equal(otda.coupling_[i].shape, ((xs.shape[0], Xt.shape[0])))
    # test all margin constraints
    mu_t = unif(nt)
    for i in range(len(Xs)):
        # test margin constraints w.r.t. uniform target weights for each coupling matrix
        assert_allclose(
            np.sum(otda.coupling_[i], axis=0), mu_t, rtol=1e-3, atol=1e-3)
        # test margin constraints w.r.t. modified source weights for each source domain
        assert_allclose(
            np.dot(otda.log_['D1'][i], np.sum(otda.coupling_[i], axis=1)), otda.proportions_, rtol=1e-3,
            atol=1e-3)
    # test transform
    transp_Xs = otda.transform(Xs=Xs)
    [assert_equal(x.shape, y.shape) for x, y in zip(transp_Xs, Xs)]
    Xs_new, _ = make_data_classif('3gauss', ns1 + 1)
    transp_Xs_new = otda.transform(Xs_new)
    # check that the oos method is working
    assert_equal(transp_Xs_new.shape, Xs_new.shape)
    # check label propagation
    transp_yt = otda.transform_labels(ys)
    assert_equal(transp_yt.shape[0], yt.shape[0])
    assert_equal(transp_yt.shape[1], len(np.unique(ys)))
    # check inverse label propagation
    transp_ys = otda.inverse_transform_labels(yt)
    [assert_equal(x.shape[0], y.shape[0]) for x, y in zip(transp_ys, ys)]
    [assert_equal(x.shape[1], len(np.unique(y))) for x, y in zip(transp_ys, ys)]
def test_jcpot_barycenter():
    """Check that jcpot_barycenter recovers the target class proportions."""
    ns1, ns2, nt = 150, 150, 200
    noise = 0.1

    np.random.seed(1985)

    # class proportions of each source and of the target
    p_source_1 = .2
    p_source_2 = .9
    p_target = .4

    Xs1, ys1 = make_data_classif('2gauss_prop', ns1, nz=noise, p=p_source_1)
    Xs2, ys2 = make_data_classif('2gauss_prop', ns2, nz=noise, p=p_source_2)
    Xt, yt = make_data_classif('2gauss_prop', nt, nz=noise, p=p_target)

    Xs = [Xs1, Xs2]
    ys = [ys1, ys2]

    estimated = ot.bregman.jcpot_barycenter(Xs, ys, Xt, reg=.5, metric='sqeuclidean',
                                            numItermax=10000, stopThr=1e-9,
                                            verbose=False, log=False)

    # the estimated proportions should match the true target proportions
    np.testing.assert_allclose(estimated, [1 - p_target, p_target], rtol=1e-3, atol=1e-3)
@pytest.mark.skipif(nosklearn, reason="No sklearn available")
def test_emd_laplace_class():
    """Check EMDLaplaceTransport: fitted attributes, coupling shape and
    marginals, (inverse) transform including out-of-sample data,
    fit_transform, and label propagation in both directions.

    Idiom fix: the original ran asserts via a list comprehension built only
    for its side effects; replaced with a plain for-loop.
    """
    ns = 150
    nt = 200

    Xs, ys = make_data_classif('3gauss', ns)
    Xt, yt = make_data_classif('3gauss2', nt)

    otda = ot.da.EMDLaplaceTransport(reg_lap=0.01, max_iter=1000, tol=1e-9, verbose=False, log=True)

    # test its computed
    otda.fit(Xs=Xs, ys=ys, Xt=Xt)

    assert hasattr(otda, "coupling_")
    assert hasattr(otda, "log_")

    # test dimensions of coupling
    assert_equal(otda.coupling_.shape, ((Xs.shape[0], Xt.shape[0])))

    # test all margin constraints
    mu_s = unif(ns)
    mu_t = unif(nt)

    assert_allclose(
        np.sum(otda.coupling_, axis=0), mu_t, rtol=1e-3, atol=1e-3)
    assert_allclose(
        np.sum(otda.coupling_, axis=1), mu_s, rtol=1e-3, atol=1e-3)

    # test transform: each transported sample keeps its shape
    transp_Xs = otda.transform(Xs=Xs)
    for transp_x, x in zip(transp_Xs, Xs):
        assert_equal(transp_x.shape, x.shape)

    Xs_new, _ = make_data_classif('3gauss', ns + 1)
    transp_Xs_new = otda.transform(Xs_new)

    # check that the oos method is working
    assert_equal(transp_Xs_new.shape, Xs_new.shape)

    # test inverse transform
    transp_Xt = otda.inverse_transform(Xt=Xt)
    assert_equal(transp_Xt.shape, Xt.shape)

    Xt_new, _ = make_data_classif('3gauss2', nt + 1)
    transp_Xt_new = otda.inverse_transform(Xt=Xt_new)

    # check that the oos method is working
    assert_equal(transp_Xt_new.shape, Xt_new.shape)

    # test fit_transform
    transp_Xs = otda.fit_transform(Xs=Xs, Xt=Xt)
    assert_equal(transp_Xs.shape, Xs.shape)

    # check label propagation
    transp_yt = otda.transform_labels(ys)
    assert_equal(transp_yt.shape[0], yt.shape[0])
    assert_equal(transp_yt.shape[1], len(np.unique(ys)))

    # check inverse label propagation
    transp_ys = otda.inverse_transform_labels(yt)
    assert_equal(transp_ys.shape[0], ys.shape[0])
    assert_equal(transp_ys.shape[1], len(np.unique(yt)))
| 30.929319
| 104
| 0.665722
| 3,591
| 23,630
| 4.170983
| 0.063771
| 0.069035
| 0.07264
| 0.032982
| 0.891574
| 0.878555
| 0.866604
| 0.860863
| 0.849312
| 0.844305
| 0
| 0.02334
| 0.196784
| 23,630
| 763
| 105
| 30.969856
| 0.765806
| 0.166187
| 0
| 0.805621
| 0
| 0
| 0.036169
| 0
| 0
| 0
| 0
| 0
| 0.362998
| 1
| 0.025761
| false
| 0
| 0.018735
| 0
| 0.044496
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5e0358cc7e0173f2ffc72bbf25e4b51094c075f0
| 2,289
|
py
|
Python
|
src/IceRayPy/core/material/instruction/constant.py
|
dmilos/IceRay
|
4e01f141363c0d126d3c700c1f5f892967e3d520
|
[
"MIT-0"
] | 2
|
2020-09-04T12:27:15.000Z
|
2022-01-17T14:49:40.000Z
|
src/IceRayPy/core/material/instruction/constant.py
|
dmilos/IceRay
|
4e01f141363c0d126d3c700c1f5f892967e3d520
|
[
"MIT-0"
] | null | null | null |
src/IceRayPy/core/material/instruction/constant.py
|
dmilos/IceRay
|
4e01f141363c0d126d3c700c1f5f892967e3d520
|
[
"MIT-0"
] | 1
|
2020-09-04T12:27:52.000Z
|
2020-09-04T12:27:52.000Z
|
import ctypes
import IceRayPy

# Short aliases for the ctypes helper and the IceRayPy basic types used by
# the wrapper classes below.
AddresOf = ctypes.addressof  # NOTE(review): name looks like a typo of "AddressOf"; kept for compatibility
SizeType = IceRayPy.type.basic.Size
ScalarType = IceRayPy.type.basic.Scalar
class Size:
    """Wraps the native constant-size instruction exposed by the IceRay DLL.

    Holds the DLL and the opaque native handle in ``m_cargo`` and releases
    the handle on garbage collection.
    """

    def __init__(self, P_dll, P_value, P_result):
        handle = P_dll.IceRayC_Material_Pigment_Surface_Instruction_Constant_Size0(
            SizeType(P_value), SizeType(P_result))
        self.m_cargo = {'dll': P_dll, 'this': handle}

    def __del__(self):
        # NOTE(review): if __init__ failed before m_cargo was set, this raises — confirm acceptable
        cargo = self.m_cargo
        cargo['dll'].IceRayC_Material_Pigment_Surface_Instruction_Release(cargo['this'])
class Scalar:
    """Wraps the native constant-scalar instruction exposed by the IceRay DLL.

    Holds the DLL and the opaque native handle in ``m_cargo`` and releases
    the handle on garbage collection.
    """

    def __init__(self, P_dll, P_value, P_result):
        handle = P_dll.IceRayC_Material_Pigment_Surface_Instruction_Constant_Scalar0(
            ScalarType(P_value), SizeType(P_result))
        self.m_cargo = {'dll': P_dll, 'this': handle}

    def __del__(self):
        cargo = self.m_cargo
        cargo['dll'].IceRayC_Material_Pigment_Surface_Instruction_Release(cargo['this'])
class Color:
    """Wraps the native constant-color instruction exposed by the IceRay DLL.

    The value is passed to the DLL by address (``AddresOf``); the resulting
    native handle is released on garbage collection.
    """

    def __init__(self, P_dll, P_value, P_result):
        handle = P_dll.IceRayC_Material_Pigment_Surface_Instruction_Constant_Color0(
            AddresOf(P_value), SizeType(P_result))
        self.m_cargo = {'dll': P_dll, 'this': handle}

    def __del__(self):
        cargo = self.m_cargo
        cargo['dll'].IceRayC_Material_Pigment_Surface_Instruction_Release(cargo['this'])
class Coord2D:
    """Wraps the native constant 2D-coordinate instruction of the IceRay DLL.

    The value is passed by address; the native handle is released on
    garbage collection.
    """

    def __init__(self, P_dll, P_value, P_result):
        handle = P_dll.IceRayC_Material_Pigment_Surface_Instruction_Constant_Coord_Scalar2D0(
            AddresOf(P_value), SizeType(P_result))
        self.m_cargo = {'dll': P_dll, 'this': handle}

    def __del__(self):
        cargo = self.m_cargo
        cargo['dll'].IceRayC_Material_Pigment_Surface_Instruction_Release(cargo['this'])
class Coord3D:
    """Wraps the native constant 3D-coordinate instruction of the IceRay DLL.

    The value is passed by address; the native handle is released on
    garbage collection.
    """

    def __init__(self, P_dll, P_value, P_result):
        handle = P_dll.IceRayC_Material_Pigment_Surface_Instruction_Constant_Coord_Scalar3D0(
            AddresOf(P_value), SizeType(P_result))
        self.m_cargo = {'dll': P_dll, 'this': handle}

    def __del__(self):
        cargo = self.m_cargo
        cargo['dll'].IceRayC_Material_Pigment_Surface_Instruction_Release(cargo['this'])
| 39.465517
| 166
| 0.692442
| 309
| 2,289
| 4.621359
| 0.126214
| 0.105042
| 0.210084
| 0.136555
| 0.860644
| 0.860644
| 0.860644
| 0.860644
| 0.860644
| 0.860644
| 0
| 0.004852
| 0.189602
| 2,289
| 57
| 167
| 40.157895
| 0.76496
| 0
| 0
| 0.625
| 0
| 0
| 0.038082
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.05
| 0
| 0.425
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5e252fc4c23d2244edf3630734765d1821f055b3
| 119,294
|
py
|
Python
|
build_model.py
|
armyja/CHAOS_GCN
|
685c84fdabf6db71cfc007cec41a72d900422920
|
[
"MIT"
] | 4
|
2019-10-05T12:54:51.000Z
|
2021-03-29T11:41:50.000Z
|
build_model.py
|
armyja/CHAOS_GCN
|
685c84fdabf6db71cfc007cec41a72d900422920
|
[
"MIT"
] | null | null | null |
build_model.py
|
armyja/CHAOS_GCN
|
685c84fdabf6db71cfc007cec41a72d900422920
|
[
"MIT"
] | 3
|
2019-10-05T12:54:55.000Z
|
2021-07-15T05:32:37.000Z
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from torchvision import models
from torchvision.models.resnet import Bottleneck
class GCN_BIG(nn.Module):
    """Two parallel full KxK convolutions whose outputs are summed and passed
    through a PReLU.

    Fix/generalization: the kernel size and padding were hard-coded to 7/3,
    silently ignoring the ``k`` parameter. They are now derived from ``k``
    (odd kernel sizes keep the spatial resolution). The default ``k=(7, 7)``
    reproduces the original behavior exactly.
    """

    def __init__(self, c, out_c, k=(7, 7)):  # out_Channel=21 in paper
        super(GCN_BIG, self).__init__()
        pad_l = (k[0] - 1) // 2
        pad_r = (k[1] - 1) // 2
        self.conv_l1 = nn.Conv2d(c, out_c, kernel_size=k[0], padding=pad_l)
        self.conv_r1 = nn.Conv2d(c, out_c, kernel_size=k[1], padding=pad_r)
        self.prelu = nn.PReLU()

    def forward(self, x):
        x_l = self.conv_l1(x)
        x_r = self.conv_r1(x)
        # fuse the two branches, then apply the learned activation
        x = x_l + x_r
        x = self.prelu(x)
        return x
class GCN_8(nn.Module):
    """Global Convolutional Network block: two separable KxK branches
    ((Kx1)(1xK) and (1xK)(Kx1)) summed and passed through a PReLU.

    Fix/generalization: padding was hard-coded to 3, which is only correct
    for k=7; it is now derived from ``k`` (odd kernel sizes keep the spatial
    resolution). The default ``k=(7, 7)`` reproduces the original behavior.
    """

    def __init__(self, c, out_c, k=(7, 7)):  # out_Channel=21 in paper
        super(GCN_8, self).__init__()
        pad_l = (k[0] - 1) // 2
        pad_r = (k[1] - 1) // 2
        self.conv_l1 = nn.Conv2d(c, out_c, kernel_size=(k[0], 1), padding=(pad_l, 0))
        self.conv_l2 = nn.Conv2d(out_c, out_c, kernel_size=(1, k[0]), padding=(0, pad_l))
        self.conv_r1 = nn.Conv2d(c, out_c, kernel_size=(1, k[1]), padding=(0, pad_r))
        self.conv_r2 = nn.Conv2d(out_c, out_c, kernel_size=(k[1], 1), padding=(pad_r, 0))
        self.prelu = nn.PReLU()

    def forward(self, x):
        x_l = self.conv_l1(x)
        x_l = self.conv_l2(x_l)
        x_r = self.conv_r1(x)
        x_r = self.conv_r2(x_r)
        # fuse the two separable branches, then apply the learned activation
        x = x_l + x_r
        x = self.prelu(x)
        return x
class GCN(nn.Module):
    """Global Convolutional Network block (no activation): two separable
    KxK branches summed.

    Fix/generalization: padding was hard-coded to 3, which is only correct
    for k=7; it is now derived from ``k``. The default ``k=(7, 7)``
    reproduces the original behavior exactly.
    """

    def __init__(self, c, out_c, k=(7, 7)):  # out_Channel=21 in paper
        super(GCN, self).__init__()
        pad_l = (k[0] - 1) // 2
        pad_r = (k[1] - 1) // 2
        self.conv_l1 = nn.Conv2d(c, out_c, kernel_size=(k[0], 1), padding=(pad_l, 0))
        self.conv_l2 = nn.Conv2d(out_c, out_c, kernel_size=(1, k[0]), padding=(0, pad_l))
        self.conv_r1 = nn.Conv2d(c, out_c, kernel_size=(1, k[1]), padding=(0, pad_r))
        self.conv_r2 = nn.Conv2d(out_c, out_c, kernel_size=(k[1], 1), padding=(pad_r, 0))

    def forward(self, x):
        x_l = self.conv_l1(x)
        x_l = self.conv_l2(x_l)
        x_r = self.conv_r1(x)
        x_r = self.conv_r2(x_r)
        # sum of the two separable branches; no activation in this variant
        x = x_l + x_r
        return x
class _ASPPModule(nn.Module):
def __init__(self, inplanes, planes, kernel_size, padding, dilation, BatchNorm):
super(_ASPPModule, self).__init__()
self.atrous_conv = nn.Conv2d(inplanes, planes, kernel_size=kernel_size,
stride=1, padding=padding, dilation=dilation, bias=False)
self.bn = BatchNorm(planes)
self.relu = nn.ReLU()
self._init_weight()
def forward(self, x):
x = self.atrous_conv(x)
x = self.bn(x)
return self.relu(x)
def _init_weight(self):
for m in self.modules():
if isinstance(m, nn.Conv2d):
torch.nn.init.kaiming_normal_(m.weight)
elif isinstance(m, nn.BatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
class ASPP(nn.Module):
def __init__(self, backbone, output_stride, BatchNorm):
super(ASPP, self).__init__()
if backbone == 'drn':
inplanes = 512
elif backbone == 'mobilenet':
inplanes = 320
else:
inplanes = backbone
if output_stride == 16:
dilations = [1, 6, 12, 18]
elif output_stride == 8:
dilations = [1, 12, 24, 36]
else:
raise NotImplementedError
self.aspp1 = _ASPPModule(inplanes, 5, 1, padding=0, dilation=dilations[0], BatchNorm=BatchNorm)
self.aspp2 = _ASPPModule(inplanes, 5, 3, padding=dilations[1], dilation=dilations[1], BatchNorm=BatchNorm)
self.aspp3 = _ASPPModule(inplanes, 5, 3, padding=dilations[2], dilation=dilations[2], BatchNorm=BatchNorm)
self.aspp4 = _ASPPModule(inplanes, 5, 3, padding=dilations[3], dilation=dilations[3], BatchNorm=BatchNorm)
self.global_avg_pool = nn.Sequential(nn.AdaptiveAvgPool2d((1, 1)),
nn.Conv2d(inplanes, 5, 1, stride=1, bias=False),
BatchNorm(5),
nn.ReLU())
self.conv1 = nn.Conv2d(25, 5, 1, bias=False)
self.bn1 = BatchNorm(5)
self.relu = nn.ReLU()
self.dropout = nn.Dropout(0.5)
self._init_weight()
def forward(self, x):
x1 = self.aspp1(x)
x2 = self.aspp2(x)
x3 = self.aspp3(x)
x4 = self.aspp4(x)
x5 = self.global_avg_pool(x)
x5 = F.interpolate(x5, size=x4.size()[2:], mode='bilinear', align_corners=True)
x = torch.cat((x1, x2, x3, x4, x5), dim=1)
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
return self.dropout(x)
def _init_weight(self):
for m in self.modules():
if isinstance(m, nn.Conv2d):
# n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
# m.weight.data.normal_(0, math.sqrt(2. / n))
torch.nn.init.kaiming_normal_(m.weight)
elif isinstance(m, nn.BatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
class BR_super_7(nn.Module):
    """Boundary-refinement block: a conv-relu-conv residual plus three
    parallel 1x1 / 3x3 / 5x5 branches, all added to the input.
    """

    def __init__(self, out_c, dilation=False, BatchNorm=nn.BatchNorm2d):
        super(BR_super_7, self).__init__()
        d_rate = 2 if dilation else 1
        self.relu = nn.ReLU(inplace=True)
        # residual pair
        self.conv1 = nn.Conv2d(out_c, out_c, kernel_size=3, padding=d_rate, dilation=d_rate)
        self.conv2 = nn.Conv2d(out_c, out_c, kernel_size=3, padding=d_rate, dilation=d_rate)
        # parallel branches
        self.conv_0_0 = nn.Conv2d(out_c, out_c, kernel_size=1)
        self.conv_1_0 = nn.Conv2d(out_c, out_c, kernel_size=1)
        self.conv_1_1 = nn.Conv2d(out_c, out_c, kernel_size=3, padding=1)
        self.conv_2_0 = nn.Conv2d(out_c, out_c, kernel_size=1)
        self.conv_2_1 = nn.Conv2d(out_c, out_c, kernel_size=5, padding=2)

    def forward(self, x):
        residual = self.conv2(self.relu(self.conv1(x)))
        branch0 = self.conv_0_0(x)
        branch1 = self.conv_1_1(self.conv_1_0(x))
        branch2 = self.conv_2_1(self.conv_2_0(x))
        return x + residual + branch0 + branch1 + branch2
class BR_super_7_SE(nn.Module):
    """BR_super_7 variant with a squeeze-and-excitation gate on the output."""

    def __init__(self, out_c, dilation=False, BatchNorm=nn.BatchNorm2d):
        super(BR_super_7_SE, self).__init__()
        d_rate = 2 if dilation else 1
        self.relu = nn.ReLU(inplace=True)
        # residual pair
        self.conv1 = nn.Conv2d(out_c, out_c, kernel_size=3, padding=d_rate, dilation=d_rate)
        self.conv2 = nn.Conv2d(out_c, out_c, kernel_size=3, padding=d_rate, dilation=d_rate)
        # parallel branches
        self.conv_0_0 = nn.Conv2d(out_c, out_c, kernel_size=1)
        self.conv_1_0 = nn.Conv2d(out_c, out_c, kernel_size=1)
        self.conv_1_1 = nn.Conv2d(out_c, out_c, kernel_size=3, padding=1)
        self.conv_2_0 = nn.Conv2d(out_c, out_c, kernel_size=1)
        self.conv_2_1 = nn.Conv2d(out_c, out_c, kernel_size=5, padding=2)
        # channel attention on the summed output
        self.se_layer = Selayer(out_c)

    def forward(self, x):
        residual = self.conv2(self.relu(self.conv1(x)))
        branch0 = self.conv_0_0(x)
        branch1 = self.conv_1_1(self.conv_1_0(x))
        branch2 = self.conv_2_1(self.conv_2_0(x))
        summed = x + residual + branch0 + branch1 + branch2
        return self.se_layer(summed)
class BR(nn.Module):
    """Plain boundary refinement: ``x + conv(relu(conv(x)))``."""

    def __init__(self, out_c, dilation=False):
        super(BR, self).__init__()
        d_rate = 2 if dilation else 1
        self.relu = nn.ReLU(inplace=True)
        self.conv1 = nn.Conv2d(out_c, out_c, kernel_size=3, padding=d_rate, dilation=d_rate)
        self.conv2 = nn.Conv2d(out_c, out_c, kernel_size=3, padding=d_rate, dilation=d_rate)

    def forward(self, x):
        residual = self.conv2(self.relu(self.conv1(x)))
        return x + residual
class Selayer(nn.Module):
    """Squeeze-and-excitation channel gate with a 4x bottleneck."""

    def __init__(self, inplanes):
        super(Selayer, self).__init__()
        self.global_avgpool = nn.AdaptiveAvgPool2d(1)
        self.conv1 = nn.Conv2d(inplanes, inplanes // 4, kernel_size=1, stride=1)
        self.conv2 = nn.Conv2d(inplanes // 4, inplanes, kernel_size=1, stride=1)
        self.relu = nn.ReLU(inplace=True)
        self.sigmoid = nn.Sigmoid()

    def forward(self, x):
        gate = self.global_avgpool(x)
        gate = self.relu(self.conv1(gate))
        gate = self.sigmoid(self.conv2(gate))
        # per-channel rescaling of the input
        return x * gate
class R2(nn.Module):
    """Grouped refinement block: split channels into `group` chunks, refine
    them with a cascade of 1x1 / 3x3 / 5x5 convolutions, concatenate, and
    apply a squeeze-and-excitation gate.
    """

    def __init__(self, out_c, group=4):
        super(R2, self).__init__()
        # entry / exit 1x1 projections
        self.conv_1x1_0 = nn.Conv2d(out_c, out_c, 1)
        self.conv_1x1_1 = nn.Conv2d(out_c, out_c, 1)
        self.group = group
        if out_c / group != out_c // group:
            raise ValueError("\"{}\" is not divisible by \"{}\".".format(out_c, group))
        self.p = int(out_c / group)
        self.conv_3x3_s = []  # kept for compatibility; not used
        chunk_c = int(out_c / group)  # channels per chunk
        # per-chunk feature convolutions
        self.conv_3x3_1 = nn.Conv2d(chunk_c, chunk_c, kernel_size=3, padding=1)
        self.conv_3x3_2 = nn.Conv2d(chunk_c, chunk_c, kernel_size=3, padding=1)
        self.conv_5x5_3 = nn.Conv2d(chunk_c, chunk_c, kernel_size=5, padding=2)
        # 1x1 "door" convolutions feeding each cascade stage
        self.conv_1x1_2 = nn.Conv2d(chunk_c, chunk_c, kernel_size=1)
        self.conv_1x1_3 = nn.Conv2d(chunk_c, chunk_c, kernel_size=1)
        self.conv_1x1_4 = nn.Conv2d(chunk_c, chunk_c, kernel_size=1)
        # channel attention on the re-assembled output
        self.se_layer = Selayer(out_c)

    def forward(self, x):
        x = self.conv_1x1_0(x)
        chunks = torch.chunk(x, self.group, dim=1)
        # cascade: each later stage consumes the previous stage's output
        a = self.conv_1x1_2(chunks[0])
        b = self.conv_3x3_1(chunks[1])
        c = self.conv_3x3_2(self.conv_1x1_3(b))
        d = self.conv_5x5_3(self.conv_1x1_4(c))
        out = self.conv_1x1_1(torch.cat((a, b, c, d), 1))
        return self.se_layer(out)
class R2_R_3(nn.Module):
    """Grouped refinement block with residual re-injection: later cascade
    stages concatenate their chunk with the previous stage's output before
    a 3x3 convolution; ends with a squeeze-and-excitation gate.
    """

    def __init__(self, out_c, group=4):
        super(R2_R_3, self).__init__()
        # entry / exit 1x1 projections
        self.conv_1x1_0 = nn.Conv2d(out_c, out_c, 1)
        self.conv_1x1_1 = nn.Conv2d(out_c, out_c, 1)
        self.group = group
        if out_c / group != out_c // group:
            raise ValueError("\"{}\" is not divisible by \"{}\".".format(out_c, group))
        self.p = int(out_c / group)
        self.conv_3x3_s = []  # kept for compatibility; not used
        chunk_c = int(out_c / group)  # channels per chunk
        # per-chunk feature convolutions (later stages take 2*chunk_c inputs)
        self.conv_3x3_1 = nn.Conv2d(chunk_c, chunk_c, kernel_size=3, padding=1)
        self.conv_3x3_2 = nn.Conv2d(chunk_c * 2, chunk_c, kernel_size=3, padding=1)
        self.conv_3x3_3 = nn.Conv2d(chunk_c * 2, chunk_c, kernel_size=3, padding=1)
        # 1x1 "door" convolutions feeding each cascade stage
        self.conv_1x1_2 = nn.Conv2d(chunk_c, chunk_c, kernel_size=1)
        self.conv_1x1_3 = nn.Conv2d(chunk_c, chunk_c, kernel_size=1)
        self.conv_1x1_4 = nn.Conv2d(chunk_c, chunk_c, kernel_size=1)
        # channel attention on the re-assembled output
        self.se_layer = Selayer(out_c)

    def forward(self, x):
        x = self.conv_1x1_0(x)
        chunks = torch.chunk(x, self.group, dim=1)
        a = self.conv_1x1_2(chunks[0])
        b = self.conv_3x3_1(chunks[1])
        c = self.conv_3x3_2(torch.cat((chunks[2], self.conv_1x1_3(b)), 1))
        d = self.conv_3x3_3(torch.cat((chunks[3], self.conv_1x1_4(c)), 1))
        out = self.conv_1x1_1(torch.cat((a, b, c, d), 1))
        return self.se_layer(out)
class R2_R_NO_SE(nn.Module):
    """Grouped refinement block with residual re-injection but no
    squeeze-and-excitation gate; the last cascade stage uses a 5x5 conv.
    """

    def __init__(self, out_c, group=4):
        super(R2_R_NO_SE, self).__init__()
        # entry / exit 1x1 projections
        self.conv_1x1_0 = nn.Conv2d(out_c, out_c, 1)
        self.conv_1x1_1 = nn.Conv2d(out_c, out_c, 1)
        self.group = group
        if out_c / group != out_c // group:
            raise ValueError("\"{}\" is not divisible by \"{}\".".format(out_c, group))
        self.p = int(out_c / group)
        self.conv_3x3_s = []  # kept for compatibility; not used
        chunk_c = int(out_c / group)  # channels per chunk
        # per-chunk feature convolutions (later stages take 2*chunk_c inputs)
        self.conv_3x3_1 = nn.Conv2d(chunk_c, chunk_c, kernel_size=3, padding=1)
        self.conv_3x3_2 = nn.Conv2d(chunk_c * 2, chunk_c, kernel_size=3, padding=1)
        self.conv_5x5_3 = nn.Conv2d(chunk_c * 2, chunk_c, kernel_size=5, padding=2)
        # 1x1 "door" convolutions feeding each cascade stage
        self.conv_1x1_2 = nn.Conv2d(chunk_c, chunk_c, kernel_size=1)
        self.conv_1x1_3 = nn.Conv2d(chunk_c, chunk_c, kernel_size=1)
        self.conv_1x1_4 = nn.Conv2d(chunk_c, chunk_c, kernel_size=1)

    def forward(self, x):
        x = self.conv_1x1_0(x)
        chunks = torch.chunk(x, self.group, dim=1)
        a = self.conv_1x1_2(chunks[0])
        b = self.conv_3x3_1(chunks[1])
        c = self.conv_3x3_2(torch.cat((chunks[2], self.conv_1x1_3(b)), 1))
        d = self.conv_5x5_3(torch.cat((chunks[3], self.conv_1x1_4(c)), 1))
        return self.conv_1x1_1(torch.cat((a, b, c, d), 1))
class R2_R(nn.Module):
    """Grouped refinement block with residual re-injection, a 5x5 final
    cascade stage, and a squeeze-and-excitation gate on the output.
    """

    def __init__(self, out_c, group=4):
        super(R2_R, self).__init__()
        # entry / exit 1x1 projections
        self.conv_1x1_0 = nn.Conv2d(out_c, out_c, 1)
        self.conv_1x1_1 = nn.Conv2d(out_c, out_c, 1)
        self.group = group
        if out_c / group != out_c // group:
            raise ValueError("\"{}\" is not divisible by \"{}\".".format(out_c, group))
        self.p = int(out_c / group)
        self.conv_3x3_s = []  # kept for compatibility; not used
        chunk_c = int(out_c / group)  # channels per chunk
        # per-chunk feature convolutions (later stages take 2*chunk_c inputs)
        self.conv_3x3_1 = nn.Conv2d(chunk_c, chunk_c, kernel_size=3, padding=1)
        self.conv_3x3_2 = nn.Conv2d(chunk_c * 2, chunk_c, kernel_size=3, padding=1)
        self.conv_5x5_3 = nn.Conv2d(chunk_c * 2, chunk_c, kernel_size=5, padding=2)
        # 1x1 "door" convolutions feeding each cascade stage
        self.conv_1x1_2 = nn.Conv2d(chunk_c, chunk_c, kernel_size=1)
        self.conv_1x1_3 = nn.Conv2d(chunk_c, chunk_c, kernel_size=1)
        self.conv_1x1_4 = nn.Conv2d(chunk_c, chunk_c, kernel_size=1)
        # channel attention on the re-assembled output
        self.se_layer = Selayer(out_c)

    def forward(self, x):
        x = self.conv_1x1_0(x)
        chunks = torch.chunk(x, self.group, dim=1)
        a = self.conv_1x1_2(chunks[0])
        b = self.conv_3x3_1(chunks[1])
        c = self.conv_3x3_2(torch.cat((chunks[2], self.conv_1x1_3(b)), 1))
        d = self.conv_5x5_3(torch.cat((chunks[3], self.conv_1x1_4(c)), 1))
        out = self.conv_1x1_1(torch.cat((a, b, c, d), 1))
        return self.se_layer(out)
# dense upsample
class DU(nn.Module):
    """Dense upsample: 1x1 conv + BatchNorm, then 2x bilinear upsampling."""

    def __init__(self, in_channel, out_channel):
        super(DU, self).__init__()
        self.conv1 = nn.Conv2d(in_channel, out_channel, kernel_size=1, stride=1)
        self.bn = nn.BatchNorm2d(out_channel)

    def forward(self, x):
        out = self.bn(self.conv1(x))
        return F.interpolate(out, scale_factor=2, mode='bilinear', align_corners=True)
class BottleneckX(nn.Module):
    """Channel reduction: 1x1 conv followed by BatchNorm."""

    def __init__(self, in_channel, out_channel):
        super(BottleneckX, self).__init__()
        self.conv1 = nn.Conv2d(in_channel, out_channel, kernel_size=1, stride=1)
        self.bn = nn.BatchNorm2d(out_channel)

    def forward(self, x):
        return self.bn(self.conv1(x))
# add 3 GCB
# add 3 (plus + B)
# dilated conv
# problem: label 4 prob map include label 1's prob
# delete layer_4
# super boundary refine
class FCN_GCN_GCN_NO_SE(nn.Module):
    """FCN with Global Convolutional Network blocks over a ResNet-50
    backbone; variant whose R2 refinement blocks have no SE gate.

    Each backbone stage is mapped through a GCN_8 block and an R2_R_NO_SE
    block; the pyramid is fused top-down via DU (upsample) + BottleneckX
    (channel reduce) + further R2_R_NO_SE refinement. Four 20-channel maps
    are each folded into num_classes channels by summing 4 chunks, and the
    four score maps are summed into the output.

    NOTE(review): the hard-coded 20/40/60/80/100 channel counts assume
    num_classes == 5 (num_classes * 4 == 20) — confirm with callers.
    """

    def __init__(self, num_classes):
        super(FCN_GCN_GCN_NO_SE, self).__init__()
        self.num_classes = num_classes  # 21 in paper
        resnet = models.resnet50(pretrained=True)

        # input = 256x256
        self.conv0 = nn.Conv2d(3, 64, kernel_size=1, stride=1)  # 256x256, 64
        self.bn0 = resnet.bn1  # BatchNorm2d(64)?
        self.relu = resnet.relu

        self.layer1 = resnet.layer1  # res-2 o/p = 128x128,256
        self.layer2 = resnet.layer2  # res-3 o/p = 64x64,512
        self.layer3 = resnet.layer3  # res-4 o/p = 32x32,1024
        self.layer4 = resnet.layer4  # res-5 o/p = 16x16,2048

        # GCN blocks per backbone stage
        self.gcn1 = GCN_8(256, self.num_classes * 4)
        self.gcn2 = GCN_8(512, self.num_classes * 4)
        self.gcn3 = GCN_8(1024, self.num_classes * 4)
        self.gcn4 = GCN_8(2048, self.num_classes * 4)

        # R2 refinement blocks (no SE) after each GCN and at each fusion step
        self.gcn1_1 = R2_R_NO_SE(out_c=self.num_classes * 4, group=4)
        self.gcn1_2 = R2_R_NO_SE(out_c=self.num_classes * 4, group=4)
        self.gcn1_3 = R2_R_NO_SE(out_c=self.num_classes * 4, group=4)
        self.gcn1_4 = R2_R_NO_SE(out_c=self.num_classes * 4, group=4)
        self.gcn2_1 = R2_R_NO_SE(out_c=self.num_classes * 4, group=4)
        self.gcn2_2 = R2_R_NO_SE(out_c=self.num_classes * 4, group=4)
        self.gcn2_3 = R2_R_NO_SE(out_c=self.num_classes * 4, group=4)
        self.gcn3_1 = R2_R_NO_SE(out_c=self.num_classes * 4, group=4)
        self.gcn3_2 = R2_R_NO_SE(out_c=self.num_classes * 4, group=4)
        self.gcn3_3 = R2_R_NO_SE(out_c=self.num_classes * 4, group=4)
        self.gcn4_1 = R2_R_NO_SE(out_c=self.num_classes * 4, group=4)

        # dense upsample units used in the top-down fusion
        self.DU_01 = DU(20, 20)
        self.DU_02 = DU(20, 20)
        self.DU_03 = DU(20, 20)
        self.DU_04 = DU(20, 20)
        self.DU_05 = DU(20, 20)
        self.DU_06 = DU(20, 20)
        self.DU_07 = DU(20, 20)
        self.DU_08 = DU(20, 20)
        self.DU_09 = DU(20, 20)

        # channel reducers after each concat
        self.bottleneck_01 = BottleneckX(40, 20)
        self.bottleneck_02 = BottleneckX(60, 20)
        self.bottleneck_03 = BottleneckX(80, 20)
        self.bottleneck_04 = BottleneckX(100, 20)
        self.bottleneck_05 = BottleneckX(40, 20)
        self.bottleneck_06 = BottleneckX(60, 20)
        self.bottleneck_07 = BottleneckX(80, 20)
        self.bottleneck_08 = BottleneckX(40, 20)
        self.bottleneck_09 = BottleneckX(60, 20)

        # 1x1 projections before scoring
        self.conv_1x1_1 = nn.Conv2d(20, 20, 1)
        self.conv_1x1_2 = nn.Conv2d(20, 20, 1)
        self.conv_1x1_3 = nn.Conv2d(20, 20, 1)
        self.conv_1x1_4 = nn.Conv2d(20, 20, 1)

    def _classifier(self, in_c):
        """Build a small conv classifier head (not used by forward).

        Bug fix: ``in_c / 2`` is a float in Python 3, which nn.Conv2d /
        nn.BatchNorm2d reject; the original also normalized over in_c // 2
        channels while the first conv emitted in_c. The head now halves the
        channels in the first conv so the widths are consistent.
        """
        return nn.Sequential(
            nn.Conv2d(in_c, in_c // 2, 3, padding=1, bias=False),
            nn.BatchNorm2d(in_c // 2),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_c // 2, self.num_classes, 1),
        )

    def forward(self, x, debug=False, viz=None, patient=None, slice_index=None):
        x = self.conv0(x)
        x = self.bn0(x)
        x = self.relu(x)

        fm1 = self.layer1(x)    # 256
        fm2 = self.layer2(fm1)  # 128
        fm3 = self.layer3(fm2)  # 64
        fm4 = self.layer4(fm3)  # 32

        # GCN + R2 refinement on each backbone stage
        gc_fm1 = self.gcn1_1(self.gcn1(fm1))
        gc_fm2 = self.gcn2_1(self.gcn2(fm2))
        gc_fm3 = self.gcn3_1(self.gcn3(fm3))
        gc_fm4 = self.gcn4_1(self.gcn4(fm4))

        gc_fm4_L = gc_fm4

        # level-3 fusion
        x = self.DU_08(gc_fm4)
        x = torch.cat((gc_fm3, x), 1)
        x = self.bottleneck_08(x)
        gc_fm3_1 = self.gcn3_2(x)

        x = self.DU_09(gc_fm4_L)
        x = torch.cat((gc_fm3, gc_fm3_1, x), 1)
        gc_fm3_L = self.bottleneck_09(x)

        # level-2 fusion
        x = self.DU_05(gc_fm3)
        x = torch.cat((gc_fm2, x), 1)
        x = self.bottleneck_05(x)
        gc_fm2_1 = self.gcn2_2(x)

        x = self.DU_06(gc_fm3_1)
        x = torch.cat((gc_fm2, gc_fm2_1, x), 1)
        x = self.bottleneck_06(x)
        gc_fm2_2 = self.gcn2_3(x)

        x = self.DU_07(gc_fm3_L)
        x = torch.cat((gc_fm2, gc_fm2_1, gc_fm2_2, x), 1)
        gc_fm2_L = self.bottleneck_07(x)

        # level-1 fusion
        x = self.DU_01(gc_fm2)
        x = torch.cat((gc_fm1, x), 1)
        x = self.bottleneck_01(x)
        gc_fm1_1 = self.gcn1_2(x)

        x = self.DU_02(gc_fm2_1)
        x = torch.cat((gc_fm1, gc_fm1_1, x), 1)
        x = self.bottleneck_02(x)
        gc_fm1_2 = self.gcn1_3(x)

        x = self.DU_03(gc_fm2_2)
        x = torch.cat((gc_fm1, gc_fm1_1, gc_fm1_2, x), 1)
        x = self.bottleneck_03(x)
        gc_fm1_3 = self.gcn1_4(x)

        x = self.DU_04(gc_fm2_L)
        x = torch.cat((gc_fm1, gc_fm1_1, gc_fm1_2, gc_fm1_3, x), 1)
        gc_fm1_L = self.bottleneck_04(x)

        # each 20-channel map folds into num_classes channels (sum of 4 chunks)
        x = self.conv_1x1_1(gc_fm1_1)
        s1, s2, s3, s4 = torch.chunk(x, 4, 1)
        score_1 = s1 + s2 + s3 + s4

        x = self.conv_1x1_2(gc_fm1_2)
        s1, s2, s3, s4 = torch.chunk(x, 4, 1)
        score_2 = s1 + s2 + s3 + s4

        x = self.conv_1x1_3(gc_fm1_3)
        s1, s2, s3, s4 = torch.chunk(x, 4, 1)
        score_3 = s1 + s2 + s3 + s4

        x = self.conv_1x1_4(gc_fm1_L)
        s1, s2, s3, s4 = torch.chunk(x, 4, 1)
        score_4 = s1 + s2 + s3 + s4

        out = score_1 + score_2 + score_3 + score_4

        if debug:  # idiom fix: was `if debug is True`
            self.heatmap(score_1, viz, patient, slice_index, 'score_1')
            self.heatmap(score_2, viz, patient, slice_index, 'score_2')
            self.heatmap(score_3, viz, patient, slice_index, 'score_3')
            self.heatmap(score_4, viz, patient, slice_index, 'score_4')
            self.heatmap(out, viz, patient, slice_index, 'out')

        return out

    def heatmap(self, input, viz, patient, slice_index, name):
        """Render every channel of `input` as a heatmap via `viz.heatmap`."""
        n, c, h, w = input.shape
        fm1 = input.view(-1, h, w)
        c, h, w = fm1.shape
        for i in range(c):
            viz.heatmap(fm1[i], opts=dict(title=f'{patient + 1}_{slice_index + 1}_{name}_input_class_{i}'))
# add 3 GCB
# add 3 (plus + B)
# dilated conv
# problem: label 4 prob map include label 1's prob
# delete layer_4
# super boundary refine
class FCN_GCN(nn.Module):
def __init__(self, num_classes):
super(FCN_GCN, self).__init__()
self.num_classes = num_classes # 21 in paper
resnet = models.resnet50(pretrained=True)
# input = 256x256
self.conv0 = nn.Conv2d(3, 64, kernel_size=1, stride=1) # 256x256, 64
# self.conv1 = resnet.conv1
self.bn0 = resnet.bn1 # BatchNorm2d(64)?
self.relu = resnet.relu
# self.layer0 = resnet.layer1 # res-2 o/p = 128x128,256
self.layer1 = resnet.layer1 # res-2 o/p = 128x128,256
self.layer2 = resnet.layer2 # res-3 o/p = 64x64,512
self.layer3 = resnet.layer3 # res-4 o/p = 32x32,1024
self.layer4 = resnet.layer4 # res-5 o/p = 16x16,2048
# self.layer5 = resnet._make_layer(Bottleneck, 1024, 3, stride=2) # res-5 o/p = 16x16,4096
self.gcn1 = GCN_8(256, self.num_classes * 4) # gcn_i after layer-1
self.gcn2 = GCN_8(512, self.num_classes * 4)
self.gcn3 = GCN_8(1024, self.num_classes * 4)
self.gcn4 = GCN_8(2048, self.num_classes * 4)
# self.gcn5 = GCN_8(4096, self.num_classes * 4)
self.gcn1_1 = R2_R(out_c=self.num_classes * 4, group=4) # gcn_i after layer-1
self.gcn1_2 = R2_R(out_c=self.num_classes * 4, group=4) # gcn_i after layer-1
self.gcn1_3 = R2_R(out_c=self.num_classes * 4, group=4) # gcn_i after layer-1
self.gcn1_4 = R2_R(out_c=self.num_classes * 4, group=4) # gcn_i after layer-1
self.gcn2_1 = R2_R(out_c=self.num_classes * 4, group=4) # gcn_i after layer-1
self.gcn2_2 = R2_R(out_c=self.num_classes * 4, group=4) # gcn_i after layer-1
self.gcn2_3 = R2_R(out_c=self.num_classes * 4, group=4) # gcn_i after layer-1
self.gcn3_1 = R2_R(out_c=self.num_classes * 4, group=4) # gcn_i after layer-1
self.gcn3_2 = R2_R(out_c=self.num_classes * 4, group=4) # gcn_i after layer-1
self.gcn3_3 = R2_R(out_c=self.num_classes * 4, group=4) # gcn_i after layer-1
self.gcn4_1 = R2_R(out_c=self.num_classes * 4, group=4) # gcn_i after layer-1
# self.gcn5_1 = R2_R(out_c=self.num_classes * 4, group=4) # gcn_i after layer-1
self.DU_01 = DU(20, 20)
self.DU_02 = DU(20, 20)
self.DU_03 = DU(20, 20)
self.DU_04 = DU(20, 20)
self.DU_05 = DU(20, 20)
self.DU_06 = DU(20, 20)
self.DU_07 = DU(20, 20)
self.DU_08 = DU(20, 20)
self.DU_09 = DU(20, 20)
# self.DU_10 = DU(20, 20)
self.bottleneck_01 = BottleneckX(40, 20)
self.bottleneck_02 = BottleneckX(60, 20)
self.bottleneck_03 = BottleneckX(80, 20)
self.bottleneck_04 = BottleneckX(100, 20)
self.bottleneck_05 = BottleneckX(40, 20)
self.bottleneck_06 = BottleneckX(60, 20)
self.bottleneck_07 = BottleneckX(80, 20)
self.bottleneck_08 = BottleneckX(40, 20)
self.bottleneck_09 = BottleneckX(60, 20)
# self.bottleneck_10 = BottleneckX(40, 20)
self.conv_1x1_1 = nn.Conv2d(20, 20, 1)
self.conv_1x1_2 = nn.Conv2d(20, 20, 1)
self.conv_1x1_3 = nn.Conv2d(20, 20, 1)
self.conv_1x1_4 = nn.Conv2d(20, 20, 1)
def _classifier(self, in_c):
return nn.Sequential(
nn.Conv2d(in_c, in_c, 3, padding=1, bias=False),
nn.BatchNorm2d(in_c / 2),
nn.ReLU(inplace=True),
# nn.Dropout(.5),
nn.Conv2d(in_c / 2, self.num_classes, 1),
)
    def forward(self, x, debug=False, viz=None, patient=None, slice_index=None):
        """Compute class logits via GCN/R2 branches and dense top-down fusion.

        Args:
            x: input image batch, presumably (N, 3, H, W) — TODO confirm.
            debug: when exactly True, push per-channel heatmaps to ``viz``.
            viz: visdom-like object with a ``heatmap`` method (only for debug).
            patient, slice_index: identifiers used in debug plot titles.

        Returns:
            Summed score map with ``num_classes`` channels.

        NOTE(review): the DU/BottleneckX channel counts (20/40/.../100) are
        hard-coded to num_classes * 4 == 20, i.e. this appears to require
        num_classes == 5 — confirm against callers.
        """
        # input = x # 256
        # Stem: 1x1 conv keeps the input resolution, then BN + ReLU.
        x = self.conv0(x)
        x = self.bn0(x)
        x = self.relu(x)
        fm0 = x # 256
        # ResNet stages; comments give the expected spatial size.
        fm1 = self.layer1(x) # 256
        fm2 = self.layer2(fm1) # 128
        fm3 = self.layer3(fm2) # 64
        fm4 = self.layer4(fm3) # 32
        # fm5 = self.layer5(fm4) # 32
        # print(fm4.size())
        # R2
        # Project each stage to num_classes * 4 channels (GCN then R2 block).
        gc_fm1 = self.gcn1_1(self.gcn1(fm1))
        gc_fm2 = self.gcn2_1(self.gcn2(fm2))
        gc_fm3 = self.gcn3_1(self.gcn3(fm3))
        gc_fm4 = self.gcn4_1(self.gcn4(fm4))
        # gc_fm5 = self.gcn5_1(self.gcn5(fm5))
        # DU ok
        # gc_fm4_L = self.DU_10(gc_fm5)
        # gc_fm4_L = torch.cat((gc_fm4, gc_fm4_L), 1)
        # gc_fm4_L = self.bottleneck_10(gc_fm4_L)
        gc_fm4_L = gc_fm4
        # ok
        # Level 3: fuse upsampled level-4 features (DU upsamples, BottleneckX
        # squeezes the concatenation back to 20 channels).
        x = self.DU_08(gc_fm4)
        x = torch.cat((gc_fm3, x), 1)
        x = self.bottleneck_08(x)
        gc_fm3_1 = self.gcn3_2(x)
        # ok
        x = self.DU_09(gc_fm4_L)
        x = torch.cat((gc_fm3, gc_fm3_1, x), 1)
        gc_fm3_L = self.bottleneck_09(x)
        # ok
        # Level 2: densely fuse every level-3 branch produced so far.
        x = self.DU_05(gc_fm3)
        x = torch.cat((gc_fm2, x), 1)
        x = self.bottleneck_05(x)
        gc_fm2_1 = self.gcn2_2(x)
        x = self.DU_06(gc_fm3_1)
        x = torch.cat((gc_fm2, gc_fm2_1, x), 1)
        x = self.bottleneck_06(x)
        gc_fm2_2 = self.gcn2_3(x)
        x = self.DU_07(gc_fm3_L)
        x = torch.cat((gc_fm2, gc_fm2_1, gc_fm2_2, x), 1)
        gc_fm2_L = self.bottleneck_07(x)
        # Level 1: densely fuse every level-2 branch produced so far.
        x = self.DU_01(gc_fm2)
        x = torch.cat((gc_fm1, x), 1)
        x = self.bottleneck_01(x)
        gc_fm1_1 = self.gcn1_2(x)
        x = self.DU_02(gc_fm2_1)
        x = torch.cat((gc_fm1, gc_fm1_1, x), 1)
        x = self.bottleneck_02(x)
        gc_fm1_2 = self.gcn1_3(x)
        x = self.DU_03(gc_fm2_2)
        x = torch.cat((gc_fm1, gc_fm1_1, gc_fm1_2, x), 1)
        x = self.bottleneck_03(x)
        gc_fm1_3 = self.gcn1_4(x)
        x = self.DU_04(gc_fm2_L)
        x = torch.cat((gc_fm1, gc_fm1_1, gc_fm1_2, gc_fm1_3, x), 1)
        gc_fm1_L = self.bottleneck_04(x)
        # gc_fm1_1 = F.interpolate(gc_fm1_1, scale_factor=2, mode='bilinear', align_corners=True)
        # gc_fm1_2 = F.interpolate(gc_fm1_2, scale_factor=2, mode='bilinear', align_corners=True)
        # gc_fm1_3 = F.interpolate(gc_fm1_3, scale_factor=2, mode='bilinear', align_corners=True)
        # gc_fm1_L = F.interpolate(gc_fm1_L, scale_factor=2, mode='bilinear', align_corners=True)
        # Grouped score heads: each 1x1 conv output is split into 4 chunks
        # along channels and the chunks are summed into num_classes maps.
        x = self.conv_1x1_1(gc_fm1_1)
        s1, s2, s3, s4 = torch.chunk(x, 4, 1)
        score_1 = s1 + s2 + s3 + s4
        x = self.conv_1x1_2(gc_fm1_2)
        s1, s2, s3, s4 = torch.chunk(x, 4, 1)
        score_2 = s1 + s2 + s3 + s4
        x = self.conv_1x1_3(gc_fm1_3)
        s1, s2, s3, s4 = torch.chunk(x, 4, 1)
        score_3 = s1 + s2 + s3 + s4
        x = self.conv_1x1_4(gc_fm1_L)
        s1, s2, s3, s4 = torch.chunk(x, 4, 1)
        score_4 = s1 + s2 + s3 + s4
        # print(score_4.size())
        out = score_1 + score_2 + score_3 + score_4
        if debug is True:
            self.heatmap(score_1, viz, patient, slice_index, 'score_1')
            self.heatmap(score_2, viz, patient, slice_index, 'score_2')
            self.heatmap(score_3, viz, patient, slice_index, 'score_3')
            self.heatmap(score_4, viz, patient, slice_index, 'score_4')
            self.heatmap(out, viz, patient, slice_index, 'out')
        return out
def heatmap(self, input, viz, patient, slice_index, name):
n, c, h, w = input.shape
fm1 = input.view(-1, h, w)
c, h, w = fm1.shape
for i in range(c):
viz.heatmap(fm1[i], opts=dict(title=f'{patient + 1}_{slice_index + 1}_{name}_input_class_{i}'))
# add 3 GCB
# add 3 (plus + B)
# dilated conv
# problem: the label-4 probability map includes label 1's probability
# delete layer_4
# super boundary refine
class FCN_GCN_UGS_R2_3(nn.Module):
    """Segmentation net: ResNet-50 backbone, GCN_8 + R2_R_3 branches on every
    stage, and a dense top-down decoder of DU upsampling units and BottleneckX
    fusion blocks. Four grouped 1x1 score heads are summed into the logits.

    NOTE(review): the DU/BottleneckX/conv_1x1 channel counts are hard-coded to
    num_classes * 4 == 20, i.e. this appears to require num_classes == 5.
    """

    def __init__(self, num_classes):
        super(FCN_GCN_UGS_R2_3, self).__init__()
        self.num_classes = num_classes
        resnet = models.resnet50(pretrained=True)
        # Stem: 1x1 conv keeps full input resolution (resnet.conv1 would halve it).
        self.conv0 = nn.Conv2d(3, 64, kernel_size=1, stride=1)
        self.bn0 = resnet.bn1
        self.relu = resnet.relu
        # ResNet-50 stages; 256/512/1024/2048 output channels respectively.
        self.layer1 = resnet.layer1
        self.layer2 = resnet.layer2
        self.layer3 = resnet.layer3
        self.layer4 = resnet.layer4
        # Per-stage global-conv blocks projecting to num_classes * 4 channels.
        self.gcn1 = GCN_8(256, self.num_classes * 4)
        self.gcn2 = GCN_8(512, self.num_classes * 4)
        self.gcn3 = GCN_8(1024, self.num_classes * 4)
        self.gcn4 = GCN_8(2048, self.num_classes * 4)
        # R2 refinement blocks (group=4 matches the 4-way grouped score heads).
        self.gcn1_1 = R2_R_3(out_c=self.num_classes * 4, group=4)
        self.gcn1_2 = R2_R_3(out_c=self.num_classes * 4, group=4)
        self.gcn1_3 = R2_R_3(out_c=self.num_classes * 4, group=4)
        self.gcn1_4 = R2_R_3(out_c=self.num_classes * 4, group=4)
        self.gcn2_1 = R2_R_3(out_c=self.num_classes * 4, group=4)
        self.gcn2_2 = R2_R_3(out_c=self.num_classes * 4, group=4)
        self.gcn2_3 = R2_R_3(out_c=self.num_classes * 4, group=4)
        self.gcn3_1 = R2_R_3(out_c=self.num_classes * 4, group=4)
        self.gcn3_2 = R2_R_3(out_c=self.num_classes * 4, group=4)
        self.gcn3_3 = R2_R_3(out_c=self.num_classes * 4, group=4)
        self.gcn4_1 = R2_R_3(out_c=self.num_classes * 4, group=4)
        # Decoder upsampling (DU) and concat-fusion (BottleneckX) units.
        self.DU_01 = DU(20, 20)
        self.DU_02 = DU(20, 20)
        self.DU_03 = DU(20, 20)
        self.DU_04 = DU(20, 20)
        self.DU_05 = DU(20, 20)
        self.DU_06 = DU(20, 20)
        self.DU_07 = DU(20, 20)
        self.DU_08 = DU(20, 20)
        self.DU_09 = DU(20, 20)
        self.bottleneck_01 = BottleneckX(40, 20)
        self.bottleneck_02 = BottleneckX(60, 20)
        self.bottleneck_03 = BottleneckX(80, 20)
        self.bottleneck_04 = BottleneckX(100, 20)
        self.bottleneck_05 = BottleneckX(40, 20)
        self.bottleneck_06 = BottleneckX(60, 20)
        self.bottleneck_07 = BottleneckX(80, 20)
        self.bottleneck_08 = BottleneckX(40, 20)
        self.bottleneck_09 = BottleneckX(60, 20)
        # Grouped 1x1 score heads; their 4 channel chunks are summed to logits.
        self.conv_1x1_1 = nn.Conv2d(20, 20, 1)
        self.conv_1x1_2 = nn.Conv2d(20, 20, 1)
        self.conv_1x1_3 = nn.Conv2d(20, 20, 1)
        self.conv_1x1_4 = nn.Conv2d(20, 20, 1)

    def _classifier(self, in_c):
        """Build a small conv classification head (not used by forward).

        Fixed: channel counts must be ints and must agree — the original
        passed the float ``in_c / 2`` (TypeError when constructed) and had the
        first conv output ``in_c`` channels while BatchNorm expected half.
        """
        return nn.Sequential(
            nn.Conv2d(in_c, in_c // 2, 3, padding=1, bias=False),
            nn.BatchNorm2d(in_c // 2),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_c // 2, self.num_classes, 1),
        )

    def forward(self, x, debug=False, viz=None, patient=None, slice_index=None):
        """Return summed class logits; push per-channel heatmaps when debug."""
        # Stem + backbone stages.
        x = self.relu(self.bn0(self.conv0(x)))
        fm1 = self.layer1(x)
        fm2 = self.layer2(fm1)
        fm3 = self.layer3(fm2)
        fm4 = self.layer4(fm3)
        # GCN + R2 projection of every stage.
        gc_fm1 = self.gcn1_1(self.gcn1(fm1))
        gc_fm2 = self.gcn2_1(self.gcn2(fm2))
        gc_fm3 = self.gcn3_1(self.gcn3(fm3))
        gc_fm4 = self.gcn4_1(self.gcn4(fm4))
        gc_fm4_L = gc_fm4
        # Level 3: fuse upsampled level-4 features.
        x = self.DU_08(gc_fm4)
        x = torch.cat((gc_fm3, x), 1)
        x = self.bottleneck_08(x)
        gc_fm3_1 = self.gcn3_2(x)
        x = self.DU_09(gc_fm4_L)
        x = torch.cat((gc_fm3, gc_fm3_1, x), 1)
        gc_fm3_L = self.bottleneck_09(x)
        # Level 2: densely fuse every level-3 branch.
        x = self.DU_05(gc_fm3)
        x = torch.cat((gc_fm2, x), 1)
        x = self.bottleneck_05(x)
        gc_fm2_1 = self.gcn2_2(x)
        x = self.DU_06(gc_fm3_1)
        x = torch.cat((gc_fm2, gc_fm2_1, x), 1)
        x = self.bottleneck_06(x)
        gc_fm2_2 = self.gcn2_3(x)
        x = self.DU_07(gc_fm3_L)
        x = torch.cat((gc_fm2, gc_fm2_1, gc_fm2_2, x), 1)
        gc_fm2_L = self.bottleneck_07(x)
        # Level 1: densely fuse every level-2 branch.
        x = self.DU_01(gc_fm2)
        x = torch.cat((gc_fm1, x), 1)
        x = self.bottleneck_01(x)
        gc_fm1_1 = self.gcn1_2(x)
        x = self.DU_02(gc_fm2_1)
        x = torch.cat((gc_fm1, gc_fm1_1, x), 1)
        x = self.bottleneck_02(x)
        gc_fm1_2 = self.gcn1_3(x)
        x = self.DU_03(gc_fm2_2)
        x = torch.cat((gc_fm1, gc_fm1_1, gc_fm1_2, x), 1)
        x = self.bottleneck_03(x)
        gc_fm1_3 = self.gcn1_4(x)
        x = self.DU_04(gc_fm2_L)
        x = torch.cat((gc_fm1, gc_fm1_1, gc_fm1_2, gc_fm1_3, x), 1)
        gc_fm1_L = self.bottleneck_04(x)
        # Grouped score heads: each 1x1 conv output is split into 4 channel
        # chunks that are summed into a num_classes-channel score map.
        scores = []
        for head, feat in ((self.conv_1x1_1, gc_fm1_1),
                           (self.conv_1x1_2, gc_fm1_2),
                           (self.conv_1x1_3, gc_fm1_3),
                           (self.conv_1x1_4, gc_fm1_L)):
            s1, s2, s3, s4 = torch.chunk(head(feat), 4, 1)
            scores.append(s1 + s2 + s3 + s4)
        score_1, score_2, score_3, score_4 = scores
        out = score_1 + score_2 + score_3 + score_4
        if debug is True:
            self.heatmap(score_1, viz, patient, slice_index, 'score_1')
            self.heatmap(score_2, viz, patient, slice_index, 'score_2')
            self.heatmap(score_3, viz, patient, slice_index, 'score_3')
            self.heatmap(score_4, viz, patient, slice_index, 'score_4')
            self.heatmap(out, viz, patient, slice_index, 'out')
        return out

    def heatmap(self, input, viz, patient, slice_index, name):
        """Emit one ``viz.heatmap`` plot per channel of a 4-D activation map."""
        n, c, h, w = input.shape
        fm1 = input.view(-1, h, w)
        c, h, w = fm1.shape
        for i in range(c):
            viz.heatmap(fm1[i], opts=dict(title=f'{patient + 1}_{slice_index + 1}_{name}_input_class_{i}'))
# add 3 GCB
# add 3 (plus + B)
# dilated conv
# problem: the label-4 probability map includes label 1's probability
# delete layer_4
# super boundary refine
class FCN_GCN_L(nn.Module):
    """Segmentation net: ResNet-50 backbone, GCN_8 + R2 branches on every
    stage, and a dense top-down decoder of DU upsampling units and BottleneckX
    fusion blocks. Four grouped 1x1 score heads are summed into the logits.

    Fixed: removed a stray ``print(fm4.size())`` left in the forward pass.

    NOTE(review): the DU/BottleneckX/conv_1x1 channel counts are hard-coded to
    num_classes * 4 == 20, i.e. this appears to require num_classes == 5.
    """

    def __init__(self, num_classes):
        super(FCN_GCN_L, self).__init__()
        self.num_classes = num_classes
        resnet = models.resnet50(pretrained=True)
        # Stem: 1x1 conv keeps full input resolution (resnet.conv1 would halve it).
        self.conv0 = nn.Conv2d(3, 64, kernel_size=1, stride=1)
        self.bn0 = resnet.bn1
        self.relu = resnet.relu
        # ResNet-50 stages; 256/512/1024/2048 output channels respectively.
        self.layer1 = resnet.layer1
        self.layer2 = resnet.layer2
        self.layer3 = resnet.layer3
        self.layer4 = resnet.layer4
        # Per-stage global-conv blocks projecting to num_classes * 4 channels.
        self.gcn1 = GCN_8(256, self.num_classes * 4)
        self.gcn2 = GCN_8(512, self.num_classes * 4)
        self.gcn3 = GCN_8(1024, self.num_classes * 4)
        self.gcn4 = GCN_8(2048, self.num_classes * 4)
        # R2 refinement blocks (group=4 matches the 4-way grouped score heads).
        self.gcn1_1 = R2(out_c=self.num_classes * 4, group=4)
        self.gcn1_2 = R2(out_c=self.num_classes * 4, group=4)
        self.gcn1_3 = R2(out_c=self.num_classes * 4, group=4)
        self.gcn1_4 = R2(out_c=self.num_classes * 4, group=4)
        self.gcn2_1 = R2(out_c=self.num_classes * 4, group=4)
        self.gcn2_2 = R2(out_c=self.num_classes * 4, group=4)
        self.gcn2_3 = R2(out_c=self.num_classes * 4, group=4)
        self.gcn3_1 = R2(out_c=self.num_classes * 4, group=4)
        self.gcn3_2 = R2(out_c=self.num_classes * 4, group=4)
        self.gcn3_3 = R2(out_c=self.num_classes * 4, group=4)
        self.gcn4_1 = R2(out_c=self.num_classes * 4, group=4)
        # Decoder upsampling (DU) and concat-fusion (BottleneckX) units.
        self.DU_01 = DU(20, 20)
        self.DU_02 = DU(20, 20)
        self.DU_03 = DU(20, 20)
        self.DU_04 = DU(20, 20)
        self.DU_05 = DU(20, 20)
        self.DU_06 = DU(20, 20)
        self.DU_07 = DU(20, 20)
        self.DU_08 = DU(20, 20)
        self.DU_09 = DU(20, 20)
        self.bottleneck_01 = BottleneckX(40, 20)
        self.bottleneck_02 = BottleneckX(60, 20)
        self.bottleneck_03 = BottleneckX(80, 20)
        self.bottleneck_04 = BottleneckX(100, 20)
        self.bottleneck_05 = BottleneckX(40, 20)
        self.bottleneck_06 = BottleneckX(60, 20)
        self.bottleneck_07 = BottleneckX(80, 20)
        self.bottleneck_08 = BottleneckX(40, 20)
        self.bottleneck_09 = BottleneckX(60, 20)
        # Grouped 1x1 score heads; their 4 channel chunks are summed to logits.
        self.conv_1x1_1 = nn.Conv2d(20, 20, 1)
        self.conv_1x1_2 = nn.Conv2d(20, 20, 1)
        self.conv_1x1_3 = nn.Conv2d(20, 20, 1)
        self.conv_1x1_4 = nn.Conv2d(20, 20, 1)

    def _classifier(self, in_c):
        """Build a small conv classification head (not used by forward).

        Fixed: channel counts must be ints and must agree — the original
        passed the float ``in_c / 2`` (TypeError when constructed) and had the
        first conv output ``in_c`` channels while BatchNorm expected half.
        """
        return nn.Sequential(
            nn.Conv2d(in_c, in_c // 2, 3, padding=1, bias=False),
            nn.BatchNorm2d(in_c // 2),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_c // 2, self.num_classes, 1),
        )

    def forward(self, x, debug=False, viz=None, patient=None, slice_index=None):
        """Return summed class logits; push per-channel heatmaps when debug."""
        # Stem + backbone stages.
        x = self.relu(self.bn0(self.conv0(x)))
        fm1 = self.layer1(x)
        fm2 = self.layer2(fm1)
        fm3 = self.layer3(fm2)
        fm4 = self.layer4(fm3)
        # GCN + R2 projection of every stage.
        gc_fm1 = self.gcn1_1(self.gcn1(fm1))
        gc_fm2 = self.gcn2_1(self.gcn2(fm2))
        gc_fm3 = self.gcn3_1(self.gcn3(fm3))
        gc_fm4 = self.gcn4_1(self.gcn4(fm4))
        gc_fm4_L = gc_fm4
        # Level 3: fuse upsampled level-4 features.
        x = self.DU_08(gc_fm4)
        x = torch.cat((gc_fm3, x), 1)
        x = self.bottleneck_08(x)
        gc_fm3_1 = self.gcn3_2(x)
        x = self.DU_09(gc_fm4_L)
        x = torch.cat((gc_fm3, gc_fm3_1, x), 1)
        gc_fm3_L = self.bottleneck_09(x)
        # Level 2: densely fuse every level-3 branch.
        x = self.DU_05(gc_fm3)
        x = torch.cat((gc_fm2, x), 1)
        x = self.bottleneck_05(x)
        gc_fm2_1 = self.gcn2_2(x)
        x = self.DU_06(gc_fm3_1)
        x = torch.cat((gc_fm2, gc_fm2_1, x), 1)
        x = self.bottleneck_06(x)
        gc_fm2_2 = self.gcn2_3(x)
        x = self.DU_07(gc_fm3_L)
        x = torch.cat((gc_fm2, gc_fm2_1, gc_fm2_2, x), 1)
        gc_fm2_L = self.bottleneck_07(x)
        # Level 1: densely fuse every level-2 branch.
        x = self.DU_01(gc_fm2)
        x = torch.cat((gc_fm1, x), 1)
        x = self.bottleneck_01(x)
        gc_fm1_1 = self.gcn1_2(x)
        x = self.DU_02(gc_fm2_1)
        x = torch.cat((gc_fm1, gc_fm1_1, x), 1)
        x = self.bottleneck_02(x)
        gc_fm1_2 = self.gcn1_3(x)
        x = self.DU_03(gc_fm2_2)
        x = torch.cat((gc_fm1, gc_fm1_1, gc_fm1_2, x), 1)
        x = self.bottleneck_03(x)
        gc_fm1_3 = self.gcn1_4(x)
        x = self.DU_04(gc_fm2_L)
        x = torch.cat((gc_fm1, gc_fm1_1, gc_fm1_2, gc_fm1_3, x), 1)
        gc_fm1_L = self.bottleneck_04(x)
        # Grouped score heads: each 1x1 conv output is split into 4 channel
        # chunks that are summed into a num_classes-channel score map.
        scores = []
        for head, feat in ((self.conv_1x1_1, gc_fm1_1),
                           (self.conv_1x1_2, gc_fm1_2),
                           (self.conv_1x1_3, gc_fm1_3),
                           (self.conv_1x1_4, gc_fm1_L)):
            s1, s2, s3, s4 = torch.chunk(head(feat), 4, 1)
            scores.append(s1 + s2 + s3 + s4)
        score_1, score_2, score_3, score_4 = scores
        out = score_1 + score_2 + score_3 + score_4
        if debug is True:
            self.heatmap(score_1, viz, patient, slice_index, 'score_1')
            self.heatmap(score_2, viz, patient, slice_index, 'score_2')
            self.heatmap(score_3, viz, patient, slice_index, 'score_3')
            self.heatmap(score_4, viz, patient, slice_index, 'score_4')
            self.heatmap(out, viz, patient, slice_index, 'out')
        return out

    def heatmap(self, input, viz, patient, slice_index, name):
        """Emit one ``viz.heatmap`` plot per channel of a 4-D activation map."""
        n, c, h, w = input.shape
        fm1 = input.view(-1, h, w)
        c, h, w = fm1.shape
        for i in range(c):
            viz.heatmap(fm1[i], opts=dict(title=f'{patient + 1}_{slice_index + 1}_{name}_input_class_{i}'))
class FCN_GCN_GCN_C(nn.Module):
    """GCN segmentation net over ResNet-50 (stages 1-3); decoder levels are
    fused by channel concatenation followed by a 3x3 conv ("C" variant).

    NOTE(review): the fusion convs are hard-coded to 5 * 2 input channels,
    i.e. this appears to require num_classes == 5 — confirm against callers.
    """

    def __init__(self, num_classes):
        super(FCN_GCN_GCN_C, self).__init__()
        self.num_classes = num_classes
        resnet = models.resnet50(pretrained=True)
        # Standard ResNet-50 stem (7x7 stride-2 conv) and first three stages.
        self.conv1 = resnet.conv1
        self.bn0 = resnet.bn1
        self.relu = resnet.relu
        self.layer1 = resnet.layer1
        self.layer2 = resnet.layer2
        self.layer3 = resnet.layer3
        # Per-stage global-conv blocks plus per-level refinement GCNs.
        self.gcn1 = GCN_8(256, self.num_classes)
        self.gcn2 = GCN_8(512, self.num_classes)
        self.gcn3 = GCN_8(1024, self.num_classes)
        self.gcn1_1 = GCN_8(self.num_classes, self.num_classes)
        self.gcn2_1 = GCN_8(self.num_classes, self.num_classes)
        self.gcn3_1 = GCN_8(self.num_classes, self.num_classes)
        # Concat-fusion convs (2 levels of 5 channels in, 5 out).
        self.conv_3x3_1 = nn.Conv2d(5 * 2, 5, kernel_size=3, padding=1)
        self.conv_3x3_2 = nn.Conv2d(5 * 2, 5, kernel_size=3, padding=1)
        # Boundary-refinement blocks; br5 is registered but unused by forward
        # (kept so existing checkpoints still load).
        self.br1 = BR_super_7(num_classes, dilation=True)
        self.br2 = BR_super_7(num_classes, dilation=True)
        self.br3 = BR_super_7(num_classes, dilation=False)
        self.br5 = BR_super_7(num_classes, dilation=False)
        self.br6 = BR_super_7(num_classes, dilation=True)
        self.br7 = BR_super_7(num_classes, dilation=True)
        self.br5_1 = BR_super_7(num_classes, dilation=False)
        self.br6_1 = BR_super_7(num_classes, dilation=True)
        self.br7_1 = BR_super_7(num_classes, dilation=True)
        self.br5_2 = BR_super_7(num_classes, dilation=False)
        self.br6_2 = BR_super_7(num_classes, dilation=True)
        self.br7_2 = BR_super_7(num_classes, dilation=True)
        self.br8 = BR_super_7(num_classes, dilation=True)
        self.br9 = BR_super_7(num_classes, dilation=True)

    def _classifier(self, in_c):
        """Build a small conv classification head (not used by forward).

        Fixed: channel counts must be ints and must agree — the original
        passed the float ``in_c / 2`` (TypeError when constructed) and had the
        first conv output ``in_c`` channels while BatchNorm expected half.
        """
        return nn.Sequential(
            nn.Conv2d(in_c, in_c // 2, 3, padding=1, bias=False),
            nn.BatchNorm2d(in_c // 2),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_c // 2, self.num_classes, 1),
        )

    def forward(self, x, debug=False, viz=None, patient=None, slice_index=None):
        """Return class logits at input resolution; heatmaps when debug."""
        # Stem + the first three backbone stages.
        x = self.relu(self.bn0(self.conv1(x)))
        pooled_x = x
        fm1 = self.layer1(x)
        fm2 = self.layer2(fm1)
        fm3 = self.layer3(fm2)
        # GCN projection + boundary refinement of every stage.
        gc_fm1 = self.br1(self.gcn1(fm1))
        gc_fm2 = self.br2(self.gcn2(fm2))
        gc_fm3 = self.br3(self.gcn3(fm3))
        if debug is True:
            self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1_0')
            self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm2_0')
            self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm3_0')
        # Level 3: refine with a second GCN and a residual-style BR sum.
        gc_fm3_1 = gc_fm3
        gc_fm3_2 = self.br5_1(self.gcn3_1(gc_fm3_1))
        gc_fm3_3 = self.br5_2(gc_fm3_2 + gc_fm3_1)
        gc_fm3 = F.interpolate(gc_fm3_3, fm2.size()[2:], mode='bilinear', align_corners=True)
        # Level 2: concat with upsampled level 3 and fuse with a 3x3 conv.
        x = torch.cat((gc_fm2, gc_fm3), 1)
        x = self.conv_3x3_1(x)
        gc_fm2_1 = self.br6(x)
        gc_fm2_2 = self.br6_1(self.gcn2_1(gc_fm2_1))
        gc_fm2_3 = self.br6_2(gc_fm2_2 + gc_fm2_1)
        gc_fm2 = F.interpolate(gc_fm2_3, fm1.size()[2:], mode='bilinear', align_corners=True)
        # Level 1: same concat + 3x3 fusion, then upsample to input size.
        x = torch.cat((gc_fm1, gc_fm2), 1)
        x = self.conv_3x3_2(x)
        gc_fm1_1 = self.br7(x)
        gc_fm1_2 = self.br7_1(self.gcn1_1(gc_fm1_1))
        gc_fm1_3 = self.br7_2(gc_fm1_2 + gc_fm1_1)
        gc_fm1 = F.interpolate(gc_fm1_3, pooled_x.size()[2:], mode='bilinear', align_corners=True)
        gc_fm1 = F.interpolate(self.br8(gc_fm1), scale_factor=2, mode='bilinear', align_corners=True)
        out = self.br9(gc_fm1)
        if debug is True:
            self.heatmap(gc_fm3_1, viz, patient, slice_index, 'gc_fm3_1')
            self.heatmap(gc_fm3_2, viz, patient, slice_index, 'gc_fm3_2')
            self.heatmap(gc_fm3_3, viz, patient, slice_index, 'gc_fm3_3')
            self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm3')
            self.heatmap(gc_fm2_1, viz, patient, slice_index, 'gc_fm2_1')
            self.heatmap(gc_fm2_2, viz, patient, slice_index, 'gc_fm2_2')
            self.heatmap(gc_fm2_3, viz, patient, slice_index, 'gc_fm2_3')
            self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm2')
            self.heatmap(gc_fm1_1, viz, patient, slice_index, 'gc_fm1_1')
            self.heatmap(gc_fm1_2, viz, patient, slice_index, 'gc_fm1_2')
            self.heatmap(gc_fm1_3, viz, patient, slice_index, 'gc_fm1_3')
            self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1')
            self.heatmap(out, viz, patient, slice_index, 'out')
        return out

    def heatmap(self, input, viz, patient, slice_index, name):
        """Emit one ``viz.heatmap`` plot per channel of a 4-D activation map."""
        n, c, h, w = input.shape
        fm1 = input.view(-1, h, w)
        c, h, w = fm1.shape
        for i in range(c):
            viz.heatmap(fm1[i], opts=dict(title=f'{patient + 1}_{slice_index + 1}_{name}_input_class_{i}'))
class FCN_GCN_GCN_A(nn.Module):
    """GCN segmentation net over ResNet-50 (stages 1-3); decoder levels are
    fused by elementwise addition ("A" variant)."""

    def __init__(self, num_classes):
        super(FCN_GCN_GCN_A, self).__init__()
        self.num_classes = num_classes
        resnet = models.resnet50(pretrained=True)
        # Standard ResNet-50 stem (7x7 stride-2 conv) and first three stages.
        self.conv1 = resnet.conv1
        self.bn0 = resnet.bn1
        self.relu = resnet.relu
        self.layer1 = resnet.layer1
        self.layer2 = resnet.layer2
        self.layer3 = resnet.layer3
        # Per-stage global-conv blocks plus per-level refinement GCNs.
        self.gcn1 = GCN_8(256, self.num_classes)
        self.gcn2 = GCN_8(512, self.num_classes)
        self.gcn3 = GCN_8(1024, self.num_classes)
        self.gcn1_1 = GCN_8(self.num_classes, self.num_classes)
        self.gcn2_1 = GCN_8(self.num_classes, self.num_classes)
        self.gcn3_1 = GCN_8(self.num_classes, self.num_classes)
        # Boundary-refinement blocks; br5 is registered but unused by forward
        # (kept so existing checkpoints still load).
        self.br1 = BR_super_7(num_classes, dilation=True)
        self.br2 = BR_super_7(num_classes, dilation=True)
        self.br3 = BR_super_7(num_classes, dilation=False)
        self.br5 = BR_super_7(num_classes, dilation=False)
        self.br6 = BR_super_7(num_classes, dilation=True)
        self.br7 = BR_super_7(num_classes, dilation=True)
        self.br5_1 = BR_super_7(num_classes, dilation=False)
        self.br6_1 = BR_super_7(num_classes, dilation=True)
        self.br7_1 = BR_super_7(num_classes, dilation=True)
        self.br5_2 = BR_super_7(num_classes, dilation=False)
        self.br6_2 = BR_super_7(num_classes, dilation=True)
        self.br7_2 = BR_super_7(num_classes, dilation=True)
        self.br8 = BR_super_7(num_classes, dilation=True)
        self.br9 = BR_super_7(num_classes, dilation=True)

    def _classifier(self, in_c):
        """Build a small conv classification head (not used by forward).

        Fixed: channel counts must be ints and must agree — the original
        passed the float ``in_c / 2`` (TypeError when constructed) and had the
        first conv output ``in_c`` channels while BatchNorm expected half.
        """
        return nn.Sequential(
            nn.Conv2d(in_c, in_c // 2, 3, padding=1, bias=False),
            nn.BatchNorm2d(in_c // 2),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_c // 2, self.num_classes, 1),
        )

    def forward(self, x, debug=False, viz=None, patient=None, slice_index=None):
        """Return class logits at input resolution; heatmaps when debug."""
        # Stem + the first three backbone stages.
        x = self.relu(self.bn0(self.conv1(x)))
        pooled_x = x
        fm1 = self.layer1(x)
        fm2 = self.layer2(fm1)
        fm3 = self.layer3(fm2)
        # GCN projection + boundary refinement of every stage.
        gc_fm1 = self.br1(self.gcn1(fm1))
        gc_fm2 = self.br2(self.gcn2(fm2))
        gc_fm3 = self.br3(self.gcn3(fm3))
        if debug is True:
            self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1_0')
            self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm2_0')
            self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm3_0')
        # Level 3: refine with a second GCN and a residual-style BR sum.
        gc_fm3_1 = gc_fm3
        gc_fm3_2 = self.br5_1(self.gcn3_1(gc_fm3_1))
        gc_fm3_3 = self.br5_2(gc_fm3_2 + gc_fm3_1)
        gc_fm3 = F.interpolate(gc_fm3_3, fm2.size()[2:], mode='bilinear', align_corners=True)
        # Level 2: fuse with upsampled level 3 by elementwise addition.
        gc_fm2_1 = self.br6(gc_fm2 + gc_fm3)
        gc_fm2_2 = self.br6_1(self.gcn2_1(gc_fm2_1))
        gc_fm2_3 = self.br6_2(gc_fm2_2 + gc_fm2_1)
        gc_fm2 = F.interpolate(gc_fm2_3, fm1.size()[2:], mode='bilinear', align_corners=True)
        # Level 1: same additive fusion, then upsample to input size.
        gc_fm1_1 = self.br7(gc_fm1 + gc_fm2)
        gc_fm1_2 = self.br7_1(self.gcn1_1(gc_fm1_1))
        gc_fm1_3 = self.br7_2(gc_fm1_2 + gc_fm1_1)
        gc_fm1 = F.interpolate(gc_fm1_3, pooled_x.size()[2:], mode='bilinear', align_corners=True)
        gc_fm1 = F.interpolate(self.br8(gc_fm1), scale_factor=2, mode='bilinear', align_corners=True)
        out = self.br9(gc_fm1)
        if debug is True:
            self.heatmap(gc_fm3_1, viz, patient, slice_index, 'gc_fm3_1')
            self.heatmap(gc_fm3_2, viz, patient, slice_index, 'gc_fm3_2')
            self.heatmap(gc_fm3_3, viz, patient, slice_index, 'gc_fm3_3')
            self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm3')
            self.heatmap(gc_fm2_1, viz, patient, slice_index, 'gc_fm2_1')
            self.heatmap(gc_fm2_2, viz, patient, slice_index, 'gc_fm2_2')
            self.heatmap(gc_fm2_3, viz, patient, slice_index, 'gc_fm2_3')
            self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm2')
            self.heatmap(gc_fm1_1, viz, patient, slice_index, 'gc_fm1_1')
            self.heatmap(gc_fm1_2, viz, patient, slice_index, 'gc_fm1_2')
            self.heatmap(gc_fm1_3, viz, patient, slice_index, 'gc_fm1_3')
            self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1')
            self.heatmap(out, viz, patient, slice_index, 'out')
        return out

    def heatmap(self, input, viz, patient, slice_index, name):
        """Emit one ``viz.heatmap`` plot per channel of a 4-D activation map."""
        n, c, h, w = input.shape
        fm1 = input.view(-1, h, w)
        c, h, w = fm1.shape
        for i in range(c):
            viz.heatmap(fm1[i], opts=dict(title=f'{patient + 1}_{slice_index + 1}_{name}_input_class_{i}'))
class FCN_GCN_GCN_B(nn.Module):
    """GCN segmentation net over ResNet-50 (stages 1-3) with GCN_BIG blocks;
    decoder levels are fused by elementwise multiplication ("B" variant).

    Fixed: removed a duplicated, unreachable ``return out`` at the end of
    forward, and the int/channel bug in ``_classifier``.
    """

    def __init__(self, num_classes):
        super(FCN_GCN_GCN_B, self).__init__()
        self.num_classes = num_classes
        resnet = models.resnet50(pretrained=True)
        # Standard ResNet-50 stem (7x7 stride-2 conv) and first three stages.
        self.conv1 = resnet.conv1
        self.bn0 = resnet.bn1
        self.relu = resnet.relu
        self.layer1 = resnet.layer1
        self.layer2 = resnet.layer2
        self.layer3 = resnet.layer3
        # Per-stage GCN_BIG blocks plus per-level refinement GCNs.
        self.gcn1 = GCN_BIG(256, self.num_classes)
        self.gcn2 = GCN_BIG(512, self.num_classes)
        self.gcn3 = GCN_BIG(1024, self.num_classes)
        self.gcn1_1 = GCN_BIG(self.num_classes, self.num_classes)
        self.gcn2_1 = GCN_BIG(self.num_classes, self.num_classes)
        self.gcn3_1 = GCN_BIG(self.num_classes, self.num_classes)
        # Boundary-refinement blocks; br5 is registered but unused by forward
        # (kept so existing checkpoints still load).
        self.br1 = BR_super_7(num_classes, dilation=True)
        self.br2 = BR_super_7(num_classes, dilation=True)
        self.br3 = BR_super_7(num_classes, dilation=False)
        self.br5 = BR_super_7(num_classes, dilation=False)
        self.br6 = BR_super_7(num_classes, dilation=True)
        self.br7 = BR_super_7(num_classes, dilation=True)
        self.br5_1 = BR_super_7(num_classes, dilation=False)
        self.br6_1 = BR_super_7(num_classes, dilation=True)
        self.br7_1 = BR_super_7(num_classes, dilation=True)
        self.br5_2 = BR_super_7(num_classes, dilation=False)
        self.br6_2 = BR_super_7(num_classes, dilation=True)
        self.br7_2 = BR_super_7(num_classes, dilation=True)
        self.br8 = BR_super_7(num_classes, dilation=True)
        self.br9 = BR_super_7(num_classes, dilation=True)

    def _classifier(self, in_c):
        """Build a small conv classification head (not used by forward).

        Fixed: channel counts must be ints and must agree — the original
        passed the float ``in_c / 2`` (TypeError when constructed) and had the
        first conv output ``in_c`` channels while BatchNorm expected half.
        """
        return nn.Sequential(
            nn.Conv2d(in_c, in_c // 2, 3, padding=1, bias=False),
            nn.BatchNorm2d(in_c // 2),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_c // 2, self.num_classes, 1),
        )

    def forward(self, x, debug=False, viz=None, patient=None, slice_index=None):
        """Return class logits at input resolution; heatmaps when debug."""
        # Stem + the first three backbone stages.
        x = self.relu(self.bn0(self.conv1(x)))
        pooled_x = x
        fm1 = self.layer1(x)
        fm2 = self.layer2(fm1)
        fm3 = self.layer3(fm2)
        # GCN projection + boundary refinement of every stage.
        gc_fm1 = self.br1(self.gcn1(fm1))
        gc_fm2 = self.br2(self.gcn2(fm2))
        gc_fm3 = self.br3(self.gcn3(fm3))
        if debug is True:
            self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1_0')
            self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm2_0')
            self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm3_0')
        # Level 3: refine with a second GCN and a residual-style BR sum.
        gc_fm3_1 = gc_fm3
        gc_fm3_2 = self.br5_1(self.gcn3_1(gc_fm3_1))
        gc_fm3_3 = self.br5_2(gc_fm3_2 + gc_fm3_1)
        gc_fm3 = F.interpolate(gc_fm3_3, fm2.size()[2:], mode='bilinear', align_corners=True)
        # Level 2: fuse with upsampled level 3 by elementwise multiplication.
        gc_fm2_1 = self.br6(gc_fm2 * gc_fm3)
        gc_fm2_2 = self.br6_1(self.gcn2_1(gc_fm2_1))
        gc_fm2_3 = self.br6_2(gc_fm2_2 + gc_fm2_1)
        gc_fm2 = F.interpolate(gc_fm2_3, fm1.size()[2:], mode='bilinear', align_corners=True)
        # Level 1: same multiplicative fusion, then upsample to input size.
        gc_fm1_1 = self.br7(gc_fm1 * gc_fm2)
        gc_fm1_2 = self.br7_1(self.gcn1_1(gc_fm1_1))
        gc_fm1_3 = self.br7_2(gc_fm1_2 + gc_fm1_1)
        gc_fm1 = F.interpolate(gc_fm1_3, pooled_x.size()[2:], mode='bilinear', align_corners=True)
        gc_fm1 = F.interpolate(self.br8(gc_fm1), scale_factor=2, mode='bilinear', align_corners=True)
        out = self.br9(gc_fm1)
        if debug is True:
            self.heatmap(gc_fm3_1, viz, patient, slice_index, 'gc_fm3_1')
            self.heatmap(gc_fm3_2, viz, patient, slice_index, 'gc_fm3_2')
            self.heatmap(gc_fm3_3, viz, patient, slice_index, 'gc_fm3_3')
            self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm3')
            self.heatmap(gc_fm2_1, viz, patient, slice_index, 'gc_fm2_1')
            self.heatmap(gc_fm2_2, viz, patient, slice_index, 'gc_fm2_2')
            self.heatmap(gc_fm2_3, viz, patient, slice_index, 'gc_fm2_3')
            self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm2')
            self.heatmap(gc_fm1_1, viz, patient, slice_index, 'gc_fm1_1')
            self.heatmap(gc_fm1_2, viz, patient, slice_index, 'gc_fm1_2')
            self.heatmap(gc_fm1_3, viz, patient, slice_index, 'gc_fm1_3')
            self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1')
            self.heatmap(out, viz, patient, slice_index, 'out')
        return out

    def heatmap(self, input, viz, patient, slice_index, name):
        """Emit one ``viz.heatmap`` plot per channel of a 4-D activation map."""
        n, c, h, w = input.shape
        fm1 = input.view(-1, h, w)
        c, h, w = fm1.shape
        for i in range(c):
            viz.heatmap(fm1[i], opts=dict(title=f'{patient + 1}_{slice_index + 1}_{name}_input_class_{i}'))
class FCN_GCN_GCN_SE(nn.Module):
    def __init__(self, num_classes):
        """Build the SE variant: GCN_8 branches over ResNet-50 stages 1-3
        with BR_super_7_SE boundary-refinement blocks.

        Kept byte-identical: nn.Module registration order determines
        parameter iteration order, which optimizers may depend on.
        """
        super(FCN_GCN_GCN_SE, self).__init__()
        self.num_classes = num_classes  # 21 in paper
        resnet = models.resnet50(pretrained=True)
        # input = 256x256
        # Standard ResNet-50 stem and first three stages (stage 4 unused).
        self.conv1 = resnet.conv1  # 7x7,64, stride=2 o/p = 128x128
        self.bn0 = resnet.bn1  # BatchNorm2d(64)?
        self.relu = resnet.relu
        self.layer1 = resnet.layer1  # res-2 o/p = 64x64,256
        self.layer2 = resnet.layer2  # res-3 o/p = 32x32,512
        self.layer3 = resnet.layer3  # res-4 o/p = 16x16,1024
        # self.layer4 = resnet.layer4 # res-5 o/p = 8x8,2048
        # Per-stage global-conv blocks plus per-level refinement GCNs.
        self.gcn1 = GCN_8(256, self.num_classes)  # gcn_i after layer-1
        self.gcn2 = GCN_8(512, self.num_classes)
        self.gcn3 = GCN_8(1024, self.num_classes)
        self.gcn1_1 = GCN_8(self.num_classes, self.num_classes)  # gcn_i after layer-1
        self.gcn2_1 = GCN_8(self.num_classes, self.num_classes)
        self.gcn3_1 = GCN_8(self.num_classes, self.num_classes)
        # self.gcn4 = GCN(2048, self.num_classes)
        # SE-augmented boundary-refinement blocks; br5 appears unused by
        # forward — TODO confirm before removing.
        self.br1 = BR_super_7_SE(num_classes, dilation=True)
        self.br2 = BR_super_7_SE(num_classes, dilation=True)
        self.br3 = BR_super_7_SE(num_classes, dilation=False)
        # self.br4 = BR_super_7_SE(num_classes, dilation=False)
        self.br5 = BR_super_7_SE(num_classes, dilation=False)
        self.br6 = BR_super_7_SE(num_classes, dilation=True)
        self.br7 = BR_super_7_SE(num_classes, dilation=True)
        self.br5_1 = BR_super_7_SE(num_classes, dilation=False)
        self.br6_1 = BR_super_7_SE(num_classes, dilation=True)
        self.br7_1 = BR_super_7_SE(num_classes, dilation=True)
        self.br5_2 = BR_super_7_SE(num_classes, dilation=False)
        self.br6_2 = BR_super_7_SE(num_classes, dilation=True)
        self.br7_2 = BR_super_7_SE(num_classes, dilation=True)
        self.br8 = BR_super_7_SE(num_classes, dilation=True)
        self.br9 = BR_super_7_SE(num_classes, dilation=True)
def _classifier(self, in_c):
return nn.Sequential(
nn.Conv2d(in_c, in_c, 3, padding=1, bias=False),
nn.BatchNorm2d(in_c / 2),
nn.ReLU(inplace=True),
# nn.Dropout(.5),
nn.Conv2d(in_c / 2, self.num_classes, 1),
)
def forward(self, x, debug=False, viz=None, patient=None, slice_index=None):
# input = x # 256
x = self.conv1(x)
x = self.bn0(x)
x = self.relu(x)
pooled_x = x # 128
fm1 = self.layer1(x) # 128
fm2 = self.layer2(fm1) # 64
fm3 = self.layer3(fm2) # 32
# fm4 = self.layer4(fm3) # 16
gc_fm1 = self.br1(self.gcn1(fm1)) # 64
gc_fm2 = self.br2(self.gcn2(fm2))
gc_fm3 = self.br3(self.gcn3(fm3))
# gc_fm4 = self.br4(self.gcn4(fm4))
if debug is True:
self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1_0')
self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm2_0')
self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm3_0')
# self.heatmap(gc_fm4, viz, patient, slice_index, 'gc_fm4_0')
# gc_fm4 = F.upsample(gc_fm4, fm3.size()[2:], mode='bilinear', align_corners=True)
# gc_fm3_1 = self.br5(gc_fm3 + gc_fm4)
gc_fm3_1 = gc_fm3
gc_fm3_2 = self.br5_1(self.gcn3_1(gc_fm3_1))
gc_fm3_3 = self.br5_2(gc_fm3_2 + gc_fm3_1)
gc_fm3 = F.upsample(gc_fm3_3, fm2.size()[2:], mode='bilinear', align_corners=True)
gc_fm2_1 = self.br6(gc_fm2 * gc_fm3)
gc_fm2_2 = self.br6_1(self.gcn2_1(gc_fm2_1))
gc_fm2_3 = self.br6_2(gc_fm2_2 + gc_fm2_1)
gc_fm2 = F.upsample(gc_fm2_3, fm1.size()[2:], mode='bilinear', align_corners=True)
gc_fm1_1 = self.br7(gc_fm1 * gc_fm2)
gc_fm1_2 = self.br7_1(self.gcn1_1(gc_fm1_1))
gc_fm1_3 = self.br7_2(gc_fm1_2 + gc_fm1_1)
gc_fm1 = F.upsample(gc_fm1_3, pooled_x.size()[2:], mode='bilinear', align_corners=True)
gc_fm1 = F.upsample(self.br8(gc_fm1), scale_factor=2, mode='bilinear', align_corners=True)
out = self.br9(gc_fm1)
n, c, h, w = out.shape
if debug is True:
# self.heatmap(gc_fm4, viz, patient, slice_index, 'gc_fm1_4')
self.heatmap(gc_fm3_1, viz, patient, slice_index, 'gc_fm3_1')
self.heatmap(gc_fm3_2, viz, patient, slice_index, 'gc_fm3_2')
self.heatmap(gc_fm3_3, viz, patient, slice_index, 'gc_fm3_3')
self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm3')
self.heatmap(gc_fm2_1, viz, patient, slice_index, 'gc_fm2_1')
self.heatmap(gc_fm2_2, viz, patient, slice_index, 'gc_fm2_2')
self.heatmap(gc_fm2_3, viz, patient, slice_index, 'gc_fm2_3')
self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm2')
self.heatmap(gc_fm1_1, viz, patient, slice_index, 'gc_fm1_1')
self.heatmap(gc_fm1_2, viz, patient, slice_index, 'gc_fm1_2')
self.heatmap(gc_fm1_3, viz, patient, slice_index, 'gc_fm1_3')
self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1')
self.heatmap(out, viz, patient, slice_index, 'out')
return out
return out
def heatmap(self, input, viz, patient, slice_index, name):
n, c, h, w = input.shape
fm1 = input.view(-1, h, w)
c, h, w = fm1.shape
for i in range(c):
viz.heatmap(fm1[i], opts=dict(title=f'{patient + 1}_{slice_index + 1}_{name}_input_class_{i}'))
class FCN_GCN_GCN(nn.Module):
    """FCN head over ResNet-50 with stacked GCN_8 blocks and plain
    ``BR_super_7`` boundary refinement (no SE).  Same multiplicative decoder
    fusion as ``FCN_GCN_GCN_SE``; res-5 (layer4) is dropped.
    """

    def __init__(self, num_classes):
        super(FCN_GCN_GCN, self).__init__()
        self.num_classes = num_classes  # 21 in paper
        resnet = models.resnet50(pretrained=True)
        # backbone stem + stages (comments assume a 256x256 input)
        self.conv1 = resnet.conv1    # 7x7, 64, stride=2 -> 128x128
        self.bn0 = resnet.bn1        # BatchNorm2d(64)
        self.relu = resnet.relu
        self.layer1 = resnet.layer1  # res-2 -> 64x64, 256 ch
        self.layer2 = resnet.layer2  # res-3 -> 32x32, 512 ch
        self.layer3 = resnet.layer3  # res-4 -> 16x16, 1024 ch
        # first-round GCN blocks on backbone stages
        self.gcn1 = GCN_8(256, self.num_classes)
        self.gcn2 = GCN_8(512, self.num_classes)
        self.gcn3 = GCN_8(1024, self.num_classes)
        # second-round GCN blocks inside the decoder
        self.gcn1_1 = GCN_8(self.num_classes, self.num_classes)
        self.gcn2_1 = GCN_8(self.num_classes, self.num_classes)
        self.gcn3_1 = GCN_8(self.num_classes, self.num_classes)
        # boundary-refinement blocks (br5 unused by forward(); kept for
        # checkpoint compatibility)
        self.br1 = BR_super_7(num_classes, dilation=True)
        self.br2 = BR_super_7(num_classes, dilation=True)
        self.br3 = BR_super_7(num_classes, dilation=False)
        self.br5 = BR_super_7(num_classes, dilation=False)
        self.br6 = BR_super_7(num_classes, dilation=True)
        self.br7 = BR_super_7(num_classes, dilation=True)
        self.br5_1 = BR_super_7(num_classes, dilation=False)
        self.br6_1 = BR_super_7(num_classes, dilation=True)
        self.br7_1 = BR_super_7(num_classes, dilation=True)
        self.br5_2 = BR_super_7(num_classes, dilation=False)
        self.br6_2 = BR_super_7(num_classes, dilation=True)
        self.br7_2 = BR_super_7(num_classes, dilation=True)
        self.br8 = BR_super_7(num_classes, dilation=True)
        self.br9 = BR_super_7(num_classes, dilation=True)

    def _classifier(self, in_c):
        """Conv head in_c -> in_c // 2 -> num_classes.  Fixed: original
        passed float ``in_c / 2`` to Conv2d/BatchNorm2d (TypeError) with a
        channel mismatch after the first conv.  Unused by forward()."""
        return nn.Sequential(
            nn.Conv2d(in_c, in_c // 2, 3, padding=1, bias=False),
            nn.BatchNorm2d(in_c // 2),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_c // 2, self.num_classes, 1),
        )

    def forward(self, x, debug=False, viz=None, patient=None, slice_index=None):
        """Compute per-class score maps at the input resolution; with
        ``debug`` True, stream intermediates to ``viz``."""
        x = self.conv1(x)
        x = self.bn0(x)
        x = self.relu(x)
        pooled_x = x              # stride-2 stem output
        fm1 = self.layer1(x)
        fm2 = self.layer2(fm1)
        fm3 = self.layer3(fm2)
        gc_fm1 = self.br1(self.gcn1(fm1))
        gc_fm2 = self.br2(self.gcn2(fm2))
        gc_fm3 = self.br3(self.gcn3(fm3))
        if debug is True:
            self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1_0')
            self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm2_0')
            self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm3_0')
        # deepest stage: GCN + BR residual pair, then upsample to res-3 size
        gc_fm3_1 = gc_fm3
        gc_fm3_2 = self.br5_1(self.gcn3_1(gc_fm3_1))
        gc_fm3_3 = self.br5_2(gc_fm3_2 + gc_fm3_1)
        # F.upsample: deprecated alias of F.interpolate, kept for file style
        gc_fm3 = F.upsample(gc_fm3_3, fm2.size()[2:], mode='bilinear', align_corners=True)
        # multiplicative fusion (this variant uses *, not +)
        gc_fm2_1 = self.br6(gc_fm2 * gc_fm3)
        gc_fm2_2 = self.br6_1(self.gcn2_1(gc_fm2_1))
        gc_fm2_3 = self.br6_2(gc_fm2_2 + gc_fm2_1)
        gc_fm2 = F.upsample(gc_fm2_3, fm1.size()[2:], mode='bilinear', align_corners=True)
        gc_fm1_1 = self.br7(gc_fm1 * gc_fm2)
        gc_fm1_2 = self.br7_1(self.gcn1_1(gc_fm1_1))
        gc_fm1_3 = self.br7_2(gc_fm1_2 + gc_fm1_1)
        gc_fm1 = F.upsample(gc_fm1_3, pooled_x.size()[2:], mode='bilinear', align_corners=True)
        gc_fm1 = F.upsample(self.br8(gc_fm1), scale_factor=2, mode='bilinear', align_corners=True)
        out = self.br9(gc_fm1)
        if debug is True:
            self.heatmap(gc_fm3_1, viz, patient, slice_index, 'gc_fm3_1')
            self.heatmap(gc_fm3_2, viz, patient, slice_index, 'gc_fm3_2')
            self.heatmap(gc_fm3_3, viz, patient, slice_index, 'gc_fm3_3')
            self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm3')
            self.heatmap(gc_fm2_1, viz, patient, slice_index, 'gc_fm2_1')
            self.heatmap(gc_fm2_2, viz, patient, slice_index, 'gc_fm2_2')
            self.heatmap(gc_fm2_3, viz, patient, slice_index, 'gc_fm2_3')
            self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm2')
            self.heatmap(gc_fm1_1, viz, patient, slice_index, 'gc_fm1_1')
            self.heatmap(gc_fm1_2, viz, patient, slice_index, 'gc_fm1_2')
            self.heatmap(gc_fm1_3, viz, patient, slice_index, 'gc_fm1_3')
            self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1')
            self.heatmap(out, viz, patient, slice_index, 'out')
        # single exit point (original had a duplicate/unreachable return)
        return out

    def heatmap(self, input, viz, patient, slice_index, name):
        """Send each (batch x channel) slice of ``input`` (N, C, H, W) to
        ``viz`` as an individual titled heatmap."""
        n, c, h, w = input.shape
        maps = input.view(-1, h, w)  # flatten batch and channel dims
        c, h, w = maps.shape
        for i in range(c):
            viz.heatmap(maps[i], opts=dict(title=f'{patient + 1}_{slice_index + 1}_{name}_input_class_{i}'))
# add 3 GCB
# add 3 (plus + B)
# dilated conv
# problem: label 4's prob map includes label 1's prob
# delete layer_4
# super boundary refine
class FCN_GCN_8(nn.Module):
    """FCN head over ResNet-50 with plain ``GCN`` blocks, ``BR_super_7``
    refinement, and additive decoder fusion.  res-5 (layer4) is dropped.
    """

    def __init__(self, num_classes):
        super(FCN_GCN_8, self).__init__()
        self.num_classes = num_classes  # 21 in paper
        resnet = models.resnet50(pretrained=True)
        # backbone stem + stages (comments assume a 256x256 input)
        self.conv1 = resnet.conv1    # 7x7, 64, stride=2 -> 128x128
        self.bn0 = resnet.bn1        # BatchNorm2d(64)
        self.relu = resnet.relu
        self.layer1 = resnet.layer1  # res-2 -> 64x64, 256 ch
        self.layer2 = resnet.layer2  # res-3 -> 32x32, 512 ch
        self.layer3 = resnet.layer3  # res-4 -> 16x16, 1024 ch
        # first-round GCN blocks on backbone stages
        self.gcn1 = GCN(256, self.num_classes)
        self.gcn2 = GCN(512, self.num_classes)
        self.gcn3 = GCN(1024, self.num_classes)
        # second-round GCN blocks inside the decoder
        self.gcn1_1 = GCN(self.num_classes, self.num_classes)
        self.gcn2_1 = GCN(self.num_classes, self.num_classes)
        self.gcn3_1 = GCN(self.num_classes, self.num_classes)
        # boundary-refinement blocks (br5 unused by forward(); kept for
        # checkpoint compatibility)
        self.br1 = BR_super_7(num_classes, dilation=True)
        self.br2 = BR_super_7(num_classes, dilation=True)
        self.br3 = BR_super_7(num_classes, dilation=False)
        self.br5 = BR_super_7(num_classes, dilation=False)
        self.br6 = BR_super_7(num_classes, dilation=True)
        self.br7 = BR_super_7(num_classes, dilation=True)
        self.br5_1 = BR_super_7(num_classes, dilation=False)
        self.br6_1 = BR_super_7(num_classes, dilation=True)
        self.br7_1 = BR_super_7(num_classes, dilation=True)
        self.br5_2 = BR_super_7(num_classes, dilation=False)
        self.br6_2 = BR_super_7(num_classes, dilation=True)
        self.br7_2 = BR_super_7(num_classes, dilation=True)
        self.br8 = BR_super_7(num_classes, dilation=True)
        self.br9 = BR_super_7(num_classes, dilation=True)

    def _classifier(self, in_c):
        """Conv head in_c -> in_c // 2 -> num_classes.  Fixed: original
        passed float ``in_c / 2`` to Conv2d/BatchNorm2d (TypeError) with a
        channel mismatch after the first conv.  Unused by forward()."""
        return nn.Sequential(
            nn.Conv2d(in_c, in_c // 2, 3, padding=1, bias=False),
            nn.BatchNorm2d(in_c // 2),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_c // 2, self.num_classes, 1),
        )

    def forward(self, x, debug=False, viz=None, patient=None, slice_index=None):
        """Compute per-class score maps at the input resolution; with
        ``debug`` True, stream intermediates to ``viz``."""
        x = self.conv1(x)
        x = self.bn0(x)
        x = self.relu(x)
        pooled_x = x              # stride-2 stem output
        fm1 = self.layer1(x)
        fm2 = self.layer2(fm1)
        fm3 = self.layer3(fm2)
        gc_fm1 = self.br1(self.gcn1(fm1))
        gc_fm2 = self.br2(self.gcn2(fm2))
        gc_fm3 = self.br3(self.gcn3(fm3))
        if debug is True:
            self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1_0')
            self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm2_0')
            self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm3_0')
        # deepest stage: GCN + BR residual pair, then upsample to res-3 size
        gc_fm3_1 = gc_fm3
        gc_fm3_2 = self.br5_1(self.gcn3_1(gc_fm3_1))
        gc_fm3_3 = self.br5_2(gc_fm3_2 + gc_fm3_1)
        # F.upsample: deprecated alias of F.interpolate, kept for file style
        gc_fm3 = F.upsample(gc_fm3_3, fm2.size()[2:], mode='bilinear', align_corners=True)
        # additive fusion with the shallower streams
        gc_fm2_1 = self.br6(gc_fm2 + gc_fm3)
        gc_fm2_2 = self.br6_1(self.gcn2_1(gc_fm2_1))
        gc_fm2_3 = self.br6_2(gc_fm2_2 + gc_fm2_1)
        gc_fm2 = F.upsample(gc_fm2_3, fm1.size()[2:], mode='bilinear', align_corners=True)
        gc_fm1_1 = self.br7(gc_fm1 + gc_fm2)
        gc_fm1_2 = self.br7_1(self.gcn1_1(gc_fm1_1))
        gc_fm1_3 = self.br7_2(gc_fm1_2 + gc_fm1_1)
        gc_fm1 = F.upsample(gc_fm1_3, pooled_x.size()[2:], mode='bilinear', align_corners=True)
        gc_fm1 = F.upsample(self.br8(gc_fm1), scale_factor=2, mode='bilinear', align_corners=True)
        out = self.br9(gc_fm1)
        if debug is True:
            self.heatmap(gc_fm3_1, viz, patient, slice_index, 'gc_fm3_1')
            self.heatmap(gc_fm3_2, viz, patient, slice_index, 'gc_fm3_2')
            self.heatmap(gc_fm3_3, viz, patient, slice_index, 'gc_fm3_3')
            self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm3')
            self.heatmap(gc_fm2_1, viz, patient, slice_index, 'gc_fm2_1')
            self.heatmap(gc_fm2_2, viz, patient, slice_index, 'gc_fm2_2')
            self.heatmap(gc_fm2_3, viz, patient, slice_index, 'gc_fm2_3')
            self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm2')
            self.heatmap(gc_fm1_1, viz, patient, slice_index, 'gc_fm1_1')
            self.heatmap(gc_fm1_2, viz, patient, slice_index, 'gc_fm1_2')
            self.heatmap(gc_fm1_3, viz, patient, slice_index, 'gc_fm1_3')
            self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1')
            self.heatmap(out, viz, patient, slice_index, 'out')
        # single exit point (original had a duplicate/unreachable return)
        return out

    def heatmap(self, input, viz, patient, slice_index, name):
        """Send each (batch x channel) slice of ``input`` (N, C, H, W) to
        ``viz`` as an individual titled heatmap."""
        n, c, h, w = input.shape
        maps = input.view(-1, h, w)  # flatten batch and channel dims
        c, h, w = maps.shape
        for i in range(c):
            viz.heatmap(maps[i], opts=dict(title=f'{patient + 1}_{slice_index + 1}_{name}_input_class_{i}'))
# add 3 GCB
# add 3 (plus + B)
# dilated conv
# problem: label 4's prob map includes label 1's prob
# delete layer_4
# super boundary refine
# 124203
class FCN_GCN_7(nn.Module):
    """FCN head over ResNet-50 with ``GCN`` blocks, ``BR_super_7``
    refinement, and additive decoder fusion.  res-5 (layer4) is dropped;
    this variant logs only the three final decoder maps in debug mode.
    """

    def __init__(self, num_classes):
        super(FCN_GCN_7, self).__init__()
        self.num_classes = num_classes  # 21 in paper
        resnet = models.resnet50(pretrained=True)
        # backbone stem + stages (comments assume a 256x256 input)
        self.conv1 = resnet.conv1    # 7x7, 64, stride=2 -> 128x128
        self.bn0 = resnet.bn1        # BatchNorm2d(64)
        self.relu = resnet.relu
        self.layer1 = resnet.layer1  # res-2 -> 64x64, 256 ch
        self.layer2 = resnet.layer2  # res-3 -> 32x32, 512 ch
        self.layer3 = resnet.layer3  # res-4 -> 16x16, 1024 ch
        # first-round GCN blocks on backbone stages
        self.gcn1 = GCN(256, self.num_classes)
        self.gcn2 = GCN(512, self.num_classes)
        self.gcn3 = GCN(1024, self.num_classes)
        # second-round GCN blocks inside the decoder
        self.gcn1_1 = GCN(self.num_classes, self.num_classes)
        self.gcn2_1 = GCN(self.num_classes, self.num_classes)
        self.gcn3_1 = GCN(self.num_classes, self.num_classes)
        # boundary-refinement blocks (br5 unused by forward(); kept for
        # checkpoint compatibility)
        self.br1 = BR_super_7(num_classes, dilation=True)
        self.br2 = BR_super_7(num_classes, dilation=True)
        self.br3 = BR_super_7(num_classes, dilation=False)
        self.br5 = BR_super_7(num_classes, dilation=False)
        self.br6 = BR_super_7(num_classes, dilation=True)
        self.br7 = BR_super_7(num_classes, dilation=True)
        self.br5_1 = BR_super_7(num_classes, dilation=False)
        self.br6_1 = BR_super_7(num_classes, dilation=True)
        self.br7_1 = BR_super_7(num_classes, dilation=True)
        self.br5_2 = BR_super_7(num_classes, dilation=False)
        self.br6_2 = BR_super_7(num_classes, dilation=True)
        self.br7_2 = BR_super_7(num_classes, dilation=True)
        self.br8 = BR_super_7(num_classes, dilation=True)
        self.br9 = BR_super_7(num_classes, dilation=True)

    def _classifier(self, in_c):
        """Conv head in_c -> in_c // 2 -> num_classes.  Fixed: original
        passed float ``in_c / 2`` to Conv2d/BatchNorm2d (TypeError) with a
        channel mismatch after the first conv.  Unused by forward()."""
        return nn.Sequential(
            nn.Conv2d(in_c, in_c // 2, 3, padding=1, bias=False),
            nn.BatchNorm2d(in_c // 2),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_c // 2, self.num_classes, 1),
        )

    def forward(self, x, debug=False, viz=None, patient=None, slice_index=None):
        """Compute per-class score maps at the input resolution; with
        ``debug`` True, stream selected intermediates to ``viz``."""
        x = self.conv1(x)
        x = self.bn0(x)
        x = self.relu(x)
        pooled_x = x              # stride-2 stem output
        fm1 = self.layer1(x)
        fm2 = self.layer2(fm1)
        fm3 = self.layer3(fm2)
        gc_fm1 = self.br1(self.gcn1(fm1))
        gc_fm2 = self.br2(self.gcn2(fm2))
        gc_fm3 = self.br3(self.gcn3(fm3))
        if debug is True:
            self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1_0')
            self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm2_0')
            self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm3_0')
        # deepest stage: GCN + BR residual pair, then upsample to res-3 size
        gc_fm3_1 = gc_fm3
        gc_fm3_2 = self.br5_1(self.gcn3_1(gc_fm3_1))
        gc_fm3_3 = self.br5_2(gc_fm3_2 + gc_fm3_1)
        # F.upsample: deprecated alias of F.interpolate, kept for file style
        gc_fm3 = F.upsample(gc_fm3_3, fm2.size()[2:], mode='bilinear', align_corners=True)
        gc_fm2_1 = self.br6(gc_fm2 + gc_fm3)
        gc_fm2_2 = self.br6_1(self.gcn2_1(gc_fm2_1))
        gc_fm2_3 = self.br6_2(gc_fm2_2 + gc_fm2_1)
        gc_fm2 = F.upsample(gc_fm2_3, fm1.size()[2:], mode='bilinear', align_corners=True)
        gc_fm1_1 = self.br7(gc_fm1 + gc_fm2)
        gc_fm1_2 = self.br7_1(self.gcn1_1(gc_fm1_1))
        gc_fm1_3 = self.br7_2(gc_fm1_2 + gc_fm1_1)
        gc_fm1 = F.upsample(gc_fm1_3, pooled_x.size()[2:], mode='bilinear', align_corners=True)
        gc_fm1 = F.upsample(self.br8(gc_fm1), scale_factor=2, mode='bilinear', align_corners=True)
        if debug is True:
            self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm1_3')
            self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm1_2')
            self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1_1')
        out = self.br9(gc_fm1)
        # single exit point (original had a duplicate/unreachable return)
        return out

    def heatmap(self, input, viz, patient, slice_index, name):
        """Send each (batch x channel) slice of ``input`` (N, C, H, W) to
        ``viz`` as an individual titled heatmap."""
        n, c, h, w = input.shape
        maps = input.view(-1, h, w)  # flatten batch and channel dims
        c, h, w = maps.shape
        for i in range(c):
            viz.heatmap(maps[i], opts=dict(title=f'{patient + 1}_{slice_index + 1}_{name}_input_class_{i}'))
# add 3 GCB
# add 3 (plus + B)
# dilated conv
# problem: label 4's prob map includes label 1's prob
# delete layer_4
class FCN_GCN_6(nn.Module):
    """FCN head over ResNet-50 with ``GCN`` blocks and plain ``BR``
    refinement (with per-block dilation flags); additive decoder fusion,
    res-5 (layer4) dropped.
    """

    def __init__(self, num_classes):
        super(FCN_GCN_6, self).__init__()
        self.num_classes = num_classes  # 21 in paper
        resnet = models.resnet50(pretrained=True)
        # backbone stem + stages (comments assume a 256x256 input)
        self.conv1 = resnet.conv1    # 7x7, 64, stride=2 -> 128x128
        self.bn0 = resnet.bn1        # BatchNorm2d(64)
        self.relu = resnet.relu
        self.layer1 = resnet.layer1  # res-2 -> 64x64, 256 ch
        self.layer2 = resnet.layer2  # res-3 -> 32x32, 512 ch
        self.layer3 = resnet.layer3  # res-4 -> 16x16, 1024 ch
        # first-round GCN blocks on backbone stages
        self.gcn1 = GCN(256, self.num_classes)
        self.gcn2 = GCN(512, self.num_classes)
        self.gcn3 = GCN(1024, self.num_classes)
        # second-round GCN blocks inside the decoder
        self.gcn1_1 = GCN(self.num_classes, self.num_classes)
        self.gcn2_1 = GCN(self.num_classes, self.num_classes)
        self.gcn3_1 = GCN(self.num_classes, self.num_classes)
        # boundary-refinement blocks (br5 unused by forward(); kept for
        # checkpoint compatibility)
        self.br1 = BR(num_classes, dilation=True)
        self.br2 = BR(num_classes, dilation=True)
        self.br3 = BR(num_classes, dilation=False)
        self.br5 = BR(num_classes, dilation=False)
        self.br6 = BR(num_classes, dilation=True)
        self.br7 = BR(num_classes, dilation=True)
        self.br5_1 = BR(num_classes, dilation=False)
        self.br6_1 = BR(num_classes, dilation=True)
        self.br7_1 = BR(num_classes, dilation=True)
        self.br5_2 = BR(num_classes, dilation=False)
        self.br6_2 = BR(num_classes, dilation=True)
        self.br7_2 = BR(num_classes, dilation=True)
        self.br8 = BR(num_classes, dilation=True)
        self.br9 = BR(num_classes, dilation=True)

    def _classifier(self, in_c):
        """Conv head in_c -> in_c // 2 -> num_classes.  Fixed: original
        passed float ``in_c / 2`` to Conv2d/BatchNorm2d (TypeError) with a
        channel mismatch after the first conv.  Unused by forward()."""
        return nn.Sequential(
            nn.Conv2d(in_c, in_c // 2, 3, padding=1, bias=False),
            nn.BatchNorm2d(in_c // 2),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_c // 2, self.num_classes, 1),
        )

    def forward(self, x, debug=False, viz=None, patient=None, slice_index=None):
        """Compute per-class score maps at the input resolution; with
        ``debug`` True, stream selected intermediates to ``viz``."""
        x = self.conv1(x)
        x = self.bn0(x)
        x = self.relu(x)
        pooled_x = x              # stride-2 stem output
        fm1 = self.layer1(x)
        fm2 = self.layer2(fm1)
        fm3 = self.layer3(fm2)
        gc_fm1 = self.br1(self.gcn1(fm1))
        gc_fm2 = self.br2(self.gcn2(fm2))
        gc_fm3 = self.br3(self.gcn3(fm3))
        if debug is True:
            self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1_0')
            self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm2_0')
            self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm3_0')
        # deepest stage: GCN + BR residual pair, then upsample to res-3 size
        gc_fm3_1 = gc_fm3
        gc_fm3_2 = self.br5_1(self.gcn3_1(gc_fm3_1))
        gc_fm3_3 = self.br5_2(gc_fm3_2 + gc_fm3_1)
        # F.upsample: deprecated alias of F.interpolate, kept for file style
        gc_fm3 = F.upsample(gc_fm3_3, fm2.size()[2:], mode='bilinear', align_corners=True)
        gc_fm2_1 = self.br6(gc_fm2 + gc_fm3)
        gc_fm2_2 = self.br6_1(self.gcn2_1(gc_fm2_1))
        gc_fm2_3 = self.br6_2(gc_fm2_2 + gc_fm2_1)
        gc_fm2 = F.upsample(gc_fm2_3, fm1.size()[2:], mode='bilinear', align_corners=True)
        gc_fm1_1 = self.br7(gc_fm1 + gc_fm2)
        gc_fm1_2 = self.br7_1(self.gcn1_1(gc_fm1_1))
        gc_fm1_3 = self.br7_2(gc_fm1_2 + gc_fm1_1)
        gc_fm1 = F.upsample(gc_fm1_3, pooled_x.size()[2:], mode='bilinear', align_corners=True)
        gc_fm1 = F.upsample(self.br8(gc_fm1), scale_factor=2, mode='bilinear', align_corners=True)
        if debug is True:
            self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm1_3')
            self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm1_2')
            self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1_1')
        out = self.br9(gc_fm1)
        # single exit point (original had a duplicate/unreachable return)
        return out

    def heatmap(self, input, viz, patient, slice_index, name):
        """Send each (batch x channel) slice of ``input`` (N, C, H, W) to
        ``viz`` as an individual titled heatmap."""
        n, c, h, w = input.shape
        maps = input.view(-1, h, w)  # flatten batch and channel dims
        c, h, w = maps.shape
        for i in range(c):
            viz.heatmap(maps[i], opts=dict(title=f'{patient + 1}_{slice_index + 1}_{name}_input_class_{i}'))
# add 3 GCB
# add 3 (plus + B)
# dilated conv
# problem: label 4's prob map includes label 1's prob
# round /= 5
# 083113
class FCN_GCN_5(nn.Module):
    """FCN head over the full ResNet-50 (including res-5/layer4) with
    ``GCN`` blocks, ``BR`` refinement, additive fusion, and an ad-hoc
    per-class rescaling of the output logits.
    """

    def __init__(self, num_classes):
        super(FCN_GCN_5, self).__init__()
        self.num_classes = num_classes  # 21 in paper
        resnet = models.resnet50(pretrained=True)
        # backbone stem + stages (comments assume a 256x256 input)
        self.conv1 = resnet.conv1    # 7x7, 64, stride=2 -> 128x128
        self.bn0 = resnet.bn1        # BatchNorm2d(64)
        self.relu = resnet.relu
        self.layer1 = resnet.layer1  # res-2 -> 64x64, 256 ch
        self.layer2 = resnet.layer2  # res-3 -> 32x32, 512 ch
        self.layer3 = resnet.layer3  # res-4 -> 16x16, 1024 ch
        self.layer4 = resnet.layer4  # res-5 -> 8x8, 2048 ch
        # first-round GCN blocks on backbone stages
        self.gcn1 = GCN(256, self.num_classes)
        self.gcn2 = GCN(512, self.num_classes)
        self.gcn3 = GCN(1024, self.num_classes)
        self.gcn4 = GCN(2048, self.num_classes)
        # second-round GCN blocks inside the decoder
        self.gcn1_1 = GCN(self.num_classes, self.num_classes)
        self.gcn2_1 = GCN(self.num_classes, self.num_classes)
        self.gcn3_1 = GCN(self.num_classes, self.num_classes)
        # boundary-refinement blocks
        self.br1 = BR(num_classes, dilation=True)
        self.br2 = BR(num_classes, dilation=True)
        self.br3 = BR(num_classes, dilation=False)
        self.br4 = BR(num_classes, dilation=False)
        self.br5 = BR(num_classes, dilation=False)
        self.br6 = BR(num_classes, dilation=True)
        self.br7 = BR(num_classes, dilation=True)
        self.br5_1 = BR(num_classes, dilation=False)
        self.br6_1 = BR(num_classes, dilation=True)
        self.br7_1 = BR(num_classes, dilation=True)
        self.br5_2 = BR(num_classes, dilation=False)
        self.br6_2 = BR(num_classes, dilation=True)
        self.br7_2 = BR(num_classes, dilation=True)
        self.br8 = BR(num_classes, dilation=True)
        self.br9 = BR(num_classes, dilation=True)

    def _classifier(self, in_c):
        """Conv head in_c -> in_c // 2 -> num_classes.  Fixed: original
        passed float ``in_c / 2`` to Conv2d/BatchNorm2d (TypeError) with a
        channel mismatch after the first conv.  Unused by forward()."""
        return nn.Sequential(
            nn.Conv2d(in_c, in_c // 2, 3, padding=1, bias=False),
            nn.BatchNorm2d(in_c // 2),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_c // 2, self.num_classes, 1),
        )

    def forward(self, x, debug=False, viz=None, patient=None, slice_index=None):
        """Compute per-class score maps at the input resolution; with
        ``debug`` True, stream selected intermediates to ``viz``."""
        x = self.conv1(x)
        x = self.bn0(x)
        x = self.relu(x)
        pooled_x = x              # stride-2 stem output
        fm1 = self.layer1(x)
        fm2 = self.layer2(fm1)
        fm3 = self.layer3(fm2)
        fm4 = self.layer4(fm3)
        gc_fm1 = self.br1(self.gcn1(fm1))
        gc_fm2 = self.br2(self.gcn2(fm2))
        gc_fm3 = self.br3(self.gcn3(fm3))
        gc_fm4 = self.br4(self.gcn4(fm4))
        if debug is True:
            self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1_0')
            self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm2_0')
            self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm3_0')
            self.heatmap(gc_fm4, viz, patient, slice_index, 'gc_fm4_0')
        # F.upsample: deprecated alias of F.interpolate, kept for file style
        gc_fm4 = F.upsample(gc_fm4, fm3.size()[2:], mode='bilinear', align_corners=True)
        gc_fm3_1 = self.br5(gc_fm3 + gc_fm4)
        gc_fm3_2 = self.br5_1(self.gcn3_1(gc_fm3_1))
        gc_fm3_3 = self.br5_2(gc_fm3_2 + gc_fm3_1)
        gc_fm3 = F.upsample(gc_fm3_3, fm2.size()[2:], mode='bilinear', align_corners=True)
        gc_fm2_1 = self.br6(gc_fm2 + gc_fm3)
        gc_fm2_2 = self.br6_1(self.gcn2_1(gc_fm2_1))
        gc_fm2_3 = self.br6_2(gc_fm2_2 + gc_fm2_1)
        gc_fm2 = F.upsample(gc_fm2_3, fm1.size()[2:], mode='bilinear', align_corners=True)
        gc_fm1_1 = self.br7(gc_fm1 + gc_fm2)
        gc_fm1_2 = self.br7_1(self.gcn1_1(gc_fm1_1))
        gc_fm1_3 = self.br7_2(gc_fm1_2 + gc_fm1_1)
        gc_fm1 = F.upsample(gc_fm1_3, pooled_x.size()[2:], mode='bilinear', align_corners=True)
        gc_fm1 = F.upsample(self.br8(gc_fm1), scale_factor=2, mode='bilinear', align_corners=True)
        if debug is True:
            self.heatmap(gc_fm4, viz, patient, slice_index, 'gc_fm1_4')
            self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm1_3')
            self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm1_2')
            self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1_1')
        out = self.br9(gc_fm1)
        # ad-hoc class rebalancing: damp channel 0 by 1/num_channels and
        # boost channel 4 by 1.25 (in-place).
        # NOTE(review): assumes num_classes >= 5 — confirm with callers.
        c = out.shape[1]
        out[:, 0, :, :] /= c
        out[:, 4, :, :] *= 1.25
        # single exit point (original had a duplicate/unreachable return)
        return out

    def heatmap(self, input, viz, patient, slice_index, name):
        """Send each (batch x channel) slice of ``input`` (N, C, H, W) to
        ``viz`` as an individual titled heatmap."""
        n, c, h, w = input.shape
        maps = input.view(-1, h, w)  # flatten batch and channel dims
        c, h, w = maps.shape
        for i in range(c):
            viz.heatmap(maps[i], opts=dict(title=f'{patient + 1}_{slice_index + 1}_{name}_input_class_{i}'))
# add 3 GCB
# add 3 (plus + B)
# 134340
# dilated conv
# problem: label 4's prob map includes label 1's prob
class FCN_GCN_4(nn.Module):
    """FCN head over the full ResNet-50 (including res-5/layer4) with
    ``GCN`` blocks, ``BR`` refinement, and additive decoder fusion.
    Like ``FCN_GCN_5`` but without the output class rescaling.
    """

    def __init__(self, num_classes):
        super(FCN_GCN_4, self).__init__()
        self.num_classes = num_classes  # 21 in paper
        resnet = models.resnet50(pretrained=True)
        # backbone stem + stages (comments assume a 256x256 input)
        self.conv1 = resnet.conv1    # 7x7, 64, stride=2 -> 128x128
        self.bn0 = resnet.bn1        # BatchNorm2d(64)
        self.relu = resnet.relu
        self.layer1 = resnet.layer1  # res-2 -> 64x64, 256 ch
        self.layer2 = resnet.layer2  # res-3 -> 32x32, 512 ch
        self.layer3 = resnet.layer3  # res-4 -> 16x16, 1024 ch
        self.layer4 = resnet.layer4  # res-5 -> 8x8, 2048 ch
        # first-round GCN blocks on backbone stages
        self.gcn1 = GCN(256, self.num_classes)
        self.gcn2 = GCN(512, self.num_classes)
        self.gcn3 = GCN(1024, self.num_classes)
        self.gcn4 = GCN(2048, self.num_classes)
        # second-round GCN blocks inside the decoder
        self.gcn1_1 = GCN(self.num_classes, self.num_classes)
        self.gcn2_1 = GCN(self.num_classes, self.num_classes)
        self.gcn3_1 = GCN(self.num_classes, self.num_classes)
        # boundary-refinement blocks
        self.br1 = BR(num_classes, dilation=True)
        self.br2 = BR(num_classes, dilation=True)
        self.br3 = BR(num_classes, dilation=False)
        self.br4 = BR(num_classes, dilation=False)
        self.br5 = BR(num_classes, dilation=False)
        self.br6 = BR(num_classes, dilation=True)
        self.br7 = BR(num_classes, dilation=True)
        self.br5_1 = BR(num_classes, dilation=False)
        self.br6_1 = BR(num_classes, dilation=True)
        self.br7_1 = BR(num_classes, dilation=True)
        self.br5_2 = BR(num_classes, dilation=False)
        self.br6_2 = BR(num_classes, dilation=True)
        self.br7_2 = BR(num_classes, dilation=True)
        self.br8 = BR(num_classes, dilation=True)
        self.br9 = BR(num_classes, dilation=True)

    def _classifier(self, in_c):
        """Conv head in_c -> in_c // 2 -> num_classes.  Fixed: original
        passed float ``in_c / 2`` to Conv2d/BatchNorm2d (TypeError) with a
        channel mismatch after the first conv.  Unused by forward()."""
        return nn.Sequential(
            nn.Conv2d(in_c, in_c // 2, 3, padding=1, bias=False),
            nn.BatchNorm2d(in_c // 2),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_c // 2, self.num_classes, 1),
        )

    def forward(self, x, debug=False, viz=None, patient=None, slice_index=None):
        """Compute per-class score maps at the input resolution; with
        ``debug`` True, stream selected intermediates to ``viz``."""
        x = self.conv1(x)
        x = self.bn0(x)
        x = self.relu(x)
        pooled_x = x              # stride-2 stem output
        fm1 = self.layer1(x)
        fm2 = self.layer2(fm1)
        fm3 = self.layer3(fm2)
        fm4 = self.layer4(fm3)
        gc_fm1 = self.br1(self.gcn1(fm1))
        gc_fm2 = self.br2(self.gcn2(fm2))
        gc_fm3 = self.br3(self.gcn3(fm3))
        gc_fm4 = self.br4(self.gcn4(fm4))
        if debug is True:
            self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1_0')
            self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm2_0')
            self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm3_0')
            self.heatmap(gc_fm4, viz, patient, slice_index, 'gc_fm4_0')
        # F.upsample: deprecated alias of F.interpolate, kept for file style
        gc_fm4 = F.upsample(gc_fm4, fm3.size()[2:], mode='bilinear', align_corners=True)
        gc_fm3_1 = self.br5(gc_fm3 + gc_fm4)
        gc_fm3_2 = self.br5_1(self.gcn3_1(gc_fm3_1))
        gc_fm3_3 = self.br5_2(gc_fm3_2 + gc_fm3_1)
        gc_fm3 = F.upsample(gc_fm3_3, fm2.size()[2:], mode='bilinear', align_corners=True)
        gc_fm2_1 = self.br6(gc_fm2 + gc_fm3)
        gc_fm2_2 = self.br6_1(self.gcn2_1(gc_fm2_1))
        gc_fm2_3 = self.br6_2(gc_fm2_2 + gc_fm2_1)
        gc_fm2 = F.upsample(gc_fm2_3, fm1.size()[2:], mode='bilinear', align_corners=True)
        gc_fm1_1 = self.br7(gc_fm1 + gc_fm2)
        gc_fm1_2 = self.br7_1(self.gcn1_1(gc_fm1_1))
        gc_fm1_3 = self.br7_2(gc_fm1_2 + gc_fm1_1)
        gc_fm1 = F.upsample(gc_fm1_3, pooled_x.size()[2:], mode='bilinear', align_corners=True)
        gc_fm1 = F.upsample(self.br8(gc_fm1), scale_factor=2, mode='bilinear', align_corners=True)
        if debug is True:
            self.heatmap(gc_fm4, viz, patient, slice_index, 'gc_fm1_4')
            self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm1_3')
            self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm1_2')
            self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1_1')
        out = self.br9(gc_fm1)
        # single exit point (original had a duplicate/unreachable return)
        return out

    def heatmap(self, input, viz, patient, slice_index, name):
        """Send each (batch x channel) slice of ``input`` (N, C, H, W) to
        ``viz`` as an individual titled heatmap."""
        n, c, h, w = input.shape
        maps = input.view(-1, h, w)  # flatten batch and channel dims
        c, h, w = maps.shape
        for i in range(c):
            viz.heatmap(maps[i], opts=dict(title=f'{patient + 1}_{slice_index + 1}_{name}_input_class_{i}'))
# add 3 GCB
# add 3 (plus + B)
class FCN_GCN_3(nn.Module):
    """FCN head over the full ResNet-50 with ``GCN`` blocks and default
    ``BR`` refinement (no dilation flags); additive decoder fusion.

    Bug fix: the original ``forward`` computed ``out`` but had NO return
    statement, so the model silently returned ``None``.
    """

    def __init__(self, num_classes):
        super(FCN_GCN_3, self).__init__()
        self.num_classes = num_classes  # 21 in paper
        resnet = models.resnet50(pretrained=True)
        # backbone stem + stages (comments assume a 256x256 input)
        self.conv1 = resnet.conv1    # 7x7, 64, stride=2 -> 128x128
        self.bn0 = resnet.bn1        # BatchNorm2d(64)
        self.relu = resnet.relu
        self.layer1 = resnet.layer1  # res-2 -> 64x64, 256 ch
        self.layer2 = resnet.layer2  # res-3 -> 32x32, 512 ch
        self.layer3 = resnet.layer3  # res-4 -> 16x16, 1024 ch
        self.layer4 = resnet.layer4  # res-5 -> 8x8, 2048 ch
        # first-round GCN blocks on backbone stages
        self.gcn1 = GCN(256, self.num_classes)
        self.gcn2 = GCN(512, self.num_classes)
        self.gcn3 = GCN(1024, self.num_classes)
        self.gcn4 = GCN(2048, self.num_classes)
        # second-round GCN blocks inside the decoder
        self.gcn1_1 = GCN(self.num_classes, self.num_classes)
        self.gcn2_1 = GCN(self.num_classes, self.num_classes)
        self.gcn3_1 = GCN(self.num_classes, self.num_classes)
        # boundary-refinement blocks (default construction, no dilation arg)
        self.br1 = BR(num_classes)
        self.br2 = BR(num_classes)
        self.br3 = BR(num_classes)
        self.br4 = BR(num_classes)
        self.br5 = BR(num_classes)
        self.br6 = BR(num_classes)
        self.br7 = BR(num_classes)
        self.br5_1 = BR(num_classes)
        self.br6_1 = BR(num_classes)
        self.br7_1 = BR(num_classes)
        self.br5_2 = BR(num_classes)
        self.br6_2 = BR(num_classes)
        self.br7_2 = BR(num_classes)
        self.br8 = BR(num_classes)
        self.br9 = BR(num_classes)

    def _classifier(self, in_c):
        """Conv head in_c -> in_c // 2 -> num_classes.  Fixed: original
        passed float ``in_c / 2`` to Conv2d/BatchNorm2d (TypeError) with a
        channel mismatch after the first conv.  Unused by forward()."""
        return nn.Sequential(
            nn.Conv2d(in_c, in_c // 2, 3, padding=1, bias=False),
            nn.BatchNorm2d(in_c // 2),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_c // 2, self.num_classes, 1),
        )

    def forward(self, x, debug=False, viz=None, patient=None, slice_index=None):
        """Compute per-class score maps at the input resolution; with
        ``debug`` True, stream selected intermediates to ``viz``."""
        x = self.conv1(x)
        x = self.bn0(x)
        x = self.relu(x)
        pooled_x = x              # stride-2 stem output
        fm1 = self.layer1(x)
        fm2 = self.layer2(fm1)
        fm3 = self.layer3(fm2)
        fm4 = self.layer4(fm3)
        gc_fm1 = self.br1(self.gcn1(fm1))
        gc_fm2 = self.br2(self.gcn2(fm2))
        gc_fm3 = self.br3(self.gcn3(fm3))
        gc_fm4 = self.br4(self.gcn4(fm4))
        if debug is True:
            self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1_0')
            self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm2_0')
            self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm3_0')
            self.heatmap(gc_fm4, viz, patient, slice_index, 'gc_fm4_0')
        # F.upsample: deprecated alias of F.interpolate, kept for file style
        gc_fm4 = F.upsample(gc_fm4, fm3.size()[2:], mode='bilinear', align_corners=True)
        gc_fm3_1 = self.br5(gc_fm3 + gc_fm4)
        gc_fm3_2 = self.br5_1(self.gcn3_1(gc_fm3_1))
        gc_fm3_3 = self.br5_2(gc_fm3_2 + gc_fm3_1)
        gc_fm3 = F.upsample(gc_fm3_3, fm2.size()[2:], mode='bilinear', align_corners=True)
        gc_fm2_1 = self.br6(gc_fm2 + gc_fm3)
        gc_fm2_2 = self.br6_1(self.gcn2_1(gc_fm2_1))
        gc_fm2_3 = self.br6_2(gc_fm2_2 + gc_fm2_1)
        gc_fm2 = F.upsample(gc_fm2_3, fm1.size()[2:], mode='bilinear', align_corners=True)
        gc_fm1_1 = self.br7(gc_fm1 + gc_fm2)
        gc_fm1_2 = self.br7_1(self.gcn1_1(gc_fm1_1))
        gc_fm1_3 = self.br7_2(gc_fm1_2 + gc_fm1_1)
        gc_fm1 = F.upsample(gc_fm1_3, pooled_x.size()[2:], mode='bilinear', align_corners=True)
        gc_fm1 = F.upsample(self.br8(gc_fm1), scale_factor=2, mode='bilinear', align_corners=True)
        if debug is True:
            self.heatmap(gc_fm4, viz, patient, slice_index, 'gc_fm1_4')
            self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm1_3')
            self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm1_2')
            self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1_1')
        out = self.br9(gc_fm1)
        return out  # bug fix: the original forward() never returned out

    def heatmap(self, input, viz, patient, slice_index, name):
        """Send each (batch x channel) slice of ``input`` (N, C, H, W) to
        ``viz`` as an individual titled heatmap."""
        n, c, h, w = input.shape
        maps = input.view(-1, h, w)  # flatten batch and channel dims
        c, h, w = maps.shape
        for i in range(c):
            viz.heatmap(maps[i], opts=dict(title=f'{patient + 1}_{slice_index + 1}_{name}_input_class_{i}'))
# add 3 GCB
# add 3 (plus + B)
class FCN_GCN_2(nn.Module):
    """FCN with Global Convolutional Network heads over a ResNet-50 encoder.

    Decoder variant with 3 extra GCN blocks and 3 extra residual
    (sum + BR) refinement stages.

    NOTE(review): a second class also named FCN_GCN_2 is defined later in
    this module and shadows this one at import time; rename one of them if
    both variants are needed.
    """

    def __init__(self, num_classes):
        # BUG FIX: the original called super(FCN_GCN, self).__init__(),
        # which raises TypeError because this class does not derive from
        # FCN_GCN.
        super(FCN_GCN_2, self).__init__()
        self.num_classes = num_classes  # 21 in paper
        resnet = models.resnet50(pretrained=True)
        # input = 256x256
        self.conv1 = resnet.conv1  # 7x7,64, stride=2 o/p = 128x128
        self.bn0 = resnet.bn1  # BatchNorm2d(64)
        self.relu = resnet.relu
        self.layer1 = resnet.layer1  # res-2 o/p = 64x64,256
        self.layer2 = resnet.layer2  # res-3 o/p = 32x32,512
        self.layer3 = resnet.layer3  # res-4 o/p = 16x16,1024
        self.layer4 = resnet.layer4  # res-5 o/p = 8x8,2048
        self.gcn1 = GCN(256, self.num_classes)  # gcn_i after layer-i
        self.gcn2 = GCN(512, self.num_classes)
        self.gcn3 = GCN(1024, self.num_classes)
        # Decoder refinement GCNs operating on class-score maps.
        self.gcn1_1 = GCN(self.num_classes, self.num_classes)
        self.gcn2_1 = GCN(self.num_classes, self.num_classes)
        self.gcn3_1 = GCN(self.num_classes, self.num_classes)
        self.gcn4 = GCN(2048, self.num_classes)
        self.br1 = BR(num_classes)
        self.br2 = BR(num_classes)
        self.br3 = BR(num_classes)
        self.br4 = BR(num_classes)
        self.br5 = BR(num_classes)
        self.br6 = BR(num_classes)
        self.br7 = BR(num_classes)
        self.br5_1 = BR(num_classes)
        self.br6_1 = BR(num_classes)
        self.br7_1 = BR(num_classes)
        self.br5_2 = BR(num_classes)
        self.br6_2 = BR(num_classes)
        self.br7_2 = BR(num_classes)
        self.br8 = BR(num_classes)
        self.br9 = BR(num_classes)

    def _classifier(self, in_c):
        # BUG FIX: the original used float channel counts (in_c / 2) for
        # Conv2d/BatchNorm2d and left the first conv's output at in_c while
        # the following BatchNorm expected in_c / 2 channels.
        # NOTE(review): this helper is not called by forward(); confirm it
        # is still needed.
        return nn.Sequential(
            nn.Conv2d(in_c, in_c // 2, 3, padding=1, bias=False),
            nn.BatchNorm2d(in_c // 2),
            nn.ReLU(inplace=True),
            # nn.Dropout(.5),
            nn.Conv2d(in_c // 2, self.num_classes, 1),
        )

    def forward(self, x, debug=False, viz=None, patient=None, slice_index=None):
        """Encoder -> per-stage GCN+BR -> top-down fusion with residual
        refinement; returns a per-class score map at input resolution."""
        # input = x # 256
        x = self.conv1(x)
        x = self.bn0(x)
        x = self.relu(x)
        pooled_x = x  # 128
        fm1 = self.layer1(x)  # 64
        fm2 = self.layer2(fm1)  # 32
        fm3 = self.layer3(fm2)  # 16
        fm4 = self.layer4(fm3)  # 8
        gc_fm1 = self.br1(self.gcn1(fm1))  # 64
        gc_fm2 = self.br2(self.gcn2(fm2))
        gc_fm3 = self.br3(self.gcn3(fm3))
        gc_fm4 = self.br4(self.gcn4(fm4))
        if debug is True:
            self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1_0')
            self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm2_0')
            self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm3_0')
            self.heatmap(gc_fm4, viz, patient, slice_index, 'gc_fm4_0')
        gc_fm4 = F.upsample(gc_fm4, fm3.size()[2:], mode='bilinear', align_corners=True)
        gc_fm3_1 = self.br5(gc_fm3 + gc_fm4)
        gc_fm3_2 = self.br5_1(self.gcn3_1(gc_fm3_1))
        # Residual against the upsampled deeper map (this variant sums with
        # gc_fm4 rather than gc_fm3_1 as the other variant does).
        gc_fm3_3 = self.br5_2(gc_fm3_2 + gc_fm4)
        gc_fm3 = F.upsample(gc_fm3_3, fm2.size()[2:], mode='bilinear', align_corners=True)
        gc_fm2_1 = self.br6(gc_fm2 + gc_fm3)
        gc_fm2_2 = self.br6_1(self.gcn2_1(gc_fm2_1))
        gc_fm2_3 = self.br6_2(gc_fm2_2 + gc_fm3)
        gc_fm2 = F.upsample(gc_fm2_3, fm1.size()[2:], mode='bilinear', align_corners=True)
        gc_fm1_1 = self.br7(gc_fm1 + gc_fm2)
        gc_fm1_2 = self.br7_1(self.gcn1_1(gc_fm1_1))
        gc_fm1_3 = self.br7_2(gc_fm1_2 + gc_fm2)
        gc_fm1 = F.upsample(gc_fm1_3, pooled_x.size()[2:], mode='bilinear', align_corners=True)
        gc_fm1 = F.upsample(self.br8(gc_fm1), scale_factor=2, mode='bilinear', align_corners=True)
        if debug is True:
            self.heatmap(gc_fm4, viz, patient, slice_index, 'gc_fm1_4')
            self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm1_3')
            self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm1_2')
            self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1_1')
        out = self.br9(gc_fm1)
        return out

    def heatmap(self, input, viz, patient, slice_index, name):
        # Emit one visdom heatmap per channel of a 4-D feature map.
        n, c, h, w = input.shape
        fm1 = input.view(-1, h, w)
        c, h, w = fm1.shape
        for i in range(c):
            viz.heatmap(fm1[i], opts=dict(title=f'{patient + 1}_{slice_index + 1}_{name}_input_class_{i}'))
# Variant below: adds 3 extra GCN refinement blocks (no extra residual BR stages)
class FCN_GCN_2(nn.Module):
    """FCN with Global Convolutional Network heads over a ResNet-50 encoder.

    Decoder variant with 3 extra GCN refinement blocks (no extra residual
    BR stages).

    NOTE(review): this definition shadows the earlier class of the same
    name in this module; rename one of them if both variants are needed.
    """

    def __init__(self, num_classes):
        super(FCN_GCN_2, self).__init__()
        self.num_classes = num_classes  # 21 in paper
        resnet = models.resnet50(pretrained=True)
        # input = 256x256
        self.conv1 = resnet.conv1  # 7x7,64, stride=2 o/p = 128x128
        self.bn0 = resnet.bn1  # BatchNorm2d(64)
        self.relu = resnet.relu
        self.layer1 = resnet.layer1  # res-2 o/p = 64x64,256
        self.layer2 = resnet.layer2  # res-3 o/p = 32x32,512
        self.layer3 = resnet.layer3  # res-4 o/p = 16x16,1024
        self.layer4 = resnet.layer4  # res-5 o/p = 8x8,2048
        self.gcn1 = GCN(256, self.num_classes)  # gcn_i after layer-i
        self.gcn2 = GCN(512, self.num_classes)
        self.gcn3 = GCN(1024, self.num_classes)
        # Decoder refinement GCNs operating on class-score maps.
        self.gcn1_1 = GCN(self.num_classes, self.num_classes)
        self.gcn2_1 = GCN(self.num_classes, self.num_classes)
        self.gcn3_1 = GCN(self.num_classes, self.num_classes)
        self.gcn4 = GCN(2048, self.num_classes)
        self.br1 = BR(num_classes)
        self.br2 = BR(num_classes)
        self.br3 = BR(num_classes)
        self.br4 = BR(num_classes)
        self.br5 = BR(num_classes)
        self.br6 = BR(num_classes)
        self.br7 = BR(num_classes)
        self.br5_1 = BR(num_classes)
        self.br6_1 = BR(num_classes)
        self.br7_1 = BR(num_classes)
        self.br8 = BR(num_classes)
        self.br9 = BR(num_classes)

    def _classifier(self, in_c):
        # BUG FIX: the original used float channel counts (in_c / 2) for
        # Conv2d/BatchNorm2d and left the first conv's output at in_c while
        # the following BatchNorm expected in_c / 2 channels.
        # NOTE(review): this helper is not called by forward(); confirm it
        # is still needed.
        return nn.Sequential(
            nn.Conv2d(in_c, in_c // 2, 3, padding=1, bias=False),
            nn.BatchNorm2d(in_c // 2),
            nn.ReLU(inplace=True),
            # nn.Dropout(.5),
            nn.Conv2d(in_c // 2, self.num_classes, 1),
        )

    def forward(self, x, debug=False, viz=None, patient=None, slice_index=None):
        """Encoder -> per-stage GCN+BR -> top-down fusion with one GCN
        refinement per stage; returns a score map at input resolution."""
        # input = x # 256
        x = self.conv1(x)
        x = self.bn0(x)
        x = self.relu(x)
        pooled_x = x  # 128
        fm1 = self.layer1(x)  # 64
        fm2 = self.layer2(fm1)  # 32
        fm3 = self.layer3(fm2)  # 16
        fm4 = self.layer4(fm3)  # 8
        gc_fm1 = self.br1(self.gcn1(fm1))  # 64
        gc_fm2 = self.br2(self.gcn2(fm2))
        gc_fm3 = self.br3(self.gcn3(fm3))
        gc_fm4 = self.br4(self.gcn4(fm4))
        if debug is True:
            self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1_0')
            self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm2_0')
            self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm3_0')
            self.heatmap(gc_fm4, viz, patient, slice_index, 'gc_fm4_0')
        gc_fm4 = F.upsample(gc_fm4, fm3.size()[2:], mode='bilinear', align_corners=True)
        gc_fm3_1 = self.br5(gc_fm3 + gc_fm4)
        gc_fm3_2 = self.br5_1(self.gcn3_1(gc_fm3_1))
        gc_fm3 = F.upsample(gc_fm3_2, fm2.size()[2:], mode='bilinear', align_corners=True)
        gc_fm2_1 = self.br6(gc_fm2 + gc_fm3)
        gc_fm2_2 = self.br6_1(self.gcn2_1(gc_fm2_1))
        gc_fm2 = F.upsample(gc_fm2_2, fm1.size()[2:], mode='bilinear', align_corners=True)
        gc_fm1_1 = self.br7(gc_fm1 + gc_fm2)
        gc_fm1_2 = self.br7_1(self.gcn1_1(gc_fm1_1))
        gc_fm1 = F.upsample(gc_fm1_2, pooled_x.size()[2:], mode='bilinear', align_corners=True)
        gc_fm1 = F.upsample(self.br8(gc_fm1), scale_factor=2, mode='bilinear', align_corners=True)
        if debug is True:
            self.heatmap(gc_fm4, viz, patient, slice_index, 'gc_fm1_4')
            self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm1_3')
            self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm1_2')
            self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1_1')
        out = self.br9(gc_fm1)
        return out

    def heatmap(self, input, viz, patient, slice_index, name):
        # Emit one visdom heatmap per channel of a 4-D feature map.
        n, c, h, w = input.shape
        fm1 = input.view(-1, h, w)
        c, h, w = fm1.shape
        for i in range(c):
            viz.heatmap(fm1[i], opts=dict(title=f'{patient + 1}_{slice_index + 1}_{name}_input_class_{i}'))
class FCN_GCN_0(nn.Module):
    """Baseline FCN with Global Convolutional Network heads over a
    ResNet-50 encoder: plain top-down fusion, no extra refinement GCNs."""

    def __init__(self, num_classes):
        super(FCN_GCN_0, self).__init__()
        self.num_classes = num_classes  # 21 in paper
        resnet = models.resnet50(pretrained=True)
        # input = 256x256
        self.conv1 = resnet.conv1  # 7x7,64, stride=2 o/p = 128x128
        self.bn0 = resnet.bn1  # BatchNorm2d(64)
        self.relu = resnet.relu
        self.layer1 = resnet.layer1  # res-2 o/p = 64x64,256
        self.layer2 = resnet.layer2  # res-3 o/p = 32x32,512
        self.layer3 = resnet.layer3  # res-4 o/p = 16x16,1024
        self.layer4 = resnet.layer4  # res-5 o/p = 8x8,2048
        self.gcn1 = GCN(256, self.num_classes)  # gcn_i after layer-i
        self.gcn2 = GCN(512, self.num_classes)
        self.gcn3 = GCN(1024, self.num_classes)
        self.gcn4 = GCN(2048, self.num_classes)
        self.br1 = BR(num_classes)
        self.br2 = BR(num_classes)
        self.br3 = BR(num_classes)
        self.br4 = BR(num_classes)
        self.br5 = BR(num_classes)
        self.br6 = BR(num_classes)
        self.br7 = BR(num_classes)
        self.br8 = BR(num_classes)
        self.br9 = BR(num_classes)

    def _classifier(self, in_c):
        # BUG FIX: the original used float channel counts (in_c / 2) for
        # Conv2d/BatchNorm2d and left the first conv's output at in_c while
        # the following BatchNorm expected in_c / 2 channels.
        # NOTE(review): this helper is not called by forward(); confirm it
        # is still needed.
        return nn.Sequential(
            nn.Conv2d(in_c, in_c // 2, 3, padding=1, bias=False),
            nn.BatchNorm2d(in_c // 2),
            nn.ReLU(inplace=True),
            # nn.Dropout(.5),
            nn.Conv2d(in_c // 2, self.num_classes, 1),
        )

    def forward(self, x, debug=False, viz=None, patient=None, slice_index=None):
        """Encoder -> per-stage GCN+BR -> plain top-down sum-and-upsample;
        returns a per-class score map at input resolution."""
        # input = x # 256
        x = self.conv1(x)
        x = self.bn0(x)
        x = self.relu(x)
        pooled_x = x  # 128
        fm1 = self.layer1(x)  # 64
        fm2 = self.layer2(fm1)  # 32
        fm3 = self.layer3(fm2)  # 16
        fm4 = self.layer4(fm3)  # 8
        gc_fm1 = self.br1(self.gcn1(fm1))  # 64
        gc_fm2 = self.br2(self.gcn2(fm2))
        gc_fm3 = self.br3(self.gcn3(fm3))
        gc_fm4 = self.br4(self.gcn4(fm4))
        if debug is True:
            self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1_0')
            self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm2_0')
            self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm3_0')
            self.heatmap(gc_fm4, viz, patient, slice_index, 'gc_fm4_0')
        gc_fm4 = F.upsample(gc_fm4, fm3.size()[2:], mode='bilinear', align_corners=True)
        gc_fm3 = F.upsample(self.br5(gc_fm3 + gc_fm4), fm2.size()[2:], mode='bilinear', align_corners=True)
        # gc_fm3 = F.upsample(self.br5(gc_fm3), fm2.size()[2:], mode='bilinear', align_corners=True)
        gc_fm2 = F.upsample(self.br6(gc_fm2 + gc_fm3), fm1.size()[2:], mode='bilinear', align_corners=True)
        gc_fm1 = F.upsample(self.br7(gc_fm1 + gc_fm2), pooled_x.size()[2:], mode='bilinear', align_corners=True)  # 128
        gc_fm1 = F.upsample(self.br8(gc_fm1), scale_factor=2, mode='bilinear', align_corners=True)
        if debug is True:
            self.heatmap(gc_fm4, viz, patient, slice_index, 'gc_fm1_4')
            self.heatmap(gc_fm3, viz, patient, slice_index, 'gc_fm1_3')
            self.heatmap(gc_fm2, viz, patient, slice_index, 'gc_fm1_2')
            self.heatmap(gc_fm1, viz, patient, slice_index, 'gc_fm1_1')
        out = self.br9(gc_fm1)
        return out

    def heatmap(self, input, viz, patient, slice_index, name):
        # Emit one visdom heatmap per channel of a 4-D feature map.
        n, c, h, w = input.shape
        fm1 = input.view(-1, h, w)
        c, h, w = fm1.shape
        for i in range(c):
            viz.heatmap(fm1[i], opts=dict(title=f'{patient + 1}_{slice_index + 1}_{name}_input_class_{i}'))
| 39.619396
| 119
| 0.593659
| 19,366
| 119,294
| 3.401322
| 0.014149
| 0.072112
| 0.05441
| 0.063762
| 0.974662
| 0.972582
| 0.968043
| 0.966646
| 0.964718
| 0.959678
| 0
| 0.091747
| 0.273543
| 119,294
| 3,010
| 120
| 39.632558
| 0.668332
| 0.151692
| 0
| 0.891294
| 0
| 0
| 0.028847
| 0.004479
| 0
| 0
| 0
| 0
| 0
| 1
| 0.050024
| false
| 0
| 0.002405
| 0.008658
| 0.0962
| 0.000481
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5e290f5d5bab8a288aacf45c35263b5457fb36dd
| 13,858
|
py
|
Python
|
tests/test_monkey_patching.py
|
mgorny/wrapt
|
264c06fd3850bd0cda6917ca3e87417b573e023f
|
[
"BSD-2-Clause"
] | 1,579
|
2015-01-01T09:30:58.000Z
|
2022-03-31T18:58:00.000Z
|
tests/test_monkey_patching.py
|
mgorny/wrapt
|
264c06fd3850bd0cda6917ca3e87417b573e023f
|
[
"BSD-2-Clause"
] | 174
|
2015-02-18T05:13:54.000Z
|
2022-03-30T22:09:07.000Z
|
tests/test_monkey_patching.py
|
mgorny/wrapt
|
264c06fd3850bd0cda6917ca3e87417b573e023f
|
[
"BSD-2-Clause"
] | 264
|
2015-01-23T07:46:46.000Z
|
2022-03-10T22:53:48.000Z
|
from __future__ import print_function
import unittest
import sys
import wrapt
def global_function_1(*args, **kwargs):
    """Echo positional and keyword arguments back as a tuple."""
    received = (args, kwargs)
    return received
def global_function_2(*args, **kwargs):
    """Echo positional and keyword arguments back as a tuple."""
    received = (args, kwargs)
    return received
def global_function_3(*args, **kwargs):
    """Echo positional and keyword arguments back as a tuple."""
    received = (args, kwargs)
    return received
def global_function_4(*args, **kwargs):
    """Echo positional and keyword arguments back as a tuple."""
    received = (args, kwargs)
    return received
class Class_1(object):
    """Target class with a plain instance method that echoes its arguments."""

    def method(self, *args, **kwargs):
        received = (args, kwargs)
        return received
class Class_2(object):
    """Target class whose classmethod echoes the class plus its arguments."""

    @classmethod
    def method(cls, *args, **kwargs):
        received = (cls, args, kwargs)
        return received
class Class_2_1(Class_2):
    """Intermediate subclass used to test wrapping of inherited classmethods."""
class Class_2_2(Class_2_1):
    """Second-level subclass used to test wrapping of inherited classmethods."""
class Class_3(object):
    """Target class with a staticmethod that echoes its arguments."""

    @staticmethod
    def method(*args, **kwargs):
        received = (args, kwargs)
        return received
class TestMonkeyPatching(unittest.TestCase):
    """Exercises wrapt's monkey-patching helpers against the module-level
    targets defined above.

    Each test wraps a target (function, instance/class/static method),
    records the (args, kwargs) seen by the wrapper, calls the target, and
    checks that both the wrapper and the original ran with the expected
    arguments.
    """

    def test_function_wrapper(self):
        # @wrapt.function_wrapper applied directly to a plain function.
        _args = (1, 2)
        _kwargs = {'one': 1, 'two': 2}
        called = []
        @wrapt.function_wrapper
        def wrapper(wrapped, instance, args, kwargs):
            called.append((args, kwargs))
            self.assertEqual(instance, None)
            self.assertEqual(args, _args)
            self.assertEqual(kwargs, _kwargs)
            return wrapped(*args, **kwargs)
        @wrapper
        def function(*args, **kwargs):
            return args, kwargs
        result = function(*_args, **_kwargs)
        self.assertEqual(result, (_args, _kwargs))
        self.assertEqual(called[0], (_args, _kwargs))

    def test_function_wrapper_instance_method(self):
        # The wrapper itself is an *instance method* (__call__) decorated
        # with @wrapt.function_wrapper; `_self` captures the test case
        # because `self` inside __call__ is the wrapper instance.
        _args = (1, 2)
        _kwargs = {'one': 1, 'two': 2}
        called = []
        _self = self
        class wrapper(object):
            @wrapt.function_wrapper
            def __call__(self, wrapped, instance, args, kwargs):
                _self.assertEqual(type(self), wrapper)
                called.append((args, kwargs))
                _self.assertEqual(instance, None)
                _self.assertEqual(args, _args)
                _self.assertEqual(kwargs, _kwargs)
                return wrapped(*args, **kwargs)
        @wrapper()
        def function(*args, **kwargs):
            return args, kwargs
        result = function(*_args, **_kwargs)
        self.assertEqual(result, (_args, _kwargs))
        self.assertEqual(called[0], (_args, _kwargs))

    def test_function_wrapper_class_method(self):
        # The wrapper is a classmethod; note @wrapt.function_wrapper sits
        # *outside* @classmethod here — TODO confirm this ordering is the
        # one wrapt documents.
        _args = (1, 2)
        _kwargs = {'one': 1, 'two': 2}
        called = []
        class wrapper(object):
            @wrapt.function_wrapper
            @classmethod
            def __call__(cls, wrapped, instance, args, kwargs):
                self.assertEqual(cls, wrapper)
                called.append((args, kwargs))
                self.assertEqual(instance, None)
                self.assertEqual(args, _args)
                self.assertEqual(kwargs, _kwargs)
                return wrapped(*args, **kwargs)
        @wrapper()
        def function(*args, **kwargs):
            return args, kwargs
        result = function(*_args, **_kwargs)
        self.assertEqual(result, (_args, _kwargs))
        self.assertEqual(called[0], (_args, _kwargs))

    def test_wrap_function_module_name(self):
        # wrap_function_wrapper resolving the module by *name* (string).
        _args = (1, 2)
        _kwargs = {'one': 1, 'two': 2}
        called = []
        def wrapper(wrapped, instance, args, kwargs):
            called.append((args, kwargs))
            self.assertEqual(instance, None)
            self.assertEqual(args, _args)
            self.assertEqual(kwargs, _kwargs)
            return wrapped(*args, **kwargs)
        wrapt.wrap_function_wrapper(__name__, 'global_function_1', wrapper)
        result = global_function_1(*_args, **_kwargs)
        self.assertEqual(result, (_args, _kwargs))
        self.assertEqual(called[0], (_args, _kwargs))

    def test_wrap_function_module(self):
        # wrap_function_wrapper given the module *object* instead of a name.
        _args = (1, 2)
        _kwargs = {'one': 1, 'two': 2}
        called = []
        def wrapper(wrapped, instance, args, kwargs):
            called.append((args, kwargs))
            self.assertEqual(instance, None)
            self.assertEqual(args, _args)
            self.assertEqual(kwargs, _kwargs)
            return wrapped(*args, **kwargs)
        module = sys.modules[__name__]
        wrapt.wrap_function_wrapper(module, 'global_function_2', wrapper)
        result = global_function_2(*_args, **_kwargs)
        self.assertEqual(result, (_args, _kwargs))
        self.assertEqual(called[0], (_args, _kwargs))

    def test_wrap_instance_method_module_name(self):
        # Wrapping an instance method via dotted path 'Class_1.method';
        # the wrapper should see the bound instance.
        _args = (1, 2)
        _kwargs = {'one': 1, 'two': 2}
        called = []
        _instance = Class_1()
        def wrapper(wrapped, instance, args, kwargs):
            called.append((args, kwargs))
            self.assertEqual(instance, _instance)
            self.assertEqual(args, _args)
            self.assertEqual(kwargs, _kwargs)
            return wrapped(*args, **kwargs)
        wrapt.wrap_function_wrapper(__name__, 'Class_1.method',
                wrapper)
        result = _instance.method(*_args, **_kwargs)
        self.assertEqual(result, (_args, _kwargs))
        self.assertEqual(called[0], (_args, _kwargs))

    def test_wrap_class_method_module_name(self):
        # Wrapping a classmethod: the wrapper's `instance` is the class.
        _args = (1, 2)
        _kwargs = {'one': 1, 'two': 2}
        called = []
        def wrapper(wrapped, instance, args, kwargs):
            called.append((args, kwargs))
            self.assertEqual(instance, Class_2)
            self.assertEqual(args, _args)
            self.assertEqual(kwargs, _kwargs)
            return wrapped(*args, **kwargs)
        wrapt.wrap_function_wrapper(__name__, 'Class_2.method',
                wrapper)
        result = Class_2.method(*_args, **_kwargs)
        self.assertEqual(result, (Class_2, _args, _kwargs))
        self.assertEqual(called[0], (_args, _kwargs))

    def test_wrap_class_method_inherited(self):
        # Wrapping a classmethod on a subclass: `cls` inside the original
        # should still be the class the call went through (Class_2_1 /
        # Class_2_2), not the defining class.
        _args = (1, 2)
        _kwargs = {'one': 1, 'two': 2}
        called = []
        def wrapper(wrapped, instance, args, kwargs):
            called.append((args, kwargs))
            self.assertEqual(args, _args)
            self.assertEqual(kwargs, _kwargs)
            return wrapped(*args, **kwargs)
        wrapt.wrap_function_wrapper(__name__, 'Class_2_1.method',
                wrapper)
        result = Class_2_1.method(*_args, **_kwargs)
        self.assertEqual(result, (Class_2_1, _args, _kwargs))
        self.assertEqual(called[0], (_args, _kwargs))
        called.pop()
        result = Class_2_2.method(*_args, **_kwargs)
        self.assertEqual(result, (Class_2_2, _args, _kwargs))
        self.assertEqual(called[0], (_args, _kwargs))

    def test_wrap_static_method_module_name(self):
        # Wrapping a staticmethod: the wrapper's `instance` is None.
        _args = (1, 2)
        _kwargs = {'one': 1, 'two': 2}
        called = []
        def wrapper(wrapped, instance, args, kwargs):
            called.append((args, kwargs))
            self.assertEqual(instance, None)
            self.assertEqual(args, _args)
            self.assertEqual(kwargs, _kwargs)
            return wrapped(*args, **kwargs)
        wrapt.wrap_function_wrapper(__name__, 'Class_3.method',
                wrapper)
        result = Class_3.method(*_args, **_kwargs)
        self.assertEqual(result, (_args, _kwargs))
        self.assertEqual(called[0], (_args, _kwargs))

    def test_patch_function_module_name(self):
        # @wrapt.patch_function_wrapper patches the target at decoration
        # time, resolving the module by name.
        _args = (1, 2)
        _kwargs = {'one': 1, 'two': 2}
        called = []
        @wrapt.patch_function_wrapper(__name__, 'global_function_3')
        def wrapper(wrapped, instance, args, kwargs):
            called.append((args, kwargs))
            self.assertEqual(instance, None)
            self.assertEqual(args, _args)
            self.assertEqual(kwargs, _kwargs)
            return wrapped(*args, **kwargs)
        result = global_function_3(*_args, **_kwargs)
        self.assertEqual(result, (_args, _kwargs))
        self.assertEqual(called[0], (_args, _kwargs))

    def test_patch_function_module(self):
        # Same as above but passing the module object.
        _args = (1, 2)
        _kwargs = {'one': 1, 'two': 2}
        called = []
        module = sys.modules[__name__]
        @wrapt.patch_function_wrapper(module, 'global_function_4')
        def wrapper(wrapped, instance, args, kwargs):
            called.append((args, kwargs))
            self.assertEqual(instance, None)
            self.assertEqual(args, _args)
            self.assertEqual(kwargs, _kwargs)
            return wrapped(*args, **kwargs)
        result = global_function_4(*_args, **_kwargs)
        self.assertEqual(result, (_args, _kwargs))
        self.assertEqual(called[0], (_args, _kwargs))

    def _test_transient_function_wrapper(self, *args, **kwargs):
        # Helper target (not itself a test) used by the transient-wrapper
        # tests below.
        return args, kwargs

    def test_transient_function_wrapper(self):
        # transient_function_wrapper: the patch is applied only while the
        # decorated function executes.
        _args = (1, 2)
        _kwargs = {'one': 1, 'two': 2}
        called = []
        @wrapt.transient_function_wrapper(__name__,
                'TestMonkeyPatching._test_transient_function_wrapper')
        def wrapper(wrapped, instance, args, kwargs):
            called.append((args, kwargs))
            self.assertEqual(wrapped, self._test_transient_function_wrapper)
            self.assertEqual(instance, self)
            self.assertEqual(args, _args)
            self.assertEqual(kwargs, _kwargs)
            return wrapped(*args, **kwargs)
        @wrapper
        def function(*args, **kwargs):
            return self._test_transient_function_wrapper(*args, **kwargs)
        result = function(*_args, **_kwargs)
        self.assertEqual(result, (_args, _kwargs))
        self.assertEqual(called[0], (_args, _kwargs))

    def test_transient_function_wrapper_instance_method(self):
        # Transient wrapper defined as an instance __call__; `_self`
        # captures the test case since `self` is the wrapper instance.
        _args = (1, 2)
        _kwargs = {'one': 1, 'two': 2}
        called = []
        _self = self
        class wrapper(object):
            @wrapt.transient_function_wrapper(__name__,
                    'TestMonkeyPatching._test_transient_function_wrapper')
            def __call__(self, wrapped, instance, args, kwargs):
                called.append((args, kwargs))
                _self.assertEqual(wrapped, _self._test_transient_function_wrapper)
                _self.assertEqual(instance, _self)
                _self.assertEqual(args, _args)
                _self.assertEqual(kwargs, _kwargs)
                return wrapped(*args, **kwargs)
        @wrapper()
        def function(*args, **kwargs):
            return self._test_transient_function_wrapper(*args, **kwargs)
        result = function(*_args, **_kwargs)
        self.assertEqual(result, (_args, _kwargs))
        self.assertEqual(called[0], (_args, _kwargs))
class TestExplicitMonkeyPatching(unittest.TestCase):
    """Patches methods explicitly (assignment) with @wrapt.function_wrapper,
    rather than via wrapt's wrap/patch helpers, and checks the wrapper
    still sees the bound instance."""

    def test_patch_instance_method_class(self):
        # Replace the method on the *class* via normal attribute access.
        _args = (1, 2)
        _kwargs = {'one': 1, 'two': 2}
        called = []
        @wrapt.function_wrapper
        def wrapper(wrapped, instance, args, kwargs):
            called.append((args, kwargs))
            self.assertEqual(instance, _instance)
            self.assertEqual(args, _args)
            self.assertEqual(kwargs, _kwargs)
            return wrapped(*args, **kwargs)
        class Class(object):
            def function(self, *args, **kwargs):
                return args, kwargs
        Class.function = wrapper(Class.function)
        _instance = Class()
        result = _instance.function(*_args, **_kwargs)
        self.assertEqual(result, (_args, _kwargs))
        self.assertEqual(called[0], (_args, _kwargs))

    def test_patch_instance_method_dict(self):
        # Same, but wrap the raw function pulled from the class __dict__
        # (unbound, no descriptor protocol applied).
        _args = (1, 2)
        _kwargs = {'one': 1, 'two': 2}
        called = []
        @wrapt.function_wrapper
        def wrapper(wrapped, instance, args, kwargs):
            called.append((args, kwargs))
            self.assertEqual(instance, _instance)
            self.assertEqual(args, _args)
            self.assertEqual(kwargs, _kwargs)
            return wrapped(*args, **kwargs)
        class Class(object):
            def function(self, *args, **kwargs):
                return args, kwargs
        Class.function = wrapper(vars(Class)['function'])
        _instance = Class()
        result = _instance.function(*_args, **_kwargs)
        self.assertEqual(result, (_args, _kwargs))
        self.assertEqual(called[0], (_args, _kwargs))

    def test_patch_instance_method_instance(self):
        # Patch the bound method on a single *instance* only.
        _args = (1, 2)
        _kwargs = {'one': 1, 'two': 2}
        called = []
        @wrapt.function_wrapper
        def wrapper(wrapped, instance, args, kwargs):
            called.append((args, kwargs))
            self.assertEqual(instance, _instance)
            self.assertEqual(args, _args)
            self.assertEqual(kwargs, _kwargs)
            return wrapped(*args, **kwargs)
        class Class(object):
            def function(self, *args, **kwargs):
                return args, kwargs
        _instance = Class()
        _instance.function = wrapper(_instance.function)
        result = _instance.function(*_args, **_kwargs)
        self.assertEqual(result, (_args, _kwargs))
        self.assertEqual(called[0], (_args, _kwargs))

    def test_patch_instance_method_extracted(self):
        # Wrap a bound method held in a local variable, without patching
        # the class or instance at all.
        _args = (1, 2)
        _kwargs = {'one': 1, 'two': 2}
        called = []
        @wrapt.function_wrapper
        def wrapper(wrapped, instance, args, kwargs):
            called.append((args, kwargs))
            self.assertEqual(instance, _instance)
            self.assertEqual(args, _args)
            self.assertEqual(kwargs, _kwargs)
            return wrapped(*args, **kwargs)
        class Class(object):
            def function(self, *args, **kwargs):
                return args, kwargs
        _instance = Class()
        function = wrapper(_instance.function)
        result = function(*_args, **_kwargs)
        self.assertEqual(result, (_args, _kwargs))
        self.assertEqual(called[0], (_args, _kwargs))
if __name__ == '__main__':
    # Allow running this test module directly with the stdlib runner.
    unittest.main()
| 29.236287
| 82
| 0.588252
| 1,423
| 13,858
| 5.416725
| 0.039353
| 0.180332
| 0.099896
| 0.178386
| 0.928516
| 0.879606
| 0.861962
| 0.852102
| 0.824338
| 0.800986
| 0
| 0.012844
| 0.292106
| 13,858
| 473
| 83
| 29.298097
| 0.772885
| 0
| 0
| 0.771341
| 0
| 0
| 0.024968
| 0.00736
| 0
| 0
| 0
| 0
| 0.27439
| 1
| 0.155488
| false
| 0.006098
| 0.012195
| 0.051829
| 0.314024
| 0.003049
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5e2fca90f90b61acd7d010d9b09862da300692c3
| 7,407
|
py
|
Python
|
mgsa/batch_plot.py
|
supernifty/mgsa
|
5f950f8c9c2bf0439a100a2348f1aef478e32934
|
[
"MIT"
] | 2
|
2016-11-02T20:27:00.000Z
|
2019-10-23T08:14:44.000Z
|
mgsa/batch_plot.py
|
supernifty/mgsa
|
5f950f8c9c2bf0439a100a2348f1aef478e32934
|
[
"MIT"
] | null | null | null |
mgsa/batch_plot.py
|
supernifty/mgsa
|
5f950f8c9c2bf0439a100a2348f1aef478e32934
|
[
"MIT"
] | null | null | null |
import sys
import matplotlib.pyplot as plt
import numpy as np
import helpers
def plot_series():
    # Plot mean reference bias (%) against reference mutation rate, read
    # from the pipeline *batch* report named in sys.argv[1].
    # NOTE(review): this module uses Python 2 print statements elsewhere —
    # it targets Python 2.
    xs, ys = helpers.series_from_pipeline_batch( fh=open(sys.argv[1]), x='mult_snp_prob', y='mean_reference', bias_report=True )
    xs = [ x * 100 for x in xs ]  # fractions -> percentages
    ys = [ y * 100 for y in ys ]  # fractions -> percentages
    plt.plot( xs, ys, label='Bias' )
    plt.ylabel('Reference Bias (%)')
    plt.xlabel('Reference Mutation')
    # Alternative series (unmapped reads) kept for reference:
    #xs, ys = helpers.series_from_pipeline_batch( fh=open(sys.argv[1]), x='mult_snp_prob', y='unmapped', bias_report=True )
    #xs = [ x * 100 for x in xs ]
    #ys = [ y * 100 for y in ys ]
    #plt.plot( xs, ys, label='Unmapped' )
    #plt.ylabel('Unmapped (%)')
    #plt.xlabel('Reference Mutation')
    plt.legend()
    plt.show()
def plot_error_bias():
    # Bar chart of the error-bias histogram from the pipeline result file
    # named in sys.argv[1].
    #xs, ys = helpers.series_from_pipeline_result( fh=open(sys.argv[1]), y='reference_bias', bias_report=True )
    xs, ys = helpers.series_from_pipeline_result( fh=open(sys.argv[1]), y='error_bias', bias_report=True )
    xs = [ x * 100 for x in xs ]  # fractions -> percentages
    print xs, ys
    # Add a tiny epsilon so zero-count bars still render — TODO confirm
    # this workaround is still needed.
    ys = [ y + 1e-7 for y in ys ]
    width = 100. / len(ys) * 0.9
    # NOTE(review): plt.bar's 'left' keyword is from old matplotlib APIs
    # (renamed to 'x' in newer releases) — confirm the pinned version.
    plt.bar( left=xs, height=ys, label='Bias', width=width )
    plt.ylabel('SNPs')
    plt.xlabel('Error Bias (%)')
    #plt.xlabel('Reference Bias (%)')
    plt.legend()
    plt.show()
def plot_reference_bias(include_zero=False):
    # Bar chart of the reference-bias histogram (as % of SNPs) from the
    # pipeline result file named in sys.argv[1].
    # include_zero: when False, the zero-bias bucket is dropped and the
    # x-axis starts at 5%.
    xs, ys = helpers.series_from_pipeline_result( fh=open(sys.argv[1]), y='reference_bias', bias_report=True )
    xs = [ x * 100 for x in xs ]  # fractions -> percentages
    print xs, ys
    if include_zero:
        plt.xlim( xmin=-5, xmax=110 )
    else:
        plt.xlim( xmin=5, xmax=110 )
    # Scale to percentages and add a tiny epsilon so zero-height bars
    # still render — TODO confirm the epsilon is still needed.
    ys = [ y * 100 + 1e-7 for y in ys ]
    width = 100. / len(ys) * 0.75
    #plt.bar( left=xs, height=ys, label='Bias', width=width )
    # NOTE(review): plt.bar's 'left' keyword is from old matplotlib APIs
    # (renamed to 'x' in newer releases) — confirm the pinned version.
    if include_zero:
        plt.bar( left=[x for x in xs], height=ys, label='SNPs affected (%)', width=width, color='b', log=False, align='center')
    else:
        plt.bar( left=[x for x in xs][1:], height=ys[1:], label='SNPs affected (%)', width=width, color='b', log=False, align='center')
    plt.ylabel('SNPs (%)')
    plt.xlabel('Reference Bias (%)')
    plt.legend()
    plt.show()
def plot_bias( include_zero=False, include_unmapped=False ):
    # Grouped bar chart of reference bias vs error bias histograms from the
    # pipeline result file named in sys.argv[1], with the averages printed
    # as a text annotation.
    # include_zero: keep the zero-bias bucket; include_unmapped: append an
    # extra 'Unmapped' bucket at x=110.
    xs, yrs = helpers.series_from_pipeline_result( fh=open(sys.argv[1]), y='reference_bias', bias_report=True, item=1 )
    _, yes = helpers.series_from_pipeline_result( fh=open(sys.argv[1]), y='error_bias', bias_report=True, item=1 )
    xs = [ x * 100 for x in xs ]  # fractions -> percentages
    # determine averages (reported in the annotation box)
    expected_err = helpers.item_from_pipeline_result( fh=open(sys.argv[1]), y='mean_error', bias_report=True, item=1 ) * 100.
    expected_ref = helpers.item_from_pipeline_result( fh=open(sys.argv[1]), y='mean_reference', bias_report=True, item=1 ) * 100.
    # Older manual average computation kept for reference:
    #expected_ref = 0.
    #for i in xrange(len(xs)):
    #  expected_ref += xs[i] * yrs[i]
    #expected_err = 0.
    #for i in xrange(len(xs)):
    #  expected_err += xs[i] * yes[i]
    plt.text( 108, 2.5, "Average reference bias: %.2f%%\nAverage error bias: %.2f%%" % ( expected_ref, expected_err ), ha='right' )
    if include_zero:
        plt.xlim( xmin=-5, xmax=110 )
    else:
        plt.xlim( xmin=5, xmax=110 )
    if include_unmapped:
        plt.xlim( xmax=115 )
        xs.append( 110. ) # synthetic x position for the 'Unmapped' bucket — fix later
        unmapped = helpers.item_from_pipeline_result( fh=open(sys.argv[1]), y='unmapped_variations', bias_report=True )
        total = helpers.item_from_pipeline_result( fh=open(sys.argv[1]), y='total_variations', bias_report=True )
        yrs.append( 1.0 * unmapped / total )
        yes.append( 0 ) # dummy: no error-bias value for the unmapped bucket
        plt.xticks( xs[1:], [ '%i' % int(x) for x in xs[1:-1] ] + [ 'Unmapped' ], rotation=-45 )
    print xs, yrs, yes
    # Scale to percentages and add a tiny epsilon so zero-height bars
    # still render — TODO confirm the epsilon is still needed.
    yrs = [ y * 100 + 1e-7 for y in yrs ]
    yes = [ y * 100 + 1e-7 for y in yes ]
    width = 100. / len(yrs) * 0.4
    #plt.bar( left=xs, height=yrs, label='Reference Bias', width=width, color='b', log=True )
    #plt.bar( left=[x+width for x in xs], height=yes, label='Error Bias', width=width, color='y', log=True )
    # NOTE(review): plt.bar's 'left' keyword is from old matplotlib APIs
    # (renamed to 'x' in newer releases) — confirm the pinned version.
    if include_zero:
        plt.bar( left=[x-width/2 for x in xs], height=yrs, label='Reference Bias', width=width, color='b', log=False, align='center')
        plt.bar( left=[x+width/2 for x in xs], height=yes, label='Error Bias', width=width, color='g', log=False, align='center')
    else:
        plt.bar( left=[x-width/2 for x in xs][1:], height=yrs[1:], label='Reference Bias', width=width, color='b', log=False, align='center')
        plt.bar( left=[x+width/2 for x in xs][1:], height=yes[1:], label='Error Bias', width=width, color='g', log=False, align='center')
    plt.ylabel('SNVs (%)')
    plt.xlabel('Bias (%)')
    plt.legend()
    plt.show()
def plot_bias_compare( include_unmapped=False, column_offset=0 ):
    # Grouped bar chart comparing the reference-bias histograms of two
    # result items (labelled '50 bp' and '100 bp') from the pipeline result
    # file named in sys.argv[1].
    xs, yrs = helpers.series_from_pipeline_result( fh=open(sys.argv[1]), y='reference_bias', bias_report=True, column_offset=column_offset )
    _, yes = helpers.series_from_pipeline_result( fh=open(sys.argv[1]), y='reference_bias', bias_report=True, item=2, column_offset=column_offset )
    xs = [ x * 100 for x in xs ]  # fractions -> percentages
    plt.xlim( xmin=5, xmax=110 )
    if include_unmapped:
        plt.xlim( xmax=115 )
        xs.append( 110. ) # synthetic x position for the 'Unmapped' bucket — fix later
        unmapped = helpers.item_from_pipeline_result( fh=open(sys.argv[1]), y='unmapped_variations', bias_report=True, column_offset=column_offset )
        total = helpers.item_from_pipeline_result( fh=open(sys.argv[1]), y='total_variations', bias_report=True, column_offset=column_offset )
        yrs.append( 1.0 * unmapped / total )
        unmapped = helpers.item_from_pipeline_result( fh=open(sys.argv[1]), y='unmapped_variations', bias_report=True, item=2, column_offset=column_offset )
        total = helpers.item_from_pipeline_result( fh=open(sys.argv[1]), y='total_variations', bias_report=True, item=2, column_offset=column_offset )
        yes.append( 1.0 * unmapped / total )
        plt.xticks( xs[1:], [ '%i' % int(x) for x in xs[1:-1] ] + [ 'Unmapped' ], rotation=-45 )
    print xs, yrs, yes
    # Scale to percentages and add a tiny epsilon so zero-height bars
    # still render — TODO confirm the epsilon is still needed.
    yrs = [ y * 100 + 1e-7 for y in yrs ]
    yes = [ y * 100 + 1e-7 for y in yes ]
    width = 100. / len(yrs) * 0.35
    #plt.bar( left=xs, height=yrs, label='Reference Bias', width=width, color='b', log=True )
    #plt.bar( left=[x+width for x in xs], height=yes, label='Error Bias', width=width, color='y', log=True )
    # NOTE(review): plt.bar's 'left' keyword is from old matplotlib APIs
    # (renamed to 'x' in newer releases) — confirm the pinned version.
    plt.bar( left=[x-width/2 for x in xs][1:], height=yrs[1:], label='50 bp', width=width, color='b', log=False, align='center')
    plt.bar( left=[x+width/2 for x in xs][1:], height=yes[1:], label='100 bp', width=width, color='g', log=False, align='center')
    plt.ylabel('SNVs (%)')
    plt.xlabel('Bias (%)')
    # determine averages (reported in the annotation box)
    expected_ref_1 = helpers.item_from_pipeline_result( fh=open(sys.argv[1]), y='mean_reference', bias_report=True, item=1, column_offset=column_offset ) * 100.
    expected_ref_2 = helpers.item_from_pipeline_result( fh=open(sys.argv[1]), y='mean_reference', bias_report=True, item=2, column_offset=column_offset ) * 100.
    plt.text( 108, 2.5, "Average bias (50bp): %.1f%%\nAverage bias (100bp): %.1f%%" % ( expected_ref_1, expected_ref_2 ), ha='right' )
    if include_unmapped:
        plt.legend(loc='upper left')
    else:
        plt.legend(loc='upper right')
    #plt.legend()
    plt.show()
if __name__ == '__main__':
    from matplotlib import rcParams
    rcParams.update({'figure.autolayout': True})  # avoid clipped tick labels
    # Only one plot is active at a time; the alternatives are kept
    # commented out for quick switching.
    #plot_series()
    #plot_error_bias()
    plot_reference_bias(include_zero=False)
    #plot_bias(include_zero=False, include_unmapped=True)
    #plot_bias_compare( include_unmapped=True, column_offset=0 )
    #plot_bias_compare( include_unmapped=False, column_offset=0 )
| 44.890909
| 158
| 0.668422
| 1,216
| 7,407
| 3.925987
| 0.100329
| 0.047759
| 0.035819
| 0.051739
| 0.854839
| 0.83054
| 0.788018
| 0.756179
| 0.719941
| 0.684751
| 0
| 0.033901
| 0.159714
| 7,407
| 164
| 159
| 45.164634
| 0.73313
| 0.177265
| 0
| 0.463636
| 0
| 0
| 0.118831
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.045455
| null | null | 0.036364
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5e3c2f2d5e7614bad7d49f60a0142746dbe1ad71
| 20,963
|
py
|
Python
|
tests/test_15_transaction_complex.py
|
asymworks/jadetree-backend
|
5764d9971ef3fdc85b0b9cd51fad82076f464ae4
|
[
"BSD-3-Clause"
] | 7
|
2021-11-02T05:58:58.000Z
|
2022-03-04T22:16:20.000Z
|
tests/test_15_transaction_complex.py
|
asymworks/jadetree-backend
|
5764d9971ef3fdc85b0b9cd51fad82076f464ae4
|
[
"BSD-3-Clause"
] | 5
|
2021-01-27T14:18:01.000Z
|
2022-03-04T22:03:49.000Z
|
tests/test_15_transaction_complex.py
|
asymworks/jadetree-backend
|
5764d9971ef3fdc85b0b9cd51fad82076f464ae4
|
[
"BSD-3-Clause"
] | null | null | null |
# =============================================================================
#
# Jade Tree Personal Budgeting Application | jadetree.io
# Copyright (c) 2020 Asymworks, LLC. All Rights Reserved.
#
# =============================================================================
from decimal import Decimal
import pytest # noqa: F401
from jadetree.domain.models import Account, Transaction, User
from jadetree.domain.types import AccountRole, AccountType, TransactionType
from .helpers import check_transaction_entries as check_entries
# Always use 'app' fixture so ORM gets initialized
pytestmark = pytest.mark.usefixtures('app')
def test_transaction_can_add_splits_bff_nonsplit():
    """Single-category split: EUR-denominated transaction, USD accounts.

    At exchrate 0.8, the EUR 100 split books as USD 125 on both the
    account line and the category line; the split amount stays EUR 100.
    """
    # Base Currency Transaction between Foreign Currency Accounts
    # Use Case: ??
    # Setup User and Accounts
    u = User(currency='EUR')
    a = Account(user=u, active=True, name='A', role=AccountRole.Personal, type=AccountType.Liability, currency='USD')
    o = Account(user=u, active=True, name='O', role=AccountRole.Budget, type=AccountType.Expense, currency='USD')
    # Setup Transaction
    t = Transaction(
        user=u,
        account=a,
        currency='EUR',
        foreign_currency='USD',
        foreign_exchrate=Decimal('0.8'),
    )
    # Add Transaction Line for EUR 100
    t.add_split(o, Decimal(100), 'EUR')
    # Ensure Lines were Added (amounts are in the accounts' USD: 100 / 0.8)
    assert t.lines is not None
    assert isinstance(t.lines, list)
    assert len(t.lines) == 2
    assert t.lines[0].account == a
    assert t.lines[0].amount == Decimal(125)
    assert t.lines[1].account == o
    assert t.lines[1].amount == Decimal(125)
    # Ensure Split was Added
    assert t.splits is not None
    assert isinstance(t.splits, list)
    assert len(t.splits) == 1
    assert t.splits[0].left_line == t.lines[0]
    assert t.splits[0].right_line == t.lines[1]
    assert t.splits[0].type == TransactionType.Outflow
    assert t.splits[0].amount == Decimal(100)
    # Ensure Entries were Added
    check_entries(t.splits[0], [
        (a, Decimal(125), 'USD'),
        (o, Decimal(125), 'USD'),
    ])
    assert t.amount == Decimal(100)
def test_transaction_can_add_splits_bff_split():
    """Two-category split: EUR 80 + EUR 20 across two USD expense accounts.

    At exchrate 0.8 the account line carries USD 125 total while the
    category lines carry USD 100 and USD 25 respectively.
    """
    # Base Currency Transaction between Foreign Currency Accounts
    # Use Case: ??
    # Setup User and Accounts
    u = User(currency='EUR')
    a = Account(user=u, active=True, name='A', role=AccountRole.Personal, type=AccountType.Liability, currency='USD')
    o1 = Account(user=u, active=True, name='O1', role=AccountRole.Budget, type=AccountType.Expense, currency='USD')
    o2 = Account(user=u, active=True, name='O2', role=AccountRole.Budget, type=AccountType.Expense, currency='USD')
    # Setup Transaction
    t = Transaction(
        user=u,
        account=a,
        currency='EUR',
        foreign_currency='USD',
        foreign_exchrate=Decimal('0.8'),
    )
    # Add Transaction Lines for EUR 80 + EUR 20 (EUR 100 total)
    t.add_split(o1, Decimal(80), 'EUR')
    t.add_split(o2, Decimal(20), 'EUR')
    # Ensure Lines were Added (one account line + one line per category)
    assert t.lines is not None
    assert isinstance(t.lines, list)
    assert len(t.lines) == 3
    assert t.lines[0].account == a
    assert t.lines[0].amount == Decimal(125)
    assert t.lines[1].account == o1
    assert t.lines[1].amount == Decimal(100)
    assert t.lines[2].account == o2
    assert t.lines[2].amount == Decimal(25)
    # Ensure Split was Added (both splits share the account-side line)
    assert t.splits is not None
    assert isinstance(t.splits, list)
    assert len(t.splits) == 2
    assert t.splits[0].left_line == t.lines[0]
    assert t.splits[0].right_line == t.lines[1]
    assert t.splits[0].type == TransactionType.Outflow
    assert t.splits[0].amount == Decimal(80)
    assert t.splits[1].left_line == t.lines[0]
    assert t.splits[1].right_line == t.lines[2]
    assert t.splits[1].type == TransactionType.Outflow
    assert t.splits[1].amount == Decimal(20)
    # Ensure Entries were Added
    check_entries(t.splits[0], [
        (a, Decimal(100), 'USD'),
        (o1, Decimal(100), 'USD'),
    ])
    check_entries(t.splits[1], [
        (a, Decimal(25), 'USD'),
        (o2, Decimal(25), 'USD'),
    ])
    assert t.amount == Decimal(100)
def test_transaction_can_add_splits_ffb_nonsplit():
    """EUR transaction from a EUR account to a USD category via trading.

    Requires a trading account: EUR -100 leaves the asset account and
    USD 125 (at 1.25) lands on the category; the trading account carries
    both legs of the currency conversion.
    """
    # Foreign Transaction between Foreign and Base Currency
    # Use Case: Expat budgeting in "home" currency spending from "local"
    # checking account
    # Setup User and Accounts
    u = User(currency='USD')
    a = Account(user=u, active=True, name='A', role=AccountRole.Personal, type=AccountType.Asset, currency='EUR')
    o = Account(user=u, active=True, name='O', role=AccountRole.Budget, type=AccountType.Expense, currency='USD')
    ta = Account(user=u, active=True, name='T', role=AccountRole.System, type=AccountType.Trading, currency=u.currency)
    # Setup Transaction
    t = Transaction(
        user=u,
        account=a,
        currency='EUR',
        foreign_currency='EUR',
        foreign_exchrate=Decimal('1.25'),
    )
    # Add Transaction Line for EUR 100
    t.add_split(o, Decimal(-100), 'EUR', trading=ta)
    # Ensure Lines were Added (third line belongs to the trading account)
    assert t.lines is not None
    assert isinstance(t.lines, list)
    assert len(t.lines) == 3
    assert t.lines[0].account == a
    assert t.lines[0].amount == Decimal(-100)
    assert t.lines[1].account == o
    assert t.lines[1].amount == Decimal(125)
    assert t.lines[2].account == ta
    # Ensure Split was Added
    assert t.splits is not None
    assert isinstance(t.splits, list)
    assert len(t.splits) == 1
    assert t.splits[0].left_line == t.lines[0]
    assert t.splits[0].right_line == t.lines[1]
    assert t.splits[0].type == TransactionType.Outflow
    assert t.splits[0].amount == Decimal(-100)
    # Ensure Entries were Added (four entries: two per currency leg)
    check_entries(t.splits[0], [
        (a, Decimal(-100), 'EUR'),
        (ta, Decimal(-100), 'EUR'),
        (ta, Decimal( 125), 'USD'),
        (o, Decimal( 125), 'USD'),
    ])
    assert t.amount == Decimal(-100)
def test_transaction_can_add_splits_ffb_split():
    """Two-category EUR split (80 + 20) from a EUR account to USD categories.

    Each split routes through the trading account; at 1.25 the category
    lines carry USD 100 and USD 25.
    """
    # NOTE(review): header comment below looks copy-pasted -- the account
    # here is EUR, not base currency; verify against the test name (ffb).
    # Foreign Transaction between Base Currency Accounts
    # Use Case: Buy something while travelling in a foreign country
    # Setup User and Accounts
    u = User(currency='USD')
    a = Account(user=u, active=True, name='A', role=AccountRole.Personal, type=AccountType.Liability, currency='EUR')
    o1 = Account(user=u, active=True, name='O1', role=AccountRole.Budget, type=AccountType.Expense, currency='USD')
    o2 = Account(user=u, active=True, name='O2', role=AccountRole.Budget, type=AccountType.Expense, currency='USD')
    ta = Account(user=u, active=True, name='T', role=AccountRole.System, type=AccountType.Trading, currency=u.currency)
    # Setup Transaction
    t = Transaction(
        user=u,
        account=a,
        currency='EUR',
        foreign_currency='EUR',
        foreign_exchrate=Decimal('1.25'),
    )
    # Add Transaction Lines for EUR 80 + EUR 20
    t.add_split(o1, Decimal(80), 'EUR', trading=ta)
    t.add_split(o2, Decimal(20), 'EUR', trading=ta)
    # Ensure Lines were Added (lines[2] is the trading-account line;
    # the second category line lands at index 3)
    assert t.lines is not None
    assert isinstance(t.lines, list)
    assert len(t.lines) == 4
    assert t.lines[0].account == a
    assert t.lines[0].amount == Decimal(100)
    assert t.lines[1].account == o1
    assert t.lines[1].amount == Decimal(100)
    assert t.lines[3].account == o2
    assert t.lines[3].amount == Decimal(25)
    assert t.lines[2].account == ta
    # Ensure Split was Added
    assert t.splits is not None
    assert isinstance(t.splits, list)
    assert len(t.splits) == 2
    assert t.splits[0].left_line == t.lines[0]
    assert t.splits[0].right_line == t.lines[1]
    assert t.splits[0].type == TransactionType.Outflow
    assert t.splits[0].amount == Decimal(80)
    assert t.splits[1].left_line == t.lines[0]
    assert t.splits[1].right_line == t.lines[3]
    assert t.splits[1].type == TransactionType.Outflow
    assert t.splits[1].amount == Decimal(20)
    # Ensure Entries were Added
    check_entries(t.splits[0], [
        ( a, Decimal( 80), 'EUR'),
        (ta, Decimal( -80), 'EUR'),
        (ta, Decimal( 100), 'USD'),
        (o1, Decimal( 100), 'USD'),
    ])
    check_entries(t.splits[1], [
        ( a, Decimal( 20), 'EUR'),
        (ta, Decimal( -20), 'EUR'),
        (ta, Decimal( 25), 'USD'),
        (o2, Decimal( 25), 'USD'),
    ])
    assert t.amount == Decimal(100)
def test_transaction_can_add_splits_fbf_nonsplit():
    """EUR transaction from a USD account to a EUR category via trading.

    At 1.25, the USD asset account gives up -125 to fund EUR 100 on the
    category; the trading account balances both currency legs.
    """
    # Foreign Transaction between Foreign and Base Currency
    # Use Case: Expat budgeting in "foreign" currency spending from "local"
    # checking account
    # Setup User and Accounts
    u = User(currency='USD')
    a = Account(user=u, active=True, name='A', role=AccountRole.Personal, type=AccountType.Asset, currency='USD')
    o = Account(user=u, active=True, name='O', role=AccountRole.Budget, type=AccountType.Expense, currency='EUR')
    ta = Account(user=u, active=True, name='T', role=AccountRole.System, type=AccountType.Trading, currency=u.currency)
    # Setup Transaction
    t = Transaction(
        user=u,
        account=a,
        currency='EUR',
        foreign_currency='EUR',
        foreign_exchrate=Decimal('1.25'),
    )
    # Add Transaction Line for EUR 100
    t.add_split(o, Decimal(-100), 'EUR', trading=ta)
    # Ensure Lines were Added (third line belongs to the trading account)
    assert t.lines is not None
    assert isinstance(t.lines, list)
    assert len(t.lines) == 3
    assert t.lines[0].account == a
    assert t.lines[0].amount == Decimal(-125)
    assert t.lines[1].account == o
    assert t.lines[1].amount == Decimal(100)
    assert t.lines[2].account == ta
    # Ensure Split was Added
    assert t.splits is not None
    assert isinstance(t.splits, list)
    assert len(t.splits) == 1
    assert t.splits[0].left_line == t.lines[0]
    assert t.splits[0].right_line == t.lines[1]
    assert t.splits[0].type == TransactionType.Outflow
    assert t.splits[0].amount == Decimal(-100)
    # Ensure Entries were Added
    check_entries(t.splits[0], [
        (a, Decimal(-125), 'USD'),
        (ta, Decimal(-125), 'USD'),
        (ta, Decimal( 100), 'EUR'),
        (o, Decimal( 100), 'EUR'),
    ])
    assert t.amount == Decimal(-100)
def test_transaction_can_add_splits_fbf_split():
    """Two-category EUR split (80 + 20) from a USD account to EUR categories.

    The USD account line carries 125 total; each split routes through the
    trading account with its own USD/EUR entry pair.
    """
    # NOTE(review): header comment below looks copy-pasted -- the category
    # accounts here are EUR, not base currency; verify against the name (fbf).
    # Foreign Transaction between Base Currency Accounts
    # Use Case: Buy something while travelling in a foreign country
    # Setup User and Accounts
    u = User(currency='USD')
    a = Account(user=u, active=True, name='A', role=AccountRole.Personal, type=AccountType.Liability, currency='USD')
    o1 = Account(user=u, active=True, name='O1', role=AccountRole.Budget, type=AccountType.Expense, currency='EUR')
    o2 = Account(user=u, active=True, name='O2', role=AccountRole.Budget, type=AccountType.Expense, currency='EUR')
    ta = Account(user=u, active=True, name='T', role=AccountRole.System, type=AccountType.Trading, currency=u.currency)
    # Setup Transaction
    t = Transaction(
        user=u,
        account=a,
        currency='EUR',
        foreign_currency='EUR',
        foreign_exchrate=Decimal('1.25'),
    )
    # Add Transaction Lines for EUR 80 + EUR 20 (EUR 100 total)
    t.add_split(o1, Decimal(80), 'EUR', trading=ta)
    t.add_split(o2, Decimal(20), 'EUR', trading=ta)
    # Ensure Lines were Added (lines[2] is the trading-account line;
    # the second category line lands at index 3)
    assert t.lines is not None
    assert isinstance(t.lines, list)
    assert len(t.lines) == 4
    assert t.lines[0].account == a
    assert t.lines[0].amount == Decimal(125)
    assert t.lines[1].account == o1
    assert t.lines[1].amount == Decimal(80)
    assert t.lines[3].account == o2
    assert t.lines[3].amount == Decimal(20)
    assert t.lines[2].account == ta
    # Ensure Split was Added
    assert t.splits is not None
    assert isinstance(t.splits, list)
    assert len(t.splits) == 2
    assert t.splits[0].left_line == t.lines[0]
    assert t.splits[0].right_line == t.lines[1]
    assert t.splits[0].type == TransactionType.Outflow
    assert t.splits[0].amount == Decimal(80)
    assert t.splits[1].left_line == t.lines[0]
    assert t.splits[1].right_line == t.lines[3]
    assert t.splits[1].type == TransactionType.Outflow
    assert t.splits[1].amount == Decimal(20)
    # Ensure Entries were Added
    check_entries(t.splits[0], [
        ( a, Decimal( 100), 'USD'),
        (ta, Decimal(-100), 'USD'),
        (ta, Decimal( 80), 'EUR'),
        (o1, Decimal( 80), 'EUR'),
    ])
    check_entries(t.splits[1], [
        ( a, Decimal( 25), 'USD'),
        (ta, Decimal( -25), 'USD'),
        (ta, Decimal( 20), 'EUR'),
        (o2, Decimal( 20), 'EUR'),
    ])
    assert t.amount == Decimal(100)
def test_transaction_can_add_splits_bfb_nonsplit():
    """EUR transaction from a USD account to a EUR category via trading.

    At exchrate 0.8 the USD account gives up -125 to fund EUR 100 on the
    category; the trading account carries both currency legs.
    """
    # Foreign Transaction between Foreign and Base Currency
    # Use Case: Expat budgeting in "local" currency spending from "home"
    # checking account
    # Setup User and Accounts
    u = User(currency='EUR')
    a = Account(user=u, active=True, name='A', role=AccountRole.Personal, type=AccountType.Asset, currency='USD')
    o = Account(user=u, active=True, name='O', role=AccountRole.Budget, type=AccountType.Expense, currency='EUR')
    ta = Account(user=u, active=True, name='T', role=AccountRole.System, type=AccountType.Trading, currency=u.currency)
    # Setup Transaction
    t = Transaction(
        user=u,
        account=a,
        currency='EUR',
        foreign_currency='USD',
        foreign_exchrate=Decimal('0.8'),
    )
    # Add Transaction Line for EUR 100
    t.add_split(o, Decimal(-100), 'EUR', trading=ta)
    # Ensure Lines were Added (third line belongs to the trading account)
    assert t.lines is not None
    assert isinstance(t.lines, list)
    assert len(t.lines) == 3
    assert t.lines[0].account == a
    assert t.lines[0].amount == Decimal(-125)
    assert t.lines[1].account == o
    assert t.lines[1].amount == Decimal(100)
    assert t.lines[2].account == ta
    # Ensure Split was Added
    assert t.splits is not None
    assert isinstance(t.splits, list)
    assert len(t.splits) == 1
    assert t.splits[0].left_line == t.lines[0]
    assert t.splits[0].right_line == t.lines[1]
    assert t.splits[0].type == TransactionType.Outflow
    assert t.splits[0].amount == Decimal(-100)
    # Ensure Entries were Added
    check_entries(t.splits[0], [
        (a, Decimal(-125), 'USD'),
        (ta, Decimal(-125), 'USD'),
        (ta, Decimal( 100), 'EUR'),
        (o, Decimal( 100), 'EUR'),
    ])
    assert t.amount == Decimal(-100)
def test_transaction_can_add_splits_bfb_split():
    """Two-category USD split (100 + 25) from a EUR account to USD categories.

    At 1.25 each split routes through the trading account; the EUR legs
    carry 80 and 20 respectively.
    """
    # NOTE(review): header comment below looks copy-pasted -- the account
    # here is EUR while the transaction is USD; verify against the name (bfb).
    # Foreign Transaction between Base Currency Accounts
    # Use Case: Buy something while travelling in a foreign country
    # Setup User and Accounts
    u = User(currency='USD')
    a = Account(user=u, active=True, name='A', role=AccountRole.Personal, type=AccountType.Liability, currency='EUR')
    o1 = Account(user=u, active=True, name='O1', role=AccountRole.Budget, type=AccountType.Expense, currency='USD')
    o2 = Account(user=u, active=True, name='O2', role=AccountRole.Budget, type=AccountType.Expense, currency='USD')
    ta = Account(user=u, active=True, name='T', role=AccountRole.System, type=AccountType.Trading, currency=u.currency)
    # Setup Transaction
    t = Transaction(
        user=u,
        account=a,
        currency='USD',
        foreign_currency='EUR',
        foreign_exchrate=Decimal('1.25'),
    )
    # Add Transaction Lines for USD 100 + USD 25 (USD 125 total)
    t.add_split(o1, Decimal(100), 'USD', trading=ta)
    t.add_split(o2, Decimal(25), 'USD', trading=ta)
    # Ensure Lines were Added (lines[2] is the trading-account line;
    # the second category line lands at index 3)
    assert t.lines is not None
    assert isinstance(t.lines, list)
    assert len(t.lines) == 4
    assert t.lines[0].account == a
    assert t.lines[0].amount == Decimal(100)
    assert t.lines[1].account == o1
    assert t.lines[1].amount == Decimal(100)
    assert t.lines[3].account == o2
    assert t.lines[3].amount == Decimal(25)
    assert t.lines[2].account == ta
    # Ensure Split was Added
    assert t.splits is not None
    assert isinstance(t.splits, list)
    assert len(t.splits) == 2
    assert t.splits[0].left_line == t.lines[0]
    assert t.splits[0].right_line == t.lines[1]
    assert t.splits[0].type == TransactionType.Outflow
    assert t.splits[0].amount == Decimal(100)
    assert t.splits[1].left_line == t.lines[0]
    assert t.splits[1].right_line == t.lines[3]
    assert t.splits[1].type == TransactionType.Outflow
    assert t.splits[1].amount == Decimal(25)
    # Ensure Entries were Added
    check_entries(t.splits[0], [
        ( a, Decimal( 80), 'EUR'),
        (ta, Decimal( -80), 'EUR'),
        (ta, Decimal( 100), 'USD'),
        (o1, Decimal( 100), 'USD'),
    ])
    check_entries(t.splits[1], [
        ( a, Decimal( 20), 'EUR'),
        (ta, Decimal( -20), 'EUR'),
        (ta, Decimal( 25), 'USD'),
        (o2, Decimal( 25), 'USD'),
    ])
    assert t.amount == Decimal(125)
def test_transaction_can_add_splits_bbf_nonsplit():
    """EUR transaction from a EUR account to a USD category via trading.

    At exchrate 0.8 the EUR -100 on the account funds USD 125 on the
    category; the trading account carries both currency legs.
    """
    # Foreign Transaction between Foreign and Base Currency
    # Use Case: Expat budgeting in "local" currency spending from "home"
    # checking account
    # Setup User and Accounts
    u = User(currency='EUR')
    a = Account(user=u, active=True, name='A', role=AccountRole.Personal, type=AccountType.Asset, currency='EUR')
    o = Account(user=u, active=True, name='O', role=AccountRole.Budget, type=AccountType.Expense, currency='USD')
    ta = Account(user=u, active=True, name='T', role=AccountRole.System, type=AccountType.Trading, currency=u.currency)
    # Setup Transaction
    t = Transaction(
        user=u,
        account=a,
        currency='EUR',
        foreign_currency='USD',
        foreign_exchrate=Decimal('0.8'),
    )
    # Add Transaction Line for EUR 100
    t.add_split(o, Decimal(-100), 'EUR', trading=ta)
    # Ensure Lines were Added (third line belongs to the trading account)
    assert t.lines is not None
    assert isinstance(t.lines, list)
    assert len(t.lines) == 3
    assert t.lines[0].account == a
    assert t.lines[0].amount == Decimal(-100)
    assert t.lines[1].account == o
    assert t.lines[1].amount == Decimal(125)
    assert t.lines[2].account == ta
    # Ensure Split was Added
    assert t.splits is not None
    assert isinstance(t.splits, list)
    assert len(t.splits) == 1
    assert t.splits[0].left_line == t.lines[0]
    assert t.splits[0].right_line == t.lines[1]
    assert t.splits[0].type == TransactionType.Outflow
    assert t.splits[0].amount == Decimal(-100)
    # Ensure Entries were Added
    check_entries(t.splits[0], [
        (a, Decimal(-100), 'EUR'),
        (ta, Decimal(-100), 'EUR'),
        (ta, Decimal( 125), 'USD'),
        (o, Decimal( 125), 'USD'),
    ])
    assert t.amount == Decimal(-100)
def test_transaction_can_add_splits_bbf_split():
    """Two-category USD split (100 + 25) from a USD account to EUR categories.

    At 1.25 each split routes through the trading account; the EUR legs
    carry 80 and 20 respectively.
    """
    # NOTE(review): header comment below looks copy-pasted -- the category
    # accounts here are EUR while the transaction is USD; verify against
    # the test name (bbf).
    # Foreign Transaction between Base Currency Accounts
    # Use Case: Buy something while travelling in a foreign country
    # Setup User and Accounts
    u = User(currency='USD')
    a = Account(user=u, active=True, name='A', role=AccountRole.Personal, type=AccountType.Liability, currency='USD')
    o1 = Account(user=u, active=True, name='O1', role=AccountRole.Budget, type=AccountType.Expense, currency='EUR')
    o2 = Account(user=u, active=True, name='O2', role=AccountRole.Budget, type=AccountType.Expense, currency='EUR')
    ta = Account(user=u, active=True, name='T', role=AccountRole.System, type=AccountType.Trading, currency=u.currency)
    # Setup Transaction
    t = Transaction(
        user=u,
        account=a,
        currency='USD',
        foreign_currency='EUR',
        foreign_exchrate=Decimal('1.25'),
    )
    # Add Transaction Lines for USD 100 + USD 25 (USD 125 total)
    t.add_split(o1, Decimal(100), 'USD', trading=ta)
    t.add_split(o2, Decimal(25), 'USD', trading=ta)
    # Ensure Lines were Added (lines[2] is the trading-account line;
    # the second category line lands at index 3)
    assert t.lines is not None
    assert isinstance(t.lines, list)
    assert len(t.lines) == 4
    assert t.lines[0].account == a
    assert t.lines[0].amount == Decimal(125)
    assert t.lines[1].account == o1
    assert t.lines[1].amount == Decimal(80)
    assert t.lines[3].account == o2
    assert t.lines[3].amount == Decimal(20)
    assert t.lines[2].account == ta
    # Ensure Split was Added
    assert t.splits is not None
    assert isinstance(t.splits, list)
    assert len(t.splits) == 2
    assert t.splits[0].left_line == t.lines[0]
    assert t.splits[0].right_line == t.lines[1]
    assert t.splits[0].type == TransactionType.Outflow
    assert t.splits[0].amount == Decimal(100)
    assert t.splits[1].left_line == t.lines[0]
    assert t.splits[1].right_line == t.lines[3]
    assert t.splits[1].type == TransactionType.Outflow
    assert t.splits[1].amount == Decimal(25)
    # Ensure Entries were Added
    check_entries(t.splits[0], [
        ( a, Decimal( 100), 'USD'),
        (ta, Decimal(-100), 'USD'),
        (ta, Decimal( 80), 'EUR'),
        (o1, Decimal( 80), 'EUR'),
    ])
    check_entries(t.splits[1], [
        ( a, Decimal( 25), 'USD'),
        (ta, Decimal( -25), 'USD'),
        (ta, Decimal( 20), 'EUR'),
        (o2, Decimal( 20), 'EUR'),
    ])
    assert t.amount == Decimal(125)
| 33.920712
| 119
| 0.632782
| 2,918
| 20,963
| 4.496916
| 0.04318
| 0.078951
| 0.069349
| 0.042676
| 0.967993
| 0.966316
| 0.966316
| 0.959305
| 0.959305
| 0.959305
| 0
| 0.036614
| 0.214378
| 20,963
| 617
| 120
| 33.975689
| 0.760155
| 0.141153
| 0
| 0.918269
| 0
| 0
| 0.025348
| 0
| 0
| 0
| 0
| 0
| 0.451923
| 1
| 0.024038
| false
| 0
| 0.012019
| 0
| 0.036058
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
eaf9b43ebd4e41b7c975cce86d92de478f54c8b5
| 5,588
|
py
|
Python
|
google/cloud/vmmigration_v1/types/__init__.py
|
renovate-bot/python-vmmigration
|
80a2cf46a21f516899da818a7aec0f2a67222047
|
[
"Apache-2.0"
] | null | null | null |
google/cloud/vmmigration_v1/types/__init__.py
|
renovate-bot/python-vmmigration
|
80a2cf46a21f516899da818a7aec0f2a67222047
|
[
"Apache-2.0"
] | 10
|
2021-11-18T10:47:48.000Z
|
2022-03-07T15:48:54.000Z
|
google/cloud/vmmigration_v1/types/__init__.py
|
renovate-bot/python-vmmigration
|
80a2cf46a21f516899da818a7aec0f2a67222047
|
[
"Apache-2.0"
] | 1
|
2022-01-29T08:15:02.000Z
|
2022-01-29T08:15:02.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .vmmigration import (
AddGroupMigrationRequest,
AddGroupMigrationResponse,
AppliedLicense,
CancelCloneJobRequest,
CancelCloneJobResponse,
CancelCutoverJobRequest,
CancelCutoverJobResponse,
CloneJob,
ComputeEngineBootOption,
ComputeEngineDiskType,
ComputeEngineLicenseType,
ComputeEngineTargetDefaults,
ComputeEngineTargetDetails,
ComputeScheduling,
CreateCloneJobRequest,
CreateCutoverJobRequest,
CreateDatacenterConnectorRequest,
CreateGroupRequest,
CreateMigratingVmRequest,
CreateSourceRequest,
CreateTargetProjectRequest,
CreateUtilizationReportRequest,
CutoverJob,
DatacenterConnector,
DeleteDatacenterConnectorRequest,
DeleteGroupRequest,
DeleteMigratingVmRequest,
DeleteSourceRequest,
DeleteTargetProjectRequest,
DeleteUtilizationReportRequest,
FetchInventoryRequest,
FetchInventoryResponse,
FinalizeMigrationRequest,
FinalizeMigrationResponse,
GetCloneJobRequest,
GetCutoverJobRequest,
GetDatacenterConnectorRequest,
GetGroupRequest,
GetMigratingVmRequest,
GetSourceRequest,
GetTargetProjectRequest,
GetUtilizationReportRequest,
Group,
ListCloneJobsRequest,
ListCloneJobsResponse,
ListCutoverJobsRequest,
ListCutoverJobsResponse,
ListDatacenterConnectorsRequest,
ListDatacenterConnectorsResponse,
ListGroupsRequest,
ListGroupsResponse,
ListMigratingVmsRequest,
ListMigratingVmsResponse,
ListSourcesRequest,
ListSourcesResponse,
ListTargetProjectsRequest,
ListTargetProjectsResponse,
ListUtilizationReportsRequest,
ListUtilizationReportsResponse,
MigratingVm,
MigrationError,
NetworkInterface,
OperationMetadata,
PauseMigrationRequest,
PauseMigrationResponse,
RemoveGroupMigrationRequest,
RemoveGroupMigrationResponse,
ReplicationCycle,
ReplicationSync,
ResumeMigrationRequest,
ResumeMigrationResponse,
SchedulePolicy,
SchedulingNodeAffinity,
Source,
StartMigrationRequest,
StartMigrationResponse,
TargetProject,
UpdateGroupRequest,
UpdateMigratingVmRequest,
UpdateSourceRequest,
UpdateTargetProjectRequest,
UtilizationReport,
UtilizationReportView,
VmUtilizationInfo,
VmUtilizationMetrics,
VmwareSourceDetails,
VmwareVmDetails,
VmwareVmsDetails,
)
__all__ = (
"AddGroupMigrationRequest",
"AddGroupMigrationResponse",
"AppliedLicense",
"CancelCloneJobRequest",
"CancelCloneJobResponse",
"CancelCutoverJobRequest",
"CancelCutoverJobResponse",
"CloneJob",
"ComputeEngineTargetDefaults",
"ComputeEngineTargetDetails",
"ComputeScheduling",
"CreateCloneJobRequest",
"CreateCutoverJobRequest",
"CreateDatacenterConnectorRequest",
"CreateGroupRequest",
"CreateMigratingVmRequest",
"CreateSourceRequest",
"CreateTargetProjectRequest",
"CreateUtilizationReportRequest",
"CutoverJob",
"DatacenterConnector",
"DeleteDatacenterConnectorRequest",
"DeleteGroupRequest",
"DeleteMigratingVmRequest",
"DeleteSourceRequest",
"DeleteTargetProjectRequest",
"DeleteUtilizationReportRequest",
"FetchInventoryRequest",
"FetchInventoryResponse",
"FinalizeMigrationRequest",
"FinalizeMigrationResponse",
"GetCloneJobRequest",
"GetCutoverJobRequest",
"GetDatacenterConnectorRequest",
"GetGroupRequest",
"GetMigratingVmRequest",
"GetSourceRequest",
"GetTargetProjectRequest",
"GetUtilizationReportRequest",
"Group",
"ListCloneJobsRequest",
"ListCloneJobsResponse",
"ListCutoverJobsRequest",
"ListCutoverJobsResponse",
"ListDatacenterConnectorsRequest",
"ListDatacenterConnectorsResponse",
"ListGroupsRequest",
"ListGroupsResponse",
"ListMigratingVmsRequest",
"ListMigratingVmsResponse",
"ListSourcesRequest",
"ListSourcesResponse",
"ListTargetProjectsRequest",
"ListTargetProjectsResponse",
"ListUtilizationReportsRequest",
"ListUtilizationReportsResponse",
"MigratingVm",
"MigrationError",
"NetworkInterface",
"OperationMetadata",
"PauseMigrationRequest",
"PauseMigrationResponse",
"RemoveGroupMigrationRequest",
"RemoveGroupMigrationResponse",
"ReplicationCycle",
"ReplicationSync",
"ResumeMigrationRequest",
"ResumeMigrationResponse",
"SchedulePolicy",
"SchedulingNodeAffinity",
"Source",
"StartMigrationRequest",
"StartMigrationResponse",
"TargetProject",
"UpdateGroupRequest",
"UpdateMigratingVmRequest",
"UpdateSourceRequest",
"UpdateTargetProjectRequest",
"UtilizationReport",
"VmUtilizationInfo",
"VmUtilizationMetrics",
"VmwareSourceDetails",
"VmwareVmDetails",
"VmwareVmsDetails",
"ComputeEngineBootOption",
"ComputeEngineDiskType",
"ComputeEngineLicenseType",
"UtilizationReportView",
)
| 28.365482
| 74
| 0.749463
| 273
| 5,588
| 15.326007
| 0.578755
| 0.01434
| 0.006214
| 0.007648
| 0.844646
| 0.803059
| 0.803059
| 0.803059
| 0.726099
| 0.726099
| 0
| 0.00196
| 0.17806
| 5,588
| 196
| 75
| 28.510204
| 0.908992
| 0.101825
| 0
| 0
| 0
| 0
| 0.370977
| 0.256846
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.005556
| 0
| 0.005556
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d82adf907aab1342f7de786de471408f78d7c783
| 7,243
|
py
|
Python
|
tests/test_v1/test_menu.py
|
matthewacha/BookAMeal
|
6af638a4cf71d72dd2a5fa80ba0e908b7ef70cf5
|
[
"MIT"
] | null | null | null |
tests/test_v1/test_menu.py
|
matthewacha/BookAMeal
|
6af638a4cf71d72dd2a5fa80ba0e908b7ef70cf5
|
[
"MIT"
] | null | null | null |
tests/test_v1/test_menu.py
|
matthewacha/BookAMeal
|
6af638a4cf71d72dd2a5fa80ba0e908b7ef70cf5
|
[
"MIT"
] | 1
|
2018-08-20T11:57:23.000Z
|
2018-08-20T11:57:23.000Z
|
import unittest
import json
from app import APP
def login(tester):
    """Register the fixture user and log them in.

    Args:
        tester: Flask test client used to issue the HTTP requests.

    Returns:
        The response of the login POST; its JSON payload carries the
        ``token`` the menu tests send as an ``access_token`` header.
    """
    # One shared payload instead of two identical inline dumps; the old
    # code also rebound the name ``login`` locally, shadowing this function.
    credentials = json.dumps(dict(email='me@gmail.com',
                                  password='lantern'))
    # Signup is best-effort: on repeat runs the user already exists and
    # the subsequent login still succeeds.
    tester.post('api/v1/auth/signup', content_type='application/json',
                data=credentials)
    return tester.post('api/v1/auth/login', content_type='application/json',
                       data=credentials)
class TestMenu(unittest.TestCase):
    """Integration tests for the /api/v1/menu/ endpoints.

    ``setUp`` signs up / logs in via the module-level ``login`` helper and
    stores the auth header once, so each test reads as scenario steps.
    The repeated meal-creation and menu-add boilerplate is factored into
    ``_add_meal`` / ``_add_to_menu``.
    """
    def setUp(self):
        self.tester = APP.test_client(self)
        result = json.loads(login(self.tester).data.decode())
        # Shared header for every authenticated request in a test.
        self.headers = dict(access_token=result['token'])
    def _add_meal(self, name, price):
        """Create a meal option (precondition for the menu operations)."""
        self.tester.post('/api/v1/meals/', content_type='application/json',
                         data=json.dumps(dict(name=name, price=price)),
                         headers=self.headers)
    def _add_to_menu(self, meal_id):
        """POST the meal with ``meal_id`` onto the menu; returns the response."""
        return self.tester.post('/api/v1/menu/%d' % meal_id,
                                headers=self.headers)
    def test_create_menu(self):
        """test that a meal can be added to a menu"""
        self._add_meal('Fries', 5000)
        self._add_meal('Beans', 5000)
        self._add_meal('Chicken', 15000)
        response = self._add_to_menu(2)
        # Compare against bytes: response.data is bytes on Python 3 (the
        # old u'...' literals raised TypeError there); b'...' is also a
        # plain str on Python 2, so behavior is preserved.
        self.assertIn(b'Successfully added to menu', response.data)
        self.assertEqual(response.status_code, 201)
    def test_unique_items_to_menu(self):
        """test that a meal option can be added only once to the menu"""
        self._add_meal('Fries', 5000)
        self._add_meal('Beans', 5000)
        self._add_to_menu(2)
        response = self._add_to_menu(2)
        self.assertIn(b'Meal already exists in menu', response.data)
    def test_get_menu(self):
        """test that a menu can be got"""
        self._add_meal('Fries', 5000)
        self._add_meal('Beans', 5000)
        self._add_to_menu(2)
        self._add_to_menu(1)
        response = self.tester.get('/api/v1/menu/', headers=self.headers)
        self.assertIn(b'Beans', response.data)
    def test_delete_menu_item(self):
        """test that a meal can be deleted from a menu"""
        self._add_meal('Fries', 5000)
        self._add_meal('Beans', 5000)
        self._add_meal('Chicken', 15000)
        self._add_to_menu(2)
        self._add_to_menu(3)
        response = self.tester.delete('/api/v1/menu/2',
                                      headers=self.headers)
        self.assertIn(b'Successfully deleted from menu', response.data)
        self.assertEqual(response.status_code, 200)
    def test_fail_delete_menu_item(self):
        """test that a meal cannot be deleted from a menu"""
        self._add_meal('Fries', 5000)
        self._add_meal('Beans', 5000)
        self._add_meal('Chicken', 15000)
        self._add_to_menu(2)
        self._add_to_menu(3)
        response = self.tester.delete('/api/v1/menu/4',
                                      headers=self.headers)
        self.assertIn(b'Meal does not exist', response.data)
        self.assertEqual(response.status_code, 404)
# Allow running this test module directly (pytest/nose also discover it).
if __name__=='__main__':
    unittest.main()#pragma:no cover
| 55.715385
| 100
| 0.49427
| 738
| 7,243
| 4.746612
| 0.127371
| 0.088496
| 0.121325
| 0.157008
| 0.885812
| 0.870111
| 0.862975
| 0.842992
| 0.797317
| 0.769626
| 0
| 0.022566
| 0.375949
| 7,243
| 129
| 101
| 56.147287
| 0.752434
| 0.032169
| 0
| 0.733945
| 0
| 0
| 0.139318
| 0
| 0
| 0
| 0
| 0
| 0.073395
| 1
| 0.06422
| false
| 0.018349
| 0.027523
| 0
| 0.110092
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dc2bf72494de0f8873c9644f2a09a83ddd227464
| 167
|
py
|
Python
|
aiosql/__init__.py
|
georgejdanforth/aiosql
|
63c3adb1fcfc214e3ba645b10e1a7a15d3a22e47
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
aiosql/__init__.py
|
georgejdanforth/aiosql
|
63c3adb1fcfc214e3ba645b10e1a7a15d3a22e47
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
aiosql/__init__.py
|
georgejdanforth/aiosql
|
63c3adb1fcfc214e3ba645b10e1a7a15d3a22e47
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
from .aiosql import from_path, from_str, register_driver_adapter, SQLOperationType
# Explicit public API of the aiosql package (mirrors the import above).
__all__ = ["from_path", "from_str", "register_driver_adapter", "SQLOperationType"]
| 41.75
| 82
| 0.808383
| 20
| 167
| 6.15
| 0.5
| 0.130081
| 0.195122
| 0.243902
| 0.845528
| 0.845528
| 0.845528
| 0.845528
| 0
| 0
| 0
| 0
| 0.083832
| 167
| 3
| 83
| 55.666667
| 0.803922
| 0
| 0
| 0
| 0
| 0
| 0.335329
| 0.137725
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 9
|
dc4c16737821a4599dba044e36eba168fd5c70e1
| 15,008
|
py
|
Python
|
tests/test_synchronization.py
|
amorygalili/pynetworktables
|
59b5d1328274e78393202fe2a7e09845f5ff63a5
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_synchronization.py
|
amorygalili/pynetworktables
|
59b5d1328274e78393202fe2a7e09845f5ff63a5
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_synchronization.py
|
amorygalili/pynetworktables
|
59b5d1328274e78393202fe2a7e09845f5ff63a5
|
[
"BSD-3-Clause"
] | null | null | null |
#
# These tests stand up a separate client and server instance of
# networktables and tests the 'real' user API to ensure that it
# works correctly
#
from __future__ import print_function
import logging

# Module-level logger shared by all synchronization tests in this file.
logger = logging.getLogger("test")
#
# Distinction between the following test cases:
# -> instance.shutdown clears the storage for the nt instance
# -> instance.disconnect only shuts down the network connection, storage is retained
#
#
# Writes before connection
#
def test_sync_pre_client_writes_value(nt_server, nt_client):
    """Client writes a value before connecting; after connect both sides see foo=1."""
    client_table = nt_client.getTable("table")
    server_table = nt_server.getTable("table")
    client_table.putString("foo", "1")
    with nt_server.expect_changes(1):
        nt_server.start_test()
        nt_client.start_test()
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "1"
def test_sync_pre_server_writes_value(nt_server, nt_client):
    """Server writes a value before connecting; after connect both sides see foo=1."""
    client_table = nt_client.getTable("table")
    server_table = nt_server.getTable("table")
    server_table.putString("foo", "1")
    with nt_client.expect_changes(1):
        nt_server.start_test()
        nt_client.start_test()
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "1"
def test_sync_pre_both_write_values(nt_server, nt_client):
    """Both sides write before connecting; on connect the client's value wins."""
    client_table = nt_client.getTable("table")
    server_table = nt_server.getTable("table")
    server_table.putString("foo", "1")
    client_table.putString("foo", "2")
    with nt_server.expect_changes(1):
        nt_server.start_test()
        nt_client.start_test()
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "2"
def test_sync_srestart_client_writes(nt_server, nt_client):
    """Server restarts after a client-written value; the client restores foo=1."""
    client_table = nt_client.getTable("table")
    server_table = nt_server.getTable("table")
    client_table.putString("foo", "1")
    with nt_server.expect_changes(1):
        nt_server.start_test()
        nt_client.start_test()
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "1"
    # Restart the server: its storage is cleared...
    nt_server.shutdown()
    server_table = nt_server.getTable("table")
    assert server_table.getString("foo", None) is None
    # ...but on reconnect the client pushes the value back.
    with nt_server.expect_changes(1):
        nt_server.start_test()
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "1"
def test_sync_srestart_server_writes(nt_server, nt_client):
    """Server restarts after a server-written value; foo is deleted (not intuitive)."""
    client_table = nt_client.getTable("table")
    server_table = nt_server.getTable("table")
    server_table.putString("foo", "1")
    with nt_client.expect_changes(1):
        nt_server.start_test()
        nt_client.start_test()
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "1"
    nt_server.shutdown()
    server_table = nt_server.getTable("table")
    assert server_table.getString("foo", None) is None
    # Required otherwise we don't have anything to wait for...
    client_table.putNumber("ignored", 1)
    with nt_server.expect_changes(1):
        nt_server.start_test()
    for table in (client_table, server_table):
        assert table.getString("foo", None) is None
def test_sync_srestart_both_writes(nt_server, nt_client):
    """Server restarts after both wrote; the last value (foo=2) is restored by the client."""
    nt_server.start_test()
    nt_client.start_test()
    client_table = nt_client.getTable("table")
    server_table = nt_server.getTable("table")
    with nt_server.expect_changes(1):
        client_table.putString("foo", "1")
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "1"
    with nt_client.expect_changes(1):
        server_table.putString("foo", "2")
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "2"
    nt_server.shutdown()
    server_table = nt_server.getTable("table")
    assert server_table.getString("foo", None) is None
    with nt_server.expect_changes(1):
        nt_server.start_test()
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "2"
def test_sync_crestart_client_writes(nt_server, nt_client):
    """Client restarts after writing foo=1; on reconnect the server restores it."""
    client_table = nt_client.getTable("table")
    server_table = nt_server.getTable("table")
    client_table.putString("foo", "1")
    with nt_server.expect_changes(1):
        nt_server.start_test()
        nt_client.start_test()
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "1"
    # Restart the client: its storage is cleared...
    nt_client.shutdown()
    client_table = nt_client.getTable("table")
    assert client_table.getString("foo", None) is None
    # ...but the value comes back from the server on reconnect.
    with nt_client.expect_changes(1):
        nt_client.start_test()
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "1"
def test_sync_crestart_server_writes(nt_server, nt_client):
    """Client restarts after a server-written value; on reconnect foo=1 is restored."""
    client_table = nt_client.getTable("table")
    server_table = nt_server.getTable("table")
    server_table.putString("foo", "1")
    with nt_client.expect_changes(1):
        nt_server.start_test()
        nt_client.start_test()
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "1"
    nt_client.shutdown()
    client_table = nt_client.getTable("table")
    assert client_table.getString("foo", None) is None
    with nt_client.expect_changes(1):
        nt_client.start_test()
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "1"
def test_sync_crestart_server_writes_late(nt_server, nt_client):
    """Client restarts; the server writes foo=2 before reconnect, and that value wins."""
    client_table = nt_client.getTable("table")
    server_table = nt_server.getTable("table")
    server_table.putString("foo", "1")
    with nt_client.expect_changes(1):
        nt_server.start_test()
        nt_client.start_test()
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "1"
    nt_client.shutdown()
    client_table = nt_client.getTable("table")
    assert client_table.getString("foo", None) is None
    # The server updates while the client is down.
    server_table.putString("foo", "2")
    with nt_client.expect_changes(1):
        nt_client.start_test()
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "2"
def test_sync_disconnect_write_by_server(nt_server, nt_client):
    """Server updates its own value during a disconnect; the client picks up foo=2."""
    client_table = nt_client.getTable("table")
    server_table = nt_server.getTable("table")
    nt_server.start_test()
    nt_client.start_test()
    with nt_client.expect_changes(1):
        server_table.putString("foo", "1")
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "1"
    nt_client.disconnect()
    server_table.putString("foo", "2")
    with nt_client.expect_changes(1):
        nt_client.start_test()
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "2"
def test_sync_disconnect_write_by_client(nt_server, nt_client):
    """Server updates a client-initiated value during disconnect; the client wins."""
    client_table = nt_client.getTable("table")
    server_table = nt_server.getTable("table")
    nt_server.start_test()
    nt_client.start_test()
    with nt_server.expect_changes(1):
        client_table.putString("foo", "1")
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "1"
    nt_client.disconnect()
    server_table.putString("foo", "2")
    with nt_server.expect_changes(1):
        nt_client.start_test()
    # client wins
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "1"
def test_sync_disconnect_write_by_client2(nt_server, nt_client):
    """Client updates its value during disconnect; it syncs and later writes still work."""
    client_table = nt_client.getTable("table")
    server_table = nt_server.getTable("table")
    nt_server.start_test()
    nt_client.start_test()
    with nt_server.expect_changes(1):
        client_table.putString("foo", "1")
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "1"
    nt_client.disconnect()
    client_table.putString("foo", "2")
    with nt_server.expect_changes(1):
        nt_client.start_test()
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "2"
    with nt_server.expect_changes(1):
        client_table.putString("foo", "3")
    # more writes succeed
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "3"
def test_sync_disconnect_write_by_both(nt_server, nt_client):
    """Both write during a disconnect; the client wins and later writes still work."""
    client_table = nt_client.getTable("table")
    server_table = nt_server.getTable("table")
    nt_server.start_test()
    nt_client.start_test()
    with nt_server.expect_changes(1):
        client_table.putString("foo", "1")
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "1"
    nt_client.disconnect()
    client_table.putString("foo", "2")
    server_table.putString("foo", "3")
    with nt_server.expect_changes(1):
        nt_client.start_test()
    # client wins
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "2"
    with nt_server.expect_changes(1):
        client_table.putString("foo", "4")
    # more writes succeed
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "4"
def test_sync_disconnect_write_by_both_prev(nt_server, nt_client):
    """Client updates a value both sides previously wrote; its update wins (issue #270?)."""
    client_table = nt_client.getTable("table")
    server_table = nt_server.getTable("table")
    nt_server.start_test()
    nt_client.start_test()
    with nt_server.expect_changes(1):
        client_table.putString("foo", "1")
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "1"
    with nt_client.expect_changes(1):
        server_table.putString("foo", "2")
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "2"
    nt_client.disconnect()
    client_table.putString("foo", "3")
    with nt_server.expect_changes(1):
        nt_client.start_test()
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "3"
    with nt_server.expect_changes(1):
        client_table.putString("foo", "4")
    # more writes succeed
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "4"
def test_sync_disconnect_write_by_both_both_prev(nt_server, nt_client):
    """Both update a previously shared value during disconnect; client wins (issue #270?)."""
    client_table = nt_client.getTable("table")
    server_table = nt_server.getTable("table")
    nt_server.start_test()
    nt_client.start_test()
    with nt_server.expect_changes(1):
        client_table.putString("foo", "1")
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "1"
    with nt_client.expect_changes(1):
        server_table.putString("foo", "2")
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "2"
    nt_client.disconnect()
    server_table.putString("foo", "3")
    client_table.putString("foo", "4")
    with nt_server.expect_changes(1):
        nt_client.start_test()
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "4"
    with nt_server.expect_changes(1):
        client_table.putString("foo", "5")
    # more writes succeed
    for table in (client_table, server_table):
        assert table.getString("foo", None) == "5"
| 25.965398
| 84
| 0.548974
| 1,784
| 15,008
| 4.445628
| 0.064462
| 0.078679
| 0.149288
| 0.093305
| 0.901652
| 0.887278
| 0.870004
| 0.845165
| 0.811121
| 0.799395
| 0
| 0.019047
| 0.282849
| 15,008
| 577
| 85
| 26.010399
| 0.71783
| 0.271722
| 0
| 0.911647
| 0
| 0
| 0.062212
| 0
| 0
| 0
| 0
| 0
| 0.297189
| 1
| 0.060241
| false
| 0
| 0.008032
| 0
| 0.068273
| 0.004016
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dc6b98e8ea5958bd13cff82ceeb21584c1bd41ba
| 2,043
|
py
|
Python
|
tests/test_integration_auth.py
|
edcilo/edc_service_users_flask
|
ccb19956d1f83baae582b083e5c976938f2d31cd
|
[
"MIT"
] | null | null | null |
tests/test_integration_auth.py
|
edcilo/edc_service_users_flask
|
ccb19956d1f83baae582b083e5c976938f2d31cd
|
[
"MIT"
] | null | null | null |
tests/test_integration_auth.py
|
edcilo/edc_service_users_flask
|
ccb19956d1f83baae582b083e5c976938f2d31cd
|
[
"MIT"
] | null | null | null |
from flask import json
from fixture import app, client
from users.repositories import userRepo
def test_register(client):
    """POST /register with a complete signup payload returns 200."""
    payload = {
        'username': 'jhon.doe',
        'email': 'jhon.doe@example.com',
        'phone': '1231231231',
        'password': 'secret',
        'password_confirmation': 'secret',
    }
    response = client.post('/register', data=payload)
    assert response.status_code == 200
def test_login(client):
    """A pre-seeded user can log in with username/password and gets 200."""
    userRepo.add({
        'username': 'jhon.doe',
        'email': 'jhon.doe@example.com',
        'phone': '1231231231',
        'password': 'secret',
    })
    credentials = {
        'username': 'jhon.doe',
        'password': 'secret',
    }
    response = client.post('/login', data=credentials)
    assert response.status_code == 200
def test_bad_credentials(client):
    """Logging in with a wrong password is rejected with 400."""
    userRepo.add({
        'username': 'jhon.doe',
        'email': 'jhon.doe@example.com',
        'phone': '1231231231',
        'password': 'secret',
    })
    credentials = {
        'username': 'jhon.doe',
        'password': 'secre'
    }
    response = client.post('/login', data=credentials)
    assert response.status_code == 400
def test_refresh_token(client):
    """A token obtained from /login can be exchanged at /refresh (200)."""
    user = {
        'username': 'jhon.doe',
        'email': 'jhon.doe@example.com',
        'phone': '1231231231',
        'password': 'secret',
    }
    userRepo.add(user)
    login_response = client.post('/login', data=user)
    token = json.loads(login_response.data).get('token')
    auth_headers = {'Authorization': f'Bearer {token}'}
    response = client.post('/refresh', headers=auth_headers)
    assert response.status_code == 200
def test_refresh_check(client):
    """A token obtained from /login validates at /check (204, no body)."""
    user = {
        'username': 'jhon.doe',
        'email': 'jhon.doe@example.com',
        'phone': '1231231231',
        'password': 'secret',
    }
    userRepo.add(user)
    login_response = client.post('/login', data=user)
    token = json.loads(login_response.data).get('token')
    auth_headers = {'Authorization': f'Bearer {token}'}
    response = client.post('/check', headers=auth_headers)
    assert response.status_code == 204
| 26.881579
| 50
| 0.576603
| 224
| 2,043
| 5.196429
| 0.214286
| 0.072165
| 0.09622
| 0.114261
| 0.810137
| 0.810137
| 0.769759
| 0.744845
| 0.744845
| 0.704467
| 0
| 0.04282
| 0.256975
| 2,043
| 75
| 51
| 27.24
| 0.723979
| 0
| 0
| 0.681159
| 0
| 0
| 0.267744
| 0.010279
| 0
| 0
| 0
| 0
| 0.072464
| 1
| 0.072464
| false
| 0.115942
| 0.043478
| 0
| 0.115942
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
dc896a064ec42cbbd21b0a801436946f16008a89
| 202
|
py
|
Python
|
src/hommmer/metrics/smape.py
|
hammer-mt/hommmer
|
a02cb87841395f30911242a019f28f6ac15f27ec
|
[
"MIT"
] | 4
|
2021-11-09T21:27:30.000Z
|
2021-11-23T00:38:20.000Z
|
src/hommmer/metrics/smape.py
|
hammer-mt/hommmer
|
a02cb87841395f30911242a019f28f6ac15f27ec
|
[
"MIT"
] | null | null | null |
src/hommmer/metrics/smape.py
|
hammer-mt/hommmer
|
a02cb87841395f30911242a019f28f6ac15f27ec
|
[
"MIT"
] | null | null | null |
import numpy as np
def smape(y_actual, y_pred):
    """Symmetric mean absolute percentage error (sMAPE), in percent.

    sMAPE = (100 / n) * sum( 2 * |pred - actual| / (|actual| + |pred|) )
    """
    abs_error = 2 * np.abs(y_pred - y_actual)
    magnitude = np.abs(y_actual) + np.abs(y_pred)
    return 100 / len(y_actual) * np.sum(abs_error / magnitude)
| 40.4
| 106
| 0.70297
| 37
| 202
| 3.648649
| 0.540541
| 0.207407
| 0.2
| 0.148148
| 0.185185
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023669
| 0.163366
| 202
| 5
| 106
| 40.4
| 0.775148
| 0.19802
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
dc8d1366bda765ba29a737f8b9719490685c9eb2
| 104
|
py
|
Python
|
run.py
|
deanishe/bundler-icon-server
|
841d336bad8570d54a3013e0c1bf72a98c3d8e58
|
[
"MIT"
] | 11
|
2015-02-10T06:16:26.000Z
|
2020-05-11T03:29:40.000Z
|
run.py
|
deanishe/bundler-icon-server
|
841d336bad8570d54a3013e0c1bf72a98c3d8e58
|
[
"MIT"
] | 30
|
2015-09-04T00:01:47.000Z
|
2021-06-25T15:21:41.000Z
|
run.py
|
deanishe/bundler-icon-server
|
841d336bad8570d54a3013e0c1bf72a98c3d8e58
|
[
"MIT"
] | 2
|
2019-10-29T00:33:41.000Z
|
2020-05-11T03:29:41.000Z
|
#!env/bin/python
# Entry-point script: launch the icon-server Flask application.
from iconserver import app
from iconserver import config

# Run the Flask development server; debug mode is taken from the app's config.
app.run(debug=config.DEBUG)
| 14.857143
| 29
| 0.798077
| 16
| 104
| 5.1875
| 0.625
| 0.337349
| 0.481928
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 104
| 6
| 30
| 17.333333
| 0.902174
| 0.144231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
dca308114c54df4ae27a9cc7638ab9f035c7e7d7
| 11,428
|
py
|
Python
|
utils/conv_type.py
|
x-zho14/Swin-Transformer
|
577cac5b94281dcf773e8f4ea97fd2162f1ed060
|
[
"MIT"
] | null | null | null |
utils/conv_type.py
|
x-zho14/Swin-Transformer
|
577cac5b94281dcf773e8f4ea97fd2162f1ed060
|
[
"MIT"
] | null | null | null |
utils/conv_type.py
|
x-zho14/Swin-Transformer
|
577cac5b94281dcf773e8f4ea97fd2162f1ed060
|
[
"MIT"
] | null | null | null |
import numpy as np
import torch
import torch.autograd as autograd
import torch.nn as nn
import torch.nn.functional as F
import math
from config import config as parser_args

# An unpruned ("dense") linear layer is just the stock nn.Linear.
DenseLinear = nn.Linear
class ReinforceLOOVRLinear(nn.Linear):
    """Linear layer whose output features are pruned by sampled 0/1 masks.

    ``scores`` holds one Bernoulli probability per output feature; the
    forward pass samples a binary ``subnet`` mask from those scores and
    multiplies it into the weight.  The normalized (mask - score) terms are
    recorded in ``stored_mask_0`` / ``stored_mask_1`` for a REINFORCE
    leave-one-out variance-reduced estimator computed elsewhere.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # One score per output feature, shape [out_features, 1].
        self.scores = nn.Parameter(torch.Tensor(self.weight.size()[0], 1))
        self.register_buffer('subnet', torch.zeros_like(self.scores))
        self.train_weights = False
        if parser_args.score_init_constant is not None:
            # Constant initialization of all keep-probabilities.
            self.scores.data = (
                torch.ones_like(self.scores) * parser_args.score_init_constant
            )
        else:
            nn.init.kaiming_uniform_(self.scores, a=math.sqrt(5))
        # NOTE(review): 10/100 outputs presumably marks the CIFAR-10/100
        # classifier head, which is left unpruned -- confirm with callers.
        if self.out_features == 10 or self.out_features == 100:
            self.prune = False
            self.subnet = torch.ones_like(self.scores)
        else:
            self.prune = True
        self.register_buffer("stored_mask_0", torch.zeros_like(self.scores))
        self.register_buffer("stored_mask_1", torch.zeros_like(self.scores))

    @property
    def clamped_scores(self):
        # Despite the name, no clamping is applied; returns the raw scores.
        return self.scores

    def fix_subnet(self):
        # Freeze a concrete 0/1 mask by sampling once from the scores.
        self.subnet = (torch.rand_like(self.scores) < self.clamped_scores).float()

    def flip_one_channel(self):
        # Toggle the mask entry of output 0 (1 -> 0 or 0 -> 1).
        self.subnet.data[0] = 1 - self.subnet.data[0]

    def forward(self, x):
        if self.prune:
            if not self.train_weights:
                # Resample the mask, then record (mask - p) / sqrt(p*(1-p))
                # (Bernoulli std-normalized deviation); parser_args.j selects
                # which of the two buffers receives it.
                self.subnet = StraightThroughBinomialSampleNoGrad.apply(self.scores)
                if parser_args.j == 0:
                    self.stored_mask_0.data = (self.subnet-self.scores)/torch.sqrt((self.scores+1e-20)*(1-self.scores+1e-20))
                else:
                    self.stored_mask_1.data = (self.subnet-self.scores)/torch.sqrt((self.scores+1e-20)*(1-self.scores+1e-20))
                w = self.weight * self.subnet
                x = F.linear(x, w, self.bias)
            else:
                # Weights are being fine-tuned: reuse the frozen mask.
                w = self.weight * self.subnet
                x = F.linear(x, w, self.bias)
        else:
            # Unpruned (classifier) layer: plain dense linear.
            x = F.linear(x, self.weight, self.bias)
        return x
class ReinforceLOO(nn.Conv2d):
    """Conv2d with per-output-channel Bernoulli pruning masks (REINFORCE LOO).

    ``scores`` holds one keep-probability per output channel; forward samples
    a 0/1 ``subnet`` mask from it and multiplies it into the weight.
    ``stored_mask_0`` / ``stored_mask_1`` record (mask - score) terms for an
    external leave-one-out gradient estimator.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # One score per output channel, shape [out_channels, 1, 1, 1].
        self.scores = nn.Parameter(torch.Tensor(self.weight.size()[0], 1, 1, 1))
        self.register_buffer('subnet', torch.zeros_like(self.scores))
        self.train_weights = False
        if parser_args.score_init_constant is not None:
            self.scores.data = (
                torch.ones_like(self.scores) * parser_args.score_init_constant
            )
        else:
            nn.init.kaiming_uniform_(self.scores, a=math.sqrt(5))
        # NOTE(review): 10/100 channels presumably marks the CIFAR-10/100
        # classifier layer, which is never pruned -- confirm with callers.
        if self.out_channels == 10 or self.out_channels == 100:
            self.prune = False
            self.subnet = torch.ones_like(self.scores)
        else:
            self.prune = True
        self.register_buffer("stored_mask_0", torch.zeros_like(self.scores))
        self.register_buffer("stored_mask_1", torch.zeros_like(self.scores))

    @property
    def clamped_scores(self):
        # No clamping is actually applied; returns the raw scores.
        return self.scores

    def fix_subnet(self):
        # Freeze a concrete 0/1 mask by sampling once from the scores.
        self.subnet = (torch.rand_like(self.scores) < self.scores).float()

    def forward(self, x):
        if self.prune:
            if not self.train_weights:
                self.subnet = StraightThroughBinomialSampleNoGrad.apply(self.scores)
                # NOTE(review): unlike the *VR* siblings this divides by the
                # Bernoulli variance p*(1-p) rather than torch.sqrt of it --
                # confirm which normalization the estimator expects.
                if parser_args.j == 0:
                    self.stored_mask_0.data = (self.subnet-self.scores)/((self.scores+1e-20)*(1-self.scores+1e-20))
                else:
                    self.stored_mask_1.data = (self.subnet-self.scores)/((self.scores+1e-20)*(1-self.scores+1e-20))
                w = self.weight * self.subnet
                x = F.conv2d(x, w, self.bias, self.stride, self.padding, self.dilation, self.groups)
            else:
                # Weights are being fine-tuned: reuse the frozen mask.
                w = self.weight * self.subnet
                x = F.conv2d(x, w, self.bias, self.stride, self.padding, self.dilation, self.groups)
        else:
            # Unpruned (classifier) layer: plain convolution.
            x = F.conv2d(x, self.weight, self.bias, self.stride, self.padding, self.dilation, self.groups)
        return x
class ReinforceLOOVRWeight(nn.Conv2d):
    """Conv2d pruned element-wise: one Bernoulli mask entry per weight.

    Identical to ReinforceLOOVR except that ``scores`` has the full weight
    shape, so individual weights (not whole channels) are masked.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # One score per weight element (full weight shape).
        self.scores = nn.Parameter(torch.Tensor(self.weight.size()))
        self.register_buffer('subnet', torch.zeros_like(self.scores))
        self.train_weights = False
        if parser_args.score_init_constant is not None:
            self.scores.data = (
                torch.ones_like(self.scores) * parser_args.score_init_constant
            )
        else:
            nn.init.kaiming_uniform_(self.scores, a=math.sqrt(5))
        # NOTE(review): 10/100 channels presumably marks the classifier
        # layer, which is never pruned -- confirm with callers.
        if self.out_channels == 10 or self.out_channels == 100:
            self.prune = False
            self.subnet = torch.ones_like(self.scores)
        else:
            self.prune = True
        self.register_buffer("stored_mask_0", torch.zeros_like(self.scores))
        self.register_buffer("stored_mask_1", torch.zeros_like(self.scores))
        # Per-step mask histories keyed by parser_args.j (filled elsewhere).
        self.stored_mask_dict = {}
        self.stored_mask_dict_vr = {}

    @property
    def clamped_scores(self):
        # Despite the name, no clamping is applied; returns the raw scores.
        return self.scores

    def fix_subnet(self):
        # Freeze a concrete 0/1 mask by sampling once from the scores.
        self.subnet = (torch.rand_like(self.scores) < self.clamped_scores).float()

    def flip_one_channel(self):
        # Toggle the mask entries of output 0 (1 -> 0 or 0 -> 1).
        self.subnet.data[0] = 1 - self.subnet.data[0]

    def forward(self, x):
        if self.prune:
            if not self.train_weights:
                # Resample the mask and store (mask - p) / sqrt(p*(1-p)) in
                # the buffer selected by parser_args.j.
                self.subnet = StraightThroughBinomialSampleNoGrad.apply(self.scores)
                if parser_args.j == 0:
                    self.stored_mask_0.data = (self.subnet-self.scores)/torch.sqrt((self.scores+1e-20)*(1-self.scores+1e-20))
                else:
                    self.stored_mask_1.data = (self.subnet-self.scores)/torch.sqrt((self.scores+1e-20)*(1-self.scores+1e-20))
                w = self.weight * self.subnet
                x = F.conv2d(x, w, self.bias, self.stride, self.padding, self.dilation, self.groups)
            else:
                # Weights are being fine-tuned: reuse the frozen mask.
                w = self.weight * self.subnet
                x = F.conv2d(x, w, self.bias, self.stride, self.padding, self.dilation, self.groups)
        else:
            # Unpruned (classifier) layer: plain convolution.
            x = F.conv2d(x, self.weight, self.bias, self.stride, self.padding, self.dilation, self.groups)
        return x
class ReinforceLOOVR(nn.Conv2d):
    """Conv2d with per-output-channel Bernoulli pruning masks (REINFORCE
    leave-one-out, variance-reduced).

    ``scores`` holds one keep-probability per output channel; forward samples
    a 0/1 ``subnet`` mask and records std-normalized (mask - score) terms in
    ``stored_mask_0`` / ``stored_mask_1`` for the external estimator.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # One score per output channel, shape [out_channels, 1, 1, 1].
        self.scores = nn.Parameter(torch.Tensor(self.weight.size()[0], 1, 1, 1))
        self.register_buffer('subnet', torch.zeros_like(self.scores))
        self.train_weights = False
        if parser_args.score_init_constant is not None:
            self.scores.data = (
                torch.ones_like(self.scores) * parser_args.score_init_constant
            )
        else:
            nn.init.kaiming_uniform_(self.scores, a=math.sqrt(5))
        # NOTE(review): 10/100 channels presumably marks the classifier
        # layer, which is never pruned -- confirm with callers.
        if self.out_channels == 10 or self.out_channels == 100:
            self.prune = False
            self.subnet = torch.ones_like(self.scores)
        else:
            self.prune = True
        self.register_buffer("stored_mask_0", torch.zeros_like(self.scores))
        self.register_buffer("stored_mask_1", torch.zeros_like(self.scores))
        # Per-step mask histories keyed by parser_args.j (filled elsewhere).
        self.stored_mask_dict = {}
        self.stored_mask_dict_vr = {}

    @property
    def clamped_scores(self):
        # Despite the name, no clamping is applied; returns the raw scores.
        return self.scores

    def fix_subnet(self):
        # Freeze a concrete 0/1 mask by sampling once from the scores.
        self.subnet = (torch.rand_like(self.scores) < self.clamped_scores).float()

    def flip_one_channel(self):
        # Toggle the mask entry of channel 0 (1 -> 0 or 0 -> 1).
        self.subnet.data[0] = 1 - self.subnet.data[0]

    def forward(self, x):
        if self.prune:
            if not self.train_weights:
                # Resample the mask and store (mask - p) / sqrt(p*(1-p)) in
                # the buffer selected by parser_args.j.
                self.subnet = StraightThroughBinomialSampleNoGrad.apply(self.scores)
                if parser_args.j == 0:
                    self.stored_mask_0.data = (self.subnet-self.scores)/torch.sqrt((self.scores+1e-20)*(1-self.scores+1e-20))
                else:
                    self.stored_mask_1.data = (self.subnet-self.scores)/torch.sqrt((self.scores+1e-20)*(1-self.scores+1e-20))
                w = self.weight * self.subnet
                x = F.conv2d(x, w, self.bias, self.stride, self.padding, self.dilation, self.groups)
            else:
                # Weights are being fine-tuned: reuse the frozen mask.
                w = self.weight * self.subnet
                x = F.conv2d(x, w, self.bias, self.stride, self.padding, self.dilation, self.groups)
        else:
            # Unpruned (classifier) layer: plain convolution.
            x = F.conv2d(x, self.weight, self.bias, self.stride, self.padding, self.dilation, self.groups)
        return x
class Reinforce(nn.Conv2d):
    """Conv2d with per-output-channel Bernoulli pruning masks (plain
    REINFORCE estimator variant).
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # One score per output channel, shape [out_channels, 1, 1, 1].
        self.scores = nn.Parameter(torch.Tensor(self.weight.size()[0], 1, 1, 1))
        self.register_buffer('subnet', torch.zeros_like(self.scores))
        self.train_weights = False
        if parser_args.score_init_constant is not None:
            self.scores.data = (
                torch.ones_like(self.scores) * parser_args.score_init_constant
            )
        else:
            nn.init.kaiming_uniform_(self.scores, a=math.sqrt(5))
        # NOTE(review): 10/100 channels presumably marks the classifier
        # layer, which is never pruned -- confirm with callers.
        if self.out_channels == 10 or self.out_channels == 100:
            self.prune = False
            self.subnet = torch.ones_like(self.scores)
        else:
            self.prune = True
        self.register_buffer("stored_mask_0", torch.zeros_like(self.scores))
        self.register_buffer("stored_mask_1", torch.zeros_like(self.scores))
        # Per-step mask histories keyed by parser_args.j.
        self.stored_mask_dict = {}
        self.stored_mask_dict_vr = {}

    @property
    def clamped_scores(self):
        # Despite the name, no clamping is applied; returns the raw scores.
        return self.scores

    def fix_subnet(self):
        # Freeze a concrete 0/1 mask by sampling once from the scores.
        self.subnet = (torch.rand_like(self.scores) < self.clamped_scores).float()

    def forward(self, x):
        if self.prune:
            if not self.train_weights:
                # NOTE(review): the dict entries below are computed from the
                # PREVIOUS subnet (before the resample two lines down), and
                # the expression `/(p+eps)*(1-p+eps)` divides then multiplies
                # -- the sibling classes divide by the full product
                # `((p+eps)*(1-p+eps))`.  Both look like they could be
                # unintentional; confirm against the estimator code.
                self.stored_mask_dict[parser_args.j] = (self.subnet-self.scores)/(self.scores+1e-20)*(1-self.scores+1e-20)
                self.stored_mask_dict_vr[parser_args.j] = (self.subnet-self.scores)/(self.scores+1e-20)*(1-self.scores+1e-20)
                self.subnet = StraightThroughBinomialSampleNoGrad.apply(self.scores)
                if parser_args.j == 0:
                    self.stored_mask_0.data = (self.subnet-self.scores)/(self.scores+1e-20)*(1-self.scores+1e-20)
                else:
                    self.stored_mask_1.data = (self.subnet-self.scores)/(self.scores+1e-20)*(1-self.scores+1e-20)
                w = self.weight * self.subnet
                x = F.conv2d(x, w, self.bias, self.stride, self.padding, self.dilation, self.groups)
            else:
                # Weights are being fine-tuned: reuse the frozen mask.
                w = self.weight * self.subnet
                x = F.conv2d(x, w, self.bias, self.stride, self.padding, self.dilation, self.groups)
        else:
            # Unpruned (classifier) layer: plain convolution.
            x = F.conv2d(x, self.weight, self.bias, self.stride, self.padding, self.dilation, self.groups)
        return x
class StraightThroughBinomialSampleNoGrad(autograd.Function):
    """Sample a 0/1 mask from element-wise Bernoulli probabilities.

    Forward draws `1` where uniform noise falls below the probability;
    backward blocks all gradients (returns zeros).
    """

    @staticmethod
    def forward(ctx, scores):
        # Bernoulli sample: uniform u in [0, 1); keep when u < p.
        return (torch.rand_like(scores) < scores).float()

    @staticmethod
    def backward(ctx, grad_outputs):
        # No gradient flows through the sampling step.
        return torch.zeros_like(grad_outputs)
| 44.466926
| 126
| 0.59293
| 1,460
| 11,428
| 4.478767
| 0.067123
| 0.140694
| 0.06423
| 0.051384
| 0.922312
| 0.91696
| 0.91696
| 0.91696
| 0.91696
| 0.91696
| 0
| 0.021816
| 0.286052
| 11,428
| 257
| 127
| 44.466926
| 0.77963
| 0
| 0
| 0.847826
| 0
| 0
| 0.01432
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.108696
| false
| 0
| 0.030435
| 0.026087
| 0.217391
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f4a0741740401c17dbb220c9545453086d5442e2
| 125
|
py
|
Python
|
__init__.py
|
katomasahiro10/modpy
|
5b9b61530e6dc520e269eb65f27ce281c8d8acec
|
[
"MIT"
] | null | null | null |
__init__.py
|
katomasahiro10/modpy
|
5b9b61530e6dc520e269eb65f27ce281c8d8acec
|
[
"MIT"
] | 28
|
2019-10-08T05:27:28.000Z
|
2020-06-21T01:36:10.000Z
|
__init__.py
|
katomasahiro10/modneat
|
5b9b61530e6dc520e269eb65f27ce281c8d8acec
|
[
"MIT"
] | null | null | null |
from . modneat import nn
from . modneat import evolution
from . modneat import modneat_settings
from . modneat import agents
| 25
| 38
| 0.808
| 17
| 125
| 5.882353
| 0.411765
| 0.44
| 0.68
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 125
| 4
| 39
| 31.25
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f4c55faaa3a18ea4b585422d04818d2ef9c1d1e8
| 73
|
py
|
Python
|
swig_mwe/fastLA/__init__.py
|
EricKightley/swig_mwe
|
3c3256a9f74dcaad6b172f91ce8031917dc3bf1d
|
[
"MIT"
] | null | null | null |
swig_mwe/fastLA/__init__.py
|
EricKightley/swig_mwe
|
3c3256a9f74dcaad6b172f91ce8031917dc3bf1d
|
[
"MIT"
] | null | null | null |
swig_mwe/fastLA/__init__.py
|
EricKightley/swig_mwe
|
3c3256a9f74dcaad6b172f91ce8031917dc3bf1d
|
[
"MIT"
] | null | null | null |
from .fastLA import fact
from .fastLA import rms
from .fastLA import dot
| 18.25
| 24
| 0.794521
| 12
| 73
| 4.833333
| 0.5
| 0.517241
| 0.827586
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164384
| 73
| 3
| 25
| 24.333333
| 0.95082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f4e65645142ff8b8f244979ce04af8bb683e6853
| 3,746
|
py
|
Python
|
EstruturaDeDecisao/exercicio24.py
|
Nicolas-Wursthorn/exercicios-python-brasil
|
b2b564d48b519be04643636033ec0815e6d99ea1
|
[
"MIT"
] | null | null | null |
EstruturaDeDecisao/exercicio24.py
|
Nicolas-Wursthorn/exercicios-python-brasil
|
b2b564d48b519be04643636033ec0815e6d99ea1
|
[
"MIT"
] | null | null | null |
EstruturaDeDecisao/exercicio24.py
|
Nicolas-Wursthorn/exercicios-python-brasil
|
b2b564d48b519be04643636033ec0815e6d99ea1
|
[
"MIT"
] | null | null | null |
# Read two numbers, ask the user which arithmetic operation to perform
# (+ , - , / , *), and report whether the result is:
#   - even or odd;
#   - positive or negative;
#   - integer or decimal.
# (Python Brasil "Estrutura de Decisao" exercise 24.)

num1 = float(input("Digite o primeiro número: "))
num2 = float(input("Digite o segundo número: "))
operacao = input("Qual operação deseja realizar entre os dois números (+ , - , / , * ): ")


def _classificar(resultado):
    """Print the even/odd, positive/negative and integer/decimal analysis.

    Extracted because the original repeated this identical 25-line analysis
    once per operator branch.
    """
    # Even or odd ("par ou ímpar").
    if resultado == 0:
        print("Esse número {} é nulo!".format(resultado))
    elif resultado % 2 == 0:
        print("O número {} é par!".format(resultado))
    else:
        print("O número {} é impar!".format(resultado))
    # Positive or negative ("positivo ou negativo").
    if resultado == 0:
        print("Esse número {} é nulo!".format(resultado))
    elif resultado > 0:
        print("O número {} é positivo!".format(resultado))
    else:
        print("O número {} é negativo!".format(resultado))
    # Integer or decimal ("inteiro ou decimal").
    if resultado == 0:
        print("Esse número é nulo!")
    elif resultado // 1 == resultado:
        print("O número {} é inteiro!".format(resultado))
    else:
        print("O número {} é decimal!".format(resultado))


if operacao == "+":
    _classificar(num1 + num2)
elif operacao == "-":
    _classificar(num1 - num2)
elif operacao == "/":
    # NOTE: like the original, this raises ZeroDivisionError when num2 == 0.
    _classificar(num1 / num2)
elif operacao == "*":
    _classificar(num1 * num2)
else:
    print("Operação inválida!")
| 32.017094
| 105
| 0.592365
| 463
| 3,746
| 4.792657
| 0.12959
| 0.116719
| 0.090131
| 0.140604
| 0.843623
| 0.843623
| 0.843623
| 0.843623
| 0.843623
| 0.843623
| 0
| 0.014223
| 0.268019
| 3,746
| 117
| 106
| 32.017094
| 0.79504
| 0.122531
| 0
| 0.858824
| 0
| 0
| 0.27737
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.435294
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
f4f14704dd8480b7bb8fde0500c028466bfb8587
| 4,900
|
py
|
Python
|
tests/test_executor_syntax.py
|
LennartAtExasol/udf-mock-python
|
ce59699967ac90098bd811e82ce2a7f359da172a
|
[
"MIT"
] | null | null | null |
tests/test_executor_syntax.py
|
LennartAtExasol/udf-mock-python
|
ce59699967ac90098bd811e82ce2a7f359da172a
|
[
"MIT"
] | 15
|
2020-10-15T10:12:34.000Z
|
2022-03-15T18:33:18.000Z
|
tests/test_executor_syntax.py
|
LennartAtExasol/udf-mock-python
|
ce59699967ac90098bd811e82ce2a7f359da172a
|
[
"MIT"
] | 1
|
2021-03-23T10:11:17.000Z
|
2021-03-23T10:11:17.000Z
|
import pytest
from exasol_udf_mock_python.column import Column
from exasol_udf_mock_python.group import Group
from exasol_udf_mock_python.mock_exa_environment import MockExaEnvironment
from exasol_udf_mock_python.mock_meta_data import MockMetaData
from exasol_udf_mock_python.udf_mock_executor import UDFMockExecutor
def test_different_udf_wrapper_function_names():
    """The executor must accept a wrapper function regardless of its name."""
    def udf_wrapper():
        def run(ctx):
            pass

    def udf_wrapper2():
        def run(ctx):
            pass

    def my_wrapper():
        def run(ctx):
            pass

    executor = UDFMockExecutor()
    # Identical SET -> EMITS scenario for each wrapper name: a single input
    # group with one row, and a run() that emits nothing.
    for wrapper in (udf_wrapper, udf_wrapper2, my_wrapper):
        meta = MockMetaData(
            script_code_wrapper_function=wrapper,
            input_type="SET",
            input_columns=[Column("t", int, "INTEGER")],
            output_type="EMITS",
            output_columns=[Column("t", int, "INTEGER")],
        )
        exa = MockExaEnvironment(meta)
        result = executor.run([Group([(1,)])], exa)
        assert result == [Group([])]
def test_udf_wrapper_with_docstring_on_next_line():
    # The wrapper's docstring text is kept verbatim: the executor extracts the
    # wrapper's source, so the docstring is part of the tested input.
    def udf_wrapper():
        """
        wrapper with docstring should raise Exception,
        because their is no easy way to remove docstrings
        to get only the source witin the function
        """
        def run(ctx):
            pass

    executor = UDFMockExecutor()
    in_cols = [Column("t", int, "INTEGER")]
    out_cols = [Column("t", int, "INTEGER")]
    meta = MockMetaData(
        script_code_wrapper_function=udf_wrapper,
        input_type="SET",
        input_columns=in_cols,
        output_type="EMITS",
        output_columns=out_cols,
    )
    exa = MockExaEnvironment(meta)
    result = executor.run([Group([(1,)])], exa)
    assert result == [Group([])]
def test_udf_wrapper_with_docstring_after_empty_lines():
    # NOTE(review): blank lines between the def line and the docstring are the
    # point of this test (the dump stripped them); reconstructed per the test
    # name — confirm against upstream. Docstring text kept verbatim because the
    # executor reads the wrapper's source.
    def udf_wrapper():


        """
        wrapper with docstring should raise Exception,
        because their is no easy way to remove docstrings
        to get only the source witin the function
        """
        def run(ctx):
            pass

    executor = UDFMockExecutor()
    in_cols = [Column("t", int, "INTEGER")]
    out_cols = [Column("t", int, "INTEGER")]
    meta = MockMetaData(
        script_code_wrapper_function=udf_wrapper,
        input_type="SET",
        input_columns=in_cols,
        output_type="EMITS",
        output_columns=out_cols,
    )
    exa = MockExaEnvironment(meta)
    result = executor.run([Group([(1,)])], exa)
    assert result == [Group([])]
def test_udf_wrapper_with_no_empty_line_after_function_name():
    """A wrapper whose body starts directly under the def line must work."""
    def udf_wrapper():
        def run(ctx):
            pass

    executor = UDFMockExecutor()
    in_cols = [Column("t", int, "INTEGER")]
    out_cols = [Column("t", int, "INTEGER")]
    meta = MockMetaData(
        script_code_wrapper_function=udf_wrapper,
        input_type="SET",
        input_columns=in_cols,
        output_type="EMITS",
        output_columns=out_cols,
    )
    exa = MockExaEnvironment(meta)
    result = executor.run([Group([(1,)])], exa)
    assert result == [Group([])]
def test_udf_wrapper_with_white_spaces_in_function_definition():
    # The stray spaces in the inner "def" line are deliberate — they are the
    # very thing this test exercises, so they must not be "cleaned up".
    def udf_wrapper ( ) :
        def run(ctx):
            pass

    executor = UDFMockExecutor()
    in_cols = [Column("t", int, "INTEGER")]
    out_cols = [Column("t", int, "INTEGER")]
    meta = MockMetaData(
        script_code_wrapper_function=udf_wrapper,
        input_type="SET",
        input_columns=in_cols,
        output_type="EMITS",
        output_columns=out_cols,
    )
    exa = MockExaEnvironment(meta)
    result = executor.run([Group([(1,)])], exa)
    assert result == [Group([])]
def test_exception_udf_wrapper_with_parameter():
    """A wrapper that takes a parameter is invalid and must raise."""
    def udf_wrapper(param):
        def run(ctx):
            pass

    executor = UDFMockExecutor()
    # Whether MockMetaData construction or executor.run fails, some step
    # inside the context must raise.
    with pytest.raises(Exception):
        meta = MockMetaData(
            script_code_wrapper_function=udf_wrapper,
            input_type="SET",
            input_columns=[Column("t", int, "INTEGER")],
            output_type="EMITS",
            output_columns=[Column("t", int, "INTEGER")],
        )
        exa = MockExaEnvironment(meta)
        result = executor.run([Group([(1,)])], exa)
| 30.625
| 74
| 0.59102
| 521
| 4,900
| 5.301344
| 0.143954
| 0.06517
| 0.081101
| 0.098479
| 0.867125
| 0.835264
| 0.802679
| 0.79218
| 0.79218
| 0.79218
| 0
| 0.002871
| 0.289184
| 4,900
| 159
| 75
| 30.81761
| 0.790123
| 0.057347
| 0
| 0.786885
| 0
| 0
| 0.0423
| 0
| 0
| 0
| 0
| 0
| 0.057377
| 1
| 0.180328
| false
| 0.065574
| 0.04918
| 0
| 0.229508
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
520f0a163f0f364acded7670d3a22e00d105eae5
| 17,783
|
py
|
Python
|
FreeFaucet.io_Register_Bot.py
|
snoowl26/Crypto-Faucet-Bot
|
736695ce318e8f86846824d7c705a535e42650ef
|
[
"MIT"
] | 7
|
2021-03-04T05:08:12.000Z
|
2022-03-25T09:04:13.000Z
|
FreeFaucet.io_Register_Bot.py
|
snoowl26/Crypto-Faucet-Bot
|
736695ce318e8f86846824d7c705a535e42650ef
|
[
"MIT"
] | null | null | null |
FreeFaucet.io_Register_Bot.py
|
snoowl26/Crypto-Faucet-Bot
|
736695ce318e8f86846824d7c705a535e42650ef
|
[
"MIT"
] | 10
|
2021-03-05T06:08:57.000Z
|
2022-03-28T13:11:24.000Z
|
from selenium import webdriver
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import ElementNotInteractableException
from io import BytesIO
import time
import keyboard
import sys
from random import randrange
import os

# --- Browser configuration --------------------------------------------------
driver_path = "chromedriver.exe"
brave_path = "C:/Program Files/Google/Chrome/Application/chrome.exe"
dir_path = os.path.dirname(os.path.realpath(__file__))
credentials = "creds.txt"

option = webdriver.ChromeOptions()
option.binary_location = brave_path
option.add_argument("--incognito")
#option.add_argument("--headless")

# NOTE(review): readlines() keeps the trailing "\n" on every line, so the
# typed username/password include a newline. This mirrors the original
# behaviour — confirm whether the values should be .strip()ed.
with open(credentials) as f:
    creds = f.readlines()

time.sleep(1)

# Create new Instance of Chrome
browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
browser.maximize_window()
print("Browser launched")

# All faucet sites share the same page layout, so one set of XPaths covers
# every registration form.
REGISTER_LINK_XPATH = "/html/body/header/div/div[1]/nav/div/ul/li[4]/a"
_FORM_BASE = "/html/body/main/section/section[1]/div/div/div[2]/div/div[2]"
EMAIL_FIELD_XPATH = _FORM_BASE + "/div[1]/input"
PASSWORD_FIELD_XPATH = _FORM_BASE + "/div[2]/input"
CONFIRM_FIELD_XPATH = _FORM_BASE + "/div[3]/input"
SUBMIT_BUTTON_XPATH = _FORM_BASE + "/button"

# (display name, registration URL with referral code, index of the username
# line in creds.txt — the password sits on the following line)
SITES = [
    ("Freedash.io", "https://freedash.io/?ref=84771", 9),
    ("Freenem.io", "https://freenem.com/?ref=264523", 13),
    ("Freecardano.com", "https://freecardano.com/?ref=274019", 17),
    ("Coinfaucet.io", "https://coinfaucet.io/?ref=747848", 21),
    ("freebitcoin.io", "https://freebitcoin.io/?ref=424218", 25),
    ("freesteam.io", "https://freesteam.io/?ref=95823", 29),
    ("freeusdcoin.com", "https://freeusdcoin.com/?ref=99087", 33),
    ("freechainlink.io", "https://freechainlink.io/?ref=52222", 37),
    ("free-tron.com", "https://free-tron.com/?ref=147925", 41),
    ("freebinancecoin.com", "https://freebinancecoin.com/?ref=100259", 45),
    ("freeneo.io", "https://freeneo.io/?ref=62439", 49),
    ("free-ltc.com", "https://free-ltc.com/?ref=67660", 53),
    ("https://freeethereum.com/", "https://freeethereum.com/?ref=145922", 57),
]


def register_site(name, url, cred_index):
    """Open *url* and submit the shared registration form.

    Uses creds[cred_index] as the e-mail/username and creds[cred_index + 1]
    as the password (typed twice: password + confirmation), then clicks the
    register button and waits for the site to process it.
    """
    print("Navigating to " + name)
    browser.get(url)
    username = creds[cred_index]
    password = creds[cred_index + 1]
    reg_button = browser.find_element_by_xpath(REGISTER_LINK_XPATH)
    reg_button.click()
    time.sleep(1)
    dash_un_field = browser.find_element_by_xpath(EMAIL_FIELD_XPATH)
    dash_un_field.click()
    dash_un_field.send_keys(username)
    print("Entered e-mail")
    dash_pw_field = browser.find_element_by_xpath(PASSWORD_FIELD_XPATH)
    dash_pw_field.click()
    dash_pw_field.send_keys(password)
    print("Entered password")
    dash_pw_field2 = browser.find_element_by_xpath(CONFIRM_FIELD_XPATH)
    dash_pw_field2.click()
    dash_pw_field2.send_keys(password)
    print("Confirmed password")
    time.sleep(1)
    login_button = browser.find_element_by_xpath(SUBMIT_BUTTON_XPATH)
    login_button.click()
    print("Clicked Register Button")
    time.sleep(5)


# The original wrapped the whole sequence in "while True:", which would have
# re-registered the same accounts forever and left the cleanup below
# unreachable. Each site only needs to be registered once, so run one pass.
for site_name, site_url, cred_index in SITES:
    register_site(site_name, site_url, cred_index)

browser.close()
print("All sites registered")
print("Click the registration links in your e-mail for each site")
print("Then run the main FreeFaucet.io_Bot")
| 35.637275
| 128
| 0.65945
| 2,613
| 17,783
| 4.282434
| 0.068504
| 0.090617
| 0.065058
| 0.081323
| 0.838695
| 0.828686
| 0.828686
| 0.828686
| 0.828686
| 0.828686
| 0
| 0.027939
| 0.146601
| 17,783
| 498
| 129
| 35.708835
| 0.70941
| 0.003824
| 0
| 0.765396
| 0
| 0.190616
| 0.382251
| 0.266606
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.190616
| 0.032258
| 0
| 0.032258
| 0.202346
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
52298421ee7d1a7256d80725509c93393d573612
| 58
|
py
|
Python
|
backend/ezinfo/online_codes.py
|
YizheZhang-Ervin/YeStock
|
a05c54a46d2bfba390adfe1b42b56aa42fb35c5e
|
[
"MIT"
] | null | null | null |
backend/ezinfo/online_codes.py
|
YizheZhang-Ervin/YeStock
|
a05c54a46d2bfba390adfe1b42b56aa42fb35c5e
|
[
"MIT"
] | null | null | null |
backend/ezinfo/online_codes.py
|
YizheZhang-Ervin/YeStock
|
a05c54a46d2bfba390adfe1b42b56aa42fb35c5e
|
[
"MIT"
] | null | null | null |
class Run:
    """Placeholder data provider that serves an empty chart-series payload."""

    @staticmethod
    def run():
        """Return a dict of empty series under the keys 'x', 'y' and 'y2'.

        Declared as a @staticmethod so it works both as ``Run.run()`` and on
        an instance (``Run().run()``); the original bare ``def run()`` raised
        TypeError when invoked through an instance because the instance was
        passed to a zero-argument function.
        """
        # A fresh dict (with fresh lists) is built on every call, so callers
        # cannot corrupt a shared default.
        return {'x': [], 'y': [], 'y2': []}
| 19.333333
| 34
| 0.413793
| 8
| 58
| 3
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021277
| 0.189655
| 58
| 3
| 34
| 19.333333
| 0.489362
| 0
| 0
| 0
| 0
| 0
| 0.067797
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
522f3946783a169855de8b0cdd391905ece8a292
| 201
|
py
|
Python
|
tests/test_blocklist.py
|
di/disposable-email-domains
|
88845fec288650d8716498727e20b6de0d52930f
|
[
"MIT"
] | 31
|
2016-12-06T15:02:49.000Z
|
2021-01-11T19:47:27.000Z
|
tests/test_blocklist.py
|
di/disposable-email-domains
|
88845fec288650d8716498727e20b6de0d52930f
|
[
"MIT"
] | 3
|
2017-05-24T08:46:43.000Z
|
2020-09-03T18:16:22.000Z
|
tests/test_blocklist.py
|
di/disposable-email-domains
|
88845fec288650d8716498727e20b6de0d52930f
|
[
"MIT"
] | 10
|
2017-06-29T07:14:40.000Z
|
2020-09-19T06:25:14.000Z
|
from disposable_email_domains import blocklist
def test_blocklist_inclusion():
    """A known disposable-mail domain must be present in the blocklist."""
    known_disposable = 'spamcowboy.com'
    assert known_disposable in blocklist
def test_blocklist_exclusion():
    """A domain that is not disposable must stay out of the blocklist."""
    legitimate_domain = 'spamcannon.com'
    assert legitimate_domain not in blocklist
| 20.1
| 46
| 0.79602
| 25
| 201
| 6.16
| 0.64
| 0.155844
| 0.207792
| 0.324675
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144279
| 201
| 9
| 47
| 22.333333
| 0.895349
| 0
| 0
| 0
| 0
| 0
| 0.139303
| 0
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.4
| true
| 0
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.