hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e63342928933b2fa8975974584375ccff8f33a74
| 25,246
|
py
|
Python
|
Step_Function/Plot_Step_function_experiments.py
|
jz-fun/Vector-valued-Control-Variates-Code
|
4b2e838c447b7da66abeb416a424f0dbd2375503
|
[
"MIT"
] | null | null | null |
Step_Function/Plot_Step_function_experiments.py
|
jz-fun/Vector-valued-Control-Variates-Code
|
4b2e838c447b7da66abeb416a424f0dbd2375503
|
[
"MIT"
] | null | null | null |
Step_Function/Plot_Step_function_experiments.py
|
jz-fun/Vector-valued-Control-Variates-Code
|
4b2e838c447b7da66abeb416a424f0dbd2375503
|
[
"MIT"
] | null | null | null |
import math
import pickle

import numpy as np
import seaborn as sns
import torch
from matplotlib import pyplot as plt

from src.src_vvCV_MD1P.stein_operators import *
from src.src_vvCV_MD1P.sv_CV import *
from src.src_vvCV_MD1P.vv_CV_MD1P import *
from src.src_vvCV_MD1P.vv_CV_FixB_MD1P import *
from src.src_vvCV_MD1P.vv_CV_unbalanced_FixB_MD1P import *
# ======================
# Step Function
# ======================

# Experiment configuration for the step-function ("type 2") runs:
# base kernel, learning rate, replication count and per-run sizes.
my_base_kernel = rbf_kernel            # base kernel used inside the Stein kernel
my_lr = 0.0003                         # SGD learning rate for all CV fits below
my_poly_ker_parm = torch.Tensor([1,1]) # fixed parameters for the polynomial base kernel
no_replica_ty2 = 1                     # number of independent replications
no_epochs_ty2 = 400                    # optimisation epochs per model
no_points_per_func_ty2= 40             # training points per integrand

for i in range(no_replica_ty2):
    print("REP {} out of {}-----------".format(i+1, no_replica_ty2 ))

    # Sampling distribution: 1-d Gaussian with mean `mu` and covariance `var`.
    dim = 1
    factor = torch.ones(1) * 1
    mu = torch.zeros(dim, dtype=torch.float) + 0
    var = torch.eye(dim, dtype=torch.float) * factor  # must use eye() so var is a (dim, dim) covariance matrix

    print(mu, var)

    def my_func_1(X):
        # Low-fidelity step function: -1 for X < 0, 2 for X >= 0.
        return (0.5 + (2 * (X >= 0) - 1) * 1.5) * torch.ones(1, dtype=torch.float)

    def my_func_2(X):
        # High-fidelity step function (indicator): 0 for X < 0, 1 for X >= 0.
        return (X >= 0) * torch.ones(1, dtype=torch.float)

    # Training samples — seeds fixed so every method below sees identical data.
    print("REP {} out of {}-----------".format(i+1, no_replica_ty2 ))
    torch.manual_seed(5)
    X1 = mu + torch.sqrt(factor) * torch.randn(no_points_per_func_ty2, dim)
    Y1 = my_func_1(X1)

    # --- For MD1P
    torch.manual_seed(6)
    X2 = mu + torch.sqrt(factor) * torch.randn(no_points_per_func_ty2, dim)
    Y2 = my_func_2(X2)

    # --- For 1D1P
    Y1_X2 = my_func_1(X2)
    Ys_on_X2 = torch.stack((Y1_X2, Y2), dim=1).squeeze()

    # Scores of the sampling distribution at the training inputs.
    # NOTE(review): mu is re-bound here to shape (dim, 1); later plotting code relies on this.
    mu = torch.zeros(dim, 1)
    cov = var
    score_X1 = multivariate_Normal_score(mu, cov, X1)
    score_X1.size()
    score_X2 = multivariate_Normal_score(mu, cov, X2)

    # Stacked inputs/outputs/scores feeding the vector-valued (multi-task) models.
    xall = torch.stack((X1, X2), dim=0)
    xall.size()
    yall = torch.stack((Y1, Y2), dim=0)
    yall.size()
    score_all = torch.stack((score_X1, score_X2), dim=0)
    score_all.size()

    # f1: scalar-valued CV on the low-fidelity function alone (SE kernel).
    print("REP {} out of {} --- sv-CV-f1 -----------".format(i+1, no_replica_ty2 ))
    torch.manual_seed(0)
    Ty2_SCV_scalarvaluedfunc1 = SV_CV_scalarvaluedfuncs_model(penalized_ls_objective_scalarvaluedfunc, stein_base_kernel_MV_2, my_base_kernel, X1, Y1, score_X1)
    torch.manual_seed(0)
    Ty2_SCV_scalarvaluedfunc1.do_tune_kernelparams_negmllk(batch_size_tune = 5, flag_if_use_medianheuristic=False, beta_cstkernel=0., lr=0.02, epochs=15, verbose=True)
    torch.manual_seed(0)
    Ty2_SCV_scalarvaluedfunc1.do_optimize_sv_CV(regularizer_const = 1e-5, batch_size = 10, lr = my_lr, epochs = no_epochs_ty2, verbose = True)

    # f2: scalar-valued CV on the high-fidelity function alone (SE kernel).
    print("REP {} out of {}--- sv-CV-f2 -----------".format(i+1, no_replica_ty2 ))
    torch.manual_seed(0)
    Ty2_SCV_scalarvaluedfunc2 = SV_CV_scalarvaluedfuncs_model(penalized_ls_objective_scalarvaluedfunc,stein_base_kernel_MV_2, my_base_kernel, X2, Y2, score_X2)
    torch.manual_seed(0)
    Ty2_SCV_scalarvaluedfunc2.do_tune_kernelparams_negmllk(batch_size_tune = 5, flag_if_use_medianheuristic=False, beta_cstkernel=0., lr=0.02, epochs=15, verbose=True)
    torch.manual_seed(0)
    Ty2_SCV_scalarvaluedfunc2.do_optimize_sv_CV(regularizer_const=1e-5, batch_size=10, lr=my_lr, epochs=no_epochs_ty2, verbose=True)

    # vv-CV: MD1P with B fixed
    print("REP {} out of {} --- vv-CV: MD1P with B fixed -----------".format(i+1, no_replica_ty2 ))
    torch.manual_seed(0)
    Ty2_SCV_vectorvaluedfunc_fixB = VV_CV_vectorvaluedfuncs_model_fixB(vv_cv_objective=penalized_ls_objective_vectorvaluedfunc_fixB, prior_kernel=stein_base_kernel_MV_2, base_kernel=my_base_kernel, Xs_tensor=xall, Ys_tensor=yall, scores_Tensor=score_all)
    torch.manual_seed(0)
    Ty2_SCV_vectorvaluedfunc_fixB.do_tune_kernelparams_negmllk(batch_size_tune=5, flag_if_use_medianheuristic=False, beta_cstkernel=0., lr=0.02, epochs=15, verbose=True) # bs 5; lr 0.2; epochs 5
    torch.manual_seed(0)
    # set B (task-similarity matrix) by hand for this variant
    Ty2_SCV_vectorvaluedfunc_fixB.B = torch.Tensor([[0.1, 0.01], [0.01,0.1]])
    Ty2_SCV_vectorvaluedfunc_fixB.do_optimize_vv_CV(regularizer_const=1e-5, batch_size=5, lr=my_lr, epochs=no_epochs_ty2, verbose=True)

    # ---------------
    # vv-CV: MD1P with B fixed -- ANOTHER B
    print("REP {} out of {} --- vv-CV: MD1P with B fixed --- Another B-----------".format(i+1, no_replica_ty2 ))
    torch.manual_seed(0)
    Ty2_SCV_vectorvaluedfunc_fixB_another = VV_CV_vectorvaluedfuncs_model_fixB(vv_cv_objective=penalized_ls_objective_vectorvaluedfunc_fixB, prior_kernel=stein_base_kernel_MV_2, base_kernel=my_base_kernel, Xs_tensor=xall, Ys_tensor=yall, scores_Tensor=score_all)
    torch.manual_seed(0)
    Ty2_SCV_vectorvaluedfunc_fixB_another.do_tune_kernelparams_negmllk(batch_size_tune=5, flag_if_use_medianheuristic=False, beta_cstkernel=0., lr=0.02, epochs=15, verbose=True) # bs 5; lr 0.2; epochs 5
    torch.manual_seed(0)
    # set B
    Ty2_SCV_vectorvaluedfunc_fixB_another.B = torch.Tensor([[0.5, 0.01], [0.01, 0.5]]) # a value close to estimated B
    Ty2_SCV_vectorvaluedfunc_fixB_another.do_optimize_vv_CV(regularizer_const=1e-5, batch_size=5, lr=my_lr, epochs=no_epochs_ty2, verbose=True) # 0.002 ; 5

    # ---------------
    # vv-CV: MD1P with learning B (B estimated jointly with the CV parameters)
    print("REP {} out of {} --- vv-CV: MD1P with learning B -----------".format(i+1, no_replica_ty2 ))
    torch.manual_seed(0)
    Ty2_SCV_vectorvaluedfunc = VV_CV_vectorvaluedfuncs_model(vv_cv_objective=penalized_ls_objective_vectorvaluedfunc, prior_kernel=stein_base_kernel_MV_2, base_kernel=my_base_kernel, Xs_tensor=xall, Ys_tensor=yall, scores_Tensor=score_all)
    torch.manual_seed(0)
    Ty2_SCV_vectorvaluedfunc.do_tune_kernelparams_negmllk(batch_size_tune = 5, flag_if_use_medianheuristic=False, beta_cstkernel=0., lr=0.02, epochs=15, verbose=True) # bs 5; lr 0.2; epochs 5
    torch.manual_seed(0)
    Ty2_SCV_vectorvaluedfunc.do_optimize_vv_CV(regularizer_const=1e-5, regularizer_const_FB=1, batch_size=5, lr=my_lr, epochs=no_epochs_ty2, verbose=True) # 0.002; 5

    # --------------
    # sv-polynomials: f1 (scalar CV with first-order polynomial kernel, kernel params fixed)
    print("REP {} out of {} --- sv-polynomials: f1 -----------".format(i + 1, no_replica_ty2))
    torch.manual_seed(0)
    Ty2_SCV_svpolynomials_f1 = SV_CV_scalarvaluedfuncs_model(penalized_ls_objective_scalarvaluedfunc, stein_base_kernel_MV_2, polynomial_kernel, X1, Y1, score_X1)
    Ty2_SCV_svpolynomials_f1.optim_base_kernel_parms = my_poly_ker_parm
    torch.manual_seed(0)
    Ty2_SCV_svpolynomials_f1.do_optimize_sv_CV(regularizer_const=1e-5, batch_size=10, lr=my_lr, epochs=no_epochs_ty2, verbose=True) # 0.002

    # sv-polynomials: f2
    print("REP {} out of {} --- sv-polynomials: f2 -----------".format(i + 1, no_replica_ty2))
    torch.manual_seed(0)
    Ty2_SCV_svpolynomials_f2 = SV_CV_scalarvaluedfuncs_model(penalized_ls_objective_scalarvaluedfunc, stein_base_kernel_MV_2, polynomial_kernel, X2, Y2, score_X2)
    Ty2_SCV_svpolynomials_f2.optim_base_kernel_parms = my_poly_ker_parm
    torch.manual_seed(0)
    Ty2_SCV_svpolynomials_f2.do_optimize_sv_CV(regularizer_const=1e-5, batch_size=10, lr=my_lr, epochs=no_epochs_ty2, verbose=True) # 0.002

    # vv-polynomials: MD1P with B fixed
    print("REP {} out of {} --- vv-polynomials: MD1P with B fixed -----------".format(i+1, no_replica_ty2 ))
    torch.manual_seed(0)
    Ty2_SCV_vvpolynomials_MD1P_fixB = VV_CV_vectorvaluedfuncs_model_fixB(vv_cv_objective=penalized_ls_objective_vectorvaluedfunc_fixB, prior_kernel=stein_base_kernel_MV_2, base_kernel=polynomial_kernel, Xs_tensor=xall, Ys_tensor=yall, scores_Tensor=score_all)
    Ty2_SCV_vvpolynomials_MD1P_fixB.optim_base_kernel_parms = my_poly_ker_parm
    torch.manual_seed(0)
    # set B
    Ty2_SCV_vvpolynomials_MD1P_fixB.B = torch.Tensor([[0.1, 0.01], [0.01,0.1]])
    Ty2_SCV_vvpolynomials_MD1P_fixB.do_optimize_vv_CV(regularizer_const=1e-5, batch_size=5, lr=my_lr, epochs=no_epochs_ty2, verbose=True)

    # vv-polynomials: MD1P with B fixed --- ANOTHER B
    print("REP {} out of {} --- vv-polynomials: MD1P with B fixed ---Another B -----------".format(i+1, no_replica_ty2 ))
    torch.manual_seed(0)
    Ty2_SCV_vvpolynomials_MD1P_fixB_another = VV_CV_vectorvaluedfuncs_model_fixB(vv_cv_objective=penalized_ls_objective_vectorvaluedfunc_fixB, prior_kernel=stein_base_kernel_MV_2, base_kernel=polynomial_kernel, Xs_tensor=xall, Ys_tensor=yall, scores_Tensor=score_all)
    Ty2_SCV_vvpolynomials_MD1P_fixB_another.optim_base_kernel_parms = my_poly_ker_parm
    torch.manual_seed(0)
    # set B
    Ty2_SCV_vvpolynomials_MD1P_fixB_another.B = torch.Tensor([[0.5, 0.01], [0.01, 0.5]])
    Ty2_SCV_vvpolynomials_MD1P_fixB_another.do_optimize_vv_CV(regularizer_const=1e-5, batch_size=5, lr=my_lr, epochs=no_epochs_ty2, verbose=True)

    # vv-polynomials: MD1P with learning B
    print("REP {} out of {} --- vv-polynomials: MD1P with learning B -----------".format(i+1, no_replica_ty2 ))
    torch.manual_seed(0)
    Ty2_SCV_vvpolynomials_MD1P = VV_CV_vectorvaluedfuncs_model(vv_cv_objective=penalized_ls_objective_vectorvaluedfunc, prior_kernel=stein_base_kernel_MV_2, base_kernel=polynomial_kernel, Xs_tensor=xall, Ys_tensor=yall, scores_Tensor=score_all)
    Ty2_SCV_vvpolynomials_MD1P.optim_base_kernel_parms = my_poly_ker_parm
    torch.manual_seed(0)
    Ty2_SCV_vvpolynomials_MD1P.do_optimize_vv_CV(regularizer_const=1e-5, regularizer_const_FB=1, batch_size=5, lr=my_lr, epochs=no_epochs_ty2, verbose=True)
def helper_standard_Gaussian_PDF(x):
    """Return the standard-normal density at each row of ``x``.

    Args:
        x: tensor of shape (n, 1) — one univariate sample per row.

    Returns:
        Tensor of shape (n,) holding N(0, 1) densities.

    Raises:
        AssertionError: if ``x`` is not (n, 1).
    """
    assert x.size(1) == 1, "Dim should be 1"
    # Vectorized: replaces the original per-sample Python loop (which also
    # rebound the module-level loop variable `i`). Same values, same shape.
    return ((2. * math.pi) ** (-0.5)) * torch.exp(-0.5 * x.squeeze(1).pow(2))
## Plot fitted lines for the squared-exponential kernel CVs.
sns.set_style("white")

# Pool the two training sets and their densities under the sampling distribution.
all_x = torch.cat((X1, X2), dim=0)
all_x_dens = helper_standard_Gaussian_PDF(all_x)
all_x = all_x.squeeze()
all_x.size()

# Sorted views of the training inputs (needed for monotone line plots).
X1_sorted_values, X1_sorted_indices = X1.squeeze().sort()
X2_sorted_values, X2_sorted_indices = X2.squeeze().sort()

# Dense, de-duplicated evaluation grid covering the training points plus [-3, 3].
test_x = torch.unique(torch.sort(torch.cat((X1_sorted_values, X2_sorted_values, torch.linspace(-3, 3, 100)))).values)
test_x = test_x.unsqueeze(1)
test_x.size()
test_x_sorted_values, test_x_sorted_indices = test_x.squeeze().sort()

# Score function of the sampling distribution at train / pooled / test points
# (mu and cov were set inside the experiment loop above).
score_X1 = multivariate_Normal_score(mu, cov, X1)
score_X2 = multivariate_Normal_score(mu, cov, X2)
score_all_x = multivariate_Normal_score(mu, cov, all_x.unsqueeze(1))
score_test_x = multivariate_Normal_score(mu, cov, test_x )
# Extract fitted curves from the trained models for plotting.
# NOTE(review): indentation was reconstructed from a flattened dump; the
# torch.no_grad() scope is assumed to cover the kernel evaluation and the
# fitted-value computation — confirm against the original repository.

# vv-CV with estimated B (squared-exponential base kernel).
vv_SEk_theta_hat = Ty2_SCV_vectorvaluedfunc.fitting_obj.theta.detach().clone()
vv_SEk_B = Ty2_SCV_vectorvaluedfunc.fitting_obj.B.detach().clone()
vv_SEk_est = Ty2_SCV_vectorvaluedfunc.fitting_obj.c.detach().clone().squeeze()
with torch.no_grad():
    vv_SEk_k_XX = Ty2_SCV_vectorvaluedfunc.fitting_obj.kernel_obj.cal_stein_base_kernel(test_x, all_x.unsqueeze(1), score_test_x, score_all_x)
    vv_SEk_y_fitted = vv_SEk_k_XX @ vv_SEk_theta_hat @ vv_SEk_B + vv_SEk_est
vv_SEk_y_fitted.size()
vv_SEk_data_sorted_values, vv_SEk_data_sorted_indices = all_x.sort()

# vv-CV with estimated B (first-order polynomial base kernel).
vv_1polnk_theta_hat = Ty2_SCV_vvpolynomials_MD1P.fitting_obj.theta.detach().clone()
vv_1polnk_B = Ty2_SCV_vvpolynomials_MD1P.fitting_obj.B.detach().clone()
vv_1polnk_est = Ty2_SCV_vvpolynomials_MD1P.fitting_obj.c.detach().clone().squeeze()
with torch.no_grad():
    vv_1polnk_k_XX = Ty2_SCV_vvpolynomials_MD1P.fitting_obj.kernel_obj.cal_stein_base_kernel(test_x, all_x.unsqueeze(1), score_test_x, score_all_x)
    vv_1polnk_y_fitted = vv_1polnk_k_XX @ vv_1polnk_theta_hat @ vv_1polnk_B + vv_1polnk_est
vv_1polnk_y_fitted.size()
vv_1polnk_data_sorted_values, vv_1polnk_data_sorted_indices = all_x.sort()

# Scalar CV fitted on f1 only (low fidelity, SE kernel).
sv_SEk_LF_theta_hat = Ty2_SCV_scalarvaluedfunc1.fitting_obj.theta.detach().clone()
sv_SEk_LF_est = Ty2_SCV_scalarvaluedfunc1.fitting_obj.c.clone().detach()
with torch.no_grad():
    sv_SEk_LF_k_XX = Ty2_SCV_scalarvaluedfunc1.fitting_obj.kernel_obj.cal_stein_base_kernel(test_x, X1, score_test_x, score_X1)
    sv_SEk_LF_y_fitted = sv_SEk_LF_k_XX @ sv_SEk_LF_theta_hat + sv_SEk_LF_est
    sv_SEk_LF_y_fitted = sv_SEk_LF_y_fitted.squeeze()
sv_SEk_LF_data_sorted_values, sv_SEk_LF_data_sorted_indices = X1.squeeze().sort()

# Scalar CV fitted on f2 only (high fidelity, SE kernel).
sv_SEk_HF_theta_hat = Ty2_SCV_scalarvaluedfunc2.fitting_obj.theta.detach().clone()
sv_SEk_HF_est = Ty2_SCV_scalarvaluedfunc2.fitting_obj.c.clone().detach()
with torch.no_grad():
    sv_SEk_HF_k_XX = Ty2_SCV_scalarvaluedfunc2.fitting_obj.kernel_obj.cal_stein_base_kernel(test_x, X2, score_test_x, score_X2)
    sv_SEk_HF_y_fitted = sv_SEk_HF_k_XX @ sv_SEk_HF_theta_hat + sv_SEk_HF_est
    sv_SEk_HF_y_fitted = sv_SEk_HF_y_fitted.squeeze()
sv_SEk_HF_data_sorted_values, sv_SEk_HF_data_sorted_indices = X2.squeeze().sort()

# Exact step functions used as reference curves in the first two panels.
x_step = np.linspace(-3,3, 3)
y_LF = [-1, -1, 2]
y_HF = [0, 0 , 1]
x_illu = np.linspace(-3, 3, 500)
# Extract Saved Outputs.
# Objects must be read back in exactly the order they were pickled.
# Fix: the file handle was previously bound to the name `input`, shadowing
# the `input` builtin used earlier in this script; renamed to `saved_file`.
# NOTE(review): 'Step_funcion' is the on-disk filename (typo included) — keep it.
with open('../data/Step_funcion_all_data.pkl', 'rb') as saved_file:
    no_replica_ty2 = pickle.load(saved_file)
    no_epochs_ty2 = pickle.load(saved_file)
    no_points_per_func_ty2 = pickle.load(saved_file)
    #
    large_saved_MC_ests_ty2 = pickle.load(saved_file)
    large_save_est_scalar_f1_ty2 = pickle.load(saved_file)
    large_save_closed_form_sols_scalar_f1_ty2 = pickle.load(saved_file)
    large_save_est_scalar_f2_ty2 = pickle.load(saved_file)
    large_save_closed_form_sols_scalar_f2_ty2 = pickle.load(saved_file)
    large_save_est_vecfunc_ty2 = pickle.load(saved_file)
    large_save_est_vecfunc_fixB_ty2 = pickle.load(saved_file)
    large_save_est_vecfunc_fixB_another_ty2 = pickle.load(saved_file)
    # sv-polynomials
    large_save_est_scalar_f1_svpolynomials_ty2 = pickle.load(saved_file)
    large_save_closed_form_sols_scalar_f1_svpolynomials_ty2 = pickle.load(saved_file)
    large_save_est_scalar_f2_svpolynomials_ty2 = pickle.load(saved_file)
    large_save_closed_form_sols_scalar_f2_svpolynomials_ty2 = pickle.load(saved_file)
    large_save_est_vecfunc_vvpolynomials_ty2_MD1P = pickle.load(saved_file)
    large_save_est_vecfunc_vvpolynomials_fixB_ty2 = pickle.load(saved_file)
    large_save_est_vecfunc_vvpolynomials_fixB_another_ty2 = pickle.load(saved_file)
# Build the 4-panel figure: [0] low-fidelity fit, [1] high-fidelity fit,
# [2] SE-kernel absolute errors vs epochs, [3] polynomial-kernel errors.
# NOTE(review): indentation reconstructed from a flattened dump; the whole
# section is assumed to sit inside torch.no_grad() — confirm against the repo.
with torch.no_grad():
    true_vals = [0.5, 0.5]  # ground-truth integrals of f1 and f2 under N(0, 1)
    fig, axs = plt.subplots(1, 4, sharex=False, sharey=False)
    fig.set_figwidth(20)
    sns.set_style("ticks") # sns.set_style("whitegrid")
    clrs = sns.color_palette("husl", 16)
    start_pos = 0
    axs[2].set_xlabel('Number of Epochs', fontsize=20)
    axs[3].set_xlabel('Number of Epochs', fontsize=20)
    axs[2].tick_params(labelsize=20)
    axs[3].tick_params(labelsize=20)
    # Epoch indices at which markers are drawn: 0, 19, 39, ..., 399.
    show_indx = np.arange(0, 410, 20)
    show_indx = show_indx - 1
    show_indx[0] = 0
    show_indx
    axs[2].set_title("Squared-exponential kernel CVs", fontsize=18)
    axs[3].set_title("First-order polynomial kernel CVs", fontsize=18)
    axs[2].set_ylabel(r'Absolute error for $\Pi_H [f_H]$', fontsize=18)
    # fig.set_figwidth(12)
    # Monte Carlo and closed-form baselines (mean abs. error, horizontal lines).
    mc_f1_mean_ty2 = (large_saved_MC_ests_ty2[:, 0] - true_vals[0]).abs().mean().repeat(1, no_epochs_ty2)
    mc_f2_mean_ty2 = (large_saved_MC_ests_ty2[:, 1] - true_vals[1]).abs().mean().repeat(1, no_epochs_ty2)
    mc_f1_std_ty2 = (large_saved_MC_ests_ty2[:, 0] - true_vals[0]).abs().std(dim=0) / (torch.ones(1) * no_replica_ty2).sqrt().repeat(1, no_epochs_ty2)
    mc_f2_std_ty2 = (large_saved_MC_ests_ty2[:, 1] - true_vals[1]).abs().std(dim=0) / (torch.ones(1) * no_replica_ty2).sqrt().repeat(1, no_epochs_ty2)
    axs[2].axhline(mc_f2_mean_ty2[0, 0], color='black', label='MC')
    axs[3].axhline(mc_f2_mean_ty2[0, 0], color='black', label='MC')
    axs[2].axhline((large_save_closed_form_sols_scalar_f2_ty2 - true_vals[1]).abs().mean().detach().numpy(), color='black', linestyle='-.', label='CF')
    axs[3].axhline((large_save_closed_form_sols_scalar_f2_svpolynomials_ty2 - true_vals[1]).abs().mean().detach().numpy(),color='black', linestyle='-.', label='CF')
    # ------- scalar CV (SE kernel): mean abs. error and standard error over replications
    sv_f1_mean_ty2 = (large_save_est_scalar_f1_ty2 - true_vals[0]).abs().mean(dim=0).detach().numpy()
    sv_f2_mean_ty2 = (large_save_est_scalar_f2_ty2 - true_vals[1]).abs().mean(dim=0).detach().numpy()
    sv_f1_std_ty2 = (large_save_est_scalar_f1_ty2 - true_vals[0]).abs().std(dim=0).detach().numpy() / np.sqrt(no_replica_ty2)
    sv_f2_std_ty2 = (large_save_est_scalar_f2_ty2 - true_vals[1]).abs().std(dim=0).detach().numpy() / np.sqrt(no_replica_ty2)
    axs[2].plot(show_indx + 1, sv_f2_mean_ty2[show_indx], c=clrs[1], marker='+', label='CV')
    axs[2].fill_between(show_indx + 1, sv_f2_mean_ty2[show_indx] - sv_f2_std_ty2[show_indx], sv_f2_mean_ty2[show_indx] + sv_f2_std_ty2[show_indx], alpha=0.3, facecolor=clrs[1])
    # ------- vv-CV with fixed B (1), SE kernel
    vv_f1_mean_ty2_fixB = (large_save_est_vecfunc_fixB_ty2[:, :, 0] - true_vals[0]).abs().mean(dim=0).detach().numpy()
    vv_f2_mean_ty2_fixB = (large_save_est_vecfunc_fixB_ty2[:, :, 1] - true_vals[1]).abs().mean(dim=0).detach().numpy()
    vv_f1_std_ty2_fixB = (large_save_est_vecfunc_fixB_ty2[:, :, 0] - true_vals[0]).abs().std(dim=0).detach().numpy() / np.sqrt(no_replica_ty2)
    vv_f2_std_ty2_fixB = (large_save_est_vecfunc_fixB_ty2[:, :, 1] - true_vals[1]).abs().std(dim=0).detach().numpy() / np.sqrt(no_replica_ty2)
    axs[2].plot(show_indx + 1, (large_save_est_vecfunc_fixB_ty2[:, :, 1] - true_vals[1]).abs().mean(dim=0).detach().numpy()[show_indx],\
        c=clrs[7], marker='x', label='vv-CV with Fixed B (1)')
    axs[2].fill_between(show_indx + 1, vv_f2_mean_ty2_fixB[show_indx] - vv_f2_std_ty2_fixB[show_indx], vv_f2_mean_ty2_fixB[show_indx] + vv_f2_std_ty2_fixB[show_indx], alpha=0.3, facecolor=clrs[7])
    # ------- vv-CV with fixed B (2), SE kernel
    vv_f1_mean_ty2_fixB_another = (large_save_est_vecfunc_fixB_another_ty2[:, :, 0] - true_vals[0]).abs().mean(dim=0).detach().numpy()
    vv_f2_mean_ty2_fixB_another = (large_save_est_vecfunc_fixB_another_ty2[:, :, 1] - true_vals[1]).abs().mean(dim=0).detach().numpy()
    vv_f1_std_ty2_fixB_another = (large_save_est_vecfunc_fixB_another_ty2[:, :, 0] - true_vals[0]).abs().std(dim=0).detach().numpy() / np.sqrt(no_replica_ty2)
    vv_f2_std_ty2_fixB_another = (large_save_est_vecfunc_fixB_another_ty2[:, :, 1] - true_vals[1]).abs().std(dim=0).detach().numpy() / np.sqrt(no_replica_ty2)
    axs[2].plot(show_indx + 1,(large_save_est_vecfunc_fixB_another_ty2[:, :, 1] - true_vals[1]).abs().mean(dim=0).detach().numpy()[ show_indx], c=clrs[3], marker='x', label='vv-CV with Fixed B (2)')
    axs[2].fill_between(show_indx + 1, vv_f2_mean_ty2_fixB_another[show_indx] - vv_f2_std_ty2_fixB_another[show_indx],vv_f2_mean_ty2_fixB_another[show_indx] + vv_f2_std_ty2_fixB_another[show_indx], alpha=0.3, facecolor=clrs[5])
    # ------- vv-CV with estimated B, SE kernel
    vv_f1_mean_ty2 = (large_save_est_vecfunc_ty2[:, :, 0] - true_vals[0]).abs().mean(dim=0).detach().numpy()
    vv_f2_mean_ty2 = (large_save_est_vecfunc_ty2[:, :, 1] - true_vals[1]).abs().mean(dim=0).detach().numpy()
    vv_f1_std_ty2 = (large_save_est_vecfunc_ty2[:, :, 0] - true_vals[0]).abs().std(dim=0).detach().numpy() / np.sqrt(no_replica_ty2)
    vv_f2_std_ty2 = (large_save_est_vecfunc_ty2[:, :, 1] - true_vals[1]).abs().std(dim=0).detach().numpy() / np.sqrt(no_replica_ty2)
    axs[2].plot(show_indx + 1,(large_save_est_vecfunc_ty2[:, :, 1] - true_vals[1]).abs().mean(dim=0).detach().numpy()[show_indx], c=clrs[10], marker='.', label='vv-CV with Estimated B')
    axs[2].fill_between(show_indx + 1, vv_f2_mean_ty2[show_indx] - vv_f2_std_ty2[show_indx], vv_f2_mean_ty2[show_indx] + vv_f2_std_ty2[show_indx], alpha=0.3, facecolor=clrs[10])
    # ------- scalar CV, polynomial kernel
    svpoly_f1_mean_ty2 = (large_save_est_scalar_f1_svpolynomials_ty2 - true_vals[0]).abs().mean(dim=0).detach().numpy()
    svpoly_f2_mean_ty2 = (large_save_est_scalar_f2_svpolynomials_ty2 - true_vals[1]).abs().mean(dim=0).detach().numpy()
    svpoly_f1_std_ty2 = (large_save_est_scalar_f1_svpolynomials_ty2 - true_vals[0]).abs().std(dim=0).detach().numpy() / np.sqrt(no_replica_ty2)
    svpoly_f2_std_ty2 = (large_save_est_scalar_f2_svpolynomials_ty2 - true_vals[1]).abs().std(dim=0).detach().numpy() / np.sqrt(no_replica_ty2)
    axs[3].plot(show_indx + 1, svpoly_f2_mean_ty2[show_indx], c=clrs[1], marker='+', label='CV')
    axs[3].fill_between(show_indx + 1, svpoly_f2_mean_ty2[show_indx] - svpoly_f2_std_ty2[show_indx], svpoly_f2_mean_ty2[show_indx] + svpoly_f2_std_ty2[show_indx], alpha=0.3, facecolor=clrs[1])
    # ------- vv-CV with fixed B (1), polynomial kernel
    vvpoly_f1_mean_ty2_fixB = (large_save_est_vecfunc_vvpolynomials_fixB_ty2[:, :, 0] - true_vals[0]).abs().mean(dim=0).detach().numpy()
    vvpoly_f2_mean_ty2_fixB = (large_save_est_vecfunc_vvpolynomials_fixB_ty2[:, :, 1] - true_vals[1]).abs().mean(dim=0).detach().numpy()
    vvpoly_f1_std_ty2_fixB = (large_save_est_vecfunc_vvpolynomials_fixB_ty2[:, :, 0] - true_vals[0]).abs().std(dim=0).detach().numpy() / np.sqrt(no_replica_ty2)
    vvpoly_f2_std_ty2_fixB = (large_save_est_vecfunc_vvpolynomials_fixB_ty2[:, :, 1] - true_vals[1]).abs().std(dim=0).detach().numpy() / np.sqrt(no_replica_ty2)
    axs[3].plot(show_indx + 1, (large_save_est_vecfunc_vvpolynomials_fixB_ty2[:, :, 1] - true_vals[1]).abs().mean(dim=0).detach().numpy()[show_indx], c=clrs[7], marker='x', label='vv-CV with Fixed B (1)')
    axs[3].fill_between(show_indx + 1, vvpoly_f2_mean_ty2_fixB[show_indx] - vvpoly_f2_std_ty2_fixB[show_indx], vvpoly_f2_mean_ty2_fixB[show_indx] + vvpoly_f2_std_ty2_fixB[show_indx], alpha=0.3, facecolor=clrs[7])
    # ------- vv-CV with fixed B (2), polynomial kernel
    vvpoly_f1_mean_ty2_fixB_another = (large_save_est_vecfunc_vvpolynomials_fixB_another_ty2[:, :, 0] - true_vals[0]).abs().mean(dim=0).detach().numpy()
    vvpoly_f2_mean_ty2_fixB_another = (large_save_est_vecfunc_vvpolynomials_fixB_another_ty2[:, :, 1] - true_vals[1]).abs().mean(dim=0).detach().numpy()
    vvpoly_f1_std_ty2_fixB_another = (large_save_est_vecfunc_vvpolynomials_fixB_another_ty2[:, :, 0] - true_vals[0]).abs().std(dim=0).detach().numpy() / np.sqrt(no_replica_ty2)
    vvpoly_f2_std_ty2_fixB_another = (large_save_est_vecfunc_vvpolynomials_fixB_another_ty2[:, :, 1] - true_vals[1]).abs().std(dim=0).detach().numpy() / np.sqrt(no_replica_ty2)
    axs[3].plot(show_indx + 1, (large_save_est_vecfunc_vvpolynomials_fixB_another_ty2[:, :, 1] - true_vals[1]).abs().mean(dim=0).detach().numpy()[show_indx], c=clrs[3], marker='x', label='vv-CV with Fixed B (2)')
    axs[3].fill_between(show_indx + 1,vvpoly_f2_mean_ty2_fixB_another[show_indx] - vvpoly_f2_std_ty2_fixB_another[show_indx], vvpoly_f2_mean_ty2_fixB_another[show_indx] + vvpoly_f2_std_ty2_fixB_another[show_indx],alpha=0.3, facecolor=clrs[5])
    # ------- vv-CV with estimated B, polynomial kernel
    vvpoly_f1_mean_ty2 = (large_save_est_vecfunc_vvpolynomials_ty2_MD1P[:, :, 0] - true_vals[0]).abs().mean(dim=0).detach().numpy()
    vvpoly_f2_mean_ty2 = (large_save_est_vecfunc_vvpolynomials_ty2_MD1P[:, :, 1] - true_vals[1]).abs().mean(dim=0).detach().numpy()
    vvpoly_f1_std_ty2 = (large_save_est_vecfunc_vvpolynomials_ty2_MD1P[:, :, 0] - true_vals[0]).abs().std(dim=0).detach().numpy() / np.sqrt(no_replica_ty2)
    vvpoly_f2_std_ty2 = (large_save_est_vecfunc_vvpolynomials_ty2_MD1P[:, :, 1] - true_vals[1]).abs().std(dim=0).detach().numpy() / np.sqrt(no_replica_ty2)
    axs[3].plot(show_indx + 1, (large_save_est_vecfunc_vvpolynomials_ty2_MD1P[:, :, 1] - true_vals[1]).abs().mean(dim=0).detach().numpy()[show_indx], c=clrs[10], marker='.', label='vv-CV with Estimated B')
    axs[3].fill_between(show_indx + 1, vvpoly_f2_mean_ty2[show_indx] - vvpoly_f2_std_ty2[show_indx], vvpoly_f2_mean_ty2[show_indx] + vvpoly_f2_std_ty2[show_indx], alpha=0.3, facecolor=clrs[10])
    # If want to include the legend inside the figure
    axs[2].legend(loc="upper right", fontsize=13)
    # sns.set_style("ticks") # sns.set_style("whitegrid")
    # Panels 0/1: fitted curves vs the true step functions.
    axs[0].set_title("Low-fidelity model", fontsize=18)
    axs[1].set_title("High-fidelity model", fontsize=18)
    axs[0].set_ylim([-3, 3])
    axs[1].set_ylim([-3, 3])
    axs[2].set_ylim([0.03, 0.07])
    axs[3].set_ylim([0.03, 0.07])
    axs[0].plot(test_x_sorted_values, vv_SEk_y_fitted[:, 0][test_x_sorted_indices], color='blue', ls='dotted',label='vv-CV')
    axs[0].plot(test_x_sorted_values, vv_1polnk_y_fitted[:, 0][test_x_sorted_indices], color='orange', ls='dotted',label='vv-CV (1st order polyn. k)')
    axs[0].plot(test_x_sorted_values, sv_SEk_LF_y_fitted[test_x_sorted_indices], color='red', ls='dotted',label='CV (squared-exponetial k)')
    axs[0].step(x_step, y_LF, color='black', label=r'$f(x)$')
    axs[1].set_xlabel("x", fontsize=20)
    axs[1].set_ylabel("y", fontsize=20)
    axs[1].tick_params(labelsize=20)
    axs[1].plot(test_x_sorted_values, vv_SEk_y_fitted[:, 1][test_x_sorted_indices], color='blue', ls='dotted',label='vv-CV (squared-exponetial k)')
    axs[1].plot(test_x_sorted_values, vv_1polnk_y_fitted[:, 1][test_x_sorted_indices], color='orange', ls='dotted', label='vv-CV (1st order polyn. k)')
    axs[1].plot(test_x_sorted_values, sv_SEk_HF_y_fitted[test_x_sorted_indices], color='red', ls='dotted', label='CV (squared-exponetial k)')
    axs[1].step(x_step, y_HF, color='black', label=r'$f(x)$')
    axs[0].set_xlabel("x", fontsize=20)
    axs[0].set_ylabel("y", fontsize=20)
    axs[0].tick_params(labelsize=20)
    axs[1].legend(fontsize=13)
    # plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0., fontsize=15)
    plt.show()
    fig.savefig('step_function_plot.pdf')
| 53.149474
| 267
| 0.734136
| 4,267
| 25,246
| 3.936021
| 0.074525
| 0.029056
| 0.034296
| 0.033939
| 0.854123
| 0.801429
| 0.758142
| 0.739089
| 0.694254
| 0.64543
| 0
| 0.046763
| 0.116533
| 25,246
| 474
| 268
| 53.261603
| 0.706241
| 0.043017
| 0
| 0.123288
| 0
| 0
| 0.053866
| 0.002282
| 0
| 0
| 0
| 0
| 0.003425
| 1
| 0.010274
| false
| 0
| 0.034247
| 0.006849
| 0.054795
| 0.044521
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
05086b431ffdda92f20f3283ac7fc02be03b571e
| 1,211
|
py
|
Python
|
Mi_primer_Socket.py
|
dart-security/Socket-Basico
|
84e4ffb594b1be670c21a043d6ff1de793984def
|
[
"Unlicense"
] | 2
|
2019-09-11T00:25:13.000Z
|
2019-09-20T05:56:27.000Z
|
Mi_primer_Socket.py
|
dart-security/Socket-Basico
|
84e4ffb594b1be670c21a043d6ff1de793984def
|
[
"Unlicense"
] | null | null | null |
Mi_primer_Socket.py
|
dart-security/Socket-Basico
|
84e4ffb594b1be670c21a043d6ff1de793984def
|
[
"Unlicense"
] | null | null | null |
import socket

# Banner (ASCII art) — kept byte-for-byte; it is user-facing output.
print(" _____            _      _____                      _ _         ")
print(" |  __ \          | |    / ____|                    (_) |       ")
print(" | |  | | __ _ _ __| |_ _____| (___   ___  ___ _   _ _ __ _| |_ _   _ ")
print(" | |  | |/ _` | '__| __|______\___ \ / _ \/ __| | | | '__| | __| | | |")
print(" | |__| | (_| | |  | |_      ____) |  __/ (__| |_| | |  | | |_| |_| |")
print(" |_____/ \__,_|_|   \__|    |_____/ \___|\___|\__,_|_|  |_|\__|\__, |")
print("                                                                __/ |")
print("     www.hc-security.com.mx        by:Equinockx                |___/ ")
print("                                                                     ")
print("Ingresa la Url:")
url = input()

# Visual separator reused above and below the report (was duplicated inline).
separator = "---" * 20
try:
    print(separator)
    print(f"La URL ingresada es: {url}")
    # Resolve and display DNS information for the host the user entered.
    print("Nombre del Dominio completo: \n" + socket.getfqdn(url))
    print("Nombre de Host a direccion IP: \n" + socket.gethostbyname(url))
    print("Nombre de host para extender la dirección IP: \n" + str(socket.gethostbyname_ex(url)))
    print("Host de solicitud: \n" + socket.gethostname())
    print(separator)
except Exception as err:
    # BUGFIX: original printed "Error" + str(err) with no separator, producing
    # run-together output like "Error[Errno -2]...". Broad Exception is kept
    # deliberately: DNS failures surface as several OSError subclasses.
    print("Error: " + str(err))
| 46.576923
| 96
| 0.427746
| 86
| 1,211
| 4.593023
| 0.5
| 0.202532
| 0.227848
| 0.253165
| 0.202532
| 0.101266
| 0.101266
| 0
| 0
| 0
| 0
| 0.005398
| 0.388109
| 1,211
| 25
| 97
| 48.44
| 0.527665
| 0
| 0
| 0.090909
| 0
| 0.181818
| 0.672998
| 0.018167
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.045455
| 0
| 0.045455
| 0.818182
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
0551072bbc90ec26f68a9be8032baadfe642f878
| 1,527
|
py
|
Python
|
polynomial_operations_8/unit-test.py
|
WarriorsSami/Algebra-Homework
|
e5b7ce23703165dfd4ffa2e7236898abca175510
|
[
"Apache-2.0"
] | 1
|
2021-12-14T09:13:24.000Z
|
2021-12-14T09:13:24.000Z
|
polynomial_operations_8/unit-test.py
|
WarriorsSami/Algebra-Homework
|
e5b7ce23703165dfd4ffa2e7236898abca175510
|
[
"Apache-2.0"
] | null | null | null |
polynomial_operations_8/unit-test.py
|
WarriorsSami/Algebra-Homework
|
e5b7ce23703165dfd4ffa2e7236898abca175510
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from random import randint
import numpy as np
from test_utils import test_util
class MyTestCase(unittest.TestCase):
    """Randomized tests of polynomial add/multiply at three size scales.

    Each test draws two random polynomials and checks that test_util's
    computed results match its expected results for both operations.
    """

    def _check_polynomials(self, low, high):
        """Run one randomized round.

        Degrees are drawn from randint(low, high) + 1 and coefficients from
        [-high, high), matching the bounds the three original copy-paste
        test bodies used (value bound == degree upper bound).
        """
        n = randint(low, high) + 1
        f = np.array(np.random.randint(low=-high, high=high, size=n))
        m = randint(low, high) + 1
        g = np.array(np.random.randint(low=-high, high=high, size=m))
        (add_val, add_res), (mul_val, mul_res) = test_util(n, f, m, g)
        self.assertEqual(np.array_equal(add_val, add_res), True)
        self.assertEqual(np.array_equal(mul_val, mul_res), True)

    def test_polynomials_1(self):
        # small polynomials
        self._check_polynomials(10, 100)

    def test_polynomials_2(self):
        # medium polynomials
        self._check_polynomials(100, 1000)

    def test_polynomials_3(self):
        # large polynomials
        self._check_polynomials(1000, 10000)

if __name__ == '__main__':
    unittest.main()
| 33.933333
| 71
| 0.635232
| 244
| 1,527
| 3.77459
| 0.172131
| 0.091205
| 0.058632
| 0.09772
| 0.71987
| 0.71987
| 0.71987
| 0.71987
| 0.70684
| 0.70684
| 0
| 0.083054
| 0.219384
| 1,527
| 44
| 72
| 34.704545
| 0.689597
| 0
| 0
| 0.290323
| 0
| 0
| 0.005239
| 0
| 0
| 0
| 0
| 0
| 0.193548
| 1
| 0.096774
| false
| 0
| 0.129032
| 0
| 0.258065
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
055276b0904518f4bcbdbab1c776674368787170
| 41
|
py
|
Python
|
poco/__init__.py
|
HBoPRC/Poco
|
c8b0dc5362db45ff7a8397eebb0c52d9047f4b67
|
[
"Apache-2.0"
] | 1,444
|
2018-01-24T03:27:52.000Z
|
2022-03-31T07:40:57.000Z
|
poco/__init__.py
|
HBoPRC/Poco
|
c8b0dc5362db45ff7a8397eebb0c52d9047f4b67
|
[
"Apache-2.0"
] | 524
|
2018-03-14T01:08:06.000Z
|
2022-03-31T08:21:52.000Z
|
poco/__init__.py
|
HBoPRC/Poco
|
c8b0dc5362db45ff7a8397eebb0c52d9047f4b67
|
[
"Apache-2.0"
] | 268
|
2018-01-25T03:58:33.000Z
|
2022-03-24T08:18:59.000Z
|
# coding=utf-8
from .pocofw import Poco
| 10.25
| 24
| 0.731707
| 7
| 41
| 4.285714
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029412
| 0.170732
| 41
| 3
| 25
| 13.666667
| 0.852941
| 0.292683
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
055715258a918ba790061b7a8b93a9777ca067bc
| 38
|
py
|
Python
|
anstosstools/__init__.py
|
ericziethen/anstosstools
|
912324b78fd720ab7ba7f4e5b5320e97e5dc7696
|
[
"MIT"
] | null | null | null |
anstosstools/__init__.py
|
ericziethen/anstosstools
|
912324b78fd720ab7ba7f4e5b5320e97e5dc7696
|
[
"MIT"
] | 9
|
2020-12-27T00:34:12.000Z
|
2020-12-28T03:15:47.000Z
|
anstosstools/__init__.py
|
ericziethen/anstosstools
|
912324b78fd720ab7ba7f4e5b5320e97e5dc7696
|
[
"MIT"
] | null | null | null |
"""Root package for anstoss tools."""
| 19
| 37
| 0.684211
| 5
| 38
| 5.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131579
| 38
| 1
| 38
| 38
| 0.787879
| 0.815789
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
056246187bed37e3c59670f59d59b69dbb846f9d
| 128
|
py
|
Python
|
src/wai/common/statistics/__init__.py
|
waikato-datamining/wai-common
|
bf3d7ae6e01bcb7ffe9f5c2b5d10a05908a68c34
|
[
"MIT"
] | null | null | null |
src/wai/common/statistics/__init__.py
|
waikato-datamining/wai-common
|
bf3d7ae6e01bcb7ffe9f5c2b5d10a05908a68c34
|
[
"MIT"
] | 8
|
2020-07-01T02:11:31.000Z
|
2020-12-17T01:57:17.000Z
|
src/wai/common/statistics/__init__.py
|
waikato-datamining/wai-common
|
bf3d7ae6e01bcb7ffe9f5c2b5d10a05908a68c34
|
[
"MIT"
] | null | null | null |
from ._functions import lower_quartile, lower_quartile_sorted, upper_quartile, upper_quartile_sorted, \
interquartile_range
| 42.666667
| 103
| 0.851563
| 15
| 128
| 6.733333
| 0.6
| 0.257426
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101563
| 128
| 2
| 104
| 64
| 0.878261
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
056ead00161615544b084a315327c2056bd512cd
| 102
|
py
|
Python
|
evalml/pipelines/components/transformers/samplers/__init__.py
|
Mahesh1822/evalml
|
aa0ec2379aeba12bbd0dcaaa000f9a2a62064169
|
[
"BSD-3-Clause"
] | 454
|
2020-09-25T15:36:06.000Z
|
2022-03-30T04:48:49.000Z
|
evalml/pipelines/components/transformers/samplers/__init__.py
|
Mahesh1822/evalml
|
aa0ec2379aeba12bbd0dcaaa000f9a2a62064169
|
[
"BSD-3-Clause"
] | 2,175
|
2020-09-25T17:05:45.000Z
|
2022-03-31T19:54:54.000Z
|
evalml/pipelines/components/transformers/samplers/__init__.py
|
Mahesh1822/evalml
|
aa0ec2379aeba12bbd0dcaaa000f9a2a62064169
|
[
"BSD-3-Clause"
] | 66
|
2020-09-25T18:46:27.000Z
|
2022-03-02T18:33:30.000Z
|
"""Sampler components."""
from .undersampler import Undersampler
from .oversampler import Oversampler
| 25.5
| 38
| 0.813725
| 10
| 102
| 8.3
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098039
| 102
| 3
| 39
| 34
| 0.902174
| 0.186275
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
057d4f23e7ae9b355d5938029736d8bda4fcdb17
| 100
|
py
|
Python
|
messages/workspace_response.py
|
Legion-Engine/Hivemind
|
6511aba3a421ef06208aaf956ab06c81214f3c13
|
[
"MIT"
] | null | null | null |
messages/workspace_response.py
|
Legion-Engine/Hivemind
|
6511aba3a421ef06208aaf956ab06c81214f3c13
|
[
"MIT"
] | null | null | null |
messages/workspace_response.py
|
Legion-Engine/Hivemind
|
6511aba3a421ef06208aaf956ab06c81214f3c13
|
[
"MIT"
] | null | null | null |
class WorkspaceResponse:
    """Message object carrying a workspace identifier back to the requester.

    NOTE(review): whether *workspace* is a path or a symbolic name is not
    visible here — confirm against the message consumers.
    """

    def __init__(self, workspace: str):
        self.workspace = workspace

    def __repr__(self) -> str:
        # Debug/log aid only; backward-compatible addition (no __eq__ added,
        # so hashability/identity semantics are unchanged for callers).
        return f"{type(self).__name__}(workspace={self.workspace!r})"
| 25
| 39
| 0.71
| 10
| 100
| 6.7
| 0.7
| 0.38806
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.21
| 100
| 3
| 40
| 33.333333
| 0.848101
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
0580bcbcf3ce9693910b6f9d3aec3e6155d0a5f4
| 46
|
py
|
Python
|
system/display/__init__.py
|
gmattis/raspzen
|
429e99fdba713f996524042a8f732b1624566c7f
|
[
"MIT"
] | null | null | null |
system/display/__init__.py
|
gmattis/raspzen
|
429e99fdba713f996524042a8f732b1624566c7f
|
[
"MIT"
] | null | null | null |
system/display/__init__.py
|
gmattis/raspzen
|
429e99fdba713f996524042a8f732b1624566c7f
|
[
"MIT"
] | null | null | null |
from .screen import Screen
screen = Screen()
| 11.5
| 26
| 0.73913
| 6
| 46
| 5.666667
| 0.5
| 0.705882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 46
| 3
| 27
| 15.333333
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
559c77fb5aa6ecf539ccf2bdf105e4bf024c661b
| 253
|
py
|
Python
|
tests/endpoints/test_address.py
|
s0b0lev/raiden-client-python
|
4eecdda10650f081e4449449949067af6356d542
|
[
"MIT"
] | 3
|
2019-08-01T12:47:16.000Z
|
2020-07-05T15:28:53.000Z
|
tests/endpoints/test_address.py
|
s0b0lev/raiden-client-python
|
4eecdda10650f081e4449449949067af6356d542
|
[
"MIT"
] | 17
|
2019-08-01T07:51:58.000Z
|
2020-05-29T09:48:37.000Z
|
tests/endpoints/test_address.py
|
s0b0lev/raiden-client-python
|
4eecdda10650f081e4449449949067af6356d542
|
[
"MIT"
] | null | null | null |
from raiden_client.endpoints.address import Address
def test_address() -> None:
    """Smoke-test the static configuration of the Address endpoint."""
    addr = Address()
    assert addr.endpoint == "/address"
    assert addr.method == "get"
    assert addr.name == "address"
    assert not addr.payload()
| 25.3
| 51
| 0.695652
| 29
| 253
| 6
| 0.551724
| 0.224138
| 0.229885
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.189723
| 253
| 9
| 52
| 28.111111
| 0.84878
| 0
| 0
| 0
| 0
| 0
| 0.071146
| 0
| 0
| 0
| 0
| 0
| 0.571429
| 1
| 0.142857
| false
| 0
| 0.142857
| 0
| 0.285714
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
55ac12c511504f04650871b16c5af76ba721898b
| 238
|
py
|
Python
|
services/python-flask/swagger_server/models/__init__.py
|
gangtao/api-gateway
|
8f37de546b5af986e1e5fb62e79ed449792f29a0
|
[
"Apache-2.0"
] | 14
|
2021-04-27T04:15:38.000Z
|
2022-03-09T07:28:29.000Z
|
services/python-flask/swagger_server/models/__init__.py
|
4021019/api-gateway
|
8f37de546b5af986e1e5fb62e79ed449792f29a0
|
[
"Apache-2.0"
] | null | null | null |
services/python-flask/swagger_server/models/__init__.py
|
4021019/api-gateway
|
8f37de546b5af986e1e5fb62e79ed449792f29a0
|
[
"Apache-2.0"
] | 5
|
2021-06-21T03:59:07.000Z
|
2022-02-21T12:59:10.000Z
|
# coding: utf-8
# flake8: noqa
from __future__ import absolute_import
# import models into model package
from swagger_server.models.error import Error
from swagger_server.models.pet import Pet
from swagger_server.models.pets import Pets
| 26.444444
| 45
| 0.827731
| 36
| 238
| 5.25
| 0.5
| 0.174603
| 0.269841
| 0.365079
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009569
| 0.121849
| 238
| 8
| 46
| 29.75
| 0.894737
| 0.247899
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
55b6193b765e3a24788e248ec4fc97c8b9e91551
| 365
|
py
|
Python
|
cv/views.py
|
Yacine22/resume_website
|
0bd43dfbf3820411a9f2178f69fe9b09dfc8d472
|
[
"Apache-2.0"
] | 1
|
2022-01-27T09:06:06.000Z
|
2022-01-27T09:06:06.000Z
|
cv/views.py
|
Yacine22/resume_website
|
0bd43dfbf3820411a9f2178f69fe9b09dfc8d472
|
[
"Apache-2.0"
] | null | null | null |
cv/views.py
|
Yacine22/resume_website
|
0bd43dfbf3820411a9f2178f69fe9b09dfc8d472
|
[
"Apache-2.0"
] | null | null | null |
from unicodedata import name
from django.shortcuts import render
def home_view(request):
    """Render the home page template."""
    template = 'home.html'
    return render(request, template)
def contact_view(request):
    """Render the contact page template."""
    template = 'contact.html'
    return render(request, template)
def skills_view(request):
    """Render the skills page template."""
    template = 'skills.html'
    return render(request, template)
def experience_view(request):
    """Render the experience page template."""
    template = 'experience.html'
    return render(request, template)
| 24.333333
| 46
| 0.717808
| 45
| 365
| 5.733333
| 0.355556
| 0.170543
| 0.263566
| 0.356589
| 0.465116
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.180822
| 365
| 14
| 47
| 26.071429
| 0.862876
| 0
| 0
| 0
| 0
| 0
| 0.133903
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.2
| 0.4
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
e982685211004f8b46c59f3a36d52336d1b63876
| 48
|
py
|
Python
|
alttprbot/exceptions.py
|
floresmatthew/sahasrahbot
|
a3fcc2aba9cd204331ce612ecf269d8a48a1ebc4
|
[
"MIT"
] | 15
|
2019-10-15T21:35:59.000Z
|
2022-03-31T19:49:39.000Z
|
alttprbot/exceptions.py
|
floresmatthew/sahasrahbot
|
a3fcc2aba9cd204331ce612ecf269d8a48a1ebc4
|
[
"MIT"
] | 12
|
2019-10-06T01:33:13.000Z
|
2022-03-10T14:35:16.000Z
|
alttprbot/exceptions.py
|
floresmatthew/sahasrahbot
|
a3fcc2aba9cd204331ce612ecf269d8a48a1ebc4
|
[
"MIT"
] | 28
|
2019-11-25T23:49:56.000Z
|
2022-03-10T04:03:31.000Z
|
class SahasrahBotException(Exception):
    """Marker base exception for this bot — presumably raised/caught
    elsewhere so project-specific failures can be distinguished from
    arbitrary exceptions; confirm at the call sites."""
    pass
| 16
| 38
| 0.791667
| 4
| 48
| 9.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145833
| 48
| 2
| 39
| 24
| 0.926829
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
e99befc0e6a2286dec3bd837a95c58f6ec25dd4f
| 142
|
py
|
Python
|
network/__init__.py
|
Goochaozheng/ChunkFusion
|
7458a8e08886cc76cfeb87881c51e23b1d0674c3
|
[
"MIT"
] | 3
|
2022-03-15T08:34:15.000Z
|
2022-03-15T08:40:06.000Z
|
network/__init__.py
|
Goochaozheng/ChunkFusion
|
7458a8e08886cc76cfeb87881c51e23b1d0674c3
|
[
"MIT"
] | null | null | null |
network/__init__.py
|
Goochaozheng/ChunkFusion
|
7458a8e08886cc76cfeb87881c51e23b1d0674c3
|
[
"MIT"
] | null | null | null |
from .fusionNet import FusionNet
from .criteria import FusionLoss, GradLoss, IoU, SignLoss
from .fuser import Fuser
from .parser import Parser
| 35.5
| 57
| 0.823944
| 19
| 142
| 6.157895
| 0.526316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126761
| 142
| 4
| 58
| 35.5
| 0.943548
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e9aa56a157088e6d7cc24cfc8dc7ff1a1353a719
| 86
|
py
|
Python
|
icovid/views.py
|
stevennovaryo/icovid
|
53b8e5980884a6c716911181445ca4f21de3b3a3
|
[
"MIT"
] | null | null | null |
icovid/views.py
|
stevennovaryo/icovid
|
53b8e5980884a6c716911181445ca4f21de3b3a3
|
[
"MIT"
] | null | null | null |
icovid/views.py
|
stevennovaryo/icovid
|
53b8e5980884a6c716911181445ca4f21de3b3a3
|
[
"MIT"
] | null | null | null |
from django.shortcuts import redirect
def index(request):
    """Send requests for the site root to the home page."""
    destination = '/home/'
    return redirect(destination)
| 21.5
| 37
| 0.767442
| 11
| 86
| 6
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116279
| 86
| 4
| 38
| 21.5
| 0.868421
| 0
| 0
| 0
| 0
| 0
| 0.068966
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
|
0
| 5
|
e9c41f767ffe00c4afb1b49e17ae19c6e14b5f24
| 1,235
|
py
|
Python
|
primaryschool/subjects/_common_/keycode.py
|
larryw3i/primaryschool
|
ca2b1f2dfa042a31c2119a5098a87c441fbbbbbe
|
[
"MIT"
] | null | null | null |
primaryschool/subjects/_common_/keycode.py
|
larryw3i/primaryschool
|
ca2b1f2dfa042a31c2119a5098a87c441fbbbbbe
|
[
"MIT"
] | null | null | null |
primaryschool/subjects/_common_/keycode.py
|
larryw3i/primaryschool
|
ca2b1f2dfa042a31c2119a5098a87c441fbbbbbe
|
[
"MIT"
] | null | null | null |
class PsKeyCode:
    """Predicates over integer key codes.

    The numeric ranges correspond to printable ASCII characters, written
    below with ord() so the intended character is visible in the code.
    """

    def __init__(self):
        pass

    def keycode_in_alpha_upper(self, code):
        """True for 'A'..'Z' (65..90)."""
        return ord('A') <= code <= ord('Z')

    def keycode_in_alpha_lower(self, code):
        """True for 'a'..'z' (97..122)."""
        return ord('a') <= code <= ord('z')

    def keycode_in_alpha(self, code):
        """True for any ASCII letter."""
        is_lower = self.keycode_in_alpha_lower(code)
        is_upper = self.keycode_in_alpha_upper(code)
        return is_lower or is_upper

    def keycode_in_num_neg(self, code):
        """True for a digit or the minus sign."""
        return self.keycode_in_pure_num(code) or self.keycode_in_hyphen(code)

    def keycode_in_num_float(self, code):
        """True for a digit or the decimal point."""
        return self.keycode_in_pure_num(code) or self.keycode_in_dot(code)

    def keycode_in_pure_num(self, code):
        """True for '0'..'9' (48..57)."""
        return ord('0') <= code <= ord('9')

    def keycode_in_num(self, code):
        """True for any character of a signed decimal: digit, '-' or '.'."""
        if self.keycode_in_pure_num(code):
            return True
        return self.keycode_in_hyphen(code) or self.keycode_in_dot(code)

    def keycode_in_dot(self, code):
        """True for '.' (46)."""
        return code == ord('.')

    def keycode_in_alpha_num(self, code):
        """True for a letter, digit, '-' or '.'."""
        return self.keycode_in_num(code) or self.keycode_in_alpha(code)

    def keycode_in_space(self, code):
        """True for the space character (32)."""
        return code == ord(' ')

    def keycode_in_hyphen(self, code):
        """True for '-' (45)."""
        return code == ord('-')

    def keycode_in_return(self, code):
        """True for carriage return (0x0D)."""
        return code == ord('\r')
| 26.847826
| 77
| 0.643725
| 179
| 1,235
| 4.100559
| 0.173184
| 0.282016
| 0.196185
| 0.138965
| 0.513624
| 0.467302
| 0.361035
| 0.316076
| 0.316076
| 0.316076
| 0
| 0.022173
| 0.269636
| 1,235
| 45
| 78
| 27.444444
| 0.791574
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002429
| 0
| 0
| 1
| 0.393939
| false
| 0.030303
| 0
| 0.363636
| 0.787879
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
7575fbec70cf12f3fc3fd6f67ce3610e4019f302
| 161
|
py
|
Python
|
utils/__init__.py
|
ilvar/lotien
|
c951dbe5dffb8e4e6c0f574e8b2192eefe2391e3
|
[
"MIT"
] | null | null | null |
utils/__init__.py
|
ilvar/lotien
|
c951dbe5dffb8e4e6c0f574e8b2192eefe2391e3
|
[
"MIT"
] | null | null | null |
utils/__init__.py
|
ilvar/lotien
|
c951dbe5dffb8e4e6c0f574e8b2192eefe2391e3
|
[
"MIT"
] | null | null | null |
from django.conf import settings
from django.core.files.storage import get_storage_class
thumbnail_storage = get_storage_class(settings.DEFAULT_FILE_STORAGE)()
| 32.2
| 70
| 0.863354
| 23
| 161
| 5.73913
| 0.565217
| 0.151515
| 0.227273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074534
| 161
| 4
| 71
| 40.25
| 0.885906
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
75aaccfe80db4e6b04c554c475eb37a5e356ab45
| 50
|
py
|
Python
|
python/smqtk/algorithms/image_io/__init__.py
|
joshanderson-kw/SMQTK
|
594e7c733fe7f4e514a1a08a7343293a883a41fc
|
[
"BSD-3-Clause"
] | 82
|
2015-01-07T15:33:29.000Z
|
2021-08-11T18:34:05.000Z
|
python/smqtk/algorithms/image_io/__init__.py
|
joshanderson-kw/SMQTK
|
594e7c733fe7f4e514a1a08a7343293a883a41fc
|
[
"BSD-3-Clause"
] | 230
|
2015-04-08T14:36:51.000Z
|
2022-03-14T17:55:30.000Z
|
python/smqtk/algorithms/image_io/__init__.py
|
joshanderson-kw/SMQTK
|
594e7c733fe7f4e514a1a08a7343293a883a41fc
|
[
"BSD-3-Clause"
] | 65
|
2015-01-04T15:00:16.000Z
|
2021-11-19T18:09:11.000Z
|
from ._interface import ImageReader # noqa: F401
| 25
| 49
| 0.78
| 6
| 50
| 6.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 0.16
| 50
| 1
| 50
| 50
| 0.833333
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
75b7e328adbf2d66487eb0d071b95b492d3a2a8b
| 221
|
py
|
Python
|
src/bpmn_python/graph/classes/events/__init__.py
|
ToJestKrzysio/ProcessVisualization
|
9a359a31816bf1be65e3684a571509e3a2c2c0ac
|
[
"MIT"
] | null | null | null |
src/bpmn_python/graph/classes/events/__init__.py
|
ToJestKrzysio/ProcessVisualization
|
9a359a31816bf1be65e3684a571509e3a2c2c0ac
|
[
"MIT"
] | null | null | null |
src/bpmn_python/graph/classes/events/__init__.py
|
ToJestKrzysio/ProcessVisualization
|
9a359a31816bf1be65e3684a571509e3a2c2c0ac
|
[
"MIT"
] | null | null | null |
# coding=utf-8
"""
Package init file
"""
__all__ = ["catch_event_type", "end_event_type", "event_type", "intermediate_catch_event_type",
"intermediate_throw_event_type", "start_event_type", "throw_event_type"]
| 31.571429
| 95
| 0.733032
| 29
| 221
| 4.931034
| 0.482759
| 0.440559
| 0.195804
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005155
| 0.122172
| 221
| 6
| 96
| 36.833333
| 0.731959
| 0.140271
| 0
| 0
| 0
| 0
| 0.714286
| 0.318681
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
75f20b1a9ba742ea7abd3c9cec55cb6cf3a0e9a6
| 7,945
|
py
|
Python
|
src/ReplayMemory.py
|
CSCI4850/S18-team1-project
|
7dbd418e3c197db309bb626f148aad9053628664
|
[
"MIT"
] | 2
|
2018-03-14T21:35:19.000Z
|
2018-04-16T06:31:58.000Z
|
src/ReplayMemory.py
|
CSCI4850/S18-team1-project
|
7dbd418e3c197db309bb626f148aad9053628664
|
[
"MIT"
] | 14
|
2018-03-26T01:33:45.000Z
|
2022-03-11T23:20:13.000Z
|
src/ReplayMemory.py
|
CSCI4850/S18-team1-project
|
7dbd418e3c197db309bb626f148aad9053628664
|
[
"MIT"
] | 1
|
2021-01-05T19:30:36.000Z
|
2021-01-05T19:30:36.000Z
|
###------------------------------------------------------###
### Replay and Remember Memory Class ###
###------------------------------------------------------###
import numpy as np
from hyperparameters import *
# expand dimensions to (1, 84, 84, 5) from (84, 84, 5)
# normalize 0-255 -> 0-1 to reduce exploding gradient
def normalize_states(current_frame_history):
    """Convert uint8 pixel frames to float32 scaled into [0, 1].

    Dividing by 255 keeps gradients small (the original comment notes this
    guards against exploding gradients).
    """
    as_float = current_frame_history.astype(np.float32)
    return as_float / 255.0
class ReplayMemory:
    """Fixed-size ring buffer of transitions for DQN-style experience replay.

    Two storage layouts, selected by hp['DISCRETE_FRAMING'] at construction:
    discrete frame sets keep separate current/next state arrays; the
    frame-sliding layout keeps one array whose next-state is a shifted slice.

    The duplicated remember_*/replay_* method pairs of the original are
    deduplicated into _store_transition_tail and _replay; public signatures
    are unchanged.
    """

    def __init__(self, memory_size, state_size, action_size):
        # state dimensions: HEIGHT, WIDTH, DEPTH (e.g. 84 x 84 x 4 discrete
        # frames, or 5 with frame windows — per the original comments)
        self.state_height = state_size[0]
        self.state_width = state_size[1]
        self.state_depth = state_size[2]
        self.action_size = action_size
        # number of valid entries currently stored
        self.size = 0
        # capacity of the ring buffer
        self.maxsize = memory_size
        # next slot to overwrite
        self.current_index = 0
        if hp['DISCRETE_FRAMING']:
            # separate (N, H, W, D) uint8 arrays for s_t and s_{t+1}
            self.current_states = np.zeros(
                [memory_size, self.state_height, self.state_width, self.state_depth],
                dtype=np.uint8)
            self.next_states = np.zeros(
                [memory_size, self.state_height, self.state_width, self.state_depth],
                dtype=np.uint8)
        else:
            # single (N, H, W, D) array used with frame sliding
            self.states = np.zeros(
                [memory_size, self.state_height, self.state_width, self.state_depth],
                dtype=np.uint8)
        # per-entry reward (uint8, as in the original), action, terminal flag
        self.reward = np.zeros([memory_size], dtype=np.uint8)
        self.action = [0] * memory_size
        self.lost_life = [False] * memory_size

    def _store_transition_tail(self, action, reward, lost_life):
        """Record the scalar parts of one transition and advance the ring.

        BUGFIX: the original updated size with max(self.current_index,
        self.size), which caps size at maxsize-1 after the index wraps — the
        most recently written slot was never eligible for sampling. Using
        min(size + 1, maxsize) lets size reach the full capacity.
        """
        self.action[self.current_index] = action
        self.reward[self.current_index] = reward
        self.lost_life[self.current_index] = lost_life
        self.current_index = (self.current_index + 1) % self.maxsize
        self.size = min(self.size + 1, self.maxsize)

    def remember_discrete(self, current_states, action, reward, next_states, lost_life):
        """Store one (s, a, r, s', terminal) transition (discrete layout)."""
        self.current_states[self.current_index, :] = current_states
        self.next_states[self.current_index, :] = next_states
        self._store_transition_tail(action, reward, lost_life)

    def remember_frame_sliding(self, states, action, reward, lost_life):
        """Store one transition (sliding layout; s' is implicit in the
        extra trailing frame of `states`)."""
        self.states[self.current_index, :] = states
        self._store_transition_tail(action, reward, lost_life)

    def _replay(self, model, target_model, slice_current, slice_next):
        """Shared replay loop: sample minibatches and fit `model` toward
        Bellman targets computed from the (frozen) `target_model`.

        slice_current/slice_next map a sampled index array to the raw uint8
        state batches for s_t and s_{t+1} in the active storage layout.
        """
        num_samples = hp['REPLAY_ITERATIONS']   # minibatches per replay call
        sample_size = hp['BATCH_SIZE']
        gamma = hp['GAMMA']                     # discount rate
        show_fit = hp['SHOW_FIT']               # verbosity of model.fit
        # Can't train until enough transitions are banked for one batch.
        if self.size < sample_size:
            return
        for _ in range(num_samples):
            # sample without replacement from the valid region of the buffer
            current_sample = np.random.choice(self.size, sample_size, replace=False)
            # normalize uint8 pixels to [0, 1] float32
            current_states = normalize_states(slice_current(current_sample))
            next_states = normalize_states(slice_next(current_sample))
            actions = [self.action[j] for j in current_sample]
            reward = self.reward[current_sample]
            lost_lives = [self.lost_life[j] for j in current_sample]
            # start from the model's own Q-values so actions not taken keep
            # their current targets
            model_targets = model.predict(current_states)
            # Bellman target: r + gamma * max_a' Q_target(s', a')
            targets = reward + gamma * np.amax(target_model.predict(next_states), axis=1)
            # terminal transitions absorb the reward (no bootstrap term)
            targets[lost_lives] = reward[lost_lives]
            # overwrite only the entries for the actions actually taken
            model_targets[range(sample_size), actions] = targets
            model.fit(current_states, model_targets,
                      epochs=1, verbose=show_fit, batch_size=sample_size)

    def replay_discrete(self, model, target_model):
        """Replay using the discrete-framing layout."""
        self._replay(model, target_model,
                     lambda idx: self.current_states[idx, :, :, :],
                     lambda idx: self.next_states[idx, :, :, :])

    def replay_slidding(self, model, target_model):
        """Replay using the sliding layout: s_t is the first
        FRAME_BATCH_SIZE frames, s_{t+1} drops the oldest frame."""
        self._replay(model, target_model,
                     lambda idx: self.states[idx, :, :, :hp['FRAME_BATCH_SIZE']],
                     lambda idx: self.states[idx, :, :, 1:])
| 38.381643
| 128
| 0.585903
| 993
| 7,945
| 4.533736
| 0.164149
| 0.048867
| 0.056864
| 0.025544
| 0.741892
| 0.735673
| 0.735673
| 0.718347
| 0.702799
| 0.702799
| 0
| 0.0223
| 0.317055
| 7,945
| 206
| 129
| 38.567961
| 0.807409
| 0.321712
| 0
| 0.578947
| 0
| 0
| 0.02244
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.078947
| false
| 0
| 0.026316
| 0.013158
| 0.157895
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f9617c4d745a0d3842bc01689f63aba4cc07b1c3
| 91
|
py
|
Python
|
userena/compat.py
|
jdavidagudelo/django-userena-ce
|
970ca25ca367112625933bd61a0ba745b052692e
|
[
"BSD-3-Clause"
] | 86
|
2018-03-09T22:24:39.000Z
|
2021-12-12T22:30:33.000Z
|
userena/compat.py
|
jdavidagudelo/django-userena-ce
|
970ca25ca367112625933bd61a0ba745b052692e
|
[
"BSD-3-Clause"
] | 113
|
2018-02-25T12:24:13.000Z
|
2022-02-22T17:59:51.000Z
|
userena/compat.py
|
jdavidagudelo/django-userena-ce
|
970ca25ca367112625933bd61a0ba745b052692e
|
[
"BSD-3-Clause"
] | 19
|
2018-08-16T18:13:48.000Z
|
2021-12-11T18:14:30.000Z
|
# SiteProfileNotAvailable compatibility
class SiteProfileNotAvailable(Exception):
    """Compatibility shim — presumably re-declares an exception that newer
    framework versions removed so existing imports/except clauses keep
    working; NOTE(review): confirm which versions this targets."""
    pass
| 22.75
| 41
| 0.846154
| 6
| 91
| 12.833333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10989
| 91
| 3
| 42
| 30.333333
| 0.950617
| 0.406593
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
f97e8b09c143edea9368d4a65cd1588d89533b23
| 90
|
py
|
Python
|
dev/Tools/Python/2.7.13/mac/Python.framework/Versions/2.7/lib/python2.7/site-packages/pyxb/bundles/common/__init__.py
|
jeikabu/lumberyard
|
07228c605ce16cbf5aaa209a94a3cb9d6c1a4115
|
[
"AML"
] | 123
|
2015-01-12T06:43:22.000Z
|
2022-03-20T18:06:46.000Z
|
dev/Tools/Python/2.7.13/mac/Python.framework/Versions/2.7/lib/python2.7/site-packages/pyxb/bundles/common/__init__.py
|
jeikabu/lumberyard
|
07228c605ce16cbf5aaa209a94a3cb9d6c1a4115
|
[
"AML"
] | 103
|
2015-01-08T18:35:57.000Z
|
2022-01-18T01:44:14.000Z
|
dev/Tools/Python/2.7.13/mac/Python.framework/Versions/2.7/lib/python2.7/site-packages/pyxb/bundles/common/__init__.py
|
jeikabu/lumberyard
|
07228c605ce16cbf5aaa209a94a3cb9d6c1a4115
|
[
"AML"
] | 54
|
2015-02-15T17:12:00.000Z
|
2022-03-07T23:02:32.000Z
|
"""In this module are stored generated bindings for standard schema
like WSDL or SOAP."""
| 30
| 67
| 0.766667
| 14
| 90
| 4.928571
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155556
| 90
| 2
| 68
| 45
| 0.907895
| 0.922222
| 0
| null | 1
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f9a7aa9d526b543e9504ec351ff1bc6a6ea77662
| 1,401
|
py
|
Python
|
remove_file.py
|
ralgond/kdd2021
|
340ad9003eb60b27af27a78eef7c6866b312afc2
|
[
"Apache-2.0"
] | 1
|
2021-11-04T03:58:18.000Z
|
2021-11-04T03:58:18.000Z
|
remove_file.py
|
ralgond/kdd2021
|
340ad9003eb60b27af27a78eef7c6866b312afc2
|
[
"Apache-2.0"
] | 1
|
2021-11-08T06:19:11.000Z
|
2021-11-08T07:10:43.000Z
|
remove_file.py
|
ralgond/kdd2021
|
340ad9003eb60b27af27a78eef7c6866b312afc2
|
[
"Apache-2.0"
] | null | null | null |
from numpy import frombuffer
from base_win_size_l import *
import os
for i in range(0,251):
path1 = f"interdata\\{i}"
print(path1)
for w in win_size_l:
path2 = path1+f"\\{w}"
# os.system(f"del {path2}\\acc_mad.txt")
# os.system(f"del {path2}\\acc_skew.txt")
# os.system(f"del {path2}\\diff_mad.txt")
# os.system(f"del {path2}\\entropy_orig.txt")
# os.system(f"del {path2}\\hotsax.txt")
# os.system(f"del {path2}\\orig_mad.txt")
# os.system(f"del {path2}\\orig_median.txt")
# os.system(f"del {path2}\\nmp_abjoin.txt")
# os.system(f"del {path2}\\nmp.txt")
# os.system(f"del {path2}\\nmp_selfjoin.txt")
# os.system(f"del {path2}\\fcm.txt")
# os.system(f"del {path2}\\nozmp_abjoin_normalized.txt")
# os.system(f"del {path2}\\mp_abjoin_normalized.txt")
# os.system(f"del {path2}\\mp_selfjoin_normalized.txt")
# os.system(f"del {path2}\\mp_selfjoin_normalized_scramp.txt")
# os.system(f"ren {path2}\\p2p_orig.txt orig_p2p.txt")
# os.system(f"ren {path2}\\p2p_diff.txt diff_p2p.txt")
# os.system(f"ren {path2}\\p2p_acc.txt acc_p2p.txt")
# os.system(f"del {path2}\\diff_small.txt")
# os.system(f"del {path2}\\mp_all_selfjoin.txt")
os.system(f"del {path2}\\orig_mp_selfjoin_p2p.txt")
| 31.840909
| 70
| 0.59172
| 216
| 1,401
| 3.680556
| 0.203704
| 0.211321
| 0.237736
| 0.301887
| 0.718239
| 0.714465
| 0.63522
| 0.233962
| 0.168553
| 0.113208
| 0
| 0.03318
| 0.225553
| 1,401
| 44
| 71
| 31.840909
| 0.699539
| 0.647395
| 0
| 0
| 0
| 0
| 0.118143
| 0.06962
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0.111111
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
f9d71ca970aeda6b81b4362bed771c27b6605c3b
| 133
|
py
|
Python
|
website/photos/forms/__init__.py
|
SebastiaanZ/minigigscyclingteam
|
6c8c4f7ae41a5b01a551c592dc81fd37fd4f686e
|
[
"MIT"
] | null | null | null |
website/photos/forms/__init__.py
|
SebastiaanZ/minigigscyclingteam
|
6c8c4f7ae41a5b01a551c592dc81fd37fd4f686e
|
[
"MIT"
] | 9
|
2020-01-25T12:24:43.000Z
|
2022-03-12T00:18:38.000Z
|
website/photos/forms/__init__.py
|
SebastiaanZ/minigigscyclingteam
|
6c8c4f7ae41a5b01a551c592dc81fd37fd4f686e
|
[
"MIT"
] | null | null | null |
from .photos import MultiplePhotosForm, RemovePhotosFromArticleForm
__all__ = ["MultiplePhotosForm", "RemovePhotosFromArticleForm"]
| 33.25
| 67
| 0.849624
| 8
| 133
| 13.625
| 0.75
| 0.825688
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075188
| 133
| 3
| 68
| 44.333333
| 0.886179
| 0
| 0
| 0
| 0
| 0
| 0.338346
| 0.203008
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
f9dd654247085edbcda818e3de65f4503bc046d2
| 28
|
py
|
Python
|
app/libs/get_tags.py
|
Blesproject/bless_flask
|
78da13e64d1e6fb9add6c34050a7d907e05e782d
|
[
"MIT"
] | null | null | null |
app/libs/get_tags.py
|
Blesproject/bless_flask
|
78da13e64d1e6fb9add6c34050a7d907e05e782d
|
[
"MIT"
] | 2
|
2021-02-08T20:27:01.000Z
|
2021-04-30T20:45:06.000Z
|
app/libs/get_tags.py
|
Blesproject/bless_flask
|
78da13e64d1e6fb9add6c34050a7d907e05e782d
|
[
"MIT"
] | null | null | null |
def get_tags(data):
pass
| 14
| 19
| 0.678571
| 5
| 28
| 3.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.214286
| 28
| 2
| 20
| 14
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
ddab740c5d210aac36a2fe60229df19b51262435
| 462
|
py
|
Python
|
Conduit_testing/conduit_methods.py
|
Bogi227/conduit
|
745eb567865f796aba58a7a55e1b0eb5c5636bc0
|
[
"MIT"
] | null | null | null |
Conduit_testing/conduit_methods.py
|
Bogi227/conduit
|
745eb567865f796aba58a7a55e1b0eb5c5636bc0
|
[
"MIT"
] | null | null | null |
Conduit_testing/conduit_methods.py
|
Bogi227/conduit
|
745eb567865f796aba58a7a55e1b0eb5c5636bc0
|
[
"MIT"
] | null | null | null |
def conduit_login(driver):
driver.find_element_by_xpath('//a[@href="#/login"]').click()
driver.find_element_by_xpath('//input[@placeholder="Email"]').send_keys("testmail61@test.hu")
driver.find_element_by_xpath('//input[@placeholder="Password"]').send_keys("Testpass1")
driver.find_element_by_xpath('//*[@id="app"]//form/button').click()
def conduit_logout(driver):
driver.find_element_by_xpath('//*[@id="app"]/nav/div/ul/li[5]/a').click()
| 46.2
| 97
| 0.703463
| 65
| 462
| 4.707692
| 0.476923
| 0.163399
| 0.277778
| 0.310458
| 0.568627
| 0.568627
| 0.45098
| 0
| 0
| 0
| 0
| 0.009281
| 0.0671
| 462
| 9
| 98
| 51.333333
| 0.700696
| 0
| 0
| 0
| 0
| 0
| 0.364425
| 0.262473
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0.142857
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
fb1a2c11dd83e9f8809efa07747f8dcffaace070
| 1,022
|
py
|
Python
|
climatetalk/signal.py
|
kdschlosser/ClimateTalk
|
3b09a45c295cf5228283d7095834e8f133ed7de3
|
[
"MIT"
] | 3
|
2021-04-30T20:12:16.000Z
|
2022-03-09T11:53:12.000Z
|
climatetalk/signal.py
|
kdschlosser/ClimateTalk
|
3b09a45c295cf5228283d7095834e8f133ed7de3
|
[
"MIT"
] | null | null | null |
climatetalk/signal.py
|
kdschlosser/ClimateTalk
|
3b09a45c295cf5228283d7095834e8f133ed7de3
|
[
"MIT"
] | 2
|
2021-04-08T18:29:39.000Z
|
2021-04-30T20:13:55.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2020 Kevin Schlosser
import threading
class Signal(object):
def __init__(self):
self._callbacks = {}
def send(self, signal, packet):
address = packet.address
subnet = packet.subnet
try:
self._callbacks[signal][(address, subnet)](packet)
except KeyError:
pass
def connect(self, signal, address, subnet, callback):
if signal not in self._callbacks:
self._callbacks[signal] = {}
self._callbacks[signal][(address, subnet)] = callback
def disconnect(self, signal, address, subnet):
try:
del self._callbacks[signal][(address, subnet)]
except KeyError:
pass
_signal = Signal()
def connect(signal, address, subnet, callback):
_signal.connect(signal, address, subnet, callback)
def disconnect(signal, address, subnet):
_signal.disconnect(signal, address, subnet)
def send(signal, packet):
_signal.send(signal, packet)
| 21.744681
| 62
| 0.62818
| 108
| 1,022
| 5.814815
| 0.287037
| 0.207006
| 0.272293
| 0.171975
| 0.315287
| 0.127389
| 0
| 0
| 0
| 0
| 0
| 0.006614
| 0.260274
| 1,022
| 46
| 63
| 22.217391
| 0.824074
| 0.050881
| 0
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.259259
| false
| 0.074074
| 0.037037
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
fb3ca175aa8c247ec79b88baaad4059c3abdfd4c
| 109
|
py
|
Python
|
enthought/chaco/layers/svg_range_selection_overlay.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 3
|
2016-12-09T06:05:18.000Z
|
2018-03-01T13:00:29.000Z
|
enthought/chaco/layers/svg_range_selection_overlay.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 1
|
2020-12-02T00:51:32.000Z
|
2020-12-02T08:48:55.000Z
|
enthought/chaco/layers/svg_range_selection_overlay.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | null | null | null |
# proxy module
from __future__ import absolute_import
from chaco.layers.svg_range_selection_overlay import *
| 27.25
| 54
| 0.862385
| 15
| 109
| 5.733333
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.100917
| 109
| 3
| 55
| 36.333333
| 0.877551
| 0.110092
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
34b19f78d570fa38761b8e51b66657a0f710698f
| 12,593
|
py
|
Python
|
pysnmp-with-texts/IANA-GMPLS-TC-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 8
|
2019-05-09T17:04:00.000Z
|
2021-06-09T06:50:51.000Z
|
pysnmp-with-texts/IANA-GMPLS-TC-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 4
|
2019-05-31T16:42:59.000Z
|
2020-01-31T21:57:17.000Z
|
pysnmp-with-texts/IANA-GMPLS-TC-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module IANA-GMPLS-TC-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/IANA-GMPLS-TC-MIB
# Produced by pysmi-0.3.4 at Wed May 1 13:19:44 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, SingleValueConstraint, ConstraintsUnion, ConstraintsIntersection, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsUnion", "ConstraintsIntersection", "ValueRangeConstraint")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
ObjectIdentity, Counter32, Gauge32, TimeTicks, mib_2, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, iso, Integer32, ModuleIdentity, Bits, NotificationType, Unsigned32, IpAddress, Counter64 = mibBuilder.importSymbols("SNMPv2-SMI", "ObjectIdentity", "Counter32", "Gauge32", "TimeTicks", "mib-2", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "iso", "Integer32", "ModuleIdentity", "Bits", "NotificationType", "Unsigned32", "IpAddress", "Counter64")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
ianaGmpls = ModuleIdentity((1, 3, 6, 1, 2, 1, 152))
ianaGmpls.setRevisions(('2015-11-04 00:00', '2015-09-22 00:00', '2014-05-09 00:00', '2014-03-11 00:00', '2013-12-16 00:00', '2013-11-04 00:00', '2013-10-14 00:00', '2013-10-10 00:00', '2013-10-09 00:00', '2010-04-13 00:00', '2010-02-22 00:00', '2010-02-19 00:00', '2007-02-27 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: ianaGmpls.setRevisionsDescriptions(('Updated description for Switching Type 151.', 'Added Switching Type 151.', 'Fixed typographical error that interfered with compilation.', 'Added Administrative Status Information Flags 23-24.', 'Added Switching Type 110.', 'Added missing value 40 to IANAGmplsSwitchingTypeTC.', 'Restored names,added comments for G-PIDs 47, 56; updated IANA contact info.', 'Deprecated 2-4 in IANAGmplsSwitchingTypeTC, added registry reference.', 'Added Generalized PIDs 59-70 and changed names for 47, 56.', 'Added LSP Encoding Type tunnelLine(14), Switching Type evpl(30).', 'Added missing Administrative Status Information Flags 25, 26, and 28.', 'Added dcsc(125).', 'Initial version issued as part of RFC 4802.',))
if mibBuilder.loadTexts: ianaGmpls.setLastUpdated('201511040000Z')
if mibBuilder.loadTexts: ianaGmpls.setOrganization('IANA')
if mibBuilder.loadTexts: ianaGmpls.setContactInfo('Internet Assigned Numbers Authority Postal: 12025 Waterfront Drive, Suite 300 Los Angeles, CA 90094 Tel: +1 310 301-5800 E-Mail: iana&iana.org')
if mibBuilder.loadTexts: ianaGmpls.setDescription('Copyright (C) The IETF Trust (2007). The initial version of this MIB module was published in RFC 4802. For full legal notices see the RFC itself. Supplementary information may be available on: http://www.ietf.org/copyrights/ianamib.html')
class IANAGmplsLSPEncodingTypeTC(TextualConvention, Integer32):
reference = '1. Generalized Multi-Protocol Label Switching (GMPLS) Signaling Functional Description, RFC 3471, section 3.1.1. 2. Generalized MPLS Signalling Extensions for G.709 Optical Transport Networks Control, RFC 4328, section 3.1.1.'
description = 'This type is used to represent and control the LSP encoding type of an LSP signaled by a GMPLS signaling protocol. This textual convention is strongly tied to the LSP Encoding Types sub-registry of the GMPLS Signaling Parameters registry managed by IANA. Values should be assigned by IANA in step with the LSP Encoding Types sub-registry and using the same registry management rules. However, the actual values used in this textual convention are solely within the purview of IANA and do not necessarily match the values in the LSP Encoding Types sub-registry. The definition of this textual convention with the addition of newly assigned values is published periodically by the IANA, in either the Assigned Numbers RFC, or some derivative of it specific to Internet Network Management number assignments. (The latest arrangements can be obtained by contacting the IANA.) Requests for new values should be made to IANA via email (iana&iana.org).'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 5, 7, 8, 9, 11, 12, 13, 14))
namedValues = NamedValues(("tunnelLspNotGmpls", 0), ("tunnelLspPacket", 1), ("tunnelLspEthernet", 2), ("tunnelLspAnsiEtsiPdh", 3), ("tunnelLspSdhSonet", 5), ("tunnelLspDigitalWrapper", 7), ("tunnelLspLambda", 8), ("tunnelLspFiber", 9), ("tunnelLspFiberChannel", 11), ("tunnelDigitalPath", 12), ("tunnelOpticalChannel", 13), ("tunnelLine", 14))
class IANAGmplsSwitchingTypeTC(TextualConvention, Integer32):
reference = '1. Routing Extensions in Support of Generalized Multi-Protocol Label Switching, RFC 4202, section 2.4. 2. Generalized Multi-Protocol Label Switching (GMPLS) Signaling Functional Description, RFC 3471, section 3.1.1. 3. Revised Definition of The GMPLS Switching Capability and Type Fields, RFC7074, section 5.'
description = 'This type is used to represent and control the LSP switching type of an LSP signaled by a GMPLS signaling protocol. This textual convention is strongly tied to the Switching Types sub-registry of the GMPLS Signaling Parameters registry managed by IANA. Values should be assigned by IANA in step with the Switching Types sub-registry and using the same registry management rules. However, the actual values used in this textual convention are solely within the purview of IANA and do not necessarily match the values in the Switching Types sub-registry. The definition of this textual convention with the addition of newly assigned values is published periodically by the IANA, in either the Assigned Numbers RFC, or some derivative of it specific to Internet Network Management number assignments. (The latest arrangements can be obtained by contacting the IANA.) Requests for new values should be made to IANA via email (iana&iana.org).'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 30, 40, 51, 100, 110, 125, 150, 151, 200))
namedValues = NamedValues(("unknown", 0), ("psc1", 1), ("psc2", 2), ("psc3", 3), ("psc4", 4), ("evpl", 30), ("pbb", 40), ("l2sc", 51), ("tdm", 100), ("otntdm", 110), ("dcsc", 125), ("lsc", 150), ("wsonlsc", 151), ("fsc", 200))
class IANAGmplsGeneralizedPidTC(TextualConvention, Integer32):
reference = '1. Generalized Multi-Protocol Label Switching (GMPLS) Signaling Functional Description, RFC 3471, section 3.1.1. 2. Generalized MPLS Signalling Extensions for G.709 Optical Transport Networks Control, RFC 4328, section 3.1.3. 3. Generalized Multi-Protocol Label Switching (GMPLS) Signaling Extensions for the evolving G.709 Optical Transport Networks Control,[RFC7139], sections 4 and 11.'
description = 'This data type is used to represent and control the LSP Generalized Protocol Identifier (G-PID) of an LSP signaled by a GMPLS signaling protocol. This textual convention is strongly tied to the Generalized PIDs (G-PID) sub-registry of the GMPLS Signaling Parameters registry managed by IANA. Values should be assigned by IANA in step with the Generalized PIDs (G-PID) sub-registry and using the same registry management rules. However, the actual values used in this textual convention are solely within the purview of IANA and do not necessarily match the values in the Generalized PIDs (G-PID) sub-registry. The definition of this textual convention with the addition of newly assigned values is published periodically by the IANA, in either the Assigned Numbers RFC, or some derivative of it specific to Internet Network Management number assignments. (The latest arrangements can be obtained by contacting the IANA.) Requests for new values should be made to IANA via email (iana&iana.org).'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70))
namedValues = NamedValues(("unknown", 0), ("asynchE4", 5), ("asynchDS3T3", 6), ("asynchE3", 7), ("bitsynchE3", 8), ("bytesynchE3", 9), ("asynchDS2T2", 10), ("bitsynchDS2T2", 11), ("reservedByRFC3471first", 12), ("asynchE1", 13), ("bytesynchE1", 14), ("bytesynch31ByDS0", 15), ("asynchDS1T1", 16), ("bitsynchDS1T1", 17), ("bytesynchDS1T1", 18), ("vc1vc12", 19), ("reservedByRFC3471second", 20), ("reservedByRFC3471third", 21), ("ds1SFAsynch", 22), ("ds1ESFAsynch", 23), ("ds3M23Asynch", 24), ("ds3CBitParityAsynch", 25), ("vtLovc", 26), ("stsSpeHovc", 27), ("posNoScramble16BitCrc", 28), ("posNoScramble32BitCrc", 29), ("posScramble16BitCrc", 30), ("posScramble32BitCrc", 31), ("atm", 32), ("ethernet", 33), ("sdhSonet", 34), ("digitalwrapper", 36), ("lambda", 37), ("ansiEtsiPdh", 38), ("lapsSdh", 40), ("fddi", 41), ("dqdb", 42), ("fiberChannel3", 43), ("hdlc", 44), ("ethernetV2DixOnly", 45), ("ethernet802dot3Only", 46), ("g709ODUj", 47), ("g709OTUk", 48), ("g709CBRorCBRa", 49), ("g709CBRb", 50), ("g709BSOT", 51), ("g709BSNT", 52), ("gfpIPorPPP", 53), ("gfpEthernetMAC", 54), ("gfpEthernetPHY", 55), ("g709ESCON", 56), ("g709FICON", 57), ("g709FiberChannel", 58), ("framedGFP", 59), ("sTM1", 60), ("sTM4", 61), ("infiniBand", 62), ("sDI", 63), ("sDI1point001", 64), ("dVBASI", 65), ("g709ODU125G", 66), ("g709ODUAny", 67), ("nullTest", 68), ("randomTest", 69), ("sixtyfourB66BGFPFEthernet", 70))
class IANAGmplsAdminStatusInformationTC(TextualConvention, Bits):
reference = '1. Generalized Multi-Protocol Label Switching (GMPLS) Signaling Functional Description, RFC 3471, section 8. 2. Generalized MPLS Signaling - RSVP-TE Extensions, RFC 3473, section 7. 3. GMPLS - Communication of Alarm Information, RFC 4783, section 3.2.1.'
description = 'This data type determines the setting of the Admin Status flags in the Admin Status object or TLV, as described in RFC 3471. Setting this object to a non-zero value will result in the inclusion of the Admin Status object or TLV on signaling messages. This textual convention is strongly tied to the Administrative Status Information Flags sub-registry of the GMPLS Signaling Parameters registry managed by IANA. Values should be assigned by IANA in step with the Administrative Status Flags sub-registry and using the same registry management rules. However, the actual values used in this textual convention are solely within the purview of IANA and do not necessarily match the values in the Administrative Status Information Flags sub-registry. The definition of this textual convention with the addition of newly assigned values is published periodically by the IANA, in either the Assigned Numbers RFC, or some derivative of it specific to Internet Network Management number assignments. (The latest arrangements can be obtained by contacting the IANA.) Requests for new values should be made to IANA via email (iana&iana.org).'
status = 'current'
namedValues = NamedValues(("reflect", 0), ("reserved1", 1), ("reserved2", 2), ("reserved3", 3), ("reserved4", 4), ("reserved5", 5), ("reserved6", 6), ("reserved7", 7), ("reserved8", 8), ("reserved9", 9), ("reserved10", 10), ("reserved11", 11), ("reserved12", 12), ("reserved13", 13), ("reserved14", 14), ("reserved15", 15), ("reserved16", 16), ("reserved17", 17), ("reserved18", 18), ("reserved19", 19), ("reserved20", 20), ("reserved21", 21), ("reserved22", 22), ("oamFlowsEnabled", 23), ("oamAlarmsEnabled", 24), ("handover", 25), ("lockout", 26), ("inhibitAlarmCommunication", 27), ("callControl", 28), ("testing", 29), ("administrativelyDown", 30), ("deleteInProgress", 31))
mibBuilder.exportSymbols("IANA-GMPLS-TC-MIB", IANAGmplsSwitchingTypeTC=IANAGmplsSwitchingTypeTC, IANAGmplsLSPEncodingTypeTC=IANAGmplsLSPEncodingTypeTC, IANAGmplsAdminStatusInformationTC=IANAGmplsAdminStatusInformationTC, PYSNMP_MODULE_ID=ianaGmpls, ianaGmpls=ianaGmpls, IANAGmplsGeneralizedPidTC=IANAGmplsGeneralizedPidTC)
| 246.921569
| 1,404
| 0.750735
| 1,668
| 12,593
| 5.666067
| 0.291367
| 0.005502
| 0.026664
| 0.018411
| 0.489789
| 0.461962
| 0.449476
| 0.430325
| 0.426092
| 0.422178
| 0
| 0.088059
| 0.126181
| 12,593
| 50
| 1,405
| 251.86
| 0.770811
| 0.026046
| 0
| 0.105263
| 0
| 0.263158
| 0.674853
| 0.024233
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.157895
| 0
| 0.763158
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
34c3748e608a3db1a56851be011f8d512481f149
| 206
|
py
|
Python
|
service_frontend_auth/frontend/controller/__init__.py
|
vykozlov/eosc-perf
|
77dcfeda162bb51d363267da1810b35b58651791
|
[
"MIT"
] | 3
|
2021-11-29T11:34:48.000Z
|
2021-12-13T16:19:42.000Z
|
service_frontend_auth/frontend/controller/__init__.py
|
vykozlov/eosc-perf
|
77dcfeda162bb51d363267da1810b35b58651791
|
[
"MIT"
] | 166
|
2020-08-17T12:42:30.000Z
|
2022-03-28T11:35:24.000Z
|
service_frontend_auth/frontend/controller/__init__.py
|
vykozlov/eosc-perf
|
77dcfeda162bb51d363267da1810b35b58651791
|
[
"MIT"
] | 6
|
2020-09-18T16:08:27.000Z
|
2022-03-25T14:11:01.000Z
|
"""The controller subpackage exposes the main components of the controller. This includes the IOController class itself,
but also the classes it uses, like the Authenticator and the JSONResultValidator."""
| 68.666667
| 120
| 0.815534
| 28
| 206
| 6
| 0.75
| 0.154762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135922
| 206
| 2
| 121
| 103
| 0.94382
| 0.966019
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
34d03890fa7be603897303b330fcd0959cc56f87
| 81
|
py
|
Python
|
cobl2/interfaces.py
|
blurks/cobl2
|
b5a1195ea73efbc5b9cebfa50151e677b87f995f
|
[
"Apache-2.0"
] | null | null | null |
cobl2/interfaces.py
|
blurks/cobl2
|
b5a1195ea73efbc5b9cebfa50151e677b87f995f
|
[
"Apache-2.0"
] | 17
|
2017-12-05T10:02:12.000Z
|
2020-10-16T09:51:35.000Z
|
cobl2/interfaces.py
|
blurks/cobl2
|
b5a1195ea73efbc5b9cebfa50151e677b87f995f
|
[
"Apache-2.0"
] | 3
|
2018-11-15T10:15:56.000Z
|
2021-12-07T01:34:57.000Z
|
from zope.interface import Interface
class IClade(Interface):
"""marker"""
| 13.5
| 36
| 0.716049
| 9
| 81
| 6.444444
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.160494
| 81
| 5
| 37
| 16.2
| 0.852941
| 0.074074
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
9b86554e673da645ef9bf7cbb25a54a112ab906a
| 29
|
py
|
Python
|
qoredl_project/qoredl_flask/util/MysqldbClass.py
|
qore-dl/qore-dl-code
|
dc60df8fd072df5c641005992630f43892b7f78e
|
[
"Apache-2.0"
] | null | null | null |
qoredl_project/qoredl_flask/util/MysqldbClass.py
|
qore-dl/qore-dl-code
|
dc60df8fd072df5c641005992630f43892b7f78e
|
[
"Apache-2.0"
] | null | null | null |
qoredl_project/qoredl_flask/util/MysqldbClass.py
|
qore-dl/qore-dl-code
|
dc60df8fd072df5c641005992630f43892b7f78e
|
[
"Apache-2.0"
] | null | null | null |
# from controller import db
| 9.666667
| 27
| 0.758621
| 4
| 29
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.206897
| 29
| 2
| 28
| 14.5
| 0.956522
| 0.862069
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
9bb3d4cfcacf935ff658ccd6223a3afa81754000
| 65
|
py
|
Python
|
sparse_causal_model_learner_rl/complexity/__init__.py
|
sergeivolodin/causality-disentanglement-rl
|
5a41b4a2e3d85fa7e9c8450215fdc6cf954df867
|
[
"CC0-1.0"
] | 2
|
2020-12-11T05:26:24.000Z
|
2021-04-21T06:12:58.000Z
|
sparse_causal_model_learner_rl/complexity/__init__.py
|
sergeivolodin/causality-disentanglement-rl
|
5a41b4a2e3d85fa7e9c8450215fdc6cf954df867
|
[
"CC0-1.0"
] | 9
|
2020-04-30T16:29:50.000Z
|
2021-03-26T07:32:18.000Z
|
sparse_causal_model_learner_rl/complexity/__init__.py
|
sergeivolodin/causality-disentanglement-rl
|
5a41b4a2e3d85fa7e9c8450215fdc6cf954df867
|
[
"CC0-1.0"
] | null | null | null |
from sparse_causal_model_learner_rl.complexity.l1 import L1, Lp
| 32.5
| 64
| 0.861538
| 11
| 65
| 4.727273
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033898
| 0.092308
| 65
| 1
| 65
| 65
| 0.847458
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
32cc37ef4eb8f86884d208ae6915679b3501514c
| 149
|
py
|
Python
|
uts/iqt/services/execution/t_simulated.py
|
yt7589/iching
|
6673da38f4c80e7fd297c86fedc5616aee8ac09b
|
[
"Apache-2.0"
] | 32
|
2020-04-14T08:32:18.000Z
|
2022-02-09T07:05:08.000Z
|
uts/iqt/services/execution/t_simulated.py
|
yt7589/iching
|
6673da38f4c80e7fd297c86fedc5616aee8ac09b
|
[
"Apache-2.0"
] | 1
|
2020-04-08T10:42:15.000Z
|
2020-04-15T01:38:03.000Z
|
uts/iqt/services/execution/t_simulated.py
|
yt7589/iching
|
6673da38f4c80e7fd297c86fedc5616aee8ac09b
|
[
"Apache-2.0"
] | 4
|
2020-08-25T03:56:46.000Z
|
2021-05-11T05:55:51.000Z
|
#
import unittest
import iqt.oms.services.execution.simulated as simulated
class TSimulated(unittest.TestCase):
def test_run(self):
pass
| 21.285714
| 56
| 0.758389
| 19
| 149
| 5.894737
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.161074
| 149
| 7
| 57
| 21.285714
| 0.896
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0.2
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
32d421794b3709a864259df94941ae313dd89a48
| 120
|
py
|
Python
|
classification/__init__.py
|
joedaws/lde2021
|
ece9857667bab8691cf617ed56af561676945b60
|
[
"MIT"
] | null | null | null |
classification/__init__.py
|
joedaws/lde2021
|
ece9857667bab8691cf617ed56af561676945b60
|
[
"MIT"
] | null | null | null |
classification/__init__.py
|
joedaws/lde2021
|
ece9857667bab8691cf617ed56af561676945b60
|
[
"MIT"
] | null | null | null |
import os
PARENT_DIR, _ = os.path.split(os.path.abspath(__file__))
ANALYSIS_DIR = os.path.join(PARENT_DIR, 'analysis')
| 24
| 56
| 0.758333
| 19
| 120
| 4.368421
| 0.526316
| 0.216867
| 0.216867
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091667
| 120
| 4
| 57
| 30
| 0.761468
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
fd39329b73d6892abe114fbecbd000c8d6581f5c
| 175
|
py
|
Python
|
supplementary_content/apps.py
|
theabrad/cmcs-eregulations
|
1a8b4b7feed177dc6e23395d21687c3ceb77e31f
|
[
"CC0-1.0"
] | 1
|
2021-09-26T22:29:47.000Z
|
2021-09-26T22:29:47.000Z
|
supplementary_content/apps.py
|
EmmaIvy/cmcs-eregulations
|
5ac1c148ddc3164f4c7b1dfaea1cee9fb489c688
|
[
"CC0-1.0"
] | null | null | null |
supplementary_content/apps.py
|
EmmaIvy/cmcs-eregulations
|
5ac1c148ddc3164f4c7b1dfaea1cee9fb489c688
|
[
"CC0-1.0"
] | null | null | null |
from django.apps import AppConfig
class SupplementaryContentConfig(AppConfig):
name = "supplementary_content"
verbose_name = "Supplementary content for regulations"
| 25
| 58
| 0.8
| 17
| 175
| 8.117647
| 0.764706
| 0.246377
| 0.347826
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 175
| 6
| 59
| 29.166667
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0.331429
| 0.12
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
fd503f17206a8d46c0c9ec50246e9c24bd181d01
| 26
|
py
|
Python
|
fsm_repository/master/__init__.py
|
shashank-iitj/traibot
|
30676413e30a0f7dc651f1918b33892728a01c1b
|
[
"Apache-2.0"
] | null | null | null |
fsm_repository/master/__init__.py
|
shashank-iitj/traibot
|
30676413e30a0f7dc651f1918b33892728a01c1b
|
[
"Apache-2.0"
] | null | null | null |
fsm_repository/master/__init__.py
|
shashank-iitj/traibot
|
30676413e30a0f7dc651f1918b33892728a01c1b
|
[
"Apache-2.0"
] | null | null | null |
from .fsm import MasterFsm
| 26
| 26
| 0.846154
| 4
| 26
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 26
| 1
| 26
| 26
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
b5d984baa432d6f49c610bcd45dcac22190a6ff6
| 241
|
py
|
Python
|
nighres/filtering/__init__.py
|
jennydaman/nighres
|
9ced74e61db02261e4753a69b03f4479bfdc26b6
|
[
"Apache-2.0"
] | null | null | null |
nighres/filtering/__init__.py
|
jennydaman/nighres
|
9ced74e61db02261e4753a69b03f4479bfdc26b6
|
[
"Apache-2.0"
] | null | null | null |
nighres/filtering/__init__.py
|
jennydaman/nighres
|
9ced74e61db02261e4753a69b03f4479bfdc26b6
|
[
"Apache-2.0"
] | null | null | null |
from nighres.filtering.filter_ridge_structures import filter_ridge_structures
from nighres.filtering.recursive_ridge_diffusion import recursive_ridge_diffusion
from nighres.filtering.total_variation_filtering import total_variation_filtering
| 80.333333
| 81
| 0.929461
| 30
| 241
| 7.066667
| 0.366667
| 0.15566
| 0.283019
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045643
| 241
| 3
| 82
| 80.333333
| 0.921739
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
bd27aca5ee6622555a46cb575d92776b77f2e3db
| 184
|
py
|
Python
|
rmltraintfsemantic/rmltraintfsemantic/__init__.py
|
autognc/ravenML-plugins
|
0a7e63a6d1fcf937599c688d3b567978f0c51b8b
|
[
"MIT"
] | null | null | null |
rmltraintfsemantic/rmltraintfsemantic/__init__.py
|
autognc/ravenML-plugins
|
0a7e63a6d1fcf937599c688d3b567978f0c51b8b
|
[
"MIT"
] | 13
|
2020-04-25T00:29:21.000Z
|
2022-02-10T02:15:57.000Z
|
rmltraintfsemantic/rmltraintfsemantic/__init__.py
|
autognc/ravenML-plugins
|
0a7e63a6d1fcf937599c688d3b567978f0c51b8b
|
[
"MIT"
] | 1
|
2021-04-29T23:06:45.000Z
|
2021-04-29T23:06:45.000Z
|
import os
import sys

# Directory one level above the package that contains this file.
cwd = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")

# Make the bundled "slim" package importable, adding it at most once.
_slim_dir = os.path.join(cwd, 'slim')
if _slim_dir not in sys.path:
    sys.path.append(_slim_dir)
| 26.285714
| 68
| 0.679348
| 33
| 184
| 3.666667
| 0.424242
| 0.247934
| 0.247934
| 0.214876
| 0.280992
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11413
| 184
| 6
| 69
| 30.666667
| 0.742331
| 0
| 0
| 0
| 0
| 0
| 0.054348
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
bd35e172dd2bac53ab4a6839ac1d95905fdb17b7
| 15,474
|
py
|
Python
|
test/test_edit_preclusion_backend.py
|
nus-mtp/another-cs-study-planner
|
02b52871a34f580b779ede08750f2d4e887bcf65
|
[
"MIT"
] | 1
|
2017-04-30T17:59:08.000Z
|
2017-04-30T17:59:08.000Z
|
test/test_edit_preclusion_backend.py
|
nus-mtp/another-cs-study-planner
|
02b52871a34f580b779ede08750f2d4e887bcf65
|
[
"MIT"
] | 87
|
2017-02-13T09:06:13.000Z
|
2017-04-14T09:23:08.000Z
|
test/test_edit_preclusion_backend.py
|
nus-mtp/another-cs-study-planner
|
02b52871a34f580b779ede08750f2d4e887bcf65
|
[
"MIT"
] | 1
|
2017-04-11T05:26:00.000Z
|
2017-04-11T05:26:00.000Z
|
'''
test_edit_preclusion_backend.py
Contains test cases for functions to edit preclusions.
'''
from nose.tools import assert_equal, assert_false, assert_true
from components import model
# HOW TO RUN NOSE TESTS
# 1. Make sure you are in cs-modify main directory
# 2. Make sure the path "C:\Python27\Scripts" is added in your environment variables
# 3. Enter in cmd: "nosetests test/"
# 4. Nose will run all the tests inside the test/ folder
class TestCode(object):
    '''
    This class runs the test cases for functions to edit preclusions.

    Each test_* method is invoked explicitly from setUp and guarded by a
    boolean flag so that every scenario runs exactly once against the
    database populated in setUp.
    '''
    def __init__(self):
        # Dummy module used as the edit target in every scenario.
        self.test_module_code = "AA1111"
        self.test_module_name = "Dummy Module"
        self.test_module_desc = "Dummy Description"
        self.test_module_mc = 4
        self.test_module_status = "Active"
        # One-shot flags: each test method runs only while its flag is False.
        self.no_preclude_to_one_preclude_tested = False
        self.no_preclude_to_no_preclude_tested = False
        self.no_preclude_to_multiple_preclude_tested = False
        self.preclude_to_one_preclude_tested = False
        self.preclude_to_no_preclude_tested = False
        self.preclude_to_multiple_preclude_tested = False
        self.edit_preclude_duplicate_tested = False
        self.edit_preclude_non_existent_tested = False
        self.edit_preclude_already_in_prereq = False
        self.edit_preclude_multiple_errors_tested = False
        # Dummy modules used as preclusion candidates.
        self.test_preclude_code = "BB1111"
        self.test_preclude2_code = "BB1112"
        self.test_preclude3_code = "BB1113"
        # Module code that is never inserted into the database.
        self.test_invalid_module_code = "ZZ1597"
        # Expected error strings returned by model.edit_preclusion.
        self.ERROR_MSG_MODULE_CANNOT_BE_ITSELF = "This module cannot be the same as target module"
        self.ERROR_MSG_MODULE_DUPLICATED = "There cannot be more than one instance of this module"
        self.ERROR_MSG_MODULE_DOESNT_EXIST = "This module does not exist"
        self.ERROR_MSG_MODULE_PRECLUSION_ALREADY_PREREQ = \
            "This module is a prerequisite of the target module"
    def setUp(self):
        '''
        Populate database and perform testing
        '''
        model.add_module(self.test_module_code, self.test_module_name, self.test_module_desc,
                         self.test_module_mc, self.test_module_status)
        model.add_module(self.test_preclude_code, self.test_module_name, self.test_module_desc,
                         self.test_module_mc, self.test_module_status)
        model.add_module(self.test_preclude2_code, self.test_module_name, self.test_module_desc,
                         self.test_module_mc, self.test_module_status)
        model.add_module(self.test_preclude3_code, self.test_module_name, self.test_module_desc,
                         self.test_module_mc, self.test_module_status)
        self.test_no_preclude_to_one_preclude()
        self.no_preclude_to_one_preclude_tested = True
        self.test_preclude_to_one_preclude()
        self.preclude_to_one_preclude_tested = True
        self.test_no_preclude_to_no_preclude()
        self.no_preclude_to_no_preclude_tested = True
        self.test_preclude_to_no_preclude()
        self.preclude_to_no_preclude_tested = True
        self.test_no_preclude_to_multiple_preclude()
        self.no_preclude_to_multiple_preclude_tested = True
        self.test_preclude_to_multiple_preclude()
        self.preclude_to_multiple_preclude_tested = True
        self.test_edit_preclude_duplicate_modules()
        self.edit_preclude_duplicate_tested = True
        self.test_edit_preclude_non_existent_modules()
        self.edit_preclude_non_existent_tested = True
        self.test_edit_preclude_already_in_prereq()
        self.edit_preclude_already_in_prereq = True
        self.test_edit_preclude_multiple_errors()
        self.edit_preclude_multiple_errors_tested = True
    def tearDown(self):
        '''
        Clean up the database after all test cases are ran
        '''
        model.delete_module(self.test_module_code)
        model.delete_module(self.test_preclude_code)
        model.delete_module(self.test_preclude2_code)
        model.delete_module(self.test_preclude3_code)
    def test_no_preclude_to_one_preclude(self):
        '''
        Tests editing preclusion on a module originally with no preclude
        to 1 preclude.
        '''
        if not self.no_preclude_to_one_preclude_tested:
            preclude_units_to_change_to = [self.test_preclude_code]
            outcome = model.edit_preclusion(self.test_module_code, preclude_units_to_change_to)
            assert_true(outcome[0])
            preclude_info = model.get_preclusion(self.test_module_code)
            assert_true(preclude_info is not None)
            assert_equal(len(preclude_info), 1)
            assert_equal(self.test_preclude_code, preclude_info[0][0])
            model.delete_all_preclusions(self.test_module_code)
            preclude_info = model.get_preclusion(self.test_module_code)
            assert_true(len(preclude_info) == 0)
        return
    def test_preclude_to_one_preclude(self):
        '''
        Tests editing preclusion on a module to 1 preclude.
        '''
        if not self.preclude_to_one_preclude_tested:
            model.add_preclusion(self.test_module_code, self.test_preclude_code)
            preclude_units_to_change_to = [self.test_preclude2_code]
            outcome = model.edit_preclusion(self.test_module_code, preclude_units_to_change_to)
            assert_true(outcome[0])
            preclude_info = model.get_preclusion(self.test_module_code)
            assert_true(preclude_info is not None)
            assert_equal(len(preclude_info), 1)
            assert_equal(self.test_preclude2_code, preclude_info[0][0])
            model.delete_all_preclusions(self.test_module_code)
            preclude_info = model.get_preclusion(self.test_module_code)
            assert_true(len(preclude_info) == 0)
        return
    def test_no_preclude_to_no_preclude(self):
        '''
        Tests editing preclusion on a module originally with no preclude
        to no preclude.
        '''
        if not self.no_preclude_to_no_preclude_tested:
            preclude_units_to_change_to = []
            outcome = model.edit_preclusion(self.test_module_code, preclude_units_to_change_to)
            assert_true(outcome[0])
            preclude_info = model.get_preclusion(self.test_module_code)
            assert_true(preclude_info is not None)
            assert_true(len(preclude_info) == 0)
        return
    def test_preclude_to_no_preclude(self):
        '''
        Tests editing preclusion on a module to no preclude.
        '''
        if not self.preclude_to_no_preclude_tested:
            model.add_preclusion(self.test_module_code, self.test_preclude_code)
            preclude_units_to_change_to = []
            outcome = model.edit_preclusion(self.test_module_code, preclude_units_to_change_to)
            assert_true(outcome[0])
            preclude_info = model.get_preclusion(self.test_module_code)
            assert_true(preclude_info is not None)
            assert_true(len(preclude_info) == 0)
        return
    def test_no_preclude_to_multiple_preclude(self):
        '''
        Tests editing preclusion on a module originally with no preclude
        to multiple precludes.
        '''
        if not self.no_preclude_to_multiple_preclude_tested:
            preclude_units_to_change_to = [self.test_preclude_code,
                                           self.test_preclude2_code, self.test_preclude3_code]
            outcome = model.edit_preclusion(self.test_module_code, preclude_units_to_change_to)
            assert_true(outcome[0])
            preclude_info = model.get_preclusion(self.test_module_code)
            assert_true(preclude_info is not None)
            assert_equal(len(preclude_info), 3)
            assert_equal(self.test_preclude_code, preclude_info[0][0])
            assert_equal(self.test_preclude2_code, preclude_info[1][0])
            assert_equal(self.test_preclude3_code, preclude_info[2][0])
            model.delete_all_preclusions(self.test_module_code)
            preclude_info = model.get_preclusion(self.test_module_code)
            assert_true(len(preclude_info) == 0)
        return
    def test_preclude_to_multiple_preclude(self):
        '''
        Tests editing preclusion on a module to multiple preclude.
        '''
        if not self.preclude_to_multiple_preclude_tested:
            model.add_preclusion(self.test_module_code, self.test_preclude_code)
            preclude_units_to_change_to = [self.test_preclude_code,
                                           self.test_preclude2_code, self.test_preclude3_code]
            outcome = model.edit_preclusion(self.test_module_code, preclude_units_to_change_to)
            assert_true(outcome[0])
            preclude_info = model.get_preclusion(self.test_module_code)
            assert_true(preclude_info is not None)
            assert_equal(len(preclude_info), 3)
            assert_equal(self.test_preclude_code, preclude_info[0][0])
            assert_equal(self.test_preclude2_code, preclude_info[1][0])
            assert_equal(self.test_preclude3_code, preclude_info[2][0])
            model.delete_all_preclusions(self.test_module_code)
            preclude_info = model.get_preclusion(self.test_module_code)
            assert_true(len(preclude_info) == 0)
        return
    def test_edit_preclude_duplicate_modules(self):
        '''
        Tests editing preclusion on a module to precludes with duplicates,
        note: this test case should fail to edit.
        '''
        if not self.edit_preclude_duplicate_tested:
            model.add_preclusion(self.test_module_code, self.test_preclude_code)
            preclude_units_to_change_to = [self.test_preclude2_code,
                                           self.test_preclude2_code]
            outcome = model.edit_preclusion(self.test_module_code, preclude_units_to_change_to)
            assert_false(outcome[0])
            error_list = outcome[1]
            assert_equal(len(error_list), 1)
            assert_equal(error_list[0],
                         [self.test_preclude2_code, self.ERROR_MSG_MODULE_DUPLICATED])
            preclude_info = model.get_preclusion(self.test_module_code)
            assert_true(preclude_info is not None)
            assert_equal(self.test_preclude_code, preclude_info[0][0])
            model.delete_all_preclusions(self.test_module_code)
            preclude_info = model.get_preclusion(self.test_module_code)
            assert_true(len(preclude_info) == 0)
    def test_edit_preclude_non_existent_modules(self):
        '''
        Tests editing preclusion on a module to precludes which does
        not exist, note: this test case should fail to edit.
        '''
        if not self.edit_preclude_non_existent_tested:
            model.add_preclusion(self.test_module_code, self.test_preclude_code)
            preclude_units_to_change_to = [self.test_preclude2_code,
                                           self.test_invalid_module_code]
            outcome = model.edit_preclusion(self.test_module_code, preclude_units_to_change_to)
            assert_false(outcome[0])
            error_list = outcome[1]
            assert_equal(len(error_list), 1)
            assert_equal(error_list[0],
                         [self.test_invalid_module_code, self.ERROR_MSG_MODULE_DOESNT_EXIST])
            preclude_info = model.get_preclusion(self.test_module_code)
            assert_true(preclude_info is not None)
            assert_equal(self.test_preclude_code, preclude_info[0][0])
            model.delete_all_preclusions(self.test_module_code)
            preclude_info = model.get_preclusion(self.test_module_code)
            assert_true(len(preclude_info) == 0)
            # Test another form (invalid code listed first).
            model.add_preclusion(self.test_module_code, self.test_preclude_code)
            preclude_units_to_change_to = [self.test_invalid_module_code,
                                           self.test_preclude2_code]
            outcome = model.edit_preclusion(self.test_module_code, preclude_units_to_change_to)
            assert_false(outcome[0])
            error_list = outcome[1]
            assert_equal(len(error_list), 1)
            assert_equal(error_list[0],
                         [self.test_invalid_module_code, self.ERROR_MSG_MODULE_DOESNT_EXIST])
            preclude_info = model.get_preclusion(self.test_module_code)
            assert_true(preclude_info is not None)
            assert_equal(self.test_preclude_code, preclude_info[0][0])
            model.delete_all_preclusions(self.test_module_code)
            preclude_info = model.get_preclusion(self.test_module_code)
            assert_true(len(preclude_info) == 0)
        return
    def test_edit_preclude_already_in_prereq(self):
        '''
        Tests editing preclusion on a module to precludes which already
        exists as a prerequisite to that module.
        Note: this test case should fail to edit.
        '''
        # BUG FIX: this guard previously checked
        # self.edit_preclude_non_existent_tested, which setUp sets to True
        # before calling this method — so the body below never executed.
        # It must check this scenario's own flag instead.
        if not self.edit_preclude_already_in_prereq:
            model.add_prerequisite(self.test_module_code, self.test_preclude_code,
                                   0)
            preclude_units_to_change_to = [self.test_preclude_code]
            outcome = model.edit_preclusion(self.test_module_code, preclude_units_to_change_to)
            assert_false(outcome[0])
            error_list = outcome[1]
            assert_equal(len(error_list), 1)
            assert_equal(error_list[0],
                         [self.test_preclude_code, self.ERROR_MSG_MODULE_PRECLUSION_ALREADY_PREREQ])
            preclude_info = model.get_preclusion(self.test_module_code)
            assert_true(preclude_info is not None)
            assert_true(len(preclude_info) == 0)
            model.delete_all_prerequisites(self.test_module_code)
            prereq_info = model.get_prerequisite(self.test_module_code)
            assert_true(len(prereq_info) == 0)
    def test_edit_preclude_multiple_errors(self):
        '''
        Tests editing preclusion on a module to precludes with multiple
        errors.
        Note: this test case should fail to edit.
        '''
        if not self.edit_preclude_multiple_errors_tested:
            model.add_preclusion(self.test_module_code, self.test_preclude_code)
            preclude_units_to_change_to = [self.test_module_code,
                                           self.test_invalid_module_code]
            outcome = model.edit_preclusion(self.test_module_code, preclude_units_to_change_to)
            assert_false(outcome[0])
            error_list = outcome[1]
            assert_equal(len(error_list), 2)
            assert_equal(error_list[0],
                         [self.test_module_code, self.ERROR_MSG_MODULE_CANNOT_BE_ITSELF])
            assert_equal(error_list[1],
                         [self.test_invalid_module_code, self.ERROR_MSG_MODULE_DOESNT_EXIST])
            preclude_info = model.get_preclusion(self.test_module_code)
            assert_true(preclude_info is not None)
            assert_equal(self.test_preclude_code, preclude_info[0][0])
            model.delete_all_preclusions(self.test_module_code)
            preclude_info = model.get_preclusion(self.test_module_code)
            assert_true(len(preclude_info) == 0)
| 41.485255
| 100
| 0.668799
| 1,952
| 15,474
| 4.885758
| 0.077357
| 0.112404
| 0.107162
| 0.100031
| 0.893363
| 0.861067
| 0.790815
| 0.702842
| 0.670022
| 0.669393
| 0
| 0.010362
| 0.264056
| 15,474
| 372
| 101
| 41.596774
| 0.827099
| 0.090539
| 0
| 0.603524
| 0
| 0
| 0.017719
| 0
| 0
| 0
| 0
| 0
| 0.273128
| 1
| 0.057269
| false
| 0
| 0.008811
| 0
| 0.101322
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
bd41cccda00d5215967e85051b1b932a562121c7
| 1,277
|
py
|
Python
|
server_src/controllers/TermTopicMatrix3.py
|
uwdata/termite-data-server
|
1085571407c627bdbbd21c352e793fed65d09599
|
[
"BSD-3-Clause"
] | 97
|
2015-01-17T09:41:57.000Z
|
2022-03-15T11:39:03.000Z
|
server_src/controllers/TermTopicMatrix3.py
|
afcarl/termite-data-server
|
1085571407c627bdbbd21c352e793fed65d09599
|
[
"BSD-3-Clause"
] | 12
|
2015-02-01T02:59:56.000Z
|
2021-06-09T02:31:34.000Z
|
server_src/controllers/TermTopicMatrix3.py
|
afcarl/termite-data-server
|
1085571407c627bdbbd21c352e793fed65d09599
|
[
"BSD-3-Clause"
] | 35
|
2015-01-25T04:48:37.000Z
|
2021-01-29T20:32:26.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from db.BOW_DB import BOW_DB
from db.LDA_DB import LDA_DB
from vis.TermTopicMatrix3 import TermTopicMatrix3
def _handler_result(method_name):
    """Open the databases, build a TermTopicMatrix3 handler, and return the
    result of calling its *method_name* method while the databases are open.

    `request` and `response` are web2py's per-request globals.
    """
    with BOW_DB() as bow_db:
        with LDA_DB() as lda_db:
            handler = TermTopicMatrix3(request, response, bow_db, lda_db)
            return getattr(handler, method_name)()

def _json_response(data):
    """Serialize *data* as pretty-printed JSON and mark the response as JSON.

    NOTE: the original passed encoding='utf-8' to json.dumps. That keyword
    exists only on Python 2, where 'utf-8' is already the default, so
    dropping it is behavior-neutral there and avoids a TypeError on Python 3.
    """
    dataStr = json.dumps(data, indent=2, sort_keys=True)
    response.headers['Content-Type'] = 'application/json'
    return dataStr

def index():
    """Render the full visualization page."""
    return _handler_result('GenerateResponse')

def GetTerms():
    """Return the term list as a JSON response."""
    return _json_response(_handler_result('GetTerms'))

def GetTopics():
    """Return the topic list as a JSON response."""
    return _json_response(_handler_result('GetTopics'))

def GetTermTopicMatrix():
    """Return the term-topic matrix as a JSON response."""
    return _json_response(_handler_result('GetTermTopicMatrix'))
| 31.146341
| 71
| 0.735317
| 188
| 1,277
| 4.829787
| 0.212766
| 0.077093
| 0.038546
| 0.048458
| 0.742291
| 0.742291
| 0.742291
| 0.742291
| 0.742291
| 0.742291
| 0
| 0.011818
| 0.138606
| 1,277
| 40
| 72
| 31.925
| 0.813636
| 0.03289
| 0
| 0.636364
| 0
| 0
| 0.080292
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.121212
| false
| 0
| 0.121212
| 0
| 0.363636
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
bd4f6fc4225859d59cf58a34e7fbf94514bcefa6
| 976
|
py
|
Python
|
thupoll/routes.py
|
octomen/thupoll
|
1114d8e9802a97c1fd9d3850c887df94e7fa609e
|
[
"MIT"
] | 2
|
2019-04-11T20:02:45.000Z
|
2019-04-15T01:43:09.000Z
|
thupoll/routes.py
|
octomen/thupoll
|
1114d8e9802a97c1fd9d3850c887df94e7fa609e
|
[
"MIT"
] | 37
|
2019-03-17T14:45:38.000Z
|
2019-06-07T03:19:22.000Z
|
thupoll/routes.py
|
octomen/thupoll
|
1114d8e9802a97c1fd9d3850c887df94e7fa609e
|
[
"MIT"
] | null | null | null |
from thupoll.blueprints.home import blueprint as home_blueprint
from thupoll.blueprints.login import blueprint as login_blueprint
from thupoll.blueprints.themes import blueprint as themes_blueprint
from thupoll.blueprints.namespaces import blueprint as namespaces_blueprint
from thupoll.blueprints.polls import blueprint as polls_blueprint
from thupoll.blueprints.me import blueprint as me_blueprint
from thupoll.blueprints.telegram.blueprint import telegram_blueprint
from thupoll.settings import env
def routify(app):
    """Register every application blueprint on *app*.

    Registration order matches the original hand-written sequence.
    """
    registrations = (
        (home_blueprint, None),
        (themes_blueprint, '/themes'),
        (polls_blueprint, '/polls'),
        (namespaces_blueprint, '/namespaces'),
        (login_blueprint, '/login'),
        (telegram_blueprint, env.telegram_url_pfx),
        (me_blueprint, '/me'),
    )
    for blueprint, prefix in registrations:
        if prefix is None:
            app.register_blueprint(blueprint)
        else:
            app.register_blueprint(blueprint, url_prefix=prefix)
| 51.368421
| 79
| 0.839139
| 126
| 976
| 6.269841
| 0.174603
| 0.111392
| 0.186076
| 0.227848
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091189
| 976
| 18
| 80
| 54.222222
| 0.890643
| 0
| 0
| 0
| 0
| 0
| 0.033811
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0625
| false
| 0
| 0.5
| 0
| 0.5625
| 0.875
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 5
|
1fa52d88d412740bb0c021f5bdca8eedc552f978
| 97
|
py
|
Python
|
Scripts/wheel-script.py
|
T3chy/WordAnalysis
|
5ac2f2409a4be24b74468be0c907946f6276af26
|
[
"PSF-2.0"
] | null | null | null |
Scripts/wheel-script.py
|
T3chy/WordAnalysis
|
5ac2f2409a4be24b74468be0c907946f6276af26
|
[
"PSF-2.0"
] | null | null | null |
Scripts/wheel-script.py
|
T3chy/WordAnalysis
|
5ac2f2409a4be24b74468be0c907946f6276af26
|
[
"PSF-2.0"
] | null | null | null |
# Console-script shim: delegate to the "wheel" package's CLI entry point
# and propagate its return value as the process exit code.
if __name__ == '__main__':
    import sys
    import wheel.tool
    sys.exit(wheel.tool.main())
| 16.166667
| 31
| 0.639175
| 13
| 97
| 4.153846
| 0.615385
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.226804
| 97
| 5
| 32
| 19.4
| 0.72
| 0
| 0
| 0
| 0
| 0
| 0.082474
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
1fad44bd67a7e6b2dc275c2fe745369e404412a8
| 316
|
py
|
Python
|
chemex/plotters/__init__.py
|
gbouvignies/chemex
|
b021650928b6db930281957222529bc6bcab8aa2
|
[
"BSD-3-Clause"
] | 11
|
2015-03-16T16:45:42.000Z
|
2018-09-17T08:43:58.000Z
|
chemex/plotters/__init__.py
|
gbouvignies/chemex
|
b021650928b6db930281957222529bc6bcab8aa2
|
[
"BSD-3-Clause"
] | 33
|
2015-01-12T16:46:48.000Z
|
2018-03-29T15:01:15.000Z
|
chemex/plotters/__init__.py
|
gbouvignies/chemex
|
b021650928b6db930281957222529bc6bcab8aa2
|
[
"BSD-3-Clause"
] | 8
|
2015-01-15T21:53:16.000Z
|
2018-01-04T15:33:52.000Z
|
from chemex.plotters.cest import CestPlotter as CestPlotter
from chemex.plotters.cpmg import CpmgPlotter as CpmgPlotter
from chemex.plotters.plotter import Plotter as Plotter
from chemex.plotters.relaxation import RelaxationPlotter as RelaxationPlotter
from chemex.plotters.shift import ShiftPlotter as ShiftPlotter
| 52.666667
| 77
| 0.873418
| 40
| 316
| 6.9
| 0.35
| 0.181159
| 0.326087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094937
| 316
| 5
| 78
| 63.2
| 0.965035
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1fcbbdcd07b55822560b1278f213200e37c1cc70
| 133
|
py
|
Python
|
bot.py
|
jhonruda25/VentasCuentas-Telegram-Bot
|
cce24fce046d3b6a012b22697d87b246e668131f
|
[
"MIT"
] | null | null | null |
bot.py
|
jhonruda25/VentasCuentas-Telegram-Bot
|
cce24fce046d3b6a012b22697d87b246e668131f
|
[
"MIT"
] | null | null | null |
bot.py
|
jhonruda25/VentasCuentas-Telegram-Bot
|
cce24fce046d3b6a012b22697d87b246e668131f
|
[
"MIT"
] | null | null | null |
from telegram.ext import Updater, CommandHandler
def start(update, context):
    """Handle the /start command by greeting the user.

    *update* is the incoming Telegram update; *context* is unused.
    """
    message = update.message
    message.reply_text('Hola, humano!')
| 14.777778
| 48
| 0.721805
| 16
| 133
| 5.9375
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172932
| 133
| 8
| 49
| 16.625
| 0.863636
| 0
| 0
| 0
| 0
| 0
| 0.097744
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1fee618415dfddc625741dbb28b9c865f7f85b09
| 239
|
py
|
Python
|
dataset/dataset_utils.py
|
CrossRef/reference-matching-evaluation
|
ed347f26a52d809db0884a548d581840e283d0e8
|
[
"MIT"
] | 14
|
2018-12-27T16:51:29.000Z
|
2021-11-28T20:59:17.000Z
|
dataset/dataset_utils.py
|
CrossRef/reference-matching-evaluation
|
ed347f26a52d809db0884a548d581840e283d0e8
|
[
"MIT"
] | 1
|
2019-04-10T22:26:01.000Z
|
2019-06-01T11:41:13.000Z
|
dataset/dataset_utils.py
|
CrossRef/reference-matching-evaluation
|
ed347f26a52d809db0884a548d581840e283d0e8
|
[
"MIT"
] | 1
|
2020-01-02T08:11:22.000Z
|
2020-01-02T08:11:22.000Z
|
import utils.data_format_keys as dfk
def get_target_gt_doi(item):
    """Return the DOI of *item*'s ground-truth target record, or None."""
    target = item.get(dfk.DATASET_TARGET_GT, {})
    return target.get(dfk.CR_ITEM_DOI)
def get_target_test_doi(item):
    """Return the DOI of *item*'s test target record, or None."""
    target = item.get(dfk.DATASET_TARGET_TEST, {})
    return target.get(dfk.CR_ITEM_DOI)
| 23.9
| 69
| 0.769874
| 43
| 239
| 3.906977
| 0.395349
| 0.142857
| 0.142857
| 0.202381
| 0.607143
| 0.428571
| 0.428571
| 0.428571
| 0
| 0
| 0
| 0
| 0.108787
| 239
| 9
| 70
| 26.555556
| 0.788732
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.2
| 0.4
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
1ffd5d157dc595c86121033df8bd227467261508
| 23
|
py
|
Python
|
backend/book/models/__init__.py
|
Mackrage/worm_rage_bot
|
7211c0c89ad12714eccdc98c6a84e7309108aabc
|
[
"MIT"
] | 216
|
2016-02-20T12:46:43.000Z
|
2022-02-23T07:07:00.000Z
|
models/medium/book/__init__.py
|
billvsme/tvCrawlers
|
e19111cc48d0a2a44c5245b0ddc9fad0c7a1824d
|
[
"MIT"
] | 3
|
2016-05-06T05:04:17.000Z
|
2021-12-13T19:41:39.000Z
|
models/medium/book/__init__.py
|
billvsme/tvCrawlers
|
e19111cc48d0a2a44c5245b0ddc9fad0c7a1824d
|
[
"MIT"
] | 99
|
2016-02-20T08:34:00.000Z
|
2022-02-10T20:52:01.000Z
|
from .book import Book
| 11.5
| 22
| 0.782609
| 4
| 23
| 4.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 23
| 1
| 23
| 23
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
9500d02e4e6adf12fb743fb62f4c8fa70d1c9f84
| 88
|
py
|
Python
|
Layers/__init__.py
|
mikuh/DRL
|
4de2e98efa91293a48256a183cb3e399d8e70b9a
|
[
"MIT"
] | null | null | null |
Layers/__init__.py
|
mikuh/DRL
|
4de2e98efa91293a48256a183cb3e399d8e70b9a
|
[
"MIT"
] | null | null | null |
Layers/__init__.py
|
mikuh/DRL
|
4de2e98efa91293a48256a183cb3e399d8e70b9a
|
[
"MIT"
] | null | null | null |
from Layers.layers import DenseEmbeddingNet, QNet, CNNEmbeddingNet, PolicyNet, ValueNet
| 44
| 87
| 0.852273
| 9
| 88
| 8.333333
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 88
| 1
| 88
| 88
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
952a6c0b89e90b4182fafd710172bb724485be65
| 34
|
py
|
Python
|
python/testData/psi/FStringIncompleteNamedUnicodePrecedingFragment.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/psi/FStringIncompleteNamedUnicodePrecedingFragment.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/psi/FStringIncompleteNamedUnicodePrecedingFragment.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
s = f'\N{LATIN SMALL LETTER A{42}'
| 34
| 34
| 0.647059
| 8
| 34
| 2.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068966
| 0.147059
| 34
| 1
| 34
| 34
| 0.689655
| 0
| 0
| 0
| 0
| 0
| 0.771429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
1f0836916ec984b3f560d590641ecc70653194ef
| 180
|
py
|
Python
|
pattoo_agents/snmp/constants.py
|
palisadoes/pattoo-agents
|
d73453ceac1747573dfbcad4da724325e86b208d
|
[
"Apache-2.0"
] | null | null | null |
pattoo_agents/snmp/constants.py
|
palisadoes/pattoo-agents
|
d73453ceac1747573dfbcad4da724325e86b208d
|
[
"Apache-2.0"
] | null | null | null |
pattoo_agents/snmp/constants.py
|
palisadoes/pattoo-agents
|
d73453ceac1747573dfbcad4da724325e86b208d
|
[
"Apache-2.0"
] | null | null | null |
"""Module that defines constants shared between agents."""
# pattoo-snmp constants
PATTOO_AGENT_SNMPD = 'pattoo_agent_snmpd'
PATTOO_AGENT_SNMP_IFMIBD = 'pattoo_agent_snmp_ifmibd'
| 30
| 58
| 0.822222
| 24
| 180
| 5.75
| 0.5
| 0.318841
| 0.231884
| 0.318841
| 0.311594
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094444
| 180
| 5
| 59
| 36
| 0.846626
| 0.416667
| 0
| 0
| 0
| 0
| 0.424242
| 0.242424
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
1f406f6d7d2a526cbf69bea19db6f2ddd4a0759f
| 421
|
py
|
Python
|
examples/prisoners_dilemma/utils.py
|
maxholloway/agent-exchange
|
13eab47b2aa709f416fbd3866d08cdfc876fbb1a
|
[
"MIT"
] | 1
|
2021-08-20T10:12:16.000Z
|
2021-08-20T10:12:16.000Z
|
examples/prisoners_dilemma/utils.py
|
maxholloway/agent-exchange
|
13eab47b2aa709f416fbd3866d08cdfc876fbb1a
|
[
"MIT"
] | 4
|
2020-10-25T05:28:40.000Z
|
2020-10-30T18:33:58.000Z
|
examples/prisoners_dilemma/utils.py
|
maxholloway/agent-exchange
|
13eab47b2aa709f416fbd3866d08cdfc876fbb1a
|
[
"MIT"
] | 1
|
2021-08-20T10:13:01.000Z
|
2021-08-20T10:13:01.000Z
|
class BufferList:
    """Fixed-capacity FIFO buffer whose ``append`` reports evictions.

    Unlike ``collections.deque(maxlen=...)``, which discards overflow
    silently, ``append`` here returns the element evicted to make room
    (or None when there was spare capacity).
    """

    def __init__(self, maxlen):
        # Backing store; the oldest element lives at index 0.
        self.data = []
        self.maxlen = maxlen

    def append(self, el):
        """Append *el*; return the evicted element, or None if none.

        Robustness fix: a zero-capacity buffer now "evicts" the new
        element immediately instead of raising IndexError from
        ``[].pop(0)`` as the original did.
        """
        if self.maxlen == 0:
            return el
        if len(self.data) == self.maxlen:
            popped = self.data.pop(0)  # O(n) shift; fine for small buffers
        else:
            popped = None
        self.data.append(el)
        return popped

    def peek(self, i):
        """Return the i-th most recent element (``peek(0)`` is the newest)."""
        return self.data[-i-1]

    def __len__(self):
        """Return the number of elements currently buffered."""
        return len(self.data)
| 22.157895
| 41
| 0.515439
| 51
| 421
| 4.098039
| 0.392157
| 0.229665
| 0.114833
| 0.172249
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007547
| 0.370546
| 421
| 19
| 42
| 22.157895
| 0.781132
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.266667
| false
| 0
| 0
| 0.133333
| 0.533333
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
1f83ca730e7b5a599e853b43f7287f708d0a6726
| 10,043
|
py
|
Python
|
jaxkern/dependence.py
|
J-Garcke-SCAI/jaxkern
|
9de7ebf52fe2d186d316350a6692b2ecc0885adc
|
[
"MIT"
] | 7
|
2020-09-28T07:39:16.000Z
|
2022-03-11T14:09:41.000Z
|
jaxkern/dependence.py
|
J-Garcke-SCAI/jaxkern
|
9de7ebf52fe2d186d316350a6692b2ecc0885adc
|
[
"MIT"
] | 5
|
2020-09-25T01:25:57.000Z
|
2020-10-09T16:15:49.000Z
|
jaxkern/dependence.py
|
J-Garcke-SCAI/jaxkern
|
9de7ebf52fe2d186d316350a6692b2ecc0885adc
|
[
"MIT"
] | 2
|
2021-05-25T21:59:58.000Z
|
2022-01-11T07:23:32.000Z
|
from jaxkern.dist import sqeuclidean_distance
from typing import Callable, Dict
import jax
import jax.numpy as np
from jaxkern.kernels import gram, covariance_matrix
from jaxkern.utils import centering
jax_np = jax.numpy.ndarray
def hsic(
    X: np.ndarray,
    Y: np.ndarray,
    kernel: Callable,
    params_x: Dict[str, float],
    params_y: Dict[str, float],
    bias: bool = False,
) -> float:
    """Hilbert-Schmidt Independence Criterion between two datasets.

    Parameters
    ----------
    X : jax.numpy.ndarray
        samples from the first dataset
    Y : jax.numpy.ndarray
        samples from the second dataset
    kernel : Callable
        kernel function applied to each dataset
    params_x : Dict[str, float]
        kernel parameters for X
    params_y : Dict[str, float]
        kernel parameters for Y
    bias : bool
        if True use the biased 1/n**2 normalization,
        otherwise the 1/(n-1)**2 estimator (default)

    Returns
    -------
    float
        the (scaled) HSIC value.
    """
    # Centered kernel matrices for each dataset.
    Kx = centering(covariance_matrix(kernel, params_x, X, X))
    Ky = centering(covariance_matrix(kernel, params_y, Y, Y))
    # Frobenius inner product of the centered kernels.
    hsic_value = np.sum(Kx * Ky)
    # Normalization constant; avoids shadowing the `bias` argument.
    n_samples = Kx.shape[0]
    if bias:
        scale = 1 / (n_samples ** 2)
    else:
        scale = 1 / (n_samples - 1) ** 2
    return scale * hsic_value
def nhsic_cka(
    X: np.ndarray,
    Y: np.ndarray,
    kernel: Callable,
    params_x: Dict[str, float],
    params_y: Dict[str, float],
) -> float:
    """Normalized HSIC (Tangent Kernel Alignment).

    HSIC divided by the Hilbert-Schmidt norm of each centered kernel
    matrix, giving a correlation-like score in [0, 1].

    Parameters
    ----------
    X : jax.numpy.ndarray
        samples from the first dataset
    Y : jax.numpy.ndarray
        samples from the second dataset
    kernel : Callable
        kernel function applied to each dataset
    params_x : Dict[str, float]
        kernel parameters for X
    params_y : Dict[str, float]
        kernel parameters for Y

    Returns
    -------
    cka_value : float
        the normalized hsic value.
    """
    # Centered kernel matrices for each dataset.
    Kx = centering(covariance_matrix(kernel, params_x, X, X))
    Ky = centering(covariance_matrix(kernel, params_y, Y, Y))
    # Frobenius inner product, normalized by each matrix's HS-norm in turn.
    cka_value = np.sum(Kx * Ky) / np.linalg.norm(Kx) / np.linalg.norm(Ky)
    return cka_value
def nhsic_nbs(
    X: np.ndarray,
    Y: np.ndarray,
    kernel: Callable,
    params_x: Dict[str, float],
    params_y: Dict[str, float],
) -> float:
    """Normalized Bures Similarity (NBS).

    A normalized variant of HSIC which divides by the traces of each
    dataset's centered kernel matrix.

    .. math::

        \\rho(K_x, K_y) =
        \\frac{\\text{Tr}\\left((K_x^{1/2} K_y K_x^{1/2})^{1/2}\\right)}
             {\\sqrt{\\text{Tr}(K_x)\\,\\text{Tr}(K_y)}}

    Parameters
    ----------
    X : np.ndarray
        the input value for one dataset
    Y : np.ndarray
        the input value for the second dataset
    kernel : Callable
        the kernel function to be used for each of the kernel
        calculations
    params_x : Dict[str, float]
        a dictionary of parameters to be used for calculating the
        kernel function for X
    params_y : Dict[str, float]
        a dictionary of parameters to be used for calculating the
        kernel function for Y

    Returns
    -------
    nbs_value : float
        the normalized Bures similarity value.

    Notes
    -----
    This is a metric that is similar to the correlation, [0,1]

    References
    ----------
    @article{JMLR:v18:16-296,
    author = {Austin J. Brockmeier and Tingting Mu and Sophia Ananiadou and John Y. Goulermas},
    title = {Quantifying the Informativeness of Similarity Measurements},
    journal = {Journal of Machine Learning Research},
    year = {2017},
    volume = {18},
    number = {76},
    pages = {1-61},
    url = {http://jmlr.org/papers/v18/16-296.html}
    }
    """
    # kernel matrices, centered
    Kx = covariance_matrix(kernel, params_x, X, X)
    Ky = covariance_matrix(kernel, params_y, Y, Y)
    Kx = centering(Kx)
    Ky = centering(Ky)
    # numerator: sum of square roots of the eigenvalues of Kx @ Ky
    numerator = np.real(np.linalg.eigvals(np.dot(Kx, Ky)))
    # clip rogue (slightly negative) eigenvalues before the sqrt.
    # BUG FIX: np.clip requires both a_min and a_max; the original
    # np.clip(numerator, 0.0) raises a TypeError at runtime.
    numerator = np.sqrt(np.clip(numerator, 0.0, None))
    numerator = np.sum(numerator)
    # denominator: geometric mean of the traces
    denominator = np.sqrt(np.trace(Kx) * np.trace(Ky))
    # return nbs value
    return numerator / denominator
def nhsic_ka(
    X: np.ndarray,
    Y: np.ndarray,
    kernel: Callable,
    params_x: Dict[str, float],
    params_y: Dict[str, float],
) -> float:
    """Kernel Alignment (uncentered variant of ``nhsic_cka``).

    Computes the Frobenius inner product of the two (uncentered) kernel
    matrices, normalized by the product of their Frobenius norms.
    """
    gram_x = covariance_matrix(kernel, params_x, X, X)
    gram_y = covariance_matrix(kernel, params_y, Y, Y)
    alignment = np.sum(gram_x * gram_y) / np.linalg.norm(gram_x) / np.linalg.norm(gram_y)
    return alignment
def nhsic_cca(
    X: np.ndarray,
    Y: np.ndarray,
    kernel: Callable,
    params_x: Dict[str, float],
    params_y: Dict[str, float],
    epsilon: float = 1e-5,
    bias: bool = False,
) -> float:
    """Normalized HSIC (regularized kernel CCA variant).

    An HSIC variant computed on regularized projections
    ``K (K + eps * n * I)^{-1}`` of each centered kernel matrix,
    rather than on the kernel matrices directly.

    Parameters
    ----------
    X : np.ndarray
        the input value for one dataset
    Y : np.ndarray
        the input value for the second dataset
    kernel : Callable
        the kernel function to be used for each of the kernel
        calculations
    params_x : Dict[str, float]
        a dictionary of parameters to be used for calculating the
        kernel function for X
    params_y : Dict[str, float]
        a dictionary of parameters to be used for calculating the
        kernel function for Y
    epsilon : float
        regularization strength for the matrix inversions (scaled by
        the number of samples)
    bias : bool
        if True use the biased 1/n^2 normalization, otherwise the
        1/(n-1)^2 normalization

    Returns
    -------
    cca_value : float
        the normalized hsic value.

    Notes
    -----
    This is a metric that is similar to the correlation, [0,1]
    """
    n_samples = X.shape[0]
    # kernel matrices
    Kx = gram(kernel, params_x, X, X)
    Ky = gram(kernel, params_y, Y, Y)
    # center kernel matrices
    Kx = centering(Kx)
    Ky = centering(Ky)
    # regularized inverses: (K + eps * n * I)^{-1}
    K_id = np.eye(Kx.shape[0])
    Kx_inv = np.linalg.inv(Kx + epsilon * n_samples * K_id)
    Ky_inv = np.linalg.inv(Ky + epsilon * n_samples * K_id)
    Rx = np.dot(Kx, Kx_inv)
    Ry = np.dot(Ky, Ky_inv)
    hsic_value = np.sum(Rx * Ry)
    # use a separate name instead of rebinding the boolean `bias` parameter
    if bias:
        norm_factor = 1 / (Kx.shape[0] ** 2)
    else:
        norm_factor = 1 / (Kx.shape[0] - 1) ** 2
    return norm_factor * hsic_value
def _hsic_uncentered(
    X: np.ndarray,
    Y: np.ndarray,
    kernel: Callable,
    params_x: Dict[str, float],
    params_y: Dict[str, float],
) -> float:
    """Uncentered HSIC: mean entry of the product of the two Gram matrices."""
    gram_x = gram(kernel, params_x, X, X)
    gram_y = gram(kernel, params_y, Y, Y)
    # average over all entries of Kx @ Ky^T
    product = np.dot(gram_x, gram_y.T)
    return np.mean(product)
def mmd_mi(
    X: np.ndarray,
    Y: np.ndarray,
    kernel: Callable,
    params_x: Dict[str, float],
    params_y: Dict[str, float],
) -> float:
    """Maximum Mean Discrepancy on centered kernel matrices.

    Parameters
    ----------
    X : np.ndarray
        array-like of shape (n_samples, n_features)
    Y : np.ndarray
        The data matrix.

    Notes
    -----
    This method is equivalent to the HSIC method.
    """
    # Gram matrices for each dataset, then centered
    gram_x = centering(gram(kernel, params_x, X, X))
    gram_y = centering(gram(kernel, params_y, Y, Y))
    # three expectation terms of the MMD expansion
    joint_term = np.mean(gram_x * gram_y)
    cross_term = np.mean(np.mean(gram_x, axis=0) * np.mean(gram_y, axis=0))
    marginal_term = np.mean(gram_x) * np.mean(gram_y)
    return joint_term - 2 * cross_term + marginal_term
def mmd(
    X: np.ndarray,
    Y: np.ndarray,
    kernel: Callable,
    params_x: Dict[str, float],
    params_y: Dict[str, float],
    params_xy: Dict[str, float],
    bias: bool = False,
    center: bool = False,
) -> float:
    """Maximum Mean Discrepancy (MMD) between the samples X and Y.

    Parameters
    ----------
    X : np.ndarray
        array-like of shape (n_samples, n_features)
    Y : np.ndarray
        array-like of shape (m_samples, n_features)
    kernel : Callable
        the kernel function used for all three Gram matrices
    params_x : Dict[str, float]
        kernel parameters for K(X, X)
    params_y : Dict[str, float]
        kernel parameters for K(Y, Y)
    params_xy : Dict[str, float]
        kernel parameters for the cross Gram matrix K(X, Y)
    bias : bool
        if True return the biased (V-statistic) estimate, floored at
        zero and square-rooted; otherwise the unbiased (U-statistic)
        estimate
    center : bool
        accepted for interface compatibility but currently unused; the
        Gram matrices are NOT centered here (see ``mmd_mi`` for the
        centered, HSIC-equivalent variant)

    Returns
    -------
    float
        the MMD estimate
    """
    n_samples, m_samples = X.shape[0], Y.shape[0]
    # U-statistic normalization constants (diagonal terms removed below)
    a00 = 1.0 / (n_samples * (n_samples - 1.0))
    a11 = 1.0 / (m_samples * (m_samples - 1.0))
    a01 = -1.0 / (n_samples * m_samples)
    # kernel matrices
    Kx = gram(kernel, params_x, X, X)
    Ky = gram(kernel, params_y, Y, Y)
    Kxy = gram(kernel, params_xy, X, Y)
    if bias:
        # biased V-statistic; clamp tiny negatives before the sqrt
        mmd = np.mean(Kx) + np.mean(Ky) - 2 * np.mean(Kxy)
        return np.where(mmd >= 0.0, np.sqrt(mmd), 0.0)
    else:
        # unbiased U-statistic. NOTE(review): subtracting n_samples /
        # m_samples from the Gram sums assumes K(x, x) == 1 (unit
        # diagonal, e.g. an RBF kernel) — confirm for other kernels.
        return (
            2 * a01 * np.mean(Kxy)
            + a00 * (np.sum(Kx) - n_samples)
            + a11 * (np.sum(Ky) - m_samples)
        )
| 23.742317
| 96
| 0.600617
| 1,430
| 10,043
| 4.133566
| 0.128671
| 0.03079
| 0.050753
| 0.028422
| 0.762984
| 0.754525
| 0.744882
| 0.744882
| 0.744882
| 0.744882
| 0
| 0.012808
| 0.284775
| 10,043
| 422
| 97
| 23.798578
| 0.810107
| 0.486508
| 0
| 0.645833
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0
| 0.041667
| 0
| 0.159722
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
2f7be556d6c914fa516a53d7f444682d89227529
| 177
|
py
|
Python
|
mcq/core/tests/test_general.py
|
boyombo/django-mcq
|
8c7169d6f479ef3ef6a042b8b3261c66516cb032
|
[
"MIT"
] | null | null | null |
mcq/core/tests/test_general.py
|
boyombo/django-mcq
|
8c7169d6f479ef3ef6a042b8b3261c66516cb032
|
[
"MIT"
] | null | null | null |
mcq/core/tests/test_general.py
|
boyombo/django-mcq
|
8c7169d6f479ef3ef6a042b8b3261c66516cb032
|
[
"MIT"
] | 1
|
2021-04-30T14:25:13.000Z
|
2021-04-30T14:25:13.000Z
|
def test_home(client):
    """Home page responds with HTTP 200 and renders the ``home.html`` template."""
    # removed a leftover commented-out pdb debugger line
    response = client.get('/')
    assert response.status_code == 200
    assert response.template_name == ['home.html']
| 25.285714
| 50
| 0.672316
| 23
| 177
| 5
| 0.73913
| 0.243478
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02069
| 0.180791
| 177
| 6
| 51
| 29.5
| 0.772414
| 0.146893
| 0
| 0
| 0
| 0
| 0.066667
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
85ece56037bb69964dce7495b271980079552acd
| 45,498
|
py
|
Python
|
Climate_Shocks/note_worthy_events/inital_event_recurance.py
|
Komanawa-Solutions-Ltd/SLMACC-2020-CSRA
|
914b6912c5f5b522107aa9406fb3d823e61c2ebe
|
[
"Apache-2.0"
] | null | null | null |
Climate_Shocks/note_worthy_events/inital_event_recurance.py
|
Komanawa-Solutions-Ltd/SLMACC-2020-CSRA
|
914b6912c5f5b522107aa9406fb3d823e61c2ebe
|
[
"Apache-2.0"
] | null | null | null |
Climate_Shocks/note_worthy_events/inital_event_recurance.py
|
Komanawa-Solutions-Ltd/SLMACC-2020-CSRA
|
914b6912c5f5b522107aa9406fb3d823e61c2ebe
|
[
"Apache-2.0"
] | null | null | null |
"""
Author: Matt Hanson
Created: 3/11/2020 9:04 AM
"""
from Climate_Shocks.vcsn_pull import vcsn_pull_single_site
from Climate_Shocks.note_worthy_events.simple_soil_moisture_pet import calc_sma_smd_historical, calc_smd_monthly
from Climate_Shocks.get_past_record import get_restriction_record, get_vcsn_record
from Pasture_Growth_Modelling.initialisation_support.pasture_growth_deficit import calc_past_pasture_growth_anomaly
import ksl_env
import numpy as np
import pandas as pd
import os
import matplotlib.pyplot as plt
import itertools
import sys
# --- script configuration from the command line ---
event_def_dir = sys.argv[1]  # the path to the output directory
print(event_def_dir)
vcsn_version = sys.argv[2]  # 'trended', 'detrended2'
print(vcsn_version)
# only the two known VCSN dataset versions are supported
if vcsn_version not in ['trended', 'detrended2']:
    raise ValueError('incorrect value for vcsn_version: {}'.format(vcsn_version, ))
if not os.path.exists(event_def_dir):
    os.makedirs(event_def_dir)
# --- module-level pasture growth anomaly (PGA) tables used by the helpers below ---
# irrigated site, indexed by (month, year) for joins against event indices
irrigated_pga = calc_past_pasture_growth_anomaly('irrigated', site='eyrewell').reset_index()
irrigated_pga.loc[:, 'year'] = irrigated_pga.date.dt.year
irrigated_pga = irrigated_pga.set_index(['month', 'year'])
# dryland site, same layout
dryland_pga = calc_past_pasture_growth_anomaly('dryland').reset_index()
dryland_pga.loc[:, 'year'] = dryland_pga.date.dt.year
dryland_pga = dryland_pga.set_index(['month', 'year'])
def prob(x):
    """Fraction of truthy/nonzero entries in *x* (NaNs ignored by the sum)."""
    hits = np.nansum(x)
    return hits / len(x)
def add_pga_from_idx(idx):
    """Summarise pasture growth anomaly for the event months in *idx*.

    Parameters
    ----------
    idx : pd.Series
        (month, year) pairs identifying event months; NaN entries are
        dropped before the lookup.

    Returns
    -------
    pd.DataFrame
        per-month ``describe()`` statistics of 'pga_norm' from the
        module-level ``irrigated_pga`` and ``dryland_pga`` tables,
        merged with '_irr' / '_dry' column suffixes.
    """
    idx = idx.dropna()
    # irrigated stats per month
    irr_temp = irrigated_pga.loc[idx].reset_index()
    irr_temp2 = irr_temp.loc[:, ['month', 'pga_norm']].groupby('month').describe().loc[:, 'pga_norm']
    # dryland stats per month
    dry_temp = dryland_pga.loc[idx].reset_index()
    dry_temp2 = dry_temp.loc[:, ['month', 'pga_norm']].groupby('month').describe().loc[:, 'pga_norm']
    temp3 = pd.merge(irr_temp2, dry_temp2, left_index=True, right_index=True, suffixes=('_irr', '_dry'))
    return pd.DataFrame(temp3)
def add_pga(grouped_data, sim_keys, outdata):
    """Attach pasture-growth-anomaly statistics and event years to *outdata*.

    For each event key in *sim_keys*, the (month, year) pairs where the
    boolean event column is True are taken from *grouped_data*; per-month
    ``describe()`` statistics of 'pga_norm' (from the module-level
    ``irrigated_pga`` / ``dryland_pga`` tables) are written into *outdata*
    under ``(key, 'pga_irr_*')`` / ``(key, 'pga_dry_*')`` columns.

    Returns
    -------
    (outdata, out_years) : tuple
        *outdata* with the added columns (column index sorted on level 0),
        and a DataFrame of the event (month, year) index values per key,
        NaN-padded to a common length.
    """
    grouped_data = grouped_data.set_index(['month', 'year'])
    years = {}
    for k in sim_keys:
        # select only the index entries where the boolean event column is True
        idx = grouped_data.loc[grouped_data.loc[:, k], k]
        assert idx.all()
        idx = idx.index
        years[k] = idx.values
        temp_irr = irrigated_pga.loc[idx].reset_index()
        temp_irr2 = temp_irr.loc[:, ['month', 'pga_norm']].groupby('month').describe().loc[:, 'pga_norm']
        temp_dry = dryland_pga.loc[idx].reset_index()
        temp_dry2 = temp_dry.loc[:, ['month', 'pga_norm']].groupby('month').describe().loc[:, 'pga_norm']
        for k2 in temp_irr2:
            outdata.loc[:, (k, 'pga_irr_{}'.format(k2))] = temp_irr2.loc[:, k2]
            outdata.loc[:, (k, 'pga_dry_{}'.format(k2))] = temp_dry2.loc[:, k2]
    # 48 years * 12 months + 1 rows is enough to hold every possible event month
    mx_years = 48 * 12 + 1
    out_years = pd.DataFrame(index=range(mx_years), columns=sim_keys)
    for k in sim_keys:
        # NaN-pad each key's event list to the common length
        missing_len = mx_years - len(years[k])
        out_years.loc[:, k] = np.concatenate((years[k], np.zeros(missing_len) * np.nan))
    outdata = outdata.sort_index(axis=1, level=0, sort_remaining=False)
    return outdata, out_years
def calc_dry_recurance_monthly_smd():
    """Dry-event recurrence probabilities from the monthly SMD record.

    Flags days where soil moisture deficit (smd) and soil moisture anomaly
    (sma) are at or below a grid of thresholds, counts flagged days per
    (month, year), turns counts >= ndays into monthly boolean events, and
    writes the raw record, monthly data, descriptive stats, probabilities
    and event years to CSVs in ``event_def_dir``.
    """
    data = get_vcsn_record(vcsn_version)
    # monthly soil moisture deficit and its day-of-year anomaly
    t = calc_smd_monthly(rain=data.rain, pet=data.pet, dates=data.index)
    data.loc[:, 'smd'] = t
    t = data.loc[:, ['doy', 'smd']].groupby('doy').mean().to_dict()
    data.loc[:, 'sma'] = data.loc[:, 'smd'] - data.loc[:, 'doy'].replace(t['smd'])
    data.reset_index(inplace=True)
    data.to_csv(os.path.join(event_def_dir, 'monthly_smd_dry_raw.csv'))
    # threshold grid: a day is flagged when smd <= smd_t AND sma <= sma_t
    smd_thresholds = [0]
    sma_thresholds = [-5, -10, -12, -15, -17, -20]
    ndays = [5, 7, 10, 14]
    out_keys = []
    for smd_t, sma_t in itertools.product(smd_thresholds, sma_thresholds):
        k = 'd_smd{:03d}_sma{:02d}'.format(smd_t, sma_t)
        data.loc[:, k] = (data.loc[:, 'smd'] <= smd_t) & (data.loc[:, 'sma'] <= sma_t)
        out_keys.append(k)
    # flagged-day counts per (month, year)
    grouped_data = data.loc[:, ['month', 'year',
                                'smd', 'sma'] + out_keys].groupby(['month', 'year']).sum().reset_index()
    grouped_data.to_csv(os.path.join(event_def_dir, 'monthly_smd_dry_monthly_data.csv'))
    grouped_data.drop(columns=['year']).groupby('month').describe().to_csv(os.path.join(event_def_dir,
                                                                                       'monthly_smd_dry_monthly_data_desc.csv'))
    # an event occurs when at least nd flagged days fall in the month
    out_keys2 = []
    for nd in ndays:
        for k in out_keys:
            ok = '{:02d}d_{}'.format(nd, k)
            out_keys2.append(ok)
            grouped_data.loc[:, ok] = grouped_data.loc[:, k] >= nd
    out = grouped_data.loc[:, ['month'] + out_keys2].groupby(['month']).aggregate(['sum', prob])
    # drop keys that always (48/48) or never fire - they carry no information
    drop_keys = []
    for k in out_keys2:
        temp = (out.loc[:, k].loc[:, 'sum'] == 48).all() or (out.loc[:, k].loc[:, 'sum'] == 0).all()
        if temp:
            drop_keys.append(k)
    out = out.drop(columns=drop_keys)
    out, out_years = add_pga(grouped_data, set(out_keys2) - set(drop_keys), out)
    # convert probabilities to percentages (sum/count columns excluded)
    t = pd.Series([' '.join(e) for e in out.columns])
    idx = ~((t.str.contains('sum')) | (t.str.contains('count')))
    out.loc[:, out.columns[idx]] *= 100
    out.to_csv(os.path.join(event_def_dir, 'monthly_smd_dry_prob.csv'), float_format='%.1f%%')
    out.loc[:, out.columns[idx]].to_csv(os.path.join(event_def_dir, 'monthly_smd_dry_prob_only_prob.csv'),
                                        float_format='%.1f%%')
    out_years.to_csv(os.path.join(event_def_dir, 'monthly_smd_dry_years.csv'))
def calc_dry_recurance():
    """Dry-event recurrence probabilities from the daily SMD/SMA record.

    Like ``calc_dry_recurance_monthly_smd`` but using the historical daily
    soil-moisture calculation, with (smd, sma) threshold pairs zipped
    rather than a full product.  Writes raw data, monthly data, stats,
    probabilities and event years to CSVs in ``event_def_dir``.
    """
    data = get_vcsn_record(vcsn_version).reset_index()
    # historical daily soil moisture deficit/anomaly calculation
    temp = calc_sma_smd_historical(data['rain'], data['pet'], data.date, 150, 1)
    trans_cols = ['mean_doy_smd', 'sma', 'smd', 'drain', 'aet_out']
    data.loc[:, trans_cols] = temp.loc[:, trans_cols]
    data.to_csv(os.path.join(event_def_dir, 'dry_raw.csv'))
    # threshold PAIRS (zipped, not a product)
    smd_thresholds = [0, -110, -110]
    sma_thresholds = [-20, 0, -20]
    ndays = [5, 7, 10, 14]
    out_keys = []
    for smd_t, sma_t in zip(smd_thresholds, sma_thresholds):
        k = 'd_smd{:03d}_sma{:02d}'.format(smd_t, sma_t)
        data.loc[:, k] = (data.loc[:, 'smd'] <= smd_t) & (data.loc[:, 'sma'] <= sma_t)
        out_keys.append(k)
    # flagged-day counts per (month, year)
    grouped_data = data.loc[:, ['month', 'year',
                                'smd', 'sma'] + out_keys].groupby(['month', 'year']).sum().reset_index()
    grouped_data.to_csv(os.path.join(event_def_dir, 'dry_monthly_data.csv'))
    grouped_data.drop(columns=['year']).groupby('month').describe().to_csv(os.path.join(event_def_dir,
                                                                                       'dry_monthly_data_desc.csv'))
    # an event occurs when at least nd flagged days fall in the month
    out_keys2 = []
    for nd in ndays:
        for k in out_keys:
            ok = '{:02d}d_{}'.format(nd, k)
            out_keys2.append(ok)
            grouped_data.loc[:, ok] = grouped_data.loc[:, k] >= nd
    out = grouped_data.loc[:, ['month'] + out_keys2].groupby(['month']).aggregate(['sum', prob])
    # drop keys that always or never fire
    drop_keys = []
    for k in out_keys2:
        temp = (out.loc[:, k].loc[:, 'sum'] == 48).all() or (out.loc[:, k].loc[:, 'sum'] == 0).all()
        if temp:
            drop_keys.append(k)
    out = out.drop(columns=drop_keys)
    out, out_years = add_pga(grouped_data, set(out_keys2) - set(drop_keys), out)
    # probabilities to percentages (sum/count columns excluded)
    t = pd.Series([' '.join(e) for e in out.columns])
    idx = ~((t.str.contains('sum')) | (t.str.contains('count')))
    out.loc[:, out.columns[idx]] *= 100
    out.to_csv(os.path.join(event_def_dir, 'dry_prob.csv'), float_format='%.1f%%')
    out.loc[:, out.columns[idx]].to_csv(os.path.join(event_def_dir, 'dry_prob_only_prob.csv'), float_format='%.1f%%')
    out_years.to_csv(os.path.join(event_def_dir, 'dry_years.csv'))
def calc_wet_recurance():
    """Wet-event recurrence probabilities from rain and SMD thresholds.

    Flags days where rain >= t_r AND smd >= t_smd over a threshold grid,
    counts per (month, year), converts counts >= ndays into monthly boolean
    events, and writes raw data, monthly data, stats, probabilities and
    event years to CSVs in ``event_def_dir``.
    """
    data = get_vcsn_record(vcsn_version).reset_index()
    temp = calc_sma_smd_historical(data['rain'], data['pet'], data.date, 150, 1)
    trans_cols = ['mean_doy_smd', 'sma', 'smd', 'drain', 'aet_out']
    data.loc[:, trans_cols] = temp.loc[:, trans_cols]
    # debug toggle: flip to True to eyeball the smd/drain/rain series
    temp = False
    if temp:  # just to look at some plots
        fig, (ax, ax2, ax3) = plt.subplots(3, sharex=True)
        ax.plot(data.date, data.smd)
        ax2.plot(data.date, data.drain)
        ax3.plot(data.date, data.rain)
        plt.show()
    data.to_csv(os.path.join(event_def_dir, 'smd_wet_raw.csv'))
    # threshold grid: a day is flagged when rain >= t_r AND smd >= t_smd
    thresholds_rain = [5, 3, 1, 0]
    thresholds_smd = [0, -5, -10]
    ndays = [7, 10, 14]
    out_keys = []
    for t_r, t_smd in itertools.product(thresholds_rain, thresholds_smd):
        k = 'd_r{}_smd{}'.format(t_r, t_smd)
        data.loc[:, k] = (data.loc[:, 'rain'] >= t_r) & (data.loc[:, 'smd'] >= t_smd)
        out_keys.append(k)
    grouped_data = data.loc[:, ['month', 'year', 'rain'] + out_keys].groupby(['month', 'year']).sum().reset_index()
    # make monthly rain anomaly - mean
    temp = grouped_data.groupby('month').mean().loc[:, 'rain'].to_dict()
    grouped_data.loc[:, 'rain_an_mean'] = grouped_data.loc[:, 'month']
    grouped_data = grouped_data.replace({'rain_an_mean': temp})
    grouped_data.loc[:, 'rain_an_mean'] = grouped_data.loc[:, 'rain'] - grouped_data.loc[:, 'rain_an_mean']
    # make monthly rain anomaly - median
    temp = grouped_data.groupby('month').median().loc[:, 'rain'].to_dict()
    grouped_data.loc[:, 'rain_an_med'] = grouped_data.loc[:, 'month']
    grouped_data = grouped_data.replace({'rain_an_med': temp})
    grouped_data.loc[:, 'rain_an_med'] = grouped_data.loc[:, 'rain'] - grouped_data.loc[:, 'rain_an_med']
    grouped_data.to_csv(os.path.join(event_def_dir, 'smd_wet_monthly_data.csv'))
    grouped_data.drop(columns=['year']).groupby('month').describe().to_csv(os.path.join(event_def_dir,
                                                                                       'smd_wet_monthly_data_desc.csv'))
    # number of n days: event = at least nd flagged days in the month
    out_keys2 = []
    for nd in ndays:
        for k in out_keys:
            ok = '{:02d}d_{}'.format(nd, k)
            out_keys2.append(ok)
            grouped_data.loc[:, ok] = grouped_data.loc[:, k] >= nd
    out = grouped_data.loc[:, ['month'] + out_keys2].groupby(['month']).aggregate(['sum', prob])
    # drop keys that always or never fire
    drop_keys = []
    for k in out_keys2:
        temp = (out.loc[:, k].loc[:, 'sum'] == 48).all() or (out.loc[:, k].loc[:, 'sum'] == 0).all()
        if temp:
            drop_keys.append(k)
    out = out.drop(columns=drop_keys)
    out, out_years = add_pga(grouped_data, set(out_keys2) - set(drop_keys), out)
    # probabilities to percentages (sum/count columns excluded)
    t = pd.Series([' '.join(e) for e in out.columns])
    idx = ~((t.str.contains('sum')) | (t.str.contains('count')))
    out.loc[:, out.columns[idx]] *= 100
    out.to_csv(os.path.join(event_def_dir, 'smd_wet_prob.csv'), float_format='%.1f%%')
    out.loc[:, out.columns[idx]].to_csv(os.path.join(event_def_dir, 'smd_wet_prob_only_prob.csv'),
                                        float_format='%.1f%%')
    out_years.to_csv(os.path.join(event_def_dir, 'smd_wet_years.csv'))
def calc_wet_recurance_ndays():
    """Wet-event recurrence from counts of rain days per month.

    A month is an event when the number of rain days (rain > 0.01 mm)
    reaches the month-specific limit in ``ndays``.  Writes the raw record,
    monthly data, probabilities and event years to CSVs in
    ``event_def_dir``.
    """
    ndays = {
        'org': {  # this is the best value!
            5: 14,
            6: 11,
            7: 11,
            8: 13,
            9: 13,
        }
    }
    for v in ndays.values():
        # out-of-season months get an impossible limit (99 rain days) so
        # they can never fire
        v.update({
            1: 99,
            2: 99,
            3: 99,
            4: 99,
            10: 99,
            11: 99,
            12: 99,
        })
    data = get_vcsn_record(vcsn_version).reset_index()
    temp = calc_sma_smd_historical(data['rain'], data['pet'], data.date, 150, 1)
    trans_cols = ['mean_doy_smd', 'sma', 'smd', 'drain', 'aet_out']
    data.loc[:, trans_cols] = temp.loc[:, trans_cols]
    # a 'rain day' is any day with more than 0.01 mm of rain
    data.loc[:, 'ndays_rain'] = (data.loc[:, 'rain'] > 0.01).astype(float)
    data.to_csv(os.path.join(event_def_dir, 'ndays_wet_raw.csv'))
    grouped_data = data.loc[:, ['month', 'year', 'rain', 'ndays_rain']].groupby(['month', 'year']).sum().reset_index()
    grouped_data.to_csv(os.path.join(event_def_dir, 'ndays_wet_monthly_data.csv'))
    grouped_data.drop(columns=['year']).groupby('month').describe().to_csv(os.path.join(event_def_dir,
                                                                                       'ndays_wet_monthly_data_desc.csv'))
    # number of n days: map each month to its rain-day limit and flag months
    # at or over it
    out_keys2 = []
    for k, val in ndays.items():
        ok = '{}'.format(k)
        out_keys2.append(ok)
        grouped_data.loc[:, 'limit'] = grouped_data.loc[:, 'month']
        grouped_data = grouped_data.replace({'limit': val})
        grouped_data.loc[:, ok] = grouped_data.loc[:, 'ndays_rain'] >= grouped_data.loc[:, 'limit']
    out = grouped_data.loc[:, ['month'] + out_keys2].groupby(['month']).aggregate(['sum', prob])
    # drop keys that always or never fire
    drop_keys = []
    for k in out_keys2:
        temp = (out.loc[:, k].loc[:, 'sum'] == 48).all() or (out.loc[:, k].loc[:, 'sum'] == 0).all()
        if temp:
            drop_keys.append(k)
    out = out.drop(columns=drop_keys)
    out, out_years = add_pga(grouped_data, set(out_keys2) - set(drop_keys), out)
    # probabilities to percentages (sum/count columns excluded)
    t = pd.Series([' '.join(e) for e in out.columns])
    idx = ~((t.str.contains('sum')) | (t.str.contains('count')))
    out.loc[:, out.columns[idx]] *= 100
    out.to_csv(os.path.join(event_def_dir, 'ndays_wet_prob.csv'), float_format='%.1f%%')
    out.loc[:, out.columns[idx]].to_csv(os.path.join(event_def_dir, 'ndays_wet_prob_only_prob.csv'),
                                        float_format='%.1f%%')
    out_years.to_csv(os.path.join(event_def_dir, 'ndays_wet_years.csv'))
def calc_dry_rolling():
    """Dry-event recurrence based on 10-day rolling rainfall totals.

    For every (threshold set, ndays set) combination, a month is an event
    when at least ``ndays`` days have a 10-day rolling rainfall total at or
    below the month's threshold.  Writes monthly event booleans,
    probabilities and event years to CSVs in ``event_def_dir``.

    Returns
    -------
    (keys, out) : list, pd.DataFrame
        the surviving event keys and the probability table
    """
    bulk_ndays = [5, 10, 15, 20]
    ndays = {}
    for bnd in bulk_ndays:
        # same day-count requirement for every month
        ndays['ndays{}'.format(bnd)] = {k: bnd for k in range(1, 13)}
    thresholds = {  # this did not end up getting used
        'first': {
            4: 15,
            5: 10,
            8: 5,
            9: 10,
        },
        'first-3': {
            4: 15 - 3,
            5: 10 - 3,
            8: 5 - 3,
            9: 10 - 3,
        },
        'first-5': {
            4: 15 - 5,
            5: 10 - 5,
            8: 5 - 5,
            9: 10 - 5,
        },
        'first-10': {
            4: 15 - 10,
            5: 10 - 10,
            8: 5 - 10,
            9: 10 - 10,
        },
        'zero': {
            4: 0,
            5: 0,
            8: 0,
            9: 0,
        },
        'one': {
            4: 1,
            5: 1,
            8: 1,
            9: 1,
        },
        'first-7': {
            4: 15 - 7,
            5: 10 - 7,
            8: 5 - 7,
            9: 10 - 7,
        },
    }
    for v in thresholds.values():
        # out-of-season months get a -1 threshold, which a rolling rain sum
        # (>= 0) can never reach, so they never fire
        v.update({
            1: -1,
            2: -1,
            3: -1,
            6: -1,
            7: -1,
            10: -1,
            11: -1,
            12: -1,
        })
    data = get_vcsn_record(vcsn_version).reset_index()
    data.loc[:, 'roll_rain_10'] = data.loc[:, 'rain'].rolling(10).sum()
    out_keys = []
    outdata = pd.DataFrame(
        index=pd.MultiIndex.from_product([range(1, 13), range(1972, 2020)], names=['month', 'year']))
    for nd, thresh in itertools.product(ndays.keys(), thresholds.keys()):
        temp_data = data.copy(deep=True)
        ok = '{}_{}'.format(thresh, nd)
        out_keys.append(ok)
        for m in range(1, 13):
            idx = data.month == m
            temp_data.loc[idx, ok] = temp_data.loc[idx, 'roll_rain_10'] <= thresholds[thresh][m]
            temp_data.loc[idx, 'ndays'] = ndays[nd][m]
        # count flagged days per (month, year) and compare to the requirement
        temp_data = temp_data.groupby(['month', 'year']).agg({ok: 'sum', 'ndays': 'mean'})
        outdata.loc[:, ok] = temp_data.loc[:, ok] >= temp_data.loc[:, 'ndays']
    outdata.to_csv(os.path.join(event_def_dir, 'rolling_dry_monthly.csv'))
    outdata = outdata.reset_index()
    out = outdata.loc[:, ['month'] + out_keys].groupby(['month']).aggregate(['sum', prob])
    # drop keys that always or never fire
    drop_keys = []
    for k in out_keys:
        temp = (out.loc[:, k].loc[:, 'sum'] == 48).all() or (out.loc[:, k].loc[:, 'sum'] == 0).all()
        if temp:
            drop_keys.append(k)
    out = out.drop(columns=drop_keys)
    out, out_years = add_pga(outdata, set(out_keys) - set(drop_keys), out)
    # probabilities to percentages (sum/count columns excluded)
    t = pd.Series([' '.join(e) for e in out.columns])
    idx = ~((t.str.contains('sum')) | (t.str.contains('count')))
    out.loc[:, out.columns[idx]] *= 100
    out.to_csv(os.path.join(event_def_dir, 'rolling_dry_prob.csv'), float_format='%.1f%%')
    # BUG FIX: previously wrote 'variable_hot_prob_only_prob.csv', which
    # clobbered the file written by calc_hot_recurance_variable()
    out.loc[:, out.columns[idx]].to_csv(os.path.join(event_def_dir, 'rolling_dry_prob_only_prob.csv'),
                                        float_format='%.1f%%')
    out_years.to_csv(os.path.join(event_def_dir, 'rolling_dry_years.csv'))
    return list(set(out_keys) - set(drop_keys)), out
def calc_dry_recurance_ndays():
    """Dry-event recurrence from monthly rainfall totals vs. fixed limits.

    A month is an event when its total rainfall is at or below the
    month-specific limit in ``ndays``.  Writes the raw record, monthly
    data, probabilities and event years to CSVs in ``event_def_dir``.
    """
    ndays = {  # happy with this value other than middle ones; this did not end up getting used
        'lower_q': {  # based on the sma -20 10days
            1: 31,  # lower quartile of normal
            2: 45,  # lower quartile of normal
            3: 38,  # lower quartile of normal
            4: 46,  # lower quartile of normal, pair with 'hot' as pet is important in this month
            5: 37,  # lower quartile of normal, pair with 'hot' as pet is important in this month
            8: 35,  # lower quartile of normal, pair with 'hot' as pet is important in this month
            9: 30,  # lower quartile of normal, pair with 'hot' as pet is important in this month
            10: 53,  # lower quartile of normal
            11: 43,  # lower quartile of normal
            12: 47,  # lower quartile of normal
        },
        'up_5': {  # based on the sma -20 10days
            1: 31,  # lower quartile of normal
            2: 45,  # lower quartile of normal
            3: 38,  # lower quartile of normal
            4: 46 + 5,  # lower quartile of normal, pair with 'hot' as pet is important in this month
            5: 37 + 5,  # lower quartile of normal, pair with 'hot' as pet is important in this month
            8: 35 + 5,  # lower quartile of normal, pair with 'hot' as pet is important in this month
            9: 30 + 5,  # lower quartile of normal, pair with 'hot' as pet is important in this month
            10: 53,  # lower quartile of normal
            11: 43,  # lower quartile of normal
            12: 47,  # lower quartile of normal
        },
        'down_5': {  # based on the sma -20 10days
            1: 31,  # lower quartile of normal
            2: 45,  # lower quartile of normal
            3: 38,  # lower quartile of normal
            4: 46 - 5,  # lower quartile of normal, pair with 'hot' as pet is important in this month
            5: 37 - 5,  # lower quartile of normal, pair with 'hot' as pet is important in this month
            8: 35 - 5,  # lower quartile of normal, pair with 'hot' as pet is important in this month
            9: 30 - 5,  # lower quartile of normal, pair with 'hot' as pet is important in this month
            10: 53,  # lower quartile of normal
            11: 43,  # lower quartile of normal
            12: 47,  # lower quartile of normal
        },
        'down_7': {  # based on the sma -20 10days
            1: 31,  # lower quartile of normal
            2: 45,  # lower quartile of normal
            3: 38,  # lower quartile of normal
            4: 46 - 7,  # lower quartile of normal, pair with 'hot' as pet is important in this month
            5: 37 - 7,  # lower quartile of normal, pair with 'hot' as pet is important in this month
            8: 35 - 7,  # lower quartile of normal, pair with 'hot' as pet is important in this month
            9: 30 - 7,  # lower quartile of normal, pair with 'hot' as pet is important in this month
            10: 53,  # lower quartile of normal
            11: 43,  # lower quartile of normal
            12: 47,  # lower quartile of normal
        },
    }
    for v in ndays.values():
        # winter months get a -1 rainfall limit, which a rain total (>= 0)
        # can never reach, so they never fire
        v.update({
            6: -1,
            7: -1,
        })
    data = get_vcsn_record(vcsn_version).reset_index()
    temp = calc_sma_smd_historical(data['rain'], data['pet'], data.date, 150, 1)
    trans_cols = ['mean_doy_smd', 'sma', 'smd', 'drain', 'aet_out']
    data.loc[:, trans_cols] = temp.loc[:, trans_cols]
    # a 'rain day' is any day with more than 0.01 mm of rain
    data.loc[:, 'ndays_rain'] = (data.loc[:, 'rain'] > 0.01).astype(float)
    data.to_csv(os.path.join(event_def_dir, 'ndays_dry_raw.csv'))
    grouped_data = data.loc[:, ['month', 'year', 'rain', 'ndays_rain']].groupby(['month', 'year']).sum().reset_index()
    grouped_data.to_csv(os.path.join(event_def_dir, 'ndays_dry_monthly_data.csv'))
    grouped_data.drop(columns=['year']).groupby('month').describe().to_csv(os.path.join(event_def_dir,
                                                                                       'ndays_dry_monthly_data_desc.csv'))
    # number of n days: map each month to its rainfall limit and flag months
    # at or below it (note: compares monthly rain TOTAL, not rain-day count)
    out_keys2 = []
    for k, val in ndays.items():
        ok = '{}'.format(k)
        out_keys2.append(ok)
        grouped_data.loc[:, 'limit'] = grouped_data.loc[:, 'month']
        grouped_data = grouped_data.replace({'limit': val})
        grouped_data.loc[:, ok] = grouped_data.loc[:, 'rain'] <= grouped_data.loc[:, 'limit']
    out = grouped_data.loc[:, ['month'] + out_keys2].groupby(['month']).aggregate(['sum', prob])
    # drop keys that always or never fire
    drop_keys = []
    for k in out_keys2:
        temp = (out.loc[:, k].loc[:, 'sum'] == 48).all() or (out.loc[:, k].loc[:, 'sum'] == 0).all()
        if temp:
            drop_keys.append(k)
    out = out.drop(columns=drop_keys)
    out, out_years = add_pga(grouped_data, set(out_keys2) - set(drop_keys), out)
    # probabilities to percentages (sum/count columns excluded)
    t = pd.Series([' '.join(e) for e in out.columns])
    idx = ~((t.str.contains('sum')) | (t.str.contains('count')))
    out.loc[:, out.columns[idx]] *= 100
    out.to_csv(os.path.join(event_def_dir, 'ndays_dry_prob.csv'), float_format='%.1f%%')
    out.loc[:, out.columns[idx]].to_csv(os.path.join(event_def_dir, 'ndays_dry_prob_only_prob.csv'),
                                        float_format='%.1f%%')
    out_years.to_csv(os.path.join(event_def_dir, 'ndays_dry_years.csv'))
def calc_hot_recurance_variable():
    """Hot-event recurrence with month-dependent variable and thresholds.

    Uses tmax in most months but tmean in the shoulder months (Apr, May,
    Aug, Sep); for every (threshold set, ndays set) combination a month is
    an event when at least ``ndays`` days reach the month's temperature
    threshold.  Writes monthly event booleans, probabilities and event
    years to CSVs in ``event_def_dir``.
    """
    var_to_use = {
        1: 'tmax',
        2: 'tmax',
        3: 'tmax',
        4: 'tmean',  # to use in conjunction with dry to get actual dry
        5: 'tmean',  # to use in conjunction with dry to get actual dry
        6: 'tmax',
        7: 'tmax',
        8: 'tmean',  # to use in conjunction with dry to get actual dry
        9: 'tmean',  # to use in conjunction with dry to get actual dry
        10: 'tmax',
        11: 'tmax',
        12: 'tmax',
    }
    ndays = {
        '5day': {
            4: 5,
            5: 5,
            8: 5,
            9: 5,
        },
        '7day': {
            4: 7,
            5: 7,
            8: 7,
            9: 7,
        },
        '10day': {
            4: 10,
            5: 10,
            8: 10,
            9: 10,
        },
        '15day': {
            4: 15,
            5: 15,
            8: 15,
            9: 15,
        }
    }
    thresholds = {
        'upper_q': {  # based on the sma -20 10days
            4: 18,  # upper quartile of normal, pair with 'hot' as pet is important in this month
            5: 15,  # upper quartile of normal, pair with 'hot' as pet is important in this month
            8: 13,  # upper quartile of normal, pair with 'hot' as pet is important in this month
            9: 15,  # upper quartile of normal, pair with 'hot' as pet is important in this month
        },
        '2_less': {  # based on the sma -20 10days
            4: 18 - 2,  # upper quartile of normal, pair with 'hot' as pet is important in this month
            5: 15 - 2,  # upper quartile of normal, pair with 'hot' as pet is important in this month
            8: 13 - 2,  # upper quartile of normal, pair with 'hot' as pet is important in this month
            9: 15 - 2,  # upper quartile of normal, pair with 'hot' as pet is important in this month
        },
        '5_less': {  # based on the sma -20 10days
            4: 18 - 5,  # upper quartile of normal, pair with 'hot' as pet is important in this month
            5: 15 - 5,  # upper quartile of normal, pair with 'hot' as pet is important in this month
            8: 13 - 5,  # upper quartile of normal, pair with 'hot' as pet is important in this month
            9: 15 - 5,  # upper quartile of normal, pair with 'hot' as pet is important in this month
        },
        '7_less': {  # based on the sma -20 10days
            4: 18 - 7,  # upper quartile of normal, pair with 'hot' as pet is important in this month
            5: 15 - 7,  # upper quartile of normal, pair with 'hot' as pet is important in this month
            8: 13 - 7,  # upper quartile of normal, pair with 'hot' as pet is important in this month
            9: 15 - 7,  # upper quartile of normal, pair with 'hot' as pet is important in this month
        }
    }
    for v in thresholds.values():  # set for actual hot events
        v.update({
            1: 25,
            2: 25,
            3: 25,
            6: 25,
            7: 25,
            10: 25,
            11: 25,
            12: 25,
        })
    for v in ndays.values():  # set for actual hot events
        v.update({
            1: 7,
            2: 7,
            3: 7,
            6: 7,
            7: 7,
            10: 7,
            11: 7,
            12: 7,
        })
    data = get_vcsn_record(vcsn_version).reset_index()
    # daily mean temperature as the midpoint of tmax and tmin
    data.loc[:, 'tmean'] = (data.loc[:, 'tmax'] + data.loc[:, 'tmin']) / 2
    out_keys = []
    outdata = pd.DataFrame(index=pd.MultiIndex.from_product([range(1, 13), range(1972, 2020)], names=['month', 'year']))
    for thresh, nd in itertools.product(thresholds.keys(), ndays.keys()):
        temp_data = data.copy(deep=True)
        ok = '{}_{}'.format(thresh, nd)
        out_keys.append(ok)
        for m in range(1, 13):
            idx = data.month == m
            # month-specific variable (tmax or tmean) vs. month-specific threshold
            temp_data.loc[idx, ok] = temp_data.loc[idx, var_to_use[m]] >= thresholds[thresh][m]
            temp_data.loc[idx, 'ndays'] = ndays[nd][m]
        # count flagged days per (month, year) and compare to the requirement
        temp_data = temp_data.groupby(['month', 'year']).agg({ok: 'sum', 'ndays': 'mean'})
        outdata.loc[:, ok] = temp_data.loc[:, ok] >= temp_data.loc[:, 'ndays']
    outdata.to_csv(os.path.join(event_def_dir, 'variable_hot_monthly.csv'))
    outdata = outdata.reset_index()
    out = outdata.loc[:, ['month'] + out_keys].groupby(['month']).aggregate(['sum', prob])
    # drop keys that always or never fire
    drop_keys = []
    for k in out_keys:
        temp = (out.loc[:, k].loc[:, 'sum'] == 48).all() or (out.loc[:, k].loc[:, 'sum'] == 0).all()
        if temp:
            drop_keys.append(k)
    out = out.drop(columns=drop_keys)
    out, out_years = add_pga(outdata, set(out_keys) - set(drop_keys), out)
    # probabilities to percentages (sum/count columns excluded)
    t = pd.Series([' '.join(e) for e in out.columns])
    idx = ~((t.str.contains('sum')) | (t.str.contains('count')))
    out.loc[:, out.columns[idx]] *= 100
    out.to_csv(os.path.join(event_def_dir, 'variable_hot_prob.csv'), float_format='%.1f%%')
    out.loc[:, out.columns[idx]].to_csv(os.path.join(event_def_dir, 'variable_hot_prob_only_prob.csv'),
                                        float_format='%.1f%%')
    out_years.to_csv(os.path.join(event_def_dir, 'variable_hot_years.csv'))
def joint_hot_dry():
    """Joint probability / impact of co-occurring hot and dry events.

    Reads the per-event (month, year) listings written by
    ``calc_hot_recurance_variable`` and ``calc_dry_rolling``, intersects
    every hot definition with every dry definition, and writes the joint
    monthly probabilities and pasture-growth-anomaly impacts to CSVs in
    ``event_def_dir``.

    Returns
    -------
    (full_event_names, outdata) : list, pd.DataFrame
    """
    hot = pd.read_csv(os.path.join(event_def_dir, 'variable_hot_years.csv'), index_col=0)
    hot_keys = list(hot.keys())
    dry = pd.read_csv(os.path.join(event_def_dir, 'rolling_dry_years.csv'), index_col=0)
    dry_keys = list(dry.keys())
    data = pd.merge(hot, dry, left_index=True, right_index=True)
    # the CSV round-trip stored the (month, year) tuples as strings; parse
    # them back (float entries are the NaN padding)
    use_data = []
    for d in data.keys():
        use_data.append(
            pd.Series([np.nan if isinstance(t, float) else tuple(int(e) for e in t.strip('()').split(',')) for t in
                       data.loc[:, d]]))
    use_data = pd.concat(use_data, axis=1)
    use_data.columns = data.columns
    # expand the describe() stat names into paired _irr/_dry column names
    _org_describe_names = ['count', 'mean', 'std', 'min', '25%', '50%', '75%', 'max']
    _describe_names = []
    for e in _org_describe_names:
        _describe_names.extend(['{}_irr'.format(e), '{}_dry'.format(e)])
    full_event_names = ['hot:{}_dry:{}'.format(h, d) for h, d in itertools.product(hot_keys, dry_keys)]
    outdata = pd.DataFrame(index=pd.Series(range(1, 13), name='month'),
                           columns=pd.MultiIndex.from_product((full_event_names,
                                                               (['prob'] + _describe_names))
                                                              , names=['event', 'pga_desc']), dtype=float)
    # make base data
    print('making base data')
    for hot_nm, dry_nm in itertools.product(hot_keys, dry_keys):
        en = 'hot:{}_dry:{}'.format(hot_nm, dry_nm)
        # joint event = months present in BOTH the hot and the dry listing
        joint_event = pd.Series(list(set(use_data.loc[:, hot_nm]).intersection(set(use_data.loc[:, dry_nm]))))
        if joint_event.dropna().empty:
            continue
        temp = make_prob(joint_event)
        outdata.loc[temp.index, (en, 'prob')] = temp.values[:, 0]
        temp = add_pga_from_idx(joint_event)
        outdata.loc[temp.index, (en, _describe_names)] = temp.loc[:, _describe_names].values
    # probabilities to percentages (count columns excluded)
    t = pd.Series([' '.join(e) for e in outdata.columns])
    idx = ~((t.str.contains('sum')) | (t.str.contains('count')))
    outdata.loc[:, outdata.columns[idx]] *= 100
    outdata = outdata.sort_index(axis=1, level=0, sort_remaining=False)
    outdata.to_csv(os.path.join(event_def_dir, 'joint_hot_dry_prob.csv'), float_format='%.1f%%')
    idx = t.str.contains('prob')
    outdata.loc[:, outdata.columns[idx]].to_csv(os.path.join(event_def_dir, 'joint_hot_dry_prob_only_prob.csv'),
                                                float_format='%.1f%%')
    idx = t.str.contains('mean')
    outdata.loc[:, outdata.columns[idx]].to_csv(os.path.join(event_def_dir, 'joint_hot_dry_mean_impact.csv'),
                                                float_format='%.1f%%')
    return full_event_names, outdata
def make_prob(in_series, n_records=48):
    """Return the per-month probability of the events in ``in_series``.

    Parameters
    ----------
    in_series : pd.Series
        Series of (month, year) tuples identifying event occurrences.
        NaN entries are dropped before counting.
    n_records : int
        Number of records used as the probability denominator.  Default 48,
        presumably the length of the historical record in years (the VCSN
        pulls elsewhere in this file span 1972-2019 = 48 years).

    Returns
    -------
    pd.DataFrame
        Indexed by month; the single 'year' column holds count / n_records.
        Empty (with a 'month'-named index) when no events remain after dropna.
    """
    in_series = in_series.dropna()
    if in_series.empty:
        # np.atleast_2d on an empty list would raise a shape error below;
        # return an empty frame with the same structure instead
        return pd.DataFrame(columns=['year'], index=pd.Index([], name='month'), dtype=float)
    data = pd.DataFrame(np.atleast_2d(list(in_series.values)), columns=['month', 'year'])
    # count events per month and normalise by the record length
    out_series = data.groupby('month').count() / n_records
    return pd.DataFrame(out_series)
def old_calc_restrict_recurance():
    """Superseded monthly recurrence analysis of irrigation restrictions.

    Flags each day whose restriction fraction (f_rest) meets each threshold
    (half, 3/4, full), counts flagged days per (month, year), then writes the
    per-month probability of a month having >= n flagged days, plus monthly
    restriction anomalies, to csvs in event_def_dir.  Kept for reference;
    the current version is calc_restrict_cumulative_recurance.
    """
    data = get_restriction_record()
    # restriction-fraction thresholds and matching column-name tags
    thresholds = [0.5, 0.75, 1]
    tnames = ['half', '3/4', 'full']
    ndays = [1, 5, 7, 10, 14]  # event = at least this many flagged days in a month
    out_keys = []
    for thresh, tname in zip(thresholds, tnames):
        k = 'd_>{}_rest'.format(tname)
        data.loc[:, k] = data.loc[:, 'f_rest'] >= thresh
        out_keys.append(k)
    # per (month, year): number of days over each threshold; f_rest sums to a
    # cumulative (equivalent fully-restricted days) measure
    grouped_data = data.loc[:, ['month', 'year', 'f_rest'] + out_keys].groupby(['month', 'year']).sum().reset_index()
    # monthly restriction anomaly - relative to the monthly mean
    temp = grouped_data.groupby('month').mean().loc[:, 'f_rest'].to_dict()
    grouped_data.loc[:, 'f_rest_an_mean'] = grouped_data.loc[:, 'month']
    # replace maps each month number to that month's mean f_rest
    grouped_data = grouped_data.replace({'f_rest_an_mean': temp})
    grouped_data.loc[:, 'f_rest_an_mean'] = grouped_data.loc[:, 'f_rest'] - grouped_data.loc[:, 'f_rest_an_mean']
    # monthly restriction anomaly - relative to the monthly median
    temp = grouped_data.groupby('month').median().loc[:, 'f_rest'].to_dict()
    grouped_data.loc[:, 'f_rest_an_med'] = grouped_data.loc[:, 'month']
    grouped_data = grouped_data.replace({'f_rest_an_med': temp})
    grouped_data.loc[:, 'f_rest_an_med'] = grouped_data.loc[:, 'f_rest'] - grouped_data.loc[:, 'f_rest_an_med']
    grouped_data.to_csv(os.path.join(event_def_dir, 'rest_monthly_data.csv'))
    grouped_data.drop(columns=['year']).groupby('month').describe().to_csv(os.path.join(event_def_dir,
                                                                                        'rest_monthly_data_desc.csv'))
    # boolean event columns: month has >= nd days over each threshold
    out_keys2 = []
    for nd in ndays:
        for k in out_keys:
            ok = '{:02d}d_{}'.format(nd, k)
            out_keys2.append(ok)
            grouped_data.loc[:, ok] = grouped_data.loc[:, k] >= nd
    # per-month event count ('sum') and probability over the record (prob)
    out = grouped_data.loc[:, ['month'] + out_keys2].groupby(['month']).aggregate(['sum', prob])
    # drop events that occur always (48 of 48 records - presumably years; see
    # make_prob's /48) or never, as they are uninformative
    drop_keys = []
    for k in out_keys2:
        temp = (out.loc[:, k].loc[:, 'sum'] == 48).all() or (
                out.loc[:, k].loc[:, 'sum'] == 0).all()
        if temp:
            drop_keys.append(k)
    out = out.drop(columns=drop_keys)
    out, out_years = add_pga(grouped_data, set(out_keys2) - set(drop_keys), out)
    out_years.to_csv(os.path.join(event_def_dir, 'rest_years.csv'))
    # scale everything except count-like columns to percent for the csv output
    t = pd.Series([' '.join(e) for e in out.columns])
    idx = ~((t.str.contains('sum')) | (t.str.contains('count')))
    out.loc[:, out.columns[idx]] *= 100
    out.to_csv(os.path.join(event_def_dir, 'old_rest_prob.csv'), float_format='%.1f%%')
    out.loc[:, out.columns[idx]].to_csv(os.path.join(event_def_dir, 'old_rest_prob_only_prob.csv'),
                                        float_format='%.1f%%')
def calc_restrict_cumulative_recurance():
    """Calculate monthly recurrence probabilities of cumulative restriction events.

    An event is a (month, year) whose summed restriction fraction (f_rest,
    equivalent fully-restricted days) reaches a threshold.  Thresholds are
    either fixed day counts or, for the 'eqlikly' key, a month-specific count
    (presumably tuned so the event is roughly equally likely in every month -
    confirm against how these thresholds were derived).  Also writes monthly
    restriction anomalies.  All outputs go to event_def_dir.
    """
    data = get_restriction_record()
    ndays = [1, 5, 7, 10, 14, 21, 25, 29]
    ndays = {'{:02d}'.format(e): e for e in ndays}  # label -> threshold (days)
    # month -> threshold (days) for the month-specific event
    temp = {1: 10,
            2: 17,
            3: 17,
            4: 10,
            5: 7,
            6: 10,
            7: 10,
            8: 10,
            9: 7,
            10: 5,
            11: 5,
            12: 7,
            }
    # note don't use 'prob' in this name! the column filter for the
    # *_only_prob.csv output below matches on the substring 'prob'
    ndays['eqlikly'] = temp
    grouped_data = data.loc[:, ['month', 'year', 'f_rest']].groupby(['month', 'year']).sum().reset_index()
    # monthly restriction anomaly - relative to the monthly mean
    temp = grouped_data.groupby('month').mean().loc[:, 'f_rest'].to_dict()
    grouped_data.loc[:, 'f_rest_an_mean'] = grouped_data.loc[:, 'month']
    # replace maps each month number to that month's mean f_rest
    grouped_data = grouped_data.replace({'f_rest_an_mean': temp})
    grouped_data.loc[:, 'f_rest_an_mean'] = grouped_data.loc[:, 'f_rest'] - grouped_data.loc[:, 'f_rest_an_mean']
    # monthly restriction anomaly - relative to the monthly median
    temp = grouped_data.groupby('month').median().loc[:, 'f_rest'].to_dict()
    grouped_data.loc[:, 'f_rest_an_med'] = grouped_data.loc[:, 'month']
    grouped_data = grouped_data.replace({'f_rest_an_med': temp})
    grouped_data.loc[:, 'f_rest_an_med'] = grouped_data.loc[:, 'f_rest'] - grouped_data.loc[:, 'f_rest_an_med']
    grouped_data.to_csv(os.path.join(event_def_dir, 'rest_monthly_data.csv'))
    grouped_data.drop(columns=['year']).groupby('month').describe().to_csv(os.path.join(event_def_dir,
                                                                                        'rest_monthly_data_desc.csv'))
    # boolean event columns: month's cumulative restriction meets the threshold
    out_keys2 = []
    for k, nd in ndays.items():
        ok = '{}d_rest'.format(k)
        out_keys2.append(ok)
        if isinstance(nd, int):
            # fixed threshold for all months
            grouped_data.loc[:, ok] = grouped_data.loc[:, 'f_rest'] >= nd
        elif isinstance(nd, dict):
            # month-specific threshold: map each row's month to its threshold
            grouped_data.loc[:, ok] = grouped_data.loc[:, 'f_rest'] >= grouped_data.loc[:, 'month'].replace(nd)
        else:
            raise ValueError('unexpected type for nd: {}'.format(type(nd)))
    # per-month event count ('sum') and probability over the record (prob)
    out = grouped_data.loc[:, ['month'] + out_keys2].groupby(['month']).aggregate(['sum', prob])
    # drop events that occur always (48 of 48 records) or never - uninformative
    drop_keys = []
    for k in out_keys2:
        temp = (out.loc[:, k].loc[:, 'sum'] == 48).all() or (
                out.loc[:, k].loc[:, 'sum'] == 0).all()
        if temp:
            drop_keys.append(k)
    out = out.drop(columns=drop_keys)
    out, out_years = add_pga(grouped_data, set(out_keys2) - set(drop_keys), out)
    out_years.to_csv(os.path.join(event_def_dir, 'rest_years.csv'))
    # scale everything except count-like columns to percent for the csv output
    t = pd.Series([' '.join(e) for e in out.columns])
    idx = ~((t.str.contains('sum')) | (t.str.contains('count')))
    out.loc[:, out.columns[idx]] *= 100
    out.to_csv(os.path.join(event_def_dir, 'rest_prob.csv'), float_format='%.1f%%')
    # second output keeps only the probability and count columns
    idx = (t.str.contains('prob') | t.str.contains('sum'))
    out.loc[:, out.columns[idx]].to_csv(os.path.join(event_def_dir, 'rest_prob_only_prob.csv'), float_format='%.1f%%')
def calc_restrict_recurance():
    """Calculate monthly recurrence probabilities of consecutive-restriction events.

    Identifies runs of consecutive days at or above each restriction-fraction
    threshold, summarises run statistics per (month, year), then reports the
    per-month probability of a month having a longest run >= l days AND >= nd
    total restricted days.  Outputs written to event_def_dir.
    """
    data = get_restriction_record()
    thresholds = [0.001, 0.5, 0.75, 1]  # 0.001 effectively means 'any restriction'
    tnames = ['any', 'half', '75rest', 'full']
    con_days = [5, 7, 10]  # minimum consecutive-run lengths tested
    ndays = [5, 7, 10, 15, 20]  # minimum total restricted days in the month tested
    consecutive_data = {}
    for tnm, t in zip(tnames, thresholds):
        test_value = tnm
        data.loc[:, test_value] = data.loc[:, 'f_rest'] >= t
        # run id: the cumsum increments whenever year, month, or the flag
        # changes, so each within-month run of identical flag values shares one id
        data.loc[:, 'con_id'] = (data.loc[:, ['year',
                                              'month',
                                              test_value]].diff(1) != 0).any(axis=1).astype('int').cumsum().values
        temp = data.loc[data[test_value]].groupby('con_id')
        # one row per run: its year/month (constant within a run, so mean is
        # exact) and its length via the group size
        consecutive_data[tnm] = temp.agg({'year': 'mean', 'month': 'mean', test_value: 'size'}).reset_index()
    out_columns = ['total_rest_days', 'num_per', 'mean_per_len', 'min_per_len', 'max_per_len']
    rename_mapper = {'sum': 'total_rest_days', 'count': 'num_per',
                     'mean': 'mean_per_len', 'min': 'min_per_len', 'max': 'max_per_len'}
    # (year, month) x (threshold, run statistic) frame; NaN where no runs occurred
    all_data = pd.DataFrame(
        index=pd.MultiIndex.from_product([set(data.year), set(data.month)], names=['year', 'month']),
        columns=pd.MultiIndex.from_product([tnames, out_columns]))
    all_data.loc[:] = np.nan
    for k, v in consecutive_data.items():
        v.to_csv(os.path.join(event_def_dir, 'len_rest_{}_raw.csv'.format(k)))
        temp = v.groupby(['year', 'month']).agg({k: ['sum', 'count',
                                                     'mean', 'min', 'max']})
        temp = temp.rename(columns=rename_mapper, level=1)
        all_data = all_data.combine_first(temp)
    all_data = all_data.loc[:, (tnames, out_columns)]
    # description excluding months without restriction (those are still NaN here)
    all_data.reset_index().astype(float).groupby('month').describe().to_csv(os.path.join(event_def_dir,
                                                                                         'len_rest_month_desc_no_zeros.csv'))
    # per-month probability of a month having no restriction at all
    t = all_data['any']['num_per'].isna().reset_index().groupby('month').agg({'num_per': ['sum', prob]})
    t.to_csv(os.path.join(event_def_dir, 'len_rest_prob_no_rest.csv'))
    all_data = all_data.fillna(0)
    all_data.to_csv(os.path.join(event_def_dir, 'len_rest_monthly.csv'))
    all_data.reset_index().groupby('month').describe().to_csv(
        os.path.join(event_def_dir, 'len_rest_month_desc_with_zeros.csv'))
    # boolean event columns: longest run >= l AND total restricted days >= nd
    prob_data = pd.DataFrame(index=all_data.index)
    for rt, l, nd in itertools.product(tnames, con_days, ndays):
        prob_data.loc[:, '{}d_{}_{}tot'.format(l, rt, nd)] = ((all_data.loc[:, (rt, 'max_per_len')] >= l) &
                                                              (all_data.loc[:, (rt, 'total_rest_days')] >= nd))
    out = prob_data.reset_index().groupby('month').agg(['sum', prob])
    out_keys2 = set(out.columns.levels[0]) - {'year'}
    # drop events that occur always (48 of 48 records) or never - uninformative
    drop_keys = []
    for k in out_keys2:
        temp = (out.loc[:, k].loc[:, 'sum'] == 48).all() or (
                out.loc[:, k].loc[:, 'sum'] == 0).all()
        if temp:
            drop_keys.append(k)
    out = out.drop(columns=drop_keys)
    out, out_years = add_pga(prob_data.reset_index(), set(out_keys2) - set(drop_keys), out)
    # scale everything except count-like columns to percent for the csv output
    t = pd.Series([' '.join(e) for e in out.columns])
    idx = ~((t.str.contains('sum')) | (t.str.contains('count')))
    out.loc[:, out.columns[idx]] *= 100
    out.to_csv(os.path.join(event_def_dir, 'len_rest_prob.csv'), float_format='%.1f%%')
    out_years.to_csv(os.path.join(event_def_dir, 'len_rest_years.csv'))
    out.loc[:, out.columns[idx]].to_csv(os.path.join(event_def_dir, 'len_rest_prob_only_prob.csv'),
                                        float_format='%.1f%%')
def calc_cold_recurance():
    """Calculate monthly recurrence probabilities of cold events.

    A cold event is >= n days in a month whose 3-day rolling mean daily
    temperature is at or below a threshold.  Writes raw, monthly, descriptive,
    and probability csvs into event_def_dir.
    """
    data = get_vcsn_record(vcsn_version)
    # daily mean temperature from the min/max record; keep an unsmoothed copy
    # in the raw output for reference
    data.loc[:, 'tmean'] = (data.loc[:, 'tmax'] + data.loc[:, 'tmin']) / 2
    data.loc[:, 'tmean_raw'] = (data.loc[:, 'tmax'] + data.loc[:, 'tmin']) / 2
    # 3-day rolling mean damps single-day spikes
    data.loc[:, 'tmean'] = data.loc[:, 'tmean'].rolling(3).mean()
    data.to_csv(os.path.join(event_def_dir, 'rolling_cold_raw.csv'))
    thresholds = [0, 5, 7, 10, 12]  # deg C
    use_vars = ['tmean']  # renamed from `vars` so the builtin is not shadowed
    ndays = [3, 5, 7, 10, 14]
    out_keys = []
    for thresh, v in itertools.product(thresholds, use_vars):
        k = 'd_{}_{:02d}'.format(v, thresh)
        data.loc[:, k] = data.loc[:, v] <= thresh
        out_keys.append(k)
    # per (month, year): count threshold days, average the temperature vars
    aggs = {e: 'sum' for e in out_keys}
    aggs.update({e: 'mean' for e in use_vars})
    grouped_data = data.loc[:, ['month', 'year'] + use_vars + out_keys].groupby(['month', 'year'])
    grouped_data = grouped_data.aggregate(aggs).reset_index()
    grouped_data.to_csv(os.path.join(event_def_dir, 'rolling_cold_monthly_data.csv'))
    grouped_data.drop(columns=['year']).groupby('month').describe().to_csv(os.path.join(event_def_dir,
                                                                                        'rolling_cold_monthly_data_desc.csv'))
    # boolean event columns: month has >= nd days at/below each threshold
    out_keys2 = []
    for nd in ndays:
        for k in out_keys:
            ok = '{:02d}d_{}'.format(nd, k)
            out_keys2.append(ok)
            grouped_data.loc[:, ok] = grouped_data.loc[:, k] >= nd
    # per-month event count ('sum') and probability over the record (prob)
    out = grouped_data.loc[:, ['month'] + out_keys2].groupby(['month']).aggregate(['sum', prob])
    # drop events that occur always (48 of 48 records) or never - uninformative
    drop_keys = []
    for k in out_keys2:
        temp = (out.loc[:, k].loc[:, 'sum'] == 48).all() or (
                out.loc[:, k].loc[:, 'sum'] == 0).all()
        if temp:
            drop_keys.append(k)
    out = out.drop(columns=drop_keys)
    out, out_years = add_pga(grouped_data, set(out_keys2) - set(drop_keys), out)
    # scale everything except count-like columns to percent for the csv output
    t = pd.Series([' '.join(e) for e in out.columns])
    idx = ~((t.str.contains('sum')) | (t.str.contains('count')))
    out.loc[:, out.columns[idx]] *= 100
    out.to_csv(os.path.join(event_def_dir, 'rolling_cold_prob.csv'), float_format='%.1f%%')
    out_years.to_csv(os.path.join(event_def_dir, 'rolling_cold_years.csv'))
    out.loc[:, out.columns[idx]].to_csv(os.path.join(event_def_dir, 'rolling_cold_prob_only_prob.csv'),
                                        float_format='%.1f%%')
def calc_hot_recurance():
    """Calculate monthly recurrence probabilities of hot events.

    A hot event is >= n days in a month with tmax (or daily mean temperature)
    at or above a threshold.  Writes raw, monthly, descriptive, and
    probability csvs into event_def_dir.
    """
    data = get_vcsn_record(vcsn_version)
    # daily mean temperature from the min/max record
    data.loc[:, 'tmean'] = (data.loc[:, 'tmax'] + data.loc[:, 'tmin']) / 2
    data.to_csv(os.path.join(event_def_dir, 'temp_raw.csv'))
    thresholds = [20, 25, 28, 30, 35]  # deg C
    use_vars = ['tmax', 'tmean']  # renamed from `vars` so the builtin is not shadowed
    ndays = [3, 5, 7, 10, 14]
    out_keys = []
    for thresh, v in itertools.product(thresholds, use_vars):
        k = 'd_{}_{:02d}'.format(v, thresh)
        data.loc[:, k] = data.loc[:, v] >= thresh
        out_keys.append(k)
    # per (month, year): count threshold days, average the temperature vars
    aggs = {e: 'sum' for e in out_keys}
    aggs.update({e: 'mean' for e in use_vars})
    grouped_data = data.loc[:, ['month', 'year'] + use_vars + out_keys].groupby(['month', 'year'])
    grouped_data = grouped_data.aggregate(aggs).reset_index()
    grouped_data.to_csv(os.path.join(event_def_dir, 'hot_monthly_data.csv'))
    grouped_data.drop(columns=['year']).groupby('month').describe().to_csv(os.path.join(event_def_dir,
                                                                                        'hot_monthly_data_desc.csv'))
    # boolean event columns: month has >= nd days at/above each threshold
    out_keys2 = []
    for nd in ndays:
        for k in out_keys:
            ok = '{:02d}d_{}'.format(nd, k)
            out_keys2.append(ok)
            grouped_data.loc[:, ok] = grouped_data.loc[:, k] >= nd
    # per-month event count ('sum') and probability over the record (prob)
    out = grouped_data.loc[:, ['month'] + out_keys2].groupby(['month']).aggregate(['sum', prob])
    # drop events that occur always (48 of 48 records) or never - uninformative
    drop_keys = []
    for k in out_keys2:
        temp = (out.loc[:, k].loc[:, 'sum'] == 48).all() or (
                out.loc[:, k].loc[:, 'sum'] == 0).all()
        if temp:
            drop_keys.append(k)
    out = out.drop(columns=drop_keys)
    out, out_years = add_pga(grouped_data, set(out_keys2) - set(drop_keys), out)
    # scale everything except count-like columns to percent for the csv output
    t = pd.Series([' '.join(e) for e in out.columns])
    idx = ~((t.str.contains('sum')) | (t.str.contains('count')))
    out.loc[:, out.columns[idx]] *= 100
    out.to_csv(os.path.join(event_def_dir, 'hot_prob.csv'), float_format='%.1f%%')
    out.loc[:, out.columns[idx]].to_csv(os.path.join(event_def_dir, 'hot_prob_only_prob.csv'), float_format='%.1f%%')
    out_years.to_csv(os.path.join(event_def_dir, 'hot_years.csv'))
def plot_vcsn_smd():
    """Plot PET/AET, rain, SMD and SMA time series for the fixed VCSN site.

    Pulls PET ('evspsblpot') and precipitation ('pr') for 1972-2019, runs the
    historical SMD/SMA calculation, and shows a four-panel shared-x figure.
    """
    site_data, coords = vcsn_pull_single_site(
        lat=-43.358,
        lon=172.301,
        year_min=1972,
        year_max=2019,
        use_vars=('evspsblpot', 'pr'))
    print(coords)
    smd_out = calc_sma_smd_historical(site_data['pr'], site_data['evspsblpot'], site_data.date, 150, 1)
    smd_cols = ['mean_doy_smd', 'sma', 'smd', 'drain', 'aet_out']
    site_data.loc[:, smd_cols] = smd_out.loc[:, smd_cols]
    site_data.set_index('date', inplace=True)
    fig, axs = plt.subplots(4, 1, sharex=True)
    # (axis, [(column, legend label), ...]) for each panel, top to bottom
    panels = [
        (axs[0], [('evspsblpot', 'pet'), ('aet_out', 'aet')]),
        (axs[1], [('pr', 'rain')]),
        (axs[2], [('smd', 'smd'), ('mean_doy_smd', 'daily_mean_smd')]),
        (axs[3], [('sma', 'sma')]),
    ]
    for ax, series_specs in panels:
        for col, label in series_specs:
            ax.plot(site_data.index, site_data[col], label=label)
    # zero line on the soil-moisture-anomaly panel
    axs[3].axhline(ls='--', c='k')
    for ax, _ in panels:
        ax.legend()
    plt.show()
def check_vcns_data():
    """Quick visual QA: plot every pulled VCSN variable on its own figure."""
    vcsn_data, coords = vcsn_pull_single_site(
        lat=-43.358,
        lon=172.301,
        year_min=1972,
        year_max=2019,
        use_vars='all')
    print(coords)
    vcsn_data.set_index('date', inplace=True)
    # one figure per variable so odd values stand out
    for var in vcsn_data.keys():
        fig, ax = plt.subplots()
        ax.plot(vcsn_data.index, vcsn_data[var])
        ax.set_title(var)
    plt.show()
def plot_restriction_record():
    """Plot the restriction-fraction (f_rest) time series from the record."""
    record = get_restriction_record()
    fig, ax = plt.subplots()
    ax.plot(pd.to_datetime(record['date']), record['f_rest'])
    plt.show()
if __name__ == '__main__':
    # final run set up - regenerate all event-definition csv outputs
    calc_dry_recurance_monthly_smd()
    calc_dry_recurance()
    calc_hot_recurance()
    calc_cold_recurance()
    calc_wet_recurance_ndays()
    calc_restrict_cumulative_recurance()
| 41.512774
| 129
| 0.571564
| 6,530
| 45,498
| 3.77075
| 0.060643
| 0.041222
| 0.034399
| 0.038541
| 0.771636
| 0.748853
| 0.722211
| 0.71023
| 0.705316
| 0.680055
| 0
| 0.031038
| 0.260011
| 45,498
| 1,095
| 130
| 41.550685
| 0.700309
| 0.091389
| 0
| 0.471186
| 0
| 0
| 0.112057
| 0.030506
| 0
| 0
| 0
| 0
| 0.00113
| 1
| 0.022599
| false
| 0
| 0.012429
| 0
| 0.041808
| 0.00565
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c82cc41e23c5fb855d2ef58b1455def187bd6258
| 116
|
py
|
Python
|
opendeep/models/container/__init__.py
|
vitruvianscience/OpenDeep
|
e96efc449101094354b615cf15afe6d03644fc36
|
[
"Apache-2.0"
] | 252
|
2015-03-13T21:55:22.000Z
|
2021-09-06T21:37:38.000Z
|
opendeep/models/container/__init__.py
|
afcarl/OpenDeep
|
e96efc449101094354b615cf15afe6d03644fc36
|
[
"Apache-2.0"
] | 16
|
2015-03-14T06:47:04.000Z
|
2016-09-23T19:13:35.000Z
|
opendeep/models/container/__init__.py
|
afcarl/OpenDeep
|
e96efc449101094354b615cf15afe6d03644fc36
|
[
"Apache-2.0"
] | 68
|
2015-03-14T00:05:53.000Z
|
2020-06-04T13:36:13.000Z
|
from __future__ import division, absolute_import, print_function
from .prototype import *
from .repeating import *
| 23.2
| 64
| 0.818966
| 14
| 116
| 6.357143
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12931
| 116
| 4
| 65
| 29
| 0.881188
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.333333
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c836b11e39c9d749595a367b734a3381d0b0adb6
| 65
|
py
|
Python
|
test/test.py
|
fanyongkang87/ykfan_utils
|
de4d27218ddeaaa2dcf5b2434bdc8fb2646ba431
|
[
"MIT"
] | 1
|
2019-06-06T10:17:05.000Z
|
2019-06-06T10:17:05.000Z
|
test/test.py
|
fanyongkang87/ykfan_utils
|
de4d27218ddeaaa2dcf5b2434bdc8fb2646ba431
|
[
"MIT"
] | null | null | null |
test/test.py
|
fanyongkang87/ykfan_utils
|
de4d27218ddeaaa2dcf5b2434bdc8fb2646ba431
|
[
"MIT"
] | null | null | null |
from ykfan_utils import clear_dir
path = 'test'
clear_dir(path)
| 13
| 33
| 0.784615
| 11
| 65
| 4.363636
| 0.727273
| 0.333333
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138462
| 65
| 4
| 34
| 16.25
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0.061538
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
c85efc9fa206700830e967ce6d433863c964b0b5
| 150
|
py
|
Python
|
travis_doc/tests/test_travis_doc.py
|
has2k1/travis_doc
|
1317091ebb224feb7627ab3e7a6a2e294afd203b
|
[
"BSD-3-Clause"
] | null | null | null |
travis_doc/tests/test_travis_doc.py
|
has2k1/travis_doc
|
1317091ebb224feb7627ab3e7a6a2e294afd203b
|
[
"BSD-3-Clause"
] | null | null | null |
travis_doc/tests/test_travis_doc.py
|
has2k1/travis_doc
|
1317091ebb224feb7627ab3e7a6a2e294afd203b
|
[
"BSD-3-Clause"
] | null | null | null |
from ..travis_doc import function1, function2
def test_function1():
    """function1 is expected to return 1."""
    assert function1() == 1
def test_function2():
    """function2 is expected to return 2."""
    assert function2() == 2
| 15
| 45
| 0.693333
| 18
| 150
| 5.611111
| 0.611111
| 0.138614
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066116
| 0.193333
| 150
| 9
| 46
| 16.666667
| 0.768595
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.4
| true
| 0
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
c07fd5c92cddd474d26f9a3b13b50e1df348926a
| 412
|
py
|
Python
|
python_submitty_utils/submitty_utils/db_utils.py
|
zeez2030/Submitty
|
7118944ff4adc6f15d76984eb10a1e862926d724
|
[
"BSD-3-Clause"
] | 411
|
2016-06-14T20:52:25.000Z
|
2022-03-31T21:20:25.000Z
|
python_submitty_utils/submitty_utils/db_utils.py
|
KaelanWillauer/Submitty
|
cf9b6ceda15ec0a661e2ca81ea7864790094c64a
|
[
"BSD-3-Clause"
] | 5,730
|
2016-05-23T21:04:32.000Z
|
2022-03-31T10:08:06.000Z
|
python_submitty_utils/submitty_utils/db_utils.py
|
KaelanWillauer/Submitty
|
cf9b6ceda15ec0a661e2ca81ea7864790094c64a
|
[
"BSD-3-Clause"
] | 423
|
2016-09-22T21:11:30.000Z
|
2022-03-29T18:55:28.000Z
|
"""Utilities for interacting with databases"""
def generate_connect_string(
    host: str,
    port: int,
    db: str,
    user: str,
    password: str,
) -> str:
    """Build a postgresql:// connection string for the given credentials.

    A host beginning with '/' is treated as a unix-socket directory and is
    passed via the ``?host=`` query parameter; otherwise ``host:port`` goes
    in the authority section of the URI.
    """
    uses_socket = host.startswith('/')
    parts = [f"postgresql://{user}:{password}@"]
    if not uses_socket:
        parts.append(f"{host}:{port}")
    parts.append(f"/{db}")
    if uses_socket:
        parts.append(f"?host={host}")
    return "".join(parts)
| 20.6
| 52
| 0.587379
| 50
| 412
| 4.7
| 0.46
| 0.212766
| 0.187234
| 0.204255
| 0.246809
| 0.246809
| 0
| 0
| 0
| 0
| 0
| 0
| 0.240291
| 412
| 19
| 53
| 21.684211
| 0.750799
| 0.097087
| 0
| 0
| 1
| 0
| 0.172131
| 0.084699
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0.142857
| 0
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
c08accba25b56f2ec09b67c838ec425c7ba720e6
| 106
|
py
|
Python
|
python_mock/raise_exception.py
|
enamrik/python-mock
|
817bf1ada9346445912e2cd2e2f65306e21cb8d8
|
[
"MIT"
] | null | null | null |
python_mock/raise_exception.py
|
enamrik/python-mock
|
817bf1ada9346445912e2cd2e2f65306e21cb8d8
|
[
"MIT"
] | null | null | null |
python_mock/raise_exception.py
|
enamrik/python-mock
|
817bf1ada9346445912e2cd2e2f65306e21cb8d8
|
[
"MIT"
] | null | null | null |
class RaiseException:
    """Wrapper holding an exception instance.

    NOTE(review): presumably consumers detect this wrapper and raise the
    stored exception instead of returning it - confirm against the mock
    framework's usage.
    """

    def __init__(self, exception: Exception):
        # the exception instance to be surfaced by the consumer
        self.exception = exception
| 15.142857
| 45
| 0.698113
| 10
| 106
| 7
| 0.6
| 0.371429
| 0.628571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.226415
| 106
| 6
| 46
| 17.666667
| 0.853659
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c08cb34c5c8b6f18d3ecc65338b30cd5ef9a9f84
| 196,254
|
py
|
Python
|
jasmine-sql/jasmine/sql/parser/SQLParserListener.py
|
andrewsmike/jasmine
|
5209a65975e067eb0678b03dd3abf89729f1e5f2
|
[
"Apache-2.0"
] | 3
|
2022-02-01T06:39:29.000Z
|
2022-02-01T13:08:17.000Z
|
jasmine-sql/jasmine/sql/parser/autogen/SQLParserListener.py
|
andrewsmike/jasmine
|
5209a65975e067eb0678b03dd3abf89729f1e5f2
|
[
"Apache-2.0"
] | null | null | null |
jasmine-sql/jasmine/sql/parser/autogen/SQLParserListener.py
|
andrewsmike/jasmine
|
5209a65975e067eb0678b03dd3abf89729f1e5f2
|
[
"Apache-2.0"
] | null | null | null |
# Generated from SQLParser.g4 by ANTLR 4.9.3
from antlr4 import *
if __name__ is not None and "." in __name__:
from .SQLParser import SQLParser
else:
from SQLParser import SQLParser
"""
Copyright (c) 2018, 2020, Oracle and/or its affiliates. All rights reserved.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License, version 2.0,
as published by the Free Software Foundation.
This program is also distributed with certain software (including
but not limited to OpenSSL) that is licensed under separate terms, as
designated in a particular file or component or in included license
documentation. The authors of MySQL hereby grant you an additional
permission to link the program and your derivative works with the
separately licensed software that they have included with MySQL.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
the GNU General Public License, version 2.0, for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
# mypy: ignore-errors
from jasmine.sql.parser.sql_base import *
# This class defines a complete listener for a parse tree produced by SQLParser.
class SQLParserListener(ParseTreeListener):
# Enter a parse tree produced by SQLParser#sqlProgram.
def enterSqlProgram(self, ctx:SQLParser.SqlProgramContext):
pass
# Exit a parse tree produced by SQLParser#sqlProgram.
def exitSqlProgram(self, ctx:SQLParser.SqlProgramContext):
pass
# Enter a parse tree produced by SQLParser#statement.
def enterStatement(self, ctx:SQLParser.StatementContext):
pass
# Exit a parse tree produced by SQLParser#statement.
def exitStatement(self, ctx:SQLParser.StatementContext):
pass
# Enter a parse tree produced by SQLParser#simpleStatement.
def enterSimpleStatement(self, ctx:SQLParser.SimpleStatementContext):
pass
# Exit a parse tree produced by SQLParser#simpleStatement.
def exitSimpleStatement(self, ctx:SQLParser.SimpleStatementContext):
pass
# Enter a parse tree produced by SQLParser#alterStatement.
def enterAlterStatement(self, ctx:SQLParser.AlterStatementContext):
pass
# Exit a parse tree produced by SQLParser#alterStatement.
def exitAlterStatement(self, ctx:SQLParser.AlterStatementContext):
pass
# Enter a parse tree produced by SQLParser#alterDatabase.
def enterAlterDatabase(self, ctx:SQLParser.AlterDatabaseContext):
pass
# Exit a parse tree produced by SQLParser#alterDatabase.
def exitAlterDatabase(self, ctx:SQLParser.AlterDatabaseContext):
pass
# Enter a parse tree produced by SQLParser#alterEvent.
def enterAlterEvent(self, ctx:SQLParser.AlterEventContext):
pass
# Exit a parse tree produced by SQLParser#alterEvent.
def exitAlterEvent(self, ctx:SQLParser.AlterEventContext):
pass
# Enter a parse tree produced by SQLParser#alterLogfileGroup.
def enterAlterLogfileGroup(self, ctx:SQLParser.AlterLogfileGroupContext):
pass
# Exit a parse tree produced by SQLParser#alterLogfileGroup.
def exitAlterLogfileGroup(self, ctx:SQLParser.AlterLogfileGroupContext):
pass
# Enter a parse tree produced by SQLParser#alterLogfileGroupOptions.
def enterAlterLogfileGroupOptions(self, ctx:SQLParser.AlterLogfileGroupOptionsContext):
pass
# Exit a parse tree produced by SQLParser#alterLogfileGroupOptions.
def exitAlterLogfileGroupOptions(self, ctx:SQLParser.AlterLogfileGroupOptionsContext):
pass
# Enter a parse tree produced by SQLParser#alterLogfileGroupOption.
def enterAlterLogfileGroupOption(self, ctx:SQLParser.AlterLogfileGroupOptionContext):
pass
# Exit a parse tree produced by SQLParser#alterLogfileGroupOption.
def exitAlterLogfileGroupOption(self, ctx:SQLParser.AlterLogfileGroupOptionContext):
pass
# Enter a parse tree produced by SQLParser#alterServer.
def enterAlterServer(self, ctx:SQLParser.AlterServerContext):
pass
# Exit a parse tree produced by SQLParser#alterServer.
def exitAlterServer(self, ctx:SQLParser.AlterServerContext):
pass
# Enter a parse tree produced by SQLParser#alterTable.
def enterAlterTable(self, ctx:SQLParser.AlterTableContext):
pass
# Exit a parse tree produced by SQLParser#alterTable.
def exitAlterTable(self, ctx:SQLParser.AlterTableContext):
pass
# Enter a parse tree produced by SQLParser#alterTableActions.
def enterAlterTableActions(self, ctx:SQLParser.AlterTableActionsContext):
pass
# Exit a parse tree produced by SQLParser#alterTableActions.
def exitAlterTableActions(self, ctx:SQLParser.AlterTableActionsContext):
pass
# Enter a parse tree produced by SQLParser#alterCommandList.
def enterAlterCommandList(self, ctx:SQLParser.AlterCommandListContext):
pass
# Exit a parse tree produced by SQLParser#alterCommandList.
def exitAlterCommandList(self, ctx:SQLParser.AlterCommandListContext):
pass
# Enter a parse tree produced by SQLParser#alterCommandsModifierList.
def enterAlterCommandsModifierList(self, ctx:SQLParser.AlterCommandsModifierListContext):
pass
# Exit a parse tree produced by SQLParser#alterCommandsModifierList.
def exitAlterCommandsModifierList(self, ctx:SQLParser.AlterCommandsModifierListContext):
pass
# Enter a parse tree produced by SQLParser#standaloneAlterCommands.
def enterStandaloneAlterCommands(self, ctx:SQLParser.StandaloneAlterCommandsContext):
pass
# Exit a parse tree produced by SQLParser#standaloneAlterCommands.
def exitStandaloneAlterCommands(self, ctx:SQLParser.StandaloneAlterCommandsContext):
pass
# Enter a parse tree produced by SQLParser#alterPartition.
def enterAlterPartition(self, ctx:SQLParser.AlterPartitionContext):
pass
# Exit a parse tree produced by SQLParser#alterPartition.
def exitAlterPartition(self, ctx:SQLParser.AlterPartitionContext):
pass
# Enter a parse tree produced by SQLParser#alterList.
def enterAlterList(self, ctx:SQLParser.AlterListContext):
pass
# Exit a parse tree produced by SQLParser#alterList.
def exitAlterList(self, ctx:SQLParser.AlterListContext):
pass
# Enter a parse tree produced by SQLParser#alterCommandsModifier.
def enterAlterCommandsModifier(self, ctx:SQLParser.AlterCommandsModifierContext):
pass
# Exit a parse tree produced by SQLParser#alterCommandsModifier.
def exitAlterCommandsModifier(self, ctx:SQLParser.AlterCommandsModifierContext):
pass
# Enter a parse tree produced by SQLParser#alterListItem.
def enterAlterListItem(self, ctx:SQLParser.AlterListItemContext):
pass
# Exit a parse tree produced by SQLParser#alterListItem.
def exitAlterListItem(self, ctx:SQLParser.AlterListItemContext):
pass
# Enter a parse tree produced by SQLParser#place.
def enterPlace(self, ctx:SQLParser.PlaceContext):
pass
# Exit a parse tree produced by SQLParser#place.
def exitPlace(self, ctx:SQLParser.PlaceContext):
pass
# Enter a parse tree produced by SQLParser#restrict.
def enterRestrict(self, ctx:SQLParser.RestrictContext):
pass
# Exit a parse tree produced by SQLParser#restrict.
def exitRestrict(self, ctx:SQLParser.RestrictContext):
pass
# Enter a parse tree produced by SQLParser#alterOrderList.
def enterAlterOrderList(self, ctx:SQLParser.AlterOrderListContext):
pass
# Exit a parse tree produced by SQLParser#alterOrderList.
def exitAlterOrderList(self, ctx:SQLParser.AlterOrderListContext):
pass
# Enter a parse tree produced by SQLParser#alterAlgorithmOption.
def enterAlterAlgorithmOption(self, ctx:SQLParser.AlterAlgorithmOptionContext):
pass
# Exit a parse tree produced by SQLParser#alterAlgorithmOption.
def exitAlterAlgorithmOption(self, ctx:SQLParser.AlterAlgorithmOptionContext):
pass
# Enter a parse tree produced by SQLParser#alterLockOption.
def enterAlterLockOption(self, ctx:SQLParser.AlterLockOptionContext):
pass
# Exit a parse tree produced by SQLParser#alterLockOption.
def exitAlterLockOption(self, ctx:SQLParser.AlterLockOptionContext):
pass
# Enter a parse tree produced by SQLParser#indexLockAndAlgorithm.
def enterIndexLockAndAlgorithm(self, ctx:SQLParser.IndexLockAndAlgorithmContext):
pass
# Exit a parse tree produced by SQLParser#indexLockAndAlgorithm.
def exitIndexLockAndAlgorithm(self, ctx:SQLParser.IndexLockAndAlgorithmContext):
pass
# Enter a parse tree produced by SQLParser#withValidation.
def enterWithValidation(self, ctx:SQLParser.WithValidationContext):
pass
# Exit a parse tree produced by SQLParser#withValidation.
def exitWithValidation(self, ctx:SQLParser.WithValidationContext):
pass
# Enter a parse tree produced by SQLParser#removePartitioning.
def enterRemovePartitioning(self, ctx:SQLParser.RemovePartitioningContext):
pass
# Exit a parse tree produced by SQLParser#removePartitioning.
def exitRemovePartitioning(self, ctx:SQLParser.RemovePartitioningContext):
pass
# Enter a parse tree produced by SQLParser#allOrPartitionNameList.
def enterAllOrPartitionNameList(self, ctx:SQLParser.AllOrPartitionNameListContext):
pass
# Exit a parse tree produced by SQLParser#allOrPartitionNameList.
def exitAllOrPartitionNameList(self, ctx:SQLParser.AllOrPartitionNameListContext):
pass
# Enter a parse tree produced by SQLParser#alterTablespace.
def enterAlterTablespace(self, ctx:SQLParser.AlterTablespaceContext):
pass
# Exit a parse tree produced by SQLParser#alterTablespace.
def exitAlterTablespace(self, ctx:SQLParser.AlterTablespaceContext):
pass
# Enter a parse tree produced by SQLParser#alterUndoTablespace.
def enterAlterUndoTablespace(self, ctx:SQLParser.AlterUndoTablespaceContext):
pass
# Exit a parse tree produced by SQLParser#alterUndoTablespace.
def exitAlterUndoTablespace(self, ctx:SQLParser.AlterUndoTablespaceContext):
pass
# Enter a parse tree produced by SQLParser#undoTableSpaceOptions.
def enterUndoTableSpaceOptions(self, ctx:SQLParser.UndoTableSpaceOptionsContext):
pass
# Exit a parse tree produced by SQLParser#undoTableSpaceOptions.
def exitUndoTableSpaceOptions(self, ctx:SQLParser.UndoTableSpaceOptionsContext):
pass
# Enter a parse tree produced by SQLParser#undoTableSpaceOption.
def enterUndoTableSpaceOption(self, ctx:SQLParser.UndoTableSpaceOptionContext):
pass
# Exit a parse tree produced by SQLParser#undoTableSpaceOption.
def exitUndoTableSpaceOption(self, ctx:SQLParser.UndoTableSpaceOptionContext):
pass
# Enter a parse tree produced by SQLParser#alterTablespaceOptions.
def enterAlterTablespaceOptions(self, ctx:SQLParser.AlterTablespaceOptionsContext):
pass
# Exit a parse tree produced by SQLParser#alterTablespaceOptions.
def exitAlterTablespaceOptions(self, ctx:SQLParser.AlterTablespaceOptionsContext):
pass
# Enter a parse tree produced by SQLParser#alterTablespaceOption.
def enterAlterTablespaceOption(self, ctx:SQLParser.AlterTablespaceOptionContext):
pass
# Exit a parse tree produced by SQLParser#alterTablespaceOption.
def exitAlterTablespaceOption(self, ctx:SQLParser.AlterTablespaceOptionContext):
pass
# Enter a parse tree produced by SQLParser#changeTablespaceOption.
def enterChangeTablespaceOption(self, ctx:SQLParser.ChangeTablespaceOptionContext):
pass
# Exit a parse tree produced by SQLParser#changeTablespaceOption.
def exitChangeTablespaceOption(self, ctx:SQLParser.ChangeTablespaceOptionContext):
pass
    # NOTE: auto-generated ANTLR listener stubs (ALTER VIEW and view-clause rules).
    # No-op defaults intended to be overridden; do not hand-edit generated code.
    # Enter a parse tree produced by SQLParser#alterView.
    def enterAlterView(self, ctx:SQLParser.AlterViewContext):
        pass

    # Exit a parse tree produced by SQLParser#alterView.
    def exitAlterView(self, ctx:SQLParser.AlterViewContext):
        pass

    # Enter a parse tree produced by SQLParser#viewTail.
    def enterViewTail(self, ctx:SQLParser.ViewTailContext):
        pass

    # Exit a parse tree produced by SQLParser#viewTail.
    def exitViewTail(self, ctx:SQLParser.ViewTailContext):
        pass

    # Enter a parse tree produced by SQLParser#viewSelect.
    def enterViewSelect(self, ctx:SQLParser.ViewSelectContext):
        pass

    # Exit a parse tree produced by SQLParser#viewSelect.
    def exitViewSelect(self, ctx:SQLParser.ViewSelectContext):
        pass

    # Enter a parse tree produced by SQLParser#viewCheckOption.
    def enterViewCheckOption(self, ctx:SQLParser.ViewCheckOptionContext):
        pass

    # Exit a parse tree produced by SQLParser#viewCheckOption.
    def exitViewCheckOption(self, ctx:SQLParser.ViewCheckOptionContext):
        pass
    # NOTE: auto-generated ANTLR listener stubs (CREATE statement / database / table
    # rules). No-op defaults intended to be overridden; regenerate, don't hand-edit.
    # Enter a parse tree produced by SQLParser#createStatement.
    def enterCreateStatement(self, ctx:SQLParser.CreateStatementContext):
        pass

    # Exit a parse tree produced by SQLParser#createStatement.
    def exitCreateStatement(self, ctx:SQLParser.CreateStatementContext):
        pass

    # Enter a parse tree produced by SQLParser#createDatabase.
    def enterCreateDatabase(self, ctx:SQLParser.CreateDatabaseContext):
        pass

    # Exit a parse tree produced by SQLParser#createDatabase.
    def exitCreateDatabase(self, ctx:SQLParser.CreateDatabaseContext):
        pass

    # Enter a parse tree produced by SQLParser#createDatabaseOption.
    def enterCreateDatabaseOption(self, ctx:SQLParser.CreateDatabaseOptionContext):
        pass

    # Exit a parse tree produced by SQLParser#createDatabaseOption.
    def exitCreateDatabaseOption(self, ctx:SQLParser.CreateDatabaseOptionContext):
        pass

    # Enter a parse tree produced by SQLParser#createTable.
    def enterCreateTable(self, ctx:SQLParser.CreateTableContext):
        pass

    # Exit a parse tree produced by SQLParser#createTable.
    def exitCreateTable(self, ctx:SQLParser.CreateTableContext):
        pass

    # Enter a parse tree produced by SQLParser#tableElementList.
    def enterTableElementList(self, ctx:SQLParser.TableElementListContext):
        pass

    # Exit a parse tree produced by SQLParser#tableElementList.
    def exitTableElementList(self, ctx:SQLParser.TableElementListContext):
        pass

    # Enter a parse tree produced by SQLParser#tableElement.
    def enterTableElement(self, ctx:SQLParser.TableElementContext):
        pass

    # Exit a parse tree produced by SQLParser#tableElement.
    def exitTableElement(self, ctx:SQLParser.TableElementContext):
        pass

    # Enter a parse tree produced by SQLParser#duplicateAsQueryExpression.
    def enterDuplicateAsQueryExpression(self, ctx:SQLParser.DuplicateAsQueryExpressionContext):
        pass

    # Exit a parse tree produced by SQLParser#duplicateAsQueryExpression.
    def exitDuplicateAsQueryExpression(self, ctx:SQLParser.DuplicateAsQueryExpressionContext):
        pass

    # Enter a parse tree produced by SQLParser#queryExpressionOrParens.
    def enterQueryExpressionOrParens(self, ctx:SQLParser.QueryExpressionOrParensContext):
        pass

    # Exit a parse tree produced by SQLParser#queryExpressionOrParens.
    def exitQueryExpressionOrParens(self, ctx:SQLParser.QueryExpressionOrParensContext):
        pass
    # NOTE: auto-generated ANTLR listener stubs (stored routine / procedure /
    # function / UDF rules). No-op defaults intended to be overridden.
    # Enter a parse tree produced by SQLParser#createRoutine.
    def enterCreateRoutine(self, ctx:SQLParser.CreateRoutineContext):
        pass

    # Exit a parse tree produced by SQLParser#createRoutine.
    def exitCreateRoutine(self, ctx:SQLParser.CreateRoutineContext):
        pass

    # Enter a parse tree produced by SQLParser#createProcedure.
    def enterCreateProcedure(self, ctx:SQLParser.CreateProcedureContext):
        pass

    # Exit a parse tree produced by SQLParser#createProcedure.
    def exitCreateProcedure(self, ctx:SQLParser.CreateProcedureContext):
        pass

    # Enter a parse tree produced by SQLParser#createFunction.
    def enterCreateFunction(self, ctx:SQLParser.CreateFunctionContext):
        pass

    # Exit a parse tree produced by SQLParser#createFunction.
    def exitCreateFunction(self, ctx:SQLParser.CreateFunctionContext):
        pass

    # Enter a parse tree produced by SQLParser#createUdf.
    def enterCreateUdf(self, ctx:SQLParser.CreateUdfContext):
        pass

    # Exit a parse tree produced by SQLParser#createUdf.
    def exitCreateUdf(self, ctx:SQLParser.CreateUdfContext):
        pass

    # Enter a parse tree produced by SQLParser#routineCreateOption.
    def enterRoutineCreateOption(self, ctx:SQLParser.RoutineCreateOptionContext):
        pass

    # Exit a parse tree produced by SQLParser#routineCreateOption.
    def exitRoutineCreateOption(self, ctx:SQLParser.RoutineCreateOptionContext):
        pass

    # Enter a parse tree produced by SQLParser#routineAlterOptions.
    def enterRoutineAlterOptions(self, ctx:SQLParser.RoutineAlterOptionsContext):
        pass

    # Exit a parse tree produced by SQLParser#routineAlterOptions.
    def exitRoutineAlterOptions(self, ctx:SQLParser.RoutineAlterOptionsContext):
        pass

    # Enter a parse tree produced by SQLParser#routineOption.
    def enterRoutineOption(self, ctx:SQLParser.RoutineOptionContext):
        pass

    # Exit a parse tree produced by SQLParser#routineOption.
    def exitRoutineOption(self, ctx:SQLParser.RoutineOptionContext):
        pass
    # NOTE: auto-generated ANTLR listener stubs (index, logfile group, and server
    # creation rules). No-op defaults intended to be overridden.
    # Enter a parse tree produced by SQLParser#createIndex.
    def enterCreateIndex(self, ctx:SQLParser.CreateIndexContext):
        pass

    # Exit a parse tree produced by SQLParser#createIndex.
    def exitCreateIndex(self, ctx:SQLParser.CreateIndexContext):
        pass

    # Enter a parse tree produced by SQLParser#indexNameAndType.
    def enterIndexNameAndType(self, ctx:SQLParser.IndexNameAndTypeContext):
        pass

    # Exit a parse tree produced by SQLParser#indexNameAndType.
    def exitIndexNameAndType(self, ctx:SQLParser.IndexNameAndTypeContext):
        pass

    # Enter a parse tree produced by SQLParser#createIndexTarget.
    def enterCreateIndexTarget(self, ctx:SQLParser.CreateIndexTargetContext):
        pass

    # Exit a parse tree produced by SQLParser#createIndexTarget.
    def exitCreateIndexTarget(self, ctx:SQLParser.CreateIndexTargetContext):
        pass

    # Enter a parse tree produced by SQLParser#createLogfileGroup.
    def enterCreateLogfileGroup(self, ctx:SQLParser.CreateLogfileGroupContext):
        pass

    # Exit a parse tree produced by SQLParser#createLogfileGroup.
    def exitCreateLogfileGroup(self, ctx:SQLParser.CreateLogfileGroupContext):
        pass

    # Enter a parse tree produced by SQLParser#logfileGroupOptions.
    def enterLogfileGroupOptions(self, ctx:SQLParser.LogfileGroupOptionsContext):
        pass

    # Exit a parse tree produced by SQLParser#logfileGroupOptions.
    def exitLogfileGroupOptions(self, ctx:SQLParser.LogfileGroupOptionsContext):
        pass

    # Enter a parse tree produced by SQLParser#logfileGroupOption.
    def enterLogfileGroupOption(self, ctx:SQLParser.LogfileGroupOptionContext):
        pass

    # Exit a parse tree produced by SQLParser#logfileGroupOption.
    def exitLogfileGroupOption(self, ctx:SQLParser.LogfileGroupOptionContext):
        pass

    # Enter a parse tree produced by SQLParser#createServer.
    def enterCreateServer(self, ctx:SQLParser.CreateServerContext):
        pass

    # Exit a parse tree produced by SQLParser#createServer.
    def exitCreateServer(self, ctx:SQLParser.CreateServerContext):
        pass

    # Enter a parse tree produced by SQLParser#serverOptions.
    def enterServerOptions(self, ctx:SQLParser.ServerOptionsContext):
        pass

    # Exit a parse tree produced by SQLParser#serverOptions.
    def exitServerOptions(self, ctx:SQLParser.ServerOptionsContext):
        pass

    # Enter a parse tree produced by SQLParser#serverOption.
    def enterServerOption(self, ctx:SQLParser.ServerOptionContext):
        pass

    # Exit a parse tree produced by SQLParser#serverOption.
    def exitServerOption(self, ctx:SQLParser.ServerOptionContext):
        pass
    # NOTE: auto-generated ANTLR listener stubs (tablespace creation and data-file
    # rules). No-op defaults intended to be overridden.
    # Enter a parse tree produced by SQLParser#createTablespace.
    def enterCreateTablespace(self, ctx:SQLParser.CreateTablespaceContext):
        pass

    # Exit a parse tree produced by SQLParser#createTablespace.
    def exitCreateTablespace(self, ctx:SQLParser.CreateTablespaceContext):
        pass

    # Enter a parse tree produced by SQLParser#createUndoTablespace.
    def enterCreateUndoTablespace(self, ctx:SQLParser.CreateUndoTablespaceContext):
        pass

    # Exit a parse tree produced by SQLParser#createUndoTablespace.
    def exitCreateUndoTablespace(self, ctx:SQLParser.CreateUndoTablespaceContext):
        pass

    # Enter a parse tree produced by SQLParser#tsDataFileName.
    def enterTsDataFileName(self, ctx:SQLParser.TsDataFileNameContext):
        pass

    # Exit a parse tree produced by SQLParser#tsDataFileName.
    def exitTsDataFileName(self, ctx:SQLParser.TsDataFileNameContext):
        pass

    # Enter a parse tree produced by SQLParser#tsDataFile.
    def enterTsDataFile(self, ctx:SQLParser.TsDataFileContext):
        pass

    # Exit a parse tree produced by SQLParser#tsDataFile.
    def exitTsDataFile(self, ctx:SQLParser.TsDataFileContext):
        pass

    # Enter a parse tree produced by SQLParser#tablespaceOptions.
    def enterTablespaceOptions(self, ctx:SQLParser.TablespaceOptionsContext):
        pass

    # Exit a parse tree produced by SQLParser#tablespaceOptions.
    def exitTablespaceOptions(self, ctx:SQLParser.TablespaceOptionsContext):
        pass

    # Enter a parse tree produced by SQLParser#tablespaceOption.
    def enterTablespaceOption(self, ctx:SQLParser.TablespaceOptionContext):
        pass

    # Exit a parse tree produced by SQLParser#tablespaceOption.
    def exitTablespaceOption(self, ctx:SQLParser.TablespaceOptionContext):
        pass
    # NOTE: auto-generated ANTLR listener stubs (the tsOption* family of tablespace
    # option rules). No-op defaults intended to be overridden.
    # Enter a parse tree produced by SQLParser#tsOptionInitialSize.
    def enterTsOptionInitialSize(self, ctx:SQLParser.TsOptionInitialSizeContext):
        pass

    # Exit a parse tree produced by SQLParser#tsOptionInitialSize.
    def exitTsOptionInitialSize(self, ctx:SQLParser.TsOptionInitialSizeContext):
        pass

    # Enter a parse tree produced by SQLParser#tsOptionUndoRedoBufferSize.
    def enterTsOptionUndoRedoBufferSize(self, ctx:SQLParser.TsOptionUndoRedoBufferSizeContext):
        pass

    # Exit a parse tree produced by SQLParser#tsOptionUndoRedoBufferSize.
    def exitTsOptionUndoRedoBufferSize(self, ctx:SQLParser.TsOptionUndoRedoBufferSizeContext):
        pass

    # Enter a parse tree produced by SQLParser#tsOptionAutoextendSize.
    def enterTsOptionAutoextendSize(self, ctx:SQLParser.TsOptionAutoextendSizeContext):
        pass

    # Exit a parse tree produced by SQLParser#tsOptionAutoextendSize.
    def exitTsOptionAutoextendSize(self, ctx:SQLParser.TsOptionAutoextendSizeContext):
        pass

    # Enter a parse tree produced by SQLParser#tsOptionMaxSize.
    def enterTsOptionMaxSize(self, ctx:SQLParser.TsOptionMaxSizeContext):
        pass

    # Exit a parse tree produced by SQLParser#tsOptionMaxSize.
    def exitTsOptionMaxSize(self, ctx:SQLParser.TsOptionMaxSizeContext):
        pass

    # Enter a parse tree produced by SQLParser#tsOptionExtentSize.
    def enterTsOptionExtentSize(self, ctx:SQLParser.TsOptionExtentSizeContext):
        pass

    # Exit a parse tree produced by SQLParser#tsOptionExtentSize.
    def exitTsOptionExtentSize(self, ctx:SQLParser.TsOptionExtentSizeContext):
        pass

    # Enter a parse tree produced by SQLParser#tsOptionNodegroup.
    def enterTsOptionNodegroup(self, ctx:SQLParser.TsOptionNodegroupContext):
        pass

    # Exit a parse tree produced by SQLParser#tsOptionNodegroup.
    def exitTsOptionNodegroup(self, ctx:SQLParser.TsOptionNodegroupContext):
        pass

    # Enter a parse tree produced by SQLParser#tsOptionEngine.
    def enterTsOptionEngine(self, ctx:SQLParser.TsOptionEngineContext):
        pass

    # Exit a parse tree produced by SQLParser#tsOptionEngine.
    def exitTsOptionEngine(self, ctx:SQLParser.TsOptionEngineContext):
        pass

    # Enter a parse tree produced by SQLParser#tsOptionWait.
    def enterTsOptionWait(self, ctx:SQLParser.TsOptionWaitContext):
        pass

    # Exit a parse tree produced by SQLParser#tsOptionWait.
    def exitTsOptionWait(self, ctx:SQLParser.TsOptionWaitContext):
        pass

    # Enter a parse tree produced by SQLParser#tsOptionComment.
    def enterTsOptionComment(self, ctx:SQLParser.TsOptionCommentContext):
        pass

    # Exit a parse tree produced by SQLParser#tsOptionComment.
    def exitTsOptionComment(self, ctx:SQLParser.TsOptionCommentContext):
        pass

    # Enter a parse tree produced by SQLParser#tsOptionFileblockSize.
    def enterTsOptionFileblockSize(self, ctx:SQLParser.TsOptionFileblockSizeContext):
        pass

    # Exit a parse tree produced by SQLParser#tsOptionFileblockSize.
    def exitTsOptionFileblockSize(self, ctx:SQLParser.TsOptionFileblockSizeContext):
        pass

    # Enter a parse tree produced by SQLParser#tsOptionEncryption.
    def enterTsOptionEncryption(self, ctx:SQLParser.TsOptionEncryptionContext):
        pass

    # Exit a parse tree produced by SQLParser#tsOptionEncryption.
    def exitTsOptionEncryption(self, ctx:SQLParser.TsOptionEncryptionContext):
        pass
    # NOTE: auto-generated ANTLR listener stubs (view, trigger, event, role, and
    # spatial-reference creation rules). No-op defaults intended to be overridden.
    # Enter a parse tree produced by SQLParser#createView.
    def enterCreateView(self, ctx:SQLParser.CreateViewContext):
        pass

    # Exit a parse tree produced by SQLParser#createView.
    def exitCreateView(self, ctx:SQLParser.CreateViewContext):
        pass

    # Enter a parse tree produced by SQLParser#viewReplaceOrAlgorithm.
    def enterViewReplaceOrAlgorithm(self, ctx:SQLParser.ViewReplaceOrAlgorithmContext):
        pass

    # Exit a parse tree produced by SQLParser#viewReplaceOrAlgorithm.
    def exitViewReplaceOrAlgorithm(self, ctx:SQLParser.ViewReplaceOrAlgorithmContext):
        pass

    # Enter a parse tree produced by SQLParser#viewAlgorithm.
    def enterViewAlgorithm(self, ctx:SQLParser.ViewAlgorithmContext):
        pass

    # Exit a parse tree produced by SQLParser#viewAlgorithm.
    def exitViewAlgorithm(self, ctx:SQLParser.ViewAlgorithmContext):
        pass

    # Enter a parse tree produced by SQLParser#viewSuid.
    def enterViewSuid(self, ctx:SQLParser.ViewSuidContext):
        pass

    # Exit a parse tree produced by SQLParser#viewSuid.
    def exitViewSuid(self, ctx:SQLParser.ViewSuidContext):
        pass

    # Enter a parse tree produced by SQLParser#createTrigger.
    def enterCreateTrigger(self, ctx:SQLParser.CreateTriggerContext):
        pass

    # Exit a parse tree produced by SQLParser#createTrigger.
    def exitCreateTrigger(self, ctx:SQLParser.CreateTriggerContext):
        pass

    # Enter a parse tree produced by SQLParser#triggerFollowsPrecedesClause.
    def enterTriggerFollowsPrecedesClause(self, ctx:SQLParser.TriggerFollowsPrecedesClauseContext):
        pass

    # Exit a parse tree produced by SQLParser#triggerFollowsPrecedesClause.
    def exitTriggerFollowsPrecedesClause(self, ctx:SQLParser.TriggerFollowsPrecedesClauseContext):
        pass

    # Enter a parse tree produced by SQLParser#createEvent.
    def enterCreateEvent(self, ctx:SQLParser.CreateEventContext):
        pass

    # Exit a parse tree produced by SQLParser#createEvent.
    def exitCreateEvent(self, ctx:SQLParser.CreateEventContext):
        pass

    # Enter a parse tree produced by SQLParser#createRole.
    def enterCreateRole(self, ctx:SQLParser.CreateRoleContext):
        pass

    # Exit a parse tree produced by SQLParser#createRole.
    def exitCreateRole(self, ctx:SQLParser.CreateRoleContext):
        pass

    # Enter a parse tree produced by SQLParser#createSpatialReference.
    def enterCreateSpatialReference(self, ctx:SQLParser.CreateSpatialReferenceContext):
        pass

    # Exit a parse tree produced by SQLParser#createSpatialReference.
    def exitCreateSpatialReference(self, ctx:SQLParser.CreateSpatialReferenceContext):
        pass

    # Enter a parse tree produced by SQLParser#srsAttribute.
    def enterSrsAttribute(self, ctx:SQLParser.SrsAttributeContext):
        pass

    # Exit a parse tree produced by SQLParser#srsAttribute.
    def exitSrsAttribute(self, ctx:SQLParser.SrsAttributeContext):
        pass
    # NOTE: auto-generated ANTLR listener stubs (the DROP statement family).
    # No-op defaults intended to be overridden.
    # Enter a parse tree produced by SQLParser#dropStatement.
    def enterDropStatement(self, ctx:SQLParser.DropStatementContext):
        pass

    # Exit a parse tree produced by SQLParser#dropStatement.
    def exitDropStatement(self, ctx:SQLParser.DropStatementContext):
        pass

    # Enter a parse tree produced by SQLParser#dropDatabase.
    def enterDropDatabase(self, ctx:SQLParser.DropDatabaseContext):
        pass

    # Exit a parse tree produced by SQLParser#dropDatabase.
    def exitDropDatabase(self, ctx:SQLParser.DropDatabaseContext):
        pass

    # Enter a parse tree produced by SQLParser#dropEvent.
    def enterDropEvent(self, ctx:SQLParser.DropEventContext):
        pass

    # Exit a parse tree produced by SQLParser#dropEvent.
    def exitDropEvent(self, ctx:SQLParser.DropEventContext):
        pass

    # Enter a parse tree produced by SQLParser#dropFunction.
    def enterDropFunction(self, ctx:SQLParser.DropFunctionContext):
        pass

    # Exit a parse tree produced by SQLParser#dropFunction.
    def exitDropFunction(self, ctx:SQLParser.DropFunctionContext):
        pass

    # Enter a parse tree produced by SQLParser#dropProcedure.
    def enterDropProcedure(self, ctx:SQLParser.DropProcedureContext):
        pass

    # Exit a parse tree produced by SQLParser#dropProcedure.
    def exitDropProcedure(self, ctx:SQLParser.DropProcedureContext):
        pass

    # Enter a parse tree produced by SQLParser#dropIndex.
    def enterDropIndex(self, ctx:SQLParser.DropIndexContext):
        pass

    # Exit a parse tree produced by SQLParser#dropIndex.
    def exitDropIndex(self, ctx:SQLParser.DropIndexContext):
        pass

    # Enter a parse tree produced by SQLParser#dropLogfileGroup.
    def enterDropLogfileGroup(self, ctx:SQLParser.DropLogfileGroupContext):
        pass

    # Exit a parse tree produced by SQLParser#dropLogfileGroup.
    def exitDropLogfileGroup(self, ctx:SQLParser.DropLogfileGroupContext):
        pass

    # Enter a parse tree produced by SQLParser#dropLogfileGroupOption.
    def enterDropLogfileGroupOption(self, ctx:SQLParser.DropLogfileGroupOptionContext):
        pass

    # Exit a parse tree produced by SQLParser#dropLogfileGroupOption.
    def exitDropLogfileGroupOption(self, ctx:SQLParser.DropLogfileGroupOptionContext):
        pass

    # Enter a parse tree produced by SQLParser#dropServer.
    def enterDropServer(self, ctx:SQLParser.DropServerContext):
        pass

    # Exit a parse tree produced by SQLParser#dropServer.
    def exitDropServer(self, ctx:SQLParser.DropServerContext):
        pass

    # Enter a parse tree produced by SQLParser#dropTable.
    def enterDropTable(self, ctx:SQLParser.DropTableContext):
        pass

    # Exit a parse tree produced by SQLParser#dropTable.
    def exitDropTable(self, ctx:SQLParser.DropTableContext):
        pass

    # Enter a parse tree produced by SQLParser#dropTableSpace.
    def enterDropTableSpace(self, ctx:SQLParser.DropTableSpaceContext):
        pass

    # Exit a parse tree produced by SQLParser#dropTableSpace.
    def exitDropTableSpace(self, ctx:SQLParser.DropTableSpaceContext):
        pass

    # Enter a parse tree produced by SQLParser#dropTrigger.
    def enterDropTrigger(self, ctx:SQLParser.DropTriggerContext):
        pass

    # Exit a parse tree produced by SQLParser#dropTrigger.
    def exitDropTrigger(self, ctx:SQLParser.DropTriggerContext):
        pass

    # Enter a parse tree produced by SQLParser#dropView.
    def enterDropView(self, ctx:SQLParser.DropViewContext):
        pass

    # Exit a parse tree produced by SQLParser#dropView.
    def exitDropView(self, ctx:SQLParser.DropViewContext):
        pass

    # Enter a parse tree produced by SQLParser#dropRole.
    def enterDropRole(self, ctx:SQLParser.DropRoleContext):
        pass

    # Exit a parse tree produced by SQLParser#dropRole.
    def exitDropRole(self, ctx:SQLParser.DropRoleContext):
        pass

    # Enter a parse tree produced by SQLParser#dropSpatialReference.
    def enterDropSpatialReference(self, ctx:SQLParser.DropSpatialReferenceContext):
        pass

    # Exit a parse tree produced by SQLParser#dropSpatialReference.
    def exitDropSpatialReference(self, ctx:SQLParser.DropSpatialReferenceContext):
        pass

    # Enter a parse tree produced by SQLParser#dropUndoTablespace.
    def enterDropUndoTablespace(self, ctx:SQLParser.DropUndoTablespaceContext):
        pass

    # Exit a parse tree produced by SQLParser#dropUndoTablespace.
    def exitDropUndoTablespace(self, ctx:SQLParser.DropUndoTablespaceContext):
        pass
    # NOTE: auto-generated ANTLR listener stubs (RENAME TABLE, TRUNCATE, IMPORT
    # rules). No-op defaults intended to be overridden.
    # Enter a parse tree produced by SQLParser#renameTableStatement.
    def enterRenameTableStatement(self, ctx:SQLParser.RenameTableStatementContext):
        pass

    # Exit a parse tree produced by SQLParser#renameTableStatement.
    def exitRenameTableStatement(self, ctx:SQLParser.RenameTableStatementContext):
        pass

    # Enter a parse tree produced by SQLParser#renamePair.
    def enterRenamePair(self, ctx:SQLParser.RenamePairContext):
        pass

    # Exit a parse tree produced by SQLParser#renamePair.
    def exitRenamePair(self, ctx:SQLParser.RenamePairContext):
        pass

    # Enter a parse tree produced by SQLParser#truncateTableStatement.
    def enterTruncateTableStatement(self, ctx:SQLParser.TruncateTableStatementContext):
        pass

    # Exit a parse tree produced by SQLParser#truncateTableStatement.
    def exitTruncateTableStatement(self, ctx:SQLParser.TruncateTableStatementContext):
        pass

    # Enter a parse tree produced by SQLParser#importStatement.
    def enterImportStatement(self, ctx:SQLParser.ImportStatementContext):
        pass

    # Exit a parse tree produced by SQLParser#importStatement.
    def exitImportStatement(self, ctx:SQLParser.ImportStatementContext):
        pass
    # NOTE: auto-generated ANTLR listener stubs (CALL, DELETE, DO, HANDLER rules).
    # No-op defaults intended to be overridden.
    # Enter a parse tree produced by SQLParser#callStatement.
    def enterCallStatement(self, ctx:SQLParser.CallStatementContext):
        pass

    # Exit a parse tree produced by SQLParser#callStatement.
    def exitCallStatement(self, ctx:SQLParser.CallStatementContext):
        pass

    # Enter a parse tree produced by SQLParser#deleteStatement.
    def enterDeleteStatement(self, ctx:SQLParser.DeleteStatementContext):
        pass

    # Exit a parse tree produced by SQLParser#deleteStatement.
    def exitDeleteStatement(self, ctx:SQLParser.DeleteStatementContext):
        pass

    # Enter a parse tree produced by SQLParser#partitionDelete.
    def enterPartitionDelete(self, ctx:SQLParser.PartitionDeleteContext):
        pass

    # Exit a parse tree produced by SQLParser#partitionDelete.
    def exitPartitionDelete(self, ctx:SQLParser.PartitionDeleteContext):
        pass

    # Enter a parse tree produced by SQLParser#deleteStatementOption.
    def enterDeleteStatementOption(self, ctx:SQLParser.DeleteStatementOptionContext):
        pass

    # Exit a parse tree produced by SQLParser#deleteStatementOption.
    def exitDeleteStatementOption(self, ctx:SQLParser.DeleteStatementOptionContext):
        pass

    # Enter a parse tree produced by SQLParser#doStatement.
    def enterDoStatement(self, ctx:SQLParser.DoStatementContext):
        pass

    # Exit a parse tree produced by SQLParser#doStatement.
    def exitDoStatement(self, ctx:SQLParser.DoStatementContext):
        pass

    # Enter a parse tree produced by SQLParser#handlerStatement.
    def enterHandlerStatement(self, ctx:SQLParser.HandlerStatementContext):
        pass

    # Exit a parse tree produced by SQLParser#handlerStatement.
    def exitHandlerStatement(self, ctx:SQLParser.HandlerStatementContext):
        pass

    # Enter a parse tree produced by SQLParser#handlerReadOrScan.
    def enterHandlerReadOrScan(self, ctx:SQLParser.HandlerReadOrScanContext):
        pass

    # Exit a parse tree produced by SQLParser#handlerReadOrScan.
    def exitHandlerReadOrScan(self, ctx:SQLParser.HandlerReadOrScanContext):
        pass
    # NOTE: auto-generated ANTLR listener stubs (the INSERT statement family).
    # No-op defaults intended to be overridden.
    # Enter a parse tree produced by SQLParser#insertStatement.
    def enterInsertStatement(self, ctx:SQLParser.InsertStatementContext):
        pass

    # Exit a parse tree produced by SQLParser#insertStatement.
    def exitInsertStatement(self, ctx:SQLParser.InsertStatementContext):
        pass

    # Enter a parse tree produced by SQLParser#insertLockOption.
    def enterInsertLockOption(self, ctx:SQLParser.InsertLockOptionContext):
        pass

    # Exit a parse tree produced by SQLParser#insertLockOption.
    def exitInsertLockOption(self, ctx:SQLParser.InsertLockOptionContext):
        pass

    # Enter a parse tree produced by SQLParser#insertFromConstructor.
    def enterInsertFromConstructor(self, ctx:SQLParser.InsertFromConstructorContext):
        pass

    # Exit a parse tree produced by SQLParser#insertFromConstructor.
    def exitInsertFromConstructor(self, ctx:SQLParser.InsertFromConstructorContext):
        pass

    # Enter a parse tree produced by SQLParser#fields.
    def enterFields(self, ctx:SQLParser.FieldsContext):
        pass

    # Exit a parse tree produced by SQLParser#fields.
    def exitFields(self, ctx:SQLParser.FieldsContext):
        pass

    # Enter a parse tree produced by SQLParser#insertValues.
    def enterInsertValues(self, ctx:SQLParser.InsertValuesContext):
        pass

    # Exit a parse tree produced by SQLParser#insertValues.
    def exitInsertValues(self, ctx:SQLParser.InsertValuesContext):
        pass

    # Enter a parse tree produced by SQLParser#insertQueryExpression.
    def enterInsertQueryExpression(self, ctx:SQLParser.InsertQueryExpressionContext):
        pass

    # Exit a parse tree produced by SQLParser#insertQueryExpression.
    def exitInsertQueryExpression(self, ctx:SQLParser.InsertQueryExpressionContext):
        pass

    # Enter a parse tree produced by SQLParser#valueList.
    def enterValueList(self, ctx:SQLParser.ValueListContext):
        pass

    # Exit a parse tree produced by SQLParser#valueList.
    def exitValueList(self, ctx:SQLParser.ValueListContext):
        pass

    # Enter a parse tree produced by SQLParser#values.
    def enterValues(self, ctx:SQLParser.ValuesContext):
        pass

    # Exit a parse tree produced by SQLParser#values.
    def exitValues(self, ctx:SQLParser.ValuesContext):
        pass

    # Enter a parse tree produced by SQLParser#valuesReference.
    def enterValuesReference(self, ctx:SQLParser.ValuesReferenceContext):
        pass

    # Exit a parse tree produced by SQLParser#valuesReference.
    def exitValuesReference(self, ctx:SQLParser.ValuesReferenceContext):
        pass

    # Enter a parse tree produced by SQLParser#insertUpdateList.
    def enterInsertUpdateList(self, ctx:SQLParser.InsertUpdateListContext):
        pass

    # Exit a parse tree produced by SQLParser#insertUpdateList.
    def exitInsertUpdateList(self, ctx:SQLParser.InsertUpdateListContext):
        pass
    # NOTE: auto-generated ANTLR listener stubs (LOAD DATA/XML and REPLACE rules).
    # No-op defaults intended to be overridden.
    # Enter a parse tree produced by SQLParser#loadStatement.
    def enterLoadStatement(self, ctx:SQLParser.LoadStatementContext):
        pass

    # Exit a parse tree produced by SQLParser#loadStatement.
    def exitLoadStatement(self, ctx:SQLParser.LoadStatementContext):
        pass

    # Enter a parse tree produced by SQLParser#dataOrXml.
    def enterDataOrXml(self, ctx:SQLParser.DataOrXmlContext):
        pass

    # Exit a parse tree produced by SQLParser#dataOrXml.
    def exitDataOrXml(self, ctx:SQLParser.DataOrXmlContext):
        pass

    # Enter a parse tree produced by SQLParser#xmlRowsIdentifiedBy.
    def enterXmlRowsIdentifiedBy(self, ctx:SQLParser.XmlRowsIdentifiedByContext):
        pass

    # Exit a parse tree produced by SQLParser#xmlRowsIdentifiedBy.
    def exitXmlRowsIdentifiedBy(self, ctx:SQLParser.XmlRowsIdentifiedByContext):
        pass

    # Enter a parse tree produced by SQLParser#loadDataFileTail.
    def enterLoadDataFileTail(self, ctx:SQLParser.LoadDataFileTailContext):
        pass

    # Exit a parse tree produced by SQLParser#loadDataFileTail.
    def exitLoadDataFileTail(self, ctx:SQLParser.LoadDataFileTailContext):
        pass

    # Enter a parse tree produced by SQLParser#loadDataFileTargetList.
    def enterLoadDataFileTargetList(self, ctx:SQLParser.LoadDataFileTargetListContext):
        pass

    # Exit a parse tree produced by SQLParser#loadDataFileTargetList.
    def exitLoadDataFileTargetList(self, ctx:SQLParser.LoadDataFileTargetListContext):
        pass

    # Enter a parse tree produced by SQLParser#fieldOrVariableList.
    def enterFieldOrVariableList(self, ctx:SQLParser.FieldOrVariableListContext):
        pass

    # Exit a parse tree produced by SQLParser#fieldOrVariableList.
    def exitFieldOrVariableList(self, ctx:SQLParser.FieldOrVariableListContext):
        pass

    # Enter a parse tree produced by SQLParser#replaceStatement.
    def enterReplaceStatement(self, ctx:SQLParser.ReplaceStatementContext):
        pass

    # Exit a parse tree produced by SQLParser#replaceStatement.
    def exitReplaceStatement(self, ctx:SQLParser.ReplaceStatementContext):
        pass
    # NOTE: auto-generated ANTLR listener stubs (SELECT and query-expression rules).
    # No-op defaults intended to be overridden.
    # Enter a parse tree produced by SQLParser#selectStatement.
    def enterSelectStatement(self, ctx:SQLParser.SelectStatementContext):
        pass

    # Exit a parse tree produced by SQLParser#selectStatement.
    def exitSelectStatement(self, ctx:SQLParser.SelectStatementContext):
        pass

    # Enter a parse tree produced by SQLParser#selectStatementWithInto.
    def enterSelectStatementWithInto(self, ctx:SQLParser.SelectStatementWithIntoContext):
        pass

    # Exit a parse tree produced by SQLParser#selectStatementWithInto.
    def exitSelectStatementWithInto(self, ctx:SQLParser.SelectStatementWithIntoContext):
        pass

    # Enter a parse tree produced by SQLParser#queryExpression.
    def enterQueryExpression(self, ctx:SQLParser.QueryExpressionContext):
        pass

    # Exit a parse tree produced by SQLParser#queryExpression.
    def exitQueryExpression(self, ctx:SQLParser.QueryExpressionContext):
        pass

    # Enter a parse tree produced by SQLParser#queryExpressionBody.
    def enterQueryExpressionBody(self, ctx:SQLParser.QueryExpressionBodyContext):
        pass

    # Exit a parse tree produced by SQLParser#queryExpressionBody.
    def exitQueryExpressionBody(self, ctx:SQLParser.QueryExpressionBodyContext):
        pass

    # Enter a parse tree produced by SQLParser#queryExpressionParens.
    def enterQueryExpressionParens(self, ctx:SQLParser.QueryExpressionParensContext):
        pass

    # Exit a parse tree produced by SQLParser#queryExpressionParens.
    def exitQueryExpressionParens(self, ctx:SQLParser.QueryExpressionParensContext):
        pass

    # Enter a parse tree produced by SQLParser#queryPrimary.
    def enterQueryPrimary(self, ctx:SQLParser.QueryPrimaryContext):
        pass

    # Exit a parse tree produced by SQLParser#queryPrimary.
    def exitQueryPrimary(self, ctx:SQLParser.QueryPrimaryContext):
        pass

    # Enter a parse tree produced by SQLParser#querySpecification.
    def enterQuerySpecification(self, ctx:SQLParser.QuerySpecificationContext):
        pass

    # Exit a parse tree produced by SQLParser#querySpecification.
    def exitQuerySpecification(self, ctx:SQLParser.QuerySpecificationContext):
        pass

    # Enter a parse tree produced by SQLParser#subquery.
    def enterSubquery(self, ctx:SQLParser.SubqueryContext):
        pass

    # Exit a parse tree produced by SQLParser#subquery.
    def exitSubquery(self, ctx:SQLParser.SubqueryContext):
        pass

    # Enter a parse tree produced by SQLParser#querySpecOption.
    def enterQuerySpecOption(self, ctx:SQLParser.QuerySpecOptionContext):
        pass

    # Exit a parse tree produced by SQLParser#querySpecOption.
    def exitQuerySpecOption(self, ctx:SQLParser.QuerySpecOptionContext):
        pass
    # NOTE: auto-generated ANTLR listener stubs (LIMIT, INTO, PROCEDURE ANALYSE
    # clause rules). No-op defaults intended to be overridden.
    # Enter a parse tree produced by SQLParser#limitClause.
    def enterLimitClause(self, ctx:SQLParser.LimitClauseContext):
        pass

    # Exit a parse tree produced by SQLParser#limitClause.
    def exitLimitClause(self, ctx:SQLParser.LimitClauseContext):
        pass

    # Enter a parse tree produced by SQLParser#simpleLimitClause.
    def enterSimpleLimitClause(self, ctx:SQLParser.SimpleLimitClauseContext):
        pass

    # Exit a parse tree produced by SQLParser#simpleLimitClause.
    def exitSimpleLimitClause(self, ctx:SQLParser.SimpleLimitClauseContext):
        pass

    # Enter a parse tree produced by SQLParser#limitOptions.
    def enterLimitOptions(self, ctx:SQLParser.LimitOptionsContext):
        pass

    # Exit a parse tree produced by SQLParser#limitOptions.
    def exitLimitOptions(self, ctx:SQLParser.LimitOptionsContext):
        pass

    # Enter a parse tree produced by SQLParser#limitOption.
    def enterLimitOption(self, ctx:SQLParser.LimitOptionContext):
        pass

    # Exit a parse tree produced by SQLParser#limitOption.
    def exitLimitOption(self, ctx:SQLParser.LimitOptionContext):
        pass

    # Enter a parse tree produced by SQLParser#intoClause.
    def enterIntoClause(self, ctx:SQLParser.IntoClauseContext):
        pass

    # Exit a parse tree produced by SQLParser#intoClause.
    def exitIntoClause(self, ctx:SQLParser.IntoClauseContext):
        pass

    # Enter a parse tree produced by SQLParser#procedureAnalyseClause.
    def enterProcedureAnalyseClause(self, ctx:SQLParser.ProcedureAnalyseClauseContext):
        pass

    # Exit a parse tree produced by SQLParser#procedureAnalyseClause.
    def exitProcedureAnalyseClause(self, ctx:SQLParser.ProcedureAnalyseClauseContext):
        pass
    # NOTE: auto-generated ANTLR listener stubs (HAVING clause and window-function
    # frame rules). No-op defaults intended to be overridden.
    # Enter a parse tree produced by SQLParser#havingClause.
    def enterHavingClause(self, ctx:SQLParser.HavingClauseContext):
        pass

    # Exit a parse tree produced by SQLParser#havingClause.
    def exitHavingClause(self, ctx:SQLParser.HavingClauseContext):
        pass

    # Enter a parse tree produced by SQLParser#windowClause.
    def enterWindowClause(self, ctx:SQLParser.WindowClauseContext):
        pass

    # Exit a parse tree produced by SQLParser#windowClause.
    def exitWindowClause(self, ctx:SQLParser.WindowClauseContext):
        pass

    # Enter a parse tree produced by SQLParser#windowDefinition.
    def enterWindowDefinition(self, ctx:SQLParser.WindowDefinitionContext):
        pass

    # Exit a parse tree produced by SQLParser#windowDefinition.
    def exitWindowDefinition(self, ctx:SQLParser.WindowDefinitionContext):
        pass

    # Enter a parse tree produced by SQLParser#windowSpec.
    def enterWindowSpec(self, ctx:SQLParser.WindowSpecContext):
        pass

    # Exit a parse tree produced by SQLParser#windowSpec.
    def exitWindowSpec(self, ctx:SQLParser.WindowSpecContext):
        pass

    # Enter a parse tree produced by SQLParser#windowSpecDetails.
    def enterWindowSpecDetails(self, ctx:SQLParser.WindowSpecDetailsContext):
        pass

    # Exit a parse tree produced by SQLParser#windowSpecDetails.
    def exitWindowSpecDetails(self, ctx:SQLParser.WindowSpecDetailsContext):
        pass

    # Enter a parse tree produced by SQLParser#windowFrameClause.
    def enterWindowFrameClause(self, ctx:SQLParser.WindowFrameClauseContext):
        pass

    # Exit a parse tree produced by SQLParser#windowFrameClause.
    def exitWindowFrameClause(self, ctx:SQLParser.WindowFrameClauseContext):
        pass

    # Enter a parse tree produced by SQLParser#windowFrameUnits.
    def enterWindowFrameUnits(self, ctx:SQLParser.WindowFrameUnitsContext):
        pass

    # Exit a parse tree produced by SQLParser#windowFrameUnits.
    def exitWindowFrameUnits(self, ctx:SQLParser.WindowFrameUnitsContext):
        pass

    # Enter a parse tree produced by SQLParser#windowFrameExtent.
    def enterWindowFrameExtent(self, ctx:SQLParser.WindowFrameExtentContext):
        pass

    # Exit a parse tree produced by SQLParser#windowFrameExtent.
    def exitWindowFrameExtent(self, ctx:SQLParser.WindowFrameExtentContext):
        pass

    # Enter a parse tree produced by SQLParser#windowFrameStart.
    def enterWindowFrameStart(self, ctx:SQLParser.WindowFrameStartContext):
        pass

    # Exit a parse tree produced by SQLParser#windowFrameStart.
    def exitWindowFrameStart(self, ctx:SQLParser.WindowFrameStartContext):
        pass

    # Enter a parse tree produced by SQLParser#windowFrameBetween.
    def enterWindowFrameBetween(self, ctx:SQLParser.WindowFrameBetweenContext):
        pass

    # Exit a parse tree produced by SQLParser#windowFrameBetween.
    def exitWindowFrameBetween(self, ctx:SQLParser.WindowFrameBetweenContext):
        pass

    # Enter a parse tree produced by SQLParser#windowFrameBound.
    def enterWindowFrameBound(self, ctx:SQLParser.WindowFrameBoundContext):
        pass

    # Exit a parse tree produced by SQLParser#windowFrameBound.
    def exitWindowFrameBound(self, ctx:SQLParser.WindowFrameBoundContext):
        pass

    # Enter a parse tree produced by SQLParser#windowFrameExclusion.
    def enterWindowFrameExclusion(self, ctx:SQLParser.WindowFrameExclusionContext):
        pass

    # Exit a parse tree produced by SQLParser#windowFrameExclusion.
    def exitWindowFrameExclusion(self, ctx:SQLParser.WindowFrameExclusionContext):
        pass
# Enter a parse tree produced by SQLParser#withClause.
def enterWithClause(self, ctx:SQLParser.WithClauseContext):
pass
# Exit a parse tree produced by SQLParser#withClause.
def exitWithClause(self, ctx:SQLParser.WithClauseContext):
pass
# Enter a parse tree produced by SQLParser#commonTableExpression.
def enterCommonTableExpression(self, ctx:SQLParser.CommonTableExpressionContext):
pass
# Exit a parse tree produced by SQLParser#commonTableExpression.
def exitCommonTableExpression(self, ctx:SQLParser.CommonTableExpressionContext):
pass
# Enter a parse tree produced by SQLParser#groupByClause.
def enterGroupByClause(self, ctx:SQLParser.GroupByClauseContext):
pass
# Exit a parse tree produced by SQLParser#groupByClause.
def exitGroupByClause(self, ctx:SQLParser.GroupByClauseContext):
pass
# Enter a parse tree produced by SQLParser#olapOption.
def enterOlapOption(self, ctx:SQLParser.OlapOptionContext):
pass
# Exit a parse tree produced by SQLParser#olapOption.
def exitOlapOption(self, ctx:SQLParser.OlapOptionContext):
pass
# Enter a parse tree produced by SQLParser#orderClause.
def enterOrderClause(self, ctx:SQLParser.OrderClauseContext):
pass
# Exit a parse tree produced by SQLParser#orderClause.
def exitOrderClause(self, ctx:SQLParser.OrderClauseContext):
pass
# Enter a parse tree produced by SQLParser#direction.
def enterDirection(self, ctx:SQLParser.DirectionContext):
pass
# Exit a parse tree produced by SQLParser#direction.
def exitDirection(self, ctx:SQLParser.DirectionContext):
pass
    # NOTE(review): auto-generated ANTLR listener stubs for table-reference / join /
    # JSON-table grammar rules. No-ops by design; override in a subclass, do not hand-edit.
    # Enter a parse tree produced by SQLParser#fromClause.
    def enterFromClause(self, ctx:SQLParser.FromClauseContext):
        pass
    # Exit a parse tree produced by SQLParser#fromClause.
    def exitFromClause(self, ctx:SQLParser.FromClauseContext):
        pass
    # Enter a parse tree produced by SQLParser#tableReferenceList.
    def enterTableReferenceList(self, ctx:SQLParser.TableReferenceListContext):
        pass
    # Exit a parse tree produced by SQLParser#tableReferenceList.
    def exitTableReferenceList(self, ctx:SQLParser.TableReferenceListContext):
        pass
    # Enter a parse tree produced by SQLParser#tableValueConstructor.
    def enterTableValueConstructor(self, ctx:SQLParser.TableValueConstructorContext):
        pass
    # Exit a parse tree produced by SQLParser#tableValueConstructor.
    def exitTableValueConstructor(self, ctx:SQLParser.TableValueConstructorContext):
        pass
    # Enter a parse tree produced by SQLParser#explicitTable.
    def enterExplicitTable(self, ctx:SQLParser.ExplicitTableContext):
        pass
    # Exit a parse tree produced by SQLParser#explicitTable.
    def exitExplicitTable(self, ctx:SQLParser.ExplicitTableContext):
        pass
    # Enter a parse tree produced by SQLParser#rowValueExplicit.
    def enterRowValueExplicit(self, ctx:SQLParser.RowValueExplicitContext):
        pass
    # Exit a parse tree produced by SQLParser#rowValueExplicit.
    def exitRowValueExplicit(self, ctx:SQLParser.RowValueExplicitContext):
        pass
    # Enter a parse tree produced by SQLParser#selectOption.
    def enterSelectOption(self, ctx:SQLParser.SelectOptionContext):
        pass
    # Exit a parse tree produced by SQLParser#selectOption.
    def exitSelectOption(self, ctx:SQLParser.SelectOptionContext):
        pass
    # Enter a parse tree produced by SQLParser#lockingClauseList.
    def enterLockingClauseList(self, ctx:SQLParser.LockingClauseListContext):
        pass
    # Exit a parse tree produced by SQLParser#lockingClauseList.
    def exitLockingClauseList(self, ctx:SQLParser.LockingClauseListContext):
        pass
    # Enter a parse tree produced by SQLParser#lockingClause.
    def enterLockingClause(self, ctx:SQLParser.LockingClauseContext):
        pass
    # Exit a parse tree produced by SQLParser#lockingClause.
    def exitLockingClause(self, ctx:SQLParser.LockingClauseContext):
        pass
    # Enter a parse tree produced by SQLParser#lockStrengh.
    # NOTE(review): "Strengh" is a typo inherited from the grammar rule name; it must
    # stay spelled this way or the generated enter/exit dispatch no longer matches.
    def enterLockStrengh(self, ctx:SQLParser.LockStrenghContext):
        pass
    # Exit a parse tree produced by SQLParser#lockStrengh.
    def exitLockStrengh(self, ctx:SQLParser.LockStrenghContext):
        pass
    # Enter a parse tree produced by SQLParser#lockedRowAction.
    def enterLockedRowAction(self, ctx:SQLParser.LockedRowActionContext):
        pass
    # Exit a parse tree produced by SQLParser#lockedRowAction.
    def exitLockedRowAction(self, ctx:SQLParser.LockedRowActionContext):
        pass
    # Enter a parse tree produced by SQLParser#selectItemList.
    def enterSelectItemList(self, ctx:SQLParser.SelectItemListContext):
        pass
    # Exit a parse tree produced by SQLParser#selectItemList.
    def exitSelectItemList(self, ctx:SQLParser.SelectItemListContext):
        pass
    # Enter a parse tree produced by SQLParser#selectItem.
    def enterSelectItem(self, ctx:SQLParser.SelectItemContext):
        pass
    # Exit a parse tree produced by SQLParser#selectItem.
    def exitSelectItem(self, ctx:SQLParser.SelectItemContext):
        pass
    # Enter a parse tree produced by SQLParser#selectAlias.
    def enterSelectAlias(self, ctx:SQLParser.SelectAliasContext):
        pass
    # Exit a parse tree produced by SQLParser#selectAlias.
    def exitSelectAlias(self, ctx:SQLParser.SelectAliasContext):
        pass
    # Enter a parse tree produced by SQLParser#whereClause.
    def enterWhereClause(self, ctx:SQLParser.WhereClauseContext):
        pass
    # Exit a parse tree produced by SQLParser#whereClause.
    def exitWhereClause(self, ctx:SQLParser.WhereClauseContext):
        pass
    # Enter a parse tree produced by SQLParser#tableReference.
    def enterTableReference(self, ctx:SQLParser.TableReferenceContext):
        pass
    # Exit a parse tree produced by SQLParser#tableReference.
    def exitTableReference(self, ctx:SQLParser.TableReferenceContext):
        pass
    # Enter a parse tree produced by SQLParser#escapedTableReference.
    def enterEscapedTableReference(self, ctx:SQLParser.EscapedTableReferenceContext):
        pass
    # Exit a parse tree produced by SQLParser#escapedTableReference.
    def exitEscapedTableReference(self, ctx:SQLParser.EscapedTableReferenceContext):
        pass
    # Enter a parse tree produced by SQLParser#joinedTable.
    def enterJoinedTable(self, ctx:SQLParser.JoinedTableContext):
        pass
    # Exit a parse tree produced by SQLParser#joinedTable.
    def exitJoinedTable(self, ctx:SQLParser.JoinedTableContext):
        pass
    # Enter a parse tree produced by SQLParser#naturalJoinType.
    def enterNaturalJoinType(self, ctx:SQLParser.NaturalJoinTypeContext):
        pass
    # Exit a parse tree produced by SQLParser#naturalJoinType.
    def exitNaturalJoinType(self, ctx:SQLParser.NaturalJoinTypeContext):
        pass
    # Enter a parse tree produced by SQLParser#innerJoinType.
    def enterInnerJoinType(self, ctx:SQLParser.InnerJoinTypeContext):
        pass
    # Exit a parse tree produced by SQLParser#innerJoinType.
    def exitInnerJoinType(self, ctx:SQLParser.InnerJoinTypeContext):
        pass
    # Enter a parse tree produced by SQLParser#outerJoinType.
    def enterOuterJoinType(self, ctx:SQLParser.OuterJoinTypeContext):
        pass
    # Exit a parse tree produced by SQLParser#outerJoinType.
    def exitOuterJoinType(self, ctx:SQLParser.OuterJoinTypeContext):
        pass
    # Enter a parse tree produced by SQLParser#tableFactor.
    def enterTableFactor(self, ctx:SQLParser.TableFactorContext):
        pass
    # Exit a parse tree produced by SQLParser#tableFactor.
    def exitTableFactor(self, ctx:SQLParser.TableFactorContext):
        pass
    # Enter a parse tree produced by SQLParser#singleTable.
    def enterSingleTable(self, ctx:SQLParser.SingleTableContext):
        pass
    # Exit a parse tree produced by SQLParser#singleTable.
    def exitSingleTable(self, ctx:SQLParser.SingleTableContext):
        pass
    # Enter a parse tree produced by SQLParser#singleTableParens.
    def enterSingleTableParens(self, ctx:SQLParser.SingleTableParensContext):
        pass
    # Exit a parse tree produced by SQLParser#singleTableParens.
    def exitSingleTableParens(self, ctx:SQLParser.SingleTableParensContext):
        pass
    # Enter a parse tree produced by SQLParser#derivedTable.
    def enterDerivedTable(self, ctx:SQLParser.DerivedTableContext):
        pass
    # Exit a parse tree produced by SQLParser#derivedTable.
    def exitDerivedTable(self, ctx:SQLParser.DerivedTableContext):
        pass
    # Enter a parse tree produced by SQLParser#tableReferenceListParens.
    def enterTableReferenceListParens(self, ctx:SQLParser.TableReferenceListParensContext):
        pass
    # Exit a parse tree produced by SQLParser#tableReferenceListParens.
    def exitTableReferenceListParens(self, ctx:SQLParser.TableReferenceListParensContext):
        pass
    # Enter a parse tree produced by SQLParser#tableFunction.
    def enterTableFunction(self, ctx:SQLParser.TableFunctionContext):
        pass
    # Exit a parse tree produced by SQLParser#tableFunction.
    def exitTableFunction(self, ctx:SQLParser.TableFunctionContext):
        pass
    # Enter a parse tree produced by SQLParser#columnsClause.
    def enterColumnsClause(self, ctx:SQLParser.ColumnsClauseContext):
        pass
    # Exit a parse tree produced by SQLParser#columnsClause.
    def exitColumnsClause(self, ctx:SQLParser.ColumnsClauseContext):
        pass
    # Enter a parse tree produced by SQLParser#jtColumn.
    def enterJtColumn(self, ctx:SQLParser.JtColumnContext):
        pass
    # Exit a parse tree produced by SQLParser#jtColumn.
    def exitJtColumn(self, ctx:SQLParser.JtColumnContext):
        pass
    # Enter a parse tree produced by SQLParser#onEmptyOrError.
    def enterOnEmptyOrError(self, ctx:SQLParser.OnEmptyOrErrorContext):
        pass
    # Exit a parse tree produced by SQLParser#onEmptyOrError.
    def exitOnEmptyOrError(self, ctx:SQLParser.OnEmptyOrErrorContext):
        pass
    # Enter a parse tree produced by SQLParser#onEmpty.
    def enterOnEmpty(self, ctx:SQLParser.OnEmptyContext):
        pass
    # Exit a parse tree produced by SQLParser#onEmpty.
    def exitOnEmpty(self, ctx:SQLParser.OnEmptyContext):
        pass
    # Enter a parse tree produced by SQLParser#onError.
    def enterOnError(self, ctx:SQLParser.OnErrorContext):
        pass
    # Exit a parse tree produced by SQLParser#onError.
    def exitOnError(self, ctx:SQLParser.OnErrorContext):
        pass
    # Enter a parse tree produced by SQLParser#jtOnResponse.
    def enterJtOnResponse(self, ctx:SQLParser.JtOnResponseContext):
        pass
    # Exit a parse tree produced by SQLParser#jtOnResponse.
    def exitJtOnResponse(self, ctx:SQLParser.JtOnResponseContext):
        pass
    # NOTE(review): auto-generated ANTLR listener stubs for union-option, alias and
    # index-hint grammar rules. No-ops by design; override in a subclass, do not hand-edit.
    # Enter a parse tree produced by SQLParser#unionOption.
    def enterUnionOption(self, ctx:SQLParser.UnionOptionContext):
        pass
    # Exit a parse tree produced by SQLParser#unionOption.
    def exitUnionOption(self, ctx:SQLParser.UnionOptionContext):
        pass
    # Enter a parse tree produced by SQLParser#tableAlias.
    def enterTableAlias(self, ctx:SQLParser.TableAliasContext):
        pass
    # Exit a parse tree produced by SQLParser#tableAlias.
    def exitTableAlias(self, ctx:SQLParser.TableAliasContext):
        pass
    # Enter a parse tree produced by SQLParser#indexHintList.
    def enterIndexHintList(self, ctx:SQLParser.IndexHintListContext):
        pass
    # Exit a parse tree produced by SQLParser#indexHintList.
    def exitIndexHintList(self, ctx:SQLParser.IndexHintListContext):
        pass
    # Enter a parse tree produced by SQLParser#indexHint.
    def enterIndexHint(self, ctx:SQLParser.IndexHintContext):
        pass
    # Exit a parse tree produced by SQLParser#indexHint.
    def exitIndexHint(self, ctx:SQLParser.IndexHintContext):
        pass
    # Enter a parse tree produced by SQLParser#indexHintType.
    def enterIndexHintType(self, ctx:SQLParser.IndexHintTypeContext):
        pass
    # Exit a parse tree produced by SQLParser#indexHintType.
    def exitIndexHintType(self, ctx:SQLParser.IndexHintTypeContext):
        pass
    # Enter a parse tree produced by SQLParser#keyOrIndex.
    def enterKeyOrIndex(self, ctx:SQLParser.KeyOrIndexContext):
        pass
    # Exit a parse tree produced by SQLParser#keyOrIndex.
    def exitKeyOrIndex(self, ctx:SQLParser.KeyOrIndexContext):
        pass
    # Enter a parse tree produced by SQLParser#constraintKeyType.
    def enterConstraintKeyType(self, ctx:SQLParser.ConstraintKeyTypeContext):
        pass
    # Exit a parse tree produced by SQLParser#constraintKeyType.
    def exitConstraintKeyType(self, ctx:SQLParser.ConstraintKeyTypeContext):
        pass
    # Enter a parse tree produced by SQLParser#indexHintClause.
    def enterIndexHintClause(self, ctx:SQLParser.IndexHintClauseContext):
        pass
    # Exit a parse tree produced by SQLParser#indexHintClause.
    def exitIndexHintClause(self, ctx:SQLParser.IndexHintClauseContext):
        pass
    # Enter a parse tree produced by SQLParser#indexList.
    def enterIndexList(self, ctx:SQLParser.IndexListContext):
        pass
    # Exit a parse tree produced by SQLParser#indexList.
    def exitIndexList(self, ctx:SQLParser.IndexListContext):
        pass
    # Enter a parse tree produced by SQLParser#indexListElement.
    def enterIndexListElement(self, ctx:SQLParser.IndexListElementContext):
        pass
    # Exit a parse tree produced by SQLParser#indexListElement.
    def exitIndexListElement(self, ctx:SQLParser.IndexListElementContext):
        pass
    # NOTE(review): auto-generated ANTLR listener stubs for UPDATE, transaction/locking
    # and XA statement rules. No-ops by design; override in a subclass, do not hand-edit.
    # Enter a parse tree produced by SQLParser#updateStatement.
    def enterUpdateStatement(self, ctx:SQLParser.UpdateStatementContext):
        pass
    # Exit a parse tree produced by SQLParser#updateStatement.
    def exitUpdateStatement(self, ctx:SQLParser.UpdateStatementContext):
        pass
    # Enter a parse tree produced by SQLParser#transactionOrLockingStatement.
    def enterTransactionOrLockingStatement(self, ctx:SQLParser.TransactionOrLockingStatementContext):
        pass
    # Exit a parse tree produced by SQLParser#transactionOrLockingStatement.
    def exitTransactionOrLockingStatement(self, ctx:SQLParser.TransactionOrLockingStatementContext):
        pass
    # Enter a parse tree produced by SQLParser#transactionStatement.
    def enterTransactionStatement(self, ctx:SQLParser.TransactionStatementContext):
        pass
    # Exit a parse tree produced by SQLParser#transactionStatement.
    def exitTransactionStatement(self, ctx:SQLParser.TransactionStatementContext):
        pass
    # Enter a parse tree produced by SQLParser#beginWork.
    def enterBeginWork(self, ctx:SQLParser.BeginWorkContext):
        pass
    # Exit a parse tree produced by SQLParser#beginWork.
    def exitBeginWork(self, ctx:SQLParser.BeginWorkContext):
        pass
    # Enter a parse tree produced by SQLParser#transactionCharacteristic.
    def enterTransactionCharacteristic(self, ctx:SQLParser.TransactionCharacteristicContext):
        pass
    # Exit a parse tree produced by SQLParser#transactionCharacteristic.
    def exitTransactionCharacteristic(self, ctx:SQLParser.TransactionCharacteristicContext):
        pass
    # Enter a parse tree produced by SQLParser#savepointStatement.
    def enterSavepointStatement(self, ctx:SQLParser.SavepointStatementContext):
        pass
    # Exit a parse tree produced by SQLParser#savepointStatement.
    def exitSavepointStatement(self, ctx:SQLParser.SavepointStatementContext):
        pass
    # Enter a parse tree produced by SQLParser#lockStatement.
    def enterLockStatement(self, ctx:SQLParser.LockStatementContext):
        pass
    # Exit a parse tree produced by SQLParser#lockStatement.
    def exitLockStatement(self, ctx:SQLParser.LockStatementContext):
        pass
    # Enter a parse tree produced by SQLParser#lockItem.
    def enterLockItem(self, ctx:SQLParser.LockItemContext):
        pass
    # Exit a parse tree produced by SQLParser#lockItem.
    def exitLockItem(self, ctx:SQLParser.LockItemContext):
        pass
    # Enter a parse tree produced by SQLParser#lockOption.
    def enterLockOption(self, ctx:SQLParser.LockOptionContext):
        pass
    # Exit a parse tree produced by SQLParser#lockOption.
    def exitLockOption(self, ctx:SQLParser.LockOptionContext):
        pass
    # Enter a parse tree produced by SQLParser#xaStatement.
    def enterXaStatement(self, ctx:SQLParser.XaStatementContext):
        pass
    # Exit a parse tree produced by SQLParser#xaStatement.
    def exitXaStatement(self, ctx:SQLParser.XaStatementContext):
        pass
    # Enter a parse tree produced by SQLParser#xaConvert.
    def enterXaConvert(self, ctx:SQLParser.XaConvertContext):
        pass
    # Exit a parse tree produced by SQLParser#xaConvert.
    def exitXaConvert(self, ctx:SQLParser.XaConvertContext):
        pass
    # Enter a parse tree produced by SQLParser#xid.
    def enterXid(self, ctx:SQLParser.XidContext):
        pass
    # Exit a parse tree produced by SQLParser#xid.
    def exitXid(self, ctx:SQLParser.XidContext):
        pass
    # NOTE(review): auto-generated ANTLR listener stubs for replication-related rules
    # (CHANGE MASTER, filters, slave/group replication). No-ops by design; override in
    # a subclass, do not hand-edit.
    # Enter a parse tree produced by SQLParser#replicationStatement.
    def enterReplicationStatement(self, ctx:SQLParser.ReplicationStatementContext):
        pass
    # Exit a parse tree produced by SQLParser#replicationStatement.
    def exitReplicationStatement(self, ctx:SQLParser.ReplicationStatementContext):
        pass
    # Enter a parse tree produced by SQLParser#resetOption.
    def enterResetOption(self, ctx:SQLParser.ResetOptionContext):
        pass
    # Exit a parse tree produced by SQLParser#resetOption.
    def exitResetOption(self, ctx:SQLParser.ResetOptionContext):
        pass
    # Enter a parse tree produced by SQLParser#masterResetOptions.
    def enterMasterResetOptions(self, ctx:SQLParser.MasterResetOptionsContext):
        pass
    # Exit a parse tree produced by SQLParser#masterResetOptions.
    def exitMasterResetOptions(self, ctx:SQLParser.MasterResetOptionsContext):
        pass
    # Enter a parse tree produced by SQLParser#replicationLoad.
    def enterReplicationLoad(self, ctx:SQLParser.ReplicationLoadContext):
        pass
    # Exit a parse tree produced by SQLParser#replicationLoad.
    def exitReplicationLoad(self, ctx:SQLParser.ReplicationLoadContext):
        pass
    # Enter a parse tree produced by SQLParser#changeMaster.
    def enterChangeMaster(self, ctx:SQLParser.ChangeMasterContext):
        pass
    # Exit a parse tree produced by SQLParser#changeMaster.
    def exitChangeMaster(self, ctx:SQLParser.ChangeMasterContext):
        pass
    # Enter a parse tree produced by SQLParser#changeMasterOptions.
    def enterChangeMasterOptions(self, ctx:SQLParser.ChangeMasterOptionsContext):
        pass
    # Exit a parse tree produced by SQLParser#changeMasterOptions.
    def exitChangeMasterOptions(self, ctx:SQLParser.ChangeMasterOptionsContext):
        pass
    # Enter a parse tree produced by SQLParser#masterOption.
    def enterMasterOption(self, ctx:SQLParser.MasterOptionContext):
        pass
    # Exit a parse tree produced by SQLParser#masterOption.
    def exitMasterOption(self, ctx:SQLParser.MasterOptionContext):
        pass
    # Enter a parse tree produced by SQLParser#privilegeCheckDef.
    def enterPrivilegeCheckDef(self, ctx:SQLParser.PrivilegeCheckDefContext):
        pass
    # Exit a parse tree produced by SQLParser#privilegeCheckDef.
    def exitPrivilegeCheckDef(self, ctx:SQLParser.PrivilegeCheckDefContext):
        pass
    # Enter a parse tree produced by SQLParser#tablePrimaryKeyCheckDef.
    def enterTablePrimaryKeyCheckDef(self, ctx:SQLParser.TablePrimaryKeyCheckDefContext):
        pass
    # Exit a parse tree produced by SQLParser#tablePrimaryKeyCheckDef.
    def exitTablePrimaryKeyCheckDef(self, ctx:SQLParser.TablePrimaryKeyCheckDefContext):
        pass
    # Enter a parse tree produced by SQLParser#masterTlsCiphersuitesDef.
    def enterMasterTlsCiphersuitesDef(self, ctx:SQLParser.MasterTlsCiphersuitesDefContext):
        pass
    # Exit a parse tree produced by SQLParser#masterTlsCiphersuitesDef.
    def exitMasterTlsCiphersuitesDef(self, ctx:SQLParser.MasterTlsCiphersuitesDefContext):
        pass
    # Enter a parse tree produced by SQLParser#masterFileDef.
    def enterMasterFileDef(self, ctx:SQLParser.MasterFileDefContext):
        pass
    # Exit a parse tree produced by SQLParser#masterFileDef.
    def exitMasterFileDef(self, ctx:SQLParser.MasterFileDefContext):
        pass
    # Enter a parse tree produced by SQLParser#serverIdList.
    def enterServerIdList(self, ctx:SQLParser.ServerIdListContext):
        pass
    # Exit a parse tree produced by SQLParser#serverIdList.
    def exitServerIdList(self, ctx:SQLParser.ServerIdListContext):
        pass
    # Enter a parse tree produced by SQLParser#changeReplication.
    def enterChangeReplication(self, ctx:SQLParser.ChangeReplicationContext):
        pass
    # Exit a parse tree produced by SQLParser#changeReplication.
    def exitChangeReplication(self, ctx:SQLParser.ChangeReplicationContext):
        pass
    # Enter a parse tree produced by SQLParser#filterDefinition.
    def enterFilterDefinition(self, ctx:SQLParser.FilterDefinitionContext):
        pass
    # Exit a parse tree produced by SQLParser#filterDefinition.
    def exitFilterDefinition(self, ctx:SQLParser.FilterDefinitionContext):
        pass
    # Enter a parse tree produced by SQLParser#filterDbList.
    def enterFilterDbList(self, ctx:SQLParser.FilterDbListContext):
        pass
    # Exit a parse tree produced by SQLParser#filterDbList.
    def exitFilterDbList(self, ctx:SQLParser.FilterDbListContext):
        pass
    # Enter a parse tree produced by SQLParser#filterTableList.
    def enterFilterTableList(self, ctx:SQLParser.FilterTableListContext):
        pass
    # Exit a parse tree produced by SQLParser#filterTableList.
    def exitFilterTableList(self, ctx:SQLParser.FilterTableListContext):
        pass
    # Enter a parse tree produced by SQLParser#filterStringList.
    def enterFilterStringList(self, ctx:SQLParser.FilterStringListContext):
        pass
    # Exit a parse tree produced by SQLParser#filterStringList.
    def exitFilterStringList(self, ctx:SQLParser.FilterStringListContext):
        pass
    # Enter a parse tree produced by SQLParser#filterWildDbTableString.
    def enterFilterWildDbTableString(self, ctx:SQLParser.FilterWildDbTableStringContext):
        pass
    # Exit a parse tree produced by SQLParser#filterWildDbTableString.
    def exitFilterWildDbTableString(self, ctx:SQLParser.FilterWildDbTableStringContext):
        pass
    # Enter a parse tree produced by SQLParser#filterDbPairList.
    def enterFilterDbPairList(self, ctx:SQLParser.FilterDbPairListContext):
        pass
    # Exit a parse tree produced by SQLParser#filterDbPairList.
    def exitFilterDbPairList(self, ctx:SQLParser.FilterDbPairListContext):
        pass
    # Enter a parse tree produced by SQLParser#slave.
    def enterSlave(self, ctx:SQLParser.SlaveContext):
        pass
    # Exit a parse tree produced by SQLParser#slave.
    def exitSlave(self, ctx:SQLParser.SlaveContext):
        pass
    # Enter a parse tree produced by SQLParser#slaveUntilOptions.
    def enterSlaveUntilOptions(self, ctx:SQLParser.SlaveUntilOptionsContext):
        pass
    # Exit a parse tree produced by SQLParser#slaveUntilOptions.
    def exitSlaveUntilOptions(self, ctx:SQLParser.SlaveUntilOptionsContext):
        pass
    # Enter a parse tree produced by SQLParser#slaveConnectionOptions.
    def enterSlaveConnectionOptions(self, ctx:SQLParser.SlaveConnectionOptionsContext):
        pass
    # Exit a parse tree produced by SQLParser#slaveConnectionOptions.
    def exitSlaveConnectionOptions(self, ctx:SQLParser.SlaveConnectionOptionsContext):
        pass
    # Enter a parse tree produced by SQLParser#slaveThreadOptions.
    def enterSlaveThreadOptions(self, ctx:SQLParser.SlaveThreadOptionsContext):
        pass
    # Exit a parse tree produced by SQLParser#slaveThreadOptions.
    def exitSlaveThreadOptions(self, ctx:SQLParser.SlaveThreadOptionsContext):
        pass
    # Enter a parse tree produced by SQLParser#slaveThreadOption.
    def enterSlaveThreadOption(self, ctx:SQLParser.SlaveThreadOptionContext):
        pass
    # Exit a parse tree produced by SQLParser#slaveThreadOption.
    def exitSlaveThreadOption(self, ctx:SQLParser.SlaveThreadOptionContext):
        pass
    # Enter a parse tree produced by SQLParser#groupReplication.
    def enterGroupReplication(self, ctx:SQLParser.GroupReplicationContext):
        pass
    # Exit a parse tree produced by SQLParser#groupReplication.
    def exitGroupReplication(self, ctx:SQLParser.GroupReplicationContext):
        pass
    # NOTE(review): auto-generated ANTLR listener stubs for prepared-statement, clone
    # and SSL rules. No-ops by design; override in a subclass, do not hand-edit.
    # Enter a parse tree produced by SQLParser#preparedStatement.
    def enterPreparedStatement(self, ctx:SQLParser.PreparedStatementContext):
        pass
    # Exit a parse tree produced by SQLParser#preparedStatement.
    def exitPreparedStatement(self, ctx:SQLParser.PreparedStatementContext):
        pass
    # Enter a parse tree produced by SQLParser#executeStatement.
    def enterExecuteStatement(self, ctx:SQLParser.ExecuteStatementContext):
        pass
    # Exit a parse tree produced by SQLParser#executeStatement.
    def exitExecuteStatement(self, ctx:SQLParser.ExecuteStatementContext):
        pass
    # Enter a parse tree produced by SQLParser#executeVarList.
    def enterExecuteVarList(self, ctx:SQLParser.ExecuteVarListContext):
        pass
    # Exit a parse tree produced by SQLParser#executeVarList.
    def exitExecuteVarList(self, ctx:SQLParser.ExecuteVarListContext):
        pass
    # Enter a parse tree produced by SQLParser#cloneStatement.
    def enterCloneStatement(self, ctx:SQLParser.CloneStatementContext):
        pass
    # Exit a parse tree produced by SQLParser#cloneStatement.
    def exitCloneStatement(self, ctx:SQLParser.CloneStatementContext):
        pass
    # Enter a parse tree produced by SQLParser#dataDirSSL.
    def enterDataDirSSL(self, ctx:SQLParser.DataDirSSLContext):
        pass
    # Exit a parse tree produced by SQLParser#dataDirSSL.
    def exitDataDirSSL(self, ctx:SQLParser.DataDirSSLContext):
        pass
    # Enter a parse tree produced by SQLParser#ssl.
    def enterSsl(self, ctx:SQLParser.SslContext):
        pass
    # Exit a parse tree produced by SQLParser#ssl.
    def exitSsl(self, ctx:SQLParser.SslContext):
        pass
    # NOTE(review): auto-generated ANTLR listener stubs for account-management rules
    # (users, grants, revokes, roles). No-ops by design; override in a subclass, do
    # not hand-edit.
    # Enter a parse tree produced by SQLParser#accountManagementStatement.
    def enterAccountManagementStatement(self, ctx:SQLParser.AccountManagementStatementContext):
        pass
    # Exit a parse tree produced by SQLParser#accountManagementStatement.
    def exitAccountManagementStatement(self, ctx:SQLParser.AccountManagementStatementContext):
        pass
    # Enter a parse tree produced by SQLParser#alterUser.
    def enterAlterUser(self, ctx:SQLParser.AlterUserContext):
        pass
    # Exit a parse tree produced by SQLParser#alterUser.
    def exitAlterUser(self, ctx:SQLParser.AlterUserContext):
        pass
    # Enter a parse tree produced by SQLParser#alterUserTail.
    def enterAlterUserTail(self, ctx:SQLParser.AlterUserTailContext):
        pass
    # Exit a parse tree produced by SQLParser#alterUserTail.
    def exitAlterUserTail(self, ctx:SQLParser.AlterUserTailContext):
        pass
    # Enter a parse tree produced by SQLParser#userFunction.
    def enterUserFunction(self, ctx:SQLParser.UserFunctionContext):
        pass
    # Exit a parse tree produced by SQLParser#userFunction.
    def exitUserFunction(self, ctx:SQLParser.UserFunctionContext):
        pass
    # Enter a parse tree produced by SQLParser#createUser.
    def enterCreateUser(self, ctx:SQLParser.CreateUserContext):
        pass
    # Exit a parse tree produced by SQLParser#createUser.
    def exitCreateUser(self, ctx:SQLParser.CreateUserContext):
        pass
    # Enter a parse tree produced by SQLParser#createUserTail.
    def enterCreateUserTail(self, ctx:SQLParser.CreateUserTailContext):
        pass
    # Exit a parse tree produced by SQLParser#createUserTail.
    def exitCreateUserTail(self, ctx:SQLParser.CreateUserTailContext):
        pass
    # Enter a parse tree produced by SQLParser#defaultRoleClause.
    def enterDefaultRoleClause(self, ctx:SQLParser.DefaultRoleClauseContext):
        pass
    # Exit a parse tree produced by SQLParser#defaultRoleClause.
    def exitDefaultRoleClause(self, ctx:SQLParser.DefaultRoleClauseContext):
        pass
    # Enter a parse tree produced by SQLParser#requireClause.
    def enterRequireClause(self, ctx:SQLParser.RequireClauseContext):
        pass
    # Exit a parse tree produced by SQLParser#requireClause.
    def exitRequireClause(self, ctx:SQLParser.RequireClauseContext):
        pass
    # Enter a parse tree produced by SQLParser#connectOptions.
    def enterConnectOptions(self, ctx:SQLParser.ConnectOptionsContext):
        pass
    # Exit a parse tree produced by SQLParser#connectOptions.
    def exitConnectOptions(self, ctx:SQLParser.ConnectOptionsContext):
        pass
    # Enter a parse tree produced by SQLParser#accountLockPasswordExpireOptions.
    def enterAccountLockPasswordExpireOptions(self, ctx:SQLParser.AccountLockPasswordExpireOptionsContext):
        pass
    # Exit a parse tree produced by SQLParser#accountLockPasswordExpireOptions.
    def exitAccountLockPasswordExpireOptions(self, ctx:SQLParser.AccountLockPasswordExpireOptionsContext):
        pass
    # Enter a parse tree produced by SQLParser#dropUser.
    def enterDropUser(self, ctx:SQLParser.DropUserContext):
        pass
    # Exit a parse tree produced by SQLParser#dropUser.
    def exitDropUser(self, ctx:SQLParser.DropUserContext):
        pass
    # Enter a parse tree produced by SQLParser#grant.
    def enterGrant(self, ctx:SQLParser.GrantContext):
        pass
    # Exit a parse tree produced by SQLParser#grant.
    def exitGrant(self, ctx:SQLParser.GrantContext):
        pass
    # Enter a parse tree produced by SQLParser#grantTargetList.
    def enterGrantTargetList(self, ctx:SQLParser.GrantTargetListContext):
        pass
    # Exit a parse tree produced by SQLParser#grantTargetList.
    def exitGrantTargetList(self, ctx:SQLParser.GrantTargetListContext):
        pass
    # Enter a parse tree produced by SQLParser#grantOptions.
    def enterGrantOptions(self, ctx:SQLParser.GrantOptionsContext):
        pass
    # Exit a parse tree produced by SQLParser#grantOptions.
    def exitGrantOptions(self, ctx:SQLParser.GrantOptionsContext):
        pass
    # Enter a parse tree produced by SQLParser#exceptRoleList.
    def enterExceptRoleList(self, ctx:SQLParser.ExceptRoleListContext):
        pass
    # Exit a parse tree produced by SQLParser#exceptRoleList.
    def exitExceptRoleList(self, ctx:SQLParser.ExceptRoleListContext):
        pass
    # Enter a parse tree produced by SQLParser#withRoles.
    def enterWithRoles(self, ctx:SQLParser.WithRolesContext):
        pass
    # Exit a parse tree produced by SQLParser#withRoles.
    def exitWithRoles(self, ctx:SQLParser.WithRolesContext):
        pass
    # Enter a parse tree produced by SQLParser#grantAs.
    def enterGrantAs(self, ctx:SQLParser.GrantAsContext):
        pass
    # Exit a parse tree produced by SQLParser#grantAs.
    def exitGrantAs(self, ctx:SQLParser.GrantAsContext):
        pass
    # Enter a parse tree produced by SQLParser#versionedRequireClause.
    def enterVersionedRequireClause(self, ctx:SQLParser.VersionedRequireClauseContext):
        pass
    # Exit a parse tree produced by SQLParser#versionedRequireClause.
    def exitVersionedRequireClause(self, ctx:SQLParser.VersionedRequireClauseContext):
        pass
    # Enter a parse tree produced by SQLParser#renameUser.
    def enterRenameUser(self, ctx:SQLParser.RenameUserContext):
        pass
    # Exit a parse tree produced by SQLParser#renameUser.
    def exitRenameUser(self, ctx:SQLParser.RenameUserContext):
        pass
    # Enter a parse tree produced by SQLParser#revoke.
    def enterRevoke(self, ctx:SQLParser.RevokeContext):
        pass
    # Exit a parse tree produced by SQLParser#revoke.
    def exitRevoke(self, ctx:SQLParser.RevokeContext):
        pass
    # Enter a parse tree produced by SQLParser#onTypeTo.
    def enterOnTypeTo(self, ctx:SQLParser.OnTypeToContext):
        pass
    # Exit a parse tree produced by SQLParser#onTypeTo.
    def exitOnTypeTo(self, ctx:SQLParser.OnTypeToContext):
        pass
    # Enter a parse tree produced by SQLParser#aclType.
    def enterAclType(self, ctx:SQLParser.AclTypeContext):
        pass
    # Exit a parse tree produced by SQLParser#aclType.
    def exitAclType(self, ctx:SQLParser.AclTypeContext):
        pass
    # Enter a parse tree produced by SQLParser#roleOrPrivilegesList.
    def enterRoleOrPrivilegesList(self, ctx:SQLParser.RoleOrPrivilegesListContext):
        pass
    # Exit a parse tree produced by SQLParser#roleOrPrivilegesList.
    def exitRoleOrPrivilegesList(self, ctx:SQLParser.RoleOrPrivilegesListContext):
        pass
    # Enter a parse tree produced by SQLParser#roleOrPrivilege.
    def enterRoleOrPrivilege(self, ctx:SQLParser.RoleOrPrivilegeContext):
        pass
    # Exit a parse tree produced by SQLParser#roleOrPrivilege.
    def exitRoleOrPrivilege(self, ctx:SQLParser.RoleOrPrivilegeContext):
        pass
    # Enter a parse tree produced by SQLParser#grantIdentifier.
    def enterGrantIdentifier(self, ctx:SQLParser.GrantIdentifierContext):
        pass
    # Exit a parse tree produced by SQLParser#grantIdentifier.
    def exitGrantIdentifier(self, ctx:SQLParser.GrantIdentifierContext):
        pass
    # Enter a parse tree produced by SQLParser#requireList.
    def enterRequireList(self, ctx:SQLParser.RequireListContext):
        pass
    # Exit a parse tree produced by SQLParser#requireList.
    def exitRequireList(self, ctx:SQLParser.RequireListContext):
        pass
    # Enter a parse tree produced by SQLParser#requireListElement.
    def enterRequireListElement(self, ctx:SQLParser.RequireListElementContext):
        pass
    # Exit a parse tree produced by SQLParser#requireListElement.
    def exitRequireListElement(self, ctx:SQLParser.RequireListElementContext):
        pass
    # Enter a parse tree produced by SQLParser#grantOption.
    def enterGrantOption(self, ctx:SQLParser.GrantOptionContext):
        pass
    # Exit a parse tree produced by SQLParser#grantOption.
    def exitGrantOption(self, ctx:SQLParser.GrantOptionContext):
        pass
    # Enter a parse tree produced by SQLParser#setRole.
    def enterSetRole(self, ctx:SQLParser.SetRoleContext):
        pass
    # Exit a parse tree produced by SQLParser#setRole.
    def exitSetRole(self, ctx:SQLParser.SetRoleContext):
        pass
    # Enter a parse tree produced by SQLParser#roleList.
    def enterRoleList(self, ctx:SQLParser.RoleListContext):
        pass
    # Exit a parse tree produced by SQLParser#roleList.
    def exitRoleList(self, ctx:SQLParser.RoleListContext):
        pass
    # Enter a parse tree produced by SQLParser#role.
    def enterRole(self, ctx:SQLParser.RoleContext):
        pass
    # Exit a parse tree produced by SQLParser#role.
    def exitRole(self, ctx:SQLParser.RoleContext):
        pass
# --- ANTLR-generated listener stubs: table administration / plugin / SET rules. ---
# No-op callbacks; override in a subclass to act on these parse-tree nodes.
# Enter a parse tree produced by SQLParser#tableAdministrationStatement.
def enterTableAdministrationStatement(self, ctx:SQLParser.TableAdministrationStatementContext):
    pass

# Exit a parse tree produced by SQLParser#tableAdministrationStatement.
def exitTableAdministrationStatement(self, ctx:SQLParser.TableAdministrationStatementContext):
    pass

# Enter a parse tree produced by SQLParser#histogram.
def enterHistogram(self, ctx:SQLParser.HistogramContext):
    pass

# Exit a parse tree produced by SQLParser#histogram.
def exitHistogram(self, ctx:SQLParser.HistogramContext):
    pass

# Enter a parse tree produced by SQLParser#checkOption.
def enterCheckOption(self, ctx:SQLParser.CheckOptionContext):
    pass

# Exit a parse tree produced by SQLParser#checkOption.
def exitCheckOption(self, ctx:SQLParser.CheckOptionContext):
    pass

# Enter a parse tree produced by SQLParser#repairType.
def enterRepairType(self, ctx:SQLParser.RepairTypeContext):
    pass

# Exit a parse tree produced by SQLParser#repairType.
def exitRepairType(self, ctx:SQLParser.RepairTypeContext):
    pass

# Enter a parse tree produced by SQLParser#installUninstallStatment.
def enterInstallUninstallStatment(self, ctx:SQLParser.InstallUninstallStatmentContext):
    pass

# Exit a parse tree produced by SQLParser#installUninstallStatment.
def exitInstallUninstallStatment(self, ctx:SQLParser.InstallUninstallStatmentContext):
    pass

# Enter a parse tree produced by SQLParser#setStatement.
def enterSetStatement(self, ctx:SQLParser.SetStatementContext):
    pass

# Exit a parse tree produced by SQLParser#setStatement.
def exitSetStatement(self, ctx:SQLParser.SetStatementContext):
    pass
# --- ANTLR-generated listener stubs: SET option-value and transaction rules. ---
# No-op callbacks; override in a subclass to act on these parse-tree nodes.
# Enter a parse tree produced by SQLParser#startOptionValueList.
def enterStartOptionValueList(self, ctx:SQLParser.StartOptionValueListContext):
    pass

# Exit a parse tree produced by SQLParser#startOptionValueList.
def exitStartOptionValueList(self, ctx:SQLParser.StartOptionValueListContext):
    pass

# Enter a parse tree produced by SQLParser#transactionCharacteristics.
def enterTransactionCharacteristics(self, ctx:SQLParser.TransactionCharacteristicsContext):
    pass

# Exit a parse tree produced by SQLParser#transactionCharacteristics.
def exitTransactionCharacteristics(self, ctx:SQLParser.TransactionCharacteristicsContext):
    pass

# Enter a parse tree produced by SQLParser#transactionAccessMode.
def enterTransactionAccessMode(self, ctx:SQLParser.TransactionAccessModeContext):
    pass

# Exit a parse tree produced by SQLParser#transactionAccessMode.
def exitTransactionAccessMode(self, ctx:SQLParser.TransactionAccessModeContext):
    pass

# Enter a parse tree produced by SQLParser#isolationLevel.
def enterIsolationLevel(self, ctx:SQLParser.IsolationLevelContext):
    pass

# Exit a parse tree produced by SQLParser#isolationLevel.
def exitIsolationLevel(self, ctx:SQLParser.IsolationLevelContext):
    pass

# Enter a parse tree produced by SQLParser#optionValueListContinued.
def enterOptionValueListContinued(self, ctx:SQLParser.OptionValueListContinuedContext):
    pass

# Exit a parse tree produced by SQLParser#optionValueListContinued.
def exitOptionValueListContinued(self, ctx:SQLParser.OptionValueListContinuedContext):
    pass

# Enter a parse tree produced by SQLParser#optionValueNoOptionType.
def enterOptionValueNoOptionType(self, ctx:SQLParser.OptionValueNoOptionTypeContext):
    pass

# Exit a parse tree produced by SQLParser#optionValueNoOptionType.
def exitOptionValueNoOptionType(self, ctx:SQLParser.OptionValueNoOptionTypeContext):
    pass

# Enter a parse tree produced by SQLParser#optionValue.
def enterOptionValue(self, ctx:SQLParser.OptionValueContext):
    pass

# Exit a parse tree produced by SQLParser#optionValue.
def exitOptionValue(self, ctx:SQLParser.OptionValueContext):
    pass

# Enter a parse tree produced by SQLParser#setSystemVariable.
def enterSetSystemVariable(self, ctx:SQLParser.SetSystemVariableContext):
    pass

# Exit a parse tree produced by SQLParser#setSystemVariable.
def exitSetSystemVariable(self, ctx:SQLParser.SetSystemVariableContext):
    pass

# Enter a parse tree produced by SQLParser#startOptionValueListFollowingOptionType.
def enterStartOptionValueListFollowingOptionType(self, ctx:SQLParser.StartOptionValueListFollowingOptionTypeContext):
    pass

# Exit a parse tree produced by SQLParser#startOptionValueListFollowingOptionType.
def exitStartOptionValueListFollowingOptionType(self, ctx:SQLParser.StartOptionValueListFollowingOptionTypeContext):
    pass

# Enter a parse tree produced by SQLParser#optionValueFollowingOptionType.
def enterOptionValueFollowingOptionType(self, ctx:SQLParser.OptionValueFollowingOptionTypeContext):
    pass

# Exit a parse tree produced by SQLParser#optionValueFollowingOptionType.
def exitOptionValueFollowingOptionType(self, ctx:SQLParser.OptionValueFollowingOptionTypeContext):
    pass

# Enter a parse tree produced by SQLParser#setExprOrDefault.
def enterSetExprOrDefault(self, ctx:SQLParser.SetExprOrDefaultContext):
    pass

# Exit a parse tree produced by SQLParser#setExprOrDefault.
def exitSetExprOrDefault(self, ctx:SQLParser.SetExprOrDefaultContext):
    pass

# Enter a parse tree produced by SQLParser#showStatement.
def enterShowStatement(self, ctx:SQLParser.ShowStatementContext):
    pass

# Exit a parse tree produced by SQLParser#showStatement.
def exitShowStatement(self, ctx:SQLParser.ShowStatementContext):
    pass
# --- ANTLR-generated listener stubs: SHOW, key-cache, FLUSH and preload rules. ---
# No-op callbacks; override in a subclass to act on these parse-tree nodes.
# Enter a parse tree produced by SQLParser#showCommandType.
def enterShowCommandType(self, ctx:SQLParser.ShowCommandTypeContext):
    pass

# Exit a parse tree produced by SQLParser#showCommandType.
def exitShowCommandType(self, ctx:SQLParser.ShowCommandTypeContext):
    pass

# Enter a parse tree produced by SQLParser#nonBlocking.
def enterNonBlocking(self, ctx:SQLParser.NonBlockingContext):
    pass

# Exit a parse tree produced by SQLParser#nonBlocking.
def exitNonBlocking(self, ctx:SQLParser.NonBlockingContext):
    pass

# Enter a parse tree produced by SQLParser#fromOrIn.
def enterFromOrIn(self, ctx:SQLParser.FromOrInContext):
    pass

# Exit a parse tree produced by SQLParser#fromOrIn.
def exitFromOrIn(self, ctx:SQLParser.FromOrInContext):
    pass

# Enter a parse tree produced by SQLParser#inDb.
def enterInDb(self, ctx:SQLParser.InDbContext):
    pass

# Exit a parse tree produced by SQLParser#inDb.
def exitInDb(self, ctx:SQLParser.InDbContext):
    pass

# Enter a parse tree produced by SQLParser#profileType.
def enterProfileType(self, ctx:SQLParser.ProfileTypeContext):
    pass

# Exit a parse tree produced by SQLParser#profileType.
def exitProfileType(self, ctx:SQLParser.ProfileTypeContext):
    pass

# Enter a parse tree produced by SQLParser#otherAdministrativeStatement.
def enterOtherAdministrativeStatement(self, ctx:SQLParser.OtherAdministrativeStatementContext):
    pass

# Exit a parse tree produced by SQLParser#otherAdministrativeStatement.
def exitOtherAdministrativeStatement(self, ctx:SQLParser.OtherAdministrativeStatementContext):
    pass

# Enter a parse tree produced by SQLParser#keyCacheListOrParts.
def enterKeyCacheListOrParts(self, ctx:SQLParser.KeyCacheListOrPartsContext):
    pass

# Exit a parse tree produced by SQLParser#keyCacheListOrParts.
def exitKeyCacheListOrParts(self, ctx:SQLParser.KeyCacheListOrPartsContext):
    pass

# Enter a parse tree produced by SQLParser#keyCacheList.
def enterKeyCacheList(self, ctx:SQLParser.KeyCacheListContext):
    pass

# Exit a parse tree produced by SQLParser#keyCacheList.
def exitKeyCacheList(self, ctx:SQLParser.KeyCacheListContext):
    pass

# Enter a parse tree produced by SQLParser#assignToKeycache.
def enterAssignToKeycache(self, ctx:SQLParser.AssignToKeycacheContext):
    pass

# Exit a parse tree produced by SQLParser#assignToKeycache.
def exitAssignToKeycache(self, ctx:SQLParser.AssignToKeycacheContext):
    pass

# Enter a parse tree produced by SQLParser#assignToKeycachePartition.
def enterAssignToKeycachePartition(self, ctx:SQLParser.AssignToKeycachePartitionContext):
    pass

# Exit a parse tree produced by SQLParser#assignToKeycachePartition.
def exitAssignToKeycachePartition(self, ctx:SQLParser.AssignToKeycachePartitionContext):
    pass

# Enter a parse tree produced by SQLParser#cacheKeyList.
def enterCacheKeyList(self, ctx:SQLParser.CacheKeyListContext):
    pass

# Exit a parse tree produced by SQLParser#cacheKeyList.
def exitCacheKeyList(self, ctx:SQLParser.CacheKeyListContext):
    pass

# Enter a parse tree produced by SQLParser#keyUsageElement.
def enterKeyUsageElement(self, ctx:SQLParser.KeyUsageElementContext):
    pass

# Exit a parse tree produced by SQLParser#keyUsageElement.
def exitKeyUsageElement(self, ctx:SQLParser.KeyUsageElementContext):
    pass

# Enter a parse tree produced by SQLParser#keyUsageList.
def enterKeyUsageList(self, ctx:SQLParser.KeyUsageListContext):
    pass

# Exit a parse tree produced by SQLParser#keyUsageList.
def exitKeyUsageList(self, ctx:SQLParser.KeyUsageListContext):
    pass

# Enter a parse tree produced by SQLParser#flushOption.
def enterFlushOption(self, ctx:SQLParser.FlushOptionContext):
    pass

# Exit a parse tree produced by SQLParser#flushOption.
def exitFlushOption(self, ctx:SQLParser.FlushOptionContext):
    pass

# Enter a parse tree produced by SQLParser#logType.
def enterLogType(self, ctx:SQLParser.LogTypeContext):
    pass

# Exit a parse tree produced by SQLParser#logType.
def exitLogType(self, ctx:SQLParser.LogTypeContext):
    pass

# Enter a parse tree produced by SQLParser#flushTables.
def enterFlushTables(self, ctx:SQLParser.FlushTablesContext):
    pass

# Exit a parse tree produced by SQLParser#flushTables.
def exitFlushTables(self, ctx:SQLParser.FlushTablesContext):
    pass

# Enter a parse tree produced by SQLParser#flushTablesOptions.
def enterFlushTablesOptions(self, ctx:SQLParser.FlushTablesOptionsContext):
    pass

# Exit a parse tree produced by SQLParser#flushTablesOptions.
def exitFlushTablesOptions(self, ctx:SQLParser.FlushTablesOptionsContext):
    pass

# Enter a parse tree produced by SQLParser#preloadTail.
def enterPreloadTail(self, ctx:SQLParser.PreloadTailContext):
    pass

# Exit a parse tree produced by SQLParser#preloadTail.
def exitPreloadTail(self, ctx:SQLParser.PreloadTailContext):
    pass

# Enter a parse tree produced by SQLParser#preloadList.
def enterPreloadList(self, ctx:SQLParser.PreloadListContext):
    pass

# Exit a parse tree produced by SQLParser#preloadList.
def exitPreloadList(self, ctx:SQLParser.PreloadListContext):
    pass

# Enter a parse tree produced by SQLParser#preloadKeys.
def enterPreloadKeys(self, ctx:SQLParser.PreloadKeysContext):
    pass

# Exit a parse tree produced by SQLParser#preloadKeys.
def exitPreloadKeys(self, ctx:SQLParser.PreloadKeysContext):
    pass
# --- ANTLR-generated listener stubs: resource-group and utility statement rules. ---
# No-op callbacks; override in a subclass to act on these parse-tree nodes.
# Enter a parse tree produced by SQLParser#adminPartition.
def enterAdminPartition(self, ctx:SQLParser.AdminPartitionContext):
    pass

# Exit a parse tree produced by SQLParser#adminPartition.
def exitAdminPartition(self, ctx:SQLParser.AdminPartitionContext):
    pass

# Enter a parse tree produced by SQLParser#resourceGroupManagement.
def enterResourceGroupManagement(self, ctx:SQLParser.ResourceGroupManagementContext):
    pass

# Exit a parse tree produced by SQLParser#resourceGroupManagement.
def exitResourceGroupManagement(self, ctx:SQLParser.ResourceGroupManagementContext):
    pass

# Enter a parse tree produced by SQLParser#createResourceGroup.
def enterCreateResourceGroup(self, ctx:SQLParser.CreateResourceGroupContext):
    pass

# Exit a parse tree produced by SQLParser#createResourceGroup.
def exitCreateResourceGroup(self, ctx:SQLParser.CreateResourceGroupContext):
    pass

# Enter a parse tree produced by SQLParser#resourceGroupVcpuList.
def enterResourceGroupVcpuList(self, ctx:SQLParser.ResourceGroupVcpuListContext):
    pass

# Exit a parse tree produced by SQLParser#resourceGroupVcpuList.
def exitResourceGroupVcpuList(self, ctx:SQLParser.ResourceGroupVcpuListContext):
    pass

# Enter a parse tree produced by SQLParser#vcpuNumOrRange.
def enterVcpuNumOrRange(self, ctx:SQLParser.VcpuNumOrRangeContext):
    pass

# Exit a parse tree produced by SQLParser#vcpuNumOrRange.
def exitVcpuNumOrRange(self, ctx:SQLParser.VcpuNumOrRangeContext):
    pass

# Enter a parse tree produced by SQLParser#resourceGroupPriority.
def enterResourceGroupPriority(self, ctx:SQLParser.ResourceGroupPriorityContext):
    pass

# Exit a parse tree produced by SQLParser#resourceGroupPriority.
def exitResourceGroupPriority(self, ctx:SQLParser.ResourceGroupPriorityContext):
    pass

# Enter a parse tree produced by SQLParser#resourceGroupEnableDisable.
def enterResourceGroupEnableDisable(self, ctx:SQLParser.ResourceGroupEnableDisableContext):
    pass

# Exit a parse tree produced by SQLParser#resourceGroupEnableDisable.
def exitResourceGroupEnableDisable(self, ctx:SQLParser.ResourceGroupEnableDisableContext):
    pass

# Enter a parse tree produced by SQLParser#alterResourceGroup.
def enterAlterResourceGroup(self, ctx:SQLParser.AlterResourceGroupContext):
    pass

# Exit a parse tree produced by SQLParser#alterResourceGroup.
def exitAlterResourceGroup(self, ctx:SQLParser.AlterResourceGroupContext):
    pass

# Enter a parse tree produced by SQLParser#setResourceGroup.
def enterSetResourceGroup(self, ctx:SQLParser.SetResourceGroupContext):
    pass

# Exit a parse tree produced by SQLParser#setResourceGroup.
def exitSetResourceGroup(self, ctx:SQLParser.SetResourceGroupContext):
    pass

# Enter a parse tree produced by SQLParser#threadIdList.
def enterThreadIdList(self, ctx:SQLParser.ThreadIdListContext):
    pass

# Exit a parse tree produced by SQLParser#threadIdList.
def exitThreadIdList(self, ctx:SQLParser.ThreadIdListContext):
    pass

# Enter a parse tree produced by SQLParser#dropResourceGroup.
def enterDropResourceGroup(self, ctx:SQLParser.DropResourceGroupContext):
    pass

# Exit a parse tree produced by SQLParser#dropResourceGroup.
def exitDropResourceGroup(self, ctx:SQLParser.DropResourceGroupContext):
    pass

# Enter a parse tree produced by SQLParser#utilityStatement.
def enterUtilityStatement(self, ctx:SQLParser.UtilityStatementContext):
    pass

# Exit a parse tree produced by SQLParser#utilityStatement.
def exitUtilityStatement(self, ctx:SQLParser.UtilityStatementContext):
    pass

# Enter a parse tree produced by SQLParser#describeStatement.
def enterDescribeStatement(self, ctx:SQLParser.DescribeStatementContext):
    pass

# Exit a parse tree produced by SQLParser#describeStatement.
def exitDescribeStatement(self, ctx:SQLParser.DescribeStatementContext):
    pass

# Enter a parse tree produced by SQLParser#explainStatement.
def enterExplainStatement(self, ctx:SQLParser.ExplainStatementContext):
    pass

# Exit a parse tree produced by SQLParser#explainStatement.
def exitExplainStatement(self, ctx:SQLParser.ExplainStatementContext):
    pass

# Enter a parse tree produced by SQLParser#explainableStatement.
def enterExplainableStatement(self, ctx:SQLParser.ExplainableStatementContext):
    pass

# Exit a parse tree produced by SQLParser#explainableStatement.
def exitExplainableStatement(self, ctx:SQLParser.ExplainableStatementContext):
    pass

# Enter a parse tree produced by SQLParser#helpCommand.
def enterHelpCommand(self, ctx:SQLParser.HelpCommandContext):
    pass

# Exit a parse tree produced by SQLParser#helpCommand.
def exitHelpCommand(self, ctx:SQLParser.HelpCommandContext):
    pass

# Enter a parse tree produced by SQLParser#useCommand.
def enterUseCommand(self, ctx:SQLParser.UseCommandContext):
    pass

# Exit a parse tree produced by SQLParser#useCommand.
def exitUseCommand(self, ctx:SQLParser.UseCommandContext):
    pass

# Enter a parse tree produced by SQLParser#restartServer.
def enterRestartServer(self, ctx:SQLParser.RestartServerContext):
    pass

# Exit a parse tree produced by SQLParser#restartServer.
def exitRestartServer(self, ctx:SQLParser.RestartServerContext):
    pass
# --- ANTLR-generated listener stubs: boolean/comparison expression and predicate rules. ---
# No-op callbacks; override in a subclass to act on these parse-tree nodes.
# Enter a parse tree produced by SQLParser#exprOr.
def enterExprOr(self, ctx:SQLParser.ExprOrContext):
    pass

# Exit a parse tree produced by SQLParser#exprOr.
def exitExprOr(self, ctx:SQLParser.ExprOrContext):
    pass

# Enter a parse tree produced by SQLParser#exprNot.
def enterExprNot(self, ctx:SQLParser.ExprNotContext):
    pass

# Exit a parse tree produced by SQLParser#exprNot.
def exitExprNot(self, ctx:SQLParser.ExprNotContext):
    pass

# Enter a parse tree produced by SQLParser#exprIs.
def enterExprIs(self, ctx:SQLParser.ExprIsContext):
    pass

# Exit a parse tree produced by SQLParser#exprIs.
def exitExprIs(self, ctx:SQLParser.ExprIsContext):
    pass

# Enter a parse tree produced by SQLParser#exprAnd.
def enterExprAnd(self, ctx:SQLParser.ExprAndContext):
    pass

# Exit a parse tree produced by SQLParser#exprAnd.
def exitExprAnd(self, ctx:SQLParser.ExprAndContext):
    pass

# Enter a parse tree produced by SQLParser#exprXor.
def enterExprXor(self, ctx:SQLParser.ExprXorContext):
    pass

# Exit a parse tree produced by SQLParser#exprXor.
def exitExprXor(self, ctx:SQLParser.ExprXorContext):
    pass

# Enter a parse tree produced by SQLParser#primaryExprPredicate.
def enterPrimaryExprPredicate(self, ctx:SQLParser.PrimaryExprPredicateContext):
    pass

# Exit a parse tree produced by SQLParser#primaryExprPredicate.
def exitPrimaryExprPredicate(self, ctx:SQLParser.PrimaryExprPredicateContext):
    pass

# Enter a parse tree produced by SQLParser#primaryExprCompare.
def enterPrimaryExprCompare(self, ctx:SQLParser.PrimaryExprCompareContext):
    pass

# Exit a parse tree produced by SQLParser#primaryExprCompare.
def exitPrimaryExprCompare(self, ctx:SQLParser.PrimaryExprCompareContext):
    pass

# Enter a parse tree produced by SQLParser#primaryExprAllAny.
def enterPrimaryExprAllAny(self, ctx:SQLParser.PrimaryExprAllAnyContext):
    pass

# Exit a parse tree produced by SQLParser#primaryExprAllAny.
def exitPrimaryExprAllAny(self, ctx:SQLParser.PrimaryExprAllAnyContext):
    pass

# Enter a parse tree produced by SQLParser#primaryExprIsNull.
def enterPrimaryExprIsNull(self, ctx:SQLParser.PrimaryExprIsNullContext):
    pass

# Exit a parse tree produced by SQLParser#primaryExprIsNull.
def exitPrimaryExprIsNull(self, ctx:SQLParser.PrimaryExprIsNullContext):
    pass

# Enter a parse tree produced by SQLParser#compOp.
def enterCompOp(self, ctx:SQLParser.CompOpContext):
    pass

# Exit a parse tree produced by SQLParser#compOp.
def exitCompOp(self, ctx:SQLParser.CompOpContext):
    pass

# Enter a parse tree produced by SQLParser#predicate.
def enterPredicate(self, ctx:SQLParser.PredicateContext):
    pass

# Exit a parse tree produced by SQLParser#predicate.
def exitPredicate(self, ctx:SQLParser.PredicateContext):
    pass

# Enter a parse tree produced by SQLParser#predicateExprIn.
def enterPredicateExprIn(self, ctx:SQLParser.PredicateExprInContext):
    pass

# Exit a parse tree produced by SQLParser#predicateExprIn.
def exitPredicateExprIn(self, ctx:SQLParser.PredicateExprInContext):
    pass

# Enter a parse tree produced by SQLParser#predicateExprBetween.
def enterPredicateExprBetween(self, ctx:SQLParser.PredicateExprBetweenContext):
    pass

# Exit a parse tree produced by SQLParser#predicateExprBetween.
def exitPredicateExprBetween(self, ctx:SQLParser.PredicateExprBetweenContext):
    pass

# Enter a parse tree produced by SQLParser#predicateExprLike.
def enterPredicateExprLike(self, ctx:SQLParser.PredicateExprLikeContext):
    pass

# Exit a parse tree produced by SQLParser#predicateExprLike.
def exitPredicateExprLike(self, ctx:SQLParser.PredicateExprLikeContext):
    pass

# Enter a parse tree produced by SQLParser#predicateExprRegex.
def enterPredicateExprRegex(self, ctx:SQLParser.PredicateExprRegexContext):
    pass

# Exit a parse tree produced by SQLParser#predicateExprRegex.
def exitPredicateExprRegex(self, ctx:SQLParser.PredicateExprRegexContext):
    pass

# Enter a parse tree produced by SQLParser#bitExpr.
def enterBitExpr(self, ctx:SQLParser.BitExprContext):
    pass

# Exit a parse tree produced by SQLParser#bitExpr.
def exitBitExpr(self, ctx:SQLParser.BitExprContext):
    pass
# --- ANTLR-generated listener stubs: simpleExpr alternative rules (one pair per labeled alternative). ---
# No-op callbacks; override in a subclass to act on these parse-tree nodes.
# Enter a parse tree produced by SQLParser#simpleExprConvert.
def enterSimpleExprConvert(self, ctx:SQLParser.SimpleExprConvertContext):
    pass

# Exit a parse tree produced by SQLParser#simpleExprConvert.
def exitSimpleExprConvert(self, ctx:SQLParser.SimpleExprConvertContext):
    pass

# Enter a parse tree produced by SQLParser#simpleExprVariable.
def enterSimpleExprVariable(self, ctx:SQLParser.SimpleExprVariableContext):
    pass

# Exit a parse tree produced by SQLParser#simpleExprVariable.
def exitSimpleExprVariable(self, ctx:SQLParser.SimpleExprVariableContext):
    pass

# Enter a parse tree produced by SQLParser#simpleExprCast.
def enterSimpleExprCast(self, ctx:SQLParser.SimpleExprCastContext):
    pass

# Exit a parse tree produced by SQLParser#simpleExprCast.
def exitSimpleExprCast(self, ctx:SQLParser.SimpleExprCastContext):
    pass

# Enter a parse tree produced by SQLParser#simpleExprUnary.
def enterSimpleExprUnary(self, ctx:SQLParser.SimpleExprUnaryContext):
    pass

# Exit a parse tree produced by SQLParser#simpleExprUnary.
def exitSimpleExprUnary(self, ctx:SQLParser.SimpleExprUnaryContext):
    pass

# Enter a parse tree produced by SQLParser#simpleExprOdbc.
def enterSimpleExprOdbc(self, ctx:SQLParser.SimpleExprOdbcContext):
    pass

# Exit a parse tree produced by SQLParser#simpleExprOdbc.
def exitSimpleExprOdbc(self, ctx:SQLParser.SimpleExprOdbcContext):
    pass

# Enter a parse tree produced by SQLParser#simpleExprRuntimeFunction.
def enterSimpleExprRuntimeFunction(self, ctx:SQLParser.SimpleExprRuntimeFunctionContext):
    pass

# Exit a parse tree produced by SQLParser#simpleExprRuntimeFunction.
def exitSimpleExprRuntimeFunction(self, ctx:SQLParser.SimpleExprRuntimeFunctionContext):
    pass

# Enter a parse tree produced by SQLParser#simpleExprFunction.
def enterSimpleExprFunction(self, ctx:SQLParser.SimpleExprFunctionContext):
    pass

# Exit a parse tree produced by SQLParser#simpleExprFunction.
def exitSimpleExprFunction(self, ctx:SQLParser.SimpleExprFunctionContext):
    pass

# Enter a parse tree produced by SQLParser#simpleExprCollate.
def enterSimpleExprCollate(self, ctx:SQLParser.SimpleExprCollateContext):
    pass

# Exit a parse tree produced by SQLParser#simpleExprCollate.
def exitSimpleExprCollate(self, ctx:SQLParser.SimpleExprCollateContext):
    pass

# Enter a parse tree produced by SQLParser#simpleExprMatch.
def enterSimpleExprMatch(self, ctx:SQLParser.SimpleExprMatchContext):
    pass

# Exit a parse tree produced by SQLParser#simpleExprMatch.
def exitSimpleExprMatch(self, ctx:SQLParser.SimpleExprMatchContext):
    pass

# Enter a parse tree produced by SQLParser#simpleExprWindowingFunction.
def enterSimpleExprWindowingFunction(self, ctx:SQLParser.SimpleExprWindowingFunctionContext):
    pass

# Exit a parse tree produced by SQLParser#simpleExprWindowingFunction.
def exitSimpleExprWindowingFunction(self, ctx:SQLParser.SimpleExprWindowingFunctionContext):
    pass

# Enter a parse tree produced by SQLParser#simpleExprBinary.
def enterSimpleExprBinary(self, ctx:SQLParser.SimpleExprBinaryContext):
    pass

# Exit a parse tree produced by SQLParser#simpleExprBinary.
def exitSimpleExprBinary(self, ctx:SQLParser.SimpleExprBinaryContext):
    pass

# Enter a parse tree produced by SQLParser#simpleExprColumnRef.
def enterSimpleExprColumnRef(self, ctx:SQLParser.SimpleExprColumnRefContext):
    pass

# Exit a parse tree produced by SQLParser#simpleExprColumnRef.
def exitSimpleExprColumnRef(self, ctx:SQLParser.SimpleExprColumnRefContext):
    pass

# Enter a parse tree produced by SQLParser#simpleExprParamMarker.
def enterSimpleExprParamMarker(self, ctx:SQLParser.SimpleExprParamMarkerContext):
    pass

# Exit a parse tree produced by SQLParser#simpleExprParamMarker.
def exitSimpleExprParamMarker(self, ctx:SQLParser.SimpleExprParamMarkerContext):
    pass

# Enter a parse tree produced by SQLParser#simpleExprSum.
def enterSimpleExprSum(self, ctx:SQLParser.SimpleExprSumContext):
    pass

# Exit a parse tree produced by SQLParser#simpleExprSum.
def exitSimpleExprSum(self, ctx:SQLParser.SimpleExprSumContext):
    pass

# Enter a parse tree produced by SQLParser#simpleExprConvertUsing.
def enterSimpleExprConvertUsing(self, ctx:SQLParser.SimpleExprConvertUsingContext):
    pass

# Exit a parse tree produced by SQLParser#simpleExprConvertUsing.
def exitSimpleExprConvertUsing(self, ctx:SQLParser.SimpleExprConvertUsingContext):
    pass

# Enter a parse tree produced by SQLParser#simpleExprSubQuery.
def enterSimpleExprSubQuery(self, ctx:SQLParser.SimpleExprSubQueryContext):
    pass

# Exit a parse tree produced by SQLParser#simpleExprSubQuery.
def exitSimpleExprSubQuery(self, ctx:SQLParser.SimpleExprSubQueryContext):
    pass

# Enter a parse tree produced by SQLParser#simpleExprGroupingOperation.
def enterSimpleExprGroupingOperation(self, ctx:SQLParser.SimpleExprGroupingOperationContext):
    pass

# Exit a parse tree produced by SQLParser#simpleExprGroupingOperation.
def exitSimpleExprGroupingOperation(self, ctx:SQLParser.SimpleExprGroupingOperationContext):
    pass

# Enter a parse tree produced by SQLParser#simpleExprNot.
def enterSimpleExprNot(self, ctx:SQLParser.SimpleExprNotContext):
    pass

# Exit a parse tree produced by SQLParser#simpleExprNot.
def exitSimpleExprNot(self, ctx:SQLParser.SimpleExprNotContext):
    pass

# Enter a parse tree produced by SQLParser#simpleExprValues.
def enterSimpleExprValues(self, ctx:SQLParser.SimpleExprValuesContext):
    pass

# Exit a parse tree produced by SQLParser#simpleExprValues.
def exitSimpleExprValues(self, ctx:SQLParser.SimpleExprValuesContext):
    pass

# Enter a parse tree produced by SQLParser#simpleExprDefault.
def enterSimpleExprDefault(self, ctx:SQLParser.SimpleExprDefaultContext):
    pass

# Exit a parse tree produced by SQLParser#simpleExprDefault.
def exitSimpleExprDefault(self, ctx:SQLParser.SimpleExprDefaultContext):
    pass

# Enter a parse tree produced by SQLParser#simpleExprList.
def enterSimpleExprList(self, ctx:SQLParser.SimpleExprListContext):
    pass

# Exit a parse tree produced by SQLParser#simpleExprList.
def exitSimpleExprList(self, ctx:SQLParser.SimpleExprListContext):
    pass

# Enter a parse tree produced by SQLParser#simpleExprInterval.
def enterSimpleExprInterval(self, ctx:SQLParser.SimpleExprIntervalContext):
    pass

# Exit a parse tree produced by SQLParser#simpleExprInterval.
def exitSimpleExprInterval(self, ctx:SQLParser.SimpleExprIntervalContext):
    pass

# Enter a parse tree produced by SQLParser#simpleExprCase.
def enterSimpleExprCase(self, ctx:SQLParser.SimpleExprCaseContext):
    pass

# Exit a parse tree produced by SQLParser#simpleExprCase.
def exitSimpleExprCase(self, ctx:SQLParser.SimpleExprCaseContext):
    pass

# Enter a parse tree produced by SQLParser#simpleExprConcat.
def enterSimpleExprConcat(self, ctx:SQLParser.SimpleExprConcatContext):
    pass

# Exit a parse tree produced by SQLParser#simpleExprConcat.
def exitSimpleExprConcat(self, ctx:SQLParser.SimpleExprConcatContext):
    pass

# Enter a parse tree produced by SQLParser#simpleExprLiteral.
def enterSimpleExprLiteral(self, ctx:SQLParser.SimpleExprLiteralContext):
    pass

# Exit a parse tree produced by SQLParser#simpleExprLiteral.
def exitSimpleExprLiteral(self, ctx:SQLParser.SimpleExprLiteralContext):
    pass

# Enter a parse tree produced by SQLParser#arrayCast.
def enterArrayCast(self, ctx:SQLParser.ArrayCastContext):
    pass

# Exit a parse tree produced by SQLParser#arrayCast.
def exitArrayCast(self, ctx:SQLParser.ArrayCastContext):
    pass

# Enter a parse tree produced by SQLParser#jsonOperator.
def enterJsonOperator(self, ctx:SQLParser.JsonOperatorContext):
    pass

# Exit a parse tree produced by SQLParser#jsonOperator.
def exitJsonOperator(self, ctx:SQLParser.JsonOperatorContext):
    pass
# --- ANTLR-generated listener stubs: aggregate, windowing and JSON function rules. ---
# No-op callbacks; override in a subclass to act on these parse-tree nodes.
# Enter a parse tree produced by SQLParser#sumExpr.
def enterSumExpr(self, ctx:SQLParser.SumExprContext):
    pass

# Exit a parse tree produced by SQLParser#sumExpr.
def exitSumExpr(self, ctx:SQLParser.SumExprContext):
    pass

# Enter a parse tree produced by SQLParser#groupingOperation.
def enterGroupingOperation(self, ctx:SQLParser.GroupingOperationContext):
    pass

# Exit a parse tree produced by SQLParser#groupingOperation.
def exitGroupingOperation(self, ctx:SQLParser.GroupingOperationContext):
    pass

# Enter a parse tree produced by SQLParser#windowFunctionCall.
def enterWindowFunctionCall(self, ctx:SQLParser.WindowFunctionCallContext):
    pass

# Exit a parse tree produced by SQLParser#windowFunctionCall.
def exitWindowFunctionCall(self, ctx:SQLParser.WindowFunctionCallContext):
    pass

# Enter a parse tree produced by SQLParser#windowingClause.
def enterWindowingClause(self, ctx:SQLParser.WindowingClauseContext):
    pass

# Exit a parse tree produced by SQLParser#windowingClause.
def exitWindowingClause(self, ctx:SQLParser.WindowingClauseContext):
    pass

# Enter a parse tree produced by SQLParser#leadLagInfo.
def enterLeadLagInfo(self, ctx:SQLParser.LeadLagInfoContext):
    pass

# Exit a parse tree produced by SQLParser#leadLagInfo.
def exitLeadLagInfo(self, ctx:SQLParser.LeadLagInfoContext):
    pass

# Enter a parse tree produced by SQLParser#nullTreatment.
def enterNullTreatment(self, ctx:SQLParser.NullTreatmentContext):
    pass

# Exit a parse tree produced by SQLParser#nullTreatment.
def exitNullTreatment(self, ctx:SQLParser.NullTreatmentContext):
    pass

# Enter a parse tree produced by SQLParser#jsonFunction.
def enterJsonFunction(self, ctx:SQLParser.JsonFunctionContext):
    pass

# Exit a parse tree produced by SQLParser#jsonFunction.
def exitJsonFunction(self, ctx:SQLParser.JsonFunctionContext):
    pass

# Enter a parse tree produced by SQLParser#inSumExpr.
def enterInSumExpr(self, ctx:SQLParser.InSumExprContext):
    pass

# Exit a parse tree produced by SQLParser#inSumExpr.
def exitInSumExpr(self, ctx:SQLParser.InSumExprContext):
    pass

# Enter a parse tree produced by SQLParser#identListArg.
def enterIdentListArg(self, ctx:SQLParser.IdentListArgContext):
    pass

# Exit a parse tree produced by SQLParser#identListArg.
def exitIdentListArg(self, ctx:SQLParser.IdentListArgContext):
    pass

# Enter a parse tree produced by SQLParser#identList.
def enterIdentList(self, ctx:SQLParser.IdentListContext):
    pass

# Exit a parse tree produced by SQLParser#identList.
def exitIdentList(self, ctx:SQLParser.IdentListContext):
    pass

# Enter a parse tree produced by SQLParser#fulltextOptions.
def enterFulltextOptions(self, ctx:SQLParser.FulltextOptionsContext):
    pass

# Exit a parse tree produced by SQLParser#fulltextOptions.
def exitFulltextOptions(self, ctx:SQLParser.FulltextOptionsContext):
    pass
# --- ANTLR-generated listener stubs: runtime/geometry/string function and UDF rules. ---
# No-op callbacks; override in a subclass to act on these parse-tree nodes.
# Enter a parse tree produced by SQLParser#runtimeFunctionCall.
def enterRuntimeFunctionCall(self, ctx:SQLParser.RuntimeFunctionCallContext):
    pass

# Exit a parse tree produced by SQLParser#runtimeFunctionCall.
def exitRuntimeFunctionCall(self, ctx:SQLParser.RuntimeFunctionCallContext):
    pass

# Enter a parse tree produced by SQLParser#geometryFunction.
def enterGeometryFunction(self, ctx:SQLParser.GeometryFunctionContext):
    pass

# Exit a parse tree produced by SQLParser#geometryFunction.
def exitGeometryFunction(self, ctx:SQLParser.GeometryFunctionContext):
    pass

# Enter a parse tree produced by SQLParser#timeFunctionParameters.
def enterTimeFunctionParameters(self, ctx:SQLParser.TimeFunctionParametersContext):
    pass

# Exit a parse tree produced by SQLParser#timeFunctionParameters.
def exitTimeFunctionParameters(self, ctx:SQLParser.TimeFunctionParametersContext):
    pass

# Enter a parse tree produced by SQLParser#fractionalPrecision.
def enterFractionalPrecision(self, ctx:SQLParser.FractionalPrecisionContext):
    pass

# Exit a parse tree produced by SQLParser#fractionalPrecision.
def exitFractionalPrecision(self, ctx:SQLParser.FractionalPrecisionContext):
    pass

# Enter a parse tree produced by SQLParser#weightStringLevels.
def enterWeightStringLevels(self, ctx:SQLParser.WeightStringLevelsContext):
    pass

# Exit a parse tree produced by SQLParser#weightStringLevels.
def exitWeightStringLevels(self, ctx:SQLParser.WeightStringLevelsContext):
    pass

# Enter a parse tree produced by SQLParser#weightStringLevelListItem.
def enterWeightStringLevelListItem(self, ctx:SQLParser.WeightStringLevelListItemContext):
    pass

# Exit a parse tree produced by SQLParser#weightStringLevelListItem.
def exitWeightStringLevelListItem(self, ctx:SQLParser.WeightStringLevelListItemContext):
    pass

# Enter a parse tree produced by SQLParser#dateTimeTtype.
def enterDateTimeTtype(self, ctx:SQLParser.DateTimeTtypeContext):
    pass

# Exit a parse tree produced by SQLParser#dateTimeTtype.
def exitDateTimeTtype(self, ctx:SQLParser.DateTimeTtypeContext):
    pass

# Enter a parse tree produced by SQLParser#trimFunction.
def enterTrimFunction(self, ctx:SQLParser.TrimFunctionContext):
    pass

# Exit a parse tree produced by SQLParser#trimFunction.
def exitTrimFunction(self, ctx:SQLParser.TrimFunctionContext):
    pass

# Enter a parse tree produced by SQLParser#substringFunction.
def enterSubstringFunction(self, ctx:SQLParser.SubstringFunctionContext):
    pass

# Exit a parse tree produced by SQLParser#substringFunction.
def exitSubstringFunction(self, ctx:SQLParser.SubstringFunctionContext):
    pass

# Enter a parse tree produced by SQLParser#functionCall.
def enterFunctionCall(self, ctx:SQLParser.FunctionCallContext):
    pass

# Exit a parse tree produced by SQLParser#functionCall.
def exitFunctionCall(self, ctx:SQLParser.FunctionCallContext):
    pass

# Enter a parse tree produced by SQLParser#udfExprList.
def enterUdfExprList(self, ctx:SQLParser.UdfExprListContext):
    pass

# Exit a parse tree produced by SQLParser#udfExprList.
def exitUdfExprList(self, ctx:SQLParser.UdfExprListContext):
    pass

# Enter a parse tree produced by SQLParser#udfExpr.
def enterUdfExpr(self, ctx:SQLParser.UdfExprContext):
    pass
# Exit a parse tree produced by SQLParser#udfExpr.
def exitUdfExpr(self, ctx:SQLParser.UdfExprContext):
pass
# Enter a parse tree produced by SQLParser#variable.
def enterVariable(self, ctx:SQLParser.VariableContext):
pass
# Exit a parse tree produced by SQLParser#variable.
def exitVariable(self, ctx:SQLParser.VariableContext):
pass
# Enter a parse tree produced by SQLParser#userVariable.
def enterUserVariable(self, ctx:SQLParser.UserVariableContext):
pass
# Exit a parse tree produced by SQLParser#userVariable.
def exitUserVariable(self, ctx:SQLParser.UserVariableContext):
pass
# Enter a parse tree produced by SQLParser#systemVariable.
def enterSystemVariable(self, ctx:SQLParser.SystemVariableContext):
pass
# Exit a parse tree produced by SQLParser#systemVariable.
def exitSystemVariable(self, ctx:SQLParser.SystemVariableContext):
pass
# Enter a parse tree produced by SQLParser#internalVariableName.
def enterInternalVariableName(self, ctx:SQLParser.InternalVariableNameContext):
pass
# Exit a parse tree produced by SQLParser#internalVariableName.
def exitInternalVariableName(self, ctx:SQLParser.InternalVariableNameContext):
pass
# Enter a parse tree produced by SQLParser#whenExpression.
def enterWhenExpression(self, ctx:SQLParser.WhenExpressionContext):
pass
# Exit a parse tree produced by SQLParser#whenExpression.
def exitWhenExpression(self, ctx:SQLParser.WhenExpressionContext):
pass
# Enter a parse tree produced by SQLParser#thenExpression.
def enterThenExpression(self, ctx:SQLParser.ThenExpressionContext):
pass
# Exit a parse tree produced by SQLParser#thenExpression.
def exitThenExpression(self, ctx:SQLParser.ThenExpressionContext):
pass
# Enter a parse tree produced by SQLParser#elseExpression.
def enterElseExpression(self, ctx:SQLParser.ElseExpressionContext):
pass
# Exit a parse tree produced by SQLParser#elseExpression.
def exitElseExpression(self, ctx:SQLParser.ElseExpressionContext):
pass
# Enter a parse tree produced by SQLParser#castType.
def enterCastType(self, ctx:SQLParser.CastTypeContext):
pass
# Exit a parse tree produced by SQLParser#castType.
def exitCastType(self, ctx:SQLParser.CastTypeContext):
pass
# Enter a parse tree produced by SQLParser#exprList.
def enterExprList(self, ctx:SQLParser.ExprListContext):
pass
# Exit a parse tree produced by SQLParser#exprList.
def exitExprList(self, ctx:SQLParser.ExprListContext):
pass
# Enter a parse tree produced by SQLParser#charset.
def enterCharset(self, ctx:SQLParser.CharsetContext):
pass
# Exit a parse tree produced by SQLParser#charset.
def exitCharset(self, ctx:SQLParser.CharsetContext):
pass
# Enter a parse tree produced by SQLParser#notRule.
def enterNotRule(self, ctx:SQLParser.NotRuleContext):
pass
# Exit a parse tree produced by SQLParser#notRule.
def exitNotRule(self, ctx:SQLParser.NotRuleContext):
pass
# Enter a parse tree produced by SQLParser#not2Rule.
def enterNot2Rule(self, ctx:SQLParser.Not2RuleContext):
pass
# Exit a parse tree produced by SQLParser#not2Rule.
def exitNot2Rule(self, ctx:SQLParser.Not2RuleContext):
pass
# Enter a parse tree produced by SQLParser#interval.
def enterInterval(self, ctx:SQLParser.IntervalContext):
pass
# Exit a parse tree produced by SQLParser#interval.
def exitInterval(self, ctx:SQLParser.IntervalContext):
pass
# Enter a parse tree produced by SQLParser#intervalTimeStamp.
def enterIntervalTimeStamp(self, ctx:SQLParser.IntervalTimeStampContext):
pass
# Exit a parse tree produced by SQLParser#intervalTimeStamp.
def exitIntervalTimeStamp(self, ctx:SQLParser.IntervalTimeStampContext):
pass
# Enter a parse tree produced by SQLParser#exprListWithParentheses.
def enterExprListWithParentheses(self, ctx:SQLParser.ExprListWithParenthesesContext):
pass
# Exit a parse tree produced by SQLParser#exprListWithParentheses.
def exitExprListWithParentheses(self, ctx:SQLParser.ExprListWithParenthesesContext):
pass
# Enter a parse tree produced by SQLParser#exprWithParentheses.
def enterExprWithParentheses(self, ctx:SQLParser.ExprWithParenthesesContext):
pass
# Exit a parse tree produced by SQLParser#exprWithParentheses.
def exitExprWithParentheses(self, ctx:SQLParser.ExprWithParenthesesContext):
pass
# Enter a parse tree produced by SQLParser#simpleExprWithParentheses.
def enterSimpleExprWithParentheses(self, ctx:SQLParser.SimpleExprWithParenthesesContext):
pass
# Exit a parse tree produced by SQLParser#simpleExprWithParentheses.
def exitSimpleExprWithParentheses(self, ctx:SQLParser.SimpleExprWithParenthesesContext):
pass
# Enter a parse tree produced by SQLParser#orderList.
def enterOrderList(self, ctx:SQLParser.OrderListContext):
pass
# Exit a parse tree produced by SQLParser#orderList.
def exitOrderList(self, ctx:SQLParser.OrderListContext):
pass
# Enter a parse tree produced by SQLParser#orderExpression.
def enterOrderExpression(self, ctx:SQLParser.OrderExpressionContext):
pass
# Exit a parse tree produced by SQLParser#orderExpression.
def exitOrderExpression(self, ctx:SQLParser.OrderExpressionContext):
pass
# Enter a parse tree produced by SQLParser#groupList.
def enterGroupList(self, ctx:SQLParser.GroupListContext):
pass
# Exit a parse tree produced by SQLParser#groupList.
def exitGroupList(self, ctx:SQLParser.GroupListContext):
pass
# Enter a parse tree produced by SQLParser#groupingExpression.
def enterGroupingExpression(self, ctx:SQLParser.GroupingExpressionContext):
pass
# Exit a parse tree produced by SQLParser#groupingExpression.
def exitGroupingExpression(self, ctx:SQLParser.GroupingExpressionContext):
pass
# Enter a parse tree produced by SQLParser#channel.
def enterChannel(self, ctx:SQLParser.ChannelContext):
pass
# Exit a parse tree produced by SQLParser#channel.
def exitChannel(self, ctx:SQLParser.ChannelContext):
pass
# NOTE(review): ANTLR-generated listener stubs — presumably emitted from a
# SQLParser.g4 grammar; confirm before hand-editing, since regeneration would
# overwrite manual changes. Each enter/exit pair below is an intentionally
# empty callback for the SQLParser rule named in the method; listener
# subclasses override only the callbacks they need. The original chunk had
# the `pass` bodies at the same indentation as their `def` lines (invalid
# Python); rewritten here as valid one-line stubs. Compound-statement /
# handler / signal / cursor rules:
def enterCompoundStatement(self, ctx: SQLParser.CompoundStatementContext): pass
def exitCompoundStatement(self, ctx: SQLParser.CompoundStatementContext): pass
def enterReturnStatement(self, ctx: SQLParser.ReturnStatementContext): pass
def exitReturnStatement(self, ctx: SQLParser.ReturnStatementContext): pass
def enterIfStatement(self, ctx: SQLParser.IfStatementContext): pass
def exitIfStatement(self, ctx: SQLParser.IfStatementContext): pass
def enterIfBody(self, ctx: SQLParser.IfBodyContext): pass
def exitIfBody(self, ctx: SQLParser.IfBodyContext): pass
def enterThenStatement(self, ctx: SQLParser.ThenStatementContext): pass
def exitThenStatement(self, ctx: SQLParser.ThenStatementContext): pass
def enterCompoundStatementList(self, ctx: SQLParser.CompoundStatementListContext): pass
def exitCompoundStatementList(self, ctx: SQLParser.CompoundStatementListContext): pass
def enterCaseStatement(self, ctx: SQLParser.CaseStatementContext): pass
def exitCaseStatement(self, ctx: SQLParser.CaseStatementContext): pass
def enterElseStatement(self, ctx: SQLParser.ElseStatementContext): pass
def exitElseStatement(self, ctx: SQLParser.ElseStatementContext): pass
def enterLabeledBlock(self, ctx: SQLParser.LabeledBlockContext): pass
def exitLabeledBlock(self, ctx: SQLParser.LabeledBlockContext): pass
def enterUnlabeledBlock(self, ctx: SQLParser.UnlabeledBlockContext): pass
def exitUnlabeledBlock(self, ctx: SQLParser.UnlabeledBlockContext): pass
def enterLabel(self, ctx: SQLParser.LabelContext): pass
def exitLabel(self, ctx: SQLParser.LabelContext): pass
def enterBeginEndBlock(self, ctx: SQLParser.BeginEndBlockContext): pass
def exitBeginEndBlock(self, ctx: SQLParser.BeginEndBlockContext): pass
def enterLabeledControl(self, ctx: SQLParser.LabeledControlContext): pass
def exitLabeledControl(self, ctx: SQLParser.LabeledControlContext): pass
def enterUnlabeledControl(self, ctx: SQLParser.UnlabeledControlContext): pass
def exitUnlabeledControl(self, ctx: SQLParser.UnlabeledControlContext): pass
def enterLoopBlock(self, ctx: SQLParser.LoopBlockContext): pass
def exitLoopBlock(self, ctx: SQLParser.LoopBlockContext): pass
def enterWhileDoBlock(self, ctx: SQLParser.WhileDoBlockContext): pass
def exitWhileDoBlock(self, ctx: SQLParser.WhileDoBlockContext): pass
def enterRepeatUntilBlock(self, ctx: SQLParser.RepeatUntilBlockContext): pass
def exitRepeatUntilBlock(self, ctx: SQLParser.RepeatUntilBlockContext): pass
def enterSpDeclarations(self, ctx: SQLParser.SpDeclarationsContext): pass
def exitSpDeclarations(self, ctx: SQLParser.SpDeclarationsContext): pass
def enterSpDeclaration(self, ctx: SQLParser.SpDeclarationContext): pass
def exitSpDeclaration(self, ctx: SQLParser.SpDeclarationContext): pass
def enterVariableDeclaration(self, ctx: SQLParser.VariableDeclarationContext): pass
def exitVariableDeclaration(self, ctx: SQLParser.VariableDeclarationContext): pass
def enterConditionDeclaration(self, ctx: SQLParser.ConditionDeclarationContext): pass
def exitConditionDeclaration(self, ctx: SQLParser.ConditionDeclarationContext): pass
def enterSpCondition(self, ctx: SQLParser.SpConditionContext): pass
def exitSpCondition(self, ctx: SQLParser.SpConditionContext): pass
def enterSqlstate(self, ctx: SQLParser.SqlstateContext): pass
def exitSqlstate(self, ctx: SQLParser.SqlstateContext): pass
def enterHandlerDeclaration(self, ctx: SQLParser.HandlerDeclarationContext): pass
def exitHandlerDeclaration(self, ctx: SQLParser.HandlerDeclarationContext): pass
def enterHandlerCondition(self, ctx: SQLParser.HandlerConditionContext): pass
def exitHandlerCondition(self, ctx: SQLParser.HandlerConditionContext): pass
def enterCursorDeclaration(self, ctx: SQLParser.CursorDeclarationContext): pass
def exitCursorDeclaration(self, ctx: SQLParser.CursorDeclarationContext): pass
def enterIterateStatement(self, ctx: SQLParser.IterateStatementContext): pass
def exitIterateStatement(self, ctx: SQLParser.IterateStatementContext): pass
def enterLeaveStatement(self, ctx: SQLParser.LeaveStatementContext): pass
def exitLeaveStatement(self, ctx: SQLParser.LeaveStatementContext): pass
def enterGetDiagnostics(self, ctx: SQLParser.GetDiagnosticsContext): pass
def exitGetDiagnostics(self, ctx: SQLParser.GetDiagnosticsContext): pass
def enterSignalAllowedExpr(self, ctx: SQLParser.SignalAllowedExprContext): pass
def exitSignalAllowedExpr(self, ctx: SQLParser.SignalAllowedExprContext): pass
def enterStatementInformationItem(self, ctx: SQLParser.StatementInformationItemContext): pass
def exitStatementInformationItem(self, ctx: SQLParser.StatementInformationItemContext): pass
def enterConditionInformationItem(self, ctx: SQLParser.ConditionInformationItemContext): pass
def exitConditionInformationItem(self, ctx: SQLParser.ConditionInformationItemContext): pass
def enterSignalInformationItemName(self, ctx: SQLParser.SignalInformationItemNameContext): pass
def exitSignalInformationItemName(self, ctx: SQLParser.SignalInformationItemNameContext): pass
def enterSignalStatement(self, ctx: SQLParser.SignalStatementContext): pass
def exitSignalStatement(self, ctx: SQLParser.SignalStatementContext): pass
def enterResignalStatement(self, ctx: SQLParser.ResignalStatementContext): pass
def exitResignalStatement(self, ctx: SQLParser.ResignalStatementContext): pass
def enterSignalInformationItem(self, ctx: SQLParser.SignalInformationItemContext): pass
def exitSignalInformationItem(self, ctx: SQLParser.SignalInformationItemContext): pass
def enterCursorOpen(self, ctx: SQLParser.CursorOpenContext): pass
def exitCursorOpen(self, ctx: SQLParser.CursorOpenContext): pass
def enterCursorClose(self, ctx: SQLParser.CursorCloseContext): pass
def exitCursorClose(self, ctx: SQLParser.CursorCloseContext): pass
def enterCursorFetch(self, ctx: SQLParser.CursorFetchContext): pass
def exitCursorFetch(self, ctx: SQLParser.CursorFetchContext): pass
def enterSchedule(self, ctx: SQLParser.ScheduleContext): pass
def exitSchedule(self, ctx: SQLParser.ScheduleContext): pass
# NOTE(review): ANTLR-generated listener stubs — presumably emitted from a
# SQLParser.g4 grammar; confirm before hand-editing, since regeneration would
# overwrite manual changes. Each enter/exit pair below is an intentionally
# empty callback for the SQLParser rule named in the method; listener
# subclasses override only the callbacks they need. The original chunk had
# the `pass` bodies at the same indentation as their `def` lines (invalid
# Python); rewritten here as valid one-line stubs. Column / constraint /
# index / data-type / table-option / partition rules:
def enterColumnDefinition(self, ctx: SQLParser.ColumnDefinitionContext): pass
def exitColumnDefinition(self, ctx: SQLParser.ColumnDefinitionContext): pass
def enterCheckOrReferences(self, ctx: SQLParser.CheckOrReferencesContext): pass
def exitCheckOrReferences(self, ctx: SQLParser.CheckOrReferencesContext): pass
def enterCheckConstraint(self, ctx: SQLParser.CheckConstraintContext): pass
def exitCheckConstraint(self, ctx: SQLParser.CheckConstraintContext): pass
def enterConstraintEnforcement(self, ctx: SQLParser.ConstraintEnforcementContext): pass
def exitConstraintEnforcement(self, ctx: SQLParser.ConstraintEnforcementContext): pass
def enterTableConstraintDef(self, ctx: SQLParser.TableConstraintDefContext): pass
def exitTableConstraintDef(self, ctx: SQLParser.TableConstraintDefContext): pass
def enterConstraintName(self, ctx: SQLParser.ConstraintNameContext): pass
def exitConstraintName(self, ctx: SQLParser.ConstraintNameContext): pass
def enterFieldDefinition(self, ctx: SQLParser.FieldDefinitionContext): pass
def exitFieldDefinition(self, ctx: SQLParser.FieldDefinitionContext): pass
def enterColumnAttribute(self, ctx: SQLParser.ColumnAttributeContext): pass
def exitColumnAttribute(self, ctx: SQLParser.ColumnAttributeContext): pass
def enterColumnFormat(self, ctx: SQLParser.ColumnFormatContext): pass
def exitColumnFormat(self, ctx: SQLParser.ColumnFormatContext): pass
def enterStorageMedia(self, ctx: SQLParser.StorageMediaContext): pass
def exitStorageMedia(self, ctx: SQLParser.StorageMediaContext): pass
def enterGcolAttribute(self, ctx: SQLParser.GcolAttributeContext): pass
def exitGcolAttribute(self, ctx: SQLParser.GcolAttributeContext): pass
def enterReferences(self, ctx: SQLParser.ReferencesContext): pass
def exitReferences(self, ctx: SQLParser.ReferencesContext): pass
def enterDeleteOption(self, ctx: SQLParser.DeleteOptionContext): pass
def exitDeleteOption(self, ctx: SQLParser.DeleteOptionContext): pass
def enterKeyList(self, ctx: SQLParser.KeyListContext): pass
def exitKeyList(self, ctx: SQLParser.KeyListContext): pass
def enterKeyPart(self, ctx: SQLParser.KeyPartContext): pass
def exitKeyPart(self, ctx: SQLParser.KeyPartContext): pass
def enterKeyListWithExpression(self, ctx: SQLParser.KeyListWithExpressionContext): pass
def exitKeyListWithExpression(self, ctx: SQLParser.KeyListWithExpressionContext): pass
def enterKeyPartOrExpression(self, ctx: SQLParser.KeyPartOrExpressionContext): pass
def exitKeyPartOrExpression(self, ctx: SQLParser.KeyPartOrExpressionContext): pass
def enterKeyListVariants(self, ctx: SQLParser.KeyListVariantsContext): pass
def exitKeyListVariants(self, ctx: SQLParser.KeyListVariantsContext): pass
def enterIndexType(self, ctx: SQLParser.IndexTypeContext): pass
def exitIndexType(self, ctx: SQLParser.IndexTypeContext): pass
def enterIndexOption(self, ctx: SQLParser.IndexOptionContext): pass
def exitIndexOption(self, ctx: SQLParser.IndexOptionContext): pass
def enterCommonIndexOption(self, ctx: SQLParser.CommonIndexOptionContext): pass
def exitCommonIndexOption(self, ctx: SQLParser.CommonIndexOptionContext): pass
def enterVisibility(self, ctx: SQLParser.VisibilityContext): pass
def exitVisibility(self, ctx: SQLParser.VisibilityContext): pass
def enterIndexTypeClause(self, ctx: SQLParser.IndexTypeClauseContext): pass
def exitIndexTypeClause(self, ctx: SQLParser.IndexTypeClauseContext): pass
def enterFulltextIndexOption(self, ctx: SQLParser.FulltextIndexOptionContext): pass
def exitFulltextIndexOption(self, ctx: SQLParser.FulltextIndexOptionContext): pass
def enterSpatialIndexOption(self, ctx: SQLParser.SpatialIndexOptionContext): pass
def exitSpatialIndexOption(self, ctx: SQLParser.SpatialIndexOptionContext): pass
def enterDataTypeDefinition(self, ctx: SQLParser.DataTypeDefinitionContext): pass
def exitDataTypeDefinition(self, ctx: SQLParser.DataTypeDefinitionContext): pass
def enterDataType(self, ctx: SQLParser.DataTypeContext): pass
def exitDataType(self, ctx: SQLParser.DataTypeContext): pass
def enterNchar(self, ctx: SQLParser.NcharContext): pass
def exitNchar(self, ctx: SQLParser.NcharContext): pass
def enterRealType(self, ctx: SQLParser.RealTypeContext): pass
def exitRealType(self, ctx: SQLParser.RealTypeContext): pass
def enterFieldLength(self, ctx: SQLParser.FieldLengthContext): pass
def exitFieldLength(self, ctx: SQLParser.FieldLengthContext): pass
def enterFieldOptions(self, ctx: SQLParser.FieldOptionsContext): pass
def exitFieldOptions(self, ctx: SQLParser.FieldOptionsContext): pass
def enterCharsetWithOptBinary(self, ctx: SQLParser.CharsetWithOptBinaryContext): pass
def exitCharsetWithOptBinary(self, ctx: SQLParser.CharsetWithOptBinaryContext): pass
def enterAscii(self, ctx: SQLParser.AsciiContext): pass
def exitAscii(self, ctx: SQLParser.AsciiContext): pass
def enterUnicode(self, ctx: SQLParser.UnicodeContext): pass
def exitUnicode(self, ctx: SQLParser.UnicodeContext): pass
def enterWsNumCodepoints(self, ctx: SQLParser.WsNumCodepointsContext): pass
def exitWsNumCodepoints(self, ctx: SQLParser.WsNumCodepointsContext): pass
def enterTypeDatetimePrecision(self, ctx: SQLParser.TypeDatetimePrecisionContext): pass
def exitTypeDatetimePrecision(self, ctx: SQLParser.TypeDatetimePrecisionContext): pass
def enterCharsetName(self, ctx: SQLParser.CharsetNameContext): pass
def exitCharsetName(self, ctx: SQLParser.CharsetNameContext): pass
def enterCollationName(self, ctx: SQLParser.CollationNameContext): pass
def exitCollationName(self, ctx: SQLParser.CollationNameContext): pass
def enterCreateTableOptions(self, ctx: SQLParser.CreateTableOptionsContext): pass
def exitCreateTableOptions(self, ctx: SQLParser.CreateTableOptionsContext): pass
def enterCreateTableOptionsSpaceSeparated(self, ctx: SQLParser.CreateTableOptionsSpaceSeparatedContext): pass
def exitCreateTableOptionsSpaceSeparated(self, ctx: SQLParser.CreateTableOptionsSpaceSeparatedContext): pass
def enterCreateTableOption(self, ctx: SQLParser.CreateTableOptionContext): pass
def exitCreateTableOption(self, ctx: SQLParser.CreateTableOptionContext): pass
def enterTernaryOption(self, ctx: SQLParser.TernaryOptionContext): pass
def exitTernaryOption(self, ctx: SQLParser.TernaryOptionContext): pass
def enterDefaultCollation(self, ctx: SQLParser.DefaultCollationContext): pass
def exitDefaultCollation(self, ctx: SQLParser.DefaultCollationContext): pass
def enterDefaultEncryption(self, ctx: SQLParser.DefaultEncryptionContext): pass
def exitDefaultEncryption(self, ctx: SQLParser.DefaultEncryptionContext): pass
def enterDefaultCharset(self, ctx: SQLParser.DefaultCharsetContext): pass
def exitDefaultCharset(self, ctx: SQLParser.DefaultCharsetContext): pass
def enterPartitionClause(self, ctx: SQLParser.PartitionClauseContext): pass
def exitPartitionClause(self, ctx: SQLParser.PartitionClauseContext): pass
def enterPartitionDefKey(self, ctx: SQLParser.PartitionDefKeyContext): pass
def exitPartitionDefKey(self, ctx: SQLParser.PartitionDefKeyContext): pass
def enterPartitionDefHash(self, ctx: SQLParser.PartitionDefHashContext): pass
def exitPartitionDefHash(self, ctx: SQLParser.PartitionDefHashContext): pass
def enterPartitionDefRangeList(self, ctx: SQLParser.PartitionDefRangeListContext): pass
def exitPartitionDefRangeList(self, ctx: SQLParser.PartitionDefRangeListContext): pass
def enterSubPartitions(self, ctx: SQLParser.SubPartitionsContext): pass
def exitSubPartitions(self, ctx: SQLParser.SubPartitionsContext): pass
def enterPartitionKeyAlgorithm(self, ctx: SQLParser.PartitionKeyAlgorithmContext): pass
def exitPartitionKeyAlgorithm(self, ctx: SQLParser.PartitionKeyAlgorithmContext): pass
def enterPartitionDefinitions(self, ctx: SQLParser.PartitionDefinitionsContext): pass
def exitPartitionDefinitions(self, ctx: SQLParser.PartitionDefinitionsContext): pass
def enterPartitionDefinition(self, ctx: SQLParser.PartitionDefinitionContext): pass
def exitPartitionDefinition(self, ctx: SQLParser.PartitionDefinitionContext): pass
def enterPartitionValuesIn(self, ctx: SQLParser.PartitionValuesInContext): pass
def exitPartitionValuesIn(self, ctx: SQLParser.PartitionValuesInContext): pass
def enterPartitionOption(self, ctx: SQLParser.PartitionOptionContext): pass
def exitPartitionOption(self, ctx: SQLParser.PartitionOptionContext): pass
def enterSubpartitionDefinition(self, ctx: SQLParser.SubpartitionDefinitionContext): pass
def exitSubpartitionDefinition(self, ctx: SQLParser.SubpartitionDefinitionContext): pass
def enterPartitionValueItemListParen(self, ctx: SQLParser.PartitionValueItemListParenContext): pass
def exitPartitionValueItemListParen(self, ctx: SQLParser.PartitionValueItemListParenContext): pass
def enterPartitionValueItem(self, ctx: SQLParser.PartitionValueItemContext): pass
def exitPartitionValueItem(self, ctx: SQLParser.PartitionValueItemContext): pass
def enterDefinerClause(self, ctx: SQLParser.DefinerClauseContext): pass
# Exit a parse tree produced by SQLParser#definerClause.
def exitDefinerClause(self, ctx:SQLParser.DefinerClauseContext):
pass
# Enter a parse tree produced by SQLParser#ifExists.
def enterIfExists(self, ctx:SQLParser.IfExistsContext):
pass
# Exit a parse tree produced by SQLParser#ifExists.
def exitIfExists(self, ctx:SQLParser.IfExistsContext):
pass
# Enter a parse tree produced by SQLParser#ifNotExists.
def enterIfNotExists(self, ctx:SQLParser.IfNotExistsContext):
pass
# Exit a parse tree produced by SQLParser#ifNotExists.
def exitIfNotExists(self, ctx:SQLParser.IfNotExistsContext):
pass
# Enter a parse tree produced by SQLParser#procedureParameter.
def enterProcedureParameter(self, ctx:SQLParser.ProcedureParameterContext):
pass
# Exit a parse tree produced by SQLParser#procedureParameter.
def exitProcedureParameter(self, ctx:SQLParser.ProcedureParameterContext):
pass
# Enter a parse tree produced by SQLParser#functionParameter.
def enterFunctionParameter(self, ctx:SQLParser.FunctionParameterContext):
pass
# Exit a parse tree produced by SQLParser#functionParameter.
def exitFunctionParameter(self, ctx:SQLParser.FunctionParameterContext):
pass
# Enter a parse tree produced by SQLParser#collate.
def enterCollate(self, ctx:SQLParser.CollateContext):
pass
# Exit a parse tree produced by SQLParser#collate.
def exitCollate(self, ctx:SQLParser.CollateContext):
pass
# Enter a parse tree produced by SQLParser#typeWithOptCollate.
def enterTypeWithOptCollate(self, ctx:SQLParser.TypeWithOptCollateContext):
pass
# Exit a parse tree produced by SQLParser#typeWithOptCollate.
def exitTypeWithOptCollate(self, ctx:SQLParser.TypeWithOptCollateContext):
pass
# Enter a parse tree produced by SQLParser#schemaIdentifierPair.
def enterSchemaIdentifierPair(self, ctx:SQLParser.SchemaIdentifierPairContext):
pass
# Exit a parse tree produced by SQLParser#schemaIdentifierPair.
def exitSchemaIdentifierPair(self, ctx:SQLParser.SchemaIdentifierPairContext):
pass
# Enter a parse tree produced by SQLParser#viewRefList.
def enterViewRefList(self, ctx:SQLParser.ViewRefListContext):
pass
# Exit a parse tree produced by SQLParser#viewRefList.
def exitViewRefList(self, ctx:SQLParser.ViewRefListContext):
pass
# Enter a parse tree produced by SQLParser#updateList.
def enterUpdateList(self, ctx:SQLParser.UpdateListContext):
pass
# Exit a parse tree produced by SQLParser#updateList.
def exitUpdateList(self, ctx:SQLParser.UpdateListContext):
pass
# Enter a parse tree produced by SQLParser#updateElement.
def enterUpdateElement(self, ctx:SQLParser.UpdateElementContext):
pass
# Exit a parse tree produced by SQLParser#updateElement.
def exitUpdateElement(self, ctx:SQLParser.UpdateElementContext):
pass
# Enter a parse tree produced by SQLParser#charsetClause.
def enterCharsetClause(self, ctx:SQLParser.CharsetClauseContext):
pass
# Exit a parse tree produced by SQLParser#charsetClause.
def exitCharsetClause(self, ctx:SQLParser.CharsetClauseContext):
pass
# Enter a parse tree produced by SQLParser#fieldsClause.
def enterFieldsClause(self, ctx:SQLParser.FieldsClauseContext):
pass
# Exit a parse tree produced by SQLParser#fieldsClause.
def exitFieldsClause(self, ctx:SQLParser.FieldsClauseContext):
pass
# Enter a parse tree produced by SQLParser#fieldTerm.
def enterFieldTerm(self, ctx:SQLParser.FieldTermContext):
pass
# Exit a parse tree produced by SQLParser#fieldTerm.
def exitFieldTerm(self, ctx:SQLParser.FieldTermContext):
pass
# Enter a parse tree produced by SQLParser#linesClause.
def enterLinesClause(self, ctx:SQLParser.LinesClauseContext):
pass
# Exit a parse tree produced by SQLParser#linesClause.
def exitLinesClause(self, ctx:SQLParser.LinesClauseContext):
pass
# Enter a parse tree produced by SQLParser#lineTerm.
def enterLineTerm(self, ctx:SQLParser.LineTermContext):
pass
# Exit a parse tree produced by SQLParser#lineTerm.
def exitLineTerm(self, ctx:SQLParser.LineTermContext):
pass
# Enter a parse tree produced by SQLParser#userList.
def enterUserList(self, ctx:SQLParser.UserListContext):
pass
# Exit a parse tree produced by SQLParser#userList.
def exitUserList(self, ctx:SQLParser.UserListContext):
pass
# Enter a parse tree produced by SQLParser#createUserList.
def enterCreateUserList(self, ctx:SQLParser.CreateUserListContext):
pass
# Exit a parse tree produced by SQLParser#createUserList.
def exitCreateUserList(self, ctx:SQLParser.CreateUserListContext):
pass
# Enter a parse tree produced by SQLParser#alterUserList.
def enterAlterUserList(self, ctx:SQLParser.AlterUserListContext):
pass
# Exit a parse tree produced by SQLParser#alterUserList.
def exitAlterUserList(self, ctx:SQLParser.AlterUserListContext):
pass
# Enter a parse tree produced by SQLParser#createUserEntry.
def enterCreateUserEntry(self, ctx:SQLParser.CreateUserEntryContext):
pass
# Exit a parse tree produced by SQLParser#createUserEntry.
def exitCreateUserEntry(self, ctx:SQLParser.CreateUserEntryContext):
pass
# Enter a parse tree produced by SQLParser#alterUserEntry.
def enterAlterUserEntry(self, ctx:SQLParser.AlterUserEntryContext):
pass
# Exit a parse tree produced by SQLParser#alterUserEntry.
def exitAlterUserEntry(self, ctx:SQLParser.AlterUserEntryContext):
pass
# Enter a parse tree produced by SQLParser#retainCurrentPassword.
def enterRetainCurrentPassword(self, ctx:SQLParser.RetainCurrentPasswordContext):
pass
# Exit a parse tree produced by SQLParser#retainCurrentPassword.
def exitRetainCurrentPassword(self, ctx:SQLParser.RetainCurrentPasswordContext):
pass
# Enter a parse tree produced by SQLParser#discardOldPassword.
def enterDiscardOldPassword(self, ctx:SQLParser.DiscardOldPasswordContext):
pass
# Exit a parse tree produced by SQLParser#discardOldPassword.
def exitDiscardOldPassword(self, ctx:SQLParser.DiscardOldPasswordContext):
pass
# Enter a parse tree produced by SQLParser#replacePassword.
def enterReplacePassword(self, ctx:SQLParser.ReplacePasswordContext):
pass
# Exit a parse tree produced by SQLParser#replacePassword.
def exitReplacePassword(self, ctx:SQLParser.ReplacePasswordContext):
pass
# Enter a parse tree produced by SQLParser#userIdentifierOrText.
def enterUserIdentifierOrText(self, ctx:SQLParser.UserIdentifierOrTextContext):
pass
# Exit a parse tree produced by SQLParser#userIdentifierOrText.
def exitUserIdentifierOrText(self, ctx:SQLParser.UserIdentifierOrTextContext):
pass
# Enter a parse tree produced by SQLParser#user.
def enterUser(self, ctx:SQLParser.UserContext):
pass
# Exit a parse tree produced by SQLParser#user.
def exitUser(self, ctx:SQLParser.UserContext):
pass
# Enter a parse tree produced by SQLParser#likeClause.
def enterLikeClause(self, ctx:SQLParser.LikeClauseContext):
pass
# Exit a parse tree produced by SQLParser#likeClause.
def exitLikeClause(self, ctx:SQLParser.LikeClauseContext):
pass
# Enter a parse tree produced by SQLParser#likeOrWhere.
def enterLikeOrWhere(self, ctx:SQLParser.LikeOrWhereContext):
pass
# Exit a parse tree produced by SQLParser#likeOrWhere.
def exitLikeOrWhere(self, ctx:SQLParser.LikeOrWhereContext):
pass
# Enter a parse tree produced by SQLParser#onlineOption.
def enterOnlineOption(self, ctx:SQLParser.OnlineOptionContext):
pass
# Exit a parse tree produced by SQLParser#onlineOption.
def exitOnlineOption(self, ctx:SQLParser.OnlineOptionContext):
pass
# Enter a parse tree produced by SQLParser#noWriteToBinLog.
def enterNoWriteToBinLog(self, ctx:SQLParser.NoWriteToBinLogContext):
pass
# Exit a parse tree produced by SQLParser#noWriteToBinLog.
def exitNoWriteToBinLog(self, ctx:SQLParser.NoWriteToBinLogContext):
pass
# Enter a parse tree produced by SQLParser#usePartition.
def enterUsePartition(self, ctx:SQLParser.UsePartitionContext):
pass
# Exit a parse tree produced by SQLParser#usePartition.
def exitUsePartition(self, ctx:SQLParser.UsePartitionContext):
pass
# Enter a parse tree produced by SQLParser#fieldIdentifier.
def enterFieldIdentifier(self, ctx:SQLParser.FieldIdentifierContext):
pass
# Exit a parse tree produced by SQLParser#fieldIdentifier.
def exitFieldIdentifier(self, ctx:SQLParser.FieldIdentifierContext):
pass
# Enter a parse tree produced by SQLParser#columnName.
def enterColumnName(self, ctx:SQLParser.ColumnNameContext):
pass
# Exit a parse tree produced by SQLParser#columnName.
def exitColumnName(self, ctx:SQLParser.ColumnNameContext):
pass
# Enter a parse tree produced by SQLParser#columnInternalRef.
def enterColumnInternalRef(self, ctx:SQLParser.ColumnInternalRefContext):
pass
# Exit a parse tree produced by SQLParser#columnInternalRef.
def exitColumnInternalRef(self, ctx:SQLParser.ColumnInternalRefContext):
pass
# Enter a parse tree produced by SQLParser#columnInternalRefList.
def enterColumnInternalRefList(self, ctx:SQLParser.ColumnInternalRefListContext):
pass
# Exit a parse tree produced by SQLParser#columnInternalRefList.
def exitColumnInternalRefList(self, ctx:SQLParser.ColumnInternalRefListContext):
pass
# Enter a parse tree produced by SQLParser#columnRef.
def enterColumnRef(self, ctx:SQLParser.ColumnRefContext):
pass
# Exit a parse tree produced by SQLParser#columnRef.
def exitColumnRef(self, ctx:SQLParser.ColumnRefContext):
pass
# Enter a parse tree produced by SQLParser#insertIdentifier.
def enterInsertIdentifier(self, ctx:SQLParser.InsertIdentifierContext):
pass
# Exit a parse tree produced by SQLParser#insertIdentifier.
def exitInsertIdentifier(self, ctx:SQLParser.InsertIdentifierContext):
pass
# Enter a parse tree produced by SQLParser#indexName.
def enterIndexName(self, ctx:SQLParser.IndexNameContext):
pass
# Exit a parse tree produced by SQLParser#indexName.
def exitIndexName(self, ctx:SQLParser.IndexNameContext):
pass
# Enter a parse tree produced by SQLParser#indexRef.
def enterIndexRef(self, ctx:SQLParser.IndexRefContext):
pass
# Exit a parse tree produced by SQLParser#indexRef.
def exitIndexRef(self, ctx:SQLParser.IndexRefContext):
pass
# Enter a parse tree produced by SQLParser#tableWild.
def enterTableWild(self, ctx:SQLParser.TableWildContext):
pass
# Exit a parse tree produced by SQLParser#tableWild.
def exitTableWild(self, ctx:SQLParser.TableWildContext):
pass
# Enter a parse tree produced by SQLParser#schemaName.
def enterSchemaName(self, ctx:SQLParser.SchemaNameContext):
pass
# Exit a parse tree produced by SQLParser#schemaName.
def exitSchemaName(self, ctx:SQLParser.SchemaNameContext):
pass
# Enter a parse tree produced by SQLParser#schemaRef.
def enterSchemaRef(self, ctx:SQLParser.SchemaRefContext):
pass
# Exit a parse tree produced by SQLParser#schemaRef.
def exitSchemaRef(self, ctx:SQLParser.SchemaRefContext):
pass
# Enter a parse tree produced by SQLParser#procedureName.
def enterProcedureName(self, ctx:SQLParser.ProcedureNameContext):
pass
# Exit a parse tree produced by SQLParser#procedureName.
def exitProcedureName(self, ctx:SQLParser.ProcedureNameContext):
pass
# Enter a parse tree produced by SQLParser#procedureRef.
def enterProcedureRef(self, ctx:SQLParser.ProcedureRefContext):
pass
# Exit a parse tree produced by SQLParser#procedureRef.
def exitProcedureRef(self, ctx:SQLParser.ProcedureRefContext):
pass
# Enter a parse tree produced by SQLParser#functionName.
def enterFunctionName(self, ctx:SQLParser.FunctionNameContext):
pass
# Exit a parse tree produced by SQLParser#functionName.
def exitFunctionName(self, ctx:SQLParser.FunctionNameContext):
pass
# Enter a parse tree produced by SQLParser#functionRef.
def enterFunctionRef(self, ctx:SQLParser.FunctionRefContext):
pass
# Exit a parse tree produced by SQLParser#functionRef.
def exitFunctionRef(self, ctx:SQLParser.FunctionRefContext):
pass
# Enter a parse tree produced by SQLParser#triggerName.
def enterTriggerName(self, ctx:SQLParser.TriggerNameContext):
pass
# Exit a parse tree produced by SQLParser#triggerName.
def exitTriggerName(self, ctx:SQLParser.TriggerNameContext):
pass
# Enter a parse tree produced by SQLParser#triggerRef.
def enterTriggerRef(self, ctx:SQLParser.TriggerRefContext):
pass
# Exit a parse tree produced by SQLParser#triggerRef.
def exitTriggerRef(self, ctx:SQLParser.TriggerRefContext):
pass
# Enter a parse tree produced by SQLParser#viewName.
def enterViewName(self, ctx:SQLParser.ViewNameContext):
pass
# Exit a parse tree produced by SQLParser#viewName.
def exitViewName(self, ctx:SQLParser.ViewNameContext):
pass
# Enter a parse tree produced by SQLParser#viewRef.
def enterViewRef(self, ctx:SQLParser.ViewRefContext):
pass
# Exit a parse tree produced by SQLParser#viewRef.
def exitViewRef(self, ctx:SQLParser.ViewRefContext):
pass
# Enter a parse tree produced by SQLParser#tablespaceName.
def enterTablespaceName(self, ctx:SQLParser.TablespaceNameContext):
pass
# Exit a parse tree produced by SQLParser#tablespaceName.
def exitTablespaceName(self, ctx:SQLParser.TablespaceNameContext):
pass
# Enter a parse tree produced by SQLParser#tablespaceRef.
def enterTablespaceRef(self, ctx:SQLParser.TablespaceRefContext):
pass
# Exit a parse tree produced by SQLParser#tablespaceRef.
def exitTablespaceRef(self, ctx:SQLParser.TablespaceRefContext):
pass
# Enter a parse tree produced by SQLParser#logfileGroupName.
def enterLogfileGroupName(self, ctx:SQLParser.LogfileGroupNameContext):
pass
# Exit a parse tree produced by SQLParser#logfileGroupName.
def exitLogfileGroupName(self, ctx:SQLParser.LogfileGroupNameContext):
pass
# Enter a parse tree produced by SQLParser#logfileGroupRef.
def enterLogfileGroupRef(self, ctx:SQLParser.LogfileGroupRefContext):
pass
# Exit a parse tree produced by SQLParser#logfileGroupRef.
def exitLogfileGroupRef(self, ctx:SQLParser.LogfileGroupRefContext):
pass
# Enter a parse tree produced by SQLParser#eventName.
def enterEventName(self, ctx:SQLParser.EventNameContext):
pass
# Exit a parse tree produced by SQLParser#eventName.
def exitEventName(self, ctx:SQLParser.EventNameContext):
pass
# Enter a parse tree produced by SQLParser#eventRef.
def enterEventRef(self, ctx:SQLParser.EventRefContext):
pass
# Exit a parse tree produced by SQLParser#eventRef.
def exitEventRef(self, ctx:SQLParser.EventRefContext):
pass
# Enter a parse tree produced by SQLParser#udfName.
def enterUdfName(self, ctx:SQLParser.UdfNameContext):
pass
# Exit a parse tree produced by SQLParser#udfName.
def exitUdfName(self, ctx:SQLParser.UdfNameContext):
pass
# Enter a parse tree produced by SQLParser#serverName.
def enterServerName(self, ctx:SQLParser.ServerNameContext):
pass
# Exit a parse tree produced by SQLParser#serverName.
def exitServerName(self, ctx:SQLParser.ServerNameContext):
pass
# Enter a parse tree produced by SQLParser#serverRef.
def enterServerRef(self, ctx:SQLParser.ServerRefContext):
pass
# Exit a parse tree produced by SQLParser#serverRef.
def exitServerRef(self, ctx:SQLParser.ServerRefContext):
pass
# Enter a parse tree produced by SQLParser#engineRef.
def enterEngineRef(self, ctx:SQLParser.EngineRefContext):
pass
# Exit a parse tree produced by SQLParser#engineRef.
def exitEngineRef(self, ctx:SQLParser.EngineRefContext):
pass
# Enter a parse tree produced by SQLParser#tableName.
def enterTableName(self, ctx:SQLParser.TableNameContext):
pass
# Exit a parse tree produced by SQLParser#tableName.
def exitTableName(self, ctx:SQLParser.TableNameContext):
pass
# Enter a parse tree produced by SQLParser#filterTableRef.
def enterFilterTableRef(self, ctx:SQLParser.FilterTableRefContext):
pass
# Exit a parse tree produced by SQLParser#filterTableRef.
def exitFilterTableRef(self, ctx:SQLParser.FilterTableRefContext):
pass
# Enter a parse tree produced by SQLParser#tableRefWithWildcard.
def enterTableRefWithWildcard(self, ctx:SQLParser.TableRefWithWildcardContext):
pass
# Exit a parse tree produced by SQLParser#tableRefWithWildcard.
def exitTableRefWithWildcard(self, ctx:SQLParser.TableRefWithWildcardContext):
pass
# Enter a parse tree produced by SQLParser#tableRef.
def enterTableRef(self, ctx:SQLParser.TableRefContext):
pass
# Exit a parse tree produced by SQLParser#tableRef.
def exitTableRef(self, ctx:SQLParser.TableRefContext):
pass
# Enter a parse tree produced by SQLParser#tableRefList.
def enterTableRefList(self, ctx:SQLParser.TableRefListContext):
pass
# Exit a parse tree produced by SQLParser#tableRefList.
def exitTableRefList(self, ctx:SQLParser.TableRefListContext):
pass
# Enter a parse tree produced by SQLParser#tableAliasRefList.
def enterTableAliasRefList(self, ctx:SQLParser.TableAliasRefListContext):
pass
# Exit a parse tree produced by SQLParser#tableAliasRefList.
def exitTableAliasRefList(self, ctx:SQLParser.TableAliasRefListContext):
pass
# Enter a parse tree produced by SQLParser#parameterName.
def enterParameterName(self, ctx:SQLParser.ParameterNameContext):
pass
# Exit a parse tree produced by SQLParser#parameterName.
def exitParameterName(self, ctx:SQLParser.ParameterNameContext):
pass
# Enter a parse tree produced by SQLParser#labelIdentifier.
def enterLabelIdentifier(self, ctx:SQLParser.LabelIdentifierContext):
pass
# Exit a parse tree produced by SQLParser#labelIdentifier.
def exitLabelIdentifier(self, ctx:SQLParser.LabelIdentifierContext):
pass
# Enter a parse tree produced by SQLParser#labelRef.
def enterLabelRef(self, ctx:SQLParser.LabelRefContext):
pass
# Exit a parse tree produced by SQLParser#labelRef.
def exitLabelRef(self, ctx:SQLParser.LabelRefContext):
pass
# Enter a parse tree produced by SQLParser#roleIdentifier.
def enterRoleIdentifier(self, ctx:SQLParser.RoleIdentifierContext):
pass
# Exit a parse tree produced by SQLParser#roleIdentifier.
def exitRoleIdentifier(self, ctx:SQLParser.RoleIdentifierContext):
pass
# Enter a parse tree produced by SQLParser#roleRef.
def enterRoleRef(self, ctx:SQLParser.RoleRefContext):
pass
# Exit a parse tree produced by SQLParser#roleRef.
def exitRoleRef(self, ctx:SQLParser.RoleRefContext):
pass
# Enter a parse tree produced by SQLParser#pluginRef.
def enterPluginRef(self, ctx:SQLParser.PluginRefContext):
pass
# Exit a parse tree produced by SQLParser#pluginRef.
def exitPluginRef(self, ctx:SQLParser.PluginRefContext):
pass
# Enter a parse tree produced by SQLParser#componentRef.
def enterComponentRef(self, ctx:SQLParser.ComponentRefContext):
pass
# Exit a parse tree produced by SQLParser#componentRef.
def exitComponentRef(self, ctx:SQLParser.ComponentRefContext):
pass
# Enter a parse tree produced by SQLParser#resourceGroupRef.
def enterResourceGroupRef(self, ctx:SQLParser.ResourceGroupRefContext):
pass
# Exit a parse tree produced by SQLParser#resourceGroupRef.
def exitResourceGroupRef(self, ctx:SQLParser.ResourceGroupRefContext):
pass
# Enter a parse tree produced by SQLParser#windowName.
def enterWindowName(self, ctx:SQLParser.WindowNameContext):
pass
# Exit a parse tree produced by SQLParser#windowName.
def exitWindowName(self, ctx:SQLParser.WindowNameContext):
pass
# Enter a parse tree produced by SQLParser#pureIdentifier.
def enterPureIdentifier(self, ctx:SQLParser.PureIdentifierContext):
pass
# Exit a parse tree produced by SQLParser#pureIdentifier.
def exitPureIdentifier(self, ctx:SQLParser.PureIdentifierContext):
pass
# Enter a parse tree produced by SQLParser#identifier.
def enterIdentifier(self, ctx:SQLParser.IdentifierContext):
pass
# Exit a parse tree produced by SQLParser#identifier.
def exitIdentifier(self, ctx:SQLParser.IdentifierContext):
pass
# Enter a parse tree produced by SQLParser#identifierList.
def enterIdentifierList(self, ctx:SQLParser.IdentifierListContext):
pass
# Exit a parse tree produced by SQLParser#identifierList.
def exitIdentifierList(self, ctx:SQLParser.IdentifierListContext):
pass
# Enter a parse tree produced by SQLParser#identifierListWithParentheses.
def enterIdentifierListWithParentheses(self, ctx:SQLParser.IdentifierListWithParenthesesContext):
pass
# Exit a parse tree produced by SQLParser#identifierListWithParentheses.
def exitIdentifierListWithParentheses(self, ctx:SQLParser.IdentifierListWithParenthesesContext):
pass
# Enter a parse tree produced by SQLParser#qualifiedIdentifier.
def enterQualifiedIdentifier(self, ctx:SQLParser.QualifiedIdentifierContext):
pass
# Exit a parse tree produced by SQLParser#qualifiedIdentifier.
def exitQualifiedIdentifier(self, ctx:SQLParser.QualifiedIdentifierContext):
pass
# Enter a parse tree produced by SQLParser#simpleIdentifier.
def enterSimpleIdentifier(self, ctx:SQLParser.SimpleIdentifierContext):
pass
# Exit a parse tree produced by SQLParser#simpleIdentifier.
def exitSimpleIdentifier(self, ctx:SQLParser.SimpleIdentifierContext):
pass
# Enter a parse tree produced by SQLParser#dotIdentifier.
def enterDotIdentifier(self, ctx:SQLParser.DotIdentifierContext):
pass
# Exit a parse tree produced by SQLParser#dotIdentifier.
def exitDotIdentifier(self, ctx:SQLParser.DotIdentifierContext):
pass
# Enter a parse tree produced by SQLParser#ulong_number.
def enterUlong_number(self, ctx:SQLParser.Ulong_numberContext):
pass
# Exit a parse tree produced by SQLParser#ulong_number.
def exitUlong_number(self, ctx:SQLParser.Ulong_numberContext):
pass
# Enter a parse tree produced by SQLParser#real_ulong_number.
def enterReal_ulong_number(self, ctx:SQLParser.Real_ulong_numberContext):
pass
# Exit a parse tree produced by SQLParser#real_ulong_number.
def exitReal_ulong_number(self, ctx:SQLParser.Real_ulong_numberContext):
pass
# Enter a parse tree produced by SQLParser#ulonglong_number.
def enterUlonglong_number(self, ctx:SQLParser.Ulonglong_numberContext):
pass
# Exit a parse tree produced by SQLParser#ulonglong_number.
def exitUlonglong_number(self, ctx:SQLParser.Ulonglong_numberContext):
pass
# Enter a parse tree produced by SQLParser#real_ulonglong_number.
def enterReal_ulonglong_number(self, ctx:SQLParser.Real_ulonglong_numberContext):
pass
# Exit a parse tree produced by SQLParser#real_ulonglong_number.
def exitReal_ulonglong_number(self, ctx:SQLParser.Real_ulonglong_numberContext):
pass
# Enter a parse tree produced by SQLParser#literal.
def enterLiteral(self, ctx:SQLParser.LiteralContext):
pass
# Exit a parse tree produced by SQLParser#literal.
def exitLiteral(self, ctx:SQLParser.LiteralContext):
pass
# Enter a parse tree produced by SQLParser#signedLiteral.
def enterSignedLiteral(self, ctx:SQLParser.SignedLiteralContext):
pass
# Exit a parse tree produced by SQLParser#signedLiteral.
def exitSignedLiteral(self, ctx:SQLParser.SignedLiteralContext):
pass
# Enter a parse tree produced by SQLParser#stringList.
def enterStringList(self, ctx:SQLParser.StringListContext):
pass
# Exit a parse tree produced by SQLParser#stringList.
def exitStringList(self, ctx:SQLParser.StringListContext):
pass
# Enter a parse tree produced by SQLParser#textStringLiteral.
def enterTextStringLiteral(self, ctx:SQLParser.TextStringLiteralContext):
pass
# Exit a parse tree produced by SQLParser#textStringLiteral.
def exitTextStringLiteral(self, ctx:SQLParser.TextStringLiteralContext):
pass
# Enter a parse tree produced by SQLParser#textString.
def enterTextString(self, ctx:SQLParser.TextStringContext):
pass
# Exit a parse tree produced by SQLParser#textString.
def exitTextString(self, ctx:SQLParser.TextStringContext):
pass
# Enter a parse tree produced by SQLParser#textStringHash.
def enterTextStringHash(self, ctx:SQLParser.TextStringHashContext):
pass
# Exit a parse tree produced by SQLParser#textStringHash.
def exitTextStringHash(self, ctx:SQLParser.TextStringHashContext):
pass
# Enter a parse tree produced by SQLParser#textLiteral.
def enterTextLiteral(self, ctx:SQLParser.TextLiteralContext):
pass
# Exit a parse tree produced by SQLParser#textLiteral.
def exitTextLiteral(self, ctx:SQLParser.TextLiteralContext):
pass
# Enter a parse tree produced by SQLParser#textStringNoLinebreak.
def enterTextStringNoLinebreak(self, ctx:SQLParser.TextStringNoLinebreakContext):
pass
# Exit a parse tree produced by SQLParser#textStringNoLinebreak.
def exitTextStringNoLinebreak(self, ctx:SQLParser.TextStringNoLinebreakContext):
pass
# Enter a parse tree produced by SQLParser#textStringLiteralList.
def enterTextStringLiteralList(self, ctx:SQLParser.TextStringLiteralListContext):
pass
# Exit a parse tree produced by SQLParser#textStringLiteralList.
def exitTextStringLiteralList(self, ctx:SQLParser.TextStringLiteralListContext):
pass
# Enter a parse tree produced by SQLParser#numLiteral.
def enterNumLiteral(self, ctx:SQLParser.NumLiteralContext):
pass
# Exit a parse tree produced by SQLParser#numLiteral.
def exitNumLiteral(self, ctx:SQLParser.NumLiteralContext):
pass
# Enter a parse tree produced by SQLParser#boolLiteral.
def enterBoolLiteral(self, ctx:SQLParser.BoolLiteralContext):
pass
# Exit a parse tree produced by SQLParser#boolLiteral.
def exitBoolLiteral(self, ctx:SQLParser.BoolLiteralContext):
pass
# Enter a parse tree produced by SQLParser#nullLiteral.
def enterNullLiteral(self, ctx:SQLParser.NullLiteralContext):
pass
# Exit a parse tree produced by SQLParser#nullLiteral.
def exitNullLiteral(self, ctx:SQLParser.NullLiteralContext):
pass
# Enter a parse tree produced by SQLParser#temporalLiteral.
def enterTemporalLiteral(self, ctx:SQLParser.TemporalLiteralContext):
pass
# Exit a parse tree produced by SQLParser#temporalLiteral.
def exitTemporalLiteral(self, ctx:SQLParser.TemporalLiteralContext):
pass
# Enter a parse tree produced by SQLParser#floatOptions.
def enterFloatOptions(self, ctx:SQLParser.FloatOptionsContext):
pass
# Exit a parse tree produced by SQLParser#floatOptions.
def exitFloatOptions(self, ctx:SQLParser.FloatOptionsContext):
pass
# Enter a parse tree produced by SQLParser#standardFloatOptions.
def enterStandardFloatOptions(self, ctx:SQLParser.StandardFloatOptionsContext):
pass
# Exit a parse tree produced by SQLParser#standardFloatOptions.
def exitStandardFloatOptions(self, ctx:SQLParser.StandardFloatOptionsContext):
pass
# Enter a parse tree produced by SQLParser#precision.
def enterPrecision(self, ctx:SQLParser.PrecisionContext):
pass
# Exit a parse tree produced by SQLParser#precision.
def exitPrecision(self, ctx:SQLParser.PrecisionContext):
pass
# Enter a parse tree produced by SQLParser#textOrIdentifier.
def enterTextOrIdentifier(self, ctx:SQLParser.TextOrIdentifierContext):
pass
# Exit a parse tree produced by SQLParser#textOrIdentifier.
def exitTextOrIdentifier(self, ctx:SQLParser.TextOrIdentifierContext):
pass
# Enter a parse tree produced by SQLParser#lValueIdentifier.
def enterLValueIdentifier(self, ctx:SQLParser.LValueIdentifierContext):
pass
# Exit a parse tree produced by SQLParser#lValueIdentifier.
def exitLValueIdentifier(self, ctx:SQLParser.LValueIdentifierContext):
pass
# Enter a parse tree produced by SQLParser#roleIdentifierOrText.
def enterRoleIdentifierOrText(self, ctx:SQLParser.RoleIdentifierOrTextContext):
pass
# Exit a parse tree produced by SQLParser#roleIdentifierOrText.
def exitRoleIdentifierOrText(self, ctx:SQLParser.RoleIdentifierOrTextContext):
pass
# Enter a parse tree produced by SQLParser#sizeNumber.
def enterSizeNumber(self, ctx:SQLParser.SizeNumberContext):
pass
# Exit a parse tree produced by SQLParser#sizeNumber.
def exitSizeNumber(self, ctx:SQLParser.SizeNumberContext):
pass
# Enter a parse tree produced by SQLParser#parentheses.
def enterParentheses(self, ctx:SQLParser.ParenthesesContext):
pass
# Exit a parse tree produced by SQLParser#parentheses.
def exitParentheses(self, ctx:SQLParser.ParenthesesContext):
pass
# Enter a parse tree produced by SQLParser#equal.
def enterEqual(self, ctx:SQLParser.EqualContext):
pass
# Exit a parse tree produced by SQLParser#equal.
def exitEqual(self, ctx:SQLParser.EqualContext):
pass
# Enter a parse tree produced by SQLParser#optionType.
def enterOptionType(self, ctx:SQLParser.OptionTypeContext):
pass
# Exit a parse tree produced by SQLParser#optionType.
def exitOptionType(self, ctx:SQLParser.OptionTypeContext):
pass
# Enter a parse tree produced by SQLParser#varIdentType.
def enterVarIdentType(self, ctx:SQLParser.VarIdentTypeContext):
pass
# Exit a parse tree produced by SQLParser#varIdentType.
def exitVarIdentType(self, ctx:SQLParser.VarIdentTypeContext):
pass
# Enter a parse tree produced by SQLParser#setVarIdentType.
def enterSetVarIdentType(self, ctx:SQLParser.SetVarIdentTypeContext):
pass
# Exit a parse tree produced by SQLParser#setVarIdentType.
def exitSetVarIdentType(self, ctx:SQLParser.SetVarIdentTypeContext):
pass
# Enter a parse tree produced by SQLParser#identifierKeyword.
def enterIdentifierKeyword(self, ctx:SQLParser.IdentifierKeywordContext):
pass
# Exit a parse tree produced by SQLParser#identifierKeyword.
def exitIdentifierKeyword(self, ctx:SQLParser.IdentifierKeywordContext):
pass
# Enter a parse tree produced by SQLParser#identifierKeywordsAmbiguous1RolesAndLabels.
def enterIdentifierKeywordsAmbiguous1RolesAndLabels(self, ctx:SQLParser.IdentifierKeywordsAmbiguous1RolesAndLabelsContext):
pass
# Exit a parse tree produced by SQLParser#identifierKeywordsAmbiguous1RolesAndLabels.
def exitIdentifierKeywordsAmbiguous1RolesAndLabels(self, ctx:SQLParser.IdentifierKeywordsAmbiguous1RolesAndLabelsContext):
pass
# Enter a parse tree produced by SQLParser#identifierKeywordsAmbiguous2Labels.
def enterIdentifierKeywordsAmbiguous2Labels(self, ctx:SQLParser.IdentifierKeywordsAmbiguous2LabelsContext):
pass
# Exit a parse tree produced by SQLParser#identifierKeywordsAmbiguous2Labels.
def exitIdentifierKeywordsAmbiguous2Labels(self, ctx:SQLParser.IdentifierKeywordsAmbiguous2LabelsContext):
pass
# Enter a parse tree produced by SQLParser#labelKeyword.
def enterLabelKeyword(self, ctx:SQLParser.LabelKeywordContext):
pass
# Exit a parse tree produced by SQLParser#labelKeyword.
def exitLabelKeyword(self, ctx:SQLParser.LabelKeywordContext):
pass
# Enter a parse tree produced by SQLParser#identifierKeywordsAmbiguous3Roles.
def enterIdentifierKeywordsAmbiguous3Roles(self, ctx:SQLParser.IdentifierKeywordsAmbiguous3RolesContext):
pass
# Exit a parse tree produced by SQLParser#identifierKeywordsAmbiguous3Roles.
def exitIdentifierKeywordsAmbiguous3Roles(self, ctx:SQLParser.IdentifierKeywordsAmbiguous3RolesContext):
pass
# Enter a parse tree produced by SQLParser#identifierKeywordsUnambiguous.
def enterIdentifierKeywordsUnambiguous(self, ctx:SQLParser.IdentifierKeywordsUnambiguousContext):
pass
# Exit a parse tree produced by SQLParser#identifierKeywordsUnambiguous.
def exitIdentifierKeywordsUnambiguous(self, ctx:SQLParser.IdentifierKeywordsUnambiguousContext):
pass
# Enter a parse tree produced by SQLParser#roleKeyword.
def enterRoleKeyword(self, ctx:SQLParser.RoleKeywordContext):
pass
# Exit a parse tree produced by SQLParser#roleKeyword.
def exitRoleKeyword(self, ctx:SQLParser.RoleKeywordContext):
pass
# Enter a parse tree produced by SQLParser#lValueKeyword.
def enterLValueKeyword(self, ctx:SQLParser.LValueKeywordContext):
pass
# Exit a parse tree produced by SQLParser#lValueKeyword.
def exitLValueKeyword(self, ctx:SQLParser.LValueKeywordContext):
pass
# Enter a parse tree produced by SQLParser#identifierKeywordsAmbiguous4SystemVariables.
def enterIdentifierKeywordsAmbiguous4SystemVariables(self, ctx:SQLParser.IdentifierKeywordsAmbiguous4SystemVariablesContext):
pass
# Exit a parse tree produced by SQLParser#identifierKeywordsAmbiguous4SystemVariables.
def exitIdentifierKeywordsAmbiguous4SystemVariables(self, ctx:SQLParser.IdentifierKeywordsAmbiguous4SystemVariablesContext):
pass
# Enter a parse tree produced by SQLParser#roleOrIdentifierKeyword.
def enterRoleOrIdentifierKeyword(self, ctx:SQLParser.RoleOrIdentifierKeywordContext):
pass
# Exit a parse tree produced by SQLParser#roleOrIdentifierKeyword.
def exitRoleOrIdentifierKeyword(self, ctx:SQLParser.RoleOrIdentifierKeywordContext):
pass
# Enter a parse tree produced by SQLParser#roleOrLabelKeyword.
def enterRoleOrLabelKeyword(self, ctx:SQLParser.RoleOrLabelKeywordContext):
pass
# Exit a parse tree produced by SQLParser#roleOrLabelKeyword.
def exitRoleOrLabelKeyword(self, ctx:SQLParser.RoleOrLabelKeywordContext):
pass
del SQLParser
| 33.48473
| 129
| 0.744566
| 19,685
| 196,254
| 7.420828
| 0.138329
| 0.05319
| 0.088651
| 0.159571
| 0.823541
| 0.492035
| 0.491631
| 0.490995
| 0.005237
| 0.003697
| 0
| 0.000367
| 0.195772
| 196,254
| 5,861
| 130
| 33.48473
| 0.925161
| 0.37104
| 0
| 0.498459
| 1
| 0
| 0.000008
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.498459
| false
| 0.501541
| 0.002311
| 0
| 0.501156
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
c0db4354b91b037de40f568d23c31e1e7c47ec03
| 126
|
py
|
Python
|
tests/test_apdu/__init__.py
|
amih90/bacpypes
|
27ab4f18aa252ceb6ffdc32d53af2995a2e92647
|
[
"MIT"
] | 240
|
2015-07-17T16:27:54.000Z
|
2022-03-29T13:53:06.000Z
|
tests/test_apdu/__init__.py
|
amih90/bacpypes
|
27ab4f18aa252ceb6ffdc32d53af2995a2e92647
|
[
"MIT"
] | 400
|
2015-07-23T05:37:52.000Z
|
2022-03-29T12:32:30.000Z
|
tests/test_apdu/__init__.py
|
amih90/bacpypes
|
27ab4f18aa252ceb6ffdc32d53af2995a2e92647
|
[
"MIT"
] | 143
|
2015-07-17T18:22:27.000Z
|
2022-03-22T01:21:24.000Z
|
#!/usr/bin/python
"""
Test BACpypes APDU Module
"""
from . import test_max_apdu_length_accepted, test_max_segments_accepted
| 15.75
| 71
| 0.785714
| 18
| 126
| 5.111111
| 0.722222
| 0.152174
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 126
| 7
| 72
| 18
| 0.821429
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c0e78f4cc67c560e09e724676da39eb4b75f0c89
| 379
|
py
|
Python
|
text/_elisp/key/base/character/japanese/upper/__init__.py
|
jedhsu/text
|
8525b602d304ac571a629104c48703443244545c
|
[
"Apache-2.0"
] | null | null | null |
text/_elisp/key/base/character/japanese/upper/__init__.py
|
jedhsu/text
|
8525b602d304ac571a629104c48703443244545c
|
[
"Apache-2.0"
] | null | null | null |
text/_elisp/key/base/character/japanese/upper/__init__.py
|
jedhsu/text
|
8525b602d304ac571a629104c48703443244545c
|
[
"Apache-2.0"
] | null | null | null |
from .west import Ka
from .west import Ta
from .west import Ha
from .west import Ra
from .west import Wa
from .west import Ki
from .west import Chi
from .west import Hi
from .west import Ri
from .east import Sa
from .east import Na
from .east import Ma
from .east import A
from .east import Ya
from .east import Shi
from .east import Ni
from .east import Mi
from .east import I
| 18.95
| 21
| 0.759894
| 72
| 379
| 4
| 0.305556
| 0.25
| 0.4375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.192612
| 379
| 19
| 22
| 19.947368
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8d08215b3b73a7924d2718de7cb7b754807f1b65
| 179
|
py
|
Python
|
src/uff/plane_wave_origin.py
|
davidbradway/uff.py
|
118001211018a4fc95d1dd7304ae6335bdf805f9
|
[
"MIT"
] | 7
|
2021-11-16T17:27:54.000Z
|
2021-12-25T18:09:35.000Z
|
src/uff/plane_wave_origin.py
|
davidbradway/uff.py
|
118001211018a4fc95d1dd7304ae6335bdf805f9
|
[
"MIT"
] | 6
|
2021-11-16T17:27:33.000Z
|
2022-02-04T08:51:06.000Z
|
src/uff/plane_wave_origin.py
|
davidbradway/uff.py
|
118001211018a4fc95d1dd7304ae6335bdf805f9
|
[
"MIT"
] | 1
|
2021-11-16T19:26:36.000Z
|
2021-11-16T19:26:36.000Z
|
from dataclasses import dataclass
from uff.origin import Origin
from uff.rotation import Rotation
@dataclass
class PlaneWaveOrigin(Origin):
rotation: Rotation = Rotation()
| 17.9
| 35
| 0.798883
| 21
| 179
| 6.809524
| 0.428571
| 0.097902
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145251
| 179
| 9
| 36
| 19.888889
| 0.934641
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
238542b3c0cb07fedde0fec5e62254046d99ff9d
| 1,075
|
py
|
Python
|
src/text_utils_tests/test_accents_dict.py
|
stefantaubert/text-utils
|
8fc52df37b6cbc5409695f16b7295be61107052c
|
[
"MIT"
] | null | null | null |
src/text_utils_tests/test_accents_dict.py
|
stefantaubert/text-utils
|
8fc52df37b6cbc5409695f16b7295be61107052c
|
[
"MIT"
] | 22
|
2020-12-19T18:54:03.000Z
|
2021-11-08T15:24:39.000Z
|
src/text_utils_tests/test_accents_dict.py
|
stefantaubert/text-utils
|
8fc52df37b6cbc5409695f16b7295be61107052c
|
[
"MIT"
] | null | null | null |
from text_utils.accents_dict import AccentsDict
def test_init_from_accents_adds_no_accents():
res = AccentsDict.init_from_accents({"a", "b", "c"})
assert len(res) == 3
def test_init_from_accents_is_sorted():
res = AccentsDict.init_from_accents({"c", "a", "b"})
assert res.get_accent(0) == "a"
assert res.get_accent(1) == "b"
assert res.get_accent(2) == "c"
def test_init_from_accents_with_pad_uses_pad_const():
res = AccentsDict.init_from_accents_with_pad({"b", "a"}, pad_accent="_")
assert res.get_accent(0) == "_"
assert res.get_accent(1) == "a"
assert res.get_accent(2) == "b"
def test_init_from_accents_with_pad_has_pad_at_idx_zero():
res = AccentsDict.init_from_accents_with_pad({"b", "a"}, "xx")
assert res.get_accent(0) == "xx"
assert res.get_accent(1) == "a"
assert res.get_accent(2) == "b"
def test_init_from_accents_with_pad_ignores_existing_pad():
res = AccentsDict.init_from_accents_with_pad({"b", "a", "xx"}, "xx")
assert res.get_accent(0) == "xx"
assert res.get_accent(1) == "a"
assert res.get_accent(2) == "b"
| 27.564103
| 74
| 0.702326
| 177
| 1,075
| 3.870057
| 0.20904
| 0.157664
| 0.210219
| 0.315328
| 0.80292
| 0.538686
| 0.538686
| 0.49635
| 0.49635
| 0.440876
| 0
| 0.013934
| 0.132093
| 1,075
| 38
| 75
| 28.289474
| 0.720257
| 0
| 0
| 0.333333
| 0
| 0
| 0.030698
| 0
| 0
| 0
| 0
| 0
| 0.541667
| 1
| 0.208333
| false
| 0
| 0.041667
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
23858a7010759c9b26295e469f8bb5f9a54899ac
| 42
|
py
|
Python
|
src/probnum/quad/solvers/policies/__init__.py
|
feimeng93/probnum
|
4e46273c0157d26b9be2a7a415ccf69a3691ec22
|
[
"MIT"
] | 1
|
2021-04-16T14:45:26.000Z
|
2021-04-16T14:45:26.000Z
|
src/probnum/quad/solvers/policies/__init__.py
|
pitmonticone/probnum
|
1fed705b2443a14d08419e16f98f6ef815ae9ffa
|
[
"MIT"
] | 42
|
2021-03-08T07:20:40.000Z
|
2022-03-28T05:04:48.000Z
|
src/probnum/quad/solvers/policies/__init__.py
|
pitmonticone/probnum
|
1fed705b2443a14d08419e16f98f6ef815ae9ffa
|
[
"MIT"
] | null | null | null |
from ._policy import Policy, RandomPolicy
| 21
| 41
| 0.833333
| 5
| 42
| 6.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119048
| 42
| 1
| 42
| 42
| 0.918919
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
2390c1e8c517652cedab3d662000ca0eb6121780
| 51
|
py
|
Python
|
__init__.py
|
sourabh2k15/zulint
|
1b34e45c7da65fd470a44cdd3b272ccb316be648
|
[
"Apache-2.0"
] | null | null | null |
__init__.py
|
sourabh2k15/zulint
|
1b34e45c7da65fd470a44cdd3b272ccb316be648
|
[
"Apache-2.0"
] | null | null | null |
__init__.py
|
sourabh2k15/zulint
|
1b34e45c7da65fd470a44cdd3b272ccb316be648
|
[
"Apache-2.0"
] | null | null | null |
# Compatibility __init__ file
from zulint import *
| 17
| 29
| 0.803922
| 6
| 51
| 6.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.156863
| 51
| 2
| 30
| 25.5
| 0.860465
| 0.529412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
239215a7126b469ed62fc9444b620d2812931d81
| 101
|
py
|
Python
|
pyglet/libs/darwin/__init__.py
|
bitcraft/pyglet
|
144257c365ca85528c6a4c5bed8141e683d7a9b6
|
[
"BSD-3-Clause"
] | 15
|
2015-01-21T12:29:01.000Z
|
2018-12-09T09:17:33.000Z
|
pyglet/libs/darwin/__init__.py
|
bitcraft/pyglet
|
144257c365ca85528c6a4c5bed8141e683d7a9b6
|
[
"BSD-3-Clause"
] | null | null | null |
pyglet/libs/darwin/__init__.py
|
bitcraft/pyglet
|
144257c365ca85528c6a4c5bed8141e683d7a9b6
|
[
"BSD-3-Clause"
] | 9
|
2015-12-12T09:12:46.000Z
|
2021-12-26T13:29:14.000Z
|
import pyglet
# Cocoa implementation:
if pyglet.options['darwin_cocoa']:
from .cocoapy import *
| 16.833333
| 34
| 0.742574
| 12
| 101
| 6.166667
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.158416
| 101
| 5
| 35
| 20.2
| 0.870588
| 0.207921
| 0
| 0
| 0
| 0
| 0.153846
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
23c3355bf1fdcde499b548cc70d15c7c199674ba
| 24
|
py
|
Python
|
social_core/__init__.py
|
mvpoland/social-core
|
f7106463363cde54f67cb80f89cb8b4e39aa2344
|
[
"BSD-3-Clause"
] | null | null | null |
social_core/__init__.py
|
mvpoland/social-core
|
f7106463363cde54f67cb80f89cb8b4e39aa2344
|
[
"BSD-3-Clause"
] | null | null | null |
social_core/__init__.py
|
mvpoland/social-core
|
f7106463363cde54f67cb80f89cb8b4e39aa2344
|
[
"BSD-3-Clause"
] | null | null | null |
__version__ = '3.3.3.2'
| 12
| 23
| 0.625
| 5
| 24
| 2.2
| 0.6
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 0.125
| 24
| 1
| 24
| 24
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0.291667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
23e583223fb05de9b135b6c0861b783a042f5499
| 371
|
py
|
Python
|
{{ cookiecutter.repo_name }}/{{ cookiecutter.repo_name }}/base/dataset.py
|
gpnlab/cookiecutter-gpnlab
|
dd6e949fe2a203c77f6ea30e84c95f0306c7b64e
|
[
"MIT"
] | null | null | null |
{{ cookiecutter.repo_name }}/{{ cookiecutter.repo_name }}/base/dataset.py
|
gpnlab/cookiecutter-gpnlab
|
dd6e949fe2a203c77f6ea30e84c95f0306c7b64e
|
[
"MIT"
] | null | null | null |
{{ cookiecutter.repo_name }}/{{ cookiecutter.repo_name }}/base/dataset.py
|
gpnlab/cookiecutter-gpnlab
|
dd6e949fe2a203c77f6ea30e84c95f0306c7b64e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding=utf-8
from torch.utils.data import Dataset
__all__ = ['BaseDataset']
class BaseDataset(Dataset):
"""
Base class for all custom datasets
"""
def __init__(self):
raise NotImplementedError
def __len__(self):
raise NotImplementedError
def __getitem__(self, idx):
raise NotImplementedError
| 16.863636
| 38
| 0.668464
| 40
| 371
| 5.8
| 0.7
| 0.310345
| 0.241379
| 0.267241
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003546
| 0.239892
| 371
| 21
| 39
| 17.666667
| 0.819149
| 0.183288
| 0
| 0.333333
| 0
| 0
| 0.038462
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.111111
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
23e70ae9df6162caf1dff15d8220303b25a25de2
| 4,388
|
py
|
Python
|
projects/01_fyyur/worked_code/models.py
|
andreqts/UDACITY-Full-Stack-exercises
|
3504f76479de8da999ff3e8b3f02cd3a3689c360
|
[
"MIT"
] | null | null | null |
projects/01_fyyur/worked_code/models.py
|
andreqts/UDACITY-Full-Stack-exercises
|
3504f76479de8da999ff3e8b3f02cd3a3689c360
|
[
"MIT"
] | 34
|
2021-08-19T15:48:26.000Z
|
2022-02-24T00:30:19.000Z
|
projects/01_fyyur/worked_code/models.py
|
andreqts/UDACITY-Full-Stack-exercises
|
3504f76479de8da999ff3e8b3f02cd3a3689c360
|
[
"MIT"
] | null | null | null |
#----------------------------------------------------------------------------#
# Imports
#----------------------------------------------------------------------------#
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import func, or_, and_
from flask_migrate import Migrate
from flask_wtf import CsrfProtect, csrf
#----------------------------------------------------------------------------#
# App Config.
#----------------------------------------------------------------------------#
app = Flask(__name__)
app.config.from_object('config')
db = SQLAlchemy(app)
# Required by Flask when accessing remotely, not in localhost
#csrf.CSRFProtect(app)
# connect to a local postgresql database
migrate = Migrate(app, db)
#----------------------------------------------------------------------------#
# Models.
#----------------------------------------------------------------------------#
class Venue(db.Model):
__tablename__ = 'Venue'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String, nullable = False)
genres = db.Column(db.String(120), nullable = False)
city = db.Column(db.String(120), nullable = False)
state = db.Column(db.String(120), nullable = False)
address = db.Column(db.String(120), nullable = False)
phone = db.Column(db.String(120), nullable = False)
seeking_talent = db.Column(db.Boolean, default=False)
seeking_description = db.Column(db.String(120))
website_link = db.Column(db.String(120))
image_link = db.Column(db.String(500), nullable = False)
facebook_link = db.Column(db.String(120))
venue_shows = db.relationship('Show', backref='venue', lazy=True)
# helper methods (guided by the famous "DRY" principle)
def get_upcoming_shows(self):
return Show.query.filter_by(venue_id=self.id).join(Artist, Show.artist_id==Artist.id).filter(Show.start_time > func.now()).all()
def get_upcoming_shows_count(self):
return Show.query.filter_by(venue_id=self.id).filter(Show.start_time > func.now()).count()
def get_past_shows(self):
return Show.query.filter_by(venue_id=self.id).join(Artist, Show.artist_id==Artist.id).filter(Show.start_time <= func.now()).all()
def get_past_shows_count(self):
return Show.query.filter_by(venue_id=self.id).filter(Show.start_time <= func.now()).count()
def __repr__(self):
return f'<Venue {self.id}: {self.name} phone: {self.phone}>'
class Artist(db.Model):
__tablename__ = 'Artist'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String, nullable = False)
city = db.Column(db.String(120), nullable = False)
state = db.Column(db.String(120), nullable = False)
phone = db.Column(db.String(120), nullable = False)
seeking_venue = db.Column(db.Boolean, default=False)
seeking_description = db.Column(db.String(120))
genres = db.Column(db.String(120), nullable = False)
image_link = db.Column(db.String(500), nullable = False)
website_link = db.Column(db.String(120))
facebook_link = db.Column(db.String(120))
artist_shows = db.relationship('Show', backref='artist', lazy=True)
# helper methods (guided by the famous "DRY" principle)
def get_upcoming_shows(self):
return Show.query.filter_by(artist_id=self.id).join(Artist, Show.artist_id==Artist.id).filter(Show.start_time > func.now()).all()
def get_upcoming_shows_count(self):
return Show.query.filter_by(artist_id=self.id).filter(Show.start_time > func.now()).count()
def get_past_shows(self):
return Show.query.filter_by(artist_id=self.id).join(Artist, Show.artist_id==Artist.id).filter(Show.start_time <= func.now()).all()
def get_past_shows_count(self):
return Show.query.filter_by(artist_id=self.id).filter(Show.start_time <= func.now()).count()
def __repr__(self):
return f'<Venue {self.id}: {self.name} phone: {self.phone}>'
class Show(db.Model):
__tablename__ = 'Show'
artist_id = db.Column(db.Integer, db.ForeignKey('Artist.id'), nullable = False, primary_key=True)
venue_id = db.Column(db.Integer, db.ForeignKey('Venue.id'), nullable = False, primary_key=True)
start_time = db.Column(db.DateTime, nullable = False, primary_key=True)
def __repr__(self):
return f'<Show from {self.artist_id} in {self.venue_id} at {self.start_time}>'
| 41.790476
| 138
| 0.627165
| 578
| 4,388
| 4.579585
| 0.1609
| 0.07858
| 0.098224
| 0.114847
| 0.796751
| 0.757083
| 0.726861
| 0.657348
| 0.633547
| 0.602569
| 0
| 0.013499
| 0.139016
| 4,388
| 104
| 139
| 42.192308
| 0.687136
| 0.162261
| 0
| 0.507692
| 0
| 0
| 0.061543
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.169231
| false
| 0
| 0.076923
| 0.169231
| 0.938462
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
9b0ed7d0e710d4cee42930fc2da58cd220a1ca04
| 163
|
py
|
Python
|
NLP_Flask/app/dataAPI/utils.py
|
Gxy-2001/NLPPlat
|
f339c1a7452b979b413919cf4bc128bf45af52ef
|
[
"MIT"
] | null | null | null |
NLP_Flask/app/dataAPI/utils.py
|
Gxy-2001/NLPPlat
|
f339c1a7452b979b413919cf4bc128bf45af52ef
|
[
"MIT"
] | null | null | null |
NLP_Flask/app/dataAPI/utils.py
|
Gxy-2001/NLPPlat
|
f339c1a7452b979b413919cf4bc128bf45af52ef
|
[
"MIT"
] | null | null | null |
import base64
from io import BytesIO
import numpy as np
from app.models.dataset import *
from app.models.model import *
from app.utils.global_utils import *
| 12.538462
| 36
| 0.773006
| 26
| 163
| 4.807692
| 0.538462
| 0.168
| 0.208
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014815
| 0.171779
| 163
| 12
| 37
| 13.583333
| 0.911111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f19df4e11a8e35cca7ef09184e8dae7783e90951
| 3,546
|
py
|
Python
|
tests/core/transaction-utils/conftest.py
|
ggs134/py-evm
|
5ad87356181b03c14a2452131f50fe8762127c84
|
[
"MIT"
] | 1,641
|
2017-11-24T04:24:22.000Z
|
2022-03-31T14:59:30.000Z
|
tests/core/transaction-utils/conftest.py
|
ggs134/py-evm
|
5ad87356181b03c14a2452131f50fe8762127c84
|
[
"MIT"
] | 1,347
|
2017-11-23T10:37:36.000Z
|
2022-03-20T16:31:44.000Z
|
tests/core/transaction-utils/conftest.py
|
ggs134/py-evm
|
5ad87356181b03c14a2452131f50fe8762127c84
|
[
"MIT"
] | 567
|
2017-11-22T18:03:27.000Z
|
2022-03-28T17:49:08.000Z
|
import pytest
# from https://github.com/ethereum/tests/blob/c951a3c105d600ccd8f1c3fc87856b2bcca3df0a/BasicTests/txtest.json # noqa: E501
TRANSACTION_FIXTURES = [
{
"chainId": None,
"key": "c85ef7d79691fe79573b1a7064c19c1a9819ebdbd1faaab1a8ec92344438aaf4",
"nonce": 0,
"gasPrice": 1000000000000,
"gas": 10000,
"to": "13978aee95f38490e9769c39b2773ed763d9cd5f",
"value": 10000000000000000,
"data": "",
"signed": "f86b8085e8d4a510008227109413978aee95f38490e9769c39b2773ed763d9cd5f872386f26fc10000801ba0eab47c1a49bf2fe5d40e01d313900e19ca485867d462fe06e139e3a536c6d4f4a014a569d327dcda4b29f74f93c0e9729d2f49ad726e703f9cd90dbb0fbf6649f1" # noqa: E501
},
{
"chainId": None,
"key": "c87f65ff3f271bf5dc8643484f66b200109caffe4bf98c4cb393dc35740b28c0",
"nonce": 0,
"gasPrice": 1000000000000,
"gas": 10000,
"to": "",
"value": 0,
"data": "6025515b525b600a37f260003556601b596020356000355760015b525b54602052f260255860005b525b54602052f2", # noqa: E501
"signed": "f87f8085e8d4a510008227108080af6025515b525b600a37f260003556601b596020356000355760015b525b54602052f260255860005b525b54602052f21ba05afed0244d0da90b67cf8979b0f246432a5112c0d31e8d5eedd2bc17b171c694a0bb1035c834677c2e1185b8dc90ca6d1fa585ab3d7ef23707e1a497a98e752d1b" # noqa: E501
},
{
"chainId": 1,
"key": "0x4c0883a69102937d6231471b5dbb6204fe5129617082792ae468d01a3f362318",
"nonce": 0,
"gasPrice": 234567897654321,
"gas": 2000000,
"to": "0xF0109fC8DF283027b6285cc889F5aA624EaC1F55",
"value": 1000000000,
"data": "",
"signed": "0xf86a8086d55698372431831e848094f0109fc8df283027b6285cc889f5aa624eac1f55843b9aca008025a009ebb6ca057a0535d6186462bc0b465b561c94a295bdb0621fc19208ab149a9ca0440ffd775ce91a833ab410777204d5341a6f9fa91216a6f3ee2c051fea6a0428", # noqa: E501
},
]
# Hand-built for 2930
TYPED_TRANSACTION_FIXTURES = [
{
"chainId": 1,
"nonce": 3,
"gasPrice": 1,
"gas": 25000,
"to": "b94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"value": 10,
"data": "5544",
"access_list": [
[b'\xf0' * 20, [b'\0' * 32, b'\xff' * 32]],
],
"key": (b'\0' * 31) + b'\x01',
"sender": b'~_ER\t\x1ai\x12]]\xfc\xb7\xb8\xc2e\x90)9[\xdf',
"intrinsic_gas": 21000 + 32 + 2400 + 1900 * 2,
"for_signing": '01f87a0103018261a894b94f5374fce5edbc8e2a8697c15331677e6ebf0b0a825544f85994f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f842a00000000000000000000000000000000000000000000000000000000000000000a0ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', # noqa: E501
"signed": '01f8bf0103018261a894b94f5374fce5edbc8e2a8697c15331677e6ebf0b0a825544f85bf85994f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f842a00000000000000000000000000000000000000000000000000000000000000000a0ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff80a017047e844eef895a876778a828731a33b67863aea7b9591a0001651ee47322faa043b4d0e8d59e8663c813ffa1bb99f020278a139f07c47f3858653071b3cec6b3', # noqa: E501
"hash": "13ab8b6371d8873405db20104705d7fecee2f9083f247250519e4b4c568b17fb",
}
]
@pytest.fixture(params=range(len(TRANSACTION_FIXTURES)))
def txn_fixture(request):
return TRANSACTION_FIXTURES[request.param]
@pytest.fixture(params=range(len(TYPED_TRANSACTION_FIXTURES)))
def typed_txn_fixture(request):
return TYPED_TRANSACTION_FIXTURES[request.param]
| 49.25
| 423
| 0.750705
| 180
| 3,546
| 14.7
| 0.522222
| 0.021164
| 0.015873
| 0.020408
| 0.048375
| 0.027967
| 0.027967
| 0
| 0
| 0
| 0
| 0.460791
| 0.158488
| 3,546
| 71
| 424
| 49.943662
| 0.425938
| 0.058094
| 0
| 0.209677
| 0
| 0.016129
| 0.627027
| 0.555856
| 0
| 0
| 0.097898
| 0
| 0
| 1
| 0.032258
| false
| 0
| 0.016129
| 0.032258
| 0.080645
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f19f087056a5e6dcba2dfd816055bda6300bf6b3
| 143
|
py
|
Python
|
utils/__init__.py
|
jaobernardi/roboscovid-redacted
|
831abbcf42781560c89c5a6782ab7de238b43aca
|
[
"MIT"
] | null | null | null |
utils/__init__.py
|
jaobernardi/roboscovid-redacted
|
831abbcf42781560c89c5a6782ab7de238b43aca
|
[
"MIT"
] | null | null | null |
utils/__init__.py
|
jaobernardi/roboscovid-redacted
|
831abbcf42781560c89c5a6782ab7de238b43aca
|
[
"MIT"
] | null | null | null |
from .parsing import parse_string
from .tokens import token
from .users import getUsers, getPlaces
from .flags import flag_define, value_assign
| 35.75
| 44
| 0.839161
| 21
| 143
| 5.571429
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118881
| 143
| 4
| 44
| 35.75
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f1aa93271970fbf027ff762de7ea1942fd69ee82
| 126
|
py
|
Python
|
mlaut/__init__.py
|
vishalbelsare/mlaut
|
a3bd4b2591c3144d100f413f6c4c2231392103e5
|
[
"BSD-3-Clause"
] | 23
|
2019-01-14T15:12:32.000Z
|
2022-03-31T12:23:34.000Z
|
mlaut/__init__.py
|
vishalbelsare/mlaut
|
a3bd4b2591c3144d100f413f6c4c2231392103e5
|
[
"BSD-3-Clause"
] | 11
|
2019-01-23T13:39:20.000Z
|
2020-04-17T13:25:27.000Z
|
mlaut/__init__.py
|
vishalbelsare/mlaut
|
a3bd4b2591c3144d100f413f6c4c2231392103e5
|
[
"BSD-3-Clause"
] | 4
|
2019-01-07T20:46:40.000Z
|
2022-03-25T00:00:00.000Z
|
# import mlaut.experiments
# import mlaut.analyze_results
# import mlaut.data
# import mlaut.shared
# import mlaut.resampling
| 21
| 30
| 0.801587
| 16
| 126
| 6.25
| 0.5
| 0.55
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119048
| 126
| 5
| 31
| 25.2
| 0.900901
| 0.912698
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f1b838678e1fecf8d22106416d47f732ef94903e
| 194
|
py
|
Python
|
databayes/utils/__init__.py
|
alphabayes/databayes
|
310622e2ecc66fb2b046e7e539eeecd4d9a9f528
|
[
"MIT"
] | null | null | null |
databayes/utils/__init__.py
|
alphabayes/databayes
|
310622e2ecc66fb2b046e7e539eeecd4d9a9f528
|
[
"MIT"
] | null | null | null |
databayes/utils/__init__.py
|
alphabayes/databayes
|
310622e2ecc66fb2b046e7e539eeecd4d9a9f528
|
[
"MIT"
] | null | null | null |
from .etl import Discretizer
from .etl import pdInterval_from_string, \
pdInterval_series_from_string, DataImporter
from .pandas_utils import ddomain_equals
from .misc import get_subclasses
| 32.333333
| 47
| 0.845361
| 26
| 194
| 6
| 0.576923
| 0.089744
| 0.166667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118557
| 194
| 5
| 48
| 38.8
| 0.912281
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f1eb9e88d2746ac1ba16236af105e27f5a2935b2
| 4,300
|
py
|
Python
|
spec/palindrome_spec.py
|
daltonamitchell/python-algorithms
|
470117e2f85732a4c1c86fcde50989f102f53bc3
|
[
"0BSD"
] | null | null | null |
spec/palindrome_spec.py
|
daltonamitchell/python-algorithms
|
470117e2f85732a4c1c86fcde50989f102f53bc3
|
[
"0BSD"
] | null | null | null |
spec/palindrome_spec.py
|
daltonamitchell/python-algorithms
|
470117e2f85732a4c1c86fcde50989f102f53bc3
|
[
"0BSD"
] | null | null | null |
"""
Write a method that takes in a string of lowercase letters (no
uppercase letters, no repeats). Consider the *substrings* of the
string: consecutive sequences of letters contained inside the string.
Find the longest such string of letters that is a palindrome.
Note that the entire string may itself be a palindrome.
"""
# pylint: disable=wildcard-import, unused-wildcard-import, undefined-variable
from expects import *
from lib.palindrome import is_palindrome, longest_palindrome
with description('is_palindrome'):
with it('finds a simple palindrome'):
expect(is_palindrome('eye')).to(be_true)
with it('returns false for no palindrone'):
expect(is_palindrome('almostomla')).to(be_false)
with description('longest_palindrome'):
with it('finds the longest in a small string'):
expect(longest_palindrome('abcbd')).to(equal('bcb'))
with it('returns the string if its a palindrome'):
expect(longest_palindrome('abba')).to(equal('abba'))
with it('finds the longest in a midsize string'):
expect(longest_palindrome('abceffebd')).to(equal('effe'))
with it('finds a palindrome at the beginning of the string'):
expect(longest_palindrome('racecarabcbdce')).to(equal('racecar'))
with it('finds a palindrome at the end of the string'):
expect(longest_palindrome('abc1232racecar')).to(equal('racecar'))
with it('finds the longest in a really long string'):
# pylint: disable=line-too-long
REALLY_LONG_STRING = 'bbbbbaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
EXPECTED = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
expect(longest_palindrome(REALLY_LONG_STRING)).to(equal(EXPECTED))
with it('finds a palindrome only one char less than the string'):
expect(longest_palindrome('amom')).to(equal('mom'))
expect(longest_palindrome('aibohphobiaz')).to(equal('aibohphobia'))
| 89.583333
| 1,271
| 0.876744
| 246
| 4,300
| 15.243902
| 0.349594
| 0.045333
| 0.049067
| 0.038667
| 0.0736
| 0.0592
| 0.0336
| 0
| 0
| 0
| 0
| 0.001005
| 0.073953
| 4,300
| 47
| 1,272
| 91.489362
| 0.940733
| 0.098372
| 0
| 0
| 0
| 0
| 0.768356
| 0.639866
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.08
| 0
| 0.08
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7b1dc65fd0a1f7b4ecbad7af0c8e8ea08a9a43cd
| 25
|
py
|
Python
|
introducao/exemplo4.py
|
vinihf/Programacao_Python
|
d2f4ab39d6d56dd962f3b5d9bbb28a42a9ef961d
|
[
"CC-BY-4.0"
] | null | null | null |
introducao/exemplo4.py
|
vinihf/Programacao_Python
|
d2f4ab39d6d56dd962f3b5d9bbb28a42a9ef961d
|
[
"CC-BY-4.0"
] | null | null | null |
introducao/exemplo4.py
|
vinihf/Programacao_Python
|
d2f4ab39d6d56dd962f3b5d9bbb28a42a9ef961d
|
[
"CC-BY-4.0"
] | null | null | null |
a = 10
b = "2"
print(a+b)
| 8.333333
| 10
| 0.48
| 7
| 25
| 1.714286
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 0.24
| 25
| 3
| 10
| 8.333333
| 0.473684
| 0
| 0
| 0
| 0
| 0
| 0.038462
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7b2025133135a232872dc59d12c6fe3d74ecd55b
| 406
|
py
|
Python
|
deep_rl/agent/__init__.py
|
pladosz/MOHQA
|
60f8f2e7e9fc9eaac0985bbc63a8092b95b21161
|
[
"Apache-2.0"
] | 3
|
2020-05-01T10:25:27.000Z
|
2021-09-29T02:18:48.000Z
|
deep_rl/agent/__init__.py
|
pladosz/MOHQA
|
60f8f2e7e9fc9eaac0985bbc63a8092b95b21161
|
[
"Apache-2.0"
] | 3
|
2020-05-01T12:37:57.000Z
|
2022-03-12T00:26:55.000Z
|
deep_rl/agent/__init__.py
|
pladosz/MOHQA
|
60f8f2e7e9fc9eaac0985bbc63a8092b95b21161
|
[
"Apache-2.0"
] | null | null | null |
"""ApnnDQN_agent is implemented agent, other agents are for other reinforcment learning approaches. The key implmentation of the network is in network heads."""
from .DQN_agent import *
from .DDPG_agent import *
from .A2C_agent import *
from .CategoricalDQN_agent import *
from .NStepDQN_agent import *
from .QuantileRegressionDQN_agent import *
from .PPO_agent import *
from .OptionCritic_agent import *
| 36.909091
| 160
| 0.802956
| 55
| 406
| 5.763636
| 0.509091
| 0.277603
| 0.33123
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002849
| 0.135468
| 406
| 10
| 161
| 40.6
| 0.900285
| 0.37931
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7b2867afd9777ba1cb55f6dc48bb9f0b16392d40
| 168
|
py
|
Python
|
intro_to_wc_modeling/cell_modeling/simulation/__init__.py
|
KarrLab/python_package_tutorial
|
dd20e0d3056138904e7e7fbbf6bb884d64dbf8f6
|
[
"MIT"
] | 15
|
2018-01-06T11:33:01.000Z
|
2022-03-01T15:18:40.000Z
|
intro_to_wc_modeling/cell_modeling/simulation/__init__.py
|
KarrLab/python_package_tutorial
|
dd20e0d3056138904e7e7fbbf6bb884d64dbf8f6
|
[
"MIT"
] | 2
|
2018-01-30T23:21:12.000Z
|
2018-03-23T20:22:06.000Z
|
intro_to_wc_modeling/cell_modeling/simulation/__init__.py
|
KarrLab/python_package_tutorial
|
dd20e0d3056138904e7e7fbbf6bb884d64dbf8f6
|
[
"MIT"
] | 8
|
2018-01-08T21:40:19.000Z
|
2022-01-04T14:48:02.000Z
|
from . import boolean
from . import dfba
from . import ode
from . import stochastic
from . import multi_algorithm
from . import mrna_and_proteins_using_several_methods
| 24
| 53
| 0.821429
| 24
| 168
| 5.5
| 0.583333
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 168
| 6
| 54
| 28
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
9e5e801a5da7763cc5d80a59893e26de333e2b01
| 51
|
py
|
Python
|
mlops/data_validation/__init__.py
|
colingwuyu/mlops
|
56ba9b7169c183c1f89ead64670ebfe873f9694c
|
[
"Apache-2.0"
] | 1
|
2022-01-11T01:17:08.000Z
|
2022-01-11T01:17:08.000Z
|
mlops/data_validation/__init__.py
|
colingwuyu/mlops
|
56ba9b7169c183c1f89ead64670ebfe873f9694c
|
[
"Apache-2.0"
] | null | null | null |
mlops/data_validation/__init__.py
|
colingwuyu/mlops
|
56ba9b7169c183c1f89ead64670ebfe873f9694c
|
[
"Apache-2.0"
] | null | null | null |
from mlops.data_validation.view import view_report
| 25.5
| 50
| 0.882353
| 8
| 51
| 5.375
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078431
| 51
| 1
| 51
| 51
| 0.914894
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
9e67daa97c8c809eb59ba8c8fd79f3c761c2c329
| 107
|
py
|
Python
|
certn-python/certn/__init__.py
|
livebungalow/certn-python-public
|
aa411626a2918e37c3bbe26023b1b97014860414
|
[
"MIT"
] | null | null | null |
certn-python/certn/__init__.py
|
livebungalow/certn-python-public
|
aa411626a2918e37c3bbe26023b1b97014860414
|
[
"MIT"
] | null | null | null |
certn-python/certn/__init__.py
|
livebungalow/certn-python-public
|
aa411626a2918e37c3bbe26023b1b97014860414
|
[
"MIT"
] | 1
|
2019-07-04T00:19:15.000Z
|
2019-07-04T00:19:15.000Z
|
from certn.client import Client
from certn.version import __version__
__all__ = ['Client', '__version__']
| 21.4
| 37
| 0.785047
| 13
| 107
| 5.538462
| 0.461538
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121495
| 107
| 4
| 38
| 26.75
| 0.765957
| 0
| 0
| 0
| 0
| 0
| 0.158879
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
9e821960da92684bf6b56b0cd325f4dbf836d809
| 230
|
py
|
Python
|
messenger/templates/html/Room.tpl.py
|
vinoth3v/In_addon_messenger
|
e1c5044d6ee4bfc2adbb1a81af16f7769b230c70
|
[
"Apache-2.0"
] | 1
|
2015-12-16T03:25:39.000Z
|
2015-12-16T03:25:39.000Z
|
messenger/templates/html/Room.tpl.py
|
vinoth3v/In_addon_messenger
|
e1c5044d6ee4bfc2adbb1a81af16f7769b230c70
|
[
"Apache-2.0"
] | null | null | null |
messenger/templates/html/Room.tpl.py
|
vinoth3v/In_addon_messenger
|
e1c5044d6ee4bfc2adbb1a81af16f7769b230c70
|
[
"Apache-2.0"
] | 1
|
2019-09-13T10:12:46.000Z
|
2019-09-13T10:12:46.000Z
|
<div <%= attributes %>>
<div class="i-clearfix">
<%= nabar_picture %>
<div class="i-nbfc">
<div>
<div id="Room-<%str= id %>-children"><%= children %></div>
<div><%= value %> </div>
</div>
</div>
</div>
</div>
| 20.909091
| 62
| 0.5
| 28
| 230
| 4.071429
| 0.464286
| 0.315789
| 0.236842
| 0.210526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.204348
| 230
| 11
| 63
| 20.909091
| 0.622951
| 0
| 0
| 0.363636
| 0
| 0
| 0.181818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7b6a8ba33b6684d816ca522426cc42a263647a7b
| 196
|
py
|
Python
|
hkey/admin.py
|
harvard-dce/hkey-demo-cas-server
|
2cae4625d393d8510c720fab0eab74bacdcdc3da
|
[
"MIT"
] | null | null | null |
hkey/admin.py
|
harvard-dce/hkey-demo-cas-server
|
2cae4625d393d8510c720fab0eab74bacdcdc3da
|
[
"MIT"
] | null | null | null |
hkey/admin.py
|
harvard-dce/hkey-demo-cas-server
|
2cae4625d393d8510c720fab0eab74bacdcdc3da
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
# Register your models here.
from .models import Huser, Grouper, MemberOf
admin.site.register(Huser)
admin.site.register(Grouper)
admin.site.register(MemberOf)
| 19.6
| 44
| 0.80102
| 27
| 196
| 5.814815
| 0.481481
| 0.171975
| 0.324841
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 196
| 9
| 45
| 21.777778
| 0.897143
| 0.132653
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
7ba5aaa05d92bc83303ab335e1fc4d0e06d50a74
| 40
|
py
|
Python
|
travel/shenzhenair/shenzhenaircollect.py
|
Octoberr/swm0920
|
8f05a6b91fc205960edd57f9076facec04f49a1a
|
[
"Apache-2.0"
] | 2
|
2019-05-19T11:54:26.000Z
|
2019-05-19T12:03:49.000Z
|
travel/shenzhenair/shenzhenaircollect.py
|
Octoberr/swm0920
|
8f05a6b91fc205960edd57f9076facec04f49a1a
|
[
"Apache-2.0"
] | 1
|
2020-11-27T07:55:15.000Z
|
2020-11-27T07:55:15.000Z
|
travel/shenzhenair/shenzhenaircollect.py
|
Octoberr/swm0920
|
8f05a6b91fc205960edd57f9076facec04f49a1a
|
[
"Apache-2.0"
] | 2
|
2021-09-06T18:06:12.000Z
|
2021-12-31T07:44:43.000Z
|
"""
深圳航空订单采集
createby swm
2018/06/11
"""
| 8
| 12
| 0.675
| 6
| 40
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.228571
| 0.125
| 40
| 5
| 13
| 8
| 0.542857
| 0.8
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7bac0d0396029ff6933b69d44bdffc0826f6497b
| 178
|
py
|
Python
|
alpha/admin.py
|
bebutler1/SoloQ-DTC-Website-writtten-with-Django-
|
3a74f37de9ea788efeb449b388e4aeca2f696a85
|
[
"MIT"
] | null | null | null |
alpha/admin.py
|
bebutler1/SoloQ-DTC-Website-writtten-with-Django-
|
3a74f37de9ea788efeb449b388e4aeca2f696a85
|
[
"MIT"
] | null | null | null |
alpha/admin.py
|
bebutler1/SoloQ-DTC-Website-writtten-with-Django-
|
3a74f37de9ea788efeb449b388e4aeca2f696a85
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from alpha.models import DataForm
# Register your models here.
admin.site.register(DataForm) #adds the ability to add forms from the admin site
| 29.666667
| 80
| 0.808989
| 28
| 178
| 5.142857
| 0.642857
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140449
| 178
| 5
| 81
| 35.6
| 0.941176
| 0.421348
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c8bf07cb6e5cb2ab13ff6d2d76cd3459f36cb302
| 315
|
py
|
Python
|
day18/src_test.py
|
arcadecoffee/advent-2015
|
711ac1061f661c07d511c0b2c77c0b111a22ff44
|
[
"MIT"
] | null | null | null |
day18/src_test.py
|
arcadecoffee/advent-2015
|
711ac1061f661c07d511c0b2c77c0b111a22ff44
|
[
"MIT"
] | null | null | null |
day18/src_test.py
|
arcadecoffee/advent-2015
|
711ac1061f661c07d511c0b2c77c0b111a22ff44
|
[
"MIT"
] | null | null | null |
import day18.src as src
def test_part1():
assert src.part1(src.TEST1_INPUT_FILE, 4) == 4
def test_part1_full():
assert src.part1(src.FULL_INPUT_FILE) == 768
def test_part2():
assert src.part2(src.TEST2_INPUT_FILE, 5) == 17
def test_part2_full():
assert src.part2(src.FULL_INPUT_FILE) == 781
| 17.5
| 51
| 0.704762
| 53
| 315
| 3.924528
| 0.358491
| 0.134615
| 0.115385
| 0.163462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088123
| 0.171429
| 315
| 17
| 52
| 18.529412
| 0.708812
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.444444
| 1
| 0.444444
| true
| 0
| 0.111111
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
cdbd3eb4f83647434faa7735ed8056990f115b81
| 25
|
py
|
Python
|
Python/HelloWorld_Spanish.py
|
saurabhcommand/Hello-world
|
647bad9da901a52d455f05ecc37c6823c22dc77e
|
[
"MIT"
] | 1,428
|
2018-10-03T15:15:17.000Z
|
2019-03-31T18:38:36.000Z
|
Python/HelloWorld_Spanish.py
|
saurabhcommand/Hello-world
|
647bad9da901a52d455f05ecc37c6823c22dc77e
|
[
"MIT"
] | 1,162
|
2018-10-03T15:05:49.000Z
|
2018-10-18T14:17:52.000Z
|
Python/HelloWorld_Spanish.py
|
saurabhcommand/Hello-world
|
647bad9da901a52d455f05ecc37c6823c22dc77e
|
[
"MIT"
] | 3,909
|
2018-10-03T15:07:19.000Z
|
2019-03-31T18:39:08.000Z
|
print("Hola, el mundo!")
| 12.5
| 24
| 0.64
| 4
| 25
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12
| 25
| 1
| 25
| 25
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
cdd89a546262182464a495eb440cf2815cf8ad4c
| 100
|
py
|
Python
|
json_to_models/__init__.py
|
bogdandm/attrs-api-client
|
ebf9d0a72795f8a5f4667e4c41bca031a5ee798c
|
[
"MIT"
] | null | null | null |
json_to_models/__init__.py
|
bogdandm/attrs-api-client
|
ebf9d0a72795f8a5f4667e4c41bca031a5ee798c
|
[
"MIT"
] | 5
|
2018-09-25T12:19:58.000Z
|
2018-10-05T13:50:13.000Z
|
json_to_models/__init__.py
|
bogdandm/attrs-api-client
|
ebf9d0a72795f8a5f4667e4c41bca031a5ee798c
|
[
"MIT"
] | null | null | null |
from pkg_resources import parse_version
__version__ = "0.2.5"
VERSION = parse_version(__version__)
| 20
| 39
| 0.81
| 14
| 100
| 5
| 0.642857
| 0.342857
| 0.542857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033708
| 0.11
| 100
| 4
| 40
| 25
| 0.752809
| 0
| 0
| 0
| 0
| 0
| 0.05
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
cde13710c4974047e3f6ea8fb98855b31f72111b
| 5,572
|
py
|
Python
|
tests/unit/core/metrics/test_nlp_metrics.py
|
Aptus-John/whylogs
|
11e2697be33b7c3045cde6e24f58425ed0a66766
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/core/metrics/test_nlp_metrics.py
|
Aptus-John/whylogs
|
11e2697be33b7c3045cde6e24f58425ed0a66766
|
[
"Apache-2.0"
] | 1
|
2021-10-01T19:51:04.000Z
|
2021-10-01T19:51:04.000Z
|
tests/unit/core/metrics/test_nlp_metrics.py
|
jamie256/whylogs
|
e4b8288a61c00fbe033c0248a015e6e91ee6c8b0
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from whylogs.core.metrics.nlp_metrics import NLPMetrics
from whylogs.proto import NLPMetricsMessage
def test_nlp_metrics():
nlp_metric = NLPMetrics()
assert nlp_metric.mer is not None
assert nlp_metric.mer.count == 0
nlp_metric.update("hello", "hello")
assert nlp_metric.mer.histogram.get_min_value() == 0.0
assert nlp_metric.mer.histogram.get_max_value() == 0.0
assert nlp_metric.mer.histogram.get_quantiles([0.25, 0.5, 0.75]) == [0.0, 0.0, 0.0]
assert nlp_metric.mer.count == 1
assert nlp_metric.wer.histogram.get_min_value() == 0.0
assert nlp_metric.wer.histogram.get_max_value() == 0.0
assert nlp_metric.wer.histogram.get_quantiles([0.25, 0.5, 0.75]) == [0.0, 0.0, 0.0]
assert nlp_metric.wer.count == 1
assert nlp_metric.wil.histogram.get_min_value() == 0.0
assert nlp_metric.wil.histogram.get_max_value() == 0.0
assert nlp_metric.wil.histogram.get_quantiles([0.25, 0.5, 0.75]) == [0.0, 0.0, 0.0]
assert nlp_metric.wil.count == 1
def test_nlp_metrics_message():
nlp_metric = NLPMetrics()
assert nlp_metric.mer is not None
assert nlp_metric.mer.count == 0
nlp_metric.update(["hello brother"], ["hello sister"])
nlp_metric.update(["bye brother"], ["bye sister"])
nlp_metric.update(["what up brother"], ["what up ii sister"])
assert nlp_metric.mer.histogram.get_min_value() == 0.5
assert nlp_metric.mer.histogram.get_max_value() == 0.5
assert nlp_metric.mer.histogram.get_quantiles([0.25, 0.5, 0.75]) == [0.5, 0.5, 0.5000]
assert nlp_metric.mer.count == 3
assert nlp_metric.wer.histogram.get_min_value() == 0.5
assert nlp_metric.wer.histogram.get_max_value() == 0.5
assert nlp_metric.wer.histogram.get_quantiles([0.25, 0.5, 0.75]) == [0.5, 0.5, 0.5000]
assert nlp_metric.wer.count == 3
assert pytest.approx(nlp_metric.wil.histogram.get_min_value(), 0.001) == 0.6666
assert nlp_metric.wil.histogram.get_max_value() == 0.75
assert pytest.approx(nlp_metric.wil.histogram.get_quantiles([0.25, 0.5, 0.75]), 0.001) == [0.666666, 0.75, 0.75]
assert nlp_metric.wil.count == 3
nlp_message = nlp_metric.to_protobuf()
nlp_metric_new = NLPMetrics.from_protobuf(nlp_message)
assert nlp_metric_new.wil.histogram.get_min_value() == nlp_metric.wil.histogram.get_min_value()
assert nlp_metric_new.wil.histogram.get_max_value() == nlp_metric.wil.histogram.get_max_value()
assert nlp_metric_new.wil.histogram.get_quantiles([0.25, 0.5, 0.75]) == nlp_metric.wil.histogram.get_quantiles([0.25, 0.5, 0.75])
assert nlp_metric_new.wil.count == nlp_metric.wil.count
assert nlp_metric_new.mer.histogram.get_min_value() == nlp_metric.mer.histogram.get_min_value()
assert nlp_metric_new.mer.histogram.get_max_value() == nlp_metric.mer.histogram.get_max_value()
assert nlp_metric_new.mer.histogram.get_quantiles([0.25, 0.5, 0.75]) == nlp_metric.mer.histogram.get_quantiles([0.25, 0.5, 0.75])
assert nlp_metric_new.mer.count == nlp_metric.mer.count
assert nlp_metric_new.wer.histogram.get_min_value() == nlp_metric.wer.histogram.get_min_value()
assert nlp_metric_new.wer.histogram.get_max_value() == nlp_metric.wer.histogram.get_max_value()
assert nlp_metric_new.wer.histogram.get_quantiles([0.25, 0.5, 0.75]) == nlp_metric.wer.histogram.get_quantiles([0.25, 0.5, 0.75])
assert nlp_metric_new.wer.count == nlp_metric.wer.count
def test_nlp_merge():
nlp_metric = NLPMetrics()
nlp_metric.update("hello", "hello")
nlp_initial_test = None
nlp_merge_metrics = nlp_metric.merge(nlp_initial_test)
assert nlp_merge_metrics.mer.histogram.get_min_value() == 0.0
assert nlp_merge_metrics.mer.histogram.get_max_value() == 0.0
assert nlp_merge_metrics.mer.histogram.get_quantiles([0.25, 0.5, 0.75]) == [0.0, 0.0, 0.0]
assert nlp_merge_metrics.mer.count == 1
assert nlp_merge_metrics.wer.histogram.get_min_value() == 0.0
assert nlp_merge_metrics.wer.histogram.get_max_value() == 0.0
assert nlp_merge_metrics.wer.histogram.get_quantiles([0.25, 0.5, 0.75]) == [0.0, 0.0, 0.0]
assert nlp_merge_metrics.wer.count == 1
assert nlp_merge_metrics.wil.histogram.get_min_value() == 0.0
assert nlp_merge_metrics.wil.histogram.get_max_value() == 0.0
assert nlp_merge_metrics.wil.histogram.get_quantiles([0.25, 0.5, 0.75]) == [0.0, 0.0, 0.0]
assert nlp_merge_metrics.wil.count == 1
nlp_metric_2 = NLPMetrics()
nlp_metric_2.update(["hello brother"], ["hello sister"])
nlp_metric_2.update(["bye brother"], ["bye sister"])
nlp_metric_2.update(["what up brother"], ["what up ii sister"])
nlp_merge_metrics = nlp_merge_metrics.merge(nlp_metric_2)
assert nlp_merge_metrics.mer.histogram.get_min_value() == 0.0
assert nlp_merge_metrics.mer.histogram.get_max_value() == 0.5
assert nlp_merge_metrics.mer.histogram.get_quantiles([0.25, 0.5, 0.75]) == [0.5, 0.5, 0.5000]
assert nlp_merge_metrics.mer.count == 4
assert nlp_merge_metrics.wer.histogram.get_min_value() == 0.0
assert nlp_merge_metrics.wer.histogram.get_max_value() == 0.5
assert nlp_merge_metrics.wer.histogram.get_quantiles([0.25, 0.5, 0.75]) == [0.5, 0.5, 0.5000]
assert nlp_merge_metrics.wer.count == 4
assert pytest.approx(nlp_merge_metrics.wil.histogram.get_min_value(), 0.001) == 0.0
assert nlp_merge_metrics.wil.histogram.get_max_value() == 0.75
assert pytest.approx(nlp_merge_metrics.wil.histogram.get_quantiles([0.25, 0.5, 0.75]), 0.001) == [0.666666, 0.75, 0.75]
assert nlp_merge_metrics.wil.count == 4
| 48.877193
| 133
| 0.718413
| 949
| 5,572
| 3.958904
| 0.056902
| 0.162896
| 0.151717
| 0.12297
| 0.879957
| 0.853873
| 0.822997
| 0.738089
| 0.630024
| 0.501996
| 0
| 0.068422
| 0.134422
| 5,572
| 113
| 134
| 49.309735
| 0.710554
| 0
| 0
| 0.149425
| 0
| 0
| 0.031587
| 0
| 0
| 0
| 0
| 0
| 0.735632
| 1
| 0.034483
| false
| 0
| 0.034483
| 0
| 0.068966
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
cdf59254a67b64ad6bced72965ed8f03bef9e3df
| 3,626
|
py
|
Python
|
unit_tests/services/test_search_by_postcode.py
|
LandRegistry/maintain-frontend
|
d92446a9972ebbcd9a43a7a7444a528aa2f30bf7
|
[
"MIT"
] | 1
|
2019-10-03T13:58:29.000Z
|
2019-10-03T13:58:29.000Z
|
unit_tests/services/test_search_by_postcode.py
|
LandRegistry/maintain-frontend
|
d92446a9972ebbcd9a43a7a7444a528aa2f30bf7
|
[
"MIT"
] | null | null | null |
unit_tests/services/test_search_by_postcode.py
|
LandRegistry/maintain-frontend
|
d92446a9972ebbcd9a43a7a7444a528aa2f30bf7
|
[
"MIT"
] | 1
|
2021-04-11T05:24:57.000Z
|
2021-04-11T05:24:57.000Z
|
from unittest.mock import Mock, patch
from unittest import TestCase
from maintain_frontend.exceptions import ApplicationError
from maintain_frontend.services.search_by_postcode import SearchByPostcode
class TestSearchByPostcode(TestCase):
SEARCH_BY_POSTCODE_PATH = 'maintain_frontend.services.search_by_postcode'
def setUp(self):
self.search_by_postcode = SearchByPostcode(Mock())
@patch("{}.AddressesService".format(SEARCH_BY_POSTCODE_PATH))
def test_search_by_postcode_no_search_query(self, mock_addresses_service):
response = self.search_by_postcode.process(None, None)
self.assertEqual(response['search_message'], "Enter a postcode")
self.assertEqual(response['inline_message'],
"Search for a different postcode if the address you need is not listed.")
self.assertEqual(response['status'], "error")
mock_addresses_service.assert_not_called()
@patch("{}.AddressesService".format(SEARCH_BY_POSTCODE_PATH))
def test_search_by_postcode_invalid_query(self, mock_addresses_service):
response = self.search_by_postcode.process('ABC', None)
self.assertEqual(response['search_message'], "No match found")
self.assertEqual(response['inline_message'], "Try a different postcode")
self.assertEqual(response['status'], "error")
mock_addresses_service.get_by.assert_not_called()
@patch("{}.AddressesService".format(SEARCH_BY_POSTCODE_PATH))
def test_search_by_postcode_valid(self, mock_addresses_service):
response = Mock()
response.status_code = 200
response.json.return_value = "abc"
mock_addresses_service.return_value.get_by.return_value = response
response = self.search_by_postcode.process('AB1 2CD', None)
self.assertEqual(response['status'], "success")
self.assertEqual(response['data'], "abc")
mock_addresses_service.get_by.assert_called()
@patch("{}.AddressesService".format(SEARCH_BY_POSTCODE_PATH))
def test_search_by_postcode_valid_400_response(self, mock_addresses_service):
response = Mock()
response.status_code = 400
mock_addresses_service.return_value.get_by.return_value = response
response = self.search_by_postcode.process('AB1 2CD', None)
self.assertEqual(response['status'], "error")
self.assertEqual(response['search_message'], "No match found")
self.assertEqual(response['inline_message'], "Try a different postcode")
mock_addresses_service.get_by.assert_called()
@patch("{}.AddressesService".format(SEARCH_BY_POSTCODE_PATH))
def test_search_by_postcode_valid_404_response(self, mock_addresses_service):
response = Mock()
response.status_code = 404
mock_addresses_service.return_value.get_by.return_value = response
response = self.search_by_postcode.process('AB1 2CD', None)
self.assertEqual(response['status'], "error")
self.assertEqual(response['search_message'], "No match found")
self.assertEqual(response['inline_message'], "Try a different postcode")
mock_addresses_service.get_by.assert_called()
@patch("{}.AddressesService".format(SEARCH_BY_POSTCODE_PATH))
def test_search_by_postcode_valid_500_response(self, mock_addresses_service):
response = Mock()
response.status_code = 500
mock_addresses_service.return_value.get_by.return_value = response
self.assertRaises(ApplicationError, self.search_by_postcode.process, 'AB1 2CD', None)
mock_addresses_service.get_by.assert_called()
| 43.686747
| 98
| 0.732763
| 430
| 3,626
| 5.84186
| 0.162791
| 0.070064
| 0.140127
| 0.055732
| 0.821258
| 0.806927
| 0.755573
| 0.740844
| 0.683121
| 0.661624
| 0
| 0.009596
| 0.166575
| 3,626
| 82
| 99
| 44.219512
| 0.821641
| 0
| 0
| 0.516667
| 0
| 0
| 0.156922
| 0.01241
| 0
| 0
| 0
| 0
| 0.35
| 1
| 0.116667
| false
| 0
| 0.066667
| 0
| 0.216667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
cdf6b7b9f280fa828d9e160f5a8a587ad01e4bbe
| 78
|
py
|
Python
|
Python3-built-in-functions/0x30type.py
|
DropsDevopsOrg/PythonWiki
|
0c344edad37ed34c03cf066df991922cb4bdeee0
|
[
"Apache-2.0"
] | 15
|
2019-04-09T04:20:21.000Z
|
2022-02-08T20:33:42.000Z
|
Python3-built-in-functions/0x30type.py
|
sep8dog/PythonWiki
|
0c344edad37ed34c03cf066df991922cb4bdeee0
|
[
"Apache-2.0"
] | 1
|
2019-07-22T07:27:10.000Z
|
2020-10-09T08:00:17.000Z
|
Python3-built-in-functions/0x30type.py
|
sep8dog/PythonWiki
|
0c344edad37ed34c03cf066df991922cb4bdeee0
|
[
"Apache-2.0"
] | 16
|
2019-09-13T14:06:42.000Z
|
2022-03-15T06:02:01.000Z
|
print(type(1))
print(type('runnob'))
print(type([2]))
print(type({0:'zero'}))
| 15.6
| 23
| 0.628205
| 13
| 78
| 3.769231
| 0.538462
| 0.734694
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.040541
| 0.051282
| 78
| 4
| 24
| 19.5
| 0.621622
| 0
| 0
| 0
| 0
| 0
| 0.128205
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
a80ecc327889c09077e8f263103ed3f5998628cc
| 24
|
py
|
Python
|
code-metrics-api/config.py
|
clodonil/audit-aws-pipeline
|
44a41c63fc84096c2327bf6d34909dff1ca3fdab
|
[
"Apache-2.0"
] | null | null | null |
code-metrics-api/config.py
|
clodonil/audit-aws-pipeline
|
44a41c63fc84096c2327bf6d34909dff1ca3fdab
|
[
"Apache-2.0"
] | null | null | null |
code-metrics-api/config.py
|
clodonil/audit-aws-pipeline
|
44a41c63fc84096c2327bf6d34909dff1ca3fdab
|
[
"Apache-2.0"
] | null | null | null |
import os
PORT = 8080
| 4.8
| 11
| 0.666667
| 4
| 24
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.235294
| 0.291667
| 24
| 4
| 12
| 6
| 0.705882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
a8344b7a60027d1bc9276da9a5ff509be9c92f83
| 503
|
py
|
Python
|
dbsync/__init__.py
|
zhoubangtao/dbsync
|
5b7231a245b903f00c12a3bc0a50e7bc171bc9a8
|
[
"Apache-2.0"
] | null | null | null |
dbsync/__init__.py
|
zhoubangtao/dbsync
|
5b7231a245b903f00c12a3bc0a50e7bc171bc9a8
|
[
"Apache-2.0"
] | null | null | null |
dbsync/__init__.py
|
zhoubangtao/dbsync
|
5b7231a245b903f00c12a3bc0a50e7bc171bc9a8
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding:utf-8 -*-
import logging
import stores
release = "0.1.1"
logging.getLogger("dbsync")
class DBSync():
def __init__(self):
pass
def syncer(self, syncer):
return self
def validator(self, validator):
return self
def serializer(self, serializer):
return self
def notifier(self, notifier):
return self
def start(self):
pass
def stop(self):
pass
class Engine():
def __init__(self):
pass
| 13.236842
| 37
| 0.580517
| 58
| 503
| 4.896552
| 0.413793
| 0.112676
| 0.183099
| 0.105634
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011594
| 0.314115
| 503
| 37
| 38
| 13.594595
| 0.811594
| 0.039761
| 0
| 0.454545
| 0
| 0
| 0.022869
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.363636
| false
| 0.181818
| 0.090909
| 0.181818
| 0.727273
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 5
|
b52b6c2562493534775a94335c6f84564810694b
| 194
|
py
|
Python
|
mojo/navigation/apps.py
|
django-mojo/mojo-navigation
|
cb4c838239b02dfaefc244c3e8c9f2f194efa5e7
|
[
"BSD-3-Clause"
] | null | null | null |
mojo/navigation/apps.py
|
django-mojo/mojo-navigation
|
cb4c838239b02dfaefc244c3e8c9f2f194efa5e7
|
[
"BSD-3-Clause"
] | null | null | null |
mojo/navigation/apps.py
|
django-mojo/mojo-navigation
|
cb4c838239b02dfaefc244c3e8c9f2f194efa5e7
|
[
"BSD-3-Clause"
] | null | null | null |
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class MojoNavigation(AppConfig):
    """Django application configuration for the ``mojo.navigation`` package."""

    # Full dotted Python path to the application package.
    name = 'mojo.navigation'
    # Human-readable name, lazily marked for translation.
    verbose_name = _("Mojo navigation")
| 24.25
| 55
| 0.773196
| 23
| 194
| 6.347826
| 0.695652
| 0.136986
| 0.246575
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149485
| 194
| 7
| 56
| 27.714286
| 0.884848
| 0
| 0
| 0
| 0
| 0
| 0.154639
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b55ae133e8f9e90d5405cbbab181e2dc8719fcfe
| 1,450
|
py
|
Python
|
pylaunches/objects/event.py
|
ludeeus/pylaunches
|
8f44f9b6084bb93f0429e17e94deb7af47a01a9b
|
[
"MIT"
] | 1
|
2021-11-14T19:08:52.000Z
|
2021-11-14T19:08:52.000Z
|
pylaunches/objects/event.py
|
ludeeus/pylaunches
|
8f44f9b6084bb93f0429e17e94deb7af47a01a9b
|
[
"MIT"
] | 2
|
2020-10-30T19:13:16.000Z
|
2022-01-25T17:00:36.000Z
|
pylaunches/objects/event.py
|
ludeeus/pylaunches
|
8f44f9b6084bb93f0429e17e94deb7af47a01a9b
|
[
"MIT"
] | 4
|
2019-03-04T03:05:53.000Z
|
2022-01-25T02:01:28.000Z
|
from pylaunches.objects.data import PyLaunchesData
from pylaunches.objects.launch import Launch
class EventType(PyLaunchesData):
    """Event type data object.

    Thin read-only view over the underlying ``_data`` mapping; each
    property returns ``None`` when its key is absent.
    """

    @property
    def id(self) -> str:
        """Identifier of the event type."""
        return self._data.get("id")

    @property
    def name(self) -> str:
        """Display name of the event type."""
        return self._data.get("name")
class Event(PyLaunchesData):
    """Event data object.

    Read-only view over the underlying ``_data`` mapping; each property
    returns ``None`` when its key is absent.

    BUG FIX: ``launches`` was annotated ``-> Launch`` but returns a list
    of ``Launch`` objects; the annotation now reflects that.
    """

    @property
    def id(self) -> str:
        """Event identifier."""
        return self._data.get("id")

    @property
    def url(self) -> str:
        """API URL for this event."""
        return self._data.get("url")

    @property
    def slug(self) -> str:
        """URL slug for this event."""
        return self._data.get("slug")

    @property
    def name(self) -> str:
        """Event name."""
        return self._data.get("name")

    @property
    def type(self) -> EventType:
        """Event type wrapper (built from an empty dict if missing)."""
        return EventType(self._data.get("type", {}))

    @property
    def description(self) -> str:
        """Event description."""
        return self._data.get("description")

    @property
    def location(self) -> str:
        """Event location."""
        return self._data.get("location")

    @property
    def news_url(self) -> str:
        """Link to related news coverage."""
        return self._data.get("news_url")

    @property
    def video_url(self) -> str:
        """Link to related video."""
        return self._data.get("video_url")

    @property
    def feature_image(self) -> str:
        """URL of the feature image."""
        return self._data.get("feature_image")

    @property
    def date(self) -> str:
        """Event date string as provided by the API."""
        return self._data.get("date")

    @property
    def launches(self) -> "list[Launch]":
        """Launches associated with this event (empty list if none)."""
        return [Launch(x) for x in self._data.get("launches", [])]
| 21.641791
| 66
| 0.602069
| 177
| 1,450
| 4.819209
| 0.186441
| 0.180539
| 0.180539
| 0.239156
| 0.464244
| 0.464244
| 0.323564
| 0.228605
| 0.228605
| 0.228605
| 0
| 0
| 0.257241
| 1,450
| 66
| 67
| 21.969697
| 0.792015
| 0.028966
| 0
| 0.478261
| 0
| 0
| 0.060129
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.304348
| false
| 0
| 0.043478
| 0.304348
| 0.695652
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
b56800f18e1be509be75b18af2a779ad101b30fb
| 258
|
py
|
Python
|
scanpy/api/pp.py
|
ckmah/scanpy
|
300435f30f6805e27ba59a7314fe06ef21d03c17
|
[
"BSD-3-Clause"
] | null | null | null |
scanpy/api/pp.py
|
ckmah/scanpy
|
300435f30f6805e27ba59a7314fe06ef21d03c17
|
[
"BSD-3-Clause"
] | null | null | null |
scanpy/api/pp.py
|
ckmah/scanpy
|
300435f30f6805e27ba59a7314fe06ef21d03c17
|
[
"BSD-3-Clause"
] | 1
|
2019-02-18T07:39:59.000Z
|
2019-02-18T07:39:59.000Z
|
# we need this dummy module as otherwise, sphinx cannot produce the overview of
# the api module
#
# importing all ("*") is ok, as scanpy.preprocessing.__init__ is carefully maintained
# to only contain the functions of the api
from ..preprocessing import *
| 36.857143
| 85
| 0.767442
| 38
| 258
| 5.105263
| 0.763158
| 0.051546
| 0.082474
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 258
| 6
| 86
| 43
| 0.902326
| 0.841085
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b577f4812e01ba3322c621f94efdd12fe22b4fc2
| 66
|
py
|
Python
|
app/models/__init__.py
|
ZehLuckmann/integracao
|
030f5f696367857b9a47dfec703f2a1b3021a26e
|
[
"MIT"
] | 2
|
2019-03-02T22:18:18.000Z
|
2020-05-14T21:21:35.000Z
|
app/models/__init__.py
|
ZehLuckmann/integracao
|
030f5f696367857b9a47dfec703f2a1b3021a26e
|
[
"MIT"
] | null | null | null |
app/models/__init__.py
|
ZehLuckmann/integracao
|
030f5f696367857b9a47dfec703f2a1b3021a26e
|
[
"MIT"
] | null | null | null |
# app/models/__init__.py
# coding:utf-8
from app.models import *
| 13.2
| 24
| 0.727273
| 11
| 66
| 4
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017544
| 0.136364
| 66
| 4
| 25
| 16.5
| 0.754386
| 0.530303
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b595a1bfc0564726a536742f218bb4d060299587
| 75
|
py
|
Python
|
main.py
|
dwihdyn/alpaca-algo-community
|
114b0831d88655765609291a008c5b37574a8b32
|
[
"MIT"
] | 5
|
2019-02-04T11:15:38.000Z
|
2020-07-30T15:02:04.000Z
|
main.py
|
klepsydra/alpaca-algo-community
|
4c3fdbbd4b4e91121c09c0a32e42527b9f11e601
|
[
"MIT"
] | null | null | null |
main.py
|
klepsydra/alpaca-algo-community
|
4c3fdbbd4b4e91121c09c0a32e42527b9f11e601
|
[
"MIT"
] | 5
|
2020-02-17T01:36:16.000Z
|
2020-09-19T04:27:19.000Z
|
from algos import run_algo
# Entry point: run the algorithm only when executed as a script,
# not when this module is imported.
if __name__ == '__main__':
    run_algo.main()
| 15
| 26
| 0.706667
| 11
| 75
| 3.909091
| 0.727273
| 0.325581
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186667
| 75
| 4
| 27
| 18.75
| 0.704918
| 0
| 0
| 0
| 0
| 0
| 0.106667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
b59a16277a9753688f092e5dd650e044e30753da
| 54
|
py
|
Python
|
models/__init__.py
|
AndreaCossu/reproducible-continual-learning
|
0badbec82f179c0673767072be4d3bc59c112352
|
[
"MIT"
] | null | null | null |
models/__init__.py
|
AndreaCossu/reproducible-continual-learning
|
0badbec82f179c0673767072be4d3bc59c112352
|
[
"MIT"
] | null | null | null |
models/__init__.py
|
AndreaCossu/reproducible-continual-learning
|
0badbec82f179c0673767072be4d3bc59c112352
|
[
"MIT"
] | null | null | null |
from .models import *
from .reduced_resnet18 import *
| 18
| 31
| 0.777778
| 7
| 54
| 5.857143
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.043478
| 0.148148
| 54
| 2
| 32
| 27
| 0.847826
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b5af967594e4a74de97f9ebbf43ea24009dbd804
| 88
|
py
|
Python
|
EduRec/meta/__init__.py
|
bigdata-ustc/EduRec
|
133f3fd0ce74b7c8f36f0e5a2cf71bbf759ee4c8
|
[
"MIT"
] | 2
|
2021-06-06T13:06:20.000Z
|
2022-02-17T06:17:29.000Z
|
EduRec/meta/__init__.py
|
bigdata-ustc/EduRec
|
133f3fd0ce74b7c8f36f0e5a2cf71bbf759ee4c8
|
[
"MIT"
] | null | null | null |
EduRec/meta/__init__.py
|
bigdata-ustc/EduRec
|
133f3fd0ce74b7c8f36f0e5a2cf71bbf759ee4c8
|
[
"MIT"
] | 1
|
2022-03-12T00:27:52.000Z
|
2022-03-12T00:27:52.000Z
|
# coding: utf-8
# 2021/2/10 @ tongshiwei
from .MeasurementModel import MeasurementModel
| 22
| 46
| 0.784091
| 11
| 88
| 6.272727
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103896
| 0.125
| 88
| 4
| 46
| 22
| 0.792208
| 0.409091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.